code stringlengths 2 1.05M | repo_name stringlengths 5 101 | path stringlengths 4 991 | language stringclasses 3 values | license stringclasses 5 values | size int64 2 1.05M |
|---|---|---|---|---|---|
#
# (c) Jan Gehring <jan.gehring@gmail.com>
#
# vim: set ts=2 sw=2 tw=0:
# vim: set expandtab:
package Rex::Virtualization::LibVirt::start;
use strict;
use warnings;
our $VERSION = '0.56.1'; # VERSION
use Rex::Logger;
use Rex::Helper::Run;
# Starts a LibVirt domain via "virsh start".
#
# Arguments:
#   $class - package name (class-method invocation)
#   $arg1  - the VM (domain) name; mandatory
#   %opt   - additional options (currently unused)
#
# Dies if no VM name is given or if virsh reports an error.
sub execute {
  my ( $class, $arg1, %opt ) = @_;

  # Validate the argument before doing any work. The original queried
  # "virsh uri" first, wasting a call when no VM name was supplied.
  unless ($arg1) {
    die("You have to define the vm name!");
  }

  my $virt_settings = Rex::Config->get("virtualization");

  Rex::Logger::debug("Starting vm: $arg1");

  # Use the configured connection URI when available, otherwise ask virsh.
  chomp( my $uri =
      ref($virt_settings) ? $virt_settings->{connect} : i_run "virsh uri" );

  my $dom = $arg1;
  Rex::Logger::debug("starting domain: $dom");

  # (The original re-checked $dom here, but it equals the already-validated
  # $arg1, so that branch was unreachable and has been removed.)
  my $output = i_run "virsh -c $uri start '$dom' 2>&1";
  if ( $? != 0 ) {
    die("Error starting vm $dom\nError: $output");
  }

  return;
}
1;
| gitpan/Rex | lib/Rex/Virtualization/LibVirt/start.pm | Perl | apache-2.0 | 821 |
#!/usr/local/ensembl/bin/perl -w

# dumpTreeMSA_id.pl
#
# For each tree id listed in --tree_id_list, dump the multiple alignment
# (EMF), the newick/NHX trees and amino-acid/CDS FASTA files into the same
# directory as the id-list file.

use strict;

use Bio::EnsEMBL::Compara::DBSQL::DBAdaptor;
use Bio::AlignIO;
use Bio::EnsEMBL::Registry;
use File::Spec;
use Getopt::Long;

# Command-line options (see the --help text below for details).
my $tree_id_list;     # file containing one tree/node id per line
my $url;              # compara database url
my $help = 0;
my $aln_out;          # alignment output filename stub
my $fasta_out;        # amino-acid FASTA output filename stub
my $fasta_cds_out;    # CDS FASTA output filename stub
my $nh_out;           # newick output filename stub
my $nhx_out;          # extended-newick output filename stub
my $verbose = 0;
my $aa = 1;           # 1 = dump alignment as amino acids, 0 = DNA
my $nc = 0;           # 1 = ids refer to ncRNA trees rather than protein trees
my %compara_conf;     # connection parameters when parsing the url by hand

$| = 1;    # autoflush STDOUT so progress is visible immediately

GetOptions('help' => \$help,
           'tree_id_list|list=s' => \$tree_id_list,
           'url=s' => \$url,
           'a|aln_out=s' => \$aln_out,
           'f|fasta_out=s' => \$fasta_out,
           'fc|fasta_cds_out=s' => \$fasta_cds_out,
           'nh|nh_out=s' => \$nh_out,
           'nhx|nhx_out=s' => \$nhx_out,
           'nc=s' => \$nc,
           'verbose=s' => \$verbose,
           'aa' => \$aa);

if ($help) {
  print "
$0 --tree_id_list file.txt --url mysql://ensro\@ens-livemirror:3306/42
--tree_id_list list of tree_ids (node_ids that are root_id=parent_id in protein_tree_node)
--url string database url location of the form,
mysql://username[:password]\@host[:port]/[release_version]
--aln_out string alignment output filename (extension .emf.gz will be added)
--nh_out string newick output filename (extension .emf.gz will be added)
--nhx_out string extended newick output filename (extension .emf.gz will be added)
--aa dump alignment in amino acid (default is in DNA)
This scripts assumes that the compara db and all the core dbs related
to it are on the same server
\n";
  exit 0;
}

# NOTE(review): the message below says "integer", but --tree_id_list is
# actually a filename — consider rewording the error text.
unless (defined $tree_id_list && (-e $tree_id_list)) {
  print "\n--tree_id_list is not defined properly. It should be an integer > 0\nEXIT 1\n\n";
  exit 1;
}
unless (defined $url) {
  print "\n--url is not defined. It should be something like mysql://ensro\@ens-livemirror:3306/42\nEXIT 2\n\n";
  exit 2;
}
# Connect to the Compara database: prefer the Hive URLFactory when it is
# installed, otherwise parse the URL by hand and build a DBAdaptor directly.
my $dba;
eval { require Bio::EnsEMBL::Hive::URLFactory; };
if ($@) {
    # Crude alternative to parsing the url
    # format is mysql://user[:pass]@host/dbname
    $url =~ /mysql\:\/\/(\S+)\@(\S+)\/(\S+)/g;
    my ($myuserpass, $myhost, $mydbname) = ($1, $2, $3);
    my ($myuser, $mypass);
    if ($myuserpass =~ /(\S+)\:(\S+)/) {
        $myuser = $1;
        $mypass = $2;    # BUG FIX: was "$1", which set the password to the username
    } else {
        $myuser = $myuserpass;
    }
    $compara_conf{-user}   = $myuser;
    $compara_conf{-pass}   = $mypass if (defined($mypass));
    $compara_conf{-host}   = $myhost;
    $compara_conf{-dbname} = $mydbname;
    # Direct method call instead of the deprecated indirect "new Class" syntax.
    eval { $dba = Bio::EnsEMBL::Compara::DBSQL::DBAdaptor->new(%compara_conf); }
} else {
    $dba = Bio::EnsEMBL::Hive::URLFactory->fetch($url . ';type=compara');
}
# (Historical examples of alternative connection methods, kept for reference.)
# Bio::EnsEMBL::Registry->no_version_check(1);
# Bio::EnsEMBL::Registry->load_registry_from_url($url);
#my $dba = new Bio::EnsEMBL::Compara::DBSQL::DBAdaptor
#  (-host => 'ens-livemirror',
#   -port => 3306,
#   -user => 'ensro',
#   -dbname => 'ensembl_compara_42');
# $dba = Bio::EnsEMBL::Registry->get_DBAdaptor('Multi','compara');

# Probe the schema version; the value itself is currently unused.
my $mc;
eval { $mc = $dba->get_MetaContainer; };
unless ($@) {
    my $release = $mc->get_schema_version;
}

my $pta = $dba->get_ProteinTreeAdaptor;
my $nta = $dba->get_NCTreeAdaptor;

# Read the tree ids, one per line. Three-arg open with a lexical handle
# (the original used a two-arg open on a bareword filehandle).
open my $list_fh, '<', $tree_id_list or die "couldnt open $tree_id_list: $!\n";
my @ids;
while (my $line = <$list_fh>) {
    chomp $line;
    push @ids, $line;
}
close $list_fh;

# All output files are written next to the id-list file.
my ($treevolume, $treedirectories, $treefile) = File::Spec->splitpath($tree_id_list);
# Dump every requested tree. Protein trees come from $pta, ncRNA trees
# (selected with --nc 1) from $nta and get an "nc" prefix on their filenames.
foreach my $tree_id (@ids) {

    # BUG FIX: the original used "my $root = ... unless (1==$nc);" — "my"
    # with a statement modifier has undefined behaviour (see perlsub), so
    # declare first and assign explicitly.
    my $root;
    if (1 == $nc) {
        $root = $nta->fetch_node_by_node_id($tree_id);
        $tree_id = "nc" . $tree_id;
    } else {
        $root = $pta->fetch_node_by_node_id($tree_id);
    }

    my $fh1;
    my $fh2;
    my $fh3;
    my $fh4;
    my $fh5;

    # NOTE(review): "last" aborts the whole run as soon as one tree's files
    # already exist; "next" (skip just this tree) may have been intended —
    # confirm before changing. Behaviour preserved here.
    last if (
        (-s "$treedirectories/$tree_id.aln.emf") &&
        (-s "$treedirectories/$tree_id.nh.emf") &&
        (-s "$treedirectories/$tree_id.nhx.emf") &&
        (-s "$treedirectories/$tree_id.aa.fasta") &&
        (-s "$treedirectories/$tree_id.cds.fasta"));

    # Three-arg opens (the originals were two-arg with the mode embedded
    # in the filename string).
    if ($aln_out)       { open $fh1, '>', "$treedirectories/$tree_id.aln.emf"   or die "couldnt open $treedirectories/$tree_id.aln.emf:$!\n"; }
    if ($nh_out)        { open $fh2, '>', "$treedirectories/$tree_id.nh.emf"    or die "couldnt open $treedirectories/$tree_id.nh.emf:$!\n"; }
    if ($nhx_out)       { open $fh3, '>', "$treedirectories/$tree_id.nhx.emf"   or die "couldnt open $treedirectories/$tree_id.nhx.emf:$!\n"; }
    if ($fasta_out)     { open $fh4, '>', "$treedirectories/$tree_id.aa.fasta"  or die "couldnt open $treedirectories/$tree_id.aa.fasta:$!\n"; }
    if ($fasta_cds_out) { open $fh5, '>', "$treedirectories/$tree_id.cds.fasta" or die "couldnt open $treedirectories/$tree_id.cds.fasta:$!\n"; }

    dumpTreeMultipleAlignment($root, $fh1) if ($aln_out);
    dumpNewickTree($root, $fh2, 0)         if (defined $nh_out);
    dumpNewickTree($root, $fh3, 1)         if (defined $nhx_out);
    dumpTreeFasta($root, $fh4, 0)          if ($fasta_out);
    dumpTreeFasta($root, $fh5, 1)          if ($fasta_cds_out);

    # Free the tree structure before moving on to the next id.
    $root->release_tree;

    close $fh1 if (defined $fh1);
    close $fh2 if (defined $fh2);
    close $fh3 if (defined $fh3);
    close $fh4 if (defined $fh4);
    close $fh5 if (defined $fh5);
}
# Writes one tree's alignment in EMF format: a SEQ header line per leaf,
# then (after a DATA line) the alignment printed column by column.
# The global $aa selects amino-acid vs CDS alignment strings.
sub dumpTreeMultipleAlignment
{
  my ($tree, $out_fh) = @_;

  my @columns;    # $columns[$i] accumulates alignment column $i across all leaves
  foreach my $leaf (@{$tree->get_all_leaves}) {
    #SEQ organism peptide_stable_id chr sequence_start sequence_stop strand gene_stable_id display_label
    my $species = $leaf->genome_db->name;
    $species =~ s/ /_/;
    print $out_fh "SEQ $species " . $leaf->stable_id . " " . $leaf->chr_name . " " . $leaf->chr_start . " " . $leaf->chr_end . " " . $leaf->chr_strand . " " . $leaf->gene_member->stable_id . " " . ($leaf->gene_member->display_label || "NULL") . "\n";

    my $aln_string;
    if ($aa) {
      $aln_string = $leaf->alignment_string;
    } else {
      ($aln_string = $leaf->cdna_alignment_string) =~ s/\s+//g;
    }

    # Transpose this row into the per-column strings.
    my @residues = split //, $aln_string;
    $columns[$_] .= $residues[$_] for 0 .. $#residues;
  }

  # will need to update the script when we will produce omega score for each
  # column of the alignment.
  print $out_fh "DATA\n";
  print $out_fh join("\n", @columns);
  print $out_fh "\n//\n\n";
}
# Writes one tree in EMF format with the tree itself as the DATA payload —
# extended newick (NHX) when $use_nhx is true, plain newick otherwise.
sub dumpNewickTree
{
  my ($tree, $out_fh, $use_nhx) = @_;

  foreach my $leaf (@{$tree->get_all_leaves}) {
    #SEQ organism peptide_stable_id chr sequence_start sequence_stop strand gene_stable_id display_label
    my $species = $leaf->genome_db->name;
    $species =~ s/ /_/;
    print $out_fh "SEQ $species " . $leaf->stable_id . " " . $leaf->chr_name . " " . $leaf->chr_start . " " . $leaf->chr_end . " " . $leaf->chr_strand . " " . $leaf->gene_member->stable_id . " " . ($leaf->gene_member->display_label || "NULL") . "\n";
  }

  # will need to update the script when we will produce omega score for each
  # column of the alignment.
  print $out_fh "DATA\n";
  print {$out_fh} $use_nhx ? $tree->nhx_format : $tree->newick_simple_format;
  print $out_fh "\n//\n\n";
}
# Writes the tree's alignment as FASTA via Bio::AlignIO. When $cdna is true
# the CDS alignment is dumped, otherwise the protein alignment.
sub dumpTreeFasta
{
  my ($tree, $out_fh, $cdna) = @_;

  warn("missing tree\n") unless $tree;

  # Flat display names so the FASTA headers are the bare stable ids.
  my $simple_align = $tree->get_SimpleAlign(-id_type => 'STABLE', -CDNA => $cdna);
  $simple_align->set_displayname_flat(1);

  my $align_fh = Bio::AlignIO->newFh(-fh => $out_fh, -format => 'fasta');
  print $align_fh $simple_align;
  print $out_fh "\n//\n\n";
}
| adamsardar/perl-libs-custom | EnsemblAPI/ensembl-compara/scripts/tree/dumpTreeMSA_id.pl | Perl | apache-2.0 | 7,495 |
=head1 LICENSE
See the NOTICE file distributed with this work for additional information
regarding copyright ownership.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=cut
=head1 NAME
Bio::EnsEMBL::DBSQL::MethodLinkSpeciesSetAdaptor - Object to access data in the method_link_species_set
and method_link tables
=head1 SYNOPSIS
=head2 Retrieve data from the database
my $method_link_species_sets = $mlssa->fetch_all;
my $method_link_species_set = $mlssa->fetch_by_dbID(1);
my $method_link_species_set = $mlssa->fetch_by_method_link_type_registry_aliases(
"LASTZ_NET", ["human", "Mus musculus"]);
my $method_link_species_set = $mlssa->fetch_by_method_link_type_species_set_name(
"EPO", "mammals")
my $method_link_species_sets = $mlssa->fetch_all_by_method_link_type("LASTZ_NET");
my $method_link_species_sets = $mlssa->fetch_all_by_GenomeDB($genome_db);
my $method_link_species_sets = $mlssa->fetch_all_by_method_link_type_GenomeDB(
"PECAN", $gdb1);
my $method_link_species_set = $mlssa->fetch_by_method_link_type_GenomeDBs(
"TRANSLATED_BLAT", [$gdb1, $gdb2]);
=head2 Store/Delete data from the database
$mlssa->store($method_link_species_set);
=head1 DESCRIPTION
This object is intended for accessing data in the method_link and method_link_species_set tables.
=head1 INHERITANCE
This class inherits all the methods and attributes from Bio::EnsEMBL::DBSQL::BaseAdaptor
=head1 SEE ALSO
- Bio::EnsEMBL::Registry
- Bio::EnsEMBL::DBSQL::BaseAdaptor
- Bio::EnsEMBL::BaseAdaptor
- Bio::EnsEMBL::Compara::MethodLinkSpeciesSet
- Bio::EnsEMBL::Compara::GenomeDB
- Bio::EnsEMBL::Compara::DBSQL::GenomeDBAdaptor
=head1 APPENDIX
The rest of the documentation details each of the object methods. Internal methods are usually preceded with a _
=cut
package Bio::EnsEMBL::Compara::DBSQL::MethodLinkSpeciesSetAdaptor;
use strict;
use warnings;
use Bio::EnsEMBL::Registry;
use Bio::EnsEMBL::Compara::Method;
use Bio::EnsEMBL::Compara::MethodLinkSpeciesSet;
use Bio::EnsEMBL::Utils::Exception;
use Bio::EnsEMBL::Utils::Scalar qw(:assert);
use base ('Bio::EnsEMBL::Compara::DBSQL::BaseReleaseHistoryAdaptor', 'Bio::EnsEMBL::Compara::DBSQL::TagAdaptor');
###########################
# Automatic URL expansion #
###########################
=head2 base_dir_location
Example : $mlss_adaptor->base_dir_location();
Description : Getter/setter for the default location of the file URLs.
This is used to resolve URLs of the form #base_dir#/XXX/YYY
Returntype : String
Exceptions : none
=cut
# Getter/setter for the default location of file URLs (the "#base_dir#" stub).
# With an argument it stores the path; without one it returns the stored path,
# lazily auto-detecting it on first access.
sub base_dir_location {
    my ($self, @args) = @_;

    if (@args) {
        # Setter: remember the supplied path.
        $self->{'_base_dir_location'} = $args[0];
    }
    elsif (!defined $self->{'_base_dir_location'}) {
        # Lazy getter: detect and cache a platform-appropriate default.
        $self->base_dir_location($self->_detect_location_on_platform);
    }
    return $self->{'_base_dir_location'};
}
=head2 _detect_location_on_platform
Example : $mlss_adaptor->_detect_location_on_platform();
Description : Replaces #base_dir# stubs with the most appropriate path for each platform.
Currently understand Web (via SiteDefs) and user-defined path ($COMPARA_HAL_DIR)
Returntype : none
Exceptions : none
=cut
# Determines the platform default for the #base_dir# stub. Currently only the
# $COMPARA_HAL_DIR environment variable is understood; it must name an
# existing path. Dies when no default can be established.
sub _detect_location_on_platform {
    my ($self) = @_;

    if (defined $ENV{COMPARA_HAL_DIR}) {
        my $data_dir = $ENV{COMPARA_HAL_DIR};
        # The user-supplied override must actually exist on disk.
        die("$data_dir (defined in \$COMPARA_HAL_DIR) does not exist") unless -e $data_dir;
        return $data_dir;
    }

    die "Cannot establish a default location for files\n";
}
#############################################################
# Implements Bio::EnsEMBL::Compara::RunnableDB::ObjectStore #
#############################################################
# Class of the objects this adaptor manages (ObjectStore interface).
sub object_class {
    return 'Bio::EnsEMBL::Compara::MethodLinkSpeciesSet';
}
##################
# store* methods #
##################
=head2 store
Arg 1 : Bio::EnsEMBL::Compara::MethodLinkSpeciesSet object
Example : $mlssa->store($method_link_species_set)
Description: Stores a Bio::EnsEMBL::Compara::MethodLinkSpeciesSet object into
the database if it does not exist yet. It also stores or updates
accordingly the meta table if this object has a
max_alignment_length attribute.
Returntype : Bio::EnsEMBL::Compara::MethodLinkSpeciesSet object
Exception : Thrown if the argument is not a
Bio::EnsEMBL::Compara::MethodLinkSpeciesSet object
Exception : Thrown if the corresponding method_link is not in the
database
Caller :
=cut
sub store {
    my ($self, $mlss, $store_components_first) = @_;

    assert_ref($mlss, 'Bio::EnsEMBL::Compara::MethodLinkSpeciesSet', 'mlss');

    # Store (or reload the dbID of) the two components first: the Method...
    #FIXME: $store_components_first should be used for the method as well
    my $method = $mlss->method() or die "No Method defined, cannot store\n";
    $self->db->get_MethodAdaptor->store( $method ); # will only store if the object needs storing (type is missing) and reload the dbID otherwise
    # die "Cannot store LASTZ_NET MethodLinkSpeciesSet without a reference species" if ( $method->name eq 'LASTZ_NET' && !defined $mlss->{_reference_species} );

    # ...and the SpeciesSet.
    my $species_set = $mlss->species_set() or die "No SpeciesSet defined, cannot store\n";
    $self->db->get_SpeciesSetAdaptor->store( $species_set, $store_components_first );

    # Re-use an existing row if this (method, species_set) pair is already stored.
    my $dbID;
    if(my $already_stored_method_link_species_set = $self->fetch_by_method_link_id_species_set_id($method->dbID, $species_set->dbID) ) {
        $dbID = $already_stored_method_link_species_set->dbID;
    }

    if (!$dbID) {
        my $columns = '(method_link_species_set_id, method_link_id, species_set_id, name, source, url, first_release, last_release)';
        my $mlss_placeholders = '?, ?, ?, ?, ?, ?, ?';
        my @mlss_data = ($method->dbID, $species_set->dbID, $mlss->name || '', $mlss->source || '', $mlss->get_original_url || '', $mlss->first_release, $mlss->last_release);

        $dbID = $mlss->dbID();
        if (!$dbID) {
            ## Use conversion rule for getting a new dbID. At the moment, we use the following ranges:
            ##
            ## dna-dna alignments: method_link_id E [1-100], method_link_species_set_id E [1-10000]
            ## synteny:            method_link_id E [101-200], method_link_species_set_id E [10001-20000]
            ## homology:           method_link_id E [201-300], method_link_species_set_id E [20001-30000]
            ## families:           method_link_id E [301-400], method_link_species_set_id E [30001-40000]
            ##
            ## => the method_link_species_set_id must be between 10000 times the hundreds in the
            ## method_link_id and the next hundred.
            my $mlss_id_factor = int($method->dbID / 100);
            my $min_mlss_id = 10000 * $mlss_id_factor + 1;
            my $max_mlss_id = 10000 * ($mlss_id_factor + 1);

            # Allocate the next free id in the range and insert in one atomic
            # statement; retried up to 3 times to ride out concurrent inserts.
            # The IF() yields NULL (→ insert failure) when the range is full.
            my $val = $self->dbc->sql_helper->transaction(
                -RETRY => 3,
                -CALLBACK => sub {
                    my $sth2 = $self->prepare("INSERT INTO method_link_species_set $columns SELECT
IF(
MAX(method_link_species_set_id) = $max_mlss_id,
NULL,
IFNULL(
MAX(method_link_species_set_id) + 1,
$min_mlss_id
)
), $mlss_placeholders
FROM method_link_species_set
WHERE method_link_species_set_id BETWEEN $min_mlss_id AND $max_mlss_id
");
                    my $r = $sth2->execute(@mlss_data);
                    $dbID = $self->dbc->db_handle->last_insert_id(undef, undef, 'method_link_species_set', 'method_link_species_set_id');
                    $sth2->finish();
                    return $r;
                }
            );
        } else {
            # The caller supplied a dbID: insert the row verbatim.
            my $method_link_species_set_sql = qq{INSERT INTO method_link_species_set $columns VALUES (?, $mlss_placeholders)};
            my $sth3 = $self->prepare($method_link_species_set_sql);
            $sth3->execute($dbID, @mlss_data);
            $sth3->finish();
        }

        # Register the freshly stored object in the id-cache.
        $self->_id_cache->put($dbID, $mlss);
    }

    # Attach dbID/adaptor to the object and persist its tags.
    $self->attach( $mlss, $dbID);
    $self->sync_tags_to_database( $mlss );

    return $mlss;
}
=head2 register_url

  Arg 1      : string $url - production pipeline database url
  Arg 2      : integer $method_link_species_set_id
  Example    : $mlssa->register_url($url, $mlss_id)
  Description: Stores the production pipeline url of a Bio::EnsEMBL::Compara::MethodLinkSpeciesSet in the master database.
  Returntype : none
  Exception  :
  Caller     :

=cut
# Records the production-pipeline url of an existing MLSS row.
# Dies without a dbID; warns and returns unchanged when no url is given.
sub register_url {
    my ($self, $url, $mlss_id) = @_;

    throw("mlss dbID is required") unless ($mlss_id);
    unless (defined $url) {
        warn("mlss needs valid production pipeline url to store");
        return $self;
    }

    my $sth = $self->prepare('UPDATE method_link_species_set SET url = ? WHERE method_link_species_set_id = ?');
    $sth->execute($url, $mlss_id);
    $sth->finish();

    return $self;
}
=head2 delete
Arg 1 : integer $method_link_species_set_id
Example : $mlssa->delete(23)
Description: Deletes a Bio::EnsEMBL::Compara::MethodLinkSpeciesSet entry from
the database.
Returntype : none
Exception :
Caller :
=cut
# Deletes one MLSS row together with any tags attached to it, and evicts the
# entry from the in-memory id-cache so fetches stay consistent.
sub delete {
    my ($self, $method_link_species_set_id) = @_;

    my $sql = 'DELETE mlsst, mlss FROM method_link_species_set mlss LEFT JOIN method_link_species_set_tag mlsst USING (method_link_species_set_id) WHERE method_link_species_set_id = ?';
    my $sth = $self->prepare($sql);
    $sth->execute($method_link_species_set_id);
    $sth->finish();

    $self->_id_cache->remove($method_link_species_set_id);
}
########################################################
# Implements Bio::EnsEMBL::Compara::DBSQL::BaseAdaptor #
########################################################
# Builds MethodLinkSpeciesSet objects from a statement handle, resolving each
# row's Method and SpeciesSet through the sibling adaptors' id-caches. Rows
# whose Method or SpeciesSet cannot be found are warned about and dropped.
sub _objs_from_sth {
    my ($self, $sth) = @_;

    # Pre-fetch the dependent adaptors' caches so no per-row queries are needed.
    my $method_hash = $self->db->get_MethodAdaptor()->_id_cache;
    my $species_set_hash = $self->db->get_SpeciesSetAdaptor()->_id_cache;

    my $mlsss = $self->generic_objs_from_sth($sth, 'Bio::EnsEMBL::Compara::MethodLinkSpeciesSet', [
        'dbID',
        undef, # method_link_id itself is not put in the object, but instead the object will have "method" (see below)
        undef, # species_set_id itself is not put in the object, but instead the object will have "species_set" (see below)
        'name',
        'source',
        'url',
        '_first_release',
        '_last_release',
    ], sub {
        # Per-row callback: $a is the raw column array
        # [mlss_id, method_link_id, species_set_id, ...].
        my $a = shift;
        my $method = $method_hash->get($a->[1]);
        my $species_set = $species_set_hash->get($a->[2]);
        if (!$method) {
            warning("MethodLinkSpeciesSet with dbID=$a->[0] is missing method_link entry with dbID=$a->[1], so it will not be fetched");
        }
        if (!$species_set) {
            warning("MethodLinkSpeciesSet with dbID=$a->[0] is missing species_set(_header) entry with dbID=$a->[2], so it will not be fetched");
        }
        return {
            method => $method,
            species_set => $species_set,
        }
    });

    # Silently filter out the rows flagged above.
    return [grep {$_->{method} && $_->{species_set}} @$mlsss];
}
# Single backing table, aliased "m" to match _columns().
sub _tables {
    return ( [ 'method_link_species_set', 'm' ] );
}
# Column list selected from the "m" table alias, in object-construction order.
sub _columns {
    return map { "m.$_" } qw(
        method_link_species_set_id
        method_link_id
        species_set_id
        name
        source
        url
        first_release
        last_release
    );
}
# The (method_link_id, species_set_id) pair uniquely identifies an MLSS.
sub _unique_attributes {
    return ( 'method_link_id', 'species_set_id' );
}
###################
# fetch_* methods #
###################
=head2 fetch_all_by_species_set_id
Arg 1 : int $species_set_id
Example : my $method_link_species_set =
$mlss_adaptor->fetch_all_by_species_set_id($ss->dbID)
Description : Retrieve the Bio::EnsEMBL::Compara::MethodLinkSpeciesSet objects
corresponding to the given species_set_id
Returntype : Bio::EnsEMBL::Compara::MethodLinkSpeciesSet
Exceptions : none
=cut
# Returns all MLSSs sharing the given species_set_id, served entirely from
# the id-cache's secondary lookup (see compute_keys below in this file).
sub fetch_all_by_species_set_id {
    my ($self, $ss_dbid) = @_;
    return $self->_id_cache->get_all_by_additional_lookup('species_set_id', $ss_dbid);
}
=head2 fetch_by_method_link_id_species_set_id
Arg 1 : int $method_link_id
Arg 2 : int $species_set_id
Example : my $method_link_species_set =
$mlssa->fetch_by_method_link_id_species_set_id(1, 1234)
Description: Retrieve the Bio::EnsEMBL::Compara::MethodLinkSpeciesSet object
corresponding to the given method_link_id and species_set_id
Returntype : Bio::EnsEMBL::Compara::MethodLinkSpeciesSet object
Exceptions : Returns undef if no Bio::EnsEMBL::Compara::MethodLinkSpeciesSet
object is found
Caller :
=cut
# Returns the single MLSS for a (method_link_id, species_set_id) pair, or
# undef when that combination is not stored. Served from the id-cache.
sub fetch_by_method_link_id_species_set_id {
    my ($self, $method_link_id, $species_set_id) = @_;
    my $lookup_key = sprintf('%d_%d', $method_link_id, $species_set_id);
    return $self->_id_cache->get_by_additional_lookup('method_species_set', $lookup_key);
}
=head2 fetch_all_by_method_link_type
Arg 1 : string method_link_type
Example : my $method_link_species_sets =
$mlssa->fetch_all_by_method_link_type("LASTZ_NET")
Description: Retrieve all the Bio::EnsEMBL::Compara::MethodLinkSpeciesSet objects
corresponding to the given method_link_type
Returntype : listref of Bio::EnsEMBL::Compara::MethodLinkSpeciesSet objects
Exceptions : none
Caller :
=cut
# Returns all MLSSs using the given method type (e.g. "LASTZ_NET").
# Unknown types yield an empty list rather than an error.
sub fetch_all_by_method_link_type {
    my ($self, $method_link_type) = @_;

    my $method = $self->db->get_MethodAdaptor->fetch_by_type($method_link_type);
    return [] unless $method;

    return $self->_id_cache->get_all_by_additional_lookup('method', $method->dbID);
}
=head2 fetch_all_by_GenomeDB
Arg 1 : Bio::EnsEMBL::Compara::GenomeDB $genome_db
Example : my $method_link_species_sets = $mlssa->fetch_all_by_genome_db($genome_db)
Description: Retrieve all the Bio::EnsEMBL::Compara::MethodLinkSpeciesSet objects
which includes the genome defined by the Bio::EnsEMBL::Compara::GenomeDB
object or the genome_db_id in the species_set
Returntype : listref of Bio::EnsEMBL::Compara::MethodLinkSpeciesSet objects
Exceptions : wrong argument throws
Caller :
=cut
# Returns all MLSSs whose species-set contains the given GenomeDB.
# Throws when the argument is not a stored (dbID-bearing) GenomeDB.
sub fetch_all_by_GenomeDB {
    my ($self, $genome_db) = @_;

    assert_ref($genome_db, 'Bio::EnsEMBL::Compara::GenomeDB', 'genome_db');
    my $genome_db_id = $genome_db->dbID;
    throw "[$genome_db] must have a dbID" unless $genome_db_id;

    # compute_keys() flags member genomes with "genome_db_<id>" => 1.
    return $self->_id_cache->get_all_by_additional_lookup("genome_db_$genome_db_id", 1);
}
=head2 fetch_all_by_method_link_type_GenomeDB
Arg 1 : string method_link_type
Arg 2 : Bio::EnsEMBL::Compara::GenomeDB $genome_db
Example : my $method_link_species_sets =
$mlssa->fetch_all_by_method_link_type_GenomeDB("LASTZ_NET", $rat_genome_db)
Description: Retrieve all the Bio::EnsEMBL::Compara::MethodLinkSpeciesSet objects
corresponding to the given method_link_type and which include the
given Bio::EnsEMBL::Compara::GenomeDB
Returntype : listref of Bio::EnsEMBL::Compara::MethodLinkSpeciesSet objects
Exceptions : none
Caller :
=cut
# Returns all MLSSs of the given method type whose species-set contains the
# given GenomeDB. Throws when the GenomeDB has no dbID.
sub fetch_all_by_method_link_type_GenomeDB {
    my ($self, $method_link_type, $genome_db) = @_;

    assert_ref($genome_db, 'Bio::EnsEMBL::Compara::GenomeDB', 'genome_db');
    my $genome_db_id = $genome_db->dbID;
    throw "[$genome_db] must have a dbID" unless $genome_db_id;

    # compute_keys() flags each (genome, method type) pair with value 1.
    my $lookup_key = sprintf('genome_db_%d_method_%s', $genome_db_id, uc $method_link_type);
    return $self->_id_cache->get_all_by_additional_lookup($lookup_key, 1);
}
=head2 fetch_by_method_link_type_GenomeDBs
Arg 1 : string $method_link_type
Arg 2 : listref of Bio::EnsEMBL::Compara::GenomeDB objects
Example : my $method_link_species_set =
$mlssa->fetch_by_method_link_type_GenomeDBs('ENSEMBL_ORTHOLOGUES',
[$human_genome_db,
$mouse_genome_db])
Description: Retrieve the Bio::EnsEMBL::Compara::MethodLinkSpeciesSet object
corresponding to the given method_link and the given set of
Bio::EnsEMBL::Compara::GenomeDB objects.
Returntype : Bio::EnsEMBL::Compara::MethodLinkSpeciesSet object
Exceptions : Returns undef if no Bio::EnsEMBL::Compara::MethodLinkSpeciesSet
object is found
Caller :
=cut
# Returns the MLSS for a method type and an exact set of GenomeDB objects,
# or undef when no such species-set exists. When the method type itself is
# unknown: returns undef if $undef_on_missing_methods is true, dies otherwise.
sub fetch_by_method_link_type_GenomeDBs {
    my ($self, $method_link_type, $genome_dbs, $undef_on_missing_methods) = @_;

    my $method = $self->db->get_MethodAdaptor->fetch_by_type($method_link_type);
    unless (defined $method) {
        # Do not complain if asked not to
        return undef if $undef_on_missing_methods;
        die "Could not fetch Method with type='$method_link_type'";
    }

    my $species_set = $self->db->get_SpeciesSetAdaptor->fetch_by_GenomeDBs($genome_dbs);
    return undef unless $species_set;

    return $self->fetch_by_method_link_id_species_set_id($method->dbID, $species_set->dbID);
}
=head2 fetch_by_method_link_type_genome_db_ids
Arg 1 : string $method_link_type
Arg 2 : listref of int (dbIDs of GenomeDBs)
Example : my $method_link_species_set =
$mlssa->fetch_by_method_link_type_genome_db_ids('ENSEMBL_ORTHOLOGUES',
[$human_genome_db->dbID,
$mouse_genome_db->dbID])
Description: Retrieve the Bio::EnsEMBL::Compara::MethodLinkSpeciesSet object
corresponding to the given method_link and the given set of
Bio::EnsEMBL::Compara::GenomeDB objects defined by the set of
$genome_db_ids
Returntype : Bio::EnsEMBL::Compara::MethodLinkSpeciesSet object
Exceptions : Returns undef if no Bio::EnsEMBL::Compara::MethodLinkSpeciesSet
object is found
Caller :
=cut
# Returns the MLSS for a method type and a set of GenomeDB dbIDs, or undef
# when the species-set does not exist. ENSEMBL_HOMOEOLOGUES is tolerated as
# a missing method type; any other unknown type is fatal.
sub fetch_by_method_link_type_genome_db_ids {
    my ($self, $method_link_type, $genome_db_ids) = @_;

    my $method = $self->db->get_MethodAdaptor->fetch_by_type($method_link_type);
    unless (defined $method) {
        # Do not complain if ENSEMBL_HOMOEOLOGUES does not exist
        return undef if $method_link_type eq 'ENSEMBL_HOMOEOLOGUES';
        die "Could not fetch Method with type='$method_link_type'";
    }

    # NOTE(review): raw dbIDs are handed to fetch_by_GenomeDBs, which by its
    # name takes GenomeDB objects — presumably it accepts ids too; confirm.
    my $species_set = $self->db->get_SpeciesSetAdaptor->fetch_by_GenomeDBs($genome_db_ids);
    return undef unless $species_set;

    return $self->fetch_by_method_link_id_species_set_id($method->dbID, $species_set->dbID);
}
=head2 fetch_by_method_link_type_registry_aliases
Arg 1 : string $method_link_type
Arg 2 : listref of core database aliases
Example : my $method_link_species_set =
$mlssa->fetch_by_method_link_type_registry_aliases("ENSEMBL_ORTHOLOGUES",
["human","mouse"])
Description: Retrieve the Bio::EnsEMBL::Compara::MethodLinkSpeciesSet object
corresponding to the given method_link and the given set of
core database aliases defined in the Bio::EnsEMBL::Registry
Returntype : Bio::EnsEMBL::Compara::MethodLinkSpeciesSet object
Exceptions : Returns undef if no Bio::EnsEMBL::Compara::MethodLinkSpeciesSet
object is found
Caller :
=cut
# Resolves each registry alias to a GenomeDB, then delegates to
# fetch_by_method_link_type_GenomeDBs(). Throws on an unknown alias.
sub fetch_by_method_link_type_registry_aliases {
    my ($self,$method_link_type, $registry_aliases) = @_;

    my $gdba = $self->db->get_GenomeDBAdaptor;
    my @genome_dbs;

    foreach my $alias (@{$registry_aliases}) {
        if (my $production_name = Bio::EnsEMBL::Registry->get_alias($alias)) {
            my $gdb = $gdba->fetch_by_name_assembly($production_name);
            if (!$gdb) {
                # The registry alias did not match a GenomeDB name directly;
                # fall back to the production name recorded in the species'
                # core-database meta table.
                my $meta_c = Bio::EnsEMBL::Registry->get_adaptor($alias, 'core', 'MetaContainer');
                $gdb = $gdba->fetch_by_name_assembly($meta_c->get_production_name());
            };
            push @genome_dbs, $gdb;
        } else {
            throw("Database alias $alias is not known\n");
        }
    }

    return $self->fetch_by_method_link_type_GenomeDBs($method_link_type,\@genome_dbs);
}
=head2 fetch_by_method_link_type_species_set_name
Arg 1 : string method_link_type
Arg 2 : string species_set_name
Example : my $method_link_species_set =
$mlssa->fetch_by_method_link_type_species_set_name("EPO", "mammals")
Description: Retrieve the Bio::EnsEMBL::Compara::MethodLinkSpeciesSet object
corresponding to the given method_link_type and species_set_name value
Returntype : Bio::EnsEMBL::Compara::MethodLinkSpeciesSet object
Exceptions : Returns undef if no Bio::EnsEMBL::Compara::MethodLinkSpeciesSet
object is found
Caller :
=cut
# Returns the MLSS matching a method type and a species-set name, or undef.
# Species sets may be stored under either "<name>" or "collection-<name>",
# so both spellings are searched.
sub fetch_by_method_link_type_species_set_name {
    my ($self, $method_link_type, $species_set_name) = @_;

    # Build the alternative spelling ("collection-" is 11 characters).
    my $alt_ss_name = ($species_set_name =~ /^collection-/)
        ? substr($species_set_name, 11)
        : 'collection-' . $species_set_name;

    my $ss_adaptor = $self->db->get_SpeciesSetAdaptor;
    my @candidate_sets = (
        @{ $ss_adaptor->fetch_all_by_name($species_set_name) },
        @{ $ss_adaptor->fetch_all_by_name($alt_ss_name) },
    );

    my $method = $self->db->get_MethodAdaptor->fetch_by_type($method_link_type);
    if ($method) {
        foreach my $candidate_set (@candidate_sets) {
            my $mlss = $self->fetch_by_method_link_id_species_set_id($method->dbID, $candidate_set->dbID);
            return $mlss if $mlss;
        }
    }
    return undef;
}
######################################################################
# Implements Bio::EnsEMBL::Compara::DBSQL::BaseReleaseHistoryAdaptor #
######################################################################
=head2 make_object_current
Arg[1] : Bio::EnsEMBL::Compara::MethodLinkSpeciesSet
Example : $mlss_adaptor->make_object_current($mlss);
Description : Mark the MethodLinkSpeciesSet as current, i.e. with a defined first_release and an undefined last_release
Also mark all the contained SpeciesSets as current
Returntype : none
Exceptions : none
Caller : general
Status : Stable
=cut
# Marks the MLSS (and its SpeciesSet) as current, and retires any other
# current MLSS that shares the same method and species-set name.
sub make_object_current {
    my ($self, $mlss) = @_;
    # Update the fields in the table
    $self->SUPER::make_object_current($mlss);
    # Also update the linked SpeciesSet
    $self->db->get_SpeciesSetAdaptor->make_object_current($mlss->species_set);
    # In a release database, the pair (method, species-set name) should be
    # unique. As this object is made current, others may have to be retired
    my @mlsss_retired;    # collects the MLSSs retired as a side effect
    # It can only happen for multiple sets
    if ($mlss->species_set->size >= 3) {
        my $other_mlsss = $self->_id_cache->get_all_by_additional_lookup('method', $mlss->method->dbID);
        foreach my $other_mlss (@$other_mlsss) {
            # Retire any *other* current MLSS with the same species-set name.
            if ($other_mlss->is_current and ($other_mlss->species_set->name eq $mlss->species_set->name) and ($other_mlss->dbID != $mlss->dbID)) {
                $self->retire_object($other_mlss);
                push @mlsss_retired, $other_mlss
            }
        }
    }
    # NOTE(review): the POD above says "Returntype: none", but the retired
    # MLSSs are in fact returned — consider updating the POD.
    return \@mlsss_retired;
}
###################################
#
# tagging
#
###################################
# TagAdaptor configuration: tag table, attribute table, foreign-key column,
# object accessor for the key, and the tag/value column names.
sub _tag_capabilities {
    return (
        'method_link_species_set_tag',
        'method_link_species_set_attr',
        'method_link_species_set_id',
        'dbID',
        'tag',
        'value',
    );
}
############################################################
# Implements Bio::EnsEMBL::Compara::DBSQL::BaseFullAdaptor #
############################################################
# BaseFullAdaptor hook: builds the specialised id-cache defined at the
# bottom of this file.
sub _build_id_cache {
    my ($self) = @_;
    return Bio::EnsEMBL::DBSQL::Cache::MethodLinkSpeciesSet->new($self);
}
package Bio::EnsEMBL::DBSQL::Cache::MethodLinkSpeciesSet;
use base qw/Bio::EnsEMBL::DBSQL::Support::FullIdCache/;
use strict;
use warnings;
# Enables the secondary-lookup machinery of FullIdCache (see compute_keys).
sub support_additional_lookups {
    return 1;
}
# Secondary lookup keys for one cached MLSS, used by the adaptor's fetch_*
# methods to answer queries without touching the database.
sub compute_keys {
    my ($self, $mlss) = @_;
    return {
        # Used by fetch_all_by_species_set_id()
        species_set_id => $mlss->species_set->dbID,
        # Used by fetch_all_by_method_link_type()
        method => sprintf('%d', $mlss->method->dbID),
        # Used by fetch_by_method_link_id_species_set_id()
        method_species_set => sprintf('%d_%d', $mlss->method->dbID, $mlss->species_set->dbID),
        # One flag key per member genome, used by fetch_all_by_GenomeDB()
        (map {sprintf('genome_db_%d', $_->dbID) => 1} @{$mlss->species_set->genome_dbs()}),
        # ...and per (genome, method type), used by fetch_all_by_method_link_type_GenomeDB()
        (map {sprintf('genome_db_%d_method_%s', $_->dbID, uc $mlss->method->type) => 1} @{$mlss->species_set->genome_dbs()}),
    }
}
1;
| Ensembl/ensembl-compara | modules/Bio/EnsEMBL/Compara/DBSQL/MethodLinkSpeciesSetAdaptor.pm | Perl | apache-2.0 | 25,525 |
# Exception raised when an abstract base class is asked to initialize an
# attribute but provides no body to do so.
package Moose::Exception::NoBodyToInitializeInAnAbstractBaseClass;
our $VERSION = '2.1404';

use Moose;
extends 'Moose::Exception';

# Name of the abstract base class involved; required at construction time.
has 'package_name' => (
    is       => 'ro',
    isa      => 'Str',
    required => 1
);

# Lazy builder for the inherited "message" attribute.
sub _build_message {
    my $self = shift;
    # Explicit return (the original relied on the implicit last-expression value).
    return "No body to initialize, " . $self->package_name . " is an abstract base class";
}

1;
| ray66rus/vndrv | local/lib/perl5/x86_64-linux-thread-multi/Moose/Exception/NoBodyToInitializeInAnAbstractBaseClass.pm | Perl | apache-2.0 | 355 |
#!/usr/bin/perl
# ****************************************************************************
# Script to generate the JIRA gadget XML files for TestRail activity summary
# for a specific test plan
# ****************************************************************************
# NOTE(review): no "use strict; use warnings;" here — several variables below
# (e.g. $properties) are package globals; consider tightening.

use Config::Properties;
use MIME::Base64;
use JSON;
use REST::Client;
use HTML::Entities;

# ****************************************************************************
# Main
# ****************************************************************************

# Load the script configuration (output directory, TestRail url, credentials).
open my $PROPERTIES, '<', "./generate-gadget-xml.properties" or die "Unable to open configuration file: $!";
$properties = Config::Properties->new();
$properties->load($PROPERTIES);
close ($PROPERTIES);

# Directory where the generated gadget XML files will be written.
my $gadgetDir = $properties->getProperty("gadgetDir");

# Set the URL, credentials, and headers for the REST calls
my $url = $properties->getProperty("url");
my $user = $properties->getProperty("username");
my $pass = $properties->getProperty("password");

# HTTP basic auth; TestRail's API v2 speaks JSON.
# NOTE(review): encode_base64 appends a trailing newline by default (its
# second argument controls the line ending) — confirm the HTTP client strips
# it, otherwise pass '' as the second argument.
my $headers = {
    Authorization => 'Basic '. encode_base64($user . ':' . $pass),
    'Content-type' => 'application/json'
};
my $rest = REST::Client->new({host => "$url"});
my $templateXML, $projectPlanListXML = "";
my $defaultList = "0|0";
my $dataType = "enum";
my @projectPlanList;
my $arr_index = 0;
open $INXML, '<', "./template-activity-summary.xml" or die "Cannot open: $!";
$templateXML = join('',<$INXML>);
close($INXML);
$rest->GET("/index.php?/api/v2/get_projects", $headers );
my $project_data = decode_json( $rest->responseContent() );
if ($rest->responseCode() != 200) {
printf("\nAPI call returned %s\n Error message: %s\n", $rest->responseCode(), $project_data->{error});
exit(1);
}
# Loop through all of the projects
for my $project_node ( @$project_data ) {
# Ignore completed projects
if ($project_node->{'is_completed'} == 0) {
$rest->GET("/index.php?/api/v2/get_plans/" . $project_node->{'id'}, $headers );
my $plan_data = decode_json( $rest->responseContent() );
if ($rest->responseCode() != 200) {
printf("\nAPI call returned %s\n Error message: %s\n", $rest->responseCode(), $plan_data->{error});
exit(1);
}
# Loop through all of the plans
for my $plan_node ( @$plan_data ) {
# Ignore completed plans
if ($plan_node->{'is_completed'} == 0) {
$projectPlanList[$arr_index][0] = $project_node->{'id'};
$projectPlanList[$arr_index][1] = encode_entities($project_node->{'name'});
$projectPlanList[$arr_index][2] = $plan_node->{'id'};
$projectPlanList[$arr_index][3] = encode_entities($plan_node->{'name'});
$arr_index++;
}
}
}
}
# Sort by project name, plan name
@projectPlanList = sort { lc($a->[1]) cmp lc($b->[1])||lc($a->[3]) cmp lc($b->[3]) } (@projectPlanList);
# Loop through all of the projects and plans to generate the UserPref XML
for (my $i = 0; $i < $arr_index; $i++) {
$projectPlanListXML .= "\n <EnumValue value=\"$projectPlanList[$i][0]|$projectPlanList[$i][2]\" display_value=\"($projectPlanList[$i][1]) $projectPlanList[$i][3]\"/>";
if ($defaultList eq "0|0") {
$defaultList = "$projectPlanList[$i][0]|$projectPlanList[$i][2]";
}
}
if ($defaultList eq "0|0") {
$dataType = "hidden";
}
$templateXML =~ s/<%DEFAULTLIST%>/$defaultList/g;
$templateXML =~ s/<%PROJECTPLANLIST%>/$projectPlanListXML/g;
$templateXML =~ s/<%DATATYPE%>/$dataType/g;
open $OUTXML, '>', "$gadgetDir/testrail-activity-summary.xml" or die "Cannot open: $!";
print($OUTXML "$templateXML");
close($OUTXML);
| zenoss/testrail-jira-gadgets | scripts/generate-activity-gadget-xml.pl | Perl | apache-2.0 | 3,609 |
#
# Copyright 2018 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package os::linux::local::mode::cmdreturn;
use base qw(centreon::plugins::mode);
use strict;
use warnings;
use centreon::plugins::misc;
# Constructor: declares the command-line options of this mode
# (remote-execution/ssh settings, the command to run, and the
# exit-code-to-status mapping) and initialises the returns table.
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;
    $self->{version} = '1.0';
    $options{options}->add_options(arguments =>
                                {
                                  "hostname:s"        => { name => 'hostname' },
                                  "remote"            => { name => 'remote' },
                                  "ssh-option:s@"     => { name => 'ssh_option' },
                                  "ssh-path:s"        => { name => 'ssh_path' },
                                  "ssh-command:s"     => { name => 'ssh_command', default => 'ssh' },
                                  "timeout:s"         => { name => 'timeout', default => 30 },
                                  "sudo"              => { name => 'sudo' },
                                  "command:s"         => { name => 'command' },
                                  "command-path:s"    => { name => 'command_path' },
                                  "command-options:s" => { name => 'command_options' },
                                  "manage-returns:s"  => { name => 'manage_returns', default => '' },
                                });
    # Parsed form of --manage-returns:
    #   <exit code or 'default'> => { return => <status>, msg => <message> }
    $self->{manage_returns} = {};
    return $self;
}
# Validates mode options after parsing: --command is mandatory and
# --manage-returns must yield at least one usable mapping entry.
# Entries have the form "<exit_code>,<status>,<message>", separated by
# '#'; an empty exit code defines the fallback ('default') mapping.
sub check_options {
    my ($self, %options) = @_;
    $self->SUPER::init(%options);

    unless (defined($self->{option_results}->{command})) {
        $self->{output}->add_option_msg(short_msg => "Need to specify command option.");
        $self->{output}->option_exit();
    }

    my @entries = split(/#/, $self->{option_results}->{manage_returns});
    foreach my $entry (@entries) {
        next unless $entry =~ /(.*?),(.*?),(.*)/;
        my ($code, $status, $msg) = ($1, $2, $3);
        # Skip entries whose status is not a known plugin status.
        next unless $self->{output}->is_litteral_status(status => $status);
        my $key = ($code ne '') ? $code : 'default';
        $self->{manage_returns}->{$key} = { return => $status, msg => $msg };
    }

    if ($self->{option_results}->{manage_returns} eq '' || scalar(keys %{$self->{manage_returns}}) == 0) {
        $self->{output}->add_option_msg(short_msg => "Need to specify manage-returns option correctly.");
        $self->{output}->option_exit();
    }
}
# Executes the configured command (locally or over ssh) and maps its exit
# code to a plugin status via the --manage-returns table. The command
# output is attached as long output ('|' replaced by '~' so it cannot be
# mistaken for perfdata), and the exit code is emitted as perfdata 'code'.
sub run {
    my ($self, %options) = @_;

    my ($stdout, $exit_code) = centreon::plugins::misc::execute(output => $self->{output},
                                                                options => $self->{option_results},
                                                                sudo => $self->{option_results}->{sudo},
                                                                command => $self->{option_results}->{command},
                                                                command_path => $self->{option_results}->{command_path},
                                                                command_options => $self->{option_results}->{command_options},
                                                                no_quit => 1);
    my $long_msg = $stdout;
    $long_msg =~ s/\|/~/mg;
    $self->{output}->output_add(long_msg => $long_msg);

    if (defined($self->{manage_returns}->{$exit_code})) {
        $self->{output}->output_add(severity => $self->{manage_returns}->{$exit_code}->{return},
                                    short_msg => $self->{manage_returns}->{$exit_code}->{msg});
    } elsif (defined($self->{manage_returns}->{default})) {
        $self->{output}->output_add(severity => $self->{manage_returns}->{default}->{return},
                                    short_msg => $self->{manage_returns}->{default}->{msg});
    } else {
        # Bug fix: severity was misspelled 'UNKNWON', which is not a valid
        # plugin status, breaking the fallback branch.
        $self->{output}->output_add(severity => 'UNKNOWN',
                                    short_msg => 'Exit code from command');
    }
    if (defined($exit_code)) {
        $self->{output}->perfdata_add(label => "code",
                                      value => $exit_code);
    }
    $self->{output}->display();
    $self->{output}->exit();
}
1;
__END__
=head1 MODE
Check command returns.
=over 8
=item B<--manage-returns>
Set action according command exit code.
Example: 0,OK,File xxx exist#1,CRITICAL,File xxx not exist#,UNKNOWN,Command problem
=item B<--remote>
Execute command remotely in 'ssh'.
=item B<--hostname>
Hostname to query (need --remote).
=item B<--ssh-option>
Specify multiple options like the user (example: --ssh-option='-l=centreon-engine' --ssh-option='-p=52').
=item B<--ssh-path>
Specify ssh command path (default: none)
=item B<--ssh-command>
Specify ssh command (default: 'ssh'). Useful to use 'plink'.
=item B<--timeout>
Timeout in seconds for the command (Default: 30).
=item B<--sudo>
Use 'sudo' to execute the command.
=item B<--command>
Command to test (Default: none).
You can use 'sh' to use '&&' or '||'.
=item B<--command-path>
Command path (Default: none).
=item B<--command-options>
Command options (Default: none).
=back
=cut
| wilfriedcomte/centreon-plugins | os/linux/local/mode/cmdreturn.pm | Perl | apache-2.0 | 5,868 |
package API::Profile;
#
# Copyright 2015 Comcast Cable Communications Management, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
#
# JvD Note: you always want to put Utils as the first use. Sh*t don't work if it's after the Mojo lines.
use UI::Utils;
use Mojo::Base 'Mojolicious::Controller';
use Data::Dumper;
use JSON;
# Returns (as a JSON success response) every profile with its id, name,
# description and last-updated timestamp, ordered by ?orderby=
# (default: profile name).
sub index {
    my $self = shift;

    my $order_by = $self->param('orderby') || "me.name";
    my $rs = $self->db->resultset("Profile")->search( undef, { order_by => $order_by } );

    my @profiles;
    while ( my $row = $rs->next ) {
        push @profiles, {
            "id"          => $row->id,
            "name"        => $row->name,
            "description" => $row->description,
            "lastUpdated" => $row->last_updated,
        };
    }
    $self->success( \@profiles );
}
# Renders a bare JSON array of profile names, ordered by ?orderby=
# (default: name).
sub index_trimmed {
    my $self = shift;

    my $order_by = $self->param('orderby') || "name";
    my $rs = $self->db->resultset("Profile")->search( undef, { order_by => $order_by } );

    my @names;
    while ( my $row = $rs->next ) {
        push @names, { "name" => $row->name };
    }
    $self->render( json => \@names );
}
# Creates a new profile from a JSON body {name, description}.
# Admin/oper only; rejects missing/empty fields and duplicate names.
# Responds with the new profile's id, name and description.
sub create {
    my $self = shift;
    my $params = $self->req->json;
    if ( !defined($params) ) {
        return $self->alert("parameters must be in JSON format, please check!");
    }
    if ( !&is_oper($self) ) {
        return $self->alert( { Error => " - You must be an admin or oper to perform this operation!" } );
    }
    # Validate 'name': must be present and non-empty.
    my $name = $params->{name};
    if ( !defined($name) ) {
        return $self->alert("profile 'name' is not given.");
    }
    if ( $name eq "" ) {
        return $self->alert("profile 'name' can't be null.");
    }
    # Validate 'description': must be present and non-empty.
    my $description = $params->{description};
    if ( !defined($description) ) {
        return $self->alert("profile 'description' is not given.");
    }
    if ( $description eq "" ) {
        return $self->alert("profile 'description' can't be null.");
    }
    # Reject duplicates by exact name match.
    my $existing_profile = $self->db->resultset('Profile')->search( { name => $name } )->get_column('name')->single();
    if ( $existing_profile && $name eq $existing_profile ) {
        return $self->alert("profile with name $name already exists.");
    }
    my $insert = $self->db->resultset('Profile')->create(
        {
            name => $name,
            description => $description,
        }
    );
    $insert->insert();
    my $new_id = $insert->id;
    my $response;
    $response->{id} = $new_id;
    $response->{name} = $name;
    $response->{description} = $description;
    return $self->success($response);
}
# Copies an existing profile: creates a profile named :profile_name with
# the description of :profile_copy_from, and duplicates all of the source
# profile's parameter associations onto the new profile. Admin/oper only.
sub copy {
    my $self = shift;
    if ( !&is_oper($self) ) {
        return $self->alert( { Error => " - You must be an admin or oper to perform this operation!" } );
    }
    my $name = $self->param('profile_name');
    my $profile_copy_from_name = $self->param('profile_copy_from');
    if ( !defined($name) ) {
        return $self->alert("profile 'name' is not given.");
    }
    if ( $name eq "" ) {
        return $self->alert("profile 'name' can't be null.");
    }
    if ( defined($profile_copy_from_name) and ( $profile_copy_from_name eq "" ) ) {
        return $self->alert("profile name 'profile_copy_from' can't be null.");
    }
    # The target name must not already exist.
    my $existing_profile = $self->db->resultset('Profile')->search( { name => $name } )->get_column('name')->single();
    if ( $existing_profile && $name eq $existing_profile ) {
        return $self->alert("profile with name $name already exists.");
    }
    # The source profile must exist; its description is inherited.
    my $rs = $self->db->resultset('Profile')->search( { name => $profile_copy_from_name } );
    my $row1 = $rs->next;
    if ( !$row1 ) {
        return $self->alert("profile_copy_from $profile_copy_from_name doesn't exist.");
    }
    my $profile_copy_from_id = $row1->id;
    my $description = $row1->description;
    my $insert = $self->db->resultset('Profile')->create(
        {
            name => $name,
            description => $description,
        }
    );
    $insert->insert();
    my $new_id = $insert->id;
    # Duplicate every parameter association of the source profile.
    if ( defined($profile_copy_from_name) ) {
        my $rs_param =
            $self->db->resultset('ProfileParameter')->search( { profile => $profile_copy_from_id }, { prefetch => [ { profile => undef }, { parameter => undef } ] } );
        while ( my $row = $rs_param->next ) {
            my $insert = $self->db->resultset('ProfileParameter')->create(
                {
                    profile => $new_id,
                    parameter => $row->parameter->id,
                }
            );
            $insert->insert();
        }
    }
    my $response;
    $response->{id} = $new_id;
    $response->{name} = $name;
    $response->{description} = $description;
    $response->{profile_copy_from} = $profile_copy_from_name;
    $response->{id_copy_from} = $profile_copy_from_id;
    return $self->success($response);
}
# Returns the profiles NOT yet associated with the parameter given by
# ?paramid=, as [{id, description}, ...] ordered by description.
# (Removed the unused local %dsids from the original.)
sub availableprofile {
    my $self = shift;
    my $paramid = $self->param('paramid');

    # Collect the ids of all profiles already linked to this parameter.
    my %in_use;
    my $rs_in_use = $self->db->resultset("ProfileParameter")->search( { 'parameter' => $paramid } );
    while ( my $row = $rs_in_use->next ) {
        $in_use{ $row->profile->id } = undef;
    }

    # Every profile not in %in_use is still available for association.
    my @data;
    my $rs_links = $self->db->resultset("Profile")->search( undef, { order_by => "description" } );
    while ( my $row = $rs_links->next ) {
        next if exists $in_use{ $row->id };
        push( @data, { "id" => $row->id, "description" => $row->description } );
    }
    $self->success( \@data );
}
1;
| PSUdaemon/traffic_control | traffic_ops/app/lib/API/Profile.pm | Perl | apache-2.0 | 5,906 |
# Request class for the AWS IoT AttachPrincipalPolicy call:
# PUT /principal-policies/{policyName} with the principal in a header.
package Paws::IoT::AttachPrincipalPolicy;
  use Moose;
  # Policy name, sent as the 'policyName' URI path parameter.
  has PolicyName => (is => 'ro', isa => 'Str', traits => ['ParamInURI'], uri_name => 'policyName', required => 1);
  # Principal (certificate ARN or Cognito ID), sent as the 'principal' header.
  has Principal => (is => 'ro', isa => 'Str', traits => ['ParamInHeader'], header_name => 'principal', required => 1);

  use MooseX::ClassAttribute;

  # Call metadata used by the Paws caller to build the HTTP request.
  class_has _api_call => (isa => 'Str', is => 'ro', default => 'AttachPrincipalPolicy');
  class_has _api_uri => (isa => 'Str', is => 'ro', default => '/principal-policies/{policyName}');
  class_has _api_method => (isa => 'Str', is => 'ro', default => 'PUT');
  class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::API::Response');
  class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::IoT::AttachPrincipalPolicy - Arguments for method AttachPrincipalPolicy on Paws::IoT
=head1 DESCRIPTION
This class represents the parameters used for calling the method AttachPrincipalPolicy on the
AWS IoT service. Use the attributes of this class
as arguments to method AttachPrincipalPolicy.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to AttachPrincipalPolicy.
As an example:
$service_obj->AttachPrincipalPolicy(Att1 => $value1, Att2 => $value2, ...);
Values for attributes that are native types (Int, String, Float, etc) can passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
=head1 ATTRIBUTES
=head2 B<REQUIRED> PolicyName => Str
The policy name.
=head2 B<REQUIRED> Principal => Str
The principal, which can be a certificate ARN (as returned from the
CreateCertificate operation) or an Amazon Cognito ID.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method AttachPrincipalPolicy in L<Paws::IoT>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/IoT/AttachPrincipalPolicy.pm | Perl | apache-2.0 | 2,104 |
#
# Copyright 2015 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package apps::lmsensors::mode::voltage;
use base qw(centreon::plugins::mode);
use strict;
use warnings;
my $oid_SensorDesc = '.1.3.6.1.4.1.2021.13.16.4.1.2'; # voltage entry description
my $oid_SensorValue = '.1.3.6.1.4.1.2021.13.16.4.1.3'; # voltage entry value (mV)
# Constructor: declares the command-line options of the voltage mode
# (thresholds and sensor selection/filtering flags) and initialises the
# list of selected sensor instance ids.
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;
    $self->{version} = '1.0';
    $options{options}->add_options(arguments =>
                                {
                                  "warning:s"               => { name => 'warning' },
                                  "critical:s"              => { name => 'critical' },
                                  "name"                    => { name => 'use_name' },
                                  "sensor:s"                => { name => 'sensor' },
                                  "regexp"                  => { name => 'use_regexp' },
                                  "regexp-isensitive"       => { name => 'use_regexpi' },
                                });
    # SNMP instance indices of the sensors kept by manage_selection().
    $self->{Sensor_id_selected} = [];
    return $self;
}
# Validates the --warning and --critical thresholds; aborts with an
# option error when either one cannot be parsed.
sub check_options {
    my ($self, %options) = @_;
    $self->SUPER::init(%options);

    foreach my $label ('warning', 'critical') {
        my $value = $self->{option_results}->{$label};
        if ($self->{perfdata}->threshold_validate(label => $label, value => $value) == 0) {
            $self->{output}->add_option_msg(short_msg => "Wrong $label threshold '" . $value . "'.");
            $self->{output}->option_exit();
        }
    }
}
# Fetches the selected voltage sensors over SNMP, checks each value
# against the thresholds, and emits per-sensor output and perfdata.
sub run {
    my ($self, %options) = @_;
    # $options{snmp} = snmp object
    $self->{snmp} = $options{snmp};
    $self->{hostname} = $self->{snmp}->get_hostname();
    $self->{snmp_port} = $self->{snmp}->get_port();
    $self->manage_selection();
    $self->{snmp}->load(oids => [$oid_SensorDesc, $oid_SensorValue], instances => $self->{Sensor_id_selected});
    my $SensorValueResult = $self->{snmp}->get_leef(nothing_quit => 1);
    # Global OK line only in multi-sensor mode (no --sensor, or regexp match).
    if (!defined($self->{option_results}->{sensor}) || defined($self->{option_results}->{use_regexp})) {
        $self->{output}->output_add(severity => 'OK',
                                    short_msg => 'All Voltages are ok.');
    }
    foreach my $SensorId (sort @{$self->{Sensor_id_selected}}) {
        my $SensorDesc = $SensorValueResult->{$oid_SensorDesc . '.' . $SensorId};
        # Raw SNMP value is millivolts; convert to volts.
        my $SensorValue = $SensorValueResult->{$oid_SensorValue . '.' . $SensorId} / 1000;
        my $exit = $self->{perfdata}->threshold_check(value => $SensorValue, threshold => [ { label => 'critical', 'exit_litteral' => 'critical' }, { label => 'warning', exit_litteral => 'warning' } ]);
        $self->{output}->output_add(long_msg => sprintf("Sensor '%s' Volt: %s",
                                                        $SensorDesc, $SensorValue));
        # Short message when not OK, or always in single-sensor mode.
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1) || (defined($self->{option_results}->{sensor}) && !defined($self->{option_results}->{use_regexp}))) {
            $self->{output}->output_add(severity => $exit,
                                        short_msg => sprintf("Sensor '%s' Volt: %s",
                                                             $SensorDesc, $SensorValue));
        }
        my $label = 'sensor_voltage';
        my $extra_label = '';
        # In multi-sensor mode, disambiguate the perfdata label per sensor.
        $extra_label = '_' . $SensorId . "_" . $SensorDesc if (!defined($self->{option_results}->{sensor}) || defined($self->{option_results}->{use_regexp}));
        $self->{output}->perfdata_add(label => $label . $extra_label, unit => 'V',
                                      value => $SensorValue,
                                      warning => $self->{perfdata}->get_perfdata_for_output(label => 'warning'),
                                      critical => $self->{perfdata}->get_perfdata_for_output(label => 'critical'));
    }
    $self->{output}->display();
    $self->{output}->exit();
}
# Walks the sensor description table and fills Sensor_id_selected with
# the instance ids matching the --sensor filter:
#   no --name:             --sensor matched against the id (regexp, /i)
#   --name + --regexp(+i): --sensor matched against the description
#   --name only:           exact description match
# Exits with an option error when nothing matches.
sub manage_selection {
    my ($self, %options) = @_;
    my $result = $self->{snmp}->get_table(oid => $oid_SensorDesc, nothing_quit => 1);
    foreach my $key ($self->{snmp}->oid_lex_sort(keys %$result)) {
        # Instance id is the last numeric component of the OID.
        next if ($key !~ /\.([0-9]+)$/);
        my $SensorId = $1;
        my $SensorDesc = $result->{$key};
        next if (defined($self->{option_results}->{sensor}) && !defined($self->{option_results}->{use_name}) && !defined($self->{option_results}->{use_regexp}) && !defined($self->{option_results}->{use_regexpi})
                 && $SensorId !~ /$self->{option_results}->{sensor}/i);
        next if (defined($self->{option_results}->{use_name}) && defined($self->{option_results}->{use_regexp}) && defined($self->{option_results}->{use_regexpi})
                 && $SensorDesc !~ /$self->{option_results}->{sensor}/i);
        next if (defined($self->{option_results}->{use_name}) && defined($self->{option_results}->{use_regexp}) && !defined($self->{option_results}->{use_regexpi})
                 && $SensorDesc !~ /$self->{option_results}->{sensor}/);
        next if (defined($self->{option_results}->{use_name}) && !defined($self->{option_results}->{use_regexp}) && !defined($self->{option_results}->{use_regexpi})
                 && $SensorDesc ne $self->{option_results}->{sensor});
        push @{$self->{Sensor_id_selected}}, $SensorId;
    }
    if (scalar(@{$self->{Sensor_id_selected}}) <= 0) {
        if (defined($self->{option_results}->{sensor})) {
            $self->{output}->add_option_msg(short_msg => "No Sensors found for '" . $self->{option_results}->{sensor} . "'.");
        } else {
            $self->{output}->add_option_msg(short_msg => "No Sensors found.");
        };
        $self->{output}->option_exit();
    }
}
1;
__END__
=head1 MODE
Check LM-Sensors: Voltage Sensors
=over 8
=item B<--warning>
Threshold warning (Volt)
=item B<--critical>
Threshold critical (Volt)
=item B<--sensor>
Set the Sensor Desc (number expected) ex: 1, 2,... (empty means 'check all sensors').
=item B<--name>
Allows to use Sensor Desc name with option --sensor instead of Sensor Desc oid index.
=item B<--regexp>
Allows to use regexp to filter sensordesc (with option --name).
=item B<--regexp-isensitive>
Allows to use regexp non case-sensitive (with --regexp).
=back
=cut
| s-duret/centreon-plugins | apps/lmsensors/mode/voltage.pm | Perl | apache-2.0 | 7,268 |
# VMOMI binding for the vSphere CryptoSpecEncrypt data object: a
# CryptoSpec carrying the CryptoKeyId to use.
package VMOMI::CryptoSpecEncrypt;
use parent 'VMOMI::CryptoSpec';

use strict;
use warnings;

# Ancestor class names, nearest first.
our @class_ancestors = (
    'CryptoSpec',
    'DynamicData',
);

# Member descriptors: [name, type, flags] — flag semantics are defined
# by the VMOMI (de)serialization machinery.
our @class_members = (
    ['cryptoKeyId', 'CryptoKeyId', 0, ],
);

sub get_class_ancestors {
    return @class_ancestors;
}

# Own members are appended after the inherited ones.
sub get_class_members {
    my $class = shift;
    return ($class->SUPER::get_class_members(), @class_members);
}

1;
| stumpr/p5-vmomi | lib/VMOMI/CryptoSpecEncrypt.pm | Perl | apache-2.0 | 446 |
# VMOMI binding for the vSphere TaskFilterSpecRecursionOption type;
# SimpleType subclasses declare no members of their own.
package VMOMI::TaskFilterSpecRecursionOption;
use parent 'VMOMI::SimpleType';
use strict;
use warnings;
1;
| stumpr/p5-vmomi | lib/VMOMI/TaskFilterSpecRecursionOption.pm | Perl | apache-2.0 | 109 |
=head1 LICENSE
Copyright [1999-2014] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
use strict;
use warnings;
use SeqStoreConverter::BasicConverter;
package SeqStoreConverter::CaenorhabditisBriggsae;
use vars qw(@ISA);
@ISA = qw(SeqStoreConverter::BasicConverter);
# Creates the briggsae coordinate systems (scaffold, clone, contig —
# contig is the sequence level) and records the assembly.mapping paths
# in the target meta table.
sub create_coord_systems {
  my $self = shift;
  $self->debug("CaenorhabditisBriggsae Specific: creating clone, scaffold," .
               " and contig coordinate systems");
  my $target = $self->target();
  my $dbh = $self->dbh();
  my $ass_def = $self->get_default_assembly();
  # [name, version, attribs, rank] rows for the coord_system table.
  my @coords =
    (["scaffold" , $ass_def, "default_version", 1 ],
     ['clone' , undef , 'default_version', 2 ],
     ["contig" , undef , "default_version,sequence_level", 3]);
  my @assembly_mappings = ("scaffold:$ass_def|contig",
                           "clone|contig",
                           "scaffold:$ass_def|contig|clone");
  $self->debug("Building coord_system table");
  my $sth = $dbh->prepare("INSERT INTO $target.coord_system " .
                          "(name, version, attrib, rank) VALUES (?,?,?,?)");
  my %coord_system_ids;
  foreach my $cs (@coords) {
    $sth->execute(@$cs);
    $coord_system_ids{$cs->[0]} = $sth->{'mysql_insertid'};
  }
  $sth->finish();
  $self->debug("Adding assembly.mapping entries to meta table");
  $sth = $dbh->prepare("INSERT INTO $target.meta(meta_key, meta_value) " .
                       "VALUES ('assembly.mapping', ?)");
  foreach my $mapping (@assembly_mappings) {
    $sth->execute($mapping);
  }
  $sth->finish();
  return;
}
# Populates seq_region from the old-schema contig, clone and chromosome
# tables; briggsae chromosomes become 'scaffold' seq_regions.
sub create_seq_regions {
  my $self = shift;
  $self->debug("CaenorhabditisBriggsae Specific: creating contig, " .
               "clone, contig and scaffold seq_regions");
  $self->contig_to_seq_region();
  $self->clone_to_seq_region();
  $self->chromosome_to_seq_region('scaffold');
}
# Copies rows of the old chromosome table into seq_region under the given
# coordinate system (default 'chromosome'), recording the old->new id
# mapping in tmp_chr_map for later use.
sub chromosome_to_seq_region {
  my $self = shift;
  my $target_cs_name = shift;
  my $target = $self->target();
  my $source = $self->source();
  my $dbh = $self->dbh();
  $target_cs_name ||= "chromosome";
  my $cs_id = $self->get_coord_system_id($target_cs_name);
  $self->debug("CaenorhabditisBriggsae Specific: Transforming " .
               "chromosomes into $target_cs_name seq_regions");
  ## For consistency with mart and v19 we need to keep chr name the same for
  ## now, so the following section is commented out and replaced:
  ##strip off the leading 'cb25.' from the chromosome name
  #my $select_sth = $dbh->prepare
  # ("SELECT chromosome_id,substring(name,6),length FROM $source.chromosome");
  my $select_sth = $dbh->prepare
    ("SELECT chromosome_id,name,length FROM $source.chromosome");
  my $insert_sth = $dbh->prepare
    ("INSERT INTO $target.seq_region (name, coord_system_id, length) " .
     "VALUES (?,?,?)");
  my $tmp_insert_sth = $dbh->prepare
    ("INSERT INTO $target.tmp_chr_map (old_id, new_id) VALUES (?, ?)");
  $select_sth->execute();
  my ($chrom_id, $name, $length);
  $select_sth->bind_columns(\$chrom_id, \$name, \$length);
  while ($select_sth->fetch()) {
    #insert into seq_region table
    $insert_sth->execute($name, $cs_id, $length);
    #copy old/new mapping into temporary table
    $tmp_insert_sth->execute($chrom_id, $insert_sth->{'mysql_insertid'});
  }
  $select_sth->finish();
  $insert_sth->finish();
  $tmp_insert_sth->finish();
  return;
}
# Loads the assembly table: contig-on-chromosome via the generic helper,
# then contig-on-clone via the briggsae-specific method below.
sub create_assembly {
  my $self = shift;
  $self->debug("CaenorhabditisBriggsae Specific: loading assembly data");
  $self->assembly_contig_chromosome();
  $self->assembly_contig_clone();
}
#
# Override the assembly contig clone method because the briggsae database
# does not have any embl_offsets
#
# Loads the contig-on-clone assembly rows. Briggsae has no embl_offset,
# so the clone coordinates are parsed out of the contig name
# ("<acc>.<start>.<end>"); the derived length must match the contig
# length or the conversion dies.
sub assembly_contig_clone {
  my $self = shift;
  my $target = $self->target();
  my $source = $self->source();
  my $dbh     = $self->dbh();
  $self->debug("CaenorhabditisBriggsae Specific: loading contig/clone " .
               "assembly relationship");
  my $asm_sth = $dbh->prepare
    ("INSERT INTO $target.assembly " .
     "set asm_seq_region_id = ?, ".
     " asm_start = ?, " .
     " asm_end = ?, " .
     " cmp_seq_region_id = ?, ".
     " cmp_start = ?, " .
     " cmp_end = ?, " .
     " ori = ?");
  # get a list of the contigs that have clones, their ids, and the
  # corresponding clone ids
  my $ctg_sth = $dbh->prepare
    ("SELECT ctg.name, ctg.contig_id, ctg.length, cln.new_id " .
     "FROM $source.contig ctg, $target.tmp_cln_map cln " .
     "WHERE ctg.name not like 'c%' " . # only contigs w/ proper accessions
     "AND ctg.clone_id = cln.old_id");
  $ctg_sth->execute();
  my ($ctg_name, $ctg_id, $ctg_len, $cln_id);
  $ctg_sth->bind_columns(\$ctg_name, \$ctg_id, \$ctg_len, \$cln_id);
  while($ctg_sth->fetch()) {
    # Clone start/end come from the contig name "<acc>.<start>.<end>".
    my (undef,$cln_start, $cln_end) = split(/\./, $ctg_name);
    my $cln_len = $cln_end - $cln_start + 1;
    if($cln_len != $ctg_len) {
      die("Contig len $ctg_len != Clone len $cln_len");
    }
    $asm_sth->execute($cln_id, $cln_start, $cln_end,
                      $ctg_id, 1, $ctg_len, 1);
  }
  $ctg_sth->finish();
  $asm_sth->finish();
  return;
}
#
# Override contig_to_seq_region and clone_to_seq_region to provide
# briggsae specific behaviour
#
# sub contig_to_seq_region {
# my $self = shift;
# my $target_cs_name = shift;
# my $target = $self->target();
# my $source = $self->source();
# my $dbh = $self->dbh();
# $target_cs_name ||= 'contig';
# $self->debug("CaenorhabditisBriggsae Specific: Transforming contigs into " .
# "$target_cs_name seq_regions");
# my $cs_id = $self->get_coord_system_id($target_cs_name);
# #There are two types of contigs in briggsae:
# #
# # cosmids/clones
# #
# my $sth = $dbh->prepare
# ("INSERT INTO $target.seq_region " .
# "SELECT contig_id, name, $cs_id, length " .
# "FROM $source.contig " .
# "WHERE name not like 'c%'");
# $sth->execute();
# $sth->finish();
# #
# # WGS contigs
# #
# $sth = $dbh->prepare
# ("INSERT INTO $target.seq_region " .
# "SELECT ctg.contig_id, cln.name, $cs_id, length " .
# "FROM $source.contig ctg, $source.clone cln " .
# "WHERE ctg.clone_id = cln.clone_id " .
# "AND ctg.name like 'c%'");
# $sth->execute();
# $sth->finish();
# return;
# }
# Creates clone seq_regions from accessioned contigs. Because briggsae
# clones have no embl_offset, the clone length is taken as the highest
# contig end parsed from the contig names; contigs are grouped by clone
# accession (rows are ordered by clone_id). The old->new clone id
# mapping is stored in tmp_cln_map.
# NOTE(review): if the SELECT returns no rows, the trailing insert after
# the loop would execute with undef values — confirm this cannot happen.
sub clone_to_seq_region {
  my $self = shift;
  my $target_cs_name = shift;
  my $target = $self->target();
  my $source = $self->source();
  my $dbh = $self->dbh();
  # target coord_system will have a different ID
  $target_cs_name ||= "clone";
  my $cs_id = $self->get_coord_system_id($target_cs_name);
  $self->debug("CaenorhabditisBriggsae Specific:Transforming clones " .
               "into $target_cs_name seq_regions");
  #
  # We don't want to make clones out of the WGS contigs, only out of
  # the clones with proper embl accessions. Also for some reason the embl_offset
  # is not set in the briggsae 17/18/19 databases, which means we have to deduce the
  # length from the name of the contigs!
  #
  my $select_sth = $dbh->prepare
    ("SELECT cl.clone_id,
  CONCAT(cl.embl_acc, '.', cl.embl_version),
  ctg.name
  FROM $source.clone cl, $source.contig ctg
  WHERE cl.clone_id = ctg.clone_id
  AND cl.embl_acc not like 'c%'
  ORDER BY cl.clone_id");
  $select_sth->execute();
  my ($clone_id, $embl_acc, $ctg_name);
  $select_sth->bind_columns(\$clone_id, \$embl_acc, \$ctg_name);
  # State for the clone currently being accumulated.
  my $highest_end = undef;
  my $current_clone = undef;
  my $current_clone_id = undef;
  my $length;
  my $insert_sth = $dbh->prepare
    ("INSERT INTO $target.seq_region (name, coord_system_id, length) " .
     "VALUES(?,?,?)");
  my $tmp_insert_sth = $dbh->prepare
    ("INSERT INTO $target.tmp_cln_map (old_id, new_id) VALUES (?, ?)");
  while ($select_sth->fetch()) {
    #extract the end position of the contig
    my $ctg_end;
    (undef,undef,$ctg_end) = split(/\./, $ctg_name);
    if(!defined($current_clone)) {
      $current_clone = $embl_acc;
      $current_clone_id = $clone_id;
      $highest_end = $ctg_end;
    }
    if($current_clone ne $embl_acc) {
      #started new clone, store last one
      $insert_sth->execute($current_clone, $cs_id, $highest_end);
      #store mapping of old -> new ids in temp table
      $tmp_insert_sth->execute($current_clone_id, $insert_sth->{'mysql_insertid'});
      $current_clone = $embl_acc;
      $current_clone_id = $clone_id;
      $highest_end = $ctg_end;
    } elsif($ctg_end > $highest_end) {
      #same clone, adjust end if end of contig is highest yet seen
      $highest_end = $ctg_end;
    }
  }
  #insert the last clone
  $insert_sth->execute($current_clone, $cs_id, $highest_end);
  $tmp_insert_sth->execute($current_clone_id, $insert_sth->{'mysql_insertid'});
  $select_sth->finish();
  $insert_sth->finish();
  $tmp_insert_sth->finish();
  return;
}
1;
| willmclaren/ensembl | misc-scripts/surgery/SeqStoreConverter/CaenorhabditisBriggsae.pm | Perl | apache-2.0 | 9,506 |
=head1 LICENSE
# Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
# Copyright [2016-2020] EMBL-European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=cut
=head1 NAME
Bio::EnsEMBL::Pipeline::DBSQL::FlagAdaptor -
=head1 SYNOPSIS
$Adaptor = $dbobj->get_FlagAdaptor();
=head1 DESCRIPTION
Module to encapsulate all db access to flag table
=head1 APPENDIX
The rest of the documentation details each of the object methods.
Internal methods are usually preceded with a _
=cut
package Bio::EnsEMBL::Pipeline::DBSQL::FlagAdaptor;
use strict;
use warnings ;
use Bio::EnsEMBL::Pipeline::Flag;
use vars qw(@ISA);
use Carp;
=head2 new
Title : new
Usage : $dbobj->get_FlagAdaptor
Function: Standard Adaptor Constructor
Returns : Bio::EnsEMBL::Pipeline::DBSQL::FlagAdaptor
Args : Bio::EnsEMBL::Pipeline::DBSQL::DBAdaptor
=cut
# Construct a FlagAdaptor around the given pipeline DBAdaptor.
# NOTE(review): @ISA is declared via 'use vars' but never assigned anywhere
# in this file, so SUPER::new() has no visible base class here — confirm the
# parent class is established elsewhere before relying on this constructor.
sub new {
  my ($class,$dbobj) = @_;
  my $self = $class->SUPER::new();   # delegate construction to the base adaptor
  $self->db( $dbobj );               # remember the DBAdaptor for later queries
  return $self;
}
=head2 store
Title : store
Usage : $self->store( $flag );
Function: Stores a flag in db
Sets adaptor and dbID in Flag
Returns : -
Args : Bio::EnsEMBL::Pipeline::Flag
=cut
=head2 store

  Title   : store
  Usage   : $self->store( $flag );
  Function: Stores a flag in db
            Sets adaptor and dbID in Flag
  Returns : 1 on success
  Args    : Bio::EnsEMBL::Pipeline::Flag

=cut

sub store {
  my ( $self, $flag ) = @_;

  # Validate the flag (analysis object present, table_name exists) first.
  $self->check_flag($flag);

  # Bind the values instead of interpolating them into the SQL string; this
  # avoids quoting problems (and SQL injection) if any value contains quotes.
  my $sth = $self->prepare(
      "INSERT INTO flag SET ensembl_id = ?, analysis_id = ?, table_name = ?");
  $sth->execute( $flag->ensembl_id,
                 $flag->goalAnalysis->dbID,
                 $flag->type );
  $sth->finish;   # the old code reassigned $sth without finishing it

  # Fetch the auto-generated primary key of the row we just inserted.
  $sth = $self->prepare("SELECT last_insert_id()");
  $sth->execute;
  my $dbID = ($sth->fetchrow_array)[0];
  $sth->finish;

  # Make the flag persistent-aware.
  $flag->dbID( $dbID );
  $flag->adaptor( $self );
  return 1;
}
=head2 remove
Title : remove
Usage : $self->remove( $flag );
Function: removes given object from database.
Returns : dbID of object that was removed
Args : Bio::EnsEMBL::Pipeline::Flag
=cut
=head2 remove

  Title   : remove
  Usage   : $self->remove( $flag );
  Function: removes given object from database.
  Returns : dbID of object that was removed
  Args    : Bio::EnsEMBL::Pipeline::Flag

=cut

sub remove {
  my ( $self, $flag ) = @_;

  my $dbID = $flag->dbID;
  if( !defined $dbID ) {
    $self->throw( "FlagAdaptor->remove called with non persistent Flag" );
  }

  # Bind the id rather than interpolating it into the statement text.
  my $sth = $self->prepare("DELETE FROM flag WHERE flag_id = ?");
  $sth->execute($dbID);
  $sth->finish;
  return $dbID;
}
=head2 fetch_all
Title : fetch_all
Usage : @flags = $self->fetch_all;
Function: retrieves list ref of all flags from db;
Returns : List ref of Bio::EnsEMBL::Pipeline::Flag
Args : -
=cut
=head2 fetch_all

  Title   : fetch_all
  Usage   : @flags = $self->fetch_all;
  Function: retrieves list ref of all flags from db;
  Returns : List ref of Bio::EnsEMBL::Pipeline::Flag
  Args    : -

=cut

sub fetch_all {
  my ($self) = @_;

  my $analysis_adaptor = $self->db->get_AnalysisAdaptor;

  my $sth = $self->prepare(
      "SELECT flag_id, ensembl_id, table_name, analysis_id FROM flag");
  $sth->execute;

  my @flags;
  while ( my ($flag_id, $ensembl_id, $table_name, $analysis_id)
            = $sth->fetchrow_array ) {
    # Resolve the goal analysis; skip (with a warning) any flag whose
    # analysis row has gone missing.
    my $analysis = $analysis_adaptor->fetch_by_dbID($analysis_id);
    unless ($analysis) {
      $self->warn("Couldn't find analysis to match dbID $analysis_id");
      next;
    }
    push @flags, Bio::EnsEMBL::Pipeline::Flag->new(
        '-dbid'         => $flag_id,
        '-type'         => $table_name,
        '-ensembl_id'   => $ensembl_id,
        '-goalAnalysis' => $analysis,
        '-adaptor'      => $self );
  }
  $sth->finish;

  return \@flags;
}
=head2 fetch_by_dbID
Title : fetch_by_dbID
Usage : $self->fetch_by_dbID
Function: Fetches object by its db identifier
Returns : Bio::EnsEMBL::Pipeline::Flag
Args : Scalar
=cut
=head2 fetch_by_dbID

  Title   : fetch_by_dbID
  Usage   : $self->fetch_by_dbID
  Function: Fetches object by its db identifier
  Returns : Bio::EnsEMBL::Pipeline::Flag, or undef if not found
  Args    : Scalar

=cut

sub fetch_by_dbID {
  my ($self, $dbID) = @_;

  my $anaAdaptor = $self->db->get_AnalysisAdaptor;

  # Bind the id; the unused scratch variables the old code declared
  # ($id, $type, $queryResult) are gone.
  my $sth = $self->prepare("
    SELECT flag_id, ensembl_id, table_name, analysis_id
    FROM   flag
    WHERE  flag_id = ?");
  $sth->execute($dbID);

  my ($flag_id, $ensembl_id, $table_name, $analysis_id) = $sth->fetchrow;
  if( !defined $flag_id ) {
    $sth->finish;   # the old not-found path leaked the statement handle
    # Keep 'return undef' (not bare return): callers may assign this into a
    # list slot and test definedness.
    return undef;
  }

  my $analysis = $anaAdaptor->fetch_by_dbID($analysis_id)
    or $self->throw("Can't find analysis with dbID $analysis_id\n");

  my $flag = Bio::EnsEMBL::Pipeline::Flag->new
    ( '-dbid'         => $dbID,
      '-type'         => $table_name,
      '-ensembl_id'   => $ensembl_id,
      '-goalAnalysis' => $analysis,
      '-adaptor'      => $self );
  $sth->finish;
  return $flag;
}
=head2 fetch_by_analysis
Title : fetch_by_analysis
Usage : $self->fetch_by_analysis( $analysis );
Function: fetches flag objects based on analysis object
Returns : Array ref of Flag objects
Args : Bio::EnsEMBL::Analysis
=cut
=head2 fetch_by_analysis

  Title   : fetch_by_analysis
  Usage   : $self->fetch_by_analysis( $analysis );
  Function: fetches flag objects based on analysis object
  Returns : Array ref of Flag objects
  Args    : Bio::EnsEMBL::Analysis

=cut

sub fetch_by_analysis{
  my ($self, $goal_analysis) = @_;

  if(!$goal_analysis ||
     !$goal_analysis->isa("Bio::EnsEMBL::Analysis")){
    $self->throw("analysis ".$goal_analysis." must be a ".
		 "Bio:EnsEMBL::Analysis object");
  }

  # Bind the analysis dbID instead of interpolating it into the SQL.
  my $sth = $self->prepare("SELECT flag_id FROM flag WHERE analysis_id = ?");
  $sth->execute($goal_analysis->dbID);

  my @flags;
  # One fetch_by_dbID per row keeps construction logic in a single place.
  while( my ($flag_id) = $sth->fetchrow_array ) {
    push @flags, $self->fetch_by_dbID($flag_id);
  }
  $sth->finish;
  return \@flags;
}
=head2 fetch_by_ensembl_id
Title : fetch_by_ensembl_id
Usage : $self->fetch_by_ensembl_id( $id );
Function: fetches all flag objects with the specified ensembl identifier
ie: a transcript dbid or gene dbid, not stable identifiers
Returns : Array ref of Flag objects
Args : Scalar
=cut
=head2 fetch_by_ensembl_id

  Title   : fetch_by_ensembl_id
  Usage   : $self->fetch_by_ensembl_id( $id );
  Function: fetches all flag objects with the specified ensembl identifier
            ie: a transcript dbid or gene dbid, not stable identifiers
  Returns : Array ref of Flag objects
  Args    : Scalar

=cut

sub fetch_by_ensembl_id{
  my ($self, $id) = @_;

  # Bind the id instead of interpolating it into the SQL string.
  my $sth = $self->prepare("SELECT flag_id FROM flag WHERE ensembl_id = ?");
  $sth->execute($id);

  my @flags;
  while ( my ($flag_id) = $sth->fetchrow_array){
    push @flags, $self->fetch_by_dbID($flag_id);
  }
  $sth->finish;
  return \@flags;
}
# Validate a flag before storage: the goal analysis must be a real
# Bio::EnsEMBL::Analysis and the flag's type must name an existing table.
# Throws on failure; returns 1 on success.
sub check_flag{
  my ($self,$flag)=@_;

  unless ($flag->goalAnalysis->isa("Bio::EnsEMBL::Analysis")){
    $self->throw("analysis ".$flag->goalAnalysis." must be a ".
		 "Bio:EnsEMBL::Analysis object");
  }

  my $sth = $self->prepare("show tables;");
  $sth->execute;
  while ( my($result) = $sth->fetchrow_array){
    if ($result eq $flag->type){
      $sth->finish;
      return 1;
    }
  }
  # Release the handle before raising: the old code placed finish/return
  # after the throw, where they could never execute.
  $sth->finish;
  $self->throw("Cannot find table that corresponds to flag type ".$flag->type."\n");
}
=head2 db
Title : db
Usage : $self->db;
Function: gets the DBSQL::DBAdaptor for the Adaptor. Set is private.
Returns : Bio::EnsEMBL::Pipeline::DBSQL::DBAdaptor;
Args : -
=cut
# Combined getter/setter for the DBAdaptor backing this adaptor.
# Setting is effectively private (called from new()); passing no argument
# returns the stored handle.
sub db {
  my ($self, $db) = @_;
  if ( defined $db ) {
    $self->{'_db'} = $db;
  }
  return $self->{'_db'};
}
# Convenience prepare function
# Forwards statement preparation to the underlying DBI connection reached
# through the stored DBAdaptor's DBConnection (db()->dbc), so callers can
# simply say $self->prepare($sql).
sub prepare {
  my ($self,$query) = @_;
  $self->db->dbc->prepare( $query );
}
1;
| Ensembl/ensembl-pipeline | modules/Bio/EnsEMBL/Pipeline/DBSQL/FlagAdaptor.pm | Perl | apache-2.0 | 7,452 |
#line 1
package Module::Install::Base;
use strict 'vars';
use vars qw{$VERSION};
BEGIN {
$VERSION = '0.91';
}
# Suspend handler for "redefined" warnings
BEGIN {
# my $w = $SIG{__WARN__};
# $SIG{__WARN__} = sub { $w };
}
#line 42
# Shared constructor for all Module::Install extensions.  This is bundled,
# auto-generated Module::Install code (note the #line markers); it is left
# byte-for-byte unmodified.  On first use per class it installs 'call' and
# 'load' methods that forward to the top-level Module::Install object, then
# blesses the remaining arguments into a hash.  The symbolic glob
# assignments are legal because only strict 'vars' is in effect.
sub new {
    my $class = shift;

    unless ( defined &{"${class}::call"} ) {
	*{"${class}::call"} = sub { shift->_top->call(@_) };
    }
    unless ( defined &{"${class}::load"} ) {
	*{"${class}::load"} = sub { shift->_top->load(@_) };
    }

    bless { @_ }, $class;
}
#line 61
# Forward any unknown method call to the top-level object's autoload
# handler.  'local $@' keeps the eval from clobbering the caller's error
# state; if the top object provides no handler, the call silently returns.
sub AUTOLOAD {
    local $@;
    my $func = eval { shift->_top->autoload } or return;
    goto &$func;
}
#line 75
# Accessor for the top-level Module::Install object this extension hangs off.
sub _top {
    $_[0]->{_top};
}

#line 90

# Return the admin object when one exists (author-side checkout), otherwise
# a FakeAdmin null object so admin-only calls become harmless no-ops.
sub admin {
    $_[0]->_top->{admin}
    or
    Module::Install::Base::FakeAdmin->new;
}

#line 106

# Truthy (the admin object's version) in author mode; undef for end users,
# since FakeAdmin declares no $VERSION for UNIVERSAL::VERSION to report.
sub is_admin {
    $_[0]->admin->VERSION;
}
sub DESTROY {}   # explicit no-op so object destruction never hits AUTOLOAD

package Module::Install::Base::FakeAdmin;

# Null-object stand-in used when no real admin object exists (end-user
# installs).  A single shared instance is lazily created and reused.
my $fake;

sub new {
    $fake ||= bless(\@_, $_[0]);
}

sub AUTOLOAD {}   # every admin method becomes a silent no-op

sub DESTROY {}
# Restore warning handler
BEGIN {
# $SIG{__WARN__} = $SIG{__WARN__}->();
}
1;
#line 154
| gitpan/Text-Markdown | inc/Module/Install/Base.pm | Perl | bsd-3-clause | 1,017 |
# SNMP::Info::Layer2::ZyXEL_DSLAM
# $Id$
#
# Copyright (c) 2008 Max Baker
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the University of California, Santa Cruz nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR # ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
package SNMP::Info::Layer2::ZyXEL_DSLAM;

use strict;
use Exporter;
use SNMP::Info::Layer2;

# Subclass of the generic Layer2 class; Exporter is inherited for historic
# reasons only (EXPORT_OK below is empty, so nothing is actually exported).
@SNMP::Info::Layer2::ZyXEL_DSLAM::ISA = qw/SNMP::Info::Layer2 Exporter/;
@SNMP::Info::Layer2::ZyXEL_DSLAM::EXPORT_OK = qw//;

use vars qw/$VERSION %FUNCS %GLOBALS %MIBS %MUNGE/;

$VERSION = '3.34';

# Set for No CDP
%GLOBALS = ( %SNMP::Info::Layer2::GLOBALS );

# Extra SNMP table methods for this device family.
# NOTE(review): the key 'ip_adresses' is misspelled, while sub ip() below
# calls ip_addresses(); the working method presumably resolves through the
# base class — confirm before renaming or removing this entry.
%FUNCS = (
    %SNMP::Info::Layer2::FUNCS,
    'ip_adresses' => 'ipAdEntAddr',
    'i_name' => 'ifDescr',
    'i_description' => 'adslLineConfProfile',
);

%MIBS
    = ( %SNMP::Info::Layer2::MIBS, 'ADSL-LINE-MIB' => 'adslLineConfProfile' );

%MUNGE = ( %SNMP::Info::Layer2::MUNGE );
# Report the OSI layers the device claims to operate at.
# Falls back to layers 1+2 ('00000011') when the device reports nothing.
sub layers {
    my $zyxel = shift;

    # Ask the parent class. Calling $zyxel->layers() here — as the old code
    # did — dispatches straight back into this very sub (defining layers()
    # in this class bypasses SNMP::Info's AUTOLOAD resolution), recursing
    # until the stack blows.  SUPER:: reaches the inherited implementation.
    my $layers = $zyxel->SUPER::layers();
    return $layers if defined $layers;

    # If these don't claim to have any layers, we'll give them 1+2
    return '00000011';
}
# Every device handled by this class is made by ZyXEL.
sub vendor { 'zyxel' }
# Operating system family is reported as the vendor name.
sub os { 'zyxel' }
# Extract the OS version from the device description (sysDescr): the first
# whitespace-delimited token following the word "version".  Returns undef
# when the description is missing or contains no version string.
sub os_ver {
    my $zyxel = shift;
    my $descr = $zyxel->description();

    # Guard against devices returning no description at all; the old code
    # produced an "uninitialized value" warning from the match below.
    return unless defined $descr;

    if ( $descr =~ m/version (\S+) / ) {
        return $1;
    }
    return;
}
# Map the device description onto a known ZyXEL DSLAM model number.
# Returns undef for descriptions that match no known module (or are absent).
sub model {
    my $zyxel = shift;
    my $desc = $zyxel->description();

    # No description means no way to identify the model; also avoids the
    # uninitialized-value warnings the unguarded matches used to emit.
    return unless defined $desc;

    if ( $desc =~ /8-port ADSL Module\(Annex A\)/ ) {
        return "AAM1008-61";
    }
    elsif ( $desc =~ /8-port ADSL Module\(Annex B\)/ ) {
        return "AAM1008-63";
    }
    return;
}
# Return an IP address of the DSLAM, picked from the ipAdEntAddr table.
# When several addresses are present, whichever dotted-quad key is seen
# last wins (hash order — effectively arbitrary, as before).
sub ip {
    my $zyxel = shift;

    # NOTE(review): %FUNCS above registers 'ip_adresses' (sic) while this
    # calls ip_addresses(); presumably the base class supplies the latter —
    # confirm before touching either spelling.
    my $ip_hash = $zyxel->ip_addresses();

    # A device that answers nothing used to make keys() die on an undef
    # hash reference under strict refs.
    return unless $ip_hash;

    my $found_ip;
    foreach my $ip ( keys %{$ip_hash} ) {
        $found_ip = $ip
            if ( defined $ip
            and $ip =~ /\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$/ );
    }
    return $found_ip;
}
1;
__END__
=head1 NAME
SNMP::Info::Layer2::ZyXEL_DSLAM - SNMP Interface to ZyXEL DSLAM
=head1 AUTHOR
Dmitry Sergienko (C<dmitry@trifle.net>)
=head1 SYNOPSIS
# Let SNMP::Info determine the correct subclass for you.
my $zyxel = new SNMP::Info(
AutoSpecify => 1,
Debug => 1,
DestHost => 'myhub',
Community => 'public',
Version => 1
)
or die "Can't connect to DestHost.\n";
my $class = $zyxel->class();
print "SNMP::Info determined this device to fall under subclass : $class\n";
=head1 DESCRIPTION
Provides abstraction to the configuration information obtainable from a
ZyXEL device through SNMP. See inherited classes' documentation for
inherited methods.
=head2 Inherited Classes
=over
=item SNMP::Info::Layer2
=back
=head2 Required MIBs
=over
=item F<ADSL-LINE-MIB>
=item Inherited Classes
MIBs listed in L<SNMP::Info::Layer2/"Required MIBs"> and their inherited
classes.
=back
=head1 GLOBALS
These are methods that return scalar value from SNMP
=head2 Overrides
=over
=item $zyxel->vendor()
Returns 'zyxel'
=item $zyxel->os()
Returns 'zyxel'
=item $zyxel->os_ver()
Culls Version from description()
=item $zyxel->ip()
Returns IP Address of DSLAM.
(C<ipAdEntAddr>)
=item $zyxel->model()
Tries to cull out model out of the description field.
=item $zyxel->layers()
Returns the layers reported by the device, or 00000011 (layers 1+2) when the device reports none.
=back
=head2 Global Methods imported from SNMP::Info::Layer2
See documentation in L<SNMP::Info::Layer2/"GLOBALS"> for details.
=head1 TABLE METHODS
=head2 Overrides
=over
=item $zyxel->i_name()
Returns reference to map of IIDs to port name (C<ifDescr>).
=item $zyxel->i_description()
Returns reference to map of IIDs to human-set port description (profile name).
=back
=head2 Table Methods imported from SNMP::Info::Layer2
See documentation in L<SNMP::Info::Layer2/"TABLE METHODS"> for details.
=cut
| 42wim/snmp-info | Info/Layer2/ZyXEL_DSLAM.pm | Perl | bsd-3-clause | 5,290 |
#!/usr/bin/perl -w
#
# Version: 0.3, 121809
#
# purpose: Transfer particle coordinates from xmipp format (.raw) to Boxer.
#
# description: Takes as input
#
# author: Slaton Lipscomb <slaton@ocf.berkeley.edu> & Michael Cianfrocco (2010)
#
# usage: spi2boxdb -b BOXSIZE -i FILE -o FILE [-d] -s image Size of micrograph
#
# todo: 1) replace spaces with tabs in output
#
use Getopt::Long;
use Text::Tabs;

# Command-line state.
my $input;		# input spi file
my $output;		# output box file
my $boxsize;		# particle boxsize (pixels)
my $field5 = -3;	# field 5 constant in Box DB format
my $debug = 0;		# boolean flag
my @fields;		# array of SPIDER registers parsed from each line
my $xcoord;		# file-scope lexicals: format BOXFILE below reads these
my $ycoord;
my $imageSize;		# NOTE(review): parsed and echoed in debug mode only;
			# not used in the coordinate conversion below.

$tabstop = 8;		# Text::Tabs setting (legacy; unexpand is never called)

GetOptions("i=s" => \$input, # string
	   "o=s" => \$output, # string
	   "b=s" => \$boxsize, # string
	   "s=s" => \$imageSize, # string
	   "debug" => \$debug); # flag

if (!$input || !$output) { usage() }

if ($debug) { # print values of all parameters
    print "input = $input\n";
    print "output = $output\n";
    print "boxsize = $boxsize\n";
    print "imageSize = $imageSize\n";
    print "debug = $debug\n";
}

# GetOptions leaves anything it couldn't parse in @ARGV; treat it as an error.
if ($ARGV[0]) {
    print "$0: Unknown option(s) unprocessed by Getopt::Long\n";
    foreach (@ARGV) {
	print "$_\n";
    }
    exit 1;
}

if ($debug) { print "Parsing SPIDER data from $input\n" }

## open spi file for reading & box file for writing.
## Three-argument open: the old two-argument form would honour mode
## characters (>, |, ...) embedded in a user-supplied filename.
open (SPIFILE, '<', $input)  || die "Can't open input SPIDER file `$input`: $!";
open (BOXFILE, '>', $output) || die "Can't open output Box DB file `$output`$!";

## line by line, read & parse the spi file and write out the box file
while (defined ($line = <SPIFILE>)) {
    chomp ($line);		# remove endline
    $line =~ s/^\s+//;		# delete leading whitespace
    $line =~ s/\s+$//;		# delete trailing whitespace
    print " -> trimmed line is \"$line\"\n" if $debug;

    if (substr($line,0,1) eq "#") { # if comment skip this line
	print " --> skipped comment \"$line\"\n" if $debug;
	next;
    }

    @fields = split(/\s+/, $line); # split by whitespace
    if ($debug) {
	print " --> newline";
	foreach $x (@fields) { print " ---> field = \"$x\"\n" }
    }

    # Boxer stores the lower-left corner of the box, so shift the SPIDER
    # particle-centre coordinates down/left by half the box size.
    $half = $boxsize / 2;
    $xcoord = sprintf("%-10.0f", $fields[0]-$half); # 10 digits, no sig figs, align left
    $ycoord = sprintf("%-10.0f", $fields[1]-$half);
    print " -> xcoord = \"$xcoord\"\n"," -> ycoord = \"$ycoord\"\n" if $debug;

    write (BOXFILE);
}

# Error message used to name `$input` here although BOXFILE is the output.
close (BOXFILE) || die "Couldn't close Box DB file `$output`: $!";
exit 0;
##
## print usage & quit
##
##
## print usage & quit — now documents all options the script accepts
## (the file header lists -s and -d, but the old help text omitted them).
##
sub usage {
    print <<END;
Usage: spi2boxdb -b BOXSIZE -i FILE -o FILE [-s SIZE] [-d]

Transfer particle coordinates from a SPIDER document file (.spi) to a Boxer
Box DB file (.box).

Required arguments:
    -b BOXSIZE  your particle boxsize in pixels
    -i FILE     read SPIDER input from FILE
    -o FILE     write Box DB output to FILE

Optional arguments:
    -s SIZE     image size of the micrograph in pixels
    -d          print debugging output

END
    exit 1;
}
# Perl 'format' driven by write(BOXFILE) above: four left-justified columns
# (x, y, boxsize, boxsize) and a right-justified fifth column ($field5, -3)
# matching EMAN Boxer's .box layout.  The picture lines below must not be
# reflowed — column widths are significant.
format BOXFILE =
@<<<<<< @<<<<<< @<<<<<< @<<<<<< @>
$xcoord, $ycoord, $boxsize, $boxsize, $field5
.
| leschzinerlab/FreeHand | raw2boxdb2.pl | Perl | mit | 2,914 |
#!/usr/bin/perl
BEGIN {
    # Module search paths relative to the expected invocation directory.
    push(@INC, "../modules/isis");
    # these are for the "menu.pl" script, which
    # requires different relative paths
    push(@INC, "../queries/modules/");
    push(@INC, "../queries/modules/isis");
    push(@INC, "../../lib/");
}

# One module per IS-IS check, plus shared configuration helpers.
use dup_addrs;
use adjacency;
use mtu_mismatch;
use authentication;
use routes;
use ConfigCommon;
use Getopt::Long;

my %options;
my $q;   # verbosity passed to each check: 1 = quiet, 2 = summary,
         # undef = full output

GetOptions(\%options, "quiet", "summary", "files=s");

if (defined($options{'quiet'})) {
    $q = 1;
} elsif (defined($options{'summary'})) {
    $q = 2;
}

if (defined($options{'files'})) {
    # Create the per-check output directory.
    # NOTE(review): the option value is interpolated into a shell command, so
    # a hostile --files value could inject shell syntax; consider the list
    # form system('mkdir', '-p', $options{'files'}).
    system("mkdir -p $options{'files'}");
}
######################################################################
# Divert STDOUT to $basedir/$filename when --files=DIR was given; otherwise
# leave STDOUT alone so report sections appear on the terminal.  Pass a true
# second argument to append rather than truncate.
sub redirect {
    my ($filename, $append) = @_;

    return unless defined $options{'files'};

    my $mode = $append ? '>>' : '>';
    my $basedir = $options{'files'};

    # Three-argument open with an explicit mode: the old two-argument form
    # interpolated the mode into the filename string, which is fragile and
    # unsafe for odd paths; the result was also never checked.
    open(F, $mode, "$basedir/$filename")
        or die "Can't open $basedir/$filename: $!";
    *STDOUT = *F;
}
# Print a section banner, but only in terminal mode (no --files directory,
# where each section already gets its own file).
sub header {
    my ($header) = @_;
    if (!defined($options{'files'})) {
        # Use print, not printf: the old `printf "$header"` treated any '%'
        # in the banner text as a format directive.
        print $header;
    }
}
# Build $basedir/index_isis.html linking to each per-check report file that
# produced output.  Does nothing unless --files=DIR was given.
sub make_index {
    my $basedir;
    if (defined($options{'files'})) {
        $basedir = $options{'files'};
        # Three-argument, error-checked open (old code used unchecked
        # two-argument open with the mode embedded in the string).
        open(F, '>', "$basedir/index_isis.html")
            or die "Can't open $basedir/index_isis.html: $!";
        print F "<html><h3>rcc ISIS Error Summary</h3><center><table width=90% border=0>\n";
    } else {
        return;
    }

    # Human-readable section title for each report file.
    my %title = (
                 'isis-duplicate-address' => 'Duplicate Address Check',
                 'isis-adjacency' => 'Adjacency Checks',
                 'isis-mtu-mismatch' => 'MTU Mismatch Checks',
                 'isis-authentication' => 'Authentication Checks',
                 'isis-routes' => 'Route Checks');

    # One-line description shown beside each link.
    my %contents = (
                 'isis-duplicate-address' => 'Check for duplicate network IDs',
                 'isis-adjacency' => 'Check for IS-IS adjacency misconfigurations',
                 'isis-mtu-mismatch' => 'Check that MTUs are not mismatched over adjacencies',
                 'isis-authentication' => 'Checks authentication type and key is consistent',
                 'isis-routes' => 'FIXME');

    # Sort so the generated index is deterministic run-to-run (hash key
    # order is randomized in modern perls).
    my @files = (sort keys %title);

    foreach my $file (@files) {
        my $filename = sprintf("%s/%s.txt", $basedir, $file);
        my $filename_rel = sprintf("%s.txt", $file);

        # Skip checks whose report file is empty or missing.
        my @stats = stat($filename);
        my $size = $stats[7];
        next if (!$size);

        printf F ("<tr><td><a href=%s>%s</a></td><td>%s</td></tr>\n",
                  $filename_rel,
                  $title{$file}, $contents{$file});
    }
    # NOTE(review): the table/center/html tags opened above are never closed;
    # browsers tolerate this, so the output is left unchanged.
    close(F) or die "Can't close $basedir/index_isis.html: $!";
}
######################################################################
# Run each IS-IS check in turn.  Output goes to the terminal (with a banner
# from header()) or, under --files=DIR, into one file per check via
# redirect().  Note: `new dup_addrs` etc. use Perl's indirect-object syntax;
# kept as-is here, though Class->new is the safer spelling.
&redirect("isis-duplicate-address.txt");
&header("\n\nDuplicate Address Check\n=====================\n");
my $dup_addrs = new dup_addrs;
$dup_addrs->check_duplicate_address($q);

&redirect("isis-adjacency.txt");
&header("\n\nAdjacency Check\n=====================\n");
my $adjacency = new adjacency;
$adjacency->check_dangling_adjacencies($q);
$adjacency->check_adjacency_levels($q);
$adjacency->check_area_adjacencies($q);

&redirect("isis-mtu-mismatch.txt");
&header("\n\nMTU Mismatch Check\n=====================\n");
my $mtu_mismatch = new mtu_mismatch;
$mtu_mismatch->check_mtu_mismatch($q);

&redirect("isis-authentication.txt");
&header("\n\nAuthentication Check\n=====================\n");
my $authentication = new authentication;
$authentication->check_auth_type($q);
$authentication->check_auth_key($q);

&redirect("isis-routes.txt");
&header("\n\nRoutes Check\n=====================\n");
my $routes = new routes;
$routes->build_shortest_paths_table($q);

# Build the HTML index over whatever report files were produced.
&make_index();
| noise-lab/rcc | perl/src/queries/scripts/test_all_isis.pl | Perl | mit | 3,381 |
###########################################################################
#
# This file is partially auto-generated by the DateTime::Locale generator
# tools (v0.10). This code generator comes with the DateTime::Locale
# distribution in the tools/ directory, and is called generate-modules.
#
# This file was generated from the CLDR JSON locale data. See the LICENSE.cldr
# file included in this distribution for license details.
#
# Do not edit this file directly unless you are sure the part you are editing
# is not created by the generator.
#
###########################################################################
=pod
=encoding UTF-8
=head1 NAME
DateTime::Locale::as_IN - Locale data examples for the as-IN locale.
=head1 DESCRIPTION
This pod file contains examples of the locale data available for the
Assamese India locale.
=head2 Days
=head3 Wide (format)
সোমবাৰ
মঙ্গলবাৰ
বুধবাৰ
বৃহষ্পতিবাৰ
শুক্ৰবাৰ
শনিবাৰ
দেওবাৰ
=head3 Abbreviated (format)
সোম
মঙ্গল
বুধ
বৃহষ্পতি
শুক্ৰ
শনি
ৰবি
=head3 Narrow (format)
M
T
W
T
F
S
S
=head3 Wide (stand-alone)
সোমবাৰ
মঙ্গলবাৰ
বুধবাৰ
বৃহষ্পতিবাৰ
শুক্ৰবাৰ
শনিবাৰ
দেওবাৰ
=head3 Abbreviated (stand-alone)
সোম
মঙ্গল
বুধ
বৃহষ্পতি
শুক্ৰ
শনি
ৰবি
=head3 Narrow (stand-alone)
M
T
W
T
F
S
S
=head2 Months
=head3 Wide (format)
জানুৱাৰী
ফেব্ৰুৱাৰী
মাৰ্চ
এপ্ৰিল
মে
জুন
জুলাই
আগষ্ট
ছেপ্তেম্বৰ
অক্টোবৰ
নৱেম্বৰ
ডিচেম্বৰ
=head3 Abbreviated (format)
জানু
ফেব্ৰু
মাৰ্চ
এপ্ৰিল
মে
জুন
জুলাই
আগ
সেপ্ট
অক্টো
নভে
ডিসে
=head3 Narrow (format)
1
2
3
4
5
6
7
8
9
10
11
12
=head3 Wide (stand-alone)
জানুৱাৰী
ফেব্ৰুৱাৰী
মাৰ্চ
এপ্ৰিল
মে
জুন
জুলাই
আগষ্ট
ছেপ্তেম্বৰ
অক্টোবৰ
নৱেম্বৰ
ডিচেম্বৰ
=head3 Abbreviated (stand-alone)
জানু
ফেব্ৰু
মাৰ্চ
এপ্ৰিল
মে
জুন
জুলাই
আগ
সেপ্ট
অক্টো
নভে
ডিসে
=head3 Narrow (stand-alone)
1
2
3
4
5
6
7
8
9
10
11
12
=head2 Quarters
=head3 Wide (format)
প্ৰথম প্ৰহৰ
দ্বিতীয় প্ৰহৰ
তৃতীয় প্ৰহৰ
চতুৰ্থ প্ৰহৰ
=head3 Abbreviated (format)
প্ৰথম প্ৰহৰ
দ্বিতীয় প্ৰহৰ
তৃতীয় প্ৰহৰ
চতুৰ্থ প্ৰহৰ
=head3 Narrow (format)
1
2
3
4
=head3 Wide (stand-alone)
প্ৰথম প্ৰহৰ
দ্বিতীয় প্ৰহৰ
তৃতীয় প্ৰহৰ
চতুৰ্থ প্ৰহৰ
=head3 Abbreviated (stand-alone)
প্ৰথম প্ৰহৰ
দ্বিতীয় প্ৰহৰ
তৃতীয় প্ৰহৰ
চতুৰ্থ প্ৰহৰ
=head3 Narrow (stand-alone)
1
2
3
4
=head2 Eras
=head3 Wide (format)
BCE
CE
=head3 Abbreviated (format)
BCE
CE
=head3 Narrow (format)
BCE
CE
=head2 Date Formats
=head3 Full
2008-02-05T18:30:30 = 2008 ফেব্ৰুৱাৰী 5, মঙ্গলবাৰ
1995-12-22T09:05:02 = 1995 ডিচেম্বৰ 22, শুক্ৰবাৰ
-0010-09-15T04:44:23 = -10 ছেপ্তেম্বৰ 15, শনিবাৰ
=head3 Long
2008-02-05T18:30:30 = 2008 ফেব্ৰুৱাৰী 5
1995-12-22T09:05:02 = 1995 ডিচেম্বৰ 22
-0010-09-15T04:44:23 = -10 ছেপ্তেম্বৰ 15
=head3 Medium
2008-02-05T18:30:30 = 2008 ফেব্ৰু 5
1995-12-22T09:05:02 = 1995 ডিসে 22
-0010-09-15T04:44:23 = -10 সেপ্ট 15
=head3 Short
2008-02-05T18:30:30 = 2008-02-05
1995-12-22T09:05:02 = 1995-12-22
-0010-09-15T04:44:23 = -10-09-15
=head2 Time Formats
=head3 Full
2008-02-05T18:30:30 = 18:30:30 UTC
1995-12-22T09:05:02 = 09:05:02 UTC
-0010-09-15T04:44:23 = 04:44:23 UTC
=head3 Long
2008-02-05T18:30:30 = 18:30:30 UTC
1995-12-22T09:05:02 = 09:05:02 UTC
-0010-09-15T04:44:23 = 04:44:23 UTC
=head3 Medium
2008-02-05T18:30:30 = 18:30:30
1995-12-22T09:05:02 = 09:05:02
-0010-09-15T04:44:23 = 04:44:23
=head3 Short
2008-02-05T18:30:30 = 18:30
1995-12-22T09:05:02 = 09:05
-0010-09-15T04:44:23 = 04:44
=head2 Datetime Formats
=head3 Full
2008-02-05T18:30:30 = 2008 ফেব্ৰুৱাৰী 5, মঙ্গলবাৰ 18:30:30 UTC
1995-12-22T09:05:02 = 1995 ডিচেম্বৰ 22, শুক্ৰবাৰ 09:05:02 UTC
-0010-09-15T04:44:23 = -10 ছেপ্তেম্বৰ 15, শনিবাৰ 04:44:23 UTC
=head3 Long
2008-02-05T18:30:30 = 2008 ফেব্ৰুৱাৰী 5 18:30:30 UTC
1995-12-22T09:05:02 = 1995 ডিচেম্বৰ 22 09:05:02 UTC
-0010-09-15T04:44:23 = -10 ছেপ্তেম্বৰ 15 04:44:23 UTC
=head3 Medium
2008-02-05T18:30:30 = 2008 ফেব্ৰু 5 18:30:30
1995-12-22T09:05:02 = 1995 ডিসে 22 09:05:02
-0010-09-15T04:44:23 = -10 সেপ্ট 15 04:44:23
=head3 Short
2008-02-05T18:30:30 = 2008-02-05 18:30
1995-12-22T09:05:02 = 1995-12-22 09:05
-0010-09-15T04:44:23 = -10-09-15 04:44
=head2 Available Formats
=head3 E (ccc)
2008-02-05T18:30:30 = মঙ্গল
1995-12-22T09:05:02 = শুক্ৰ
-0010-09-15T04:44:23 = শনি
=head3 EHm (E HH:mm)
2008-02-05T18:30:30 = মঙ্গল 18:30
1995-12-22T09:05:02 = শুক্ৰ 09:05
-0010-09-15T04:44:23 = শনি 04:44
=head3 EHms (E HH:mm:ss)
2008-02-05T18:30:30 = মঙ্গল 18:30:30
1995-12-22T09:05:02 = শুক্ৰ 09:05:02
-0010-09-15T04:44:23 = শনি 04:44:23
=head3 Ed (d, E)
2008-02-05T18:30:30 = 5, মঙ্গল
1995-12-22T09:05:02 = 22, শুক্ৰ
-0010-09-15T04:44:23 = 15, শনি
=head3 Ehm (E h:mm a)
2008-02-05T18:30:30 = মঙ্গল 6:30 PM
1995-12-22T09:05:02 = শুক্ৰ 9:05 AM
-0010-09-15T04:44:23 = শনি 4:44 AM
=head3 Ehms (E h:mm:ss a)
2008-02-05T18:30:30 = মঙ্গল 6:30:30 PM
1995-12-22T09:05:02 = শুক্ৰ 9:05:02 AM
-0010-09-15T04:44:23 = শনি 4:44:23 AM
=head3 Gy (G y)
2008-02-05T18:30:30 = CE 2008
1995-12-22T09:05:02 = CE 1995
-0010-09-15T04:44:23 = BCE -10
=head3 GyMMM (G y MMM)
2008-02-05T18:30:30 = CE 2008 ফেব্ৰু
1995-12-22T09:05:02 = CE 1995 ডিসে
-0010-09-15T04:44:23 = BCE -10 সেপ্ট
=head3 GyMMMEd (G y MMM d, E)
2008-02-05T18:30:30 = CE 2008 ফেব্ৰু 5, মঙ্গল
1995-12-22T09:05:02 = CE 1995 ডিসে 22, শুক্ৰ
-0010-09-15T04:44:23 = BCE -10 সেপ্ট 15, শনি
=head3 GyMMMd (G y MMM d)
2008-02-05T18:30:30 = CE 2008 ফেব্ৰু 5
1995-12-22T09:05:02 = CE 1995 ডিসে 22
-0010-09-15T04:44:23 = BCE -10 সেপ্ট 15
=head3 H (HH)
2008-02-05T18:30:30 = 18
1995-12-22T09:05:02 = 09
-0010-09-15T04:44:23 = 04
=head3 Hm (HH:mm)
2008-02-05T18:30:30 = 18:30
1995-12-22T09:05:02 = 09:05
-0010-09-15T04:44:23 = 04:44
=head3 Hms (HH:mm:ss)
2008-02-05T18:30:30 = 18:30:30
1995-12-22T09:05:02 = 09:05:02
-0010-09-15T04:44:23 = 04:44:23
=head3 Hmsv (HH:mm:ss v)
2008-02-05T18:30:30 = 18:30:30 UTC
1995-12-22T09:05:02 = 09:05:02 UTC
-0010-09-15T04:44:23 = 04:44:23 UTC
=head3 Hmv (HH:mm v)
2008-02-05T18:30:30 = 18:30 UTC
1995-12-22T09:05:02 = 09:05 UTC
-0010-09-15T04:44:23 = 04:44 UTC
=head3 M (L)
2008-02-05T18:30:30 = 2
1995-12-22T09:05:02 = 12
-0010-09-15T04:44:23 = 9
=head3 MEd (MM-dd, E)
2008-02-05T18:30:30 = 02-05, মঙ্গল
1995-12-22T09:05:02 = 12-22, শুক্ৰ
-0010-09-15T04:44:23 = 09-15, শনি
=head3 MMM (LLL)
2008-02-05T18:30:30 = ফেব্ৰু
1995-12-22T09:05:02 = ডিসে
-0010-09-15T04:44:23 = সেপ্ট
=head3 MMMEd (MMM d, E)
2008-02-05T18:30:30 = ফেব্ৰু 5, মঙ্গল
1995-12-22T09:05:02 = ডিসে 22, শুক্ৰ
-0010-09-15T04:44:23 = সেপ্ট 15, শনি
=head3 MMMMd (MMMM d)
2008-02-05T18:30:30 = ফেব্ৰুৱাৰী 5
1995-12-22T09:05:02 = ডিচেম্বৰ 22
-0010-09-15T04:44:23 = ছেপ্তেম্বৰ 15
=head3 MMMd (MMM d)
2008-02-05T18:30:30 = ফেব্ৰু 5
1995-12-22T09:05:02 = ডিসে 22
-0010-09-15T04:44:23 = সেপ্ট 15
=head3 Md (MM-dd)
2008-02-05T18:30:30 = 02-05
1995-12-22T09:05:02 = 12-22
-0010-09-15T04:44:23 = 09-15
=head3 d (d)
2008-02-05T18:30:30 = 5
1995-12-22T09:05:02 = 22
-0010-09-15T04:44:23 = 15
=head3 h (h a)
2008-02-05T18:30:30 = 6 PM
1995-12-22T09:05:02 = 9 AM
-0010-09-15T04:44:23 = 4 AM
=head3 hm (h:mm a)
2008-02-05T18:30:30 = 6:30 PM
1995-12-22T09:05:02 = 9:05 AM
-0010-09-15T04:44:23 = 4:44 AM
=head3 hms (h:mm:ss a)
2008-02-05T18:30:30 = 6:30:30 PM
1995-12-22T09:05:02 = 9:05:02 AM
-0010-09-15T04:44:23 = 4:44:23 AM
=head3 hmsv (h:mm:ss a v)
2008-02-05T18:30:30 = 6:30:30 PM UTC
1995-12-22T09:05:02 = 9:05:02 AM UTC
-0010-09-15T04:44:23 = 4:44:23 AM UTC
=head3 hmv (h:mm a v)
2008-02-05T18:30:30 = 6:30 PM UTC
1995-12-22T09:05:02 = 9:05 AM UTC
-0010-09-15T04:44:23 = 4:44 AM UTC
=head3 ms (mm:ss)
2008-02-05T18:30:30 = 30:30
1995-12-22T09:05:02 = 05:02
-0010-09-15T04:44:23 = 44:23
=head3 y (y)
2008-02-05T18:30:30 = 2008
1995-12-22T09:05:02 = 1995
-0010-09-15T04:44:23 = -10
=head3 yM (y-MM)
2008-02-05T18:30:30 = 2008-02
1995-12-22T09:05:02 = 1995-12
-0010-09-15T04:44:23 = -10-09
=head3 yMEd (y-MM-dd, E)
2008-02-05T18:30:30 = 2008-02-05, মঙ্গল
1995-12-22T09:05:02 = 1995-12-22, শুক্ৰ
-0010-09-15T04:44:23 = -10-09-15, শনি
=head3 yMMM (y MMM)
2008-02-05T18:30:30 = 2008 ফেব্ৰু
1995-12-22T09:05:02 = 1995 ডিসে
-0010-09-15T04:44:23 = -10 সেপ্ট
=head3 yMMMEd (y MMM d, E)
2008-02-05T18:30:30 = 2008 ফেব্ৰু 5, মঙ্গল
1995-12-22T09:05:02 = 1995 ডিসে 22, শুক্ৰ
-0010-09-15T04:44:23 = -10 সেপ্ট 15, শনি
=head3 yMMMM (y MMMM)
2008-02-05T18:30:30 = 2008 ফেব্ৰুৱাৰী
1995-12-22T09:05:02 = 1995 ডিচেম্বৰ
-0010-09-15T04:44:23 = -10 ছেপ্তেম্বৰ
=head3 yMMMd (y MMM d)
2008-02-05T18:30:30 = 2008 ফেব্ৰু 5
1995-12-22T09:05:02 = 1995 ডিসে 22
-0010-09-15T04:44:23 = -10 সেপ্ট 15
=head3 yMd (y-MM-dd)
2008-02-05T18:30:30 = 2008-02-05
1995-12-22T09:05:02 = 1995-12-22
-0010-09-15T04:44:23 = -10-09-15
=head3 yQQQ (y QQQ)
2008-02-05T18:30:30 = 2008 প্ৰথম প্ৰহৰ
1995-12-22T09:05:02 = 1995 চতুৰ্থ প্ৰহৰ
-0010-09-15T04:44:23 = -10 তৃতীয় প্ৰহৰ
=head3 yQQQQ (y QQQQ)
2008-02-05T18:30:30 = 2008 প্ৰথম প্ৰহৰ
1995-12-22T09:05:02 = 1995 চতুৰ্থ প্ৰহৰ
-0010-09-15T04:44:23 = -10 তৃতীয় প্ৰহৰ
=head2 Miscellaneous
=head3 Prefers 24 hour time?
Yes
=head3 Local first day of the week
7 (দেওবাৰ)
=head1 SUPPORT
See L<DateTime::Locale>.
=cut
| jkb78/extrajnm | local/lib/perl5/DateTime/Locale/as_IN.pod | Perl | mit | 12,051 |
package Date::Manip::Date;
# Copyright (c) 1995-2014 Sullivan Beck. All rights reserved.
# This program is free software; you can redistribute it and/or modify it
# under the same terms as Perl itself.
########################################################################
# Any routine that starts with an underscore (_) is NOT intended for
# public use. They are for internal use in the the Date::Manip
# modules and are subject to change without warning or notice.
#
# ABSOLUTELY NO USER SUPPORT IS OFFERED FOR THESE ROUTINES!
########################################################################
use Date::Manip::Obj;
@ISA = ('Date::Manip::Obj');
require 5.010000;
use warnings;
use strict;
use integer;
use utf8;
use IO::File;
use Storable qw(dclone);
#use re 'debug';
use Date::Manip::Base;
use Date::Manip::TZ;
our $VERSION;
$VERSION='6.48';
END { undef $VERSION; }
########################################################################
# BASE METHODS
########################################################################
# Call this every time a new date is put in to make sure everything is
# correctly initialized.
#
# Reset the object to an empty, unset state.  This is called every
# time a new date is about to be stored so no stale parse results
# remain from a previous use of the object.
sub _init {
   my($self) = @_;

   $self->{'err'} = '';

   my %data = (
      'set'    => 0,           # 1 once a date has been set,
                               # 2 while the date is being set

      # The date as input
      'in'     => '',          # the string that was parsed (if any)
      'zin'    => '',          # the timezone that was parsed (if any)

      # The date in the parsed timezone
      'date'   => [],          # the parsed date split into fields
      'def'    => [0,0,0,0,0,0],
                               # 1 for each field that came from
                               # defaults rather than parsed,
                               # '' for an implied field
      'tz'     => '',          # the timezone of the date
      'isdst'  => '',          # 1 if the date is in DST
      'offset' => [],          # the offset from GMT
      'abb'    => '',          # the timezone abbreviation

      'f'      => {},          # fields used in printing a date

      # The date in GMT
      'gmt'    => [],          # the date converted to GMT

      # The date in the local timezone
      'loc'    => [],          # the date converted to local timezone
   );
   $self->{'data'} = \%data;
}
# Process the arguments that were passed to the constructor.  A single
# argument is treated as a date string and handed to parse(); any
# other number of arguments (other than none) is a usage error.
sub _init_args {
   my($self) = @_;

   my @args = @{ $self->{'args'} };
   return  if (! @args);

   if (@args == 1) {
      $self->parse($args[0]);
   } else {
      warn "WARNING: [new] invalid arguments: @args\n";
   }
}
# Return the original string that was parsed to create this date
# (an empty string if the date was not set from a string).
sub input {
   my $self = shift;
   return $self->{'data'}{'in'};
}
########################################################################
# DATE PARSING
########################################################################
# Parse a date and/or time string in (almost) any recognized format
# and store the result in the object.
#
# $instring is the string to parse.  @opts may include any of
# 'noiso8601', 'nospecial', 'nodow', 'nodelta', 'noholidays' to skip
# the corresponding parsing stage.
#
# Returns 0 on success, 1 on failure (with $$self{'err'} set).
sub parse {
   my($self,$instring,@opts) = @_;
   $self->_init();
   my $noupdate = 0;

   if (! $instring) {
      $$self{'err'} = '[parse] Empty date string';
      return 1;
   }
   my %opts = map { $_,1 } @opts;

   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};

   my($done,$y,$m,$d,$h,$mn,$s,$tzstring,$zone,$abb,$off,$dow,$got_time,
      $default_time,$firsterr);

   # Try each character encoding of the input string in turn until one
   # parses successfully.
 ENCODING:
   foreach my $string ($dmb->_encoding($instring)) {
      $got_time     = 0;
      $default_time = 0;

      # Put parse in a simple loop for an easy exit.
    PARSE:
      {
         my(@tmp,$tmp);
         $$self{'err'} = '';

         # Check the standard date format
         $tmp = $dmb->split('date',$string);
         if (defined($tmp)) {
            ($y,$m,$d,$h,$mn,$s) = @$tmp;
            $got_time = 1;
            last PARSE;
         }

         # Parse ISO 8601 dates now (which may have a timezone).
         if (! exists $opts{'noiso8601'}) {
            ($done,@tmp) = $self->_parse_datetime_iso8601($string,\$noupdate);
            if ($done) {
               ($y,$m,$d,$h,$mn,$s,$tzstring,$zone,$abb,$off) = @tmp;
               $got_time = 1;
               last PARSE;
            }
         }

         # There's lots of ways that commas may be included.  Remove
         # them (unless it's preceded and followed by a digit in
         # which case it's probably a fractional separator).
         $string =~ s/(?<!\d),/ /g;
         $string =~ s/,(?!\d)/ /g;

         # Some special full date/time formats ('now', 'epoch')
         if (! exists $opts{'nospecial'}) {
            ($done,@tmp) = $self->_parse_datetime_other($string,\$noupdate);
            if ($done) {
               ($y,$m,$d,$h,$mn,$s,$tzstring,$zone,$abb,$off) = @tmp;
               $got_time = 1;
               last PARSE;
            }
         }

         # Parse (and remove) the time (and an immediately following timezone).
         ($got_time,@tmp) = $self->_parse_time('parse',$string,\$noupdate,%opts);
         if ($got_time) {
            ($string,$h,$mn,$s,$tzstring,$zone,$abb,$off) = @tmp;
         }

         # If nothing remains after removing the time, the date part
         # comes entirely from defaults (typically today).
         if (! $string) {
            ($y,$m,$d) = $self->_def_date($y,$m,$d,\$noupdate);
            last;
         }

         # Parse (and remove) the day of week.  Also, handle the simple DoW
         # formats.
         if (! exists $opts{'nodow'}) {
            ($done,@tmp) = $self->_parse_dow($string,\$noupdate);
            if (@tmp) {
               if ($done) {
                  ($y,$m,$d) = @tmp;
                  $default_time = 1;
                  last PARSE;
               } else {
                  ($string,$dow) = @tmp;
               }
            }
         }
         $dow = 0  if (! $dow);

         # At this point, the string might contain the following dates:
         #
         #   OTHER
         #   OTHER ZONE / ZONE OTHER
         #   DELTA
         #   DELTA ZONE / ZONE DELTA
         #   HOLIDAY
         #   HOLIDAY ZONE / ZONE HOLIDAY
         #
         # ZONE is only allowed if it wasn't parsed with the time

         # Unfortunately, there are some conflicts between zones and
         # some other formats, so try parsing the entire string as a date.
         (@tmp) = $self->_parse_date($string,$dow,\$noupdate,%opts);
         if (@tmp) {
            ($y,$m,$d,$dow) = @tmp;
            $default_time = 1;
            last PARSE;
         }

         # Parse any timezone
         if (! $tzstring) {
            ($string,@tmp) = $self->_parse_tz($string,\$noupdate);
            ($tzstring,$zone,$abb,$off) = @tmp  if (@tmp);
            last PARSE  if (! $string);
         }

         # Try the remainder of the string as a date.
         if ($tzstring) {
            (@tmp) = $self->_parse_date($string,$dow,\$noupdate,%opts);
            if (@tmp) {
               ($y,$m,$d,$dow) = @tmp;
               $default_time = 1;
               last PARSE;
            }
         }

         # Parse deltas
         #
         # Occasionally, a delta is entered for a date (which is
         # interpreted as the date relative to now).  There can be some
         # confusion between a date and a delta, but the most
         # important conflicts are the ISO 8601 dates (many of which
         # could be interpreted as a delta), but those have already
         # been taken care of.
         #
         # We may have already gotten the time:
         #    3 days ago at midnight UTC
         # (we already stripped off the 'at midnight UTC' above).
         #
         # We also need to handle the situation of a delta and a timezone.
         #    in 2 hours EST
         #    in 2 days EST
         # but only if no time was entered.
         if (! exists $opts{'nodelta'}) {
            ($done,@tmp) =
              $self->_parse_delta($string,$dow,$got_time,$h,$mn,$s,\$noupdate);
            if (@tmp) {
               ($y,$m,$d,$h,$mn,$s) = @tmp;
               $got_time = 1;
               $dow = '';
            }
            last PARSE  if ($done);
         }

         # Parse holidays
         unless (exists $opts{'noholidays'}) {
            ($done,@tmp) =
              $self->_parse_holidays($string,\$noupdate);
            if (@tmp) {
               ($y,$m,$d) = @tmp;
            }
            last PARSE  if ($done);
         }

         $$self{'err'} = '[parse] Invalid date string';
         last PARSE;
      }

      # We got an error parsing this encoding of the string.  It could
      # be that it is a genuine error, or it may be that we simply
      # need to try a different encoding.  If ALL encodings fail, we'll
      # return the error from the first one.
      if ($$self{'err'}) {
         if (! $firsterr) {
            $firsterr = $$self{'err'};
         }
         next ENCODING;
      }

      # If we didn't get an error, this is the string to use.
      last ENCODING;
   }

   if ($$self{'err'}) {
      $$self{'err'} = $firsterr;
      return 1;
   }

   # Make sure that a time is set.  If the date was parsed but no time
   # was, fill in either midnight or the current time depending on the
   # 'defaulttime' config setting.
   if (! $got_time) {
      if ($default_time) {
         if ($dmb->_config('defaulttime') eq 'midnight') {
            ($h,$mn,$s) = (0,0,0);
         } else {
            ($h,$mn,$s) = $dmt->_now('time',$noupdate);
            $noupdate = 1;
         }
         $got_time = 1;
      } else {
         ($h,$mn,$s) = $self->_def_time(undef,undef,undef,\$noupdate);
      }
   }

   # 'set' == 2 marks the date as "in the process of being set" for
   # _parse_check, which validates and stores it.
   $$self{'data'}{'set'} = 2;
   return $self->_parse_check('parse',$instring,
                              $y,$m,$d,$h,$mn,$s,$dow,$tzstring,$zone,$abb,$off);
}
# Parse a string containing only a time (optionally followed by a
# timezone) and set the time portion of the stored date.  The date
# portion is kept from the currently stored date, or taken from "now"
# if no date is set.  @opts may include 'noiso8601' / 'noother'.
#
# Returns 0 on success, 1 on failure (with $$self{'err'} set).
sub parse_time {
   my($self,$string,@opts) = @_;
   my %opts = map { $_,1 } @opts;
   my $noupdate = 0;

   if (! $string) {
      $$self{'err'} = '[parse_time] Empty time string';
      return 1;
   }

   my($y,$m,$d,$h,$mn,$s);

   # A previous failed parse leaves an error set; reset the object in
   # that case so the stale state is not reused.
   if ($$self{'err'}) {
      $self->_init();
   }
   if ($$self{'data'}{'set'}) {
      # Keep the currently stored date fields.
      ($y,$m,$d,$h,$mn,$s) = @{ $$self{'data'}{'date'} };
   } else {
      # No date stored yet: start from the current date/time.
      my $dmt = $$self{'tz'};
      ($y,$m,$d,$h,$mn,$s) = $dmt->_now('now',$noupdate);
      $noupdate = 1;
   }

   my($tzstring,$zone,$abb,$off);
   ($h,$mn,$s,$tzstring,$zone,$abb,$off) =
     $self->_parse_time('parse_time',$string,\$noupdate,%opts);

   return 1  if ($$self{'err'});

   $$self{'data'}{'set'} = 2;
   return $self->_parse_check('parse_time','',
                              $y,$m,$d,$h,$mn,$s,'',$tzstring,$zone,$abb,$off);
}
# Parse a string containing only a date (no time) and set the date
# portion of the stored date.  The time portion is kept from the
# currently stored date, or defaults to 00:00:00 if no date is set.
# @opts may include 'noiso8601', 'nodow', 'nocommon', 'noother'.
#
# Returns 0 on success, 1 on failure (with $$self{'err'} set).
sub parse_date {
   my($self,$string,@opts) = @_;
   my %opts = map { $_,1 } @opts;
   my $noupdate = 0;

   if (! $string) {
      $$self{'err'} = '[parse_date] Empty date string';
      return 1;
   }

   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};
   my($y,$m,$d,$h,$mn,$s);

   # A previous failed parse leaves an error set; reset in that case.
   if ($$self{'err'}) {
      $self->_init();
   }
   if ($$self{'data'}{'set'}) {
      # Keep the time from the currently stored date.
      ($y,$m,$d,$h,$mn,$s) = @{ $$self{'data'}{'date'} };
   } else {
      ($h,$mn,$s) = (0,0,0);
   }

   # Put parse in a simple loop for an easy exit.
   my($done,@tmp,$dow);
 PARSE:
   {
      # Parse ISO 8601 dates now
      unless (exists $opts{'noiso8601'}) {
         ($done,@tmp) = $self->_parse_date_iso8601($string,\$noupdate);
         if ($done) {
            ($y,$m,$d) = @tmp;
            last PARSE;
         }
      }

      (@tmp) = $self->_parse_date($string,undef,\$noupdate,%opts);
      if (@tmp) {
         ($y,$m,$d,$dow) = @tmp;
         last PARSE;
      }

      $$self{'err'} = '[parse_date] Invalid date string';
      return 1;
   }
   return 1  if ($$self{'err'});

   # Expand a 2-digit year to the full 4-digit form.
   $y = $dmt->_fix_year($y);

   $$self{'data'}{'set'} = 2;
   return $self->_parse_check('parse_date','',$y,$m,$d,$h,$mn,$s,$dow);
}
# Try to parse $string as a date using, in order: the day-of-week
# formats, the common date formats, and the less common ("other")
# formats.  $dow (if defined) is a previously parsed day of week.
#
# Returns ($y,$m,$d,$dow) on success, or the empty list if $string
# is not a recognized date.
sub _parse_date {
   my($self,$string,$dow,$noupdate,%opts) = @_;

   # There's lots of ways that commas may be included.  Remove
   # them.
   #
   # Also remove some words we should ignore.
   $string =~ s/,/ /g;

   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};

   my $ign = (exists $$dmb{'data'}{'rx'}{'other'}{'ignore'} ?
              $$dmb{'data'}{'rx'}{'other'}{'ignore'} :
              $self->_other_rx('ignore'));
   $string =~ s/$ign/ /g;
   # The ignore regexp captures an 'of' word when it matches.
   # NOTE(review): if $ign did NOT match, %+ may still hold captures
   # from an earlier successful match in this dynamic scope — confirm
   # this is the intended behavior.
   my $of = $+{'of'};

   $string =~ s/\s*$//;
   return ()  if (! $string);

   my($done,$y,$m,$d,@tmp);

   # Put parse in a simple loop for an easy exit.
 PARSE:
   {
      # Parse (and remove) the day of week.  Also, handle the simple DoW
      # formats.
      unless (exists $opts{'nodow'}) {
         if (! defined($dow)) {
            ($done,@tmp) = $self->_parse_dow($string,$noupdate);
            if (@tmp) {
               if ($done) {
                  # The string was ONLY a day of week.
                  ($y,$m,$d) = @tmp;
                  last PARSE;
               } else {
                  # The day of week was removed from the string.
                  ($string,$dow) = @tmp;
               }
            }
            $dow = 0  if (! $dow);
         }
      }

      # Parse common dates
      unless (exists $opts{'nocommon'}) {
         (@tmp) = $self->_parse_date_common($string,$noupdate);
         if (@tmp) {
            ($y,$m,$d) = @tmp;
            last PARSE;
         }
      }

      # Parse less common dates
      unless (exists $opts{'noother'}) {
         (@tmp) = $self->_parse_date_other($string,$dow,$of,$noupdate);
         if (@tmp) {
            ($y,$m,$d,$dow) = @tmp;
            last PARSE;
         }
      }

      # Not a recognized date.
      return ();
   }

   return($y,$m,$d,$dow);
}
# Parse $string according to a printf-style $format (as compiled by
# _format_regexp) and store the resulting date in the object.
#
# Returns an error (a true value) on failure, 0 on success.  In list
# context, returns ($err,%fields) where %fields are the named captures
# from the format match.
sub parse_format {
   my($self,$format,$string) = @_;
   $self->_init();
   my $noupdate = 0;

   if (! $string) {
      $$self{'err'} = '[parse_format] Empty date string';
      return 1;
   }

   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};

   my($err,$re) = $self->_format_regexp($format);
   return $err  if ($err);
   return 1     if ($string !~ $re);

   # Pull all of the named captures that the format regexp may have
   # set.  Most will be undef for any given format.
   my($y,$m,$d,$h,$mn,$s,
      $mon_name,$mon_abb,$dow_name,$dow_abb,$dow_char,$dow_num,
      $doy,$nth,$ampm,$epochs,$epocho,
      $tzstring,$off,$abb,$zone,
      $g,$w,$l,$u) =
        @+{qw(y m d h mn s
              mon_name mon_abb dow_name dow_abb dow_char dow_num doy
              nth ampm epochs epocho tzstring off abb zone g w l u)};

   while (1) {
      # Get y/m/d/h/mn/s from:
      #     $epochs,$epocho

      if (defined($epochs)) {
         # %s is seconds since the epoch in GMT; convert to the
         # specified (or current) timezone.
         ($y,$m,$d,$h,$mn,$s) = @{ $dmb->secs_since_1970($epochs) };
         my $z;
         if ($zone) {
            $z = $dmt->_zone($zone);
            return 'Invalid zone'  if (! $z);
         } elsif ($abb  ||  $off) {
            $z = $dmt->zone($off,$abb);
            return 'Invalid zone'  if (! $z);
         } else {
            $z = $dmt->_now('tz',$noupdate);
            $noupdate = 1;
         }
         my($err,$date) = $dmt->convert_from_gmt([$y,$m,$d,$h,$mn,$s],$z);
         ($y,$m,$d,$h,$mn,$s) = @$date;
         last;
      }

      if (defined($epocho)) {
         # %o is seconds since the epoch in the local timezone.
         ($y,$m,$d,$h,$mn,$s) = @{ $dmb->secs_since_1970($epocho) };
         last;
      }

      # Get y/m/d from:
      #     $y,$m,$d,
      #     $mon_name,$mon_abb
      #     $doy,$nth
      #     $g/$w,$l/$u

      if ($mon_name) {
         $m = $$dmb{'data'}{'wordmatch'}{'month_name'}{lc($mon_name)};
      } elsif ($mon_abb) {
         $m = $$dmb{'data'}{'wordmatch'}{'month_abb'}{lc($mon_abb)};
      }

      if ($nth) {
         $d = $$dmb{'data'}{'wordmatch'}{'nth'}{lc($nth)};
      }

      if ($doy) {
         # Day of year.
         $y = $dmt->_now('y',$noupdate)  if (! $y);
         $noupdate = 1;
         ($y,$m,$d) = @{ $dmb->day_of_year($y,$doy) };
      } elsif ($g) {
         # ISO week of year (week starts Monday).
         $y = $dmt->_now('y',$noupdate)  if (! $y);
         $noupdate = 1;
         ($y,$m,$d) = @{ $dmb->_week_of_year($g,$w,1) };
      } elsif ($l) {
         # Week of year (week starts Sunday).
         $y = $dmt->_now('y',$noupdate)  if (! $y);
         $noupdate = 1;
         ($y,$m,$d) = @{ $dmb->_week_of_year($l,$u,7) };
      } elsif ($m) {
         ($y,$m,$d) = $self->_def_date($y,$m,$d,\$noupdate);
      }

      # Get h/mn/s from:
      #     $h,$mn,$s,$ampm

      if (defined($h)) {
         ($h,$mn,$s) = $self->_def_time($h,$mn,$s,\$noupdate);
      }
      if ($ampm) {
         if ($$dmb{'data'}{'wordmatch'}{'ampm'}{lc($ampm)} == 2) {
            # pm times
            $h+=12  unless ($h==12);
         } else {
            # am times
            $h=0  if ($h==12);
         }
      }

      # Get dow from:
      #     $dow_name,$dow_abb,$dow_char,$dow_num

      if ($dow_name) {
         $dow_num = $$dmb{'data'}{'wordmatch'}{'day_name'}{lc($dow_name)};
      } elsif ($dow_abb) {
         $dow_num = $$dmb{'data'}{'wordmatch'}{'day_abb'}{lc($dow_abb)};
      } elsif ($dow_char) {
         $dow_num = $$dmb{'data'}{'wordmatch'}{'day_char'}{lc($dow_char)};
      }

      last;
   }

   # Fall back to "now" / midnight for anything the format left unset.
   if (! $m) {
      ($y,$m,$d) = $dmt->_now('now',$noupdate);
      $noupdate = 1;
   }
   if (! defined($h)) {
      ($h,$mn,$s) = (0,0,0);
   }

   $$self{'data'}{'set'} = 2;
   $err = $self->_parse_check('parse_format',$string,
                              $y,$m,$d,$h,$mn,$s,$dow_num,
                              $tzstring,$zone,$abb,$off);

   if (wantarray) {
      # Return a copy of the named captures.
      # NOTE(review): %+ reflects the most recent successful match;
      # intervening matches inside the calls above may have replaced
      # the captures from the format match — confirm this is intended.
      my %tmp = %{ dclone(\%+) };
      return ($err,%tmp);
   }

   return $err;
}
BEGIN {
   # Which printf-style directives supply which date/time fields.
   # These are used below to reject formats that specify a field more
   # than once and to verify that the format is complete.
   my %y_form   = map { $_,1 } qw( Y y s o G L );
   my %m_form   = map { $_,1 } qw( m f b h B j s o W U );
   my %d_form   = map { $_,1 } qw( j d e E s o W U );
   my %h_form   = map { $_,1 } qw( H I k i s o );
   my %mn_form  = map { $_,1 } qw( M s o );
   my %s_form   = map { $_,1 } qw( S s o );
   my %dow_form = map { $_,1 } qw( v a A w );
   my %am_form  = map { $_,1 } qw( p s o );
   my %z_form   = map { $_,1 } qw( Z z N );
   my %mon_form = map { $_,1 } qw( b h B );
   my %day_form = map { $_,1 } qw( v a A );

   # Compile (and cache) a regular expression with named captures that
   # matches a date/time string laid out according to $format.
   #
   # Returns ($err) on failure, or (0,$regexp) on success.
   sub _format_regexp {
      my($self,$format) = @_;
      my $dmt = $$self{'tz'};
      my $dmb = $$dmt{'base'};

      if (exists $$dmb{'data'}{'format'}{$format}) {
         return @{ $$dmb{'data'}{'format'}{$format} };
      }

      # BUGFIX: $format is destructively consumed (and sometimes
      # expanded) by the loop below, so the result must be cached
      # under the ORIGINAL format string or the cache lookup above
      # will never find it.
      my $format_orig = $format;

      my $re;
      my $err;
      my($y,$m,$d,$h,$mn,$s) = (0,0,0,0,0,0);
      my($dow,$ampm,$zone,$G,$W,$L,$U) = (0,0,0,0,0,0,0);

      while ($format) {
         last  if ($format eq '%');
         if ($format =~ s/^([^%]+)//) {
            # Literal text in the format becomes literal text in the
            # regexp.
            $re .= $1;
            next;
         }

         $format =~ s/^%(.)//;
         my $f = $1;

         # Reject any field that is specified more than once.
         if (exists $y_form{$f}) {
            if ($y) {
               $err = 'Year specified multiple times';
               last;
            }
            $y = 1;
         }
         if (exists $m_form{$f}) {
            if ($m) {
               $err = 'Month specified multiple times';
               last;
            }
            $m = 1;
         }
         if (exists $d_form{$f}) {
            if ($d) {
               $err = 'Day specified multiple times';
               last;
            }
            $d = 1;
         }
         if (exists $h_form{$f}) {
            if ($h) {
               $err = 'Hour specified multiple times';
               last;
            }
            $h = 1;
         }
         if (exists $mn_form{$f}) {
            if ($mn) {
               $err = 'Minutes specified multiple times';
               last;
            }
            $mn = 1;
         }
         if (exists $s_form{$f}) {
            if ($s) {
               $err = 'Seconds specified multiple times';
               last;
            }
            $s = 1;
         }
         if (exists $dow_form{$f}) {
            if ($dow) {
               $err = 'Day-of-week specified multiple times';
               last;
            }
            $dow = 1;
         }
         if (exists $am_form{$f}) {
            if ($ampm) {
               $err = 'AM/PM specified multiple times';
               last;
            }
            $ampm = 1;
         }
         if (exists $z_form{$f}) {
            if ($zone) {
               $err = 'Zone specified multiple times';
               last;
            }
            $zone = 1;
         }
         if ($f eq 'G') {
            if ($G) {
               $err = 'G specified multiple times';
               last;
            }
            $G = 1;
         } elsif ($f eq 'W') {
            if ($W) {
               $err = 'W specified multiple times';
               last;
            }
            $W = 1;
         } elsif ($f eq 'L') {
            if ($L) {
               $err = 'L specified multiple times';
               last;
            }
            $L = 1;
         } elsif ($f eq 'U') {
            if ($U) {
               $err = 'U specified multiple times';
               last;
            }
            $U = 1;
         }

         # Translate each directive into a named-capture sub-regexp,
         # or (for the composite directives like %c) expand it back
         # into $format to be re-processed.
         if ($f eq 'Y') {
            $re .= '(?<y>\d\d\d\d)';
         } elsif ($f eq 'y') {
            $re .= '(?<y>\d\d)';
         } elsif ($f eq 'm') {
            $re .= '(?<m>\d\d)';
         } elsif ($f eq 'f') {
            $re .= '(?:(?<m>\d\d)| ?(?<m>\d))';
         } elsif (exists $mon_form{$f}) {
            my $abb = $$dmb{'data'}{'rx'}{'month_abb'}[0];
            my $nam = $$dmb{'data'}{'rx'}{'month_name'}[0];
            $re .= "(?:(?<mon_name>$nam)|(?<mon_abb>$abb))";
         } elsif ($f eq 'j') {
            $re .= '(?<doy>\d\d\d)';
         } elsif ($f eq 'd') {
            $re .= '(?<d>\d\d)';
         } elsif ($f eq 'e') {
            $re .= '(?:(?<d>\d\d)| ?(?<d>\d))';
         } elsif (exists $day_form{$f}) {
            my $abb  = $$dmb{'data'}{'rx'}{'day_abb'}[0];
            my $name = $$dmb{'data'}{'rx'}{'day_name'}[0];
            my $char = $$dmb{'data'}{'rx'}{'day_char'}[0];
            $re .= "(?:(?<dow_name>$name)|(?<dow_abb>$abb)|(?<dow_char>$char))";
         } elsif ($f eq 'w') {
            $re .= '(?<dow_num>[1-7])';
         } elsif ($f eq 'E') {
            my $nth = $$dmb{'data'}{'rx'}{'nth'}[0];
            $re .= "(?<nth>$nth)"
         } elsif ($f eq 'H'  ||  $f eq 'I') {
            $re .= '(?<h>\d\d)';
         } elsif ($f eq 'k'  ||  $f eq 'i') {
            $re .= '(?:(?<h>\d\d)| ?(?<h>\d))';
         } elsif ($f eq 'p') {
            my $ampm = $$dmb{data}{rx}{ampm}[0];
            $re .= "(?<ampm>$ampm)";
         } elsif ($f eq 'M') {
            $re .= '(?<mn>\d\d)';
         } elsif ($f eq 'S') {
            $re .= '(?<s>\d\d)';
         } elsif (exists $z_form{$f}) {
            $re .= $dmt->_zrx('zrx');
         } elsif ($f eq 's') {
            $re .= '(?<epochs>\d+)';
         } elsif ($f eq 'o') {
            $re .= '(?<epocho>\d+)';
         } elsif ($f eq 'G') {
            $re .= '(?<g>\d\d\d\d)';
         } elsif ($f eq 'W') {
            $re .= '(?<w>\d\d)';
         } elsif ($f eq 'L') {
            $re .= '(?<l>\d\d\d\d)';
         } elsif ($f eq 'U') {
            $re .= '(?<u>\d\d)';
         } elsif ($f eq 'c') {
            $format = '%a %b %e %H:%M:%S %Y' . $format;
         } elsif ($f eq 'C'  ||  $f eq 'u') {
            $format = '%a %b %e %H:%M:%S %Z %Y' . $format;
         } elsif ($f eq 'g') {
            $format = '%a, %d %b %Y %H:%M:%S %Z' . $format;
         } elsif ($f eq 'D') {
            $format = '%m/%d/%y' . $format;
         } elsif ($f eq 'r') {
            $format = '%I:%M:%S %p' . $format;
         } elsif ($f eq 'R') {
            $format = '%H:%M' . $format;
         } elsif ($f eq 'T'  ||  $f eq 'X') {
            $format = '%H:%M:%S' . $format;
         } elsif ($f eq 'V') {
            $format = '%m%d%H%M%y' . $format;
         } elsif ($f eq 'Q') {
            $format = '%Y%m%d' . $format;
         } elsif ($f eq 'q') {
            $format = '%Y%m%d%H%M%S' . $format;
         } elsif ($f eq 'P') {
            $format = '%Y%m%d%H:%M:%S' . $format;
         } elsif ($f eq 'O') {
            $format = '%Y\\-%m\\-%dT%H:%M:%S' . $format;
         } elsif ($f eq 'F') {
            $format = '%A, %B %e, %Y' . $format;
         } elsif ($f eq 'K') {
            $format = '%Y-%j' . $format;
         } elsif ($f eq 'J') {
            $format = '%G-W%W-%w' . $format;
         } elsif ($f eq 'x') {
            if ($dmb->_config('dateformat') eq 'US') {
               $format = '%m/%d/%y' . $format;
            } else {
               $format = '%d/%m/%y' . $format;
            }
         } elsif ($f eq 't') {
            $re .= "\t";
         } elsif ($f eq '%') {
            $re .= '%';
         } elsif ($f eq '+') {
            $re .= '\\+';
         }
      }

      # The format must fully specify a date (or none of it), a
      # complete time (if any time fields appear), and matched
      # week-of-year pairs.
      if ($m != $d) {
         $err = 'Date not fully specified';
      } elsif ( ($h || $mn || $s)  &&  (! $h  ||  ! $mn) ) {
         $err = 'Time not fully specified';
      } elsif ($ampm  &&  ! $h) {
         $err = 'Time not fully specified';
      } elsif ($G != $W) {
         $err = 'G/W must both be specified';
      } elsif ($L != $U) {
         $err = 'L/U must both be specified';
      }

      if ($err) {
         $$dmb{'data'}{'format'}{$format_orig} = [$err];
         return ($err);
      }

      $$dmb{'data'}{'format'}{$format_orig} = [0, qr/$re/i];
      return @{ $$dmb{'data'}{'format'}{$format_orig} };
   }
}
########################################################################
# DATE FORMATS
########################################################################
# Validate the fields produced by one of the parse routines, resolve
# the timezone information ($zone, $abb, $off), and store the date in
# the object via set().
#
# $caller is the name of the calling routine (used in error messages);
# $instring is the original input string to record.
#
# Returns 0 on success, 1 on failure (with $$self{'err'} set).
sub _parse_check {
   my($self,$caller,$instring,
      $y,$m,$d,$h,$mn,$s,$dow,$tzstring,$zone,$abb,$off) = @_;

   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};

   # Check day_of_week for validity BEFORE converting 24:00:00 to the
   # next day
   if ($dow) {
      my $tmp = $dmb->day_of_week([$y,$m,$d]);
      if ($tmp != $dow) {
         $$self{'err'} = "[$caller] Day of week invalid";
         return 1;
      }
   }

   # Handle 24:00:00 times (24:00:00 is midnight of the next day).
   if ($h == 24) {
      ($h,$mn,$s) = (0,0,0);
      ($y,$m,$d)  = @{ $dmb->calc_date_days([$y,$m,$d],1) };
   }

   if (! $dmb->check([$y,$m,$d,$h,$mn,$s])) {
      $$self{'err'} = "[$caller] Invalid date";
      return 1;
   }

   # Interpret timezone information and check that date is valid
   # in the timezone.
   my ($zonename,$isdst);

   if (defined($zone)) {
      # A full timezone name/alias was parsed.
      $zonename = $dmt->_zone($zone);
      if (! $zonename) {
         $$self{'err'} = "[$caller] Unable to determine timezone: $zone";
         return 1;
      }

   } elsif (defined($abb) || defined($off)) {
      # Only an abbreviation and/or offset was parsed: determine the
      # zone from the date plus whichever of the two we have.
      my @tmp;
      push(@tmp,[$y,$m,$d,$h,$mn,$s]);
      push(@tmp,$off)  if (defined $off);
      push(@tmp,$abb)  if (defined $abb);
      $zonename = $dmt->zone(@tmp);
      if (! $zonename) {
         $$self{'err'} = 'Unable to determine timezone';
         return 1;
      }

      # Figure out $isdst from $abb/$off (for everything else, we'll
      # try both values).
      if (defined $off  ||  defined $abb) {
         # NOTE: 'my VAR = EXPR if COND' has undefined behavior in
         # perl, so declare and conditionally assign separately.
         my @off;
         @off = @{ $dmb->split('offset',$off) }  if (defined($off));

         my $err = 1;
         foreach my $i (0,1) {
            my $per = $dmt->date_period([$y,$m,$d,$h,$mn,$s],$zonename,1,$i);
            next  if (! $per);
            my $a = $$per[4];
            my $o = $$per[3];
            # Accept this DST value if the abbreviation matches ...
            if (defined $abb  &&  lc($a) eq lc($abb)) {
               $err   = 0;
               $isdst = $i;
               $abb   = $a;
               last;
            }
            # ... or the offset matches.
            if (defined ($off)) {
               if ($off[0] == $$o[0]  &&
                   $off[1] == $$o[1]  &&
                   $off[2] == $$o[2]) {
                  $err   = 0;
                  $isdst = $i;
                  last;
               }
            }
         }
         if ($err) {
            $$self{'err'} = 'Invalid timezone';
            return 1;
         }
      }

   } else {
      # No timezone information at all: use the current timezone.
      $zonename = $dmt->_now('tz');
   }

   # Store the date
   $self->set('zdate',$zonename,[$y,$m,$d,$h,$mn,$s],$isdst);
   return 1  if ($$self{'err'});
   $$self{'data'}{'in'}  = $instring;
   $$self{'data'}{'zin'} = $zone  if (defined($zone));

   return 0;
}
# Set up the regular expressions for ISO 8601 parsing. Returns the
# requested regexp. $rx can be:
# cdate : regular expression for a complete date
# tdate : regular expression for a truncated date
# ctime : regular expression for a complete time
# ttime : regular expression for a truncated time
# date : regular expression for a date only
# time : regular expression for a time only
# UNDEF : regular expression for a valid date and/or time
#
# Date matches are:
# y m d doy w dow yod c
# Time matches are:
# h h24 mn s fh fm
#
# Build (and cache) the ISO 8601 regular expressions described in the
# comment block above.  $rx selects which one to build; the regexps
# are cached in $$dmb{'data'}{'rx'}{'iso'}.
sub _iso8601_rx {
   my($self,$rx) = @_;
   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};

   return $$dmb{'data'}{'rx'}{'iso'}{$rx}
     if (exists $$dmb{'data'}{'rx'}{'iso'}{$rx});

   if ($rx eq 'cdate'  ||  $rx eq 'tdate') {
      # Complete and truncated date forms (built together since they
      # share the component regexps).
      my $y4  = '(?<y>\d\d\d\d)';
      my $y2  = '(?<y>\d\d)';
      my $m   = '(?<m>0[1-9]|1[0-2])';
      my $d   = '(?<d>0[1-9]|[12][0-9]|3[01])';
      my $doy = '(?<doy>00[1-9]|0[1-9][0-9]|[1-2][0-9][0-9]|3[0-5][0-9]|36[0-6])';
      my $w   = '(?<w>0[1-9]|[1-4][0-9]|5[0-3])';
      my $dow = '(?<dow>[1-7])';
      my $yod = '(?<yod>\d)';
      my $cc  = '(?<c>\d\d)';

      my $cdaterx =
        "${y4}${m}${d}|" .                   # CCYYMMDD
        "${y4}\\-${m}\\-${d}|" .             # CCYY-MM-DD
        "\\-${y2}${m}${d}|" .                # -YYMMDD
        "\\-${y2}\\-${m}\\-${d}|" .          # -YY-MM-DD
        "\\-?${y2}${m}${d}|" .               # YYMMDD
        "\\-?${y2}\\-${m}\\-${d}|" .         # YY-MM-DD
        "\\-\\-${m}\\-?${d}|" .              # --MM-DD   --MMDD
        "\\-\\-\\-${d}|" .                   # ---DD
        "${y4}\\-?${doy}|" .                 # CCYY-DoY  CCYYDoY
        "\\-?${y2}\\-?${doy}|" .             # YY-DoY    -YY-DoY
                                             # YYDoY     -YYDoY
        "\\-${doy}|" .                       # -DoY
        "${y4}W${w}${dow}|" .                # CCYYWwwD
        "${y4}\\-W${w}\\-${dow}|" .          # CCYY-Www-D
        "\\-?${y2}W${w}${dow}|" .            # YYWwwD    -YYWwwD
        "\\-?${y2}\\-W${w}\\-${dow}|" .      # YY-Www-D  -YY-Www-D
        "\\-?${yod}W${w}${dow}|" .           # YWwwD     -YWwwD
        "\\-?${yod}\\-W${w}\\-${dow}|" .     # Y-Www-D   -Y-Www-D
        "\\-W${w}\\-?${dow}|" .              # -Www-D    -WwwD
        "\\-W\\-${dow}|" .                   # -W-D
        "\\-\\-\\-${dow}";                   # ---D
      $cdaterx = qr/(?:$cdaterx)/i;

      my $tdaterx =
        "${y4}\\-${m}|" .                    # CCYY-MM
        "${y4}|" .                           # CCYY
        "\\-${y2}\\-?${m}|" .                # -YY-MM    -YYMM
        "\\-${y2}|" .                        # -YY
        "\\-\\-${m}|" .                      # --MM
        "${y4}\\-?W${w}|" .                  # CCYYWww   CCYY-Www
        "\\-?${y2}\\-?W${w}|" .              # YY-Www    YYWww
                                             # -YY-Www   -YYWww
        "\\-?W${w}|" .                       # -Www      Www
        "${cc}";                             # CC
      $tdaterx = qr/(?:$tdaterx)/i;

      $$dmb{'data'}{'rx'}{'iso'}{'cdate'} = $cdaterx;
      $$dmb{'data'}{'rx'}{'iso'}{'tdate'} = $tdaterx;

   } elsif ($rx eq 'ctime'  ||  $rx eq 'ttime') {
      # Complete and truncated time forms.
      my $hh   = '(?<h>[0-1][0-9]|2[0-3])';
      my $mn   = '(?<mn>[0-5][0-9])';
      my $ss   = '(?<s>[0-5][0-9])';
      my $h24a = '(?<h24>24(?::00){0,2})';
      my $h24b = '(?<h24>24(?:00){0,2})';
      my $h    = '(?<h>[0-9])';

      my $fh   = '(?:[\.,](?<fh>\d*))';      # fractional hours (keep)
      my $fm   = '(?:[\.,](?<fm>\d*))';      # fractional minutes (keep)
      my $fs   = '(?:[\.,]\d*)';             # fractional seconds (discard)

      my $zrx  = $dmt->_zrx('zrx');

      my $ctimerx =
        "${hh}${mn}${ss}${fs}?|" .           # HHMNSS[,S+]
        "${hh}:${mn}:${ss}${fs}?|" .         # HH:MN:SS[,S+]
        "${hh}:?${mn}${fm}|" .               # HH:MN,M+  HHMN,M+
        "${hh}${fh}|" .                      # HH,H+
        "\\-${mn}:?${ss}${fs}?|" .           # -MN:SS[,S+]  -MNSS[,S+]
        "\\-${mn}${fm}|" .                   # -MN,M+
        "\\-\\-${ss}${fs}?|" .               # --SS[,S+]
        "${hh}:?${mn}|" .                    # HH:MN     HHMN
        "${h24a}|" .                         # 24:00:00  24:00  24
        "${h24b}|" .                         # 240000    2400
        "${h}:${mn}:${ss}${fs}?|" .          # H:MN:SS[,S+]
        "${h}:${mn}${fm}";                   # H:MN,M+
      $ctimerx = qr/(?:$ctimerx)(?:\s*$zrx)?/;

      my $ttimerx =
        "${hh}|" .                           # HH
        "\\-${mn}";                          # -MN
      $ttimerx = qr/(?:$ttimerx)/;

      $$dmb{'data'}{'rx'}{'iso'}{'ctime'} = $ctimerx;
      $$dmb{'data'}{'rx'}{'iso'}{'ttime'} = $ttimerx;

   } elsif ($rx eq 'date') {
      # Any date (complete or truncated).
      my $cdaterx = $self->_iso8601_rx('cdate');
      my $tdaterx = $self->_iso8601_rx('tdate');
      $$dmb{'data'}{'rx'}{'iso'}{'date'} = qr/(?:$cdaterx|$tdaterx)/;

   } elsif ($rx eq 'time') {
      # Any time (complete or truncated).
      my $ctimerx = $self->_iso8601_rx('ctime');
      my $ttimerx = $self->_iso8601_rx('ttime');
      $$dmb{'data'}{'rx'}{'iso'}{'time'} = qr/(?:$ctimerx|$ttimerx)/;

   } elsif ($rx eq 'fulldate') {
      # A parseable string contains:
      #   a complete date and complete time
      #   a complete date and truncated time
      #   a truncated date
      #   a complete time
      #   a truncated time
      # If the string contains both a time and date, they may be adjacent
      # or separated by:
      #   whitespace
      #   T (which must be followed by a number)
      #   a dash
      my $cdaterx = $self->_iso8601_rx('cdate');
      my $tdaterx = $self->_iso8601_rx('tdate');
      my $ctimerx = $self->_iso8601_rx('ctime');
      my $ttimerx = $self->_iso8601_rx('ttime');

      my $sep     = qr/(?:T|\-|\s*)/i;

      my $daterx  = qr/^\s*(?: $cdaterx(?:$sep(?:$ctimerx|$ttimerx))? |
                               $tdaterx |
                               $ctimerx |
                               $ttimerx
                           )\s*$/x;
      $$dmb{'data'}{'rx'}{'iso'}{'fulldate'} = $daterx;
   }

   return $$dmb{'data'}{'rx'}{'iso'}{$rx};
}
# Try to parse $string as a full ISO 8601 date/time (optionally with a
# timezone).  Missing fields are filled from defaults.
#
# Returns (1,$y,$m,$d,$h,$mn,$s,$tzstring,$zone,$abb,$off) on success,
# (0) if the string is not an ISO 8601 date/time.
sub _parse_datetime_iso8601 {
   my($self,$string,$noupdate) = @_;
   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};
   my $daterx = $self->_iso8601_rx('fulldate');

   my($y,$m,$d,$h,$mn,$s,$tzstring,$zone,$abb,$off);
   my($doy,$dow,$yod,$c,$w,$fh,$fm,$h24);

   if ($string =~ $daterx) {
      ($y,$m,$d,$h,$mn,$s,$doy,$dow,$yod,$c,$w,$fh,$fm,$h24,
       $tzstring,$zone,$abb,$off) =
         @+{qw(y m d h mn s doy dow yod c w fh fm h24 tzstring zone abb off)};

      # The date may have been specified as week/day-of-week, as a
      # day-of-year, or as y/m/d (possibly with only a century).
      if (defined $w  ||  defined $dow) {
         ($y,$m,$d) = $self->_def_date_dow($y,$w,$dow,$noupdate);
      } elsif (defined $doy) {
         ($y,$m,$d) = $self->_def_date_doy($y,$doy,$noupdate);
      } else {
         $y = $c . '00'  if (defined $c);
         ($y,$m,$d) = $self->_def_date($y,$m,$d,$noupdate);
      }

      # Normalize fractional hours/minutes and 24:00:00.
      ($h,$mn,$s) = $self->_time($h,$mn,$s,$fh,$fm,$h24,undef,$noupdate);
   } else {
      return (0);
   }

   return (1,$y,$m,$d,$h,$mn,$s,$tzstring,$zone,$abb,$off);
}
# Try to parse $string as an ISO 8601 date (no time).  Missing fields
# are filled from defaults.
#
# Returns (1,$y,$m,$d) on success, (0) if the string is not an ISO
# 8601 date.
sub _parse_date_iso8601 {
   my($self,$string,$noupdate) = @_;
   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};
   my $daterx = $self->_iso8601_rx('date');

   my($y,$m,$d);
   my($doy,$dow,$yod,$c,$w);

   # The 'date' regexp is unanchored, so anchor it here to require a
   # full-string match.
   if ($string =~ /^$daterx$/) {
      ($y,$m,$d,$doy,$dow,$yod,$c,$w) =
        @+{qw(y m d doy dow yod c w)};

      # The date may have been specified as week/day-of-week, as a
      # day-of-year, or as y/m/d (possibly with only a century).
      if (defined $w  ||  defined $dow) {
         ($y,$m,$d) = $self->_def_date_dow($y,$w,$dow,$noupdate);
      } elsif (defined $doy) {
         ($y,$m,$d) = $self->_def_date_doy($y,$doy,$noupdate);
      } else {
         $y = $c . '00'  if (defined $c);
         ($y,$m,$d) = $self->_def_date($y,$m,$d,$noupdate);
      }
   } else {
      return (0);
   }

   return (1,$y,$m,$d);
}
# Handle all of the time fields.
#
# Applies AM/PM to the hour, converts fractional hours ($fh) or
# fractional minutes ($fm) into minutes/seconds, handles the special
# 24:00:00 form ($h24), and fills any remaining undefined fields from
# defaults.  Returns ($h,$mn,$s).
#
# Integer math is disabled here because the fractional-hour/minute
# arithmetic needs floating point.
no integer;
sub _time {
   my($self,$h,$mn,$s,$fh,$fm,$h24,$ampm,$noupdate) = @_;

   if (defined($ampm)  &&  $ampm) {
      my $dmt = $$self{'tz'};
      my $dmb = $$dmt{'base'};
      if ($$dmb{'data'}{'wordmatch'}{'ampm'}{lc($ampm)} == 2) {
         # pm times
         $h+=12  unless ($h==12);
      } else {
         # am times
         $h=0  if ($h==12);
      }
   }

   if (defined $h24) {
      # 24:00:00 is passed through; _parse_check rolls it over to
      # midnight of the next day.
      return(24,0,0);
   } elsif (defined $fh  &&  $fh ne "") {
      # Fractional hour: convert to minutes and seconds.
      $fh = "0.$fh";
      $s  = int($fh * 3600);
      $mn = int($s/60);
      $s -= $mn*60;
   } elsif (defined $fm  &&  $fm ne "") {
      # Fractional minute: convert to seconds.
      $fm = "0.$fm";
      $s  = int($fm*60);
   }
   ($h,$mn,$s) = $self->_def_time($h,$mn,$s,$noupdate);
   return($h,$mn,$s);
}
use integer;
# Set up the regular expressions for other date and time formats. Returns the
# requested regexp.  $rx selects which regexp to build ('time',
# 'common_1', 'common_2', 'dow', 'ignore', 'miscdatetime', 'misc');
# results are cached in $$dmb{'data'}{'rx'}{'other'}.
#
sub _other_rx {
   my($self,$rx) = @_;
   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};
   $rx = '_'  if (! defined $rx);

   if ($rx eq 'time') {

      my $h24 = '(?<h>2[0-3]|1[0-9]|0?[0-9])';  # 0-23      00-23
      my $h12 = '(?<h>1[0-2]|0?[1-9])';         # 1-12      01-12
      my $mn  = '(?<mn>[0-5][0-9])';            # 00-59
      my $ss  = '(?<s>[0-5][0-9])';             # 00-59

      # how to express fractions

      my($f1,$f2,$sepfr);
      if (exists $$dmb{'data'}{'rx'}{'sepfr'}  &&
          $$dmb{'data'}{'rx'}{'sepfr'}) {
         $sepfr = $$dmb{'data'}{'rx'}{'sepfr'};
      } else {
         $sepfr = '';
      }

      if ($sepfr) {
         $f1 = "(?:[.,]|$sepfr)";
         $f2 = "(?:[.,:]|$sepfr)";
      } else {
         $f1 = "[.,]";
         $f2 = "[.,:]";
      }
      my $fh = "(?:$f1(?<fh>\\d*))";   # fractional hours (keep)
      my $fm = "(?:$f1(?<fm>\\d*))";   # fractional minutes (keep)
      my $fs = "(?:$f2\\d*)";          # fractional seconds

      # AM/PM

      my($ampm);
      if (exists $$dmb{'data'}{'rx'}{'ampm'}) {
         $ampm = "(?:\\s*(?<ampm>$$dmb{data}{rx}{ampm}[0]))";
      }

      # H:MN and MN:S separators (':' always; '.' if the
      # 'periodtimesep' config is set; plus any language-specific
      # separators).

      my @hm = ("\Q:\E");
      my @ms = ("\Q:\E");

      if ($dmb->_config('periodtimesep')) {
         push(@hm,"\Q.\E");
         push(@ms,"\Q.\E");
      }
      if (exists $$dmb{'data'}{'rx'}{'sephm'}   &&
          defined $$dmb{'data'}{'rx'}{'sephm'}  &&
          exists $$dmb{'data'}{'rx'}{'sepms'}   &&
          defined $$dmb{'data'}{'rx'}{'sepms'}) {
         push(@hm,@{ $$dmb{'data'}{'rx'}{'sephm'} });
         push(@ms,@{ $$dmb{'data'}{'rx'}{'sepms'} });
      }

      # How to express the time
      #  matches = (H, FH, MN, FMN, S, AM, TZSTRING, ZONE, ABB, OFF, ABB)
      #
      # Alternatives are ordered longest-first (H:MN:SS, then H:MN
      # with fraction, then H:MN, then H alone) so the longest form
      # wins.

      my $timerx;
      for (my $i=0; $i<=$#hm; $i++) {
         my $hm = $hm[$i];
         my $ms = $ms[$i];
         $timerx .= "${h12}$hm${mn}$ms${ss}${fs}?${ampm}?|"  # H12:MN:SS[,S+] [AM]
           if ($ampm);
         $timerx .= "${h24}$hm${mn}$ms${ss}${fs}?|" .        # H24:MN:SS[,S+]
                    "(?<h>24)$hm(?<mn>00)$ms(?<s>00)|";      # 24:00:00
      }
      for (my $i=0; $i<=$#hm; $i++) {
         my $hm = $hm[$i];
         my $ms = $ms[$i];   # (unused in this loop)
         $timerx .= "${h12}$hm${mn}${fm}${ampm}?|"           # H12:MN,M+ [AM]
           if ($ampm);
         $timerx .= "${h24}$hm${mn}${fm}|";                  # H24:MN,M+
      }
      for (my $i=0; $i<=$#hm; $i++) {
         my $hm = $hm[$i];
         my $ms = $ms[$i];   # (unused in this loop)
         $timerx .= "${h12}$hm${mn}${ampm}?|"                # H12:MN [AM]
           if ($ampm);
         $timerx .= "${h24}$hm${mn}|" .                      # H24:MN
                    "(?<h>24)$hm(?<mn>00)|";                 # 24:00
      }
      $timerx .= "${h12}${fh}${ampm}|"                       # H12,H+ AM
        if ($ampm);
      $timerx .= "${h12}${ampm}|"  if ($ampm);               # H12 AM
      $timerx .= "${h24}${fh}|";                             # H24,H+
      chop($timerx);  # remove trailing pipe

      my $zrx  = $dmt->_zrx('zrx');
      my $at   = $$dmb{'data'}{'rx'}{'at'};
      my $atrx = qr/(?:^|\s+)(?:$at)\s+/;
      $timerx  = qr/(?:$atrx|^|\s+)(?:$timerx)(?:\s*$zrx)?(?:\s+|$)/i;

      $$dmb{'data'}{'rx'}{'other'}{$rx} = $timerx;

   } elsif ($rx eq 'common_1') {

      # These are of the format M/D/Y

      # Do NOT replace <m> and <d> with a regular expression to
      # match 1-12 since the DateFormat config may reverse the two.
      my $y4  = '(?<y>\d\d\d\d)';
      my $y2  = '(?<y>\d\d)';
      my $m   = '(?<m>\d\d?)';
      my $d   = '(?<d>\d\d?)';
      my $sep = '(?<sep>[\s\.\/\-])';

      my $daterx =
        "${m}${sep}${d}\\k<sep>$y4|" .       # M/D/YYYY
        "${m}${sep}${d}\\k<sep>$y2|" .       # M/D/YY
        "${m}${sep}${d}";                    # M/D

      $daterx = qr/^\s*(?:$daterx)\s*$/;
      $$dmb{'data'}{'rx'}{'other'}{$rx} = $daterx;

   } elsif ($rx eq 'common_2') {
      # Dates involving a month name or abbreviation (mmm).

      my $abb = $$dmb{'data'}{'rx'}{'month_abb'}[0];
      my $nam = $$dmb{'data'}{'rx'}{'month_name'}[0];

      my $y4  = '(?<y>\d\d\d\d)';
      my $y2  = '(?<y>\d\d)';
      my $m   = '(?<m>\d\d?)';
      my $d   = '(?<d>\d\d?)';
      my $dd  = '(?<d>\d\d)';
      my $mmm = "(?:(?<mmm>$abb)|(?<month>$nam))";
      my $sep = '(?<sep>[\s\.\/\-])';

      my $daterx =
        "${y4}${sep}${m}\\k<sep>$d|" .           # YYYY/M/D

        "${mmm}\\s*${dd}\\s*${y4}|" .            # mmmDDYYYY
        "${mmm}\\s*${dd}\\s*${y2}|" .            # mmmDDYY
        "${mmm}\\s*${d}|" .                      # mmmD
        "${d}\\s*${mmm}\\s*${y4}|" .             # DmmmYYYY
        "${d}\\s*${mmm}\\s*${y2}|" .             # DmmmYY
        "${d}\\s*${mmm}|" .                      # Dmmm
        "${y4}\\s*${mmm}\\s*${d}|" .             # YYYYmmmD

        "${mmm}${sep}${d}\\k<sep>${y4}|" .       # mmm/D/YYYY
        "${mmm}${sep}${d}\\k<sep>${y2}|" .       # mmm/D/YY
        "${mmm}${sep}${d}|" .                    # mmm/D
        "${d}${sep}${mmm}\\k<sep>${y4}|" .       # D/mmm/YYYY
        "${d}${sep}${mmm}\\k<sep>${y2}|" .       # D/mmm/YY
        "${d}${sep}${mmm}|" .                    # D/mmm
        "${y4}${sep}${mmm}\\k<sep>${d}|" .       # YYYY/mmm/D

        "${mmm}${sep}?${d}\\s+${y2}|" .          # mmmD YY      mmm/D YY
        "${mmm}${sep}?${d}\\s+${y4}|" .          # mmmD YYYY    mmm/D YYYY
        "${d}${sep}?${mmm}\\s+${y2}|" .          # Dmmm YY      D/mmm YY
        "${d}${sep}?${mmm}\\s+${y4}|" .          # Dmmm YYYY    D/mmm YYYY

        "${y2}\\s+${mmm}${sep}?${d}|" .          # YY mmmD      YY mmm/D
        "${y4}\\s+${mmm}${sep}?${d}|" .          # YYYY mmmD    YYYY mmm/D
        "${y2}\\s+${d}${sep}?${mmm}|" .          # YY Dmmm      YY D/mmm
        "${y4}\\s+${d}${sep}?${mmm}|" .          # YYYY Dmmm    YYYY D/mmm

        "${y4}:${m}:${d}";                       # YYYY:MM:DD

      $daterx = qr/^\s*(?:$daterx)\s*$/i;
      $$dmb{'data'}{'rx'}{'other'}{$rx} = $daterx;

   } elsif ($rx eq 'dow') {
      # A day-of-week name or abbreviation, optionally preceded by an
      # 'on' word.

      my $day_abb  = $$dmb{'data'}{'rx'}{'day_abb'}[0];
      my $day_name = $$dmb{'data'}{'rx'}{'day_name'}[0];

      my $on       = $$dmb{'data'}{'rx'}{'on'};
      my $onrx     = qr/(?:^|\s+)(?:$on)\s+/;
      my $dowrx    = qr/(?:$onrx|^|\s+)(?<dow>$day_name|$day_abb)($|\s+)/i;

      $$dmb{'data'}{'rx'}{'other'}{$rx} = $dowrx;

   } elsif ($rx eq 'ignore') {
      # Words to strip from a date string before parsing (captures an
      # 'of' word for later use).

      my $of    = $$dmb{'data'}{'rx'}{'of'};
      my $ignrx = qr/(?:^|\s+)(?<of>$of)(\s+|$)/;
      $$dmb{'data'}{'rx'}{'other'}{$rx} = $ignrx;

   } elsif ($rx eq 'miscdatetime') {
      # Full date/time specials: 'now' (etc.), 'epoch SECS', and the
      # Apache common log format.

      my $special  = $$dmb{'data'}{'rx'}{'offset_time'}[0];
      $special     = "(?<special>$special)";

      my $secs     = "(?<epoch>[-+]?\\d+)";

      my $abb      = $$dmb{'data'}{'rx'}{'month_abb'}[0];
      my $mmm      = "(?<mmm>$abb)";
      my $y4       = '(?<y>\d\d\d\d)';
      my $dd       = '(?<d>\d\d)';
      my $h24      = '(?<h>2[0-3]|[01][0-9])';   # 00-23
      my $mn       = '(?<mn>[0-5][0-9])';        # 00-59
      my $ss       = '(?<s>[0-5][0-9])';         # 00-59
      my $offrx    = $dmt->_zrx('offrx');
      my $zrx      = $dmt->_zrx('zrx');

      my $daterx   =
        "${special}|" .                  # now
        "${special}\\s+${zrx}|" .        # now EDT

        "epoch\\s+$secs|" .              # epoch SECS
        "epoch\\s+$secs\\s+${zrx}|" .    # epoch SECS EDT

        "${dd}\\/${mmm}\\/${y4}:${h24}:${mn}:${ss}\\s*${offrx}";
                                         # Common log format: 10/Oct/2000:13:55:36 -0700

      $daterx = qr/^\s*(?:$daterx)\s*$/i;
      $$dmb{'data'}{'rx'}{'other'}{$rx} = $daterx;

   } elsif ($rx eq 'misc') {
      # Miscellaneous natural-language dates ('next friday', 'last day
      # in october', 'Dec 1st 1970', etc.).

      my $abb      = $$dmb{'data'}{'rx'}{'month_abb'}[0];
      my $nam      = $$dmb{'data'}{'rx'}{'month_name'}[0];
      my $next     = $$dmb{'data'}{'rx'}{'nextprev'}[0];
      my $last     = $$dmb{'data'}{'rx'}{'last'};
      my $yf       = $$dmb{data}{rx}{fields}[1];
      my $mf       = $$dmb{data}{rx}{fields}[2];
      my $wf       = $$dmb{data}{rx}{fields}[3];
      my $df       = $$dmb{data}{rx}{fields}[4];
      my $nth      = $$dmb{'data'}{'rx'}{'nth'}[0];
      my $nth_wom  = $$dmb{'data'}{'rx'}{'nth_wom'}[0];
      my $special  = $$dmb{'data'}{'rx'}{'offset_date'}[0];

      my $y        = '(?:(?<y>\d\d\d\d)|(?<y>\d\d))';
      my $mmm      = "(?:(?<mmm>$abb)|(?<month>$nam))";
      $next        = "(?<next>$next)";
      $last        = "(?<last>$last)";
      $yf          = "(?<field_y>$yf)";
      $mf          = "(?<field_m>$mf)";
      $wf          = "(?<field_w>$wf)";
      $df          = "(?<field_d>$df)";
      my $fld      = "(?:$yf|$mf|$wf)";
      $nth         = "(?<nth>$nth)";
      $nth_wom     = "(?<nth>$nth_wom)";
      $special     = "(?<special>$special)";

      my $daterx   =
        "${mmm}\\s+${nth}\\s*$y?|" .          # Dec 1st [1970]
        "${nth}\\s+${mmm}\\s*$y?|" .          # 1st Dec [1970]
        "$y\\s+${mmm}\\s+${nth}|" .           # 1970 Dec 1st
        "$y\\s+${nth}\\s+${mmm}|" .           # 1970 1st Dec

        "${next}\\s+${fld}|" .                # next year, next month, next week
        "${next}|" .                          # next friday

        "${last}\\s+${mmm}\\s*$y?|" .         # last friday in october 95
        "${last}\\s+${df}\\s+${mmm}\\s*$y?|" .
                                              # last day in october 95
        "${last}\\s*$y?|" .                   # last friday in 95

        "${nth_wom}\\s+${mmm}\\s*$y?|" .
                                              # nth DoW in MMM [YYYY]
        "${nth}\\s*$y?|" .                    # nth DoW in [YYYY]

        "${nth}\\s+$df\\s+${mmm}\\s*$y?|" .
                                              # nth day in MMM [YYYY]

        "${nth}\\s+${wf}\\s*$y?|" .           # DoW Nth week [YYYY]
        "${wf}\\s+(?<n>\\d+)\\s*$y?|" .       # DoW week N [YYYY]

        "${special}|" .                       # today, tomorrow
        "${special}\\s+${wf}|" .              # today week
                                              #   British: same as 1 week from today

        "${nth}|" .                           # nth

        "${wf}";                              # monday week
                                              #   British: same as 'in 1 week on monday'

      $daterx = qr/^\s*(?:$daterx)\s*$/i;
      $$dmb{'data'}{'rx'}{'other'}{$rx} = $daterx;
   }

   return $$dmb{'data'}{'rx'}{'other'}{$rx};
}
# Find (and remove) a time in $string.  $caller is either 'parse' or
# 'parse_time' and determines both which formats are tried and the
# shape of the return value:
#
#   'parse'      : returns ($got_time,$string,$h,$mn,$s,$tzstring,
#                  $zone,$abb,$off) on success, (0) on failure.  The
#                  time (if any) has been removed from $string.
#   'parse_time' : the entire string must be a time; returns
#                  ($h,$mn,$s,$tzstring,$zone,$abb,$off), or the
#                  empty list on error (with $$self{'err'} set).
sub _parse_time {
   my($self,$caller,$string,$noupdate,%opts) = @_;
   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};

   my($timerx,$h,$mn,$s,$fh,$fm,$h24,$ampm,$tzstring,$zone,$abb,$off);
   my $got_time = 0;

   # Check for ISO 8601 time
   #
   # This is only called via. parse_time (parse_date uses a regexp
   # that matches a full ISO 8601 date/time instead of parsing them
   # separately.  Since some ISO 8601 times are a substring of non-ISO
   # 8601 times (i.e. 12:30 is a substring of '12:30 PM'), we need to
   # match entire strings here.

   if ($caller eq 'parse_time') {
      $timerx = (exists $$dmb{'data'}{'rx'}{'iso'}{'time'} ?
                 $$dmb{'data'}{'rx'}{'iso'}{'time'} :
                 $self->_iso8601_rx('time'));

      if (! exists $opts{'noiso8601'}) {
         if ($string =~ s/^\s*$timerx\s*$//) {
            ($h,$fh,$mn,$fm,$s,$ampm,$tzstring,$zone,$abb,$off) =
              @+{qw(h fh mn fm s ampm tzstring zone abb off)};
            ($h,$mn,$s) = $self->_def_time($h,$mn,$s,$noupdate);
            $h24 = 1  if ($h == 24  &&  $mn == 0  &&  $s == 0);
            $string =~ s/\s*$//;
            $got_time = 1;
         }
      }
   }

   # Make time substitutions (i.e. noon => 12:00:00)

   if (! $got_time  &&
       ! exists $opts{'noother'}) {
      my @rx = @{ $$dmb{'data'}{'rx'}{'times'} };
      shift(@rx);   # the first element is not a substitution regexp
      foreach my $rx (@rx) {
         if ($string =~ $rx) {
            my $repl = $$dmb{'data'}{'wordmatch'}{'times'}{lc($1)};
            $string =~ s/$rx/$repl/g;
         }
      }
   }

   # Check to see if there is a time in the string

   if (! $got_time) {
      $timerx = (exists $$dmb{'data'}{'rx'}{'other'}{'time'} ?
                 $$dmb{'data'}{'rx'}{'other'}{'time'} :
                 $self->_other_rx('time'));

      if ($string =~ s/$timerx/ /) {
         ($h,$fh,$mn,$fm,$s,$ampm,$tzstring,$zone,$abb,$off) =
           @+{qw(h fh mn fm s ampm tzstring zone abb off)};
         ($h,$mn,$s) = $self->_def_time($h,$mn,$s,$noupdate);
         $h24 = 1  if ($h == 24  &&  $mn == 0  &&  $s == 0);
         $string =~ s/\s*$//;
         $got_time = 1;
      }
   }

   # If we called this from $date->parse()
   #    returns the string and a list of time components

   if ($caller eq 'parse') {
      if ($got_time) {
         ($h,$mn,$s) = $self->_time($h,$mn,$s,$fh,$fm,$h24,$ampm,$noupdate);
         return ($got_time,$string,$h,$mn,$s,$tzstring,$zone,$abb,$off);
      } else {
         return (0);
      }
   }

   # If we called this from $date->parse_time()

   if (! $got_time  ||  $string) {
      $$self{'err'} = "[$caller] Invalid time string";
      return ();
   }

   ($h,$mn,$s) = $self->_time($h,$mn,$s,$fh,$fm,$h24,$ampm,$noupdate);
   return ($h,$mn,$s,$tzstring,$zone,$abb,$off);
}
# Parse common dates
# Parse the common date formats.
#
# Two regexps are tried: 'common_1' (fully numeric, e.g. MM/DD/YYYY,
# where month/day order depends on the 'dateformat' config) and
# 'common_2' (month given by name or abbreviation).  Returns ($y,$m,$d)
# on success, or an empty list if neither matches.
sub _parse_date_common {
  my($self,$str,$noupdate) = @_;
  my $dmt = $$self{'tz'};
  my $dmb = $$dmt{'base'};

  # Since we want whitespace to be used as a separator, turn all
  # whitespace into single spaces. This is necessary since the
  # regexps do backreferences to make sure that separators are
  # not mixed.
  $str =~ s/\s+/ /g;

  # Fully numeric form.
  my $rx1 = (exists $$dmb{'data'}{'rx'}{'other'}{'common_1'}
             ? $$dmb{'data'}{'rx'}{'other'}{'common_1'}
             : $self->_other_rx('common_1'));
  if ($str =~ $rx1) {
    my($yr,$mon,$day) = @+{qw(y m d)};
    # Outside the US date format, day precedes month.
    ($mon,$day) = ($day,$mon)  if ($dmb->_config('dateformat') ne 'US');
    return $self->_def_date($yr,$mon,$day,$noupdate);
  }

  # Month given as a word.
  my $rx2 = (exists $$dmb{'data'}{'rx'}{'other'}{'common_2'}
             ? $$dmb{'data'}{'rx'}{'other'}{'common_2'}
             : $self->_other_rx('common_2'));
  if ($str =~ $rx2) {
    my($yr,$mon,$day,$mabb,$mname) = @+{qw(y m d mmm month)};
    if ($mabb) {
      $mon = $$dmb{'data'}{'wordmatch'}{'month_abb'}{lc($mabb)};
    } elsif ($mname) {
      $mon = $$dmb{'data'}{'wordmatch'}{'month_name'}{lc($mname)};
    }
    return $self->_def_date($yr,$mon,$day,$noupdate);
  }

  return ();
}
# Strip a timezone specification out of a string (if one is present).
#
# Returns the remaining string, followed by ($tzstring,$zone,$abb,$off)
# when a zone was found; returns just the string otherwise.
sub _parse_tz {
  my($self,$str,$noupdate) = @_;
  my $dmt    = $$self{'tz'};
  my $zonerx = $dmt->_zrx('zrx');

  # The zone must stand alone (bounded by whitespace or the string
  # edges); removing it leaves a single space in its place.
  unless ($str =~ s/(?:^|\s)$zonerx(?:$|\s)/ /) {
    return ($str);
  }
  my %cap = %+;
  return ($str, @cap{qw(tzstring zone abb off)});
}
# Parse a day-of-week out of a string.
#
# The DoW is removed from the string and converted to a number (1-7).
# Return values:
#   (0)                : no DoW in the string
#   (0,$string,$dow)   : a DoW plus other text; caller combines them
#   (1,$y,$m,$d)       : the string was ONLY a DoW; the date of that
#                        day in the current week is returned
sub _parse_dow {
my($self,$string,$noupdate) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my($y,$m,$d,$dow);
# Remove the day of week
my $rx = (exists $$dmb{'data'}{'rx'}{'other'}{'dow'} ?
$$dmb{'data'}{'rx'}{'other'}{'dow'} :
$self->_other_rx('dow'));
if ($string =~ s/$rx/ /) {
$dow = $+{'dow'};
$dow = lc($dow);
# Map a day name or abbreviation to the numeric DoW (1-7).
$dow = $$dmb{'data'}{'wordmatch'}{'day_abb'}{$dow}
if (exists $$dmb{'data'}{'wordmatch'}{'day_abb'}{$dow});
$dow = $$dmb{'data'}{'wordmatch'}{'day_name'}{$dow}
if (exists $$dmb{'data'}{'wordmatch'}{'day_name'}{$dow});
} else {
return (0);
}
$string =~ s/\s*$//;
$string =~ s/^\s*//;
# Something besides the DoW remains: hand it back to the caller.
return (0,$string,$dow) if ($string);
# Handle the simple DoW format
($y,$m,$d) = $self->_def_date($y,$m,$d,$noupdate);
my($w,$dow1);
($y,$w) = $dmb->week_of_year([$y,$m,$d]); # week of year
($y,$m,$d) = @{ $dmb->week_of_year($y,$w) }; # first day
$dow1 = $dmb->day_of_week([$y,$m,$d]); # DoW of first day
# If the week's first day is numerically after the requested DoW, back
# it up a week so the offset below lands inside the current week.
$dow1 -= 7 if ($dow1 > $dow);
($y,$m,$d) = @{ $dmb->calc_date_days([$y,$m,$d],$dow-$dow1) };
return(1,$y,$m,$d);
}
# Parse a holiday name (with an optional year) into a date.
#
# Returns (1,$y,$m,$d) if the string names a known holiday for that
# year, or (0) otherwise (including when no holiday regexp has been
# defined at all).
sub _parse_holidays {
  my($self,$str,$noupdate) = @_;
  my $dmt = $$self{'tz'};
  my $dmb = $$dmt{'base'};

  return (0)  if (! exists $$dmb{'data'}{'rx'}{'holidays'});

  # Trim surrounding whitespace before matching.
  $str =~ s/\s*$//;
  $str =~ s/^\s*//;

  my $holrx = $$dmb{'data'}{'rx'}{'holidays'};
  if ($str =~ $holrx) {
    my($year,$name) = @+{qw(y holiday)};
    $year = $dmt->_now('y',$noupdate)  if (! $year);
    $year += 0;

    # Make sure the holiday dates for this year have been calculated.
    $self->_holidays($year,2);
    return (0)  if (! exists $$dmb{'data'}{'holidays'}{'dates'}{$year});

    # Scan every holiday defined in that year for a case-insensitive
    # name match.
    my $dates = $$dmb{'data'}{'holidays'}{'dates'}{$year};
    foreach my $mon (keys %$dates) {
      foreach my $day (keys %{ $$dates{$mon} }) {
        foreach my $hol (@{ $$dates{$mon}{$day} }) {
          return (1,$year,$mon,$day)  if (lc($hol) eq lc($name));
        }
      }
    }
  }

  return (0);
}
# Try to parse the string as a delta (e.g. 'in 2 days', '3 hours ago')
# relative to now, possibly combined with a time and/or DoW that were
# already parsed out of the string.
#
# Returns:
#   (0)                       : string is not a delta
#   (1)                       : it was a delta, but an error occurred
#                               (error message stored in $$self{'err'})
#   (1,$y,$m,$d,$h,$mn,$s)    : the resulting date
sub _parse_delta {
my($self,$string,$dow,$got_time,$h,$mn,$s,$noupdate) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my($y,$m,$d);
my $delta = $self->new_delta();
my $err = $delta->parse($string);
my $tz = $dmt->_now('tz');
my $isdst = $dmt->_now('isdst');
if (! $err) {
my($dy,$dm,$dw,$dd,$dh,$dmn,$ds) = @{ $$delta{'data'}{'delta'} };
# A time was already parsed out of the string AND the delta also
# carries a time component: ambiguous, so reject it.
if ($got_time &&
($dh != 0 || $dmn != 0 || $ds != 0)) {
$$self{'err'} = '[parse] Two times entered or implied';
return (1);
}
if ($got_time) {
# Keep the already-parsed H/MN/S; only default the date part.
($y,$m,$d) = $self->_def_date($y,$m,$d,$noupdate);
} else {
($y,$m,$d,$h,$mn,$s) = $dmt->_now('now',$$noupdate);
$$noupdate = 1;
}
my $business = $$delta{'data'}{'business'};
my($date2,$offset,$abbrev);
($err,$date2,$offset,$isdst,$abbrev) =
$self->__calc_date_delta([$y,$m,$d,$h,$mn,$s],
[$dy,$dm,$dw,$dd,$dh,$dmn,$ds],
0,$business,$tz,$isdst);
($y,$m,$d,$h,$mn,$s) = @$date2;
if ($dow) {
# A DoW only makes sense with year/month/week deltas; any finer
# granularity is an error.
if ($dd != 0 || $dh != 0 || $dmn != 0 || $ds != 0) {
$$self{'err'} = '[parse] Day of week not allowed';
return (1);
}
# Move to the requested DoW within the resulting week.
my($w,$dow1);
($y,$w) = $dmb->week_of_year([$y,$m,$d]); # week of year
($y,$m,$d) = @{ $dmb->week_of_year($y,$w) }; # first day
$dow1 = $dmb->day_of_week([$y,$m,$d]); # DoW of first day
$dow1 -= 7 if ($dow1 > $dow);
($y,$m,$d) = @{ $dmb->calc_date_days([$y,$m,$d],$dow-$dow1) };
}
return (1,$y,$m,$d,$h,$mn,$s);
}
return (0);
}
# Parse miscellaneous full date/time strings:
#   - special offsets with a time ('now', 'today at noon', ...)
#   - 'epoch SECS' style strings
#   - the 'YYYY mmm DD HH:MN:SS' fallback form
#
# Returns (1,$y,$m,$d,$h,$mn,$s,$tzstring,$zone,$abb,$off) on success,
# or (0) if the string does not match.
sub _parse_datetime_other {
my($self,$string,$noupdate) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my $rx = (exists $$dmb{'data'}{'rx'}{'other'}{'miscdatetime'} ?
$$dmb{'data'}{'rx'}{'other'}{'miscdatetime'} :
$self->_other_rx('miscdatetime'));
if ($string =~ $rx) {
my ($special,$epoch,$y,$mmm,$d,$h,$mn,$s,$tzstring,$zone,$abb,$off) =
@+{qw(special epoch y mmm d h mn s tzstring zone abb off)};
if ($tzstring) {
# NOTE(review): this block is intentionally empty (a no-op); the
# timezone information is handled separately in each branch below.
}
if (defined($special)) {
# 'special' words map to a time offset from now (e.g. 'now' => 0).
my $delta = $$dmb{'data'}{'wordmatch'}{'offset_time'}{lc($special)};
my @delta = @{ $dmb->split('delta',$delta) };
my @date = $dmt->_now('now',$$noupdate);
my $tz = $dmt->_now('tz');
my $isdst = $dmt->_now('isdst');
$$noupdate = 1;
my($err,$date2,$offset,$abbrev);
($err,$date2,$offset,$isdst,$abbrev) =
$self->__calc_date_delta([@date],[@delta],0,0,$tz,$isdst);
if ($tzstring) {
# An explicit zone was given: convert the result into it.
my(@args);
push(@args,$zone) if ($zone);
push(@args,$abb) if ($abb);
push(@args,$off) if ($off);
push(@args,$date2);
$zone = $dmt->zone(@args);
return (0) if (! $zone);
my(@tmp) = $dmt->_convert('_parse_datetime_other',$date2,$tz,$zone);
$date2 = $tmp[1];
}
@date = @$date2;
return (1,@date,$tzstring,$zone,$abb,$off);
} elsif (defined($epoch)) {
# Seconds since the Unix epoch (1970-01-01 00:00:00 GMT).
my $date = [1970,1,1,0,0,0];
my @delta = (0,0,$epoch);
$date = $dmb->calc_date_time($date,\@delta);
my($err);
if ($tzstring) {
my(@args);
push(@args,$zone) if ($zone);
push(@args,$abb) if ($abb);
push(@args,$off) if ($off);
push(@args,$date);
$zone = $dmt->zone(@args);
return (0) if (! $zone);
($err,$date) = $dmt->convert_from_gmt($date,$zone);
} else {
($err,$date) = $dmt->convert_from_gmt($date);
}
return (1,@$date,$tzstring,$zone,$abb,$off);
} elsif (defined($y)) {
# Full literal date/time with a month abbreviation.
my $m = $$dmb{'data'}{'wordmatch'}{'month_abb'}{lc($mmm)};
return (1,$y,$m,$d,$h,$mn,$s,$tzstring,$zone,$abb,$off);
}
}
return (0);
}
# Parse miscellaneous date-only strings such as:
#   'Dec 1st 1970', 'next friday', 'last day in october 95',
#   'nth DoW in MMM YYYY', 'DoW week N YYYY', 'today', ...
#
# $dow is a DoW (1-7) already parsed out of the string (0 if none),
# $of is true if an 'in'/'of' word was present (distinguishes
# 'nth DoW OF month' from 'DoW, nth month').
#
# Returns ($y,$m,$d,$dow) on success (with $dow possibly cleared when
# it has been consumed here), or an empty list on no match / error.
sub _parse_date_other {
my($self,$string,$dow,$of,$noupdate) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my($y,$m,$d,$h,$mn,$s);
my $rx = (exists $$dmb{'data'}{'rx'}{'other'}{'misc'} ?
$$dmb{'data'}{'rx'}{'other'}{'misc'} :
$self->_other_rx('misc'));
my($mmm,$month,$nextprev,$last,$field_y,$field_m,$field_w,$field_d,$nth);
my($special,$got_m,$n,$got_y);
if ($string =~ $rx) {
($y,$mmm,$month,$nextprev,$last,$field_y,$field_m,$field_w,$field_d,$nth,
$special,$n) =
@+{qw(y mmm month next last field_y field_m field_w field_d
nth special n)};
if (defined($y)) {
$y = $dmt->_fix_year($y);
$got_y = 1;
return () if (! $y);
} else {
# No year given: default to the current one and mark it defaulted.
$y = $dmt->_now('y',$$noupdate);
$$noupdate = 1;
$got_y = 0;
$$self{'data'}{'def'}[0] = '';
}
if (defined($mmm)) {
$m = $$dmb{'data'}{'wordmatch'}{'month_abb'}{lc($mmm)};
$got_m = 1;
} elsif ($month) {
$m = $$dmb{'data'}{'wordmatch'}{'month_name'}{lc($month)};
$got_m = 1;
}
if ($nth) {
# Convert '1st', '2nd', ... into a number.
$nth = $$dmb{'data'}{'wordmatch'}{'nth'}{lc($nth)};
}
if ($got_m && $nth && ! $dow) {
# Dec 1st 1970
# 1st Dec 1970
# 1970 Dec 1st
# 1970 1st Dec
$d = $nth;
} elsif ($nextprev) {
my $next = 0;
my $sign = -1;
if ($$dmb{'data'}{'wordmatch'}{'nextprev'}{lc($nextprev)} == 1) {
$next = 1;
$sign = 1;
}
if ($field_y || $field_m || $field_w) {
# next/prev year/month/week
my(@delta);
if ($field_y) {
@delta = ($sign*1,0,0,0,0,0,0);
} elsif ($field_m) {
@delta = (0,$sign*1,0,0,0,0,0);
} else {
@delta = (0,0,$sign*1,0,0,0,0);
}
my @now = $dmt->_now('now',$$noupdate);
my $tz = $dmt->_now('tz');
my $isdst = $dmt->_now('isdst');
$$noupdate = 1;
my($err,$offset,$abbrev,$date2);
($err,$date2,$offset,$isdst,$abbrev) =
$self->__calc_date_delta([@now],[@delta],0,0,$tz,$isdst);
($y,$m,$d,$h,$mn,$s) = @$date2;
} elsif ($dow) {
# next/prev friday
my @now = $dmt->_now('now',$$noupdate);
$$noupdate = 1;
# The DoW has been consumed here, so clear it.
($y,$m,$d,$h,$mn,$s) = @{ $self->__next_prev(\@now,$next,$dow,0) };
$dow = 0;
} else {
return ();
}
} elsif ($last) {
if ($field_d && $got_m) {
# last day in october 95
$d = $dmb->days_in_month($y,$m);
} elsif ($dow && $got_m) {
# last friday in october 95
# Search backwards from the end of the month ($curr=1 allows the
# last day itself to match).
$d = $dmb->days_in_month($y,$m);
($y,$m,$d,$h,$mn,$s) =
@{ $self->__next_prev([$y,$m,$d,0,0,0],0,$dow,1) };
$dow = 0;
} elsif ($dow) {
# last friday in 95
($y,$m,$d,$h,$mn,$s) =
@{ $self->__next_prev([$y,12,31,0,0,0],0,$dow,0) };
} else {
return ();
}
} elsif ($nth && $dow && ! $field_w) {
if ($got_m) {
if ($of) {
# nth DoW of MMM [YYYY]
return () if ($nth > 5);
$d = 1;
($y,$m,$d,$h,$mn,$s) =
@{ $self->__next_prev([$y,$m,1,0,0,0],1,$dow,1) };
my $m2 = $m;
($y,$m2,$d) = @{ $dmb->calc_date_days([$y,$m,$d],7*($nth-1)) }
if ($nth > 1);
# Reject if stepping forward left the original month.
return () if (! $m2 || $m2 != $m);
} else {
# DoW, nth MMM [YYYY] (i.e. Sunday, 9th Dec 2008)
$d = $nth;
}
} else {
# nth DoW [in YYYY]
($y,$m,$d,$h,$mn,$s) = @{ $self->__next_prev([$y,1,1,0,0,0],1,$dow,1) };
($y,$m,$d) = @{ $dmb->calc_date_days([$y,$m,$d],7*($nth-1)) }
if ($nth > 1);
}
} elsif ($field_w && $dow) {
if (defined($n) || $nth) {
# sunday week 22 in 1996
# sunday 22nd week in 1996
$n = $nth if ($nth);
return () if (! $n);
($y,$m,$d) = @{ $dmb->week_of_year($y,$n) };
($y,$m,$d) = @{ $self->__next_prev([$y,$m,$d,0,0,0],1,$dow,1) };
} else {
# DoW week
# Go to the start of the current week (per the 'firstday'
# config), then forward to the requested DoW.
($y,$m,$d) = $dmt->_now('now',$$noupdate);
$$noupdate = 1;
my $tmp = $dmb->_config('firstday');
($y,$m,$d) = @{ $self->__next_prev([$y,$m,$d,0,0,0],1,$tmp,0) };
($y,$m,$d) = @{ $self->__next_prev([$y,$m,$d,0,0,0],1,$dow,1) };
}
} elsif ($nth && ! $got_y) {
# 'in one week' makes it here too so return nothing in that case so it
# drops through to the deltas.
return () if ($field_d || $field_w || $field_m || $field_y);
($y,$m,$d) = $dmt->_now('now',$$noupdate);
$$noupdate = 1;
$d = $nth;
} elsif ($special) {
# today, tomorrow, etc. (an offset in days from now)
my $delta = $$dmb{'data'}{'wordmatch'}{'offset_date'}{lc($special)};
my @delta = @{ $dmb->split('delta',$delta) };
($y,$m,$d) = $dmt->_now('now',$$noupdate);
my $tz = $dmt->_now('tz');
my $isdst = $dmt->_now('isdst');
$$noupdate = 1;
my($err,$offset,$abbrev,$date2);
($err,$date2,$offset,$isdst,$abbrev) =
$self->__calc_date_delta([$y,$m,$d,0,0,0],[@delta],0,0,$tz,$isdst);
($y,$m,$d) = @$date2;
if ($field_w) {
# 'today week' (British): one week from the special day.
($y,$m,$d) = @{ $dmb->calc_date_days([$y,$m,$d],7) };
}
}
} else {
return ();
}
return($y,$m,$d,$dow);
}
# Supply defaults for missing values (Y/M/D)
# Supply defaults for missing values (Y/M/D).
#
# Fields default to the current date UNLESS a higher-order field was
# given, in which case lower-order fields default to 1 (a truncated
# date).  Each defaulted field is recorded in $$self{'data'}{'def'}:
# '' means it came from "now", 1 means it was truncated to 1.
# 2-digit years are expanded to 4 digits.  Returns ($y,$m,$d).
sub _def_date {
my($self,$y,$m,$d,$noupdate) = @_;
$y = '' if (! defined $y);
$m = '' if (! defined $m);
$d = '' if (! defined $d);
# Set once any field is explicitly given; controls truncation below.
my $defined = 0;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
# If year was not specified, defaults to current year.
#
# We'll also fix the year (turn 2-digit into 4-digit).
if ($y eq '') {
$y = $dmt->_now('y',$$noupdate);
$$noupdate = 1;
$$self{'data'}{'def'}[0] = '';
} else {
$y = $dmt->_fix_year($y);
$defined = 1;
}
# If the month was not specifed, but the year was, a default of
# 01 is supplied (this is a truncated date).
#
# If neither was specified, month defaults to the current month.
if ($m ne '') {
$defined = 1;
} elsif ($defined) {
$m = 1;
$$self{'data'}{'def'}[1] = 1;
} else {
$m = $dmt->_now('m',$$noupdate);
$$noupdate = 1;
$$self{'data'}{'def'}[1] = '';
}
# If the day was not specified, but the year or month was, a default
# of 01 is supplied (this is a truncated date).
#
# If none were specified, it default to the current day.
if ($d ne '') {
$defined = 1;
} elsif ($defined) {
$d = 1;
$$self{'data'}{'def'}[2] = 1;
} else {
$d = $dmt->_now('d',$$noupdate);
$$noupdate = 1;
$$self{'data'}{'def'}[2] = '';
}
return($y,$m,$d);
}
# Supply defaults for missing values (Y/DoY)
# Supply defaults for missing values (Y/DoY).
#
# The year defaults to the current year (2-digit years are expanded to
# 4 digits).  The day-of-year must always be present.  Returns the full
# ($y,$m,$d) date.
sub _def_date_doy {
  my($self,$year,$doy,$noupdate) = @_;
  my $dmt = $$self{'tz'};
  my $dmb = $$dmt{'base'};

  if (defined($year)  &&  $year ne '') {
    $year = $dmt->_fix_year($year);
  } else {
    # No year given: use the current one and flag it as defaulted.
    $year = $dmt->_now('y',$$noupdate);
    $$noupdate = 1;
    $$self{'data'}{'def'}[0] = '';
  }

  # Convert the day of year into a full Y/M/D date.
  return @{ $dmb->day_of_year($year,$doy) };
}
# Supply defaults for missing values (YY/Www/D) and (Y/Www/D)
# Supply defaults for missing values (YY/Www/D) and (Y/Www/D).
#
# $w is the ISO week number and $dow the day of week (1-7, defaulting
# to 1).  A single-digit year means "this decade".  Returns ($y,$m,$d)
# of the requested day within the requested (or current) week.
sub _def_date_dow {
my($self,$y,$w,$dow,$noupdate) = @_;
$y = '' if (! defined $y);
$w = '' if (! defined $w);
$dow = '' if (! defined $dow);
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
# If year was not specified, defaults to current year.
#
# If it was specified and is a single digit, it is the
# year in the current decade.
#
# We'll also fix the year (turn 2-digit into 4-digit).
if ($y ne '') {
if (length($y) == 1) {
# Replace the last digit of the current year with the given digit.
my $tmp = $dmt->_now('y',$$noupdate);
$tmp =~ s/.$/$y/;
$y = $tmp;
$$noupdate = 1;
} else {
$y = $dmt->_fix_year($y);
}
} else {
$y = $dmt->_now('y',$$noupdate);
$$noupdate = 1;
$$self{'data'}{'def'}[0] = '';
}
# If week was not specified, it defaults to the current
# week. Get the first day of the week.
my($m,$d);
if ($w ne '') {
($y,$m,$d) = @{ $dmb->week_of_year($y,$w) };
} else {
my($nowy,$nowm,$nowd) = $dmt->_now('now',$$noupdate);
$$noupdate = 1;
my $noww;
($nowy,$noww) = $dmb->week_of_year([$nowy,$nowm,$nowd]);
($y,$m,$d) = @{ $dmb->week_of_year($nowy,$noww) };
}
# Handle the DoW
if ($dow eq '') {
$dow = 1;
}
# Step from the first day of the week to the requested DoW, carrying
# into the next month/year if we run past the month's end.
my $n = $dmb->days_in_month($y,$m);
$d += ($dow-1);
if ($d > $n) {
$m++;
if ($m==13) {
$y++;
$m = 1;
}
$d = $d-$n;
}
return($y,$m,$d);
}
# Supply defaults for missing values (HH:MN:SS)
# Supply defaults for missing values (HH:MN:SS).
#
# An entirely empty time defaults to 00:00:00.  Otherwise, fields
# default to the current time UNLESS a higher-order field was given,
# in which case lower-order fields default to 00 (a truncated time).
# Defaulted fields are recorded in $$self{'data'}{'def'}[3..5]:
# '' means "from now", 1 means truncated/zeroed.  Returns ($h,$m,$s).
sub _def_time {
my($self,$h,$m,$s,$noupdate) = @_;
$h = '' if (! defined $h);
$m = '' if (! defined $m);
$s = '' if (! defined $s);
my $defined = 0;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
# If no time was specified, defaults to 00:00:00.
if ($h eq '' &&
$m eq '' &&
$s eq '') {
$$self{'data'}{'def'}[3] = 1;
$$self{'data'}{'def'}[4] = 1;
$$self{'data'}{'def'}[5] = 1;
return(0,0,0);
}
# If hour was not specified, defaults to current hour.
if ($h ne '') {
$defined = 1;
} else {
$h = $dmt->_now('h',$$noupdate);
$$noupdate = 1;
$$self{'data'}{'def'}[3] = '';
}
# If the minute was not specifed, but the hour was, a default of
# 00 is supplied (this is a truncated time).
#
# If neither was specified, minute defaults to the current minute.
if ($m ne '') {
$defined = 1;
} elsif ($defined) {
$m = 0;
$$self{'data'}{'def'}[4] = 1;
} else {
$m = $dmt->_now('mn',$$noupdate);
$$noupdate = 1;
$$self{'data'}{'def'}[4] = '';
}
# If the second was not specified (either the hour or the minute were),
# a default of 00 is supplied (this is a truncated time).
if ($s eq '') {
$s = 0;
$$self{'data'}{'def'}[5] = 1;
}
return($h,$m,$s);
}
########################################################################
# OTHER DATE METHODS
########################################################################
# Gets the date in the parsed timezone (if $type = ''), local timezone
# (if $type = 'local') or GMT timezone (if $type = 'gmt').
#
# Gets the string value in scalar context, the split value in list
# context.
#
# Gets the date in the parsed timezone (if $type = ''), local timezone
# (if $type = 'local') or GMT timezone (if $type = 'gmt').
#
# Gets the string value in scalar context, the split value in list
# context.
#
# Conversions to GMT/local are cached in $$self{'data'}{'gmt'} and
# $$self{'data'}{'loc'} so repeated calls are cheap.  On error, sets
# $$self{'err'} and returns '' (scalar) or () (list).
sub value {
my($self,$type) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my $date;
# A one-pass loop used purely so error paths can bail out with 'last'.
while (1) {
if (! $$self{'data'}{'set'}) {
$$self{'err'} = '[value] Object does not contain a date';
last;
}
$type = '' if (! $type);
if ($type eq 'gmt') {
# Fill the GMT cache on first use.
if (! @{ $$self{'data'}{'gmt'} }) {
my $zone = $$self{'data'}{'tz'};
my $date = $$self{'data'}{'date'};
if ($zone eq 'Etc/GMT') {
$$self{'data'}{'gmt'} = $date;
} else {
my $isdst = $$self{'data'}{'isdst'};
my($err,$d) = $dmt->convert_to_gmt($date,$zone,$isdst);
if ($err) {
$$self{'err'} = '[value] Unable to convert date to GMT';
last;
}
$$self{'data'}{'gmt'} = $d;
}
}
$date = $$self{'data'}{'gmt'};
} elsif ($type eq 'local') {
# Fill the localtime cache on first use.
if (! @{ $$self{'data'}{'loc'} }) {
my $zone = $$self{'data'}{'tz'};
$date = $$self{'data'}{'date'};
my $local = $dmt->_now('tz',1);
if ($zone eq $local) {
$$self{'data'}{'loc'} = $date;
} else {
my $isdst = $$self{'data'}{'isdst'};
my($err,$d) = $dmt->convert_to_local($date,$zone,$isdst);
if ($err) {
$$self{'err'} = '[value] Unable to convert date to localtime';
last;
}
$$self{'data'}{'loc'} = $d;
}
}
$date = $$self{'data'}{'loc'};
} else {
$date = $$self{'data'}{'date'};
}
last;
}
if ($$self{'err'}) {
if (wantarray) {
return ();
} else {
return '';
}
}
if (wantarray) {
return @$date;
} else {
return $dmb->join('date',$date);
}
}
# Compare this date with another Date::Manip::Date object.
#
# Returns -1, 0, or 1 (like the core 'cmp' operator) if this date is
# earlier than, identical to, or later than $date.  Returns undef (and
# warns) if either object is not a valid, set date, or if $date is not
# a Date::Manip::Date object.
#
# If both dates are in the same timezone they are compared directly;
# otherwise both are converted to GMT first so the comparison is
# meaningful.
sub cmp {
my($self,$date) = @_;
if ($$self{'err'} || ! $$self{'data'}{'set'}) {
warn "WARNING: [cmp] Arguments must be valid dates: date1\n";
return undef;
}
# BUGFIX: this test was previously written '! ref($date) eq ...'.
# Since '!' binds more tightly than 'eq', that compared '' or 1 to the
# class name and could never be true, so invalid arguments were never
# rejected.  Use 'ne' so the type check actually works.
if (ref($date) ne 'Date::Manip::Date') {
warn "WARNING: [cmp] Argument must be a Date::Manip::Date object\n";
return undef;
}
if ($$date{'err'} || ! $$date{'data'}{'set'}) {
warn "WARNING: [cmp] Arguments must be valid dates: date2\n";
return undef;
}
# Same zone: compare as-is.  Different zones: compare in GMT.
my($d1,$d2);
if ($$self{'data'}{'tz'} eq $$date{'data'}{'tz'}) {
$d1 = $self->value();
$d2 = $date->value();
} else {
$d1 = $self->value('gmt');
$d2 = $date->value('gmt');
}
# The string forms are fixed-width YYYYMMDDHH:MN:SS-style values, so a
# string comparison orders them chronologically.
return ($d1 cmp $d2);
}
# Set one field (or group of fields) of the date.
#
# $field is one of: 'zdate' (zone + full date), 'date' (full date),
# 'time' (H/MN/S), 'zone' (timezone only), or a single field name
# ('y','m','d','h','mn','s').  @val supplies the new value(s), with an
# optional trailing ISDST flag for ambiguous times.  Returns 0 on
# success, 1 on error (with the message stored in $$self{'err'}).
#
# The BEGIN block creates a private, compile-time map from single-field
# names to their index in the [Y,M,D,H,MN,S] date list, shared by every
# call to set() via closure.
BEGIN {
my %field = qw(y 0 m 1 d 2 h 3 mn 4 s 5);
sub set {
my($self,$field,@val) = @_;
$field = lc($field);
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
# Make sure $self includes a valid date (unless the entire date is
# being set, in which case it doesn't matter).
my($date,@def,$tz,$isdst);
if ($field eq 'zdate') {
# If {data}{set} = 2, we want to preserve the defaults. Also, we've
# already initialized.
#
# It is only set in the parse routines which means that this was
# called via _parse_check.
$self->_init() if ($$self{'data'}{'set'} != 2);
@def = @{ $$self{'data'}{'def'} };
} elsif ($field eq 'date') {
if ($$self{'data'}{'set'} && ! $$self{'err'}) {
$tz = $$self{'data'}{'tz'};
} else {
$tz = $dmt->_now('tz',1);
}
$self->_init();
@def = @{ $$self{'data'}{'def'} };
} else {
# Single-field / time / zone changes require an existing valid date.
return 1 if ($$self{'err'} || ! $$self{'data'}{'set'});
$date = $$self{'data'}{'date'};
$tz = $$self{'data'}{'tz'};
$isdst = $$self{'data'}{'isdst'};
@def = @{ $$self{'data'}{'def'} };
$self->_init();
}
# Check the arguments
# $err: 1 = bad argument count/shape, 2 = unknown field name.
my($err,$new_tz,$new_date,$new_time);
if ($field eq 'date') {
if ($#val == 0) {
# date,DATE
$new_date = $val[0];
} elsif ($#val == 1) {
# date,DATE,ISDST
($new_date,$isdst) = @val;
} else {
$err = 1;
}
# Explicitly setting a field clears its "defaulted" flag.
for (my $i=0; $i<=5; $i++) {
$def[$i] = 0 if ($def[$i]);
}
} elsif ($field eq 'time') {
if ($#val == 0) {
# time,TIME
$new_time = $val[0];
} elsif ($#val == 1) {
# time,TIME,ISDST
($new_time,$isdst) = @val;
} else {
$err = 1;
}
$def[3] = 0 if ($def[3]);
$def[4] = 0 if ($def[4]);
$def[5] = 0 if ($def[5]);
} elsif ($field eq 'zdate') {
if ($#val == 0) {
# zdate,DATE
$new_date = $val[0];
} elsif ($#val == 1 && ($val[1] eq '0' || $val[1] eq '1')) {
# zdate,DATE,ISDST
($new_date,$isdst) = @val;
} elsif ($#val == 1) {
# zdate,ZONE,DATE
($new_tz,$new_date) = @val;
} elsif ($#val == 2) {
# zdate,ZONE,DATE,ISDST
($new_tz,$new_date,$isdst) = @val;
} else {
$err = 1;
}
for (my $i=0; $i<=5; $i++) {
$def[$i] = 0 if ($def[$i]);
}
$tz = $dmt->_now('tz',1) if (! $new_tz);
} elsif ($field eq 'zone') {
if ($#val == -1) {
# zone
} elsif ($#val == 0 && ($val[0] eq '0' || $val[0] eq '1')) {
# zone,ISDST
$isdst = $val[0];
} elsif ($#val == 0) {
# zone,ZONE
$new_tz = $val[0];
} elsif ($#val == 1) {
# zone,ZONE,ISDST
($new_tz,$isdst) = @val;
} else {
$err = 1;
}
$tz = $dmt->_now('tz',1) if (! $new_tz);
} elsif (exists $field{$field}) {
# A single Y/M/D/H/MN/S field.
my $i = $field{$field};
my $val;
if ($#val == 0) {
$val = $val[0];
} elsif ($#val == 1) {
($val,$isdst) = @val;
} else {
$err = 1;
}
$$date[$i] = $val;
$def[$i] = 0 if ($def[$i]);
} else {
$err = 2;
}
if ($err) {
if ($err == 1) {
$$self{'err'} = '[set] Invalid arguments';
} else {
$$self{'err'} = '[set] Invalid field';
}
return 1;
}
# Handle the arguments
if ($new_tz) {
my $tmp = $dmt->_zone($new_tz);
if ($tmp) {
# A zone/alias
$tz = $tmp;
} else {
# An offset
my ($err,@args);
push(@args,$date) if ($date);
push(@args,$new_tz);
push(@args,($isdst ? 'dstonly' : 'stdonly')) if (defined $isdst);
$tz = $dmb->zone(@args);
if (! $tz) {
$$self{'err'} = "[set] Invalid timezone argument: $new_tz";
return 1;
}
}
}
if ($new_date) {
if ($dmb->check($new_date)) {
$date = $new_date;
} else {
$$self{'err'} = '[set] Invalid date argument';
return 1;
}
}
if ($new_time) {
if ($dmb->check_time($new_time)) {
$$date[3] = $$new_time[0];
$$date[4] = $$new_time[1];
$$date[5] = $$new_time[2];
} else {
$$self{'err'} = '[set] Invalid time argument';
return 1;
}
}
# Check the date/timezone combination
my($abb,$off);
if ($tz eq 'etc/gmt') {
$abb = 'GMT';
$off = [0,0,0];
$isdst = 0;
} else {
# date_period also resolves an ambiguous ISDST for us.
my $per = $dmt->date_period($date,$tz,1,$isdst);
if (! $per) {
$$self{'err'} = '[set] Invalid date/timezone';
return 1;
}
$isdst = $$per[5];
$abb = $$per[4];
$off = $$per[3];
}
# Set the information
$$self{'data'}{'set'} = 1;
$$self{'data'}{'date'} = $date;
$$self{'data'}{'tz'} = $tz;
$$self{'data'}{'isdst'} = $isdst;
$$self{'data'}{'offset'}= $off;
$$self{'data'}{'abb'} = $abb;
return 0;
}
}
########################################################################
# NEXT/PREV METHODS
# Change the date to the previous occurrence of a day of week and/or a
# time (see __next_prev for the argument details).  Returns 0 on
# success, 1 on error.
sub prev {
  my($self,@args) = @_;
  return 1  if ($$self{'err'}  ||  ! $$self{'data'}{'set'});

  my $newdate = $self->__next_prev($$self{'data'}{'date'},0,@args);
  return 1  if (! defined $newdate);

  $self->set('date',$newdate);
  return 0;
}
# Change the date to the next occurrence of a day of week and/or a
# time (see __next_prev for the argument details).  Returns 0 on
# success, 1 on error.
sub next {
  my($self,@args) = @_;
  return 1  if ($$self{'err'}  ||  ! $$self{'data'}{'set'});

  my $newdate = $self->__next_prev($$self{'data'}{'date'},1,@args);
  return 1  if (! defined $newdate);

  $self->set('date',$newdate);
  return 0;
}
# Worker for next/prev: find the next/previous occurrence of a DoW
# and/or a time relative to $date.
#
#   $next : 1 = search forward, 0 = search backward
#   $dow  : day of week (1-7), or 0/undef for a time-only search
#   $curr : 0 = strictly next/prev; 1 = the current date may match;
#           2 = (DoW only) current week counts if the date+time is on
#           the correct side of the original
#   $time : with $dow, a full [H,MN,S]; without, [H,MN,S] where leading
#           elements may be undef (match on MN:SS or just SS)
#
# Returns the new date listref, or undef on error (message in
# $$self{'err'}).
sub __next_prev {
my($self,$date,$next,$dow,$curr,$time) = @_;
my ($caller,$sign,$prev);
if ($next) {
$caller = 'next';
$sign = 1;
$prev = 0;
} else {
$caller = 'prev';
$sign = -1;
$prev = 1;
}
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my $orig = [ @$date ];
# Check the time (if any)
if (defined($time)) {
if ($dow) {
# $time will refer to a full [H,MN,S]
my($err,$h,$mn,$s) = $dmb->_hms_fields({ 'out' => 'list' },$time);
if ($err) {
$$self{'err'} = "[$caller] invalid time argument";
return undef;
}
$time = [$h,$mn,$s];
} else {
# $time may have leading undefs
my @tmp = @$time;
if ($#tmp != 2) {
$$self{'err'} = "[$caller] invalid time argument";
return undef;
}
# Fill in trailing components with 0 below the most significant
# field that was given.
my($h,$mn,$s) = @$time;
if (defined($h)) {
$mn = 0 if (! defined($mn));
$s = 0 if (! defined($s));
} elsif (defined($mn)) {
$s = 0 if (! defined($s));
} else {
$s = 0 if (! defined($s));
}
$time = [$h,$mn,$s];
}
}
# Find the next DoW
if ($dow) {
if (! $dmb->_is_int($dow,1,7)) {
$$self{'err'} = "[$caller] Invalid DOW: $dow";
return undef;
}
# Find the next/previous occurrence of DoW
my $curr_dow = $dmb->day_of_week($date);
# $adjust: move an extra week at the end (e.g. when today matches
# but $curr disallows it).
my $adjust = 0;
if ($dow == $curr_dow) {
$adjust = 1 if ($curr == 0);
} else {
my $num;
if ($next) {
# force $dow to be more than $curr_dow
$dow += 7 if ($dow<$curr_dow);
$num = $dow - $curr_dow;
} else {
# force $dow to be less than $curr_dow
$dow -= 7 if ($dow>$curr_dow);
$num = $curr_dow - $dow;
$num *= -1;
}
# Add/subtract $num days
$date = $dmb->calc_date_days($date,$num);
}
if (defined($time)) {
my ($y,$m,$d,$h,$mn,$s) = @$date;
($h,$mn,$s) = @$time;
$date = [$y,$m,$d,$h,$mn,$s];
}
# With $curr == 2, the candidate must lie strictly on the search
# side of the original date/time; otherwise shift one more week.
my $cmp = $dmb->cmp($orig,$date);
$adjust = 1 if ($curr == 2 && $cmp != -1*$sign);
if ($adjust) {
# Add/subtract 1 week
$date = $dmb->calc_date_days($date,$sign*7);
}
return $date;
}
# Find the next Time
if (defined($time)) {
my ($h,$mn,$s) = @$time;
my $orig = [ @$date ];
my $cmp;
if (defined $h) {
# Find next/prev HH:MN:SS
@$date[3..5] = @$time;
$cmp = $dmb->cmp($orig,$date);
if ($cmp == -1) {
# Candidate is after the original: going backward means the
# occurrence was yesterday.
if ($prev) {
$date = $dmb->calc_date_days($date,-1);
}
} elsif ($cmp == 1) {
if ($next) {
$date = $dmb->calc_date_days($date,1);
}
} else {
# Exact match: only allowed when $curr is set.
if (! $curr) {
$date = $dmb->calc_date_days($date,$sign);
}
}
} elsif (defined $mn) {
# Find next/prev MN:SS
@$date[4..5] = @$time[1..2];
$cmp = $dmb->cmp($orig,$date);
if ($cmp == -1) {
if ($prev) {
$date = $dmb->calc_date_time($date,[-1,0,0]);
}
} elsif ($cmp == 1) {
if ($next) {
$date = $dmb->calc_date_time($date,[1,0,0]);
}
} else {
if (! $curr) {
$date = $dmb->calc_date_time($date,[$sign,0,0]);
}
}
} else {
# Find next/prev SS
$$date[5] = $$time[2];
$cmp = $dmb->cmp($orig,$date);
if ($cmp == -1) {
if ($prev) {
$date = $dmb->calc_date_time($date,[0,-1,0]);
}
} elsif ($cmp == 1) {
if ($next) {
$date = $dmb->calc_date_time($date,[0,1,0]);
}
} else {
if (! $curr) {
$date = $dmb->calc_date_time($date,[0,$sign,0]);
}
}
}
return $date;
}
$$self{'err'} = "[$caller] Either DoW or time (or both) required";
return undef;
}
########################################################################
# CALC METHOD
# Perform a date calculation with another Date::Manip object.
#
# With a Date::Manip::Date argument, returns a Date::Manip::Delta for
# the difference between the two dates.  With a Date::Manip::Delta
# argument, returns a new Date::Manip::Date offset by that delta.
# Returns undef for anything else.
sub calc {
  my($self,$obj,@args) = @_;
  my $type = ref($obj);

  return $self->_calc_date_date($obj,@args)   if ($type eq 'Date::Manip::Date');
  return $self->_calc_date_delta($obj,@args)  if ($type eq 'Date::Manip::Delta');
  return undef;
}
# Calculate the delta between two dates: $self - $date (or the reverse
# when $subtract is set).
#
# @args is ([$subtract] [,$mode]) where $mode is one of exact, semi,
# approx, business, bsemi, bapprox (default 'exact').  Returns a
# Date::Manip::Delta object; on failure the object's 'err' field is
# set.
sub _calc_date_date {
my($self,$date,@args) = @_;
my $ret = $self->new_delta();
if ($$self{'err'} || ! $$self{'data'}{'set'}) {
$$ret{'err'} = '[calc] First object invalid (date)';
return $ret;
}
if ($$date{'err'} || ! $$date{'data'}{'set'}) {
$$ret{'err'} = '[calc] Second object invalid (date)';
return $ret;
}
# Handle subtract/mode arguments
my($subtract,$mode);
if ($#args == -1) {
($subtract,$mode) = (0,'');
} elsif ($#args == 0) {
# A lone '0'/'1' is the subtract flag; anything else is the mode.
if ($args[0] eq '0' || $args[0] eq '1') {
($subtract,$mode) = ($args[0],'');
} else {
($subtract,$mode) = (0,$args[0]);
}
} elsif ($#args == 1) {
($subtract,$mode) = @args;
} else {
$$ret{'err'} = '[calc] Invalid arguments';
return $ret;
}
$mode = 'exact' if (! $mode);
if ($mode !~ /^(business|bsemi|bapprox|approx|semi|exact)$/i) {
$$ret{'err'} = '[calc] Invalid mode argument';
return $ret;
}
# if business mode
# dates must be in the same timezone
# use dates in that zone
#
# otherwise if both dates are in the same timezone && approx/semi mode
# use the dates in that zone
#
# otherwise
# convert to gmt
# use those dates
my($date1,$date2,$tz1,$isdst1,$tz2,$isdst2);
if ($mode eq 'business' || $mode eq 'bapprox' || $mode eq 'bsemi') {
if ($$self{'data'}{'tz'} eq $$date{'data'}{'tz'}) {
$date1 = [ $self->value() ];
$date2 = [ $date->value() ];
$tz1 = $$self{'data'}{'tz'};
$tz2 = $tz1;
$isdst1 = $$self{'data'}{'isdst'};
$isdst2 = $$date{'data'}{'isdst'};
} else {
$$ret{'err'} = '[calc] Dates must be in the same timezone for ' .
'business mode calculations';
return $ret;
}
} elsif (($mode eq 'approx' || $mode eq 'semi') &&
$$self{'data'}{'tz'} eq $$date{'data'}{'tz'}) {
$date1 = [ $self->value() ];
$date2 = [ $date->value() ];
$tz1 = $$self{'data'}{'tz'};
$tz2 = $tz1;
$isdst1 = $$self{'data'}{'isdst'};
$isdst2 = $$date{'data'}{'isdst'};
} else {
$date1 = [ $self->value('gmt') ];
$date2 = [ $date->value('gmt') ];
$tz1 = 'GMT';
$tz2 = $tz1;
$isdst1 = 0;
$isdst2 = 0;
}
# Do the calculation
my(@delta);
if ($subtract) {
# For business/exact (or $subtract == 2), swapping the operands gives
# the correct inverse; otherwise negating an approx delta is only
# correct if computed forward and then sign-flipped.
if ($mode eq 'business' || $mode eq 'exact' || $subtract == 2) {
@delta = @{ $self->__calc_date_date($mode,$date2,$tz2,$isdst2,
$date1,$tz1,$isdst1) };
} else {
@delta = @{ $self->__calc_date_date($mode,$date1,$tz1,$isdst1,
$date2,$tz2,$isdst2) };
@delta = map { -1*$_ } @delta;
}
} else {
@delta = @{ $self->__calc_date_date($mode,$date1,$tz1,$isdst1,
$date2,$tz2,$isdst2) };
}
# Save the delta
if ($mode eq 'business' || $mode eq 'bapprox' || $mode eq 'bsemi') {
$ret->set('business',\@delta);
} else {
$ret->set('delta',\@delta);
}
return $ret;
}
# Low-level date-date difference.
#
# Given two dates (with their zones/DST flags) and a mode, returns the
# delta [dy,dm,dw,dd,dh,dmn,ds]:
#   approx/bapprox : year/month part computed first, then the rest
#   semi/bsemi     : week (and day) part computed, then the rest
#   exact          : everything folded into h/mn/s via epoch seconds
#   business modes : day part counts only business days; h/mn/s done
#                    directly since business days have fixed length
sub __calc_date_date {
my($self,$mode,$date1,$tz1,$isdst1,$date2,$tz2,$isdst2) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my($dy,$dm,$dw,$dd,$dh,$dmn,$ds) = (0,0,0,0,0,0,0);
if ($mode eq 'approx' || $mode eq 'bapprox') {
my($y1,$m1,$d1,$h1,$mn1,$s1) = @$date1;
my($y2,$m2,$d2,$h2,$mn2,$s2) = @$date2;
$dy = $y2-$y1;
$dm = $m2-$m1;
if ($dy || $dm) {
# If $d1 is greater than the number of days allowed in the
# month $y2/$m2, set it equal to the number of days. In other
# words:
# Jan 31 2006 to Feb 28 2008 = 2 years 1 month
#
my $dim = $dmb->days_in_month($y2,$m2);
$d1 = $dim if ($d1 > $dim);
# Move date1 into date2's year/month; the remaining difference is
# handled by the week/day/time steps below.
$date1 = [$y2,$m2,$d1,$h1,$mn1,$s1];
}
}
if ($mode eq 'semi' || $mode eq 'approx') {
# Calculate the number of weeks/days apart (temporarily ignoring
# DST effects).
$dd = $dmb->days_since_1BC($date2) -
$dmb->days_since_1BC($date1);
$dw = int($dd/7);
$dd -= $dw*7;
# Adding $dd to $date1 gives: ($y2,$m2,$d2, $h1,$mn1,$s1)
# Make sure this is valid (taking into account DST effects).
# If it isn't, make it valid.
if ($dw || $dd) {
my($y1,$m1,$d1,$h1,$mn1,$s1) = @$date1;
my($y2,$m2,$d2,$h2,$mn2,$s2) = @$date2;
$date1 = [$y2,$m2,$d2,$h1,$mn1,$s1];
}
if ($dy || $dm || $dw || $dd) {
my $force = ( ($dw > 0 || $dd > 0) ? 1 : -1 );
my($off,$isdst,$abb);
($date1,$off,$isdst,$abb) =
$self->_calc_date_check_dst($date1,$tz2,$isdst2,$force);
}
}
if ($mode eq 'bsemi' || $mode eq 'bapprox') {
# Calculate the number of weeks. Ignore the days
# part. Also, since there are no DST effects, we don't
# have to check for validity.
$dd = $dmb->days_since_1BC($date2) -
$dmb->days_since_1BC($date1);
$dw = int($dd/7);
$dd = 0;
$date1 = $dmb->calc_date_days($date1,$dw*7);
}
if ($mode eq 'exact' || $mode eq 'semi' || $mode eq 'approx') {
# Whatever difference remains is expressed purely in h/mn/s,
# computed from epoch seconds so DST transitions are handled.
my $sec1 = $dmb->secs_since_1970($date1);
my $sec2 = $dmb->secs_since_1970($date2);
$ds = $sec2 - $sec1;
{
# $ds can exceed native integer division semantics under
# 'use integer'; do this division in floating point.
no integer;
$dh = int($ds/3600);
$ds -= $dh*3600;
}
$dmn = int($ds/60);
$ds -= $dmn*60;
}
if ($mode eq 'business' || $mode eq 'bsemi' || $mode eq 'bapprox') {
# Make sure both are work days
$date1 = $self->__nextprev_business_day(0,0,1,$date1);
$date2 = $self->__nextprev_business_day(0,0,1,$date2);
my($y1,$m1,$d1,$h1,$mn1,$s1) = @$date1;
my($y2,$m2,$d2,$h2,$mn2,$s2) = @$date2;
# Find out which direction we need to move $date1 to get to $date2
my $dir = 0;
if ($y1 < $y2) {
$dir = 1;
} elsif ($y1 > $y2) {
$dir = -1;
} elsif ($m1 < $m2) {
$dir = 1;
} elsif ($m1 > $m2) {
$dir = -1;
} elsif ($d1 < $d2) {
$dir = 1;
} elsif ($d1 > $d2) {
$dir = -1;
}
# Now do the day part (to get to the same day)
$dd = 0;
while ($dir) {
($y1,$m1,$d1) = @{ $dmb->calc_date_days([$y1,$m1,$d1],$dir) };
$dd += $dir if ($self->__is_business_day([$y1,$m1,$d1,0,0,0],0));
$dir = 0 if ($y1 == $y2 && $m1 == $m2 && $d1 == $d2);
}
# Both dates are now on a business day, and during business
# hours, so do the hr/min/sec part trivially
$dh = $h2-$h1;
$dmn = $mn2-$mn1;
$ds = $s2-$s1;
}
return [ $dy,$dm,$dw,$dd,$dh,$dmn,$ds ];
}
# Add (or subtract) a Date::Manip::Delta to this date.
#
# $subtract is 0 (add), 1 (subtract) or 2 (inverse subtraction; see
# __calc_date_delta).  Returns a new Date::Manip::Date object; on
# failure the returned object's 'err' field is set.
sub _calc_date_delta {
  my($self,$delta,$subtract) = @_;
  my $ret = $self->new_date();

  if ($$self{'err'}  ||  ! $$self{'data'}{'set'}) {
    $$ret{'err'} = '[calc] Date object invalid';
    return $ret;
  }
  if ($$delta{'err'}) {
    $$ret{'err'} = '[calc] Delta object invalid';
    return $ret;
  }

  $subtract = 0  if (! $subtract);

  # Gather everything the low-level routine needs.
  my @del      = @{ $$delta{'data'}{'delta'} };
  my @dat      = @{ $$self{'data'}{'date'} };
  my $business = $$delta{'data'}{'business'};
  my $tz       = $$self{'data'}{'tz'};
  my $isdst    = $$self{'data'}{'isdst'};

  my($err,$date2,$offset,$abbrev);
  ($err,$date2,$offset,$isdst,$abbrev) =
    $self->__calc_date_delta([@dat],[@del],$subtract,$business,$tz,$isdst);

  if ($err) {
    $$ret{'err'} = '[calc] Unable to perform calculation';
  } else {
    # Store the result directly in the new object.
    $$ret{'data'}{'set'}    = 1;
    $$ret{'data'}{'date'}   = $date2;
    $$ret{'data'}{'tz'}     = $tz;
    $$ret{'data'}{'isdst'}  = $isdst;
    $$ret{'data'}{'offset'} = $offset;
    $$ret{'data'}{'abb'}    = $abbrev;
  }
  return $ret;
}
# Low-level date + delta calculation.
#
# $delta is [dy,dm,dw,dd,dh,dmn,ds]; $subtract is 0 (add), 1 (negate
# the delta and add), or 2 (find DATE2 such that DATE2 + DELTA = DATE).
# $business selects business-mode calculations.  Returns
# ($err,$date2,$offset,$isdst,$abbrev); $err true on failure.
sub __calc_date_delta {
my($self,$date,$delta,$subtract,$business,$tz,$isdst) = @_;
my ($dy,$dm,$dw,$dd,$dh,$dmn,$ds) = @$delta;
my @date = @$date;
my ($err,$date2,$offset,$abbrev);
# In business mode, daylight saving time is ignored, so days are
# of a constant, known length, so they'll be done in the exact
# function. Otherwise, they'll be done in the approximate function.
#
# Also in business mode, if $subtract = 2, then the starting date
# must be a business date or an error occurs.
my($dd_exact,$dd_approx);
if ($business) {
$dd_exact = $dd;
$dd_approx = 0;
if ($subtract == 2 && ! $self->__is_business_day($date,1)) {
return (1);
}
} else {
$dd_exact = 0;
$dd_approx = $dd;
}
if ($subtract == 2 && ($dy || $dm || $dw || $dd_approx)) {
# For subtract=2:
# DATE = RET + DELTA
#
# The delta consisists of an approximate part (which is added first)
# and an exact part (added second):
# DATE = RET + DELTA(approx) + DELTA(exact)
# DATE = RET' + DELTA(exact)
# where RET' = RET + DELTA(approx)
#
# For an exact delta, subtract==2 and subtract==1 are equivalent,
# so this can be written:
# DATE - DELTA(exact) = RET'
#
# So the inverse subtract only needs include the approximate
# portion of the delta.
($err,$date2,$offset,$isdst,$abbrev) =
$self->__calc_date_delta_exact([@date],[-1*$dd_exact,-1*$dh,-1*$dmn,-1*$ds],
$business,$tz,$isdst);
($err,$date2,$offset,$isdst,$abbrev) =
$self->__calc_date_delta_inverse($date2,[$dy,$dm,$dw,$dd_approx],
$business,$tz,$isdst)
if (! $err);
} else {
# We'll add the approximate part, followed by the exact part.
# After the approximate part, we need to make sure we're on
# a valid business day in business mode.
($dy,$dm,$dw,$dd_exact,$dd_approx,$dh,$dmn,$ds) =
map { -1*$_ } ($dy,$dm,$dw,$dd_exact,$dd_approx,$dh,$dmn,$ds)
if ($subtract);
# Note: this autovivifies $date2 as a new array reference.
@$date2 = @date;
if ($dy || $dm || $dw || $dd) {
($err,$date2,$offset,$isdst,$abbrev) =
$self->__calc_date_delta_approx($date2,[$dy,$dm,$dw,$dd_approx],
$business,$tz,$isdst);
} elsif ($business) {
$date2 = $self->__nextprev_business_day(0,0,1,$date2);
}
($err,$date2,$offset,$isdst,$abbrev) =
$self->__calc_date_delta_exact($date2,[$dd_exact,$dh,$dmn,$ds],
$business,$tz,$isdst)
if (! $err && ($dd_exact || $dh || $dmn || $ds));
}
return($err,$date2,$offset,$isdst,$abbrev);
}
# Do the inverse part of a calculation: find DATE2 such that
# DATE2 + DELTA = DATE1 (approximate fields only).
#
# $delta = [$dy,$dm,$dw,$dd]
#
# Because adding an approximate delta is not exactly invertible (e.g.
# month lengths vary), this starts from DATE1 - DELTA and then walks
# day-by-day until adding DELTA reproduces DATE1, or fails with (1)
# if no such date exists.
#
# Returns (1) on failure, or (0,$date2,$off,$isdst,$abb).
sub __calc_date_delta_inverse {
   my($self,$date,$delta,$business,$tz,$isdst) = @_;
   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};
   # NOTE: this outer @date2 is distinct from the lexical $date2
   # arrayrefs declared inside each branch below.
   my @date2;
   # Given: DATE1, DELTA
   # Find: DATE2
   # where DATE2 + DELTA = DATE1
   #
   # Start with:
   # DATE2 = DATE1 - DELTA
   #
   # if (DATE2+DELTA < DATE1)
   # while (1)
   # DATE2 = DATE2 + 1 day
   # if DATE2+DELTA < DATE1
   # next
   # elsif DATE2+DELTA > DATE1
   # return ERROR
   # else
   # return DATE2
   # done
   #
   # elsif (DATE2+DELTA > DATE1)
   # while (1)
   # DATE2 = DATE2 - 1 day
   # if DATE2+DELTA > DATE1
   # next
   # elsif DATE2+DELTA < DATE1
   # return ERROR
   # else
   # return DATE2
   # done
   #
   # else
   # return DATE2
   if ($business) {
      my $date1 = $date;
      my ($err,$date2,$off,$isd,$abb,@del,$tmp,$cmp);
      # Negate the delta to get the initial guess DATE2 = DATE1 - DELTA.
      @del = map { $_*-1 } @$delta;
      ($err,$date2,$off,$isd,$abb) =
        $self->__calc_date_delta_approx($date,[@del],$business,$tz,$isdst);
      ($err,$tmp,$off,$isd,$abb) =
        $self->__calc_date_delta_approx($date2,$delta,$business,$tz,$isdst);
      $cmp = $self->_cmp_date($tmp,$date1);
      if ($cmp < 0) {
         # Guess too early: step forward one business day at a time.
         while (1) {
            $date2 = $self->__nextprev_business_day(0,1,0,$date2);
            ($err,$tmp,$off,$isd,$abb) =
              $self->__calc_date_delta_approx($date2,$delta,$business,$tz,$isdst);
            $cmp = $self->_cmp_date($tmp,$date1);
            if ($cmp < 0) {
               next;
            } elsif ($cmp > 0) {
               # We stepped over DATE1: no inverse exists.
               return (1);
            } else {
               last;
            }
         }
      } elsif ($cmp > 0) {
         # Guess too late: step backward one business day at a time.
         while (1) {
            $date2 = $self->__nextprev_business_day(1,1,0,$date2);
            ($err,$tmp,$off,$isd,$abb) =
              $self->__calc_date_delta_approx($date2,$delta,$business,$tz,$isdst);
            $cmp = $self->_cmp_date($tmp,$date1);
            if ($cmp > 0) {
               next;
            } elsif ($cmp < 0) {
               return (1);
            } else {
               last;
            }
         }
      }
      @date2 = @$date2;
   } else {
      my @tmp = @$date[0..2]; # [y,m,d]
      my @hms = @$date[3..5]; # [h,m,s]
      my $date1 = [@tmp];
      # Initial guess: DATE1 - DELTA (year/month/week/day fields only).
      my $date2 = $dmb->_calc_date_ymwd($date1,$delta,1);
      my $tmp = $dmb->_calc_date_ymwd($date2,$delta);
      my $cmp = $self->_cmp_date($tmp,$date1);
      if ($cmp < 0) {
         while (1) {
            $date2 = $dmb->calc_date_days($date2,1);
            $tmp = $dmb->_calc_date_ymwd($date2,$delta);
            $cmp = $self->_cmp_date($tmp,$date1);
            if ($cmp < 0) {
               next;
            } elsif ($cmp > 0) {
               return (1);
            } else {
               last;
            }
         }
      } elsif ($cmp > 0) {
         while (1) {
            $date2 = $dmb->calc_date_days($date2,-1);
            $tmp = $dmb->_calc_date_ymwd($date2,$delta);
            $cmp = $self->_cmp_date($tmp,$date1);
            if ($cmp > 0) {
               next;
            } elsif ($cmp < 0) {
               return (1);
            } else {
               last;
            }
         }
      }
      # Reattach the (unchanged) time-of-day fields.
      @date2 = (@$date2,@hms);
   }
   # Make sure DATE2 is valid (within DST constraints) and
   # return it.
   my($date2,$abb,$off,$err);
   ($date2,$off,$isdst,$abb) = $self->_calc_date_check_dst([@date2],$tz,$isdst,0);
   return (1) if (! defined($date2));
   return (0,$date2,$off,$isdst,$abb);
}
# Compare two date arrays by their calendar portion only (year, month,
# day); any time-of-day fields beyond index 2 are ignored.
#
# Returns -1, 0, or 1 as $date0 sorts before, equal to, or after $date1.
sub _cmp_date {
   my($self,$date0,$date1) = @_;
   foreach my $idx (0 .. 2) {
      my $order = $$date0[$idx] <=> $$date1[$idx];
      return $order if ($order);
   }
   return 0;
}
# Do the approximate part of a calculation (year/month/week/day fields,
# whose length in seconds varies).
#
#    $date  : [y,m,d,h,mn,s]
#    $delta : [$dy,$dm,$dw,$dd]  ($dd is 0 in business mode)
#
# Returns (0,$date,$off,$isdst,$abb).
sub __calc_date_delta_approx {
   my($self,$date,$delta,$business,$tz,$isdst) = @_;
   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};
   my($y,$m,$d,$h,$mn,$s) = @$date;
   my($dy,$dm,$dw,$dd) = @$delta;
   #
   # Do the year/month part.
   #
   # If we are past the last day of a month, move the date back to
   # the last day of the month. i.e. Jan 31 + 1 month = Feb 28.
   #
   $y += $dy if ($dy);
   $dmb->_mod_add(-12,$dm,\$m,\$y) # -12 means 1-12 instead of 0-11
     if ($dm);
   my $dim = $dmb->days_in_month($y,$m);
   $d = $dim if ($d > $dim);
   #
   # Do the week part.
   #
   # The week is treated as 7 days for both business and non-business
   # calculations.
   #
   # In a business calculation, make sure we're on a business date.
   #
   if ($business) {
      ($y,$m,$d) = @{ $dmb->calc_date_days([$y,$m,$d],$dw*7) } if ($dw);
      ($y,$m,$d,$h,$mn,$s) =
        @{ $self->__nextprev_business_day(0,0,1,[$y,$m,$d,$h,$mn,$s]) };
   } else {
      $dd += $dw*7;
   }
   #
   # Now do the day part. $dd is always 0 in business calculations.
   #
   if ($dd) {
      ($y,$m,$d) = @{ $dmb->calc_date_days([$y,$m,$d],$dd) };
   }
   #
   # At this point, we need to make sure that we're a valid date
   # (within the constraints of DST).
   #
   # If it is not valid in this offset, try the other one. If neither
   # works, then we want the the date to be 24 hours later than the
   # previous day at this time (if $dd > 0) or 24 hours earlier than
   # the next day at this time (if $dd < 0). We'll use the 24 hour
   # definition even for business days, but then we'll double check
   # that the resulting date is a business date.
   #
   # $force tells _calc_date_check_dst which direction to nudge an
   # invalid (DST-skipped) time: +1 if we moved forward, -1 otherwise.
   my $force = ( ($dd > 0 || $dw > 0 || $dm > 0 || $dy > 0) ? 1 : -1 );
   my($off,$abb);
   ($date,$off,$isdst,$abb) =
     $self->_calc_date_check_dst([$y,$m,$d,$h,$mn,$s],$tz,$isdst,$force);
   return (0,$date,$off,$isdst,$abb);
}
# Do the exact part of a calculation (day/hour/minute/second fields of
# fixed, known length).
#
#    $date  : [y,m,d,h,mn,s]
#    $delta : [$dd,$dh,$dmn,$ds]  ($dd is non-zero only in business mode)
#
# In business mode the day length is the configured business-day length;
# otherwise the calculation is done in GMT so DST transitions are handled
# by the conversions.
#
# Returns ($err,$date,$offset,$isdst,$abbrev).
sub __calc_date_delta_exact {
   my($self,$date,$delta,$business,$tz,$isdst) = @_;
   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};
   if ($business) {
      # Simplify hours/minutes/seconds where the day length is defined
      # by the start/end of the business day.
      my ($dd,$dh,$dmn,$ds) = @$delta;
      my ($y,$m,$d,$h,$mn,$s)= @$date;
      my ($hbeg,$mbeg,$sbeg) = @{ $$dmb{'data'}{'calc'}{'workdaybeg'} };
      my ($hend,$mend,$send) = @{ $$dmb{'data'}{'calc'}{'workdayend'} };
      my $bdlen = $$dmb{'data'}{'len'}{'bdlength'};
      # Normalize the h/m/s part of the delta into whole business days
      # plus a remainder smaller than one business day.
      no integer;
      my $tmp;
      $ds += $dh*3600 + $dmn*60;
      $tmp = int($ds/$bdlen);
      $dd += $tmp;
      $ds -= $tmp*$bdlen;
      $dh = int($ds/3600);
      $ds -= $dh*3600;
      $dmn = int($ds/60);
      $ds -= $dmn*60;
      use integer;
      if ($dd) {
         my $prev = 0;
         if ($dd < 1) {
            $prev = 1;
            $dd *= -1;
         }
         ($y,$m,$d,$h,$mn,$s) =
           @{ $self->__nextprev_business_day($prev,$dd,0,[$y,$m,$d,$h,$mn,$s]) };
      }
      # At this point, we're adding less than a day for the
      # hours/minutes/seconds part AND we know that the current
      # day is during business hours.
      #
      # We'll add them (without affecting days... we'll need to
      # test things by hand to make sure we should or shouldn't
      # do that.
      $dmb->_mod_add(60,$ds,\$s,\$mn);
      $dmb->_mod_add(60,$dmn,\$mn,\$h);
      $h += $dh;
      # Note: it's possible that $h > 23 at this point or $h < 0
      if ($h > $hend ||
          ($h == $hend && $mn > $mend) ||
          ($h == $hend && $mn == $mend && $s >= $send)) {
         # We've gone past (or exactly reached) the end of the business
         # day: carry the overshoot into the start of the next business
         # day.
         my $t2 = $dmb->calc_time_time([$h,$mn,$s],[$hend,$mend,$send],1);
         while (1) {
            ($y,$m,$d) = @{ $dmb->calc_date_days([$y,$m,$d],1) };
            last if ($self->__is_business_day([$y,$m,$d,$h,$mn,$s]));
         }
         ($h,$mn,$s) = @{ $dmb->calc_time_time([$hbeg,$mbeg,$sbeg],$t2) };
      } elsif ($h < $hbeg ||
               ($h == $hbeg && $mn < $mbeg) ||
               ($h == $hbeg && $mn == $mbeg && $s < $sbeg)) {
         # We've gone back past the start of the business day: borrow the
         # shortfall from the end of the previous business day.
         my $t2 = $dmb->calc_time_time([$hbeg,$mbeg,$sbeg],[$h,$mn,$s],1);
         while (1) {
            ($y,$m,$d) = @{ $dmb->calc_date_days([$y,$m,$d],-1) };
            last if ($self->__is_business_day([$y,$m,$d,$h,$mn,$s]));
         }
         ($h,$mn,$s) = @{ $dmb->calc_time_time([$hend,$mend,$send],$t2,1) };
      }
      # Now make sure that the date is valid within DST constraints.
      my $force = ( ($dd > 0 || $dh > 0 || $dmn > 0 || $ds > 0) ? 1 : -1 );
      my($off,$abb);
      ($date,$off,$isdst,$abb) =
        $self->_calc_date_check_dst([$y,$m,$d,$h,$mn,$s],$tz,$isdst,$force);
      return (0,$date,$off,$isdst,$abb);
   } else {
      # Convert to GMT
      # Do the calculation
      # Convert back
      my ($dd,$dh,$dm,$ds) = @$delta; # $dd is always 0
      my $del = [$dh,$dm,$ds];
      my ($err,$offset,$abbrev);
      ($err,$date,$offset,$isdst,$abbrev) =
        $dmt->_convert('__calc_date_delta_exact',$date,$tz,'GMT',$isdst);
      $date = $dmb->calc_date_time($date,$del,0);
      ($err,$date,$offset,$isdst,$abbrev) =
        $dmt->_convert('__calc_date_delta_exact',$date,'GMT',$tz,$isdst);
      return($err,$date,$offset,$isdst,$abbrev);
   }
}
# This checks to see which time (STD or DST) a date is in. It checks
# $isdst first, and the other value (1-$isdst) second.
#
# If the date is found in either time, it is returned.
#
# If the date is NOT found, then we got here by adding/subtracting 1 day
# from a different value, and we've obtained an invalid value. In this
# case, if $force = 0, then return nothing.
#
# If $force = 1, then go to the previous day and add 24 hours. If force
# is -1, then go to the next day and subtract 24 hours.
#
# Returns:
# ($date,$off,$isdst,$abb)
# or
# (undef)
#
sub _calc_date_check_dst {
   my($self,$date,$tz,$isdst,$force) = @_;
   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};
   my($abb,$off,$err);
   # Try the date as is in both ISDST and 1-ISDST times
   my $per = $dmt->date_period($date,$tz,1,$isdst);
   if ($per) {
      $abb = $$per[4];
      $off = $$per[3];
      return($date,$off,$isdst,$abb);
   }
   $per = $dmt->date_period($date,$tz,1,1-$isdst);
   if ($per) {
      # Found in the opposite period; flip the DST flag we report.
      $isdst = 1-$isdst;
      $abb = $$per[4];
      $off = $$per[3];
      return($date,$off,$isdst,$abb);
   }
   # If we made it here, the date is invalid in this timezone.
   # Either return undef, or add/subtract a day from the date
   # and find out what time period we're in (all we care about
   # is the ISDST value).
   if (! $force) {
      return(undef);
   }
   my($dd);
   if ($force > 0) {
      $date = $dmb->calc_date_days($date,-1);
      $dd = 1;
   } else {
      $date = $dmb->calc_date_days($date,+1);
      $dd = -1;
   }
   $per = $dmt->date_period($date,$tz,1,$isdst);
   $isdst = (1-$isdst) if (! $per);
   # Now, convert it to GMT, add/subtract 24 hours, and convert
   # it back.
   ($err,$date,$off,$isdst,$abb) = $dmt->convert_to_gmt($date,$tz,$isdst);
   $date = $dmb->calc_date_days($date,$dd);
   ($err,$date,$off,$isdst,$abb) = $dmt->convert_from_gmt($date,$tz);
   return($date,$off,$isdst,$abb);
}
########################################################################
# MISC METHODS
# Get or set the date as seconds since the epoch (1970-01-01 00:00:00 GMT).
#
# With an argument: set this object to the given epoch time (converted
# from GMT into the object's zone) and return 0, or 1 if the conversion
# fails.
# Without an argument: return the epoch seconds for the stored date.
sub secs_since_1970_GMT {
   my($self,$secs) = @_;
   my $tzobj = $$self{'tz'};
   my $base  = $$tzobj{'base'};

   if (! defined $secs) {
      my @gmt = $self->value('gmt');
      return $base->secs_since_1970(\@gmt);
   }

   my $date = $base->secs_since_1970($secs);
   my $err;
   ($err,$date) = $tzobj->convert_from_gmt($date);
   return 1 if ($err);
   $self->set('date',$date);
   return 0;
}
# Return the week-of-year number (1-53) for the stored date, or 0 if the
# day falls in the last week of the previous year.
#
#    $first : optional day-of-week (1-7) that weeks start on; defaults
#             to the 'firstday' config setting.
#
# The 'jan1week1' config setting controls whether week 1 is the week
# containing Jan 1 or the week containing Jan 4 (ISO 8601 style).
sub week_of_year {
   my($self,$first) = @_;
   if ($$self{'err'} || ! $$self{'data'}{'set'}) {
      warn "WARNING: [week_of_year] Object must contain a valid date\n";
      return undef;
   }
   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};
   my $date = $$self{'data'}{'date'};
   my $y = $$date[0];
   my($day,$dow,$doy,$f);
   $doy = $dmb->day_of_year($date);
   # The date in January which must belong to the first week, and
   # it's DayOfWeek.
   if ($dmb->_config('jan1week1')) {
      $day=1;
   } else {
      $day=4;
   }
   $dow = $dmb->day_of_week([$y,1,$day]);
   # The start DayOfWeek. If $first is passed in, use it. Otherwise,
   # use FirstDay.
   if (! $first) {
      $first = $dmb->_config('firstday');
   }
   # Find the pseudo-date of the first day of the first week (it may
   # be negative meaning it occurs last year).
   $first -= 7 if ($first > $dow);
   $day -= ($dow-$first);
   return 0 if ($day>$doy); # Day is in last week of previous year
   return (($doy-$day)/7 + 1);
}
# Test whether the date was specified completely.
#
# With no $field: returns 1 if none of the month/day/hour/minute/second
# fields were defaulted, 0 otherwise.
# With $field ('m','d','h','mn','s'): returns 1 if that field was given
# explicitly, 0 otherwise (unknown fields also return 0).
sub complete {
   my($self,$field) = @_;

   if ($$self{'err'} || ! $$self{'data'}{'set'}) {
      warn "WARNING: [complete] Object must contain a valid date\n";
      return undef;
   }

   my $def = $$self{'data'}{'def'};

   if (! $field) {
      # Complete only if every non-year field was explicit.
      foreach my $i (1 .. 5) {
         return 0 if ($$def[$i]);
      }
      return 1;
   }

   # Map a field name to its slot in the 'def' array.
   my %index_of = ('m' => 1, 'd' => 2, 'h' => 3, 'mn' => 4, 's' => 5);
   if (exists $index_of{$field}) {
      return 1 if (! $$def[$index_of{$field}]);
   }
   return 0;
}
# Convert the stored date to a different timezone in place.
#
# Returns 0 on success. Returns 1 (and sets $$self{'err'}) if the object
# holds no valid date, the zone cannot be determined, or the conversion
# fails.
sub convert {
   my($self,$zone) = @_;

   if ($$self{'err'} || ! $$self{'data'}{'set'}) {
      warn "WARNING: [convert] Object must contain a valid date\n";
      return 1;
   }

   my $dmt      = $$self{'tz'};
   my $zonename = $dmt->_zone($zone);
   if (! $zonename) {
      $$self{'err'} = "[convert] Unable to determine timezone: $zone";
      return 1;
   }

   # Convert before re-initializing so the current values are still
   # available to the conversion.
   my($err,$date,$off,$isdst,$abb) =
     $dmt->convert($$self{'data'}{'date'},
                   $$self{'data'}{'tz'},
                   $zonename,
                   $$self{'data'}{'isdst'});
   if ($err) {
      $$self{'err'} = '[convert] Unable to convert date to new timezone';
      return 1;
   }

   $self->_init();
   $$self{'data'}{'set'}    = 1;
   $$self{'data'}{'date'}   = $date;
   $$self{'data'}{'tz'}     = $zonename;
   $$self{'data'}{'isdst'}  = $isdst;
   $$self{'data'}{'offset'} = $off;
   $$self{'data'}{'abb'}    = $abb;
   return 0;
}
########################################################################
# BUSINESS DAY METHODS
# Return 1 if the stored date is a business day, 0 otherwise.
# If $checktime is true, the time of day must also fall within working
# hours. Returns undef (with a warning) if no valid date is stored.
sub is_business_day {
   my($self,$checktime) = @_;
   if ($$self{'err'} || ! $$self{'data'}{'set'}) {
      warn "WARNING: [is_business_day] Object must contain a valid date\n";
      return undef;
   }
   return $self->__is_business_day($$self{'data'}{'date'},$checktime);
}
# Internal test for whether $date ([y,m,d,h,mn,s]) is a business day.
# If $checktime is true, the time must also be within working hours.
# Returns 1 or 0.
sub __is_business_day {
   my($self,$date,$checktime) = @_;
   my($y,$m,$d,$h,$mn,$s) = @$date;
   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};
   # Return 0 if it's a weekend.
   my $dow = $dmb->day_of_week([$y,$m,$d]);
   return 0 if ($dow < $dmb->_config('workweekbeg') ||
                $dow > $dmb->_config('workweekend'));
   # Return 0 if it's not during work hours (and we're checking
   # for that).
   if ($checktime &&
       ! $dmb->_config('workday24hr')) {
      # NOTE(review): string comparison of joined times — presumably
      # join('hms',...) yields fixed-width zero-padded strings; verify.
      my $t = $dmb->join('hms',[$h,$mn,$s]);
      my $t0 = $dmb->join('hms',$$dmb{'data'}{'calc'}{'workdaybeg'});
      my $t1 = $dmb->join('hms',$$dmb{'data'}{'calc'}{'workdayend'});
      return 0 if ($t lt $t0 || $t gt $t1);
   }
   # Check for holidays
   # The 'init_holidays' flag is set while the holiday list itself is
   # being built (see _holidays_year); skipping the call here avoids
   # recursing back into holiday initialization.
   $self->_holidays($y,2) unless ($$dmb{'data'}{'init_holidays'});
   # $y+0 etc. normalize zero-padded values to plain numeric keys.
   return 0 if (exists $$dmb{'data'}{'holidays'}{'dates'} &&
                exists $$dmb{'data'}{'holidays'}{'dates'}{$y+0} &&
                exists $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0} &&
                exists $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0});
   return 1;
}
# Return a list of date objects, one per holiday in year $y (defaulting
# to the current year), in chronological order.
sub list_holidays {
   my($self,$y) = @_;
   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};
   $y = $dmt->_now('y',1) if (! $y);
   # Make sure holidays for this year (and adjacent ones) are computed.
   $self->_holidays($y,2);
   my @ret;
   # Walk the {year}{month}{day} holiday hash in numeric order.
   my @m = sort { $a <=> $b } keys %{ $$dmb{'data'}{'holidays'}{'dates'}{$y+0} };
   foreach my $m (@m) {
      my @d = sort { $a <=> $b } keys %{ $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m} };
      foreach my $d (@d) {
         my $hol = $self->new_date();
         $hol->set('date',[$y,$m,$d,0,0,0]);
         push(@ret,$hol);
      }
   }
   return @ret;
}
# If the stored date is a holiday, return its name(s): all names in list
# context, the first name in scalar context (or ''/() if the holiday has
# no names recorded). Returns undef if the date is not a holiday.
sub holiday {
   my($self) = @_;
   if ($$self{'err'} || ! $$self{'data'}{'set'}) {
      warn "WARNING: [holiday] Object must contain a valid date\n";
      return undef;
   }
   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};
   my($y,$m,$d) = @{ $$self{'data'}{'date'} };
   # Make sure holidays for this year are computed.
   $self->_holidays($y,2);
   if (exists $$dmb{'data'}{'holidays'}{'dates'}{$y+0} &&
       exists $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0} &&
       exists $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0}) {
      my @tmp = @{ $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0} };
      if (wantarray) {
         return () if (! @tmp);
         return @tmp;
      } else {
         return '' if (! @tmp);
         return $tmp[0];
      }
   }
   return undef;
}
# Move the stored date forward $off business days (see
# __nextprev_business_day for the $checktime semantics).
sub next_business_day {
   my($self,$off,$checktime) = @_;
   if ($$self{'err'} || ! $$self{'data'}{'set'}) {
      warn "WARNING: [next_business_day] Object must contain a valid date\n";
      return undef;
   }
   my $new = $self->__nextprev_business_day(0,$off,$checktime,
                                            $$self{'data'}{'date'});
   $self->set('date',$new);
}
# Move the stored date backward $off business days (see
# __nextprev_business_day for the $checktime semantics).
sub prev_business_day {
   my($self,$off,$checktime) = @_;
   if ($$self{'err'} || ! $$self{'data'}{'set'}) {
      warn "WARNING: [prev_business_day] Object must contain a valid date\n";
      return undef;
   }
   my $new = $self->__nextprev_business_day(1,$off,$checktime,
                                            $$self{'data'}{'date'});
   $self->set('date',$new);
}
# Move a date $off business days into the future ($prev false) or past
# ($prev true).
#
# First the starting date is normalized forward to a business day
# (note: forward even when $prev is set); if $checktime is true, the
# time of day must also be within working hours, and normalization
# advances to the start of the next working period instead of by whole
# days. Then $off whole business days are stepped off.
#
# Returns the new [y,m,d,h,mn,s] arrayref.
sub __nextprev_business_day {
   my($self,$prev,$off,$checktime,$date) = @_;
   my($y,$m,$d,$h,$mn,$s) = @$date;
   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};
   # Get day 0
   while (! $self->__is_business_day([$y,$m,$d,$h,$mn,$s],$checktime)) {
      if ($checktime) {
         ($y,$m,$d,$h,$mn,$s) =
           @{ $self->__next_prev([$y,$m,$d,$h,$mn,$s],1,undef,0,
                                 $$dmb{'data'}{'calc'}{'workdaybeg'}) };
      } else {
         # Move forward 1 day
         ($y,$m,$d) = @{ $dmb->calc_date_days([$y,$m,$d],1) };
      }
   }
   # Move $off days into the future/past
   while ($off > 0) {
      while (1) {
         if ($prev) {
            # Move backward 1 day
            ($y,$m,$d) = @{ $dmb->calc_date_days([$y,$m,$d],-1) };
         } else {
            # Move forward 1 day
            ($y,$m,$d) = @{ $dmb->calc_date_days([$y,$m,$d],1) };
         }
         last if ($self->__is_business_day([$y,$m,$d,$h,$mn,$s]));
      }
      $off--;
   }
   return [$y,$m,$d,$h,$mn,$s];
}
# Move the stored date to the nearest business day. $tomorrow controls
# which direction wins a tie (see __nearest_business_day); if the date
# is already a business day it is left untouched.
sub nearest_business_day {
   my($self,$tomorrow) = @_;
   if ($$self{'err'} || ! $$self{'data'}{'set'}) {
      warn "WARNING: [nearest_business_day] Object must contain a valid date\n";
      return undef;
   }
   # __nearest_business_day returns undef when the current date is
   # already a business day and nothing needs to change.
   my $new = $self->__nearest_business_day($tomorrow,$$self{'data'}{'date'});
   return if (! defined($new));
   $self->set('date',$new);
}
# Find the business day nearest to $date by searching alternately in
# both directions, one day at a time. $tomorrow (defaulting to the
# 'tomorrowfirst' config) decides which direction is tried first and
# therefore wins ties.
#
# Returns the new [y,m,d,h,mn,s] arrayref, or undef if $date is already
# a business day and needs no change.
sub __nearest_business_day {
   my($self,$tomorrow,$date) = @_;
   # We're done if this is a business day
   return undef if ($self->__is_business_day($date,0));
   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};
   $tomorrow = $dmb->_config('tomorrowfirst') if (! defined $tomorrow);
   # $a1 is the preferred search direction, $a2 the other (+1/-1 days).
   my($a1,$a2);
   if ($tomorrow) {
      ($a1,$a2) = (1,-1);
   } else {
      ($a1,$a2) = (-1,1);
   }
   my ($y,$m,$d,$h,$mn,$s) = @$date;
   my ($y1,$m1,$d1) = ($y,$m,$d);
   my ($y2,$m2,$d2) = ($y,$m,$d);
   # Expand the search one day in each direction per iteration until a
   # business day is found; the preferred direction is checked first.
   while (1) {
      ($y1,$m1,$d1) = @{ $dmb->calc_date_days([$y1,$m1,$d1],$a1) };
      if ($self->__is_business_day([$y1,$m1,$d1,$h,$mn,$s],0)) {
         ($y,$m,$d) = ($y1,$m1,$d1);
         last;
      }
      ($y2,$m2,$d2) = @{ $dmb->calc_date_days([$y2,$m2,$d2],$a2) };
      if ($self->__is_business_day([$y2,$m2,$d2,$h,$mn,$s],0)) {
         ($y,$m,$d) = ($y2,$m2,$d2);
         last;
      }
   }
   return [$y,$m,$d,$h,$mn,$s];
}
# We need to create all the objects which will be used to determine holidays.
# By doing this once only, a lot of time is saved.
#
# Each config entry is either a parseable date string or a recurrence.
# Fully-specified dates (with a year) are stored directly in the
# {holidays}{dates}{y}{m}{d} hash; everything else is kept in
# {holidays}{hols} as (string-or-recurrence, name) pairs for later
# per-year expansion by _holidays_year.
sub _holiday_objs {
   my($self) = @_;
   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};
   $$dmb{'data'}{'holidays'}{'init'} = 1;
   # Go through all of the strings from the config file.
   #
   my (@str) = @{ $$dmb{'data'}{'sections'}{'holidays'} };
   $$dmb{'data'}{'holidays'}{'hols'} = [];
   while (@str) {
      my($string) = shift(@str);
      my($name) = shift(@str);
      # If $string is a parse_date string AND it contains a year, we'll
      # store the date as a holiday, but not store the holiday description
      # so it never needs to be re-parsed.
      my $date = $self->new_date();
      my $err = $date->parse_date($string);
      if (! $err) {
         if ($$date{'data'}{'def'}[0] eq '') {
            # No explicit year: keep the string for yearly re-parsing.
            push(@{ $$dmb{'data'}{'holidays'}{'hols'} },$string,$name);
         } else {
            my($y,$m,$d) = @{ $$date{'data'}{'date'} };
            if (exists $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0}) {
               push @{ $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0} },$name;
            } else {
               $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0} = [ $name ];
            }
         }
         next;
      }
      # Clear the parse error before trying the string as a recurrence.
      $date->err(1);
      # If $string is a recurrence, we'll create a Recur object (which we
      # only have to do once) and store it.
      my $recur = $self->new_recur();
      $recur->_holiday();
      $err = $recur->parse($string);
      if (! $err) {
         push(@{ $$dmb{'data'}{'holidays'}{'hols'} },$recur,$name);
         next;
      }
      $recur->err(1);
      warn "WARNING: invalid holiday description: $string\n";
   }
}
# Make sure that holidays are set for a given year.
#
# $$dmb{'data'}{'holidays'}{'years'}{$year} = 0 nothing done
# 1 this year done
# 2 both adjacent years done
#
# $level requests how much must be done; work already recorded at an
# equal or higher level is skipped.
sub _holidays {
   my($self,$year,$level) = @_;
   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};
   # Parse the holiday config into objects once, lazily.
   self_init: $self->_holiday_objs($year) if (! $$dmb{'data'}{'holidays'}{'init'});
   $$dmb{'data'}{'holidays'}{'years'}{$year} = 0
     if (! exists $$dmb{'data'}{'holidays'}{'years'}{$year});
   my $curr_level = $$dmb{'data'}{'holidays'}{'years'}{$year};
   return if ($curr_level >= $level);
   $$dmb{'data'}{'holidays'}{'years'}{$year} = $level;
   # Parse the year
   if ($curr_level == 0) {
      $self->_holidays_year($year);
      return if ($level == 1);
   }
   # Parse the years around it.
   $self->_holidays($year-1,1);
   $self->_holidays($year+1,1);
}
# Compute the concrete holiday dates for year $y from the stored
# strings/recurrences and record them in {holidays}{dates}{y}{m}{d}.
# Recurrences are evaluated over Dec 1 of the previous year through
# Feb 1 of the next year so year-straddling holidays are caught.
sub _holidays_year {
   my($self,$y) = @_;
   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};
   # Get the objects and set them to use the new year. Also, get the
   # range for recurrences.
   my @hol = @{ $$dmb{'data'}{'holidays'}{'hols'} };
   my $beg = $self->new_date();
   $beg->set('date',[$y-1,12,1,0,0,0]);
   my $end = $self->new_date();
   $end->set('date',[$y+1,2,1,0,0,0]);
   # Get the date for each holiday.
   # Flag that we are mid-initialization so __is_business_day does not
   # recurse back into holiday computation.
   $$dmb{'data'}{'init_holidays'} = 1;
   while (@hol) {
      my($obj) = shift(@hol);
      my($name) = shift(@hol);
      # Temporarily pin "now" to Jan 1 of $y so relative parses resolve
      # into the target year.
      $$dmb{'data'}{'tmpnow'} = [$y,1,1,0,0,0];
      if (ref($obj)) {
         # It's a recurrence
         # If the recurrence has a date range built in, we won't override it.
         # Otherwise, we'll only look for dates in this year.
         if ($obj->start() && $obj->end()) {
            $obj->dates();
         } else {
            $obj->dates($beg,$end);
         }
         foreach my $i (keys %{ $$obj{'data'}{'dates'} }) {
            # 'saved' marks dates already recorded in a previous year's pass.
            next if ($$obj{'data'}{'saved'}{$i});
            my $date = $$obj{'data'}{'dates'}{$i};
            my($y,$m,$d) = @{ $$date{'data'}{'date'} };
            if (exists $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0}) {
               push @{ $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0} },$name;
            } else {
               $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0} = [$name];
            }
            $$obj{'data'}{'saved'}{$i} = 1;
         }
      } else {
         # It's a yearless date string: re-parse it in the target year.
         my $date = $self->new_date();
         $date->parse_date($obj);
         my($y,$m,$d) = @{ $$date{'data'}{'date'} };
         if (exists $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0}) {
            push @{ $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0} },$name;
         } else {
            $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0} = [$name];
         }
      }
      $$dmb{'data'}{'tmpnow'} = [];
   }
   $$dmb{'data'}{'init_holidays'} = 0;
}
########################################################################
# PRINTF METHOD
# Format the stored date according to printf-style %-directives. The
# BEGIN block holds lexical lookup tables shared by the sub:
#    %pad_0  - directives zero-padded to their field width
#    %pad_sp - directives space-padded to their field width
#    %hr     - hour directives (12-hour forms are I and i)
#    %dow    - directives needing the day-of-week
#    %num    - all purely numeric directives (get the padding treatment)
BEGIN {
   my %pad_0 = map { $_,1 } qw ( Y m d H M S I j G W L U );
   my %pad_sp = map { $_,1 } qw ( y f e k i );
   my %hr = map { $_,1 } qw ( H k I i );
   my %dow = map { $_,1 } qw ( v a A w );
   my %num = map { $_,1 } qw ( Y m d H M S y f e k I i j G W L U );
   # printf(@formats) : returns the formatted string(s); a list in list
   # context, the single result for a single format in scalar context,
   # '' otherwise. Computed directive values are cached per-object in
   # $$self{'data'}{'f'} so repeated directives are formatted once.
   sub printf {
      my($self,@in) = @_;
      if ($$self{'err'} || ! $$self{'data'}{'set'}) {
         warn "WARNING: [printf] Object must contain a valid date\n";
         return undef;
      }
      my $dmt = $$self{'tz'};
      my $dmb = $$dmt{'base'};
      my($y,$m,$d,$h,$mn,$s) = @{ $$self{'data'}{'date'} };
      my(@out);
      foreach my $in (@in) {
         my $out = '';
         while ($in) {
            # A lone trailing '%' is dropped.
            last if ($in eq '%');
            # Everything up to the first '%'
            if ($in =~ s/^([^%]+)//) {
               $out .= $1;
               next;
            }
            # Extended formats: %<...>
            # These look up words (day/month names etc.) by explicit index
            # rather than from the stored date.
            if ($in =~ s/^%<([^>]+)>//) {
               my $f = $1;
               my $val;
               if ($f =~ /^a=([1-7])$/) {
                  $val = $$dmb{'data'}{'wordlist'}{'day_abb'}[$1-1];
               } elsif ($f =~ /^v=([1-7])$/) {
                  $val = $$dmb{'data'}{'wordlist'}{'day_char'}[$1-1];
               } elsif ($f =~ /^A=([1-7])$/) {
                  $val = $$dmb{'data'}{'wordlist'}{'day_name'}[$1-1];
               } elsif ($f =~ /^p=([1-2])$/) {
                  $val = $$dmb{'data'}{'wordlist'}{'ampm'}[$1-1];
               } elsif ($f =~ /^b=(0?[1-9]|1[0-2])$/) {
                  $val = $$dmb{'data'}{'wordlist'}{'month_abb'}[$1-1];
               } elsif ($f =~ /^B=(0?[1-9]|1[0-2])$/) {
                  $val = $$dmb{'data'}{'wordlist'}{'month_name'}[$1-1];
               } elsif ($f =~ /^E=(0?[1-9]|[1-4][0-9]|5[0-3])$/) {
                  $val = $$dmb{'data'}{'wordlist'}{'nth'}[$1-1];
               } else {
                  # Unrecognized extended format: emit it literally.
                  $val = '%<' . $1 . '>';
               }
               $out .= $val;
               next;
            }
            # Normals one-character formats
            $in =~ s/^%(.)//s;
            my $f = $1;
            # Use the cached value for this directive if we have one.
            if (exists $$self{'data'}{'f'}{$f}) {
               $out .= $$self{'data'}{'f'}{$f};
               next;
            }
            my ($val,$pad,$len,$dow);
            if (exists $pad_0{$f}) {
               $pad = '0';
            }
            if (exists $pad_sp{$f}) {
               $pad = ' ';
            }
            # ISO-style week/year: %G/%W use weeks starting Monday (1),
            # %L/%U use weeks starting Sunday (7).
            if ($f eq 'G' || $f eq 'W') {
               my($yy,$ww) = $dmb->_week_of_year(1,[$y,$m,$d]);
               if ($f eq 'G') {
                  $val = $yy;
                  $len = 4;
               } else {
                  $val = $ww;
                  $len = 2;
               }
            }
            if ($f eq 'L' || $f eq 'U') {
               my($yy,$ww) = $dmb->_week_of_year(7,[$y,$m,$d]);
               if ($f eq 'L') {
                  $val = $yy;
                  $len = 4;
               } else {
                  $val = $ww;
                  $len = 2;
               }
            }
            if ($f eq 'Y' || $f eq 'y') {
               $val = $y;
               $len = 4;
            }
            if ($f eq 'm' || $f eq 'f') {
               $val = $m;
               $len = 2;
            }
            if ($f eq 'd' || $f eq 'e') {
               $val = $d;
               $len = 2;
            }
            if ($f eq 'j') {
               $val = $dmb->day_of_year([$y,$m,$d]);
               $len = 3;
            }
            if (exists $hr{$f}) {
               $val = $h;
               # 12-hour clock: 0 -> 12, 13-23 -> 1-11.
               if ($f eq 'I' || $f eq 'i') {
                  $val -= 12 if ($val > 12);
                  $val = 12 if ($val == 0);
               }
               $len = 2;
            }
            if ($f eq 'M') {
               $val = $mn;
               $len = 2;
            }
            if ($f eq 'S') {
               $val = $s;
               $len = 2;
            }
            if (exists $dow{$f}) {
               $dow = $dmb->day_of_week([$y,$m,$d]);
            }
            ###
            if (exists $num{$f}) {
               # Left-pad numeric values to their field width; %y keeps
               # only the last two digits of the padded year.
               while (length($val) < $len) {
                  $val = "$pad$val";
               }
               $val = substr($val,2,2) if ($f eq 'y');
            } elsif ($f eq 'b' || $f eq 'h') {
               $val = $$dmb{'data'}{'wordlist'}{'month_abb'}[$m-1];
            } elsif ($f eq 'B') {
               $val = $$dmb{'data'}{'wordlist'}{'month_name'}[$m-1];
            } elsif ($f eq 'v') {
               $val = $$dmb{'data'}{'wordlist'}{'day_char'}[$dow-1];
            } elsif ($f eq 'a') {
               $val = $$dmb{'data'}{'wordlist'}{'day_abb'}[$dow-1];
            } elsif ($f eq 'A') {
               $val = $$dmb{'data'}{'wordlist'}{'day_name'}[$dow-1];
            } elsif ($f eq 'w') {
               $val = $dow;
            } elsif ($f eq 'p') {
               my $i = ($h >= 12 ? 1 : 0);
               $val = $$dmb{'data'}{'wordlist'}{'ampm'}[$i];
            } elsif ($f eq 'Z') {
               $val = $$self{'data'}{'abb'};
            } elsif ($f eq 'N') {
               my $off = $$self{'data'}{'offset'};
               $val = $dmb->join('offset',$off);
            } elsif ($f eq 'z') {
               # Offset without colons and with a trailing ":00" removed.
               my $off = $$self{'data'}{'offset'};
               $val = $dmb->join('offset',$off);
               $val =~ s/://g;
               $val =~ s/00$//;
            } elsif ($f eq 'E') {
               $val = $$dmb{'data'}{'wordlist'}{'nth_dom'}[$d-1];
            } elsif ($f eq 's') {
               $val = $self->secs_since_1970_GMT();
            } elsif ($f eq 'o') {
               # Seconds elapsed since the epoch in the local zone.
               my $date2 = $self->new_date();
               $date2->parse('1970-01-01 00:00:00');
               my $delta = $date2->calc($self);
               $val = $delta->printf('%sys');
            } elsif ($f eq 'l') {
               # ls(1)-style: dates within +/- 6 months show the time,
               # others show the year. Expands into further directives,
               # so nothing is emitted (or cached) for %l itself.
               my $d0 = $self->new_date();
               my $d1 = $self->new_date();
               $d0->parse('-0:6:0:0:0:0:0'); # 6 months ago
               $d1->parse('+0:6:0:0:0:0:0'); # in 6 months
               $d0 = $d0->value();
               $d1 = $d1->value();
               my $date = $self->value();
               if ($date lt $d0 || $date ge $d1) {
                  $in = '%b %e %Y' . $in;
               } else {
                  $in = '%b %e %H:%M' . $in;
               }
               $val = '';
            } elsif ($f eq 'c') {
               # The remaining directives are compound formats: they are
               # rewritten as a sequence of simpler directives and the
               # loop re-processes them.
               $in = '%a %b %e %H:%M:%S %Y' . $in;
               $val = '';
            } elsif ($f eq 'C' || $f eq 'u') {
               $in = '%a %b %e %H:%M:%S %Z %Y' . $in;
               $val = '';
            } elsif ($f eq 'g') {
               $in = '%a, %d %b %Y %H:%M:%S %Z' . $in;
               $val = '';
            } elsif ($f eq 'D') {
               $in = '%m/%d/%y' . $in;
               $val = '';
            } elsif ($f eq 'r') {
               $in = '%I:%M:%S %p' . $in;
               $val = '';
            } elsif ($f eq 'R') {
               $in = '%H:%M' . $in;
               $val = '';
            } elsif ($f eq 'T' || $f eq 'X') {
               $in = '%H:%M:%S' . $in;
               $val = '';
            } elsif ($f eq 'V') {
               $in = '%m%d%H%M%y' . $in;
               $val = '';
            } elsif ($f eq 'Q') {
               $in = '%Y%m%d' . $in;
               $val = '';
            } elsif ($f eq 'q') {
               $in = '%Y%m%d%H%M%S' . $in;
               $val = '';
            } elsif ($f eq 'P') {
               $in = '%Y%m%d%H:%M:%S' . $in;
               $val = '';
            } elsif ($f eq 'O') {
               $in = '%Y-%m-%dT%H:%M:%S' . $in;
               $val = '';
            } elsif ($f eq 'F') {
               $in = '%A, %B %e, %Y' . $in;
               $val = '';
            } elsif ($f eq 'K') {
               $in = '%Y-%j' . $in;
               $val = '';
            } elsif ($f eq 'x') {
               if ($dmb->_config('dateformat') eq 'US') {
                  $in = '%m/%d/%y' . $in;
               } else {
                  $in = '%d/%m/%y' . $in;
               }
               $val = '';
            } elsif ($f eq 'J') {
               $in = '%G-W%W-%w' . $in;
               $val = '';
            } elsif ($f eq 'n') {
               $val = "\n";
            } elsif ($f eq 't') {
               $val = "\t";
            } else {
               # Unknown directive: emit the character itself.
               $val = $f;
            }
            if ($val ne '') {
               # Cache the computed value for repeated uses.
               $$self{'data'}{'f'}{$f} = $val;
               $out .= $val;
            }
         }
         push(@out,$out);
      }
      if (wantarray) {
         return @out;
      } elsif (@out == 1) {
         return $out[0];
      }
      return ''
   }
}
########################################################################
# EVENT METHODS
# List the events overlapping the stored date.
#
# Arguments (all optional):
#    a date object : events in the range [self, date]
#    0             : events any time on the stored day
#    (nothing)     : events at the stored instant
#    trailing 'dates' : instead of one entry per event, return a
#       timeline: [date, name, name, ...] entries giving the set of
#       active events starting at each change point.
#
# Standard format returns ([beg_date, end_date, name], ...) sorted.
sub list_events {
   my($self,@args) = @_;
   if ($$self{'err'} || ! $$self{'data'}{'set'}) {
      warn "WARNING: [list_events] Object must contain a valid date\n";
      return undef;
   }
   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};
   # Arguments
   my($date,$day,$format);
   if (@args && $args[$#args] eq 'dates') {
      pop(@args);
      $format = 'dates';
   } else {
      $format = 'std';
   }
   if (@args && $#args==0 && ref($args[0]) eq 'Date::Manip::Date') {
      $date = $args[0];
   } elsif (@args && $#args==0 && $args[0]==0) {
      $day = 1;
   } elsif (@args) {
      warn "ERROR: [list_events] unknown argument list\n";
      return [];
   }
   # Get the beginning/end dates we're looking for events in
   my($beg,$end);
   if ($date) {
      $beg = $self;
      $end = $date;
   } elsif ($day) {
      $beg = $self->new_date();
      $end = $self->new_date();
      my($y,$m,$d) = $self->value();
      $beg->set('date',[$y,$m,$d,0,0,0]);
      $end->set('date',[$y,$m,$d,23,59,59]);
   } else {
      $beg = $self;
      $end = $self;
   }
   # Swap so $beg <= $end.
   if ($beg->cmp($end) == 1) {
      my $tmp = $beg;
      $beg = $end;
      $end = $tmp;
   }
   # We need to get a list of all events which may apply.
   my($y0) = $beg->value();
   my($y1) = $end->value();
   foreach my $y ($y0..$y1) {
      $self->_events_year($y);
   }
   # Collect candidate [beg, end, name] triples from each event type.
   my @events = ();
   foreach my $i (keys %{ $$dmb{'data'}{'events'} }) {
      my $event = $$dmb{'data'}{'events'}{$i};
      my $type = $$event{'type'};
      my $name = $$event{'name'};
      if ($type eq 'specified') {
         my $d0 = $$dmb{'data'}{'events'}{$i}{'beg'};
         my $d1 = $$dmb{'data'}{'events'}{$i}{'end'};
         push @events,[$d0,$d1,$name];
      } elsif ($type eq 'ym' || $type eq 'date') {
         foreach my $y ($y0..$y1) {
            if (exists $$dmb{'data'}{'events'}{$i}{$y}) {
               my($d0,$d1) = @{ $$dmb{'data'}{'events'}{$i}{$y} };
               push @events,[$d0,$d1,$name];
            }
         }
      } elsif ($type eq 'recur') {
         my $rec = $$dmb{'data'}{'events'}{$i}{'recur'};
         my $del = $$dmb{'data'}{'events'}{$i}{'delta'};
         my @d = $rec->dates($beg,$end);
         foreach my $d0 (@d) {
            my $d1 = $d0->calc($del);
            push @events,[$d0,$d1,$name];
         }
      }
   }
   # Next we need to see which ones apply.
   # An event applies if its interval overlaps [beg, end].
   my @tmp;
   foreach my $e (@events) {
      my($d0,$d1,$name) = @$e;
      push(@tmp,$e) if ($beg->cmp($d1) != 1 &&
                        $end->cmp($d0) != -1);
   }
   # Now format them...
   if ($format eq 'std') {
      @events = sort { $$a[0]->cmp($$b[0]) ||
                       $$a[1]->cmp($$b[1]) ||
                       $$a[2] cmp $$b[2] } @tmp;
   } elsif ($format eq 'dates') {
      # Build a timeline of +/- transitions, then sweep it accumulating
      # the set of active event names.
      my $p1s = $self->new_delta();
      $p1s->parse('+0:0:0:0:0:0:1');
      @events = ();
      my (@tmp2);
      foreach my $e (@tmp) {
         my $name = $$e[2];
         if ($$e[0]->cmp($beg) == -1) {
            # Event begins before the start
            push(@tmp2,[$beg,'+',$name]);
         } else {
            push(@tmp2,[$$e[0],'+',$name]);
         }
         # Events end inclusively, so the '-' transition is 1 second
         # after the event's end date.
         my $d1 = $$e[1]->calc($p1s);
         if ($d1->cmp($end) == -1) {
            # Event ends before the end
            push(@tmp2,[$d1,'-',$name]);
         }
      }
      return () if (! @tmp2);
      @tmp2 = sort { $$a[0]->cmp($$b[0]) ||
                     $$a[1] cmp $$b[1] ||
                     $$a[2] cmp $$b[2] } @tmp2;
      # @tmp2 is now:
      # ( [ DATE1, OP1, NAME1 ], [ DATE2, OP2, NAME2 ], ... )
      # which is sorted by date.
      my $d = $tmp2[0]->[0];
      if ($beg->cmp($d) != 0) {
         # Nothing is active at the very start of the range.
         push(@events,[$beg]);
      }
      my %e;
      while (1) {
         # If the first element is the same date as we're
         # currently working with, just perform the operation
         # and remove it from the list. If the list is not empty,
         # we'll proceed to the next element.
         my $d0 = $tmp2[0]->[0];
         if ($d->cmp($d0) == 0) {
            my $e = shift(@tmp2);
            my $op = $$e[1];
            my $n = $$e[2];
            if ($op eq '+') {
               $e{$n} = 1;
            } else {
               delete $e{$n};
            }
            next if (@tmp2);
         }
         # We need to store the existing %e.
         my @n = sort keys %e;
         push(@events,[$d,@n]);
         # If the list is empty, we're done. Otherwise, we need to
         # reset the date and continue.
         last if (! @tmp2);
         $d = $tmp2[0]->[0];
      }
   }
   return @events;
}
# The events of type date and ym are determined on a year-by-year basis
#
# The events of type date and ym are determined on a year-by-year basis
#
sub _events_year {
   my($self,$y) = @_;
   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};
   my $tz  = $dmt->_now('tz',1);

   # Each year is only ever expanded once.
   return  if (exists $$dmb{'data'}{'eventyears'}{$y});
   $self->_event_objs()  if (! $$dmb{'data'}{'eventobjs'});

   # Anchor date: Jan 1 00:00:00 of the requested year.  Partially
   # specified event dates parsed relative to it default to this year.
   my $anchor = $self->new_date();
   $anchor->config('forcedate',"${y}-01-01-00:00:00,$tz");

   # Deltas producing inclusive default end points:
   #   59m59s (one hour minus a second) and 23h59m59s (one day minus a second).
   my $hour_less_1s = $anchor->new_delta();
   $hour_less_1s->set('delta',[0,0,0,0,0,59,59]);
   my $day_less_1s = $anchor->new_delta();
   $day_less_1s->set('delta',[0,0,0,0,23,59,59]);

   foreach my $idx (keys %{ $$dmb{'data'}{'events'} }) {
      my $event = $$dmb{'data'}{'events'}{$idx};
      my $type  = $$event{'type'};

      if ($type eq 'ym') {
         # A month (or explicit start/end pair) within this year.
         my $beg = $$event{'beg'};
         my $end = $$event{'end'};

         my $ev_start = $anchor->new_date();
         $ev_start->parse_date($beg);
         $ev_start->set('time',[0,0,0]);

         my $ev_end;
         if ($end) {
            $ev_end = $ev_start->new_date();
            $ev_end->parse_date($end);
            $ev_end->set('time',[23,59,59]);
         } else {
            $ev_end = $ev_start->calc($day_less_1s);
         }
         $$dmb{'data'}{'events'}{$idx}{$y} = [ $ev_start,$ev_end ];

      } elsif ($type eq 'date') {
         # A full date within this year; the end comes from an explicit
         # end date, an explicit delta, or defaults to one hour.
         my $beg = $$event{'beg'};
         my $end = $$event{'end'};
         my $del = $$event{'delta'};

         my $ev_start = $anchor->new_date();
         $ev_start->parse($beg);

         my $ev_end;
         if ($end) {
            $ev_end = $ev_start->new_date();
            $ev_end->parse($end);
         } elsif ($del) {
            $ev_end = $ev_start->calc($del);
         } else {
            $ev_end = $ev_start->calc($hour_less_1s);
         }
         $$dmb{'data'}{'events'}{$idx}{$y} = [ $ev_start,$ev_end ];
      }
   }
}
# This parses the raw event list. It only has to be done once.
#
# This parses the raw event list. It only has to be done once.
#
# Each raw event is a STRING/NAME pair from the events config section.
# STRING is one of:
#    YMD            a calendar day
#    YM             a calendar month
#    Date           a full date => a 1-hour event
#    YMD;YMD        explicit start/end days
#    YM;YM          explicit start/end months
#    Date;Date      explicit start/end instants
#    Date;Delta     start + length
#    Recur[;Delta]  each recurrence date (default length 1 hour)
# Parsed events land in $$dmb{'data'}{'events'} keyed by an integer,
# each a hash whose 'type' is:
#    specified  fully resolved now ('beg'/'end' are date objects)
#    ym, date   resolved per-year later by _events_year
#    recur      expanded on demand from the recurrence object
sub _event_objs {
   my($self) = @_;
   my $dmt = $$self{'tz'};
   my $dmb = $$dmt{'base'};

   # Only parse once.
   $$dmb{'data'}{'eventobjs'} = 1;

   # $hrM1 = 59m59s : added to a start to get a default 1-hour inclusive end
   # $M1   = -1s    : trims an explicit length so the end is inclusive
   my $hrM1 = $self->new_delta();
   $hrM1->set('delta',[0,0,0,0,0,59,59]);

   my $M1 = $self->new_delta();
   $M1->set('delta',[0,0,0,0,0,0,-1]);

   my @tmp = @{ $$dmb{'data'}{'sections'}{'events'} };
   my $i = 0;

   while (@tmp) {
      my $string = shift(@tmp);
      my $name   = shift(@tmp);

      my @event = split(/\s*;\s*/,$string);

      if ($#event == 0) {
         # YMD/YM
         my $d1  = $self->new_date();
         my $err = $d1->parse_date($event[0]);
         if (! $err) {
            if ($$d1{'data'}{'def'}[0] eq '') {
               # YM
               $$dmb{'data'}{'events'}{$i++} = { 'type' => 'ym',
                                                 'name' => $name,
                                                 'beg'  => $event[0] };
            } else {
               # YMD: cover the whole day.
               my $d2 = $d1->new_date();
               my ($y,$m,$d) = $d1->value();
               $d1->set('time',[0,0,0]);
               $d2->set('date',[$y,$m,$d,23,59,59]);
               $$dmb{'data'}{'events'}{$i++} = { 'type' => 'specified',
                                                 'name' => $name,
                                                 'beg'  => $d1,
                                                 'end'  => $d2 };
            }
            next;
         }

         # Date
         $err = $d1->parse($event[0]);
         if (! $err) {
            if ($$d1{'data'}{'def'}[0] eq '') {
               # Date (no year): resolved per-year later.
               $$dmb{'data'}{'events'}{$i++} = { 'type'  => 'date',
                                                 'name'  => $name,
                                                 'beg'   => $event[0],
                                                 'delta' => $hrM1
                                               };
            } else {
               # Date (year): a 1-hour event.
               my $d2 = $d1->calc($hrM1);
               $$dmb{'data'}{'events'}{$i++} = { 'type' => 'specified',
                                                 'name' => $name,
                                                 'beg'  => $d1,
                                                 'end'  => $d2
                                               };
            }
            next;
         }

         # Recur
         my $r = $self->new_recur();
         $err  = $r->parse($event[0]);
         if ($err) {
            warn "ERROR: invalid event definition (must be Date, YMD, YM, or Recur)\n"
               . " $string\n";
            next;
         }

         my @d = $r->dates();
         if (@d) {
            foreach my $d (@d) {
               my $d2 = $d->calc($hrM1);
               # Bug fix: 'beg' previously used $d1 (the scratch date whose
               # parse failed above) instead of the recurrence date $d, so
               # every bounded-recurrence event began at an invalid date.
               $$dmb{'data'}{'events'}{$i++} = { 'type' => 'specified',
                                                 'name' => $name,
                                                 'beg'  => $d,
                                                 'end'  => $d2
                                               };
            }
         } else {
            # Unbounded recurrence: keep the recur object and expand later.
            $$dmb{'data'}{'events'}{$i++} = { 'type'  => 'recur',
                                              'name'  => $name,
                                              'recur' => $r,
                                              'delta' => $hrM1
                                            };
         }

      } elsif ($#event == 1) {
         my($o1,$o2) = @event;

         # YMD;YMD
         # YM;YM
         my $d1  = $self->new_date();
         my $err = $d1->parse_date($o1);
         if (! $err) {
            my $d2 = $self->new_date();
            $err = $d2->parse_date($o2);
            if ($err) {
               warn "ERROR: invalid event definition (must be YMD;YMD or YM;YM)\n"
                  . " $string\n";
               next;
            } elsif ($$d1{'data'}{'def'}[0] ne $$d2{'data'}{'def'}[0]) {
               warn "ERROR: invalid event definition (YMD;YM or YM;YMD not allowed)\n"
                  . " $string\n";
               next;
            }

            if ($$d1{'data'}{'def'}[0] eq '') {
               # YM;YM
               $$dmb{'data'}{'events'}{$i++} = { 'type' => 'ym',
                                                 'name' => $name,
                                                 'beg'  => $o1,
                                                 'end'  => $o2
                                               };
            } else {
               # YMD;YMD: whole days, inclusive.
               $d1->set('time',[0,0,0]);
               $d2->set('time',[23,59,59]);
               $$dmb{'data'}{'events'}{$i++} = { 'type' => 'specified',
                                                 'name' => $name,
                                                 'beg'  => $d1,
                                                 'end'  => $d2 };
            }
            next;
         }

         # Date;Date
         # Date;Delta
         $err = $d1->parse($o1);
         if (! $err) {
            my $d2 = $self->new_date();
            $err = $d2->parse($o2,'nodelta');

            if (! $err) {
               # Date;Date
               # (typo fix in message: "dats" -> "dates")
               if ($$d1{'data'}{'def'}[0] ne $$d2{'data'}{'def'}[0]) {
                  warn "ERROR: invalid event definition (year must be absent or\n"
                     . " included in both dates in Date;Date)\n"
                     . " $string\n";
                  next;
               }
               if ($$d1{'data'}{'def'}[0] eq '') {
                  # Date (no year)
                  $$dmb{'data'}{'events'}{$i++} = { 'type' => 'date',
                                                    'name' => $name,
                                                    'beg'  => $o1,
                                                    'end'  => $o2
                                                  };
               } else {
                  # Date (year)
                  $$dmb{'data'}{'events'}{$i++} = { 'type' => 'specified',
                                                    'name' => $name,
                                                    'beg'  => $d1,
                                                    'end'  => $d2
                                                  };
               }
               next;
            }

            # Date;Delta
            my $del = $self->new_delta();
            $err = $del->parse($o2);
            if ($err) {
               warn "ERROR: invalid event definition (must be Date;Date or\n"
                  . " Date;Delta) $string\n";
               next;
            }

            # Trim one second so the end instant is inclusive.
            $del = $del->calc($M1);
            if ($$d1{'data'}{'def'}[0] eq '') {
               # Date (no year)
               $$dmb{'data'}{'events'}{$i++} = { 'type'  => 'date',
                                                 'name'  => $name,
                                                 'beg'   => $o1,
                                                 'delta' => $del
                                               };
            } else {
               # Date (year)
               $d2 = $d1->calc($del);
               $$dmb{'data'}{'events'}{$i++} = { 'type' => 'specified',
                                                 'name' => $name,
                                                 'beg'  => $d1,
                                                 'end'  => $d2
                                               };
            }
            next;
         }

         # Recur;Delta
         my $r = $self->new_recur();
         $err = $r->parse($o1);
         my $del = $self->new_delta();
         if (! $err) {
            $err = $del->parse($o2);
         }
         if ($err) {
            warn "ERROR: invalid event definition (must be Date;Date, YMD;YMD, "
               . " YM;YM, Date;Delta, or Recur;Delta)\n"
               . " $string\n";
            next;
         }

         $del = $del->calc($M1);

         my @d = $r->dates();
         if (@d) {
            foreach my $d1 (@d) {
               my $d2 = $d1->calc($del);
               $$dmb{'data'}{'events'}{$i++} = { 'type' => 'specified',
                                                 'name' => $name,
                                                 'beg'  => $d1,
                                                 'end'  => $d2
                                               };
            }
         } else {
            $$dmb{'data'}{'events'}{$i++} = { 'type'  => 'recur',
                                              'name'  => $name,
                                              'recur' => $r,
                                              'delta' => $del
                                            };
         }

      } else {
         warn "ERROR: invalid event definition\n"
            . " $string\n";
         next;
      }
   }
}
1;
# Local Variables:
# mode: cperl
# indent-tabs-mode: nil
# cperl-indent-level: 3
# cperl-continued-statement-offset: 2
# cperl-continued-brace-offset: 0
# cperl-brace-offset: 0
# cperl-brace-imaginary-offset: 0
# cperl-label-offset: 0
# End:
| nriley/Pester | Source/Manip/Date.pm | Perl | bsd-2-clause | 143,848 |
#!/usr/bin/env perl
# Copyright 2017-2020 The OpenSSL Project Authors. All Rights Reserved.
#
# Licensed under the OpenSSL license (the "License"). You may not use
# this file except in compliance with the License. You can obtain a copy
# in the file LICENSE in the source distribution or at
# https://www.openssl.org/source/license.html
#
# ====================================================================
# Written by Andy Polyakov <appro@openssl.org> for the OpenSSL
# project. The module is, however, dual licensed under OpenSSL and
# CRYPTOGAMS licenses depending on where you obtain it. For further
# details see http://www.openssl.org/~appro/cryptogams/.
# ====================================================================
#
# Keccak-1600 for AVX-512F.
#
# July 2017.
#
# Below code is KECCAK_1X_ALT implementation (see sha/keccak1600.c).
# Pretty straightforward, the only "magic" is data layout in registers.
# It's impossible to have one that is optimal for every step, hence
# it's changing as algorithm progresses. Data is saved in linear order,
# but in-register order morphs between rounds. Even rounds take in
# linear layout, and odd rounds - transposed, or "verticaly-shaped"...
#
########################################################################
# Numbers are cycles per processed byte out of large message.
#
# r=1088(*)
#
# Knights Landing 7.6
# Skylake-X 5.7
#
# (*) Corresponds to SHA3-256.
########################################################################
# Below code is combination of two ideas. One is taken from Keccak Code
# Package, hereafter KCP, and another one from initial version of this
# module. What is common is observation that Pi's input and output are
# "mostly transposed", i.e. if input is aligned by x coordinate, then
# output is [mostly] aligned by y. Both versions, KCP and predecessor,
# were trying to use one of them from round to round, which resulted in
# some kind of transposition in each round. This version still does
# transpose data, but only every second round. Another essential factor
# is that KCP transposition has to be performed with instructions that
# turned to be rather expensive on Knights Landing, both latency- and
# throughput-wise. Not to mention that some of them have to depend on
# each other. On the other hand initial version of this module was
# relying heavily on blend instructions. There were lots of them,
# resulting in higher instruction count, yet it performed better on
# Knights Landing, because processor can execute pair of them each
# cycle and they have minimal latency. This module is an attempt to
# bring best parts together:-)
#
# Coordinates below correspond to those in sha/keccak1600.c. Input
# layout is straight linear:
#
# [0][4] [0][3] [0][2] [0][1] [0][0]
# [1][4] [1][3] [1][2] [1][1] [1][0]
# [2][4] [2][3] [2][2] [2][1] [2][0]
# [3][4] [3][3] [3][2] [3][1] [3][0]
# [4][4] [4][3] [4][2] [4][1] [4][0]
#
# It's perfect for Theta, while Pi is reduced to intra-register
# permutations which yield layout perfect for Chi:
#
# [4][0] [3][0] [2][0] [1][0] [0][0]
# [4][1] [3][1] [2][1] [1][1] [0][1]
# [4][2] [3][2] [2][2] [1][2] [0][2]
# [4][3] [3][3] [2][3] [1][3] [0][3]
# [4][4] [3][4] [2][4] [1][4] [0][4]
#
# Now instead of performing full transposition and feeding it to next
# identical round, we perform kind of diagonal transposition to layout
# from initial version of this module, and make it suitable for Theta:
#
# [4][4] [3][3] [2][2] [1][1] [0][0]>4.3.2.1.0>[4][4] [3][3] [2][2] [1][1] [0][0]
# [4][0] [3][4] [2][3] [1][2] [0][1]>3.2.1.0.4>[3][4] [2][3] [1][2] [0][1] [4][0]
# [4][1] [3][0] [2][4] [1][3] [0][2]>2.1.0.4.3>[2][4] [1][3] [0][2] [4][1] [3][0]
# [4][2] [3][1] [2][0] [1][4] [0][3]>1.0.4.3.2>[1][4] [0][3] [4][2] [3][1] [2][0]
# [4][3] [3][2] [2][1] [1][0] [0][4]>0.4.3.2.1>[0][4] [4][3] [3][2] [2][1] [1][0]
#
# Now intra-register permutations yield initial [almost] straight
# linear layout:
#
# [4][4] [3][3] [2][2] [1][1] [0][0]
##[0][4] [0][3] [0][2] [0][1] [0][0]
# [3][4] [2][3] [1][2] [0][1] [4][0]
##[2][3] [2][2] [2][1] [2][0] [2][4]
# [2][4] [1][3] [0][2] [4][1] [3][0]
##[4][2] [4][1] [4][0] [4][4] [4][3]
# [1][4] [0][3] [4][2] [3][1] [2][0]
##[1][1] [1][0] [1][4] [1][3] [1][2]
# [0][4] [4][3] [3][2] [2][1] [1][0]
##[3][0] [3][4] [3][3] [3][2] [3][1]
#
# This means that odd round Chi is performed in less suitable layout,
# with a number of additional permutations. But overall it turned to be
# a win. Permutations are fastest possible on Knights Landing and they
# are laid down to be independent of each other. In the essence I traded
# 20 blend instructions for 3 permutations. The result is 13% faster
# than KCP on Skylake-X, and >40% on Knights Landing.
#
# As implied, data is loaded in straight linear order. Digits in
# variables' names represent coordinates of right-most element of
# loaded data chunk:
# Register allocation for the AVX-512 Keccak-f[1600] code generator.
# The 5x5 matrix of 64-bit lanes lives in five zmm registers, one row
# per register; only the low five qwords of each zmm are significant.
my ($A00, # [0][4] [0][3] [0][2] [0][1] [0][0]
    $A10, # [1][4] [1][3] [1][2] [1][1] [1][0]
    $A20, # [2][4] [2][3] [2][2] [2][1] [2][0]
    $A30, # [3][4] [3][3] [3][2] [3][1] [3][0]
    $A40) = # [4][4] [4][3] [4][2] [4][1] [4][0]
    map("%zmm$_",(0..4));

# We also need to map the magic order into offsets within structure:
my @A_jagged = ([0,0], [0,1], [0,2], [0,3], [0,4],
                [1,0], [1,1], [1,2], [1,3], [1,4],
                [2,0], [2,1], [2,2], [2,3], [2,4],
                [3,0], [3,1], [3,2], [3,3], [3,4],
                [4,0], [4,1], [4,2], [4,3], [4,4]);
   @A_jagged = map(8*($$_[0]*8+$$_[1]), @A_jagged); # ... and now linear

# Scratch registers plus the permutation-index and rotation-count tables
# (the tables are loaded from the constant pool at the end of this file).
my @T = map("%zmm$_",(5..12));
my @Theta = map("%zmm$_",(33,13..16)); # invalid @Theta[0] is not typo
my @Pi0 = map("%zmm$_",(17..21));
my @Rhotate0 = map("%zmm$_",(22..26));
my @Rhotate1 = map("%zmm$_",(27..31));

# Theta step working aliases into the scratch bank.
my ($C00,$D00) = @T[0..1];
# Opmask registers selecting individual lanes (k00001 = lane 0 only, etc.)
# and the low five lanes (k11111); set up in SHA3_absorb/SHA3_squeeze.
my ($k00001,$k00010,$k00100,$k01000,$k10000,$k11111) = map("%k$_",(1..6));
# Emit __KeccakF1600: 24 rounds of Keccak-f[1600], two rounds per loop
# iteration (an "even" round on the linear layout followed by an "odd"
# round on the transposed layout -- see the commentary at the top of the
# file).  Everything between the <<___ markers is emitted verbatim as
# assembly; \$ escapes keep Perl from interpolating immediate operands.
$code.=<<___;
.text

.type __KeccakF1600,\@function
.align 32
__KeccakF1600:
lea iotas(%rip),%r10
mov \$12,%eax
jmp .Loop_avx512

.align 32
.Loop_avx512:
######################################### Theta, even round
vmovdqa64 $A00,@T[0] # put aside original A00
vpternlogq \$0x96,$A20,$A10,$A00 # and use it as "C00"
vpternlogq \$0x96,$A40,$A30,$A00

vprolq \$1,$A00,$D00
vpermq $A00,@Theta[1],$A00
vpermq $D00,@Theta[4],$D00

vpternlogq \$0x96,$A00,$D00,@T[0] # T[0] is original A00
vpternlogq \$0x96,$A00,$D00,$A10
vpternlogq \$0x96,$A00,$D00,$A20
vpternlogq \$0x96,$A00,$D00,$A30
vpternlogq \$0x96,$A00,$D00,$A40

######################################### Rho
vprolvq @Rhotate0[0],@T[0],$A00 # T[0] is original A00
vprolvq @Rhotate0[1],$A10,$A10
vprolvq @Rhotate0[2],$A20,$A20
vprolvq @Rhotate0[3],$A30,$A30
vprolvq @Rhotate0[4],$A40,$A40

######################################### Pi
vpermq $A00,@Pi0[0],$A00
vpermq $A10,@Pi0[1],$A10
vpermq $A20,@Pi0[2],$A20
vpermq $A30,@Pi0[3],$A30
vpermq $A40,@Pi0[4],$A40

######################################### Chi
vmovdqa64 $A00,@T[0]
vmovdqa64 $A10,@T[1]
vpternlogq \$0xD2,$A20,$A10,$A00
vpternlogq \$0xD2,$A30,$A20,$A10
vpternlogq \$0xD2,$A40,$A30,$A20
vpternlogq \$0xD2,@T[0],$A40,$A30
vpternlogq \$0xD2,@T[1],@T[0],$A40

######################################### Iota
vpxorq (%r10),$A00,${A00}{$k00001}
lea 16(%r10),%r10

######################################### Harmonize rounds
vpblendmq $A20,$A10,@{T[1]}{$k00010}
vpblendmq $A30,$A20,@{T[2]}{$k00010}
vpblendmq $A40,$A30,@{T[3]}{$k00010}
vpblendmq $A10,$A00,@{T[0]}{$k00010}
vpblendmq $A00,$A40,@{T[4]}{$k00010}

vpblendmq $A30,@T[1],@{T[1]}{$k00100}
vpblendmq $A40,@T[2],@{T[2]}{$k00100}
vpblendmq $A20,@T[0],@{T[0]}{$k00100}
vpblendmq $A00,@T[3],@{T[3]}{$k00100}
vpblendmq $A10,@T[4],@{T[4]}{$k00100}

vpblendmq $A40,@T[1],@{T[1]}{$k01000}
vpblendmq $A30,@T[0],@{T[0]}{$k01000}
vpblendmq $A00,@T[2],@{T[2]}{$k01000}
vpblendmq $A10,@T[3],@{T[3]}{$k01000}
vpblendmq $A20,@T[4],@{T[4]}{$k01000}

vpblendmq $A40,@T[0],@{T[0]}{$k10000}
vpblendmq $A00,@T[1],@{T[1]}{$k10000}
vpblendmq $A10,@T[2],@{T[2]}{$k10000}
vpblendmq $A20,@T[3],@{T[3]}{$k10000}
vpblendmq $A30,@T[4],@{T[4]}{$k10000}

#vpermq @T[0],@Theta[0],$A00 # doesn't actually change order
vpermq @T[1],@Theta[1],$A10
vpermq @T[2],@Theta[2],$A20
vpermq @T[3],@Theta[3],$A30
vpermq @T[4],@Theta[4],$A40

######################################### Theta, odd round
vmovdqa64 $T[0],$A00 # real A00
vpternlogq \$0x96,$A20,$A10,$C00 # C00 is @T[0]'s alias
vpternlogq \$0x96,$A40,$A30,$C00

vprolq \$1,$C00,$D00
vpermq $C00,@Theta[1],$C00
vpermq $D00,@Theta[4],$D00

vpternlogq \$0x96,$C00,$D00,$A00
vpternlogq \$0x96,$C00,$D00,$A30
vpternlogq \$0x96,$C00,$D00,$A10
vpternlogq \$0x96,$C00,$D00,$A40
vpternlogq \$0x96,$C00,$D00,$A20

######################################### Rho
vprolvq @Rhotate1[0],$A00,$A00
vprolvq @Rhotate1[3],$A30,@T[1]
vprolvq @Rhotate1[1],$A10,@T[2]
vprolvq @Rhotate1[4],$A40,@T[3]
vprolvq @Rhotate1[2],$A20,@T[4]

vpermq $A00,@Theta[4],@T[5]
vpermq $A00,@Theta[3],@T[6]

######################################### Iota
vpxorq -8(%r10),$A00,${A00}{$k00001}

######################################### Pi
vpermq @T[1],@Theta[2],$A10
vpermq @T[2],@Theta[4],$A20
vpermq @T[3],@Theta[1],$A30
vpermq @T[4],@Theta[3],$A40

######################################### Chi
vpternlogq \$0xD2,@T[6],@T[5],$A00

vpermq @T[1],@Theta[1],@T[7]
#vpermq @T[1],@Theta[0],@T[1]
vpternlogq \$0xD2,@T[1],@T[7],$A10

vpermq @T[2],@Theta[3],@T[0]
vpermq @T[2],@Theta[2],@T[2]
vpternlogq \$0xD2,@T[2],@T[0],$A20

#vpermq @T[3],@Theta[0],@T[3]
vpermq @T[3],@Theta[4],@T[1]
vpternlogq \$0xD2,@T[1],@T[3],$A30

vpermq @T[4],@Theta[2],@T[0]
vpermq @T[4],@Theta[1],@T[4]
vpternlogq \$0xD2,@T[4],@T[0],$A40

dec %eax
jnz .Loop_avx512

ret
.size __KeccakF1600,.-__KeccakF1600
___
# Argument registers (System V AMD64 calling convention): presumably
# SHA3_absorb(A_flat, inp, len, bsz) as in sha/keccak1600.c -- confirm
# against the C prototype.  $out aliases $inp because SHA3_squeeze reuses
# the second argument as its output pointer.
my ($A_flat,$inp,$len,$bsz) = ("%rdi","%rsi","%rdx","%rcx");
my $out = $inp; # in squeeze

# SHA3_absorb prologue: build the lane-select opmasks, load the
# permutation/rotation tables, load the 5x5 state, and zero the 5x64-byte
# stack transfer area used to stage each input block.
$code.=<<___;
.globl SHA3_absorb
.type SHA3_absorb,\@function
.align 32
SHA3_absorb:
mov %rsp,%r11

lea -320(%rsp),%rsp
and \$-64,%rsp

lea 96($A_flat),$A_flat
lea 96($inp),$inp
lea 128(%rsp),%r9

lea theta_perm(%rip),%r8

kxnorw $k11111,$k11111,$k11111
kshiftrw \$15,$k11111,$k00001
kshiftrw \$11,$k11111,$k11111
kshiftlw \$1,$k00001,$k00010
kshiftlw \$2,$k00001,$k00100
kshiftlw \$3,$k00001,$k01000
kshiftlw \$4,$k00001,$k10000

#vmovdqa64 64*0(%r8),@Theta[0]
vmovdqa64 64*1(%r8),@Theta[1]
vmovdqa64 64*2(%r8),@Theta[2]
vmovdqa64 64*3(%r8),@Theta[3]
vmovdqa64 64*4(%r8),@Theta[4]

vmovdqa64 64*5(%r8),@Rhotate1[0]
vmovdqa64 64*6(%r8),@Rhotate1[1]
vmovdqa64 64*7(%r8),@Rhotate1[2]
vmovdqa64 64*8(%r8),@Rhotate1[3]
vmovdqa64 64*9(%r8),@Rhotate1[4]

vmovdqa64 64*10(%r8),@Rhotate0[0]
vmovdqa64 64*11(%r8),@Rhotate0[1]
vmovdqa64 64*12(%r8),@Rhotate0[2]
vmovdqa64 64*13(%r8),@Rhotate0[3]
vmovdqa64 64*14(%r8),@Rhotate0[4]

vmovdqa64 64*15(%r8),@Pi0[0]
vmovdqa64 64*16(%r8),@Pi0[1]
vmovdqa64 64*17(%r8),@Pi0[2]
vmovdqa64 64*18(%r8),@Pi0[3]
vmovdqa64 64*19(%r8),@Pi0[4]

vmovdqu64 40*0-96($A_flat),${A00}{$k11111}{z}
vpxorq @T[0],@T[0],@T[0]
vmovdqu64 40*1-96($A_flat),${A10}{$k11111}{z}
vmovdqu64 40*2-96($A_flat),${A20}{$k11111}{z}
vmovdqu64 40*3-96($A_flat),${A30}{$k11111}{z}
vmovdqu64 40*4-96($A_flat),${A40}{$k11111}{z}

vmovdqa64 @T[0],0*64-128(%r9) # zero transfer area on stack
vmovdqa64 @T[0],1*64-128(%r9)
vmovdqa64 @T[0],2*64-128(%r9)
vmovdqa64 @T[0],3*64-128(%r9)
vmovdqa64 @T[0],4*64-128(%r9)
jmp .Loop_absorb_avx512

.align 32
.Loop_absorb_avx512:
mov $bsz,%rax
sub $bsz,$len
jc .Ldone_absorb_avx512

shr \$3,%eax
___
# Unrolled per-lane copy: move up to 25 input qwords into the jagged
# stack layout, bailing out (".Labsorved" -- sic, label kept as-is) as
# soon as one rate's worth (%eax = bsz/8 lanes) has been staged.
for(my $i=0; $i<25; $i++) {
# Note: no semicolon after <<___ -- the heredoc is the last statement
# in the loop body, terminated by the closing brace.
$code.=<<___
mov 8*$i-96($inp),%r8
mov %r8,$A_jagged[$i]-128(%r9)
dec %eax
jz .Labsorved_avx512
___
}
# Remainder of the generated code: the absorb epilogue (XOR the staged
# block into the state, permute, loop; then write the state back),
# SHA3_squeeze (emit bsz-byte blocks, re-permuting between them, with a
# rep-movsb tail for the final partial qword), and the constant pool of
# permutation indices, rotation counts and round constants (iotas).
$code.=<<___;
.Labsorved_avx512:
lea ($inp,$bsz),$inp

vpxorq 64*0-128(%r9),$A00,$A00
vpxorq 64*1-128(%r9),$A10,$A10
vpxorq 64*2-128(%r9),$A20,$A20
vpxorq 64*3-128(%r9),$A30,$A30
vpxorq 64*4-128(%r9),$A40,$A40

call __KeccakF1600

jmp .Loop_absorb_avx512

.align 32
.Ldone_absorb_avx512:
vmovdqu64 $A00,40*0-96($A_flat){$k11111}
vmovdqu64 $A10,40*1-96($A_flat){$k11111}
vmovdqu64 $A20,40*2-96($A_flat){$k11111}
vmovdqu64 $A30,40*3-96($A_flat){$k11111}
vmovdqu64 $A40,40*4-96($A_flat){$k11111}

vzeroupper

lea (%r11),%rsp
lea ($len,$bsz),%rax # return value

ret
.size SHA3_absorb,.-SHA3_absorb

.globl SHA3_squeeze
.type SHA3_squeeze,\@function
.align 32
SHA3_squeeze:
mov %rsp,%r11

lea 96($A_flat),$A_flat
cmp $bsz,$len
jbe .Lno_output_extension_avx512

lea theta_perm(%rip),%r8

kxnorw $k11111,$k11111,$k11111
kshiftrw \$15,$k11111,$k00001
kshiftrw \$11,$k11111,$k11111
kshiftlw \$1,$k00001,$k00010
kshiftlw \$2,$k00001,$k00100
kshiftlw \$3,$k00001,$k01000
kshiftlw \$4,$k00001,$k10000

#vmovdqa64 64*0(%r8),@Theta[0]
vmovdqa64 64*1(%r8),@Theta[1]
vmovdqa64 64*2(%r8),@Theta[2]
vmovdqa64 64*3(%r8),@Theta[3]
vmovdqa64 64*4(%r8),@Theta[4]

vmovdqa64 64*5(%r8),@Rhotate1[0]
vmovdqa64 64*6(%r8),@Rhotate1[1]
vmovdqa64 64*7(%r8),@Rhotate1[2]
vmovdqa64 64*8(%r8),@Rhotate1[3]
vmovdqa64 64*9(%r8),@Rhotate1[4]

vmovdqa64 64*10(%r8),@Rhotate0[0]
vmovdqa64 64*11(%r8),@Rhotate0[1]
vmovdqa64 64*12(%r8),@Rhotate0[2]
vmovdqa64 64*13(%r8),@Rhotate0[3]
vmovdqa64 64*14(%r8),@Rhotate0[4]

vmovdqa64 64*15(%r8),@Pi0[0]
vmovdqa64 64*16(%r8),@Pi0[1]
vmovdqa64 64*17(%r8),@Pi0[2]
vmovdqa64 64*18(%r8),@Pi0[3]
vmovdqa64 64*19(%r8),@Pi0[4]

vmovdqu64 40*0-96($A_flat),${A00}{$k11111}{z}
vmovdqu64 40*1-96($A_flat),${A10}{$k11111}{z}
vmovdqu64 40*2-96($A_flat),${A20}{$k11111}{z}
vmovdqu64 40*3-96($A_flat),${A30}{$k11111}{z}
vmovdqu64 40*4-96($A_flat),${A40}{$k11111}{z}

.Lno_output_extension_avx512:
shr \$3,$bsz
lea -96($A_flat),%r9
mov $bsz,%rax
jmp .Loop_squeeze_avx512

.align 32
.Loop_squeeze_avx512:
cmp \$8,$len
jb .Ltail_squeeze_avx512

mov (%r9),%r8
lea 8(%r9),%r9
mov %r8,($out)
lea 8($out),$out
sub \$8,$len # len -= 8
jz .Ldone_squeeze_avx512

sub \$1,%rax # bsz--
jnz .Loop_squeeze_avx512

#vpermq @Theta[4],@Theta[4],@Theta[3]
#vpermq @Theta[3],@Theta[4],@Theta[2]
#vpermq @Theta[3],@Theta[3],@Theta[1]

call __KeccakF1600

vmovdqu64 $A00,40*0-96($A_flat){$k11111}
vmovdqu64 $A10,40*1-96($A_flat){$k11111}
vmovdqu64 $A20,40*2-96($A_flat){$k11111}
vmovdqu64 $A30,40*3-96($A_flat){$k11111}
vmovdqu64 $A40,40*4-96($A_flat){$k11111}

lea -96($A_flat),%r9
mov $bsz,%rax
jmp .Loop_squeeze_avx512

.Ltail_squeeze_avx512:
mov $out,%rdi
mov %r9,%rsi
mov $len,%rcx
.byte 0xf3,0xa4 # rep movsb

.Ldone_squeeze_avx512:
vzeroupper

lea (%r11),%rsp
ret
.size SHA3_squeeze,.-SHA3_squeeze

.align 64
theta_perm:
.quad 0, 1, 2, 3, 4, 5, 6, 7 # [not used]
.quad 4, 0, 1, 2, 3, 5, 6, 7
.quad 3, 4, 0, 1, 2, 5, 6, 7
.quad 2, 3, 4, 0, 1, 5, 6, 7
.quad 1, 2, 3, 4, 0, 5, 6, 7

rhotates1:
.quad 0, 44, 43, 21, 14, 0, 0, 0 # [0][0] [1][1] [2][2] [3][3] [4][4]
.quad 18, 1, 6, 25, 8, 0, 0, 0 # [4][0] [0][1] [1][2] [2][3] [3][4]
.quad 41, 2, 62, 55, 39, 0, 0, 0 # [3][0] [4][1] [0][2] [1][3] [2][4]
.quad 3, 45, 61, 28, 20, 0, 0, 0 # [2][0] [3][1] [4][2] [0][3] [1][4]
.quad 36, 10, 15, 56, 27, 0, 0, 0 # [1][0] [2][1] [3][2] [4][3] [0][4]

rhotates0:
.quad 0, 1, 62, 28, 27, 0, 0, 0
.quad 36, 44, 6, 55, 20, 0, 0, 0
.quad 3, 10, 43, 25, 39, 0, 0, 0
.quad 41, 45, 15, 21, 8, 0, 0, 0
.quad 18, 2, 61, 56, 14, 0, 0, 0

pi0_perm:
.quad 0, 3, 1, 4, 2, 5, 6, 7
.quad 1, 4, 2, 0, 3, 5, 6, 7
.quad 2, 0, 3, 1, 4, 5, 6, 7
.quad 3, 1, 4, 2, 0, 5, 6, 7
.quad 4, 2, 0, 3, 1, 5, 6, 7

iotas:
.quad 0x0000000000000001
.quad 0x0000000000008082
.quad 0x800000000000808a
.quad 0x8000000080008000
.quad 0x000000000000808b
.quad 0x0000000080000001
.quad 0x8000000080008081
.quad 0x8000000000008009
.quad 0x000000000000008a
.quad 0x0000000000000088
.quad 0x0000000080008009
.quad 0x000000008000000a
.quad 0x000000008000808b
.quad 0x800000000000008b
.quad 0x8000000000008089
.quad 0x8000000000008003
.quad 0x8000000000008002
.quad 0x8000000000000080
.quad 0x000000000000800a
.quad 0x800000008000000a
.quad 0x8000000080008081
.quad 0x8000000000008080
.quad 0x0000000080000001
.quad 0x8000000080008008

.asciz "Keccak-1600 absorb and squeeze for AVX-512F, CRYPTOGAMS by <appro\@openssl.org>"
___
# Write the generated assembly to the file named by the last command-line
# argument.  The original used unchecked 2-arg open, so a bad path
# silently discarded all output; use checked three-arg open instead
# (three-arg also avoids interpreting metacharacters in the name as a
# mode).
my $output = pop;
open STDOUT, '>', $output or die "can't open $output: $!";
print $code;
close STDOUT or die "error closing STDOUT: $!";
| pmq20/ruby-compiler | vendor/openssl/crypto/sha/asm/keccak1600-avx512.pl | Perl | mit | 16,330 |
use strict;
use warnings;
use Socket;

# Minimal half-duplex netcat clone: send all of STDIN to HOST:PORT over
# TCP, then print everything the server sends back.  Fixes over the
# original: lexical socket handle instead of the bareword SOCKET, and
# diagnostic die messages instead of bare "die" (which reported nothing
# but "Died at ...").

binmode(STDIN);
$| = 1;
binmode(STDOUT);

if (@ARGV != 2) {
  die "Usage: netcat.pl HOST PORT\n";
}

my $host = shift;
my $port = shift;

# Resolve the host and build the packed destination address.
my $address = inet_aton($host)
  or die "netcat.pl: cannot resolve host '$host'\n";
my $address_and_port = sockaddr_in($port, $address);
my $protocol = getprotobyname('tcp');

socket(my $sock, PF_INET, SOCK_STREAM, $protocol)
  or die "netcat.pl: socket: $!\n";

# Turn on autoflush for the socket, then restore the default handle.
my $old = select($sock);
$| = 1;
select($old);

binmode($sock);
connect($sock, $address_and_port)
  or die "netcat.pl: connect to $host:$port failed: $!\n";

# Ship every STDIN line first, then echo the server's response.
while (<STDIN>) {
  print {$sock} $_;
}

while (<$sock>) {
  print;
}

close($sock);
exit 0;
#!/usr/bin/perl -w
use strict;
use Getopt::Long;
use SAPserver;
use ScriptThing;
#
# This is a SAS Component.
#
=head1 svr_discriminating_functions
svr_discriminating_functions genome_ids1.tbl genome_ids2.tbl >role_list.tbl
Analyze two groups of genomes and return a list of the functions that discriminate
between them.
A function discriminates between two groups of genomes if it is common in one and
uncommon in the other.
This script takes as input two tab-delimited files with genome IDs at the end of each
line. It writes out a single tab-delimited file with four columns.
Alternatively, the script can be used as a pipe command. If no positional parameters
are specified, the first group will be taken from the standard input.
If no second group is specified, then the second group will be all complete
genomes not in the first group. Optionally, it can be all prokaryotic complete
genomes not in the first group.
=over 4
=item 1
The FIGfam ID of a function.
=item 2
The function of the identified FIGfam.
=item 3
A score indicating the degree of discrimination. A score of 2 indicates the function
occurs universally in one group and not at all in the other. All scores will be
greater than 1
=item 4
C<1> if the function tends to be in the first group and C<2> if it tends to be in
the second.
=back
The output will be sorted by the fourth column, so the results will be presented with
the roles discriminating in favor of the first group followed by the roles discriminating
in favor of the second group.
=head2 Command-Line Options
=over 4
=item url
The URL for the Sapling server, if it is to be different from the default.
=item c
Column index. If specified, indicates that the input IDs should be taken from the
indicated column instead of the last column. The first column is column 1. This
parameter applies to both input files.
=item prok
If specified, and if no second group is present, the second group will be limited to
prokaryotic genomes.
=back
=cut
# Parse the command-line options.
my $url = '';
my $column = '';
my $prok;
my $opted = GetOptions('url=s' => \$url, 'c=i' => \$column, prok => \$prok);
if (! $opted) {
    print "usage: svr_discriminating_functions [--url=http://...] [--c=N] group1 group2 >output\n";
} else {
    # Get the server object.
    my $sapServer = SAPserver->new(url => $url);
    # Get the genome lists. Either file name may be absent (see POD above):
    # a missing group 1 falls back to STDIN, a missing group 2 falls back
    # to the complement of group 1 over all complete genomes.
    my ($g1File, $g2File) = @ARGV;
    my ($group1, $group2);
    if ($g1File) {
        # Here the first group is specified in a file.
        $group1 = GetGenomes($g1File, $column);
    } else {
        # Here the first group is taken from the standard input.
        $group1 = [ ScriptThing::GetList(\*STDIN, $column) ];
    }
    if ($g2File) {
        # Here the second group is specified in a file.
        $group2 = GetGenomes($g2File, $column);
    } else {
        # Here the second group is the complement of the first. Get a hash of the genomes
        # in the first group.
        my %g1Hash = map { $_ => 1 } @$group1;
        # Get the list of genomes to be used for the second group.
        # -prokaryotic honors the --prok option.
        my $allGenomes = $sapServer->all_genomes(-prokaryotic => $prok, -complete => 1);
        # The returned list is a hash. Filter its keys to produce group 2.
        $group2 = [ grep { ! $g1Hash{$_ } } keys %$allGenomes ];
    }
    # Compute the discriminating figfams.
    my $groupList = $sapServer->discriminating_figfams(-group1 => $group1,
                                                       -group2 => $group2);
    # Output the groups: all of group 1's FIGfams first, then group 2's,
    # which produces the column-4 ordering promised in the POD.
    for my $i (1, 2) {
        # Get the current group's hash.
        my $groupH = $groupList->[$i-1];
        # Get the functions for the indicated FIGfams.
        my $famH = $sapServer->figfam_function(-ids => [keys %$groupH]);
        # Loop through the FIGfams in this section.
        for my $fam (sort keys %$groupH) {
            # Write out this FIGfam's data: ID, function, score, group number.
            print join("\t", $fam, $famH->{$fam}, $groupH->{$fam}, $i) . "\n";
        }
    }
}
# Get a genome list.
# Read a genome ID list from the named file, taking IDs from $column
# (or the last column when $column is false), and return them as an
# array reference.
sub GetGenomes {
    # Get the file name and column ID.
    my ($fileName, $column) = @_;
    # Open the file with checked three-arg open.  The original used
    # 'open ... || die', where || bound to the (always-true) mode+name
    # string, so open failures were silently ignored and an empty group
    # was returned.
    open(my $gh, '<', $fileName) or die "Genome file error: $!";
    # Get the input data.
    my @retVal = ScriptThing::GetList($gh, $column);
    # Return the result.
    return \@retVal;
}
| kbase/kb_seed | service-scripts/svr_discriminating_functions.pl | Perl | mit | 4,345 |
#!/C:\strawberry\perl\bin\perl.exe
# Explain output and find bug
use strict;
use warnings;
# Demo call: prints "9,999".
print format_number( 9999 ), "\n";
# Insert thousands separators into an integer string, e.g. 1234567 ->
# "1,234,567".  Leading "+"/"-" signs are preserved; any non-digit tail
# (such as a decimal fraction) is left untouched.  Returns the formatted
# string (the original version had returned the result of print, i.e. 1).
sub format_number {
    my $number = shift;
    # Repeatedly split off a trailing group of three digits until no run
    # of four-or-more digits remains before the first comma.
    while ($number =~ s/^([-+]?\d+)(\d{3})/$1,$2/) { }
    return $number;
}
| bewuethr/ctci | llama_book/chapter05/wb_chapter05_ex03.pl | Perl | mit | 262 |
package SanrioCharacterRanking::M::Character;
use strict;
use warnings;
use utf8;
use 5.012;
use parent qw/SanrioCharacterRanking::M/;
use Amon2::Declare;
# Per-day vote totals for one character: each row carries a
# 'YYYY-MM-DD' date string and the number of votes cast that day,
# ordered chronologically.
sub daily_votes {
    my ($class, $character_id) = @_;
    my $sql = "
        SELECT
            TO_CHAR(statuses.created_at, 'YYYY-MM-DD') AS date,
            COUNT(votes.status_id) AS votes
        FROM votes
        JOIN statuses ON votes.status_id = statuses.id
        WHERE votes.character_id = ?
        GROUP BY date
        ORDER BY date ASC
    ";
    return c()->db->search_by_sql($sql, [ $character_id ]);
}
1;
| mono0x/ranking.sucretown.net | lib/SanrioCharacterRanking/M/Character.pm | Perl | mit | 538 |
package SGN::Controller::MixedModels;
use Moose;
use URI::FromHash qw | uri |;
BEGIN { extends 'Catalyst::Controller' };
# Page controller for the mixed-models tool, mapped to
# GET /tools/mixedmodels; renders the /tools/mixedmodels.mas template.
# Visitors who are not logged in are redirected to the login page with
# a goto_url parameter pointing back at the originally requested URL.
sub mixed_model_index :Path('/tools/mixedmodels') Args(0) {
    my $self = shift;
    my $c = shift;
    if (!$c->user()) {
        # Bounce to login, preserving path+query so the user returns here.
	$c->res->redirect( uri( path => '/user/login', query => { goto_url => $c->req->uri->path_query } ) );
        # NOTE(review): no return/detach after the redirect, so the stash
        # assignment below still runs; Catalyst normally skips rendering
        # for 3xx responses, but confirm this is intentional.
    }
    $c->stash->{template} = '/tools/mixedmodels.mas';
}
1;
| solgenomics/sgn | lib/SGN/Controller/MixedModels.pm | Perl | mit | 428 |
% Interactive driver: read a list and a 1-based position from the user,
% then print the element found at that position.
go:- nl,write('Enter a list : '), read(L),
     write('Enter position: '), read(N),
     nth_ele(N,L,X),
     write('Element : '), write(X),nl.

% nth_ele(+N, +List, -X): X is the N-th element (1-based) of List.
% Base case: position 1 is the head; the cut commits to this answer.
nth_ele(1,[X|_],X):-!.
% Recursive case: step past the head and decrement the position.
nth_ele(N,[_|T],X):- N1 is N-1, nth_ele(N1,T,X).
| jatin69/lab-codes | artificial-intelligence/practicals/q15.pl | Perl | mit | 216 |
#!/usr/bin/env perl
#===============================================================================
#
# FILE: logtcp.pl
#
# USAGE: ./logtcp.pl
#
# DESCRIPTION: Logs the output from tcpping.
#
# OPTIONS: ---
# REQUIREMENTS: ---
# BUGS: ---
# NOTES: ---
# AUTHOR: YOUR NAME (),
# ORGANIZATION:
# VERSION: 1.0
# CREATED: 28/08/17 11:20:47
# REVISION: ---
#===============================================================================
use strict;
use warnings;
use utf8;
use File::Slurp;
# Defaults: localhost:22, no log file, spike threshold 60% above the
# rolling average RTT, debug off.
my $host = $ARGV[0] || 'localhost';
my $port = $ARGV[1] || '22';
my $filename = $ARGV[2] || undef;
my $rtt_max = $ARGV[3] || '60'; # in % of avg rtt
my $DEBUG = $ARGV[4] || '0';

print "Usage: ./logtcp.pl <host> <port> [filename] [rtt_max_%_of_avg] [debug]\n./logtcp.pl localhost 80 60 file.log\n" and exit(0) if $host =~ m/-h|--help/xm;

# Shared state used by are_you_there()/check_for_rtt_spikes():
# $down/$down_date/$dt track the current outage, @avg_ms holds the
# sliding window of recent round-trip times.
my ($dt, $down_date, $down, @avg_ms);
my $repeat = 2; my $i=0; my $last_time_of_rtt_spike = time;

print "TCP-Pinging ${host}:${port} RTT_MAX: $rtt_max";
$DEBUG ? print " DEBUG 1\n" : print "\n";

# Main loop: one TCP SYN "ping" every $repeat seconds (hping3 needs
# root, hence sudo); stderr is merged so the loss/RTT stats are parsed.
while ( 1 ) {
    my $tp = `sudo hping3 -c 1 -i 2 -S -p $port $host 2>&1 `;
    are_you_there($tp, $i);
    check_for_rtt_spikes($tp);
    $i++ if $i < 5;
    sleep $repeat;
}
# Track a rolling RTT average (last ~100 samples in the file-scope
# @avg_ms) and report/log samples more than $rtt_max percent above it.
# Fix: the original used several "my $x = EXPR if COND;" statements,
# which perlsyn documents as undefined behaviour; variables are now
# declared unconditionally and assigned conditionally.  The unused $lat
# local was dropped.
sub check_for_rtt_spikes {
    my $tp = shift;

    # Nothing to do unless hping3 reported a round-trip time.
    return unless $tp =~ m/rtt=(.*?)\sms/xm;
    my $ms = $1;

    # Cap the sliding window at ~100 samples.
    shift @avg_ms if scalar @avg_ms > 100;

    # Compute the average and relative deviation BEFORE pushing the new
    # sample, so a spike does not skew its own baseline.
    my ($avg, $a_per);
    if (scalar @avg_ms) {
        my $sum = 0;
        $sum += $_ for @avg_ms;
        $avg = $sum / scalar @avg_ms;
        my $a_diff = $ms - $avg;
        $a_per = ($a_diff / $ms) * 100;
    }
    push @avg_ms, $ms;

    if ($DEBUG) {
        print "rtt=$ms ";
        defined $a_per
            ? printf "avg_rtt=%.1f diff_rtt=%.1f%%\n", $avg, $a_per
            : print "\n";
    }

    # Report (and optionally log) the spike.
    if (defined $a_per and $a_per > $rtt_max) {
        chomp(my $date = `date +%c`);
        my $t_rtt = time;
        my $out = sprintf "%s >>>> RTT spike of %4.1f%% , %6.3fms , avg %.3fms, last spike %5ds ago\n",
                          $date, $a_per, $ms, $avg, ($t_rtt - $last_time_of_rtt_spike);
        print $out;
        write_file($filename, { append => 1 }, $out ) if defined $filename;
        pop @avg_ms; # Remove spike latency , as not to skew to much
        $last_time_of_rtt_spike = time;
    }
}
# Detect transitions between "host reachable" and "host down" from
# hping3's packet-loss summary, and report the outage duration once the
# host answers again.  Uses the file-scope state $down/$down_date/$dt.
# ($i is accepted but currently unused -- the commented-out block below
# used it to abort during the initial probes.)
sub are_you_there{
    my $tp = shift;
    my $i= shift;
    # "100% packet loss" => the probe got no reply.
    if ( $tp =~ m/\s100%\s/xm ) { # 100% packet loss
        print "." if $DEBUG;
        # Record the outage start only on the first failed probe.
        chomp($down_date = `date +%c`) if ! $down;
        $dt = time if ! $down;
        $down = 1;
        # if ( $i <1 ) {
        # die "${host}:${port} not reachable\n";
        # }
    }
    # "0% packet loss" => reply received; close out any open outage.
    if ( $tp =~ m/\s0%\s/xm ) {
        if ( $down ) {
            chomp(my $date = `date +%c`);
            my $ct = time;
            my $out = "$down_date to $date ${host}:${port} down for ". ($ct - $dt) ."s\n";
            print $out;
            write_file($filename, { append => 1 }, $out ) if defined $filename;
            $down = 0;
        }
    }
}
| torstefan/logtcp | logtcp.pl | Perl | mit | 2,961 |
#!/usr/local/bin/perl
use MSPDI;
#########################################################################
# Usage:
#
# use MSPDI;
# use InclusionRectangles;
#
# # This function returns the left and right vector respectively
# sub differentialInclusion { my ($x,$y) = @_; return(($y, -$x), ($y, -$x)); }
#
# rectanglesCentre ((-10,-10), (10,10), (20,20));
# ^ ^ ^
# lower upper number of
# left right rectangles
# coord coord in the x and
# y direction
#
# or
#
# sub differentialInclusion { my (($x1,$y1),($x2,$y2), ($x3,$y3), ($x4, $y4)) = @_; return(...); }
#
# rectanglesCorners((-10,-10), (10,10), (20,20));
#
#########################################################################
sub rectanglesCentre { rectanglesGeneric("1",@_); }
sub rectanglesCorners { rectanglesGeneric("0",@_); }
# A generic rectangle grid generator
sub rectanglesGeneric {
    # Generate an SPDI description for a rectangular grid.
    #
    # Arguments:
    #   $centre                  - "1": evaluate differentialInclusion() at
    #                              each rectangle's centre; "0": pass the
    #                              rectangle's four corners instead
    #   $grid_left, $grid_bottom - lower-left corner of the whole grid
    #   $grid_right, $grid_top   - upper-right corner of the whole grid
    #   $xsize, $ysize           - number of rectangles in x and y direction
    #
    # Relies on startMSPDI/endMSPDI, newPoint, newVector, newRegion,
    # newRegionComment and constraint from MSPDI.pm, plus a user-supplied
    # differentialInclusion() returning the left and right vectors.
    my ($centre, $grid_left, $grid_bottom, $grid_right, $grid_top, $xsize, $ysize) = @_;

    startMSPDI;

    # Size of one grid cell.
    my $base   = ($grid_right - $grid_left) / $xsize;
    my $height = ($grid_top - $grid_bottom) / $ysize;

    # The grid parameters come from the caller, so emit constraints that
    # are checked at SPDI generation time.
    constraint ("Top of grid must be higher than bottom", $grid_top > $grid_bottom);
    constraint ("Left of grid must be smaller than right limit", $grid_left < $grid_right);
    constraint ("Grid must be at least 1x1", $xsize > 0 && $ysize > 0);

    # Emit the (xsize+1) x (ysize+1) grid points.
    my $ycoord = $grid_bottom;
    for (my $y = 0; $y <= $ysize; $y++) {
        my $xcoord = $grid_left;
        for (my $x = 0; $x <= $xsize; $x++) {
            newPoint("P_$x\_$y", $xcoord, $ycoord);
            $xcoord += $base;
        }
        $ycoord += $height;
    }

    # Emit one region (with its left/right vectors) per rectangle.
    $ycoord = $grid_bottom;
    for (my $y = 0; $y < $ysize; $y++) {
        my $xcoord = $grid_left;
        for (my $x = 0; $x < $xsize; $x++) {
            my $inc_x = $x + 1;
            my $inc_y = $y + 1;
            my $inc_xcoord = $xcoord + $base;
            my $inc_ycoord = $ycoord + $height;
            my ($vector_left_x, $vector_left_y, $vector_right_x, $vector_right_y);
            if ($centre eq "1") {   # string flag; was compared numerically with ==
                # BUG FIX: the centre of the rectangle whose lower-left corner
                # is ($xcoord,$ycoord) is that corner plus half a cell; the old
                # code started from ($inc_xcoord,$inc_ycoord) and therefore
                # evaluated the inclusion at the centre of a neighbouring cell.
                my ($xcentre, $ycentre) = ($xcoord + $base/2, $ycoord + $height/2);
                ($vector_left_x, $vector_left_y, $vector_right_x, $vector_right_y) =
                    differentialInclusion($xcentre, $ycentre);
            } else {
                # Corner mode: four corners counter-clockwise from lower-left.
                # (The extra parentheses in the old assignment were flattened
                # by Perl anyway; a flat list makes the contract explicit.)
                ($vector_left_x, $vector_left_y, $vector_right_x, $vector_right_y) =
                    differentialInclusion($xcoord, $ycoord, $inc_xcoord, $ycoord,
                                          $inc_xcoord, $inc_ycoord, $xcoord, $inc_ycoord);
            }
            newVector("Vl_$x\_$y", $vector_left_x, $vector_left_y);
            newVector("Vr_$x\_$y", $vector_right_x, $vector_right_y);
            newRegionComment(" rectangle whose lower left corner is at ($xcoord, $ycoord)");
            newRegion("Vl_$x\_$y", "Vr_$x\_$y",
                      "P_$x\_$y", "P_$inc_x\_$y", "P_$inc_x\_$inc_y", "P_$x\_$inc_y", "P_$x\_$y");
            $xcoord += $base;
        }
        $ycoord += $height;
    }

    endMSPDI;
}
1;
| asandler/spdi | examples/MSPDI/lib/InclusionRectangles.pm | Perl | mit | 3,524 |
# This file is auto-generated by the Perl DateTime Suite time zone
# code generator (0.07) This code generator comes with the
# DateTime::TimeZone module distribution in the tools/ directory
#
# Generated from debian/tzdata/northamerica. Olson data version 2008c
#
# Do not edit this file directly.
#
package DateTime::TimeZone::America::North_Dakota::Center;
use strict;
use Class::Singleton;
use DateTime::TimeZone;
use DateTime::TimeZone::OlsonDB;
@DateTime::TimeZone::America::North_Dakota::Center::ISA = ( 'Class::Singleton', 'DateTime::TimeZone' );
my $spans =
[
[
DateTime::TimeZone::NEG_INFINITY,
59418039600,
DateTime::TimeZone::NEG_INFINITY,
59418015288,
-24312,
0,
'LMT'
],
[
59418039600,
60502410000,
59418014400,
60502384800,
-25200,
0,
'MT'
],
[
60502410000,
60520550400,
60502388400,
60520528800,
-21600,
1,
'MDT'
],
[
60520550400,
60533859600,
60520525200,
60533834400,
-25200,
0,
'MST'
],
[
60533859600,
60552000000,
60533838000,
60551978400,
-21600,
1,
'MDT'
],
[
60552000000,
61255472400,
60551974800,
61255447200,
-25200,
0,
'MST'
],
[
61255472400,
61366287600,
61255450800,
61366266000,
-21600,
1,
'MWT'
],
[
61366287600,
61370294400,
61366266000,
61370272800,
-21600,
1,
'MPT'
],
[
61370294400,
62051302800,
61370269200,
62051277600,
-25200,
0,
'MST'
],
[
62051302800,
62067024000,
62051281200,
62067002400,
-21600,
1,
'MDT'
],
[
62067024000,
62082752400,
62066998800,
62082727200,
-25200,
0,
'MST'
],
[
62082752400,
62098473600,
62082730800,
62098452000,
-21600,
1,
'MDT'
],
[
62098473600,
62114202000,
62098448400,
62114176800,
-25200,
0,
'MST'
],
[
62114202000,
62129923200,
62114180400,
62129901600,
-21600,
1,
'MDT'
],
[
62129923200,
62145651600,
62129898000,
62145626400,
-25200,
0,
'MST'
],
[
62145651600,
62161372800,
62145630000,
62161351200,
-21600,
1,
'MDT'
],
[
62161372800,
62177101200,
62161347600,
62177076000,
-25200,
0,
'MST'
],
[
62177101200,
62193427200,
62177079600,
62193405600,
-21600,
1,
'MDT'
],
[
62193427200,
62209155600,
62193402000,
62209130400,
-25200,
0,
'MST'
],
[
62209155600,
62224876800,
62209134000,
62224855200,
-21600,
1,
'MDT'
],
[
62224876800,
62240605200,
62224851600,
62240580000,
-25200,
0,
'MST'
],
[
62240605200,
62256326400,
62240583600,
62256304800,
-21600,
1,
'MDT'
],
[
62256326400,
62262378000,
62256301200,
62262352800,
-25200,
0,
'MST'
],
[
62262378000,
62287776000,
62262356400,
62287754400,
-21600,
1,
'MDT'
],
[
62287776000,
62298061200,
62287750800,
62298036000,
-25200,
0,
'MST'
],
[
62298061200,
62319225600,
62298039600,
62319204000,
-21600,
1,
'MDT'
],
[
62319225600,
62334954000,
62319200400,
62334928800,
-25200,
0,
'MST'
],
[
62334954000,
62351280000,
62334932400,
62351258400,
-21600,
1,
'MDT'
],
[
62351280000,
62366403600,
62351254800,
62366378400,
-25200,
0,
'MST'
],
[
62366403600,
62382729600,
62366382000,
62382708000,
-21600,
1,
'MDT'
],
[
62382729600,
62398458000,
62382704400,
62398432800,
-25200,
0,
'MST'
],
[
62398458000,
62414179200,
62398436400,
62414157600,
-21600,
1,
'MDT'
],
[
62414179200,
62429907600,
62414154000,
62429882400,
-25200,
0,
'MST'
],
[
62429907600,
62445628800,
62429886000,
62445607200,
-21600,
1,
'MDT'
],
[
62445628800,
62461357200,
62445603600,
62461332000,
-25200,
0,
'MST'
],
[
62461357200,
62477078400,
62461335600,
62477056800,
-21600,
1,
'MDT'
],
[
62477078400,
62492806800,
62477053200,
62492781600,
-25200,
0,
'MST'
],
[
62492806800,
62508528000,
62492785200,
62508506400,
-21600,
1,
'MDT'
],
[
62508528000,
62524256400,
62508502800,
62524231200,
-25200,
0,
'MST'
],
[
62524256400,
62540582400,
62524234800,
62540560800,
-21600,
1,
'MDT'
],
[
62540582400,
62555706000,
62540557200,
62555680800,
-25200,
0,
'MST'
],
[
62555706000,
62572032000,
62555684400,
62572010400,
-21600,
1,
'MDT'
],
[
62572032000,
62587760400,
62572006800,
62587735200,
-25200,
0,
'MST'
],
[
62587760400,
62603481600,
62587738800,
62603460000,
-21600,
1,
'MDT'
],
[
62603481600,
62619210000,
62603456400,
62619184800,
-25200,
0,
'MST'
],
[
62619210000,
62634931200,
62619188400,
62634909600,
-21600,
1,
'MDT'
],
[
62634931200,
62650659600,
62634906000,
62650634400,
-25200,
0,
'MST'
],
[
62650659600,
62666380800,
62650638000,
62666359200,
-21600,
1,
'MDT'
],
[
62666380800,
62680294800,
62666355600,
62680269600,
-25200,
0,
'MST'
],
[
62680294800,
62697830400,
62680273200,
62697808800,
-21600,
1,
'MDT'
],
[
62697830400,
62711744400,
62697805200,
62711719200,
-25200,
0,
'MST'
],
[
62711744400,
62729884800,
62711722800,
62729863200,
-21600,
1,
'MDT'
],
[
62729884800,
62743194000,
62729859600,
62743168800,
-25200,
0,
'MST'
],
[
62743194000,
62761334400,
62743172400,
62761312800,
-21600,
1,
'MDT'
],
[
62761334400,
62774643600,
62761309200,
62774618400,
-25200,
0,
'MST'
],
[
62774643600,
62792784000,
62774622000,
62792762400,
-21600,
1,
'MDT'
],
[
62792784000,
62806698000,
62792758800,
62806672800,
-25200,
0,
'MST'
],
[
62806698000,
62824233600,
62806676400,
62824212000,
-21600,
1,
'MDT'
],
[
62824233600,
62838147600,
62824208400,
62838122400,
-25200,
0,
'MST'
],
[
62838147600,
62855683200,
62838126000,
62855661600,
-21600,
1,
'MDT'
],
[
62855683200,
62869593600,
62855661600,
62869572000,
-21600,
0,
'CST'
],
[
62869593600,
62887734000,
62869575600,
62887716000,
-18000,
1,
'CDT'
],
[
62887734000,
62901043200,
62887712400,
62901021600,
-21600,
0,
'CST'
],
[
62901043200,
62919183600,
62901025200,
62919165600,
-18000,
1,
'CDT'
],
[
62919183600,
62932492800,
62919162000,
62932471200,
-21600,
0,
'CST'
],
[
62932492800,
62950633200,
62932474800,
62950615200,
-18000,
1,
'CDT'
],
[
62950633200,
62964547200,
62950611600,
62964525600,
-21600,
0,
'CST'
],
[
62964547200,
62982082800,
62964529200,
62982064800,
-18000,
1,
'CDT'
],
[
62982082800,
62995996800,
62982061200,
62995975200,
-21600,
0,
'CST'
],
[
62995996800,
63013532400,
62995978800,
63013514400,
-18000,
1,
'CDT'
],
[
63013532400,
63027446400,
63013510800,
63027424800,
-21600,
0,
'CST'
],
[
63027446400,
63044982000,
63027428400,
63044964000,
-18000,
1,
'CDT'
],
[
63044982000,
63058896000,
63044960400,
63058874400,
-21600,
0,
'CST'
],
[
63058896000,
63077036400,
63058878000,
63077018400,
-18000,
1,
'CDT'
],
[
63077036400,
63090345600,
63077014800,
63090324000,
-21600,
0,
'CST'
],
[
63090345600,
63108486000,
63090327600,
63108468000,
-18000,
1,
'CDT'
],
[
63108486000,
63121795200,
63108464400,
63121773600,
-21600,
0,
'CST'
],
[
63121795200,
63139935600,
63121777200,
63139917600,
-18000,
1,
'CDT'
],
[
63139935600,
63153849600,
63139914000,
63153828000,
-21600,
0,
'CST'
],
[
63153849600,
63171385200,
63153831600,
63171367200,
-18000,
1,
'CDT'
],
[
63171385200,
63185299200,
63171363600,
63185277600,
-21600,
0,
'CST'
],
[
63185299200,
63202834800,
63185281200,
63202816800,
-18000,
1,
'CDT'
],
[
63202834800,
63216748800,
63202813200,
63216727200,
-21600,
0,
'CST'
],
[
63216748800,
63234889200,
63216730800,
63234871200,
-18000,
1,
'CDT'
],
[
63234889200,
63248198400,
63234867600,
63248176800,
-21600,
0,
'CST'
],
[
63248198400,
63266338800,
63248180400,
63266320800,
-18000,
1,
'CDT'
],
[
63266338800,
63279648000,
63266317200,
63279626400,
-21600,
0,
'CST'
],
[
63279648000,
63297788400,
63279630000,
63297770400,
-18000,
1,
'CDT'
],
[
63297788400,
63309283200,
63297766800,
63309261600,
-21600,
0,
'CST'
],
[
63309283200,
63329842800,
63309265200,
63329824800,
-18000,
1,
'CDT'
],
[
63329842800,
63340732800,
63329821200,
63340711200,
-21600,
0,
'CST'
],
[
63340732800,
63361292400,
63340714800,
63361274400,
-18000,
1,
'CDT'
],
[
63361292400,
63372182400,
63361270800,
63372160800,
-21600,
0,
'CST'
],
[
63372182400,
63392742000,
63372164400,
63392724000,
-18000,
1,
'CDT'
],
[
63392742000,
63404236800,
63392720400,
63404215200,
-21600,
0,
'CST'
],
[
63404236800,
63424796400,
63404218800,
63424778400,
-18000,
1,
'CDT'
],
[
63424796400,
63435686400,
63424774800,
63435664800,
-21600,
0,
'CST'
],
[
63435686400,
63456246000,
63435668400,
63456228000,
-18000,
1,
'CDT'
],
[
63456246000,
63467136000,
63456224400,
63467114400,
-21600,
0,
'CST'
],
[
63467136000,
63487695600,
63467118000,
63487677600,
-18000,
1,
'CDT'
],
[
63487695600,
63498585600,
63487674000,
63498564000,
-21600,
0,
'CST'
],
[
63498585600,
63519145200,
63498567600,
63519127200,
-18000,
1,
'CDT'
],
[
63519145200,
63530035200,
63519123600,
63530013600,
-21600,
0,
'CST'
],
[
63530035200,
63550594800,
63530017200,
63550576800,
-18000,
1,
'CDT'
],
[
63550594800,
63561484800,
63550573200,
63561463200,
-21600,
0,
'CST'
],
[
63561484800,
63582044400,
63561466800,
63582026400,
-18000,
1,
'CDT'
],
[
63582044400,
63593539200,
63582022800,
63593517600,
-21600,
0,
'CST'
],
[
63593539200,
63614098800,
63593521200,
63614080800,
-18000,
1,
'CDT'
],
[
63614098800,
63624988800,
63614077200,
63624967200,
-21600,
0,
'CST'
],
[
63624988800,
63645548400,
63624970800,
63645530400,
-18000,
1,
'CDT'
],
[
63645548400,
63656438400,
63645526800,
63656416800,
-21600,
0,
'CST'
],
[
63656438400,
63676998000,
63656420400,
63676980000,
-18000,
1,
'CDT'
],
[
63676998000,
63687888000,
63676976400,
63687866400,
-21600,
0,
'CST'
],
[
63687888000,
63708447600,
63687870000,
63708429600,
-18000,
1,
'CDT'
],
];
sub olson_version { '2008c' }
sub has_dst_changes { 57 }
sub _max_year { 2018 }
# Build the singleton instance, handing the pre-computed observance
# spans to the DateTime::TimeZone initialiser.
sub _new_instance
{
return shift->_init( @_, spans => $spans );
}
sub _last_offset { -21600 }
my $last_observance = bless( {
'format' => 'C%sT',
'gmtoff' => '-6:00',
'local_start_datetime' => bless( {
'formatter' => undef,
'local_rd_days' => 727496,
'local_rd_secs' => 7200,
'offset_modifier' => 0,
'rd_nanosecs' => 0,
'tz' => bless( {
'name' => 'floating',
'offset' => 0
}, 'DateTime::TimeZone::Floating' ),
'utc_rd_days' => 727496,
'utc_rd_secs' => 7200,
'utc_year' => 1993
}, 'DateTime' ),
'offset_from_std' => 0,
'offset_from_utc' => -21600,
'until' => [],
'utc_start_datetime' => bless( {
'formatter' => undef,
'local_rd_days' => 727496,
'local_rd_secs' => 28800,
'offset_modifier' => 0,
'rd_nanosecs' => 0,
'tz' => bless( {
'name' => 'floating',
'offset' => 0
}, 'DateTime::TimeZone::Floating' ),
'utc_rd_days' => 727496,
'utc_rd_secs' => 28800,
'utc_year' => 1993
}, 'DateTime' )
}, 'DateTime::TimeZone::OlsonDB::Observance' )
;
sub _last_observance { $last_observance }
my $rules = [
bless( {
'at' => '2:00',
'from' => '2007',
'in' => 'Mar',
'letter' => 'D',
'name' => 'US',
'offset_from_std' => 3600,
'on' => 'Sun>=8',
'save' => '1:00',
'to' => 'max',
'type' => undef
}, 'DateTime::TimeZone::OlsonDB::Rule' ),
bless( {
'at' => '2:00',
'from' => '2007',
'in' => 'Nov',
'letter' => 'S',
'name' => 'US',
'offset_from_std' => 0,
'on' => 'Sun>=1',
'save' => '0',
'to' => 'max',
'type' => undef
}, 'DateTime::TimeZone::OlsonDB::Rule' )
]
;
sub _rules { $rules }
1;
| carlgao/lenga | images/lenny64-peon/usr/share/perl5/DateTime/TimeZone/America/North_Dakota/Center.pm | Perl | mit | 11,739 |
use bytes;
#=====================================================================================
# Help.pm
# by Shinsuke MORI
# Last change : 17 February 1998
#=====================================================================================
# Function : Displays the script's documentation.
#
# Example  : none
#
# Note     : The script must have documentation attached to it.
#-------------------------------------------------------------------------------------
# declalations
#-------------------------------------------------------------------------------------
#-------------------------------------------------------------------------------------
# check arguments
#-------------------------------------------------------------------------------------
sub Help{
    # Print the script's documentation to STDERR and exit(0).
    #
    # The documentation is taken to be the first and the last block of the
    # script file (path passed as the only argument), where blocks are
    # separated by a triple newline.
    my $script = shift;
    # Triple-newline record separator, restored on scope exit;
    # the old code assigned the global $/ permanently.
    local $/ = "\n\n\n";
    # Three-arg open with a lexical handle; the old 1-arg open(SCRIPT)
    # relied on the package variable $SCRIPT and died without any message.
    open(my $fh, '<', $script) || die "cannot open $script: $!";
    my @line = <$fh>;
    print STDERR $line[0], $line[$#line];
    close($fh);
    exit(0);
}
#-------------------------------------------------------------------------------------
# return
#-------------------------------------------------------------------------------------
1;
#=====================================================================================
# END
#=====================================================================================
| tkd53/KKConv | lib/perl/Help.pm | Perl | mit | 1,475 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2020] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=head1 AUTHOR
Juguang Xiao <juguang@tll.org.sg>
=cut
=head1 NAME
Bio::EnsEMBL::Utils::Converter::bio_ens_featurePair
=head1 SYNOPISIS
=head1 DESCRIPTION
=head1 METHODS
=cut
package Bio::EnsEMBL::Utils::Converter::bio_ens_featurePair;
use strict;
use vars qw(@ISA);
use Bio::EnsEMBL::FeaturePair;
use Bio::EnsEMBL::RepeatConsensus;
use Bio::EnsEMBL::ProteinFeature;
use Bio::EnsEMBL::Utils::Converter;
use Bio::EnsEMBL::Utils::Converter::bio_ens;
@ISA = qw(Bio::EnsEMBL::Utils::Converter::bio_ens);
# Constructor hook: stores the optional -TRANSLATION_ID argument and
# builds the internal Bio::SeqFeature::Generic -> Bio::EnsEMBL::SeqFeature
# converter reused by the per-pair conversion methods below.
sub _initialize {
my ($self, @args) = @_;
$self->SUPER::_initialize(@args);
my ($translation_id) = $self->_rearrange([qw(TRANSLATION_ID)], @args);
$self->translation_id($translation_id);
# internal converter for seqFeature, shared by feature1/feature2 of each pair
$self->{_bio_ens_seqFeature} = new Bio::EnsEMBL::Utils::Converter (
-in => 'Bio::SeqFeature::Generic',
-out => 'Bio::EnsEMBL::SeqFeature',
);
}
# Convert one Bio::SeqFeature::FeaturePair into the Ensembl type selected
# by $self->out; throws on invalid input or an unsupported target class.
sub _convert_single {
    my ($self, $pair) = @_;

    unless ($pair && $pair->isa('Bio::SeqFeature::FeaturePair')) {
        $self->throw('a Bio::SeqFeature::FeaturePair object needed');
    }

    # Dispatch table: supported output class -> private worker method.
    my %worker_for = (
        'Bio::EnsEMBL::RepeatFeature'  => '_convert_single_to_repeatFeature',
        'Bio::EnsEMBL::FeaturePair'    => '_convert_single_to_featurePair',
        'Bio::EnsEMBL::ProteinFeature' => '_convert_single_to_proteinFeature',
    );

    my $worker = $worker_for{ $self->out };
    unless (defined $worker) {
        my $output_module = $self->out;
        $self->throw("Cannot covert to [$output_module]");
    }
    return $self->$worker($pair);
}
# Build a Bio::EnsEMBL::FeaturePair from a Bio::SeqFeature::FeaturePair by
# converting each member feature with the internal seqFeature converter,
# after pointing that converter at the currently configured contig and
# analysis.
sub _convert_single_to_featurePair {
my ($self, $pair) = @_;
my $feature1 = $pair->feature1;
my $feature2 = $pair->feature2;
# Propagate the current contig/analysis into the shared sub-converter.
$self->{_bio_ens_seqFeature}->contig($self->contig);
$self->{_bio_ens_seqFeature}->analysis($self->analysis);
my $ens_f1 = $self->{_bio_ens_seqFeature}->_convert_single($feature1);
my $ens_f2 = $self->{_bio_ens_seqFeature}->_convert_single($feature2);
my $ens_fp = Bio::EnsEMBL::FeaturePair->new(
-feature1 => $ens_f1,
-feature2 => $ens_f2
);
return $ens_fp;
}
# Convert to a Bio::EnsEMBL::ProteinFeature: reuse the FeaturePair
# conversion, rewrap its two features, and stamp the result with the
# translation id configured on this converter.
sub _convert_single_to_proteinFeature {
my ($self, $pair) = @_;
my $featurePair = $self->_convert_single_to_featurePair($pair);
my $proteinFeature = Bio::EnsEMBL::ProteinFeature->new(
-feature1 => $featurePair->feature1,
-feature2 => $featurePair->feature2
);
$proteinFeature->seqname($self->translation_id);
return $proteinFeature;
}
# Convert to a Bio::EnsEMBL::RepeatFeature.  feature1 supplies the genomic
# coordinates; feature2 supplies the repeat hit, whose start/end are
# swapped when feature1 lies on the reverse strand.
sub _convert_single_to_repeatFeature {
my ($self, $pair) = @_;
my $feature1 = $pair->feature1;
my $feature2 = $pair->feature2;
my $ens_repeatfeature = new Bio::EnsEMBL::RepeatFeature(
-seqname => $feature1->seq_id,
-start => $feature1->start,
-end => $feature1->end,
-strand => $feature1->strand,
-source_tag => $feature1->source_tag,
);
# Hit coordinates follow feature1's orientation; any strand other than
# +1/-1 is rejected.
my ($h_start, $h_end);
if($feature1->strand == 1){
$h_start = $feature2->start;
$h_end = $feature2->end;
}elsif($feature1->strand == -1){
$h_start = $feature2->end;
$h_end = $feature2->start;
}else{
$self->throw("strand cannot be outside of (1, -1)");
}
$ens_repeatfeature->hstart($h_start);
$ens_repeatfeature->hend($h_end);
my $repeat_name = $feature2->seq_id;
# Repeat class falls back from feature1's primary tag to feature2's, and
# finally to the literal "not sure" when neither is set.
my $repeat_class = $feature1->primary_tag;
$repeat_class ||= $feature2->primary_tag;
$repeat_class ||= "not sure";
my $ens_repeat_consensus =
$self->_create_consensus($repeat_name, $repeat_class);
$ens_repeatfeature->repeat_consensus($ens_repeat_consensus);
# contig() may hold a single contig or an array ref; use the first entry.
my($contig) = ref ($self->contig) eq 'ARRAY' ? @{$self->contig} : $self->contig;
$ens_repeatfeature->attach_seq($contig);
$ens_repeatfeature->analysis($self->analysis);
return $ens_repeatfeature;
}
# Build a Bio::EnsEMBL::RepeatConsensus carrying the given repeat name
# and repeat class.
sub _create_consensus{
    my ($self, $name, $class) = @_;

    my $repeat_consensus = Bio::EnsEMBL::RepeatConsensus->new();
    $repeat_consensus->name($name);
    $repeat_consensus->repeat_class($class);

    return $repeat_consensus;
}
1;
| james-monkeyshines/ensembl | modules/Bio/EnsEMBL/Utils/Converter/bio_ens_featurePair.pm | Perl | apache-2.0 | 5,056 |
#
# Copyright 2021 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::checkpoint::snmp::mode::disk;
use base qw(centreon::plugins::templates::counter);
use strict;
use warnings;
# Format the human-readable status line for one disk.
# NOTE(review): each change_bytes() call appears to return a
# (value, unit) pair in list context - that is why the format string
# consumes two %s per size; confirm against centreon's perfdata helper.
sub custom_disk_output {
my ($self, %options) = @_;
return sprintf(
'total: %s %s used: %s %s (%.2f%%) free: %s %s (%.2f%%)',
$self->{perfdata}->change_bytes(value => $self->{result_values}->{total}),
$self->{perfdata}->change_bytes(value => $self->{result_values}->{used}),
$self->{result_values}->{prct_used},
$self->{perfdata}->change_bytes(value => $self->{result_values}->{free}),
$self->{result_values}->{prct_free}
);
}
# Declare the counter tree consumed by the centreon counter template:
# one instanced 'disk' group keyed by disk name, with three threshold /
# perfdata definitions (absolute usage, absolute free space, percent used).
sub set_counters {
my ($self, %options) = @_;
$self->{maps_counters_type} = [
# NOTE(review): type => 1 marks per-instance counters in the template
# framework; skipped_code -10 presumably ignores missing values.
{ name => 'disk', type => 1, cb_prefix_output => 'prefix_disk_output', message_multiple => 'All disks are ok', skipped_code => { -10 => 1 } }
];
$self->{maps_counters}->{disk} = [
{ label => 'usage', nlabel => 'disk.usage.bytes', set => {
key_values => [ { name => 'used' }, { name => 'free' }, { name => 'prct_used' }, { name => 'prct_free' }, { name => 'total' }, { name => 'display' } ],
closure_custom_output => $self->can('custom_disk_output'),
perfdatas => [
{ template => '%d', min => 0, max => 'total', unit => 'B', cast_int => 1, label_extra_instance => 1 }
]
}
},
# display_ok => 0: hidden from normal output, still emitted as perfdata.
{ label => 'usage-free', display_ok => 0, nlabel => 'disk.free.bytes', set => {
key_values => [ { name => 'free' }, { name => 'used' }, { name => 'prct_used' }, { name => 'prct_free' }, { name => 'total' }, { name => 'display' } ],
closure_custom_output => $self->can('custom_disk_output'),
perfdatas => [
{ template => '%d', min => 0, max => 'total', unit => 'B', cast_int => 1, label_extra_instance => 1 }
]
}
},
{ label => 'usage-prct', display_ok => 0, nlabel => 'disk.usage.percentage', set => {
key_values => [ { name => 'prct_used' }, { name => 'display' } ],
output_template => 'used: %.2f %%',
perfdatas => [
{ template => '%.2f', min => 0, max => 100, unit => '%', label_extra_instance => 1 }
]
}
}
];
}
# Prefix printed before each per-disk counter line, e.g. "Disk '/var' ".
sub prefix_disk_output {
    my ($self, %options) = @_;

    return sprintf "Disk '%s' ", $options{instance_value}->{display};
}
# Plugin mode constructor: registers the --filter-name option and enables
# the new perfdata naming scheme (force_new_perfdata).
sub new {
my ($class, %options) = @_;
my $self = $class->SUPER::new(package => __PACKAGE__, %options, force_new_perfdata => 1);
bless $self, $class;
$options{options}->add_options(arguments => {
'filter-name:s' => { name => 'filter_name' }
});
return $self;
}
# Checkpoint (enterprise 2620) multiDisk table OIDs: per-disk name,
# total size and used size, plus the table entry root used for the walk.
my $mapping = {
multiDiskName => { oid => '.1.3.6.1.4.1.2620.1.6.7.6.1.2' },
multiDiskSize => { oid => '.1.3.6.1.4.1.2620.1.6.7.6.1.3' },
multiDiskUsed => { oid => '.1.3.6.1.4.1.2620.1.6.7.6.1.4' }
};
my $oid_multiDiskEntry = '.1.3.6.1.4.1.2620.1.6.7.6.1';
# Walk the multiDisk SNMP table and fill $self->{disk} with per-disk
# usage figures (bytes used/free and percentages) for the counter
# framework; requires SNMP v2c/v3 (bulk walk) and honours --filter-name.
sub manage_selection {
my ($self, %options) = @_;
if ($options{snmp}->is_snmpv1()) {
$self->{output}->add_option_msg(short_msg => 'Need to use SNMP v2c or v3.');
$self->{output}->option_exit();
}
my $snmp_result = $options{snmp}->get_table(
oid => $oid_multiDiskEntry,
start => $mapping->{multiDiskName}->{oid},
end => $mapping->{multiDiskUsed}->{oid},
nothing_quit => 1
);
foreach (keys %$snmp_result) {
# Iterate only over the name column; $1 captures the row instance.
# NOTE(review): the OID's dots are not escaped in this match on $_,
# so '.' matches any character - harmless here, but worth confirming.
next if (! /^$mapping->{multiDiskName}->{oid}\.(.*)$/);
my $result = $options{snmp}->map_instance(mapping => $mapping, results => $snmp_result, instance => $1);
# Honour --filter-name (regex match on the disk name).
if (defined($self->{option_results}->{filter_name}) && $self->{option_results}->{filter_name} ne '' &&
$result->{multiDiskName} !~ /$self->{option_results}->{filter_name}/) {
$self->{output}->output_add(long_msg => "skipping '" . $result->{multiDiskName} . "': no matching filter.", debug => 1);
next;
}
$self->{disk}->{ $result->{multiDiskName} } = {
display => $result->{multiDiskName},
total => $result->{multiDiskSize},
prct_used => $result->{multiDiskUsed} * 100 / $result->{multiDiskSize},
prct_free => 100 - ($result->{multiDiskUsed} * 100 / $result->{multiDiskSize}),
used => $result->{multiDiskUsed},
free => $result->{multiDiskSize} - $result->{multiDiskUsed}
};
}
}
1;
__END__
=head1 MODE
Check disk usage.
=over 8
=item B<--filter-name>
Filter disk name.
=item B<--warning-*> B<--critical-*>
Thresholds.
Can be: 'usage' (B), 'usage-free' (B), 'usage-prct' (%).
=back
=cut
| Tpo76/centreon-plugins | network/checkpoint/snmp/mode/disk.pm | Perl | apache-2.0 | 5,505 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package XrefParser::InterproFromCoreParser;
use strict;
use warnings;
use Carp;
use vars qw(@ISA);
@ISA = qw(XrefParser::BaseParser);
use Bio::EnsEMBL::Registry;
sub run_script {
my ($self, $ref_arg) = @_;
my $source_id = $ref_arg->{source_id};
my $species_id = $ref_arg->{species_id};
my $file = $ref_arg->{file};
my $verbose = $ref_arg->{verbose};
if((!defined $source_id) or (!defined $species_id) or (!defined $file) ){
croak "Need to pass source_id, species_id and file as pairs";
}
$verbose |=0;
my $project;
if ($file =~ /project[=][>](\S+?)[,]/) {
$project = $1;
}
my $registry = "Bio::EnsEMBL::Registry";
if ($project eq 'ensembl') {
$registry->load_registry_from_multiple_dbs(
{
'-host' => 'ens-staging1',
'-user' => 'ensro',
},
{
'-host' => 'ens-staging2',
'-user' => 'ensro',
},
);
} elsif ($project eq 'ensemblgenomes') {
$registry->load_registry_from_multiple_dbs(
{
'-host' => 'mysql-eg-staging-1.ebi.ac.uk',
'-port' => 4160,
'-user' => 'ensro',
},
{
'-host' => 'mysql-eg-staging-2.ebi.ac.uk',
'-port' => 4275,
'-user' => 'ensro',
},
);
} else {
die("Missing or unsupported project value. Supported values: ensembl, ensemblgenomes");
}
my %id2name = $self->species_id2name;
my $species_name = $id2name{$species_id}[0];
my %interpro = $self->get_core_interpro($registry, $species_name);
my $add_interpro_sth = $self->dbi()->prepare(
"INSERT IGNORE INTO interpro (interpro, pfam, dbtype) VALUES(?,?,?)"
);
my $add_xref_sth = $self->dbi()->prepare(
"INSERT IGNORE INTO xref ".
"(accession, label, description, source_id, species_id, info_type) ".
"VALUES(?,?,?,?,?,?)"
);
# The InterproScan pipeline uses additional sources for Interpro
# links (e.g. Gene3D, Panther), so in order to replicate that in the xref
# database, remove the restriction on the column contents.
$self->dbi()->do("ALTER TABLE interpro MODIFY COLUMN dbtype VARCHAR(25);");
foreach my $interpro_id (sort keys %interpro) {
foreach my $db_type (sort keys %{$interpro{$interpro_id}}) {
foreach my $id (sort keys %{$interpro{$interpro_id}{$db_type}}) {
my $added =
$add_xref_sth->execute(
$interpro_id,
$interpro{$interpro_id}{$db_type}{$id}{'short_name'},
$interpro{$interpro_id}{$db_type}{$id}{'name'},
$source_id,
$species_id,
$interpro{$interpro_id}{$db_type}{$id}{'info_type'},
);
if ( !$added ) {
print STDERR "Problem adding '$interpro_id'\n";
return 1; # 1 is an error
}
$added =
$add_interpro_sth->execute(
$interpro_id,
$id,
$db_type
);
if ( !$added ) {
print STDERR "Problem adding '$interpro_id'/".$interpro{$interpro_id}{$db_type}{'id'}."\n";
return 1; # 1 is an error
}
}
}
}
return 0;
}
sub get_core_interpro {
my ($self, $registry, $species_name) = @_;
my $dba = $registry->get_DBAdaptor($species_name, "core");
# Get interpro terms and related information
my %interpro;
my $sql =
'select distinct '.
'i.interpro_ac, i.id, '.
'x.display_label, x.description, x.info_type, '.
'a.logic_name '.
'from xref x '.
'inner join interpro i on x.dbprimary_acc = i.interpro_ac '.
'inner join protein_feature pf on i.id = pf.hit_name '.
'inner join analysis a on pf.analysis_id = a.analysis_id;';
my $sth = $dba->dbc()->prepare($sql);
$sth->execute();
# Ensembl analysis logic names don't match with the terms that
# Interpro uses, but mapping is easy enough.
my %dbtypes = (
'gene3d' => 'GENE3D',
'hmmpanther' => 'PANTHER',
'pfam' => 'PFAM',
'pfscan' => 'PROFILE',
'pirsf' => 'PIRSF',
'prints' => 'PRINTS',
'scanprosite' => 'PROSITE',
'smart' => 'SMART',
'superfamily' => 'SSF',
'tigrfam' => 'TIGRFAMs',
);
while (my @row = $sth->fetchrow_array()) {
my $interpro_id = $row[0];
my $db_type = $dbtypes{$row[5]};
my $id = $row[1];
if (defined $db_type) {
$interpro{$interpro_id}{$db_type}{$id}{'short_name'} = $row[2];
$interpro{$interpro_id}{$db_type}{$id}{'name'} = $row[3];
$interpro{$interpro_id}{$db_type}{$id}{'info_type'} = $row[4];
}
}
print "Retrieved ".scalar(keys %interpro)." interpro ids.\n";
return %interpro;
}
1;
| danstaines/ensembl | misc-scripts/xref_mapping/XrefParser/InterproFromCoreParser.pm | Perl | apache-2.0 | 5,343 |
#
# Copyright 2015 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package snmp_standard::mode::liststorages;
use base qw(centreon::plugins::mode);
use strict;
use warnings;
# Columns of the HOST-RESOURCES-MIB storage and filesystem tables used to
# resolve storage descriptions, mount points and their cross-index mapping.
my %oids_hrStorageTable = (
    'hrstoragedescr'    => '.1.3.6.1.2.1.25.2.3.1.3',
    'hrfsmountpoint'    => '.1.3.6.1.2.1.25.3.8.1.2',
    'hrfsstorageindex'  => '.1.3.6.1.2.1.25.3.8.1.7',
    'hrstoragetype'     => '.1.3.6.1.2.1.25.2.3.1.2',
);
# Per-entry columns fetched for each selected storage index (see
# get_additional_information): allocation unit size, total size and type OID.
my $oid_hrStorageAllocationUnits = '.1.3.6.1.2.1.25.2.3.1.4';
my $oid_hrStorageSize = '.1.3.6.1.2.1.25.2.3.1.5';
my $oid_hrStorageType = '.1.3.6.1.2.1.25.2.3.1.2';

# Map of storage/filesystem type OIDs to symbolic names; the symbolic name is
# what --filter-storage-type matches against.
my %storage_types_manage = (
    '.1.3.6.1.2.1.25.2.1.1'  => 'hrStorageOther',
    '.1.3.6.1.2.1.25.2.1.2'  => 'hrStorageRam',
    '.1.3.6.1.2.1.25.2.1.3'  => 'hrStorageVirtualMemory',
    '.1.3.6.1.2.1.25.2.1.4'  => 'hrStorageFixedDisk',
    '.1.3.6.1.2.1.25.2.1.5'  => 'hrStorageRemovableDisk',
    '.1.3.6.1.2.1.25.2.1.6'  => 'hrStorageFloppyDisk',
    '.1.3.6.1.2.1.25.2.1.7'  => 'hrStorageCompactDisc',
    '.1.3.6.1.2.1.25.2.1.8'  => 'hrStorageRamDisk',
    '.1.3.6.1.2.1.25.2.1.9'  => 'hrStorageFlashMemory',
    '.1.3.6.1.2.1.25.2.1.10' => 'hrStorageNetworkDisk',
    '.1.3.6.1.2.1.25.3.9.1'  => 'hrFSOther',
    '.1.3.6.1.2.1.25.3.9.2'  => 'hrFSUnknown',
    '.1.3.6.1.2.1.25.3.9.3'  => 'hrFSBerkeleyFFS', # For Freebsd
    '.1.3.6.1.2.1.25.3.9.4'  => 'hrFSSys5FS',
    '.1.3.6.1.2.1.25.3.9.5'  => 'hrFSFat',
    '.1.3.6.1.2.1.25.3.9.6'  => 'hrFSHPFS',
    '.1.3.6.1.2.1.25.3.9.7'  => 'hrFSHFS',
    '.1.3.6.1.2.1.25.3.9.8'  => 'hrFSMFS',
    '.1.3.6.1.2.1.25.3.9.9'  => 'hrFSNTFS',
    '.1.3.6.1.2.1.25.3.9.10' => 'hrFSVNode',
    '.1.3.6.1.2.1.25.3.9.11' => 'hrFSJournaled',
    '.1.3.6.1.2.1.25.3.9.12' => 'hrFSiso9660',
    '.1.3.6.1.2.1.25.3.9.13' => 'hrFSRockRidge',
    '.1.3.6.1.2.1.25.3.9.14' => 'hrFSNFS',
    '.1.3.6.1.2.1.25.3.9.15' => 'hrFSNetware',
    '.1.3.6.1.2.1.25.3.9.16' => 'hrFSAFS',
    '.1.3.6.1.2.1.25.3.9.17' => 'hrFSDFS',
    '.1.3.6.1.2.1.25.3.9.18' => 'hrFSAppleshare',
    '.1.3.6.1.2.1.25.3.9.19' => 'hrFSRFS',
    '.1.3.6.1.2.1.25.3.9.20' => 'hrFSDGCFS',
    '.1.3.6.1.2.1.25.3.9.21' => 'hrFSBFS',
    '.1.3.6.1.2.1.25.3.9.22' => 'hrFSFAT32',
    '.1.3.6.1.2.1.25.3.9.23' => 'hrFSLinuxExt2',
);
sub new {
    # Constructor: registers the command-line options accepted by this mode.
    my ($class, %options) = @_;
    my $self = bless $class->SUPER::new(package => __PACKAGE__, %options), $class;

    $self->{version} = '1.0';

    my %arguments = (
        "storage:s"               => { name => 'storage' },
        "name"                    => { name => 'use_name' },
        "regexp"                  => { name => 'use_regexp' },
        "regexp-isensitive"       => { name => 'use_regexpi' },
        "oid-filter:s"            => { name => 'oid_filter', default => 'hrStorageDescr'},
        "oid-display:s"           => { name => 'oid_display', default => 'hrStorageDescr'},
        "display-transform-src:s" => { name => 'display_transform_src' },
        "display-transform-dst:s" => { name => 'display_transform_dst' },
        "filter-storage-type:s"   => { name => 'filter_storage_type', default => '^(hrStorageFixedDisk|hrStorageNetworkDisk|hrFSBerkeleyFFS)$' },
    );
    $options{options}->add_options(arguments => \%arguments);

    # Filled in by manage_selection() with the storage indexes to report on.
    $self->{storage_id_selected} = [];
    return $self;
}
sub check_options {
    # Validate that --oid-filter and --oid-display name a supported OID
    # (hrStorageDescr or hrFSMountPoint); values are normalised to lowercase.
    my ($self, %options) = @_;
    $self->SUPER::init(%options);

    foreach my $opt ('oid_filter', 'oid_display') {
        my $value = lc($self->{option_results}->{$opt});
        $self->{option_results}->{$opt} = $value;
        if ($value !~ /^(hrstoragedescr|hrfsmountpoint)$/) {
            (my $label = $opt) =~ s/_/-/;
            $self->{output}->add_option_msg(short_msg => "Unsupported --" . $label . " option.");
            $self->{output}->option_exit();
        }
    }
}
sub run {
    # Normal mode: list every selected storage whose type matches
    # --filter-storage-type, with its total size in bytes.
    my ($self, %options) = @_;
    # $options{snmp} = snmp object
    $self->{snmp} = $options{snmp};

    $self->manage_selection();
    my $result = $self->get_additional_information();
    foreach (sort @{$self->{storage_id_selected}}) {
        my $display_value = $self->get_display_value(id => $_);
        my $storage_type = $result->{$oid_hrStorageType . "." . $_};
        # Map the raw type OID to its symbolic name. Unknown OIDs yield undef
        # and get skipped below, without triggering 'uninitialized value in
        # pattern match' warnings (the previous code matched the possibly-undef
        # hash lookup directly against the filter regexp).
        my $storage_type_name = defined($storage_type) ? $storage_types_manage{$storage_type} : undef;
        if (!defined($storage_type_name) ||
            ($storage_type_name !~ /$self->{option_results}->{filter_storage_type}/i)) {
            $self->{output}->output_add(long_msg => "Skipping storage '" . $display_value . "': no type or no matching filter type");
            next;
        }
        $self->{output}->output_add(long_msg => "'" . $display_value . "' [size = " . $result->{$oid_hrStorageSize . "." . $_} * $result->{$oid_hrStorageAllocationUnits . "." . $_} . "B] [id = $_]");
    }

    $self->{output}->output_add(severity => 'OK',
                                short_msg => 'List storage:');
    $self->{output}->display(nolabel => 1, force_ignore_perfdata => 1, force_long_output => 1);
    $self->{output}->exit();
}
sub get_additional_information {
    # Fetch type, allocation-unit size and total size for every selected
    # storage index in one SNMP request; returns the get_leef() result hash.
    my ($self, %options) = @_;

    my @wanted_oids = ($oid_hrStorageType, $oid_hrStorageAllocationUnits, $oid_hrStorageSize);
    $self->{snmp}->load(oids => \@wanted_oids, instances => $self->{storage_id_selected});
    return $self->{snmp}->get_leef();
}
sub get_display_value {
    # Return the human-readable name cached for storage index $options{id},
    # optionally rewritten through the user-supplied
    # --display-transform-src/--display-transform-dst substitution.
    my ($self, %options) = @_;
    my $value = $self->{datas}->{$self->{option_results}->{oid_display} . "_" . $options{id}};

    if (defined($self->{option_results}->{display_transform_src})) {
        $self->{option_results}->{display_transform_dst} = '' if (!defined($self->{option_results}->{display_transform_dst}));
        # SECURITY NOTE: the transform patterns are interpolated into a string
        # eval, so a malicious option value can execute arbitrary Perl code.
        # This is documented as a risk in the POD below; do not pass untrusted
        # input to these options.
        eval "\$value =~ s{$self->{option_results}->{display_transform_src}}{$self->{option_results}->{display_transform_dst}}";
    }
    return $value;
}
sub manage_selection {
    # Build the list of storage indexes matching the user's --storage
    # selection. Populates $self->{datas} (index -> name caches, plus
    # all_ids) and @{$self->{storage_id_selected}}. With disco => 1,
    # "nothing found" is not fatal (discovery may legitimately be empty).
    my ($self, %options) = @_;

    $self->{datas} = {};
    $self->{datas}->{oid_filter} = $self->{option_results}->{oid_filter};
    $self->{datas}->{oid_display} = $self->{option_results}->{oid_display};
    $self->{datas}->{all_ids} = [];

    # Request the description table and/or mount-point table, depending on
    # which OIDs are used for filtering and displaying ($added deduplicates
    # when both options name the same OID).
    my $request = [];
    my $added = {};
    foreach (($self->{option_results}->{oid_filter}, $self->{option_results}->{oid_display} )) {
        next if (defined($added->{$_}));
        $added->{$_} = 1;
        if (/hrFSMountPoint/i) {
            # Mount points live in the hrFS table; also fetch the hrFS -> hrStorage
            # index mapping so entries can be keyed by storage index.
            push @{$request}, ({ oid => $oids_hrStorageTable{hrfsmountpoint} }, { oid => $oids_hrStorageTable{hrfsstorageindex} });
        } else {
            push @{$request}, { oid => $oids_hrStorageTable{hrstoragedescr} };
        }
    }

    my $result = $self->{snmp}->get_multiple_table(oids => $request);
    # $$_[0] is the role ('filter'/'display'), $$_[1] the OID name it uses.
    foreach ((['filter', $self->{option_results}->{oid_filter}], ['display', $self->{option_results}->{oid_display}])) {
        foreach my $key ($self->{snmp}->oid_lex_sort(keys %{$result->{ $oids_hrStorageTable{$$_[1]} }})) {
            next if ($key !~ /\.([0-9]+)$/);
            # get storage index
            my $storage_index = $1;
            if ($$_[1] =~ /hrFSMountPoint/i) {
                # Translate the hrFS row index into the matching hrStorage index.
                $storage_index = $result->{ $oids_hrStorageTable{hrfsstorageindex} }->{$oids_hrStorageTable{hrfsstorageindex} . '.' . $storage_index};
            }
            if ($$_[0] eq 'filter') {
                push @{$self->{datas}->{all_ids}}, $storage_index;
            }
            $self->{datas}->{$$_[1] . "_" . $storage_index} = $self->{output}->to_utf8($result->{ $oids_hrStorageTable{$$_[1]} }->{$key});
        }
    }

    if (scalar(keys %{$self->{datas}}) <= 0) {
        $self->{output}->add_option_msg(short_msg => "Can't get storages...");
        $self->{output}->option_exit();
    }

    # When display uses a different OID than filter, fetch the display names too.
    if ($self->{option_results}->{oid_filter} ne $self->{option_results}->{oid_display}) {
        $result = $self->{snmp}->get_table(oid => $oids_hrStorageTable{$self->{option_results}->{oid_display}});
        foreach my $key ($self->{snmp}->oid_lex_sort(keys %$result)) {
            next if ($key !~ /\.([0-9]+)$/);
            $self->{datas}->{$self->{option_results}->{oid_display} . "_" . $1} = $self->{output}->to_utf8($result->{$key});
        }
    }

    if (!defined($self->{option_results}->{use_name}) && defined($self->{option_results}->{storage})) {
        # get by ID
        push @{$self->{storage_id_selected}}, $self->{option_results}->{storage};
        my $name = $self->{datas}->{$self->{option_results}->{oid_display} . "_" . $self->{option_results}->{storage}};
        if (!defined($name) && !defined($options{disco})) {
            $self->{output}->add_option_msg(short_msg => "No storage found for id '" . $self->{option_results}->{storage} . "'.");
            $self->{output}->option_exit();
        }
    } else {
        # Select by name: exact match by default, regexp with --regexp,
        # case-insensitive regexp when --regexp-isensitive is also set.
        foreach my $i (@{$self->{datas}->{all_ids}}) {
            my $filter_name = $self->{datas}->{$self->{option_results}->{oid_filter} . "_" . $i};
            next if (!defined($filter_name));
            if (!defined($self->{option_results}->{storage})) {
                push @{$self->{storage_id_selected}}, $i;
                next;
            }
            if (defined($self->{option_results}->{use_regexp}) && defined($self->{option_results}->{use_regexpi}) && $filter_name =~ /$self->{option_results}->{storage}/i) {
                push @{$self->{storage_id_selected}}, $i;
            }
            if (defined($self->{option_results}->{use_regexp}) && !defined($self->{option_results}->{use_regexpi}) && $filter_name =~ /$self->{option_results}->{storage}/) {
                push @{$self->{storage_id_selected}}, $i;
            }
            if (!defined($self->{option_results}->{use_regexp}) && !defined($self->{option_results}->{use_regexpi}) && $filter_name eq $self->{option_results}->{storage}) {
                push @{$self->{storage_id_selected}}, $i;
            }
        }

        if (scalar(@{$self->{storage_id_selected}}) <= 0 && !defined($options{disco})) {
            if (defined($self->{option_results}->{storage})) {
                $self->{output}->add_option_msg(short_msg => "No storage found for name '" . $self->{option_results}->{storage} . "'.");
            } else {
                $self->{output}->add_option_msg(short_msg => "No storage found.");
            }
            $self->{output}->option_exit();
        }
    }
}
sub disco_format {
    # Declare the attributes emitted for each discovered storage entry.
    my ($self, %options) = @_;

    my @attributes = ('name', 'total', 'storageid');
    $self->{output}->add_disco_format(elements => \@attributes);
}
sub disco_show {
    # Discovery mode: emit one entry (name, total size in bytes, storage id)
    # per storage matching the selection and the storage-type filter.
    my ($self, %options) = @_;
    # $options{snmp} = snmp object
    $self->{snmp} = $options{snmp};

    $self->manage_selection(disco => 1);
    my $result;
    if (scalar(@{$self->{storage_id_selected}}) > 0) {
        $result = $self->get_additional_information();
    }
    foreach (sort @{$self->{storage_id_selected}}) {
        my $display_value = $self->get_display_value(id => $_);
        my $storage_type = $result->{$oid_hrStorageType . "." . $_};
        # Unknown type OIDs map to undef and are skipped without triggering
        # 'uninitialized value in pattern match' warnings (the previous code
        # matched the possibly-undef hash lookup directly against the regexp).
        my $storage_type_name = defined($storage_type) ? $storage_types_manage{$storage_type} : undef;
        next if (!defined($storage_type_name) ||
                 ($storage_type_name !~ /$self->{option_results}->{filter_storage_type}/i));
        $self->{output}->add_disco_entry(name => $display_value,
                                         total => $result->{$oid_hrStorageSize . "." . $_} * $result->{$oid_hrStorageAllocationUnits . "." . $_},
                                         storageid => $_);
    }
}
1;
__END__
=head1 MODE
=over 8
=item B<--storage>
Set the storage (number expected) ex: 1, 2,... (empty means 'check all storage').
=item B<--name>
Allows to use storage name with option --storage instead of storage oid index.
=item B<--regexp>
Allows to use regexp to filter storage (with option --name).
=item B<--regexp-isensitive>
Makes the regexp supplied to --storage case-insensitive (use together with --regexp).
=item B<--oid-filter>
Choose OID used to filter storage (default: hrStorageDescr) (values: hrStorageDescr, hrFSMountPoint).
=item B<--oid-display>
Choose OID used to display storage (default: hrStorageDescr) (values: hrStorageDescr, hrFSMountPoint).
=item B<--display-transform-src>
Regexp src to transform display value. (security risk!!!)
=item B<--display-transform-dst>
Regexp dst to transform display value. (security risk!!!)
=item B<--filter-storage-type>
Filter storage types with a regexp (Default: '^(hrStorageFixedDisk|hrStorageNetworkDisk|hrFSBerkeleyFFS)$').
=back
=cut
| s-duret/centreon-plugins | snmp_standard/mode/liststorages.pm | Perl | apache-2.0 | 13,379 |
##
## Copyright (c) 2017, Alliance for Open Media. All rights reserved
##
## This source code is subject to the terms of the BSD 2 Clause License and
## the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
## was not distributed with this source code in the LICENSE file, you can
## obtain it at www.aomedia.org/license/software. If the Alliance for Open
## Media Patent License 1.0 was not distributed with this source code in the
## PATENTS file, you can obtain it at www.aomedia.org/license/patent.
##
sub aom_scale_forward_decls() {
    # Emit the C forward declarations required by the generated RTCD header.
    print "struct yv12_buffer_config;\n";
}
# Register the forward-declaration hook with the RTCD generator.
forward_decls qw/aom_scale_forward_decls/;

# Scaler functions
if (aom_config("CONFIG_SPATIAL_RESAMPLING") eq "yes") {
    add_proto qw/void aom_horizontal_line_5_4_scale/, "const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width";
    add_proto qw/void aom_vertical_band_5_4_scale/, "unsigned char *source, int src_pitch, unsigned char *dest, int dest_pitch, unsigned int dest_width";
    add_proto qw/void aom_horizontal_line_5_3_scale/, "const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width";
    add_proto qw/void aom_vertical_band_5_3_scale/, "unsigned char *source, int src_pitch, unsigned char *dest, int dest_pitch, unsigned int dest_width";
    add_proto qw/void aom_horizontal_line_2_1_scale/, "const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width";
    add_proto qw/void aom_vertical_band_2_1_scale/, "unsigned char *source, int src_pitch, unsigned char *dest, int dest_pitch, unsigned int dest_width";
    add_proto qw/void aom_vertical_band_2_1_scale_i/, "unsigned char *source, int src_pitch, unsigned char *dest, int dest_pitch, unsigned int dest_width";
}

# YV12 frame-buffer helpers, always declared.
add_proto qw/void aom_yv12_extend_frame_borders/, "struct yv12_buffer_config *ybf, const int num_planes";
add_proto qw/void aom_yv12_copy_frame/, "const struct yv12_buffer_config *src_bc, struct yv12_buffer_config *dst_bc, const int num_planes";
add_proto qw/void aom_yv12_copy_y/, "const struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc";
add_proto qw/void aom_yv12_copy_u/, "const struct yv12_buffer_config *src_bc, struct yv12_buffer_config *dst_bc";
add_proto qw/void aom_yv12_copy_v/, "const struct yv12_buffer_config *src_bc, struct yv12_buffer_config *dst_bc";

if (aom_config("CONFIG_AV1") eq "yes") {
    add_proto qw/void aom_extend_frame_borders/, "struct yv12_buffer_config *ybf, const int num_planes";
    # 'specialize' registers the architecture-specific (dspr2) variants.
    specialize qw/aom_extend_frame_borders dspr2/;

    add_proto qw/void aom_extend_frame_inner_borders/, "struct yv12_buffer_config *ybf, const int num_planes";
    specialize qw/aom_extend_frame_inner_borders dspr2/;

    add_proto qw/void aom_extend_frame_borders_y/, "struct yv12_buffer_config *ybf";
}

1;
| GoogleChromeLabs/wasm-av1 | third_party/aom/aom_scale/aom_scale_rtcd.pl | Perl | apache-2.0 | 2,847 |
#
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package cloud::azure::database::sqldatabase::mode::databasesize;
use base qw(centreon::plugins::templates::counter);
use strict;
use warnings;
sub prefix_metric_output {
    # Prefix printed before each metric line, e.g. "Database 'mydb' average ".
    my ($self, %options) = @_;

    my $instance = $options{instance_value};
    return sprintf("Database '%s' %s ", $instance->{display}, $instance->{stat});
}
sub set_counters {
    # Declare the counter structure consumed by the counter template: one
    # 'storage_percent' threshold/perfdata entry per aggregation
    # (minimum/maximum/average), keyed per database instance.
    my ($self, %options) = @_;

    $self->{maps_counters_type} = [
        { name => 'metric', type => 1, cb_prefix_output => 'prefix_metric_output', message_multiple => "All usage metrics are ok", skipped_code => { -10 => 1 } },
    ];

    foreach my $aggregation ('minimum', 'maximum', 'average') {
        foreach my $metric ('storage_percent') {
            # Perfdata labels use underscores, threshold option labels use dashes
            # (no-ops for 'storage_percent' itself, which contains no spaces).
            my $metric_perf = lc($metric);
            my $metric_label = lc($metric);
            $metric_perf =~ s/ /_/g;
            $metric_label =~ s/ /-/g;
            my $entry = { label => $metric_label . '-' . $aggregation, set => {
                key_values => [ { name => $metric_perf . '_' . $aggregation }, { name => 'display' }, { name => 'stat' } ],
                output_template => 'Database size percentage: %.2f %%',
                perfdatas => [
                    { label => $metric_perf . '_' . $aggregation, value => $metric_perf . '_' . $aggregation . '_absolute',
                      template => '%.2f', unit => '%', label_extra_instance => 1, instance_use => 'display_absolute',
                      min => 0, max => 100 },
                ],
            }
            };
            push @{$self->{maps_counters}->{metric}}, $entry;
        }
    }
}
sub new {
    # Constructor: registers the command-line options specific to this mode.
    my ($class, %options) = @_;
    my $self = bless $class->SUPER::new(package => __PACKAGE__, %options), $class;

    my %arguments = (
        "resource:s@"      => { name => 'resource' },
        "resource-group:s" => { name => 'resource_group' },
        "filter-metric:s"  => { name => 'filter_metric' },
    );
    $options{options}->add_options(arguments => \%arguments);

    return $self;
}
sub check_options {
    # Validate options and translate them into the az_* fields consumed by
    # the Azure monitoring backend (resource, timeframe, interval,
    # aggregations, metric list).
    my ($self, %options) = @_;
    $self->SUPER::check_options(%options);

    if (!defined($self->{option_results}->{resource})) {
        $self->{output}->add_option_msg(short_msg => "Need to specify either --resource <name> with --resource-group option or --resource <id>.");
        $self->{output}->option_exit();
    }

    $self->{az_resource} = $self->{option_results}->{resource};
    $self->{az_resource_group} = $self->{option_results}->{resource_group} if (defined($self->{option_results}->{resource_group}));
    $self->{az_resource_type} = 'servers';
    $self->{az_resource_namespace} = 'Microsoft.Sql';
    # Defaults: 15-minute lookback sampled at 5-minute intervals.
    $self->{az_timeframe} = defined($self->{option_results}->{timeframe}) ? $self->{option_results}->{timeframe} : 900;
    $self->{az_interval} = defined($self->{option_results}->{interval}) ? $self->{option_results}->{interval} : "PT5M";
    # 'Maximum' is the default aggregation; user-supplied ones are
    # capitalised to the form the Azure API expects (e.g. 'average' -> 'Average').
    $self->{az_aggregations} = ['Maximum'];
    if (defined($self->{option_results}->{aggregation})) {
        $self->{az_aggregations} = [];
        foreach my $stat (@{$self->{option_results}->{aggregation}}) {
            if ($stat ne '') {
                push @{$self->{az_aggregations}}, ucfirst(lc($stat));
            }
        }
    }

    # Keep only the metrics matching --filter-metric (all by default).
    foreach my $metric ('storage_percent') {
        next if (defined($self->{option_results}->{filter_metric}) && $self->{option_results}->{filter_metric} ne ''
            && $metric !~ /$self->{option_results}->{filter_metric}/);

        push @{$self->{az_metrics}}, $metric;
    }
}
sub manage_selection {
    # Fetch the configured metrics for every --resource and flatten them into
    # $self->{metric}, keyed "<resource>_<aggregation>", for the counter layer.
    my ($self, %options) = @_;

    my %metric_results;
    foreach my $resource (@{$self->{az_resource}}) {
        my $resource_group = $self->{az_resource_group};
        my $resource_name = $resource;
        # A full resource id carries its own group and server/database path;
        # extract both so --resource-group is not required in that form.
        if ($resource =~ /^\/subscriptions\/.*\/resourceGroups\/(.*)\/providers\/Microsoft\.Sql\/servers\/(.*)\/databases\/(.*)$/) {
            $resource_group = $1;
            $resource_name = $2 . '/databases/' . $3;
        }

        ($metric_results{$resource_name}, undef, undef) = $options{custom}->azure_get_metrics(
            resource => $resource_name,
            resource_group => $resource_group,
            resource_type => $self->{az_resource_type},
            resource_namespace => $self->{az_resource_namespace},
            metrics => $self->{az_metrics},
            aggregations => $self->{az_aggregations},
            timeframe => $self->{az_timeframe},
            interval => $self->{az_interval},
        );

        foreach my $metric (@{$self->{az_metrics}}) {
            my $metric_name = lc($metric);
            $metric_name =~ s/ /_/g;
            foreach my $aggregation (@{$self->{az_aggregations}}) {
                # Missing datapoints are skipped unless --zeroed forces them to 0.
                next if (!defined($metric_results{$resource_name}->{$metric_name}->{lc($aggregation)}) && !defined($self->{option_results}->{zeroed}));

                $self->{metric}->{$resource_name . "_" . lc($aggregation)}->{display} = $resource_name;
                $self->{metric}->{$resource_name . "_" . lc($aggregation)}->{stat} = lc($aggregation);
                $self->{metric}->{$resource_name . "_" . lc($aggregation)}->{$metric_name . "_" . lc($aggregation)} = defined($metric_results{$resource_name}->{$metric_name}->{lc($aggregation)}) ? $metric_results{$resource_name}->{$metric_name}->{lc($aggregation)} : 0;
            }
        }
    }

    if (scalar(keys %{$self->{metric}}) <= 0) {
        $self->{output}->add_option_msg(short_msg => 'No metrics. Check your options or use --zeroed option to set 0 on undefined values');
        $self->{output}->option_exit();
    }
}
1;
__END__
=head1 MODE
Check databases usage metrics.
Example:
Using resource name :
perl centreon_plugins.pl --plugin=cloud::azure::database::sqldatabase::plugin --custommode=azcli --mode=database-size
--resource=MYDATABASE --resource-group=MYHOSTGROUP --aggregation='maximum'
--critical-storage-percent-maximum='ç0' --verbose
Using resource id :
perl centreon_plugins.pl --plugin=cloud::azure::database::sqldatabase::plugin --custommode=azcli --mode=database-size
--resource='/subscriptions/xxx/resourceGroups/xxx/providers/Microsoft.Sql/servers/xxx/databases/xxx'
--aggregation='maximum' --critical-storage-percent-maximum='90' --verbose
Default aggregation: 'maximum' / 'average', 'minimum' and 'maximum' are valid.
=over 8
=item B<--resource>
Set resource name or id (Required).
=item B<--resource-group>
Set resource group (Required if resource's name is used).
=item B<--filter-metric>
Filter metrics (Can be: 'storage_percent') (Can be a regexp).
=item B<--warning-$metric$-$aggregation$>
Thresholds warning ($metric$ can be: 'storage-percent',
$aggregation$ can be: 'minimum', 'maximum', 'average').
=item B<--critical-$metric$-$aggregation$>
Thresholds critical ($metric$ can be: 'storage-percent',
$aggregation$ can be: 'minimum', 'maximum', 'average').
=back
=cut
| Sims24/centreon-plugins | cloud/azure/database/sqldatabase/mode/databasesize.pm | Perl | apache-2.0 | 7,918 |
# Result class for API Gateway ApiKey objects (Paws SDK; see POD below).
# Each attribute maps a Perl-level accessor to the JSON field name used on
# the wire ('request_name' via the NameInRequest trait).
package Paws::ApiGateway::ApiKey;
  use Moose;
  has CreatedDate => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'createdDate');
  has CustomerId => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'customerId');
  has Description => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'description');
  has Enabled => (is => 'ro', isa => 'Bool', traits => ['NameInRequest'], request_name => 'enabled');
  has Id => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'id');
  has LastUpdatedDate => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'lastUpdatedDate');
  has Name => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'name');
  has StageKeys => (is => 'ro', isa => 'ArrayRef[Str|Undef]', traits => ['NameInRequest'], request_name => 'stageKeys');
  has Value => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'value');

  # Request id of the API call that produced this object.
  has _request_id => (is => 'ro', isa => 'Str');
1;
### main pod documentation begin ###
=head1 NAME
Paws::ApiGateway::ApiKey
=head1 ATTRIBUTES
=head2 CreatedDate => Str
The timestamp when the API Key was created.
=head2 CustomerId => Str
An AWS Marketplace customer identifier , when integrating with the AWS
SaaS Marketplace.
=head2 Description => Str
The description of the API Key.
=head2 Enabled => Bool
Specifies whether the API Key can be used by callers.
=head2 Id => Str
The identifier of the API Key.
=head2 LastUpdatedDate => Str
The timestamp when the API Key was last updated.
=head2 Name => Str
The name of the API Key.
=head2 StageKeys => ArrayRef[Str|Undef]
A list of Stage resources that are associated with the ApiKey resource.
=head2 Value => Str
The value of the API Key.
=head2 _request_id => Str
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/ApiGateway/ApiKey.pm | Perl | apache-2.0 | 1,856 |
# Argument class for the ServiceCatalog DescribeProvisioningArtifact call
# (Paws SDK; see POD below for attribute semantics).
package Paws::ServiceCatalog::DescribeProvisioningArtifact;
  use Moose;
  has AcceptLanguage => (is => 'ro', isa => 'Str');
  has ProductId => (is => 'ro', isa => 'Str', required => 1);
  has ProvisioningArtifactId => (is => 'ro', isa => 'Str', required => 1);
  has Verbose => (is => 'ro', isa => 'Bool');

  use MooseX::ClassAttribute;

  # Class-level call metadata: API operation name and result class.
  class_has _api_call => (isa => 'Str', is => 'ro', default => 'DescribeProvisioningArtifact');
  class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::ServiceCatalog::DescribeProvisioningArtifactOutput');
  class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::ServiceCatalog::DescribeProvisioningArtifact - Arguments for method DescribeProvisioningArtifact on Paws::ServiceCatalog
=head1 DESCRIPTION
This class represents the parameters used for calling the method DescribeProvisioningArtifact on the
AWS Service Catalog service. Use the attributes of this class
as arguments to method DescribeProvisioningArtifact.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to DescribeProvisioningArtifact.
As an example:
$service_obj->DescribeProvisioningArtifact(Att1 => $value1, Att2 => $value2, ...);
Values for attributes that are native types (Int, String, Float, etc) can passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
=head1 ATTRIBUTES
=head2 AcceptLanguage => Str
The language code.
=over
=item *
C<en> - English (default)
=item *
C<jp> - Japanese
=item *
C<zh> - Chinese
=back
=head2 B<REQUIRED> ProductId => Str
The product identifier.
=head2 B<REQUIRED> ProvisioningArtifactId => Str
The identifier of the provisioning artifact. This is sometimes referred
to as the product version.
=head2 Verbose => Bool
Enable a verbose level of details for the provisioning artifact.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method DescribeProvisioningArtifact in L<Paws::ServiceCatalog>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/ServiceCatalog/DescribeProvisioningArtifact.pm | Perl | apache-2.0 | 2,331 |
# Exception raised when an attribute declares an unrecognised "is" option.
package Moose::Exception::InvalidValueForIs;
our $VERSION = '2.1404';

use Moose;
extends 'Moose::Exception';
with 'Moose::Exception::Role::InvalidAttributeOptions';

# Compose the error message from the offending "is" value and attribute name.
sub _build_message {
    my $self = shift;
    return sprintf(
        "I do not understand this option (is => %s) on attribute (%s)",
        $self->params->{is},
        $self->attribute_name,
    );
}

1;
| ray66rus/vndrv | local/lib/perl5/x86_64-linux-thread-multi/Moose/Exception/InvalidValueForIs.pm | Perl | apache-2.0 | 328 |
=head1 LICENSE
Copyright [1999-2014] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=cut
# Ensembl module for Bio::EnsEMBL::Variation::PopulationGenotype
#
#
=head1 NAME
Bio::EnsEMBL::Variation::PopulationGenotype - Module for a genotype
represented in a population.
=head1 SYNOPSIS
print $genotype->variation()->name(), "\n";
print $genotype->allele1(), '/', $genotype->allele2(), "\n";
print $genotype->frequency(), "\n";
print $genotype->population()->name(), "\n";
=head1 DESCRIPTION
This class represents a genotype which is present in a population.
=head1 METHODS
=cut
use strict;
use warnings;
package Bio::EnsEMBL::Variation::PopulationGenotype;
use Bio::EnsEMBL::Variation::Genotype;
use Bio::EnsEMBL::Utils::Argument qw(rearrange);
use Bio::EnsEMBL::Utils::Exception qw(throw deprecate warning);
use vars qw(@ISA);
@ISA = qw(Bio::EnsEMBL::Variation::Genotype);
=head2 new
Arg [-dbID] :
int - unique internal identifier
Arg [-adaptor] :
Bio::EnsEMBL::Variation::DBSQL::PopulationAdaptor
Arg [-genotype] :
arrayref of strings - The alleles defining this genotype
Arg [-variation] :
Bio::EnsEMBL::Variation::Variation - The variation associated with this
genotype
Arg [-population] :
Bio::EnsEMBL::Population - The population this genotype is for.
Arg [-frequency] :
int - the frequency this genotype occurs in this population
Example : $pop_genotype = Bio:EnsEMBL::Variation::PopulationGenotype->new
(-genotype => ['A','T'],
-variation => $variation,
-population => $pop
-frequency => 0.87);
Description: Constructor. Instantiates a PopulationGenotype object.
Returntype : Bio::EnsEMBL::Variation::PopulationGenotype
Exceptions : throw on bad argument
Caller : general
Status : Stable
=cut
sub new {
    # Constructor (see POD above for the argument list). Validates the
    # variation and population objects, normalises the subsnp id, and
    # blesses the attribute hash.
    my $class = shift;

    my ($dbID, $adaptor, $genotype, $var, $pop, $freq, $count, $var_id, $ss_id) =
        rearrange([qw(dbID adaptor genotype variation population frequency count _variation_id subsnp)],@_);

    if(defined($var) &&
       (!ref($var) || !$var->isa('Bio::EnsEMBL::Variation::Variation'))) {
        throw("Bio::EnsEMBL::Variation::Variation argument expected");
    }

    if(defined($pop) &&
       (!ref($pop) || !$pop->isa('Bio::EnsEMBL::Variation::Population'))) {
        throw("Bio::EnsEMBL::Variation::Population argument expected");
    }

    # set subsnp_id to undefined if it's 0 in the DB
    $ss_id = undef if defined($ss_id) && $ss_id == 0;

    # add ss to the subsnp_id
    $ss_id = 'ss'.$ss_id if defined $ss_id && $ss_id !~ /^ss/;

    # _variation_id is kept only as a lazy-fetch key when no variation
    # object was supplied; it is cleared once $var is present.
    return bless {
        'dbID' => $dbID,
        'adaptor' => $adaptor,
        'genotype' => $genotype,
        'variation' => $var,
        '_variation_id' => defined($var) ? undef : $var_id,
        'population' => $pop,
        'frequency' => $freq,
        'count' => $count,
        'subsnp' => $ss_id
    }, $class;
}
=head2 population
Arg [1] : (optional) Bio::EnsEMBL::Variation::Population $pop
Example : $pop = $pop_genotype->population();
Description: Getter/Setter for the population associated with this genotype
Returntype : Bio::EnsEMBL::Variation::Population
Exceptions : throw on bad argument
Caller : general
Status : Stable
=cut
sub population {
    # Getter/setter for the associated Population object; the setter rejects
    # anything defined that is not a Bio::EnsEMBL::Variation::Population.
    my ($self, @args) = @_;

    if (@args) {
        my $new_pop = $args[0];
        if (defined($new_pop) &&
            (!ref($new_pop) || !$new_pop->isa('Bio::EnsEMBL::Variation::Population'))) {
            throw('Bio::EnsEMBL::Variation::Population argument expected');
        }
        return $self->{'population'} = $new_pop;
    }
    return $self->{'population'};
}
=head2 frequency
Arg [1] : string $freq (optional)
The new value to set the frequency attribute to
Example : $frequency = $pop_gtype->frequency()
Description: Getter/Setter for the frequency of occurance of this genotype
within its associated population.
Returntype : string
Exceptions : none
Caller : general
Status : Stable
=cut
sub frequency {
    # Getter/setter for the genotype frequency within the population.
    my ($self, @args) = @_;

    $self->{'frequency'} = $args[0] if (@args);
    return $self->{'frequency'};
}
=head2 count
Arg [1] : int $count (optional)
The new value to set the count attribute to
Example : $frequency = $pop_gtype->count()
Description: Getter/Setter for the observed count of this genotype
within its associated population.
Returntype : string
Exceptions : none
Caller : general
Status : Stable
=cut
sub count {
    # Getter/setter for the observed genotype count within the population.
    my ($self, @args) = @_;

    $self->{'count'} = $args[0] if (@args);
    return $self->{'count'};
}
1;
| dbolser-ebi/ensembl-variation | modules/Bio/EnsEMBL/Variation/PopulationGenotype.pm | Perl | apache-2.0 | 5,445 |
# Argument class for the AWS Mobile Hub ListBundles call (Paws SDK; see POD
# below). Both attributes are sent as query-string parameters.
package Paws::MobileHub::ListBundles;
  use Moose;
  has MaxResults => (is => 'ro', isa => 'Int', traits => ['ParamInQuery'], query_name => 'maxResults');
  has NextToken => (is => 'ro', isa => 'Str', traits => ['ParamInQuery'], query_name => 'nextToken');

  use MooseX::ClassAttribute;

  # Class-level call metadata: operation name, URI, HTTP method, result class.
  class_has _api_call => (isa => 'Str', is => 'ro', default => 'ListBundles');
  class_has _api_uri => (isa => 'Str', is => 'ro', default => '/bundles');
  class_has _api_method => (isa => 'Str', is => 'ro', default => 'GET');
  class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::MobileHub::ListBundlesResult');
  class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::MobileHub::ListBundles - Arguments for method ListBundles on Paws::MobileHub
=head1 DESCRIPTION
This class represents the parameters used for calling the method ListBundles on the
AWS Mobile service. Use the attributes of this class
as arguments to method ListBundles.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to ListBundles.
As an example:
$service_obj->ListBundles(Att1 => $value1, Att2 => $value2, ...);
Values for attributes that are native types (Int, String, Float, etc) can passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
=head1 ATTRIBUTES
=head2 MaxResults => Int
Maximum number of records to list in a single response.
=head2 NextToken => Str
Pagination token. Set to null to start listing bundles from start. If
non-null pagination token is returned in a result, then pass its value
in here in another request to list more bundles.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method ListBundles in L<Paws::MobileHub>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/MobileHub/ListBundles.pm | Perl | apache-2.0 | 2,087 |
=head1 LICENSE
See the NOTICE file distributed with this work for additional information
regarding copyright ownership.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=head1 NAME
Bio::EnsEMBL::Compara::PipeConfig::Metazoa::ProteinTrees_conf
=head1 SYNOPSIS
init_pipeline.pl Bio::EnsEMBL::Compara::PipeConfig::Metazoa::ProteinTrees_conf -host mysql-ens-compara-prod-X -port XXXX
=head1 DESCRIPTION
The Metazoa PipeConfig file for ProteinTrees pipeline that should automate most of the pre-execution tasks.
=cut
package Bio::EnsEMBL::Compara::PipeConfig::Metazoa::ProteinTrees_conf;
use strict;
use warnings;
use Bio::EnsEMBL::Hive::Utils ('stringify');
use base ('Bio::EnsEMBL::Compara::PipeConfig::ProteinTrees_conf');
# Return the pipeline-wide default options for the Metazoa ProteinTrees run.
# Inherits the generic ProteinTrees defaults and overrides division-specific
# settings (taxonomic levels, orthology-QC thresholds, extra analyses and
# per-analysis hive capacities).
sub default_options {
    my ($self) = @_;
    return {
        %{$self->SUPER::default_options},   # inherit the generic ones

        'division'   => 'metazoa',
        'collection' => $self->o('division'),

    # homology_dnds parameters:
        'taxlevels' => ['Drosophila' ,'Hymenoptera', 'Nematoda'],

    # GOC parameters:
        'goc_taxlevels' => ['Diptera', 'Hymenoptera', 'Nematoda'],

    # HighConfidenceOrthologs parameters:
        # In this structure, the "thresholds" are for resp. the GOC score, the WGA coverage and %identity
        # (an undef threshold means that criterion is not applied at that level)
        'threshold_levels' => [
            {
                'taxa'          => [ 'Euteleostomi', 'Ciona' ],
                'thresholds'    => [ 50, 50, 25 ],
            },
            {
                'taxa'          => [ 'Aculeata', 'Anophelinae', 'Caenorhabditis', 'Drosophila', 'Glossinidae', 'Onchocercidae' ],
                'thresholds'    => [ 50, 50, 25 ],
            },
            {
                'taxa'          => [ 'Brachycera', 'Culicinae', 'Hemiptera', 'Phlebotominae' ],
                'thresholds'    => [ 25, 25, 25 ],
            },
            {
                'taxa'          => [ 'Chelicerata', 'Diptera', 'Hymenoptera', 'Nematoda' ],
                'thresholds'    => [ undef, undef, 25 ],
            },
            {
                'taxa'          => [ 'all' ],
                'thresholds'    => [ undef, undef, 25 ],
            },
        ],

    # Extra analyses:
        # Gain/loss analysis?
        'do_cafe'                => 0,
        # Compute dNdS for homologies?
        'do_dnds'                => 1,
        # Do we want the Gene QC part to run?
        'do_gene_qc'             => 0,
        # Do we need a mapping between homology_ids of this database to another database?
        # This parameter is automatically set to 1 when the GOC pipeline is going to run with a reuse database
        'do_homology_id_mapping' => 0,

    # hive_capacity values for some analyses:
        'blastp_capacity'           => 420,
        'blastpu_capacity'          => 100,
        'split_genes_capacity'      => 200,
        'cluster_tagging_capacity'  => 200,
        'homology_dNdS_capacity'    => 200,
        'treebest_capacity'         => 200,
        'ortho_tree_capacity'       => 200,
        'quick_tree_break_capacity' => 100,
        'goc_capacity'              => 200,
        'goc_stats_capacity'        => 15,
        'other_paralogs_capacity'   => 100,
        'mcoffee_short_capacity'    => 200,
        'hc_capacity'               => 4,
        'decision_capacity'         => 4,
    };
}
# Override the resource class (-rc_name) of selected analyses so that the
# memory/CPU-hungry steps of the Metazoa pipeline get bigger LSF slots.
# Entries are written into $analyses_by_name even for logic_names that are
# absent (same autovivification behaviour as the base implementation).
sub tweak_analyses {
    my ($self, $analyses_by_name) = @_;

    # logic_name => resource class to force for that analysis
    my %rc_name_for = (
        'mcoffee'                                   => '8Gb_job',
        'mcoffee_himem'                             => '32Gb_job',
        'mafft'                                     => '8Gb_2c_job',
        'mafft_himem'                               => '32Gb_4c_job',
        'treebest'                                  => '4Gb_job',
        'members_against_allspecies_factory'        => '2Gb_job',
        'members_against_nonreusedspecies_factory'  => '2Gb_job',
        'homology_dumps_mlss_id_factory'            => '500Mb_job',
        'ortholog_mlss_factory'                     => '500Mb_job',
    );

    while ( my ($logic_name, $rc_name) = each %rc_name_for ) {
        $analyses_by_name->{$logic_name}{'-rc_name'} = $rc_name;
    }
}
1;
| Ensembl/ensembl-compara | modules/Bio/EnsEMBL/Compara/PipeConfig/Metazoa/ProteinTrees_conf.pm | Perl | apache-2.0 | 4,673 |
=head1 LICENSE
# Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=cut
=head1 NAME
Bio::EnsEMBL::Analysis::Runnable::EPCR -
=head1 SYNOPSIS
my $runnable = Bio::EnsEMBL::Analysis::Runnable::EPCR->new(
-query => $slice,
-program => $self->analysis->dbfile,
%{$self->parameters_hash};
);
$runnable->run;
my @marker_features = @{$runnable->output};
=head1 DESCRIPTION
Wrapper to run EPCR and parse the results into marker features
=head1 METHODS
=cut
package Bio::EnsEMBL::Analysis::Runnable::EPCR;
use strict;
use warnings;
use Bio::EnsEMBL::Analysis::Runnable;
use Bio::EnsEMBL::Utils::Exception qw(throw warning);
use Bio::EnsEMBL::Utils::Argument qw( rearrange );
use vars qw(@ISA);
@ISA = qw(Bio::EnsEMBL::Analysis::Runnable);
=head2 new
Arg [1] : Bio::EnsEMBL::Analysis::Runnable::EPCR
Arg [2] : string, sts file
Arg [3] : arrayref of Bio::EnsEMBL::Map::Markers
Arg [4] : int, margin
Arg [5] : int, word_size
Arg [6] : int, min mismatch
Arg [7] : int, max mismatch
Function : create a Bio::EnsEMBL::Analysis::Runnable::EPCR
Returntype: Bio::EnsEMBL::Analysis::Runnable::EPCR
Exceptions: throws if passed both an sts file and an array of features
Example :
=cut
# Constructor: builds on the generic Runnable and records the EPCR-specific
# settings (STS source plus e-PCR tuning parameters M/W/NMIN/NMAX).
# Exactly one of STS_FILE / STS_FEATURES may be given.
sub new {
    my ($class,@args) = @_;
    my $self = $class->SUPER::new(@args);
    my ($sts_file, $sts_features, $margin, $word_size, $min_mismatch,
        $max_mismatch) = rearrange(['STS_FILE', 'STS_FEATURES', 'M',
                                    'W', 'NMIN', 'NMAX'], @args);

    ######################
    #SETTING THE DEFAULTS#
    ######################
    # Only set the default binary name if the caller/analysis did not choose one.
    $self->program('e-PCR') if(!$self->program);
    ######################

    if($sts_file && $sts_features){
        throw("Must pass either an STS_FILE $sts_file or an array of ".
              "STS_FEATURES $sts_features not both");
    }
    $self->sts_file($sts_file) if($sts_file);
    $self->sts_features($sts_features) if($sts_features);
    # NOTE(review): margin/word_size use truthiness, so an explicit 0 is
    # silently ignored here, while the mismatch bounds accept 0 via 'defined'.
    # Presumably intentional (0 margin/word size is not meaningful) — confirm.
    $self->margin($margin) if($margin);
    $self->word_size($word_size) if($word_size);
    $self->min_mismatch($min_mismatch) if(defined $min_mismatch);
    $self->max_mismatch($max_mismatch) if(defined $max_mismatch);
    return $self;
}
#containers
=head2 margin
Arg [1] : Bio::EnsEMBL::Analysis::Runnable::EPCR
Arg [2] : int/string variable
Function : container for the specified variable. This pod
refers the the 4 methods below, margin, word_size, min_mistmatch
and max_mismatch
Returntype: int/string
Exceptions: none
Example :
=cut
# Accessor/mutator for the e-PCR margin (M) parameter.
sub margin {
    my ($self, @value) = @_;
    $self->{'margin'} = $value[0] if @value;
    return $self->{'margin'};
}
=head2 word_size
Arg [1] : Bio::EnsEMBL::Analysis::Runnable::EPCR
Arg [2] : int/string variable
Function : container for the specified variable. This pod
refers the the 4 methods below, margin, word_size, min_mistmatch
and max_mismatch
Returntype: int/string
Exceptions: none
Example :
=cut
# Accessor/mutator for the e-PCR word size (W) parameter.
sub word_size {
    my ($self, @value) = @_;
    $self->{'word_size'} = $value[0] if @value;
    return $self->{'word_size'};
}
=head2 min_mismatch
Arg [1] : Bio::EnsEMBL::Analysis::Runnable::EPCR
Arg [2] : int/string variable
Function : container for the specified variable. This pod
refers the the 4 methods below, margin, word_size, min_mistmatch
and max_mismatch
Returntype: int/string
Exceptions: none
Example :
=cut
# Accessor/mutator for the minimum mismatch (NMIN) bound.
sub min_mismatch {
    my ($self, @value) = @_;
    $self->{'min_mismatch'} = $value[0] if @value;
    return $self->{'min_mismatch'};
}
=head2 max_mismatch
Arg [1] : Bio::EnsEMBL::Analysis::Runnable::EPCR
Arg [2] : int/string variable
Function : container for the specified variable. This pod
refers the the 4 methods below, margin, word_size, min_mistmatch
and max_mismatch
Returntype: int/string
Exceptions: none
Example :
=cut
# Accessor/mutator for the maximum mismatch (NMAX) bound.
sub max_mismatch {
    my ($self, @value) = @_;
    $self->{'max_mismatch'} = $value[0] if @value;
    return $self->{'max_mismatch'};
}
=head2 sts_file
Arg [1] : Bio::EnsEMBL::Analysis::Runnable::EPCR
Arg [2] : string, file path
Function : container for sts file path, will use the find file
method for Runnable to locate the file
Returntype: string
Exceptions: none
Example :
=cut
# Accessor/mutator for the STS primer file path.  When setting, the path is
# resolved via the base Runnable's find_file() and the resolved path is stored.
sub sts_file {
    my ($self, $file) = @_;
    if($file){
        my $found = $self->find_file($file);
        $self->{'sts_file'} = $found;
    }
    return $self->{'sts_file'};
}
=head2 sts_features
Arg [1] : Bio::EnsEMBL::Analysis::Runnable::EPCR
Arg [2] : arrayref of Bio::EnsEMBL::Map::Markers
Function : container for arrayref of Bio::EnsEMBL::Map::Markers
Returntype: arrayref
Exceptions: throw if not passed an arrayref or if first element of
array isnt a Bio::EnsEMBL::Map::Marker
Example :
=cut
# Accessor/mutator for the marker set.  Accepts an arrayref of
# Bio::EnsEMBL::Map::Marker objects; only the first element is type-checked
# (the rest are assumed to be homogeneous).
sub sts_features {
    my ($self, $features) = @_;
    if($features){
        throw("Must pass EPCR sts_features an arrayref not ".$features)
            unless(ref($features) eq 'ARRAY');
        my $test = $features->[0];
        # Reject an empty arrayref or one whose first element is not a Marker.
        if(!$test || !($test->isa("Bio::EnsEMBL::Map::Marker"))){
            my $err = "arrayref ".$features." must contain ".
                "Bio::EnsEMBL::Map::Marker";
            $err .= " not ".$test if($test);
            throw($err);
        }
        $self->{'sts_features'} = $features;
    }
    return $self->{'sts_features'};
}
=head2 hit_list
Arg [1] : Bio::EnsEMBL::Analysis::Runnable::EPCR
Arg [2] : int, hit database id
Function : take the hit ids passed and store in a hash
Returntype: hashref
Exceptions: none
Example :
=cut
# Record a marker database id as "already hit" and return the full hit set.
# The hit set is a hashref mapping dbID => dbID; it is lazily created on
# first access.  Called with no id, this is a plain read accessor.
sub hit_list {
    my ($self, $hit) = @_;
    $self->{'hit_list'} ||= {};
    $self->{'hit_list'}->{$hit} = $hit if $hit;
    return $self->{'hit_list'};
}
#utility methods
=head2 run
Arg [1] : Bio::EnsEMBL::Analysis::Runnable::EPCR
Arg [2] : string, working directory
Function : coordinates the running and parsing of EPCR
EPCR is run for every mismatch value between the minimum and maximum
Returntype: none
Exceptions: throws if doesnt have a query sequence or if doesnt have
either and sts file or sts features
Example :
=cut
# Orchestrate the analysis: dump the query sequence, then run e-PCR once for
# every mismatch value from min_mismatch to max_mismatch (inclusive), parsing
# each result file.  The STS input is re-generated per pass so that markers
# already hit at a lower mismatch are excluded from later, looser passes.
sub run {
    my ($self, $dir) = @_;
    $self->workdir($dir) if($dir);
    throw("Can't run ".$self." without a query sequence")
        unless($self->query);
    $self->checkdir($dir);
    my $filename = $self->write_seq_file();
    $self->files_to_delete($filename);
    my $mismatch = $self->min_mismatch;
    my $sts_file;
    while($mismatch <= $self->max_mismatch){
        # Prefer in-memory markers; otherwise filter the configured sts file.
        if($self->sts_features){
            $sts_file = $self->dump_sts_features($self->sts_features);
        }elsif($self->sts_file){
            $sts_file = $self->copy_sts_file($self->sts_file);
        }else{
            throw("Don't have either sts feature or a file");
        }
        my $results = $self->run_epcr($mismatch, $self->queryfile, $sts_file);
        $self->parse_results($results);
        $mismatch++;
    }
    $self->delete_files;
}
=head2 run_epcr
Arg [1] : Bio::EnsEMBL::Analysis::Runnable::EPCR
Arg [2] : int, the mismatch
Arg [3] : string, query sequence filename
Arg [4] : string, sts filename
Function : construct commandline and run epcr
Returntype: string, filename
Exceptions: throws if system call fails
Example :
=cut
# Build the e-PCR command line and execute it, returning the results filename.
#
# Arg [2] : int, mismatch count (N); defaults to min_mismatch
# Arg [3] : string, query sequence filename; defaults to $self->queryfile
# Arg [4] : string, sts filename; defaults to $self->sts_file
# Returns : string, path of the results file (registered for deletion)
# Throws  : if the system call fails
sub run_epcr {
    my ($self, $mismatch, $query_file, $sts_file) = @_;

    # If a previous pass already produced the default results file, write a
    # per-mismatch variant instead of clobbering it.
    my $results = $self->resultsfile;
    if(-e $results){
        $results .= ".".$mismatch.".results";
    }

    # Fall back to object settings for any argument not supplied.  'defined'
    # is used so that an explicit mismatch of 0 is honoured.
    $mismatch   = $self->min_mismatch unless defined $mismatch;
    # Bug fix: this previously called $self->query_file, a method that does
    # not exist (run() and the base Runnable use 'queryfile').
    $query_file = $self->queryfile    unless defined $query_file;
    $sts_file   = $self->sts_file     unless defined $sts_file;

    # Assemble the e-PCR options (initialised to '' to avoid an
    # uninitialised-value warning when margin is unset).
    my $options = '';
    $options .= " M=".$self->margin    if defined $self->margin;
    $options .= " W=".$self->word_size if defined $self->word_size;
    $options .= " N=$mismatch "        if defined $mismatch;

    my $command = $self->program." ".$sts_file." ".$query_file." ".
        $options." > ".$results;
    print "Running analysis ".$command."\n";
    system($command) == 0 or throw("FAILED to run ".$command);
    $self->files_to_delete($results);
    return $results;
}
=head2 dump_sts_features
Arg [1] : Bio::EnsEMBL::Analysis::Runnable::EPCR
Arg [2] : arrayref of Bio::EnsEMBL::Map::Markers
Function : dump the markers to a file in the format expected by epcr
markers who have already been hit or whose max_primer_dist is 0 or
has zero left or right markers are ignored
Returntype:
Exceptions:
Example :
=cut
# Dump markers to a file in e-PCR's STS format (dbID, left primer, right
# primer, min-max product size), skipping markers that were already hit in a
# previous pass, or that have no usable primers / zero max product size.
#
# Returns : string, the filename written (registered for deletion)
# Throws  : on any file error
sub dump_sts_features {
    my ($self, $sts_features, $filename) = @_;
    if(!$sts_features){
        $sts_features = $self->sts_features;
    }
    my %hit_list = %{$self->hit_list};
    if(!$filename){
        $filename = $self->create_filename("sts", "out");
    }
    $self->files_to_delete($filename);

    # Modernised: three-argument open with a lexical filehandle (the original
    # used a bareword global handle and a two-argument open).
    open(my $out, '>', $filename) or throw("FAILED to open $filename");
  MARKER:foreach my $m (@$sts_features){
        next MARKER if($hit_list{$m->dbID});
        next MARKER if($m->max_primer_dist == 0);
        next MARKER unless(length($m->left_primer) > 0);
        next MARKER unless(length($m->right_primer) > 0);
        my $string = $m->dbID."\t".$m->left_primer."\t".
            $m->right_primer."\t".$m->min_primer_dist."-".
            $m->max_primer_dist."\n";
        print $out $string;
    }
    close($out) or throw("FAILED to close $filename");
    return $filename;
}
=head2 copy_sts_file
Arg [1] : Bio::EnsEMBL::Analysis::Runnable::EPCR
Arg [2] : string, sts file
Arg [3] : string, filename for copy
Function : copys entries from one file into another while
skipping the entries already on the hit list
Returntype: filename
Exceptions: throws if given sts file doesnt exist or if any of the
open or close file commands fail
Example :
=cut
# Copy an STS file, dropping the entries whose ids are already on the hit
# list.  If nothing has been hit yet the original file is returned untouched.
#
# Returns : string, filename of the (possibly filtered) sts file
# Throws  : if the source file does not exist or any file operation fails
sub copy_sts_file {
    my ($self, $sts_file, $filename) = @_;
    if(!$sts_file){
        $sts_file = $self->sts_file;
    }
    if(! -e $sts_file){
        throw("Can't copy file ".$sts_file." which doesn't exist");
    }
    if(!$filename){
        $filename = $self->create_filename("sts", "out");
    }
    $self->files_to_delete($filename);
    my %hit_list = %{$self->hit_list};
    # Nothing hit yet: no filtering needed, reuse the source file directly.
    if(keys(%hit_list) == 0){
        return $sts_file;
    }
    eval{
        # Modernised: three-argument open with lexical filehandles (the
        # original used bareword handles and an injection-prone 2-arg open).
        open(my $out, '>', $filename) or throw("FAILED to open $filename");
        open(my $in,  '<', $sts_file) or throw("FAILED to open $sts_file");
      MARKER:while(<$in>){
            my $id = (split)[0];
            next MARKER if($hit_list{$id});
            print {$out} $_;
        }
        close($out) or throw("FAILED to close $filename");
        close($in)  or throw("FAILED to close $sts_file");
    };
    if($@){
        throw("FAILED to copy $sts_file $@");
    }
    return $filename;
}
=head2 parse_results
Arg [1] : Bio::EnsEMBL::Analysis::Runnable::EPCR
Arg [2] : string, filename
Function : parse the file given into Bio:EnsEMBL::Map::MarkerFeatures
Returntype: none
Exceptions: throws if results file doesnt exist or if open and closes
fail
Example :
=cut
# Parse an e-PCR results file into Bio::EnsEMBL::Map::MarkerFeatures and
# store them via output(); each parsed marker id is added to the hit list so
# later passes skip it.  Expected line format:
#   <seq_region_name>  <start>..<end>  <marker_dbID>
#
# Throws : if the results file does not exist or open/close fails
sub parse_results {
    my ($self, $results) = @_;
    if(!$results){
        $results = $self->resultsfile;
    }
    if(!-e $results){
        throw("Can't open ".$results." as it doesn't exist");
    }
    my $ff = $self->feature_factory;
    my @output;
    # Modernised: three-argument open with a lexical filehandle.
    open(my $fh, '<', $results) or throw("FAILED to open ".$results);
    while(my $line = <$fh>){
        chomp $line;
        #chromosome:NCBI34:1:1:920598:1 615132..615340 121028
        #chromosome:NCBI34:1:1:920598:1 622477..622687 121028
        my ($name, $start, $end, $dbid) =
            $line =~ m!(\S+)\s+(\d+)\.\.(\d+)\s+(\w+)!;
        # Robustness fix: the original created bogus (undef-valued) features
        # for any line that failed the pattern; skip such lines instead.
        next unless defined $dbid;
        my $m = $ff->create_marker($dbid);
        my $mf = $ff->create_marker_feature($start, $end, 0, $m, $name,
                                            $self->query);
        push(@output, $mf);
        $self->hit_list($dbid);
    }
    $self->output(\@output);
    close($fh) or throw("FAILED to close ".$results);
}
| mn1/ensembl-analysis | modules/Bio/EnsEMBL/Analysis/Runnable/EPCR.pm | Perl | apache-2.0 | 12,300 |
# Auto-generated style VMOMI wrapper: an array of HostNatServicePortForwardSpec.
package VMOMI::ArrayOfHostNatServicePortForwardSpec;
use parent 'VMOMI::ComplexType';

use strict;
use warnings;

# No ancestors of its own beyond the implicit ComplexType parent.
our @class_ancestors = ();

# Single member: repeated, optional HostNatServicePortForwardSpec elements.
our @class_members = (
    ['HostNatServicePortForwardSpec', 'HostNatServicePortForwardSpec', 1, 1],
);

sub get_class_ancestors {
    return @class_ancestors;
}

# Members are the parent's members followed by this class's own.
sub get_class_members {
    my $class = shift;
    return ($class->SUPER::get_class_members(), @class_members);
}

1;
| stumpr/p5-vmomi | lib/VMOMI/ArrayOfHostNatServicePortForwardSpec.pm | Perl | apache-2.0 | 465 |
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% entrada ("input"): test queries for the SAGE theatre-listings demo.
% Each entry(Id, Gloss, Tokens) fact pairs a Spanish sentence (Gloss, kept
% verbatim as data) with the token stream ("CODE", Value, _, _) the parser
% is expected to accept.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% "Print the theatres showing Hamlet at 20:00"
entry(1,
'Imprime los teatros que echan a Hamlet a las 20',
[
("BAS0AC0001",_,_,_),
("TEA1ELTEAT",_,_,_),
("BAS0SXRELA",_,_,_),
("TEA1ASTEOB",_,_,_),
("BAS0SXPRPO",_,_,_),
("TEA1VANOO3",_,_,_),
("TEA1SXPRPO",_,_,_),
("TEA1ELHSES",_,_,_)
]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% "Print the Hamlets in Madrid"
entry(2,
'Imprime los Hamlets de Madrid',
[
("BAS0AC0001",_,_,_),
("TEA1VANOO3",_,_,_),
("TEA1VANOC1",_,_,_)
]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% "Tell me the authors in Madrid at 15:00 with dramas"
entry(3,
'Dime los Autores de Madrid de las 15:00 con dramas',
[
("BAS0AC0007",_,_,_),
("TEA1ELAUTO",_,_,_),
("TEA1SXPRPO",_,_,_),
("TEA1VANOC1",_,_,_),
("TEA1SXPRPO",_,_,_),
("TEA1VAHSE1",_,_,_),
("TEA1SXPRPO",_,_,_),
("TEA1VAESOB",_,_,_)
]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% "Print all the works by Shakespeare showing in Madrid"
% (token-by-token gloss translated from the original Spanish comments)
entry(4,
'Imprime todas las obras de Shakespeare que se echan en Madrid',
[
("BAS0AC0001",_,_,_), %Print
("BAS0SXCTFC",'ALL',_,_), %all
("TEA1ELOBRA",_,_,_), %the works
("BAS0SXPRPO",_,_,_), %of
("TEA1VANOA1",_,_,_), %Shakespeare
("BAS0SXRELA",_,_,_), %that
("BAS0SXPRPO",_,_,_), %"se" (reflexive; tagged as a preposition :-{ )
("TEA1ASTEOB",_,_,_), %are showing
("BAS0SXPRPO",_,_,_), %in
("TEA1VANOC1",_,_,_) %Madrid
]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% "Print theatres in Madrid"
entry(5,
'Imprime teatros de Madrid',
[
("BAS0AC0001",_,_,_),
("TEA1ELTEAT",_,_,_),
("TEA1VANOC1",_,_,_)
]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% "Print theatres with dramas"
entry(6,
'Imprime teatros con dramas',
[
("BAS0AC0001",_,_,_),
("TEA1ELTEAT",_,_,_),
("TEA1VAESOB",_,_,_)
]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% "Print all the dramas"
entry(7,
'Imprime todos los dramas',
[
("BAS0AC0001",_,_,_),
("BAS0SXCTFC",'ALL',_,_),
("TEA1VAESOB",_,_,_)
]).
# exoclient: the first exochat client
# version 1.0
# by Byron Kellett
# http://almightybyron.co.uk/a/exochat

# Fixes: enable strict/warnings, declare $ans lexically (it was an
# undeclared global), and guard chomp against undef at EOF (which would
# otherwise warn under 'warnings').
use strict;
use warnings;

use File::Slurp;    # NOTE(review): currently unused — kept for planned features?
#use CSV;

print "exoclient\n\n";

# Read one line of input; <> reads STDIN or any files named in @ARGV.
my $ans = <>;
chomp($ans) if defined $ans;
#!/usr/bin/perl -w
# set.pl -- solver for the card game "Set".
# by Neil Kandalgaonkar
# Copyright (c) 2007, Neil Kandalgaonkar
# Released under the BSD license
# http://www.opensource.org/licenses/bsd-license.php
# How to use it:
#
# run program
# on standard input, enter in a group of cards encoded like so
# 1rfo 2pss 3ged
# followed by a newline
#
# the example set means "one red filled oval", "two purple shaded squiggles",
# "3 green empty diamonds"
#
# program will output all valid sets
# Bitmask semantics for one attribute group (3 cards combined by OR):
# a valid Set has, per attribute, either all three cards equal (one bit set)
# or all three different (all three bits set).
my $ALL_FIRST = 0b001;
my $ALL_SECOND = 0b010;
my $ALL_THIRD = 0b100;
my $ALL_DIFFERENT = 0b111;

# Card encoding: one character per attribute, in this order; each attribute
# occupies its own 4-bit nibble in the binary representation.
my @attr_allowed = (
    ["number", "123"],    # 1, 2, 3
    ["color", "rgp"],     # red, green, purple
    ["shade", "fse"],     # filled, shaded, empty
    ["shape", "sod"],     # squiggle, oval, diamond
);

# Main loop: each input line is a whitespace-separated hand of cards,
# e.g. "1rfo 2pss 3ged".  All valid Sets found in the hand are printed.
while (<>) {
    chomp;
    my @input_card = split ' ' => $_;
    my @bcard;
    # for every card, convert it to a binary representation -- this
    # is useful for fast evaluation of the set
    for my $card (@input_card) {
        my $bcard;
        for my $attr_idx (0..$#attr_allowed) {
            my ($attrname, $allowed) = @{$attr_allowed[$attr_idx]};
            my $attr = substr($card, $attr_idx, 1);
            my $found_idx = index($allowed, $attr);
            if ($found_idx == -1) {
                die "did not recognize $attrname = $attr";
            }
            # Set bit $found_idx inside nibble $attr_idx.
            $bcard |= 1 << ($attr_idx * 4 + $found_idx);
        }
        push @bcard, $bcard;
    }
    # we send off an array with a binary representation of the cards,
    # and get back an array of arrays of positions of cards that are sets
    my @allSetIndex = allSetIndex(@bcard);
    if (@allSetIndex) {
        # remap the indices back to the original string representations of the cards.
        for my $setIndex (@allSetIndex) {
            my $printableSet = join " & ", map { $input_card[$_] } @$setIndex;
            print "$printableSet\n";
        }
    } else {
        warn "no valid sets found\n";
    }
}
# Return every triple of card positions (i < j < k) whose OR-combined
# binary representations form a valid Set according to isSet().
sub allSetIndex {
    my @bcard = @_;
    my @found;
    my $last = $#bcard;
    for my $i (0 .. $last) {
        for my $j ($i + 1 .. $last) {
            for my $k ($j + 1 .. $last) {
                push @found, [$i, $j, $k]
                    if isSet($bcard[$i] | $bcard[$j] | $bcard[$k]);
            }
        }
    }
    return @found;
}
# compare each combined property to an expected bitmask.
# a property, such as color, can only be 001, 010, 100, or 111
# just do that for each property
# Decide whether the OR of three card bitmasks forms a valid Set.
# Each of the 4 attributes occupies one 4-bit nibble (3 bits used); the
# combined nibble must be a single bit (all three cards share the value)
# or all three bits (all three cards differ).
# Fix: $allAttr was an undeclared package global; it is now a lexical.
sub isSet {
    my ($set) = @_;
    for my $i (0 .. 3) {
        # bitshift to get a group of three over, then zero out all else
        my $attr_bits = ($set >> ($i * 4)) & 7;
        return 0
            unless $attr_bits == 0b001   # all cards have the first value
                || $attr_bits == 0b010   # all cards have the second value
                || $attr_bits == 0b100   # all cards have the third value
                || $attr_bits == 0b111;  # all three values differ
    }
    return 1;
}
| neilk/set-solver | set.pl | Perl | bsd-2-clause | 2,700 |
package App::Wubot::Plugin::CPAN;
use Moose;

our $VERSION = '0.3.4'; # VERSION

use App::Wubot::Logger;

# Seconds before a cached "module out of date" entry expires (default 1h).
has 'expire_age' => ( is => 'rw',
                      isa => 'Num',
                      default => sub { 60*60 },
                  );

# Callback used to emit reaction messages from the (possibly forked) check.
has 'reactor' => ( is => 'ro',
                   isa => 'CodeRef',
                   required => 1,
               );

# Lazily-created Log4perl logger scoped to this package.
has 'logger'  => ( is => 'ro',
                   isa => 'Log::Log4perl::Logger',
                   lazy => 1,
                   default => sub {
                       return Log::Log4perl::get_logger( __PACKAGE__ );
                   },
               );

with 'App::Wubot::Plugin::Roles::Cache';
with 'App::Wubot::Plugin::Roles::Plugin';
# Standard monitor check(): fork a child (unless config 'nofork' is set),
# run "perl -MCPAN -e 'CPAN::Shell->r'" and report each locally-installed
# module that is older than its CPAN release, de-duplicated via the cache.
# The worker path never returns — it calls exit(0) at the end.
sub check {
    my ( $self, $inputs ) = @_;

    my $cache  = $inputs->{cache};
    my $config = $inputs->{config};

    # todo: forking plugin fu to prevent running more than one at once
    unless ( $config->{nofork} ) {
        my $pid = fork();
        if ( $pid ) {
            # parent process
            return { react => { subject => "launched cpan child process: $pid",
                                coalesce => $self->key,
                            } }
        }
    }
    # NOTE(review): from here on we are either the forked child or, with
    # 'nofork', the main process — which will also hit exit(0) below.
    # Confirm that exiting the main process is intended for nofork mode.

    eval {                          # try
        my $perl = $config->{perl} || 'perl';
        my $command = "$perl -MCPAN -e 'CPAN::Shell->r'";

        my @react;  # NOTE(review): unused — left over from an earlier design?

        # run command capturing output
        open my $run, "-|", "$command 2>&1" or die "Unable to execute $command: $!";

      MODULE:
        while ( my $line = <$run> ) {
            chomp $line;

            $self->logger->trace( $line );

            # CPAN::Shell->r lines: <module> <installed> <latest> <path>
            next unless $line =~ m|^(\S+)\s+(\S+)\s+(\S+)\s+(\S+)$|;
            my ( $module, $installed, $latest, $path ) = ( $1, $2, $3, $4 );

            next MODULE if $installed eq "undef" || ! $latest;

            #$self->logger->info( "Module needs update: $module: $installed => $latest" );

            my $cache_string = "$module:$latest";

            # if we've already seen this item, move along
            if ( $self->cache_is_seen( $cache, $cache_string ) ) {
                $self->logger->trace( "Already seen: ", $cache_string );

                # touch cache time on this subject
                $self->cache_mark_seen( $cache, $cache_string );

                next MODULE;
            }

            # keep track of this item so we don't fetch it again
            $self->cache_mark_seen( $cache, $cache_string );

            my $subject = "perl module out of date: $module: $installed => $latest";
            $self->logger->info( $subject );

            # NOTE(review): 'lastest' looks like a typo for 'latest', but it
            # is a message field consumers may rely on — fix only with a
            # coordinated change downstream.
            $self->reactor->( { subject   => $subject,
                                module    => $module,
                                installed => $installed,
                                lastest   => $latest,
                                path      => $path,
                                link      => "http://search.cpan.org/perldoc?$module",
                            } );
        }
        close $run;

        # check exit status
        unless ( $? eq 0 ) {
            my $status = $? >> 8;
            my $signal = $? & 127;
            $self->reactor->( { subject => "error running '$command': status=$status signal=$signal" } );
        }

        # write out the updated cache
        $self->write_cache( $cache );

        1;
    } or do {                   # catch
        $self->logger->info( "ERROR: getting cpan module info: $@" );
    };

    exit 0;
}
1;
__END__
=head1 NAME
App::Wubot::Plugin::CPAN - verify that the latest versions of all Perl modules are installed
=head1 VERSION
version 0.3.4
=head1 SYNOPSIS
# The plugin configuration lives here:
~/wubot/config/plugins/CPAN/myhostname.yaml
---
delay: 1d
timeout: 300
perl: /usr/local/bin/perl
=head1 DESCRIPTION
This plugin checks if there are any perl modules installed locally
which have a newer version available on CPAN. The idea was stolen
from theory's nagios check:
https://github.com/theory/check_perl_modules/blob/master/bin/check_perl_modules
I originally tried to steal the logic from the check_perl_modules
script, but the script makes a large number of calls (one for every
module installed) to a web service (cpanmetadb.appspot.com) which
overran its quota several times during my test. So for the time
being, it uses the rather ugly approach of parsing the output of the
command:
perl -MCPAN -e 'CPAN::Shell->r'
By default it will use the first 'perl' in the path, although you can
set the perl path (see the example above). This makes it possible to
configure multiple monitors per host if there is more than one perl
installation you want to monitor.
=head1 SUBROUTINES/METHODS
=over 8
=item check( $inputs )
The standard monitor check() method.
=back
| gitpan/wubot | lib/App/Wubot/Plugin/CPAN.pm | Perl | bsd-3-clause | 4,824 |
# vim:ts=4:sw=4:expandtab
# x11vis - an X11 protocol visualizer
# © 2011 Michael Stapelberg and contributors (see ../LICENSE)
#
package PacketHandler;
use strict;
use warnings;
use Data::Dumper;
use AnyEvent::Socket;
use AnyEvent::Handle;
use AnyEvent;
use Moose;
use JSON::XS;
use IO::Handle;
use Time::HiRes qw(gettimeofday tv_interval);
use Burst;
use FindBin;
use lib "$FindBin::RealBin/gen/";
use RequestDissector;
use RequestDissector::RANDR;
use ReplyDissector;
use ReplyDissector::RANDR;
use EventDissector;
use ErrorDissector;
use Dissector::ICCCM;
use Mappings;
use FileOutput;
use Extension;
use v5.10;
with 'Elapsed';
# Identifier of the traced client connection; fixed at construction time.
has 'conn_id' => (is => 'ro', isa => 'Int', required => 1);

# mapping of X11 IDs to our own IDs
has 'x_ids' => (
    traits => [ 'Hash' ],
    is => 'rw',
    isa => 'HashRef[Str]',
    default => sub { {} },
    handles => {
        add_mapping => 'set',
        id_for_xid => 'get',
        xid_known => 'exists'
    }
);

# X extensions registered from successful QueryExtension replies.
has '_extensions' => (
    traits => [ 'Array' ],
    is => 'rw',
    isa => 'ArrayRef[Extension]',
    handles => {
        add_extension => 'push',
        extensions => 'elements',
    }
);

# Client request sequence counter, incremented per dumped request.
has 'sequence' => (
    traits => [ 'Counter' ],
    is => 'rw',
    isa => 'Int',
    default => 1, # sequence 0 is the x11 connection handshake
    handles => {
        inc_sequence => 'inc',
    }
);

# Requests awaiting their reply, keyed by sequence number; the stored value
# is the request's data hash (used e.g. by dump_reply/reply_icing).
has '_outstanding_replies' => (
    traits => [ 'Hash' ],
    is => 'rw',
    isa => 'HashRef',
    default => sub { {} },
    handles => {
        expect_reply => 'set',
        awaiting_reply => 'exists',
        type_of_reply => 'get',
    }
);

# Packet bursts: child_burst carries client->server traffic, x11_burst
# carries server->client traffic; both are created in BUILD.
has [ 'child_burst', 'x11_burst' ] => (
    is => 'rw',
    isa => 'Burst',
);

# shortcut to the Mappings singleton
my $mappings = Mappings->instance;

# Moose post-construction hook: create the two per-connection bursts.
sub BUILD {
    my ($self) = @_;
    $self->child_burst(Burst->new(conn_id => $self->conn_id, direction => 'to_server'));
    $self->x11_burst(Burst->new(conn_id => $self->conn_id, direction => 'to_client'));
}
# Record an outgoing request on the server-bound burst, remember it under
# its sequence number (so the matching reply can find it), then advance the
# sequence counter.
sub dump_request {
    my ($self, $data) = @_;
    $data->{type} = 'request';
    $data->{seq} = $self->sequence;
    $data->{elapsed} = $self->elapsed;
    $self->child_burst->add_packet(encode_json($data));
    # Store before incrementing: the reply will arrive with this sequence.
    $self->expect_reply($self->sequence, $data);
    $self->inc_sequence;
}
# Record an incoming reply on the client-bound burst.  When the reply is a
# successful QueryExtension, also register the extension (opcode and
# error/event bases) so later packets can be attributed to it; the
# extension's name comes from the matching request stored by dump_request.
# Fix: removed a leftover debug dump ('say "ext = " . Dumper($ext)') that
# printed to stdout on every extension registration.
sub dump_reply {
    my ($self, $data) = @_;
    $data->{type} = 'reply';
    $data->{elapsed} = $self->elapsed;
    $self->x11_burst->add_packet(encode_json($data));

    # handle X extensions
    if ($data->{name} eq 'QueryExtension' &&
        $data->{moredetails}->{present} == 1) {
        my %d = %{$data->{moredetails}};
        my $req_data = $self->type_of_reply($data->{seq});
        my %rd = %{$req_data->{moredetails}};
        my $ext = Extension->new(
            name => $rd{name},
            opcode => $d{major_opcode},
            first_error => $d{first_error},
            first_event => $d{first_event}
        );
        $self->add_extension($ext);
    }
}
# Record an event on the client-bound burst (events always flow
# server -> client).
sub dump_event {
    my ($self, $packet) = @_;
    @{$packet}{'type', 'elapsed'} = ('event', $self->elapsed);
    $self->x11_burst->add_packet(encode_json($packet));
}
# Record a protocol error on the client-bound burst.
sub dump_error {
    my ($self, $packet) = @_;
    @{$packet}{'type', 'elapsed'} = ('error', $self->elapsed);
    $self->x11_burst->add_packet(encode_json($packet));
}
# Write a "cleverness" annotation (derived metadata such as resolved atom or
# window names) straight to the FileOutput singleton, bypassing the bursts.
sub dump_cleverness {
    my ($self, $packet) = @_;
    @{$packet}{'type', 'elapsed'} = ('cleverness', $self->elapsed);
    FileOutput->instance->write(encode_json($packet));
}
# Shortcut which returns a formatted ID (wrapped in % signs), resolved via
# the Mappings singleton.
sub id {
    return '%' . $mappings->id_for(@_) . '%';
}
# Shortcut which turns a list of IDs into a string of comma-separated,
# formatted IDs.
# Fix: the original mapped over `shift`, which yields only the first
# argument, so multi-ID calls silently dropped the rest; mapping over @_
# handles both single- and multi-ID calls.
# NOTE(review): confirm no caller passes an array *reference* here.
sub ids {
    return join(', ', map { id($_) } @_);
}
# Produce a human-readable one-line summary ("icing") for a dissected
# reply.  $data->{name} selects the reply type; $data->{moredetails} (%d)
# holds the reply's fields, while %rd holds the fields of the request this
# reply answers (looked up via the sequence number).  Side effects: for
# InternAtom/GetAtomName the atom<->name association is recorded in the
# Mappings singleton and a "cleverness" record is emitted; likewise for a
# WM_NAME GetProperty.  Returns undef for reply types with no summary yet
# (the caller substitutes a placeholder).
sub reply_icing {
    my ($self, $data) = @_;
    my $name = $data->{name};
    my %d = %{$data->{moredetails}};
    #say "(reply) icing for $name, data = " . Dumper(\%d);
    my $req_data = $self->type_of_reply($data->{seq});
    my %rd = %{$req_data->{moredetails}};
    return id($d{focus} => 'window') if $name eq 'GetInputFocus';
    if ($name eq 'InternAtom') {
        # remember the atom's name and publish it as derived knowledge
        $mappings->add_atom($rd{name} => $d{atom});
        my $id = $mappings->id_for($d{atom}, 'atom');
        $self->dump_cleverness({
            id => $id,
            title => $rd{name},
            idtype => 'atom',
            moredetails => {
                name => $rd{name},
            }
        });
        return id($d{atom} => 'atom');
    }
    if ($name eq 'GetAtomName') {
        # inverse direction of InternAtom: reply carries the name,
        # the request carried the atom id
        $mappings->add_atom($d{name} => $rd{atom});
        my $id = $mappings->id_for($rd{atom} => 'atom');
        $self->dump_cleverness({
            id => $id,
            title => $d{name},
            idtype => 'atom',
            moredetails => {
                name => $d{name},
            }
        });
        return "$d{name}";
    }
    if ($name eq 'GetGeometry') {
        return id($rd{drawable}) . " ($d{x}, $d{y}) $d{width} x $d{height}";
    }
    if ($name eq 'GetSelectionOwner') {
        return id($d{owner} => 'window');
    }
    if ($name eq 'TranslateCoordinates') {
        return "($d{dst_x}, $d{dst_y}) on " . id($rd{dst_window});
    }
    if ($name eq 'GetWindowAttributes') {
        return id($rd{window}) . " class $d{class}, state $d{map_state}, o_redir $d{override_redirect}";
    }
    if ($name eq 'QueryBestSize') {
        return "$d{width} x $d{height}";
    }
    if ($name eq 'ListExtensions') {
        return join(', ', map { $_->{name} } @{$d{names}});
    }
    if ($name eq 'ListProperties') {
        return id($rd{window} => 'window') . ' has ' . join(', ', map { id($_ => 'atom') } @{$d{atoms}});
    }
    if ($name eq 'GetProperty') {
        # a WM_NAME property value doubles as the window's title
        my $atom = $mappings->get_atom_xid('WM_NAME');
        if (defined($atom) && $atom == $rd{property} && $d{type} != 0) {
            $self->dump_cleverness({
                id => $mappings->id_for($rd{window}, 'window'),
                title => $d{value},
                idtype => 'window',
                moredetails => {
                    name => $d{value},
                }
            });
        }
        my $details = id($rd{property} => 'atom');
        # type == 0 (None) means the property is not set on the window
        if ($d{type} == 0) {
            $details .= ' is not set';
        } else {
            $details .= " = $d{value} (type " . id($d{type} => 'atom') . ')';
        }
        return $details . ' on ' . id($rd{window} => 'window');
    }
    if ($name eq 'QueryTree') {
        return "(" . (scalar @{$d{children}}) . ' children)';
    }
    if ($name eq 'QueryExtension') {
        return "$rd{name} " . ($d{present} ? 'present' : 'not present');
    }
    # fall through: no summary implemented for this reply type
    undef;
}
# Produce a human-readable one-line summary ("icing") for a dissected
# request.  $data->{name} selects the request type; $data->{moredetails}
# (%d) holds the dissected fields.  Side effects: DestroyWindow /
# FreePixmap / FreeGC / CloseFont drop the resource's mapping, OpenFont
# emits a "cleverness" record, and a ChangeProperty of WM_NAME records the
# window title.  Returns undef for request types with no summary yet
# (the caller substitutes a placeholder).
sub request_icing {
    my ($self, $data) = @_;
    my $name = $data->{name};
    my %d = %{$data->{moredetails}};
    say "icing for $name, data = " . Dumper($data);
    # these requests have no details
    my @no_details = qw(GetInputFocus GetModifierMapping ListExtensions);
    # NOTE(review): ~~ (smartmatch) is experimental/deprecated in modern
    # perls; kept here as written.
    return '' if $name ~~ @no_details;
    # display the ASCII names of atoms and extensions
    return $d{name} if $name eq 'InternAtom';
    return id($d{atom} => 'atom') if $name eq 'GetAtomName';
    return $d{name} if $name eq 'QueryExtension';
    return id($d{focus} => 'window') if $name eq 'SetInputFocus';
    my @single_window = qw(MapWindow MapSubWindows DestroySubwindows UnmapWindow ListProperties);
    return id($d{window} => 'window') if $name ~~ @single_window;
    if ($name eq 'DestroyWindow') {
        # resolve the id before dropping the mapping for the dead window
        my $win = $mappings->id_for($d{window}, 'window');
        $mappings->delete_mapping($d{window});
        return "%$win%";
    }
    if ($name eq 'GrabKey') {
        # TODO: modifier human readable
        return "$d{key} on " . id($d{grab_window} => 'window');
    }
    if ($name eq 'GrabServer' || $name eq 'UngrabServer') {
        return "";
    }
    if ($name eq 'GrabButton') {
        return "button $d{button} on " . id($d{grab_window}, 'window');
    }
    if ($name eq 'CopyArea') {
        return "$d{width} x $d{height} from " . id($d{src_drawable}) .
               " ($d{src_x}, $d{src_y}) to " . id($d{dst_drawable}) .
               " ($d{dst_x}, $d{dst_y})";
    }
    if ($name eq 'PolyFillRectangle') {
        return (scalar @{$d{rectangles}}) . " rects on " . id($d{drawable});
    }
    if ($name eq 'PolyLine') {
        return (scalar @{$d{points}}) . " points on " . id($d{drawable});
    }
    if ($name eq 'PolySegment') {
        return (scalar @{$d{segments}}) . " segments on " . id($d{drawable});
    }
    if ($name eq 'FillPoly') {
        return (scalar @{$d{points}}) . " points on " . id($d{drawable});
    }
    if ($name eq 'CreateWindow') {
        return id($d{wid} => 'window') . ' (parent ' . id($d{parent} => 'window') . ") ($d{x}, $d{y}) $d{width} x $d{height}";
    }
    if ($name eq 'GetWindowAttributes') {
        return id($d{window} => 'window');
    }
    if ($name eq 'ReparentWindow') {
        return id($d{window} => 'window') . ' into ' . id($d{parent} => 'window') . " at ($d{x}, $d{y})";
    }
    if ($name eq 'ChangeSaveSet') {
        return "$d{mode} " . id($d{window} => 'window');
    }
    if ($name eq 'GetKeyboardMapping') {
        return "$d{count} codes starting from $d{first_keycode}"
    }
    if ($name eq 'OpenFont') {
        # record the font id -> name association as derived knowledge
        my $id = $mappings->id_for($d{fid} => 'font');
        $self->dump_cleverness({
            id => $id,
            title => $d{name},
            idtype => 'font',
        });
        return "%$id%";
    }
    if ($name eq 'ListFontsWithInfo' ||
        $name eq 'ListFonts') {
        return "$d{pattern}";
    }
    if ($name eq 'QueryFont') {
        return id($d{font} => 'font');
    }
    # display translated X11 IDs
    if ($name eq 'GetProperty') {
        my $property = id($d{property} => 'atom');
        my $window = id($d{window} => 'window');
        $data->{_references} = [ $property, $window ];
        return "$property of $window";
    }
    if ($name eq 'GetGeometry') {
        return id($d{drawable});
    }
    if ($name eq 'TranslateCoordinates') {
        my $src = id($d{src_window});
        my $dst = id($d{dst_window});
        my $src_x = $d{src_x};
        my $src_y = $d{src_y};
        # TODO: better description?
        return "($src_x, $src_y) from $src to $dst";
    }
    if ($name eq 'QueryTree') {
        return id($d{window} => 'window');
    }
    if ($name eq 'CreatePixmap') {
        return id($d{pid} => 'pixmap') . ' on ' . id($d{drawable}) . " ($d{width} x $d{height})";
    }
    if ($name eq 'CreateGC') {
        return id($d{cid} => 'gcontext') . ' on ' . id($d{drawable});
    }
    if ($name eq 'ChangeWindowAttributes') {
        my $details = id($d{window} => 'window');
        # after removing the window and the mask, whatever single key is
        # left (if exactly one) is the attribute being changed
        delete $d{window};
        delete $d{value_mask};
        say "left:" . Dumper(\%d);
        if ((keys %d) == 1) {
            my $key = (keys %d)[0];
            if ($key eq 'cursor') {
                $details .= ' cursor=' . id($d{cursor} => 'cursor');
            } elsif (ref($d{$key}) eq 'ARRAY') {
                $details .= " $key=" . join(', ', @{$d{$key}});
            } else {
                $details .= " $key=$d{$key}";
            }
        }
        return $details;
    }
    if ($name eq 'ChangeGC') {
        # same single-attribute trick as ChangeWindowAttributes above
        my $details = id($d{gc} => 'gcontext');
        delete $d{gc};
        delete $d{value_mask};
        if ((keys %d) == 1) {
            my $key = (keys %d)[0];
            if ($key eq 'foreground' || $key eq 'background') {
                # TODO: colorpixel to hex
                $details .= " $key=" ;
            } elsif (ref($d{$key}) eq 'ARRAY') {
                $details .= " $key=" . join(', ', @{$d{$key}});
            } else {
                $details .= " $key=$d{$key}";
            }
        }
        return $details;
    }
    if ($name eq 'ConfigureWindow') {
        my $details = id($d{window} => 'window');
        if (exists $d{x} && exists $d{y}) {
            $details .= " ($d{x}, $d{y})";
        }
        # TODO: single of x, y, w, h
        if (exists $d{width} && exists $d{height}) {
            $details .= " $d{width} x $d{height}";
        }
        return $details;
    }
    if ($name eq 'ChangeProperty') {
        my $win = $mappings->id_for($d{window}, 'window');
        # setting WM_NAME doubles as setting the window's title
        my $name_atom = $mappings->get_atom_xid('WM_NAME');
        if (defined($name_atom) && $name_atom == $d{property}) {
            $self->dump_cleverness({
                id => $win,
                title => $d{data},
                idtype => 'window',
                moredetails => {
                    name => $d{data},
                }
            });
        }
        my $normal_hints_atom = $mappings->get_atom_xid('WM_NORMAL_HINTS');
        if (defined($normal_hints_atom) && $normal_hints_atom == $d{property}) {
            return id($d{property} => 'atom') . " on %$win%: " . Dissector::ICCCM::decode_wm_size_hints($d{data});
        }
        my $wm_state_atom = $mappings->get_atom_xid('_NET_WM_STATE');
        if (defined($wm_state_atom) && $wm_state_atom == $d{property}) {
            return id($d{property} => 'atom') . " on %$win%: " . ids(unpack('L' x $d{data_len}, $d{data}));
        }
        return id($d{property} => 'atom') . " on %$win%";
    }
    if ($name eq 'FreePixmap') {
        my $details = id($d{pixmap} => 'pixmap');
        $mappings->delete_mapping($d{pixmap});
        return $details;
    }
    if ($name eq 'FreeGC') {
        my $details = id($d{gc} => 'gcontext');
        $mappings->delete_mapping($d{gc});
        return $details;
    }
    if ($name eq 'CloseFont') {
        my $details = id($d{font} => 'font');
        $mappings->delete_mapping($d{font});
        return $details;
    }
    if ($name eq 'ImageText8') {
        # TODO: ellipsize
        return id($d{drawable}) . " at $d{x}, $d{y}: $d{string}";
    }
    if ($name eq 'ClearArea') {
        return id($d{window} => 'window') . " ($d{x}, $d{y}) $d{width} x $d{height}";
    }
    if ($name eq 'UngrabKey') {
        # TODO: modifier
        return "$d{key} on " . id($d{grab_window});
    }
    if ($name eq 'QueryBestSize') {
        # NOTE(review): other QueryBestSize classes fall through to undef
        if ($d{class} eq 'LargestCursor') {
            return 'largest cursor size on ' . id($d{drawable} => 'window');
        }
    }
    if ($name eq 'CreateGlyphCursor') {
        return id($d{cid} => 'cursor') . " from char $d{source_char} of " . id($d{source_font} => 'font');
    }
    if ($name eq 'SetSelectionOwner') {
        return id($d{owner}) . ' owns ' . id($d{selection} => 'atom');
    }
    if ($name eq 'SendEvent') {
        return 'to ' . id($d{destination});
    }
    # fall through: no summary implemented for this request type
    undef
}
# Produce a human-readable one-line summary ("icing") for a dissected
# event.  $data->{name} selects the event type; $data->{moredetails} (%d)
# holds the event's fields.  Pure lookup/formatting, no side effects
# beyond the debug say().  Returns undef for event types with no summary
# yet (the caller substitutes a placeholder).
sub event_icing {
    my ($self, $data) = @_;
    my $name = $data->{name};
    my %d = %{$data->{moredetails}};
    say "(event) icing for $name";
    if ($name eq 'MapNotify') {
        return id($d{window} => 'window');
    }
    if ($name eq 'MapRequest') {
        return id($d{window} => 'window') . ' (parent ' . id($d{parent} => 'window') . ')';
    }
    if ($name eq 'PropertyNotify') {
        return id($d{atom} => 'atom') . ' on ' . id($d{window} => 'window');
    }
    if ($name eq 'ConfigureNotify') {
        return id($d{window} => 'window') . " ($d{x}, $d{y}) $d{width} x $d{height}";
    }
    if ($name eq 'Expose') {
        return id($d{window} => 'window') . " ($d{x}, $d{y}) $d{width} x $d{height}, $d{count} following";
    }
    if ($name eq 'FocusIn') {
        return id($d{event} => 'window') . " (mode = $d{mode}, detail = $d{detail})";
    }
    if ($name eq 'ReparentNotify') {
        return id($d{window} => 'window') . ' now in ' . id($d{parent} => 'window') . " at ($d{x}, $d{y})";
    }
    if ($name eq 'NoExposure') {
        return id($d{drawable});
    }
    if ($name eq 'VisibilityNotify') {
        return id($d{window} => 'window') . " $d{state}";
    }
    if ($name eq 'MappingNotify') {
        return "$d{request}";
    }
    if ($name eq 'EnterNotify') {
        return id($d{event} => 'event') . " at ($d{event_x}, $d{event_y})";
    }
    if ($name eq 'KeyPress') {
        return "key $d{detail} on " . id($d{event});
    }
    if ($name eq 'UnmapNotify') {
        return id($d{window} => 'window');
    }
    if ($name eq 'DestroyNotify') {
        return id($d{window} => 'window');
    }
    # TODO: ButtonPress
    # TODO: MotionNotify
    undef
}
# Dissect one raw request packet.  Tries the core-protocol dissector
# first; if that yields nothing, checks whether the opcode belongs to a
# registered extension (currently only RANDR has a dissector).  On
# success, attaches the summary line and dumps the record; otherwise the
# sequence counter is still advanced so reply attribution stays in sync.
sub handle_request {
    my ($self, $request) = @_;
    my ($opcode, $subreq) = unpack('CC', $request);
    say "Handling request opcode $opcode";
    my $data = RequestDissector::dissect_request($request);
    if (!defined($data)) {
        my ($ext) = grep { $_->opcode == $opcode } $self->extensions;
        if (defined($ext)) {
            say "ext = " . $ext->name;
            # XXX: generate name
            if ($ext->name eq 'RANDR') {
                say "subreq = $subreq";
                $data = RequestDissector::RANDR::dissect_request($request);
                say "now = " . Dumper($data);
            }
        }
    }
    # NOTE(review): $data is a hash ref here, so length() measures its
    # stringified form ("HASH(0x...)") and is effectively always > 5 when
    # defined -- looks like a leftover from when dissectors returned
    # strings; confirm before tightening.
    if (defined($data) && length($data) > 5) {
        # add the icing to the cake
        my $details = $self->request_icing($data);
        $details = '<strong>NOT YET IMPLEMENTED</strong>' unless defined($details);
        $data->{details} = $details;
        $self->dump_request($data);
        return;
    }
    say "Unhandled request with opcode $opcode";
    $self->inc_sequence;
}
# Dissect one raw X11 error packet and dump it with a minimal
# "bad_value=..." summary (a dedicated error_icing() is not implemented
# yet -- see the commented-out call below).
sub handle_error {
    my ($self, $error) = @_;
    say "handling error";
    my $data = ErrorDissector::dissect_error($error, $self);
    # NOTE(review): as in handle_request, length() on this hash ref is
    # effectively always > 5 when defined.
    if (defined($data) && length($data) > 5) {
        # add the icing to the cake
        #my $details = $self->error_icing($data);
        my $details = undef;
        $details = 'bad_value=' . id($data->{moredetails}->{bad_value});
        $data->{details} = $details;
        $self->dump_error($data);
        return;
    }
    say "Unhandled error";
}
# Dissect one raw reply packet.  The sequence number (bytes 2-3 of the
# reply) is used to look up the request we recorded in dump_request; an
# unexpected sequence number is logged and dropped.  RANDR replies get
# their own dissector, everything else goes through the generic one.
sub handle_reply {
    my ($self, $reply) = @_;
    my ($sequence) = unpack("xxS", $reply);
    if (!$self->awaiting_reply($sequence)) {
        say "Received an unexpected reply?!";
        return;
    }
    my $_data = $self->type_of_reply($sequence);
    say "Received reply for " . $_data->{name} . " with length " . length($reply);
    my $data;
    if ($_data->{name} =~ /^RANDR:/) {
        $data = ReplyDissector::RANDR::dissect_reply($reply, $self);
    } else {
        # Generic reply dissector
        $data = ReplyDissector::dissect_reply($reply, $self);
    }
    # NOTE(review): length() on this hash ref is effectively always > 5
    # when defined (see handle_request).
    if (defined($data) && length($data) > 5) {
        #say "data = " . Dumper($data);
        ## add the icing to the cake
        my $details = $self->reply_icing($data);
        $details = '<strong>NOT YET IMPLEMENTED</strong>' unless defined($details);
        $data->{details} = $details;
        $self->dump_reply($data);
        return;
    }
    return;
}
# Dissect one raw event packet (event number is the first byte), attach
# its summary line, and dump it; undissectable events are just logged.
sub handle_event {
    my ($self, $event) = @_;
    my ($number) = unpack('c', $event);
    say "Should dump an event with length ". length($event);
    my $data = EventDissector::dissect_event($event, $self);
    # NOTE(review): length() on this hash ref is effectively always > 5
    # when defined (see handle_request).
    if (defined($data) && length($data) > 5) {
        say "data = " . Dumper($data);
        ## add the icing to the cake
        my $details = $self->event_icing($data);
        $details = '<strong>NOT YET IMPLEMENTED</strong>' unless defined($details);
        $data->{details} = $details;
        $self->dump_event($data);
        return;
    }
    say "Unhandled event with number $number";
    return;
}
# Called when the client goes away.  Currently a no-op: the code that
# terminated the JSON output is commented out, so this only fetches the
# output handle and discards it.
sub client_disconnected {
    my ($self) = @_;
    my $fo = FileOutput->instance;
    my $fh = $fo->output_file;
    #print $fh "]";
}
__PACKAGE__->meta->make_immutable;
1
| x11vis/x11vis | interceptor/lib/PacketHandler.pm | Perl | bsd-3-clause | 19,650 |
#!/usr/bin/perl
# xfst2xml: convert xfst morphological-analyser output (one token per
# line, "word<TAB>analysis"; sentences terminated by #EOS lines) into an
# XML book/article/sentence/token structure built with XML::LibXML.
# Usage: perl xfst2xml.pl quz.xfst book_id
use strict;
use utf8;
use open ':utf8';
binmode STDERR, ':utf8';
use XML::LibXML;
my $num_args = $#ARGV + 1;
if ($num_args != 2) {
    print "\nUsage: perl xfst2xml.pl quz.xfst book_id \n";
    exit;
}
my $file = $ARGV[0];
my $bookID = $ARGV[1];
open (XFST, "<", $file) or die "Can't open input file \"$file\": $!\n";
# counters used to build hierarchical n="article-sentence-token-..." ids
my $abs_sentence_count=1; # absolute sentence number, needed because of Intertext..
my $sentence_count = 1;
my $article_count = 0;
my $token_count =1;
my $sentence = XML::LibXML::Element->new( 's' ); # actual sentence
my $article; # actual chapter
my $dom = XML::LibXML->createDocument ('1.0', 'UTF-8');
my $book = $dom->createElementNS( "", "book" );
$book->setAttribute('id', $bookID);
$dom->setDocumentElement( $book );
# when set, the first input line creates a single catch-all chapter
my $allInOneChapter =1;
while(<XFST>){
    #if(/^[XIV]+\t/){ # for chapters in gregorio..
    #if(/^newChapter/){ # other texts
    if($allInOneChapter){ # texts with no chapter
        # start a new <article> (chapter); with $allInOneChapter this
        # happens exactly once, on the first line of input
        $article = XML::LibXML::Element->new( 'article' );
        $book->appendChild($article);
        #undef $article;
        $article_count++;
        $article->setAttribute('n',$article_count);
        my $tocEntry = XML::LibXML::Element->new( 'tocEntry' );
        # Roman-numeral chapter headings become tocEntry titles
        my ($toc) = ($_ =~ /^([XIV]+)/);
        if($toc){
            $tocEntry->setAttribute('title', $toc);
            $article->appendChild($tocEntry);
        };
        $sentence_count=1;
        $allInOneChapter =0; # texts without chapters -> treat all as same chapter
    }
    elsif(/#EOS/){
        # append prev sentence
        $article->appendChild($sentence);
        $sentence->setAttribute('n', $article_count."-".$sentence_count);
        $sentence->setAttribute('Intertext_id', '1:'.$abs_sentence_count);
        $sentence->setAttribute('lang', 'quz');
        # reset $sentence
        undef $sentence;
        $sentence = XML::LibXML::Element->new( 's' );
        $sentence_count++;
        $abs_sentence_count++;
        $token_count=0;
    }
    else{
        unless(/^\s*$/){
            $token_count++;
            my $t = XML::LibXML::Element->new( 't' );
            $t->setAttribute('n', $article_count."-".$sentence_count."-".$token_count);
            $sentence->appendChild($t);
            # root + derivational suffixes = one token (for lexical alignment), all the rest of the morphemes: 1 morpheme = 1 token
            my ($word, $analysis) = split('\t');
            my $w = XML::LibXML::Element->new( 'w' );
            $w->appendText($word);
            $t->appendChild($w);
            # split the analysis at the [^DB] derivation-boundary marker:
            # $wroot is the root part, the remainder holds the suffix
            # morphemes (separated by [--])
            my ($wroot, $rest) = split('\[\^DB\]',$analysis);
            $analysis =~ s/\Q$wroot\E//;
            $analysis =~ s/\[\^DB\]//;
            my ($empty, @morphs) = split('\[--\]' , $analysis);
            my $morph_count =1;
            # root
            my $root = XML::LibXML::Element->new( 'root' );
            my @rootMorphs = split('\[--\]' , $wroot);
            # surface forms are the letter runs preceding each "[" tag
            my @rootforms = ($wroot =~ m/([A-Za-zñéóúíáüäöÑ']+?)\[/g) ;
            my ($root_only) = ($wroot =~ m/^([A-Za-zñéóúíáüäöÑ']+?)\[/) ;
            my $rootform;
            foreach my $r (@rootforms){
                $rootform .= $r;
            }
            $root->appendText($rootform);
            $root->setAttribute('root', $root_only);
            my $rootmorph_count =1;
            foreach my $m (@rootMorphs){
                # one <rootmorph> per root-part morpheme, carrying its
                # surface form plus optional translation/pos/tag attributes
                my $morpheme = XML::LibXML::Element->new( 'rootmorph' );
                $morpheme->setAttribute('n', $article_count."-".$sentence_count."-".$token_count."-".$morph_count."-".$rootmorph_count);
                $root->appendChild($morpheme);
                my ($form) = ($m =~ /^([^\[]+)/) ;
                #$rootform .= $form;
                $morpheme->appendText($form);
                my ($translation) = ($m =~ /=([^\]]+)/) ;
                if($translation){
                    $morpheme->setAttribute('translation',$translation);
                    # my $trans = XML::LibXML::Element->new( 'trans' );
                    # $trans->appendText($translation);
                    # $morpheme->appendChild($trans);
                }
                my ($postag) = ($m =~ /\[([^\]\+]+)\]/) ;
                if($postag){
                    $morpheme->setAttribute('pos',$postag);
                    # my $pos = XML::LibXML::Element->new( 'pos' );
                    # $pos->appendText($postag);
                    # $morpheme->appendChild($pos);
                }
                my ($mtag) =($m =~ /(\+[^\]]+)/) ;
                if($mtag){
                    $morpheme->setAttribute('tag',$mtag);
                    # my $tag = XML::LibXML::Element->new( 'tag' );
                    # $tag->appendText($mtag);
                    # $morpheme->appendChild($tag);
                }
                $rootmorph_count++;
                #print "f: $form, t: $trans, p: $pos, t: $tag\t";
            }
            #print" rootform: $rootform \n";
            $t->appendChild($root);
            $root->setAttribute('n', $article_count."-".$sentence_count."-".$token_count."-".$morph_count);
            $morph_count++;
            # morphs
            #print $analysis."\n";
            foreach my $m (@morphs){
                # one <morph> per suffix morpheme; text is "-form"
                my $morpheme = XML::LibXML::Element->new( 'morph' );
                $morpheme->setAttribute('n', $article_count."-".$sentence_count."-".$token_count."-".$morph_count);
                $t->appendChild($morpheme);
                my ($form) = ($m =~ /^([^\[]+)/) ;
                #$rootform .= $form;
                $morpheme->appendText("-".$form);
                my ($postag) = ($m =~ /\[([^\]\+]+)\]/) ;
                if($postag){
                    $morpheme->setAttribute('pos',$postag);
                    # my $pos = XML::LibXML::Element->new( 'pos' );
                    # $pos->appendText($postag);
                    # $morpheme->appendChild($pos);
                }
                my ($mtag) =($m =~ /(\+[^\]]+)/) ;
                if($mtag){
                    $morpheme->setAttribute('tag',$mtag);
                    # my $tag = XML::LibXML::Element->new( 'tag' );
                    # $tag->appendText($mtag);
                    # $morpheme->appendChild($tag);
                }
                $morph_count++;
            }
            #print "root: $wroot, tags: @rootPosTags, morphs: @rootMorphs \n";
        }
    }
}
# NOTE(review): a final sentence not followed by #EOS is never appended
# to the document -- confirm the input format guarantees a trailing #EOS.
close(XFST);
my $docstring = $dom->toString(3);
print STDOUT $docstring; | a-rios/squoia | bilingwis_stuff/xfst2xml.pl | Perl | apache-2.0 | 5,335 |
# act_insertUser: CGI action that creates a new "user"-role account.
# Three paths depending on who is logged in: the global "perfstat" admin
# (admin name comes from the request), a group admin (admin name is the
# session user), or anybody else (fatal error).  On validation failure the
# error is stashed in the session and the form state in $queryString; on
# success insertUser() is called.
# NOTE(review): $sessionObj, $request, $perfhome and the helper subs
# (trim, checkAdminName, checkPassword, ...) are presumably declared/
# exported by lib_userConfig.pl via "use vars" -- otherwise the bareword
# globals below would not compile under "use strict"; confirm.
use strict;
package main;
require("lib_userConfig.pl");
# Set UserIndex
$userIndex = setUserIndex();
# init message variables
$sessionObj->param("userMessage2", "");
my $errorMessage = "";
# Login is perfstat admin
if ($sessionObj->param("userName") eq "perfstat") {
    $adminName = $request->param('adminName');
    checkAdminName($adminName);
    $userRole = "user";
    my $userName = trim($request->param('insertUserName'));
    $errorMessage = checkUserNameLocal($userName);
    # NOTE(review): length() yields a number; "eq 0" compares it as a
    # string -- works, but "== 0" would state the intent.
    if (length($errorMessage) eq 0) {
        $password = trim($request->param('password'));
        $errorMessage = checkPassword($password);
    }
    if (length($errorMessage) eq 0) {
        $confirmPassword = trim($request->param('confirmPassword'));
        $errorMessage = checkConfirmPassword($password, $confirmPassword);
    }
    if (length($errorMessage) ne 0) {
        # validation failed: keep the message and re-fill the form
        $sessionObj->param("userMessage2", $errorMessage);
        $queryString = "adminName=$adminName&insertUserName=$userName";
    } else {
        insertUser($userName, $password, $adminName, $userRole, $perfhome);
        $queryString = "&adminName=$adminName&updateNavCode=2";
    }
# Login is group admin
} elsif ($sessionObj->param("role") eq "admin") {
    # same validation sequence as above, but the admin is the session user
    $adminName = $sessionObj->param("userName");
    checkAdminName($adminName);
    my $userName = trim($request->param('insertUserName'));
    $userRole = "user";
    $errorMessage = checkUserNameLocal($userName);
    if (length($errorMessage) eq 0) {
        $password = trim($request->param('password'));
        $errorMessage = checkPassword($password);
    }
    if (length($errorMessage) eq 0) {
        $confirmPassword = trim($request->param('confirmPassword'));
        $errorMessage = checkConfirmPassword($password, $confirmPassword);
    }
    if (length($errorMessage) ne 0) {
        $sessionObj->param("userMessage2", $errorMessage);
        $queryString = "insertUserName=$userName";
    } else {
        insertUser($userName, $password, $adminName, $userRole, $perfhome);
        $queryString = "updateNavCode=2";
    }
# Login is user
} else {
    # single quotes are deliberate: the variable name should appear
    # literally in the error message
    die('ERROR: invalid value for $sessionObj->param("role")')
}
1;
1; | ktenzer/perfstat | ui/appConfigs/userConfig/level1/act_insertUser.pl | Perl | apache-2.0 | 1,985 |
#!/usr/bin/env perl
#
# Cross-platform Makefile generator.
#
# Reads the file `Recipe' to determine the list of generated
# executables and their component objects. Then reads the source
# files to compute #include dependencies. Finally, writes out the
# various target Makefiles.
# PuTTY specifics which could still do with removing:
# - Mac makefile is not portabilised at all. Include directories
# are hardwired, and also the libraries are fixed. This is
# mainly because I was too scared to go anywhere near it.
# - sbcsgen.pl is still run at startup.
#
# FIXME: no attempt made to handle !forceobj in the project files.
use warnings;
use FileHandle;
use File::Basename;
use Cwd;
use Digest::SHA qw(sha512_hex);
# Command-line handling and Recipe location.  Sets $do_unix/@confargs for
# the optional post-generation configure run, then opens the Recipe file
# on the bareword handle IN (read by the parser loop below).
if ($#ARGV >= 0 and ($ARGV[0] eq "-u" or $ARGV[0] eq "-U")) {
    # Convenience for Unix users: -u means that after we finish what
    # we're doing here, we also run mkauto.sh and then 'configure' in
    # the Unix subdirectory. So it's a one-stop shop for regenerating
    # the actual end-product Unix makefile.
    #
    # Arguments supplied after -u go to configure.
    #
    # -U is identical, but runs 'configure' at the _top_ level, for
    # people who habitually do that.
    $do_unix = ($ARGV[0] eq "-U" ? 2 : 1);
    shift @ARGV;
    @confargs = @ARGV;
}
open IN, "Recipe" or do {
    # We want to deal correctly with being run from one of the
    # subdirs in the source tree. So if we can't find Recipe here,
    # try one level up.
    chdir "..";
    open IN, "Recipe" or die "unable to open Recipe file\n";
};
# HACK: One of the source files in `charset' is auto-generated by
# sbcsgen.pl. We need to generate that _now_, before attempting
# dependency analysis.
eval 'chdir "charset"; require "sbcsgen.pl"; chdir ".."; select STDOUT;';
# Recipe parser.  Each non-comment line is either a "!directive", a
# "group = objects" line, a "prog : objects" line, or a "+ objects"
# continuation of the previous list.  "!begin"/"!end" bracket verbatim
# text that is diverted into $help, an aux file, or per-makefile extra
# text via the $divert scalar ref.
@srcdirs = ("./");
$divert = undef; # ref to scalar in which text is currently being put
$help = ""; # list of newline-free lines of help text
$project_name = "project"; # this is a good enough default
%makefiles = (); # maps makefile types to output makefile pathnames
%makefile_extra = (); # maps makefile types to extra Makefile text
%programs = (); # maps prog name + type letter to listref of objects/resources
%groups = (); # maps group name to listref of objects/resources
while (<IN>) {
    chomp;
    @_ = split;
    # If we're gathering help text, keep doing so.
    if (defined $divert) {
        if ((defined $_[0]) && $_[0] eq "!end") {
            $divert = undef;
        } else {
            ${$divert} .= "$_\n";
        }
        next;
    }
    # Skip comments and blank lines.
    next if /^\s*#/ or scalar @_ == 0;
    if ($_[0] eq "!begin" and $_[1] eq "help") { $divert = \$help; next; }
    if ($_[0] eq "!end") { $divert = undef; next; }
    if ($_[0] eq "!name") { $project_name = $_[1]; next; }
    if ($_[0] eq "!srcdir") { push @srcdirs, $_[1]; next; }
    if ($_[0] eq "!makefile" and &mfval($_[1])) { $makefiles{$_[1]}=$_[2]; next;}
    if ($_[0] eq "!specialobj" and &mfval($_[1])) { $specialobj{$_[1]}->{$_[2]} = 1; next;}
    if ($_[0] eq "!cflags" and &mfval($_[1])) {
        ($rest = $_) =~ s/^\s*\S+\s+\S+\s+\S+\s*//; # find rest of input line
        $rest = 1 if $rest eq "";
        $cflags{$_[1]}->{$_[2]} = $rest;
        next;
    }
    if ($_[0] eq "!forceobj") { $forceobj{$_[1]} = 1; next; }
    if ($_[0] eq "!begin") {
        # "!begin >file" diverts into an aux output file; "!begin <type>"
        # diverts into that makefile type's extra text; anything else is
        # discarded into $dummy.
        if ($_[1] =~ /^>(.*)/) {
            $divert = \$auxfiles{$1};
        } elsif (&mfval($_[1])) {
            $sect = $_[2] ? $_[2] : "end";
            $divert = \($makefile_extra{$_[1]}->{$sect});
        } else {
            $dummy = '';
            $divert = \$dummy;
        }
        next;
    }
    # If we're gathering help/verbatim text, keep doing so.
    if (defined $divert) { ${$divert} .= "$_\n"; next; }
    # Ignore blank lines.
    next if scalar @_ == 0;
    # Now we have an ordinary line. See if it's an = line, a : line
    # or a + line.
    @objs = @_;
    if ($_[0] eq "+") {
        $listref = $lastlistref;
        $prog = undef;
        die "$.: unexpected + line\n" if !defined $lastlistref;
    } elsif ($_[1] eq "=") {
        $groups{$_[0]} = [] if !defined $groups{$_[0]};
        $listref = $groups{$_[0]};
        $prog = undef;
        shift @objs; # eat the group name
    } elsif ($_[1] eq ":") {
        $listref = [];
        $prog = $_[0];
        shift @objs; # eat the program name
    } else {
        die "$.: unrecognised line type\n";
    }
    shift @objs; # eat the +, the = or the :
    # Expand group references in place; a [G]/[C]/... marker sets the
    # program's platform type instead of adding an object.
    while (scalar @objs > 0) {
        $i = shift @objs;
        if ($groups{$i}) {
            foreach $j (@{$groups{$i}}) { unshift @objs, $j; }
        } elsif (($i eq "[G]" or $i eq "[C]" or $i eq "[M]" or
                  $i eq "[X]" or $i eq "[U]" or $i eq "[MX]") and defined $prog) {
            $type = substr($i,1,(length $i)-2);
        } else {
            push @$listref, $i;
        }
    }
    if ($prog and $type) {
        die "multiple program entries for $prog [$type]\n"
            if defined $programs{$prog . "," . $type};
        $programs{$prog . "," . $type} = $listref;
    }
    $lastlistref = $listref;
}
close IN;
# Write out any auxiliary output files gathered from "!begin >file"
# sections of the Recipe.  Fixed to use a lexical filehandle with
# three-argument open and to check open/close for I/O errors instead of
# silently ignoring them (write errors only surface at close time).
foreach $aux (sort keys %auxfiles) {
    open my $auxfh, ">", $aux or die "unable to open $aux for writing: $!\n";
    print $auxfh $auxfiles{$aux};
    close $auxfh or die "error writing $aux: $!\n";
}
# Now retrieve the complete list of objects and resource files, and
# construct dependency data for them. While we're here, expand the
# object list for each program, and complain if its type isn't set.
@prognames = sort keys %programs;
%depends = ();
@scanlist = ();
foreach $i (@prognames) {
    ($prog, $type) = split ",", $i;
    # Strip duplicate object names.
    # (sort brings duplicates together; the grep keeps an element only
    # when it differs from its predecessor)
    $prev = '';
    @list = grep { $status = ($prev ne $_); $prev=$_; $status }
            sort @{$programs{$i}};
    $programs{$i} = [@list];
    foreach $j (@list) {
        # Dependencies for "x" start with "x.c" or "x.m" (depending on
        # which one exists).
        # Dependencies for "x.res" start with "x.rc".
        # Dependencies for "x.rsrc" start with "x.r".
        # Both types of file are pushed on the list of files to scan.
        # Libraries (.lib) don't have dependencies at all.
        if ($j =~ /^(.*)\.res$/) {
            $file = "$1.rc";
            $depends{$j} = [$file];
            push @scanlist, $file;
        } elsif ($j =~ /^(.*)\.rsrc$/) {
            $file = "$1.r";
            $depends{$j} = [$file];
            push @scanlist, $file;
        } elsif ($j !~ /\./) {
            $file = "$j.c";
            $file = "$j.m" unless &findfile($file);
            $depends{$j} = [$file];
            push @scanlist, $file;
        }
    }
}
# Scan each file on @scanlist and find further inclusions.
# Inclusions are given by lines of the form `#include "otherfile"'
# (system headers are automatically ignored by this because they'll
# be given in angle brackets). Files included by this method are
# added back on to @scanlist to be scanned in turn (if not already
# done).
#
# Resource scripts (.rc) can also include a file by means of:
# - a line # ending `ICON "filename"';
# - a line ending `RT_MANIFEST "filename"'.
# Files included by this method are not added to @scanlist because
# they can never include further files.
#
# In this pass we write out a hash %further which maps a source
# file name into a listref containing further source file names.
%further = ();
%allsourcefiles = (); # this is wanted by some makefiles
while (scalar @scanlist > 0) {
    $file = shift @scanlist;
    next if defined $further{$file}; # skip if we've already done it
    $further{$file} = [];
    # NOTE(review): if findfile() can't locate $file, $dirfile is undef
    # and the open below is attempted on an empty name -- confirm Recipe
    # inputs always resolve.
    $dirfile = &findfile($file);
    $allsourcefiles{$dirfile} = 1;
    open IN, "$dirfile" or die "unable to open source file $file\n";
    while (<IN>) {
        chomp;
        /^\s*#include\s+\"([^\"]+)\"/ and do {
            push @{$further{$file}}, $1;
            push @scanlist, $1;
            next;
        };
        /(RT_MANIFEST|ICON)\s+\"([^\"]+)\"\s*$/ and do {
            push @{$further{$file}}, $2;
            next;
        }
    }
    close IN;
}
# Now we're ready to generate the final dependencies section. For
# each key in %depends, we must expand the dependencies list by
# iteratively adding entries from %further.
# (breadth-first transitive closure; %dep is the visited set so each
# header is added at most once per object)
foreach $i (keys %depends) {
    %dep = ();
    @scanlist = @{$depends{$i}};
    foreach $i (@scanlist) { $dep{$i} = 1; }
    while (scalar @scanlist > 0) {
        $file = shift @scanlist;
        foreach $j (@{$further{$file}}) {
            if (!$dep{$j}) {
                $dep{$j} = 1;
                push @{$depends{$i}}, $j;
                push @scanlist, $j;
            }
        }
    }
#    printf "%s: %s\n", $i, join ' ',@{$depends{$i}};
}
# Validation of input.
sub mfval($) {
    my ($type) = @_;
    # Returns true if the argument is a known makefile type; otherwise
    # prints a warning (prefixed with the current Recipe line number)
    # and returns false.
    my @known_types = qw(vc vcproj cygwin borland lcc devcppproj gtk unix
                         am osx vstudio10 vstudio12);
    foreach my $known (@known_types) {
        return 1 if $type eq $known;
    }
    warn "$.:unknown makefile type '$type'\n";
    return 0;
}
# Utility routines while writing out the Makefiles.
# def: coerce undef to the empty string, pass anything else through.
sub def {
    my $value = shift @_;
    return defined($value) ? $value : "";
}
# Compute the "../../"-style prefix needed to climb back out of the
# directory part of $path, using $sep as the directory separator in the
# result.  Both $sep and "/" are recognised as separators within $path.
# Fix: the old loop condition short-circuited past the assignment to $j
# (which was also never declared), leaving it stale or uninitialised and
# mis-stepping through paths whenever $sep was not "/".  Both indices are
# now recomputed on every iteration.
sub dirpfx {
    my ($path, $sep) = @_;
    my $ret = "";
    while (1) {
        my $i = index $path, $sep;
        my $j = index $path, "/";
        last if $i < 0 and $j < 0;
        if ($i >= 0 and ($j < 0 or $i < $j)) {
            $path = substr $path, ($i + length $sep);
        } else {
            $path = substr $path, ($j + 1);
        }
        $ret .= "..$sep";
    }
    return $ret;
}
# Locate source file $name in one of the directories listed in @srcdirs,
# memoising the answer in %findfilecache.  Returns the directory-prefixed
# path (with a leading "./" stripped), or undef if the file exists in no
# source directory.  Dies if the same name exists in more than one
# directory, since that would make the choice ambiguous.
sub findfile {
    my ($name) = @_;
    my $dir = '';
    my $i;
    my $outdir = undef;
    unless (defined $findfilecache{$name}) {
        $i = 0;       # number of directories containing $name
        foreach $dir (@srcdirs) {
            if (-f "$dir$name") {
                $outdir = $dir;
                $i++;
                $outdir =~ s/^\.\///;
            }
        }
        die "multiple instances of source file $name\n" if $i > 1;
        # note: a cached undef still satisfies "defined" is false, so a
        # missing file is re-probed on every call
        $findfilecache{$name} = (defined $outdir ? $outdir . $name : undef);
    }
    return $findfilecache{$name};
}
# Build the space-separated object list for program $prog, expanding each
# component through the template matching its kind ('X' is replaced by the
# component's stem): $otmpl for plain objects, $rtmpl for .res/.rsrc
# resources, $ltmpl for .lib libraries.  Components whose template is
# missing/empty are dropped.  ($prefix and $dirsep are accepted for
# interface compatibility but unused here.)
sub objects {
    my ($prog, $otmpl, $rtmpl, $ltmpl, $prefix, $dirsep) = @_;
    our %programs;
    ($otmpl, $rtmpl, $ltmpl) = map { defined $_ ? $_ : "" } ($otmpl, $rtmpl, $ltmpl);
    my @expanded = ();
    foreach my $component (@{$programs{$prog}}) {
        my ($tmpl, $stem) = ("", $component);
        if ($component =~ /^(.*)\.(res|rsrc)/) {
            ($tmpl, $stem) = ($rtmpl, $1);
        } elsif ($component =~ /^(.*)\.lib/) {
            ($tmpl, $stem) = ($ltmpl, $1);
        } elsif ($component !~ /\./) {
            $tmpl = $otmpl;
        }
        (my $expansion = $tmpl) =~ s/X/$stem/;
        push @expanded, $expansion if $expansion ne "";
    }
    return join " ", @expanded;
}
# Return a space-separated list of program $prog's components whose names
# end in $suffix (e.g. ".res"), or undef if there are none.
# Fixes: removed a line copy-pasted from objects() that pointlessly
# assigned the unrelated globals $otmpl/$rtmpl/$ltmpl; and components
# shorter than the suffix are now skipped so substr never receives a
# negative offset (which would count from the end of the string and could
# match spuriously, with a warning).
sub special {
    my ($prog, $suffix) = @_;
    our %programs;
    my @matches = ();
    foreach my $component (@{$programs{$prog}}) {
        next if length($component) < length($suffix);
        push @matches, $component
            if substr($component, length($component) - length($suffix)) eq $suffix;
    }
    return (scalar @matches) ? (join " ", @matches) : undef;
}
# Wrap a long makefile line at whitespace, joining the pieces with
# " $splitchar\n\t\t".  The first piece may be up to $width characters
# (default 76); continuation pieces up to 60.
# Fixes: the second pattern had an unbalanced parenthesis
# (/^(.{$len,}?\s(.*)$/), which made the interpolated regex die at match
# time; and a line containing no whitespace at all used to fall through
# with stale capture variables and be silently discarded -- it is now
# emitted unchanged.
sub splitline {
    my ($line, $width, $splitchar) = @_;
    my $result = "";
    my $len = (defined $width ? $width : 76);
    $splitchar = (defined $splitchar ? $splitchar : '\\');
    while (length $line > $len) {
        # prefer the last space within the limit, else the first beyond it
        if ($line =~ /^(.{0,$len})\s(.*)$/ or $line =~ /^(.{$len,}?)\s(.*)$/) {
            $result .= $1;
            $result .= " ${splitchar}\n\t\t" if $2 ne '';
            $line = $2;
            $len = 60;
        } else {
            last;       # no whitespace anywhere: emit the long token as-is
        }
    }
    return $result . $line;
}
# Build the per-object dependency records for one makefile type.  For each
# object in %depends (skipping ones marked special for $mftyp), expands
# the object name through $otmpl (or $rtmpl for .res/.rsrc; such objects
# are skipped entirely when $rtmpl is undef) and rewrites each dependency
# path via findfile(), converting "/" to $dirsep and prepending $prefix.
# Returns a list of { obj, obj_orig, deps } hash refs.
# NOTE(review): $depchar and $splitchar are accepted (and $depchar
# defaulted) but never used below -- presumably historical; confirm
# before removing from the interface.
sub deps {
    my ($otmpl, $rtmpl, $prefix, $dirsep, $mftyp, $depchar, $splitchar) = @_;
    my ($i, $x, $y);
    my @deps;
    my @ret;
    @ret = ();
    $depchar ||= ':';
    foreach $i (sort keys %depends) {
        next if $specialobj{$mftyp}->{$i};
        if ($i =~ /^(.*)\.(res|rsrc)/) {
            next if !defined $rtmpl;
            $y = $1;
            ($x = $rtmpl) =~ s/X/$y/;
        } else {
            ($x = $otmpl) =~ s/X/$i/;
        }
        @deps = @{$depends{$i}};
        # the map both aliases each element ($_ assignment) and returns
        # the rewritten path as its last expression
        @deps = map {
            $_ = &findfile($_);
            s/\//$dirsep/g;
            $_ = $prefix . $_;
        } @deps;
        push @ret, {obj => $x, obj_orig => $i, deps => [@deps]};
    }
    return @ret;
}
# Select the "prog,type" entries from @prognames whose type letter appears
# in $types (a colon-separated list such as "G:C").
sub prognames {
    my ($types) = @_;
    our @prognames;
    return grep {
        my (undef, $type) = split ",", $_;
        index(":$types:", ":$type:") >= 0;
    } @prognames;
}
# Like prognames(), but return only the program names (the part before
# the comma) for entries whose type letter appears in $types.
sub progrealnames {
    my ($types) = @_;
    our @prognames;
    return map {
        my ($prog, $type) = split ",", $_;
        (index(":$types:", ":$type:") >= 0) ? ($prog) : ();
    } @prognames;
}
# Return the man-page file names for the given program types/section.
# Only section-1 pages for UNIX ("X"-type) programs are assumed to
# exist; every other combination yields an empty list.
sub manpages {
    my ($types, $suffix) = @_;
    return () unless $suffix eq "1" && $types =~ /:X:/;
    return map { "$_.1" } &progrealnames($types);
}
# Remember the starting directory: the project-file generators below
# chdir into output subdirectories and must be able to return here.
$orig_dir = cwd;
# Now we're ready to output the actual Makefiles.
if (defined $makefiles{'cygwin'}) {
$dirpfx = &dirpfx($makefiles{'cygwin'}, "/");
##-- CygWin makefile
open OUT, ">$makefiles{'cygwin'}"; select OUT;
print
"# Makefile for $project_name under Cygwin, MinGW, or Winelib.\n".
"#\n# This file was created by `mkfiles.pl' from the `Recipe' file.\n".
"# DO NOT EDIT THIS FILE DIRECTLY; edit Recipe or mkfiles.pl instead.\n";
# gcc command line option is -D not /D
($_ = $help) =~ s/([=" ])\/D/$1-D/gs;
print $_;
print
"\n".
"# You can define this path to point at your tools if you need to\n".
"# TOOLPATH = c:\\cygwin\\bin\\ # or similar, if you're running Windows\n".
"# TOOLPATH = /pkg/mingw32msvc/i386-mingw32msvc/bin/\n".
"CC = \$(TOOLPATH)gcc\n".
"RC = \$(TOOLPATH)windres\n".
"# Uncomment the following two lines to compile under Winelib\n".
"# CC = winegcc\n".
"# RC = wrc\n".
"# You may also need to tell windres where to find include files:\n".
"# RCINC = --include-dir c:\\cygwin\\include\\\n".
"\n".
&splitline("CFLAGS = -mno-cygwin -Wall -O2 -D_WINDOWS -DDEBUG -DWIN32S_COMPAT".
" -D_NO_OLDNAMES -DNO_MULTIMON -DNO_HTMLHELP -DNO_SECUREZEROMEMORY " .
(join " ", map {"-I$dirpfx$_"} @srcdirs)) .
"\n".
"LDFLAGS = -mno-cygwin -s\n".
&splitline("RCFLAGS = \$(RCINC) --define WIN32=1 --define _WIN32=1 ".
"--define WINVER=0x0400 ".(join " ", map {"-I$dirpfx$_"} @srcdirs))."\n".
"\n".
$makefile_extra{'cygwin'}->{'vars'} .
"\n".
".SUFFIXES:\n".
"\n";
# "all" target depends on every GUI (G) and console (C) program.
print &splitline("all:" . join "", map { " $_.exe" } &progrealnames("G:C"));
print "\n\n";
# One link rule per program; GUI programs get -mwindows.
foreach $p (&prognames("G:C")) {
($prog, $type) = split ",", $p;
$objstr = &objects($p, "X.o", "X.res.o", undef);
print &splitline($prog . ".exe: " . $objstr), "\n";
my $mw = $type eq "G" ? " -mwindows" : "";
$libstr = &objects($p, undef, undef, "-lX");
print &splitline("\t\$(CC)" . $mw . " \$(LDFLAGS) -o \$@ " .
"-Wl,-Map,$prog.map " .
$objstr . " $libstr", 69), "\n\n";
}
# Per-object compile rules from the dependency scan; objects listed in
# %forceobj are rebuilt unconditionally via the phony FORCE target.
foreach $d (&deps("X.o", "X.res.o", $dirpfx, "/", "cygwin")) {
if ($forceobj{$d->{obj_orig}}) {
printf ("%s: FORCE\n", $d->{obj});
} else {
print &splitline(sprintf("%s: %s", $d->{obj},
join " ", @{$d->{deps}})), "\n";
}
# Resource objects go through windres; everything else through $(CC).
if ($d->{obj} =~ /\.res\.o$/) {
print "\t\$(RC) \$(RCFL) \$(RCFLAGS) ".$d->{deps}->[0]." -o ".$d->{obj}."\n\n";
} else {
print "\t\$(CC) \$(COMPAT) \$(CFLAGS) \$(XFLAGS) -c ".$d->{deps}->[0]."\n\n";
}
}
print "\n";
print $makefile_extra{'cygwin'}->{'end'} if defined $makefile_extra{'cygwin'}->{'end'};
print "\nclean:\n".
"\trm -f *.o *.exe *.res.o *.so *.map\n".
"\n".
"FORCE:\n";
select STDOUT; close OUT;
}
##-- Borland makefile
if (defined $makefiles{'borland'}) {
$dirpfx = &dirpfx($makefiles{'borland'}, "\\");
%stdlibs = ( # Borland provides many Win32 API libraries intrinsically
"advapi32" => 1,
"comctl32" => 1,
"comdlg32" => 1,
"gdi32" => 1,
"imm32" => 1,
"shell32" => 1,
"user32" => 1,
"winmm" => 1,
"winspool" => 1,
"wsock32" => 1,
);
open OUT, ">$makefiles{'borland'}"; select OUT;
print
"# Makefile for $project_name under Borland C.\n".
"#\n# This file was created by `mkfiles.pl' from the `Recipe' file.\n".
"# DO NOT EDIT THIS FILE DIRECTLY; edit Recipe or mkfiles.pl instead.\n";
# bcc32 command line option is -D not /D
($_ = $help) =~ s/([=" ])\/D/$1-D/gs;
print $_;
print
"\n".
"# If you rename this file to `Makefile', you should change this line,\n".
"# so that the .rsp files still depend on the correct makefile.\n".
"MAKEFILE = Makefile.bor\n".
"\n".
"# C compilation flags\n".
"CFLAGS = -D_WINDOWS -DWINVER=0x0500\n".
"# Resource compilation flags\n".
"RCFLAGS = -DNO_WINRESRC_H -DWIN32 -D_WIN32 -DWINVER=0x0401\n".
"\n".
"# Get include directory for resource compiler\n".
"!if !\$d(BCB)\n".
"BCB = \$(MAKEDIR)\\..\n".
"!endif\n".
"\n".
$makefile_extra{'borland'}->{'vars'} .
"\n".
# Suffix rules: compile .c with bcc32, compile .rc with brcc32.
".c.obj:\n".
&splitline("\tbcc32 -w-aus -w-ccc -w-par -w-pia \$(COMPAT)".
" \$(CFLAGS) \$(XFLAGS) ".
(join " ", map {"-I$dirpfx$_"} @srcdirs) .
" /c \$*.c",69)."\n".
".rc.res:\n".
&splitline("\tbrcc32 \$(RCFL) -i \$(BCB)\\include -r".
" \$(RCFLAGS) \$*.rc",69)."\n".
"\n";
print &splitline("all:" . join "", map { " $_.exe" } &progrealnames("G:C"));
print "\n\n";
# One link rule per program; ilink32 reads the object list from the
# response (.rsp) file generated below.  -aa = GUI app, -ap = console.
foreach $p (&prognames("G:C")) {
($prog, $type) = split ",", $p;
$objstr = &objects($p, "X.obj", "X.res", undef);
print &splitline("$prog.exe: " . $objstr . " $prog.rsp"), "\n";
my $ap = ($type eq "G") ? "-aa" : "-ap";
print "\tilink32 $ap -Gn -L\$(BCB)\\lib \@$prog.rsp\n\n";
}
# Rules that build each program's .rsp response file via `echo`
# appends, wrapping the object list at ~50 columns per line.
foreach $p (&prognames("G:C")) {
($prog, $type) = split ",", $p;
print $prog, ".rsp: \$(MAKEFILE)\n";
$objstr = &objects($p, "X.obj", undef, undef);
@objlist = split " ", $objstr;
@objlines = ("");
foreach $i (@objlist) {
if (length($objlines[$#objlines] . " $i") > 50) {
push @objlines, "";
}
$objlines[$#objlines] .= " $i";
}
# c0w32 = GUI startup object, c0x32 = console startup object.
$c0w = ($type eq "G") ? "c0w32" : "c0x32";
print "\techo $c0w + > $prog.rsp\n";
for ($i=0; $i<=$#objlines; $i++) {
$plus = ($i < $#objlines ? " +" : "");
print "\techo$objlines[$i]$plus >> $prog.rsp\n";
}
print "\techo $prog.exe >> $prog.rsp\n";
$objstr = &objects($p, "X.obj", "X.res", undef);
# Drop the Win32 API libs Borland links implicitly; always add the
# Borland runtime (cw32) and import library (import32).
@libs = split " ", &objects($p, undef, undef, "X");
@libs = grep { !$stdlibs{$_} } @libs;
unshift @libs, "cw32", "import32";
$libstr = join ' ', @libs;
print "\techo nul,$libstr, >> $prog.rsp\n";
print "\techo " . &objects($p, undef, "X.res", undef) . " >> $prog.rsp\n";
print "\n";
}
# Per-object dependency lines (the suffix rules above do the compiling).
foreach $d (&deps("X.obj", "X.res", $dirpfx, "\\", "borland")) {
if ($forceobj{$d->{obj_orig}}) {
printf("%s: FORCE\n", $d->{obj});
} else {
print &splitline(sprintf("%s: %s", $d->{obj},
join " ", @{$d->{deps}})), "\n";
}
}
print "\n";
print $makefile_extra{'borland'}->{'end'} if defined $makefile_extra{'borland'}->{'end'};
print "\nclean:\n".
"\t-del *.obj\n".
"\t-del *.exe\n".
"\t-del *.res\n".
"\t-del *.pch\n".
"\t-del *.aps\n".
"\t-del *.il*\n".
"\t-del *.pdb\n".
"\t-del *.rsp\n".
"\t-del *.tds\n".
"\t-del *.\$\$\$\$\$\$\n".
"\n".
"FORCE:\n".
"\t-rem dummy command\n";
select STDOUT; close OUT;
}
if (defined $makefiles{'vc'}) {
$dirpfx = &dirpfx($makefiles{'vc'}, "\\");
##-- Visual C++ makefile
open OUT, ">$makefiles{'vc'}"; select OUT;
print
"# Makefile for $project_name under Visual C.\n".
"#\n# This file was created by `mkfiles.pl' from the `Recipe' file.\n".
"# DO NOT EDIT THIS FILE DIRECTLY; edit Recipe or mkfiles.pl instead.\n";
print $help;
print
"\n".
"# If you rename this file to `Makefile', you should change this line,\n".
"# so that the .rsp files still depend on the correct makefile.\n".
"MAKEFILE = Makefile.vc\n".
"\n".
"# C compilation flags\n".
"CFLAGS = /nologo /W3 /O1 " .
(join " ", map {"-I$dirpfx$_"} @srcdirs) .
" /D_WINDOWS /D_WIN32_WINDOWS=0x500 /DWINVER=0x500\n".
"LFLAGS = /incremental:no /fixed\n".
"RCFLAGS = ".(join " ", map {"-I$dirpfx$_"} @srcdirs).
" -DWIN32 -D_WIN32 -DWINVER=0x0400\n".
"\n".
$makefile_extra{'vc'}->{'vars'} .
"\n".
"\n";
print &splitline("all:" . join "", map { " $_.exe" } &progrealnames("G:C"));
print "\n\n";
# One link rule per program; the object list is passed via a .rsp
# response file generated by the rules in the next loop.
foreach $p (&prognames("G:C")) {
($prog, $type) = split ",", $p;
$objstr = &objects($p, "X.obj", "X.res", undef);
print &splitline("$prog.exe: " . $objstr . " $prog.rsp"), "\n".
print "\tlink \$(LFLAGS) \$(XLFLAGS) -out:$prog.exe -map:$prog.map \@$prog.rsp\n\n";
}
# Rules that build each .rsp file, wrapping the object/library list
# at ~50 columns per `echo` line.
foreach $p (&prognames("G:C")) {
($prog, $type) = split ",", $p;
print $prog, ".rsp: \$(MAKEFILE)\n";
$objstr = &objects($p, "X.obj", "X.res", "X.lib");
@objlist = split " ", $objstr;
@objlines = ("");
foreach $i (@objlist) {
if (length($objlines[$#objlines] . " $i") > 50) {
push @objlines, "";
}
$objlines[$#objlines] .= " $i";
}
$subsys = ($type eq "G") ? "windows" : "console";
print "\techo /nologo /subsystem:$subsys > $prog.rsp\n";
for ($i=0; $i<=$#objlines; $i++) {
print "\techo$objlines[$i] >> $prog.rsp\n";
}
print "\n";
}
# Per-object compile rules.  Force-built objects additionally depend
# on wildcard patterns so they rebuild whenever any source changes.
foreach $d (&deps("X.obj", "X.res", $dirpfx, "\\", "vc")) {
$extradeps = $forceobj{$d->{obj_orig}} ? ["*.c","*.h","*.rc"] : [];
print &splitline(sprintf("%s: %s", $d->{obj},
join " ", @$extradeps, @{$d->{deps}})), "\n";
# NOTE(review): the '.' in /.obj$/ is an unescaped regex metachar; it
# works here because the only alternative objects end in ".res", but
# /\.obj$/ would be more precise — confirm before tightening.
if ($d->{obj} =~ /.obj$/) {
print "\tcl \$(COMPAT) \$(CFLAGS) \$(XFLAGS) /c ".$d->{deps}->[0],"\n\n";
} else {
print "\trc \$(RCFL) -r \$(RCFLAGS) ".$d->{deps}->[0],"\n\n";
}
}
print "\n";
print $makefile_extra{'vc'}->{'end'} if defined $makefile_extra{'vc'}->{'end'};
print "\nclean: tidy\n".
"\t-del *.exe\n\n".
"tidy:\n".
"\t-del *.obj\n".
"\t-del *.res\n".
"\t-del *.pch\n".
"\t-del *.aps\n".
"\t-del *.ilk\n".
"\t-del *.pdb\n".
"\t-del *.rsp\n".
"\t-del *.dsp\n".
"\t-del *.dsw\n".
"\t-del *.ncb\n".
"\t-del *.opt\n".
"\t-del *.plg\n".
"\t-del *.map\n".
"\t-del *.idb\n".
"\t-del debug.log\n";
select STDOUT; close OUT;
}
if (defined $makefiles{'vcproj'}) {
$dirpfx = &dirpfx($makefiles{'vcproj'}, "\\");
##-- MSVC 6 Workspace and projects
#
# Note: All files created in this section are written in binary
# mode, because although MSVC's command-line make can deal with
# LF-only line endings, MSVC project files really _need_ to be
# CRLF. Hence, in order for mkfiles.pl to generate usable project
# files even when run from Unix, I make sure all files are binary
# and explicitly write the CRLFs.
#
# Create directories if necessary
mkdir $makefiles{'vcproj'}
if(! -d $makefiles{'vcproj'});
chdir $makefiles{'vcproj'};
# Precompute the object -> dependency-list map shared by all projects.
@deps = &deps("X.obj", "X.res", $dirpfx, "\\", "vcproj");
%all_object_deps = map {$_->{obj} => $_->{deps}} @deps;
# Create the project files
# Get names of all Windows projects (GUI and console)
my @prognames = &prognames("G:C");
foreach $progname (@prognames) {
create_vc_project(\%all_object_deps, $progname);
}
# Create the workspace file
open OUT, ">$project_name.dsw"; binmode OUT; select OUT;
print
"Microsoft Developer Studio Workspace File, Format Version 6.00\r\n".
"# WARNING: DO NOT EDIT OR DELETE THIS WORKSPACE FILE!\r\n".
"\r\n".
"###############################################################################\r\n".
"\r\n";
# List projects
foreach $progname (@prognames) {
($windows_project, $type) = split ",", $progname;
print "Project: \"$windows_project\"=\".\\$windows_project\\$windows_project.dsp\" - Package Owner=<4>\r\n";
}
print
"\r\n".
"Package=<5>\r\n".
"{{{\r\n".
"}}}\r\n".
"\r\n".
"Package=<4>\r\n".
"{{{\r\n".
"}}}\r\n".
"\r\n".
"###############################################################################\r\n".
"\r\n".
"Global:\r\n".
"\r\n".
"Package=<5>\r\n".
"{{{\r\n".
"}}}\r\n".
"\r\n".
"Package=<3>\r\n".
"{{{\r\n".
"}}}\r\n".
"\r\n".
"###############################################################################\r\n".
"\r\n";
select STDOUT; close OUT;
chdir $orig_dir;
# Write one MSVC6 .dsp project file for program $progname (a
# "name,type" string) into a subdirectory named after the program.
# Classifies the program's dependencies into source/header/resource/
# library groups, then emits boilerplate Release and Debug configs.
sub create_vc_project {
my ($all_object_deps, $progname) = @_;
# Construct program's dependency info
%seen_objects = ();
%lib_files = ();
%source_files = ();
%header_files = ();
%resource_files = ();
@object_files = split " ", &objects($progname, "X.obj", "X.res", "X.lib");
foreach $object_file (@object_files) {
next if defined $seen_objects{$object_file};
$seen_objects{$object_file} = 1;
if($object_file =~ /\.lib$/io) {
$lib_files{$object_file} = 1;
next;
}
# NOTE(review): this reads the package-global %all_object_deps, not
# the $all_object_deps hashref parameter; it works because the caller
# sets the global first, but the parameter is effectively unused —
# confirm before relying on either.
$object_deps = $all_object_deps{$object_file};
foreach $object_dep (@$object_deps) {
if($object_dep =~ /\.c$/io) {
$source_files{$object_dep} = 1;
next;
}
if($object_dep =~ /\.h$/io) {
$header_files{$object_dep} = 1;
next;
}
if($object_dep =~ /\.(rc|ico)$/io) {
$resource_files{$object_dep} = 1;
next;
}
}
}
$libs = join " ", sort keys %lib_files;
@source_files = sort keys %source_files;
@header_files = sort keys %header_files;
@resources = sort keys %resource_files;
($windows_project, $type) = split ",", $progname;
mkdir $windows_project
if(! -d $windows_project);
chdir $windows_project;
$subsys = ($type eq "G") ? "windows" : "console";
open OUT, ">$windows_project.dsp"; binmode OUT; select OUT;
print
"# Microsoft Developer Studio Project File - Name=\"$windows_project\" - Package Owner=<4>\r\n".
"# Microsoft Developer Studio Generated Build File, Format Version 6.00\r\n".
"# ** DO NOT EDIT **\r\n".
"\r\n".
"# TARGTYPE \"Win32 (x86) Application\" 0x0101\r\n".
"\r\n".
"CFG=$windows_project - Win32 Debug\r\n".
"!MESSAGE This is not a valid makefile. To build this project using NMAKE,\r\n".
"!MESSAGE use the Export Makefile command and run\r\n".
"!MESSAGE \r\n".
"!MESSAGE NMAKE /f \"$windows_project.mak\".\r\n".
"!MESSAGE \r\n".
"!MESSAGE You can specify a configuration when running NMAKE\r\n".
"!MESSAGE by defining the macro CFG on the command line. For example:\r\n".
"!MESSAGE \r\n".
"!MESSAGE NMAKE /f \"$windows_project.mak\" CFG=\"$windows_project - Win32 Debug\"\r\n".
"!MESSAGE \r\n".
"!MESSAGE Possible choices for configuration are:\r\n".
"!MESSAGE \r\n".
"!MESSAGE \"$windows_project - Win32 Release\" (based on \"Win32 (x86) Application\")\r\n".
"!MESSAGE \"$windows_project - Win32 Debug\" (based on \"Win32 (x86) Application\")\r\n".
"!MESSAGE \r\n".
"\r\n".
"# Begin Project\r\n".
"# PROP AllowPerConfigDependencies 0\r\n".
"# PROP Scc_ProjName \"\"\r\n".
"# PROP Scc_LocalPath \"\"\r\n".
"CPP=cl.exe\r\n".
"MTL=midl.exe\r\n".
"RSC=rc.exe\r\n".
"\r\n".
"!IF \"\$(CFG)\" == \"$windows_project - Win32 Release\"\r\n".
"\r\n".
"# PROP BASE Use_MFC 0\r\n".
"# PROP BASE Use_Debug_Libraries 0\r\n".
"# PROP BASE Output_Dir \"Release\"\r\n".
"# PROP BASE Intermediate_Dir \"Release\"\r\n".
"# PROP BASE Target_Dir \"\"\r\n".
"# PROP Use_MFC 0\r\n".
"# PROP Use_Debug_Libraries 0\r\n".
"# PROP Output_Dir \"Release\"\r\n".
"# PROP Intermediate_Dir \"Release\"\r\n".
"# PROP Ignore_Export_Lib 0\r\n".
"# PROP Target_Dir \"\"\r\n".
"# ADD BASE CPP /nologo /W3 /GX /O2 ".
(join " ", map {"/I \"..\\..\\$dirpfx$_\""} @srcdirs) .
" /D \"WIN32\" /D \"NDEBUG\" /D \"_WINDOWS\" /D \"_MBCS\" /YX /FD /c\r\n".
"# ADD CPP /nologo /W3 /GX /O2 ".
(join " ", map {"/I \"..\\..\\$dirpfx$_\""} @srcdirs) .
" /D \"WIN32\" /D \"NDEBUG\" /D \"_WINDOWS\" /D \"_MBCS\" /YX /FD /c\r\n".
"# ADD BASE MTL /nologo /D \"NDEBUG\" /mktyplib203 /win32\r\n".
"# ADD MTL /nologo /D \"NDEBUG\" /mktyplib203 /win32\r\n".
"# ADD BASE RSC /l 0x809 /d \"NDEBUG\"\r\n".
"# ADD RSC /l 0x809 /d \"NDEBUG\"\r\n".
"BSC32=bscmake.exe\r\n".
"# ADD BASE BSC32 /nologo\r\n".
"# ADD BSC32 /nologo\r\n".
"LINK32=link.exe\r\n".
"# ADD BASE LINK32 kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib /nologo /subsystem:$subsys /machine:I386\r\n".
"# ADD LINK32 $libs /nologo /subsystem:$subsys /machine:I386\r\n".
"# SUBTRACT LINK32 /pdb:none\r\n".
"\r\n".
"!ELSEIF \"\$(CFG)\" == \"$windows_project - Win32 Debug\"\r\n".
"\r\n".
"# PROP BASE Use_MFC 0\r\n".
"# PROP BASE Use_Debug_Libraries 1\r\n".
"# PROP BASE Output_Dir \"Debug\"\r\n".
"# PROP BASE Intermediate_Dir \"Debug\"\r\n".
"# PROP BASE Target_Dir \"\"\r\n".
"# PROP Use_MFC 0\r\n".
"# PROP Use_Debug_Libraries 1\r\n".
"# PROP Output_Dir \"Debug\"\r\n".
"# PROP Intermediate_Dir \"Debug\"\r\n".
"# PROP Ignore_Export_Lib 0\r\n".
"# PROP Target_Dir \"\"\r\n".
"# ADD BASE CPP /nologo /W3 /Gm /GX /ZI /Od ".
(join " ", map {"/I \"..\\..\\$dirpfx$_\""} @srcdirs) .
" /D \"WIN32\" /D \"_DEBUG\" /D \"_WINDOWS\" /D \"_MBCS\" /YX /FD /GZ /c\r\n".
"# ADD CPP /nologo /W3 /Gm /GX /ZI /Od ".
(join " ", map {"/I \"..\\..\\$dirpfx$_\""} @srcdirs) .
" /D \"WIN32\" /D \"_DEBUG\" /D \"_WINDOWS\" /D \"_MBCS\" /YX /FD /GZ /c\r\n".
"# ADD BASE MTL /nologo /D \"_DEBUG\" /mktyplib203 /win32\r\n".
"# ADD MTL /nologo /D \"_DEBUG\" /mktyplib203 /win32\r\n".
"# ADD BASE RSC /l 0x809 /d \"_DEBUG\"\r\n".
"# ADD RSC /l 0x809 /d \"_DEBUG\"\r\n".
"BSC32=bscmake.exe\r\n".
"# ADD BASE BSC32 /nologo\r\n".
"# ADD BSC32 /nologo\r\n".
"LINK32=link.exe\r\n".
"# ADD BASE LINK32 kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib /nologo /subsystem:$subsys /debug /machine:I386 /pdbtype:sept\r\n".
"# ADD LINK32 $libs /nologo /subsystem:$subsys /debug /machine:I386 /pdbtype:sept\r\n".
"# SUBTRACT LINK32 /pdb:none\r\n".
"\r\n".
"!ENDIF \r\n".
"\r\n".
"# Begin Target\r\n".
"\r\n".
"# Name \"$windows_project - Win32 Release\"\r\n".
"# Name \"$windows_project - Win32 Debug\"\r\n".
"# Begin Group \"Source Files\"\r\n".
"\r\n".
"# PROP Default_Filter \"cpp;c;cxx;rc;def;r;odl;idl;hpj;bat\"\r\n";
foreach $source_file (@source_files) {
print
"# Begin Source File\r\n".
"\r\n".
"SOURCE=..\\..\\$source_file\r\n";
if($source_file =~ /ssh\.c/io) {
# Disable 'Edit and continue' as Visual Studio can't handle the macros
print
"\r\n".
"!IF \"\$(CFG)\" == \"$windows_project - Win32 Release\"\r\n".
"\r\n".
"!ELSEIF \"\$(CFG)\" == \"$windows_project - Win32 Debug\"\r\n".
"\r\n".
"# ADD CPP /Zi\r\n".
"\r\n".
"!ENDIF \r\n".
"\r\n";
}
print "# End Source File\r\n";
}
print
"# End Group\r\n".
"# Begin Group \"Header Files\"\r\n".
"\r\n".
"# PROP Default_Filter \"h;hpp;hxx;hm;inl\"\r\n";
foreach $header_file (@header_files) {
print
"# Begin Source File\r\n".
"\r\n".
"SOURCE=..\\..\\$header_file\r\n".
"# End Source File\r\n";
}
print
"# End Group\r\n".
"# Begin Group \"Resource Files\"\r\n".
"\r\n".
"# PROP Default_Filter \"ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe\"\r\n";
foreach $resource_file (@resources) {
print
"# Begin Source File\r\n".
"\r\n".
"SOURCE=..\\..\\$resource_file\r\n".
"# End Source File\r\n";
}
print
"# End Group\r\n".
"# End Target\r\n".
"# End Project\r\n";
select STDOUT; close OUT;
chdir "..";
}
}
if (defined $makefiles{'vstudio10'} || defined $makefiles{'vstudio12'}) {
##-- Visual Studio 2010+ Solution and Projects
if (defined $makefiles{'vstudio10'}) {
create_vs_solution('vstudio10', "2010", "11.00", "v100");
}
if (defined $makefiles{'vstudio12'}) {
create_vs_solution('vstudio12', "2012", "12.00", "v110");
}
# Emit a Visual Studio .sln for $makefilename (key into %makefiles)
# plus one .vcxproj per GUI/console program, using solution format
# $version, product name $name and platform toolset $toolsver.
sub create_vs_solution {
my ($makefilename, $name, $version, $toolsver) = @_;
$dirpfx = &dirpfx($makefiles{$makefilename}, "\\");
@deps = &deps("X.obj", "X.res", $dirpfx, "\\", $makefilename);
%all_object_deps = map {$_->{obj} => $_->{deps}} @deps;
my @prognames = &prognames("G:C");
# Create the solution file.
# NOTE(review): this guard tests -f where the MSVC6 branch tests -d;
# presumably it should be -d (directory existence) — confirm.
mkdir $makefiles{$makefilename}
if(! -f $makefiles{$makefilename});
chdir $makefiles{$makefilename};
open OUT, ">$project_name.sln"; select OUT;
print
"Microsoft Visual Studio Solution File, Format Version $version\n" .
"# Visual Studio $name\n";
# Deterministic per-project GUIDs so regeneration is stable.
my %projguids = ();
foreach $progname (@prognames) {
($windows_project, $type) = split ",", $progname;
$projguids{$windows_project} = $guid =
&invent_guid("project:$progname");
print
"Project(\"{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}\") = \"$windows_project\", \"$windows_project\\$windows_project.vcxproj\", \"{$guid}\"\n" .
"EndProject\n";
}
print
"Global\n" .
"	GlobalSection(SolutionConfigurationPlatforms) = preSolution\n" .
"		Debug|Win32 = Debug|Win32\n" .
"		Release|Win32 = Release|Win32\n" .
"	EndGlobalSection\n" .
"	GlobalSection(ProjectConfigurationPlatforms) = postSolution\n" ;
foreach my $projguid (values %projguids) {
print
"		{$projguid}.Debug|Win32.ActiveCfg = Debug|Win32\n" .
"		{$projguid}.Debug|Win32.Build.0 = Debug|Win32\n" .
"		{$projguid}.Release|Win32.ActiveCfg = Release|Win32\n" .
"		{$projguid}.Release|Win32.Build.0 = Release|Win32\n";
}
print
"	EndGlobalSection\n" .
"	GlobalSection(SolutionProperties) = preSolution\n" .
"		HideSolutionNode = FALSE\n" .
"	EndGlobalSection\n" .
"EndGlobal\n";
select STDOUT; close OUT;
foreach $progname (@prognames) {
($windows_project, $type) = split ",", $progname;
create_vs_project(\%all_object_deps, $windows_project, $type, $projguids{$windows_project}, $toolsver);
}
chdir $orig_dir;
}
# Emit one MSBuild .vcxproj (plus its .vcxproj.filters) for a single
# program, in a subdirectory named after it.
sub create_vs_project {
my ($all_object_deps, $windows_project, $type, $projguid, $toolsver) = @_;
# Break down the project's dependency information into the appropriate
# groups.
%seen_objects = ();
%lib_files = ();
%source_files = ();
%header_files = ();
%resource_files = ();
%icon_files = ();
# NOTE(review): uses the global $progname (still set because Perl
# localizes the caller's foreach loop variable for the duration of the
# call) rather than the $windows_project parameter — confirm intent.
@object_files = split " ", &objects($progname, "X.obj", "X.res", "X.lib");
foreach $object_file (@object_files) {
next if defined $seen_objects{$object_file};
$seen_objects{$object_file} = 1;
if($object_file =~ /\.lib$/io) {
$lib_files{$object_file} = 1;
next;
}
$object_deps = $all_object_deps{$object_file};
foreach $object_dep (@$object_deps) {
# The first dependency of each object is the file it is compiled
# from (.c or .rc); the rest are classified as headers or icons.
if($object_dep eq $object_deps->[0]) {
if($object_dep =~ /\.c$/io) {
$source_files{$object_dep} = 1;
} elsif($object_dep =~ /\.rc$/io) {
$resource_files{$object_dep} = 1;
}
} elsif ($object_dep =~ /\.[ch]$/io) {
$header_files{$object_dep} = 1;
} elsif ($object_dep =~ /\.ico$/io) {
$icon_files{$object_dep} = 1;
}
}
}
$libs = join ";", sort keys %lib_files;
@source_files = sort keys %source_files;
@header_files = sort keys %header_files;
@resources = sort keys %resource_files;
@icons = sort keys %icon_files;
$subsystem = ($type eq "G") ? "Windows" : "Console";
mkdir $windows_project
if(! -d $windows_project);
chdir $windows_project;
open OUT, ">$windows_project.vcxproj"; select OUT;
open FILTERS, ">$windows_project.vcxproj.filters";
# The bulk of the project file is just boilerplate stuff, so we
# can mostly just dump it out here. Note, buried in the ClCompile
# item definition, that we use a debug information format of
# ProgramDatabase, which disables the edit-and-continue support
# that breaks most of the project builds.
print
"<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" .
"<Project DefaultTargets=\"Build\" ToolsVersion=\"4.0\" xmlns=\"http://schemas.microsoft.com/developer/msbuild/2003\">\n" .
"  <ItemGroup Label=\"ProjectConfigurations\">\n" .
"    <ProjectConfiguration Include=\"Debug|Win32\">\n" .
"      <Configuration>Debug</Configuration>\n" .
"      <Platform>Win32</Platform>\n" .
"    </ProjectConfiguration>\n" .
"    <ProjectConfiguration Include=\"Release|Win32\">\n" .
"      <Configuration>Release</Configuration>\n" .
"      <Platform>Win32</Platform>\n" .
"    </ProjectConfiguration>\n" .
"  </ItemGroup>\n" .
"  <PropertyGroup Label=\"Globals\">\n" .
"    <SccProjectName />\n" .
"    <SccLocalPath />\n" .
"    <ProjectGuid>{$projguid}</ProjectGuid>\n" .
"  </PropertyGroup>\n" .
"  <Import Project=\"\$(VCTargetsPath)\\Microsoft.Cpp.Default.props\" />\n" .
"  <PropertyGroup Condition=\"'\$(Configuration)|\$(Platform)'=='Debug|Win32'\" Label=\"Configuration\">\n" .
"    <ConfigurationType>Application</ConfigurationType>\n" .
"    <UseOfMfc>false</UseOfMfc>\n" .
"    <CharacterSet>MultiByte</CharacterSet>\n" .
"    <PlatformToolset>$toolsver</PlatformToolset>\n" .
"  </PropertyGroup>\n" .
"  <PropertyGroup Condition=\"'\$(Configuration)|\$(Platform)'=='Release|Win32'\" Label=\"Configuration\">\n" .
"    <ConfigurationType>Application</ConfigurationType>\n" .
"    <UseOfMfc>false</UseOfMfc>\n" .
"    <CharacterSet>MultiByte</CharacterSet>\n" .
"    <PlatformToolset>$toolsver</PlatformToolset>\n" .
"  </PropertyGroup>\n" .
"  <Import Project=\"\$(VCTargetsPath)\\Microsoft.Cpp.props\" />\n" .
"  <ImportGroup Label=\"ExtensionTargets\">\n" .
"  </ImportGroup>\n" .
"  <ImportGroup Condition=\"'\$(Configuration)|\$(Platform)'=='Debug|Win32'\" Label=\"PropertySheets\">\n" .
"    <Import Project=\"\$(UserRootDir)\\Microsoft.Cpp.\$(Platform).user.props\" Condition=\"exists('\$(UserRootDir)\\Microsoft.Cpp.\$(Platform).user.props')\" Label=\"LocalAppDataPlatform\" />\n" .
"  </ImportGroup>\n" .
"  <ImportGroup Condition=\"'\$(Configuration)|\$(Platform)'=='Release|Win32'\" Label=\"PropertySheets\">\n" .
"    <Import Project=\"\$(UserRootDir)\\Microsoft.Cpp.\$(Platform).user.props\" Condition=\"exists('\$(UserRootDir)\\Microsoft.Cpp.\$(Platform).user.props')\" Label=\"LocalAppDataPlatform\" />\n" .
"  </ImportGroup>\n" .
"  <PropertyGroup Label=\"UserMacros\" />\n" .
"  <PropertyGroup Condition=\"'\$(Configuration)|\$(Platform)'=='Release|Win32'\">\n" .
"    <OutDir>.\\Release\\</OutDir>\n" .
"    <IntDir>.\\Release\\</IntDir>\n" .
"    <LinkIncremental>false</LinkIncremental>\n" .
"  </PropertyGroup>\n" .
"  <PropertyGroup Condition=\"'\$(Configuration)|\$(Platform)'=='Debug|Win32'\">\n" .
"    <OutDir>.\\Debug\\</OutDir>\n" .
"    <IntDir>.\\Debug\\</IntDir>\n" .
"    <LinkIncremental>true</LinkIncremental>\n" .
"  </PropertyGroup>\n" .
"  <ItemDefinitionGroup Condition=\"'\$(Configuration)|\$(Platform)'=='Release|Win32'\">\n" .
"    <ClCompile>\n" .
"      <RuntimeLibrary>MultiThreaded</RuntimeLibrary>\n" .
"      <InlineFunctionExpansion>OnlyExplicitInline</InlineFunctionExpansion>\n" .
"      <StringPooling>true</StringPooling>\n" .
"      <FunctionLevelLinking>true</FunctionLevelLinking>\n" .
"      <Optimization>MaxSpeed</Optimization>\n" .
"      <SuppressStartupBanner>true</SuppressStartupBanner>\n" .
"      <WarningLevel>Level3</WarningLevel>\n" .
"      <AdditionalIncludeDirectories>" . (join ";", map {"..\\..\\$dirpfx$_"} @srcdirs) . ";%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>\n" .
"      <PreprocessorDefinitions>WIN32;NDEBUG;_WINDOWS;SECURITY_WIN32;POSIX;_CRT_SECURE_NO_WARNINGS;_CRT_NONSTDC_NO_DEPRECATE;%(PreprocessorDefinitions)</PreprocessorDefinitions>\n" .
"      <AssemblerListingLocation>.\\Release\\</AssemblerListingLocation>\n" .
"      <PrecompiledHeaderOutputFile>.\\Release\\$windows_project.pch</PrecompiledHeaderOutputFile>\n" .
"      <ObjectFileName>.\\Release\\</ObjectFileName>\n" .
"      <ProgramDataBaseFileName>.\\Release\\</ProgramDataBaseFileName>\n" .
"    </ClCompile>\n" .
"    <Midl>\n" .
"      <SuppressStartupBanner>true</SuppressStartupBanner>\n" .
"      <PreprocessorDefinitions>NDEBUG;%(PreprocessorDefinitions)</PreprocessorDefinitions>\n" .
"      <TypeLibraryName>.\\Release\\$windows_project.tlb</TypeLibraryName>\n" .
"      <MkTypLibCompatible>true</MkTypLibCompatible>\n" .
"      <TargetEnvironment>Win32</TargetEnvironment>\n" .
"    </Midl>\n" .
"    <ResourceCompile>\n" .
"      <Culture>0x0809</Culture>\n" .
"      <PreprocessorDefinitions>NDEBUG;%(PreprocessorDefinitions)</PreprocessorDefinitions>\n" .
"    </ResourceCompile>\n" .
"    <Bscmake>\n" .
"      <SuppressStartupBanner>true</SuppressStartupBanner>\n" .
"      <OutputFile>.\\Release\\$windows_project.bsc</OutputFile>\n" .
"    </Bscmake>\n" .
"    <Link>\n" .
"      <SuppressStartupBanner>true</SuppressStartupBanner>\n" .
"      <SubSystem>$subsystem</SubSystem>\n" .
"      <OutputFile>.\\Release\\$windows_project.exe</OutputFile>\n" .
"      <AdditionalDependencies>$libs;%(AdditionalDependencies)</AdditionalDependencies>\n" .
"    </Link>\n" .
"  </ItemDefinitionGroup>\n" .
"  <ItemDefinitionGroup Condition=\"'\$(Configuration)|\$(Platform)'=='Debug|Win32'\">\n" .
"    <ClCompile>\n" .
"      <RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary>\n" .
"      <InlineFunctionExpansion>Default</InlineFunctionExpansion>\n" .
"      <FunctionLevelLinking>false</FunctionLevelLinking>\n" .
"      <Optimization>Disabled</Optimization>\n" .
"      <SuppressStartupBanner>true</SuppressStartupBanner>\n" .
"      <WarningLevel>Level3</WarningLevel>\n" .
"      <MinimalRebuild>true</MinimalRebuild>\n" .
"      <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>\n" .
"      <AdditionalIncludeDirectories>" . (join ";", map {"..\\..\\$dirpfx$_"} @srcdirs) . ";%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>\n" .
"      <PreprocessorDefinitions>WIN32;_DEBUG;_WINDOWS;SECURITY_WIN32;POSIX;_CRT_SECURE_NO_WARNINGS;_CRT_NONSTDC_NO_DEPRECATE;%(PreprocessorDefinitions)</PreprocessorDefinitions>\n" .
"      <AssemblerListingLocation>.\\Debug\\</AssemblerListingLocation>\n" .
"      <PrecompiledHeaderOutputFile>.\\Debug\\$windows_project.pch</PrecompiledHeaderOutputFile>\n" .
"      <ObjectFileName>.\\Debug\\</ObjectFileName>\n" .
"      <ProgramDataBaseFileName>.\\Debug\\</ProgramDataBaseFileName>\n" .
"      <BasicRuntimeChecks>EnableFastChecks</BasicRuntimeChecks>\n" .
"    </ClCompile>\n" .
"    <Midl>\n" .
"      <SuppressStartupBanner>true</SuppressStartupBanner>\n" .
"      <PreprocessorDefinitions>_DEBUG;%(PreprocessorDefinitions)</PreprocessorDefinitions>\n" .
"      <TypeLibraryName>.\\Debug\\$windows_project.tlb</TypeLibraryName>\n" .
"      <MkTypLibCompatible>true</MkTypLibCompatible>\n" .
"      <TargetEnvironment>Win32</TargetEnvironment>\n" .
"    </Midl>\n" .
"    <ResourceCompile>\n" .
"      <Culture>0x0809</Culture>\n" .
"      <PreprocessorDefinitions>_DEBUG;%(PreprocessorDefinitions)</PreprocessorDefinitions>\n" .
"    </ResourceCompile>\n" .
"    <Bscmake>\n" .
"      <SuppressStartupBanner>true</SuppressStartupBanner>\n" .
"      <OutputFile>.\\Debug\\$windows_project.bsc</OutputFile>\n" .
"    </Bscmake>\n" .
"    <Link>\n" .
"      <SuppressStartupBanner>true</SuppressStartupBanner>\n" .
"      <GenerateDebugInformation>true</GenerateDebugInformation>\n" .
"      <SubSystem>$subsystem</SubSystem>\n" .
"      <OutputFile>\$(TargetPath)</OutputFile>\n" .
"      <AdditionalDependencies>$libs;%(AdditionalDependencies)</AdditionalDependencies>\n" .
"    </Link>\n" .
"  </ItemDefinitionGroup>\n";
# The VC++ projects don't have physical structure to them, instead
# the files are organized by logical "filters" that are stored in
# a separate file, so different users can organize things differently.
# The filters file contains a copy of the ItemGroup elements from
# the main project file that list the included items, but tack
# on a filter name where needed.
print FILTERS
"<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" .
"<Project ToolsVersion=\"4.0\" xmlns=\"http://schemas.microsoft.com/developer/msbuild/2003\">\n";
print "  <ItemGroup>\n";
print FILTERS "  <ItemGroup>\n";
foreach $icon_file (@icons) {
# NOTE(review): the leading '..' here is unescaped regex (matches any
# two characters); it works for the intended "..\windows\" prefix but
# is fragile — same applies to the identical substitutions below.
$icon_file =~ s/..\\windows\\//;
print "    <CustomBuild Include=\"..\\..\\$icon_file\" />\n";
print FILTERS
"    <CustomBuild Include=\"..\\..\\$icon_file\">\n" .
"      <Filter>Resource Files</Filter>\n" .
"    </CustomBuild>\n";
}
print FILTERS "  </ItemGroup>\n";
print "  </ItemGroup>\n";
print "  <ItemGroup>\n";
print FILTERS "  <ItemGroup>\n";
foreach $resource_file (@resources) {
$resource_file =~ s/..\\windows\\//;
print
"    <ResourceCompile Include=\"..\\..\\$resource_file\">\n" .
"      <AdditionalIncludeDirectories Condition=\"'\$(Configuration)|\$(Platform)'=='Release|Win32'\">..\\..;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>\n" .
"      <AdditionalIncludeDirectories Condition=\"'\$(Configuration)|\$(Platform)'=='Debug|Win32'\">..\\..;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>\n" .
"    </ResourceCompile>\n";
print FILTERS
"    <ResourceCompile Include=\"..\\..\\$resource_file\">\n" .
"      <Filter>Resource Files</Filter>\n" .
"    </ResourceCompile>\n";
}
print FILTERS "  </ItemGroup>\n";
print "  </ItemGroup>\n";
print "  <ItemGroup>\n";
print FILTERS "  <ItemGroup>\n";
foreach $source_file (@source_files) {
$source_file =~ s/..\\windows\\//;
print "    <ClCompile Include=\"..\\..\\$source_file\" />\n";
print FILTERS
"    <ClCompile Include=\"..\\..\\$source_file\">\n" .
"      <Filter>Source Files</Filter>\n" .
"    </ClCompile>";
}
print FILTERS "  </ItemGroup>\n";
print "  </ItemGroup>\n";
print "  <ItemGroup>\n";
print FILTERS "  <ItemGroup>\n";
foreach $header_file (@header_files) {
$header_file =~ s/..\\windows\\//;
print "    <ClInclude Include=\"..\\..\\$header_file\" />\n";
print FILTERS
"    <ClInclude Include=\"..\\..\\$header_file\">\n" .
"      <Filter>Header Files</Filter>\n" .
"    </ClInclude>";
}
print FILTERS "  </ItemGroup>\n";
print "  </ItemGroup>\n";
print
"  <Import Project=\"\$(VCTargetsPath)\\Microsoft.Cpp.targets\" />\n" .
"</Project>";
print FILTERS
"  <ItemGroup>\n" .
"    <Filter Include=\"Source Files\">\n" .
"      <UniqueIdentifier>{" . &invent_guid("sources:$windows_project") . "}</UniqueIdentifier>\n" .
"    </Filter>\n" .
"    <Filter Include=\"Header Files\">\n" .
"      <UniqueIdentifier>{" . &invent_guid("headers:$windows_project") . "}</UniqueIdentifier>\n" .
"    </Filter>\n" .
"    <Filter Include=\"Resource Files\">\n" .
"      <UniqueIdentifier>{" . &invent_guid("resources:$windows_project") . "}</UniqueIdentifier>\n" .
"    </Filter>\n" .
"  </ItemGroup>\n" .
"</Project>";
select STDOUT; close OUT; close FILTERS;
chdir "..";
}
}
# Emit the Unix/GTK makefile.  Like all the emitters below, this redirects
# the default print filehandle to the output file via `select OUT' for the
# duration of the block, and restores STDOUT at the end.
if (defined $makefiles{'gtk'}) {
$dirpfx = &dirpfx($makefiles{'gtk'}, "/");
##-- X/GTK/Unix makefile
open OUT, ">$makefiles{'gtk'}"; select OUT;
print
"# Makefile for $project_name under X/GTK and Unix.\n".
"#\n# This file was created by `mkfiles.pl' from the `Recipe' file.\n".
"# DO NOT EDIT THIS FILE DIRECTLY; edit Recipe or mkfiles.pl instead.\n";
# gcc command line option is -D not /D
# ($help holds the Recipe-supplied help text with MSVC-style /D defines;
# rewrite them in place for gcc before printing.)
($_ = $help) =~ s/([=" ])\/D/$1-D/gs;
print $_;
# Boilerplate variable section: toolchain, GTK detection (2.0 with 1.2
# fallback via the GTK_CONFIG shell fragment), GSSAPI link options, and
# standard GNU install paths.
print
"\n".
"# You can define this path to point at your tools if you need to\n".
"# TOOLPATH = /opt/gcc/bin\n".
"CC = \$(TOOLPATH)cc\n".
"# If necessary set the path to krb5-config here\n".
"KRB5CONFIG=krb5-config\n".
"# You can manually set this to `gtk-config' or `pkg-config gtk+-1.2'\n".
"# (depending on what works on your system) if you want to enforce\n".
"# building with GTK 1.2, or you can set it to `pkg-config gtk+-2.0 x11'\n".
"# if you want to enforce 2.0. The default is to try 2.0 and fall back\n".
"# to 1.2 if it isn't found.\n".
"GTK_CONFIG = sh -c 'pkg-config gtk+-2.0 x11 \$\$0 2>/dev/null || gtk-config \$\$0'\n".
"\n".
"-include Makefile.local\n".
"\n".
"unexport CFLAGS # work around a weird issue with krb5-config\n".
"\n".
&splitline("CFLAGS = -O2 -Wall -Werror -g " .
(join " ", map {"-I$dirpfx$_"} @srcdirs) .
" \$(shell \$(GTK_CONFIG) --cflags)").
" -D _FILE_OFFSET_BITS=64\n".
"XLDFLAGS = \$(LDFLAGS) \$(shell \$(GTK_CONFIG) --libs)\n".
"ULDFLAGS = \$(LDFLAGS)\n".
"ifeq (,\$(findstring NO_GSSAPI,\$(COMPAT)))\n".
"ifeq (,\$(findstring STATIC_GSSAPI,\$(COMPAT)))\n".
"XLDFLAGS+= -ldl\n".
"ULDFLAGS+= -ldl\n".
"else\n".
"CFLAGS+= -DNO_LIBDL \$(shell \$(KRB5CONFIG) --cflags gssapi)\n".
"XLDFLAGS+= \$(shell \$(KRB5CONFIG) --libs gssapi)\n".
"ULDFLAGS+= \$(shell \$(KRB5CONFIG) --libs gssapi)\n".
"endif\n".
"endif\n".
"INSTALL=install\n".
"INSTALL_PROGRAM=\$(INSTALL)\n".
"INSTALL_DATA=\$(INSTALL)\n".
"prefix=/usr/local\n".
"exec_prefix=\$(prefix)\n".
"bindir=\$(exec_prefix)/bin\n".
"mandir=\$(prefix)/man\n".
"man1dir=\$(mandir)/man1\n".
"\n".
&def($makefile_extra{'gtk'}->{'vars'}) .
"\n".
".SUFFIXES:\n".
"\n".
"\n";
# One link rule per program; "X:U" selects both GUI (X) and CLI (U)
# programs, and $type picks XLDFLAGS or ULDFLAGS accordingly.
print &splitline("all:" . join "", map { " $_" } &progrealnames("X:U"));
print "\n\n";
foreach $p (&prognames("X:U")) {
($prog, $type) = split ",", $p;
$objstr = &objects($p, "X.o", undef, undef);
print &splitline($prog . ": " . $objstr), "\n";
$libstr = &objects($p, undef, undef, "-lX");
print &splitline("\t\$(CC) -o \$@ " .
$objstr . " \$(${type}LDFLAGS) $libstr", 69), "\n\n";
}
# One compile rule per object; objects flagged in %forceobj depend on the
# phony FORCE target so they are rebuilt on every run (e.g. version.o).
foreach $d (&deps("X.o", undef, $dirpfx, "/", "gtk")) {
if ($forceobj{$d->{obj_orig}}) {
printf("%s: FORCE\n", $d->{obj});
} else {
print &splitline(sprintf("%s: %s", $d->{obj},
join " ", @{$d->{deps}})), "\n";
}
print &splitline("\t\$(CC) \$(COMPAT) \$(CFLAGS) \$(XFLAGS) -c $d->{deps}->[0]\n");
}
print "\n";
print $makefile_extra{'gtk'}->{'end'};
print "\nclean:\n".
"\trm -f *.o". (join "", map { " $_" } &progrealnames("X:U")) . "\n";
print "\nFORCE:\n";
select STDOUT; close OUT;
}
# Emit the plain-Unix makefile: same shape as the GTK one but with no GTK
# detection and only the command-line ("U") programs built.
if (defined $makefiles{'unix'}) {
$dirpfx = &dirpfx($makefiles{'unix'}, "/");
##-- GTK-free pure-Unix makefile for non-GUI apps only
open OUT, ">$makefiles{'unix'}"; select OUT;
print
"# Makefile for $project_name under Unix.\n".
"#\n# This file was created by `mkfiles.pl' from the `Recipe' file.\n".
"# DO NOT EDIT THIS FILE DIRECTLY; edit Recipe or mkfiles.pl instead.\n";
# gcc command line option is -D not /D
($_ = $help) =~ s/([=" ])\/D/$1-D/gs;
print $_;
print
"\n".
"# You can define this path to point at your tools if you need to\n".
"# TOOLPATH = /opt/gcc/bin\n".
"CC = \$(TOOLPATH)cc\n".
"\n".
"-include Makefile.local\n".
"\n".
"unexport CFLAGS # work around a weird issue with krb5-config\n".
"\n".
&splitline("CFLAGS = -O2 -Wall -Werror -g " .
(join " ", map {"-I$dirpfx$_"} @srcdirs)).
" -D _FILE_OFFSET_BITS=64\n".
"ULDFLAGS = \$(LDFLAGS)\n".
"INSTALL=install\n".
"INSTALL_PROGRAM=\$(INSTALL)\n".
"INSTALL_DATA=\$(INSTALL)\n".
"prefix=/usr/local\n".
"exec_prefix=\$(prefix)\n".
"bindir=\$(exec_prefix)/bin\n".
"mandir=\$(prefix)/man\n".
"man1dir=\$(mandir)/man1\n".
"\n".
&def($makefile_extra{'unix'}->{'vars'}) .
"\n".
".SUFFIXES:\n".
"\n".
"\n";
# Link rules for command-line programs only.
print &splitline("all:" . join "", map { " $_" } &progrealnames("U"));
print "\n\n";
foreach $p (&prognames("U")) {
($prog, $type) = split ",", $p;
$objstr = &objects($p, "X.o", undef, undef);
print &splitline($prog . ": " . $objstr), "\n";
$libstr = &objects($p, undef, undef, "-lX");
print &splitline("\t\$(CC) -o \$@ " .
$objstr . " \$(${type}LDFLAGS) $libstr", 69), "\n\n";
}
# Per-object compile rules; %forceobj entries rebuild unconditionally.
foreach $d (&deps("X.o", undef, $dirpfx, "/", "unix")) {
if ($forceobj{$d->{obj_orig}}) {
printf("%s: FORCE\n", $d->{obj});
} else {
print &splitline(sprintf("%s: %s", $d->{obj},
join " ", @{$d->{deps}})), "\n";
}
print &splitline("\t\$(CC) \$(COMPAT) \$(CFLAGS) \$(XFLAGS) -c $d->{deps}->[0]\n");
}
print "\n";
print &def($makefile_extra{'unix'}->{'end'});
print "\nclean:\n".
"\trm -f *.o". (join "", map { " $_" } &progrealnames("U")) . "\n";
print "\nFORCE:\n";
select STDOUT; close OUT;
}
# Emit an Automake Makefile.am.  GTK programs are wrapped in
# "if HAVE_GTK" conditionals so the same Makefile.am builds the CLI-only
# subset when GTK is absent at configure time.
if (defined $makefiles{'am'}) {
die "Makefile.am in a subdirectory is not supported\n"
if &dirpfx($makefiles{'am'}, "/") ne "";
##-- Unix/autoconf Makefile.am
open OUT, ">$makefiles{'am'}"; select OUT;
print
"# Makefile.am for $project_name under Unix with Autoconf/Automake.\n".
"#\n# This file was created by `mkfiles.pl' from the `Recipe' file.\n".
"# DO NOT EDIT THIS FILE DIRECTLY; edit Recipe or mkfiles.pl instead.\n\n";
# 2014-02-22: as of automake-1.14 we begin to get complained at if
# we don't use this option
print "AUTOMAKE_OPTIONS = subdir-objects\n\n";
# Complete list of source and header files. Not used by the
# auto-generated parts of this makefile, but Recipe might like to
# have it available as a variable so that mandatory-rebuild things
# (version.o) can conveniently be made to depend on it.
@sources = ("allsources", "=", sort keys %allsourcefiles);
print &splitline(join " ", @sources), "\n\n";
# bin_PROGRAMS: CLI ("U") programs always; GUI ("X") programs only
# inside the HAVE_GTK conditional.
@cliprogs = ("bin_PROGRAMS", "=");
foreach $p (&prognames("U")) {
($prog, $type) = split ",", $p;
push @cliprogs, $prog;
}
@allprogs = @cliprogs;
foreach $p (&prognames("X")) {
($prog, $type) = split ",", $p;
push @allprogs, $prog;
}
print "if HAVE_GTK\n";
print &splitline(join " ", @allprogs), "\n";
print "else\n";
print &splitline(join " ", @cliprogs), "\n";
print "endif\n\n";
# Map each object name back to its primary source file, so program
# source lists can be expressed in Automake's source-file terms.
%objtosrc = ();
foreach $d (&deps("X", undef, "", "/", "am")) {
$objtosrc{$d->{obj}} = $d->{deps}->[0];
}
print &splitline(join " ", "AM_CPPFLAGS", "=",
map {"-I\$(srcdir)/$_"} @srcdirs), "\n";
@amcflags = ("\$(COMPAT)", "\$(XFLAGS)", "\$(WARNINGOPTS)");
print "if HAVE_GTK\n";
print &splitline(join " ", "AM_CFLAGS", "=",
"\$(GTK_CFLAGS)", @amcflags), "\n";
print "else\n";
print &splitline(join " ", "AM_CFLAGS", "=", @amcflags), "\n";
print "endif\n\n";
# Objects needing special per-object CFLAGS get their own convenience
# library (Automake's standard workaround for per-object flags).
%amspeciallibs = ();
foreach $obj (sort { $a cmp $b } keys %{$cflags{'am'}}) {
print "lib${obj}_a_SOURCES = ", $objtosrc{$obj}, "\n";
print &splitline(join " ", "lib${obj}_a_CFLAGS", "=", @amcflags,
$cflags{'am'}->{$obj}), "\n";
$amspeciallibs{$obj} = "lib${obj}.a";
}
print &splitline(join " ", "noinst_LIBRARIES", "=",
sort { $a cmp $b } values %amspeciallibs), "\n\n";
# Per-program _SOURCES and _LDADD lists; special-library objects are
# linked via LDADD instead of being listed as sources.
foreach $p (&prognames("X:U")) {
($prog, $type) = split ",", $p;
print "if HAVE_GTK\n" if $type eq "X";
@progsources = ("${prog}_SOURCES", "=");
%sourcefiles = ();
@ldadd = ();
$objstr = &objects($p, "X", undef, undef);
foreach $obj (split / /,$objstr) {
if ($amspeciallibs{$obj}) {
push @ldadd, $amspeciallibs{$obj};
} else {
$sourcefiles{$objtosrc{$obj}} = 1;
}
}
push @progsources, sort { $a cmp $b } keys %sourcefiles;
print &splitline(join " ", @progsources), "\n";
if ($type eq "X") {
push @ldadd, "\$(GTK_LIBS)";
}
if (@ldadd) {
print &splitline(join " ", "${prog}_LDADD", "=", @ldadd), "\n";
}
print "endif\n" if $type eq "X";
print "\n";
}
print $makefile_extra{'am'}->{'end'};
select STDOUT; close OUT;
}
# Emit a makefile for the lcc-win32 compiler (Windows GUI "G" and
# console "C" programs).  Objects are .obj, resources are .res, and the
# rule chosen per dependency depends on the target's file extension.
if (defined $makefiles{'lcc'}) {
$dirpfx = &dirpfx($makefiles{'lcc'}, "\\");
##-- lcc makefile
open OUT, ">$makefiles{'lcc'}"; select OUT;
print
"# Makefile for $project_name under lcc.\n".
"#\n# This file was created by `mkfiles.pl' from the `Recipe' file.\n".
"# DO NOT EDIT THIS FILE DIRECTLY; edit Recipe or mkfiles.pl instead.\n";
# lcc command line option is -D not /D
($_ = $help) =~ s/([=" ])\/D/$1-D/gs;
print $_;
print
"\n".
"# If you rename this file to `Makefile', you should change this line,\n".
"# so that the .rsp files still depend on the correct makefile.\n".
"MAKEFILE = Makefile.lcc\n".
"\n".
"# C compilation flags\n".
"CFLAGS = -D_WINDOWS " .
(join " ", map {"-I$dirpfx$_"} @srcdirs) .
"\n".
"# Resource compilation flags\n".
"RCFLAGS = ".(join " ", map {"-I$dirpfx$_"} @srcdirs)."\n".
"\n".
"# Get include directory for resource compiler\n".
"\n".
$makefile_extra{'lcc'}->{'vars'} .
"\n";
print &splitline("all:" . join "", map { " $_.exe" } &progrealnames("G:C"));
print "\n\n";
# Link rules: GUI programs get "-subsystem windows"; console ones don't.
foreach $p (&prognames("G:C")) {
($prog, $type) = split ",", $p;
$objstr = &objects($p, "X.obj", "X.res", undef);
print &splitline("$prog.exe: " . $objstr ), "\n";
$subsystemtype = '';
if ($type eq "G") { $subsystemtype = "-subsystem windows"; }
my $libss = "shell32.lib wsock32.lib ws2_32.lib winspool.lib winmm.lib imm32.lib";
print &splitline("\tlcclnk $subsystemtype -o $prog.exe $objstr $libss");
print "\n\n";
}
# Per-target build rules: .obj targets compile with lcc, anything else
# (i.e. .res) goes through the lrc resource compiler.
foreach $d (&deps("X.obj", "X.res", $dirpfx, "\\", "lcc")) {
if ($forceobj{$d->{obj_orig}}) {
printf("%s: FORCE\n", $d->{obj});
} else {
print &splitline(sprintf("%s: %s", $d->{obj},
join " ", @{$d->{deps}})), "\n";
}
if ($d->{obj} =~ /\.obj$/) {
print &splitline("\tlcc -O -p6 \$(COMPAT)".
" \$(CFLAGS) \$(XFLAGS) ".$d->{deps}->[0],69)."\n";
} else {
print &splitline("\tlrc \$(RCFL) -r \$(RCFLAGS) ".
$d->{deps}->[0],69)."\n";
}
}
print "\n";
print $makefile_extra{'lcc'}->{'end'} if defined $makefile_extra{'lcc'}->{'end'};
print "\nclean:\n".
"\t-del *.obj\n".
"\t-del *.exe\n".
"\t-del *.res\n".
"\n".
"FORCE:\n";
select STDOUT; close OUT;
}
# Emit the Mac OS X makefile.  "MX" programs are built as .app bundles
# (Contents/MacOS binary, optional .icns icon and Info.plist) linked
# against Cocoa; "U" programs are plain command-line binaries.
if (defined $makefiles{'osx'}) {
$dirpfx = &dirpfx($makefiles{'osx'}, "/");
##-- Mac OS X makefile
open OUT, ">$makefiles{'osx'}"; select OUT;
print
"# Makefile for $project_name under Mac OS X.\n".
"#\n# This file was created by `mkfiles.pl' from the `Recipe' file.\n".
"# DO NOT EDIT THIS FILE DIRECTLY; edit Recipe or mkfiles.pl instead.\n";
# gcc command line option is -D not /D
($_ = $help) =~ s/([=" ])\/D/$1-D/gs;
print $_;
print
"CC = \$(TOOLPATH)gcc\n".
"\n".
&splitline("CFLAGS = -O2 -Wall -Werror -g " .
(join " ", map {"-I$dirpfx$_"} @srcdirs))."\n".
"MLDFLAGS = -framework Cocoa\n".
"ULDFLAGS =\n".
"\n" .
$makefile_extra{'osx'}->{'vars'} .
"\n" .
&splitline("all:" . join "", map { " $_" } &progrealnames("MX:U")) .
"\n";
# Bundle rules: create the .app directory tree with mkdir -p, copy in
# the icon and Info.plist when the Recipe provides them, then link the
# executable into Contents/MacOS.
foreach $p (&prognames("MX")) {
($prog, $type) = split ",", $p;
$objstr = &objects($p, "X.o", undef, undef);
$icon = &special($p, ".icns");
$infoplist = &special($p, "info.plist");
print "${prog}.app:\n\tmkdir -p \$\@\n";
print "${prog}.app/Contents: ${prog}.app\n\tmkdir -p \$\@\n";
print "${prog}.app/Contents/MacOS: ${prog}.app/Contents\n\tmkdir -p \$\@\n";
$targets = "${prog}.app/Contents/MacOS/$prog";
if (defined $icon) {
print "${prog}.app/Contents/Resources: ${prog}.app/Contents\n\tmkdir -p \$\@\n";
print "${prog}.app/Contents/Resources/${prog}.icns: ${prog}.app/Contents/Resources $icon\n\tcp $icon \$\@\n";
$targets .= " ${prog}.app/Contents/Resources/${prog}.icns";
}
if (defined $infoplist) {
print "${prog}.app/Contents/Info.plist: ${prog}.app/Contents/Resources $infoplist\n\tcp $infoplist \$\@\n";
$targets .= " ${prog}.app/Contents/Info.plist";
}
$targets .= " \$(${prog}_extra)";
print &splitline("${prog}: $targets", 69) . "\n\n";
print &splitline("${prog}.app/Contents/MacOS/$prog: ".
"${prog}.app/Contents/MacOS " . $objstr), "\n";
$libstr = &objects($p, undef, undef, "-lX");
print &splitline("\t\$(CC) \$(MLDFLAGS) -o \$@ " .
$objstr . " $libstr", 69), "\n\n";
}
# Plain command-line programs.
foreach $p (&prognames("U")) {
($prog, $type) = split ",", $p;
$objstr = &objects($p, "X.o", undef, undef);
print &splitline($prog . ": " . $objstr), "\n";
$libstr = &objects($p, undef, undef, "-lX");
print &splitline("\t\$(CC) \$(ULDFLAGS) -o \$@ " .
$objstr . " $libstr", 69), "\n\n";
}
# Compile rules: .c compiles as C, .m as Objective-C (forced with -x
# since gcc doesn't infer it for all cases).
foreach $d (&deps("X.o", undef, $dirpfx, "/", "osx")) {
if ($forceobj{$d->{obj_orig}}) {
printf("%s: FORCE\n", $d->{obj});
} else {
print &splitline(sprintf("%s: %s", $d->{obj},
join " ", @{$d->{deps}})), "\n";
}
$firstdep = $d->{deps}->[0];
if ($firstdep =~ /\.c$/) {
print "\t\$(CC) \$(COMPAT) \$(FWHACK) \$(CFLAGS) \$(XFLAGS) -c \$<\n";
} elsif ($firstdep =~ /\.m$/) {
print "\t\$(CC) -x objective-c \$(COMPAT) \$(FWHACK) \$(CFLAGS) \$(XFLAGS) -c \$<\n";
}
}
print "\n".&def($makefile_extra{'osx'}->{'end'});
print "\nclean:\n".
"\trm -f *.o *.dmg". (join "", map { " $_" } &progrealnames("U")) . "\n".
"\trm -rf *.app\n".
"\n".
"FORCE:\n";
select STDOUT; close OUT;
}
# Emit Dev-C++ 5 project files: one <project>.dev per Windows program
# (GUI and console), each in its own subdirectory under the devcppproj
# output directory.
if (defined $makefiles{'devcppproj'}) {
$dirpfx = &dirpfx($makefiles{'devcppproj'}, "\\");
$orig_dir = cwd;
##-- Dev-C++ 5 projects
#
# Note: All files created in this section are written in binary
# mode to prevent any possibility of misinterpreted line endings.
# I don't know if Dev-C++ is as touchy as MSVC with LF-only line
# endings. But however, CRLF line endings are the common way on
# Win32 machines where Dev-C++ is running.
# Hence, in order for mkfiles.pl to generate CRLF project files
# even when run from Unix, I make sure all files are binary and
# explicitly write the CRLFs.
#
# Create directories if necessary
mkdir $makefiles{'devcppproj'}
if(! -d $makefiles{'devcppproj'});
chdir $makefiles{'devcppproj'};
# Build the object -> dependency-list map once, shared by all projects.
@deps = &deps("X.obj", "X.res", $dirpfx, "\\", "devcppproj");
%all_object_deps = map {$_->{obj} => $_->{deps}} @deps;
# Make dir names FAT/NTFS compatible
# (shadows the global @srcdirs with a backslash-separated local copy)
my @srcdirs = @srcdirs;
for ($i=0; $i<@srcdirs; $i++) {
$srcdirs[$i] =~ s/\//\\/g;
$srcdirs[$i] =~ s/\\$//;
}
# Create the project files
# Get names of all Windows projects (GUI and console)
my @prognames = &prognames("G:C");
foreach $progname (@prognames) {
create_devcpp_project(\%all_object_deps, $progname);
}
chdir $orig_dir;
# Write a single <project>.dev file in its own subdirectory.
# $progname is "name,type" where type G = Win32 GUI, C = console.
sub create_devcpp_project {
my ($all_object_deps, $progname) = @_;
# Construct program's dependency info (Taken from 'vcproj', seems to work right here, too.)
# Classify each of the program's objects' dependencies into source,
# header, resource and library buckets.
%seen_objects = ();
%lib_files = ();
%source_files = ();
%header_files = ();
%resource_files = ();
@object_files = split " ", &objects($progname, "X.obj", "X.res", "X.lib");
foreach $object_file (@object_files) {
next if defined $seen_objects{$object_file};
$seen_objects{$object_file} = 1;
if($object_file =~ /\.lib$/io) {
$lib_files{$object_file} = 1;
next;
}
# NOTE(review): this reads the package-global %all_object_deps, not
# the $all_object_deps hashref parameter. It works only because the
# caller assigns the same data to the global before passing the ref;
# consider $all_object_deps->{$object_file} instead.
$object_deps = $all_object_deps{$object_file};
foreach $object_dep (@$object_deps) {
if($object_dep =~ /\.c$/io) {
$source_files{$object_dep} = 1;
next;
}
if($object_dep =~ /\.h$/io) {
$header_files{$object_dep} = 1;
next;
}
if($object_dep =~ /\.(rc|ico)$/io) {
$resource_files{$object_dep} = 1;
next;
}
}
}
# ($libs is computed but not written into the .dev file below.)
$libs = join " ", sort keys %lib_files;
@source_files = sort keys %source_files;
@header_files = sort keys %header_files;
@resources = sort keys %resource_files;
($windows_project, $type) = split ",", $progname;
mkdir $windows_project
if(! -d $windows_project);
chdir $windows_project;
$subsys = ($type eq "G") ? "0" : "1"; # 0 = Win32 GUI, 1 = Win32 Console
# Binary mode + explicit \r\n: see the CRLF note at the top of this block.
open OUT, ">$windows_project.dev"; binmode OUT; select OUT;
print
"# DEV-C++ 5 Project File - $windows_project.dev\r\n".
"# ** DO NOT EDIT **\r\n".
"\r\n".
# No difference between DEBUG and RELEASE here as in 'vcproj', because
# Dev-C++ does not support multiple compilation profiles in one single project.
# (At least I can say this for Dev-C++ 5 Beta)
"[Project]\r\n".
"FileName=$windows_project.dev\r\n".
"Name=$windows_project\r\n".
"Ver=1\r\n".
"IsCpp=1\r\n".
"Type=$subsys\r\n".
# Multimon is disabled here, as Dev-C++ (Version 5 Beta) does not have multimon.h
"Compiler=-W -D__GNUWIN32__ -DWIN32 -DNDEBUG -D_WINDOWS -DNO_MULTIMON -D_MBCS_\@\@_\r\n".
"CppCompiler=-W -D__GNUWIN32__ -DWIN32 -DNDEBUG -D_WINDOWS -DNO_MULTIMON -D_MBCS_\@\@_\r\n".
"Includes=" . (join ";", map {"..\\..\\$dirpfx$_"} @srcdirs) . "\r\n".
"Linker=-ladvapi32 -lcomctl32 -lcomdlg32 -lgdi32 -limm32 -lshell32 -luser32 -lwinmm -lwinspool_\@\@_\r\n".
"Libs=\r\n".
"UnitCount=" . (@source_files + @header_files + @resources) . "\r\n".
"Folders=\"Header Files\",\"Resource Files\",\"Source Files\"\r\n".
"ObjFiles=\r\n".
"PrivateResource=${windows_project}_private.rc\r\n".
"ResourceIncludes=..\\..\\..\\WINDOWS\r\n".
"MakeIncludes=\r\n".
"Icon=\r\n". # It's ok to leave this blank.
"ExeOutput=\r\n".
"ObjectOutput=\r\n".
"OverrideOutput=0\r\n".
"OverrideOutputName=$windows_project.exe\r\n".
"HostApplication=\r\n".
"CommandLine=\r\n".
"UseCustomMakefile=0\r\n".
"CustomMakefile=\r\n".
"IncludeVersionInfo=0\r\n".
"SupportXPThemes=0\r\n".
"CompilerSet=0\r\n".
"CompilerSettings=0000000000000000000000\r\n".
"\r\n";
# One [UnitN] section per file, numbered consecutively across the
# source, header and resource lists (order must match UnitCount above).
$unit_count = 1;
foreach $source_file (@source_files) {
print
"[Unit$unit_count]\r\n".
"FileName=..\\..\\$source_file\r\n".
"Folder=Source Files\r\n".
"Compile=1\r\n".
"CompileCpp=0\r\n".
"Link=1\r\n".
"Priority=1000\r\n".
"OverrideBuildCmd=0\r\n".
"BuildCmd=\r\n".
"\r\n";
$unit_count++;
}
foreach $header_file (@header_files) {
print
"[Unit$unit_count]\r\n".
"FileName=..\\..\\$header_file\r\n".
"Folder=Header Files\r\n".
"Compile=1\r\n".
"CompileCpp=1\r\n". # Dev-C++ wants to compile all header files with both compilers C and C++. It does not hurt.
"Link=1\r\n".
"Priority=1000\r\n".
"OverrideBuildCmd=0\r\n".
"BuildCmd=\r\n".
"\r\n";
$unit_count++;
}
foreach $resource_file (@resources) {
if ($resource_file =~ /.*\.(ico|cur|bmp|dlg|rc2|rct|bin|rgs|gif|jpg|jpeg|jpe)/io) { # Default filter as in 'vcproj'
$Compile = "0"; # Don't compile images and other binary resource files
$CompileCpp = "0";
} else {
$Compile = "1";
$CompileCpp = "1"; # Dev-C++ wants to compile all .rc files with both compilers C and C++. It does not hurt.
}
print
"[Unit$unit_count]\r\n".
"FileName=..\\..\\$resource_file\r\n".
"Folder=Resource Files\r\n".
"Compile=$Compile\r\n".
"CompileCpp=$CompileCpp\r\n".
"Link=0\r\n".
"Priority=1000\r\n".
"OverrideBuildCmd=0\r\n".
"BuildCmd=\r\n".
"\r\n";
$unit_count++;
}
#Note: By default, [VersionInfo] is not used.
print
"[VersionInfo]\r\n".
"Major=0\r\n".
"Minor=0\r\n".
"Release=1\r\n".
"Build=1\r\n".
"LanguageID=1033\r\n".
"CharsetID=1252\r\n".
"CompanyName=\r\n".
"FileVersion=0.1\r\n".
"FileDescription=\r\n".
"InternalName=\r\n".
"LegalCopyright=\r\n".
"LegalTrademarks=\r\n".
"OriginalFilename=$windows_project.exe\r\n".
"ProductName=$windows_project\r\n".
"ProductVersion=0.1\r\n".
"AutoIncBuildNr=0\r\n";
select STDOUT; close OUT;
chdir "..";
}
}
# All done, so do the Unix postprocessing if asked to.
# Runs mkauto.sh (autoconf regeneration) from the top-level directory,
# then ./configure with the user-supplied @confargs; when $do_unix == 1
# the configure step runs inside the "unix" subdirectory instead.
if ($do_unix) {
chdir $orig_dir;
system "./mkauto.sh";
# $? is -1 when the command could not be spawned at all, so test != 0
# rather than > 0 (the latter silently ignored a missing mkauto.sh).
die "mkfiles.pl: mkauto.sh returned $?\n" if $? != 0;
if ($do_unix == 1) {
chdir ($targetdir = "unix")
or die "$targetdir: chdir: $!\n";
}
system "./configure", @confargs;
die "mkfiles.pl: configure returned $?\n" if $? != 0;
}
# invent_guid($name) -> deterministic RFC 4122 version-4-style GUID string.
#
# The prototype "($)" from the original declaration has been dropped:
# Perl prototypes do not validate arguments, and every call site in this
# file uses the &invent_guid(...) form, which bypasses prototypes anyway
# (as do any calls parsed before this definition appears).
sub invent_guid {
    my ($name) = @_;
    # Invent a GUID for use in Visual Studio project files. We need
    # a few of these for every executable file we build.
    #
    # In order to avoid having to use the non-core Perl module
    # Data::GUID, and also arrange for GUIDs to be stable, we generate
    # our GUIDs by hashing a pile of fixed (but originally randomly
    # generated) data with the filename for which we need an id.
    #
    # Hashing _just_ the filenames would clearly be cheating (it's
    # quite conceivable that someone might hash the same string for
    # another reason and so generate a colliding GUID), but hashing a
    # whole SHA-512 data block of random gibberish as well should make
    # these GUIDs pseudo-random enough to not collide with anyone
    # else's.
    #
    # This seed data must never change, or every previously generated
    # project GUID would change with it.
    my $randdata = pack "N*",
        0xD4AB035F,0x76998BA0,0x2DCCB0BD,0x6D3FA320,0x53638051,0xFE312F35,
        0xDE1CECC0,0x784DF852,0x6C9F4589,0x54B7AC23,0x14E7A1C4,0xF9BF04DF,
        0x19C08B6D,0x3FB69EF1,0xB2DA9043,0xDB5362F3,0x25718DB6,0x733560DA,
        0xFEF871B0,0xFECF7A0C,0x67D19C95,0xB492E911,0xF5D562A3,0xFCE1D478,
        0x02C50434,0xF7326B7E,0x93D39872,0xCF0D0269,0x9EF24C0F,0x827689AD,
        0x88BD20BC,0x74EA6AFE,0x29223682,0xB9AB9287,0x7EA7CE4F,0xCF81B379,
        0x9AE4A954,0x81C7AD97,0x2FF2F031,0xC51DA3C2,0xD311CCE7,0x0A31EB8B,
        0x1AB04242,0xAF53B714,0xFC574D40,0x8CB4ED01,0x29FEB16F,0x4904D7ED,
        0xF5C5F5E1,0xF138A4C2,0xA9D881CE,0xCEA65187,0x4421BA97,0x0EE8428E,
        0x9556E384,0x6D0484C9,0x561BD84B,0xD9516A40,0x6B4FD33F,0xDDFFE4C8,
        0x3D5DF8A5,0xFE6B7D99,0x3443371B,0xF4E30A3E,0xE62B9FDA,0x6BAA75DB,
        0x9EF3C2C7,0x6815CA42,0xE6536076,0xF851E6E2,0x39D16E69,0xBCDF3BB6,
        0x50EFFA41,0x378CDF2A,0xB5EC0D0C,0x1E94C433,0xE818241A,0x2689EB1F,
        0xB649CEF9,0xD7344D46,0x59C1BB13,0x27511FDF,0x7DAD1768,0xB355E29E,
        0xDFAE550C,0x2433005B,0x09DE10B0,0xAA00BA6B,0xC144ED2D,0x8513D007,
        0xB0315232,0x7A10DAB6,0x1D97654E,0xF048214D,0xE3059E75,0x83C225D1,
        0xFC7AB177,0x83F2B553,0x79F7A0AF,0x1C94582C,0xF5E4AF4B,0xFB39C865,
        0x58ABEB27,0xAAB28058,0x52C15A89,0x0EBE9741,0x343F4D26,0xF941202A,
        0xA32FD32F,0xDCC055B8,0x64281BF3,0x468BD7BA,0x0CEE09D3,0xBB5FD2B6,
        0xA528D412,0xA6A6967E,0xEAAF5DAE,0xDE7B2FAE,0xCA36887B,0x0DE196EB,
        0x74B95EF0,0x9EB8B7C2,0x020BFC83,0x1445086F,0xBF4B61B2,0x89AFACEC,
        0x80A5CD69,0xC790F744,0x435A6998,0x8DE7AC48,0x32F31BC9,0x8F760D3D,
        0xF02A74CB,0xD7B47E20,0x9EC91035,0x70FDE74D,0x9B531362,0x9D81739A,
        0x59ADC2EB,0x511555B5,0xCA84B8D5,0x3EC325FF,0x2E442A4C,0x82AF30D9,
        0xBFD3EC87,0x90C59E07,0x1C6DC991,0x2D16B822,0x7EA44EB5,0x3A655A39,
        0xAB640886,0x09311821,0x777801D9,0x489DBE61,0xA1FFEC65,0x978B49B1,
        0x7DB700CD,0x263CF3D6,0xF977E89F,0xBA0B3D01,0x6C6CED19,0x1BE6F23A,
        0x19E0ED98,0x8E71A499,0x70BA3271,0x3FB7EE98,0xABA46848,0x2B797959,
        0x72C6DE59,0xE08B795C,0x02936C39,0x02185CCB,0xD6F3CE18,0xD0157A40,
        0x833DEC3F,0x319B00C4,0x97B59513,0x900B81FD,0x9A022379,0x16E44E1A,
        0x0C4CC540,0xCA98E7F9,0xF9431A26,0x290BCFAC,0x406B82C0,0xBC1C4585,
        0x55C54528,0x811EBB77,0xD4EDD4F3,0xA70DC02E,0x8AD5C0D1,0x28D64EF4,
        0xBEFF5C69,0x99852C4A,0xB4BBFF7B,0x069230AC,0xA3E141FA,0x4E99FB0E,
        0xBC154DAA,0x323C7F15,0x86E0247E,0x2EEA3054,0xC9CA1D32,0x8964A006,
        0xC93978AC,0xF9B2C159,0x03F2079E,0xB051D284,0x4A7EA9A9,0xF001DA1F,
        0xD47A0DAA,0xCF7B6B73,0xF18293B2,0x84303E34,0xF8BC76C4,0xAFBEE24F,
        0xB589CA80,0x77B5BF86,0x21B9FD5B,0x1A5071DF,0xA3863110,0x0E50CA61,
        0x939151A5,0xD2A59021,0x83A9CDCE,0xCEC69767,0xC906BB16,0x3EE1FF4D,
        0x1321EAE4,0x0BF940D6,0x52471E61,0x8A087056,0x66E54293,0xF84AAB9B,
        0x08835EF1,0x8F12B77A,0xD86935A5,0x200281D7,0xCD3C37C9,0x30ABEC05,
        0x7067E8A0,0x608C4838,0xC9F51CDE,0xA6D318DE,0x41C05B2A,0x694CCE0E,
        0xC7842451,0xA3194393,0xFBDC2C84,0xA6D2B577,0xC91E7924,0x01EDA708,
        0x22FBB61E,0x662F9B7B,0xDE3150C3,0x2397058C;
    my $digest = sha512_hex($name . "\0" . $randdata);
    # Slice hex digits out of the digest and force the RFC 4122 version
    # nibble to 4 and the variant bits to 10xx, so the result has the
    # shape xxxxxxxx-xxxx-4xxx-[89ab]xxx-xxxxxxxxxxxx.
    return sprintf("%s-%s-%04x-%04x-%s",
                   substr($digest,0,8),
                   substr($digest,8,4),
                   0x4000 | (0xFFF & hex(substr($digest,12,4))),
                   0x8000 | (0x3FFF & hex(substr($digest,16,4))),
                   substr($digest,20,12));
}
| juergenpf/MINGW-packages | mingw-w64-putty/putty/mkfiles.pl | Perl | bsd-3-clause | 76,580 |
#------------------------------------------------------------------------------
# File: fr.pm
#
# Description: ExifTool French language translations
#
# Notes: This file generated automatically by Image::ExifTool::TagInfoXML
#------------------------------------------------------------------------------
package Image::ExifTool::Lang::fr;
use strict;
use vars qw($VERSION);
$VERSION = '1.34';
%Image::ExifTool::Lang::fr::Translate = (
'AEAperture' => 'Ouverture AE',
'AEBAutoCancel' => {
Description => 'Annulation bracketing auto',
PrintConv => {
'Off' => 'Arrêt',
'On' => 'Marche',
},
},
'AEBSequence' => 'Séquence de bracketing',
'AEBSequenceAutoCancel' => {
Description => 'Séquence auto AEB/annuler',
PrintConv => {
'-,0,+/Disabled' => '-,0,+/Désactivé',
'-,0,+/Enabled' => '-,0,+/Activé',
'0,-,+/Disabled' => '0,-,+/Désactivé',
'0,-,+/Enabled' => '0,-,+/Activé',
},
},
'AEBShotCount' => 'Nombre de vues bracketées',
'AEBXv' => 'Compensation d\'expo. auto en bracketing',
'AEExposureTime' => 'Temps d\'exposition AE',
'AEExtra' => 'Suppléments AE',
'AEInfo' => 'Info sur l\'exposition auto',
'AELock' => {
Description => 'Verrouillage AE',
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'AEMaxAperture' => 'Ouverture maxi AE',
'AEMaxAperture2' => 'Ouverture maxi AE (2)',
'AEMeteringMode' => {
Description => 'Mode de mesure AE',
PrintConv => {
'Multi-segment' => 'Multizone',
},
},
'AEMeteringSegments' => 'Segments de mesure AE',
'AEMinAperture' => 'Ouverture mini AE',
'AEMinExposureTime' => 'Temps d\'exposition mini AE',
'AEProgramMode' => {
Description => 'Mode programme AE',
PrintConv => {
'Av, B or X' => 'Av, B ou X',
'Candlelight' => 'Bougie',
'DOF Program' => 'Programme PdC',
'DOF Program (P-Shift)' => 'Programme PdC (décalage P)',
'Hi-speed Program' => 'Programme grande vitesse',
'Hi-speed Program (P-Shift)' => 'Programme grande vitesse (décalage P)',
'Kids' => 'Enfants',
'Landscape' => 'Paysage',
'M, P or TAv' => 'M, P ou TAv',
'MTF Program' => 'Programme FTM',
'MTF Program (P-Shift)' => 'Programme FTM (décalage P)',
'Museum' => 'Musée',
'Night Scene' => 'Nocturne',
'Night Scene Portrait' => 'Portrait nocturne',
'No Flash' => 'Sans flash',
'Pet' => 'Animaux de compagnie',
'Sunset' => 'Coucher de soleil',
'Surf & Snow' => 'Surf et neige',
'Sv or Green Mode' => 'Sv ou mode vert',
'Text' => 'Texte',
},
},
'AEXv' => 'Compensation d\'exposition auto',
'AE_ISO' => 'Sensibilité ISO AE',
'AFAdjustment' => 'Ajustement AF',
'AFAperture' => 'Ouverture AF',
'AFAreaIllumination' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'AFAreaMode' => {
Description => 'Mode de zone AF',
PrintConv => {
'1-area' => 'Mise au point 1 zone',
'1-area (high speed)' => 'Mise au point 1 zone (haute vitesse)',
'3-area (center)?' => 'Mise au point 3 zones (au centre) ?',
'3-area (high speed)' => 'Mise au point 3 zones (haute vitesse)',
'3-area (left)?' => 'Mise au point 3 zones (à gauche) ?',
'3-area (right)?' => 'Mise au point 3 zones (à droite) ?',
'5-area' => 'Mise au point 5 zones',
'9-area' => 'Mise au point 9 zones',
'Face Detect AF' => 'Dét. visage',
'Spot Focusing' => 'Mise au point Spot',
'Spot Mode Off' => 'Mode Spot désactivé',
'Spot Mode On' => 'Mode Spot enclenché',
},
},
'AFAssist' => {
Description => 'Faisceau d\'assistance AF',
PrintConv => {
'Does not emit/Fires' => 'N\'émet pas/Se déclenche',
'Emits/Does not fire' => 'Emet/Ne se déclenche pas',
'Emits/Fires' => 'Emet/Se déclenche',
'Off' => 'Désactivé',
'On' => 'Activé',
'Only ext. flash emits/Fires' => 'Flash ext émet/Se déclenche',
},
},
'AFAssistBeam' => {
Description => 'Faisceau d\'assistance AF',
PrintConv => {
'Does not emit' => 'Désactivé',
'Emits' => 'Activé',
'Only ext. flash emits' => 'Uniquement par flash ext.',
},
},
'AFDefocus' => 'Défocalisation AF',
'AFDuringLiveView' => {
Description => 'AF pendant la visée directe',
PrintConv => {
'Disable' => 'Désactivé',
'Enable' => 'Activé',
'Live mode' => 'Mode visée directe',
'Quick mode' => 'Mode rapide',
},
},
'AFInfo' => 'Info autofocus',
'AFInfo2' => 'Infos AF',
'AFInfo2Version' => 'Version des infos AF',
'AFIntegrationTime' => 'Temps d\'intégration AF',
'AFMicroadjustment' => {
Description => 'Micro-ajustement de l\'AF',
PrintConv => {
'Adjust all by same amount' => 'Ajuster idem tous obj',
'Adjust by lens' => 'Ajuster par objectif',
'Disable' => 'Désactivé',
},
},
'AFMode' => 'Mode AF',
'AFOnAELockButtonSwitch' => {
Description => 'Permutation touche AF/Mémo',
PrintConv => {
'Disable' => 'Désactivé',
'Enable' => 'Activé',
},
},
'AFPoint' => {
Description => 'Point AF',
PrintConv => {
'Bottom' => 'Bas',
'Center' => 'Centre',
'Far Left' => 'Extrême-gauche',
'Far Right' => 'Extrême-droit',
'Left' => 'Gauche',
'Lower-left' => 'Bas-gauche',
'Lower-right' => 'Bas-droit',
'Mid-left' => 'Milieu gauche',
'Mid-right' => 'Milieu droit',
'None' => 'Aucune',
'Right' => 'Droit',
'Top' => 'Haut',
'Upper-left' => 'Haut-gauche',
'Upper-right' => 'Haut-droit',
},
},
'AFPointActivationArea' => {
Description => 'Zone activation collimateurs AF',
PrintConv => {
'Automatic expanded (max. 13)' => 'Expansion auto (13 max.)',
'Expanded (TTL. of 7 AF points)' => 'Expansion (TTL 7 collimat.)',
'Single AF point' => 'Un seul collimateur AF',
},
},
'AFPointAreaExpansion' => {
Description => 'Extension de la zone AF',
PrintConv => {
'Disable' => 'Désactivé',
'Enable' => 'Activé',
'Left/right AF points' => 'Activé (gauche/droite collimateurs autofocus d\'assistance)',
'Surrounding AF points' => 'Activée (Collimateurs autofocus d\'assistance environnants)',
},
},
'AFPointAutoSelection' => {
Description => 'Sélection des collimateurs automatique',
PrintConv => {
'Control-direct:disable/Main:disable' => 'Contrôle rapide-Directe:désactivé/Principale:désactivé',
'Control-direct:disable/Main:enable' => 'Contrôle rapide-Directe:désactivé/Principale:activé',
'Control-direct:enable/Main:enable' => 'Contrôle rapide-Directe:activé/Principale:activé',
},
},
'AFPointBrightness' => {
Description => 'Intensité d\'illumination AF',
PrintConv => {
'Brighter' => 'Forte',
'Normal' => 'Normale',
},
},
'AFPointDisplayDuringFocus' => {
Description => 'Affichage de point AF pendant mise au point',
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
'On (when focus achieved)' => 'Activé (si mise au point effectuée)',
},
},
'AFPointIllumination' => {
Description => 'Eclairage des collimateurs AF',
PrintConv => {
'Brighter' => 'Plus brillant',
'Off' => 'Désactivé',
'On' => 'Activé',
'On without dimming' => 'Activé sans atténuation',
},
},
'AFPointMode' => 'Mode de mise au point AF',
'AFPointRegistration' => {
Description => 'Validation du point AF',
PrintConv => {
'Automatic' => 'Auto',
'Bottom' => 'Bas',
'Center' => 'Centre',
'Extreme Left' => 'Extrême gauche',
'Extreme Right' => 'Extrême droite',
'Left' => 'Gauche',
'Right' => 'Droit',
'Top' => 'Haut',
},
},
'AFPointSelected' => {
Description => 'Point AF sélectionné',
PrintConv => {
'Automatic Tracking AF' => 'AF en suivi auto',
'Bottom' => 'Bas',
'Center' => 'Centre',
'Face Detect AF' => 'AF en reconnaissance de visage',
'Fixed Center' => 'Fixe au centre',
'Left' => 'Gauche',
'Lower-left' => 'Bas gauche',
'Lower-right' => 'Bas droit',
'Mid-left' => 'Milieu gauche',
'Mid-right' => 'Milieu droit',
'Right' => 'Droit',
'Top' => 'Haut',
'Upper-left' => 'Haut gauche',
'Upper-right' => 'Haut droite',
},
},
'AFPointSelected2' => 'Point AF sélectionné 2',
'AFPointSelection' => 'Méthode sélect. collimateurs AF',
'AFPointSelectionMethod' => {
Description => 'Méthode sélection collim. AF',
PrintConv => {
'Multi-controller direct' => 'Multicontrôleur direct',
'Normal' => 'Normale',
'Quick Control Dial direct' => 'Molette AR directe',
},
},
'AFPointSpotMetering' => {
Description => 'Nombre collimateurs/mesure spot',
PrintConv => {
'11/Active AF point' => '11/collimateur AF actif',
'11/Center AF point' => '11/collimateur AF central',
'45/Center AF point' => '45/collimateur AF central',
'9/Active AF point' => '9/collimateur AF actif',
},
},
'AFPointsInFocus' => {
Description => 'Points AF nets',
PrintConv => {
'All' => 'Tous',
'Bottom' => 'Bas',
'Bottom, Center' => 'Bas + centre',
'Bottom-center' => 'Bas centre',
'Bottom-left' => 'Bas gauche',
'Bottom-right' => 'Bas droit',
'Center' => 'Centre',
'Center (horizontal)' => 'Centre (horizontal)',
'Center (vertical)' => 'Centre (vertical)',
'Center+Right' => 'Centre+droit',
'Fixed Center or Multiple' => 'Centre fixe ou multiple',
'Left' => 'Gauche',
'Left+Center' => 'Gauch+centre',
'Left+Right' => 'Gauche+droit',
'Lower-left, Bottom' => 'Bas gauche + bas',
'Lower-left, Mid-left' => 'Bas gauche + milieu gauche',
'Lower-right, Bottom' => 'Bas droit + bas',
'Lower-right, Mid-right' => 'Bas droit + milieu droit',
'Mid-left' => 'Milieu gauche',
'Mid-left, Center' => 'Milieu gauche + centre',
'Mid-right' => 'Milieu droit',
'Mid-right, Center' => 'Milieu droit + centre',
'None' => 'Aucune',
'None (MF)' => 'Aucune (MF)',
'Right' => 'Droit',
'Top' => 'Haut',
'Top, Center' => 'Haut + centre',
'Top-center' => 'Haut centre',
'Top-left' => 'Haut gauche',
'Top-right' => 'Haut droit',
'Upper-left, Mid-left' => 'Haut gauche + milieu gauche',
'Upper-left, Top' => 'Haut gauche + haut',
'Upper-right, Mid-right' => 'Haut droit + milieu droit',
'Upper-right, Top' => 'Haut droit + haut',
},
},
'AFPointsSelected' => 'Points AF sélectionnés',
'AFPointsUnknown1' => {
PrintConv => {
'All' => 'Tous',
'Central 9 points' => '9 points centraux',
},
},
'AFPointsUnknown2' => 'Points AF inconnus 2',
'AFPointsUsed' => {
Description => 'Points AF utilisés',
PrintConv => {
'Bottom' => 'Bas',
'Center' => 'Centre',
'Mid-left' => 'Milieu gauche',
'Mid-right' => 'Milieu droit',
'Top' => 'Haut',
},
},
'AFPredictor' => 'Prédicteur AF',
'AFResponse' => 'Réponse AF',
'AIServoContinuousShooting' => 'Priorité vit. méca. AI Servo',
'AIServoImagePriority' => {
Description => '1er Servo Ai/2e priorité déclenchement',
PrintConv => {
'1: AF, 2: Drive speed' => 'Priorité AF/Priorité cadence vues',
'1: AF, 2: Tracking' => 'Priorité AF/Priorité suivi AF',
'1: Release, 2: Drive speed' => 'Déclenchement/Priorité cadence vues',
},
},
'AIServoTrackingMethod' => {
Description => 'Méthode de suivi autofocus AI Servo',
PrintConv => {
'Continuous AF track priority' => 'Priorité suivi AF en continu',
'Main focus point priority' => 'Priorité point AF principal',
},
},
'AIServoTrackingSensitivity' => {
Description => 'Sensibili. de suivi AI Servo',
PrintConv => {
'Fast' => 'Rapide',
'Medium Fast' => 'Moyenne rapide',
'Medium Slow' => 'Moyenne lent',
'Moderately fast' => 'Moyennement rapide',
'Moderately slow' => 'Moyennement lent',
'Slow' => 'Lent',
},
},
'APEVersion' => 'Version APE',
'ARMIdentifier' => 'Identificateur ARM',
'ARMVersion' => 'Version ARM',
'AToB0' => 'A à B0',
'AToB1' => 'A à B1',
'AToB2' => 'A à B2',
'AccessoryType' => 'Type d\'accessoire',
'ActionAdvised' => {
Description => 'Action conseillée',
PrintConv => {
'Object Append' => 'Ajout d\'objet',
'Object Kill' => 'Destruction d\'objet',
'Object Reference' => 'Référence d\'objet',
'Object Replace' => 'Remplacement d\'objet',
'Ojbect Append' => 'Ajout d\'objet',
},
},
'ActiveArea' => 'Zone active',
'ActiveD-Lighting' => {
PrintConv => {
'Low' => 'Bas',
'Normal' => 'Normale',
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'ActiveD-LightingMode' => {
PrintConv => {
'Low' => 'Bas',
'Normal' => 'Normale',
'Off' => 'Désactivé',
},
},
'AddAspectRatioInfo' => {
Description => 'Ajouter info ratio d\'aspect',
PrintConv => {
'Off' => 'Désactivé',
},
},
'AddOriginalDecisionData' => {
Description => 'Aj. données décis. origine',
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'AdditionalModelInformation' => 'Modèle d\'Information additionnel',
'Address' => 'Adresse',
'AdultContentWarning' => {
PrintConv => {
'Unknown' => 'Inconnu',
},
},
'AdvancedRaw' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'AdvancedSceneMode' => {
PrintConv => {
'Color Select' => 'Désaturation partielle',
'Cross Process' => 'Dévelop. croisé',
'Dynamic Monochrome' => 'Monochrome dynamique',
'Expressive' => 'Expressif',
'High Dynamic' => 'Dynamique haute',
'High Key' => 'Tons clairs',
'Impressive Art' => 'Impressionisme',
'Low Key' => 'Clair-obscur',
'Miniature' => 'Effet miniature',
'Retro' => 'Rétro',
'Sepia' => 'Sépia',
'Soft' => 'Mise au point douce',
'Star' => 'Filtre étoile',
'Toy Effect' => 'Effet jouet',
},
},
'Advisory' => 'Adversité',
'AnalogBalance' => 'Balance analogique',
'Annotations' => 'Annotations Photoshop',
'Anti-Blur' => {
PrintConv => {
'Off' => 'Désactivé',
'n/a' => 'Non établie',
},
},
'AntiAliasStrength' => 'Puissance relative du filtre anticrénelage de l\'appareil',
'Aperture' => 'Ouverture',
'ApertureRange' => {
Description => 'Régler gamme d\'ouvertures',
PrintConv => {
'Disable' => 'Désactivé',
'Enable' => 'Activée',
},
},
'ApertureRingUse' => {
Description => 'Utilisation de la bague de diaphragme',
PrintConv => {
'Permitted' => 'Autorisée',
'Prohibited' => 'Interdite',
},
},
'ApertureValue' => 'Ouverture',
'ApplicationRecordVersion' => 'Version d\'enregistrement',
'ApplyShootingMeteringMode' => {
Description => 'Appliquer mode de prise de vue/de mesure',
PrintConv => {
'Disable' => 'Désactivé',
'Enable' => 'Activée',
},
},
'Artist' => 'Artiste',
'ArtworkCopyrightNotice' => 'Notice copyright de l\'Illustration',
'ArtworkCreator' => 'Créateur de l\'Illustration',
'ArtworkDateCreated' => 'Date de création de l\'Illustration',
'ArtworkSource' => 'Source de l\'Illustration',
'ArtworkSourceInventoryNo' => 'No d\'Inventaire du source de l\'Illustration',
'ArtworkTitle' => 'Titre de l\'Illustration',
'AsShotICCProfile' => 'Profil ICC à la prise de vue',
'AsShotNeutral' => 'Balance neutre à la prise de vue',
'AsShotPreProfileMatrix' => 'Matrice de pré-profil à la prise de vue',
'AsShotProfileName' => 'Nom du profil du cliché',
'AsShotWhiteXY' => 'Balance blanc X-Y à la prise de vue',
'AssignFuncButton' => {
Description => 'Changer fonct. touche FUNC.',
PrintConv => {
'Exposure comp./AEB setting' => 'Correct. expo/réglage AEB',
'Image jump with main dial' => 'Saut image par molette principale',
'Image quality' => 'Changer de qualité',
'LCD brightness' => 'Luminosité LCD',
'Live view function settings' => 'Réglages Visée par l’écran',
},
},
'AssistButtonFunction' => {
Description => 'Touche de fonction rapide',
PrintConv => {
'Av+/- (AF point by QCD)' => 'Av+/- (AF par mol. AR)',
'FE lock' => 'Mémo expo. au flash',
'Normal' => 'Normale',
'Select HP (while pressing)' => 'Sélect. HP (en appuyant)',
'Select Home Position' => 'Sélect. position origine',
},
},
'Audio' => {
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'AudioDuration' => 'Durée audio',
'AudioOutcue' => 'Queue audio',
'AudioSamplingRate' => 'Taux d\'échantillonnage audio',
'AudioSamplingResolution' => 'Résolution d\'échantillonnage audio',
'AudioType' => {
Description => 'Type audio',
PrintConv => {
'Mono Actuality' => 'Actualité (audio mono (1 canal))',
'Mono Music' => 'Musique, transmise par elle-même (audio mono (1 canal))',
'Mono Question and Answer Session' => 'Question et réponse (audio mono (1 canal))',
'Mono Raw Sound' => 'Son brut (audio mono (1 canal))',
'Mono Response to a Question' => 'Réponse à une question (audio mono (1 canal))',
'Mono Scener' => 'Scener (audio mono (1 canal))',
'Mono Voicer' => 'Voix (audio mono (1 canal))',
'Mono Wrap' => 'Wrap (audio mono (1 canal))',
'Stereo Actuality' => 'Actualité (audio stéréo (2 canaux))',
'Stereo Music' => 'Musique, transmise par elle-même (audio stéréo (2 canaux))',
'Stereo Question and Answer Session' => 'Question et réponse (audio stéréo (2 canaux))',
'Stereo Raw Sound' => 'Son brut (audio stéréo (2 canaux))',
'Stereo Response to a Question' => 'Réponse à une question (audio stéréo (2 canaux))',
'Stereo Scener' => 'Scener (audio stéréo (2 canaux))',
'Stereo Voicer' => 'Voix (audio stéréo (2 canaux))',
'Stereo Wrap' => 'Wrap (audio stéréo (2 canaux))',
'Text Only' => 'Texte seul (pas de données d\'objet)',
},
},
'Author' => 'Auteur',
'AuthorsPosition' => 'Titre du créateur',
'AutoAperture' => {
Description => 'Auto-diaph',
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'AutoBracketing' => {
Description => 'Bracketing auto',
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'AutoExposureBracketing' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'AutoFP' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'AutoFocus' => {
Description => 'Auto-Focus',
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'AutoISO' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'AutoLightingOptimizer' => {
Description => 'Correction auto de luminosité',
PrintConv => {
'Disable' => 'Désactivé',
'Enable' => 'Actif',
'Low' => 'Faible',
'Off' => 'Désactivé',
'Strong' => 'Importante',
'n/a' => 'Non établie',
},
},
'AutoLightingOptimizerOn' => {
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'AutoRedEye' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'AutoRotate' => {
Description => 'Rotation automatique',
PrintConv => {
'None' => 'Aucune',
'Rotate 180' => '180° (bas/droit)',
'Rotate 270 CW' => '90° sens horaire (gauche/bas)',
'Rotate 90 CW' => '90° sens antihoraire (droit/haut)',
'n/a' => 'Inconnu',
},
},
'AuxiliaryLens' => 'Objectif Auxiliaire',
'AvApertureSetting' => 'Réglage d\'ouverture Av',
'AvSettingWithoutLens' => {
Description => 'Réglage Av sans objectif',
PrintConv => {
'Disable' => 'Désactivé',
'Enable' => 'Activé',
},
},
'BToA0' => 'B à A0',
'BToA1' => 'B à A1',
'BToA2' => 'B à A2',
'BWMode' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'BackgroundColorIndicator' => 'Indicateur de couleur d\'arrière-plan',
'BackgroundColorValue' => 'Valeur de couleur d\'arrière-plan',
'BackgroundTiling' => {
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'BadFaxLines' => 'Mauvaises lignes de Fax',
'BannerImageType' => {
PrintConv => {
'None' => 'Aucune',
},
},
'BaseExposureCompensation' => 'Compensation d\'exposition de base',
'BaseURL' => 'URL de base',
'BaselineExposure' => 'Exposition de base',
'BaselineNoise' => 'Bruit de base',
'BaselineSharpness' => 'Accentuation de base',
'BatteryInfo' => 'Source d\'alimentation',
'BatteryLevel' => 'Niveau de batterie',
'BayerGreenSplit' => 'Séparation de vert Bayer',
'Beep' => {
PrintConv => {
'High' => 'Bruyant',
'Low' => 'Calme',
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'BestQualityScale' => 'Echelle de meilleure qualité',
'BitsPerComponent' => 'Bits par composante',
'BitsPerExtendedRunLength' => 'Bits par « Run Length » étendue',
'BitsPerRunLength' => 'Bits par « Run Length »',
'BitsPerSample' => 'Nombre de bits par échantillon',
'BlackLevel' => 'Niveau noir',
'BlackLevelDeltaH' => 'Delta H du niveau noir',
'BlackLevelDeltaV' => 'Delta V du niveau noir',
'BlackLevelRepeatDim' => 'Dimension de répétition du niveau noir',
'BlackPoint' => 'Point noir',
'BlueBalance' => 'Balance bleue',
'BlueMatrixColumn' => 'Colonne de matrice bleue',
'BlueTRC' => 'Courbe de reproduction des tons bleus',
'BlurWarning' => {
PrintConv => {
'None' => 'Aucune',
},
},
'BodyBatteryADLoad' => 'Tension accu boîtier en charge',
'BodyBatteryADNoLoad' => 'Tension accu boîtier à vide',
'BodyBatteryState' => {
Description => 'État de accu boîtier',
PrintConv => {
'Almost Empty' => 'Presque vide',
'Empty or Missing' => 'Vide ou absent',
'Full' => 'Plein',
'Running Low' => 'En baisse',
},
},
'BracketMode' => {
PrintConv => {
'Off' => 'Désactivé',
},
},
'BracketShotNumber' => {
Description => 'Numéro de cliché en bracketing',
PrintConv => {
'1 of 3' => '1 sur 3',
'1 of 5' => '1 sur 5',
'2 of 3' => '2 sur 3',
'2 of 5' => '2 sur 5',
'3 of 3' => '3 sur 3',
'3 of 5' => '3 sur 5',
'4 of 5' => '4 sur 5',
'5 of 5' => '5 sur 5',
'n/a' => 'Non établie',
},
},
'Brightness' => 'Luminosité',
'BrightnessValue' => 'Luminosité',
'BulbDuration' => 'Durée du pose longue',
'BurstMode' => {
Description => 'Mode Rafale',
PrintConv => {
'Infinite' => 'Infini',
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'ButtonFunctionControlOff' => {
Description => 'Fonction de touche si Contrôle Rapide OFF',
PrintConv => {
'Disable main, Control, Multi-control' => 'Désactivés principale, Contrôle rapide, Multicontrôleur',
'Normal (enable)' => 'Normale (activée)',
},
},
'By-line' => 'Créateur',
'By-lineTitle' => 'Fonction du créateur',
'CFALayout' => {
Description => 'Organisation CFA',
PrintConv => {
'Even columns offset down 1/2 row' => 'Organisation décalée A : les colonnes paires sont décalées vers le bas d\'une demi-rangée.',
'Even columns offset up 1/2 row' => 'Organisation décalée B : les colonnes paires sont décalées vers le haut d\'une demi-rangée.',
'Even rows offset left 1/2 column' => 'Organisation décalée D : les rangées paires sont décalées vers la gauche d\'une demi-colonne.',
'Even rows offset right 1/2 column' => 'Organisation décalée C : les rangées paires sont décalées vers la droite d\'une demi-colonne.',
'Rectangular' => 'Plan rectangulaire (ou carré)',
},
},
'CFAPattern' => 'Matrice de filtrage couleur',
'CFAPattern2' => 'Modèle CFA 2',
'CFAPlaneColor' => 'Couleur de plan CFA',
'CFARepeatPatternDim' => 'Dimension du modèle de répétition CFA',
'CMMFlags' => 'Drapeaux CMM',
'CMYKEquivalent' => 'Equivalent CMJK',
'CPUFirmwareVersion' => 'Version de firmware de CPU',
'CPUType' => {
PrintConv => {
'None' => 'Aucune',
},
},
'CalibrationDateTime' => 'Date et heure de calibration',
'CalibrationIlluminant1' => {
Description => 'Illuminant de calibration 1',
PrintConv => {
'Cloudy' => 'Temps nuageux',
'Cool White Fluorescent' => 'Fluorescente type soft',
'Day White Fluorescent' => 'Fluorescente type blanc',
'Daylight' => 'Lumière du jour',
'Daylight Fluorescent' => 'Fluorescente type jour',
'Fine Weather' => 'Beau temps',
'Fluorescent' => 'Fluorescente',
'ISO Studio Tungsten' => 'Tungstène studio ISO',
'Other' => 'Autre source de lumière',
'Shade' => 'Ombre',
'Standard Light A' => 'Lumière standard A',
'Standard Light B' => 'Lumière standard B',
'Standard Light C' => 'Lumière standard C',
'Tungsten (Incandescent)' => 'Tungstène (lumière incandescente)',
'Unknown' => 'Inconnue',
'Warm White Fluorescent' => 'Fluorescent blanc chaud',
'White Fluorescent' => 'Fluorescent blanc',
},
},
'CalibrationIlluminant2' => {
Description => 'Illuminant de calibration 2',
PrintConv => {
'Cloudy' => 'Temps nuageux',
'Cool White Fluorescent' => 'Fluorescente type soft',
'Day White Fluorescent' => 'Fluorescente type blanc',
'Daylight' => 'Lumière du jour',
'Daylight Fluorescent' => 'Fluorescente type jour',
'Fine Weather' => 'Beau temps',
'Fluorescent' => 'Fluorescente',
'ISO Studio Tungsten' => 'Tungstène studio ISO',
'Other' => 'Autre source de lumière',
'Shade' => 'Ombre',
'Standard Light A' => 'Lumière standard A',
'Standard Light B' => 'Lumière standard B',
'Standard Light C' => 'Lumière standard C',
'Tungsten (Incandescent)' => 'Tungstène (lumière incandescente)',
'Unknown' => 'Inconnue',
'Warm White Fluorescent' => 'Fluorescent blanc chaud',
'White Fluorescent' => 'Fluorescent blanc',
},
},
'CameraCalibration1' => 'Calibration d\'appareil 1',
'CameraCalibration2' => 'Calibration d\'appareil 2',
'CameraCalibrationSig' => 'Signature de calibration de l\'appareil',
'CameraOrientation' => {
Description => 'Orientation de l\'image',
PrintConv => {
'Horizontal (normal)' => '0° (haut/gauche)',
'Rotate 270 CW' => '90° sens horaire (gauche/bas)',
'Rotate 90 CW' => '90° sens antihoraire (droit/haut)',
},
},
'CameraSerialNumber' => 'Numéro de série de l\'appareil',
'CameraSettings' => 'Réglages de l\'appareil',
'CameraTemperature' => 'Température de l\'appareil',
'CameraType' => 'Type d\'objectif Pentax',
'CanonExposureMode' => {
PrintConv => {
'Aperture-priority AE' => 'Priorité ouverture',
'Bulb' => 'Pose B',
'Manual' => 'Manuelle',
'Program AE' => 'Programme d\'exposition automatique',
'Shutter speed priority AE' => 'Priorité vitesse',
},
},
'CanonFirmwareVersion' => 'Version de firmware',
'CanonFlashMode' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
'Red-eye reduction' => 'Réduction yeux rouges',
},
},
'CanonImageSize' => {
PrintConv => {
'Large' => 'Grande',
'Medium' => 'Moyenne',
'Medium 1' => 'Moyenne 1',
'Medium 2' => 'Moyenne 2',
'Medium 3' => 'Moyenne 3',
'Small' => 'Petite',
'Small 1' => 'Petite 1',
'Small 2' => 'Petite 2',
'Small 3' => 'Petite 3',
},
},
'Caption-Abstract' => 'Légende / Description',
'CaptionWriter' => 'Rédacteur',
'CaptureXResolutionUnit' => {
PrintConv => {
'um' => 'µm (micromètre)',
},
},
'CaptureYResolutionUnit' => {
PrintConv => {
'um' => 'µm (micromètre)',
},
},
'Categories' => 'Catégories',
'Category' => 'Catégorie',
'CellLength' => 'Longueur de cellule',
'CellWidth' => 'Largeur de cellule',
'CenterWeightedAreaSize' => {
PrintConv => {
'Average' => 'Moyenne',
},
},
'Certificate' => 'Certificat',
'CharTarget' => 'Cible caractère',
'CharacterSet' => 'Jeu de caractères',
'ChromaBlurRadius' => 'Rayon de flou de chromatisme',
'ChromaticAdaptation' => 'Adaptation chromatique',
'Chromaticity' => 'Chromaticité',
'ChrominanceNR_TIFF_JPEG' => {
PrintConv => {
'Low' => 'Bas',
'Off' => 'Désactivé',
},
},
'ChrominanceNoiseReduction' => {
PrintConv => {
'Low' => 'Bas',
'Off' => 'Désactivé',
},
},
'CircleOfConfusion' => 'Cercle de confusion',
'City' => 'Ville',
'ClassifyState' => 'État de classification',
'CleanFaxData' => 'Données de Fax propres',
'ClipPath' => 'Chemin de rognage',
'CodedCharacterSet' => 'Jeu de caractères codé',
'CollectionName' => 'Nom de collection',
'ColorAberrationControl' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'ColorAdjustmentMode' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'ColorBalance' => 'Balance des couleurs',
'ColorBalanceAdj' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'ColorBalanceVersion' => 'Version de la Balance des couleurs',
'ColorBooster' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'ColorCalibrationMatrix' => 'Table de matrice de calibration de couleur',
'ColorCharacterization' => 'Caractérisation de couleur',
'ColorComponents' => 'Composants colorimétriques',
'ColorEffect' => {
Description => 'Effet de couleurs',
PrintConv => {
'Black & White' => 'Noir et blanc',
'Cool' => 'Froide',
'Off' => 'Désactivé',
'Sepia' => 'Sépia',
'Warm' => 'Chaude',
},
},
'ColorFilter' => {
Description => 'Filtre de couleur',
PrintConv => {
'Blue' => 'Bleu',
'Green' => 'Vert',
'Off' => 'Désactivé',
'Red' => 'Rouge',
'Yellow' => 'Jaune',
},
},
'ColorHue' => 'Teinte de couleur',
'ColorInfo' => 'Info couleur',
'ColorMap' => 'Charte de couleur',
'ColorMatrix1' => 'Matrice de couleur 1',
'ColorMatrix2' => 'Matrice de couleur 2',
'ColorMode' => {
Description => 'Mode colorimétrique',
PrintConv => {
'Adobe RGB' => 'AdobeRVB',
'Autumn Leaves' => 'Feuilles automne',
'B&W' => 'Noir & Blanc',
'Clear' => 'Lumineux',
'Deep' => 'Profond',
'Evening' => 'Soir',
'Landscape' => 'Paysage',
'Light' => 'Pastel',
'Natural' => 'Naturel',
'Neutral' => 'Neutre',
'Night Scene' => 'Nocturne',
'Night View' => 'Vision nocturne',
'Night View/Portrait' => 'Portrait nocturne',
'Normal' => 'Normale',
'Off' => 'Désactivé',
'RGB' => 'RVB',
'Sunset' => 'Coucher de soleil',
'Vivid' => 'Vives',
},
},
'ColorMoireReduction' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'ColorMoireReductionMode' => {
PrintConv => {
'Low' => 'Bas',
'Off' => 'Désactivé',
},
},
'ColorPalette' => 'Palette de couleur',
'ColorRepresentation' => {
Description => 'Représentation de couleur',
PrintConv => {
'3 Components, Frame Sequential in Multiple Objects' => 'Trois composantes, Vue séquentielle dans différents objets',
'3 Components, Frame Sequential in One Object' => 'Trois composantes, Vue séquentielle dans un objet',
'3 Components, Line Sequential' => 'Trois composantes, Ligne séquentielle',
'3 Components, Pixel Sequential' => 'Trois composantes, Pixel séquentiel',
'3 Components, Single Frame' => 'Trois composantes, Vue unique',
'3 Components, Special Interleaving' => 'Trois composantes, Entrelacement spécial',
'4 Components, Frame Sequential in Multiple Objects' => 'Quatre composantes, Vue séquentielle dans différents objets',
'4 Components, Frame Sequential in One Object' => 'Quatre composantes, Vue séquentielle dans un objet',
'4 Components, Line Sequential' => 'Quatre composantes, Ligne séquentielle',
'4 Components, Pixel Sequential' => 'Quatre composantes, Pixel séquentiel',
'4 Components, Single Frame' => 'Quatre composantes, Vue unique',
'4 Components, Special Interleaving' => 'Quatre composantes, Entrelacement spécial',
'Monochrome, Single Frame' => 'Monochrome, Vue unique',
'No Image, Single Frame' => 'Pas d\'image, Vue unique',
},
},
'ColorResponseUnit' => 'Unité de réponse couleur',
'ColorSequence' => 'Séquence de couleur',
'ColorSpace' => {
Description => 'Espace colorimétrique',
PrintConv => {
'ICC Profile' => 'Profil ICC',
'RGB' => 'RVB',
'Uncalibrated' => 'Non calibré',
'Wide Gamut RGB' => 'Wide Gamut RVB',
'sRGB' => 'sRVB',
},
},
'ColorSpaceData' => 'Espace de couleur de données',
'ColorTable' => 'Tableau de couleurs',
'ColorTemperature' => 'Température de couleur',
'ColorTone' => {
Description => 'Teinte couleur',
PrintConv => {
'Normal' => 'Normale',
},
},
'ColorType' => {
PrintConv => {
'RGB' => 'RVB',
},
},
'ColorantOrder' => 'Ordre de colorant',
'ColorantTable' => 'Table de colorant',
'ColorimetricReference' => 'Référence colorimétrique',
'CommandDialsChangeMainSub' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'CommandDialsMenuAndPlayback' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'CommandDialsReverseRotation' => {
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'CommanderGroupAMode' => {
PrintConv => {
'Manual' => 'Manuelle',
'Off' => 'Désactivé',
},
},
'CommanderGroupBMode' => {
PrintConv => {
'Manual' => 'Manuelle',
'Off' => 'Désactivé',
},
},
'CommanderInternalFlash' => {
PrintConv => {
'Manual' => 'Manuelle',
'Off' => 'Désactivé',
},
},
'Comment' => 'Commentaire',
'Comments' => 'Commentaires',
'Compilation' => {
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'ComponentsConfiguration' => 'Signification de chaque composante',
'CompressedBitsPerPixel' => 'Mode de compression d\'image',
'Compression' => {
Description => 'Schéma de compression',
PrintConv => {
'JBIG Color' => 'JBIG Couleur',
'JPEG' => 'Compression JPEG',
'JPEG (old-style)' => 'JPEG (ancien style)',
'Kodak DCR Compressed' => 'Compression Kodak DCR',
'Kodak KDC Compressed' => 'Compression Kodak KDC',
'Next' => 'Encodage NeXT 2 bits',
'Nikon NEF Compressed' => 'Compression Nikon NEF',
'None' => 'Aucune',
'Pentax PEF Compressed' => 'Compression Pentax PEF',
'SGILog' => 'Encodage Log luminance SGI 32 bits',
'SGILog24' => 'Encodage Log luminance SGI 24 bits',
'Sony ARW Compressed' => 'Compression Sony ARW',
'Thunderscan' => 'Encodage ThunderScan 4 bits',
'Uncompressed' => 'Non compressé',
},
},
'CompressionType' => {
PrintConv => {
'None' => 'Aucune',
},
},
'ConditionalFEC' => 'Compensation exposition flash',
'ConnectionSpaceIlluminant' => 'Illuminant d\'espace de connexion',
'ConsecutiveBadFaxLines' => 'Mauvaises lignes de Fax consécutives',
'ContentLocationCode' => 'Code du lieu du contenu',
'ContentLocationName' => 'Nom du lieu du contenu',
'ContinuousDrive' => {
PrintConv => {
'Movie' => 'Vidéo',
},
},
'ContinuousShootingSpeed' => {
Description => 'Vitesse de prise de vues en continu',
PrintConv => {
'Disable' => 'Désactivé',
'Enable' => 'Activée',
},
},
'ContinuousShotLimit' => {
Description => 'Limiter nombre de vues en continu',
PrintConv => {
'Disable' => 'Désactivé',
'Enable' => 'Activée',
},
},
'Contrast' => {
Description => 'Contraste',
PrintConv => {
'+1 (medium high)' => '+1 (Assez fort)',
'+2 (high)' => '+2 (Forte)',
'+3 (very high)' => '+3 (Très fort)',
'-1 (medium low)' => '-1 (Assez faible)',
'-2 (low)' => '-2 (Faible)',
'-3 (very low)' => '-3 (Très faible)',
'0 (normal)' => '0 (Normale)',
'High' => 'Dur',
'Low' => 'Doux',
'Medium High' => 'Moyen Haut',
'Medium Low' => 'Moyen Faible',
'Normal' => 'Normale',
'n/a' => 'Non établie',
},
},
'ContrastCurve' => 'Courbe de contraste',
'Contributor' => 'Contributeur',
'ControlMode' => {
PrintConv => {
'n/a' => 'Non établie',
},
},
'ConversionLens' => {
Description => 'Complément Optique',
PrintConv => {
'Off' => 'Désactivé',
'Telephoto' => 'Télé',
'Wide' => 'Grand angulaire',
},
},
'Copyright' => 'Propriétaire du copyright',
'CopyrightNotice' => 'Mention de copyright',
'CopyrightStatus' => {
PrintConv => {
'Unknown' => 'Inconnu',
},
},
'Country' => 'Pays',
'Country-PrimaryLocationCode' => 'Code de pays ISO',
'Country-PrimaryLocationName' => 'Pays',
'CountryCode' => 'Code pays',
'Coverage' => 'Couverture',
'CreateDate' => 'Date de la création des données numériques',
'CreationDate' => 'Date de création',
'Creator' => 'Créateur',
'CreatorAddress' => 'Adresse du créateur',
'CreatorCity' => 'Lieu d\'Habitation du créateur',
'CreatorContactInfo' => 'Contact créateur',
'CreatorCountry' => 'Pays du créateur',
'CreatorPostalCode' => 'Code postal du créateur',
'CreatorRegion' => 'Région du créateur',
'CreatorTool' => 'Outil de création',
'CreatorWorkEmail' => 'Courriel professionnel du créateur',
'CreatorWorkTelephone' => 'Téléphone professionnel créateur',
'CreatorWorkURL' => 'URL professionnelle du créateur',
'Credit' => 'Fournisseur',
'CropActive' => {
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'CropUnit' => {
PrintConv => {
'inches' => 'Pouce',
},
},
'CropUnits' => {
PrintConv => {
'inches' => 'Pouce',
},
},
'CurrentICCProfile' => 'Profil ICC actuel',
'CurrentIPTCDigest' => 'Sommaire courant IPTC',
'CurrentPreProfileMatrix' => 'Matrice de pré-profil actuelle',
'Curves' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'CustomRendered' => {
Description => 'Traitement d\'image personnalisé',
PrintConv => {
'Custom' => 'Traitement personnalisé',
'Normal' => 'Traitement normal',
},
},
'D-LightingHQ' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'D-LightingHQSelected' => {
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'D-LightingHS' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'DNGBackwardVersion' => 'Version DNG antérieure',
'DNGLensInfo' => 'Distance focale minimale',
'DNGVersion' => 'Version DNG',
'DOF' => 'Profondeur de champ',
'DSPFirmwareVersion' => 'Version de firmware de DSP',
'DataCompressionMethod' => 'Fournisseur/propriétaire de l\'algorithme de compression de données',
'DataDump' => 'Vidage données',
'DataImprint' => {
PrintConv => {
'None' => 'Aucune',
'Text' => 'Texte',
},
},
'DataType' => 'Type de données',
'DateCreated' => 'Date de création',
'DateDisplayFormat' => {
Description => 'Format date',
PrintConv => {
'D/M/Y' => 'Jour/Mois/Année',
'M/D/Y' => 'Mois/Jour/Année',
'Y/M/D' => 'Année/Mois/Jour',
},
},
'DateSent' => 'Date d\'envoi',
'DateStampMode' => {
PrintConv => {
'Date & Time' => 'Date et heure',
'Off' => 'Désactivé',
},
},
'DateTime' => 'Date de modification du fichier',
'DateTimeCreated' => 'Date/heure de création',
'DateTimeDigitized' => 'Date/heure de la numérisation',
'DateTimeOriginal' => 'Date de la création des données originales',
'DaylightSavings' => {
Description => 'Heure d\'été',
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'DefaultCropOrigin' => 'Origine de rognage par défaut',
'DefaultCropSize' => 'Taille de rognage par défaut',
'DefaultScale' => 'Echelle par défaut',
'DeletedImageCount' => 'Compteur d\'images supprimées',
'DestinationCity' => 'Ville de destination',
'DestinationCityCode' => 'Code ville de destination',
'DestinationDST' => {
Description => 'Heure d\'été de destination',
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'DeviceAttributes' => 'Attributs d\'appareil',
'DeviceManufacturer' => 'Fabricant de l\'appareil',
'DeviceMfgDesc' => 'Description du fabricant d\'appareil',
'DeviceModel' => 'Modèle de l\'appareil',
'DeviceModelDesc' => 'Description du modèle d\'appareil',
'DeviceSettingDescription' => 'Description des réglages du dispositif',
'DialDirectionTvAv' => {
Description => 'Sens rotation molette Tv/Av',
PrintConv => {
'Normal' => 'Normale',
'Reversed' => 'Sens inversé',
},
},
'DigitalCreationDate' => 'Date de numérisation',
'DigitalCreationTime' => 'Heure de numérisation',
'DigitalImageGUID' => 'GUID de l\'image numérique',
'DigitalSourceFileType' => 'Type de fichier de la source numérique',
'DigitalZoom' => {
Description => 'Zoom numérique',
PrintConv => {
'None' => 'Aucune',
'Off' => 'Désactivé',
},
},
'DigitalZoomOn' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'DigitalZoomRatio' => 'Rapport de zoom numérique',
'Directory' => 'Dossier',
'DirectoryNumber' => 'Numéro de dossier',
'DisplaySize' => {
PrintConv => {
'Normal' => 'Normale',
},
},
'DisplayUnits' => {
PrintConv => {
'inches' => 'Pouce',
},
},
'DisplayXResolutionUnit' => {
PrintConv => {
'um' => 'µm (micromètre)',
},
},
'DisplayYResolutionUnit' => {
PrintConv => {
'um' => 'µm (micromètre)',
},
},
'DisplayedUnitsX' => {
PrintConv => {
'inches' => 'Pouce',
},
},
'DisplayedUnitsY' => {
PrintConv => {
'inches' => 'Pouce',
},
},
'DistortionCorrection' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'DistortionCorrection2' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'DjVuVersion' => 'Version DjVu',
'DocumentHistory' => 'Historique du document',
'DocumentName' => 'Nom du document',
'DocumentNotes' => 'Remarques sur le document',
'DotRange' => 'Étendue de points',
'DriveMode' => {
Description => 'Mode de prise de vue',
PrintConv => {
'Burst' => 'Rafale',
'Continuous' => 'Continu',
'Continuous High' => 'Continu (ultrarapide)',
'Continuous Shooting' => 'Prise de vues en continu',
'Multiple Exposure' => 'Exposition multiple',
'No Timer' => 'Pas de retardateur',
'Off' => 'Désactivé',
'Remote Control' => 'Télécommande',
'Remote Control (3 s delay)' => 'Télécommande (retard 3 s)',
'Self-timer (12 s)' => 'Retardateur (12 s)',
'Self-timer (2 s)' => 'Retardateur (2 s)',
'Self-timer Operation' => 'Retardateur',
'Shutter Button' => 'Déclencheur',
'Single Exposure' => 'Exposition unique',
'Single-frame' => 'Vue par vue',
'Single-frame Shooting' => 'Prise de vue unique',
},
},
'DriveMode2' => {
Description => 'Exposition multiple',
PrintConv => {
'Single-frame' => 'Vue par vue',
},
},
'Duration' => 'Durée',
'DynamicRangeExpansion' => {
Description => 'Expansion de la dynamique',
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'DynamicRangeOptimizer' => {
Description => 'Optimiseur Dyna',
PrintConv => {
'Advanced Auto' => 'Avancé Auto',
'Advanced Lv1' => 'Avancé Niv1',
'Advanced Lv2' => 'Avancé Niv2',
'Advanced Lv3' => 'Avancé Niv3',
'Advanced Lv4' => 'Avancé Niv4',
'Advanced Lv5' => 'Avancé Niv5',
'Auto' => 'Auto.',
'Off' => 'Désactivé',
},
},
'E-DialInProgram' => {
PrintConv => {
'P Shift' => 'Décalage P',
'Tv or Av' => 'Tv ou Av',
},
},
'ETTLII' => {
PrintConv => {
'Average' => 'Moyenne',
'Evaluative' => 'Évaluative',
},
},
'EVStepInfo' => 'Info de pas IL',
'EVSteps' => {
Description => 'Pas IL',
PrintConv => {
'1/2 EV Steps' => 'Pas de 1/2 IL',
'1/3 EV Steps' => 'Pas de 1/3 IL',
},
},
'EasyExposureCompensation' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'EasyMode' => {
PrintConv => {
'Beach' => 'Plage',
'Color Accent' => 'Couleur contrastée',
'Color Swap' => 'Permuter couleur',
'Fireworks' => 'Feu d\'artifice',
'Foliage' => 'Feuillages',
'Indoor' => 'Intérieur',
'Kids & Pets' => 'Enfants & animaux',
'Landscape' => 'Paysage',
'Manual' => 'Manuelle',
'Night' => 'Scène de nuit',
'Night Snapshot' => 'Mode Nuit',
'Snow' => 'Neige',
'Sports' => 'Sport',
'Super Macro' => 'Super macro',
'Underwater' => 'Sous-marin',
},
},
'EdgeNoiseReduction' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'EditStatus' => 'Statut d\'édition',
'EditorialUpdate' => {
Description => 'Mise à jour éditoriale',
PrintConv => {
'Additional language' => 'Langues supplémentaires',
},
},
'EffectiveLV' => 'Indice de lumination effectif',
'EffectiveMaxAperture' => 'Ouverture effective maxi de l\'Objectif',
'Emphasis' => {
PrintConv => {
'None' => 'Aucune',
},
},
'EncodingProcess' => {
Description => 'Procédé de codage',
PrintConv => {
'Baseline DCT, Huffman coding' => 'Baseline DCT, codage Huffman',
'Extended sequential DCT, Huffman coding' => 'Extended sequential DCT, codage Huffman',
'Extended sequential DCT, arithmetic coding' => 'Extended sequential DCT, codage arithmétique',
'Lossless, Differential Huffman coding' => 'Lossless, codage Huffman différentiel',
'Lossless, Huffman coding' => 'Lossless, codage Huffman',
'Lossless, arithmetic coding' => 'Lossless, codage arithmétique',
'Lossless, differential arithmetic coding' => 'Lossless, codage arithmétique différentiel',
'Progressive DCT, Huffman coding' => 'Progressive DCT, codage Huffman',
'Progressive DCT, arithmetic coding' => 'Progressive DCT, codage arithmétique',
'Progressive DCT, differential Huffman coding' => 'Progressive DCT, codage Huffman différentiel',
'Progressive DCT, differential arithmetic coding' => 'Progressive DCT, codage arithmétique différentiel',
'Sequential DCT, differential Huffman coding' => 'Sequential DCT, codage Huffman différentiel',
'Sequential DCT, differential arithmetic coding' => 'Sequential DCT, codage arithmétique différentiel',
},
},
'Encryption' => 'Chiffrage',
'EndPoints' => 'Points de terminaison',
'EnhanceDarkTones' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'Enhancement' => {
PrintConv => {
'Blue' => 'Bleu',
'Green' => 'Vert',
'Off' => 'Désactivé',
'Red' => 'Rouge',
},
},
'EnvelopeNumber' => 'Numéro d\'enveloppe',
'EnvelopePriority' => {
Description => 'Priorité d\'enveloppe',
PrintConv => {
'0 (reserved)' => '0 (réservé pour utilisation future)',
'1 (most urgent)' => '1 (très urgent)',
'5 (normal urgency)' => '5 (normalement urgent)',
'8 (least urgent)' => '8 (moins urgent)',
'9 (user-defined priority)' => '9 (priorité définie par l\'utilisateur)',
},
},
'EnvelopeRecordVersion' => 'Version d\'enregistrement',
'Error' => 'Erreur',
'Event' => 'Evenement',
'ExcursionTolerance' => {
Description => 'Tolérance d\'excursion ',
PrintConv => {
'Allowed' => 'Possible',
'Not Allowed' => 'Non permis (défaut)',
},
},
'ExifByteOrder' => 'Indicateur d\'ordre des octets Exif',
'ExifCameraInfo' => 'Info d\'appareil photo Exif',
'ExifImageHeight' => 'Hauteur d\'image',
'ExifImageWidth' => 'Largeur d\'image',
'ExifOffset' => 'Pointeur Exif IFD',
'ExifToolVersion' => 'Version ExifTool',
'ExifUnicodeByteOrder' => 'Indicateur d\'ordre des octets Unicode Exif',
'ExifVersion' => 'Version Exif',
'ExitPupilPosition' => 'Position de la pupille de sortie',
'ExpandFilm' => 'Extension film',
'ExpandFilterLens' => 'Extension lentille filtre',
'ExpandFlashLamp' => 'Extension lampe flash',
'ExpandLens' => 'Extension objectif',
'ExpandScanner' => 'Extension Scanner',
'ExpandSoftware' => 'Extension logiciel',
'ExpirationDate' => 'Date d\'expiration',
'ExpirationTime' => 'Heure d\'expiration',
'ExposureBracketStepSize' => 'Intervalle de bracketing d\'exposition',
'ExposureBracketValue' => 'Valeur Bracketing Expo',
'ExposureCompensation' => 'Décalage d\'exposition',
'ExposureDelayMode' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'ExposureDifference' => 'Correction d\'exposition',
'ExposureIndex' => 'Indice d\'exposition',
'ExposureLevelIncrements' => {
Description => 'Paliers de réglage d\'expo',
PrintConv => {
'1-stop set, 1/3-stop comp.' => 'Réglage 1 valeur, correction 1/3 val.',
'1/2 Stop' => 'Palier 1/2',
'1/2-stop set, 1/2-stop comp.' => 'Réglage 1/2 valeur, correction 1/2 val.',
'1/3 Stop' => 'Palier 1/3',
'1/3-stop set, 1/3-stop comp.' => 'Réglage 1/3 valeur, correction 1/3 val.',
},
},
'ExposureMode' => {
Description => 'Mode d\'exposition',
PrintConv => {
'Aperture Priority' => 'Priorité ouverture',
'Aperture-priority AE' => 'Priorité ouverture',
'Auto' => 'Exposition automatique',
'Auto bracket' => 'Bracketting auto',
'Bulb' => 'Pose B',
'Landscape' => 'Paysage',
'Manual' => 'Exposition manuelle',
'Night Scene / Twilight' => 'Nocturne',
'Shutter Priority' => 'Priorité vitesse',
'Shutter speed priority AE' => 'Priorité vitesse',
},
},
'ExposureModeInManual' => {
Description => 'Mode d\'exposition manuelle',
PrintConv => {
'Center-weighted average' => 'Centrale pondérée',
'Evaluative metering' => 'Mesure évaluativ',
'Partial metering' => 'Partielle',
'Specified metering mode' => 'Mode de mesure spécifié',
'Spot metering' => 'Spot',
},
},
'ExposureProgram' => {
Description => 'Programme d\'exposition',
PrintConv => {
'Action (High speed)' => 'Programme action (orienté grandes vitesses d\'obturation)',
'Aperture Priority' => 'Priorité ouverture',
'Aperture-priority AE' => 'Priorité ouverture',
'Creative (Slow speed)' => 'Programme créatif (orienté profondeur de champ)',
'Landscape' => 'Mode paysage',
'Manual' => 'Manuel',
'Not Defined' => 'Non défini',
'Portrait' => 'Mode portrait',
'Program AE' => 'Programme normal',
'Shutter Priority' => 'Priorité vitesse',
'Shutter speed priority AE' => 'Priorité vitesse',
},
},
'ExposureTime' => 'Temps de pose',
'ExposureTime2' => 'Temps de pose 2',
'ExtendedWBDetect' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'ExtenderStatus' => {
PrintConv => {
'Attached' => 'Attaché',
'Not attached' => 'Non attaché',
'Removed' => 'Retiré',
},
},
'ExternalFlash' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'ExternalFlashBounce' => {
Description => 'Réflexion flash externe',
PrintConv => {
'Bounce' => 'Avec réflecteur',
'No' => 'Non',
'Yes' => 'Oui',
'n/a' => 'Non établie',
},
},
'ExternalFlashExposureComp' => {
Description => 'Compensation d\'exposition flash externe',
PrintConv => {
'-0.5' => '-0.5 IL',
'-1.0' => '-1.0 IL',
'-1.5' => '-1.5 IL',
'-2.0' => '-2.0 IL',
'-2.5' => '-2.5 IL',
'-3.0' => '-3.0 IL',
'0.0' => '0.0 IL',
'0.5' => '0.5 IL',
'1.0' => '1.0 IL',
'n/a' => 'Non établie (éteint ou modes auto)',
'n/a (Manual Mode)' => 'Non établie (mode manuel)',
},
},
'ExternalFlashGuideNumber' => 'Nombre guide flash externe',
'ExternalFlashMode' => {
Description => 'Segment de mesure flash esclave 3',
PrintConv => {
'Off' => 'Désactivé',
'On, Auto' => 'En service, auto',
'On, Contrast-control Sync' => 'En service, synchro contrôle des contrastes',
'On, Flash Problem' => 'En service, problème de flash',
'On, High-speed Sync' => 'En service, synchro haute vitesse',
'On, Manual' => 'En service, manuel',
'On, P-TTL Auto' => 'En service, auto P-TTL',
'On, Wireless' => 'En service, sans cordon',
'On, Wireless, High-speed Sync' => 'En service, sans cordon, synchro haute vitesse',
'n/a - Off-Auto-Aperture' => 'N/c - auto-diaph hors service',
},
},
'ExtraSamples' => 'Echantillons supplémentaires',
'FNumber' => 'Nombre F',
'FOV' => 'Champ de vision',
'FaceOrientation' => {
PrintConv => {
'Horizontal (normal)' => '0° (haut/gauche)',
'Rotate 180' => '180° (bas/droit)',
'Rotate 270 CW' => '90° sens horaire (gauche/bas)',
'Rotate 90 CW' => '90° sens antihoraire (droit/haut)',
},
},
'FastSeek' => {
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'FaxProfile' => {
PrintConv => {
'Unknown' => 'Inconnu',
},
},
'FaxRecvParams' => 'Paramètres de réception Fax',
'FaxRecvTime' => 'Temps de réception Fax',
'FaxSubAddress' => 'Sous-adresse Fax',
'FileFormat' => 'Format de fichier',
'FileInfo' => 'Infos Fichier',
'FileInfoVersion' => 'Version des Infos Fichier',
'FileModifyDate' => 'Date/heure de modification du fichier',
'FileName' => 'Nom de fichier',
'FileNumber' => 'Numéro de fichier',
'FileNumberMemory' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'FileNumberSequence' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'FileSize' => 'Taille du fichier',
'FileSource' => {
Description => 'Source du fichier',
PrintConv => {
'Digital Camera' => 'Appareil photo numérique',
'Film Scanner' => 'Scanner de film',
'Reflection Print Scanner' => 'Scanner par réflexion',
},
},
'FileType' => 'Type de fichier',
'FileVersion' => 'Version de format de fichier',
'Filename' => 'Nom du fichier ',
'FillFlashAutoReduction' => {
Description => 'Mesure E-TTL',
PrintConv => {
'Disable' => 'Désactivé',
'Enable' => 'Activé',
},
},
'FillOrder' => {
Description => 'Ordre de remplissage',
PrintConv => {
'Normal' => 'Normale',
},
},
'FilmMode' => {
Description => 'Mode Film',
PrintConv => {
'Dynamic (B&W)' => 'Vives (N & Bà)',
'Dynamic (color)' => 'Couleurs vives',
'Nature (color)' => 'Couleurs naturelles',
'Smooth (B&W)' => 'Pastel (N & B)',
'Smooth (color)' => 'Couleurs pastel',
'Standard (B&W)' => 'Normales (N & B)',
'Standard (color)' => 'Couleurs normales',
},
},
'FilterEffect' => {
Description => 'Effet de filtre',
PrintConv => {
'Green' => 'Vert',
'None' => 'Aucune',
'Off' => 'Désactivé',
'Red' => 'Rouge',
'Yellow' => 'Jaune',
'n/a' => 'Non établie',
},
},
'FilterEffectMonochrome' => {
PrintConv => {
'Green' => 'Vert',
'None' => 'Aucune',
'Red' => 'Rouge',
'Yellow' => 'Jaune',
},
},
'FinderDisplayDuringExposure' => {
Description => 'Affich. viseur pendant expo.',
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'FirmwareVersion' => 'Version de firmware',
'FixtureIdentifier' => 'Identificateur d\'installation',
'Flash' => {
Description => 'Flash ',
PrintConv => {
'Auto, Did not fire' => 'Flash non déclenché, mode auto',
'Auto, Did not fire, Red-eye reduction' => 'Auto, flash non déclenché, mode réduction yeux rouges',
'Auto, Fired' => 'Flash déclenché, mode auto',
'Auto, Fired, Red-eye reduction' => 'Flash déclenché, mode auto, mode réduction yeux rouges, lumière renvoyée détectée',
'Auto, Fired, Red-eye reduction, Return detected' => 'Flash déclenché, mode auto, lumière renvoyée détectée, mode réduction yeux rouges',
'Auto, Fired, Red-eye reduction, Return not detected' => 'Flash déclenché, mode auto, lumière renvoyée non détectée, mode réduction yeux rouges',
'Auto, Fired, Return detected' => 'Flash déclenché, mode auto, lumière renvoyée détectée',
'Auto, Fired, Return not detected' => 'Flash déclenché, mode auto, lumière renvoyée non détectée',
'Did not fire' => 'Flash non déclenché',
'Fired' => 'Flash déclenché',
'Fired, Red-eye reduction' => 'Flash déclenché, mode réduction yeux rouges',
'Fired, Red-eye reduction, Return detected' => 'Flash déclenché, mode réduction yeux rouges, lumière renvoyée détectée',
'Fired, Red-eye reduction, Return not detected' => 'Flash déclenché, mode réduction yeux rouges, lumière renvoyée non détectée',
'Fired, Return detected' => 'Lumière renvoyée sur le capteur détectée',
'Fired, Return not detected' => 'Lumière renvoyée sur le capteur non détectée',
'No Flash' => 'Flash non déclenché',
'No flash function' => 'Pas de fonction flash',
'Off' => 'Désactivé',
'Off, Did not fire' => 'Flash non déclenché, mode flash forcé',
'Off, Did not fire, Return not detected' => 'Éteint, flash non déclenché, lumière renvoyée non détectée',
'Off, No flash function' => 'Éteint, pas de fonction flash',
'Off, Red-eye reduction' => 'Éteint, mode réduction yeux rouges',
'On' => 'Activé',
'On, Did not fire' => 'Hors service, flash non déclenché',
'On, Fired' => 'Flash déclenché, mode flash forcé',
'On, Red-eye reduction' => 'Flash déclenché, mode forcé, mode réduction yeux rouges',
'On, Red-eye reduction, Return detected' => 'Flash déclenché, mode forcé, mode réduction yeux rouges, lumière renvoyée détectée',
'On, Red-eye reduction, Return not detected' => 'Flash déclenché, mode forcé, mode réduction yeux rouges, lumière renvoyée non détectée',
'On, Return detected' => 'Flash déclenché, mode flash forcé, lumière renvoyée détectée',
'On, Return not detected' => 'Flash déclenché, mode flash forcé, lumière renvoyée non détectée',
},
},
'FlashBias' => 'Décalage Flash',
'FlashCommanderMode' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'FlashCompensation' => 'Compensation flash',
'FlashControlMode' => {
Description => 'Mode de Contrôle du Flash',
PrintConv => {
'Manual' => 'Manuelle',
'Off' => 'Désactivé',
},
},
'FlashDevice' => {
PrintConv => {
'None' => 'Aucune',
},
},
'FlashEnergy' => 'Énergie du flash',
'FlashExposureBracketValue' => 'Valeur Bracketing Flash',
'FlashExposureComp' => 'Compensation d\'exposition au flash',
'FlashExposureCompSet' => 'Réglage de compensation d\'exposition au flash',
'FlashExposureLock' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'FlashFired' => {
Description => 'Flash utilisé',
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'FlashFiring' => {
Description => 'Émission de l\'éclair',
PrintConv => {
'Does not fire' => 'Désactivé',
'Fires' => 'Activé',
},
},
'FlashFocalLength' => 'Focale Flash',
'FlashFunction' => 'Fonction flash',
'FlashGroupAControlMode' => {
PrintConv => {
'Manual' => 'Manuelle',
'Off' => 'Désactivé',
},
},
'FlashGroupBControlMode' => {
PrintConv => {
'Manual' => 'Manuelle',
'Off' => 'Désactivé',
},
},
'FlashGroupCControlMode' => {
PrintConv => {
'Manual' => 'Manuelle',
'Off' => 'Désactivé',
},
},
'FlashInfo' => 'Information flash',
'FlashInfoVersion' => 'Version de l\'info Flash',
'FlashIntensity' => {
PrintConv => {
'High' => 'Haut',
'Low' => 'Bas',
'Normal' => 'Normale',
'Strong' => 'Forte',
},
},
'FlashMeteringSegments' => 'Segments de mesure flash',
'FlashMode' => {
Description => 'Mode flash',
PrintConv => {
'Auto, Did not fire' => 'Auto, non déclenché',
'Auto, Did not fire, Red-eye reduction' => 'Auto, non déclenché, réduction yeux rouges',
'Auto, Fired' => 'Auto, déclenché',
'Auto, Fired, Red-eye reduction' => 'Auto, déclenché, réduction yeux rouges',
'Did Not Fire' => 'Eclair non-déclenché',
'External, Auto' => 'Externe, auto',
'External, Contrast-control Sync' => 'Externe, synchro contrôle des contrastes',
'External, Flash Problem' => 'Externe, problème de flash ?',
'External, High-speed Sync' => 'Externe, synchro haute vitesse',
'External, Manual' => 'Externe, manuel',
'External, P-TTL Auto' => 'Externe, P-TTL',
'External, Wireless' => 'Externe, sans cordon',
'External, Wireless, High-speed Sync' => 'Externe, sans cordon, synchro haute vitesse',
'Fired, Commander Mode' => 'Eclair déclenché, Mode maître',
'Fired, External' => 'Eclair déclenché, Exterieur',
'Fired, Manual' => 'Eclair déclenché, Manuel',
'Fired, TTL Mode' => 'Eclair déclenché, Mode TTL',
'Internal' => 'Interne',
'Normal' => 'Normale',
'Off' => 'Désactivé',
'Off, Did not fire' => 'Hors service',
'On' => 'Activé',
'On, Did not fire' => 'En service, non déclenché',
'On, Fired' => 'En service',
'On, Red-eye reduction' => 'En service, réduction yeux rouges',
'On, Slow-sync' => 'En service, synchro lente',
'On, Slow-sync, Red-eye reduction' => 'En service, synchro lente, réduction yeux rouges',
'On, Soft' => 'En service, doux',
'On, Trailing-curtain Sync' => 'En service, synchro 2e rideau',
'On, Wireless (Control)' => 'En service, sans cordon (esclave)',
'On, Wireless (Master)' => 'En service, sans cordon (maître)',
'Red-eye Reduction' => 'Réduction yeux rouges',
'Red-eye reduction' => 'Réduction yeux rouges',
'Unknown' => 'Inconnu',
'n/a - Off-Auto-Aperture' => 'N/c - auto-diaph hors service',
},
},
'FlashModel' => {
Description => 'Modèle de Flash',
PrintConv => {
'None' => 'Aucune',
},
},
'FlashOptions' => {
Description => 'Options de flash',
PrintConv => {
'Auto, Red-eye reduction' => 'Auto, réduction yeux rouges',
'Normal' => 'Normale',
'Red-eye reduction' => 'Réduction yeux rouges',
'Slow-sync' => 'Synchro lente',
'Slow-sync, Red-eye reduction' => 'Synchro lente, réduction yeux rouges',
'Trailing-curtain Sync' => 'Synchro 2e rideau',
'Wireless (Control)' => 'Sans cordon (contrôleur)',
'Wireless (Master)' => 'Sans cordon (maître)',
},
},
'FlashOptions2' => {
Description => 'Options de flash (2)',
PrintConv => {
'Auto, Red-eye reduction' => 'Auto, réduction yeux rouges',
'Normal' => 'Normale',
'Red-eye reduction' => 'Réduction yeux rouges',
'Slow-sync' => 'Synchro lente',
'Slow-sync, Red-eye reduction' => 'Synchro lente, réduction yeux rouges',
'Trailing-curtain Sync' => 'Synchro 2e rideau',
'Wireless (Control)' => 'Sans cordon (contrôleur)',
'Wireless (Master)' => 'Sans cordon (maître)',
},
},
'FlashOutput' => 'Puissance de l\'éclair',
'FlashRedEyeMode' => 'Flash mode anti-yeux rouges',
'FlashReturn' => {
PrintConv => {
'No return detection' => 'Pas de détection de retour',
'Return detected' => 'Retour détecté',
'Return not detected' => 'Retour non détecté',
},
},
'FlashSetting' => 'Réglages Flash',
'FlashStatus' => {
Description => 'Segment de mesure flash esclave 1',
PrintConv => {
'External, Did not fire' => 'Externe, non déclenché',
'External, Fired' => 'Externe, déclenché',
'Internal, Did not fire' => 'Interne, non déclenché',
'Internal, Fired' => 'Interne, déclenché',
'Off' => 'Désactivé',
},
},
'FlashSyncSpeedAv' => {
Description => 'Vitesse synchro en mode Av',
PrintConv => {
'1/200 Fixed' => '1/200 fixe',
'1/250 Fixed' => '1/250 fixe',
'1/300 Fixed' => '1/300 fixe',
},
},
'FlashType' => {
Description => 'Type de flash',
PrintConv => {
'Built-In Flash' => 'Intégré',
'External' => 'Externe',
'None' => 'Aucune',
},
},
'FlashWarning' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'FlashpixVersion' => 'Version Flashpix supportée',
'FlickerReduce' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'FlipHorizontal' => {
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'FocalLength' => 'Focale de l\'objectif',
'FocalLength35efl' => 'Focale de l\'objectif',
'FocalLengthIn35mmFormat' => 'Distance focale sur film 35 mm',
'FocalPlaneResolutionUnit' => {
Description => 'Unité de résolution de plan focal',
PrintConv => {
'None' => 'Aucune',
'inches' => 'Pouce',
'um' => 'µm (micromètre)',
},
},
'FocalPlaneXResolution' => 'Résolution X du plan focal',
'FocalPlaneYResolution' => 'Résolution Y du plan focal',
'Focus' => {
PrintConv => {
'Manual' => 'Manuelle',
},
},
'FocusContinuous' => {
PrintConv => {
'Manual' => 'Manuelle',
},
},
'FocusDistance' => 'Distance de mise au point',
'FocusMode' => {
Description => 'Mode mise au point',
PrintConv => {
'AF-C' => 'AF-C (prise de vue en rafale)',
'AF-S' => 'AF-S (prise de vue unique)',
'Auto, Continuous' => 'Auto, continue',
'Auto, Focus button' => 'Bouton autofocus',
'Continuous' => 'Auto, continue',
'Infinity' => 'Infini',
'Manual' => 'Manuelle',
'Normal' => 'Normale',
'Pan Focus' => 'Hyperfocale',
},
},
'FocusMode2' => {
Description => 'Mode mise au point 2',
PrintConv => {
'AF-C' => 'AF-C (prise de vue en rafale)',
'AF-S' => 'AF-S (prise de vue unique)',
'Manual' => 'Manuelle',
},
},
'FocusModeSetting' => {
PrintConv => {
'AF-C' => 'AF-C (prise de vue en rafale)',
'AF-S' => 'AF-S (prise de vue unique)',
'Manual' => 'Manuelle',
},
},
'FocusPosition' => 'Distance de mise au point',
'FocusRange' => {
PrintConv => {
'Infinity' => 'Infini',
'Manual' => 'Manuelle',
'Normal' => 'Normale',
'Pan Focus' => 'Hyperfocale',
'Super Macro' => 'Super macro',
},
},
'FocusTrackingLockOn' => {
PrintConv => {
'Normal' => 'Normale',
'Off' => 'Désactivé',
},
},
'FocusingScreen' => 'Verre de visée',
'ForwardMatrix1' => 'Matrice forward 1',
'ForwardMatrix2' => 'Matrice forward 2',
'FrameNumber' => 'Numéro de vue',
'FrameRate' => 'Vitesse',
'FrameSize' => 'Taille du cadre',
'FreeByteCounts' => 'Nombre d\'octets libres',
'FreeOffsets' => 'Offsets libres',
'FujiFlashMode' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
'Red-eye reduction' => 'Réduction yeux rouges',
},
},
'GIFVersion' => 'Version GIF',
'GPSAltitude' => 'Altitude',
'GPSAltitudeRef' => {
Description => 'Référence d\'altitude',
PrintConv => {
'Above Sea Level' => 'Au-dessus du niveau de la mer',
'Below Sea Level' => 'Au-dessous du niveau de la mer',
},
},
'GPSAreaInformation' => 'Nom de la zone GPS',
'GPSDOP' => 'Précision de mesure',
'GPSDateStamp' => 'Date GPS',
'GPSDateTime' => 'Date/heure GPS (horloge atomique)',
'GPSDestBearing' => 'Orientation de la destination',
'GPSDestBearingRef' => {
Description => 'Référence de l\'orientation de la destination',
PrintConv => {
'Magnetic North' => 'Nord magnétique',
'True North' => 'Direction vraie',
},
},
'GPSDestDistance' => 'Distance à la destination',
'GPSDestDistanceRef' => {
Description => 'Référence de la distance à la destination',
PrintConv => {
'Kilometers' => 'Kilomètres',
'Nautical Miles' => 'Milles marins',
},
},
'GPSDestLatitude' => 'Latitude de destination',
'GPSDestLatitudeRef' => {
Description => 'Référence de la latitude de destination',
PrintConv => {
'North' => 'Latitude nord',
'South' => 'Latitude sud',
},
},
'GPSDestLongitude' => 'Longitude de destination',
'GPSDestLongitudeRef' => {
Description => 'Référence de la longitude de destination',
PrintConv => {
'East' => 'Longitude est',
'West' => 'Longitude ouest',
},
},
'GPSDifferential' => {
Description => 'Correction différentielle GPS',
PrintConv => {
'Differential Corrected' => 'Correction différentielle appliquée',
'No Correction' => 'Mesure sans correction différentielle',
},
},
'GPSImgDirection' => 'Direction de l\'image',
'GPSImgDirectionRef' => {
Description => 'Référence pour la direction l\'image',
PrintConv => {
'Magnetic North' => 'Direction magnétique',
'True North' => 'Direction vraie',
},
},
'GPSInfo' => 'Pointeur IFD d\'informations GPS',
'GPSLatitude' => 'Latitude',
'GPSLatitudeRef' => {
Description => 'Latitude nord ou sud',
PrintConv => {
'North' => 'Latitude nord',
'South' => 'Latitude sud',
},
},
'GPSLongitude' => 'Longitude',
'GPSLongitudeRef' => {
Description => 'Longitude est ou ouest',
PrintConv => {
'East' => 'Longitude est',
'West' => 'Longitude ouest',
},
},
'GPSMapDatum' => 'Données de surveillance géodésique utilisées',
'GPSMeasureMode' => {
Description => 'Mode de mesure GPS',
PrintConv => {
'2-D' => 'Mesure à deux dimensions',
'2-Dimensional' => 'Mesure à deux dimensions',
'2-Dimensional Measurement' => 'Mesure à deux dimensions',
'3-D' => 'Mesure à trois dimensions',
'3-Dimensional' => 'Mesure à trois dimensions',
'3-Dimensional Measurement' => 'Mesure à trois dimensions',
},
},
'GPSPosition' => 'Position GPS',
'GPSProcessingMethod' => 'Nom de la méthode de traitement GPS',
'GPSSatellites' => 'Satellites GPS utilisés pour la mesure',
'GPSSpeed' => 'Vitesse du récepteur GPS',
'GPSSpeedRef' => {
Description => 'Unité de vitesse',
PrintConv => {
'km/h' => 'Kilomètres par heure',
'knots' => 'Nœuds',
'mph' => 'Miles par heure',
},
},
'GPSStatus' => {
Description => 'État du récepteur GPS',
PrintConv => {
'Measurement Active' => 'Mesure active',
'Measurement Void' => 'Mesure vide',
},
},
'GPSTimeStamp' => 'Heure GPS (horloge atomique)',
'GPSTrack' => 'Direction de déplacement',
'GPSTrackRef' => {
Description => 'Référence pour la direction de déplacement',
PrintConv => {
'Magnetic North' => 'Direction magnétique',
'True North' => 'Direction vraie',
},
},
'GPSVersionID' => 'Version de tag GPS',
'GainControl' => {
Description => 'Contrôle de gain',
PrintConv => {
'High gain down' => 'Forte atténuation',
'High gain up' => 'Fort gain',
'Low gain down' => 'Faible atténuation',
'Low gain up' => 'Faible gain',
'None' => 'Aucune',
},
},
'GammaCompensatedValue' => 'Valeur de compensation gamma',
'Gapless' => {
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'GeoTiffAsciiParams' => 'Tag de paramètres Ascii GeoTiff',
'GeoTiffDirectory' => 'Tag de répertoire de clé GeoTiff',
'GeoTiffDoubleParams' => 'Tag de paramètres doubles GeoTiff',
'Gradation' => 'Gradation',
'GrayResponseCurve' => 'Courbe de réponse du gris',
'GrayResponseUnit' => {
Description => 'Unité de réponse en gris',
PrintConv => {
'0.0001' => 'Le nombre représente des millièmes d\'unité',
'0.001' => 'Le nombre représente des centièmes d\'unité',
'0.1' => 'Le nombre représente des dixièmes d\'unité',
'1e-05' => 'Le nombre représente des dix-millièmes d\'unité',
'1e-06' => 'Le nombre représente des cent-millièmes d\'unité',
},
},
'GrayTRC' => 'Courbe de reproduction des tons gris',
'GreenMatrixColumn' => 'Colonne de matrice verte',
'GreenTRC' => 'Courbe de reproduction des tons verts',
'GridDisplay' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'GripBatteryADLoad' => 'Tension accu poignée en charge',
'GripBatteryADNoLoad' => 'Tension accu poignée à vide',
'GripBatteryState' => {
Description => 'État de accu poignée',
PrintConv => {
'Almost Empty' => 'Presque vide',
'Empty or Missing' => 'Vide ou absent',
'Full' => 'Plein',
'Running Low' => 'En baisse',
},
},
'HCUsage' => 'Usage HC',
'HDR' => {
Description => 'HDR auto',
PrintConv => {
'Off' => 'Désactivée',
},
},
'HalftoneHints' => 'Indications sur les demi-teintes',
'Headline' => 'Titre principal',
'HierarchicalSubject' => 'Sujet hiérarchique',
'HighISONoiseReduction' => {
Description => 'Réduction du bruit en haute sensibilité ISO',
PrintConv => {
'Auto' => 'Auto.',
'High' => 'Fort',
'Low' => 'Bas',
'Normal' => 'Normale',
'Off' => 'Désactivé',
'On' => 'Activé',
'Strong' => 'Importante',
'Weak' => 'Faible',
'Weakest' => 'La plus faible',
},
},
'HighlightTonePriority' => {
Description => 'Priorité hautes lumières',
PrintConv => {
'Disable' => 'Désactivée',
'Enable' => 'Activée',
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'History' => 'Récapitulatif',
'HometownCity' => 'Ville de résidence',
'HometownCityCode' => 'Code ville de résidence',
'HometownDST' => {
Description => 'Heure d\'été de résidence',
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'HostComputer' => 'Ordinateur hôte',
'Hue' => 'Nuance',
'HueAdjustment' => 'Teinte',
'HyperfocalDistance' => 'Distance hyperfocale',
'ICCProfile' => 'Profil ICC',
'ICCProfileName' => 'Nom du profil ICC',
'ICC_Profile' => 'Profil de couleur ICC d\'entrée',
'ID3Size' => 'Taille ID3',
'IPTC-NAA' => 'Métadonnées IPTC-NAA',
'IPTCBitsPerSample' => 'Nombre de bits par échantillon',
'IPTCImageHeight' => 'Nombre de lignes',
'IPTCImageRotation' => {
Description => 'Rotation d\'image',
PrintConv => {
'0' => 'Pas de rotation',
'180' => 'Rotation de 180 degrés',
'270' => 'Rotation de 270 degrés',
'90' => 'Rotation de 90 degrés',
},
},
'IPTCImageWidth' => 'Pixels par ligne',
'IPTCPictureNumber' => 'Numéro d\'image',
'IPTCPixelHeight' => 'Taille de pixel perpendiculairement à la direction de scan',
'IPTCPixelWidth' => 'Taille de pixel dans la direction de scan',
'ISO' => 'Sensibilité ISO',
'ISOExpansion' => {
Description => 'Extension sensibilité ISO',
PrintConv => {
'Off' => 'Arrêt',
'On' => 'Marche',
},
},
'ISOExpansion2' => {
PrintConv => {
'Off' => 'Désactivé',
},
},
'ISOFloor' => 'Seuil ISO',
'ISOInfo' => 'Info ISO',
'ISOSelection' => 'Choix ISO',
'ISOSetting' => {
Description => 'Réglage ISO',
PrintConv => {
'Manual' => 'Manuelle',
},
},
'ISOSpeedExpansion' => {
Description => 'Extension de sensibilité ISO',
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'ISOSpeedIncrements' => {
Description => 'Incréments de sensibilité ISO',
PrintConv => {
'1/3 Stop' => 'Palier 1/3',
},
},
'ISOSpeedRange' => {
Description => 'Régler l\'extension de sensibilité ISO',
PrintConv => {
'Disable' => 'Désactivé',
'Enable' => 'Activée',
},
},
'IT8Header' => 'En-tête IT8',
'Identifier' => 'Identifiant',
'Illumination' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'ImageAdjustment' => 'Ajustement Image',
'ImageAreaOffset' => 'Décalage de zone d\'image',
'ImageAuthentication' => {
Description => 'Authentication de l\'image',
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'ImageBoundary' => 'Cadre Image',
'ImageColorIndicator' => 'Indicateur de couleur d\'image',
'ImageColorValue' => 'Valeur de couleur d\'image',
'ImageCount' => 'Compteur d\'images',
'ImageDataSize' => 'Taille de l\'image',
'ImageDepth' => 'Profondeur d\'image',
'ImageDescription' => 'Description d\'image',
'ImageDustOff' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'ImageEditCount' => 'Compteur de traitement d\'image',
'ImageEditing' => {
Description => 'Traitement de l\'image',
PrintConv => {
'Cropped' => 'Recadré',
'Digital Filter' => 'Filtre numérique',
'Frame Synthesis?' => 'Synthèse de vue ?',
'None' => 'Aucun',
},
},
'ImageHeight' => 'Hauteur d\'image',
'ImageHistory' => 'Historique de l\'image',
'ImageID' => 'ID d\'image',
'ImageLayer' => 'Couche image',
'ImageNumber' => 'Numéro d\'image',
'ImageOptimization' => 'Optimisation d\'image',
'ImageOrientation' => {
Description => 'Orientation d\'image',
PrintConv => {
'Landscape' => 'Paysage',
'Square' => 'Carré',
},
},
'ImageProcessing' => 'Retouche d\'image',
'ImageQuality' => {
PrintConv => {
'Normal' => 'Normale',
},
},
'ImageReview' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'ImageRotated' => {
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'ImageSize' => 'Taille de l\'Image',
'ImageSourceData' => 'Données source d\'image',
'ImageStabilization' => {
Description => 'Stabilisation d\'image',
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
'On, Mode 1' => 'Enclenché, Mode 1',
'On, Mode 2' => 'Enclenché, Mode 2',
},
},
'ImageTone' => {
Description => 'Ton de l\'image',
PrintConv => {
'Bright' => 'Brillant',
'Landscape' => 'Paysage',
'Natural' => 'Naturel',
},
},
'ImageType' => 'Type d\'image',
'ImageUniqueID' => 'Identificateur unique d\'image',
'ImageWidth' => 'Largeur d\'image',
'Indexed' => 'Indexé',
'InfoButtonWhenShooting' => {
Description => 'Touche INFO au déclenchement',
PrintConv => {
'Displays camera settings' => 'Affiche les réglages en cours',
'Displays shooting functions' => 'Affiche les fonctions',
},
},
'InkNames' => 'Nom des encres',
'InkSet' => 'Encrage',
'IntellectualGenre' => 'Genre intellectuel',
'IntelligentAuto' => 'Mode Auto intelligent',
'IntensityStereo' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'InterchangeColorSpace' => {
PrintConv => {
'CMY (K) Device Dependent' => 'CMY(K) dépendant de l\'appareil',
'RGB Device Dependent' => 'RVB dépendant de l\'appareil',
},
},
'IntergraphMatrix' => 'Tag de matrice intergraphe',
'Interlace' => 'Entrelacement',
'InternalFlash' => {
PrintConv => {
'Fired' => 'Flash déclenché',
'Manual' => 'Manuelle',
'No' => 'Flash non déclenché',
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'InternalFlashMode' => {
Description => 'Segment de mesure flash esclave 2',
PrintConv => {
'Did not fire, (Unknown 0xf4)' => 'Hors service (inconnue 0xF4)',
'Did not fire, Auto' => 'Hors service, auto',
'Did not fire, Auto, Red-eye reduction' => 'Hors service, auto, réduction yeux rouges',
'Did not fire, Normal' => 'Hors service, normal',
'Did not fire, Red-eye reduction' => 'Hors service, réduction yeux rouges',
'Did not fire, Slow-sync' => 'Hors service, synchro lente',
'Did not fire, Slow-sync, Red-eye reduction' => 'Hors service, synchro lente, réduction yeux rouges',
'Did not fire, Trailing-curtain Sync' => 'Hors service, synchro 2e rideau',
'Did not fire, Wireless (Control)' => 'Hors service, sans cordon (contrôleur)',
'Did not fire, Wireless (Master)' => 'Hors service, sans cordon (maître)',
'Fired' => 'Activé',
'Fired, Auto' => 'En service, auto',
'Fired, Auto, Red-eye reduction' => 'En service, auto, réduction yeux rouges',
'Fired, Red-eye reduction' => 'En service, réduction yeux rouges',
'Fired, Slow-sync' => 'En service, synchro lente',
'Fired, Slow-sync, Red-eye reduction' => 'En service, synchro lente, réduction yeux rouges',
'Fired, Trailing-curtain Sync' => 'En service, synchro 2e rideau',
'Fired, Wireless (Control)' => 'En service, sans cordon (contrôleur)',
'Fired, Wireless (Master)' => 'En service, sans cordon (maître)',
'n/a - Off-Auto-Aperture' => 'N/c - auto-diaph hors service',
},
},
'InternalFlashStrength' => 'Segment de mesure flash esclave 4',
'InternalSerialNumber' => 'Numéro de série interne',
'InteropIndex' => {
Description => 'Identification d\'interopérabilité',
PrintConv => {
'R03 - DCF option file (Adobe RGB)' => 'R03: fichier d\'option DCF (Adobe RGB)',
'R98 - DCF basic file (sRGB)' => 'R98: fichier de base DCF (sRGB)',
'THM - DCF thumbnail file' => 'THM: fichier de vignette DCF',
},
},
'InteropOffset' => 'Indicateur d\'interfonctionnement',
'InteropVersion' => 'Version d\'interopérabilité',
'IptcLastEdited' => 'Dernière édition IPTC',
'JFIFVersion' => 'Version JFIF',
'JPEGACTables' => 'Tableaux AC JPEG',
'JPEGDCTables' => 'Tableaux DC JPEG',
'JPEGLosslessPredictors' => 'Prédicteurs JPEG sans perte',
'JPEGPointTransforms' => 'Transformations de point JPEG',
'JPEGProc' => 'Proc JPEG',
'JPEGQTables' => 'Tableaux Q JPEG',
'JPEGQuality' => {
Description => 'Qualité',
PrintConv => {
'Extra Fine' => 'Extra fine',
'Standard' => 'Normale',
},
},
'JPEGRestartInterval' => 'Intervalle de redémarrage JPEG',
'JPEGTables' => 'Tableaux JPEG',
'JobID' => 'ID de la tâche',
'JpgRecordedPixels' => {
Description => 'Pixels enregistrés JPEG',
PrintConv => {
'10 MP' => '10 Mpx',
'2 MP' => '2 Mpx',
'6 MP' => '6 Mpx',
},
},
'Keyword' => 'Mots clé',
'Keywords' => 'Mots-clés',
'LC1' => 'Données d\'objectif',
'LC10' => 'Données mv\' nv\'',
'LC11' => 'Données AVC 1/EXP',
'LC12' => 'Données mv1 Avminsif',
'LC14' => 'Données UNT_12 UNT_6',
'LC15' => 'Données d\'adaptation de flash incorporé',
'LC2' => 'Code de distance',
'LC3' => 'Valeur K',
'LC4' => 'Données de correction d\'aberration à courte distance',
'LC5' => 'Données de correction d\'aberration chromatique',
'LC6' => 'Données d\'aberration d\'ouverture',
'LC7' => 'Données de condition minimale de déclenchement AF',
'LCDDisplayAtPowerOn' => {
Description => 'État LCD lors de l\'allumage',
PrintConv => {
'Display' => 'Allumé',
'Retain power off status' => 'État précédent',
},
},
'LCDDisplayReturnToShoot' => {
Description => 'Affich. LCD -> Prise de vues',
PrintConv => {
'Also with * etc.' => 'Aussi par * etc.',
'With Shutter Button only' => 'Par déclencheur uniq.',
},
},
'LCDIllumination' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'LCDIlluminationDuringBulb' => {
Description => 'Éclairage LCD pendant pose longue',
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'LCDPanels' => {
Description => 'Ecran LCD supérieur/arrière',
PrintConv => {
'ISO/File no.' => 'ISO/No. fichier',
'ISO/Remain. shots' => 'ISO/Vues restantes',
'Remain. shots/File no.' => 'Vues restantes/No. fichier',
'Shots in folder/Remain. shots' => 'Vues dans dossier/Vues restantes',
},
},
'LCHEditor' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'Language' => 'Langage',
'LanguageIdentifier' => 'Identificateur de langue',
'LastKeywordIPTC' => 'Dernier mot-clé IPTC',
'LastKeywordXMP' => 'Dernier mot-clé XMP',
'LeafData' => 'Données Leaf',
'Lens' => 'Objectif ',
'LensAFStopButton' => {
Description => 'Fonct. touche AF objectif',
PrintConv => {
'AE lock' => 'Verrouillage AE',
'AE lock while metering' => 'Verr. AE posemètre actif',
'AF Stop' => 'Arrêt AF',
'AF mode: ONE SHOT <-> AI SERVO' => 'Mode AF: ONE SHOT <-> AI SERVO',
'AF point: M -> Auto / Auto -> Ctr.' => 'Colli: M -> Auto / Auto -> Ctr.',
'AF point: M->Auto/Auto->ctr' => 'Collim.AF: M->Auto/Auto->ctr',
'AF start' => 'Activation AF',
'AF stop' => 'Arrêt AF',
'IS start' => 'Activation stab. image',
'Switch to registered AF point' => 'Activer le collimateur autofocus enregistré',
},
},
'LensData' => 'Valeur K (LC3)',
'LensDataVersion' => 'Version des Données Objectif',
'LensDriveNoAF' => {
Description => 'Pilot. obj. si AF impossible',
PrintConv => {
'Focus search off' => 'Pas de recherche du point',
'Focus search on' => 'Recherche du point',
},
},
'LensFStops' => 'Nombre de diaphs de l\'objectif',
'LensID' => 'ID Lens',
'LensIDNumber' => 'Numéro d\'Objectif',
'LensInfo' => 'Informations sur l\'objectif',
'LensKind' => 'Sorte d\'objectif / version (LC0)',
'LensMake' => 'Fabricant d\'objectif',
'LensModel' => 'Modèle d\'objectif',
'LensSerialNumber' => 'Numéro de série objectif',
'LensType' => 'Sorte d\'objectif',
'LicenseType' => {
PrintConv => {
'Unknown' => 'Inconnu',
},
},
'LightReading' => 'Lecture de la lumière',
'LightSource' => {
Description => 'Source de lumière',
PrintConv => {
'Cloudy' => 'Temps nuageux',
'Cool White Fluorescent' => 'Fluorescente type soft',
'Day White Fluorescent' => 'Fluorescente type blanc',
'Daylight' => 'Lumière du jour',
'Daylight Fluorescent' => 'Fluorescente type jour',
'Fine Weather' => 'Beau temps',
'Fluorescent' => 'Fluorescente',
'ISO Studio Tungsten' => 'Tungstène studio ISO',
'Other' => 'Autre source de lumière',
'Shade' => 'Ombre',
'Standard Light A' => 'Lumière standard A',
'Standard Light B' => 'Lumière standard B',
'Standard Light C' => 'Lumière standard C',
'Tungsten (Incandescent)' => 'Tungstène (lumière incandescente)',
'Unknown' => 'Inconnue',
'Warm White Fluorescent' => 'Fluorescent blanc chaud',
'White Fluorescent' => 'Fluorescent blanc',
},
},
'LightSourceSpecial' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'LightValue' => 'Luminosité',
'Lightness' => 'Luminosité',
'LinearResponseLimit' => 'Limite de réponse linéaire',
'LinearizationTable' => 'Table de linéarisation',
'Lit' => {
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'LiveViewExposureSimulation' => {
Description => 'Simulation d\'exposition directe',
PrintConv => {
'Disable (LCD auto adjust)' => 'Désactivée (réglage écran auto)',
'Enable (simulates exposure)' => 'Activée (simulation exposition)',
},
},
'LiveViewShooting' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'LocalizedCameraModel' => 'Nom traduit de modèle d\'appareil',
'Location' => 'Lieu',
'LockMicrophoneButton' => {
Description => 'Fonction de touche microphone',
PrintConv => {
'Protect (hold:record memo)' => 'Protéger (maintien: enregistrement sonore)',
'Record memo (protect:disable)' => 'Enregistrement sonore (protéger: désactivée)',
},
},
'LongExposureNoiseReduction' => {
Description => 'Réduct. bruit longue expo.',
PrintConv => {
'Off' => 'Arrêt',
'On' => 'Marche',
},
},
'LookupTable' => 'Table de correspondance',
'LoopStyle' => {
PrintConv => {
'Normal' => 'Normale',
},
},
'LuminanceNoiseReduction' => {
PrintConv => {
'Low' => 'Bas',
'Off' => 'Désactivé',
},
},
'MCUVersion' => 'Version MCU',
'MIEVersion' => 'Version MIE',
'MIMEType' => 'Type MIME',
'MSStereo' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'Macro' => {
PrintConv => {
'Manual' => 'Manuelle',
'Normal' => 'Normale',
'Off' => 'Désactivé',
'On' => 'Activé',
'Super Macro' => 'Super macro',
},
},
'MacroMode' => {
Description => 'Mode Macro',
PrintConv => {
'Normal' => 'Normale',
'Off' => 'Désactivé',
'On' => 'Activé',
'Super Macro' => 'Super macro',
'Tele-Macro' => 'Macro en télé',
},
},
'MagnifiedView' => {
Description => 'Agrandissement en lecture',
PrintConv => {
'Image playback only' => 'Lecture image uniquement',
'Image review and playback' => 'Aff. inst. et lecture',
},
},
'MainDialExposureComp' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'Make' => 'Fabricant',
'MakeAndModel' => 'Fabricant et modèle',
'MakerNote' => 'Données privées DNG',
'MakerNoteSafety' => {
Description => 'Sécurité de note de fabricant',
PrintConv => {
'Safe' => 'Sûre',
'Unsafe' => 'Pas sûre',
},
},
'MakerNoteVersion' => 'Version des informations spécifiques fabricant',
'MakerNotes' => 'Notes fabricant',
'ManualFlashOutput' => {
PrintConv => {
'Low' => 'Bas',
'n/a' => 'Non établie',
},
},
'ManualFocusDistance' => 'Distance de Mise-au-point Manuelle',
'ManualTv' => {
Description => 'Régl. Tv/Av manuel pour exp. M',
PrintConv => {
'Tv=Control/Av=Main' => 'Tv=Contrôle rapide/Av=Principale',
'Tv=Control/Av=Main w/o lens' => 'Tv=Contrôle rapide/Av=Principale sans objectif',
'Tv=Main/Av=Control' => 'Tv=Principale/Av=Contrôle rapide',
'Tv=Main/Av=Main w/o lens' => 'Tv=Principale/Av=Contrôle rapide sans objectif',
},
},
'ManufactureDate' => 'Date de fabrication',
'Marked' => 'Marqué',
'MaskedAreas' => 'Zones masquées',
'MasterDocumentID' => 'ID du document maître',
'Matteing' => 'Matité',
'MaxAperture' => 'Données Avmin',
'MaxApertureAtMaxFocal' => 'Ouverture à la focale maxi',
'MaxApertureAtMinFocal' => 'Ouverture à la focale mini',
'MaxApertureValue' => 'Ouverture maximale de l\'objectif',
'MaxAvailHeight' => 'Hauteur max Disponible',
'MaxAvailWidth' => 'Largeur max Disponible',
'MaxFocalLength' => 'Focale maxi',
'MaxSampleValue' => 'Valeur maxi d\'échantillon',
'MaxVal' => 'Valeur max',
'MaximumDensityRange' => 'Etendue maximale de densité',
'Measurement' => 'Observateur de mesure',
'MeasurementBacking' => 'Support de mesure',
'MeasurementFlare' => 'Flare de mesure',
'MeasurementGeometry' => {
Description => 'Géométrie de mesure',
PrintConv => {
'0/45 or 45/0' => '0/45 ou 45/0',
'0/d or d/0' => '0/d ou d/0',
},
},
'MeasurementIlluminant' => 'Illuminant de mesure',
'MeasurementObserver' => 'Observateur de mesure',
'MediaBlackPoint' => 'Point noir moyen',
'MediaType' => {
PrintConv => {
'Normal' => 'Normale',
},
},
'MediaWhitePoint' => 'Point blanc moyen',
'MenuButtonDisplayPosition' => {
Description => 'Position début touche menu',
PrintConv => {
'Previous' => 'Précédente',
'Previous (top if power off)' => 'Précédente (Haut si dés.)',
'Top' => 'Haut',
},
},
'MenuButtonReturn' => {
PrintConv => {
'Previous' => 'Précédente',
'Top' => 'Haut',
},
},
'MetadataDate' => 'Date des metadonnées',
'MeteringMode' => {
Description => 'Mode de mesure',
PrintConv => {
'Average' => 'Moyenne',
'Center-weighted average' => 'Centrale pondérée',
'Evaluative' => 'Évaluative',
'Multi-segment' => 'Multizone',
'Multi-spot' => 'MultiSpot',
'Other' => 'Autre',
'Partial' => 'Partielle',
'Unknown' => 'Inconnu',
},
},
'MeteringMode2' => {
Description => 'Mode de mesure 2',
PrintConv => {
'Multi-segment' => 'Multizone',
},
},
'MeteringMode3' => {
Description => 'Mode de mesure (3)',
PrintConv => {
'Multi-segment' => 'Multizone',
},
},
'MinAperture' => 'Ouverture mini',
'MinFocalLength' => 'Focale mini',
'MinSampleValue' => 'Valeur mini d\'échantillon',
'MinoltaQuality' => {
Description => 'Qualité',
PrintConv => {
'Normal' => 'Normale',
},
},
'MirrorLockup' => {
Description => 'Verrouillage du miroir',
PrintConv => {
'Disable' => 'Désactivé',
'Enable' => 'Activé',
'Enable: Down with Set' => 'Activé: Retour par touche SET',
},
},
'ModDate' => 'Date de modification',
'Model' => 'Modèle d\'appareil photo',
'Model2' => 'Modèle d\'équipement de prise de vue (2)',
'ModelAge' => 'Age du modèle',
'ModelTiePoint' => 'Tag de lien d modèle',
'ModelTransform' => 'Tag de transformation de modèle',
'ModelingFlash' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'ModifiedPictureStyle' => {
PrintConv => {
'Landscape' => 'Paysage',
'None' => 'Aucune',
},
},
'ModifiedSaturation' => {
PrintConv => {
'Off' => 'Désactivé',
},
},
'ModifiedSharpnessFreq' => {
PrintConv => {
'High' => 'Haut',
'Highest' => 'Plus haut',
'Low' => 'Doux',
'n/a' => 'Non établie',
},
},
'ModifiedToneCurve' => {
PrintConv => {
'Manual' => 'Manuelle',
},
},
'ModifiedWhiteBalance' => {
PrintConv => {
'Cloudy' => 'Temps nuageux',
'Daylight' => 'Lumière du jour',
'Daylight Fluorescent' => 'Fluorescente type jour',
'Fluorescent' => 'Fluorescente',
'Shade' => 'Ombre',
'Tungsten' => 'Tungstène (lumière incandescente)',
},
},
'ModifyDate' => 'Date de modification de fichier',
'MoireFilter' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'MonochromeFilterEffect' => {
PrintConv => {
'Green' => 'Vert',
'None' => 'Aucune',
'Red' => 'Rouge',
'Yellow' => 'Jaune',
},
},
'MonochromeLinear' => {
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'MonochromeToningEffect' => {
PrintConv => {
'Blue' => 'Bleu',
'Green' => 'Vert',
'None' => 'Aucune',
},
},
'MultiExposure' => 'Infos Surimpression',
'MultiExposureAutoGain' => {
Description => 'Auto-expo des surimpressions',
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'MultiExposureMode' => {
Description => 'Mode de surimpression',
PrintConv => {
'Off' => 'Désactivé',
},
},
'MultiExposureShots' => 'Nombre de prises de vue',
'MultiExposureVersion' => 'Version Surimpression',
'MultiFrameNoiseReduction' => {
Description => 'Réduc. bruit multi-photos',
PrintConv => {
'Off' => 'Désactivée',
'On' => 'Activé(e)',
},
},
'MultipleExposureSet' => {
Description => 'Exposition multiple',
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'Mute' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'MyColorMode' => {
PrintConv => {
'Off' => 'Désactivé',
},
},
'NDFilter' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'NEFCompression' => {
PrintConv => {
'Uncompressed' => 'Non compressé',
},
},
'NEFLinearizationTable' => 'Table de Linearization',
'Name' => 'Nom',
'NamedColor2' => 'Couleur nommée 2',
'NativeDigest' => 'Sommaire natif',
'NativeDisplayInfo' => 'Information sur l\'affichage natif',
'NewsPhotoVersion' => 'Version d\'enregistrement news photo',
'Nickname' => 'Surnom',
'NikonCaptureData' => 'Données Nikon Capture',
'NikonCaptureVersion' => 'Version Nikon Capture',
'Noise' => 'Bruit',
'NoiseFilter' => {
PrintConv => {
'Low' => 'Bas',
'Off' => 'Désactivé',
},
},
'NoiseReduction' => {
Description => 'Réduction du bruit',
PrintConv => {
'High (+1)' => '+1 (haut)',
'Highest (+2)' => '+2 (le plus haut)',
'Low' => 'Bas',
'Low (-1)' => '-1 (bas)',
'Lowest (-2)' => '-2 (le plus bas)',
'Normal' => 'Normale',
'Off' => 'Désactivé',
'On' => 'Activé',
'Standard' => '±0 (normal)',
},
},
'NoiseReductionApplied' => 'Réduction de bruit appliquée',
'NominalMaxAperture' => 'Ouverture maxi nominal',
'NominalMinAperture' => 'Ouverture mini nominal',
'NumIndexEntries' => 'Nombre d\'entrées d\'index',
'NumberofInks' => 'Nombre d\'encres',
'OECFColumns' => 'Colonnes OECF',
'OECFNames' => 'Noms OECF',
'OECFRows' => 'Lignes OECF',
'OECFValues' => 'Valeurs OECF',
'OPIProxy' => 'Proxy OPI',
'ObjectAttributeReference' => 'Genre intellectuel',
'ObjectCycle' => {
Description => 'Cycle d\'objet',
PrintConv => {
'Both Morning and Evening' => 'Les deux',
'Evening' => 'Soir',
'Morning' => 'Matin',
},
},
'ObjectFileType' => {
PrintConv => {
'None' => 'Aucune',
'Unknown' => 'Inconnu',
},
},
'ObjectName' => 'Titre',
'ObjectPreviewData' => 'Données de la miniature de l\'objet',
'ObjectPreviewFileFormat' => 'Format de fichier de la miniature de l\'objet',
'ObjectPreviewFileVersion' => 'Version de format de fichier de la miniature de l\'objet',
'ObjectTypeReference' => 'Référence de type d\'objet',
'OffsetSchema' => 'Schéma de décalage',
'OldSubfileType' => 'Type du sous-fichier',
'OneTouchWB' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'OpticalZoomMode' => {
Description => 'Mode Zoom optique',
PrintConv => {
'Extended' => 'Optique EX',
'Standard' => 'Normal',
},
},
'OpticalZoomOn' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'Opto-ElectricConvFactor' => 'Facteur de conversion optoélectrique',
'Orientation' => {
Description => 'Orientation de l\'image',
PrintConv => {
'Horizontal (normal)' => '0° (haut/gauche)',
'Mirror horizontal' => '0° (haut/droit)',
'Mirror horizontal and rotate 270 CW' => '90° sens horaire (gauche/haut)',
'Mirror horizontal and rotate 90 CW' => '90° sens antihoraire (droit/bas)',
'Mirror vertical' => '180° (bas/gauche)',
'Rotate 180' => '180° (bas/droit)',
'Rotate 270 CW' => '90° sens horaire (gauche/bas)',
'Rotate 90 CW' => '90° sens antihoraire (droit/haut)',
},
},
'OriginalRawFileData' => 'Données du fichier raw d\'origine',
'OriginalRawFileDigest' => 'Digest du fichier raw original',
'OriginalRawFileName' => 'Nom du fichier raw d\'origine',
'OriginalTransmissionReference' => 'Identificateur de tâche',
'OriginatingProgram' => 'Programme d\'origine',
'OtherImage' => 'Autre image',
'OutputResponse' => 'Réponse de sortie',
'Owner' => 'Propriétaire',
'OwnerID' => 'ID du propriétaire',
'OwnerName' => 'Nom du propriétaire',
'PDFVersion' => 'Version PDF',
'PEFVersion' => 'Version PEF',
'Padding' => 'Remplissage',
'PageName' => 'Nom de page',
'PageNumber' => 'Page numéro',
'PanasonicExifVersion' => 'Version Exif Panasonic',
'PanasonicRawVersion' => 'Version Panasonic RAW',
'PanasonicTitle' => 'Titre',
'PentaxImageSize' => {
Description => 'Taille d\'image Pentax',
PrintConv => {
'2304x1728 or 2592x1944' => '2304 x 1728 ou 2592 x 1944',
'2560x1920 or 2304x1728' => '2560 x 1920 ou 2304 x 1728',
'2816x2212 or 2816x2112' => '2816 x 2212 ou 2816 x 2112',
'3008x2008 or 3040x2024' => '3008 x 2008 ou 3040 x 2024',
'Full' => 'Pleine',
},
},
'PentaxModelID' => 'Modèle Pentax',
'PentaxVersion' => 'Version Pentax',
'PeripheralLighting' => {
Description => 'Correction éclairage périphérique',
PrintConv => {
'Off' => 'Désactiver',
'On' => 'Activer',
},
},
'PersonInImage' => 'Personnage sur l\'Image',
'PhaseDetectAF' => 'Auto-Focus',
'PhotoEffect' => {
PrintConv => {
'Off' => 'Désactivé',
},
},
'PhotoEffects' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'PhotoEffectsType' => {
PrintConv => {
'None' => 'Aucune',
},
},
'PhotometricInterpretation' => {
Description => 'Schéma de pixel',
PrintConv => {
'BlackIsZero' => 'Zéro pour noir',
'Color Filter Array' => 'CFA (Matrice de filtre de couleur)',
'Pixar LogL' => 'CIE Log2(L) (Log luminance)',
'Pixar LogLuv' => 'CIE Log2(L)(u\',v\') (Log luminance et chrominance)',
'RGB' => 'RVB',
'RGB Palette' => 'Palette RVB',
'Transparency Mask' => 'Masque de transparence',
'WhiteIsZero' => 'Zéro pour blanc',
},
},
'PhotoshopAnnotations' => 'Annotations Photoshop',
'PictureControl' => {
Description => 'Optimisation d\'image',
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'PictureControlActive' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'PictureControlAdjust' => {
Description => 'Ajustement de l\'optimisation d\'image',
PrintConv => {
'Default Settings' => 'Paramètres par défault',
'Full Control' => 'Réglages manuels',
'Quick Adjust' => 'Réglages rapides',
},
},
'PictureControlBase' => 'Optimisation d\'image de base',
'PictureControlName' => 'Nom de l\'optimisation d\'image',
'PictureControlQuickAdjust' => 'Optimisation d\'image - Réglages rapides',
'PictureControlVersion' => 'Version de l\'Optimisation d\'image',
'PictureFinish' => {
PrintConv => {
'Natural' => 'Naturel',
'Night Scene' => 'Nocturne',
},
},
'PictureMode' => {
Description => 'Mode d\'image',
PrintConv => {
'1/2 EV steps' => 'Pas de 1/2 IL',
'1/3 EV steps' => 'Pas de 1/3 IL',
'Aperture Priority' => 'Priorité ouverture',
'Aperture Priority, Off-Auto-Aperture' => 'Priorité ouverture (auto-diaph hors service)',
'Aperture-priority AE' => 'Priorité ouverture',
'Auto PICT (Landscape)' => 'Auto PICT (paysage)',
'Auto PICT (Macro)' => 'Auto PICT (macro)',
'Auto PICT (Portrait)' => 'Auto PICT (portrait)',
'Auto PICT (Sport)' => 'Auto PICT (sport)',
'Auto PICT (Standard)' => 'Auto PICT (standard)',
'Autumn' => 'Automne',
'Blur Reduction' => 'Réduction du flou',
'Bulb' => 'Pose B',
'Bulb, Off-Auto-Aperture' => 'Pose B (auto-diaph hors service)',
'Candlelight' => 'Bougie',
'DOF Program' => 'Programme PdC',
'DOF Program (HyP)' => 'Programme PdC (Hyper-programme)',
'Dark Pet' => 'Animal foncé',
'Digital Filter' => 'Filtre numérique',
'Fireworks' => 'Feux d\'artifice',
'Flash X-Sync Speed AE' => 'Synchro X flash vitesse AE',
'Food' => 'Nourriture',
'Frame Composite' => 'Vue composite',
'Green Mode' => 'Mode vert',
'Half-length Portrait' => 'Portrait (buste)',
'Hi-speed Program' => 'Programme grande vitesse',
'Hi-speed Program (HyP)' => 'Programme grande vitesse (Hyper-programme)',
'Kids' => 'Enfants',
'Landscape' => 'Paysage',
'Light Pet' => 'Animal clair',
'MTF Program' => 'Programme FTM',
'MTF Program (HyP)' => 'Programme FTM (Hyper-programme)',
'Manual' => 'Manuelle',
'Manual, Off-Auto-Aperture' => 'Manuel (auto-diaph hors service)',
'Medium Pet' => 'Animal demi-teintes',
'Museum' => 'Musée',
'Natural Skin Tone' => 'Ton chair naturel',
'Night Scene' => 'Nocturne',
'Night Scene Portrait' => 'Portrait nocturne',
'No Flash' => 'Sans flash',
'Pet' => 'Animaux de compagnie',
'Program' => 'Programme',
'Program (HyP)' => 'Programme AE (Hyper-programme)',
'Program AE' => 'Priorité vitesse',
'Program Av Shift' => 'Décalage programme Av',
'Program Tv Shift' => 'Décalage programme Tv',
'Self Portrait' => 'Autoportrait',
'Sensitivity Priority AE' => 'Priorité sensibilité AE',
'Shutter & Aperture Priority AE' => 'Priorité vitesse et ouverture AE',
'Shutter Speed Priority' => 'Priorité vitesse',
'Shutter speed priority AE' => 'Priorité vitesse',
'Snow' => 'Neige',
'Soft' => 'Doux',
'Sunset' => 'Coucher de soleil',
'Surf & Snow' => 'Surf et neige',
'Synchro Sound Record' => 'Enregistrement de son synchro',
'Text' => 'Texte',
'Underwater' => 'Sous-marine',
},
},
'PictureMode2' => {
Description => 'Mode d\'image 2',
PrintConv => {
'Aperture Priority' => 'Priorité ouverture',
'Aperture Priority, Off-Auto-Aperture' => 'Priorité ouverture (auto-diaph hors service)',
'Auto PICT' => 'Image auto',
'Bulb' => 'Pose B',
'Bulb, Off-Auto-Aperture' => 'Pose B (auto-diaph hors service)',
'Flash X-Sync Speed AE' => 'Expo auto, vitesse de synchro flash X',
'Green Mode' => 'Mode vert',
'Manual' => 'Manuelle',
'Manual, Off-Auto-Aperture' => 'Manuel (auto-diaph hors service)',
'Program AE' => 'Programme AE',
'Program Av Shift' => 'Décalage programme Av',
'Program Tv Shift' => 'Décalage programme Tv',
'Scene Mode' => 'Mode scène',
'Sensitivity Priority AE' => 'Expo auto, priorité sensibilité',
'Shutter & Aperture Priority AE' => 'Expo auto, priorité vitesse et ouverture',
'Shutter Speed Priority' => 'Priorité vitesse',
},
},
'PictureModeBWFilter' => {
PrintConv => {
'Green' => 'Vert',
'Red' => 'Rouge',
'Yellow' => 'Jaune',
'n/a' => 'Non établie',
},
},
'PictureModeTone' => {
PrintConv => {
'Blue' => 'Bleu',
'Green' => 'Vert',
'n/a' => 'Non établie',
},
},
'PictureStyle' => {
Description => 'Style d\'image',
PrintConv => {
'Faithful' => 'Fidèle',
'High Saturation' => 'Saturation élevée',
'Landscape' => 'Paysage',
'Low Saturation' => 'Faible saturation',
'Neutral' => 'Neutre',
'None' => 'Aucune',
},
},
'PixelIntensityRange' => 'Intervalle d\'intensité de pixel',
'PixelScale' => 'Tag d\'échelle de pixel modèle',
'PixelUnits' => {
PrintConv => {
'Unknown' => 'Inconnu',
},
},
'PlanarConfiguration' => {
Description => 'Arrangement des données image',
PrintConv => {
'Chunky' => 'Format « chunky » (entrelacé)',
'Planar' => 'Format « planar »',
},
},
'PostalCode' => 'Code Postal',
'PowerSource' => {
Description => 'Source d\'alimentation',
PrintConv => {
'Body Battery' => 'Accu boîtier',
'External Power Supply' => 'Alimentation externe',
'Grip Battery' => 'Accu poignée',
},
},
'Predictor' => {
Description => 'Prédicteur',
PrintConv => {
'Horizontal differencing' => 'Différentiation horizontale',
'None' => 'Aucun schéma de prédicteur utilisé avant l\'encodage',
},
},
'Preview0' => 'Aperçu 0',
'Preview1' => 'Aperçu 1',
'Preview2' => 'Aperçu 2',
'PreviewApplicationName' => 'Nom de l\'application d\'aperçu',
'PreviewApplicationVersion' => 'Version de l\'application d\'aperçu',
'PreviewColorSpace' => {
Description => 'Espace de couleur de l\'aperçu',
PrintConv => {
'Unknown' => 'Inconnu',
},
},
'PreviewDateTime' => 'Horodatage d\'aperçu',
'PreviewImage' => 'Aperçu',
'PreviewImageBorders' => 'Limites d\'image miniature',
'PreviewImageData' => 'Données d\'image miniature',
'PreviewImageLength' => 'Longueur d\'image miniature',
'PreviewImageSize' => 'Taille d\'image miniature',
'PreviewImageStart' => 'Début d\'image miniature',
'PreviewImageValid' => {
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'PreviewQuality' => {
PrintConv => {
'Normal' => 'Normale',
},
},
'PreviewSettingsDigest' => 'Digest des réglages d\'aperçu',
'PreviewSettingsName' => 'Nom des réglages d\'aperçu',
'PrimaryAFPoint' => {
PrintConv => {
'Bottom' => 'Bas',
'C6 (Center)' => 'C6 (Centre)',
'Center' => 'Centre',
'Mid-left' => 'Milieu gauche',
'Mid-right' => 'Milieu droit',
'Top' => 'Haut',
},
},
'PrimaryChromaticities' => 'Chromaticité des couleurs primaires',
'PrimaryPlatform' => 'Plateforme primaire',
'ProcessingSoftware' => 'Logiciel de traitement',
'Producer' => 'Producteur',
'ProductID' => 'ID de produit',
'ProductionCode' => 'L\'appareil est passé en SAV',
'ProfileCMMType' => 'Type de profil CMM',
'ProfileCalibrationSig' => 'Signature de calibration de profil',
'ProfileClass' => {
Description => 'Classe de profil',
PrintConv => {
'Abstract Profile' => 'Profil de résumé',
'ColorSpace Conversion Profile' => 'Profil de conversion d\'espace de couleur',
'DeviceLink Profile' => 'Profil de liaison',
'Display Device Profile' => 'Profil d\'appareil d\'affichage',
'Input Device Profile' => 'Profil d\'appareil d\'entrée',
'NamedColor Profile' => 'Profil de couleur nommée',
'Nikon Input Device Profile (NON-STANDARD!)' => 'Profil Nikon ("nkpf")',
'Output Device Profile' => 'Profil d\'appareil de sortie',
},
},
'ProfileConnectionSpace' => 'Espace de connexion de profil',
'ProfileCopyright' => 'Copyright du profil',
'ProfileCreator' => 'Créateur du profil',
'ProfileDateTime' => 'Horodatage du profil',
'ProfileDescription' => 'Description du profil',
'ProfileDescriptionML' => 'Description de profil ML',
'ProfileEmbedPolicy' => {
Description => 'Règles d\'usage du profil incluses',
PrintConv => {
'Allow Copying' => 'Permet la copie',
'Embed if Used' => 'Inclus si utilisé',
'Never Embed' => 'Jamais inclus',
'No Restrictions' => 'Pas de restriction',
},
},
'ProfileFileSignature' => 'Signature de fichier de profil',
'ProfileHueSatMapData1' => 'Données de profil teinte sat. 1',
'ProfileHueSatMapData2' => 'Données de profil teinte sat. 2',
'ProfileHueSatMapDims' => 'Divisions de teinte',
'ProfileID' => 'ID du profil',
'ProfileLookTableData' => 'Données de table de correspondance de profil',
'ProfileLookTableDims' => 'Divisions de teinte',
'ProfileName' => 'Nom du profil',
'ProfileSequenceDesc' => 'Description de séquence du profil',
'ProfileToneCurve' => 'Courbe de ton du profil',
'ProfileVersion' => 'Version de profil',
'ProgramISO' => 'Programme ISO',
'ProgramLine' => {
Description => 'Ligne de programme',
PrintConv => {
'Depth' => 'Priorité profondeur de champ',
'Hi Speed' => 'Priorité grande vitesse',
'MTF' => 'Priorité FTM',
'Normal' => 'Normale',
},
},
'ProgramMode' => {
PrintConv => {
'None' => 'Aucune',
'Sunset' => 'Coucher de soleil',
'Text' => 'Texte',
},
},
'ProgramShift' => 'Décalage Programme',
'ProgramVersion' => 'Version du programme',
'Protect' => 'Protéger',
'Province-State' => 'État / Région',
'Publisher' => 'Editeur',
'Quality' => {
Description => 'Qualité',
PrintConv => {
'Best' => 'La meilleure',
'Better' => 'Meilleure',
'Compressed RAW' => 'cRAW',
'Compressed RAW + JPEG' => 'cRAW+JPEG',
'Extra Fine' => 'Extra fine',
'Good' => 'Bonne',
'Low' => 'Bas',
'Normal' => 'Normale',
'RAW + JPEG' => 'RAW+JPEG',
},
},
'QualityMode' => {
Description => 'Qualité',
PrintConv => {
'Fine' => 'Haute',
'Normal' => 'Normale',
},
},
'QuantizationMethod' => {
Description => 'Méthode de quantification',
PrintConv => {
'Color Space Specific' => 'Spécifique à l\'espace de couleur',
'Compression Method Specific' => 'Spécifique à la méthode de compression',
'Gamma Compensated' => 'Compensée gamma',
'IPTC Ref B' => 'IPTC réf "B"',
'Linear Density' => 'Densité linéaire',
'Linear Dot Percent' => 'Pourcentage de point linéaire',
'Linear Reflectance/Transmittance' => 'Réflectance/transmittance linéaire',
},
},
'QuickAdjust' => 'Réglages rapides',
'QuickControlDialInMeter' => {
Description => 'Molette de contrôle rapide en mesure',
PrintConv => {
'AF point selection' => 'Sélection collimateur AF',
'Exposure comp/Aperture' => 'Correction exposition/ouverture',
'ISO speed' => 'Sensibilité ISO',
},
},
'QuickShot' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'RAFVersion' => 'Version RAF',
'RasterPadding' => 'Remplissage raster',
'RasterizedCaption' => 'Légende rastérisée',
'Rating' => 'Évaluation',
'RatingPercent' => 'Rapport en pourcentage',
'RawAndJpgRecording' => {
Description => 'Enregistrement RAW et JPEG',
PrintConv => {
'JPEG (Best)' => 'JPEG (le meilleur)',
'JPEG (Better)' => 'JPEG (meilleur)',
'JPEG (Good)' => 'JPEG (bon)',
'RAW (DNG, Best)' => 'RAW (DNG, le meilleur)',
'RAW (DNG, Better)' => 'RAW (DNG, meilleur)',
'RAW (DNG, Good)' => 'RAW (DNG, bon)',
'RAW (PEF, Best)' => 'RAW (PEF, le meilleur)',
'RAW (PEF, Better)' => 'RAW (PEF, meilleur)',
'RAW (PEF, Good)' => 'RAW (PEF, bon)',
'RAW+JPEG (DNG, Best)' => 'RAW+JPEG (DNG, le meilleur)',
'RAW+JPEG (DNG, Better)' => 'RAW+JPEG (DNG, meilleur)',
'RAW+JPEG (DNG, Good)' => 'RAW+JPEG (DNG, bon)',
'RAW+JPEG (PEF, Best)' => 'RAW+JPEG (PEF, le meilleur)',
'RAW+JPEG (PEF, Better)' => 'RAW+JPEG (PEF, meilleur)',
'RAW+JPEG (PEF, Good)' => 'RAW+JPEG (PEF, bon)',
'RAW+Large/Fine' => 'RAW+grande/fine',
'RAW+Large/Normal' => 'RAW+grande/normale',
'RAW+Medium/Fine' => 'RAW+moyenne/fine',
'RAW+Medium/Normal' => 'RAW+moyenne/normale',
'RAW+Small/Fine' => 'RAW+petite/fine',
'RAW+Small/Normal' => 'RAW+petite/normale',
},
},
'RawDataOffset' => 'Décalage données Raw',
'RawDataUniqueID' => 'ID unique de données brutes',
'RawDevAutoGradation' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'RawDevPMPictureTone' => {
PrintConv => {
'Blue' => 'Bleu',
'Green' => 'Vert',
},
},
'RawDevPM_BWFilter' => {
PrintConv => {
'Green' => 'Vert',
'Red' => 'Rouge',
'Yellow' => 'Jaune',
},
},
'RawDevPictureMode' => {
PrintConv => {
'Natural' => 'Naturel',
},
},
'RawDevWhiteBalance' => {
PrintConv => {
'Color Temperature' => 'Température de couleur',
},
},
'RawImageCenter' => 'Centre Image RAW',
'RawImageDigest' => 'Digest d\'image brute',
'RawImageHeight' => 'Hauteur de l\'image brute',
'RawImageSize' => 'Taille d\'image RAW',
'RawImageWidth' => 'Largeur de l\'image brute',
'RawJpgQuality' => {
PrintConv => {
'Normal' => 'Normale',
},
},
'RecordMode' => {
Description => 'Mode d\'enregistrement',
PrintConv => {
'Aperture Priority' => 'Priorité ouverture',
'Manual' => 'Manuelle',
'Shutter Priority' => 'Priorité vitesse',
},
},
'RecordingMode' => {
PrintConv => {
'Landscape' => 'Paysage',
'Manual' => 'Manuelle',
'Night Scene' => 'Nocturne',
},
},
'RedBalance' => 'Balance rouge',
'RedEyeCorrection' => {
PrintConv => {
'Automatic' => 'Auto',
'Off' => 'Désactivé',
},
},
'RedEyeReduction' => {
Description => 'Réduction yeux rouges',
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'RedMatrixColumn' => 'Colonne de matrice rouge',
'RedTRC' => 'Courbe de reproduction des tons rouges',
'ReductionMatrix1' => 'Matrice de réduction 1',
'ReductionMatrix2' => 'Matrice de réduction 2',
'ReferenceBlackWhite' => 'Paire de valeurs de référence noir et blanc',
'ReferenceDate' => 'Date de référence',
'ReferenceNumber' => 'Numéro de référence',
'ReferenceService' => 'Service de référence',
'RelatedImageFileFormat' => 'Format de fichier image apparenté',
'RelatedImageHeight' => 'Hauteur d\'image apparentée',
'RelatedImageWidth' => 'Largeur d\'image apparentée',
'RelatedSoundFile' => 'Fichier audio apparenté',
'ReleaseButtonToUseDial' => {
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'ReleaseDate' => 'Date de version',
'ReleaseTime' => 'Heure de version',
'RenderingIntent' => {
Description => 'Intention de rendu',
PrintConv => {
'ICC-Absolute Colorimetric' => 'Colorimétrique absolu',
'Media-Relative Colorimetric' => 'Colorimétrique relatif',
'Perceptual' => 'Perceptif',
},
},
'ResampleParamsQuality' => {
PrintConv => {
'Low' => 'Bas',
},
},
'Resaved' => {
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'Resolution' => 'Résolution d\'image',
'ResolutionUnit' => {
Description => 'Unité de résolution en X et Y',
PrintConv => {
'None' => 'Aucune',
'cm' => 'Pixels/cm',
'inches' => 'Pouce',
},
},
'RetouchHistory' => {
Description => 'Historique retouche',
PrintConv => {
'None' => 'Aucune',
},
},
'RevisionNumber' => 'Numéro de révision',
'Rights' => 'Droits',
'Rotation' => {
PrintConv => {
'Rotate 270 CW' => 'Rotation à 270 ° - sens antihoraire',
'Rotate 90 CW' => 'Rotation 90 ° - sens horaire',
},
},
'RowInterleaveFactor' => 'Facteur d\'entrelacement des lignes',
'RowsPerStrip' => 'Nombre de rangées par bande',
'SMaxSampleValue' => 'Valeur maxi d\'échantillon S',
'SMinSampleValue' => 'Valeur mini d\'échantillon S',
'SPIFFVersion' => 'Version SPIFF',
'SRAWQuality' => {
PrintConv => {
'n/a' => 'Non établie',
},
},
'SRActive' => {
Description => 'Réduction de bougé active',
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'SRFocalLength' => 'Focale de réduction de bougé',
'SRHalfPressTime' => 'Temps entre mesure et déclenchement',
'SRResult' => {
Description => 'Stabilisation',
PrintConv => {
'Not stabilized' => 'Non stabilisé',
},
},
'SVGVersion' => 'Version SVG',
'SafetyShift' => {
Description => 'Décalage de sécurité',
PrintConv => {
'Disable' => 'Désactivé',
'Enable (ISO speed)' => 'Activé (sensibilité ISO)',
'Enable (Tv/Av)' => 'Activé (Tv/Av)',
},
},
'SafetyShiftInAvOrTv' => {
Description => 'Décalage de sécurité Av ou Tv',
PrintConv => {
'Disable' => 'Désactivé',
'Enable' => 'Activé',
},
},
'SampleFormat' => {
Description => 'Format d\'échantillon',
PrintConv => {
'Complex int' => 'Entier complexe',
'Float' => 'Réel à virgule flottante',
'Signed' => 'Entier signé',
'Undefined' => 'Non défini',
'Unsigned' => 'Entier non signé',
},
},
'SampleStructure' => {
Description => 'Structure d\'échantillonnage',
PrintConv => {
'CompressionDependent' => 'Définie dans le processus de compression',
'Orthogonal4-2-2Sampling' => 'Orthogonale, avec les fréquences d\'échantillonnage dans le rapport 4:2:2:(4)',
'OrthogonalConstangSampling' => 'Orthogonale, avec les mêmes fréquences d\'échantillonnage relatives sur chaque composante',
},
},
'SamplesPerPixel' => 'Nombre de composantes',
'Saturation' => {
PrintConv => {
'+1 (medium high)' => '+1 (Assez fort)',
'+2 (high)' => '+2 (Forte)',
'+3 (very high)' => '+3 (Très fort)',
'+4 (highest)' => '+4',
'+4 (maximum)' => '+4',
'-1 (medium low)' => '-1 (Assez faible)',
'-2 (low)' => '-2 (Faible)',
'-3 (very low)' => '-3 (Très faible)',
'-4 (lowest)' => '-4',
'-4 (minimum)' => '-4',
'0 (normal)' => '0 (Normale)',
'High' => 'Forte',
'Low' => 'Faible',
'None' => 'Non établie',
'Normal' => 'Normale',
},
},
'ScanImageEnhancer' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'ScanningDirection' => {
Description => 'Direction de scannage',
PrintConv => {
'Bottom-Top, L-R' => 'De bas en haut, de gauche à droite',
'Bottom-Top, R-L' => 'De bas en haut, de droite à gauche',
'L-R, Bottom-Top' => 'De gauche à droite, de bas en haut',
'L-R, Top-Bottom' => 'De gauche à droite, de haut en bas',
'R-L, Bottom-Top' => 'De droite à gauche, de bas en haut',
'R-L, Top-Bottom' => 'De droite à gauche, de haut en bas',
'Top-Bottom, L-R' => 'De haut en bas, de gauche à droite',
'Top-Bottom, R-L' => 'De haut en bas, de droite à gauche',
},
},
'Scene' => 'Scène',
'SceneAssist' => 'Assistant Scene',
'SceneCaptureType' => {
Description => 'Type de capture de scène',
PrintConv => {
'Landscape' => 'Paysage',
'Night' => 'Scène de nuit',
},
},
'SceneMode' => {
Description => 'Modes scène',
PrintConv => {
'3D Sweep Panorama' => '3D',
'Anti Motion Blur' => 'Anti-flou de mvt',
'Aperture Priority' => 'Priorité ouverture',
'Auto' => 'Auto.',
'Candlelight' => 'Bougie',
'Cont. Priority AE' => 'AE priorité continue',
'Handheld Night Shot' => 'Vue de nuit manuelle',
'Landscape' => 'Paysage',
'Manual' => 'Manuelle',
'Night Portrait' => 'Portrait nocturne',
'Night Scene' => 'Nocturne',
'Night View/Portrait' => 'Vision/portrait nocturne',
'Normal' => 'Normale',
'Off' => 'Désactivé',
'Shutter Priority' => 'Priorité vitesse',
'Snow' => 'Neige',
'Sunset' => 'Coucher de soleil',
'Super Macro' => 'Super macro',
'Sweep Panorama' => 'Panora. par balayage',
'Text' => 'Texte',
},
},
'SceneModeUsed' => {
PrintConv => {
'Aperture Priority' => 'Priorité ouverture',
'Candlelight' => 'Bougie',
'Landscape' => 'Paysage',
'Manual' => 'Manuelle',
'Shutter Priority' => 'Priorité vitesse',
'Snow' => 'Neige',
'Sunset' => 'Coucher de soleil',
'Text' => 'Texte',
},
},
'SceneSelect' => {
PrintConv => {
'Night' => 'Scène de nuit',
'Off' => 'Désactivé',
},
},
'SceneType' => {
Description => 'Type de scène',
PrintConv => {
'Directly photographed' => 'Image photographiée directement',
},
},
'SecurityClassification' => {
Description => 'Classement de sécurité',
PrintConv => {
'Confidential' => 'Confidentiel',
'Restricted' => 'Restreint',
'Top Secret' => 'Top secret',
'Unclassified' => 'Non classé',
},
},
'SelectableAFPoint' => {
Description => 'Collimateurs AF sélectionnables',
PrintConv => {
'11 points' => '11 collimateurs',
'19 points' => '19 collimateurs',
'45 points' => '45 collimateurs',
'Inner 9 points' => '9 collimateurs centraux',
'Outer 9 points' => '9 collimateurs périphériques',
},
},
'SelfTimer' => {
Description => 'Retardateur',
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'SelfTimer2' => 'Retardateur (2)',
'SelfTimerMode' => 'Mode auto-timer',
'SensingMethod' => {
Description => 'Méthode de capture',
PrintConv => {
'Color sequential area' => 'Capteur couleur séquentiel',
'Color sequential linear' => 'Capteur couleur séquentiel linéaire',
'Monochrome area' => 'Capteur monochrome',
'Monochrome linear' => 'Capteur linéaire monochrome',
'Not defined' => 'Non définie',
'One-chip color area' => 'Capteur monochip couleur',
'Three-chip color area' => 'Capteur trois chips couleur',
'Trilinear' => 'Capteur trilinéaire',
'Two-chip color area' => 'Capteur deux chips couleur',
},
},
'SensitivityAdjust' => 'Réglage de sensibilité',
'SensitivitySteps' => {
Description => 'Pas de sensibilité',
PrintConv => {
'1 EV Steps' => 'Pas de 1 IL',
'As EV Steps' => 'Comme pas IL',
},
},
'SensitivityType' => 'Type de sensibilité',
'SensorCleaning' => {
PrintConv => {
'Disable' => 'Désactivé',
'Enable' => 'Activé',
},
},
'SensorHeight' => 'Hauteur du capteur',
'SensorPixelSize' => 'Taille des pixels du capteur',
'SensorWidth' => 'Largeur du capteur',
'SequenceNumber' => 'Numéro de Séquence',
'SequentialShot' => {
PrintConv => {
'None' => 'Aucune',
},
},
'SerialNumber' => 'Numéro de série',
'ServiceIdentifier' => 'Identificateur de service',
'SetButtonCrossKeysFunc' => {
Description => 'Réglage touche SET/joypad',
PrintConv => {
'Cross keys: AF point select' => 'Joypad:Sélec. collim. AF',
'Normal' => 'Normale',
'Set: Flash Exposure Comp' => 'SET:Cor expo flash',
'Set: Parameter' => 'SET:Changer de paramètres',
'Set: Picture Style' => 'SET:Style d’image',
'Set: Playback' => 'SET:Lecture',
'Set: Quality' => 'SET:Qualité',
},
},
'SetButtonWhenShooting' => {
Description => 'Touche SET au déclenchement',
PrintConv => {
'Change parameters' => 'Changer de paramètres',
'Default (no function)' => 'Normal (désactivée)',
'Disabled' => 'Désactivée',
'Flash exposure compensation' => 'Correction expo flash',
'ISO speed' => 'Sensibilité ISO',
'Image playback' => 'Lecture de l\'image',
'Image quality' => 'Changer de qualité',
'Image size' => 'Taille d\'image',
'LCD monitor On/Off' => 'Écran LCD On/Off',
'Menu display' => 'Affichage du menu',
'Normal (disabled)' => 'Normal (désactivée)',
'Picture style' => 'Style d\'image',
'Quick control screen' => 'Écran de contrôle rapide',
'Record func. + media/folder' => 'Fonction enregistrement + média/dossier',
'Record movie (Live View)' => 'Enr. vidéo (visée écran)',
'White balance' => 'Balance des blancs',
},
},
'SetFunctionWhenShooting' => {
Description => 'Touche SET au déclenchement',
PrintConv => {
'Change Parameters' => 'Changer de paramètres',
'Change Picture Style' => 'Style d\'image',
'Change quality' => 'Changer de qualité',
'Default (no function)' => 'Normal (désactivée)',
'Image replay' => 'Lecture de l\'image',
'Menu display' => 'Affichage du menu',
},
},
'ShadingCompensation' => {
Description => 'Compensation de l\'ombrage',
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'ShadingCompensation2' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'ShadowScale' => 'Echelle d\'ombre',
'ShakeReduction' => {
Description => 'Réduction du bougé (réglage)',
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'ShakeReductionInfo' => 'Stabilisation',
'Sharpness' => {
Description => 'Accentuation',
PrintConv => {
'+1 (medium hard)' => '+1 (Assez dure)',
'+2 (hard)' => '+2 (Dure)',
'+3 (very hard)' => '+3 (Très dure)',
'+4 (hardest)' => '+4',
'+4 (maximum)' => '+4',
'-1 (medium soft)' => '-1 (Assez dure)',
'-2 (soft)' => '-2 (Douce)',
'-3 (very soft)' => '-3 (Très douce)',
'-4 (minimum)' => '-4',
'-4 (softest)' => '-4',
'0 (normal)' => '0 (Normale)',
'Hard' => 'Dure',
'Normal' => 'Normale',
'Sharp' => 'Dure',
'Soft' => 'Douce',
'n/a' => 'Non établie',
},
},
'SharpnessFrequency' => {
PrintConv => {
'High' => 'Haut',
'Highest' => 'Plus haut',
'Low' => 'Doux',
'n/a' => 'Non établie',
},
},
'ShootingMode' => {
Description => 'Télécommande IR',
PrintConv => {
'Aerial Photo' => 'Photo aérienne',
'Aperture Priority' => 'Priorité ouverture',
'Baby' => 'Bébé',
'Beach' => 'Plage',
'Candlelight' => 'Eclairage Bougie',
'Color Effects' => 'Effets de couleurs',
'Fireworks' => 'Feu d\'artifice',
'Food' => 'Nourriture',
'High Sensitivity' => 'Haute sensibilité',
'High Speed Continuous Shooting' => 'Déclenchement continu à grande vitesse',
'Intelligent Auto' => 'Mode Auto intelligent',
'Intelligent ISO' => 'ISO Intelligent',
'Manual' => 'Manuel',
'Movie Preview' => 'Prévisualisation vidéo',
'Night Portrait' => 'Portrait de nuit',
'Normal' => 'Normale',
'Panning' => 'Panoramique',
'Panorama Assist' => 'Assistant Panorama',
'Party' => 'Fête',
'Pet' => 'Animal domestique',
'Program' => 'Programme',
'Scenery' => 'Paysage',
'Shutter Priority' => 'Priorité vitesse',
'Snow' => 'Neige',
'Soft Skin' => 'Peau douce',
'Starry Night' => 'Nuit étoilée',
'Sunset' => 'Coucher de soleil',
'Underwater' => 'Subaquatique',
},
},
'ShortDocumentID' => 'ID court de document',
'ShortReleaseTimeLag' => {
Description => 'Inertie au déclenchement réduite',
PrintConv => {
'Disable' => 'Désactivé',
'Enable' => 'Activé',
},
},
'ShotInfoVersion' => 'Version des Infos prise de vue',
'Shutter-AELock' => {
Description => 'Déclencheur/Touche verr. AE',
PrintConv => {
'AE lock/AF' => 'Verrouillage AE/autofocus',
'AE/AF, No AE lock' => 'AE/AF, pas de verrou. AE',
'AF/AE lock' => 'Autofocus/verrouillage AE',
'AF/AF lock' => 'Autofocus/verrouillage AF',
'AF/AF lock, No AE lock' => 'AF/verr.AF, pas de verr.AE',
},
},
'ShutterAELButton' => {
Description => 'Déclencheur/Touche verr. AE',
PrintConv => {
'AE lock/AF' => 'Verrouillage AE/Autofocus',
'AE/AF, No AE lock' => 'AE/AF, pas de verrou. AE',
'AF/AE lock stop' => 'Autofocus/Verrouillage AE',
'AF/AF lock, No AE lock' => 'AF/verr.AF, pas de verr.AE',
},
},
'ShutterButtonAFOnButton' => {
Description => 'Déclencheur/Touche AF',
PrintConv => {
'AE lock/Metering + AF start' => 'Mémo expo/lct. mesure+AF',
'Metering + AF start' => 'Mesure + lancement AF',
'Metering + AF start/AF stop' => 'Mesure + lancement/arrêt AF',
'Metering + AF start/disable' => 'Lct. mesure+AF/désactivée',
'Metering start/Meter + AF start' => 'Lct. mesure/lct. mesure+AF',
},
},
'ShutterCount' => 'Comptage des déclenchements',
'ShutterCurtainSync' => {
Description => 'Synchronisation du rideau',
PrintConv => {
'1st-curtain sync' => 'Synchronisation premier rideau',
'2nd-curtain sync' => 'Synchronisation deuxième rideau',
},
},
'ShutterMode' => {
PrintConv => {
'Aperture Priority' => 'Priorité ouverture',
},
},
'ShutterReleaseButtonAE-L' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'ShutterReleaseNoCFCard' => {
Description => 'Déclench. obtur. sans carte',
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'ShutterSpeed' => 'Temps de pose',
'ShutterSpeedRange' => {
Description => 'Régler gamme de vitesses',
PrintConv => {
'Disable' => 'Désactivé',
'Enable' => 'Activée',
},
},
'ShutterSpeedValue' => 'Vitesse d\'obturation',
'SidecarForExtension' => 'Extension',
'SimilarityIndex' => 'Indice de similarité',
'SlaveFlashMeteringSegments' => 'Segments de mesure flash esclave',
'SlideShow' => {
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'SlowShutter' => {
Description => 'Vitesse d\'obturation lente',
PrintConv => {
'Night Scene' => 'Nocturne',
'None' => 'Aucune',
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'SlowSync' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'Software' => 'Logiciel',
'SpatialFrequencyResponse' => 'Réponse spatiale en fréquence',
'SpecialEffectsOpticalFilter' => {
PrintConv => {
'None' => 'Aucune',
},
},
'SpectralSensitivity' => 'Sensibilité spectrale',
'SpotMeterLinkToAFPoint' => {
Description => 'Mesure spot liée au collimateur AF',
PrintConv => {
'Disable (use center AF point)' => 'Désactivée (utiliser collimateur AF central)',
'Enable (use active AF point)' => 'Activé (utiliser collimateur AF actif)',
},
},
'SpotMeteringMode' => {
PrintConv => {
'Center' => 'Centre',
},
},
'State' => 'État / Région',
'StreamType' => {
PrintConv => {
'Text' => 'Texte',
},
},
'StripByteCounts' => 'Octets par bande compressée',
'StripOffsets' => 'Emplacement des données image',
'Sub-location' => 'Lieu',
'SubSecCreateDate' => 'Date de la création des données numériques',
'SubSecDateTimeOriginal' => 'Date de la création des données originales',
'SubSecModifyDate' => 'Date de modification de fichier',
'SubSecTime' => 'Fractions de seconde de DateTime',
'SubSecTimeDigitized' => 'Fractions de seconde de DateTimeDigitized',
'SubSecTimeOriginal' => 'Fractions de seconde de DateTimeOriginal',
'SubTileBlockSize' => 'Taille de bloc de sous-tuile',
'SubfileType' => 'Type du nouveau sous-fichier',
'SubimageColor' => {
PrintConv => {
'RGB' => 'RVB',
},
},
'Subject' => 'Sujet',
'SubjectArea' => 'Zone du sujet',
'SubjectCode' => 'Code sujet',
'SubjectDistance' => 'Distance du sujet',
'SubjectDistanceRange' => {
Description => 'Intervalle de distance du sujet',
PrintConv => {
'Close' => 'Vue rapprochée',
'Distant' => 'Vue distante',
'Unknown' => 'Inconnu',
},
},
'SubjectLocation' => 'Zone du sujet',
'SubjectProgram' => {
PrintConv => {
'None' => 'Aucune',
'Sunset' => 'Coucher de soleil',
'Text' => 'Texte',
},
},
'SubjectReference' => 'Code de sujet',
'Subsystem' => {
PrintConv => {
'Unknown' => 'Inconnu',
},
},
'SuperMacro' => {
PrintConv => {
'Off' => 'Désactivé',
},
},
'SuperimposedDisplay' => {
Description => 'Affichage superposé',
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'SupplementalCategories' => 'Catégorie d\'appoint',
'SupplementalType' => {
Description => 'Type de supplément',
PrintConv => {
'Main Image' => 'Non établi',
'Rasterized Caption' => 'Titre rastérisé',
'Reduced Resolution Image' => 'Image de résolution réduite',
},
},
'SvISOSetting' => 'Réglage ISO Sv',
'SwitchToRegisteredAFPoint' => {
Description => 'Activer collimateur enregistré',
PrintConv => {
'Assist' => 'Touche d\'assistance',
'Assist + AF' => 'Touche d\'assistance + touche AF',
'Disable' => 'Désactivé',
'Enable' => 'Activé',
'Only while pressing assist' => 'Seulement en appuyant touche d\'assistance',
},
},
'T4Options' => 'Bits de remplissage ajoutés',
'T6Options' => 'Options T6',
'TTL_DA_ADown' => 'Segment de mesure flash esclave 6',
'TTL_DA_AUp' => 'Segment de mesure flash esclave 5',
'TTL_DA_BDown' => 'Segment de mesure flash esclave 8',
'TTL_DA_BUp' => 'Segment de mesure flash esclave 7',
'Tagged' => {
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'TargetPrinter' => 'Imprimante cible',
'Technology' => {
Description => 'Technologie',
PrintConv => {
'Active Matrix Display' => 'Afficheur à matrice active',
'Cathode Ray Tube Display' => 'Afficheur à tube cathodique',
'Digital Camera' => 'Appareil photo numérique',
'Dye Sublimation Printer' => 'Imprimante à sublimation thermique',
'Electrophotographic Printer' => 'Imprimante électrophotographique',
'Electrostatic Printer' => 'Imprimante électrostatique',
'Film Scanner' => 'Scanner de film',
'Flexography' => 'Flexographie',
'Ink Jet Printer' => 'Imprimante à jet d\'encre',
'Offset Lithography' => 'Lithographie offset',
'Passive Matrix Display' => 'Afficheur à matrice passive',
'Photo CD' => 'CD photo',
'Photo Image Setter' => 'Cadre photo',
'Photographic Paper Printer' => 'Imprimante à papier photo',
'Projection Television' => 'Téléviseur à projection',
'Reflective Scanner' => 'Scanner à réflexion',
'Silkscreen' => 'Ecran de soie',
'Thermal Wax Printer' => 'Imprimante thermique à cire',
'Video Camera' => 'Caméra vidéo',
'Video Monitor' => 'Moniteur vidéo',
},
},
'Teleconverter' => {
PrintConv => {
'None' => 'Aucune',
},
},
'Text' => 'Texte',
'TextStamp' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'Thresholding' => 'Seuil',
'ThumbnailHeight' => 'Hauteur de la vignette',
'ThumbnailImage' => 'Vignette',
'ThumbnailImageSize' => 'Taille des miniatures',
'ThumbnailLength' => 'Longueur de la vignette',
'ThumbnailOffset' => 'Décalage de la vignette',
'ThumbnailWidth' => 'Hauteur de la vignette',
'TileByteCounts' => 'Nombre d\'octets d\'élément',
'TileDepth' => 'Profondeur d\'élément',
'TileLength' => 'Longueur d\'élément',
'TileOffsets' => 'Décalages d\'élément',
'TileWidth' => 'Largeur d\'élément',
'Time' => 'Heure',
'TimeCreated' => 'Heure de création',
'TimeScaleParamsQuality' => {
PrintConv => {
'Low' => 'Bas',
},
},
'TimeSent' => 'Heure d\'envoi',
'TimeSincePowerOn' => 'Temps écoulé depuis la mise en marche',
'TimeZone' => 'Fuseau horaire',
'TimeZoneOffset' => 'Offset de zone de date',
'TimerLength' => {
Description => 'Durée du retardateur',
PrintConv => {
'Disable' => 'Désactivé',
'Enable' => 'Activée',
},
},
'Title' => 'Titre',
'ToneComp' => 'Correction de tonalité',
'ToneCurve' => {
Description => 'Courbe de ton',
PrintConv => {
'Manual' => 'Manuelle',
},
},
'ToneCurveActive' => {
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'ToneCurves' => 'Courbes de ton',
'ToningEffect' => {
Description => 'Virage',
PrintConv => {
'Blue' => 'Bleu',
'Green' => 'Vert',
'None' => 'Aucune',
'Red' => 'Rouge',
'Yellow' => 'Jaune',
'n/a' => 'Non établie',
},
},
'ToningEffectMonochrome' => {
PrintConv => {
'Blue' => 'Bleu',
'Green' => 'Vert',
'None' => 'Aucune',
},
},
'ToningSaturation' => 'Saturation du virage',
'TransferFunction' => 'Fonction de transfert',
'TransferRange' => 'Intervalle de transfert',
'Transformation' => {
PrintConv => {
'Horizontal (normal)' => '0° (haut/gauche)',
'Mirror horizontal' => '0° (haut/droit)',
'Mirror horizontal and rotate 270 CW' => '90° sens horaire (gauche/haut)',
'Mirror horizontal and rotate 90 CW' => '90° sens antihoraire (droit/bas)',
'Mirror vertical' => '180° (bas/gauche)',
'Rotate 180' => '180° (bas/droit)',
'Rotate 270 CW' => '90° sens horaire (gauche/bas)',
'Rotate 90 CW' => '90° sens antihoraire (droit/haut)',
},
},
'TransmissionReference' => 'Référence transmission',
'TransparencyIndicator' => 'Indicateur de transparence',
'TrapIndicator' => 'Indicateur de piège',
'Trapped' => {
Description => 'Piégé',
PrintConv => {
'False' => 'Faux',
'True' => 'Vrai',
'Unknown' => 'Inconnu',
},
},
'TravelDay' => 'Date du Voyage',
'TvExposureTimeSetting' => 'Réglage de temps de pose Tv',
'URL' => 'URL ',
'USMLensElectronicMF' => {
Description => 'MF électronique à objectif USM',
PrintConv => {
'Always turned off' => 'Toujours débrayé',
'Disable after one-shot AF' => 'Désactivée après One-Shot AF',
'Disable in AF mode' => 'Désactivée en mode AF',
'Enable after one-shot AF' => 'Activée après AF One-Shot',
'Turns off after one-shot AF' => 'Débrayé après One-Shot AF',
'Turns on after one-shot AF' => 'Activé après One-Shot AF',
},
},
'Uncompressed' => {
Description => 'Non.comprimé',
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'UniqueCameraModel' => 'Nom unique de modèle d\'appareil',
'UniqueDocumentID' => 'ID unique de document',
'UniqueObjectName' => 'Nom Unique d\'Objet',
'Unknown' => 'Inconnu',
'Unsharp1Color' => {
PrintConv => {
'Blue' => 'Bleu',
'Green' => 'Vert',
'RGB' => 'RVB',
'Red' => 'Rouge',
'Yellow' => 'Jaune',
},
},
'Unsharp2Color' => {
PrintConv => {
'Blue' => 'Bleu',
'Green' => 'Vert',
'RGB' => 'RVB',
'Red' => 'Rouge',
'Yellow' => 'Jaune',
},
},
'Unsharp3Color' => {
PrintConv => {
'Blue' => 'Bleu',
'Green' => 'Vert',
'RGB' => 'RVB',
'Red' => 'Rouge',
'Yellow' => 'Jaune',
},
},
'Unsharp4Color' => {
PrintConv => {
'Blue' => 'Bleu',
'Green' => 'Vert',
'RGB' => 'RVB',
'Red' => 'Rouge',
'Yellow' => 'Jaune',
},
},
'UnsharpMask' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'Urgency' => {
Description => 'Urgence',
PrintConv => {
'0 (reserved)' => '0 (réservé pour utilisation future)',
'1 (most urgent)' => '1 (très urgent)',
'5 (normal urgency)' => '5 (normalement urgent)',
'8 (least urgent)' => '8 (moins urgent)',
'9 (user-defined priority)' => '9 (réservé pour utilisation future)',
},
},
'UsableMeteringModes' => {
Description => 'Sélectionner modes de mesure',
PrintConv => {
'Disable' => 'Désactivé',
'Enable' => 'Activée',
},
},
'UsableShootingModes' => {
Description => 'Sélectionner modes de prise de vue',
PrintConv => {
'Disable' => 'Désactivé',
'Enable' => 'Activée',
},
},
'UsageTerms' => 'Conditions d\'Utilisation',
'UserComment' => 'Commentaire utilisateur',
'UserDef1PictureStyle' => {
PrintConv => {
'Landscape' => 'Paysage',
},
},
'UserDef2PictureStyle' => {
PrintConv => {
'Landscape' => 'Paysage',
},
},
'UserDef3PictureStyle' => {
PrintConv => {
'Landscape' => 'Paysage',
},
},
'VRDVersion' => 'Version VRD',
'VRInfo' => 'Information stabilisateur',
'VRInfoVersion' => 'Info Version VR',
'VR_0x66' => {
PrintConv => {
'Off' => 'Désactivé',
},
},
'VariProgram' => 'Variprogramme',
'VibrationReduction' => {
Description => 'Reduction des vibrations',
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
'n/a' => 'Non établie',
},
},
'VideoCardGamma' => 'Gamma de la carte vidéo',
'ViewInfoDuringExposure' => {
Description => 'Infos viseur pendant exposition',
PrintConv => {
'Disable' => 'Désactivé',
'Enable' => 'Activé',
},
},
'ViewfinderWarning' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'ViewingCondDesc' => 'Description des conditions de visionnage',
'ViewingCondIlluminant' => 'Illuminant des conditions de visionnage',
'ViewingCondIlluminantType' => 'Type d\'illuminant des conditions de visionnage',
'ViewingCondSurround' => 'Environnement des conditions de visionnage',
'VignetteControl' => {
Description => 'Controle du vignettage',
PrintConv => {
'High' => 'Haut',
'Low' => 'Bas',
'Normal' => 'Normale',
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'VoiceMemo' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'WBAdjLighting' => {
PrintConv => {
'Daylight (cloudy)' => 'Lumière du jour (2)',
'Daylight (direct sunlight)' => 'Lumière du jour (0)',
'Daylight (shade)' => 'Lumière du jour (1)',
'None' => 'Aucune',
},
},
'WBBlueLevel' => 'Niveau Bleu Balance des Blancs',
'WBBracketMode' => {
PrintConv => {
'Off' => 'Désactivé',
},
},
'WBFineTuneActive' => {
PrintConv => {
'No' => 'Non',
'Yes' => 'Oui',
},
},
'WBGreenLevel' => 'Niveau Vert Balance des Blancs',
'WBMediaImageSizeSetting' => {
Description => 'Réglage de balance des blancs + taille d\'image',
PrintConv => {
'LCD monitor' => 'Écran LCD',
'Rear LCD panel' => 'Panneau LCD arrière',
},
},
'WBRedLevel' => 'Niveau Rouge Balance des Blancs',
'WBShiftAB' => 'Décalage Balance Blancs ambre-bleu',
'WBShiftGM' => 'Décalage Balance Blancs vert-magenta',
'WB_GBRGLevels' => 'Niveaux BB VBRV',
'WB_GRBGLevels' => 'Niveaux BB VRBV',
'WB_GRGBLevels' => 'Niveaux BB VRVB',
'WB_RBGGLevels' => 'Niveaux BB RBVV',
'WB_RBLevels' => 'Niveaux BB RB',
'WB_RBLevels3000K' => 'Niveaux BB RB 3000K',
'WB_RBLevels3300K' => 'Niveaux BB RB 3300K',
'WB_RBLevels3600K' => 'Niveaux BB RB 3600K',
'WB_RBLevels3900K' => 'Niveaux BB RB 3800K',
'WB_RBLevels4000K' => 'Niveaux BB RB 4000K',
'WB_RBLevels4300K' => 'Niveaux BB RB 4300K',
'WB_RBLevels4500K' => 'Niveaux BB RB 4500K',
'WB_RBLevels4800K' => 'Niveaux BB RB 4800K',
'WB_RBLevels5300K' => 'Niveaux BB RB 5300K',
'WB_RBLevels6000K' => 'Niveaux BB RB 6000K',
'WB_RBLevels6600K' => 'Niveaux BB RB 6600K',
'WB_RBLevels7500K' => 'Niveaux BB RB 7500K',
'WB_RBLevelsCloudy' => 'Niveaux BB RB nuageux',
'WB_RBLevelsShade' => 'Niveaux BB RB ombre',
'WB_RBLevelsTungsten' => 'Niveaux BB RB tungstène',
'WB_RGBGLevels' => 'Niveaux BB RVBV',
'WB_RGBLevels' => 'Niveaux BB RVB',
'WB_RGBLevelsCloudy' => 'Niveaux BB RVB nuageux',
'WB_RGBLevelsDaylight' => 'Niveaux BB RVB lumière jour',
'WB_RGBLevelsFlash' => 'Niveaux BB RVB flash',
'WB_RGBLevelsFluorescent' => 'Niveaux BB RVB fluorescent',
'WB_RGBLevelsShade' => 'Niveaux BB RVB ombre',
'WB_RGBLevelsTungsten' => 'Niveaux BB RVB tungstène',
'WB_RGGBLevels' => 'Niveaux BB RVVB',
'WB_RGGBLevelsCloudy' => 'Niveaux BB RVVB nuageux',
'WB_RGGBLevelsDaylight' => 'Niveaux BB RVVB lumière jour',
'WB_RGGBLevelsFlash' => 'Niveaux BB RVVB flash',
'WB_RGGBLevelsFluorescent' => 'Niveaux BB RVVB fluorescent',
'WB_RGGBLevelsFluorescentD' => 'Niveaux BB RVVB fluorescent',
'WB_RGGBLevelsFluorescentN' => 'Niveaux BB RVVB fluo N',
'WB_RGGBLevelsFluorescentW' => 'Niveaux BB RVVB fluo W',
'WB_RGGBLevelsShade' => 'Niveaux BB RVVB ombre',
'WB_RGGBLevelsTungsten' => 'Niveaux BB RVVB tungstène',
'WCSProfiles' => 'Profil Windows Color System',
'Warning' => 'Attention',
'WebStatement' => 'Relevé Web',
'WhiteBalance' => {
Description => 'Balance des blancs',
PrintConv => {
'Auto' => 'Equilibrage des blancs automatique',
'Black & White' => 'Monochrome',
'Cloudy' => 'Temps nuageux',
'Color Temperature/Color Filter' => 'Temp. Couleur / Filtre couleur',
'Cool White Fluorescent' => 'Fluorescente type soft',
'Custom' => 'Personnalisée',
'Custom 1' => 'Personnalisée 1',
'Custom 2' => 'Personnalisée 2',
'Custom 3' => 'Personnalisée 3',
'Custom 4' => 'Personnalisée 4',
'Day White Fluorescent' => 'Fluorescente type blanc',
'Daylight' => 'Lumière du jour',
'Daylight Fluorescent' => 'Fluorescente type jour',
'Fluorescent' => 'Fluorescente',
'Manual' => 'Manuelle',
'Manual Temperature (Kelvin)' => 'Température de couleur (Kelvin)',
'Shade' => 'Ombre',
'Tungsten' => 'Tungstène (lumière incandescente)',
'Unknown' => 'Inconnu',
'User-Selected' => 'Sélectionnée par l\'utilisateur',
'Warm White Fluorescent' => 'Fluorescent blanc chaud',
'White Fluorescent' => 'Fluorescent blanc',
},
},
'WhiteBalanceAdj' => {
PrintConv => {
'Cloudy' => 'Temps nuageux',
'Daylight' => 'Lumière du jour',
'Fluorescent' => 'Fluorescente',
'Off' => 'Désactivé',
'On' => 'Activé',
'Shade' => 'Ombre',
'Tungsten' => 'Tungstène (lumière incandescente)',
},
},
'WhiteBalanceBias' => 'Décalage de Balance des blancs',
'WhiteBalanceFineTune' => 'Balance des blancs - Réglage fin',
'WhiteBalanceMode' => {
Description => 'Mode de balance des blancs',
PrintConv => {
'Auto (Cloudy)' => 'Auto (nuageux)',
'Auto (Day White Fluorescent)' => 'Auto (fluo jour)',
'Auto (Daylight Fluorescent)' => 'Auto (fluo lum. jour)',
'Auto (Daylight)' => 'Auto (lumière du jour)',
'Auto (Flash)' => 'Auto (flash)',
'Auto (Shade)' => 'Auto (ombre)',
'Auto (Tungsten)' => 'Auto (tungstène)',
'Auto (White Fluorescent)' => 'Auto (fluo blanc)',
'Unknown' => 'Inconnu',
'User-Selected' => 'Sélectionnée par l\'utilisateur',
},
},
'WhiteBalanceSet' => {
Description => 'Réglage de balance des blancs',
PrintConv => {
'Cloudy' => 'Temps nuageux',
'Day White Fluorescent' => 'Fluorescent blanc jour',
'Daylight' => 'Lumière du jour',
'Daylight Fluorescent' => 'Fluorescente type jour',
'Manual' => 'Manuelle',
'Set Color Temperature 1' => 'Température de couleur définie 1',
'Set Color Temperature 2' => 'Température de couleur définie 2',
'Set Color Temperature 3' => 'Température de couleur définie 3',
'Shade' => 'Ombre',
'Tungsten' => 'Tungstène (lumière incandescente)',
'White Fluorescent' => 'Fluorescent blanc',
},
},
'WhiteLevel' => 'Niveau blanc',
'WhitePoint' => 'Chromaticité du point blanc',
'WideRange' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
'WorldTime' => 'Fuseau horaire',
'WorldTimeLocation' => {
Description => 'Position en temps mondial',
PrintConv => {
'Home' => 'Départ',
'Hometown' => 'Résidence',
},
},
'Writer-Editor' => 'Auteur de la légende / description',
'XClipPathUnits' => 'Unités de chemin de rognage en X',
'XMP' => 'Métadonnées XMP',
'XPAuthor' => 'Auteur',
'XPComment' => 'Commentaire',
'XPKeywords' => 'Mots clé',
'XPSubject' => 'Sujet',
'XPTitle' => 'Titre',
'XPosition' => 'Position en X',
'XResolution' => 'Résolution d\'image horizontale',
'YCbCrCoefficients' => 'Coefficients de la matrice de transformation de l\'espace de couleurs',
'YCbCrPositioning' => {
Description => 'Positionnement Y et C',
PrintConv => {
'Centered' => 'Centré',
'Co-sited' => 'Côte à côte',
},
},
'YCbCrSubSampling' => 'Rapport de sous-échantillonnage Y à C',
'YClipPathUnits' => 'Unités de chemin de rognage en Y',
'YPosition' => 'Position en Y',
'YResolution' => 'Résolution d\'image verticale',
'Year' => 'Année',
'ZoneMatching' => {
Description => 'Ajustage de la zone',
PrintConv => {
'High Key' => 'Hi',
'ISO Setting Used' => 'Désactivée',
'Low Key' => 'Lo',
},
},
'ZoneMatchingOn' => {
PrintConv => {
'Off' => 'Désactivé',
'On' => 'Activé',
},
},
);
1; # end
__END__
=head1 NAME
Image::ExifTool::Lang::fr.pm - ExifTool French language translations
=head1 DESCRIPTION
This file is used by Image::ExifTool to generate localized tag descriptions
and values.
=head1 AUTHOR
Copyright 2003-2020, Phil Harvey (philharvey66 at gmail.com)
This library is free software; you can redistribute it and/or modify it
under the same terms as Perl itself.
=head1 ACKNOWLEDGEMENTS
Thanks to Jens Duttke, Bernard Guillotin, Jean Glasser, Jean Piquemal, Harry
Nizard and Alphonse Philippe for providing this translation.
=head1 SEE ALSO
L<Image::ExifTool(3pm)|Image::ExifTool>,
L<Image::ExifTool::TagInfoXML(3pm)|Image::ExifTool::TagInfoXML>
=cut
| mkjanke/Focus-Points | focuspoints.lrdevplugin/bin/exiftool/lib/Image/ExifTool/Lang/fr.pm | Perl | apache-2.0 | 162,795 |
#
# Copyright 2017 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::extreme::snmp::mode::components::psu;
use strict;
use warnings;
# Textual states for extremePowerSupplyStatus (EXTREME-SYSTEM-MIB);
# keys are the raw integer values returned by the agent.
my %map_psu_status = (
    1 => 'notPresent',
    2 => 'presentOK',
    3 => 'presentNotOK',
    4 => 'presentPowerOff'
);
# Columns of interest in the extremePowerSupplyTable. The power usage value
# must be scaled by 10^UnitMultiplier (see check() below) to obtain watts.
my $mapping = {
    extremePowerSupplyStatus => { oid => '.1.3.6.1.4.1.1916.1.1.1.27.1.2', map => \%map_psu_status },
    extremePowerSupplyFan1Speed => { oid => '.1.3.6.1.4.1.1916.1.1.1.27.1.6' },
    extremePowerSupplyFan2Speed => { oid => '.1.3.6.1.4.1.1916.1.1.1.27.1.7' },
    extremePowerSupplyInputPowerUsage => { oid => '.1.3.6.1.4.1.1916.1.1.1.27.1.9' },
    extremePowerSupplyInputPowerUsageUnitMultiplier => { oid => '.1.3.6.1.4.1.1916.1.1.1.27.1.11' },
};
# Root OID of the power-supply table entry; the walk is started at the
# status column (see load()) so lower, unused columns are not fetched.
my $oid_extremePowerSupplyEntry = '.1.3.6.1.4.1.1916.1.1.1.27.1';
# Queue the SNMP request for the power-supply table. The walk begins at the
# status column so columns below it in the entry are skipped.
sub load {
    my ($self) = @_;

    my $request = {
        oid   => $oid_extremePowerSupplyEntry,
        start => $mapping->{extremePowerSupplyStatus}->{oid},
    };
    push @{$self->{request}}, $request;
}
# Evaluate one power-supply fan speed (rpm) against the 'psu.fan' numeric
# thresholds and always emit a perfdata point for it. Missing or negative
# readings are ignored. Expects: value => rpm, instance => '<psu>.<fan#>'.
sub check_fan_speed {
    my ($self, %options) = @_;

    # Nothing to report when the agent gave no (or a bogus negative) reading.
    return if (!defined($options{value}) || $options{value} < 0);

    my ($exit, $warn, $crit, $checked) = $self->get_severity_numeric(
        section  => 'psu.fan',
        instance => $options{instance},
        value    => $options{value}
    );
    if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
        $self->{output}->output_add(
            severity  => $exit,
            short_msg => sprintf("Power supply fan '%s' is '%s' rpm", $options{instance}, $options{value})
        );
    }

    $self->{output}->perfdata_add(
        label    => 'psu_fan_' . $options{instance},
        unit     => 'rpm',
        value    => $options{value},
        warning  => $warn,
        critical => $crit,
        min      => 0
    );
}
# Walk every row of the power-supply table: check the supply status against
# the 'psu' severity map, each fan speed against 'psu.fan' thresholds, and
# the input power (watts) against 'psu' numeric thresholds, honouring the
# user's section/instance filters.
sub check {
    my ($self) = @_;
    $self->{output}->output_add(long_msg => "Checking power supplies");
    $self->{components}->{psu} = {name => 'power supplies', total => 0, skip => 0};
    # Entire section excluded by filter: nothing to do.
    return if ($self->check_filter(section => 'psu'));
    # One logical row per supply; rows are identified by the suffix after the
    # status column OID.
    foreach my $oid ($self->{snmp}->oid_lex_sort(keys %{$self->{results}->{$oid_extremePowerSupplyEntry}})) {
        next if ($oid !~ /^$mapping->{extremePowerSupplyStatus}->{oid}\.(.*)$/);
        my $instance = $1;
        my $result = $self->{snmp}->map_instance(mapping => $mapping, results => $self->{results}->{$oid_extremePowerSupplyEntry}, instance => $instance);
        next if ($self->check_filter(section => 'psu', instance => $instance));
        if ($result->{extremePowerSupplyStatus} =~ /notPresent/i) {
            # Record the empty slot (may be escalated via absent settings).
            $self->absent_problem(section => 'psu', instance => $instance);
            next;
        }
        # Power usage is reported with a separate power-of-ten multiplier column.
        my $power = $result->{extremePowerSupplyInputPowerUsage} * (10 ** $result->{extremePowerSupplyInputPowerUsageUnitMultiplier});
        $self->{components}->{psu}->{total}++;
        $self->{output}->output_add(long_msg => sprintf("Power supply '%s' status is '%s' [instance = %s, fan1speed = %s, fan2speed = %s, power = %s]",
                                    $instance, $result->{extremePowerSupplyStatus}, $instance,
                                    $result->{extremePowerSupplyFan1Speed}, $result->{extremePowerSupplyFan2Speed}, $power
                                    ));
        my $exit = $self->get_severity(section => 'psu', value => $result->{extremePowerSupplyStatus});
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(severity => $exit,
                                        short_msg => sprintf("Power supply '%s' status is '%s'", $instance, $result->{extremePowerSupplyStatus}));
        }
        # Each supply exposes up to two fans; sub-instances are '<psu>.1'/'<psu>.2'.
        check_fan_speed($self, value => $result->{extremePowerSupplyFan1Speed}, instance => $instance . '.1');
        check_fan_speed($self, value => $result->{extremePowerSupplyFan2Speed}, instance => $instance . '.2');
        if ($power != 0) {
            # NOTE(review): instance is the literal string 'psu' rather than
            # $instance, so per-supply numeric power thresholds cannot be
            # targeted individually — confirm whether this is intentional.
            my ($exit, $warn, $crit, $checked) = $self->get_severity_numeric(section => 'psu', instance => 'psu', value => $power);
            if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
                $self->{output}->output_add(severity => $exit,
                                            short_msg => sprintf("Power supply '%s' power is '%s' W", $instance, $power));
            }
            $self->{output}->perfdata_add(label => 'psu_power_' . $instance, unit => 'W',
                                          value => $power,
                                          warning => $warn,
                                          critical => $crit, min => 0
                                          );
        }
    }
}
1; | nichols-356/centreon-plugins | network/extreme/snmp/mode/components/psu.pm | Perl | apache-2.0 | 5,562 |
#!/usr/local/bin/perl
use strict;
package Stomp::Receipt;

use base qw( Stomp::Message );

# Stomp::Receipt represents a STOMP RECEIPT frame.  All frame parsing and
# header storage behavior is inherited from Stomp::Message; this subclass
# only adds a convenience accessor for the receipt id.

# Constructor: accepts either a class name or an existing object (whose
# class is reused), then delegates initialization to the base class.
sub new {
    my $proto = shift;
    my $class = ref($proto) || $proto;
    my $self = bless {}, $class;
    $self->_init(@_);
    return $self;
}

# Returns the value of the frame's "receipt-id" header.
sub id {
    my ($self) = @_;
    return $self->header("receipt-id");
}

1;
| liuhs/stomperl | acceptance/Stomp-0.02/Stomp/Receipt.pm | Perl | apache-2.0 | 316 |
#!/usr/bin/perl -w
#
# Verifies that Bowtie's paired-end mode gives alignments that are
# consistent with the alignments produced in single-end mode with -a
# and --nostrata options.
#
# Run this from the Bowtie directory.
#
# Usage: perl ../scripts/pe_verify.pl [mode] [index] [mates1] [mates2]
#
# Defaults are:
# [mode] = -n 2
# [index] = e_coli
# [mates1] = reads/e_coli_1000_1.fq
# [mates2] = reads/e_coli_1000_2.fq
#
# E.g.: perl ../scripts/pe_verify.pl -v 0
# perl ../scripts/pe_verify.pl -v 0 e_coli reads/e_coli_1000_1.fq reads/e_coli_100000_e1_2.fq
#
use warnings;
use strict;
use List::Util qw[min max];
use Getopt::Long;
Getopt::Long::Configure ("no_ignore_case");
# ---------------------------------------------------------------------------
# Command-line options.  The alignment-policy flags mirror bowtie's own
# flags (-v/-n/-e/-l, -C colorspace, -g, and -I/-X insert-size bounds).
# ---------------------------------------------------------------------------
my $l = undef;          # -l: seed length
my $n = undef;          # -n: max seed mismatches
my $e = undef;          # -e: max quality-weighted mismatch sum
my $v = undef;          # -v: end-to-end mismatch mode
my $I = undef;          # -I: minimum insert size
my $X = undef;          # -X: maximum insert size
my $d = undef;          # -d: use the bowtie-debug binary
my $C = undef;          # -C: colorspace mode
my $g = undef;
my $colCseq = undef;
my $colCqual = undef;
my $args = "";          # extra arguments passed straight through to bowtie
my $verbose = undef;
# Expected strand orientations for mate 1 and mate 2 (fw/rev by default).
my $m1fw = 1;
my $m2fw = 0;

GetOptions ("I=i" => \$I,
            "X=i" => \$X,
            "v=i" => \$v,
            "n=i" => \$n,
            "e=i" => \$e,
            "l=i" => \$l,
            "d" => \$d,
            "g" => \$g,
            "C" => \$C,
            "verbose" => \$verbose,
            "col-cseq" => \$colCseq,
            "col-cqual" => \$colCqual,
            "args:s" => \$args,
) || die "One or more errors parsing script arguments";

# Insert-size window; these match bowtie's own -I/-X defaults.
my $inner = 0;
my $outer = 250;
$inner = $I if ${I};
$outer = $X if ${X};

my $extra_args = "";
# NOTE(review): this overwrites $extra_args with the numeric -e value; looks
# suspicious (the -e value is also appended to $match_mode below) — confirm.
$extra_args = $e if defined($e);

# Build the bowtie match-mode argument string from the parsed options.
my $match_mode = "-n 2";
$match_mode = "-v " . $v if defined($v);
$match_mode = "-n " . $n if defined($n);
$match_mode .= " -l " . $l if defined($l);
$match_mode .= " -e " . $e if defined($e);
$match_mode .= " -C" if defined($C);
$match_mode .= " -g" if defined($g);
$match_mode .= " --col-cseq" if defined($colCseq);
# In colorspace mode, mate 2 is expected on the forward strand.
$m2fw = 1 if $C;
print "Using match mode: $match_mode\n";

my $bowtie_dir = ".";
my $bowtie_exe = "bowtie";
$bowtie_exe .= "-debug" if $d;

# Positional arguments: [index] [mates1] [mates2], each with a default.
my $index = "e_coli";
$index = $ARGV[0] if defined($ARGV[0]);
my $reads1 = "reads/e_coli_1000_1.fq";
$reads1 = $ARGV[1] if (defined($ARGV[1]));
my $reads2 = "reads/e_coli_1000_2.fq";
$reads2 = $ARGV[2] if (defined($ARGV[2]));

# Infer input type so we can provide Bowtie with appropriate option
if($reads1 =~ /\.fa/) {
    $reads2 =~ /\.fa/ || die "Reads files $reads1 and $reads2 have different extensions";
    $extra_args .= " -f ";
} elsif($reads1 =~ /\.raw/) {
    $reads2 =~ /\.raw/ || die "Reads files $reads1 and $reads2 have different extensions";
    $extra_args .= " -r ";
} elsif(!($reads1 =~ /\.fq/)) {
    # Not .fa/.raw/.fq: treat the arguments as literal sequences (-c).
    (!($reads2 =~ /\.fq/)) || die "Reads files $reads1 and $reads2 have different extensions";
    $extra_args .= " -c ";
}

my $sesMustHavePes = 0; # force se pairs to have corresponding pe pairs

# Rebuild the bowtie binary if necessary before running it.
system("make -C $bowtie_dir $bowtie_exe") == 0 || die;
# Run Bowtie not in paired-end mode for first mate file
my $bowtie_se_cmd1 = "$bowtie_dir/$bowtie_exe $match_mode $args -y $extra_args -a --refidx $index $reads1";
# Run Bowtie not in paired-end mode for second mate file
my $bowtie_se_cmd2 = "$bowtie_dir/$bowtie_exe $match_mode $args -y $extra_args -a --refidx $index $reads2";
# Run Bowtie in paired-end mode
my $bowtie_pe_cmd = "$bowtie_dir/$bowtie_exe $match_mode $args -I $inner -X $outer -y $extra_args -a --refidx $index -1 $reads1 -2 $reads2";
print "$bowtie_pe_cmd\n";
open BOWTIE_PE, "$bowtie_pe_cmd |";
if($C) {
    # Colorspace reads are one character shorter; loosen the window by 1.
    $inner = max(0, $inner-1);
    $outer = max(0, $outer-1);
}
my $pes = 0;                # total paired-end alignments seen
my $pesFw = 0;              # pairs reported with mate 1 first
my $pesRc = 0;              # pairs reported with mate 2 first
my %peHash = ();            # basename -> canonical description of its PE alignment
# Read paired-end alignments two lines at a time (one line per mate) and
# record a canonical "key" string for each pair, sanity-checking the
# reported insert length against the -I/-X window as we go.
while(<BOWTIE_PE>) {
    chomp;
    my $l1 = $_;
    my $l2 = <BOWTIE_PE>;
    chomp($l2);
    print "$l1\n$l2\n";
    my @l1s = split(/[\t]/, $l1);
    my @l2s = split(/[\t]/, $l2);
    $#l1s >= 5 || die "Paired-end alignment not formatted correctly: $l1";
    $#l2s >= 5 || die "Paired-end alignment not formatted correctly: $l2";
    # Split the read name (basename/mate-number)
    my @l1rs = split(/\//, $l1s[0]);
    my @l2rs = split(/\//, $l2s[0]);
    $#l1rs >= 1 || die "Read not formatted correctly: $l1s[0]";
    $#l2rs >= 1 || die "Read not formatted correctly: $l2s[0]";
    $l1rs[0] eq $l2rs[0] || die "Before-/ parts of read names don't match: $l1rs[0], $l2rs[0]";
    my $mate1 = ($l1rs[$#l1rs] eq "1");
    my $mate1str = $mate1 ? "1" : "0";
    my $basename = $l1rs[0];
    # Insert length = downstream offset - upstream offset + downstream length.
    my $loff = int($l1s[3]);
    my $roff = int($l2s[3]);
    my $insLen = $roff - $loff + length($l2s[4]);
    $insLen > length($l1s[4]) || die "Insert length did not exceed first mate length";
    $insLen > length($l1s[5]) || die "Insert length did not exceed first mate length";
    $insLen >= $inner || die "Insert length was $insLen < $inner\n";
    $insLen <= $outer || die "Insert length was $insLen > $outer\n";
    my $read1Short = $l1s[4];
    my $qual1Short = $l1s[5];
    my $read2Short = $l2s[4];
    my $qual2Short = $l2s[5];
    # Mismatch descriptor columns may be absent; "-" is a placeholder.
    my $read1Mms = "";
    $read1Mms = $l1s[7] if defined($l1s[7]);
    $read1Mms = "-" if $read1Mms eq "";
    my $read2Mms = "";
    $read2Mms = $l2s[7] if defined($l2s[7]);
    $read2Mms = "-" if $read2Mms eq "";
    # Sequence/quality and mismatch info are excluded from the key in
    # colorspace mode where they aren't comparable to the SE output.
    my $content = "$read1Short:$qual1Short $read2Short:$qual2Short";
    $content = "" if $C && !$colCseq;
    my $mcont = "$read1Mms $read2Mms";
    $mcont = "" if $C;
    my $val = "$basename $mate1str $l1s[2] $l1s[3] $l2s[3] $content $mcont";
    #defined ($peHash{$basename}) && die "Already saw paired-end alignment for basename $basename";
    $peHash{$basename} = $val;
    if($mate1) { $pesFw++; } else { $pesRc++; }
    $pes++;
}
close(BOWTIE_PE);
close(BOWTIE_PE);
my $ses = 0;
my %seHash = ();
my %seMatedHash = ();
my %unmatchedSe = ();
my $unmatchedSes = 0;
my $unmatchedSeReads = 0;
print "$bowtie_se_cmd1\n";
open BOWTIE_SE1, "$bowtie_se_cmd1 |";
while(<BOWTIE_SE1>) {
print "$_";
chomp;
$ses++;
my @ls = split(/[\t]/, $_);
my @lrs = split(/\//, $ls[0]);
my $basename = $lrs[0];
my $ref = $ls[2];
my $off = int($ls[3]);
my $len = length($ls[4]);
my $readShort = $ls[4];
my $qualShort = $ls[5];
my $mms = "";
$mms = $ls[7] if defined($ls[7]);
$mms = "-" if $mms eq "";
my $content = "$readShort $qualShort $mms";
my $key = "$ref $ls[1] $off $len $content";
push @{ $seHash{$basename}{$ref} }, $key;
}
close(BOWTIE_SE1);
print "$bowtie_se_cmd2\n";
# Second single-end pass: for each mate-2 alignment, try to combine it with
# a stored mate-1 alignment on the same reference into a legitimate pair
# (correct orientation and insert size), and check that pair against the
# paired-end results collected earlier.
open BOWTIE_SE2, "$bowtie_se_cmd2 |";
open UNMATCHED_SE, ">.unmatched.se";
while(<BOWTIE_SE2>) {
    print "$_";
    chomp;
    $ses++;
    my @ls = split(/[\t]/, $_);
    my @lrs = split(/\//, $ls[0]);
    my $basename = $lrs[0];
    my $ref = $ls[2];
    my $off = int($ls[3]);
    my $len = length($ls[4]);
    my $fw = $ls[1] eq "+" ? 1 : 0;
    my $readShort = $ls[4];
    my $qualShort = $ls[5];
    my $mms = "";
    $mms = $ls[7] if defined($ls[7]);
    $mms = "-" if $mms eq "";
    my $content = "$readShort $qualShort";
    my $mcont = "$mms";
    my $key = "$ref $ls[1] $off $len $content $mcont";
    # Is the other mate already aligned?
    if(defined($seHash{$basename}{$ref})) {
        # Get all of the alignments for the mate
        for my $om (@{ $seHash{$basename}{$ref} }) {
            # Unpack the 7-field key built in the SE1 pass.
            my @oms = split(/ /, $om);
            $#oms == 6 || die "Wrong number of elements for oms: $#oms";
            my $oref = $oms[0];
            my $ofw = $oms[1] eq "+" ? 1 : 0;
            my $ooff = int($oms[2]);
            my $olen = int($oms[3]);
            my $oreadShort = $oms[4];
            my $oqualShort = $oms[5];
            my $omms = "";
            print "Trying $ref:$off and $oref:$ooff\n" if $verbose;
            $omms = $oms[6] if defined($oms[6]);
            $oref eq $ref || die "Refs don't match: $oref, $ref";
            my $diff;
            my $peKey = "$basename ";
            if($ooff > $off) {
                # The #1 mate is on the right; expected orientations flip.
                my $my_m1fw = $m1fw ? 0 : 1;
                my $my_m2fw = $m2fw ? 0 : 1;
                $diff = $ooff - $off + $olen;
                # Reject pairs where one mate would contain the other.
                if ($diff <= $olen || $diff <= $len) {
                    print "diff $diff is <= $olen and $len\n" if $verbose;
                    next;
                }
                # upstream mate contains downstream one?
                #next if $off + $len >= $ooff + $olen;
                # mates are at the same position?
                if($ooff == $off) {
                    print "overlapping offsets: $ooff, $off\n" if $verbose;
                    next;
                }
                # Insert length must fall inside the -I/-X window.
                if ($diff < $inner || $diff > $outer) {
                    print "diff $diff is outside of inner/outer: [$inner, $outer]\n" if $verbose;
                    next;
                }
                if($ofw != $my_m1fw) {
                    print "orientation of other $ofw doesn't match expected $my_m1fw\n" if $verbose;
                    next;
                }
                if($fw != $my_m2fw) {
                    print "orientation of anchor $fw doesn't match expected $my_m2fw\n" if $verbose;
                    next;
                }
                $content = "$readShort:$qualShort $oreadShort:$oqualShort";
                $content = "" if $C && !$colCseq;
                $mcont = "$mms $omms";
                $mcont = "" if $C;
                $peKey .= "0 $ref $off $ooff $content $mcont";
            } else {
                # The #1 mate is on the left
                $diff = $off - $ooff + $len;
                if ($diff <= $olen || $diff <= $len) {
                    print "diff $diff is <= $olen and $len\n" if $verbose;
                    next;
                }
                # upstream mate contains downstream one?
                #next if $ooff + $olen >= $off + $len;
                # mates are at the same position?
                if($ooff == $off) {
                    print "overlapping offsets: $ooff, $off\n" if $verbose;
                    next;
                }
                if ($diff < $inner || $diff > $outer) {
                    print "diff $diff is outside of inner/outer: [$inner, $outer]\n" if $verbose;
                    next;
                }
                if($ofw != $m1fw) {
                    print "orientation of other $ofw doesn't match expected $m1fw\n" if $verbose;
                    next;
                }
                if($fw != $m2fw) {
                    print "orientation of anchor $fw doesn't match expected $m2fw\n" if $verbose;
                    next;
                }
                $content = "$oreadShort:$oqualShort $readShort:$qualShort";
                $content = "" if $C && !$colCseq;
                $mcont = "$omms $mms";
                $mcont = "" if $C;
                $peKey .= "1 $ref $ooff $off $content $mcont";
            }
            # Found a legitimate paired-end alignment using a pair of
            # single-end alignments
            if($seMatedHash{$basename}) {
                print "already found corresponding paired-end\n" if $verbose;
                next;
            }
            if($sesMustHavePes) {
                defined($peHash{$basename}) ||
                    die "Found single-end alignment for $basename, but no paired-end";
            } else {
                # Record SE pairs with no PE counterpart for the final report.
                if(!defined($peHash{$basename})) {
                    if(!defined($unmatchedSe{$basename})) {
                        $unmatchedSe{$basename} = 0;
                        $unmatchedSeReads++;
                    }
                    $unmatchedSe{$basename}++;
                    $unmatchedSes++;
                    print UNMATCHED_SE "Read $basename:\n$om\n$key\n";
                }
            }
            # A PE entry that is exactly reproduced by SE alignments is
            # "explained": remove it so leftovers can be reported at the end.
            if(defined($peHash{$basename}) && $peHash{$basename} eq $peKey) {
                delete $peHash{$basename};
                $seMatedHash{$basename} = 1;
            } else {
                print "No matchup:\n$peHash{$basename}\n$peKey\n" if $verbose;
            }
            #print "Found alignment for mate $otherMate of $ls[0]; diff: $diff\n";
        }
    }
}
close(BOWTIE_SE2);
close(UNMATCHED_SE);
close(UNMATCHED_SE);
my $die = 0;
for my $peKey (keys %peHash) {
print "Paired-end $peKey has a paired-end alignment without single-end support\n";
print "[ $peHash{$peKey} ]\n";
$die++;
}
$die && die "Found $die paired-end reads with no corresponding single-end mates";
if($unmatchedSes > 0) {
print "Total of $unmatchedSes unmatched single-end alignments found for $unmatchedSes distinct pairs\n";
print "Ref orientation off len seq quals mms\n";
system("cat .unmatched.se");
die;
}
print "PASSED; analyzed $pes paired-end ($pesFw fw, $pesRc rc) and $ses single-end alignments\n";
| vipints/oqtans | oqtans_tools/Bowtie/1.0.0/scripts/pe_verify.pl | Perl | bsd-3-clause | 10,945 |
#------------------------------------------------------------------------------
# File: Fixup.pm
#
# Description: Utility to handle pointer fixups
#
# Revisions: 01/19/2005 - P. Harvey Created
# 04/11/2005 - P. Harvey Allow fixups to be tagged with a marker,
# and add new marker-related routines
# 06/21/2006 - P. Harvey Patch to work with negative offsets
# 07/07/2006 - P. Harvey Added support for 16-bit pointers
#
# Data Members:
#
# Start - Position in data where a zero pointer points to.
# Shift - Amount to shift offsets (relative to Start).
# Fixups - List of Fixup object references to to shift relative to this Fixup.
# Pointers - Hash of references to fixup pointer arrays, keyed by ByteOrder
# string (with "2" added if pointer is 16-bit [default is 32-bit],
# plus "_$marker" suffix if tagged with a marker name).
#------------------------------------------------------------------------------
package Image::ExifTool::Fixup;
use strict;
use Image::ExifTool qw(GetByteOrder SetByteOrder Get32u Get32s Set32u
Get16u Get16s Set16u);
use vars qw($VERSION);
$VERSION = '1.04';
sub AddFixup($$;$$);
sub ApplyFixup($$);
sub Dump($;$);
#------------------------------------------------------------------------------
# New - create new Fixup object
# Inputs: 0) reference to Fixup object or Fixup class name
# Returns: reference to a new, empty Fixup (Start and Shift both zero)
sub new
{
    local $_;
    my $that = shift;
    # accept either an object reference or a class name; default to this class
    my $class = ref($that) || $that || 'Image::ExifTool::Fixup';
    # a fresh fixup starts with a zero base offset and a zero shift
    my $self = { Start => 0, Shift => 0 };
    return bless $self, $class;
}
#------------------------------------------------------------------------------
# Clone this object
# Inputs: 0) reference to Fixup object or Fixup class name
# Returns: reference to new Fixup object (deep copy: pointer lists are
#          duplicated and contained fixups are cloned recursively)
sub Clone($)
{
    my $self = shift;
    my $copy = Image::ExifTool::Fixup->new;
    $copy->{Start} = $self->{Start};
    $copy->{Shift} = $self->{Shift};
    if ($self->{Pointers}) {
        # duplicate each per-byte-order pointer list so the clone's lists
        # can be modified independently of the original's
        my %pointers;
        foreach my $byteOrder (keys %{$self->{Pointers}}) {
            $pointers{$byteOrder} = [ @{$self->{Pointers}{$byteOrder}} ];
        }
        $copy->{Pointers} = \%pointers;
    }
    if ($self->{Fixups}) {
        # recursively clone all contained fixups
        $copy->{Fixups} = [ map { $_->Clone() } @{$self->{Fixups}} ];
    }
    return $copy;
}
#------------------------------------------------------------------------------
# Add fixup pointer or another fixup object below this one
# Inputs: 0) Fixup object reference
#         1) Scalar for pointer offset, or reference to Fixup object
#         2) Optional marker name for the pointer
#         3) Optional pointer format ('int16u' or 'int32u', defaults to 'int32u')
# Notes: Byte ordering must be set properly for the pointer being added (must keep
# track of the byte order of each offset since MakerNotes may have different byte order!)
sub AddFixup($$;$$)
{
    my ($self, $pointer, $marker, $format) = @_;
    if (ref $pointer) {
        # a Fixup object: attach it as a contained (child) fixup
        $self->{Fixups} or $self->{Fixups} = [ ];
        push @{$self->{Fixups}}, $pointer;
    } else {
        # a scalar offset: file it under the current byte order key;
        # 16-bit pointers get a '2' suffix and marked pointers "_$marker"
        my $byteOrder = GetByteOrder();
        if (defined $format) {
            if ($format eq 'int16u') {
                $byteOrder .= '2';
            } elsif ($format ne 'int32u') {
                warn "Bad Fixup pointer format $format\n";
            }
        }
        $byteOrder .= "_$marker" if defined $marker;
        my $phash = $self->{Pointers};
        $phash or $phash = $self->{Pointers} = { };
        $phash->{$byteOrder} or $phash->{$byteOrder} = [ ];
        push @{$phash->{$byteOrder}}, $pointer;
    }
}
#------------------------------------------------------------------------------
# fix up pointer offsets
# Inputs: 0) Fixup object reference, 1) data reference
# Outputs: Collapses fixup hierarchy into linear lists of fixup pointers
# Notes: after this call Start and Shift are reset to 0 and all contained
# fixups have been merged into this object's pointer lists.
sub ApplyFixup($$)
{
    my ($self, $dataPt) = @_;
    my $start = $self->{Start};
    my $shift = $self->{Shift} + $start;    # make shift relative to start
    my $phash = $self->{Pointers};
    # fix up pointers in this fixup
    if ($phash and ($start or $shift)) {
        my $saveOrder = GetByteOrder(); # save original byte ordering
        my ($byteOrder, $ptr);
        foreach $byteOrder (keys %$phash) {
            # first 2 characters of the key are the byte order ('II'/'MM')
            SetByteOrder(substr($byteOrder,0,2));
            # apply the fixup offset shift (must get as signed integer
            # to avoid overflow in case it was negative before)
            my ($get, $set) = ($byteOrder =~ /^(II2|MM2)/) ?
                              (\&Get16s, \&Set16u) : (\&Get32s, \&Set32u);
            foreach $ptr (@{$phash->{$byteOrder}}) {
                # NOTE: foreach aliases $ptr to the array element, so this
                # updates the stored pointer position in place
                $ptr += $start;
                next unless $shift;
                &$set(&$get($dataPt, $ptr) + $shift, $dataPt, $ptr);
            }
        }
        SetByteOrder($saveOrder); # restore original byte ordering
    }
    # recurse into contained fixups
    if ($self->{Fixups}) {
        # create our pointer hash if it doesn't exist
        $phash or $phash = $self->{Pointers} = { };
        # loop through all contained fixups
        my $subFixup;
        foreach $subFixup (@{$self->{Fixups}}) {
            # adjust the subfixup start and shift so they are absolute
            $subFixup->{Start} += $start;
            $subFixup->{Shift} += $shift - $start;
            # recursively apply contained fixups
            ApplyFixup($subFixup, $dataPt);
            my $shash = $subFixup->{Pointers} or next;
            # add all pointers to our collapsed lists
            my $byteOrder;
            foreach $byteOrder (keys %$shash) {
                $phash->{$byteOrder} or $phash->{$byteOrder} = [ ];
                push @{$phash->{$byteOrder}}, @{$shash->{$byteOrder}};
                delete $shash->{$byteOrder};
            }
            delete $subFixup->{Pointers};
        }
        delete $self->{Fixups}; # remove our contained fixups
    }
    # reset our Start/Shift for the collapsed fixup
    $self->{Start} = $self->{Shift} = 0;
}
#------------------------------------------------------------------------------
# Does specified marker exist?
# Inputs: 0) Fixup object reference, 1) marker name
# Returns: True if fixup (or any contained fixup) contains the marker name
sub HasMarker($$)
{
    my ($self, $marker) = @_;
    my $phash = $self->{Pointers};
    return 0 unless $phash;
    # quote the marker so regex metacharacters in a marker name can't
    # cause false matches (eg. a '.' matching any character)
    return 1 if grep /_\Q$marker\E$/, keys %$phash;
    return 0 unless $self->{Fixups};
    my $subFixup;
    foreach $subFixup (@{$self->{Fixups}}) {
        return 1 if $subFixup->HasMarker($marker);
    }
    return 0;
}
#------------------------------------------------------------------------------
# Set all marker pointers to specified value
# Inputs: 0) Fixup object reference, 1) data reference
#         2) marker name, 3) pointer value, 4) offset to start of data
# Notes: writes the value into the data at every pointer position tagged
# with the marker, honoring each pointer list's byte order and width.
sub SetMarkerPointers($$$$;$)
{
    my ($self, $dataPt, $marker, $value, $startOffset) = @_;
    my $start = $self->{Start} + ($startOffset || 0);
    my $phash = $self->{Pointers};
    if ($phash) {
        my $saveOrder = GetByteOrder(); # save original byte ordering
        my ($byteOrder, $ptr);
        foreach $byteOrder (keys %$phash) {
            # only keys tagged with this marker; capture order and width
            next unless $byteOrder =~ /^(II|MM)(2?)_$marker$/;
            SetByteOrder($1);
            # '2' suffix means a 16-bit pointer
            my $set = $2 ? \&Set16u : \&Set32u;
            foreach $ptr (@{$phash->{$byteOrder}}) {
                &$set($value, $dataPt, $ptr + $start);
            }
        }
        SetByteOrder($saveOrder);   # restore original byte ordering
    }
    if ($self->{Fixups}) {
        # recurse into contained fixups (offsets are relative to our start)
        my $subFixup;
        foreach $subFixup (@{$self->{Fixups}}) {
            $subFixup->SetMarkerPointers($dataPt, $marker, $value, $start);
        }
    }
}
#------------------------------------------------------------------------------
# Get pointer values for specified marker
# Inputs: 0) Fixup object reference, 1) data reference,
#         2) marker name, 3) offset to start of data
# Returns: List of marker pointers in list context, or first marker pointer otherwise
sub GetMarkerPointers($$$;$)
{
    my ($self, $dataPt, $marker, $startOffset) = @_;
    my $start = $self->{Start} + ($startOffset || 0);
    my $phash = $self->{Pointers};
    my @pointers;
    if ($phash) {
        my $saveOrder = GetByteOrder();
        my ($byteOrder, $ptr);
        foreach $byteOrder (grep /_$marker$/, keys %$phash) {
            SetByteOrder(substr($byteOrder,0,2));
            # keys with a '2' suffix hold 16-bit pointers
            my $get = ($byteOrder =~ /^(II2|MM2)/) ? \&Get16u : \&Get32u;
            foreach $ptr (@{$phash->{$byteOrder}}) {
                push @pointers, &$get($dataPt, $ptr + $start);
            }
        }
        SetByteOrder($saveOrder);   # restore original byte ordering
    }
    if ($self->{Fixups}) {
        # collect pointers from contained fixups too
        my $subFixup;
        foreach $subFixup (@{$self->{Fixups}}) {
            push @pointers, $subFixup->GetMarkerPointers($dataPt, $marker, $start);
        }
    }
    return @pointers if wantarray;
    return $pointers[0];
}
#------------------------------------------------------------------------------
# Dump fixup to console for debugging
# Inputs: 0) Fixup object reference, 1) optional initial indent string
# Notes: prints Start/Shift, then each byte-order pointer list, then
# recursively dumps contained fixups with increased indentation.
sub Dump($;$)
{
    my ($self, $indent) = @_;
    $indent or $indent = '';
    printf "${indent}Fixup start=0x%x shift=0x%x\n", $self->{Start}, $self->{Shift};
    my $phash = $self->{Pointers};
    if ($phash) {
        my $byteOrder;
        foreach $byteOrder (sort keys %$phash) {
            print "$indent $byteOrder: ", join(' ',@{$phash->{$byteOrder}}),"\n";
        }
    }
    if ($self->{Fixups}) {
        my $subFixup;
        foreach $subFixup (@{$self->{Fixups}}) {
            Dump($subFixup, $indent . ' ');
        }
    }
}
1; # end
__END__
=head1 NAME
Image::ExifTool::Fixup - Utility to handle pointer fixups
=head1 SYNOPSIS
use Image::ExifTool::Fixup;
$fixup = new Image::ExifTool::Fixup;
# add a new fixup to a pointer at the specified offset in data
$fixup->AddFixup($offset);
# add a new Fixup object to the tree
$fixup->AddFixup($subFixup);
$fixup->{Start} += $shift1; # shift pointer offsets and values
$fixup->{Shift} += $shift2; # shift pointer values only
# recursively apply fixups to the specified data
$fixup->ApplyFixup(\$data);
$fixup->Dump(); # dump debugging information
=head1 DESCRIPTION
This module contains the code to keep track of pointers in memory and to
shift these pointers as required. It is used by ExifTool to maintain the
pointers in image file directories (IFD's).
=head1 NOTES
Keeps track of pointers with different byte ordering, and relies on
Image::ExifTool::GetByteOrder() to determine the current byte ordering
when adding new pointers to a fixup.
Maintains a hierarchical list of fixups so that the whole hierarchy can
be shifted by a simple shift at the base. Hierarchy is collapsed to a
linear list when ApplyFixup() is called.
=head1 AUTHOR
Copyright 2003-2009, Phil Harvey (phil at owl.phy.queensu.ca)
This library is free software; you can redistribute it and/or modify it
under the same terms as Perl itself.
=head1 SEE ALSO
L<Image::ExifTool(3pm)|Image::ExifTool>
=cut
| opf-attic/ref | tools/fits/0.6.1/tools/exiftool/perl/lib/Image/ExifTool/Fixup.pm | Perl | apache-2.0 | 11,809 |
#!/usr/bin/perl -w
# Copyright (C) 2005, 2006, 2007, 2009 Apple Inc. All rights reserved.
# Copyright (C) 2009, Julien Chaffraix <jchaffraix@webkit.org>
# Copyright (C) 2009 Torch Mobile Inc. All rights reserved. (http://www.torchmobile.com/)
# Copyright (C) 2011 Ericsson AB. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
# its contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
use strict;
use Config;
use Getopt::Long;
use File::Path;
use IO::File;
use InFilesParser;
sub readTags($$);
sub readAttrs($$);

# Output selection flags and accumulator hashes filled by the .in parsers.
my $printFactory = 0;
my $printWrapperFactory = 0;
my $printWrapperFactoryV8 = 0;
my $tagsFile = "";
my $attrsFile = "";
my $outputDir = ".";
my %parsedTags = ();            # all entries from the tags .in file
my %parsedAttrs = ();           # all entries from the attrs .in file
my %enabledTags = ();           # tags surviving preprocessing (ENABLE(...))
my %enabledAttrs = ();
my %allTags = ();
my %allAttrs = ();
my %parameters = ();            # file-level parameters (namespace etc.)
my $extraDefines = 0;
my %extensionAttrs = ();

require Config;

# Pick the C++ preprocessor: $CC if set, else a platform-specific gcc.
my $gccLocation = "";
if ($ENV{CC}) {
    $gccLocation = $ENV{CC};
} elsif (($Config::Config{'osname'}) =~ /solaris/i) {
    $gccLocation = "/usr/sfw/bin/gcc";
} else {
    $gccLocation = "/usr/bin/gcc";
}
my $preprocessor = $gccLocation . " -E -x c++";

GetOptions(
    'tags=s' => \$tagsFile,
    'attrs=s' => \$attrsFile,
    'factory' => \$printFactory,
    'outputDir=s' => \$outputDir,
    'extraDefines=s' => \$extraDefines,
    'preprocessor=s' => \$preprocessor,
    'wrapperFactory' => \$printWrapperFactory,
    'wrapperFactoryV8' => \$printWrapperFactoryV8
);

die "You must specify at least one of --tags <file> or --attrs <file>" unless (length($tagsFile) || length($attrsFile));

# Each .in file is read twice: once raw (all entries) and once through the
# preprocessor (only the enabled entries).
if (length($tagsFile)) {
    %allTags = %{readTags($tagsFile, 0)};
    %enabledTags = %{readTags($tagsFile, 1)};
}

if (length($attrsFile)) {
    %allAttrs = %{readAttrs($attrsFile, 0)};
    %enabledAttrs = %{readAttrs($attrsFile, 1)};
}

die "You must specify a namespace (e.g. SVG) for <namespace>Names.h" unless $parameters{namespace};
die "You must specify a namespaceURI (e.g. http://www.w3.org/2000/svg)" unless $parameters{namespaceURI};

$parameters{namespacePrefix} = $parameters{namespace} unless $parameters{namespacePrefix};

mkpath($outputDir);

# Generate the requested output files.
my $namesBasePath = "$outputDir/$parameters{namespace}Names";
my $factoryBasePath = "$outputDir/$parameters{namespace}ElementFactory";
my $wrapperFactoryFileName = "$parameters{namespace}ElementWrapperFactory";

printNamesHeaderFile("$namesBasePath.h");
printNamesCppFile("$namesBasePath.cpp");

if ($printFactory) {
    printFactoryCppFile("$factoryBasePath.cpp");
    printFactoryHeaderFile("$factoryBasePath.h");
}

die "You cannot specify both --wrapperFactory and --wrapperFactoryV8" if $printWrapperFactory && $printWrapperFactoryV8;
my $wrapperFactoryType = "";
if ($printWrapperFactory) {
    $wrapperFactoryType = "JS";
} elsif ($printWrapperFactoryV8) {
    $wrapperFactoryType = "V8";
}

if ($wrapperFactoryType) {
    printWrapperFactoryCppFile($outputDir, $wrapperFactoryType, $wrapperFactoryFileName);
    printWrapperFactoryHeaderFile($outputDir, $wrapperFactoryType, $wrapperFactoryFileName);
}
### Hash initialization

# Returns the default property set for a newly-seen tag.  The interface
# name defaults to <Namespace><Tag>Element, and JSInterfaceName starts out
# equal to interfaceName (the parser uses that equality to detect a custom
# JSInterfaceName later).
sub defaultTagPropertyHash
{
    my ($tag) = @_;
    my $interfaceName = defaultInterfaceName($tag);
    return (
        'constructorNeedsCreatedByParser' => 0,
        'constructorNeedsFormElement' => 0,
        'interfaceName' => $interfaceName,
        # By default, the JSInterfaceName is the same as the interfaceName.
        'JSInterfaceName' => $interfaceName,
        'mapToTagName' => '',
        'wrapperOnlyIfMediaIsAvailable' => 0,
        'conditional' => 0
    );
}
# Returns the set of recognized file-level parameters with their defaults;
# the parametersHandler callback overwrites these as the .in file is parsed.
sub defaultParametersHash
{
    my %defaults = (
        'namespace' => '',
        'namespacePrefix' => '',
        'namespaceURI' => '',
        'guardFactoryWith' => '',
        'tagsNullNamespace' => 0,
        'attrsNullNamespace' => 0
    );
    return %defaults;
}
# Builds the default interface name for a tag: <Namespace><Tag>Element
# (eg. "div" in the HTML namespace becomes "HTMLDivElement").
# Dies if the namespace parameter has not been parsed yet.
sub defaultInterfaceName
{
    die "No namespace found" if !$parameters{namespace};
    return $parameters{namespace} . upperCaseName($_[0]) . "Element"
}
### Parsing handlers

# Per-entry callback for tag .in files.  Registers the tag (with '-'
# normalized to '_') with default properties on first sight, then applies
# any "property = value" setting, dying on unknown property names.
sub tagsHandler
{
    my ($tag, $property, $value) = @_;

    $tag =~ s/-/_/g;

    # Initialize default property values.
    $parsedTags{$tag} = { defaultTagPropertyHash($tag) } if !defined($parsedTags{$tag});

    if ($property) {
        die "Unknown property $property for tag $tag\n" if !defined($parsedTags{$tag}{$property});
        # The code relies on JSInterfaceName deriving from interfaceName to check for custom JSInterfaceName.
        # So override JSInterfaceName if it was not already set.
        $parsedTags{$tag}{JSInterfaceName} = $value if $property eq "interfaceName" && $parsedTags{$tag}{JSInterfaceName} eq $parsedTags{$tag}{interfaceName};
        $parsedTags{$tag}{$property} = $value;
    }
}
# Per-entry callback for attribute .in files.  Rewrites 'x-webkit-foo'
# extension attributes to 'webkitfoo' (remembering the original spelling in
# %extensionAttrs), registers the attribute, and applies property settings.
sub attrsHandler
{
    my ($attr, $property, $value) = @_;
    # Translate HTML5 extension attributes of the form 'x-webkit-feature' to 'webkitfeature'.
    # We don't just check for the 'x-' prefix because there are attributes such as x-height
    # which should follow the default path below.
    if ($attr =~ m/^x-webkit-(.*)/) {
        my $newAttr = "webkit$1";
        $extensionAttrs{$newAttr} = $attr;
        $attr = $newAttr;
    }
    $attr =~ s/-/_/g;

    # Initialize default properties' values.
    $parsedAttrs{$attr} = {} if !defined($parsedAttrs{$attr});

    if ($property) {
        die "Unknown property $property for attribute $attr\n" if !defined($parsedAttrs{$attr}{$property});
        $parsedAttrs{$attr}{$property} = $value;
    }
}
# Callback for file-level "parameter = value" lines.  Lazily seeds
# %parameters with the defaults on first use, then stores the value,
# dying on unrecognized parameter names.
sub parametersHandler
{
    my ($parameter, $value) = @_;

    # Initialize default properties' values.
    %parameters = defaultParametersHash() if !(keys %parameters);

    die "Unknown parameter $parameter for tags/attrs\n" if !defined($parameters{$parameter});
    $parameters{$parameter} = $value;
}
## Support routines

# Returns the preprocessor command line, appending one -D flag per
# whitespace-separated token from --extraDefines (if any were given).
sub preprocessorCommand()
{
    return $preprocessor if $extraDefines eq 0;
    return $preprocessor . " -D" . join(" -D", split(" ", $extraDefines));
}
# Parses a tags/attrs .in file, filling the given hash via the supplied
# per-entry handler (file-level parameters go through parametersHandler).
# Inputs: 0) path to the .in file
#         1) reference to the hash to fill
#         2) code ref invoked for each tag/attr entry
#         3) if true, pipe the file through the C++ preprocessor first so
#            ENABLE(...)-conditional entries are filtered
# Returns: the (now filled) hash reference; dies if nothing was parsed.
sub readNames($$$$)
{
    my ($namesFile, $hashToFillRef, $handler, $usePreprocessor) = @_;

    my $names = new IO::File;
    if ($usePreprocessor) {
        # NOTE(review): two-arg piped open; the command is built from the
        # --preprocessor/--extraDefines options, so it trusts the caller's
        # command line.
        open($names, preprocessorCommand() . " " . $namesFile . "|") or die "Failed to open file: $namesFile";
    } else {
        open($names, $namesFile) or die "Failed to open file: $namesFile";
    }

    my $InParser = InFilesParser->new();
    # Fixed: '\&parametersHandler' had been corrupted to '\<pilcrow>metersHandler'
    # (an HTML-entity mangling of '&para'), which is a syntax error.
    $InParser->parse($names, \&parametersHandler, $handler);

    close($names);
    die "Failed to read names from file: $namesFile" if (keys %{$hashToFillRef} == 0);
    return $hashToFillRef;
}
# Parses the attribute .in file into the %parsedAttrs accumulator (cleared
# first, since this is called once raw and once preprocessed) and returns it.
sub readAttrs($$)
{
    my ($namesFile, $usePreprocessor) = @_;
    %parsedAttrs = ();
    my $filled = readNames($namesFile, \%parsedAttrs, \&attrsHandler, $usePreprocessor);
    return $filled;
}
# Parses the tag .in file into the %parsedTags accumulator (cleared first,
# since this is called once raw and once preprocessed) and returns it.
sub readTags($$)
{
    my ($namesFile, $usePreprocessor) = @_;
    %parsedTags = ();
    my $filled = readNames($namesFile, \%parsedTags, \&tagsHandler, $usePreprocessor);
    return $filled;
}
# Prints one "MACRO name SUFFIX;" line per name, in sorted order.
# NOTE(review): output goes to the global filehandle F (not the $F
# parameter), matching the other print helpers in this script — confirm
# before changing.  Removed an unused copy of the names hash.
sub printMacros
{
    my ($F, $macro, $suffix, $namesRef) = @_;
    for my $name (sort keys %$namesRef) {
        print F "$macro $name","$suffix;\n";
    }
}
# Returns true if the interface is the generic <Namespace>Element class,
# which needs no specialized constructor/wrapper.
# NOTE(review): despite the parameter name, callers (buildConstructorMap)
# pass an interface name here, not a tag name — confirm.
sub usesDefaultWrapper
{
    my $tagName = shift;
    return $tagName eq $parameters{namespace} . "Element";
}
# Build a direct mapping from the tags to the Element to create, excluding
# Element that have not constructor.
# Returns: hash of tag name -> lowercase constructor base name (the
# interface name with the namespace prefix and "Element" suffix stripped).
sub buildConstructorMap
{
    my %tagConstructorMap = ();
    for my $tagName (keys %enabledTags) {
        my $interfaceName = $enabledTags{$tagName}{interfaceName};
        # tags using the generic <Namespace>Element need no constructor
        next if (usesDefaultWrapper($interfaceName));

        if ($enabledTags{$tagName}{mapToTagName}) {
            # only one level of tag mapping is supported
            die "Cannot handle multiple mapToTagName for $tagName\n" if $enabledTags{$enabledTags{$tagName}{mapToTagName}}{mapToTagName};
            $interfaceName = $enabledTags{ $enabledTags{$tagName}{mapToTagName} }{interfaceName};
        }

        # Chop the string to keep the interesting part.
        $interfaceName =~ s/$parameters{namespace}(.*)Element/$1/;
        $tagConstructorMap{$tagName} = lc($interfaceName);
    }

    return %tagConstructorMap;
}
# Helper method that print the constructor's signature avoiding
# unneeded arguments.
# Inputs: 0) output filehandle  1) tag name  2) constructor base name
#         3) name to use for the QualifiedName parameter in generated code
sub printConstructorSignature
{
    my ($F, $tagName, $constructorName, $constructorTagName) = @_;

    print F "static PassRefPtr<$parameters{namespace}Element> ${constructorName}Constructor(const QualifiedName& $constructorTagName, Document* document";
    if ($parameters{namespace} eq "HTML") {
        # HTML constructors always take a form-element slot; it is named
        # only when the tag's constructor actually uses it
        print F ", HTMLFormElement*";
        print F " formElement" if $enabledTags{$tagName}{constructorNeedsFormElement};
    }
    print F ", bool";
    print F " createdByParser" if $enabledTags{$tagName}{constructorNeedsCreatedByParser};
    print F ")\n{\n";
}
# Helper method to dump the constructor interior and call the
# Element constructor with the right arguments.
# The variable names should be kept in sync with the previous method.
sub printConstructorInterior
{
    my ($F, $tagName, $interfaceName, $constructorTagName) = @_;

    # Handle media elements.
    if ($enabledTags{$tagName}{wrapperOnlyIfMediaIsAvailable}) {
        # fall back to a plain HTMLElement when media support is unavailable
        print F <<END
Settings* settings = document->settings();
if (!MediaPlayer::isAvailable() || (settings && !settings->isMediaEnabled()))
return HTMLElement::create($constructorTagName, document);
END
;
    }

    # Call the constructor with the right parameters.
    print F "    return ${interfaceName}::create($constructorTagName, document";
    print F ", formElement" if $enabledTags{$tagName}{constructorNeedsFormElement};
    print F ", createdByParser" if $enabledTags{$tagName}{constructorNeedsCreatedByParser};
    print F ");\n}\n\n";
}
# Emits one C++ constructor function per distinct interface (wrapped in
# #if ENABLE(...) guards when conditional), plus special forwarding
# constructors for tags that map to another tag's interface.
sub printConstructors
{
    my ($F, $tagConstructorMapRef) = @_;
    my %tagConstructorMap = %$tagConstructorMapRef;

    # This is to avoid generating the same constructor several times.
    my %uniqueTags = ();
    for my $tagName (sort keys %tagConstructorMap) {
        my $interfaceName = $enabledTags{$tagName}{interfaceName};

        # Ignore the mapped tag
        # FIXME: It could be moved inside this loop but was split for readibility.
        next if (defined($uniqueTags{$interfaceName}) || $enabledTags{$tagName}{mapToTagName});
        $uniqueTags{$interfaceName} = '1';

        my $conditional = $enabledTags{$tagName}{conditional};
        if ($conditional) {
            # '&'-separated conditionals become "ENABLE(A) && ENABLE(B)"
            my $conditionalString = "ENABLE(" . join(") && ENABLE(", split(/&/, $conditional)) . ")";
            print F "#if ${conditionalString}\n\n";
        }

        printConstructorSignature($F, $tagName, $tagConstructorMap{$tagName}, "tagName");
        printConstructorInterior($F, $tagName, $interfaceName, "tagName");

        if ($conditional) {
            print F "#endif\n";
        }
    }

    # Mapped tag name uses a special wrapper to keep their prefix and namespaceURI while using the mapped localname.
    for my $tagName (sort keys %tagConstructorMap) {
        if ($enabledTags{$tagName}{mapToTagName}) {
            my $mappedName = $enabledTags{$tagName}{mapToTagName};
            printConstructorSignature($F, $mappedName, $mappedName . "To" . $tagName, "tagName");
            printConstructorInterior($F, $mappedName, $enabledTags{$mappedName}{interfaceName}, "QualifiedName(tagName.prefix(), ${mappedName}Tag.localName(), tagName.namespaceURI())");
        }
    }
}
# Emits one addTag() registration per tag, wiring the tag's QualifiedName to
# its constructor function.  Conditional tags are wrapped in the matching
# ENABLE() guards; mapped tags register the mapping wrapper constructor.
sub printFunctionInits
{
    my ($F, $mapRef) = @_;
    my %constructorOf = %$mapRef;

    for my $tagName (sort keys %constructorOf) {
        my $conditional = $enabledTags{$tagName}{conditional};

        if ($conditional) {
            my $conditionalString = "ENABLE(" . join(") && ENABLE(", split(/&/, $conditional)) . ")";
            print F "#if ${conditionalString}\n";
        }

        # Mapped tags were given a "<mapped>To<tag>Constructor" wrapper.
        my $constructorName = $enabledTags{$tagName}{mapToTagName}
            ? $enabledTags{$tagName}{mapToTagName} . "To" . $tagName . "Constructor"
            : $constructorOf{$tagName} . "Constructor";
        print F " addTag(${tagName}Tag, $constructorName);\n";

        print F "#endif\n\n" if $conditional;
    }
}
# SVG filter-effect tags like "feGaussianBlur" map to C++ classes named
# "FEGaussianBlur": upper-case a leading "fe" prefix so generated class
# names follow that convention.  Names without the prefix pass through.
sub svgCapitalizationHacks
{
    my ($name) = @_;
    if ($name =~ /^fe(.+)$/) {
        return "FE" . ucfirst $1;
    }
    return $name;
}
# Converts a tag/attribute identifier to UpperCamelCase: underscores act as
# word separators ("accept_charset" -> "AcceptCharset").  For the SVG
# namespace the "fe" capitalization hack is applied first.
sub upperCaseName
{
    my ($name) = @_;
    $name = svgCapitalizationHacks($name) if $parameters{namespace} eq "SVG";
    # Capitalize every underscore-separated segment and join; the final
    # ucfirst also covers names with no underscores at all.
    return ucfirst join '', map { ucfirst } split /_/, $name;
}
# Writes the shared "auto-generated, do not edit" banner and BSD-style
# license block at the top of every generated file.
# NOTE(review): the $F argument is unpacked but unused here — the print
# below targets the bareword filehandle F that the callers opened.
sub printLicenseHeader
{
    my $F = shift;

    print F "/*
* THIS FILE WAS AUTOMATICALLY GENERATED, DO NOT EDIT.
*
* This file was generated by the dom/make_names.pl script.
*
* Copyright (C) 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
";
}
# Generates the <Namespace>Names.h header: extern declarations for the
# namespace URI AtomicString and one QualifiedName global per tag/attribute
# (hidden behind DOM_*NAMES_HIDE_GLOBALS for the static-constructor trick),
# the get<Namespace>Tags()/Attrs() table accessors, and the init() entry
# point.  Writes through the bareword filehandle F, as all emitters do.
sub printNamesHeaderFile
{
    my $headerPath = shift;
    my $F;

    # Three-argument open with an explicit error check: the original 2-arg,
    # unchecked open would silently generate nothing on a bad output path.
    open F, '>', $headerPath or die "Failed to open $headerPath for writing: $!";

    printLicenseHeader($F);
    print F "#ifndef DOM_$parameters{namespace}NAMES_H\n";
    print F "#define DOM_$parameters{namespace}NAMES_H\n\n";
    print F "#include \"QualifiedName.h\"\n\n";
    print F "namespace WebCore {\n\n namespace $parameters{namespace}Names {\n\n";

    my $lowerNamespace = lc($parameters{namespacePrefix});
    print F "#ifndef DOM_$parameters{namespace}NAMES_HIDE_GLOBALS\n";
    print F "// Namespace\n";
    print F "extern const WTF::AtomicString ${lowerNamespace}NamespaceURI;\n\n";

    if (keys %allTags) {
        print F "// Tags\n";
        printMacros($F, "extern const WebCore::QualifiedName", "Tag", \%allTags);
    }
    if (keys %allAttrs) {
        print F "// Attributes\n";
        printMacros($F, "extern const WebCore::QualifiedName", "Attr", \%allAttrs);
    }
    print F "#endif\n\n";

    if (keys %allTags) {
        print F "WebCore::QualifiedName** get$parameters{namespace}Tags(size_t* size);\n";
    }
    if (keys %allAttrs) {
        print F "WebCore::QualifiedName** get$parameters{namespace}Attrs(size_t* size);\n";
    }

    print F "\nvoid init();\n\n";
    print F "} }\n\n";
    print F "#endif\n\n";

    # Check close so buffered write errors (e.g. a full disk) are not lost.
    close F or die "Failed to write $headerPath: $!";
}
# Generates the <Namespace>Names.cpp implementation: DEFINE_GLOBAL
# definitions for the namespace URI and for every tag/attribute
# QualifiedName, the get<Namespace>Tags()/Attrs() pointer tables, and the
# init() routine that constructs all globals exactly once via placement new.
sub printNamesCppFile
{
    my $cppPath = shift;
    my $F;

    # Three-argument open with an explicit error check: the original 2-arg,
    # unchecked open would silently generate nothing on a bad output path.
    open F, '>', $cppPath or die "Failed to open $cppPath for writing: $!";

    printLicenseHeader($F);

    my $lowerNamespace = lc($parameters{namespacePrefix});

    # Under SKIP_STATIC_CONSTRUCTORS_ON_GCC the globals are declared hidden
    # and are initialized by init() below instead of static constructors.
    print F "#include \"config.h\"\n";
    print F "#ifdef SKIP_STATIC_CONSTRUCTORS_ON_GCC\n";
    print F "#define DOM_$parameters{namespace}NAMES_HIDE_GLOBALS 1\n";
    print F "#else\n";
    print F "#define QNAME_DEFAULT_CONSTRUCTOR 1\n";
    print F "#endif\n\n";
    print F "#include \"$parameters{namespace}Names.h\"\n\n";
    print F "#include <wtf/StaticConstructors.h>\n";
    print F "namespace WebCore {\n\n namespace $parameters{namespace}Names {
using namespace WebCore;
DEFINE_GLOBAL(AtomicString, ${lowerNamespace}NamespaceURI, \"$parameters{namespaceURI}\")
";

    if (keys %allTags) {
        print F "// Tags\n";
        for my $name (sort keys %allTags) {
            print F "DEFINE_GLOBAL(QualifiedName, ", $name, "Tag, nullAtom, \"$name\", ${lowerNamespace}NamespaceURI);\n";
        }
        # Table of pointers to every tag QualifiedName plus its size.
        print F "\n\nWebCore::QualifiedName** get$parameters{namespace}Tags(size_t* size)\n";
        print F "{\n static WebCore::QualifiedName* $parameters{namespace}Tags[] = {\n";
        for my $name (sort keys %allTags) {
            print F " (WebCore::QualifiedName*)&${name}Tag,\n";
        }
        print F " };\n";
        print F " *size = ", scalar(keys %allTags), ";\n";
        print F " return $parameters{namespace}Tags;\n";
        print F "}\n";
    }

    if (keys %allAttrs) {
        print F "\n// Attributes\n";
        for my $name (sort keys %allAttrs) {
            print F "DEFINE_GLOBAL(QualifiedName, ", $name, "Attr, nullAtom, \"$name\", ${lowerNamespace}NamespaceURI);\n";
        }
        print F "\n\nWebCore::QualifiedName** get$parameters{namespace}Attrs(size_t* size)\n";
        print F "{\n static WebCore::QualifiedName* $parameters{namespace}Attr[] = {\n";
        for my $name (sort keys %allAttrs) {
            print F " (WebCore::QualifiedName*)&${name}Attr,\n";
        }
        print F " };\n";
        print F " *size = ", scalar(keys %allAttrs), ";\n";
        print F " return $parameters{namespace}Attr;\n";
        print F "}\n";
    }

    # init() is idempotent and placement-news every global.
    print F "\nvoid init()
{
static bool initialized = false;
if (initialized)
return;
initialized = true;
// Use placement new to initialize the globals.
AtomicString::init();
";

    print(F " AtomicString ${lowerNamespace}NS(\"$parameters{namespaceURI}\");\n\n");
    print(F " // Namespace\n");
    print(F " new ((void*)&${lowerNamespace}NamespaceURI) AtomicString(${lowerNamespace}NS);\n\n");
    if (keys %allTags) {
        my $tagsNamespace = $parameters{tagsNullNamespace} ? "nullAtom" : "${lowerNamespace}NS";
        printDefinitions($F, \%allTags, "tags", $tagsNamespace);
    }
    if (keys %allAttrs) {
        my $attrsNamespace = $parameters{attrsNullNamespace} ? "nullAtom" : "${lowerNamespace}NS";
        printDefinitions($F, \%allAttrs, "attributes", $attrsNamespace);
    }
    print F "}\n\n} }\n\n";

    # Check close so buffered write errors (e.g. a full disk) are not lost.
    close F or die "Failed to write $cppPath: $!";
}
# Emits the #include lines for the JS/V8 wrapper classes of all enabled
# tags.  Each wrapper header is included only once, tags using the default
# wrapper need no dedicated header, and conditional tags are emitted
# elsewhere inside their feature guards.
sub printJSElementIncludes
{
    my ($F, $wrapperFactoryType) = @_;

    my %includedWrappers;
    for my $tagName (sort keys %enabledTags) {
        my $JSInterfaceName = $enabledTags{$tagName}{JSInterfaceName};
        next if defined($includedWrappers{$JSInterfaceName}) || usesDefaultJSWrapper($tagName);
        # Feature-define-specific #includes are handled separately; note the
        # wrapper is intentionally NOT marked as seen in that case, so an
        # unconditional tag sharing the interface still gets its include.
        next if $enabledTags{$tagName}{conditional};

        $includedWrappers{$JSInterfaceName} = 1;
        print F "#include \"${wrapperFactoryType}${JSInterfaceName}.h\"\n";
    }
}
# Emits one #include per distinct element interface among the enabled tags.
# Conditional tags are skipped here; their includes are emitted inside the
# matching feature guards by printConditionalElementIncludes().
sub printElementIncludes
{
    my ($F) = @_;

    my %includedInterfaces;
    for my $tagName (sort keys %enabledTags) {
        my $interfaceName = $enabledTags{$tagName}{interfaceName};
        next if defined($includedInterfaces{$interfaceName});
        # Feature-define-specific #includes are handled separately; the
        # interface is intentionally NOT marked as seen in that case, so an
        # unconditional tag sharing it still gets its include.
        next if $enabledTags{$tagName}{conditional};
        $includedInterfaces{$interfaceName} = 1;
        print F "#include \"${interfaceName}.h\"\n";
    }
}
# Emits, for each feature condition, a guarded block of #includes covering
# the element interfaces (and, when a wrapper factory type is supplied, the
# wrapper classes) that are compiled only under that feature.  Interfaces
# already pulled in by an unconditional tag are not repeated.
sub printConditionalElementIncludes
{
    my ($F, $wrapperFactoryType) = @_;

    my %conditionals;
    my %unconditionalElementIncludes;
    my %unconditionalJSElementIncludes;

    # Partition the interfaces into per-condition buckets vs. the set that
    # unconditional tags already include.
    for my $tagName (keys %enabledTags) {
        my $conditional = $enabledTags{$tagName}{conditional};
        my $interfaceName = $enabledTags{$tagName}{interfaceName};
        my $JSInterfaceName = $enabledTags{$tagName}{JSInterfaceName};
        if (!$conditional) {
            $unconditionalElementIncludes{$interfaceName} = 1;
            $unconditionalJSElementIncludes{$JSInterfaceName} = 1;
            next;
        }
        $conditionals{$conditional}{interfaceNames}{$interfaceName} = 1;
        $conditionals{$conditional}{JSInterfaceNames}{$JSInterfaceName} = 1;
    }

    for my $conditional (sort keys %conditionals) {
        print F "\n#if ENABLE($conditional)\n";
        for my $interfaceName (sort keys %{$conditionals{$conditional}{interfaceNames}}) {
            next if $unconditionalElementIncludes{$interfaceName};
            print F "#include \"$interfaceName.h\"\n";
        }
        if ($wrapperFactoryType) {
            for my $JSInterfaceName (sort keys %{$conditionals{$conditional}{JSInterfaceNames}}) {
                next if $unconditionalJSElementIncludes{$JSInterfaceName};
                print F "#include \"$wrapperFactoryType$JSInterfaceName.h\"\n";
            }
        }
        print F "#endif\n";
    }
}
# Emits the placement-new initializations for one group of QualifiedName
# globals ("tags" or "attributes").  An underscore in the Perl identifier
# maps to a hyphen in the real DOM name unless %extensionAttrs supplies an
# explicit spelling.
sub printDefinitions
{
    my ($F, $namesRef, $type, $namespaceURI) = @_;

    # "tags" -> "Tag", "attributes" -> "Attr": the suffix on the globals.
    my $singularType = substr($type, 0, -1);
    my $shortType = substr($singularType, 0, 4);
    my $shortCamelType = ucfirst($shortType);
    my $shortUpperType = uc($shortType);

    print F " // " . ucfirst($type) . "\n";

    for my $name (sort keys %$namesRef) {
        my $realName = $extensionAttrs{$name};
        unless ($realName) {
            $realName = $name;
            $realName =~ s/_/-/g;
        }
        print F " new ((void*)&${name}${shortCamelType}) QualifiedName(nullAtom, \"$realName\", $namespaceURI);\n";
    }
}
## ElementFactory routines
# Generates the <Namespace>ElementFactory.cpp implementation: one static
# create function per element constructor, a lazily built map from tag local
# names to those functions, and the public create<Namespace>Element() entry
# point that dispatches through the map and falls back to the namespace's
# generic element class for unknown tags.
sub printFactoryCppFile
{
    my $cppPath = shift;
    my $F;

    # Three-argument open with an explicit error check: the original 2-arg,
    # unchecked open would silently generate nothing on a bad output path.
    open F, '>', $cppPath or die "Failed to open $cppPath for writing: $!";

    printLicenseHeader($F);

    print F <<END
#include "config.h"
END
;

    print F "\n#if $parameters{guardFactoryWith}\n\n" if $parameters{guardFactoryWith};

    print F <<END
#include "$parameters{namespace}ElementFactory.h"
#include "$parameters{namespace}Names.h"
END
;

    printElementIncludes($F);

    print F "\n#include <wtf/HashMap.h>\n";

    printConditionalElementIncludes($F);

    print F <<END
#if ENABLE(DASHBOARD_SUPPORT) || ENABLE(VIDEO)
#include "Document.h"
#include "Settings.h"
#endif
namespace WebCore {
using namespace $parameters{namespace}Names;
END
;

    # HTML element constructors additionally receive the enclosing form.
    print F "typedef PassRefPtr<$parameters{namespace}Element> (*ConstructorFunction)(const QualifiedName&, Document*";
    print F ", HTMLFormElement*" if $parameters{namespace} eq "HTML";
    print F ", bool createdByParser);\n";
    print F <<END
typedef HashMap<AtomicStringImpl*, ConstructorFunction> FunctionMap;
static FunctionMap* gFunctionMap = 0;
END
;

    my %tagConstructorMap = buildConstructorMap();
    printConstructors($F, \%tagConstructorMap);

    print F <<END
static void addTag(const QualifiedName& tag, ConstructorFunction func)
{
gFunctionMap->set(tag.localName().impl(), func);
}
static void createFunctionMap()
{
ASSERT(!gFunctionMap);
// Create the table.
gFunctionMap = new FunctionMap;
// Populate it with constructor functions.
END
;

    printFunctionInits($F, \%tagConstructorMap);

    print F "}\n";

    print F "\nPassRefPtr<$parameters{namespace}Element> $parameters{namespace}ElementFactory::create$parameters{namespace}Element(const QualifiedName& qName, Document* document";
    print F ", HTMLFormElement* formElement" if $parameters{namespace} eq "HTML";
    print F ", bool createdByParser)\n{\n";
    print F <<END
if (!document)
return 0;
END
;

    # Non-HTML factories bail out in Dashboard backward-compatibility mode.
    if ($parameters{namespace} ne "HTML") {
        print F <<END
#if ENABLE(DASHBOARD_SUPPORT)
Settings* settings = document->settings();
if (settings && settings->usesDashboardBackwardCompatibilityMode())
return 0;
#endif
END
;
    }

    print F <<END
if (!gFunctionMap)
createFunctionMap();
if (ConstructorFunction function = gFunctionMap->get(qName.localName().impl()))
END
;

    if ($parameters{namespace} eq "HTML") {
        print F " return function(qName, document, formElement, createdByParser);\n";
    } else {
        print F " return function(qName, document, createdByParser);\n";
    }

    # Unknown tags fall back to the generic element class for the namespace.
    print F " return $parameters{namespace}Element::create(qName, document);\n";

    print F <<END
}
} // namespace WebCore
END
;

    print F "#endif\n" if $parameters{guardFactoryWith};

    # Check close so buffered write errors (e.g. a full disk) are not lost.
    close F or die "Failed to write $cppPath: $!";
}
# Generates the <Namespace>ElementFactory.h header declaring the factory
# class and its static create<Namespace>Element() method (with the extra
# HTMLFormElement parameter in the HTML namespace).
sub printFactoryHeaderFile
{
    my $headerPath = shift;
    my $F;

    # Three-argument open with an explicit error check: the original 2-arg,
    # unchecked open would silently generate nothing on a bad output path.
    open F, '>', $headerPath or die "Failed to open $headerPath for writing: $!";

    printLicenseHeader($F);

    print F <<END
#ifndef $parameters{namespace}ElementFactory_h
#define $parameters{namespace}ElementFactory_h
#include <wtf/Forward.h>
#include <wtf/PassRefPtr.h>
namespace WebCore {
class Element;
class Document;
class QualifiedName;
}
namespace WebCore {
class $parameters{namespace}Element;
END
;

    print F " class HTMLFormElement;\n" if $parameters{namespace} eq "HTML";

    print F <<END
// The idea behind this class is that there will eventually be a mapping from namespace URIs to ElementFactories that can dispense
// elements. In a compound document world, the generic createElement function (will end up being virtual) will be called.
class $parameters{namespace}ElementFactory {
public:
PassRefPtr<Element> createElement(const WebCore::QualifiedName&, WebCore::Document*, bool createdByParser = true);
END
;

    print F " static PassRefPtr<$parameters{namespace}Element> create$parameters{namespace}Element(const WebCore::QualifiedName&, WebCore::Document*";
    print F ", HTMLFormElement* = 0" if $parameters{namespace} eq "HTML";
    print F ", bool createdByParser = true);\n";

    # Was "printf F<<END": the heredoc is literal output, and printf would
    # reinterpret any '%' in it as a format directive, so use print.
    print F <<END
};
}
#endif // $parameters{namespace}ElementFactory_h
END
;

    # Check close so buffered write errors (e.g. a full disk) are not lost.
    close F or die "Failed to write $headerPath: $!";
}
## Wrapper Factory routines
# Returns true when the tag has no dedicated JS wrapper class: either its
# JSInterfaceName is the namespace's generic Element class, or it is
# HTMLNoScriptElement (which deliberately reuses the default wrapper).
sub usesDefaultJSWrapper
{
    my ($name) = @_;

    my $wrapperClass = $enabledTags{$name}{JSInterfaceName};
    my $defaultClass = $parameters{namespace} . "Element";
    return $wrapperClass eq $defaultClass || $wrapperClass eq "HTMLNoScriptElement";
}
# Emits one static create*Wrapper() function per distinct JS wrapper class,
# for either the JSC ("JS") or V8 wrapper factory.  Media elements get an
# availability check that falls back to the generic element wrapper when no
# media engine is present; conditional tags are wrapped in ENABLE() guards.
#   $F                  - output filehandle
#   $wrapperFactoryType - "JS" or "V8"; selects which code shape is emitted
sub printWrapperFunctions
{
    my $F = shift;
    my $wrapperFactoryType = shift;

    my %tagsSeen;
    for my $tagName (sort keys %enabledTags) {
        # Avoid defining the same wrapper method twice.
        my $JSInterfaceName = $enabledTags{$tagName}{JSInterfaceName};
        next if defined($tagsSeen{$JSInterfaceName}) || usesDefaultJSWrapper($tagName);
        $tagsSeen{$JSInterfaceName} = 1;

        my $conditional = $enabledTags{$tagName}{conditional};
        if ($conditional) {
            # "A&B" in the tag file becomes "ENABLE(A) && ENABLE(B)".
            my $conditionalString = "ENABLE(" . join(") && ENABLE(", split(/&/, $conditional)) . ")";
            print F "#if ${conditionalString}\n\n";
        }

        if ($wrapperFactoryType eq "JS") {
            # Hack for the media tags
            # FIXME: This should have been done via a CustomWrapper attribute and a separate *Custom file.
            if ($enabledTags{$tagName}{wrapperOnlyIfMediaIsAvailable}) {
                print F <<END
static JSDOMWrapper* create${JSInterfaceName}Wrapper(ExecState* exec, JSDOMGlobalObject* globalObject, PassRefPtr<$parameters{namespace}Element> element)
{
Settings* settings = element->document()->settings();
if (!MediaPlayer::isAvailable() || (settings && !settings->isMediaEnabled()))
return CREATE_DOM_WRAPPER(exec, globalObject, $parameters{namespace}Element, element.get());
return CREATE_DOM_WRAPPER(exec, globalObject, ${JSInterfaceName}, element.get());
}
END
;
            } else {
                print F <<END
static JSDOMWrapper* create${JSInterfaceName}Wrapper(ExecState* exec, JSDOMGlobalObject* globalObject, PassRefPtr<$parameters{namespace}Element> element)
{
return CREATE_DOM_WRAPPER(exec, globalObject, ${JSInterfaceName}, element.get());
}
END
;
            }
        } elsif ($wrapperFactoryType eq "V8") {
            if ($enabledTags{$tagName}{wrapperOnlyIfMediaIsAvailable}) {
                print F <<END
static v8::Handle<v8::Value> create${JSInterfaceName}Wrapper($parameters{namespace}Element* element)
{
Settings* settings = element->document()->settings();
if (!MediaPlayer::isAvailable() || (settings && !settings->isMediaEnabled()))
return V8$parameters{namespace}Element::wrap(element);
return toV8(static_cast<${JSInterfaceName}*>(element));
}
END
;
            } else {
                print F <<END
static v8::Handle<v8::Value> create${JSInterfaceName}Wrapper($parameters{namespace}Element* element)
{
return toV8(static_cast<${JSInterfaceName}*>(element));
}
END
;
            }
        }

        if ($conditional) {
            print F "#endif\n\n";
        }
    }
}
# Generates the JS/V8 <Namespace>ElementWrapperFactory.cpp implementation:
# per-interface wrapper-creation functions (via printWrapperFunctions), a
# lazily built map from tag local names to those functions, and the
# createJS*/createV8* dispatch entry point that falls back to the generic
# element wrapper for tags without a dedicated one.
sub printWrapperFactoryCppFile
{
    my $outputDir = shift;
    my $wrapperFactoryType = shift;
    my $wrapperFactoryFileName = shift;
    my $F;

    my $cppPath = $outputDir . "/" . $wrapperFactoryType . $wrapperFactoryFileName . ".cpp";
    # Three-argument open with an explicit error check: the original 2-arg,
    # unchecked open would silently generate nothing on a bad output path.
    open F, '>', $cppPath or die "Failed to open $cppPath for writing: $!";

    printLicenseHeader($F);

    print F "#include \"config.h\"\n";
    print F "#include \"$wrapperFactoryType$parameters{namespace}ElementWrapperFactory.h\"\n";

    print F "\n#if $parameters{guardFactoryWith}\n\n" if $parameters{guardFactoryWith};

    printJSElementIncludes($F, $wrapperFactoryType);

    print F "\n#include \"$parameters{namespace}Names.h\"\n\n";

    printElementIncludes($F);

    print F "\n#include <wtf/StdLibExtras.h>\n";

    printConditionalElementIncludes($F, $wrapperFactoryType);

    print F <<END
#if ENABLE(VIDEO)
#include "Document.h"
#include "Settings.h"
#endif
END
;

    if ($wrapperFactoryType eq "JS") {
        print F <<END
using namespace JSC;
END
;
    } elsif ($wrapperFactoryType eq "V8") {
        print F <<END
#include "V8$parameters{namespace}Element.h"
#include <v8.h>
END
;
    }

    print F <<END
namespace WebCore {
using namespace $parameters{namespace}Names;
END
;

    # The wrapper-creation function signature differs per engine.
    if ($wrapperFactoryType eq "JS") {
        print F <<END
typedef JSDOMWrapper* (*Create$parameters{namespace}ElementWrapperFunction)(ExecState*, JSDOMGlobalObject*, PassRefPtr<$parameters{namespace}Element>);
END
;
    } elsif ($wrapperFactoryType eq "V8") {
        print F <<END
typedef v8::Handle<v8::Value> (*Create$parameters{namespace}ElementWrapperFunction)($parameters{namespace}Element*);
END
;
    }

    printWrapperFunctions($F, $wrapperFactoryType);

    if ($wrapperFactoryType eq "JS") {
        print F <<END
JSDOMWrapper* createJS$parameters{namespace}Wrapper(ExecState* exec, JSDOMGlobalObject* globalObject, PassRefPtr<$parameters{namespace}Element> element)
{
typedef HashMap<WTF::AtomicStringImpl*, Create$parameters{namespace}ElementWrapperFunction> FunctionMap;
DEFINE_STATIC_LOCAL(FunctionMap, map, ());
if (map.isEmpty()) {
END
;
    } elsif ($wrapperFactoryType eq "V8") {
        print F <<END
v8::Handle<v8::Value> createV8$parameters{namespace}Wrapper($parameters{namespace}Element* element, bool forceNewObject)
{
typedef HashMap<WTF::AtomicStringImpl*, Create$parameters{namespace}ElementWrapperFunction> FunctionMap;
DEFINE_STATIC_LOCAL(FunctionMap, map, ());
if (map.isEmpty()) {
END
;
    }

    # Register one wrapper-creation function per tag with its own wrapper.
    for my $tag (sort keys %enabledTags) {
        # Do not add the name to the map if it does not have a JS wrapper constructor or uses the default wrapper.
        next if usesDefaultJSWrapper($tag, \%enabledTags);

        my $conditional = $enabledTags{$tag}{conditional};
        if ($conditional) {
            my $conditionalString = "ENABLE(" . join(") && ENABLE(", split(/&/, $conditional)) . ")";
            print F "#if ${conditionalString}\n";
        }

        my $ucTag = $enabledTags{$tag}{JSInterfaceName};
        print F " map.set(${tag}Tag.localName().impl(), create${ucTag}Wrapper);\n";

        if ($conditional) {
            print F "#endif\n";
        }
    }

    print F <<END
}
Create$parameters{namespace}ElementWrapperFunction createWrapperFunction = map.get(element->localName().impl());
if (createWrapperFunction)
END
;
    if ($wrapperFactoryType eq "JS") {
        print F <<END
return createWrapperFunction(exec, globalObject, element);
return CREATE_DOM_WRAPPER(exec, globalObject, $parameters{namespace}Element, element.get());
END
;
    } elsif ($wrapperFactoryType eq "V8") {
        print F <<END
return createWrapperFunction(element);
return V8$parameters{namespace}Element::wrap(element, forceNewObject);
END
;
    }
    print F <<END
}
}
END
;

    print F "#endif\n" if $parameters{guardFactoryWith};

    # Check close so buffered write errors (e.g. a full disk) are not lost.
    close F or die "Failed to write $cppPath: $!";
}
# Generates the JS/V8 <Namespace>ElementWrapperFactory.h header declaring
# the createJS*/createV8* wrapper dispatch function for this namespace.
sub printWrapperFactoryHeaderFile
{
    my $outputDir = shift;
    my $wrapperFactoryType = shift;
    my $wrapperFactoryFileName = shift;
    my $F;

    my $headerPath = $outputDir . "/" . $wrapperFactoryType . $wrapperFactoryFileName . ".h";
    # Three-argument open with an explicit error check: the original 2-arg,
    # unchecked open would silently generate nothing on a bad output path.
    open F, '>', $headerPath or die "Failed to open $headerPath for writing: $!";

    printLicenseHeader($F);

    print F "#ifndef $wrapperFactoryType$parameters{namespace}ElementWrapperFactory_h\n";
    print F "#define $wrapperFactoryType$parameters{namespace}ElementWrapperFactory_h\n\n";

    print F "#if $parameters{guardFactoryWith}\n" if $parameters{guardFactoryWith};

    if ($wrapperFactoryType eq "JS") {
        print F <<END
#include <wtf/Forward.h>
namespace JSC {
class ExecState;
}
namespace WebCore {
class JSDOMWrapper;
class JSDOMGlobalObject;
class $parameters{namespace}Element;
JSDOMWrapper* createJS$parameters{namespace}Wrapper(JSC::ExecState*, JSDOMGlobalObject*, PassRefPtr<$parameters{namespace}Element>);
}
END
;
    } elsif ($wrapperFactoryType eq "V8") {
        print F <<END
#include <v8.h>
namespace WebCore {
class $parameters{namespace}Element;
v8::Handle<v8::Value> createV8$parameters{namespace}Wrapper($parameters{namespace}Element*, bool);
}
END
;
    }

    print F "#endif // $parameters{guardFactoryWith}\n\n" if $parameters{guardFactoryWith};
    print F "#endif // $wrapperFactoryType$parameters{namespace}ElementWrapperFactory_h\n";

    # Check close so buffered write errors (e.g. a full disk) are not lost.
    close F or die "Failed to write $headerPath: $!";
}
| xlsdg/phantomjs-linux-armv6l | src/qt/src/3rdparty/webkit/Source/WebCore/dom/make_names.pl | Perl | bsd-3-clause | 35,259 |
#============================================================= -*-Perl-*-
#
# Template::Constants.pm
#
# DESCRIPTION
# Definition of constants for the Template Toolkit.
#
# AUTHOR
# Andy Wardley <abw@wardley.org>
#
# COPYRIGHT
# Copyright (C) 1996-2007 Andy Wardley. All Rights Reserved.
#
# This module is free software; you can redistribute it and/or
# modify it under the same terms as Perl itself.
#
#============================================================================
package Template::Constants;
require Exporter;
use strict;
use warnings;
use Exporter;
# Perl::MinimumVersion seems to think this is a Perl 5.008ism...
# use base qw( Exporter );
use vars qw( @EXPORT_OK %EXPORT_TAGS );
use vars qw( $DEBUG_OPTIONS @STATUS @ERROR @CHOMP @DEBUG @ISA );
# ... so we'll do it the Old Skool way just to keep it quiet
@ISA = qw( Exporter );
our $VERSION = 2.75;
#========================================================================
# ----- EXPORTER -----
#========================================================================
# STATUS constants returned by directives
# (0..4 are successful outcomes; STATUS_ERROR signals a hard failure)
use constant STATUS_OK => 0; # ok
use constant STATUS_RETURN => 1; # ok, block ended by RETURN
use constant STATUS_STOP => 2; # ok, stopped by STOP
use constant STATUS_DONE => 3; # ok, iterator done
use constant STATUS_DECLINED => 4; # ok, declined to service request
use constant STATUS_ERROR => 255; # error condition

# ERROR constants for indicating exception types
# (these become the 'type' of thrown exceptions; ERROR_RETURN is the
# pseudo-exception used to propagate a status code up the call stack)
use constant ERROR_RETURN => 'return'; # return a status code
use constant ERROR_FILE => 'file'; # file error: I/O, parse, recursion
use constant ERROR_VIEW => 'view'; # view error
use constant ERROR_UNDEF => 'undef'; # undefined variable value used
use constant ERROR_PERL => 'perl'; # error in [% PERL %] block
use constant ERROR_FILTER => 'filter'; # filter error
use constant ERROR_PLUGIN => 'plugin'; # plugin error

# CHOMP constants for PRE_CHOMP and POST_CHOMP
use constant CHOMP_NONE => 0; # do not remove whitespace
use constant CHOMP_ALL => 1; # remove whitespace up to newline
use constant CHOMP_ONE => 1; # new name for CHOMP_ALL
use constant CHOMP_COLLAPSE => 2; # collapse whitespace to a single space
use constant CHOMP_GREEDY => 3; # remove all whitespace including newlines

# DEBUG constants to enable various debugging options
# (single-bit values so they can be OR'd together; DEBUG_ALL == 2047 is the
# union of every flag above it)
use constant DEBUG_OFF => 0; # do nothing
use constant DEBUG_ON => 1; # basic debugging flag
use constant DEBUG_UNDEF => 2; # throw undef on undefined variables
use constant DEBUG_VARS => 4; # general variable debugging
use constant DEBUG_DIRS => 8; # directive debugging
use constant DEBUG_STASH => 16; # general stash debugging
use constant DEBUG_CONTEXT => 32; # context debugging
use constant DEBUG_PARSER => 64; # parser debugging
use constant DEBUG_PROVIDER => 128; # provider debugging
use constant DEBUG_PLUGINS => 256; # plugins debugging
use constant DEBUG_FILTERS => 512; # filters debugging
use constant DEBUG_SERVICE => 1024; # context debugging
use constant DEBUG_ALL => 2047; # everything
# extra debugging flags
# DEBUG_CALLER is a single bit above the regular flag range.  DEBUG_FLAGS is
# the bitmask covering every bit below it, used to extract the flag bits
# from a combined DEBUG value; 4096 here would equal DEBUG_CALLER and mask
# out everything, so the correct mask is 4095 (0xFFF), matching upstream
# Template Toolkit.
use constant DEBUG_CALLER => 4096; # add caller file/line
use constant DEBUG_FLAGS => 4095; # bitmask to extract flags
# Bidirectional lookup table for debug flags.  Each four-element group of
# the form
#     &DEBUG_X => 'x' => 'x' => &DEBUG_X
# collapses into the two hash pairs (value => name) and (name => value),
# so debug_flags() below can translate a numeric bitmask into flag names
# and a list of flag names back into a bitmask.
$DEBUG_OPTIONS = {
    &DEBUG_OFF => off => off => &DEBUG_OFF,
    &DEBUG_ON => on => on => &DEBUG_ON,
    &DEBUG_UNDEF => undef => undef => &DEBUG_UNDEF,
    &DEBUG_VARS => vars => vars => &DEBUG_VARS,
    &DEBUG_DIRS => dirs => dirs => &DEBUG_DIRS,
    &DEBUG_STASH => stash => stash => &DEBUG_STASH,
    &DEBUG_CONTEXT => context => context => &DEBUG_CONTEXT,
    &DEBUG_PARSER => parser => parser => &DEBUG_PARSER,
    &DEBUG_PROVIDER => provider => provider => &DEBUG_PROVIDER,
    &DEBUG_PLUGINS => plugins => plugins => &DEBUG_PLUGINS,
    &DEBUG_FILTERS => filters => filters => &DEBUG_FILTERS,
    &DEBUG_SERVICE => service => service => &DEBUG_SERVICE,
    &DEBUG_ALL => all => all => &DEBUG_ALL,
    &DEBUG_CALLER => caller => caller => &DEBUG_CALLER,
};

# Constant names grouped into the export tag sets declared below.
@STATUS = qw( STATUS_OK STATUS_RETURN STATUS_STOP STATUS_DONE
              STATUS_DECLINED STATUS_ERROR );
@ERROR = qw( ERROR_FILE ERROR_VIEW ERROR_UNDEF ERROR_PERL
             ERROR_RETURN ERROR_FILTER ERROR_PLUGIN );
@CHOMP = qw( CHOMP_NONE CHOMP_ALL CHOMP_ONE CHOMP_COLLAPSE CHOMP_GREEDY );
@DEBUG = qw( DEBUG_OFF DEBUG_ON DEBUG_UNDEF DEBUG_VARS
             DEBUG_DIRS DEBUG_STASH DEBUG_CONTEXT DEBUG_PARSER
             DEBUG_PROVIDER DEBUG_PLUGINS DEBUG_FILTERS DEBUG_SERVICE
             DEBUG_ALL DEBUG_CALLER DEBUG_FLAGS );

# Everything is exported on request; the tag sets provide shortcuts like
# "use Template::Constants qw( :debug )".
@EXPORT_OK = ( @STATUS, @ERROR, @CHOMP, @DEBUG );
%EXPORT_TAGS = (
    'all' => [ @EXPORT_OK ],
    'status' => [ @STATUS ],
    'error' => [ @ERROR ],
    'chomp' => [ @CHOMP ],
    'debug' => [ @DEBUG ],
);
# Translates between the two debug-flag representations: given a numeric
# bitmask it returns the list of set flag names (or "a, b" in scalar
# context); given a string of flag names it returns the combined numeric
# bitmask.  Unknown flags are reported via $self->error().
#
# Callable as a class/object method with one argument, or as a plain
# function where the single argument doubles as the invocant.
#
# BUG FIX: the previous version created an (unused) copy of $flag and then
# mutated $flag itself.  Inside "foreach $flag (@DEBUG)" the loop variable
# ALIASES the array element, so the first numeric call destroyed the
# package-wide @DEBUG list for every subsequent call.  We now work on the
# copy, as the original comment intended.
sub debug_flags {
    my ($self, $debug) = @_;
    my (@flags, $flag, $value);
    $debug = $self unless defined($debug) || ref($self);

    if ($debug =~ /^\d+$/) {
        foreach $flag (@DEBUG) {
            next if $flag =~ /^DEBUG_(OFF|ALL|FLAGS)$/;

            # don't trash the original: $flag aliases the @DEBUG element
            my $name = $flag;
            $name =~ s/^DEBUG_//;
            $name = lc $name;
            return $self->error("no value for flag: $name")
                unless defined($value = $DEBUG_OPTIONS->{ $name });
            my $bit = $value;

            if ($debug & $bit) {
                # Map the numeric bit back to its lower-case name.
                $value = $DEBUG_OPTIONS->{ $bit };
                return $self->error("no value for flag: $bit") unless defined $value;
                push(@flags, $value);
            }
        }
        return wantarray ? @flags : join(', ', @flags);
    }
    else {
        @flags = split(/\W+/, $debug);
        $debug = 0;
        foreach $flag (@flags) {
            $value = $DEBUG_OPTIONS->{ $flag };
            return $self->error("unknown debug flag: $flag") unless defined $value;
            $debug |= $value;
        }
        return $debug;
    }
}
1;
__END__
=head1 NAME
Template::Constants - Defines constants for the Template Toolkit
=head1 SYNOPSIS
use Template::Constants qw( :status :error :all );
=head1 DESCRIPTION
The C<Template::Constants> modules defines, and optionally exports into the
caller's namespace, a number of constants used by the L<Template> package.
Constants may be used by specifying the C<Template::Constants> package
explicitly:
use Template::Constants;
print Template::Constants::STATUS_DECLINED;
Constants may be imported into the caller's namespace by naming them as
options to the C<use Template::Constants> statement:
use Template::Constants qw( STATUS_DECLINED );
print STATUS_DECLINED;
Alternatively, one of the following tagset identifiers may be specified
to import sets of constants: 'C<:status>', 'C<:error>', 'C<:all>'.
use Template::Constants qw( :status );
print STATUS_DECLINED;
Consult the documentation for the C<Exporter> module for more information
on exporting variables.
=head1 EXPORTABLE TAG SETS
The following tag sets and associated constants are defined:
:status
STATUS_OK # no problem, continue
STATUS_RETURN # ended current block then continue (ok)
STATUS_STOP # controlled stop (ok)
STATUS_DONE # iterator is all done (ok)
STATUS_DECLINED # provider declined to service request (ok)
STATUS_ERROR # general error condition (not ok)
:error
ERROR_RETURN # return a status code (e.g. 'stop')
ERROR_FILE # file error: I/O, parse, recursion
ERROR_VIEW # view error
ERROR_UNDEF # undefined variable value used
ERROR_PERL # error in [% PERL %] block
ERROR_FILTER # filter error
ERROR_PLUGIN # plugin error
:chomp # for PRE_CHOMP and POST_CHOMP
CHOMP_NONE # do not remove whitespace
CHOMP_ONE # remove whitespace to newline
CHOMP_ALL # old name for CHOMP_ONE (deprecated)
CHOMP_COLLAPSE # collapse whitespace to a single space
CHOMP_GREEDY # remove all whitespace including newlines
:debug
DEBUG_OFF # do nothing
DEBUG_ON # basic debugging flag
DEBUG_UNDEF # throw undef on undefined variables
DEBUG_VARS # general variable debugging
DEBUG_DIRS # directive debugging
DEBUG_STASH # general stash debugging
DEBUG_CONTEXT # context debugging
DEBUG_PARSER # parser debugging
DEBUG_PROVIDER # provider debugging
DEBUG_PLUGINS # plugins debugging
DEBUG_FILTERS # filters debugging
DEBUG_SERVICE # context debugging
DEBUG_ALL # everything
DEBUG_CALLER # add caller file/line info
DEBUG_FLAGS # bitmap used internally
:all
All the above constants.
=head1 AUTHOR
Andy Wardley E<lt>abw@wardley.orgE<gt> L<http://wardley.org/>
=head1 COPYRIGHT
Copyright (C) 1996-2007 Andy Wardley. All Rights Reserved.
This module is free software; you can redistribute it and/or
modify it under the same terms as Perl itself.
=head1 SEE ALSO
L<Template>, C<Exporter>
=cut
# Local Variables:
# mode: perl
# perl-indent-level: 4
# indent-tabs-mode: nil
# End:
#
# vim: expandtab shiftwidth=4:
| Dokaponteam/ITF_Project | xampp/perl/vendor/lib/Template/Constants.pm | Perl | mit | 9,748 |
package Moose::Error::Croak;
use strict;
use warnings;
our $VERSION = '0.93';
$VERSION = eval $VERSION;
our $AUTHORITY = 'cpan:STEVAN';
use base qw(Moose::Error::Default);
# Constructor: delegates straight to create_error_croak() (inherited from
# Moose::Error::Default) so errors are raised with Carp::croak semantics,
# i.e. reported from the caller's perspective.
sub new {
    my $self = shift;
    return $self->create_error_croak(@_);
}
__PACKAGE__
__END__
=pod
=head1 NAME
Moose::Error::Croak - Prefer C<croak>
=head1 SYNOPSIS
# Metaclass definition must come before Moose is used.
use metaclass (
metaclass => 'Moose::Meta::Class',
error_class => 'Moose::Error::Croak',
);
use Moose;
# ...
=head1 DESCRIPTION
This error class uses L<Carp/croak> to raise errors generated in your
metaclass.
=head1 METHODS
=over 4
=item new
Overrides L<Moose::Error::Default/new> to prefer C<croak>.
=back
=cut
| carlgao/lenga | images/lenny64-peon/usr/share/perl5/Moose/Error/Croak.pm | Perl | mit | 769 |
package Pdbc::Connection;
use strict;
use warnings;
our @ISA = qw(Exporter);
our @EXPORT = qw();
use DBI;
use Data::Dumper;
# Constructor: builds a connection-settings object.  Any of driver,
# database, host, port, user and password may be overridden by the caller;
# unspecified settings fall back to a local PostgreSQL default.
sub new {
    my ($class, %args) = @_;
    my %defaults = (
        driver   => 'Pg',
        database => 'postgres',
        host     => 'localhost',
        port     => 5432,
        user     => 'postgres',
        password => '',
    );
    # Caller-supplied values override the defaults key by key.
    my $self = { %defaults, %args };
    return bless $self, $class;
}
# Opens a DBI connection using this object's settings and caches the handle
# in $self->{handle}.  AutoCommit is disabled, so callers must commit or
# roll back their own transactions.  Dies on connection failure (the
# Japanese message reads "could not connect to the database").
# Returns the DBI database handle.
sub open {
    my $self = shift;
    my $dbh = DBI->connect("dbi:$self->{driver}:dbname=$self->{database};host=$self->{host};port=$self->{port}",
        $self->{user},
        $self->{password},
        { AutoCommit => 0 }
    ) or die "データベースに接続できませんでした.";
    # Cache the handle; the assignment is also the method's return value.
    $self->{handle} = $dbh;
}
# Disconnects the cached DBI handle and removes it from the object so a
# later open() starts from a clean state.
sub close {
    my ($self) = @_;
    my $handle = $self->{handle};
    $handle->disconnect();
    delete $self->{handle};
}
1; | duck8823/pdbc-gen | lib/Pdbc/Connection.pm | Perl | mit | 722 |
package Amon2::Plugin::Web::FillInFormLite;
use strict;
use warnings;

use Carp ();
use Amon2::Util;
use HTML::FillInForm::Lite;
# Plugin hook: when the plugin is loaded, installs a fillin_form() method on
# both the context class (form-filling via HTML filter trigger) and its
# response class (deprecated in-place body filling).
sub init {
    my ($class, $c, $conf) = @_;
    my $context_class = ref($c) || $c;
    Amon2::Util::add_method($context_class, 'fillin_form', \&_fillin_form2);
    my $response_class = ref($c->create_response());
    Amon2::Util::add_method($response_class, 'fillin_form', \&_fillin_form);
}
# Context-level filler, installed as $c->fillin_form(@stuff).  Registers an
# HTML_FILTER trigger whose closure captures @stuff and fills the rendered
# HTML with HTML::FillInForm::Lite just before it is sent.
sub _fillin_form2 {
    my ($self, @stuff) = @_;
    my $filter = sub {
        my ($context, $html) = @_;
        return HTML::FillInForm::Lite->fill(\$html, @stuff);
    };
    $self->add_trigger('HTML_FILTER' => $filter);
}
# DEPRECATED response-level filler, installed as $res->fillin_form().
# Emits a deprecation warning with a stack trace, then fills the response
# body in place and keeps an already-set Content-Length header in sync.
# Returns the response object for chaining.
sub _fillin_form {
    my ($self, @stuff) = @_;
    Carp::cluck("\$res->fillin_form() was deprecated. Use \$c->fillin_form(\$stuff) instead.");
    my $html = $self->body();
    my $output = HTML::FillInForm::Lite->fill(\$html, @stuff);
    $self->body($output);
    # Only update Content-Length if one was already present on the response.
    $self->header('Content-Length' => length($output)) if $self->header('Content-Length');
    return $self;
}
1;
__END__
=encoding utf-8
=head1 NAME
Amon2::Plugin::Web::FillInFormLite - HTML::FillInForm::Lite
=head1 SYNOPSIS
use Amon2::Lite;
__PACKAGE__->load_plugins(qw/Web::FillInFormLite/);
post '/edit' => sub {
my $c = shift;
unless (is_valid()) {
$c->fillin_form($c->req);
return $c->render('edit.html');
}
$c->dbh->update($c->req());
return $c->redirect('/finished');
};
=head1 DESCRIPTION
HTML::FillInForm::Lite version of L<Amon2::Plugin::Web::FillInForm>
=head1 SEE ALSO
L<HTML::FillInForm::Lite>, L<Amon2>
=cut
| rosiro/wasarabi | local/lib/perl5/Amon2/Plugin/Web/FillInFormLite.pm | Perl | mit | 1,531 |
# This file is auto-generated by the Perl DateTime Suite time zone
# code generator (0.07) This code generator comes with the
# DateTime::TimeZone module distribution in the tools/ directory
#
# Generated from /tmp/BU3Xn7v6Kb/europe. Olson data version 2015g
#
# Do not edit this file directly.
#
package DateTime::TimeZone::Europe::Tirane;
$DateTime::TimeZone::Europe::Tirane::VERSION = '1.94';
use strict;
use Class::Singleton 1.03;
use DateTime::TimeZone;
use DateTime::TimeZone::OlsonDB;
@DateTime::TimeZone::Europe::Tirane::ISA = ( 'Class::Singleton', 'DateTime::TimeZone' );
my $spans =
[
[
DateTime::TimeZone::NEG_INFINITY, # utc_start
60368452840, # utc_end 1913-12-31 22:40:40 (Wed)
DateTime::TimeZone::NEG_INFINITY, # local_start
60368457600, # local_end 1914-01-01 00:00:00 (Thu)
4760,
0,
'LMT',
],
[
60368452840, # utc_start 1913-12-31 22:40:40 (Wed)
61203337200, # utc_end 1940-06-15 23:00:00 (Sat)
60368456440, # local_start 1913-12-31 23:40:40 (Wed)
61203340800, # local_end 1940-06-16 00:00:00 (Sun)
3600,
0,
'CET',
],
[
61203337200, # utc_start 1940-06-15 23:00:00 (Sat)
61278426000, # utc_end 1942-11-02 01:00:00 (Mon)
61203344400, # local_start 1940-06-16 01:00:00 (Sun)
61278433200, # local_end 1942-11-02 03:00:00 (Mon)
7200,
1,
'CEST',
],
[
61278426000, # utc_start 1942-11-02 01:00:00 (Mon)
61291126800, # utc_end 1943-03-29 01:00:00 (Mon)
61278429600, # local_start 1942-11-02 02:00:00 (Mon)
61291130400, # local_end 1943-03-29 02:00:00 (Mon)
3600,
0,
'CET',
],
[
61291126800, # utc_start 1943-03-29 01:00:00 (Mon)
61292163600, # utc_end 1943-04-10 01:00:00 (Sat)
61291134000, # local_start 1943-03-29 03:00:00 (Mon)
61292170800, # local_end 1943-04-10 03:00:00 (Sat)
7200,
1,
'CEST',
],
[
61292163600, # utc_start 1943-04-10 01:00:00 (Sat)
62272537200, # utc_end 1974-05-03 23:00:00 (Fri)
61292167200, # local_start 1943-04-10 02:00:00 (Sat)
62272540800, # local_end 1974-05-04 00:00:00 (Sat)
3600,
0,
'CET',
],
[
62272537200, # utc_start 1974-05-03 23:00:00 (Fri)
62285580000, # utc_end 1974-10-01 22:00:00 (Tue)
62272544400, # local_start 1974-05-04 01:00:00 (Sat)
62285587200, # local_end 1974-10-02 00:00:00 (Wed)
7200,
1,
'CEST',
],
[
62285580000, # utc_start 1974-10-01 22:00:00 (Tue)
62303814000, # utc_end 1975-04-30 23:00:00 (Wed)
62285583600, # local_start 1974-10-01 23:00:00 (Tue)
62303817600, # local_end 1975-05-01 00:00:00 (Thu)
3600,
0,
'CET',
],
[
62303814000, # utc_start 1975-04-30 23:00:00 (Wed)
62317116000, # utc_end 1975-10-01 22:00:00 (Wed)
62303821200, # local_start 1975-05-01 01:00:00 (Thu)
62317123200, # local_end 1975-10-02 00:00:00 (Thu)
7200,
1,
'CEST',
],
[
62317116000, # utc_start 1975-10-01 22:00:00 (Wed)
62335522800, # utc_end 1976-05-01 23:00:00 (Sat)
62317119600, # local_start 1975-10-01 23:00:00 (Wed)
62335526400, # local_end 1976-05-02 00:00:00 (Sun)
3600,
0,
'CET',
],
[
62335522800, # utc_start 1976-05-01 23:00:00 (Sat)
62348824800, # utc_end 1976-10-02 22:00:00 (Sat)
62335530000, # local_start 1976-05-02 01:00:00 (Sun)
62348832000, # local_end 1976-10-03 00:00:00 (Sun)
7200,
1,
'CEST',
],
[
62348824800, # utc_start 1976-10-02 22:00:00 (Sat)
62367577200, # utc_end 1977-05-07 23:00:00 (Sat)
62348828400, # local_start 1976-10-02 23:00:00 (Sat)
62367580800, # local_end 1977-05-08 00:00:00 (Sun)
3600,
0,
'CET',
],
[
62367577200, # utc_start 1977-05-07 23:00:00 (Sat)
62380274400, # utc_end 1977-10-01 22:00:00 (Sat)
62367584400, # local_start 1977-05-08 01:00:00 (Sun)
62380281600, # local_end 1977-10-02 00:00:00 (Sun)
7200,
1,
'CEST',
],
[
62380274400, # utc_start 1977-10-01 22:00:00 (Sat)
62398940400, # utc_end 1978-05-05 23:00:00 (Fri)
62380278000, # local_start 1977-10-01 23:00:00 (Sat)
62398944000, # local_end 1978-05-06 00:00:00 (Sat)
3600,
0,
'CET',
],
[
62398940400, # utc_start 1978-05-05 23:00:00 (Fri)
62411724000, # utc_end 1978-09-30 22:00:00 (Sat)
62398947600, # local_start 1978-05-06 01:00:00 (Sat)
62411731200, # local_end 1978-10-01 00:00:00 (Sun)
7200,
1,
'CEST',
],
[
62411724000, # utc_start 1978-09-30 22:00:00 (Sat)
62430390000, # utc_end 1979-05-04 23:00:00 (Fri)
62411727600, # local_start 1978-09-30 23:00:00 (Sat)
62430393600, # local_end 1979-05-05 00:00:00 (Sat)
3600,
0,
'CET',
],
[
62430390000, # utc_start 1979-05-04 23:00:00 (Fri)
62443173600, # utc_end 1979-09-29 22:00:00 (Sat)
62430397200, # local_start 1979-05-05 01:00:00 (Sat)
62443180800, # local_end 1979-09-30 00:00:00 (Sun)
7200,
1,
'CEST',
],
[
62443173600, # utc_start 1979-09-29 22:00:00 (Sat)
62461839600, # utc_end 1980-05-02 23:00:00 (Fri)
62443177200, # local_start 1979-09-29 23:00:00 (Sat)
62461843200, # local_end 1980-05-03 00:00:00 (Sat)
3600,
0,
'CET',
],
[
62461839600, # utc_start 1980-05-02 23:00:00 (Fri)
62475141600, # utc_end 1980-10-03 22:00:00 (Fri)
62461846800, # local_start 1980-05-03 01:00:00 (Sat)
62475148800, # local_end 1980-10-04 00:00:00 (Sat)
7200,
1,
'CEST',
],
[
62475141600, # utc_start 1980-10-03 22:00:00 (Fri)
62492770800, # utc_end 1981-04-25 23:00:00 (Sat)
62475145200, # local_start 1980-10-03 23:00:00 (Fri)
62492774400, # local_end 1981-04-26 00:00:00 (Sun)
3600,
0,
'CET',
],
[
62492770800, # utc_start 1981-04-25 23:00:00 (Sat)
62506072800, # utc_end 1981-09-26 22:00:00 (Sat)
62492778000, # local_start 1981-04-26 01:00:00 (Sun)
62506080000, # local_end 1981-09-27 00:00:00 (Sun)
7200,
1,
'CEST',
],
[
62506072800, # utc_start 1981-09-26 22:00:00 (Sat)
62524825200, # utc_end 1982-05-01 23:00:00 (Sat)
62506076400, # local_start 1981-09-26 23:00:00 (Sat)
62524828800, # local_end 1982-05-02 00:00:00 (Sun)
3600,
0,
'CET',
],
[
62524825200, # utc_start 1982-05-01 23:00:00 (Sat)
62538127200, # utc_end 1982-10-02 22:00:00 (Sat)
62524832400, # local_start 1982-05-02 01:00:00 (Sun)
62538134400, # local_end 1982-10-03 00:00:00 (Sun)
7200,
1,
'CEST',
],
[
62538127200, # utc_start 1982-10-02 22:00:00 (Sat)
62555151600, # utc_end 1983-04-17 23:00:00 (Sun)
62538130800, # local_start 1982-10-02 23:00:00 (Sat)
62555155200, # local_end 1983-04-18 00:00:00 (Mon)
3600,
0,
'CET',
],
[
62555151600, # utc_start 1983-04-17 23:00:00 (Sun)
62569490400, # utc_end 1983-09-30 22:00:00 (Fri)
62555158800, # local_start 1983-04-18 01:00:00 (Mon)
62569497600, # local_end 1983-10-01 00:00:00 (Sat)
7200,
1,
'CEST',
],
[
62569490400, # utc_start 1983-09-30 22:00:00 (Fri)
62585305200, # utc_end 1984-03-31 23:00:00 (Sat)
62569494000, # local_start 1983-09-30 23:00:00 (Fri)
62585308800, # local_end 1984-04-01 00:00:00 (Sun)
3600,
0,
'CET',
],
[
62585305200, # utc_start 1984-03-31 23:00:00 (Sat)
62593164000, # utc_end 1984-06-30 22:00:00 (Sat)
62585312400, # local_start 1984-04-01 01:00:00 (Sun)
62593171200, # local_end 1984-07-01 00:00:00 (Sun)
7200,
1,
'CEST',
],
[
62593164000, # utc_start 1984-06-30 22:00:00 (Sat)
62601037200, # utc_end 1984-09-30 01:00:00 (Sun)
62593171200, # local_start 1984-07-01 00:00:00 (Sun)
62601044400, # local_end 1984-09-30 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62601037200, # utc_start 1984-09-30 01:00:00 (Sun)
62616762000, # utc_end 1985-03-31 01:00:00 (Sun)
62601040800, # local_start 1984-09-30 02:00:00 (Sun)
62616765600, # local_end 1985-03-31 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62616762000, # utc_start 1985-03-31 01:00:00 (Sun)
62632486800, # utc_end 1985-09-29 01:00:00 (Sun)
62616769200, # local_start 1985-03-31 03:00:00 (Sun)
62632494000, # local_end 1985-09-29 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62632486800, # utc_start 1985-09-29 01:00:00 (Sun)
62648211600, # utc_end 1986-03-30 01:00:00 (Sun)
62632490400, # local_start 1985-09-29 02:00:00 (Sun)
62648215200, # local_end 1986-03-30 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62648211600, # utc_start 1986-03-30 01:00:00 (Sun)
62663936400, # utc_end 1986-09-28 01:00:00 (Sun)
62648218800, # local_start 1986-03-30 03:00:00 (Sun)
62663943600, # local_end 1986-09-28 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62663936400, # utc_start 1986-09-28 01:00:00 (Sun)
62679661200, # utc_end 1987-03-29 01:00:00 (Sun)
62663940000, # local_start 1986-09-28 02:00:00 (Sun)
62679664800, # local_end 1987-03-29 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62679661200, # utc_start 1987-03-29 01:00:00 (Sun)
62695386000, # utc_end 1987-09-27 01:00:00 (Sun)
62679668400, # local_start 1987-03-29 03:00:00 (Sun)
62695393200, # local_end 1987-09-27 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62695386000, # utc_start 1987-09-27 01:00:00 (Sun)
62711110800, # utc_end 1988-03-27 01:00:00 (Sun)
62695389600, # local_start 1987-09-27 02:00:00 (Sun)
62711114400, # local_end 1988-03-27 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62711110800, # utc_start 1988-03-27 01:00:00 (Sun)
62726835600, # utc_end 1988-09-25 01:00:00 (Sun)
62711118000, # local_start 1988-03-27 03:00:00 (Sun)
62726842800, # local_end 1988-09-25 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62726835600, # utc_start 1988-09-25 01:00:00 (Sun)
62742560400, # utc_end 1989-03-26 01:00:00 (Sun)
62726839200, # local_start 1988-09-25 02:00:00 (Sun)
62742564000, # local_end 1989-03-26 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62742560400, # utc_start 1989-03-26 01:00:00 (Sun)
62758285200, # utc_end 1989-09-24 01:00:00 (Sun)
62742567600, # local_start 1989-03-26 03:00:00 (Sun)
62758292400, # local_end 1989-09-24 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62758285200, # utc_start 1989-09-24 01:00:00 (Sun)
62774010000, # utc_end 1990-03-25 01:00:00 (Sun)
62758288800, # local_start 1989-09-24 02:00:00 (Sun)
62774013600, # local_end 1990-03-25 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62774010000, # utc_start 1990-03-25 01:00:00 (Sun)
62790339600, # utc_end 1990-09-30 01:00:00 (Sun)
62774017200, # local_start 1990-03-25 03:00:00 (Sun)
62790346800, # local_end 1990-09-30 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62790339600, # utc_start 1990-09-30 01:00:00 (Sun)
62806064400, # utc_end 1991-03-31 01:00:00 (Sun)
62790343200, # local_start 1990-09-30 02:00:00 (Sun)
62806068000, # local_end 1991-03-31 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62806064400, # utc_start 1991-03-31 01:00:00 (Sun)
62821789200, # utc_end 1991-09-29 01:00:00 (Sun)
62806071600, # local_start 1991-03-31 03:00:00 (Sun)
62821796400, # local_end 1991-09-29 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62821789200, # utc_start 1991-09-29 01:00:00 (Sun)
62837514000, # utc_end 1992-03-29 01:00:00 (Sun)
62821792800, # local_start 1991-09-29 02:00:00 (Sun)
62837517600, # local_end 1992-03-29 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62837514000, # utc_start 1992-03-29 01:00:00 (Sun)
62853238800, # utc_end 1992-09-27 01:00:00 (Sun)
62837521200, # local_start 1992-03-29 03:00:00 (Sun)
62853246000, # local_end 1992-09-27 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62853238800, # utc_start 1992-09-27 01:00:00 (Sun)
62868963600, # utc_end 1993-03-28 01:00:00 (Sun)
62853242400, # local_start 1992-09-27 02:00:00 (Sun)
62868967200, # local_end 1993-03-28 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62868963600, # utc_start 1993-03-28 01:00:00 (Sun)
62884688400, # utc_end 1993-09-26 01:00:00 (Sun)
62868970800, # local_start 1993-03-28 03:00:00 (Sun)
62884695600, # local_end 1993-09-26 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62884688400, # utc_start 1993-09-26 01:00:00 (Sun)
62900413200, # utc_end 1994-03-27 01:00:00 (Sun)
62884692000, # local_start 1993-09-26 02:00:00 (Sun)
62900416800, # local_end 1994-03-27 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62900413200, # utc_start 1994-03-27 01:00:00 (Sun)
62916138000, # utc_end 1994-09-25 01:00:00 (Sun)
62900420400, # local_start 1994-03-27 03:00:00 (Sun)
62916145200, # local_end 1994-09-25 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62916138000, # utc_start 1994-09-25 01:00:00 (Sun)
62931862800, # utc_end 1995-03-26 01:00:00 (Sun)
62916141600, # local_start 1994-09-25 02:00:00 (Sun)
62931866400, # local_end 1995-03-26 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62931862800, # utc_start 1995-03-26 01:00:00 (Sun)
62947587600, # utc_end 1995-09-24 01:00:00 (Sun)
62931870000, # local_start 1995-03-26 03:00:00 (Sun)
62947594800, # local_end 1995-09-24 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62947587600, # utc_start 1995-09-24 01:00:00 (Sun)
62963917200, # utc_end 1996-03-31 01:00:00 (Sun)
62947591200, # local_start 1995-09-24 02:00:00 (Sun)
62963920800, # local_end 1996-03-31 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62963917200, # utc_start 1996-03-31 01:00:00 (Sun)
62982061200, # utc_end 1996-10-27 01:00:00 (Sun)
62963924400, # local_start 1996-03-31 03:00:00 (Sun)
62982068400, # local_end 1996-10-27 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
62982061200, # utc_start 1996-10-27 01:00:00 (Sun)
62995366800, # utc_end 1997-03-30 01:00:00 (Sun)
62982064800, # local_start 1996-10-27 02:00:00 (Sun)
62995370400, # local_end 1997-03-30 02:00:00 (Sun)
3600,
0,
'CET',
],
[
62995366800, # utc_start 1997-03-30 01:00:00 (Sun)
63013510800, # utc_end 1997-10-26 01:00:00 (Sun)
62995374000, # local_start 1997-03-30 03:00:00 (Sun)
63013518000, # local_end 1997-10-26 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63013510800, # utc_start 1997-10-26 01:00:00 (Sun)
63026816400, # utc_end 1998-03-29 01:00:00 (Sun)
63013514400, # local_start 1997-10-26 02:00:00 (Sun)
63026820000, # local_end 1998-03-29 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63026816400, # utc_start 1998-03-29 01:00:00 (Sun)
63044960400, # utc_end 1998-10-25 01:00:00 (Sun)
63026823600, # local_start 1998-03-29 03:00:00 (Sun)
63044967600, # local_end 1998-10-25 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63044960400, # utc_start 1998-10-25 01:00:00 (Sun)
63058266000, # utc_end 1999-03-28 01:00:00 (Sun)
63044964000, # local_start 1998-10-25 02:00:00 (Sun)
63058269600, # local_end 1999-03-28 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63058266000, # utc_start 1999-03-28 01:00:00 (Sun)
63077014800, # utc_end 1999-10-31 01:00:00 (Sun)
63058273200, # local_start 1999-03-28 03:00:00 (Sun)
63077022000, # local_end 1999-10-31 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63077014800, # utc_start 1999-10-31 01:00:00 (Sun)
63089715600, # utc_end 2000-03-26 01:00:00 (Sun)
63077018400, # local_start 1999-10-31 02:00:00 (Sun)
63089719200, # local_end 2000-03-26 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63089715600, # utc_start 2000-03-26 01:00:00 (Sun)
63108464400, # utc_end 2000-10-29 01:00:00 (Sun)
63089722800, # local_start 2000-03-26 03:00:00 (Sun)
63108471600, # local_end 2000-10-29 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63108464400, # utc_start 2000-10-29 01:00:00 (Sun)
63121165200, # utc_end 2001-03-25 01:00:00 (Sun)
63108468000, # local_start 2000-10-29 02:00:00 (Sun)
63121168800, # local_end 2001-03-25 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63121165200, # utc_start 2001-03-25 01:00:00 (Sun)
63139914000, # utc_end 2001-10-28 01:00:00 (Sun)
63121172400, # local_start 2001-03-25 03:00:00 (Sun)
63139921200, # local_end 2001-10-28 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63139914000, # utc_start 2001-10-28 01:00:00 (Sun)
63153219600, # utc_end 2002-03-31 01:00:00 (Sun)
63139917600, # local_start 2001-10-28 02:00:00 (Sun)
63153223200, # local_end 2002-03-31 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63153219600, # utc_start 2002-03-31 01:00:00 (Sun)
63171363600, # utc_end 2002-10-27 01:00:00 (Sun)
63153226800, # local_start 2002-03-31 03:00:00 (Sun)
63171370800, # local_end 2002-10-27 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63171363600, # utc_start 2002-10-27 01:00:00 (Sun)
63184669200, # utc_end 2003-03-30 01:00:00 (Sun)
63171367200, # local_start 2002-10-27 02:00:00 (Sun)
63184672800, # local_end 2003-03-30 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63184669200, # utc_start 2003-03-30 01:00:00 (Sun)
63202813200, # utc_end 2003-10-26 01:00:00 (Sun)
63184676400, # local_start 2003-03-30 03:00:00 (Sun)
63202820400, # local_end 2003-10-26 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63202813200, # utc_start 2003-10-26 01:00:00 (Sun)
63216118800, # utc_end 2004-03-28 01:00:00 (Sun)
63202816800, # local_start 2003-10-26 02:00:00 (Sun)
63216122400, # local_end 2004-03-28 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63216118800, # utc_start 2004-03-28 01:00:00 (Sun)
63234867600, # utc_end 2004-10-31 01:00:00 (Sun)
63216126000, # local_start 2004-03-28 03:00:00 (Sun)
63234874800, # local_end 2004-10-31 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63234867600, # utc_start 2004-10-31 01:00:00 (Sun)
63247568400, # utc_end 2005-03-27 01:00:00 (Sun)
63234871200, # local_start 2004-10-31 02:00:00 (Sun)
63247572000, # local_end 2005-03-27 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63247568400, # utc_start 2005-03-27 01:00:00 (Sun)
63266317200, # utc_end 2005-10-30 01:00:00 (Sun)
63247575600, # local_start 2005-03-27 03:00:00 (Sun)
63266324400, # local_end 2005-10-30 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63266317200, # utc_start 2005-10-30 01:00:00 (Sun)
63279018000, # utc_end 2006-03-26 01:00:00 (Sun)
63266320800, # local_start 2005-10-30 02:00:00 (Sun)
63279021600, # local_end 2006-03-26 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63279018000, # utc_start 2006-03-26 01:00:00 (Sun)
63297766800, # utc_end 2006-10-29 01:00:00 (Sun)
63279025200, # local_start 2006-03-26 03:00:00 (Sun)
63297774000, # local_end 2006-10-29 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63297766800, # utc_start 2006-10-29 01:00:00 (Sun)
63310467600, # utc_end 2007-03-25 01:00:00 (Sun)
63297770400, # local_start 2006-10-29 02:00:00 (Sun)
63310471200, # local_end 2007-03-25 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63310467600, # utc_start 2007-03-25 01:00:00 (Sun)
63329216400, # utc_end 2007-10-28 01:00:00 (Sun)
63310474800, # local_start 2007-03-25 03:00:00 (Sun)
63329223600, # local_end 2007-10-28 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63329216400, # utc_start 2007-10-28 01:00:00 (Sun)
63342522000, # utc_end 2008-03-30 01:00:00 (Sun)
63329220000, # local_start 2007-10-28 02:00:00 (Sun)
63342525600, # local_end 2008-03-30 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63342522000, # utc_start 2008-03-30 01:00:00 (Sun)
63360666000, # utc_end 2008-10-26 01:00:00 (Sun)
63342529200, # local_start 2008-03-30 03:00:00 (Sun)
63360673200, # local_end 2008-10-26 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63360666000, # utc_start 2008-10-26 01:00:00 (Sun)
63373971600, # utc_end 2009-03-29 01:00:00 (Sun)
63360669600, # local_start 2008-10-26 02:00:00 (Sun)
63373975200, # local_end 2009-03-29 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63373971600, # utc_start 2009-03-29 01:00:00 (Sun)
63392115600, # utc_end 2009-10-25 01:00:00 (Sun)
63373978800, # local_start 2009-03-29 03:00:00 (Sun)
63392122800, # local_end 2009-10-25 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63392115600, # utc_start 2009-10-25 01:00:00 (Sun)
63405421200, # utc_end 2010-03-28 01:00:00 (Sun)
63392119200, # local_start 2009-10-25 02:00:00 (Sun)
63405424800, # local_end 2010-03-28 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63405421200, # utc_start 2010-03-28 01:00:00 (Sun)
63424170000, # utc_end 2010-10-31 01:00:00 (Sun)
63405428400, # local_start 2010-03-28 03:00:00 (Sun)
63424177200, # local_end 2010-10-31 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63424170000, # utc_start 2010-10-31 01:00:00 (Sun)
63436870800, # utc_end 2011-03-27 01:00:00 (Sun)
63424173600, # local_start 2010-10-31 02:00:00 (Sun)
63436874400, # local_end 2011-03-27 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63436870800, # utc_start 2011-03-27 01:00:00 (Sun)
63455619600, # utc_end 2011-10-30 01:00:00 (Sun)
63436878000, # local_start 2011-03-27 03:00:00 (Sun)
63455626800, # local_end 2011-10-30 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63455619600, # utc_start 2011-10-30 01:00:00 (Sun)
63468320400, # utc_end 2012-03-25 01:00:00 (Sun)
63455623200, # local_start 2011-10-30 02:00:00 (Sun)
63468324000, # local_end 2012-03-25 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63468320400, # utc_start 2012-03-25 01:00:00 (Sun)
63487069200, # utc_end 2012-10-28 01:00:00 (Sun)
63468327600, # local_start 2012-03-25 03:00:00 (Sun)
63487076400, # local_end 2012-10-28 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63487069200, # utc_start 2012-10-28 01:00:00 (Sun)
63500374800, # utc_end 2013-03-31 01:00:00 (Sun)
63487072800, # local_start 2012-10-28 02:00:00 (Sun)
63500378400, # local_end 2013-03-31 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63500374800, # utc_start 2013-03-31 01:00:00 (Sun)
63518518800, # utc_end 2013-10-27 01:00:00 (Sun)
63500382000, # local_start 2013-03-31 03:00:00 (Sun)
63518526000, # local_end 2013-10-27 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63518518800, # utc_start 2013-10-27 01:00:00 (Sun)
63531824400, # utc_end 2014-03-30 01:00:00 (Sun)
63518522400, # local_start 2013-10-27 02:00:00 (Sun)
63531828000, # local_end 2014-03-30 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63531824400, # utc_start 2014-03-30 01:00:00 (Sun)
63549968400, # utc_end 2014-10-26 01:00:00 (Sun)
63531831600, # local_start 2014-03-30 03:00:00 (Sun)
63549975600, # local_end 2014-10-26 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63549968400, # utc_start 2014-10-26 01:00:00 (Sun)
63563274000, # utc_end 2015-03-29 01:00:00 (Sun)
63549972000, # local_start 2014-10-26 02:00:00 (Sun)
63563277600, # local_end 2015-03-29 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63563274000, # utc_start 2015-03-29 01:00:00 (Sun)
63581418000, # utc_end 2015-10-25 01:00:00 (Sun)
63563281200, # local_start 2015-03-29 03:00:00 (Sun)
63581425200, # local_end 2015-10-25 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63581418000, # utc_start 2015-10-25 01:00:00 (Sun)
63594723600, # utc_end 2016-03-27 01:00:00 (Sun)
63581421600, # local_start 2015-10-25 02:00:00 (Sun)
63594727200, # local_end 2016-03-27 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63594723600, # utc_start 2016-03-27 01:00:00 (Sun)
63613472400, # utc_end 2016-10-30 01:00:00 (Sun)
63594730800, # local_start 2016-03-27 03:00:00 (Sun)
63613479600, # local_end 2016-10-30 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63613472400, # utc_start 2016-10-30 01:00:00 (Sun)
63626173200, # utc_end 2017-03-26 01:00:00 (Sun)
63613476000, # local_start 2016-10-30 02:00:00 (Sun)
63626176800, # local_end 2017-03-26 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63626173200, # utc_start 2017-03-26 01:00:00 (Sun)
63644922000, # utc_end 2017-10-29 01:00:00 (Sun)
63626180400, # local_start 2017-03-26 03:00:00 (Sun)
63644929200, # local_end 2017-10-29 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63644922000, # utc_start 2017-10-29 01:00:00 (Sun)
63657622800, # utc_end 2018-03-25 01:00:00 (Sun)
63644925600, # local_start 2017-10-29 02:00:00 (Sun)
63657626400, # local_end 2018-03-25 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63657622800, # utc_start 2018-03-25 01:00:00 (Sun)
63676371600, # utc_end 2018-10-28 01:00:00 (Sun)
63657630000, # local_start 2018-03-25 03:00:00 (Sun)
63676378800, # local_end 2018-10-28 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63676371600, # utc_start 2018-10-28 01:00:00 (Sun)
63689677200, # utc_end 2019-03-31 01:00:00 (Sun)
63676375200, # local_start 2018-10-28 02:00:00 (Sun)
63689680800, # local_end 2019-03-31 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63689677200, # utc_start 2019-03-31 01:00:00 (Sun)
63707821200, # utc_end 2019-10-27 01:00:00 (Sun)
63689684400, # local_start 2019-03-31 03:00:00 (Sun)
63707828400, # local_end 2019-10-27 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63707821200, # utc_start 2019-10-27 01:00:00 (Sun)
63721126800, # utc_end 2020-03-29 01:00:00 (Sun)
63707824800, # local_start 2019-10-27 02:00:00 (Sun)
63721130400, # local_end 2020-03-29 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63721126800, # utc_start 2020-03-29 01:00:00 (Sun)
63739270800, # utc_end 2020-10-25 01:00:00 (Sun)
63721134000, # local_start 2020-03-29 03:00:00 (Sun)
63739278000, # local_end 2020-10-25 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63739270800, # utc_start 2020-10-25 01:00:00 (Sun)
63752576400, # utc_end 2021-03-28 01:00:00 (Sun)
63739274400, # local_start 2020-10-25 02:00:00 (Sun)
63752580000, # local_end 2021-03-28 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63752576400, # utc_start 2021-03-28 01:00:00 (Sun)
63771325200, # utc_end 2021-10-31 01:00:00 (Sun)
63752583600, # local_start 2021-03-28 03:00:00 (Sun)
63771332400, # local_end 2021-10-31 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63771325200, # utc_start 2021-10-31 01:00:00 (Sun)
63784026000, # utc_end 2022-03-27 01:00:00 (Sun)
63771328800, # local_start 2021-10-31 02:00:00 (Sun)
63784029600, # local_end 2022-03-27 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63784026000, # utc_start 2022-03-27 01:00:00 (Sun)
63802774800, # utc_end 2022-10-30 01:00:00 (Sun)
63784033200, # local_start 2022-03-27 03:00:00 (Sun)
63802782000, # local_end 2022-10-30 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63802774800, # utc_start 2022-10-30 01:00:00 (Sun)
63815475600, # utc_end 2023-03-26 01:00:00 (Sun)
63802778400, # local_start 2022-10-30 02:00:00 (Sun)
63815479200, # local_end 2023-03-26 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63815475600, # utc_start 2023-03-26 01:00:00 (Sun)
63834224400, # utc_end 2023-10-29 01:00:00 (Sun)
63815482800, # local_start 2023-03-26 03:00:00 (Sun)
63834231600, # local_end 2023-10-29 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63834224400, # utc_start 2023-10-29 01:00:00 (Sun)
63847530000, # utc_end 2024-03-31 01:00:00 (Sun)
63834228000, # local_start 2023-10-29 02:00:00 (Sun)
63847533600, # local_end 2024-03-31 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63847530000, # utc_start 2024-03-31 01:00:00 (Sun)
63865674000, # utc_end 2024-10-27 01:00:00 (Sun)
63847537200, # local_start 2024-03-31 03:00:00 (Sun)
63865681200, # local_end 2024-10-27 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63865674000, # utc_start 2024-10-27 01:00:00 (Sun)
63878979600, # utc_end 2025-03-30 01:00:00 (Sun)
63865677600, # local_start 2024-10-27 02:00:00 (Sun)
63878983200, # local_end 2025-03-30 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63878979600, # utc_start 2025-03-30 01:00:00 (Sun)
63897123600, # utc_end 2025-10-26 01:00:00 (Sun)
63878986800, # local_start 2025-03-30 03:00:00 (Sun)
63897130800, # local_end 2025-10-26 03:00:00 (Sun)
7200,
1,
'CEST',
],
[
63897123600, # utc_start 2025-10-26 01:00:00 (Sun)
63910429200, # utc_end 2026-03-29 01:00:00 (Sun)
63897127200, # local_start 2025-10-26 02:00:00 (Sun)
63910432800, # local_end 2026-03-29 02:00:00 (Sun)
3600,
0,
'CET',
],
[
63910429200, # utc_start 2026-03-29 01:00:00 (Sun)
63928573200, # utc_end 2026-10-25 01:00:00 (Sun)
63910436400, # local_start 2026-03-29 03:00:00 (Sun)
63928580400, # local_end 2026-10-25 03:00:00 (Sun)
7200,
1,
'CEST',
],
];
sub olson_version {'2015g'}
sub has_dst_changes {56}
sub _max_year {2025}
sub _new_instance {
return shift->_init( @_, spans => $spans );
}
sub _last_offset { 3600 }
my $last_observance = bless( {
'format' => 'CE%sT',
'gmtoff' => '1:00',
'local_start_datetime' => bless( {
'formatter' => undef,
'local_rd_days' => 724458,
'local_rd_secs' => 0,
'offset_modifier' => 0,
'rd_nanosecs' => 0,
'tz' => bless( {
'name' => 'floating',
'offset' => 0
}, 'DateTime::TimeZone::Floating' ),
'utc_rd_days' => 724458,
'utc_rd_secs' => 0,
'utc_year' => 1985
}, 'DateTime' ),
'offset_from_std' => 0,
'offset_from_utc' => 3600,
'until' => [],
'utc_start_datetime' => bless( {
'formatter' => undef,
'local_rd_days' => 724457,
'local_rd_secs' => 79200,
'offset_modifier' => 0,
'rd_nanosecs' => 0,
'tz' => bless( {
'name' => 'floating',
'offset' => 0
}, 'DateTime::TimeZone::Floating' ),
'utc_rd_days' => 724457,
'utc_rd_secs' => 79200,
'utc_year' => 1985
}, 'DateTime' )
}, 'DateTime::TimeZone::OlsonDB::Observance' )
;
sub _last_observance { $last_observance }
my $rules = [
bless( {
'at' => '1:00u',
'from' => '1996',
'in' => 'Oct',
'letter' => '',
'name' => 'EU',
'offset_from_std' => 0,
'on' => 'lastSun',
'save' => '0',
'to' => 'max',
'type' => undef
}, 'DateTime::TimeZone::OlsonDB::Rule' ),
bless( {
'at' => '1:00u',
'from' => '1981',
'in' => 'Mar',
'letter' => 'S',
'name' => 'EU',
'offset_from_std' => 3600,
'on' => 'lastSun',
'save' => '1:00',
'to' => 'max',
'type' => undef
}, 'DateTime::TimeZone::OlsonDB::Rule' )
]
;
sub _rules { $rules }
1;
| rosiro/wasarabi | local/lib/perl5/DateTime/TimeZone/Europe/Tirane.pm | Perl | mit | 29,848 |
#BEGIN_HEADER
#
# Copyright (C) 2020 Mahdi Safsafi.
#
# https://github.com/MahdiSafsafi/opcodesDB
#
# See licence file 'LICENCE' for use and distribution rights.
#
#END_HEADER
use strict;
use warnings;
# Each ICLASS entry declares one A64 instruction-encoding class for the
# opcodesDB table generator.  Fields:
#   title    - human-readable name of the encoding class.
#   diagram  - the 32-bit encoding as space-separated fields, MSB first:
#              `ig*=0b...` entries are fixed opcode bits, while named fields
#              (sf, op, S, Rm, Rn, Rd, ...) use `x` placeholders for variable
#              bits.  Widths here sum to 32 (1+1+1+8+5+6+5+5).  The `:u`
#              suffix presumably marks the field as unsigned -- TODO confirm
#              against the opcodesDB field grammar.
#   metadata - key=value properties (instruction set, encoding form, index).
#   tags     - grouping labels used to bucket related classes.
ICLASS addsub_carry => {
	title => 'Add/subtract (with carry)',
	diagram => 'sf:u=0bx op:u=0bx S:u=0bx ig0=0b11010000 Rm:u=0bxxxxx ig1=0b000000 Rn:u=0bxxxxx Rd:u=0bxxxxx',
	metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
	tags => 'group=dpreg',
};
ICLASS addsub_ext => {
title => 'Add/subtract (extended register)',
diagram => 'sf:u=0bx op:u=0bx S:u=0bx ig0=0b01011 opt:u=0bxx ig1=0b1 Rm:u=0bxxxxx option:u=0bxxx imm3:u=0bxxx Rn:u=0bxxxxx Rd:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=dpreg',
};
ICLASS addsub_imm => {
title => 'Add/subtract (immediate)',
diagram => 'sf:u=0bx op:u=0bx S:u=0bx ig0=0b100010 sh:u=0bx imm12:u=0bxxxxxxxxxxxx Rn:u=0bxxxxx Rd:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=dpimm',
};
ICLASS addsub_immtags => {
title => 'Add/subtract (immediate, with tags)',
diagram => 'sf:u=0bx op:u=0bx S:u=0bx ig0=0b100011 o2:u=0bx uimm6:u=0bxxxxxx op3:u=0bxx uimm4:u=0bxxxx Rn:u=0bxxxxx Rd:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=dpimm',
};
ICLASS addsub_shift => {
title => 'Add/subtract (shifted register)',
diagram => 'sf:u=0bx op:u=0bx S:u=0bx ig0=0b01011 shift:u=0bxx ig1=0b0 Rm:u=0bxxxxx imm6:u=0bxxxxxx Rn:u=0bxxxxx Rd:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=dpreg',
};
ICLASS barriers => {
title => 'Barriers',
diagram => 'ig0=0b11010101000000110011 CRm:u=0bxxxx op2:u=0bxxx Rt:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=control',
};
ICLASS bitfield => {
title => 'Bitfield',
diagram => 'sf:u=0bx opc:u=0bxx ig0=0b100110 N:u=0bx immr:u=0bxxxxxx imms:u=0bxxxxxx Rn:u=0bxxxxx Rd:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=dpimm',
};
ICLASS branch_imm => {
title => 'Unconditional branch (immediate)',
diagram => 'op:u=0bx ig0=0b00101 imm26:u=0bxxxxxxxxxxxxxxxxxxxxxxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=control',
};
ICLASS branch_reg => {
title => 'Unconditional branch (register)',
diagram => 'ig0=0b1101011 opc:u=0bxxxx op2:u=0bxxxxx op3:u=0bxxxxxx Rn:u=0bxxxxx op4:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=control',
};
ICLASS compbranch => {
title => 'Compare and branch (immediate)',
diagram => 'sf:u=0bx ig0=0b011010 op:u=0bx imm19:u=0bxxxxxxxxxxxxxxxxxxx Rt:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=control',
};
ICLASS condbranch => {
title => 'Conditional branch (immediate)',
diagram => 'ig0=0b0101010 o1:u=0bx imm19:u=0bxxxxxxxxxxxxxxxxxxx o0:u=0bx cond:u=0bxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=control',
};
ICLASS condcmp_imm => {
title => 'Conditional compare (immediate)',
diagram => 'sf:u=0bx op:u=0bx S:u=0bx ig0=0b11010010 imm5:u=0bxxxxx cond:u=0bxxxx ig1=0b1 o2:u=0bx Rn:u=0bxxxxx o3:u=0bx nzcv:u=0bxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=dpreg',
};
ICLASS condcmp_reg => {
title => 'Conditional compare (register)',
diagram => 'sf:u=0bx op:u=0bx S:u=0bx ig0=0b11010010 Rm:u=0bxxxxx cond:u=0bxxxx ig1=0b0 o2:u=0bx Rn:u=0bxxxxx o3:u=0bx nzcv:u=0bxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=dpreg',
};
ICLASS condsel => {
title => 'Conditional select',
diagram => 'sf:u=0bx op:u=0bx S:u=0bx ig0=0b11010100 Rm:u=0bxxxxx cond:u=0bxxxx op2:u=0bxx Rn:u=0bxxxxx Rd:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=dpreg',
};
ICLASS dp_1src => {
title => 'Data-processing (1 source)',
diagram => 'sf:u=0bx ig0=0b1 S:u=0bx ig1=0b11010110 opcode2:u=0bxxxxx opcode:u=0bxxxxxx Rn:u=0bxxxxx Rd:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=dpreg',
};
ICLASS dp_2src => {
title => 'Data-processing (2 source)',
diagram => 'sf:u=0bx ig0=0b0 S:u=0bx ig1=0b11010110 Rm:u=0bxxxxx opcode:u=0bxxxxxx Rn:u=0bxxxxx Rd:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=dpreg',
};
ICLASS dp_3src => {
title => 'Data-processing (3 source)',
diagram => 'sf:u=0bx op54:u=0bxx ig0=0b11011 op31:u=0bxxx Rm:u=0bxxxxx o0:u=0bx Ra:u=0bxxxxx Rn:u=0bxxxxx Rd:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=dpreg',
};
ICLASS exception => {
title => 'Exception generation',
diagram => 'ig0=0b11010100 opc:u=0bxxx imm16:u=0bxxxxxxxxxxxxxxxx op2:u=0bxxx LL:u=0bxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=control',
};
ICLASS extract => {
title => 'Extract',
diagram => 'sf:u=0bx op21:u=0bxx ig0=0b100111 N:u=0bx o0:u=0bx Rm:u=0bxxxxx imms:u=0bxxxxxx Rn:u=0bxxxxx Rd:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=dpimm',
};
ICLASS hints => {
title => 'Hints',
diagram => 'ig0=0b11010101000000110010 CRm:u=0bxxxx op2:u=0bxxx ig1=0b11111',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=control',
};
ICLASS ldapstl_unscaled => {
title => 'LDAPR/STLR (unscaled immediate)',
diagram => 'size:u=0bxx ig0=0b011001 opc:u=0bxx ig1=0b0 imm9:u=0bxxxxxxxxx ig2=0b00 Rn:u=0bxxxxx Rt:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=ldst',
};
ICLASS ldst_pac => {
title => 'Load/store register (pac)',
diagram => 'size:u=0bxx ig0=0b111 V:u=0bx ig1=0b00 M:u=0bx S:u=0bx ig2=0b1 imm9:u=0bxxxxxxxxx W:u=0bx ig3=0b1 Rn:u=0bxxxxx Rt:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=ldst',
};
ICLASS ldst_unpriv => {
title => 'Load/store register (unprivileged)',
diagram => 'size:u=0bxx ig0=0b111 V:u=0bx ig1=0b00 opc:u=0bxx ig2=0b0 imm9:u=0bxxxxxxxxx ig3=0b10 Rn:u=0bxxxxx Rt:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=ldst',
};
ICLASS ldstexcl => {
title => 'Load/store exclusive',
diagram => 'size:u=0bxx ig0=0b001000 o2:u=0bx L:u=0bx o1:u=0bx Rs:u=0bxxxxx o0:u=0bx Rt2:u=0bxxxxx Rn:u=0bxxxxx Rt:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=ldst',
};
ICLASS ldsttags => {
title => 'Load/store memory tags',
diagram => 'ig0=0b11011001 opc:u=0bxx ig1=0b1 imm9:u=0bxxxxxxxxx op2:u=0bxx Rn:u=0bxxxxx Rt:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=ldst',
};
ICLASS log_imm => {
title => 'Logical (immediate)',
diagram => 'sf:u=0bx opc:u=0bxx ig0=0b100100 N:u=0bx immr:u=0bxxxxxx imms:u=0bxxxxxx Rn:u=0bxxxxx Rd:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=dpimm',
};
ICLASS log_shift => {
title => 'Logical (shifted register)',
diagram => 'sf:u=0bx opc:u=0bxx ig0=0b01010 shift:u=0bxx N:u=0bx Rm:u=0bxxxxx imm6:u=0bxxxxxx Rn:u=0bxxxxx Rd:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=dpreg',
};
ICLASS memop => {
title => 'Atomic memory operations',
diagram => 'size:u=0bxx ig0=0b111 V:u=0bx ig1=0b00 A:u=0bx R:u=0bx ig2=0b1 Rs:u=0bxxxxx o3:u=0bx opc:u=0bxxx ig3=0b00 Rn:u=0bxxxxx Rt:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=ldst',
};
ICLASS movewide => {
title => 'Move wide (immediate)',
diagram => 'sf:u=0bx opc:u=0bxx ig0=0b100101 hw:u=0bxx imm16:u=0bxxxxxxxxxxxxxxxx Rd:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=dpimm',
};
ICLASS pcreladdr => {
title => 'PC-rel. addressing',
diagram => 'op:u=0bx immlo:u=0bxx ig0=0b10000 immhi:u=0bxxxxxxxxxxxxxxxxxxx Rd:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=dpimm',
};
ICLASS perm_undef => {
title => 'Reserved',
diagram => 'ig0=0b0000000000000000 imm16:u=0bxxxxxxxxxxxxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=reserved',
};
ICLASS pstate => {
title => 'PSTATE',
diagram => 'ig0=0b1101010100000 op1:u=0bxxx ig1=0b0100 CRm:u=0bxxxx op2:u=0bxxx Rt:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=control',
};
ICLASS rmif => {
title => 'Rotate right into flags',
diagram => 'sf:u=0bx op:u=0bx S:u=0bx ig0=0b11010000 imm6:u=0bxxxxxx ig1=0b00001 Rn:u=0bxxxxx o2:u=0bx mask:u=0bxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=dpreg',
};
ICLASS setf => {
title => 'Evaluate into flags',
diagram => 'sf:u=0bx op:u=0bx S:u=0bx ig0=0b11010000 opcode2:u=0bxxxxxx sz:u=0bx ig1=0b0010 Rn:u=0bxxxxx o3:u=0bx mask:u=0bxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=dpreg',
};
ICLASS systeminstrs => {
title => 'System instructions',
diagram => 'ig0=0b1101010100 L:u=0bx ig1=0b01 op1:u=0bxxx CRn:u=0bxxxx CRm:u=0bxxxx op2:u=0bxxx Rt:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=control',
};
ICLASS systemmove => {
title => 'System register move',
diagram => 'ig0=0b1101010100 L:u=0bx ig1=0b1 o0:u=0bx op1:u=0bxxx CRn:u=0bxxxx CRm:u=0bxxxx op2:u=0bxxx Rt:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=control',
};
ICLASS systemresult => {
title => 'System with result',
diagram => 'ig0=0b1101010100100 op1:u=0bxxx CRn:u=0bxxxx CRm:u=0bxxxx op2:u=0bxxx Rt:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=control',
};
ICLASS testbranch => {
title => 'Test and branch (immediate)',
diagram => 'b5:u=0bx ig0=0b011011 op:u=0bx b40:u=0bxxxxx imm14:u=0bxxxxxxxxxxxxxx Rt:u=0bxxxxx',
metadata => 'isa=A64 isaform=A64 alphaindex=BASIC',
tags => 'group=control',
};
1;
| MahdiSafsafi/opcodesDB | db/aarch64/basic/iclasses.pl | Perl | mit | 10,518 |
#!/usr/bin/perl
#
# $Header: svn://svn/SWM/trunk/misc/moneylogInsert.pl 8250 2013-04-08 08:24:36Z rlee $
#
use strict;
use lib "..","../web","../web/comp", "../web/user", '../web/RegoForm', "../web/dashboard", "../web/RegoFormBuilder",'../web/PaymentSplit', "../web/Clearances";
use Defs;
use Utils;
use DBI;
use WorkFlow;
use UserObj;
use CGI qw(unescape);
use RegistrationAllowed;
use PersonRegistration;
use Data::Dumper;
main();
# One-shot driver: connect to the database, build the minimal %Data
# context and submit a single hard-coded person registration.
sub main {
    my %Data;

    # Hard-coded fixture IDs for the registration being submitted.
    my $person_id      = 10346430;
    my $person_rego_id = 1;

    $Data{'db'}           = connectDB();
    $Data{'Realm'}        = 1;
    $Data{'RealmSubType'} = 0;

    submitPersonRegistration( \%Data, $person_id, $person_rego_id );
}
| facascante/slimerp | fifs/misc/submitRego.pl | Perl | mit | 669 |
package #
Date::Manip::Offset::off066;
# Copyright (c) 2008-2015 Sullivan Beck. All rights reserved.
# This program is free software; you can redistribute it and/or modify it
# under the same terms as Perl itself.
# This file was automatically generated. Any changes to this file will
# be lost the next time 'tzdata' is run.
# Generated on: Wed Nov 25 11:44:43 EST 2015
# Data version: tzdata2015g
# Code version: tzcode2015g
# This module contains data from the zoneinfo time zone database. The original
# data was obtained from the URL:
# ftp://ftp.iana.org/tz
use strict;
use warnings;
require 5.010000;
our ($VERSION);
$VERSION='6.52';
END { undef $VERSION; }
our ($Offset,%Offset);
END {
undef $Offset;
undef %Offset;
}
# The UTC offset this module describes (auto-generated from tzdata).
$Offset = '+02:27:16';

# Map of offset-type index to the zoneinfo zones that have used this
# offset (0 => standard time).
%Offset = (
   0 => [
      'africa/nairobi',
      ],
   );
1;
| jkb78/extrajnm | local/lib/perl5/Date/Manip/Offset/off066.pm | Perl | mit | 853 |
package Prophet::Replica::prophet;
{
$Prophet::Replica::prophet::VERSION = '0.751';
}
use Any::Moose;
extends 'Prophet::FilesystemReplica';
use Params::Validate qw(:all);
use LWP::UserAgent;
use LWP::ConnCache;
use File::Spec ();
use File::Path;
use Cwd ();
use File::Find;
use Prophet::Util;
use POSIX qw();
use Memoize;
use Prophet::ContentAddressedStore;
use JSON;
use Digest::SHA qw(sha1_hex);
has '+db_uuid' => (
lazy => 1,
default => sub { shift->_read_file('database-uuid') },
);
has _uuid => ( is => 'rw', );
has _replica_version => (
is => 'rw',
isa => 'Int',
lazy => 1,
default => sub { shift->_read_file('replica-version') || 0 }
);
has fs_root_parent => (
is => 'rw',
lazy => 1,
default => sub {
my $self = shift;
if ( $self->url =~ m{^file://(.*)} ) {
my $path = $1;
return File::Spec->catdir(
( File::Spec->splitpath($path) )[ 0, -2 ] );
}
},
);
has fs_root => (
is => 'rw',
lazy => 1,
default => sub {
my $self = shift;
return $self->url =~ m{^file://(.*)$} ? $1 : undef;
},
);
has record_cas => (
is => 'rw',
isa => 'Prophet::ContentAddressedStore',
lazy => 1,
default => sub {
my $self = shift;
Prophet::ContentAddressedStore->new(
{
fs_root => $self->fs_root,
root => $self->record_cas_dir
}
);
},
);
has changeset_cas => (
is => 'rw',
isa => 'Prophet::ContentAddressedStore',
lazy => 1,
default => sub {
my $self = shift;
Prophet::ContentAddressedStore->new(
{
fs_root => $self->fs_root,
root => $self->changeset_cas_dir
}
);
},
);
has current_edit => (
is => 'rw',
isa => 'Maybe[Prophet::ChangeSet]',
);
has current_edit_records => (
is => 'rw',
isa => 'ArrayRef',
default => sub { [] },
);
has '+resolution_db_handle' => (
isa => 'Prophet::Replica | Undef',
lazy => 1,
default => sub {
my $self = shift;
return if $self->is_resdb;
return Prophet::Replica->get_handle(
{
url => "prophet:" . $self->url . '/resolutions',
app_handle => $self->app_handle,
is_resdb => 1,
}
);
},
);
has backend => (
lazy => 1,
is => 'rw',
default => sub {
my $self = shift;
my $be;
if ( $self->url =~ /^http/i ) {
$be = 'Prophet::Replica::FS::Backend::LWP';
} else {
$be = 'Prophet::Replica::FS::Backend::File';
}
Prophet::App->require($be);
return $be->new( url => $self->url, fs_root => $self->fs_root );
}
);
use constant scheme => 'prophet';
use constant cas_root => 'cas';
use constant record_cas_dir =>
File::Spec->catdir( __PACKAGE__->cas_root => 'records' );
use constant changeset_cas_dir =>
File::Spec->catdir( __PACKAGE__->cas_root => 'changesets' );
use constant record_dir => 'records';
use constant userdata_dir => 'userdata';
use constant changeset_index => 'changesets.idx';
use constant local_metadata_dir => 'local_metadata';
sub BUILD {
    my ( $self, $args ) = @_;

    # Warn with a stack trace when constructed without an app_handle.
    Carp::cluck() if !$args->{app_handle};

    # Normalize the URL in place: strip the "prophet:" scheme prefix and
    # any trailing slash.  (The url-based constructor in ::replica
    # should do better.)
    $self->{url} =~ s/^prophet://;
    $self->{url} =~ s{/$}{};
}
# Read-only accessor for the replica format version.
sub replica_version {
    my $self = shift;

    # Guard against accidental use as a setter.
    die "replica_version is read-only; you want set_replica_version."
      if @_;

    return $self->_replica_version;
}
# Set the replica format version, keeping the in-memory attribute and
# the on-disk "replica-version" file in sync.  Returns the new version.
sub set_replica_version {
    my ( $self, $version ) = @_;

    $self->_replica_version($version);
    $self->_write_file(
        path    => 'replica-version',
        content => $version,
    );

    return $version;
}
# True when a new replica can be created here: the parent directory of
# the replica root must exist and be writable.
sub can_initialize {
    my $self = shift;

    my $parent = $self->fs_root_parent;
    return ( $parent && -w $parent ) ? 1 : 0;
}
use constant can_read_records => 1;
use constant can_read_changesets => 1;
# Changesets and records can be written only when the replica is backed
# by a local filesystem path (fs_root is undef for http:// replicas).
sub can_write_changesets { return ( shift->fs_root ? 1 : 0 ) }
sub can_write_records { return ( shift->fs_root ? 1 : 0 ) }
# Directories that must exist in a freshly initialized replica,
# relative to fs_root.
sub _on_initialize_create_paths {
    my $self = shift;

    return (
        $self->record_dir,
        $self->cas_root,
        $self->record_cas_dir,
        $self->changeset_cas_dir,
        $self->userdata_dir,
    );
}
# Write the on-disk state that defines a brand-new replica: database
# uuid (supplied or freshly generated), a zeroed sequence counter, a new
# replica uuid and the current replica format version.  Also initializes
# the companion resolution database unless this replica *is* one.
sub initialize_backend {
    my $self = shift;
    my %args = validate(
        @_,
        {
            db_uuid    => 0,
            resdb_uuid => 0,
        }
    );

    $self->set_db_uuid( $args{'db_uuid'}
          || $self->uuid_generator->create_str );
    $self->set_latest_sequence_no("0");
    $self->set_replica_uuid( $self->uuid_generator->create_str );
    $self->set_replica_version(1);

    $self->resolution_db_handle->initialize( db_uuid => $args{resdb_uuid} )
      if !$self->is_resdb;
}
# The replica's most recent changeset sequence number, read from disk.
sub latest_sequence_no {
    my $self = shift;
    return $self->_read_file('latest-sequence-no');
}
# Persist a new value for the replica's sequence counter.
sub set_latest_sequence_no {
    my ( $self, $id ) = @_;

    $self->_write_file(
        path    => 'latest-sequence-no',
        content => scalar($id),
    );
}
# Bump the on-disk sequence counter by one and return the new value.
sub _increment_sequence_no {
    my $self = shift;

    my $next = $self->latest_sequence_no + 1;
    $self->set_latest_sequence_no($next);
    return $next;
}
# Return the replica's UUID, lazily loading and caching it from the
# on-disk "replica-uuid" file.
sub uuid {
    my $self = shift;

    $self->_uuid( $self->_read_file('replica-uuid') )
      unless $self->_uuid;

    return $self->_uuid;
}
# Persist the replica's UUID to disk.
sub set_replica_uuid {
    my ( $self, $uuid ) = @_;

    $self->_write_file(
        path    => 'replica-uuid',
        content => $uuid,
    );
}
# Persist the database UUID to disk, then let the superclass update its
# own notion of it.
sub set_db_uuid {
    my ( $self, $uuid ) = @_;

    $self->_write_file(
        path    => 'database-uuid',
        content => $uuid,
    );
    return $self->SUPER::set_db_uuid($uuid);
}
# Working with records {
# Serialize and store a Prophet::Record object.
sub _write_record {
    my $self = shift;
    my %args = validate( @_, { record => { isa => 'Prophet::Record' }, } );

    my $rec = $args{record};
    $self->_write_serialized_record(
        type  => $rec->type,
        uuid  => $rec->uuid,
        props => $rec->get_props,
    );
}
# Store a record's property hash in the content-addressed store and
# queue an index row pointing at the new content.
#
# Properties whose value is undef or the empty string are treated as
# deleted and stripped before serialization.
sub _write_serialized_record {
    my $self = shift;
    my %args = validate( @_, { type => 1, uuid => 1, props => 1 } );

    for ( keys %{ $args{'props'} } ) {
        delete $args{'props'}->{$_}
          if ( !defined $args{'props'}->{$_} || $args{'props'}->{$_} eq '' );
    }

    my $cas_key = $self->record_cas->write( $args{props} );

    # NOTE: a $record hashref used to be built here with the same three
    # keys but was never used anywhere — dead code, removed.
    $self->_prepare_record_index_update(
        uuid    => $args{uuid},
        type    => $args{type},
        cas_key => $cas_key
    );
}
# Queue (or immediately write) an index row for a record revision.
#
# While an edit is open the row is buffered on current_edit_records and
# flushed by commit_edit, which supplies the changeset's sequence
# number.  Outside an edit the row is written straight to disk.
sub _prepare_record_index_update {
    my $self   = shift;
    my %record = (@_);

    # If we're inside an edit, we can record the changeset info into the index
    if ( $self->current_edit ) {
        push @{ $self->current_edit_records }, \%record;
    } else {

        # If we're not inside an edit, we're likely exporting the replica
        # TODO: the replica exporter code should probably be retooled
        $self->_write_record_index_entry(%record);
    }
}
use constant RECORD_INDEX_SIZE => ( 4 + 20 );
# Append one fixed-size row (RECORD_INDEX_SIZE bytes: 32-bit big-endian
# changeset sequence number + 40 hex chars of cas key) to a record's
# index file, creating the parent directories as needed.
sub _write_record_index_entry {
    my $self = shift;
    my %args =
      validate( @_,
        { type => 1, uuid => 1, cas_key => 1, changeset_id => 0 } );
    my $idx_filename = $self->_record_index_filename(
        uuid => $args{uuid},
        type => $args{type}
    );

    my $index_path = Prophet::Util->catfile( $self->fs_root, $idx_filename );
    my ( undef, $parent, $filename ) = File::Spec->splitpath($index_path);
    mkpath( [$parent] );

    open( my $record_index, ">>", $index_path )
      or die "Could not open record index $index_path: $!";

    # XXX TODO: skip if the index already has this version of the record;
    # XXX TODO FETCH THAT
    my $record_last_changed_changeset = $args{'changeset_id'} || 0;
    my $index_row =
      pack( 'NH40', $record_last_changed_changeset, $args{cas_key} );

    # BUGFIX: this used to be "print $fh $row || die $!".  Because ||
    # binds tighter than the list, it evaluated ($row || die) — and
    # since $row is always a non-empty string the die could never fire,
    # silently ignoring write failures.  Use low-precedence "or" and
    # also check close(), where buffered write errors surface.
    print {$record_index} $index_row
      or die "Could not write record index $index_path: $!";
    close $record_index
      or die "Could not close record index $index_path: $!";
}
# Read `length` bytes starting at `position` from a file under the
# replica root, delegating to the backend (local file or HTTP range).
sub _read_file_range {
    my $self = shift;
    my %args = validate( @_, { path => 1, position => 1, length => 1 } );

    return $self->backend->read_file_range(%args);
}
# Return ( sequence_no, cas_key ) from the final fixed-size row of a
# record's index file — i.e. the most recent revision of the record.
# Returns an empty list when the index cannot be read.
sub _last_record_index_entry {
    my $self = shift;
    my %args = ( type => undef, uuid => undef, @_ );

    my $idx_filename;    # NOTE(review): unused — candidate for removal
    my $record = $self->_read_file_range(
        path => $self->_record_index_filename(
            uuid => $args{uuid},
            type => $args{type}
        ),

        # Read exactly one row from the end of the file.
        position => ( 0 - RECORD_INDEX_SIZE ),
        length   => RECORD_INDEX_SIZE
    ) || return;

    # Row layout: 32-bit big-endian sequence number, 40 hex chars of key.
    my ( $seq, $key ) = unpack( "NH40", $record );
    return ( $seq, $key );
}
# Read every row of a record's index file, oldest first.  Each row is
# returned as [ changeset_sequence_no => cas_key ].  Returns an empty
# list when the index file does not exist or is empty.
sub _read_record_index {
    my $self = shift;
    my %args = validate( @_, { type => 1, uuid => 1 } );

    my $idx_filename = $self->_record_index_filename(
        uuid => $args{uuid},
        type => $args{type}
    );
    my $index = $self->backend->read_file($idx_filename);
    return unless $index;

    my @entries;
    my $row_count = length($index) / RECORD_INDEX_SIZE;
    for my $i ( 0 .. $row_count - 1 ) {
        my $row = substr( $index, $i * RECORD_INDEX_SIZE, RECORD_INDEX_SIZE );
        my ( $seq, $key ) = unpack( 'NH40', $row );
        push @entries, [ $seq => $key ];
    }
    return @entries;
}
# Remove a record's index file from disk, dying on failure.
sub _delete_record_index {
    my $self = shift;
    my %args = validate( @_, { type => 1, uuid => 1 } );

    my $idx_filename = $self->_record_index_filename(
        uuid => $args{uuid},
        type => $args{type}
    );
    my $full_path = Prophet::Util->catfile( $self->fs_root => $idx_filename );
    unlink $full_path
      or die "Could not delete record $idx_filename: " . $!;
}
# Load the latest stored property hash for a record from the
# content-addressed store.  Returns nothing when the record has no live
# content (never written, or deleted).
sub _read_serialized_record {
    my $self = shift;
    my %args = validate( @_, { type => 1, uuid => 1 } );

    my $casfile = $self->_record_cas_filename(
        type => $args{'type'},
        uuid => $args{'uuid'}
    );
    return unless $casfile;

    # Records are stored as UTF-8 encoded JSON.
    return from_json( $self->_read_file($casfile), { utf8 => 1 } );
}
# XXX TODO: memoize doesn't work on win:
# t\resty-server will issue the following error:
# Anonymous function called in forbidden list context; faulting
memoize '_record_index_filename' unless $^O =~ /MSWin/;

# Path (relative to fs_root) of a record's index file:
# records/<type>/<hashed two-level fanout>/<uuid>.  Memoized (except on
# Windows) because it is computed on every record read and write.
sub _record_index_filename {
    my $self = shift;
    my %args = validate( @_, { uuid => 1, type => 1 } );
    return Prophet::Util->catfile(
        $self->_record_type_dir( $args{'type'} ),
        Prophet::Util::hashed_dir_name( $args{uuid} )
    );
}
# Return the content-addressed-store path holding the latest version of
# a record, or nothing when the record has no live content.  A cas key
# of forty zeroes is the tombstone written by delete_record and is
# treated the same as "no entry".
sub _record_cas_filename {
    my $self = shift;
    my %args = ( type => undef, uuid => undef, @_ );

    my ( $seq, $key ) = $self->_last_record_index_entry(
        type => $args{'type'},
        uuid => $args{'uuid'}
    );

    # '0' x 40 marks a deleted record.
    return unless ( $key and ( $key ne '0' x 40 ) );
    return $self->record_cas->filename($key);
}
# Directory (relative to fs_root) that holds all records of a type.
sub _record_type_dir {
    my ( $self, $type ) = @_;
    return File::Spec->catdir( $self->record_dir, $type );
}
# }
# Return, in order, the changeset objects for every changeset that
# touched this record (oldest first).  When `limit` is given, at most
# that many changesets (counted from record creation) are returned.
sub changesets_for_record {
    my $self = shift;
    my %args = validate( @_, { uuid => 1, type => 1, limit => 0 } );

    my @record_index = $self->_read_record_index(
        type => $args{'type'},
        uuid => $args{'uuid'}
    );
    my $changeset_index = $self->read_changeset_index();

    my @changesets;
    for my $item (@record_index) {
        my $sequence = $item->[0];
        push @changesets,
          $self->_get_changeset_via_index(
            sequence_no => $sequence,
            index_file  => $changeset_index
          );

        # BUGFIX: stop once `limit` changesets have been collected.  The
        # previous test ("last if ... && --$args{limit}") was inverted:
        # it returned limit+1 changesets for limit == 1 and stopped one
        # short (limit-1 changesets) for limit >= 2.
        last if defined $args{limit} && --$args{limit} <= 0;
    }
    return @changesets;
}
# Open a new edit: create an empty Prophet::ChangeSet that subsequent
# record operations will attach their changes to, and reset the buffer
# of pending index rows.  The edit is finalized by commit_edit.
sub begin_edit {
    my $self = shift;
    my %args = validate(
        @_,
        {
            source => 0,    # the changeset that we're replaying, if applicable
        }
    );

    # When replaying a foreign changeset, preserve its creator and
    # creation time; otherwise this is a fresh local edit.
    my $source = $args{source};
    my $creator = $source ? $source->creator : $self->changeset_creator;
    my $created = $source && $source->created;

    require Prophet::ChangeSet;
    my $changeset = Prophet::ChangeSet->new(
        {
            source_uuid => $self->uuid,
            creator     => $creator,
            $created ? ( created => $created ) : (),
        }
    );
    $self->current_edit($changeset);
    $self->current_edit_records( [] );
}
# Copy the upstream provenance (original source uuid and sequence
# number) from a replayed changeset onto the edit in progress.
sub _set_original_source_metadata_for_current_edit {
    my $self = shift;
    my ($changeset) = validate_pos( @_, { isa => 'Prophet::ChangeSet' } );

    my $edit = $self->current_edit;
    $edit->original_source_uuid( $changeset->original_source_uuid );
    $edit->original_sequence_no( $changeset->original_sequence_no );
}
# Finalize the in-progress edit: allocate the next local sequence
# number, default the provenance fields for locally-originated changes,
# flush the record index rows buffered by _prepare_record_index_update,
# and append the changeset to the changeset index.
sub commit_edit {
    my $self     = shift;
    my $sequence = $self->_increment_sequence_no;

    # A locally created changeset is its own "original".
    $self->current_edit->original_sequence_no($sequence)
      unless ( defined $self->current_edit->original_sequence_no );
    $self->current_edit->original_source_uuid( $self->uuid )
      unless ( $self->current_edit->original_source_uuid );
    $self->current_edit->sequence_no($sequence);

    # Flush the queued index rows now that the sequence number is known.
    for my $record ( @{ $self->current_edit_records } ) {
        $self->_write_record_index_entry(
            changeset_id => $sequence,
            %$record
        );
    }
    $self->_write_changeset_to_index( $self->current_edit );
}
# Persist a changeset and clear the in-progress edit state.
sub _write_changeset_to_index {
    my ( $self, $changeset ) = @_;

    $self->_write_changeset( changeset => $changeset );
    $self->current_edit(undef);
}
# Propagate the special-changeset flags from a replayed changeset onto
# the current edit.
sub _after_record_changes {
    my $self = shift;
    my ($changeset) = validate_pos( @_, { isa => 'Prophet::ChangeSet' } );

    my $edit = $self->current_edit;
    $edit->is_nullification( $changeset->is_nullification );
    $edit->is_resolution( $changeset->is_resolution );
}
# Create a record: write its serialized props, queue an index update,
# and attach an "add_file" change (one prop-change per property) to the
# current changeset.  Opens and commits its own edit when the caller has
# not already opened one.
sub create_record {
    my $self = shift;
    my %args = validate( @_, { uuid => 1, props => 1, type => 1 } );

    # Nested calls reuse the caller's open edit.
    my $inside_edit = $self->current_edit ? 1 : 0;
    $self->begin_edit() unless ($inside_edit);

    $self->_write_serialized_record(
        type  => $args{'type'},
        uuid  => $args{'uuid'},
        props => $args{'props'}
    );

    my $change = Prophet::Change->new(
        {
            record_type => $args{'type'},
            record_uuid => $args{'uuid'},
            change_type => 'add_file'
        }
    );

    for my $name ( keys %{ $args{props} } ) {
        $change->add_prop_change(
            name => $name,
            old  => undef,
            new  => $args{props}->{$name}
        );
    }

    $self->current_edit->add_change( change => $change );
    $self->commit_edit unless ($inside_edit);
}
# Delete a record: attach a "delete" change to the current changeset and
# write a tombstone index row (cas key of forty zeroes, which
# _record_cas_filename treats as "no content").  The record's history
# remains readable.  Returns 1.
sub delete_record {
    my $self = shift;
    my %args = validate( @_, { uuid => 1, type => 1 } );

    my $inside_edit = $self->current_edit ? 1 : 0;
    $self->begin_edit() unless ($inside_edit);

    my $change = Prophet::Change->new(
        {
            record_type => $args{'type'},
            record_uuid => $args{'uuid'},
            change_type => 'delete'
        }
    );
    $self->current_edit->add_change( change => $change );

    # '0' x 40 is the deletion tombstone.
    $self->_prepare_record_index_update(
        uuid    => $args{uuid},
        type    => $args{type},
        cas_key => '0' x 40
    );

    $self->commit_edit() unless ($inside_edit);
    return 1;
}
# Update a record's properties: merge the given props over the current
# ones (an undef value deletes that property), store the merged set, and
# attach an "update_file" change recording old/new values for each
# property mentioned.  Returns 1.
sub set_record_props {
    my $self = shift;
    my %args = validate( @_, { uuid => 1, props => 1, type => 1 } );

    my $inside_edit = $self->current_edit ? 1 : 0;
    $self->begin_edit() unless ($inside_edit);

    my $old_props = $self->get_record_props(
        uuid => $args{'uuid'},
        type => $args{'type'}
    );

    # Merge: undef means "remove this property".
    my %new_props = %$old_props;
    for my $prop ( keys %{ $args{props} } ) {
        if ( !defined $args{props}->{$prop} ) {
            delete $new_props{$prop};
        } else {
            $new_props{$prop} = $args{props}->{$prop};
        }
    }
    $self->_write_serialized_record(
        type  => $args{'type'},
        uuid  => $args{'uuid'},
        props => \%new_props
    );

    my $change = Prophet::Change->new(
        {
            record_type => $args{'type'},
            record_uuid => $args{'uuid'},
            change_type => 'update_file'
        }
    );

    for my $name ( keys %{ $args{props} } ) {
        $change->add_prop_change(
            name => $name,
            old  => $old_props->{$name},
            new  => $args{props}->{$name}
        );
    }

    $self->current_edit->add_change( change => $change );
    $self->commit_edit() unless ($inside_edit);
    return 1;
}
# Return the latest stored property hash for a record (or nothing if it
# has no live content).
sub get_record_props {
    my $self = shift;
    my %args = validate( @_, { uuid => 1, type => 1 } );

    return $self->_read_serialized_record(
        type => $args{'type'},
        uuid => $args{'uuid'}
    );
}
# True (1) when the record's latest index entry points at live CAS
# content; 0 otherwise (including deleted records).
sub record_exists {
    my $self = shift;
    my %args = validate( @_, { uuid => 1, type => 1 } );
    return unless $args{'uuid'};

    my $cas_file = $self->_record_cas_filename(
        type => $args{'type'},
        uuid => $args{'uuid'}
    );
    return $cas_file ? 1 : 0;
}
# Return an arrayref of instantiated record objects of the given type.
# Walks records/<type>/ for index files and skips records whose latest
# index entry is a deletion tombstone.
sub list_records {
    my $self = shift;
    my %args = validate( @_ => { type => 1, record_class => 1 } );

    return [] unless $self->type_exists( type => $args{type} );

    #return just the filenames, which, File::Find::Rule doesn't seem capable of
    # (inside the wanted sub, $_ is the basename, i.e. the record uuid)
    my @record_uuids;
    find sub { return unless -f $_; push @record_uuids, $_ },
      File::Spec->catdir(
        $self->fs_root => $self->_record_type_dir( $args{'type'} ) );

    return [
        map {
            my $record = $args{record_class}->new(
                {
                    app_handle => $self->app_handle,
                    handle     => $self,
                    type       => $args{type}
                }
            );
            $record->_instantiate_from_hash( uuid => $_ );
            $record;
        }

        # Drop records whose only content is a deletion tombstone.
        grep {
            $self->_record_cas_filename( type => $args{'type'}, uuid => $_ )
        } @record_uuids
    ];
}
# Return an arrayref of all record types stored in this replica: every
# non-hidden entry under the records/ directory.
sub list_types {
    my $self = shift;

    my $type_root = File::Spec->catdir( $self->fs_root => $self->record_dir );

    # Include the failing path in the message (the original die only
    # reported $!, leaving no clue which directory was missing).
    opendir( my $dh, $type_root )
      or die "can't open type directory $type_root: $!";

    my @types = grep { $_ !~ /^\./ } readdir($dh);
    closedir $dh;
    return \@types;
}
# True when the directory for this record type exists under records/.
sub type_exists {
    my $self = shift;
    my %args = validate( @_, { type => 1 } );

    my $type_dir = $self->_record_type_dir( $args{'type'} );
    return $self->_file_exists($type_dir);
}
__PACKAGE__->meta->make_immutable();
no Any::Moose;
1;
__END__
=pod
=head1 NAME
Prophet::Replica::prophet
=head1 VERSION
version 0.751
=head1 METHODS
=head2 replica_version
Returns this replica's version.
=head2 set_replica_version
Sets the replica's version to the given integer.
=head2 uuid
Return the replica's UUID
=head2 changesets_for_record { uuid => $uuid, type => $type, limit => $int }
Returns an ordered set of changeset objects for all changesets containing
changes to this object.
Note that changesets may include changes to other records
If "limit" is specified, only returns that many changesets (starting from
record creation).
=head2 begin_edit
Creates a new L<Prophet::ChangeSet>, which new changes will be added to.
=head1 Replica Format
=head4 overview
$URL
/<db-uuid>/
/replica-uuid
/latest-sequence-no
/replica-version
/cas/records/<substr(sha1,0,1)>/substr(sha1,1,1)/<sha1>
/cas/changesets/<substr(sha1,0,1)>/substr(sha1,1,1)/<sha1>
/records (optional?)
/<record type> (for resolution is actually _prophet-resolution-<cas-key>)
/<record uuid> which is a file containing a list of 0 or more rows
last-changed-sequence-no : cas key
/changesets.idx
index which has records:
each record is : local-replica-seq-no : original-uuid : original-seq-no : cas key
...
/resolutions/
/replica-uuid
/latest-sequence-no
/cas/<substr(sha1,0,1)>/substr(sha1,1,1)/<sha1>
/content (optional?)
/_prophet-resolution-<cas-key> (cas-key == a hash the conflicting change)
/<record uuid> (record uuid == the originating replica)
last-changed-sequence-no : <cas key to the content of the resolution>
/changesets.idx
index which has records:
each record is : local-replica-seq-no : original-uuid : original-seq-no : cas key
...
Inside the top level directory for the mirror, you'll find a directory named as
B<a hex-encoded UUID>. This directory is the root of the published replica. The
uuid uniquely identifes the database being replicated. All replicas of this
database will share the same UUID.
Inside the B<<db-uuid>> directory, are a set of files and directories that make
up the actual content of the database replica:
=over 2
=item C<replica-uuid>
Contains the replica's hex-encoded UUID.
=item C<replica-version>
Contains a single integer that defines the replica format.
The current replica version is 1.
=item C<latest-sequence-no>
Contains a single integer, the replica's most recent sequence number.
=item C<cas/records>
=item C<cas/changesets>
The C<cas> directory holds changesets and records, each keyed by a hex-encoded
hash of the item's content. Inside the C<cas> directory, you'll find a
two-level deep directory tree of single-character hex digits. You'll find the
changeset with the sha1 digest C<f4b7489b21f8d107ad8df78750a410c028abbf6c>
inside C<cas/changesets/f/4/f4b7489b21f8d107ad8df78750a410c028abbf6c>.
You'll find the record with the sha1 digest
C<dd6fb674de879a1a4762d690141cdfee138daf65> inside
C<cas/records/d/d/dd6fb674de879a1a4762d690141cdfee138daf65>.
TODO: define the format for changesets and records
=item C<records>
Files inside the C<records> directory are index files which list off all
published versions of a record and the key necessary to retrieve the record
from the I<content-addressed store>.
Inside the C<records> directory, you'll find directories named for each C<type>
in your database. Inside each C<type> directory, you'll find a two-level
directory tree of single hexadecimal digits. You'll find the record with the
type <Foo> and the UUID C<29A3CA16-03C5-11DD-9AE0-E25CFCEE7EC4> stored in
records/Foo/2/9/29A3CA16-03C5-11DD-9AE0-E25CFCEE7EC4
The format of record files is:
<unsigned-long-int: last-changed-sequence-no><40 chars of hex: cas key>
The file is sorted in asecnding order by revision id.
=item C<changesets.idx>
The C<changesets.idx> file lists each changeset in this replica and provides an
index into the B<content-addressed storage> to fetch the content of the
changeset.
The format of record files is:
<unsigned-long-int: sequence-no><16 bytes: changeset original source uuid><unsigned-long-int: changeset original source sequence no><16 bytes: cas key - sha1 sum of the changeset's content>
The file is sorted in ascending order by revision id.
=item C<resolutions>
=over 2
=item TODO DOC RESOLUTIONS
=back
=back
=head1 AUTHORS
=over 4
=item *
Jesse Vincent <jesse@bestpractical.com>
=item *
Chia-Liang Kao <clkao@bestpractical.com>
=item *
Christine Spang <christine@spang.cc>
=back
=head1 COPYRIGHT AND LICENSE
This software is Copyright (c) 2009 by Best Practical Solutions.
This is free software, licensed under:
The MIT (X11) License
=head1 BUGS AND LIMITATIONS
You can make new bug reports, and view existing ones, through the
web interface at L<https://rt.cpan.org/Public/Dist/Display.html?Name=Prophet>.
=head1 CONTRIBUTORS
=over 4
=item *
Alex Vandiver <alexmv@bestpractical.com>
=item *
Casey West <casey@geeknest.com>
=item *
Cyril Brulebois <kibi@debian.org>
=item *
Florian Ragwitz <rafl@debian.org>
=item *
Ioan Rogers <ioanr@cpan.org>
=item *
Jonas Smedegaard <dr@jones.dk>
=item *
Kevin Falcone <falcone@bestpractical.com>
=item *
Lance Wicks <lw@judocoach.com>
=item *
Nelson Elhage <nelhage@mit.edu>
=item *
Pedro Melo <melo@simplicidade.org>
=item *
Rob Hoelz <rob@hoelz.ro>
=item *
Ruslan Zakirov <ruz@bestpractical.com>
=item *
Shawn M Moore <sartak@bestpractical.com>
=item *
Simon Wistow <simon@thegestalt.org>
=item *
Stephane Alnet <stephane@shimaore.net>
=item *
Unknown user <nobody@localhost>
=item *
Yanick Champoux <yanick@babyl.dyndns.org>
=item *
franck cuny <franck@lumberjaph.net>
=item *
robertkrimen <robertkrimen@gmail.com>
=item *
sunnavy <sunnavy@bestpractical.com>
=back
=cut
| gitpan/Prophet | lib/Prophet/Replica/prophet.pm | Perl | mit | 24,770 |
package MIP::Recipes::Download::Cadd_whole_genome_snvs;
use 5.026;
use Carp;
use charnames qw{ :full :short };
use English qw{ -no_match_vars };
use File::Basename qw{ dirname };
use File::Spec::Functions qw{ catdir catfile };
use open qw{ :encoding(UTF-8) :std };
use Params::Check qw{ allow check last_error };
use utf8;
use warnings;
use warnings qw{ FATAL utf8 };
## CPANM
use autodie qw{ :all };
use Readonly;
## MIPs lib/
use MIP::Constants qw{ $NEWLINE $SPACE $UNDERSCORE };
BEGIN {
require Exporter;
use base qw{ Exporter };
# Functions and variables which can be optionally exported
our @EXPORT_OK = qw{ download_cadd_whole_genome_snvs };
}
sub download_cadd_whole_genome_snvs {

## Function : Download cadd score for whole genome snvs
## Returns  :
## Arguments: $active_parameter_href => Active parameters for this download hash {REF}
##          : $genome_version        => Human genome version
##          : $job_id_href           => The job_id hash {REF}
##          : $profile_base_command  => Submission profile base command
##          : $recipe_name           => Recipe name
##          : $reference_href        => Reference hash {REF}
##          : $reference_version     => Reference version
##          : $quiet                 => Quiet (no output)
##          : $temp_directory        => Temporary directory for recipe
##          : $verbose               => Verbosity

    my ($arg_href) = @_;

    ## Flatten argument(s)
    my $active_parameter_href;
    my $genome_version;
    my $job_id_href;
    my $recipe_name;
    my $reference_href;
    my $reference_version;

    ## Default(s)
    my $profile_base_command;
    my $quiet;
    my $temp_directory;
    my $verbose;

    my $tmpl = {
        active_parameter_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$active_parameter_href,
            strict_type => 1,
        },
        genome_version => {
            store       => \$genome_version,
            strict_type => 1,
        },
        job_id_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$job_id_href,
            strict_type => 1,
        },
        profile_base_command => {
            default     => q{sbatch},
            store       => \$profile_base_command,
            strict_type => 1,
        },
        recipe_name => {
            defined     => 1,
            required    => 1,
            store       => \$recipe_name,
            strict_type => 1,
        },
        reference_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$reference_href,
            strict_type => 1,
        },
        reference_version => {
            defined     => 1,
            required    => 1,
            store       => \$reference_version,
            strict_type => 1,
        },
        quiet => {
            allow       => [ undef, 0, 1 ],
            default     => 1,
            store       => \$quiet,
            strict_type => 1,
        },
        temp_directory => {
            store       => \$temp_directory,
            strict_type => 1,
        },

        ## BUGFIX: $verbose was declared, documented above, and passed
        ## on to get_reference(), but had no template entry so it was
        ## never populated from the caller (always undef).
        verbose => {
            allow       => [ undef, 0, 1 ],
            store       => \$verbose,
            strict_type => 1,
        },
    };

    check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!};

    use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end };
    use MIP::Program::Htslib qw{ htslib_tabix };
    use MIP::Recipe qw{ parse_recipe_prerequisites };
    use MIP::Recipes::Download::Get_reference qw{ get_reference };
    use MIP::Script::Setup_script qw{ setup_script };

    ### PREPROCESSING:

    ## Retrieve logger object
    my $log = Log::Log4perl->get_logger( uc q{mip_download} );

    ## Unpack parameters
    my $reference_dir = $active_parameter_href->{reference_dir};

    my %recipe = parse_recipe_prerequisites(
        {
            active_parameter_href => $active_parameter_href,
            recipe_name           => $recipe_name,
        }
    );

    # Create anonymous filehandle
    my $filehandle = IO::Handle->new();

    ## Creates recipe directories (info & data & script), recipe script filenames and writes sbatch header
    my ( $recipe_file_path, $recipe_info_path ) = setup_script(
        {
            active_parameter_href           => $active_parameter_href,
            core_number                     => $recipe{core_number},
            directory_id                    => q{mip_download},
            filehandle                      => $filehandle,
            info_file_id    => $genome_version . $UNDERSCORE . $reference_version,
            job_id_href     => $job_id_href,
            memory_allocation => $recipe{memory},
            outdata_dir       => $reference_dir,
            outscript_dir     => $reference_dir,
            process_time      => $recipe{time},
            recipe_data_directory_path => $active_parameter_href->{reference_dir},
            recipe_directory => $recipe_name . $UNDERSCORE . $reference_version,
            recipe_name      => $recipe_name,
            source_environment_commands_ref => $recipe{load_env_ref},
        }
    );

    ### SHELL:

    say {$filehandle} q{## } . $recipe_name;

    ## Fetch the raw reference file into the reference directory.
    get_reference(
        {
            filehandle    => $filehandle,
            recipe_name   => $recipe_name,
            reference_dir => $reference_dir,
            reference_href => $reference_href,
            quiet         => $quiet,
            verbose       => $verbose,
        }
    );

    ## Index the downloaded file (sequence name in col 1, position in col 2).
    htslib_tabix(
        {
            begin       => 2,
            end         => 2,
            filehandle  => $filehandle,
            force       => 1,
            sequence    => 1,
            infile_path => catfile( $reference_dir, $reference_href->{outfile} ),
        }
    );
    say {$filehandle} $NEWLINE;

    ## Close filehandle
    close $filehandle or $log->logcroak(q{Could not close filehandle});

    if ( $recipe{mode} == 1 ) {

        ## No upstream or downstream dependencies
        slurm_submit_job_no_dependency_dead_end(
            {
                base_command     => $profile_base_command,
                job_id_href      => $job_id_href,
                log              => $log,
                sbatch_file_name => $recipe_file_path,
            }
        );
    }
    return 1;
}
1;
| henrikstranneheim/MIP | lib/MIP/Recipes/Download/Cadd_whole_genome_snvs.pm | Perl | mit | 6,521 |
#
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package hardware::sensors::akcp::snmp::mode::components::temperature;
use strict;
use warnings;
use hardware::sensors::akcp::snmp::mode::components::resources qw(%map_default1_status %map_online %map_degree_type);
my $mapping = {
TempDescription => { oid => '.1.3.6.1.4.1.3854.1.2.2.1.16.1.1' }, # hhmsSensorArrayTempDescription
TempDegree => { oid => '.1.3.6.1.4.1.3854.1.2.2.1.16.1.3' }, # hhmsSensorArrayTempDegree
TempStatus => { oid => '.1.3.6.1.4.1.3854.1.2.2.1.16.1.4', map => \%map_default1_status }, # hhmsSensorArrayTempStatus
TempOnline => { oid => '.1.3.6.1.4.1.3854.1.2.2.1.16.1.5', map => \%map_online }, # hhmsSensorArrayTempOnline
TempHighWarning => { oid => '.1.3.6.1.4.1.3854.1.2.2.1.16.1.7' }, # hhmsSensorArrayTempHighWarning
TempHighCritical => { oid => '.1.3.6.1.4.1.3854.1.2.2.1.16.1.8' }, # hhmsSensorArrayTempHighCritical
TempLowWarning => { oid => '.1.3.6.1.4.1.3854.1.2.2.1.16.1.9' }, # hhmsSensorArrayTempLowWarning
TempLowCritical => { oid => '.1.3.6.1.4.1.3854.1.2.2.1.16.1.10' }, # hhmsSensorArrayTempLowCritical
TempDegreeType => { oid => '.1.3.6.1.4.1.3854.1.2.2.1.16.1.12', map => \%map_degree_type }, # hhmsSensorArrayTempDegreeType
};
my $mapping2 = {
TempDescription => { oid => '.1.3.6.1.4.1.3854.3.5.2.1.2' }, # temperatureDescription
TempDegree => { oid => '.1.3.6.1.4.1.3854.3.5.2.1.4' }, # temperatureDegree
TempStatus => { oid => '.1.3.6.1.4.1.3854.3.5.2.1.6', map => \%map_default1_status }, # temperatureStatus
TempOnline => { oid => '.1.3.6.1.4.1.3854.3.5.2.1.8', map => \%map_online }, # temperatureGoOffline
TempHighWarning => { oid => '.1.3.6.1.4.1.3854.3.5.2.1.11' }, # temperatureHighWarning
TempHighCritical => { oid => '.1.3.6.1.4.1.3854.3.5.2.1.12' }, # temperatureHighCritical
TempLowWarning => { oid => '.1.3.6.1.4.1.3854.3.5.2.1.10' }, # temperatureLowWarning
TempLowCritical => { oid => '.1.3.6.1.4.1.3854.3.5.2.1.9' }, # temperatureLowCritical
TempDegreeType => { oid => '.1.3.6.1.4.1.3854.3.5.2.1.5'}, # temperatureUnit
};
my $oid_hhmsSensorArrayTempEntry = '.1.3.6.1.4.1.3854.1.2.2.1.16.1';
my $oid_temperatureEntry = '.1.3.6.1.4.1.3854.3.5.2.1';
sub load {
    # Queue the SNMP table roots that check() will read: the legacy hhms
    # table in full, and the newer temperature table up to its last used
    # column so the walk stops early.
    my ($self) = @_;

    my @tables = (
        { oid => $oid_hhmsSensorArrayTempEntry },
        { oid => $oid_temperatureEntry, end => $mapping2->{TempHighCritical}->{oid} },
    );
    push @{$self->{request}}, @tables;
}
# Walk one temperature SNMP table and report every probe found in it.
#
# %options:
#   entry          - root OID whose walk results live in $self->{results}
#   mapping        - column mapping describing that table's layout
#   threshold_mult - scale applied to the device-exported thresholds
#                    (the newer table stores tenths of a degree)
sub check_temperature {
    my ($self, %options) = @_;
    # One iteration per table row: key on the TempOnline column and use the
    # captured OID suffix as the row instance.
    foreach my $oid ($self->{snmp}->oid_lex_sort(keys %{$self->{results}->{$options{entry}}})) {
        next if ($oid !~ /^$options{mapping}->{TempOnline}->{oid}\.(.*)$/);
        my $instance = $1;
        my $result = $self->{snmp}->map_instance(mapping => $options{mapping}, results => $self->{results}->{$options{entry}}, instance => $instance);
        next if ($self->check_filter(section => 'temperature', instance => $instance));
        # Offline probes are reported in the long output but never alerted on.
        if ($result->{TempOnline} eq 'offline') {
            $self->{output}->output_add(long_msg => sprintf("skipping '%s': is offline", $result->{TempDescription}));
            next;
        }
        $self->{components}->{temperature}->{total}++;
        $self->{output}->output_add(long_msg => sprintf("temperature '%s' status is '%s' [instance = %s] [value = %s]",
                                    $result->{TempDescription}, $result->{TempStatus}, $instance,
                                    $result->{TempDegree}));
        # First check: discrete sensor status mapped through the 'default1' table.
        my $exit = $self->get_severity(label => 'default1', section => 'temperature', value => $result->{TempStatus});
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(severity => $exit,
                                        short_msg => sprintf("Temperature '%s' status is '%s'", $result->{TempDescription}, $result->{TempStatus}));
        }
        # Second check: numeric degree value.  User-supplied thresholds win
        # ($checked != 0); otherwise fall back to the thresholds the device
        # itself exports, scaled by threshold_mult, registered per instance.
        my ($exit2, $warn, $crit, $checked) = $self->get_severity_numeric(section => 'temperature', instance => $instance, value => $result->{TempDegree});
        if ($checked == 0) {
            # Non-numeric device thresholds collapse to '' (i.e. unbounded).
            $result->{TempLowWarning} = (defined($result->{TempLowWarning}) && $result->{TempLowWarning} =~ /[0-9]/) ?
                $result->{TempLowWarning} * $options{threshold_mult} : '';
            $result->{TempLowCritical} = (defined($result->{TempLowCritical}) && $result->{TempLowCritical} =~ /[0-9]/) ?
                $result->{TempLowCritical} * $options{threshold_mult} : '';
            $result->{TempHighWarning} = (defined($result->{TempHighWarning}) && $result->{TempHighWarning} =~ /[0-9]/) ?
                $result->{TempHighWarning} * $options{threshold_mult} : '';
            $result->{TempHighCritical} = (defined($result->{TempHighCritical}) && $result->{TempHighCritical} =~ /[0-9]/) ?
                $result->{TempHighCritical} * $options{threshold_mult} : '';
            my $warn_th = $result->{TempLowWarning} . ':' . $result->{TempHighWarning};
            my $crit_th = $result->{TempLowCritical} . ':' . $result->{TempHighCritical};
            $self->{perfdata}->threshold_validate(label => 'warning-temperature-instance-' . $instance, value => $warn_th);
            $self->{perfdata}->threshold_validate(label => 'critical-temperature-instance-' . $instance, value => $crit_th);
            $warn = $self->{perfdata}->get_perfdata_for_output(label => 'warning-temperature-instance-' . $instance);
            $crit = $self->{perfdata}->get_perfdata_for_output(label => 'critical-temperature-instance-' . $instance);
        }
        if (!$self->{output}->is_status(value => $exit2, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(severity => $exit2,
                                        short_msg => sprintf("Temperature '%s' is %s %s", $result->{TempDescription}, $result->{TempDegree}, $result->{TempDegreeType}->{unit}));
        }
        # Perfdata is always emitted, alert or not.
        $self->{output}->perfdata_add(
            label => 'temperature', unit => $result->{TempDegreeType}->{unit},
            nlabel => 'hardware.sensor.temperature.' . $result->{TempDegreeType}->{unit_long},
            instances => $result->{TempDescription},
            value => $result->{TempDegree},
            warning => $warn,
            critical => $crit,
        );
    }
}
sub check {
    my ($self) = @_;

    # Register the component section, then bail out if it is filtered away.
    $self->{output}->output_add(long_msg => "Checking temperatures");
    $self->{components}->{temperature} = { name => 'temperatures', total => 0, skip => 0 };
    return if ($self->check_filter(section => 'temperature'));

    # Scan both firmware generations; the newer table reports tenths of a
    # degree, hence the 0.1 multiplier on its thresholds.
    my @sources = (
        { entry => $oid_hhmsSensorArrayTempEntry, mapping => $mapping,  threshold_mult => 1 },
        { entry => $oid_temperatureEntry,         mapping => $mapping2, threshold_mult => 0.1 },
    );
    check_temperature($self, %{$_}) for @sources;
}
1;
| Sims24/centreon-plugins | hardware/sensors/akcp/snmp/mode/components/temperature.pm | Perl | apache-2.0 | 7,580 |
#
# Copyright 2021 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package storage::hp::3par::ssh::mode::components::disk;
use strict;
use warnings;
sub load {
    my ($self) = @_;

    # Ask the array for every physical disk's Id and State, preceded by a
    # marker line so check() can locate this command's output.  Expected
    # output shape:
    #   Id State
    #    0 normal
    #    1 normal
    #   ...
    my @cli = ('echo "===showdisk==="', 'showpd -showcols Id,State');
    push @{$self->{commands}}, @cli;
}
# Parse the 'showpd' section captured by load() and raise an alert for any
# disk whose state maps to a non-OK severity.
sub check {
    my ($self) = @_;

    $self->{output}->output_add(long_msg => "Checking disks");
    $self->{components}->{disk} = { name => 'disks', total => 0, skip => 0 };
    return if ($self->check_filter(section => 'disk'));

    # Grab everything between our '===showdisk===' marker and either the
    # next section marker or end-of-buffer.
    return if ($self->{results} !~ /===showdisk===.*?\n(.*?)(===|\Z)/msi);
    my @results = split /\n/, $1;

    foreach (@results) {
        # Data lines are '<numeric id> <state>'; header/blank lines are skipped.
        next if (!/^\s*(\d+)\s+(\S+)/);
        my ($instance, $state) = ($1, $2);

        next if ($self->check_filter(section => 'disk', instance => $instance));
        $self->{components}->{disk}->{total}++;

        $self->{output}->output_add(long_msg => sprintf("disk '%s' state is '%s' [instance: '%s']",
                                    $instance, $state, $instance)
        );
        # Map the textual state through the shared 'default' severity table.
        my $exit = $self->get_severity(label => 'default', section => 'disk', value => $state);
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(severity => $exit,
                                        short_msg => sprintf("Disk '%s' state is '%s'",
                                                             $instance, $state));
        }
    }
}
1;
| Tpo76/centreon-plugins | storage/hp/3par/ssh/mode/components/disk.pm | Perl | apache-2.0 | 2,295 |
###########################################$
# Copyright 2008-2010 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"). You may not
# use this file except in compliance with the License.
# A copy of the License is located at
#
# http://aws.amazon.com/apache2.0
#
# or in the "license" file accompanying this file. This file is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions
# and limitations under the License.
###########################################$
# __ _ _ ___
# ( )( \/\/ )/ __)
# /__\ \ / \__ \
# (_)(_) \/\/ (___/
#
# Amazon EC2 Perl Library
# API Version: 2010-06-15
# Generated: Wed Jul 21 13:37:54 PDT 2010
#
package Amazon::EC2::Model::InstanceBlockDeviceMapping;
use base qw (Amazon::EC2::Model);
#
# Amazon::EC2::Model::InstanceBlockDeviceMapping
#
# Properties:
#
#
# DeviceName: string
# Ebs: Amazon::EC2::Model::InstanceEbsBlockDevice
#
#
#
# Constructor.
#
# $data (optional) - hash ref used to pre-populate the fields via the
# inherited _fromHashRef helper.
#
# {_fields} records, for each API property, its current value and declared
# type: DeviceName (string) and Ebs (InstanceEbsBlockDevice).
sub new {
    my ($class, $data) = @_;
    my $self = {};
    $self->{_fields} = {
        DeviceName => { FieldValue => undef, FieldType => "string"},
        Ebs => {FieldValue => undef, FieldType => "Amazon::EC2::Model::InstanceEbsBlockDevice"},
    };
    bless ($self, $class);
    if (defined $data) {
        $self->_fromHashRef($data);
    }
    return $self;
}
# --- DeviceName accessors --------------------------------------------------

# Returns the current DeviceName value (a string), or undef if unset.
sub getDeviceName {
    return shift->{_fields}->{DeviceName}->{FieldValue};
}

# Sets DeviceName; returns $self so calls can be chained.
sub setDeviceName {
    my ($self, $value) = @_;
    $self->{_fields}->{DeviceName}->{FieldValue} = $value;
    return $self;
}

# Fluent alias for setDeviceName.
sub withDeviceName {
    my ($self, $value) = @_;
    $self->setDeviceName($value);
    return $self;
}

# True if DeviceName holds a defined value.
sub isSetDeviceName {
    return defined (shift->{_fields}->{DeviceName}->{FieldValue});
}

# --- Ebs accessors ---------------------------------------------------------

# Returns the current Ebs value (an InstanceEbsBlockDevice), or undef.
sub getEbs {
    return shift->{_fields}->{Ebs}->{FieldValue};
}

# Sets Ebs; now returns $self for chaining, matching setDeviceName (the
# original omitted the return, the only setter in this class to do so).
sub setEbs {
    my ($self, $value) = @_;
    $self->{_fields}->{Ebs}->{FieldValue} = $value;
    return $self;
}

# Fluent alias for setEbs.
sub withEbs {
    my ($self, $value) = @_;
    $self->setEbs($value);
    return $self;
}

# True if Ebs holds a defined value.
sub isSetEbs {
    return defined (shift->{_fields}->{Ebs}->{FieldValue});
}
1;
| electric-cloud/EC-EC2 | src/main/resources/project/lib/Amazon/EC2/Model/InstanceBlockDeviceMapping.pm | Perl | apache-2.0 | 2,478 |
=head1 LICENSE
Copyright [1999-2014] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package XrefParser::FetchFiles;
use strict;
use warnings;
# Given one or several FTP or HTTP URIs, download them. If an URI is
# for a file or MySQL connection, then these will be ignored. For
# FTP, standard shell file name globbing is allowed (but not regular
# expressions). HTTP does not allow file name globbing. The routine
# returns a list of successfully downloaded local files or an empty list
# if there was an error.
use Carp;
use DBI;
use Digest::MD5 qw(md5_hex);
use Getopt::Long;
use POSIX qw(strftime);
use File::Basename;
use File::Spec::Functions;
use IO::File;
use Net::FTP;
use URI;
use URI::file;
use Text::Glob qw( match_glob );
use LWP::UserAgent;
my $base_dir = File::Spec->curdir();
# Construct an empty FetchFiles helper.  Accepts either a class name or an
# existing instance (classic Perl constructor idiom) and blesses a fresh,
# state-less hash into that class.
sub new {
  my ($invocant) = @_;
  return bless {}, ref($invocant) || $invocant;
}
# Download (or locate) every URI in $arg_ref->{user_uris} and return the
# list of local file paths ready for parsing.
#
# $arg_ref keys:
#   dest_dir  - directory downloaded files are written to
#   user_uris - array ref of URIs (script:, file:, ftp:, http:, mysql:)
#   del_down  - true: delete any previously downloaded copy first
#   chk_down  - true: keep and reuse a non-empty previously downloaded copy
#   verbose   - print progress messages
#
# Returns the list of processed local files (script: and mysql: URIs are
# passed through untouched), or an EMPTY list as soon as any single URI
# fails -- callers must treat () as an error, not "nothing to do".
sub fetch_files {
  my ($self, $arg_ref) = @_;
  my $dest_dir         = $arg_ref->{dest_dir};
  my $user_uris        = $arg_ref->{user_uris};
  my $deletedownloaded = $arg_ref->{del_down};
  my $checkdownload    = $arg_ref->{chk_down};
  my $verbose          = $arg_ref->{verbose} ;
  my @processed_files;
  foreach my $user_uri (@$user_uris) {
    # Change old-style 'LOCAL:' URIs into 'file:'.
    $user_uri =~ s/^LOCAL:/file:/ix;
    my $uri = URI->new($user_uri);
    if ( $uri->scheme() eq 'script' ) {
      # script: URIs are executed later by the caller; pass through as-is.
      push( @processed_files, $user_uri );
    } elsif ( $uri->scheme() eq 'file' ) {
      # Deal with local files.
      $user_uri =~ s/file://x;
      if ( -s $user_uri ) {
        push( @processed_files, $user_uri );
      } else {
        printf( "==> Can not find file '%s' (or it is empty)\n",
                $user_uri );
        return ();
      }
    } elsif ( $uri->scheme() eq 'ftp' ) {
      # Deal with FTP files.
      my $file_path = catfile( $dest_dir, basename( $uri->path() ) );
      if ( $deletedownloaded && -e $file_path ) {
        if ($verbose) {
          printf( "Deleting '%s'\n", $file_path );
        }
        unlink($file_path);
      }
      if ( $checkdownload && -s $file_path ) {
        # The file is already there, no need to connect to a FTP
        # server.  This also means no file name globbing was
        # used (for globbing FTP URIs, we always need to connect
        # to a FTP site to see what files are there).
        if ($verbose) {
          printf( "File '%s' already exists\n", $file_path );
        }
        push( @processed_files, $file_path );
        next;
      }
      if ( -e $file_path ) { unlink($file_path) }
      if ($verbose) {
        printf( "Connecting to FTP host '%s' for file '%s' \n",
                $uri->host(), $file_path );
      }
      # Try an active connection first; if it fails (or the directory
      # listing does), retry once in passive mode.
      my $ftp = $self->get_ftp($uri, 0);
      if(!defined($ftp) or ! $ftp->can('ls') or !$ftp->ls()){
        $ftp = $self->get_ftp($uri, 1);
      }
      # The path part of the URI may contain shell-style globs; every
      # matching remote file is fetched.
      foreach my $remote_file ( ( @{ $ftp->ls() } ) ) {
        if ( !match_glob( basename( $uri->path() ), $remote_file ) ) {
          next;
        }
        $file_path = catfile( $dest_dir, basename($remote_file) );
        if ( $deletedownloaded && -e $file_path ) {
          if ($verbose) {
            printf( "Deleting '%s'\n", $file_path );
          }
          unlink($file_path);
        }
        if ( $checkdownload && -s $file_path ) {
          if ($verbose) {
            printf( "File '%s' already exists\n", $file_path );
          }
        } else {
          if ( -e $file_path ) { unlink($file_path) }
          if ( !-d dirname($file_path) ) {
            if ($verbose) {
              printf( "Creating directory '%s'\n",
                      dirname($file_path) );
            }
            if ( !mkdir( dirname($file_path) ) ) {
              printf( "==> Can not create directory '%s': %s",
                      dirname($file_path), $! );
              return ();
            }
          }
          if ($verbose) {
            printf( "Fetching '%s' (size = %s)\n",
                    $remote_file,
                    $ftp->size($remote_file) || '(unknown)' );
            printf( "Local file is '%s'\n", $file_path );
          }
          if ( !$ftp->get( $remote_file, $file_path ) ) {
            printf( "==> Could not get '%s': %s\n",
                    basename( $uri->path() ), $ftp->message() );
            return ();
          }
        } ## end else [ if ( $checkdownload &&...)]
        # Sanity-check compressed downloads; a failed 'gzip -t' means a
        # truncated/corrupt transfer, so remove it and abort.
        if ( $file_path =~ /\.(gz|Z)$/x ) {
          # Read from zcat pipe
          #
          my $cmd = "gzip -t $file_path";
          if ( system($cmd) != 0 ) {
            printf( "system command '%s' failed: %s - "
                      . "Checking of gzip file failed - "
                      . "FILE CORRUPTED ?\n\n",
                    $cmd, $? );
            if ( -e $file_path ) {
              if ($verbose) {
                printf( "Deleting '%s'\n", $file_path );
              }
              unlink($file_path);
            }
            return ();
          } else {
            if ($verbose) {
              printf( "'%s' passed (gzip -t) corruption test.\n",
                      $file_path );
            }
          }
        }
        push( @processed_files, $file_path );
      } ## end foreach my $remote_file ( (...))
    } elsif ( $uri->scheme() eq 'http' ) {
      # Deal with HTTP files.  No globbing here -- one URI, one file.
      my $file_path = catfile( $dest_dir, basename( $uri->path() ) );
      if ( $deletedownloaded && -e $file_path ) {
        if ($verbose) {
          printf( "Deleting '%s'\n", $file_path );
        }
        unlink($file_path);
      }
      if ( $checkdownload && -s $file_path ) {
        # The file is already there, no need to connect to a
        # HTTP server.
        if ($verbose) {
          printf( "File '%s' already exists\n", $file_path );
        }
        push( @processed_files, $file_path );
        next;
      }
      if ( -e $file_path ) { unlink($file_path) }
      if ( !-d dirname($file_path) ) {
        if ($verbose) {
          printf( "Creating directory '%s'\n", dirname($file_path) );
        }
        if ( !mkdir( dirname($file_path) ) ) {
          printf( "==> Can not create directory '%s': %s",
                  dirname($file_path), $! );
          return ();
        }
      }
      if ($verbose) {
        printf( "Connecting to HTTP host '%s'\n", $uri->host() );
        printf( "Fetching '%s'\n", $uri->path() );
      }
      if ( $checkdownload && -s $file_path ) {
        if ($verbose) {
          printf( "File '%s' already exists\n", $file_path );
        }
      } else {
        if ($verbose) {
          printf( "Local file is '%s'\n", $file_path );
        }
        if ( -e $file_path ) { unlink($file_path) }
        # env_proxy honours http_proxy/no_proxy environment settings.
        my $ua = LWP::UserAgent->new();
        $ua->env_proxy();
        my $response =
          $ua->get( $uri->as_string(), ':content_file' => $file_path );
        if ( !$response->is_success() ) {
          printf( "==> Could not get '%s': %s\n",
                  basename( $uri->path() ), $response->content() );
          return ();
        }
      }
      push( @processed_files, $file_path );
    } elsif ( $uri->scheme() eq 'mysql' ) {
      # Just leave MySQL data untouched for now.
      push( @processed_files, $user_uri );
    } else {
      printf( "==> Unknown URI scheme '%s' in URI '%s'\n",
              $uri->scheme(), $uri->as_string() );
      return ();
    }
  } ## end foreach my $user_uri (@{$user_uris})
  return @processed_files;
} ## end sub fetch_files
# Open an anonymous FTP connection for the given URI and chdir to the
# directory part of its path.
#
# Arguments:
#   $uri     - a URI object with scheme 'ftp'
#   $passive - when true, connect in passive mode (fallback used by
#              fetch_files() when an active connection fails)
#
# Returns the connected, binary-mode Net::FTP handle on success, or an
# empty list on any failure (connect, login or cwd) after printing a
# diagnostic.
sub get_ftp{
  my ($self, $uri, $passive) = @_;

  my $ftp;
  if($passive){
    $ftp = Net::FTP->new( $uri->host(), 'Debug' => 0, Passive => 1);
  }
  else{
    $ftp = Net::FTP->new( $uri->host(), 'Debug' => 0);
  }
  if ( !defined($ftp) ) {
    printf( "==> Can not open FTP connection: %s\n", $@ );
    return ();
  }
  if ( !$ftp->login( 'anonymous', '-anonymous@' ) ) {
    printf( "==> Can not log in on FTP host: %s\n",
            $ftp->message() );
    return ();
  }
  if ( !$ftp->cwd( dirname( $uri->path() ) ) ) {
    # Prefix normalised to '==>' so this diagnostic matches every other
    # error message in this module (the original printed '==').
    printf( "==> Can not change directory to '%s': %s\n",
            dirname( $uri->path() ), $ftp->message() );
    return ();
  }
  # Binary mode: the files fetched are mostly gzipped archives.
  $ftp->binary();
  return $ftp;
}
1;
| willmclaren/ensembl | misc-scripts/xref_mapping/XrefParser/FetchFiles.pm | Perl | apache-2.0 | 8,401 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2020] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=head1 NAME
Bio::EnsEMBL::DBSQL::Funcgen::DNAMethylationFileAdaptor
=cut
package Bio::EnsEMBL::Funcgen::DBSQL::DNAMethylationFileAdaptor;
use strict;
use warnings;
use Bio::EnsEMBL::Utils::Exception qw( throw warning );
use DBI qw(:sql_types);
use vars '@ISA';
@ISA = qw(Bio::EnsEMBL::DBSQL::BaseAdaptor);
sub _tables {
return (
['external_feature_file', 'eff'],
['analysis', 'a' ],
['feature_type', 'ft' ],
['data_file', 'dr' ],
);
}
sub _columns {
my $self = shift;
return qw(
eff.external_feature_file_id
eff.name
ft.name
dr.path
dr.file_type
a.analysis_id
ft.feature_type_id
);
}
sub _default_where_clause {
return 'eff.analysis_id = a.analysis_id'
. ' and eff.feature_type_id = ft.feature_type_id'
. ' and dr.table_name="external_feature_file" and dr.table_id=external_feature_file_id'
. ' and ft.name = "5mC"'
;
}
sub fetch_by_name {
my $self = shift;
my $name = shift;
my $constraint = "eff.name = ?";
$self->bind_param_generic_fetch($name, SQL_VARCHAR);
my $dna_methylation_file = $self->generic_fetch($constraint);
if (!$dna_methylation_file || @$dna_methylation_file==0) {
return;
}
if (@$dna_methylation_file!=1) {
throw("Found ". @$dna_methylation_file ." dna methylation files with the same name!");
}
return $dna_methylation_file->[0];
}
sub _objs_from_sth {
my ($self, $sth) = @_;
my (
$sth_fetched_dbID,
$sth_fetched_eff_name,
$sth_fetched_ft_name,
$sth_fetched_dr_path,
$sth_fetched_dr_file_type,
$sth_fetched_a_analysis_id,
$sth_fetched_ft_feature_type_id
);
$sth->bind_columns (
\$sth_fetched_dbID,
\$sth_fetched_eff_name,
\$sth_fetched_ft_name,
\$sth_fetched_dr_path,
\$sth_fetched_dr_file_type,
\$sth_fetched_a_analysis_id,
\$sth_fetched_ft_feature_type_id
);
use Bio::EnsEMBL::Funcgen::DNAMethylationFile;
my $analysis_adaptor = $self->db->get_AnalysisAdaptor();
my $feature_type_adaptor = $self->db->get_FeatureTypeAdaptor();
my @return_objects;
ROW: while ( $sth->fetch() ) {
my $dna_methylation_file = Bio::EnsEMBL::Funcgen::DNAMethylationFile->new(
-dbID => $sth_fetched_dbID,
-name => $sth_fetched_eff_name,
-file => $sth_fetched_dr_path,
-file_type => $sth_fetched_dr_file_type,
);
my $analysis = $analysis_adaptor->fetch_by_dbID($sth_fetched_a_analysis_id);
$dna_methylation_file->_analysis($analysis);
my $feature_type = $feature_type_adaptor->fetch_by_dbID($sth_fetched_ft_feature_type_id);
$dna_methylation_file->_feature_type($feature_type);
push @return_objects, $dna_methylation_file
}
return \@return_objects;
}
1;
| Ensembl/ensembl-funcgen | modules/Bio/EnsEMBL/Funcgen/DBSQL/DNAMethylationFileAdaptor.pm | Perl | apache-2.0 | 3,729 |
#
# Copyright 2022 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package apps::tomcat::web::plugin;
use strict;
use warnings;
use base qw(centreon::plugins::script_simple);
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    # Plugin version and the mode-name -> implementation-class dispatch table.
    $self->{version} = '0.1';
    my %mode_table = (
        'applications'     => 'apps::tomcat::web::mode::applications',
        'connectors'       => 'apps::tomcat::web::mode::connectors',
        'list-application' => 'apps::tomcat::web::mode::listapplication',
        'memory'           => 'apps::tomcat::web::mode::memory',
    );
    %{$self->{modes}} = %mode_table;

    return $self;
}
1;
__END__
=head1 PLUGIN DESCRIPTION
Check Tomcat Application Server through Manager Webpage
=cut
| centreon/centreon-plugins | apps/tomcat/web/plugin.pm | Perl | apache-2.0 | 1,477 |
# Request class for the EC2 DescribeSpotFleetInstances API call.
# Each Moose attribute is one request parameter; the 'NameInRequest' trait
# maps the Perl attribute name onto the wire-format parameter name.
package Paws::EC2::DescribeSpotFleetInstances;
  use Moose;
  has DryRun => (is => 'ro', isa => 'Bool', traits => ['NameInRequest'], request_name => 'dryRun' );
  has MaxResults => (is => 'ro', isa => 'Int', traits => ['NameInRequest'], request_name => 'maxResults' );
  has NextToken => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'nextToken' );
  # The only mandatory parameter: the Spot fleet request to describe.
  has SpotFleetRequestId => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'spotFleetRequestId' , required => 1);
  use MooseX::ClassAttribute;
  # Class-level call metadata consumed by the Paws dispatcher: API action
  # name, response class, and result key (none here).
  class_has _api_call => (isa => 'Str', is => 'ro', default => 'DescribeSpotFleetInstances');
  class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::EC2::DescribeSpotFleetInstancesResponse');
  class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::EC2::DescribeSpotFleetInstances - Arguments for method DescribeSpotFleetInstances on Paws::EC2
=head1 DESCRIPTION
This class represents the parameters used for calling the method DescribeSpotFleetInstances on the
Amazon Elastic Compute Cloud service. Use the attributes of this class
as arguments to method DescribeSpotFleetInstances.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to DescribeSpotFleetInstances.
As an example:
$service_obj->DescribeSpotFleetInstances(Att1 => $value1, Att2 => $value2, ...);
Values for attributes that are native types (Int, String, Float, etc) can passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
=head1 ATTRIBUTES
=head2 DryRun => Bool
Checks whether you have the required permissions for the action,
without actually making the request, and provides an error response. If
you have the required permissions, the error response is
C<DryRunOperation>. Otherwise, it is C<UnauthorizedOperation>.
=head2 MaxResults => Int
The maximum number of results to return in a single call. Specify a
value between 1 and 1000. The default value is 1000. To retrieve the
remaining results, make another call with the returned C<NextToken>
value.
=head2 NextToken => Str
The token for the next set of results.
=head2 B<REQUIRED> SpotFleetRequestId => Str
The ID of the Spot fleet request.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method DescribeSpotFleetInstances in L<Paws::EC2>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/EC2/DescribeSpotFleetInstances.pm | Perl | apache-2.0 | 2,696 |
#
# Copyright 2018 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::dell::6200::snmp::plugin;
use strict;
use warnings;
use base qw(centreon::plugins::script_snmp);
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    $self->{version} = '1.0';

    # Map every supported mode name to the class implementing it.  Most
    # checks are shared with the other Dell FastPath-based switches.
    my %mode_table = (
        'global-status'   => 'centreon::common::dell::powerconnect3000::mode::globalstatus',
        'environment'     => 'centreon::common::dell::fastpath::snmp::mode::environment',
        'interfaces'      => 'snmp_standard::mode::interfaces',
        'memory'          => 'centreon::common::dell::fastpath::snmp::mode::memory',
        'cpu'             => 'centreon::common::dell::fastpath::snmp::mode::cpu',
        'list-interfaces' => 'snmp_standard::mode::listinterfaces',
    );
    %{$self->{modes}} = %mode_table;

    return $self;
}
1;
__END__
=head1 PLUGIN DESCRIPTION
Check Dell 6200 series in SNMP.
=cut
| wilfriedcomte/centreon-plugins | network/dell/6200/snmp/plugin.pm | Perl | apache-2.0 | 1,787 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package XrefParser::ChecksumParser;
# Input format looks like:
#
# UPI0001B45C00 71B80D7A684B1F2DEDDA7B5AEE1D029E
# UPI0002473BEA 4542D97F3AB3F7B656ABB941AED3F2BB
# UPI00024743AF A69E7EEE820CA54100AD43E86BE823E4
use strict;
use warnings;
use Carp;
use IO::File;
use base qw( XrefParser::BaseParser );
my $TABLE_NAME = 'checksum_xref';
# Entry point called by the xref pipeline for each checksum source.
#
# $ref_arg must carry (croaks otherwise):
#   source_id  - xref source the file belongs to
#   species_id - species being loaded (validated but not used below)
#   files      - array ref; only the first file is processed
# Optional:
#   verbose    - print progress messages (defaults to 0)
#
# The input file is rewritten to '<file>.mysqlinput' in LOAD DATA format
# by _transfer_contents(), then bulk-loaded by _load_table().
sub run {
  my ($self, $ref_arg) = @_;
  my $source_id    = $ref_arg->{source_id};
  my $species_id   = $ref_arg->{species_id};
  my $files        = $ref_arg->{files};
  my $verbose      = $ref_arg->{verbose};
  if((!defined $source_id) or (!defined $species_id) or (!defined $files) ){
    croak "Need to pass source_id, species_id and files as pairs";
  }
  $verbose ||=0;
  my $target_file = $files->[0].'.mysqlinput';
  # get_filehandle transparently handles compressed inputs.
  my $input_fh = $self->get_filehandle($files->[0]);
  if(-f $target_file) {
    print "Target file '${target_file}' already exists; removing\n" if $verbose;
    unlink $target_file;
  }
  # NOTE(review): IO::File->new can return undef (e.g. unwritable dir);
  # the handle is used without being checked.
  my $output_fh = IO::File->new($target_file, 'w');
  $self->_transfer_contents($input_fh, $output_fh, $source_id);
  close($input_fh);
  close($output_fh);
  $self->_load_table($target_file, $verbose, $source_id);
  return;
}
# Copy "<UPI> <checksum>" pairs from $input_fh to $output_fh as the
# tab-separated rows LOAD DATA INFILE expects for checksum_xref:
#   checksum_xref_id, source_id, accession (UPI), checksum
#
# Row ids continue from the highest id already in the table.  The original
# code started *at* the current maximum, so the first new row re-used an
# existing id whenever the table already held rows (e.g. for another
# source); ids now start at max+1, or 1 for an empty table.
sub _transfer_contents {
  my ($self, $input_fh, $output_fh, $source_id) = @_;
  my $dbh = $self->dbi();
  my ($max_id) = $dbh->selectrow_array('select max(checksum_xref_id) from '.$TABLE_NAME );
  my $counter = defined $max_id ? $max_id + 1 : 1;
  while(my $line = <$input_fh>) {
    chomp $line;
    my ($upi, $checksum) = split(/\s+/, $line);
    my @output = ($counter++, $source_id, $upi, $checksum);
    print $output_fh join("\t", @output);
    print $output_fh "\n";
  }
  return;
}
# Bulk-load the prepared tab-separated file into checksum_xref.  Any rows
# previously loaded for this source are removed first so the parser can be
# re-run safely.
sub _load_table {
  my ($self, $file, $verbose, $source_id) = @_;
  my $dbh = $self->dbi();
  my ($count) = $dbh->selectrow_array('select count(*) from '.$TABLE_NAME . ' WHERE source_id = ' . $source_id);
  if($count) {
    print "'$TABLE_NAME' has rows for $source_id; deleting\n" if $verbose;
    $dbh->do('delete from ' . $TABLE_NAME . ' WHERE source_id = ' . $source_id);
  }
  print "Loading data into '$TABLE_NAME' from '$file'\n" if $verbose;
  # Space added before INTO (the original emitted "...'%s'INTO TABLE...").
  # NOTE(review): $file is interpolated into the SQL; it is derived from our
  # own temp-file naming, not user input, but $dbh->quote() would be more
  # defensive.
  my $load = sprintf(q{LOAD DATA LOCAL INFILE '%s' INTO TABLE %s}, $file, $TABLE_NAME);
  $dbh->do($load);
  print "Finished loading data into '$TABLE_NAME'\n" if $verbose;
  return;
}
1;
| danstaines/ensembl | misc-scripts/xref_mapping/XrefParser/ChecksumParser.pm | Perl | apache-2.0 | 2,981 |
# Generated SOAP complex-type class for AppPaymentModel (AdWords v201409).
# Built on SOAP::WSDL + Class::Std::Fast inside-out objects; do not edit
# the :ATTR / _factory wiring by hand.
package Google::Ads::AdWords::v201409::AppPaymentModel;
use strict;
use warnings;
__PACKAGE__->_set_element_form_qualified(1);
# XML namespace this type belongs to.
sub get_xmlns { 'https://adwords.google.com/api/adwords/cm/v201409' };
our $XML_ATTRIBUTE_CLASS;
undef $XML_ATTRIBUTE_CLASS;
# Attribute class for this type (none -- always undef).
sub __get_attr_class {
    return $XML_ATTRIBUTE_CLASS;
}
use base qw(Google::Ads::AdWords::v201409::Criterion);
# Variety: sequence
use Class::Std::Fast::Storable constructor => 'none';
use base qw(Google::Ads::SOAP::Typelib::ComplexType);
{ # BLOCK to scope variables
# Inside-out attribute storage; :ATTR(:get<...>) generates the getters.
my %id_of :ATTR(:get<id>);
my %type_of :ATTR(:get<type>);
my %Criterion__Type_of :ATTR(:get<Criterion__Type>);
my %appPaymentModelType_of :ATTR(:get<appPaymentModelType>);
# _factory wires up, in order: element names, their storage hashes, their
# XSD/Perl types, and their on-the-wire XML names.
__PACKAGE__->_factory(
    [ qw(        id
        type
        Criterion__Type
        appPaymentModelType
    ) ],
    {
        'id' => \%id_of,
        'type' => \%type_of,
        'Criterion__Type' => \%Criterion__Type_of,
        'appPaymentModelType' => \%appPaymentModelType_of,
    },
    {
        'id' => 'SOAP::WSDL::XSD::Typelib::Builtin::long',
        'type' => 'Google::Ads::AdWords::v201409::Criterion::Type',
        'Criterion__Type' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
        'appPaymentModelType' => 'Google::Ads::AdWords::v201409::AppPaymentModel::AppPaymentModelType',
    },
    {
        'id' => 'id',
        'type' => 'type',
        'Criterion__Type' => 'Criterion.Type',
        'appPaymentModelType' => 'appPaymentModelType',
    }
);
} # end BLOCK
1;
=pod
=head1 NAME
Google::Ads::AdWords::v201409::AppPaymentModel
=head1 DESCRIPTION
Perl data type class for the XML Schema defined complexType
AppPaymentModel from the namespace https://adwords.google.com/api/adwords/cm/v201409.
Represents a criterion for targeting paid apps. <p>Possible IDs: {@code 30} ({@code APP_PAYMENT_MODEL_PAID}).</p> <p>A criterion of this type can only be created using an ID. <span class="constraint AdxEnabled">This is disabled for AdX when it is contained within Operators: ADD, SET.</span>
=head2 PROPERTIES
The following properties may be accessed using get_PROPERTY / set_PROPERTY
methods:
=over
=item * appPaymentModelType
=back
=head1 METHODS
=head2 new
Constructor. The following data structure may be passed to new():
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| gitpan/Google-Ads-AdWords-Client | lib/Google/Ads/AdWords/v201409/AppPaymentModel.pm | Perl | apache-2.0 | 2,339 |
#!/usr/bin/env perl
# mysqltuner.pl - Version 1.7.13
# High Performance MySQL Tuning Script
# Copyright (C) 2006-2018 Major Hayden - major@mhtx.net
#
# For the latest updates, please visit http://mysqltuner.com/
# Git repository available at http://github.com/major/MySQLTuner-perl
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This project would not be possible without help from:
# Matthew Montgomery Paul Kehrer Dave Burgess
# Jonathan Hinds Mike Jackson Nils Breunese
# Shawn Ashlee Luuk Vosslamber Ville Skytta
# Trent Hornibrook Jason Gill Mark Imbriaco
# Greg Eden Aubin Galinotti Giovanni Bechis
# Bill Bradford Ryan Novosielski Michael Scheidell
# Blair Christensen Hans du Plooy Victor Trac
# Everett Barnes Tom Krouper Gary Barrueto
# Simon Greenaway Adam Stein Isart Montane
# Baptiste M. Cole Turner Major Hayden
# Joe Ashcraft Jean-Marie Renouard Christian Loos
# Julien Francoz
#
# Inspired by Matthew Montgomery's tuning-primer.sh script:
# http://forge.mysql.com/projects/view.php?id=44
#
package main;
use 5.005;
use strict;
use warnings;
use diagnostics;
use File::Spec;
use Getopt::Long;
use Pod::Usage;
use File::Basename;
use Cwd 'abs_path';
use Data::Dumper;
$Data::Dumper::Pair = " : ";
# for which()
#use Env;
# Set up a few variables for use in the script
my $tunerversion = "1.7.13";

# @adjvars collects "variables to adjust" suggestions; @generalrec collects
# general recommendations. Both are printed in the final report.
my ( @adjvars, @generalrec );

# Default values for every command-line option. Scalar flags default to 0
# ("not given"); string options default to '' where an empty string is a
# meaningful "unset" value. Note nocolor defaults to on when STDOUT is not
# a terminal.
my %opt = (
    "silent"         => 0,
    "nobad"          => 0,
    "nogood"         => 0,
    "noinfo"         => 0,
    "debug"          => 0,
    "nocolor"        => ( !-t STDOUT ),
    "color"          => 0,
    "forcemem"       => 0,
    "forceswap"      => 0,
    "host"           => 0,
    "socket"         => 0,
    "port"           => 0,
    "user"           => 0,
    "pass"           => 0,
    "password"       => 0,
    "ssl-ca"         => 0,
    "skipsize"       => 0,
    "checkversion"   => 0,
    "updateversion"  => 0,
    "buffers"        => 0,
    "passwordfile"   => 0,
    "bannedports"    => '',
    "maxportallowed" => 0,
    "outputfile"     => 0,
    "dbstat"         => 0,
    "tbstat"         => 0,
    "notbstat"       => 0,
    "idxstat"        => 0,
    "sysstat"        => 0,
    "pfstat"         => 0,
    "skippassword"   => 0,
    "noask"          => 0,
    "template"       => 0,
    "json"           => 0,
    "prettyjson"     => 0,
    "reportfile"     => 0,
    "verbose"        => 0,
    "defaults-file"  => '',
);
# Gather the options from the command line; on a parse error, show the
# relevant POD sections and exit non-zero.
GetOptions(
    \%opt,            'nobad',
    'nogood',         'noinfo',
    'debug',          'nocolor',
    'forcemem=i',     'forceswap=i',
    'host=s',         'socket=s',
    'port=i',         'user=s',
    'pass=s',         'skipsize',
    'checkversion',   'mysqladmin=s',
    'mysqlcmd=s',     'help',
    'buffers',        'skippassword',
    'passwordfile=s', 'outputfile=s',
    'silent',         'dbstat',
    'json',           'prettyjson',
    'idxstat',        'noask',
    'template=s',     'reportfile=s',
    'cvefile=s',      'bannedports=s',
    'updateversion',  'maxportallowed=s',
    'verbose',        'sysstat',
    'password=s',     'pfstat',
    'passenv=s',      'userenv=s',
    'defaults-file=s', 'ssl-ca=s',
    'color',          'tbstat',
    'notbstat'
  )
  or pod2usage(
    -exitval  => 1,
    -verbose  => 99,
    -sections => [
        "NAME",
        "IMPORTANT USAGE GUIDELINES",
        "CONNECTION AND AUTHENTICATION",
        "PERFORMANCE AND REPORTING OPTIONS",
        "OUTPUT OPTIONS"
    ]
  );

# --help: print the same POD sections and exit successfully.
if ( defined $opt{'help'} && $opt{'help'} == 1 ) {
    pod2usage(
        -exitval  => 0,
        -verbose  => 99,
        -sections => [
            "NAME",
            "IMPORTANT USAGE GUIDELINES",
            "CONNECTION AND AUTHENTICATION",
            "PERFORMANCE AND REPORTING OPTIONS",
            "OUTPUT OPTIONS"
        ]
    );
}
# Post-process parsed options: resolve auxiliary file locations, pull
# credentials from environment variables, expand --verbose into its
# component flags, and decide whether color output stays enabled.
my $devnull = File::Spec->devnull();
my $basic_password_files =
  ( $opt{passwordfile} eq "0" )
  ? abs_path( dirname(__FILE__) ) . "/basic_passwords.txt"
  : abs_path( $opt{passwordfile} );

# Username from envvar
if ( exists $opt{userenv} && exists $ENV{ $opt{userenv} } ) {
    $opt{user} = $ENV{ $opt{userenv} };
}

# Related to password option
if ( exists $opt{passenv} && exists $ENV{ $opt{passenv} } ) {
    $opt{pass} = $ENV{ $opt{passenv} };
}
$opt{pass} = $opt{password} if ( $opt{pass} eq 0 and $opt{password} ne 0 );

# for RPM distributions
$basic_password_files = "/usr/share/mysqltuner/basic_passwords.txt"
  unless -f "$basic_password_files";

# check if we need to enable verbose mode
if ( $opt{verbose} ) {
    $opt{checkversion} = 1;    #Check for updates to MySQLTuner
    $opt{dbstat}       = 1;    #Print database information
    $opt{tbstat}       = 1;    #Print database information
    $opt{idxstat}      = 1;    #Print index information
    $opt{sysstat}      = 1;    #Print index information
    $opt{buffers}      = 1;    #Print global and per-thread buffer values
    $opt{pfstat}       = 1;    #Print performance schema info.
    $opt{cvefile} = 'vulnerabilities.csv';  #CVE File for vulnerability checks
}

# BUGFIX: --outputfile defaults to 0, so defined($opt{outputfile}) was always
# true and color output was silently disabled on every run. Only disable
# color when an output file was actually requested.
$opt{nocolor} = 1 if $opt{outputfile} ne "0";

# BUGFIX: --notbstat is meant to SUPPRESS table statistics (see trailing
# comment), but the flag previously set tbstat to 1 instead of 0.
$opt{tbstat} = 0 if ( $opt{notbstat} != 0 );  # Don't Print database information

# for RPM distributions
$opt{cvefile} = "/usr/share/mysqltuner/vulnerabilities.csv"
  unless ( defined $opt{cvefile} and -f "$opt{cvefile}" );
$opt{cvefile} = '' unless -f "$opt{cvefile}";
$opt{cvefile} = './vulnerabilities.csv' if -f './vulnerabilities.csv';

$opt{'bannedports'} = '' unless defined( $opt{'bannedports'} );
my @banned_ports = split ',', $opt{'bannedports'};

#
my $outputfile = undef;
$outputfile = abs_path( $opt{outputfile} ) unless $opt{outputfile} eq "0";

my $fh = undef;
open( $fh, '>', $outputfile )
  or die("Fail opening $outputfile")
  if defined($outputfile);

# File output implies no ANSI color; so does a non-terminal STDOUT.
# An explicit --color always wins.
$opt{nocolor} = 1 if defined($outputfile);
$opt{nocolor} = 1 unless ( -t STDOUT );
$opt{nocolor} = 0 if ( $opt{color} == 1 );
# Identify the invoking user; shown in the [CMD] prompt prefix below.
my $me = `whoami`;
$me =~ s/\n//g;

# Setting up the colors for the print styles (plain bracket tags when
# nocolor is active).
my $good = ( $opt{nocolor} == 0 ) ? "[\e[0;32mOK\e[0m]"  : "[OK]";
my $bad  = ( $opt{nocolor} == 0 ) ? "[\e[0;31m!!\e[0m]"  : "[!!]";
my $info = ( $opt{nocolor} == 0 ) ? "[\e[0;34m--\e[0m]"  : "[--]";
my $deb  = ( $opt{nocolor} == 0 ) ? "[\e[0;31mDG\e[0m]"  : "[DG]";
my $cmd  = ( $opt{nocolor} == 0 ) ? "\e[1;32m[CMD]($me)" : "[CMD]($me)";
my $end  = ( $opt{nocolor} == 0 ) ? "\e[0m"              : "";

# Checks for supported or EOL'ed MySQL versions
my ( $mysqlvermajor, $mysqlverminor, $mysqlvermicro );

# Super structure containing all information; serialized for --json and
# template reports.
my %result;
$result{'MySQLTuner'}{'version'} = $tunerversion;
$result{'MySQLTuner'}{'options'} = \%opt;
# Functions that handle the print styles.
# prettyprint is the single output funnel: honors --silent/--json and
# duplicates each line into the report file handle when one is open.
sub prettyprint {
    print $_[0] . "\n" unless ( $opt{'silent'} or $opt{'json'} );
    print $fh $_[0] . "\n" if defined($fh);
}

# Tagged one-liners; each can be suppressed by its corresponding option.
sub goodprint  { prettyprint $good. " " . $_[0] unless ( $opt{nogood} == 1 ); }
sub infoprint  { prettyprint $info. " " . $_[0] unless ( $opt{noinfo} == 1 ); }
sub badprint   { prettyprint $bad. " " . $_[0] unless ( $opt{nobad} == 1 ); }
sub debugprint { prettyprint $deb. " " . $_[0] unless ( $opt{debug} == 0 ); }

# Wrap a string in red/green ANSI codes (no-ops under nocolor).
sub redwrap {
    return ( $opt{nocolor} == 0 ) ? "\e[0;31m" . $_[0] . "\e[0m" : $_[0];
}

sub greenwrap {
    return ( $opt{nocolor} == 0 ) ? "\e[0;32m" . $_[0] . "\e[0m" : $_[0];
}
sub cmdprint { prettyprint $cmd. " " . $_[0] . $end; }

# Print each argument as an indented info line (newlines stripped).
sub infoprintml {
    for my $ln (@_) { $ln =~ s/\n//g; infoprint "\t$ln"; }
}

# Echo a shell command, run it, and print its non-blank output lines.
sub infoprintcmd {
    cmdprint "@_";
    infoprintml grep { $_ ne '' and $_ !~ /^\s*$/ } `@_ 2>&1`;
}

# Print a dashed section header padded to a fixed total width.
sub subheaderprint {
    my $tln = 100;
    my $sln = 8;
    my $ln  = length("@_") + 2;

    prettyprint " ";
    prettyprint "-" x $sln . " @_ " . "-" x ( $tln - $ln - $sln );
}

sub infoprinthcmd {
    subheaderprint "$_[0]";
    infoprintcmd "$_[1]";
}

# Calculates the number of physical cores considering HyperThreading.
# Parses /proc/cpuinfo (Linux-only); falls back to nproc when the awk
# expression yields 0.
sub cpu_cores {
    my $cntCPU =
`awk -F: '/^core id/ && !P[\$2] { CORES++; P[\$2]=1 }; /^physical id/ && !N[\$2] { CPUs++; N[\$2]=1 }; END { print CPUs*CORES }' /proc/cpuinfo`;
    return ( $cntCPU == 0 ? `nproc` : $cntCPU );
}
# Formats a byte count as a human-readable string with one decimal place
# (e.g. 1536 -> "1.5K"). Undefined or "NULL" inputs are reported as "0B";
# values below 1K come back as raw bytes with a "B" suffix.
sub hr_bytes {
    my ($bytes) = @_;
    return "0B" if !defined($bytes) || $bytes eq "NULL";
    for my $scale ( [ 1024**3, "G" ], [ 1024**2, "M" ], [ 1024, "K" ] ) {
        my ( $unit, $suffix ) = @$scale;
        return sprintf( "%.1f", $bytes / $unit ) . $suffix if $bytes >= $unit;
    }
    return $bytes . "B";
}
# Inverse of hr_bytes: converts a suffixed size string ("2G", "3M", "4K",
# or a bare digit string) back into a byte count. Undefined or "NULL"
# inputs become "0"; anything unparseable is returned unchanged.
sub hr_raw {
    my ($value) = @_;
    return "0" if !defined($value) || $value eq "NULL";
    my %factor = ( G => 1024**3, M => 1024**2, K => 1024 );
    if ( $value =~ /^(\d+)([GMK]?)$/ ) {
        my ( $digits, $unit ) = ( $1, $2 );
        return $digits if $unit eq '';
        return $digits * $factor{$unit};
    }
    return $value;
}
# Like hr_bytes but truncates to a whole number of units (e.g. 2047 bytes
# -> "1K"). Undefined or "NULL" inputs are reported as "0B".
sub hr_bytes_rnd {
    my ($bytes) = @_;
    return "0B" if !defined($bytes) || $bytes eq "NULL";
    for my $pair ( [ 1024**3, "G" ], [ 1024**2, "M" ], [ 1024, "K" ] ) {
        my ( $unit, $label ) = @$pair;
        return int( $bytes / $unit ) . $label if $bytes >= $unit;
    }
    return $bytes . "B";
}
# Formats a count using powers of 1000, truncated to an integer:
# billions -> "B", millions -> "M", thousands -> "K"; smaller values are
# returned unchanged.
sub hr_num {
    my ($count) = @_;
    return int( $count / 1000**3 ) . "B" if $count >= 1000**3;    # Billions
    return int( $count / 1000**2 ) . "M" if $count >= 1000**2;    # Millions
    return int( $count / 1000 ) . "K"    if $count >= 1000;       # Thousands
    return $count;
}
# Calculate Percentage
# Returns $value as a percentage of $total, formatted with two decimals.
# A missing, "NULL" or zero $total yields "100.00" (avoids division by
# zero while signalling "everything").
sub percentage {
    my $value = shift;
    my $total = shift;
    $total = 0 unless defined $total;
    $total = 0 if $total eq "NULL";

    # BUGFIX: this previously read `return 100, 00` — the two-element list
    # (100, 0) — so scalar-context callers received 0 instead of 100.
    return "100.00" if $total == 0;
    return sprintf( "%.2f", ( $value * 100 / $total ) );
}
# Renders an uptime given in seconds as "Xd Xh Xm Xs", omitting leading
# zero-valued units (e.g. 125 -> "2m 5s").
sub pretty_uptime {
    my ($total_seconds) = @_;
    my $days    = int( $total_seconds / 86400 );
    my $hours   = int( ( $total_seconds % 86400 ) / 3600 );
    my $minutes = int( ( $total_seconds % 3600 ) / 60 );
    my $seconds = $total_seconds % 60;

    return "${days}d ${hours}h ${minutes}m ${seconds}s" if $days > 0;
    return "${hours}h ${minutes}m ${seconds}s"          if $hours > 0;
    return "${minutes}m ${seconds}s"                    if $minutes > 0;
    return "${seconds}s";
}
# Retrieves the memory installed on this machine.
# $duflags holds the platform-specific "du" flag chosen in os_setup.
my ( $physical_memory, $swap_memory, $duflags );

# Fatal bail-out used by os_setup when RAM/swap detection commands fail.
sub memerror {
    badprint
"Unable to determine total memory/swap; use '--forcemem' and '--forceswap'";
    exit 1;
}
# Detects the host OS and determines physical/swap memory, either from the
# --forcemem/--forceswap overrides or by shelling out to OS-specific tools
# (/proc/meminfo, sysctl, swapctl, prtconf, lsattr, wmic...). Populates
# $physical_memory, $swap_memory, $duflags and the 'OS' section of %result.
# Any detection failure aborts via memerror().
sub os_setup {
    my $os = `uname`;
    $duflags = ( $os =~ /Linux/ ) ? '-b' : '';
    if ( $opt{'forcemem'} > 0 ) {
        $physical_memory = $opt{'forcemem'} * 1048576;
        infoprint "Assuming $opt{'forcemem'} MB of physical memory";
        if ( $opt{'forceswap'} > 0 ) {
            $swap_memory = $opt{'forceswap'} * 1048576;
            infoprint "Assuming $opt{'forceswap'} MB of swap space";
        }
        else {
            $swap_memory = 0;
            badprint "Assuming 0 MB of swap space (use --forceswap to specify)";
        }
    }
    else {
        if ( $os =~ /Linux|CYGWIN/ ) {
            # /proc/meminfo reports kB; convert to bytes.
            $physical_memory =
              `grep -i memtotal: /proc/meminfo | awk '{print \$2}'`
              or memerror;
            $physical_memory *= 1024;

            $swap_memory =
              `grep -i swaptotal: /proc/meminfo | awk '{print \$2}'`
              or memerror;
            $swap_memory *= 1024;
        }
        elsif ( $os =~ /Darwin/ ) {
            $physical_memory = `sysctl -n hw.memsize` or memerror;
            $swap_memory =
              `sysctl -n vm.swapusage | awk '{print \$3}' | sed 's/\..*\$//'`
              or memerror;
        }
        elsif ( $os =~ /NetBSD|OpenBSD|FreeBSD/ ) {
            # hw.physmem can overflow to a negative value on 32-bit sysctl;
            # retry with the 64-bit variant.
            $physical_memory = `sysctl -n hw.physmem` or memerror;
            if ( $physical_memory < 0 ) {
                $physical_memory = `sysctl -n hw.physmem64` or memerror;
            }
            $swap_memory =
              `swapctl -l | grep '^/' | awk '{ s+= \$2 } END { print s }'`
              or memerror;
        }
        elsif ( $os =~ /BSD/ ) {
            $physical_memory = `sysctl -n hw.realmem` or memerror;
            $swap_memory =
              `swapinfo | grep '^/' | awk '{ s+= \$2 } END { print s }'`;
        }
        elsif ( $os =~ /SunOS/ ) {
            # prtconf reports MB. NOTE(review): no swap detection on SunOS;
            # $swap_memory stays undef here — verify against memerror intent.
            $physical_memory =
              `/usr/sbin/prtconf | grep Memory | cut -f 3 -d ' '`
              or memerror;
            chomp($physical_memory);
            $physical_memory = $physical_memory * 1024 * 1024;
        }
        elsif ( $os =~ /AIX/ ) {
            # lsattr realmem is kB; lsps swap is MB.
            $physical_memory =
              `lsattr -El sys0 | grep realmem | awk '{print \$2}'`
              or memerror;
            chomp($physical_memory);
            $physical_memory = $physical_memory * 1024;
            $swap_memory = `lsps -as | awk -F"(MB| +)" '/MB /{print \$2}'`
              or memerror;
            chomp($swap_memory);
            $swap_memory = $swap_memory * 1024 * 1024;
        }
        elsif ( $os =~ /windows/i ) {
            $physical_memory =
`wmic ComputerSystem get TotalPhysicalMemory | perl -ne "chomp; print if /[0-9]+/;"`
              or memerror;
            $swap_memory =
`wmic OS get FreeVirtualMemory | perl -ne "chomp; print if /[0-9]+/;"`
              or memerror;
        }
    }
    debugprint "Physical Memory: $physical_memory";
    debugprint "Swap Memory: $swap_memory";
    chomp($physical_memory);
    chomp($swap_memory);
    chomp($os);
    $result{'OS'}{'OS Type'}                   = $os;
    $result{'OS'}{'Physical Memory'}{'bytes'}  = $physical_memory;
    $result{'OS'}{'Physical Memory'}{'pretty'} = hr_bytes($physical_memory);
    $result{'OS'}{'Swap Memory'}{'bytes'}      = $swap_memory;
    $result{'OS'}{'Swap Memory'}{'pretty'}     = hr_bytes($swap_memory);
    $result{'OS'}{'Other Processes'}{'bytes'}  = get_other_process_memory();
    $result{'OS'}{'Other Processes'}{'pretty'} =
      hr_bytes( get_other_process_memory() );
}
# Returns the path of an available HTTP client found in $PATH, preferring
# curl over wget; returns an empty string when neither is available.
sub get_http_cli {
    for my $client ( "curl", "wget" ) {
        my $path = which( $client, $ENV{'PATH'} );
        chomp($path);
        return $path if $path;
    }
    return "";
}
# Checks for updates to MySQLTuner.
# Fetches the upstream script over HTTPS (curl or wget), extracts its
# $tunerversion line and compares it with the local version. Skipped
# unless --checkversion or --updateversion was given.
sub validate_tuner_version {
    if ( $opt{'checkversion'} eq 0 and $opt{'updateversion'} eq 0 ) {
        print "\n" unless ( $opt{'silent'} or $opt{'json'} );
        infoprint "Skipped version check for MySQLTuner script";
        return;
    }

    my $update;
    my $url =
"https://raw.githubusercontent.com/major/MySQLTuner-perl/master/mysqltuner.pl";
    my $httpcli = get_http_cli();
    if ( $httpcli =~ /curl$/ ) {
        debugprint "$httpcli is available.";

        debugprint
"$httpcli -m 3 -silent '$url' 2>/dev/null | grep 'my \$tunerversion'| cut -d\\\" -f2";
        $update =
`$httpcli -m 3 -silent '$url' 2>/dev/null | grep 'my \$tunerversion'| cut -d\\\" -f2`;
        chomp($update);
        debugprint "VERSION: $update";

        compare_tuner_version($update);
        return;
    }

    if ( $httpcli =~ /wget$/ ) {
        debugprint "$httpcli is available.";

        debugprint
"$httpcli -e timestamping=off -t 1 -T 3 -O - '$url' 2>$devnull| grep 'my \$tunerversion'| cut -d\\\" -f2";
        $update =
`$httpcli -e timestamping=off -t 1 -T 3 -O - '$url' 2>$devnull| grep 'my \$tunerversion'| cut -d\\\" -f2`;
        chomp($update);
        compare_tuner_version($update);
        return;
    }
    debugprint "curl and wget are not available.";
    infoprint "Unable to check for the latest MySQLTuner version";

    # NOTE(review): this warning is only reached when no HTTP client exists;
    # it looks independent of that condition — confirm intended placement.
    infoprint
"Using --pass and --password option is insecure during MySQLTuner execution(Password disclosure)"
      if ( defined( $opt{'pass'} ) );
}
# Downloads the latest MySQLTuner script plus its data files
# (basic_passwords.txt, vulnerabilities.csv) from the upstream repository,
# overwriting the local copies. Only runs when --updateversion was given.
sub update_tuner_version {
    if ( $opt{'updateversion'} eq 0 ) {
        badprint "Skipped version update for MySQLTuner script";
        print "\n" unless ( $opt{'silent'} or $opt{'json'} );
        return;
    }

    my $update;
    my $url = "https://raw.githubusercontent.com/major/MySQLTuner-perl/master/";
    my @scripts =
      ( "mysqltuner.pl", "basic_passwords.txt", "vulnerabilities.csv" );
    my $totalScripts    = scalar(@scripts);
    my $receivedScripts = 0;
    my $httpcli         = get_http_cli();

    foreach my $script (@scripts) {

        if ( $httpcli =~ /curl$/ ) {
            debugprint "$httpcli is available.";

            debugprint
              "$httpcli --connect-timeout 3 '$url$script' 2>$devnull > $script";
            $update =
              `$httpcli --connect-timeout 3 '$url$script' 2>$devnull > $script`;
            chomp($update);
            debugprint "$script updated: $update";

            # A zero-length file means the download failed.
            if ( -s $script eq 0 ) {
                badprint "Couldn't update $script";
            }
            else {
                ++$receivedScripts;
                debugprint "$script updated: $update";
            }
        }
        elsif ( $httpcli =~ /wget$/ ) {

            debugprint "$httpcli is available.";

            debugprint
              "$httpcli -qe timestamping=off -t 1 -T 3 -O $script '$url$script'";
            $update =
              `$httpcli -qe timestamping=off -t 1 -T 3 -O $script '$url$script'`;
            chomp($update);

            if ( -s $script eq 0 ) {
                badprint "Couldn't update $script";
            }
            else {
                ++$receivedScripts;
                debugprint "$script updated: $update";
            }
        }
        else {
            debugprint "curl and wget are not available.";
            infoprint "Unable to check for the latest MySQLTuner version";
        }

    }

    if ( $receivedScripts eq $totalScripts ) {
        goodprint "Successfully updated MySQLTuner script";
    }
    else {
        badprint "Couldn't update MySQLTuner script";
    }

    #exit 0;
}
# Compares the remote script version with the local $tunerversion and
# kicks off update_tuner_version() when they differ.
sub compare_tuner_version {
    my ($remoteversion) = @_;
    debugprint "Remote data: $remoteversion";

    #exit 0;
    if ( $remoteversion eq $tunerversion ) {
        goodprint "You have the latest version of MySQLTuner($tunerversion)";
        return;
    }
    badprint "There is a new version of MySQLTuner available($remoteversion)";
    update_tuner_version();
    return;
}
# Globals used for MySQL connectivity: the assembled login-option string,
# remote flag, extra connection arguments, and resolved client binaries.
my ( $mysqllogin, $doremote, $remotestring, $mysqlcmd, $mysqladmincmd );
my $osname = $^O;
if ( $osname eq 'MSWin32' ) {

    # BUGFIX: this previously read `eval { require Win32; } or last;` —
    # `last` outside any loop is a fatal runtime error, so a missing Win32
    # module would crash instead of skipping. Skip the lookup instead.
    if ( eval { require Win32; 1 } ) {
        $osname = Win32::GetOSName();
        infoprint "* Windows OS($osname) is not fully supported.\n";

        #exit 1;
    }
}
# Locates the mysql/mysqladmin client binaries and establishes working
# login credentials, trying in order: explicit --user/--pass, a Solaris
# mysql-quickbackup service, Plesk, DirectAdmin, the Debian maintenance
# account, an explicit --defaults-file, a passwordless/.my.cnf login, and
# finally an interactive prompt. On success sets $mysqllogin (and
# $doremote/$remotestring) and returns 1; any failure exits the script.
sub mysql_setup {
    $doremote     = 0;
    $remotestring = '';
    if ( $opt{mysqladmin} ) {
        $mysqladmincmd = $opt{mysqladmin};
    }
    else {
        $mysqladmincmd = which( "mysqladmin", $ENV{'PATH'} );
    }
    chomp($mysqladmincmd);
    if ( !-e $mysqladmincmd && $opt{mysqladmin} ) {
        badprint "Unable to find the mysqladmin command you specified: "
          . $mysqladmincmd . "";
        exit 1;
    }
    elsif ( !-e $mysqladmincmd ) {
        badprint "Couldn't find mysqladmin in your \$PATH. Is MySQL installed?";
        exit 1;
    }
    if ( $opt{mysqlcmd} ) {
        $mysqlcmd = $opt{mysqlcmd};
    }
    else {
        $mysqlcmd = which( "mysql", $ENV{'PATH'} );
    }
    chomp($mysqlcmd);
    if ( !-e $mysqlcmd && $opt{mysqlcmd} ) {
        badprint "Unable to find the mysql command you specified: "
          . $mysqlcmd . "";
        exit 1;
    }
    elsif ( !-e $mysqlcmd ) {
        badprint "Couldn't find mysql in your \$PATH. Is MySQL installed?";
        exit 1;
    }
    $mysqlcmd =~ s/\n$//g;

    # auto-vertical-output breaks the batch-mode parsing used by
    # select_array/select_one, so refuse to run with it configured.
    my $mysqlclidefaults = `$mysqlcmd --print-defaults`;
    debugprint "MySQL Client: $mysqlclidefaults";
    if ( $mysqlclidefaults =~ /auto-vertical-output/ ) {
        badprint
          "Avoid auto-vertical-output in configuration file(s) for MySQL like";
        exit 1;
    }

    debugprint "MySQL Client: $mysqlcmd";

    $opt{port} = ( $opt{port} eq 0 ) ? 3306 : $opt{port};

    # Are we being asked to connect via a socket?
    if ( $opt{socket} ne 0 ) {
        $remotestring = " -S $opt{socket} -P $opt{port}";
    }

    # Are we being asked to connect to a remote server?
    if ( $opt{host} ne 0 ) {
        chomp( $opt{host} );

# If we're doing a remote connection, but forcemem wasn't specified, we need to exit
        if (   $opt{'forcemem'} eq 0
            && ( $opt{host} ne "127.0.0.1" )
            && ( $opt{host} ne "localhost" ) )
        {
            badprint "The --forcemem option is required for remote connections";
            exit 1;
        }
        infoprint "Performing tests on $opt{host}:$opt{port}";
        $remotestring = " -h $opt{host} -P $opt{port}";
        if ( ( $opt{host} ne "127.0.0.1" ) && ( $opt{host} ne "localhost" ) ) {
            $doremote = 1;
        }
    }
    else {
        $opt{host} = '127.0.0.1';
    }

    if ( $opt{'ssl-ca'} ne 0 ) {
        if ( -e -r -f $opt{'ssl-ca'} ) {
            $remotestring .= " --ssl-ca=$opt{'ssl-ca'}";
            infoprint
              "Will connect using ssl public key passed on the command line";

            # NOTE(review): returning here skips every credential strategy
            # below, leaving $mysqllogin undefined — verify this is intended.
            return 1;
        }
        else {
            badprint
"Attempted to use passed ssl public key, but it was not found or could not be read";
            exit 1;
        }
    }

    # Did we already get a username without password on the command line?
    if ( $opt{user} ne 0 and $opt{pass} eq 0 ) {
        $mysqllogin = "-u $opt{user} " . $remotestring;
        my $loginstatus = `$mysqladmincmd ping $mysqllogin 2>&1`;
        if ( $loginstatus =~ /mysqld is alive/ ) {
            goodprint "Logged in using credentials passed on the command line";
            return 1;
        }
        else {
            badprint
              "Attempted to use login credentials, but they were invalid";
            exit 1;
        }
    }

    # Did we already get a username and password passed on the command line?
    if ( $opt{user} ne 0 and $opt{pass} ne 0 ) {
        $mysqllogin = "-u $opt{user} -p'$opt{pass}'" . $remotestring;
        my $loginstatus = `$mysqladmincmd ping $mysqllogin 2>&1`;
        if ( $loginstatus =~ /mysqld is alive/ ) {
            goodprint "Logged in using credentials passed on the command line";
            return 1;
        }
        else {
            badprint
              "Attempted to use login credentials, but they were invalid";
            exit 1;
        }
    }
    my $svcprop = which( "svcprop", $ENV{'PATH'} );
    if ( substr( $svcprop, 0, 1 ) =~ "/" ) {

        # We are on solaris
        ( my $mysql_login =
`svcprop -p quickbackup/username svc:/network/mysql-quickbackup:default`
        ) =~ s/\s+$//;
        ( my $mysql_pass =
`svcprop -p quickbackup/password svc:/network/mysql-quickbackup:default`
        ) =~ s/\s+$//;
        if ( substr( $mysql_login, 0, 7 ) ne "svcprop" ) {

            # mysql-quickbackup is installed
            $mysqllogin = "-u $mysql_login -p$mysql_pass";
            my $loginstatus = `mysqladmin $mysqllogin ping 2>&1`;
            if ( $loginstatus =~ /mysqld is alive/ ) {
                goodprint "Logged in using credentials from mysql-quickbackup.";
                return 1;
            }
            else {
                badprint
"Attempted to use login credentials from mysql-quickbackup, but they failed.";
                exit 1;
            }
        }
    }
    elsif ( -r "/etc/psa/.psa.shadow" and $doremote == 0 ) {

        # It's a Plesk box, use the available credentials
        $mysqllogin = "-u admin -p`cat /etc/psa/.psa.shadow`";
        my $loginstatus = `$mysqladmincmd ping $mysqllogin 2>&1`;
        unless ( $loginstatus =~ /mysqld is alive/ ) {

            # Plesk 10+
            $mysqllogin =
              "-u admin -p`/usr/local/psa/bin/admin --show-password`";
            $loginstatus = `$mysqladmincmd ping $mysqllogin 2>&1`;
            unless ( $loginstatus =~ /mysqld is alive/ ) {
                badprint
"Attempted to use login credentials from Plesk and Plesk 10+, but they failed.";
                exit 1;
            }
        }
    }
    elsif ( -r "/usr/local/directadmin/conf/mysql.conf" and $doremote == 0 ) {

        # It's a DirectAdmin box, use the available credentials
        my $mysqluser =
          `cat /usr/local/directadmin/conf/mysql.conf | egrep '^user=.*'`;
        my $mysqlpass =
          `cat /usr/local/directadmin/conf/mysql.conf | egrep '^passwd=.*'`;

        $mysqluser =~ s/user=//;
        $mysqluser =~ s/[\r\n]//;
        $mysqlpass =~ s/passwd=//;
        $mysqlpass =~ s/[\r\n]//;

        $mysqllogin = "-u $mysqluser -p$mysqlpass";

        my $loginstatus = `mysqladmin ping $mysqllogin 2>&1`;
        unless ( $loginstatus =~ /mysqld is alive/ ) {
            badprint
"Attempted to use login credentials from DirectAdmin, but they failed.";
            exit 1;
        }
    }
    elsif ( -r "/etc/mysql/debian.cnf"
        and $doremote == 0
        and $opt{'defaults-file'} eq '' )
    {

        # We have a Debian maintenance account, use it
        $mysqllogin = "--defaults-file=/etc/mysql/debian.cnf";
        my $loginstatus = `$mysqladmincmd $mysqllogin ping 2>&1`;
        if ( $loginstatus =~ /mysqld is alive/ ) {
            goodprint
              "Logged in using credentials from Debian maintenance account.";
            return 1;
        }
        else {
            badprint "Attempted to use login credentials from Debian maintenance account, but they failed.";
            exit 1;
        }
    }
    elsif ( $opt{'defaults-file'} ne '' and -r "$opt{'defaults-file'}" ) {

        # defaults-file
        debugprint "defaults file detected: $opt{'defaults-file'}";
        my $mysqlclidefaults = `$mysqlcmd --print-defaults`;
        debugprint "MySQL Client Default File: $opt{'defaults-file'}";

        $mysqllogin = "--defaults-file=" . $opt{'defaults-file'};
        my $loginstatus = `$mysqladmincmd $mysqllogin ping 2>&1`;
        if ( $loginstatus =~ /mysqld is alive/ ) {
            goodprint "Logged in using credentials from defaults file account.";
            return 1;
        }
    }
    else {

        # It's not Plesk or Debian, we should try a login
        debugprint "$mysqladmincmd $remotestring ping 2>&1";
        my $loginstatus = `$mysqladmincmd $remotestring ping 2>&1`;
        if ( $loginstatus =~ /mysqld is alive/ ) {

            # Login went just fine
            $mysqllogin = " $remotestring ";

     # Did this go well because of a .my.cnf file or is there no password set?
            my $userpath = `printenv HOME`;
            if ( length($userpath) > 0 ) {
                chomp($userpath);
            }
            unless ( -e "${userpath}/.my.cnf" or -e "${userpath}/.mylogin.cnf" )
            {
                badprint
                  "Successfully authenticated with no password - SECURITY RISK!";
            }
            return 1;
        }
        else {
            if ( $opt{'noask'} == 1 ) {
                badprint
                  "Attempted to use login credentials, but they were invalid";
                exit 1;
            }
            my ( $name, $password );

            # If --user is defined no need to ask for username
            if ( $opt{user} ne 0 ) {
                $name = $opt{user};
            }
            else {
                print STDERR "Please enter your MySQL administrative login: ";
                $name = <STDIN>;
            }

            # If --pass is defined no need to ask for password
            if ( $opt{pass} ne 0 ) {
                $password = $opt{pass};
            }
            else {
                print STDERR
                  "Please enter your MySQL administrative password: ";
                system("stty -echo >$devnull 2>&1");
                $password = <STDIN>;
                system("stty echo >$devnull 2>&1");
            }
            chomp($password);
            chomp($name);
            $mysqllogin = "-u $name";

            if ( length($password) > 0 ) {
                $mysqllogin .= " -p'$password'";
            }
            $mysqllogin .= $remotestring;
            my $loginstatus = `$mysqladmincmd ping $mysqllogin 2>&1`;
            if ( $loginstatus =~ /mysqld is alive/ ) {
                print STDERR "";
                if ( !length($password) ) {

     # Did this go well because of a .my.cnf file or is there no password set?
                    my $userpath = `printenv HOME`;
                    chomp($userpath);
                    unless ( -e "$userpath/.my.cnf" ) {
                        badprint
                          "Successfully authenticated with no password - SECURITY RISK!";
                    }
                }
                return 1;
            }
            else {
                badprint
                  "Attempted to use login credentials, but they were invalid.";
                exit 1;
            }
            exit 1;
        }
    }
}
# MySQL Request Array
# Runs a SQL statement through the mysql CLI in batch/silent mode (-Bse)
# and returns its output as a list of chomped lines. A non-zero exit code
# is reported (and the failing command re-run for its stderr) but is not
# fatal — callers receive whatever output was produced.
sub select_array {
    my $req = shift;
    debugprint "PERFORM: $req ";
    my @result = `$mysqlcmd $mysqllogin -Bse "\\w$req" 2>>/dev/null`;
    if ( $? != 0 ) {
        badprint "failed to execute: $req";
        badprint "FAIL Execute SQL / return code: $?";
        debugprint "CMD    : $mysqlcmd";
        debugprint "OPTIONS: $mysqllogin";
        debugprint `$mysqlcmd $mysqllogin -Bse "$req" 2>&1`;

        #exit $?;
    }
    debugprint "select_array: return code : $?";
    chomp(@result);
    return @result;
}
# Converts a raw byte count into a human-readable string with two decimal
# places (e.g. 2048 -> "2.00 KB").
# BUGFIX/generalization: values of 1 TB or more previously indexed past
# qw[bytes KB MB GB] and produced an undefined unit; the unit list now
# extends to PB and the loop clamps at the largest known unit.
sub human_size {
    my ( $size, $n ) = ( shift, 0 );
    my @units = qw[ bytes KB MB GB TB PB ];
    while ( $size >= 1024 && $n < $#units ) {
        $size /= 1024;
        ++$n;
    }
    return sprintf "%.2f %s", $size, $units[$n];
}
# MySQL Request one
# Runs a SQL statement through the mysql CLI in batch/silent mode and
# returns its output as a single chomped string. Failures are logged but
# not fatal.
sub select_one {
    my $req = shift;
    debugprint "PERFORM: $req ";
    my $result = `$mysqlcmd $mysqllogin -Bse "\\w$req" 2>>/dev/null`;
    if ( $? != 0 ) {
        badprint "failed to execute: $req";
        badprint "FAIL Execute SQL / return code: $?";
        debugprint "CMD : $mysqlcmd";
        debugprint "OPTIONS: $mysqllogin";
        debugprint `$mysqlcmd $mysqllogin -Bse "$req" 2>&1`;

        #exit $?;
    }

    # BUGFIX: debug label previously said "select_array" (copy/paste error).
    debugprint "select_one: return code : $?";
    chomp($result);
    return $result;
}
# MySQL Request one line from vertical (\G) output
# Runs a SQL statement with \G formatting and returns the first output
# line matching $pattern (or undef). Failures are logged but not fatal.
sub select_one_g {
    my $pattern = shift;

    my $req = shift;
    debugprint "PERFORM: $req ";
    my @result = `$mysqlcmd $mysqllogin -re "\\w$req\\G" 2>>/dev/null`;
    if ( $? != 0 ) {
        badprint "failed to execute: $req";
        badprint "FAIL Execute SQL / return code: $?";
        debugprint "CMD : $mysqlcmd";
        debugprint "OPTIONS: $mysqllogin";
        debugprint `$mysqlcmd $mysqllogin -Bse "$req" 2>&1`;

        #exit $?;
    }

    # BUGFIX: debug label previously said "select_array" (copy/paste error).
    debugprint "select_one_g: return code : $?";
    chomp(@result);
    return ( grep { /$pattern/ } @result )[0];
}
# Runs a vertical-format query via select_one_g, keeps the first line that
# matches $pattern, drops the leading "name:" field and returns the
# remaining colon-separated values trimmed of surrounding whitespace.
# Returns the empty list when no line matches.
sub select_str_g {
    my ( $pattern, $req ) = @_;
    my $line = select_one_g( $pattern, $req );
    return () unless defined $line;
    my ( undef, @fields ) = split /:/, $line;
    return trim(@fields);
}
# Captures the mysql client's "\s" status output (minus volatile lines such
# as thread counts and connection ids) into the 'MySQL Client' section of
# %result, along with the resolved client paths and login string.
sub get_tuning_info {
    my @infoconn = select_array "\\s";
    my ( $tkey, $tval );
    @infoconn =
      grep { !/Threads:/ and !/Connection id:/ and !/pager:/ and !/Using/ }
      @infoconn;
    foreach my $line (@infoconn) {
        if ( $line =~ /\s*(.*):\s*(.*)/ ) {
            debugprint "$1 => $2";
            $tkey = $1;
            $tval = $2;
            chomp($tkey);
            chomp($tval);
            $result{'MySQL Client'}{$tkey} = $tval;
        }
    }
    $result{'MySQL Client'}{'Client Path'}         = $mysqlcmd;
    $result{'MySQL Client'}{'Admin Path'}          = $mysqladmincmd;
    # NOTE(review): the login string may embed a plaintext password and is
    # stored in the report structure — confirm this is acceptable for --json.
    $result{'MySQL Client'}{'Authentication Info'} = $mysqllogin;
}
# Populates all of the variable and status hashes
my ( %mystat, %myvar, $dummyselect, %myrepl, %myslaves );

# Parses "name<sep>value" lines from @$harr into %$href. $sep defaults to
# whitespace (as used for SHOW VARIABLES/STATUS output); ':' is used for
# SHOW SLAVE STATUS\G. Lines of asterisks (\G record separators) are
# skipped.
sub arr2hash {
    my $href = shift;
    my $harr = shift;
    my $sep  = shift;
    $sep = '\s' unless defined($sep);
    foreach my $line (@$harr) {
        next if ( $line =~ m/^\*\*\*\*\*\*\*/ );

        # BUGFIX: the match result was previously ignored, so a line that
        # failed to match reused $1/$2 from the previous iteration and
        # inserted a stale key/value pair. Skip non-matching lines instead.
        next unless $line =~ /([a-zA-Z_]*)\s*$sep\s*(.*)/;
        $$href{$1} = $2;
        debugprint "V: $1 = $2";
    }
}
# Populates the global %myvar (SHOW [GLOBAL] VARIABLES), %mystat
# (SHOW [GLOBAL] STATUS), %myrepl (SHOW SLAVE STATUS) and %myslaves
# (SHOW SLAVE HOSTS) hashes, plus derived pseudo-variables such as
# have_galera, have_threadpool and per-engine have_* flags. Exits when even
# SELECT VERSION() fails (insufficient privileges).
sub get_all_vars {

    # We need to initiate at least one query so that our data is useable
    $dummyselect = select_one "SELECT VERSION()";
    if ( not defined($dummyselect) or $dummyselect eq "" ) {
        badprint
          "You probably did not get enough privileges for running MySQLTuner ...";
        exit(256);
    }

    # Strip any "-suffix" (e.g. "-MariaDB", "-log") from the version string.
    $dummyselect =~ s/(.*?)\-.*/$1/;
    debugprint "VERSION: " . $dummyselect . "";
    $result{'MySQL Client'}{'Version'} = $dummyselect;

    my @mysqlvarlist = select_array("SHOW VARIABLES");
    push( @mysqlvarlist, select_array("SHOW GLOBAL VARIABLES") );
    arr2hash( \%myvar, \@mysqlvarlist );
    $result{'Variables'} = \%myvar;

    my @mysqlstatlist = select_array("SHOW STATUS");
    push( @mysqlstatlist, select_array("SHOW GLOBAL STATUS") );
    arr2hash( \%mystat, \@mysqlstatlist );
    $result{'Status'} = \%mystat;

    # Servers that dropped innodb_support_xa treat it as always ON.
    unless ( defined( $myvar{'innodb_support_xa'} ) ) {
        $myvar{'innodb_support_xa'} = 'ON';
    }
    $myvar{'have_galera'} = "NO";
    if (   defined( $myvar{'wsrep_provider_options'} )
        && $myvar{'wsrep_provider_options'} ne ""
        && $myvar{'wsrep_on'} ne "OFF" )
    {
        $myvar{'have_galera'} = "YES";
        debugprint "Galera options: " . $myvar{'wsrep_provider_options'};
    }

    # Workaround for MySQL bug #59393 wrt. ignore-builtin-innodb
    if ( ( $myvar{'ignore_builtin_innodb'} || "" ) eq "ON" ) {
        $myvar{'have_innodb'} = "NO";
    }

    # Support GTID MODE FOR MARIADB
    # Issue MariaDB GTID mode #272
    $myvar{'gtid_mode'} = $myvar{'gtid_strict_mode'}
      if ( defined( $myvar{'gtid_strict_mode'} ) );

    $myvar{'have_threadpool'} = "NO";
    if ( defined( $myvar{'thread_pool_size'} )
        and $myvar{'thread_pool_size'} > 0 )
    {
        $myvar{'have_threadpool'} = "YES";
    }

    # have_* for engines is deprecated and will be removed in MySQL 5.6;
    # check SHOW ENGINES and set corresponding old style variables.
    # Also works around MySQL bug #59393 wrt. skip-innodb
    my @mysqlenginelist = select_array "SHOW ENGINES";
    foreach my $line (@mysqlenginelist) {
        if ( $line =~ /^([a-zA-Z_]+)\s+(\S+)/ ) {
            my $engine = lc($1);

            if ( $engine eq "federated" || $engine eq "blackhole" ) {
                $engine .= "_engine";
            }
            elsif ( $engine eq "berkeleydb" ) {
                $engine = "bdb";
            }
            my $val = ( $2 eq "DEFAULT" ) ? "YES" : $2;
            $myvar{"have_$engine"} = $val;
            $result{'Storage Engines'}{$engine} = $2;
        }
    }
    debugprint Dumper(@mysqlenginelist);
    my @mysqlslave = select_array("SHOW SLAVE STATUS\\G");
    arr2hash( \%myrepl, \@mysqlslave, ':' );
    $result{'Replication'}{'Status'} = \%myrepl;
    my @mysqlslaves = select_array "SHOW SLAVE HOSTS";
    my @lineitems   = ();
    foreach my $line (@mysqlslaves) {
        debugprint "L: $line ";
        @lineitems = split /\s+/, $line;
        $myslaves{ $lineitems[0] } = $line;
        $result{'Replication'}{'Slaves'}{ $lineitems[0] } = $lineitems[4];
    }
}
# Returns copies of the given lines with any trailing newline removed;
# lines consisting solely of whitespace are reduced to empty strings.
sub remove_cr {
    my @cleaned;
    for my $raw (@_) {
        my $line = $raw;
        $line =~ s/\n$//g;
        $line =~ s/^\s+$//g;
        push @cleaned, $line;
    }
    return @cleaned;
}
# Filters empty strings out of the argument list.
sub remove_empty {
    my @kept = grep { $_ ne '' } @_;
    return @kept;
}
# NOTE(review): unfinished stub — declares its variables but performs no
# work and returns nothing useful. Kept for interface compatibility;
# confirm whether it has any callers before removing.
sub grep_file_contents {
    my $file = shift;
    my $patt;
}
# Reads $file and returns its lines with trailing newlines stripped (via
# remove_cr). Dies when the file cannot be opened or closed.
sub get_file_contents {
    my ($file) = @_;
    open( my $handle, "<", $file ) or die "Can't open $file for read: $!";
    my @lines = <$handle>;
    close $handle or die "Cannot close $file: $!";
    return remove_cr(@lines);
}
# Loads the list of trivial passwords to test against from the given file.
sub get_basic_passwords {
    my ($password_file) = @_;
    return get_file_contents($password_file);
}
# Resolves the MySQL error-log location: prefer the configured $file when
# it exists, then "<hostname>.err" in the current directory, then
# "<datadir><hostname>.err" when a datadir is known; otherwise fall back
# to the configured value unchanged.
sub get_log_file_real_path {
    my ( $file, $hostname, $datadir ) = @_;
    return $file                   if -f "$file";
    return "$hostname.err"         if -f "$hostname.err";
    return "$datadir$hostname.err" if $datadir ne "";
    return $file;
}
# Analyzes the server error log: verifies it exists, is readable, non-empty
# and under 32 MB; counts warning/error lines; and lists the last (up to
# ten) recorded startups and shutdowns. Appends findings to @generalrec.
sub log_file_recommendations {
    $myvar{'log_error'} =
      get_log_file_real_path( $myvar{'log_error'}, $myvar{'hostname'},
        $myvar{'datadir'} );

    subheaderprint "Log file Recommendations";
    infoprint "Log file: "
      . $myvar{'log_error'} . "("
      . hr_bytes_rnd( ( stat $myvar{'log_error'} )[7] ) . ")";

    if ( -f "$myvar{'log_error'}" ) {
        goodprint "Log file $myvar{'log_error'} exists";
    }
    else {
        badprint "Log file $myvar{'log_error'} doesn't exist";
    }
    if ( -r "$myvar{'log_error'}" ) {
        goodprint "Log file $myvar{'log_error'} is readable.";
    }
    else {
        badprint "Log file $myvar{'log_error'} isn't readable.";
        return;
    }
    # (stat)[7] is the file size in bytes.
    if ( ( stat $myvar{'log_error'} )[7] > 0 ) {
        goodprint "Log file $myvar{'log_error'} is not empty";
    }
    else {
        badprint "Log file $myvar{'log_error'} is empty";
    }

    if ( ( stat $myvar{'log_error'} )[7] < 32 * 1024 * 1024 ) {
        goodprint "Log file $myvar{'log_error'} is smaller than 32 Mb";
    }
    else {
        badprint "Log file $myvar{'log_error'} is bigger than 32 Mb";
        push @generalrec,
          $myvar{'log_error'}
          . " is > 32Mb, you should analyze why or implement a rotation log strategy such as logrotate!";
    }

    my $numLi     = 0;
    my $nbWarnLog = 0;
    my $nbErrLog  = 0;
    my @lastShutdowns;
    my @lastStarts;

    open( my $fh, '<', $myvar{'log_error'} )
      or die "Can't open $myvar{'log_error'} for read: $!";

    # Single pass over the log: tally warnings/errors and remember
    # shutdown/startup marker lines (InnoDB's own "Shutdown complete"
    # messages are excluded).
    while ( my $logLi = <$fh> ) {
        chomp $logLi;
        $numLi++;
        debugprint "$numLi: $logLi" if $logLi =~ /warning|error/i;
        $nbErrLog++                 if $logLi =~ /error/i;
        $nbWarnLog++                if $logLi =~ /warning/i;
        push @lastShutdowns, $logLi
          if $logLi =~ /Shutdown complete/ and $logLi !~ /Innodb/i;
        push @lastStarts, $logLi if $logLi =~ /ready for connections/;
    }
    close $fh;

    if ( $nbWarnLog > 0 ) {
        badprint "$myvar{'log_error'} contains $nbWarnLog warning(s).";
        push @generalrec,
          "Control warning line(s) into $myvar{'log_error'} file";
    }
    else {
        goodprint "$myvar{'log_error'} doesn't contain any warning.";
    }
    if ( $nbErrLog > 0 ) {
        badprint "$myvar{'log_error'} contains $nbErrLog error(s).";
        push @generalrec, "Control error line(s) into $myvar{'log_error'} file";
    }
    else {
        goodprint "$myvar{'log_error'} doesn't contain any error.";
    }

    infoprint scalar @lastStarts . " start(s) detected in $myvar{'log_error'}";
    my $nStart = 0;
    my $nEnd   = 10;
    if ( scalar @lastStarts < $nEnd ) {
        $nEnd = scalar @lastStarts;
    }
    # Show the most recent entries, newest first.
    for my $startd ( reverse @lastStarts[ -$nEnd .. -1 ] ) {
        $nStart++;
        infoprint "$nStart) $startd";
    }
    infoprint scalar @lastShutdowns
      . " shutdown(s) detected in $myvar{'log_error'}";
    $nStart = 0;
    $nEnd   = 10;
    if ( scalar @lastShutdowns < $nEnd ) {
        $nEnd = scalar @lastShutdowns;
    }
    for my $shutd ( reverse @lastShutdowns[ -$nEnd .. -1 ] ) {
        $nStart++;
        infoprint "$nStart) $shutd";
    }

    #exit 0;
}
# Scans the semicolon-separated CVE file (--cvefile) for vulnerabilities
# whose affected version matches the server's major.minor and whose micro
# version is >= the server's, reporting each hit and recording the count
# in $result{'CVE'}. Skipped when no CVE file is configured.
sub cve_recommendations {
    subheaderprint "CVE Security Recommendations";
    unless ( defined( $opt{cvefile} ) && -f "$opt{cvefile}" ) {
        infoprint "Skipped due to --cvefile option undefined";
        return;
    }

#$mysqlvermajor=10;
#$mysqlverminor=1;
#$mysqlvermicro=17;
#prettyprint "Look for related CVE for $myvar{'version'} or lower in $opt{cvefile}";
    my $cvefound = 0;
    open( my $fh, "<", $opt{cvefile} )
      or die "Can't open $opt{cvefile} for read: $!";
    while ( my $cveline = <$fh> ) {
        # CSV fields: [1..3] = affected major/minor/micro, [4] = CVE id,
        # [6] = description.
        my @cve = split( ';', $cveline );
        debugprint
"Comparing $mysqlvermajor\.$mysqlverminor\.$mysqlvermicro with $cve[1]\.$cve[2]\.$cve[3] : "
          . ( mysql_version_le( $cve[1], $cve[2], $cve[3] ) ? '<=' : '>' );

        # Avoid not major/minor version corresponding CVEs
        next
          unless ( int( $cve[1] ) == $mysqlvermajor
            && int( $cve[2] ) == $mysqlverminor );
        if ( int( $cve[3] ) >= $mysqlvermicro ) {
            badprint "$cve[4](<= $cve[1]\.$cve[2]\.$cve[3]) : $cve[6]";
            $result{'CVE'}{'List'}{$cvefound} =
              "$cve[4](<= $cve[1]\.$cve[2]\.$cve[3]) : $cve[6]";
            $cvefound++;
        }
    }
    close $fh or die "Cannot close $opt{cvefile}: $!";
    $result{'CVE'}{'nb'} = $cvefound;

    my $cve_warning_notes = "";
    if ( $cvefound == 0 ) {
        goodprint "NO SECURITY CVE FOUND FOR YOUR VERSION";
        return;
    }
    if ( $mysqlvermajor eq 5 and $mysqlverminor eq 5 ) {
        infoprint
          "False positive CVE(s) for MySQL and MariaDB 5.5.x can be found.";
        infoprint "Check careful each CVE for those particular versions";
    }
    badprint $cvefound . " CVE(s) found for your MySQL release.";
    push( @generalrec,
        $cvefound
          . " CVE(s) found for your MySQL release. Consider upgrading your version !"
    );
}
# Return the list of locally listening TCP ports, numerically sorted.
# The list is also recorded in the global %result structure.
sub get_opened_ports {
    my @raw = `netstat -ltn`;

    # Reduce every netstat line to the bare port number; non-address
    # lines collapse to the empty string and are filtered out below.
    my @ports;
    for my $line (@raw) {
        my $port = $line;
        $port =~ s/.*:(\d+)\s.*$/$1/;
        $port =~ s/\D//g;
        push @ports, $port;
    }
    @ports = grep { !/^$/ } @ports;
    @ports = sort { $a <=> $b } @ports;
    debugprint Dumper \@ports;
    $result{'Network'}{'TCP Opened'} = \@ports;
    return @ports;
}
# True (1) when the given TCP port appears in the listening-port list,
# 0 otherwise.
sub is_open_port {
    my $port = shift;
    return ( grep { /^$port$/ } get_opened_ports ) ? 1 : 0;
}
# RSS of one process in bytes (ps reports KiB), or 0 when the PID no
# longer exists (ps then prints only its header line).
sub get_process_memory {
    my $pid    = shift;
    my @output = `ps -p $pid -o rss`;

    # A live PID yields exactly two lines: header + value.
    return 0 unless scalar @output == 2;
    return $output[1] * 1024;
}
# Sum the RSS (in bytes) of every process that is NOT mysqld, a kernel
# thread ([...]), systemd, or the ps header line — i.e. memory consumed
# by "other" software on the host.
sub get_other_process_memory {
    my @procs = `ps eaxo pid,command`;

    # Blank out unwanted lines, then reduce each survivor to its bare PID.
    # The substitution order matters: filters first, PID extraction last.
    @procs = map {
        my $v = $_;
        $v =~ s/.*PID.*//;
        $v =~ s/.*mysqld.*//;
        $v =~ s/.*\[.*\].*//;
        $v =~ s/^\s+$//g;
        $v =~ s/.*PID.*CMD.*//;
        $v =~ s/.*systemd.*//;
        $v =~ s/\s*?(\d+)\s*.*/$1/g;
        $v;
    } @procs;
    @procs = remove_cr @procs;
    @procs = remove_empty @procs;
    my $totalMemOther = 0;

    # map in void context used purely for the side effect of summing.
    map { $totalMemOther += get_process_memory($_); } @procs;
    return $totalMemOther;
}
# Best-effort, human-readable OS release string. Probes the usual
# release files in order of preference and returns the first hit.
sub get_os_release {
    if ( -f "/etc/lsb-release" ) {
        my @lines = get_file_contents "/etc/lsb-release";

        # DISTRIB_DESCRIPTION="..." is assumed on the 4th line — TODO
        # confirm for every lsb-release layout.
        my $release = $lines[3];
        $release =~ s/.*="//;
        $release =~ s/"$//;
        return $release;
    }
    if ( -f "/etc/system-release" ) {
        my @lines = get_file_contents "/etc/system-release";
        return $lines[0];
    }
    if ( -f "/etc/os-release" ) {
        my @lines = get_file_contents "/etc/os-release";

        # NAME="..." is expected on the first line.
        my $release = $lines[0];
        $release =~ s/.*="//;
        $release =~ s/"$//;
        return $release;
    }
    if ( -f "/etc/issue" ) {
        my @lines = get_file_contents "/etc/issue";
        my $release = $lines[0];

        # Strip the getty escape sequences that follow the name.
        $release =~ s/\s+\\n.*//;
        return $release;
    }
    return "Unknown OS release";
}
# Report per-mountpoint disk-space and inode usage (df -P / df -Pi),
# flagging any filesystem above 85% utilisation and recording the
# percentages in the global %result structure.
sub get_fs_info {
    my @sinfo = `df -P | grep '%'`;
    my @iinfo = `df -Pi| grep '%'`;

    # Drop the df header from the inode listing.
    shift @iinfo;

    # Reduce each df line to "<pct>\t<mountpoint>".
    @sinfo = map {
        my $v = $_;
        $v =~ s/.*\s(\d+)%\s+(.*)/$1\t$2/g;
        $v;
    } @sinfo;
    foreach my $info (@sinfo) {

        # Skip pseudo filesystems (/run, /dev, /sys, /proc).
        next if $info =~ m{(\d+)\t/(run|dev|sys|proc)($|/)};
        if ( $info =~ /(\d+)\t(.*)/ ) {
            if ( $1 > 85 ) {
                badprint "mount point $2 is using $1 % total space";
                push( @generalrec, "Add some space to $2 mountpoint." );
            }
            else {
                infoprint "mount point $2 is using $1 % of total space";
            }
            $result{'Filesystem'}{'Space Pct'}{$2} = $1;
        }
    }

    # Same reduction and checks for the inode listing.
    @iinfo = map {
        my $v = $_;
        $v =~ s/.*\s(\d+)%\s+(.*)/$1\t$2/g;
        $v;
    } @iinfo;
    foreach my $info (@iinfo) {
        next if $info =~ m{(\d+)\t/(run|dev|sys|proc)($|/)};
        if ( $info =~ /(\d+)\t(.*)/ ) {
            if ( $1 > 85 ) {
                badprint "mount point $2 is using $1 % of max allowed inodes";
                push( @generalrec,
                    "Cleanup files from $2 mountpoint or reformat you filesystem."
                );
            }
            else {
                infoprint "mount point $2 is using $1 % of max allowed inodes";
            }
            $result{'Filesystem'}{'Inode Pct'}{$2} = $1;
        }
    }
}
# Merge two hash references into a freshly built hash, with keys from
# the first hash taking precedence over identical keys in the second.
# Neither input hash is modified; returns a reference to the new hash.
sub merge_hash {
    my $h1 = shift;
    my $h2 = shift;

    # BUGFIX: must start from an empty hash. The previous
    # "my %result = {};" stored a stray stringified hashref as a key
    # (and triggered an odd-number-of-elements warning), polluting the
    # merged result with a bogus "HASH(0x...)" entry.
    my %merged;
    foreach my $substanceref ( $h1, $h2 ) {
        while ( my ( $k, $v ) = each %$substanceref ) {

            # First hash wins on duplicate keys.
            next if ( exists $merged{$k} );
            $merged{$k} = $v;
        }
    }
    return \%merged;
}
# Heuristic VM detection: inside a guest, the CPU "hypervisor" flag is
# set in /proc/cpuinfo. Returns 1 for a VM, 0 for bare metal.
sub is_virtual_machine {
    my $flag_count = `grep -Ec '^flags.*\ hypervisor\ ' /proc/cpuinfo`;
    return $flag_count == 0 ? 0 : 1;
}
# Run a shell command and print each line of its output at info level.
sub infocmd {
    my $cmd = "@_";
    debugprint "CMD: $cmd";
    my @lines = remove_cr `$cmd`;
    foreach my $line (@lines) {
        infoprint "$line";
    }
}
# Run a shell command and print each line of its output at info level,
# indented by one tab.
sub infocmd_tab {
    my $cmd = "@_";
    debugprint "CMD: $cmd";
    my @lines = remove_cr `$cmd`;
    foreach my $line (@lines) {
        infoprint "\t$line";
    }
}
# Run a shell command (stderr folded into stdout) and return all of its
# output lines joined into one comma-separated string.
sub infocmd_one {
    my $cmd   = "@_";
    my @lines = remove_cr `$cmd 2>&1`;
    return join( ', ', @lines );
}
# Inspect Linux kernel tunables relevant to MySQL (swappiness, sunrpc
# TCP slot table, AIO event limit), print their values, record them in
# %result, and push recommendations when a value is unsuitable for a
# database server.
sub get_kernel_info {
    my @params = (
        'fs.aio-max-nr',                     'fs.aio-nr',
        'fs.file-max',                       'sunrpc.tcp_fin_timeout',
        'sunrpc.tcp_max_slot_table_entries', 'sunrpc.tcp_slot_table_entries',
        'vm.swappiness'
    );
    infoprint "Information about kernel tuning:";
    foreach my $param (@params) {
        infocmd_tab("sysctl $param 2>/dev/null");
        $result{'OS'}{'Config'}{$param} = `sysctl -n $param 2>/dev/null`;
    }

    # Databases want the kernel to avoid swapping their buffers out.
    if ( `sysctl -n vm.swappiness` > 10 ) {
        badprint
          "Swappiness is > 10, please consider having a value lower than 10";
        push @generalrec, "setup swappiness lower or equals to 10";
        push @adjvars,
          'vm.swappiness <= 10 (echo 10 > /proc/sys/vm/swappiness)';
    }
    else {
        infoprint "Swappiness is < 10.";
    }

    # Only relevant when the sunrpc subsystem is present.
    # BUGFIX: /proc/sys/sunrpc is a directory, so test with -d; the old
    # -f test could never be true and this check was always skipped.
    my $tcp_slot_entries =
      `sysctl -n sunrpc.tcp_slot_table_entries 2>/dev/null`;
    if ( -d "/proc/sys/sunrpc"
        and ( $tcp_slot_entries eq '' or $tcp_slot_entries < 100 ) )
    {
        # BUGFIX: message used to say "< 1M" although the threshold is 100.
        badprint
"Initial TCP slot entries is < 100, please consider having a value greater than 100";
        push @generalrec, "setup Initial TCP slot entries greater than 100";
        push @adjvars,
'sunrpc.tcp_slot_table_entries > 100 (echo 128 > /proc/sys/sunrpc/tcp_slot_table_entries)';
    }
    else {
        infoprint "TCP slot entries is > 100.";
    }

    # InnoDB native AIO needs a generous kernel-wide event limit.
    if ( `sysctl -n fs.aio-max-nr` < 1000000 ) {
        badprint
"Max running total of the number of events is < 1M, please consider having a value greater than 1M";
        push @generalrec, "setup Max running number events greater than 1M";
        push @adjvars,
          'fs.aio-max-nr > 1M (echo 1048576 > /proc/sys/fs/aio-max-nr)';
    }
    else {
        infoprint "Max Number of AIO events is > 1M.";
    }
}
# Collect and print general host facts (OS release, virtualisation,
# connectivity, CPU, kernel, IPs, users, memory, load, uptime) and store
# them in the global %result structure for reporting.
sub get_system_info {
    $result{'OS'}{'Release'} = get_os_release();
    infoprint get_os_release;
    if (is_virtual_machine) {
        infoprint "Machine type : Virtual machine";
        $result{'OS'}{'Virtual Machine'} = 'YES';
    }
    else {
        infoprint "Machine type : Physical machine";
        $result{'OS'}{'Virtual Machine'} = 'NO';
    }

    # Connectivity probe. BUGFIX: use POSIX redirection — "&>" is a
    # bashism and backticks run /bin/sh, which may not understand it.
    $result{'Network'}{'Connected'} = 'NO';
    `ping -c 1 ipecho.net >/dev/null 2>&1`;
    my $isConnected = $?;
    if ( $isConnected == 0 ) {
        infoprint "Internet : Connected";
        $result{'Network'}{'Connected'} = 'YES';
    }
    else {
        badprint "Internet : Disconnected";
    }
    $result{'OS'}{'NbCore'} = cpu_cores;
    infoprint "Number of Core CPU : " . cpu_cores;
    $result{'OS'}{'Type'} = `uname -o`;
    infoprint "Operating System Type : " . infocmd_one "uname -o";
    $result{'OS'}{'Kernel'} = `uname -r`;
    infoprint "Kernel Release : " . infocmd_one "uname -r";
    $result{'OS'}{'Hostname'}         = `hostname`;
    $result{'Network'}{'Internal Ip'} = `hostname -I`;
    infoprint "Hostname : " . infocmd_one "hostname";
    infoprint "Network Cards : ";
    infocmd_tab "ifconfig| grep -A1 mtu";

    # BUGFIX: the interface listing used to overwrite 'Internal Ip'
    # (already set from `hostname -I` above); keep it under its own key.
    $result{'Network'}{'Network Cards'} = `ifconfig| grep -A1 mtu`;
    infoprint "Internal IP : " . infocmd_one "hostname -I";
    my $httpcli = get_http_cli();
    infoprint "HTTP client found: $httpcli" if defined $httpcli;

    # External IP lookup; guard the matches against an undef $httpcli
    # (neither curl nor wget installed) to avoid uninitialized warnings.
    my $ext_ip = "";
    if ( defined $httpcli and $httpcli =~ /curl$/ ) {
        $ext_ip = infocmd_one "$httpcli -m 3 ipecho.net/plain";
    }
    elsif ( defined $httpcli and $httpcli =~ /wget$/ ) {
        $ext_ip = infocmd_one "$httpcli -t 1 -T 3 -q -O - ipecho.net/plain";
    }
    infoprint "External IP : " . $ext_ip;
    $result{'Network'}{'External Ip'} = $ext_ip;
    badprint
      "External IP : Can't check because of Internet connectivity"
      unless defined($httpcli);
    infoprint "Name Servers : "
      . infocmd_one "grep 'nameserver' /etc/resolv.conf \| awk '{print \$2}'";
    infoprint "Logged In users : ";
    infocmd_tab "who";
    $result{'OS'}{'Logged users'} = `who`;
    infoprint "Ram Usages in Mb : ";
    infocmd_tab "free -m | grep -v +";
    $result{'OS'}{'Free Memory RAM'} = `free -m | grep -v +`;
    infoprint "Load Average : ";
    infocmd_tab "top -n 1 -b | grep 'load average:'";
    $result{'OS'}{'Load Average'} = `top -n 1 -b | grep 'load average:'`;
    infoprint "System Uptime : ";
    infocmd_tab "uptime";
    $result{'OS'}{'Uptime'} = `uptime`;
}
# High-level Linux host review: memory consumed by non-mysqld processes,
# number of listening ports, banned-port services, filesystem usage and
# kernel tunables. Runs only with --sysstat enabled on a Linux host.
sub system_recommendations {
    return if ( $opt{sysstat} == 0 );
    subheaderprint "System Linux Recommendations";
    my $os = `uname`;
    unless ( $os =~ /Linux/i ) {
        infoprint "Skipped due to non Linux server";
        return;
    }
    prettyprint "Look for related Linux system recommendations";

    #prettyprint '-'x78;
    get_system_info();

    # RAM used by everything except mysqld; more than 15% of physical
    # memory suggests the host is not dedicated to the database.
    my $omem = get_other_process_memory;
    infoprint "User process except mysqld used "
      . hr_bytes_rnd($omem) . " RAM.";
    if ( ( 0.15 * $physical_memory ) < $omem ) {
        badprint
"Other user process except mysqld used more than 15% of total physical memory "
          . percentage( $omem, $physical_memory ) . "% ("
          . hr_bytes_rnd($omem) . " / "
          . hr_bytes_rnd($physical_memory) . ")";
        push( @generalrec,
"Consider stopping or dedicate server for additional process other than mysqld."
        );
        push( @adjvars,
"DON'T APPLY SETTINGS BECAUSE THERE ARE TOO MANY PROCESSES RUNNING ON THIS SERVER. OOM KILL CAN OCCUR!"
        );
    }
    else {
        infoprint
"Other user process except mysqld used less than 15% of total physical memory "
          . percentage( $omem, $physical_memory ) . "% ("
          . hr_bytes_rnd($omem) . " / "
          . hr_bytes_rnd($physical_memory) . ")";
    }

    # Too many listening services usually means a shared (non-dedicated) box.
    if ( $opt{'maxportallowed'} > 0 ) {
        my @opened_ports = get_opened_ports;
        infoprint "There is "
          . scalar @opened_ports
          . " listening port(s) on this server.";
        if ( scalar(@opened_ports) > $opt{'maxportallowed'} ) {
            badprint "There is too many listening ports: "
              . scalar(@opened_ports)
              . " opened > "
              . $opt{'maxportallowed'}
              . "allowed.";
            push( @generalrec,
"Consider dedicating a server for your database installation with less services running on !"
            );
        }
        else {
            goodprint "There is less than "
              . $opt{'maxportallowed'}
              . " opened ports on this server.";
        }
    }

    # Services that should not run alongside a database server.
    foreach my $banport (@banned_ports) {
        if ( is_open_port($banport) ) {
            badprint "Banned port: $banport is opened..";
            push( @generalrec,
"Port $banport is opened. Consider stopping program handling this port."
            );
        }
        else {
            goodprint "$banport is not opened.";
        }
    }
    get_fs_info;
    get_kernel_info;
}
# Audit mysql.user for common security problems: anonymous accounts,
# empty passwords, username-as-password, unrestricted ('%') hosts, and
# matches against a basic/weak password list. Pushes findings onto
# @generalrec. Skipped for MySQL 8 (PASSWORD() removed) and when
# --skippassword is given.
sub security_recommendations {
    subheaderprint "Security Recommendations";
    if ( mysql_version_eq(8) ) {
        infoprint "Skipped due to unsupported feature for MySQL 8";
        return;
    }

    #exit 0;
    if ( $opt{skippassword} eq 1 ) {
        infoprint "Skipped due to --skippassword option";
        return;
    }

    # 5.7+/MariaDB 10 store hashes in authentication_string, not password.
    my $PASS_COLUMN_NAME = 'password';
    if ( $myvar{'version'} =~ /5\.7|10\..*MariaDB*/ ) {
        $PASS_COLUMN_NAME =
"IF(plugin='mysql_native_password', authentication_string, 'password')";
    }
    debugprint "Password column = $PASS_COLUMN_NAME";

    # Looking for Anonymous users
    my @mysqlstatlist = select_array
"SELECT CONCAT(user, '\@', host) FROM mysql.user WHERE TRIM(USER) = '' OR USER IS NULL";
    debugprint Dumper \@mysqlstatlist;

    #exit 0;
    if (@mysqlstatlist) {
        foreach my $line ( sort @mysqlstatlist ) {
            chomp($line);
            badprint "User '" . $line . "' is an anonymous account.";
        }
        push( @generalrec,
            "Remove Anonymous User accounts - there are "
              . scalar(@mysqlstatlist)
              . " anonymous accounts." );
    }
    else {
        goodprint "There are no anonymous accounts for any database users";
    }
    if ( mysql_version_le( 5, 1 ) ) {
        badprint "No more password checks for MySQL version <=5.1";
        badprint "MySQL version <=5.1 are deprecated and end of support.";
        return;
    }

    # Looking for Empty Password
    if ( mysql_version_ge( 5, 5 ) ) {

        # 5.5+ must exclude socket/PAM auth plugins, which legitimately
        # carry no password hash.
        @mysqlstatlist = select_array
"SELECT CONCAT(user, '\@', host) FROM mysql.user WHERE ($PASS_COLUMN_NAME = '' OR $PASS_COLUMN_NAME IS NULL) AND plugin NOT IN ('unix_socket', 'win_socket', 'auth_pam_compat')";
    }
    else {
        @mysqlstatlist = select_array
"SELECT CONCAT(user, '\@', host) FROM mysql.user WHERE ($PASS_COLUMN_NAME = '' OR $PASS_COLUMN_NAME IS NULL)";
    }
    if (@mysqlstatlist) {
        foreach my $line ( sort @mysqlstatlist ) {
            chomp($line);
            badprint "User '" . $line . "' has no password set.";
        }
        push( @generalrec,
"Set up a Password for user with the following SQL statement ( SET PASSWORD FOR 'user'\@'SpecificDNSorIp' = PASSWORD('secure_password'); )"
        );
    }
    else {
        goodprint "All database users have passwords assigned";
    }

    # With validate_password active, the PASSWORD()-based probes below
    # would fail; bail out early (MySQL Bug #80860).
    if ( mysql_version_ge( 5, 7 ) ) {
        my $valPlugin = select_one(
"select count(*) from information_schema.plugins where PLUGIN_NAME='validate_password' AND PLUGIN_STATUS='ACTIVE'"
        );
        if ( $valPlugin >= 1 ) {
            infoprint
"Bug #80860 MySQL 5.7: Avoid testing password when validate_password is activated";
            return;
        }
    }

    # Looking for User with user/ uppercase /capitalise user as password
    @mysqlstatlist = select_array
"SELECT CONCAT(user, '\@', host) FROM mysql.user WHERE CAST($PASS_COLUMN_NAME as Binary) = PASSWORD(user) OR CAST($PASS_COLUMN_NAME as Binary) = PASSWORD(UPPER(user)) OR CAST($PASS_COLUMN_NAME as Binary) = PASSWORD(CONCAT(UPPER(LEFT(User, 1)), SUBSTRING(User, 2, LENGTH(User))))";
    if (@mysqlstatlist) {
        foreach my $line ( sort @mysqlstatlist ) {
            chomp($line);
            badprint "User '" . $line . "' has user name as password.";
        }
        push( @generalrec,
"Set up a Secure Password for user\@host ( SET PASSWORD FOR 'user'\@'SpecificDNSorIp' = PASSWORD('secure_password'); )"
        );
    }

    # Accounts reachable from any host.
    @mysqlstatlist = select_array
      "SELECT CONCAT(user, '\@', host) FROM mysql.user WHERE HOST='%'";
    if (@mysqlstatlist) {
        foreach my $line ( sort @mysqlstatlist ) {
            chomp($line);
            badprint "User '" . $line
              . "' does not specify hostname restrictions.";
        }
        push( @generalrec,
            "Restrict Host for user\@% to user\@SpecificDNSorIp" );
    }

    unless ( -f $basic_password_files ) {
        badprint "There is no basic password file list!";
        return;
    }

    # Test every password in the basic list, plus its upper-case and
    # capitalised variants, against the stored hashes.
    my @passwords = get_basic_passwords $basic_password_files;
    infoprint "There are "
      . scalar(@passwords)
      . " basic passwords in the list.";
    my $nbins = 0;
    my $passreq;    # NOTE(review): declared but never used
    if (@passwords) {
        my $nbInterPass = 0;
        foreach my $pass (@passwords) {
            $nbInterPass++;

            # Strip whitespace and escape quotes before SQL interpolation.
            $pass =~ s/\s//g;
            $pass =~ s/\'/\\\'/g;
            chomp($pass);

            # Looking for User with user/ uppercase /capitalise weak password
            @mysqlstatlist =
              select_array
"SELECT CONCAT(user, '\@', host) FROM mysql.user WHERE $PASS_COLUMN_NAME = PASSWORD('"
              . $pass
              . "') OR $PASS_COLUMN_NAME = PASSWORD(UPPER('"
              . $pass
              . "')) OR $PASS_COLUMN_NAME = PASSWORD(CONCAT(UPPER(LEFT('"
              . $pass
              . "', 1)), SUBSTRING('"
              . $pass
              . "', 2, LENGTH('"
              . $pass . "'))))";
            debugprint "There is " . scalar(@mysqlstatlist) . " items.";
            if (@mysqlstatlist) {
                foreach my $line (@mysqlstatlist) {
                    chomp($line);
                    badprint "User '" . $line
                      . "' is using weak password: $pass in a lower, upper or capitalize derivative version.";
                    $nbins++;
                }
            }

            # Progress indicator for long password lists.
            debugprint "$nbInterPass / " . scalar(@passwords)
              if ( $nbInterPass % 1000 == 0 );
        }
    }
    if ( $nbins > 0 ) {
        push( @generalrec, $nbins . " user(s) used basic or weak password." );
    }
}
# Report the replication topology and health: master/slave roles,
# binlog format, semi-sync settings, slave thread status, read_only
# mode and replication lag. Reads the pre-collected %myrepl
# (SHOW SLAVE STATUS) and %myslaves (SHOW SLAVE HOSTS) globals.
sub get_replication_status {
    subheaderprint "Replication Metrics";
    infoprint "Galera Synchronous replication: " . $myvar{'have_galera'};
    if ( scalar( keys %myslaves ) == 0 ) {
        infoprint "No replication slave(s) for this server.";
    }
    else {
        infoprint "This server is acting as master for "
          . scalar( keys %myslaves )
          . " server(s).";
    }
    infoprint "Binlog format: " . $myvar{'binlog_format'};
    infoprint "XA support enabled: " . $myvar{'innodb_support_xa'};
    infoprint "Semi synchronous replication Master: "
      . (
        defined( $myvar{'rpl_semi_sync_master_enabled'} )
        ? $myvar{'rpl_semi_sync_master_enabled'}
        : 'Not Activated'
      );
    infoprint "Semi synchronous replication Slave: "
      . (
        defined( $myvar{'rpl_semi_sync_slave_enabled'} )
        ? $myvar{'rpl_semi_sync_slave_enabled'}
        : 'Not Activated'
      );

    # Neither a slave nor a master: nothing more to check.
    if ( scalar( keys %myrepl ) == 0 and scalar( keys %myslaves ) == 0 ) {
        infoprint "This is a standalone server";
        return;
    }
    if ( scalar( keys %myrepl ) == 0 ) {
        infoprint
          "No replication setup for this server or replication not started.";
        return;
    }

    $result{'Replication'}{'status'} = \%myrepl;
    my ($io_running) = $myrepl{'Slave_IO_Running'};
    debugprint "IO RUNNING: $io_running ";
    my ($sql_running) = $myrepl{'Slave_SQL_Running'};
    debugprint "SQL RUNNING: $sql_running ";
    my ($seconds_behind_master) = $myrepl{'Seconds_Behind_Master'};
    debugprint "SECONDS : $seconds_behind_master ";

    # Configured but with at least one replication thread stopped.
    if ( defined($io_running)
        and ( $io_running !~ /yes/i or $sql_running !~ /yes/i ) )
    {
        badprint
          "This replication slave is not running but seems to be configured.";
    }

    # Healthy slave: check read_only protection and lag.
    if ( defined($io_running)
        && $io_running =~ /yes/i
        && $sql_running =~ /yes/i )
    {
        if ( $myvar{'read_only'} eq 'OFF' ) {
            badprint
"This replication slave is running with the read_only option disabled.";
        }
        else {
            goodprint
"This replication slave is running with the read_only option enabled.";
        }
        if ( $seconds_behind_master > 0 ) {
            badprint
"This replication slave is lagging and slave has $seconds_behind_master second(s) behind master host.";
        }
        else {
            goodprint "This replication slave is up to date with master.";
        }
    }
}
# Split $myvar{'version'} into the global major/minor/micro numbers and
# classify the running release as EOL, unsupported, or supported.
sub validate_mysql_version {
    ( $mysqlvermajor, $mysqlverminor, $mysqlvermicro ) =
      $myvar{'version'} =~ /^(\d+)(?:\.(\d+)|)(?:\.(\d+)|)/;

    # Missing version components default to zero.
    $mysqlverminor ||= 0;
    $mysqlvermicro ||= 0;

    if ( !mysql_version_ge( 5, 1 ) ) {
        badprint
          "Your MySQL version $myvar{'version'} is EOL software! Upgrade soon!";
    }
    elsif ( ( mysql_version_ge(6) and mysql_version_le(9) )
        or mysql_version_ge(12) )
    {
        # 6.x-9.x never shipped as GA MySQL; 12+ is out of known range.
        badprint
          "Currently running unsupported MySQL version $myvar{'version'}";
    }
    else {
        goodprint
          "Currently running supported MySQL version $myvar{'version'}";
    }
}
# Checks if the MySQL version equals (major[, minor[, micro]]).
# Components the caller omits are not compared.
sub mysql_version_eq {
    my ( $maj, $min, $mic ) = @_;

    if ( !defined($min) && !defined($mic) ) {
        return int($mysqlvermajor) == int($maj);
    }
    if ( !defined($mic) ) {
        return int($mysqlvermajor) == int($maj)
          && int($mysqlverminor) == int($min);
    }
    return int($mysqlvermajor) == int($maj)
      && int($mysqlverminor) == int($min)
      && int($mysqlvermicro) == int($mic);
}
# Checks if the MySQL version is greater than or equal to
# (major[, minor[, micro]]); omitted components default to zero.
sub mysql_version_ge {
    my ( $maj, $min, $mic ) = @_;
    $min ||= 0;
    $mic ||= 0;

    # Lexicographic comparison, most significant component first.
    return 1 if int($mysqlvermajor) > int($maj);
    return 0 if int($mysqlvermajor) < int($maj);
    return 1 if int($mysqlverminor) > int($min);
    return 0 if int($mysqlverminor) < int($min);
    return int($mysqlvermicro) >= int($mic);
}
# Checks if the MySQL version is lower than or equal to
# (major[, minor[, micro]]); omitted components default to zero.
sub mysql_version_le {
    my ( $maj, $min, $mic ) = @_;
    $min ||= 0;
    $mic ||= 0;

    # Lexicographic comparison, most significant component first.
    return 1 if int($mysqlvermajor) < int($maj);
    return 0 if int($mysqlvermajor) > int($maj);
    return 1 if int($mysqlverminor) < int($min);
    return 0 if int($mysqlverminor) > int($min);
    return int($mysqlvermicro) <= int($mic);
}
# Checks if the running version is exactly major.minor with a micro
# version lower than or equal to the given one.
sub mysql_micro_version_le {
    my ( $maj, $min, $mic ) = @_;
    return $mysqlvermajor == $maj
      && $mysqlverminor == $min
      && $mysqlvermicro <= $mic;
}
# Checks for 32-bit boxes with more than 2GB of RAM
my ($arch);

# Detect whether the OS is 64-bit, trying the platform-specific probe
# for each supported kernel (SunOS, Linux, AIX, *BSD, Darwin). Sets the
# file-scope $arch to 64 or 32 and records it in %result. Skipped when
# tuning a remote server ($doremote), since local probes are meaningless.
sub check_architecture {
    if ( $doremote eq 1 ) { return; }
    if ( `uname` =~ /SunOS/ && `isainfo -b` =~ /64/ ) {
        $arch = 64;
        goodprint "Operating on 64-bit architecture";
    }
    elsif ( `uname` !~ /SunOS/ && `uname -m` =~ /(64|s390x)/ ) {
        $arch = 64;
        goodprint "Operating on 64-bit architecture";
    }
    elsif ( `uname` =~ /AIX/ && `bootinfo -K` =~ /64/ ) {
        $arch = 64;
        goodprint "Operating on 64-bit architecture";
    }
    elsif ( `uname` =~ /NetBSD|OpenBSD/ && `sysctl -b hw.machine` =~ /64/ ) {
        $arch = 64;
        goodprint "Operating on 64-bit architecture";
    }
    elsif ( `uname` =~ /FreeBSD/ && `sysctl -b hw.machine_arch` =~ /64/ ) {
        $arch = 64;
        goodprint "Operating on 64-bit architecture";
    }
    elsif ( `uname` =~ /Darwin/ && `uname -m` =~ /Power Macintosh/ ) {

# Darwin box.local 9.8.0 Darwin Kernel Version 9.8.0: Wed Jul 15 16:57:01 PDT 2009; root:xnu1228.15.4~1/RELEASE_PPC Power Macintosh
        $arch = 64;
        goodprint "Operating on 64-bit architecture";
    }
    elsif ( `uname` =~ /Darwin/ && `uname -m` =~ /x86_64/ ) {

# Darwin gibas.local 12.3.0 Darwin Kernel Version 12.3.0: Sun Jan 6 22:37:10 PST 2013; root:xnu-2050.22.13~1/RELEASE_X86_64 x86_64
        $arch = 64;
        goodprint "Operating on 64-bit architecture";
    }
    else {
        $arch = 32;

        # A 32-bit process cannot address more than ~2-4GB of RAM.
        if ( $physical_memory > 2147483648 ) {
            badprint
              "Switch to 64-bit OS - MySQL cannot currently use all of your RAM";
        }
        else {
            goodprint "Operating on 32-bit architecture with less than 2GB RAM";
        }
    }
    $result{'OS'}{'Architecture'} = "$arch bits";
}
# Start up a ton of storage engine counts/statistics
my ( %enginestats, %enginecount, $fragtables );

# Enumerate available storage engines, compute per-engine data/index
# sizes and table counts, detect fragmented tables and near-overflow
# AUTO_INCREMENT columns. Fills %enginestats, %enginecount, $fragtables
# and the 'Engine'/'Tables'/'PctAutoIncrement' sections of %result.
# Skipped when --skipsize is given.
sub check_storage_engines {
    if ( $opt{skipsize} eq 1 ) {
        subheaderprint "Storage Engine Statistics";
        infoprint "Skipped due to --skipsize option";
        return;
    }
    subheaderprint "Storage Engine Statistics";

    # Build a colourised "+Engine"/"-Engine" availability summary.
    my $engines;
    if ( mysql_version_ge( 5, 5 ) ) {
        my @engineresults = select_array
"SELECT ENGINE,SUPPORT FROM information_schema.ENGINES ORDER BY ENGINE ASC";
        foreach my $line (@engineresults) {
            my ( $engine, $engineenabled );
            ( $engine, $engineenabled ) = $line =~ /([a-zA-Z_]*)\s+([a-zA-Z]+)/;
            $result{'Engine'}{$engine}{'Enabled'} = $engineenabled;
            $engines .=
              ( $engineenabled eq "YES" || $engineenabled eq "DEFAULT" )
              ? greenwrap "+" . $engine . " "
              : redwrap "-" . $engine . " ";
        }
    }
    elsif ( mysql_version_ge( 5, 1, 5 ) ) {
        my @engineresults = select_array
"SELECT ENGINE,SUPPORT FROM information_schema.ENGINES WHERE ENGINE NOT IN ('performance_schema','MyISAM','MERGE','MEMORY') ORDER BY ENGINE ASC";
        foreach my $line (@engineresults) {
            my ( $engine, $engineenabled );
            ( $engine, $engineenabled ) = $line =~ /([a-zA-Z_]*)\s+([a-zA-Z]+)/;
            $result{'Engine'}{$engine}{'Enabled'} = $engineenabled;
            $engines .=
              ( $engineenabled eq "YES" || $engineenabled eq "DEFAULT" )
              ? greenwrap "+" . $engine . " "
              : redwrap "-" . $engine . " ";
        }
    }
    else {

        # Pre-5.1.5: no information_schema.ENGINES; fall back to the
        # individual have_* server variables.
        $engines .=
          ( defined $myvar{'have_archive'} && $myvar{'have_archive'} eq "YES" )
          ? greenwrap "+Archive "
          : redwrap "-Archive ";
        $engines .=
          ( defined $myvar{'have_bdb'} && $myvar{'have_bdb'} eq "YES" )
          ? greenwrap "+BDB "
          : redwrap "-BDB ";
        $engines .=
          ( defined $myvar{'have_federated_engine'}
              && $myvar{'have_federated_engine'} eq "YES" )
          ? greenwrap "+Federated "
          : redwrap "-Federated ";
        $engines .=
          ( defined $myvar{'have_innodb'} && $myvar{'have_innodb'} eq "YES" )
          ? greenwrap "+InnoDB "
          : redwrap "-InnoDB ";
        $engines .=
          ( defined $myvar{'have_isam'} && $myvar{'have_isam'} eq "YES" )
          ? greenwrap "+ISAM "
          : redwrap "-ISAM ";
        $engines .=
          ( defined $myvar{'have_ndbcluster'}
              && $myvar{'have_ndbcluster'} eq "YES" )
          ? greenwrap "+NDBCluster "
          : redwrap "-NDBCluster ";
    }

    my @dblist = grep { $_ ne 'lost+found' } select_array "SHOW DATABASES";
    $result{'Databases'}{'List'} = [@dblist];
    infoprint "Status: $engines";
    if ( mysql_version_ge( 5, 1, 5 ) ) {

 # MySQL 5 servers can have table sizes calculated quickly from information schema
        my @templist = select_array
"SELECT ENGINE,SUM(DATA_LENGTH+INDEX_LENGTH),COUNT(ENGINE),SUM(DATA_LENGTH),SUM(INDEX_LENGTH) FROM information_schema.TABLES WHERE TABLE_SCHEMA NOT IN ('information_schema', 'performance_schema', 'mysql') AND ENGINE IS NOT NULL GROUP BY ENGINE ORDER BY ENGINE ASC;";

        my ( $engine, $size, $count, $dsize, $isize );
        foreach my $line (@templist) {
            ( $engine, $size, $count, $dsize, $isize ) =
              $line =~ /([a-zA-Z_]+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+)/;
            debugprint "Engine Found: $engine";
            next unless ( defined($engine) );
            $size  = 0 unless defined($size);
            $isize = 0 unless defined($isize);
            $dsize = 0 unless defined($dsize);
            $count = 0 unless defined($count);
            $enginestats{$engine}                      = $size;
            $enginecount{$engine}                      = $count;
            $result{'Engine'}{$engine}{'Table Number'} = $count;
            $result{'Engine'}{$engine}{'Total Size'}   = $size;
            $result{'Engine'}{$engine}{'Data Size'}    = $dsize;
            $result{'Engine'}{$engine}{'Index Size'}   = $isize;
        }

        # With innodb_file_per_table OFF (or unknown), InnoDB DATA_FREE
        # reflects the shared tablespace, so exclude InnoDB from the
        # fragmentation query.
        my $not_innodb = '';
        if ( not defined $result{'Variables'}{'innodb_file_per_table'} ) {
            $not_innodb = "AND NOT ENGINE='InnoDB'";
        }
        elsif ( $result{'Variables'}{'innodb_file_per_table'} eq 'OFF' ) {
            $not_innodb = "AND NOT ENGINE='InnoDB'";
        }
        $result{'Tables'}{'Fragmented tables'} =
          [ select_array
"SELECT CONCAT(CONCAT(TABLE_SCHEMA, '.'), TABLE_NAME),DATA_FREE FROM information_schema.TABLES WHERE TABLE_SCHEMA NOT IN ('information_schema','performance_schema', 'mysql') AND DATA_LENGTH/1024/1024>100 AND DATA_FREE*100/(DATA_LENGTH+INDEX_LENGTH+DATA_FREE) > 10 AND NOT ENGINE='MEMORY' $not_innodb"
          ];
        $fragtables = scalar @{ $result{'Tables'}{'Fragmented tables'} };
    }
    else {

        # MySQL < 5 servers take a lot of work to get table sizes
        my @tblist;

# Now we build a database list, and loop through it to get storage engine stats for tables
        foreach my $db (@dblist) {
            chomp($db);
            if (   $db eq "information_schema"
                or $db eq "performance_schema"
                or $db eq "mysql"
                or $db eq "lost+found" )
            {
                next;
            }
            my @ixs = ( 1, 6, 9 );
            if ( !mysql_version_ge( 4, 1 ) ) {

                # MySQL 3.23/4.0 keeps Data_Length in the 5th (0-based) column
                @ixs = ( 1, 5, 8 );
            }
            push( @tblist,
                map { [ (split)[@ixs] ] }
                  select_array "SHOW TABLE STATUS FROM \\\`$db\\\`" );
        }

# Parse through the table list to generate storage engine counts/statistics
        $fragtables = 0;
        foreach my $tbl (@tblist) {
            debugprint "Data dump " . Dumper(@$tbl);
            my ( $engine, $size, $datafree ) = @$tbl;
            next if $engine eq 'NULL';
            $size     = 0 if $size eq 'NULL';
            $datafree = 0 if $datafree eq 'NULL';
            if ( defined $enginestats{$engine} ) {
                $enginestats{$engine} += $size;
                $enginecount{$engine} += 1;
            }
            else {
                $enginestats{$engine} = $size;
                $enginecount{$engine} = 1;
            }
            if ( $datafree > 0 ) {
                $fragtables++;
            }
        }
    }
    while ( my ( $engine, $size ) = each(%enginestats) ) {
        infoprint "Data in $engine tables: "
          . hr_bytes($size)
          . " (Tables: "
          . $enginecount{$engine} . ")" . "";
    }

    # If the storage engine isn't being used, recommend it to be disabled
    if ( !defined $enginestats{'InnoDB'}
        && defined $myvar{'have_innodb'}
        && $myvar{'have_innodb'} eq "YES" )
    {
        badprint "InnoDB is enabled but isn't being used";
        push( @generalrec,
            "Add skip-innodb to MySQL configuration to disable InnoDB" );
    }
    if ( !defined $enginestats{'BerkeleyDB'}
        && defined $myvar{'have_bdb'}
        && $myvar{'have_bdb'} eq "YES" )
    {
        badprint "BDB is enabled but isn't being used";
        push( @generalrec,
            "Add skip-bdb to MySQL configuration to disable BDB" );
    }
    if ( !defined $enginestats{'ISAM'}
        && defined $myvar{'have_isam'}
        && $myvar{'have_isam'} eq "YES" )
    {
        badprint "MYISAM is enabled but isn't being used";
        push( @generalrec,
"Add skip-isam to MySQL configuration to disable ISAM (MySQL > 4.1.0)"
        );
    }

    # Fragmented tables
    if ( $fragtables > 0 ) {
        badprint "Total fragmented tables: $fragtables";
        push( @generalrec,
            "Run OPTIMIZE TABLE to defragment tables for better performance" );
        my $total_free = 0;
        foreach my $table_line ( @{ $result{'Tables'}{'Fragmented tables'} } ) {
            my ( $full_table_name, $data_free ) = split( /\s+/, $table_line );
            $data_free = 0 if ( !defined($data_free) or $data_free eq '' );
            $data_free = $data_free / 1024 / 1024;
            $total_free += $data_free;
            my ( $table_schema, $table_name ) = split( /\./, $full_table_name );
            push( @generalrec,
" OPTIMIZE TABLE `$table_schema`.`$table_name`; -- can free $data_free MB"
            );
        }
        push( @generalrec,
            "Total freed space after theses OPTIMIZE TABLE : $total_free Mb" );
    }
    else {
        goodprint "Total fragmented tables: $fragtables";
    }

    # Auto increments
    my %tblist;

    # Find the maximum integer
    my $maxint = select_one "SELECT ~0";
    $result{'MaxInt'} = $maxint;

# Now we use a database list, and loop through it to get storage engine stats for tables
    foreach my $db (@dblist) {
        chomp($db);

        if ( !$tblist{$db} ) {
            $tblist{$db} = ();
        }

        if ( $db eq "information_schema" ) { next; }
        my @ia = ( 0, 10 );
        if ( !mysql_version_ge( 4, 1 ) ) {

            # MySQL 3.23/4.0 keeps Data_Length in the 5th (0-based) column
            @ia = ( 0, 9 );
        }
        push(
            @{ $tblist{$db} },
            map { [ (split)[@ia] ] }
              select_array "SHOW TABLE STATUS FROM \\\`$db\\\`"
        );
    }

    # Warn when an AUTO_INCREMENT counter has consumed >= 75% of the
    # maximum integer value.
    my @dbnames = keys %tblist;
    foreach my $db (@dbnames) {
        foreach my $tbl ( @{ $tblist{$db} } ) {
            my ( $name, $autoincrement ) = @$tbl;
            if ( $autoincrement =~ /^\d+?$/ ) {
                my $percent = percentage( $autoincrement, $maxint );
                $result{'PctAutoIncrement'}{"$db.$name"} = $percent;
                if ( $percent >= 75 ) {
                    badprint
"Table '$db.$name' has an autoincrement value near max capacity ($percent%)";
                }
            }
        }
    }
}
# %mycalc holds every derived metric (buffer totals, hit rates, percentages)
# computed from server variables (%myvar) and status counters (%mystat).
my %mycalc;

# Populate %mycalc with all derived performance metrics used by the report
# sections: per-thread and server-wide buffer sizing, memory-usage estimates,
# key/query/thread/table cache hit rates, sort/join/tmp-table percentages,
# InnoDB buffer and log efficiency, and binlog cache usage.
# Reads globals: %myvar, %mystat, $physical_memory, $doremote, $duflags.
# Writes: %mycalc. Exits with status 2 if the server answered no queries,
# since every ratio below would divide by zero or be meaningless.
sub calculations {
    if ( $mystat{'Questions'} < 1 ) {
        badprint
          "Your server has not answered any queries - cannot continue...";
        exit 2;
    }
    # Per-thread memory
    # MySQL < 4 used older names (record_buffer/record_rnd_buffer/sort_buffer)
    # for the same per-connection buffers.
    if ( mysql_version_ge(4) ) {
        $mycalc{'per_thread_buffers'} =
          $myvar{'read_buffer_size'} +
          $myvar{'read_rnd_buffer_size'} +
          $myvar{'sort_buffer_size'} +
          $myvar{'thread_stack'} +
          $myvar{'join_buffer_size'};
    }
    else {
        $mycalc{'per_thread_buffers'} =
          $myvar{'record_buffer'} +
          $myvar{'record_rnd_buffer'} +
          $myvar{'sort_buffer'} +
          $myvar{'thread_stack'} +
          $myvar{'join_buffer_size'};
    }
    # Worst case: every allowed connection allocates its buffers...
    $mycalc{'total_per_thread_buffers'} =
      $mycalc{'per_thread_buffers'} * $myvar{'max_connections'};
    # ...versus the high-water mark of connections actually seen.
    $mycalc{'max_total_per_thread_buffers'} =
      $mycalc{'per_thread_buffers'} * $mystat{'Max_used_connections'};
    # Server-wide memory
    # An in-memory tmp table is capped by the smaller of these two variables.
    $mycalc{'max_tmp_table_size'} =
      ( $myvar{'tmp_table_size'} > $myvar{'max_heap_table_size'} )
      ? $myvar{'max_heap_table_size'}
      : $myvar{'tmp_table_size'};
    # Engine-specific pools are added only when the variable exists on this
    # server/version (InnoDB, query cache, Aria may be absent).
    $mycalc{'server_buffers'} =
      $myvar{'key_buffer_size'} + $mycalc{'max_tmp_table_size'};
    $mycalc{'server_buffers'} +=
      ( defined $myvar{'innodb_buffer_pool_size'} )
      ? $myvar{'innodb_buffer_pool_size'}
      : 0;
    $mycalc{'server_buffers'} +=
      ( defined $myvar{'innodb_additional_mem_pool_size'} )
      ? $myvar{'innodb_additional_mem_pool_size'}
      : 0;
    $mycalc{'server_buffers'} +=
      ( defined $myvar{'innodb_log_buffer_size'} )
      ? $myvar{'innodb_log_buffer_size'}
      : 0;
    $mycalc{'server_buffers'} +=
      ( defined $myvar{'query_cache_size'} ) ? $myvar{'query_cache_size'} : 0;
    $mycalc{'server_buffers'} +=
      ( defined $myvar{'aria_pagecache_buffer_size'} )
      ? $myvar{'aria_pagecache_buffer_size'}
      : 0;
    # Global memory
    # Max used memory is memory used by MySQL based on Max_used_connections
    # This is the max memory used theoretically calculated with the max concurrent connection number reached by mysql
    $mycalc{'max_used_memory'} =
      $mycalc{'server_buffers'} +
      $mycalc{"max_total_per_thread_buffers"} +
      get_pf_memory();
    # + get_gcache_memory();
    $mycalc{'pct_max_used_memory'} =
      percentage( $mycalc{'max_used_memory'}, $physical_memory );
    # Total possible memory is memory needed by MySQL based on max_connections
    # This is the max memory MySQL can theoretically used if all connections allowed has opened by mysql
    $mycalc{'max_peak_memory'} =
      $mycalc{'server_buffers'} +
      $mycalc{'total_per_thread_buffers'} +
      get_pf_memory();
    # + get_gcache_memory();
    $mycalc{'pct_max_physical_memory'} =
      percentage( $mycalc{'max_peak_memory'}, $physical_memory );
    debugprint "Max Used Memory: "
      . hr_bytes( $mycalc{'max_used_memory'} ) . "";
    debugprint "Max Used Percentage RAM: "
      . $mycalc{'pct_max_used_memory'} . "%";
    debugprint "Max Peak Memory: "
      . hr_bytes( $mycalc{'max_peak_memory'} ) . "";
    debugprint "Max Peak Percentage RAM: "
      . $mycalc{'pct_max_physical_memory'} . "%";
    # Slow queries
    $mycalc{'pct_slow_queries'} =
      int( ( $mystat{'Slow_queries'} / $mystat{'Questions'} ) * 100 );
    # Connections
    # Clamp at 100%: Max_used_connections can exceed max_connections when
    # the limit was lowered after startup.
    $mycalc{'pct_connections_used'} = int(
        ( $mystat{'Max_used_connections'} / $myvar{'max_connections'} ) * 100 );
    $mycalc{'pct_connections_used'} =
      ( $mycalc{'pct_connections_used'} > 100 )
      ? 100
      : $mycalc{'pct_connections_used'};
    # Aborted Connections
    $mycalc{'pct_connections_aborted'} =
      percentage( $mystat{'Aborted_connects'}, $mystat{'Connections'} );
    debugprint "Aborted_connects: " . $mystat{'Aborted_connects'} . "";
    debugprint "Connections: " . $mystat{'Connections'} . "";
    debugprint "pct_connections_aborted: "
      . $mycalc{'pct_connections_aborted'} . "";
    # Key buffers
    # Fraction of the MyISAM key buffer holding data (unused blocks inverted).
    if ( mysql_version_ge( 4, 1 ) && $myvar{'key_buffer_size'} > 0 ) {
        $mycalc{'pct_key_buffer_used'} = sprintf(
            "%.1f",
            (
                1 - (
                    (
                        $mystat{'Key_blocks_unused'} *
                          $myvar{'key_cache_block_size'}
                    ) / $myvar{'key_buffer_size'}
                )
            ) * 100
        );
    }
    else {
        $mycalc{'pct_key_buffer_used'} = 0;
    }
    # Percentage of index reads served from memory rather than disk.
    if ( $mystat{'Key_read_requests'} > 0 ) {
        $mycalc{'pct_keys_from_mem'} = sprintf(
            "%.1f",
            (
                100 - (
                    ( $mystat{'Key_reads'} / $mystat{'Key_read_requests'} ) *
                      100
                )
            )
        );
    }
    else {
        $mycalc{'pct_keys_from_mem'} = 0;
    }
    # Same ratio for the Aria page cache (MariaDB only).
    if ( defined $mystat{'Aria_pagecache_read_requests'}
        && $mystat{'Aria_pagecache_read_requests'} > 0 )
    {
        $mycalc{'pct_aria_keys_from_mem'} = sprintf(
            "%.1f",
            (
                100 - (
                    (
                        $mystat{'Aria_pagecache_reads'} /
                          $mystat{'Aria_pagecache_read_requests'}
                    ) * 100
                )
            )
        );
    }
    else {
        $mycalc{'pct_aria_keys_from_mem'} = 0;
    }
    if ( $mystat{'Key_write_requests'} > 0 ) {
        $mycalc{'pct_wkeys_from_mem'} = sprintf( "%.1f",
            ( ( $mystat{'Key_writes'} / $mystat{'Key_write_requests'} ) * 100 )
        );
    }
    else {
        $mycalc{'pct_wkeys_from_mem'} = 0;
    }
    # Total MyISAM/Aria index size: pre-5.0 has no information_schema, so
    # size the .MYI files on disk (local servers only); 5.0+ queries I_S.
    if ( $doremote eq 0 and !mysql_version_ge(5) ) {
        my $size = 0;
        $size += (split)[0]
          for
          `find $myvar{'datadir'} -name "*.MYI" 2>&1 | xargs du -L $duflags 2>&1`;
        $mycalc{'total_myisam_indexes'} = $size;
        $mycalc{'total_aria_indexes'}   = 0;
    }
    elsif ( mysql_version_ge(5) ) {
        $mycalc{'total_myisam_indexes'} = select_one
"SELECT IFNULL(SUM(INDEX_LENGTH),0) FROM information_schema.TABLES WHERE TABLE_SCHEMA NOT IN ('information_schema') AND ENGINE = 'MyISAM';";
        $mycalc{'total_aria_indexes'} = select_one
"SELECT IFNULL(SUM(INDEX_LENGTH),0) FROM information_schema.TABLES WHERE TABLE_SCHEMA NOT IN ('information_schema') AND ENGINE = 'Aria';";
    }
    # Sentinel: 0 means the size could not be determined (e.g. du failed
    # without root); the MyISAM report section tests for the string "fail".
    if ( defined $mycalc{'total_myisam_indexes'}
        and $mycalc{'total_myisam_indexes'} == 0 )
    {
        $mycalc{'total_myisam_indexes'} = "fail";
    }
    elsif ( defined $mycalc{'total_myisam_indexes'} ) {
        chomp( $mycalc{'total_myisam_indexes'} );
    }
    # NOTE(review): an Aria index total of 0 is silently bumped to 1,
    # presumably to avoid a later division by zero — TODO confirm consumer.
    if ( defined $mycalc{'total_aria_indexes'}
        and $mycalc{'total_aria_indexes'} == 0 )
    {
        $mycalc{'total_aria_indexes'} = 1;
    }
    elsif ( defined $mycalc{'total_aria_indexes'} ) {
        chomp( $mycalc{'total_aria_indexes'} );
    }
    # Query cache
    # The query cache was removed in MySQL 8, so efficiency is pinned to 0
    # for versions 8..10. NOTE(review): MariaDB 10.x still has a query cache
    # and may be caught by this guard — TODO confirm mysql_version_* semantics.
    # NOTE(review): the efficiency division below can divide by zero when
    # Com_select + Qcache_hits is 0 — presumably unreachable because
    # Questions >= 1, but verify.
    if ( mysql_version_ge(8) and mysql_version_le(10) ) {
        $mycalc{'query_cache_efficiency'} = 0;
    } elsif ( mysql_version_ge(4) ) {
        $mycalc{'query_cache_efficiency'} = sprintf(
            "%.1f",
            (
                $mystat{'Qcache_hits'} /
                  ( $mystat{'Com_select'} + $mystat{'Qcache_hits'} )
            ) * 100
        );
        if ( $myvar{'query_cache_size'} ) {
            $mycalc{'pct_query_cache_used'} = sprintf(
                "%.1f",
                100 - (
                    $mystat{'Qcache_free_memory'} / $myvar{'query_cache_size'}
                ) * 100
            );
        }
        if ( $mystat{'Qcache_lowmem_prunes'} == 0 ) {
            $mycalc{'query_cache_prunes_per_day'} = 0;
        }
        else {
            $mycalc{'query_cache_prunes_per_day'} = int(
                $mystat{'Qcache_lowmem_prunes'} / ( $mystat{'Uptime'} / 86400 )
            );
        }
    }
    # Sorting
    # Sort_merge_passes relative to all sorts = sorts that spilled to disk.
    $mycalc{'total_sorts'} = $mystat{'Sort_scan'} + $mystat{'Sort_range'};
    if ( $mycalc{'total_sorts'} > 0 ) {
        $mycalc{'pct_temp_sort_table'} = int(
            ( $mystat{'Sort_merge_passes'} / $mycalc{'total_sorts'} ) * 100 );
    }
    # Joins
    $mycalc{'joins_without_indexes'} =
      $mystat{'Select_range_check'} + $mystat{'Select_full_join'};
    $mycalc{'joins_without_indexes_per_day'} =
      int( $mycalc{'joins_without_indexes'} / ( $mystat{'Uptime'} / 86400 ) );
    # Temporary tables
    if ( $mystat{'Created_tmp_tables'} > 0 ) {
        if ( $mystat{'Created_tmp_disk_tables'} > 0 ) {
            $mycalc{'pct_temp_disk'} = int(
                (
                    $mystat{'Created_tmp_disk_tables'} /
                      $mystat{'Created_tmp_tables'}
                ) * 100
            );
        }
        else {
            $mycalc{'pct_temp_disk'} = 0;
        }
    }
    # Table cache
    if ( $mystat{'Opened_tables'} > 0 ) {
        $mycalc{'table_cache_hit_rate'} =
          int( $mystat{'Open_tables'} * 100 / $mystat{'Opened_tables'} );
    }
    else {
        $mycalc{'table_cache_hit_rate'} = 100;
    }
    # Open files
    if ( $myvar{'open_files_limit'} > 0 ) {
        $mycalc{'pct_files_open'} =
          int( $mystat{'Open_files'} * 100 / $myvar{'open_files_limit'} );
    }
    # Table locks
    if ( $mystat{'Table_locks_immediate'} > 0 ) {
        if ( $mystat{'Table_locks_waited'} == 0 ) {
            $mycalc{'pct_table_locks_immediate'} = 100;
        }
        else {
            $mycalc{'pct_table_locks_immediate'} = int(
                $mystat{'Table_locks_immediate'} * 100 / (
                    $mystat{'Table_locks_waited'} +
                      $mystat{'Table_locks_immediate'}
                )
            );
        }
    }
    # Thread cache
    $mycalc{'thread_cache_hit_rate'} =
      int( 100 -
          ( ( $mystat{'Threads_created'} / $mystat{'Connections'} ) * 100 ) );
    # Other
    if ( $mystat{'Connections'} > 0 ) {
        $mycalc{'pct_aborted_connections'} =
          int( ( $mystat{'Aborted_connects'} / $mystat{'Connections'} ) * 100 );
    }
    if ( $mystat{'Questions'} > 0 ) {
        $mycalc{'total_reads'} = $mystat{'Com_select'};
        $mycalc{'total_writes'} =
          $mystat{'Com_delete'} +
          $mystat{'Com_insert'} +
          $mystat{'Com_update'} +
          $mystat{'Com_replace'};
        if ( $mycalc{'total_reads'} == 0 ) {
            $mycalc{'pct_reads'}  = 0;
            $mycalc{'pct_writes'} = 100;
        }
        else {
            $mycalc{'pct_reads'} = int(
                (
                    $mycalc{'total_reads'} /
                      ( $mycalc{'total_reads'} + $mycalc{'total_writes'} )
                ) * 100
            );
            $mycalc{'pct_writes'} = 100 - $mycalc{'pct_reads'};
        }
    }
    # InnoDB
    # NOTE(review): have_innodb was removed in MySQL 5.6+, so this may be
    # undef on newer servers and warn under 'use warnings' — TODO confirm
    # %myvar is pre-populated elsewhere.
    if ( $myvar{'have_innodb'} eq "YES" ) {
        $mycalc{'innodb_log_size_pct'} =
          ( $myvar{'innodb_log_file_size'} *
              $myvar{'innodb_log_files_in_group'} * 100 /
              $myvar{'innodb_buffer_pool_size'} );
    }
    # InnoDB Buffer pool read cache efficiency
    # Default both counters to 1 when absent so the percentage math below
    # never divides by zero.
    (
        $mystat{'Innodb_buffer_pool_read_requests'},
        $mystat{'Innodb_buffer_pool_reads'}
      )
      = ( 1, 1 )
      unless defined $mystat{'Innodb_buffer_pool_reads'};
    $mycalc{'pct_read_efficiency'} = percentage(
        (
            $mystat{'Innodb_buffer_pool_read_requests'} -
              $mystat{'Innodb_buffer_pool_reads'}
        ),
        $mystat{'Innodb_buffer_pool_read_requests'}
    ) if defined $mystat{'Innodb_buffer_pool_read_requests'};
    debugprint "pct_read_efficiency: " . $mycalc{'pct_read_efficiency'} . "";
    debugprint "Innodb_buffer_pool_reads: "
      . $mystat{'Innodb_buffer_pool_reads'} . "";
    debugprint "Innodb_buffer_pool_read_requests: "
      . $mystat{'Innodb_buffer_pool_read_requests'} . "";
    # InnoDB log write cache efficiency
    ( $mystat{'Innodb_log_write_requests'}, $mystat{'Innodb_log_writes'} ) =
      ( 1, 1 )
      unless defined $mystat{'Innodb_log_writes'};
    $mycalc{'pct_write_efficiency'} = percentage(
        ( $mystat{'Innodb_log_write_requests'} - $mystat{'Innodb_log_writes'} ),
        $mystat{'Innodb_log_write_requests'}
    ) if defined $mystat{'Innodb_log_write_requests'};
    debugprint "pct_write_efficiency: " . $mycalc{'pct_write_efficiency'} . "";
    debugprint "Innodb_log_writes: " . $mystat{'Innodb_log_writes'} . "";
    debugprint "Innodb_log_write_requests: "
      . $mystat{'Innodb_log_write_requests'} . "";
    $mycalc{'pct_innodb_buffer_used'} = percentage(
        (
            $mystat{'Innodb_buffer_pool_pages_total'} -
              $mystat{'Innodb_buffer_pool_pages_free'}
        ),
        $mystat{'Innodb_buffer_pool_pages_total'}
    ) if defined $mystat{'Innodb_buffer_pool_pages_total'};
    # Binlog Cache
    # Share of binlog transactions that fit in the in-memory cache.
    if ( $myvar{'log_bin'} ne 'OFF' ) {
        $mycalc{'pct_binlog_cache'} = percentage(
            $mystat{'Binlog_cache_use'} - $mystat{'Binlog_cache_disk_use'},
            $mystat{'Binlog_cache_use'} );
    }
}
# Print the "Performance Metrics" report section: uptime/traffic summary,
# memory sizing, slow queries, connections, query cache, sorting, joins,
# temporary tables, thread/table caches, open files, table locks and binlog
# cache health. Reads the globals %myvar, %mystat, %mycalc, %result, %opt,
# $physical_memory and $arch; appends advice to @generalrec and variable
# suggestions to @adjvars. Takes no arguments and returns nothing useful.
sub mysql_stats {
    subheaderprint "Performance Metrics";
    # Show uptime, queries per second, connections, traffic stats
    # Default to 0 so the "Up for:" line below never interpolates an
    # uninitialized $qps when Uptime is 0 (fixes an undef warning).
    my $qps = 0;
    if ( $mystat{'Uptime'} > 0 ) {
        $qps = sprintf( "%.3f", $mystat{'Questions'} / $mystat{'Uptime'} );
    }
    push( @generalrec,
        "MySQL was started within the last 24 hours - recommendations may be inaccurate"
    ) if ( $mystat{'Uptime'} < 86400 );
    infoprint "Up for: "
      . pretty_uptime( $mystat{'Uptime'} ) . " ("
      . hr_num( $mystat{'Questions'} ) . " q ["
      . hr_num($qps)
      . " qps], "
      . hr_num( $mystat{'Connections'} )
      . " conn," . " TX: "
      . hr_bytes_rnd( $mystat{'Bytes_sent'} )
      . ", RX: "
      . hr_bytes_rnd( $mystat{'Bytes_received'} ) . ")";
    infoprint "Reads / Writes: "
      . $mycalc{'pct_reads'} . "% / "
      . $mycalc{'pct_writes'} . "%";
    # Binlog Cache
    if ( $myvar{'log_bin'} eq 'OFF' ) {
        infoprint "Binary logging is disabled";
    }
    else {
        infoprint "Binary logging is enabled (GTID MODE: "
          . ( defined( $myvar{'gtid_mode'} ) ? $myvar{'gtid_mode'} : "OFF" )
          . ")";
    }
    # Memory usage
    infoprint "Physical Memory     : " . hr_bytes($physical_memory);
    infoprint "Max MySQL memory    : " . hr_bytes( $mycalc{'max_peak_memory'} );
    infoprint "Other process memory: " . hr_bytes( get_other_process_memory() );
    #print hr_bytes( $mycalc{'server_buffers'} );
    infoprint "Total buffers: "
      . hr_bytes( $mycalc{'server_buffers'} )
      . " global + "
      . hr_bytes( $mycalc{'per_thread_buffers'} )
      . " per thread ($myvar{'max_connections'} max threads)";
    infoprint "P_S Max memory usage: " . hr_bytes_rnd( get_pf_memory() );
    $result{'P_S'}{'memory'} = get_other_process_memory();
    $result{'P_S'}{'pretty_memory'} =
      hr_bytes_rnd( get_other_process_memory() );
    infoprint "Galera GCache Max memory usage: "
      . hr_bytes_rnd( get_gcache_memory() );
    $result{'Galera'}{'GCache'}{'memory'} = get_gcache_memory();
    $result{'Galera'}{'GCache'}{'pretty_memory'} =
      hr_bytes_rnd( get_gcache_memory() );
    # Optional per-buffer breakdown (enabled with --buffers).
    if ( $opt{buffers} ne 0 ) {
        infoprint "Global Buffers";
        infoprint " +-- Key Buffer: "
          . hr_bytes( $myvar{'key_buffer_size'} ) . "";
        infoprint " +-- Max Tmp Table: "
          . hr_bytes( $mycalc{'max_tmp_table_size'} ) . "";
        if ( defined $myvar{'query_cache_type'} ) {
            infoprint "Query Cache Buffers";
            # query_cache_type: 0/OFF disabled, 1 caches everything
            # cacheable, 2 caches only on SQL_CACHE (ON DEMAND).
            # Fixed: logical || instead of bitwise | between the eq tests.
            infoprint " +-- Query Cache: "
              . $myvar{'query_cache_type'} . " - "
              . (
                $myvar{'query_cache_type'} eq 0
                  || $myvar{'query_cache_type'} eq 'OFF' ? "DISABLED"
                : (
                    $myvar{'query_cache_type'} eq 1 ? "ALL REQUESTS"
                    : "ON DEMAND"
                )
              ) . "";
            infoprint " +-- Query Cache Size: "
              . hr_bytes( $myvar{'query_cache_size'} ) . "";
        }
        infoprint "Per Thread Buffers";
        infoprint " +-- Read Buffer: "
          . hr_bytes( $myvar{'read_buffer_size'} ) . "";
        infoprint " +-- Read RND Buffer: "
          . hr_bytes( $myvar{'read_rnd_buffer_size'} ) . "";
        infoprint " +-- Sort Buffer: "
          . hr_bytes( $myvar{'sort_buffer_size'} ) . "";
        infoprint " +-- Thread stack: "
          . hr_bytes( $myvar{'thread_stack'} ) . "";
        infoprint " +-- Join Buffer: "
          . hr_bytes( $myvar{'join_buffer_size'} ) . "";
        if ( $myvar{'log_bin'} ne 'OFF' ) {
            infoprint "Binlog Cache Buffers";
            infoprint " +-- Binlog Cache: "
              . hr_bytes( $myvar{'binlog_cache_size'} ) . "";
        }
    }
    # 32-bit processes cannot address much beyond 2GB; warn loudly.
    if (   $arch
        && $arch == 32
        && $mycalc{'max_used_memory'} > 2 * 1024 * 1024 * 1024 )
    {
        badprint
          "Allocating > 2GB RAM on 32-bit systems can cause system instability";
        badprint "Maximum reached memory usage: "
          . hr_bytes( $mycalc{'max_used_memory'} )
          . " ($mycalc{'pct_max_used_memory'}% of installed RAM)";
    }
    elsif ( $mycalc{'pct_max_used_memory'} > 85 ) {
        badprint "Maximum reached memory usage: "
          . hr_bytes( $mycalc{'max_used_memory'} )
          . " ($mycalc{'pct_max_used_memory'}% of installed RAM)";
    }
    else {
        goodprint "Maximum reached memory usage: "
          . hr_bytes( $mycalc{'max_used_memory'} )
          . " ($mycalc{'pct_max_used_memory'}% of installed RAM)";
    }
    if ( $mycalc{'pct_max_physical_memory'} > 85 ) {
        badprint "Maximum possible memory usage: "
          . hr_bytes( $mycalc{'max_peak_memory'} )
          . " ($mycalc{'pct_max_physical_memory'}% of installed RAM)";
        push( @generalrec,
            "Reduce your overall MySQL memory footprint for system stability" );
    }
    else {
        goodprint "Maximum possible memory usage: "
          . hr_bytes( $mycalc{'max_peak_memory'} )
          . " ($mycalc{'pct_max_physical_memory'}% of installed RAM)";
    }
    if ( $physical_memory <
        ( $mycalc{'max_peak_memory'} + get_other_process_memory() ) )
    {
        badprint
          "Overall possible memory usage with other process exceeded memory";
        push( @generalrec,
            "Dedicate this server to your database for highest performance." );
    }
    else {
        goodprint
"Overall possible memory usage with other process is compatible with memory available";
    }
    # Slow queries
    if ( $mycalc{'pct_slow_queries'} > 5 ) {
        badprint "Slow queries: $mycalc{'pct_slow_queries'}% ("
          . hr_num( $mystat{'Slow_queries'} ) . "/"
          . hr_num( $mystat{'Questions'} ) . ")";
    }
    else {
        goodprint "Slow queries: $mycalc{'pct_slow_queries'}% ("
          . hr_num( $mystat{'Slow_queries'} ) . "/"
          . hr_num( $mystat{'Questions'} ) . ")";
    }
    if ( $myvar{'long_query_time'} > 10 ) {
        push( @adjvars, "long_query_time (<= 10)" );
    }
    if ( defined( $myvar{'log_slow_queries'} ) ) {
        if ( $myvar{'log_slow_queries'} eq "OFF" ) {
            push( @generalrec,
                "Enable the slow query log to troubleshoot bad queries" );
        }
    }
    # Connections
    if ( $mycalc{'pct_connections_used'} > 85 ) {
        badprint
"Highest connection usage: $mycalc{'pct_connections_used'}%  ($mystat{'Max_used_connections'}/$myvar{'max_connections'})";
        push( @adjvars,
            "max_connections (> " . $myvar{'max_connections'} . ")" );
        push( @adjvars,
            "wait_timeout (< " . $myvar{'wait_timeout'} . ")",
            "interactive_timeout (< " . $myvar{'interactive_timeout'} . ")" );
        push( @generalrec,
            "Reduce or eliminate persistent connections to reduce connection usage"
        );
    }
    else {
        goodprint
"Highest usage of available connections: $mycalc{'pct_connections_used'}% ($mystat{'Max_used_connections'}/$myvar{'max_connections'})";
    }
    # Aborted Connections
    if ( $mycalc{'pct_connections_aborted'} > 3 ) {
        badprint
"Aborted connections: $mycalc{'pct_connections_aborted'}%  ($mystat{'Aborted_connects'}/$mystat{'Connections'})";
        push( @generalrec,
            "Reduce or eliminate unclosed connections and network issues" );
    }
    else {
        goodprint
"Aborted connections: $mycalc{'pct_connections_aborted'}%  ($mystat{'Aborted_connects'}/$mystat{'Connections'})";
    }
    # name resolution
    if ( defined( $result{'Variables'}{'skip_networking'} )
        && $result{'Variables'}{'skip_networking'} eq 'ON' )
    {
        infoprint
"Skipped name resolution test due to skip_networking=ON in system variables.";
    }
    elsif ( not defined( $result{'Variables'}{'skip_name_resolve'} ) ) {
        infoprint
"Skipped name resolution test due to missing skip_name_resolve in system variables.";
    }
    elsif ( $result{'Variables'}{'skip_name_resolve'} eq 'OFF' ) {
        badprint
"name resolution is active : a reverse name resolution is made for each new connection and can reduce performance";
        push( @generalrec,
            "Configure your accounts with ip or subnets only, then update your configuration with skip-name-resolve=1"
        );
    }
    # Query cache
    if ( !mysql_version_ge(4) ) {
        # MySQL versions < 4.01 don't support query caching
        push( @generalrec,
            "Upgrade MySQL to version 4+ to utilize query caching" );
    }
    elsif ( mysql_version_eq(8) ) {
        infoprint "Query cache have been removed in MySQL 8";
        #return;
    }
    elsif ( $myvar{'query_cache_size'} < 1
        and $myvar{'query_cache_type'} eq "OFF" )
    {
        goodprint
"Query cache is disabled by default due to mutex contention on multiprocessor machines.";
    }
    elsif ( $mystat{'Com_select'} == 0 ) {
        badprint
          "Query cache cannot be analyzed - no SELECT statements executed";
    }
    else {
        badprint
          "Query cache may be disabled by default due to mutex contention.";
        push( @adjvars, "query_cache_size (=0)" );
        push( @adjvars, "query_cache_type (=0)" );
        if ( $mycalc{'query_cache_efficiency'} < 20 ) {
            badprint
              "Query cache efficiency: $mycalc{'query_cache_efficiency'}% ("
              . hr_num( $mystat{'Qcache_hits'} )
              . " cached / "
              . hr_num( $mystat{'Qcache_hits'} + $mystat{'Com_select'} )
              . " selects)";
            push( @adjvars,
                    "query_cache_limit (> "
                  . hr_bytes_rnd( $myvar{'query_cache_limit'} )
                  . ", or use smaller result sets)" );
        }
        else {
            goodprint
              "Query cache efficiency: $mycalc{'query_cache_efficiency'}% ("
              . hr_num( $mystat{'Qcache_hits'} )
              . " cached / "
              . hr_num( $mystat{'Qcache_hits'} + $mystat{'Com_select'} )
              . " selects)";
        }
        if ( $mycalc{'query_cache_prunes_per_day'} > 98 ) {
            badprint
"Query cache prunes per day: $mycalc{'query_cache_prunes_per_day'}";
            if ( $myvar{'query_cache_size'} >= 128 * 1024 * 1024 ) {
                push( @generalrec,
                    "Increasing the query_cache size over 128M may reduce performance"
                );
                push( @adjvars,
                        "query_cache_size (> "
                      . hr_bytes_rnd( $myvar{'query_cache_size'} )
                      . ") [see warning above]" );
            }
            else {
                push( @adjvars,
                        "query_cache_size (> "
                      . hr_bytes_rnd( $myvar{'query_cache_size'} )
                      . ")" );
            }
        }
        else {
            goodprint
"Query cache prunes per day: $mycalc{'query_cache_prunes_per_day'}";
        }
    }
    # Sorting
    if ( $mycalc{'total_sorts'} == 0 ) {
        goodprint "No Sort requiring temporary tables";
    }
    elsif ( $mycalc{'pct_temp_sort_table'} > 10 ) {
        badprint
          "Sorts requiring temporary tables: $mycalc{'pct_temp_sort_table'}% ("
          . hr_num( $mystat{'Sort_merge_passes'} )
          . " temp sorts / "
          . hr_num( $mycalc{'total_sorts'} )
          . " sorts)";
        push( @adjvars,
                "sort_buffer_size (> "
              . hr_bytes_rnd( $myvar{'sort_buffer_size'} )
              . ")" );
        push( @adjvars,
                "read_rnd_buffer_size (> "
              . hr_bytes_rnd( $myvar{'read_rnd_buffer_size'} )
              . ")" );
    }
    else {
        goodprint
          "Sorts requiring temporary tables: $mycalc{'pct_temp_sort_table'}% ("
          . hr_num( $mystat{'Sort_merge_passes'} )
          . " temp sorts / "
          . hr_num( $mycalc{'total_sorts'} )
          . " sorts)";
    }
    # Joins
    if ( $mycalc{'joins_without_indexes_per_day'} > 250 ) {
        badprint
          "Joins performed without indexes: $mycalc{'joins_without_indexes'}";
        push( @adjvars,
                "join_buffer_size (> "
              . hr_bytes( $myvar{'join_buffer_size'} )
              . ", or always use indexes with JOINs)" );
        push( @generalrec,
            "Adjust your join queries to always utilize indexes" );
    }
    else {
        goodprint "No joins without indexes";
        # No joins have run without indexes
    }
    # Temporary tables
    if ( $mystat{'Created_tmp_tables'} > 0 ) {
        if (   $mycalc{'pct_temp_disk'} > 25
            && $mycalc{'max_tmp_table_size'} < 256 * 1024 * 1024 )
        {
            badprint
              "Temporary tables created on disk: $mycalc{'pct_temp_disk'}% ("
              . hr_num( $mystat{'Created_tmp_disk_tables'} )
              . " on disk / "
              . hr_num( $mystat{'Created_tmp_tables'} )
              . " total)";
            push( @adjvars,
                    "tmp_table_size (> "
                  . hr_bytes_rnd( $myvar{'tmp_table_size'} )
                  . ")" );
            push( @adjvars,
                    "max_heap_table_size (> "
                  . hr_bytes_rnd( $myvar{'max_heap_table_size'} )
                  . ")" );
            push( @generalrec,
                "When making adjustments, make tmp_table_size/max_heap_table_size equal"
            );
            push( @generalrec,
                "Reduce your SELECT DISTINCT queries which have no LIMIT clause"
            );
        }
        elsif ($mycalc{'pct_temp_disk'} > 25
            && $mycalc{'max_tmp_table_size'} >= 256 * 1024 * 1024 )
        {
            badprint
              "Temporary tables created on disk: $mycalc{'pct_temp_disk'}% ("
              . hr_num( $mystat{'Created_tmp_disk_tables'} )
              . " on disk / "
              . hr_num( $mystat{'Created_tmp_tables'} )
              . " total)";
            push( @generalrec,
                "Temporary table size is already large - reduce result set size"
            );
            push( @generalrec,
                "Reduce your SELECT DISTINCT queries without LIMIT clauses" );
        }
        else {
            goodprint
              "Temporary tables created on disk: $mycalc{'pct_temp_disk'}% ("
              . hr_num( $mystat{'Created_tmp_disk_tables'} )
              . " on disk / "
              . hr_num( $mystat{'Created_tmp_tables'} )
              . " total)";
        }
    }
    else {
        goodprint "No tmp tables created on disk";
    }
    # Thread cache
    if ( defined( $myvar{'thread_handling'} )
        and $myvar{'thread_handling'} eq 'pool-of-threads' )
    {
  # https://www.percona.com/doc/percona-server/LATEST/performance/threadpool.html
  # When thread pool is enabled, the value of the thread_cache_size variable
  # is ignored. The Threads_cached status variable contains 0 in this case.
        infoprint "Thread cache not used with thread_handling=pool-of-threads";
    }
    else {
        if ( $myvar{'thread_cache_size'} eq 0 ) {
            badprint "Thread cache is disabled";
            push( @generalrec, "Set thread_cache_size to 4 as a starting value" );
            push( @adjvars, "thread_cache_size (start at 4)" );
        }
        else {
            if ( $mycalc{'thread_cache_hit_rate'} <= 50 ) {
                badprint
                  "Thread cache hit rate: $mycalc{'thread_cache_hit_rate'}% ("
                  . hr_num( $mystat{'Threads_created'} )
                  . " created / "
                  . hr_num( $mystat{'Connections'} )
                  . " connections)";
                push( @adjvars,
                    "thread_cache_size (> $myvar{'thread_cache_size'})" );
            }
            else {
                goodprint
                  "Thread cache hit rate: $mycalc{'thread_cache_hit_rate'}% ("
                  . hr_num( $mystat{'Threads_created'} )
                  . " created / "
                  . hr_num( $mystat{'Connections'} )
                  . " connections)";
            }
        }
    }
    # Table cache
    # The variable was renamed table_cache -> table_open_cache in 5.1.
    my $table_cache_var = "";
    if ( $mystat{'Open_tables'} > 0 ) {
        if ( $mycalc{'table_cache_hit_rate'} < 20 ) {
            badprint "Table cache hit rate: $mycalc{'table_cache_hit_rate'}% ("
              . hr_num( $mystat{'Open_tables'} )
              . " open / "
              . hr_num( $mystat{'Opened_tables'} )
              . " opened)";
            if ( mysql_version_ge( 5, 1 ) ) {
                $table_cache_var = "table_open_cache";
            }
            else {
                $table_cache_var = "table_cache";
            }
            push( @adjvars,
                $table_cache_var . " (> " . $myvar{$table_cache_var} . ")" );
            push( @generalrec,
                    "Increase "
                  . $table_cache_var
                  . " gradually to avoid file descriptor limits" );
            push( @generalrec,
                    "Read this before increasing "
                  . $table_cache_var
                  . " over 64: http://bit.ly/1mi7c4C" );
            push( @generalrec,
                    "Read this before increasing for MariaDB"
                  . " https://mariadb.com/kb/en/library/optimizing-table_open_cache/");
            push( @generalrec,
                "This is MyISAM only table_cache scalability problem, InnoDB not affected."
            );
            push( @generalrec,
                "See more details here: https://bugs.mysql.com/bug.php?id=49177"
            );
            push( @generalrec,
                "This bug already fixed in MySQL 5.7.9 and newer MySQL versions."
            );
            push( @generalrec,
                    "Beware that open_files_limit ("
                  . $myvar{'open_files_limit'}
                  . ") variable " );
            push( @generalrec,
                    "should be greater than $table_cache_var ("
                  . $myvar{$table_cache_var}
                  . ")" );
        }
        else {
            goodprint "Table cache hit rate: $mycalc{'table_cache_hit_rate'}% ("
              . hr_num( $mystat{'Open_tables'} )
              . " open / "
              . hr_num( $mystat{'Opened_tables'} )
              . " opened)";
        }
    }
    # Open files
    if ( defined $mycalc{'pct_files_open'} ) {
        if ( $mycalc{'pct_files_open'} > 85 ) {
            badprint "Open file limit used: $mycalc{'pct_files_open'}% ("
              . hr_num( $mystat{'Open_files'} ) . "/"
              . hr_num( $myvar{'open_files_limit'} ) . ")";
            push( @adjvars,
                "open_files_limit (> " . $myvar{'open_files_limit'} . ")" );
        }
        else {
            goodprint "Open file limit used: $mycalc{'pct_files_open'}% ("
              . hr_num( $mystat{'Open_files'} ) . "/"
              . hr_num( $myvar{'open_files_limit'} ) . ")";
        }
    }
    # Table locks
    if ( defined $mycalc{'pct_table_locks_immediate'} ) {
        if ( $mycalc{'pct_table_locks_immediate'} < 95 ) {
            badprint
"Table locks acquired immediately: $mycalc{'pct_table_locks_immediate'}%";
            push( @generalrec,
                "Optimize queries and/or use InnoDB to reduce lock wait" );
        }
        else {
            goodprint
"Table locks acquired immediately: $mycalc{'pct_table_locks_immediate'}% ("
              . hr_num( $mystat{'Table_locks_immediate'} )
              . " immediate / "
              . hr_num( $mystat{'Table_locks_waited'} +
                  $mystat{'Table_locks_immediate'} )
              . " locks)";
        }
    }
    # Binlog cache
    if ( defined $mycalc{'pct_binlog_cache'} ) {
        if ( $mycalc{'pct_binlog_cache'} < 90
            && $mystat{'Binlog_cache_use'} > 0 )
        {
            badprint "Binlog cache memory access: "
              . $mycalc{'pct_binlog_cache'} . "% ("
              . (
                $mystat{'Binlog_cache_use'} - $mystat{'Binlog_cache_disk_use'} )
              . " Memory / "
              . $mystat{'Binlog_cache_use'}
              . " Total)";
            push( @generalrec,
                    "Increase binlog_cache_size (Actual value: "
                  . $myvar{'binlog_cache_size'}
                  . ")" );
            push( @adjvars,
                    "binlog_cache_size ("
                  . hr_bytes( $myvar{'binlog_cache_size'} + 16 * 1024 * 1024 )
                  . ")" );
        }
        else {
            goodprint "Binlog cache memory access: "
              . $mycalc{'pct_binlog_cache'} . "% ("
              . (
                $mystat{'Binlog_cache_use'} - $mystat{'Binlog_cache_disk_use'} )
              . " Memory / "
              . $mystat{'Binlog_cache_use'}
              . " Total)";
            debugprint "Not enough data to validate binlog cache size\n"
              if $mystat{'Binlog_cache_use'} < 10;
        }
    }
    # Performance options
    if ( !mysql_version_ge( 5, 1 ) ) {
        push( @generalrec, "Upgrade to MySQL 5.5+ to use asynchronous write" );
    }
    elsif ( $myvar{'concurrent_insert'} eq "OFF" ) {
        push( @generalrec, "Enable concurrent_insert by setting it to 'ON'" );
    }
    elsif ( $myvar{'concurrent_insert'} eq 0 ) {
        push( @generalrec, "Enable concurrent_insert by setting it to 1" );
    }
}
# Recommendations for MyISAM
#
# Audits the MyISAM key buffer: how full key_buffer_size is, whether it
# is large enough to hold every MyISAM index on the server, and the
# read/write key-cache hit rates.  Emits good/bad status lines and
# appends tuning advice to the global @generalrec / @adjvars lists.
# Reads the global %myvar, %mystat and %mycalc hashes populated earlier
# in the run; returns nothing meaningful.
sub mysql_myisam {
    subheaderprint "MyISAM Metrics";

    # Key buffer usage: share of key_buffer_size currently in use.
    if ( defined( $mycalc{'pct_key_buffer_used'} ) ) {
        if ( $mycalc{'pct_key_buffer_used'} < 90 ) {
            badprint "Key buffer used: $mycalc{'pct_key_buffer_used'}% ("
              . hr_num( $myvar{'key_buffer_size'} *
                  $mycalc{'pct_key_buffer_used'} /
                  100 )
              . " used / "
              . hr_num( $myvar{'key_buffer_size'} )
              . " cache)";

#push(@adjvars,"key_buffer_size (\~ ".hr_num( $myvar{'key_buffer_size'} * $mycalc{'pct_key_buffer_used'} / 100).")");
        }
        else {
            goodprint "Key buffer used: $mycalc{'pct_key_buffer_used'}% ("
              . hr_num( $myvar{'key_buffer_size'} *
                  $mycalc{'pct_key_buffer_used'} /
                  100 )
              . " used / "
              . hr_num( $myvar{'key_buffer_size'} )
              . " cache)";
        }
    }
    else {

        # No queries have run that would use keys, so the percentage was
        # never computed.  Use 0 explicitly instead of interpolating an
        # undefined hash value (which warns under 'use warnings').
        my $pct_used = 0;
        debugprint "Key buffer used: ${pct_used}% ("
          . hr_num( $myvar{'key_buffer_size'} * $pct_used / 100 )
          . " used / "
          . hr_num( $myvar{'key_buffer_size'} )
          . " cache)";
    }

    # Key buffer sizing versus the total size of all MyISAM indexes.
    if ( !defined( $mycalc{'total_myisam_indexes'} ) and $doremote == 1 ) {
        push( @generalrec,
            "Unable to calculate MyISAM indexes on remote MySQL server < 5.0.0"
        );
    }
    elsif ( $mycalc{'total_myisam_indexes'} =~ /^fail$/ ) {
        badprint
          "Cannot calculate MyISAM index size - re-run script as root user";
    }
    elsif ( $mycalc{'total_myisam_indexes'} == 0 ) {

        # Numeric comparison against a plain 0 (previously the string "0",
        # which was coerced anyway but is misleading in numeric context).
        badprint
          "None of your MyISAM tables are indexed - add indexes immediately";
    }
    else {
        if ( $myvar{'key_buffer_size'} < $mycalc{'total_myisam_indexes'}
            && $mycalc{'pct_keys_from_mem'} < 95 )
        {
            badprint "Key buffer size / total MyISAM indexes: "
              . hr_bytes( $myvar{'key_buffer_size'} ) . "/"
              . hr_bytes( $mycalc{'total_myisam_indexes'} ) . "";
            push( @adjvars,
                    "key_buffer_size (> "
                  . hr_bytes( $mycalc{'total_myisam_indexes'} )
                  . ")" );
        }
        else {
            goodprint "Key buffer size / total MyISAM indexes: "
              . hr_bytes( $myvar{'key_buffer_size'} ) . "/"
              . hr_bytes( $mycalc{'total_myisam_indexes'} ) . "";
        }

        # Read key-cache hit rate (Key_read_requests served from memory).
        if ( $mystat{'Key_read_requests'} > 0 ) {
            if ( $mycalc{'pct_keys_from_mem'} < 95 ) {
                badprint
                  "Read Key buffer hit rate: $mycalc{'pct_keys_from_mem'}% ("
                  . hr_num( $mystat{'Key_read_requests'} )
                  . " cached / "
                  . hr_num( $mystat{'Key_reads'} )
                  . " reads)";
            }
            else {
                goodprint
                  "Read Key buffer hit rate: $mycalc{'pct_keys_from_mem'}% ("
                  . hr_num( $mystat{'Key_read_requests'} )
                  . " cached / "
                  . hr_num( $mystat{'Key_reads'} )
                  . " reads)";
            }
        }
        else {

            # No queries have run that would use keys.
            # NOTE(review): this message mirrors the sizing line above
            # rather than a hit-rate line; looks copy-pasted upstream but
            # is kept verbatim for output compatibility.
            debugprint "Key buffer size / total MyISAM indexes: "
              . hr_bytes( $myvar{'key_buffer_size'} ) . "/"
              . hr_bytes( $mycalc{'total_myisam_indexes'} ) . "";
        }

        # Write key-cache hit rate (Key_write_requests served from memory).
        if ( $mystat{'Key_write_requests'} > 0 ) {
            if ( $mycalc{'pct_wkeys_from_mem'} < 95 ) {
                badprint
                  "Write Key buffer hit rate: $mycalc{'pct_wkeys_from_mem'}% ("
                  . hr_num( $mystat{'Key_write_requests'} )
                  . " cached / "
                  . hr_num( $mystat{'Key_writes'} )
                  . " writes)";
            }
            else {
                goodprint
                  "Write Key buffer hit rate: $mycalc{'pct_wkeys_from_mem'}% ("
                  . hr_num( $mystat{'Key_write_requests'} )
                  . " cached / "
                  . hr_num( $mystat{'Key_writes'} )
                  . " writes)";
            }
        }
        else {

            # No queries have run that would use keys.
            # NOTE(review): assumes pct_wkeys_from_mem is still defined
            # when there were zero write requests - confirm in the
            # calculations section.
            debugprint
              "Write Key buffer hit rate: $mycalc{'pct_wkeys_from_mem'}% ("
              . hr_num( $mystat{'Key_write_requests'} )
              . " cached / "
              . hr_num( $mystat{'Key_writes'} )
              . " writes)";
        }
    }
}
# Recommendations for ThreadPool
#
# Checks whether the thread-pool plugin is active and, for stock MySQL,
# whether thread_pool_size falls in the commonly recommended range for
# the dominant storage engine (16-36 for InnoDB, 4-8 for MyISAM).
# MariaDB/Percona builds are left at their defaults.  Reads the global
# %myvar hash and appends advice to @generalrec / @adjvars.
sub mariadb_threadpool {
    subheaderprint "ThreadPool Metrics";

    # The thread pool is only worth auditing when the server exposes it.
    unless ( defined $myvar{'have_threadpool'}
        && $myvar{'have_threadpool'} eq "YES" )
    {
        infoprint "ThreadPool stat is disabled.";
        return;
    }
    infoprint "ThreadPool stat is enabled.";
    infoprint "Thread Pool Size: " . $myvar{'thread_pool_size'} . " thread(s).";

    # MariaDB and Percona ship sensible thread-pool defaults; no advice.
    if ( $myvar{'version'} =~ /mariadb|percona/i ) {
        infoprint "Using default value is good enough for your version ("
          . $myvar{'version'} . ")";
        return;
    }

    # Guard with defined(): have_innodb / have_isam may be absent on some
    # server versions, and comparing undef with eq warns under
    # 'use warnings'.
    if ( defined $myvar{'have_innodb'} && $myvar{'have_innodb'} eq 'YES' ) {
        if ( $myvar{'thread_pool_size'} < 16
            or $myvar{'thread_pool_size'} > 36 )
        {
            badprint
"thread_pool_size between 16 and 36 when using InnoDB storage engine.";
            push( @generalrec,
                    "Thread pool size for InnoDB usage ("
                  . $myvar{'thread_pool_size'}
                  . ")" );
            push( @adjvars,
                "thread_pool_size between 16 and 36 for InnoDB usage" );
        }
        else {
            goodprint
"thread_pool_size between 16 and 36 when using InnoDB storage engine.";
        }
        return;
    }
    if ( defined $myvar{'have_isam'} && $myvar{'have_isam'} eq 'YES' ) {
        if ( $myvar{'thread_pool_size'} < 4 or $myvar{'thread_pool_size'} > 8 )
        {
            badprint
"thread_pool_size between 4 and 8 when using MyIsam storage engine.";
            push( @generalrec,
                    "Thread pool size for MyIsam usage ("
                  . $myvar{'thread_pool_size'}
                  . ")" );
            push( @adjvars,
                "thread_pool_size between 4 and 8 for MyIsam usage" );
        }
        else {
            goodprint
"thread_pool_size between 4 and 8 when using MyISAM storage engine.";
        }
    }
}
# Return the number of bytes currently allocated by the Performance
# Schema engine, or 0 when P_S is disabled/absent or no memory row can
# be found in SHOW ENGINE PERFORMANCE_SCHEMA STATUS output.
sub get_pf_memory {

    # Performance Schema must be present and switched on.
    return 0
      if !defined( $myvar{'performance_schema'} )
      or $myvar{'performance_schema'} eq 'OFF';

    # Keep only the status rows that describe P_S memory usage.
    my @memory_rows = grep { /performance_schema.memory/ }
      select_array("SHOW ENGINE PERFORMANCE_SCHEMA STATUS");
    return 0 unless @memory_rows;

    # Reduce the first matching row to its trailing byte count
    # (row text is left untouched if the pattern does not match).
    ( my $bytes = $memory_rows[0] ) =~ s/.*\s+(\d+)$/$1/g;
    return $bytes;
}
# Recommendations for Performance Schema
sub mysqsl_pfs {
subheaderprint "Performance schema";
# Performance Schema
$myvar{'performance_schema'} = 'OFF'
unless defined( $myvar{'performance_schema'} );
unless ( $myvar{'performance_schema'} eq 'ON' ) {
infoprint "Performance schema is disabled.";
if ( mysql_version_ge( 5, 6 ) ) {
push( @generalrec,
"Performance schema should be activated for better diagnostics"
);
push( @adjvars, "performance_schema = ON enable PFS" );
}
}
else {
if ( mysql_version_le( 5, 5 ) ) {
push( @generalrec,
"Performance schema shouldn't be activated for MySQL and MariaDB 5.5 and lower version"
);
push( @adjvars, "performance_schema = OFF disable PFS" );
}
}
debugprint "Performance schema is " . $myvar{'performance_schema'};
infoprint "Memory used by P_S: " . hr_bytes( get_pf_memory() );
if ( mysql_version_eq( 10, 0 ) ) {
push( @generalrec,
"Performance schema shouldn't be activated for MariaDB 10.0 for performance issue"
);
push( @adjvars, "performance_schema = OFF disable PFS" );
return;
}
unless ( grep /^sys$/, select_array("SHOW DATABASES") ) {
infoprint "Sys schema isn't installed.";
push( @generalrec,
"Consider installing Sys schema from https://github.com/mysql/mysql-sys"
) unless ( mysql_version_le( 5, 5 ) );
return;
}
else {
infoprint "Sys schema is installed.";
}
return if ( $opt{pfstat} == 0 or $myvar{'performance_schema'} ne 'ON' );
infoprint "Sys schema Version: "
. select_one("select sys_version from sys.version");
# Top user per connection
subheaderprint "Performance schema: Top 5 user per connection";
my $nbL = 1;
for my $lQuery (
select_array(
'select user, total_connections from sys.user_summary order by total_connections desc LIMIT 5'
)
)
{
infoprint " +-- $nbL: $lQuery conn(s)";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top user per statement
subheaderprint "Performance schema: Top 5 user per statement";
$nbL = 1;
for my $lQuery (
select_array(
'select user, statements from sys.user_summary order by statements desc LIMIT 5'
)
)
{
infoprint " +-- $nbL: $lQuery stmt(s)";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top user per statement latency
subheaderprint "Performance schema: Top 5 user per statement latency";
$nbL = 1;
for my $lQuery (
select_array(
'select user, statement_avg_latency from sys.x\\$user_summary order by statement_avg_latency desc LIMIT 5'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top user per lock latency
subheaderprint "Performance schema: Top 5 user per lock latency";
$nbL = 1;
for my $lQuery (
select_array(
'select user, lock_latency from sys.x\\$user_summary_by_statement_latency order by lock_latency desc LIMIT 5'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top user per full scans
subheaderprint "Performance schema: Top 5 user per nb full scans";
$nbL = 1;
for my $lQuery (
select_array(
'select user, full_scans from sys.x\\$user_summary_by_statement_latency order by full_scans desc LIMIT 5'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top user per row_sent
subheaderprint "Performance schema: Top 5 user per rows sent";
$nbL = 1;
for my $lQuery (
select_array(
'select user, rows_sent from sys.x\\$user_summary_by_statement_latency order by rows_sent desc LIMIT 5'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top user per row modified
subheaderprint "Performance schema: Top 5 user per rows modified";
$nbL = 1;
for my $lQuery (
select_array(
'select user, rows_affected from sys.x\\$user_summary_by_statement_latency order by rows_affected desc LIMIT 5'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top user per io
subheaderprint "Performance schema: Top 5 user per io";
$nbL = 1;
for my $lQuery (
select_array(
'select user, file_ios from sys.x\\$user_summary order by file_ios desc LIMIT 5'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top user per io latency
subheaderprint "Performance schema: Top 5 user per io latency";
$nbL = 1;
for my $lQuery (
select_array(
'select user, file_io_latency from sys.x\\$user_summary order by file_io_latency desc LIMIT 5'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top host per connection
subheaderprint "Performance schema: Top 5 host per connection";
$nbL = 1;
for my $lQuery (
select_array(
'select host, total_connections from sys.x\\$host_summary order by total_connections desc LIMIT 5'
)
)
{
infoprint " +-- $nbL: $lQuery conn(s)";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top host per statement
subheaderprint "Performance schema: Top 5 host per statement";
$nbL = 1;
for my $lQuery (
select_array(
'select host, statements from sys.x\\$host_summary order by statements desc LIMIT 5'
)
)
{
infoprint " +-- $nbL: $lQuery stmt(s)";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top host per statement latency
subheaderprint "Performance schema: Top 5 host per statement latency";
$nbL = 1;
for my $lQuery (
select_array(
'select host, statement_avg_latency from sys.x\\$host_summary order by statement_avg_latency desc LIMIT 5'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top host per lock latency
subheaderprint "Performance schema: Top 5 host per lock latency";
$nbL = 1;
for my $lQuery (
select_array(
'select host, lock_latency from sys.x\\$host_summary_by_statement_latency order by lock_latency desc LIMIT 5'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top host per full scans
subheaderprint "Performance schema: Top 5 host per nb full scans";
$nbL = 1;
for my $lQuery (
select_array(
'select host, full_scans from sys.x\\$host_summary_by_statement_latency order by full_scans desc LIMIT 5'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top host per rows sent
subheaderprint "Performance schema: Top 5 host per rows sent";
$nbL = 1;
for my $lQuery (
select_array(
'select host, rows_sent from sys.x\\$host_summary_by_statement_latency order by rows_sent desc LIMIT 5'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top host per rows modified
subheaderprint "Performance schema: Top 5 host per rows modified";
$nbL = 1;
for my $lQuery (
select_array(
'select host, rows_affected from sys.x\\$host_summary_by_statement_latency order by rows_affected desc LIMIT 5'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top host per io
subheaderprint "Performance schema: Top 5 host per io";
$nbL = 1;
for my $lQuery (
select_array(
'select host, file_ios from sys.x\\$host_summary order by file_ios desc LIMIT 5'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top 5 host per io latency
subheaderprint "Performance schema: Top 5 host per io latency";
$nbL = 1;
for my $lQuery (
select_array(
'select host, file_io_latency from sys.x\\$host_summary order by file_io_latency desc LIMIT 5'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top IO type order by total io
subheaderprint "Performance schema: Top IO type order by total io";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select substring(event_name,14), SUM(total)AS total from sys.x\\$host_summary_by_file_io_type GROUP BY substring(event_name,14) ORDER BY total DESC;'
)
)
{
infoprint " +-- $nbL: $lQuery i/o";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top IO type order by total latency
subheaderprint "Performance schema: Top IO type order by total latency";
$nbL = 1;
for my $lQuery (
select_array(
'select substring(event_name,14), ROUND(SUM(total_latency),1) AS total_latency from sys.x\\$host_summary_by_file_io_type GROUP BY substring(event_name,14) ORDER BY total_latency DESC;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top IO type order by max latency
subheaderprint "Performance schema: Top IO type order by max latency";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select substring(event_name,14), MAX(max_latency) as max_latency from sys.x\\$host_summary_by_file_io_type GROUP BY substring(event_name,14) ORDER BY max_latency DESC;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top Stages order by total io
subheaderprint "Performance schema: Top Stages order by total io";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select substring(event_name,7), SUM(total)AS total from sys.x\\$host_summary_by_stages GROUP BY substring(event_name,7) ORDER BY total DESC;'
)
)
{
infoprint " +-- $nbL: $lQuery i/o";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top Stages order by total latency
subheaderprint "Performance schema: Top Stages order by total latency";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select substring(event_name,7), ROUND(SUM(total_latency),1) AS total_latency from sys.x\\$host_summary_by_stages GROUP BY substring(event_name,7) ORDER BY total_latency DESC;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top Stages order by avg latency
subheaderprint "Performance schema: Top Stages order by avg latency";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select substring(event_name,7), MAX(avg_latency) as avg_latency from sys.x\\$host_summary_by_stages GROUP BY substring(event_name,7) ORDER BY avg_latency DESC;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top host per table scans
subheaderprint "Performance schema: Top 5 host per table scans";
$nbL = 1;
for my $lQuery (
select_array(
'select host, table_scans from sys.x\\$host_summary order by table_scans desc LIMIT 5'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# InnoDB Buffer Pool by schema
subheaderprint "Performance schema: InnoDB Buffer Pool by schema";
$nbL = 1;
for my $lQuery (
select_array(
'select object_schema, allocated, data, pages from sys.x\\$innodb_buffer_stats_by_schema ORDER BY pages DESC'
)
)
{
infoprint " +-- $nbL: $lQuery page(s)";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# InnoDB Buffer Pool by table
subheaderprint "Performance schema: InnoDB Buffer Pool by table";
$nbL = 1;
for my $lQuery (
select_array(
'select object_schema, object_name, allocated,data, pages from sys.x\\$innodb_buffer_stats_by_table ORDER BY pages DESC'
)
)
{
infoprint " +-- $nbL: $lQuery page(s)";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Process per allocated memory
subheaderprint "Performance schema: Process per time";
$nbL = 1;
for my $lQuery (
select_array(
'select user, Command AS PROC, time from sys.x\\$processlist ORDER BY time DESC;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# InnoDB Lock Waits
subheaderprint "Performance schema: InnoDB Lock Waits";
$nbL = 1;
for my $lQuery (
select_array(
'select wait_age_secs, locked_table, locked_type, waiting_query from sys.x\\$innodb_lock_waits order by wait_age_secs DESC;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Threads IO Latency
subheaderprint "Performance schema: Thread IO Latency";
$nbL = 1;
for my $lQuery (
select_array(
'select user, total_latency, max_latency from sys.x\\$io_by_thread_by_latency order by total_latency DESC;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# High Cost SQL statements
subheaderprint "Performance schema: Top 5 Most latency statements";
$nbL = 1;
for my $lQuery (
select_array(
'select query, avg_latency from sys.x\\$statement_analysis order by avg_latency desc LIMIT 5'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top 5% slower queries
subheaderprint "Performance schema: Top 5 slower queries";
$nbL = 1;
for my $lQuery (
select_array(
'select query, exec_count from sys.x\\$statements_with_runtimes_in_95th_percentile order by exec_count desc LIMIT 5'
)
)
{
infoprint " +-- $nbL: $lQuery s";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top 10 nb statement type
subheaderprint "Performance schema: Top 10 nb statement type";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select statement, sum(total) as total from sys.x\\$host_summary_by_statement_type group by statement order by total desc LIMIT 10;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top statement by total latency
subheaderprint "Performance schema: Top statement by total latency";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select statement, sum(total_latency) as total from sys.x\\$host_summary_by_statement_type group by statement order by total desc LIMIT 10;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top statement by lock latency
subheaderprint "Performance schema: Top statement by lock latency";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select statement, sum(lock_latency) as total from sys.x\\$host_summary_by_statement_type group by statement order by total desc LIMIT 10;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top statement by full scans
subheaderprint "Performance schema: Top statement by full scans";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select statement, sum(full_scans) as total from sys.x\\$host_summary_by_statement_type group by statement order by total desc LIMIT 10;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top statement by rows sent
subheaderprint "Performance schema: Top statement by rows sent";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select statement, sum(rows_sent) as total from sys.x\\$host_summary_by_statement_type group by statement order by total desc LIMIT 10;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Top statement by rows modified
subheaderprint "Performance schema: Top statement by rows modified";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select statement, sum(rows_affected) as total from sys.x\\$host_summary_by_statement_type group by statement order by total desc LIMIT 10;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Use temporary tables
subheaderprint "Performance schema: Some queries using temp table";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select query from sys.x\\$statements_with_temp_tables LIMIT 20'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Unused Indexes
subheaderprint "Performance schema: Unused indexes";
$nbL = 1;
for my $lQuery ( select_array('select * from sys.schema_unused_indexes') ) {
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Full table scans
subheaderprint "Performance schema: Tables with full table scans";
$nbL = 1;
for my $lQuery (
select_array(
'select * from sys.x\\$schema_tables_with_full_table_scans order by rows_full_scanned DESC'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Latest file IO by latency
subheaderprint "Performance schema: Latest FILE IO by latency";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select thread, file, latency, operation from sys.x\\$latest_file_io ORDER BY latency LIMIT 10;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# FILE by IO read bytes
subheaderprint "Performance schema: FILE by IO read bytes";
$nbL = 1;
for my $lQuery (
select_array(
'select file, total_read from sys.x\\$io_global_by_file_by_bytes order by total_read DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# FILE by IO written bytes
subheaderprint "Performance schema: FILE by IO written bytes";
$nbL = 1;
for my $lQuery (
select_array(
'select file, total_written from sys.x\\$io_global_by_file_by_bytes order by total_written DESC LIMIT 15'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# file per IO total latency
subheaderprint "Performance schema: file per IO total latency";
$nbL = 1;
for my $lQuery (
select_array(
'select file, total_latency from sys.x\\$io_global_by_file_by_latency ORDER BY total_latency DESC LIMIT 20;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# file per IO read latency
subheaderprint "Performance schema: file per IO read latency";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select file, read_latency from sys.x\\$io_global_by_file_by_latency ORDER BY read_latency DESC LIMIT 20;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# file per IO write latency
subheaderprint "Performance schema: file per IO write latency";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select file, write_latency from sys.x\\$io_global_by_file_by_latency ORDER BY write_latency DESC LIMIT 20;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Event Wait by read bytes
subheaderprint "Performance schema: Event Wait by read bytes";
$nbL = 1;
for my $lQuery (
select_array(
'select event_name, total_read from sys.x\\$io_global_by_wait_by_bytes order by total_read DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Event Wait by write bytes
subheaderprint "Performance schema: Event Wait written bytes";
$nbL = 1;
for my $lQuery (
select_array(
'select event_name, total_written from sys.x\\$io_global_by_wait_by_bytes order by total_written DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# event per wait total latency
subheaderprint "Performance schema: event per wait total latency";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select event_name, total_latency from sys.x\\$io_global_by_wait_by_latency ORDER BY total_latency DESC LIMIT 20;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# event per wait read latency
subheaderprint "Performance schema: event per wait read latency";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select event_name, read_latency from sys.x\\$io_global_by_wait_by_latency ORDER BY read_latency DESC LIMIT 20;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# event per wait write latency
subheaderprint "Performance schema: event per wait write latency";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select event_name, write_latency from sys.x\\$io_global_by_wait_by_latency ORDER BY write_latency DESC LIMIT 20;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
#schema_index_statistics
# TOP 15 most read index
subheaderprint "Performance schema: TOP 15 most read indexes";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select table_schema, table_name,index_name, rows_selected from sys.x\\$schema_index_statistics ORDER BY ROWs_selected DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# TOP 15 most used index
subheaderprint "Performance schema: TOP 15 most modified indexes";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select table_schema, table_name,index_name, rows_inserted+rows_updated+rows_deleted AS changes from sys.x\\$schema_index_statistics ORDER BY rows_inserted+rows_updated+rows_deleted DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# TOP 15 high read latency index
subheaderprint "Performance schema: TOP 15 high read latency index";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select table_schema, table_name,index_name, select_latency from sys.x\\$schema_index_statistics ORDER BY select_latency DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# TOP 15 high insert latency index
subheaderprint "Performance schema: TOP 15 most modified indexes";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select table_schema, table_name,index_name, insert_latency from sys.x\\$schema_index_statistics ORDER BY insert_latency DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# TOP 15 high update latency index
subheaderprint "Performance schema: TOP 15 high update latency index";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select table_schema, table_name,index_name, update_latency from sys.x\\$schema_index_statistics ORDER BY update_latency DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# TOP 15 high delete latency index
subheaderprint "Performance schema: TOP 15 high delete latency index";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select table_schema, table_name,index_name, delete_latency from sys.x\\$schema_index_statistics ORDER BY delete_latency DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# TOP 15 most read tables
subheaderprint "Performance schema: TOP 15 most read tables";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select table_schema, table_name, rows_fetched from sys.x\\$schema_table_statistics ORDER BY ROWs_fetched DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# TOP 15 most used tables
subheaderprint "Performance schema: TOP 15 most modified tables";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select table_schema, table_name, rows_inserted+rows_updated+rows_deleted AS changes from sys.x\\$schema_table_statistics ORDER BY rows_inserted+rows_updated+rows_deleted DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# TOP 15 high read latency tables
subheaderprint "Performance schema: TOP 15 high read latency tables";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select table_schema, table_name, fetch_latency from sys.x\\$schema_table_statistics ORDER BY fetch_latency DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# TOP 15 high insert latency tables
subheaderprint "Performance schema: TOP 15 high insert latency tables";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select table_schema, table_name, insert_latency from sys.x\\$schema_table_statistics ORDER BY insert_latency DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# TOP 15 high update latency tables
subheaderprint "Performance schema: TOP 15 high update latency tables";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select table_schema, table_name, update_latency from sys.x\\$schema_table_statistics ORDER BY update_latency DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# TOP 15 high delete latency tables
subheaderprint "Performance schema: TOP 15 high delete latency tables";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select table_schema, table_name, delete_latency from sys.x\\$schema_table_statistics ORDER BY delete_latency DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
# Redundant indexes
subheaderprint "Performance schema: Redundant indexes";
$nbL = 1;
for my $lQuery (
select_array('use sys;select * from schema_redundant_indexes;') )
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint "Performance schema: Tables not using InnoDB buffer";
$nbL = 1;
for my $lQuery (
select_array(
' Select table_schema, table_name from sys.x\\$schema_table_statistics_with_buffer where innodb_buffer_allocated IS NULL;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint "Performance schema: Table not using InnoDB buffer";
$nbL = 1;
for my $lQuery (
select_array(
' Select table_schema, table_name from sys.x\\$schema_table_statistics_with_buffer where innodb_buffer_allocated IS NULL;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint "Performance schema: Table not using InnoDB buffer";
$nbL = 1;
for my $lQuery (
select_array(
' Select table_schema, table_name from sys.x\\$schema_table_statistics_with_buffer where innodb_buffer_allocated IS NULL;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint "Performance schema: Top 15 Tables using InnoDB buffer";
$nbL = 1;
for my $lQuery (
select_array(
'select table_schema,table_name,innodb_buffer_allocated from sys.x\\$schema_table_statistics_with_buffer where innodb_buffer_allocated IS NOT NULL ORDER BY innodb_buffer_allocated DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint "Performance schema: Top 15 Tables with InnoDB buffer free";
$nbL = 1;
for my $lQuery (
select_array(
'select table_schema,table_name,innodb_buffer_free from sys.x\\$schema_table_statistics_with_buffer where innodb_buffer_allocated IS NOT NULL ORDER BY innodb_buffer_free DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint "Performance schema: Top 15 Most executed queries";
$nbL = 1;
for my $lQuery (
select_array(
'select db, query, exec_count from sys.x\\$statement_analysis order by exec_count DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint
"Performance schema: Latest SQL queries in errors or warnings";
$nbL = 1;
for my $lQuery (
select_array(
'select query, last_seen from sys.x\\$statements_with_errors_or_warnings ORDER BY last_seen LIMIT 100;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint "Performance schema: Top 20 queries with full table scans";
$nbL = 1;
for my $lQuery (
select_array(
'select db, query, exec_count from sys.x\\$statements_with_full_table_scans order BY exec_count DESC LIMIT 20;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint "Performance schema: Last 50 queries with full table scans";
$nbL = 1;
for my $lQuery (
select_array(
'select db, query, last_seen from sys.x\\$statements_with_full_table_scans order BY last_seen DESC LIMIT 50;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint "Performance schema: TOP 15 reader queries (95% percentile)";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select db, query , rows_sent from sys.x\\$statements_with_runtimes_in_95th_percentile ORDER BY ROWs_sent DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint
"Performance schema: TOP 15 most row look queries (95% percentile)";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select db, query, rows_examined AS search from sys.x\\$statements_with_runtimes_in_95th_percentile ORDER BY rows_examined DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint
"Performance schema: TOP 15 total latency queries (95% percentile)";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select db, query, total_latency AS search from sys.x\\$statements_with_runtimes_in_95th_percentile ORDER BY total_latency DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint
"Performance schema: TOP 15 max latency queries (95% percentile)";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select db, query, max_latency AS search from sys.x\\$statements_with_runtimes_in_95th_percentile ORDER BY max_latency DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint
"Performance schema: TOP 15 average latency queries (95% percentile)";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select db, query, avg_latency AS search from sys.x\\$statements_with_runtimes_in_95th_percentile ORDER BY avg_latency DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint "Performance schema: Top 20 queries with sort";
$nbL = 1;
for my $lQuery (
select_array(
'select db, query, exec_count from sys.x\\$statements_with_sorting order BY exec_count DESC LIMIT 20;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint "Performance schema: Last 50 queries with sort";
$nbL = 1;
for my $lQuery (
select_array(
'select db, query, last_seen from sys.x\\$statements_with_sorting order BY last_seen DESC LIMIT 50;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint "Performance schema: TOP 15 row sorting queries with sort";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select db, query , rows_sorted from sys.x\\$statements_with_sorting ORDER BY ROWs_sorted DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint "Performance schema: TOP 15 total latency queries with sort";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select db, query, total_latency AS search from sys.x\\$statements_with_sorting ORDER BY total_latency DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint "Performance schema: TOP 15 merge queries with sort";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select db, query, sort_merge_passes AS search from sys.x\\$statements_with_sorting ORDER BY sort_merge_passes DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint
"Performance schema: TOP 15 average sort merges queries with sort";
$nbL = 1;
for my $lQuery (
select_array(
'select db, query, avg_sort_merges AS search from sys.x\\$statements_with_sorting ORDER BY avg_sort_merges DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint "Performance schema: TOP 15 scans queries with sort";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select db, query, sorts_using_scans AS search from sys.x\\$statements_with_sorting ORDER BY sorts_using_scans DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint "Performance schema: TOP 15 range queries with sort";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select db, query, sort_using_range AS search from sys.x\\$statements_with_sorting ORDER BY sort_using_range DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
##################################################################################
#statements_with_temp_tables
#mysql> desc statements_with_temp_tables;
#+--------------------------+---------------------+------+-----+---------------------+-------+
#| Field | Type | Null | Key | Default | Extra |
#+--------------------------+---------------------+------+-----+---------------------+-------+
#| query | longtext | YES | | NULL | |
#| db | varchar(64) | YES | | NULL | |
#| exec_count | bigint(20) unsigned | NO | | NULL | |
#| total_latency | text | YES | | NULL | |
#| memory_tmp_tables | bigint(20) unsigned | NO | | NULL | |
#| disk_tmp_tables | bigint(20) unsigned | NO | | NULL | |
#| avg_tmp_tables_per_query | decimal(21,0) | NO | | 0 | |
#| tmp_tables_to_disk_pct | decimal(24,0) | NO | | 0 | |
#| first_seen | timestamp | NO | | 0000-00-00 00:00:00 | |
#| last_seen | timestamp | NO | | 0000-00-00 00:00:00 | |
#| digest | varchar(32) | YES | | NULL | |
#+--------------------------+---------------------+------+-----+---------------------+-------+
#11 rows in set (0,01 sec)#
#
subheaderprint "Performance schema: Top 20 queries with temp table";
$nbL = 1;
for my $lQuery (
select_array(
'select db, query, exec_count from sys.x\\$statements_with_temp_tables order BY exec_count DESC LIMIT 20;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint "Performance schema: Last 50 queries with temp table";
$nbL = 1;
for my $lQuery (
select_array(
'select db, query, last_seen from sys.x\\$statements_with_temp_tables order BY last_seen DESC LIMIT 50;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint
"Performance schema: TOP 15 total latency queries with temp table";
$nbL = 1;
for my $lQuery (
select_array(
'select db, query, total_latency AS search from sys.x\\$statements_with_temp_tables ORDER BY total_latency DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint "Performance schema: TOP 15 queries with temp table to disk";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select db, query, disk_tmp_tables from sys.x\\$statements_with_temp_tables ORDER BY disk_tmp_tables DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
##################################################################################
#wait_classes_global_by_latency
#ysql> select * from wait_classes_global_by_latency;
#-----------------+-------+---------------+-------------+-------------+-------------+
# event_class | total | total_latency | min_latency | avg_latency | max_latency |
#-----------------+-------+---------------+-------------+-------------+-------------+
# wait/io/file | 15381 | 1.23 s | 0 ps | 80.12 us | 230.64 ms |
# wait/io/table | 59 | 7.57 ms | 5.45 us | 128.24 us | 3.95 ms |
# wait/lock/table | 69 | 3.22 ms | 658.84 ns | 46.64 us | 1.10 ms |
#-----------------+-------+---------------+-------------+-------------+-------------+
# rows in set (0,00 sec)
subheaderprint "Performance schema: TOP 15 class events by number";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select event_class, total from sys.x\\$wait_classes_global_by_latency ORDER BY total DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint "Performance schema: TOP 30 events by number";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select events, total from sys.x\\$waits_global_by_latency ORDER BY total DESC LIMIT 30;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint "Performance schema: TOP 15 class events by total latency";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select event_class, total_latency from sys.x\\$wait_classes_global_by_latency ORDER BY total_latency DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint "Performance schema: TOP 30 events by total latency";
$nbL = 1;
for my $lQuery (
select_array(
'use sys;select events, total_latency from sys.x\\$waits_global_by_latency ORDER BY total_latency DESC LIMIT 30;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint "Performance schema: TOP 15 class events by max latency";
$nbL = 1;
for my $lQuery (
select_array(
'select event_class, max_latency from sys.x\\$wait_classes_global_by_latency ORDER BY max_latency DESC LIMIT 15;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
subheaderprint "Performance schema: TOP 30 events by max latency";
$nbL = 1;
for my $lQuery (
select_array(
'select events, max_latency from sys.x\\$waits_global_by_latency ORDER BY max_latency DESC LIMIT 30;'
)
)
{
infoprint " +-- $nbL: $lQuery";
$nbL++;
}
infoprint "No information found or indicators deactivated."
if ( $nbL == 1 );
}
# Recommendations for Ariadb
# Recommendations for the Aria storage engine (MariaDB).
# Reads the file-level globals %myvar (server variables), %mystat (status
# counters), %mycalc (derived metrics) and $doremote; prints findings via
# good/bad/infoprint and pushes tuning suggestions onto @adjvars/@generalrec.
sub mariadb_ariadb {
    subheaderprint "AriaDB Metrics";

    # AriaDB
    # Engine counts as enabled only when have_aria is defined and "YES".
    unless ( defined $myvar{'have_aria'}
        and $myvar{'have_aria'} eq "YES" )
    {
        infoprint "AriaDB is disabled.";
        return;
    }
    infoprint "AriaDB is enabled.";

    # Aria pagecache
    # total_aria_indexes is: undef when it could not be computed against a
    # remote server, the literal string 'fail' when the local scan failed,
    # and a byte count otherwise.
    if ( !defined( $mycalc{'total_aria_indexes'} ) and $doremote == 1 ) {
        push( @generalrec,
            "Unable to calculate Aria indexes on remote MySQL server < 5.0.0" );
    }
    elsif ( $mycalc{'total_aria_indexes'} =~ /^fail$/ ) {
        badprint
          "Cannot calculate Aria index size - re-run script as root user";
    }
    elsif ( $mycalc{'total_aria_indexes'} == "0" ) {
        badprint
          "None of your Aria tables are indexed - add indexes immediately";
    }
    else {
        # The pagecache should be able to hold all Aria indexes, unless the
        # key hit rate is already >= 95% in which case the size is accepted.
        if (
            $myvar{'aria_pagecache_buffer_size'} < $mycalc{'total_aria_indexes'}
            && $mycalc{'pct_aria_keys_from_mem'} < 95 )
        {
            badprint "Aria pagecache size / total Aria indexes: "
              . hr_bytes( $myvar{'aria_pagecache_buffer_size'} ) . "/"
              . hr_bytes( $mycalc{'total_aria_indexes'} ) . "";
            push( @adjvars,
                    "aria_pagecache_buffer_size (> "
                  . hr_bytes( $mycalc{'total_aria_indexes'} )
                  . ")" );
        }
        else {
            goodprint "Aria pagecache size / total Aria indexes: "
              . hr_bytes( $myvar{'aria_pagecache_buffer_size'} ) . "/"
              . hr_bytes( $mycalc{'total_aria_indexes'} ) . "";
        }
        # Only rate the hit ratio when at least one pagecache read happened.
        if ( $mystat{'Aria_pagecache_read_requests'} > 0 ) {
            if ( $mycalc{'pct_aria_keys_from_mem'} < 95 ) {
                badprint
                  "Aria pagecache hit rate: $mycalc{'pct_aria_keys_from_mem'}% ("
                  . hr_num( $mystat{'Aria_pagecache_read_requests'} )
                  . " cached / "
                  . hr_num( $mystat{'Aria_pagecache_reads'} )
                  . " reads)";
            }
            else {
                goodprint
                  "Aria pagecache hit rate: $mycalc{'pct_aria_keys_from_mem'}% ("
                  . hr_num( $mystat{'Aria_pagecache_read_requests'} )
                  . " cached / "
                  . hr_num( $mystat{'Aria_pagecache_reads'} )
                  . " reads)";
            }
        }
        else {

            # No queries have run that would use keys
        }
    }
}
# Recommendations for TokuDB
# Recommendations for the TokuDB storage engine (MariaDB).
# Currently only reports whether the engine is available, based on %myvar.
sub mariadb_tokudb {
    subheaderprint "TokuDB Metrics";

    # TokuDB counts as enabled only when have_tokudb is defined and "YES".
    my $is_enabled =
      defined( $myvar{'have_tokudb'} ) && $myvar{'have_tokudb'} eq "YES";
    if ( !$is_enabled ) {
        infoprint "TokuDB is disabled.";
        return;
    }
    infoprint "TokuDB is enabled.";

    # No engine-specific checks implemented yet.
}
# Recommendations for XtraDB
# Recommendations for the XtraDB storage engine (MariaDB / Percona).
# Currently only reports whether the engine is available, based on %myvar.
sub mariadb_xtradb {
    subheaderprint "XtraDB Metrics";

    # XtraDB counts as enabled only when have_xtradb is defined and "YES".
    unless ( defined $myvar{'have_xtradb'}
        && $myvar{'have_xtradb'} eq "YES" )
    {
        infoprint "XtraDB is disabled.";
        return;
    }
    infoprint "XtraDB is enabled.";

    # Fixed: this statement previously had no trailing semicolon. It only
    # parsed because it was the last statement in the block; appending any
    # statement after it would have been a syntax error.
    infoprint "Note that MariaDB 10.2 makes use of InnoDB, not XtraDB.";

    # No further engine-specific checks implemented yet.
}
# Recommendations for RocksDB
# Recommendations for the RocksDB storage engine (MariaDB).
# Currently only reports whether the engine is available, based on %myvar.
sub mariadb_rockdb {
    subheaderprint "RocksDB Metrics";

    # RocksDB counts as enabled only when have_rocksdb is defined and "YES".
    if ( !( defined( $myvar{'have_rocksdb'} )
            && $myvar{'have_rocksdb'} eq "YES" ) )
    {
        infoprint "RocksDB is disabled.";
        return;
    }
    infoprint "RocksDB is enabled.";

    # No engine-specific checks implemented yet.
}
# Recommendations for Spider
# Recommendations for the Spider storage engine (MariaDB).
# Currently only reports whether the engine is available, based on %myvar.
sub mariadb_spider {
    subheaderprint "Spider Metrics";

    # Spider counts as enabled only when have_spider is defined and "YES".
    my $have_spider = $myvar{'have_spider'};
    unless ( defined($have_spider) && $have_spider eq "YES" ) {
        infoprint "Spider is disabled.";
        return;
    }
    infoprint "Spider is enabled.";

    # No engine-specific checks implemented yet.
}
# Recommendations for Connect
# Recommendations for the Connect storage engine (MariaDB).
# Currently only reports whether the engine is available, based on %myvar.
sub mariadb_connect {
    subheaderprint "Connect Metrics";

    # Connect counts as enabled only when have_connect is defined and "YES".
    if ( !defined( $myvar{'have_connect'} )
        or $myvar{'have_connect'} ne "YES" )
    {
        infoprint "Connect is disabled.";
        return;
    }
    infoprint "Connect is enabled.";

    # No engine-specific checks implemented yet.
}
# Perl trim function to remove whitespace from the start and end of the string
# Strip leading and trailing whitespace from a string.
# An undefined argument yields the empty string; the input is not modified.
sub trim {
    my ($text) = @_;
    return "" if !defined($text);
    $text =~ s/^\s+|\s+$//g;
    return $text;
}
# Return the list of Galera provider options parsed from the
# wsrep_provider_options server variable (split on ';'), with
# wsrep_slave_threads appended in the same " key = value" shape.
# Returns an empty list when wsrep_provider_options is not set.
sub get_wsrep_options {
    return () unless defined $myvar{'wsrep_provider_options'};

    my @options = split /;/, $myvar{'wsrep_provider_options'};

    # Keep the leading space: get_wsrep_option() greps for /\s*$key =/.
    push @options, ' wsrep_slave_threads = ' . $myvar{'wsrep_slave_threads'};
    @options = remove_cr @options;
    @options = remove_empty @options;
    debugprint Dumper( \@options );
    return @options;
}
# Return the Galera gcache.size setting converted to raw bytes via hr_raw(),
# or 0 when the option is absent or empty.
sub get_gcache_memory {
    my $raw_size = hr_raw( get_wsrep_option('gcache.size') );
    return 0 if !defined($raw_size) or $raw_size eq '';
    return $raw_size;
}
# Look up a single option from the Galera provider options list.
# Returns '' when wsrep_provider_options is unset or the list is empty,
# 0 when the key is not found, and the option's value otherwise.
# NOTE(review): $key is interpolated unescaped into the pattern, so dots in
# keys like 'gcache.size' match any character — preserved as-is.
sub get_wsrep_option {
    my ($key) = @_;
    return '' unless defined $myvar{'wsrep_provider_options'};
    my @options = get_wsrep_options;
    return '' if scalar(@options) == 0;

    # First entry matching " key =" wins.
    my ($matched) = grep { /\s*$key =/ } @options;
    return 0 unless defined $matched;

    # Keep only the text after the '=' sign.
    $matched =~ s/.*=\s*(.+)$/$1/g;
    return $matched;
}
# Recommendations for Galera
# Recommendations for Galera Cluster (MariaDB / Percona XtraDB Cluster).
# Reads %myvar (server variables), %mystat (status), calls the wsrep helper
# subs, prints findings via good/bad/infoprint, records data in %result and
# pushes tuning advice onto @adjvars / @generalrec.
#
# Fix: the status-collection loop below previously stored $myvar{$gstatus}
# (a server *variable* lookup) while iterating over %mystat keys, so
# $result{'Galera'}{'status'} held wrong/undef values; it now stores
# $mystat{$gstatus}, matching the debugprint on the line above it.
sub mariadb_galera {
    subheaderprint "Galera Metrics";

    # Galera Cluster
    unless ( defined $myvar{'have_galera'}
        && $myvar{'have_galera'} eq "YES" )
    {
        infoprint "Galera is disabled.";
        return;
    }
    infoprint "Galera is enabled.";
    debugprint "Galera variables:";
    foreach my $gvar ( keys %myvar ) {
        next unless $gvar =~ /^wsrep.*/;
        next if $gvar eq 'wsrep_provider_options';
        debugprint "\t" . trim($gvar) . " = " . $myvar{$gvar};
        $result{'Galera'}{'variables'}{$gvar} = $myvar{$gvar};
    }
    debugprint "Galera wsrep provider Options:";
    my @galera_options = get_wsrep_options;
    $result{'Galera'}{'wsrep options'} = get_wsrep_options();
    foreach my $gparam (@galera_options) {
        debugprint "\t" . trim($gparam);
    }
    debugprint "Galera status:";
    foreach my $gstatus ( keys %mystat ) {
        next unless $gstatus =~ /^wsrep.*/;
        debugprint "\t" . trim($gstatus) . " = " . $mystat{$gstatus};

        # Was: $myvar{$gstatus} — wrong hash for a status counter.
        $result{'Galera'}{'status'}{$gstatus} = $mystat{$gstatus};
    }

    # NOTE(review): reports gcache.mem_size here while get_gcache_memory()
    # reads gcache.size — confirm which option was intended.
    infoprint "GCache is using "
      . hr_bytes_rnd( get_wsrep_option('gcache.mem_size') );

    # Tables without any PRIMARY/UNIQUE key column; these break Galera's
    # row-based replication guarantees.
    #my @primaryKeysNbTables=();
    my @primaryKeysNbTables = select_array(
        "Select CONCAT(c.table_schema,CONCAT('.', c.table_name))
from information_schema.columns c
join information_schema.tables t using (TABLE_SCHEMA, TABLE_NAME)
where c.table_schema not in ('mysql', 'information_schema', 'performance_schema')
and t.table_type != 'VIEW'
group by c.table_schema,c.table_name
having sum(if(c.column_key in ('PRI','UNI'), 1,0)) = 0"
    );

    infoprint "CPU core detected : ". (cpu_cores);
    infoprint "wsrep_slave_threads: ". get_wsrep_option('wsrep_slave_threads');

    # Applier threads should be 2x-4x the CPU core count.
    if ( get_wsrep_option('wsrep_slave_threads') > ((cpu_cores) * 4)
        or get_wsrep_option('wsrep_slave_threads') < ((cpu_cores) * 2) )
    {
        badprint
          "wsrep_slave_threads is not equal to 2, 3 or 4 times number of CPU(s)";
        push @adjvars, "wsrep_slave_threads = ".((cpu_cores) * 4);
    }
    else {
        goodprint
          "wsrep_slave_threads is equal to 2, 3 or 4 times number of CPU(s)";
    }

    if ( get_wsrep_option('gcs.fc_limit') !=
        get_wsrep_option('wsrep_slave_threads') * 5 )
    {
        badprint "gcs.fc_limit should be equal to 5 * wsrep_slave_threads";
        push @adjvars, "gcs.fc_limit= wsrep_slave_threads * 5";
    }
    else {
        goodprint "gcs.fc_limit should be equal to 5 * wsrep_slave_threads";
    }

    if ( get_wsrep_option('wsrep_slave_threads') > 1 ) {
        infoprint
          "wsrep parallel slave can cause frequent inconsistency crash.";
        push @adjvars,
"Set wsrep_slave_threads to 1 in case of HA_ERR_FOUND_DUPP_KEY crash on slave";

        # check options for parallel slave
        if ( get_wsrep_option('wsrep_slave_FK_checks') eq "OFF" ) {
            badprint "wsrep_slave_FK_checks is off with parallel slave";
            push @adjvars,
              "wsrep_slave_FK_checks should be ON when using parallel slave";
        }

        # wsrep_slave_UK_checks seems useless in MySQL source code
        if ( $myvar{'innodb_autoinc_lock_mode'} != 2 ) {
            badprint
              "innodb_autoinc_lock_mode is incorrect with parallel slave";
            push @adjvars,
              "innodb_autoinc_lock_mode should be 2 when using parallel slave";
        }
    }

    # NOTE(review): this repeats the gcs.fc_limit check done above (reading
    # wsrep_slave_threads from %myvar instead of the provider options);
    # kept so the printed report is unchanged.
    if ( get_wsrep_option('gcs.fc_limit') != $myvar{'wsrep_slave_threads'} * 5 )
    {
        badprint "gcs.fc_limit should be equal to 5 * wsrep_slave_threads";
        push @adjvars, "gcs.fc_limit= wsrep_slave_threads * 5";
    }
    else {
        goodprint "gcs.fc_limit is equal to 5 * wsrep_slave_threads";
    }

    if ( get_wsrep_option('gcs.fc_factor') != 0.8 ) {
        badprint "gcs.fc_factor should be equal to 0.8";
        push @adjvars, "gcs.fc_factor=0.8";
    }
    else {
        goodprint "gcs.fc_factor is equal to 0.8";
    }
    if ( get_wsrep_option('wsrep_flow_control_paused') > 0.02 ) {
        badprint "Fraction of time node pause flow control > 0.02";
    }
    else {
        goodprint
          "Flow control fraction seems to be OK (wsrep_flow_control_paused<=0.02)";
    }

    if ( scalar(@primaryKeysNbTables) > 0 ) {
        badprint "Following table(s) don't have primary key:";
        foreach my $badtable (@primaryKeysNbTables) {
            badprint "\t$badtable";
            push @{ $result{'Tables without PK'} }, $badtable;
        }
    }
    else {
        goodprint "All tables get a primary key";
    }

    # Galera replicates InnoDB tables only; list any other engines.
    my @nonInnoDBTables = select_array(
"select CONCAT(table_schema,CONCAT('.', table_name)) from information_schema.tables where ENGINE <> 'InnoDB' and table_schema not in ('mysql', 'performance_schema', 'information_schema')"
    );
    if ( scalar(@nonInnoDBTables) > 0 ) {
        badprint "Following table(s) are not InnoDB table:";
        push @generalrec,
          "Ensure that all table(s) are InnoDB tables for Galera replication";
        foreach my $badtable (@nonInnoDBTables) {
            badprint "\t$badtable";
        }
    }
    else {
        goodprint "All tables are InnoDB tables";
    }
    if ( $myvar{'binlog_format'} ne 'ROW' ) {
        badprint "Binlog format should be in ROW mode.";
        push @adjvars, "binlog_format = ROW";
    }
    else {
        goodprint "Binlog format is in ROW mode.";
    }
    if ( $myvar{'innodb_flush_log_at_trx_commit'} != 0 ) {
        badprint "InnoDB flush log at each commit should be disabled.";
        push @adjvars, "innodb_flush_log_at_trx_commit = 0";
    }
    else {
        goodprint "InnoDB flush log at each commit is disabled for Galera.";
    }

    infoprint "Read consistency mode :" . $myvar{'wsrep_causal_reads'};

    if ( defined( $myvar{'wsrep_cluster_name'} )
        and $myvar{'wsrep_on'} eq "ON" )
    {
        goodprint "Galera WsREP is enabled.";
        if ( defined( $myvar{'wsrep_cluster_address'} )
            and trim("$myvar{'wsrep_cluster_address'}") ne "" )
        {
            goodprint "Galera Cluster address is defined: "
              . $myvar{'wsrep_cluster_address'};
            my @NodesTmp = split /,/, $myvar{'wsrep_cluster_address'};
            my $nbNodes = @NodesTmp;
            infoprint "There are $nbNodes nodes in wsrep_cluster_address";
            my $nbNodesSize = trim( $mystat{'wsrep_cluster_size'} );
            if ( $nbNodesSize == 3 or $nbNodesSize == 5 ) {
                goodprint "There are $nbNodesSize nodes in wsrep_cluster_size.";
            }
            else {
                badprint
"There are $nbNodesSize nodes in wsrep_cluster_size. Prefer 3 or 5 nodes architecture.";
                push @generalrec, "Prefer 3 or 5 nodes architecture.";
            }

            # wsrep_cluster_address doesn't include garbd nodes
            if ( $nbNodes > $nbNodesSize ) {
                badprint
"All cluster nodes are not detected. wsrep_cluster_size less then node count in wsrep_cluster_address";
            }
            else {
                goodprint "All cluster nodes detected.";
            }
        }
        else {
            badprint "Galera Cluster address is undefined";
            push @adjvars,
              "set up wsrep_cluster_address variable for Galera replication";
        }
        if ( defined( $myvar{'wsrep_cluster_name'} )
            and trim( $myvar{'wsrep_cluster_name'} ) ne "" )
        {
            goodprint "Galera Cluster name is defined: "
              . $myvar{'wsrep_cluster_name'};
        }
        else {
            badprint "Galera Cluster name is undefined";
            push @adjvars,
              "set up wsrep_cluster_name variable for Galera replication";
        }
        if ( defined( $myvar{'wsrep_node_name'} )
            and trim( $myvar{'wsrep_node_name'} ) ne "" )
        {
            goodprint "Galera Node name is defined: "
              . $myvar{'wsrep_node_name'};
        }
        else {
            badprint "Galera node name is undefined";
            push @adjvars,
              "set up wsrep_node_name variable for Galera replication";
        }
        if ( trim( $myvar{'wsrep_notify_cmd'} ) ne "" ) {
            goodprint "Galera Notify command is defined.";
        }
        else {
            badprint "Galera Notify command is not defined.";
            push( @adjvars, "set up parameter wsrep_notify_cmd to be notify" );
        }
        if ( trim( $myvar{'wsrep_sst_method'} ) !~ "^xtrabackup.*"
            and trim( $myvar{'wsrep_sst_method'} ) !~ "^mariabackup" )
        {
            badprint "Galera SST method is not xtrabackup based.";
            push( @adjvars,
                "set up parameter wsrep_sst_method to xtrabackup based parameter"
            );
        }
        else {
            goodprint "SST Method is based on xtrabackup.";
        }

        # Variable name casing differs between versions; check both.
        if (
            (
                defined( $myvar{'wsrep_OSU_method'} )
                && trim( $myvar{'wsrep_OSU_method'} ) eq "TOI"
            )
            || ( defined( $myvar{'wsrep_osu_method'} )
                && trim( $myvar{'wsrep_osu_method'} ) eq "TOI" )
          )
        {
            goodprint "TOI is default mode for upgrade.";
        }
        else {
            badprint "Schema upgrade are not replicated automatically";
            push( @adjvars, "set up parameter wsrep_OSU_method to TOI" );
        }
        infoprint "Max WsRep message : "
          . hr_bytes( $myvar{'wsrep_max_ws_size'} );
    }
    else {
        badprint "Galera WsREP is disabled";
    }

    if ( defined( $mystat{'wsrep_connected'} )
        and $mystat{'wsrep_connected'} eq "ON" )
    {
        goodprint "Node is connected";
    }
    else {
        badprint "Node is disconnected";
    }
    if ( defined( $mystat{'wsrep_ready'} ) and $mystat{'wsrep_ready'} eq "ON" )
    {
        goodprint "Node is ready";
    }
    else {
        badprint "Node is not ready";
    }
    infoprint "Cluster status :" . $mystat{'wsrep_cluster_status'};
    if ( defined( $mystat{'wsrep_cluster_status'} )
        and $mystat{'wsrep_cluster_status'} eq "Primary" )
    {
        goodprint "Galera cluster is consistent and ready for operations";
    }
    else {
        badprint "Cluster is not consistent and ready";
    }
    if ( $mystat{'wsrep_local_state_uuid'} eq
        $mystat{'wsrep_cluster_state_uuid'} )
    {
        goodprint "Node and whole cluster at the same level: "
          . $mystat{'wsrep_cluster_state_uuid'};
    }
    else {
        badprint "Node and whole cluster not the same level";
        infoprint "Node state uuid: " . $mystat{'wsrep_local_state_uuid'};
        infoprint "Cluster state uuid: " . $mystat{'wsrep_cluster_state_uuid'};
    }
    if ( $mystat{'wsrep_local_state_comment'} eq 'Synced' ) {
        goodprint "Node is synced with whole cluster.";
    }
    else {
        badprint "Node is not synced";
        infoprint "Node State : " . $mystat{'wsrep_local_state_comment'};
    }
    if ( $mystat{'wsrep_local_cert_failures'} == 0 ) {
        goodprint "There is no certification failures detected.";
    }
    else {
        badprint "There is "
          . $mystat{'wsrep_local_cert_failures'}
          . " certification failure(s)detected.";
    }

    for my $key ( keys %mystat ) {
        if ( $key =~ /wsrep_|galera/i ) {
            debugprint "WSREP: $key = $mystat{$key}";
        }
    }
    debugprint Dumper get_wsrep_options();
}
# Recommendations for InnoDB
sub mysql_innodb {
subheaderprint "InnoDB Metrics";
# InnoDB
unless ( defined $myvar{'have_innodb'}
&& $myvar{'have_innodb'} eq "YES"
&& defined $enginestats{'InnoDB'} )
{
infoprint "InnoDB is disabled.";
if ( mysql_version_ge( 5, 5 ) ) {
badprint
"InnoDB Storage engine is disabled. InnoDB is the default storage engine";
}
return;
}
infoprint "InnoDB is enabled.";
if ( $opt{buffers} ne 0 ) {
infoprint "InnoDB Buffers";
if ( defined $myvar{'innodb_buffer_pool_size'} ) {
infoprint " +-- InnoDB Buffer Pool: "
. hr_bytes( $myvar{'innodb_buffer_pool_size'} ) . "";
}
if ( defined $myvar{'innodb_buffer_pool_instances'} ) {
infoprint " +-- InnoDB Buffer Pool Instances: "
. $myvar{'innodb_buffer_pool_instances'} . "";
}
if ( defined $myvar{'innodb_buffer_pool_chunk_size'} ) {
infoprint " +-- InnoDB Buffer Pool Chunk Size: "
. hr_bytes( $myvar{'innodb_buffer_pool_chunk_size'} ) . "";
}
if ( defined $myvar{'innodb_additional_mem_pool_size'} ) {
infoprint " +-- InnoDB Additional Mem Pool: "
. hr_bytes( $myvar{'innodb_additional_mem_pool_size'} ) . "";
}
if ( defined $myvar{'innodb_log_file_size'} ) {
infoprint " +-- InnoDB Log File Size: "
. hr_bytes( $myvar{'innodb_log_file_size'} );
}
if ( defined $myvar{'innodb_log_files_in_group'} ) {
infoprint " +-- InnoDB Log File In Group: "
. $myvar{'innodb_log_files_in_group'};
}
if ( defined $myvar{'innodb_log_files_in_group'} ) {
infoprint " +-- InnoDB Total Log File Size: "
. hr_bytes( $myvar{'innodb_log_files_in_group'} *
$myvar{'innodb_log_file_size'} )
. "("
. $mycalc{'innodb_log_size_pct'}
. " % of buffer pool)";
}
if ( defined $myvar{'innodb_log_buffer_size'} ) {
infoprint " +-- InnoDB Log Buffer: "
. hr_bytes( $myvar{'innodb_log_buffer_size'} );
}
if ( defined $mystat{'Innodb_buffer_pool_pages_free'} ) {
infoprint " +-- InnoDB Log Buffer Free: "
. hr_bytes( $mystat{'Innodb_buffer_pool_pages_free'} ) . "";
}
if ( defined $mystat{'Innodb_buffer_pool_pages_total'} ) {
infoprint " +-- InnoDB Log Buffer Used: "
. hr_bytes( $mystat{'Innodb_buffer_pool_pages_total'} ) . "";
}
}
if ( defined $myvar{'innodb_thread_concurrency'} ) {
infoprint "InnoDB Thread Concurrency: "
. $myvar{'innodb_thread_concurrency'};
}
# InnoDB Buffer Pool Size
if ( $myvar{'innodb_file_per_table'} eq "ON" ) {
goodprint "InnoDB File per table is activated";
}
else {
badprint "InnoDB File per table is not activated";
push( @adjvars, "innodb_file_per_table=ON" );
}
# InnoDB Buffer Pool Size
if ( $myvar{'innodb_buffer_pool_size'} > $enginestats{'InnoDB'} ) {
goodprint "InnoDB buffer pool / data size: "
. hr_bytes( $myvar{'innodb_buffer_pool_size'} ) . "/"
. hr_bytes( $enginestats{'InnoDB'} ) . "";
}
else {
badprint "InnoDB buffer pool / data size: "
. hr_bytes( $myvar{'innodb_buffer_pool_size'} ) . "/"
. hr_bytes( $enginestats{'InnoDB'} ) . "";
push( @adjvars,
"innodb_buffer_pool_size (>= "
. hr_bytes( $enginestats{'InnoDB'} )
. ") if possible." );
}
if ( $mycalc{'innodb_log_size_pct'} < 20
or $mycalc{'innodb_log_size_pct'} > 30 )
{
badprint "Ratio InnoDB log file size / InnoDB Buffer pool size ("
. $mycalc{'innodb_log_size_pct'} . " %): "
. hr_bytes( $myvar{'innodb_log_file_size'} ) . " * "
. $myvar{'innodb_log_files_in_group'} . "/"
. hr_bytes( $myvar{'innodb_buffer_pool_size'} )
. " should be equal 25%";
push(
@adjvars,
"innodb_log_file_size should be (="
. hr_bytes_rnd(
$myvar{'innodb_buffer_pool_size'} /
$myvar{'innodb_log_files_in_group'} / 4
)
. ") if possible, so InnoDB total log files size equals to 25% of buffer pool size."
);
push( @generalrec,
"Before changing innodb_log_file_size and/or innodb_log_files_in_group read this: http://bit.ly/2wgkDvS"
);
}
else {
goodprint "Ratio InnoDB log file size / InnoDB Buffer pool size: "
. hr_bytes( $myvar{'innodb_log_file_size'} ) . " * "
. $myvar{'innodb_log_files_in_group'} . "/"
. hr_bytes( $myvar{'innodb_buffer_pool_size'} )
. " should be equal 25%";
}
# InnoDB Buffer Pool Instances (MySQL 5.6.6+)
if ( defined( $myvar{'innodb_buffer_pool_instances'} ) ) {
# Bad Value if > 64
if ( $myvar{'innodb_buffer_pool_instances'} > 64 ) {
badprint "InnoDB buffer pool instances: "
. $myvar{'innodb_buffer_pool_instances'} . "";
push( @adjvars, "innodb_buffer_pool_instances (<= 64)" );
}
# InnoDB Buffer Pool Size > 1Go
if ( $myvar{'innodb_buffer_pool_size'} > 1024 * 1024 * 1024 ) {
# InnoDB Buffer Pool Size / 1Go = InnoDB Buffer Pool Instances limited to 64 max.
# InnoDB Buffer Pool Size > 64Go
my $max_innodb_buffer_pool_instances =
int( $myvar{'innodb_buffer_pool_size'} / ( 1024 * 1024 * 1024 ) );
$max_innodb_buffer_pool_instances = 64
if ( $max_innodb_buffer_pool_instances > 64 );
if ( $myvar{'innodb_buffer_pool_instances'} !=
$max_innodb_buffer_pool_instances )
{
badprint "InnoDB buffer pool instances: "
. $myvar{'innodb_buffer_pool_instances'} . "";
push( @adjvars,
"innodb_buffer_pool_instances(="
. $max_innodb_buffer_pool_instances
. ")" );
}
else {
goodprint "InnoDB buffer pool instances: "
. $myvar{'innodb_buffer_pool_instances'} . "";
}
# InnoDB Buffer Pool Size < 1Go
}
else {
if ( $myvar{'innodb_buffer_pool_instances'} != 1 ) {
badprint
"InnoDB buffer pool <= 1G and Innodb_buffer_pool_instances(!=1).";
push( @adjvars, "innodb_buffer_pool_instances (=1)" );
}
else {
goodprint "InnoDB buffer pool instances: "
. $myvar{'innodb_buffer_pool_instances'} . "";
}
}
}
# InnoDB Used Buffer Pool Size vs CHUNK size
if ( !defined( $myvar{'innodb_buffer_pool_chunk_size'} ) ) {
infoprint
"InnoDB Buffer Pool Chunk Size not used or defined in your version";
}
else {
infoprint "Number of InnoDB Buffer Pool Chunk : "
. int( $myvar{'innodb_buffer_pool_size'} ) /
int( $myvar{'innodb_buffer_pool_chunk_size'} ) . " for "
. $myvar{'innodb_buffer_pool_instances'}
. " Buffer Pool Instance(s)";
if (
int( $myvar{'innodb_buffer_pool_size'} ) % (
int( $myvar{'innodb_buffer_pool_chunk_size'} ) *
int( $myvar{'innodb_buffer_pool_instances'} )
) eq 0
)
{
goodprint
"Innodb_buffer_pool_size aligned with Innodb_buffer_pool_chunk_size & Innodb_buffer_pool_instances";
}
else {
badprint
"Innodb_buffer_pool_size aligned with Innodb_buffer_pool_chunk_size & Innodb_buffer_pool_instances";
#push( @adjvars, "Adjust innodb_buffer_pool_instances, innodb_buffer_pool_chunk_size with innodb_buffer_pool_size" );
push( @adjvars,
"innodb_buffer_pool_size must always be equal to or a multiple of innodb_buffer_pool_chunk_size * innodb_buffer_pool_instances"
);
}
}
# InnoDB Read efficiency
if ( defined $mycalc{'pct_read_efficiency'}
&& $mycalc{'pct_read_efficiency'} < 90 )
{
badprint "InnoDB Read buffer efficiency: "
. $mycalc{'pct_read_efficiency'} . "% ("
. ( $mystat{'Innodb_buffer_pool_read_requests'} -
$mystat{'Innodb_buffer_pool_reads'} )
. " hits/ "
. $mystat{'Innodb_buffer_pool_read_requests'}
. " total)";
}
else {
goodprint "InnoDB Read buffer efficiency: "
. $mycalc{'pct_read_efficiency'} . "% ("
. ( $mystat{'Innodb_buffer_pool_read_requests'} -
$mystat{'Innodb_buffer_pool_reads'} )
. " hits/ "
. $mystat{'Innodb_buffer_pool_read_requests'}
. " total)";
}
# InnoDB Write efficiency
if ( defined $mycalc{'pct_write_efficiency'}
&& $mycalc{'pct_write_efficiency'} < 90 )
{
badprint "InnoDB Write Log efficiency: "
. abs( $mycalc{'pct_write_efficiency'} ) . "% ("
. abs( $mystat{'Innodb_log_write_requests'} -
$mystat{'Innodb_log_writes'} )
. " hits/ "
. $mystat{'Innodb_log_write_requests'}
. " total)";
}
else {
goodprint "InnoDB Write log efficiency: "
. $mycalc{'pct_write_efficiency'} . "% ("
. ( $mystat{'Innodb_log_write_requests'} -
$mystat{'Innodb_log_writes'} )
. " hits/ "
. $mystat{'Innodb_log_write_requests'}
. " total)";
}
# InnoDB Log Waits
if ( defined $mystat{'Innodb_log_waits'}
&& $mystat{'Innodb_log_waits'} > 0 )
{
badprint "InnoDB log waits: "
. percentage( $mystat{'Innodb_log_waits'},
$mystat{'Innodb_log_writes'} )
. "% ("
. $mystat{'Innodb_log_waits'}
. " waits / "
. $mystat{'Innodb_log_writes'}
. " writes)";
push( @adjvars,
"innodb_log_buffer_size (>= "
. hr_bytes_rnd( $myvar{'innodb_log_buffer_size'} )
. ")" );
}
else {
goodprint "InnoDB log waits: "
. percentage( $mystat{'Innodb_log_waits'},
$mystat{'Innodb_log_writes'} )
. "% ("
. $mystat{'Innodb_log_waits'}
. " waits / "
. $mystat{'Innodb_log_writes'}
. " writes)";
}
$result{'Calculations'} = {%mycalc};
}
# Check whether InnoDB recomputes statistics while INFORMATION_SCHEMA is
# queried (innodb_stats_on_metadata = ON), which slows down this analysis.
# When it is ON, recommend and immediately apply OFF for this session.
# Returns 1 if the setting had to be disabled, 0 otherwise.
sub check_metadata_perf {
    subheaderprint "Analysis Performance Metrics";
    my $stats_on_metadata = $myvar{'innodb_stats_on_metadata'};
    infoprint "innodb_stats_on_metadata: " . $stats_on_metadata;
    unless ( $stats_on_metadata eq 'ON' ) {
        goodprint "No stat updates during querying INFORMATION_SCHEMA.";
        return 0;
    }
    badprint "Stat are updated during querying INFORMATION_SCHEMA.";
    push @adjvars, "SET innodb_stats_on_metadata = OFF";

    # Turn the setting off on the server so the remaining metrics queries
    # are not slowed down by statistics refreshes.
    select_one("SET GLOBAL innodb_stats_on_metadata = OFF;");
    return 1;
}
# Recommendations for Database metrics

# Report size/row/collation/engine statistics for every user database
# (everything except mysql/performance_schema/information_schema/sys),
# first aggregated, then per database, and flag databases that mix
# storage engines, collations, or column charsets. Results are also
# recorded in %result. Skipped when --dbstat is off or MySQL < 5.5
# (information_schema columns used here are missing before that).
sub mysql_databases {
    return if ( $opt{dbstat} == 0 );
    subheaderprint "Database Metrics";
    unless ( mysql_version_ge( 5, 5 ) ) {
        infoprint
"Skip Database metrics from information schema missing in this version";
        return;
    }
    my @dblist = select_array(
"SELECT DISTINCT TABLE_SCHEMA FROM information_schema.TABLES WHERE TABLE_SCHEMA NOT IN ( 'mysql', 'performance_schema', 'information_schema', 'sys' );"
    );
    infoprint "There is " . scalar(@dblist) . " Database(s).";
    # One row of aggregate counters for all user databases combined.
    my @totaldbinfo = split /\s/,
      select_one(
"SELECT SUM(TABLE_ROWS), SUM(DATA_LENGTH), SUM(INDEX_LENGTH) , SUM(DATA_LENGTH+INDEX_LENGTH), COUNT(TABLE_NAME),COUNT(DISTINCT(TABLE_COLLATION)),COUNT(DISTINCT(ENGINE)) FROM information_schema.TABLES WHERE TABLE_SCHEMA NOT IN ( 'mysql', 'performance_schema', 'information_schema', 'sys' );"
      );
    infoprint "All User Databases:";
    infoprint " +-- TABLE : "
      . ( $totaldbinfo[4] eq 'NULL' ? 0 : $totaldbinfo[4] ) . "";
    infoprint " +-- ROWS : "
      . ( $totaldbinfo[0] eq 'NULL' ? 0 : $totaldbinfo[0] ) . "";
    infoprint " +-- DATA : "
      . hr_bytes( $totaldbinfo[1] ) . "("
      . percentage( $totaldbinfo[1], $totaldbinfo[3] ) . "%)";
    infoprint " +-- INDEX : "
      . hr_bytes( $totaldbinfo[2] ) . "("
      . percentage( $totaldbinfo[2], $totaldbinfo[3] ) . "%)";
    infoprint " +-- SIZE : " . hr_bytes( $totaldbinfo[3] ) . "";
    infoprint " +-- COLLA : "
      . ( $totaldbinfo[5] eq 'NULL' ? 0 : $totaldbinfo[5] ) . " ("
      . (
        join ", ",
        select_array(
            "SELECT DISTINCT(TABLE_COLLATION) FROM information_schema.TABLES;")
      ) . ")";
    infoprint " +-- ENGIN : "
      . ( $totaldbinfo[6] eq 'NULL' ? 0 : $totaldbinfo[6] ) . " ("
      . (
        join ", ",
        select_array("SELECT DISTINCT(ENGINE) FROM information_schema.TABLES;")
      ) . ")";

    # Mirror the aggregates into the machine-readable result structure.
    $result{'Databases'}{'All databases'}{'Rows'} =
      ( $totaldbinfo[0] eq 'NULL' ? 0 : $totaldbinfo[0] );
    $result{'Databases'}{'All databases'}{'Data Size'} = $totaldbinfo[1];
    $result{'Databases'}{'All databases'}{'Data Pct'} =
      percentage( $totaldbinfo[1], $totaldbinfo[3] ) . "%";
    $result{'Databases'}{'All databases'}{'Index Size'} = $totaldbinfo[2];
    $result{'Databases'}{'All databases'}{'Index Pct'} =
      percentage( $totaldbinfo[2], $totaldbinfo[3] ) . "%";
    $result{'Databases'}{'All databases'}{'Total Size'} = $totaldbinfo[3];
    print "\n" unless ( $opt{'silent'} or $opt{'json'} );

    # Per-database pass: same counters restricted to one schema.
    foreach (@dblist) {
        my @dbinfo = split /\s/,
          select_one(
"SELECT TABLE_SCHEMA, SUM(TABLE_ROWS), SUM(DATA_LENGTH), SUM(INDEX_LENGTH) , SUM(DATA_LENGTH+INDEX_LENGTH), COUNT(DISTINCT ENGINE),COUNT(TABLE_NAME),COUNT(DISTINCT(TABLE_COLLATION)),COUNT(DISTINCT(ENGINE)) FROM information_schema.TABLES WHERE TABLE_SCHEMA='$_' GROUP BY TABLE_SCHEMA ORDER BY TABLE_SCHEMA"
          );
        next unless defined $dbinfo[0];
        infoprint "Database: " . $dbinfo[0] . "";
        infoprint " +-- TABLE: "
          . ( !defined( $dbinfo[6] ) or $dbinfo[6] eq 'NULL' ? 0 : $dbinfo[6] )
          . "";
        infoprint " +-- COLL : "
          . ( $dbinfo[7] eq 'NULL' ? 0 : $dbinfo[7] ) . " ("
          . (
            join ", ",
            select_array(
"SELECT DISTINCT(TABLE_COLLATION) FROM information_schema.TABLES WHERE TABLE_SCHEMA='$_';"
            )
          ) . ")";
        infoprint " +-- ROWS : "
          . ( !defined( $dbinfo[1] ) or $dbinfo[1] eq 'NULL' ? 0 : $dbinfo[1] )
          . "";
        infoprint " +-- DATA : "
          . hr_bytes( $dbinfo[2] ) . "("
          . percentage( $dbinfo[2], $dbinfo[4] ) . "%)";
        infoprint " +-- INDEX: "
          . hr_bytes( $dbinfo[3] ) . "("
          . percentage( $dbinfo[3], $dbinfo[4] ) . "%)";
        infoprint " +-- TOTAL: " . hr_bytes( $dbinfo[4] ) . "";
        infoprint " +-- ENGIN : "
          . ( $dbinfo[8] eq 'NULL' ? 0 : $dbinfo[8] ) . " ("
          . (
            join ", ",
            select_array(
"SELECT DISTINCT(ENGINE) FROM information_schema.TABLES WHERE TABLE_SCHEMA='$_'"
            )
          ) . ")";
        badprint "Index size is larger than data size for $dbinfo[0] \n"
          if ( $dbinfo[2] ne 'NULL' )
          and ( $dbinfo[3] ne 'NULL' )
          and ( $dbinfo[2] < $dbinfo[3] );
        badprint "There are " . $dbinfo[5] . " storage engines. Be careful. \n"
          if $dbinfo[5] > 1;
        $result{'Databases'}{ $dbinfo[0] }{'Rows'} = $dbinfo[1];
        $result{'Databases'}{ $dbinfo[0] }{'Tables'} = $dbinfo[6];
        $result{'Databases'}{ $dbinfo[0] }{'Collations'} = $dbinfo[7];
        $result{'Databases'}{ $dbinfo[0] }{'Data Size'} = $dbinfo[2];
        $result{'Databases'}{ $dbinfo[0] }{'Data Pct'} =
          percentage( $dbinfo[2], $dbinfo[4] ) . "%";
        $result{'Databases'}{ $dbinfo[0] }{'Index Size'} = $dbinfo[3];
        $result{'Databases'}{ $dbinfo[0] }{'Index Pct'} =
          percentage( $dbinfo[3], $dbinfo[4] ) . "%";
        $result{'Databases'}{ $dbinfo[0] }{'Total Size'} = $dbinfo[4];

        # A schema should use one collation and one engine across its tables.
        if ( $dbinfo[7] > 1 ) {
            badprint $dbinfo[7]
              . " different collations for database "
              . $dbinfo[0];
            push( @generalrec,
                "Check all table collations are identical for all tables in "
                  . $dbinfo[0]
                  . " database." );
        }
        else {
            goodprint $dbinfo[7]
              . " collation for "
              . $dbinfo[0]
              . " database.";
        }
        if ( $dbinfo[8] > 1 ) {
            badprint $dbinfo[8]
              . " different engines for database "
              . $dbinfo[0];
            push( @generalrec,
                "Check all table engines are identical for all tables in "
                  . $dbinfo[0]
                  . " database." );
        }
        else {
            goodprint $dbinfo[8] . " engine for " . $dbinfo[0] . " database.";
        }

        # Text-like columns should also agree on charset and collation.
        my @distinct_column_charset = select_array(
"select DISTINCT(CHARACTER_SET_NAME) from information_schema.COLUMNS where CHARACTER_SET_NAME IS NOT NULL AND TABLE_SCHEMA ='$_'"
        );
        infoprint "Charsets for $dbinfo[0] database table column: "
          . join( ', ', @distinct_column_charset );
        if ( scalar(@distinct_column_charset) > 1 ) {
            badprint $dbinfo[0]
              . " table column(s) has several charsets defined for all text like column(s).";
            push( @generalrec,
                "Limit charset for column to one charset if possible for "
                  . $dbinfo[0]
                  . " database." );
        }
        else {
            goodprint $dbinfo[0]
              . " table column(s) has same charset defined for all text like column(s).";
        }
        my @distinct_column_collation = select_array(
"select DISTINCT(COLLATION_NAME) from information_schema.COLUMNS where COLLATION_NAME IS NOT NULL AND TABLE_SCHEMA ='$_'"
        );
        infoprint "Collations for $dbinfo[0] database table column: "
          . join( ', ', @distinct_column_collation );
        if ( scalar(@distinct_column_collation) > 1 ) {
            badprint $dbinfo[0]
              . " table column(s) has several collations defined for all text like column(s).";
            push( @generalrec,
                "Limit collations for column to one collation if possible for "
                  . $dbinfo[0]
                  . " database." );
        }
        else {
            goodprint $dbinfo[0]
              . " table column(s) has same collation defined for all text like column(s).";
        }
    }
}
# Recommendations for database columns

# For every column of every base table in the user schemas, compare the
# declared column type against the type suggested by
# PROCEDURE ANALYSE over a 100000-row sample, and push an ALTER TABLE
# recommendation when they differ. Skipped when --tbstat is off or
# MySQL < 5.5. NOTE(review): PROCEDURE ANALYSE was removed in MySQL 8.0,
# so the "Optimal Fieldtype" lookup presumably returns nothing there —
# handled by the "Not available" branch below; confirm on 8.0 servers.
sub mysql_tables {
    return if ( $opt{tbstat} == 0 );
    subheaderprint "Table Column Metrics";
    unless ( mysql_version_ge( 5, 5 ) ) {
        infoprint
"Skip Database metrics from information schema missing in this version";
        return;
    }
    my @dblist = select_array(
"SELECT DISTINCT TABLE_SCHEMA FROM information_schema.TABLES WHERE TABLE_SCHEMA NOT IN ( 'mysql', 'performance_schema', 'information_schema', 'sys' );"
    );
    foreach (@dblist) {
        my $dbname = $_;
        next unless defined $_;
        infoprint "Database: " . $_ . "";
        my @dbtable = select_array(
"SELECT TABLE_NAME FROM information_schema.TABLES WHERE TABLE_SCHEMA='$dbname' AND TABLE_TYPE='BASE TABLE' ORDER BY TABLE_NAME"
        );
        foreach (@dbtable) {
            my $tbname = $_;
            infoprint " +-- TABLE: $tbname";
            my @tbcol = select_array(
"SELECT COLUMN_NAME FROM information_schema.COLUMNS WHERE TABLE_SCHEMA='$dbname' AND TABLE_NAME='$tbname'"
            );
            foreach (@tbcol) {
                # Declared type and nullability as stored in the catalog.
                my $ctype = select_one(
"SELECT COLUMN_TYPE FROM information_schema.COLUMNS WHERE TABLE_SCHEMA='$dbname' AND TABLE_NAME='$tbname' AND COLUMN_NAME='$_' "
                );
                my $isnull = select_one(
"SELECT IS_NULLABLE FROM information_schema.COLUMNS WHERE TABLE_SCHEMA='$dbname' AND TABLE_NAME='$tbname' AND COLUMN_NAME='$_' "
                );
                infoprint " +-- Column $tbname.$_:";
                my $current_type =
                  uc($ctype) . ( $isnull eq 'NO' ? " NOT NULL" : "" );
                # Type suggested by the server from sampled column values.
                my $optimal_type = select_str_g( "Optimal_fieldtype",
"SELECT $_ FROM $dbname.$tbname PROCEDURE ANALYSE(100000)"
                );
                if ( not defined($optimal_type) or $optimal_type eq '' ) {
                    infoprint " Current Fieldtype: $current_type";
                    infoprint " Optimal Fieldtype: Not available";
                }
                elsif ( $current_type ne $optimal_type ) {
                    infoprint " Current Fieldtype: $current_type";
                    infoprint " Optimal Fieldtype: $optimal_type";
                    badprint
"Consider changing type for column $_ in table $dbname.$tbname";
                    push( @generalrec,
"ALTER TABLE $dbname.$tbname MODIFY $_ $optimal_type;"
                    );
                }
                else {
                    goodprint "$dbname.$tbname ($_) type: $current_type";
                }
            }
        }
    }
}
# Recommendations for Indexes metrics

# Report the ten worst-selectivity indexes (cardinality vs. estimated row
# count from information_schema) and, when performance_schema is enabled,
# list non-PRIMARY indexes that have never been read so they can be
# dropped. Skipped when --idxstat is off or MySQL < 5.5.
sub mysql_indexes {
    return if ( $opt{idxstat} == 0 );
    subheaderprint "Indexes Metrics";
    unless ( mysql_version_ge( 5, 5 ) ) {
        infoprint
"Skip Index metrics from information schema missing in this version";
        return;
    }

#    unless ( mysql_version_ge( 5, 6 ) ) {
#        infoprint
#"Skip Index metrics from information schema due to erroneous information provided in this version";
#        return;
#    }
    # Selectivity = cardinality / estimated rows (as a percentage); an index
    # whose ratio is below 8 discriminates poorly and is a tuning candidate.
    my $selIdxReq = <<'ENDSQL';
SELECT
  CONCAT(CONCAT(t.TABLE_SCHEMA, '.'),t.TABLE_NAME) AS 'table'
 , CONCAT(CONCAT(CONCAT(s.INDEX_NAME, '('),s.COLUMN_NAME), ')') AS 'index'
 , s.SEQ_IN_INDEX AS 'seq'
 , s2.max_columns AS 'maxcol'
 , s.CARDINALITY AS 'card'
 , t.TABLE_ROWS AS 'est_rows'
 , INDEX_TYPE as type
 , ROUND(((s.CARDINALITY / IFNULL(t.TABLE_ROWS, 0.01)) * 100), 2) AS 'sel'
FROM INFORMATION_SCHEMA.STATISTICS s
 INNER JOIN INFORMATION_SCHEMA.TABLES t
  ON s.TABLE_SCHEMA = t.TABLE_SCHEMA
   AND s.TABLE_NAME = t.TABLE_NAME
 INNER JOIN (
  SELECT
     TABLE_SCHEMA
   , TABLE_NAME
   , INDEX_NAME
   , MAX(SEQ_IN_INDEX) AS max_columns
  FROM INFORMATION_SCHEMA.STATISTICS
  WHERE TABLE_SCHEMA NOT IN ('mysql', 'information_schema', 'performance_schema')
  AND INDEX_TYPE <> 'FULLTEXT'
  GROUP BY TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
 ) AS s2
 ON s.TABLE_SCHEMA = s2.TABLE_SCHEMA
  AND s.TABLE_NAME = s2.TABLE_NAME
  AND s.INDEX_NAME = s2.INDEX_NAME
WHERE t.TABLE_SCHEMA NOT IN ('mysql', 'information_schema', 'performance_schema')
AND t.TABLE_ROWS > 10
AND s.CARDINALITY IS NOT NULL
AND (s.CARDINALITY / IFNULL(t.TABLE_ROWS, 0.01)) < 8.00
ORDER BY sel
LIMIT 10;
ENDSQL
    my @idxinfo = select_array($selIdxReq);
    infoprint "Worst selectivity indexes:";
    foreach (@idxinfo) {
        debugprint "$_";
        # Columns come back whitespace-separated in the order of the SELECT.
        my @info = split /\s/;
        infoprint "Index: " . $info[1] . "";
        infoprint " +-- COLUMN : " . $info[0] . "";
        infoprint " +-- NB SEQS : " . $info[2] . " sequence(s)";
        infoprint " +-- NB COLS : " . $info[3] . " column(s)";
        infoprint " +-- CARDINALITY : " . $info[4] . " distinct values";
        infoprint " +-- NB ROWS : " . $info[5] . " rows";
        infoprint " +-- TYPE : " . $info[6];
        infoprint " +-- SELECTIVITY : " . $info[7] . "%";
        $result{'Indexes'}{ $info[1] }{'Column'} = $info[0];
        $result{'Indexes'}{ $info[1] }{'Sequence number'} = $info[2];
        $result{'Indexes'}{ $info[1] }{'Number of column'} = $info[3];
        $result{'Indexes'}{ $info[1] }{'Cardinality'} = $info[4];
        $result{'Indexes'}{ $info[1] }{'Row number'} = $info[5];
        $result{'Indexes'}{ $info[1] }{'Index Type'} = $info[6];
        $result{'Indexes'}{ $info[1] }{'Selectivity'} = $info[7];
        if ( $info[7] < 25 ) {
            badprint "$info[1] has a low selectivity";
        }
    }

    # The unused-index report needs performance_schema instrumentation.
    return
      unless ( defined( $myvar{'performance_schema'} )
        and $myvar{'performance_schema'} eq 'ON' );
    $selIdxReq = <<'ENDSQL';
SELECT CONCAT(CONCAT(object_schema,'.'),object_name) AS 'table', index_name
FROM performance_schema.table_io_waits_summary_by_index_usage
WHERE index_name IS NOT NULL
AND count_star =0
AND index_name <> 'PRIMARY'
AND object_schema != 'mysql'
ORDER BY count_star, object_schema, object_name;
ENDSQL
    @idxinfo = select_array($selIdxReq);
    infoprint "Unused indexes:";
    push( @generalrec, "Remove unused indexes." ) if ( scalar(@idxinfo) > 0 );
    foreach (@idxinfo) {
        debugprint "$_";
        my @info = split /\s/;
        badprint "Index: $info[1] on $info[0] is not used.";
        push @{ $result{'Indexes'}{'Unused Indexes'} },
          $info[0] . "." . $info[1];
    }
}
# Take the two recommendation arrays collected during the run, store them
# in the %result structure, and print them as the closing report section.
sub make_recommendations {
    $result{'Recommendations'} = \@generalrec;
    $result{'Adjust variables'} = \@adjvars;
    subheaderprint "Recommendations";
    my $nb_general = scalar @generalrec;
    my $nb_adjust  = scalar @adjvars;
    if ( $nb_general > 0 ) {
        prettyprint "General recommendations:";
        prettyprint " " . $_ . "" for @generalrec;
    }
    if ( $nb_adjust > 0 ) {
        prettyprint "Variables to adjust:";
        # Warn first when the configured buffers could already exceed RAM.
        if ( $mycalc{'pct_max_physical_memory'} > 90 ) {
            prettyprint
              " *** MySQL's maximum memory usage is dangerously high ***\n"
              . " *** Add RAM before increasing MySQL buffer variables ***";
        }
        prettyprint " " . $_ . "" for @adjvars;
    }
    if ( $nb_general == 0 and $nb_adjust == 0 ) {
        prettyprint "No additional performance recommendations are available.";
    }
}
# Close the global output file handle when one has been opened.
sub close_outputfile {
    if ( defined $fh ) {
        close $fh;
    }
}
# Print the MySQLTuner banner (version, author, project links) at the top
# of the report.
sub headerprint {
    my $banner =
        " >> MySQLTuner $tunerversion - Major Hayden <major\@mhtx.net>\n"
      . " >> Bug reports, feature requests, and downloads at http://mysqltuner.com/\n"
      . " >> Run with '--help' for additional options and output filtering";
    prettyprint $banner;
}
# Write $content (when defined) to $filename, replacing any previous
# contents. Dies when the file cannot be opened or when buffered data
# cannot be flushed at close time, so write failures are never dropped.
# Also echoes the content to the debug log when --debug is active.
sub string2file {
    my $filename = shift;
    my $content  = shift;
    open my $fh, q(>), $filename
      or die
"Unable to open $filename in write mode. Please check permissions for this file or directory";
    print $fh $content if defined($content);

    # BUGFIX: buffered write errors (e.g. disk full) only surface at close
    # time; the original ignored close's return value and lost them.
    close $fh
      or die
"Unable to write $filename. Please check permissions for this file or directory";
    debugprint $content if ( $opt{'debug'} );
}
# Read $filename and return its contents as a list of lines (newlines
# preserved). Dies when the file cannot be opened for reading.
sub file2array {
    my $filename = shift;
    debugprint "* reading $filename" if ( $opt{'debug'} );
    open( my $input, q(<), "$filename" )
      or die "Couldn't open $filename for reading: $!\n";
    my @content = <$input>;
    close($input);
    return @content;
}
# Slurp a whole file into a single string; thin wrapper over file2array.
sub file2string {
    my @lines = file2array(@_);
    return join( '', @lines );
}
# HTML report template: loaded from the --template file when one is given,
# otherwise the minimal built-in page below. Text::Template later expands
# {$data} with a dump of the %result structure (see dump_result).
my $templateModel;
if ( $opt{'template'} ne 0 ) {
    $templateModel = file2string( $opt{'template'} );
}
else {

    # DEFAULT REPORT TEMPLATE
    $templateModel = <<'END_TEMPLATE';
<!DOCTYPE html>
<html>
<head>
  <title>MySQLTuner Report</title>
  <meta charset="UTF-8">
</head>
<body>

<h1>Result output</h1>
<pre>
{$data}
</pre>

</body>
</html>
END_TEMPLATE
}
# Emit the collected %result structure: as an HTML report through
# Text::Template when --reportfile is set, and/or as JSON on stdout (and
# optionally into --outputfile) when --json is set. Both modules are
# require'd lazily so the script still runs without them installed.
sub dump_result {
    debugprint Dumper( \%result ) if ( $opt{'debug'} );
    debugprint "HTML REPORT: $opt{'reportfile'}";

    if ( $opt{'reportfile'} ne 0 ) {
        eval { require Text::Template };
        if ($@) {
            badprint "Text::Template Module is needed.";
            die "Text::Template Module is needed.";
        }

        # {$data} in the template expands to a Data::Dumper dump of %result.
        my $vars = { 'data' => Dumper( \%result ) };

        my $template;
        {
            no warnings 'once';
            $template = Text::Template->new(
                TYPE => 'STRING',
                PREPEND => q{;},
                SOURCE => $templateModel
            ) or die "Couldn't construct template: $Text::Template::ERROR";
        }

        open my $fh, q(>), $opt{'reportfile'}
          or die
"Unable to open $opt{'reportfile'} in write mode. please check permissions for this file or directory";
        $template->fill_in( HASH => $vars, OUTPUT => $fh );
        close $fh;
    }

    if ( $opt{'json'} ne 0 ) {
        eval { require JSON };
        if ($@) {
            print "$bad JSON Module is needed.\n";
            return 1;
        }
        my $json = JSON->new->allow_nonref;
        print $json->utf8(1)->pretty( ( $opt{'prettyjson'} ? 1 : 0 ) )->encode( \%result );

        if ( $opt{'outputfile'} ne 0 ) {
            unlink $opt{'outputfile'} if (-e $opt{'outputfile'});
            open my $fh, q(>), $opt{'outputfile'}
              or die
"Unable to open $opt{'outputfile'} in write mode. please check permissions for this file or directory";
            print $fh $json->utf8(1)->pretty( ( $opt{'prettyjson'} ? 1 : 0 ) )->encode( \%result );
            close $fh;
        }
    }
}
# Locate an executable by name, mimicking which(1).
#   $prog_name   - program to look for.
#   $path_string - optional colon-separated search path; defaults to
#                  $ENV{PATH} when not supplied.
# Returns the full path of the first executable match, or 0 when none.
sub which {
    my $prog_name   = shift;
    my $path_string = shift;

    # BUGFIX: $path_string used to be accepted and then silently ignored —
    # the search always went through $ENV{PATH}. Honor it when provided.
    $path_string = $ENV{'PATH'}
      unless defined($path_string) and $path_string ne '';
    my @path_array = split /:/, $path_string;

    for my $path (@path_array) {
        return "$path/$prog_name" if ( -x "$path/$prog_name" );
    }

    return 0;
}
# ---------------------------------------------------------------------------
# BEGIN 'MAIN'
# ---------------------------------------------------------------------------
headerprint; # Print the banner
validate_tuner_version; # Check for a newer MySQLTuner release
mysql_setup; # Gotta login first
debugprint "MySQL FINAL Client : $mysqlcmd $mysqllogin";
debugprint "MySQL Admin FINAL Client : $mysqladmincmd $mysqllogin";

#exit(0);
os_setup; # Set up some OS variables
get_all_vars; # Toss variables/status into hashes
get_tuning_info; # Get information about the tuning connection
validate_mysql_version; # Check current MySQL version
check_architecture; # Suggest 64-bit upgrade
system_recommendations; # avoid too many services on the same host
log_file_recommendations; # check log file content
check_storage_engines; # Show enabled storage engines
check_metadata_perf; # Show parameter impacting performance during analysis
mysql_databases; # Show information about databases
mysql_tables; # Show information about table columns
mysql_indexes; # Show information about indexes
security_recommendations; # Display some security recommendations
cve_recommendations; # Display related CVE
calculations; # Calculate everything we need
mysql_stats; # Print the server stats
mysqsl_pfs; # Print Performance schema info (note: sub name typo is the real identifier)
mariadb_threadpool; # Print MariaDB ThreadPool stats
mysql_myisam; # Print MyISAM stats
mysql_innodb; # Print InnoDB stats
mariadb_ariadb; # Print MariaDB AriaDB stats
mariadb_tokudb; # Print MariaDB Tokudb stats
mariadb_xtradb; # Print MariaDB XtraDB stats

#mariadb_rockdb; # Print MariaDB RockDB stats
#mariadb_spider; # Print MariaDB Spider stats
#mariadb_connect; # Print MariaDB Connect stats
mariadb_galera; # Print MariaDB Galera Cluster stats
get_replication_status; # Print replication info
make_recommendations; # Make recommendations based on stats
dump_result; # Dump result if debug is on
close_outputfile; # Close reportfile if needed

# ---------------------------------------------------------------------------
# END 'MAIN'
# ---------------------------------------------------------------------------
1;

__END__
=pod
=encoding UTF-8
=head1 NAME
MySQLTuner 1.7.13 - MySQL High Performance Tuning Script
=head1 IMPORTANT USAGE GUIDELINES
To run the script with the default options, run the script without arguments
Allow MySQL server to run for at least 24-48 hours before trusting suggestions
Some routines may require root level privileges (script will provide warnings)
You must provide the remote server's total memory when connecting to other servers
=head1 CONNECTION AND AUTHENTICATION
--host <hostname> Connect to a remote host to perform tests (default: localhost)
--socket <socket> Use a different socket for a local connection
--port <port> Port to use for connection (default: 3306)
--user <username> Username to use for authentication
--userenv <envvar> Name of env variable which contains username to use for authentication
--pass <password> Password to use for authentication
--passenv <envvar> Name of env variable which contains password to use for authentication
--ssl-ca <path> Path to public key
--mysqladmin <path> Path to a custom mysqladmin executable
--mysqlcmd <path> Path to a custom mysql executable
--defaults-file <path> Path to a custom .my.cnf
=head1 PERFORMANCE AND REPORTING OPTIONS
--skipsize Don't enumerate tables and their types/sizes (default: on)
(Recommended for servers with many tables)
--skippassword Don't perform checks on user passwords(default: off)
--checkversion Check for updates to MySQLTuner (default: don't check)
--updateversion Check for updates to MySQLTuner and update when newer version is available (default: don't check)
--forcemem <size> Amount of RAM installed in megabytes
--forceswap <size> Amount of swap memory configured in megabytes
--passwordfile <path> Path to a password file list(one password by line)
=head1 OUTPUT OPTIONS
--silent Don't output anything on screen
--nogood Remove OK responses
--nobad Remove negative/suggestion responses
--noinfo Remove informational responses
--debug Print debug information
--dbstat Print database information
--tbstat Print table information
--notbstat Don't Print table information
--idxstat Print index information
--sysstat Print system information
--pfstat Print Performance schema
--bannedports Ports banned separated by comma(,)
--maxportallowed Number of ports opened allowed on this hosts
--cvefile <path> CVE File for vulnerability checks
--nocolor Don't print output in color
--json Print result as JSON string
--buffers Print global and per-thread buffer values
--outputfile <path> Path to a output txt file
--reportfile <path> Path to a report txt file
--template <path> Path to a template file
--verbose Prints out all options (default: no verbose)
=head1 PERLDOC
You can find documentation for this module with the perldoc command.
perldoc mysqltuner
=head2 INTERNALS
L<https://github.com/major/MySQLTuner-perl/blob/master/INTERNALS.md>
Internal documentation
=head1 AUTHORS
Major Hayden - major@mhtx.net
=head1 CONTRIBUTORS
=over 4
=item *
Matthew Montgomery
=item *
Paul Kehrer
=item *
Dave Burgess
=item *
Jonathan Hinds
=item *
Mike Jackson
=item *
Nils Breunese
=item *
Shawn Ashlee
=item *
Luuk Vosslamber
=item *
Ville Skytta
=item *
Trent Hornibrook
=item *
Jason Gill
=item *
Mark Imbriaco
=item *
Greg Eden
=item *
Aubin Galinotti
=item *
Giovanni Bechis
=item *
Bill Bradford
=item *
Ryan Novosielski
=item *
Michael Scheidell
=item *
Blair Christensen
=item *
Hans du Plooy
=item *
Victor Trac
=item *
Everett Barnes
=item *
Tom Krouper
=item *
Gary Barrueto
=item *
Simon Greenaway
=item *
Adam Stein
=item *
Isart Montane
=item *
Baptiste M.
=item *
Cole Turner
=item *
Major Hayden
=item *
Joe Ashcraft
=item *
Jean-Marie Renouard
=item *
Stephan Großberndt
=item *
Christian Loos
=back
=head1 SUPPORT
Bug reports, feature requests, and downloads at http://mysqltuner.com/
Bug tracker can be found at https://github.com/major/MySQLTuner-perl/issues
Maintained by Major Hayden (major@mhtx.net) - Licensed under GPL
=head1 SOURCE CODE
L<https://github.com/major/MySQLTuner-perl>
git clone https://github.com/major/MySQLTuner-perl.git
=head1 COPYRIGHT AND LICENSE
Copyright (C) 2006-2017 Major Hayden - major@mhtx.net
For the latest updates, please visit http://mysqltuner.com/
Git repository available at http://github.com/major/MySQLTuner-perl
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
=cut
# Local variables:
# indent-tabs-mode: t
# cperl-indent-level: 8
# perl-indent-level: 8
# End:
| bingen/rpi_docker_home_server | images/rpi-mariadb/mysqltuner.pl | Perl | apache-2.0 | 219,803 |
#!/usr/bin/perl
# Generate a test case for the grammar-recognition problem: fully expand a
# small non-recursive context-free grammar to every derivable terminal
# string, save the complete language to all_strings-<N>.txt, and write a
# mixed list of member and random non-member strings to in<N>.
use strict;
use warnings;

srand();

my $testcase = shift;

unless ( defined($testcase) && $testcase =~ /^\d+$/ ) {
    print "USAGE: ./gentestcase.pl <TESTCASE#>\n";
    exit;
}

# Each grammar maps a non-terminal (single uppercase letter) to its
# '|'-separated productions.
# BUGFIX: store hash REFERENCES. The original push(@grammars, %rules) call
# flattened each hash into key/value pairs, so $grammars[$testcase] was a
# single meaningless scalar and "%rules = $grammars[$testcase]" assigned an
# odd-length list.
my @grammars = ();

my %rules1 = qw(
S aBa|bBb|cBc|g
B bCb|cCc|dDd|f
C a|b|c
D d|a|e|E
E hacker
);
push( @grammars, \%rules1 );

my %rules2 = qw(
S aBa|bBb|cBc|dDd
B b
C aDDD|aDD|aDd|aDd|aDD
D d|a|e|E
E f
);
push( @grammars, \%rules2 );

# BUGFIX: clamp to the last grammar actually defined (the original capped
# at 2, one past the end of a two-element array).
$testcase = $#grammars if ( $testcase > $#grammars );

my %rules = %{ $grammars[$testcase] };
my $start = "S";

# Breadth-first expansion: strings still holding non-terminals wait in
# @toresolve; fully terminal strings are deduplicated into %unique.
# (BUGFIX: the original declared an unused @unique while silently using a
# global %unique.)
my @toresolve = ($start);
my %unique;
my $complete  = 0;
while ( $complete == 0 ) {
    $complete = 1;
    my @newresolve = ();
    foreach my $toresolve (@toresolve) {
        # All lowercase means no non-terminals remain: a finished string.
        if ( $toresolve =~ /^[a-z]+$/ ) {
            $unique{$toresolve} = 1;
            next;
        }
        my @subs = split( //, $toresolve );
        foreach my $sub (@subs) {
            foreach my $lrule ( keys %rules ) {
                next unless $lrule eq $sub;
                $complete = 0;
                # Apply every alternative of the matching production to the
                # first occurrence of the non-terminal.
                my @rrules = split( /\|/, $rules{$lrule} );
                foreach my $rrule (@rrules) {
                    my $tmp = $toresolve;
                    $tmp =~ s/$sub/$rrule/;
                    push( @newresolve, $tmp );
                }
            }
        }
    }
    @toresolve = @newresolve;
}

# Write the full language and remember it for sampling "yes" instances.
my @yes = ();
open( my $outfile, '>', "all_strings-$testcase.txt" )
  or die "Cannot write all_strings-$testcase.txt: $!";
foreach my $string ( sort keys %unique ) {
    print $outfile "$string\n";
    print "$string\n";
    push( @yes, $string );
}
close($outfile);

# FILE intentionally stays a package-level handle: it is closed by the
# "close FILE;" statement that follows this section.
open( FILE, '>', "in$testcase" ) or die "Cannot write in$testcase: $!";
foreach my $key ( keys %rules ) {
    print "$key -> $rules{$key}\n";
}

my @terminals = ( 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'hacker' );
for ( my $i = 0 ; $i < int( rand(10) ) + 4 ; $i++ ) {
    my $choice = int( rand(100) ) % 2;
    if ( $choice == 1 ) {
        # Member string sampled from the generated language.
        my $string = $yes[ int( rand(@yes) ) ];
        print FILE "$string\n";
    }
    else {
        # Random concatenation of terminals: usually not in the language.
        my $string = "";
        for ( my $j = 0 ; $j < int( rand(3) ) + 3 ; $j++ ) {
            $string .= $terminals[ int( rand(@terminals) ) ];
        }
        print FILE "$string\n";
    }
}
close FILE; | krux702/crashandcompile | prob/r2_grammar-hard/gentestcase.pl | Perl | bsd-2-clause | 2,093 |
# vim:ts=4
#
# Copyright (c) 2002-2012 Hypertriton, Inc. <http://hypertriton.com/>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Emit the configure-script fragment that detects a working C++ compiler.
# The print'ed heredocs below ARE the generated shell code, so their
# contents must not be edited casually: they probe for cxx/gcc in $PATH,
# compile a hello-world to verify the compiler and discover the executable
# suffix, then (via the Mk* helpers) test warning flags, long double,
# long long, Cygwin, and libtool's --tag=CXX handling.
sub Test
{
	# Look for a C++ compiler.
	# XXX duplicated code between cc/cxx
	print << 'EOF';
if [ "$CXX" = "" ]; then
	for i in `echo $PATH |sed 's/:/ /g'`; do
		if [ -x "${i}/cxx" ]; then
			if [ -f "${i}/cxx" ]; then
				CXX="${i}/cxx"
				break
			fi
		elif [ -x "${i}/gcc" ]; then
			if [ -f "${i}/gcc" ]; then
				CXX="${i}/gcc"
				break
			fi
		fi
	done
	if [ "$CXX" = "" ]; then
		echo "*"
		echo "* Unable to find a standard C++ compiler in PATH. You may need"
		echo "* to set the CXX environment variable."
		echo "*"
		echo "Unable to find a C compiler in PATH." >> config.log
		HAVE_CXX="no"
		echo "no"
	else
		HAVE_CXX="yes"
		echo "yes, ${CXX}"
		echo "yes, ${CXX}" >> config.log
	fi
else
	echo "using ${CXX}"
fi
if [ "${HAVE_CXX}" = "yes" ]; then
	$ECHO_N "checking whether the C++ compiler works..."
	$ECHO_N "checking whether the C++ compiler works..." >> config.log
	cat << 'EOT' > conftest.cc
#include <iostream>
int main(void) { std::cout << "Hello world!" << std::endl; return 0; }
EOT
	$CXX -o conftest conftest.cc -lstdc++ 2>>config.log
	if [ $? != 0 ]; then
		echo "no"
		echo "no (test failed to compile)" >> config.log
		HAVE_CXX="no"
	else
		echo "yes"
		echo "yes" >> config.log
		HAVE_CXX="yes"
	fi
	if [ "${HAVE_CXX}" = "yes" ]; then
		if [ "${EXECSUFFIX}" = "" ]; then
			EXECSUFFIX=""
			for OUTFILE in conftest.exe conftest conftest.*; do
				if [ -f $OUTFILE ]; then
					case $OUTFILE in
					*.c | *.cc | *.m | *.o | *.obj | *.bb | *.bbg | *.d | *.pdb | *.tds | *.xcoff | *.dSYM | *.xSYM )
						;;
					*.* )
						EXECSUFFIX=`expr "$OUTFILE" : '[^.]*\(\..*\)'`
						break ;;
					* )
						break ;;
					esac;
				fi
			done
			if [ "$EXECSUFFIX" != "" ]; then
				echo "Detected executable suffix: $EXECSUFFIX" >> config.log
			fi
EOF
	# Record the discovered executable suffix for Makefiles and defines.
	MkSaveMK('EXECSUFFIX');
	MkSaveDefine('EXECSUFFIX');
	print << 'EOF';
		fi
	fi
	rm -f conftest.cc conftest$EXECSUFFIX
	TEST_CXXFLAGS=""
fi
EOF
	# The remaining probes only run when a working compiler was found.
	MkIfTrue('${HAVE_CXX}');
		MkPrintN('checking for c++ compiler warning options...');
		MkCompileCXX('HAVE_CXX_WARNINGS', '-Wall -Werror', '-lstdc++', << 'EOF');
int main(void) { return (0); }
EOF
		MkIfTrue('${HAVE_CXX_WARNINGS}');
			MkDefine('TEST_CXXFLAGS', '-Wall -Werror');
		MkEndif;

		# Check for long double type.
		MkPrintN('checking for long double...');
		TryCompile('HAVE_LONG_DOUBLE', << 'EOF');
int
main(void)
{
	long double ld = 0.1;

	ld = 0;
	return (0);
}
EOF
		# Check for long long type.
		MkPrintN('checking for long long...');
		TryCompile('HAVE_LONG_LONG', << 'EOF');
int
main(void)
{
	long long ll = 0.0;
	unsigned long long ull = 0.0;
	ll = 1.0;
	ull = 1.0;
	return (0);
}
EOF
		MkPrintN('checking for cygwin environment...');
		TryCompileFlagsCXX('HAVE_CYGWIN', '-mcygwin', << 'EOF');
#include <sys/types.h>
#include <sys/stat.h>
#include <windows.h>

int
main(void) {
	struct stat sb;
	DWORD rv;

	rv = GetFileAttributes("foo");
	stat("foo", &sb);
	return (0);
}
EOF
		# libtool needs --tag=CXX on some platforms to compile C++ objects.
		MkPrintN('checking for libtool --tag=CXX retardation...');
		print 'cat << EOT > conftest.cc', "\n";
		print << 'EOF';
#include <iostream>
int main(void) { std::cout << "Hello world!" << std::endl; return 0; }
EOT
EOF
		print << "EOF";
\$LIBTOOL --quiet --mode=compile --tag=CXX \$CXX \$CXXFLAGS \$TEST_CXXFLAGS -o \$testdir/conftest.o conftest.cc 2>>config.log
EOF
		MkIf('"$?" = "0"');
			MkPrint('yes');
			MkDefine('LIBTOOLOPTS_CXX', '--tag=CXX');
		MkElse;
			MkPrint('no');
		MkEndif;
		MkSaveMK('LIBTOOLOPTS_CXX');
		print 'rm -f conftest.cc $testdir/conftest$EXECSUFFIX', "\n";

		# Preserve ${CXX} and ${CXXFLAGS}
		MkSaveMK('CXX', 'CXXFLAGS');
	MkEndif; # HAVE_CXX
}
# Emulate the C++ test results for cross-builds where the probe cannot
# run: assume IEEE 754 support and mark every other probed feature as
# undefined. Always reports success.
sub Emul
{
	my ($os, $osrel, $machine) = @_;

	MkDefine('HAVE_IEEE754', 'yes');
	MkSaveDefine('HAVE_IEEE754');
	for my $feature (qw(HAVE_LONG_DOUBLE HAVE_LONG_LONG HAVE_CYGWIN)) {
		MkSaveUndef($feature);
	}
	return (1);
}
# Register this module with the BSDBuild framework's dispatch tables under
# the key 'cxx': the test routine, the cross-build emulation routine, and
# the human-readable description.
BEGIN
{
	$TESTS{'cxx'} = \&Test;
	$EMUL{'cxx'} = \&Emul;
	$DESCR{'cxx'} = 'a C++ compiler';
}
;1
| stqism/ToxBuild | ToxBuild/cxx.pm | Perl | bsd-2-clause | 5,315 |
#! /usr/bin/perl -w
use strict;
use warnings;
use Carp;
use Switch;
use Archive::Tar;
use Archive::Tar::Constant;
use Archive::Zip qw( :ERROR_CODES :CONSTANTS );
use Compress::Zlib;
use POSIX;
use File::Copy;
use File::stat;
# adddir($dir, $listref)
#
# Recursively collect qmake project files under $dir: for every directory
# whose basename is <base>, push "<dir>/<base>.pro" onto @$listref when that
# file exists.  Hidden entries (leading '.') are skipped.
#
# Fixes over the original: lexical directory handle instead of the bareword
# DIR shared across the recursion, explicit closedir (no handle leak), an
# error check on opendir, and removal of the useless ($$) prototype.
sub adddir {
    my ($dir, $ref) = @_;
    $dir =~ s/^\.\///;                  # normalize a leading "./"

    # A directory ".../bar" is expected to carry its project file "bar.pro".
    my $base = $dir;
    $base =~ s/^.+\/([^\/]+)$/$1/;
    if (-f "$dir/$base.pro") {
        push @{$ref}, "$dir/$base.pro";
    }

    # Read all entries up front, then close the handle before recursing.
    opendir(my $dh, $dir) or do {
        warn "adddir: cannot open directory $dir: $!\n";
        return;
    };
    my @entries = grep { !/^\./ } readdir($dh);
    closedir($dh);

    foreach my $e (@entries) {
        adddir("$dir/$e", $ref) if -d "$dir/$e";
    }
}
# Map of every file that must ship in the release archives (path => 1).
my %files;
# Release version, taken from the .pro files, git or @ARGV (see below).
my $ver;
# qmake variables whose values are lists of files to include in the release.
my %filevars = ( 'sources' => 1, 'headers' => 1, 'rc_file' => 1, 'dist' => 1, 'forms' => 1, 'resources' => 1, 'precompiled_header' => 1, 'translations' => 1);

# Remove artifacts from previous release runs.
# NOTE(review): unquoted glob goes through the shell; assumes a clean cwd.
system("rm mumble-*");
# Regenerate the .ini files before packaging.
chdir("scripts");
system("bash mkini.sh");
chdir("..");

# Worklist of qmake project files to parse, starting at the top level.
my @pro = ("main.pro");
#, "src/mumble.pri");
#adddir(".", \@pro);
# .qrc resource files found while parsing, scanned in a second pass below.
my @resources;

# Parse each .pro/.pri file, following include() and SUBDIRS references,
# and record every referenced file in %files.
while (my $pro = shift @pro) {
    open(F, $pro) or croak "Failed to open $pro";
    print "Processing $pro\n";
    $files{$pro}=1;
    my $basedir=$pro;
    $basedir =~ s/[^\/]+\Z//g;          # directory part of the project file
    my @vpath = ($basedir);             # qmake VPATH: where files are looked up
    while(<F>) {
        chomp();
        if (/^include\((.+)\)/) {
            # Queue included .pri files, collapsing "dir/../" components.
            my $f = $basedir . $1;
            while ($f =~ /\.\./) {
                $f =~ s/(\/|\A)[^\/]+\/\.\.\//$1/g;
            }
            push @pro, $f;
        } elsif (/^\s*(\w+)\s*?[\+\-\*]{0,1}=\s*(.+)$/) {
            # Matches VAR = value, VAR += value, VAR -= value, VAR *= value.
            my ($var,$value)=(lc $1,$2);
            switch ($var) {
                case "version" {
                    # All project files must agree on a literal version number.
                    if ($value !~ /\$\$/) {
                        croak "Versions don't match" if (defined($ver) && ($ver ne $value));
                        $ver=$value;
                    }
                }
                case "vpath" {
                    if ($value eq '../$$SOURCEDIR/libcelt') {
                        # Special case: celt build dir refers back to its source dir.
                        my $vdir = $basedir;
                        $vdir =~ s/-build/-src/;
                        push @vpath, $vdir.'libcelt/';
                    } else {
                        push @vpath,map { "$basedir$_/"} map { s/\$\$PWD/./; $_;} split(/\s/, $value);
                    }
                }
                case "subdirs" {
                    # SUBDIRS = a b  =>  queue a/a.pro, b/b.pro, ...
                    push @pro,map { my ($b,$p) = ($_,$_); $p =~ s/^.+\///g; "$basedir$b/$p.pro" } split(/\s/, $value);
                }
                case %filevars {
                    # A file-list variable: resolve each entry against @vpath.
                    foreach my $f (split(/\s+/,$value)) {
                        # Generated sources are not shipped.
                        next if ($f =~ /^Murmur\.(h|cpp)$/);
                        next if ($f =~ /^Mumble\.pb\.(h|cc)$/);
                        my $ok = 0;
                        foreach my $d (@vpath) {
                            if (-f "$d$f") {
                                $f = $d.$f;
                                $ok = 1;
                                last;
                            }
                        }
                        if (! $ok) {
                            croak "Failed to find $f in ".join(" ",@vpath);
                        } else {
                            # Collapse "dir/../" components before recording.
                            while ($f =~ /\.\./) {
                                $f =~ s/(\/|\A)[^\/]+\/\.\.\//$1/g;
                            }
                            $files{$f}=1;
                            if ($var eq "resources") {
                                push @resources,$f;
                            }
                        }
                    }
                }
            }
        }
    }
    close(F);
}
# Second pass: scan each Qt resource (.qrc) file and add every embedded
# file to the distribution (compiled .qm translations are skipped).
foreach my $resfile (@resources) {
    open(F, $resfile);
    my $basedir=$resfile;
    $basedir =~ s/[^\/]+\Z//g;
    while(<F>) {
        chomp();
        if (/\>(.+)<\/file\>/) {
            my $f = $basedir.$1;
            next if $f =~ /\.qm$/;       # generated at build time
            while ($f =~ /\.\./) {
                $f =~ s/(\/|\A)[^\/]+\/\.\.\//$1/g;
            }
            $files{$f}=1;
        }
    }
    close(F);
}

# Directories shipped wholesale: every non-hidden plain file they contain.
my @fulldirs = ('speex','speex/include/speex','speex/libspeex','man');
foreach my $cver ('0.7.0', '0.11.0') {
    push @fulldirs, "celt-$cver-src";
    push @fulldirs, "celt-$cver-src/libcelt";
}
foreach my $dir (@fulldirs) {
    opendir(D, $dir) or croak "Could not open $dir";
    foreach my $f (grep(! /^\./,readdir(D))) {
        next if ($f =~ /\~$/);           # skip editor backup files
        my $ff=$dir . '/' . $f;
        if (-f $ff) {
            $files{$ff}=1;
        }
    }
    closedir(D);
}

# LICENSE is added explicitly as the first archive entry below.
delete($files{'LICENSE'});

# Determine the release version: from "git describe" when no argument is
# given, otherwise from the command line.
if ($#ARGV < 0) {
    open(F, "git describe origin/master|");
    while (<F>) {
        chomp();
        # Rewrite "tag-N-ghash" into "tag~N-ghash" style for the version id.
        s/^(.+)-([0-9]+)-(g.+)$/$1|$2|$3/;
        s/-/~/;
        s/\|/-/g;
        $ver = $_;
    }
    close(F);
    print "REVISION $ver\n";
} elsif ($#ARGV == 0) {
    $ver = $ARGV[0];
}

# Write every collected file into both a .tar.gz and a .zip archive rooted
# at mumble-<version>/, then stage, sign and upload the tarball.
my $tar = new Archive::Tar();
my $zip = new Archive::Zip();
my $blob;
my $dir="mumble-$ver/";
my $zipdir = $zip->addDirectory($dir);
foreach my $file ('LICENSE', sort keys %files) {
    print "Adding $file\n";
    open(F, $file) or croak "Missing $file";
    sysread(F, $blob, stat($file)->size);
    if ($file eq "src/Version.h") {
        # Bake the concrete version number into the shipped Version.h.
        $blob =~ s/(\#ifndef MUMBLE_VERSION)/$1\n\#define MUMBLE_VERSION $ver\n\#endif\n$1/;
    }
    if (-l $file) {
        # Preserve symlinks as symlinks in the tarball.
        $tar->add_data($dir . $file, "", { linkname => readlink($file), type => Archive::Tar::Constant::SYMLINK });
    } else {
        $tar->add_data($dir . $file, $blob);
    }
    my $zipmember=$zip->addString($blob, $dir . $file);
    $zipmember->desiredCompressionMethod( COMPRESSION_DEFLATED );
    $zipmember->desiredCompressionLevel( 9 );
    close(F);
}
my $gz=gzopen("mumble-${ver}.tar.gz", "w");
$gz->gzwrite($tar->write());
$gz->gzclose();
$zip->writeToFileNamed("mumble-${ver}.zip");
# Stage for Debian packaging, detach-sign, and upload both artifacts.
copy("mumble-${ver}.tar.gz", "../deb-mumble/tarballs/mumble_${ver}.orig.tar.gz");
system("/usr/bin/gpg", "--armor", "--default-key", "DEBA6F3E", "--sign", "--detach-sign", "--output", "mumble-${ver}.tar.gz.sig", "mumble-${ver}.tar.gz");
system("/usr/bin/scp", "-4", "mumble-${ver}.tar.gz", "mumble-${ver}.tar.gz.sig", "slicer\@mumble.hive.no:/var/www/snapshot/");
use utf8;
package Netdisco::DB::Result::DevicePower;

# Created by DBIx::Class::Schema::Loader
# DO NOT MODIFY THE FIRST PART OF THIS FILE

use strict;
use warnings;

use base 'DBIx::Class::Core';

__PACKAGE__->table("device_power");
__PACKAGE__->add_columns(
  "ip",
  { data_type => "inet", is_nullable => 0 },
  "module",
  { data_type => "integer", is_nullable => 0 },
  "power",
  { data_type => "integer", is_nullable => 1 },
  "status",
  { data_type => "text", is_nullable => 1 },
);
__PACKAGE__->set_primary_key("ip", "module");

# Created by DBIx::Class::Schema::Loader v0.07015 @ 2012-01-07 14:20:02
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:awZRI/IH2VewzGlxISsr7w

# You can replace this text with custom code or comments, and it will be preserved on regeneration

# ORM result class for the device_power table: one row per power module of
# a device, keyed by (device IP, module number).  NOTE(review): the exact
# semantics/units of 'power' and 'status' are not visible here -- confirm
# against the Netdisco schema documentation before relying on them.
1;
| jeneric/netdisco-frontend-sandpit | Netdisco/lib/Netdisco/DB/Result/DevicePower.pm | Perl | bsd-3-clause | 785 |
package Millc::Lex;
use Exporter 'import';
use List::Util 'min';
use Modern::Perl;

our @EXPORT_OK = qw(lex);

# End-of-keyword guard: a keyword must not be immediately followed by an
# identifier character, otherwise it is just the prefix of an identifier.
# (Previously "checking" lexed as keyword 'check' followed by identifier
# 'ing'; the lookahead fixes that without consuming any input.)
my $kw_end = qr/(?![a-zA-Z0-9_])/;

# Token patterns, tried in order; first match wins.  The optional third
# element computes the token's value from the matched text ($&).
my @patterns = (
    [ qr/::/, 'colon_colon' ],
    [ qr/==/, 'eq_eq' ],
    [ qr/:/, 'colon' ],
    [ qr/;/, 'semicolon' ],
    [ qr/\./, 'period' ],
    [ qr/\(/, 'left_parenthesis' ],
    [ qr/\)/, 'right_parenthesis' ],
    [ qr/{/, 'left_brace' ],
    [ qr/}/, 'right_brace' ],
    [ qr/~/, 'tilde' ],
    [ qr/\+/, 'plus' ],
    [ qr/-/, 'minus' ],
    [ qr/".*?"/, 'string', sub { substr($_[0], 1, length($_[0]) - 2) } ],
    [ qr/CHECK$kw_end/, 'CHECK' ],
    [ qr/check$kw_end/, 'check' ],
    [ qr/false$kw_end/, 'boolean', sub { 0 } ],
    [ qr/else$kw_end/, 'else' ],
    [ qr/MAIN$kw_end/, 'MAIN' ],
    [ qr/proc$kw_end/, 'proc' ],
    [ qr/true$kw_end/, 'boolean', sub { 1 } ],
    [ qr/if$kw_end/, 'if' ],
    [ qr/use$kw_end/, 'use' ],
    [ qr/_/, 'underscore' ],
    [ qr/[a-zA-Z_][a-zA-Z0-9_]*/, 'identifier', sub { shift } ],
    [ qr/\z/, 'eof' ],
);

# lex($code) -> @tokens
#
# Tokenize Mill source text into a list of { type => ..., value => ... }
# hashes.  Whitespace, '#' line comments, nestable '#( ... )' comments and
# '=x ... =cut' POD-style blocks are skipped between tokens.  Dies with a
# snippet of the offending input when no pattern matches.
sub lex {
    my $code = shift;
    my @tokens;
    token: while ($code ne '') {
        my $space = qr/=[a-z].*?\n=cut|[ \n]|#(\((?:(?-1)|.)*?\))|#.*?\n/s;
        $code =~ s/^$space+//s;
        for (@patterns) {
            my ($pattern, $type, $value) = @$_;
            if ($code =~ /^$pattern/) {
                push @tokens, {
                    type => $type,
                    $value ? (value => $value->($&)) : (),
                };
                $code = substr($code, length($&));
                next token;
            }
        }
        # Report a short snippet of the bad input.  Cope with input that has
        # no trailing newline: index() returns -1 there, and the original
        # min(15, -1) produced a negative length (dropping the last char).
        my $nl = index($code, "\n");
        my $snippet_len = $nl < 0 ? 15 : min(15, $nl);
        die 'invalid token: ' . substr($code, 0, $snippet_len);
    }
    @tokens;
}

1;
| tomalakgeretkal/mill | millc/lib/Millc/Lex.pm | Perl | bsd-3-clause | 1,581 |
% This file is part of AceRules.
% Copyright 2008-2012, Tobias Kuhn, http://www.tkuhn.ch
%
% AceRules is free software: you can redistribute it and/or modify it under the terms of the GNU
% Lesser General Public License as published by the Free Software Foundation, either version 3 of
% the License, or (at your option) any later version.
%
% AceRules is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
% the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
% General Public License for more details.
%
% You should have received a copy of the GNU Lesser General Public License along with AceRules. If
% not, see http://www.gnu.org/licenses/.
:- module(acerules_server, [
acerules_server/1 % +Port
]).
:- use_module(library(streampool)).
:- use_module(soap_utils).
:- use_module(get_results).
/** <module> AceRules server
This module contains the AceRules server. It is supposed to run continuously and it can
communicate through a socket connection with other programs. A small script is started
every time a web request is performed. This makes the web response very fast.
---+++ Technical remarks:
This module is basically a copy of the APE server module.
This AceRules server runs an own instance of the APE parser. Thus, it does not use the
APE web service.
Generally, it would be better if AceRules uses the APE web service (or communicates
directly with the APE server).
@author Kaarel Kaljurand
@author Tobias Kuhn
@version 2007-08-14
*/
%% acerules_server(+Port)
%
% Starts the AceRules server.
acerules_server(Port) :-
	tcp_socket(Socket),
	tcp_bind(Socket, Port),
	tcp_listen(Socket, 5),
	tcp_open_socket(Socket, In, _Out),
	% Register the listening stream; accept/1 runs on incoming connections.
	add_stream_to_pool(In, accept(Socket)),
	stream_pool_main_loop.

%% accept(+Socket)
%
% Accept one pending connection on Socket and register the new input
% stream in the stream pool; client/2 is invoked when data arrives.
accept(Socket) :-
	tcp_accept(Socket, Slave, _Peer),
	tcp_open_socket(Slave, In, Out),
	add_stream_to_pool(In, client(In, Out)).

%% client(+In, +Out)
%
% Handle one client connection: read the full request (up to an empty line
% or end of file), process it, then close both streams and remove the
% input stream from the pool.
client(In, Out) :-
	read_lines(In, RequestC),
	close(In),
	atom_codes(Request, RequestC),
	( Request == '' ->
	    true
	;
	    process_client_request(Out, Request)
	),
	close(Out),
	delete_stream_from_pool(In).

%% process_client_request(+Out, +Request)
%
% Run the AceRules pipeline on Request and write the reply, terminated by
% the "\n.\n" end-of-message marker expected by the client script.
process_client_request(Out, I) :-
	get_results(I, O),
	write(Out, O),
	write(Out, '\n.\n'),
	flush_output(Out),
	!.

% Fallback clause: any failure above is reported as a SOAP fault message.
process_client_request(Out, _ClientRequest) :-
	create_fault_element('ar:Unknown', 'Failed to process the request.', FaultElement),
	create_soap_message(FaultElement, SOAPOutput),
	write(Out, SOAPOutput),
	write(Out, '\n.\n'),
	flush_output(Out).

%% read_lines(+In, -Codes)
%
% Read lines from In up to end-of-file or the first empty line, returning
% the accumulated character codes with newlines (code 10) restored.
read_lines(In, Codes) :-
	read_line_to_codes(In, Line),
	(Line == end_of_file ->
	    Codes = []
	;
	    (Line == [] ->
		Codes = []
	    ;
		read_lines(In, CodesRest),
		append(Line, [10|CodesRest], Codes)
	    )
	).
| TeamSPoon/logicmoo_workspace | packs_sys/logicmoo_nlu/ext/AceRules/engine/webservice/acerules_server.pl | Perl | mit | 2,761 |
% Simple illustration of using Aleph to do incremental learning
% To run do the following:
% a. Load Aleph
% b. read_all(mem).
% c. induce_incremental.
% After that, just follow the menus on screen.
/** <examples>
?- induce_incremental(Program).
% try with this input
mem(1,[1]).
overgeneral.
show(constraints).
none.
ok.
ok.
none.
mem(1,[2,1]).
because(overgeneral,not(mem(1,[2,3]))).
none.
ok.
ok.
none.
none.
*/
% Older-style mode declarations, kept commented out by the author.
% :- modeh(*,mem(+any,+list)).
% :- modeb(*,mem(+any,+list)).
% :- modeb(1,((+list) = ([-any|-list]))).

:- use_module(library(aleph)).
:- aleph.
% Enable Prolog term rendering when running under SWISH.
:- if(current_predicate(use_rendering/1)).
:- use_rendering(prolog).
:- endif.
% Mode declarations: learn mem/2 using mem/2 calls and list decomposition.
:- mode(*,mem(+any,+list)).
:- mode(1,((+list) = ([-any|-list]))).
% Search settings: clause-length bound 3, no noise, verbose printing.
:- aleph_set(i,3).
:- aleph_set(noise,0).
:- aleph_set(print,1).
% Predicates allowed in the bodies of learned clauses for mem/2.
:- determination(mem/2,mem/2).
:- determination(mem/2,'='/2).
| TeamSPoon/logicmoo_workspace | packs_web/swish/examples/aleph/interactive_mem.pl | Perl | mit | 856 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=cut
=head1 NAME
Bio::EnsEMBL::SubSlicedFeature
=head1 SYNOPSIS
my $truncated_gene = Bio::EnsEMBL::Utils::SubSlicedFeature->new(
-start => 300,
-end => 10000,
-feature => $gene);
my $transcripts = $truncated_gene->get_all_Transcripts();
# list of transcripts is limited to those within the coordinates, rather
# than the original feature Slice.
=head1 DESCRIPTION
Alters the behaviour of a normal Feature object to act within a user-specified
sub-slice of of its boundaries. As it stands, this only affects get_all_*
methods, meaning that seq_region_start() and project() will work on original
coordinates.
=cut
package Bio::EnsEMBL::SubSlicedFeature;
use strict;
use warnings;
use Bio::EnsEMBL::Utils::Argument qw/rearrange/;
use base qw/Bio::EnsEMBL::Utils::Proxy/;
# Constructor: wrap an existing feature and remember the sub-slice
# boundaries.  Accepts the standard Ensembl -START / -END / -FEATURE
# named arguments.
sub new {
    my $class = shift;

    my ( $sub_start, $sub_end, $feature ) =
        rearrange( [qw/start end feature/], @_ );

    # The Proxy base class stores the wrapped feature; this subclass only
    # records the sub-slice coordinates on top of it.
    my $self = $class->SUPER::new($feature);
    @{$self}{ 'start', 'end' } = ( $sub_start, $sub_end );

    return $self;
}
# Required by Proxy to control scope of the autoloaded methods.
# Also intercepts calls to get_all_* methods so that their results are
# filtered down to the user-supplied sub-slice coordinates.
sub __resolver {
    my ($self, $package_name, $method) = @_;

    if ($method =~ /^get_all_/) {
        # Call original method and filter results to Proxy coordinates.
        return sub {
            my ($local_self, @args) = @_;
            my $feature_list = $local_self->__proxy()->$method(@args);
            my @short_list;
            foreach my $feature (@$feature_list) {
                # Keep only features strictly inside the sub-slice.
                # NOTE(review): the comparison is exclusive, so features
                # touching the exact start/end boundary are dropped --
                # confirm this is intended rather than >= / <=.
                if ($feature->start > $local_self->{'start'}
                        && $feature->end < $local_self->{'end'}) {
                    push @short_list,$feature;
                }
            }
            return \@short_list;
        }
    } else {
        # No intervention required, call original object method.
        return sub {
            my ($local_self, @args) = @_;
            return $local_self->__proxy()->$method(@args);
        };
    }
}
1; | mjg17/ensembl | modules/Bio/EnsEMBL/SubSlicedFeature.pm | Perl | apache-2.0 | 2,878 |
# Generated SOAP::WSDL type for the AdWords v201409 simpleType
# IdError.Reason.  It is a restriction of xsd:string; SOAP::WSDL performs
# no validation, so it behaves exactly like its string base type.
package Google::Ads::AdWords::v201409::IdError::Reason;
use strict;
use warnings;

# XML namespace this simpleType belongs to.
sub get_xmlns { 'https://adwords.google.com/api/adwords/cm/v201409'};

# derivation by restriction
use base qw(
    SOAP::WSDL::XSD::Typelib::Builtin::string);

1;
__END__
=pod
=head1 NAME
=head1 DESCRIPTION
Perl data type class for the XML Schema defined simpleType
IdError.Reason from the namespace https://adwords.google.com/api/adwords/cm/v201409.
The reasons for the target error.
This clase is derived from
SOAP::WSDL::XSD::Typelib::Builtin::string
. SOAP::WSDL's schema implementation does not validate data, so you can use it exactly
like it's base type.
# Description of restrictions not implemented yet.
=head1 METHODS
=head2 new
Constructor.
=head2 get_value / set_value
Getter and setter for the simpleType's value.
=head1 OVERLOADING
Depending on the simple type's base type, the following operations are overloaded
Stringification
Numerification
Boolification
Check L<SOAP::WSDL::XSD::Typelib::Builtin> for more information.
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| gitpan/GOOGLE-ADWORDS-PERL-CLIENT | lib/Google/Ads/AdWords/v201409/IdError/Reason.pm | Perl | apache-2.0 | 1,101 |
# !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is machine-generated by lib/unicore/mktables from the Unicode
# database, Version 8.0.0. Any changes made here will be lost!
# !!!!!!! INTERNAL PERL USE ONLY !!!!!!!
# This file is for internal use by core Perl only. The format and even the
# name or existence of this file are subject to change without notice. Don't
# use it directly. Use Unicode::UCD to access the Unicode character data
# base.
# The name this swash is to be known by, with the format of the mappings in
# the main body of the table, and what all code points missing from this file
# map to.
$utf8::SwashInfo{'ToBpt'}{'format'} = 's'; # string
$utf8::SwashInfo{'ToBpt'}{'missing'} = 'n';
return <<'END';
28 o
29 c
5B o
5D c
7B o
7D c
F3A o
F3B c
F3C o
F3D c
169B o
169C c
2045 o
2046 c
207D o
207E c
208D o
208E c
2308 o
2309 c
230A o
230B c
2329 o
232A c
2768 o
2769 c
276A o
276B c
276C o
276D c
276E o
276F c
2770 o
2771 c
2772 o
2773 c
2774 o
2775 c
27C5 o
27C6 c
27E6 o
27E7 c
27E8 o
27E9 c
27EA o
27EB c
27EC o
27ED c
27EE o
27EF c
2983 o
2984 c
2985 o
2986 c
2987 o
2988 c
2989 o
298A c
298B o
298C c
298D o
298E c
298F o
2990 c
2991 o
2992 c
2993 o
2994 c
2995 o
2996 c
2997 o
2998 c
29D8 o
29D9 c
29DA o
29DB c
29FC o
29FD c
2E22 o
2E23 c
2E24 o
2E25 c
2E26 o
2E27 c
2E28 o
2E29 c
3008 o
3009 c
300A o
300B c
300C o
300D c
300E o
300F c
3010 o
3011 c
3014 o
3015 c
3016 o
3017 c
3018 o
3019 c
301A o
301B c
FE59 o
FE5A c
FE5B o
FE5C c
FE5D o
FE5E c
FF08 o
FF09 c
FF3B o
FF3D c
FF5B o
FF5D c
FF5F o
FF60 c
FF62 o
FF63 c
END
| operepo/ope | bin/usr/share/perl5/core_perl/unicore/To/Bpt.pl | Perl | mit | 1,692 |
package Moose::Meta::Method::Accessor::Native::Hash::clear;
BEGIN {
$Moose::Meta::Method::Accessor::Native::Hash::clear::AUTHORITY = 'cpan:STEVAN';
}
{
$Moose::Meta::Method::Accessor::Native::Hash::clear::VERSION = '2.0602';
}
use strict;
use warnings;
use Moose::Role;
with 'Moose::Meta::Method::Accessor::Native::Hash::Writer' => {
-excludes => [
qw(
_maximum_arguments
_inline_optimized_set_new_value
_return_value
)
]
};
# clear() takes no arguments.
sub _maximum_arguments { 0 }

# Clearing never adds members, so no member-level type checks are needed.
sub _adds_members { 0 }

# The attribute's new value is simply a fresh empty hashref.
sub _potential_value { '{}' }

# Inlined fast path: overwrite the slot with an empty hashref in place.
sub _inline_optimized_set_new_value {
    my $self = shift;
    my ($inv, $new, $slot_access) = @_;

    return $slot_access . ' = {};';
}

# clear() returns nothing.
sub _return_value { '' }
no Moose::Role;
1;
| leighpauls/k2cro4 | third_party/perl/perl/vendor/lib/Moose/Meta/Method/Accessor/Native/Hash/clear.pm | Perl | bsd-3-clause | 769 |
# $Id: Reader.pm,v 1.1.2.1 2004/04/20 20:09:48 pajas Exp $
#
# This is free software, you may use it and distribute it under the same terms as
# Perl itself.
#
# Copyright 2001-2003 AxKit.com Ltd., 2002-2006 Christian Glahn, 2006-2009 Petr Pajas
#
#
package XML::LibXML::Reader;
use XML::LibXML;
use Carp;
use strict;
use warnings;
use vars qw ($VERSION);
$VERSION = "2.0014"; # VERSION TEMPLATE: DO NOT CHANGE
use 5.008_000;
BEGIN {
UNIVERSAL::can('XML::LibXML::Reader','_newForFile') or
croak("Cannot use XML::LibXML::Reader module - ".
"your libxml2 is compiled without reader support!");
}
use base qw(Exporter);
use constant {
XML_READER_TYPE_NONE => 0,
XML_READER_TYPE_ELEMENT => 1,
XML_READER_TYPE_ATTRIBUTE => 2,
XML_READER_TYPE_TEXT => 3,
XML_READER_TYPE_CDATA => 4,
XML_READER_TYPE_ENTITY_REFERENCE => 5,
XML_READER_TYPE_ENTITY => 6,
XML_READER_TYPE_PROCESSING_INSTRUCTION => 7,
XML_READER_TYPE_COMMENT => 8,
XML_READER_TYPE_DOCUMENT => 9,
XML_READER_TYPE_DOCUMENT_TYPE => 10,
XML_READER_TYPE_DOCUMENT_FRAGMENT => 11,
XML_READER_TYPE_NOTATION => 12,
XML_READER_TYPE_WHITESPACE => 13,
XML_READER_TYPE_SIGNIFICANT_WHITESPACE => 14,
XML_READER_TYPE_END_ELEMENT => 15,
XML_READER_TYPE_END_ENTITY => 16,
XML_READER_TYPE_XML_DECLARATION => 17,
XML_READER_NONE => -1,
XML_READER_START => 0,
XML_READER_ELEMENT => 1,
XML_READER_END => 2,
XML_READER_EMPTY => 3,
XML_READER_BACKTRACK => 4,
XML_READER_DONE => 5,
XML_READER_ERROR => 6
};
use vars qw( @EXPORT @EXPORT_OK %EXPORT_TAGS );
sub CLONE_SKIP { 1 }
BEGIN {
%EXPORT_TAGS = (
types =>
[qw(
XML_READER_TYPE_NONE
XML_READER_TYPE_ELEMENT
XML_READER_TYPE_ATTRIBUTE
XML_READER_TYPE_TEXT
XML_READER_TYPE_CDATA
XML_READER_TYPE_ENTITY_REFERENCE
XML_READER_TYPE_ENTITY
XML_READER_TYPE_PROCESSING_INSTRUCTION
XML_READER_TYPE_COMMENT
XML_READER_TYPE_DOCUMENT
XML_READER_TYPE_DOCUMENT_TYPE
XML_READER_TYPE_DOCUMENT_FRAGMENT
XML_READER_TYPE_NOTATION
XML_READER_TYPE_WHITESPACE
XML_READER_TYPE_SIGNIFICANT_WHITESPACE
XML_READER_TYPE_END_ELEMENT
XML_READER_TYPE_END_ENTITY
XML_READER_TYPE_XML_DECLARATION
)],
states =>
[qw(
XML_READER_NONE
XML_READER_START
XML_READER_ELEMENT
XML_READER_END
XML_READER_EMPTY
XML_READER_BACKTRACK
XML_READER_DONE
XML_READER_ERROR
)]
);
@EXPORT = (@{$EXPORT_TAGS{types}},@{$EXPORT_TAGS{states}});
@EXPORT_OK = @EXPORT;
$EXPORT_TAGS{all}=\@EXPORT_OK;
}
# Lexical scope shared by the property accessors, the constructor and the
# destructor: %props maps option names to libxml2 parser-property ids, and
# the *_pool hashes keep Perl data alive for the lifetime of each reader.
{
    my %props = (
        load_ext_dtd => 1,        # load the external subset
        complete_attributes => 2, # default DTD attributes
        validation => 3,          # validate with the DTD
        expand_entities => 4,     # substitute entities
    );

    # getParserProp($name) -> value, or undef for unknown property names.
    sub getParserProp {
        my ($self, $name) = @_;
        my $prop = $props{$name};
        return undef unless defined $prop;
        return $self->_getParserProp($prop);
    }

    # setParserProp(name => value, ...) or setParserProp({ ... }).
    sub setParserProp {
        my $self = shift;
        my %args = map { ref($_) eq 'HASH' ? (%$_) : $_ } @_;
        my ($key, $value);
        while (($key,$value) = each %args) {
            my $prop = $props{ $key };
            $self->_setParserProp($prop,$value);
        }
        return;
    }

    # Keyed by the stringified reader; entries are removed in DESTROY.
    my (%string_pool,%rng_pool,%xsd_pool); # used to preserve data passed to the reader

    # new(location|string|IO|DOM|FD => ..., encoding => ..., URI => ...,
    #     RelaxNG => ..., Schema => ..., %parser_options)
    #
    # Exactly one input source must be given.  RelaxNG/Schema may be schema
    # objects or file names; they enable validation while reading.
    sub new {
        my ($class) = shift;
        my %args = map { ref($_) eq 'HASH' ? (%$_) : $_ } @_;
        my $encoding = $args{encoding};
        my $URI = $args{URI};
        $URI="$URI" if defined $URI; # stringify in case it is an URI object
        my $options = XML::LibXML->_parser_options(\%args);

        my $self = undef;
        if ( defined $args{location} ) {
            $self = $class->_newForFile( $args{location}, $encoding, $options );
        }
        elsif ( defined $args{string} ) {
            $self = $class->_newForString( $args{string}, $URI, $encoding, $options );
            # Keep a reference so the string outlives the caller's copy.
            $string_pool{$self} = \$args{string};
        }
        elsif ( defined $args{IO} ) {
            $self = $class->_newForIO( $args{IO}, $URI, $encoding, $options );
        }
        elsif ( defined $args{DOM} ) {
            croak("DOM must be a XML::LibXML::Document node")
                unless UNIVERSAL::isa($args{DOM}, 'XML::LibXML::Document');
            $self = $class->_newForDOM( $args{DOM} );
        }
        elsif ( defined $args{FD} ) {
            my $fd = fileno($args{FD});
            $self = $class->_newForFd( $fd, $URI, $encoding, $options );
        }
        else {
            croak("XML::LibXML::Reader->new: specify location, string, IO, DOM, or FD");
        }
        if ($args{RelaxNG}) {
            if (ref($args{RelaxNG})) {
                $rng_pool{$self} = \$args{RelaxNG};
                $self->_setRelaxNG($args{RelaxNG});
            } else {
                $self->_setRelaxNGFile($args{RelaxNG});
            }
        }
        if ($args{Schema}) {
            if (ref($args{Schema})) {
                $xsd_pool{$self} = \$args{Schema};
                $self->_setXSD($args{Schema});
            } else {
                $self->_setXSDFile($args{Schema});
            }
        }
        return $self;
    }

    # Drop the pooled references before releasing the underlying XS object.
    sub DESTROY {
        my $self = shift;
        delete $string_pool{$self};
        delete $rng_pool{$self};
        delete $xsd_pool{$self};
        $self->_DESTROY;
    }
}
# close()
#
# Close the underlying reader.  Maps libxml2's convention (0 = success,
# -1 = failure) onto Perl's close() convention (true on success).
sub close {
    my ($reader) = @_;

    my $status = $reader->_close;
    return $status == 0 ? 1 : 0;
}
# preservePattern($pattern, \%ns_map or \@ns_list)
#
# Tell the reader to preserve all nodes matching the XPath $pattern.  A
# hashref of prefix => URI pairs is converted to the flat (URI, prefix,
# ...) list expected by the XS layer; anything else is passed through.
sub preservePattern {
    my $reader=shift;
    my ($pattern,$ns_map)=@_;

    if (ref($ns_map) eq 'HASH') {
        # translate prefix=>URL hash to a (URL,prefix) list
        $reader->_preservePattern($pattern,[reverse %$ns_map]);
    } else {
        $reader->_preservePattern(@_);
    }
}
# nodePath()
#
# Return the XPath-like path of the current node with the positional
# predicates stripped: sibling counts inside the buffered fragment are
# essentially arbitrary and would only mislead callers.
sub nodePath {
    my ($reader) = @_;

    my $raw_path = $reader->_nodePath;
    $raw_path =~ s/\[\d+\]//g;    # /foo[1]/bar[2] -> /foo/bar
    return $raw_path;
}
1;
__END__
| Dokaponteam/ITF_Project | xampp/perl/vendor/lib/XML/LibXML/Reader.pm | Perl | mit | 5,827 |
############################################################################
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package TestDriverScript;
###############################################################################
# Test driver for pig nightly tests.
#
#
my $ROOT=undef;
if (defined $ENV{'PIG_HARNESS_ROOT'} ){
$ROOT= $ENV{'PIG_HARNESS_ROOT'};
} else {
die "FATAL ERROR: $0 - You must set PIG_HARNESS_ROOT to the root directory of the pig_harness";
}
#Set library paths
unshift( @INC, "$ROOT/libexec" );
unshift( @INC, ".");
use TestDriverPig;
use IPC::Run; # don't do qw(run), it screws up TestDriver which also has a run method
use File::Path;
use Digest::MD5 qw(md5_hex);
use Util;
use strict;
use English;
our @ISA = "TestDriverPig";
#########################################################################
# Sub: new
# TestDriverPigCmdline Constructor.
#
#
# Parameters:
# None
#
# Returns:
# None
sub new
{
    # Call our parent
    my ($proto) = @_;
    my $class = ref($proto) || $proto;
    my $self = $class->SUPER::new;

    # Script-based tests carry no benchmark comparison; this flag makes
    # compare() (below) pass unconditionally.
    $self->{'ignore'} = "true";

    bless($self, $class);
    return $self;
}
########################################################################
# Sub: runTest
# Runs the test and returns the results.
# -Write the pig script to a file.
# -Run the command
# -Copy result file out of hadoop
# -Sort and postprocess the result if necessary
#
# Parameters:
# $testCmd -
# $log -
#
# Returns:
# hash reference containg the test result
#
sub runTest
{
    my ($self, $testCmd, $log) = @_;
    my $subName = (caller(0))[3];
    my %result;

    # extract the current zebra.jar file path from the classpath
    # and enter it in the hash for use in the substitution of :ZEBRAJAR:
    my $zebrajar = $testCmd->{'cp'};
    $zebrajar =~ s/zebra.jar.*/zebra.jar/;   # drop everything after zebra.jar
    $zebrajar =~ s/.*://;                    # keep only that classpath entry
    $testCmd->{'zebrajar'} = $zebrajar;

    # Dispatch on the kind of test described by this config entry.
    if( $testCmd->{'pig'} ){
        return runPig( $self, $testCmd, $log );
    } elsif( $testCmd->{'pigsql'} ){
        return runPigSql( $self, $testCmd, $log );
    } elsif( $testCmd->{'script'} ){
        return runScript( $self, $testCmd, $log );
    }
    # NOTE(review): falls through to an empty %result (a hash, not a ref)
    # when no known test type is present.
    return %result;
}
# runPig($self, $testCmd, $log) -> \%result
#
# Materialize the test's Pig Latin source (with all :PLACEHOLDER: tokens
# substituted from the test configuration), run it through the pig command
# line, and return a hashref with rc/stdout/stderr.
sub runPig
{
    my ($self, $testCmd, $log) = @_;
    my $subName = (caller(0))[3];
    my %result;

    # NOTE(review): '=~ "true"' is a regex match; returns scalar 1, not a
    # result hashref, when the test is marked ignored.
    return 1 if ( $testCmd->{'ignore'}=~ "true" );

    # Write the pig script to a file.
    my $pigfile = $testCmd->{'localpath'} . $testCmd->{'group'} . "_" . $testCmd->{'num'} . ".pig";
    my $outdir = $testCmd->{'outlpath'} . $testCmd->{'group'} . "_" . $testCmd->{'num'} . ".out";
    my $outfile = "$outdir/pig.out";

    # Expand every :PLACEHOLDER: token against the test configuration.
    # NOTE(review): duplicates replaceParameters() below; keep in sync.
    my $pigcmd = $testCmd->{'pig'};
    $pigcmd =~ s/:INPATH:/$testCmd->{'inpathbase'}/g;
    $pigcmd =~ s/:OUTPATH:/$outfile/g;
    $pigcmd =~ s/:FUNCPATH:/$testCmd->{'funcjarPath'}/g;
    $pigcmd =~ s/:PIGGYBANKPATH:/$testCmd->{'piggybankjarPath'}/g;
    $pigcmd =~ s/:ZEBRAJAR:/$testCmd->{'zebrajar'}/g;
    $pigcmd =~ s/:RUNID:/$testCmd->{'UID'}/g;
    $pigcmd =~ s/:PIGHARNESS:/$ENV{PIG_HARNESS_ROOT}/g;
    $pigcmd =~ s/:USRHOMEPATH:/$testCmd->{'userhomePath'}/g;
    $pigcmd =~ s/:SCRIPTHOMEPATH:/$testCmd->{'scriptPath'}/g;
    $pigcmd =~ s/:DBUSER:/$testCmd->{'dbuser'}/g;
    $pigcmd =~ s/:DBNAME:/$testCmd->{'dbdb'}/g;
    $pigcmd =~ s/:LOCALINPATH:/$testCmd->{'localinpathbase'}/g;
    $pigcmd =~ s/:LOCALOUTPATH:/$testCmd->{'localoutpathbase'}/g;
    $pigcmd =~ s/:BMPATH:/$testCmd->{'benchmarkPath'}/g;
    $pigcmd =~ s/:TMP:/$testCmd->{'tmpPath'}/g;
    $pigcmd =~ s/:FILER:/$testCmd->{'filerPath'}/g;
    $pigcmd =~ s/:LATESTOUTPUTPATH:/$self->{'latestoutputpath'}/g;
    #my $pigcmd = replaceParameters( $testCmd->{'pig'}, $outfile, $testCmd, $log );

    open(FH, ">$pigfile") or die "Unable to open file $pigfile to write pig script, $ERRNO\n";
    print FH $pigcmd . "\n";
    close(FH);

    # Run the command
    my @cmd = Util::getBasePigCmd($testCmd);

    # Add option -l giving location for secondary logs
    my $localdir = $testCmd->{'localpath'} . $testCmd->{'group'} . "_" . $testCmd->{'num'} . ".out";
    mkdir $localdir;
    my $locallog = $testCmd->{'localpath'} . $testCmd->{'group'} . "_" . $testCmd->{'num'} . ".log";
    push(@cmd, "-l");
    push(@cmd, $locallog);

    # Add pig parameters if they're provided
    if (defined($testCmd->{'pig_params'})) {
        push(@cmd, @{$testCmd->{'pig_params'}});
    }

    push(@cmd, $pigfile);

    print $log "Going to run pig command: @cmd\n";
    print $log "Pig script contains: $pigcmd\n";

    IPC::Run::run(\@cmd, \undef, \$result{'stdout'}, \$result{'stderr'});
    # print `@cmd`;
    $result{'rc'} = $? >> 8;

    return \%result;
}
# runPigSql($self, $testCmd, $log) -> \%result
#
# Same as runPig but for Pig SQL tests: substitutes placeholders, writes
# the script, runs it through the pig SQL command line and captures
# rc/stdout/stderr.
sub runPigSql
{
    my ($self, $testCmd, $log) = @_;
    my $subName = (caller(0))[3];
    my %result;

    # NOTE(review): returns scalar 1 (not a hashref) for ignored tests.
    return 1 if ( $testCmd->{'ignore'}=~ "true" );

    # Write the pig script to a file.
    my $pigfile = $testCmd->{'localpath'} . $testCmd->{'group'} . "_" . $testCmd->{'num'} . ".pig";
    my $outdir = $testCmd->{'outlpath'} . $testCmd->{'group'} . "_" . $testCmd->{'num'} . ".out";
    my $outfile = "$outdir/pig.out";

    # Expand every :PLACEHOLDER: token against the test configuration.
    # NOTE(review): duplicates replaceParameters() below; keep in sync.
    my $pigcmd = $testCmd->{'pigsql'};
    $pigcmd =~ s/:INPATH:/$testCmd->{'inpathbase'}/g;
    $pigcmd =~ s/:OUTPATH:/$outfile/g;
    $pigcmd =~ s/:FUNCPATH:/$testCmd->{'funcjarPath'}/g;
    $pigcmd =~ s/:PIGGYBANKPATH:/$testCmd->{'piggybankjarPath'}/g;
    $pigcmd =~ s/:RUNID:/$testCmd->{'UID'}/g;
    $pigcmd =~ s/:PIGHARNESS:/$ENV{PIG_HARNESS_ROOT}/g;
    $pigcmd =~ s/:USRHOMEPATH:/$testCmd->{'userhomePath'}/g;
    $pigcmd =~ s/:SCRIPTHOMEPATH:/$testCmd->{'scriptPath'}/g;
    $pigcmd =~ s/:DBUSER:/$testCmd->{'dbuser'}/g;
    $pigcmd =~ s/:DBNAME:/$testCmd->{'dbdb'}/g;
    $pigcmd =~ s/:LOCALINPATH:/$testCmd->{'localinpathbase'}/g;
    $pigcmd =~ s/:LOCALOUTPATH:/$testCmd->{'localoutpathbase'}/g;
    $pigcmd =~ s/:BMPATH:/$testCmd->{'benchmarkPath'}/g;
    $pigcmd =~ s/:TMP:/$testCmd->{'tmpPath'}/g;
    $pigcmd =~ s/:FILER:/$testCmd->{'filerPath'}/g;
    $pigcmd =~ s/:LATESTOUTPUTPATH:/$self->{'latestoutputpath'}/g;
    #my $pigcmd = replaceParameters( $testCmd->{'pig'}, $outfile, $testCmd, $log );

    open(FH, ">$pigfile") or die "Unable to open file $pigfile to write pig script, $ERRNO\n";
    print FH $pigcmd . "\n";
    close(FH);

    # Run the command
    # NOTE(review): this "my $outfile" shadows the pig.out path used in the
    # :OUTPATH: substitution above; the run's canonical output path is
    # recorded for later tests via latestoutputpath.
    my $outfile = $testCmd->{'outpath'} . $testCmd->{'group'} . "_" . $testCmd->{'num'} . ".out";
    $testCmd->{'testoutpath'}=$outfile;
    $self->{'latestoutputpath'}=$outfile;

    # Pig _SQL_ command
    my @cmd = Util::getBasePigSqlCmd($testCmd);

    # Add option -l giving location for secondary logs
    my $localdir = $testCmd->{'localpath'} . $testCmd->{'group'} . "_" . $testCmd->{'num'} . ".out";
    mkdir $localdir;
    my $locallog = $testCmd->{'localpath'} . $testCmd->{'group'} . "_" . $testCmd->{'num'} . ".log";
    push(@cmd, "-l");
    push(@cmd, $locallog);

    # Add pig parameters if they're provided
    if (defined($testCmd->{'pig_params'})) {
        push(@cmd, @{$testCmd->{'pig_params'}});
    }

    push(@cmd, $pigfile);

    print $log "Going to run pig sql command: @cmd\n";
    print $log "Pig Sql script contains: $pigcmd\n";

    IPC::Run::run(\@cmd, \undef, \$result{'stdout'}, \$result{'stderr'});
    $result{'rc'} = $? >> 8;

    return \%result;
}
########################################################################
# Sub: runScript
# Runs the Script and returns the results.
# -Write the script to a file.
# -Run the command
#
# Parameters:
# $testCmd -
# $log -
#
# Returns:
# hash reference containg the test result
#
sub runScript
{
    my ($self, $testCmd, $log) = @_;
    my $subName = (caller(0))[3];
    my %result;

    # NOTE(review): returns scalar 1 (not a hashref) for ignored tests.
    return 1 if ( $testCmd->{'ignore'}=~ "true" );

    # Write the pig script to a file.
    my $script = $testCmd->{'localpath'} . $testCmd->{'group'} . "_" . $testCmd->{'num'} . ".sh";
    my $outdir = $testCmd->{'localpath'} . $testCmd->{'group'} . "_" . $testCmd->{'num'} . ".out";

    print $log "Attempting to create $outdir\n";
    # NOTE(review): "-e outdir" tests the literal bareword, not $outdir.
    mkpath( [ $outdir ] , 1, 0755) if ( ! -e outdir );
    if ( ! -e $outdir ){
        print $log "$0.$subName FATAL could not mkdir $outdir\n";
        die "$0.$subName FATAL could not mkdir $outdir\n";
    }

    my $outfile = "$outdir/script.out";

    # Expand every :PLACEHOLDER: token against the test configuration.
    # NOTE(review): duplicates replaceParameters() below; keep in sync.
    my $cmd = $testCmd->{'script'};
    $cmd =~ s/:INPATH:/$testCmd->{'inpathbase'}/g;
    $cmd =~ s/:OUTPATH:/$outfile/g;
    $cmd =~ s/:FUNCPATH:/$testCmd->{'funcjarPath'}/g;
    $cmd =~ s/:PIGGYBANKPATH:/$testCmd->{'piggybankjarPath'}/g;
    $cmd =~ s/:RUNID:/$testCmd->{'UID'}/g;
    $cmd =~ s/:PIGHARNESS:/$ENV{PIG_HARNESS_ROOT}/g;
    $cmd =~ s/:USRHOMEPATH:/$testCmd->{'userhomePath'}/g;
    $cmd =~ s/:SCRIPTHOMEPATH:/$testCmd->{'scriptPath'}/g;
    $cmd =~ s/:DBUSER:/$testCmd->{'dbuser'}/g;
    $cmd =~ s/:DBNAME:/$testCmd->{'dbdb'}/g;
    $cmd =~ s/:LOCALINPATH:/$testCmd->{'localinpathbase'}/g;
    $cmd =~ s/:LOCALOUTPATH:/$testCmd->{'localoutpathbase'}/g;
    $cmd =~ s/:BMPATH:/$testCmd->{'benchmarkPath'}/g;
    $cmd =~ s/:TMP:/$testCmd->{'tmpPath'}/g;
    $cmd =~ s/:FILER:/$testCmd->{'filerPath'}/g;
    # my $cmd = replaceParameters( $testCmd->{'script'}, $outfile, $testCmd, $log );

    open(FH, ">$script") or die "Unable to open file $script to write pig script, $ERRNO\n";
    print FH $cmd . "\n";
    close(FH);

    #my @cmds = split (/$/, $cmd);
    #push(@cmds, $cmd);

    print $log "$0:$subName RESULT ARE IN FILE ($outfile)\n";
    print $log "$0:$subName SCRIPT CONTAINS ($script): \n$cmd\n";

    # Make the script executable and run it via the shell, appending both
    # stdout and stderr to the output file.
    my @result=`chmod +x $script`;
    my $command= "$script >> $outfile 2>&1";
    print $log "Going to run command: ($command)\n";
    @result=`$command`;
    # NOTE(review): raw $? here (runPig uses $? >> 8) -- inconsistent rc scale.
    $result{'rc'} = $?;
    print $log @result;

    # IPC::Run::run(\@cmds, \undef, \$result{'stdout'}, \$result{'stderr'});
    #IPC::Run::run(\@cmds, \undef, \$result{'stdout'}, \$result{'stderr'});
    #$result{'rc'} = $? >> 8;

    return \%result;
}
sub replaceParameters {
    # Expand the harness :TOKEN: placeholders in a command string.
    #
    # $cmd     - raw command text containing :TOKEN: placeholders
    # $outfile - value substituted for :OUTPATH:
    # $testCmd - hashref supplying the remaining substitution values
    # $log     - log filehandle (unused here)
    #
    # Returns the expanded command string; $cmd is modified only locally.
    #
    # BUG FIX: the sub was declared with an empty prototype ("()"), which
    # makes any direct call that passes arguments a compile-time error and
    # serves no purpose for method calls.
    my ($self, $cmd, $outfile, $testCmd, $log) = @_;
    $cmd =~ s/:INPATH:/$testCmd->{'inpathbase'}/g;
    $cmd =~ s/:OUTPATH:/$outfile/g;
    $cmd =~ s/:FUNCPATH:/$testCmd->{'funcjarPath'}/g;
    $cmd =~ s/:PIGGYBANKPATH:/$testCmd->{'piggybankjarPath'}/g;
    $cmd =~ s/:RUNID:/$testCmd->{'UID'}/g;
    $cmd =~ s/:PIGHARNESS:/$ENV{PIG_HARNESS_ROOT}/g;
    $cmd =~ s/:USRHOMEPATH:/$testCmd->{'userhomePath'}/g;
    $cmd =~ s/:SCRIPTHOMEPATH:/$testCmd->{'scriptPath'}/g;
    $cmd =~ s/:DBUSER:/$testCmd->{'dbuser'}/g;
    $cmd =~ s/:DBNAME:/$testCmd->{'dbdb'}/g;
    $cmd =~ s/:LOCALINPATH:/$testCmd->{'localinpathbase'}/g;
    $cmd =~ s/:LOCALOUTPATH:/$testCmd->{'localoutpathbase'}/g;
    $cmd =~ s/:BMPATH:/$testCmd->{'benchmarkPath'}/g;
    $cmd =~ s/:TMP:/$testCmd->{'tmpPath'}/g;
    $cmd =~ s/:LATESTOUTPUTPATH:/$self->{'latestoutputpath'}/g;
    return $cmd;
}
########################################################################
# Sub: generateBenchmark
# Generate the database benchmark.
#
# Parameters:
# $testCmd - hashref describing the test configuration
# $log     - log filehandle
#
# Returns:
# hash reference containing the expected test result
#
sub generateBenchmark
{
    my ($self, $testCmd, $log) = @_;

    # The benchmark is simply the expectation-related settings copied out
    # of the test configuration; undefined settings are omitted.
    my @expectation_keys =
        ('expected_out', 'expected_out_regex', 'expected_err', 'expected_err_regex', 'rc');
    my %result = map  { $_ => $testCmd->{$_} }
                 grep { defined $testCmd->{$_} } @expectation_keys;

    return \%result;
}
########################################################################
# Sub: compare
# Compare the test results to the benchmark results
#
# Parameters:
# $testResult, $benchmarkResult, $log (all currently ignored)
#
# Returns:
# the result of the test run. 1 if the test passes.
sub compare
{
    # Unconditionally report success: these script-driven tests verify
    # their own results, so the harness-level comparison is a no-op.
    # (The arguments ($self, $testResult, $benchmarkResult, $log) are
    # intentionally ignored.)
    return 1;
}
sub compareSAV
{
    # Legacy comparison routine (superseded by the no-op compare above).
    # Checks, in priority order: return code, exact/regex stdout match,
    # exact/regex stderr match.  Returns true on pass, false on mismatch.
    #
    # IMPORTANT NOTE:
    # If you are using a regex to compare stdout or stderr and the pattern
    # spans two lines, spell the newline explicitly as \n in the regex.
    my ($self, $testResult, $benchmarkResult, $log) = @_;
    my $subName = (caller(0))[3];

    # Return-code mismatch is checked first and fails immediately.
    if (defined $benchmarkResult->{'rc'} &&
        ($testResult->{'rc'} != $benchmarkResult->{'rc'})) {
        print $log "Test and benchmark return code differ:\n";
        print $log "Test rc = " . $testResult->{'rc'} . "\n";
        print $log "Expected rc = " . $benchmarkResult->{'rc'} . "\n";
        return 0;
    }

    # Exact stdout match takes precedence over all other checks.
    if (defined $benchmarkResult->{'expected_out'}) {
        print $log "$0::$subName INFO Checking test result <$testResult->{'stdout'}> " .
            "as exact match against expected <$benchmarkResult->{'expected_out'}>\n";
        return $testResult->{'stdout'} eq $benchmarkResult->{'expected_out'};
    }

    if (defined $benchmarkResult->{'expected_out_regex'}) {
        print $log "$0::$subName INFO Checking test result for regular expression " .
            "<$benchmarkResult->{'expected_out_regex'}> in " .
            "<$testResult->{'stdout'}>\n";
        return $testResult->{'stdout'} =~ $benchmarkResult->{'expected_out_regex'};
    }

    if (defined $benchmarkResult->{'expected_err'}) {
        print $log "$0::$subName INFO Checking test result <$testResult->{'stderr'}> " .
            "as exact match against expected <$benchmarkResult->{'expected_err'}>\n";
        return $testResult->{'stderr'} eq $benchmarkResult->{'expected_err'};
    }

    if (defined $benchmarkResult->{'expected_err_regex'}) {
        print $log "$0::$subName INFO Checking test result for regular expression " .
            "<$benchmarkResult->{'expected_err_regex'}> in " .
            "<$testResult->{'stderr'}>\n";
        return $testResult->{'stderr'} =~ $benchmarkResult->{'expected_err_regex'};
    }

    # No expectations configured: the test passes by default.
    return 1;
}

1;
| hxquangnhat/PIG-ROLLUP-MRCUBE | test/e2e/pig/drivers/TestDriverScript.pm | Perl | apache-2.0 | 15,418 |
use strict;
use warnings;
use PostgresNode;
use TestLib;
use Test::More;
# Skip on Msys2: database/role names containing high-bit bytes fail there.
if ($^O eq 'msys' && `uname -or` =~ /^2.*Msys/)
{
	plan skip_all => 'High bit name tests fail on Msys2';
}
else
{
	plan tests => 14;
}
# We're going to use byte sequences that aren't valid UTF-8 strings. Use
# LATIN1, which accepts any byte and has a conversion from each byte to UTF-8.
$ENV{LC_ALL} = 'C';
$ENV{PGCLIENTENCODING} = 'LATIN1';
# Create database and user names covering the range of LATIN1
# characters, for use in a connection string by pg_dumpall. Skip ','
# because of pg_regress --create-role, skip [\n\r] because pg_dumpall
# does not allow them. We also skip many ASCII letters, to keep the
# total number of tested characters to what will fit in four names.
# The odds of finding something interesting by testing all ASCII letters
# seem too small to justify the cycles of testing a fifth name.
my $dbname1 =
  'regression'
  . generate_ascii_string(1, 9)
  . generate_ascii_string(11, 12)
  . generate_ascii_string(14, 33)
  . ($TestLib::windows_os ? '' : '"x"') # IPC::Run mishandles '"' on Windows
  . generate_ascii_string(35, 43) # skip ','
  . generate_ascii_string(45, 54);
my $dbname2 = 'regression' . generate_ascii_string(55, 65) # skip 'B'-'W'
  . generate_ascii_string(88, 99) # skip 'd'-'w'
  . generate_ascii_string(120, 149);
my $dbname3 = 'regression' . generate_ascii_string(150, 202);
my $dbname4 = 'regression' . generate_ascii_string(203, 255);
# Role names mirror the database names ("regression" -> "regress_" prefix).
(my $username1 = $dbname1) =~ s/^regression/regress_/;
(my $username2 = $dbname2) =~ s/^regression/regress_/;
(my $username3 = $dbname3) =~ s/^regression/regress_/;
(my $username4 = $dbname4) =~ s/^regression/regress_/;
# Use different bootstrap superuser names on the source and destination
# clusters so dump/restore cannot silently rely on a matching name.
my $src_bootstrap_super = 'regress_postgres';
my $dst_bootstrap_super = 'boot';
my $node = get_new_node('main');
$node->init(extra =>
	  [ '-U', $src_bootstrap_super, '--locale=C', '--encoding=LATIN1' ]);
# prep pg_hba.conf and pg_ident.conf
$node->run_log(
	[
		$ENV{PG_REGRESS}, '--config-auth',
		$node->data_dir, '--user',
		$src_bootstrap_super, '--create-role',
		"$username1,$username2,$username3,$username4"
	]);
$node->start;
# Dump destinations: a throwaway file, a plain SQL dump, and a directory-
# format dump used by the parallel dump/restore tests below.
my $backupdir = $node->backup_dir;
my $discard = "$backupdir/discard.sql";
my $plain = "$backupdir/plain.sql";
my $dirfmt = "$backupdir/dirfmt";
$node->run_log([ 'createdb', '-U', $src_bootstrap_super, $dbname1 ]);
$node->run_log(
	[ 'createuser', '-U', $src_bootstrap_super, '-s', $username1 ]);
$node->run_log([ 'createdb', '-U', $src_bootstrap_super, $dbname2 ]);
$node->run_log(
	[ 'createuser', '-U', $src_bootstrap_super, '-s', $username2 ]);
$node->run_log([ 'createdb', '-U', $src_bootstrap_super, $dbname3 ]);
$node->run_log(
	[ 'createuser', '-U', $src_bootstrap_super, '-s', $username3 ]);
$node->run_log([ 'createdb', '-U', $src_bootstrap_super, $dbname4 ]);
$node->run_log(
	[ 'createuser', '-U', $src_bootstrap_super, '-s', $username4 ]);
# For these tests, pg_dumpall -r is used because it produces a short
# dump.
# Each run connects to a high-bit-name database as a high-bit-name role,
# exercising the quoting of both in generated connection strings.
$node->command_ok(
	[
		'pg_dumpall', '--roles-only', '-f', $discard, '--dbname',
		$node->connstr($dbname1),
		'-U', $username4
	],
	'pg_dumpall with long ASCII name 1');
$node->command_ok(
	[
		'pg_dumpall', '--no-sync', '--roles-only', '-f', $discard, '--dbname',
		$node->connstr($dbname2),
		'-U', $username3
	],
	'pg_dumpall with long ASCII name 2');
$node->command_ok(
	[
		'pg_dumpall', '--no-sync', '--roles-only', '-f', $discard, '--dbname',
		$node->connstr($dbname3),
		'-U', $username2
	],
	'pg_dumpall with long ASCII name 3');
$node->command_ok(
	[
		'pg_dumpall', '--no-sync', '--roles-only', '-f', $discard, '--dbname',
		$node->connstr($dbname4),
		'-U', $username1
	],
	'pg_dumpall with long ASCII name 4');
$node->command_ok(
	[
		'pg_dumpall', '-U',
		$src_bootstrap_super, '--no-sync',
		'--roles-only', '-l',
		'dbname=template1'
	],
	'pg_dumpall -l accepts connection string');
# Database names containing newline/carriage return must make pg_dumpall
# fail outright, since it cannot dump them safely.
$node->run_log([ 'createdb', '-U', $src_bootstrap_super, "foo\n\rbar" ]);
# not sufficient to use -r here
$node->command_fails(
	[ 'pg_dumpall', '-U', $src_bootstrap_super, '--no-sync', '-f', $discard ],
	'pg_dumpall with \n\r in database name');
$node->run_log([ 'dropdb', '-U', $src_bootstrap_super, "foo\n\rbar" ]);
# make a table, so the parallel worker has something to dump
$node->safe_psql(
	$dbname1,
	'CREATE TABLE t0()',
	extra_params => [ '-U', $src_bootstrap_super ]);
# XXX no printed message when this fails, just SIGPIPE termination
$node->command_ok(
	[
		'pg_dump', '-Fd', '--no-sync', '-j2', '-f', $dirfmt, '-U', $username1,
		$node->connstr($dbname1)
	],
	'parallel dump');
# recreate $dbname1 for restore test
$node->run_log([ 'dropdb', '-U', $src_bootstrap_super, $dbname1 ]);
$node->run_log([ 'createdb', '-U', $src_bootstrap_super, $dbname1 ]);
$node->command_ok(
	[
		'pg_restore', '-v', '-d', 'template1',
		'-j2', '-U', $username1, $dirfmt
	],
	'parallel restore');
$node->run_log([ 'dropdb', '-U', $src_bootstrap_super, $dbname1 ]);
$node->command_ok(
	[
		'pg_restore', '-C', '-v', '-d',
		'template1', '-j2', '-U', $username1,
		$dirfmt
	],
	'parallel restore with create');
# Full plain-text dump, replayed into fresh clusters below.
$node->command_ok(
	[ 'pg_dumpall', '--no-sync', '-f', $plain, '-U', $username1 ],
	'take full dump');
system_log('cat', $plain);
my ($stderr, $result);
# Restore role name containing a quote, backslash, '=', newline and a
# double quote: stresses the quoting paths of generated \connect commands.
my $restore_super = qq{regress_a'b\\c=d\\ne"f};
# Restore full dump through psql using environment variables for
# dbname/user connection parameters
my $envar_node = get_new_node('destination_envar');
$envar_node->init(extra =>
	  [ '-U', $dst_bootstrap_super, '--locale=C', '--encoding=LATIN1' ]);
$envar_node->run_log(
	[
		$ENV{PG_REGRESS}, '--config-auth',
		$envar_node->data_dir, '--user',
		$dst_bootstrap_super, '--create-role',
		$restore_super
	]);
$envar_node->start;
# make superuser for restore
$envar_node->run_log(
	[ 'createuser', '-U', $dst_bootstrap_super, '-s', $restore_super ]);
{
	local $ENV{PGPORT} = $envar_node->port;
	local $ENV{PGUSER} = $restore_super;
	$result = run_log([ 'psql', '-X', '-f', $plain ], '2>', \$stderr);
}
ok($result,
	'restore full dump using environment variables for connection parameters'
);
is($stderr, '', 'no dump errors');
# Restore full dump through psql using command-line options for
# dbname/user connection parameters. "\connect dbname=" forgets
# user/port from command line.
$restore_super =~ s/"//g
  if $TestLib::windows_os; # IPC::Run mishandles '"' on Windows
my $cmdline_node = get_new_node('destination_cmdline');
$cmdline_node->init(extra =>
	  [ '-U', $dst_bootstrap_super, '--locale=C', '--encoding=LATIN1' ]);
$cmdline_node->run_log(
	[
		$ENV{PG_REGRESS}, '--config-auth',
		$cmdline_node->data_dir, '--user',
		$dst_bootstrap_super, '--create-role',
		$restore_super
	]);
$cmdline_node->start;
$cmdline_node->run_log(
	[ 'createuser', '-U', $dst_bootstrap_super, '-s', $restore_super ]);
{
	$result = run_log(
		[
			'psql', '-p', $cmdline_node->port, '-U',
			$restore_super, '-X', '-f', $plain
		],
		'2>',
		\$stderr);
}
ok($result,
	'restore full dump with command-line options for connection parameters');
is($stderr, '', 'no dump errors');
| 50wu/gpdb | src/bin/pg_dump/t/010_dump_connstr.pl | Perl | apache-2.0 | 7,201 |
package Statistics::Descriptive::Weighted;
$VERSION = '0.5';
use Statistics::Descriptive;
use Data::Dumper;
package Statistics::Descriptive::Weighted::Sparse;
use strict;
use vars qw($AUTOLOAD @ISA %fields);
@ISA = qw(Statistics::Descriptive::Sparse);
use Carp qw(cluck confess);
##Fields added on top of those inherited from
##Statistics::Descriptive::Sparse; each gets an accessor method below.
%fields = (
  weight => 0, # running sum of weights over all data added
  sum_squares => 0, # weighted sum of squared deviations from the mean
  weight_homozyg => 0, # sum of squared normalized weights (bias correction)
  biased_variance => 0, # variance without the weight-based bias correction
  biased_standard_deviation => 0,
);
##Read-only accessors for every new field except "weight"; "weight" gets
##a private accessor (_weight) wrapped by the public weight() sub below.
__PACKAGE__->_make_accessors( [ grep { $_ ne "weight" } keys(%fields) ] );
__PACKAGE__->_make_private_accessors(["weight"]);
##Have to override the base method to add new fields to the object
##The proxy method from base class is still valid
sub new {
  ## Construct a weighted sparse statistics object: build the base
  ## Statistics::Descriptive::Sparse object, then graft the weighted
  ## fields (and their accessor permissions) onto it.
  my $proto = shift;
  my $class = ref($proto) || $proto;
  my $self  = $class->SUPER::new();
  while ( my ($field, $default) = each %fields ) {
    $self->{'_permitted'}{$field} = $default;
    $self->{$field} = $default;
  }
  return bless $self, $class;
}
sub add_data {
  ## Add weighted variates and update all running statistics in one pass.
  ##
  ## Arguments: \@data and (optionally) \@weights, parallel arrays of equal
  ## length.  Missing weights default to 1; nonpositive weights cause the
  ## corresponding datum to be skipped (with a warning).
  ## Returns 1 on success, undef (with a warning) on malformed input.
  my $self = shift; ##Myself
  my $oldmean;
  my $oldweight;
  my ($min,$max);  # NOTE(review): $min, $max and $aref are never used here
  my $aref;
  if ( (not ref $_[0] eq 'ARRAY') || (exists $_[1] and (not (ref $_[1] eq 'ARRAY') || @{$_[0]} != @{$_[1]} ) ) ) {
    cluck "WARNING: Expected input are two references to two arrays of equal length; first data, then positive weights. Second array is optional.\n";
    return undef;
  }
  my ($datum,$weight) = @_;
  ##Calculate new mean, pseudo-variance, min and max;
  ##The on-line weighted incremental algorithm for variance is based on West 1979 from Wikipedia
  ##D. H. D. West (1979). Communications of the ACM, 22, 9, 532-535: Updating Mean and Variance Estimates: An Improved Method
  ## NEW in Version 0.4:
  ## I calculate a sample weighted variance based on normalized weights rather than the sample size
  ## correction factor is: 1 / (1 - sum [w_i / (sum w_i) ]^2)
  ## call H = sum [w_i / (sum w_i) ]^2. An online update eq for H is H_new = (sum.w_old^2 * H_old) + weight^2) / sum.w^2
  ## correction factor is then 1 / (1 - H_new)
  my $weighterror;
  for (0..$#$datum ) {
    # Default the weight to 1; skip (but remember) nonpositive weights.
    if (not defined $$weight[$_]) {
      $$weight[$_] = 1;
    }
    if ($$weight[$_] <= 0) {
      $weighterror = 1;
      next;
    }
    # West's update: fold this datum into weight sum, H, mean and S.
    $oldmean = $self->{mean};
    $oldweight = $self->{weight};
    $self->{weight} += $$weight[$_];
    $self->{weight_homozyg} = ((($oldweight ** 2 * $self->{weight_homozyg}) + $$weight[$_] ** 2) / ( $self->{weight} ** 2 ));
    $self->{count}++;
    $self->{sum} += ($$weight[$_] * $$datum[$_]);
    $self->{mean} += (($$weight[$_] / $self->{weight} ) * ($$datum[$_] - $oldmean));
    $self->{sum_squares} += (($$weight[$_] / $self->{weight} ) * ($$datum[$_] - $oldmean) ** 2) * $oldweight;
    if (not defined $self->{max} or $$datum[$_] > $self->{max}) {
      $self->{max} = $$datum[$_];
    }
    if (not defined $self->{min} or $$datum[$_] < $self->{min}) {
      $self->{min} = $$datum[$_];
    }
  }
  cluck "WARNING: One or more data with nonpositive weights were skipped.\n" if ($weighterror);
  $self->{sample_range} = $self->{max} - $self->{min};
  # Variances are only defined once at least two observations exist.
  if ($self->{count} > 1) {
    $self->{variance} = ($self->{sum_squares} / ((1 - $self->{weight_homozyg}) * $self->{weight}));
    $self->{standard_deviation} = sqrt( $self->{variance});
    $self->{biased_variance} = ($self->{sum_squares} / $self->{weight});
    $self->{biased_standard_deviation} = sqrt( $self->{biased_variance});
  }
  return 1;
}
sub weight {
  ## Public wrapper for the private _weight accessor: returns the running
  ## sum of weights.  Takes no arguments (the sparse object keeps no
  ## per-variate weights); extra arguments draw a warning.
  my $self = shift;
  cluck "WARNING: Sparse statistics object expects zero arguments to weight function, returns sum of weights." if @_;
  return $self->_weight();
}
## OVERRIDES FOR UNSUPPORTED FUNCTIONS
## Positional index queries have no meaning for weighted data, so both
## overrides abort with a stack trace instead of returning nonsense.
sub mindex { confess "ERROR: Statistics::Descriptive::Weighted does not support this function."; }
sub maxdex { confess "ERROR: Statistics::Descriptive::Weighted does not support this function."; }

1;
package Statistics::Descriptive::Weighted::Full;
use Carp qw(cluck confess);
use Tree::Treap;
use strict;
use vars qw(@ISA %fields);
@ISA = qw(Statistics::Descriptive::Weighted::Sparse);
##Create a list of fields not to remove when data is updated
%fields = (
  _permitted => undef, ##Place holder for the inherited key hash
  data => undef, ##treap: variate value -> {weight, weights, count, order, cdf, rt_tail_prob, percentile}
  did_cdf => undef, ##flag to indicate whether CDF/quantile fun has been computed or not
  quantile => undef, ##treap: cumulative proportion -> variate (quantile lookup)
  percentile => undef, ##treap: percentile -> variate (percentile lookup)
  maxweight => 0, ##largest accumulated per-variate weight (tracks the mode)
  mode => undef, ##variate carrying the largest accumulated weight
  order => 1, ##insertion counter recording arrival order of each datum
  _reserved => undef, ##Place holder for this lookup hash
);
##Have to override the base method to add the data to the object
##The proxy method from above is still valid
sub new {
  ## Construct a full weighted statistics object: the weighted sparse
  ## object plus three numerically-keyed treaps holding the raw data and
  ## the quantile/percentile lookup tables.
  my $proto = shift;
  my $class = ref($proto) || $proto;
  my $self  = $class->SUPER::new();

  $self->{data}       = Tree::Treap->new("num"); # variate -> summary record
  $self->{quantile}   = Tree::Treap->new("num"); # built lazily by _do_cdf
  $self->{percentile} = Tree::Treap->new("num"); # built lazily by _do_cdf

  $self->{did_cdf}     = 0;
  $self->{maxweight}   = 0;
  $self->{order}       = 1;
  $self->{'_reserved'} = \%fields;

  return bless $self, $class;
}
## The treap gives relatively fast search and good performance on possibly sorted data
## The choice is motivated by heavy intended use for Empirical Distribution Function
## A lot of work is done at insertion for faster computation on search
## THE ACTUAL DATA INSERTION IS DONE AT FUNCTION _addweight
## The data structure loses information. Like a hash keys appear only once.
## The value of a key is its sum of weight for that key, and the cumulative weight
sub add_data {
  ## Add weighted variates: filter out nonpositive weights, fold each datum
  ## into the treap (tracking the mode), then delegate the running summary
  ## statistics to the Sparse superclass.  Invalidates the cached CDF and
  ## any cached derived statistics (e.g. median).
  ## Returns 1 on success, undef (with a warning) on malformed input.
  my $self = shift;
  my $key;
  if ( (not ref $_[0] eq 'ARRAY') || (exists $_[1] and (not (ref $_[1] eq 'ARRAY') || @{$_[0]} != @{$_[1]} ) ) ) {
    cluck "WARNING: Expected input are two references to two arrays of equal length; first data, then positive weights. Second array is optional.\n";
    return undef;
  }
  my ($datum,$weight) = @_;
  my $filterdatum = [];
  my $filterweight = [];
  my $weighterror;
  my $newweight;
  for (0..$#$datum) {
    if (not defined $$weight[$_]) {
      $$weight[$_] = 1;
    }
    if ($$weight[$_] > 0) {
      push @$filterdatum,$$datum[$_];
      push @$filterweight,$$weight[$_];
      # _addweight returns the variate's accumulated weight; the variate
      # with the largest accumulated weight is the mode.
      $newweight = $self->_addweight($$datum[$_], $$weight[$_]);
      if ($newweight > $self->{maxweight}) {
	$self->{maxweight} = $newweight;
	$self->{mode} = $$datum[$_];
      }
    }
    else {
      $weighterror = 1;
    }
  }
  cluck "WARNING: One or more data with nonpositive weights were skipped.\n" if ($weighterror);
  $self->SUPER::add_data($filterdatum,$filterweight); ##Perform base statistics on the data
  ##Clear the did_cdf flag
  $self->{did_cdf} = 0;
  ##Need to delete all cached keys
  foreach $key (keys %{ $self }) { # Check each key in the object
    # If it's a reserved key for this class, keep it
    next if exists $self->{'_reserved'}->{$key};
    # If it comes from the base class, keep it
    next if exists $self->{'_permitted'}->{$key};
    delete $self->{$key}; # Delete the out of date cached key
  }
  return 1;
}
sub count {
  ## With one argument: number of times that exact variate value was
  ## observed (undef if never seen).  With no arguments: total number of
  ## positively-weighted observations.  Other arities draw a warning.
  my $self = shift;
  if (@_ == 1) {
    my $record = $self->{data}->get_val($_[0]);
    return defined $record ? $record->{'count'} : $record;
  }
  if (@_ == 0) {
    return $self->{count};
  }
  cluck "WARNING: Only 1 or fewer arguments expected.";
  return 1;
}
sub weight {
  ## With one argument: total weight accumulated for that exact variate
  ## value (undef if never seen).  With no arguments: grand sum of all
  ## weights.  Other arities draw a warning.
  my $self = shift;
  if (@_ == 1) {
    my $record = $self->{data}->get_val($_[0]);
    return defined $record ? $record->{'weight'} : $record;
  }
  if (@_ == 0) {
    return $self->{weight};
  }
  cluck "WARNING: Only 1 or fewer arguments expected.";
  return 1;
}
sub _addweight {
  ## Private: fold one (variate, weight) observation into the data treap.
  ## Accumulates the variate's total weight and count, appends the raw
  ## weight and insertion-order number, and resets the cached CDF fields
  ## (they are recomputed lazily by _do_cdf).
  ## Returns the variate's new accumulated weight.
  my $self = shift;
  my $oldweight = ($self->weight($_[0]) || 0);
  my $newweight = $_[1] + $oldweight;
  my $value = $self->{data}->get_val($_[0]);
  my $weights = ($value ? $$value{'weights'} : [] );
  push @$weights, $_[1];
  my $orders = ($value ? $$value{'order'} : [] );
  push @$orders, $self->{order}++;
  my $newcount = ($self->count($_[0]) || 0) + 1;
  if (@_ == 2) { ##Assign
    my $values = {'weight' => $newweight, 'weights' => $weights, 'count' => $newcount, 'order' => $orders, 'cdf' => undef, 'rt_tail_prob' => undef, 'percentile' => undef};
    $self->{data}->insert($_[0],$values);
  }
  else {
    cluck "WARNING: Only two arguments (key, addend) expected.";
  }
  return $newweight;
}
sub _do_cdf {
  ## Private: single in-order sweep over the data treap that computes, for
  ## each distinct variate, its ECDF value P(X <= x), its right tail
  ## probability P(X >= x), and its percentile (cumulative weight minus
  ## half the variate's own weight, scaled to 0..100).  Also populates the
  ## inverse-lookup treaps used by quantile() and percentile(), and sets
  ## the did_cdf flag so the sweep runs at most once per add_data.
  my $self = shift;
  my $cumweight = 0;
  foreach my $key ($self->{data}->keys()){
    my $value = $self->{data}->get_val($key);
    my $keyweight = $self->weight($key);
    my $oldcumweight = $cumweight;
    $cumweight += $keyweight;
    my $propcumweight = $cumweight / $self->{weight};
    my $right_tail_prob = (1 - ($oldcumweight / $self->{weight}));
    my $percentile = ((100 / $self->{weight}) * ($cumweight - ($keyweight / 2)));
    $$value{'cdf'} = $propcumweight;
    $$value{'rt_tail_prob'} = $right_tail_prob;
    $$value{'percentile'} = $percentile;
    $self->{data}->insert($key,$value);
    $self->{quantile}->insert($propcumweight,$key);
    $self->{percentile}->insert($percentile,$key);
  }
  $self->{did_cdf} = 1;
  return 1;
}
sub quantile {
  ## Return the smallest observed variate whose cumulative weight
  ## proportion (ECDF) is >= the given proportion (0..1).  Out-of-range
  ## arguments draw a warning but are still looked up; a wrong number of
  ## arguments draws a warning and returns undef.
  ##
  ## Cleanup: removed an unused intermediate variable ($key) that shadowed
  ## the $keys[0] lookup actually used.
  my $self = shift;
  $self->_do_cdf() unless $self->{did_cdf};
  if (@_ == 1) { ##Inquire
    my $proportion = shift;
    cluck "WARNING: Expects an argument between 0 and 1 inclusive." if ($proportion < 0 or $proportion > 1);
    # Smallest cumulative proportion >= the request; its value in the
    # quantile treap is the corresponding variate.
    my @keys = $self->{quantile}->range_keys($proportion, undef);
    return $self->{quantile}->get_val($keys[0]);
  }
  else {
    cluck "WARNING: Exactly 1 argument expected.";
    return undef;
  }
}
sub percentile {
  ## Return the weighted sample percentile for $percent in 0..100, using
  ## linear interpolation between the bracketing observed percentiles.
  ## Requests below the smallest (or above the largest) computed percentile
  ## clamp to the minimum (maximum) observed variate.  Out-of-range or
  ## wrongly-counted arguments draw warnings but processing continues.
  my $self = shift;
  $self->_do_cdf() unless $self->{did_cdf};
  if (@_ != 1) {
    cluck "WARNING: Exactly 1 argument expected.";
  }
  my $percent = shift;
  if ($percent < 0 or $percent > 100) {
    cluck "WARNING: Expects an argument between 0 and 100 inclusive.";
  }
  my $percentile;
  if ($percent < $self->{percentile}->minimum()) {
    $percentile = $self->{data}->minimum();
  } elsif ($percent > $self->{percentile}->maximum()) {
    $percentile = $self->{data}->maximum();
  } else {
    # Bracket $percent by the largest computed percentile <= it ($lekey)
    # and the smallest >= it ($gekey), then interpolate linearly between
    # their variates.
    my @lekeys = $self->{percentile}->range_keys(undef,$percent);
    my $lekey = $lekeys[-1];
    my @gekeys = $self->{percentile}->range_keys($percent, undef);
    my $gekey = $gekeys[0];
    my $leval = $self->{percentile}->get_val($lekey);
    $percentile = $leval;
    if ($gekey != $lekey) {
      my $geval = $self->{percentile}->get_val($gekey);
      $percentile += ($percent - $lekey) / ($gekey - $lekey) * ($geval - $leval);
    }
  }
  return $percentile;
}
sub median {
  ## Weighted median: the 50th weighted percentile, computed once and
  ## cached (add_data clears the cache).
  my $self = shift;
  $self->{median} = $self->percentile(50) unless defined $self->{median};
  return $self->{median};
}
sub mode {
  ## The variate that has accumulated the most weight so far (maintained
  ## incrementally by add_data); undef before any data is added.
  my ($self) = @_;
  return $self->{mode};
}
sub cdf {
  ## Empirical CDF: proportion of total weight on variates <= $value.
  ## Returns 0 below the smallest observation; warns and returns undef on
  ## a wrong argument count.
  my $self = shift;
  $self->_do_cdf() unless $self->{did_cdf};
  unless (@_ == 1) {
    cluck "WARNING: Exactly 1 argument expected.";
    return undef;
  }
  my $value = shift;
  return 0 if $self->{data}->minimum() > $value;
  # The largest observed variate <= $value carries the cumulative weight
  # proportion we want.
  my @keys = $self->{data}->range_keys(undef, $value);
  return ${ $self->{data}->get_val($keys[-1]) }{'cdf'};
}
sub survival {
  ## Survival function: 1 - ECDF($value), i.e. the proportion of total
  ## weight on variates strictly above the largest observation <= $value.
  ## Returns 1 below the smallest observation; warns and returns undef on
  ## a wrong argument count.
  my $self = shift;
  $self->_do_cdf() unless $self->{did_cdf};
  unless (@_ == 1) {
    cluck "WARNING: Only 1 argument expected.";
    return undef;
  }
  my $value = shift;
  return 1 if $self->{data}->minimum() > $value;
  my @keys = $self->{data}->range_keys(undef, $value);
  return 1 - (${ $self->{data}->get_val($keys[-1]) }{'cdf'});
}
sub rtp {
  ## Right tail probability: proportion of total weight on variates >=
  ## $value (taken at the smallest observation >= $value).  Returns 0
  ## above the largest observation; warns and returns undef on a wrong
  ## argument count.
  my $self = shift;
  $self->_do_cdf() unless $self->{did_cdf};
  unless (@_ == 1) {
    cluck "WARNING: Only 1 argument expected.";
    return undef;
  }
  my $value = shift;
  return 0 if $self->{data}->maximum() < $value;
  my @keys = $self->{data}->range_keys($value, undef);
  return ${ $self->{data}->get_val($keys[0]) }{'rt_tail_prob'};
}
sub get_data {
  ## Return a hashref summarizing the whole weighted sample, in ascending
  ## variate order:
  ##   uniqvars   - distinct variate values
  ##   sumweights - total weight per distinct value
  ##   counts     - observation count per distinct value
  ##   cdfs, rtps, percentiles - ECDF, right-tail prob., percentile per value
  ##   vars, weights, order    - one entry per original observation
  my $self = shift;
  $self->_do_cdf() unless $self->{did_cdf};
  my ($uniqkeys, $sumweights, $keys, $weights, $counts, $cdfs, $rtps, $percentiles, $order) = ([],[],[],[],[],[],[],[],[]);
  my $key = $self->{'data'}->minimum();
  ## BUG FIX: loop while the key is *defined* rather than merely true; the
  ## old "while ($key)" stopped prematurely when a variate value of 0 (or
  ## "") was reached.  Assumes Tree::Treap::successor returns undef past
  ## the maximum key -- TODO confirm against the installed Tree::Treap.
  while (defined $key){
    my $value = $self->{data}->get_val($key);
    push @$uniqkeys, $key;
    push @$sumweights, $$value{'weight'};
    foreach my $weight (@{ $$value{'weights'} } ) {
      push @$keys, $key;
      push @$weights, $weight;
    }
    push @$order, @{ $$value{'order'}};
    push @$counts, $$value{'count'};
    push @$cdfs, $$value{'cdf'};
    push @$rtps, $$value{'rt_tail_prob'};
    push @$percentiles, $$value{'percentile'};
    $key = $self->{'data'}->successor($key);
  }
  return {'uniqvars' => $uniqkeys, 'sumweights' => $sumweights, 'counts' => $counts, 'cdfs' => $cdfs, 'rtps' => $rtps, 'vars' => $keys, 'weights' => $weights, 'percentiles' => $percentiles, 'order' => $order};
}
sub print {
  ## Dump the full internal summary (see get_data) to STDOUT via
  ## Data::Dumper; intended for debugging.
  my ($self) = @_;
  print Data::Dumper->Dump( [ $self->get_data() ] );
}
## OVERRIDES FOR UNSUPPORTED FUNCTIONS
## These Statistics::Descriptive::Full methods have no weighted analogue
## here, so each override aborts with a stack trace rather than returning
## a misleading unweighted answer.
sub sort_data              { confess "ERROR: Statistics::Descriptive::Weighted does not support this function."; }
sub presorted              { confess "ERROR: Statistics::Descriptive::Weighted does not support this function."; }
sub harmonic_mean          { confess "ERROR: Statistics::Descriptive::Weighted does not support this function."; }
sub geometric_mean         { confess "ERROR: Statistics::Descriptive::Weighted does not support this function."; }
sub trimmed_mean           { confess "ERROR: Statistics::Descriptive::Weighted does not support this function."; }
sub frequency_distribution { confess "ERROR: Statistics::Descriptive::Weighted does not support this function."; }
sub least_squares_fit      { confess "ERROR: Statistics::Descriptive::Weighted does not support this function."; }

1;
package Statistics::Descriptive;
##All modules return true.
1;
__END__
=head1 NAME
Statistics::Descriptive::Weighted - Module of basic descriptive
statistical functions for weighted variates.
=head1 SYNOPSIS
use Statistics::Descriptive::Weighted;
$stat = Statistics::Descriptive::Weighted::Full->new();
$stat->add_data([1,2,3,4],[0.1,1,10,100]); ## weights are in second argument
$mean = $stat->mean(); ## weighted mean
$var = $stat->variance(); ## weighted sample variance (unbiased estimator)
$var = $stat->biased_variance(); ## weighted sample variance (biased)
$stat->add_data([3],[10]); ## statistics are updated as variates are added
$vwt = $stat->weight(3); ## returns 20, the weight of 3
$wt = $stat->weight(); ## returns sum of weights, 121.1
$ct = $stat->count(3); ## returns 2, the number of times 3 was observed
$ct = $stat->count(); ## returns 5, the total number of observations
$med = $stat->median(); ## weighted sample median
$mode = $stat->mode(); ## returns 4, value with the most weight
$ptl = $stat->quantile(.01); ## returns 3, smallest value with cdf >= 1st %ile
$ptl = $stat->percentile(1); ## returns about 2.06, obtained by interpolation
$cdf = $stat->cdf(3); ## returns ECDF of 3 (about 17.4%)
$cdf = $stat->cdf(3.5); ## returns ECDF of 3.5 (about 17.4%, same as ECDF of 3)
$sf = $stat->survival(3); ## returns complement of ECDF(3) (about 82.6%)
$pval = $stat->rtp(4); ## returns right tail probability of 4 (100 / 121.1, about 82.6%)
$min = $stat->min(); ## returns 1, the minimum
$max = $stat->max(); ## returns 4, the maximum
$unweighted = Statistics::Descriptive::Full->new();
$weighted = Statistics::Descriptive::Weighted::Full->new();
$unweighted->add_data(1,1,1,1,7,7,7,7);
$weighted->add_data([1,7],[4,4]);
$ct = $unweighted->count(); ## returns 8
$ct = $weighted->count(); ## returns 2
print "false, variances unequal!\n" unless
( abs($unweighted->variance() - $weighted->variance()) < 1e-12 );
## the above statement will print, the variances are truly unequal
## the unweighted variance is corrected in terms of sample-size,
## while the weighted variance is corrected in terms of the sum of
## squared weights
$data = $weighted->get_data();
## the above statement returns a hashref with keys:
## 'vars','weights','uniqvars','counts','sumweights','cdfs','rtps','percentiles','order'
$weighted->print();
## prints the hashref above with Data::Dumper
=head1 DESCRIPTION
This module partially extends the module Statistics::Descriptive to handle
weighted variates. Like that module, this module has an object-oriented
design and supports two different types of data storage and calculation
objects: sparse and full. With the sparse object representation, none of
the data is stored and only a few statistical measures are available. Using
the full object representation, complete information about the dataset
(including order of observation) is retained and additional functions are
available.
This module represents numbers in the same way Perl does on your
architecture, relying on Perl's own warnings and assertions regarding
underflow and overflow errors, division by zero, etc. The constant
C<$Statistics::Descriptive::Tolerance> is not used. Caveat programmor.
Variance calculations, however, are designed to avoid numerical
problems. "Online" (running sums) approaches are used to avoid
catastrophic cancellation and other problems. New in versions 0.4 and
up, I have corrected the definition of the "variance" and
"standard_deviation" functions to standard definitions. This module
now models the same calculation as eg the "corpcor" package in R for
weighted sample variance. Following convention from
Statistics::Descriptive, "variance" and "standard_deviation" return
B<unbiased> "sample" estimators. Also new in v0.4, I now provide
"biased_variance" and "biased_standard_deviation" functions to return
the biased estimators. Please see below for full definitions.
Like in Statistics::Descriptive any of the methods (both Sparse and
Full) cache values so that subsequent calls with the same arguments
are faster.
Be warned that this is B<not> a drop-in replacement for
Statistics::Descriptive. The interfaces are different for adding data,
and also for retrieving data with get_data. Certain functions from
Statistics::Descriptive have been dropped, specifically:
=over
=item Statistics::Descriptive::Sparse::mindex()
=item Statistics::Descriptive::Sparse::maxdex()
=item Statistics::Descriptive::Full::sort_data()
=item Statistics::Descriptive::Full::presorted()
=item Statistics::Descriptive::Full::harmonic_mean()
=item Statistics::Descriptive::Full::geometric_mean()
=item Statistics::Descriptive::Full::trimmed_mean()
=item Statistics::Descriptive::Full::frequency_distribution()
=item Statistics::Descriptive::Full::least_squares_fit()
=back
Calling these functions on Statistics::Descriptive::Weighted objects
will cause programs to die with a stack backtrace.
With this module you can recover the data sorted from get_data(). Data
is sorted automatically on insertion.
The main extension and focus of this module was to implement a cumulative
distribution function and a right-tail probability function with efficient
search performance, even if the data added is already sorted. This is
achieved using a partially randomized self-balancing tree to store data.
The implementation uses Tree::Treap v. 0.02 written by Andrew Johnson.
=head1 METHODS
=head2 Sparse Methods
=over
=item $stat = Statistics::Descriptive::Weighted::Sparse->new();
Create a new sparse statistics object.
=item $stat->add_data([1,2,3],[11,9,2]);
Adds data to the statistics object. The cached statistical values are
updated automatically.
This function expects one or two array references: the first points to
variates and the second to their corresponding weights. The referenced
arrays must be of equal lengths. The weights are expected to all be
positive. If any weights are not positive, the module will carp
(complain to standard error) and the corresponding variates will be
skipped over.
If the weights array is omitted, all weights for the values added are
assumed to be 1.
Variates may be added in multiple instances to Statistics objects, and
their summaries are calculated "on-line," that is updated.
=item $stat->count();
Returns the number of variates that have been added.
=item $stat->weight();
Returns the sum of the weight of the variates.
=item $stat->sum();
Returns the sum of the variates multiplied by their weights.
=item $stat->mean();
Returns the weighted mean of the data. This is the sum of the weighted
data divided by the sum of weights.
=item $stat->variance();
Returns the unbiased weighted sample variance of the data. An
"on-line" weighted incremental algorithm for variance is based on
D. H. D. West (1979). Communications of the ACM, 22, 9, 532-535:
Updating Mean and Variance Estimates: An Improved Method. However,
instead of dividing by (n-1) as in that paper, the bias correction
used is:
=over
1 / (1 - (sum_i ((w_i)^2) / (sum_i w_i)^2)),
=back
where w_i is the ith weight. This bias correction factor multiplies
the biased estimator of the variance defined below.
=item $stat->standard_deviation();
Returns the square root of the unbiased weighted sample variance of the data.
=item $stat->biased_variance();
Returns the biased weighted sample variance of the data. The same
"on-line" weighted incremental algorithm for variance is used. The
definition of the biased weighted variance estimator is:
=over
sum_i (w_i * (x_i - mean_x)^2) / sum_i (w_i),
=back
where w_i is the weight of the ith variate x_i, and mean_x is the
weighted mean of the variates. To reproduce the variance calculation
of earlier versions of this module, multiply the biased variance by
($stat->count() / ($stat->count() - 1)).
=item $stat->biased_standard_deviation();
Returns the square root of the biased weighted sample variance of the data.
=item $stat->min();
Returns the minimum value of the data set.
=item $stat->max();
Returns the maximum value of the data set.
=item $stat->sample_range();
Returns the sample range (max - min) of the data set.
=back
=head2 Full Methods
Similar to the Sparse Methods above, any Full Method that is called caches
the current result so that it doesn't have to be recalculated.
=over
=item $stat = Statistics::Descriptive::Weighted::Full->new();
Create a new statistics object that inherits from
Statistics::Descriptive::Sparse so that it contains all the methods
described above.
=item $stat->add_data([1,2,4,5],[2,2,2,5]);
Adds weighted data to the statistics object. All of the sparse
statistical values are updated and cached. Cached values from Full
methods are deleted since they are no longer valid.
I<Note: Calling add_data with an empty array will delete all of your
Full method cached values! Cached values for the sparse methods are
not changed>
=item $stat->mode();
Returns the data value with the most weight. In the case that a data
value is observed multiple times, their successive weights are summed
of course.
=item $stat->maxweight();
The weight of the mode.
=item $stat->count(10);
The number of observations of a particular data value.
=item $stat->weight(10);
The total weight of a particular data value.
=item $x = $stat->cdf(4);
Returns the weighted empirical cumulative distribution function (ECDF).
=over
=item
For example, given the 6 measurements:
-2, 7, 7, 4, 18, -5
with weights:
2, 1, 1, 2, 2, 2
Let F(x) be the ECDF of x, which is defined as the sum of all
normalized weights of all observed variates less than or equal to x.
Then F(-8) = 0, F(-5.0001) = 0, F(-5) = 1/5, F(-4.999) = 1/5, F(7) =
4/5, F(18) = 1, F(239) = 1.
Note that we can recover the different measured values and how many
times each occurred from F(x) -- no information regarding the range
in values is lost. Summarizing measurements using histograms, on the
other hand, in general loses information about the different values
observed, so the EDF is preferred.
Using either the EDF or a histogram, however, we do lose information
regarding the order in which the values were observed. Whether this
loss is potentially significant will depend on the metric being
measured.
=back
(Modified from: pod from Statistics::Descriptive, itself taken from
I<RFC2330 - Framework for IP Performance Metrics>, Section 11.3.
Defining Statistical Distributions. RFC2330 is available from:
http://www.cis.ohio-state.edu/htbin/rfc/rfc2330.html.)
=item $x = $stat->survival(8);
Complement of the weighted cdf function, also known as the weighted
survival function. The weighted survival function S(x) is the sum of
all normalized weights of all observed variates greater than x.
=over
=item
For example, given the 6 measurements:
-2, 7, 7, 4, 18, -5
with weights:
2, 1, 1, 2, 2, 2
Then S(-8) = 1, S(-5.0001) = 1, S(-5) = 4/5, S(-4.999) = 4/5, S(7) =
1/5, S(18) = 0, S(239) = 0.
=back
=item $x = $stat->rtp(8);
The weighted right tail probability function. The weighted right tail
probability function P(x) is the sum of all normalized weights of all
observed variates greater than or equal to x. This may be useful for
Monte Carlo estimation of P-values.
=over 4
=item
For example, given the 6 measurements:
-2, 7, 7, 4, 18, -5
with weights:
2, 1, 1, 2, 2, 2
Then P(-8) = 1, P(-5.0001) = 1, P(-5) = 1, P(-4.999) = 4/5, P(7) =
2/5, P(18) = 1/5, P(239) = 0.
=back
=item $x = $stat->quantile(0.25);
Returns the weighted quantile. This is the inverse of the weighted
ECDF function. It is only defined for arguments between 0 and 1
inclusively. If F(x) is the ECDF, then the weighted quantile function
G(y) returns the smallest variate x whose weighted ECDF F(x) is
greater than or equal to y.
=over
=item
For example, given the 6 measurements:
-2, 7, 7, 4, 18, -5
with weights:
2, 1, 1, 2, 2, 2
Then G(0) = -5, G(0.1) = -5, G(0.2) = -5, G(0.25) = -2, G(0.4) = -2,
G(0.8) = 7, G(1) = 18.
=back
=item $x = $stat->percentile(25);
Returns the weighted percentile. It is only defined for arguments
between 0 and 100 inclusively. Unlike the quantile function above, the
percentile function performs weighted linear interpolation between
variates unless the argument exactly equals the computed percentile of
one of the variates.
=over
=item
Define p_n to be the percentile of the nth sorted variate, written
v_n, like so:
p_n = 100/S_N * (S_n - (w_n / 2)),
where S_N is the sum of all weights, S_n is the partial sum of weights
up to and including the nth variate, and w_n is the weight of the nth
variate.
Given a percent value 0 <= y <= 100, find an integer k such that:
p_k <= y <= p_(k+1).
The percentile function P(y) may now be defined:
P(y) = v_k + {[(y - p_k) / (p_(k+1) - p_k)] * (v_(k+1) - v_k)}
=back
This definition of weighted percentile was taken from:
http://en.wikipedia.org/wiki/Percentile on Dec 15, 2008.
=item $stat->median();
This is calculated as $stat->percentile(50) and cached as necessary.
=item $stat->get_data();
Returns a data structure that reconstitutes the original data added to
the object, supplemented by some of the distributional
summaries. Returns a reference to a hash, with the following keys,
each pointing to a reference to an array containing the indicated data.
=over
=item vars
The observed variates, sorted.
=item weights
The weights of the variates (in corresponding order to the value of
'vars').
=item order
The order of addition of the variates (in corresponding order to the value of
'vars').
=item uniqvars
The uniquely observed variates, sorted.
=item counts
The numbers of times each variate was observed (in corresponding order
to the value of 'uniqvars').
=item sumweights
The total weight of each unique variate (in corresponding order
to the value of 'uniqvars').
=item cdfs
The cdf of each unique variate (in corresponding order to the value of
'uniqvars').
=item rtps
The rt tail probabilities of each unique variate (in corresponding
order to the value of 'uniqvars').
=item percentiles
The percentiles of each unique variate (see "percentile" above for
definition, given in corresponding order to the value of 'uniqvars').
=back
=item $stat->print();
Prints a Data::Dumper dump of the hashref returned by get_data().
=back
=head1 REPORTING ERRORS
When reporting errors, please include the following to help me out:
=over
=item *
Your version of perl. This can be obtained by typing perl C<-v> at
the command line.
=item *
Which versions of Statistics::Descriptive and
Statistics::Descriptive::Weighted you're using.
=item *
Details about what the error is. Try to narrow down the scope
of the problem and send me code that I can run to verify and
track it down.
=back
=head1 NOTES
I use a running sum approach for the bias correction factor. We may
write this factor as (1 / (1 - H)),
where
=over
H is (sum_i ((w_i)^2)) / ((sum_i w_i)^2).
=back
The calculation I use for calculation of the (n+1)th value of H, on
encountering the (n+1)th variate is:
=over
H_(n+1) = ((sum_i^n w_i)^2 * H_n + w_(n+1)^2) / (sum_i^(n+1) w_i)^2
=back
together with initial value:
=over
H_0 = 0.
=back
=head1 AUTHOR
David H. Ardell
dhard@cpan.org (or just ask Google).
=head1 THANKS
Florent Angly
who contributed bug fixes, added features and tests, and improved
installation statistics (Oct 2009).
=head1 REFERENCES
=over
=item *
RFC2330, Framework for IP Performance Metrics
=item *
L<http://en.wikipedia.org/wiki/Percentile>
=item *
L<http://en.wikipedia.org/wiki/Weighted_mean>
=item *
L<http://en.wikipedia.org/wiki/Weighted_variance>
=item *
D. H. D. West (1979). Communications of the ACM, 22, 9, 532-535:
Updating Mean and Variance Estimates: An Improved Method.
=item *
L<http://en.wikipedia.org/wiki/Treap>
=item *
Tree::Treap Copyright 2002-2005 Andrew Johnson. L<http://stuff.siaris.net>
=back
=head1 COPYRIGHT
Copyright (c) 2008,2009 David H. Ardell.
Copyright (c) 2009 Florent Angly.
This program is free software; you may redistribute it and/or modify it
under the same terms as Perl itself.
Portions of this code are from Statistics::Descriptive which is under
the following copyrights.
Copyright (c) 1997,1998 Colin Kuskie. All rights
reserved. This
program is free software; you can redistribute it and/or modify it
under the same terms as Perl itself.
Copyright (c) 1998 Andrea Spinelli. All rights
reserved. This program
is free software; you can redistribute it and/or modify it under the
same terms as Perl itself.
Copyright (c) 1994,1995 Jason Kastner. All rights
reserved. This
program is free software; you can redistribute it and/or modify it
under the same terms as Perl itself.
This library is free software; you can redistribute it and/or modify
it under the same terms as Perl itself, either Perl version 5.8.8 or,
at your option, any later version of Perl 5 you may have available.
=head1 REVISION HISTORY
=over
=item v.0.5
October 2009. Fixed installation/test errors. Weights array made optional.
=item v.0.4
January 2009. Redefinition of variance and standard_deviation to
standard definitions; introduction of biased_variance,
biased_standard_deviation functions
=item v.0.2-v.0.3
December 2008. Corrections made to installation package.
=item v.0.1
December 2008. Initial release under perl licensing.
=back
=cut
| lskatz/lskScripts | lib/Statistics/Descriptive/Weighted.pm | Perl | mit | 32,345 |
use strict;
use Data::Dumper;
use Bio::KBase::Utilities::ScriptThing;
use Carp;
#
# This is a SAS Component
#
=head1 NAME
get_entity_Contig
=head1 SYNOPSIS
get_entity_Contig [-c N] [-a] [--fields field-list] < ids > table.with.fields.added
=head1 DESCRIPTION
A contig is thought of as composing a part of the DNA
associated with a specific genome. It is represented as an ID
(including the genome ID) and a ContigSequence. We do not think
of strings of DNA from, say, a metgenomic sample as "contigs",
since there is no associated genome (these would be considered
ContigSequences). This use of the term "ContigSequence", rather
than just "DNA sequence", may turn out to be a bad idea. For now,
you should just realize that a Contig has an associated
genome, but a ContigSequence does not.
Example:
get_entity_Contig -a < ids > table.with.fields.added
would read in a file of ids and add a column for each field in the entity.
The standard input should be a tab-separated table (i.e., each line
is a tab-separated set of fields). Normally, the last field in each
line would contain the id. If some other column contains the id,
use
-c N
where N is the column (from 1) that contains the id.
This is a pipe command. The input is taken from the standard input, and the
output is to the standard output.
=head2 Related entities
The Contig entity has the following relationship links:
=over 4
=item HasAsSequence ContigSequence
=item HasVariationIn ObservationalUnit
=item IsComponentOf Genome
=item IsImpactedBy Trait
=item IsLocusFor Feature
=item IsSummarizedBy AlleleFrequency
=back
=head1 COMMAND-LINE OPTIONS
Usage: get_entity_Contig [arguments] < ids > table.with.fields.added
-a Return all available fields.
-c num Select the identifier from column num.
-i filename Use filename rather than stdin for input.
--fields list Choose a set of fields to return. List is a comma-separated list of strings.
-a Return all available fields.
--show-fields List the available fields.
The following fields are available:
=over 4
=item source_id
ID of this contig from the core (source) database
=back
=head1 AUTHORS
L<The SEED Project|http://www.theseed.org>
=cut
# Plain-text usage message printed for --help and on argument errors.
# (Kept in sync with the POD above; heredoc content is emitted verbatim.)
our $usage = <<'END';
Usage: get_entity_Contig [arguments] < ids > table.with.fields.added
-c num Select the identifier from column num
-i filename Use filename rather than stdin for input
--fields list Choose a set of fields to return. List is a comma-separated list of strings.
-a Return all available fields.
--show-fields List the available fields.
The following fields are available:
source_id
ID of this contig from the core (source) database
END
use Bio::KBase::CDMI::CDMIClient;
use Getopt::Long;

#Default fields
# Fields this entity supports; %all_fields is a lookup set for validating
# the --fields option below.
my @all_fields = ( 'source_id' );
my %all_fields = map { $_ => 1 } @all_fields;

# Command-line state, shared by the validation code further down.
my $column;      # -c: 1-based input column holding the id
my $a;           # -a: return all available fields
my $f;           # --fields: comma-separated list of field names
my $i = "-";     # -i: input file name; "-" means stdin
my @fields;      # resolved list of fields to fetch
my $help;
my $show_fields;

# Construct the CDMI client; new_get_entity_for_script also parses the
# command line using the Getopt::Long-style spec => destination pairs here.
my $geO = Bio::KBase::CDMI::CDMIClient->new_get_entity_for_script('c=i' => \$column,
"all-fields|a" => \$a,
"help|h" => \$help,
"show-fields" => \$show_fields,
"fields=s" => \$f,
'i=s' => \$i);
# --help: print usage and exit successfully.
if ($help)
{
print $usage;
exit 0;
}
# --show-fields: list the supported field names on stderr and exit.
if ($show_fields)
{
print STDERR "Available fields:\n";
print STDERR "\t$_\n" foreach @all_fields;
exit 0;
}
# -a and --fields are mutually exclusive.
if ($a && $f)
{
print STDERR "Only one of the -a and --fields options may be specified\n";
exit 1;
}
if ($a)
{
# -a: fetch every known field.
@fields = @all_fields;
}
elsif ($f) {
# --fields: accept only names present in %all_fields; collect unknown
# names so they can all be reported in one error message.
my @err;
for my $field (split(",", $f))
{
if (!$all_fields{$field})
{
push(@err, $field);
}
else
{
push(@fields, $field);
}
}
if (@err)
{
print STDERR "get_entity_Contig: unknown fields @err. Valid fields are: @all_fields\n";
exit 1;
}
} else {
# Neither -a nor --fields supplied: print usage and fail.
print STDERR $usage;
exit 1;
}
# Open the input stream: "-" (the default) means stdin, anything else is
# treated as a file name.
my $ih;
if ($i eq '-')
{
$ih = \*STDIN;
}
else
{
open($ih, "<", $i) or die "Cannot open input file $i: $!\n";
}
# Main loop: read the input in batches of (id, original-line) tuples, look
# up all ids in one server call, then append the requested field values
# (tab-separated) to each input line on stdout. Lines whose id had no
# match are echoed to stderr unchanged.
while (my @tuples = Bio::KBase::Utilities::ScriptThing::GetBatch($ih, undef, $column)) {
my @h = map { $_->[0] } @tuples;
my $h = $geO->get_entity_Contig(\@h, \@fields);
for my $tuple (@tuples) {
my @values;
my ($id, $line) = @$tuple;
my $v = $h->{$id};
if (! defined($v))
{
#nothing found for this id
print STDERR $line,"\n";
} else {
foreach $_ (@fields) {
my $val = $v->{$_};
# Multi-valued fields come back as array refs; flatten with commas.
push (@values, ref($val) eq 'ARRAY' ? join(",", @$val) : $val);
}
my $tail = join("\t", @values);
print "$line\t$tail\n";
}
}
}

__DATA__
| kbase/kb_seed | scripts/get_entity_Contig.pl | Perl | mit | 4,645 |
% nal_reader.pl
% Read Non_Axiomatic Logic from Prolog
% Module declaration: the public API for reading Non-Axiomatic Logic text.
:-module(nal_reader,[
  nal_test/0,
  nal_test/1,
  nal_read_clause/2,
  % nal_test/2,
  % nal_call/2,
  nal_call/3
  ]).

% Register this module as a system library module.
:- set_module(class(library)).
:- set_module(base(system)).

% Shared logicmoo utilities, DCG meta-combinators (mw//1, optional//2, ...),
% and Narsese term support.
:- use_module(library(logicmoo_common)).
:- use_module(library(logicmoo/dcg_meta)).
:- use_module(library(narsese)).
/*
task ::= [budget] sentence (* task to be processed *)
sentence ::= statement"." [tense] [truth] (* judgement to be absorbed into beliefs *)
| statement"?" [tense] [truth] (* question on thuth-value to be answered *)
| statement"!" [desire] (* goal to be realized by operations *)
| statement"@" [desire] (* question on desire-value to be answered *)
statement ::= <"<">term copula term<">"> (* two terms related to each other *)
| <"(">term copula term<")"> (* two terms related to each other, new notation *)
| term (* a term can name a statement *)
| "(^"word {","term} ")" (* an operation to be executed *)
| word"("term {","term} ")" (* an operation to be executed, new notation *)
copula ::= "-->" (* inheritance *)
| "<->" (* similarity *)
| "{--" (* instance *)
| "--]" (* property *)
| "{-]" (* instance-property *)
| "==>" (* implication *)
| "=/>" (* predictive implication *)
| "=|>" (* concurrent implication *)
| "=\\>" (* =\> retrospective implication *)
| "<=>" (* equivalence *)
| "</>" (* predictive equivalence *)
| "<|>" (* concurrent equivalence *)
term ::= word (* an atomic constant term *)
| variable (* an atomic variable term *)
| compound-term (* a term with internal structure *)
| statement (* a statement can serve as a term *)
compound-term ::= op-ext-set term {"," term} "}" (* extensional set *)
| op-int-set term {"," term} "]" (* intensional set *)
| "("op-multi"," term {"," term} ")" (* with prefix operator *)
| "("op-single"," term "," term ")" (* with prefix operator *)
| "(" term {op-multi term} ")" (* with infix operator *)
| "(" term op-single term ")" (* with infix operator *)
| "(" term {","term} ")" (* product, new notation *)
| "(" op-ext-image "," term {"," term} ")"(* special case, extensional image *)
| "(" op-int-image "," term {"," term} ")"(* special case, \ intensional image *)
| "(" op-negation "," term ")" (* negation *)
| op-negation term (* negation, new notation *)
op-int-set::= "[" (* intensional set *)
op-ext-set::= "{" (* extensional set *)
op-negation::= "--" (* negation *)
op-int-image::= "\\" (* \ intensional image *)
op-ext-image::= "/" (* extensional image *)
op-multi ::= "&&" (* conjunction *)
| "*" (* product *)
| "||" (* disjunction *)
| "&|" (* parallel events *)
| "&/" (* sequential events *)
| "|" (* intensional intersection *)
| "&" (* extensional intersection *)
op-single ::= "-" (* extensional difference *)
| "~" (* intensional difference *)
variable ::= "$"word (* independent variable *)
| "#"word (* dependent variable *)
| "?"word (* query variable in question *)
tense ::= ":/:" (* future event *)
| ":|:" (* present event *)
| ":\\:" (* :\: past event *)
desire ::= truth (* same format, different interpretations *)
truth ::= <"%">frequency[<";">confidence]<"%"> (* two numbers in [0,1]x(0,1) *)
budget ::= <"$">priority[<";">durability][<";">quality]<"$"> (* three numbers in [0,1]x(0,1)x[0,1] *)
word : #"[^\ ]+" (* unicode string *)
priority : #"([0]?\.[0-9]+|1\.[0]*|1|0)" (* 0 <= x <= 1 *)
durability : #"[0]?\.[0]*[1-9]{1}[0-9]*" (* 0 < x < 1 *)
quality : #"([0]?\.[0-9]+|1\.[0]*|1|0)" (* 0 <= x <= 1 *)
frequency : #"([0]?\.[0-9]+|1\.[0]*|1|0)" (* 0 <= x <= 1 *)
confidence : #"[0]?\.[0]*[1-9]{1}[0-9]*" (* 0 < x < 1 *)
*/
% nal_task//1: skip leading whitespace, then wrap the parsed pieces in a
% task/5 term: task(Kind, Statement, Tense, TruthOrDesire, Budget).
nal_task(S)--> cwhite,!,nal_task(S),!.
nal_task(task(X,S,T,O,B)) --> nal_task(X,S,T,O,B),!.

% task ::= [budget] sentence
nal_task(X,S,T,O,B) --> optional(B, nal_budget),!, nal_sentence(X,S,T,O). % task to be processed

% A sentence is a statement followed by punctuation (and optional
% tense / truth / desire annotations).
nal_sentence(X,S,T,O)--> nal_statement(S), nal_post_statement(X,T,O).

% The punctuation selects the sentence kind X:
%   "."  judgement       "?"  question on truth
%   "!"  goal            "@"  question on desire
nal_post_statement(X,T,O)-->
  /*nal_statement(S),*/ nal_o(`.` ,X, judgement)-> optional(T,nal_tense)-> optional(O,nal_truth),! % judgement to be absorbed into beliefs
  ; /*nal_statement(S),*/ nal_o(`?` ,X, question_truth)-> optional(T,nal_tense)-> optional(O,nal_truth),! % question on truth_value to be answered
  ; /*nal_statement(S),*/ nal_o(`!` ,X, goal), optional(O,nal_desire) % goal to be realized by operations
  ; /*nal_statement(S),*/ nal_o(`@` ,X, question_desire), optional(O,nal_desire) % question on desire_value to be answered
  .
% nal_statement//1: mw//1 also consumes surrounding whitespace.
nal_statement(S)--> mw(nal_statement_0(S)),!.

% The statement surface forms of the grammar, tried in order:
%   <A copula B>      classic relational statement
%   (^op, args...)    operation to be executed
%   (A copula B)      relational statement, new notation
%   op(args...)       operation, new notation
%   term              a bare term naming a statement
nal_statement_0(S)-->
  mw(`<`) ,!, nal_term(A), nal_copula(R), nal_term(B), mw(`>`) , {S=..[R,A,B]} % two terms related to each other
  ; nal_l_paren, `^` , nal_term_list_comma(L), nal_paren_r, {S= exec(L)} % an operation to be executed
  ; nal_l_paren, nal_term(A), nal_copula(R), nal_term(B), nal_paren_r, {S=..[R,A,B]} % two terms related to each other, new notation
  ; nal_word(A), nal_l_paren, nal_term_list_comma(L), nal_paren_r, {S= exec([A|L])} % an operation to be executed, new notation
  ; nal_term_1(X), {S= nal_named_statement(X)} % a term can name a statement
  .
% nal_copula//1: map each NAL copula token to the functor used for the
% resulting statement term. Clause order matters: longer tokens (e.g.
% `==>`) must be tried before their prefixes (`=>` is the final fallback).
nal_copula(X) -->
    nal_o(`-->` ,X, inheritance )
  ; nal_o(`<->` ,X, similarity )
  ; nal_o(`{--` ,X, instance )
  ; nal_o(`--]` ,X, property )
  ; nal_o(`{-]` ,X, inst_prop )
  ; nal_o(`==>` ,X, implication )
  ; nal_o(`=/>` ,X, predictive_impl )
  ; nal_o(`=|>` ,X, concurrent_impl )
  ; nal_o(`=\\>` ,X, retrospective_impl )
  ; nal_o(`<=>` ,X, equiv )
  ; nal_o(`</>` ,X, predictive_equiv )
  ; nal_o(`<|>` ,X, concurrent_equiv )
  ; nal_o(`=>` ,X, unknown_impl )
  .
% nal_term//1 (and the _0/_1 variants): parse a term in the "old"
% representation, then normalize image placeholders via old_to_new/2.
nal_term(N) --> nal_term_old(O), {old_to_new(O,N)}.

nal_term_old(S)
  --> nal_word(S) % an atomic constant term
  ; nal_variable(S) % an atomic variable term
  ; nal_compound_term(S) % a term with internal structure
  ; nal_statement(S) % a statement can serve as a term
  .

% nal_term_0: like nal_term but without whitespace skipping (uses *_0 rules).
nal_term_0(N) --> nal_term_0_old(O), {old_to_new(O,N)}.

nal_term_0_old(S)
  --> nal_word_0(S) % an atomic constant term
  ; nal_variable_0(S) % an atomic variable term
  ; nal_compound_term_0(S) % a term with internal structure
  ; nal_statement_0(S) % a statement can serve as a term
  .

% nal_term_1: a term that is NOT allowed to be a bare statement.
nal_term_1(N) --> nal_term_1_old(O), {old_to_new(O,N)}.

nal_term_1_old(S)
  --> nal_word(S) % an atomic constant term
  ; nal_variable(S) % an atomic variable term
  ; nal_compound_term(S) % a term with internal structure
  .

% old_to_new/2: rewrite a rel/1 term holding a positional image variable
% (var(arg,L) for extensional, var(int,L) for intensional) into
% ext_image/int_image with a '_' placeholder inserted at position L.
% Anything else passes through unchanged.
old_to_new(rel([R, var(arg, L) | B]), ext_image(New)):- length(Left,L), append(Left,Right,[R|B]), append(Left,['_'|Right],New).
old_to_new(rel([R, var(int, L) | B]), int_image(New)):- length(Left,L), append(Left,Right,[R|B]), append(Left,['_'|Right],New).
old_to_new(X,X).
% nal_compound_term//1: compound terms ({...}, [...], --X, (...)-forms).
nal_compound_term(X)--> mw(nal_compound_term_0(X)).

% ^term : an operator reference wrapped as exec([Term]).
nal_compound_term_0('exec'([S]))--> `^`,!,nal_term_1(S).

% Compound alternatives; the leading \+ dcg_peek(`<`) cut keeps this rule
% from swallowing <A copula B> statements.
% NOTE(review): the v(A,L) branch binds S directly, yet the trailing
% {S=..[X,L]} still runs with X unbound -- confirm that branch is reachable.
nal_compound_term_0(S)--> \+ dcg_peek(`<`),!,
  ( nal_o(nal_op_ext_set,X,ext_set), nal_term_list_comma(L), `}` % extensional set
  ; nal_o(nal_op_int_set,X,int_set), nal_term_list_comma(L), `]` % intensional set
  ; nal_word_0(A), `[`, nal_term_list_comma(L), `]`, {S= v(A,L)} % @TODO notation
  ; nal_o(nal_op_negation,X,negation), nal_term(AB),{L=[AB]} % negation, new notation
  ; nal_l_paren, nal_paren_compound_term(X,L), nal_paren_r
  ), {S=..[X,L]}.

% Inside parentheses: prefix-operator, infix-operator, image, negation,
% whitespace-separated relation, or a plain product list.
nal_paren_compound_term(X,L) -->
  nal_op_multi(X), nal_comma, nal_term_list_comma(L) % with prefix operator
  ; nal_op_single(X), nal_comma, nal_term(A), nal_comma, nal_term(B), {L=[A,B]} % with prefix operator
  ; nal_o(nal_op_ext_image,X,ext_image), nal_comma, nal_term_list_comma(L) % special case, extensional image
  ; nal_o(nal_op_int_image,X,int_image), nal_comma, nal_term_list_comma(L) % special case, \ intensional image
  ; nal_o(nal_op_negation,X,negation), nal_comma, nal_term(AB),{L=[AB]} % negation
  ; nal_term(A), nal_op_multi(X), nal_term(B),{L=[A,B]} % with infix operator
  ; nal_term(A), nal_op_single(X), nal_term(B),{L=[A,B]} % with infix operator
  ; nal_preserve_whitespace((nal_term_0(A), cspace, {X=rel}, nal_term_list_white(SL, ` `))),{L=[A|SL]}
  ; {X=product}, nal_term_list_comma(L) % product, new notation
  .

% Single-token compound-term openers.
nal_op_int_set-->`[`. % intensional set
nal_op_ext_set-->`{`. % extensional set
nal_op_negation-->`--`. % negation
nal_op_int_image-->`\\`. % \ intensional image
nal_op_ext_image-->`/`. % / extensional image

% Run DCG with whitespace preservation switched on (b_setval scoped by
% locally/2), so space-separated relation terms parse correctly.
nal_preserve_whitespace(DCG,S,E) :- locally(b_setval(whitespace,preserve),phrase(DCG,S,E)).
% nal_no_preserve_whitespace(DCG,S,E) :- phrase(DCG,S,E).
% nal_op_multi//1: operators that may join two or more terms. Longer
% tokens first (`&&` before `&|`, `&/`, `&`; `||` before `|`).
nal_op_multi(X)-->
    nal_o(`&&` ,X, and) % conjunction
  ; nal_o(`*` ,X, product) % product
  ; nal_o(`||` ,X, or) % disjunction
  ; nal_o(`#` ,X, sequence_spatial) % patham9 "sequence", wasn't really useful for NLP, it was called PART
  ; nal_o(`&|` ,X, parallel_evnts) % parallel events
  ; nal_o(`&/` ,X, sequence_evnts) % sequential events
  ; nal_o(`|` ,X, int_intersection) % intensional intersection
  ; nal_o(`&` ,X, ext_intersection) % extensional intersection
  .

% nal_op_single//1: strictly binary operators.
nal_op_single(X) -->
    nal_o(`-`, X, ext_difference) % extensional difference
  ; nal_o(`~`, X, int_difference) % intensional difference
  .
% nal_variable//1: NAL variables, represented as var(Kind, Name).
nal_variable(V)--> mw(nal_variable_0(V)).

nal_variable_0(var(X,W))
  -->nal_o(`$`, X, ind), nal_word_0(W) % independent variable
  ;nal_o(`#`, X, dep), nal_word_0(W) % dependent variable
  ;nal_o(`?`, X, query), nal_word_0(W) % query variable in question
  ;nal_o(`/`, X, arg), nal_word_0(W) % positional variable in image args
  ;nal_o(`\\`,X, int), nal_word_0(W) % positional variable in intensional image args
  .
% Bare sigils (no following word) are kept as their own atoms.
nal_variable_0(('_')) --> `_`.
nal_variable_0(('#')) --> `#`.
nal_variable_0(('$')) --> `$`.

% nal_tense//1: the three fixed tense markers plus two extended forms
% (:!N: numeric time stamp, :Term: symbolic tense).
nal_tense(X) -->
    nal_o(`:/:`, X, future) % future event
  ; nal_o(`:|:`, X, present) % present event
  ; nal_o(`:\\:`, X, past) % :\: past event
  .
nal_tense('t!'(X)) --> `:!`, number(X), `:`.
nal_tense('t'(X)) --> `:`, nal_term_1(X), `:`.
% Desire has the same format as Truth, but a different interpretation.
nal_desire(D)-->nal_truth(D).

% Truth is two numbers in [0,1]x(0,1): %frequency[;confidence]% or {f c}.
nal_truth([F,C])--> `%`, !, nal_frequency(F), optional((`;`, nal_confidence(C))), `%`.
nal_truth([F,C])--> `{`, !, nal_frequency(F), nal_confidence(C), `}`.

% Budget is three numbers in [0,1]x(0,1)x[0,1]:
% $priority[;durability][;quality]$
nal_budget(nal_budget_pdq(P,D,Q))--> `$`,!, nal_priority(P), optional(( `;`, nal_durability(D))), optional((`;`, nal_quality(Q))), `$`.

% nal_word//1: numbers, quoted strings, or alphanumeric symbols.
nal_word(E) --> mw(nal_word_0(E)).
nal_word_0('+'(E)) --> `+`,dcg_basics:integer(E),!.
nal_word_0(E) --> dcg_basics:number(E),!.
nal_word_0(E) --> quoted_string(E),!.
nal_word_0(E) --> dcg_peek([C]),{char_type(C,alpha)},!, nal_rsymbol([],E),!.

% Numeric components with their documented ranges.
nal_priority(F) --> nal_float_inclusive(0,1,F). % 0 <= x <= 1
nal_durability(F) --> nal_float_exclusive(0,1,F). % 0 < x < 1
nal_quality(F) --> nal_float_inclusive(0,1,F). % 0 <= x <= 1
nal_frequency(F) --> nal_float_inclusive(0,1,F). % 0 <= x <= 1
nal_confidence(F) --> nal_float_exclusive(0,1,F). % 0 < x < 1
nal_o(S,X,X) --> owhite,S,owhite.
nal_o(X,X) --> nal_o(X,X,X).
nal_float_inclusive(L,H,F)--> mw((dcg_basics:number(F) -> {nal_warn_if_strict((L=< F,F=< H))})).
nal_float_exclusive(L,H,F)--> mw((dcg_basics:number(F) -> {nal_warn_if_strict((L < F,F < H))})).
nal_warn_if_strict(G):- call(G),!.
nal_warn_if_strict(G):- dmsg(nal_warn_if_strict(G)),!.
% Register nal_comment_expr//1 as the comment reader for the meta DCG framework.
:- set_dcg_meta_reader_options(file_comment_reader, nal_comment_expr).
% Skip leading whitespace, then wrap the comment text with its position.
nal_comment_expr(X) --> cspace,!,nal_comment_expr(X).
nal_comment_expr('$COMMENT'(Expr,I,CP)) --> nal_comment_expr_3(Expr,I,CP),!.
% Block comment  /* ... */  - N/CharPOS record where it started.
nal_comment_expr_3(T,N,CharPOS) --> `/*`, !, my_lazy_list_location(file(_,_,N,CharPOS)),!, zalwayz(read_string_until_no_esc(S,`*/`)),!,
  {text_to_string_safe(S,T)},!.
% Line comment: any prefix listed by nal_cmt_until_eoln/1 up to end of line.
nal_comment_expr_3(T,N,CharPOS) --> {nal_cmt_until_eoln(Text)},Text,!, my_lazy_list_location(file(_,_,N,CharPOS)),!,zalwayz(read_string_until_no_esc(S,eoln)),!,
  {text_to_string_safe(S,T)},!.
% Recognized line-comment prefixes (Narsese uses ' for comments).
nal_cmt_until_eoln(`//`).
nal_cmt_until_eoln(`'`).
nal_cmt_until_eoln(`**`).
% Punctuation tokens with optional surrounding whitespace.
nal_comma --> mw(`,`).
nal_l_paren --> mw(`(`).
nal_paren_r --> mw(`)`).
% Separator-delimited and comma-delimited term lists.
nal_term_list_white([H|T], Sep) --> nal_term_0(H), ( (Sep,owhite) -> nal_term_list_white(T, Sep) ; ({T=[]},owhite)).
nal_term_list_comma([H|T]) --> nal_term(H), ( nal_comma -> nal_term_list_comma(T) ; {T=[]} ).
% nal_rsymbol//2: read a symbol starting from already-consumed Chars; the
% first char must satisfy nal_sym_char/1, the rest are gathered greedily.
nal_rsymbol(Chars,E) --> [C], {notrace(nal_sym_char(C))},!, nal_sym_continue(S), {append(Chars,[C|S],AChars),string_to_atom(AChars,E)},!.
nal_sym_continue([]) --> nal_peek_symbol_breaker,!.
nal_sym_continue([H|T]) --> [H], {nal_sym_char(H)},!, nal_sym_continue(T).
nal_sym_continue([]) --> [].
% `--` breaks a symbol, a single `-` does not; blanks and non-symbol
% characters also terminate the run.
nal_peek_symbol_breaker --> dcg_peek(`--`).
nal_peek_symbol_breaker --> dcg_peek(`-`),!,{fail}.
nal_peek_symbol_breaker --> dcg_peek(one_blank).
nal_peek_symbol_breaker --> dcg_peek([C]),{\+ nal_sym_char(C)},!.
% nal_sym_char/1: character codes allowed inside a symbol.  Non-integers
% are converted to codes first; control/space and the reserved set fail.
nal_sym_char(C):- \+ integer(C),!,char_code(C,D),!,nal_sym_char(D).
nal_sym_char(C):- bx(C =< 32),!,fail.
%nal_sym_char(44). % allow comma in middle of symbol
% word is: #"[^\ ]+" % unicode string
nal_sym_char(C):- nal_never_symbol_char(NeverSymbolList),memberchk(C,NeverSymbolList),!,fail. % maybe 44 ? nal_comma
%nal_sym_char(C):- nb_current('$maybe_string',t),memberchk(C,`,.:;!%`),!,fail.
nal_sym_char(_):- !.
nal_never_symbol_char(`";()~'[]<>``{},=\\^`).
% Continue an already-started symbol, prepending the given codes.
nal_rsymbol_cont(Prepend,E) --> nal_sym_continue(S), {append(Prepend,S,AChars),string_to_atom(AChars,E)},!.
% Enumerate candidate test files under the sibling nal-tests/examples dirs.
nal_is_test_file(X):-filematch('../../nal-tests/**/*',X), \+ nal_non_file(X).
nal_is_test_file(X):-filematch('../../examples/**/*',X), \+ nal_non_file(X).
% Exclude READMEs and Prolog sources (case-insensitively).
nal_non_file(X):- downcase_atom(X,DC),X\==DC,!,nal_non_file(DC).
nal_non_file(X):- atom_concat(readme,_,X).
nal_non_file(X):- atom_concat(_,'.pl',X).
% Run every discovered test file; a user abort stops the whole run quietly.
nal_test_file:-
  make,
  catch((
  forall(nal_is_test_file(X),((dmsg(file_begin(X)),ignore(nal_test_file(X)),dmsg(file_end(X)))))),
 '$aborted',true).
% Resolve a relative name to an absolute path first, then parse and run it.
nal_test_file(File):- (\+ atom(File); \+ is_absolute_file_name(File)),
   absolute_file_name(File,Absolute), !, nal_test_file(Absolute).
nal_test_file(File):- open(File,read,In),
  nal_read_clauses(In, Expr),!,
  must_or_rtrace(nal_call(nal_test_file,Expr,OutL)),!,
  flatten([OutL],Out),
  maplist(wdmsg,Out),!.
% NAL file reader
% nal_file//1: one item of a NAL script - EOF, whitespace, comment,
% a recognized element, or (fallback) a raw English line.
nal_file(end_of_file) --> file_eof,!.
nal_file(O) --> cspace, !, nal_file(O).
nal_file([]) --> \+ dcg_peek([_]), !.
nal_file(Comment) --> nal_comment_expr(Comment).
nal_file(O) --> nal_file_element(O), !, owhite.
% fallback to english in a file
nal_file(unk_english(Text)) --> read_string_until_no_esc(Str,eoln),
  {atom_string(Str,Text)},!. %split_string(Str, "", "\s\t\r\n", Text).
% nal_file(planStepLPG(Name,Expr,Value)) --> owhite,sym_or_num(Name),`:`,owhite, nal(Expr),owhite, `[`,sym_or_num(Value),`]`,owhite. % 0.0003: (PICK-UP ANDY IBM-R30 CS-LOUNGE) [0.1000]
% nal_file(Term,Left,Right):- eoln(EOL),append(LLeft,[46,EOL|Right],Left),read_term_from_codes(LLeft,Term,[double_quotes(string),syntax_errors(fail)]),!.
% nal_file(Term,Left,Right):- append(LLeft,[46|Right],Left), ( \+ member(46,Right)),read_term_from_codes(LLeft,Term,[double_quotes(string),syntax_errors(fail)]),!.
% non-standard
% NOTE(review): the literal `trace,` below invokes the debugger whenever an
% ''outputMustContain(...) line is parsed - looks like leftover debugging;
% confirm before removing.
nal_file_element(outputMustContain(O)) --> `''outputMustContain('`, !, trace, read_string_until(Str,`')`),{fmt(Str),phrase(nal_task(O),Str,[])}.
nal_file_element('1Answer'(O) ) --> `' Answer `, read_string_until(Str,(`{`,read_string_until(_,eoln))),{phrase(nal_task(O),Str,[])}.
nal_file_element(N=V ) --> `*`, nal_word(N), mw(`=`), nal_term(V).
% IN:/OUT: trace lines, optionally followed by a {..} stamp (nal_three_vals).
nal_file_element(nal_in(H,V3)) --> `IN:`, nal_task(H), optional(nal_three_vals(V3)).
nal_file_element(nal_out(H,V3)) --> `OUT:`, nal_task(H), optional(nal_three_vals(V3)).
% standard
nal_file_element(do_steps(N)) --> dcg_basics:number(N),!.
nal_file_element(H) --> nal_task(H).
nal_file_element(nal_term(H)) --> nal_term(H).
% nal_read_clause("'the detective claims that tim lives in graz",A)
% {1 : 4;3}
% Read a {...} stamp up to `}` or end of line and parse it as a Prolog term.
nal_three_vals(V3)--> `{`, read_string_until_no_esc(Str,(`}`;eoln)),
  {read_term_from_codes(Str,V3,[double_quotes(string),syntax_errors(fail)])},!.
%nal_file_with_comments(O,with_text(O,Txt),S,E):- copy_until_tail(S,Copy),text_to_string_safe(Copy,Txt),!.
% Per-thread reader options for the string reader framework.
:- thread_local(t_l:sreader_options/2).
% Run every inline test registered via nal_is_test/2.
nal_test:- fmt('\nNAL TEST'), forall(nal_is_test(_,Test),nal_test(Test)).
:- use_module(library(dcg/basics)).
% try_reader_test(Test):- is_stream(Test), !, \+ is_compound(Test), open_string(Test,Stream), try_reader_test(Stream).
% Run one test string: banner, parse/execute via nal_call/3, dump the result.
nal_test(Test):-
  fmt("\n-----------------------------\n"),
  fmt(Test),
  fmt("-----------------------------\n"),
  nal_call('dmsg',Test,Out),dmsg(Out).
% Record a variable name for pretty-printing (delegates to debug_var/2).
nal_zave_varname(N,V):- debug_var(N,V),!.
%nal_zave_varname(N,V):- V = '$VAR'(N).
/*
implode_varnames(Vs):- (var(Vs) ; Vs==[]),!.
implode_varnames([NV|Vs]) :- implode_varnames(Vs),
  (var(NV) -> ignore((nal_variable_name(NV,Name),nal_zave_varname(Name,NV)));
   ignore((NV=(N=V),nal_zave_varname(N,V)))).
*/
% Read all clauses from Text (string or stream) into a flat list.
nal_read_clauses( Text, Out):-
  findall(Cl,nal_read_clause(Text, Cl), OutL),
  flatten([OutL],Out).
% Non-stream input is wrapped in a string stream first.
nal_read_clause( NonStream, Out):- \+ is_stream(NonStream), !, % wdmsg(NonStream),
  must_or_rtrace((open_string(NonStream,Stream), nal_read_clause(Stream, Out))).
% Temporarily switch the typein module to `input` while reading, so the
% op/3 declarations below are scoped there (uses SWI-internal hooks).
nal_read_clause(Stream, Out):-
  '$current_typein_module'(M),
  M\== input, !,
  setup_call_cleanup(
    '$set_typein_module'(input),
    nal_read_clause(Stream, Out),
    '$set_typein_module'(M)).
% Main loop: read terms until EOF; a (:- Goal) directive is executed in
% module `input` instead of being collected.
nal_read_clause(Stream, Out):-
  op(601, xfx, input:(/)),
  op(601, xfx, input:(\\)),
  (at_end_of_stream(Stream)-> Out=[];
   (nal_read_term(Stream, Term),
     (Term == end_of_file -> Out=[];
       (Term = (:- Exec) -> (input:call(Exec), Out=More) ; Out = [Term|More]),
        nal_read_clause(Stream, More)))).
% Read one term: drain the stream's pending buffer, parse it with the
% nal_file//1 grammar, and restore unconsumed codes afterwards.
nal_read_term(In,Expr):-
 notrace(( is_stream(In),
   remove_pending_buffer_codes(In,Codes),
   read_codes_from_pending_input(In,Text), Text\==[])), !,
 call_cleanup(parse_meta_ascii(nal_file, Text,Expr),
   append_buffer_codes(In,Codes)).
% Non-stream variant: parse Text directly against the `ascii_` buffer key.
nal_read_term(Text,Expr):-
 notrace(( =( ascii_,In),
   remove_pending_buffer_codes(In,Codes))),
 call_cleanup(parse_meta_ascii(nal_file, Text,Expr),
   append_buffer_codes(In,Codes)).
% Expand Stream or String
% nal_call/3: normalize input to a list of parsed items, then repeatedly
% rewrite subterms via nal_rule_rewrite/3 until a fixpoint is reached.
nal_call(Ctx, Stream, Out):- \+ compound(Stream),
  must_or_rtrace(nal_read_clauses(Stream, List)), !,
  nal_call(Ctx, List, Out).
nal_call(Ctx, List, Out):- is_list(List),!, maplist(nal_call(Ctx),List, OutL),flatten(OutL,Out).
nal_call(Ctx, InnerCtx=json(List), Out):- !, nal_call([InnerCtx|Ctx], List, Out).
nal_call(Ctx, List, Out):-
  sub_term(Sub, List), nonvar(Sub),
  nal_rule_rewrite(Ctx, Sub, NewSub),
  % ignore((NewSub=='$',wdmsg(nal_rule_rewrite(_Ctx, Sub, NewSub)))),
  nonvar(NewSub), Sub\==NewSub,
  subst(List, Sub, NewSub, NewList),
  List\==NewList, !,
  nal_call(Ctx, NewList, Out).
% Fixpoint reached: flatten and return.
nal_call(_Ctx, List, Out):- flatten([List], Out),!.
% Sole rewrite rule visible here: unwrap json(X) to X.
nal_rule_rewrite(_Ctx, json(Replace), Replace):- nonvar(Replace),!.
% Join a list of atomics into one string using Sep as separator.
nal_join_atomics(Sep,List,Joined):- atomics_to_string(List,Sep,Joined).
/*
nal_into_tokenized(Text,TokenizedText):- \+ string(Text),!,
  any_to_string(Text,String), nal_into_tokenized(String,TokenizedText).
nal_into_tokenized(Text,TokenizedText):-
  split_string(Text, "", "\s\t\r\n", [L]), L\==Text,!,
  nal_into_tokenized(L,M),
  %string_concat(M,"\n",TokenizedText).
  string_concat(M,"",TokenizedText).
nal_into_tokenized(Text,TokenizedText):- L=[_S1,_S2|_SS],
  member(Split,["\n'","'\n","<META>'","<META>","\n"]),
  atomic_list_concat(L,Split,Text),
  maplist(nal_into_tokenized,L,LO),
  atomics_to_string(LO,Split, TokenizedText).
nal_into_tokenized(Text,TokenizedText):-
  split_string(Text, "\n", "\s\t\n\r",StringList),
  maplist(into_text80_atoms,StringList,SentenceList),
  maplist(nal_join_atomics(' '),SentenceList,ListOfStrings),
  nal_join_atomics('\n',ListOfStrings,TokenizedText),!.
*/
% Export the predicates defined so far (logicmoo utility directive).
:- fixup_exports.
% nal_is_test(Kind, Input): inline test fixtures.  Kind `read` means the
% string only has to parse; `exec` fixtures (below) are also executed.
nal_is_test(read, "'the detective claims that tim lives in graz").
nal_is_test(read, "<{tim} --> (/,livingIn,_,{graz})>.").
nal_is_test(read, "<bird --> swimmer>. %0.87;0.91%").
nal_is_test(read, "''outputMustContain('<bird --> swimmer>. %0.87;0.91%')").
% Bare numbers and each variable sigil form.
nal_is_test(read, "1").
nal_is_test(read, "$1").
nal_is_test(read, "#1").
nal_is_test(read, "?1").
nal_is_test(read, "/1").
nal_is_test(read, "\\1").
% like to distinguish
% "eaten by tiger" vs. "eating tiger"
% before: (/,eat,tiger,_) vs. (/,eat,_,tiger)
% now: (eat /2 tiger) vs. (eat /1 tiger)
nal_is_test(read, "'eating tiger").
nal_is_test(read, "(eat /1 tiger)").
nal_is_test(read, "(/,eat,_,tiger)").
nal_is_test(read, "'eaten by tiger").
nal_is_test(read, "(eat /2 tiger)").
nal_is_test(read, "(/,eat,tiger,_)").
nal_is_test(read, "'intensional eating").
nal_is_test(read, "(eat \\1 tiger)").
nal_is_test(read, "(\\,eat,_,tiger)").
nal_is_test(read, "(eat \\2 tiger)").
nal_is_test(read, "(\\,eat,tiger,_)").
%
nal_is_test(exec, "'Revision ------
'Bird is a type of swimmer.
<bird --> swimmer>.
'Bird is probably not a type of swimmer.
<bird --> swimmer>. %0.10;0.60%
1
'Bird is very likely to be a type of swimmer.
''outputMustContain('<bird --> swimmer>. %0.87;0.91%')").
nal_is_test(exec, "
********** revision
IN: <bird --> swimmer>. %1.00;0.90% {0 : 1}
IN: <bird --> swimmer>. %0.10;0.60% {0 : 2}
1
OUT: <bird --> swimmer>. %0.87;0.91% {1 : 1;2}
").
nal_is_test(exec, "********** deduction
IN: <bird --> animal>. %1.00;0.90% {0 : 1}
IN: <robin --> bird>. %1.00;0.90% {0 : 2}
1
OUT: <robin --> animal>. %1.00;0.81% {1 : 2;1}
OUT: <animal --> robin>. %1.00;0.45% {1 : 2;1} ").
nal_is_test(exec, "
********** abduction
IN: <sport --> competition>. %1.00;0.90% {0 : 1}
IN: <chess --> competition>. %0.90;0.90% {0 : 2}
1
OUT: <sport --> chess>. %1.00;0.42% {1 : 2;1}
OUT: <chess --> sport>. %0.90;0.45% {1 : 2;1}
OUT: <chess <-> sport>. %0.90;0.45% {1 : 2;1}
OUT: <(&,chess,sport) --> competition>. %1.00;0.81% {1 : 2;1}
OUT: <(|,chess,sport) --> competition>. %0.90;0.81% {1 : 2;1}
OUT: <<sport --> $1> ==> <chess --> $1>>. %0.90;0.45% {1 : 2;1}
OUT: <<chess --> $1> ==> <sport --> $1>>. %1.00;0.42% {1 : 2;1}
OUT: <<chess --> $1> <=> <sport --> $1>>. %0.90;0.45% {1 : 2;1}
OUT: (&&,<chess --> #1>,<sport --> #1>). %0.90;0.81% {1 : 2;1}
").
nal_is_test(exec, "
********* induction
IN: <swan --> swimmer>. %0.90;0.90% {0 : 1}
IN: <swan --> bird>. %1.00;0.90% {0 : 2}
1
OUT: <bird --> swimmer>. %0.90;0.45% {1 : 2;1}
OUT: <swimmer --> bird>. %1.00;0.42% {1 : 2;1}
OUT: <bird <-> swimmer>. %0.90;0.45% {1 : 2;1}
OUT: <swan --> (|,bird,swimmer)>. %1.00;0.81% {1 : 2;1}
OUT: <swan --> (&,bird,swimmer)>. %0.90;0.81% {1 : 2;1}
OUT: <<$1 --> swimmer> ==> <$1 --> bird>>. %1.00;0.42% {1 : 2;1}
OUT: <<$1 --> bird> ==> <$1 --> swimmer>>. %0.90;0.45% {1 : 2;1}
OUT: <<$1 --> bird> <=> <$1 --> swimmer>>. %0.90;0.45% {1 : 2;1}
OUT: (&&,<#1 --> bird>,<#1 --> swimmer>). %0.90;0.81% {1 : 2;1}
").
nal_is_test(exec, "
********** exemplification
IN: <robin --> bird>. %1.00;0.90% {0 : 1}
IN: <bird --> animal>. %1.00;0.90% {0 : 2}
1
OUT: <robin --> animal>. %1.00;0.81% {1 : 2;1}
OUT: <animal --> robin>. %1.00;0.45% {1 : 2;1}
").
nal_is_test(exec, "
********** conversion
IN: <bird --> swimmer>. %1.00;0.90% {0 : 1}
IN: <swimmer --> bird>? {0 : 2}
2
OUT: <swimmer --> bird>. %1.00;0.47% {2 : 1}
").
nal_is_test(exec, "
********** y/n question
IN: <bird --> swimmer>. %1.00;0.90% {0 : 1}
IN: <bird --> swimmer>? {0 : 2}
1
OUT: <bird --> swimmer>. %1.00;0.90% {0 : 1}
").
nal_is_test(exec, "
********** wh-question
IN: <bird --> swimmer>. %1.00;0.80% {0 : 1}
IN: <?1 --> swimmer>? {0 : 2}
1
OUT: <bird --> swimmer>. %1.00;0.80% {0 : 1}
").
nal_is_test(exec, "
'the detective claims that tim lives in graz
'<{tim} --> (/,livingIn,_,{graz})>.
'and lawyer claims that this is not the case
<{tim} --> (/,livingIn,_,{graz})>. %0%
100
'the first deponent, a psychologist,
'claims that people with sunglasses are more aggressive
<<(*,$1,sunglasses) --> own> ==> <$1 --> [aggressive]>>.
'the third deponent claims, that he has seen tom with sunglasses on:
<(*,{tom},sunglasses) --> own>.
'the teacher claims, that people who are aggressive tend to be murders
<<$1 --> [aggressive]> ==> <$1 --> murder>>.
'the second deponent claims, that if the person lives in Graz, he is surely the murder
<<$1 --> (/,livingIn,_,{graz})> ==> <$1 --> murder>>.
'who is the murder?
<{?who} --> murder>?
''outputMustContain('<{tom} --> murder>. %1.00;0.73%')
").
nal_is_test(read, "
' Custom truth values These are added by appending {0.0 0.9} instead of %0.0;0.9% as we believe this increased the readability.
' Example
********** wh-question
IN: <bird --> swimmer>. %1.00;0.80% {0 : 1}
IN: <?1 --> swimmer>? {0 : 2}
1
OUT: <bird --> swimmer>. %1.00;0.80% {0 : 1}
' can now be
********** wh-question
IN: <bird --> swimmer>. {1.0 0.80} {0 : 1}
IN: <?1 --> swimmer>? {0 : 2}
1
OUT: <bird --> swimmer>. {1.0 0.80} {0 : 1}
").
nal_is_test(read, "
'Images
(/,rel,_,b)
' has to be written as
(rel /1 b),
' and image as
(/,rel,a,_)
' as
(rel /2 a)
' same for \\ with \\1 and \\2.
").
nal_is_test(read, "
'Intervals, to measure expected time distances between events, are always learned by ONA and stored as meta-data, they are not part of the Narsese I/O format anymore. Hence a sequence
(&/,a,+5,b)
' becomes
(&/,a,b)
' or
(a &/ b)
' and also the interval for implications is not used anymore.
").
nal_is_test(read, "
'Operators The syntactic sugar
(^op,arg_1,arg_2,arg_3,arg_n)
' is not supported anymore, instead the full notation has to be used which is supported by both systems:
<(*,arg_1,arg_2,arg_3,arg_n) --> ^op>
' though for operations without arguments, the following shortcut can be used:
^op
").
nal_is_test(read, "
'Restrictions
'1. Copulas in ONA are binary, since it's using an array-based heap for terms.
' While there are ways to encode n-ary relations in a binary heap, the ONA philosophy, following KISS,
' encourages the use of left-nesting, which is also used by the system itself to compose sequences of events:
((a &/ b) &/ c).
").
nal_is_test(read, "
'2. The parallel temporal copula &| is not implemented, please use &/ for now, again due to KISS.
' If the order does not matter in some background knowledge we want to give the system, in addition to
<(a &/ b) =/> c>
' also give it
<(b &/ a) =/> c>
' to achieve the same as with &| for now.
").
nal_is_test(read, "
'Optional syntactic improvements
' The ONA parser does not require commas, and doesn't distinguish between < and (, also it supports infix format.
<(|,a,b) --> M>.
' can be written as
<(a | b) --> M>.
' or even as
((a | b) --> M).
' Note: Spaces cannot be omitted.
").
nal_is_test(read, "
'Tim is alive.
<{Tim} --> [alive]>.
'Tim is a human.
<{Tim} --> human>.
").
nal_is_test(read, "
'Humans are a lifeform.
<human --> lifeform>.
'Lifeforms are like machines.
<lifeform <-> machine>.
").
nal_is_test(read, "
'Tom eats chocolate.
<(*,{Tom},chocolate) --> eat>.
<{Tom} --> (/,eat,_,chocolate)>.
<chocolate --> (/,eat,{Tom},_)>.
").
| TeamSPoon/logicmoo_workspace | packs_sys/logicmoo_nars/prolog/nars/nal_reader.pl | Perl | mit | 31,392 |
package MIP::Dependency_tree;
use 5.026;
use Carp;
use charnames qw{ :full :short };
use English qw{ -no_match_vars };
use open qw{ :encoding(UTF-8) :std };
use Params::Check qw{ allow check last_error };
use utf8;
use warnings;
use warnings qw{ FATAL utf8 };
## CPANM
use autodie qw{ :all };
use List::MoreUtils qw{ any firstidx };
use Readonly;
## MIPs lib/
use MIP::Constants qw{ $SPACE };
BEGIN {
require Exporter;
use base qw{ Exporter };
# Functions and variables which can be optionally exported
our @EXPORT_OK = qw{ get_dependency_tree
get_dependency_tree_chain
get_dependency_subtree
get_recipes_for_dependency_tree_chain
get_recipe_dependency_tree_chain
set_dependency_tree_order
};
}
sub get_dependency_tree {
## Function  : Collects the initiation recipe and every recipe downstream of
##             it in the dependency tree.  Traversal state is threaded through
##             the two scalar refs so recursive calls share it.
## Returns   :
## Arguments : $current_chain          => Current chain
##           : $is_recipe_found_ref    => Found initiation recipe {REF}
##           : $is_chain_found_ref     => Chain the recipe was found on {REF}
##           : $recipe                 => Initiation point
##           : $start_with_recipes_ref => Collected downstream recipes {REF}
##           : $dependency_tree_href   => Dependency hash {REF}
    my ($arg_href) = @_;
    ## Flatten argument(s)
    my $current_chain;
    my $is_recipe_found_ref;
    my $is_chain_found_ref;
    my $recipe;
    my $start_with_recipes_ref;
    my $dependency_tree_href;
    my $tmpl = {
        current_chain => {
            store => \$current_chain,
            strict_type => 1,
        },
        is_recipe_found_ref => {
            default => \$$,
            store => \$is_recipe_found_ref,
            strict_type => 1,
        },
        is_chain_found_ref => {
            default => \$$,
            store => \$is_chain_found_ref,
            strict_type => 1,
        },
        recipe => {
            required => 1,
            store => \$recipe,
            strict_type => 1,
        },
        start_with_recipes_ref => {
            default => [],
            defined => 1,
            required => 1,
            store => \$start_with_recipes_ref,
            strict_type => 1,
        },
        dependency_tree_href => {
            default => {},
            defined => 1,
            required => 1,
            store => \$dependency_tree_href,
            strict_type => 1,
        },
    };
    check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!};
    ## Copy hash to enable recursive removal of keys
    my %tree = %{$dependency_tree_href};
  KEY_VALUE_PAIR:
    while ( my ( $key, $value ) = each %tree ) {
        ## Do not enter into more chains than one if recipe and chain is found
        ## (once located on a non-MAIN chain, other CHAIN_* branches are skipped)
        next KEY_VALUE_PAIR
          if ( $key =~ /CHAIN_/sxm
            && ${$is_recipe_found_ref}
            && ${$is_chain_found_ref} ne q{CHAIN_MAIN} );
        ## Track which chain we are currently descending into
        if ( $key =~ /CHAIN_/sxm ) {
            $current_chain = $key;
        }
        if ( ref $value eq q{ARRAY} ) {
            ## Inspect element
          ELEMENT:
            foreach my $element ( @{$value} ) {
                ## Call recursive
                if ( ref $element eq q{HASH} ) {
                    get_dependency_tree(
                        {
                            current_chain => $current_chain,
                            dependency_tree_href => $element,
                            is_recipe_found_ref => $is_recipe_found_ref,
                            is_chain_found_ref => $is_chain_found_ref,
                            recipe => $recipe,
                            start_with_recipes_ref => $start_with_recipes_ref,
                        }
                    );
                }
                ## Found initiator recipe
                if ( ref $element ne q{HASH}
                    && $element eq $recipe )
                {
                    ## Start collecting recipes downstream
                    ${$is_recipe_found_ref} = 1;
                    ## Found chain that recipe belongs to
                    # Set is part of chain signal
                    ${$is_chain_found_ref} = $current_chain;
                }
                ## Special case for parallel section
                if ( $key eq q{PARALLEL}
                    && ${$is_recipe_found_ref} )
                {
                    if ( any { $_ eq $recipe } @{$value} ) {
                        ## Add only start_with recipe from parallel section
                        push @{$start_with_recipes_ref}, $recipe;
                        ## Skip any remaining hash_ref or element
                        last ELEMENT;
                    }
                }
                ## Add downstream recipes
                if ( ref $element ne q{HASH}
                    && ${$is_recipe_found_ref} )
                {
                    push @{$start_with_recipes_ref}, $element;
                }
            }
        }
        ## Remove identifier
        delete $tree{$key};
    }
    return;
}
sub get_dependency_tree_chain {
## Function  : Sets the chain id of every recipe in the parameters hash by
##             walking the dependency tree (recipe => {chain => ID}).
## Returns   :
## Arguments : $current_chain        => Current chain
##           : $dependency_tree_href => Dependency hash {REF}
##           : $parameter_href       => Parameter hash {REF}
    my ($arg_href) = @_;
    ## Flatten argument(s)
    my $current_chain;
    my $dependency_tree_href;
    my $parameter_href;
    my $tmpl = {
        current_chain => {
            store => \$current_chain,
            strict_type => 1,
        },
        dependency_tree_href => {
            default => {},
            defined => 1,
            required => 1,
            store => \$dependency_tree_href,
            strict_type => 1,
        },
        parameter_href => {
            default => {},
            defined => 1,
            required => 1,
            store => \$parameter_href,
            strict_type => 1,
        },
    };
    check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!};
    ## Copy hash to enable recursive removal of keys
    my %tree = %{$dependency_tree_href};
  KEY_VALUE_PAIR:
    while ( my ( $key, $value ) = each %tree ) {
        ## Add ID of chain (text after the CHAIN_ prefix)
        my ($chain_id) = $key =~ /CHAIN_(\S+)/sxm;
        ## If chain_id is found
        if ( defined $chain_id ) {
            ## Set current chain
            $current_chain = $chain_id;
        }
        ## Call recursive
        if ( ref $value eq q{HASH} ) {
            get_dependency_tree_chain(
                {
                    current_chain => $current_chain,
                    dependency_tree_href => $value,
                    parameter_href => $parameter_href,
                }
            );
        }
        elsif ( ref $value eq q{ARRAY} ) {
            ## Inspect element
          ELEMENT:
            foreach my $element ( @{$value} ) {
                ## Call recursive
                if ( ref $element eq q{HASH} ) {
                    get_dependency_tree_chain(
                        {
                            current_chain => $current_chain,
                            dependency_tree_href => $element,
                            parameter_href => $parameter_href,
                        }
                    );
                }
                ## Found recipes - tag with the chain we are on
                if ( ref $element ne q{HASH} ) {
                    $parameter_href->{$element}{chain} = $current_chain;
                }
                ## NOTE(review): this branch also runs when $element is a hash
                ## ref, in which case the ref's stringification becomes the
                ## parameter key - confirm that is intended.
                if ( $key eq q{PARALLEL} ) {
                    $parameter_href->{$element}{chain} = uc $element;
                }
                ## Hash in PARALLEL section create anonymous chain ID
                ## E.g. haplotypecaller->genotypegvcfs
                if ( $key eq uc $element ) {
                  RECIPE:
                    foreach my $recipe ( @{$value} ) {
                        $parameter_href->{$recipe}{chain} = uc $element;
                    }
                    last ELEMENT;
                }
            }
        }
        ## Remove identifier
        delete $tree{$key};
    }
    return;
}
sub get_dependency_subtree {
## Function  : Finds the sub tree rooted at $chain_initiation_point and stores
##             it (keyed by that chain) in $dependency_subtree_href.
## Returns   : %dependency_tree (via $dependency_subtree_href)
## Arguments : $chain_initiation_point => Chain to operate on
##           : $dependency_tree_href   => Dependency hash {REF}
##           : $dependency_subtree_href => Dependency sub hash {REF}
    my ($arg_href) = @_;
    ## Flatten argument(s)
    my $chain_initiation_point;
    my $dependency_tree_href;
    my $dependency_subtree_href;
    my $tmpl = {
        chain_initiation_point => {
            store => \$chain_initiation_point,
            strict_type => 1,
        },
        dependency_tree_href => {
            default => {},
            defined => 1,
            required => 1,
            store => \$dependency_tree_href,
            strict_type => 1,
        },
        dependency_subtree_href => {
            default => {},
            required => 1,
            store => \$dependency_subtree_href,
            strict_type => 1,
        },
    };
    check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!};
    ## Return if tree is found (short-circuits all pending recursive calls)
    return if ( defined $dependency_subtree_href->{$chain_initiation_point} );
    ## Copy hash to enable recursive removal of keys
    my %tree = %{$dependency_tree_href};
  KEY_VALUE_PAIR:
    while ( my ( $key, $value ) = each %tree ) {
        ## Save subtree if it matches chain
        if ( $key eq $chain_initiation_point ) {
            $dependency_subtree_href->{$chain_initiation_point} = $value;
        }
        ## Inspect element
        if ( ref $value eq q{ARRAY} ) {
          ELEMENT:
            foreach my $element ( @{$value} ) {
                ## Call recursive on nested sub trees
                if ( ref $element eq q{HASH} ) {
                    get_dependency_subtree(
                        {
                            dependency_tree_href => $element,
                            dependency_subtree_href => $dependency_subtree_href,
                            chain_initiation_point => $chain_initiation_point,
                        }
                    );
                }
            }
            delete $tree{$key};
        }
    }
    return;
}
sub set_dependency_tree_order {
## Function  : Walks the dependency tree depth-first and appends every recipe
##             name, in encounter order, to the supplied array ref.
## Returns   :
## Arguments : $recipes_ref          => Collected recipe names {REF}
##           : $dependency_tree_href => Dependency hash {REF}
    my ($arg_href) = @_;
    ## Flatten argument(s)
    my $recipes_ref;
    my $dependency_tree_href;
    my $tmpl = {
        recipes_ref => {
            default => [],
            defined => 1,
            required => 1,
            store => \$recipes_ref,
            strict_type => 1,
        },
        dependency_tree_href => {
            default => {},
            defined => 1,
            required => 1,
            store => \$dependency_tree_href,
            strict_type => 1,
        },
    };
    check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!};
    ## Work on a shallow copy so the caller's tree is left untouched
    my %node = %{$dependency_tree_href};
  BRANCH:
    foreach my $branch_value ( values %node ) {
        ## A nested hash is a sub tree - descend into it
        if ( ref $branch_value eq q{HASH} ) {
            set_dependency_tree_order(
                {
                    dependency_tree_href => $branch_value,
                    recipes_ref => $recipes_ref,
                }
            );
            next BRANCH;
        }
        ## Only arrays can hold recipes and/or further sub trees
        next BRANCH if ( ref $branch_value ne q{ARRAY} );
      ELEMENT:
        foreach my $element ( @{$branch_value} ) {
            ## Sub tree inside the list - recurse
            if ( ref $element eq q{HASH} ) {
                set_dependency_tree_order(
                    {
                        dependency_tree_href => $element,
                        recipes_ref => $recipes_ref,
                    }
                );
                next ELEMENT;
            }
            ## Plain recipe name - record it in traversal order
            push @{$recipes_ref}, $element;
        }
    }
    return;
}
sub get_recipes_for_dependency_tree_chain {
## Function  : Collects every recipe downstream of a chain initiation point,
##             optionally sliced to begin at a given recipe.
## Returns   : @chain_recipes
## Arguments : $chain_initiation_point  => Chain to operate on
##           : $dependency_tree_href    => Dependency hash {REF}
##           : $recipe_initiation_point => Recipe to start with
    my ($arg_href) = @_;
    ## Flatten argument(s)
    my $chain_initiation_point;
    my $dependency_tree_href;
    my $recipe_initiation_point;
    my $tmpl = {
        chain_initiation_point => {
            defined => 1,
            required => 1,
            store => \$chain_initiation_point,
            strict_type => 1,
        },
        dependency_tree_href => {
            default => {},
            defined => 1,
            required => 1,
            store => \$dependency_tree_href,
            strict_type => 1,
        },
        recipe_initiation_point => {
            defined => 1,
            store => \$recipe_initiation_point,
            strict_type => 1,
        },
    };
    check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!};
    ## Extract the sub tree rooted at the requested chain
    my %subtree;
    get_dependency_subtree(
        {
            chain_initiation_point => $chain_initiation_point,
            dependency_subtree_href => \%subtree,
            dependency_tree_href => $dependency_tree_href,
        }
    );
    ## Flatten the sub tree into an ordered recipe list
    my @chain_recipes;
    set_dependency_tree_order(
        {
            dependency_tree_href => \%subtree,
            recipes_ref => \@chain_recipes,
        }
    );
    ## Without a recipe initiation point the whole chain is returned
    return @chain_recipes if not $recipe_initiation_point;
    ## Otherwise drop everything upstream of the requested recipe
    my $start_idx = firstidx { $_ eq $recipe_initiation_point } @chain_recipes;
    return @chain_recipes[ $start_idx .. $#chain_recipes ];
}
sub get_recipe_dependency_tree_chain {
## Function  : Gets the chain (CHAIN_* key) to which a recipe belongs and
##             returns it via $chain_id_ref.
## Returns   :
## Arguments : $chain_id_ref         => Chain found {REF}
##           : $current_chain        => Current chain
##           : $dependency_tree_href => Dependency hash {REF}
##           : $recipe               => Initiation point
    my ($arg_href) = @_;
    ## Flatten argument(s)
    my $chain_id_ref;
    my $current_chain;
    my $dependency_tree_href;
    my $recipe;
    my $tmpl = {
        chain_id_ref => {
            default => \$$,
            store => \$chain_id_ref,
            strict_type => 1,
        },
        current_chain => {
            store => \$current_chain,
            strict_type => 1,
        },
        dependency_tree_href => {
            default => {},
            defined => 1,
            required => 1,
            store => \$dependency_tree_href,
            strict_type => 1,
        },
        recipe => {
            required => 1,
            store => \$recipe,
            strict_type => 1,
        },
    };
    check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!};
    ## Return if chain has been found (short-circuits pending recursion)
    return if ( ${$chain_id_ref} );
    ## Copy hash to enable recursive removal of keys
    my %tree = %{$dependency_tree_href};
  KEY_VALUE_PAIR:
    while ( my ( $key, $value ) = each %tree ) {
        ## Don't store PARALLEL as the current chain
        if ( $key =~ /CHAIN_/sxm ) {
            $current_chain = $key;
        }
        ## Call recursive
        if ( ref $value eq q{HASH} ) {
            get_recipe_dependency_tree_chain(
                {
                    chain_id_ref => $chain_id_ref,
                    current_chain => $current_chain,
                    dependency_tree_href => $value,
                    recipe => $recipe,
                }
            );
        }
        ## Inspect element
        if ( ref $value eq q{ARRAY} ) {
          ELEMENT:
            foreach my $element ( @{$value} ) {
                ## Call recursive
                if ( ref $element eq q{HASH} ) {
                    get_recipe_dependency_tree_chain(
                        {
                            chain_id_ref => $chain_id_ref,
                            current_chain => $current_chain,
                            dependency_tree_href => $element,
                            recipe => $recipe,
                        }
                    );
                }
                ## Found recipe
                if ( ( ref $element ne q{HASH} ) && ( $element eq $recipe ) ) {
                    ## Save current chain
                    ${$chain_id_ref} = $current_chain;
                    last ELEMENT;
                }
            }
            delete $tree{$key};
        }
    }
    return;
}
1;
| henrikstranneheim/MIP | lib/MIP/Dependency_tree.pm | Perl | mit | 17,460 |
#!/usr/bin/perl
# Static SGN help page for the phenotype search: prints a block of HTML
# describing the search-form fields inside the standard SGN page frame.
# Fixes: malformed </li tag, stray empty <p></p>, missing space after "in."
use strict;
use warnings;
use CXGN::Page;

my $page=CXGN::Page->new('');
$page->header('SGN: Phenotype search help');

# Page body; interpolation-free static HTML.
print <<EOHTML;
<p>
The <a href="/search/direct_search.pl?search=phenotypes">phenotype search</a> is a
tool for searching SGN's database for accessions from phenotyped populations.
</p>
<img src="/documents/help/screenshots/pheno_search_help.png" alt="screenshot of phenotype search form" style="margin: auto; border: 1px solid black;" />
<h4>1. Keyword</h4>
<p>
Here a word or a string can be typed. Searches the database for individuals with the word/string in their 'phenotype description' field.
</p>
<h4>2. Population</h4>
<p>
Each individual accession in the database is associated to a population.
The phenotype search can be limited to only accessions from a specific population.
Currently the following phenotyped populations are included in the searchable database:
<ul>
<li>M82 EMS mutant population</li>
<li>Fast-neutron M82 mutant population</li>
<li>TGRC monogenic mutant population</li>
<li>Eggplant EMS mutant population</li>
<li>TGRC monogenic mutant population</li>
<li>F2 2000 mapping population</li>
<li>M82 x L.pennellii IL population</li>
</ul>
</p>
<h4>3. Accession</h4>
<p>
If you know the accession name of your individual, type it in. By default, the database will be searched for all individuals with an accession name that starts with what you type in. For example, if you type in "e000", your results might include e0001m1, e0001m2, e0002m1, etc.
The search is case-insensitive.
</p>
EOHTML
$page->footer();
| solgenomics/sgn | cgi-bin/help/phenotype_search_help.pl | Perl | mit | 1,582 |
#!/usr/bin/perl
#
# Author: Long H. Do (long.h.do@gmail.com)
#
# This script takes as input .diff files produced by CuffDiff to produce plots that search for GEDDs
# Requires R and the Bioconductor preprocessCore library
#
# usage: ./thisscript cuffdiff1 cuffdiff2 cuffdiff3
#
###########################################################################################
use strict;
use POSIX;
#takes cuffdiff .diff files and plots lowess smoothen lines across the chromosomes
# Per-sample y-axis labels and rho pairing order; populated per experiment.
my @y_title;# = ('Ts65Dn Fibroblast**','Ts65Dn Fibroblast*');
my @rho_order;# = ('1_3','2_3','1_2');
my @selected_chr = (); #shows all chromosomes
#my @selected_chr =(10,16); #plots only chromosomes 10 and 16
#########change this to your organism of choice to lable the chromosomes
my $box_prefix = 'Chr';
#my $box_prefix = 'HSA';
# NOTE(review): $correlation_method is set here but not referenced in the
# visible code - confirm it is used elsewhere in this script.
my $correlation_method = 'spearman';
# PDF layout: output file name, page size and plots-per-page grid.
my $pdf_filename = 'spearman.pdf';
my $pdf_width = 10;
my $pdf_height = 10;
my $pdf_column = 4;
my $pdf_row = 6;
my $plot_pointsize = .3;
my $plot_pointtype = 19;
###COLOR OPTIONS######
# Point/line colours keyed by comparison index (1..3).
my %points_color = (1=>'lightpink',2=>'lightblue',3=>'lightgray');
my %lines_color = (1=>'red',2=>'blue',3=>'black');
my $box_title_size=.7;
my $box_title_pos = 0;
my $axis_label_font_size = .7;
my $axis_tick_size = .01;
my $axis_color = '#989898';
my $ylim = '-2,2';
my $rho_text_pos = '0,-1.7';
my $rho_font_size = 1.5;
my $plot_f = .1;
my $lowess_f = .3; #smoother span
my $lowess_iter = 1000; #iterations
my $tmpdir = '.tmp';
if(!-d $tmpdir){
  mkdir($tmpdir) || die "Error creating directory: $tmpdir\n";
}
# Pipe generated R code straight into Rscript on stdin.
# NOTE(review): 2-arg piped open with a bareword handle; prefer
# open(my $fh, '|-', '/usr/bin/Rscript', '-') in a future cleanup.
open (RSCRIPT,"|/usr/bin/Rscript -") || die "$!\n";
#open (RSCRIPT,">rscript.txt");
print RSCRIPT '#! /usr/bin/Rscript --vanilla --default-packages=utils' . "\n";
_print_plots(_get_spearman(\@ARGV));
# Emit the R statements that render one panel per chromosome (log2 FC
# scatter plus lowess line per sample pair) into the PDF, print the axis
# titles, and — when all chromosomes are shown — compute and print the
# genome-wide correlation for every sample pair.
#
# Args (exactly what _get_spearman returns):
#   $spearman - hashref: chr => arrayref of "i_j" pair tags; an R variable
#               rho_<chr>_<i>_<j> is assumed to already exist for each tag
#   $data     - hashref: chr => arrayref of pair indices 1..N; R objects
#               data.<chr> and lowess.<chr>_<i> are assumed to exist
# Side effects: writes to the global RSCRIPT pipe, closes it (which runs
# the accumulated R program), then deletes $tmpdir.
sub _print_plots{
my ($spearman,$data) = @_;
# Open the PDF device and set up the grid of panels plus outer margins.
print RSCRIPT 'pdf("'.$pdf_filename.'",width='.$pdf_width.',height='.$pdf_height.');par(mfrow=c('.$pdf_column.','.$pdf_row.'),oma = c(4,7,0,0) + .1, mar = c(1,0,1,1) + .1)' . "\n";
my @sorted;
if($selected_chr[0]){
@sorted = @selected_chr;
}
else{
my @chr = keys(%{$spearman});
# Numeric sort of chromosome names; a non-numeric "X" compares as 0 and
# lands first, so it is explicitly moved to the end below.
@sorted = sort{$a <=> $b}@chr;
if($sorted[0] eq "X"){
shift(@sorted);
push @sorted,"X";
}
}
# %uniq_num collects the sample indices seen, %uniq_pair the "i_j" pair
# tags — both are reused for the genome-wide rho block at the bottom.
my (%uniq_num,%uniq_pair);
for my $chr (@sorted){
my @rho;
my $n=1;
for my $rho (@{$spearman->{$chr}}){
my ($i,$j) = split '_', $rho;
$uniq_pair{$rho}++;
$uniq_num{$i}++;
$uniq_num{$j}++;
# R bquote() fragment that prints "rho[i,j] == <rounded value>" in-panel.
push @rho,'rho["'.$i.','.$j.'"] == .(round(rho_'."$chr\_$rho".',2))';
$n++;
}
for my $i (@{$data->{$chr}}){
if($i == 1){
# First pair: create the panel (plot + box + axes) and draw its lowess line.
print RSCRIPT 'write("Plotting Chr' .$chr. '...",stderr()); log2data = log2(data.'."$chr".'[,'.($i*2).'] / data.'."$chr".'[,'.($i*2 - 1).']); plot(log2data,cex='.$plot_pointsize.',pch='.$plot_pointtype.',col="'.$points_color{$i}.'",ylim=c('.$ylim.'), xlab="",ylab="",axes=FALSE); box(lwd=.5,col="'.$axis_color.'");axis(side=1,lwd=.5,cex.axis='.$axis_label_font_size.', col="'.$axis_color.'",tck='.$axis_tick_size.',las=2,labels=NA);axis(side = 1, cex.axis='.$axis_label_font_size.', lwd = 0, line = -1.1, las = 1); axis(side=2,at=seq('.$ylim.',by=1),lwd=.5,cex.axis='.$axis_label_font_size.',col="'.$axis_color.'",tck='.$axis_tick_size.',las=3,labels=NA); axis(side = 2, at=seq('.$ylim.',by=1),cex.axis='.$axis_label_font_size.', lwd = 0, line = -.9, las = 2);lines(lowess(log2data,f='.$plot_f.',iter='.$lowess_iter.'),lwd=.4,col="'.$lines_color{$i}.'");' . "\n";
}
else{
# Subsequent pairs: overlay points and lowess line on the existing panel.
print RSCRIPT 'log2data = log2(data.'."$chr".'[,'.($i*2).'] / data.'."$chr".'[,'.($i*2 - 1).']) ; points(log2data,cex='.$plot_pointsize.',pch='.$plot_pointtype.',col="'.$points_color{$i}.'");lines(lowess(log2data,f='.$plot_f.',iter='.$lowess_iter.'),lwd=.4,col="'.$lines_color{$i}.'");' . "\n";
}
}
# Panel title (e.g. "Chr16") and the in-panel rho annotations.
print RSCRIPT 'mtext(side = 3, "'. $box_prefix . $chr.'", cex='.$box_title_size.',line ='.$box_title_pos.');text('.$rho_text_pos.',adj=0,cex='.$rho_font_size.',labels=bquote(' . join('*","~',@rho) . '));' . "\n";
}
# Shared x-axis title in the outer margin.
print RSCRIPT 'mtext("Genes (chromosomal order)",side=1,cex=1,outer=TRUE,padj=1);';
my $i = 1;
my $padj = -.85;
# One y-axis title per sample pair, stacked and colored to match its line.
for my $y_title(@y_title){
print RSCRIPT 'mtext(expression(paste(log[2],"[FC] ' . $y_title . '")),side=2,las=0,cex=1,adj=0,padj='. $padj .',outer=TRUE,at=0.43,col="'.$lines_color{$i}.'");';
$padj -= 1.65;
$i++;
}
# Genome-wide correlations only make sense when every chromosome was plotted.
unless( $selected_chr[0]){
# Concatenate each pair's per-chromosome lowess y-vectors into one vector.
for my $num (keys %uniq_num){
my @lowess_all;
for my $chr (@sorted){
push @lowess_all, 'lowess.'.$chr.'_'.$num . '$y';
}
print RSCRIPT "lowess.all_$num = c(" . join(",",@lowess_all) . ');';
}
# Correlate the concatenated vectors for every pair and print the result
# (Rscript output appears on this script's stdout).
for my $pair(sort keys %uniq_pair){
my ($i,$j) = split '_',$pair;
print RSCRIPT 'rho.all_' . "$i\_$j". '= cor(lowess.' . "all\_$i" . ',lowess.' . "all\_$j" . ',method="'.$correlation_method.'");' . "\n";
print RSCRIPT "print('$i\_$j');print(rho.all_" ."$i\_$j);\n";
}
}
# Closing the pipe hands the complete program to Rscript and waits for it.
close RSCRIPT;
if(-d $tmpdir){
system("rm -rf $tmpdir");
}
}
# Parse the cuffdiff .diff files into a nested structure keyed by
# chromosome and start position, and derive plot-layout globals.
#
# Args:
#   $files - arrayref of .diff filenames (one per sample pair)
# Returns:
#   (\%fpkm_chr, \@header) where
#     $fpkm_chr{$chr}{$start}{$file_index} = [ \@diff_columns, ... ]
#     @header holds "sample1__sample2" labels, one per input file
# Side effects: appends to @y_title and rewrites $pdf_row, $pdf_column,
# $plot_pointsize and $rho_font_size based on how many chromosomes and
# files were seen.
# NOTE(review): assumes the standard cuffdiff .diff column layout
# (col 3 = locus "chr:start-stop", cols 4/5 = sample names,
# cols 7/8 = FPKM values) — confirm against the cuffdiff version used.
sub _makeFC_chr{
my ($files) = @_;
# Lookup set for the optional chromosome filter.
my %selected;
if($selected_chr[0]){
for my $chr(@selected_chr){
$selected{$chr}++;
}
}
my %fpkm_chr;
my @header;
my $f=1;
foreach my $file (@{$files}){
open (FILE,"$file") || die "$!\n";
my $n=1;
my $flag=1;
while (<FILE>){
chomp;
my @col = split;
# Header lines also fail this numeric test (string compares as 0), so
# they are skipped along with low-expression records.
next unless ($col[7] >= .1); #only keep if fpkm > .1
my($chr,$position) = split ':',$col[3];
next unless ($chr && $position);
$chr =~ s/chr//;
# Keep only numbered chromosomes and X (mitochondrial/random dropped).
next unless $chr =~ /^\d+$|^X$/;
if($selected_chr[0]){
next unless($selected{$chr});
}
my ($start,$stop) = split '-',$position;
my $name = $col[4] . '__'. $col[5];
# Keep only the first record per (chr, start, file); fall back to the
# stop coordinate as key when the interval is degenerate.
if(!defined $fpkm_chr{$chr}{$start}{$f}){
if($stop - $start > 0){
push @{$fpkm_chr{$chr}{$start}{$f}},\@col;
}
else{
push @{$fpkm_chr{$chr}{$stop}{$f}},\@col;
}
}
# First kept line of each file supplies the sample-pair labels.
if($n == 1){
push @header,$name;
push @y_title,$col[5] . " \($f\)";
}
$n++;
}
close FILE;
$f++;
}
##modify row/columns based on total chromosomes
my $chr_count = scalar(keys(%fpkm_chr));
if($chr_count <=6){
$pdf_row = 2;
$plot_pointsize = .2;
}
else{
$pdf_row = 4;
# Shrink the rho annotation when 3+ files would crowd the panel
# ($f has been incremented once past the last file index here).
if($f>3){
$rho_font_size = .9;
}
}
$pdf_column = ceil($chr_count/$pdf_row);
return(\%fpkm_chr,\@header);
}
# Write one quantile-normalized FPKM table per chromosome into $tmpdir and
# emit the R code that loads it, fits a lowess curve per sample pair, and
# computes the pairwise per-chromosome correlations (rho_<chr>_<i>_<j>).
#
# Args:
#   $files - arrayref of .diff filenames (same list passed to _makeFC_chr)
# Returns:
#   (\%spearman, \%data) where
#     $spearman{$chr} = [ "i_j", ... ]  pair tags with an R rho variable
#     $data{$chr}     = [ 1..$pairs ]   pair indices with R lowess objects
# Side effects: writes "$tmpdir/<chr>_fpkm.txt" files and appends R setup
# code to the global RSCRIPT pipe. Requires Bioconductor preprocessCore.
sub _get_spearman{
my ($files) = @_;
my (%spearman,%data);
my $pairs = scalar(@{$files});
print RSCRIPT 'library(preprocessCore)' . "\n";
my ($fpkm_chr,$header) = _makeFC_chr($files);
# Flatten "sample1__sample2" labels into tab-separated column headers.
my $header_string;
for my $exp(@{$header}){
my @e = split '__',$exp;
$header_string .= "\t" . join("\t",@e);
}
for my $chr(keys %{$fpkm_chr}){
next unless $chr =~ /^\d+$|^X$/;
my $i = 1;
open (CHRFPKM,">$tmpdir/$chr\_fpkm.txt") || die "$!: Unable to create chr fpkm file\n";
print CHRFPKM "gene\tlocus" . $header_string . "\n";
# Genes are written in chromosomal order so the x-axis is positional.
my @position = keys(%{$fpkm_chr->{$chr}});
my @sorted = sort{$a <=> $b}@position;
for my $pos (@sorted){
# Only emit loci observed in every input file, so each row has a
# complete set of FPKM columns.
if( scalar(keys %{$fpkm_chr->{$chr}{$pos}}) == $pairs){
my $n=1;
for my $name (@{$header}){
my @col = @{$fpkm_chr->{$chr}{$pos}{$n}};
if($n==1){
print CHRFPKM $col[0]->[2] . "\t" . $col[0]->[3];
}
# Columns 7 and 8 of the .diff record are the two FPKM values.
for my $col(@col){
print CHRFPKM "\t" . $col->[7] . "\t" . $col->[8];
}
$n++;
}
print CHRFPKM "\n";
}
}
close CHRFPKM;
# Load the table just written, then quantile-normalize the FPKM columns
# (columns 3 .. 2*pairs+2; the first two are gene and locus).
print RSCRIPT 'write("Calculating lowess for Chr' .$chr. '...",stderr());' . 'chr_'.$chr.'_fpkm <- read.table("'. "$tmpdir\/$chr\_fpkm.txt" .'",header=T);' . "\n";
#print 'write("Calculating lowess for Chr' .$chr. '...",stderr());' . 'chr_'.$chr.'_fpkm <- read.table("'. "$tmpdir\/$chr\_fpkm.txt" .'",header=T);' . "\n";
print RSCRIPT 'data.'."$chr".' = normalize.quantiles(as.matrix('. "chr_$chr\_fpkm" .'[,3:'. (scalar(@{$files}) * 2 + 2) .']));' . "\n";
#print 'data.'."$chr".' = normalize.quantiles(as.matrix('. "chr_$chr\_fpkm" .'[,3:'. (scalar(@{$files}) * 2 + 2) .']));' . "\n";
# One lowess fit of log2(value_2/value_1) per sample pair.
for (my $i=1;$i<=$pairs;$i++){
print RSCRIPT 'lowess.'."$chr\_$i".' = lowess(log2(data.'."$chr".'[,'. ($i*2) .']/data.'."$chr".'[,'. ($i*2 - 1).']),f='.$lowess_f.',iter='.$lowess_iter.');' . "\n";
#print 'lowess.'."$chr\_$i".' = lowess(log2(data.'."$chr".'[,'. ($i*2) .']/data.'."$chr".'[,'. ($i*2 - 1).']),f='.$lowess_f.',iter='.$lowess_iter.');' . "\n";
push @{$data{$chr}},$i;
}
# All unordered pairs (i < j) of sample indices.
my %combo;
for (my $i=1;$i<=$pairs;$i++){
for (my $j=1;$j<=$pairs;$j++){
if($i<$j){
$combo{"$i\_$j"}++;
}
}
}
my @order = sort keys %combo;
my $n =1;
# Correlate the lowess y-vectors of each pair on this chromosome.
for my $order(@order){
my ($i,$j) = split '_',$order;
print RSCRIPT 'rho_' ."$chr\_$i\_$j". '= cor(lowess.' . "$chr\_$i" . '$y,lowess.' . "$chr\_$j" . '$y,method="'.$correlation_method.'");' . "\n";
#print 'rho_' ."$chr\_$i\_$j". '= cor(lowess.' . "$chr\_$i" . '$y,lowess.' . "$chr\_$j" . '$y,method="'.$correlation_method.'");' . "\n";
push @{$spearman{$chr}},"$i\_$j";
$n++;
}
}
return (\%spearman,\%data);
}
| lhdo/GEDDplot | makeSpearmanFigures.pl | Perl | mit | 8,808 |
#!/usr/bin/perl
use strict;
use warnings;

# Advent of Code 2018 day 7, part 1: topological order of steps,
# ties broken alphabetically.
#
# %steps maps each step letter to a hashref whose keys are the steps
# that must finish before it can begin (its prerequisites).
my %steps;

while (my $line = <>) {
    if ($line =~ /Step (.) must be finished before step (.) can begin./) {
        my ($pre, $post) = ($1, $2);
        # Make sure both steps exist even if one has no prerequisites.
        $steps{$pre}  //= {};
        $steps{$post} //= {};
        $steps{$post}{$pre} = 1;
    }
}

# Repeatedly emit the first runnable step, then erase it both as a
# pending step and as a prerequisite of every remaining step.
while (defined(my $done = get_next())) {
    print $done;
    delete $_->{$done} for values %steps;
    delete $steps{$done};
}
print "\n";
# Return the alphabetically-first step in %steps whose prerequisite set is
# empty, or undef when no step is currently runnable (including when %steps
# itself is empty).
sub get_next {
    # Note: the original `keys $steps{$_}` relied on the experimental
    # "keys on a reference" feature, which was removed in Perl 5.24 and is
    # a fatal error on modern perls; dereference the hashref explicitly.
    my @can_run_next = grep { !scalar keys %{ $steps{$_} } } sort keys %steps;
    my $next = undef;
    $next = $can_run_next[0] if scalar @can_run_next;
    return $next;
}
| KenMGJ/advent-of-code | 2018/07/part1.pl | Perl | mit | 677 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.