code stringlengths 2 1.05M | repo_name stringlengths 5 101 | path stringlengths 4 991 | language stringclasses 3 values | license stringclasses 5 values | size int64 2 1.05M |
|---|---|---|---|---|---|
# The package statement is split across two lines, presumably to keep
# PAUSE/CPAN indexers from indexing this generated module -- confirm.
package #
Date::Manip::Offset::off309;
# Copyright (c) 2008-2015 Sullivan Beck. All rights reserved.
# This program is free software; you can redistribute it and/or modify it
# under the same terms as Perl itself.

# This file was automatically generated. Any changes to this file will
# be lost the next time 'tzdata' is run.
# Generated on: Wed Nov 25 11:44:43 EST 2015
# Data version: tzdata2015g
# Code version: tzcode2015g

# This module contains data from the zoneinfo time zone database. The original
# data was obtained from the URL:
# ftp://ftp.iana.org/tz

use strict;
use warnings;
require 5.010000;

our ($VERSION);
$VERSION='6.52';
# Release package-scoped data at interpreter shutdown.
END { undef $VERSION; }

our ($Offset,%Offset);
END {
    undef $Offset;
    undef %Offset;
}

# The UTC offset string this module represents.
$Offset = '-04:32:36';

# Map: numeric category => list of zone names using this offset.
# NOTE(review): the meaning of key 0 is defined by the Date::Manip
# generator, not visible in this file.
%Offset = (
    0 => [
        'america/la_paz',
    ],
);

1;
| jkb78/extrajnm | local/lib/perl5/Date/Manip/Offset/off309.pm | Perl | mit | 853 |
#!/usr/bin/env perl
# Tiny TCP address-book server: accepts one semicolon-delimited command
# per connection and dispatches it to PPP::Abook operations.
use strict;
use warnings;
use 5.014;
use lib './lib';
use PPP::Abook qw/add_address remove_address list/;
use IO::Socket;

# Dispatch table: wire-protocol operation name => handler imported from
# PPP::Abook.
my %cmds = (
    add    => \&add_address,
    remove => \&remove_address,
    list   => \&list
);
# Dispatches a parsed command to its handler.  Returns the handler's
# result, or an error string for an unknown (or missing) operation.
sub handle_cmd {
    my ($op, @args) = @_;

    # $op is undef when the client sent an empty line; probing exists()
    # with an undef key would raise an uninitialized-value warning, so
    # reject that case explicitly first.
    if (!defined $op || !exists $cmds{$op}) {
        return "ERR: invalid command";
    }

    return $cmds{$op}->(@args);
}
# Split a raw command line into fields on unescaped semicolons; a
# semicolon preceded by a backslash is treated as literal field data.
sub parse {
    my ($raw) = @_;
    return split /(?<!\\);/, $raw;
}
# Accept loop: binds a TCP listener on localhost:7337 and serves one
# newline-terminated command per connection, replying with the handler's
# response.  Runs until the process is killed.
sub conn_handler {
    my $server_sock = IO::Socket::INET->new(
        LocalAddr => 'localhost:7337',
        Proto     => 'tcp',
        Listen    => 1,
        ReuseAddr => 1,
    ) or die "Cannot create socket: $!\n";

    while (my $cmd_sock = $server_sock->accept()) {
        my $data = <$cmd_sock>;

        # A client may connect and close without sending a line; readline
        # then returns undef, and the substitution below would warn (and
        # operate on undef).  Skip such connections.
        unless (defined $data) {
            close $cmd_sock;
            next;
        }

        $data =~ s/(\r\n|\n|\n\r)$//;
        say "[$data]";

        my @cmd  = parse($data);
        my $resp = handle_cmd(@cmd);

        # A dispatch target may return nothing; never say() undef.
        say $cmd_sock defined $resp ? $resp : '';
        close $cmd_sock;
    }
}
conn_handler();
| stelf/fmi-perl | examples/08.0-net/04-abook/bin/abook.pl | Perl | cc0-1.0 | 873 |
#
# Copyright 2018 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::zyxel::snmp::plugin;

use strict;
use warnings;
use base qw(centreon::plugins::script_snmp);

# Plugin entry point: registers the SNMP monitoring modes available for
# Zyxel equipment.
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    $self->{version} = '1.0';

    # Mode name (command-line value) => implementing class.
    my %modes = (
        'cpu'             => 'network::zyxel::snmp::mode::cpu',
        'interfaces'      => 'snmp_standard::mode::interfaces',
        'list-interfaces' => 'snmp_standard::mode::listinterfaces',
        'list-vpn'        => 'network::zyxel::snmp::mode::listvpn',
        'memory'          => 'network::zyxel::snmp::mode::memory',
        'sessions'        => 'network::zyxel::snmp::mode::sessions',
        'vpn-status'      => 'network::zyxel::snmp::mode::vpnstatus',
    );
    %{$self->{modes}} = %modes;

    return $self;
}

1;
__END__
=head1 PLUGIN DESCRIPTION
Check Zyxel equipment in SNMP.
=cut
| wilfriedcomte/centreon-plugins | network/zyxel/snmp/plugin.pm | Perl | apache-2.0 | 1,788 |
# Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Google::Ads::GoogleAds::V9::Resources::MoveUnusedBudgetRecommendation;

use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseEntity);

use Google::Ads::GoogleAds::Utils::GoogleAdsHelper;

# Builds the resource from a hashref of constructor arguments, copying
# only the fields this type declares.
sub new {
  my ($class, $args) = @_;

  my $self = {};
  $self->{$_} = $args->{$_} for qw(budgetRecommendation excessCampaignBudget);

  # Drop unset fields so the serialized JSON payload stays concise.
  remove_unassigned_fields($self, $args);

  return bless $self, $class;
}

1;
| googleads/google-ads-perl | lib/Google/Ads/GoogleAds/V9/Resources/MoveUnusedBudgetRecommendation.pm | Perl | apache-2.0 | 1,125 |
# Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Google::Ads::GoogleAds::V10::Services::AdGroupCustomizerService::AdGroupCustomizerOperation;

use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseEntity);

use Google::Ads::GoogleAds::Utils::GoogleAdsHelper;

# Constructs a mutate operation carrying either a 'create' payload or a
# 'remove' resource name.
sub new {
  my ($class, $args) = @_;

  my %fields = (
    create => $args->{create},
    remove => $args->{remove}
  );
  my $self = \%fields;

  # Trim unassigned fields for a more concise JSON payload.
  remove_unassigned_fields($self, $args);

  bless $self, $class;
  return $self;
}

1;
| googleads/google-ads-perl | lib/Google/Ads/GoogleAds/V10/Services/AdGroupCustomizerService/AdGroupCustomizerOperation.pm | Perl | apache-2.0 | 1,091 |
# Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Google::Ads::GoogleAds::V8::Enums::SharedSetStatusEnum;

# Enum module for the possible statuses of a Google Ads shared set.
# Const::Exporter makes each name below available as an importable
# constant whose value is its own string form.
use strict;
use warnings;

use Const::Exporter enums => [
    UNSPECIFIED => "UNSPECIFIED",
    UNKNOWN     => "UNKNOWN",
    ENABLED     => "ENABLED",
    REMOVED     => "REMOVED"
];

1;
| googleads/google-ads-perl | lib/Google/Ads/GoogleAds/V8/Enums/SharedSetStatusEnum.pm | Perl | apache-2.0 | 821 |
#!/usr/bin/env perl
# Drives a three-stage Hadoop-streaming TF-IDF pipeline (term frequency,
# document frequency, TF-IDF scoring) and renders the top-scoring terms
# as an HTML word cloud.
# NOTE(review): the script does not enable 'use strict'/'use warnings';
# $tfCommand, $totalWords, etc. below are therefore package globals.
use Getopt::Long;
use Pod::Usage;

#Get command line options
my $streamingJar; #is there a default?
my $stoplist;
my $outputPrefix = 'wordcloud';
my $wordsInCloud = 25;
my $pythonExec = 'python';
my $language = 'en';
my $preProc = 1;
my $tfOption = 'count';
my $dfOption = '';
# NOTE(review): $TOTAL_DOCS and $TOTAL_Terms are declared but never used
# in this file.
my $TOTAL_DOCS;
my $TOTAL_Terms;
GetOptions("jar=s" => \$streamingJar,
           "out=s" => \$outputPrefix,
           "n=i" => \$wordsInCloud,
           "python=s" => \$pythonExec,
           "language=s" => \$language,
           "pp!" => \$preProc,
           # NOTE(review): "lang!" shares $language with "language=s"
           # above, so --lang/--nolang overwrites the language string
           # with 1/0 -- confirm intent.
           "lang!" => \$language,
           "stop=s" => \$stoplist,
           "tf=s" => \$tfOption,
           "df=s" => \$dfOption
);
# Positional arguments: HDFS input paths for the TF and DF jobs, and the
# HDFS output path prefix.
my $TFHdfsPath=shift;
my $DFHdfsPath=shift;
my $OutHdfsPath=shift;

#check if langid mod file exists - if not will look for langid zip
# or langid directory
if ($language) {
    #check that langid.mod exists
    if (! -e 'langid.mod') {
        if (-e 'langid.zip') {
            print "Renaming langid.zip langid.mod...\n";
            &run_cmdline ('mv langid.zip langid.mod');
        }
        elsif (-d 'langid') {
            print "Creating langid.mod...\n";
            &run_cmdline ('zip -r langid.zip langid');
            &run_cmdline ('mv langid.zip langid.mod');
        }
        else {
            print "Could not find langid module - running without langauge detection...\n";
            $language = undef;
        }
    }
}

##run map reduce job to get term frequency
$tfCommand = "hadoop jar $streamingJar ".
    "-file tf_mapper.py tf_reducer.py ".($language? "langid.mod " : "")."$stoplist ".
    "-inputformat \'SequenceFileAsTextInputFormat\' ".
    "-mapper \"$pythonExec tf_mapper.py $tfOption $stoplist $language $preProc\" ".
    "-reducer \"$pythonExec tf_reducer.py\" ".
    "-input $TFHdfsPath ".
    "-output $OutHdfsPath/tc";
&run_cmdline($tfCommand);

#get the number of words and print:
# Picks the _TF_TOTAL_WORD_COUNT marker row out of the job output: the
# field after the tab is split on commas and the first value kept.
$totalWords = (split ",", (split "\t", `hadoop fs -cat $OutHdfsPath/tc/part-* | grep _TF_TOTAL_WORD_COUNT`)[1])[0];
print "Counted $totalWords words\n";

##run map reduce job to get document frequency
$dfCommand = "hadoop jar $streamingJar ".
    "-file df_mapper.py df_reducer.py ".($language? "langid.mod " : "")."$stoplist ".
    "-inputformat \'SequenceFileAsTextInputFormat\' ".
    "-mapper \"$pythonExec df_mapper.py $stoplist $language $preProc\" ".
    "-reducer \"$pythonExec df_reducer.py\" ".
    "-input $DFHdfsPath ".
    "-output $OutHdfsPath/dc";
&run_cmdline($dfCommand);

#get the number of docs and print:
$totalDocs = (split ",", (split "\t", `hadoop fs -cat $OutHdfsPath/dc/part-* | grep _DF_TOTAL_DOC_COUNT`)[1])[0];
print "Counted $totalDocs docs\n";

##calculate and sort tf-idf scores
# Single reduce stage over both previous outputs; keys are sorted
# numerically descending (-k1nr) so the highest TF-IDF scores come first.
$tfidfCommand = "hadoop jar $streamingJar ".
    "-Dmapred.output.key.comparator.class=org.apache.hadoop.mapred.lib.KeyFieldBasedComparator ".
    "-Dmapred.text.key.comparator.options=-k1nr ".
    "-Dnum.key.fields.for.partition=1 ".
    "-Dstream.num.map.output.key.fields=1 ".
    "-Dmapred.child.java.opts=-Xmx2G ". #remove or make configurable
    "-file tfidf_reducer.py ".
    "-mapper \"cat\" ".
    "-reducer \"$pythonExec tfidf_reducer.py $dfOption\" ".
    "-partitioner org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner ".
    "-cmdenv DF_TOTAL_DOC_COUNT=$totalDocs ".
    "-cmdenv TF_TOTAL_WORD_COUNT=$totalWords ".
    "-input $OutHdfsPath/tc ".
    "-input $OutHdfsPath/dc ".
    "-output $OutHdfsPath/tfidf";
&run_cmdline($tfidfCommand);
print "Created TF-IDF scores in $OutHdfsPath/tfidf\n";

#create the word cloud
# Takes the top N scored terms and pipes them into the HTML renderer.
$wcCommand = "hadoop fs -text $OutHdfsPath/tfidf/part-* | sort -k1nr | head -n $wordsInCloud | ".
    "perl WordStatsToCloud.pl -cloudsize $wordsInCloud -in - -out $outputPrefix"."_$wordsInCloud.html";
&run_cmdline($wcCommand);
print "Created HTML word cloud in $outputPrefix"."_$wordsInCloud.html\n";
#runs command line and prints error to stdout on failure
# Echoes the command, runs it through the shell, and terminates the
# script with a meaningful exit code if it fails.
sub run_cmdline {
    my ($cmdline) = @_;
    print STDOUT "Running: $cmdline\n";
    my $status = system($cmdline);
    if ($status != 0) {
        if ($? == -1) {
            print STDOUT "... failed to execute: $!\n";
        } elsif ($? & 127) {
            printf STDOUT "... died with signal %d, %s coredump\n",
                ($? & 127), ($? & 128) ? 'with' : 'without';
        } else {
            printf STDOUT "... exited with value %d\n", $? >> 8;
        }
        # exit() truncates its argument to 8 bits, so passing the raw
        # system() status (e.g. 256 for a child exiting 1) would wrap to
        # 0 and report success.  Exit with the child's real exit value,
        # or 1 for exec failure / death by signal.
        exit(($? == -1 || ($? & 127)) ? 1 : ($? >> 8));
    }
}
# .
| plamenbbn/XDATA | wordcloud/wordcloud.pl | Perl | apache-2.0 | 4,208 |
% NOTE(review): despite the .pl extension this is Prolog/CLP (constraint
% Horn clauses), not Perl.  It encodes a reachability check for a gcd
% program: 'false' is derivable iff the error state is reachable.
false :-
    main_verifier_error.

% verifier_error/3: truth table of logical OR (C = A or B).
verifier_error(A,B,C) :-
    A=0,
    B=0,
    C=0.
verifier_error(A,B,C) :-
    A=0,
    B=1,
    C=1.
verifier_error(A,B,C) :-
    A=1,
    B=0,
    C=1.
verifier_error(A,B,C) :-
    A=1,
    B=1,
    C=1.

% gcd/6: the first three arguments form a tag triple whose meaning is
% fixed by the generator; the clause with tag (1,0,0) at the end of this
% file is the one that performs the computation via gcd__split/3.
gcd(A,B,C,D,E,F) :-
    A=1,
    B=1,
    C=1.
gcd(A,B,C,D,E,F) :-
    A=0,
    B=1,
    C=1.
gcd(A,B,C,D,E,F) :-
    A=0,
    B=0,
    C=0.

gcd__1(A,B) :-
    true.

% Base cases: a non-positive operand yields result 0.
gcd___0(A,B,C) :-
    gcd__1(B,C),
    B<1,
    A=0.
gcd___0(A,B,C) :-
    gcd__1(B,C),
    C<1,
    A=0.

% Both operands positive.
gcd__4(A,B) :-
    gcd__1(A,B),
    A>=1,
    B>=1.

% Equal operands: the gcd is the operand itself.
gcd___0(A,B,C) :-
    B=A,
    gcd__4(B,C),
    B=C.

gcd__6(A,B) :-
    gcd__4(A,B),
    A<B.
gcd__6(A,B) :-
    gcd__4(A,B),
    A>B.
gcd__8(A,B) :-
    gcd__6(A,B),
    A>B.
gcd__11(A,B) :-
    gcd__6(A,B),
    A=<B.

% Euclid's subtraction steps: recurse on (B-C, C) or (B, C-B).
gcd___0(A,B,C) :-
    D=1,
    E=0,
    F=0,
    G=B-C,
    gcd__8(B,C),
    gcd(D,E,F,G,C,A).
gcd___0(A,B,C) :-
    D=1,
    E=0,
    F=0,
    G=C-B,
    gcd__11(B,C),
    gcd(D,E,F,B,G,A).

gcd__split(A,B,C) :-
    gcd___0(A,B,C).

% Computation entry: tag (1,0,0) delegates to the clauses above.
gcd(A,B,C,D,E,F) :-
    A=1,
    B=0,
    C=0,
    gcd__split(F,D,E).

main_entry :-
    true.
main__un(A) :-
    main_entry,
    A>=1.
main__un1(A,B) :-
    main__un(A),
    B>=1.

% Error state: gcd of two positive inputs evaluating to < 1 would
% violate the asserted property.
main__un2 :-
    main__un1(A,B),
    gcd(1,0,0,A,B,C),
    C<1,
    A>0,
    B>0.
main_verifier_error :-
    main__un2.
| bishoksan/RAHFT | benchmarks_scp/SVCOMP15/svcomp15-clp/gcd01_true-unreach-call_true-termination.c.pl | Perl | apache-2.0 | 1,247 |
# Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Google::Ads::GoogleAds::V10::Resources::CustomerAsset;

use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseEntity);

use Google::Ads::GoogleAds::Utils::GoogleAdsHelper;

# Builds a CustomerAsset resource, pulling its known fields out of the
# constructor arguments with a hash slice.
sub new {
  my ($class, $args) = @_;

  my @known_fields = qw(asset fieldType resourceName status);
  my $self = {};
  @{$self}{@known_fields} = @{$args}{@known_fields};

  # Delete the unassigned fields in this object for a more concise JSON payload
  remove_unassigned_fields($self, $args);

  return bless $self, $class;
}

1;
| googleads/google-ads-perl | lib/Google/Ads/GoogleAds/V10/Resources/CustomerAsset.pm | Perl | apache-2.0 | 1,147 |
package Paws::MTurk::QualificationRequest;

# Read-only value object for a Worker's request for a Qualification;
# per-attribute details are documented in the POD below.
use Moose;

# QuestionFormAnswers document submitted by the Worker, if any.
has Answer => (is => 'ro', isa => 'Str');
# Unique id generated when the request was submitted.
has QualificationRequestId => (is => 'ro', isa => 'Str');
# Id of the Qualification type being requested.
has QualificationTypeId => (is => 'ro', isa => 'Str');
# Time the request reached 'Submitted' status.
has SubmitTime => (is => 'ro', isa => 'Str');
# QuestionForm test presented to the Worker, if the type has one.
has Test => (is => 'ro', isa => 'Str');
# Id of the requesting Worker.
has WorkerId => (is => 'ro', isa => 'Str');

1;
### main pod documentation begin ###
=head1 NAME
Paws::MTurk::QualificationRequest
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::MTurk::QualificationRequest object:
$service_obj->Method(Att1 => { Answer => $value, ..., WorkerId => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be an Paws::MTurk::QualificationRequest object:
$result = $service_obj->Method(...);
$result->Att1->Answer
=head1 DESCRIPTION
The QualificationRequest data structure represents a request a Worker
has made for a Qualification.
=head1 ATTRIBUTES
=head2 Answer => Str
The Worker's answers for the Qualification type's test contained in a
QuestionFormAnswers document, if the type has a test and the Worker has
submitted answers. If the Worker does not provide any answers, Answer
may be empty.
=head2 QualificationRequestId => Str
The ID of the Qualification request, a unique identifier generated when
the request was submitted.
=head2 QualificationTypeId => Str
The ID of the Qualification type the Worker is requesting, as returned
by the CreateQualificationType operation.
=head2 SubmitTime => Str
The date and time the Qualification request had a status of Submitted.
This is either the time the Worker submitted answers for a
Qualification test, or the time the Worker requested the Qualification
if the Qualification type does not have a test.
=head2 Test => Str
The contents of the Qualification test that was presented to the
Worker, if the type has a test and the Worker has submitted answers.
This value is identical to the QuestionForm associated with the
Qualification type at the time the Worker requests the Qualification.
=head2 WorkerId => Str
The ID of the Worker requesting the Qualification.
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::MTurk>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/MTurk/QualificationRequest.pm | Perl | apache-2.0 | 2,728 |
package OpenXPKI::Server::Workflow::Validator::ReasonCode;

use strict;
use Moose;

use OpenXPKI::Server::Context qw( CTX );
use OpenXPKI::Debug;
use OpenXPKI::Exception;
use Workflow::Exception qw( validation_error );
use DateTime;

extends 'OpenXPKI::Server::Workflow::Validator';

# Default argument mapping: the framework feeds the 'reason_code'
# context field to _validate.
sub _preset_args {
    return [ qw(reason_code) ];
}

# Accepts the reason code only when it appears in the configured (or
# default) list; raises a validation error otherwise.
sub _validate {
    my ( $self, $wf, $reason_code ) = @_;
    ##! 16: 'reason_code' . $reason_code

    # Default to the revocation reason codes accepted by OpenSSL.
    my @allowed = qw(
        unspecified
        keyCompromise
        CACompromise
        affiliationChanged
        superseded
        cessationOfOperation
        certificateHold
        removeFromCRL
    );

    CTX('log')->application()->warn('Call to deprecated class OpenXPKI::Server::Workflow::Validator::ReasonCode');

    # A comma separated list in the validator config overrides the default.
    if (my $configured = $self->param('valid_reason_codes')) {
        @allowed = split /,\s*/, $configured;
    }

    if (!grep { $_ eq $reason_code } @allowed) {
        CTX('log')->application()->error('Got invalid reason code: ' . $reason_code);
        validation_error('I18N_OPENXPKI_SERVER_WORKFLOW_VALIDATOR_REASON_CODE_INVALID');
    }

    return 1;
}

1;
__END__
=head1 NAME
OpenXPKI::Server::Workflow::Validator::ReasonCode
=head1 DEPRECATION WARNING
Class is deprecated - use the predefined global validator which is based
on OpenXPKI::Server::Workflow::Validator::Regex
class: OpenXPKI::Server::Workflow::Validator::Regex
arg:
- $reason_code
param:
regex: "\\A (unspecified|keyCompromise|CACompromise|affiliationChanged|superseded|cessationOfOperation) \\z"
error: I18N_OPENXPKI_UI_REASON_CODE_NOT_SUPPORTED
=head1 SYNOPSIS
validate_reason_code:
class: OpenXPKI::Server::Workflow::Validator::ReasonCode
param:
valid_reason_codes: unspecified, superseded
arg:
- $reason_code
=head1 DESCRIPTION
This validator checks whether a given CRR reason code is valid. The accepted
reason codes are preset to those accepted by openssl but you can also pass
your own list of accepted codes as param (comma separated list of values!).
=head2 Default Codes
unspecified, keyCompromise, CACompromise, affiliationChanged, superseded,
cessationOfOperation, certificateHold, removeFromCRL
| stefanomarty/openxpki | core/server/OpenXPKI/Server/Workflow/Validator/ReasonCode.pm | Perl | apache-2.0 | 2,285 |
#
# (c) Jan Gehring <jan.gehring@gmail.com>
#
# vim: set ts=2 sw=2 tw=0:
# vim: set expandtab:
package Rex::Test::Base::has_file;

use strict;
use warnings;

our $VERSION = '0.56.1'; # VERSION

use Rex -base;

use base qw(Rex::Test::Base);

# Constructor: accepts key/value options and blesses them into the
# (sub)class.  The unused caller(0) lookup from the original has been
# removed -- its result was never read.
sub new {
    my $that  = shift;
    my $proto = ref($that) || $that;
    my $self  = {@_};

    bless( $self, $proto );

    return $self;
}

# Test hook: asserts that $file exists on the managed host.
sub run_test {
    my ( $self, $file ) = @_;
    $self->ok( is_file($file), "File $file found." );
}

1;
| gitpan/Rex | lib/Rex/Test/Base/has_file.pm | Perl | apache-2.0 | 510 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2021] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package EnsEMBL::GA::SiteDefs;

use strict;
use warnings;

# Plugin hook: this plugin needs no SiteDefs overrides, so the
# configuration update callback is a deliberate no-op.
sub update_conf { return; }

1;
| Ensembl/public-plugins | ga/conf/SiteDefs.pm | Perl | apache-2.0 | 793 |
package Google::Ads::AdWords::v201809::StringFormatError;

# SOAP::WSDL-generated complexType class (see POD below); do not edit the
# Class::Std inside-out attribute machinery by hand.
use strict;
use warnings;

# Child elements of this type are namespace-qualified in the XML.
__PACKAGE__->_set_element_form_qualified(1);

# XML namespace this type belongs to.
sub get_xmlns { 'https://adwords.google.com/api/adwords/cm/v201809' };

# This type carries no XML attributes, so no attribute class is set.
our $XML_ATTRIBUTE_CLASS;
undef $XML_ATTRIBUTE_CLASS;

sub __get_attr_class {
    return $XML_ATTRIBUTE_CLASS;
}

use base qw(Google::Ads::AdWords::v201809::ApiError);
# Variety: sequence
use Class::Std::Fast::Storable constructor => 'none';
use base qw(Google::Ads::SOAP::Typelib::ComplexType);

{ # BLOCK to scope variables

# Inside-out attribute storage (Class::Std::Fast): one hash per field,
# keyed by object id, with generated read accessors.
my %fieldPath_of :ATTR(:get<fieldPath>);
my %fieldPathElements_of :ATTR(:get<fieldPathElements>);
my %trigger_of :ATTR(:get<trigger>);
my %errorString_of :ATTR(:get<errorString>);
my %ApiError__Type_of :ATTR(:get<ApiError__Type>);
my %reason_of :ATTR(:get<reason>);

# Registers field order, storage, element types and XML names with the
# typelib factory.
__PACKAGE__->_factory(
    [ qw(        fieldPath
        fieldPathElements
        trigger
        errorString
        ApiError__Type
        reason

    ) ],
    {
        'fieldPath' => \%fieldPath_of,
        'fieldPathElements' => \%fieldPathElements_of,
        'trigger' => \%trigger_of,
        'errorString' => \%errorString_of,
        'ApiError__Type' => \%ApiError__Type_of,
        'reason' => \%reason_of,
    },
    {
        'fieldPath' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
        'fieldPathElements' => 'Google::Ads::AdWords::v201809::FieldPathElement',
        'trigger' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
        'errorString' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
        'ApiError__Type' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
        'reason' => 'Google::Ads::AdWords::v201809::StringFormatError::Reason',
    },
    {

        'fieldPath' => 'fieldPath',
        'fieldPathElements' => 'fieldPathElements',
        'trigger' => 'trigger',
        'errorString' => 'errorString',
        'ApiError__Type' => 'ApiError.Type',
        'reason' => 'reason',
    }
);

} # end BLOCK

1;
=pod
=head1 NAME
Google::Ads::AdWords::v201809::StringFormatError
=head1 DESCRIPTION
Perl data type class for the XML Schema defined complexType
StringFormatError from the namespace https://adwords.google.com/api/adwords/cm/v201809.
A list of error code for reporting invalid content of input strings.
=head2 PROPERTIES
The following properties may be accessed using get_PROPERTY / set_PROPERTY
methods:
=over
=item * reason
=back
=head1 METHODS
=head2 new
Constructor. The following data structure may be passed to new():
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| googleads/googleads-perl-lib | lib/Google/Ads/AdWords/v201809/StringFormatError.pm | Perl | apache-2.0 | 2,552 |
#
# Copyright 2022 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::bluecoat::snmp::mode::hardware;

use base qw(centreon::plugins::templates::hardware);

use strict;
use warnings;

# Declares to the hardware template which components exist and how raw
# status strings map to Centreon severities.
sub set_system {
    my ($self, %options) = @_;

    # Numeric --warning/--critical thresholds only apply to 'sensor'.
    $self->{regexp_threshold_numeric_check_section_option} = '^sensor$';

    # Hook invoked by the template to fetch SNMP data after setup.
    $self->{cb_hook2} = 'snmp_execute';

    # status string => severity, per component section.
    $self->{thresholds} = {
        sensor_opstatus => [
            ['ok', 'OK'],
            ['unavailable', 'UNKNOWN'],
            ['nonoperational', 'UNKNOWN'],
            ['unknown', 'UNKNOWN']
        ],
        sensor => [
            ['ok', 'OK'],
            ['unknown', 'UNKNOWN'],
            ['notInstalled', 'OK'],
            ['voltageLowWarning', 'WARNING'],
            ['voltageLowCritical', 'CRITICAL'],
            ['noPower', 'CRITICAL'],
            ['voltageHighWarning', 'WARNING'],
            ['voltageHighCritical', 'CRITICAL'],
            ['voltageHighSevere', 'CRITICAL'],
            ['temperatureHighWarning', 'WARNING'],
            ['temperatureHighCritical', 'CRITICAL'],
            ['temperatureHighSevere', 'CRITICAL'],
            ['fanSlowWarning', 'WARNING'],
            ['fanSlowCritical', 'CRITICAL'],
            ['fanStopped', 'CRITICAL']
        ],
        disk => [
            ['present', 'OK'],
            ['initializing', 'OK'],
            ['inserted', 'OK'],
            ['offline', 'WARNING'],
            ['removed', 'WARNING'],
            ['notpresent', 'OK'],
            ['empty', 'WARNING'],
            ['ioerror', 'CRITICAL'],
            ['unusable', 'CRITICAL'],
            ['unknown', 'UNKNOWN']
        ]
    };

    # Where the per-component check modules live.
    $self->{components_path} = 'network::bluecoat::snmp::mode::components';
    $self->{components_module} = ['sensor', 'disk'];
}

# cb_hook2 callback: walks every table queued (in $self->{request}) by
# the component modules and stores the results for their check() calls.
sub snmp_execute {
    my ($self, %options) = @_;

    $self->{snmp} = $options{snmp};
    $self->{results} = $self->{snmp}->get_multiple_table(oids => $self->{request});
}

sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    # No mode-specific options beyond what the template provides.
    $options{options}->add_options(arguments => {});

    return $self;
}

1;
__END__
=head1 MODE
Check Hardware (Sensors, Disks).
=over 8
=item B<--component>
Which component to check (Default: '.*').
Can be: 'sensor', 'disk'.
=item B<--filter>
Exclude some parts (comma separated list) (Example: --filter=disk --filter=sensor)
Can also exclude specific instance: --filter=rmsVoltage,I1
=item B<--absent-problem>
Return an error if an entity is not 'present' (default is skipping) (comma separated list)
Can be specific or global: --absent-problem=disk,1
=item B<--no-component>
Return an error if no components are checked.
If total (with skipped) is 0. (Default: 'critical' returns).
=item B<--threshold-overload>
Set to overload default threshold values (syntax: section,[instance,]status,regexp)
It used before default thresholds (order stays).
Example: --threshold-overload='sensor,CRITICAL,^(?!(ok)$)'
=item B<--warning>
Set warning threshold for temperatures (syntax: type,instance,threshold)
Example: --warning='sensor,.*,30'
=item B<--critical>
Set critical threshold for temperatures (syntax: type,instance,threshold)
Example: --critical='sensor,.*,40'
=back
=cut
| centreon/centreon-plugins | network/bluecoat/snmp/mode/hardware.pm | Perl | apache-2.0 | 3,994 |
# Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Google::Ads::GoogleAds::V8::Services::SmartCampaignSettingService;

use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseService);

# Fetches a SmartCampaignSetting resource by its resource name.
sub get {
  my ($self, $request_body) = @_;

  return $self->SUPER::call(
    'GET',
    'v8/{+resourceName}',
    $request_body,
    'Google::Ads::GoogleAds::V8::Resources::SmartCampaignSetting');
}

# Sends a batch of smart campaign setting operations to the API.
sub mutate {
  my ($self, $request_body) = @_;

  return $self->SUPER::call(
    'POST',
    'v8/customers/{+customerId}/smartCampaignSettings:mutate',
    $request_body,
    'Google::Ads::GoogleAds::V8::Services::SmartCampaignSettingService::MutateSmartCampaignSettingsResponse'
  );
}

1;
| googleads/google-ads-perl | lib/Google/Ads/GoogleAds/V8/Services/SmartCampaignSettingService.pm | Perl | apache-2.0 | 1,458 |
#
# Copyright 2021 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::watchguard::snmp::mode::system;

use base qw(centreon::plugins::templates::counter);

use strict;
use warnings;
use Digest::MD5 qw(md5_hex);

# Declares the counters this mode exposes: current connections plus
# in/out traffic rates derived from total byte counters.
sub set_counters {
    my ($self, %options) = @_;

    $self->{maps_counters_type} = [
        { name => 'global', type => 0, message_separator => ' - ' }
    ];

    $self->{maps_counters}->{global} = [
        { label => 'connections', nlabel => 'system.connections.current.count', set => {
                key_values => [ { name => 'connections' } ],
                output_template => 'Current connections: %s',
                perfdatas => [
                    { label => 'current_connections', template => '%s', min => 0 }
                ]
            }
        },
        # per_second => 1: the template computes a rate from successive
        # runs using the statefile cache set up in new().
        { label => 'in-traffic', nlabel => 'system.traffic.in.bitspersecond', set => {
                key_values => [ { name => 'in_traffic', per_second => 1 } ],
                output_template => 'Traffic in: %s %s/s',
                output_change_bytes => 2,
                perfdatas => [
                    { label => 'traffic_in', template => '%s', min => 0, unit => 'b/s' }
                ]
            }
        },
        { label => 'out-traffic', nlabel => 'system.traffic.out.bitspersecond', set => {
                key_values => [ { name => 'out_traffic', per_second => 1 } ],
                output_template => 'Traffic out: %s %s/s',
                output_change_bytes => 2,
                perfdatas => [
                    { label => 'traffic_out', template => '%s', min => 0, unit => 'b/s' }
                ]
            }
        }
    ];
}

sub new {
    my ($class, %options) = @_;
    # statefile => 1 enables the cache needed by the per-second counters.
    my $self = $class->SUPER::new(package => __PACKAGE__, %options, statefile => 1);
    bless $self, $class;

    $options{options}->add_options(arguments => {
    });

    return $self;
}

# Reads the total send/recv byte counters and current connection count
# via SNMP and feeds them to the counter template.
sub manage_selection {
    my ($self, %options) = @_;

    if ($options{snmp}->is_snmpv1()) {
        $self->{output}->add_option_msg(short_msg => 'Need to use SNMP v2c or v3.');
        $self->{output}->option_exit();
    }

    my $oid_wgSystemTotalSendBytes = '.1.3.6.1.4.1.3097.6.3.8.0';
    my $oid_wgSystemTotalRecvBytes = '.1.3.6.1.4.1.3097.6.3.9.0';
    my $oid_wgSystemCurrActiveConns = '.1.3.6.1.4.1.3097.6.3.80.0';
    my $snmp_result = $options{snmp}->get_leef(
        oids => [
            $oid_wgSystemTotalSendBytes, $oid_wgSystemTotalRecvBytes, $oid_wgSystemCurrActiveConns
        ],
        nothing_quit => 1
    );

    # Byte counters are converted to bits here; the template turns them
    # into per-second rates between runs.
    $self->{global} = {
        out_traffic => $snmp_result->{$oid_wgSystemTotalSendBytes} * 8,
        in_traffic => $snmp_result->{$oid_wgSystemTotalRecvBytes} * 8,
        connections => $snmp_result->{$oid_wgSystemCurrActiveConns}
    };

    # Cache key: unique per host/port/filter so rate state is not shared
    # across different targets or filter sets.
    $self->{cache_name} = 'watchguard_' . $self->{mode} . '_' . $options{snmp}->get_hostname() . '_' . $options{snmp}->get_port() . '_' .
        (defined($self->{option_results}->{filter_counters}) ? md5_hex($self->{option_results}->{filter_counters}) : md5_hex('all'));
}

1;
__END__
=head1 MODE
Check system statistics.
=over 8
=item B<--filter-counters>
Only display some counters (regexp can be used).
Example: --filter-counters='^connections$'
=item B<--warning-*> B<--critical-*>
Thresholds.
Can be: 'in-traffic', 'out-traffic', 'connections'.
=back
=cut
| Tpo76/centreon-plugins | network/watchguard/snmp/mode/system.pm | Perl | apache-2.0 | 4,054 |
#
# Copyright 2021 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package centreon::common::ibm::tapelibrary::snmp::mode::components::chassis;
use strict;
use warnings;
use centreon::common::ibm::tapelibrary::snmp::mode::components::resources qw($map_operational);
# SNMP OID mapping for the sub-chassis operational status column; raw values
# are translated to readable states via $map_operational (from resources.pm).
my $mapping = {
    subChassis_OperationalStatus => { oid => '.1.3.6.1.4.1.14851.3.1.4.10.1.10', map => $map_operational },
};
# Queue the chassis operational-status table OID for the bulk SNMP request.
sub load {
    my ($self) = @_;

    my $status_oid = $mapping->{subChassis_OperationalStatus}->{oid};
    push @{ $self->{request} }, { oid => $status_oid };
}
# Walk every chassis instance returned by the SNMP request, report its
# operational status as long output, and raise the severity when the
# mapped status is not considered OK.
sub check {
    my ($self) = @_;

    $self->{output}->output_add(long_msg => "Checking chassis");
    $self->{components}->{chassis} = {name => 'chassis', total => 0, skip => 0};
    # honor --component style filtering for the whole section
    return if ($self->check_filter(section => 'chassis'));

    foreach my $oid ($self->{snmp}->oid_lex_sort(keys %{$self->{results}->{$mapping->{subChassis_OperationalStatus}->{oid}}})) {
        # the instance index is everything after the column OID prefix
        $oid =~ /^$mapping->{subChassis_OperationalStatus}->{oid}\.(.*)$/;
        my $instance = $1;
        my $result = $self->{snmp}->map_instance(mapping => $mapping, results => $self->{results}->{$mapping->{subChassis_OperationalStatus}->{oid}}, instance => $instance);

        next if ($self->check_filter(section => 'chassis', instance => $instance));
        $self->{components}->{chassis}->{total}++;

        $self->{output}->output_add(long_msg => sprintf("chassis '%s' status is '%s' [instance: %s].",
                                    $instance, $result->{subChassis_OperationalStatus},
                                    $instance
                                    ));
        # map the status value to a plugin severity and escalate if not ok
        my $exit = $self->get_severity(label => 'operational', section => 'chassis', value => $result->{subChassis_OperationalStatus});
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(severity => $exit,
                                        short_msg => sprintf("chassis '%s' status is '%s'",
                                                             $instance, $result->{subChassis_OperationalStatus}));
        }
    }
}

1;
#
# Copyright 2021 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package centreon::common::aruba::snmp::mode::license;
use base qw(centreon::plugins::templates::counter);
use strict;
use warnings;
use centreon::plugins::templates::catalog_functions qw(catalog_status_threshold);
use Time::Local;
# Render the per-license status line: flag, humanized expiry and raw date.
sub custom_status_output {
    my ($self, %options) = @_;

    my $values = $self->{result_values};
    return sprintf(
        "Status is '%s', Expires in '%s' [%s]",
        $values->{flag}, $values->{expires_human}, $values->{expires_date}
    );
}
# Copy the raw SNMP values for one license into result_values and derive
# the expiry: 'expires' becomes the number of seconds from now until the
# expiry date (0 when already expired), 'expires_human' a readable delta.
sub custom_status_calc {
    my ($self, %options) = @_;

    $self->{result_values}->{key} = $options{new_datas}->{$self->{instance} . '_sysExtLicenseKey'};
    $self->{result_values}->{flag} = $options{new_datas}->{$self->{instance} . '_sysExtLicenseFlags'};
    $self->{result_values}->{service} = $options{new_datas}->{$self->{instance} . '_sysExtLicenseService'};
    $self->{result_values}->{expires} = $options{new_datas}->{$self->{instance} . '_sysExtLicenseExpires'};
    $self->{result_values}->{expires_date} = $options{new_datas}->{$self->{instance} . '_sysExtLicenseExpires'};
    $self->{result_values}->{expires_human} = 'Never';
    if ($self->{result_values}->{expires} !~ /Never/) {
        # expiry string looks like "YYYY-MM-DD HH:MM:SS"; split on '-', ':' and spaces
        my ($year, $mon, $mday, $hour, $min, $sec) = split(/[\s\-:]+/, $self->{result_values}->{expires});
        # seconds remaining until expiry (negative means already expired)
        $self->{result_values}->{expires} = timelocal($sec, $min, $hour, $mday, $mon - 1, $year) - time();
        $self->{result_values}->{expires_human} = centreon::plugins::misc::change_seconds(value => $self->{result_values}->{expires});
        # clamp already-expired licenses to 0 so threshold expressions stay simple
        $self->{result_values}->{expires_human} = $self->{result_values}->{expires} = 0 if ($self->{result_values}->{expires} < 0);
    }
    return 0;
}
# Declare the counter structure for the counter template: one 'license'
# group (type 1 = one entry per instance) whose single 'status' counter
# uses the custom calc/output closures above and produces no perfdata.
sub set_counters {
    my ($self, %options) = @_;

    $self->{maps_counters_type} = [
        { name => 'license', type => 1, cb_prefix_output => 'prefix_output',
          message_multiple => 'All licenses status are ok' },
    ];

    $self->{maps_counters}->{license} = [
        { label => 'status', threshold => 0, set => {
                key_values => [ { name => 'sysExtLicenseKey' }, { name => 'sysExtLicenseFlags' },
                    { name => 'sysExtLicenseService' }, { name => 'sysExtLicenseExpires' } ],
                closure_custom_calc => $self->can('custom_status_calc'),
                closure_custom_output => $self->can('custom_status_output'),
                closure_custom_perfdata => sub { return 0; },
                closure_custom_threshold_check => \&catalog_status_threshold,
            }
        },
    ];
}
# Prefix each license line with the license service name.
sub prefix_output {
    my ($self, %options) = @_;

    my $service = $options{instance_value}->{sysExtLicenseService};
    return sprintf("License '%s' ", $service);
}
# Constructor: registers the --warning-status / --critical-status options.
# The critical default fires when the license flag is not 'enabled' or it
# expires within one day (86400 s).
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options, force_new_perfdata => 1);
    bless $self, $class;

    $options{options}->add_options(arguments => {
        "warning-status:s"    => { name => 'warning_status' },
        "critical-status:s"   => { name => 'critical_status',
            default => '%{flag} !~ /enabled/i || (%{expires} ne "Never" && %{expires} < 86400)' },
    });

    return $self;
}
# Expand the %{...} macros in the status threshold expressions after the
# base class has validated the common options.
sub check_options {
    my ($self, %options) = @_;
    $self->SUPER::check_options(%options);

    $self->change_macros(macros => ['warning_status', 'critical_status']);
}
# License flag letters as reported by the device, mapped to readable names.
my %map_flags = (
    'E' => 'enabled', 'A' => 'auto-generated', 'R' => 'reboot-required'
);

# wlsxSysExtSwitchLicenseTable and the columns read from it (WLSX-SYSTEMEXT-MIB)
my $oid_wlsxSysExtSwitchLicenseTable = '.1.3.6.1.4.1.14823.2.2.1.2.1.20.1';
my $mapping = {
    sysExtLicenseKey        => { oid => '.1.3.6.1.4.1.14823.2.2.1.2.1.20.1.2' },
    sysExtLicenseInstalled  => { oid => '.1.3.6.1.4.1.14823.2.2.1.2.1.20.1.3' },
    sysExtLicenseExpires    => { oid => '.1.3.6.1.4.1.14823.2.2.1.2.1.20.1.4' },
    sysExtLicenseFlags      => { oid => '.1.3.6.1.4.1.14823.2.2.1.2.1.20.1.5', map => \%map_flags },
    sysExtLicenseService    => { oid => '.1.3.6.1.4.1.14823.2.2.1.2.1.20.1.6' },
};
# Fetch the license table over SNMP and build $self->{license}, keyed by
# service name. Exits with an error when the device reports no licenses.
sub manage_selection {
    my ($self, %options) = @_;

    my $snmp_result = $options{snmp}->get_table(
        oid => $oid_wlsxSysExtSwitchLicenseTable,
        start => $mapping->{sysExtLicenseKey}->{oid},
        end => $mapping->{sysExtLicenseService}->{oid},
        nothing_quit => 1
    );

    # strip leading/trailing whitespace from every returned value
    foreach my $oid (keys %{$snmp_result}) {
        $snmp_result->{$oid} = centreon::plugins::misc::trim($snmp_result->{$oid});
    }

    $self->{license} = {};
    foreach my $oid (keys %{$snmp_result}) {
        # iterate only over rows of the key column; capture the instance index
        next if ($oid !~ /^$mapping->{sysExtLicenseKey}->{oid}\.(.*)/);
        my $instance = $1;
        my $result = $options{snmp}->map_instance(
            mapping => $mapping,
            results => $snmp_result,
            instance => $instance
        );

        # NOTE(review): licenses sharing a service name overwrite each other here
        $self->{license}->{$result->{sysExtLicenseService}} = { %{$result} };
    }

    if (scalar(keys %{$self->{license}}) <= 0) {
        $self->{output}->add_option_msg(short_msg => "No license found.");
        $self->{output}->option_exit();
    }
}
1;
__END__
=head1 MODE
Check license (WLSX-SYSTEMEXT-MIB).
=over 8
=item B<--warning-status>
Threshold warning.
Can use special variables like:%{key},
%{service}, %{flag}, %{expires} (Default: '')
=item B<--critical-status>
Threshold critical.
Can use special variables like: %{key},
%{service}, %{flag}, %{expires} (Default: '%{flag} !~ /enabled/i || (%{expires} ne "Never" && %{expires} < 86400)')
=back
=cut
| Tpo76/centreon-plugins | centreon/common/aruba/snmp/mode/license.pm | Perl | apache-2.0 | 6,187 |
#
# Copyright 2021 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package cloud::aws::elasticache::plugin;
use strict;
use warnings;
use base qw(centreon::plugins::script_custom);
# Constructor: registers every ElastiCache mode and the two custom
# connection backends (Paws and aws-cli).
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    $self->{version} = '0.1';
    %{ $self->{modes} } = (
        'commands-memcached'  => 'cloud::aws::elasticache::mode::commandsmemcached',
        'commands-redis'      => 'cloud::aws::elasticache::mode::commandsredis',
        'connections'         => 'cloud::aws::elasticache::mode::connections',
        'cpu'                 => 'cloud::aws::elasticache::mode::cpu',
        'evictions'           => 'cloud::aws::elasticache::mode::evictions',
        'items'               => 'cloud::aws::elasticache::mode::items',
        'network'             => 'cloud::aws::elasticache::mode::network',
        'replication'         => 'cloud::aws::elasticache::mode::replication',
        'requests-memcached'  => 'cloud::aws::elasticache::mode::requestsmemcached',
        'requests-redis'      => 'cloud::aws::elasticache::mode::requestsredis',
        'usage-memcached'     => 'cloud::aws::elasticache::mode::usagememcached',
        'usage-redis'         => 'cloud::aws::elasticache::mode::usageredis',
    );

    $self->{custom_modes}{paws}   = 'cloud::aws::custom::paws';
    $self->{custom_modes}{awscli} = 'cloud::aws::custom::awscli';

    return $self;
}
1;
__END__
=head1 PLUGIN DESCRIPTION
Check Amazon ElastiCache.
=cut
| Tpo76/centreon-plugins | cloud/aws/elasticache/plugin.pm | Perl | apache-2.0 | 2,274 |
=head1 LICENSE
# Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
# Copyright [2016-2017] EMBL-European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=cut
=head1 NAME
Bio::EnsEMBL::Analysis::Runnable::Pecan -
=head1 SYNOPSIS
my $runnable = new Bio::EnsEMBL::Analysis::Runnable::Pecan
(-workdir => $workdir,
-fasta_files => $fasta_files,
-tree_string => $tree_string,
-program => "/path/to/program");
$runnable->run;
my @output = @{$runnable->output};
=head1 DESCRIPTION
Pecan expects to run the pecan program, a global multiple aligner for large genomic sequences,
using a set of fasta files and a guide tree (Newick format), estimating the tree from the sequences when none is given.
The output (multiple alignment) is parsed and returned as a Bio::EnsEMBL::Compara::GenomicAlignBlock object.
=head1 METHODS
=cut
package Bio::EnsEMBL::Analysis::Runnable::Pecan;
use strict;
use warnings;
use Bio::EnsEMBL::Utils::Exception;
use Bio::EnsEMBL::Utils::Argument;
use Bio::EnsEMBL::Compara::GenomicAlign;
use Bio::EnsEMBL::Compara::GenomicAlignBlock;
use Bio::EnsEMBL::Analysis::Config::Compara;
use Bio::EnsEMBL::Analysis::Runnable;
our @ISA = qw(Bio::EnsEMBL::Analysis::Runnable);
# Default locations for the JVM, the Pecan jar, its main class and the
# tree-estimation helper script; $EXONERATE comes from
# Bio::EnsEMBL::Analysis::Config::Compara. All are overridable via the
# constructor arguments.
my $java_exe = "/nfs/software/ensembl/RHEL7/jenv/shims/java";

# OS name is captured but currently unused beyond this point
my $uname = `uname`;
$uname =~ s/[\r\n]+//;

my $default_exonerate = $EXONERATE;
my $default_jar_file = "/nfs/software/ensembl/RHEL7/linuxbrew/Cellar/pecan/0.8.0/pecan.jar";
my $default_java_class = "bp.pecan.Pecan";
my $estimate_tree = "/nfs/software/ensembl/RHEL7/linuxbrew/Cellar/pecan/0.8.0/libexec/bp/pecan/utils/EstimateTree.py";
=head2 new
Arg [1] : -workdir => "/path/to/working/directory"
Arg [2] : -fasta_files => "/path/to/fasta/file"
Arg [3] : -tree_string => "/path/to/tree/file" (optional)
Arg [4] : -parameters => "parameter" (optional)
Function : contruct a new Bio::EnsEMBL::Analysis::Runnable::Pecan
runnable
Returntype: Bio::EnsEMBL::Analysis::Runnable::Pecan
Exceptions: none
Example :
=cut
sub new {
  my ($class,@args) = @_;
  my $self = $class->SUPER::new(@args);

  my ($workdir, $fasta_files, $tree_string, $parameters,
      $jar_file, $java_class, $exonerate) =
        rearrange(['WORKDIR', 'FASTA_FILES', 'TREE_STRING','PARAMETERS',
                   'JAR_FILE', 'JAVA_CLASS', 'EXONERATE'], @args);

  chdir $self->workdir;
  $self->fasta_files($fasta_files) if (defined $fasta_files);
  if (defined $tree_string) {
    $self->tree_string($tree_string)
  } else {
    # Use EstimateTree.py program to get a tree from the sequences
    my $run_str = "python $estimate_tree " . join(" ", @$fasta_files);
    print "RUN $run_str\n";
    my @estimate = qx"$run_str";
    # expected output: line 0 "FINAL_TREE: (...);", line 2 "ORDERED_SEQUENCES: ..."
    if (($estimate[0] !~ /^FINAL_TREE: \(.+\);/) or ($estimate[2] !~ /^ORDERED_SEQUENCES: (.+)/)) {
      throw "Error while running EstimateTree program for Pecan";
    }
    ($tree_string) = $estimate[0] =~ /^FINAL_TREE: (\(.+\);)/;
    $self->tree_string($tree_string);
    # print "THIS TREE $tree_string\n";
    # the estimator may reorder the input files; adopt its ordering
    my ($files) = $estimate[2] =~ /^ORDERED_SEQUENCES: (.+)/;
    @$fasta_files = split(" ", $files);
    $self->fasta_files($fasta_files);
    # print "THESE FILES ", join(" ", @$fasta_files), "\n";
    ## Build newick tree which can be stored in the meta table
    foreach my $this_file (@$fasta_files) {
      # read the FASTA header to recover dnafrag id/coords for the tree label
      my $header = qx"head -1 $this_file";
      my ($dnafrag_id, $name, $start, $end, $strand) = $header =~ /^>DnaFrag(\d+)\|([^\.+])\.(\d+)\-(\d+)\:(\-?1)/;
      # print "HEADER: $dnafrag_id, $name, $start, $end, $strand $header";
      $strand = 0 if ($strand != 1);
      # replace the next numeric leaf label with "dnafragid_start_end_strand"
      $tree_string =~ s/(\W)\d+(\W)/$1${dnafrag_id}_${start}_${end}_${strand}$2/;
    }
    $self->{tree_to_save} = $tree_string;
    # print "TREE_TO_SAVE: $tree_string\n";
  }
  $self->parameters($parameters) if (defined $parameters);
  # program defaults to the analysis entry, then the java executable
  unless (defined $self->program) {
    if (defined($self->analysis) and defined($self->analysis->program)) {
      $self->program($self->analysis->program);
    } else {
      $self->program($java_exe);
    }
  }
  if (defined $jar_file) {
    $self->jar_file($jar_file);
  } else {
    $self->jar_file($default_jar_file);
  }
  if (defined $java_class) {
    $self->java_class($java_class);
  } else {
    $self->java_class($default_java_class);
  }
  if (defined $exonerate) {
    $self->exonerate($exonerate);
  } else {
    $self->exonerate($default_exonerate);
  }

  # Try to locate jar file in usual places...
  if (!-e $self->jar_file) {
    $default_jar_file = $self->jar_file;
    if (-e "/usr/local/pecan/$default_jar_file") {
      $self->jar_file("/usr/local/pecan/$default_jar_file");
    } elsif (-e "/usr/local/ensembl/pecan/$default_jar_file") {
      $self->jar_file("/usr/local/ensembl/pecan/$default_jar_file");
    } elsif (-e "/usr/local/ensembl/bin/$default_jar_file") {
      $self->jar_file("/usr/local/ensembl/bin/$default_jar_file");
    } elsif (-e "/usr/local/bin/pecan/$default_jar_file") {
      $self->jar_file("/usr/local/bin/pecan/$default_jar_file");
    } elsif (-e $ENV{HOME}."/pecan/$default_jar_file") {
      $self->jar_file($ENV{HOME}."/pecan/$default_jar_file");
    } elsif (-e $ENV{HOME}."/Downloads/$default_jar_file") {
      $self->jar_file($ENV{HOME}."/Downloads/$default_jar_file");
    } elsif (-e "/software/ensembl/compara/pecan/$default_jar_file") {
      $self->jar_file("/software/ensembl/compara/pecan/$default_jar_file");
    } else {
      throw("Cannot find Pecan JAR file!");
    }
  }

  return $self;
}
# Simple get/set accessors for the runnable's configuration. Each stores
# its value under a leading-underscore key of the object hash and returns
# the current value.
sub fasta_files {
    my $self = shift;
    if (@_) {
        $self->{'_fasta_files'} = shift;
    }
    return $self->{'_fasta_files'};
}

sub tree_string {
    my $self = shift;
    if (@_) {
        $self->{'_tree_string'} = shift;
    }
    return $self->{'_tree_string'};
}

sub parameters {
    my $self = shift;
    if (@_) {
        $self->{'_parameters'} = shift;
    }
    return $self->{'_parameters'};
}

sub jar_file {
    my $self = shift;
    if (@_) {
        $self->{'_jar_file'} = shift;
    }
    return $self->{'_jar_file'};
}

sub java_class {
    my $self = shift;
    if (@_) {
        $self->{'_java_class'} = shift;
    }
    return $self->{'_java_class'};
}

sub exonerate {
    my $self = shift;
    if (@_) {
        $self->{'_exonerate'} = shift;
    }
    return $self->{'_exonerate'};
}
=head2 run_analysis
Arg [1] : Bio::EnsEMBL::Analysis::Runnable::Pecan
Arg [2] : string, program name
Function : create and open a commandline for the program trf
Returntype: none
Exceptions: throws if the program in not executable or if the results
file doesnt exist
Example :
=cut
# Execute the Pecan alignment then parse its output; the optional
# $program argument is accepted for interface compatibility but unused.
sub run_analysis {
    my ($self, $program) = @_;

    $self->run_pecan();
    $self->parse_results();

    return 1;
}
# Build and execute the Pecan java command line in the working directory.
# Dies when the program is not executable or when Pecan writes anything
# to stdout/stderr (only the first output line is inspected).
sub run_pecan {
  my $self = shift;

  chdir $self->workdir;

  throw($self->program . " is not executable Pecan::run_analysis ")
    unless ($self->program && -x $self->program);

  # NOTE(review): the command is assembled by string interpolation and run
  # through a shell; file names with shell metacharacters would break it.
  my $command = $self->program;
  if ($self->parameters) {
    $command .= " " . $self->parameters;
  }
  $command .= " -cp ".$self->jar_file." ".$self->java_class;
  if (@{$self->fasta_files}) {
    $command .= " -F";
    foreach my $fasta_file (@{$self->fasta_files}) {
      $command .= " $fasta_file";
    }
  }
  #Remove -X option. Transitive anchoring is now switched off by default
  #$command .= " -J '" . $self->exonerate . "' -X";
  $command .= " -J '" . $self->exonerate . "'";
  if ($self->tree_string) {
    $command .= " -E '" . $self->tree_string . "'";
  }
  # write the alignment to pecan.mfa in the working directory
  $command .= " -G pecan.mfa";
  if ($self->options) {
    $command .= " " . $self->options;
  }
  print "Running pecan: " . $command . "\n";
  # any output (stderr is merged) is treated as a fatal java error
  open(PECAN, "$command 2>&1 |") || die "Failed: $!\n";
  my $java_error = <PECAN>;
  if ($java_error) {
    die ($java_error);
  }
  close PECAN;
  # unless (system($command) == 0) {
  #   throw("pecan execution failed\n");
  # }
}
=head2 parse_results
Arg [1] : Bio::EnsEMBL::Analysis::Runnable::Pecan
Function : parse the specifed file and produce RepeatFeatures
Returntype: nine
Exceptions: throws if fails to open or close the results file
Example :
=cut
=head2 parse_results

  Arg [1]   : Bio::EnsEMBL::Analysis::Runnable::Pecan
  Function  : parse the pecan.mfa multi-FASTA alignment written by Pecan
              into a Bio::EnsEMBL::Compara::GenomicAlignBlock and store it
              via $self->output
  Returntype: none
  Exceptions: throws if the alignment file cannot be opened or a FASTA
              header does not match the expected DnaFrag format

=cut

sub parse_results{
  my ($self, $run_number) = @_;

  my $alignment_file = $self->workdir . "/pecan.mfa";
  my $this_genomic_align_block = Bio::EnsEMBL::Compara::GenomicAlignBlock->new();

  # Three-arg open with a lexical handle. The original two-arg form
  # "open F, $file || throw(...)" could never throw: '||' bound to the
  # file name, so open failures went unnoticed.
  open(my $fh, '<', $alignment_file) or throw("Could not open $alignment_file");
  my $seq = "";
  my $this_genomic_align;

  print "Reading $alignment_file...\n";
  while (<$fh>) {
    next if (/^\s*$/);
    chomp;
    ## FASTA headers are defined in the Bio::EnsEMBL::Compara::Production::GenomicAlignBlock::Pecan
    ## module (or any other module you use to create this Pecan analysis job). Here is an example:
    ## >DnaFrag1234|X.10001-20000:-1
    ## This will correspond to chromosome X, which has dnafrag_id 1234 and the region goes from
    ## position 10001 to 20000 on the reverse strand.
    if (/^>/) {
      if (/^>DnaFrag(\d+)\|(.+)\.(\d+)\-(\d+)\:(\-?1)$/) {
        # flush the previous record before starting a new one
        if (defined($this_genomic_align) and $seq) {
          $this_genomic_align->aligned_sequence($seq);
          $this_genomic_align_block->add_GenomicAlign($this_genomic_align);
        }
        $this_genomic_align = Bio::EnsEMBL::Compara::GenomicAlign->new();
        $this_genomic_align->dnafrag_id($1);
        $this_genomic_align->dnafrag_start($3);
        $this_genomic_align->dnafrag_end($4);
        $this_genomic_align->dnafrag_strand($5);
        $seq = "";
      } else {
        throw("Error while parsing the FASTA header. It must start by \">DnaFrag#####\" where ##### is the dnafrag_id\n$_");
      }
    } else {
      $seq .= $_;
    }
  }
  close $fh;

  # flush the last record; guard against an empty alignment file, where
  # $this_genomic_align would be undefined (the original crashed here)
  if (defined($this_genomic_align) and $seq) {
    $this_genomic_align->aligned_sequence($seq);
    $this_genomic_align_block->add_GenomicAlign($this_genomic_align);
  }
  $self->output([$this_genomic_align_block]);
}
1;
| james-monkeyshines/ensembl-analysis | modules/Bio/EnsEMBL/Analysis/Runnable/Pecan.pm | Perl | apache-2.0 | 10,461 |
# Request class for the CloudFront GetDistributionConfig (2017-03-25) call.
package Paws::CloudFront::GetDistributionConfig;
  use Moose;
  # The distribution's ID; interpolated into the request URI ({Id}).
  has Id => (is => 'ro', isa => 'Str', uri_name => 'Id', traits => ['ParamInURI'], required => 1);

  use MooseX::ClassAttribute;

  # Call metadata consumed by the Paws caller to build the HTTP request
  # and deserialize the response.
  class_has _api_call => (isa => 'Str', is => 'ro', default => 'GetDistributionConfig');
  class_has _api_uri => (isa => 'Str', is => 'ro', default => '/2017-03-25/distribution/{Id}/config');
  class_has _api_method => (isa => 'Str', is => 'ro', default => 'GET');
  class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::CloudFront::GetDistributionConfigResult');
  class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::CloudFront::GetDistributionConfig - Arguments for method GetDistributionConfig on Paws::CloudFront
=head1 DESCRIPTION
This class represents the parameters used for calling the method GetDistributionConfig2017_03_25 on the
Amazon CloudFront service. Use the attributes of this class
as arguments to method GetDistributionConfig2017_03_25.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to GetDistributionConfig2017_03_25.
As an example:
$service_obj->GetDistributionConfig2017_03_25(Att1 => $value1, Att2 => $value2, ...);
Values for attributes that are native types (Int, String, Float, etc) can be passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
=head1 ATTRIBUTES
=head2 B<REQUIRED> Id => Str
The distribution's ID.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method GetDistributionConfig2017_03_25 in L<Paws::CloudFront>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/CloudFront/GetDistributionConfig.pm | Perl | apache-2.0 | 1,926 |
# VMOMI::ReplicationSpec - binding for the vSphere ReplicationSpec data
# object (extends DynamicData). Generated-style class: member metadata is
# exposed through get_class_ancestors / get_class_members.
package VMOMI::ReplicationSpec;
use parent 'VMOMI::DynamicData';

use strict;
use warnings;

# ancestor class names, nearest first
our @class_ancestors = ( 
    'DynamicData',
);

# [ member_name, vmomi_type, is_array_flag ] tuples declared by this class
our @class_members = ( 
    ['replicationGroupId', 'ReplicationGroupId', 0, ],
);

sub get_class_ancestors {
    return @class_ancestors;
}

# Inherited members first, then this class's own members.
sub get_class_members {
    my $class = shift;
    my @super_members = $class->SUPER::get_class_members();
    return (@super_members, @class_members);
}

1;
package JobDB::Job;
use strict;
use Data::Dumper;
use Conf;
use DirHandle;
use File::Basename;
use IO::File;
use Fcntl ':flock';
use MGRAST::Metadata;
use JSON;
use LWP::UserAgent;
use Auth;
use MIME::Base64;
=pod
=head1 NAME
Job - MGRAST job access module
=head1 DESCRIPTION
TODO
=head1 METHODS
=over 4
=item * B<init> ()
Initialise a new instance of the Job object.
=cut
sub init {
  my $self = shift;

  # check if we are called properly
  unless (ref $self) {
    die "Not called as an object method.";
  }

  # parameters for the Job->init() call
  my $data = $_[0];
  unless (ref $data eq 'HASH') {
    die "Init without a parameters hash.";
  }

  my $job = $self->SUPER::init(@_);
  unless (ref $job) {
    print STDERR "Job init call failed.\n";
    return undef;
  }

  # # check if the user has the right to see this job
  # private jobs require the session user to own an explicit 'view' right
  # or a wildcard ('*') right on metagenomes
  unless ($job->public) {
    unless ($job->_master->{_user}) {
      print STDERR "Private job ".$job->metagenome_id." called with no user.\n";
      return undef;
    }
    unless ($job->_master->{_user}->has_right(undef,'view','metagenome',$job->metagenome_id) || $job->_master->{_user}->has_star_right('view','metagenome')) {
      print STDERR "User ".$job->_master->{_user}->login." lacks rights for job ".$job->metagenome_id.".\n";
      return undef;
    }
  }

  return $job;
}
# Accessor for the job name. With an argument it acts as a setter; without
# one it lazily falls back to the sample's 'sample_name' metadata (joined
# with ';') when no name is stored, and finally to the string 'unknown'.
sub name {
  my ($self , $value) = @_ ;

  if ($value){
    $self->SUPER::name($value);
  }
  else{
    my $name = '';
    unless ($self->SUPER::name()){
      my $sample = $self->sample ;
      if ($sample and ref $sample){
        # cache the derived name on the job so the lookup happens only once
        $name = join ";" , @{ $sample->data('sample_name') } ;
        $self->SUPER::name($name) if ($name) ;
      }
    }
  }
  return $self->SUPER::name() || 'unknown';
}
=pod

=item * B<reserve_job_id> (I<user>, I<name>, I<file>, I<size>, I<md5>)

Inserts a new Job row using the next available job_id and metagenome_id,
owned by I<user>, then grants the owner view/edit/delete rights on the
new metagenome. Returns the new Job object, or undef on failure.

=cut

sub reserve_job_id {
  my ($self, $user, $name, $file, $size, $md5) = @_;

  my $master = $self->_master();
  unless (ref($master)) {
    # fixed typo: these diagnostics used to print to the bareword handle
    # STDRER, so they were silently lost
    print STDERR "reserve_job_id called without a dbmaster reference";
    return undef;
  }
  unless (ref($user)) {
    print STDERR "reserve_job_id called without a user";
    return undef;
  }

  # get and insert the next ids in one statement
  # NOTE(review): no table lock is taken here; the sub-selects assume no
  # concurrent insert happens between computing and writing the ids - verify
  my $dbh = $master->db_handle;
  my $cmd = "INSERT INTO Job (job_id,metagenome_id,name,file,file_size_raw,file_checksum_raw,server_version,owner,_owner_db) VALUES ((SELECT max(x.job_id + 1) FROM Job x),(SELECT max(y.metagenome_id + 1) FROM Job y),?,?,?,?,?,?,?)";
  my $sth = $dbh->prepare($cmd);
  $sth->execute($name, $file, $size, $md5, 4, $user->_id, 1);
  $sth->finish();
  $dbh->commit();

  # MySQL-specific: primary key of the row we just inserted
  my $insertid = $dbh->selectcol_arrayref("SELECT LAST_INSERT_ID()");
  unless ($insertid && @$insertid) {
    print STDERR "Can't create job\n";
    return undef;
  }
  $insertid = $insertid->[0];

  # re-fetch the job as a full object
  my $job = $master->Job->get_objects({_id => $insertid});
  unless ($job && @$job) {
    print STDERR "Can't create job\n";
    return undef;
  }
  $job = $job->[0];

  # Connect to User/Rights DB
  my $dbm = DBMaster->new(-database => $Conf::webapplication_db,
                          -backend  => $Conf::webapplication_backend,
                          -host     => $Conf::webapplication_host,
                          -user     => $Conf::webapplication_user,
                         );

  # grant the owner full rights on the new metagenome (idempotent)
  my $rights = ['view', 'edit', 'delete'];
  foreach my $right_name (@$rights) {
    my $objs = $dbm->Rights->get_objects({ scope     => $user->get_user_scope,
                                           data_type => 'metagenome',
                                           data_id   => $job->{metagenome_id},
                                           name      => $right_name,
                                           granted   => 1 });
    unless (@$objs > 0) {
      my $right = $dbm->Rights->create({ scope     => $user->get_user_scope,
                                         data_type => 'metagenome',
                                         data_id   => $job->{metagenome_id},
                                         name      => $right_name,
                                         granted   => 1 });
      unless (ref $right) {
        print STDERR "Unable to create Right $right_name - metagenome - ".$job->{metagenome_id};
        return undef;
      }
    }
  }

  return $job;
}
=pod

=item * B<initialize> (I<user>, I<data>, I<job>)

Populates a job with the parameters in I<data>, which is either a hash
reference or the path to a tab-separated key/value file. When no I<job>
is passed, a new one is created via reserve_job_id(). Returns the Job
object, or undef on failure.

=cut

sub initialize {
  my ($self, $user, $data, $job) = @_;

  my $master = $self->_master();
  unless (ref($master)) {
    # fixed typo: this used to print to the bareword handle STDRER
    print STDERR "initialize called without a dbmaster reference";
    return undef;
  }

  # get params from hash or file
  my $params = {};
  if (ref($data) eq "HASH") {
    $params = $data;
  }
  elsif ((! ref($data)) && (-s $data)) {
    my @lines = `cat $data`;
    chomp @lines;
    foreach my $line (@lines) {
      my ($k, $v) = split(/\t/, $line);
      $params->{$k} = $v;
    }
  }

  # hack due to duplicate keys: 'sequence type' and 'sequence_type'
  if (exists $params->{'sequence type'}) {
    delete $params->{'sequence type'};
  }
  # sequence_type is currently a guess, add it
  if (exists $params->{sequence_type}) {
    $params->{sequence_type_guess} = $params->{sequence_type};
  }

  # get job object
  unless ($job && ref($job)) {
    eval {
      $job = $master->Job->reserve_job_id($user, $params->{name}, $params->{file}, $params->{file_size}, $params->{file_checksum});
    };
    if ($@ || (! $job)) {
      print STDERR "Can't create job\n";
      return undef;
    }
  }

  # add sequence type
  if (exists $params->{sequence_type}) {
    $job->sequence_type($params->{sequence_type});
  }

  # add raw stats (stored with a '_raw' suffix)
  my $stat_keys = ['bp_count', 'sequence_count', 'average_length', 'standard_deviation_length', 'length_min', 'length_max', 'average_gc_content', 'standard_deviation_gc_content', 'average_gc_ratio', 'standard_deviation_gc_ratio', 'ambig_char_count', 'ambig_sequence_count', 'average_ambig_chars', 'drisee_score'];
  foreach my $key (@$stat_keys) {
    if (exists $params->{$key}) {
      $job->stats($key.'_raw', $params->{$key});
    } elsif (exists $params->{$key.'_raw'}) {
      $job->stats($key.'_raw', $params->{$key.'_raw'});
    }
  }

  # add all remaining params as job attributes (whitespace in values -> '_')
  my $used_keys = {metagenome_id => 1, name => 1, file => 1, file_size => 1, file_checksum => 1, sequence_type => 1};
  map { $used_keys->{$_} = 1 } @$stat_keys;

  foreach my $key (keys %$params) {
    my $clean_key = $key;
    $clean_key =~ s/_raw$//;
    next if (exists($used_keys->{$key}) || exists($used_keys->{$clean_key}));
    my $value = $params->{$key};
    $value =~ s/\s+/_/g;
    $job->data($key, $value);
  }

  $job->set_filter_options();

  return $job;
}
=pod

=item * B<set_publication_date> ()

Stores the current local time as the job's 'publication_date' attribute
(format: YYYY-MM-DD HH:MM:SS, e.g. 2016-03-08 10:26:48), creating the
attribute when it does not exist yet. Returns 1.

=cut

sub set_publication_date {
  my ($self) = @_;

  my $master = $self->_master();
  # sprintf with zero-padding replaces the original chain of ternary
  # padding expressions; the produced string is identical
  my ($sec,$min,$hour,$mday,$mon,$year) = localtime(time);
  my $date = sprintf("%04d-%02d-%02d %02d:%02d:%02d",
                     $year + 1900, $mon + 1, $mday, $hour, $min, $sec);

  my $publication = $master->JobAttributes->get_objects({ job => $self, tag => "publication_date" });
  if (scalar(@$publication)) {
    # update the existing attribute in place
    $publication = $publication->[0];
    $publication->value($date);
  } else {
    $master->JobAttributes->create({ job => $self, tag => "publication_date", value => $date});
  }
  return 1;
}
=pod

=item * B<reserve_job> (I<user>, I<options>, I<stats>)

Merges the I<options> and I<stats> hash references (stats values win on
key collisions) and creates a new job for I<user> via initialize().
Returns the new Job object, or undef on failure.

=cut

sub reserve_job {
  my ($self, $user, $options, $stats) = @_;

  my $data = {};
  if (ref $options) {
    $data->{$_} = $options->{$_} for keys %$options;
  }
  if (ref $stats) {
    $data->{$_} = $stats->{$_} for keys %$stats;
  }
  # fixed typo: this used to call the non-existent method 'initalize',
  # which made every reserve_job call die at runtime
  return $self->initialize($user, $data);
}
=pod

=item * B<has_checksum> (I<checksum>, I<user>)

Looks for a job whose raw file checksum matches I<checksum>, restricted
to public jobs or (when I<user> is given) jobs owned by that user.
Returns the matching metagenome_id, or 0 when no match exists.

=cut

sub has_checksum {
  my ($self, $checksum, $user) = @_;

  my $dbh = $self->_master->db_handle;
  # bind the checksum and owner id instead of interpolating them into the
  # SQL string, so quoting/injection is handled by DBI
  my @bind = ($checksum);
  my $who  = "public=1";
  if ($user && ref($user)) {
    $who = "(owner=? OR public=1)";
    push @bind, $user->_id;
  }
  my $md5 = $dbh->selectcol_arrayref("SELECT metagenome_id FROM Job WHERE file_checksum_raw=? AND ".$who, {}, @bind);
  return ($md5 && @$md5) ? $md5->[0] : 0;
}
# Submit the uploaded sequence file to the pipeline by invoking the
# external create_and_submit_job script. Returns the script's output, or
# 0 when the script binary cannot be found.
sub finish_upload {
  my ($self, $file, $file_format) = @_ ;

  # create_and_submit_job -j <job_number> -f <sequence_file> [ -p <pipeline_name> -o <pipeline_options> --fastq --rna_only ]

  # set options
  my $opts = $self->set_job_options;
  my $format = ($file_format =~ /fastq/) ? "--fastq" : '' ;

  my $cmd = $Conf::create_job;
  # NOTE(review): $file and $opts are interpolated into a shell command;
  # paths or options containing shell metacharacters would break or be
  # interpreted by the shell - verify inputs are sanitized upstream
  my $params = " -j " . $self->job_id . " -f $file -o '$opts' $format";
  print STDERR "Calling $cmd $params\n";
  if ($cmd and -f $cmd) {
    my $output = `$cmd $params`;
    print STDERR $output;
    return $output;
  }
  else {
    print STDERR "Can't find $cmd\n";
    return 0;
  }
}
=pod
=item * B<get_jobs_for_user> (I<user>, I<right>, I<viewable>)
Returns the Jobs objects the user I<user> has access to. Access to a job is defined
by the right to edit a genome of a certain metagenome_id. In the context of the RAST
server this method checks the 'edit - genome' rights of a user. Optionally, you can
change this by providing the parameter I<right> and setting it to eg. 'view'.
If present and true, the parameter I<viewable> restricts the query to jobs marked
as viewable.
Please note you may no longer pass a scope to this function.
=cut
sub get_jobs_for_user {
  my ($self, $user, $right, $viewable) = @_;

  unless (ref $self) {
    die "Call method via the DBMaster.\n";
  }

  unless (ref $user and ( $user->isa("WebServerBackend::User"))) {
    print STDERR "No user given in method get_jobs_for_user.\n";
    die "No user given in method get_jobs_for_user.\n";
  }

  my $get_options = {};
  $get_options->{viewable} = 1 if ($viewable);
  # metagenome ids the user has the requested right ('edit' by default) on
  my $right_to = $user->has_right_to(undef, $right || 'edit', 'metagenome');

  # check if first right_to is place holder
  # a '*' wildcard right grants access to every (viewable) job
  if (scalar(@$right_to) and $right_to->[0] eq '*') {
    return $self->_master->Job->get_objects($get_options);
  }

  # start with the jobs the user owns directly
  $get_options->{owner} = $user;
  my $jobs = $self->_master->Job->get_objects($get_options);

  # drop ids already covered by the owned jobs ...
  my %ids = map { $_ => 1 } @$right_to;
  foreach my $j (@$jobs){
    if ($j->metagenome_id){
      if (defined $ids{$j->metagenome_id}){
        delete $ids{$j->metagenome_id};
      }
    }
  }
  # ... and add the remaining right-granted jobs individually
  foreach (keys %ids){
    my $tmp_j;
    if($_){
      $tmp_j = $self->_master->Job->init({ metagenome_id => $_ });
    }
    if($tmp_j){
      if($viewable){
        next unless $tmp_j->viewable;
      }
      push @$jobs, $tmp_j;
    }
  }
  return $jobs;
}
# Fast, raw-SQL variant of get_jobs_for_user: returns a list of plain
# hashes (not Job objects), one per accessible job, each carrying the
# job's core columns plus its pipeline stage stati. Jobs flagged
# 'deleted' or 'no_sims_found' are excluded.
sub get_jobs_for_user_fast {
  my ($self, $user_or_scope, $right, $viewable) = @_;

  unless (ref $self) {
    die "Call method via the DBMaster.\n";
  }

  unless (ref $user_or_scope and ( $user_or_scope->isa("WebServerBackend::User") or $user_or_scope->isa("WebServerBackend::Scope"))) {
    print STDERR "No user or scope given in method get_jobs_for_user.\n";
    die "No user or scope given in method get_jobs_for_user.\n";
  }

  my $right_to = $user_or_scope->has_right_to(undef, $right || 'edit', 'metagenome');
  my $job_cond = "";
  if ($viewable) {
    $job_cond .= " AND viewable = 1";
  }
  # a leading '*' means a wildcard right: no id restriction is added
  unless (@$right_to and $right_to->[0] eq '*') {
    my @g = grep { $_ ne '*' } @$right_to;
    if (@g == 0) { return (); }
    $job_cond .= " AND metagenome_id IN ( " . join(", ", map { "'$_'" } @g) . ")";
  }
  my $dbh = $self->_master()->db_handle();
  # exclude deleted jobs and jobs without similarity results
  my $skip = $dbh->selectcol_arrayref(qq(SELECT DISTINCT job FROM JobAttributes WHERE tag='deleted' OR tag='no_sims_found'));
  if ($skip && @$skip) {
    $job_cond .= " AND j._id NOT IN (".join(",", @$skip).")";
  }
  # one row per (job, pipeline stage), ordered so all stages of a job are
  # adjacent and chronologically sorted
  my $res = $dbh->selectall_arrayref(qq(SELECT j.job_id, j.metagenome_id, j.name,
                                        j.file_size_raw, j.server_version,
                                        j.created_on, j.owner, j._owner_db, j.viewable, j.sequence_type,
                                        s.stage, s.status, s.timestamp, j._id
                                        FROM Job j, PipelineStage s
                                        WHERE s.job=j._id $job_cond
                                        ORDER BY j.job_id DESC, s.timestamp ASC));
  my $ent = shift(@$res);
  my @out;
  # group consecutive rows by job_id, collecting stage stati per job
  while ($ent)
  {
    my($cur, $cur_genome, $cur_name, $cur_size, $cur_server_version, $cur_created, $cur_owner, $cur_owner_db, $jviewable, $jsequence_type, undef, undef, undef, $jid) = @$ent;
    my $stages = {};
    my $timed_stati = [];
    while ($ent and $ent->[0] eq $cur)
    {
      my($id, $genome, $name, $size, $vers, $created, $owner, $owner_db, $view, $type, $stage, $stat, $ts) = @$ent;
      $stages->{$stage} = $stat;
      push(@$timed_stati, [ $ts, $stage, $stat ]);
      $ent = shift(@$res);
    }
    push(@out, {
                job_id => $cur,
                metagenome_id => $cur_genome,
                name => $cur_name,
                project_name => '',
                created_on => $cur_created,
                status => $stages,
                owner => $cur_owner,
                owner_db => $cur_owner_db,
                size => $cur_size,
                server_version => $cur_server_version,
                viewable => $jviewable,
                _id => $jid,
                sequence_type => $jsequence_type,
                timed_stati => $timed_stati
               });
  }
  return @out;
}
# Map each metagenome id in $mgids to its sequence_type from the Job table.
# Ids without a matching Job row stay "Unknown". Returns a hashref.
sub get_sequence_types {
    my ($self, $mgids) = @_;
    my $dbh   = $self->_master()->db_handle;
    my %types = map { ($_, "Unknown") } @$mgids;
    my $in    = join(",", map { $dbh->quote($_) } @$mgids);
    my $rows  = $dbh->selectall_arrayref("select metagenome_id, sequence_type from Job where metagenome_id in (".$in.")");
    for my $row (@$rows) {
        my ($mgid, $stype) = @$row;
        $types{$mgid} = $stype if exists $types{$mgid};
    }
    return \%types;
}
# Map metagenome ids to their numeric job_id. Ids without a Job row are
# simply absent from the returned hashref.
sub get_job_ids {
    my ($self, $mgids) = @_;
    my $dbh  = $self->_master()->db_handle;
    my $in   = join(",", map { $dbh->quote($_) } @$mgids);
    my $rows = $dbh->selectall_arrayref("select metagenome_id, job_id from Job where metagenome_id in (".$in.")");
    my %ids;
    if ($rows && @$rows) {
        %ids = map { ($_->[0] => $_->[1]) } @$rows;
    }
    return \%ids;
}
# Map metagenome ids to their 'pipeline_version' JobAttributes value,
# falling back to $default for ids without that attribute. Returns a hashref.
sub get_job_pipelines {
    my ($self, $mgids, $default) = @_;
    my $dbh  = $self->_master()->db_handle;
    my $in   = join(",", map { $dbh->quote($_) } @$mgids);
    my %pipe = map { ($_, $default) } @$mgids;
    my $rows = $dbh->selectall_arrayref("select j.metagenome_id, a.value from Job j, JobAttributes a where j._id=a.job and tag='pipeline_version' and metagenome_id in (".$in.")");
    if ($rows && @$rows) {
        $pipe{ $_->[0] } = $_->[1] for @$rows;
    }
    return \%pipe;
}
# Public, viewable jobs: arrayref of metagenome ids when $id_only is true,
# otherwise the full list of Job objects.
sub get_public_jobs {
    my ($self, $id_only) = @_;
    my $master = $self->_master();
    unless ($id_only) {
        return $master->Job->get_objects( {public => 1, viewable => 1} );
    }
    my $ids = $master->db_handle->selectcol_arrayref("select metagenome_id from Job where viewable=1 and public=1");
    return ($ids && @$ids) ? $ids : [];
}
# Private (non-public), viewable jobs the user has rights on.
#   $id_only - return metagenome ids only
#   $edit    - check 'edit' right instead of 'view'
sub get_private_jobs {
    my ($self, $user, $id_only, $edit) = @_;
    unless ($user && ref($user)) { return []; }
    my $ids = $edit ? $user->has_right_to(undef,'edit','metagenome') : $user->has_right_to(undef,'view','metagenome');
    unless ($ids && (@$ids > 0)) { return []; }
    my $db = $self->_master();
    if ($id_only) {
        my $query = "select metagenome_id from Job where viewable=1 and (public is null or public=0) and metagenome_id IN (".join(",", map {"'$_'"} @$ids).")";
        my $result = $db->db_handle->selectcol_arrayref($query);
        return ($result && @$result) ? $result : [];
    }
    else {
        my $jobs = [];
        foreach my $mg (@$ids) {
            my $job = $db->Job->get_objects( {metagenome_id => $mg, viewable => 1} );
            if ($job && @$job) {
                # NOTE(review): get_objects returns an arrayref, so this pushes an
                # arrayref per metagenome -- callers get a list of arrayrefs, not of
                # Job objects (unlike get_public_jobs). Possibly meant @$job; confirm
                # against callers before changing.
                push @$jobs, $job;
            }
        }
        return $jobs;
    }
}
# Count of all viewable jobs.
# NOTE: a second count_all definition later in this file (taking an optional
# user) redefines this one at load time.
sub count_all {
    my ($self) = @_;
    my $row = $self->_master()->db_handle()->selectrow_arrayref("SELECT count(*) FROM Job WHERE viewable=1");
    return $row->[0];
}
# Count of viewable public jobs.
# NOTE: a second count_public definition later in this file redefines this
# one at load time.
sub count_public {
    my ($self) = @_;
    my $row = $self->_master()->db_handle()->selectrow_arrayref("SELECT count(*) FROM Job WHERE viewable=1 AND public=1");
    return $row->[0];
}
# Bulk-upsert tag/value pairs for this job into JobStatistics or
# JobAttributes ($type is 'statistics' or 'attributes').
# Returns 1 on success, 0 on empty input, unknown type, or DB failure.
sub set_job_data {
    my ($self, $type, $data) = @_;
    return 0 unless ($data && %$data);
    my %tables = (statistics => 'JobStatistics', attributes => 'JobAttributes');
    my $table  = $tables{$type};
    return 0 unless $table;
    my $jid = $self->_id;
    my $dbh = $self->_master->db_handle;
    # one prepared upsert reused for every pair
    my $sth = $dbh->prepare(qq(insert into $table (`tag`,`value`,`job`,`_job_db`) values (?, ?, $jid, 2) on duplicate key update value = ?));
    foreach my $tag (keys %$data) {
        my $val = $data->{$tag};
        $sth->execute($tag, $val, $val) || return 0;
    }
    $dbh->commit;
    return 1;
}
# new method section
=pod
=item * B<stats> ()
Returns a hashref of all statistics keys and values for a job.
If a key is given, returns a hashref with only that key/value pair.
Sets a value if both key and value are given (returns true on success).
=cut
# Get or set JobStatistics entries for this job.
#   stats($tag, $value) - upsert the statistic, returns 1
#   stats($tag)         - hashref with just that tag's value
#   stats()             - hashref of all tag => value pairs
sub stats {
    my ($self, $tag, $value) = @_;
    my $dbh = $self->_master->db_handle;
    my $sth;
    if (defined($value) and $tag) {
        # set: update existing JobStatistics row, or create a new one
        my $jstat = $self->_master->JobStatistics->get_objects( { job => $self,
                                                                  tag => $tag
                                                                });
        if (ref $jstat and scalar @$jstat) {
            $jstat->[0]->value($value);
        }
        else{
            $jstat = $self->_master->JobStatistics->create( { job   => $self,
                                                              tag   => $tag,
                                                              value => $value
                                                            });
        }
        return 1;
    }
    elsif ($tag) {
        # bind $tag rather than interpolating it into the SQL string
        # (avoids quoting problems / SQL injection)
        $sth = $dbh->prepare("SELECT tag , value FROM JobStatistics where job=".$self->_id." and tag=?");
        $sth->execute($tag);
    }
    else {
        $sth = $dbh->prepare("SELECT tag , value FROM JobStatistics where job=".$self->_id);
        $sth->execute;
    }
    my $results = $sth->fetchall_arrayref();
    my $rhash = {};
    $rhash->{ $_->[0] } = $_->[1] for @$results;
    return $rhash;
}
# All Job objects carrying the 'deleted' attribute. Returns an arrayref.
sub get_deleted_jobs {
    my ($self) = @_;
    my $attrs = $self->_master->JobAttributes->get_objects({tag => 'deleted'});
    return [ map { $_->job } @$attrs ];
}
# All Job objects flagged with 'no_sims_found'. Returns an arrayref.
sub get_bad_jobs {
    my ($self) = @_;
    my $attrs = $self->_master->JobAttributes->get_objects({tag => 'no_sims_found'});
    return [ map { $_->job } @$attrs ];
}
=pod
=item * B<stage> ()
Returns a hash of all stages and current status for a given job id.
=cut
# Return { stage => status } for every PipelineStage row of the job with the
# given _id. Must be called via the DBMaster (class-level helper).
sub get_stages_fast {
    my ($self, $jobid) = @_;
    unless (ref $self) {
        die "Call method via the DBMaster.\n";
    }
    my $dbh = $self->_master()->db_handle();
    # bind the job id instead of interpolating it into the SQL string
    my $sth = $dbh->prepare("SELECT stage, status FROM PipelineStage where job=?");
    $sth->execute($jobid);
    my $rhash = {};
    my $rows = $sth->fetchall_arrayref();
    if ($rows && (@$rows > 0)) {
        %$rhash = map { $_->[0], $_->[1] } @$rows;
    }
    return $rhash;
}
# Back-compat alias: delegates to stage().
sub stage_info {
    my ($self, $tag, $value) = @_;
    return $self->stage($tag, $value);
}
=pod
=item * B<stage> ()
Returns a hash of all stages and current status for a job.
If a stage is given, returns only hash of specified stage, current status.
Insert or update a status if stage and status is given.
=cut
# Get or set pipeline stage status for this job.
#   stage($tag, $value) - insert/update status of stage $tag, returns { $tag => $value }
#   stage($tag)         - hashref { stage => status } for that stage only
#   stage()             - hashref { stage => status } for all stages
sub stage {
    my ($self, $tag, $value) = @_;
    my $dbh = $self->_master->db_handle;
    my $sth;
    if ($value and $tag) {
        my $jstat = $self->_master->PipelineStage->get_objects( { job   => $self,
                                                                  stage => $tag
                                                                });
        # update existing stage-status
        if ( ref($jstat) and scalar(@$jstat) ) {
            my $time = $self->get_timestamp();
            # FIX: PipelineStage rows carry 'status' (see create below and the
            # SELECTs), not 'value' as JobStatistics does -- the original called
            # the non-existent value() accessor here.
            $jstat->[0]->status($value);
            $jstat->[0]->timestamp($time);
        }
        # insert new stage-status
        else{
            $jstat = $self->_master->PipelineStage->create( { job    => $self,
                                                              stage  => $tag,
                                                              status => $value,
                                                            });
        }
        return { $tag => $value };
    }
    # get current status for input stage; bind $tag instead of interpolating
    elsif ($tag) {
        $sth = $dbh->prepare("SELECT stage, status FROM PipelineStage where job=" . $self->_id . " and stage=?");
        $sth->execute($tag);
    }
    else {
        $sth = $dbh->prepare("SELECT stage, status FROM PipelineStage where job=" . $self->_id);
        $sth->execute;
    }
    my $results = $sth->fetchall_arrayref();
    my $rhash = {};
    $rhash->{ $_->[0] } = $_->[1] for @$results;
    return $rhash;
}
=pod
=item * B<data> ()
Returns a hashref of all attribute keys and values for a job.
If a key is given, returns a hashref with only that key/value pair.
Sets a value if both key and value are given (returns true on success).
=cut
# Get or set JobAttributes entries for this job.
#   data($tag, $value) - upsert the attribute (scalar values only), returns 1/0
#   data($tag)         - hashref with just that tag's value
#   data()             - hashref of all tag => value pairs
sub data {
    my ($self, $tag, $value) = @_;
    my $dbh = $self->_master->db_handle;
    my $sth;
    if (defined($value) and $tag) {
        # references cannot be stored as attribute values
        if (ref $value){
            print STDERR "ERROR: invalid value type for $tag ($value) \n";
            print STDERR Dumper $value;
            return 0;
        }
        my $jstat = $self->_master->JobAttributes->get_objects( { job => $self,
                                                                  tag => $tag
                                                                });
        if (ref $jstat and scalar @$jstat) {
            $jstat->[0]->value($value);
        }
        else {
            $jstat = $self->_master->JobAttributes->create( { job   => $self,
                                                              tag   => $tag,
                                                              value => $value
                                                            });
        }
        return 1;
    }
    elsif ($tag) {
        # bind $tag rather than interpolating it into the SQL string
        # (avoids quoting problems / SQL injection)
        $sth = $dbh->prepare("SELECT tag, value FROM JobAttributes where job=". $self->_id ." and tag=?");
        $sth->execute($tag);
    }
    else {
        $sth = $dbh->prepare("SELECT tag, value FROM JobAttributes where job=". $self->_id);
        $sth->execute;
    }
    my $results = $sth->fetchall_arrayref();
    my $rhash = {};
    $rhash->{ $_->[0] } = $_->[1] for @$results;
    return $rhash;
}
=pod
=item * B<set_filter_options> ()
job function that checks JobAttributes for a given set of tags;
if they exist, creates the new tag 'filter_options' with a concatenated string
and returns the filter_options string
=cut
# Build the 'filter_options' string from this job's JobAttributes and store
# it back (also refreshes the combined job options string). Returns the
# string, or 'skip' when no filter flag is set.
# Format: flag:param=value:... for each active flag.
sub set_filter_options {
    my ($self) = @_;
    my %flag_params = ( filter_ln    => [ 'min_ln', 'max_ln' ],
                        filter_ambig => [ 'max_ambig' ],
                        dynamic_trim => [ 'min_qual', 'max_lqb' ] );
    my $attr = $self->data();
    # current value (or 0) for every flag and every flag parameter
    my %val;
    for my $key (map { ($_, @{$flag_params{$_}}) } keys %flag_params) {
        $val{$key} = exists($attr->{$key}) ? $attr->{$key} : 0;
    }
    my @parts;
    for my $flag (keys %flag_params) {
        next unless $val{$flag};
        push @parts, $flag, map { "$_=" . $val{$_} } @{$flag_params{$flag}};
    }
    my $opts_string = @parts ? join(":", @parts) : 'skip';
    $self->data('filter_options', $opts_string);
    # reset job option string
    $self->set_job_options();
    return $opts_string;
}
=pod
=item * B<set_job_options> ()
job function that creates an options string based upon all tag / value pairs
in JobAttributes for job and sets $job->{options} to its value.
also adds 'user_name' and 'project_id' as tags
format: tag1=value1&tag2=value2 ...
returns options string
=cut
# Rebuild the combined option string ("tag1=value1&tag2=value2...") from all
# JobAttributes plus user_id (and project_id when a primary project exists),
# store it via options() and return it.
sub set_job_options {
    my ($self) = @_;
    my $attrs = $self->data();
    my @pairs = map { "$_=" . $attrs->{$_} } keys %$attrs;
    push @pairs, "user_id=mgu" . $self->owner->_id;
    my $project = $self->primary_project;
    if ($project) {
        push @pairs, "project_id=mgp" . $project->id;
    }
    my $opts_string = join("&", @pairs);
    $self->options($opts_string);
    return $opts_string;
}
# First environmental package ('ep') child of this job's sample, or undef
# when there is no sample or no ep child.
sub env_package {
    my ($self) = @_;
    my $sample = $self->sample;
    return undef unless $sample;
    my $eps = $sample->children('ep');
    return ($eps && @$eps) ? $eps->[0] : undef;
}
######################
# MIxS metadata methods
#####################
# Unique ENVO terms across the sample's biome, feature and material fields
# (leading "ENVO:" prefix stripped). Returns an arrayref; [] without a sample.
# (Sub name spelling is historical; kept for caller compatibility.)
sub enviroment {
    my ($self) = @_;
    return [] unless defined $self->sample;
    my %seen;
    for my $field ('biome', 'feature', 'material') {
        for my $term (@{ $self->sample->value_set($field) }) {
            $term =~ s/^envo:\s?//i;
            $seen{$term} = 1;
        }
    }
    return [keys %seen];
}
# Comma-joined unique biome terms of the sample (ENVO prefix stripped);
# '' when there is no sample or no values.
sub biome {
    my ($self) = @_;
    return '' unless defined $self->sample;
    my %seen;
    for my $term (@{ $self->sample->value_set('biome') }) {
        $term =~ s/^envo:\s?//i;
        $seen{$term} = 1;
    }
    return %seen ? join(", ", keys %seen) : '';
}
# Comma-joined unique feature terms of the sample (ENVO prefix stripped);
# '' when there is no sample or no values.
sub feature {
    my ($self) = @_;
    return '' unless defined $self->sample;
    my %seen;
    for my $term (@{ $self->sample->value_set('feature') }) {
        $term =~ s/^envo:\s?//i;
        $seen{$term} = 1;
    }
    return %seen ? join(", ", keys %seen) : '';
}
# Comma-joined unique material terms of the sample (ENVO prefix stripped);
# '' when there is no sample or no values.
sub material {
    my ($self) = @_;
    return '' unless defined $self->sample;
    my %seen;
    for my $term (@{ $self->sample->value_set('material') }) {
        $term =~ s/^envo:\s?//i;
        $seen{$term} = 1;
    }
    return %seen ? join(", ", keys %seen) : '';
}
# Sequencing method: library metadata 'seq_meth' when present, otherwise the
# pipeline's 'sequencing_method_guess' attribute, otherwise ''.
sub seq_method {
    my ($self) = @_;
    my $from_md    = $self->get_metadata_value('seq_meth', 'library');
    my $guess      = $self->data('sequencing_method_guess');
    my $from_guess = exists($guess->{sequencing_method_guess}) ? $guess->{sequencing_method_guess} : '';
    return $from_md ? $from_md : $from_guess;
}
# Sequence type: the pipeline-computed sequence_type takes precedence over
# the metadata 'investigation_type' (mapped through its alias table).
sub seq_type {
    my ($self) = @_;
    my $mddb     = MGRAST::Metadata->new();
    my $computed = $self->sequence_type || '';
    my $declared = $self->get_metadata_value('investigation_type', 'library');
    return $computed ? $computed : $mddb->investigation_type_alias($declared);
}
# Pubmed id from the library metadata, '' when absent.
# FIX: external_ids() keys its hash by the bare name ('pubmed'), not
# 'pubmed_id' -- the original lookup always missed and returned ''.
sub pubmed {
    my ($self) = @_;
    my $ids = $self->external_ids();
    return $ids->{pubmed} ? $ids->{pubmed} : '';
}
# Collect external accession ids into a hashref keyed by bare name:
# project/ncbi/greengenes from the primary project metadata ("<name>_id"
# tags), pubmed/gold from the library metadata.
sub external_ids {
    my ($self) = @_;
    my %ids;
    for my $name ('project', 'ncbi', 'greengenes') {
        $ids{$name} = $self->get_metadata_value($name."_id", 'primary_project');
    }
    for my $name ('pubmed', 'gold') {
        $ids{$name} = $self->get_metadata_value($name."_id", 'library');
    }
    return \%ids;
}
# Sample 'location' metadata value ('' when absent).
sub location {
    my ($self) = @_;
    return $self->get_metadata_value('location', 'sample');
}
# Sample 'country' metadata value with any leading "GAZ:" / "country:"
# prefix stripped ('' when absent).
sub country {
    my ($self) = @_;
    my $val = $self->get_metadata_value('country', 'sample');
    $val =~ s/^(gaz|country):\s?//i;
    return $val;
}
# Country and location (in that order), skipping empty values.
# Returns an arrayref with zero, one or two elements.
sub geo_loc_name {
    my ($self) = @_;
    my @region = grep { $_ } ($self->country, $self->location);
    return \@region;
}
# [latitude, longitude] from the sample metadata; [] unless both are set
# (note: a literal 0 coordinate is treated as unset, as before).
sub lat_lon {
    my ($self) = @_;
    my @coords = map { $self->get_metadata_value($_, 'sample') } ('latitude', 'longitude');
    return (grep { !$_ } @coords) ? [] : \@coords;
}
# Strip leading and trailing whitespace; undef passes through unchanged.
sub trim {
    my ($str) = @_;
    return $str unless defined $str;
    $str =~ s/\A\s+//;
    $str =~ s/\s+\z//;
    return $str;
}
# return iso8601 format
# Build an ISO-8601-style timestamp from the sample's collection_date,
# collection_time and collection_timezone metadata.
# Returns:
#   ""                           - no collection_date metadata
#   "YYYY-MM-DD"                 - date only
#   "YYYY-MM-DDThh:mm:ss"        - date plus time
#   "...Z" or "...[+-]hhmm"      - with timezone
#   a string containing "ERROR"  - some piece failed to parse
sub collection_date {
    my ($self) = @_;
    my $collection_date_value = trim($self->get_metadata_value('collection_date', 'sample'));
    my $collection_time_value = trim($self->get_metadata_value('collection_time', 'sample'));
    my $collection_timezone_value = trim($self->get_metadata_value('collection_timezone', 'sample'));
    ### date
    my $collection_date="";
    unless($collection_date_value) {
        return "";
    }
    my ($year, $month, $day);
    # accepted forms: YYYY-MM-DD or YYYYMMDD
    if ($collection_date_value =~ /-/) {
        ($year, $month, $day) = $collection_date_value =~ /^(\d\d\d\d)-(\d\d)-(\d\d)$/
    } else {
        ($year, $month, $day) = $collection_date_value =~ /^(\d\d\d\d)(\d\d)(\d\d)$/
    }
    unless (defined($year) && defined($month) && defined($day)) {
        return "ERROR: Could not parse date (".$collection_date_value.")";
    }
    # specifying month only (day==0) is not possible, thus will set day to 1.
    if ($day == 0) {
        $day = 1
    }
    if ( ($year > 3000) || ($month > 12) || ($day > 31) ) {
        return "ERROR: Could not parse date (".$collection_date_value.")";
    }
    $collection_date = sprintf("%04d", $year)."-".sprintf("%02d", $month)."-".sprintf("%02d", $day);
    ### time
    unless($collection_time_value) {
        return $collection_date;
    }
    # remove UTC from time
    if ($collection_time_value =~ /UTC/) {
        $collection_time_value =~ s/\s*UTC\s*//;
        if (length($collection_time_value) == 0) {
            return $collection_date;
        }
    }
    my ($hour, $minute, $second);
    # accepted forms: h+:m+:s+ or hhmmss
    if ($collection_time_value =~ /:/) {
        ($hour, $minute, $second) = $collection_time_value =~ /^(\d+):(\d+):(\d+)$/;
    } else {
        ($hour, $minute, $second) = $collection_time_value =~ /^(\d\d)(\d\d)(\d\d)$/;
    }
    unless (defined($hour) && defined($minute) && defined($second)) {
        return "ERROR: Could not parse time (".$collection_time_value.")";
    }
    # NOTE(review): bounds are inclusive, so minute==60 / second==60 are
    # accepted -- confirm whether that leniency is intentional
    if ( ($hour > 24) || ($minute > 60) || ($second > 60) ) {
        return "ERROR: Could not parse time (".$collection_time_value.")";
    }
    $collection_date .= "T" . sprintf("%02d", $hour).":".sprintf("%02d", $minute).":".sprintf("%02d", $second);
    ### timezone
    unless($collection_timezone_value) {
        return $collection_date;
    }
    # remove UTC from timezone
    if ($collection_timezone_value =~ /UTC/) {
        $collection_timezone_value =~ s/\s*UTC\s*//;
        if (length($collection_timezone_value) == 0) {
            # plain "UTC" timezone -> Zulu suffix
            return $collection_date."Z";
        }
    }
    # extract sign
    my ($sign, $day_string) = $collection_timezone_value =~ /^([+-])(.*)/;
    # NOTE(review): from here on error text is APPENDED to the already-built
    # date, so callers receive a mixed "date + ERROR..." string -- confirm intent
    unless (defined($sign)) {
        return $collection_date."ERROR timezone has no sign in (".$collection_timezone_value.")";
    }
    unless (defined($day_string)) {
        return $collection_date."ERROR no string after sign in (".$collection_timezone_value.")";
    }
    my ($tz_hour, $tz_minute);
    # accepted forms: h+:m+, h+mm, or bare hours (minutes default to 0)
    if ($day_string =~ /:/) {
        ($tz_hour, $tz_minute) = $day_string =~ /^(\d+):(\d+)$/;
    } else {
        ($tz_hour, $tz_minute) = $day_string =~ /^(\d+)(\d\d)$/;
        unless (defined($tz_hour)) {
            ($tz_hour) = $day_string =~ /^(\d+)$/;
            if (defined($tz_hour)) {
                $tz_minute = 0;
            }
        }
    }
    unless (defined $tz_hour) {
        return $collection_date."ERROR tz_hour not parsed in (".$day_string.")";
    }
    if ($tz_hour > 12) {
        return $collection_date."ERROR tz_hour > 12 in (".$day_string.")";
    }
    unless (defined $tz_minute) {
        return $collection_date."ERROR tz_minute not parsed in (".$day_string.")";
    }
    if ($tz_minute > 60) {
        return $collection_date."ERROR tz_minute > 60 in (".$day_string.")";
    }
    # prefix hour and minute with zero if needed
    $collection_date .= $sign.sprintf("%02d", $tz_hour).sprintf("%02d", $tz_minute);
    return $collection_date;
    # (dead code below kept for reference)
    #my $time_set = [];
    #foreach my $tag (('collection_date', 'collection_time', 'collection_timezone')) {
    #  last if (($tag eq 'collection_timezone') && (@$time_set == 0));
    #  my $val = $self->get_metadata_value($tag, 'sample');
    #  if ($val) { push @$time_set, $val; }
    #}
    #return join(" ", @$time_set);
}
# Sample 'env_package' metadata value ('' when absent).
sub env_package_type {
    my ($self) = @_;
    my $ep = $self->get_metadata_value('env_package', 'sample');
    return $ep;
}
# Look up $tag in the metadata of the related object named by $type
# ('sample', 'library', 'primary_project', ...). Returns '' when that
# object is missing or the tag is not set.
sub get_metadata_value {
    my ($self, $tag, $type) = @_;
    my $obj = $self->$type;
    return '' unless defined $obj;
    my $md = $obj->data($tag);
    return exists($md->{$tag}) ? $md->{$tag} : '';
}
# MIxS metadata rows (from fetch_browsepage_viewable) keyed by metagenome id.
sub jobs_mixs_metadata_fast {
    my ($self, $mgids) = @_;
    my %by_id;
    foreach my $row (@{ $self->fetch_browsepage_viewable(undef, $mgids) }) {
        $by_id{ $row->{metagenome_id} } = $row;
    }
    return \%by_id;
}
######################
# Class methods
#####################
# Jobs of $user still in the pipeline (not completed/deleted).
# Pulls live progress from AWE; when AWE is unreachable, falls back to
# minimal info from the local DB.  With $count_only set, returns just the
# number of in-progress jobs, otherwise an arrayref of browse-page rows.
sub fetch_browsepage_in_progress {
    my ($self, $user, $count_only) = @_;
    unless (ref($user) && $user->isa("WebServerBackend::User")) {
        return [];
    }
    # get mgrast token
    #my $mgrast_token = undef;
    #if ($Conf::mgrast_oauth_name && $Conf::mgrast_oauth_pswd) {
    #    my $key = encode_base64($Conf::mgrast_oauth_name.':'.$Conf::mgrast_oauth_pswd);
    #    my $rep = Auth::globus_token($key);
    #    $mgrast_token = $rep ? $rep->{access_token} : undef;
    #}
    #### changed because globus has hard time handling multiple tokens
    # FIX: '.' binds tighter than '||', so the original
    #   "mgrast ".$Conf::mgrast_oauth_token || undef
    # made the '|| undef' dead code and sent a bogus "mgrast " header when no
    # token is configured.  Only build the header when a token exists.
    my $mgrast_token = $Conf::mgrast_oauth_token ? "mgrast ".$Conf::mgrast_oauth_token : undef;
    # set json handle
    my $agent = LWP::UserAgent->new;
    my $json = JSON->new;
    $json = $json->utf8();
    $json->max_size(0);
    $json->allow_nonref;
    # map AWE task descriptions to display titles
    my $stage_titles = {
        'upload'        => 'Upload',
        'preprocess_qc' => 'Sequence Filtering',
        'dereplication' => 'Dereplication',
        'screen'        => 'Sequence Screening',
        'genecalling'   => 'Gene Calling',
        'cluster_aa90'  => 'Gene Clustering',
        'loadAWE'       => 'Calculating Sims',
        'sims'          => 'Processing Sims',
        'loadDB'        => 'Loading Database',
        'done'          => 'Finalizing Data' };
    # get awe data
    my $stats = {};
    eval {
        my @args = ('Authorization', $mgrast_token);
        my $get = $agent->get($Conf::awe_url.'/job?query&limit=0&info.user=mgu'.$user->{_id}, @args);
        $stats = $json->decode( $get->content );
    };
    # awe is down, get minimal info from DB
    if ($@ || (! $stats) || (exists($stats->{error}) && $stats->{error})) {
        print STDERR "AWE job info retrieval failed for user ".$user->{_id}.": $@".($stats && $stats->{error} ? $stats->{error} : "")."\n";
        # get job info
        my $dbh = $self->_master()->db_handle();
        my $sql = "select _id, job_id, name, metagenome_id from Job where owner=".$user->{_id}." and metagenome_id is not null and (viewable=0 or viewable is null)";
        my $sth = $dbh->prepare($sql);
        $sth->execute;
        my $jobdata = $sth->fetchall_arrayref();
        $sth->finish;
        # get jobs to skip
        $sql = "select job from JobAttributes where tag='deleted' or tag='no_sims_found'";
        $sth = $dbh->prepare($sql);
        $sth->execute;
        my %skip = map { $_->[0], 1 } @{ $sth->fetchall_arrayref() };
        $sth->finish;
        # get display jobs: _id, job_id, name, metagenome_id
        my %id2job = map { $_->[1], $_ } grep { ! exists $skip{$_->[0]} } @$jobdata;
        if ($count_only) {
            return scalar(keys %id2job);
        } else {
            my $data_table = [];
            foreach my $job_id (keys %id2job) {
                push(@$data_table, {
                    job_id          => $job_id,
                    metagenome_id   => $id2job{$job_id}[3],
                    metagenome_name => $id2job{$job_id}[2],
                    states          => [],
                    status          => 'stage unknown: running' });
            }
            return $data_table;
        }
    # build progress from AWE
    } else {
        # keep only jobs that are neither completed nor deleted
        my $running = [];
        foreach my $job (@{$stats->{data}}) {
            unless (($job->{state} eq 'completed') || ($job->{state} eq 'deleted')) {
                push @$running, $job;
            }
        }
        if ($count_only) {
            return scalar(@$running);
        } else {
            my $data_table = [];
            foreach my $job (@$running) {
                my $row = {
                    job_id          => $job->{info}{userattr}{job_id},
                    metagenome_id   => $job->{info}{userattr}{id},
                    metagenome_name => $job->{info}{userattr}{name},
                    states          => [],
                    status          => undef };
                my $i = 1;
                $row->{metagenome_id} =~ s/^mgm//;
                # first unfinished task (or the last task) defines the overall status
                foreach my $stage (@{$job->{tasks}}) {
                    if (! $row->{status} && ($stage->{remainwork} > 0 || $i == scalar(@{$job->{tasks}}))) {
                        $row->{status} = ($stage_titles->{$stage->{cmd}->{description}} ? $stage_titles->{$stage->{cmd}->{description}} : $stage->{cmd}->{description}).": ".$stage->{state};
                    }
                    push(@{$row->{states}}, { stage => $stage_titles->{$stage->{cmd}->{description}} ? $stage_titles->{$stage->{cmd}->{description}} : $stage->{cmd}->{description}, status => $stage->{state} });
                    $i++;
                }
                push(@$data_table, $row);
            }
            return $data_table;
        }
    }
}
# Metadata-enriched job listing for the browse page.
# Selection precedence: explicit $mgids list > user rights > public only.
# Returns an arrayref of row hashes combining Job columns, key statistics
# and MIxS metadata (project / sample / library).
sub fetch_browsepage_viewable {
    my ($self, $user, $mgids) = @_;
    my $mddb = MGRAST::Metadata->new();
    my $jobselopt = "";
    my $user_id = "";
    if ($mgids && (@$mgids > 0)) {
        $jobselopt = "viewable=1 and metagenome_id in (".join(",", map {"'$_'"} @$mgids).")";
    }
    elsif (ref $user and ( $user->isa("WebServerBackend::User"))) {
        $user_id = $user->_id();
        if ($user->has_star_right('view', 'metagenome')) {
            $jobselopt = "viewable=1";
        } else {
            my $userjobs = $user->has_right_to(undef, 'view', 'metagenome');
            # FIX: guard the '*' check so an empty rights list does not compare undef
            if (@$userjobs and $userjobs->[0] eq '*') {
                $jobselopt = "viewable=1";
            } elsif ( @$userjobs > 0 ) {
                $jobselopt = "viewable=1 and (public=1 or metagenome_id in (".join(",", map {"'$_'"} @$userjobs)."))";
            } else {
                $jobselopt = "viewable=1 and public=1";
            }
        }
    } else {
        $jobselopt = "viewable=1 and public=1";
    }
    # metadata
    my $data = [];
    my $dbh = $self->_master()->db_handle();
    my $jsql = "select _id, job_id, metagenome_id, name, public, owner, sequence_type from Job where job_id is not null and ".$jobselopt;
    my $jobs = $dbh->selectall_arrayref($jsql);
    # FIX: bail out before building "job in ()" (a SQL syntax error) when the
    # selection matched no jobs
    unless ($jobs && @$jobs) {
        return [];
    }
    my @jids = map { $_->[1] } @$jobs;
    my $jmd  = $mddb->get_jobs_metadata_fast(\@jids);
    my $tags = ['bp_count_raw','sequence_count_raw','average_length_raw', 'drisee_score_raw', 'alpha_diversity_shannon'];
    my $ssql = "select job, tag, value from JobStatistics where job in (".join(",", map {$_->[0]} @$jobs).") and tag in (".join(",", map {"'$_'"} @$tags).")";
    my $tmp  = $dbh->selectall_arrayref($ssql);
    my $stat = {};
    $stat->{$_->[0]}{$_->[1]} = $_->[2] for @$tmp;
    foreach my $job (@$jobs) { #_id, job_id, metagenome_id, name, public, owner, sequence_type
        my $row = { '_id'             => $job->[0],
                    'job_id'          => $job->[1],
                    'metagenome_id'   => $job->[2],
                    'name'            => $job->[3] || '',
                    'public'          => ($job->[4]) ? 1 : 0,
                    'shared'          => ($job->[4]) ? '' : ($job->[5] eq $user_id) ? 0 : 1,
                    'bp_count'        => $stat->{$job->[0]}{bp_count_raw} || 0,
                    'sequence_count'  => $stat->{$job->[0]}{sequence_count_raw} || 0,
                    'average_length'  => $stat->{$job->[0]}{average_length_raw} || '',
                    'drisee'          => $stat->{$job->[0]}{drisee_score_raw} || '',
                    'alpha_diversity' => $stat->{$job->[0]}{alpha_diversity_shannon} || '',
                    'sequence_type'   => $job->[6]
                  };
        # project-level metadata (PI info, project name/id)
        if (exists $jmd->{$job->[1]}{project}) {
            my $proj = $jmd->{$job->[1]}{project};
            $proj->{id} =~ s/^mgp//;
            $row->{project} = $proj->{name};
            $row->{project_id} = $proj->{id};
            $row->{pi} = exists($proj->{data}{PI_lastname}) ? $proj->{data}{PI_lastname} : '';
            $row->{pi_firstname} = exists($proj->{data}{PI_firstname}) ? $proj->{data}{PI_firstname} : '';
            $row->{pi_email} = exists($proj->{data}{PI_email}) ? $proj->{data}{PI_email} : '';
        }
        # sample-level metadata (environment, location, physical parameters)
        if (exists $jmd->{$job->[1]}{sample}) {
            my $samp = $jmd->{$job->[1]}{sample};
            my $dt = [];
            foreach my $tag (('collection_date', 'collection_time', 'collection_timezone')) {
                my $val = exists($samp->{data}{$tag}) ? $samp->{data}{$tag} : '';
                if ($val) { push @$dt, $val; }
            }
            $row->{collection_date} = (@$dt > 0) ? join(' ', @$dt) : '';
            $row->{biome} = exists($samp->{data}{biome}) ? $samp->{data}{biome} : '';
            $row->{feature} = exists($samp->{data}{feature}) ? $samp->{data}{feature} : '';
            $row->{material} = exists($samp->{data}{material}) ? $samp->{data}{material} : '';
            $row->{env_package} = exists($samp->{data}{env_package}) ? $samp->{data}{env_package} : (exists($jmd->{$job->[1]}{env_package}) ? $jmd->{$job->[1]}{env_package}{type} : '');
            $row->{altitude} = exists($samp->{data}{altitude}) ? $samp->{data}{altitude} : (exists($samp->{data}{elevation}) ? $samp->{data}{elevation} : '');
            $row->{depth} = exists($samp->{data}{depth}) ? $samp->{data}{depth} : '';
            $row->{location} = exists($samp->{data}{location}) ? $samp->{data}{location} : '';
            $row->{country} = exists($samp->{data}{country}) ? $samp->{data}{country} : '';
            $row->{latitude} = exists($samp->{data}{latitude}) ? $samp->{data}{latitude} : '';
            $row->{longitude} = exists($samp->{data}{longitude}) ? $samp->{data}{longitude} : '';
            $row->{temperature} = exists($samp->{data}{temperature}) ? $samp->{data}{temperature} : '';
            $row->{ph} = exists($samp->{data}{ph}) ? $samp->{data}{ph} : '';
            $row->{health_disease_stat} = exists($samp->{data}{health_disease_stat}) ? $samp->{data}{health_disease_stat} : 'NA';
        }
        # library-level metadata (sequence type override, sequencing method)
        if (exists $jmd->{$job->[1]}{library}) {
            my $lib = $jmd->{$job->[1]}{library};
            if ($lib->{type}) { $row->{sequence_type} = $lib->{type}; }
            $row->{'sequencing method'} = exists($lib->{data}{seq_meth}) ? $lib->{data}{seq_meth} : '';
        }
        push(@$data, $row);
    }
    return $data;
}
# Highest job_id and highest numeric metagenome_id currently in Job.
# Returns ("0", "0") when the table is empty.
sub last_id {
    my ($self) = @_;
    my $row = $self->_master()->db_handle()->selectrow_arrayref("SELECT max(job_id), max(metagenome_id + 0) FROM Job");
    return ($row->[0] || "0", $row->[1] || "0");
}
# Number of jobs created within the last $days days (default 30).
sub count_recent {
    my ($self, $days) = @_;
    $days ||= 30;
    my $dbh = $self->_master()->db_handle();
    my $row = $dbh->selectrow_arrayref("select count(_id) from Job where created_on > current_timestamp - interval ".$days." day");
    return $row->[0];
}
# Sum of raw basepair counts across all jobs (JobStatistics 'bp_count_raw').
sub count_total_bp {
    my ($self) = @_;
    my $row = $self->_master()->db_handle()->selectrow_arrayref("select sum(value) from JobStatistics where tag = 'bp_count_raw'");
    return $row->[0];
}
# Jobs that belong to at least one project. $public == 0 selects private
# jobs, anything else selects public ones. Returns raw rows of
# (metagenome_id, name, sequence_type, file_size_raw, public, viewable).
sub in_projects {
    my ($self, $public) = @_;
    my $pub_option = ((defined $public) && ($public == 0))
        ? "(Job.public = 0 or Job.public is NULL)"
        : "Job.public = 1";
    my $dbh = $self->_master()->db_handle();
    my $sql = "select metagenome_id, name, sequence_type, file_size_raw, public, viewable from Job where $pub_option and exists (select ProjectJob.job from ProjectJob where ProjectJob.job = Job._id)";
    return $dbh->selectall_arrayref($sql);
}
# Jobs that belong to no project. $public == 0 selects private jobs,
# anything else selects public ones. Returns raw rows of
# (metagenome_id, name, sequence_type, file_size_raw, public, viewable).
sub without_project {
    my ($self, $public) = @_;
    my $pub_option = ((defined $public) && ($public == 0))
        ? "(Job.public = 0 or Job.public is NULL)"
        : "Job.public = 1";
    my $dbh = $self->_master()->db_handle();
    my $sql = "select metagenome_id, name, sequence_type, file_size_raw, public, viewable from Job where $pub_option and not exists (select ProjectJob.job from ProjectJob where ProjectJob.job = Job._id)";
    return $dbh->selectall_arrayref($sql);
}
# Sum of raw sequence counts across all jobs (JobStatistics 'sequence_count_raw').
sub count_total_sequences {
    my ($self) = @_;
    my $row = $self->_master()->db_handle()->selectrow_arrayref("select sum(value) from JobStatistics where tag = 'sequence_count_raw'");
    return $row->[0];
}
# Count jobs that have an assigned job_id; restricted to $user's own jobs
# when a user object is given.
# NOTE: this definition redefines the earlier count_all in this file.
sub count_all {
    my ($self, $user) = @_;
    my $sql = "SELECT count(_id) from Job where job_id is not null";
    if ($user and ref $user) {
        $sql = "SELECT count(_id) FROM Job where owner=".$user->_id." and job_id is not null";
    }
    my $row = $self->_master()->db_handle()->selectrow_arrayref($sql);
    return $row->[0];
}
# Count public, viewable jobs.
# NOTE: this definition redefines the earlier count_public in this file.
sub count_public {
    my ($self) = @_;
    my $row = $self->_master()->db_handle()->selectrow_arrayref("SELECT count(*) FROM Job where public and viewable");
    return $row->[0];
}
# Count public, viewable jobs with sequence_type 'WGS'.
sub count_public_wgs {
    my ($self) = @_;
    my $row = $self->_master()->db_handle()->selectrow_arrayref("SELECT count(*) FROM Job where public and viewable and sequence_type = 'WGS' ");
    return $row->[0];
}
# Count public, viewable jobs with sequence_type 'Amplicon'.
sub count_public_amplicon {
    my ($self) = @_;
    my $row = $self->_master()->db_handle()->selectrow_arrayref("SELECT count(*) FROM Job where public and viewable and sequence_type = 'Amplicon'");
    return $row->[0];
}
# Format a unix epoch (defaults to the current time) as a local-time
# "YYYY-MM-DD hh:mm:ss" string.
sub get_timestamp {
    my ($self, $epoch) = @_;
    $epoch ||= time;
    my @lt = localtime($epoch);
    return sprintf("%4d-%02d-%02d %02d:%02d:%02d",
                   $lt[5]+1900, $lt[4]+1, $lt[3], $lt[2], $lt[1], $lt[0]);
}
#################
# delete methods
#################
# Delete a metagenome on behalf of $user: detach from its project, hide it,
# flag it 'deleted' (with $reason), remove its rights, purge its Cassandra
# data, and delete its AWE jobs and Shock nodes.
# Returns (1, "") on success, (0, $error_message) on refusal or failure.
sub user_delete {
    my ($self, $user, $reason) = @_;
    my $jobdbm = $self->_master();
    my $mgid = $self->metagenome_id;
    my $jobid = $self->job_id;
    # public metagenomes may never be deleted
    if ($self->public) {
        return(0, "Unable to delete metagenome '$mgid' as it has been made public. If someone is sharing this data with you please contact them with inquiries. However, if you believe you have reached this message in error please contact the <a href='mailto:mg-rast\@rt.mcs.anl.gov'>MG-RAST help desk</a>.");
    }
    unless( $user && ($user->has_right(undef, 'delete', 'metagenome', $mgid) || $user->has_star_right('delete','metagenome')) ) {
        return (0, "Unable to delete metagenome '$mgid'. If someone is sharing this data with you please contact them with inquiries. However, if you believe you have reached this message in error please contact the <a href='mailto:mg-rast\@mcs.anl.gov'>MG-RAST mailing list</a>.");
    }
    # remove from project
    if ($self->primary_project) {
        $self->primary_project->remove_job($self);
    }
    # using argument 0 does not work, argument 'null' sets viewable to 0
    $self->viewable('null');
    # set status as deleted
    my $message = $reason || 'deleted by '.$user->login;
    $self->data('deleted', $message);
    # delete rights
    my $webappdb = DBMaster->new(-database => $Conf::webapplication_db,
                                 -backend  => $Conf::webapplication_backend,
                                 -host     => $Conf::webapplication_host,
                                 -user     => $Conf::webapplication_user);
    my $job_rights = $webappdb->Rights->get_objects( { data_type => 'metagenome', data_id => $mgid } );
    foreach my $r (@$job_rights) {
        $r->delete;
    }
    # delete cassandra data
    # (note: 'use' runs at compile time even inside a sub; harmless here)
    use Inline::Python qw(py_eval);
    my $import = q|import sys; sys.path.insert(1, "|.$Conf::pylib_dir.q|"); from mgrast_cassandra import *|;
    py_eval($import);
    my $chdl = Inline::Python::Object->new('__main__', 'JobHandle', $Conf::cassandra_m5nr);
    $chdl->delete_job($jobid);
    $chdl->close();
    ######## delete AWE / Shock ##########
    # get mgrast token
    #my $mgrast_token = undef;
    #if ($Conf::mgrast_oauth_name && $Conf::mgrast_oauth_pswd) {
    #    my $key = encode_base64($Conf::mgrast_oauth_name.':'.$Conf::mgrast_oauth_pswd);
    #    my $rep = Auth::globus_token($key);
    #    $mgrast_token = $rep ? $rep->{access_token} : undef;
    #}
    #### changed because globus has hard time handling multiple tokens
    # FIX: '.' binds tighter than '||', so the original
    #   "mgrast ".$Conf::mgrast_oauth_token || undef
    # made the '|| undef' dead code and produced a bogus "mgrast " header when
    # no token is configured.  Only build the header when a token exists.
    my $mgrast_token = $Conf::mgrast_oauth_token ? "mgrast ".$Conf::mgrast_oauth_token : undef;
    my @auth = ('Authorization', $mgrast_token);
    # get handles
    my $agent = LWP::UserAgent->new;
    my $json = JSON->new;
    $json = $json->utf8();
    $json->max_size(0);
    $json->allow_nonref;
    # get AWE job
    my $ajobs = [];
    eval {
        my $get = $agent->get($Conf::awe_url.'/job?query&limit=0&info.name='.$jobid, @auth);
        $ajobs = $json->decode( $get->content )->{data};
    };
    # delete AWE job
    if ($@) {
        return (0, "Unable to get metagenome '$mgid' from AWE: ".$@);
    } else {
        foreach my $j (@$ajobs) {
            eval {
                $agent->delete($Conf::awe_url.'/job/'.$j->{id}, @auth);
            };
            if ($@) {
                return (0, "Unable to delete metagenome '$mgid' from AWE: ".$@);
            }
        }
    }
    # get shock nodes
    my $nodes = [];
    eval {
        my $get = $agent->get($Conf::shock_url.'/node?query&limit=0&type=metagenome&id=mgm'.$mgid, @auth);
        $nodes = $json->decode( $get->content )->{data};
    };
    # delete shock nodes
    if ($@) {
        return (0, "Unable to get metagenome '$mgid' files from Shock: ".$@);
    } else {
        # modify shock nodes
        foreach my $n (@$nodes) {
            eval {
                $agent->delete($Conf::shock_url.'/node/'.$n->{id}, @auth);
            };
            if ($@) {
                return (0, "Unable to delete metagenome '$mgid' from Shock: ".$@);
            }
        }
    }
    return (1, "");
}
sub delete {
    # Remove this job record together with every dependent database object:
    # webapp rights, pipeline stages, statistics, project links, attributes
    # and jobgroup links, then delete the job row itself. Returns 1.
    my ($self) = @_;
    # get a web app master
    my $webapp_dbm = DBMaster->new(-database => $Conf::webapplication_db,
                                   -backend  => $Conf::webapplication_backend,
                                   -host     => $Conf::webapplication_host,
                                   -user     => $Conf::webapplication_user,
                                  );
    # get the job master
    my $dbm = $self->_master();
    unless (ref($webapp_dbm)) {
        die "Could not initialize WebApplication DBMaster in Job->delete";
    }
    # drop all access rights granted on this metagenome
    my $rights = $webapp_dbm->Rights->get_objects( { data_type => 'metagenome',
                                                     data_id   => $self->metagenome_id } );
    $_->delete() for @$rights;
    # drop dependent job records, one object class at a time
    # (same order as before: stages, statistics, project refs, attributes, jobgroup refs)
    for my $class (qw(PipelineStage JobStatistics ProjectJob JobAttributes JobgroupJob)) {
        my $objects = $dbm->$class->get_objects( { job => $self } );
        $_->delete() for @$objects;
    }
    # finally delete the job row itself
    $self->SUPER::delete(@_);
    return 1;
}
1;
| paczian/MG-RAST | src/MGRAST/lib/JobDB/Job.pm | Perl | bsd-2-clause | 50,109 |
#!/usr/bin/perl
use utf8; # Source code is UTF-8
use open ':utf8';
use Storable; # to retrieve hash from disk
binmode STDIN, ':utf8';
#binmode STDOUT, ':utf8';
use strict;
use XML::LibXML;
use Error qw(:try);
#read xml from STDIN
#my $parser = XML::LibXML->new({encoding => 'utf-8'});
my $dom = XML::LibXML->load_xml( IO => *STDIN);
# Main annotation pass: walk every sentence <s> element and attach dependency
# heads/labels for demonstratives, numerals, clitics, person/possessive/case
# suffixes, postpositions, agentives and genitives via XPath queries.
# FIX applied below (case-suffix section): the original declared $prevSibling
# twice in a single statement ("my $prevSibling = my $prevSibling = ...").
foreach my $sentence ( $dom->getElementsByTagName('s'))
{
	# attach chay/kay/jaqay to following root, if no suffixes are in between, as 'det'
	my @determiners = $sentence->findnodes("descendant::terminal[morph/tag[text()='PrnDem']]");
	foreach my $det (@determiners)
	{
		my $nextSibling = @{$det->findnodes("following-sibling::*[1]")}[-1];
		if($nextSibling && $nextSibling->exists("morph/tag[contains(., 'NRoot')] or (morph/tag[contains(., 'VRoot')] and morph/tag[text()='NS']) or morph/tag[text()='NP']"))
		{
			&insertNode($nextSibling,$det);
			&setLabel($det,'det');
		}
	}
	# attach numerals as quant to following NRoot, if not preceded by another NRootNUM
	#(in this case, it's a complex number, leave for manual annotation), set label to 'qnt'
	# huk gets no label, as it can be used to mark indefiniteness as well, in this case the label would be 'mod', not 'qnt'
	my @numbers = $sentence->findnodes("descendant::terminal[morph/tag[text()='NRootNUM']]");
	foreach my $num (@numbers)
	{
		my $nextSibling = @{$num->findnodes("following-sibling::*[1]")}[-1];
		if($nextSibling && $nextSibling->exists("morph/tag[contains(., 'NRoot')] or (morph/tag[contains(., 'VRoot')] and morph/tag[text()='NS']) or morph/tag[text()='NP']"))
		{
			&insertNode($nextSibling,$num);
			if(!$num->exists("word[text()='huk' or text()='huq' or text()='Huk' or text()='Huq' ]"))
			{
				&setLabel($num,'qnt');
			}
		}
	}
	# attach topic suffix to its root as 'topic'
	my @topics = $sentence->findnodes("descendant::terminal[morph/tag[text()='+Top' or text()='+QTop']]");
	foreach my $top (@topics)
	{
		my $root = @{$top->findnodes("preceding-sibling::terminal[pos[contains(.,'Root')]]")}[-1];
		&insertNode($root,$top);
		&setLabel($top, 'topic');
		# if root is chay -> chayqa, insert label 'linker' into root
		if($root->exists("word[starts-with(.,'chay') or starts-with(.,'Chay')]") )
		{
			&setLabel($root,'linker');
		}
	}
	# negation -chu, if its root is a verbal root, attach -chu to its root as 'neg', else leave for manual annotation
	my @negations = $sentence->findnodes("descendant::terminal[morph/tag[text()='+Intr_Neg']]");
	foreach my $neg (@negations)
	{
		my $vroot = @{$neg->findnodes("preceding-sibling::terminal[pos[contains(.,'Root')]]")}[-1];
		if($vroot->exists("pos[contains(.,'VS')] or morph/tag[text()='VRoot']"))
		{
			&insertNode($vroot,$neg);
			&setLabel($neg, 's.neg')
		}
	}
	# -pas: if its root is an interrogative pronoun -> attach it as 'mod'
	# else: attach to root, but leave label for manual annotation
	my @additives = $sentence->findnodes("descendant::terminal[morph/tag[text()='+Add']]");
	foreach my $add (@additives)
	{
		my $root = @{$add->findnodes("preceding-sibling::terminal[pos[contains(.,'Root')]]")}[-1];
		&insertNode($root,$add);
		if($root->exists("morph/tag[text()='PrnInterr']"))
		{
			&setLabel($add, 'mod')
		}
	}
	# -ña: attach to its root, unless preceded by -lla, then attach to lla (-llaña -> special meaning)
	my @discontinuatives = $sentence->findnodes("descendant::terminal[morph/tag[text()='+Disc']]");
	foreach my $disc (@discontinuatives)
	{
		my $prevSibling = @{$disc->findnodes("preceding-sibling::*[1]")}[-1];
		# check if preceded by -lla (+Lim)
		if($prevSibling->exists("morph/tag[text()='+Lim_Aff']"))
		{
			&insertNode($prevSibling, $disc);
			&setLabel($disc, 'mod');
		}
		else
		{
			my $root = @{$disc->findnodes("preceding-sibling::terminal[pos[contains(.,'Root')]]")}[-1];
			&insertNode($root,$disc);
			&setLabel($disc, 'mod')
		}
	}
	# -lla, -ya and -raq: attach to root as 'mod'
	my @modifiers = $sentence->findnodes("descendant::terminal[morph/tag[text()='+Lim_Aff' or text()='+Emph' or text()='+Cont'] and pos[text()='Amb'] ]");
	foreach my $mod (@modifiers)
	{
		my $root = @{$mod->findnodes("preceding-sibling::terminal[pos[contains(.,'Root')]]")}[-1];
		&insertNode($root,$mod);
		&setLabel($mod, 'mod');
	}
	# -taq: attach to root, but leave label for manual annotation
	my @taqs = $sentence->findnodes("descendant::terminal[morph/tag[text()='+Con_Intr'] ]");
	foreach my $taq (@taqs)
	{
		my $root = @{$taq->findnodes("preceding-sibling::terminal[pos[contains(.,'Root')]]")}[-1];
		&insertNode($root,$taq);
	}
	# attach possessive suffixes: if preceded by a nominalizing suffix, attach to this as 's.poss.subj', if no NS, attach to root as 's.poss'
	my @possessives = $sentence->findnodes("descendant::terminal[pos[contains(., 'NPers')]]");
	foreach my $poss (@possessives)
	{
		my $prevSibling = @{$poss->findnodes("preceding-sibling::*[1]")}[-1];
		# check if preceded by NS
		if($prevSibling->exists("pos[contains(., 'NS')]"))
		{
			&insertNode($prevSibling, $poss);
			&setLabel($poss, 's.poss.subj');
		}
		# else attach to root
		else
		{
			my $nroot = @{$poss->findnodes("preceding-sibling::terminal[pos[contains(.,'Root')]]")}[-1];
			&insertNode($nroot,$poss);
			&setLabel($poss, 's.poss');
		}
	}
	# nominaliations: nominalizing suffixes depende on their root as 'ns'
	my @nominalizingSuffixes = $sentence->findnodes("descendant::terminal[pos[starts-with(.,'NS')]]");
	#find corresponding root and attach ns to the root
	foreach my $ns (@nominalizingSuffixes)
	{
		my $vroot = @{$ns->findnodes("preceding-sibling::terminal[pos[contains(.,'Root')]]")}[-1];
		&insertNode($vroot,$ns);
		&setLabel($ns, 'ns');
		# if -pti/qti, -spa or -stin -> insert label 'sub' in root, except with hina (hinaspa) -> label is 'linker'
		if($ns->exists("morph/tag[contains(.,'SS') or text()='+DS']") && !$vroot->exists("word[contains(.,'hina') or contains(., 'Hina')]"))
		{
			&setLabel($vroot, 'sub');
		}
		elsif($ns->exists("morph/tag[contains(.,'SS') or text()='+DS']") && $vroot->exists("word[contains(.,'hina') or contains(., 'Hina')]"))
		{
			&setLabel($vroot, 'linker');
		}
	}
	# causative and reflexive: depend on their root as 'mod'
	my @causAndRflx = $sentence->findnodes("descendant::terminal[morph/tag[text()='+Caus' or text()='+Rflx_Int']]");
	#find corresponding root and attach ns to the root
	foreach my $causrflx (@causAndRflx)
	{
		my $vroot = @{$causrflx->findnodes("preceding-sibling::terminal[pos[contains(.,'Root')]]")}[-1];
		&insertNode($vroot,$causrflx);
		&setLabel($causrflx, 'mod');
	}
	# attach aspect suffix to root as 'mod', except if in IG with person suffix (e.g. -shani), in this case, the label is set according to person suffix(es)
	my @aspects = $sentence->findnodes("descendant::terminal[morph/tag[text()='+Prog'] and not(pos[contains(.,'VPers')])]");
	#find corresponding root and attach ns to the root
	foreach my $asp (@aspects)
	{
		my $vroot = @{$asp->findnodes("preceding-sibling::terminal[pos[contains(.,'Root')]]")}[-1];
		&insertNode($vroot , $asp);
		&setLabel($asp, 'mod');
	}
	# attach object and subject markers to their root (correct in almost all cases,
	# except with 'object raising', but those are rare and need to be manually checked)
	# also, we annotate indirect objects have to be checked manually
	my @vpers = $sentence->findnodes("descendant::terminal[pos[contains(.,'VPers')]]");
	foreach my $personsuffix (@vpers)
	{
		my $vroot = @{$personsuffix->findnodes("preceding-sibling::terminal[pos[contains(.,'Root')]]")}[-1];
		&insertNode($vroot,$personsuffix);
		#object markers, exclude portmanteau forms
		if($personsuffix->exists("child::morph/tag[text()='+1.Obj' or text()='+2.Obj']"))
		{
			&setLabel($personsuffix, 's.obj');
		}
		# subject markers, exclude portmanteau forms
		elsif($personsuffix->exists("child::morph/tag[contains(.,'Subj') and not(contains(.,'Obj'))]"))
		{
			&setLabel($personsuffix, 's.subj');
		}
		# portmanteau forms
		else
		{
			&setLabel($personsuffix,'s.subj_obj')
		}
	}
	#get all terminal nodes that are case suffixes and make them the head of their noun
	my @caseSuffixes = $sentence->findnodes("descendant::terminal[pos[text()='Cas'] or word[text()='-yuq'] or word[text()='-sapa'] ]");
	foreach my $cs (@caseSuffixes)
	{
		# get terminal node that contains root of this word
		#my $root = @{$cs->findnodes("preceding-sibling::terminal/pos[text()='Root']/..")}[-1];
		my $root = @{$cs->findnodes("preceding-sibling::terminal/pos[contains(., 'Root')]/..")}[-1];
		# check if preceding node is also a case suffix, if it is, leave for manual annotation
		# (fixed: the original declared $prevSibling twice in one statement)
		my $prevSibling = @{$cs->findnodes("preceding-sibling::*[1]")}[-1];
		if(!$prevSibling->exists("pos[text()='Cas']"))
		{
			&insertNode($cs,$root);
			#set label for root (can be s.arg or s.arg.claus), distinction only with case suffixes, not with -yuq/-sapa
			if($cs->exists("child::pos[text()='Cas']") and $root->exists("child::morph/tag[contains(.,'VRoot')]"))
			{
				&setLabel($root,'s.arg.claus')
			}
			else
			{
				&setLabel($root,'s.arg');
			}
			#$sentence->removeChild($root)
		}
		;
	}
	# postposition -manta pacha
	my @pachas = $sentence->findnodes("descendant::terminal[word[text()='pacha'] and pos[text()='NRoot']]");
	foreach my $pacha (@pachas)
	{
		my $prevSibling = @{$pacha->findnodes("preceding-sibling::*[1]")}[-1];
		#print "$prevSibling\n";
		# check if preceded by -manta, and check if preceding sibling exists: pacha is not a suffix, thus might be the first word in the sentence
		if($prevSibling && $prevSibling->exists("morph/tag[text()='+Abl']"))
		{
			&insertNode($pacha, $prevSibling);
			&setLabel($prevSibling, 'p.arg');
			&setLabel($pacha, 'tmp');
		}
	}
	# hina as postposition (if preceded by NRoot or case suffix)
	my @hinas = $sentence->findnodes("descendant::terminal[word[text()='hina'] and morph/tag[text()='Part_Sim']]");
	foreach my $hina (@hinas)
	{
		my $prevSibling = @{$hina->findnodes("preceding-sibling::*[1]")}[-1];
		my $grandparent = $hina->parentNode->parentNode;
		# check if preceded by directly by NRoot or case suffix, and check if preceding sibling exists: hina is not a suffix, thus might be the first word in the sentence
		if($prevSibling && $prevSibling->exists("pos[text()='Cas'] or morph/tag[contains(.,'NRoot')]") && !$prevSibling->exists("word[text()='Pero' or text()='pero']"))
		{
			&insertNode($hina,$prevSibling);
			&setLabel($prevSibling, 'p.arg');
			&setLabel($hina, 'comp');
		}
		# if directly preceded by -chu -> -chu hina 'it seems that, i believe that..' -> epistemic modifier
		# annotate -chu --mod-- hina --epst-- verb (don't attach to verb, too error prone, leave for manual annotation)
		elsif($prevSibling && $prevSibling->exists("morph/tag[text()='+Intr_Neg']"))
		{
			&insertNode($hina, $prevSibling);
			&setLabel($prevSibling, 'mod');
			&setLabel($hina, 'epst');
		}
		# -chu might depend on preceding verb as 's.neg'
		elsif($prevSibling && $prevSibling->exists("morph/tag[contains(.,'VRoot') or contains(.,'VS')]") && $prevSibling->exists("children/terminal/morph/tag[text()='+Intr_Neg']"))
		{
			my $chu = @{$prevSibling->findnodes("children/terminal[morph/tag[text()='+Intr_Neg']]")}[-1];
			&insertNode($hina,$chu);
			&insertNode($prevSibling,$hina);
			&setLabel($chu, 'mod');
			&setLabel($hina, 'epst');
		}
		# hina might be s.arg to a case suffix, in this case, get previous sibling of parent
		elsif($grandparent->exists("pos[text()='Cas']") )
		{
			my $OriginalPrecedingSibling = @{$grandparent->findnodes("preceding-sibling::*[1]")}[-1];
			#check if preceded by noun or case suffix
			if($OriginalPrecedingSibling && $OriginalPrecedingSibling->exists("pos[text()='Cas'] or morph/tag[contains(.,'NRoot')]") && !$OriginalPrecedingSibling->exists("word[text()='Pero' or text()='pero']"))
			{
				&insertNode($hina,$OriginalPrecedingSibling);
				&setLabel($OriginalPrecedingSibling, 'p.arg');
				&setLabel($hina, 'comp');
			}
			# -chu hina-raq -> hina might be child of a case suffix here too
			elsif($OriginalPrecedingSibling && $OriginalPrecedingSibling->exists("morph/tag[text()='+Intr_Neg']"))
			{
				&insertNode($hina,$OriginalPrecedingSibling);
				&setLabel($OriginalPrecedingSibling, 'mod');
				&setLabel($hina, 'epst');
			}
			# verb -chu -hina -ta, possible (?)
			# elsif($OriginalPrecedingSibling && $OriginalPrecedingSibling->exists("morph/tag[contains(.,'VRoot') or contains(.,'VS')]") && $OriginalPrecedingSibling->exists("children/terminal/morph/tag[text()='+Intr_Neg']"))
			# {
			# 	my $chu = @{$OriginalPrecedingSibling->findnodes("children/terminal[morph/tag[text()='+Intr_Neg']]")}[-1];
			# 	&insertNode($hina,$chu);
			# 	&insertNode($OriginalPrecedingSibling,$grandparent);
			# 	&setLabel($chu, 'mod');
			# 	&setLabel($grandparent, 'epst');
			# }
		}
	}
	# ukhu, k'uchu, pata, hawa
	my @postpositions = $sentence->findnodes("descendant::terminal[word[starts-with(., 'ukhu') or starts-with(.,'pata') or starts-with(.,'hawa')]]");
	foreach my $postpos (@postpositions)
	{
		my $prevSibling = @{$postpos->findnodes("preceding-sibling::*[1]")}[-1];
		my $grandparent = $postpos->parentNode->parentNode;
		# check if preceded by directly by NRoot: ukhu etc are not suffixes, thus might be the first word in the sentence
		if($prevSibling && $prevSibling->exists("morph/tag[contains(.,'NRoot')]") && !$prevSibling->exists("word[text()='Pero' or text()='pero']"))
		{
			&insertNode($postpos,$prevSibling);
			&setLabel($prevSibling, 'p.arg');
		}
		# postposition might be s.arg to a case suffix, in this case, get previous sibling of parent
		elsif($grandparent->exists("pos[text()='Cas']") )
		{
			my $OrginalPrecedingSibling = @{$grandparent->findnodes("preceding-sibling::*[1]")}[-1];
			#print $OrginalPrecedingSibling;
			if($OrginalPrecedingSibling && $OrginalPrecedingSibling->exists("morph/tag[contains(.,'NRoot')]") && !$OrginalPrecedingSibling->exists("word[text()='Pero' or text()='pero']"))
			{
				&insertNode($postpos,$OrginalPrecedingSibling);
				&setLabel($OrginalPrecedingSibling, 'p.arg');
			}
		}
	}
	#agentive forms
	# if -q form followed by finite form of copula -> attach -q verbform to copula as 'hab'
	my @agentives = $sentence->findnodes("descendant::terminal[morph/tag[text()='+Ag']]");
	foreach my $ag (@agentives)
	{
		my $vroot = $ag->parentNode->parentNode;
		my $followingSiblingOfParent = @{$vroot->findnodes("following-sibling::*[1]")}[-1];
		if($followingSiblingOfParent && $followingSiblingOfParent->exists("translation[text()='=ser'] and children/terminal[label[contains(.,'subj')]]"))
		{
			&insertNode($followingSiblingOfParent, $vroot);
			&setLabel($vroot, 'hab');
		}
	}
	# genitive forms: attach genitive suffix to following noun as 'poss', if this noun bears an 's.poss'
	my @genitives = $sentence->findnodes("descendant::terminal[morph/tag[text()='+Gen']]");
	foreach my $gen (@genitives)
	{
		my $followingSibling = @{$gen->findnodes("following-sibling::*[1]")}[-1];
		if($followingSibling)
		{
			my $followingSiblingsChild = @{$followingSibling->findnodes("children/terminal[1]")}[-1];
			# check if folliwing word is a noun and bear a possessive suffix, if next word is a case suffix, check if the condition applies to its child (the noun)
			if($followingSibling && $followingSibling->exists("(morph/tag[contains(.,'NRoot')] or children/terminal/pos[text()='NS']) and children/terminal/label[text()='s.poss']"))
			{
				&insertNode($followingSibling,$gen);
				&setLabel($gen,'poss');
			}
			elsif($followingSibling && $followingSiblingsChild && $followingSibling->exists("pos[text()='Cas']") && $followingSiblingsChild->exists("(morph/tag[contains(.,'NRoot')] or children/terminal/pos[text()='NS']) and children/terminal/label[text()='s.poss']"))
			{
				&insertNode($followingSiblingsChild,$gen);
				&setLabel($gen,'poss');
			}
			# same check, but if possessive suffix depends as 's.poss.subj', set label of possesive noun to 'poss.subj'
			elsif($followingSibling && $followingSibling->exists("(morph/tag[contains(.,'NRoot')] or children/terminal/pos[text()='NS']) and children/terminal/label[text()='s.poss.subj']"))
			{
				&insertNode($followingSibling,$gen);
				&setLabel($gen,'poss.subj');
			}
			elsif($followingSibling && $followingSiblingsChild && $followingSibling->exists("pos[text()='Cas']") && $followingSiblingsChild->exists("(morph/tag[contains(.,'NRoot')] or children/terminal/pos[text()='NS']) and children/terminal/children/terminal/label[text()='s.poss.subj']"))
			{
				&insertNode($followingSiblingsChild,$gen);
				&setLabel($gen,'poss.subj');
			}
		}
	}
	# set label of evidential to 'ev'
	my @evidentials = $sentence->findnodes("descendant::terminal[morph/tag[contains(.,'+DirE') or contains(.,'+Asmp') or contains(., '+IndE') ]]");
	foreach my $ev (@evidentials)
	{
		&setLabel($ev,'ev');
	}
	# set label of Spanish conjunctions to 'linker' (y,o,u,pero..)
	my @spanishLinkers = $sentence->findnodes("descendant::terminal[word[text()='pero' or text()='Pero' or text()='y' or text()='o' or text()='u' or text()='entonces' or text()='Entonces' ]]");
	foreach my $spanishLinker (@spanishLinkers)
	{
		&setLabel($spanishLinker,'linker');
	}
	# annotate huq(k) kaq noun -> huq -- pred --kaq --mod --noun
	my @huqs = $sentence->findnodes("descendant::terminal[word[text()='huk' or text()='huq' or text()='Huk' or text()='Huq']]");
	foreach my $huq (@huqs)
	{
		my $nextSibling = @{$huq->findnodes("following-sibling::*[1]")}[-1];
		if($nextSibling && $nextSibling->exists("word[text()='ka'] and morph/tag[contains(.,'VRoot')]"))
		{
			my $nextSiblingsChild = @{$nextSibling->findnodes("children/terminal[1]")}[-1];
			if($nextSiblingsChild && $nextSiblingsChild->exists("morph/tag[text()='+Ag']"))
			{
				&insertNode($nextSibling, $huq);
				&setLabel($huq,'pred');
				# check if nextSibling of kaq is a noun, might also be a case suffix with a noun as child.
				# attach huq kaq only to nominal roots, not nominalized verbs, as in this case, it might be a subject instead of a modifier
				my $followingSiblingOfNextSibling = @{$nextSibling->findnodes("following-sibling::*[1]")}[-1];
				if($followingSiblingOfNextSibling && $followingSiblingOfNextSibling->exists("morph/tag[contains(.,'NRoot')]"))
				{
					&insertNode($followingSiblingOfNextSibling, $nextSibling);
					&setLabel($nextSibling, 'mod');
				}
				elsif($followingSiblingOfNextSibling && $followingSiblingOfNextSibling->exists("pos[text()='Cas']"))
				{
					my $childOfCaseSuffix = @{$followingSiblingOfNextSibling->findnodes("children/terminal[1]")}[-1];
					if($childOfCaseSuffix && $childOfCaseSuffix->exists("morph/tag[contains(.,'NRoot')]"))
					{
						&insertNode($childOfCaseSuffix,$nextSibling);
						&setLabel($nextSibling,'mod');
					}
				}
			}
		}
	}
	# print $sentence->getAttribute('id');
	# print "\n";
}
# Stamp the PML namespace on the corpus root so downstream PDT/PML tools accept it.
my $corpus= @{$dom->getElementsByTagName('quechua_corpus')}[0];
$corpus->setAttribute('xmlns','http://ufal.mff.cuni.cz/pdt/pml/');
# print new xml to stdout
my $docstring = $dom->toString;
# collapse lines that contain only whitespace (left behind by node moves)
$docstring=~ s/\n\s+\n/\n/g;
print STDOUT $docstring;
#insert a node as child, check if node has already children
#insert a node as child, check if node has already children
# Reparents $child under $parent's <children> wrapper element,
# creating the wrapper first when $parent does not have one yet.
# Relies on the file-level $dom to create new elements.
sub insertNode{
	my ($parent, $child) = @_;
	my $wrapper;
	if ($parent->exists('children'))
	{
		# reuse the existing <children> element
		$wrapper = @{$parent->find('children')}[0];
	}
	else
	{
		# no wrapper yet: create and attach one
		$wrapper = $dom->createElement('children');
		$parent->appendChild($wrapper);
	}
	$wrapper->appendChild($child);
}
# Overwrites the text content of $node's first <label> child with $labeltext.
sub setLabel{
	my ($node, $labeltext) = @_;
	my $label = @{$node->getChildrenByLocalName('label')}[0];
	# wipe any previous label text, then write the new one
	$label->removeChildNodes();
	$label->appendText($labeltext);
}
# SOAP::WSDL-generated complex type for the AdWords v201406 CustomerFeed.
# NOTE(review): this file is auto-generated (see POD below); edits here will be
# lost on regeneration.
package Google::Ads::AdWords::v201406::CustomerFeed;
use strict;
use warnings;

# elementFormDefault="qualified" per the AdWords WSDL
__PACKAGE__->_set_element_form_qualified(1);

sub get_xmlns { 'https://adwords.google.com/api/adwords/cm/v201406' };

# this type carries no XML attributes
our $XML_ATTRIBUTE_CLASS;
undef $XML_ATTRIBUTE_CLASS;

sub __get_attr_class {
    return $XML_ATTRIBUTE_CLASS;
}

use Class::Std::Fast::Storable constructor => 'none';
use base qw(Google::Ads::SOAP::Typelib::ComplexType);

{ # BLOCK to scope variables

# inside-out attribute storage; accessors generated by _factory below
my %feedId_of :ATTR(:get<feedId>);
my %matchingFunction_of :ATTR(:get<matchingFunction>);
my %placeholderTypes_of :ATTR(:get<placeholderTypes>);
my %status_of :ATTR(:get<status>);

# declare element order, storage, XSD types and XML element names
__PACKAGE__->_factory(
    [ qw(        feedId
        matchingFunction
        placeholderTypes
        status

    ) ],
    {
        'feedId' => \%feedId_of,
        'matchingFunction' => \%matchingFunction_of,
        'placeholderTypes' => \%placeholderTypes_of,
        'status' => \%status_of,
    },
    {
        'feedId' => 'SOAP::WSDL::XSD::Typelib::Builtin::long',
        'matchingFunction' => 'Google::Ads::AdWords::v201406::Function',
        'placeholderTypes' => 'SOAP::WSDL::XSD::Typelib::Builtin::int',
        'status' => 'Google::Ads::AdWords::v201406::CustomerFeed::Status',
    },
    {

        'feedId' => 'feedId',
        'matchingFunction' => 'matchingFunction',
        'placeholderTypes' => 'placeholderTypes',
        'status' => 'status',
    }
);

} # end BLOCK



1;
=pod
=head1 NAME
Google::Ads::AdWords::v201406::CustomerFeed
=head1 DESCRIPTION
Perl data type class for the XML Schema defined complexType
CustomerFeed from the namespace https://adwords.google.com/api/adwords/cm/v201406.
CustomerFeeds are used to link a feed to the customer using a matching function, making the feed's feed items available in the customer's ads for substitution.
=head2 PROPERTIES
The following properties may be accessed using get_PROPERTY / set_PROPERTY
methods:
=over
=item * feedId
=item * matchingFunction
=item * placeholderTypes
=item * status
=back
=head1 METHODS
=head2 new

Constructor. Accepts the properties listed above as named arguments, e.g.
C<< Google::Ads::AdWords::v201406::CustomerFeed->new( feedId => $id, status => $status ) >>.
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| gitpan/GOOGLE-ADWORDS-PERL-CLIENT | lib/Google/Ads/AdWords/v201406/CustomerFeed.pm | Perl | apache-2.0 | 2,187 |
#-----------------------------------------------------------
# itempos.pl
#
# History:
# 20191111 - Added default value to $jmp if $item{extver} cannot be determined.
#
# References
# http://c0nn3ct0r.blogspot.com/2011/11/windows-shellbag-forensics.html
# Andrew's Python code for Registry Decoder
# http://code.google.com/p/registrydecoder/source/browse/trunk/templates/template_files/ShellBag.py
# Joachim Metz's shell item format specification
# http://download.polytechnic.edu.na/pub4/download.sourceforge.net/pub/
# sourceforge/l/project/li/liblnk/Documentation/Windows%20Shell%20Item%20format/
# Windows%20Shell%20Item%20format.pdf
# Converting DOS Date format
# http://msdn.microsoft.com/en-us/library/windows/desktop/ms724274(v=VS.85).aspx
#
# Thanks to Willi Ballenthin and Joachim Metz for the documentation they
# provided, Andrew Case for posting the Registry Decoder code, and Kevin
# Moore for writing the shell bag parser for Registry Decoder, as well as
# assistance with some parsing.
#
# copyright 2013 Quantum Analytics Research, LLC
# Author: H. Carvey, keydet89@yahoo.com
#-----------------------------------------------------------
package itempos;
use strict;
use Time::Local;
# Plugin metadata consumed by the RegRipper framework (hive type, report
# category, OS mask, version) plus the standard accessor subs.
my %config = (hive          => "NTUSER\.DAT",
              hivemask      => 16,
              output        => "report",
              category      => "User Activity",
              osmask        => 16, #Win7/Win2008R2
              hasShortDescr => 1,
              hasDescr      => 0,
              hasRefs       => 0,
              version       => 20130514);

sub getConfig{return %config}

sub getShortDescr {
	return "Shell/Bags/1/Desktop ItemPos* value parsing; Win7 NTUSER.DAT hives";
}
sub getDescr{}
sub getRefs {}
sub getHive {return $config{hive};}
sub getVersion {return $config{version};}

# cached copy of the plugin version used in report banners
my $VERSION = getVersion();
# Entry point called by RegRipper: opens the hive, collects every ItemPos*
# value under the Win7 Bags\1\Desktop key and (for XP) under each
# ShellNoRoam\Bags\<n>\Shell key, and parses each value via parseBagEntry().
sub pluginmain {
	my $class = shift;
	my $hive = shift;
	::logMsg("Launching itempos v.".$VERSION);
	::rptMsg("itempos v.".$VERSION); # banner
	::rptMsg("(".$config{hive}.") ".getShortDescr()."\n"); # banner
	my %itempos = ();
	my $reg = Parse::Win32Registry->new($hive);
	my $root_key = $reg->get_root_key;

	# Windows 7 location of the Desktop item-position data
	my $key_path = "Software\\Microsoft\\Windows\\Shell\\Bags\\1\\Desktop";
	my $key;
	if ($key = $root_key->get_subkey($key_path)) {
		::rptMsg($key_path);
		my $lw = $key->get_timestamp();
		::rptMsg("LastWrite: ".gmtime($lw));
		::rptMsg("");
		my @vals = $key->get_list_of_values();
		# collect the raw binary data of every ItemPos* value
		foreach my $v (@vals) {
			my $name = $v->get_name();
			if ($name =~ m/^ItemPos/) {
				$itempos{$name} = $v->get_data();
			}
		}

		if (scalar keys %itempos > 0) {
			foreach my $i (keys %itempos) {
				::rptMsg("Value: ".$i);
				# column header for the per-item table
				::rptMsg(sprintf "%-10s|%-20s|%-20s|%-20s|Name","Size","Modified","Accessed","Created");
				::rptMsg(sprintf "%-10s|%-20s|%-20s|%-20s|"."-" x 10,"-" x 10,"-" x 20,"-" x 20,"-" x 20);
				parseBagEntry($itempos{$i});
				::rptMsg("");
			}
		}
		else {
			::rptMsg("No ItemPos* values found.");
		}
	}
	else {
		::rptMsg($key_path." not found.");
	}
#	::rptMsg("");
# The following was added on 20130514 to address Windows XP systems
	# Windows XP stores equivalent data per numbered bag under ShellNoRoam
	$key_path = "Software\\Microsoft\\Windows\\ShellNoRoam\\Bags";
	if ($key = $root_key->get_subkey($key_path)) {
		my @sk = $key->get_list_of_subkeys();
		if (scalar(@sk) > 0) {
			foreach my $s (@sk) {
				my %itempos = ();
				my @vals = $s->get_subkey("Shell")->get_list_of_values();
				if (scalar(@vals) > 0) {
					foreach my $v (@vals) {
						my $name = $v->get_name();
						if ($name =~ m/^ItemPos/) {
							$itempos{$name} = $v->get_data();
						}
					}
					if (scalar keys %itempos > 0) {
						::rptMsg($key_path."\\".$s->get_name()."\\Shell");
						foreach my $i (keys %itempos) {
							::rptMsg("Value: ".$i);
							::rptMsg(sprintf "%-10s|%-20s|%-20s|%-20s|Name","Size","Modified","Accessed","Created");
							::rptMsg(sprintf "%-10s|%-20s|%-20s|%-20s|"."-" x 10,"-" x 10,"-" x 20,"-" x 20,"-" x 20);
							parseBagEntry($itempos{$i});
							::rptMsg("");
						}
					}
				}
			}
		}
		else {
# No subkeys
		}
	}
	else {
		::rptMsg($key_path." not found\.");
	}
}
#-----------------------------------------------------------
#
#-----------------------------------------------------------
#-----------------------------------------------------------
# parseBagEntry()
#-----------------------------------------------------------
# Walks the binary ItemPos* value, which holds a sequence of shell items.
# The first item starts at offset 24; each item is a size-prefixed record
# followed by 8 bytes of position data (hence the "+ 8" in the advance).
# Item type 0x1f is a system folder; 0x31/0x32/0x3a are file/folder entries.
# Output is one report line per recognized item.
sub parseBagEntry {
	my $data = shift;
	my $ofs = 24;
	my $len = length($data);
	while ($ofs < $len) {
		my %item = ();
		# 2-byte little-endian size of this shell item
		my $sz = unpack("v",substr($data,$ofs,2));
		# NOTE(review): this inner $data deliberately shadows the outer value
		# buffer for the remainder of the loop body (the item's own bytes)
		my $data = substr($data,$ofs,$sz);
		my $type = unpack("C",substr($data,2,1));

		if ($type == 0x1f) {
# System Folder
			%item = parseSystemBagItem($data);
			::rptMsg(sprintf "%-10s|%-20s|%-20s|%-20s|".$item{name},"","","","");
		}
		elsif ($type == 0x31 || $type == 0x32 || $type == 0x3a) {
# Folder or Zip File
			%item = parseFolderItem($data);
			my ($m,$a,$c);
			# convertDOSDate() returns the string "0" sentinel when the raw
			# DOS timestamp fields were zero; print blanks in that case
			(exists $item{mtime_str} && $item{mtime_str} ne "0") ? ($m = $item{mtime_str}) : ($m = "");
			(exists $item{atime_str} && $item{atime_str} ne "0") ? ($a = $item{atime_str}) : ($a = "");
			(exists $item{ctime_str} && $item{ctime_str} ne "0") ? ($c = $item{ctime_str}) : ($c = "");
			my $str = sprintf "%-10s|%-20s|%-20s|%-20s|",$item{size},$m,$a,$c;
			::rptMsg($str.$item{name});
		}
		else {
# Unknown type; skipped silently
		}
		# advance past this item plus its 8 bytes of item-position data
		$ofs += $sz + 8;
	}
}
#-----------------------------------------------------------
# parseSystemBagItem()
#-----------------------------------------------------------
# Parses a type-0x1f (system folder) shell item.
# Byte 2 is the item type, byte 3 an ID that maps to a well-known folder
# name; unknown IDs fall back to the GUID stored in bytes 4-19.
# Returns a hash with keys: type, id, name.
sub parseSystemBagItem {
	my $data = shift;
	# well-known shell folder IDs -> display names
	my %folder_name_for = (0x00 => "Explorer",
	                       0x42 => "Libraries",
	                       0x44 => "Users",
	                       0x4c => "Public",
	                       0x48 => "My Documents",
	                       0x50 => "My Computer",
	                       0x58 => "My Network Places",
	                       0x60 => "Recycle Bin",
	                       0x68 => "Explorer",
	                       0x70 => "Control Panel",
	                       0x78 => "Recycle Bin",
	                       0x80 => "My Games");
	my %item;
	$item{type} = unpack("C",substr($data,2,1));
	$item{id}   = unpack("C",substr($data,3,1));
	# unknown IDs: fall back to rendering the embedded GUID
	$item{name} = exists $folder_name_for{$item{id}}
	            ? $folder_name_for{$item{id}}
	            : parseGUID(substr($data,4,16));
	return %item;
}
#-----------------------------------------------------------
# parseFolderItem()
#-----------------------------------------------------------
# Parses a file/folder shell item (types 0x31/0x32/0x3a).
# Layout: size at +4, DOS modification date at +8, NUL-terminated short
# (8.3) name, then an extension block located by scanning for the 0xbeef
# signature, which carries the DOS creation and access dates and the long
# (Unicode) name. Returns a hash: type, size, mtime/ctime/atime (+ *_str)
# and name (long name when present, otherwise the short name).
sub parseFolderItem {
	my $data = shift;
	my %item = ();
	my $ofs_mdate = 0x08;
	$item{type} = unpack("C",substr($data,2,1));
	$item{size} = unpack("V",substr($data,4,4));

	# modification time: two 16-bit DOS date/time words
	my @m = unpack("vv",substr($data,$ofs_mdate,4));
	($item{mtime_str},$item{mtime}) = convertDOSDate($m[0],$m[1]);

	# scan byte-by-byte for the NUL that ends the short name; the
	# ($cnt + 1) % 2 check keeps the scan aligned on even lengths
	my $ofs_shortname = $ofs_mdate + 6;
	my $tag = 1;
	my $cnt = 0;
	my $str = "";
	while($tag) {
		my $s = substr($data,$ofs_shortname + $cnt,1);
		if ($s =~ m/\x00/ && ((($cnt + 1) % 2) == 0)) {
			$tag = 0;
		}
		else {
			$str .= $s;
			$cnt++;
		}
	}
#	$str =~ s/\x00//g;
	my $shortname = $str;
	my $ofs = $ofs_shortname + $cnt + 1;
# Read progressively, 1 byte at a time, looking for 0xbeef
	# (signature of the extension block that holds dates and long name)
	$tag = 1;
	$cnt = 0;
	while ($tag) {
		if (unpack("v",substr($data,$ofs + $cnt,2)) == 0xbeef) {
			$tag = 0;
		}
		else {
			$cnt++;
		}
	}
	# extension block version, 4 bytes before the 0xbeef signature
	$item{extver} = unpack("v",substr($data,$ofs + $cnt - 4,2));
	$ofs = $ofs + $cnt + 2;

	# creation then access time, each as DOS date/time word pairs
	@m = unpack("vv",substr($data,$ofs,4));
	($item{ctime_str},$item{ctime}) = convertDOSDate($m[0],$m[1]);
	$ofs += 4;
	@m = unpack("vv",substr($data,$ofs,4));
	($item{atime_str},$item{atime}) = convertDOSDate($m[0],$m[1]);

	# distance from the access time to the long-name field varies with
	# the extension block version; unknown versions default to 34
	# (default added 20191111 per the file history)
	my $jmp;
	if ($item{extver} == 0x03) {
		$jmp = 8;
	}
	elsif ($item{extver} == 0x07) {
		$jmp = 26;
	}
	elsif ($item{extver} == 0x08) {
		$jmp = 30;
	}
	else {
		$jmp = 34;
	}

	$ofs += $jmp;

	# long name is UTF-16LE, terminated by a double NUL
	$str = substr($data,$ofs,length($data) - 30);
	my $longname = (split(/\x00\x00/,$str,2))[0];
	$longname =~ s/\x00//g;

	if ($longname ne "") {
		$item{name} = $longname;
	}
	else {
		$item{name} = $shortname;
	}
	return %item;
}
#-----------------------------------------------------------
# convertDOSDate()
# subroutine to convert 4 bytes of binary data into a human-
# readable format. Returns both a string and a Unix-epoch
# time.
#-----------------------------------------------------------
#-----------------------------------------------------------
# convertDOSDate()
# Converts a DOS date word and time word into a human-readable
# "YYYY-MM-DD HH:MM:SS" string plus a Unix epoch value.
# A zero date or time word yields (0,0).
#-----------------------------------------------------------
sub convertDOSDate {
	my ($date,$time) = @_;
	return (0,0) if ($date == 0x00 || $time == 0x00);
	# seconds are stored in 2-second granularity
	my $sec = sprintf "%02d", (($time & 0x1f) * 2);
	$sec = 59 if ($sec == 60);
	my $min = sprintf "%02d", (($time & 0x7e0) >> 5);
	my $hr  = sprintf "%02d", (($time & 0xF800) >> 11);
	my $day = sprintf "%02d", ($date & 0x1f);
	my $mon = sprintf "%02d", (($date & 0x1e0) >> 5);
	# years count from 1980 (the DOS epoch)
	my $yr  = (($date & 0xfe00) >> 9) + 1980;
	my $epoch = timegm($sec,$min,$hr,$day,($mon - 1),$yr);
	return ("$yr-$mon-$day $hr:$min:$sec",$epoch);
}
#-----------------------------------------------------------
# parseGUID()
# Takes 16 bytes of binary data, returns a string formatted
# as an MS GUID.
#-----------------------------------------------------------
#-----------------------------------------------------------
# parseGUID()
# Takes 16 bytes of binary data, returns a string formatted
# as an MS GUID.
# Fix: Data2/Data3 are now zero-padded to 4 hex digits
# ("%04x" instead of "%x") so the output always matches the
# standard 8-4-4-4-12 layout, even when those fields have
# leading zeros.
#-----------------------------------------------------------
sub parseGUID {
	my $data = shift;
	# GUID layout: Data1 (LE 32-bit), Data2/Data3 (LE 16-bit),
	# Data4 printed as 2 + 6 bytes of raw hex
	my $d1 = unpack("V",substr($data,0,4));
	my $d2 = unpack("v",substr($data,4,2));
	my $d3 = unpack("v",substr($data,6,2));
	my $d4 = unpack("H*",substr($data,8,2));
	my $d5 = unpack("H*",substr($data,10,6));
	return sprintf "{%08x-%04x-%04x-$d4-$d5}",$d1,$d2,$d3;
}
#-----------------------------------------------------------
# printData()
# subroutine used primarily for debugging; takes an arbitrary
# length of binary data, prints it out in hex editor-style
# format for easy debugging
#-----------------------------------------------------------
#-----------------------------------------------------------
# printData()
# subroutine used primarily for debugging; takes an arbitrary
# length of binary data and reports it through ::rptMsg() in a
# hex editor-style layout (offset, hex bytes, printable ASCII)
#-----------------------------------------------------------
sub printData {
	my $data = shift;
	my $len = length($data);
	# number of 16-byte rows needed to cover the buffer
	my $rows = int($len/16);
	$rows++ if ($len%16);
	foreach my $row (0..($rows - 1)) {
		my $left = $len - ($row * 16);
		my $n = ($left < 16) ? $left : 16;
		my $seg = substr($data,$row * 16,$n);
		my @bytes;
		my $ascii = "";
		foreach my $i (0..($n - 1)) {
			my $b = substr($seg,$i,1);
			push(@bytes,unpack("H2",$b));
			my $v = ord($b);
			# printable range only; everything else becomes a dot
			$ascii .= ($v > 0x1f && $v < 0x7f) ? chr($v) : ".";
		}
		# NOTE(review): as in the original code, the ASCII column is
		# concatenated into the sprintf format string, so a literal
		# '%' in the data would be treated as a format directive.
		::rptMsg(sprintf "0x%08x: %-47s ".$ascii,($row * 16),join(' ',@bytes));
	}
}
1;
| esaunders/autopsy | thirdparty/rr-full/plugins/itempos.pl | Perl | apache-2.0 | 10,457 |
#!/usr/bin/perl
use strict;
use warnings;
use HTTP::Response;
use LWP::UserAgent;
use Test::More;
my @responses;
# override LWP::UserAgent->new to force attachment of our request/response interceptor
{
    # Wrap LWP::UserAgent->new so every constructed UA answers from the
    # canned @responses list instead of touching the network.
    # Fixes: removed dead "my @pairs = @responses;" and corrected the
    # comment that called this a response_send handler.
    no warnings 'redefine';
    my $oldnew = \&LWP::UserAgent::new;
    *LWP::UserAgent::new = sub {
        # create object via the real constructor
        my $ua = $oldnew->(@_);
        # attach our request_send handler (runs before any real I/O)
        $ua->add_handler('request_send', sub {
            my($request, $ua, $h) = @_;
            # look for a matching response: entries are either code refs
            # (invoked with the request) or hashes keyed by request URI
            foreach (@responses) {
                my $response;
                if(ref($_) eq 'CODE') {
                    $response = $_->($request);
                }
                elsif(ref($_) eq 'HASH') {
                    $response = $_->{$request->uri};
                }
                if($response) {
                    pass('expected request: ' . $request->uri);
                    # clone so callers cannot mutate the stored copy
                    return $response->clone();
                }
            }
            # no response found, return a 500 ISE
            fail('unexpected request: ' . $request->uri);
            return HTTP::Response->new(500);
        });
        return $ua;
    };
}
# Queue one or more canned responses (code refs or URI-keyed hashes)
# for the intercepting request_send handler to consult.
sub addMockResponses {
    push @responses, $_ for @_;
}
# Drop every queued canned response.
sub clearMockResponses {
    splice @responses;
}
| frett/perl-Authen-CAS-UserAgent | t/MockUserAgent.pl | Perl | bsd-3-clause | 1,061 |
=pod
=head1 NAME
openssl-rand,
rand - generate pseudo-random bytes
=head1 SYNOPSIS
B<openssl rand>
[B<-help>]
[B<-out> I<file>]
[B<-rand> I<file(s)>]
[B<-base64>]
[B<-hex>]
I<num>
=head1 DESCRIPTION
The B<rand> command outputs I<num> pseudo-random bytes after seeding
the random number generator once. As in other B<openssl> command
line tools, PRNG seeding uses the file I<$HOME/>B<.rnd> or B<.rnd>
in addition to the files given in the B<-rand> option. A new
I<$HOME>/B<.rnd> or B<.rnd> file will be written back if enough
seeding was obtained from these sources.
=head1 OPTIONS
=over 4
=item B<-help>
Print out a usage message.
=item B<-out> I<file>
Write to I<file> instead of standard output.
=item B<-rand> I<file(s)>
Use specified file or files or EGD socket (see L<RAND_egd(3)>)
for seeding the random number generator.
Multiple files can be specified separated by an OS-dependent character.
The separator is B<;> for MS-Windows, B<,> for OpenVMS, and B<:> for
all others.
=item B<-base64>
Perform base64 encoding on the output.
=item B<-hex>
Show the output as a hex string.
=back
=head1 SEE ALSO
L<RAND_bytes(3)>
=head1 COPYRIGHT
Copyright 2000-2016 The OpenSSL Project Authors. All Rights Reserved.
Licensed under the OpenSSL license (the "License"). You may not use
this file except in compliance with the License. You can obtain a copy
in the file LICENSE in the source distribution or at
L<https://www.openssl.org/source/license.html>.
=cut
| google/google-ctf | third_party/edk2/CryptoPkg/Library/OpensslLib/openssl/doc/apps/rand.pod | Perl | apache-2.0 | 1,484 |
=head1 NAME
Debugging mod_perl Perl Internals
=head1 Description
This document explains how to debug Perl code under mod_perl.
Most of the L<mod_perl 1.0 debug
documentation|docs::1.0::guide::debug> applies to mod_perl 2.0:
=head2 Detecting Hanging Processes
See L<Hanging Processes: Detection and
Diagnostics|docs::1.0::guide::debug/Hanging_Processes__Detection_and_Diagnostics>
for the explanation, but under mp2 to use signals to detect where the
process is spinning, you can't use C<$SIG{USR2}>, you have to use
POSIX signals; i.e., the code becomes:
use Carp ();
use POSIX qw(SIGUSR2);
my $mask = POSIX::SigSet->new( SIGUSR2 );
my $action = POSIX::SigAction->new(\&tell_where_spinning, $mask);
my $oldaction = POSIX::SigAction->new();
POSIX::sigaction(SIGUSR2, $action, $oldaction );
sub tell_where_spinning {
Carp::confess("caught SIGUSR2!");
};
and then:
% kill USR2 <pid_of_the_spinning_process>
and watch for the trace in F<error_log>.
=head1 Maintainers
Maintainer is the person(s) you should contact with updates,
corrections and patches.
=over
=item *
Stas Bekman [http://stason.org/]
=back
=head1 Authors
=over
=item *
Stas Bekman [http://stason.org/]
=back
Only the major authors are listed above. For contributors see the
Changes file.
=cut
| Distrotech/mod_perl | docs/src/docs/2.0/devel/debug/perl.pod | Perl | apache-2.0 | 1,321 |
# Kubernetes Pod manifest for the glusterblock external provisioner:
# a single container whose provisioner identity is supplied via the
# PROVISIONER_NAME environment variable.
apiVersion: v1
kind: Pod
metadata:
  name: glusterblock-provisioner
spec:
  containers:
  -
    env:
    -
      # Provisioner identity; StorageClasses reference this value.
      name: PROVISIONER_NAME
      value: gluster.org/glusterblock
    image: "quay.io/external_storage/glusterblock-provisioner:latest"
    name: glusterblock-provisioner
| dhirajh/external-storage | gluster/block/deploy/glusterblk-provisioner.pod | Perl | apache-2.0 | 305 |
package TestApache::zzz_check_n_requests;
use strict;
use warnings;
use Apache::Constants;
use Apache::Test qw(-withtestmore);
use Apache::TestUtil;
use Apache::SizeLimit;
use constant ONE_MB => 1024;
use constant TEN_MB => ONE_MB * 10;
use constant TWENTY_MB => TEN_MB * 2;
my $i = 0;
my %hash = ();
# Response handler: allocates ~1MB per pass so the process grows past
# the 10MB Apache::SizeLimit ceiling, then asserts that the module
# reports the overrun.
sub handler {
    my $r = shift;
    plan $r, tests => 11;
    Apache::SizeLimit->add_cleanup_handler($r);
    Apache::SizeLimit->set_max_process_size(TEN_MB);
    ## calling this with no argument should cause us to fire
    Apache::SizeLimit->set_check_interval();
    # Each pass is assumed to consume at least 1MB (a scalar is taken
    # to be >= 1K), so ten passes should hold at least 10MB in %hash.
    foreach my $pass (1 .. 10) {
        my @filler = ('x') x ONE_MB;
        $hash{$i++} = \@filler;
        is(
            $i,
            $i,
            "now using $i MB of memory (at least)"
        );
    }
    is(
        1,
        Apache::SizeLimit->_limits_are_exceeded(),
        "we passed the limits and _WILL_ kill the child"
    );
    return Apache::Constants::OK;
}
1;
| Distrotech/Apache-SizeLimit | t/response/TestApache/zzz_check_n_requests.pm | Perl | apache-2.0 | 1,101 |
#!/usr/bin/perl
use strict;
use warnings;
$|++;
use AnyEvent;
use Net::RabbitFoot;
# Connect to the local RabbitMQ broker with the stock guest account on
# the default vhost.
my $conn = Net::RabbitFoot->new()->load_xml_spec()->connect(
    host => 'localhost',
    port => 5672,
    user => 'guest',
    pass => 'guest',
    vhost => '/',
);
my $channel = $conn->open_channel();
# A fanout exchange copies every published message to all bound queues.
$channel->declare_exchange(
    exchange => 'logs',
    type => 'fanout',
);
# Let the broker generate a queue name; 'exclusive' scopes the queue to
# this connection so it is removed when the consumer disconnects.
my $result = $channel->declare_queue( exclusive => 1, );
my $queue_name = $result->{method_frame}->{queue};
# Receive everything published to the 'logs' exchange.
$channel->bind_queue(
    exchange => 'logs',
    queue => $queue_name,
);
print " [*] Waiting for logs. To exit press CTRL-C\n";
# Consumer callback: unwrap the delivered frame and echo its payload.
sub callback {
    my ($frame) = @_;
    my $payload = $frame->{body}->{payload};
    print " [x] $payload\n";
}
# Deliver each incoming message to callback(); no_ack => 1 tells the
# broker not to expect explicit acknowledgements.
$channel->consume(
    on_consume => \&callback,
    queue => $queue_name,
    no_ack => 1,
);
# Park the script in the event loop forever.
AnyEvent->condvar->recv;
| anhzhi/rabbitmq-tutorials | perl/receive_logs.pl | Perl | apache-2.0 | 839 |
#!/usr/bin/perl -w
use strict;
use Bio::DB::Fasta;
my $sUsage = qq(
# This script will extract up_stream regions of genes and store the sequences in a fasta file
Usage:
perl $0
<gff3 file>
<genome fasta file>
<length of up_stream region, 5000 or ... >
<fasta file stor storing up_stream sequences>
);
die $sUsage unless @ARGV >= 4;
my($gff_file, $genome_file, $length_up, $out_fasta) = @ARGV;
# Main
my $genome_obj = Bio::DB::Fasta->new($genome_file);
my %up_stream_regions = parse_gff_file($gff_file, $length_up, $genome_obj);
output(\%up_stream_regions, $genome_obj, $out_fasta);
# Subroutines
# Scan a GFF3 file for 'gene' features and compute, per chromosome, a
# list of [gene_id, start, end] coordinate pairs describing each gene's
# upstream region of length $len_up. For '-' strand genes start > end,
# presumably so Bio::DB::Fasta returns the reverse complement for the
# descending range -- TODO confirm against Bio::DB::Fasta docs.
sub parse_gff_file
{
	my ($gff, $len_up, $genome) = @_;
	my %return_hash;
	open (IN ,"$gff") or die "can't open file $gff\n";
	while(<IN>)
	{
		next if /^#/;
		chomp;
		# GFF columns used: 0=seqid, 2=type, 3=start, 4=end,
		# 6=strand, 8=attributes
		my @line_data = split /\t/, $_;
		my ($chr_id, $type, $gen_start, $gen_end, $strand, $description) = @line_data[0, 2, 3, 4, 6, 8];
		next unless $type eq 'gene';
		# NOTE(review): "my $x = $1 if ..." is a known Perl pitfall --
		# $gene_id may retain a stale value when the ID= match fails;
		# verify every gene row carries an ID attribute.
		my $gene_id = $1 if $description =~ /ID=(\S+)/;
		print STDERR "gene_id: ", $gene_id, "\tDescription: ", $description, "\n";
		my ($up_start, $up_end);
		if ($strand eq '+')
		{
			# upstream window ends at the gene start, clamped at the
			# chromosome origin (NOTE(review): clamp uses 0, though
			# GFF/Bio::DB::Fasta coordinates are 1-based -- confirm)
			$up_start = $gen_start - $len_up + 1;
			$up_start = $up_start>0?$up_start:0;
			$up_end = $gen_start;
			push @{$return_hash{$chr_id}}, [$gene_id, $up_start, $up_end];
		}
		else
		{
			# '-' strand: window extends past the gene end, clamped to
			# the chromosome length
			$up_start = $gen_end + $len_up - 1;
			$up_start = $up_start>$genome->length($chr_id)?$genome->length($chr_id):$up_start;
			$up_end = $gen_end;
			push @{$return_hash{$chr_id}}, [$gene_id, $up_start, $up_end];
		}
	}
	close IN;
	return %return_hash;
}
# Write one FASTA record per stored region: the header joins the
# chromosome and gene IDs with '_', and the sequence is fetched from
# the genome object over the recorded coordinate pair.
sub output
{
	my ($regions, $genome, $file) = @_;
	open (OUT, ">$file") or die "can't open file $file \n";
	foreach my $chr (keys %$regions)
	{
		foreach my $rec (@{$regions->{$chr}})
		{
			my ($gene, $from, $to) = @$rec;
			print OUT ">", join("_",($chr, $gene)),"\n",
				$genome->seq($chr, $from => $to),"\n";
		}
	}
	close OUT;
}
| swang8/Perl_scripts_misc | extract_up_stream_regions.pl | Perl | mit | 1,944 |
package #
Date::Manip::Offset::off272;
# Copyright (c) 2008-2015 Sullivan Beck. All rights reserved.
# This program is free software; you can redistribute it and/or modify it
# under the same terms as Perl itself.
# This file was automatically generated. Any changes to this file will
# be lost the next time 'tzdata' is run.
# Generated on: Wed Nov 25 11:44:43 EST 2015
# Data version: tzdata2015g
# Code version: tzcode2015g
# This module contains data from the zoneinfo time zone database. The original
# data was obtained from the URL:
# ftp://ftp.iana.org/tz
use strict;
use warnings;
require 5.010000;
our ($VERSION);
$VERSION='6.52';
END { undef $VERSION; }
our ($Offset,%Offset);
END {
undef $Offset;
undef %Offset;
}
# Offset string for this generated module and the zones that use it
# (index 0 = zones for which this is the base/LMT offset). NOTE: the
# file header states it is regenerated from tzdata, so manual edits
# here will be overwritten.
$Offset = '-03:38:48';

%Offset = (
   0 => [
      'america/santarem',
      ],
   );
1;
| jkb78/extrajnm | local/lib/perl5/Date/Manip/Offset/off272.pm | Perl | mit | 855 |
# -*- perl -*-
# !!! DO NOT EDIT !!!
# This file was automatically generated.
package Net::Amazon::Validate::ItemSearch::jp::Availability;
use 5.006;
use strict;
use warnings;
# Construct a validator: records 'Music' as the default (unless
# overridden via %options) and appends the full list of legal values
# to the _options array.
sub new {
    my ($class, %options) = @_;
    my $self = {
        '_default' => 'Music',
        %options,
    };
    push @{ $self->{_options} }, $_
        for qw(Apparel Classical DVD Music VHS Video);
    bless $self, $class;
}
# Return the caller-supplied value (validated via find_match) when one
# was given; otherwise fall back to the configured default.
sub user_or_default {
    my ($self, $user) = @_;
    return $self->find_match($user)
        if defined $user && length($user) > 0;
    return $self->default();
}
# Accessor for the configured default value.
sub default {
    my $self = shift;
    return $self->{_default};
}
# Case-insensitively match $value against the legal values, returning
# the canonical spelling; dies when the value is not legal.
sub find_match {
    my ($self, $value) = @_;
    my $wanted = lc $value;
    foreach my $candidate (@{ $self->{_options} }) {
        return $candidate if lc($candidate) eq $wanted;
    }
    die "$value is not a valid value for jp::Availability!\n";
}
1;
__END__
=head1 NAME
Net::Amazon::Validate::ItemSearch::jp::Availability;
=head1 DESCRIPTION
The default value is Music, unless mode is specified.
The list of available values are:
Apparel
Classical
DVD
Music
VHS
Video
=cut
| carlgao/lenga | images/lenny64-peon/usr/share/perl5/Net/Amazon/Validate/ItemSearch/jp/Availability.pm | Perl | mit | 1,266 |
#!/usr/bin/perl
#SMS:xmolna02
# ------------------------------------------------------------------------------
# File: sms.pl
# Date: 2011/04/08
# Author: David Molnar, xmolna02@stud.fit.vutbr.cz
# Project: SMS Compress
# ------------------------------------------------------------------------------
use strict;
use locale;
use utf8;
use encoding 'utf-8';
use XML::Simple;
# process params
# ------------------------------------------------------------------------------
my %sw = process_params();
# print help to stdout
if (defined $sw{help})
{
print_help();
exit;
}
# init (open) I/O files
# ------------------------------------------------------------------------------
# open input file
if (defined $sw{_ifile})
{
open(IFILE, "<:utf8", $sw{_ifile}) ||
exit_error(2, "nelze otevrit vstupni soubor pro cteni!");
}
# open output file for writing
if (defined $sw{_ofile})
{
open(OFILE, ">:utf8", $sw{_ofile}) ||
exit_error(3, "nelze otevrit vystupni soubor pro zapis!");
}
# open xml dictionary file
my $dic;
if (defined $sw{_dfile})
{
$dic = eval { XMLin($sw{_dfile}) };
if ($@)
{
close IFILE;
close OFILE;
exit_error(1, "nelze otevrit/zpracovat XML slovnik!");
}
}
# read everything from input file (or stdin) and save to $input
# ------------------------------------------------------------------------------
my $input = '';
while (my $row = defined $sw{input} ? <IFILE> : <STDIN>)
{
$input .= $row;
}
if (defined $sw{input})
{
close IFILE;
}
my $output = $input;
# r option - remove diacritics
# ------------------------------------------------------------------------------
if (defined $sw{r})
{
$output = remove_diacritic($output);
}
# v option - from xml file
# ------------------------------------------------------------------------------
if (defined $sw{v})
{
$output = process_dic($output, $dic, $sw{e}, $sw{s});
}
# c option - camel notation
# ------------------------------------------------------------------------------
if (defined $sw{c})
{
$output = process_camel($output, $sw{a}, $sw{b});
}
# n option - get count of neccessary sms
# ------------------------------------------------------------------------------
my $sms_count = 0;
if (defined $sw{n})
{
$sms_count = get_sms_count($input, $output);
}
# write results to file (or stdout)
# if switch -n is used then write only count of sms
# ------------------------------------------------------------------------------
my $result = defined $sw{n} ? \$sms_count : \$output;
if (defined $sw{output})
{
print OFILE $$result;
close OFILE;
}
else
{
print $$result;
}
# functions
# ------------------------------------------------------------------------------
# print help to stdout
sub print_help
{
print
"SMS Compress
- Komprese SMS
- Odstraneni ceske diakritiky
- Aplikace pravidel ze slovniku zkratek.
Parametry
--help Vytiskne napovedu
(nelze s nicim)
--input=filename.ext Vstupni soubor obsahujici SMS
--output=filename.text Vystupni soubor s SMS v ASCII
-r Odstraneni diakritiky z SMS
(lze -c a -v, odstraneni diakritiky se provede jako prvni)
-c Camel komprese, viz dalsi nastaveni
-a Komprimovat pouze slova z malych pismen
(lze s -c)
-b Nekomprimovat slova z velkych pismen
(lze s -c, nelze s -a)
-dict=filename Urceni XML slovniku zkratek
(nutne s -v)
-v Aplikace pravidel ze slovniku zkratek
(nutne s -d, lze s -c, lze s -e nebo -s)
-e Aplikace pouze expanzivnich pravidel
(nutne s -v, nelze s -s)
-s Aplikace pouze zkracujicich pravidel
(nutne s -v, nelze s -e)
-n Spocita minimalni pocet SMS\n";
}
# print error message to stderr and exit
sub exit_error
{
    # Report the (Czech) error message on STDERR, then terminate the
    # whole script with the supplied exit status.
    my ($status, $message) = @_;
    print STDERR "Chyba: ", $message, "\n";
    exit $status;
}
# processes params from command line
# Walks @ARGV recognising each switch exactly once; aborts via
# exit_error() on anything unknown or repeated. Filenames land under
# the underscore-prefixed keys (_ifile/_ofile/_dfile), switch presence
# under the switch's own name. Returns the resulting hash.
sub process_params
{
    my %res = ();
    foreach my $item (@ARGV)
    {
        SWITCH:
        {
            $item =~ /^--help$/ && do { $res{help} += 1; last SWITCH; };
            $item =~ /^--input=(.*?)$/ && do { $res{input} += 1;
                                       $res{_ifile} = $1; last SWITCH; };
            $item =~ /^--output=(.*?)$/ && do { $res{output} += 1;
                                        $res{_ofile} = $1; last SWITCH; };
            $item =~ /^--dict=(.*?)$/ && do { $res{dict} += 1;
                                      $res{_dfile} = $1; last SWITCH; };
            $item =~ /^-r$/ && do { $res{r} += 1; last SWITCH; };
            $item =~ /^-c$/ && do { $res{c} += 1; last SWITCH; };
            $item =~ /^-a$/ && do { $res{a} += 1; last SWITCH; };
            $item =~ /^-b$/ && do { $res{b} += 1; last SWITCH; };
            $item =~ /^-v$/ && do { $res{v} += 1; last SWITCH; };
            $item =~ /^-e$/ && do { $res{e} += 1; last SWITCH; };
            $item =~ /^-s$/ && do { $res{s} += 1; last SWITCH; };
            exit_error(1, "Neznamy parameter!");
        }
    }
    # reject any switch that occurred more than once (keys starting
    # with '_' hold filenames, not occurrence counts)
    foreach my $item (keys %res)
    {
        if ($item =~ /^[^_]/ && $res{$item} > 1)
        {
            exit_error(1, "parametre nelze zadat opakovane!");
        }
    }
    # enforce the documented switch-combination rules (see --help text)
    if (defined $res{help} && scalar keys %res > 1)
    { exit_error(1, "--help nelze kombinovat inymi parametrami!"); }
    if (defined $res{a} && !defined $res{c})
    { exit_error(1, "-a lze s -c!"); }
    if (defined $res{b} && (!defined $res{c} || defined $res{a}))
    { exit_error(1, "-b lze s -c a nelze s -a!"); }
    if (defined $res{_dfile} && !defined $res{v})
    { exit_error(1, "--dict lze s -v!"); }
    if (defined $res{e} && !defined $res{v})
    { exit_error(1, "-e lze s -v!"); }
    if (defined $res{s} && (!defined $res{v} || defined $res{e}))
    { exit_error(1, "-s lze s -v a nelze s -e!"); }
    return %res;
}
# removes czech diacritics and returns the result
sub remove_diacritic
{
    # Transliterate accented Czech characters to their ASCII
    # counterparts; any character without a mapping passes through
    # unchanged. Takes the input string as $_[0], returns the result.
    # Fix: direct hash lookup (exists) replaces the original grep over
    # all keys, which cost O(#keys) per input character and shadowed
    # Perl's sort variable $a.
    # "dictionary" for removing czech diacritics characters
    my %char_hash = (á => "a", č => "c", ď => "d", é => "e", ě => "e", í => "i",
                     ň => "n", ó => "o", ř => "r", š => "s", ť => "t", ú => "u",
                     ů => "u", ý => "y", ž => "z",
                     Á => "A", Č => "C", Ď => "D", É => "E", Ě => "E", Í => "I",
                     Ň => "N", Ó => "O", Ř => "R", Š => "S", Ť => "T", Ú => "U",
                     Ů => "U", Ý => "Y", Ž => "Z");
    return join('', map { exists $char_hash{$_} ? $char_hash{$_} : $_ }
                    split('', $_[0]));
}
# convert input to camel notation
# Capitalises the first letter of each word (lowercasing the rest for
# -b/default modes), then strips ALL whitespace from the result.
# $_[0] = input text, $_[1] = -a flag (transform only all-lowercase
# words), $_[2] = -b flag (leave all-uppercase words untouched).
sub process_camel
{
    my $result = $_[0];
    my $asw = $_[1];
    my $bsw = $_[2];
    # switch -a used: only words containing only lowercase
    if (defined $asw)
    {
        $result =~ s/(
                      ^             # beginning of the line
                      |             # or
                      [^[:alpha:]]  # one character that is not a letter
                     )
                     (
                      [[:lower:]]+  # one or more lowercase letters
                     )
                    /$1\u$2/xg;
    }
    # switch -b used: skip words containing only uppercase
    # (a single ()|() alternation did not work here, hence two
    # separate substitutions)
    elsif (defined $bsw)
    {
        $result =~ s/(
                      ^             # beginning of the line
                      |             # or
                      [^[:alpha:]]  # one character that is not a letter
                     )
                     (
                      [[:upper:]]   # one uppercase
                     )
                     (
                      # letters containing min. one lowercase
                      [[:upper:]]*[[:lower:]][[:alpha:]]*
                     )
                    /$1\u$2\L$3\E/xg;
        $result =~ s/(
                      ^             # beginning of the line
                      |             # or
                      [^[:alpha:]]  # one character that is not a letter
                     )
                     (
                      [[:lower:]]   # one lowercase
                     )
                     (
                      [[:alpha:]]*  # zero or more letters
                     )
                    /$1\u$2\L$3\E/xg;
    }
    else
    {
        # default: capitalise every word regardless of its case mix
        $result =~ s/(
                      ^             # beginning of the line
                      |             # or
                      [^[:alpha:]]  # one character that is not a letter
                     )
                     (
                      [[:alpha:]]   # one letter
                     )
                     (
                      [[:alpha:]]*  # zero or more letters
                     )
                    /$1\u$2\L$3\E/xg;
    }
    # clear all white space
    $result =~ s/[[:space:]]+//xg;
    return $result;
}
# process expansion or shortening based on xml dictionary file
# Applies the rules from the parsed XML dictionary ($_[1]) to the text
# in $_[0]. With -e ($_[2]) only expansive rules (abbrev -> text) run;
# with -s ($_[3]) only shortening rules (text -> abbrev); with neither
# flag, both sets run. Matching is case-insensitive unless a rule sets
# casesensitive=1.
# NOTE(review): rules are applied in dictionary order, so the output
# of one substitution can be rewritten by a later rule -- confirm this
# cascade is intended.
sub process_dic
{
    my $result = $_[0];
    my $dic = $_[1];
    my $esw = $_[2];
    my $ssw = $_[3];
    # expansion
    if (defined $esw || (!defined $esw && !defined $ssw))
    {
        foreach my $item (@{$dic->{rule}})
        {
            # only rules carrying the 'expansive' flag expand
            next if (!defined $item->{expansive});
            my $abb = $item->{abbrev};
            my $text = $item->{text};
            my $case = $item->{casesensitive};
            $result =~ s/\Q$abb\E/$text/gi if (!defined $case);
            $result =~ s/\Q$abb\E/$text/g if (defined $case && $case == 1);
        }
    }
    # shortening
    if (defined $ssw || (!defined $esw && !defined $ssw))
    {
        foreach my $item (@{$dic->{rule}})
        {
            # expansive rules are excluded from shortening
            next if (defined $item->{expansive} && $item->{expansive} == 1);
            my $abb = $item->{abbrev};
            my $text = $item->{text};
            my $case = $item->{casesensitive};
            $result =~ s/\Q$text\E/$abb/gi if (!defined $case);
            $result =~ s/\Q$text\E/$abb/g if (defined $case && $case == 1);
        }
    }
    return $result;
}
# get count of necessary sms('s)
# max length of one sms is 160 (70 diacritics)
# max length if more sms used 153 (67 diacritics)
sub get_sms_count
{
    # Returns the minimal number of SMS messages needed for the
    # compressed text ($_[1]); the original text ($_[0]) decides the
    # character limits: 160/153 for plain ASCII, 70/67 when the
    # original contains diacritics (UCS-2 encoding).
    # Fixes: (1) a message fitting in a single SMS previously returned
    # its character count instead of 1; (2) int($len/$part + 0.99) is
    # not a true ceiling and under-counted when $len % $part == 1
    # (e.g. 460 ASCII chars gave 3 instead of 4).
    my ($original, $compressed) = @_;
    my $len = length $compressed;
    # an empty message needs no SMS at all
    return 0 if $len == 0;
    # NOTE(review): the diacritics test inspects the ORIGINAL text while
    # the length comes from the compressed text -- preserved from the
    # original implementation; verify intent.
    my ($single, $part) = ($original eq remove_diacritic($original))
                        ? (160, 153)
                        : (70, 67);
    return 1 if $len <= $single;
    # integer ceiling of $len / $part
    return int(($len + $part - 1) / $part);
}
# end of sms.pl
# ------------------------------------------------------------------------------
| mdavid626/fit | src/IPP_sms compress/sms.pl | Perl | mit | 10,647 |
% Length of a list
% Author: Peter Schneider-Kamp
% terminating
%query: len(i,o).
% len(+Xs, ?N): N is the length of list Xs encoded as a Peano numeral
% (0, s(0), s(s(0)), ...); the cut commits to the base clause for [].
len([],0) :- !.
len(Xs,s(N)) :- tail(Xs,Ys), len(Ys,N).
% tail(+Xs, ?Ys): Ys is Xs without its head; [] is mapped to itself.
tail([],[]).
tail([X|Xs],Xs).
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Logic_Programming_with_Cut/Schneider_Kamp_09/len.pl | Perl | mit | 180 |
package #
Date::Manip::TZ::amedmo00;
# Copyright (c) 2008-2015 Sullivan Beck. All rights reserved.
# This program is free software; you can redistribute it and/or modify it
# under the same terms as Perl itself.
# This file was automatically generated. Any changes to this file will
# be lost the next time 'tzdata' is run.
# Generated on: Wed Nov 25 11:33:43 EST 2015
# Data version: tzdata2015g
# Code version: tzcode2015g
# This module contains data from the zoneinfo time zone database. The original
# data was obtained from the URL:
# ftp://ftp.iana.org/tz
use strict;
use warnings;
require 5.010000;
our (%Dates,%LastRule);
END {
undef %Dates;
undef %LastRule;
}
our ($VERSION);
$VERSION='6.52';
END { undef $VERSION; }
%Dates = (
1 =>
[
[ [1,1,2,0,0,0],[1,1,1,16,26,8],'-07:33:52',[-7,-33,-52],
'LMT',0,[1906,9,1,7,33,51],[1906,8,31,23,59,59],
'0001010200:00:00','0001010116:26:08','1906090107:33:51','1906083123:59:59' ],
],
1906 =>
[
[ [1906,9,1,7,33,52],[1906,9,1,0,33,52],'-07:00:00',[-7,0,0],
'MST',0,[1918,4,14,8,59,59],[1918,4,14,1,59,59],
'1906090107:33:52','1906090100:33:52','1918041408:59:59','1918041401:59:59' ],
],
1918 =>
[
[ [1918,4,14,9,0,0],[1918,4,14,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1918,10,27,7,59,59],[1918,10,27,1,59,59],
'1918041409:00:00','1918041403:00:00','1918102707:59:59','1918102701:59:59' ],
[ [1918,10,27,8,0,0],[1918,10,27,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1919,4,13,8,59,59],[1919,4,13,1,59,59],
'1918102708:00:00','1918102701:00:00','1919041308:59:59','1919041301:59:59' ],
],
1919 =>
[
[ [1919,4,13,9,0,0],[1919,4,13,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1919,5,27,7,59,59],[1919,5,27,1,59,59],
'1919041309:00:00','1919041303:00:00','1919052707:59:59','1919052701:59:59' ],
[ [1919,5,27,8,0,0],[1919,5,27,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1920,4,25,8,59,59],[1920,4,25,1,59,59],
'1919052708:00:00','1919052701:00:00','1920042508:59:59','1920042501:59:59' ],
],
1920 =>
[
[ [1920,4,25,9,0,0],[1920,4,25,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1920,10,31,7,59,59],[1920,10,31,1,59,59],
'1920042509:00:00','1920042503:00:00','1920103107:59:59','1920103101:59:59' ],
[ [1920,10,31,8,0,0],[1920,10,31,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1921,4,24,8,59,59],[1921,4,24,1,59,59],
'1920103108:00:00','1920103101:00:00','1921042408:59:59','1921042401:59:59' ],
],
1921 =>
[
[ [1921,4,24,9,0,0],[1921,4,24,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1921,9,25,7,59,59],[1921,9,25,1,59,59],
'1921042409:00:00','1921042403:00:00','1921092507:59:59','1921092501:59:59' ],
[ [1921,9,25,8,0,0],[1921,9,25,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1922,4,30,8,59,59],[1922,4,30,1,59,59],
'1921092508:00:00','1921092501:00:00','1922043008:59:59','1922043001:59:59' ],
],
1922 =>
[
[ [1922,4,30,9,0,0],[1922,4,30,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1922,9,24,7,59,59],[1922,9,24,1,59,59],
'1922043009:00:00','1922043003:00:00','1922092407:59:59','1922092401:59:59' ],
[ [1922,9,24,8,0,0],[1922,9,24,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1923,4,29,8,59,59],[1923,4,29,1,59,59],
'1922092408:00:00','1922092401:00:00','1923042908:59:59','1923042901:59:59' ],
],
1923 =>
[
[ [1923,4,29,9,0,0],[1923,4,29,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1923,9,30,7,59,59],[1923,9,30,1,59,59],
'1923042909:00:00','1923042903:00:00','1923093007:59:59','1923093001:59:59' ],
[ [1923,9,30,8,0,0],[1923,9,30,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1942,2,9,8,59,59],[1942,2,9,1,59,59],
'1923093008:00:00','1923093001:00:00','1942020908:59:59','1942020901:59:59' ],
],
1942 =>
[
[ [1942,2,9,9,0,0],[1942,2,9,3,0,0],'-06:00:00',[-6,0,0],
'MWT',1,[1945,8,14,22,59,59],[1945,8,14,16,59,59],
'1942020909:00:00','1942020903:00:00','1945081422:59:59','1945081416:59:59' ],
],
1945 =>
[
[ [1945,8,14,23,0,0],[1945,8,14,17,0,0],'-06:00:00',[-6,0,0],
'MPT',1,[1945,9,30,7,59,59],[1945,9,30,1,59,59],
'1945081423:00:00','1945081417:00:00','1945093007:59:59','1945093001:59:59' ],
[ [1945,9,30,8,0,0],[1945,9,30,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1947,4,27,8,59,59],[1947,4,27,1,59,59],
'1945093008:00:00','1945093001:00:00','1947042708:59:59','1947042701:59:59' ],
],
1947 =>
[
[ [1947,4,27,9,0,0],[1947,4,27,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1947,9,28,7,59,59],[1947,9,28,1,59,59],
'1947042709:00:00','1947042703:00:00','1947092807:59:59','1947092801:59:59' ],
[ [1947,9,28,8,0,0],[1947,9,28,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1967,4,30,8,59,59],[1967,4,30,1,59,59],
'1947092808:00:00','1947092801:00:00','1967043008:59:59','1967043001:59:59' ],
],
1967 =>
[
[ [1967,4,30,9,0,0],[1967,4,30,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1967,10,29,7,59,59],[1967,10,29,1,59,59],
'1967043009:00:00','1967043003:00:00','1967102907:59:59','1967102901:59:59' ],
[ [1967,10,29,8,0,0],[1967,10,29,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1969,4,27,8,59,59],[1969,4,27,1,59,59],
'1967102908:00:00','1967102901:00:00','1969042708:59:59','1969042701:59:59' ],
],
1969 =>
[
[ [1969,4,27,9,0,0],[1969,4,27,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1969,10,26,7,59,59],[1969,10,26,1,59,59],
'1969042709:00:00','1969042703:00:00','1969102607:59:59','1969102601:59:59' ],
[ [1969,10,26,8,0,0],[1969,10,26,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1972,4,30,8,59,59],[1972,4,30,1,59,59],
'1969102608:00:00','1969102601:00:00','1972043008:59:59','1972043001:59:59' ],
],
1972 =>
[
[ [1972,4,30,9,0,0],[1972,4,30,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1972,10,29,7,59,59],[1972,10,29,1,59,59],
'1972043009:00:00','1972043003:00:00','1972102907:59:59','1972102901:59:59' ],
[ [1972,10,29,8,0,0],[1972,10,29,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1973,4,29,8,59,59],[1973,4,29,1,59,59],
'1972102908:00:00','1972102901:00:00','1973042908:59:59','1973042901:59:59' ],
],
1973 =>
[
[ [1973,4,29,9,0,0],[1973,4,29,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1973,10,28,7,59,59],[1973,10,28,1,59,59],
'1973042909:00:00','1973042903:00:00','1973102807:59:59','1973102801:59:59' ],
[ [1973,10,28,8,0,0],[1973,10,28,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1974,4,28,8,59,59],[1974,4,28,1,59,59],
'1973102808:00:00','1973102801:00:00','1974042808:59:59','1974042801:59:59' ],
],
1974 =>
[
[ [1974,4,28,9,0,0],[1974,4,28,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1974,10,27,7,59,59],[1974,10,27,1,59,59],
'1974042809:00:00','1974042803:00:00','1974102707:59:59','1974102701:59:59' ],
[ [1974,10,27,8,0,0],[1974,10,27,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1975,4,27,8,59,59],[1975,4,27,1,59,59],
'1974102708:00:00','1974102701:00:00','1975042708:59:59','1975042701:59:59' ],
],
1975 =>
[
[ [1975,4,27,9,0,0],[1975,4,27,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1975,10,26,7,59,59],[1975,10,26,1,59,59],
'1975042709:00:00','1975042703:00:00','1975102607:59:59','1975102601:59:59' ],
[ [1975,10,26,8,0,0],[1975,10,26,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1976,4,25,8,59,59],[1976,4,25,1,59,59],
'1975102608:00:00','1975102601:00:00','1976042508:59:59','1976042501:59:59' ],
],
1976 =>
[
[ [1976,4,25,9,0,0],[1976,4,25,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1976,10,31,7,59,59],[1976,10,31,1,59,59],
'1976042509:00:00','1976042503:00:00','1976103107:59:59','1976103101:59:59' ],
[ [1976,10,31,8,0,0],[1976,10,31,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1977,4,24,8,59,59],[1977,4,24,1,59,59],
'1976103108:00:00','1976103101:00:00','1977042408:59:59','1977042401:59:59' ],
],
1977 =>
[
[ [1977,4,24,9,0,0],[1977,4,24,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1977,10,30,7,59,59],[1977,10,30,1,59,59],
'1977042409:00:00','1977042403:00:00','1977103007:59:59','1977103001:59:59' ],
[ [1977,10,30,8,0,0],[1977,10,30,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1978,4,30,8,59,59],[1978,4,30,1,59,59],
'1977103008:00:00','1977103001:00:00','1978043008:59:59','1978043001:59:59' ],
],
1978 =>
[
[ [1978,4,30,9,0,0],[1978,4,30,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1978,10,29,7,59,59],[1978,10,29,1,59,59],
'1978043009:00:00','1978043003:00:00','1978102907:59:59','1978102901:59:59' ],
[ [1978,10,29,8,0,0],[1978,10,29,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1979,4,29,8,59,59],[1979,4,29,1,59,59],
'1978102908:00:00','1978102901:00:00','1979042908:59:59','1979042901:59:59' ],
],
1979 =>
[
[ [1979,4,29,9,0,0],[1979,4,29,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1979,10,28,7,59,59],[1979,10,28,1,59,59],
'1979042909:00:00','1979042903:00:00','1979102807:59:59','1979102801:59:59' ],
[ [1979,10,28,8,0,0],[1979,10,28,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1980,4,27,8,59,59],[1980,4,27,1,59,59],
'1979102808:00:00','1979102801:00:00','1980042708:59:59','1980042701:59:59' ],
],
1980 =>
[
[ [1980,4,27,9,0,0],[1980,4,27,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1980,10,26,7,59,59],[1980,10,26,1,59,59],
'1980042709:00:00','1980042703:00:00','1980102607:59:59','1980102601:59:59' ],
[ [1980,10,26,8,0,0],[1980,10,26,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1981,4,26,8,59,59],[1981,4,26,1,59,59],
'1980102608:00:00','1980102601:00:00','1981042608:59:59','1981042601:59:59' ],
],
1981 =>
[
[ [1981,4,26,9,0,0],[1981,4,26,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1981,10,25,7,59,59],[1981,10,25,1,59,59],
'1981042609:00:00','1981042603:00:00','1981102507:59:59','1981102501:59:59' ],
[ [1981,10,25,8,0,0],[1981,10,25,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1982,4,25,8,59,59],[1982,4,25,1,59,59],
'1981102508:00:00','1981102501:00:00','1982042508:59:59','1982042501:59:59' ],
],
1982 =>
[
[ [1982,4,25,9,0,0],[1982,4,25,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1982,10,31,7,59,59],[1982,10,31,1,59,59],
'1982042509:00:00','1982042503:00:00','1982103107:59:59','1982103101:59:59' ],
[ [1982,10,31,8,0,0],[1982,10,31,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1983,4,24,8,59,59],[1983,4,24,1,59,59],
'1982103108:00:00','1982103101:00:00','1983042408:59:59','1983042401:59:59' ],
],
1983 =>
[
[ [1983,4,24,9,0,0],[1983,4,24,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1983,10,30,7,59,59],[1983,10,30,1,59,59],
'1983042409:00:00','1983042403:00:00','1983103007:59:59','1983103001:59:59' ],
[ [1983,10,30,8,0,0],[1983,10,30,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1984,4,29,8,59,59],[1984,4,29,1,59,59],
'1983103008:00:00','1983103001:00:00','1984042908:59:59','1984042901:59:59' ],
],
1984 =>
[
[ [1984,4,29,9,0,0],[1984,4,29,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1984,10,28,7,59,59],[1984,10,28,1,59,59],
'1984042909:00:00','1984042903:00:00','1984102807:59:59','1984102801:59:59' ],
[ [1984,10,28,8,0,0],[1984,10,28,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1985,4,28,8,59,59],[1985,4,28,1,59,59],
'1984102808:00:00','1984102801:00:00','1985042808:59:59','1985042801:59:59' ],
],
1985 =>
[
[ [1985,4,28,9,0,0],[1985,4,28,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1985,10,27,7,59,59],[1985,10,27,1,59,59],
'1985042809:00:00','1985042803:00:00','1985102707:59:59','1985102701:59:59' ],
[ [1985,10,27,8,0,0],[1985,10,27,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1986,4,27,8,59,59],[1986,4,27,1,59,59],
'1985102708:00:00','1985102701:00:00','1986042708:59:59','1986042701:59:59' ],
],
1986 =>
[
[ [1986,4,27,9,0,0],[1986,4,27,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1986,10,26,7,59,59],[1986,10,26,1,59,59],
'1986042709:00:00','1986042703:00:00','1986102607:59:59','1986102601:59:59' ],
[ [1986,10,26,8,0,0],[1986,10,26,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1987,4,5,8,59,59],[1987,4,5,1,59,59],
'1986102608:00:00','1986102601:00:00','1987040508:59:59','1987040501:59:59' ],
],
1987 =>
[
[ [1987,4,5,9,0,0],[1987,4,5,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1987,10,25,7,59,59],[1987,10,25,1,59,59],
'1987040509:00:00','1987040503:00:00','1987102507:59:59','1987102501:59:59' ],
[ [1987,10,25,8,0,0],[1987,10,25,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1988,4,3,8,59,59],[1988,4,3,1,59,59],
'1987102508:00:00','1987102501:00:00','1988040308:59:59','1988040301:59:59' ],
],
1988 =>
[
[ [1988,4,3,9,0,0],[1988,4,3,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1988,10,30,7,59,59],[1988,10,30,1,59,59],
'1988040309:00:00','1988040303:00:00','1988103007:59:59','1988103001:59:59' ],
[ [1988,10,30,8,0,0],[1988,10,30,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1989,4,2,8,59,59],[1989,4,2,1,59,59],
'1988103008:00:00','1988103001:00:00','1989040208:59:59','1989040201:59:59' ],
],
1989 =>
[
[ [1989,4,2,9,0,0],[1989,4,2,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1989,10,29,7,59,59],[1989,10,29,1,59,59],
'1989040209:00:00','1989040203:00:00','1989102907:59:59','1989102901:59:59' ],
[ [1989,10,29,8,0,0],[1989,10,29,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1990,4,1,8,59,59],[1990,4,1,1,59,59],
'1989102908:00:00','1989102901:00:00','1990040108:59:59','1990040101:59:59' ],
],
1990 =>
[
[ [1990,4,1,9,0,0],[1990,4,1,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1990,10,28,7,59,59],[1990,10,28,1,59,59],
'1990040109:00:00','1990040103:00:00','1990102807:59:59','1990102801:59:59' ],
[ [1990,10,28,8,0,0],[1990,10,28,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1991,4,7,8,59,59],[1991,4,7,1,59,59],
'1990102808:00:00','1990102801:00:00','1991040708:59:59','1991040701:59:59' ],
],
1991 =>
[
[ [1991,4,7,9,0,0],[1991,4,7,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1991,10,27,7,59,59],[1991,10,27,1,59,59],
'1991040709:00:00','1991040703:00:00','1991102707:59:59','1991102701:59:59' ],
[ [1991,10,27,8,0,0],[1991,10,27,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1992,4,5,8,59,59],[1992,4,5,1,59,59],
'1991102708:00:00','1991102701:00:00','1992040508:59:59','1992040501:59:59' ],
],
1992 =>
[
[ [1992,4,5,9,0,0],[1992,4,5,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1992,10,25,7,59,59],[1992,10,25,1,59,59],
'1992040509:00:00','1992040503:00:00','1992102507:59:59','1992102501:59:59' ],
[ [1992,10,25,8,0,0],[1992,10,25,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1993,4,4,8,59,59],[1993,4,4,1,59,59],
'1992102508:00:00','1992102501:00:00','1993040408:59:59','1993040401:59:59' ],
],
1993 =>
[
[ [1993,4,4,9,0,0],[1993,4,4,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1993,10,31,7,59,59],[1993,10,31,1,59,59],
'1993040409:00:00','1993040403:00:00','1993103107:59:59','1993103101:59:59' ],
[ [1993,10,31,8,0,0],[1993,10,31,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1994,4,3,8,59,59],[1994,4,3,1,59,59],
'1993103108:00:00','1993103101:00:00','1994040308:59:59','1994040301:59:59' ],
],
1994 =>
[
[ [1994,4,3,9,0,0],[1994,4,3,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1994,10,30,7,59,59],[1994,10,30,1,59,59],
'1994040309:00:00','1994040303:00:00','1994103007:59:59','1994103001:59:59' ],
[ [1994,10,30,8,0,0],[1994,10,30,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1995,4,2,8,59,59],[1995,4,2,1,59,59],
'1994103008:00:00','1994103001:00:00','1995040208:59:59','1995040201:59:59' ],
],
1995 =>
[
[ [1995,4,2,9,0,0],[1995,4,2,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1995,10,29,7,59,59],[1995,10,29,1,59,59],
'1995040209:00:00','1995040203:00:00','1995102907:59:59','1995102901:59:59' ],
[ [1995,10,29,8,0,0],[1995,10,29,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1996,4,7,8,59,59],[1996,4,7,1,59,59],
'1995102908:00:00','1995102901:00:00','1996040708:59:59','1996040701:59:59' ],
],
1996 =>
[
[ [1996,4,7,9,0,0],[1996,4,7,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1996,10,27,7,59,59],[1996,10,27,1,59,59],
'1996040709:00:00','1996040703:00:00','1996102707:59:59','1996102701:59:59' ],
[ [1996,10,27,8,0,0],[1996,10,27,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1997,4,6,8,59,59],[1997,4,6,1,59,59],
'1996102708:00:00','1996102701:00:00','1997040608:59:59','1997040601:59:59' ],
],
1997 =>
[
[ [1997,4,6,9,0,0],[1997,4,6,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1997,10,26,7,59,59],[1997,10,26,1,59,59],
'1997040609:00:00','1997040603:00:00','1997102607:59:59','1997102601:59:59' ],
[ [1997,10,26,8,0,0],[1997,10,26,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1998,4,5,8,59,59],[1998,4,5,1,59,59],
'1997102608:00:00','1997102601:00:00','1998040508:59:59','1998040501:59:59' ],
],
1998 =>
[
[ [1998,4,5,9,0,0],[1998,4,5,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1998,10,25,7,59,59],[1998,10,25,1,59,59],
'1998040509:00:00','1998040503:00:00','1998102507:59:59','1998102501:59:59' ],
[ [1998,10,25,8,0,0],[1998,10,25,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[1999,4,4,8,59,59],[1999,4,4,1,59,59],
'1998102508:00:00','1998102501:00:00','1999040408:59:59','1999040401:59:59' ],
],
1999 =>
[
[ [1999,4,4,9,0,0],[1999,4,4,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[1999,10,31,7,59,59],[1999,10,31,1,59,59],
'1999040409:00:00','1999040403:00:00','1999103107:59:59','1999103101:59:59' ],
[ [1999,10,31,8,0,0],[1999,10,31,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2000,4,2,8,59,59],[2000,4,2,1,59,59],
'1999103108:00:00','1999103101:00:00','2000040208:59:59','2000040201:59:59' ],
],
2000 =>
[
[ [2000,4,2,9,0,0],[2000,4,2,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2000,10,29,7,59,59],[2000,10,29,1,59,59],
'2000040209:00:00','2000040203:00:00','2000102907:59:59','2000102901:59:59' ],
[ [2000,10,29,8,0,0],[2000,10,29,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2001,4,1,8,59,59],[2001,4,1,1,59,59],
'2000102908:00:00','2000102901:00:00','2001040108:59:59','2001040101:59:59' ],
],
2001 =>
[
[ [2001,4,1,9,0,0],[2001,4,1,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2001,10,28,7,59,59],[2001,10,28,1,59,59],
'2001040109:00:00','2001040103:00:00','2001102807:59:59','2001102801:59:59' ],
[ [2001,10,28,8,0,0],[2001,10,28,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2002,4,7,8,59,59],[2002,4,7,1,59,59],
'2001102808:00:00','2001102801:00:00','2002040708:59:59','2002040701:59:59' ],
],
2002 =>
[
[ [2002,4,7,9,0,0],[2002,4,7,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2002,10,27,7,59,59],[2002,10,27,1,59,59],
'2002040709:00:00','2002040703:00:00','2002102707:59:59','2002102701:59:59' ],
[ [2002,10,27,8,0,0],[2002,10,27,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2003,4,6,8,59,59],[2003,4,6,1,59,59],
'2002102708:00:00','2002102701:00:00','2003040608:59:59','2003040601:59:59' ],
],
2003 =>
[
[ [2003,4,6,9,0,0],[2003,4,6,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2003,10,26,7,59,59],[2003,10,26,1,59,59],
'2003040609:00:00','2003040603:00:00','2003102607:59:59','2003102601:59:59' ],
[ [2003,10,26,8,0,0],[2003,10,26,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2004,4,4,8,59,59],[2004,4,4,1,59,59],
'2003102608:00:00','2003102601:00:00','2004040408:59:59','2004040401:59:59' ],
],
2004 =>
[
[ [2004,4,4,9,0,0],[2004,4,4,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2004,10,31,7,59,59],[2004,10,31,1,59,59],
'2004040409:00:00','2004040403:00:00','2004103107:59:59','2004103101:59:59' ],
[ [2004,10,31,8,0,0],[2004,10,31,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2005,4,3,8,59,59],[2005,4,3,1,59,59],
'2004103108:00:00','2004103101:00:00','2005040308:59:59','2005040301:59:59' ],
],
2005 =>
[
[ [2005,4,3,9,0,0],[2005,4,3,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2005,10,30,7,59,59],[2005,10,30,1,59,59],
'2005040309:00:00','2005040303:00:00','2005103007:59:59','2005103001:59:59' ],
[ [2005,10,30,8,0,0],[2005,10,30,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2006,4,2,8,59,59],[2006,4,2,1,59,59],
'2005103008:00:00','2005103001:00:00','2006040208:59:59','2006040201:59:59' ],
],
2006 =>
[
[ [2006,4,2,9,0,0],[2006,4,2,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2006,10,29,7,59,59],[2006,10,29,1,59,59],
'2006040209:00:00','2006040203:00:00','2006102907:59:59','2006102901:59:59' ],
[ [2006,10,29,8,0,0],[2006,10,29,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2007,3,11,8,59,59],[2007,3,11,1,59,59],
'2006102908:00:00','2006102901:00:00','2007031108:59:59','2007031101:59:59' ],
],
2007 =>
[
[ [2007,3,11,9,0,0],[2007,3,11,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2007,11,4,7,59,59],[2007,11,4,1,59,59],
'2007031109:00:00','2007031103:00:00','2007110407:59:59','2007110401:59:59' ],
[ [2007,11,4,8,0,0],[2007,11,4,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2008,3,9,8,59,59],[2008,3,9,1,59,59],
'2007110408:00:00','2007110401:00:00','2008030908:59:59','2008030901:59:59' ],
],
2008 =>
[
[ [2008,3,9,9,0,0],[2008,3,9,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2008,11,2,7,59,59],[2008,11,2,1,59,59],
'2008030909:00:00','2008030903:00:00','2008110207:59:59','2008110201:59:59' ],
[ [2008,11,2,8,0,0],[2008,11,2,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2009,3,8,8,59,59],[2009,3,8,1,59,59],
'2008110208:00:00','2008110201:00:00','2009030808:59:59','2009030801:59:59' ],
],
2009 =>
[
[ [2009,3,8,9,0,0],[2009,3,8,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2009,11,1,7,59,59],[2009,11,1,1,59,59],
'2009030809:00:00','2009030803:00:00','2009110107:59:59','2009110101:59:59' ],
[ [2009,11,1,8,0,0],[2009,11,1,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2010,3,14,8,59,59],[2010,3,14,1,59,59],
'2009110108:00:00','2009110101:00:00','2010031408:59:59','2010031401:59:59' ],
],
2010 =>
[
[ [2010,3,14,9,0,0],[2010,3,14,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2010,11,7,7,59,59],[2010,11,7,1,59,59],
'2010031409:00:00','2010031403:00:00','2010110707:59:59','2010110701:59:59' ],
[ [2010,11,7,8,0,0],[2010,11,7,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2011,3,13,8,59,59],[2011,3,13,1,59,59],
'2010110708:00:00','2010110701:00:00','2011031308:59:59','2011031301:59:59' ],
],
2011 =>
[
[ [2011,3,13,9,0,0],[2011,3,13,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2011,11,6,7,59,59],[2011,11,6,1,59,59],
'2011031309:00:00','2011031303:00:00','2011110607:59:59','2011110601:59:59' ],
[ [2011,11,6,8,0,0],[2011,11,6,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2012,3,11,8,59,59],[2012,3,11,1,59,59],
'2011110608:00:00','2011110601:00:00','2012031108:59:59','2012031101:59:59' ],
],
2012 =>
[
[ [2012,3,11,9,0,0],[2012,3,11,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2012,11,4,7,59,59],[2012,11,4,1,59,59],
'2012031109:00:00','2012031103:00:00','2012110407:59:59','2012110401:59:59' ],
[ [2012,11,4,8,0,0],[2012,11,4,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2013,3,10,8,59,59],[2013,3,10,1,59,59],
'2012110408:00:00','2012110401:00:00','2013031008:59:59','2013031001:59:59' ],
],
2013 =>
[
[ [2013,3,10,9,0,0],[2013,3,10,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2013,11,3,7,59,59],[2013,11,3,1,59,59],
'2013031009:00:00','2013031003:00:00','2013110307:59:59','2013110301:59:59' ],
[ [2013,11,3,8,0,0],[2013,11,3,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2014,3,9,8,59,59],[2014,3,9,1,59,59],
'2013110308:00:00','2013110301:00:00','2014030908:59:59','2014030901:59:59' ],
],
2014 =>
[
[ [2014,3,9,9,0,0],[2014,3,9,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2014,11,2,7,59,59],[2014,11,2,1,59,59],
'2014030909:00:00','2014030903:00:00','2014110207:59:59','2014110201:59:59' ],
[ [2014,11,2,8,0,0],[2014,11,2,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2015,3,8,8,59,59],[2015,3,8,1,59,59],
'2014110208:00:00','2014110201:00:00','2015030808:59:59','2015030801:59:59' ],
],
2015 =>
[
[ [2015,3,8,9,0,0],[2015,3,8,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2015,11,1,7,59,59],[2015,11,1,1,59,59],
'2015030809:00:00','2015030803:00:00','2015110107:59:59','2015110101:59:59' ],
[ [2015,11,1,8,0,0],[2015,11,1,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2016,3,13,8,59,59],[2016,3,13,1,59,59],
'2015110108:00:00','2015110101:00:00','2016031308:59:59','2016031301:59:59' ],
],
2016 =>
[
[ [2016,3,13,9,0,0],[2016,3,13,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2016,11,6,7,59,59],[2016,11,6,1,59,59],
'2016031309:00:00','2016031303:00:00','2016110607:59:59','2016110601:59:59' ],
[ [2016,11,6,8,0,0],[2016,11,6,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2017,3,12,8,59,59],[2017,3,12,1,59,59],
'2016110608:00:00','2016110601:00:00','2017031208:59:59','2017031201:59:59' ],
],
2017 =>
[
[ [2017,3,12,9,0,0],[2017,3,12,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2017,11,5,7,59,59],[2017,11,5,1,59,59],
'2017031209:00:00','2017031203:00:00','2017110507:59:59','2017110501:59:59' ],
[ [2017,11,5,8,0,0],[2017,11,5,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2018,3,11,8,59,59],[2018,3,11,1,59,59],
'2017110508:00:00','2017110501:00:00','2018031108:59:59','2018031101:59:59' ],
],
2018 =>
[
[ [2018,3,11,9,0,0],[2018,3,11,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2018,11,4,7,59,59],[2018,11,4,1,59,59],
'2018031109:00:00','2018031103:00:00','2018110407:59:59','2018110401:59:59' ],
[ [2018,11,4,8,0,0],[2018,11,4,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2019,3,10,8,59,59],[2019,3,10,1,59,59],
'2018110408:00:00','2018110401:00:00','2019031008:59:59','2019031001:59:59' ],
],
2019 =>
[
[ [2019,3,10,9,0,0],[2019,3,10,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2019,11,3,7,59,59],[2019,11,3,1,59,59],
'2019031009:00:00','2019031003:00:00','2019110307:59:59','2019110301:59:59' ],
[ [2019,11,3,8,0,0],[2019,11,3,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2020,3,8,8,59,59],[2020,3,8,1,59,59],
'2019110308:00:00','2019110301:00:00','2020030808:59:59','2020030801:59:59' ],
],
2020 =>
[
[ [2020,3,8,9,0,0],[2020,3,8,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2020,11,1,7,59,59],[2020,11,1,1,59,59],
'2020030809:00:00','2020030803:00:00','2020110107:59:59','2020110101:59:59' ],
[ [2020,11,1,8,0,0],[2020,11,1,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2021,3,14,8,59,59],[2021,3,14,1,59,59],
'2020110108:00:00','2020110101:00:00','2021031408:59:59','2021031401:59:59' ],
],
2021 =>
[
[ [2021,3,14,9,0,0],[2021,3,14,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2021,11,7,7,59,59],[2021,11,7,1,59,59],
'2021031409:00:00','2021031403:00:00','2021110707:59:59','2021110701:59:59' ],
[ [2021,11,7,8,0,0],[2021,11,7,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2022,3,13,8,59,59],[2022,3,13,1,59,59],
'2021110708:00:00','2021110701:00:00','2022031308:59:59','2022031301:59:59' ],
],
2022 =>
[
[ [2022,3,13,9,0,0],[2022,3,13,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2022,11,6,7,59,59],[2022,11,6,1,59,59],
'2022031309:00:00','2022031303:00:00','2022110607:59:59','2022110601:59:59' ],
[ [2022,11,6,8,0,0],[2022,11,6,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2023,3,12,8,59,59],[2023,3,12,1,59,59],
'2022110608:00:00','2022110601:00:00','2023031208:59:59','2023031201:59:59' ],
],
2023 =>
[
[ [2023,3,12,9,0,0],[2023,3,12,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2023,11,5,7,59,59],[2023,11,5,1,59,59],
'2023031209:00:00','2023031203:00:00','2023110507:59:59','2023110501:59:59' ],
[ [2023,11,5,8,0,0],[2023,11,5,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2024,3,10,8,59,59],[2024,3,10,1,59,59],
'2023110508:00:00','2023110501:00:00','2024031008:59:59','2024031001:59:59' ],
],
2024 =>
[
[ [2024,3,10,9,0,0],[2024,3,10,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2024,11,3,7,59,59],[2024,11,3,1,59,59],
'2024031009:00:00','2024031003:00:00','2024110307:59:59','2024110301:59:59' ],
[ [2024,11,3,8,0,0],[2024,11,3,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2025,3,9,8,59,59],[2025,3,9,1,59,59],
'2024110308:00:00','2024110301:00:00','2025030908:59:59','2025030901:59:59' ],
],
2025 =>
[
[ [2025,3,9,9,0,0],[2025,3,9,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2025,11,2,7,59,59],[2025,11,2,1,59,59],
'2025030909:00:00','2025030903:00:00','2025110207:59:59','2025110201:59:59' ],
[ [2025,11,2,8,0,0],[2025,11,2,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2026,3,8,8,59,59],[2026,3,8,1,59,59],
'2025110208:00:00','2025110201:00:00','2026030808:59:59','2026030801:59:59' ],
],
2026 =>
[
[ [2026,3,8,9,0,0],[2026,3,8,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2026,11,1,7,59,59],[2026,11,1,1,59,59],
'2026030809:00:00','2026030803:00:00','2026110107:59:59','2026110101:59:59' ],
[ [2026,11,1,8,0,0],[2026,11,1,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2027,3,14,8,59,59],[2027,3,14,1,59,59],
'2026110108:00:00','2026110101:00:00','2027031408:59:59','2027031401:59:59' ],
],
2027 =>
[
[ [2027,3,14,9,0,0],[2027,3,14,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2027,11,7,7,59,59],[2027,11,7,1,59,59],
'2027031409:00:00','2027031403:00:00','2027110707:59:59','2027110701:59:59' ],
[ [2027,11,7,8,0,0],[2027,11,7,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2028,3,12,8,59,59],[2028,3,12,1,59,59],
'2027110708:00:00','2027110701:00:00','2028031208:59:59','2028031201:59:59' ],
],
2028 =>
[
[ [2028,3,12,9,0,0],[2028,3,12,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2028,11,5,7,59,59],[2028,11,5,1,59,59],
'2028031209:00:00','2028031203:00:00','2028110507:59:59','2028110501:59:59' ],
[ [2028,11,5,8,0,0],[2028,11,5,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2029,3,11,8,59,59],[2029,3,11,1,59,59],
'2028110508:00:00','2028110501:00:00','2029031108:59:59','2029031101:59:59' ],
],
2029 =>
[
[ [2029,3,11,9,0,0],[2029,3,11,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2029,11,4,7,59,59],[2029,11,4,1,59,59],
'2029031109:00:00','2029031103:00:00','2029110407:59:59','2029110401:59:59' ],
[ [2029,11,4,8,0,0],[2029,11,4,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2030,3,10,8,59,59],[2030,3,10,1,59,59],
'2029110408:00:00','2029110401:00:00','2030031008:59:59','2030031001:59:59' ],
],
2030 =>
[
[ [2030,3,10,9,0,0],[2030,3,10,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2030,11,3,7,59,59],[2030,11,3,1,59,59],
'2030031009:00:00','2030031003:00:00','2030110307:59:59','2030110301:59:59' ],
[ [2030,11,3,8,0,0],[2030,11,3,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2031,3,9,8,59,59],[2031,3,9,1,59,59],
'2030110308:00:00','2030110301:00:00','2031030908:59:59','2031030901:59:59' ],
],
2031 =>
[
[ [2031,3,9,9,0,0],[2031,3,9,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2031,11,2,7,59,59],[2031,11,2,1,59,59],
'2031030909:00:00','2031030903:00:00','2031110207:59:59','2031110201:59:59' ],
[ [2031,11,2,8,0,0],[2031,11,2,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2032,3,14,8,59,59],[2032,3,14,1,59,59],
'2031110208:00:00','2031110201:00:00','2032031408:59:59','2032031401:59:59' ],
],
2032 =>
[
[ [2032,3,14,9,0,0],[2032,3,14,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2032,11,7,7,59,59],[2032,11,7,1,59,59],
'2032031409:00:00','2032031403:00:00','2032110707:59:59','2032110701:59:59' ],
[ [2032,11,7,8,0,0],[2032,11,7,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2033,3,13,8,59,59],[2033,3,13,1,59,59],
'2032110708:00:00','2032110701:00:00','2033031308:59:59','2033031301:59:59' ],
],
2033 =>
[
[ [2033,3,13,9,0,0],[2033,3,13,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2033,11,6,7,59,59],[2033,11,6,1,59,59],
'2033031309:00:00','2033031303:00:00','2033110607:59:59','2033110601:59:59' ],
[ [2033,11,6,8,0,0],[2033,11,6,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2034,3,12,8,59,59],[2034,3,12,1,59,59],
'2033110608:00:00','2033110601:00:00','2034031208:59:59','2034031201:59:59' ],
],
2034 =>
[
[ [2034,3,12,9,0,0],[2034,3,12,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2034,11,5,7,59,59],[2034,11,5,1,59,59],
'2034031209:00:00','2034031203:00:00','2034110507:59:59','2034110501:59:59' ],
[ [2034,11,5,8,0,0],[2034,11,5,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2035,3,11,8,59,59],[2035,3,11,1,59,59],
'2034110508:00:00','2034110501:00:00','2035031108:59:59','2035031101:59:59' ],
],
2035 =>
[
[ [2035,3,11,9,0,0],[2035,3,11,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2035,11,4,7,59,59],[2035,11,4,1,59,59],
'2035031109:00:00','2035031103:00:00','2035110407:59:59','2035110401:59:59' ],
[ [2035,11,4,8,0,0],[2035,11,4,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2036,3,9,8,59,59],[2036,3,9,1,59,59],
'2035110408:00:00','2035110401:00:00','2036030908:59:59','2036030901:59:59' ],
],
2036 =>
[
[ [2036,3,9,9,0,0],[2036,3,9,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2036,11,2,7,59,59],[2036,11,2,1,59,59],
'2036030909:00:00','2036030903:00:00','2036110207:59:59','2036110201:59:59' ],
[ [2036,11,2,8,0,0],[2036,11,2,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2037,3,8,8,59,59],[2037,3,8,1,59,59],
'2036110208:00:00','2036110201:00:00','2037030808:59:59','2037030801:59:59' ],
],
2037 =>
[
[ [2037,3,8,9,0,0],[2037,3,8,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2037,11,1,7,59,59],[2037,11,1,1,59,59],
'2037030809:00:00','2037030803:00:00','2037110107:59:59','2037110101:59:59' ],
[ [2037,11,1,8,0,0],[2037,11,1,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2038,3,14,8,59,59],[2038,3,14,1,59,59],
'2037110108:00:00','2037110101:00:00','2038031408:59:59','2038031401:59:59' ],
],
2038 =>
[
[ [2038,3,14,9,0,0],[2038,3,14,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2038,11,7,7,59,59],[2038,11,7,1,59,59],
'2038031409:00:00','2038031403:00:00','2038110707:59:59','2038110701:59:59' ],
[ [2038,11,7,8,0,0],[2038,11,7,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2039,3,13,8,59,59],[2039,3,13,1,59,59],
'2038110708:00:00','2038110701:00:00','2039031308:59:59','2039031301:59:59' ],
],
2039 =>
[
[ [2039,3,13,9,0,0],[2039,3,13,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2039,11,6,7,59,59],[2039,11,6,1,59,59],
'2039031309:00:00','2039031303:00:00','2039110607:59:59','2039110601:59:59' ],
[ [2039,11,6,8,0,0],[2039,11,6,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2040,3,11,8,59,59],[2040,3,11,1,59,59],
'2039110608:00:00','2039110601:00:00','2040031108:59:59','2040031101:59:59' ],
],
2040 =>
[
[ [2040,3,11,9,0,0],[2040,3,11,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2040,11,4,7,59,59],[2040,11,4,1,59,59],
'2040031109:00:00','2040031103:00:00','2040110407:59:59','2040110401:59:59' ],
[ [2040,11,4,8,0,0],[2040,11,4,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2041,3,10,8,59,59],[2041,3,10,1,59,59],
'2040110408:00:00','2040110401:00:00','2041031008:59:59','2041031001:59:59' ],
],
2041 =>
[
[ [2041,3,10,9,0,0],[2041,3,10,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2041,11,3,7,59,59],[2041,11,3,1,59,59],
'2041031009:00:00','2041031003:00:00','2041110307:59:59','2041110301:59:59' ],
[ [2041,11,3,8,0,0],[2041,11,3,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2042,3,9,8,59,59],[2042,3,9,1,59,59],
'2041110308:00:00','2041110301:00:00','2042030908:59:59','2042030901:59:59' ],
],
2042 =>
[
[ [2042,3,9,9,0,0],[2042,3,9,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2042,11,2,7,59,59],[2042,11,2,1,59,59],
'2042030909:00:00','2042030903:00:00','2042110207:59:59','2042110201:59:59' ],
[ [2042,11,2,8,0,0],[2042,11,2,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2043,3,8,8,59,59],[2043,3,8,1,59,59],
'2042110208:00:00','2042110201:00:00','2043030808:59:59','2043030801:59:59' ],
],
2043 =>
[
[ [2043,3,8,9,0,0],[2043,3,8,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2043,11,1,7,59,59],[2043,11,1,1,59,59],
'2043030809:00:00','2043030803:00:00','2043110107:59:59','2043110101:59:59' ],
[ [2043,11,1,8,0,0],[2043,11,1,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2044,3,13,8,59,59],[2044,3,13,1,59,59],
'2043110108:00:00','2043110101:00:00','2044031308:59:59','2044031301:59:59' ],
],
2044 =>
[
[ [2044,3,13,9,0,0],[2044,3,13,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2044,11,6,7,59,59],[2044,11,6,1,59,59],
'2044031309:00:00','2044031303:00:00','2044110607:59:59','2044110601:59:59' ],
[ [2044,11,6,8,0,0],[2044,11,6,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2045,3,12,8,59,59],[2045,3,12,1,59,59],
'2044110608:00:00','2044110601:00:00','2045031208:59:59','2045031201:59:59' ],
],
2045 =>
[
[ [2045,3,12,9,0,0],[2045,3,12,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2045,11,5,7,59,59],[2045,11,5,1,59,59],
'2045031209:00:00','2045031203:00:00','2045110507:59:59','2045110501:59:59' ],
[ [2045,11,5,8,0,0],[2045,11,5,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2046,3,11,8,59,59],[2046,3,11,1,59,59],
'2045110508:00:00','2045110501:00:00','2046031108:59:59','2046031101:59:59' ],
],
2046 =>
[
[ [2046,3,11,9,0,0],[2046,3,11,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2046,11,4,7,59,59],[2046,11,4,1,59,59],
'2046031109:00:00','2046031103:00:00','2046110407:59:59','2046110401:59:59' ],
[ [2046,11,4,8,0,0],[2046,11,4,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2047,3,10,8,59,59],[2047,3,10,1,59,59],
'2046110408:00:00','2046110401:00:00','2047031008:59:59','2047031001:59:59' ],
],
2047 =>
[
[ [2047,3,10,9,0,0],[2047,3,10,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2047,11,3,7,59,59],[2047,11,3,1,59,59],
'2047031009:00:00','2047031003:00:00','2047110307:59:59','2047110301:59:59' ],
[ [2047,11,3,8,0,0],[2047,11,3,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2048,3,8,8,59,59],[2048,3,8,1,59,59],
'2047110308:00:00','2047110301:00:00','2048030808:59:59','2048030801:59:59' ],
],
2048 =>
[
[ [2048,3,8,9,0,0],[2048,3,8,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2048,11,1,7,59,59],[2048,11,1,1,59,59],
'2048030809:00:00','2048030803:00:00','2048110107:59:59','2048110101:59:59' ],
[ [2048,11,1,8,0,0],[2048,11,1,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2049,3,14,8,59,59],[2049,3,14,1,59,59],
'2048110108:00:00','2048110101:00:00','2049031408:59:59','2049031401:59:59' ],
],
2049 =>
[
[ [2049,3,14,9,0,0],[2049,3,14,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2049,11,7,7,59,59],[2049,11,7,1,59,59],
'2049031409:00:00','2049031403:00:00','2049110707:59:59','2049110701:59:59' ],
[ [2049,11,7,8,0,0],[2049,11,7,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2050,3,13,8,59,59],[2050,3,13,1,59,59],
'2049110708:00:00','2049110701:00:00','2050031308:59:59','2050031301:59:59' ],
],
2050 =>
[
[ [2050,3,13,9,0,0],[2050,3,13,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2050,11,6,7,59,59],[2050,11,6,1,59,59],
'2050031309:00:00','2050031303:00:00','2050110607:59:59','2050110601:59:59' ],
[ [2050,11,6,8,0,0],[2050,11,6,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2051,3,12,8,59,59],[2051,3,12,1,59,59],
'2050110608:00:00','2050110601:00:00','2051031208:59:59','2051031201:59:59' ],
],
2051 =>
[
[ [2051,3,12,9,0,0],[2051,3,12,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2051,11,5,7,59,59],[2051,11,5,1,59,59],
'2051031209:00:00','2051031203:00:00','2051110507:59:59','2051110501:59:59' ],
[ [2051,11,5,8,0,0],[2051,11,5,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2052,3,10,8,59,59],[2052,3,10,1,59,59],
'2051110508:00:00','2051110501:00:00','2052031008:59:59','2052031001:59:59' ],
],
2052 =>
[
[ [2052,3,10,9,0,0],[2052,3,10,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2052,11,3,7,59,59],[2052,11,3,1,59,59],
'2052031009:00:00','2052031003:00:00','2052110307:59:59','2052110301:59:59' ],
[ [2052,11,3,8,0,0],[2052,11,3,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2053,3,9,8,59,59],[2053,3,9,1,59,59],
'2052110308:00:00','2052110301:00:00','2053030908:59:59','2053030901:59:59' ],
],
2053 =>
[
[ [2053,3,9,9,0,0],[2053,3,9,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2053,11,2,7,59,59],[2053,11,2,1,59,59],
'2053030909:00:00','2053030903:00:00','2053110207:59:59','2053110201:59:59' ],
[ [2053,11,2,8,0,0],[2053,11,2,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2054,3,8,8,59,59],[2054,3,8,1,59,59],
'2053110208:00:00','2053110201:00:00','2054030808:59:59','2054030801:59:59' ],
],
2054 =>
[
[ [2054,3,8,9,0,0],[2054,3,8,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2054,11,1,7,59,59],[2054,11,1,1,59,59],
'2054030809:00:00','2054030803:00:00','2054110107:59:59','2054110101:59:59' ],
[ [2054,11,1,8,0,0],[2054,11,1,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2055,3,14,8,59,59],[2055,3,14,1,59,59],
'2054110108:00:00','2054110101:00:00','2055031408:59:59','2055031401:59:59' ],
],
2055 =>
[
[ [2055,3,14,9,0,0],[2055,3,14,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2055,11,7,7,59,59],[2055,11,7,1,59,59],
'2055031409:00:00','2055031403:00:00','2055110707:59:59','2055110701:59:59' ],
[ [2055,11,7,8,0,0],[2055,11,7,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2056,3,12,8,59,59],[2056,3,12,1,59,59],
'2055110708:00:00','2055110701:00:00','2056031208:59:59','2056031201:59:59' ],
],
2056 =>
[
[ [2056,3,12,9,0,0],[2056,3,12,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2056,11,5,7,59,59],[2056,11,5,1,59,59],
'2056031209:00:00','2056031203:00:00','2056110507:59:59','2056110501:59:59' ],
[ [2056,11,5,8,0,0],[2056,11,5,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2057,3,11,8,59,59],[2057,3,11,1,59,59],
'2056110508:00:00','2056110501:00:00','2057031108:59:59','2057031101:59:59' ],
],
2057 =>
[
[ [2057,3,11,9,0,0],[2057,3,11,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2057,11,4,7,59,59],[2057,11,4,1,59,59],
'2057031109:00:00','2057031103:00:00','2057110407:59:59','2057110401:59:59' ],
[ [2057,11,4,8,0,0],[2057,11,4,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2058,3,10,8,59,59],[2058,3,10,1,59,59],
'2057110408:00:00','2057110401:00:00','2058031008:59:59','2058031001:59:59' ],
],
2058 =>
[
[ [2058,3,10,9,0,0],[2058,3,10,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2058,11,3,7,59,59],[2058,11,3,1,59,59],
'2058031009:00:00','2058031003:00:00','2058110307:59:59','2058110301:59:59' ],
[ [2058,11,3,8,0,0],[2058,11,3,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2059,3,9,8,59,59],[2059,3,9,1,59,59],
'2058110308:00:00','2058110301:00:00','2059030908:59:59','2059030901:59:59' ],
],
2059 =>
[
[ [2059,3,9,9,0,0],[2059,3,9,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2059,11,2,7,59,59],[2059,11,2,1,59,59],
'2059030909:00:00','2059030903:00:00','2059110207:59:59','2059110201:59:59' ],
[ [2059,11,2,8,0,0],[2059,11,2,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2060,3,14,8,59,59],[2060,3,14,1,59,59],
'2059110208:00:00','2059110201:00:00','2060031408:59:59','2060031401:59:59' ],
],
2060 =>
[
[ [2060,3,14,9,0,0],[2060,3,14,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2060,11,7,7,59,59],[2060,11,7,1,59,59],
'2060031409:00:00','2060031403:00:00','2060110707:59:59','2060110701:59:59' ],
[ [2060,11,7,8,0,0],[2060,11,7,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2061,3,13,8,59,59],[2061,3,13,1,59,59],
'2060110708:00:00','2060110701:00:00','2061031308:59:59','2061031301:59:59' ],
],
2061 =>
[
[ [2061,3,13,9,0,0],[2061,3,13,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2061,11,6,7,59,59],[2061,11,6,1,59,59],
'2061031309:00:00','2061031303:00:00','2061110607:59:59','2061110601:59:59' ],
[ [2061,11,6,8,0,0],[2061,11,6,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2062,3,12,8,59,59],[2062,3,12,1,59,59],
'2061110608:00:00','2061110601:00:00','2062031208:59:59','2062031201:59:59' ],
],
2062 =>
[
[ [2062,3,12,9,0,0],[2062,3,12,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2062,11,5,7,59,59],[2062,11,5,1,59,59],
'2062031209:00:00','2062031203:00:00','2062110507:59:59','2062110501:59:59' ],
[ [2062,11,5,8,0,0],[2062,11,5,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2063,3,11,8,59,59],[2063,3,11,1,59,59],
'2062110508:00:00','2062110501:00:00','2063031108:59:59','2063031101:59:59' ],
],
2063 =>
[
[ [2063,3,11,9,0,0],[2063,3,11,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2063,11,4,7,59,59],[2063,11,4,1,59,59],
'2063031109:00:00','2063031103:00:00','2063110407:59:59','2063110401:59:59' ],
[ [2063,11,4,8,0,0],[2063,11,4,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2064,3,9,8,59,59],[2064,3,9,1,59,59],
'2063110408:00:00','2063110401:00:00','2064030908:59:59','2064030901:59:59' ],
],
2064 =>
[
[ [2064,3,9,9,0,0],[2064,3,9,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2064,11,2,7,59,59],[2064,11,2,1,59,59],
'2064030909:00:00','2064030903:00:00','2064110207:59:59','2064110201:59:59' ],
[ [2064,11,2,8,0,0],[2064,11,2,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2065,3,8,8,59,59],[2065,3,8,1,59,59],
'2064110208:00:00','2064110201:00:00','2065030808:59:59','2065030801:59:59' ],
],
2065 =>
[
[ [2065,3,8,9,0,0],[2065,3,8,3,0,0],'-06:00:00',[-6,0,0],
'MDT',1,[2065,11,1,7,59,59],[2065,11,1,1,59,59],
'2065030809:00:00','2065030803:00:00','2065110107:59:59','2065110101:59:59' ],
[ [2065,11,1,8,0,0],[2065,11,1,1,0,0],'-07:00:00',[-7,0,0],
'MST',0,[2066,3,14,8,59,59],[2066,3,14,1,59,59],
'2065110108:00:00','2065110101:00:00','2066031408:59:59','2066031401:59:59' ],
],
);
# Auto-generated Date::Manip timezone data (tzdata); do not edit by hand --
# regenerated from the tz database, so manual changes will be lost.
# Presumably these rules extrapolate DST transitions for years beyond the
# explicit per-year table above -- TODO confirm against Date::Manip::TZ docs.
%LastRule = (
   'zone'   => {
     'dstoff' => '-06:00:00',   # UTC offset while DST is in effect (MDT)
     'stdoff' => '-07:00:00',   # UTC offset during standard time (MST)
     },
   'rules'  => {
     # Keys are transition months ('03' = March, '11' = November), matching
     # the March/November transitions enumerated in the table above.
     '03'     => {
       'flag'    => 'ge',        # day-of-week on or after 'num'
       'dow'     => '7',         # Sunday (table dates 2007+ all fall on Sundays)
       'num'     => '8',         # on/after the 8th => second Sunday in March
       'type'    => 'w',         # wall-clock time
       'time'    => '02:00:00',
       'isdst'   => '1',
       'abb'     => 'MDT',
       },
     '11'     => {
       'flag'    => 'ge',
       'dow'     => '7',         # Sunday
       'num'     => '1',         # on/after the 1st => first Sunday in November
       'type'    => 'w',
       'time'    => '02:00:00',
       'isdst'   => '0',
       'abb'     => 'MST',
       },
     },
);
1;
| jkb78/extrajnm | local/lib/perl5/Date/Manip/TZ/amedmo00.pm | Perl | mit | 50,044 |
#!/usr/bin/perl
use warnings;
use strict;
use Pod::Usage;
use Getopt::Long;
#steps
#read in gene to uniprot mapping file
#read in transcript to uniprot mapping file
#read in Uniprot ID history file
#read in MAF file to retrieve default Uniprot ID
#read in isoform1 sequence file
#read in annovar output file and process records
#usage
# $scriptFolder/perl/uniprot_lookup.pl $annovarExonicOutput $gene2uniprot $knownGene $uniprot_sec $maf_uniprot $isoprot_seq> $annovarExonicOutput".isoform_filter"
# Require exactly six positional arguments; anything else shows usage and exits.
@ARGV or pod2usage (-verbose=>0, -exitval=>1, -output=>\*STDOUT);
@ARGV == 6 or pod2usage ("Syntax error - you are missing an argument");
my ($annovar_output_file, $gene2uniprot, $knowngene2protein, $uniprot_sec, $maf_uniprot, $isoprot_seq) = @ARGV;
# Open every input up front so a missing/unreadable file fails fast,
# before any processing starts.
open (ANNOVAR_OUTPUT, $annovar_output_file) or die "Error: cannot read from ANNOVAR output file $annovar_output_file: $!\n";
open (HGNC2UNIPROT, $gene2uniprot) or die "Error: cannot read from file $gene2uniprot: $!\n";
open (GENE2PROTEIN, $knowngene2protein) or die "Error: cannot read from file $knowngene2protein: $!\n";
open (UNIPROT_SEC, $uniprot_sec) or die "Error: cannot read from Uniprot Secondary file $uniprot_sec: $!\n";
open (MAF_UNIPROT, $maf_uniprot) or die "Error: cannot read from MAF.NCM.UNIPROT file $maf_uniprot: $!\n";
open (ISOPROT, $isoprot_seq) or die "Error: cannot read from ISOFORM.sprot file $isoprot_seq: $!\n";
#read mapping between genes and uniprot IDs
# Input is tab-separated: gene_id, gene_symbol, alias(es), old symbol(s),
# uniprot ID; multi-valued fields are ';'-separated.  Primary symbols take
# precedence over aliases, which take precedence over old symbols.
my %hgnc2uniprot;
my %alias_hgnc2uniprot;
my %old_hgnc2uniprot;
#hashes for correspondence between gene and alias; between gene and old; between alias and old
my %gene_alias;
my %gene_old;
my %gene_alias_old;
while(my $line = <HGNC2UNIPROT>) {
    chomp($line);
    my ($gene_id, $gene_symbol, $alias, $old, $uniprot) = split (/\t/, $line);
    chomp($gene_symbol);
    #first get all primary gene to uniprot ID mapping
    #if gene symbol is withdrawn - it is skipped
    if($gene_symbol && $uniprot) {
        $hgnc2uniprot{$gene_symbol}=$uniprot;
    }
    #add more gene mapping - first aliases and then old genes
    #add aliases to mapping
    # An alias only maps to a Uniprot ID if no primary symbol already claims it.
    if($gene_symbol && $alias && $uniprot) {
        $gene_alias{$gene_symbol}=$alias;
        if($alias =~ /;/) {
            my @aliases=split(";", $alias);
            foreach my $one_alias (@aliases) {
                if (!$hgnc2uniprot{$one_alias}) {
                    $alias_hgnc2uniprot{$one_alias}=$uniprot;
                }else {
                    next;
                }
            }
        }else {
            if (!$hgnc2uniprot{$alias}) {
                $alias_hgnc2uniprot{$alias}=$uniprot;
            }
        }
    }
    #add old genes to mapping
    if($gene_symbol && $old && $uniprot) {
        $gene_old{$gene_symbol}=$old;
        if($old =~ /;/) {
            my @old_symbols=split(";", $old);
            foreach my $one_old_symbol (@old_symbols) {
                if (!$hgnc2uniprot{$one_old_symbol}) {
                    $old_hgnc2uniprot{$one_old_symbol}=$uniprot;
                }else {
                    next;
                }
            }
        }else {
            if (!$hgnc2uniprot{$old}){
                $old_hgnc2uniprot{$old}=$uniprot;
            }
        }
    }
    # Also record alias -> old-symbol correspondence (used by check_MAF_gene
    # for the "strange case" lookups).  NOTE(review): this branch does not
    # require $uniprot, and re-assigns $gene_old{$gene_symbol} — presumably
    # intentional; confirm.
    if($alias && $old) {
        $gene_old{$gene_symbol}=$old;
        my @alias_symbols;
        if($alias =~ /;/) {
            @alias_symbols=split(";", $alias);
        }else {
            @alias_symbols=$alias;
        }
        foreach my $one_alias(@alias_symbols) {
            $gene_alias_old{$one_alias}=$old;
        }
    }
}
close HGNC2UNIPROT;
my $total1 = scalar (keys %hgnc2uniprot);
#print "You have $total1 primary gene to uniprot mappings\n";
my $total2 = scalar (keys %alias_hgnc2uniprot);
#print "You have $total2 secondary gene to uniprot mappings\n";
#read mapping between transcripts and uniprot IDs
#this includes hg18 and hg19 so you can have 1 transcript going to 2 different Uniprot IDs
# Multiple Uniprot IDs for one transcript are accumulated ':'-joined.
my %gene2uniprot;
while(my $line = <GENE2PROTEIN>) {
    chomp($line);
    my ($transcript, $uniprot) = split (/\t/, $line);
    #skip uniprot with dash which means this is not default isoform
    if($uniprot =~ /-/) {
        next;
    }
    if ($gene2uniprot{$transcript}) {
        $gene2uniprot{$transcript}=$gene2uniprot{$transcript}.":".$uniprot;
    }else {
        $gene2uniprot{$transcript}=$uniprot;
    }
}
my $total3 = scalar (keys %gene2uniprot);
#print "You have $total3 transcript to uniprot mappings\n";
close GENE2PROTEIN;
#read mapping between uniprot primary and uniprot secondary accessions
# File columns: secondary accession, then primary accession; a primary
# accession may have many retired secondary accessions, kept in a list.
my %uniprotsecacc;
#storing primary to secondary uniprot mapping
while(my $line = <UNIPROT_SEC>) {
    chomp($line);
    my ($secondary, $primary) = split (/\s+/, $line);
    $primary =~s/^\s+//;
    $primary=~s/\s+$//;
    $secondary =~s/^\s+//;
    $secondary=~s/\s+$//;
    push(@{$uniprotsecacc{$primary}},$secondary);
}
my $total4 = scalar (keys %uniprotsecacc);
#print "You have $total4 secondary to primary uniprot mappings\n";
close UNIPROT_SEC;
#read in gene symbols form MAF file
#read in uniprot ID from MAF_UNIPROT file
# Records are keyed "lineN" so they can be joined back to the ANNOVAR
# exonic output, whose first column carries the same "lineN" identifiers.
my %maflist_gene;
my %uniprot;
my $count=0;
while (<MAF_UNIPROT>) {
    s/[\r\n]+$//;
    #skip first header line
    if($count==0) {
        $count++;
        next;
    }
    chomp($_);
    my @field = split (/\t/, $_);
    my $key="line"."$count";
    #gene_symbol is first field
    my $gene_symbol = $field[0];
    chomp ($gene_symbol);
    $maflist_gene{$key}=$gene_symbol;
    #print "This is your symbol: $gene_symbol\n";
    #grab the UNIPROT ID - last field in line
    #can have more than 1 uniprot ID and can have dashes
    #one uniprot ID
    # The three patterns below handle: a single accession ("P12345"),
    # a leading dash ("-;P12345"), and a ';'-joined list ("P12345;Q67890"
    # — stored verbatim and split by downstream consumers).
    my $one_field=pop(@field);
    chomp($one_field);
    #print "This is field: $one_field\n";
    if($one_field =~ m/^(\D\d[\d\D][\d\D][\d\D]\d)$/) {
        my $uniprotid = $1;
        chomp($uniprotid);
        $uniprot{$key}=$uniprotid;
    }
    #has a dash
    if($one_field =~ m/^\-\;\D\d[\d\D][\d\D][\d\D]\d$/) {
        my @all=split(";", $one_field);
        my $uniprotid = $all[1];
        chomp($uniprotid);
        $uniprot{$key}=$uniprotid;
    }
    #more than one Uniprot ID
    if($one_field =~ m/^\D\d[\d\D][\d\D][\d\D]\d\;/) {
        my $uniprotid = $one_field;
        chomp($uniprotid);
        $uniprot{$key}=$uniprotid;
    }
    $count++;
}
my $total5 = scalar (keys %uniprot);
#print "You have $total5 Uniprot IDs from MAF file\n";
close MAF_UNIPROT;
#read in isoform1 file
# FASTA parse: a ">sp|ACCESSION" header starts a new record; every other
# line is appended to the current sequence and the hash entry is refreshed.
# NOTE(review): a sequence line before the first header would use an undef
# $uniprot_id key — assumes well-formed FASTA input; confirm.
my %isoform_results;
my $uniprot_id;
my $sequence;
while (my $line = <ISOPROT>) {
    chomp($line);
    #get uniprot ID
    if ($line =~ /^\>sp\|(\D\d[\d\D][\d\D][\d\D]\d)/) {
        $uniprot_id=$1;
        $sequence="";
    }else {
        $sequence.=$line;
        $isoform_results{$uniprot_id}=$sequence;
    }
}
close ISOPROT;
my $total6 = scalar (keys %isoform_results);
#print "You have $total6 isoform 1 sequences\n";
my $bad_line=0;
#read ANNOVAR OUTPUT file
# Main loop: for every ANNOVAR exonic_variant_function line, (1) require a
# default Uniprot isoform from the MAF, (2) map the MAF gene symbol to one
# or more Uniprot IDs (primary, then alias, then old symbol, plus retired
# accessions), (3) keep only transcripts whose Uniprot mapping agrees, and
# (4) pick the single transcript whose annotated wildtype amino acid matches
# the default isoform sequence.  Lines that fail a stage are printed with a
# diagnostic tag appended to column 3 and counted in $bad_line.
while (<ANNOVAR_OUTPUT>) {
    s/[\r\n]+$//;
    m/^line\d+/ or die "Error: invalid record found in exonic_variant_function file $annovar_output_file (line number expected): <$_>\n";
    my @field = split (/\t/, $_);
    # Stage 0: unannotatable records (unknown effect, whole-gene events).
    if($field[1] =~ m/^unknown/) {
        $field[2].=",NO UNIPROT MATCH";
        print_fields(@field);
        $bad_line++;
        next;
    }
    if($field[2] =~ m/^\w+:(\w+):wholegene/) {
        $field[2].=",NO UNIPROT MATCH";
        print_fields(@field);
        $bad_line++;
        next;
    }
    #here is an example of what has to be parsed
    #DIP2C:uc001ifp.1:exon19:c.2209_2210insTG:p.A737fs,DIP2C:uc009xhi.1:exon2:c.367_368insTG:p.A123fs,
    $field[2] =~ m/^(\S+)/ or die "Error: invalid record found in exonic_variant_function file (exonic format error): <$_>\n";
    my $required_info=$1;
    my @all_transcripts=split(",", $required_info);
    #line number is $field[0] in ANNOVAR output file
    my $maf_line_number=$field[0];
    #first check if there is a UNIPROT ID present for this line
    my $default_isoform;
    if ($uniprot{$maf_line_number}) {
        $default_isoform=$uniprot{$maf_line_number};
        #print "This is your default isoform:$maf_line_number\t$default_isoform\n";
    }
    #No Default Isoform
    if (!$default_isoform) {
        $field[2].="NO UNIPROT ID";
        print_fields(@field);
        $bad_line++;
        next; #no uniprot ID for this gene in the MAF file so go to the next line in file
    }
    #second check if gene symbol corresponds to a Uniprot ID
    #gene symbol comes from original MAF
    # Stage 2: collect candidate Uniprot IDs for the MAF gene symbol.
    # @temp accumulates retired accessions from whichever mapping matched.
    my $no_uniprot_id_warning=1;
    my $gene_symbol=$maflist_gene{$maf_line_number};
    my @primary_keep_this_uniprot;
    my @alias_keep_this_uniprot;
    my @old_keep_this_uniprot;
    my @keep_this_uniprot;
    my @temp;
    #add primary information
    if($hgnc2uniprot{$gene_symbol}) {
        $no_uniprot_id_warning=0;
        my %new_uniprot=process_uniprot_id_field($hgnc2uniprot{$gene_symbol});
        if($new_uniprot{"primary"}) {
            push(@primary_keep_this_uniprot, @{$new_uniprot{"primary"}});
        }
        if($new_uniprot{"old"}) {
            push(@temp, @{$new_uniprot{"old"}});
        }
    #add alias information
    }elsif($alias_hgnc2uniprot{$gene_symbol}) {
        $no_uniprot_id_warning=0;
        my %new_uniprot=process_uniprot_id_field($alias_hgnc2uniprot{$gene_symbol});
        if($new_uniprot{"primary"}) {
            push(@alias_keep_this_uniprot, @{$new_uniprot{"primary"}});
        }
        if($new_uniprot{"old"}) {
            push(@temp, @{$new_uniprot{"old"}});
        }
    #add old gene information
    }elsif($old_hgnc2uniprot{$gene_symbol}) {
        $no_uniprot_id_warning=0;
        my %new_uniprot=process_uniprot_id_field($old_hgnc2uniprot{$gene_symbol});
        if($new_uniprot{"primary"}) {
            push(@old_keep_this_uniprot, @{$new_uniprot{"primary"}});
        }
        if($new_uniprot{"old"}) {
            push(@temp, @{$new_uniprot{"old"}});
        }
    }
    # Union of all candidate IDs, used for the retired-accession fallback below.
    push(@keep_this_uniprot, @primary_keep_this_uniprot);
    push(@keep_this_uniprot, @alias_keep_this_uniprot);
    push(@keep_this_uniprot, @old_keep_this_uniprot);
    push(@keep_this_uniprot, @temp);
    if ($no_uniprot_id_warning){
        $field[2].="NO UNIPROT MATCH1";
        print_fields(@field);
        $bad_line++;
        next; #no uniprot ID for this gene in all the mapping sources so go to the next line in file
    }
    #third: map transcript to uniprot IDs
    # Stage 3: a transcript survives if its own Uniprot mapping intersects
    # the candidate IDs collected above; the four checks record which
    # mapping tier (primary/alias/old/retired) produced the agreement.
    my %primary_keep_this_transcript;
    my %alias_keep_this_transcript;
    my %old_keep_this_transcript;
    my %keep_this_transcript;
    my $no_uniprot_transcript_warning=1;
    foreach my $one_transcript(@all_transcripts) {
        my @transcript_info=split(":", $one_transcript);
        my $transcript_gene_symbol=$transcript_info[0];
        my $transcript=$transcript_info[1];
        #check MAF gene symbol
        #if no match - skip to next transcript
        my $keep=check_MAF_gene($gene_symbol, $transcript_gene_symbol);
        if ($keep == 0) {
            next;
        }
        if($gene2uniprot{$transcript}) {
            my @uniprot=split(":", $gene2uniprot{$transcript});
            #first: check primary hgnc2uniprot
            if($hgnc2uniprot{$gene_symbol}) {
                foreach my $one_uniprot_id (@uniprot) {
                    if ($hgnc2uniprot{$gene_symbol} =~/($one_uniprot_id)/) {
                        $primary_keep_this_transcript{$transcript}++;
                        $no_uniprot_transcript_warning=0;
                    }
                }
            }
            #second: check alias hgnc2uniprot
            if($alias_hgnc2uniprot{$gene_symbol}) {
                foreach my $one_uniprot_id (@uniprot) {
                    if ($alias_hgnc2uniprot{$gene_symbol} =~ /($one_uniprot_id)/) {
                        $alias_keep_this_transcript{$transcript}++;
                        $no_uniprot_transcript_warning=0;
                    }
                }
            }
            #third: check old hgnc2uniprot
            if($old_hgnc2uniprot{$gene_symbol}) {
                foreach my $one_uniprot_id (@uniprot) {
                    if ($old_hgnc2uniprot{$gene_symbol} =~ /($one_uniprot_id)/) {
                        $old_keep_this_transcript{$transcript}++;
                        $no_uniprot_transcript_warning=0;
                    }
                }
            }
            #fourth: check retired uniprot IDs by checking the complete list
            foreach my $one_uniprot_id (@uniprot) {
                if (grep {$_ eq $one_uniprot_id} @keep_this_uniprot) {
                    $keep_this_transcript{$transcript}++;
                    $no_uniprot_transcript_warning=0;
                }
            }
        }
    }
    if ($no_uniprot_transcript_warning){
        $field[2].="NO UNIPROT MATCH2";
        print_fields(@field);
        $bad_line++;
        next; #no mapping between transcript and defined uniprot for this gene so go to the next line in file
    }
    #perform isoform filtering here
    # Stage 4: try the tiers in order of confidence (primary, alias, old,
    # any).  On a match, column 3 is rewritten to the single winning
    # transcript annotation and the line is emitted.
    if ($default_isoform){
        if (%primary_keep_this_transcript) {
            my %primary_result=isoform_filter(\@all_transcripts, \%primary_keep_this_transcript, $default_isoform);
            if($primary_result{"match"}==1 && $primary_result{"isoform"}) {
                #at this point you should have 1 transcript to keep
                foreach my $one_transcript(@all_transcripts) {
                    my @transcript_info=split(":", $one_transcript);
                    my $transcript=$transcript_info[1];
                    if ($primary_result{"isoform"} && $transcript =~ m/($primary_result{"isoform"})/) {
                        $field[2]=$one_transcript;
                    }
                }
                print_fields(@field);
                next;
            }
        }
        if (%alias_keep_this_transcript) {
            my %alias_result=isoform_filter(\@all_transcripts, \%alias_keep_this_transcript, $default_isoform);
            if($alias_result{"match"}==1 && $alias_result{"isoform"}) {
                #at this point you should have 1 transcript to keep
                foreach my $one_transcript(@all_transcripts) {
                    my @transcript_info=split(":", $one_transcript);
                    my $transcript=$transcript_info[1];
                    if ($alias_result{"isoform"} && $transcript =~ m/($alias_result{"isoform"})/) {
                        $field[2]=$one_transcript;
                    }
                }
                print_fields(@field);
                next;
            }
        }
        if (%old_keep_this_transcript) {
            my %old_result=isoform_filter(\@all_transcripts, \%old_keep_this_transcript, $default_isoform);
            if($old_result{"match"}==1 && $old_result{"isoform"}) {
                #at this point you should have 1 transcript to keep
                foreach my $one_transcript(@all_transcripts) {
                    my @transcript_info=split(":", $one_transcript);
                    my $transcript=$transcript_info[1];
                    if ($old_result{"isoform"} && $transcript =~ m/($old_result{"isoform"})/) {
                        $field[2]=$one_transcript;
                    }
                }
                print_fields(@field);
                next;
            }
        }
        if (%keep_this_transcript) {
            #first match will win here
            my %result=isoform_filter(\@all_transcripts, \%keep_this_transcript, $default_isoform);
            if($result{"match"}==1 && $result{"isoform"}) {
                #at this point you should have 1 transcript to keep
                foreach my $one_transcript(@all_transcripts) {
                    my @transcript_info=split(":", $one_transcript);
                    my $transcript=$transcript_info[1];
                    if ($result{"isoform"} && $transcript =~ m/($result{"isoform"})/) {
                        $field[2]=$one_transcript;
                    }
                }
                print_fields(@field);
                next;
            #if you have reached this point, that means there was no isoform match or there was a deletion
            }else {
                my @filtered_transcript_list=generate_filtered_transcript_list(\%keep_this_transcript, \@all_transcripts);
                #NEW ADDITION:7/2/14
                #need to flag true no isoform matches and not deletions
                # match == -1 marks a deletion record (see isoform_filter):
                # emitted as-is rather than tagged as a failure.
                if($result{"match"}==-1 && $result{"isoform"}) {
                    print_fields(@field);
                }else {
                    push(@filtered_transcript_list, "NO ISOFORM MATCH");
                    $field[2]=join(",", @filtered_transcript_list);
                    print_fields(@field);
                    $bad_line++;
                }
            }
        }
    }
}
close ANNOVAR_OUTPUT;
#print"There are $bad_line lines which were not processed.\n";
#print "COMPLETE\n";
# Emit one output row: every column followed by a tab, then a newline.
# (Deliberately reproduces the historical trailing tab before the newline,
# since downstream consumers may rely on the exact column layout.)
sub print_fields {
    my @columns = @_;
    print "$_\t" for @columns;
    print "\n";
}
#perform isoform check and select transcript which matches isoform protein sequence
# Args: ref to all transcript annotation strings, ref to the hash of
# transcript IDs that passed the Uniprot agreement filter, and the default
# Uniprot isoform ID (possibly several, ';'-joined).
# Returns a hash with:
#   match   =>  1  (one transcript's wildtype residue matches the isoform),
#               0  (no match; 'isoform' holds the comma-joined kept IDs),
#              -1  (variant is a deletion; position check not applicable)
#   isoform =>  the winning transcript ID, or the fallback list above.
# Reads the file-level %isoform_results sequence map.
sub isoform_filter {
    my($all_transcripts,$keep_this_transcript, $default_isoform)=@_;
    my @all_transcripts=@$all_transcripts;
    my %keep_this_transcript=%$keep_this_transcript;
    #there are special cases where there is more than 1 default isoform;
    my @default_isoform;
    if ($default_isoform =~ /;/) {
        @default_isoform=split(";", $default_isoform);
    }else {
        @default_isoform=$default_isoform;
    }
    #print "All transcripts:@all_transcripts\n";
    my $keep_list=join(",", (keys %keep_this_transcript));
    #print "Keep transcript: $keep_list\n";
    #print "Default isoform: $default_isoform\n";
    #first set isoform match to all good transcripts; overwrite this if filter works
    my $isoform_match=join(",", (keys %keep_this_transcript));
    my %overall_result;
    foreach my $one_transcript(@all_transcripts) {
        #parse the transcript information
        my @fields=split(":", $one_transcript);
        my $transcript=$fields[1];
        #double-check against keep_this_transcript
        #if it doesn't match, don't test it
        my $keep_flag=0;
        foreach my $keep (keys %keep_this_transcript) {
            if ($transcript eq $keep) {
                $keep_flag=1;
            }
        }
        if($keep_flag == 0) {
            next;
        }
        my $annovar_protein;
        #get the protein amino acid information
        #need to parse this: p.A737fs
        foreach my $one_annovar_field (@fields) {
            if($one_annovar_field =~ m/^p\.(\S+)$/) {
                $annovar_protein=$1;
                last;
            }else {
                $annovar_protein="none";
            }
        }
        #print "$annovar_protein\n";
        #get the amino acid
        # Wildtype residue is the first non-digit character (e.g. 'A' in A737fs).
        $annovar_protein =~ /^(\D)/;
        my $annovar_aa=$1;
        #get the amino acid position
        my $annovar_position;
        if($annovar_protein =~/^\D(\d+)(\D)+$/) {
            $annovar_position=$1;
        #NEW ADDITION:7/2/14
        #check delins but set del as -1
        }elsif($annovar_protein=~/^\D(\d+)_/) {
            $annovar_position=$1;
            #print "DASH:$annovar_protein:amino acid position is $annovar_position\n";
        }elsif($annovar_protein=~/^\d+_\d+del/) {
            # Deletion records cannot be position-checked: report -1 and
            # return the unfiltered keep list immediately.
            $overall_result{"match"}=-1;
            #print "$annovar_protein: has a deletion\n";
            $overall_result{"isoform"}=$isoform_match;
            return %overall_result;
        }else {
            #print "$annovar_protein:DID NOT PROCESS\n";
        }
        #then check wildtype amino acid against amino acid in default isoform
        foreach my $isoform (@default_isoform) {
            my $aa_sequence = $isoform_results{$isoform};
            if($annovar_aa && $annovar_position && $aa_sequence) {
                my $result=check_isoform_aa($annovar_aa, $annovar_position, $aa_sequence);
                #print "Your check isoform result: $result\n";
                if ($result==1) {
                    $isoform_match=$transcript;
                    #there is no need to check again
                    # First transcript to match wins; return immediately.
                    $overall_result{"match"}=1;
                    $overall_result{"isoform"}=$isoform_match;
                    return %overall_result;
                }
            }
        }
    }
    #print "After isoform filter: $isoform_match\n";
    $overall_result{"match"}=0;
    $overall_result{"isoform"}=$isoform_match;
    return %overall_result;
}
#compare isoform aa sequence and ANNOVAR aa sequence
# Args: expected wildtype amino acid (single letter), its 1-based position,
# and the isoform protein sequence.
# Returns:  1 if the residue at that position equals $aa,
#           0 if the residue differs,
#          -1 if the position lies outside the sequence.
sub check_isoform_aa {
    my ($aa, $position, $isoform_sequence)=@_;
    # Convert the 1-based annotation position to a 0-based string index.
    my $mod_position=$position-1;
    # Out-of-range guard.  Bug fix: the original test used '<', so a
    # position of length+1 slipped through to substr (returning "") and
    # yielded 0 instead of -1; a position of 0 produced a negative index,
    # which substr interprets as counting from the end of the string.
    # Callers only test for ==1, so tightening this is backward compatible.
    if ($mod_position < 0 || $mod_position >= length($isoform_sequence)) {
        return -1;
    }
    my $wildtype_aa=substr($isoform_sequence, $mod_position, 1);
    return ($aa eq $wildtype_aa) ? 1 : 0;
}
#filter transcript list
# Given the hash of transcript IDs that passed the Uniprot filter and the
# full list of ANNOVAR annotation strings (GENE:transcript:exon:...),
# return the annotation strings whose transcript field (second ':'-field)
# matches a kept ID.  The ID is matched as an unanchored regex, exactly as
# in the original implementation.
sub generate_filtered_transcript_list {
    my ($keep_ref, $annotations_ref) = @_;
    my @kept;
    for my $wanted_id (keys %{$keep_ref}) {
        for my $annotation (@{$annotations_ref}) {
            my $candidate_id = (split /:/, $annotation)[1];
            push @kept, $annotation if $candidate_id =~ m/($wanted_id)/;
        }
    }
    return @kept;
}
#process Uniprot IDs assigned to a given gene symbol; add in old Uniprot IDs
# Accepts a Uniprot field in one of three shapes: a single accession
# ("P12345"), a dash-prefixed pair ("-;P12345"), or a ';'-joined list.
# Returns a hash of two array refs:
#   primary => current accessions parsed from the field
#   old     => retired accessions pulled in via add_old_uniprot()
sub process_uniprot_id_field {
    my ($uniprot_field) = @_;
    my @primary_ids_keep_uniprot;
    my @old_ids_keep_uniprot;
    my %results;
    #print "This is the starting uniprot field: $uniprot_field\n";
    #check for dashes and multiple uniprot IDs
    if (!($uniprot_field =~ /\;/)) {
        push(@primary_ids_keep_uniprot, $uniprot_field);
        #print "@keep_uniprot\n";
        #if this is a newer Uniprot ID, then store old Uniprot IDs also
        my @old_uniprots=add_old_uniprot($uniprot_field);
        if (@old_uniprots) {
            push(@old_ids_keep_uniprot, @old_uniprots);
        }
    } else {
        #dashes in Uniprot field
        if($uniprot_field =~ m/^\-\;\D\d[\d\D][\d\D][\d\D]\d$/) {
            my @all=split(";", $uniprot_field);
            my $uniprotid = $all[1];
            chomp($uniprotid);
            push(@primary_ids_keep_uniprot, $uniprotid);
            my @old_uniprots=add_old_uniprot($uniprotid);
            if (@old_uniprots) {
                push(@old_ids_keep_uniprot, @old_uniprots);
            }
        }
        #more than one Uniprot ID
        # NOTE(review): a field that contains ';' but starts with neither a
        # dash nor an accession (e.g. trailing "-") matches neither branch
        # and yields empty results — confirm this is the intended handling.
        if($uniprot_field =~ m/^\D\d[\d\D][\d\D][\d\D]\d\;/) {
            my @uniprotid = split(";", $uniprot_field);
            foreach my $one_id (@uniprotid) {
                chomp($one_id);
                push(@primary_ids_keep_uniprot, $one_id);
                my @old_uniprots=add_old_uniprot($one_id);
                if (@old_uniprots) {
                    push(@old_ids_keep_uniprot, @old_uniprots);
                }
            }
        }
    }
    #print "These are your uniprot IDs:@primary_ids_keep_uniprot followed by @old_ids_keep_uniprot\n";
    $results{"primary"}=\@primary_ids_keep_uniprot;
    $results{"old"}=\@old_ids_keep_uniprot;
    return %results;
}
#adds old Uniprot IDs for a current Uniprot ID
# Returns the list of retired (secondary) accessions recorded for this
# primary accession in %uniprotsecacc, or an empty list if there are none.
#
# Bug fix: the original initialised the result with "my @old_uniprot=[];",
# which creates a one-element list holding an array reference.  For IDs
# with no history the sub therefore returned a non-empty list, and the
# caller's "if (@old_uniprots)" test pushed a stray reference into its
# list of candidate Uniprot IDs.
sub add_old_uniprot {
    my ($uniprot_id) = @_;
    my @old_uniprot;
    if($uniprotsecacc{$uniprot_id}) {
        @old_uniprot = @{$uniprotsecacc{$uniprot_id}};
    }
    #print "Old uniprots added: @old_uniprot\n";
    return @old_uniprot;
}
#checks MAF gene symbol to see if it is valid
#if gene_symbol is a LOC, we'll assume it is related to MAF gene symbol. May need to change this assumption?
#need to check MAF gene symbol against ANNOVAR gene symbol;
#need to check MAF gene symbol alias against ANNOVAR gene symbol;
#need to check MAF gene symbol old against ANNOVAR gene symbol;
#need to check MAF gene symbol alias to old mapping against ANNOVAR gene symbol;
# Returns 1 if the two symbols agree directly or through any alias/old
# mapping, else 0.  Reads file-level %gene_alias, %gene_old, %gene_alias_old.
# NOTE(review): the alias/old checks use unanchored regex matching, so a
# symbol that is a substring of another (e.g. "ABC" vs "ABC1") can match
# spuriously — confirm whether exact-token matching was intended.
sub check_MAF_gene {
    my ($gene_symbol, $transcript_gene_symbol)=@_;
    my $keep=0;
    if ($transcript_gene_symbol =~/^LOC/) {
        $keep=1;
    }
    if($gene_symbol eq $transcript_gene_symbol) {
        $keep=1;
    }else {
        if ($gene_alias{$gene_symbol}){
            if ($gene_alias{$gene_symbol} =~ /($transcript_gene_symbol)/) {
                $keep=1;
            }
        }
        if ($gene_old{$gene_symbol}){
            if ($gene_old{$gene_symbol} =~ /($transcript_gene_symbol)/) {
                $keep=1;
            }
        }
        #strange case 1
        if ($gene_alias_old{$gene_symbol}) {
            if ($gene_alias_old{$gene_symbol} =~ /($transcript_gene_symbol)/) {
                $keep=1;
            }
        }
        #strange case 2
        if ($gene_alias_old{$transcript_gene_symbol}) {
            if ($gene_alias_old{$transcript_gene_symbol} =~ /($gene_symbol)/) {
                $keep=1;
            }
        }
        #strange case 3
        if ($gene_alias{$transcript_gene_symbol}) {
            if ($gene_alias{$transcript_gene_symbol} =~ /($gene_symbol)/) {
                $keep=1;
            }
        }
    }
    return $keep;
}
=head1 SYNOPSIS
uniprot_lookup.pl [arguments]
Optional arguments:
-h, --help print help message
-m, --man print complete documentation
-v, --verbose use verbose output
Function: a script to annotate a MAF with protein information
 Example: uniprot_lookup.pl $annovarExonicOutput $gene2uniprot $knownGene $uniprot_sec $maf_uniprot $isoprot_seq > $annovarExonicOutput".isoform_filter"
Version:
=head1 OPTIONS
=over 8
=item B<--help>
print a brief usage message and detailed explanation of options.
=item B<--man>
print the complete manual of the program.
=item B<--verbose>
use verbose output.
=back
=head1 DESCRIPTION
=cut
| cancerregulome/gidget | commands/maf_processing/perl/uniprot_lookup.pl | Perl | mit | 23,370 |
#!/usr/bin/perl -w
# Count the alleles observed at a fixed SNP position across BLAST hits and
# report, per query, the alleles ranked by frequency.
use strict;
use Bio::SearchIO;
my $sUsage = qq(
perl $0 <blast output>
);
my $blast_file = shift or die $sUsage;
my %allele_count;
my $searchio = Bio::SearchIO->new(-format => 'blast', file => "$blast_file" );
while (my $result = $searchio->next_result())
{
    last unless defined $result;
    my $query_name = $result->query_name;
    while(my $hit = $result->next_hit())
    {
        next unless defined $hit;
        while(my $hsp = $hit->next_hsp())
        {
            # Ignore short alignments that may not span the SNP site.
            next unless $hsp->length('query') > 50;
            my ($start, $end) = $hsp->range('query');
            print STDERR "!!$start\t$end\n" if $start > $end;
            my $seq = $hsp->hit_string;
            # NOTE(review): assumes the SNP sits at query position 51 and
            # that the alignment contains no gaps before it (hit_string
            # offsets would shift otherwise) — confirm for this pipeline.
            my $allele = substr($seq, (51-$start), 1);
            #print $query_name, "\t", $allele, "\n";
            $allele_count{$query_name}{$allele}++;
        }
    }
}
# Output: query ID, alleles sorted by descending count, then the counts
# in the same order (hash slice over the sorted keys).
foreach my $id (keys %allele_count)
{
    my @arr = sort {$allele_count{$id}{$b} <=> $allele_count{$id}{$a}} keys %{$allele_count{$id}};
    print join ("\t", ($id, @arr, @{$allele_count{$id}}{@arr})), "\n";
}
package YourCompany::Config;

=encoding utf-8

=head1 NAME

YourCompany::Config

=head1 DESCRIPTION

YourCompany Project configuration based on C<YAML> format.

It reads I<config/defaults.yml> and overrides settings with I<config/MODE.yml> and I<config/local.yml>
where C<MODE> is equal to I<$ENV{MOJO_MODE}>, I<$ENV{PLACK_ENV}> or I<"development">.

All root sections available through C<YourCompany::Config-E<gt>$section> property accessor.

Root available as C<YourCompany::Config-E<gt>all>.

=head1 SYNOPSYS

    use YourCompany::Config;

    say YourCompany::Config->database->{name};

=cut

# NOTE(review): subroutine signatures are used below, so
# YourCompany::Perl::UTF8 presumably enables strict/warnings plus the
# 'signatures' feature — confirm in that module.
use YourCompany::Perl::UTF8;

use Cwd qw( abs_path );
use File::Basename qw( dirname );
use Hash::Merge ();
use YAML::Syck qw( LoadFile );

# Load and merge the YAML config files for the given mode.
# Merge order is defaults.yml -> MODE.yml -> local.yml, with later files
# winning (Hash::Merge RIGHT_PRECEDENT); missing files are skipped.
sub _loader( $, $mode ) {
    local $YAML::Syck::ImplicitTyping  = 1;
    local $YAML::Syck::ImplicitUnicode = 1;

    # Project root is two directories above this file (lib/YourCompany/..).
    my $BASE_DIR = abs_path( dirname( __FILE__ ). "/../.." );

    my @files_to_merge = ( 'defaults' );
    push @files_to_merge, $mode if $mode;
    push @files_to_merge, 'local';

    my $config = {
        app => {
            mode => $mode,
        },
    };
    my $merger = Hash::Merge->new( 'RIGHT_PRECEDENT' );
    for (@files_to_merge) {
        my $file_name = "$BASE_DIR/config/$_.yml";
        next unless -r $file_name;
        my $to_merge = LoadFile( $file_name );
        $config = $merger->merge( $config, $to_merge );
    }

    # per process setup

    return $config;
}

# Resolve the runtime mode, load the merged config, and install one
# class-method accessor per top-level config key (plus ->all) into the
# symbol table.
sub _setup( $class ) {
    my $mode = $ENV{MOJO_MODE} || $ENV{PLACK_ENV} || 'development';

    my $config = $class->_loader($mode);

    no strict 'refs'; ## no critic (TestingAndDebugging::ProhibitNoStrict)
    for my $key ( keys %$config ) {
        *{"$class\::$key"} = sub {
            $config->{$key};
        };
    }
    *{"$class\::all"} = sub {
        return $config;
    };
    use strict 'refs';

    return 1;
}

# 'state' ensures _setup runs only once no matter how many times the
# module is use()d.
sub import( $class, @ ) {
    state $did_setup = $class->_setup;
}

1;
package Test::use::ok;
use 5.005;
# Version declaration only; usage documentation follows after __END__.
our $VERSION = '1.302162';

__END__
=head1 NAME
Test::use::ok - Alternative to Test::More::use_ok
=head1 SYNOPSIS
use ok 'Some::Module';
=head1 DESCRIPTION
According to the B<Test::More> documentation, it is recommended to run
C<use_ok()> inside a C<BEGIN> block, so functions are exported at
compile-time and prototypes are properly honored.
That is, instead of writing this:
use_ok( 'Some::Module' );
use_ok( 'Other::Module' );
One should write this:
BEGIN { use_ok( 'Some::Module' ); }
BEGIN { use_ok( 'Other::Module' ); }
However, people often either forget to add C<BEGIN>, or mistakenly group
C<use_ok> with other tests in a single C<BEGIN> block, which can create subtle
differences in execution order.
With this module, simply change all C<use_ok> in test scripts to C<use ok>,
and they will be executed at C<BEGIN> time. The explicit space after C<use>
makes it clear that this is a single compile-time action.
=head1 SEE ALSO
L<Test::More>
=head1 MAINTAINER
=over 4
=item Chad Granum E<lt>exodist@cpan.orgE<gt>
=back
=encoding utf8
=head1 CC0 1.0 Universal
To the extent possible under law, 唐鳳 has waived all copyright and related
or neighboring rights to L<Test-use-ok>.
This work is published from Taiwan.
L<http://creativecommons.org/publicdomain/zero/1.0>
=cut
| operepo/ope | client_tools/svc/rc/usr/share/perl5/core_perl/Test/use/ok.pm | Perl | mit | 1,360 |
package Google::Ads::AdWords::v201809::CampaignExtensionSettingService::ResponseHeader;

# Auto-generated SOAP::WSDL element class (see POD below) — regenerate
# rather than hand-editing.
use strict;
use warnings;

{ # BLOCK to scope variables

# XML namespace of the element, used for (de)serialisation.
sub get_xmlns { 'https://adwords.google.com/api/adwords/cm/v201809' }

__PACKAGE__->__set_name('ResponseHeader');
__PACKAGE__->__set_nillable();
__PACKAGE__->__set_minOccurs();
__PACKAGE__->__set_maxOccurs();
__PACKAGE__->__set_ref();

use base qw(
    SOAP::WSDL::XSD::Typelib::Element
    Google::Ads::AdWords::v201809::SoapResponseHeader
);

}

1;
=pod
=head1 NAME
Google::Ads::AdWords::v201809::CampaignExtensionSettingService::ResponseHeader
=head1 DESCRIPTION
Perl data type class for the XML Schema defined element
ResponseHeader from the namespace https://adwords.google.com/api/adwords/cm/v201809.
=head1 METHODS
=head2 new
my $element = Google::Ads::AdWords::v201809::CampaignExtensionSettingService::ResponseHeader->new($data);
Constructor. The following data structure may be passed to new():
$a_reference_to, # see Google::Ads::AdWords::v201809::SoapResponseHeader
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| googleads/googleads-perl-lib | lib/Google/Ads/AdWords/v201809/CampaignExtensionSettingService/ResponseHeader.pm | Perl | apache-2.0 | 1,089 |
# hash: login, uid, pass, dir,
# Create a system account via adduser.  Expects a hashref with a
# mandatory 'login' and optional 'uid', 'dir' (skeleton directory for -k)
# and 'pass' (plaintext; encrypted with crypt before being passed to -p).
sub add_user {
    my (%args) = %{ $_[0] };

    # Build the invocation as an argument list so values are handed to
    # adduser directly, with no shell interpolation.  This also fixes the
    # original string concatenation, which omitted the spaces between
    # successive options (producing e.g. "adduser -m -u 1000-k /dir").
    my @command = ('adduser', '-m');
    push @command, '-u', $args{uid} if defined $args{uid};
    push @command, '-k', $args{dir} if defined $args{dir};
    # NOTE(review): crypt() with the fixed salt "AA" is very weak; kept for
    # compatibility — consider a stronger password scheme.
    push @command, '-p', crypt($args{pass}, 'AA') if defined $args{pass};
    push @command, $args{login};

    print "@command";

    # The original only printed the command and then tested $?, which never
    # reflected an adduser run.  Actually execute it and check the status.
    if (system(@command) != 0) {
        print STDERR "Can't add user, maybe uid is taken?"
    }
}
# args login
# Remove a system account via deluser.  List-form system() avoids passing
# the login name through the shell (no injection or word-splitting).
sub del_user {
    my $login = shift;
    system('deluser', $login) == 0
        or print STDERR "Can't delete user $login\n";
}
# print find_free_uid();
# change_gid('couchdb', 118);
#print generate_password(20);
#print home_dir("ymir");
# Demo invocation: create a throwaway test account.
my %user = (
    login => 'testuser',
    pass  => 'password',
);
add_user(\%user);
package Paws::EFS::DescribeMountTargets;
  # Request class for the EFS DescribeMountTargets call (generated-style
  # Paws code; see POD below).  Each attribute is serialised as a
  # query-string parameter via the ParamInQuery trait.
  use Moose;
  has FileSystemId => (is => 'ro', isa => 'Str', traits => ['ParamInQuery'], query_name => 'FileSystemId');
  has Marker => (is => 'ro', isa => 'Str', traits => ['ParamInQuery'], query_name => 'Marker');
  has MaxItems => (is => 'ro', isa => 'Int', traits => ['ParamInQuery'], query_name => 'MaxItems');
  has MountTargetId => (is => 'ro', isa => 'Str', traits => ['ParamInQuery'], query_name => 'MountTargetId');

  use MooseX::ClassAttribute;

  # Call metadata: API name, REST endpoint/verb, and the response class.
  class_has _api_call => (isa => 'Str', is => 'ro', default => 'DescribeMountTargets');
  class_has _api_uri  => (isa => 'Str', is => 'ro', default => '/2015-02-01/mount-targets');
  class_has _api_method  => (isa => 'Str', is => 'ro', default => 'GET');
  class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::EFS::DescribeMountTargetsResponse');
  class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::EFS::DescribeMountTargets - Arguments for method DescribeMountTargets on Paws::EFS
=head1 DESCRIPTION
This class represents the parameters used for calling the method DescribeMountTargets on the
Amazon Elastic File System service. Use the attributes of this class
as arguments to method DescribeMountTargets.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to DescribeMountTargets.
As an example:
$service_obj->DescribeMountTargets(Att1 => $value1, Att2 => $value2, ...);
Values for attributes that are native types (Int, String, Float, etc) can passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
=head1 ATTRIBUTES
=head2 FileSystemId => Str
(Optional) ID of the file system whose mount targets you want to list
(String). It must be included in your request if C<MountTargetId> is
not included.
=head2 Marker => Str
(Optional) Opaque pagination token returned from a previous
C<DescribeMountTargets> operation (String). If present, it specifies to
continue the list from where the previous returning call left off.
=head2 MaxItems => Int
(Optional) Maximum number of mount targets to return in the response.
It must be an integer with a value greater than zero.
=head2 MountTargetId => Str
(Optional) ID of the mount target that you want to have described
(String). It must be included in your request if C<FileSystemId> is not
included.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method DescribeMountTargets in L<Paws::EFS>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/EFS/DescribeMountTargets.pm | Perl | apache-2.0 | 2,826 |
=head1 LICENSE
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 Bio::EnsEMBL::EGPipeline::Common::FastaSplit
=cut
package Bio::EnsEMBL::EGPipeline::Common::RunnableDB::FastaSplit;
use strict;
use warnings;
use base ('Bio::EnsEMBL::EGPipeline::Common::RunnableDB::Base');
use Bio::SeqIO;
use File::Basename qw(dirname fileparse);
use File::Path qw(make_path remove_tree);
use POSIX qw(ceil);
sub param_defaults {
  my ($self) = @_;

  # Defaults controlling how a multi-record Fasta file is partitioned
  # into smaller files spread across a directory tree.
  my %defaults = (
    'out_dir'                 => undef, # Top-level directory for output
    'max_seqs_per_file'       => 100,   # Maximum number of records in a file
    'max_seq_length_per_file' => undef, # Maximum sequence length in a file
    'max_files_per_directory' => 100,   # Maximum number of files in a directory
    'max_dirs_per_directory'  => 100,   # Maximum number of subdirectories
    'delete_existing_files'   => 1,     # Ensure that directories only contains files generated with the latest execution
    'unique_file_names'       => 0,     # Ensure that output file names are unique across all directories?
    'delete_original_file'    => 0,     # After splitting, delete original fasta file?
  );

  return \%defaults;
}
sub fetch_input {
  my ($self) = @_;

  # Validate the input Fasta file and make sure a usable output
  # directory exists, creating it on demand.
  my $fasta_file = $self->param_required('fasta_file');
  my $out_dir    = $self->param('out_dir');

  $self->throw("Fasta file '$fasta_file' does not exist") unless -e $fasta_file;

  if (defined $out_dir) {
    unless (-e $out_dir) {
      $self->warning("Output directory '$out_dir' does not exist. I shall create it.");
      make_path($out_dir) or $self->throw("Failed to create output directory '$out_dir'");
    }
  } else {
    # No directory given: write the split files alongside the input file.
    $self->param('out_dir', dirname($fasta_file));
  }
}
sub run {
  my $self = shift @_;

  # Split the input Fasta file into multiple smaller files, limiting each
  # output file by record count and/or total sequence length, and
  # distributing the files across the directory tree computed below.

  # All these should have appropriate values, either from param_defaults
  # or fetch_input. (If the user has provided anything but integers for
  # the max_* parameters, the script will automatically fail when it does
  # numeric comparisons with those values, so don't bother checking them.)
  my $fasta_file = $self->param('fasta_file');
  my $max_records = $self->param('max_seqs_per_file');
  my $max_seq_length = $self->param('max_seq_length_per_file');
  my $max_files = $self->param('max_files_per_directory');
  my $max_dirs = $self->param('max_dirs_per_directory');

  $self->param('split_files', []);

  # Tracking parameters to determine when we need a new file or directory.
  my $record_count = 0;
  my $seq_length = 0;
  my $file_count = 1;
  my $dir_index = 0;

  # Do nothing if there's nothing to do...
  if (-s $fasta_file == 0) {
    # Suppress autoflow so no downstream jobs are created for empty input.
    $self->input_job->autoflow(0);
    return;
  }

  # Need to calculate required degree of subdirectory nesting.
  $self->directory_structure();
  $self->delete_existing_files() if $self->param('delete_existing_files');

  # Basename without its final extension; used to name every split file.
  my ($basename, undef, undef) = fileparse($fasta_file, qr/\.[^.]*/);
  my $split_file = $self->new_filename($dir_index, $basename, $file_count);
  my $original = Bio::SeqIO->new(-format => 'Fasta', -file => $fasta_file);
  my $split = Bio::SeqIO->new(-format => 'Fasta', -file => ">$split_file");
  while (my $seq = $original->next_seq) {
    $record_count++;
    $seq_length += $seq->length;

    # Checking for record_count > 1 ensures that an empty file isn't created
    # if max_records <=0 or max_seq_length < the length of the sequence.
    if (
      $record_count > 1 &&
      ( (defined($max_records) && $record_count > $max_records) ||
        (defined($max_seq_length) && $seq_length > $max_seq_length)
      )
    ) {
      # Limit exceeded: start a new split file, rolling over to the next
      # output directory once the current one holds max_files files.
      $record_count = 1;
      $seq_length = $seq->length;
      if (defined($max_files) && $file_count >= $max_files) {
        $dir_index++;
        $file_count = 1;
      } else {
        $file_count++;
      }
      $split_file = $self->new_filename($dir_index, $basename, $file_count);
      $split = Bio::SeqIO->new(-format => 'Fasta', -file => ">$split_file");
    }
    my $success = $split->write_seq($seq);
    $self->throw("Failed to write sequence to '$split_file'") unless $success;
  }

  if ($self->param('delete_original_file')) {
    unlink $fasta_file;
  }
}
sub write_output {
  my ($self) = @_;

  # Flow one job per generated split file down branch 2.
  $self->dataflow_output_id({'split_file' => $_}, 2)
    for @{ $self->param('split_files') };
}
sub new_filename {
  my ($self, $dir_index, $basename, $file_count) = @_;

  # Build the path for the next split file, creating its subdirectory on
  # demand, and record the path in the 'split_files' parameter so that
  # write_output can flow the complete list downstream.
  my $out_dir = $self->param('out_dir');
  my $sub_dir = "$out_dir/" . $self->param('dirs')->[$dir_index];
  if (!-e $sub_dir) {
    make_path($sub_dir) or $self->throw("Failed to create output directory '$sub_dir'");
  }

  # Optionally embed the directory index in the name so that file names
  # are unique across the whole tree, not just within one directory.
  my $split_file;
  if ($self->param('unique_file_names')) {
    $split_file = "$sub_dir/$basename.$dir_index.$file_count.fa";
  } else {
    $split_file = "$sub_dir/$basename.$file_count.fa";
  }

  # Append in place. The previous implementation copied the whole list on
  # every call, which is quadratic in the number of split files.
  push @{ $self->param('split_files') }, $split_file;

  return $split_file;
}
sub directory_structure {
  my ($self) = @_;

  # This function sets an arrayref parameter with directory paths;
  # which is subsequently indexed in the new_filename function by the
  # parameter that keeps track of how many directories have been seen.
  my $max_files = $self->param('max_files_per_directory');
  my $max_dirs = $self->param('max_dirs_per_directory');

  my $files_required = $self->files_required();
  my $dirs_required = 1;
  if (defined $max_files && $max_files > 0) {
    $dirs_required = ceil($files_required / $max_files);
  }

  # An undefined or zero directory limit means one directory per level.
  if (!defined $max_dirs || $max_dirs == 0) {
    $max_dirs = 1;
  }

  my @dirs;
  if ($dirs_required < $max_dirs) {
    @dirs = (1..$dirs_required);
  } else {
    @dirs = (1..$max_dirs);
  }

  # If one level of subdirectories is not enough, keep nesting: every
  # pass appends another level ("1/1", "1/2", ...) until the tree can
  # hold the required number of files.
  while ($dirs_required > $max_dirs) {
    $dirs_required = ceil($dirs_required / $max_dirs);
    my @new_dirs;
    foreach my $dir (@dirs) {
      foreach my $sub_dir (1..$max_dirs) {
        push @new_dirs, "$dir/$sub_dir";
      }
    }
    @dirs = @new_dirs;
  }

  $self->param('dirs', \@dirs);
}
sub files_required {
  my ($self) = @_;

  # Dry-run pass over the Fasta file: count how many split files the
  # configured limits will produce, using the same rollover logic as
  # run() but without writing anything.
  my $fasta_file = $self->param('fasta_file');
  my $max_records = $self->param('max_seqs_per_file');
  my $max_seq_length = $self->param('max_seq_length_per_file');

  my $record_count = 0;
  my $seq_length = 0;
  my $files_required = 1;

  my $original = Bio::SeqIO->new(-format => 'Fasta', -file => $fasta_file);
  while (my $seq = $original->next_seq) {
    $record_count++;
    $seq_length += $seq->length;

    # Same guard as in run(): never count an empty leading file.
    if (
      $record_count > 1 &&
      ( (defined($max_records) && $record_count > $max_records) ||
        (defined($max_seq_length) && $seq_length > $max_seq_length)
      )
    ) {
      $record_count = 1;
      $seq_length = $seq->length;
      $files_required++;
    }
  }

  return $files_required;
}
sub delete_existing_files {
  my ($self) = @_;

  # Empty every output subdirectory left over from a previous run,
  # keeping the directories themselves so they can be reused.
  my $base = $self->param('out_dir');
  remove_tree("$base/$_", {keep_root => 1}) for @{ $self->param('dirs') };
}
1;
| navygit/ncRNA_Pipeline | modules/Bio/EnsEMBL/EGPipeline/Common/RunnableDB/FastaSplit.pm | Perl | apache-2.0 | 7,626 |
#
# Copyright 2018 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package storage::dell::compellent::snmp::mode::hardware;
use base qw(centreon::plugins::templates::hardware);
use strict;
use warnings;
sub set_system {
    my ($self, %options) = @_;

    # Component sections accepted by the --threshold-overload option.
    $self->{regexp_threshold_overload_check_section_option} =
        '^(ctrl|disk|encl|ctrlfan|ctrlpower|ctrlvoltage|ctrltemp|enclfan|enclpower|encliomod|encltemp|volume|cache|server|sc)$';
    # Sections that accept numeric --warning/--critical thresholds.
    $self->{regexp_threshold_numeric_check_section_option} = '^(ctrltemp|ctrlvoltage|ctrlfan|encltemp)$';

    # Hook invoked by the hardware template to collect the SNMP data.
    $self->{cb_hook2} = 'snmp_execute';

    # Default status-to-severity mapping applied to every component.
    $self->{thresholds} = {
        default => [
            ['up', 'OK'],
            ['down', 'CRITICAL'],
            ['degraded', 'WARNING'],
        ],
    };

    # Where the per-component check modules live, and which ones to load.
    $self->{components_path} = 'storage::dell::compellent::snmp::mode::components';
    $self->{components_module} = ['ctrl', 'disk', 'ctrlfan', 'ctrlpower', 'ctrlvoltage', 'ctrltemp',
        'encl', 'enclfan', 'enclpower', 'encliomod', 'encltemp', 'volume', 'cache', 'server', 'sc'];
}
sub snmp_execute {
    my ($self, %options) = @_;

    # Fetch all SNMP tables requested by the loaded component modules in
    # a single pass; $self->{request} is filled during component setup.
    $self->{snmp} = $options{snmp};
    $self->{results} = $self->{snmp}->get_multiple_table(oids => $self->{request});
}
sub new {
    my ($class, %options) = @_;

    # no_absent => 1: components reported as absent are not failures.
    my $self = $class->SUPER::new(package => __PACKAGE__, %options, no_absent => 1);
    bless $self, $class;

    $self->{version} = '1.0';
    # No mode-specific options beyond those of the hardware template.
    $options{options}->add_options(arguments =>
                                {
                                });

    return $self;
}
1;
__END__
=head1 MODE
Check sensors.
=over 8
=item B<--component>
Which component to check (Default: '.*').
Can be: 'ctrl', 'disk', 'encl', 'ctrlfan', 'ctrlpower', 'ctrlvoltage',
'ctrltemp', 'enclfan', 'enclpower', 'encliomod', 'encltemp', 'volume', 'cache', 'server', 'sc'.
=item B<--filter>
Exclude some parts (comma-separated list) (example: --filter=ctrlfan --filter=enclpower).
A specific instance can also be excluded: --filter=ctrlfan,1
=item B<--no-component>
Return an error if no components are checked, i.e. if the total
(including skipped components) is 0. (Default: returns 'critical'.)
=item B<--threshold-overload>
Set to overload default threshold values (syntax: section,[instance,]status,regexp)
It used before default thresholds (order stays).
Example: --threshold-overload='ctrl,CRITICAL,^(?!(up)$)'
=item B<--warning>
Set warning threshold for 'ctrltemp', 'ctrlfan', 'ctrlvoltage', 'encltemp' (syntax: type,regexp,threshold)
Example: --warning='ctrltemp,1,30'
=item B<--critical>
Set critical threshold for 'ctrltemp', 'ctrlfan', 'ctrlvoltage', 'encltemp' (syntax: type,regexp,threshold)
Example: --critical='ctrltemp,1,50'
=back
=cut | wilfriedcomte/centreon-plugins | storage/dell/compellent/snmp/mode/hardware.pm | Perl | apache-2.0 | 3,410 |
#
# Copyright 2021 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package storage::netapp::ontap::oncommandapi::mode::lunusage;
use base qw(centreon::plugins::templates::counter);
use strict;
use warnings;
sub custom_usage_perfdata {
    my ($self, %options) = @_;

    # Emit the 'used' perfdata counter in bytes, instanced by lun path
    # when several luns are reported; warning/critical thresholds are
    # rescaled against the lun's total size (percent -> bytes).
    $self->{output}->perfdata_add(
        label => 'used', unit => 'B',
        instances => $self->use_instances(extra_instance => $options{extra_instance}) ? $self->{result_values}->{display} : undef,
        value => $self->{result_values}->{used},
        warning => $self->{perfdata}->get_perfdata_for_output(label => 'warning-' . $self->{thlabel}, total => $self->{result_values}->{total}, cast_int => 1),
        critical => $self->{perfdata}->get_perfdata_for_output(label => 'critical-' . $self->{thlabel}, total => $self->{result_values}->{total}, cast_int => 1),
        min => 0, max => $self->{result_values}->{total}
    );
}
sub custom_usage_threshold {
    my ($self, %options) = @_;

    my ($exit, $threshold_value);

    # Select the value the thresholds apply to: used space by default,
    # free space with --free, and the percentage variants when --units=%.
    $threshold_value = $self->{result_values}->{used};
    $threshold_value = $self->{result_values}->{free} if (defined($self->{instance_mode}->{option_results}->{free}));
    if ($self->{instance_mode}->{option_results}->{units} eq '%') {
        $threshold_value = $self->{result_values}->{prct_used};
        $threshold_value = $self->{result_values}->{prct_free} if (defined($self->{instance_mode}->{option_results}->{free}));
    }
    # Critical is checked first; the worst matching severity wins.
    $exit = $self->{perfdata}->threshold_check(value => $threshold_value,
                                               threshold => [ { label => 'critical-' . $self->{thlabel}, exit_litteral => 'critical' },
                                                              { label => 'warning-' . $self->{thlabel}, exit_litteral => 'warning' } ]);
    return $exit;
}
sub custom_usage_output {
    my ($self, %options) = @_;

    # Render a human-readable usage summary ("Total: X Used: Y (p%)
    # Free: Z (q%)") with byte values scaled to a convenient unit.
    my %pretty;
    foreach my $field ('total', 'used', 'free') {
        my ($scaled, $unit) = $self->{perfdata}->change_bytes(value => $self->{result_values}->{$field});
        $pretty{$field} = $scaled . " " . $unit;
    }

    return sprintf("Total: %s Used: %s (%.2f%%) Free: %s (%.2f%%)",
                   $pretty{total},
                   $pretty{used}, $self->{result_values}->{prct_used},
                   $pretty{free}, $self->{result_values}->{prct_free});
}
sub custom_usage_calc {
    my ($self, %options) = @_;

    # Pull the raw values for this instance out of the collected datas.
    my $instance = $self->{instance};
    $self->{result_values}->{display} = $options{new_datas}->{$instance . '_path'};
    $self->{result_values}->{total}   = $options{new_datas}->{$instance . '_size'};
    $self->{result_values}->{used}    = $options{new_datas}->{$instance . '_size_used'};

    if ($self->{result_values}->{total} == 0) {
        # Zero-sized lun: avoid dividing by zero and report everything as 0.
        $self->{result_values}->{$_} = '0' for ('free', 'prct_used', 'prct_free');
    } else {
        my $total = $self->{result_values}->{total};
        my $used  = $self->{result_values}->{used};
        $self->{result_values}->{free}      = $total - $used;
        $self->{result_values}->{prct_used} = $used * 100 / $total;
        $self->{result_values}->{prct_free} = 100 - $self->{result_values}->{prct_used};
    }

    return 0;
}
sub prefix_output {
    my ($self, %options) = @_;

    # Prefix every counter line with the lun path.
    return "Lun '" . $options{instance_value}->{path} . "' ";
}
sub set_counters {
    my ($self, %options) = @_;

    # One instanced counter group ('luns', type 1 = one status line per
    # instance) with a single 'usage' counter wired to the custom
    # calc/output/perfdata/threshold callbacks defined above.
    $self->{maps_counters_type} = [
        { name => 'luns', type => 1, cb_prefix_output => 'prefix_output', message_multiple => 'All luns usage are ok' },
    ];

    $self->{maps_counters}->{luns} = [
        { label => 'usage', set => {
                key_values => [ { name => 'size_used' }, { name => 'size' }, { name => 'path' } ],
                closure_custom_calc => $self->can('custom_usage_calc'),
                closure_custom_output => $self->can('custom_usage_output'),
                closure_custom_perfdata => $self->can('custom_usage_perfdata'),
                closure_custom_threshold_check => $self->can('custom_usage_threshold'),
            }
        },
    ];
}
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    # --filter-name : regexp applied to the lun path
    # --units       : '%' (default) or 'B', unit of the thresholds
    # --free        : apply thresholds to free space instead of used
    $options{options}->add_options(arguments => {
        'filter-name:s' => { name => 'filter_name' },
        'units:s'       => { name => 'units', default => '%' },
        'free'          => { name => 'free' }
    });

    return $self;
}
sub manage_selection {
    my ($self, %options) = @_;

    # Fetch every lun from the OnCommand API and keep those whose path
    # matches --filter-name (all luns when the filter is empty).
    my $result = $options{custom}->get(path => '/luns');

    foreach my $lun (@{$result}) {
        if (defined($self->{option_results}->{filter_name}) && $self->{option_results}->{filter_name} ne '' &&
            $lun->{path} !~ /$self->{option_results}->{filter_name}/) {
            $self->{output}->output_add(long_msg => "skipping '" . $lun->{path} . "': no matching filter name.", debug => 1);
            next;
        }

        # Keyed by the lun's API key; only the fields the counters need.
        $self->{luns}->{$lun->{key}} = {
            path => $lun->{path},
            size => $lun->{size},
            size_used => $lun->{size_used},
        }
    }

    # Exit with an explicit message rather than reporting OK on no data.
    if (scalar(keys %{$self->{luns}}) <= 0) {
        $self->{output}->add_option_msg(short_msg => "No entry found.");
        $self->{output}->option_exit();
    }
}
1;
__END__
=head1 MODE
Check NetApp luns usage.
=over 8
=item B<--filter-name>
Filter lun name (can be a regexp).
=item B<--warning-usage>
Threshold warning.
=item B<--critical-usage>
Threshold critical.
=item B<--units>
Units of thresholds (Default: '%') ('%', 'B').
=item B<--free>
Thresholds are on free space left.
=back
=cut
| Tpo76/centreon-plugins | storage/netapp/ontap/oncommandapi/mode/lunusage.pm | Perl | apache-2.0 | 6,516 |
package Google::Ads::AdWords::v201809::AdGroupBidLandscapePage;
use strict;
use warnings;
__PACKAGE__->_set_element_form_qualified(1);
sub get_xmlns { 'https://adwords.google.com/api/adwords/cm/v201809' };
our $XML_ATTRIBUTE_CLASS;
undef $XML_ATTRIBUTE_CLASS;
sub __get_attr_class {
    # Auto-generated accessor: this complex type has no XML attribute class.
    return $XML_ATTRIBUTE_CLASS;
}
use base qw(Google::Ads::AdWords::v201809::NoStatsPage);
# Variety: sequence
use Class::Std::Fast::Storable constructor => 'none';
use base qw(Google::Ads::SOAP::Typelib::ComplexType);
{ # BLOCK to scope variables

# Auto-generated SOAP::WSDL plumbing (regenerated from the AdWords WSDL;
# do not edit by hand). Class::Std::Fast inside-out storage with
# read-only accessors for the fields of AdGroupBidLandscapePage.
my %totalNumEntries_of :ATTR(:get<totalNumEntries>);
my %Page__Type_of :ATTR(:get<Page__Type>);
my %entries_of :ATTR(:get<entries>);

# Registers field order, storage hashes, field types and XML element
# names with the SOAP::WSDL complex-type factory.
__PACKAGE__->_factory(
    [ qw(        totalNumEntries
        Page__Type
        entries

    ) ],
    {
        'totalNumEntries' => \%totalNumEntries_of,
        'Page__Type' => \%Page__Type_of,
        'entries' => \%entries_of,
    },
    {
        'totalNumEntries' => 'SOAP::WSDL::XSD::Typelib::Builtin::int',
        'Page__Type' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
        'entries' => 'Google::Ads::AdWords::v201809::AdGroupBidLandscape',
    },
    {

        'totalNumEntries' => 'totalNumEntries',
        'Page__Type' => 'Page.Type',
        'entries' => 'entries',
    }
);

} # end BLOCK
1;
=pod
=head1 NAME
Google::Ads::AdWords::v201809::AdGroupBidLandscapePage
=head1 DESCRIPTION
Perl data type class for the XML Schema defined complexType
AdGroupBidLandscapePage from the namespace https://adwords.google.com/api/adwords/cm/v201809.
Contains a subset of {@link AdGroupBidLandscape} objects resulting from the filtering and paging of {@link DataService#getAdGroupBidLandscape} call.
=head2 PROPERTIES
The following properties may be accessed using get_PROPERTY / set_PROPERTY
methods:
=over
=item * entries
=back
=head1 METHODS
=head2 new
Constructor. The following data structure may be passed to new():
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| googleads/googleads-perl-lib | lib/Google/Ads/AdWords/v201809/AdGroupBidLandscapePage.pm | Perl | apache-2.0 | 1,993 |
#!/usr/bin/perl
package Alignment::STAR;
use strict;
use warnings;
use File::Basename;
use CQS::PBS;
use CQS::ConfigUtils;
use CQS::SystemUtils;
use CQS::FileUtils;
use CQS::Task;
use CQS::NGSCommon;
use CQS::StringUtils;
our @ISA = qw(CQS::Task);
sub new {
  my ($class) = @_;
  my $self = $class->SUPER::new();

  # Task name and file suffix used by the pipeline framework to build
  # pbs/log/result file names for this task.
  $self->{_name}   = "STAR";
  $self->{_suffix} = "_star";
  bless $self, $class;
  return $self;
}
sub perform {
  my ( $self, $config, $section ) = @_;

  # Generate one PBS submission script per sample that runs the STAR
  # aligner and then indexes/flagstats the resulting BAM, plus a master
  # shell script that submits all the per-sample jobs.
  my ( $task_name, $path_file, $pbsDesc, $target_dir, $logDir, $pbsDir, $resultDir, $option, $sh_direct, $cluster, $thread ) = get_parameter( $config, $section );

  # Keep only the best-scoring alignments flagged as primary.
  if ( $option !~ /outSAMprimaryFlag/ ) {
    $option = $option . " --outSAMprimaryFlag AllBestScore";
  }

  my $output_sort_by_coordinate = get_option( $config, $section, "output_sort_by_coordinate", 0 );
  my $output_unsorted = get_option( $config, $section, "output_unsorted", 0 );
  # At least one output type is required; fall back to unsorted BAM.
  if ( !$output_sort_by_coordinate && !$output_unsorted ) {
    $output_unsorted = 1;
  }

  my $output_format = "--outSAMtype BAM";
  if ($output_sort_by_coordinate) {
    $output_format = $output_format . " SortedByCoordinate";
  }
  if ($output_unsorted) {
    $output_format = $output_format . " Unsorted";
  }

  my $genome_dir = parse_param_file( $config, $section, "genome_dir", 1 );
  my %fqFiles = %{ get_raw_files( $config, $section ) };

  my $shfile = $self->taskfile( $pbsDir, $task_name );
  open( SH, ">$shfile" ) or die "Cannot create $shfile";
  print SH get_run_command($sh_direct);

  my $threadcount = get_pbs_thread( $config->{$section}{pbs} );

  for my $sampleName ( sort keys %fqFiles ) {
    my @sampleFiles = @{ $fqFiles{$sampleName} };
    # Gzipped fastq files are decompressed on the fly by STAR via zcat.
    my $uncompress = ( $sampleFiles[0] =~ /.gz$/ ) ? " --readFilesCommand zcat" : "";
    my $samples = join( " ", @sampleFiles );
    my $pbsFile = $self->pbsfile( $pbsDir, $sampleName );
    my $pbsName = basename($pbsFile);
    my $log = $self->logfile( $logDir, $sampleName );
    my $curDir = create_directory_or_die( $resultDir . "/$sampleName" );
    # Read-group line embedded in the output BAM header.
    my $rgline = "ID:$sampleName SM:$sampleName LB:$sampleName PL:ILLUMINA PU:ILLUMINA";
    # The job is considered done once this file exists and is non-empty.
    my $final = $output_sort_by_coordinate ? $sampleName . "_Aligned.sortedByCoord.out.bam" : $sampleName . "_Aligned.out.bam";
    my $log_desc = $cluster->get_log_desc($log);
    open( OUT, ">$pbsFile" ) or die $!;
    print OUT "$pbsDesc
$log_desc
$path_file
cd $curDir
if [ -s $final ]; then
echo job has already been done. if you want to do again, delete ${curDir}/${final} and submit job again.
exit 0;
fi
echo STAR_start=`date`
STAR $option --outSAMattrRGline $rgline --runThreadN $thread --genomeDir $genome_dir --readFilesIn $samples $uncompress --outFileNamePrefix ${sampleName}_ $output_format
samtools index $final
samtools flagstat $final > ${final}.stat
echo finished=`date`
";
    close(OUT);
    print SH "\$MYCMD ./$pbsName \n";
    print "$pbsFile created\n";
  }
  print SH "exit 0\n";
  close(SH);

  if ( is_linux() ) {
    chmod 0755, $shfile;
  }

  print "!!!shell file $shfile created, you can run this shell file to submit all tasks.\n";
}
sub result {
  my ( $self, $config, $section, $pattern ) = @_;

  # Report the files produced by perform() for each sample: the aligned
  # BAM(s) — sorted and/or unsorted, mirroring the output options used
  # at alignment time — plus the splice-junction table, filtered by
  # $pattern.
  my ( $task_name, $path_file, $pbsDesc, $target_dir, $logDir, $pbsDir, $resultDir, $option, $sh_direct ) = get_parameter( $config, $section );

  my %rawFiles = %{ get_raw_files( $config, $section ) };

  # These options do not depend on the sample, so resolve them once
  # instead of once per sample (they were previously re-read inside the
  # loop; an unused 'sort_by_coordinate' lookup has also been removed).
  my $output_sort_by_coordinate = get_option( $config, $section, "output_sort_by_coordinate", 0 );
  my $output_unsorted = get_option( $config, $section, "output_unsorted", 0 );
  if ( !$output_sort_by_coordinate && !$output_unsorted ) {
    # Same default as perform(): fall back to unsorted output.
    $output_unsorted = 1;
  }

  my $result = {};
  for my $sampleName ( keys %rawFiles ) {
    my @resultFiles = ();
    my $tab = $sampleName . "_SJ.out.tab";
    if ($output_sort_by_coordinate) {
      push( @resultFiles, "${resultDir}/${sampleName}/${sampleName}_Aligned.sortedByCoord.out.bam" );
    }
    if ($output_unsorted) {
      push( @resultFiles, "${resultDir}/${sampleName}/${sampleName}_Aligned.out.bam" );
    }
    push( @resultFiles, "${resultDir}/${sampleName}/${tab}" );

    $result->{$sampleName} = filter_array( \@resultFiles, $pattern );
  }
  return $result;
}
1;
| realizor/ngsperl | lib/Alignment/STAR.pm | Perl | apache-2.0 | 4,513 |
#!/usr/bin/perl
use DBI;
use Digest::MD5 qw(md5 md5_hex md5_base64);
use File::Basename;
use lib dirname (__FILE__);
use config qw(%conf);
use warnings;
# Connect to target DB
my $dbh = DBI->connect("DBI:mysql:database=$conf{dbName};host=$conf{dbHost}","$conf{dbUser}","$conf{dbPass}", {'RaiseError' => 1});

# Target svn authz file, written in UTF-8.
my $filename=$conf{accessFile};
open(my $fh, '>:utf8', $filename) or die "Could not open file '$filename' $!";
print $fh "[groups]\n";

# global public users
#10:viewer,25:reporter,40:updater,55:developer,70:manager,90:administrator
# Enabled users at exactly developer level (55) form the read-only group.
my $queryUsers = $dbh->prepare("select id, username from mantis_user_table where enabled=true and access_level=55");
my ($id, $username);
$queryUsers->execute();
$queryUsers->bind_columns(undef, \$id, \$username);
print $fh "global_public=";
# NOTE(review): this loop leaves a trailing comma in the group list (the
# per-project "output groups" section below handles the separator
# correctly) — confirm the svn authz parser tolerates it.
while ($queryUsers->fetch())
{
    print $fh "$username,";
}
print $fh "\n\n";
$queryUsers->finish();

# global admin users
#10:viewer,25:reporter,40:updater,55:developer,70:manager,90:administrator
# Managers and administrators (level >= 70) get read-write everywhere.
$queryUsers = $dbh->prepare("select id, username from mantis_user_table where enabled=true and access_level>=70");
$queryUsers->execute();
$queryUsers->bind_columns(undef, \$id, \$username);
print $fh "global_admin=";
while ($queryUsers->fetch())
{
    print $fh "$username,";
}
print $fh "\n\n";
$queryUsers->finish();
#project groups;
#10:development,30:release,50:stable,70:obsolete
# Enabled, non-obsolete projects whose description contains an
# "svn:<url>" line are exported; all other projects are skipped.
my $sqr = $dbh->prepare("select id, name, description from mantis_project_table where enabled=true and status<70");
$sqr->execute();
my($proj_name, $proj_desc);
$sqr->bind_columns(undef, \$id, \$proj_name, \$proj_desc);
my @projects;
while($sqr->fetch())
{
    # The svn repository path is parsed out of the project description.
    next if ($proj_desc !~ /svn:(.*)$/m);
    my $svnurl = $1;
    my @users;
    #query for project users
    #10:viewer,25:reporter,40:updater,55:developer,70:manager,90:administrator
    my $queryUsers = $dbh->prepare("select username from mantis_project_user_list_table pu, mantis_user_table u where pu.user_id = u.id and pu.project_id = ? and pu.access_level >= 55 order by username asc");
    $queryUsers->execute($id);
    my $username;
    $queryUsers->bind_columns(undef, \$username);
    while ($queryUsers->fetch())
    {
        push @users, $username;
    }
    $queryUsers->finish();
    # Accumulate one record per project for the output phase below.
    my %pr = (
        name => $proj_name,
        svnurl => $svnurl,
        users => \@users,
    );
    push @projects, \%pr;
}
$sqr->finish();
$dbh->disconnect();
#output groups
# One "proj_<i>" group per exported project, listing its developers.
foreach my $i (0 .. $#projects)
{
    my %project = %{$projects[$i]};
    print $fh "#Project Name: $project{name}\n";
    print $fh "proj_$i = ";
    my $first = 1;
    foreach my $user (@{$project{users}})
    {
        if ($first)
        {
            $first = 0;
        }
        else
        {
            print $fh ",";
        }
        print $fh $user;
    }
    print $fh "\n\n";
}

#output permission control
# Per-repository ACL: world read, admins and project members read-write.
foreach my $i (0 .. $#projects)
{
    my %project = %{$projects[$i]};
    print $fh "[$project{svnurl}]\n";
    print $fh "\@global_public = r\n";
    print $fh "\@global_admin = rw\n";
    print $fh "\@proj_$i = rw\n";
    print $fh "\n";
}
close $fh;
#
# Copyright 2018 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::f5::bigip::snmp::mode::listnodes;
use base qw(centreon::plugins::mode);
use strict;
use warnings;
my $oid_ltmNodeAddrName = '.1.3.6.1.4.1.3375.2.2.4.1.2.1.17'; # old
my $oid_ltmNodeAddrStatusName = '.1.3.6.1.4.1.3375.2.2.4.3.2.1.7'; # new
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    $self->{version} = '1.0';
    # --filter-name : regexp applied to the node name.
    $options{options}->add_options(arguments =>
                                {
                                  "filter-name:s" => { name => 'filter_name' },
                                });

    return $self;
}
sub check_options {
    my ($self, %options) = @_;

    # No mode-specific validation; defer to the base class.
    $self->SUPER::init(%options);
}
sub manage_selection {
    my ($self, %options) = @_;

    # Fetch both the old (ltmNodeAddrName) and new (ltmNodeAddrStatusName)
    # OID branches in one request; prefer the new branch and fall back to
    # the old one when the new branch returns no rows (older TMOS).
    my $snmp_result = $self->{snmp}->get_multiple_table(oids => [ { oid => $oid_ltmNodeAddrName }, { oid => $oid_ltmNodeAddrStatusName } ], nothing_quit => 1);
    my ($branch_name) = ($oid_ltmNodeAddrStatusName);
    if (!defined($snmp_result->{$oid_ltmNodeAddrStatusName}) || scalar(keys %{$snmp_result->{$oid_ltmNodeAddrStatusName}}) == 0) {
        ($branch_name) = ($oid_ltmNodeAddrName);
    }

    # Keep the nodes matching --filter-name (all nodes when no filter).
    $self->{node} = {};
    foreach my $oid (keys %{$snmp_result->{$branch_name}}) {
        if (defined($self->{option_results}->{filter_name}) && $self->{option_results}->{filter_name} ne '' &&
            $snmp_result->{$branch_name}->{$oid} !~ /$self->{option_results}->{filter_name}/) {
            # Fixed copy-paste in the skip message: this module lists
            # nodes, not service classes.
            $self->{output}->output_add(long_msg => "skipping node '" . $snmp_result->{$branch_name}->{$oid} . "'.", debug => 1);
            next;
        }
        $self->{node}->{$snmp_result->{$branch_name}->{$oid}} = { name => $snmp_result->{$branch_name}->{$oid} };
    }
}
sub run {
    my ($self, %options) = @_;
    $self->{snmp} = $options{snmp};

    $self->manage_selection();
    # One long-output line per node, then a single OK summary line.
    foreach my $name (sort keys %{$self->{node}}) {
        $self->{output}->output_add(long_msg => "'" . $name . "'");
    }

    $self->{output}->output_add(severity => 'OK',
                                short_msg => 'List Nodes:');
    $self->{output}->display(nolabel => 1, force_ignore_perfdata => 1, force_long_output => 1);
    $self->{output}->exit();
}
sub disco_format {
    my ($self, %options) = @_;

    # The discovery XML exposes a single attribute per node: its name.
    $self->{output}->add_disco_format(elements => ['name']);
}
sub disco_show {
    my ($self, %options) = @_;
    $self->{snmp} = $options{snmp};

    $self->manage_selection();
    # Emit one discovery entry per matching node.
    foreach my $name (sort keys %{$self->{node}}) {
        $self->{output}->add_disco_entry(name => $name);
    }
}
1;
__END__
=head1 MODE
List nodes.
=over 8
=item B<--filter-name>
Filter by node name.
=back
=cut
| wilfriedcomte/centreon-plugins | network/f5/bigip/snmp/mode/listnodes.pm | Perl | apache-2.0 | 3,511 |
package Paws::MTurk::RejectAssignment;
  # Auto-generated Paws request class for the MTurk RejectAssignment API
  # call; the attributes mirror the request parameters (see POD below).
  use Moose;
  has AssignmentId => (is => 'ro', isa => 'Str', required => 1);
  has RequesterFeedback => (is => 'ro', isa => 'Str', required => 1);

  use MooseX::ClassAttribute;

  # Wiring consumed by the Paws caller machinery: API operation name and
  # the class the response is inflated into.
  class_has _api_call => (isa => 'Str', is => 'ro', default => 'RejectAssignment');
  class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::MTurk::RejectAssignmentResponse');
  class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::MTurk::RejectAssignment - Arguments for method RejectAssignment on Paws::MTurk
=head1 DESCRIPTION
This class represents the parameters used for calling the method RejectAssignment on the
Amazon Mechanical Turk service. Use the attributes of this class
as arguments to method RejectAssignment.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to RejectAssignment.
As an example:
$service_obj->RejectAssignment(Att1 => $value1, Att2 => $value2, ...);
Values for attributes that are native types (Int, String, Float, etc) can be passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instantiate the underlying object.
=head1 ATTRIBUTES
=head2 B<REQUIRED> AssignmentId => Str
The ID of the assignment. The assignment must correspond to a HIT
created by the Requester.
=head2 B<REQUIRED> RequesterFeedback => Str
A message for the Worker, which the Worker can see in the Status
section of the web site.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method RejectAssignment in L<Paws::MTurk>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/MTurk/RejectAssignment.pm | Perl | apache-2.0 | 1,880 |
use strict;
package main;

# Emits the top-level frameset page for the PerfStat tool: a navigation
# frame on the left (250px) and a content frame (initially blank) on the
# right, with a <noframes> fallback for browsers without frame support.
print("<html>\n");
print(" <head>\n");
print(" <meta http-equiv=\"content-type\" content=\"text/html;charset=iso-8859-1\">\n");
print(" <title>PerfStat Tool: Status And Performance Monitor</title>\n");
print(" </head>\n");
print(" \n");
print(" <frameset id=\"perfmonFrameset\" cols=\"250,*\" frameBorder=\"yes\" frameBorder=\"1\" frameSpacing=\"-1\" border=\"1\">\n");
print(" <frame src=\"navigation1/index.pl\" name=\"navigation\">\n");
print(" <frame src=\"../appRez/html/dsp_blank.html\" name=\"content\">\n");
print(" </frameset>\n");
print(" \n");
print(" <noframes>\n");
print(" <body bgcolor=\"#ffffff\">\n");
print(" <p>Sorry: This application requires a frames-enabled browser</p>\n");
print(" </body>\n");
print(" </noframes>\n");
print("</html>\n");
package Paws::Firehose::ExtendedS3DestinationConfiguration;
  # Auto-generated Paws shape class describing an extended Amazon S3
  # destination for a Kinesis Firehose delivery stream; attributes mirror
  # the API model (per-attribute details are in the POD below).
  use Moose;
  has BucketARN => (is => 'ro', isa => 'Str', required => 1);
  has BufferingHints => (is => 'ro', isa => 'Paws::Firehose::BufferingHints');
  has CloudWatchLoggingOptions => (is => 'ro', isa => 'Paws::Firehose::CloudWatchLoggingOptions');
  has CompressionFormat => (is => 'ro', isa => 'Str');
  has EncryptionConfiguration => (is => 'ro', isa => 'Paws::Firehose::EncryptionConfiguration');
  has Prefix => (is => 'ro', isa => 'Str');
  has ProcessingConfiguration => (is => 'ro', isa => 'Paws::Firehose::ProcessingConfiguration');
  has RoleARN => (is => 'ro', isa => 'Str', required => 1);
  has S3BackupConfiguration => (is => 'ro', isa => 'Paws::Firehose::S3DestinationConfiguration');
  has S3BackupMode => (is => 'ro', isa => 'Str');
1;
### main pod documentation begin ###
=head1 NAME
Paws::Firehose::ExtendedS3DestinationConfiguration
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::Firehose::ExtendedS3DestinationConfiguration object:
$service_obj->Method(Att1 => { BucketARN => $value, ..., S3BackupMode => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be an Paws::Firehose::ExtendedS3DestinationConfiguration object:
$result = $service_obj->Method(...);
$result->Att1->BucketARN
=head1 DESCRIPTION
Describes the configuration of a destination in Amazon S3.
=head1 ATTRIBUTES
=head2 B<REQUIRED> BucketARN => Str
The ARN of the S3 bucket.
=head2 BufferingHints => L<Paws::Firehose::BufferingHints>
The buffering option.
=head2 CloudWatchLoggingOptions => L<Paws::Firehose::CloudWatchLoggingOptions>
The CloudWatch logging options for your delivery stream.
=head2 CompressionFormat => Str
The compression format. If no value is specified, the default is
UNCOMPRESSED.
=head2 EncryptionConfiguration => L<Paws::Firehose::EncryptionConfiguration>
The encryption configuration. If no value is specified, the default is
no encryption.
=head2 Prefix => Str
The "YYYY/MM/DD/HH" time format prefix is automatically used for
delivered S3 files. You can specify an extra prefix to be added in
front of the time format prefix. If the prefix ends with a slash, it
appears as a folder in the S3 bucket. For more information, see Amazon
S3 Object Name Format in the I<Amazon Kinesis Firehose Developer
Guide>.
=head2 ProcessingConfiguration => L<Paws::Firehose::ProcessingConfiguration>
The data processing configuration.
=head2 B<REQUIRED> RoleARN => Str
The ARN of the AWS credentials.
=head2 S3BackupConfiguration => L<Paws::Firehose::S3DestinationConfiguration>
The configuration for backup in Amazon S3.
=head2 S3BackupMode => Str
The Amazon S3 backup mode.
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::Firehose>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/Firehose/ExtendedS3DestinationConfiguration.pm | Perl | apache-2.0 | 3,339 |
#!/usr/bin/perl
# Benchmark three ways of loading a text file into an array:
# line-by-line reads, a single slurp, and Storable deserialisation.
use warnings;
use strict;
use Benchmark qw(cmpthese timethese);
use Storable;
# Input file read by every benchmarked strategy; shared with the subs
# below via lexical closure.
my $file = 'cmp.txt';
# One-off setup: serialise the file to store.dat so by_store() has
# something to retrieve during the benchmark runs.
storeit();
# Run each strategy 100 times and print a comparison table.
cmpthese(100, {
            'by_line' => \&by_line,
            'by_slurp' => \&by_slurp,
            'by_store' => \&by_store,
        });
# Line-by-line benchmark: accumulate the file's lines one at a time.
#
# Bug fix: the original iterated with C<for my $line (<$fh>)>, which
# evaluates the readline in LIST context and slurps the entire file into
# a temporary list before the loop body runs -- so "by_line" was really
# measuring slurp-plus-copy.  A C<while> loop reads one line per
# iteration, which is what this case is meant to measure.
sub by_line {
    open my $fh, '<', $file
      or die "Can't open $file: $!";
    my @ref_data;
    while ( my $line = <$fh> ) {
        push @ref_data, $line;
    }
    return;
}
# Slurp benchmark: pull every line of $file into memory with a single
# list-context readline.
sub by_slurp {
    open my $in, '<', $file
      or die "Can't open $file: $!";
    my @lines = <$in>;
}
# One-time setup for the by_store() case: read every line of $file and
# serialise the resulting array to disk as store.dat via Storable.
sub storeit {
    open my $in, '<', $file
      or die "Can't open $file: $!";
    my @lines = <$in>;
    close $in;
    store \@lines, 'store.dat';
}
# Storable benchmark: deserialise the array of lines back from store.dat.
#
# Bug fix: Storable::retrieve() returns a REFERENCE to the stored array.
# The original assigned that reference straight into @ref_data, producing
# a single-element array, so this case never rebuilt the list of lines
# and was not comparable with by_line/by_slurp.  Dereference it instead.
sub by_store {
    my @ref_data = @{ retrieve('store.dat') };
}
| jmcveigh/komodo-tools | scripts/perl/benchmark/cmp_slurp_storable_byline.pl | Perl | bsd-2-clause | 883 |
package Tapper::Reports::Receiver::Daemon;
# Daemon wrapper around Tapper::Reports::Receiver: adds start/stop/
# status/restart handling via MooseX::Daemonize and boots the TCP
# receiver loop on the configured report port.
use 5.010;
use strict;
use warnings;
use Tapper::Config;
use Tapper::Reports::Receiver;
use Log::Log4perl;
use Moose;
with 'MooseX::Daemonize';
# After MooseX::Daemonize has forked us into the background, initialise
# Log4perl from the configured file and run the receiver.  The early
# return keeps the foreground (non-daemon) process from starting a
# second listener.
after start => sub {
        my $self = shift;
        return unless $self->is_daemon;
        my $logconf = Tapper::Config->subconfig->{files}{log4perl_cfg};
        Log::Log4perl->init($logconf);
        my $port = Tapper::Config->subconfig->{report_port};
        Tapper::Reports::Receiver->new()->run($port);
};
=head2 run

Run daemon.

=cut

sub run
{
        my ($self) = @_;
        my ($command) = @ARGV;
        # Security/robustness fix: compare with string equality instead of
        # interpolating the user-supplied word into a regex.  The original
        # grep /^$command$/ let an argument such as '.*' match any command
        # and could die outright on invalid regex metacharacters.
        return unless $command && grep { $command eq $_ } qw(start status restart stop);
        $self->$command;
        say $self->status_message;
}

1;
| tapper/Tapper-Reports-Receiver | lib/Tapper/Reports/Receiver/Daemon.pm | Perl | bsd-2-clause | 836 |
#!/usr/bin/perl
#
# Change the user current plan for another
#
use strict;
use warnings;
use Getopt::Long;
use LWP::Protocol::https;
use LWP::UserAgent;
use MIME::Base64;
use URI::Escape;
use XML::Simple;
# Command-line state, populated by GetOptions() below.
my $help;
my $old_plan = '';
my $new_plan = '';
my $username = '';
my $verbose  = '';
# Show usage when no arguments are given, on a parse error, or on --help.
usage() if ( @ARGV < 1 or
            ! GetOptions (
            'help|?' => \$help,
            'new_plan=s' => \$new_plan,
            'old_plan=s' => \$old_plan,
            'username=s' => \$username,
            'verbose' => \$verbose,
            )
            or defined $help );
# Both plan names are later escaped with uri_escape(); reject values that
# already contain percent-encoding so they do not get double-encoded.
# Bug fix: the first message used to say "old plan" while testing $new_plan.
die "new plan must not contain url encoding\n"
    if($new_plan =~ /\%/);
die "old plan must not contain url encoding\n"
    if($old_plan =~ /\%/);
# Read the WHM credentials ("user password" on one line) from root's file.
# Bug fix: lexical filehandle, three-arg open, and an error check instead
# of the old unchecked bareword open.
my ($user, $pass);
open my $pwd_fh, '<', '/root/.cpanelpwd'
    or die "Can't open /root/.cpanelpwd: $!";
while (<$pwd_fh>) {
    chomp;
    ($user, $pass) = split(" ");
}
close $pwd_fh;
$new_plan = uri_escape($new_plan);
# Fetch the account's current plan via the WHM accountsummary API.
my $summary_url = "xml-api/accountsummary?api.version=1&user=$username";
my $data = whm_connect($user, $pass, $summary_url);
if ($data->{data}->{acct}->{plan} eq $old_plan) {
    # Current plan matches the expected one: apply the new package.
    my $pkg_url = "xml-api/changepackage?api.version=1&pkg=$new_plan&user=$username";
    my $status = whm_connect($user, $pass, $pkg_url);
    print "$status->{metadata}->{reason}\n" if ($verbose);
    exit;
}
else {
    print "No plan change applied to $username as old plan didn't match\n" if ($verbose);
    exit 1;
}
# Issue an authenticated GET against the local WHM API endpoint given by
# $url and return the parsed XML response as a Perl data structure.
sub whm_connect {
    my ($user, $pass, $url) = @_;
    my $credentials = "Basic " . MIME::Base64::encode("$user:$pass");
    my $agent = LWP::UserAgent->new(
        ssl_opts => {
            verify_hostname => 0,
            SSL_verify_mode => 'SSL_VERIFY_NONE',
            SSL_use_cert    => 0,
        },
    );
    my $req = HTTP::Request->new(GET => "https://127.0.0.1:2087/$url");
    $req->header(Authorization => $credentials);
    my $res = $agent->request($req);
    return XML::Simple->new->XMLin($res->content);
}
# Print usage information (plus any unknown-option message) and exit.
sub usage
{
    print "Unknown option: @_\n" if ( @_ );
    # Bug fix: the option names must match the GetOptions() spec above,
    # which uses underscores (new_plan/old_plan); the old text advertised
    # hyphenated names that Getopt::Long would reject.
    print "usage: program [--username|-u USERNAME]";
    print " [--new_plan|-n \"NEW PLAN\"]";
    print " [--old_plan|-o \"OLD PLAN\"]";
    print " [--verbose|-v]";
    print " [--help|-?]\n";
    exit;
}
| fleu42/perl-misc | change-account-pkg.pl | Perl | bsd-3-clause | 2,082 |
package MyElements::getCoverageFromTSNResponse;
# Auto-generated SOAP::WSDL element class for the response of the ITIS
# getCoverageFromTSN web-service call.  Regenerated from the WSDL; the
# statement order below is significant to SOAP::WSDL/Class::Std setup.
use strict;
use warnings;
{ # BLOCK to scope variables
# XML namespace this element belongs to.
sub get_xmlns { 'http://itis_service.itis.usgs.org' }
__PACKAGE__->__set_name('getCoverageFromTSNResponse');
__PACKAGE__->__set_nillable();
# minOccurs/maxOccurs/ref are deliberately left unset (undef).
__PACKAGE__->__set_minOccurs();
__PACKAGE__->__set_maxOccurs();
__PACKAGE__->__set_ref();
use base qw(
    SOAP::WSDL::XSD::Typelib::Element
    SOAP::WSDL::XSD::Typelib::ComplexType
);
# This element carries no XML attributes.
our $XML_ATTRIBUTE_CLASS;
undef $XML_ATTRIBUTE_CLASS;
sub __get_attr_class {
    return $XML_ATTRIBUTE_CLASS;
}
use Class::Std::Fast::Storable constructor => 'none';
use base qw(SOAP::WSDL::XSD::Typelib::ComplexType);
Class::Std::initialize();
{ # BLOCK to scope variables
# Inside-out attribute storage for the single <return> child element;
# :ATTR(:get<return>) generates the get_return() accessor.
my %return_of :ATTR(:get<return>);
# Wire up the complex-type factory: element order, storage, child type
# (MyTypes::SvcCoverageData), and XML name.
__PACKAGE__->_factory(
    [ qw(        return
    ) ],
    {
        'return' => \%return_of,
    },
    {
        'return' => 'MyTypes::SvcCoverageData',
    },
    {
        'return' => 'return',
    }
);
} # end BLOCK
} # end of BLOCK
1;
=pod
=head1 NAME
MyElements::getCoverageFromTSNResponse
=head1 DESCRIPTION
Perl data type class for the XML Schema defined element
getCoverageFromTSNResponse from the namespace http://itis_service.itis.usgs.org.
=head1 PROPERTIES
The following properties may be accessed using get_PROPERTY / set_PROPERTY
methods:
=over
=item * return
$element->set_return($data);
$element->get_return();
=back
=head1 METHODS
=head2 new
my $element = MyElements::getCoverageFromTSNResponse->new($data);
Constructor. The following data structure may be passed to new():
{
return => { # MyTypes::SvcCoverageData
rankId => $some_value, # int
taxonCoverage => $some_value, # string
},
},
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| phylotastic-legacy/TNRastic | tnrs_handler/bin/tnrs_adapter/itis/SOAPInterface/MyElements/getCoverageFromTSNResponse.pm | Perl | bsd-3-clause | 1,769 |
FROM johnnyasantoss/dotnet-mono-docker:dotnet2.0-mono5.2-sdk

WORKDIR /app

# Install native libgit2 before copying sources so the apt layer is cached
# across source-only rebuilds; skip recommended packages and clean the apt
# lists to keep the layer small.
RUN apt-get update \
    && apt-get install -y --no-install-recommends libgit2-24 \
    && rm -rf /var/lib/apt/lists/*

COPY ./** /app/

# Run the Cake "pack" target, capturing all build output for inspection.
RUN ./build.sh -t "pack" -v > dockerbuild.txt 2>&1
| PagueVeloz/JsonFluentMap | Dockerfile | Dockerfile | mit | 197 |
# Rails (bookIT) production image: Ruby 2.3 + Node.js for the asset
# pipeline, PhantomJS for headless browser use, gems via Bundler.
FROM ruby:2.3.3
RUN apt-get update && apt-get install -y \
	#Packages
	net-tools \
	nodejs
#Install phantomjs
RUN apt-get update \
 && apt-get install -y --no-install-recommends \
	ca-certificates \
	bzip2 \
	libfontconfig \
 && apt-get clean \
 && rm -rf /var/lib/apt/lists/*
# Download the PhantomJS 2.1.1 binary tarball; curl is installed only for
# this layer and purged again at the end to keep the image small.
RUN apt-get update \
 && apt-get install -y --no-install-recommends \
	curl \
 && mkdir /tmp/phantomjs \
 && curl -L https://bitbucket.org/ariya/phantomjs/downloads/phantomjs-2.1.1-linux-x86_64.tar.bz2 \
	| tar -xj --strip-components=1 -C /tmp/phantomjs \
 && cd /tmp/phantomjs \
 && mv bin/phantomjs /usr/local/bin \
 && cd \
 && apt-get purge --auto-remove -y \
	curl \
 && apt-get clean \
 && rm -rf /tmp/* /var/lib/apt/lists/*
#Install gems
# Gemfile* is copied alone first so `bundle install` stays cached until
# the Gemfile or Gemfile.lock change.
RUN mkdir /app
WORKDIR /app
COPY Gemfile* /app/
RUN bundle install
RUN apt-get clean
#Upload source
COPY . /app
# Run the app as an unprivileged user that owns the app tree.
RUN useradd ruby
RUN chown -R ruby /app
USER ruby
# Database defaults (overridable at `docker run -e ...` time)
ENV DATABASE_NAME bookIT
ENV DATABASE_HOST db
ENV DATABASE_USER bookIT
ENV DATABASE_PASSWORD password
ENV DATABASE_ADAPTER mysql2
ENV ACCOUNT_ADDRESS https://gamma.chalmers.it
#In production, Host is set to naboo.chalmers.it
# Start server
ENV RAILS_ENV production
ENV RACK_ENV production
ENV SECRET_KEY_BASE secret
ENV PORT 3000
EXPOSE 3000
RUN rake assets:precompile
CMD ["sh", "start.sh"]
| cthit/bookIT | Dockerfile | Dockerfile | mit | 1,372 |
# Using a compact OS
FROM registry.dataos.io/library/nginx
MAINTAINER Golfen Guo <golfen.guo@daocloud.io>
# Nginx is already provided by the base image; just drop the static 2048
# game files into its default document root.
COPY . /usr/share/nginx/html
EXPOSE 80
| yepengxj/dao-2048 | Dockerfile | Dockerfile | mit | 201 |
# If `docker build` is run without "--build-arg IMAGE_VERSION={portal-base version}",
# the default ARG IMAGE_VERSION below is used.
ARG IMAGE_VERSION=release-0.11.3
FROM datosgobar/portal-base:$IMAGE_VERSION
MAINTAINER Leandro Gomez<lgomez@devartis.com>
ARG PORTAL_VERSION
ENV CKAN_HOME /usr/lib/ckan/default
ENV CKAN_DIST_MEDIA /usr/lib/ckan/default/src/ckanext-gobar-theme/ckanext/gobar_theme/public/user_images
ENV CKAN_DEFAULT /etc/ckan/default
WORKDIR /portal
# portal-andino-theme (pinned to a specific commit) plus its requirements;
# then rebuild the combined Spanish translation catalogue.
RUN $CKAN_HOME/bin/pip install -e git+https://github.com/datosgobar/portal-andino-theme.git@0c4b0021bde0e312505e0e4ff90a2d017c755f98#egg=ckanext-gobar_theme && \
    $CKAN_HOME/bin/pip install -r $CKAN_HOME/src/ckanext-gobar-theme/requirements.txt && \
    /etc/ckan_init.d/build-combined-ckan-mo.sh $CKAN_HOME/src/ckanext-gobar-theme/ckanext/gobar_theme/i18n/es/LC_MESSAGES/ckan.po
# "Series de Tiempo" (time series) explorer CKAN extension
RUN $CKAN_HOME/bin/pip install -e git+https://github.com/datosgobar/ckanext-seriestiempoarexplorer.git@2.8.1#egg=ckanext-seriestiempoarexplorer
# DCAT dependencies (the plugin itself is installed by portal-andino-theme's requirements.txt)
RUN $CKAN_HOME/bin/pip install -r $CKAN_HOME/src/ckanext-dcat/requirements.txt
# User-uploaded media directory, writable by the web server user.
RUN mkdir -p $CKAN_DIST_MEDIA
RUN chown -R www-data:www-data $CKAN_DIST_MEDIA
RUN chmod u+rwx $CKAN_DIST_MEDIA
RUN echo "$PORTAL_VERSION" > /portal/version
RUN mkdir -p /var/lib/ckan/theme_config/templates
RUN cp $CKAN_HOME/src/ckanext-gobar-theme/ckanext/gobar_theme/templates/seccion-acerca.html /var/lib/ckan/theme_config/templates
VOLUME $CKAN_DIST_MEDIA $CKAN_DEFAULT
| datosgobar/portal-andino | Dockerfile | Dockerfile | mit | 1,633 |
# OpenTOSCA container engine built from source with Maven; ships the
# Docker CLI + docker-compose so the engine can drive a Docker host.
FROM ubuntu:14.04
MAINTAINER Johannes Wettinger, http://github.com/jojow
ENV ENGINE_BRANCH maven
ENV ENGINE_REV HEAD
ENV MAVEN_VERSION 3.3.9
ENV MAVEN_URL http://archive.apache.org/dist/maven/maven-3/${MAVEN_VERSION}/binaries/apache-maven-${MAVEN_VERSION}-bin.tar.gz
ENV HOME /root
WORKDIR ${HOME}
ENV DEBIAN_FRONTEND noninteractive
ENV PATH ${PATH}:/opt/apache-maven-${MAVEN_VERSION}/bin/
# Replace /dev/random by /dev/urandom to avoid blocking
RUN rm /dev/random && ln -s /dev/urandom /dev/random
# Install dependencies
RUN apt-get update -y && \
    apt-get install -y curl wget git openjdk-7-jdk && \
    apt-get clean -y
# Install Maven from the official binary distribution.
RUN wget ${MAVEN_URL} && \
    tar -zxf apache-maven-${MAVEN_VERSION}-bin.tar.gz && \
    cp -R apache-maven-${MAVEN_VERSION} /opt
# Install Docker, partly from https://github.com/docker-library/docker/blob/master/1.12/Dockerfile
ENV DOCKER_BUCKET get.docker.com
ENV DOCKER_VERSION 1.12.0
ENV DOCKER_SHA256 3dd07f65ea4a7b4c8829f311ab0213bca9ac551b5b24706f3e79a97e22097f8b
ENV DOCKER_COMPOSE_VERSION 1.8.0
# Fetch the static Docker binaries (sha256-verified) and docker-compose.
RUN set -x && \
    curl -fSL "https://${DOCKER_BUCKET}/builds/Linux/x86_64/docker-${DOCKER_VERSION}.tgz" -o docker.tgz && \
    echo "${DOCKER_SHA256} *docker.tgz" | sha256sum -c - && \
    tar -xzvf docker.tgz && \
    mv docker/* /usr/local/bin/ && \
    rmdir docker && \
    rm docker.tgz && \
    docker -v && \
    curl -L https://github.com/docker/compose/releases/download/${DOCKER_COMPOSE_VERSION}/docker-compose-`uname -s`-`uname -m` > /usr/local/bin/docker-compose && \
    chmod +x /usr/local/bin/docker-compose && \
    docker-compose --version
# Install engine
RUN git clone --recursive https://github.com/OpenTOSCA/container.git -b ${ENGINE_BRANCH} /opt/engine
WORKDIR /opt/engine
RUN git checkout ${ENGINE_REV} && git reset --hard
RUN mvn clean package
#RUN mvn clean install
# Expose the built Eclipse product under a stable command name.
RUN ln -s /opt/engine/org.opentosca.container.product/target/products/org.opentosca.container.product/linux/gtk/x86_64/OpenTOSCA /usr/local/bin/opentosca-engine && \
    chmod +x /usr/local/bin/opentosca-engine
EXPOSE 1337
CMD [ "/usr/local/bin/opentosca-engine" ]
| jojow/opentosca-dockerfiles | engine/Dockerfile | Dockerfile | mit | 2,100 |
# Tiny static-file web server (darkhttpd) on Alpine.
FROM gliderlabs/alpine:latest
MAINTAINER jordi collell <j@tmpo.io>
# gliderlabs helper: apk add + cache cleanup in a single step.
RUN apk-install darkhttpd
EXPOSE 80
# Default: serve /var/www chrooted; override CMD to pass other darkhttpd args.
ENTRYPOINT ["/usr/bin/darkhttpd"]
CMD ["/var/www", "--chroot"]
| mongrelion/di-darkhttpd | Dockerfile | Dockerfile | mit | 181 |
# Jupyter scipy stack extended with FFmpeg (built from source, non-free
# codecs enabled) and OpenCV + contrib modules (built from source).
FROM midvalestudent/jupyter-scipy:latest
USER root
ENV HOME /root
ADD requirements.txt /usr/local/share/requirements.txt
RUN pip install --upgrade pip && pip install -r /usr/local/share/requirements.txt
# Download/build/install ffmpeg
ARG FFMPEG_VERSION
ENV FFMPEG_VERSION ${FFMPEG_VERSION:-"3.2"}
RUN DEBIAN_FRONTEND=noninteractive \
    && REPO=http://www.deb-multimedia.org \
    && echo "deb $REPO jessie main non-free\ndeb-src $REPO jessie main non-free" >> /etc/apt/sources.list \
    && apt-get update && apt-get install -y --force-yes deb-multimedia-keyring && apt-get update \
    && apt-get remove ffmpeg \
    && apt-get install -yq --no-install-recommends \
        build-essential \
        libmp3lame-dev \
        libvorbis-dev \
        libtheora-dev \
        libspeex-dev \
        yasm \
        pkg-config \
        libfaac-dev \
        libopenjpeg-dev \
        libx264-dev \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/* \
    && mkdir -p /usr/src/build && cd /usr/src/build \
    && wget http://ffmpeg.org/releases/ffmpeg-$FFMPEG_VERSION.tar.gz \
    && tar xzf ffmpeg-$FFMPEG_VERSION.tar.gz && cd ffmpeg-$FFMPEG_VERSION \
    && ./configure \
        --prefix=/usr/local \
        --enable-gpl \
        --enable-postproc \
        --enable-swscale \
        --enable-avfilter \
        --enable-libmp3lame \
        --enable-libvorbis \
        --enable-libtheora \
        --enable-libx264 \
        --enable-libspeex \
        --enable-shared \
        --enable-pthreads \
        --enable-libopenjpeg \
        --enable-nonfree \
    && make -j$(nproc) install && ldconfig \
    && cd /usr/src/build && rm -rf ffmpeg-$FFMPEG_VERSION* \
    && apt-get purge -y cmake && apt-get autoremove -y --purge
# Download/build/install components for opencv
# OpenCV is linked against the locally built FFmpeg above
# (FFMPEG_INCLUDE_DIR / FFMPEG_LIB_DIR point into /usr/local).
ARG OPENCV_VERSION
ENV OPENCV_VERSION ${OPENCV_VERSION:-"3.2.0"}
RUN DEBIAN_FRONTEND=noninteractive \
    && REPO=http://cdn-fastly.deb.debian.org \
    && echo "deb $REPO/debian jessie main\ndeb $REPO/debian-security jessie/updates main" > /etc/apt/sources.list \
    && apt-get update && apt-get -yq dist-upgrade \
    && apt-get install -yq --no-install-recommends \
        build-essential \
        cmake \
        git-core \
        pkg-config \
        libjpeg62-turbo-dev \
        libtiff5-dev \
        libjasper-dev \
        libpng12-dev \
        libavcodec-dev \
        libavformat-dev \
        libswscale-dev \
        libv4l-dev \
        libatlas-base-dev \
        gfortran \
        tesseract-ocr \
        tesseract-ocr-eng \
        libtesseract-dev \
        libleptonica-dev \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/* \
    && mkdir -p /usr/src/build && cd /usr/src/build \
    && git clone -b $OPENCV_VERSION --depth 1 --recursive https://github.com/opencv/opencv.git \
    && git clone -b $OPENCV_VERSION --depth 1 --recursive https://github.com/opencv/opencv_contrib.git \
    && cd opencv && mkdir build && cd build \
    && cmake \
        -D CMAKE_BUILD_TYPE=RELEASE \
        -D CMAKE_INSTALL_PREFIX=/usr/local \
        -D INSTALL_C_EXAMPLES=OFF \
        -D INSTALL_PYTHON_EXAMPLES=OFF \
        -D OPENCV_EXTRA_MODULES_PATH=/usr/src/build/opencv_contrib/modules \
        -D BUILD_EXAMPLES=OFF \
        -D FFMPEG_INCLUDE_DIR=/usr/local/include \
        -D FFMPEG_LIB_DIR=/usr/local/lib \
        .. \
    && make -j4 install && ldconfig \
    && cd /usr/src/build && rm -rf opencv && rm -rf opencv_contrib \
    && apt-get purge -y cmake && apt-get autoremove -y --purge
# back to unprivileged user
ENV HOME /home/$NB_USER
| midvalestudent/jupyter | docker/image/Dockerfile | Dockerfile | mit | 3,569 |
FROM python:3

WORKDIR /root

# install any Python packages this app depends on
# (requirements.txt is copied alone first so this layer is cached until
# the requirements themselves change; --no-cache-dir keeps pip's download
# cache out of the image)
COPY requirements.txt /root/requirements.txt
RUN pip install --no-cache-dir -r requirements.txt

ENV FLASK_APP /root/main.py

# copy sources
COPY main.py /root/main.py
COPY templates /root/templates

CMD ["flask", "run", "--host=0.0.0.0", "--port=80"]
| anoop901/facebook-clone | Dockerfile | Dockerfile | mit | 317 |
# Redmine 3.1 all-in-one image: adds PPAs for git/ruby2.1/nginx and the
# PostgreSQL apt repo, installs the runtime stack, then runs install.sh.
FROM sameersbn/ubuntu:14.04.20150805
MAINTAINER sameer@damagehead.com
ENV REDMINE_VERSION=3.1.0 \
    REDMINE_USER="redmine" \
    REDMINE_HOME="/home/redmine" \
    REDMINE_LOG_DIR="/var/log/redmine" \
    SETUP_DIR="/var/cache/redmine" \
    RAILS_ENV=production
ENV REDMINE_INSTALL_DIR="${REDMINE_HOME}/redmine" \
    REDMINE_DATA_DIR="${REDMINE_HOME}/data"
# Register third-party apt sources (keys first), install every runtime and
# build dependency in one layer, then drop the apt lists to save space.
RUN apt-key adv --keyserver keyserver.ubuntu.com --recv E1DD270288B4E6030699E45FA1715D88E1DF1F24 \
 && echo "deb http://ppa.launchpad.net/git-core/ppa/ubuntu trusty main" >> /etc/apt/sources.list \
 && apt-key adv --keyserver keyserver.ubuntu.com --recv 80F70E11F0F0D5F10CB20E62F5DA5F09C3173AA6 \
 && echo "deb http://ppa.launchpad.net/brightbox/ruby-ng/ubuntu trusty main" >> /etc/apt/sources.list \
 && apt-key adv --keyserver keyserver.ubuntu.com --recv 8B3981E7A6852F782CC4951600A6F0A3C300EE8C \
 && echo "deb http://ppa.launchpad.net/nginx/stable/ubuntu trusty main" >> /etc/apt/sources.list \
 && wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - \
 && echo 'deb http://apt.postgresql.org/pub/repos/apt/ trusty-pgdg main' > /etc/apt/sources.list.d/pgdg.list \
 && apt-get update \
 && apt-get install -y supervisor logrotate nginx mysql-client postgresql-client \
      imagemagick subversion git cvs bzr mercurial rsync ruby2.1 locales openssh-client \
      gcc g++ make patch pkg-config ruby2.1-dev libc6-dev zlib1g-dev libxml2-dev \
      libmysqlclient18 libpq5 libyaml-0-2 libcurl3 libssl1.0.0 \
      libxslt1.1 libffi6 zlib1g gsfonts \
 && update-locale LANG=C.UTF-8 LC_MESSAGES=POSIX \
 && gem install --no-document bundler \
 && rm -rf /var/lib/apt/lists/*
COPY assets/setup/ ${SETUP_DIR}/
RUN bash ${SETUP_DIR}/install.sh
COPY assets/config/ ${SETUP_DIR}/config/
COPY entrypoint.sh /sbin/entrypoint.sh
RUN chmod 755 /sbin/entrypoint.sh
# Extra plugins/themes are staged for install.sh/entrypoint to pick up.
COPY plugins/ ${SETUP_DIR}/plugins/
COPY themes/ ${SETUP_DIR}/themes/
EXPOSE 80/tcp 443/tcp
VOLUME ["${REDMINE_DATA_DIR}", "${REDMINE_LOG_DIR}"]
WORKDIR ${REDMINE_INSTALL_DIR}
ENTRYPOINT ["/sbin/entrypoint.sh"]
CMD ["app:start"]
| rewiko/docker-redmine | Dockerfile | Dockerfile | mit | 2,089 |
# pure-ftpd with run-time configuration via environment variables,
# consumed by run.sh (all values below are overridable defaults).
FROM stilliard/pure-ftpd
ENV FTP_USER=ftpuser \
    FTP_PASSWORD=ftpuser \
    FTP_HOME_DIRECTORY=/share/ftp \
    PASV_PORT_MIN=30000 \
    PASV_PORT_MAX=30009 \
    CONTAINER_USER_UID=ftpuser \
    MAX_CLIENTS_NUMBER=50 \
    MAX_CLIENTS_PER_IP=10 \
    DOWNLOAD_LIMIT_KB=0 \
    UPLOAD_LIMIT_KB=0 \
    MAX_SIMULTANEOUS_SESSIONS=0 \
    LOG_ENABLED=0
COPY run.sh /pure-ftpd/run.sh
RUN chmod u+x /pure-ftpd/run.sh
CMD ["/pure-ftpd/run.sh"]
| fsimplice/docker-pure-ftpd | Dockerfile | Dockerfile | mit | 446 |
FROM node

WORKDIR /usr/src/app

# Copy only the dependency manifests first so the `npm ci` layer stays
# cached until package.json / package-lock.json change.
COPY package*.json ./
RUN npm ci

# Application sources change more often; copy them in a later, cheap layer.
COPY client.js index.html rest-server.js UserModel.js ./

EXPOSE 3000

CMD ["npm", "start"]
| maritz/nohm | examples/rest-user-server/Dockerfile | Dockerfile | mit | 151 |
FROM nginx
MAINTAINER Konstantin Volodin, volodin.konstantin@gmail.com

COPY nginx.conf /etc/nginx

EXPOSE 80
EXPOSE 5500

# Exec form (JSON array) instead of the shell form: nginx runs as PID 1
# and receives signals (e.g. `docker stop`'s SIGTERM) directly rather
# than being wrapped in `/bin/sh -c`.
ENTRYPOINT ["nginx", "-g", "daemon off;"]
| volodink/itstime4science | loadb/Dockerfile | Dockerfile | mit | 157 |
# Stage 1: compile the Go binary inside the golang:alpine toolchain image.
FROM golang:alpine as build

WORKDIR /go/src
COPY main.go .
RUN go build -o /go/bin/memhogger .

# Stage 2: ship only the compiled binary on a bare alpine base.
FROM alpine
LABEL maintainer="Michael Gasch <embano1@live.com>"
COPY --from=build /go/bin/memhogger /bin/memhogger
ENTRYPOINT ["memhogger"] | embano1/gotutorials | memhogger/Dockerfile | Dockerfile | mit | 235 |
# escape=`
# Stage 1 (servercore): download the signed MongoDB Enterprise MSI,
# verify its sha256, and install it with msiexec.
FROM mcr.microsoft.com/windows/servercore as msi
SHELL ["powershell", "-Command", "$ErrorActionPreference = 'Stop'; $ProgressPreference = 'SilentlyContinue';"]
ENV MONGO_VERSION 3.4.5
ENV MONGO_DOWNLOAD_URL https://downloads.mongodb.com/win32/mongodb-win32-x86_64-enterprise-windows-64-${MONGO_VERSION}-signed.msi
ENV MONGO_DOWNLOAD_SHA256 cda1c8d547fc6051d7bbe2953f9a75bced846ac64a733726f8443835db5b2901
RUN Write-Host ('Downloading {0} ...' -f $env:MONGO_DOWNLOAD_URL)
RUN Invoke-WebRequest -Uri $env:MONGO_DOWNLOAD_URL -OutFile 'mongodb.msi'
RUN Write-Host ('Verifying sha256 ({0}) ...' -f $env:MONGO_DOWNLOAD_SHA256)
RUN if ((Get-FileHash mongodb.msi -Algorithm sha256).Hash -ne $env:MONGO_DOWNLOAD_SHA256) { `
        Write-Host 'FAILED!'; `
        exit 1; `
    }
RUN Start-Process msiexec.exe -ArgumentList '/i', 'mongodb.msi', '/quiet', '/norestart', 'INSTALLLOCATION=C:\mongodb', 'ADDLOCAL=Server,Client,MonitoringTools,ImportExportTools,MiscellaneousTools' -NoNewWindow -Wait
RUN Remove-Item C:\mongodb\bin\*.pdb
# Stage 2 (nanoserver): copy the installed tree plus the VC++/WinINet
# runtime DLLs nanoserver lacks; keep the final image small.
FROM mcr.microsoft.com/windows/nanoserver:sac2016
COPY --from=msi C:\mongodb\ C:\mongodb\
COPY --from=msi C:\windows\system32\msvcp140.dll C:\windows\system32
COPY --from=msi C:\windows\system32\vcruntime140.dll C:\windows\system32
COPY --from=msi C:\windows\system32\wininet.dll C:\windows\system32
RUN mkdir C:\data\db & setx /m PATH %PATH%;C:\mongodb\bin
VOLUME C:\data\db
EXPOSE 27017
CMD ["mongod.exe"]
| StefanScherer/dockerfiles-windows | mongo/3.4/enterprise/Dockerfile | Dockerfile | mit | 1,448 |
# Pull base image with batteries
FROM buildpack-deps:jessie
MAINTAINER EngageNExecute code@engagenexecute.com
# Install packages.
RUN apt-get update && \
    DEBIAN_FRONTEND=noninteractive apt-get install -y \
    locales \
    python-dev \
    cmake \
    libpq-dev \
    python-pip \
    git-core \
    libopencv-dev \
    postgresql-9.4 \
    python-opencv \
    python-numpy
# Set UTF-8 as locales
RUN dpkg-reconfigure locales && \
    locale-gen C.UTF-8 && \
    /usr/sbin/update-locale LANG=C.UTF-8
# ENV UTF-8
ENV LC_ALL C.UTF-8
# Disable warning driver1394 (camera)
# NOTE(review): /dev is normally recreated by the container runtime, so
# this build-time link may have no effect at run time -- verify.
RUN ln /dev/null /dev/raw1394
| EngageNExecute/opencv-python2 | Dockerfile | Dockerfile | mit | 649 |
# Minimal image bundling the prebuilt a.out binary (proof-of-concept for
# reproducing a core dump inside a container).
FROM ubuntu
ADD a.out /a.out
| ghaering/poc-docker-coredump | Dockerfile | Dockerfile | mit | 30 |
#
# Node.js v0.11.x Dockerfile
# https://github.com/hden/nodejs-v0.11
#
FROM dockerfile/python
MAINTAINER Haokang Den <haokang.den@gmail.com>
# NOTE(review): $HOME must be set by the base image for this PATH entry
# and the nodebrew install below to line up -- verify.
ENV PATH $HOME/.nodebrew/current/bin:$PATH
# Bootstrap nodebrew and install/select Node.js v0.11.13.
RUN cd /data && \
    curl -L git.io/nodebrew | perl - setup && \
    nodebrew install-binary v0.11.13 && \
    nodebrew use v0.11.13
| hden/nodejs-v0.11 | Dockerfile | Dockerfile | mit | 322 |
# 9fs file server
FROM debian:sid
MAINTAINER jordi collell <j@tmpo.io>
RUN apt-get update && apt-get install -y \
    diod
VOLUME /data
# Export /data over 9P: -n no auth, -f stay in foreground, -l listen addr.
CMD [ "diod", "--export=/data", "-n", "-f", "-l", "0.0.0.0:5640" ]
EXPOSE 5640
| jordic/k8s | 9fs/Dockerfile | Dockerfile | mit | 220 |
# Minimal Debian 8 base image with a pinned Python 3 package.
FROM debian:8.2
MAINTAINER Stuart Ellis <stuart@stuartellis.eu>
# Cache-busting marker: bump to force a rebuild of the layers below.
ENV REFRESHED_AT 2015-09-09
ENV PYTHON_VERSION 3.4.2-2
RUN apt-get update && \
    apt-get install -qy python3=$PYTHON_VERSION && \
    rm -rf /var/lib/apt/lists/*
| stuartellis/stuartellis-docker-python3-baseimage | Dockerfile | Dockerfile | mit | 230 |
FROM python:3

# Copy the requirements file alone first so dependency installation is
# cached until requirements.txt itself changes; COPY is preferred over
# ADD for plain local files, and --no-cache-dir keeps pip's download
# cache out of the image.
COPY ./simplesocial/requirements.txt /simplesocial/requirements.txt
WORKDIR /simplesocial
RUN pip install --no-cache-dir -r requirements.txt

# Now copy the full project (frequently-changing but cheap layer).
COPY ./simplesocial /simplesocial

EXPOSE 8000
CMD [ "python", "manage.py", "runserver", "0.0.0.0:8000"] | srijannnd/Login-and-Register-App-in-Django | Dockerfile | Dockerfile | mit | 179 |
# AUTOGENERATED FILE
# balena base image (up-squared / Fedora 32, run variant) with Node.js
# and Yarn preinstalled.  Do not edit by hand.
FROM balenalib/up-squared-fedora:32-run
ENV NODE_VERSION 15.7.0
ENV YARN_VERSION 1.22.4
# Import the release-signing key (trying several keyservers), install a
# sha256-verified Node.js tarball, then a GPG-verified Yarn tarball.
RUN for key in \
	6A010C5166006599AA17F08146C2130DFD2497F5 \
	; do \
		gpg --keyserver pgp.mit.edu --recv-keys "$key" || \
		gpg --keyserver keyserver.pgp.com --recv-keys "$key" || \
		gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; \
	done \
	&& curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-x64.tar.gz" \
	&& echo "8081794dc8a6a1dd46045ce5a921e227407dcf7c17ee9d1ad39e354b37526f5c node-v$NODE_VERSION-linux-x64.tar.gz" | sha256sum -c - \
	&& tar -xzf "node-v$NODE_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 \
	&& rm "node-v$NODE_VERSION-linux-x64.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
	&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& mkdir -p /opt/yarn \
	&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
	&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& npm config set unsafe-perm true -g --unsafe-perm \
	&& rm -rf /tmp/*
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
# Smoke-test the installed Node stack, then drop the test script.
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@node.sh" \
	&& echo "Running test-stack@node" \
	&& chmod +x test-stack@node.sh \
	&& bash test-stack@node.sh \
	&& rm -rf test-stack@node.sh
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: Intel 64-bit (x86-64) \nOS: Fedora 32 \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v15.7.0, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo $'#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
	&& chmod +x /bin/sh-shim \
	&& cp /bin/sh /bin/sh.real \
	&& mv /bin/sh-shim /bin/sh
&& mv /bin/sh-shim /bin/sh | nghiant2710/base-images | balena-base-images/node/up-squared/fedora/32/15.7.0/run/Dockerfile | Dockerfile | apache-2.0 | 2,754 |
# AUTOGENERATED FILE
# balena base image (kitra710 / Ubuntu eoan, build variant) with the Go
# toolchain preinstalled.  Do not edit by hand.
FROM balenalib/kitra710-ubuntu:eoan-build
ENV GO_VERSION 1.16
# Install a sha256-verified Go release tarball for linux/arm64.
RUN mkdir -p /usr/local/go \
	&& curl -SLO "https://storage.googleapis.com/golang/go$GO_VERSION.linux-arm64.tar.gz" \
	&& echo "3770f7eb22d05e25fbee8fb53c2a4e897da043eb83c69b9a14f8d98562cd8098 go$GO_VERSION.linux-arm64.tar.gz" | sha256sum -c - \
	&& tar -xzf "go$GO_VERSION.linux-arm64.tar.gz" -C /usr/local/go --strip-components=1 \
	&& rm -f go$GO_VERSION.linux-arm64.tar.gz
ENV GOROOT /usr/local/go
ENV GOPATH /go
ENV PATH $GOPATH/bin:/usr/local/go/bin:$PATH
RUN mkdir -p "$GOPATH/src" "$GOPATH/bin" && chmod -R 777 "$GOPATH"
WORKDIR $GOPATH
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
# Smoke-test the installed Go stack, then drop the test script.
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@golang.sh" \
	&& echo "Running test-stack@golang" \
	&& chmod +x test-stack@golang.sh \
	&& bash test-stack@golang.sh \
	&& rm -rf test-stack@golang.sh
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v8 \nOS: Ubuntu eoan \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nGo v1.16 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
	&& chmod +x /bin/sh-shim \
	&& cp /bin/sh /bin/sh.real \
	&& mv /bin/sh-shim /bin/sh
&& mv /bin/sh-shim /bin/sh | nghiant2710/base-images | balena-base-images/golang/kitra710/ubuntu/eoan/1.16/build/Dockerfile | Dockerfile | apache-2.0 | 1,989 |
# AUTOGENERATED FILE
# balena base image (genericx86-64-ext / Debian Sid, run variant) with
# Python 3.6, pip and setuptools preinstalled.  Do not edit by hand.
FROM balenalib/genericx86-64-ext-debian:sid-run
# remove several traces of debian python
RUN apt-get purge -y python.*
# http://bugs.python.org/issue19846
# > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK.
ENV LANG C.UTF-8
# install python dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
		ca-certificates \
		netbase \
	&& rm -rf /var/lib/apt/lists/*
# key 63C7CC90: public key "Simon McVittie <smcv@pseudorandom.co.uk>" imported
# key 3372DCFA: public key "Donald Stufft (dstufft) <donald@stufft.io>" imported
RUN gpg --batch --keyserver keyring.debian.org --recv-keys 4DE8FF2A63C7CC90 \
	&& gpg --batch --keyserver keyserver.ubuntu.com --recv-key 6E3CBCE93372DCFA \
	&& gpg --batch --keyserver keyserver.ubuntu.com --recv-keys 0x52a43a1e4b77b059
ENV PYTHON_VERSION 3.6.12
# if this is called "PIP_VERSION", pip explodes with "ValueError: invalid truth value '<VERSION>'"
ENV PYTHON_PIP_VERSION 21.0.1
ENV SETUPTOOLS_VERSION 56.0.0
# Install a sha256-verified, balena-prebuilt CPython tarball, bootstrap
# pip if missing, pin pip/setuptools, and strip tests/bytecode to shrink
# the layer.
RUN set -x \
	&& buildDeps=' \
		curl \
	' \
	&& apt-get update && apt-get install -y $buildDeps --no-install-recommends && rm -rf /var/lib/apt/lists/* \
	&& curl -SLO "http://resin-packages.s3.amazonaws.com/python/v$PYTHON_VERSION/Python-$PYTHON_VERSION.linux-amd64-openssl1.1.tar.gz" \
	&& echo "c811b37dfb62442ccf23f28ca81e5a48eb85b071a58ee69b278f25520196cb2e Python-$PYTHON_VERSION.linux-amd64-openssl1.1.tar.gz" | sha256sum -c - \
	&& tar -xzf "Python-$PYTHON_VERSION.linux-amd64-openssl1.1.tar.gz" --strip-components=1 \
	&& rm -rf "Python-$PYTHON_VERSION.linux-amd64-openssl1.1.tar.gz" \
	&& ldconfig \
	&& if [ ! -e /usr/local/bin/pip3 ]; then : \
		&& curl -SLO "https://raw.githubusercontent.com/pypa/get-pip/430ba37776ae2ad89f794c7a43b90dc23bac334c/get-pip.py" \
		&& echo "19dae841a150c86e2a09d475b5eb0602861f2a5b7761ec268049a662dbd2bd0c get-pip.py" | sha256sum -c - \
		&& python3 get-pip.py \
		&& rm get-pip.py \
	; fi \
	&& pip3 install --no-cache-dir --upgrade --force-reinstall pip=="$PYTHON_PIP_VERSION" setuptools=="$SETUPTOOLS_VERSION" \
	&& find /usr/local \
		\( -type d -a -name test -o -name tests \) \
		-o \( -type f -a -name '*.pyc' -o -name '*.pyo' \) \
		-exec rm -rf '{}' + \
	&& cd / \
	&& rm -rf /usr/src/python ~/.cache
# make some useful symlinks that are expected to exist
RUN cd /usr/local/bin \
	&& ln -sf pip3 pip \
	&& { [ -e easy_install ] || ln -s easy_install-* easy_install; } \
	&& ln -sf idle3 idle \
	&& ln -sf pydoc3 pydoc \
	&& ln -sf python3 python \
	&& ln -sf python3-config python-config
# set PYTHONPATH to point to dist-packages
ENV PYTHONPATH /usr/lib/python3/dist-packages:$PYTHONPATH
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
# Smoke-test the installed Python stack, then drop the test script.
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@python.sh" \
	&& echo "Running test-stack@python" \
	&& chmod +x test-stack@python.sh \
	&& bash test-stack@python.sh \
	&& rm -rf test-stack@python.sh
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: Intel 64-bit (x86-64) \nOS: Debian Sid \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nPython v3.6.12, Pip v21.0.1, Setuptools v56.0.0 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
	&& chmod +x /bin/sh-shim \
	&& cp /bin/sh /bin/sh.real \
	&& mv /bin/sh-shim /bin/sh
&& mv /bin/sh-shim /bin/sh | nghiant2710/base-images | balena-base-images/python/genericx86-64-ext/debian/sid/3.6.12/run/Dockerfile | Dockerfile | apache-2.0 | 4,103 |
# AUTOGENERATED FILE
# balena base image (asus-tinker-edge-t / Fedora 34, run variant) with
# Node.js and Yarn preinstalled for linux/arm64.  Do not edit by hand.
FROM balenalib/asus-tinker-edge-t-fedora:34-run
ENV NODE_VERSION 16.14.0
ENV YARN_VERSION 1.22.4
# Import the release-signing key (trying several keyservers), install a
# sha256-verified Node.js tarball, then a GPG-verified Yarn tarball.
RUN for key in \
	6A010C5166006599AA17F08146C2130DFD2497F5 \
	; do \
		gpg --keyserver pgp.mit.edu --recv-keys "$key" || \
		gpg --keyserver keyserver.pgp.com --recv-keys "$key" || \
		gpg --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
	done \
	&& curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-arm64.tar.gz" \
	&& echo "82d71968c82eb391f463df62ba277563a3bd01ce43bba0e7e1c533991567b8fe node-v$NODE_VERSION-linux-arm64.tar.gz" | sha256sum -c - \
	&& tar -xzf "node-v$NODE_VERSION-linux-arm64.tar.gz" -C /usr/local --strip-components=1 \
	&& rm "node-v$NODE_VERSION-linux-arm64.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
	&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& mkdir -p /opt/yarn \
	&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
	&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& npm config set unsafe-perm true -g --unsafe-perm \
	&& rm -rf /tmp/*
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
# Smoke-test the installed Node stack, then drop the test script.
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@node.sh" \
	&& echo "Running test-stack@node" \
	&& chmod +x test-stack@node.sh \
	&& bash test-stack@node.sh \
	&& rm -rf test-stack@node.sh
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v8 \nOS: Fedora 34 \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v16.14.0, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo $'#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
	&& chmod +x /bin/sh-shim \
	&& cp /bin/sh /bin/sh.real \
	&& mv /bin/sh-shim /bin/sh
&& mv /bin/sh-shim /bin/sh | resin-io-library/base-images | balena-base-images/node/asus-tinker-edge-t/fedora/34/16.14.0/run/Dockerfile | Dockerfile | apache-2.0 | 2,751 |
# Builders
ARG BUNDLE_IMAGE=fission-workflows-bundle
ARG BUNDLE_TAG=latest
ARG FISSION_BUILDER_IMAGE=fission/builder
ARG FISSION_TAG=latest
FROM $BUNDLE_IMAGE:$BUNDLE_TAG as workflows-bundle
FROM scratch
COPY --from=workflows-bundle /fission-workflows-bundle /fission-workflows-bundle
EXPOSE 8888
EXPOSE 8080
ENV FNENV_FISSION_CONTROLLER http://controller.fission
ENV FNENV_FISSION_EXECUTOR http://executor.fission
ENV ES_NATS_URL nats://defaultFissionAuthToken@nats-streaming.fission:4222
ENV ES_NATS_CLUSTER fissionMQTrigger
# Remove APIs when components stabilize
ENTRYPOINT ["/fission-workflows-bundle", \
"--nats", \
"--fission", \
"--internal", \
"--controller", \
"--api-http", \
"--api-workflow-invocation", \
"--api-workflow", \
"--api-admin", \
"--metrics"] | fission/fission-workflows | build/runtime-env/Dockerfile | Dockerfile | apache-2.0 | 880 |
# AUTOGENERATED FILE
FROM balenalib/jetson-xavier-nx-devkit-debian:bookworm-run
ENV NODE_VERSION 12.22.9
ENV YARN_VERSION 1.22.4
RUN buildDeps='curl libatomic1' \
&& set -x \
&& for key in \
6A010C5166006599AA17F08146C2130DFD2497F5 \
; do \
gpg --batch --keyserver pgp.mit.edu --recv-keys "$key" || \
gpg --batch --keyserver keyserver.pgp.com --recv-keys "$key" || \
gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
done \
&& apt-get update && apt-get install -y $buildDeps --no-install-recommends \
&& rm -rf /var/lib/apt/lists/* \
&& curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-arm64.tar.gz" \
&& echo "307aa26c68600e2f73d699e58a15c59ea06928e4a348cd5a216278d9f2ee0d6c node-v$NODE_VERSION-linux-arm64.tar.gz" | sha256sum -c - \
&& tar -xzf "node-v$NODE_VERSION-linux-arm64.tar.gz" -C /usr/local --strip-components=1 \
&& rm "node-v$NODE_VERSION-linux-arm64.tar.gz" \
&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
&& mkdir -p /opt/yarn \
&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
&& npm config set unsafe-perm true -g --unsafe-perm \
&& rm -rf /tmp/*
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@node.sh" \
&& echo "Running test-stack@node" \
&& chmod +x test-stack@node.sh \
&& bash test-stack@node.sh \
&& rm -rf test-stack@node.sh
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v8 \nOS: Debian Bookworm \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v12.22.9, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& cp /bin/sh /bin/sh.real \
&& mv /bin/sh-shim /bin/sh | resin-io-library/base-images | balena-base-images/node/jetson-xavier-nx-devkit/debian/bookworm/12.22.9/run/Dockerfile | Dockerfile | apache-2.0 | 2,949 |
# AUTOGENERATED FILE
FROM balenalib/photon-nano-debian:stretch-run
# remove several traces of debian python
RUN apt-get purge -y python.*
# http://bugs.python.org/issue19846
# > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK.
ENV LANG C.UTF-8
# install python dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
ca-certificates \
netbase \
&& rm -rf /var/lib/apt/lists/*
# key 63C7CC90: public key "Simon McVittie <smcv@pseudorandom.co.uk>" imported
# key 3372DCFA: public key "Donald Stufft (dstufft) <donald@stufft.io>" imported
RUN gpg --batch --keyserver keyring.debian.org --recv-keys 4DE8FF2A63C7CC90 \
&& gpg --batch --keyserver keyserver.ubuntu.com --recv-key 6E3CBCE93372DCFA \
&& gpg --batch --keyserver keyserver.ubuntu.com --recv-keys 0x52a43a1e4b77b059
ENV PYTHON_VERSION 3.8.12
# if this is called "PIP_VERSION", pip explodes with "ValueError: invalid truth value '<VERSION>'"
ENV PYTHON_PIP_VERSION 21.3.1
ENV SETUPTOOLS_VERSION 60.5.4
RUN set -x \
&& buildDeps=' \
curl \
' \
&& apt-get update && apt-get install -y $buildDeps --no-install-recommends && rm -rf /var/lib/apt/lists/* \
&& curl -SLO "http://resin-packages.s3.amazonaws.com/python/v$PYTHON_VERSION/Python-$PYTHON_VERSION.linux-aarch64-libffi3.2.tar.gz" \
&& echo "7431f1179737fed6518ccf8187ad738c2f2e16feb75b7528e4e0bb7934d192cf Python-$PYTHON_VERSION.linux-aarch64-libffi3.2.tar.gz" | sha256sum -c - \
&& tar -xzf "Python-$PYTHON_VERSION.linux-aarch64-libffi3.2.tar.gz" --strip-components=1 \
&& rm -rf "Python-$PYTHON_VERSION.linux-aarch64-libffi3.2.tar.gz" \
&& ldconfig \
&& if [ ! -e /usr/local/bin/pip3 ]; then : \
&& curl -SLO "https://raw.githubusercontent.com/pypa/get-pip/430ba37776ae2ad89f794c7a43b90dc23bac334c/get-pip.py" \
&& echo "19dae841a150c86e2a09d475b5eb0602861f2a5b7761ec268049a662dbd2bd0c get-pip.py" | sha256sum -c - \
&& python3 get-pip.py \
&& rm get-pip.py \
; fi \
&& pip3 install --no-cache-dir --upgrade --force-reinstall pip=="$PYTHON_PIP_VERSION" setuptools=="$SETUPTOOLS_VERSION" \
&& find /usr/local \
\( -type d -a -name test -o -name tests \) \
-o \( -type f -a -name '*.pyc' -o -name '*.pyo' \) \
-exec rm -rf '{}' + \
&& cd / \
&& rm -rf /usr/src/python ~/.cache
# make some useful symlinks that are expected to exist
RUN cd /usr/local/bin \
&& ln -sf pip3 pip \
&& { [ -e easy_install ] || ln -s easy_install-* easy_install; } \
&& ln -sf idle3 idle \
&& ln -sf pydoc3 pydoc \
&& ln -sf python3 python \
&& ln -sf python3-config python-config
# set PYTHONPATH to point to dist-packages
ENV PYTHONPATH /usr/lib/python3/dist-packages:$PYTHONPATH
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@python.sh" \
&& echo "Running test-stack@python" \
&& chmod +x test-stack@python.sh \
&& bash test-stack@python.sh \
&& rm -rf test-stack@python.sh
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v8 \nOS: Debian Stretch \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nPython v3.8.12, Pip v21.3.1, Setuptools v60.5.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& cp /bin/sh /bin/sh.real \
&& mv /bin/sh-shim /bin/sh | resin-io-library/base-images | balena-base-images/python/photon-nano/debian/stretch/3.8.12/run/Dockerfile | Dockerfile | apache-2.0 | 4,094 |
# AUTOGENERATED FILE
FROM balenalib/rockpi-4b-rk3399-fedora:34-run
# http://bugs.python.org/issue19846
# > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK.
ENV LANG C.UTF-8
RUN dnf install -y \
python3-pip \
python3-dbus \
&& dnf clean all
# install "virtualenv", since the vast majority of users of this image will want it
RUN pip3 install -U --no-cache-dir --ignore-installed pip setuptools \
&& pip3 install --no-cache-dir virtualenv
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'As of January 1st, 2020, Python 2 was end-of-life, we will change the latest tag for Balenalib Python base image to Python 3.x and drop support for Python 2 soon. So after 1st July, 2020, all the balenalib Python latest tag will point to the latest Python 3 version and no changes, or fixes will be made to balenalib Python 2 base image. If you are using Python 2 for your application, please upgrade to Python 3 before 1st July.' > /.balena/messages/python-deprecation-warnin
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@python.sh" \
&& echo "Running test-stack@python" \
&& chmod +x test-stack@python.sh \
&& bash test-stack@python.sh \
&& rm -rf test-stack@python.sh
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v8 \nOS: Fedora 34 \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nPython v3.10.0, Pip v21.2.4, Setuptools v58.0.0 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo $'#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& cp /bin/sh /bin/sh.real \
&& mv /bin/sh-shim /bin/sh | resin-io-library/base-images | balena-base-images/python/rockpi-4b-rk3399/fedora/34/3.10.0/run/Dockerfile | Dockerfile | apache-2.0 | 2,439 |
# AUTOGENERATED FILE
FROM balenalib/photon-xavier-nx-alpine:3.12-run
ENV NODE_VERSION 14.15.4
ENV YARN_VERSION 1.22.4
# Install dependencies
RUN apk add --no-cache libgcc libstdc++ libuv \
&& apk add --no-cache libssl1.0 || apk add --no-cache libssl1.1
RUN buildDeps='curl' \
&& set -x \
&& for key in \
6A010C5166006599AA17F08146C2130DFD2497F5 \
; do \
gpg --keyserver pgp.mit.edu --recv-keys "$key" || \
gpg --keyserver keyserver.pgp.com --recv-keys "$key" || \
gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; \
done \
&& apk add --no-cache $buildDeps \
&& curl -SLO "http://resin-packages.s3.amazonaws.com/node/v$NODE_VERSION/node-v$NODE_VERSION-linux-alpine-aarch64.tar.gz" \
&& echo "93e91093748c7287665d617cca0dc2ed9c26aa95dcf9152450a7961850e6d846 node-v$NODE_VERSION-linux-alpine-aarch64.tar.gz" | sha256sum -c - \
&& tar -xzf "node-v$NODE_VERSION-linux-alpine-aarch64.tar.gz" -C /usr/local --strip-components=1 \
&& rm "node-v$NODE_VERSION-linux-alpine-aarch64.tar.gz" \
&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
&& mkdir -p /opt/yarn \
&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
&& npm config set unsafe-perm true -g --unsafe-perm \
&& rm -rf /tmp/*
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@node.sh" \
&& echo "Running test-stack@node" \
&& chmod +x test-stack@node.sh \
&& bash test-stack@node.sh \
&& rm -rf test-stack@node.sh
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v8 \nOS: Alpine Linux 3.12 \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v14.15.4, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo $'#!/bin/bash\nbalena-info\nbusybox ln -sf /bin/busybox /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& ln -f /bin/sh /bin/sh.real \
&& ln -f /bin/sh-shim /bin/sh | nghiant2710/base-images | balena-base-images/node/photon-xavier-nx/alpine/3.12/14.15.4/run/Dockerfile | Dockerfile | apache-2.0 | 3,031 |
# AUTOGENERATED FILE
FROM balenalib/surface-pro-6-fedora:31-build
ENV NODE_VERSION 14.15.4
ENV YARN_VERSION 1.22.4
RUN for key in \
6A010C5166006599AA17F08146C2130DFD2497F5 \
; do \
gpg --keyserver pgp.mit.edu --recv-keys "$key" || \
gpg --keyserver keyserver.pgp.com --recv-keys "$key" || \
gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; \
done \
&& curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-x64.tar.gz" \
&& echo "b51c033d40246cd26e52978125a3687df5cd02ee532e8614feff0ba6c13a774f node-v$NODE_VERSION-linux-x64.tar.gz" | sha256sum -c - \
&& tar -xzf "node-v$NODE_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 \
&& rm "node-v$NODE_VERSION-linux-x64.tar.gz" \
&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
&& mkdir -p /opt/yarn \
&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
&& npm config set unsafe-perm true -g --unsafe-perm \
&& rm -rf /tmp/*
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@node.sh" \
&& echo "Running test-stack@node" \
&& chmod +x test-stack@node.sh \
&& bash test-stack@node.sh \
&& rm -rf test-stack@node.sh
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: Intel 64-bit (x86-64) \nOS: Fedora 31 \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v14.15.4, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo $'#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& cp /bin/sh /bin/sh.real \
&& mv /bin/sh-shim /bin/sh | nghiant2710/base-images | balena-base-images/node/surface-pro-6/fedora/31/14.15.4/build/Dockerfile | Dockerfile | apache-2.0 | 2,763 |
# AUTOGENERATED FILE
FROM balenalib/up-board-ubuntu:xenial-build
ENV NODE_VERSION 14.18.3
ENV YARN_VERSION 1.22.4
RUN for key in \
6A010C5166006599AA17F08146C2130DFD2497F5 \
; do \
gpg --keyserver pgp.mit.edu --recv-keys "$key" || \
gpg --keyserver keyserver.pgp.com --recv-keys "$key" || \
gpg --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
done \
&& curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-x64.tar.gz" \
&& echo "bd96f88e054801d1368787f7eaf77b49cd052b9543c56bd6bc0bfc90310e2756 node-v$NODE_VERSION-linux-x64.tar.gz" | sha256sum -c - \
&& tar -xzf "node-v$NODE_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 \
&& rm "node-v$NODE_VERSION-linux-x64.tar.gz" \
&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
&& mkdir -p /opt/yarn \
&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
&& npm config set unsafe-perm true -g --unsafe-perm \
&& rm -rf /tmp/*
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@node.sh" \
&& echo "Running test-stack@node" \
&& chmod +x test-stack@node.sh \
&& bash test-stack@node.sh \
&& rm -rf test-stack@node.sh
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: Intel 64-bit (x86-64) \nOS: Ubuntu xenial \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v14.18.3, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& cp /bin/sh /bin/sh.real \
&& mv /bin/sh-shim /bin/sh | resin-io-library/base-images | balena-base-images/node/up-board/ubuntu/xenial/14.18.3/build/Dockerfile | Dockerfile | apache-2.0 | 2,758 |
# AUTOGENERATED FILE
FROM balenalib/raspberrypi0-2w-64-fedora:34-build
# http://bugs.python.org/issue19846
# > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK.
ENV LANG C.UTF-8
RUN dnf install -y \
python3-pip \
python3-dbus \
&& dnf clean all
# install "virtualenv", since the vast majority of users of this image will want it
RUN pip3 install -U --no-cache-dir --ignore-installed pip setuptools \
&& pip3 install --no-cache-dir virtualenv
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'As of January 1st, 2020, Python 2 was end-of-life, we will change the latest tag for Balenalib Python base image to Python 3.x and drop support for Python 2 soon. So after 1st July, 2020, all the balenalib Python latest tag will point to the latest Python 3 version and no changes, or fixes will be made to balenalib Python 2 base image. If you are using Python 2 for your application, please upgrade to Python 3 before 1st July.' > /.balena/messages/python-deprecation-warning
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@python.sh" \
&& echo "Running test-stack@python" \
&& chmod +x test-stack@python.sh \
&& bash test-stack@python.sh \
&& rm -rf test-stack@python.sh
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v8 \nOS: Fedora 34 \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nPython v3.8.12, Pip v21.3.1, Setuptools v60.5.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo $'#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& cp /bin/sh /bin/sh.real \
&& mv /bin/sh-shim /bin/sh | resin-io-library/base-images | balena-base-images/python/raspberrypi0-2w-64/fedora/34/3.8.12/build/Dockerfile | Dockerfile | apache-2.0 | 2,446 |
# AUTOGENERATED FILE
FROM balenalib/zc702-zynq7-ubuntu:xenial-run
ENV NODE_VERSION 14.18.3
ENV YARN_VERSION 1.22.4
RUN buildDeps='curl libatomic1' \
&& set -x \
&& for key in \
6A010C5166006599AA17F08146C2130DFD2497F5 \
; do \
gpg --keyserver pgp.mit.edu --recv-keys "$key" || \
gpg --keyserver keyserver.pgp.com --recv-keys "$key" || \
gpg --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
done \
&& apt-get update && apt-get install -y $buildDeps --no-install-recommends \
&& rm -rf /var/lib/apt/lists/* \
&& curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-armv7l.tar.gz" \
&& echo "6f19aa4d9c1b1706d44742218c8a7742d3fa62033d953156095bdde09f8375e5 node-v$NODE_VERSION-linux-armv7l.tar.gz" | sha256sum -c - \
&& tar -xzf "node-v$NODE_VERSION-linux-armv7l.tar.gz" -C /usr/local --strip-components=1 \
&& rm "node-v$NODE_VERSION-linux-armv7l.tar.gz" \
&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
&& mkdir -p /opt/yarn \
&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
&& npm config set unsafe-perm true -g --unsafe-perm \
&& rm -rf /tmp/*
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@node.sh" \
&& echo "Running test-stack@node" \
&& chmod +x test-stack@node.sh \
&& bash test-stack@node.sh \
&& rm -rf test-stack@node.sh
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Ubuntu xenial \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v14.18.3, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& cp /bin/sh /bin/sh.real \
&& mv /bin/sh-shim /bin/sh | resin-io-library/base-images | balena-base-images/node/zc702-zynq7/ubuntu/xenial/14.18.3/run/Dockerfile | Dockerfile | apache-2.0 | 2,913 |
# AUTOGENERATED FILE
FROM balenalib/bananapi-m1-plus-ubuntu:disco-build
ENV NODE_VERSION 10.23.1
ENV YARN_VERSION 1.22.4
RUN for key in \
6A010C5166006599AA17F08146C2130DFD2497F5 \
; do \
gpg --keyserver pgp.mit.edu --recv-keys "$key" || \
gpg --keyserver keyserver.pgp.com --recv-keys "$key" || \
gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; \
done \
&& curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-armv7l.tar.gz" \
&& echo "8f965f2757efcf3077d655bfcea36f7a29c58958355e0eb23cfb725740c3ccbe node-v$NODE_VERSION-linux-armv7l.tar.gz" | sha256sum -c - \
&& tar -xzf "node-v$NODE_VERSION-linux-armv7l.tar.gz" -C /usr/local --strip-components=1 \
&& rm "node-v$NODE_VERSION-linux-armv7l.tar.gz" \
&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
&& mkdir -p /opt/yarn \
&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
&& npm config set unsafe-perm true -g --unsafe-perm \
&& rm -rf /tmp/*
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@node.sh" \
&& echo "Running test-stack@node" \
&& chmod +x test-stack@node.sh \
&& bash test-stack@node.sh \
&& rm -rf test-stack@node.sh
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Ubuntu disco \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v10.23.1, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& cp /bin/sh /bin/sh.real \
&& mv /bin/sh-shim /bin/sh | nghiant2710/base-images | balena-base-images/node/bananapi-m1-plus/ubuntu/disco/10.23.1/build/Dockerfile | Dockerfile | apache-2.0 | 2,767 |
# AUTOGENERATED FILE
FROM balenalib/beaglebone-green-gateway-ubuntu:cosmic-build
# remove several traces of debian python
RUN apt-get purge -y python.*
# http://bugs.python.org/issue19846
# > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK.
ENV LANG C.UTF-8
# key 63C7CC90: public key "Simon McVittie <smcv@pseudorandom.co.uk>" imported
# key 3372DCFA: public key "Donald Stufft (dstufft) <donald@stufft.io>" imported
RUN gpg --keyserver keyring.debian.org --recv-keys 4DE8FF2A63C7CC90 \
&& gpg --keyserver keyserver.ubuntu.com --recv-key 6E3CBCE93372DCFA \
&& gpg --keyserver keyserver.ubuntu.com --recv-keys 0x52a43a1e4b77b059
ENV PYTHON_VERSION 3.5.10
# if this is called "PIP_VERSION", pip explodes with "ValueError: invalid truth value '<VERSION>'"
ENV PYTHON_PIP_VERSION 21.0.1
ENV SETUPTOOLS_VERSION 56.0.0
RUN set -x \
&& curl -SLO "http://resin-packages.s3.amazonaws.com/python/v$PYTHON_VERSION/Python-$PYTHON_VERSION.linux-armv7hf-openssl1.1.tar.gz" \
&& echo "4abc87b995e08c143de14f26d8ab6ffd9017aad400bf91bc36a802efda7fe27a Python-$PYTHON_VERSION.linux-armv7hf-openssl1.1.tar.gz" | sha256sum -c - \
&& tar -xzf "Python-$PYTHON_VERSION.linux-armv7hf-openssl1.1.tar.gz" --strip-components=1 \
&& rm -rf "Python-$PYTHON_VERSION.linux-armv7hf-openssl1.1.tar.gz" \
&& ldconfig \
&& if [ ! -e /usr/local/bin/pip3 ]; then : \
&& curl -SLO "https://raw.githubusercontent.com/pypa/get-pip/430ba37776ae2ad89f794c7a43b90dc23bac334c/get-pip.py" \
&& echo "19dae841a150c86e2a09d475b5eb0602861f2a5b7761ec268049a662dbd2bd0c get-pip.py" | sha256sum -c - \
&& python3 get-pip.py \
&& rm get-pip.py \
; fi \
&& pip3 install --no-cache-dir --upgrade --force-reinstall pip=="$PYTHON_PIP_VERSION" setuptools=="$SETUPTOOLS_VERSION" \
&& find /usr/local \
\( -type d -a -name test -o -name tests \) \
-o \( -type f -a -name '*.pyc' -o -name '*.pyo' \) \
-exec rm -rf '{}' + \
&& cd / \
&& rm -rf /usr/src/python ~/.cache
# install "virtualenv", since the vast majority of users of this image will want it
RUN pip3 install --no-cache-dir virtualenv
ENV PYTHON_DBUS_VERSION 1.2.8
# install dbus-python dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
libdbus-1-dev \
libdbus-glib-1-dev \
&& rm -rf /var/lib/apt/lists/* \
&& apt-get -y autoremove
# install dbus-python
RUN set -x \
&& mkdir -p /usr/src/dbus-python \
&& curl -SL "http://dbus.freedesktop.org/releases/dbus-python/dbus-python-$PYTHON_DBUS_VERSION.tar.gz" -o dbus-python.tar.gz \
&& curl -SL "http://dbus.freedesktop.org/releases/dbus-python/dbus-python-$PYTHON_DBUS_VERSION.tar.gz.asc" -o dbus-python.tar.gz.asc \
&& gpg --verify dbus-python.tar.gz.asc \
&& tar -xzC /usr/src/dbus-python --strip-components=1 -f dbus-python.tar.gz \
&& rm dbus-python.tar.gz* \
&& cd /usr/src/dbus-python \
&& PYTHON_VERSION=$(expr match "$PYTHON_VERSION" '\([0-9]*\.[0-9]*\)') ./configure \
&& make -j$(nproc) \
&& make install -j$(nproc) \
&& cd / \
&& rm -rf /usr/src/dbus-python
# make some useful symlinks that are expected to exist
RUN cd /usr/local/bin \
&& ln -sf pip3 pip \
&& { [ -e easy_install ] || ln -s easy_install-* easy_install; } \
&& ln -sf idle3 idle \
&& ln -sf pydoc3 pydoc \
&& ln -sf python3 python \
&& ln -sf python3-config python-config
# set PYTHONPATH to point to dist-packages
ENV PYTHONPATH /usr/lib/python3/dist-packages:$PYTHONPATH
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@python.sh" \
&& echo "Running test-stack@python" \
&& chmod +x test-stack@python.sh \
&& bash test-stack@python.sh \
&& rm -rf test-stack@python.sh
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Ubuntu cosmic \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nPython v3.5.10, Pip v21.0.1, Setuptools v56.0.0 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& cp /bin/sh /bin/sh.real \
&& mv /bin/sh-shim /bin/sh | nghiant2710/base-images | balena-base-images/python/beaglebone-green-gateway/ubuntu/cosmic/3.5.10/build/Dockerfile | Dockerfile | apache-2.0 | 4,847 |
# AUTOGENERATED FILE
# Balena base image: Node.js 15.6.0 + Yarn 1.22.4 on Fedora 33 (build variant)
# for the imx8mm-var-dart (ARM v8) board. Regenerated by the balena
# base-images tooling -- manual edits will be overwritten.
FROM balenalib/imx8mm-var-dart-fedora:33-build
# Pinned versions of the toolchain installed below.
ENV NODE_VERSION 15.6.0
ENV YARN_VERSION 1.22.4
# Import the Node.js release signing key (trying several keyservers in turn,
# since any single one may be unreachable), download the prebuilt linux-arm64
# Node tarball and verify its sha256, unpack it into /usr/local, then install
# Yarn after verifying its detached GPG signature and symlink yarn/yarnpkg
# onto PATH. npm's unsafe-perm is enabled so global installs work as root.
# NOTE(review): ha.pool.sks-keyservers.net has been decommissioned; this
# presumably still succeeds via the first two keyservers -- confirm on rebuild.
RUN for key in \
	6A010C5166006599AA17F08146C2130DFD2497F5 \
	; do \
		gpg --keyserver pgp.mit.edu --recv-keys "$key" || \
		gpg --keyserver keyserver.pgp.com --recv-keys "$key" || \
		gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; \
	done \
	&& curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-arm64.tar.gz" \
	&& echo "b0660398fe590f8588431a787e9b032c7271a2fa88306c7a26e751571df998e4  node-v$NODE_VERSION-linux-arm64.tar.gz" | sha256sum -c - \
	&& tar -xzf "node-v$NODE_VERSION-linux-arm64.tar.gz" -C /usr/local --strip-components=1 \
	&& rm "node-v$NODE_VERSION-linux-arm64.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
	&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& mkdir -p /opt/yarn \
	&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
	&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& npm config set unsafe-perm true -g --unsafe-perm \
	&& rm -rf /tmp/*
# Placeholder CMD; downstream images are expected to override it.
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
# Smoke-test the installed Node/Yarn stack with balena's pinned test script.
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@node.sh" \
	&& echo "Running test-stack@node" \
	&& chmod +x test-stack@node.sh \
	&& bash test-stack@node.sh \
	&& rm -rf test-stack@node.sh
# Record human-readable image details for the `balena-info` command.
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v8 \nOS: Fedora 33 \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v15.6.0, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
# Install a one-shot /bin/sh shim that prints balena-info on first use, then
# restores the real shell so subsequent invocations behave normally.
RUN echo $'#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
	&& chmod +x /bin/sh-shim \
	&& cp /bin/sh /bin/sh.real \
	&& mv /bin/sh-shim /bin/sh | nghiant2710/base-images | balena-base-images/node/imx8mm-var-dart/fedora/33/15.6.0/build/Dockerfile | Dockerfile | apache-2.0 | 2,756 |
# AUTOGENERATED FILE
# Balena base image: Python 3.10.0 on Alpine Linux 3.14 (run variant) for the
# coral-dev (ARM v8) board. Regenerated by the balena base-images tooling --
# manual edits will be overwritten.
FROM balenalib/coral-dev-alpine:3.14-run
# remove several traces of python
RUN apk del python*
# http://bugs.python.org/issue19846
# > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK.
ENV LANG C.UTF-8
# install python dependencies
# (libssl1.0 is tried first for older Alpine releases, falling back to libssl1.1)
RUN apk add --no-cache ca-certificates libffi \
	&& apk add --no-cache libssl1.0 || apk add --no-cache libssl1.1
# key 63C7CC90: public key "Simon McVittie <smcv@pseudorandom.co.uk>" imported
# key 3372DCFA: public key "Donald Stufft (dstufft) <donald@stufft.io>" imported
RUN gpg --keyserver keyring.debian.org --recv-keys 4DE8FF2A63C7CC90 \
	&& gpg --keyserver keyserver.ubuntu.com --recv-key 6E3CBCE93372DCFA \
	&& gpg --keyserver keyserver.ubuntu.com --recv-keys 0x52a43a1e4b77b059
# point Python at a system-provided certificate database. Otherwise, we might hit CERTIFICATE_VERIFY_FAILED.
# https://www.python.org/dev/peps/pep-0476/#trust-database
ENV SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt
ENV PYTHON_VERSION 3.10.0
# if this is called "PIP_VERSION", pip explodes with "ValueError: invalid truth value '<VERSION>'"
ENV PYTHON_PIP_VERSION 21.2.4
ENV SETUPTOOLS_VERSION 58.0.0
# Install curl/gnupg as a virtual .build-deps group, download balena's prebuilt
# CPython tarball for alpine/aarch64, verify its sha256 and unpack it over /,
# bootstrap pip from the pinned get-pip.py only if pip3 is not already present,
# pin pip/setuptools to the versions above, then strip test suites and compiled
# bytecode to keep the image small.
RUN set -x \
	&& buildDeps=' \
		curl \
		gnupg \
	' \
	&& apk add --no-cache --virtual .build-deps $buildDeps \
	&& curl -SLO "http://resin-packages.s3.amazonaws.com/python/v$PYTHON_VERSION/Python-$PYTHON_VERSION.linux-alpine-aarch64-libffi3.3.tar.gz" \
	&& echo "13ab188bd0214779de247bbde0919f4c19c91f78a34d26171b567b556a06c828  Python-$PYTHON_VERSION.linux-alpine-aarch64-libffi3.3.tar.gz" | sha256sum -c - \
	&& tar -xzf "Python-$PYTHON_VERSION.linux-alpine-aarch64-libffi3.3.tar.gz" --strip-components=1 \
	&& rm -rf "Python-$PYTHON_VERSION.linux-alpine-aarch64-libffi3.3.tar.gz" \
	&& if [ ! -e /usr/local/bin/pip3 ]; then : \
	&& curl -SLO "https://raw.githubusercontent.com/pypa/get-pip/430ba37776ae2ad89f794c7a43b90dc23bac334c/get-pip.py" \
	&& echo "19dae841a150c86e2a09d475b5eb0602861f2a5b7761ec268049a662dbd2bd0c  get-pip.py" | sha256sum -c - \
	&& python3 get-pip.py \
	&& rm get-pip.py \
	; fi \
	&& pip3 install --no-cache-dir --upgrade --force-reinstall pip=="$PYTHON_PIP_VERSION" setuptools=="$SETUPTOOLS_VERSION" \
	&& find /usr/local \
		\( -type d -a -name test -o -name tests \) \
		-o \( -type f -a -name '*.pyc' -o -name '*.pyo' \) \
		-exec rm -rf '{}' + \
	&& cd / \
	&& rm -rf /usr/src/python ~/.cache
# make some useful symlinks that are expected to exist
RUN cd /usr/local/bin \
	&& ln -sf pip3 pip \
	&& { [ -e easy_install ] || ln -s easy_install-* easy_install; } \
	&& ln -sf idle3 idle \
	&& ln -sf pydoc3 pydoc \
	&& ln -sf python3 python \
	&& ln -sf python3-config python-config
# Placeholder CMD; downstream images are expected to override it.
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
# Smoke-test the installed Python stack with balena's pinned test script.
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@python.sh" \
	&& echo "Running test-stack@python" \
	&& chmod +x test-stack@python.sh \
	&& bash test-stack@python.sh \
	&& rm -rf test-stack@python.sh
# Record human-readable image details for the `balena-info` command.
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v8 \nOS: Alpine Linux 3.14 \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nPython v3.10.0, Pip v21.2.4, Setuptools v58.0.0 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
# Install a one-shot /bin/sh shim (busybox variant) that prints balena-info on
# first use, then relinks busybox back as /bin/sh.
RUN echo $'#!/bin/bash\nbalena-info\nbusybox ln -sf /bin/busybox /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
	&& chmod +x /bin/sh-shim \
	&& ln -f /bin/sh /bin/sh.real \
	&& ln -f /bin/sh-shim /bin/sh | resin-io-library/base-images | balena-base-images/python/coral-dev/alpine/3.14/3.10.0/run/Dockerfile | Dockerfile | apache-2.0 | 4,127 |
# AUTOGENERATED FILE
# Balena base image: Node.js 14.18.3 + Yarn 1.22.4 on Debian Bullseye (run
# variant) for generic x86-64 devices. Regenerated by the balena base-images
# tooling -- manual edits will be overwritten.
FROM balenalib/generic-amd64-debian:bullseye-run
# Pinned versions of the toolchain installed below.
ENV NODE_VERSION 14.18.3
ENV YARN_VERSION 1.22.4
# Import the Node.js release signing key (falling back across keyservers),
# install curl/libatomic1 build prerequisites, download the prebuilt linux-x64
# Node tarball and verify its sha256, unpack it into /usr/local, then install
# Yarn after verifying its detached GPG signature and symlink yarn/yarnpkg
# onto PATH. apt lists are removed to keep the layer small.
RUN buildDeps='curl libatomic1' \
	&& set -x \
	&& for key in \
	6A010C5166006599AA17F08146C2130DFD2497F5 \
	; do \
		gpg --batch --keyserver pgp.mit.edu --recv-keys "$key" || \
		gpg --batch --keyserver keyserver.pgp.com --recv-keys "$key" || \
		gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
	done \
	&& apt-get update && apt-get install -y $buildDeps --no-install-recommends \
	&& rm -rf /var/lib/apt/lists/* \
	&& curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-x64.tar.gz" \
	&& echo "bd96f88e054801d1368787f7eaf77b49cd052b9543c56bd6bc0bfc90310e2756  node-v$NODE_VERSION-linux-x64.tar.gz" | sha256sum -c - \
	&& tar -xzf "node-v$NODE_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 \
	&& rm "node-v$NODE_VERSION-linux-x64.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
	&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& mkdir -p /opt/yarn \
	&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
	&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& npm config set unsafe-perm true -g --unsafe-perm \
	&& rm -rf /tmp/*
# Placeholder CMD; downstream images are expected to override it.
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
# Smoke-test the installed Node/Yarn stack with balena's pinned test script.
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@node.sh" \
	&& echo "Running test-stack@node" \
	&& chmod +x test-stack@node.sh \
	&& bash test-stack@node.sh \
	&& rm -rf test-stack@node.sh
# Record human-readable image details for the `balena-info` command.
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: Intel 64-bit (x86-64) \nOS: Debian Bullseye \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v14.18.3, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
# Install a one-shot /bin/sh shim that prints balena-info on first use, then
# restores the real shell.
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
	&& chmod +x /bin/sh-shim \
	&& cp /bin/sh /bin/sh.real \
	&& mv /bin/sh-shim /bin/sh | resin-io-library/base-images | balena-base-images/node/generic-amd64/debian/bullseye/14.18.3/run/Dockerfile | Dockerfile | apache-2.0 | 2,946 |
# AUTOGENERATED FILE
# Balena base image: Node.js 12.21.0 + Yarn 1.22.4 on Debian Bullseye (build
# variant) for the orangepi-plus2 (ARM v7) board. Regenerated by the balena
# base-images tooling -- manual edits will be overwritten.
FROM balenalib/orangepi-plus2-debian:bullseye-build
# Pinned versions of the toolchain installed below.
ENV NODE_VERSION 12.21.0
ENV YARN_VERSION 1.22.4
# Import the Node.js release signing key (falling back across keyservers),
# download the prebuilt linux-armv7l Node tarball and verify its sha256,
# unpack it into /usr/local, then install Yarn after verifying its detached
# GPG signature and symlink yarn/yarnpkg onto PATH.
# NOTE(review): ha.pool.sks-keyservers.net has been decommissioned; this
# presumably still succeeds via the first two keyservers -- confirm on rebuild.
RUN for key in \
	6A010C5166006599AA17F08146C2130DFD2497F5 \
	; do \
		gpg --batch --keyserver pgp.mit.edu --recv-keys "$key" || \
		gpg --batch --keyserver keyserver.pgp.com --recv-keys "$key" || \
		gpg --batch --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; \
	done \
	&& curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-armv7l.tar.gz" \
	&& echo "6edc31a210e47eb72b0a2a150f7fe604539c1b2a45e8c81d378ac9315053a54f  node-v$NODE_VERSION-linux-armv7l.tar.gz" | sha256sum -c - \
	&& tar -xzf "node-v$NODE_VERSION-linux-armv7l.tar.gz" -C /usr/local --strip-components=1 \
	&& rm "node-v$NODE_VERSION-linux-armv7l.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
	&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& mkdir -p /opt/yarn \
	&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
	&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& npm config set unsafe-perm true -g --unsafe-perm \
	&& rm -rf /tmp/*
# Placeholder CMD; downstream images are expected to override it.
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
# Smoke-test the installed Node/Yarn stack with balena's pinned test script.
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@node.sh" \
	&& echo "Running test-stack@node" \
	&& chmod +x test-stack@node.sh \
	&& bash test-stack@node.sh \
	&& rm -rf test-stack@node.sh
# Record human-readable image details for the `balena-info` command.
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Debian Bullseye \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v12.21.0, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
# Install a one-shot /bin/sh shim that prints balena-info on first use, then
# restores the real shell.
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
	&& chmod +x /bin/sh-shim \
	&& cp /bin/sh /bin/sh.real \
	&& mv /bin/sh-shim /bin/sh | nghiant2710/base-images | balena-base-images/node/orangepi-plus2/debian/bullseye/12.21.0/build/Dockerfile | Dockerfile | apache-2.0 | 2,795 |
# AUTOGENERATED FILE
# Balena base image: .NET 6.0 runtime on Debian Buster (run variant) for the
# artik530 (ARM v7) board. Regenerated by the balena base-images tooling --
# manual edits will be overwritten.
FROM balenalib/artik530-debian:buster-run
# Install TLS certificates, curl (used by later steps), and the native
# libraries the .NET runtime links against; apt lists are removed to keep the
# layer small.
RUN apt-get update \
    && apt-get install -y --no-install-recommends \
        ca-certificates \
        curl \
        \
        # .NET Core dependencies
        libc6 \
        libgcc1 \
        libgssapi-krb5-2 \
        libicu63 \
        libssl1.1 \
        libstdc++6 \
        zlib1g \
    && rm -rf /var/lib/apt/lists/*
# Configure web servers to bind to port 80 when present
ENV ASPNETCORE_URLS=http://+:80 \
    # Enable detection of running in a container
    DOTNET_RUNNING_IN_CONTAINER=true
# Install .NET Core
# Download the pinned linux-arm runtime tarball, verify its sha512, unpack it
# under /usr/share/dotnet and expose the dotnet launcher on PATH.
ENV DOTNET_VERSION 6.0.0
RUN curl -SL --output dotnet.tar.gz "https://dotnetcli.blob.core.windows.net/dotnet/Runtime/$DOTNET_VERSION/dotnet-runtime-$DOTNET_VERSION-linux-arm.tar.gz" \
    && dotnet_sha512='575037f2e164deaf3bcdd82f7b3f2b5a5784547c5bad4070375c00373722265401b88a81695b919f92ca176f21c1bdf1716f8fce16ab3d301ae666daa8cae750' \
    && echo "$dotnet_sha512  dotnet.tar.gz" | sha512sum -c - \
    && mkdir -p /usr/share/dotnet \
    && tar -zxf dotnet.tar.gz -C /usr/share/dotnet \
    && rm dotnet.tar.gz \
    && ln -s /usr/share/dotnet/dotnet /usr/bin/dotnet
# Placeholder CMD; downstream images are expected to override it.
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
# Smoke-test the installed .NET runtime with balena's pinned test script.
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/44e597e40f2010cdde15b3ba1e397aea3a5c5271/scripts/assets/tests/test-stack@dotnet.sh" \
    && echo "Running test-stack@dotnet" \
    && chmod +x test-stack@dotnet.sh \
    && bash test-stack@dotnet.sh \
    && rm -rf test-stack@dotnet.sh
# Record human-readable image details for the `balena-info` command.
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Debian Buster \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \ndotnet 6.0-runtime \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
# Install a one-shot /bin/sh shim that prints balena-info on first use, then
# restores the real shell.
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
    && chmod +x /bin/sh-shim \
    && cp /bin/sh /bin/sh.real \
    && mv /bin/sh-shim /bin/sh | resin-io-library/base-images | balena-base-images/dotnet/artik530/debian/buster/6.0-runtime/run/Dockerfile | Dockerfile | apache-2.0 | 2,521 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.