text stringlengths 2 99k | meta dict |
|---|---|
/// <summary>ASP.NET Web API host application; receives application-level lifecycle events from the runtime.</summary>
public class WebApiApplication : System.Web.HttpApplication
{
/// <summary>Called once by the ASP.NET runtime at application start-up;
/// registers Web API configuration (routes, formatters, etc.) through the
/// standard <c>GlobalConfiguration.Configure</c> hook.</summary>
protected void Application_Start()
{
GlobalConfiguration.Configure(WebApiConfig.Register);
}
} | {
"pile_set_name": "Github"
} |
#!/usr/bin/expect
#
# $Id: parent_installer.sh 421 2007-04-05 15:46:55Z dhill $
#
# Parent OAM Installer, copy RPM's and custom OS files from postConfigure script
# Argument 0 - Parent OAM IP address
# Argument 1 - Root Password of Parent OAM Module
# Argument 2 - Package version string (appended to the infinidb-* package names)
# Argument 3 - Release directory name under //calweb/shared/Iterations
# Argument 4 - Calpont Config File ("NULL" = reuse the RPM-saved config on the server)
# Argument 5 - Debug flag 1 for on, 0 for off (passed to log_user)
set timeout 40
set USERNAME root
set SERVER [lindex $argv 0]
set PASSWORD [lindex $argv 1]
set PACKAGE [lindex $argv 2]
set RELEASE [lindex $argv 3]
set CONFIGFILE [lindex $argv 4]
set DEBUG [lindex $argv 5]
# Derived package file names for the various RPMs handled below
set CALPONTPACKAGE infinidb-platform-$PACKAGE
set CALPONTPACKAGE0 infinidb-0$PACKAGE
set CALPONTPACKAGE1 infinidb-1$PACKAGE
set ORACLEPACKAGE infinidb-oracle$PACKAGE
set MYSQLPACKAGE infinidb-storage-engine-$PACKAGE
set MYSQLDPACKAGE infinidb-mysql-$PACKAGE
# Samba share holding the release iterations
set SHARED "//calweb/shared"
# Echo spawned-process output only when the debug flag is set
log_user $DEBUG
# All local commands below run inside this spawned shell
spawn -noecho /bin/bash
# Remove any previously fetched package files from the local staging area.
# The file names must be space-separated: a comma-joined list would be
# passed to rm as a single literal (nonexistent) file name.
send "rm -f $PACKAGE $CALPONTPACKAGE0 $CALPONTPACKAGE1 $ORACLEPACKAGE $MYSQLPACKAGE $MYSQLDPACKAGE\n"
#
# delete and erase all old packages from Director Module
#
send "ssh $USERNAME@$SERVER 'rm -f /root/calpont*.rpm'\n"
# First ssh contact: may first ask to accept the host key ("authenticity")
# before prompting for the password ("word: " matches "password: ").
expect {
-re "authenticity" { send "yes\n"
expect {
-re "word: " { send "$PASSWORD\n" } abort
}
}
-re "service not known" { send_user "FAILED: Invalid Host\n" ; exit -1 }
-re "word: " { send "$PASSWORD\n" } abort
}
# Wait for the shell prompt; a repeated password prompt means authentication failed.
expect {
-re "#" { } abort
-re "Permission denied, please try again" { send_user "FAILED: Invalid password\n" ; exit -1 }
}
#
# erase calpont-oracle package
#
expect -re "# "
send_user "Erase Old Calpont-Oracle Connector Package "
send "ssh $USERNAME@$SERVER ' rpm -e --nodeps --allmatches calpont-oracle'\n"
expect -re "word: "
# password for ssh
send "$PASSWORD\n"
# check return
expect {
-re "uninstall completed" { send_user "DONE" } abort
-re "# " { send_user "DONE" } abort
-re "not installed" { send_user "WARNING: Package not installed" } abort
-re "Failed dependencies" { send_user "FAILED: Failed dependencies\n" ; exit -1 }
-re "Permission denied, please try again" { send_user "FAILED: Invalid password\n" ; exit -1 }
}
send_user "\n"
#
# erase infinidb-mysql package
#
expect -re "# "
send_user "Erase Old Calpont-Mysqld Connector Package "
# Kill any running mysqld first so the package files are not in use.
send "ssh $USERNAME@$SERVER 'pkill -9 mysqld'\n"
expect -re "word: "
# password for ssh
send "$PASSWORD\n"
# check return
expect {
-re "# " { } abort
}
send "ssh $USERNAME@$SERVER ' rpm -e --nodeps --allmatches infinidb-mysql'\n"
expect -re "word: "
# password for ssh
send "$PASSWORD\n"
# check return
expect {
-re "uninstall completed" { send_user "DONE" } abort
-re "# " { send_user "DONE" } abort
-re "not installed" { send_user "WARNING: Package not installed" } abort
-re "Failed dependencies" { send_user "FAILED: Failed dependencies\n" ; exit -1 }
-re "Permission denied, please try again" { send_user "FAILED: Invalid password\n" ; exit -1 }
}
send_user "\n"
#
# erase infinidb-storage-engine package
#
expect -re "# "
send_user "Erase Old Calpont-Mysql Connector Package "
send "ssh $USERNAME@$SERVER ' rpm -e --nodeps --allmatches infinidb-storage-engine'\n"
expect -re "word: "
# password for ssh
send "$PASSWORD\n"
# check return
expect {
-re "uninstall completed" { send_user "DONE" } abort
-re "# " { send_user "DONE" } abort
-re "not installed" { send_user "WARNING: Package not installed" } abort
-re "Failed dependencies" { send_user "FAILED: Failed dependencies\n" ; exit -1 }
-re "Permission denied, please try again" { send_user "FAILED: Invalid password\n" ; exit -1 }
}
send_user "\n"
send "rm -f $PACKAGE\n"
#
# erase calpont package
#
expect -re "# "
send_user "Erase Old Calpont Packages "
send "ssh $USERNAME@$SERVER ' rpm -e --nodeps --allmatches infinidb-libs infinidb-platform infinidb-enterprise'\n"
expect -re "word: "
# password for ssh
send "$PASSWORD\n"
# check return
expect {
-re "uninstall completed" { send_user "DONE" } abort
-re "# " { send_user "DONE" } abort
-re "not installed" { send_user "WARNING: Package not installed" } abort
-re "Failed dependencies" { send_user "FAILED: Failed dependencies\n" ; exit -1 }
-re "Permission denied, please try again" { send_user "FAILED: Invalid password\n" ; exit -1 }
}
send_user "\n"
#
# get the calpont package
#
expect -re "# "
send_user "Get Calpont Package "
# Fetch the platform RPM from the samba share; try the infinidb-0* name
# first and fall back to the infinidb-1* name if it is not found.
send "smbclient $SHARED -Wcalpont -Uoamuser%Calpont1 -c 'cd Iterations/$RELEASE/;prompt OFF;mget $CALPONTPACKAGE0'\n"
expect {
-re "NT_STATUS_NO_SUCH_FILE" {
send "smbclient $SHARED -Wcalpont -Uoamuser%Calpont1 -c 'cd Iterations/$RELEASE/;prompt OFF;mget $CALPONTPACKAGE1'\n"
expect {
-re "NT_STATUS_NO_SUCH_FILE" { send_user "FAILED: $CALPONTPACKAGE not found\n" ; exit -1 }
-re "getting" { send_user "DONE" } abort
}
}
-re "getting" { send_user "DONE" } abort
}
send_user "\n"
#
# send the calpont package
#
send_user "Copy Calpont Package "
send "scp $CALPONTPACKAGE $USERNAME@$SERVER:/root/.\n"
expect -re "word: "
# password for ssh
send "$PASSWORD\n"
expect {
-re "100%" { send_user "DONE" } abort
-re "scp" { send_user "FAILED\n" ;
send_user "\n*** Installation Failed\n" ;
exit -1 }
-re "Permission denied, please try again" { send_user "FAILED: Invalid password\n" ; exit -1 }
-re "No such file or directory" { send_user "FAILED: Invalid package\n" ; exit -1 }
}
send_user "\n"
send "rm -f $PACKAGE\n"
#
# install calpont package
#
expect -re "# "
# Installs can take a while; raise the timeout for the rpm -ivh step.
set timeout 120
send_user "Install New Calpont Package "
send "ssh $USERNAME@$SERVER ' rpm -ivh /root/$CALPONTPACKAGE'\n"
expect -re "word: "
# password for ssh
send "$PASSWORD\n"
# check return
expect {
-re "completed" { send_user "DONE" } abort
-re "Failed dependencies" { send_user "FAILED: Failed dependencies\n" ;
send_user "\n*** Installation Failed\n" ;
exit -1 }
-re "Permission denied, please try again" { send_user "FAILED: Invalid password\n" ; exit -1 }
}
send_user "\n"
set timeout 40
expect -re "# "
send "rm -f $PACKAGE\n"
#
# If a config file was supplied, push it to the server; otherwise restore
# the RPM-saved configuration that the erase step left behind.
if { $CONFIGFILE != "NULL"} {
#
# copy over Calpont.xml file
#
send_user "Copy Calpont Configuration File "
send "scp $CONFIGFILE $USERNAME@$SERVER:/usr/local/Calpont/etc/Calpont.xml\n"
expect -re "word: "
# send the password
send "$PASSWORD\n"
expect {
-re "100%" { send_user "DONE" } abort
-re "scp" { send_user "FAILED\n" ;
send_user "\n*** Installation Failed\n" ;
exit -1 }
-re "Permission denied, please try again" { send_user "FAILED: Invalid password\n" ; exit -1 }
-re "No such file or directory" { send_user "FAILED: Invalid package\n" ; exit -1 }
}
} else {
#
# rename previous installed config file
#
send_user "Copy RPM-saved Calpont Configuration File "
send "ssh $USERNAME@$SERVER 'cd /usr/local/Calpont/etc/;mv -f Calpont.xml Calpont.xml.install;cp -v Calpont.xml.rpmsave Calpont.xml'\n"
expect -re "word: "
# password for ssh
send "$PASSWORD\n"
# check return
expect {
-re "Calpont.xml" { send_user "DONE" } abort
-re "Permission denied, please try again" { send_user "FAILED: Invalid password\n" ; exit -1 }
}
}
send_user "\n"
#
# get the calpont-oracle package (optional: skipped if not on the share)
#
set timeout 40
expect -re "# "
send_user "Get Calpont-Oracle Connector Package "
send "smbclient $SHARED -Wcalpont -Uoamuser%Calpont1 -c 'cd Iterations/$RELEASE/;prompt OFF;mget $ORACLEPACKAGE'\n"
expect {
-re "NT_STATUS_NO_SUCH_FILE" { send_user "WARNING: $ORACLEPACKAGE not found, skipping\n" } abort
-re "getting" { send_user "DONE\n"
#
# send the calpont-oracle package
#
expect -re "# "
send_user "Copy Calpont-Oracle Connector Package "
send "scp $ORACLEPACKAGE $USERNAME@$SERVER:/root/.\n"
expect -re "word: "
# password for ssh
send "$PASSWORD\n"
# check return
expect {
-re "100%" { send_user "DONE" } abort
-re "scp" { send_user "FAILED\n" ;
send_user "\n*** Installation Failed\n" ;
exit -1 }
-re "Permission denied, please try again" { send_user "FAILED: Invalid password\n" ; exit -1 }
-re "No such file or directory" { send_user "FAILED: Invalid package\n" ; exit -1 }
}
#
# install calpont-oracle package
#
send_user "\n"
expect -re "# "
set timeout 120
send_user "Install Calpont-Oracle Connector Package "
send "ssh $USERNAME@$SERVER ' rpm -ivh /root/$ORACLEPACKAGE'\n"
expect -re "word: "
# password for ssh
send "$PASSWORD\n"
# check return
expect {
-re "completed" { send_user "DONE" } abort
-re "Failed dependencies" { send_user "FAILED: Failed dependencies" ; exit -1 }
-re "Permission denied, please try again" { send_user "FAILED: Invalid password\n" ; exit -1 }
}
send_user "\n"
}
}
set timeout 40
expect -re "# "
#
# get the calpont-mysql package (optional: skipped if not on the share)
#
send_user "Get Calpont-Mysql Connector Package "
send "smbclient $SHARED -Wcalpont -Uoamuser%Calpont1 -c 'cd Iterations/$RELEASE/;prompt OFF;mget $MYSQLPACKAGE'\n"
expect {
-re "NT_STATUS_NO_SUCH_FILE" { send_user "WARNING: $MYSQLPACKAGE not found, skipping\n" } abort
-re "getting" { send_user "DONE\n"
#
# send the calpont-mysql package
#
expect -re "# "
send_user "Copy Calpont-Mysql Connector Package "
send "scp $MYSQLPACKAGE $USERNAME@$SERVER:/root/.\n"
expect -re "word: "
# password for ssh
send "$PASSWORD\n"
# check return
expect {
-re "100%" { send_user "DONE" } abort
-re "scp" { send_user "FAILED\n" ;
send_user "\n*** Installation Failed\n" ;
exit -1 }
-re "Permission denied, please try again" { send_user "FAILED: Invalid password\n" ; exit -1 }
-re "No such file or directory" { send_user "FAILED: Invalid package\n" ; exit -1 }
}
#
# install calpont-mysql package
#
send_user "\n"
expect -re "# "
set timeout 120
send_user "Install Calpont-Mysql Connector Package "
send "ssh $USERNAME@$SERVER ' rpm -ivh $MYSQLPACKAGE'\n"
expect -re "word: "
# password for ssh
send "$PASSWORD\n"
# check return
expect {
-re "completed" { send_user "DONE" } abort
-re "Failed dependencies" { send_user "FAILED: Failed dependencies" ; exit -1 }
-re "Permission denied, please try again" { send_user "FAILED: Invalid password\n" ; exit -1 }
}
send_user "\n"
}
}
expect -re "# "
#
# get the infinidb-mysql package (optional: skipped if not on the share)
#
send_user "Get Calpont-Mysqld Package "
send "smbclient $SHARED -Wcalpont -Uoamuser%Calpont1 -c 'cd Iterations/$RELEASE/;prompt OFF;mget $MYSQLDPACKAGE'\n"
expect {
-re "NT_STATUS_NO_SUCH_FILE" { send_user "WARNING: $MYSQLDPACKAGE not found, skipping\n" } abort
-re "getting" { send_user "DONE\n"
#
# send the infinidb-mysql package
#
expect -re "# "
send_user "Copy Calpont-Mysqld Package "
send "scp $MYSQLDPACKAGE $USERNAME@$SERVER:.\n"
expect -re "word: "
# password for ssh
send "$PASSWORD\n"
# check return
expect {
-re "100%" { send_user "DONE" } abort
-re "scp" { send_user "FAILED\n" ;
send_user "\n*** Installation Failed\n" ;
exit -1 }
-re "Permission denied, please try again" { send_user "FAILED: Invalid password\n" ; exit -1 }
-re "No such file or directory" { send_user "FAILED: Invalid package\n" ; exit -1 }
}
#
# install infinidb-mysql-mysqld package
#
send_user "\n"
expect -re "# "
set timeout 120
send_user "Install Calpont-Mysqld Package "
send "ssh $USERNAME@$SERVER ' rpm -ivh $MYSQLDPACKAGE'\n"
expect -re "word: "
# password for ssh
send "$PASSWORD\n"
# check return
expect {
-re "completed" { send_user "DONE" } abort
-re "Failed dependencies" { send_user "FAILED: Failed dependencies" ; exit -1 }
-re "Permission denied, please try again" { send_user "FAILED: Invalid password\n" ; exit -1 }
}
send_user "\n"
}
}
#
exit
| {
"pile_set_name": "Github"
} |
# Fri Nov 16 19:52:40 2018 -- reformated by PCGen PrettyLST v6.08.00
# CVS $Revision: 9628 $ $Author: zaister $ -- Tue Dec 2 17:52:53 2014 -- reformated by prettylst.pl v1.51 (build 25490)
SOURCELONG:Pathfinder Player Companion: Faiths of Balance SOURCESHORT:FOB SOURCEWEB:http://paizo.com/products/btpy8lvb SOURCEDATE:2011-07
# Original Entry by: Stefan Radermacher
###Block: Wondrous Items
# Equipment Name Type Cost Weight Source Page
Calming Oils TYPE:Magic.Wondrous.Elixir.Consumable COST:1000 SOURCEPAGE:p.26
Clockwork Key TYPE:Magic.Wondrous COST:500 WT:1 SOURCEPAGE:p.26
Manual of Calm Reflection TYPE:Magic.Wondrous COST:4000 WT:2 SOURCEPAGE:p.26
Mask of Destruction and Creation TYPE:Magic.Wondrous.Headgear COST:5000 SOURCEPAGE:p.27
Razored Ropes TYPE:Magic.Wondrous COST:8301 WT:3 SOURCEPAGE:p.27
Spiral Tiles TYPE:Magic.Wondrous COST:6000 WT:1 SOURCEPAGE:p.27
#
# End
#
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<root>
<!--
Microsoft ResX Schema
Version 2.0
The primary goals of this format is to allow a simple XML format
that is mostly human readable. The generation and parsing of the
various data types are done through the TypeConverter classes
associated with the data types.
Example:
... ado.net/XML headers & schema ...
<resheader name="resmimetype">text/microsoft-resx</resheader>
<resheader name="version">2.0</resheader>
<resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader>
<resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader>
<data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data>
<data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data>
<data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64">
<value>[base64 mime encoded serialized .NET Framework object]</value>
</data>
<data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
<value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value>
<comment>This is a comment</comment>
</data>
There are any number of "resheader" rows that contain simple
name/value pairs.
Each data row contains a name, and value. The row also contains a
type or mimetype. Type corresponds to a .NET class that support
text/value conversion through the TypeConverter architecture.
Classes that don't support this are serialized and stored with the
mimetype set.
The mimetype is used for serialized objects, and tells the
ResXResourceReader how to depersist the object. This is currently not
extensible. For a given mimetype the value must be set accordingly:
Note - application/x-microsoft.net.object.binary.base64 is the format
that the ResXResourceWriter will generate, however the reader can
read any of the formats listed below.
mimetype: application/x-microsoft.net.object.binary.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Binary.BinaryFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.soap.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Soap.SoapFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.bytearray.base64
value : The object must be serialized into a byte array
: using a System.ComponentModel.TypeConverter
: and then encoded with base64 encoding.
-->
<xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
<xsd:import namespace="http://www.w3.org/XML/1998/namespace" />
<xsd:element name="root" msdata:IsDataSet="true">
<xsd:complexType>
<xsd:choice maxOccurs="unbounded">
<xsd:element name="metadata">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" />
</xsd:sequence>
<xsd:attribute name="name" use="required" type="xsd:string" />
<xsd:attribute name="type" type="xsd:string" />
<xsd:attribute name="mimetype" type="xsd:string" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="assembly">
<xsd:complexType>
<xsd:attribute name="alias" type="xsd:string" />
<xsd:attribute name="name" type="xsd:string" />
</xsd:complexType>
</xsd:element>
<xsd:element name="data">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
<xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" />
<xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
<xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="resheader">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" />
</xsd:complexType>
</xsd:element>
</xsd:choice>
</xsd:complexType>
</xsd:element>
</xsd:schema>
<resheader name="resmimetype">
<value>text/microsoft-resx</value>
</resheader>
<resheader name="version">
<value>2.0</value>
</resheader>
<resheader name="reader">
<value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<resheader name="writer">
<value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<data name="Body-Welcome" xml:space="preserve">
<value><h1 class="display-4"> Bienvenue! </h1>
<p class="lead"> Le projet Jp est composé de 3 applications Web: serveur à authentification unique, gestion des utilisateurs et administrateur pour ID4. </p>
<hr class="my-4">
<p> Accédez sous chacune d’elles. </p>
<p>
Voici des liens vers le
<a class="text-white-50" href="https://github.com/brunohbrito/JP-Project"> référentiel de code source </a>,
et <a class="text-white-50" href="https://jp-project.readthedocs.io/en/latest/index.html"> docs pour plus d'informations </a>.
</p></value>
</data>
<data name="Menu-Home" xml:space="preserve">
<value>Accueil</value>
</data>
<data name="Menu-User" xml:space="preserve">
<value>Utilisateur</value>
</data>
<data name="Body-Click" xml:space="preserve">
<value>Cliquez sur</value>
</data>
<data name="Body-Demo" xml:space="preserve">
<value>Choisissez la démo</value>
</data>
<data name="Body-Document" xml:space="preserve">
<value>IdentityServer publie un
<a href="~/.well-known/openid-configuration"> document de découverte </a>
où vous pouvez trouver des métadonnées et des liens vers tous les points finaux, le matériel clé, etc.</value>
</data>
<data name="Body-Here" xml:space="preserve">
<value>ici</value>
</data>
<data name="Body-HimselfData" xml:space="preserve">
<value>est une interface utilisateur permettant aux utilisateurs de gérer eux-mêmes leurs données. Vous pouvez modifier votre profil, mettre à jour le mot de passe, etc.</value>
</data>
<data name="Body-IS4-Main-Goal" xml:space="preserve">
<value>Le principal objectif de Jp Project est de fournir une</value>
</data>
<data name="Body-Jp-Description" xml:space="preserve">
<value><h1 class="mb-3 text-sm-nowrap"> Projet Jp </h1>
<h2 class="mb-3 text-sm-nowrap"> Panneau d'administration gratuit pour l'administration d'IdentityServer4 </h2>
<p class="lead">
La seule interface utilisateur administrative Bootstrap dont vous avez besoin pour <strong> créer votre solution d'authentification unique </strong>.
</p>
<p class="lead mb-4">
Jp Project est une initiative open source. Construit dans Bootstrap 4, Angular 7 et .Net Core 2.2.
</p>
<div class="d-flex flex-column flex-md-row lead mb-3">
<a href="#live-preview" class="btn btn-lg btn-outline-light mb-3 mb-md-0 mr-md-3"> aperçu en direct </a>
<a href="https://github.com/brunohbrito/JP-Project" class="btn btn-lg btn-outline-light mb-3 mb-md-0 mr-md-3"> Télécharger le projet Jp </a>
</div>
<div class="version text-small mt-3">
Version de la bibliothèque: <strong> 1.1 </strong> · Version du modèle d'administration: <strong> 1.1 </strong>.
</div></value>
</data>
<data name="Body-Manage" xml:space="preserve">
<value>pour gérer vos subventions stockées.</value>
</data>
<data name="Body-ManageServer" xml:space="preserve">
<value>gérer le serveur.</value>
</data>
<data name="Body-RichUI" xml:space="preserve">
<value>pour gérer Identity Server 4. Une interface utilisateur riche, développée dans Angular 6.</value>
</data>
<data name="Body-SSO" xml:space="preserve">
<value>Authentification unique</value>
</data>
<data name="Body-UIAngular7" xml:space="preserve">
<value>Une interface utilisateur riche, développée dans le modèle Core UI Angular 7.</value>
</data>
<data name="Body-User-Interface" xml:space="preserve">
<value>Interface utilisateur administrateur</value>
</data>
<data name="Body-UserManagement" xml:space="preserve">
<value>Interface utilisateur de gestion des utilisateurs</value>
</data>
</root> | {
"pile_set_name": "Github"
} |
/* Bcj2Enc.c -- BCJ2 Encoder (Converter for x86 code)
2019-02-02 : Igor Pavlov : Public domain */
#include "Precomp.h"
/* #define SHOW_STAT */
#ifdef SHOW_STAT
#include <stdio.h>
#define PRF(x) x
#else
#define PRF(x)
#endif
#include <string.h>
#include "Bcj2.h"
#include "CpuArch.h"
#define CProb UInt16
#define kTopValue ((UInt32)1 << 24)
#define kNumModelBits 11
#define kBitModelTotal (1 << kNumModelBits)
#define kNumMoveBits 5
/* Reset the encoder to its initial state: clear the x86 scanning state,
   reset the embedded range coder, and set every probability counter to
   the midpoint (kBitModelTotal / 2). */
void Bcj2Enc_Init(CBcj2Enc *p)
{
  unsigned k;

  p->state = BCJ2_ENC_STATE_OK;
  p->finishMode = BCJ2_ENC_FINISH_MODE_CONTINUE;

  /* x86 stream scanning state */
  p->prevByte = 0;
  p->ip = 0;
  p->fileIp = 0;
  p->fileSize = 0;
  p->relatLimit = BCJ2_RELAT_LIMIT;
  p->tempPos = 0;
  p->flushPos = 0;

  /* range-coder state */
  p->cache = 0;
  p->range = 0xFFFFFFFF;
  p->low = 0;
  p->cacheSize = 1;

  for (k = 0; k < sizeof(p->probs) / sizeof(p->probs[0]); k++)
    p->probs[k] = kBitModelTotal >> 1;
}
/* LZMA-style range-coder "shift low" step: emits the settled top byte of
   the 64-bit low accumulator (propagating a pending carry through the run
   of cached 0xFF bytes) into the RC output stream.
   Returns True when the RC output buffer is full (caller must suspend and
   resume later); False on success. */
static BoolInt MY_FAST_CALL RangeEnc_ShiftLow(CBcj2Enc *p)
{
/* Flush only once the byte is settled: either the top byte is below 0xFF,
   or a carry (bit 32 of low) has been produced. Otherwise just extend the
   cached 0xFF run (cacheSize++ below). */
if ((UInt32)p->low < (UInt32)0xFF000000 || (UInt32)(p->low >> 32) != 0)
{
Byte *buf = p->bufs[BCJ2_STREAM_RC];
do
{
if (buf == p->lims[BCJ2_STREAM_RC])
{
/* RC output buffer exhausted: record position and ask the caller for more */
p->state = BCJ2_STREAM_RC;
p->bufs[BCJ2_STREAM_RC] = buf;
return True;
}
/* (low >> 32) is the carry bit; add it to the cached byte(s) */
*buf++ = (Byte)(p->cache + (Byte)(p->low >> 32));
p->cache = 0xFF;
}
while (--p->cacheSize);
p->bufs[BCJ2_STREAM_RC] = buf;
p->cache = (Byte)((UInt32)p->low >> 24);
}
p->cacheSize++;
p->low = (UInt32)p->low << 8;
return False;
}
/* Core BCJ2 encoding pass. Copies ordinary bytes to the MAIN stream while
   scanning for x86 branch opcodes (0xE8 CALL, 0xE9 JMP, and 0x0F 0x8x
   conditional jumps). For each branch it range-codes a convert/no-convert
   decision; converted branches have their absolute target written
   big-endian to the CALL or JUMP stream. On a full output buffer the
   function records the blocked stream in p->state and returns so the
   caller can supply more space; p->state stays BCJ2_ENC_STATE_ORIG while
   more input is wanted. */
static void Bcj2Enc_Encode_2(CBcj2Enc *p)
{
/* Resume path: a 4-byte target write was interrupted by a full buffer
   on the previous call; retry it from p->tempTarget first. */
if (BCJ2_IS_32BIT_STREAM(p->state))
{
Byte *cur = p->bufs[p->state];
if (cur == p->lims[p->state])
return;
SetBe32(cur, p->tempTarget);
p->bufs[p->state] = cur + 4;
}
p->state = BCJ2_ENC_STATE_ORIG;
for (;;)
{
/* range-coder normalization */
if (p->range < kTopValue)
{
if (RangeEnc_ShiftLow(p))
return;
p->range <<= 8;
}
{
{
const Byte *src = p->src;
const Byte *srcLim;
Byte *dest;
SizeT num = p->srcLim - src;
/* In CONTINUE mode hold the last 4 input bytes back: a branch near the
   end may still be missing part of its 32-bit relative target. */
if (p->finishMode == BCJ2_ENC_FINISH_MODE_CONTINUE)
{
if (num <= 4)
return;
num -= 4;
}
else if (num == 0)
break;
dest = p->bufs[BCJ2_STREAM_MAIN];
if (num > (SizeT)(p->lims[BCJ2_STREAM_MAIN] - dest))
{
num = p->lims[BCJ2_STREAM_MAIN] - dest;
if (num == 0)
{
p->state = BCJ2_STREAM_MAIN;
return;
}
}
srcLim = src + num;
/* A 0x0F left over from the previous chunk followed by 0x8x here
   completes a two-byte conditional-jump opcode. */
if (p->prevByte == 0x0F && (src[0] & 0xF0) == 0x80)
*dest = src[0];
else for (;;)
{
Byte b = *src;
*dest = b;
if (b != 0x0F)
{
/* 0xE8 or 0xE9: CALL/JMP found, drop to the branch handling below */
if ((b & 0xFE) == 0xE8)
break;
dest++;
if (++src != srcLim)
continue;
break;
}
dest++;
if (++src == srcLim)
break;
if ((*src & 0xF0) != 0x80)
continue;
*dest = *src;
break;
}
num = src - p->src;
if (src == srcLim)
{
/* no branch opcode in this chunk: commit the copied bytes and loop */
p->prevByte = src[-1];
p->bufs[BCJ2_STREAM_MAIN] = dest;
p->src = src;
p->ip += (UInt32)num;
continue;
}
{
/* branch opcode at *src; 'context' is the byte that precedes it */
Byte context = (Byte)(num == 0 ? p->prevByte : src[-1]);
BoolInt needConvert;
p->bufs[BCJ2_STREAM_MAIN] = dest + 1;
p->ip += (UInt32)num + 1;
src++;
needConvert = False;
if ((SizeT)(p->srcLim - src) >= 4)
{
UInt32 relatVal = GetUi32(src);
/* convert only targets inside the file window (when fileSize is set)
   and with magnitude below relatLimit */
if ((p->fileSize == 0 || (UInt32)(p->ip + 4 + relatVal - p->fileIp) < p->fileSize)
&& ((relatVal + p->relatLimit) >> 1) < p->relatLimit)
needConvert = True;
}
{
UInt32 bound;
unsigned ttt;
Byte b = src[-1];
/* probability slot: 0 for 0x0F 8x, 1 for 0xE9, 2+context for 0xE8 */
CProb *prob = p->probs + (unsigned)(b == 0xE8 ? 2 + (unsigned)context : (b == 0xE9 ? 1 : 0));
ttt = *prob;
bound = (p->range >> kNumModelBits) * ttt;
if (!needConvert)
{
/* encode "no convert" (low branch) and update the model upward */
p->range = bound;
*prob = (CProb)(ttt + ((kBitModelTotal - ttt) >> kNumMoveBits));
p->src = src;
p->prevByte = b;
continue;
}
/* encode "convert" (high branch) and update the model downward */
p->low += bound;
p->range -= bound;
*prob = (CProb)(ttt - (ttt >> kNumMoveBits));
{
UInt32 relatVal = GetUi32(src);
UInt32 absVal;
p->ip += 4;
absVal = p->ip + relatVal;
p->prevByte = src[3];
src += 4;
p->src = src;
{
/* write the absolute target to the CALL or JUMP stream */
unsigned cj = (b == 0xE8) ? BCJ2_STREAM_CALL : BCJ2_STREAM_JUMP;
Byte *cur = p->bufs[cj];
if (cur == p->lims[cj])
{
/* target stream full: stash the value and resume at the top next call */
p->state = cj;
p->tempTarget = absVal;
return;
}
SetBe32(cur, absVal);
p->bufs[cj] = cur + 4;
}
}
}
}
}
}
}
/* end of input: in END_STREAM mode flush the remaining range-coder bytes */
if (p->finishMode != BCJ2_ENC_FINISH_MODE_END_STREAM)
return;
for (; p->flushPos < 5; p->flushPos++)
if (RangeEnc_ShiftLow(p))
return;
p->state = BCJ2_ENC_STATE_OK;
}
/* Public entry point. Wraps Bcj2Enc_Encode_2 with handling for p->temp,
   a small carry-over buffer for input bytes that could not be consumed at
   the end of a previous call (e.g. a possible branch whose 4-byte target
   was not yet available). Bytes are replayed from temp, topped up one at
   a time from the new input, before the bulk of the input is encoded. */
void Bcj2Enc_Encode(CBcj2Enc *p)
{
PRF(printf("\n"));
PRF(printf("---- ip = %8d tempPos = %8d src = %8d\n", p->ip, p->tempPos, p->srcLim - p->src));
if (p->tempPos != 0)
{
unsigned extra = 0;
for (;;)
{
/* temporarily redirect the encoder's input to the temp buffer */
const Byte *src = p->src;
const Byte *srcLim = p->srcLim;
EBcj2Enc_FinishMode finishMode = p->finishMode;
p->src = p->temp;
p->srcLim = p->temp + p->tempPos;
if (src != srcLim)
p->finishMode = BCJ2_ENC_FINISH_MODE_CONTINUE;
PRF(printf(" ip = %8d tempPos = %8d src = %8d\n", p->ip, p->tempPos, p->srcLim - p->src));
Bcj2Enc_Encode_2(p);
{
/* drop the consumed prefix of temp and restore the real input pointers */
unsigned num = (unsigned)(p->src - p->temp);
unsigned tempPos = p->tempPos - num;
unsigned i;
p->tempPos = tempPos;
for (i = 0; i < tempPos; i++)
p->temp[i] = p->temp[(size_t)i + num];
p->src = src;
p->srcLim = srcLim;
p->finishMode = finishMode;
if (p->state != BCJ2_ENC_STATE_ORIG || src == srcLim)
return;
/* once every remaining temp byte came from the current input, rewind
   src over them and fall through to encode directly from the input */
if (extra >= tempPos)
{
p->src = src - tempPos;
p->tempPos = 0;
break;
}
/* feed one more input byte into temp and retry */
p->temp[tempPos] = src[0];
p->tempPos = tempPos + 1;
p->src = src + 1;
extra++;
}
}
}
PRF(printf("++++ ip = %8d tempPos = %8d src = %8d\n", p->ip, p->tempPos, p->srcLim - p->src));
Bcj2Enc_Encode_2(p);
/* stash any unconsumed tail of the input into temp for the next call */
if (p->state == BCJ2_ENC_STATE_ORIG)
{
const Byte *src = p->src;
unsigned rem = (unsigned)(p->srcLim - src);
unsigned i;
for (i = 0; i < rem; i++)
p->temp[i] = src[i];
p->tempPos = rem;
p->src = src + rem;
}
}
| {
"pile_set_name": "Github"
} |
package org.enso.interpreter.runtime.callable.argument;
import com.oracle.truffle.api.CallTarget;
import com.oracle.truffle.api.RootCallTarget;
import com.oracle.truffle.api.frame.MaterializedFrame;
/** Runtime representation of a suspended function argument. */
/** Runtime representation of a suspended (not-yet-evaluated) function argument. */
public class Thunk {
  private final RootCallTarget callTarget;
  private final MaterializedFrame scope;

  /**
   * Creates a runtime thunk.
   *
   * @param callTarget the {@link CallTarget} holding the argument's suspended expression
   * @param scope the caller frame in which {@code callTarget} will be evaluated
   */
  public Thunk(RootCallTarget callTarget, MaterializedFrame scope) {
    this.callTarget = callTarget;
    this.scope = scope;
  }

  /**
   * Returns the call target representing the argument's expression.
   *
   * @return the call target representing the argument's expression.
   */
  public CallTarget getCallTarget() {
    return callTarget;
  }

  /**
   * Returns the caller scope.
   *
   * @return the caller scope used for evaluating this thunk.
   */
  public MaterializedFrame getScope() {
    return scope;
  }
}
| {
"pile_set_name": "Github"
} |
Description: Converts the MFT Bodyfile into TLN format
Category: Timeline
Author: Mari DeGrazia
Version: 1.0
Id: 3bccbad7-a912-499a-ae6e-372381a585c1
BinaryUrl: https://github.com/keydet89/Tools/raw/master/exe/bodyfile.exe
ExportFormat: txt
Processors:
-
Executable: tln_tools\bodyfile.exe
CommandLine: -f %destinationDirectory%\mft_bodyfile -s %computerName%
ExportFormat: txt
ExportFile: temp.tln
Append: true
#The MFTECmd_$MFT_Bodyfile.mkape module must be run before this one to create the bodyfile. This module will then convert the bodyfile into TLN format
| {
"pile_set_name": "Github"
} |
{%MainUnit ../comctrls.pp}
{******************************************************************************
TTabSheet
******************************************************************************
Author: Mattias Gaertner
*****************************************************************************
This file is part of the Lazarus Component Library (LCL)
See the file COPYING.modifiedLGPL.txt, included in this distribution,
for details about the license.
*****************************************************************************
}
{ TTabSheet }
{ Returns the index of this sheet among the *visible* tabs of the page
  control, or -1 when the sheet's own tab is hidden. Counts how many
  pages before this one have a visible tab. }
function TTabSheet.GetTabIndex: Integer;
var
  Index: Integer;
begin
  Result := -1;
  if not TabVisible then Exit;
  Result := 0;
  for Index := 0 to PageIndex - 1 do
    if PageControl.Pages[Index].TabVisible then
      Inc(Result);
end;
{ The owning page control is simply the sheet's parent, when that parent
  is a TPageControl; otherwise nil. }
function TTabSheet.GetPageControl: TPageControl;
begin
  if Parent is TPageControl then
    Result := TPageControl(Parent)
  else
    Result := nil;
end;
{ Moves the sheet into (or out of) a page control by re-parenting it;
  a no-op when the sheet already belongs to APageControl. }
procedure TTabSheet.SetPageControl(APageControl: TPageControl);
begin
  if PageControl <> APageControl then
    Parent := APageControl;
end;
{ Registers the widgetset class for TTabSheet. The streamed 'Visible'
  property is registered as skipped because sheet visibility is driven by
  the page control; TabVisible is the property to use instead. }
class procedure TTabSheet.WSRegisterClass;
begin
RegisterPropertyToSkip(TTabSheet, 'Visible', 'Use TabVisible instead.', '');
inherited WSRegisterClass;
RegisterTabSheet;
end;
{ Standard constructor; performs no initialization beyond the inherited one. }
constructor TTabSheet.Create(TheOwner: TComponent);
begin
inherited Create(TheOwner);
end;
{ Destructor. If the sheet holds exactly one control that is currently
  docked (HostDockSite assigned), detach it from its dock site before the
  inherited teardown — NOTE(review): presumably this prevents the dock
  site from referencing the page being freed; confirm against the dock
  manager code. }
destructor TTabSheet.Destroy;
begin
if (ControlCount = 1) and (Controls[0].HostDockSite <> nil) then
Controls[0].HostDockSite := nil;
inherited Destroy;
end;
// included by comctrls.pp
| {
"pile_set_name": "Github"
} |
dnl This provides configure definitions used by all the newlib
dnl configure.in files.
dnl Defines the NEWLIB_VERSION m4 macro (required via AC_REQUIRE from
dnl NEWLIB_CONFIGURE below); the version number is maintained here in one place.
AC_DEFUN([DEF_NEWLIB_VERSION],
m4_define([NEWLIB_VERSION],[1.19.0]))
dnl Basic newlib configury. This calls basic introductory stuff,
dnl including AM_INIT_AUTOMAKE and AC_CANONICAL_HOST. It also runs
dnl configure.host. The only argument is the relative path to the top
dnl newlib directory.
AC_DEFUN([NEWLIB_CONFIGURE],
[AC_REQUIRE([DEF_NEWLIB_VERSION])
dnl Default to --enable-multilib
AC_ARG_ENABLE(multilib,
[ --enable-multilib build many library versions (default)],
[case "${enableval}" in
yes) multilib=yes ;;
no) multilib=no ;;
*) AC_MSG_ERROR(bad value ${enableval} for multilib option) ;;
esac], [multilib=yes])dnl
dnl Support --enable-target-optspace
AC_ARG_ENABLE(target-optspace,
[ --enable-target-optspace optimize for space],
[case "${enableval}" in
yes) target_optspace=yes ;;
no) target_optspace=no ;;
*) AC_MSG_ERROR(bad value ${enableval} for target-optspace option) ;;
esac], [target_optspace=])dnl
dnl Support --enable-malloc-debugging - currently only supported for Cygwin
AC_ARG_ENABLE(malloc-debugging,
[ --enable-malloc-debugging indicate malloc debugging requested],
[case "${enableval}" in
yes) malloc_debugging=yes ;;
no) malloc_debugging=no ;;
*) AC_MSG_ERROR(bad value ${enableval} for malloc-debugging option) ;;
esac], [malloc_debugging=])dnl
dnl Support --enable-newlib-multithread
AC_ARG_ENABLE(newlib-multithread,
[ --enable-newlib-multithread enable support for multiple threads],
[case "${enableval}" in
yes) newlib_multithread=yes ;;
no) newlib_multithread=no ;;
*) AC_MSG_ERROR(bad value ${enableval} for newlib-multithread option) ;;
esac], [newlib_multithread=yes])dnl
dnl Support --enable-newlib-iconv
AC_ARG_ENABLE(newlib-iconv,
[ --enable-newlib-iconv enable iconv library support],
[if test "${newlib_iconv+set}" != set; then
case "${enableval}" in
yes) newlib_iconv=yes ;;
no) newlib_iconv=no ;;
*) AC_MSG_ERROR(bad value ${enableval} for newlib-iconv option) ;;
esac
fi], [newlib_iconv=${newlib_iconv}])dnl
dnl Support --enable-newlib-elix-level
AC_ARG_ENABLE(newlib-elix-level,
[ --enable-newlib-elix-level supply desired elix library level (1-4)],
[case "${enableval}" in
0) newlib_elix_level=0 ;;
1) newlib_elix_level=1 ;;
2) newlib_elix_level=2 ;;
3) newlib_elix_level=3 ;;
4) newlib_elix_level=4 ;;
*) AC_MSG_ERROR(bad value ${enableval} for newlib-elix-level option) ;;
esac], [newlib_elix_level=0])dnl
dnl Support --disable-newlib-io-float
AC_ARG_ENABLE(newlib-io-float,
[ --disable-newlib-io-float disable printf/scanf family float support],
[case "${enableval}" in
yes) newlib_io_float=yes ;;
no) newlib_io_float=no ;;
*) AC_MSG_ERROR(bad value ${enableval} for newlib-io-float option) ;;
esac], [newlib_io_float=yes])dnl
dnl Support --disable-newlib-supplied-syscalls
AC_ARG_ENABLE(newlib-supplied-syscalls,
[ --disable-newlib-supplied-syscalls disable newlib from supplying syscalls],
[case "${enableval}" in
yes) newlib_may_supply_syscalls=yes ;;
no) newlib_may_supply_syscalls=no ;;
*) AC_MSG_ERROR(bad value ${enableval} for newlib-supplied-syscalls option) ;;
esac], [newlib_may_supply_syscalls=yes])dnl
AM_CONDITIONAL(MAY_SUPPLY_SYSCALLS, test x[$]{newlib_may_supply_syscalls} = xyes)
dnl We may get other options which we don't document:
dnl --with-target-subdir, --with-multisrctop, --with-multisubdir
test -z "[$]{with_target_subdir}" && with_target_subdir=.
if test "[$]{srcdir}" = "."; then
if test "[$]{with_target_subdir}" != "."; then
newlib_basedir="[$]{srcdir}/[$]{with_multisrctop}../$1"
else
newlib_basedir="[$]{srcdir}/[$]{with_multisrctop}$1"
fi
else
newlib_basedir="[$]{srcdir}/$1"
fi
AC_SUBST(newlib_basedir)
AC_CANONICAL_HOST
AM_INIT_AUTOMAKE([cygnus no-define 1.9.5])
# FIXME: We temporarily define our own version of AC_PROG_CC. This is
# copied from autoconf 2.12, but does not call AC_PROG_CC_WORKS. We
# are probably using a cross compiler, which will not be able to fully
# link an executable. This should really be fixed in autoconf
# itself.
dnl LIB_AC_PROG_CC_GNU: detect whether $CC is GNU C without linking anything.
dnl Preprocesses a probe file whose "yes;" survives only when __GNUC__ is
dnl defined, and caches the answer in ac_cv_prog_gcc.  This is a local copy
dnl from autoconf 2.12 that deliberately skips AC_PROG_CC_WORKS (see the
dnl FIXME above) because a cross compiler may be unable to link executables.
AC_DEFUN([LIB_AC_PROG_CC_GNU],
[AC_CACHE_CHECK(whether we are using GNU C, ac_cv_prog_gcc,
[dnl The semicolon is to pacify NeXT's syntax-checking cpp.
cat > conftest.c <<EOF
#ifdef __GNUC__
  yes;
#endif
EOF
if AC_TRY_COMMAND(${CC-cc} -E conftest.c) | egrep yes >/dev/null 2>&1; then
  ac_cv_prog_gcc=yes
else
  ac_cv_prog_gcc=no
fi])])
dnl LIB_AM_PROG_AS: set up the assembler.  Assembly sources are built with
dnl the C compiler by default, so CCAS/CCASFLAGS fall back to CC/CFLAGS
dnl unless the user supplied them in the environment.
AC_DEFUN([LIB_AM_PROG_AS],
[# By default we simply use the C compiler to build assembly code.
AC_REQUIRE([LIB_AC_PROG_CC])
test "${CCAS+set}" = set || CCAS=$CC
test "${CCASFLAGS+set}" = set || CCASFLAGS=$CFLAGS
AC_ARG_VAR([CCAS], [assembler compiler command (defaults to CC)])
AC_ARG_VAR([CCASFLAGS], [assembler compiler flags (defaults to CFLAGS)])
])
dnl LIB_AC_PROG_CC: find a C compiler (gcc preferred, plain cc otherwise)
dnl without running AC_PROG_CC's link test, then probe for GNU-ness and set
dnl default CFLAGS accordingly (-g -O2 for gcc with working -g, -O2/-g else).
dnl Replacement for AC_PROG_CC -- see the FIXME above for why.
AC_DEFUN([LIB_AC_PROG_CC],
[AC_BEFORE([$0], [AC_PROG_CPP])dnl
AC_CHECK_PROG(CC, gcc, gcc)
_AM_DEPENDENCIES(CC)
if test -z "$CC"; then
  AC_CHECK_PROG(CC, cc, cc, , , /usr/ucb/cc)
  test -z "$CC" && AC_MSG_ERROR([no acceptable cc found in \$PATH])
fi
LIB_AC_PROG_CC_GNU
if test $ac_cv_prog_gcc = yes; then
  GCC=yes
dnl Check whether -g works, even if CFLAGS is set, in case the package
dnl plays around with CFLAGS (such as to build both debugging and
dnl normal versions of a library), tasteless as that idea is.
  ac_test_CFLAGS="${CFLAGS+set}"
  ac_save_CFLAGS="$CFLAGS"
  _AC_PROG_CC_G
  if test "$ac_test_CFLAGS" = set; then
    CFLAGS="$ac_save_CFLAGS"
  elif test $ac_cv_prog_cc_g = yes; then
    CFLAGS="-g -O2"
  else
    CFLAGS="-O2"
  fi
else
  GCC=
  test "${CFLAGS+set}" = set || CFLAGS="-g"
fi
])
LIB_AC_PROG_CC
AC_CHECK_TOOL(AS, as)
AC_CHECK_TOOL(AR, ar)
AC_CHECK_TOOL(RANLIB, ranlib, :)
AC_CHECK_TOOL(READELF, readelf, :)
AC_PROG_INSTALL
# Hack to ensure that INSTALL won't be set to "../" with autoconf 2.13. */
ac_given_INSTALL=$INSTALL
AM_MAINTAINER_MODE
LIB_AM_PROG_AS
# We need AC_EXEEXT to keep automake happy in cygnus mode. However,
# at least currently, we never actually build a program, so we never
# need to use $(EXEEXT). Moreover, the test for EXEEXT normally
# fails, because we are probably configuring with a cross compiler
# which can't create executables. So we include AC_EXEEXT to keep
# automake happy, but we don't execute it, since we don't care about
# the result.
if false; then
AC_EXEEXT
dummy_var=1
fi
. [$]{newlib_basedir}/configure.host
newlib_cflags="[$]{newlib_cflags} -fno-builtin"
NEWLIB_CFLAGS=${newlib_cflags}
AC_SUBST(NEWLIB_CFLAGS)
LDFLAGS=${ldflags}
AC_SUBST(LDFLAGS)
AM_CONDITIONAL(ELIX_LEVEL_0, test x[$]{newlib_elix_level} = x0)
AM_CONDITIONAL(ELIX_LEVEL_1, test x[$]{newlib_elix_level} = x1)
AM_CONDITIONAL(ELIX_LEVEL_2, test x[$]{newlib_elix_level} = x2)
AM_CONDITIONAL(ELIX_LEVEL_3, test x[$]{newlib_elix_level} = x3)
AM_CONDITIONAL(ELIX_LEVEL_4, test x[$]{newlib_elix_level} = x4)
AM_CONDITIONAL(USE_LIBTOOL, test x[$]{use_libtool} = xyes)
# Hard-code OBJEXT. Normally it is set by AC_OBJEXT, but we
# use oext, which is set in configure.host based on the target platform.
OBJEXT=${oext}
AC_SUBST(OBJEXT)
AC_SUBST(oext)
AC_SUBST(aext)
AC_SUBST(lpfx)
AC_SUBST(libm_machine_dir)
AC_SUBST(machine_dir)
AC_SUBST(sys_dir)
])
| {
"pile_set_name": "Github"
} |
// Copyright 1998-2017 Epic Games, Inc. All Rights Reserved.
#include "MetalRHIPrivate.h"
#include "MetalProfiler.h"
#include "EngineGlobals.h"
#include "StaticBoundShaderState.h"
#include "MetalCommandBuffer.h"
// Stat storage for the Metal RHI profiler; each DEFINE_STAT instantiates the
// backing data for a counter declared in MetalProfiler.h.

// CPU-side timing of Metal RHI entry points.
DEFINE_STAT(STAT_MetalMakeDrawableTime);
DEFINE_STAT(STAT_MetalDrawCallTime);
DEFINE_STAT(STAT_MetalPrepareDrawTime);
// Uniform-buffer pool bookkeeping.
DEFINE_STAT(STAT_MetalUniformBufferCleanupTime);
DEFINE_STAT(STAT_MetalTotalUniformBufferMemory);
DEFINE_STAT(STAT_MetalFreeUniformBufferMemory);
DEFINE_STAT(STAT_MetalNumFreeUniformBuffers);
// State-object creation and resource paging times.
DEFINE_STAT(STAT_MetalPipelineStateTime);
DEFINE_STAT(STAT_MetalBoundShaderStateTime);
DEFINE_STAT(STAT_MetalVertexDeclarationTime);
DEFINE_STAT(STAT_MetalBufferPageOffTime);
DEFINE_STAT(STAT_MetalTexturePageOffTime);
// Live Metal object counts.
DEFINE_STAT(STAT_MetalBufferCount);
DEFINE_STAT(STAT_MetalTextureCount);
DEFINE_STAT(STAT_MetalCommandBufferCount);
DEFINE_STAT(STAT_MetalSamplerStateCount);
DEFINE_STAT(STAT_MetalDepthStencilStateCount);
DEFINE_STAT(STAT_MetalRenderPipelineStateCount);
DEFINE_STAT(STAT_MetalRenderPipelineColorAttachmentDescriptor);
DEFINE_STAT(STAT_MetalRenderPassDescriptorCount);
DEFINE_STAT(STAT_MetalRenderPassColorAttachmentDescriptorCount);
DEFINE_STAT(STAT_MetalRenderPassDepthAttachmentDescriptorCount);
DEFINE_STAT(STAT_MetalRenderPassStencilAttachmentDescriptorCount);
DEFINE_STAT(STAT_MetalVertexDescriptorCount);
DEFINE_STAT(STAT_MetalComputePipelineStateCount);
DEFINE_STAT(STAT_MetalFunctionCount);
// Buffer pool occupancy and memory.
DEFINE_STAT(STAT_MetalFreePooledBufferCount);
DEFINE_STAT(STAT_MetalPooledBufferCount);
DEFINE_STAT(STAT_MetalPooledBufferMem);
DEFINE_STAT(STAT_MetalUsedPooledBufferMem);
DEFINE_STAT(STAT_MetalFreePooledBufferMem);
DEFINE_STAT(STAT_MetalWastedPooledBufferMem);
// Buffer allocation traffic (pooled and native).
DEFINE_STAT(STAT_MetalBufferAlloctations);
DEFINE_STAT(STAT_MetalBufferFreed);
DEFINE_STAT(STAT_MetalBufferMemAlloc);
DEFINE_STAT(STAT_MetalBufferMemFreed);
DEFINE_STAT(STAT_MetalBufferNativeAlloctations);
DEFINE_STAT(STAT_MetalBufferNativeFreed);
DEFINE_STAT(STAT_MetalBufferNativeMemAlloc);
DEFINE_STAT(STAT_MetalBufferNativeMemFreed);
// Draw-preparation breakdown.
DEFINE_STAT(STAT_MetalPrepareVertexDescTime);
DEFINE_STAT(STAT_MetalBoundShaderPrepareDrawTime);
DEFINE_STAT(STAT_MetalBoundShaderLockTime);
DEFINE_STAT(STAT_MetalPipelineLockTime);
// Per-resource-class memory traffic.
DEFINE_STAT(STAT_MetalUniformMemAlloc);
DEFINE_STAT(STAT_MetalUniformMemFreed);
DEFINE_STAT(STAT_MetalVertexMemAlloc);
DEFINE_STAT(STAT_MetalVertexMemFreed);
DEFINE_STAT(STAT_MetalIndexMemAlloc);
DEFINE_STAT(STAT_MetalIndexMemFreed);
DEFINE_STAT(STAT_MetalTextureMemUpdate);
// Texture residency.
DEFINE_STAT(STAT_MetalPrivateTextureCount);
DEFINE_STAT(STAT_MetalManagedTextureCount);
DEFINE_STAT(STAT_MetalTexturePageOnTime);
DEFINE_STAT(STAT_MetalPrivateTextureMem);
DEFINE_STAT(STAT_MetalManagedTextureMem);
// GPU frame timing.
DEFINE_STAT(STAT_MetalGPUWorkTime);
DEFINE_STAT(STAT_MetalGPUIdleTime);
DEFINE_STAT(STAT_MetalPresentTime);

// Accumulators mirrored into the corresponding cycle counters in
// FMetalGPUProfiler::EndFrame(); written with atomic exchanges from
// command-buffer completion handlers, hence volatile.
int64 volatile GMetalTexturePageOnTime = 0;
int64 volatile GMetalGPUWorkTime = 0;
int64 volatile GMetalGPUIdleTime = 0;
int64 volatile GMetalPresentTime = 0;

DEFINE_STAT(STAT_MetalCommandBufferCreatedPerFrame);
DEFINE_STAT(STAT_MetalCommandBufferCommittedPerFrame);
#if METAL_STATISTICS
// Accumulates the per-draw pipeline statistics recorded for this event node
// into OutStats.  All counters are summed across the node's draws; the two
// shader cost fields are then averaged so they remain per-draw figures.
void FMetalEventNode::GetStats(FMetalPipelineStats& OutStats)
{
	for(auto Entry : DrawStats)
	{
		FMetalPipelineStats const& DrawStat = Entry->GetResult();
		OutStats.RHIPrimitives += DrawStat.RHIPrimitives;
		OutStats.RHIVertices += DrawStat.RHIVertices;
		OutStats.VertexFunctionCost += DrawStat.VertexFunctionCost;
		OutStats.FragmentFunctionCost += DrawStat.FragmentFunctionCost;
		OutStats.DrawCallTime += DrawStat.DrawCallTime;
		OutStats.InputVertices += DrawStat.InputVertices;
		OutStats.InputPrimitives += DrawStat.InputPrimitives;
		OutStats.VertexFunctionInvocations += DrawStat.VertexFunctionInvocations;
		OutStats.ClipperInvocations += DrawStat.ClipperInvocations;
		OutStats.ClipperPrimitives += DrawStat.ClipperPrimitives;
		OutStats.FragmentFunctionInvocations += DrawStat.FragmentFunctionInvocations;
	}

	uint32 Num = DrawStats.Num();
	if (Num > 0)
	{
		// Average, not sum: costs are reported per draw.
		OutStats.VertexFunctionCost /= Num;
		OutStats.FragmentFunctionCost /= Num;
	}
}
/** Recursively dumps stats for each node with a depth first traversal. */
// Node       - subtree root to print.
// RootResult - whole-frame GPU time in seconds, used to express node times as
//              a percentage of the frame.
// Depth      - recursion depth; only used to indent the output.
// NumNodes   - running count of nodes visited.
// NumDraws   - accumulates the number of draw calls seen in this subtree.
static void DumpStatsEventNode(FMetalEventNode* Node, float RootResult, int32 Depth, int32& NumNodes, int32& NumDraws)
{
	NumNodes++;
	if (Node->NumDraws > 0 || Node->Children.Num() > 0)
	{
		NumDraws += Node->NumDraws;
		// Percent that this node was of the total frame time
		const float Percent = Node->TimingResult * 100.0f / (RootResult * 1000.0f);
		const int32 EffectiveDepth = FMath::Max(Depth - 1, 0);

		FMetalPipelineStats Stats;
		FMemory::Memzero(Stats);
		Node->GetStats(Stats);

		// Print information about this node, padded to its depth in the tree
		float DrawCallTime = FPlatformTime::ToMilliseconds(Stats.DrawCallTime);
		UE_LOG(LogRHI, Warning, TEXT("%s%4.1f%%%5.2fms (%5.2fms) %s %u draws %u (%u) prims %u (%u) verts %u vert invoke %u vert cost %u clip invoke %u clip prims %u pixel invoke %u pixel cost"),
			*FString(TEXT("")).LeftPad(EffectiveDepth * 3),
			Percent,
			Node->TimingResult,
			DrawCallTime,
			*Node->Name,
			Node->NumDraws,
			Stats.RHIPrimitives,
			Stats.InputPrimitives,
			Stats.RHIVertices,
			Stats.InputVertices,
			Stats.VertexFunctionInvocations,
			Stats.VertexFunctionCost,
			Stats.ClipperInvocations,
			Stats.ClipperPrimitives,
			Stats.FragmentFunctionInvocations,
			Stats.FragmentFunctionCost
			);

		float TotalChildTime = 0;
		uint32 TotalChildDraws = 0;
		for (int32 ChildIndex = 0; ChildIndex < Node->Children.Num(); ChildIndex++)
		{
			FMetalEventNode* ChildNode = (FMetalEventNode*)(Node->Children[ChildIndex].GetReference());

			int32 NumChildDraws = 0;
			// Traverse children
			DumpStatsEventNode(ChildNode, RootResult, Depth + 1, NumNodes, NumChildDraws);
			NumDraws += NumChildDraws;

			TotalChildTime += ChildNode->TimingResult;
			TotalChildDraws += NumChildDraws;
		}

		const float UnaccountedTime = FMath::Max(Node->TimingResult - TotalChildTime, 0.0f);
		const float UnaccountedPercent = UnaccountedTime * 100.0f / (RootResult * 1000.0f);

		// Add an 'Other Children' node if necessary to show time spent in the current node that is not in any of its children
		if (Node->Children.Num() > 0 && TotalChildDraws > 0 && (UnaccountedPercent > 2.0f || UnaccountedTime > .2f))
		{
			UE_LOG(LogRHI, Warning, TEXT("%s%4.1f%%%5.2fms Unaccounted"),
				*FString(TEXT("")).LeftPad((EffectiveDepth + 1) * 3),
				UnaccountedPercent,
				UnaccountedTime);
		}
	}
}
#endif
// Scoped draw marker: notifies the profiler that GPU work for one draw begins
// on construction and finishes on destruction.  InProfiler may be null, in
// which case both notifications are no-ops.
FMetalDrawProfiler::FMetalDrawProfiler(struct FMetalGPUProfiler* InProfiler, uint32 InStartPoint, uint32 InEndPoint, uint32 NumPrimitives, uint32 NumVertices)
	: Profiler(InProfiler)
	, StartPoint(InStartPoint)
	, EndPoint(InEndPoint)
{
	if(Profiler)
	{
		Profiler->StartGPUWork(InStartPoint, InEndPoint, NumPrimitives, NumVertices);
	}
}

FMetalDrawProfiler::~FMetalDrawProfiler()
{
	if(Profiler)
	{
		Profiler->FinishGPUWork();
	}
}
FMetalEventNode::~FMetalEventNode()
{
#if METAL_STATISTICS
	// Draw-stat entries created by StartDraw() are owned by this node.
	for(auto Entry : DrawStats)
	{
		delete Entry;
	}
#endif
}

// Elapsed time for this node in seconds; only meaningful once both the
// Start() and Stop() completion handlers have filled in the timestamps.
float FMetalEventNode::GetTiming()
{
	return FPlatformTime::ToSeconds(EndTime - StartTime);
}

// Clears the timestamps and tells the context that timing for this node
// begins (the context will invoke the Start() handler).
void FMetalEventNode::StartTiming()
{
	StartTime = 0;
	EndTime = 0;
	Context->StartTiming(this);
}
// Returns a heap-copied (Block_copy) command-buffer completion handler that
// records this node's start timestamp: the buffer's GPU start time when the
// OS exposes it, otherwise the host clock at completion time.
MTLCommandBufferHandler FMetalEventNode::Start(void)
{
	return Block_copy(^(id<MTLCommandBuffer> CompletedBuffer)
	{
		if (FMetalCommandQueue::SupportsFeature(EMetalFeaturesGPUCommandBufferTimes))
		{
			const CFTimeInterval GpuTimeSeconds = ((id<IMetalCommandBufferExtensions>)CompletedBuffer).GPUStartTime;
			// Convert seconds into platform cycles so GetTiming() can use
			// FPlatformTime::ToSeconds() on the difference.
			const double CyclesPerSecond = 1.0 / FPlatformTime::GetSecondsPerCycle();
			StartTime = GpuTimeSeconds * CyclesPerSecond;
		}
		else
		{
			StartTime = mach_absolute_time();
		}
	});
}
// Tells the context that timing for this node ends (the context will invoke
// the Stop() handler).
void FMetalEventNode::StopTiming()
{
	Context->EndTiming(this);
}
// Returns a heap-copied completion handler that records this node's end
// timestamp.  For the root node it also publishes GGPUFrameTime on older OSes
// and, when not in full-profiling mode, deletes the node itself once done.
MTLCommandBufferHandler FMetalEventNode::Stop(void)
{
	return Block_copy(^(id<MTLCommandBuffer> CompletedBuffer)
	{
		if (FMetalCommandQueue::SupportsFeature(EMetalFeaturesGPUCommandBufferTimes))
		{
			// This is still used by ProfileGPU
			const CFTimeInterval GpuTimeSeconds = ((id<IMetalCommandBufferExtensions>)CompletedBuffer).GPUEndTime;
			const double CyclesPerSecond = 1.0 / FPlatformTime::GetSecondsPerCycle();
			EndTime = GpuTimeSeconds * CyclesPerSecond;
		}
		else
		{
			EndTime = mach_absolute_time();
		}

		if(bRoot)
		{
			// But we have a different mechanism for the overall frametime that works even with empty encoders and that doesn't report any GPU idle time between frames, we only use the fallback code below on older OSes.
			if (!FMetalCommandQueue::SupportsFeature(EMetalFeaturesGPUCommandBufferTimes))
			{
				uint32 Time = FMath::TruncToInt( double(GetTiming()) / double(FPlatformTime::GetSecondsPerCycle()) );
				FPlatformAtomics::InterlockedExchange((int32*)&GGPUFrameTime, (int32)Time);
			}

			// Non-profiling root nodes own themselves; this handler is the
			// last user, so the node self-destructs here.
			if(!bFullProfiling)
			{
				delete this;
			}
		}
	});
}
#if METAL_STATISTICS
// Begins collecting pipeline statistics for one draw (only when the command
// queue exposes a statistics interface and stats capture is active).
void FMetalEventNode::StartDraw(bool bActiveStats, uint32 StartPoint, uint32 EndPoint, uint32 NumPrimitives, uint32 NumVertices)
{
	if(Context->GetCommandQueue().GetStatistics() && bActiveStats)
	{
		DrawStats.Add(Context->GetCommandQueue().GetStatistics()->CreateDrawStats(Context->GetCurrentCommandBuffer(), StartPoint, EndPoint, NumPrimitives, NumVertices));
	}
}

// Closes the statistics entry opened by the most recent StartDraw(), if any.
void FMetalEventNode::StopDraw(void)
{
	if(DrawStats.Num())
	{
		DrawStats.Last()->End();
	}
}
#endif
// Profiler state may only be touched from the RHI thread or the rendering
// thread; callers gate their work on this check.
bool MetalGPUProfilerIsInSafeThread()
{
	return IsInRHIThread() || IsInActualRenderingThread();
}

/** Start this frame of perf tracking */
void FMetalEventNodeFrame::StartFrame()
{
	RootNode->StartTiming();
}

/** End this frame of perf tracking, but do not block yet */
void FMetalEventNodeFrame::EndFrame()
{
	RootNode->StopTiming();
}

/** Calculates root timing base frequency (if needed by this RHI) */
float FMetalEventNodeFrame::GetRootTimingResults()
{
	return RootNode->GetTiming();
}

// Intentionally empty: there is no disjoint-query data to log for this RHI.
void FMetalEventNodeFrame::LogDisjointQuery()
{
}
// Factory for profiler tree nodes, invoked by the base FGPUProfiler.  Nodes
// created here are neither root nor full-profiling nodes (last two ctor
// arguments are false).  Returns null when Metal GPU profiling is disabled.
FGPUProfilerEventNode* FMetalGPUProfiler::CreateEventNode(const TCHAR* InName, FGPUProfilerEventNode* InParent)
{
#if ENABLE_METAL_GPUPROFILE
	FMetalEventNode* EventNode = new FMetalEventNode(FMetalContext::GetCurrentContext(), InName, InParent, false, false);
	return EventNode;
#else
	return nullptr;
#endif
}
// Nothing to release; present to satisfy the profiler interface.
void FMetalGPUProfiler::Cleanup()
{
}

// Draw-event push/pop are forwarded to the base class only from threads that
// may safely touch profiler state (RHI or rendering thread).
void FMetalGPUProfiler::PushEvent(const TCHAR* Name, FColor Color)
{
	if(MetalGPUProfilerIsInSafeThread())
	{
		FGPUProfiler::PushEvent(Name, Color);
	}
}

void FMetalGPUProfiler::PopEvent()
{
	if(MetalGPUProfilerIsInSafeThread())
	{
		FGPUProfiler::PopEvent();
	}
}
// Resources for the commented-out GPU warm-up draw in BeginFrame() below.
//TGlobalResource<FVector4VertexDeclaration> GMetalVector4VertexDeclaration;
TGlobalResource<FTexture> GMetalLongTaskRT;

// Opens a profiling frame.  Re-entrant: only the first call of a nest creates
// the event-node frame (balanced by NumNestedFrames in EndFrame()).  Latches
// GTriggerGPUProfile so that a single "profilegpu" request captures exactly
// one frame.
void FMetalGPUProfiler::BeginFrame()
{
	if(!CurrentEventNodeFrame)
	{
		// Start tracking the frame
		CurrentEventNodeFrame = new FMetalEventNodeFrame(Context, GTriggerGPUProfile);
		CurrentEventNodeFrame->StartFrame();

		// NOTE(review): a pending profile request is silently dropped when
		// more than one GPU is active for rendering.
		if(GNumActiveGPUsForRendering > 1)
		{
			GTriggerGPUProfile = false;
		}

		if(GTriggerGPUProfile)
		{
			bTrackingEvents = true;
			bLatchedGProfilingGPU = true;
			GTriggerGPUProfile = false;

#if METAL_STATISTICS
			if(Context->GetCommandQueue().GetStatistics())
			{
				bActiveStats = true;
			}
#endif

			/*if (bLatchedGProfilingGPU)
			{
				// Issue a bunch of GPU work at the beginning of the frame, to make sure that we are GPU bound
				// We can't isolate idle time from GPU timestamps
				const auto FeatureLevel = GMaxRHIFeatureLevel;

				FRHICommandList_RecursiveHazardous RHICmdList(InRHI);
				if(!IsValidRef(GMetalLongTaskRT.TextureRHI))
				{
					FRHIResourceCreateInfo Info;
					GMetalLongTaskRT.TextureRHI = RHICreateTexture2D(1920, 1080, PF_B8G8R8A8, 1, 1, TexCreate_RenderTargetable, Info);
				}
				SetRenderTarget(RHICmdList, GMetalLongTaskRT.TextureRHI->GetTexture2D(), FTextureRHIRef());

				FGraphicsPipelineStateInitializer GraphicsPSOInit;
				RHICmdList.ApplyCachedRenderTargets(GraphicsPSOInit);
				GraphicsPSOInit.PrimitiveType = PT_TriangleStrip;
				GraphicsPSOInit.BlendState = TStaticBlendState<CW_RGBA, BO_Add, BF_One, BF_One>::GetRHI();
				GraphicsPSOInit.SetBlendFactor(FLinearColor::Black);
				GraphicsPSOInit.DepthStencilState = TStaticDepthStencilState<false, CF_Always>::GetRHI();
				GraphicsPSOInit.RasterizerState = TStaticRasterizerState<FM_Solid, CM_None>::GetRHI();

				auto ShaderMap = GetGlobalShaderMap(FeatureLevel);
				TShaderMapRef<TOneColorVS<true> > VertexShader(ShaderMap);
				TShaderMapRef<FLongGPUTaskPS> PixelShader(ShaderMap);

				GraphicsPSOInit.BoundShaderState.VertexDeclarationRHI = GMetalVector4VertexDeclaration.VertexDeclarationRHI;
				GraphicsPSOInit.BoundShaderState.VertexShaderRHI = GETSAFERHISHADER_VERTEX(*VertexShader);
				GraphicsPSOInit.BoundShaderState.PixelShaderRHI = GETSAFERHISHADER_PIXEL(*PixelShader);

				SetGraphicsPipelineState(RHICmdList, GraphicsPSOInit);

				// Draw a fullscreen quad
				FVector4 Vertices[4];
				Vertices[0].Set( -1.0f, 1.0f, 0, 1.0f );
				Vertices[1].Set( 1.0f, 1.0f, 0, 1.0f );
				Vertices[2].Set( -1.0f, -1.0f, 0, 1.0f );
				Vertices[3].Set( 1.0f, -1.0f, 0, 1.0f );
				DrawPrimitiveUP(RHICmdList, PT_TriangleStrip, 2, Vertices, sizeof(Vertices[0]));

				RHICmdList.SubmitCommandsHint();
				// RHICmdList flushes on destruction
			}*/
		}

		if(GEmitDrawEvents)
		{
			PushEvent(TEXT("FRAME"), FColor(0, 255, 0, 255));
		}
	}
	NumNestedFrames++;
}
// Closes a profiling frame once the outermost nested Begin/End pair unwinds:
// publishes the accumulated timing stats and, if a profile was latched this
// frame, dumps the event tree (and pipeline stats where available).
void FMetalGPUProfiler::EndFrame()
{
	if(--NumNestedFrames == 0)
	{
		if(GEmitDrawEvents)
		{
			PopEvent();
		}

#if PLATFORM_MAC
		FPlatformMisc::UpdateDriverMonitorStatistics(GetMetalDeviceContext().GetDeviceIndex());
#elif METAL_STATISTICS
		if(Context->GetCommandQueue().GetStatistics())
		{
			Context->GetCommandQueue().GetStatistics()->UpdateDriverMonitorStatistics();
		}
#endif
#if STATS
		// Publish the accumulators filled by command-buffer completion handlers.
		SET_CYCLE_COUNTER(STAT_MetalTexturePageOnTime, GMetalTexturePageOnTime);
		GMetalTexturePageOnTime = 0;

		SET_CYCLE_COUNTER(STAT_MetalGPUIdleTime, GMetalGPUIdleTime);
		SET_CYCLE_COUNTER(STAT_MetalGPUWorkTime, GMetalGPUWorkTime);
		SET_CYCLE_COUNTER(STAT_MetalPresentTime, GMetalPresentTime);
#endif

		if(CurrentEventNodeFrame)
		{
			CurrentEventNodeFrame->EndFrame();

			if(bLatchedGProfilingGPU)
			{
#if METAL_STATISTICS
				if(Context->GetCommandQueue().GetStatistics())
				{
					bActiveStats = false;
				}
#endif
				bTrackingEvents = false;
				bLatchedGProfilingGPU = false;

				UE_LOG(LogRHI, Warning, TEXT(""));
				UE_LOG(LogRHI, Warning, TEXT(""));
				CurrentEventNodeFrame->DumpEventTree();
#if METAL_STATISTICS
				if(Context->GetCommandQueue().GetStatistics())
				{
					float RootResult = CurrentEventNodeFrame->GetRootTimingResults();

					UE_LOG(LogRHI, Warning, TEXT(""));
					UE_LOG(LogRHI, Warning, TEXT(""));
					UE_LOG(LogRHI, Warning, TEXT("Pipeline stats hierarchy, total GPU time %.2fms"), RootResult * 1000.0f);

					int32 NumNodes = 0;
					int32 NumDraws = 0;
					for (int32 BaseNodeIndex = 0; BaseNodeIndex < CurrentEventNodeFrame->EventTree.Num(); BaseNodeIndex++)
					{
						DumpStatsEventNode((FMetalEventNode*)(CurrentEventNodeFrame->EventTree[BaseNodeIndex].GetReference()), RootResult, 0, NumNodes, NumDraws);
					}
				}
#endif
			}

			delete CurrentEventNodeFrame;
			CurrentEventNodeFrame = NULL;
		}
	}
}
// Registers one draw's primitive/vertex counts with the profiler and, when
// pipeline statistics are available, opens a per-draw stats capture on the
// current event node.  No-op outside a safe thread or without an open node.
void FMetalGPUProfiler::StartGPUWork(uint32 StartPoint, uint32 EndPoint, uint32 NumPrimitives, uint32 NumVertices)
{
	if(CurrentEventNode && MetalGPUProfilerIsInSafeThread())
	{
		RegisterGPUWork(NumPrimitives, NumVertices);
#if METAL_STATISTICS
		FMetalEventNode* EventNode = (FMetalEventNode*)CurrentEventNode;
		EventNode->StartDraw(bActiveStats, StartPoint, EndPoint, NumPrimitives, NumVertices);
#endif
	}
}
// Closes the per-draw stats capture opened by StartGPUWork() (statistics
// builds only; otherwise this function is empty).
void FMetalGPUProfiler::FinishGPUWork(void)
{
#if METAL_STATISTICS
	if(CurrentEventNode && MetalGPUProfilerIsInSafeThread())
	{
		FMetalEventNode* EventNode = (FMetalEventNode*)CurrentEventNode;
		EventNode->StopDraw();
	}
#endif
}
// Advances to the next slot of the per-frame timing ring buffers and clears
// it.  Only meaningful when the OS exposes GPU command-buffer timestamps.
void FMetalGPUProfiler::IncrementFrameIndex()
{
	if (FMetalCommandQueue::SupportsFeature(EMetalFeaturesGPUCommandBufferTimes))
	{
		FPlatformAtomics::InterlockedExchange(&FrameTimeGPUIndex, ((FrameTimeGPUIndex + 1) % MAX_FRAME_HISTORY));
		FPlatformAtomics::InterlockedExchange(&FrameStartGPU[FrameTimeGPUIndex], 0);
		FPlatformAtomics::InterlockedExchange(&FrameEndGPU[FrameTimeGPUIndex], 0);
		FPlatformAtomics::InterlockedExchange(&FrameGPUTime[FrameTimeGPUIndex], 0);
		FPlatformAtomics::InterlockedExchange(&FrameIdleTime[FrameTimeGPUIndex], 0);
		FPlatformAtomics::InterlockedExchange(&FramePresentTime[FrameTimeGPUIndex], 0);
	}
}
// Folds Buffer's GPU timestamps into the current frame slot, then attaches a
// completion handler that publishes the frame's total/busy/idle GPU times
// (GGPUFrameTime and the stat accumulators) once the buffer finishes.
void FMetalGPUProfiler::RecordFrame(id<MTLCommandBuffer> Buffer)
{
	RecordCommandBuffer(Buffer);

	if (FMetalCommandQueue::SupportsFeature(EMetalFeaturesGPUCommandBufferTimes))
	{
		// Capture the slot index now; the handler runs after IncrementFrameIndex()
		// may have moved FrameTimeGPUIndex on.
		uint32 Existing = FrameTimeGPUIndex;
		[Buffer addCompletedHandler:^(id <MTLCommandBuffer> InBuffer)
		{
			uint32 Time = FMath::TruncToInt(FPlatformTime::ToSeconds64(FrameEndGPU[Existing] - FrameStartGPU[Existing]) / FPlatformTime::GetSecondsPerCycle64());
			FPlatformAtomics::InterlockedExchange((int32*)&GGPUFrameTime, (int32)Time);

#if STATS
			FPlatformAtomics::InterlockedExchange(&GMetalGPUWorkTime, FrameGPUTime[Existing]);

			// Idle = wall-clock frame span minus accumulated busy time.
			Time = FMath::TruncToInt(FPlatformTime::ToSeconds64(((FrameEndGPU[Existing] - FrameStartGPU[Existing]) - FrameGPUTime[Existing])) / FPlatformTime::GetSecondsPerCycle64());
			FPlatformAtomics::InterlockedExchange(&FrameIdleTime[Existing], (int32)Time);
			FPlatformAtomics::InterlockedExchange(&GMetalGPUIdleTime, Time);
#endif //STATS
		}];
	}
}
// Attaches a completion handler to the present command buffer that measures
// its GPU execution span and publishes it as the frame's present time.
void FMetalGPUProfiler::RecordPresent(id<MTLCommandBuffer> Buffer)
{
	if (FMetalCommandQueue::SupportsFeature(EMetalFeaturesGPUCommandBufferTimes))
	{
		uint32 Existing = FrameTimeGPUIndex;
		[Buffer addCompletedHandler:^(id <MTLCommandBuffer> InBuffer)
		{
			id<IMetalCommandBufferExtensions> CB = (id<IMetalCommandBufferExtensions>)InBuffer;
			const CFTimeInterval GpuStartTimeSeconds = CB.GPUStartTime;
			const CFTimeInterval GpuEndTimeSeconds = CB.GPUEndTime;
			const double CyclesPerSecond = 1.0 / FPlatformTime::GetSecondsPerCycle();
			uint64 StartTime = GpuStartTimeSeconds * CyclesPerSecond;
			uint64 EndTime = GpuEndTimeSeconds * CyclesPerSecond;
			uint32 Time = FMath::TruncToInt(FPlatformTime::ToSeconds64(EndTime - StartTime) / FPlatformTime::GetSecondsPerCycle64());
			FPlatformAtomics::InterlockedExchange(&FramePresentTime[Existing], (int32)Time);
			FPlatformAtomics::InterlockedExchange(&GMetalPresentTime, Time);
		}];
	}
}
// Folds one command buffer's GPU start/end timestamps into the current frame
// slot: FrameEndGPU keeps the latest end, FrameStartGPU the earliest start,
// and FrameGPUTime accumulates the buffer's busy span.  The lock-free CAS
// loops are required because completion handlers from several command buffers
// may run concurrently against the same slot.
void FMetalGPUProfiler::RecordCommandBuffer(id<MTLCommandBuffer> Buffer)
{
	if (FMetalCommandQueue::SupportsFeature(EMetalFeaturesGPUCommandBufferTimes))
	{
		// Capture the slot index now; the handler runs after the frame index
		// may have advanced.
		uint32 Index = FrameTimeGPUIndex;
		[Buffer addCompletedHandler:^(id <MTLCommandBuffer> InBuffer)
		{
			id<IMetalCommandBufferExtensions> CB = (id<IMetalCommandBufferExtensions>)InBuffer;
			const CFTimeInterval GpuTimeSeconds = CB.GPUEndTime;
			const double CyclesPerSecond = 1.0 / FPlatformTime::GetSecondsPerCycle();
			uint64 Time = GpuTimeSeconds * CyclesPerSecond;
			uint64 Existing, New;
			// Frame end: keep the maximum end timestamp seen so far.
			do
			{
				Existing = FrameEndGPU[Index];
				New = Existing > 0 ? FMath::Max(Existing, Time) : Time;
			} while(FPlatformAtomics::InterlockedCompareExchange(&FrameEndGPU[Index], New, Existing) != Existing);

			const CFTimeInterval GpuStartTimeSeconds = CB.GPUStartTime;
			uint64 StartTime = GpuStartTimeSeconds * CyclesPerSecond;
			// Frame start: keep the minimum start timestamp seen so far.
			// BUGFIX: the empty-slot fallback previously stored Time (the GPU
			// *end* timestamp) instead of StartTime, inflating the recorded
			// frame start for the first buffer of each frame.
			do
			{
				Existing = FrameStartGPU[Index];
				New = Existing > 0 ? FMath::Min(Existing, StartTime) : StartTime;
			} while(FPlatformAtomics::InterlockedCompareExchange(&FrameStartGPU[Index], New, Existing) != Existing);

			// Accumulate this buffer's busy span (converted to cycles).
			Time = FMath::TruncToInt(FPlatformTime::ToSeconds64(Time - StartTime) / FPlatformTime::GetSecondsPerCycle64());
			FPlatformAtomics::InterlockedAdd(&FrameGPUTime[Index], (int32)Time);
		}];
	}
}
// Ring buffers of per-frame GPU timing, indexed by FrameTimeGPUIndex.
// Written from command-buffer completion handlers via atomic ops, hence
// volatile.
volatile int32 FMetalGPUProfiler::FrameTimeGPUIndex = 0;
volatile int64 FMetalGPUProfiler::FrameStartGPU[MAX_FRAME_HISTORY];
volatile int64 FMetalGPUProfiler::FrameEndGPU[MAX_FRAME_HISTORY];
volatile int64 FMetalGPUProfiler::FrameGPUTime[MAX_FRAME_HISTORY];
volatile int64 FMetalGPUProfiler::FrameIdleTime[MAX_FRAME_HISTORY];
volatile int64 FMetalGPUProfiler::FramePresentTime[MAX_FRAME_HISTORY];
| {
"pile_set_name": "Github"
} |
use graphql_parser::query::{Number, Value};
use std::collections::BTreeMap;
use std::iter::FromIterator;
/// Utilities for coercing GraphQL values based on GraphQL types.
pub mod coercion;
pub use self::coercion::MaybeCoercible;
/// Builds a `graphql_parser::query::Value::Object` from the given key/value
/// pairs, converting each `&str` key into an owned `String`.
/// If you don't need to determine which keys are included dynamically at
/// runtime consider using the `object! {}` macro instead.
pub fn object_value(data: Vec<(&str, Value)>) -> Value {
    let entries: BTreeMap<String, Value> = data
        .into_iter()
        .map(|(key, value)| (key.to_string(), value))
        .collect();
    Value::Object(entries)
}
/// Conversion of a Rust value into a GraphQL `Value`.
///
/// Implemented below for `Value` itself, strings, integers, `Option` and
/// `Vec` so heterogeneous data can be fed to `object_value` / `object!`.
pub trait IntoValue {
    fn into_value(self) -> Value;
}
// Identity conversion: a `Value` is already a `Value`.
impl IntoValue for Value {
    #[inline]
    fn into_value(self) -> Value {
        self
    }
}
// Borrowed strings are cloned and delegated to the `String` implementation.
impl IntoValue for &'_ str {
    #[inline]
    fn into_value(self) -> Value {
        self.to_owned().into_value()
    }
}
// `i32` maps to the GraphQL `Int` scalar.
impl IntoValue for i32 {
    #[inline]
    fn into_value(self) -> Value {
        Value::Int(Number::from(self))
    }
}
// `u64` is encoded as a string rather than `Value::Int` -- presumably because
// GraphQL `Int` is a signed 32-bit scalar and cannot represent the full u64
// range; confirm against the consuming schema.
impl IntoValue for u64 {
    #[inline]
    fn into_value(self) -> Value {
        Value::String(self.to_string())
    }
}
/// `None` becomes `Value::Null`; `Some(v)` converts the wrapped value.
impl<T: IntoValue> IntoValue for Option<T> {
    #[inline]
    fn into_value(self) -> Value {
        self.map_or(Value::Null, T::into_value)
    }
}
/// Converts every element and wraps the results in a `Value::List`.
impl<T: IntoValue> IntoValue for Vec<T> {
    #[inline]
    fn into_value(self) -> Value {
        let items: Vec<Value> = self.into_iter().map(T::into_value).collect();
        Value::List(items)
    }
}
// Generates trivial `IntoValue` impls that wrap `self` in the named `Value`
// variant, e.g. `(String, String)` => `Value::String(self)`.
macro_rules! impl_into_values {
    ($(($T:ty, $V:ident)),*) => {
        $(
            impl IntoValue for $T {
                #[inline]
                fn into_value(self) -> Value {
                    Value::$V(self)
                }
            }
        )+
    };
}

// Direct wrappers; note `Number` already is the payload type of `Value::Int`.
impl_into_values![
    (String, String),
    (f64, Float),
    (bool, Boolean),
    (Number, Int)
];
/// Creates a `graphql_parser::query::Value::Object` from key/value pairs.
///
/// Keys are the bare identifiers written at the call site (stringified);
/// values may be any type implementing `IntoValue`.  The second arm merely
/// normalizes calls without a trailing comma onto the first arm.
#[macro_export]
macro_rules! object {
    ($($name:ident: $value:expr,)*) => {
        {
            let mut result = ::std::collections::BTreeMap::new();
            $(
                let value = $crate::prelude::IntoValue::into_value($value);
                result.insert(stringify!($name).to_string(), value);
            )*
            ::graphql_parser::query::Value::Object(result)
        }
    };
    ($($name:ident: $value:expr),*) => {
        object! {$($name: $value,)*}
    };
}
| {
"pile_set_name": "Github"
} |
---
title: "微服务的战争:级联故障和雪崩"
date: 2020-08-25T21:08:39+08:00
toc: true
images:
tags:
- 微服务
---
> “微服务的战争” 是一个关于微服务设计思考的系列题材,主要是针对在微服务化后所出现的一些矛盾/冲突点,不涉及具体某一个知识点深入。如果你有任何问题或建议,欢迎随时交流。
在 [微服务的战争:统一且标准化](https://eddycjy.com/posts/microservice/standardization/) 中,经过好几周与不同业务组不同事业部的跨部门讨论后,终于把初始的标准化方案给定下来了,大家欢快的使用起了内部的统一框架,疯狂的创建起了新服务,没隔多久服务调用链就变成了下图:

服务间存在多次内部调用,服务 A =》服务 B =》服务 C =》服务D,而 服务 E =》 服务 B,服务 F =》服务 E,也就是存在着多个流量入口,且依赖相同的服务。
## 背景
服务与服务中,总存在业务服务,公共服务,基础服务等类型。但在某一个夜晚,突然发现 BFF 调用后端服务开始逐渐不正常,客户给你截图反馈问题,你发现有点问题:

单从表现来看,你发现是 BFF 调用服务 A 极度缓慢,一时也不知道怎么了……正当你以为是服务 A 出了问题、想着万能重启一下时,你在日志平台和链路追踪系统一看,发现了大量的错误日志和缓慢调用,让你略微震惊,一时间不知道从何下手。
这可怎么办?
## 级联故障和雪崩
实际上这是一次很经典的级联故障,最终导致系统雪崩的情景再现。单从上述拓扑来看,问题点之一在于服务 B:

服务 B 本身作为服务 A 和服务 F 的两个流量入口必经之处,想必至少是一个公共服务,但他也依赖了其他多个服务。因此若服务 C 和服务 D 其中一个有问题,在没有熔断措施的情况下,就出现级联故障,系统逐渐崩盘,最后雪崩:

服务 D 所依赖的外部接口出现了故障,而他并没有做任何的控制,因此扩散到了所有调用到他的服务,自然也就包含服务 B,因此最终出现系统雪崩。
这种最经典的是出现在默认 Go http client 调用没有设置 Timeout,从而只要出现一次故障,就足矣让记住这类 “坑”,毕竟崩的 ”慢“,错误日志还多。
## 解决方法
常见的方式是**根据特定的规则/规律进行熔断和降级**,避免请求发生堆积:
- 超时时间控制。
- 慢调用比例。
- 错误比例。
- 自适应(例如:负载情况等)。
当然,这也只是壮士断腕,后续措施还包含监控告警,通知对应的开发人员来处理。且需提前对被降级的模块进行业务逻辑进行处理等等,这样才能够比较柔和且快速地度过这一次危机。
## 总结
在分布式应用中,级联故障和雪崩是非常常见的,一些开发同学在模块设计时可能并没有意识到这块的问题,在微服务化后会一个不留神就碰到,因为其调用链变得特别的长且多。因此建议配套设施和限流熔断措施都应该及时跟上,否则面对一大堆的错误日志还是很无奈的。
同时,监控告警的建设也要做,因为在危机出现时,有一个 HTTP 调用的 P95/P99 告警出现,那就比较舒心了,直接 root cause。 | {
"pile_set_name": "Github"
} |
<!--
Copyright (c) 2006-2013, JGraph Ltd
Edge tolerance example for mxGraph. This example demonstrates increasing
the tolerance for hit detection on edges.
-->
<html>
<head>
<title>Edge tolerance example for mxGraph</title>
<!-- Sets the basepath for the library if not in same directory -->
<script type="text/javascript">
mxBasePath = '../src';
</script>
<!-- Loads and initializes the library -->
<script type="text/javascript" src="../src/js/mxClient.js"></script>
<!-- Example code -->
<script type="text/javascript">
// Program starts here. Creates a sample graph in the
// DOM node with the specified ID. This function is invoked
// from the onLoad event handler of the document (see below).
function main(container)
{
// Checks if the browser is supported
if (!mxClient.isBrowserSupported())
{
// Displays an error message if the browser is not supported.
mxUtils.error('Browser is not supported!', 200, false);
}
else
{
// Overrides the mouse event dispatching mechanism to update the
// cell which is associated with the event in case the native hit
// detection did not return anything.
var mxGraphFireMouseEvent = mxGraph.prototype.fireMouseEvent;
mxGraph.prototype.fireMouseEvent = function(evtName, me, sender)
{
// Checks if native hit detection did not return anything
if (me.getState() == null)
{
// Updates the graph coordinates in the event since we need
// them here. Storing them in the event means the overridden
// method doesn't have to do this again.
if (me.graphX == null || me.graphY == null)
{
var pt = mxUtils.convertPoint(this.container, me.getX(), me.getY());
me.graphX = pt.x;
me.graphY = pt.y;
}
var cell = this.getCellAt(me.graphX, me.graphY);
if (this.getModel().isEdge(cell))
{
me.state = this.view.getState(cell);
if (me.state != null && me.state.shape != null)
{
graph.container.style.cursor = me.state.shape.node.style.cursor;
}
}
}
if (me.state == null)
{
graph.container.style.cursor = 'default';
}
mxGraphFireMouseEvent.apply(this, arguments);
};
// Overrides double click handling to use the tolerance
var mxGraphDblClick = mxGraph.prototype.dblClick;
mxGraph.prototype.dblClick = function(evt, cell)
{
if (cell == null)
{
var pt = mxUtils.convertPoint(this.container,
mxEvent.getClientX(evt), mxEvent.getClientY(evt));
cell = this.getCellAt(pt.x, pt.y);
}
mxGraphDblClick.call(this, evt, cell);
};
// Creates the graph inside the given container
var graph = new mxGraph(container);
graph.setTolerance(20);
// Gets the default parent for inserting new cells. This
// is normally the first child of the root (ie. layer 0).
var parent = graph.getDefaultParent();
// Adds cells to the model in a single step
graph.getModel().beginUpdate();
try
{
var v1 = graph.insertVertex(parent, null, 'Hello,', 120, 120, 80, 30);
var v2 = graph.insertVertex(parent, null, 'World!', 400, 250, 80, 30);
var e1 = graph.insertEdge(parent, null, '', v1, v2, 'edgeStyle=orthogonalEdgeStyle;');
var e2 = graph.insertEdge(parent, null, '', v2, v1, 'edgeStyle=orthogonalEdgeStyle;');
}
finally
{
// Updates the display
graph.getModel().endUpdate();
}
}
};
</script>
</head>
<!-- Page passes the container for the graph to the program -->
<body onload="main(document.getElementById('graphContainer'))">
<!-- Creates a container for the graph with a grid wallpaper -->
<div id="graphContainer"
style="overflow:hidden;width:641px;height:481px;background:url('editors/images/grid.gif');cursor:default;">
</div>
</body>
</html>
| {
"pile_set_name": "Github"
} |
package cn.edu.jxnu.awesome_campus.support.utils.net.callback;
import com.squareup.okhttp.Headers;
import com.squareup.okhttp.Response;
import java.io.IOException;
import java.io.InputStream;
/**
* Created by MummyDing on 16-1-25.
* GitHub: https://github.com/MummyDing
* Blog: http://blog.csdn.net/mummyding
*/
/**
 * Network callback that hands the raw response body to subclasses as an
 * {@link InputStream} together with the response headers.
 */
public abstract class InputStreamCallback extends NetCallback {

    /** Invoked with the response body stream and headers on completion. */
    public abstract void onSuccess(InputStream result, Headers headers);

    @Override
    public void onResponse(Response response) throws IOException {
        InputStream bodyStream = response.body().byteStream();
        Headers responseHeaders = response.headers();
        onSuccess(bodyStream, responseHeaders);
    }
}
| {
"pile_set_name": "Github"
} |
/* Toybox infrastructure.
*
* Copyright 2006 Rob Landley <rob@landley.net>
*/
// Stuff that needs to go before the standard headers
#include "generated/config.h"
#include "lib/portability.h"
// General posix-2008 headers
#include <ctype.h>
#include <dirent.h>
#include <errno.h>
#include <fcntl.h>
#include <fnmatch.h>
#include <grp.h>
#include <inttypes.h>
#include <limits.h>
#include <math.h>
#include <paths.h>
#include <pwd.h>
#include <regex.h>
#include <sched.h>
#include <setjmp.h>
#include <signal.h>
#include <stdarg.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <strings.h>
#include <sys/mman.h>
#include <sys/resource.h>
#include <sys/stat.h>
#include <sys/statvfs.h>
#include <sys/time.h>
#include <sys/times.h>
#include <sys/utsname.h>
#include <sys/wait.h>
#include <syslog.h>
#include <termios.h>
#include <time.h>
#include <unistd.h>
#include <utime.h>
// Posix networking
#include <arpa/inet.h>
#include <netdb.h>
#include <net/if.h>
#include <netinet/in.h>
#include <netinet/tcp.h>
#include <poll.h>
#include <sys/socket.h>
#include <sys/un.h>
// Internationalization support (also in POSIX and LSB)
#include <locale.h>
#include <wchar.h>
#include <wctype.h>
// LSB 4.1 headers
#include <sys/ioctl.h>
#include "lib/lib.h"
#include "lib/lsm.h"
#include "lib/toyflags.h"
// Get list of function prototypes for all enabled command_main() functions.
#define NEWTOY(name, opts, flags) void name##_main(void);
#define OLDTOY(name, oldname, flags) void oldname##_main(void);
#include "generated/newtoys.h"
#include "generated/flags.h"
#include "generated/globals.h"
#include "generated/tags.h"
// These live in main.c
struct toy_list *toy_find(char *name); // look up a command in toy_list by name
void toy_init(struct toy_list *which, char *argv[]); // set up global toys context for a command
void toy_singleinit(struct toy_list *which, char *argv[]); // partial init variant -- see main.c
void toy_exec(char *argv[]); // dispatch argv[0] as a toybox command, if known
// Array of available commands
extern struct toy_list {
  char *name;              // command name the user types
  void (*toy_main)(void);  // entry point (the <name>_main from newtoys.h)
  char *options;           // option string consumed by get_optflags()
  unsigned flags;          // TOYFLAG_* values (see lib/toyflags.h)
} toy_list[];
// Global context shared by all commands.
extern struct toy_context {
  struct toy_list *which; // Which entry in toy_list is this one?
  char **argv; // Original command line arguments
  char **optargs; // Arguments left over from get_optflags()
  unsigned long long optflags; // Command line option flags from get_optflags()
  int optc; // Count of optargs
  short toycount; // Total number of commands in this build
  char exitval; // Value error_exit feeds to exit()
  char wasroot; // dropped setuid

  // toy_init() should not zero past here.
  // (Fields below survive re-initialization when one toybox command
  // execs another in-process.)

  sigjmp_buf *rebound; // siglongjmp here instead of exit when do_rebound
  struct arg_list *xexit; // atexit() functions for xexit(), set by sigatexit()
  void *stacktop; // nested toy_exec() call count, or 0 if vforked
  int envc; // Count of original environ entries
  int old_umask; // Old umask preserved by TOYFLAG_UMASK
  short signal; // generic_signal() records what signal it saw here
  int signalfd; // and writes signal to this fd, if set
} toys;
// Two big temporary buffers: one for use by commands, one for library functions
extern char toybuf[4096], libbuf[4096];
extern char **environ;
#define FLAG(x) (toys.optflags&FLAG_##x)
#define GLOBALS(...)
#define ARRAY_LEN(array) (sizeof(array)/sizeof(*array))
#define TAGGED_ARRAY(X, ...) {__VA_ARGS__}
#ifndef TOYBOX_VERSION
#ifndef TOYBOX_VENDOR
#define TOYBOX_VENDOR ""
#endif
#define TOYBOX_VERSION "0.8.3"TOYBOX_VENDOR
#endif
| {
"pile_set_name": "Github"
} |
<?php
/**
* CodeIgniter
*
* An open source application development framework for PHP
*
* This content is released under the MIT License (MIT)
*
* Copyright (c) 2014 - 2018, British Columbia Institute of Technology
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
* @package CodeIgniter
* @author EllisLab Dev Team
* @copyright Copyright (c) 2008 - 2014, EllisLab, Inc. (https://ellislab.com/)
* @copyright Copyright (c) 2014 - 2018, British Columbia Institute of Technology (http://bcit.ca/)
* @license http://opensource.org/licenses/MIT MIT License
* @link https://codeigniter.com
* @since Version 1.0.0
* @filesource
*/
defined('BASEPATH') OR exit('No direct script access allowed');

// Language strings for byte-size unit labels (abbreviations plus the
// full "Bytes" label). Values are user-facing text; keys are read by
// CodeIgniter helpers — do not rename the keys.
$lang['terabyte_abbr'] = 'TB';
$lang['gigabyte_abbr'] = 'GB';
$lang['megabyte_abbr'] = 'MB';
$lang['kilobyte_abbr'] = 'KB';
$lang['bytes'] = 'Bytes';
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2000 Silicon Graphics, Inc. All Rights Reserved.
* AUTHOR : William Roske
* CO-PILOT : Dave Fenner
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of version 2 of the GNU General Public License as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it would be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
*
* Further, this software is distributed without any warranty that it is
* free of the rightful claim of any third person regarding infringement
* or the like. Any license provided herein, whether implied or
* otherwise, applies only to this software file. Patent licenses, if
* any, provided herein do not apply to combinations of this program with
* other software, or any other product whatsoever.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Contact information: Silicon Graphics, Inc., 1600 Amphitheatre Pkwy,
* Mountain View, CA 94043, or:
*
* http://www.sgi.com
*
* For further information regarding this notice, see:
*
* http://oss.sgi.com/projects/GenInfo/NoticeExplan/
*
*/
/*
* DESCRIPTION
* 1.) test sbrk(8192) should return successfully.
* 2.) test sbrk(-8192) should return successfully.
*/
#include <unistd.h>
#include <errno.h>
#include <string.h>
#include <signal.h>
#include <sys/types.h>
#include "test.h"
char *TCID = "sbrk01";
static struct test_case_t {
long increment;
} test_cases[] = {
{8192},
{-8192},
};
static void setup(void);
static void sbrk_verify(const struct test_case_t *);
static void cleanup(void);
int TST_TOTAL = ARRAY_SIZE(test_cases);
/*
 * main - LTP test driver: runs every entry of test_cases[] once per
 * requested loop iteration (-i option), then reports and exits.
 */
int main(int ac, char **av)
{
	int lc;
	int i;

	tst_parse_opts(ac, av, NULL, NULL);

	setup();

	/* Repeat the full set of cases for each test loop iteration. */
	for (lc = 0; TEST_LOOPING(lc); lc++) {
		tst_count = 0;
		for (i = 0; i < TST_TOTAL; i++)
			sbrk_verify(&test_cases[i]);
	}

	cleanup();
	tst_exit();
}
/*
 * setup - install default signal handling (cleanup on unexpected
 * signals) and honor the standard LTP pause option (TEST_PAUSE).
 */
static void setup(void)
{
	tst_sig(NOFORK, DEF_HANDLER, cleanup);

	TEST_PAUSE;
}
/*
 * sbrk_verify - grow or shrink the program break by test->increment
 * bytes and report TPASS on success or TFAIL (with errno) if sbrk()
 * returns (void *)-1.
 */
static void sbrk_verify(const struct test_case_t *test)
{
	void *tret;

	tret = sbrk(test->increment);
	TEST_ERRNO = errno;	/* capture errno for TTERRNO reporting */

	if (tret == (void *)-1) {
		tst_resm(TFAIL | TTERRNO, "sbrk - Increase by %ld bytes failed",
			 test->increment);
	} else {
		tst_resm(TPASS, "sbrk - Increase by %ld bytes returned %p",
			 test->increment, tret);
	}
}
/* cleanup - no test-specific cleanup is required for this test. */
static void cleanup(void)
{
}
| {
"pile_set_name": "Github"
} |
//===-- Implementation header for thrd_join function ------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#ifndef LLVM_LIBC_SRC_THREADS_THRD_JOIN_H
#define LLVM_LIBC_SRC_THREADS_THRD_JOIN_H

#include "include/threads.h"

namespace __llvm_libc {

// Joins |thread| and stores its result code in |retval|.
// NOTE(review): signature mirrors C11 thrd_join; presumably returns
// thrd_success/thrd_error — confirm against the implementation.
int thrd_join(thrd_t *thread, int *retval);

} // namespace __llvm_libc

#endif // LLVM_LIBC_SRC_THREADS_THRD_JOIN_H
| {
"pile_set_name": "Github"
} |
/**
* Xero Payroll AU
* This is the Xero Payroll API for orgs in Australia region.
*
* The version of the OpenAPI document: 2.2.10
* Contact: api@xero.com
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
/**
 * Status values for a pay run, as returned by the Xero Payroll AU API.
 * The `<any>` casts preserve the string wire values of each member.
 */
export enum PayRunStatus {
    /** Pay run exists but has not been posted. */
    DRAFT = <any> 'DRAFT',
    /** Pay run has been posted. */
    POSTED = <any> 'POSTED'
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE doc [
<!ATTLIST doc attrExtEnt CDATA #IMPLIED>
<!ENTITY ent1 "Hello">
<!ENTITY ent2 SYSTEM "world.txt">
<!ENTITY entExt SYSTEM "earth.gif" NDATA gif>
<!NOTATION gif SYSTEM "viewgif.exe">
]>
<doc attrExtEnt="entExt">
&ent1;, &ent2;!
</doc>
<!-- Let world.txt contain "world" (excluding the quotes) -->
| {
"pile_set_name": "Github"
} |
# SPDX-License-Identifier: GPL-2.0+
# Renoir platform Support
snd-rn-pci-acp3x-objs := rn-pci-acp3x.o
snd-acp3x-pdm-dma-objs := acp3x-pdm-dma.o
snd-acp3x-rn-objs := acp3x-rn.o
obj-$(CONFIG_SND_SOC_AMD_RENOIR) += snd-rn-pci-acp3x.o
obj-$(CONFIG_SND_SOC_AMD_RENOIR) += snd-acp3x-pdm-dma.o
obj-$(CONFIG_SND_SOC_AMD_RENOIR_MACH) += snd-acp3x-rn.o
| {
"pile_set_name": "Github"
} |
AX25 is Andes CPU IP to adopt RISC-V architecture.
Features
========
CPU Core
- 5-stage in-order execution pipeline
- Hardware Multiplier
- radix-2/radix-4/radix-16/radix-256/fast
- Hardware Divider
- Optional branch prediction
- Machine mode and optional user mode
- Optional performance monitoring
ISA
- RV64I base integer instructions
- RVC for 16-bit compressed instructions
- RVM for multiplication and division instructions
Memory subsystem
- I & D local memory
- Size: 4KB to 16MB
- Memory subsystem soft-error protection
- Protection scheme: parity-checking or error-checking-and-correction (ECC)
- Automatic hardware error correction
Bus
- Interface Protocol
- Synchronous AHB (32-bit/64-bit data-width), or
- Synchronous AXI4 (64-bit data-width)
Power management
- Wait for interrupt (WFI) mode
Debug
- Configurable number of breakpoints: 2/4/8
- External Debug Module
- AHB slave port
- External JTAG debug transport module
Platform Level Interrupt Controller (PLIC)
- AHB slave port
- Configurable number of interrupts: 1-1023
- Configurable number of interrupt priorities: 3/7/15/63/127/255
- Configurable number of targets: 1-16
- Preempted interrupt priority stack
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<title>flexbox | flex: 0 1 N | shrinking</title>
<link rel="author" href="http://opera.com" title="Opera Software">
<style>
div {
background: blue;
margin: 1em 0;
border: 1px solid black;
height: 8em;
width: 12em;
}
span {
background: white;
margin: 1em 0;
width: 3em;
height: 6em;
display: inline-block;
}
span:nth-child(1) {background: yellow;}
span:nth-child(2) {background: pink;}
span:nth-child(3) {background: lightblue;}
span:nth-child(4) {background: grey;}
</style>
<div>
<span>one</span><span>two</span><span>three</span><span>four</span>
</div>
| {
"pile_set_name": "Github"
} |
// Sets the element's background image to the given icon.
// $icon-url: URL or path of the icon image (passed straight to url()).
@mixin icon-background ($icon-url){
  background-image : url($icon-url);
}

// Shapes a square inline icon container.
// $size: width and height of the icon box.
// $padding: inner padding around the icon.
// $border-radius: corner rounding (e.g. 50% for a circle).
@mixin icon-shape ($size, $padding, $border-radius) {
  height: $size;
  width: $size;
  padding: $padding;
  border-radius: $border-radius;
  display: inline-table;
}
"pile_set_name": "Github"
} |
^F:\DATA STRUCTURE\SEQQUEUE\DEBUG\MAIN.OBJ
C:\WINDOWS\GLOBALIZATION\SORTING\SORTDEFAULT.NLS
C:\PROGRAM FILES\WINDOWS KITS\8.0\LIB\WIN8\UM\X86\KERNEL32.LIB
C:\PROGRAM FILES\WINDOWS KITS\8.0\LIB\WIN8\UM\X86\USER32.LIB
C:\PROGRAM FILES\WINDOWS KITS\8.0\LIB\WIN8\UM\X86\GDI32.LIB
C:\PROGRAM FILES\WINDOWS KITS\8.0\LIB\WIN8\UM\X86\WINSPOOL.LIB
C:\PROGRAM FILES\WINDOWS KITS\8.0\LIB\WIN8\UM\X86\COMDLG32.LIB
C:\PROGRAM FILES\WINDOWS KITS\8.0\LIB\WIN8\UM\X86\ADVAPI32.LIB
C:\PROGRAM FILES\WINDOWS KITS\8.0\LIB\WIN8\UM\X86\SHELL32.LIB
C:\PROGRAM FILES\WINDOWS KITS\8.0\LIB\WIN8\UM\X86\OLE32.LIB
C:\PROGRAM FILES\WINDOWS KITS\8.0\LIB\WIN8\UM\X86\OLEAUT32.LIB
C:\PROGRAM FILES\WINDOWS KITS\8.0\LIB\WIN8\UM\X86\UUID.LIB
C:\PROGRAM FILES\WINDOWS KITS\8.0\LIB\WIN8\UM\X86\ODBC32.LIB
C:\PROGRAM FILES\WINDOWS KITS\8.0\LIB\WIN8\UM\X86\ODBCCP32.LIB
F:\DATA STRUCTURE\SEQQUEUE\DEBUG\MAIN.OBJ
C:\WINDOWS\SYSTEM32\TZRES.DLL
D:\CPP\VC\LIB\MSVCPRTD.LIB
D:\CPP\VC\LIB\MSVCRTD.LIB
D:\CPP\VC\LIB\OLDNAMES.LIB
| {
"pile_set_name": "Github"
} |
# 热部署静态文件
spring.thymeleaf.cache=false
spring.application.name: msa-weather-report-eureka
eureka.client.serviceUrl.defaultZone: http://localhost:8761/eureka/ | {
"pile_set_name": "Github"
} |
aaaaa 1
Xbbbb 2
XXbbb 2
ccccc 3
Xdddd 3
XXddd 3
XXXdd 1
eeee
fff
| {
"pile_set_name": "Github"
} |
# CVS $Revision: $ $Author: $ -- Thu Jul 10 13:59:32 2014 -- reformated by prettylst.pl v1.51 (build 24365)
# Ability Name Unique Key Category of Ability Type Description
Shadow Blending KEY:Shadow Creature ~ Shadow Blending CATEGORY:Special Ability TYPE:SpecialQuality.Supernatural.Shadow Creature DESC:During any conditions other than full daylight, a shadow creature has the supernatural ability to conceal itself in the shadows, giving it nine-tenths concealment. Artificial light, even a continual flame spell, does not negate this ability. A daylight spell does.
#Shadow Creature Supernatural Ability
+2 luck bonus to all saving throws KEY:Shadow Creature Supernatural Ability ~ Luck Bonus CATEGORY:Special Ability TYPE:SpecialQuality.Supernatural.Shadow Creature Supernatural Ability
Cause fear once per day KEY:Shadow Creature Supernatural Ability ~ Cause Fear CATEGORY:Special Ability TYPE:SpecialQuality.Supernatural.Shadow Creature Supernatural Ability
Damage reduction 5/+1 KEY:Shadow Creature Supernatural Ability ~ DR CATEGORY:Special Ability TYPE:SpecialQuality.Supernatural.Shadow Creature Supernatural Ability
Evasion KEY:Shadow Creature Supernatural Ability ~ Evasion CATEGORY:Special Ability TYPE:SpecialQuality.Supernatural.Shadow Creature Supernatural Ability
Mirror image one per day KEY:Shadow Creature Supernatural Ability ~ Mirror Image CATEGORY:Special Ability TYPE:SpecialQuality.Supernatural.Shadow Creature Supernatural Ability
Plane shift self to or from the Plane of Shadow once per day KEY:Shadow Creature Supernatural Ability ~ Plane Shift CATEGORY:Special Ability TYPE:SpecialQuality.Supernatural.Shadow Creature Supernatural Ability
Regenerate 2 hit points per round (slain if brought to 0 hit points). KEY:Shadow Creature Supernatural Ability ~ Regen CATEGORY:Special Ability TYPE:SpecialQuality.Supernatural.Shadow Creature Supernatural Ability
| {
"pile_set_name": "Github"
} |
using System;
using Android.App;
using Android.OS;
namespace TvLeanback
{
/// <summary>
/// Activity that displays the search screen (Resource.Layout.search).
/// Exported = true allows it to be started by components outside this
/// app — NOTE(review): presumably the system search intent; confirm.
/// </summary>
[Activity (Label = "SearchActivity", Exported = true)]
public class SearchActivity: Activity
{
    /// <summary>
    /// Inflates the search layout when the activity is created.
    /// </summary>
    /// <param name="savedInstanceState">Previously saved instance state, or null on first launch.</param>
    protected override void OnCreate (Bundle savedInstanceState)
    {
        base.OnCreate (savedInstanceState);
        SetContentView (Resource.Layout.search);
    }
}
}
| {
"pile_set_name": "Github"
} |
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}
{{# def.setupNextLevel }}
{{## def.validateAdditional:
{{ /* additionalProperties is schema */
$it.schema = $aProperties;
$it.schemaPath = it.schemaPath + '.additionalProperties';
$it.errSchemaPath = it.errSchemaPath + '/additionalProperties';
$it.errorPath = it.opts._errorDataPathProperty
? it.errorPath
: it.util.getPathExpr(it.errorPath, $key, it.opts.jsonPointers);
var $passData = $data + '[' + $key + ']';
$it.dataPathArr[$dataNxt] = $key;
}}
{{# def.generateSubschemaCode }}
{{# def.optimizeValidate }}
#}}
{{
var $key = 'key' + $lvl
, $idx = 'idx' + $lvl
, $dataNxt = $it.dataLevel = it.dataLevel + 1
, $nextData = 'data' + $dataNxt
, $dataProperties = 'dataProperties' + $lvl;
var $schemaKeys = Object.keys($schema || {})
, $pProperties = it.schema.patternProperties || {}
, $pPropertyKeys = Object.keys($pProperties)
, $aProperties = it.schema.additionalProperties
, $someProperties = $schemaKeys.length || $pPropertyKeys.length
, $noAdditional = $aProperties === false
, $additionalIsSchema = typeof $aProperties == 'object'
&& Object.keys($aProperties).length
, $removeAdditional = it.opts.removeAdditional
, $checkAdditional = $noAdditional || $additionalIsSchema || $removeAdditional
, $ownProperties = it.opts.ownProperties
, $currentBaseId = it.baseId;
var $required = it.schema.required;
if ($required && !(it.opts.$data && $required.$data) && $required.length < it.opts.loopRequired)
var $requiredHash = it.util.toHash($required);
}}
var {{=$errs}} = errors;
var {{=$nextValid}} = true;
{{? $ownProperties }}
var {{=$dataProperties}} = undefined;
{{?}}
{{? $checkAdditional }}
{{# def.iterateProperties }}
{{? $someProperties }}
var isAdditional{{=$lvl}} = !(false
{{? $schemaKeys.length }}
{{? $schemaKeys.length > 8 }}
|| validate.schema{{=$schemaPath}}.hasOwnProperty({{=$key}})
{{??}}
{{~ $schemaKeys:$propertyKey }}
|| {{=$key}} == {{= it.util.toQuotedString($propertyKey) }}
{{~}}
{{?}}
{{?}}
{{? $pPropertyKeys.length }}
{{~ $pPropertyKeys:$pProperty:$i }}
|| {{= it.usePattern($pProperty) }}.test({{=$key}})
{{~}}
{{?}}
);
if (isAdditional{{=$lvl}}) {
{{?}}
{{? $removeAdditional == 'all' }}
delete {{=$data}}[{{=$key}}];
{{??}}
{{
var $currentErrorPath = it.errorPath;
var $additionalProperty = '\' + ' + $key + ' + \'';
if (it.opts._errorDataPathProperty) {
it.errorPath = it.util.getPathExpr(it.errorPath, $key, it.opts.jsonPointers);
}
}}
{{? $noAdditional }}
{{? $removeAdditional }}
delete {{=$data}}[{{=$key}}];
{{??}}
{{=$nextValid}} = false;
{{
var $currErrSchemaPath = $errSchemaPath;
$errSchemaPath = it.errSchemaPath + '/additionalProperties';
}}
{{# def.error:'additionalProperties' }}
{{ $errSchemaPath = $currErrSchemaPath; }}
{{? $breakOnError }} break; {{?}}
{{?}}
{{?? $additionalIsSchema }}
{{? $removeAdditional == 'failing' }}
var {{=$errs}} = errors;
{{# def.setCompositeRule }}
{{# def.validateAdditional }}
if (!{{=$nextValid}}) {
errors = {{=$errs}};
if (validate.errors !== null) {
if (errors) validate.errors.length = errors;
else validate.errors = null;
}
delete {{=$data}}[{{=$key}}];
}
{{# def.resetCompositeRule }}
{{??}}
{{# def.validateAdditional }}
{{? $breakOnError }} if (!{{=$nextValid}}) break; {{?}}
{{?}}
{{?}}
{{ it.errorPath = $currentErrorPath; }}
{{?}}
{{? $someProperties }}
}
{{?}}
}
{{# def.ifResultValid }}
{{?}}
{{ var $useDefaults = it.opts.useDefaults && !it.compositeRule; }}
{{? $schemaKeys.length }}
{{~ $schemaKeys:$propertyKey }}
{{ var $sch = $schema[$propertyKey]; }}
{{? {{# def.nonEmptySchema:$sch}} }}
{{
var $prop = it.util.getProperty($propertyKey)
, $passData = $data + $prop
, $hasDefault = $useDefaults && $sch.default !== undefined;
$it.schema = $sch;
$it.schemaPath = $schemaPath + $prop;
$it.errSchemaPath = $errSchemaPath + '/' + it.util.escapeFragment($propertyKey);
$it.errorPath = it.util.getPath(it.errorPath, $propertyKey, it.opts.jsonPointers);
$it.dataPathArr[$dataNxt] = it.util.toQuotedString($propertyKey);
}}
{{# def.generateSubschemaCode }}
{{? {{# def.willOptimize }} }}
{{
$code = {{# def._optimizeValidate }};
var $useData = $passData;
}}
{{??}}
{{ var $useData = $nextData; }}
var {{=$nextData}} = {{=$passData}};
{{?}}
{{? $hasDefault }}
{{= $code }}
{{??}}
{{? $requiredHash && $requiredHash[$propertyKey] }}
if ({{# def.noPropertyInData }}) {
{{=$nextValid}} = false;
{{
var $currentErrorPath = it.errorPath
, $currErrSchemaPath = $errSchemaPath
, $missingProperty = it.util.escapeQuotes($propertyKey);
if (it.opts._errorDataPathProperty) {
it.errorPath = it.util.getPath($currentErrorPath, $propertyKey, it.opts.jsonPointers);
}
$errSchemaPath = it.errSchemaPath + '/required';
}}
{{# def.error:'required' }}
{{ $errSchemaPath = $currErrSchemaPath; }}
{{ it.errorPath = $currentErrorPath; }}
} else {
{{??}}
{{? $breakOnError }}
if ({{# def.noPropertyInData }}) {
{{=$nextValid}} = true;
} else {
{{??}}
if ({{=$useData}} !== undefined
{{? $ownProperties }}
&& {{# def.isOwnProperty }}
{{?}}
) {
{{?}}
{{?}}
{{= $code }}
}
{{?}} {{ /* $hasDefault */ }}
{{?}} {{ /* def.nonEmptySchema */ }}
{{# def.ifResultValid }}
{{~}}
{{?}}
{{? $pPropertyKeys.length }}
{{~ $pPropertyKeys:$pProperty }}
{{ var $sch = $pProperties[$pProperty]; }}
{{? {{# def.nonEmptySchema:$sch}} }}
{{
$it.schema = $sch;
$it.schemaPath = it.schemaPath + '.patternProperties' + it.util.getProperty($pProperty);
$it.errSchemaPath = it.errSchemaPath + '/patternProperties/'
+ it.util.escapeFragment($pProperty);
}}
{{# def.iterateProperties }}
if ({{= it.usePattern($pProperty) }}.test({{=$key}})) {
{{
$it.errorPath = it.util.getPathExpr(it.errorPath, $key, it.opts.jsonPointers);
var $passData = $data + '[' + $key + ']';
$it.dataPathArr[$dataNxt] = $key;
}}
{{# def.generateSubschemaCode }}
{{# def.optimizeValidate }}
{{? $breakOnError }} if (!{{=$nextValid}}) break; {{?}}
}
{{? $breakOnError }} else {{=$nextValid}} = true; {{?}}
}
{{# def.ifResultValid }}
{{?}} {{ /* def.nonEmptySchema */ }}
{{~}}
{{?}}
{{? $breakOnError }}
{{= $closingBraces }}
if ({{=$errs}} == errors) {
{{?}}
{{# def.cleanUp }}
| {
"pile_set_name": "Github"
} |
/**
* This file is copyright 2017 State of the Netherlands (Ministry of Interior Affairs and Kingdom Relations).
* It is made available under the terms of the GNU Affero General Public License, version 3 as published by the Free Software Foundation.
* The project of which this file is part, may be found at https://github.com/MinBZK/operatieBRP.
*/
package nl.bzk.brp.model.data.kern;
import org.springframework.roo.addon.dod.RooDataOnDemand;
/**
 * Spring Roo "data on demand" scaffold for {@code HisPersverblijfsr}
 * entities; the body is intentionally empty — the test-data methods are
 * generated by Roo from the {@code @RooDataOnDemand} annotation.
 */
@RooDataOnDemand(entity = HisPersverblijfsr.class)
public class HisPersverblijfsrDataOnDemand {
}
| {
"pile_set_name": "Github"
} |
package kinesis
import (
"time"
"github.com/apex/log"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/session"
k "github.com/aws/aws-sdk-go/service/kinesis"
"github.com/aws/aws-sdk-go/service/kinesis/kinesisiface"
"github.com/jpillora/backoff"
)
const (
maxRecordsPerRequest = 500
)
// Config holds the settings for the Kinesis producer. Zero-valued
// fields are filled in by defaults(); only StreamName is mandatory.
type Config struct {
	// StreamName is the Kinesis stream.
	StreamName string

	// FlushInterval is a regular interval for flushing the buffer. Defaults to 1s.
	FlushInterval time.Duration

	// BufferSize determines the batch request size. Must not exceed 500. Defaults to 500.
	BufferSize int

	// BacklogSize determines the channel capacity before Put() will begin blocking. Defaults to 500.
	BacklogSize int

	// Backoff determines the backoff strategy for record failures.
	Backoff backoff.Backoff

	// Logger is the logger used. Defaults to log.Log.
	Logger log.Interface

	// Client is the Kinesis API implementation.
	Client kinesisiface.KinesisAPI
}
// defaults fills in fallback values for any unset configuration fields.
// Missing required settings are fatal: Logger.Fatal exits the process.
func (c *Config) defaults() {
	// Default to a real Kinesis client built from the ambient AWS session.
	if c.Client == nil {
		c.Client = k.New(session.New(aws.NewConfig()))
	}

	if c.Logger == nil {
		c.Logger = log.Log
	}

	c.Logger = c.Logger.WithFields(log.Fields{
		"package": "kinesis",
	})

	// StreamName is required; abort before tagging the logger with it.
	if c.StreamName == "" {
		c.Logger.Fatal("StreamName required")
	}

	c.Logger = c.Logger.WithFields(log.Fields{
		"stream": c.StreamName,
	})

	if c.BufferSize == 0 {
		c.BufferSize = maxRecordsPerRequest
	}

	// maxRecordsPerRequest (500) is the per-request batch ceiling.
	if c.BufferSize > maxRecordsPerRequest {
		c.Logger.Fatal("BufferSize exceeds 500")
	}

	if c.BacklogSize == 0 {
		c.BacklogSize = maxRecordsPerRequest
	}

	if c.FlushInterval == 0 {
		c.FlushInterval = time.Second
	}
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<events xmlns="http://jelix.org/ns/events/1.0">
<listener name="jacldb">
<event name="AuthNewUser" />
<event name="AuthRemoveUser" />
<event name="AuthLogout" />
</listener>
</events>
| {
"pile_set_name": "Github"
} |
// go run mksyscall.go -l32 -openbsd -arm -tags openbsd,arm syscall_bsd.go syscall_openbsd.go syscall_openbsd_arm.go
// Code generated by the command above; see README.md. DO NOT EDIT.
// +build openbsd,arm
package unix
import (
"syscall"
"unsafe"
)
var _ syscall.Errno
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func getgroups(ngid int, gid *_Gid_t) (n int, err error) {
r0, _, e1 := RawSyscall(SYS_GETGROUPS, uintptr(ngid), uintptr(unsafe.Pointer(gid)), 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func setgroups(ngid int, gid *_Gid_t) (err error) {
_, _, e1 := RawSyscall(SYS_SETGROUPS, uintptr(ngid), uintptr(unsafe.Pointer(gid)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func wait4(pid int, wstatus *_C_int, options int, rusage *Rusage) (wpid int, err error) {
r0, _, e1 := Syscall6(SYS_WAIT4, uintptr(pid), uintptr(unsafe.Pointer(wstatus)), uintptr(options), uintptr(unsafe.Pointer(rusage)), 0, 0)
wpid = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) {
r0, _, e1 := Syscall(SYS_ACCEPT, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)))
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func bind(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) {
_, _, e1 := Syscall(SYS_BIND, uintptr(s), uintptr(addr), uintptr(addrlen))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func connect(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) {
_, _, e1 := Syscall(SYS_CONNECT, uintptr(s), uintptr(addr), uintptr(addrlen))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func socket(domain int, typ int, proto int) (fd int, err error) {
r0, _, e1 := RawSyscall(SYS_SOCKET, uintptr(domain), uintptr(typ), uintptr(proto))
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func getsockopt(s int, level int, name int, val unsafe.Pointer, vallen *_Socklen) (err error) {
_, _, e1 := Syscall6(SYS_GETSOCKOPT, uintptr(s), uintptr(level), uintptr(name), uintptr(val), uintptr(unsafe.Pointer(vallen)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func setsockopt(s int, level int, name int, val unsafe.Pointer, vallen uintptr) (err error) {
_, _, e1 := Syscall6(SYS_SETSOCKOPT, uintptr(s), uintptr(level), uintptr(name), uintptr(val), uintptr(vallen), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func getpeername(fd int, rsa *RawSockaddrAny, addrlen *_Socklen) (err error) {
_, _, e1 := RawSyscall(SYS_GETPEERNAME, uintptr(fd), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func getsockname(fd int, rsa *RawSockaddrAny, addrlen *_Socklen) (err error) {
_, _, e1 := RawSyscall(SYS_GETSOCKNAME, uintptr(fd), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Shutdown(s int, how int) (err error) {
_, _, e1 := Syscall(SYS_SHUTDOWN, uintptr(s), uintptr(how), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func socketpair(domain int, typ int, proto int, fd *[2]int32) (err error) {
_, _, e1 := RawSyscall6(SYS_SOCKETPAIR, uintptr(domain), uintptr(typ), uintptr(proto), uintptr(unsafe.Pointer(fd)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func recvfrom(fd int, p []byte, flags int, from *RawSockaddrAny, fromlen *_Socklen) (n int, err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_RECVFROM, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(flags), uintptr(unsafe.Pointer(from)), uintptr(unsafe.Pointer(fromlen)))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func sendto(s int, buf []byte, flags int, to unsafe.Pointer, addrlen _Socklen) (err error) {
var _p0 unsafe.Pointer
if len(buf) > 0 {
_p0 = unsafe.Pointer(&buf[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall6(SYS_SENDTO, uintptr(s), uintptr(_p0), uintptr(len(buf)), uintptr(flags), uintptr(to), uintptr(addrlen))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func recvmsg(s int, msg *Msghdr, flags int) (n int, err error) {
r0, _, e1 := Syscall(SYS_RECVMSG, uintptr(s), uintptr(unsafe.Pointer(msg)), uintptr(flags))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func sendmsg(s int, msg *Msghdr, flags int) (n int, err error) {
r0, _, e1 := Syscall(SYS_SENDMSG, uintptr(s), uintptr(unsafe.Pointer(msg)), uintptr(flags))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func kevent(kq int, change unsafe.Pointer, nchange int, event unsafe.Pointer, nevent int, timeout *Timespec) (n int, err error) {
r0, _, e1 := Syscall6(SYS_KEVENT, uintptr(kq), uintptr(change), uintptr(nchange), uintptr(event), uintptr(nevent), uintptr(unsafe.Pointer(timeout)))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func utimes(path string, timeval *[2]Timeval) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_UTIMES, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(timeval)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func futimes(fd int, timeval *[2]Timeval) (err error) {
_, _, e1 := Syscall(SYS_FUTIMES, uintptr(fd), uintptr(unsafe.Pointer(timeval)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func poll(fds *PollFd, nfds int, timeout int) (n int, err error) {
r0, _, e1 := Syscall(SYS_POLL, uintptr(unsafe.Pointer(fds)), uintptr(nfds), uintptr(timeout))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Madvise(b []byte, behav int) (err error) {
var _p0 unsafe.Pointer
if len(b) > 0 {
_p0 = unsafe.Pointer(&b[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall(SYS_MADVISE, uintptr(_p0), uintptr(len(b)), uintptr(behav))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mlock(b []byte) (err error) {
var _p0 unsafe.Pointer
if len(b) > 0 {
_p0 = unsafe.Pointer(&b[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall(SYS_MLOCK, uintptr(_p0), uintptr(len(b)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mlockall(flags int) (err error) {
_, _, e1 := Syscall(SYS_MLOCKALL, uintptr(flags), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mprotect(b []byte, prot int) (err error) {
var _p0 unsafe.Pointer
if len(b) > 0 {
_p0 = unsafe.Pointer(&b[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall(SYS_MPROTECT, uintptr(_p0), uintptr(len(b)), uintptr(prot))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Msync(b []byte, flags int) (err error) {
var _p0 unsafe.Pointer
if len(b) > 0 {
_p0 = unsafe.Pointer(&b[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall(SYS_MSYNC, uintptr(_p0), uintptr(len(b)), uintptr(flags))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Munlock(b []byte) (err error) {
var _p0 unsafe.Pointer
if len(b) > 0 {
_p0 = unsafe.Pointer(&b[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall(SYS_MUNLOCK, uintptr(_p0), uintptr(len(b)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// NOTE(review): machine-generated syscall stubs (see the repeated DO NOT EDIT
// markers). Each wrapper marshals its arguments into uintptr words, traps into
// the kernel via Syscall/Syscall6/Syscall9 (or RawSyscall* for calls that must
// not go through the scheduler hooks), and converts a nonzero errno into a Go
// error with errnoErr. Slice arguments pass unsafe.Pointer(&_zero) when empty
// so the kernel never sees a nil pointer paired with a nonzero length.
// 64-bit offsets/lengths are split into two 32-bit halves (value, value>>32),
// consistent with a 32-bit BSD-style ABI (SYS___GETCWD / SYS___SYSCTL naming
// suggests OpenBSD — confirm against the generator command at the file top).
// Do not hand-edit any of these; fix the generator and regenerate instead.
func Munlockall() (err error) {
_, _, e1 := Syscall(SYS_MUNLOCKALL, 0, 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func pipe2(p *[2]_C_int, flags int) (err error) {
_, _, e1 := RawSyscall(SYS_PIPE2, uintptr(unsafe.Pointer(p)), uintptr(flags), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getdents(fd int, buf []byte) (n int, err error) {
var _p0 unsafe.Pointer
if len(buf) > 0 {
_p0 = unsafe.Pointer(&buf[0])
} else {
// &_zero stands in for a valid (non-nil) pointer when the slice is empty.
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS_GETDENTS, uintptr(fd), uintptr(_p0), uintptr(len(buf)))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getcwd(buf []byte) (n int, err error) {
var _p0 unsafe.Pointer
if len(buf) > 0 {
_p0 = unsafe.Pointer(&buf[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS___GETCWD, uintptr(_p0), uintptr(len(buf)), 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func ioctl(fd int, req uint, arg uintptr) (err error) {
_, _, e1 := Syscall(SYS_IOCTL, uintptr(fd), uintptr(req), uintptr(arg))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// sysctl: old/oldlen receive the current value; new/newlen (optional) set it.
func sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) {
var _p0 unsafe.Pointer
if len(mib) > 0 {
_p0 = unsafe.Pointer(&mib[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall6(SYS___SYSCTL, uintptr(_p0), uintptr(len(mib)), uintptr(unsafe.Pointer(old)), uintptr(unsafe.Pointer(oldlen)), uintptr(unsafe.Pointer(new)), uintptr(newlen))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func ppoll(fds *PollFd, nfds int, timeout *Timespec, sigmask *Sigset_t) (n int, err error) {
r0, _, e1 := Syscall6(SYS_PPOLL, uintptr(unsafe.Pointer(fds)), uintptr(nfds), uintptr(unsafe.Pointer(timeout)), uintptr(unsafe.Pointer(sigmask)), 0, 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Access(path string, mode uint32) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_ACCESS, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Adjtime(delta *Timeval, olddelta *Timeval) (err error) {
_, _, e1 := Syscall(SYS_ADJTIME, uintptr(unsafe.Pointer(delta)), uintptr(unsafe.Pointer(olddelta)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Chdir(path string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_CHDIR, uintptr(unsafe.Pointer(_p0)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Chflags(path string, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_CHFLAGS, uintptr(unsafe.Pointer(_p0)), uintptr(flags), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Chmod(path string, mode uint32) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_CHMOD, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Chown(path string, uid int, gid int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_CHOWN, uintptr(unsafe.Pointer(_p0)), uintptr(uid), uintptr(gid))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Chroot(path string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_CHROOT, uintptr(unsafe.Pointer(_p0)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Close(fd int) (err error) {
_, _, e1 := Syscall(SYS_CLOSE, uintptr(fd), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Dup(fd int) (nfd int, err error) {
r0, _, e1 := Syscall(SYS_DUP, uintptr(fd), 0, 0)
nfd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Dup2(from int, to int) (err error) {
_, _, e1 := Syscall(SYS_DUP2, uintptr(from), uintptr(to), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Dup3(from int, to int, flags int) (err error) {
_, _, e1 := Syscall(SYS_DUP3, uintptr(from), uintptr(to), uintptr(flags))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Exit never returns on success, so no error is surfaced.
func Exit(code int) {
Syscall(SYS_EXIT, uintptr(code), 0, 0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Faccessat(dirfd int, path string, mode uint32, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_FACCESSAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(flags), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fchdir(fd int) (err error) {
_, _, e1 := Syscall(SYS_FCHDIR, uintptr(fd), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fchflags(fd int, flags int) (err error) {
_, _, e1 := Syscall(SYS_FCHFLAGS, uintptr(fd), uintptr(flags), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fchmod(fd int, mode uint32) (err error) {
_, _, e1 := Syscall(SYS_FCHMOD, uintptr(fd), uintptr(mode), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fchmodat(dirfd int, path string, mode uint32, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_FCHMODAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(flags), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fchown(fd int, uid int, gid int) (err error) {
_, _, e1 := Syscall(SYS_FCHOWN, uintptr(fd), uintptr(uid), uintptr(gid))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fchownat(dirfd int, path string, uid int, gid int, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_FCHOWNAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(uid), uintptr(gid), uintptr(flags), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Flock(fd int, how int) (err error) {
_, _, e1 := Syscall(SYS_FLOCK, uintptr(fd), uintptr(how), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fpathconf(fd int, name int) (val int, err error) {
r0, _, e1 := Syscall(SYS_FPATHCONF, uintptr(fd), uintptr(name), 0)
val = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fstat(fd int, stat *Stat_t) (err error) {
_, _, e1 := Syscall(SYS_FSTAT, uintptr(fd), uintptr(unsafe.Pointer(stat)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fstatat(fd int, path string, stat *Stat_t, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_FSTATAT, uintptr(fd), uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), uintptr(flags), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fstatfs(fd int, stat *Statfs_t) (err error) {
_, _, e1 := Syscall(SYS_FSTATFS, uintptr(fd), uintptr(unsafe.Pointer(stat)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fsync(fd int) (err error) {
_, _, e1 := Syscall(SYS_FSYNC, uintptr(fd), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Ftruncate: the 64-bit length is passed as (pad, low32, high32).
func Ftruncate(fd int, length int64) (err error) {
_, _, e1 := Syscall6(SYS_FTRUNCATE, uintptr(fd), 0, uintptr(length), uintptr(length>>32), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getegid() (egid int) {
r0, _, _ := RawSyscall(SYS_GETEGID, 0, 0, 0)
egid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Geteuid() (uid int) {
r0, _, _ := RawSyscall(SYS_GETEUID, 0, 0, 0)
uid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getgid() (gid int) {
r0, _, _ := RawSyscall(SYS_GETGID, 0, 0, 0)
gid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getpgid(pid int) (pgid int, err error) {
r0, _, e1 := RawSyscall(SYS_GETPGID, uintptr(pid), 0, 0)
pgid = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getpgrp() (pgrp int) {
r0, _, _ := RawSyscall(SYS_GETPGRP, 0, 0, 0)
pgrp = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getpid() (pid int) {
r0, _, _ := RawSyscall(SYS_GETPID, 0, 0, 0)
pid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getppid() (ppid int) {
r0, _, _ := RawSyscall(SYS_GETPPID, 0, 0, 0)
ppid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getpriority(which int, who int) (prio int, err error) {
r0, _, e1 := Syscall(SYS_GETPRIORITY, uintptr(which), uintptr(who), 0)
prio = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getrlimit(which int, lim *Rlimit) (err error) {
_, _, e1 := RawSyscall(SYS_GETRLIMIT, uintptr(which), uintptr(unsafe.Pointer(lim)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getrtable() (rtable int, err error) {
r0, _, e1 := RawSyscall(SYS_GETRTABLE, 0, 0, 0)
rtable = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getrusage(who int, rusage *Rusage) (err error) {
_, _, e1 := RawSyscall(SYS_GETRUSAGE, uintptr(who), uintptr(unsafe.Pointer(rusage)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getsid(pid int) (sid int, err error) {
r0, _, e1 := RawSyscall(SYS_GETSID, uintptr(pid), 0, 0)
sid = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Gettimeofday(tv *Timeval) (err error) {
_, _, e1 := RawSyscall(SYS_GETTIMEOFDAY, uintptr(unsafe.Pointer(tv)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getuid() (uid int) {
r0, _, _ := RawSyscall(SYS_GETUID, 0, 0, 0)
uid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Issetugid() (tainted bool) {
r0, _, _ := Syscall(SYS_ISSETUGID, 0, 0, 0)
tainted = bool(r0 != 0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Kill(pid int, signum syscall.Signal) (err error) {
_, _, e1 := Syscall(SYS_KILL, uintptr(pid), uintptr(signum), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Kqueue() (fd int, err error) {
r0, _, e1 := Syscall(SYS_KQUEUE, 0, 0, 0)
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Lchown(path string, uid int, gid int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_LCHOWN, uintptr(unsafe.Pointer(_p0)), uintptr(uid), uintptr(gid))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Link(path string, link string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(link)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_LINK, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Linkat(pathfd int, path string, linkfd int, link string, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(link)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_LINKAT, uintptr(pathfd), uintptr(unsafe.Pointer(_p0)), uintptr(linkfd), uintptr(unsafe.Pointer(_p1)), uintptr(flags), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Listen(s int, backlog int) (err error) {
_, _, e1 := Syscall(SYS_LISTEN, uintptr(s), uintptr(backlog), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Lstat(path string, stat *Stat_t) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_LSTAT, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mkdir(path string, mode uint32) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_MKDIR, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mkdirat(dirfd int, path string, mode uint32) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_MKDIRAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mkfifo(path string, mode uint32) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_MKFIFO, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mkfifoat(dirfd int, path string, mode uint32) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_MKFIFOAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mknod(path string, mode uint32, dev int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_MKNOD, uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(dev))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mknodat(dirfd int, path string, mode uint32, dev int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_MKNODAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(dev), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Nanosleep(time *Timespec, leftover *Timespec) (err error) {
_, _, e1 := Syscall(SYS_NANOSLEEP, uintptr(unsafe.Pointer(time)), uintptr(unsafe.Pointer(leftover)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Open(path string, mode int, perm uint32) (fd int, err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
r0, _, e1 := Syscall(SYS_OPEN, uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(perm))
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Openat(dirfd int, path string, mode int, perm uint32) (fd int, err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
r0, _, e1 := Syscall6(SYS_OPENAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(perm), 0, 0)
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Pathconf(path string, name int) (val int, err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
r0, _, e1 := Syscall(SYS_PATHCONF, uintptr(unsafe.Pointer(_p0)), uintptr(name), 0)
val = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Pread/Pwrite: the 64-bit offset is passed as (pad, low32, high32).
func Pread(fd int, p []byte, offset int64) (n int, err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_PREAD, uintptr(fd), uintptr(_p0), uintptr(len(p)), 0, uintptr(offset), uintptr(offset>>32))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Pwrite(fd int, p []byte, offset int64) (n int, err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_PWRITE, uintptr(fd), uintptr(_p0), uintptr(len(p)), 0, uintptr(offset), uintptr(offset>>32))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func read(fd int, p []byte) (n int, err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS_READ, uintptr(fd), uintptr(_p0), uintptr(len(p)))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Readlink(path string, buf []byte) (n int, err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
var _p1 unsafe.Pointer
if len(buf) > 0 {
_p1 = unsafe.Pointer(&buf[0])
} else {
_p1 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS_READLINK, uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(buf)))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Readlinkat(dirfd int, path string, buf []byte) (n int, err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
var _p1 unsafe.Pointer
if len(buf) > 0 {
_p1 = unsafe.Pointer(&buf[0])
} else {
_p1 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_READLINKAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(buf)), 0, 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Rename(from string, to string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(from)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(to)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_RENAME, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Renameat(fromfd int, from string, tofd int, to string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(from)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(to)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_RENAMEAT, uintptr(fromfd), uintptr(unsafe.Pointer(_p0)), uintptr(tofd), uintptr(unsafe.Pointer(_p1)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Revoke(path string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_REVOKE, uintptr(unsafe.Pointer(_p0)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Rmdir(path string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_RMDIR, uintptr(unsafe.Pointer(_p0)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Seek: the 64-bit result comes back in two registers and is reassembled
// as high<<32 | low.
func Seek(fd int, offset int64, whence int) (newoffset int64, err error) {
r0, r1, e1 := Syscall6(SYS_LSEEK, uintptr(fd), 0, uintptr(offset), uintptr(offset>>32), uintptr(whence), 0)
newoffset = int64(int64(r1)<<32 | int64(r0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Select(nfd int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (n int, err error) {
r0, _, e1 := Syscall6(SYS_SELECT, uintptr(nfd), uintptr(unsafe.Pointer(r)), uintptr(unsafe.Pointer(w)), uintptr(unsafe.Pointer(e)), uintptr(unsafe.Pointer(timeout)), 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setegid(egid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETEGID, uintptr(egid), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Seteuid(euid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETEUID, uintptr(euid), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setgid(gid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETGID, uintptr(gid), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setlogin(name string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(name)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_SETLOGIN, uintptr(unsafe.Pointer(_p0)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setpgid(pid int, pgid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETPGID, uintptr(pid), uintptr(pgid), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setpriority(which int, who int, prio int) (err error) {
_, _, e1 := Syscall(SYS_SETPRIORITY, uintptr(which), uintptr(who), uintptr(prio))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setregid(rgid int, egid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETREGID, uintptr(rgid), uintptr(egid), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setreuid(ruid int, euid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETREUID, uintptr(ruid), uintptr(euid), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setresgid(rgid int, egid int, sgid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETRESGID, uintptr(rgid), uintptr(egid), uintptr(sgid))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setresuid(ruid int, euid int, suid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETRESUID, uintptr(ruid), uintptr(euid), uintptr(suid))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setrlimit(which int, lim *Rlimit) (err error) {
_, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(which), uintptr(unsafe.Pointer(lim)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setrtable(rtable int) (err error) {
_, _, e1 := RawSyscall(SYS_SETRTABLE, uintptr(rtable), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setsid() (pid int, err error) {
r0, _, e1 := RawSyscall(SYS_SETSID, 0, 0, 0)
pid = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Settimeofday(tp *Timeval) (err error) {
_, _, e1 := RawSyscall(SYS_SETTIMEOFDAY, uintptr(unsafe.Pointer(tp)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setuid(uid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETUID, uintptr(uid), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Stat(path string, stat *Stat_t) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_STAT, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Statfs(path string, stat *Statfs_t) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_STATFS, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Symlink(path string, link string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(link)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_SYMLINK, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Symlinkat(oldpath string, newdirfd int, newpath string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(oldpath)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(newpath)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_SYMLINKAT, uintptr(unsafe.Pointer(_p0)), uintptr(newdirfd), uintptr(unsafe.Pointer(_p1)))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Sync() (err error) {
_, _, e1 := Syscall(SYS_SYNC, 0, 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Truncate(path string, length int64) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_TRUNCATE, uintptr(unsafe.Pointer(_p0)), 0, uintptr(length), uintptr(length>>32), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Umask cannot fail; the previous mask is always returned.
func Umask(newmask int) (oldmask int) {
r0, _, _ := Syscall(SYS_UMASK, uintptr(newmask), 0, 0)
oldmask = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Unlink(path string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_UNLINK, uintptr(unsafe.Pointer(_p0)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Unlinkat(dirfd int, path string, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_UNLINKAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(flags))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Unmount(path string, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_UNMOUNT, uintptr(unsafe.Pointer(_p0)), uintptr(flags), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func write(fd int, p []byte) (n int, err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS_WRITE, uintptr(fd), uintptr(_p0), uintptr(len(p)))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// mmap needs Syscall9: six real arguments plus the padded/split 64-bit pos.
func mmap(addr uintptr, length uintptr, prot int, flag int, fd int, pos int64) (ret uintptr, err error) {
r0, _, e1 := Syscall9(SYS_MMAP, uintptr(addr), uintptr(length), uintptr(prot), uintptr(flag), uintptr(fd), 0, uintptr(pos), uintptr(pos>>32), 0)
ret = uintptr(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func munmap(addr uintptr, length uintptr) (err error) {
_, _, e1 := Syscall(SYS_MUNMAP, uintptr(addr), uintptr(length), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// readlen/writelen are raw-pointer variants of read/write for callers that
// already hold a *byte rather than a slice.
func readlen(fd int, buf *byte, nbuf int) (n int, err error) {
r0, _, e1 := Syscall(SYS_READ, uintptr(fd), uintptr(unsafe.Pointer(buf)), uintptr(nbuf))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func writelen(fd int, buf *byte, nbuf int) (n int, err error) {
r0, _, e1 := Syscall(SYS_WRITE, uintptr(fd), uintptr(unsafe.Pointer(buf)), uintptr(nbuf))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func utimensat(dirfd int, path string, times *[2]Timespec, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_UTIMENSAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(times)), uintptr(flags), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
| {
"pile_set_name": "Github"
} |
//
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Oct 15 2018 10:31:50).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by Steve Nygard.
//
#import <Foundation/NSString.h>
// Class-dump–recovered category adding a notification-name helper to NSString.
@interface NSString (Notification)
// Returns a string derived from the receiver; NOTE(review): the selector
// suggests it strips notification-name prefixes, but the implementation is
// not visible here — confirm semantics before relying on them.
- (id)mtStringByRemovingNotificationPrefixes;
@end
| {
"pile_set_name": "Github"
} |
<?php
/**
*
* Copyright FaShop
* License http://www.fashop.cn
* link http://www.fashop.cn
* Created by FaShop.
* User: hanwenbo
* Date: 2018/4/7
* Time: 下午2:26
*
*/
namespace App\Biz\Wechat;
use EasyWeChat\Kernel\Messages\Text;
use EasyWeChat\Kernel\Messages\Image;
use EasyWeChat\Kernel\Messages\Video;
use EasyWeChat\Kernel\Messages\Voice;
use EasyWeChat\Kernel\Messages\News;
use EasyWeChat\Kernel\Messages\NewsItem;
use EasyWeChat\Kernel\Messages\Article;
class AutoReply
{
    /** @var mixed Keyword used to match auto-reply rules. */
    private $key;

    /**
     * @return mixed The current lookup keyword (null if never set).
     */
    public function getKey()
    {
        return $this->key;
    }

    /**
     * @param mixed $key Keyword to match against auto-reply rules.
     */
    public function setKey( $key ) : void
    {
        $this->key = $key;
    }

    /**
     * @param array $data Optional initial data; recognizes 'key'.
     */
    public function __construct( array $data = [] )
    {
        if( isset( $data['key'] ) ){
            // BUG FIX: the original wrote `$this->setKey = $data['key'];`,
            // which created a dynamic property named "setKey" instead of
            // calling the setter, so getKey() always returned null.
            $this->setKey( $data['key'] );
        }
    }

    /**
     * Looks up the reply content configured for the current keyword.
     * Tries an exact ("equal") match first, then a substring ("contain") match.
     *
     * @return array|null List of reply-content entries, or null when no rule matches.
     */
    public function getReplyContentList() : ? array
    {
        // NOTE(review): $model is unused; kept only in case model() has
        // registration side effects — confirm and remove.
        $model = model( 'WechatAutoReplyKeywords' );
        $find  = \App\Model\Page::getWechatAutoReplyKeywordsInfo( ['key' => $this->getKey(), 'match_mode' => 'equal'] );
        if( !$find ){
            $find = \App\Model\Page::getWechatAutoReplyKeywordsInfo( [
                'key'        => ['like', "%{$this->getKey()}%"],
                'match_mode' => 'contain',
            ] );
        }
        if( $find ){
            $info = model( 'WechatAutoReply' )->getWechatAutoReplyInfo( ['id' => $find['auto_reply_id']] );
            if( $info['reply_mode'] === 'random_one' ){
                // BUG FIX: array_rand() returns a random *key*, not a value;
                // the original returned an array index instead of the message.
                $message = $info['reply_content'][array_rand( $info['reply_content'] )];
                return [$message];
            } else{
                return $info['reply_content'];
            }
        } else{
            return null;
        }
    }

    /**
     * @return mixed|null The follow/subscribe reply content, or null when
     *                    auto-reply is disabled for the shop.
     */
    public function getSubscribeReplyContent()
    {
        $shop = \App\Model\Shop::init()->getShopInfo( ['id' => 1, 'auto_reply_status' => 1], 'auto_reply_subscribe_replay_content' );
        if( $shop ){
            return $shop['auto_reply_subscribe_replay_content'];
        } else{
            return null;
        }
    }

    /**
     * Builds an EasyWeChat message object from a stored reply-content entry.
     *
     * @param array $message Entry with 'type' and type-specific payload fields.
     * @return \EasyWeChat\Kernel\Messages\Message|null Null for unknown types.
     */
    public function buildMessage( array $message )
    {
        switch( $message['type'] ){
            case 'text':
                $result = new Text( $message['content'] );
                break;
            case 'image':
                $result = new Image( $message['media_id'] );
                break;
            case 'news':
                // BUG FIX: initialize $items so an empty 'extra' list does not
                // reference an undefined variable below.
                $items = [];
                foreach( $message['extra'] as $item ){
                    $items[] = new NewsItem( [
                        'title'       => $item['title'],
                        'description' => $item['digest'],
                        'image'       => $item['cover_url'],
                        'url'         => $item['content_url'],
                    ] );
                }
                $result = new News( $items );
                break;
            case 'voice':
                $result = new Voice( $message['media_id'] );
                break;
            case 'video':
                $result = new Video( $message['media_id'] );
                break;
            case 'local_news':
                $items = [];
                foreach( $message['extra'] as $item ){
                    $option = [
                        'title' => $item['title'],
                        'url'   => "http://www.fashop.cn/material?".http_build_query( array_merge( $item['link']['param'], $item['link']['action'] ) ),
                    ];
                    if( isset( $item['cover_pic'] ) ){
                        $option['image'] = $item['cover_pic'];
                    }
                    $items[] = new NewsItem( $option );
                }
                $result = new News( $items );
                break;
            default :
                $result = null;
                break;
        }
        return $result;
    }
}
"pile_set_name": "Github"
} |
#!/bin/bash
#
# strongSwan test-scenario configuration: declares which guest VMs take part
# in this test and how the harness should instrument them.
#
# All guest instances required for this test.
#
VIRTHOSTS="alice moon winnetou sun bob"
# Block diagram image corresponding to the topology above.
#
DIAGRAM="a-m-w-s-b.png"
# Guest instances on which tcpdump capture is to be started.
#
TCPDUMPHOSTS="alice sun bob"
# Guest instances on which IPsec is started
# (used for IPsec logging purposes).
#
IPSECHOSTS="moon sun"
# charon daemon controlled by swanctl (1 = enabled).
#
SWANCTL=1
| {
"pile_set_name": "Github"
} |
extends Node

# Cached references to the character node and the experience UI widgets.
onready var _character = $Character
onready var _label = $Interface/Label
onready var _bar = $Interface/ExperienceBar


func _ready():
	# Seed the UI from the character's starting level/experience state.
	_bar.initialize(_character.experience, _character.experience_required)
	_label.update_text(_character.level, _character.experience, _character.experience_required)


func _input(event):
	# On "ui_accept" (Enter/Space by default): award experience and refresh the label.
	if event.is_action_pressed('ui_accept'):
		_character.gain_experience(34)
		_label.update_text(_character.level, _character.experience, _character.experience_required)
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2012-2020 CodeLibs Project and the Others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.codelibs.fess.app.web.admin.dict.stemmeroverride;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import javax.annotation.Resource;
import org.codelibs.core.beans.util.BeanUtil;
import org.codelibs.core.lang.StringUtil;
import org.codelibs.fess.Constants;
import org.codelibs.fess.annotation.Secured;
import org.codelibs.fess.app.pager.StemmerOverridePager;
import org.codelibs.fess.app.service.StemmerOverrideService;
import org.codelibs.fess.app.web.CrudMode;
import org.codelibs.fess.app.web.admin.dict.AdminDictAction;
import org.codelibs.fess.app.web.base.FessAdminAction;
import org.codelibs.fess.app.web.base.FessBaseAction;
import org.codelibs.fess.dict.stemmeroverride.StemmerOverrideItem;
import org.codelibs.fess.util.ComponentUtil;
import org.codelibs.fess.util.RenderDataUtil;
import org.dbflute.optional.OptionalEntity;
import org.dbflute.optional.OptionalThing;
import org.lastaflute.web.Execute;
import org.lastaflute.web.response.ActionResponse;
import org.lastaflute.web.response.HtmlResponse;
import org.lastaflute.web.response.render.RenderData;
import org.lastaflute.web.ruts.process.ActionRuntime;
import org.lastaflute.web.validation.VaErrorHook;
import org.lastaflute.web.validation.exception.ValidationErrorException;
/**
 * Admin action backing the "stemmer override" dictionary screens:
 * paging/search over dictionary entries, CRUD on single entries, and
 * download/upload of the dictionary file itself.
 *
 * @author shinsuke
 */
public class AdminDictStemmeroverrideAction extends FessAdminAction {

    /** Role required to access these pages. */
    public static final String ROLE = "admin-dict";

    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    @Resource
    private StemmerOverrideService stemmerOverrideService;
    @Resource
    private StemmerOverridePager stemmerOverridePager;

    // ===================================================================================
    //                                                                                Hook
    //                                                                              ======
    @Override
    protected void setupHtmlData(final ActionRuntime runtime) {
        super.setupHtmlData(runtime);
        // Expose the context-sensitive online-help link to the JSP layer.
        runtime.registerData("helpLink", systemHelper.getHelpLink(fessConfig.getOnlineHelpNameDictStemmeroverride()));
    }

    @Override
    protected String getActionRole() {
        return ROLE;
    }

    // ===================================================================================
    //                                                                      Search Execute
    //                                                                      ==============
    @Execute
    @Secured({ ROLE, ROLE + VIEW })
    public HtmlResponse index(final SearchForm form) {
        validate(form, messages -> {}, this::asDictIndexHtml);
        stemmerOverridePager.clear();
        return asHtml(path_AdminDictStemmeroverride_AdminDictStemmeroverrideJsp).renderWith(data -> {
            searchPaging(data, form);
        });
    }

    @Execute
    @Secured({ ROLE, ROLE + VIEW })
    public HtmlResponse list(final OptionalThing<Integer> pageNumber, final SearchForm form) {
        validate(form, messages -> {}, this::asDictIndexHtml);
        pageNumber.ifPresent(num -> {
            // Fix: use the lambda parameter instead of re-reading the optional
            // via pageNumber.get().
            stemmerOverridePager.setCurrentPageNumber(num);
        }).orElse(() -> {
            stemmerOverridePager.setCurrentPageNumber(0);
        });
        return asHtml(path_AdminDictStemmeroverride_AdminDictStemmeroverrideJsp).renderWith(data -> {
            searchPaging(data, form);
        });
    }

    @Execute
    @Secured({ ROLE, ROLE + VIEW })
    public HtmlResponse search(final SearchForm form) {
        validate(form, messages -> {}, this::asDictIndexHtml);
        // Copy the search criteria into the pager (pager bookkeeping fields excluded).
        copyBeanToBean(form, stemmerOverridePager, op -> op.exclude(Constants.PAGER_CONVERSION_RULE));
        return asHtml(path_AdminDictStemmeroverride_AdminDictStemmeroverrideJsp).renderWith(data -> {
            searchPaging(data, form);
        });
    }

    @Execute
    @Secured({ ROLE, ROLE + VIEW })
    public HtmlResponse reset(final SearchForm form) {
        validate(form, messages -> {}, this::asDictIndexHtml);
        stemmerOverridePager.clear();
        return asHtml(path_AdminDictStemmeroverride_AdminDictStemmeroverrideJsp).renderWith(data -> {
            searchPaging(data, form);
        });
    }

    /**
     * Registers the current page of dictionary items for rendering and copies
     * the pager state back into the form so paging controls stay in sync.
     */
    protected void searchPaging(final RenderData data, final SearchForm form) {
        // page navi
        RenderDataUtil.register(data, "stemmerOverrideItemItems",
                stemmerOverrideService.getStemmerOverrideList(form.dictId, stemmerOverridePager));
        // restore from pager
        BeanUtil.copyBeanToBean(stemmerOverridePager, form, op -> {
            op.exclude(Constants.PAGER_CONVERSION_RULE);
        });
    }

    // ===================================================================================
    //                                                                        Edit Execute
    //                                                                        ============
    // -----------------------------------------------------
    //                                            Entry Page
    //                                            ----------
    @Execute
    @Secured({ ROLE })
    public HtmlResponse createnew(final String dictId) {
        saveToken();
        return asHtml(path_AdminDictStemmeroverride_AdminDictStemmeroverrideEditJsp).useForm(CreateForm.class, op -> {
            op.setup(form -> {
                form.initialize();
                form.crudMode = CrudMode.CREATE;
                form.dictId = dictId;
            });
        });
    }

    @Execute
    @Secured({ ROLE })
    public HtmlResponse edit(final EditForm form) {
        validate(form, messages -> {}, () -> asListHtml(form.dictId));
        stemmerOverrideService
                .getStemmerOverrideItem(form.dictId, form.id)
                .ifPresent(entity -> {
                    form.input = entity.getInput();
                    form.output = entity.getOutput();
                })
                .orElse(() -> {
                    throwValidationError(messages -> messages.addErrorsCrudCouldNotFindCrudTable(GLOBAL, form.getDisplayId()),
                            () -> asListHtml(form.dictId));
                });
        saveToken();
        if (form.crudMode.intValue() == CrudMode.EDIT) {
            // back
            form.crudMode = CrudMode.DETAILS;
            return asDetailsHtml();
        } else {
            form.crudMode = CrudMode.EDIT;
            return asEditHtml();
        }
    }

    // -----------------------------------------------------
    //                                               Details
    //                                               -------
    @Execute
    @Secured({ ROLE, ROLE + VIEW })
    public HtmlResponse details(final String dictId, final int crudMode, final long id) {
        verifyCrudMode(crudMode, CrudMode.DETAILS, dictId);
        saveToken();
        return asDetailsHtml().useForm(
                EditForm.class,
                op -> {
                    op.setup(form -> {
                        stemmerOverrideService
                                .getStemmerOverrideItem(dictId, id)
                                .ifPresent(entity -> {
                                    form.input = entity.getInput();
                                    form.output = entity.getOutput();
                                })
                                .orElse(() -> {
                                    throwValidationError(
                                            messages -> messages.addErrorsCrudCouldNotFindCrudTable(GLOBAL, dictId + ":" + id),
                                            () -> asListHtml(dictId));
                                });
                        form.id = id;
                        form.crudMode = crudMode;
                        form.dictId = dictId;
                    });
                });
    }

    // -----------------------------------------------------
    //                                              Download
    //                                               -------
    @Execute
    @Secured({ ROLE, ROLE + VIEW })
    public HtmlResponse downloadpage(final String dictId) {
        saveToken();
        return asHtml(path_AdminDictStemmeroverride_AdminDictStemmeroverrideDownloadJsp).useForm(DownloadForm.class, op -> {
            op.setup(form -> {
                form.dictId = dictId;
            });
        }).renderWith(data -> {
            stemmerOverrideService.getStemmerOverrideFile(dictId).ifPresent(file -> {
                RenderDataUtil.register(data, "path", file.getPath());
            }).orElse(() -> {
                throwValidationError(messages -> messages.addErrorsFailedToDownloadStemmeroverrideFile(GLOBAL), this::asDictIndexHtml);
            });
        });
    }

    @Execute
    @Secured({ ROLE, ROLE + VIEW })
    public ActionResponse download(final DownloadForm form) {
        validate(form, messages -> {}, () -> downloadpage(form.dictId));
        verifyTokenKeep(() -> downloadpage(form.dictId));
        // Stream the dictionary file to the client as an octet-stream download.
        return stemmerOverrideService
                .getStemmerOverrideFile(form.dictId)
                .map(file -> asStream(new File(file.getPath()).getName()).contentTypeOctetStream().stream(out -> {
                    file.writeOut(out);
                }))
                .orElseGet(
                        () -> {
                            throwValidationError(messages -> messages.addErrorsFailedToDownloadStemmeroverrideFile(GLOBAL),
                                    () -> downloadpage(form.dictId));
                            return null;
                        });
    }

    // -----------------------------------------------------
    //                                                Upload
    //                                               -------
    @Execute
    @Secured({ ROLE })
    public HtmlResponse uploadpage(final String dictId) {
        saveToken();
        return asHtml(path_AdminDictStemmeroverride_AdminDictStemmeroverrideUploadJsp).useForm(UploadForm.class, op -> {
            op.setup(form -> {
                form.dictId = dictId;
            });
        }).renderWith(data -> {
            stemmerOverrideService.getStemmerOverrideFile(dictId).ifPresent(file -> {
                RenderDataUtil.register(data, "path", file.getPath());
            }).orElse(() -> {
                throwValidationError(messages -> messages.addErrorsFailedToDownloadStemmeroverrideFile(GLOBAL), this::asDictIndexHtml);
            });
        });
    }

    @Execute
    @Secured({ ROLE })
    public HtmlResponse upload(final UploadForm form) {
        validate(form, messages -> {}, () -> uploadpage(form.dictId));
        verifyToken(() -> uploadpage(form.dictId));
        // Replace the dictionary file contents with the uploaded stream.
        return stemmerOverrideService
                .getStemmerOverrideFile(form.dictId)
                .map(file -> {
                    try (InputStream inputStream = form.stemmerOverrideFile.getInputStream()) {
                        file.update(inputStream);
                    } catch (final IOException e) {
                        throwValidationError(messages -> messages.addErrorsFailedToUploadStemmeroverrideFile(GLOBAL),
                                () -> redirectWith(getClass(), moreUrl("uploadpage/" + form.dictId)));
                    }
                    saveInfo(messages -> messages.addSuccessUploadStemmeroverrideFile(GLOBAL));
                    return redirectWith(getClass(), moreUrl("list/1").params("dictId", form.dictId));
                })
                .orElseGet(
                        () -> {
                            throwValidationError(messages -> messages.addErrorsFailedToUploadStemmeroverrideFile(GLOBAL),
                                    () -> uploadpage(form.dictId));
                            return null;
                        });
    }

    // -----------------------------------------------------
    //                                         Actually Crud
    //                                         -------------
    @Execute
    @Secured({ ROLE })
    public HtmlResponse create(final CreateForm form) {
        verifyCrudMode(form.crudMode, CrudMode.CREATE, form.dictId);
        validate(form, messages -> {}, this::asEditHtml);
        verifyToken(this::asEditHtml);
        createStemmerOverrideItem(form, this::asEditHtml).ifPresent(
                entity -> {
                    try {
                        stemmerOverrideService.store(form.dictId, entity);
                        saveInfo(messages -> messages.addSuccessCrudCreateCrudTable(GLOBAL));
                    } catch (final Exception e) {
                        throwValidationError(messages -> messages.addErrorsCrudFailedToCreateCrudTable(GLOBAL, buildThrowableMessage(e)),
                                this::asEditHtml);
                    }
                }).orElse(() -> {
                    throwValidationError(messages -> messages.addErrorsCrudFailedToCreateInstance(GLOBAL), this::asEditHtml);
                });
        return redirectWith(getClass(), moreUrl("list/1").params("dictId", form.dictId));
    }

    @Execute
    @Secured({ ROLE })
    public HtmlResponse update(final EditForm form) {
        verifyCrudMode(form.crudMode, CrudMode.EDIT, form.dictId);
        validate(form, messages -> {}, this::asEditHtml);
        verifyToken(this::asEditHtml);
        createStemmerOverrideItem(form, this::asEditHtml).ifPresent(
                entity -> {
                    try {
                        stemmerOverrideService.store(form.dictId, entity);
                        saveInfo(messages -> messages.addSuccessCrudUpdateCrudTable(GLOBAL));
                    } catch (final Exception e) {
                        throwValidationError(messages -> messages.addErrorsCrudFailedToUpdateCrudTable(GLOBAL, buildThrowableMessage(e)),
                                this::asEditHtml);
                    }
                }).orElse(() -> {
                    saveToken();
                    throwValidationError(messages -> messages.addErrorsCrudCouldNotFindCrudTable(GLOBAL, form.getDisplayId()), this::asEditHtml);
                });
        return redirectWith(getClass(), moreUrl("list/1").params("dictId", form.dictId));
    }

    @Execute
    @Secured({ ROLE })
    public HtmlResponse delete(final EditForm form) {
        verifyCrudMode(form.crudMode, CrudMode.DETAILS, form.dictId);
        validate(form, messages -> {}, this::asDetailsHtml);
        verifyToken(this::asDetailsHtml);
        stemmerOverrideService
                .getStemmerOverrideItem(form.dictId, form.id)
                .ifPresent(
                        entity -> {
                            try {
                                stemmerOverrideService.delete(form.dictId, entity);
                                saveInfo(messages -> messages.addSuccessCrudDeleteCrudTable(GLOBAL));
                            } catch (final Exception e) {
                                throwValidationError(
                                        messages -> messages.addErrorsCrudFailedToDeleteCrudTable(GLOBAL, buildThrowableMessage(e)),
                                        this::asEditHtml);
                            }
                        })
                .orElse(() -> {
                    throwValidationError(messages -> messages.addErrorsCrudCouldNotFindCrudTable(GLOBAL, form.getDisplayId()),
                            this::asDetailsHtml);
                });
        return redirectWith(getClass(), moreUrl("list/1").params("dictId", form.dictId));
    }

    //===================================================================================
    //                                                                        Assist Logic
    //                                                                        ============
    /**
     * Resolves the target item for the given form: a blank new item in CREATE
     * mode, or the existing item looked up by id in EDIT mode.
     */
    private static OptionalEntity<StemmerOverrideItem> getEntity(final CreateForm form) {
        switch (form.crudMode) {
        case CrudMode.CREATE:
            final StemmerOverrideItem entity = new StemmerOverrideItem(0, StringUtil.EMPTY, StringUtil.EMPTY);
            return OptionalEntity.of(entity);
        case CrudMode.EDIT:
            if (form instanceof EditForm) {
                return ComponentUtil.getComponent(StemmerOverrideService.class).getStemmerOverrideItem(form.dictId, ((EditForm) form).id);
            }
            break;
        default:
            break;
        }
        return OptionalEntity.empty();
    }

    protected OptionalEntity<StemmerOverrideItem> createStemmerOverrideItem(final CreateForm form, final VaErrorHook hook) {
        try {
            return createStemmerOverrideItem(this, form, hook);
        } catch (final ValidationErrorException e) {
            // Re-save the token so the user can resubmit after the error.
            saveToken();
            throw e;
        }
    }

    // NOTE: action/hook parameters are unused here; the signature appears to
    // mirror the other dict admin actions — confirm before changing it.
    public static OptionalEntity<StemmerOverrideItem> createStemmerOverrideItem(final FessBaseAction action, final CreateForm form,
            final VaErrorHook hook) {
        return getEntity(form).map(entity -> {
            entity.setNewInput(form.input);
            entity.setNewOutput(form.output);
            return entity;
        });
    }

    // ===================================================================================
    //                                                                        Small Helper
    //                                                                        ============
    /** Rejects requests whose CRUD mode does not match the expected one. */
    protected void verifyCrudMode(final int crudMode, final int expectedMode, final String dictId) {
        if (crudMode != expectedMode) {
            throwValidationError(messages -> {
                messages.addErrorsCrudInvalidMode(GLOBAL, String.valueOf(expectedMode), String.valueOf(crudMode));
            }, () -> asListHtml(dictId));
        }
    }

    // ===================================================================================
    //                                                                                 JSP
    //                                                                           =========
    protected HtmlResponse asDictIndexHtml() {
        return redirect(AdminDictAction.class);
    }

    private HtmlResponse asListHtml(final String dictId) {
        return asHtml(path_AdminDictStemmeroverride_AdminDictStemmeroverrideJsp).renderWith(
                data -> {
                    RenderDataUtil.register(data, "stemmerOverrideItemItems",
                            stemmerOverrideService.getStemmerOverrideList(dictId, stemmerOverridePager));
                }).useForm(SearchForm.class, setup -> {
                    setup.setup(form -> {
                        copyBeanToBean(stemmerOverridePager, form, op -> op.include("id"));
                    });
                });
    }

    private HtmlResponse asEditHtml() {
        return asHtml(path_AdminDictStemmeroverride_AdminDictStemmeroverrideEditJsp);
    }

    private HtmlResponse asDetailsHtml() {
        return asHtml(path_AdminDictStemmeroverride_AdminDictStemmeroverrideDetailsJsp);
    }
}
| {
"pile_set_name": "Github"
} |
package com.sequenceiq.redbeams.flow.redbeams.start.event;
import com.sequenceiq.cloudbreak.cloud.context.CloudContext;
import com.sequenceiq.cloudbreak.cloud.model.CloudCredential;
import com.sequenceiq.cloudbreak.cloud.model.DatabaseStack;
import com.sequenceiq.redbeams.flow.redbeams.common.RedbeamsEvent;
/**
* A request for start a database server.
*/
public class StartDatabaseServerRequest extends RedbeamsEvent {
private final CloudContext cloudContext;
private final CloudCredential cloudCredential;
private final DatabaseStack dbStack;
public StartDatabaseServerRequest(CloudContext cloudContext, CloudCredential cloudCredential, DatabaseStack dbStack) {
super(cloudContext != null ? cloudContext.getId() : null);
this.cloudContext = cloudContext;
this.cloudCredential = cloudCredential;
this.dbStack = dbStack;
}
public CloudContext getCloudContext() {
return cloudContext;
}
public CloudCredential getCloudCredential() {
return cloudCredential;
}
public DatabaseStack getDbStack() {
return dbStack;
}
public String toString() {
return "StartDatabaseServerRequest{"
+ "cloudContext=" + cloudContext
+ ", cloudCredential=" + cloudCredential
+ ", dbStack=" + dbStack
+ '}';
}
}
| {
"pile_set_name": "Github"
} |
<?php
/**
* REST API: WP_REST_Search_Controller class
*
* @package WordPress
* @subpackage REST_API
* @since 5.0.0
*/
/**
 * Core class to search through all WordPress content via the REST API.
 *
 * @since 5.0.0
 *
 * @see WP_REST_Controller
 */
class WP_REST_Search_Controller extends WP_REST_Controller {

	/**
	 * ID property name.
	 */
	const PROP_ID = 'id';

	/**
	 * Title property name.
	 */
	const PROP_TITLE = 'title';

	/**
	 * URL property name.
	 */
	const PROP_URL = 'url';

	/**
	 * Type property name.
	 */
	const PROP_TYPE = 'type';

	/**
	 * Subtype property name.
	 */
	const PROP_SUBTYPE = 'subtype';

	/**
	 * Identifier for the 'any' type.
	 */
	const TYPE_ANY = 'any';

	/**
	 * Search handlers used by the controller.
	 *
	 * @since 5.0.0
	 * @var array
	 */
	protected $search_handlers = array();

	/**
	 * Constructor.
	 *
	 * @since 5.0.0
	 *
	 * @param array $search_handlers List of search handlers to use in the controller. Each search
	 *                               handler instance must extend the `WP_REST_Search_Handler` class.
	 */
	public function __construct( array $search_handlers ) {
		$this->namespace = 'wp/v2';
		$this->rest_base = 'search';

		foreach ( $search_handlers as $search_handler ) {
			// Handlers that do not extend WP_REST_Search_Handler are skipped
			// (with a _doing_it_wrong notice) rather than causing a fatal.
			if ( ! $search_handler instanceof WP_REST_Search_Handler ) {
				_doing_it_wrong(
					__METHOD__,
					/* translators: %s: PHP class name. */
					sprintf( __( 'REST search handlers must extend the %s class.' ), 'WP_REST_Search_Handler' ),
					'5.0.0'
				);
				continue;
			}

			// Index handlers by their type so request-time lookup is O(1).
			$this->search_handlers[ $search_handler->get_type() ] = $search_handler;
		}
	}

	/**
	 * Registers the routes for the objects of the controller.
	 *
	 * @since 5.0.0
	 *
	 * @see register_rest_route()
	 */
	public function register_routes() {
		register_rest_route(
			$this->namespace,
			'/' . $this->rest_base,
			array(
				array(
					'methods'             => WP_REST_Server::READABLE,
					'callback'            => array( $this, 'get_items' ),
					'permission_callback' => array( $this, 'get_items_permission_check' ),
					'args'                => $this->get_collection_params(),
				),
				'schema' => array( $this, 'get_public_item_schema' ),
			)
		);
	}

	/**
	 * Checks if a given request has access to search content.
	 *
	 * @since 5.0.0
	 *
	 * @param WP_REST_Request $request Full details about the request.
	 * @return true|WP_Error True if the request has search access, WP_Error object otherwise.
	 */
	public function get_items_permission_check( $request ) {
		// Search is publicly accessible; individual handlers limit visibility.
		return true;
	}

	/**
	 * Retrieves a collection of search results.
	 *
	 * @since 5.0.0
	 *
	 * @param WP_REST_Request $request Full details about the request.
	 * @return WP_REST_Response|WP_Error Response object on success, or WP_Error object on failure.
	 */
	public function get_items( $request ) {
		$handler = $this->get_search_handler( $request );
		if ( is_wp_error( $handler ) ) {
			return $handler;
		}

		$result = $handler->search_items( $request );

		// A well-behaved handler must return both the matching IDs and a total.
		if ( ! isset( $result[ WP_REST_Search_Handler::RESULT_IDS ] ) || ! is_array( $result[ WP_REST_Search_Handler::RESULT_IDS ] ) || ! isset( $result[ WP_REST_Search_Handler::RESULT_TOTAL ] ) ) {
			return new WP_Error(
				'rest_search_handler_error',
				__( 'Internal search handler error.' ),
				array( 'status' => 500 )
			);
		}

		$ids = array_map( 'absint', $result[ WP_REST_Search_Handler::RESULT_IDS ] );

		$results = array();

		foreach ( $ids as $id ) {
			$data      = $this->prepare_item_for_response( $id, $request );
			$results[] = $this->prepare_response_for_collection( $data );
		}

		// Pagination bookkeeping and out-of-range page rejection.
		$total     = (int) $result[ WP_REST_Search_Handler::RESULT_TOTAL ];
		$page      = (int) $request['page'];
		$per_page  = (int) $request['per_page'];
		$max_pages = ceil( $total / $per_page );

		if ( $page > $max_pages && $total > 0 ) {
			return new WP_Error(
				'rest_search_invalid_page_number',
				__( 'The page number requested is larger than the number of pages available.' ),
				array( 'status' => 400 )
			);
		}

		$response = rest_ensure_response( $results );
		$response->header( 'X-WP-Total', $total );
		$response->header( 'X-WP-TotalPages', $max_pages );

		// Emit RFC 5988 prev/next Link headers for collection navigation.
		$request_params = $request->get_query_params();
		$base           = add_query_arg( urlencode_deep( $request_params ), rest_url( sprintf( '%s/%s', $this->namespace, $this->rest_base ) ) );

		if ( $page > 1 ) {
			$prev_link = add_query_arg( 'page', $page - 1, $base );
			$response->link_header( 'prev', $prev_link );
		}
		if ( $page < $max_pages ) {
			$next_link = add_query_arg( 'page', $page + 1, $base );
			$response->link_header( 'next', $next_link );
		}

		return $response;
	}

	/**
	 * Prepares a single search result for response.
	 *
	 * @since 5.0.0
	 *
	 * @param int             $id      ID of the item to prepare.
	 * @param WP_REST_Request $request Request object.
	 * @return WP_REST_Response Response object.
	 */
	public function prepare_item_for_response( $id, $request ) {
		$handler = $this->get_search_handler( $request );
		if ( is_wp_error( $handler ) ) {
			// Cannot resolve a handler: return an empty response rather than erroring.
			return new WP_REST_Response();
		}

		$fields = $this->get_fields_for_response( $request );

		$data = $handler->prepare_item( $id, $fields );
		$data = $this->add_additional_fields_to_object( $data, $request );

		$context = ! empty( $request['context'] ) ? $request['context'] : 'view';
		$data    = $this->filter_response_by_context( $data, $context );

		$response = rest_ensure_response( $data );

		$links               = $handler->prepare_item_links( $id );
		$links['collection'] = array(
			'href' => rest_url( sprintf( '%s/%s', $this->namespace, $this->rest_base ) ),
		);
		$response->add_links( $links );

		return $response;
	}

	/**
	 * Retrieves the item schema, conforming to JSON Schema.
	 *
	 * @since 5.0.0
	 *
	 * @return array Item schema data.
	 */
	public function get_item_schema() {
		if ( $this->schema ) {
			return $this->add_additional_fields_schema( $this->schema );
		}

		// The allowed type/subtype enums are the union over all registered handlers.
		$types    = array();
		$subtypes = array();
		foreach ( $this->search_handlers as $search_handler ) {
			$types[]  = $search_handler->get_type();
			$subtypes = array_merge( $subtypes, $search_handler->get_subtypes() );
		}

		$types    = array_unique( $types );
		$subtypes = array_unique( $subtypes );

		$schema = array(
			'$schema'    => 'http://json-schema.org/draft-04/schema#',
			'title'      => 'search-result',
			'type'       => 'object',
			'properties' => array(
				self::PROP_ID      => array(
					'description' => __( 'Unique identifier for the object.' ),
					'type'        => 'integer',
					'context'     => array( 'view', 'embed' ),
					'readonly'    => true,
				),
				self::PROP_TITLE   => array(
					'description' => __( 'The title for the object.' ),
					'type'        => 'string',
					'context'     => array( 'view', 'embed' ),
					'readonly'    => true,
				),
				self::PROP_URL     => array(
					'description' => __( 'URL to the object.' ),
					'type'        => 'string',
					'format'      => 'uri',
					'context'     => array( 'view', 'embed' ),
					'readonly'    => true,
				),
				self::PROP_TYPE    => array(
					'description' => __( 'Object type.' ),
					'type'        => 'string',
					'enum'        => $types,
					'context'     => array( 'view', 'embed' ),
					'readonly'    => true,
				),
				self::PROP_SUBTYPE => array(
					'description' => __( 'Object subtype.' ),
					'type'        => 'string',
					'enum'        => $subtypes,
					'context'     => array( 'view', 'embed' ),
					'readonly'    => true,
				),
			),
		);

		$this->schema = $schema;

		return $this->add_additional_fields_schema( $this->schema );
	}

	/**
	 * Retrieves the query params for the search results collection.
	 *
	 * @since 5.0.0
	 *
	 * @return array Collection parameters.
	 */
	public function get_collection_params() {
		$types    = array();
		$subtypes = array();
		foreach ( $this->search_handlers as $search_handler ) {
			$types[]  = $search_handler->get_type();
			$subtypes = array_merge( $subtypes, $search_handler->get_subtypes() );
		}

		$types    = array_unique( $types );
		$subtypes = array_unique( $subtypes );

		$query_params = parent::get_collection_params();

		$query_params['context']['default'] = 'view';

		// The first registered handler's type is the default search type.
		$query_params[ self::PROP_TYPE ] = array(
			'default'     => $types[0],
			'description' => __( 'Limit results to items of an object type.' ),
			'type'        => 'string',
			'enum'        => $types,
		);

		$query_params[ self::PROP_SUBTYPE ] = array(
			'default'           => self::TYPE_ANY,
			'description'       => __( 'Limit results to items of one or more object subtypes.' ),
			'type'              => 'array',
			'items'             => array(
				'enum' => array_merge( $subtypes, array( self::TYPE_ANY ) ),
				'type' => 'string',
			),
			'sanitize_callback' => array( $this, 'sanitize_subtypes' ),
		);

		return $query_params;
	}

	/**
	 * Sanitizes the list of subtypes, to ensure only subtypes of the passed type are included.
	 *
	 * @since 5.0.0
	 *
	 * @param string|array    $subtypes  One or more subtypes.
	 * @param WP_REST_Request $request   Full details about the request.
	 * @param string          $parameter Parameter name.
	 * @return array|WP_Error List of valid subtypes, or WP_Error object on failure.
	 */
	public function sanitize_subtypes( $subtypes, $request, $parameter ) {
		$subtypes = wp_parse_slug_list( $subtypes );

		$subtypes = rest_parse_request_arg( $subtypes, $request, $parameter );
		if ( is_wp_error( $subtypes ) ) {
			return $subtypes;
		}

		// 'any' overrides any other subtype.
		if ( in_array( self::TYPE_ANY, $subtypes, true ) ) {
			return array( self::TYPE_ANY );
		}

		$handler = $this->get_search_handler( $request );
		if ( is_wp_error( $handler ) ) {
			return $handler;
		}

		// Silently drop subtypes the selected handler does not support.
		return array_intersect( $subtypes, $handler->get_subtypes() );
	}

	/**
	 * Gets the search handler to handle the current request.
	 *
	 * @since 5.0.0
	 *
	 * @param WP_REST_Request $request Full details about the request.
	 * @return WP_REST_Search_Handler|WP_Error Search handler for the request type, or WP_Error object on failure.
	 */
	protected function get_search_handler( $request ) {
		$type = $request->get_param( self::PROP_TYPE );

		if ( ! $type || ! isset( $this->search_handlers[ $type ] ) ) {
			return new WP_Error(
				'rest_search_invalid_type',
				__( 'Invalid type parameter.' ),
				array( 'status' => 400 )
			);
		}

		return $this->search_handlers[ $type ];
	}
}
| {
"pile_set_name": "Github"
} |
// run
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This test makes sure the text output for bounds check failures is as expected.
package main
import (
"fmt"
"os"
"runtime"
"text/tabwriter"
)
// Testing with length 3 slices, arrays, and strings.
// A large (>1<<32) value is included to test 32-bit platforms.
var indexes = []uint64{0, 2, 3, 1<<32 - 1, 1<<64 - 1} // index operands
var slices = []uint64{0, 3, 4, 1<<32 - 1, 1<<64 - 1} // slice bounds
// Tab writer shared by all output; initialized in main.
var w *tabwriter.Writer
// main prints, as a right-aligned table, the panic message (or "no panic")
// produced by each index/slice expression exercised below.
func main() {
	w = tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', tabwriter.AlignRight)
	defer w.Flush()
	doIndex()
	doSlice()
	doSlice3()
}
// doIndex exercises single-element indexing of a slice, an array, and a
// string with every value in indexes, reporting each resulting panic text.
func doIndex() {
	sl := []int{1, 2, 3}
	for _, idx := range indexes {
		printPanic(fmt.Sprintf("slice[%d]", idx), func() {
			_ = sl[idx]
		})
	}

	arr := [3]int{1, 2, 3}
	for _, idx := range indexes {
		printPanic(fmt.Sprintf("array[%d]", idx), func() {
			_ = arr[idx]
		})
	}

	str := "123"
	for _, idx := range indexes {
		printPanic(fmt.Sprintf("string[%d]", idx), func() {
			_ = str[idx]
		})
	}
}
// doSlice exercises two-index slicing of a slice, an array, and a string
// with every pair of bounds from slices, reporting each panic text.
func doSlice() {
	sl := []int{1, 2, 3}
	for _, lo := range slices {
		for _, hi := range slices {
			printPanic(fmt.Sprintf("slice[%d:%d]", lo, hi), func() {
				_ = sl[lo:hi]
			})
		}
	}

	arr := [3]int{1, 2, 3}
	for _, lo := range slices {
		for _, hi := range slices {
			printPanic(fmt.Sprintf("array[%d:%d]", lo, hi), func() {
				_ = arr[lo:hi]
			})
		}
	}

	str := "123"
	for _, lo := range slices {
		for _, hi := range slices {
			printPanic(fmt.Sprintf("string[%d:%d]", lo, hi), func() {
				_ = str[lo:hi]
			})
		}
	}
}
// doSlice3 exercises full (three-index) slicing of a slice and an array
// with every bounds triple from slices, reporting each panic text.
// Strings do not support three-index slicing, so they are not covered here.
func doSlice3() {
	sl := []int{1, 2, 3}
	for _, lo := range slices {
		for _, hi := range slices {
			for _, mx := range slices {
				printPanic(fmt.Sprintf("slice[%d:%d:%d]", lo, hi, mx), func() {
					_ = sl[lo:hi:mx]
				})
			}
		}
	}

	arr := [3]int{1, 2, 3}
	for _, lo := range slices {
		for _, hi := range slices {
			for _, mx := range slices {
				printPanic(fmt.Sprintf("array[%d:%d:%d]", lo, hi, mx), func() {
					_ = arr[lo:hi:mx]
				})
			}
		}
	}
}
// printPanic runs f, recovers any runtime error it panics with, and writes
// one tab-separated line "<msg> <panic text or 'no panic'>" to w.
func printPanic(msg string, f func()) {
	defer func() {
		text := "no panic"
		if r := recover(); r != nil {
			text = r.(runtime.Error).Error()
		}
		fmt.Fprintf(w, "%s\t %s\n", msg, text)
	}()
	f()
}
| {
"pile_set_name": "Github"
} |
import { listDomainUsers } from 'lib/domains/users/list';
import { addDomainUser } from 'lib/domains/users/add';
import { addDomain } from 'lib/domains/add';
import { Ptorx } from 'types/ptorx';

// Verifies that a user added to a freshly created domain shows up in the
// domain's user list with exactly the expected fields.
test('listDomainUsers()', async () => {
  // Create the domain as user 1234 and add user 12345 to it.
  // NOTE(review): addDomainUser is given the domain *name* while listDomainUsers
  // is given the domain *id* — confirm both signatures expect these argument types.
  const domain = await addDomain({ domain: 'example5.com' }, 1234);
  await addDomainUser('example5.com', 12345);
  const domainUsers = await listDomainUsers(domain.id, 1234);
  // Exactly one membership entry should exist for the new domain.
  expect(domainUsers).toBeArrayOfSize(1);
  // Each list entry must contain exactly these keys and no others.
  const keys: Array<keyof Ptorx.DomainUserList[0]> = [
    'authorized',
    'created',
    'label',
    'requestKey',
    'domainId'
  ];
  expect(domainUsers[0]).toContainAllKeys(keys);
});
| {
"pile_set_name": "Github"
} |
<!--**************************************************************************************
Toolkit for WPF
Copyright (C) 2007-2017 Xceed Software Inc.
This program is provided to you under the terms of the Microsoft Public
License (Ms-PL) as published at http://wpftoolkit.codeplex.com/license
For more features, controls, and fast professional support,
pick up the Plus Edition at https://xceed.com/xceed-toolkit-plus-for-wpf/
Stay informed: follow @datagrid on Twitter or Like http://facebook.com/datagrids
************************************************************************************-->
<local:DemoView x:Class="Xceed.Wpf.Toolkit.LiveExplorer.Samples.PropertyGrid.Views.PropertyGridAttributesView"
xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
xmlns:local="clr-namespace:Xceed.Wpf.Toolkit.LiveExplorer"
xmlns:xctk="http://schemas.xceed.com/wpf/xaml/toolkit"
xmlns:sys="clr-namespace:System;assembly=mscorlib"
xmlns:col="clr-namespace:System.Collections;assembly=mscorlib"
xmlns:v="clr-namespace:Xceed.Wpf.Toolkit.LiveExplorer.Samples.PropertyGrid.Views"
x:Name="demo"
VerticalScrollBarVisibility="Disabled"
Title="Using Attributes">
<local:DemoView.Description>
<Paragraph FontSize="14" FontFamily="Segoe">
This sample exposes how you can use attributes to customize the display of the properties in the PropertyGrid.
With attributes you can, for each property, customize its category, display property, display name, display order, description, editor, editor combobox items,
and expandability (drill down). Here is the list of all supported attributes:
<LineBreak />
<LineBreak />
BrowsableAttribute
<LineBreak />
DisplayNameAttribute
<LineBreak/>
DescriptionAttribute
<LineBreak/>
CategoryAttribute
<LineBreak/>
CategoryOrderAttribute
<LineBreak/>
EditorAttribute
<LineBreak/>
NewItemTypesAttribute
<LineBreak/>
ExpandableObjectAttribute
<LineBreak/>
ItemsSourceAttribute
<LineBreak/>
PropertyOrderAttribute
<LineBreak/>
ParenthesizePropertyNameAttribute
<LineBreak/>
<LineBreak/>
Plus edition Only:
<LineBreak />
DependsOnAttribute
<LineBreak/>
(Not demoed here. See corresponding samples)
<LineBreak />
ExpandedCategoryAttribute
<LineBreak/>
DefinitionKeyAttribute
<LineBreak/>
LocalizedCategoryAttribute
<LineBreak/>
LocalizedDisplayNameAttribute
<LineBreak/>
LocalizedDescriptionAttribute
</Paragraph>
</local:DemoView.Description>
<Grid>
<Grid.RowDefinitions>
<RowDefinition Height="Auto" />
<RowDefinition Height="*" />
</Grid.RowDefinitions>
<StackPanel Grid.Row="0" Margin="10">
<TextBlock Text="Each property in this PropertyGrid has its own special attribute that applies a different effect when it is displayed."
TextWrapping="Wrap" Margin="5"/>
<TextBlock Text="Select each property one-by-one and take a look at the description section to learn more about each attribute."
TextWrapping="Wrap" Margin="5"/>
<TextBlock Text="All properties define the CategoryAttribute (eg. 'Information','Connections') and the DescriptionAttribute."
TextWrapping="Wrap" Margin="5"/>
<TextBlock Text="(Click the XAML and Code tabs to see details.)"
FontStyle="Italic"
Margin="0,10,0,0"/>
</StackPanel>
<xctk:PropertyGrid Grid.Row="1" x:Name="_propertyGrid" Width="450" MaxHeight="650" Margin="10"
SelectedObject="{Binding}"
ShowDescriptionByTooltip="True">
</xctk:PropertyGrid>
</Grid>
</local:DemoView>
| {
"pile_set_name": "Github"
} |
// Render the db-node analysis table for the node whose id is in #dbNodeId.
// Relies on page globals: layui, jQuery ($) and yesFlag (success code).
layui.use(["table", "layer"], function () {
    var table = layui.table;
    table.render({
        elem: "#analysisTable",
        height: 'full',
        url: '/dbNode/analyse',
        page: false,
        // Server-side query parameters; paging values are fixed since page is disabled.
        where: {id: $('#dbNodeId').val(), page:1, limit:10},
        cols: [[
            {field: 'topicId', align: 'center', width: '10%', title: "TopicId"},
            {field: 'topicName', align: 'center', width: '10%', title: "Topic名称"},
            {field: 'queueQuantity', align: 'center', width: '10%', title: "Queue数量"},
            {field: 'writeableQueueQuantity', align: 'center', width: '10%', title: "可写Queue数量"},
            {field: 'quantity', align: 'center', width: '10%', title: "分布节点数"},
            {field: 'dbNodeIds', align: 'center', width: '10%', title: "分布节Id"},
            {field: 'dbStr', align: 'center', width: '40%', title: "分布节具体信息"},
        ]]
    });
    // NOTE(review): requestCallback is not referenced inside this closure —
    // possibly leftover from a copy/paste or wired up elsewhere; confirm.
    function requestCallback(result, xhr) {
        if (xhr === 'success') {
            // yesFlag is expected to be defined globally on the page.
            if (result.code ==yesFlag) {
                successBox(result.msg);
            } else {
                failBox(result.msg);
            }
        } else {
            failBox("网络异常!"+xhr);
        }
    }
    // Success dialog (layer icon 1 = check mark).
    function successBox(msg) {
        layer.alert(msg, {icon: 1})
    }
    // Failure dialog (layer icon 2 = cross).
    function failBox(msg) {
        layer.alert(msg, {icon: 2})
    }
});
"pile_set_name": "Github"
} |
"use strict";
// Thin DataFire integration wrapper (generated-style boilerplate): builds the
// "apitore_url2labelbywordvectorapis" integration from the bundled OpenAPI spec.
let datafire = require('datafire');
let openapi = require('./openapi.json');
module.exports = datafire.Integration.fromOpenAPI(openapi, "apitore_url2labelbywordvectorapis");
"pile_set_name": "Github"
} |
/*
This file is part of Darling.
Copyright (C) 2017 Lubos Dolezel
Darling is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Darling is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Darling. If not, see <http://www.gnu.org/licenses/>.
*/
#import <CoreImage/CIPortraitEffectStudio.h>
// Darling stub class: instead of crashing on unimplemented selectors, every
// message is forwarded and merely logged.
@implementation CIPortraitEffectStudio

// Return a generic "void method taking no arguments" signature ("v@:") for any
// selector, so the runtime routes unknown messages to -forwardInvocation:
// rather than raising doesNotRecognizeSelector:.
- (NSMethodSignature *)methodSignatureForSelector:(SEL)aSelector {
    return [NSMethodSignature signatureWithObjCTypes: "v@:"];
}

// Log the stubbed selector and receiving class; intentionally does no work.
- (void)forwardInvocation:(NSInvocation *)anInvocation {
    NSLog(@"Stub called: %@ in %@", NSStringFromSelector([anInvocation selector]), [self class]);
}

@end
| {
"pile_set_name": "Github"
} |
releaseType: node
| {
"pile_set_name": "Github"
} |
// SPDX-License-Identifier: GPL-2.0
/*
* r8a7740 Core CPG Clocks
*
* Copyright (C) 2014 Ulrich Hecht
*/
#include <linux/clk-provider.h>
#include <linux/clk/renesas.h>
#include <linux/init.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/slab.h>
#include <linux/of.h>
#include <linux/of_address.h>
#include <linux/spinlock.h>
/* Runtime state for the r8a7740 CPG clock provider. */
struct r8a7740_cpg {
	struct clk_onecell_data data;	/* clocks exported via onecell provider */
	spinlock_t lock;		/* serializes divider register updates */
	void __iomem *reg;		/* mapped CPG register block */
};

/* CPG register offsets */
#define CPG_FRQCRA	0x00
#define CPG_FRQCRB	0x04
#define CPG_PLLC2CR	0x2c
#define CPG_USBCKCR	0x8c
#define CPG_FRQCRC	0xe0

#define CLK_ENABLE_ON_INIT BIT(0)

/* Description of one DIV4 (4-bit divider) clock. */
struct div4_clk {
	const char *name;	/* matching "clock-output-names" entry */
	unsigned int reg;	/* FRQCR* register offset holding the divider */
	unsigned int shift;	/* bit position of the 4-bit divider field */
	int flags;
};

/* DIV4 clocks, all children of PLLC1; the NULL name terminates the table. */
static struct div4_clk div4_clks[] = {
	{ "i", CPG_FRQCRA, 20, CLK_ENABLE_ON_INIT },
	{ "zg", CPG_FRQCRA, 16, CLK_ENABLE_ON_INIT },
	{ "b", CPG_FRQCRA,  8, CLK_ENABLE_ON_INIT },
	{ "m1", CPG_FRQCRA,  4, CLK_ENABLE_ON_INIT },
	{ "hp", CPG_FRQCRB,  4, 0 },
	{ "hpp", CPG_FRQCRC, 20, 0 },
	{ "usbp", CPG_FRQCRC, 16, 0 },
	{ "s", CPG_FRQCRC, 12, 0 },
	{ "zb", CPG_FRQCRC,  8, 0 },
	{ "m3", CPG_FRQCRC,  4, 0 },
	{ "cp", CPG_FRQCRC,  0, 0 },
	{ NULL, 0, 0, 0 },
};

/* Divider field value -> division ratio; the all-zero entry terminates. */
static const struct clk_div_table div4_div_table[] = {
	{ 0, 2 }, { 1, 3 }, { 2, 4 }, { 3, 6 }, { 4, 8 }, { 5, 12 },
	{ 6, 16 }, { 7, 18 }, { 8, 24 }, { 9, 32 }, { 10, 36 }, { 11, 48 },
	{ 13, 72 }, { 14, 96 }, { 0, 0 }
};

/* MD mode pins from the "renesas,mode" DT property; read once at init. */
static u32 cpg_mode __initdata;
/*
 * Register a single core clock identified by name.  Parent and
 * multiplier/divider settings are derived from the MD mode pins (cpg_mode)
 * and the current CPG register contents.  Returns ERR_PTR(-EINVAL) for
 * names not handled here or listed in div4_clks[].
 */
static struct clk * __init
r8a7740_cpg_register_clock(struct device_node *np, struct r8a7740_cpg *cpg,
			   const char *name)
{
	const struct clk_div_table *table = NULL;
	const char *parent_name;
	unsigned int shift, reg;
	unsigned int mult = 1;
	unsigned int div = 1;

	if (!strcmp(name, "r")) {
		/* RCLK: source and ratio selected by the MD2/MD1 mode pins. */
		switch (cpg_mode & (BIT(2) | BIT(1))) {
		case BIT(1) | BIT(2):
			/* extal1 */
			parent_name = of_clk_get_parent_name(np, 0);
			div = 2048;
			break;
		case BIT(2):
			/* extal1 */
			parent_name = of_clk_get_parent_name(np, 0);
			div = 1024;
			break;
		default:
			/* extalr */
			parent_name = of_clk_get_parent_name(np, 2);
			break;
		}
	} else if (!strcmp(name, "system")) {
		/* System clock: extal1, optionally halved depending on MD1. */
		parent_name = of_clk_get_parent_name(np, 0);
		if (cpg_mode & BIT(1))
			div = 2;
	} else if (!strcmp(name, "pllc0")) {
		/* PLLC0/1 are configurable multiplier clocks. Register them as
		 * fixed factor clocks for now as there's no generic multiplier
		 * clock implementation and we currently have no need to change
		 * the multiplier value.
		 */
		u32 value = readl(cpg->reg + CPG_FRQCRC);
		parent_name = "system";
		mult = ((value >> 24) & 0x7f) + 1;
	} else if (!strcmp(name, "pllc1")) {
		/* Multiplier from FRQCRA, with a fixed /2 post-divider. */
		u32 value = readl(cpg->reg + CPG_FRQCRA);
		parent_name = "system";
		mult = ((value >> 24) & 0x7f) + 1;
		div = 2;
	} else if (!strcmp(name, "pllc2")) {
		u32 value = readl(cpg->reg + CPG_PLLC2CR);
		parent_name = "system";
		mult = ((value >> 24) & 0x3f) + 1;
	} else if (!strcmp(name, "usb24s")) {
		/* USB 24 MHz clock: parent and /2 chosen by USBCKCR bits. */
		u32 value = readl(cpg->reg + CPG_USBCKCR);
		if (value & BIT(7))
			/* extal2 */
			parent_name = of_clk_get_parent_name(np, 1);
		else
			parent_name = "system";
		if (!(value & BIT(6)))
			div = 2;
	} else {
		/* Anything else must be one of the DIV4 divider clocks. */
		struct div4_clk *c;

		for (c = div4_clks; c->name; c++) {
			if (!strcmp(name, c->name)) {
				parent_name = "pllc1";
				table = div4_div_table;
				reg = c->reg;
				shift = c->shift;
				break;
			}
		}
		if (!c->name)
			return ERR_PTR(-EINVAL);
	}

	/* Fixed-factor clock unless a divider table was selected above. */
	if (!table) {
		return clk_register_fixed_factor(NULL, name, parent_name, 0,
						 mult, div);
	} else {
		return clk_register_divider_table(NULL, name, parent_name, 0,
						  cpg->reg + reg, shift, 4, 0,
						  table, &cpg->lock);
	}
}
/*
 * Instantiate every core clock listed in the node's "clock-output-names"
 * property and export them through a onecell clock provider.
 */
static void __init r8a7740_cpg_clocks_init(struct device_node *np)
{
	struct r8a7740_cpg *cpg;
	struct clk **clks;
	unsigned int i;
	int num_clks;

	/* MD pin state drives parent/divider selection during registration. */
	if (of_property_read_u32(np, "renesas,mode", &cpg_mode))
		pr_warn("%s: missing renesas,mode property\n", __func__);

	num_clks = of_property_count_strings(np, "clock-output-names");
	if (num_clks < 0) {
		pr_err("%s: failed to count clocks\n", __func__);
		return;
	}

	cpg = kzalloc(sizeof(*cpg), GFP_KERNEL);
	clks = kcalloc(num_clks, sizeof(*clks), GFP_KERNEL);
	if (cpg == NULL || clks == NULL) {
		/* We're leaking memory on purpose, there's no point in cleaning
		 * up as the system won't boot anyway.
		 */
		return;
	}

	spin_lock_init(&cpg->lock);

	cpg->data.clks = clks;
	cpg->data.clk_num = num_clks;

	cpg->reg = of_iomap(np, 0);
	if (WARN_ON(cpg->reg == NULL))
		return;

	/* Register each named clock; failures are logged but non-fatal. */
	for (i = 0; i < num_clks; ++i) {
		const char *name;
		struct clk *clk;

		of_property_read_string_index(np, "clock-output-names", i,
					      &name);

		clk = r8a7740_cpg_register_clock(np, cpg, name);
		if (IS_ERR(clk))
			pr_err("%s: failed to register %pOFn %s clock (%ld)\n",
			       __func__, np, name, PTR_ERR(clk));
		else
			cpg->data.clks[i] = clk;
	}

	of_clk_add_provider(np, of_clk_src_onecell_get, &cpg->data);
}
CLK_OF_DECLARE(r8a7740_cpg_clks, "renesas,r8a7740-cpg-clocks",
	       r8a7740_cpg_clocks_init);
| {
"pile_set_name": "Github"
} |
package com.xiaopo.flying.sticker;
import android.view.MotionEvent;
/**
* @author wupanjie
*/
/**
 * Callbacks fired when the user interacts with a sticker icon on a
 * {@link StickerView}. Each method receives the view and the raw
 * {@link MotionEvent} for the corresponding touch phase.
 */
public interface StickerIconEvent {
  /** Called when a touch on the icon begins (ACTION_DOWN). */
  void onActionDown(StickerView stickerView, MotionEvent event);

  /** Called while the touch moves with the icon pressed (ACTION_MOVE). */
  void onActionMove(StickerView stickerView, MotionEvent event);

  /** Called when the touch on the icon ends (ACTION_UP). */
  void onActionUp(StickerView stickerView, MotionEvent event);
}
| {
"pile_set_name": "Github"
} |
HTTP/1.1 200 OK
Content-Type: application/json
{
"domains": [
{
"description": "desc of domain",
"enabled": true,
"id": "--domain-id--",
"links": {
"self": "http://identity:35357/v3/domains/--domain-id--"
},
"name": "my domain"
},
{
"description": "desc of another domain",
"enabled": true,
"id": "--domain-id--",
"links": {
"self": "http://identity:35357/v3/domains/--domain-id--"
},
"name": "another domain"
}
]
} | {
"pile_set_name": "Github"
} |
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build darwin dragonfly freebsd linux netbsd openbsd solaris
// Unix environment variables.
package unix
import "syscall"
// Getenv retrieves the value of the environment variable named by key.
// found reports whether the variable is present in the environment.
func Getenv(key string) (value string, found bool) {
	return syscall.Getenv(key)
}

// Setenv sets the value of the environment variable named by key.
func Setenv(key, value string) error {
	return syscall.Setenv(key, value)
}

// Clearenv deletes all environment variables.
func Clearenv() {
	syscall.Clearenv()
}

// Environ returns a copy of strings representing the environment,
// in the form "key=value".
func Environ() []string {
	return syscall.Environ()
}

// Unsetenv removes the environment variable named by key.
func Unsetenv(key string) error {
	return syscall.Unsetenv(key)
}
| {
"pile_set_name": "Github"
} |
/*
Copyright (c) 2008-2017 Pivotal Labs
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
// Boot jasmine-core for Node: builds the jasmine namespace, mixes in the
// console reporter helpers, and installs the public interface (describe, it,
// expect, ...) onto the Node `global` object.
module.exports = function(jasmineRequire) {
  var jasmine = jasmineRequire.core(jasmineRequire);

  // Attach console-reporter functions to the jasmine namespace — presumably
  // console.js exports a `console(from, to)` mixer; confirm against that file.
  var consoleFns = require('../console/console.js');
  consoleFns.console(consoleFns, jasmine);

  var env = jasmine.getEnv();

  // Expose the user-facing API as globals so specs need no explicit imports.
  var jasmineInterface = jasmineRequire.interface(jasmine, env);
  extend(global, jasmineInterface);

  // Shallow-copies all enumerable properties (including inherited ones)
  // from source onto destination.
  function extend(destination, source) {
    for (var property in source) destination[property] = source[property];
    return destination;
  }

  return jasmine;
};
| {
"pile_set_name": "Github"
} |
// Admin controller: builds an angular-formly form describing a Clash Royale
// player's level and tower stats, and saves the model through PlayersService.
(function () {
    angular
        .module('clash-royale-api')
        .controller('PlayersController', PlayersController);

    PlayersController.$inject = ['PlayersService', 'ngNotify'];

    function PlayersController(PlayersService, ngNotify) {
        var vm = this;
        vm.title = 'Players';
        vm.previewTitle = 'Preview Players';
        // Form data object; formly populates it via the field keys below.
        // Dotted keys (e.g. 'kingsTower.hitpoints') create nested objects.
        vm.model = {};
        // angular-formly field definitions; all are required text inputs.
        vm.fields = [
            {
                key: 'level',
                type: 'input',
                templateOptions: {
                    label: 'Player Level',
                    required: true,
                    placeholder: 0
                }
            },
            {
                key: 'kingsTower.hitpoints',
                type: 'input',
                templateOptions: {
                    label: 'King\'s Tower Hitpoints',
                    required: true,
                    placeholder: 0
                }
            },
            {
                key: 'kingsTower.range',
                type: 'input',
                templateOptions: {
                    label: 'King\'s Tower Range',
                    required: true,
                    placeholder: 0
                }
            },
            {
                key: 'kingsTower.damage',
                type: 'input',
                templateOptions: {
                    label: 'King\'s Tower Damage',
                    required: true,
                    placeholder: 0
                }
            },
            {
                key: 'kingsTower.hitSpeed',
                type: 'input',
                templateOptions: {
                    label: 'King\'s Tower Hit Speed',
                    required: true,
                    placeholder: 0
                }
            },
            {
                key: 'arenaTowers.hitpoints',
                type: 'input',
                templateOptions: {
                    label: 'Arena Tower Hitpoints',
                    required: true,
                    placeholder: 0
                }
            },
            {
                key: 'arenaTowers.range',
                type: 'input',
                templateOptions: {
                    label: 'Arena Tower Range',
                    required: true,
                    placeholder: 0
                }
            },
            {
                key: 'arenaTowers.damage',
                type: 'input',
                templateOptions: {
                    label: 'Arena Tower Damage',
                    required: true,
                    placeholder: 0
                }
            },
            {
                key: 'arenaTowers.hitSpeed',
                type: 'input',
                templateOptions: {
                    label: 'Arena Tower Hit Speed',
                    required: true,
                    placeholder: 0
                }
            },
            {
                key: 'maxExp',
                type: 'input',
                templateOptions: {
                    label: 'Max. experience before level up.',
                    required: true,
                    placeholder: 0
                }
            },
        ];
        // Pristine copy of the field definitions — presumably kept because
        // formly mutates vm.fields at runtime; confirm with the template.
        vm.originalFields = angular.copy(vm.fields);

        // Persist the model; notify the user and reset the form on success.
        vm.submit = function(model) {
            PlayersService.create(model)
                .then(function(data) {
                    ngNotify.set('Your Player was successfully saved!', 'success');
                    // vm.options is expected to be attached by formly's
                    // options binding at runtime — TODO confirm.
                    vm.options.resetModel();
                })
                .catch(function(error) {
                    ngNotify.set('There was a problem saving your Player... ', 'error');
                });
        }
    }
})();
| {
"pile_set_name": "Github"
} |
// Doxygen-style generated search index: each entry maps a search key to
// [display name, [target page, flag, anchor]]. Appears auto-generated —
// regenerate the docs rather than editing by hand.
var searchData=
[
  ['vec1_2ehpp',['vec1.hpp',['../a00131.html',1,'']]],
  ['vec2_2ehpp',['vec2.hpp',['../a00132.html',1,'']]],
  ['vec3_2ehpp',['vec3.hpp',['../a00133.html',1,'']]],
  ['vec4_2ehpp',['vec4.hpp',['../a00134.html',1,'']]],
  ['vec_5fswizzle_2ehpp',['vec_swizzle.hpp',['../a00135.html',1,'']]],
  ['vector_5fangle_2ehpp',['vector_angle.hpp',['../a00136.html',1,'']]],
  ['vector_5fquery_2ehpp',['vector_query.hpp',['../a00137.html',1,'']]],
  ['vector_5frelational_2ehpp',['vector_relational.hpp',['../a00138.html',1,'']]]
];
| {
"pile_set_name": "Github"
} |
<?php
/**
* Copyright 2010-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
namespace Aws\Sqs\Enum;
use Aws\Common\Enum;
/**
 * Contains enumerable MessageAttribute values.
 *
 * Each constant is the literal attribute-name string used by the SQS API
 * when requesting message attributes; ALL selects every attribute.
 */
class MessageAttribute extends Enum
{
    const ALL = 'All';
    const SENDER_ID = 'SenderId';
    const SENT_TIMESTAMP = 'SentTimestamp';
    const APPROXIMATE_RECEIVE_COUNT = 'ApproximateReceiveCount';
    const APPROXIMATE_FIRST_RECEIVE_TIMESTAMP = 'ApproximateFirstReceiveTimestamp';
}
| {
"pile_set_name": "Github"
} |
#include <cstring>
#include <string>
#include <vector>
#include "google/protobuf/text_format.h"
#include "gtest/gtest.h"
#include "caffe/blob.hpp"
#include "caffe/common.hpp"
#include "caffe/layer.hpp"
#include "caffe/util/upgrade_proto.hpp"
#include "caffe/test/test_caffe_main.hpp"
namespace caffe {
// Fixture for testing UpgradeV0PaddingLayers, which folds legacy standalone
// "padding" layers into the following convolution layer's pad parameter.
class PaddingLayerUpgradeTest : public ::testing::Test {
 protected:
  // Parses both text protos, upgrades the input, and asserts the result
  // equals the expected proto. Also checks the upgrade is idempotent.
  void RunPaddingUpgradeTest(
      const string& input_param_string, const string& output_param_string) {
    // Test that UpgradeV0PaddingLayers called on the proto specified by
    // input_param_string results in the proto specified by
    // output_param_string.
    NetParameter input_param;
    CHECK(google::protobuf::TextFormat::ParseFromString(
        input_param_string, &input_param));
    NetParameter expected_output_param;
    CHECK(google::protobuf::TextFormat::ParseFromString(
        output_param_string, &expected_output_param));
    NetParameter actual_output_param;
    UpgradeV0PaddingLayers(input_param, &actual_output_param);
    // DebugString comparison gives readable diffs on failure.
    EXPECT_EQ(expected_output_param.DebugString(),
        actual_output_param.DebugString());
    // Also test idempotence.
    NetParameter double_pad_upgrade_param;
    UpgradeV0PaddingLayers(actual_output_param, &double_pad_upgrade_param);
    EXPECT_EQ(actual_output_param.DebugString(),
        double_pad_upgrade_param.DebugString());
  }
};
TEST_F(PaddingLayerUpgradeTest, TestSimple) {
const string& input_proto =
"name: 'CaffeNet' "
"layers { "
" layer { "
" name: 'data' "
" type: 'data' "
" source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
" meanfile: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
" batchsize: 256 "
" cropsize: 227 "
" mirror: true "
" } "
" top: 'data' "
" top: 'label' "
"} "
"layers { "
" layer { "
" name: 'pad1' "
" type: 'padding' "
" pad: 2 "
" } "
" bottom: 'data' "
" top: 'pad1' "
"} "
"layers { "
" layer { "
" name: 'conv1' "
" type: 'conv' "
" num_output: 96 "
" kernelsize: 11 "
" stride: 4 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.01 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 0. "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'pad1' "
" top: 'conv1' "
"} "
"layers { "
" layer { "
" name: 'fc8' "
" type: 'innerproduct' "
" num_output: 1000 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.01 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 0 "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'conv1' "
" top: 'fc8' "
"} "
"layers { "
" layer { "
" name: 'loss' "
" type: 'softmax_loss' "
" } "
" bottom: 'fc8' "
" bottom: 'label' "
"} ";
const string& expected_output_proto =
"name: 'CaffeNet' "
"layers { "
" layer { "
" name: 'data' "
" type: 'data' "
" source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
" meanfile: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
" batchsize: 256 "
" cropsize: 227 "
" mirror: true "
" } "
" top: 'data' "
" top: 'label' "
"} "
"layers { "
" layer { "
" name: 'conv1' "
" type: 'conv' "
" num_output: 96 "
" kernelsize: 11 "
" stride: 4 "
" pad: 2 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.01 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 0. "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'data' "
" top: 'conv1' "
"} "
"layers { "
" layer { "
" name: 'fc8' "
" type: 'innerproduct' "
" num_output: 1000 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.01 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 0 "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'conv1' "
" top: 'fc8' "
"} "
"layers { "
" layer { "
" name: 'loss' "
" type: 'softmax_loss' "
" } "
" bottom: 'fc8' "
" bottom: 'label' "
"} ";
this->RunPaddingUpgradeTest(input_proto, expected_output_proto);
}
TEST_F(PaddingLayerUpgradeTest, TestTwoTops) {
const string& input_proto =
"name: 'CaffeNet' "
"layers { "
" layer { "
" name: 'data' "
" type: 'data' "
" source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
" meanfile: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
" batchsize: 256 "
" cropsize: 227 "
" mirror: true "
" } "
" top: 'data' "
" top: 'label' "
"} "
"layers { "
" layer { "
" name: 'pad1' "
" type: 'padding' "
" pad: 2 "
" } "
" bottom: 'data' "
" top: 'pad1' "
"} "
"layers { "
" layer { "
" name: 'conv1' "
" type: 'conv' "
" num_output: 96 "
" kernelsize: 11 "
" stride: 4 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.01 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 0. "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'pad1' "
" top: 'conv1' "
"} "
"layers { "
" layer { "
" name: 'fc8' "
" type: 'innerproduct' "
" num_output: 1000 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.01 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 0 "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'conv1' "
" top: 'fc8' "
"} "
"layers { "
" layer { "
" name: 'conv2' "
" type: 'conv' "
" num_output: 96 "
" kernelsize: 11 "
" stride: 4 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.01 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 0. "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'pad1' "
" top: 'conv2' "
"} "
"layers { "
" layer { "
" name: 'loss' "
" type: 'softmax_loss' "
" } "
" bottom: 'fc8' "
" bottom: 'label' "
"} ";
const string& expected_output_proto =
"name: 'CaffeNet' "
"layers { "
" layer { "
" name: 'data' "
" type: 'data' "
" source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
" meanfile: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
" batchsize: 256 "
" cropsize: 227 "
" mirror: true "
" } "
" top: 'data' "
" top: 'label' "
"} "
"layers { "
" layer { "
" name: 'conv1' "
" type: 'conv' "
" num_output: 96 "
" kernelsize: 11 "
" stride: 4 "
" pad: 2 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.01 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 0. "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'data' "
" top: 'conv1' "
"} "
"layers { "
" layer { "
" name: 'fc8' "
" type: 'innerproduct' "
" num_output: 1000 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.01 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 0 "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'conv1' "
" top: 'fc8' "
"} "
"layers { "
" layer { "
" name: 'conv2' "
" type: 'conv' "
" num_output: 96 "
" kernelsize: 11 "
" stride: 4 "
" pad: 2 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.01 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 0. "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'data' "
" top: 'conv2' "
"} "
"layers { "
" layer { "
" name: 'loss' "
" type: 'softmax_loss' "
" } "
" bottom: 'fc8' "
" bottom: 'label' "
"} ";
this->RunPaddingUpgradeTest(input_proto, expected_output_proto);
}
TEST_F(PaddingLayerUpgradeTest, TestImageNet) {
const string& input_proto =
"name: 'CaffeNet' "
"layers { "
" layer { "
" name: 'data' "
" type: 'data' "
" source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
" meanfile: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
" batchsize: 256 "
" cropsize: 227 "
" mirror: true "
" } "
" top: 'data' "
" top: 'label' "
"} "
"layers { "
" layer { "
" name: 'conv1' "
" type: 'conv' "
" num_output: 96 "
" kernelsize: 11 "
" stride: 4 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.01 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 0. "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'data' "
" top: 'conv1' "
"} "
"layers { "
" layer { "
" name: 'relu1' "
" type: 'relu' "
" } "
" bottom: 'conv1' "
" top: 'conv1' "
"} "
"layers { "
" layer { "
" name: 'pool1' "
" type: 'pool' "
" pool: MAX "
" kernelsize: 3 "
" stride: 2 "
" } "
" bottom: 'conv1' "
" top: 'pool1' "
"} "
"layers { "
" layer { "
" name: 'norm1' "
" type: 'lrn' "
" local_size: 5 "
" alpha: 0.0001 "
" beta: 0.75 "
" } "
" bottom: 'pool1' "
" top: 'norm1' "
"} "
"layers { "
" layer { "
" name: 'pad2' "
" type: 'padding' "
" pad: 2 "
" } "
" bottom: 'norm1' "
" top: 'pad2' "
"} "
"layers { "
" layer { "
" name: 'conv2' "
" type: 'conv' "
" num_output: 256 "
" group: 2 "
" kernelsize: 5 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.01 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 1. "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'pad2' "
" top: 'conv2' "
"} "
"layers { "
" layer { "
" name: 'relu2' "
" type: 'relu' "
" } "
" bottom: 'conv2' "
" top: 'conv2' "
"} "
"layers { "
" layer { "
" name: 'pool2' "
" type: 'pool' "
" pool: MAX "
" kernelsize: 3 "
" stride: 2 "
" } "
" bottom: 'conv2' "
" top: 'pool2' "
"} "
"layers { "
" layer { "
" name: 'norm2' "
" type: 'lrn' "
" local_size: 5 "
" alpha: 0.0001 "
" beta: 0.75 "
" } "
" bottom: 'pool2' "
" top: 'norm2' "
"} "
"layers { "
" layer { "
" name: 'pad3' "
" type: 'padding' "
" pad: 1 "
" } "
" bottom: 'norm2' "
" top: 'pad3' "
"} "
"layers { "
" layer { "
" name: 'conv3' "
" type: 'conv' "
" num_output: 384 "
" kernelsize: 3 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.01 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 0. "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'pad3' "
" top: 'conv3' "
"} "
"layers { "
" layer { "
" name: 'relu3' "
" type: 'relu' "
" } "
" bottom: 'conv3' "
" top: 'conv3' "
"} "
"layers { "
" layer { "
" name: 'pad4' "
" type: 'padding' "
" pad: 1 "
" } "
" bottom: 'conv3' "
" top: 'pad4' "
"} "
"layers { "
" layer { "
" name: 'conv4' "
" type: 'conv' "
" num_output: 384 "
" group: 2 "
" kernelsize: 3 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.01 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 1. "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'pad4' "
" top: 'conv4' "
"} "
"layers { "
" layer { "
" name: 'relu4' "
" type: 'relu' "
" } "
" bottom: 'conv4' "
" top: 'conv4' "
"} "
"layers { "
" layer { "
" name: 'pad5' "
" type: 'padding' "
" pad: 1 "
" } "
" bottom: 'conv4' "
" top: 'pad5' "
"} "
"layers { "
" layer { "
" name: 'conv5' "
" type: 'conv' "
" num_output: 256 "
" group: 2 "
" kernelsize: 3 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.01 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 1. "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'pad5' "
" top: 'conv5' "
"} "
"layers { "
" layer { "
" name: 'relu5' "
" type: 'relu' "
" } "
" bottom: 'conv5' "
" top: 'conv5' "
"} "
"layers { "
" layer { "
" name: 'pool5' "
" type: 'pool' "
" kernelsize: 3 "
" pool: MAX "
" stride: 2 "
" } "
" bottom: 'conv5' "
" top: 'pool5' "
"} "
"layers { "
" layer { "
" name: 'fc6' "
" type: 'innerproduct' "
" num_output: 4096 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.005 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 1. "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'pool5' "
" top: 'fc6' "
"} "
"layers { "
" layer { "
" name: 'relu6' "
" type: 'relu' "
" } "
" bottom: 'fc6' "
" top: 'fc6' "
"} "
"layers { "
" layer { "
" name: 'drop6' "
" type: 'dropout' "
" dropout_ratio: 0.5 "
" } "
" bottom: 'fc6' "
" top: 'fc6' "
"} "
"layers { "
" layer { "
" name: 'fc7' "
" type: 'innerproduct' "
" num_output: 4096 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.005 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 1. "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'fc6' "
" top: 'fc7' "
"} "
"layers { "
" layer { "
" name: 'relu7' "
" type: 'relu' "
" } "
" bottom: 'fc7' "
" top: 'fc7' "
"} "
"layers { "
" layer { "
" name: 'drop7' "
" type: 'dropout' "
" dropout_ratio: 0.5 "
" } "
" bottom: 'fc7' "
" top: 'fc7' "
"} "
"layers { "
" layer { "
" name: 'fc8' "
" type: 'innerproduct' "
" num_output: 1000 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.01 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 0 "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'fc7' "
" top: 'fc8' "
"} "
"layers { "
" layer { "
" name: 'loss' "
" type: 'softmax_loss' "
" } "
" bottom: 'fc8' "
" bottom: 'label' "
"} ";
const string& expected_output_proto =
"name: 'CaffeNet' "
"layers { "
" layer { "
" name: 'data' "
" type: 'data' "
" source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
" meanfile: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
" batchsize: 256 "
" cropsize: 227 "
" mirror: true "
" } "
" top: 'data' "
" top: 'label' "
"} "
"layers { "
" layer { "
" name: 'conv1' "
" type: 'conv' "
" num_output: 96 "
" kernelsize: 11 "
" stride: 4 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.01 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 0. "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'data' "
" top: 'conv1' "
"} "
"layers { "
" layer { "
" name: 'relu1' "
" type: 'relu' "
" } "
" bottom: 'conv1' "
" top: 'conv1' "
"} "
"layers { "
" layer { "
" name: 'pool1' "
" type: 'pool' "
" pool: MAX "
" kernelsize: 3 "
" stride: 2 "
" } "
" bottom: 'conv1' "
" top: 'pool1' "
"} "
"layers { "
" layer { "
" name: 'norm1' "
" type: 'lrn' "
" local_size: 5 "
" alpha: 0.0001 "
" beta: 0.75 "
" } "
" bottom: 'pool1' "
" top: 'norm1' "
"} "
"layers { "
" layer { "
" name: 'conv2' "
" type: 'conv' "
" num_output: 256 "
" group: 2 "
" kernelsize: 5 "
" pad: 2 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.01 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 1. "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'norm1' "
" top: 'conv2' "
"} "
"layers { "
" layer { "
" name: 'relu2' "
" type: 'relu' "
" } "
" bottom: 'conv2' "
" top: 'conv2' "
"} "
"layers { "
" layer { "
" name: 'pool2' "
" type: 'pool' "
" pool: MAX "
" kernelsize: 3 "
" stride: 2 "
" } "
" bottom: 'conv2' "
" top: 'pool2' "
"} "
"layers { "
" layer { "
" name: 'norm2' "
" type: 'lrn' "
" local_size: 5 "
" alpha: 0.0001 "
" beta: 0.75 "
" } "
" bottom: 'pool2' "
" top: 'norm2' "
"} "
"layers { "
" layer { "
" name: 'conv3' "
" type: 'conv' "
" num_output: 384 "
" kernelsize: 3 "
" pad: 1 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.01 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 0. "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'norm2' "
" top: 'conv3' "
"} "
"layers { "
" layer { "
" name: 'relu3' "
" type: 'relu' "
" } "
" bottom: 'conv3' "
" top: 'conv3' "
"} "
"layers { "
" layer { "
" name: 'conv4' "
" type: 'conv' "
" num_output: 384 "
" group: 2 "
" kernelsize: 3 "
" pad: 1 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.01 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 1. "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'conv3' "
" top: 'conv4' "
"} "
"layers { "
" layer { "
" name: 'relu4' "
" type: 'relu' "
" } "
" bottom: 'conv4' "
" top: 'conv4' "
"} "
"layers { "
" layer { "
" name: 'conv5' "
" type: 'conv' "
" num_output: 256 "
" group: 2 "
" kernelsize: 3 "
" pad: 1 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.01 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 1. "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'conv4' "
" top: 'conv5' "
"} "
"layers { "
" layer { "
" name: 'relu5' "
" type: 'relu' "
" } "
" bottom: 'conv5' "
" top: 'conv5' "
"} "
"layers { "
" layer { "
" name: 'pool5' "
" type: 'pool' "
" kernelsize: 3 "
" pool: MAX "
" stride: 2 "
" } "
" bottom: 'conv5' "
" top: 'pool5' "
"} "
"layers { "
" layer { "
" name: 'fc6' "
" type: 'innerproduct' "
" num_output: 4096 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.005 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 1. "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'pool5' "
" top: 'fc6' "
"} "
"layers { "
" layer { "
" name: 'relu6' "
" type: 'relu' "
" } "
" bottom: 'fc6' "
" top: 'fc6' "
"} "
"layers { "
" layer { "
" name: 'drop6' "
" type: 'dropout' "
" dropout_ratio: 0.5 "
" } "
" bottom: 'fc6' "
" top: 'fc6' "
"} "
"layers { "
" layer { "
" name: 'fc7' "
" type: 'innerproduct' "
" num_output: 4096 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.005 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 1. "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'fc6' "
" top: 'fc7' "
"} "
"layers { "
" layer { "
" name: 'relu7' "
" type: 'relu' "
" } "
" bottom: 'fc7' "
" top: 'fc7' "
"} "
"layers { "
" layer { "
" name: 'drop7' "
" type: 'dropout' "
" dropout_ratio: 0.5 "
" } "
" bottom: 'fc7' "
" top: 'fc7' "
"} "
"layers { "
" layer { "
" name: 'fc8' "
" type: 'innerproduct' "
" num_output: 1000 "
" weight_filler { "
" type: 'gaussian' "
" std: 0.01 "
" } "
" bias_filler { "
" type: 'constant' "
" value: 0 "
" } "
" blobs_lr: 1. "
" blobs_lr: 2. "
" weight_decay: 1. "
" weight_decay: 0. "
" } "
" bottom: 'fc7' "
" top: 'fc8' "
"} "
"layers { "
" layer { "
" name: 'loss' "
" type: 'softmax_loss' "
" } "
" bottom: 'fc8' "
" bottom: 'label' "
"} ";
this->RunPaddingUpgradeTest(input_proto, expected_output_proto);
}
// Fixture for testing full NetParameter schema upgrades: V0 -> V1
// (UpgradeV0Net) and V1 -> current (UpgradeV1Net). Each helper parses two
// text protos, upgrades the first, and expects it to equal the second.
class NetUpgradeTest : public ::testing::Test {
 protected:
  // Upgrade a V0 NetParameter and compare against the expected proto text.
  void RunV0UpgradeTest(
      const string& input_param_string, const string& output_param_string) {
    // Test that UpgradeV0Net called on the NetParameter proto specified by
    // input_param_string results in the NetParameter proto specified by
    // output_param_string.
    NetParameter input_param;
    CHECK(google::protobuf::TextFormat::ParseFromString(
        input_param_string, &input_param));
    NetParameter expected_output_param;
    CHECK(google::protobuf::TextFormat::ParseFromString(
        output_param_string, &expected_output_param));
    NetParameter actual_output_param;
    UpgradeV0Net(input_param, &actual_output_param);
    EXPECT_EQ(expected_output_param.DebugString(),
        actual_output_param.DebugString());
  }
  // Upgrade a V1 NetParameter and compare against the expected proto text.
  void RunV1UpgradeTest(
      const string& input_param_string, const string& output_param_string) {
    // Test that UpgradeV1Net called on the NetParameter proto specified by
    // input_param_string results in the NetParameter proto specified by
    // output_param_string.
    NetParameter input_param;
    CHECK(google::protobuf::TextFormat::ParseFromString(
        input_param_string, &input_param));
    NetParameter expected_output_param;
    CHECK(google::protobuf::TextFormat::ParseFromString(
        output_param_string, &expected_output_param));
    NetParameter actual_output_param;
    UpgradeV1Net(input_param, &actual_output_param);
    EXPECT_EQ(expected_output_param.DebugString(),
        actual_output_param.DebugString());
  }
};
TEST_F(NetUpgradeTest, TestSimple) {
// Small V0 net (data -> padding -> conv -> inner product -> softmax loss)
// run through both upgrade steps end to end.
const string& v0_proto =
"name: 'CaffeNet' "
"layers { "
"  layer { "
"    name: 'data' "
"    type: 'data' "
"    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
"    meanfile: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
"    batchsize: 256 "
"    cropsize: 227 "
"    mirror: true "
"  } "
"  top: 'data' "
"  top: 'label' "
"} "
"layers { "
"  layer { "
"    name: 'pad1' "
"    type: 'padding' "
"    pad: 2 "
"  } "
"  bottom: 'data' "
"  top: 'pad1' "
"} "
"layers { "
"  layer { "
"    name: 'conv1' "
"    type: 'conv' "
"    num_output: 96 "
"    kernelsize: 11 "
"    stride: 4 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 0. "
"    } "
"    blobs_lr: 1. "
"    blobs_lr: 2. "
"    weight_decay: 1. "
"    weight_decay: 0. "
"  } "
"  bottom: 'pad1' "
"  top: 'conv1' "
"} "
"layers { "
"  layer { "
"    name: 'fc8' "
"    type: 'innerproduct' "
"    num_output: 1000 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 0 "
"    } "
"    blobs_lr: 1. "
"    blobs_lr: 2. "
"    weight_decay: 1. "
"    weight_decay: 0. "
"  } "
"  bottom: 'conv1' "
"  top: 'fc8' "
"} "
"layers { "
"  layer { "
"    name: 'loss' "
"    type: 'softmax_loss' "
"  } "
"  bottom: 'fc8' "
"  bottom: 'label' "
"} ";
// After V0->V1: the standalone 'padding' layer is folded into conv1's 'pad'
// field, data-layer preprocessing fields move into transform_param, and
// layer types become enum values on flat 'layers' entries.
const string& expected_v1_proto =
"name: 'CaffeNet' "
"layers { "
"  name: 'data' "
"  type: DATA "
"  data_param { "
"    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
"    batch_size: 256 "
"  } "
"  transform_param { "
"    crop_size: 227 "
"    mirror: true "
"    mean_file: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
"  } "
"  top: 'data' "
"  top: 'label' "
"} "
"layers { "
"  name: 'conv1' "
"  type: CONVOLUTION "
"  convolution_param { "
"    num_output: 96 "
"    kernel_size: 11 "
"    stride: 4 "
"    pad: 2 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 0. "
"    } "
"  } "
"  blobs_lr: 1. "
"  blobs_lr: 2. "
"  weight_decay: 1. "
"  weight_decay: 0. "
"  bottom: 'data' "
"  top: 'conv1' "
"} "
"layers { "
"  name: 'fc8' "
"  type: INNER_PRODUCT "
"  inner_product_param { "
"    num_output: 1000 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 0 "
"    } "
"  } "
"  blobs_lr: 1. "
"  blobs_lr: 2. "
"  weight_decay: 1. "
"  weight_decay: 0. "
"  bottom: 'conv1' "
"  top: 'fc8' "
"} "
"layers { "
"  name: 'loss' "
"  type: SOFTMAX_LOSS "
"  bottom: 'fc8' "
"  bottom: 'label' "
"} ";
this->RunV0UpgradeTest(v0_proto, expected_v1_proto);
// After V1->V2: 'layers' becomes 'layer', types become CamelCase strings,
// and blobs_lr/weight_decay pairs become per-blob 'param' blocks.
const string& expected_v2_proto =
"name: 'CaffeNet' "
"layer { "
"  name: 'data' "
"  type: 'Data' "
"  data_param { "
"    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
"    batch_size: 256 "
"  } "
"  transform_param { "
"    crop_size: 227 "
"    mirror: true "
"    mean_file: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
"  } "
"  top: 'data' "
"  top: 'label' "
"} "
"layer { "
"  name: 'conv1' "
"  type: 'Convolution' "
"  convolution_param { "
"    num_output: 96 "
"    kernel_size: 11 "
"    stride: 4 "
"    pad: 2 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 0. "
"    } "
"  } "
"  param { "
"    lr_mult: 1 "
"    decay_mult: 1 "
"  } "
"  param { "
"    lr_mult: 2 "
"    decay_mult: 0 "
"  } "
"  bottom: 'data' "
"  top: 'conv1' "
"} "
"layer { "
"  name: 'fc8' "
"  type: 'InnerProduct' "
"  inner_product_param { "
"    num_output: 1000 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 0 "
"    } "
"  } "
"  param { "
"    lr_mult: 1 "
"    decay_mult: 1 "
"  } "
"  param { "
"    lr_mult: 2 "
"    decay_mult: 0 "
"  } "
"  bottom: 'conv1' "
"  top: 'fc8' "
"} "
"layer { "
"  name: 'loss' "
"  type: 'SoftmaxWithLoss' "
"  bottom: 'fc8' "
"  bottom: 'label' "
"} ";
this->RunV1UpgradeTest(expected_v1_proto, expected_v2_proto);
}
// Test any layer or parameter upgrades not covered by other tests.
TEST_F(NetUpgradeTest, TestAllParams) {
// A V0 net containing one instance of (nearly) every legacy layer type and
// legacy field spelling, to pin down the full V0->V1 field mapping.
const string& input_proto =
"name: 'CaffeNet' "
"input: 'input_data' "
"input_dim: 64 "
"input_dim: 3 "
"input_dim: 32 "
"input_dim: 32 "
"layers { "
"  layer { "
"    name: 'data' "
"    type: 'data' "
"    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
"    meanfile: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
"    batchsize: 256 "
"    cropsize: 227 "
"    mirror: true "
"    scale: 0.25 "
"    rand_skip: 73 "
"  } "
"  top: 'data' "
"  top: 'label' "
"} "
"layers { "
"  layer { "
"    name: 'images' "
"    type: 'images' "
"    source: '/home/jiayq/Data/ILSVRC12/train-images' "
"    meanfile: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
"    batchsize: 256 "
"    cropsize: 227 "
"    mirror: true "
"    scale: 0.25 "
"    rand_skip: 73 "
"    shuffle_images: true "
"    new_height: 40 "
"    new_width: 30 "
"  } "
"  top: 'images_data' "
"  top: 'images_label' "
"} "
"layers { "
"  layer { "
"    name: 'window_data' "
"    type: 'window_data' "
"    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
"    meanfile: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
"    batchsize: 256 "
"    cropsize: 227 "
"    mirror: true "
"    det_fg_threshold: 0.25 "
"    det_bg_threshold: 0.75 "
"    det_fg_fraction: 0.5 "
"    det_context_pad: 16 "
"    det_crop_mode: 'square' "
"  } "
"  top: 'window_data' "
"  top: 'window_label' "
"} "
"layers { "
"  layer { "
"    name: 'hdf5data' "
"    type: 'hdf5_data' "
"    source: '/my/hdf5/data' "
"    batchsize: 256 "
"  } "
"  top: 'hdf5data' "
"} "
"layers { "
"  layer { "
"    name: 'conv1' "
"    type: 'conv' "
"    num_output: 96 "
"    biasterm: false "
"    pad: 4 "
"    kernelsize: 11 "
"    stride: 4 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 3. "
"    } "
"    blobs_lr: 1. "
"    blobs_lr: 2. "
"    weight_decay: 1. "
"    weight_decay: 0. "
"  } "
"  bottom: 'data' "
"  top: 'conv1' "
"} "
"layers { "
"  layer { "
"    name: 'pool1ave' "
"    type: 'pool' "
"    pool: AVE "
"    kernelsize: 3 "
"    stride: 2 "
"  } "
"  bottom: 'conv1' "
"  top: 'pool1ave' "
"} "
"layers { "
"  layer { "
"    name: 'pool1stoch' "
"    type: 'pool' "
"    pool: STOCHASTIC "
"    kernelsize: 4 "
"    stride: 5 "
"  } "
"  bottom: 'conv1' "
"  top: 'pool1stoch' "
"} "
"layers { "
"  layer { "
"    name: 'concat' "
"    type: 'concat' "
"    concat_dim: 2 "
"  } "
"  bottom: 'pool1ave' "
"  bottom: 'pool1stoch' "
"  top: 'pool1concat' "
"} "
"layers { "
"  layer { "
"    name: 'norm1' "
"    type: 'lrn' "
"    local_size: 5 "
"    alpha: 0.0001 "
"    beta: 0.75 "
"  } "
"  bottom: 'pool1concat' "
"  top: 'norm1' "
"} "
"layers { "
"  layer { "
"    name: 'fc6' "
"    type: 'innerproduct' "
"    num_output: 4096 "
"    biasterm: false "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.005 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 1. "
"    } "
"    blobs_lr: 1. "
"    blobs_lr: 2. "
"    weight_decay: 1. "
"    weight_decay: 0. "
"  } "
"  bottom: 'norm1' "
"  top: 'fc6' "
"} "
"layers { "
"  layer { "
"    name: 'relu6' "
"    type: 'relu' "
"  } "
"  bottom: 'fc6' "
"  top: 'fc6' "
"} "
"layers { "
"  layer { "
"    name: 'drop6' "
"    type: 'dropout' "
"    dropout_ratio: 0.2 "
"  } "
"  bottom: 'fc6' "
"  top: 'fc6' "
"} "
"layers { "
"  layer { "
"    name: 'loss' "
"    type: 'infogain_loss' "
"    source: '/my/infogain/matrix' "
"  } "
"  bottom: 'fc6' "
"  bottom: 'label' "
"} "
"layers { "
"  layer { "
"    name: 'accuracy' "
"    type: 'accuracy' "
"  } "
"} "
"layers { "
"  layer { "
"    name: 'bnll' "
"    type: 'bnll' "
"  } "
"} "
"layers { "
"  layer { "
"    name: 'euclidean_loss' "
"    type: 'euclidean_loss' "
"  } "
"} "
"layers { "
"  layer { "
"    name: 'flatten' "
"    type: 'flatten' "
"  } "
"} "
"layers { "
"  layer { "
"    name: 'hdf5_output' "
"    type: 'hdf5_output' "
"    hdf5_output_param { "
"      file_name: '/my/hdf5/output/file' "
"    } "
"  } "
"} "
"layers { "
"  layer { "
"    name: 'im2col' "
"    type: 'im2col' "
"  } "
"} "
"layers { "
"  layer { "
"    name: 'images' "
"    type: 'images' "
"  } "
"} "
"layers { "
"  layer { "
"    name: 'multinomial_logistic_loss' "
"    type: 'multinomial_logistic_loss' "
"  } "
"} "
"layers { "
"  layer { "
"    name: 'sigmoid' "
"    type: 'sigmoid' "
"  } "
"} "
"layers { "
"  layer { "
"    name: 'softmax' "
"    type: 'softmax' "
"  } "
"} "
"layers { "
"  layer { "
"    name: 'split' "
"    type: 'split' "
"  } "
"} "
"layers { "
"  layer { "
"    name: 'tanh' "
"    type: 'tanh' "
"  } "
"} ";
// Expected V1 form: every legacy field lands in its typed *_param message
// (e.g. det_* -> window_data_param, preprocessing -> transform_param), and
// each legacy type string maps to its enum value.
const string& expected_output_proto =
"name: 'CaffeNet' "
"input: 'input_data' "
"input_dim: 64 "
"input_dim: 3 "
"input_dim: 32 "
"input_dim: 32 "
"layers { "
"  name: 'data' "
"  type: DATA "
"  data_param { "
"    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
"    batch_size: 256 "
"    rand_skip: 73 "
"  } "
"  transform_param { "
"    crop_size: 227 "
"    mirror: true "
"    scale: 0.25 "
"    mean_file: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
"  } "
"  top: 'data' "
"  top: 'label' "
"} "
"layers { "
"  name: 'images' "
"  type: IMAGE_DATA "
"  image_data_param { "
"    source: '/home/jiayq/Data/ILSVRC12/train-images' "
"    batch_size: 256 "
"    rand_skip: 73 "
"    shuffle: true "
"    new_height: 40 "
"    new_width: 30 "
"  } "
"  transform_param {"
"    mean_file: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
"    crop_size: 227 "
"    mirror: true "
"    scale: 0.25 "
"  } "
"  top: 'images_data' "
"  top: 'images_label' "
"} "
"layers { "
"  name: 'window_data' "
"  type: WINDOW_DATA "
"  window_data_param { "
"    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
"    batch_size: 256 "
"    fg_threshold: 0.25 "
"    bg_threshold: 0.75 "
"    fg_fraction: 0.5 "
"    context_pad: 16 "
"    crop_mode: 'square' "
"  } "
"  transform_param { "
"    mirror: true "
"    crop_size: 227 "
"    mean_file: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
"  }"
"  top: 'window_data' "
"  top: 'window_label' "
"} "
"layers { "
"  name: 'hdf5data' "
"  type: HDF5_DATA "
"  hdf5_data_param { "
"    source: '/my/hdf5/data' "
"    batch_size: 256 "
"  } "
"  top: 'hdf5data' "
"} "
"layers { "
"  name: 'conv1' "
"  type: CONVOLUTION "
"  convolution_param { "
"    num_output: 96 "
"    bias_term: false "
"    pad: 4 "
"    kernel_size: 11 "
"    stride: 4 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 3. "
"    } "
"  } "
"  blobs_lr: 1. "
"  blobs_lr: 2. "
"  weight_decay: 1. "
"  weight_decay: 0. "
"  bottom: 'data' "
"  top: 'conv1' "
"} "
"layers { "
"  name: 'pool1ave' "
"  type: POOLING "
"  pooling_param { "
"    pool: AVE "
"    kernel_size: 3 "
"    stride: 2 "
"  } "
"  bottom: 'conv1' "
"  top: 'pool1ave' "
"} "
"layers { "
"  name: 'pool1stoch' "
"  type: POOLING "
"  pooling_param { "
"    pool: STOCHASTIC "
"    kernel_size: 4 "
"    stride: 5 "
"  } "
"  bottom: 'conv1' "
"  top: 'pool1stoch' "
"} "
"layers { "
"  name: 'concat' "
"  type: CONCAT "
"  concat_param { "
"    concat_dim: 2 "
"  } "
"  bottom: 'pool1ave' "
"  bottom: 'pool1stoch' "
"  top: 'pool1concat' "
"} "
"layers { "
"  name: 'norm1' "
"  type: LRN "
"  lrn_param { "
"    local_size: 5 "
"    alpha: 0.0001 "
"    beta: 0.75 "
"  } "
"  bottom: 'pool1concat' "
"  top: 'norm1' "
"} "
"layers { "
"  name: 'fc6' "
"  type: INNER_PRODUCT "
"  inner_product_param { "
"    num_output: 4096 "
"    bias_term: false "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.005 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 1. "
"    } "
"  } "
"  blobs_lr: 1. "
"  blobs_lr: 2. "
"  weight_decay: 1. "
"  weight_decay: 0. "
"  bottom: 'norm1' "
"  top: 'fc6' "
"} "
"layers { "
"  name: 'relu6' "
"  type: RELU "
"  bottom: 'fc6' "
"  top: 'fc6' "
"} "
"layers { "
"  name: 'drop6' "
"  type: DROPOUT "
"  dropout_param { "
"    dropout_ratio: 0.2 "
"  } "
"  bottom: 'fc6' "
"  top: 'fc6' "
"} "
"layers { "
"  name: 'loss' "
"  type: INFOGAIN_LOSS "
"  infogain_loss_param { "
"    source: '/my/infogain/matrix' "
"  } "
"  bottom: 'fc6' "
"  bottom: 'label' "
"} "
"layers { "
"  name: 'accuracy' "
"  type: ACCURACY "
"} "
"layers { "
"  name: 'bnll' "
"  type: BNLL "
"} "
"layers { "
"  name: 'euclidean_loss' "
"  type: EUCLIDEAN_LOSS "
"} "
"layers { "
"  name: 'flatten' "
"  type: FLATTEN "
"} "
"layers { "
"  name: 'hdf5_output' "
"  type: HDF5_OUTPUT "
"  hdf5_output_param { "
"    file_name: '/my/hdf5/output/file' "
"  } "
"} "
"layers { "
"  name: 'im2col' "
"  type: IM2COL "
"} "
"layers { "
"  name: 'images' "
"  type: IMAGE_DATA "
"} "
"layers { "
"  name: 'multinomial_logistic_loss' "
"  type: MULTINOMIAL_LOGISTIC_LOSS "
"} "
"layers { "
"  name: 'sigmoid' "
"  type: SIGMOID "
"} "
"layers { "
"  name: 'softmax' "
"  type: SOFTMAX "
"} "
"layers { "
"  name: 'split' "
"  type: SPLIT "
"} "
"layers { "
"  name: 'tanh' "
"  type: TANH "
"} ";
this->RunV0UpgradeTest(input_proto, expected_output_proto);
}
TEST_F(NetUpgradeTest, TestImageNet) {
// Full AlexNet-style V0 definition (with standalone 'padding' layers) run
// through both upgrade steps.
const string& v0_proto =
"name: 'CaffeNet' "
"layers { "
"  layer { "
"    name: 'data' "
"    type: 'data' "
"    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
"    meanfile: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
"    batchsize: 256 "
"    cropsize: 227 "
"    mirror: true "
"  } "
"  top: 'data' "
"  top: 'label' "
"} "
"layers { "
"  layer { "
"    name: 'conv1' "
"    type: 'conv' "
"    num_output: 96 "
"    kernelsize: 11 "
"    stride: 4 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 0. "
"    } "
"    blobs_lr: 1. "
"    blobs_lr: 2. "
"    weight_decay: 1. "
"    weight_decay: 0. "
"  } "
"  bottom: 'data' "
"  top: 'conv1' "
"} "
"layers { "
"  layer { "
"    name: 'relu1' "
"    type: 'relu' "
"  } "
"  bottom: 'conv1' "
"  top: 'conv1' "
"} "
"layers { "
"  layer { "
"    name: 'pool1' "
"    type: 'pool' "
"    pool: MAX "
"    kernelsize: 3 "
"    stride: 2 "
"  } "
"  bottom: 'conv1' "
"  top: 'pool1' "
"} "
"layers { "
"  layer { "
"    name: 'norm1' "
"    type: 'lrn' "
"    local_size: 5 "
"    alpha: 0.0001 "
"    beta: 0.75 "
"  } "
"  bottom: 'pool1' "
"  top: 'norm1' "
"} "
"layers { "
"  layer { "
"    name: 'pad2' "
"    type: 'padding' "
"    pad: 2 "
"  } "
"  bottom: 'norm1' "
"  top: 'pad2' "
"} "
"layers { "
"  layer { "
"    name: 'conv2' "
"    type: 'conv' "
"    num_output: 256 "
"    group: 2 "
"    kernelsize: 5 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 1. "
"    } "
"    blobs_lr: 1. "
"    blobs_lr: 2. "
"    weight_decay: 1. "
"    weight_decay: 0. "
"  } "
"  bottom: 'pad2' "
"  top: 'conv2' "
"} "
"layers { "
"  layer { "
"    name: 'relu2' "
"    type: 'relu' "
"  } "
"  bottom: 'conv2' "
"  top: 'conv2' "
"} "
"layers { "
"  layer { "
"    name: 'pool2' "
"    type: 'pool' "
"    pool: MAX "
"    kernelsize: 3 "
"    stride: 2 "
"  } "
"  bottom: 'conv2' "
"  top: 'pool2' "
"} "
"layers { "
"  layer { "
"    name: 'norm2' "
"    type: 'lrn' "
"    local_size: 5 "
"    alpha: 0.0001 "
"    beta: 0.75 "
"  } "
"  bottom: 'pool2' "
"  top: 'norm2' "
"} "
"layers { "
"  layer { "
"    name: 'pad3' "
"    type: 'padding' "
"    pad: 1 "
"  } "
"  bottom: 'norm2' "
"  top: 'pad3' "
"} "
"layers { "
"  layer { "
"    name: 'conv3' "
"    type: 'conv' "
"    num_output: 384 "
"    kernelsize: 3 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 0. "
"    } "
"    blobs_lr: 1. "
"    blobs_lr: 2. "
"    weight_decay: 1. "
"    weight_decay: 0. "
"  } "
"  bottom: 'pad3' "
"  top: 'conv3' "
"} "
"layers { "
"  layer { "
"    name: 'relu3' "
"    type: 'relu' "
"  } "
"  bottom: 'conv3' "
"  top: 'conv3' "
"} "
"layers { "
"  layer { "
"    name: 'pad4' "
"    type: 'padding' "
"    pad: 1 "
"  } "
"  bottom: 'conv3' "
"  top: 'pad4' "
"} "
"layers { "
"  layer { "
"    name: 'conv4' "
"    type: 'conv' "
"    num_output: 384 "
"    group: 2 "
"    kernelsize: 3 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 1. "
"    } "
"    blobs_lr: 1. "
"    blobs_lr: 2. "
"    weight_decay: 1. "
"    weight_decay: 0. "
"  } "
"  bottom: 'pad4' "
"  top: 'conv4' "
"} "
"layers { "
"  layer { "
"    name: 'relu4' "
"    type: 'relu' "
"  } "
"  bottom: 'conv4' "
"  top: 'conv4' "
"} "
"layers { "
"  layer { "
"    name: 'pad5' "
"    type: 'padding' "
"    pad: 1 "
"  } "
"  bottom: 'conv4' "
"  top: 'pad5' "
"} "
"layers { "
"  layer { "
"    name: 'conv5' "
"    type: 'conv' "
"    num_output: 256 "
"    group: 2 "
"    kernelsize: 3 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 1. "
"    } "
"    blobs_lr: 1. "
"    blobs_lr: 2. "
"    weight_decay: 1. "
"    weight_decay: 0. "
"  } "
"  bottom: 'pad5' "
"  top: 'conv5' "
"} "
"layers { "
"  layer { "
"    name: 'relu5' "
"    type: 'relu' "
"  } "
"  bottom: 'conv5' "
"  top: 'conv5' "
"} "
"layers { "
"  layer { "
"    name: 'pool5' "
"    type: 'pool' "
"    kernelsize: 3 "
"    pool: MAX "
"    stride: 2 "
"  } "
"  bottom: 'conv5' "
"  top: 'pool5' "
"} "
"layers { "
"  layer { "
"    name: 'fc6' "
"    type: 'innerproduct' "
"    num_output: 4096 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.005 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 1. "
"    } "
"    blobs_lr: 1. "
"    blobs_lr: 2. "
"    weight_decay: 1. "
"    weight_decay: 0. "
"  } "
"  bottom: 'pool5' "
"  top: 'fc6' "
"} "
"layers { "
"  layer { "
"    name: 'relu6' "
"    type: 'relu' "
"  } "
"  bottom: 'fc6' "
"  top: 'fc6' "
"} "
"layers { "
"  layer { "
"    name: 'drop6' "
"    type: 'dropout' "
"    dropout_ratio: 0.5 "
"  } "
"  bottom: 'fc6' "
"  top: 'fc6' "
"} "
"layers { "
"  layer { "
"    name: 'fc7' "
"    type: 'innerproduct' "
"    num_output: 4096 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.005 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 1. "
"    } "
"    blobs_lr: 1. "
"    blobs_lr: 2. "
"    weight_decay: 1. "
"    weight_decay: 0. "
"  } "
"  bottom: 'fc6' "
"  top: 'fc7' "
"} "
"layers { "
"  layer { "
"    name: 'relu7' "
"    type: 'relu' "
"  } "
"  bottom: 'fc7' "
"  top: 'fc7' "
"} "
"layers { "
"  layer { "
"    name: 'drop7' "
"    type: 'dropout' "
"    dropout_ratio: 0.5 "
"  } "
"  bottom: 'fc7' "
"  top: 'fc7' "
"} "
"layers { "
"  layer { "
"    name: 'fc8' "
"    type: 'innerproduct' "
"    num_output: 1000 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 0 "
"    } "
"    blobs_lr: 1. "
"    blobs_lr: 2. "
"    weight_decay: 1. "
"    weight_decay: 0. "
"  } "
"  bottom: 'fc7' "
"  top: 'fc8' "
"} "
"layers { "
"  layer { "
"    name: 'loss' "
"    type: 'softmax_loss' "
"  } "
"  bottom: 'fc8' "
"  bottom: 'label' "
"} ";
// After V0->V1: each 'padding' layer disappears, its pad value moving into
// the following convolution's convolution_param.
const string& expected_v1_proto =
"name: 'CaffeNet' "
"layers { "
"  name: 'data' "
"  type: DATA "
"  data_param { "
"    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
"    batch_size: 256 "
"  } "
"  transform_param { "
"    crop_size: 227 "
"    mirror: true "
"    mean_file: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
"  } "
"  top: 'data' "
"  top: 'label' "
"} "
"layers { "
"  name: 'conv1' "
"  type: CONVOLUTION "
"  convolution_param { "
"    num_output: 96 "
"    kernel_size: 11 "
"    stride: 4 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 0. "
"    } "
"  } "
"  blobs_lr: 1. "
"  blobs_lr: 2. "
"  weight_decay: 1. "
"  weight_decay: 0. "
"  bottom: 'data' "
"  top: 'conv1' "
"} "
"layers { "
"  name: 'relu1' "
"  type: RELU "
"  bottom: 'conv1' "
"  top: 'conv1' "
"} "
"layers { "
"  name: 'pool1' "
"  type: POOLING "
"  pooling_param { "
"    pool: MAX "
"    kernel_size: 3 "
"    stride: 2 "
"  } "
"  bottom: 'conv1' "
"  top: 'pool1' "
"} "
"layers { "
"  name: 'norm1' "
"  type: LRN "
"  lrn_param { "
"    local_size: 5 "
"    alpha: 0.0001 "
"    beta: 0.75 "
"  } "
"  bottom: 'pool1' "
"  top: 'norm1' "
"} "
"layers { "
"  name: 'conv2' "
"  type: CONVOLUTION "
"  convolution_param { "
"    num_output: 256 "
"    group: 2 "
"    kernel_size: 5 "
"    pad: 2 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 1. "
"    } "
"  } "
"  blobs_lr: 1. "
"  blobs_lr: 2. "
"  weight_decay: 1. "
"  weight_decay: 0. "
"  bottom: 'norm1' "
"  top: 'conv2' "
"} "
"layers { "
"  name: 'relu2' "
"  type: RELU "
"  bottom: 'conv2' "
"  top: 'conv2' "
"} "
"layers { "
"  name: 'pool2' "
"  type: POOLING "
"  pooling_param { "
"    pool: MAX "
"    kernel_size: 3 "
"    stride: 2 "
"  } "
"  bottom: 'conv2' "
"  top: 'pool2' "
"} "
"layers { "
"  name: 'norm2' "
"  type: LRN "
"  lrn_param { "
"    local_size: 5 "
"    alpha: 0.0001 "
"    beta: 0.75 "
"  } "
"  bottom: 'pool2' "
"  top: 'norm2' "
"} "
"layers { "
"  name: 'conv3' "
"  type: CONVOLUTION "
"  convolution_param { "
"    num_output: 384 "
"    kernel_size: 3 "
"    pad: 1 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 0. "
"    } "
"  } "
"  blobs_lr: 1. "
"  blobs_lr: 2. "
"  weight_decay: 1. "
"  weight_decay: 0. "
"  bottom: 'norm2' "
"  top: 'conv3' "
"} "
"layers { "
"  name: 'relu3' "
"  type: RELU "
"  bottom: 'conv3' "
"  top: 'conv3' "
"} "
"layers { "
"  name: 'conv4' "
"  type: CONVOLUTION "
"  convolution_param { "
"    num_output: 384 "
"    group: 2 "
"    kernel_size: 3 "
"    pad: 1 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 1. "
"    } "
"  } "
"  blobs_lr: 1. "
"  blobs_lr: 2. "
"  weight_decay: 1. "
"  weight_decay: 0. "
"  bottom: 'conv3' "
"  top: 'conv4' "
"} "
"layers { "
"  name: 'relu4' "
"  type: RELU "
"  bottom: 'conv4' "
"  top: 'conv4' "
"} "
"layers { "
"  name: 'conv5' "
"  type: CONVOLUTION "
"  convolution_param { "
"    num_output: 256 "
"    group: 2 "
"    kernel_size: 3 "
"    pad: 1 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 1. "
"    } "
"  } "
"  blobs_lr: 1. "
"  blobs_lr: 2. "
"  weight_decay: 1. "
"  weight_decay: 0. "
"  bottom: 'conv4' "
"  top: 'conv5' "
"} "
"layers { "
"  name: 'relu5' "
"  type: RELU "
"  bottom: 'conv5' "
"  top: 'conv5' "
"} "
"layers { "
"  name: 'pool5' "
"  type: POOLING "
"  pooling_param { "
"    kernel_size: 3 "
"    pool: MAX "
"    stride: 2 "
"  } "
"  bottom: 'conv5' "
"  top: 'pool5' "
"} "
"layers { "
"  name: 'fc6' "
"  type: INNER_PRODUCT "
"  inner_product_param { "
"    num_output: 4096 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.005 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 1. "
"    } "
"  } "
"  blobs_lr: 1. "
"  blobs_lr: 2. "
"  weight_decay: 1. "
"  weight_decay: 0. "
"  bottom: 'pool5' "
"  top: 'fc6' "
"} "
"layers { "
"  name: 'relu6' "
"  type: RELU "
"  bottom: 'fc6' "
"  top: 'fc6' "
"} "
"layers { "
"  name: 'drop6' "
"  type: DROPOUT "
"  dropout_param { "
"    dropout_ratio: 0.5 "
"  } "
"  bottom: 'fc6' "
"  top: 'fc6' "
"} "
"layers { "
"  name: 'fc7' "
"  type: INNER_PRODUCT "
"  inner_product_param { "
"    num_output: 4096 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.005 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 1. "
"    } "
"  } "
"  blobs_lr: 1. "
"  blobs_lr: 2. "
"  weight_decay: 1. "
"  weight_decay: 0. "
"  bottom: 'fc6' "
"  top: 'fc7' "
"} "
"layers { "
"  name: 'relu7' "
"  type: RELU "
"  bottom: 'fc7' "
"  top: 'fc7' "
"} "
"layers { "
"  name: 'drop7' "
"  type: DROPOUT "
"  dropout_param { "
"    dropout_ratio: 0.5 "
"  } "
"  bottom: 'fc7' "
"  top: 'fc7' "
"} "
"layers { "
"  name: 'fc8' "
"  type: INNER_PRODUCT "
"  inner_product_param { "
"    num_output: 1000 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 0 "
"    } "
"  } "
"  blobs_lr: 1. "
"  blobs_lr: 2. "
"  weight_decay: 1. "
"  weight_decay: 0. "
"  bottom: 'fc7' "
"  top: 'fc8' "
"} "
"layers { "
"  name: 'loss' "
"  type: SOFTMAX_LOSS "
"  bottom: 'fc8' "
"  bottom: 'label' "
"} ";
this->RunV0UpgradeTest(v0_proto, expected_v1_proto);
// After V1->V2: 'layers' -> 'layer', enum types -> CamelCase strings, and
// blobs_lr/weight_decay pairs -> per-blob 'param' blocks.
const string& expected_v2_proto =
"name: 'CaffeNet' "
"layer { "
"  name: 'data' "
"  type: 'Data' "
"  data_param { "
"    source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
"    batch_size: 256 "
"  } "
"  transform_param { "
"    crop_size: 227 "
"    mirror: true "
"    mean_file: '/home/jiayq/Data/ILSVRC12/image_mean.binaryproto' "
"  } "
"  top: 'data' "
"  top: 'label' "
"} "
"layer { "
"  name: 'conv1' "
"  type: 'Convolution' "
"  convolution_param { "
"    num_output: 96 "
"    kernel_size: 11 "
"    stride: 4 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 0. "
"    } "
"  } "
"  param { "
"    lr_mult: 1 "
"    decay_mult: 1 "
"  } "
"  param { "
"    lr_mult: 2 "
"    decay_mult: 0 "
"  } "
"  bottom: 'data' "
"  top: 'conv1' "
"} "
"layer { "
"  name: 'relu1' "
"  type: 'ReLU' "
"  bottom: 'conv1' "
"  top: 'conv1' "
"} "
"layer { "
"  name: 'pool1' "
"  type: 'Pooling' "
"  pooling_param { "
"    pool: MAX "
"    kernel_size: 3 "
"    stride: 2 "
"  } "
"  bottom: 'conv1' "
"  top: 'pool1' "
"} "
"layer { "
"  name: 'norm1' "
"  type: 'LRN' "
"  lrn_param { "
"    local_size: 5 "
"    alpha: 0.0001 "
"    beta: 0.75 "
"  } "
"  bottom: 'pool1' "
"  top: 'norm1' "
"} "
"layer { "
"  name: 'conv2' "
"  type: 'Convolution' "
"  convolution_param { "
"    num_output: 256 "
"    group: 2 "
"    kernel_size: 5 "
"    pad: 2 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 1. "
"    } "
"  } "
"  param { "
"    lr_mult: 1 "
"    decay_mult: 1 "
"  } "
"  param { "
"    lr_mult: 2 "
"    decay_mult: 0 "
"  } "
"  bottom: 'norm1' "
"  top: 'conv2' "
"} "
"layer { "
"  name: 'relu2' "
"  type: 'ReLU' "
"  bottom: 'conv2' "
"  top: 'conv2' "
"} "
"layer { "
"  name: 'pool2' "
"  type: 'Pooling' "
"  pooling_param { "
"    pool: MAX "
"    kernel_size: 3 "
"    stride: 2 "
"  } "
"  bottom: 'conv2' "
"  top: 'pool2' "
"} "
"layer { "
"  name: 'norm2' "
"  type: 'LRN' "
"  lrn_param { "
"    local_size: 5 "
"    alpha: 0.0001 "
"    beta: 0.75 "
"  } "
"  bottom: 'pool2' "
"  top: 'norm2' "
"} "
"layer { "
"  name: 'conv3' "
"  type: 'Convolution' "
"  convolution_param { "
"    num_output: 384 "
"    kernel_size: 3 "
"    pad: 1 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 0. "
"    } "
"  } "
"  param { "
"    lr_mult: 1 "
"    decay_mult: 1 "
"  } "
"  param { "
"    lr_mult: 2 "
"    decay_mult: 0 "
"  } "
"  bottom: 'norm2' "
"  top: 'conv3' "
"} "
"layer { "
"  name: 'relu3' "
"  type: 'ReLU' "
"  bottom: 'conv3' "
"  top: 'conv3' "
"} "
"layer { "
"  name: 'conv4' "
"  type: 'Convolution' "
"  convolution_param { "
"    num_output: 384 "
"    group: 2 "
"    kernel_size: 3 "
"    pad: 1 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 1. "
"    } "
"  } "
"  param { "
"    lr_mult: 1 "
"    decay_mult: 1 "
"  } "
"  param { "
"    lr_mult: 2 "
"    decay_mult: 0 "
"  } "
"  bottom: 'conv3' "
"  top: 'conv4' "
"} "
"layer { "
"  name: 'relu4' "
"  type: 'ReLU' "
"  bottom: 'conv4' "
"  top: 'conv4' "
"} "
"layer { "
"  name: 'conv5' "
"  type: 'Convolution' "
"  convolution_param { "
"    num_output: 256 "
"    group: 2 "
"    kernel_size: 3 "
"    pad: 1 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 1. "
"    } "
"  } "
"  param { "
"    lr_mult: 1 "
"    decay_mult: 1 "
"  } "
"  param { "
"    lr_mult: 2 "
"    decay_mult: 0 "
"  } "
"  bottom: 'conv4' "
"  top: 'conv5' "
"} "
"layer { "
"  name: 'relu5' "
"  type: 'ReLU' "
"  bottom: 'conv5' "
"  top: 'conv5' "
"} "
"layer { "
"  name: 'pool5' "
"  type: 'Pooling' "
"  pooling_param { "
"    kernel_size: 3 "
"    pool: MAX "
"    stride: 2 "
"  } "
"  bottom: 'conv5' "
"  top: 'pool5' "
"} "
"layer { "
"  name: 'fc6' "
"  type: 'InnerProduct' "
"  inner_product_param { "
"    num_output: 4096 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.005 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 1. "
"    } "
"  } "
"  param { "
"    lr_mult: 1 "
"    decay_mult: 1 "
"  } "
"  param { "
"    lr_mult: 2 "
"    decay_mult: 0 "
"  } "
"  bottom: 'pool5' "
"  top: 'fc6' "
"} "
"layer { "
"  name: 'relu6' "
"  type: 'ReLU' "
"  bottom: 'fc6' "
"  top: 'fc6' "
"} "
"layer { "
"  name: 'drop6' "
"  type: 'Dropout' "
"  dropout_param { "
"    dropout_ratio: 0.5 "
"  } "
"  bottom: 'fc6' "
"  top: 'fc6' "
"} "
"layer { "
"  name: 'fc7' "
"  type: 'InnerProduct' "
"  inner_product_param { "
"    num_output: 4096 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.005 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 1. "
"    } "
"  } "
"  param { "
"    lr_mult: 1 "
"    decay_mult: 1 "
"  } "
"  param { "
"    lr_mult: 2 "
"    decay_mult: 0 "
"  } "
"  bottom: 'fc6' "
"  top: 'fc7' "
"} "
"layer { "
"  name: 'relu7' "
"  type: 'ReLU' "
"  bottom: 'fc7' "
"  top: 'fc7' "
"} "
"layer { "
"  name: 'drop7' "
"  type: 'Dropout' "
"  dropout_param { "
"    dropout_ratio: 0.5 "
"  } "
"  bottom: 'fc7' "
"  top: 'fc7' "
"} "
"layer { "
"  name: 'fc8' "
"  type: 'InnerProduct' "
"  inner_product_param { "
"    num_output: 1000 "
"    weight_filler { "
"      type: 'gaussian' "
"      std: 0.01 "
"    } "
"    bias_filler { "
"      type: 'constant' "
"      value: 0 "
"    } "
"  } "
"  param { "
"    lr_mult: 1 "
"    decay_mult: 1 "
"  } "
"  param { "
"    lr_mult: 2 "
"    decay_mult: 0 "
"  } "
"  bottom: 'fc7' "
"  top: 'fc8' "
"} "
"layer { "
"  name: 'loss' "
"  type: 'SoftmaxWithLoss' "
"  bottom: 'fc8' "
"  bottom: 'label' "
"} ";
this->RunV1UpgradeTest(expected_v1_proto, expected_v2_proto);
}  // NOLINT(readability/fn_size)
// Every valid V1 layer-type enum value must map to a V2 layer-type string
// under which a layer can actually be constructed; the lone exception is
// NONE, which maps to the empty string.
TEST_F(NetUpgradeTest, TestUpgradeV1LayerType) {
LayerParameter layer_param;
shared_ptr<Layer<float> > layer;
for (int type_id = 0; type_id < V1LayerParameter_LayerType_LayerType_ARRAYSIZE;
++type_id) {
ASSERT_TRUE(V1LayerParameter_LayerType_IsValid(type_id));
const V1LayerParameter_LayerType old_type = V1LayerParameter_LayerType(type_id);
const string new_type(UpgradeV1LayerType(old_type));
if (new_type.empty()) {
// Empty string isn't actually a valid layer type; only NONE may yield it.
EXPECT_EQ(V1LayerParameter_LayerType_NONE, old_type);
} else {
// The upgraded type string must be registered and round-trip through
// the created layer's type() accessor.
layer_param.set_type(new_type);
layer = LayerRegistry<float>::CreateLayer(layer_param);
EXPECT_EQ(new_type, layer->type());
}
}
}
} // NOLINT(readability/fn_size) // namespace caffe
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<resources>
<declare-styleable name="CircleIndicator">
<attr name="ci_width" format="dimension"/>
<attr name="ci_height" format="dimension"/>
<attr name="ci_margin" format="dimension"/>
<attr name="ci_orientation" format="enum">
<!-- Defines a horizontal widget. -->
<enum name="horizontal" value="0"/>
<!-- Defines a vertical widget. -->
<enum name="vertical" value="1"/>
</attr>
<attr name="ci_gravity">
<!-- Push object to the top of its container, not changing its size. -->
<flag name="top" value="0x30"/>
<!-- Push object to the bottom of its container, not changing its size. -->
<flag name="bottom" value="0x50"/>
<!-- Push object to the left of its container, not changing its size. -->
<flag name="left" value="0x03"/>
<!-- Push object to the right of its container, not changing its size. -->
<flag name="right" value="0x05"/>
<!-- Place object in the vertical center of its container, not changing its size. -->
<flag name="center_vertical" value="0x10"/>
<!-- Grow the vertical size of the object if needed so it completely fills its container. -->
<flag name="fill_vertical" value="0x70"/>
<!-- Place object in the horizontal center of its container, not changing its size. -->
<flag name="center_horizontal" value="0x01"/>
<!-- Grow the horizontal size of the object if needed so it completely fills its container. -->
<flag name="fill_horizontal" value="0x07"/>
<!-- Place the object in the center of its container in both the vertical and horizontal axis, not changing its size. -->
<flag name="center" value="0x11"/>
<!-- Grow the horizontal and vertical size of the object if needed so it completely fills its container. -->
<flag name="fill" value="0x77"/>
<!-- Additional option that can be set to have the top and/or bottom edges of
the child clipped to its container's bounds.
The clip will be based on the vertical gravity: a top gravity will clip the bottom
edge, a bottom gravity will clip the top edge, and neither will clip both edges. -->
<flag name="clip_vertical" value="0x80"/>
<!-- Additional option that can be set to have the left and/or right edges of
the child clipped to its container's bounds.
The clip will be based on the horizontal gravity: a left gravity will clip the right
edge, a right gravity will clip the left edge, and neither will clip both edges. -->
<flag name="clip_horizontal" value="0x08"/>
<!-- Push object to the beginning of its container, not changing its size. -->
<flag name="start" value="0x00800003"/>
<!-- Push object to the end of its container, not changing its size. -->
<flag name="end" value="0x00800005"/>
</attr>
</declare-styleable>
</resources>
| {
"pile_set_name": "Github"
} |
/*
* InfoWindows.cpp, part of VCMI engine
*
* Authors: listed in file AUTHORS in main folder
*
* License: GNU General Public License v2.0 or later
* Full text of license available in license.txt file, in main folder
*
*/
#include "StdInc.h"
#include "InfoWindows.h"
#include "../CBitmapHandler.h"
#include "../Graphics.h"
#include "../CGameInfo.h"
#include "../CPlayerInterface.h"
#include "../CMessage.h"
#include "../CMusicHandler.h"
#include "../windows/CAdvmapInterface.h"
#include "../widgets/CComponent.h"
#include "../widgets/MiscWidgets.h"
#include "../gui/SDL_Pixels.h"
#include "../gui/SDL_Extensions.h"
#include "../gui/CGuiHandler.h"
#include "../gui/CCursorHandler.h"
#include "../battle/CBattleInterface.h"
#include "../battle/CBattleInterfaceClasses.h"
#include "../../CCallback.h"
#include "../../lib/CGameState.h"
#include "../../lib/CConfigHandler.h"
#include "../../lib/CondSh.h"
#include "../../lib/CGeneralTextHandler.h" //for Unicode related stuff
#include "../../lib/mapObjects/CGHeroInstance.h"
#include "../../lib/mapObjects/CGTownInstance.h"
#include "../../lib/mapObjects/MiscObjects.h"
/// Blits the cached window bitmap (when present) at the window position.
void CSimpleWindow::show(SDL_Surface * to)
{
	if(!bitmap)
		return;
	blitAt(bitmap, pos.x, pos.y, to);
}
/// Releases the owned SDL surface, if any.
CSimpleWindow::~CSimpleWindow()
{
	if(!bitmap)
		return;
	SDL_FreeSurface(bitmap);
	bitmap = nullptr;
}
/// Marks component `to` as selected, deselects all other selectable
/// components and repaints the window.
void CSelWindow::selectionChange(unsigned to)
{
	for(unsigned index = 0; index < components.size(); ++index)
	{
		auto selectable = std::dynamic_pointer_cast<CSelectableComponent>(components[index]);
		if(selectable)
			selectable->select(index == to);
	}
	redraw();
}
/// Selection dialog: message text, selectable components (choice keys 1-9)
/// and a row of buttons; every button also closes the window. When askID is
/// a valid query, the first button confirms the choice and, when a second
/// button exists, the last one cancels (sends selection 0).
CSelWindow::CSelWindow(const std::string &Text, PlayerColor player, int charperline, const std::vector<std::shared_ptr<CSelectableComponent>> & comps, const std::vector<std::pair<std::string, CFunctionList<void()> > > &Buttons, QueryID askID)
{
	OBJECT_CONSTRUCTION_CAPTURING(255-DISPOSE);
	ID = askID;
	for(int i = 0; i < Buttons.size(); i++)
	{
		buttons.push_back(std::make_shared<CButton>(Point(0, 0), Buttons[i].first, CButton::tooltip(), Buttons[i].second));
		if(!i && askID.getNum() >= 0)
			buttons.back()->addCallback(std::bind(&CSelWindow::madeChoice, this));
		buttons[i]->addCallback(std::bind(&CInfoWindow::close, this)); //each button will close the window apart from call-defined actions
	}

	text = std::make_shared<CTextBox>(Text, Rect(0, 0, 250, 100), 0, FONT_MEDIUM, CENTER, Colors::WHITE);

	// Bug fix: the original called buttons.front()/back() unconditionally,
	// which is undefined behaviour when no buttons were supplied.
	// CInfoWindow's constructor already guards the same way.
	if(!buttons.empty())
	{
		buttons.front()->assignedKeys.insert(SDLK_RETURN); //first button - reacts on enter
		buttons.back()->assignedKeys.insert(SDLK_ESCAPE); //last button - reacts on escape

		if(buttons.size() > 1 && askID.getNum() >= 0) //cancel button functionality
		{
			buttons.back()->addCallback([askID]() {
				LOCPLINT->cb.get()->selectionMade(0, askID);
			});
		}
	}

	for(int i = 0; i < comps.size(); i++)
	{
		comps[i]->recActions = 255-DISPOSE;
		addChild(comps[i].get());
		components.push_back(comps[i]);
		comps[i]->onSelect = std::bind(&CSelWindow::selectionChange, this, i);
		if(i < 9)
			comps[i]->assignedKeys.insert(SDLK_1 + i); // number keys 1-9 pick the first nine components
	}
	CMessage::drawIWindow(this, Text, player);
}
/// Reports the player's choice to the server: 1-based index of the last
/// selected component, or 0 when nothing is selected.
void CSelWindow::madeChoice()
{
	if(ID.getNum() < 0)
		return;
	int ret = -1;
	for(int i = 0; i < components.size(); i++)
	{
		// Bug fix: not every component is necessarily selectable; the
		// original dereferenced the dynamic cast without a null check.
		auto selectable = std::dynamic_pointer_cast<CSelectableComponent>(components[i]);
		if(selectable && selectable->selected)
			ret = i;
	}
	LOCPLINT->cb->selectionMade(ret + 1, ID);
}
/// Builds a modal message window: buttons (each also closes the window),
/// the message text and any extra components; CMessage then lays it out.
CInfoWindow::CInfoWindow(std::string Text, PlayerColor player, const TCompsInfo & comps, const TButtonsInfo & Buttons)
{
	OBJECT_CONSTRUCTION_CAPTURING(255-DISPOSE);
	type |= BLOCK_ADV_HOTKEYS; // adventure-map hotkeys must not fire while this dialog is open
	ID = QueryID(-1); // not bound to any server-side query
	for(auto & Button : Buttons)
	{
		std::shared_ptr<CButton> button = std::make_shared<CButton>(Point(0,0), Button.first, CButton::tooltip(), std::bind(&CInfoWindow::close, this));
		button->setBorderColor(Colors::METALLIC_GOLD);
		button->addCallback(Button.second); //each button will close the window apart from call-defined actions
		buttons.push_back(button);
	}

	text = std::make_shared<CTextBox>(Text, Rect(0, 0, 250, 100), 0, FONT_MEDIUM, CENTER, Colors::WHITE);
	if(!text->slider)
	{
		// no scrollbar needed: shrink the box to the actual rendered text size
		text->resize(text->label->textSize);
	}

	if(buttons.size())
	{
		buttons.front()->assignedKeys.insert(SDLK_RETURN); //first button - reacts on enter
		buttons.back()->assignedKeys.insert(SDLK_ESCAPE); //last button - reacts on escape
	}

	for(auto & comp : comps)
	{
		comp->recActions = 0xff & ~DISPOSE;
		addChild(comp.get());
		comp->recActions &= ~(SHOWALL | UPDATE); // parent window drives drawing/updating of components
		components.push_back(comp);
	}

	CMessage::drawIWindow(this,Text,player);
}
/// Default constructor: window is not tied to any server-side query.
CInfoWindow::CInfoWindow()
{
	ID = QueryID(-1);
}
/// Removes the window and clears the "dialog is showing" interface flag.
void CInfoWindow::close()
{
	WindowBase::close();

	if(LOCPLINT != nullptr)
		LOCPLINT->showingDialog->setn(false);
}
/// Per-frame draw: plain CIntObject behaviour (no bitmap blit here —
/// compare showAll, which draws the background first).
void CInfoWindow::show(SDL_Surface * to)
{
	CIntObject::show(to);
}
CInfoWindow::~CInfoWindow() = default; // out-of-line so member types only need to be complete here
/// Full redraw: blit the cached window bitmap first (CSimpleWindow::show),
/// then draw all child widgets on top.
void CInfoWindow::showAll(SDL_Surface * to)
{
	CSimpleWindow::show(to);
	CIntObject::showAll(to);
}
/// Convenience helper: build a standard OK dialog and push it on the GUI stack.
void CInfoWindow::showInfoDialog(const std::string &text, const TCompsInfo & components, PlayerColor player)
{
	GH.pushInt(CInfoWindow::create(text, player, components));
}
void CInfoWindow::showYesNoDialog(const std::string & text, const TCompsInfo & components, const CFunctionList<void( ) > &onYes, const CFunctionList<void()> &onNo, PlayerColor player)
{
assert(!LOCPLINT || LOCPLINT->showingDialog->get());
std::vector<std::pair<std::string,CFunctionList<void()> > > pom;
pom.push_back(std::pair<std::string,CFunctionList<void()> >("IOKAY.DEF",0));
pom.push_back(std::pair<std::string,CFunctionList<void()> >("ICANCEL.DEF",0));
std::shared_ptr<CInfoWindow> temp = std::make_shared<CInfoWindow>(text, player, components, pom);
temp->buttons[0]->addCallback( onYes );
temp->buttons[1]->addCallback( onNo );
GH.pushInt(temp);
}
std::shared_ptr<CInfoWindow> CInfoWindow::create(const std::string &text, PlayerColor playerID, const TCompsInfo & components)
{
std::vector<std::pair<std::string,CFunctionList<void()> > > pom;
pom.push_back(std::pair<std::string,CFunctionList<void()> >("IOKAY.DEF",0));
return std::make_shared<CInfoWindow>(text, playerID, components, pom);
}
/// Formats "{title}\n\ndescription" — the braces make CMessage render the
/// title highlighted.
std::string CInfoWindow::genText(std::string title, std::string description)
{
	std::string result = "{";
	result += title;
	result += "}";
	result += "\n\n";
	result += description;
	return result;
}
/// Popup at an explicit top-left position; Free tells whether the popup
/// owns (and will free) the bitmap on close.
CInfoPopup::CInfoPopup(SDL_Surface * Bitmap, int x, int y, bool Free)
	:free(Free),bitmap(Bitmap)
{
	init(x, y);
}
/// Popup anchored to point p according to the requested alignment.
/// Only BOTTOMRIGHT/CENTER/TOPLEFT are implemented; any other value
/// asserts and — in release builds where assert is a no-op — leaves the
/// popup uninitialized (init is never called).
CInfoPopup::CInfoPopup(SDL_Surface * Bitmap, const Point &p, EAlignment alignment, bool Free)
	: free(Free),bitmap(Bitmap)
{
	switch(alignment)
	{
	case BOTTOMRIGHT:
		init(p.x - Bitmap->w, p.y - Bitmap->h);
		break;
	case CENTER:
		init(p.x - Bitmap->w/2, p.y - Bitmap->h/2);
		break;
	case TOPLEFT:
		init(p.x, p.y);
		break;
	default:
		assert(0); //not implemented
	}
}
/// Popup centered on the screen. Unlike the other constructors this does
/// not go through init(); when bitmap is null, pos is left untouched.
CInfoPopup::CInfoPopup(SDL_Surface *Bitmap, bool Free)
{
	CCS->curh->hide(); // cursor is restored in the destructor
	free=Free;
	bitmap=Bitmap;

	if(bitmap)
	{
		pos.x = screen->w/2 - bitmap->w/2;
		pos.y = screen->h/2 - bitmap->h/2;
		pos.h = bitmap->h;
		pos.w = bitmap->w;
	}
}
/// Closes the popup; frees the bitmap first when this popup owns it.
/// NOTE(review): bitmap is not nulled after SDL_FreeSurface — if show()
/// could still run after close(), it would blit a freed surface; confirm
/// the window is removed from the GUI stack before the next draw.
void CInfoPopup::close()
{
	if(free)
		SDL_FreeSurface(bitmap);
	WindowBase::close();
}
/// Draws the popup bitmap directly at its on-screen position.
void CInfoPopup::show(SDL_Surface * to)
{
	blitAt(bitmap,pos.x,pos.y,to);
}
/// Restores the cursor hidden when the popup was created.
CInfoPopup::~CInfoPopup()
{
	CCS->curh->show();
}
/// Positions the popup at (x, y), sized to the bitmap, clamped so it stays
/// fully on screen; hides the cursor for the popup's lifetime.
void CInfoPopup::init(int x, int y)
{
	CCS->curh->hide();

	pos.x = x;
	pos.y = y;
	pos.w = bitmap->w;
	pos.h = bitmap->h;

	// Put the window back on screen if necessary
	vstd::amax(pos.x, 0);
	vstd::amax(pos.y, 0);
	vstd::amin(pos.x, screen->w - bitmap->w);
	vstd::amin(pos.y, screen->h - bitmap->h);
}
/// Right-click popups close on mouse-button release.
/// Note: `down` is a boost::tribool — do not simplify to `if(!down)`,
/// since the indeterminate state would then behave differently.
void CRClickPopup::clickRight(tribool down, bool previousState)
{
	if(down)
		return;
	close();
}
/// Default close: simply remove the window from the GUI stack.
void CRClickPopup::close()
{
	WindowBase::close();
}
/// Builds a plain-text popup centered on the current mouse position and
/// pushes it, wrapped in a CRClickPopupInt, onto the GUI stack.
void CRClickPopup::createAndPush(const std::string &txt, const CInfoWindow::TCompsInfo &comps)
{
	PlayerColor player = LOCPLINT ? LOCPLINT->playerID : PlayerColor(1); //if no player, then use blue
	if(settings["session"]["spectate"].Bool())//TODO: there must be better way to implement this
		player = PlayerColor(1); // spectator windows always use blue

	auto temp = std::make_shared<CInfoWindow>(txt, player, comps);
	temp->center(Point(GH.current->motion)); //center on mouse
	temp->fitToScreen(10);

	GH.pushIntT<CRClickPopupInt>(temp);
}
void CRClickPopup::createAndPush(const std::string & txt, std::shared_ptr<CComponent> component)
{
CInfoWindow::TCompsInfo intComps;
intComps.push_back(component);
createAndPush(txt, intComps);
}
/// Popup for a map object: uses the custom info window when one exists,
/// otherwise falls back to a text popup with the object's hover text.
/// NOTE(review): the fallback dereferences adventureInt and LOCPLINT
/// without null checks — presumably only reachable with an active player
/// interface; confirm.
void CRClickPopup::createAndPush(const CGObjectInstance * obj, const Point & p, EAlignment alignment)
{
	auto iWin = createInfoWin(p, obj); //try get custom infowindow for this obj
	if(iWin)
	{
		GH.pushInt(iWin);
	}
	else
	{
		// hover text differs depending on whether a hero is selected
		if(adventureInt->curHero())
			CRClickPopup::createAndPush(obj->getHoverText(adventureInt->curHero()));
		else
			CRClickPopup::createAndPush(obj->getHoverText(LOCPLINT->playerID));
	}
}
/// Subscribe to right-click events only.
CRClickPopup::CRClickPopup()
{
	addUsedEvents(RCLICK);
}
/// Nothing to release.
CRClickPopup::~CRClickPopup()
{
}
/// Wraps an arbitrary interface object as a right-click popup; hides the
/// cursor for the popup's lifetime.
CRClickPopupInt::CRClickPopupInt(std::shared_ptr<CIntObject> our)
{
	CCS->curh->hide();

	defActions = SHOWALL | UPDATE;
	our->recActions = defActions;
	inner = our; // keeps the wrapped object alive
	addChild(our.get(), false); // non-owning child: the shared_ptr above holds ownership
}
/// Restore the cursor hidden in the constructor.
CRClickPopupInt::~CRClickPopupInt()
{
	CCS->curh->show();
}
/// Clamps the popup anchor so it stays at least 100px inside the adventure
/// map's terrain view.
Point CInfoBoxPopup::toScreen(Point p)
{
	const auto & mapArea = adventureInt->terrain.pos;
	vstd::abetween(p.x, mapArea.x + 100, mapArea.x + mapArea.w - 100);
	vstd::abetween(p.y, mapArea.y + 100, mapArea.y + mapArea.h - 100);
	return p;
}
/// Town right-click tooltip, fed with the town info visible to the player.
CInfoBoxPopup::CInfoBoxPopup(Point position, const CGTownInstance * town)
	: CWindowObject(RCLICK_POPUP | PLAYER_COLORED, "TOWNQVBK", toScreen(position))
{
	InfoAboutTown iah;
	LOCPLINT->cb->getTownInfo(town, iah, adventureInt->selection); //todo: should this be nearest hero?
	OBJECT_CONSTRUCTION_CAPTURING(255-DISPOSE);
	tooltip = std::make_shared<CTownTooltip>(Point(9, 10), iah);
}
/// Hero right-click tooltip, fed with the hero info visible to the player.
CInfoBoxPopup::CInfoBoxPopup(Point position, const CGHeroInstance * hero)
	: CWindowObject(RCLICK_POPUP | PLAYER_COLORED, "HEROQVBK", toScreen(position))
{
	InfoAboutHero iah;
	LOCPLINT->cb->getHeroInfo(hero, iah, adventureInt->selection);//todo: should this be nearest hero?
	OBJECT_CONSTRUCTION_CAPTURING(255-DISPOSE);
	tooltip = std::make_shared<CHeroTooltip>(Point(9, 10), iah);
}
/// Garrison right-click tooltip; reuses the town background and the
/// army-only tooltip widget.
CInfoBoxPopup::CInfoBoxPopup(Point position, const CGGarrison * garr)
	: CWindowObject(RCLICK_POPUP | PLAYER_COLORED, "TOWNQVBK", toScreen(position))
{
	InfoAboutTown iah;
	LOCPLINT->cb->getTownInfo(garr, iah);
	OBJECT_CONSTRUCTION_CAPTURING(255-DISPOSE);
	tooltip = std::make_shared<CArmyTooltip>(Point(9, 10), iah);
}
/// Creates the right-click info popup for a map object (hero, town or
/// garrison); specific == nullptr means "describe the current adventure-map
/// selection". Returns nullptr for object types without a custom popup.
std::shared_ptr<WindowBase> CRClickPopup::createInfoWin(Point position, const CGObjectInstance * specific) //specific=0 => draws info about selected town/hero
{
	if(nullptr == specific)
		specific = adventureInt->selection;

	if(nullptr == specific)
	{
		logGlobal->error("createInfoWin: no object to describe");
		return nullptr;
	}

	switch(specific->ID)
	{
	case Obj::HERO:
		return std::make_shared<CInfoBoxPopup>(position, dynamic_cast<const CGHeroInstance *>(specific));
	case Obj::TOWN:
		return std::make_shared<CInfoBoxPopup>(position, dynamic_cast<const CGTownInstance *>(specific));
	case Obj::GARRISON:
	case Obj::GARRISON2:
		return std::make_shared<CInfoBoxPopup>(position, dynamic_cast<const CGGarrison *>(specific));
	default:
		// no dedicated popup for this object type
		return std::shared_ptr<WindowBase>();
	}
}
| {
"pile_set_name": "Github"
} |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# Copyright 2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import json
import sys
import getopt
import logging
from shadowsocks.common import to_bytes, to_str, IPNetwork, PortRange
from shadowsocks import encrypt
# Custom logging level below DEBUG, used for very chatty diagnostics.
VERBOSE_LEVEL = 5

# Module-wide verbosity counter; updated from the config in get_config().
verbose = 0
def check_python():
    """Exit with an error message unless running on Python 2.6+ or 3.3+."""
    major, minor = sys.version_info[0], sys.version_info[1]
    if major == 2:
        if minor < 6:
            print('Python 2.6+ required')
            sys.exit(1)
    elif major == 3:
        if minor < 3:
            print('Python 3.3+ required')
            sys.exit(1)
    else:
        print('Python version not supported')
        sys.exit(1)
def print_exception(e):
    # Log the exception message; when any verbosity is enabled, also dump
    # the full traceback of the currently-handled exception.
    global verbose
    logging.error(e)
    if verbose > 0:
        import traceback
        traceback.print_exc()
def __version():
    """Best-effort version lookup.

    Tries the installed distribution metadata first, then the bundled
    version module; falls back to an empty string when neither works.
    """
    try:
        import pkg_resources
        return pkg_resources.get_distribution('shadowsocks').version
    except Exception:
        pass
    try:
        from shadowsocks import version
        return version.version()
    except Exception:
        return ''
def print_shadowsocks():
    # Human-facing banner printed for --version.
    print('ShadowsocksR %s' % __version())
def log_shadowsocks_version():
    # Same banner as print_shadowsocks(), but sent to the log.
    logging.info('ShadowsocksR %s' % __version())
def find_config():
    """Locate a config file, preferring user-config.json over config.json.

    Each candidate name is tried in the current directory, then in the
    parent directory; returns the first existing path, or None.
    """
    def locate(name):
        if os.path.exists(name):
            return name
        candidate = os.path.join(os.path.abspath('..'), name)
        return candidate if os.path.exists(candidate) else None

    return locate('user-config.json') or locate('config.json')
def check_config(config, is_local):
    """Sanity-check a parsed configuration.

    Logs warnings for suspicious values, exits the process on fatal
    problems, and normalizes port fields to int in place.  Returns early
    (no checks) when stopping a daemon.
    """
    if config.get('daemon', None) == 'stop':
        # no need to specify configuration for daemon stop
        return

    if is_local and not config.get('password', None):
        logging.error('password not specified')
        print_help(is_local)
        sys.exit(2)

    if not is_local and not config.get('password', None) \
            and not config.get('port_password', None):
        logging.error('password or port_password not specified')
        print_help(is_local)
        sys.exit(2)

    # normalize port values to int (server_port may legitimately be a list)
    if 'local_port' in config:
        config['local_port'] = int(config['local_port'])

    if 'server_port' in config and type(config['server_port']) != list:
        config['server_port'] = int(config['server_port'])

    # Bug fix: local_address is normalized to str by get_config(), so the
    # original bytes-only comparison (b'0.0.0.0') could never match and the
    # warning was unreachable; accept both spellings.
    if config.get('local_address', '') in ['0.0.0.0', b'0.0.0.0']:
        logging.warning('warning: local set to listen on 0.0.0.0, it\'s not safe')
    if config.get('server', '') in ['127.0.0.1', 'localhost']:
        logging.warning('warning: server set to listen on %s:%s, are you sure?' %
                        (to_str(config['server']), config['server_port']))
    if config.get('timeout', 300) < 100:
        logging.warning('warning: your timeout %d seems too short' %
                        int(config.get('timeout')))
    if config.get('timeout', 300) > 600:
        logging.warning('warning: your timeout %d seems too long' %
                        int(config.get('timeout')))
    if config.get('password') in [b'mypassword']:
        logging.error('DON\'T USE DEFAULT PASSWORD! Please change it in your '
                      'config.json!')
        sys.exit(1)
    if config.get('user', None) is not None:
        if os.name != 'posix':
            logging.error('user can be used only on Unix')
            sys.exit(1)

    # raises if the chosen cipher/password combination is unusable
    encrypt.try_cipher(config['password'], config['method'])
def get_config(is_local):
    """Assemble the effective configuration for sslocal/ssserver.

    Command-line options override values loaded from the JSON config file
    (given with -c, or auto-discovered by find_config()).  Also configures
    logging according to the requested verbosity and validates the result
    via check_config(); exits the process on fatal errors.
    """
    global verbose

    config = {}
    config_path = None
    # bootstrap logging so errors during option parsing are visible
    logging.basicConfig(level=logging.INFO,
                        format='%(levelname)-s: %(message)s')
    if is_local:
        shortopts = 'hd:s:b:p:k:l:m:O:o:G:g:c:t:vq'
        longopts = ['help', 'fast-open', 'pid-file=', 'log-file=', 'user=',
                    'version']
    else:
        shortopts = 'hd:s:p:k:m:O:o:G:g:c:t:vq'
        longopts = ['help', 'fast-open', 'pid-file=', 'log-file=', 'workers=',
                    'forbidden-ip=', 'user=', 'manager-address=', 'version']
    try:
        optlist, args = getopt.getopt(sys.argv[1:], shortopts, longopts)
        # first pass: handle only -c/-h/--version, so the config file is
        # known before the remaining options are applied on top of it
        for key, value in optlist:
            if key == '-c':
                config_path = value
            elif key in ('-h', '--help'):
                print_help(is_local)
                sys.exit(0)
            elif key == '--version':
                print_shadowsocks()
                sys.exit(0)
            else:
                continue

        if config_path is None:
            config_path = find_config()

        if config_path:
            logging.debug('loading config from %s' % config_path)
            with open(config_path, 'rb') as f:
                try:
                    config = parse_json_in_str(remove_comment(f.read().decode('utf8')))
                except ValueError as e:
                    logging.error('found an error in config.json: %s', str(e))
                    sys.exit(1)

        v_count = 0
        # second pass: command-line values override the config file
        for key, value in optlist:
            if key == '-p':
                config['server_port'] = int(value)
            elif key == '-k':
                config['password'] = to_bytes(value)
            elif key == '-l':
                config['local_port'] = int(value)
            elif key == '-s':
                config['server'] = to_str(value)
            elif key == '-m':
                config['method'] = to_str(value)
            elif key == '-O':
                config['protocol'] = to_str(value)
            elif key == '-o':
                config['obfs'] = to_str(value)
            elif key == '-G':
                config['protocol_param'] = to_str(value)
            elif key == '-g':
                config['obfs_param'] = to_str(value)
            elif key == '-b':
                config['local_address'] = to_str(value)
            elif key == '-v':
                v_count += 1
                # '-vv' turns on more verbose mode
                config['verbose'] = v_count
            elif key == '-t':
                config['timeout'] = int(value)
            elif key == '--fast-open':
                config['fast_open'] = True
            elif key == '--workers':
                config['workers'] = int(value)
            elif key == '--manager-address':
                config['manager_address'] = value
            elif key == '--user':
                config['user'] = to_str(value)
            elif key == '--forbidden-ip':
                config['forbidden_ip'] = to_str(value)
            elif key == '-d':
                config['daemon'] = to_str(value)
            elif key == '--pid-file':
                config['pid-file'] = to_str(value)
            elif key == '--log-file':
                config['log-file'] = to_str(value)
            elif key == '-q':
                # each -q lowers verbosity by one (may go negative)
                v_count -= 1
                config['verbose'] = v_count
            else:
                continue
    except getopt.GetoptError as e:
        print(e, file=sys.stderr)
        print_help(is_local)
        sys.exit(2)

    if not config:
        logging.error('config not specified')
        print_help(is_local)
        sys.exit(2)

    # fill in defaults and normalize value types
    config['password'] = to_bytes(config.get('password', b''))
    config['method'] = to_str(config.get('method', 'aes-256-cfb'))
    config['protocol'] = to_str(config.get('protocol', 'origin'))
    config['protocol_param'] = to_str(config.get('protocol_param', ''))
    config['obfs'] = to_str(config.get('obfs', 'plain'))
    config['obfs_param'] = to_str(config.get('obfs_param', ''))
    config['port_password'] = config.get('port_password', None)
    config['additional_ports'] = config.get('additional_ports', {})
    config['additional_ports_only'] = config.get('additional_ports_only', False)
    config['timeout'] = int(config.get('timeout', 300))
    config['udp_timeout'] = int(config.get('udp_timeout', 120))
    config['udp_cache'] = int(config.get('udp_cache', 64))
    config['fast_open'] = config.get('fast_open', False)
    config['workers'] = config.get('workers', 1)
    config['pid-file'] = config.get('pid-file', '/var/run/shadowsocksr.pid')
    config['log-file'] = config.get('log-file', '/var/log/shadowsocksr.log')
    config['verbose'] = config.get('verbose', False)
    config['connect_verbose_info'] = config.get('connect_verbose_info', 0)
    config['local_address'] = to_str(config.get('local_address', '127.0.0.1'))
    config['local_port'] = config.get('local_port', 1080)
    if is_local:
        if config.get('server', None) is None:
            logging.error('server addr not specified')
            print_local_help()
            sys.exit(2)
        else:
            config['server'] = to_str(config['server'])
    else:
        # server-side only: parse the IP/port filter lists, exiting on
        # malformed values
        config['server'] = to_str(config.get('server', '0.0.0.0'))
        try:
            config['forbidden_ip'] = \
                IPNetwork(config.get('forbidden_ip', '127.0.0.0/8,::1/128'))
        except Exception as e:
            logging.error(e)
            sys.exit(2)
        try:
            config['forbidden_port'] = PortRange(config.get('forbidden_port', ''))
        except Exception as e:
            logging.error(e)
            sys.exit(2)
        try:
            config['ignore_bind'] = \
                IPNetwork(config.get('ignore_bind', '127.0.0.0/8,::1/128,10.0.0.0/8,192.168.0.0/16'))
        except Exception as e:
            logging.error(e)
            sys.exit(2)
    config['server_port'] = config.get('server_port', 8388)

    # reconfigure logging with the final verbosity level
    logging.getLogger('').handlers = []
    logging.addLevelName(VERBOSE_LEVEL, 'VERBOSE')
    if config['verbose'] >= 2:
        level = VERBOSE_LEVEL
    elif config['verbose'] == 1:
        level = logging.DEBUG
    elif config['verbose'] == -1:
        level = logging.WARN
    elif config['verbose'] <= -2:
        level = logging.ERROR
    else:
        level = logging.INFO
    verbose = config['verbose']
    logging.basicConfig(level=level,
                        format='%(asctime)s %(levelname)-8s %(filename)s:%(lineno)s %(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S')

    check_config(config, is_local)

    return config
def print_help(is_local):
    """Print the usage text for the current mode (client or server)."""
    (print_local_help if is_local else print_server_help)()
def print_local_help():
    """Print usage for the client-side (sslocal) command."""
    print('''usage: sslocal [OPTION]...
A fast tunnel proxy that helps you bypass firewalls.
You can supply configurations via either config file or command line arguments.
Proxy options:
-c CONFIG path to config file
-s SERVER_ADDR server address
-p SERVER_PORT server port, default: 8388
-b LOCAL_ADDR local binding address, default: 127.0.0.1
-l LOCAL_PORT local port, default: 1080
-k PASSWORD password
-m METHOD encryption method, default: aes-256-cfb
-o OBFS obfsplugin, default: http_simple
-t TIMEOUT timeout in seconds, default: 300
--fast-open use TCP_FASTOPEN, requires Linux 3.7+
General options:
-h, --help show this help message and exit
-d start/stop/restart daemon mode
--pid-file PID_FILE pid file for daemon mode
--log-file LOG_FILE log file for daemon mode
--user USER username to run as
-v, -vv verbose mode
-q, -qq quiet mode, only show warnings/errors
--version show version information
Online help: <https://github.com/shadowsocks/shadowsocks>
''')
def print_server_help():
    """Print usage for the server-side (ssserver) command."""
    # Typo fix in the help text: "seperated" -> "separated".
    print('''usage: ssserver [OPTION]...
A fast tunnel proxy that helps you bypass firewalls.
You can supply configurations via either config file or command line arguments.
Proxy options:
-c CONFIG path to config file
-s SERVER_ADDR server address, default: 0.0.0.0
-p SERVER_PORT server port, default: 8388
-k PASSWORD password
-m METHOD encryption method, default: aes-256-cfb
-o OBFS obfsplugin, default: http_simple
-t TIMEOUT timeout in seconds, default: 300
--fast-open use TCP_FASTOPEN, requires Linux 3.7+
--workers WORKERS number of workers, available on Unix/Linux
--forbidden-ip IPLIST comma separated IP list forbidden to connect
--manager-address ADDR optional server manager UDP address, see wiki
General options:
-h, --help show this help message and exit
-d start/stop/restart daemon mode
--pid-file PID_FILE pid file for daemon mode
--log-file LOG_FILE log file for daemon mode
--user USER username to run as
-v, -vv verbose mode
-q, -qq quiet mode, only show warnings/errors
--version show version information
Online help: <https://github.com/shadowsocks/shadowsocks>
''')
def _decode_list(data):
    """Return a copy of `data` with every string element, recursively,
    encoded to UTF-8 bytes."""
    def convert(element):
        if hasattr(element, 'encode'):
            return element.encode('utf-8')
        if isinstance(element, list):
            return _decode_list(element)
        if isinstance(element, dict):
            return _decode_dict(element)
        return element

    return [convert(element) for element in data]
def _decode_dict(data):
    """Return a copy of `data` with every string value, recursively,
    encoded to UTF-8 bytes (keys are left untouched)."""
    def convert(value):
        if hasattr(value, 'encode'):
            return value.encode('utf-8')
        if isinstance(value, list):
            return _decode_list(value)
        if isinstance(value, dict):
            return _decode_dict(value)
        return value

    return {key: convert(value) for key, value in data.items()}
class JSFormat:
    """Tiny character-at-a-time state machine that strips // line comments
    from JSON-ish text while leaving string contents alone.

    States:
      0 - normal text
      1 - inside a double-quoted string
      2 - just after a backslash inside a string
      3 - just after a '/' outside a string (possible comment start)
      4 - inside a // comment (until end of line)
    """

    def __init__(self):
        self.state = 0

    def push(self, ch):
        """Feed one character; return the text to emit for it ('' to drop)."""
        ch = ord(ch)
        if self.state == 0:
            if ch == ord('"'):
                self.state = 1
                return to_str(chr(ch))
            elif ch == ord('/'):
                # might start a comment; hold the '/' until the next char
                self.state = 3
            else:
                return to_str(chr(ch))
        elif self.state == 1:
            if ch == ord('"'):
                self.state = 0
                return to_str(chr(ch))
            elif ch == ord('\\'):
                self.state = 2
            return to_str(chr(ch))
        elif self.state == 2:
            self.state = 1
            if ch == ord('"'):
                return to_str(chr(ch))
            return "\\" + to_str(chr(ch))
        elif self.state == 3:
            if ch == ord('/'):
                self.state = 4
            else:
                # Not a comment after all: emit the held '/' plus this char.
                # Bug fix: the state was never reset here, so after a lone
                # '/' every subsequent character got a spurious '/' prefix.
                self.state = 0
                return "/" + to_str(chr(ch))
        elif self.state == 4:
            if ch == ord('\n'):
                self.state = 0
                return "\n"
        return ""
def remove_comment(json):
    """Strip // line comments from a JSON-like document string."""
    formatter = JSFormat()
    return "".join(formatter.push(ch) for ch in json)
def parse_json_in_str(data):
    # parse json and convert everything from unicode to str
    # (string values are recursively encoded to UTF-8 bytes by _decode_dict)
    return json.loads(data, object_hook=_decode_dict)
| {
"pile_set_name": "Github"
} |
--TEST--
DRCP: oci_connect() with oci_close() and oci8.old_oci_close_semantics OFF
--SKIPIF--
<?php if (!extension_loaded('oci8')) die("skip no oci8 extension"); ?>
--INI--
oci8.old_oci_close_semantics=0
oci8.connection_class=test
--FILE--
<?php

require dirname(__FILE__)."/details.inc";

// Test will open a connection,
// close the connection,
// then open another connection.
// With oci_close() the connection is released to the pool and hence
// the second connection will be a different resource.

// OCI_CONNECT
echo "This is with a OCI_CONNECT\n";
var_dump($conn1 = oci_connect($user,$password,$dbase));
$rn1 = (int)$conn1;
oci_close($conn1);

// Open another connection
var_dump($conn2 = oci_connect($user,$password,$dbase));
$rn2 = (int)$conn2;
oci_close($conn2);

// Compare the resource numbers: with old_oci_close_semantics OFF the first
// close really released the session, so a new resource is expected.
if ($rn1 === $rn2)
	echo "Both connections share a resource : NOT OK \n";
else
	echo "Both connections are different : OK \n";

echo "Done\n";

?>
--EXPECTF--
This is with a OCI_CONNECT
resource(%d) of type (oci8 connection)
resource(%d) of type (oci8 connection)
Both connections are different : OK
Done
| {
"pile_set_name": "Github"
} |
; Joomla! Project
; Copyright (C) 2005 - 2015 Open Source Matters. All rights reserved.
; License GNU General Public License version 2 or later; see LICENSE.txt, see LICENSE.php
; Note : All ini files need to be saved as UTF-8
COM_TAGS="Tags"
COM_TAGS_CONTENT_TYPE_ARTICLE="Article"
COM_TAGS_CONTENT_TYPE_ARTICLE_CATEGORY="Article Category"
COM_TAGS_CONTENT_TYPE_BANNER="Banner"
COM_TAGS_CONTENT_TYPE_BANNER_CLIENT="Banner Client"
COM_TAGS_CONTENT_TYPE_BANNERS_CATEGORY="Banner Category"
COM_TAGS_CONTENT_TYPE_CONTACT="Contact"
COM_TAGS_CONTENT_TYPE_CONTACT_CATEGORY="Contact Category"
COM_TAGS_CONTENT_TYPE_NEWSFEED="News Feed"
COM_TAGS_CONTENT_TYPE_NEWSFEEDS_CATEGORY="News Feed Category"
COM_TAGS_CONTENT_TYPE_TAG="Tag"
COM_TAGS_CONTENT_TYPE_USER="User"
COM_TAGS_CONTENT_TYPE_USER_NOTES="User Notes"
COM_TAGS_CONTENT_TYPE_USER_NOTES_CATEGORY="User Notes Category"
COM_TAGS_CONTENT_TYPE_WEBLINK="Web Link"
COM_TAGS_CONTENT_TYPE_WEBLINKS_CATEGORY="Web Links Category"
COM_TAGS_TAG="Tag"
COM_TAGS_TAG_VIEW_DEFAULT_DESC="This links to a list of items with specific tags."
COM_TAGS_TAG_VIEW_DEFAULT_OPTION="Default"
COM_TAGS_TAG_VIEW_DEFAULT_TITLE="Tagged Items"
COM_TAGS_TAG_VIEW_LIST_COMPACT_OPTION="Compact layout"
COM_TAGS_TAG_VIEW_LIST_COMPACT_TITLE="Compact list of tagged items"
COM_TAGS_TAG_VIEW_LIST_DESC="List of items that have been tagged with the selected tags."
COM_TAGS_TAG_VIEW_LIST_OPTION="List view options"
COM_TAGS_TAG_VIEW_LIST_TITLE="Tagged items list"
COM_TAGS_TAGS="Tags"
COM_TAGS_TAGS_VIEW_COMPACT_DESC="Compact list of tags."
COM_TAGS_TAGS_VIEW_COMPACT_TITLE="Compact Tags View"
COM_TAGS_TAGS_VIEW_DEFAULT_DESC="This links to a detailed list of all tags."
COM_TAGS_TAGS_VIEW_DEFAULT_TITLE="List of all tags"
COM_TAGS_XML_DESCRIPTION="A component for tagging content items."
| {
"pile_set_name": "Github"
} |
/**
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gravitee.gateway.standalone.http;
import com.github.tomakehurst.wiremock.matching.EqualToPattern;
import io.gravitee.common.http.HttpHeadersValues;
import io.gravitee.common.http.MediaType;
import io.gravitee.gateway.standalone.AbstractWiremockGatewayTest;
import io.gravitee.gateway.standalone.junit.annotation.ApiDescriptor;
import io.gravitee.gateway.standalone.utils.StringUtils;
import org.apache.http.HttpHeaders;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.client.fluent.Request;
import org.apache.http.client.fluent.Response;
import org.apache.http.entity.ContentType;
import org.junit.Ignore;
import org.junit.Test;
import static com.github.tomakehurst.wiremock.client.WireMock.*;
import static org.junit.Assert.*;
/**
* @author David BRASSELY (david.brassely at graviteesource.com)
* @author GraviteeSource Team
*/
@ApiDescriptor("/io/gravitee/gateway/standalone/http/teams.json")
public class PostContentGatewayTest extends AbstractWiremockGatewayTest {
@Test
@Ignore
/*
 * Disabled: since the latest Wiremock & Jetty upgrade the Content-Type
 * response header is no longer sent back and is replaced by
 * transfer-encoding, which breaks the Content-Length assertions below.
 */
public void small_body_with_content_length() throws Exception {
    // Upstream stub returns the fixture body with an explicit Content-Length.
    String mockContent = StringUtils.copy(
            getClass().getClassLoader().getResourceAsStream("case1/response_content.json"));

    stubFor(post(urlEqualTo("/team/my_team"))
            .willReturn(
                    ok()
                            .withBody(mockContent)
                            .withHeader(io.gravitee.common.http.HttpHeaders.CONTENT_LENGTH, Integer.toString(mockContent.length()))
                            .withHeader(io.gravitee.common.http.HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON)));

    Request request = Request.Post("http://localhost:8082/test/my_team")
            .bodyString(
                    mockContent,
                    ContentType.APPLICATION_JSON);

    HttpResponse response = request.execute().returnResponse();
    assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode());

    // Body must round-trip unchanged and Content-Length must be preserved.
    String responseContent = StringUtils.copy(response.getEntity().getContent());
    assertEquals(mockContent, responseContent);
    assertEquals(mockContent.length(), Integer.parseInt(response.getFirstHeader(HttpHeaders.CONTENT_LENGTH).getValue()));
    assertEquals(responseContent.length(), Integer.parseInt(response.getFirstHeader(HttpHeaders.CONTENT_LENGTH).getValue()));

    // Gateway must forward the request body to the upstream unchanged.
    verify(postRequestedFor(urlEqualTo("/team/my_team"))
            .withRequestBody(equalToJson(mockContent)));
}
@Test
public void small_body_with_chunked_transfer_encoding() throws Exception {
    String mockContent = StringUtils.copy(
            getClass().getClassLoader().getResourceAsStream("case1/response_content.json"));

    stubFor(post(urlEqualTo("/team/my_team"))
            .willReturn(ok()
                    .withBody(mockContent)
                    .withHeader(io.gravitee.common.http.HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON)));

    Request request = Request.Post("http://localhost:8082/test/my_team")
            .bodyStream(
                    this.getClass().getClassLoader().getResourceAsStream("case1/request_content.json"),
                    ContentType.APPLICATION_JSON);

    HttpResponse response = request.execute().returnResponse();

    // Without an explicit upstream Content-Length the gateway streams the
    // body back using chunked transfer encoding.
    String responseContent = StringUtils.copy(response.getEntity().getContent());
    assertEquals(mockContent, responseContent);
    assertNull(response.getFirstHeader(HttpHeaders.CONTENT_LENGTH));

    org.apache.http.Header transferEncoding = response.getFirstHeader(HttpHeaders.TRANSFER_ENCODING);
    assertNotNull(transferEncoding);
    assertEquals(HttpHeadersValues.TRANSFER_ENCODING_CHUNKED, transferEncoding.getValue());

    verify(postRequestedFor(urlEqualTo("/team/my_team"))
            .withRequestBody(equalToJson(mockContent)));
}
@Test
public void large_body_with_chunked_transfer_encoding() throws Exception {
    String mockContent = StringUtils.copy(
            getClass().getClassLoader().getResourceAsStream("case2/response_content.json"));

    stubFor(post(urlEqualTo("/team/my_team"))
            .willReturn(ok()
                    .withBody(mockContent)
                    .withHeader(io.gravitee.common.http.HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON)));

    Request request = Request.Post("http://localhost:8082/test/my_team")
            .bodyStream(
                    getClass().getClassLoader().getResourceAsStream("case2/response_content.json"),
                    ContentType.APPLICATION_JSON);

    HttpResponse response = request.execute().returnResponse();
    assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode());

    String content = StringUtils.copy(response.getEntity().getContent());
    assertEquals(652051, content.length());
    // Bug fix: the original asserted content against itself (always true);
    // compare the gateway response with the stubbed upstream body instead.
    assertEquals(mockContent, content);

    // Large body is streamed back chunked, without a Content-Length.
    assertNull(response.getFirstHeader(HttpHeaders.CONTENT_LENGTH));
    assertNotNull(response.getFirstHeader(HttpHeaders.TRANSFER_ENCODING));
    assertEquals(HttpHeadersValues.TRANSFER_ENCODING_CHUNKED, response.getFirstHeader(HttpHeaders.TRANSFER_ENCODING).getValue());

    verify(postRequestedFor(urlEqualTo("/team/my_team"))
            .withRequestBody(equalToJson(mockContent)));
}
@Test
public void large_body_with_content_length() throws Exception {
    String mockContent = StringUtils.copy(
            getClass().getClassLoader().getResourceAsStream("case2/response_content.json"));

    stubFor(post(urlEqualTo("/team/my_team"))
            .willReturn(ok()
                    .withBody(mockContent)
                    .withHeader(io.gravitee.common.http.HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON)));

    Request request = Request.Post("http://localhost:8082/test/my_team").bodyString(mockContent, ContentType.APPLICATION_JSON);

    HttpResponse response = request.execute().returnResponse();
    assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode());

    String content = StringUtils.copy(response.getEntity().getContent());
    assertEquals(652051, content.length());
    // Bug fix: the original asserted content against itself (always true);
    // compare the gateway response with the stubbed upstream body instead.
    assertEquals(mockContent, content);

    // Even with Content-Length on the request, the response is chunked.
    assertNull(response.getFirstHeader(HttpHeaders.CONTENT_LENGTH));
    assertNotNull(response.getFirstHeader(HttpHeaders.TRANSFER_ENCODING));
    assertEquals(HttpHeadersValues.TRANSFER_ENCODING_CHUNKED, response.getFirstHeader(HttpHeaders.TRANSFER_ENCODING).getValue());

    verify(postRequestedFor(urlEqualTo("/team/my_team"))
            .withRequestBody(equalTo(mockContent)));
}
@Test
public void no_content_with_chunked_encoding_transfer() throws Exception {
    stubFor(post(urlEqualTo("/team/my_team")).willReturn(ok()));
    Request request = Request.Post("http://localhost:8082/test/my_team");
    HttpResponse response = request.execute().returnResponse();
    assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode());
    // No body was sent, so the gateway must not report any forwarded
    // transfer encoding.
    assertNull(response.getFirstHeader("X-Forwarded-Transfer-Encoding"));
    String responseContent = StringUtils.copy(response.getEntity().getContent());
    assertEquals(0, responseContent.length());
    verify(postRequestedFor(urlEqualTo("/team/my_team"))
            .withoutHeader(HttpHeaders.TRANSFER_ENCODING));
}
@Test
public void no_content_without_chunked_encoding_transfer() throws Exception {
    stubFor(post(urlEqualTo("/team/my_team")).willReturn(ok()));
    Request request = Request.Post("http://localhost:8082/test/my_team")
            .addHeader(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON)
            .removeHeaders(HttpHeaders.TRANSFER_ENCODING);
    Response response = request.execute();
    HttpResponse returnResponse = response.returnResponse();
    assertEquals(HttpStatus.SC_OK, returnResponse.getStatusLine().getStatusCode());
    // Neither a body nor a Transfer-Encoding header was sent, so none
    // should be reported back by the gateway.
    assertNull(returnResponse.getFirstHeader("X-Forwarded-Transfer-Encoding"));
    String responseContent = StringUtils.copy(returnResponse.getEntity().getContent());
    assertEquals(0, responseContent.length());
    // The upstream call keeps the Content-Type but gets no Transfer-Encoding.
    verify(postRequestedFor(urlEqualTo("/team/my_team"))
            .withoutHeader(HttpHeaders.TRANSFER_ENCODING)
            .withHeader(io.gravitee.common.http.HttpHeaders.CONTENT_TYPE, new EqualToPattern(MediaType.APPLICATION_JSON)));
}
@Test
public void get_no_content_with_chunked_encoding_transfer() throws Exception {
    stubFor(get(urlEqualTo("/team/my_team")).willReturn(ok()));
    Request request = Request.Get("http://localhost:8082/test/my_team")
            .addHeader(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON)
            .removeHeaders(HttpHeaders.TRANSFER_ENCODING);
    Response response = request.execute();
    HttpResponse returnResponse = response.returnResponse();
    assertEquals(HttpStatus.SC_OK, returnResponse.getStatusLine().getStatusCode());
    // A body-less GET must not carry any forwarded transfer encoding.
    assertNull(returnResponse.getFirstHeader("X-Forwarded-Transfer-Encoding"));
    String responseContent = StringUtils.copy(returnResponse.getEntity().getContent());
    assertEquals(0, responseContent.length());
    verify(getRequestedFor(urlEqualTo("/team/my_team"))
            .withoutHeader(HttpHeaders.TRANSFER_ENCODING)
            .withHeader(io.gravitee.common.http.HttpHeaders.CONTENT_TYPE, new EqualToPattern(MediaType.APPLICATION_JSON)));
}
@Test
public void get_no_content_with_chunked_encoding_transfer_and_content_type() throws Exception {
    stubFor(get(urlEqualTo("/team/my_team")).willReturn(ok()));
    Request request = Request.Get("http://localhost:8082/test/my_team")
            .addHeader(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON);
    Response response = request.execute();
    HttpResponse returnResponse = response.returnResponse();
    assertEquals(HttpStatus.SC_OK, returnResponse.getStatusLine().getStatusCode());
    // Body-less GET: no forwarded transfer encoding expected.
    assertNull(returnResponse.getFirstHeader("X-Forwarded-Transfer-Encoding"));
    String responseContent = StringUtils.copy(returnResponse.getEntity().getContent());
    assertEquals(0, responseContent.length());
    // Content-Type must still be propagated upstream.
    verify(getRequestedFor(urlEqualTo("/team/my_team"))
            .withHeader(io.gravitee.common.http.HttpHeaders.CONTENT_TYPE, new EqualToPattern(MediaType.APPLICATION_JSON)));
}
}
| {
"pile_set_name": "Github"
} |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.api.records.impl.pb;
import static org.junit.Assert.*;
import java.util.stream.Stream;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.ContainerSubState;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto;
import org.junit.Test;
public class TestProtoUtils {
  @Test
  public void testConvertFromOrToProtoFormat() {
    // Every enum constant must round-trip through the ProtoUtils
    // conversion helpers; an IllegalArgumentException indicates a value
    // missing from the enum <-> proto mapping.
    try {
      for (ContainerState state : ContainerState.values()) {
        ProtoUtils.convertToProtoFormat(state);
      }
      for (ContainerSubState subState : ContainerSubState.values()) {
        ProtoUtils.convertToProtoFormat(subState);
      }
      for (ContainerSubStateProto subStateProto : ContainerSubStateProto.values()) {
        ProtoUtils.convertFromProtoFormat(subStateProto);
      }
      for (ContainerStateProto stateProto : ContainerStateProto.values()) {
        ProtoUtils.convertFromProtoFormat(stateProto);
      }
    } catch (IllegalArgumentException ex) {
      fail(ex.getMessage());
    }
  }
}
| {
"pile_set_name": "Github"
} |
#DPN92
# Train a DPN92 model for 200 epochs with cosine learning-rate decay
# (base LR 0.1), batch size 256 and L2 weight decay 1e-4.
# Mixup and label smoothing (epsilon 0.1) are both enabled.
# Checkpoints are written under output/.
python train.py \
--model=DPN92 \
--batch_size=256 \
--lr_strategy=cosine_decay \
--lr=0.1 \
--num_epochs=200 \
--model_save_dir=output/ \
--l2_decay=1e-4 \
--use_mixup=True \
--use_label_smoothing=True \
--label_smoothing_epsilon=0.1
| {
"pile_set_name": "Github"
} |
/* Slide dimensions exposed as custom properties so every slide box
   can reference the same canvas size. */
* {
--width: 1920px;
--height: 1080px;
}
/* Root font size: all rem units below scale from this. */
html {
font-size: 14px;
}
#rifb {
background-color: var(--void-bg-color);
}
#rifb h1 {
font-size: 10rem;
margin-bottom: 5rem;
}
#rifb section div {
font-size: 6rem;
line-height: 10rem;
}
#rifb img {
border: 0px;
}
/* Title slides fill the full configured canvas. */
#rifb section.title-box {
padding: 0;
/*height: 100vh;*/
/*width: 960px;
height: 700px;*/
width: var(--width);
height: var(--height);
/*width: 1920px;
height: 1080px;*/
}
/* -------------- */
/* Two-pane flex layout: text panel (child 1) beside a content pane (child 2). */
.left-box {
display: flex;
flex-wrap: nowrap;
/*width: 960px;
height: 700px;*/
height: 100%;
width: 100%;
}
/* Left text panel. */
#rifb .left-box > :nth-child(1) {
/*background-color: blue;*/
/*border: 1px solid blue;*/
background-color: var(--main-bg-color);
flex-basis: 30%;
flex-shrink: 0;
flex-grow: 1;
/*padding-top: 6rem;*/
padding-bottom: 10rem;
padding-left: 10rem;
padding-right: 10rem;
margin: 0;
/*height: 100%;*/
}
#rifb .left-box > :nth-child(1) > :nth-child(1) {
background-color: var(--main-bg-color);
margin-top: 6rem;
}
/* Right content pane. */
.left-box > :nth-child(2) {
/*background-color: red;*/
/*border: 1px solid red;*/
flex-basis: 70%;
flex-shrink: 1;
flex-grow: 0;
margin: 0;
}
/* NOTE(review): duplicate copies of the two rules above were removed here;
   they repeated the same selectors with identical (or subset) declarations
   and had no effect on the cascade. */
#s-chain .left-box > :nth-child(2) {
background-image: url("../images/chain.jpg");
background-size: cover;
}
#s-chain-2 .left-box > :nth-child(2) {
background-image: url("../images/chain.jpg");
background-size: cover;
}
#s-chain-2 .left-box > :nth-child(2) span {
position: relative;
top: 20%;
text-shadow: var(--emph-text-color) -1px -1px 10px;
letter-spacing: 0.2em;
}
/* ------------------------ */
/* Text panel on the left, naturally-sized image on the right. */
.left-img-box {
display: flex;
flex-wrap: nowrap;
/*width: 960px;
height: 700px;*/
height: 100%;
width: 100%;
}
/* Left text panel: grows to take up the space the image leaves over. */
#rifb .left-img-box > :nth-child(1) {
/*background-color: blue;*/
/*border: 1px solid blue;*/
background-color: var(--main-bg-color);
flex-basis: 30%;
flex-shrink: 0;
flex-grow: 1;
/*padding-top: 6rem;*/
padding-bottom: 10rem;
padding-left: 10rem;
padding-right: 10rem;
margin: 0;
/*height: 100%;*/
}
#rifb .left-img-box > :nth-child(1) > :nth-child(1) {
background-color: var(--main-bg-color);
margin-top: 6rem;
}
/* Right pane: sized by its content, shrinks before the text panel does. */
.left-img-box > :nth-child(2) {
/*background-color: red;*/
/*border: 1px solid red;*/
flex-basis: auto;
flex-shrink: 2;
flex-grow: 0;
margin: 0;
}
/* Images fill the pane height without overflowing either dimension. */
#rifb .left-img-box > img:nth-child(2) {
margin: 0;
height: 100%;
max-height: 100%;
max-width: 100%;
}
/* ------------------------ */
/* Mirror layout: image on the left, text panel on the right. */
.right-img-box {
display: flex;
flex-wrap: nowrap;
/*width: 960px;
height: 700px;*/
height: 100%;
width: 100%;
}
#rifb .right-img-box > :nth-child(2) {
/*background-color: blue;*/
/*border: 1px solid blue;*/
background-color: var(--main-bg-color);
flex-basis: 30%;
flex-shrink: 0;
flex-grow: 1;
/*padding-top: 6rem;*/
padding-bottom: 10rem;
padding-left: 10rem;
padding-right: 10rem;
margin: 0;
/*height: 100%;*/
}
#rifb .right-img-box > :nth-child(2) > :nth-child(1) {
margin-top: 6rem;
}
.right-img-box > :nth-child(1) {
/*background-color: red;*/
/*border: 1px solid red;*/
flex-basis: auto;
flex-shrink: 2;
flex-grow: 0;
margin: 0;
}
#rifb .right-img-box > img:nth-child(1) {
margin: 0;
height: 100%;
max-height: 100%;
max-width: 100%;
}
/* Slide-specific overrides for the dalek slide. */
#s-dalek img {
background-color: black;
}
#s-dalek .right-img-box > :nth-child(2) {
background-color: var(--alt-bg-color-2);
}
/* ------------- */
/* Logo wall: a centered, wrapping row of size-capped logos on a gradient. */
#s-logos > div {
display: flex;
flex-wrap: wrap;
align-items: center;
justify-content: center;
padding: 100px;
vertical-align: center;
background: linear-gradient(#DDD, #555);
}
#s-logos > div > img {
background-color: inherit;
box-shadow: none;
max-height: 200px;
max-width: 300px;
width: 100%;
height: auto;
margin: 1rem;
}
| {
"pile_set_name": "Github"
} |
<!--
Copyright 2013 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!DOCTYPE html>
<link rel="author" title="Sandy Phan, Sarah Heimlich",
href="mailto:sandyphan@google.com, sarahheimlich@google.com">
<title>Parallel Animation</title>
<meta name="flags" content="dom">
<meta name="tutorial" content="Web Animations tutorials">
<link rel="stylesheet" type="text/css" href="../tutorial-style.css">
<script type="text/javascript" src="../jquery.js"></script>
<script type="text/javascript" src="../page-load.js"></script>
<ul class="topMenu">
<li><a href="../home-page.html">Home</a></li>
<li><a href="#">Tutorials</a>
<ul class="subMenu">
<li><a href="../basic-animations/basic-animation.html">
Basic Animations</a></li>
<li><a href="parallel.html">Parallel Animations</a></li>
<li><a href="../sequence/sequence.html">Sequence Animations
</a></li>
<li><a href="../timing-dictionary/timing-dictionary.html">
Timing Dictionary</a></li>
<li><a href="../timing-functions/timing-function.html">
Timing Functions</a></li>
</ul>
</li>
<li><a href="../references/references.html">References</a></li>
<li><a href="../about.html">About</a></li>
</ul>
<div id="main">
<div id="title">Web Animations Tutorial</div>
<div class="line-separator"></div>
<div class="content">
<p class="description">Web animation brings motion — such as Flash or GIF
animations — directly onto the web page. Traditional web animations still
require plugins such as Java Applets, Shockwave, or Adobe Flash. In this
tutorial, we will show you how to create animations using JavaScript
without needing to install any plugins.</p>
<br />
<div class="heading subTitle">Parallel Animation Group</div>
<p class="description">There certainly are times when you want to
group items together such that they have the same features and effects.
There are 2 types of groupings: parallel and sequential. In this
section you will learn about parallel groups and what kind of
feature does it have to help you write less code.</p>
<p class="description">As the name of the group suggests, all of the
children placed in a parallel group run in parallel: they all start
together, and each one ends at a different time depending on its own
duration.</p>
<p class="description">The following is the interface for creating
a parallel animation group.</p>
<code class="codeSamples">new ParGroup([children],
{timing/timing dictionary});</code>
<p class="description note">Note that children may be left empty (i.e. you
can pass [] if you don't want to specify any children), and that the timing
argument is optional and may be omitted entirely.</p>
<div class="separator"></div>
</div> <!-- content ending div -->
<div class="line-separator"></div>
</div> <!-- main ending div -->
<ul class="sideMenu">
<li id="menuLabel">Parallel Animation Group</li>
<li>Basic Info</li>
<li>Exercise 1</li>
<li>Exercise 2</li>
<li>Exercise 3</li>
</ul>
<div class="separator"></div>
<script type="text/javascript" src="../try-it-yourself.js"></script> | {
"pile_set_name": "Github"
} |
(function webpackUniversalModuleDefinition(root, factory) {
if(typeof exports === 'object' && typeof module === 'object')
module.exports = factory(require("omi"));
else if(typeof define === 'function' && define.amd)
define(["omi"], factory);
else if(typeof exports === 'object')
exports["ORadio"] = factory(require("omi"));
else
root["ORadio"] = factory(root["Omi"]);
})(this, function(__WEBPACK_EXTERNAL_MODULE_omi__) {
return /******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId]) {
/******/ return installedModules[moduleId].exports;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ i: moduleId,
/******/ l: false,
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ // Flag the module as loaded
/******/ module.l = true;
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/******/
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/
/******/ // define getter function for harmony exports
/******/ __webpack_require__.d = function(exports, name, getter) {
/******/ if(!__webpack_require__.o(exports, name)) {
/******/ Object.defineProperty(exports, name, { enumerable: true, get: getter });
/******/ }
/******/ };
/******/
/******/ // define __esModule on exports
/******/ __webpack_require__.r = function(exports) {
/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
/******/ }
/******/ Object.defineProperty(exports, '__esModule', { value: true });
/******/ };
/******/
/******/ // create a fake namespace object
/******/ // mode & 1: value is a module id, require it
/******/ // mode & 2: merge all properties of value into the ns
/******/ // mode & 4: return value when already ns object
/******/ // mode & 8|1: behave like require
/******/ __webpack_require__.t = function(value, mode) {
/******/ if(mode & 1) value = __webpack_require__(value);
/******/ if(mode & 8) return value;
/******/ if((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;
/******/ var ns = Object.create(null);
/******/ __webpack_require__.r(ns);
/******/ Object.defineProperty(ns, 'default', { enumerable: true, value: value });
/******/ if(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));
/******/ return ns;
/******/ };
/******/
/******/ // getDefaultExport function for compatibility with non-harmony modules
/******/ __webpack_require__.n = function(module) {
/******/ var getter = module && module.__esModule ?
/******/ function getDefault() { return module['default']; } :
/******/ function getModuleExports() { return module; };
/******/ __webpack_require__.d(getter, 'a', getter);
/******/ return getter;
/******/ };
/******/
/******/ // Object.prototype.hasOwnProperty.call
/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
/******/
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/
/******/
/******/ // Load entry module and return exports
/******/ return __webpack_require__(__webpack_require__.s = "./src/index.tsx");
/******/ })
/************************************************************************/
/******/ ({
/***/ "./node_modules/_css-loader@1.0.1@css-loader/index.js!./node_modules/_resolve-url-loader@3.1.1@resolve-url-loader/index.js!./node_modules/_sass-loader@7.3.1@sass-loader/dist/cjs.js?!./src/index.scss":
/*!****************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/_css-loader@1.0.1@css-loader!./node_modules/_resolve-url-loader@3.1.1@resolve-url-loader!./node_modules/_sass-loader@7.3.1@sass-loader/dist/cjs.js??ref--4-3!./src/index.scss ***!
\****************************************************************************************************************************************************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
exports = module.exports = __webpack_require__(/*! ../node_modules/_css-loader@1.0.1@css-loader/lib/css-base.js */ "./node_modules/_css-loader@1.0.1@css-loader/lib/css-base.js")(false);
// imports
// module
exports.push([module.i, ":host {\n display: inline-block; }\n\n.o-radio,\n.o-radio--medium.is-bordered .o-radio__label {\n font-size: 14px; }\n\n.o-radio,\n.o-radio__input {\n white-space: nowrap;\n line-height: 1;\n outline: 0; }\n\n.o-radio,\n.o-radio__inner,\n.o-radio__input {\n position: relative;\n display: inline-block; }\n\n.o-radio {\n color: #606266;\n font-weight: 500;\n cursor: pointer;\n margin-right: 30px;\n -moz-user-select: none;\n -webkit-user-select: none;\n -ms-user-select: none; }\n\n.o-radio.is-bordered {\n padding: 12px 20px 0 10px;\n border-radius: 4px;\n border: 1px solid #DCDFE6;\n -webkit-box-sizing: border-box;\n box-sizing: border-box;\n height: 40px; }\n\n.o-radio.is-bordered.is-checked {\n border-color: #07c160; }\n\n.o-radio.is-bordered.is-disabled {\n cursor: not-allowed;\n border-color: #EBEEF5; }\n\n.o-radio__input.is-disabled .o-radio__inner,\n.o-radio__input.is-disabled.is-checked .o-radio__inner {\n background-color: #F5F7FA;\n border-color: #E4E7ED; }\n\n.o-radio.is-bordered + .o-radio.is-bordered {\n margin-left: 10px; }\n\n.o-radio--medium.is-bordered {\n padding: 10px 20px 0 10px;\n border-radius: 4px;\n height: 36px; }\n\n.o-radio--mini.is-bordered .o-radio__label,\n.o-radio--small.is-bordered .o-radio__label {\n font-size: 12px; }\n\n.o-radio--medium.is-bordered .o-radio__inner {\n height: 14px;\n width: 14px; }\n\n.o-radio--small.is-bordered {\n padding: 8px 15px 0 10px;\n border-radius: 3px;\n height: 32px; }\n\n.o-radio--small.is-bordered .o-radio__inner {\n height: 12px;\n width: 12px; }\n\n.o-radio--mini.is-bordered {\n padding: 6px 15px 0 10px;\n border-radius: 3px;\n height: 28px; }\n\n.o-radio--mini.is-bordered .o-radio__inner {\n height: 12px;\n width: 12px; }\n\n.o-radio:last-child {\n margin-right: 0; }\n\n.o-radio__input {\n cursor: pointer;\n vertical-align: middle; }\n\n.o-radio__input.is-disabled .o-radio__inner {\n cursor: not-allowed; }\n\n.o-radio__input.is-disabled .o-radio__inner::after {\n cursor: 
not-allowed;\n background-color: #F5F7FA; }\n\n.o-radio__input.is-disabled .o-radio__inner + .o-radio__label {\n cursor: not-allowed; }\n\n.o-radio__input.is-disabled.is-checked .o-radio__inner::after {\n background-color: #C0C4CC; }\n\n.o-radio__input.is-disabled + span.o-radio__label {\n color: #C0C4CC;\n cursor: not-allowed; }\n\n.o-radio__input.is-checked .o-radio__inner {\n border-color: #07c160;\n background: #07c160; }\n\n.o-radio__input.is-checked .o-radio__inner::after {\n -webkit-transform: translate(-50%, -50%) scale(1);\n transform: translate(-50%, -50%) scale(1); }\n\n.o-radio__input.is-checked + .o-radio__label {\n color: #07c160; }\n\n.o-radio__input.is-focus .o-radio__inner {\n border-color: #07c160; }\n\n.o-radio__inner {\n border: 1px solid #DCDFE6;\n border-radius: 100%;\n width: 14px;\n height: 14px;\n background-color: #FFF;\n cursor: pointer;\n -webkit-box-sizing: border-box;\n box-sizing: border-box; }\n\n.o-radio__inner:hover {\n border-color: #07c160; }\n\n.o-radio__inner::after {\n width: 4px;\n height: 4px;\n border-radius: 100%;\n background-color: #FFF;\n content: \"\";\n position: absolute;\n left: 50%;\n top: 50%;\n -webkit-transform: translate(-50%, -50%) scale(0);\n transform: translate(-50%, -50%) scale(0);\n -webkit-transition: -webkit-transform .15s ease-in;\n transition: -webkit-transform .15s ease-in;\n transition: transform .15s ease-in;\n transition: transform .15s ease-in, -webkit-transform .15s ease-in; }\n\n.o-radio__original {\n opacity: 0;\n outline: 0;\n position: absolute;\n z-index: -1;\n top: 0;\n left: 0;\n right: 0;\n bottom: 0;\n margin: 0; }\n\n.o-radio:focus:not(.is-focus):not(:active):not(.is-disabled) .o-radio__inner {\n -webkit-box-shadow: 0 0 2px 2px #07c160;\n box-shadow: 0 0 2px 2px #07c160; }\n\n.o-radio__label {\n font-size: 14px;\n padding-left: 10px; }\n", ""]);
// exports
/***/ }),
/***/ "./node_modules/_css-loader@1.0.1@css-loader/lib/css-base.js":
/*!*******************************************************************!*\
!*** ./node_modules/_css-loader@1.0.1@css-loader/lib/css-base.js ***!
\*******************************************************************/
/*! no static exports found */
/***/ (function(module, exports) {
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
// css base code, injected by the css-loader
module.exports = function(useSourceMap) {
var list = [];
// return the list of modules as css string
list.toString = function toString() {
return this.map(function (item) {
var content = cssWithMappingToString(item, useSourceMap);
if(item[2]) {
return "@media " + item[2] + "{" + content + "}";
} else {
return content;
}
}).join("");
};
// import a list of modules into the list
list.i = function(modules, mediaQuery) {
if(typeof modules === "string")
modules = [[null, modules, ""]];
var alreadyImportedModules = {};
for(var i = 0; i < this.length; i++) {
var id = this[i][0];
if(typeof id === "number")
alreadyImportedModules[id] = true;
}
for(i = 0; i < modules.length; i++) {
var item = modules[i];
// skip already imported module
// this implementation is not 100% perfect for weird media query combinations
// when a module is imported multiple times with different media queries.
// I hope this will never occur (Hey this way we have smaller bundles)
if(typeof item[0] !== "number" || !alreadyImportedModules[item[0]]) {
if(mediaQuery && !item[2]) {
item[2] = mediaQuery;
} else if(mediaQuery) {
item[2] = "(" + item[2] + ") and (" + mediaQuery + ")";
}
list.push(item);
}
}
};
return list;
};
// Renders one css-loader list item ([id, css, media, sourceMap]) to a CSS
// string. When source maps are enabled and btoa exists (i.e. in a browser),
// sourceURL comments and an inline base64 sourceMappingURL are appended.
function cssWithMappingToString(item, useSourceMap) {
var content = item[1] || '';
var cssMapping = item[3];
if (!cssMapping) {
return content;
}
if (useSourceMap && typeof btoa === 'function') {
var sourceMapping = toComment(cssMapping);
// One sourceURL comment per original source file in the map.
var sourceURLs = cssMapping.sources.map(function (source) {
return '/*# sourceURL=' + cssMapping.sourceRoot + source + ' */'
});
return [content].concat(sourceURLs).concat([sourceMapping]).join('\n');
}
return [content].join('\n');
}
// Adapted from convert-source-map (MIT)
// Serializes a source-map object into an inline base64 data-URI comment
// of the form /*# sourceMappingURL=data:application/json;... */.
function toComment(sourceMap) {
// eslint-disable-next-line no-undef
var base64 = btoa(unescape(encodeURIComponent(JSON.stringify(sourceMap))));
var data = 'sourceMappingURL=data:application/json;charset=utf-8;base64,' + base64;
return '/*# ' + data + ' */';
}
/***/ }),
/***/ "./src/index.scss":
/*!************************!*\
!*** ./src/index.scss ***!
\************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
var result = __webpack_require__(/*! !../node_modules/_css-loader@1.0.1@css-loader!../node_modules/_resolve-url-loader@3.1.1@resolve-url-loader!../node_modules/_sass-loader@7.3.1@sass-loader/dist/cjs.js??ref--4-3!./index.scss */ "./node_modules/_css-loader@1.0.1@css-loader/index.js!./node_modules/_resolve-url-loader@3.1.1@resolve-url-loader/index.js!./node_modules/_sass-loader@7.3.1@sass-loader/dist/cjs.js?!./src/index.scss");
if (typeof result === "string") {
module.exports = result;
} else {
module.exports = result.toString();
}
/***/ }),
/***/ "./src/index.tsx":
/*!***********************!*\
!*** ./src/index.tsx ***!
\***********************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
var __assign = (this && this.__assign) || function () {
__assign = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
Object.defineProperty(exports, "__esModule", { value: true });
var omi_1 = __webpack_require__(/*! omi */ "omi");
var css = __webpack_require__(/*! ./index.scss */ "./src/index.scss");
var Radio = /** @class */ (function (_super) {
__extends(Radio, _super);
function Radio() {
var _this = _super !== null && _super.apply(this, arguments) || this;
_this.clickHandler = function () {
if (_this.props.disabled)
return;
if (!_this.props.checked) {
_this.group.forEach(function (item) {
item.updateProps({
checked: false
});
});
_this.updateProps({
checked: true
});
_this.fire('change', { value: _this.props.value });
}
};
return _this;
}
Radio.prototype.installed = function () {
var _this = this;
this.group = this.getScopeRoot(this.shadowRoot.host).querySelectorAll("o-radio[name='" + this.props.name + "']");
//fix group 不一致
this.group.forEach(function (ele) {
ele.group = _this.group;
});
};
Radio.prototype.render = function (props) {
return omi_1.h("label", __assign({ role: "radio", tabindex: "0", onClick: this.clickHandler }, omi_1.extractClass(props, 'o-radio', {
'is-disabled': props.disabled,
'is-checked': props.checked
}), { "aria-checked": props.checked }),
omi_1.h("span", __assign({}, omi_1.extractClass(props, 'o-radio__input', {
'is-disabled': props.disabled,
'is-checked': props.checked
})),
omi_1.h("span", { class: "o-radio__inner" }),
omi_1.h("input", __assign({ type: "radio", "aria-hidden": "true" }, extract(props, ['checked', 'value', 'disabled']), { tabindex: "-1", class: "o-radio__original" }))),
omi_1.h("span", { class: "o-radio__label" },
props.label,
omi_1.h("slot", null)));
};
Radio.prototype.getScopeRoot = function (current) {
while (true) {
var p = current.parentNode;
if (p) {
current = p;
}
else {
return current;
}
}
};
Radio.css = css;
Radio.propTypes = {
label: String,
disabled: Boolean,
checked: Boolean,
value: String,
name: String
};
Radio = __decorate([
omi_1.tag('o-radio')
], Radio);
return Radio;
}(omi_1.WeElement));
exports.default = Radio;
// Picks the named property (string) or properties (array of strings) out
// of `props`, returning a new object containing only those that exist.
// NOTE(review): when `prop` is a single string that is absent from `props`,
// the function falls through and returns undefined — callers appear to
// spread the result, where undefined is harmless; confirm before reuse.
function extract(props, prop) {
var _a;
if (typeof prop === 'string') {
if (props.hasOwnProperty(prop)) {
return _a = {}, _a[prop] = props[prop], _a;
}
}
else {
var res_1 = {};
prop.forEach(function (key) {
if (props.hasOwnProperty(key)) {
res_1[key] = props[key];
}
});
return res_1;
}
}
/***/ }),
/***/ "omi":
/*!******************************************************************************!*\
!*** external {"commonjs":"omi","commonjs2":"omi","amd":"omi","root":"Omi"} ***!
\******************************************************************************/
/*! no static exports found */
/***/ (function(module, exports) {
module.exports = __WEBPACK_EXTERNAL_MODULE_omi__;
/***/ })
/******/ })["default"];
});
//# sourceMappingURL=index.js.map | {
"pile_set_name": "Github"
} |
// Copyright 2007, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Google Mock - a framework for writing C++ mock classes.
//
// This file tests the built-in actions in gmock-actions.h.
#include <functional>
#include <memory>
#include <sstream>
#include <string>
#include "gmock/gmock-actions.h"
#include "gmock/gmock.h"
#include "gtest/gtest-spi.h"
#include "gtest/gtest.h"
namespace testing {
namespace gmock_more_actions_test {
using ::std::plus;
using ::std::string;
using testing::_;
using testing::Action;
using testing::ActionInterface;
using testing::DeleteArg;
using testing::Invoke;
using testing::Return;
using testing::ReturnArg;
using testing::ReturnPointee;
using testing::SaveArg;
using testing::SaveArgPointee;
using testing::SetArgReferee;
using testing::Unused;
using testing::WithArg;
using testing::WithoutArgs;
// For suppressing compiler warnings on conversion possibly losing precision.
inline short Short(short n) { return n; } // NOLINT
inline char Char(char ch) { return ch; }
// Sample functions and functors for testing Invoke() and etc.
int Nullary() { return 1; }
class NullaryFunctor {
public:
int operator()() { return 2; }
};
bool g_done = false;
void VoidNullary() { g_done = true; }
class VoidNullaryFunctor {
public:
void operator()() { g_done = true; }
};
// Sample unary predicate for Invoke() tests: true iff x is negative.
bool Unary(int x) {
  if (x < 0) {
    return true;
  }
  return false;
}
const char* Plus1(const char* s) { return s + 1; }
void VoidUnary(int /* n */) { g_done = true; }
bool ByConstRef(const std::string& s) { return s == "Hi"; }
const double g_double = 0;
bool ReferencesGlobalDouble(const double& x) { return &x == &g_double; }
std::string ByNonConstRef(std::string& s) { return s += "+"; } // NOLINT
struct UnaryFunctor {
int operator()(bool x) { return x ? 1 : -1; }
};
const char* Binary(const char* input, short n) { return input + n; } // NOLINT
void VoidBinary(int, char) { g_done = true; }
int Ternary(int x, char y, short z) { return x + y + z; } // NOLINT
void VoidTernary(int, char, bool) { g_done = true; }
// Sample 4-ary function for Invoke() tests: returns the sum of its arguments.
int SumOf4(int a, int b, int c, int d) {
  int total = a;
  total += b;
  total += c;
  total += d;
  return total;
}
int SumOfFirst2(int a, int b, Unused, Unused) { return a + b; }
void VoidFunctionWithFourArguments(char, int, float, double) { g_done = true; }
// Sample 4-ary function for Invoke() tests: concatenates four C strings.
std::string Concat4(const char* s1, const char* s2, const char* s3,
                    const char* s4) {
  std::string result(s1);
  result.append(s2);
  result.append(s3);
  result.append(s4);
  return result;
}
int SumOf5(int a, int b, int c, int d, int e) { return a + b + c + d + e; }
struct SumOf5Functor {
int operator()(int a, int b, int c, int d, int e) {
return a + b + c + d + e;
}
};
std::string Concat5(const char* s1, const char* s2, const char* s3,
const char* s4, const char* s5) {
return std::string(s1) + s2 + s3 + s4 + s5;
}
int SumOf6(int a, int b, int c, int d, int e, int f) {
return a + b + c + d + e + f;
}
struct SumOf6Functor {
int operator()(int a, int b, int c, int d, int e, int f) {
return a + b + c + d + e + f;
}
};
std::string Concat6(const char* s1, const char* s2, const char* s3,
const char* s4, const char* s5, const char* s6) {
return std::string(s1) + s2 + s3 + s4 + s5 + s6;
}
std::string Concat7(const char* s1, const char* s2, const char* s3,
const char* s4, const char* s5, const char* s6,
const char* s7) {
return std::string(s1) + s2 + s3 + s4 + s5 + s6 + s7;
}
std::string Concat8(const char* s1, const char* s2, const char* s3,
const char* s4, const char* s5, const char* s6,
const char* s7, const char* s8) {
return std::string(s1) + s2 + s3 + s4 + s5 + s6 + s7 + s8;
}
std::string Concat9(const char* s1, const char* s2, const char* s3,
const char* s4, const char* s5, const char* s6,
const char* s7, const char* s8, const char* s9) {
return std::string(s1) + s2 + s3 + s4 + s5 + s6 + s7 + s8 + s9;
}
std::string Concat10(const char* s1, const char* s2, const char* s3,
const char* s4, const char* s5, const char* s6,
const char* s7, const char* s8, const char* s9,
const char* s10) {
return std::string(s1) + s2 + s3 + s4 + s5 + s6 + s7 + s8 + s9 + s10;
}
// Fixture class for the Invoke(&object, &Class::Method) tests below.
// Methods deliberately mix const/non-const qualifiers, arities 0-10,
// and Unused parameters. value_ is fixed at 123; several tests assert
// exact results derived from it, so the arithmetic must not change.
class Foo {
 public:
  Foo() : value_(123) {}
  // Returns the fixed member value (123).
  int Nullary() const { return value_; }
  short Unary(long x) { return static_cast<short>(value_ + x); }  // NOLINT
  std::string Binary(const std::string& str, char c) const { return str + c; }
  // Note: bool y participates in arithmetic as 0 or 1.
  int Ternary(int x, bool y, char z) { return value_ + x + y*z; }
  // Sum of the four arguments plus the member value.
  int SumOf4(int a, int b, int c, int d) const {
    return a + b + c + d + value_;
  }
  // First two parameters are declared Unused and ignored.
  int SumOfLast2(Unused, Unused, int a, int b) const { return a + b; }
  int SumOf5(int a, int b, int c, int d, int e) { return a + b + c + d + e; }
  int SumOf6(int a, int b, int c, int d, int e, int f) {
    return a + b + c + d + e + f;
  }
  std::string Concat7(const char* s1, const char* s2, const char* s3,
                      const char* s4, const char* s5, const char* s6,
                      const char* s7) {
    return std::string(s1) + s2 + s3 + s4 + s5 + s6 + s7;
  }
  std::string Concat8(const char* s1, const char* s2, const char* s3,
                      const char* s4, const char* s5, const char* s6,
                      const char* s7, const char* s8) {
    return std::string(s1) + s2 + s3 + s4 + s5 + s6 + s7 + s8;
  }
  std::string Concat9(const char* s1, const char* s2, const char* s3,
                      const char* s4, const char* s5, const char* s6,
                      const char* s7, const char* s8, const char* s9) {
    return std::string(s1) + s2 + s3 + s4 + s5 + s6 + s7 + s8 + s9;
  }
  std::string Concat10(const char* s1, const char* s2, const char* s3,
                       const char* s4, const char* s5, const char* s6,
                       const char* s7, const char* s8, const char* s9,
                       const char* s10) {
    return std::string(s1) + s2 + s3 + s4 + s5 + s6 + s7 + s8 + s9 + s10;
  }
 private:
  int value_;  // Fixed at 123 by the constructor; read by most methods.
};
// Tests using Invoke() with a nullary function.
TEST(InvokeTest, Nullary) {
Action<int()> a = Invoke(Nullary); // NOLINT
EXPECT_EQ(1, a.Perform(std::make_tuple()));
}
// Tests using Invoke() with a unary function.
TEST(InvokeTest, Unary) {
Action<bool(int)> a = Invoke(Unary); // NOLINT
EXPECT_FALSE(a.Perform(std::make_tuple(1)));
EXPECT_TRUE(a.Perform(std::make_tuple(-1)));
}
// Tests using Invoke() with a binary function.
TEST(InvokeTest, Binary) {
Action<const char*(const char*, short)> a = Invoke(Binary); // NOLINT
const char* p = "Hello";
EXPECT_EQ(p + 2, a.Perform(std::make_tuple(p, Short(2))));
}
// Tests using Invoke() with a ternary function.
TEST(InvokeTest, Ternary) {
Action<int(int, char, short)> a = Invoke(Ternary); // NOLINT
EXPECT_EQ(6, a.Perform(std::make_tuple(1, '\2', Short(3))));
}
// Tests using Invoke() with a 4-argument function.
TEST(InvokeTest, FunctionThatTakes4Arguments) {
Action<int(int, int, int, int)> a = Invoke(SumOf4); // NOLINT
EXPECT_EQ(1234, a.Perform(std::make_tuple(1000, 200, 30, 4)));
}
// Tests using Invoke() with a 5-argument function.
TEST(InvokeTest, FunctionThatTakes5Arguments) {
Action<int(int, int, int, int, int)> a = Invoke(SumOf5); // NOLINT
EXPECT_EQ(12345, a.Perform(std::make_tuple(10000, 2000, 300, 40, 5)));
}
// Tests using Invoke() with a 6-argument function.
TEST(InvokeTest, FunctionThatTakes6Arguments) {
Action<int(int, int, int, int, int, int)> a = Invoke(SumOf6); // NOLINT
EXPECT_EQ(123456,
a.Perform(std::make_tuple(100000, 20000, 3000, 400, 50, 6)));
}
// A helper that turns the type of a C-string literal from const
// char[N] to const char*.
// Decays a C-string literal from const char[N] to const char*, so that
// std::make_tuple stores a pointer rather than an array type.
inline const char* CharPtr(const char* s) {
  const char* pointer = s;
  return pointer;
}
// Tests using Invoke() with a 7-argument function.
TEST(InvokeTest, FunctionThatTakes7Arguments) {
Action<std::string(const char*, const char*, const char*, const char*,
const char*, const char*, const char*)>
a = Invoke(Concat7);
EXPECT_EQ("1234567",
a.Perform(std::make_tuple(CharPtr("1"), CharPtr("2"), CharPtr("3"),
CharPtr("4"), CharPtr("5"), CharPtr("6"),
CharPtr("7"))));
}
// Tests using Invoke() with a 8-argument function.
TEST(InvokeTest, FunctionThatTakes8Arguments) {
Action<std::string(const char*, const char*, const char*, const char*,
const char*, const char*, const char*, const char*)>
a = Invoke(Concat8);
EXPECT_EQ("12345678",
a.Perform(std::make_tuple(CharPtr("1"), CharPtr("2"), CharPtr("3"),
CharPtr("4"), CharPtr("5"), CharPtr("6"),
CharPtr("7"), CharPtr("8"))));
}
// Tests using Invoke() with a 9-argument function.
TEST(InvokeTest, FunctionThatTakes9Arguments) {
Action<std::string(const char*, const char*, const char*, const char*,
const char*, const char*, const char*, const char*,
const char*)>
a = Invoke(Concat9);
EXPECT_EQ("123456789", a.Perform(std::make_tuple(
CharPtr("1"), CharPtr("2"), CharPtr("3"),
CharPtr("4"), CharPtr("5"), CharPtr("6"),
CharPtr("7"), CharPtr("8"), CharPtr("9"))));
}
// Tests using Invoke() with a 10-argument function.
TEST(InvokeTest, FunctionThatTakes10Arguments) {
Action<std::string(const char*, const char*, const char*, const char*,
const char*, const char*, const char*, const char*,
const char*, const char*)>
a = Invoke(Concat10);
EXPECT_EQ("1234567890",
a.Perform(std::make_tuple(CharPtr("1"), CharPtr("2"), CharPtr("3"),
CharPtr("4"), CharPtr("5"), CharPtr("6"),
CharPtr("7"), CharPtr("8"), CharPtr("9"),
CharPtr("0"))));
}
// Tests using Invoke() with functions with parameters declared as Unused.
TEST(InvokeTest, FunctionWithUnusedParameters) {
Action<int(int, int, double, const std::string&)> a1 = Invoke(SumOfFirst2);
std::tuple<int, int, double, std::string> dummy =
std::make_tuple(10, 2, 5.6, std::string("hi"));
EXPECT_EQ(12, a1.Perform(dummy));
Action<int(int, int, bool, int*)> a2 =
Invoke(SumOfFirst2);
EXPECT_EQ(
23, a2.Perform(std::make_tuple(20, 3, true, static_cast<int*>(nullptr))));
}
// Tests using Invoke() with methods with parameters declared as Unused.
TEST(InvokeTest, MethodWithUnusedParameters) {
Foo foo;
Action<int(std::string, bool, int, int)> a1 = Invoke(&foo, &Foo::SumOfLast2);
EXPECT_EQ(12, a1.Perform(std::make_tuple(CharPtr("hi"), true, 10, 2)));
Action<int(char, double, int, int)> a2 =
Invoke(&foo, &Foo::SumOfLast2);
EXPECT_EQ(23, a2.Perform(std::make_tuple('a', 2.5, 20, 3)));
}
// Tests using Invoke() with a functor.
TEST(InvokeTest, Functor) {
Action<long(long, int)> a = Invoke(plus<long>()); // NOLINT
EXPECT_EQ(3L, a.Perform(std::make_tuple(1, 2)));
}
// Tests using Invoke(f) as an action of a compatible type.
TEST(InvokeTest, FunctionWithCompatibleType) {
Action<long(int, short, char, bool)> a = Invoke(SumOf4); // NOLINT
EXPECT_EQ(4321, a.Perform(std::make_tuple(4000, Short(300), Char(20), true)));
}
// Tests using Invoke() with an object pointer and a method pointer.
// Tests using Invoke() with a nullary method.
TEST(InvokeMethodTest, Nullary) {
Foo foo;
Action<int()> a = Invoke(&foo, &Foo::Nullary); // NOLINT
EXPECT_EQ(123, a.Perform(std::make_tuple()));
}
// Tests using Invoke() with a unary method.
TEST(InvokeMethodTest, Unary) {
Foo foo;
Action<short(long)> a = Invoke(&foo, &Foo::Unary); // NOLINT
EXPECT_EQ(4123, a.Perform(std::make_tuple(4000)));
}
// Tests using Invoke() with a binary method.
TEST(InvokeMethodTest, Binary) {
Foo foo;
Action<std::string(const std::string&, char)> a = Invoke(&foo, &Foo::Binary);
std::string s("Hell");
std::tuple<std::string, char> dummy = std::make_tuple(s, 'o');
EXPECT_EQ("Hello", a.Perform(dummy));
}
// Tests using Invoke() with a ternary method.
TEST(InvokeMethodTest, Ternary) {
Foo foo;
Action<int(int, bool, char)> a = Invoke(&foo, &Foo::Ternary); // NOLINT
EXPECT_EQ(1124, a.Perform(std::make_tuple(1000, true, Char(1))));
}
// Tests using Invoke() with a 4-argument method.
TEST(InvokeMethodTest, MethodThatTakes4Arguments) {
Foo foo;
Action<int(int, int, int, int)> a = Invoke(&foo, &Foo::SumOf4); // NOLINT
EXPECT_EQ(1357, a.Perform(std::make_tuple(1000, 200, 30, 4)));
}
// Tests using Invoke() with a 5-argument method.
TEST(InvokeMethodTest, MethodThatTakes5Arguments) {
Foo foo;
Action<int(int, int, int, int, int)> a = Invoke(&foo, &Foo::SumOf5); // NOLINT
EXPECT_EQ(12345, a.Perform(std::make_tuple(10000, 2000, 300, 40, 5)));
}
// Tests using Invoke() with a 6-argument method.
TEST(InvokeMethodTest, MethodThatTakes6Arguments) {
Foo foo;
Action<int(int, int, int, int, int, int)> a = // NOLINT
Invoke(&foo, &Foo::SumOf6);
EXPECT_EQ(123456,
a.Perform(std::make_tuple(100000, 20000, 3000, 400, 50, 6)));
}
// Tests using Invoke() with a 7-argument method.
TEST(InvokeMethodTest, MethodThatTakes7Arguments) {
Foo foo;
Action<std::string(const char*, const char*, const char*, const char*,
const char*, const char*, const char*)>
a = Invoke(&foo, &Foo::Concat7);
EXPECT_EQ("1234567",
a.Perform(std::make_tuple(CharPtr("1"), CharPtr("2"), CharPtr("3"),
CharPtr("4"), CharPtr("5"), CharPtr("6"),
CharPtr("7"))));
}
// Tests using Invoke() with a 8-argument method.
TEST(InvokeMethodTest, MethodThatTakes8Arguments) {
Foo foo;
Action<std::string(const char*, const char*, const char*, const char*,
const char*, const char*, const char*, const char*)>
a = Invoke(&foo, &Foo::Concat8);
EXPECT_EQ("12345678",
a.Perform(std::make_tuple(CharPtr("1"), CharPtr("2"), CharPtr("3"),
CharPtr("4"), CharPtr("5"), CharPtr("6"),
CharPtr("7"), CharPtr("8"))));
}
// Tests using Invoke() with a 9-argument method.
TEST(InvokeMethodTest, MethodThatTakes9Arguments) {
Foo foo;
Action<std::string(const char*, const char*, const char*, const char*,
const char*, const char*, const char*, const char*,
const char*)>
a = Invoke(&foo, &Foo::Concat9);
EXPECT_EQ("123456789", a.Perform(std::make_tuple(
CharPtr("1"), CharPtr("2"), CharPtr("3"),
CharPtr("4"), CharPtr("5"), CharPtr("6"),
CharPtr("7"), CharPtr("8"), CharPtr("9"))));
}
// Tests using Invoke() with a 10-argument method.
TEST(InvokeMethodTest, MethodThatTakes10Arguments) {
Foo foo;
Action<std::string(const char*, const char*, const char*, const char*,
const char*, const char*, const char*, const char*,
const char*, const char*)>
a = Invoke(&foo, &Foo::Concat10);
EXPECT_EQ("1234567890",
a.Perform(std::make_tuple(CharPtr("1"), CharPtr("2"), CharPtr("3"),
CharPtr("4"), CharPtr("5"), CharPtr("6"),
CharPtr("7"), CharPtr("8"), CharPtr("9"),
CharPtr("0"))));
}
// Tests using Invoke(f) as an action of a compatible type.
TEST(InvokeMethodTest, MethodWithCompatibleType) {
Foo foo;
Action<long(int, short, char, bool)> a = // NOLINT
Invoke(&foo, &Foo::SumOf4);
EXPECT_EQ(4444, a.Perform(std::make_tuple(4000, Short(300), Char(20), true)));
}
// Tests using WithoutArgs with an action that takes no argument.
TEST(WithoutArgsTest, NoArg) {
Action<int(int n)> a = WithoutArgs(Invoke(Nullary)); // NOLINT
EXPECT_EQ(1, a.Perform(std::make_tuple(2)));
}
// Tests using WithArg with an action that takes 1 argument.
TEST(WithArgTest, OneArg) {
Action<bool(double x, int n)> b = WithArg<1>(Invoke(Unary)); // NOLINT
EXPECT_TRUE(b.Perform(std::make_tuple(1.5, -1)));
EXPECT_FALSE(b.Perform(std::make_tuple(1.5, 1)));
}
TEST(ReturnArgActionTest, WorksForOneArgIntArg0) {
const Action<int(int)> a = ReturnArg<0>();
EXPECT_EQ(5, a.Perform(std::make_tuple(5)));
}
TEST(ReturnArgActionTest, WorksForMultiArgBoolArg0) {
const Action<bool(bool, bool, bool)> a = ReturnArg<0>();
EXPECT_TRUE(a.Perform(std::make_tuple(true, false, false)));
}
TEST(ReturnArgActionTest, WorksForMultiArgStringArg2) {
const Action<std::string(int, int, std::string, int)> a = ReturnArg<2>();
EXPECT_EQ("seven", a.Perform(std::make_tuple(5, 6, std::string("seven"), 8)));
}
TEST(SaveArgActionTest, WorksForSameType) {
int result = 0;
const Action<void(int n)> a1 = SaveArg<0>(&result);
a1.Perform(std::make_tuple(5));
EXPECT_EQ(5, result);
}
TEST(SaveArgActionTest, WorksForCompatibleType) {
int result = 0;
const Action<void(bool, char)> a1 = SaveArg<1>(&result);
a1.Perform(std::make_tuple(true, 'a'));
EXPECT_EQ('a', result);
}
TEST(SaveArgPointeeActionTest, WorksForSameType) {
int result = 0;
const int value = 5;
const Action<void(const int*)> a1 = SaveArgPointee<0>(&result);
a1.Perform(std::make_tuple(&value));
EXPECT_EQ(5, result);
}
TEST(SaveArgPointeeActionTest, WorksForCompatibleType) {
int result = 0;
char value = 'a';
const Action<void(bool, char*)> a1 = SaveArgPointee<1>(&result);
a1.Perform(std::make_tuple(true, &value));
EXPECT_EQ('a', result);
}
TEST(SetArgRefereeActionTest, WorksForSameType) {
int value = 0;
const Action<void(int&)> a1 = SetArgReferee<0>(1);
a1.Perform(std::tuple<int&>(value));
EXPECT_EQ(1, value);
}
TEST(SetArgRefereeActionTest, WorksForCompatibleType) {
int value = 0;
const Action<void(int, int&)> a1 = SetArgReferee<1>('a');
a1.Perform(std::tuple<int, int&>(0, value));
EXPECT_EQ('a', value);
}
TEST(SetArgRefereeActionTest, WorksWithExtraArguments) {
int value = 0;
const Action<void(bool, int, int&, const char*)> a1 = SetArgReferee<2>('a');
a1.Perform(std::tuple<bool, int, int&, const char*>(true, 0, value, "hi"));
EXPECT_EQ('a', value);
}
// A class that can be used to verify that its destructor is called: it will set
// the bool provided to the constructor to true when destroyed.
// Verifies that its destructor runs: clears the flag passed at construction
// and sets it to true when destroyed.
class DeletionTester {
 public:
  explicit DeletionTester(bool* is_deleted) : is_deleted_(is_deleted) {
    // Start with the flag cleared so a later 'true' proves destruction.
    *is_deleted_ = false;
  }

  ~DeletionTester() { *is_deleted_ = true; }

 private:
  bool* is_deleted_;  // Not owned; written in ctor and dtor.
};
TEST(DeleteArgActionTest, OneArg) {
bool is_deleted = false;
DeletionTester* t = new DeletionTester(&is_deleted);
const Action<void(DeletionTester*)> a1 = DeleteArg<0>(); // NOLINT
EXPECT_FALSE(is_deleted);
a1.Perform(std::make_tuple(t));
EXPECT_TRUE(is_deleted);
}
TEST(DeleteArgActionTest, TenArgs) {
bool is_deleted = false;
DeletionTester* t = new DeletionTester(&is_deleted);
const Action<void(bool, int, int, const char*, bool,
int, int, int, int, DeletionTester*)> a1 = DeleteArg<9>();
EXPECT_FALSE(is_deleted);
a1.Perform(std::make_tuple(true, 5, 6, CharPtr("hi"), false, 7, 8, 9, 10, t));
EXPECT_TRUE(is_deleted);
}
#if GTEST_HAS_EXCEPTIONS
TEST(ThrowActionTest, ThrowsGivenExceptionInVoidFunction) {
const Action<void(int n)> a = Throw('a');
EXPECT_THROW(a.Perform(std::make_tuple(0)), char);
}
class MyException {};
TEST(ThrowActionTest, ThrowsGivenExceptionInNonVoidFunction) {
const Action<double(char ch)> a = Throw(MyException());
EXPECT_THROW(a.Perform(std::make_tuple('0')), MyException);
}
TEST(ThrowActionTest, ThrowsGivenExceptionInNullaryFunction) {
const Action<double()> a = Throw(MyException());
EXPECT_THROW(a.Perform(std::make_tuple()), MyException);
}
class Object {
public:
virtual ~Object() {}
virtual void Func() {}
};
class MockObject : public Object {
public:
~MockObject() override {}
MOCK_METHOD(void, Func, (), (override));
};
// A throwing default action combined with Times(0): invoking the mock at
// all violates the cardinality, so a nonfatal failure must be reported
// even though the thrown exception itself is swallowed by the catch.
TEST(ThrowActionTest, Times0) {
  EXPECT_NONFATAL_FAILURE(
      [] {
        try {
          MockObject m;
          ON_CALL(m, Func()).WillByDefault([] { throw "something"; });
          EXPECT_CALL(m, Func()).Times(0);
          m.Func();
        } catch (...) {
          // Exception is caught but Times(0) still triggers a failure.
        }
      }(),
      "");
}
#endif // GTEST_HAS_EXCEPTIONS
// Tests that SetArrayArgument<N>(first, last) sets the elements of the array
// pointed to by the N-th (0-based) argument to values in range [first, last).
TEST(SetArrayArgumentTest, SetsTheNthArray) {
typedef void MyFunction(bool, int*, char*);
int numbers[] = { 1, 2, 3 };
Action<MyFunction> a = SetArrayArgument<1>(numbers, numbers + 3);
int n[4] = {};
int* pn = n;
char ch[4] = {};
char* pch = ch;
a.Perform(std::make_tuple(true, pn, pch));
EXPECT_EQ(1, n[0]);
EXPECT_EQ(2, n[1]);
EXPECT_EQ(3, n[2]);
EXPECT_EQ(0, n[3]);
EXPECT_EQ('\0', ch[0]);
EXPECT_EQ('\0', ch[1]);
EXPECT_EQ('\0', ch[2]);
EXPECT_EQ('\0', ch[3]);
// Tests first and last are iterators.
std::string letters = "abc";
a = SetArrayArgument<2>(letters.begin(), letters.end());
std::fill_n(n, 4, 0);
std::fill_n(ch, 4, '\0');
a.Perform(std::make_tuple(true, pn, pch));
EXPECT_EQ(0, n[0]);
EXPECT_EQ(0, n[1]);
EXPECT_EQ(0, n[2]);
EXPECT_EQ(0, n[3]);
EXPECT_EQ('a', ch[0]);
EXPECT_EQ('b', ch[1]);
EXPECT_EQ('c', ch[2]);
EXPECT_EQ('\0', ch[3]);
}
// Tests SetArrayArgument<N>(first, last) where first == last.
TEST(SetArrayArgumentTest, SetsTheNthArrayWithEmptyRange) {
typedef void MyFunction(bool, int*);
int numbers[] = { 1, 2, 3 };
Action<MyFunction> a = SetArrayArgument<1>(numbers, numbers);
int n[4] = {};
int* pn = n;
a.Perform(std::make_tuple(true, pn));
EXPECT_EQ(0, n[0]);
EXPECT_EQ(0, n[1]);
EXPECT_EQ(0, n[2]);
EXPECT_EQ(0, n[3]);
}
// Tests SetArrayArgument<N>(first, last) where *first is convertible
// (but not equal) to the argument type.
TEST(SetArrayArgumentTest, SetsTheNthArrayWithConvertibleType) {
typedef void MyFunction(bool, int*);
char chars[] = { 97, 98, 99 };
Action<MyFunction> a = SetArrayArgument<1>(chars, chars + 3);
int codes[4] = { 111, 222, 333, 444 };
int* pcodes = codes;
a.Perform(std::make_tuple(true, pcodes));
EXPECT_EQ(97, codes[0]);
EXPECT_EQ(98, codes[1]);
EXPECT_EQ(99, codes[2]);
EXPECT_EQ(444, codes[3]);
}
// Test SetArrayArgument<N>(first, last) with iterator as argument.
TEST(SetArrayArgumentTest, SetsTheNthArrayWithIteratorArgument) {
typedef void MyFunction(bool, std::back_insert_iterator<std::string>);
std::string letters = "abc";
Action<MyFunction> a = SetArrayArgument<1>(letters.begin(), letters.end());
std::string s;
a.Perform(std::make_tuple(true, back_inserter(s)));
EXPECT_EQ(letters, s);
}
TEST(ReturnPointeeTest, Works) {
int n = 42;
const Action<int()> a = ReturnPointee(&n);
EXPECT_EQ(42, a.Perform(std::make_tuple()));
n = 43;
EXPECT_EQ(43, a.Perform(std::make_tuple()));
}
} // namespace gmock_more_actions_test
} // namespace testing
| {
"pile_set_name": "Github"
} |
---
title: Page.AutoConnectMany Method (Visio)
keywords: vis_sdr.chm10962130
f1_keywords:
- vis_sdr.chm10962130
ms.prod: visio
api_name:
- Visio.Page.AutoConnectMany
ms.assetid: 292d0f58-d753-6ef3-fd62-269fd44d003c
ms.date: 06/08/2017
---
# Page.AutoConnectMany Method (Visio)
Automatically draws multiple connections in the specified directions between the specified shapes. Returns the number of shapes connected.
## Syntax
_expression_ . **AutoConnectMany**( **_FromShapeIDs()_** , **_ToShapeIDs()_** , **_PlacementDirs()_** , **_[Connector]_** )
_expression_ A variable that represents a **[Page](page-object-visio.md)** object.
### Parameters
|**Name**|**Required/Optional**|**Data Type**|**Description**|
|:-----|:-----|:-----|:-----|
| _FromShapeIDs()_|Required| **Long**|An array of identifiers of the shapes from which to draw a connection.|
| _ToShapeIDs()_|Required| **Long**|An array of identifiers of the shapes to which to draw a connection.|
| _PlacementDirs()_|Required| **Long**|An array of **[VisAutoConnectDir](visautoconnectdir-enumeration-visio.md)** constants that represent the directions in which to draw the connections. See Remarks for possible values.|
| _Connector_|Optional| **[UNKNOWN]**|The connector to use. Can be a **[Master](master-object-visio.md)** , **[MasterShortcut](mastershortcut-object-visio.md)** , **[Shape](shape-object-visio.md)** , or **IDataObject** object.|
### Return Value
**Long**
## Remarks
For the _PlacementDirs()_ parameter, pass an array of values from the **VisAutoConnectDir** enumeration to specify the connection directions (that is, where to locate the connected shapes with respect to the primary shapes). Possible values for _PlacementDirs()_ are as follows.
|**Constant**|**Value**|**Description**|
|:-----|:-----|:-----|
|visAutoConnectDirDown|2|Connect down.|
|visAutoConnectDirLeft|3|Connect to the left.|
|visAutoConnectDirNone|0|Connect without relocating the shapes.|
|visAutoConnectDirRight|4|Connect to the right.|
|visAutoConnectDirUp|1|Connect up.|
Calling the **AutoConnectMany** method is equivalent to calling the **[Shape.AutoConnect](shape-autoconnect-method-visio.md)** method multiple times.
You can include the same shape multiple times in each array you pass as a parameter. You cannot use the **AutoConnectMany** method to connect a shape to itself.
If a particular **AutoConnectMany** operation fails or is invalid, Microsoft Visio skips it and processes the next item in each of the parameter arrays. **AutoConnectMany** returns the total number of items successfully processed.
If the parameter arrays do not each contain the same number of values, Visio returns an Invalid Parameter error.
The optional _Connector_ parameter value must be an object that references a one-dimensional routable shape. If you do not pass a value for _Connector_ , Visio uses the default dynamic connector.
If you use the **IDataObject** interface to pass a selection of shapes for _Connector_ , Visio uses only the first shape. If _Connector_ is not a Visio object, Visio returns an Invalid Parameter error. If _Connector_ is not a shape that matches the context of the method, Visio returns an Invalid Source error.
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="OpenCvSharp3-AnyCPU" version="3.4.1.20180319" targetFramework="net40" />
</packages> | {
"pile_set_name": "Github"
} |
# This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
# Character.create(name: 'Luke', movie: movies.first)
| {
"pile_set_name": "Github"
} |
object Form1: TForm1
Left = 0
Top = 0
Caption = 'Flags'
ClientHeight = 333
ClientWidth = 452
FormFactor.Width = 1680
FormFactor.Height = 1050
FormFactor.Devices = [dkDesktop]
OnCreate = FormCreate
DesignerMobile = False
DesignerWidth = 0
DesignerHeight = 0
DesignerDeviceName = ''
DesignerOrientation = 0
DesignerOSVersion = ''
object FlgViewer: TImageViewer
Cursor = crHandPoint
Height = 209.000000000000000000
Position.X = 16.000000000000000000
Position.Y = 16.000000000000000000
ShowScrollBars = False
TabOrder = 0
Width = 385.000000000000000000
BitmapScale = 1.000000000000000000
end
object FlagsCombo: TComboBox
DisableFocusEffect = False
Height = 22.000000000000000000
ItemIndex = 0
ListBoxResource = 'transparentlistboxstyle'
Position.X = 16.000000000000000000
Position.Y = 248.000000000000000000
TabOrder = 1
Width = 137.000000000000000000
OnChange = FlagsComboChange
OnClick = FlagsComboClick
object ListBoxItem1: TListBoxItem
Height = 19.000000000000000000
IsSelected = True
Text = 'United States'
Width = 119.000000000000000000
end
object ListBoxItem2: TListBoxItem
Height = 19.000000000000000000
Position.Y = 19.000000000000000000
Text = 'Romania'
Width = 119.000000000000000000
end
object ListBoxItem3: TListBoxItem
Height = 19.000000000000000000
Position.Y = 38.000000000000000000
Text = 'Russia'
Width = 119.000000000000000000
end
object ListBoxItem4: TListBoxItem
Height = 19.000000000000000000
Position.Y = 57.000000000000000000
Text = 'Spain'
Width = 119.000000000000000000
end
object ListBoxItem5: TListBoxItem
Height = 19.000000000000000000
Position.Y = 76.000000000000000000
Text = 'Japan'
Width = 119.000000000000000000
end
object ListBoxItem6: TListBoxItem
Height = 19.000000000000000000
Position.Y = 95.000000000000000000
Text = 'Canada'
Width = 119.000000000000000000
end
object ListBoxItem7: TListBoxItem
Height = 19.000000000000000000
Position.Y = 114.000000000000000000
Text = 'Italy'
Width = 119.000000000000000000
end
end
object CountryName: TLabel
Font.Family = 'Arial'
Font.Size = 15.000000000000000000
Font.Style = [fsBold]
Height = 15.000000000000000000
Position.X = 16.000000000000000000
Position.Y = 16.000000000000000000
Width = 120.000000000000000000
end
object PrintBtn: TButton
Height = 22.000000000000000000
Position.X = 16.000000000000000000
Position.Y = 288.000000000000000000
TabOrder = 3
Text = 'Print'
Width = 80.000000000000000000
OnClick = PrintBtnClick
end
object PrintDialog1: TPrintDialog
Left = 298
Top = 274
end
end
| {
"pile_set_name": "Github"
} |
<?xml version='1.0' ?>
<?xml version='1.0' ?>
<!-- Renders plugin-unload log events as plain text.
     Handles three element types: Freed, ModuleFree, and Success. -->
<xsl:transform xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
<xsl:import href="include/StandardTransforms.xsl"/>
<!-- Freed: either the remaining load count (if nonzero) or a final
     "Plugin freed" message, followed by a newline. -->
<xsl:template match="Freed">
<xsl:choose>
<xsl:when test="@loadCount != 0">
<xsl:text> Load count reduced to </xsl:text>
<xsl:value-of select="@loadCount"/>
</xsl:when>
<xsl:otherwise>
<xsl:text> Plugin freed</xsl:text>
</xsl:otherwise>
</xsl:choose>
<xsl:call-template name="PrintReturn"/>
</xsl:template>
<!-- ModuleFree: announces which plugin id/target is being freed. -->
<xsl:template match="ModuleFree">
<xsl:text>Freeing plugin </xsl:text>
<xsl:value-of select="@id"/>
<xsl:text> (target=</xsl:text>
<xsl:value-of select="@target"/>
<xsl:text>)</xsl:text>
<xsl:call-template name="PrintReturn"/>
</xsl:template>
<!-- Success: intentionally suppressed (empty template). -->
<xsl:template match="Success"/>
</xsl:transform>
"pile_set_name": "Github"
} |
<test-metadata>
<benchmark-version>1.2</benchmark-version>
<category>xss</category>
<test-number>02589</test-number>
<vulnerability>false</vulnerability>
<cwe>79</cwe>
</test-metadata>
| {
"pile_set_name": "Github"
} |
// This file is part of SVO - Semi-direct Visual Odometry.
//
// Copyright (C) 2014 Christian Forster <forster at ifi dot uzh dot ch>
// (Robotics and Perception Group, University of Zurich, Switzerland).
//
// SVO is free software: you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the Free Software
// Foundation, either version 3 of the License, or any later version.
//
// SVO is distributed in the hope that it will be useful, but WITHOUT ANY
// WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
// FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
#include <svo/config.h>
#include <stdexcept>
#include <svo/pose_optimizer.h>
#include <svo/frame.h>
#include <svo/feature.h>
#include <svo/point.h>
#include <vikit/robust_cost.h>
#include <vikit/math_utils.h>
#include <opencv2/core/eigen.hpp> // eigen2cv
namespace svo {
namespace pose_optimizer {
/// Robust Gauss-Newton refinement of a frame's camera pose (frame->T_f_w_)
/// by minimizing feature reprojection error against the frame's 3D points,
/// using a Tukey weight function with a MAD-based robust scale estimate.
///
/// \param reproj_thresh   Reprojection-error threshold in pixels; features
///                        exceeding it after optimization lose their point.
/// \param n_iter          Maximum number of Gauss-Newton iterations.
/// \param verbose         If true, prints per-iteration chi2 / update norm.
/// \param frame           Frame whose pose T_f_w_ is optimized in place;
///                        its covariance Cov_ is also updated.
/// \param estimated_scale Output: robust (MAD) error scale, in pixels.
/// \param error_init      Output: median reprojection error before, pixels.
/// \param error_final     Output: median reprojection error after, pixels.
/// \param num_obs         Output: number of observations kept.
void optimizeGaussNewton(
    const double reproj_thresh,
    const size_t n_iter,
    const bool verbose,
    FramePtr& frame,
    double& estimated_scale,
    double& error_init,
    double& error_final,
    size_t& num_obs)
{
  // init
  double chi2(0.0);
  vector<double> chi2_vec_init, chi2_vec_final;
  vk::robust_cost::TukeyWeightFunction weight_function;
  SE3 T_old(frame->T_f_w_);  // kept for roll-back if an update diverges
  Matrix6d A;                // 6x6 information (Hessian approximation)
  Vector6d b;                // gradient-side vector of the normal equations
  // compute the scale of the error for robust estimation
  // NOTE: residuals are formed in unit-plane coordinates and down-weighted
  // per pyramid level via 1/(1<<level); features without a 3D point skip.
  std::vector<float> errors; errors.reserve(frame->fts_.size());
  for(auto it=frame->fts_.begin(); it!=frame->fts_.end(); ++it)
  {
    if((*it)->point == NULL)
      continue;
    Vector2d e = vk::project2d((*it)->f)
               - vk::project2d(frame->T_f_w_ * (*it)->point->pos_);
    e *= 1.0 / (1<<(*it)->level);
    errors.push_back(e.norm());
  }
  if(errors.empty())
    return;  // nothing to optimize; outputs keep their previous values
  vk::robust_cost::MADScaleEstimator scale_estimator;
  estimated_scale = scale_estimator.compute(errors);
  num_obs = errors.size();
  chi2_vec_init.reserve(num_obs);
  chi2_vec_final.reserve(num_obs);
  double scale = estimated_scale;
  for(size_t iter=0; iter<n_iter; iter++)
  {
    // overwrite scale
    // From iteration 5 on, replace the MAD estimate with a fixed scale
    // (0.85 px converted to unit-plane units via the focal multiplier).
    if(iter == 5)
      scale = 0.85/frame->cam_->errorMultiplier2();
    b.setZero();
    A.setZero();
    double new_chi2(0.0);
    // compute residual
    for(auto it=frame->fts_.begin(); it!=frame->fts_.end(); ++it)
    {
      if((*it)->point == NULL)
        continue;
      Matrix26d J;
      Vector3d xyz_f(frame->T_f_w_ * (*it)->point->pos_);
      Frame::jacobian_xyz2uv(xyz_f, J);
      Vector2d e = vk::project2d((*it)->f) - vk::project2d(xyz_f);
      double sqrt_inv_cov = 1.0 / (1<<(*it)->level);
      e *= sqrt_inv_cov;
      if(iter == 0)
        chi2_vec_init.push_back(e.squaredNorm()); // just for debug
      J *= sqrt_inv_cov;
      // Tukey robust weight: large residuals (relative to scale) get ~0.
      double weight = weight_function.value(e.norm()/scale);
      A.noalias() += J.transpose()*J*weight;
      b.noalias() -= J.transpose()*e*weight;
      new_chi2 += e.squaredNorm()*weight;
    }
    // solve linear system
    const Vector6d dT(A.ldlt().solve(b));
    // check if error increased
    if((iter > 0 && new_chi2 > chi2) || (bool) std::isnan((double)dT[0]))
    {
      if(verbose)
        std::cout << "it " << iter
                  << "\t FAILURE \t new_chi2 = " << new_chi2 << std::endl;
      frame->T_f_w_ = T_old; // roll-back
      break;
    }
    // update the model (left-multiplicative SE3 exponential update)
    SE3 T_new = SE3::exp(dT)*frame->T_f_w_;
    T_old = frame->T_f_w_;
    frame->T_f_w_ = T_new;
    chi2 = new_chi2;
    if(verbose)
      std::cout << "it " << iter
                << "\t Success \t new_chi2 = " << new_chi2
                << "\t norm(dT) = " << vk::norm_max(dT) << std::endl;
    // stop when converged
    if(vk::norm_max(dT) <= EPS)
      break;
  }
  // Set covariance as inverse information matrix. Optimistic estimator!
  const double pixel_variance=1.0;
  frame->Cov_ = pixel_variance*(A*std::pow(frame->cam_->errorMultiplier2(),2)).inverse();
  // Remove Measurements with too large reprojection error
  // (threshold converted from pixels to unit-plane units).
  double reproj_thresh_scaled = reproj_thresh / frame->cam_->errorMultiplier2();
  size_t n_deleted_refs = 0;
  for(Features::iterator it=frame->fts_.begin(); it!=frame->fts_.end(); ++it)
  {
    if((*it)->point == NULL)
      continue;
    Vector2d e = vk::project2d((*it)->f) - vk::project2d(frame->T_f_w_ * (*it)->point->pos_);
    double sqrt_inv_cov = 1.0 / (1<<(*it)->level);
    e *= sqrt_inv_cov;
    chi2_vec_final.push_back(e.squaredNorm());
    if(e.norm() > reproj_thresh_scaled)
    {
      // we don't need to delete a reference in the point since it was not created yet
      (*it)->point = NULL;
      ++n_deleted_refs;
    }
  }
  // Convert median errors and the scale back to pixel units for reporting.
  error_init=0.0;
  error_final=0.0;
  if(!chi2_vec_init.empty())
    error_init = sqrt(vk::getMedian(chi2_vec_init))*frame->cam_->errorMultiplier2();
  if(!chi2_vec_final.empty())
    error_final = sqrt(vk::getMedian(chi2_vec_final))*frame->cam_->errorMultiplier2();
  estimated_scale *= frame->cam_->errorMultiplier2();
  if(verbose)
    std::cout << "n deleted obs = " << n_deleted_refs
              << "\t scale = " << estimated_scale
              << "\t error init = " << error_init
              << "\t error end = " << error_final << std::endl;
  num_obs -= n_deleted_refs;
}
} // namespace pose_optimizer
} // namespace svo
| {
"pile_set_name": "Github"
} |
<VisualStudioProject ProjectType="Visual C++" Version="8.00" Name="db_test" ProjectGUID="{B17B933F-6F75-4095-BE51-12666358163A}">
<Platforms>
<Platform Name="Win32"/>
<Platform Name="x64"/>
</Platforms>
<Configurations>
<Configuration Name="Debug|Win32" OutputDirectory="..\..\build_windows\$(PlatformName)\Debug" IntermediateDirectory="./$(OutDir)/db_test" ConfigurationType="1" UseOfMFC="0" ATLMinimizesCRunTimeLibraryUsage="FALSE" CharacterSet="2">
<Tool Name="VCPreLinkEventTool"/>
<Tool Name="VCResourceCompilerTool"/>
<Tool Name="VCXMLDataGeneratorTool"/>
<Tool Name="VCManagedWrapperGeneratorTool"/>
<Tool Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
<Tool Name="VCCLCompilerTool" Optimization="0" MinimalRebuild="TRUE" AdditionalIncludeDirectories="../../build_windows,../../src,../../src/dbinc,../../include" PreprocessorDefinitions="WIN32;_WINDOWS;_CRT_SECURE_NO_DEPRECATE;_CRT_NONSTDC_NO_DEPRECATE;_DEBUG;DIAGNOSTIC;_CONSOLE" StringPooling="TRUE" RuntimeLibrary="3" UsePrecompiledHeader="0" PrecompiledHeaderFile="./$(OutDir)/db_test.pch" AssemblerListingLocation="$(OutDir)/dbkill/" ObjectFile="$(OutDir)/dbkill/" WarningLevel="3" SuppressStartupBanner="TRUE" DebugInformationFormat="3" BasicRuntimeChecks="3" CompileAs="0"/>
<Tool Name="VCLinkerTool" AdditionalOptions="/machine:x86" AdditionalDependencies="libdb61d.lib" AdditionalLibraryDirectories="$(OutDir);../../lib" OutputFile="$(OutDir)/dbkill.exe" ProgramDatabaseFile="$(OutDir)/dbkill.pdb" LinkIncremental="1" GenerateDebugInformation="TRUE" SuppressStartupBanner="TRUE" OptimizeReferences="2" TargetMachine="0"/>
<Tool Name="VCPreBuildEventTool"/>
<Tool Name="VCPostBuildEventTool"/>
<Tool Name="VCCustomBuildTool"/></Configuration>
<Configuration Name="Release|Win32" OutputDirectory="..\..\build_windows\$(PlatformName)\Release" IntermediateDirectory="./$(OutDir)/db_test" ConfigurationType="1" UseOfMFC="0" ATLMinimizesCRunTimeLibraryUsage="FALSE" CharacterSet="2">
<Tool Name="VCPreLinkEventTool"/>
<Tool Name="VCResourceCompilerTool"/>
<Tool Name="VCXMLDataGeneratorTool"/>
<Tool Name="VCManagedWrapperGeneratorTool"/>
<Tool Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
<Tool Name="VCCLCompilerTool" Optimization="2" MinimalRebuild="TRUE" InlineFunctionExpansion="1" AdditionalIncludeDirectories="../../build_windows,../../src,../../src/dbinc,../../include" PreprocessorDefinitions="WIN32;_WINDOWS;_CRT_SECURE_NO_DEPRECATE;_CRT_NONSTDC_NO_DEPRECATE;NDEBUG;_CONSOLE" StringPooling="TRUE" RuntimeLibrary="2" EnableFunctionLevelLinking="TRUE" UsePrecompiledHeader="0" PrecompiledHeaderFile="./$(OutDir)/db_test.pch" AssemblerListingLocation="$(OutDir)/dbkill/" ObjectFile="$(OutDir)/dbkill/" WarningLevel="3" SuppressStartupBanner="TRUE" DebugInformationFormat="3" CompileAs="0"/>
<Tool Name="VCLinkerTool" AdditionalOptions="/machine:x86" AdditionalDependencies="libdb61.lib" AdditionalLibraryDirectories="$(OutDir);../../lib" OutputFile="$(OutDir)/dbkill.exe" ProgramDatabaseFile="$(OutDir)/dbkill.pdb" LinkIncremental="1" GenerateDebugInformation="TRUE" SuppressStartupBanner="TRUE" OptimizeReferences="2" TargetMachine="0"/>
<Tool Name="VCPreBuildEventTool"/>
<Tool Name="VCPostBuildEventTool"/>
<Tool Name="VCCustomBuildTool"/></Configuration>
<Configuration Name="Debug|x64" OutputDirectory="..\..\build_windows\$(PlatformName)\Debug" IntermediateDirectory="./$(OutDir)/db_test" ConfigurationType="1" UseOfMFC="0" ATLMinimizesCRunTimeLibraryUsage="FALSE" CharacterSet="2">
<Tool Name="VCPreLinkEventTool"/>
<Tool Name="VCResourceCompilerTool"/>
<Tool Name="VCXMLDataGeneratorTool"/>
<Tool Name="VCManagedWrapperGeneratorTool"/>
<Tool Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
<Tool Name="VCCLCompilerTool" Optimization="0" MinimalRebuild="TRUE" AdditionalIncludeDirectories="../../build_windows,../../src,../../src/dbinc,../../include" PreprocessorDefinitions="WIN32;_WINDOWS;_CRT_SECURE_NO_DEPRECATE;_CRT_NONSTDC_NO_DEPRECATE;_DEBUG;DIAGNOSTIC;_CONSOLE" StringPooling="TRUE" RuntimeLibrary="3" UsePrecompiledHeader="0" PrecompiledHeaderFile="./$(OutDir)/db_test.pch" AssemblerListingLocation="$(OutDir)/dbkill/" ObjectFile="$(OutDir)/dbkill/" WarningLevel="3" SuppressStartupBanner="TRUE" DebugInformationFormat="3" BasicRuntimeChecks="3" CompileAs="0"/>
<Tool Name="VCLinkerTool" AdditionalOptions="/machine:x64" AdditionalDependencies="libdb61d.lib" AdditionalLibraryDirectories="$(OutDir);../../lib" OutputFile="$(OutDir)/dbkill.exe" ProgramDatabaseFile="$(OutDir)/dbkill.pdb" LinkIncremental="1" GenerateDebugInformation="TRUE" SuppressStartupBanner="TRUE" OptimizeReferences="2" TargetMachine="0"/>
<Tool Name="VCPreBuildEventTool"/>
<Tool Name="VCPostBuildEventTool"/>
<Tool Name="VCCustomBuildTool"/></Configuration>
<Configuration Name="Release|x64" OutputDirectory="..\..\build_windows\$(PlatformName)\Release" IntermediateDirectory="./$(OutDir)/db_test" ConfigurationType="1" UseOfMFC="0" ATLMinimizesCRunTimeLibraryUsage="FALSE" CharacterSet="2">
<Tool Name="VCPreLinkEventTool"/>
<Tool Name="VCResourceCompilerTool"/>
<Tool Name="VCXMLDataGeneratorTool"/>
<Tool Name="VCManagedWrapperGeneratorTool"/>
<Tool Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
<Tool Name="VCCLCompilerTool" Optimization="2" MinimalRebuild="TRUE" InlineFunctionExpansion="1" AdditionalIncludeDirectories="../../build_windows,../../src,../../src/dbinc,../../include" PreprocessorDefinitions="WIN32;_WINDOWS;_CRT_SECURE_NO_DEPRECATE;_CRT_NONSTDC_NO_DEPRECATE;NDEBUG;_CONSOLE" StringPooling="TRUE" RuntimeLibrary="2" EnableFunctionLevelLinking="TRUE" UsePrecompiledHeader="0" PrecompiledHeaderFile="./$(OutDir)/db_test.pch" AssemblerListingLocation="$(OutDir)/dbkill/" ObjectFile="$(OutDir)/dbkill/" WarningLevel="3" SuppressStartupBanner="TRUE" DebugInformationFormat="3" CompileAs="0"/>
<Tool Name="VCLinkerTool" AdditionalOptions="/machine:x64" AdditionalDependencies="libdb61.lib" AdditionalLibraryDirectories="$(OutDir);../../lib" OutputFile="$(OutDir)/dbkill.exe" ProgramDatabaseFile="$(OutDir)/dbkill.pdb" LinkIncremental="1" GenerateDebugInformation="TRUE" SuppressStartupBanner="TRUE" OptimizeReferences="2" TargetMachine="0"/>
<Tool Name="VCPreBuildEventTool"/>
<Tool Name="VCPostBuildEventTool"/>
<Tool Name="VCCustomBuildTool"/></Configuration>
</Configurations>
<References/>
<Files>
<File RelativePath="..\..\build_windows\dbkill.cpp"/>
<File RelativePath="..\..\src\clib\getopt.c"/>
</Files>
<Globals/>
</VisualStudioProject>
| {
"pile_set_name": "Github"
} |
<?php
namespace fixtures;

// Intentionally empty test fixture. NOTE(review): judging by the name, this is
// presumably used by tests that detect removal of class methods between API
// versions — confirm against the test suite before changing.
class ClassMethodRemoved
{
}
"pile_set_name": "Github"
} |
812bd1dc93d354140fc197642b611f1a9f79ec6e cheali-charger-imaxB6-80W_2.00-e10.3.12-20160613_nuvoton-M0517.hex
| {
"pile_set_name": "Github"
} |
<!doctype html>
<title>CodeMirror: Asterisk dialplan mode</title>
<meta charset="utf-8"/>
<link rel=stylesheet href="../../doc/docs.css">
<link rel="stylesheet" href="../../lib/codemirror.css">
<script src="../../lib/codemirror.js"></script>
<script src="asterisk.js"></script>
<style>
.CodeMirror {border: 1px solid #999;}
.cm-s-default span.cm-arrow { color: red; }
</style>
<div id=nav>
<a href="http://codemirror.net"><h1>CodeMirror</h1><img id=logo src="../../doc/logo.png"></a>
<ul>
<li><a href="../../index.html">Home</a>
<li><a href="../../doc/manual.html">Manual</a>
<li><a href="https://github.com/codemirror/codemirror">Code</a>
</ul>
<ul>
<li><a href="../index.html">Language modes</a>
<li><a class=active href="#">Asterisk dialplan</a>
</ul>
</div>
<article>
<h2>Asterisk dialplan mode</h2>
<form><textarea id="code" name="code">
; extensions.conf - the Asterisk dial plan
;
[general]
;
; If static is set to no, or omitted, then the pbx_config will rewrite
; this file when extensions are modified. Remember that all comments
; made in the file will be lost when that happens.
static=yes
#include "/etc/asterisk/additional_general.conf
[iaxprovider]
switch => IAX2/user:[key]@myserver/mycontext
[dynamic]
#exec /usr/bin/dynamic-peers.pl
[trunkint]
;
; International long distance through trunk
;
exten => _9011.,1,Macro(dundi-e164,${EXTEN:4})
exten => _9011.,n,Dial(${GLOBAL(TRUNK)}/${FILTER(0-9,${EXTEN:${GLOBAL(TRUNKMSD)}})})
[local]
;
; Master context for local, toll-free, and iaxtel calls only
;
ignorepat => 9
include => default
[demo]
include => stdexten
;
; We start with what to do when a call first comes in.
;
exten => s,1,Wait(1) ; Wait a second, just for fun
same => n,Answer ; Answer the line
same => n,Set(TIMEOUT(digit)=5) ; Set Digit Timeout to 5 seconds
same => n,Set(TIMEOUT(response)=10) ; Set Response Timeout to 10 seconds
same => n(restart),BackGround(demo-congrats) ; Play a congratulatory message
same => n(instruct),BackGround(demo-instruct) ; Play some instructions
same => n,WaitExten ; Wait for an extension to be dialed.
exten => 2,1,BackGround(demo-moreinfo) ; Give some more information.
exten => 2,n,Goto(s,instruct)
exten => 3,1,Set(LANGUAGE()=fr) ; Set language to french
exten => 3,n,Goto(s,restart) ; Start with the congratulations
exten => 1000,1,Goto(default,s,1)
;
; We also create an example user, 1234, who is on the console and has
; voicemail, etc.
;
exten => 1234,1,Playback(transfer,skip) ; "Please hold while..."
; (but skip if channel is not up)
exten => 1234,n,Gosub(${EXTEN},stdexten(${GLOBAL(CONSOLE)}))
exten => 1234,n,Goto(default,s,1) ; exited Voicemail
exten => 1235,1,Voicemail(1234,u) ; Right to voicemail
exten => 1236,1,Dial(Console/dsp) ; Ring forever
exten => 1236,n,Voicemail(1234,b) ; Unless busy
;
; # for when they're done with the demo
;
exten => #,1,Playback(demo-thanks) ; "Thanks for trying the demo"
exten => #,n,Hangup ; Hang them up.
;
; A timeout and "invalid extension rule"
;
exten => t,1,Goto(#,1) ; If they take too long, give up
exten => i,1,Playback(invalid) ; "That's not valid, try again"
;
; Create an extension, 500, for dialing the
; Asterisk demo.
;
exten => 500,1,Playback(demo-abouttotry); Let them know what's going on
exten => 500,n,Dial(IAX2/guest@pbx.digium.com/s@default) ; Call the Asterisk demo
exten => 500,n,Playback(demo-nogo) ; Couldn't connect to the demo site
exten => 500,n,Goto(s,6) ; Return to the start over message.
;
; Create an extension, 600, for evaluating echo latency.
;
exten => 600,1,Playback(demo-echotest) ; Let them know what's going on
exten => 600,n,Echo ; Do the echo test
exten => 600,n,Playback(demo-echodone) ; Let them know it's over
exten => 600,n,Goto(s,6) ; Start over
;
; You can use the Macro Page to intercom a individual user
exten => 76245,1,Macro(page,SIP/Grandstream1)
; or if your peernames are the same as extensions
exten => _7XXX,1,Macro(page,SIP/${EXTEN})
;
;
; System Wide Page at extension 7999
;
exten => 7999,1,Set(TIMEOUT(absolute)=60)
exten => 7999,2,Page(Local/Grandstream1@page&Local/Xlite1@page&Local/1234@page/n,d)
; Give voicemail at extension 8500
;
exten => 8500,1,VoicemailMain
exten => 8500,n,Goto(s,6)
</textarea></form>
<script>
  // Initialize CodeMirror on the demo textarea with the Asterisk dialplan mode.
  var editor = CodeMirror.fromTextArea(document.getElementById("code"), {
    mode: "text/x-asterisk",
    matchBrackets: true,
    // Fixed: the CodeMirror option is "lineNumbers" (plural); the previous
    // "lineNumber" was silently ignored and no gutter was shown.
    lineNumbers: true
  });
</script>
<p><strong>MIME types defined:</strong> <code>text/x-asterisk</code>.</p>
</article>
| {
"pile_set_name": "Github"
} |
CORE
Test_binary3
--max-nondet-string-length 1000 --function Test_binary3.main --cp `../../../../scripts/format_classpath.sh . ../../../lib/java-models-library/target/core-models.jar`
^EXIT=10$
^SIGNAL=0$
assertion.* line 7 .* SUCCESS$
assertion.* line 10 .* FAILURE$
--
non equal types
| {
"pile_set_name": "Github"
} |
# Translation of Odoo Server.
# This file contains the translation of the following modules:
# * payment_paypal
#
# Translators:
# FIRST AUTHOR <EMAIL@ADDRESS>, 2015
msgid ""
msgstr ""
"Project-Id-Version: Odoo 8.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2015-01-21 14:08+0000\n"
"PO-Revision-Date: 2015-07-17 07:44+0000\n"
"Last-Translator: Martin Trigaux\n"
"Language-Team: Slovenian (http://www.transifex.com/odoo/odoo-8/language/sl/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Language: sl\n"
"Plural-Forms: nplurals=4; plural=(n%100==1 ? 0 : n%100==2 ? 1 : n%100==3 || n%100==4 ? 2 : 3);\n"
#. module: payment_paypal
#: model:payment.acquirer,pre_msg:payment_paypal.payment_acquirer_paypal
msgid ""
"<p>You will be redirected to the Paypal website after clicking on the "
"payment button.</p>"
msgstr "<p>Po pritisku gumba za plačilo boste preusmerjeni na Paypal spletno stran.</p>"
#. module: payment_paypal
#: field:payment.acquirer,paypal_api_access_token:0
msgid "Access Token"
msgstr "Dostopni žeton"
#. module: payment_paypal
#: field:payment.acquirer,paypal_api_access_token_validity:0
msgid "Access Token Validity"
msgstr "Veljavnost dostopnega žetona"
#. module: payment_paypal
#: model:ir.model,name:payment_paypal.model_res_company
msgid "Companies"
msgstr "Družbe"
#. module: payment_paypal
#: view:account.config.settings:payment_paypal.payment_paypal_option_config
msgid "Configure payment acquiring methods"
msgstr "Nastavitve metod prejema plačila"
#. module: payment_paypal
#: model:ir.model,name:payment_paypal.model_payment_acquirer
msgid "Payment Acquirer"
msgstr "Prejemnik plačila"
#. module: payment_paypal
#: model:ir.model,name:payment_paypal.model_payment_transaction
msgid "Payment Transaction"
msgstr "Plačilna transakcija"
#. module: payment_paypal
#: field:payment.acquirer,paypal_email_account:0
msgid "Paypal Email ID"
msgstr "Paypal ID e-pošte"
#. module: payment_paypal
#: help:payment.acquirer,paypal_use_ipn:0
msgid "Paypal Instant Payment Notification"
msgstr "Paypal obvestilo o neposrednem plačilu"
#. module: payment_paypal
#: field:payment.acquirer,paypal_seller_account:0
msgid "Paypal Merchant ID"
msgstr "Paypal ID trgovca"
#. module: payment_paypal
#: view:payment.transaction:payment_paypal.transaction_form_paypal
msgid "Paypal TX Details"
msgstr "Paypal TX podrobnosti"
#. module: payment_paypal
#: field:payment.acquirer,paypal_api_password:0
msgid "Rest API Password"
msgstr "Rest API geslo"
#. module: payment_paypal
#: field:payment.acquirer,paypal_api_username:0
msgid "Rest API Username"
msgstr "Rest API uporabniško ime"
#. module: payment_paypal
#: help:payment.acquirer,paypal_seller_account:0
msgid ""
"The Merchant ID is used to ensure communications coming from Paypal are "
"valid and secured."
msgstr "ID trgovca zagotovi varno in veljavno komunikacijo s Paypal."
#. module: payment_paypal
#: field:payment.transaction,paypal_txn_id:0
msgid "Transaction ID"
msgstr "ID transakcije"
#. module: payment_paypal
#: field:payment.transaction,paypal_txn_type:0
msgid "Transaction type"
msgstr "Tip transakcije"
#. module: payment_paypal
#: field:payment.acquirer,paypal_use_ipn:0
msgid "Use IPN"
msgstr "Uporabi IPN"
#. module: payment_paypal
#: field:payment.acquirer,paypal_api_enabled:0
msgid "Use Rest API"
msgstr "Uporabi Rest API"
| {
"pile_set_name": "Github"
} |
//
// Copyright (C) 2015-2018 Dominik Buse <dbuse@mail.uni-paderborn.de>
//
// Documentation for these modules is at http://veins.car2x.org/
//
// SPDX-License-Identifier: GPL-2.0-or-later
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
//
#include <string>
#include "veins/modules/world/traci/trafficLight/TraCITrafficLightInterface.h"
#include "veins/modules/messages/TraCITrafficLightMessage_m.h"
using namespace veins;
using veins::TraCITrafficLightInterface;
using veins::TraCITrafficLightLink;
using veins::TraCITrafficLightProgram;
Define_Module(veins::TraCITrafficLightInterface);
// Default-construct the interface with inert values; the real setup happens
// later in preInitialize() and initialize().
TraCITrafficLightInterface::TraCITrafficLightInterface()
    : cSimpleModule()
    , isPreInitialized(false)
    , updateInterval()
    , manager(nullptr)
    , commandInterface(nullptr)
    , tlCommandInterface(nullptr)
    , external_id("")
    , position()
    , programDefinition()
    , currentLogicId("")
    , currentPhaseNr(-1) // -1 = "no phase known yet"
    , nextSwitchTime()
    , inOnlineSignalState(false) // start in plan-driven (not manually overridden) state
{
}
// This interface owns its per-traffic-light command interface wrapper.
TraCITrafficLightInterface::~TraCITrafficLightInterface()
{
    delete tlCommandInterface;
}
// Record identity, position and update interval before OMNeT++'s initialize()
// stage runs; initialize() asserts that this was called first.
void TraCITrafficLightInterface::preInitialize(const std::string& external_id, const Coord& position, const simtime_t& updateInterval)
{
    isPreInitialized = true;

    this->updateInterval = updateInterval;

    setExternalId(external_id);
    this->position = position;
}
// --- Simple accessors for cached state -------------------------------------

// Position of the traffic light in the simulation's coordinate system.
Coord TraCITrafficLightInterface::getPosition() const
{
    return this->position;
}

// Lists of links controlled by this traffic light (one inner list per signal).
std::list<std::list<TraCITrafficLightLink>> TraCITrafficLightInterface::getControlledLinks()
{
    return controlledLinks;
}

// Currently active logic (signal plan), looked up in the cached program.
TraCITrafficLightProgram::Logic TraCITrafficLightInterface::getCurrentLogic() const
{
    return programDefinition.getLogic(currentLogicId);
}

// Identifier of the currently active logic.
std::string TraCITrafficLightInterface::getCurrentLogicId() const
{
    return currentLogicId;
}

// Index of the current phase within the active logic (-1 before initialize()).
int TraCITrafficLightInterface::getCurrentPhaseId() const
{
    return currentPhaseNr;
}

// Phase object for the current phase of the active logic.
TraCITrafficLightProgram::Phase TraCITrafficLightInterface::getCurrentPhase() const
{
    return getCurrentLogic().phases[currentPhaseNr];
}

// Simulation time at which the next phase switch is assumed to happen.
simtime_t TraCITrafficLightInterface::getAssumedNextSwitch() const
{
    return nextSwitchTime;
}
// Time remaining until the assumed next switch (can be negative if the cached
// switch time already lies in the past).
simtime_t TraCITrafficLightInterface::getRemainingDuration() const
{
    return nextSwitchTime - simTime();
}

// Current signal state string: the manually set ("online") state if one is
// active, otherwise the state of the current phase of the signal plan.
std::string TraCITrafficLightInterface::getCurrentState() const
{
    if (isInOnlineSignalState()) {
        return currentSignalState;
    }
    else {
        return getCurrentPhase().state;
    }
}

// True while the signal state has been overridden manually (setCurrentState)
// instead of following the phases of the signal plan.
bool TraCITrafficLightInterface::isInOnlineSignalState() const
{
    return inOnlineSignalState;
}
// Replace the cached program (set of logics) for this traffic light.
void TraCITrafficLightInterface::setProgramDefinition(const TraCITrafficLightProgram& programDefinition)
{
    this->programDefinition = programDefinition;
}

// Replace the cached controlled-links topology.
void TraCITrafficLightInterface::setControlledLinks(const std::list<std::list<TraCITrafficLightLink>>& controlledLinks)
{
    this->controlledLinks = controlledLinks;
}
// Switch the active logic (signal plan).
//
// If setSumo is true, the change is also pushed to SUMO via TraCI and read
// back to verify it was accepted; the pseudo-id "online" is not a real logic
// and must never be sent to SUMO. A change notification is emitted on the
// "logic" gate whenever the externally visible logic changes. Setting
// "online" leaves currentLogicId untouched so the previous plan id is kept.
void TraCITrafficLightInterface::setCurrentLogicById(const std::string& logicId, bool setSumo)
{
    if (setSumo) {
        ASSERT(logicId != "online");
        if (!programDefinition.hasLogic(logicId)) {
            throw cRuntimeError("Logic '%s' not found in program of TraCITrafficLightInterface %s", logicId.c_str(), external_id.c_str());
        }
        tlCommandInterface->setProgram(logicId);
        // read back to confirm SUMO accepted the change
        const std::string newValueInSumo = tlCommandInterface->getCurrentProgramID();
        ASSERT(newValueInSumo == logicId);
    }
    // notify on a real change, and also when leaving the "online" override
    if (currentLogicId != logicId || (isInOnlineSignalState() && logicId != "online")) {
        sendChangeMsg(TrafficLightAtrributeType::LOGICID, logicId, currentLogicId);
    }
    if (logicId != "online") {
        inOnlineSignalState = false;
        this->currentLogicId = logicId;
    }
}
// Switch to phase number phaseNr of the currently active logic.
//
// If setSumo is true, the phase index is also pushed to SUMO via TraCI and
// read back to verify it was accepted. A change notification is emitted on
// the "logic" gate when the visible phase changes; any "online" override is
// cleared.
void TraCITrafficLightInterface::setCurrentPhaseByNr(const unsigned int phaseNr, bool setSumo)
{
    if (setSumo) {
        if (phaseNr >= getCurrentLogic().phases.size()) {
            // %u/%zu match the argument types (unsigned int / size_t); the
            // previous "%d" invoked undefined varargs behavior for size_t.
            throw cRuntimeError("Cannot set current phase to %u: current logic has only %zu Phases (TraCITrafficLightInterface %s)", phaseNr, getCurrentLogic().phases.size(), external_id.c_str());
        }
        tlCommandInterface->setPhaseIndex(phaseNr);
        // read back to confirm SUMO accepted the change
        const unsigned int newValueInSumo = tlCommandInterface->getCurrentPhaseIndex();
        ASSERT(newValueInSumo == phaseNr);
    }
    // notify on a real change, and also when leaving the "online" override
    if (currentPhaseNr != static_cast<int>(phaseNr) || isInOnlineSignalState()) {
        sendChangeMsg(TrafficLightAtrributeType::PHASEID, std::to_string(phaseNr), std::to_string(currentPhaseNr));
    }

    inOnlineSignalState = false;
    currentPhaseNr = phaseNr;
}
// Override the signal state string directly ("online" control), bypassing the
// phases of the current plan.
//
// If setSumo is true, the state is also pushed to SUMO via TraCI and read
// back to verify it was accepted. A change notification is emitted on the
// "logic" gate if the state string changes.
void TraCITrafficLightInterface::setCurrentState(const std::string& state, bool setSumo)
{
    if (setSumo) {
        tlCommandInterface->setState(state);
        // read back to confirm SUMO accepted the change
        const std::string newValueInSumo = tlCommandInterface->getCurrentState();
        ASSERT(newValueInSumo == state);
    }
    if (currentSignalState != state) {
        sendChangeMsg(TrafficLightAtrributeType::STATE, state, currentSignalState);
    }
    inOnlineSignalState = true;
    currentSignalState = state;
}
// Set the absolute simulation time of the next phase switch (rounded up to a
// multiple of the TraCI update interval); optionally push the corresponding
// remaining phase duration to SUMO and verify the resulting switch time.
void TraCITrafficLightInterface::setNextSwitch(const simtime_t& newNextSwitch, bool setSumo)
{
    // FIXME: not working reliably - use setRemainingDuration instead!
    // round to be feasible for SUMO
    simtime_t nextSwitch = ceil(newNextSwitch, updateInterval, 0);
    if (setSumo) {
        simtime_t remainingDuration = ceil(nextSwitch - simTime(), updateInterval, 0);
        if (remainingDuration < 0) {
            EV << "Warning: remaining duration for switch below 0: " << remainingDuration << std::endl;
            // maybe issue even an error if this occurs
            remainingDuration = 0;
        }
        getTlCommandInterface()->setPhaseDuration(remainingDuration);
        // read back to confirm SUMO's switch time matches the requested one
        simtime_t newValueInSumo = tlCommandInterface->getAssumedNextSwitchTime();
        ASSERT(newValueInSumo == nextSwitch);
    }
    if (nextSwitchTime != nextSwitch) {
        // switch times are transmitted as integer milliseconds
        sendChangeMsg(TrafficLightAtrributeType::SWITCHTIME, std::to_string(nextSwitch.inUnit(SIMTIME_MS)), std::to_string(nextSwitchTime.inUnit(SIMTIME_MS)));
    }
    nextSwitchTime = nextSwitch;
}
// Set the remaining duration of the current phase (preferred over
// setNextSwitch). The duration is rounded up to a multiple of the TraCI
// update interval and anchored at SUMO's current time step, which trails the
// OMNeT++ time by one update interval.
void TraCITrafficLightInterface::setRemainingDuration(const simtime_t& rawRemainingDuration, bool setSumo)
{
    ASSERT(rawRemainingDuration >= 0);
    // round (up) to match sumo time steps
    simtime_t veinsTimeNow(simTime());
    simtime_t sumoTimeNow(ceil(veinsTimeNow, updateInterval) - updateInterval);
    simtime_t roundedRemainingDuration = ceil(rawRemainingDuration, updateInterval, 0);
    // simtime_t nextSwitchInVeins = floor(simTime() + roundedRemainingDuration, updateInterval, 0) - updateInterval;
    simtime_t nextSwitchInVeins = sumoTimeNow + roundedRemainingDuration;
    if (setSumo) {
        // set value to sumo
        getTlCommandInterface()->setPhaseDuration(roundedRemainingDuration);
        // check that everything is consistent
        simtime_t nextSwitchInSumo = tlCommandInterface->getAssumedNextSwitchTime();
        ASSERT(nextSwitchInSumo == nextSwitchInVeins);
    }
    if (nextSwitchTime != nextSwitchInVeins) {
        // switch times are transmitted as integer milliseconds
        sendChangeMsg(TrafficLightAtrributeType::SWITCHTIME, std::to_string(nextSwitchInVeins.inUnit(SIMTIME_MS)), std::to_string(nextSwitchTime.inUnit(SIMTIME_MS)));
    }
    nextSwitchTime = nextSwitchInVeins;
}
// OMNeT++ initialization: pull the full program definition, controlled links
// and current signal state from SUMO via TraCI. Requires that preInitialize()
// was called first (asserted).
void TraCITrafficLightInterface::initialize()
{
    ASSERT(isPreInitialized);
    isPreInitialized = false;

    setProgramDefinition(getTlCommandInterface()->getProgramDefinition());
    setControlledLinks(getTlCommandInterface()->getControlledLinks());
    currentLogicId = getTlCommandInterface()->getCurrentProgramID();
    currentPhaseNr = getTlCommandInterface()->getCurrentPhaseIndex();
    nextSwitchTime = getTlCommandInterface()->getAssumedNextSwitchTime();
    currentSignalState = getTlCommandInterface()->getCurrentState();
}
// Message dispatch: self-messages are currently unused; messages arriving on
// the "logic$i" gate are treated as change commands. Every message is
// consumed (deleted) here.
void TraCITrafficLightInterface::handleMessage(cMessage* msg)
{
    if (msg->isSelfMessage()) {
        // not in use (yet)
    }
    else if (msg->arrivedOn("logic$i")) {
        handleChangeCommandMessage(msg);
    }
    delete msg;
}
// Dispatch a change command received on the "logic" gate to the matching
// setter; setSumo=true so every change is also pushed through to SUMO.
void TraCITrafficLightInterface::handleChangeCommandMessage(cMessage* msg)
{
    TraCITrafficLightMessage* tmMsg = check_and_cast<TraCITrafficLightMessage*>(msg);
    switch (tmMsg->getChangedAttribute()) {
    case TrafficLightAtrributeType::LOGICID:
        setCurrentLogicById(tmMsg->getNewValue(), true);
        break;
    case TrafficLightAtrributeType::PHASEID:
        setCurrentPhaseByNr(std::stoi(tmMsg->getNewValue()), true);
        break;
    case TrafficLightAtrributeType::SWITCHTIME:
        // switch times are transmitted as integer milliseconds
        setNextSwitch(SimTime(std::stoi(tmMsg->getNewValue()), SIMTIME_MS), true);
        break;
    case TrafficLightAtrributeType::STATE:
        setCurrentState(tmMsg->getNewValue(), true);
        break;
    }
}
// Emit a TraCITrafficLightMessage on the "logic" gate announcing that
// changedAttribute changed from oldValue to newValue; the change source is
// always tagged as SUMO. Enter_Method_Silent() is used because this is,
// presumably, reachable from another module's context — confirm before
// removing.
void TraCITrafficLightInterface::sendChangeMsg(int changedAttribute, const std::string newValue, const std::string oldValue)
{
    Enter_Method_Silent();
    TraCITrafficLightMessage* pMsg = new TraCITrafficLightMessage("TrafficLightChangeMessage");
    pMsg->setTlId(external_id.c_str());
    pMsg->setChangedAttribute(changedAttribute);
    pMsg->setChangeSource(TrafficLightChangeSource::SUMO);
    pMsg->setOldValue(oldValue.c_str());
    pMsg->setNewValue(newValue.c_str());
    send(pMsg, "logic$o");
}
| {
"pile_set_name": "Github"
} |
import enum
import re
from collections import namedtuple
import slugify
import sqlalchemy as sa
from pyramid import security
from h import pubid
from h.auth import role
from h.db import Base, mixins
from h.util.group import split_groupid
# Validation limits for group fields, enforced by the validators on Group.
GROUP_NAME_MIN_LENGTH = 3
GROUP_NAME_MAX_LENGTH = 25
GROUP_DESCRIPTION_MAX_LENGTH = 250
# authority_provided_id may only contain characters that are legal inside an
# encoded URI component.
AUTHORITY_PROVIDED_ID_PATTERN = r"^[a-zA-Z0-9._\-+!~*()']+$"
AUTHORITY_PROVIDED_ID_MAX_LENGTH = 1024
class JoinableBy(enum.Enum):
    """Which class of user is allowed to join a group."""

    authority = "authority"


class ReadableBy(enum.Enum):
    """Which class of user is allowed to read a group's annotations."""

    members = "members"
    world = "world"


class WriteableBy(enum.Enum):
    """Which class of user is allowed to write annotations to a group."""

    authority = "authority"
    members = "members"
class GroupMembership(Base):
    """Association table linking users to the groups they are members of."""

    __tablename__ = "user_group"
    # A user may appear in a given group at most once.
    __table_args__ = (sa.UniqueConstraint("user_id", "group_id"),)

    id = sa.Column("id", sa.Integer, autoincrement=True, primary_key=True)

    user_id = sa.Column("user_id", sa.Integer, sa.ForeignKey("user.id"), nullable=False)

    group_id = sa.Column(
        "group_id", sa.Integer, sa.ForeignKey("group.id"), nullable=False
    )
class Group(Base, mixins.Timestamps):
    """A group of users, with its identity and access-control settings."""

    __tablename__ = "group"

    __table_args__ = (
        # Add a composite index of the (authority, authority_provided_id)
        # columns. Also impose uniqueness such that no two records may share
        # the same (authority, authority_provided_id) composite
        #
        # See:
        #
        # * http://docs.sqlalchemy.org/en/latest/core/constraints.html#indexes
        sa.Index(
            "ix__group__groupid", "authority", "authority_provided_id", unique=True
        ),
    )

    id = sa.Column(sa.Integer, autoincrement=True, primary_key=True)

    # We don't expose the integer PK to the world, so we generate a short
    # random string to use as the publicly visible ID.
    pubid = sa.Column(sa.Text(), default=pubid.generate, unique=True, nullable=False)

    authority = sa.Column(sa.UnicodeText(), nullable=False)

    name = sa.Column(sa.UnicodeText(), nullable=False, index=True)

    creator_id = sa.Column(sa.Integer, sa.ForeignKey("user.id"))
    creator = sa.orm.relationship("User")

    description = sa.Column(sa.UnicodeText())

    #: Enforce scope match for annotations in this group.
    #: For groups with 1-n scopes, only allow annotations for target
    #: documents whose URIs match one of the group's scopes.
    #: When disabled, annotations should be allowed web-wide.
    #: This setting has no effect if the group does not have any scopes.
    #: Enforcement is the responsibility of services (i.e. the model
    #: layer does not enforce scope compliance).
    enforce_scope = sa.Column(
        sa.Boolean,
        nullable=False,
        default=True,
        server_default=sa.sql.expression.true(),
    )

    #: Allow authorities to define their own unique identifier for a group
    #: (versus the pubid). This identifier is owned by the authority/client
    #: versus ``pubid``, which is owned and controlled by the service.
    authority_provided_id = sa.Column(sa.UnicodeText(), nullable=True)

    #: Which type of user is allowed to join this group, possible values are:
    #: authority, None
    joinable_by = sa.Column(
        sa.Enum(JoinableBy, name="group_joinable_by"), nullable=True
    )

    #: Which type of user is allowed to read annotations in this group,
    #: possible values are: authority, members, world
    readable_by = sa.Column(
        sa.Enum(ReadableBy, name="group_readable_by"), nullable=True, index=True
    )

    #: Which type of user is allowed to write to this group, possible values
    #: are: authority, members
    writeable_by = sa.Column(
        sa.Enum(WriteableBy, name="group_writeable_by"), nullable=True
    )
@property
def groupid(self):
    """
    Return the client-owned ``group:<authority_provided_id>@<authority>``
    identifier, or None when no ``authority_provided_id`` is set.
    """
    if self.authority_provided_id is None:
        return None
    return "group:{}@{}".format(self.authority_provided_id, self.authority)
@groupid.setter
def groupid(self, value):
    """
    Deconstruct a formatted ``groupid`` and set its constituent properties
    on the instance.

    If ``groupid`` is set to None, set ``authority_provided_id`` to None
    but leave authority untouched—this allows a caller to nullify the
    ``authority_provided_id`` field.

    :raises ValueError: if ``groupid`` is an invalid format
    """
    if value is None:
        self.authority_provided_id = None
    else:
        # split_groupid raises ValueError on a malformed groupid
        groupid_parts = split_groupid(value)
        self.authority_provided_id = groupid_parts["authority_provided_id"]
        self.authority = groupid_parts["authority"]
# Group membership: users in this group; the backref gives each User a
# ``groups`` collection ordered by group name.
members = sa.orm.relationship(
    "User",
    secondary="user_group",
    backref=sa.orm.backref("groups", order_by="Group.name"),
)

# URL scopes attached to this group; deleted together with the group.
scopes = sa.orm.relationship(
    "GroupScope", backref="group", cascade="all, delete-orphan"
)

# Optional owning organization.
organization_id = sa.Column(
    sa.Integer, sa.ForeignKey("organization.id"), nullable=True
)
organization = sa.orm.relationship("Organization")
def __init__(self, **kwargs):
    # Redundant override: delegates straight to the base class with no extra
    # behavior. Presumably kept as a hook point — confirm before removing.
    super(Group, self).__init__(**kwargs)
@sa.orm.validates("name")
def validate_name(self, key, name):
    """Reject group names outside the allowed length range."""
    if GROUP_NAME_MIN_LENGTH <= len(name) <= GROUP_NAME_MAX_LENGTH:
        return name
    raise ValueError(
        "name must be between {min} and {max} characters "
        "long".format(min=GROUP_NAME_MIN_LENGTH, max=GROUP_NAME_MAX_LENGTH)
    )
@sa.orm.validates("authority_provided_id")
def validate_authority_provided_id(self, key, authority_provided_id):
    """
    Validate and normalize ``authority_provided_id``.

    Empty/falsy values are normalized to None. Otherwise the value must
    match AUTHORITY_PROVIDED_ID_PATTERN and be at most
    AUTHORITY_PROVIDED_ID_MAX_LENGTH characters long.

    :raises ValueError: if the value contains disallowed characters or is
        too long
    """
    if not authority_provided_id:
        return None

    if not re.match(AUTHORITY_PROVIDED_ID_PATTERN, authority_provided_id):
        raise ValueError(
            "authority_provided_id must only contain characters allowed"
            r" in encoded URIs: [a-zA-Z0-9._\-+!~*()']"
        )

    if len(authority_provided_id) > AUTHORITY_PROVIDED_ID_MAX_LENGTH:
        # Fixed error message: the previous concatenation rendered as
        # "...characters or fewercharacters long" (duplicated words,
        # missing space).
        raise ValueError(
            "authority_provided_id must be {max} characters or fewer".format(
                max=AUTHORITY_PROVIDED_ID_MAX_LENGTH
            )
        )

    return authority_provided_id
@property
def slug(self):
    """A version of this group's name suitable for use in a URL."""
    # Delegates to the third-party ``python-slugify`` package.
    return slugify.slugify(self.name)
@property
def type(self):
    """
    The "type" of this group, e.g. "open" or "private".

    :rtype: string
    :raises ValueError: if the type of the group isn't recognized
    """
    # Bundle this group's access flags into a comparable tuple.
    self_type_flags = TypeFlags(
        joinable_by=self.joinable_by,
        readable_by=self.readable_by,
        writeable_by=self.writeable_by,
    )

    # Compare against the known flag presets (defined elsewhere in this
    # module); the first exact match determines the type.
    for type_, type_flags in (
        ("open", OPEN_GROUP_TYPE_FLAGS),
        ("private", PRIVATE_GROUP_TYPE_FLAGS),
        ("restricted", RESTRICTED_GROUP_TYPE_FLAGS),
    ):
        if self_type_flags == type_flags:
            return type_

    raise ValueError(
        "This group doesn't seem to match any known type of group. "
        "This shouldn't be in the database!"
    )
@property
def is_public(self):
    """True if annotations in this group are readable by anyone ("world")."""
    return self.readable_by == ReadableBy.world
def __acl__(self):
    """Return the Pyramid ACL for this group.

    The ACL is an ordered list of ``(action, principal, permission)``
    terms. Order is significant to Pyramid's ACL evaluation, and the
    trailing ``DENY_ALL`` must remain last so anything not explicitly
    allowed above it is denied.
    """
    terms = []

    join_principal = _join_principal(self)
    if join_principal is not None:
        terms.append((security.Allow, join_principal, "join"))

    read_principal = _read_principal(self)
    if read_principal is not None:
        terms.append((security.Allow, read_principal, "read"))
        # Any user who can read the group should also be able to see
        # who is a member of the group
        terms.append((security.Allow, read_principal, "member_read"))

    flag_principal = _flag_principal(self)
    if flag_principal is not None:
        terms.append((security.Allow, flag_principal, "flag"))

    write_principal = _write_principal(self)
    if write_principal is not None:
        terms.append((security.Allow, write_principal, "write"))

    if self.creator:
        # The creator of the group should be able to update it
        terms.append((security.Allow, self.creator.userid, "admin"))
        terms.append((security.Allow, self.creator.userid, "moderate"))
        # The creator may update this group in an upsert context
        terms.append((security.Allow, self.creator.userid, "upsert"))

    # Temporary hack to allow the LMS app's machine user to upsert all LMS
    # groups, even if the machine user isn't the group's creator.
    # We can remove this once we've either:
    #
    # * Run a DB migration to change the creators of all LMS groups to the
    #   LMS app's machine user: https://github.com/hypothesis/lms/issues/1401
    # * Or changed the LMS app to use h's new bulk API instead of using the
    #   group upsert API: https://github.com/hypothesis/lms/issues/1506
    if self.authority == "lms.hypothes.is":
        terms.append((security.Allow, "acct:lms@lms.hypothes.is", "upsert"))

    # This authority principal may be used to grant auth clients
    # permissions for groups within their authority
    authority_principal = "client_authority:{}".format(self.authority)
    # auth_clients that have the same authority as the target group
    # may read the members within it
    terms.append((security.Allow, authority_principal, "member_read"))
    # auth_clients that have the same authority as the target group
    # may add members to it
    terms.append((security.Allow, authority_principal, "member_add"))
    # auth_clients that have the same authority as this group
    # should be allowed to update it
    terms.append((security.Allow, authority_principal, "admin"))
    # auth_clients with matching authority should be able to read
    # the group
    terms.append((security.Allow, authority_principal, "read"))

    # Those with the admin or staff role should be able to admin/edit any group
    terms.append((security.Allow, role.Staff, "admin"))
    terms.append((security.Allow, role.Admin, "admin"))

    # Catch-all: deny anything not explicitly allowed above.
    terms.append(security.DENY_ALL)

    return terms
def __repr__(self):
    """Developer-facing representation, e.g. ``<Group: my-group>``."""
    return "<Group: {}>".format(self.slug)
@classmethod
def created_by(cls, session, user):
    """Return a query object filtering groups by creator."""
    creator_filter = Group.creator == user
    return session.query(cls).filter(creator_filter)
def _join_principal(group):
    """Return the principal allowed to join ``group``, or ``None``."""
    if group.joinable_by == JoinableBy.authority:
        return "authority:{}".format(group.authority)
    return None
def _read_principal(group):
    """Return the principal allowed to read ``group``, or ``None``."""
    if group.readable_by == ReadableBy.members:
        return "group:{}".format(group.pubid)
    if group.readable_by == ReadableBy.world:
        return security.Everyone
    return None
def _flag_principal(group):
    """Return the principal allowed to flag annotations in ``group``.

    If a user can read (see) annotations within this group, they can also
    flag them -- but they need to be logged in
    (``pyramid.security.Authenticated``).
    """
    if group.readable_by == ReadableBy.members:
        return "group:{}".format(group.pubid)
    if group.readable_by == ReadableBy.world:
        return security.Authenticated
    return None
def _write_principal(group):
    """Return the principal allowed to write to ``group``, or ``None``."""
    if group.writeable_by == WriteableBy.authority:
        return "authority:{}".format(group.authority)
    if group.writeable_by == WriteableBy.members:
        return "group:{}".format(group.pubid)
    return None
# The (joinable_by, readable_by, writeable_by) triple that determines a
# group's "type" (see ``Group.type``).
TypeFlags = namedtuple("TypeFlags", "joinable_by readable_by writeable_by")

# Open groups: world-readable, writeable within the authority, not joinable.
OPEN_GROUP_TYPE_FLAGS = TypeFlags(
    joinable_by=None, readable_by=ReadableBy.world, writeable_by=WriteableBy.authority
)
# Private groups: members-only read/write, joinable within the authority.
PRIVATE_GROUP_TYPE_FLAGS = TypeFlags(
    joinable_by=JoinableBy.authority,
    readable_by=ReadableBy.members,
    writeable_by=WriteableBy.members,
)
# Restricted groups: world-readable but only members may write.
RESTRICTED_GROUP_TYPE_FLAGS = TypeFlags(
    joinable_by=None, readable_by=ReadableBy.world, writeable_by=WriteableBy.members
)
| {
"pile_set_name": "Github"
} |
// TODO Outgoing. We're going to use a new version of Proof that does not have
// this sort of thing.
var fs = require('fs'),
path = require('path'),
crypto = require('crypto'),
cadence = require('cadence'),
Strata = require('..'),
rimraf = require('rimraf'),
path = require('path'),
ok = require('assert').ok
// Node-style error splitter: failures are routed to `callback`, successes
// (the result only) to `forward`.
function check (callback, forward) {
    return function (error, result) {
        if (error) {
            callback(error)
        } else {
            forward(result)
        }
    }
}
// Rebuild an abstract tree description from the page files on disk. Every
// non-hidden file under `<directory>/pages` is parsed line by line as
// `<length> <sha1> <header JSON> [<body JSON>]`, then the collected records
// are normalized through `abstracted`, `order` and `renumber` before being
// handed to `callback(null, tree)`.
function vivify (directory, callback) {
    var files, dir = {}, lengths = {}, count = 0
    directory = path.join(directory, 'pages')
    fs.readdir(directory, check(callback, list))
    // Start a read for each page file; dot files are skipped but still
    // counted via `read` so the completion tally stays correct.
    function list ($1) {
        (files = $1).forEach(function (file) {
            if (!/^\./.test(file)) readFile(file)
            else read()
        })
    }
    function readFile (file) {
        dir[file] = []
        lengths[file] = []
        fs.readFile(path.resolve(directory, file), 'utf8', check(callback, lines))
        function lines (lines) {
            lines = lines.split(/\n/)
            lines.pop()
            lines.forEach(function (line, index) {
                // Keep only the JSON payloads; the length recorded for each
                // line includes the trailing newline.
                var $ = /^\d+\s[\da-f]+\s(\S+)(?:\s(.*))?$/.exec(line)
                var record = { header: JSON.parse($[1]) }
                if ($[2]) {
                    record.body = JSON.parse($[2])
                }
                dir[file].push(record)
                lengths[file][index] = line.length + 1
            })
            read()
        }
    }
    // Once every directory entry has been handled, normalize and report.
    function read () {
        if (++count == files.length) callback(null, renumber(order(abstracted(dir, lengths))))
    }
}
// TODO pretty print should be in here, so I can use it from stratify and the
// stringify utility.
// Vivify the tree under `directory` and hand back its indented JSON dump.
function stringify (directory, callback) {
    vivify(directory, check(callback, function (tree) {
        callback(null, JSON.stringify(tree, null, 2))
    }))
}
// Read a JSON tree description from the file named `segments` and normalize
// it with `order` and `renumber` before yielding it to `callback`.
function load (segments, callback) {
    fs.readFile(segments, 'utf8', check(callback, function (json) {
        callback(null, renumber(order(JSON.parse(json))))
    }))
}
// Collect every record in the tree in order by iterating the leaf pages from
// the left-most page rightward; yields the array of records.
var gather = cadence(function (async, strata) {
    var records = [], page, item
    // Sanity check: a Strata instance is expected here, not a callback.
    if (typeof strata == 'function') throw new Error
    async(function () {
        strata.iterator(strata.left, async())
    }, function (cursor) {
        async.loop([ true ], function (more) {
            if (!more) {
                // No further pages: release the cursor and return the records.
                async(function () {
                    cursor.unlock(async())
                }, function () {
                    return [ async.break, records ]
                })
            } else {
                // Copy the visible items from the current page, then advance.
                for (var i = cursor.offset; i < cursor.page.items.length; i ++) {
                    records.push(cursor.page.items[i].record)
                }
                cursor.next(async())
            }
        })
    })
})
// Write a tree description out as Strata page files under
// `<directory>/pages`. `segments` is either a file name (loaded via `load`)
// or an already-parsed description object.
var serialize = cadence(function (async, segments, directory) {
    async([function () {
        fs.mkdir(path.join(directory, 'drafts'), 0755, async())
        directory = path.join(directory, 'pages')
        fs.mkdir(directory, 0755, async())
    }, function (error) {
        // The directories may already exist; any other error is fatal.
        if (error.code !== 'EEXIST') {
            throw error
        }
    }], function () {
        if (typeof segments == 'string') load(segments, async())
        else return [ segments ]
    }, function (json) {
        var dir = directivize(json)
        var files = Object.keys(dir)
        var count = 0
        async.forEach([ files ], function (file) {
            var records = []
            dir[file].forEach(function (line) {
                // Serialize each record as `<length> <sha1> <header> [<body>]`,
                // hashing header and body exactly as they appear on disk.
                var record = [ JSON.stringify(line.header) ]
                var hash = crypto.createHash('sha1')
                hash.update(record[0])
                if (line.body) {
                    var body = JSON.stringify(line.body)
                    hash.update(' ', 'utf8')
                    hash.update(body)
                    record.push(body)
                }
                record.unshift(hash.digest('hex'))
                record = record.join(' ')
                // The length prefix counts itself, so adding it can change its
                // own digit count; take the fixed point of the two sizes.
                var length = record.length + 1
                var entire = length + String(length).length + 1
                length = Math.max(entire, length + String(entire).length + 1)
                records.push(length + ' ' + record)
            })
            records = records.join('\n') + '\n'
            fs.writeFile(path.resolve(directory, String(file) + '.0'), records, 'utf8', async())
        })
    })
})
// Convert raw per-file records into an abstract description keyed by page
// address: leaf pages (odd addresses) become `{ log: [...], right? }`, branch
// pages (even addresses) become `{ children: [...] }`.
// NOTE(review): the caller in `vivify` passes a second `lengths` argument
// that this signature ignores -- confirm whether it is still needed.
function abstracted (dir) {
    var output = {}
    var position = 0
    var grouped = {}
    // Group the files by page address (the part before the first dot).
    for (var file in dir) {
        var address = file.split('.').shift()
        var files = grouped[address]
        if (!files) {
            files = grouped[address] = []
        }
        files.push({ name: file, body: dir[file] })
    }
    for (var address in grouped) {
        var record
        if (address % 2) {
            // Leaf page: replay its log files in numeric-suffix order.
            var files = grouped[address].sort(function (a, b) {
                a = a.name.split('.').pop()
                b = b.name.split('.').pop()
                return +(a) - +(b)
            })
            // console.log(files.map(function (file) { return file.name }))
            record = { log: [] }
            var entry = 0
            files.forEach(function (file) {
                position = 0
                file.body.forEach(function (line, index) {
                    // console.log(line)
                    var json = line.header
                    // Entries must be numbered consecutively across files.
                    ok(++entry == Math.abs(json[0]), 'entry record is wrong')
                    if (json[0] >= 0) {
                        // Positive second field inserts a value, negative
                        // deletes by (one-based) position.
                        if (json[1] > 0) {
                            record.log.push({ type: 'add', value: line.body })
                        } else {
                            record.log.push({ type: 'del', index: Math.abs(json[1]) - 1 })
                        }
                    } else {
                        // Header record: must come first; names the right
                        // sibling page when present.
                        ok(json[0] < 0, 'meta records should have negative entry')
                        ok(index == 0, 'header not first entry')
                        if (json[1]) record.right = Math.abs(json[1])
                    }
                })
            })
        } else {
            // Branch page: positive link positions insert a child address,
            // negative ones delete by one's-complement index.
            var children = []
            grouped[address][0].body.forEach(function (json, index) {
                if (json.header[1] > 0) {
                    children.splice(json.header[1] - 1, 0, json.header[2])
                } else {
                    children.splice(~json.header[1], 1)
                }
            })
            record = { children: children }
        }
        output[address] = record
    }
    return output
}
// Remap page addresses onto the smallest parity-preserving numbering (leaves
// stay odd, branches stay even) and rewrite `right` links and `children`
// arrays to match. Page objects are reused (and mutated) in the result.
function renumber (json) {
    var sorted = []
    for (var key in json) sorted.push(+key)
    sorted.sort(function (left, right) { return left - right })
    var mapping = {}
    var counter = 0
    for (var i = 0; i < sorted.length; i++) {
        var original = sorted[i]
        // Advance the counter until it has the same parity as the address.
        while ((original % 2) != (counter % 2)) counter++
        mapping[original] = counter++
    }
    var remapped = {}
    for (var source in json) {
        var page = json[source]
        if (source % 2) {
            if (page.right) page.right = mapping[page.right]
        } else {
            page.children = page.children.map(function (child) {
                return mapping[child]
            })
        }
        remapped[mapping[source]] = page
    }
    return remapped
}
// Replay each leaf page's log to reconstruct its sorted record order. Adds
// an `order` array to every leaf (odd address), and records the first entry
// deleted from position zero as the page's `ghost`. Mutates and returns
// `json`.
function order (json) {
    for (var address in json) {
        var page = json[address]
        if (!(address % 2)) continue
        var records = []
        page.log.forEach(function (entry) {
            if (entry.type == 'add') {
                // Insert before the first value strictly greater.
                var at = 0
                while (at < records.length && records[at] <= entry.value) at++
                records.splice(at, 0, entry.value)
            } else if (entry.type == 'del') {
                if (!entry.index && !page.ghost) {
                    page.ghost = records[0]
                }
                records.splice(entry.index, 1)
            }
        })
        page.order = records
    }
    return json
}
// Inverse of `abstracted`: expand an abstract tree description into the
// per-page lists of records (headers and bodies) that `serialize` writes.
function directivize (json) {
    var directory = {}, keys = {}
    // The key of a page is the first record of its left-most descendant leaf
    // (the ghost, when the leaf has one).
    function key (address) {
        var object = json[address]
        if (object.children) {
            return key(object.children[0])
        } else {
            return object.ghost || object.order[0]
        }
    }
    // A hex SHA-1 digest occupies 40 characters of each serialized line.
    var checksum = 40
    for (var address in json) {
        var object = json[address]
        if (object.children) {
            // Branch page: one record per child; every child but the first
            // also carries that child's key.
            directory[address] = object.children.map(function (address, index) {
                return { header: [ index + 1, index + 1, address ], body: index ? key(address) : null }
            })
        } else {
            var ghosts = 0
            var positions = []
            var lengths = []
            var position = 0
            var order = []
            var records = 0
            directory[address] = object.log.filter(function (entry) {
                return entry.type != 'pos'
            }).map(function (entry, count) {
                var record
                var index
                switch (entry.type) {
                case 'add':
                    records++
                    // Find the sorted insert position for the new value.
                    for (index = 0; index < order.length; index++) {
                        if (order[index] > entry.value) {
                            break
                        }
                    }
                    order.splice(index, 0, entry.value)
                    positions.splice(index, 0, position)
                    record = { header: [ count + 2, index + 1 ], body: entry.value }
                    break
                case 'del':
                    records--
                    record = { header: [ count + 2, -(entry.index + 1) ] }
                    break
                }
                // On-disk line length: header, optional body, checksum,
                // separators, plus the self-referential length prefix whose
                // own digit count contributes to the total.
                var length = JSON.stringify(record.header).length + 1 + checksum + 1
                if (record.body != null) {
                    length += JSON.stringify(record.body).length + 1
                }
                var entire = length + String(length).length + 1
                length = Math.max(entire, length + String(entire).length + 1)
                switch (entry.type) {
                case 'add':
                    lengths.splice(index, 0, length)
                    break
                }
                position += length
                return record
            })
            // Prepend the leaf header record naming the right sibling (and
            // carrying that sibling's key when one exists).
            directory[address].unshift({
                header: [ -1, object.right || 0, 0 ],
                body: object.right ? key(object.right) : null
            })
        }
    }
    return directory
}
// Construct a Strata tree, filling in a default comparator that orders keys
// with `<`/`>` and asserts that neither key is null.
function createStrata (options) {
    if (!options.comparator) {
        options.comparator = function (left, right) {
            ok(left != null && right != null, 'key is null')
            if (left < right) return -1
            if (left > right) return 1
            return 0
        }
    }
    return new Strata(options)
}
// Prepare a fresh `tmp` directory, expose the helpers as test globals, run
// `test`, then remove `tmp` unless the UNTIDY environment variable is set.
// NOTE(review): this appears to be superseded by the wiring inside
// `module.exports` below -- confirm whether it is still invoked anywhere.
var invoke = cadence(function (async, tmp, okay, test) {
    async(function () {
        rimraf(tmp, async())
    }, function () {
        fs.mkdir(tmp, 0755, async())
    }, function () {
        // Installing a value as a global and registering it as a known leak
        // keeps the proof harness from flagging it.
        okay.global = function (name, value) {
            global[name] = value
            okay.leak(name)
        }
        okay.global('createStrata', createStrata)
        okay.global('tmp', tmp)
        okay.global('load', load)
        okay.global('stringify', stringify)
        okay.global('serialize', serialize)
        okay.global('gather', gather)
        okay.global('vivify', vivify)
        okay.global('script', script)
        test(okay, async())
    }, function () {
        if (!('UNTIDY' in process.env)) {
            rimraf(tmp, async())
        }
    })
})
// Test-harness bootstrap: rebinds the caller's `module.exports` to a
// proof-based runner that provisions a scratch `tmp` directory and exposes
// the helpers both as globals and as a context object passed to the test.
module.exports = function (module, dirname) {
    var tmp = dirname + '/tmp'
    module.exports = function (count, test) {
        require('proof')(count, cadence(function (async, okay) {
            // Install a value as a global and mark it as a deliberate leak so
            // the proof harness does not flag it.
            okay.global = function (name, value) {
                global[name] = value
                okay.leak(name)
            }
            okay.global('createStrata', createStrata)
            okay.global('tmp', tmp)
            okay.global('load', load)
            okay.global('stringify', stringify)
            okay.global('serialize', serialize)
            okay.global('gather', gather)
            okay.global('vivify', vivify)
            okay.global('script', script)
            async(function () {
                // Start from a clean scratch directory.
                rimraf(tmp, async())
            }, function () {
                fs.mkdir(tmp, 0755, async())
            }, function () {
                // The same helpers are also handed to the test as a context
                // object, plus a `tidy` helper that removes the scratch space.
                cadence(test)(okay, {
                    createStrata: createStrata,
                    tmp: tmp,
                    load: load,
                    stringify: stringify,
                    serialize: serialize,
                    gather: gather,
                    vivify: vivify,
                    script: script,
                    tidy: cadence(function (async) {
                        rimraf(tmp, async())
                    })
                }, async())
            }, function () {
                // Keep the scratch directory around when UNTIDY is set.
                if (!('UNTIDY' in process.env)) {
                    rimraf(tmp, async())
                }
            })
        }))
    }
}
// Hand-rolled pretty printer for tree-description JSON: keeps each log entry
// and children array on a single line, unlike JSON.stringify's indenting.
function pretty (json) {
    var out = []
    function emit (piece) { out.push.apply(out, arguments) }
    function stringified (value) { return JSON.stringify(value) }
    function arrayText (items) {
        return '[ ' + items.join(', ') + ' ]'
    }
    function objectText (object) {
        var pairs = []
        for (var name in object) {
            pairs.push(stringified(name) + ': ' + stringified(object[name]))
        }
        return '{ ' + pairs.join(', ') + ' }'
    }
    emit('{\n')
    var separator = ''
    for (var file in json) {
        emit(separator, ' ', stringified(file), ': {\n')
        if (file % 2) {
            // Leaf page: emit the log entries, one per line, then `right`.
            emit(' "log": [\n')
            var entrySeparator = ''
            json[file].log.forEach(function (entry) {
                emit(entrySeparator, ' ', objectText(entry))
                entrySeparator = ',\n'
            })
            emit('\n ]')
            if (json[file].right) {
                emit(',\n "right": ' + json[file].right + '\n')
            } else {
                emit('\n')
            }
        } else {
            // Branch page: children on a single line.
            emit(' "children": ', arrayText(json[file].children), '\n')
        }
        emit(' }')
        separator = ',\n'
    }
    emit('\n}\n')
    return out.join('')
}
// Drive a Strata tree from a small scripting language read from
// `options.file`. Each line starts with an operation character:
//   +x / +a-z  add one value or a range of values
//   -x / -a-z  remove one value or a range
//   >file      dump the tree as pretty JSON to `file`
//   <file      load a serialized fixture from `file` (replaces `create`)
//   =file      compare the live tree against the fixture in `file`
//   ~          balance the tree
//   !          print the tree to stdout
function script (options, callback) {
    var strata = new Strata({ directory: options.directory, branchSize: 3, leafSize: 3 })
    var queue = [{ type: 'create' }]
    var actions = {}
    // Empty the directory (refusing anything that does not look like a
    // strata directory) and create a fresh tree.
    actions.create = cadence(function (async, action) {
        async(function () {
            fs.readdir(options.directory, async())
        }, function (list) {
            list = list.filter(function (file) { return ! /^\./.test(file) })
            if (!list.every(function (file) { return /^\d+$/.test(file) })) {
                throw new Error('doesn\'t look like a strata directory')
            }
            async.forEach([ list ], function (file) { fs.unlink(file, async()) })
        }, function () {
            strata.create(async())
        })
    })
    // NOTE(review): 'j' is missing from this alphabet -- confirm whether
    // that is deliberate before relying on the generated sequences.
    var alphabet = 'abcdefghiklmnopqrstuvwxyz'.split('')
    // Increment a base-25 string key, e.g. 'a' -> 'b', 'z' -> 'aa'.
    function inc (string) {
        var parts = string.split('').reverse(), i = 0
        for (;;) {
            var letter = i < parts.length ? alphabet.indexOf(parts[i]) + 1 : 0
            if (letter == alphabet.length) letter = 0
            parts[i] = alphabet[letter]
            if (letter || ++i == parts.length) break
        }
        if (!letter) {
            parts.push('a')
        }
        return parts.reverse().join('')
    }
    // Insert each of `action.values` in turn through a single mutator.
    actions.add = cadence(function (async, action) {
        async(function () {
            strata.mutator(action.values[0], async())
        }, function (cursor) {
            async.loop([], function () {
                cursor.indexOf(action.values[0], cursor.ghosts)
            }, function (index) {
                // A negative index means the value is absent; insert at the
                // one's-complement position.
                ok(index < 0)
                cursor.insert(action.values[0], action.values[0], ~index)
                action.values.shift()
                if (!action.values.length) {
                    async(function () {
                        cursor.unlock(async())
                    }, function () {
                        return [ async.break ]
                    })
                }
            })
        })
    })
    // Remove each of `action.values`, acquiring a fresh mutator per value.
    actions.remove = cadence(function (async, action) {
        var mutate, next
        async.loop([], function () {
            if (action.values.length) strata.mutator(action.values[0], async())
            else return [ async.break ]
        }, function (cursor) {
            action.values.shift()
            async(function () {
                // A non-negative index means the value was found.
                if (cursor.index >= 0) cursor.remove(cursor.index)
            }, function () {
                cursor.unlock(async())
            })
        })
    })
    actions.balance = function (action, callback) {
        strata.balance(callback)
    }
    // Print an indented rendering of a vivified tree to stdout.
    function print (tree, address, index, depth) {
        tree.forEach(function (child, index) {
            var padding = new Array(depth + 1).join(' ')
            if (child.address % 2) {
                var key = index ? child.children[0] : '<'
                while (key.length != 2) key = key + ' '
                process.stdout.write(padding + key + ' -> ')
                process.stdout.write(child.children.slice(child.ghosts).join(', ') + '\n')
            } else {
                if (!('key' in child)) {
                    process.stdout.write(padding + '<\n')
                } else {
                    process.stdout.write(padding + child.key + '\n')
                }
                print(child.children, child.address, 0, depth + 1)
            }
        })
    }
    actions.vivify = cadence(function (async, action) {
        async(function () {
            strata.vivify(async())
        }, function (tree) {
            print(tree, 0, 0, 0)
        })
    })
    actions.stringify = cadence(function (async, action) {
        async(function () {
            stringify(options.directory, async())
        }, function (result) {
            fs.writeFile(action.file, pretty(JSON.parse(result)), 'utf8', async())
        })
    })
    actions.serialize = cadence(function (async, action) {
        async(function () {
            serialize(action.file, options.directory, async())
        }, function () {
            strata.open(async())
        })
    })
    // Compare the on-disk tree (vivified in parallel with loading the
    // fixture) against the expected fixture via `options.okay`.
    actions.fixture = cadence(function (async, action) {
        async(function () {
            vivify(options.directory, async())
            load(action.file, async())
        }, function (actual, expected) {
            options.okay(actual, expected, action.file)
        })
    })
    // NOTE(review): `consume` appears unused -- the cadence block below
    // drains `queue` with `async.forEach` instead.
    function consume (callback) {
        if (queue.length) {
            var action = queue.shift()
            actions[action.type](action, function (error) {
                if (error) callback(error)
                else setImmediate(function () {
                    consume(callback)
                })
            })
        } else {
            callback()
        }
    }
    // Parse the script file into `queue`, then run the actions in order.
    cadence(function (async) {
        var buffer = ''
        var fs = require('fs')
        async(function () {
            fs.readFile(options.file, 'utf8', async())
        }, function (body) {
            var lines = body.split(/\n/)
            lines.pop()
            lines.forEach(function (line) {
                switch (line[0]) {
                case '-':
                case '+':
                    // `+a-c` expands to the inclusive range a, b, c.
                    var $ = /^[+-]([a-z]+)(?:-([a-z]+))?\s*$/.exec(line), values = []
                    values.push($[1])
                    $[2] = $[2] || $[1]
                    while ($[1] != $[2]) {
                        $[1] = inc($[1])
                        values.push($[1])
                    }
                    queue.push({ type: line[0] == '+' ? 'add' : 'remove', values: values })
                    break
                case '>':
                    queue.push({ type: 'stringify', file: line.substring(1) })
                    break
                case '<':
                    // Loading a fixture replaces the implicit `create`.
                    queue.shift()
                    queue.push({ type: 'serialize', file: line.substring(1) })
                    break
                case '=':
                    queue.push({ type: 'fixture', file: line.substring(1) })
                    break
                case '~':
                    queue.push({ type: 'balance' })
                    break
                case '!':
                    queue.push({ type: 'vivify' })
                    break
                }
            })
            async.forEach([ queue ], function (action) {
                actions[action.type](action, async())
            }, function () {
                setImmediate(async())
            })
        })
    })(callback)
}
// Also expose the standalone helpers directly on the exported wrapper so
// they can be used without going through the proof harness.
module.exports.stringify = stringify
module.exports.serialize = serialize
module.exports.script = script
| {
"pile_set_name": "Github"
} |
var baseMerge = require('./_baseMerge'),
isObject = require('./isObject');
/**
* Used by `_.defaultsDeep` to customize its `_.merge` use to merge source
* objects into destination objects that are passed thru.
*
* @private
* @param {*} objValue The destination value.
* @param {*} srcValue The source value.
* @param {string} key The key of the property to merge.
* @param {Object} object The parent object of `objValue`.
* @param {Object} source The parent object of `srcValue`.
* @param {Object} [stack] Tracks traversed source values and their merged
* counterparts.
* @returns {*} Returns the value to assign.
*/
function customDefaultsMerge(objValue, srcValue, key, object, source, stack) {
  // Only plain merging work is needed when both sides are objects; all
  // other pairs keep the destination value untouched.
  if (!isObject(objValue) || !isObject(srcValue)) {
    return objValue;
  }
  // Track the pair on the stack to survive circular references, recursing
  // through baseMerge with this function as the customizer.
  stack.set(srcValue, objValue);
  baseMerge(objValue, srcValue, undefined, customDefaultsMerge, stack);
  stack['delete'](srcValue);
  return objValue;
}
module.exports = customDefaultsMerge;
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2020, OpenTelemetry Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.opentelemetry.sdk.metrics.aggregator;
import io.opentelemetry.common.Labels;
import io.opentelemetry.sdk.metrics.data.MetricData.LongPoint;
import io.opentelemetry.sdk.metrics.data.MetricData.Point;
import java.util.concurrent.atomic.AtomicLong;
/** {@link Aggregator} that accumulates recorded long values into a running sum. */
public final class LongSumAggregator extends AbstractAggregator {

  // The sum starts at zero and is reset to zero on merge.
  private static final long DEFAULT_VALUE = 0L;

  private static final AggregatorFactory AGGREGATOR_FACTORY =
      new AggregatorFactory() {
        @Override
        public Aggregator getAggregator() {
          return new LongSumAggregator();
        }
      };

  // TODO: Change to use LongAdder when changed to java8.
  private final AtomicLong current = new AtomicLong(DEFAULT_VALUE);

  /**
   * Returns an {@link AggregatorFactory} that produces {@link LongSumAggregator} instances.
   *
   * @return an {@link AggregatorFactory} that produces {@link LongSumAggregator} instances.
   */
  public static AggregatorFactory getFactory() {
    return AGGREGATOR_FACTORY;
  }

  @Override
  void doMergeAndReset(Aggregator aggregator) {
    // Drain this aggregator's sum (resetting it to zero) into the given
    // aggregator. getAndSet and getAndAdd are each atomic, though the pair
    // is not a single atomic operation.
    LongSumAggregator other = (LongSumAggregator) aggregator;
    other.current.getAndAdd(this.current.getAndSet(DEFAULT_VALUE));
  }

  @Override
  public Point toPoint(long startEpochNanos, long epochNanos, Labels labels) {
    return LongPoint.create(startEpochNanos, epochNanos, labels, current.get());
  }

  @Override
  public void doRecordLong(long value) {
    current.getAndAdd(value);
  }
}
| {
"pile_set_name": "Github"
} |
<file_state xmlns="http://oval.mitre.org/XMLSchema/oval-definitions-5#unix" id="oval:org.mitre.oval:ste:1691" version="1">
<uexec datatype="boolean" operation="equals">true</uexec>
</file_state>
| {
"pile_set_name": "Github"
} |
#ifndef TARGET_SIGNAL_H
#define TARGET_SIGNAL_H

#include "cpu.h"

/* this struct defines a stack used during syscall handling */
typedef struct target_sigaltstack {
    abi_ulong ss_sp;    /* base address of the alternate signal stack */
    abi_long ss_flags;  /* TARGET_SS_ONSTACK / TARGET_SS_DISABLE */
    abi_ulong ss_size;  /* size of the stack, in bytes */
} target_stack_t;

/*
 * sigaltstack controls
 */
#define TARGET_SS_ONSTACK 1
#define TARGET_SS_DISABLE 2

/* minimum and default alternate signal stack sizes for the target */
#define TARGET_MINSIGSTKSZ 2048
#define TARGET_SIGSTKSZ 8192

/* aregs[7] is A7, the stack pointer on m68k. */
static inline abi_ulong get_sp_from_cpustate(CPUM68KState *state)
{
    return state->aregs[7];
}

#endif /* TARGET_SIGNAL_H */
| {
"pile_set_name": "Github"
} |
#!/bin/bash

# This shell script and the accompanying Dockerfile are used by the project
# maintainers to create the precompiled doxygen binaries that are downloaded
# during the build. They are neither called during the build nor expected to be
# called by most developers or users of the project.

set -euxo pipefail

# Remove artifacts from previous runs so the wildcard copy below cannot pick
# up a stale tarball.
rm -f doxygen-*-*-x86_64.tar.gz doxygen-*-x86_64.tar.gz.sha256

# Containers and images created so far; the EXIT trap tears them all down
# (containers first, then the images they were started from), replacing the
# original script's incrementally re-registered traps.
CONTAINERS=()
IMAGES=()
cleanup() {
    if [ "${#CONTAINERS[@]}" -gt 0 ]; then
        docker rm -f "${CONTAINERS[@]}"
    fi
    if [ "${#IMAGES[@]}" -gt 0 ]; then
        docker rmi "${IMAGES[@]}"
    fi
}
trap cleanup EXIT

# Build the image for one Ubuntu codename, run it, copy out the doxygen
# tarball it produced, and record the tarball's SHA-256 checksum. Factored
# out because the bionic and focal sequences were identical copy-pastes.
build_doxygen() {
    local codename=$1
    docker build --build-arg UBUNTU_CODENAME="$codename" --tag "doxygen-$codename" "${BASH_SOURCE%/*}"
    IMAGES+=("doxygen-$codename")
    docker run --detach --name "doxygen-$codename-build" --tty "doxygen-$codename"
    CONTAINERS+=("doxygen-$codename-build")
    # Quote the command substitution so an unexpected space in the found
    # path cannot split the docker cp argument.
    docker cp "doxygen-$codename-build:$(docker exec "doxygen-$codename-build" find /opt/doxygen/bin/ -maxdepth 1 -name "doxygen-*-$codename-x86_64.tar.gz")" .
    shasum --algorithm 256 doxygen-*-"$codename"-x86_64.tar.gz | tee "doxygen-$codename-x86_64.tar.gz.sha256"
}

build_doxygen bionic
build_doxygen focal
| {
"pile_set_name": "Github"
} |
package com.github.javaparser.ast.nodeTypes;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.stream.Collectors;
import com.github.javaparser.ast.Modifier;
/**
* A Node with Modifiers.
*/
public interface NodeWithModifiers<T> {
/**
* Return the modifiers of this variable declaration.
*
* @see Modifier
* @return modifiers
*/
EnumSet<Modifier> getModifiers();
T setModifiers(EnumSet<Modifier> modifiers);
/**
 * Adds the given modifiers to this node's modifier set.
 *
 * @param modifiers the modifiers to add; duplicates are ignored by the set
 * @return this node, for chaining
 */
@SuppressWarnings("unchecked")
default T addModifier(Modifier... modifiers) {
    // Arrays.asList is simpler and cheaper than streaming the varargs into
    // a temporary EnumSet; addAll yields the same resulting set.
    getModifiers().addAll(Arrays.asList(modifiers));
    return (T) this;
}
// Convenience predicates: each reports whether the corresponding modifier
// keyword is present on this node.
default boolean isStatic() {
    return getModifiers().contains(Modifier.STATIC);
}

default boolean isAbstract() {
    return getModifiers().contains(Modifier.ABSTRACT);
}

default boolean isFinal() {
    return getModifiers().contains(Modifier.FINAL);
}

default boolean isNative() {
    return getModifiers().contains(Modifier.NATIVE);
}

default boolean isPrivate() {
    return getModifiers().contains(Modifier.PRIVATE);
}

default boolean isProtected() {
    return getModifiers().contains(Modifier.PROTECTED);
}

default boolean isPublic() {
    return getModifiers().contains(Modifier.PUBLIC);
}

default boolean isStrictfp() {
    return getModifiers().contains(Modifier.STRICTFP);
}

default boolean isSynchronized() {
    return getModifiers().contains(Modifier.SYNCHRONIZED);
}

default boolean isTransient() {
    return getModifiers().contains(Modifier.TRANSIENT);
}

default boolean isVolatile() {
    return getModifiers().contains(Modifier.VOLATILE);
}
} | {
"pile_set_name": "Github"
} |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore.Sqlite.Internal;
using Xunit;
using Xunit.Abstractions;
namespace Microsoft.EntityFrameworkCore.Query
{
public class GearsOfWarQuerySqliteTest : GearsOfWarQueryRelationalTestBase<GearsOfWarQuerySqliteFixture>
{
public GearsOfWarQuerySqliteTest(GearsOfWarQuerySqliteFixture fixture, ITestOutputHelper testOutputHelper)
    : base(fixture)
{
    // Start each test with an empty SQL log so AssertSql only sees the
    // statements issued by the current test.
    Fixture.TestSqlLoggerFactory.Clear();
    //Fixture.TestSqlLoggerFactory.SetTestOutputHelper(testOutputHelper);
}
// Each of these base scenarios uses DateTimeOffset, which the SQLite
// provider cannot translate, so the overrides assert translation failure
// instead of running the query.
public override Task Where_datetimeoffset_date_component(bool async)
    => AssertTranslationFailed(() => base.Where_datetimeoffset_date_component(async));

// Fixed copy/paste bug: this override previously delegated to
// base.Where_datetimeoffset_date_component, so the day-component scenario
// was never exercised.
public override Task Where_datetimeoffset_day_component(bool async)
    => AssertTranslationFailed(() => base.Where_datetimeoffset_day_component(async));

public override Task Where_datetimeoffset_dayofyear_component(bool async)
    => AssertTranslationFailed(() => base.Where_datetimeoffset_dayofyear_component(async));

public override Task Where_datetimeoffset_hour_component(bool async)
    => AssertTranslationFailed(() => base.Where_datetimeoffset_hour_component(async));

public override Task Where_datetimeoffset_millisecond_component(bool async)
    => AssertTranslationFailed(() => base.Where_datetimeoffset_millisecond_component(async));

public override Task Where_datetimeoffset_minute_component(bool async)
    => AssertTranslationFailed(() => base.Where_datetimeoffset_minute_component(async));

public override Task Where_datetimeoffset_month_component(bool async)
    => AssertTranslationFailed(() => base.Where_datetimeoffset_month_component(async));

public override Task Where_datetimeoffset_now(bool async)
    => AssertTranslationFailed(() => base.Where_datetimeoffset_now(async));

public override Task Where_datetimeoffset_second_component(bool async)
    => AssertTranslationFailed(() => base.Where_datetimeoffset_second_component(async));

public override Task Where_datetimeoffset_utcnow(bool async)
    => AssertTranslationFailed(() => base.Where_datetimeoffset_utcnow(async));

public override Task Where_datetimeoffset_year_component(bool async)
    => AssertTranslationFailed(() => base.Where_datetimeoffset_year_component(async));

public override Task DateTimeOffset_Contains_Less_than_Greater_than(bool async)
    => AssertTranslationFailed(() => base.DateTimeOffset_Contains_Less_than_Greater_than(async));

public override Task DateTimeOffset_Date_returns_datetime(bool async)
    => AssertTranslationFailed(() => base.DateTimeOffset_Date_returns_datetime(async));
// These scenarios are expected to fail on SQLite with the provider's
// ApplyNotSupported message; each override asserts exactly that exception
// message instead of running the base assertion.
public override async Task Correlated_collections_inner_subquery_predicate_references_outer_qsre(bool async)
    => Assert.Equal(
        SqliteStrings.ApplyNotSupported,
        (await Assert.ThrowsAsync<InvalidOperationException>(
            () => base.Correlated_collections_inner_subquery_predicate_references_outer_qsre(async))).Message);

public override async Task Correlated_collections_inner_subquery_selector_references_outer_qsre(bool async)
    => Assert.Equal(
        SqliteStrings.ApplyNotSupported,
        (await Assert.ThrowsAsync<InvalidOperationException>(
            () => base.Correlated_collections_inner_subquery_selector_references_outer_qsre(async))).Message);

public override async Task Correlated_collections_nested_inner_subquery_references_outer_qsre_one_level_up(bool async)
    => Assert.Equal(
        SqliteStrings.ApplyNotSupported,
        (await Assert.ThrowsAsync<InvalidOperationException>(
            () => base.Correlated_collections_nested_inner_subquery_references_outer_qsre_one_level_up(async))).Message);

public override async Task Correlated_collections_nested_inner_subquery_references_outer_qsre_two_levels_up(bool async)
    => Assert.Equal(
        SqliteStrings.ApplyNotSupported,
        (await Assert.ThrowsAsync<InvalidOperationException>(
            () => base.Correlated_collections_nested_inner_subquery_references_outer_qsre_two_levels_up(async))).Message);

public override async Task Outer_parameter_in_group_join_with_DefaultIfEmpty(bool async)
    => Assert.Equal(
        SqliteStrings.ApplyNotSupported,
        (await Assert.ThrowsAsync<InvalidOperationException>(
            () => base.Outer_parameter_in_group_join_with_DefaultIfEmpty(async))).Message);

public override async Task Outer_parameter_in_join_key(bool async)
    => Assert.Equal(
        SqliteStrings.ApplyNotSupported,
        (await Assert.ThrowsAsync<InvalidOperationException>(
            () => base.Outer_parameter_in_join_key(async))).Message);

public override async Task Outer_parameter_in_join_key_inner_and_outer(bool async)
    => Assert.Equal(
        SqliteStrings.ApplyNotSupported,
        (await Assert.ThrowsAsync<InvalidOperationException>(
            () => base.Outer_parameter_in_join_key_inner_and_outer(async))).Message);

public override async Task Subquery_projecting_nullable_scalar_contains_nullable_value_needs_null_expansion(bool async)
    => Assert.Equal(
        SqliteStrings.ApplyNotSupported,
        (await Assert.ThrowsAsync<InvalidOperationException>(
            () => base.Subquery_projecting_nullable_scalar_contains_nullable_value_needs_null_expansion(async))).Message);

public override async Task Subquery_projecting_nullable_scalar_contains_nullable_value_needs_null_expansion_negated(bool async)
    => Assert.Equal(
        SqliteStrings.ApplyNotSupported,
        (await Assert.ThrowsAsync<InvalidOperationException>(
            () => base.Subquery_projecting_nullable_scalar_contains_nullable_value_needs_null_expansion_negated(async))).Message);

public override async Task Subquery_projecting_non_nullable_scalar_contains_non_nullable_value_doesnt_need_null_expansion(bool async)
    => Assert.Equal(
        SqliteStrings.ApplyNotSupported,
        (await Assert.ThrowsAsync<InvalidOperationException>(
            () => base.Subquery_projecting_non_nullable_scalar_contains_non_nullable_value_doesnt_need_null_expansion(async))).Message);

public override async Task Subquery_projecting_non_nullable_scalar_contains_non_nullable_value_doesnt_need_null_expansion_negated(
    bool async)
    => Assert.Equal(
        SqliteStrings.ApplyNotSupported,
        (await Assert.ThrowsAsync<InvalidOperationException>(
            () => base.Subquery_projecting_non_nullable_scalar_contains_non_nullable_value_doesnt_need_null_expansion_negated(async))).Message);

public override async Task SelectMany_predicate_with_non_equality_comparison_with_Take_doesnt_convert_to_join(bool async)
    => Assert.Equal(
        SqliteStrings.ApplyNotSupported,
        (await Assert.ThrowsAsync<InvalidOperationException>(
            () => base.SelectMany_predicate_with_non_equality_comparison_with_Take_doesnt_convert_to_join(async))).Message);

public override async Task Correlated_collection_with_inner_collection_references_element_two_levels_up(bool async)
    => Assert.Equal(
        SqliteStrings.ApplyNotSupported,
        (await Assert.ThrowsAsync<InvalidOperationException>(
            () => base.Correlated_collection_with_inner_collection_references_element_two_levels_up(async))).Message);
// Baseline-SQL tests: each runs the shared base test, then asserts the exact
// SQL this provider generated via AssertSql.

// Unary minus applied to a binary expression translates to a SQL negation of
// the whole parenthesized expression.
public override async Task Negate_on_binary_expression(bool async)
{
await base.Negate_on_binary_expression(async);
AssertSql(
@"SELECT ""s"".""Id"", ""s"".""Banner"", ""s"".""Banner5"", ""s"".""InternalNumber"", ""s"".""Name""
FROM ""Squads"" AS ""s""
WHERE ""s"".""Id"" = -(""s"".""Id"" + ""s"".""Id"")");
}
// Unary minus on a bare column translates to SQL unary minus on that column.
public override async Task Negate_on_column(bool async)
{
await base.Negate_on_column(async);
AssertSql(
@"SELECT ""s"".""Id"", ""s"".""Banner"", ""s"".""Banner5"", ""s"".""InternalNumber"", ""s"".""Name""
FROM ""Squads"" AS ""s""
WHERE ""s"".""Id"" = -""s"".""Id""");
}
// A negated StartsWith becomes NOT (... LIKE ...) guarded by IS NOT NULL,
// since LIKE on NULL would otherwise yield NULL rather than false.
public override async Task Negate_on_like_expression(bool async)
{
await base.Negate_on_like_expression(async);
AssertSql(
@"SELECT ""s"".""Id"", ""s"".""Banner"", ""s"".""Banner5"", ""s"".""InternalNumber"", ""s"".""Name""
FROM ""Squads"" AS ""s""
WHERE ""s"".""Name"" IS NOT NULL AND NOT (""s"".""Name"" LIKE 'us%')");
}
// Only the Timeline column is fetched; the DateTimeOffset comparison itself
// does not appear in the SQL, so it is evaluated after materialization.
public override async Task Select_datetimeoffset_comparison_in_projection(bool async)
{
await base.Select_datetimeoffset_comparison_in_projection(async);
AssertSql(
@"SELECT ""m"".""Timeline""
FROM ""Missions"" AS ""m""");
}
// Byte-array translation tests: Contains maps to SQLite's instr(), Length to
// length(), and SequenceEqual to direct blob equality.

// byte[].Contains with a constant byte translates to instr(column, X'..') > 0.
public override async Task Byte_array_contains_literal(bool async)
{
await base.Byte_array_contains_literal(async);
AssertSql(
@"SELECT ""s"".""Id"", ""s"".""Banner"", ""s"".""Banner5"", ""s"".""InternalNumber"", ""s"".""Name""
FROM ""Squads"" AS ""s""
WHERE instr(""s"".""Banner"", X'01') > 0");
}
// A parameterized byte is converted to a single-character blob via char()
// before the instr() search.
public override async Task Byte_array_contains_parameter(bool async)
{
await base.Byte_array_contains_parameter(async);
AssertSql(
@"@__someByte_0='1' (DbType = String)
SELECT ""s"".""Id"", ""s"".""Banner"", ""s"".""Banner5"", ""s"".""InternalNumber"", ""s"".""Name""
FROM ""Squads"" AS ""s""
WHERE instr(""s"".""Banner"", char(@__someByte_0)) > 0");
}
// byte[].Length against a constant translates to length(column).
public override async Task Byte_array_filter_by_length_literal(bool async)
{
await base.Byte_array_filter_by_length_literal(async);
AssertSql(
@"SELECT ""s"".""Id"", ""s"".""Banner"", ""s"".""Banner5"", ""s"".""InternalNumber"", ""s"".""Name""
FROM ""Squads"" AS ""s""
WHERE length(""s"".""Banner"") = 1");
}
// byte[].Length against a parameter keeps the comparison parameterized.
public override async Task Byte_array_filter_by_length_parameter(bool async)
{
await base.Byte_array_filter_by_length_parameter(async);
AssertSql(
@"@__p_0='1' (DbType = String)
SELECT ""s"".""Id"", ""s"".""Banner"", ""s"".""Banner5"", ""s"".""InternalNumber"", ""s"".""Name""
FROM ""Squads"" AS ""s""
WHERE length(""s"".""Banner"") = @__p_0");
}
// Compiled-query variant: length() is applied to both the column and the
// byte-array parameter. Synchronous because compiled queries are cached.
public override void Byte_array_filter_by_length_parameter_compiled()
{
base.Byte_array_filter_by_length_parameter_compiled();
AssertSql(
@"@__byteArrayParam='0x2A80' (Size = 2) (DbType = String)
SELECT COUNT(*)
FROM ""Squads"" AS ""s""
WHERE length(""s"".""Banner"") = length(@__byteArrayParam)");
}
// SequenceEqual on byte arrays translates to plain equality on the blob column.
public override async Task Byte_array_filter_by_SequenceEqual(bool async)
{
await base.Byte_array_filter_by_SequenceEqual(async);
AssertSql(
@"@__byteArrayParam_0='0x0405060708' (Size = 5) (DbType = String)
SELECT ""s"".""Id"", ""s"".""Banner"", ""s"".""Banner5"", ""s"".""InternalNumber"", ""s"".""Name""
FROM ""Squads"" AS ""s""
WHERE ""s"".""Banner5"" = @__byteArrayParam_0");
}
// TimeSpan component translations are not supported by this provider yet;
// the base tests are skipped and tracked by efcore issue #18844.
[ConditionalTheory(Skip = "Issue#18844")]
public override Task TimeSpan_Hours(bool async)
=> base.TimeSpan_Hours(async);
[ConditionalTheory(Skip = "Issue#18844")]
public override Task TimeSpan_Minutes(bool async)
=> base.TimeSpan_Minutes(async);
[ConditionalTheory(Skip = "Issue#18844")]
public override Task TimeSpan_Seconds(bool async)
=> base.TimeSpan_Seconds(async);
[ConditionalTheory(Skip = "Issue#18844")]
public override Task TimeSpan_Milliseconds(bool async)
=> base.TimeSpan_Milliseconds(async);
[ConditionalTheory(Skip = "Issue#18844")]
public override Task Where_TimeSpan_Hours(bool async)
=> base.Where_TimeSpan_Hours(async);
[ConditionalTheory(Skip = "Issue#18844")]
public override Task Where_TimeSpan_Minutes(bool async)
=> base.Where_TimeSpan_Minutes(async);
[ConditionalTheory(Skip = "Issue#18844")]
public override Task Where_TimeSpan_Seconds(bool async)
=> base.Where_TimeSpan_Seconds(async);
[ConditionalTheory(Skip = "Issue#18844")]
public override Task Where_TimeSpan_Milliseconds(bool async)
=> base.Where_TimeSpan_Milliseconds(async);
// Compares the SQL statements captured by the fixture's logger against the
// expected baseline statements, in order.
private void AssertSql(params string[] expected)
=> Fixture.TestSqlLoggerFactory.AssertBaseline(expected);
}
}
| {
"pile_set_name": "Github"
} |
package com.vaadin.tests.components.table;
import com.vaadin.tests.components.TestBase;
import com.vaadin.ui.CheckBox;
import com.vaadin.ui.HorizontalLayout;
import com.vaadin.v7.data.Item;
import com.vaadin.v7.ui.Select;
import com.vaadin.v7.ui.Table;
/**
 * Test UI for ticket #5066: three tables with left-, center- and right-aligned
 * columns, each rendered with a different table style name, so that column
 * header alignment style names can be verified across themes.
 */
@SuppressWarnings("serial")
public class ColumnHeaderAlignments extends TestBase {
// Column/property ids; the caption documents the expected alignment.
private static final String BAZ = "Baz (right)";
private static final String BAR = "Bar (center)";
private static final String FOO = "Foo (left)";
private Table fooTable;
private Table barTable;
private Table bazTable;
@Override
protected void setup() {
// Theme selector so alignment can be checked under each built-in theme.
Select theme = new Select();
theme.addItem("reindeer");
theme.addItem("runo");
theme.addItem("base");
theme.setValue("reindeer");
theme.setNullSelectionAllowed(false);
theme.setImmediate(true);
theme.addValueChangeListener(event -> setTheme(
String.valueOf(event.getProperty().getValue())));
addComponent(theme);
// Checkbox toggles footer visibility on all three tables at once.
CheckBox footers = new CheckBox("Show footers");
footers.addValueChangeListener(event -> {
boolean visible = event.getValue();
fooTable.setFooterVisible(visible);
barTable.setFooterVisible(visible);
bazTable.setFooterVisible(visible);
});
addComponent(footers);
// Three side-by-side tables: default style, "strong", and "black".
HorizontalLayout tables = new HorizontalLayout();
fooTable = createTable(null);
tables.addComponent(fooTable);
barTable = createTable("strong");
tables.addComponent(barTable);
bazTable = createTable("black");
tables.addComponent(bazTable);
addComponent(tables);
}
// Builds a 100-row table with one left-, one center- and one right-aligned
// column. style is an optional table style name; null applies no style.
private Table createTable(String style) {
Table table = new Table();
table.addContainerProperty(FOO, String.class, "");
table.addContainerProperty(BAR, String.class, "");
table.addContainerProperty(BAZ, String.class, "");
table.setColumnAlignment(FOO, Table.ALIGN_LEFT);
table.setColumnAlignment(BAR, Table.ALIGN_CENTER);
table.setColumnAlignment(BAZ, Table.ALIGN_RIGHT);
if (style != null) {
table.setStyleName(style);
}
// Enough rows that the table body scrolls in all themes.
for (int i = 0; i < 100; i++) {
Item item = table.addItem(i);
item.getItemProperty(FOO).setValue("foo");
item.getItemProperty(BAR).setValue("bar");
item.getItemProperty(BAZ).setValue("baz");
}
return table;
}
@Override
protected String getDescription() {
return "Aligned column headers should have style names telling the alignment";
}
@Override
protected Integer getTicketNumber() {
return 5066;
}
}
| {
"pile_set_name": "Github"
} |
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/cards.R, R/aliases.R
\name{bs4Card}
\alias{bs4Card}
\alias{box}
\title{Create a Bootstrap 4 card}
\usage{
bs4Card(
...,
inputId = NULL,
title = NULL,
footer = NULL,
status = NULL,
elevation = NULL,
solidHeader = FALSE,
headerBorder = TRUE,
gradientColor = NULL,
width = 6,
height = NULL,
collapsible = TRUE,
collapsed = FALSE,
closable = FALSE,
maximizable = FALSE,
cardLabel = NULL,
dropdownMenu = NULL,
overflow = FALSE,
sidebar = NULL
)
box(
...,
inputId = NULL,
title = NULL,
footer = NULL,
status = NULL,
elevation = NULL,
solidHeader = FALSE,
headerBorder = TRUE,
gradientColor = NULL,
width = 6,
height = NULL,
collapsible = TRUE,
collapsed = FALSE,
closable = FALSE,
maximizable = FALSE,
cardLabel = NULL,
dropdownMenu = NULL,
overflow = FALSE,
sidebar = NULL
)
}
\arguments{
\item{...}{Contents of the box.}
\item{inputId}{Get the state of the card. Optional.}
\item{title}{Optional title.}
\item{footer}{Optional footer text.}
\item{status}{The status of the card header. "primary", "secondary", "success", "warning", "danger", "white", "light", "dark", "transparent". NULL by default.}
\item{elevation}{Card elevation.}
\item{solidHeader}{Should the header be shown with a solid color background?}
\item{headerBorder}{Whether to display a border between the header and body.
TRUE by default}
\item{gradientColor}{If NULL (the default), the background of the box will be
white. Otherwise, a color string. "primary", "success", "warning" or "danger".}
\item{width}{The width of the box, using the Bootstrap grid system. This is
used for row-based layouts. The overall width of a region is 12, so the
default width of 6 occupies 1/2 of that width. For column-based
layouts, use \code{NULL} for the width; the width is set by the column that
contains the box.}
\item{height}{The height of a box, in pixels or other CSS unit. By default
the height scales automatically with the content.}
\item{collapsible}{If TRUE, display a button in the upper right that allows
the user to collapse the box.}
\item{collapsed}{If TRUE, start collapsed. This must be used with
\code{collapsible=TRUE}.}
\item{closable}{If TRUE, display a button in the upper right that allows the user to close the box.}
\item{maximizable}{If TRUE, the card can be displayed in full screen mode.}
\item{cardLabel}{Slot for \link{bs4CardLabel}.}
\item{dropdownMenu}{List of items in the boxtool dropdown menu. Use \link{dropdownItemList}.}
\item{overflow}{Whether to enable overflow in the card body and footer. FALSE by default.}
\item{sidebar}{Slot for \link{bs4CardSidebar}.}
}
\description{
Build an adminLTE3 card
}
\examples{
if(interactive()){
library(shiny)
library(bs4Dash)
shiny::shinyApp(
ui = bs4DashPage(
navbar = bs4DashNavbar(),
sidebar = bs4DashSidebar(),
controlbar = bs4DashControlbar(),
footer = bs4DashFooter(),
title = "test",
body = bs4DashBody(
fluidRow(
column(
width = 6,
bs4Card(
title = "Closable Box with dropdown",
closable = TRUE,
width = 12,
status = "warning",
solidHeader = FALSE,
collapsible = TRUE,
cardLabel = bs4CardLabel(
text = 1,
status = "danger",
tooltip = "Hello!"
),
dropdownMenu = dropdownItemList(
dropdownItem(url = "http://www.google.com", name = "Link to google"),
dropdownItem(url = "#", name = "item 2"),
dropdownDivider(),
dropdownItem(url = "#", name = "item 3")
),
p("Box Content")
)
),
column(
width = 6,
bs4Card(
title = "Closable Box with gradient",
closable = TRUE,
width = 12,
solidHeader = FALSE,
gradientColor = "success",
collapsible = TRUE,
p("Box Content")
)
)
),
fluidRow(
bs4Card(
title = "Closable Box with solidHeader",
closable = TRUE,
width = 6,
solidHeader = TRUE,
status = "primary",
collapsible = TRUE,
p("Box Content")
),
bs4Card(
inputId = "card4",
title = "Maximizable Card",
width = 6,
status = "warning",
closable = FALSE,
maximizable = TRUE,
collapsible = TRUE,
sliderInput("obs", "Number of observations:",
min = 0, max = 1000, value = 500
),
plotOutput("distPlot")
)
)
)
),
server = function(input, output) {
output$distPlot <- renderPlot({
hist(rnorm(input$obs))
})
}
)
}
}
\seealso{
Other cards:
\code{\link{bs4InfoBox}()},
\code{\link{bs4TabCard}()},
\code{\link{bs4ValueBox}()}
}
\author{
David Granjon, \email{dgranjon@ymail.com}
}
\concept{cards}
| {
"pile_set_name": "Github"
} |
<?php
namespace Matrix\Operators;
use Matrix\Matrix;
use \Matrix\Builder;
use Matrix\Exception;
class Multiplication extends Operator
{
    /**
     * Execute the multiplication.
     *
     * Accepts a Matrix (or an array convertible to one) for matrix
     * multiplication, or a numeric value for scalar multiplication.
     *
     * @param mixed $value The matrix or numeric value to multiply the current base value by
     * @throws Exception If the provided argument is not appropriate for the operation
     * @return $this The operation object, allowing multiple multiplications to be chained
     **/
    public function execute($value)
    {
        if (is_array($value)) {
            $value = new Matrix($value);
        }

        if (is_object($value) && ($value instanceof Matrix)) {
            return $this->multiplyMatrix($value);
        } elseif (is_numeric($value)) {
            return $this->multiplyScalar($value);
        }

        throw new Exception('Invalid argument for multiplication');
    }

    /**
     * Execute the multiplication for a scalar, scaling every element in place.
     *
     * @param mixed $value The numeric value to multiply with the current base value
     * @return $this The operation object, allowing multiple multiplications to be chained
     **/
    protected function multiplyScalar($value)
    {
        for ($row = 0; $row < $this->rows; ++$row) {
            for ($column = 0; $column < $this->columns; ++$column) {
                $this->matrix[$row][$column] *= $value;
            }
        }

        return $this;
    }

    /**
     * Execute the multiplication for a matrix (standard row-by-column product).
     *
     * @param Matrix $value The matrix to multiply with the current base value
     * @return $this The operation object, allowing multiple multiplications to be chained
     * @throws Exception If the matrix dimensions are incompatible for multiplication
     **/
    protected function multiplyMatrix(Matrix $value)
    {
        $this->validateReflectingDimensions($value);

        $newRows = $this->rows;
        $newColumns = $value->columns;
        $matrix = Builder::createFilledMatrix(0, $newRows, $newColumns)
            ->toArray();

        // Extract each column of the right-hand matrix exactly once. The
        // previous version called getColumns() inside the row loop, repeating
        // the (row-invariant) extraction once per result row.
        $columnsData = [];
        for ($column = 0; $column < $newColumns; ++$column) {
            $columnsData[$column] = $value->getColumns($column + 1)->toArray();
        }

        for ($row = 0; $row < $newRows; ++$row) {
            for ($column = 0; $column < $newColumns; ++$column) {
                $columnData = $columnsData[$column];
                foreach ($this->matrix[$row] as $key => $valueData) {
                    $matrix[$row][$column] += $valueData * $columnData[$key][0];
                }
            }
        }
        $this->matrix = $matrix;

        return $this;
    }
}
| {
"pile_set_name": "Github"
} |
Used for AP+STA support in OpenWrt - preserve AP mode keys across STA reconnects
--- a/net/mac80211/cfg.c
+++ b/net/mac80211/cfg.c
@@ -1197,7 +1197,6 @@ static int ieee80211_stop_ap(struct wiph
sdata->vif.bss_conf.ftmr_params = NULL;
__sta_info_flush(sdata, true);
- ieee80211_free_keys(sdata, true);
sdata->vif.bss_conf.enable_beacon = false;
sdata->beacon_rate_set = false;
| {
"pile_set_name": "Github"
} |
package sql
import (
"crypto/tls"
"crypto/x509"
"database/sql"
"fmt"
"io/ioutil"
"net"
"regexp"
"strconv"
"strings"
"time"
"github.com/go-sql-driver/mysql"
"github.com/lib/pq"
sqlite3 "github.com/mattn/go-sqlite3"
"github.com/dexidp/dex/pkg/log"
"github.com/dexidp/dex/storage"
)
const (
// postgres error codes
pgErrUniqueViolation = "23505" // unique_violation
)
const (
// MySQL error codes
mysqlErrDupEntry = 1062
mysqlErrDupEntryWithKeyName = 1586
mysqlErrUnknownSysVar = 1193
)
// SQLite3 options for creating an SQL db.
type SQLite3 struct {
// File is the SQLite database file to open; ":memory:" selects an
// in-memory database.
File string `json:"file"`
}
// Open creates a new storage implementation backed by SQLite3
func (s *SQLite3) Open(logger log.Logger) (storage.Storage, error) {
// Return an explicit nil on error (rather than `return s.open(logger)`)
// so callers never receive a typed-nil *conn wrapped in the interface.
conn, err := s.open(logger)
if err != nil {
return nil, err
}
return conn, nil
}
// open opens (or creates) the SQLite database file, installs the
// duplicate-key error classifier, and runs schema migrations.
func (s *SQLite3) open(logger log.Logger) (*conn, error) {
db, err := sql.Open("sqlite3", s.File)
if err != nil {
return nil, err
}
if s.File == ":memory:" {
// sqlite3 uses file locks to coordinate concurrent access. In memory
// doesn't support this, so limit the number of connections to 1.
db.SetMaxOpenConns(1)
}
// errCheck reports whether err is SQLite's primary-key constraint
// violation, which the generic layer uses to detect duplicate inserts.
errCheck := func(err error) bool {
sqlErr, ok := err.(sqlite3.Error)
if !ok {
return false
}
return sqlErr.ExtendedCode == sqlite3.ErrConstraintPrimaryKey
}
c := &conn{db, &flavorSQLite3, logger, errCheck}
if _, err := c.migrate(); err != nil {
return nil, fmt.Errorf("failed to perform migrations: %v", err)
}
return c, nil
}
// nolint
const (
// postgres SSL modes
pgSSLDisable = "disable"
pgSSLRequire = "require"
pgSSLVerifyCA = "verify-ca"
pgSSLVerifyFull = "verify-full"
)
// nolint
const (
// MySQL SSL modes
mysqlSSLTrue = "true"
mysqlSSLFalse = "false"
mysqlSSLSkipVerify = "skip-verify"
mysqlSSLCustom = "custom"
)
// NetworkDB contains options common to SQL databases accessed over network.
type NetworkDB struct {
Database string
User string
Password string
Host string
Port uint16
ConnectionTimeout int // Seconds
// database/sql tunables, see
// https://golang.org/pkg/database/sql/#DB.SetConnMaxLifetime and below
// Note: defaults will be set if these are 0
MaxOpenConns int // default: 5
MaxIdleConns int // default: 5
ConnMaxLifetime int // Seconds, default: not set
}
// SSL represents SSL options for network databases.
type SSL struct {
Mode string
CAFile string
// Files for client auth.
KeyFile string
CertFile string
}
// Postgres options for creating an SQL db.
type Postgres struct {
NetworkDB
SSL SSL `json:"ssl" yaml:"ssl"`
}
// Open creates a new storage implementation backed by Postgres.
func (p *Postgres) Open(logger log.Logger) (storage.Storage, error) {
// Explicit nil on error avoids returning a typed-nil *conn in the
// storage.Storage interface.
conn, err := p.open(logger)
if err != nil {
return nil, err
}
return conn, nil
}
// strEsc matches the characters that must be backslash-escaped inside a
// single-quoted libpq connection-string value: backslash and single quote.
var strEsc = regexp.MustCompile(`([\\'])`)

// dataSourceStr returns str wrapped in single quotes with embedded quotes and
// backslashes escaped, making it safe to embed as a value in a key=value
// postgres data-source string.
func dataSourceStr(str string) string {
	escaped := strEsc.ReplaceAllString(str, `\$1`)
	return "'" + escaped + "'"
}
// createDataSourceName takes the configuration provided via the Postgres
// struct to create a data-source name that Go's database/sql package can
// make use of.
func (p *Postgres) createDataSourceName() string {
parameters := []string{}
// addParam appends one key=value pair; values that may contain spaces or
// quotes must already be quoted via dataSourceStr.
addParam := func(key, val string) {
parameters = append(parameters, fmt.Sprintf("%s=%s", key, val))
}
addParam("connect_timeout", strconv.Itoa(p.ConnectionTimeout))
// detect host:port for backwards-compatibility
host, port, err := net.SplitHostPort(p.Host)
if err != nil {
// not host:port, probably unix socket or bare address
host = p.Host
if p.Port != 0 {
port = strconv.Itoa(int(p.Port))
}
}
// Only emit parameters that were actually configured, so libpq defaults
// apply for the rest.
if host != "" {
addParam("host", dataSourceStr(host))
}
if port != "" {
addParam("port", port)
}
if p.User != "" {
addParam("user", dataSourceStr(p.User))
}
if p.Password != "" {
addParam("password", dataSourceStr(p.Password))
}
if p.Database != "" {
addParam("dbname", dataSourceStr(p.Database))
}
if p.SSL.Mode == "" {
// Assume the strictest mode if unspecified.
addParam("sslmode", dataSourceStr(pgSSLVerifyFull))
} else {
addParam("sslmode", dataSourceStr(p.SSL.Mode))
}
if p.SSL.CAFile != "" {
addParam("sslrootcert", dataSourceStr(p.SSL.CAFile))
}
if p.SSL.CertFile != "" {
addParam("sslcert", dataSourceStr(p.SSL.CertFile))
}
if p.SSL.KeyFile != "" {
addParam("sslkey", dataSourceStr(p.SSL.KeyFile))
}
return strings.Join(parameters, " ")
}
// open connects to Postgres using the configured data-source name, applies
// the connection-pool tunables, installs the unique-violation error
// classifier, and runs schema migrations.
func (p *Postgres) open(logger log.Logger) (*conn, error) {
dataSourceName := p.createDataSourceName()
db, err := sql.Open("postgres", dataSourceName)
if err != nil {
return nil, err
}
// set database/sql tunables if configured
if p.ConnMaxLifetime != 0 {
db.SetConnMaxLifetime(time.Duration(p.ConnMaxLifetime) * time.Second)
}
// Pool sizes default to 5 when left unset (0).
if p.MaxIdleConns == 0 {
db.SetMaxIdleConns(5)
} else {
db.SetMaxIdleConns(p.MaxIdleConns)
}
if p.MaxOpenConns == 0 {
db.SetMaxOpenConns(5)
} else {
db.SetMaxOpenConns(p.MaxOpenConns)
}
// errCheck reports whether err is Postgres' unique_violation, which the
// generic layer uses to detect duplicate inserts.
errCheck := func(err error) bool {
sqlErr, ok := err.(*pq.Error)
if !ok {
return false
}
return sqlErr.Code == pgErrUniqueViolation
}
c := &conn{db, &flavorPostgres, logger, errCheck}
if _, err := c.migrate(); err != nil {
return nil, fmt.Errorf("failed to perform migrations: %v", err)
}
return c, nil
}
// MySQL options for creating a MySQL db.
type MySQL struct {
NetworkDB
SSL SSL `json:"ssl" yaml:"ssl"`
// TODO(pborzenkov): used by tests to reduce lock wait timeout. Should
// we make it exported and allow users to provide arbitrary params?
params map[string]string
}
// Open creates a new storage implementation backed by MySQL.
func (s *MySQL) Open(logger log.Logger) (storage.Storage, error) {
// Explicit nil on error avoids returning a typed-nil *conn in the
// storage.Storage interface.
conn, err := s.open(logger)
if err != nil {
return nil, err
}
return conn, nil
}
// open builds a MySQL DSN from the configuration, connects (falling back to
// a pre-5.7.20 compatible isolation variable if needed), installs the
// duplicate-entry error classifier, and runs schema migrations.
func (s *MySQL) open(logger log.Logger) (*conn, error) {
cfg := mysql.Config{
User: s.User,
Passwd: s.Password,
DBName: s.Database,
AllowNativePasswords: true,
Timeout: time.Second * time.Duration(s.ConnectionTimeout),
ParseTime: true,
Params: map[string]string{
"transaction_isolation": "'SERIALIZABLE'",
},
}
// A leading '/' in Host selects a unix socket; anything else is TCP.
if s.Host != "" {
if s.Host[0] != '/' {
cfg.Net = "tcp"
cfg.Addr = s.Host
} else {
cfg.Net = "unix"
cfg.Addr = s.Host
}
}
// TLS: custom config when any cert/CA file is given, otherwise the
// configured mode (defaulting to "true" when unspecified).
if s.SSL.CAFile != "" || s.SSL.CertFile != "" || s.SSL.KeyFile != "" {
if err := s.makeTLSConfig(); err != nil {
return nil, fmt.Errorf("failed to make TLS config: %v", err)
}
cfg.TLSConfig = mysqlSSLCustom
} else if s.SSL.Mode == "" {
cfg.TLSConfig = mysqlSSLTrue
} else {
cfg.TLSConfig = s.SSL.Mode
}
// Extra DSN parameters (currently used by tests) override/extend defaults.
for k, v := range s.params {
cfg.Params[k] = v
}
db, err := sql.Open("mysql", cfg.FormatDSN())
if err != nil {
return nil, err
}
if s.MaxIdleConns == 0 {
/*Override default behaviour to fix https://github.com/dexidp/dex/issues/1608*/
db.SetMaxIdleConns(0)
} else {
db.SetMaxIdleConns(s.MaxIdleConns)
}
err = db.Ping()
if err != nil {
if mysqlErr, ok := err.(*mysql.MySQLError); ok && mysqlErr.Number == mysqlErrUnknownSysVar {
logger.Info("reconnecting with MySQL pre-5.7.20 compatibility mode")
// MySQL 5.7.20 introduced transaction_isolation and deprecated tx_isolation.
// MySQL 8.0 doesn't have tx_isolation at all.
// https://dev.mysql.com/doc/refman/5.7/en/server-system-variables.html#sysvar_transaction_isolation
delete(cfg.Params, "transaction_isolation")
cfg.Params["tx_isolation"] = "'SERIALIZABLE'"
db, err = sql.Open("mysql", cfg.FormatDSN())
if err != nil {
return nil, err
}
} else {
return nil, err
}
}
// errCheck reports whether err is a MySQL duplicate-entry error, which
// the generic layer uses to detect duplicate inserts.
errCheck := func(err error) bool {
sqlErr, ok := err.(*mysql.MySQLError)
if !ok {
return false
}
return sqlErr.Number == mysqlErrDupEntry ||
sqlErr.Number == mysqlErrDupEntryWithKeyName
}
c := &conn{db, &flavorMySQL, logger, errCheck}
if _, err := c.migrate(); err != nil {
return nil, fmt.Errorf("failed to perform migrations: %v", err)
}
return c, nil
}
// makeTLSConfig builds a tls.Config from the configured CA and client
// certificate files and registers it with the MySQL driver under the
// mysqlSSLCustom key, for use via the DSN's tls parameter.
func (s *MySQL) makeTLSConfig() error {
	cfg := &tls.Config{}
	if s.SSL.CAFile != "" {
		rootCertPool := x509.NewCertPool()
		pem, err := ioutil.ReadFile(s.SSL.CAFile)
		if err != nil {
			return err
		}
		if ok := rootCertPool.AppendCertsFromPEM(pem); !ok {
			return fmt.Errorf("failed to append PEM")
		}
		cfg.RootCAs = rootCertPool
	}
	// Client authentication requires both halves of the key pair.
	if s.SSL.CertFile != "" && s.SSL.KeyFile != "" {
		clientCert := make([]tls.Certificate, 0, 1)
		certs, err := tls.LoadX509KeyPair(s.SSL.CertFile, s.SSL.KeyFile)
		if err != nil {
			return err
		}
		clientCert = append(clientCert, certs)
		cfg.Certificates = clientCert
	}
	// Propagate registration failures (e.g. a reserved key name) instead of
	// silently discarding the returned error as the previous version did.
	return mysql.RegisterTLSConfig(mysqlSSLCustom, cfg)
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<html xmlns:th="http://www.thymeleaf.org"
xmlns:sec="http://www.pollix.at/thymeleaf/shiro">
<head th:replace="admin/common/html/tableHead :: tableHead(~{::title},~{::link},~{::style})">
<title th:text="${title}"></title>
<!-- 这儿引用单独的css link -->
<!-- Ladda for Bootstrap 3按钮加载进度插件 -->
<link rel="stylesheet" th:href="@{/static/admin/assets/js/button/ladda/ladda.min.css}">
<!-- bootstrap-table表单样式 -->
<link th:href="@{/static/admin/assets/js/bootstrap/css/bootstrap-table/bootstrap-table.min.css}" rel="stylesheet"/>
<!-- 用户界面自定义css -->
<link th:href="@{/static/admin/user/css/user-add.css}" rel="stylesheet"/>
<style type="text/css"></style>
</head>
<body>
<div class="content-wrap">
<div class="row">
<div class="col-sm-12">
<div class="nest" id="elementClose">
<div class="">
</div>
<div class="body-nest" id="element">
<div class="panel-body">
<form class="form-horizontal m" id="form-edit" th:object="${SysDepartment}">
<input id="id" name="id" type="hidden" th:field="*{id}"/>
<div class="form-group">
<label class="col-sm-3 control-label ">父级:</label>
<div class="col-sm-8">
<input class="form-control" type="hidden" id="parentId" name="parentId" th:value="*{parentId}"/>
<input class="form-control" readonly="true" type="text" th:value="${perSysDepartment.deptName}" id="pidName" onclick="selectMenuTree()" />
</div>
</div>
<div class="form-group">
<label class="col-sm-3 control-label ">部门名称:</label>
<div class="col-sm-8">
<input class="form-control" type="text" id="deptName" name="deptName" th:value="*{deptName}"/>
</div>
</div>
<div class="form-group">
<label class="col-sm-3 control-label ">部门负责人:</label>
<div class="col-sm-8">
<input class="form-control" type="text" id="leader" name="leader" th:value="*{leader}"/>
</div>
</div>
<div class="form-group">
<label class="col-sm-3 control-label ">电话:</label>
<div class="col-sm-8">
<input class="form-control" type="text" id="phone" name="phone" th:value="*{phone}"/>
</div>
</div>
<div class="form-group">
<label class="col-sm-3 control-label ">邮箱:</label>
<div class="col-sm-8">
<input class="form-control" type="text" id="email" name="email" th:value="*{email}"/>
</div>
</div>
<div class="form-group">
<label class="col-sm-3 control-label ">状态:</label>
<div class="col-sm-8">
<select class="form-control" id="status" name="status" th:field="*{status}">
<option value="1">开启</option>
<option value="0">关闭</option>
</select>
</div>
</div>
<div class="form-group">
<label class="col-sm-3 control-label ">排序:</label>
<div class="col-sm-8">
<input class="form-control" type="number" id="orderNum" name="orderNum" th:value="*{orderNum}"/>
</div>
</div>
<div class="form-group">
<div class="form-control-static col-sm-offset-9">
<button type="submit" class="btn btn-primary">提交</button>
<button onclick="$.modal.close()" class="btn btn-danger" type="button">关闭</button>
</div>
</div>
</form>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- 通用js -->
<div th:include="admin/common/html/js :: onload_js">
</div>
<!-- bootstarp 表格 -->
<script th:src="@{/static/admin/assets/js/bootstrap/js/bootstrap-table/bootstrap-table.min.js}" type="text/javascript"></script>
<script th:src="@{/static/admin/assets/js/bootstrap/js/bootstrap-table/locale/bootstrap-table-zh-CN.min.js}" type="text/javascript"></script>
<script th:src="@{/static/admin/assets/js/bootstrap/js/base_list.js}" type="text/javascript"></script>
<!-- 弹窗 -->
<script th:src="@{/static/admin/assets/js/layer/layer.js}" type="text/javascript"></script>
<!-- 遮罩层 -->
<script th:src="@{/static/admin/assets/js/blockUI/jquery.blockUI.js}" type="text/javascript"></script>
<script type="text/javascript">
$(function(){
$(".MyDate").datetimepicker({
format: "yyyy-mm-dd hh:00:00",
startDate: new Date(),
autoclose: true,
todayBtn: true,
language: 'zh-CN',
initialDate:new Date(),
minView: 1
});
})
$("#form-edit").validate({
submitHandler : function(form) {
edit();
}
});
/*菜单管理-修改-选择菜单树*/
function selectMenuTree() {
var url = rootPath + "/SysDepartmentController/tree";
$.modal.open("选择菜单", url, '380', '380');
}
function edit() {
var dataFormJson = $("#form-edit").serialize();
$.ajax({
cache : true,
type : "POST",
url : rootPath + "/SysDepartmentController/edit",
data : dataFormJson,
async : false,
error : function(request) {
$.modal.alertError("系统错误");
},
success : function(data) {
$.operate.saveSuccess(data);
}
});
}
</script>
</body>
</html>
| {
"pile_set_name": "Github"
} |
namespace MvcForum.Core.Data.Mapping
{
using System.Data.Entity.ModelConfiguration;
using Models.Entities;
/// <summary>
/// Entity Framework mapping configuration for the <see cref="TagNotification"/> entity.
/// </summary>
public class TagNotificationMapping : EntityTypeConfiguration<TagNotification>
{
    public TagNotificationMapping()
    {
        // Primary key: Id is required.
        HasKey(notification => notification.Id);
        Property(notification => notification.Id).IsRequired();

        // Every notification targets exactly one tag. The foreign-key column
        // is named "TopicTag_Id", and deleting a tag does not cascade to its
        // notifications.
        HasRequired(notification => notification.Tag)
            .WithMany(tag => tag.Notifications)
            .Map(mapping => mapping.MapKey("TopicTag_Id"))
            .WillCascadeOnDelete(false);
    }
}
}
| {
"pile_set_name": "Github"
} |
/*
==============================================================================
GenericAppCommand.cpp
Created: 15 May 2019 7:37:31pm
Author: bkupe
==============================================================================
*/
#include "GenericAppCommand.h"
// Builds the command from its definition params: reads the command type and,
// for OPEN_SESSION, exposes a .noisette file parameter selecting the session
// file to load.
GenericAppCommand::GenericAppCommand(ChataigneGenericModule* _module, CommandContext context, var params) :
BaseCommand(_module, context, params)
{
// "type" defaults to NEW_SESSION when the definition does not specify one.
type = (Type)(int)(params.getProperty("type", NEW_SESSION));
if (type == OPEN_SESSION)
{
file = addFileParameter("File", "The file to open. This will replace this session !");
file->fileTypeFilter = "*.noisette";
}
}
// Nothing to release here; parameters added in the constructor are owned and
// cleaned up by the base class.
GenericAppCommand::~GenericAppCommand()
{
}
// Executes the configured action. Session-destroying operations are deferred
// through a short timer so the teardown does not happen inside this trigger
// call (which itself lives in the session being destroyed).
void GenericAppCommand::triggerInternal()
{
switch (type)
{
case NEW_SESSION:
{
auto newGraphFunc = std::bind(&Engine::createNewGraph, Engine::mainEngine);
Timer::callAfterDelay(.01f, newGraphFunc); //force timer to avoid clearing everything inside the trigger func which will bubble up after everything has been deleted
}
break;
case OPEN_SESSION:
{
// Only attempt the load when the chosen file exists; otherwise warn.
if(file->getFile().exists())
{
auto loadFileFunc = std::bind(&Engine::loadDocument, Engine::mainEngine, file->getFile());
Timer::callAfterDelay(.01f, loadFileFunc); //force timer to avoid clearing everything inside the trigger func which will bubble up after everything has been deleted
}
else
{
NLOGWARNING(niceName, "File doesn't exist : " << file->getFile().getFullPathName());
}
}
break;
case CLOSE_APP:
{
// Quit must run on the message thread, hence the explicit lock.
MessageManagerLock mmLock;
OrganicApplication::quit();
}
break;
}
}
// Factory entry point used by the command-definition system: instantiates a
// GenericAppCommand bound to the given module, context and parameters.
BaseCommand* GenericAppCommand::create(ControllableContainer* module, CommandContext context, var params)
{
    ChataigneGenericModule* genericModule = static_cast<ChataigneGenericModule*>(module);
    return new GenericAppCommand(genericModule, context, params);
}
| {
"pile_set_name": "Github"
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.