| code (string, 3–1.01M chars) | repo_name (string, 5–116 chars) | path (string, 3–311 chars) | language (30 classes) | license (15 classes) | size (int64, 3–1.01M) |
|---|---|---|---|---|---|
/***************************************************************************
* _ _ ____ _
* Project ___| | | | _ \| |
* / __| | | | |_) | |
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2011, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at http://curl.haxx.se/docs/copyright.html.
*
* You may opt to use, copy, modify, merge, publish, distribute and/or sell
* copies of the Software, and permit persons to whom the Software is
* furnished to do so, under the terms of the COPYING file.
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
* KIND, either express or implied.
*
***************************************************************************/
#include "setup.h"
#include <string.h>
#ifdef HAVE_SYS_SOCKET_H
#include <sys/socket.h>
#endif
#ifdef HAVE_NETINET_IN_H
#include <netinet/in.h>
#endif
#ifdef HAVE_NETDB_H
#include <netdb.h>
#endif
#ifdef HAVE_ARPA_INET_H
#include <arpa/inet.h>
#endif
#ifdef HAVE_STDLIB_H
#include <stdlib.h> /* required for free() prototypes */
#endif
#ifdef HAVE_UNISTD_H
#include <unistd.h> /* for the close() proto */
#endif
#ifdef __VMS
#include <in.h>
#include <inet.h>
#include <stdlib.h>
#endif
#ifdef HAVE_PROCESS_H
#include <process.h>
#endif
#include "urldata.h"
#include "sendf.h"
#include "hostip.h"
#include "hash.h"
#include "share.h"
#include "strerror.h"
#include "url.h"
#include "inet_pton.h"
#include "connect.h"
#define _MPRINTF_REPLACE /* use our functions only */
#include <curl/mprintf.h>
#include "curl_memory.h"
/* The last #include file should be: */
#include "memdebug.h"
/***********************************************************************
* Only for ipv6-enabled builds
**********************************************************************/
#ifdef CURLRES_IPV6
#if defined(CURLDEBUG) && defined(HAVE_GETNAMEINFO)
/* These are strictly for memory tracing and are using the same style as the
* family otherwise present in memdebug.c. I put these ones here since they
* require a bunch of structs I didn't want to include in memdebug.c
*/
/*
* For CURLRES_ARES, this should be written using ares_gethostbyaddr()
* (ignoring the fact c-ares doesn't return 'serv').
*/
int curl_dogetnameinfo(GETNAMEINFO_QUAL_ARG1 GETNAMEINFO_TYPE_ARG1 sa,
GETNAMEINFO_TYPE_ARG2 salen,
char *host, GETNAMEINFO_TYPE_ARG46 hostlen,
char *serv, GETNAMEINFO_TYPE_ARG46 servlen,
GETNAMEINFO_TYPE_ARG7 flags,
int line, const char *source)
{
int res = (getnameinfo)(sa, salen,
host, hostlen,
serv, servlen,
flags);
if(0 == res)
/* success */
curl_memlog("GETNAME %s:%d getnameinfo()\n",
source, line);
else
curl_memlog("GETNAME %s:%d getnameinfo() failed = %d\n",
source, line, res);
return res;
}
#endif /* defined(CURLDEBUG) && defined(HAVE_GETNAMEINFO) */
/*
* Curl_ipv6works() returns TRUE if ipv6 seems to work.
*/
bool Curl_ipv6works(void)
{
/* the nature of most systems is that IPv6 status doesn't come and go
during a program's lifetime so we only probe the first time and then we
have the info kept for fast re-use */
static int ipv6_works = -1;
if(-1 == ipv6_works) {
/* probe to see if we have a working IPv6 stack */
curl_socket_t s = socket(PF_INET6, SOCK_DGRAM, 0);
if(s == CURL_SOCKET_BAD)
/* an ipv6 address was requested but we can't get/use one */
ipv6_works = 0;
else {
ipv6_works = 1;
sclose(s);
}
}
return (ipv6_works>0)?TRUE:FALSE;
}
/*
* Curl_ipvalid() checks whether any CURL_IPRESOLVE_* requirements have been
* set and returns TRUE if they are met.
*/
bool Curl_ipvalid(struct connectdata *conn)
{
if(conn->ip_version == CURL_IPRESOLVE_V6)
return Curl_ipv6works();
return TRUE;
}
#if defined(CURLRES_SYNCH)
#ifdef DEBUG_ADDRINFO
static void dump_addrinfo(struct connectdata *conn, const Curl_addrinfo *ai)
{
printf("dump_addrinfo:\n");
for ( ; ai; ai = ai->ai_next) {
char buf[INET6_ADDRSTRLEN];
printf(" fam %2d, CNAME %s, ",
ai->ai_family, ai->ai_canonname ? ai->ai_canonname : "<none>");
if(Curl_printable_address(ai, buf, sizeof(buf)))
printf("%s\n", buf);
else
printf("failed; %s\n", Curl_strerror(conn, SOCKERRNO));
}
}
#else
#define dump_addrinfo(x,y)
#endif
/*
* Curl_getaddrinfo() when built ipv6-enabled (non-threading and
* non-ares version).
*
* Returns name information about the given hostname and port number. If
* successful, the 'addrinfo' is returned and the fourth argument will point to
* memory we need to free after use. That memory *MUST* be freed with
* Curl_freeaddrinfo(), nothing else.
*/
Curl_addrinfo *Curl_getaddrinfo(struct connectdata *conn,
const char *hostname,
int port,
int *waitp)
{
struct addrinfo hints;
Curl_addrinfo *res;
int error;
char sbuf[NI_MAXSERV];
char *sbufptr = NULL;
char addrbuf[128];
int pf;
struct SessionHandle *data = conn->data;
*waitp = 0; /* synchronous response only */
/*
* Check if a limited name resolve has been requested.
*/
switch(conn->ip_version) {
case CURL_IPRESOLVE_V4:
pf = PF_INET;
break;
case CURL_IPRESOLVE_V6:
pf = PF_INET6;
break;
default:
pf = PF_UNSPEC;
break;
}
if((pf != PF_INET) && !Curl_ipv6works())
/* the stack seems to be a non-ipv6 one */
pf = PF_INET;
memset(&hints, 0, sizeof(hints));
hints.ai_family = pf;
hints.ai_socktype = conn->socktype;
if((1 == Curl_inet_pton(AF_INET, hostname, addrbuf)) ||
(1 == Curl_inet_pton(AF_INET6, hostname, addrbuf))) {
/* the given address is numerical only, prevent a reverse lookup */
hints.ai_flags = AI_NUMERICHOST;
}
if(port) {
snprintf(sbuf, sizeof(sbuf), "%d", port);
sbufptr=sbuf;
}
error = Curl_getaddrinfo_ex(hostname, sbufptr, &hints, &res);
if(error) {
infof(data, "getaddrinfo(3) failed for %s:%d\n", hostname, port);
return NULL;
}
dump_addrinfo(conn, res);
return res;
}
#endif /* CURLRES_SYNCH */
#endif /* CURLRES_IPV6 */
| aduros/amity | external/curl/lib/hostip6.c | C | bsd-2-clause | 6,643 |
/* Copyright (C) 2018 European Spallation Source, ERIC. See LICENSE file */
//===----------------------------------------------------------------------===//
///
/// \file
///
//===----------------------------------------------------------------------===//
#include <gdgem/nmx/Readout.h>
#include <gdgem/clustering/test_data/OldReadoutFile.h>
#include <cstdlib> /* EXIT_SUCCESS / EXIT_FAILURE */
#include <iostream> /* std::cout */
int main(int argc, char *argv[]) {
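// Expects the path to an old-format readout file as argv[1]; the converted data is written to "<input>_conv".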
(void) argc;
(void) argv;
if (argc < 2) {
return EXIT_FAILURE;
}
std::string fname(argv[1]);
std::cout << "Old file: " << fname << "\n";
std::vector<OldReadout> old_data;
OldReadoutFile::read(fname, old_data);
std::cout << "Hit count: " << old_data.size() << "\n";
std::vector<Readout> new_data;
new_data.resize(old_data.size());
for (size_t i = 0; i < old_data.size(); ++i) {
auto &rnew = new_data[i];
const auto &rold = old_data[i];
rnew.fec = rold.fec;
rnew.chip_id = rold.chip_id;
rnew.bonus_timestamp = rold.bonus_timestamp;
rnew.srs_timestamp = rold.srs_timestamp;
rnew.channel = rold.channel;
rnew.bcid = rold.bcid;
rnew.tdc = rold.tdc;
rnew.adc = rold.adc;
rnew.over_threshold = rold.over_threshold;
}
auto newfile = ReadoutFile::create(fname + "_conv");
newfile->push(new_data);
return EXIT_SUCCESS;
}
| ess-dmsc/event-formation-unit | src/modules/gdgem/nmx/old_formats/srs_converter.cpp | C++ | bsd-2-clause | 1,278 |
/* $NetBSD: altq_blue.c,v 1.23 2011/11/19 22:51:18 tls Exp $ */
/* $KAME: altq_blue.c,v 1.15 2005/04/13 03:44:24 suz Exp $ */
/*
* Copyright (C) 1997-2002
* Sony Computer Science Laboratories Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY SONY CSL AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL SONY CSL OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
*/
/*
* Copyright (c) 1990-1994 Regents of the University of California.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. All advertising materials mentioning features or use of this software
* must display the following acknowledgement:
* This product includes software developed by the Computer Systems
* Engineering Group at Lawrence Berkeley Laboratory.
* 4. Neither the name of the University nor of the Laboratory may be used
* to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#include <special_includes/sys/cdefs.h>
__KERNEL_RCSID(0, "$NetBSD: altq_blue.c,v 1.23 2011/11/19 22:51:18 tls Exp $");
#ifdef _KERNEL_OPT
#include "opt_altq.h"
#include "opt_inet.h"
#endif
#ifdef ALTQ_BLUE /* blue is enabled by ALTQ_BLUE option in opt_altq.h */
#include <special_includes/sys/param.h>
#include <special_includes/sys/malloc.h>
#include <special_includes/sys/mbuf.h>
#include <special_includes/sys/socket.h>
#include <special_includes/sys/sockio.h>
#include <special_includes/sys/systm.h>
#include <special_includes/sys/proc.h>
#include <special_includes/sys/errno.h>
#include <special_includes/sys/kernel.h>
#include <special_includes/sys/kauth.h>
#include <special_includes/sys/cprng.h>
#include <net/if.h>
#include <net/if_types.h>
#include <netinet/in.h>
#include <netinet/in_systm.h>
#include <netinet/ip.h>
#ifdef INET6
#include <netinet/ip6.h>
#endif
#include <altq/altq.h>
#include <altq/altq_conf.h>
#include <altq/altq_blue.h>
#ifdef ALTQ3_COMPAT
/*
* Blue is proposed and implemented by Wu-chang Feng <wuchang@eecs.umich.edu>.
* more information on Blue is available from
* http://www.eecs.umich.edu/~wuchang/blue/
*/
/* fixed-point uses 12-bit decimal places */
#define FP_SHIFT 12 /* fixed-point shift */
#define BLUE_LIMIT 200 /* default max queue length */
#define BLUE_STATS /* collect statistics */
/* blue_list keeps all blue_state_t's allocated. */
static blue_queue_t *blue_list = NULL;
/* internal function prototypes */
static int blue_enqueue(struct ifaltq *, struct mbuf *, struct altq_pktattr *);
static struct mbuf *blue_dequeue(struct ifaltq *, int);
static int drop_early(blue_t *);
static int mark_ecn(struct mbuf *, struct altq_pktattr *, int);
static int blue_detach(blue_queue_t *);
static int blue_request(struct ifaltq *, int, void *);
/*
* blue device interface
*/
altqdev_decl(blue);
int
blueopen(dev_t dev, int flag, int fmt,
struct lwp *l)
{
/* everything will be done when the queueing scheme is attached. */
return 0;
}
int
blueclose(dev_t dev, int flag, int fmt,
struct lwp *l)
{
blue_queue_t *rqp;
int err, error = 0;
while ((rqp = blue_list) != NULL) {
/* destroy all */
err = blue_detach(rqp);
if (err != 0 && error == 0)
error = err;
}
return error;
}
int
blueioctl(dev_t dev, ioctlcmd_t cmd, void *addr, int flag,
struct lwp *l)
{
blue_queue_t *rqp;
struct blue_interface *ifacep;
struct ifnet *ifp;
int error = 0;
/* check super-user privilege */
switch (cmd) {
case BLUE_GETSTATS:
break;
default:
#if (__FreeBSD_version > 400000)
if ((error = suser(p)) != 0)
return (error);
#else
if ((error = kauth_authorize_network(l->l_cred,
KAUTH_NETWORK_ALTQ, KAUTH_REQ_NETWORK_ALTQ_BLUE, NULL,
NULL, NULL)) != 0)
return (error);
#endif
break;
}
switch (cmd) {
case BLUE_ENABLE:
ifacep = (struct blue_interface *)addr;
if ((rqp = altq_lookup(ifacep->blue_ifname, ALTQT_BLUE)) == NULL) {
error = EBADF;
break;
}
error = altq_enable(rqp->rq_ifq);
break;
case BLUE_DISABLE:
ifacep = (struct blue_interface *)addr;
if ((rqp = altq_lookup(ifacep->blue_ifname, ALTQT_BLUE)) == NULL) {
error = EBADF;
break;
}
error = altq_disable(rqp->rq_ifq);
break;
case BLUE_IF_ATTACH:
ifp = ifunit(((struct blue_interface *)addr)->blue_ifname);
if (ifp == NULL) {
error = ENXIO;
break;
}
/* allocate and initialize blue_state_t */
rqp = malloc(sizeof(blue_queue_t), M_DEVBUF, M_WAITOK|M_ZERO);
if (rqp == NULL) {
error = ENOMEM;
break;
}
rqp->rq_q = malloc(sizeof(class_queue_t), M_DEVBUF,
M_WAITOK|M_ZERO);
if (rqp->rq_q == NULL) {
free(rqp, M_DEVBUF);
error = ENOMEM;
break;
}
rqp->rq_blue = malloc(sizeof(blue_t), M_DEVBUF,
M_WAITOK|M_ZERO);
if (rqp->rq_blue == NULL) {
free(rqp->rq_q, M_DEVBUF);
free(rqp, M_DEVBUF);
error = ENOMEM;
break;
}
rqp->rq_ifq = &ifp->if_snd;
qtail(rqp->rq_q) = NULL;
qlen(rqp->rq_q) = 0;
qlimit(rqp->rq_q) = BLUE_LIMIT;
/* default packet time: 1000 bytes / 10Mbps * 8 * 1000000 */
blue_init(rqp->rq_blue, 0, 800, 1000, 50000);
/*
* set BLUE to this ifnet structure.
*/
error = altq_attach(rqp->rq_ifq, ALTQT_BLUE, rqp,
blue_enqueue, blue_dequeue, blue_request,
NULL, NULL);
if (error) {
free(rqp->rq_blue, M_DEVBUF);
free(rqp->rq_q, M_DEVBUF);
free(rqp, M_DEVBUF);
break;
}
/* add this state to the blue list */
rqp->rq_next = blue_list;
blue_list = rqp;
break;
case BLUE_IF_DETACH:
ifacep = (struct blue_interface *)addr;
if ((rqp = altq_lookup(ifacep->blue_ifname, ALTQT_BLUE)) == NULL) {
error = EBADF;
break;
}
error = blue_detach(rqp);
break;
case BLUE_GETSTATS:
do {
struct blue_stats *q_stats;
blue_t *rp;
q_stats = (struct blue_stats *)addr;
if ((rqp = altq_lookup(q_stats->iface.blue_ifname,
ALTQT_BLUE)) == NULL) {
error = EBADF;
break;
}
q_stats->q_len = qlen(rqp->rq_q);
q_stats->q_limit = qlimit(rqp->rq_q);
rp = rqp->rq_blue;
q_stats->q_pmark = rp->blue_pmark;
q_stats->xmit_packets = rp->blue_stats.xmit_packets;
q_stats->xmit_bytes = rp->blue_stats.xmit_bytes;
q_stats->drop_packets = rp->blue_stats.drop_packets;
q_stats->drop_bytes = rp->blue_stats.drop_bytes;
q_stats->drop_forced = rp->blue_stats.drop_forced;
q_stats->drop_unforced = rp->blue_stats.drop_unforced;
q_stats->marked_packets = rp->blue_stats.marked_packets;
} while (/*CONSTCOND*/ 0);
break;
case BLUE_CONFIG:
do {
struct blue_conf *fc;
int limit;
fc = (struct blue_conf *)addr;
if ((rqp = altq_lookup(fc->iface.blue_ifname,
ALTQT_BLUE)) == NULL) {
error = EBADF;
break;
}
limit = fc->blue_limit;
qlimit(rqp->rq_q) = limit;
fc->blue_limit = limit; /* write back the new value */
if (fc->blue_pkttime > 0)
rqp->rq_blue->blue_pkttime = fc->blue_pkttime;
if (fc->blue_max_pmark > 0)
rqp->rq_blue->blue_max_pmark = fc->blue_max_pmark;
if (fc->blue_hold_time > 0)
rqp->rq_blue->blue_hold_time = fc->blue_hold_time;
rqp->rq_blue->blue_flags = fc->blue_flags;
blue_init(rqp->rq_blue, rqp->rq_blue->blue_flags,
rqp->rq_blue->blue_pkttime,
rqp->rq_blue->blue_max_pmark,
rqp->rq_blue->blue_hold_time);
} while (/*CONSTCOND*/ 0);
break;
default:
error = EINVAL;
break;
}
return error;
}
static int
blue_detach(blue_queue_t *rqp)
{
blue_queue_t *tmp;
int error = 0;
if (ALTQ_IS_ENABLED(rqp->rq_ifq))
altq_disable(rqp->rq_ifq);
if ((error = altq_detach(rqp->rq_ifq)))
return (error);
if (blue_list == rqp)
blue_list = rqp->rq_next;
else {
for (tmp = blue_list; tmp != NULL; tmp = tmp->rq_next)
if (tmp->rq_next == rqp) {
tmp->rq_next = rqp->rq_next;
break;
}
if (tmp == NULL)
printf("blue_detach: no state found in blue_list!\n");
}
free(rqp->rq_q, M_DEVBUF);
free(rqp->rq_blue, M_DEVBUF);
free(rqp, M_DEVBUF);
return (error);
}
/*
* blue support routines
*/
int
blue_init(blue_t *rp, int flags, int pkttime, int blue_max_pmark,
int blue_hold_time)
{
int npkts_per_sec;
rp->blue_idle = 1;
rp->blue_flags = flags;
rp->blue_pkttime = pkttime;
rp->blue_max_pmark = blue_max_pmark;
rp->blue_hold_time = blue_hold_time;
if (pkttime == 0)
rp->blue_pkttime = 1;
/* when the link is very slow, adjust blue parameters */
npkts_per_sec = 1000000 / rp->blue_pkttime;
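/* NOTE: the slow-link branches below are empty in this version of the code; no parameter adjustment is actually applied. */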
if (npkts_per_sec < 50) {
}
else if (npkts_per_sec < 300) {
}
microtime(&rp->blue_last);
return (0);
}
/*
* enqueue routine:
*
* returns: 0 when successfully queued.
* ENOBUFS when drop occurs.
*/
static int
blue_enqueue(struct ifaltq *ifq, struct mbuf *m, struct altq_pktattr *pktattr)
{
blue_queue_t *rqp = (blue_queue_t *)ifq->altq_disc;
int error = 0;
if (blue_addq(rqp->rq_blue, rqp->rq_q, m, pktattr) == 0)
ifq->ifq_len++;
else
error = ENOBUFS;
return error;
}
#define DTYPE_NODROP 0 /* no drop */
#define DTYPE_FORCED 1 /* a "forced" drop */
#define DTYPE_EARLY 2 /* an "unforced" (early) drop */
int
blue_addq(blue_t *rp, class_queue_t *q, struct mbuf *m,
struct altq_pktattr *pktattr)
{
int droptype;
/*
* if we were idle, this is an enqueue onto an empty queue
* and we should decrement marking probability
*
*/
if (rp->blue_idle) {
struct timeval now;
int t;
rp->blue_idle = 0;
microtime(&now);
t = (now.tv_sec - rp->blue_last.tv_sec);
if ( t > 1) {
rp->blue_pmark = 1;
microtime(&rp->blue_last);
} else {
t = t * 1000000 + (now.tv_usec - rp->blue_last.tv_usec);
if (t > rp->blue_hold_time) {
rp->blue_pmark--;
if (rp->blue_pmark < 0) rp->blue_pmark = 0;
microtime(&rp->blue_last);
}
}
}
/* see if we drop early */
droptype = DTYPE_NODROP;
if (drop_early(rp) && qlen(q) > 1) {
/* mark or drop by blue */
if ((rp->blue_flags & BLUEF_ECN) &&
mark_ecn(m, pktattr, rp->blue_flags)) {
/* successfully marked. do not drop. */
#ifdef BLUE_STATS
rp->blue_stats.marked_packets++;
#endif
} else {
/* unforced drop by blue */
droptype = DTYPE_EARLY;
}
}
/*
* if the queue length hits the hard limit, it's a forced drop.
*/
if (droptype == DTYPE_NODROP && qlen(q) >= qlimit(q))
droptype = DTYPE_FORCED;
/* if successful or forced drop, enqueue this packet. */
if (droptype != DTYPE_EARLY)
_addq(q, m);
if (droptype != DTYPE_NODROP) {
if (droptype == DTYPE_EARLY) {
/* drop the incoming packet */
#ifdef BLUE_STATS
rp->blue_stats.drop_unforced++;
#endif
} else {
struct timeval now;
int t;
/* forced drop, select a victim packet in the queue. */
m = _getq_random(q);
microtime(&now);
t = (now.tv_sec - rp->blue_last.tv_sec);
t = t * 1000000 + (now.tv_usec - rp->blue_last.tv_usec);
if (t > rp->blue_hold_time) {
rp->blue_pmark += rp->blue_max_pmark >> 3;
if (rp->blue_pmark > rp->blue_max_pmark)
rp->blue_pmark = rp->blue_max_pmark;
microtime(&rp->blue_last);
}
#ifdef BLUE_STATS
rp->blue_stats.drop_forced++;
#endif
}
#ifdef BLUE_STATS
rp->blue_stats.drop_packets++;
rp->blue_stats.drop_bytes += m->m_pkthdr.len;
#endif
m_freem(m);
return (-1);
}
/* successfully queued */
return (0);
}
/*
* early-drop probability is kept in blue_pmark
*
*/
static int
drop_early(blue_t *rp)
{
if ((cprng_fast32() % rp->blue_max_pmark) < rp->blue_pmark) {
/* drop or mark */
return (1);
}
/* no drop/mark */
return (0);
}
/*
* try to mark CE bit to the packet.
* returns 1 if successfully marked, 0 otherwise.
*/
static int
mark_ecn(struct mbuf *m, struct altq_pktattr *pktattr, int flags)
{
struct mbuf *m0;
if (pktattr == NULL ||
(pktattr->pattr_af != AF_INET && pktattr->pattr_af != AF_INET6))
return (0);
/* verify that pattr_hdr is within the mbuf data */
for (m0 = m; m0 != NULL; m0 = m0->m_next)
if (((char *)pktattr->pattr_hdr >= m0->m_data) &&
((char *)pktattr->pattr_hdr < m0->m_data + m0->m_len))
break;
if (m0 == NULL) {
/* ick, pattr_hdr is stale */
pktattr->pattr_af = AF_UNSPEC;
return (0);
}
switch (pktattr->pattr_af) {
case AF_INET:
if (flags & BLUEF_ECN4) {
struct ip *ip = (struct ip *)pktattr->pattr_hdr;
u_int8_t otos;
int sum;
if (ip->ip_v != 4)
return (0); /* version mismatch! */
if ((ip->ip_tos & IPTOS_ECN_MASK) == IPTOS_ECN_NOTECT)
return (0); /* not-ECT */
if ((ip->ip_tos & IPTOS_ECN_MASK) == IPTOS_ECN_CE)
return (1); /* already marked */
/*
* ecn-capable but not marked,
* mark CE and update checksum
*/
otos = ip->ip_tos;
ip->ip_tos |= IPTOS_ECN_CE;
/*
* update checksum (from RFC1624)
* HC' = ~(~HC + ~m + m')
*/
sum = ~ntohs(ip->ip_sum) & 0xffff;
sum += (~otos & 0xffff) + ip->ip_tos;
sum = (sum >> 16) + (sum & 0xffff);
sum += (sum >> 16); /* add carry */
ip->ip_sum = htons(~sum & 0xffff);
return (1);
}
break;
#ifdef INET6
case AF_INET6:
if (flags & BLUEF_ECN6) {
struct ip6_hdr *ip6 = (struct ip6_hdr *)pktattr->pattr_hdr;
u_int32_t flowlabel;
flowlabel = ntohl(ip6->ip6_flow);
if ((flowlabel >> 28) != 6)
return (0); /* version mismatch! */
if ((flowlabel & (IPTOS_ECN_MASK << 20)) ==
(IPTOS_ECN_NOTECT << 20))
return (0); /* not-ECT */
if ((flowlabel & (IPTOS_ECN_MASK << 20)) ==
(IPTOS_ECN_CE << 20))
return (1); /* already marked */
/*
* ecn-capable but not marked, mark CE
*/
flowlabel |= (IPTOS_ECN_CE << 20);
ip6->ip6_flow = htonl(flowlabel);
return (1);
}
break;
#endif /* INET6 */
}
/* not marked */
return (0);
}
/*
* dequeue routine:
* must be called in splnet.
*
* returns: mbuf dequeued.
* NULL when no packet is available in the queue.
*/
static struct mbuf *
blue_dequeue(struct ifaltq * ifq, int op)
{
blue_queue_t *rqp = (blue_queue_t *)ifq->altq_disc;
struct mbuf *m = NULL;
if (op == ALTDQ_POLL)
return (qhead(rqp->rq_q));
m = blue_getq(rqp->rq_blue, rqp->rq_q);
if (m != NULL)
ifq->ifq_len--;
return m;
}
struct mbuf *
blue_getq(blue_t *rp, class_queue_t *q)
{
struct mbuf *m;
if ((m = _getq(q)) == NULL) {
if (rp->blue_idle == 0) {
rp->blue_idle = 1;
microtime(&rp->blue_last);
}
return NULL;
}
rp->blue_idle = 0;
#ifdef BLUE_STATS
rp->blue_stats.xmit_packets++;
rp->blue_stats.xmit_bytes += m->m_pkthdr.len;
#endif
return (m);
}
static int
blue_request(struct ifaltq *ifq, int req, void *arg)
{
blue_queue_t *rqp = (blue_queue_t *)ifq->altq_disc;
switch (req) {
case ALTRQ_PURGE:
_flushq(rqp->rq_q);
if (ALTQ_IS_ENABLED(ifq))
ifq->ifq_len = 0;
break;
}
return (0);
}
#ifdef KLD_MODULE
static struct altqsw blue_sw =
{"blue", blueopen, blueclose, blueioctl};
ALTQ_MODULE(altq_blue, ALTQT_BLUE, &blue_sw);
#endif /* KLD_MODULE */
#endif /* ALTQ3_COMPAT */
#endif /* ALTQ_BLUE */
| vadimsu/netbsd_dpdk_port | netbsd/altq/altq_blue.c | C | bsd-2-clause | 17,201 |
class Mapserver < Formula
desc "Publish spatial data and interactive mapping apps to the web"
homepage "http://mapserver.org/"
url "https://download.osgeo.org/mapserver/mapserver-7.0.7.tar.gz"
sha256 "37a8c3008328bae0fea05109d6d544a3284f756a23956e8a2f5ec10a6b5fef67"
revision 2
bottle do
cellar :any
sha256 "861890728eb6d3fb016b9b820f301da7f79136f454820ef8d3eb1387711f8df3" => :high_sierra
sha256 "00e9069aba52817cbd8e27ee77113bdfd4f4ac3f0fc6fb89f16b8ae451e53fda" => :sierra
sha256 "e1c69d346d18201436a8afdbc40e7ef0b0e6ed80ec8ac47eba6ac5e04f65af8e" => :el_capitan
end
option "with-fastcgi", "Build with fastcgi support"
option "with-geos", "Build support for GEOS spatial operations"
option "with-php", "Build PHP MapScript module"
option "with-postgresql", "Build support for PostgreSQL as a data source"
depends_on "pkg-config" => :build
depends_on "cmake" => :build
depends_on "swig" => :build
depends_on "freetype"
depends_on "libpng"
depends_on "giflib"
depends_on "gd"
depends_on "proj"
depends_on "gdal"
depends_on "geos" => :optional
depends_on "postgresql" => :optional unless MacOS.version >= :lion
depends_on "cairo" => :optional
depends_on "fcgi" if build.with? "fastcgi"
def install
# Harfbuzz support requires fribidi and fribidi support requires
# harfbuzz but fribidi currently fails to build with:
# fribidi-common.h:61:12: fatal error: 'glib.h' file not found
args = std_cmake_args + %w[
-DWITH_KML=ON
-DWITH_CURL=ON
-DWITH_CLIENT_WMS=ON
-DWITH_CLIENT_WFS=ON
-DWITH_SOS=ON
-DWITH_PROJ=ON
-DWITH_GDAL=ON
-DWITH_OGR=ON
-DWITH_WFS=ON
-DWITH_FRIBIDI=OFF
-DWITH_HARFBUZZ=OFF
-DPYTHON_EXECUTABLE:FILEPATH=/usr/bin/python
]
# Install within our sandbox.
inreplace "mapscript/php/CMakeLists.txt", "${PHP5_EXTENSION_DIR}", lib/"php/extensions"
args << "-DWITH_PHP=ON" if build.with? "php"
# Install within our sandbox.
inreplace "mapscript/python/CMakeLists.txt" do |s|
s.gsub! "${PYTHON_SITE_PACKAGES}", lib/"python2.7/site-packages"
s.gsub! "${PYTHON_LIBRARIES}", "-Wl,-undefined,dynamic_lookup"
end
args << "-DWITH_PYTHON=ON"
# Using rpath on python module seems to cause problems if you attempt to
# import it with an interpreter it wasn't built against.
# 2): Library not loaded: @rpath/libmapserver.1.dylib
args << "-DCMAKE_SKIP_RPATH=ON"
# All of the below are on by default so need
# explicitly disabling if not requested.
if build.with? "geos"
args << "-DWITH_GEOS=ON"
else
args << "-DWITH_GEOS=OFF"
end
if build.with? "cairo"
args << "-WITH_CAIRO=ON"
else
args << "-DWITH_CAIRO=OFF"
end
if build.with? "postgresql"
args << "-DWITH_POSTGIS=ON"
else
args << "-DWITH_POSTGIS=OFF"
end
if build.with? "fastcgi"
args << "-DWITH_FCGI=ON"
else
args << "-DWITH_FCGI=OFF"
end
mkdir "build" do
system "cmake", "..", *args
system "make", "install"
end
end
def caveats; <<~EOS
The Mapserver CGI executable is #{opt_bin}/mapserv
If you built the PHP option:
* Add the following line to php.ini:
extension="#{opt_lib}/php/extensions/php_mapscript.so"
* Execute "php -m"
* You should see MapScript in the module list
EOS
end
test do
assert_match version.to_s, shell_output("#{bin}/mapserv -v")
system "python2.7", "-c", "import mapscript"
end
end
| battlemidget/homebrew-core | Formula/mapserver.rb | Ruby | bsd-2-clause | 3,556 |
|
// Original: src/os/path.go
//
// Copyright 2009 The Go Authors. All rights reserved.
// Portions Copyright 2016 Hiroshi Ioka. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package smb2
import (
"io"
"os"
"syscall"
)
// MkdirAll mimics os.MkdirAll
func (fs *Share) MkdirAll(path string, perm os.FileMode) error {
path = normPath(path)
// Fast path: if we can tell whether path is a directory or file, stop with success or error.
dir, err := fs.Stat(path)
if err == nil {
if dir.IsDir() {
return nil
}
return &os.PathError{Op: "mkdir", Path: path, Err: syscall.ENOTDIR}
}
// Slow path: make sure parent exists and then call Mkdir for path.
i := len(path)
for i > 0 && IsPathSeparator(path[i-1]) { // Skip trailing path separator.
i--
}
j := i
for j > 0 && !IsPathSeparator(path[j-1]) { // Scan backward over element.
j--
}
if j > 1 {
// Create parent
err = fs.MkdirAll(path[0:j-1], perm)
if err != nil {
return err
}
}
// Parent now exists; invoke Mkdir and use its result.
err = fs.Mkdir(path, perm)
if err != nil {
// Handle arguments like "foo/." by
// double-checking that directory doesn't exist.
dir, err1 := fs.Lstat(path)
if err1 == nil && dir.IsDir() {
return nil
}
return err
}
return nil
}
// RemoveAll removes path and any children it contains.
// It removes everything it can but returns the first error
// it encounters. If the path does not exist, RemoveAll
// returns nil (no error).
func (fs *Share) RemoveAll(path string) error {
path = normPath(path)
// Simple case: if Remove works, we're done.
err := fs.Remove(path)
if err == nil || os.IsNotExist(err) {
return nil
}
// Otherwise, is this a directory we need to recurse into?
dir, serr := fs.Lstat(path)
if serr != nil {
if serr, ok := serr.(*os.PathError); ok && (os.IsNotExist(serr.Err) || serr.Err == syscall.ENOTDIR) {
return nil
}
return serr
}
if !dir.IsDir() {
// Not a directory; return the error from Remove.
return err
}
// Directory.
fd, err := fs.Open(path)
if err != nil {
if os.IsNotExist(err) {
// Race. It was deleted between the Lstat and Open.
// Return nil per RemoveAll's docs.
return nil
}
return err
}
// Remove contents & return first error.
err = nil
for {
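// Read the directory in batches of up to 100 names; io.EOF from Readdirnames marks the end.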
names, err1 := fd.Readdirnames(100)
for _, name := range names {
err1 := fs.RemoveAll(path + string(PathSeparator) + name)
if err == nil {
err = err1
}
}
if err1 == io.EOF {
break
}
// If Readdirnames returned an error, use it.
if err == nil {
err = err1
}
if len(names) == 0 {
break
}
}
// Close directory, because windows won't remove opened directory.
fd.Close()
// Remove directory.
err1 := fs.Remove(path)
if err1 == nil || os.IsNotExist(err1) {
return nil
}
if err == nil {
err = err1
}
return err
}
| hirochachacha/go-smb2 | all.go | GO | bsd-2-clause | 4,311 |
|
class AddOutputCountAndSensorCountToDevices < ActiveRecord::Migration
def change
add_column :devices, :output_count, :integer
add_column :devices, :sensor_count, :integer
end
end
| brewbit/brewbit-dashboard | db/migrate/20140417050045_add_output_count_and_sensor_count_to_devices.rb | Ruby | bsd-2-clause | 191 |
|
/* Copyright (c) Citrix Systems, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms,
* with or without modification, are permitted provided
* that the following conditions are met:
*
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the
* following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the
* following disclaimer in the documentation and/or other
* materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Windows.Forms;
using XenAdmin.Controls;
using XenAdmin.Controls.Ballooning;
using XenAdmin.Core;
using XenAPI;
namespace XenAdmin.TabPages
{
public partial class BallooningPage : BaseTabPage
{
private const int ROW_GAP = 10;
public BallooningPage()
{
InitializeComponent();
// http://alt.pluralsight.com/wiki/default.aspx/Craig.FlickerFreeControlDrawing
SetStyle(ControlStyles.DoubleBuffer |
ControlStyles.AllPaintingInWmPaint |
ControlStyles.Opaque |
ControlStyles.UserPaint, true);
Text = Messages.DYNAMIC_MEMORY_CONTROL;
Host_CollectionChangedWithInvoke = Program.ProgramInvokeHandler(Host_CollectionChanged);
VM_CollectionChangedWithInvoke = Program.ProgramInvokeHandler(VM_CollectionChanged);
}
IXenObject xenObject;
List<Host> hosts = new List<Host>();
List<VM> vms = new List<VM>();
private readonly CollectionChangeEventHandler Host_CollectionChangedWithInvoke;
//solution from: http://stackoverflow.com/questions/2612487/how-to-fix-the-flickering-in-user-controls
protected override CreateParams CreateParams
{
get
{
var cp = base.CreateParams;
cp.ExStyle |= 0x02000000; // Turn on WS_EX_COMPOSITED
return cp;
}
}
public IXenObject XenObject
{
set
{
System.Diagnostics.Trace.Assert(value is Pool || value is Host || value is VM);
xenObject = value;
UnregisterHandlers();
vms.Clear();
hosts.Clear();
if (value is VM)
vms.Add((VM)value);
else if (value is Host)
{
Host host = value as Host;
hosts.Add(host);
foreach (VM vm in host.Connection.Cache.VMs)
{
if (VMWanted(vm, host))
vms.Add(vm);
}
}
else // value is XenObject<Pool>
{
value.Connection.Cache.RegisterCollectionChanged<Host>(Host_CollectionChangedWithInvoke);
hosts.AddRange(value.Connection.Cache.Hosts);
}
value.Connection.XenObjectsUpdated += XenObjectsUpdated;
foreach (Host host in hosts)
RegisterHostHandlers(host);
foreach (VM vm in vms)
RegisterVMHandlers(vm);
Program.BeginInvoke(Program.MainWindow, () =>
{
Rebuild();
});
}
}
public override string HelpID => "TabPageBallooning";
private bool VMWanted(VM vm, Host host)
{
return vm.is_a_real_vm() && vm.Show(Properties.Settings.Default.ShowHiddenVMs) && vm.Home() == host;
}
private readonly CollectionChangeEventHandler VM_CollectionChangedWithInvoke;
private void RegisterHostHandlers(Host host)
{
host.PropertyChanged += host_PropertyChanged;
host.Connection.Cache.RegisterCollectionChanged<VM>(VM_CollectionChangedWithInvoke);
foreach (VM vm in host.Connection.Cache.VMs)
RegisterAllVMHandlers(vm);
}
private void host_PropertyChanged(object sender, PropertyChangedEventArgs e)
{
if (e.PropertyName == "name_label" && hosts.Count >= 2)
_rebuild_needed = true; // might change the sort order
}
private void VM_CollectionChanged(object sender, CollectionChangeEventArgs e)
{
if (e.Action == CollectionChangeAction.Remove)
{
VM vm = e.Element as VM;
UnregisterVMHandlers(vm);
}
XenObject = xenObject; // have to recalculate list of VMs etc.
}
private void Host_CollectionChanged(object sender, CollectionChangeEventArgs e)
{
if (e.Action == CollectionChangeAction.Remove)
{
Host host = e.Element as Host;
UnregisterHostHandlers(host);
}
XenObject = xenObject;
}
private void RegisterVMHandlers(VM vm)
{
vm.PropertyChanged -= vm_PropertyChanged;
vm.PropertyChanged += vm_PropertyChanged;
RegisterVMGuestMetrics(vm);
}
private void RegisterAllVMHandlers(VM vm)
{
vm.PropertyChanged -= vm_PropertyChanged_allVMs;
vm.PropertyChanged += vm_PropertyChanged_allVMs;
}
private void RegisterVMGuestMetrics(VM vm)
{
VM_guest_metrics guest_metrics = vm.Connection.Resolve(vm.guest_metrics);
if (guest_metrics != null)
{
guest_metrics.PropertyChanged -= guest_metrics_PropertyChanged;
guest_metrics.PropertyChanged += guest_metrics_PropertyChanged;
}
}
private void UnregisterHandlers()
{
foreach (Host host in hosts)
UnregisterHostHandlers(host);
if (xenObject != null)
{
Pool pool = xenObject as Pool;
if (pool != null)
pool.Connection.Cache.DeregisterCollectionChanged<Host>(Host_CollectionChangedWithInvoke);
xenObject.Connection.XenObjectsUpdated -= XenObjectsUpdated;
foreach (VM vm in xenObject.Connection.Cache.VMs)
UnregisterVMHandlers(vm);
}
foreach (VM vm in vms)
UnregisterVMHandlers(vm); // Should duplicate above line, but let's be safe
foreach (Control c in pageContainerPanel.Controls)
{
VMMemoryRow vmRow = c as VMMemoryRow;
if (vmRow != null)
{
vmRow.UnregisterHandlers();
continue;
}
HostMemoryRow hostRow = c as HostMemoryRow;
if (hostRow != null)
{
hostRow.UnregisterHandlers();
}
}
}
private void UnregisterHostHandlers(Host host)
{
host.Connection.Cache.DeregisterCollectionChanged<VM>(VM_CollectionChangedWithInvoke);
}
private void UnregisterVMHandlers(VM vm)
{
vm.PropertyChanged -= vm_PropertyChanged;
vm.PropertyChanged -= vm_PropertyChanged_allVMs;
VM_guest_metrics guest_metrics = vm.Connection.Resolve(vm.guest_metrics);
if (guest_metrics != null)
guest_metrics.PropertyChanged -= guest_metrics_PropertyChanged;
}
public override void PageHidden()
{
UnregisterHandlers();
}
private void vm_PropertyChanged(object sender, PropertyChangedEventArgs e)
{
if (e.PropertyName == "guest_metrics")
RegisterVMGuestMetrics((VM)sender);
if (e.PropertyName == "memory_static_min" || e.PropertyName == "memory_static_max" ||
e.PropertyName == "memory_dynamic_min" || e.PropertyName == "memory_dynamic_max" ||
e.PropertyName == "metrics" || e.PropertyName == "guest_metrics")
{
// We could just redraw the row for this VM (and for the host), provided it doesn't share a row
// with any other VMs before or after. But doesn't seem necessary to figure all that out.
_rebuild_needed = true;
}
}
private void vm_PropertyChanged_allVMs(object sender, PropertyChangedEventArgs e)
{
// Only observe real VMs (but templates come through here too because
// they change into real VMs during VM creation).
if (!((VM)sender).is_a_real_vm())
return;
// These are used by MainWindow.VMHome() to determine which host the VM belongs to
if (e.PropertyName == "power_state" || e.PropertyName == "VBDs" || e.PropertyName == "affinity" ||
// These can change whether the VM is shown
e.PropertyName == "name_label" || e.PropertyName == "other_config"|| e.PropertyName=="resident_on")
{
if (xenObject is Pool)
{
XenObject = xenObject; // have to recalculate list of VMs etc.
return;
}
if (xenObject is Host)
{
bool vmWanted = VMWanted((VM)sender, (Host)xenObject);
if (vmWanted != vms.Contains((VM)sender))
{
XenObject = xenObject; // have to recalculate list of VMs etc.
return;
}
}
// We also have to redisplay if the power_state or name_label changes (can change the sort order)
if ((e.PropertyName == "power_state" || e.PropertyName == "name_label") && vms.Contains((VM)sender))
_rebuild_needed = true;
}
}
private void guest_metrics_PropertyChanged(object sender, PropertyChangedEventArgs e)
{
if (e.PropertyName == "other")
_rebuild_needed = true;
}
// Whether a change to the properties of any object means we need to Rebuild.
// This is in order to batch up changes to several objects at once (see CA-35330).
private bool _rebuild_needed = false;
private void XenObjectsUpdated(object sender, EventArgs e)
{
if (_rebuild_needed)
Rebuild();
}
private bool _rebuilding = false;
private void Rebuild()
{
Program.AssertOnEventThread();
_rebuild_needed = false;
if (!this.Visible)
return;
_rebuilding = true;
pageContainerPanel.SuspendLayout();
// Store a list of the current controls. We remove them at the end because it makes less flicker that way.
// While we're going through them, remember which VMs were on expanded rows.
List<Control> oldControls = new List<Control>(pageContainerPanel.Controls.Count);
List<VM> expandedVMs = new List<VM>(vms.Count);
foreach (Control c in pageContainerPanel.Controls)
{
oldControls.Add(c);
VMMemoryRow vmRow = c as VMMemoryRow;
if (vmRow != null && vmRow.Expanded)
expandedVMs.AddRange(vmRow.VMs);
}
// Group VMs with the same settings
Dictionary<MemSettings, List<VM>> settingsToVMs = new Dictionary<MemSettings,List<VM>>(); // all VMs with a particular setting
List<MemSettings> listSettings = new List<MemSettings>(); // also make a list of MemSettings to preserve the order
vms.Sort();
foreach (VM vm in vms)
{
MemSettings settings =
vm.has_ballooning()
? new MemSettings(true, vm.power_state, vm.memory_static_min, vm.memory_static_max,
vm.memory_dynamic_min, vm.memory_dynamic_max)
: new MemSettings(false, vm.power_state, 0, vm.memory_static_max, 0, 0); // don't consider other mem settings if ballooning off
if (!settingsToVMs.ContainsKey(settings)) // we've not seen these settings on another VM
{
settingsToVMs.Add(settings, new List<VM>());
listSettings.Add(settings);
}
settingsToVMs[settings].Add(vm);
}
// Add server rows
int initScroll = pageContainerPanel.VerticalScroll.Value;
int top = pageContainerPanel.Padding.Top - initScroll;
hosts.Sort();
foreach (Host host in hosts)
{
Host_metrics metrics = host.Connection.Resolve(host.metrics);
if (metrics == null || !metrics.live)
continue;
AddRowToPanel(new HostMemoryRow(host), ref top);
}
// Add VM rows.
// Sort the rows first by power state then by usual sort order of first VM (because of vms.Sort() above).
// Easier to traverse listSettings five times, but more complicated sorts could be achieved by listSettings.Sort().
vm_power_state[] power_state_order = {vm_power_state.Running, vm_power_state.Paused, vm_power_state.Suspended, vm_power_state.Halted, vm_power_state.unknown};
foreach (vm_power_state ps in power_state_order)
{
foreach (MemSettings settings in listSettings)
{
if (settings.power_state == ps)
{
List<VM> rowVMs = settingsToVMs[settings];
bool expand = Helpers.ListsIntersect(expandedVMs, rowVMs); // expand header if any of its VMs were expanded before
AddRowToPanel(new VMMemoryRow(rowVMs, expand), ref top);
}
}
}
// Remove old controls
foreach (Control c in oldControls)
{
pageContainerPanel.Controls.Remove(c);
int scroll = initScroll;
if (scroll > pageContainerPanel.VerticalScroll.Maximum)
scroll = pageContainerPanel.VerticalScroll.Maximum;
pageContainerPanel.VerticalScroll.Value = scroll; // Without this, the scroll bar can jump around while the page is being rebuilt
c.Dispose();
}
_rebuilding = false;
pageContainerPanel.ResumeLayout();
ReLayout();
SetupDeprecationBanner();
}
private void ReLayout()
{
Program.AssertOnEventThread();
if (_rebuilding)
return;
int initScroll = pageContainerPanel.VerticalScroll.Value;
int top = pageContainerPanel.Padding.Top - initScroll;
foreach (Control row in pageContainerPanel.Controls)
{
row.Top = top;
top += row.Height + ROW_GAP;
}
}
void row_Resize(object sender, EventArgs e)
{
ReLayout();
}
private void AddRowToPanel(UserControl row, ref int top)
{
row.Top = top;
row.Left = pageContainerPanel.Padding.Left - pageContainerPanel.HorizontalScroll.Value;
SetRowWidth(row);
row.Anchor = AnchorStyles.Top | AnchorStyles.Left;
top += row.Height + ROW_GAP;
row.Resize += row_Resize;
pageContainerPanel.Controls.Add(row);
}
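// Memory settings used as a grouping key in Rebuild(): VMs whose MemSettings compare equal share a single row.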
private struct MemSettings
{
public readonly bool has_ballooning;
public readonly vm_power_state power_state;
public readonly long static_min, static_max, dynamic_min, dynamic_max;
public MemSettings(bool has_ballooning, vm_power_state power_state,
long static_min, long static_max, long dynamic_min, long dynamic_max)
{
this.has_ballooning = has_ballooning;
this.power_state = power_state;
this.static_min = static_min;
this.static_max = static_max;
this.dynamic_min = dynamic_min;
this.dynamic_max = dynamic_max;
}
}
private void pageContainerPanel_SizeChanged(object sender, EventArgs e)
{
foreach (Control row in pageContainerPanel.Controls)
SetRowWidth(row);
}
private void SetRowWidth(Control row)
{
row.Width = pageContainerPanel.Width - pageContainerPanel.Padding.Left - 25; // It won't drop below row.MinimumSize.Width though
}
private void SetupDeprecationBanner()
{
Banner.Visible = false;
if (!Helpers.QuebecOrGreater(xenObject.Connection))
return;
if (vms.Any(vm => vm.has_ballooning() && vm.memory_dynamic_min != vm.memory_static_max))
{
Banner.AppliesToVersion = Messages.XENSERVER_8_1;
Banner.BannerType = DeprecationBanner.Type.Deprecation;
Banner.FeatureName = Messages.DMC;
Banner.LinkUri = HiddenFeatures.LinkLabelHidden ? null : new Uri(InvisibleMessages.DEPRECATION_URL);
Banner.Visible = !HiddenFeatures.LinkLabelHidden;
}
}
}
}
| MihaelaStoica/xenadmin | XenAdmin/TabPages/BallooningPage.cs | C# | bsd-2-clause | 19,136 |
|
goog.provide('ol.layer.Group');
goog.require('ol');
goog.require('ol.asserts');
goog.require('ol.Collection');
goog.require('ol.Object');
goog.require('ol.events');
goog.require('ol.events.EventType');
goog.require('ol.extent');
goog.require('ol.layer.Base');
goog.require('ol.obj');
goog.require('ol.source.State');
/**
* @classdesc
* A {@link ol.Collection} of layers that are handled together.
*
* A generic `change` event is triggered when the group/Collection changes.
*
* @constructor
* @extends {ol.layer.Base}
* @param {olx.layer.GroupOptions=} opt_options Layer options.
* @api stable
*/
ol.layer.Group = function(opt_options) {
var options = opt_options || {};
var baseOptions = /** @type {olx.layer.GroupOptions} */
(ol.obj.assign({}, options));
delete baseOptions.layers;
var layers = options.layers;
ol.layer.Base.call(this, baseOptions);
/**
* @private
* @type {Array.<ol.EventsKey>}
*/
this.layersListenerKeys_ = [];
/**
* @private
* @type {Object.<string, Array.<ol.EventsKey>>}
*/
this.listenerKeys_ = {};
ol.events.listen(this,
ol.Object.getChangeEventType(ol.layer.Group.Property.LAYERS),
this.handleLayersChanged_, this);
if (layers) {
if (Array.isArray(layers)) {
layers = new ol.Collection(layers.slice());
} else {
ol.asserts.assert(layers instanceof ol.Collection,
43); // Expected `layers` to be an array or an `ol.Collection`
layers = layers;
}
} else {
layers = new ol.Collection();
}
this.setLayers(layers);
};
ol.inherits(ol.layer.Group, ol.layer.Base);
/**
* @private
*/
ol.layer.Group.prototype.handleLayerChange_ = function() {
if (this.getVisible()) {
this.changed();
}
};
/**
* @param {ol.events.Event} event Event.
* @private
*/
ol.layer.Group.prototype.handleLayersChanged_ = function(event) {
this.layersListenerKeys_.forEach(ol.events.unlistenByKey);
this.layersListenerKeys_.length = 0;
var layers = this.getLayers();
this.layersListenerKeys_.push(
ol.events.listen(layers, ol.Collection.EventType.ADD,
this.handleLayersAdd_, this),
ol.events.listen(layers, ol.Collection.EventType.REMOVE,
this.handleLayersRemove_, this));
for (var id in this.listenerKeys_) {
this.listenerKeys_[id].forEach(ol.events.unlistenByKey);
}
ol.obj.clear(this.listenerKeys_);
var layersArray = layers.getArray();
var i, ii, layer;
for (i = 0, ii = layersArray.length; i < ii; i++) {
layer = layersArray[i];
this.listenerKeys_[ol.getUid(layer).toString()] = [
ol.events.listen(layer, ol.Object.EventType.PROPERTYCHANGE,
this.handleLayerChange_, this),
ol.events.listen(layer, ol.events.EventType.CHANGE,
this.handleLayerChange_, this)
];
}
this.changed();
};
/**
* @param {ol.Collection.Event} collectionEvent Collection event.
* @private
*/
ol.layer.Group.prototype.handleLayersAdd_ = function(collectionEvent) {
var layer = /** @type {ol.layer.Base} */ (collectionEvent.element);
var key = ol.getUid(layer).toString();
ol.DEBUG && console.assert(!(key in this.listenerKeys_),
'listeners already registered');
this.listenerKeys_[key] = [
ol.events.listen(layer, ol.Object.EventType.PROPERTYCHANGE,
this.handleLayerChange_, this),
ol.events.listen(layer, ol.events.EventType.CHANGE,
this.handleLayerChange_, this)
];
this.changed();
};
/**
* @param {ol.Collection.Event} collectionEvent Collection event.
* @private
*/
ol.layer.Group.prototype.handleLayersRemove_ = function(collectionEvent) {
var layer = /** @type {ol.layer.Base} */ (collectionEvent.element);
var key = ol.getUid(layer).toString();
ol.DEBUG && console.assert(key in this.listenerKeys_, 'no listeners to unregister');
this.listenerKeys_[key].forEach(ol.events.unlistenByKey);
delete this.listenerKeys_[key];
this.changed();
};
/**
* Returns the {@link ol.Collection collection} of {@link ol.layer.Layer layers}
* in this group.
* @return {!ol.Collection.<ol.layer.Base>} Collection of
* {@link ol.layer.Base layers} that are part of this group.
* @observable
* @api stable
*/
ol.layer.Group.prototype.getLayers = function() {
return /** @type {!ol.Collection.<ol.layer.Base>} */ (this.get(
ol.layer.Group.Property.LAYERS));
};
/**
* Set the {@link ol.Collection collection} of {@link ol.layer.Layer layers}
* in this group.
* @param {!ol.Collection.<ol.layer.Base>} layers Collection of
* {@link ol.layer.Base layers} that are part of this group.
* @observable
* @api stable
*/
ol.layer.Group.prototype.setLayers = function(layers) {
this.set(ol.layer.Group.Property.LAYERS, layers);
};
/**
* @inheritDoc
*/
ol.layer.Group.prototype.getLayersArray = function(opt_array) {
var array = opt_array !== undefined ? opt_array : [];
this.getLayers().forEach(function(layer) {
layer.getLayersArray(array);
});
return array;
};
/**
* @inheritDoc
*/
ol.layer.Group.prototype.getLayerStatesArray = function(opt_states) {
var states = opt_states !== undefined ? opt_states : [];
var pos = states.length;
this.getLayers().forEach(function(layer) {
layer.getLayerStatesArray(states);
});
var ownLayerState = this.getLayerState();
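// Fold this group's own state into each child state appended above: opacities multiply, visibility ANDs, and the resolution range and extent are intersected.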
var i, ii, layerState;
for (i = pos, ii = states.length; i < ii; i++) {
layerState = states[i];
layerState.opacity *= ownLayerState.opacity;
layerState.visible = layerState.visible && ownLayerState.visible;
layerState.maxResolution = Math.min(
layerState.maxResolution, ownLayerState.maxResolution);
layerState.minResolution = Math.max(
layerState.minResolution, ownLayerState.minResolution);
if (ownLayerState.extent !== undefined) {
if (layerState.extent !== undefined) {
layerState.extent = ol.extent.getIntersection(
layerState.extent, ownLayerState.extent);
} else {
layerState.extent = ownLayerState.extent;
}
}
}
return states;
};
/**
* @inheritDoc
*/
ol.layer.Group.prototype.getSourceState = function() {
return ol.source.State.READY;
};
/**
* @enum {string}
*/
ol.layer.Group.Property = {
LAYERS: 'layers'
};
| wet-boew/openlayers-dist | src/ol/layer/group.js | JavaScript | bsd-2-clause | 6,217 |
|
# encoding: utf-8
require "json"
require "cgi"
require "uri"
require "timeout"
require "open3"
require ::File.expand_path('../sms', __FILE__)
module Watcher
extend self
SITES_DIR = ::File.expand_path('../../sites', __FILE__)
TMP_DIR = ::File.expand_path('../../tmp', __FILE__)
LOCKLIFE = 30*60 # 30 minutes
TIMEOUT = 30 # 30 seconds
def run
begin
# Lock file
flock = ::File.join(::Watcher::TMP_DIR, ".watcher.lock")
# If the lock file already exists, stop.
if locked?(flock)
puts "[LOCK] Lock file found: `#{flock}`."
return
end
# Otherwise, create the lock file
create_lock(flock, "watcher")
# Run the check
begin
check_all
ensure
# No matter how the block finished, remove the lock file
remove_lock(flock)
end
# If a permission error is raised, ignore it.
rescue ::Errno::EACCES
end
end # run
private
def check_all
# Check every site in the list
get_sites do |site, phones|
# Check whether the site/server is reachable
check(site) do |success, type|
# To avoid sending an SMS every 10 minutes, use a lock file
lock = generate_lock(site)
name = (type == 0 ? "Site" : "Server")
# If the check failed (the site is down)
unless success
# If a lock file exists and is still fresh (not older than the configured period),
# skip to the next iteration
next if locked?(lock)
# Otherwise, create the lock file
create_lock(lock, site)
# Send the messages
send_message(phones, "#{name} #{site} is down. #{::Time.now}")
else
# The site is up. If there was a lock, remove it and notify that the site is available again.
send_message(phones, "#{name} #{site} is up. #{::Time.now}") if remove_lock(lock)
end # if
end # check
end # get_sites
end # check_all
def get_sites
::Dir.foreach(::Watcher::SITES_DIR) { |el|
file = ::File.join(::Watcher::SITES_DIR, el)
# Skip anything that is not a regular file or whose name starts with a dot
next if !::File.file?(file) || !(el =~ /\A\./).nil?
# The file name is the site or server name we will check later
site = el
phones = []
# Read the file and collect phone numbers line by line
::File.open(file, "r") { |fl|
::IO.readlines(fl).each { |line|
phones << (line || [])
}
}
phones.compact!
phones.uniq!
# Yield the collected data only if phone numbers were found (otherwise there is no point sending alerts)
yield(site, phones) unless phones.empty?
} # foreach
end # get_sites
# Derive the lock file name from the site address
def generate_lock(site_name)
::File.join(::Watcher::TMP_DIR, "#{site_name}.lock")
end # generate_lock
# Create the lock file and write the site name and timestamp into it.
def create_lock(file, site)
::File.open(file, "w") { |f|
f.write("#{site} - #{::Time.now.strftime('%H:%M, %d/%m/%Y')}")
}
end # create_lock
# Send message str to every phone number in phones
def send_message(phones, str)
phones.each do |phone|
begin
::SMS.message(phone, str)
rescue ::SocketError
rescue => e
puts "[SMS message error] #{e.message}"
end
end # each
end # send_message
def locked?(lock)
# If the lock file does not exist -- false
return false unless ::File.exists?(lock)
# If the lock file exists and has not expired -- true
return true if (::Time.now.to_i - ::File.atime(lock).to_i) < ::Watcher::LOCKLIFE
# Otherwise delete the stale lock file -- false
::File.unlink(lock)
false
end # locked?
# If the lock file exists, delete it and report success, otherwise false
def remove_lock(lock)
if ::File.exists?(lock)
::File.unlink(lock)
true
else
false
end
end # remove_lock
def check(site, &block)
if ip?(site)
check_ip(site, &block)
else
check_domain(site, &block)
end
end # check
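# True if the address looks like a dotted-quad IPv4 address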
def ip?(address)
!(address =~ /\A((([01]?\d{1,2})|(2([0-4]\d|5[0-5])))\.){3}(([01]?\d{1,2})|(2([0-4]\d|5[0-5])))\Z/).nil?
end # ip?
def check_ip(address)
stdin, stdout, stderr = nil, nil, nil
begin
cmd = "ping -c 1 #{address}"
regexp = /
no\ answer|
host\ unreachable|
could\ not\ find\ host|
request\ timed\ out|
100%\ packet\ loss
/ix
# Check whether the host is reachable
::Timeout.timeout(::Watcher::TIMEOUT) {
stdin, stdout, stderr = ::Open3.popen3(cmd)
stdout.readlines.each { |line|
if regexp.match(line)
yield(false, 1)
break
end
}
}
rescue ::SocketError
rescue => e
puts "#{e.message}\n"
puts "#{e.backtrace.join('\n')}"
# Some error occurred
yield(false, 1)
ensure
# Close all file descriptors
stdin.close if stdin && !stdin.closed?
stdout.close if stdout && !stdout.closed?
stderr.close if stderr && !stderr.closed?
end
end # check_ip
def check_domain(name)
# Build the URL
url = ::URI.extract("http://#{name}").first
# If that failed, stop
return unless url
# Check whether the site is reachable
begin
::Timeout::timeout(::Watcher::TIMEOUT) {
req = ::Net::HTTP.get_response(URI(url))
yield(req.is_a?(::Net::HTTPSuccess), 0)
}
rescue ::SocketError
rescue => e
puts "#{e.message}\n"
puts "#{e.backtrace.join('\n')}"
# Some error occurred
yield(false, 0)
end
end # check_domain
end # Watcher
| dancingbytes/sms_watcher | lib/watcher.rb | Ruby | bsd-2-clause | 7,223 |
|
# CONTRIBUTING
New contributors are always welcome! This document will guide you through the
process of setting up a development environment.
[LyricWiki](http://api.wikia.com/wiki/LyricWiki_API/REST) is currently the
only supported API.
[LYRICSnMUSIC](http://www.lyricsnmusic.com/api) used to be an option but it
appears to have been shut down. However, contributions for new API integrations
are especially welcome.
### Instructions
1. Fork the project and clone it on your computer.
1. Make a virtualenv, activate it, and then install the pip dependencies:
$ pip install -r requirements-dev.txt
1. Run tests:
$ make test
pytest tests/
============================= test session starts ==============================
platform darwin -- Python 2.7.10, pytest-3.0.1, py-1.4.31, pluggy-0.3.1
rootdir: /path/to/songtext, inifile:
collected 16 items
tests/test_api_integrations.py ..............
tests/test_local_options.py ..
========================== 16 passed in 6.42 seconds ===========================
1. Follow the [MuseScore Git workflow](http://musescore.org/en/developers-handbook/git-workflow)
as a guide to setting remotes, branching, committing, making pull requests,
etc.
### Writing your own integration
* Add your package to `libsongtext/` and then the package name to
`libsongtext/songtext.py`, in the `AVAILABLE_APIS` variable.
* To query your new API, use the `--api` option:
$ songtext --api my-spiffy-api ...
* To query your new API by default, set the `SONGTEXT_DEFAULT_API`
environment variable and add this line somewhere sensible (e.g. `.bashrc` or
`.bash_profile` or some other shell startup file):
export SONGTEXT_DEFAULT_API=my-spiffy-api
*Params:*
Other than the ones already used in LyricWiki (artist, song title), it is
also possible to make use of other params already defined in click:
* `all_fields` - this is for generic search (e.g. on artist name, song name, and lyrics). Search like so:
$ songtext spice girls stop
* `words` - for searching on lyric text. Search using the `-w` or `--words`
flag:
$ songtext -w 'sleeping is giving in'
* `list` - to return a list of results in order to refine your search (e.g.
if the lyrics returned were for the wrong song, or the requested lyrics
weren't available for some reason). Search using the `-l`/`--show-list` option:
$ songtext -t colors -l
40 track(s) matched your search query.
Displaying the top 10 matches:
...
* `index` - for specifying the song that you want to select when searching
using a list. Search using the `-i`/`--index` option:
$ songtext -t colors -i 7
40 track(s) matched your search query.
Halsey: Colors
--------------
Your little brother never tells you but he loves you so
You said your mother only smiled on her TV show
You're only happy when your sorry head is filled with dope
I hope you make it to the day you're 28 years old
* `limit` - limit the number of matches returned in a list. Search using
the `--limit` option:
$ songtext zayn befour -l --limit 5
13 track(s) matched your search query.
Displaying the top 5 matches:
...
### Tips
* Modify `README.md` and then run `make readme` to convert it to `README.rst`
for PyPI.
* Copy the git hook from `git-hooks/pre-commit` to `.git/hooks/pre-commit`,
which makes sure that you commit *both* `README.md` and `README.rst` whenever
a change is made to `README.md`.
|
ysim/songtext
|
CONTRIBUTING.md
|
Markdown
|
bsd-2-clause
| 3,604
|
!< FURY test of [[qreal]].
module dBm_to_mW_converter
!-----------------------------------------------------------------------------------------------------------------------------------
!< Define the converter (user-supplied) from dBm to mW.
!-----------------------------------------------------------------------------------------------------------------------------------
use fury
!-----------------------------------------------------------------------------------------------------------------------------------
!-----------------------------------------------------------------------------------------------------------------------------------
implicit none
private
public :: dBm_to_mW
!-----------------------------------------------------------------------------------------------------------------------------------
!-----------------------------------------------------------------------------------------------------------------------------------
type, extends(uom_converter) :: dBm_to_mW
!< Converter (user-supplied) from dBm to mW.
contains
procedure, nopass :: convert_float128 !< User-supplied conversion formulas from dBm to mW (and viceversa), float128.
procedure, nopass :: convert_float64 !< User-supplied conversion formulas from dBm to mW (and viceversa), float64.
procedure, nopass :: convert_float32 !< User-supplied conversion formulas from dBm to mW (and viceversa), float32.
procedure, pass(lhs) :: assign_converter !< `converter = converter` assignment.
endtype dBm_to_mW
!-----------------------------------------------------------------------------------------------------------------------------------
contains
pure function convert_float128(magnitude, inverse) result(converted)
!---------------------------------------------------------------------------------------------------------------------------------
!< User-supplied conversion formulas from dBm to mW (and viceversa), float128.
!---------------------------------------------------------------------------------------------------------------------------------
real(R16P), intent(in) :: magnitude !< Magnitude (of the quantity) to be converted.
logical , intent(in), optional :: inverse !< Activate inverse conversion.
real(R16P) :: converted !< Converted magnitude.
logical :: inverse_ !< Activate inverse conversion, local variable.
!---------------------------------------------------------------------------------------------------------------------------------
!---------------------------------------------------------------------------------------------------------------------------------
inverse_ = .false. ; if (present(inverse)) inverse_ = inverse
if (inverse_) then
converted = 10._R16P * log10(magnitude)
else
converted = 10._R16P ** (magnitude / 10._R16P)
endif
!---------------------------------------------------------------------------------------------------------------------------------
endfunction convert_float128
pure function convert_float64(magnitude, inverse) result(converted)
!---------------------------------------------------------------------------------------------------------------------------------
!< User-supplied conversion formulas from dBm to mW (and viceversa), float64.
!---------------------------------------------------------------------------------------------------------------------------------
real(R8P), intent(in) :: magnitude !< Magnitude (of the quantity) to be converted.
logical, intent(in), optional :: inverse !< Activate inverse conversion.
real(R8P) :: converted !< Converted magnitude.
logical :: inverse_ !< Activate inverse conversion, local variable.
!---------------------------------------------------------------------------------------------------------------------------------
!---------------------------------------------------------------------------------------------------------------------------------
inverse_ = .false. ; if (present(inverse)) inverse_ = inverse
if (inverse_) then
converted = 10._R8P * log10(magnitude)
else
converted = 10._R8P ** (magnitude / 10._R8P)
endif
!---------------------------------------------------------------------------------------------------------------------------------
endfunction convert_float64
pure function convert_float32(magnitude, inverse) result(converted)
!---------------------------------------------------------------------------------------------------------------------------------
!< User-supplied conversion formulas from dBm to mW (and viceversa), float32.
!---------------------------------------------------------------------------------------------------------------------------------
real(R4P), intent(in) :: magnitude !< Magnitude (of the quantity) to be converted.
logical, intent(in), optional :: inverse !< Activate inverse conversion.
real(R4P) :: converted !< Converted magnitude.
logical :: inverse_ !< Activate inverse conversion, local variable.
!---------------------------------------------------------------------------------------------------------------------------------
!---------------------------------------------------------------------------------------------------------------------------------
inverse_ = .false. ; if (present(inverse)) inverse_ = inverse
if (inverse_) then
converted = 10._R4P * log10(magnitude)
else
converted = 10._R4P ** (magnitude / 10._R4P)
endif
!---------------------------------------------------------------------------------------------------------------------------------
endfunction convert_float32
pure subroutine assign_converter(lhs, rhs)
!---------------------------------------------------------------------------------------------------------------------------------
!< `converter = converter` assignment.
!---------------------------------------------------------------------------------------------------------------------------------
class(dBm_to_mW), intent(inout) :: lhs !< Left hand side.
class(uom_converter), intent(in) :: rhs !< Right hand side.
!---------------------------------------------------------------------------------------------------------------------------------
!---------------------------------------------------------------------------------------------------------------------------------
select type(rhs)
class is (dBm_to_mW)
lhs = rhs
endselect
!---------------------------------------------------------------------------------------------------------------------------------
endsubroutine assign_converter
endmodule dBm_to_mW_converter
program fury_test_qreal_conversions_complex
!-----------------------------------------------------------------------------------------------------------------------------------
!< FURY test of [[qreal]].
!-----------------------------------------------------------------------------------------------------------------------------------
use dBm_to_mW_converter
use fury
!-----------------------------------------------------------------------------------------------------------------------------------
!-----------------------------------------------------------------------------------------------------------------------------------
type(uom64) :: dBm !< dBm unit.
type(uom64) :: mW !< mW unit.
type(uom64) :: kelvin !< Kelvin unit.
type(uom64) :: celsius !< Celsius unit.
type(qreal64) :: q1 !< A quantity.
type(qreal64) :: q2 !< A quantity.
type(qreal64) :: q3 !< A quantity.
type(dBm_to_mW) :: dBm2mW !< Converter from dBm to mW.
logical :: test_passed(4) !< List of passed tests.
!-----------------------------------------------------------------------------------------------------------------------------------
!-----------------------------------------------------------------------------------------------------------------------------------
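! Exercise the user-supplied dBm <-> mW converter and the built-in kelvin <-> celsius conversion.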
test_passed = .false.
dBm = uom64('dBm = @user mW')
mW = uom64('mW')
call dBm%set_alias_conversion(reference_index=1, alias_index=2, convert=dBm2mW)
q1 = 10. * dBm
q2 = q1%to(unit=mW)
test_passed(1) = q2%stringify(format='(F4.1)')=='10.0 mW'
print "(A,L1)", '10.0 dBm = '//q2%stringify(format='(F4.1)')//', is correct? ', test_passed(1)
call q1%unset
call q2%unset
q1 = 10. * mW
q2 = q1%to(unit=dBm)
test_passed(2) = q2%stringify(format='(F4.1)')=='10.0 dBm'
print "(A,L1)", '10.0 mW = '//q2%stringify(format='(F4.1)')//', is correct? ', test_passed(2)
kelvin = uom64('K')
celsius = uom64('degC<=273.15 + K=celsius>')
call q1%unset
call q2%unset
q1 = 2 * kelvin
q2 = 1 * celsius
q3 = q1 - q2%to(kelvin)
test_passed(3) = q3%stringify(format='(F7.2)')=='-272.15 K'
print "(A,L1)", '2 K - 1 celsius = '//q3%stringify(format='(F7.2)')//', is correct? ', test_passed(3)
call q3%unset
q3 = q2 - q1%to(celsius)
test_passed(4) = q3%stringify(format='(F6.2)')=='272.15 degC'
print "(A,L1)", '1 celsius - 2 K = '//q3%stringify(format='(F6.2)')//', is correct? ', test_passed(4)
print "(A,L1)", new_line('a')//'Are all tests passed? ', all(test_passed)
stop
!-----------------------------------------------------------------------------------------------------------------------------------
endprogram fury_test_qreal_conversions_complex
|
szaghi/FURY
|
src/tests/fury_test_qreal_conversions_complex.f90
|
FORTRAN
|
bsd-2-clause
| 9,539
|
/**
* LinkInfoType.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.6.2 Built on : Apr 17, 2012 (05:34:40 IST)
*/
package gov.nih.nlm.ncbi.www.soap.eutils.elink;
/**
* LinkInfoType bean class
*/
@SuppressWarnings({"unchecked","unused"})
public class LinkInfoType
implements org.apache.axis2.databinding.ADBBean{
/* This type was generated from the piece of schema that had
name = LinkInfoType
Namespace URI = http://www.ncbi.nlm.nih.gov/soap/eutils/elink
Namespace Prefix = ns5
*/
/**
* field for DbTo
*/
protected java.lang.String localDbTo ;
/**
* Auto generated getter method
* @return java.lang.String
*/
public java.lang.String getDbTo(){
return localDbTo;
}
/**
* Auto generated setter method
* @param param DbTo
*/
public void setDbTo(java.lang.String param){
this.localDbTo=param;
}
/**
* field for LinkName
*/
protected java.lang.String localLinkName ;
/**
* Auto generated getter method
* @return java.lang.String
*/
public java.lang.String getLinkName(){
return localLinkName;
}
/**
* Auto generated setter method
* @param param LinkName
*/
public void setLinkName(java.lang.String param){
this.localLinkName=param;
}
/**
* field for MenuTag
*/
protected java.lang.String localMenuTag ;
/* This tracker boolean will be used to detect whether the user called the set method
* for this attribute. It will be used to determine whether to include this field
* in the serialized XML
*/
protected boolean localMenuTagTracker = false ;
public boolean isMenuTagSpecified(){
return localMenuTagTracker;
}
/**
* Auto generated getter method
* @return java.lang.String
*/
public java.lang.String getMenuTag(){
return localMenuTag;
}
/**
* Auto generated setter method
* @param param MenuTag
*/
public void setMenuTag(java.lang.String param){
localMenuTagTracker = param != null;
this.localMenuTag=param;
}
/**
* field for HtmlTag
*/
protected java.lang.String localHtmlTag ;
/* This tracker boolean will be used to detect whether the user called the set method
* for this attribute. It will be used to determine whether to include this field
* in the serialized XML
*/
protected boolean localHtmlTagTracker = false ;
public boolean isHtmlTagSpecified(){
return localHtmlTagTracker;
}
/**
* Auto generated getter method
* @return java.lang.String
*/
public java.lang.String getHtmlTag(){
return localHtmlTag;
}
/**
* Auto generated setter method
* @param param HtmlTag
*/
public void setHtmlTag(java.lang.String param){
localHtmlTagTracker = param != null;
this.localHtmlTag=param;
}
/**
* field for Url
*/
protected gov.nih.nlm.ncbi.www.soap.eutils.elink.UrlType localUrl ;
/* This tracker boolean will be used to detect whether the user called the set method
* for this attribute. It will be used to determine whether to include this field
* in the serialized XML
*/
protected boolean localUrlTracker = false ;
public boolean isUrlSpecified(){
return localUrlTracker;
}
/**
* Auto generated getter method
* @return gov.nih.nlm.ncbi.www.soap.eutils.elink.UrlType
*/
public gov.nih.nlm.ncbi.www.soap.eutils.elink.UrlType getUrl(){
return localUrl;
}
/**
* Auto generated setter method
* @param param Url
*/
public void setUrl(gov.nih.nlm.ncbi.www.soap.eutils.elink.UrlType param){
localUrlTracker = param != null;
this.localUrl=param;
}
/**
* field for Priority
*/
protected java.lang.String localPriority ;
/**
* Auto generated getter method
* @return java.lang.String
*/
public java.lang.String getPriority(){
return localPriority;
}
/**
* Auto generated setter method
* @param param Priority
*/
public void setPriority(java.lang.String param){
this.localPriority=param;
}
/**
*
* @param parentQName
* @param factory
* @return org.apache.axiom.om.OMElement
*/
public org.apache.axiom.om.OMElement getOMElement (
final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{
org.apache.axiom.om.OMDataSource dataSource =
new org.apache.axis2.databinding.ADBDataSource(this,parentQName);
return factory.createOMElement(dataSource,parentQName);
}
public void serialize(final javax.xml.namespace.QName parentQName,
javax.xml.stream.XMLStreamWriter xmlWriter)
throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
serialize(parentQName,xmlWriter,false);
}
public void serialize(final javax.xml.namespace.QName parentQName,
javax.xml.stream.XMLStreamWriter xmlWriter,
boolean serializeType)
throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
java.lang.String prefix = null;
java.lang.String namespace = null;
prefix = parentQName.getPrefix();
namespace = parentQName.getNamespaceURI();
writeStartElement(prefix, namespace, parentQName.getLocalPart(), xmlWriter);
if (serializeType){
java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://www.ncbi.nlm.nih.gov/soap/eutils/elink");
if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){
writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
namespacePrefix+":LinkInfoType",
xmlWriter);
} else {
writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
"LinkInfoType",
xmlWriter);
}
}
namespace = "http://www.ncbi.nlm.nih.gov/soap/eutils/elink";
writeStartElement(null, namespace, "DbTo", xmlWriter);
if (localDbTo==null){
// write the nil attribute
throw new org.apache.axis2.databinding.ADBException("DbTo cannot be null!!");
}else{
xmlWriter.writeCharacters(localDbTo);
}
xmlWriter.writeEndElement();
namespace = "http://www.ncbi.nlm.nih.gov/soap/eutils/elink";
writeStartElement(null, namespace, "LinkName", xmlWriter);
if (localLinkName==null){
// write the nil attribute
throw new org.apache.axis2.databinding.ADBException("LinkName cannot be null!!");
}else{
xmlWriter.writeCharacters(localLinkName);
}
xmlWriter.writeEndElement();
if (localMenuTagTracker){
namespace = "http://www.ncbi.nlm.nih.gov/soap/eutils/elink";
writeStartElement(null, namespace, "MenuTag", xmlWriter);
if (localMenuTag==null){
// write the nil attribute
throw new org.apache.axis2.databinding.ADBException("MenuTag cannot be null!!");
}else{
xmlWriter.writeCharacters(localMenuTag);
}
xmlWriter.writeEndElement();
} if (localHtmlTagTracker){
namespace = "http://www.ncbi.nlm.nih.gov/soap/eutils/elink";
writeStartElement(null, namespace, "HtmlTag", xmlWriter);
if (localHtmlTag==null){
// write the nil attribute
throw new org.apache.axis2.databinding.ADBException("HtmlTag cannot be null!!");
}else{
xmlWriter.writeCharacters(localHtmlTag);
}
xmlWriter.writeEndElement();
} if (localUrlTracker){
if (localUrl==null){
throw new org.apache.axis2.databinding.ADBException("Url cannot be null!!");
}
localUrl.serialize(new javax.xml.namespace.QName("http://www.ncbi.nlm.nih.gov/soap/eutils/elink","Url"),
xmlWriter);
}
namespace = "http://www.ncbi.nlm.nih.gov/soap/eutils/elink";
writeStartElement(null, namespace, "Priority", xmlWriter);
if (localPriority==null){
// write the nil attribute
throw new org.apache.axis2.databinding.ADBException("Priority cannot be null!!");
}else{
xmlWriter.writeCharacters(localPriority);
}
xmlWriter.writeEndElement();
xmlWriter.writeEndElement();
}
private static java.lang.String generatePrefix(java.lang.String namespace) {
if(namespace.equals("http://www.ncbi.nlm.nih.gov/soap/eutils/elink")){
return "ns5";
}
return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}
/**
* Utility method to write an element start tag.
*/
private void writeStartElement(java.lang.String prefix, java.lang.String namespace, java.lang.String localPart,
javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
if (writerPrefix != null) {
xmlWriter.writeStartElement(namespace, localPart);
} else {
if (namespace.length() == 0) {
prefix = "";
} else if (prefix == null) {
prefix = generatePrefix(namespace);
}
xmlWriter.writeStartElement(prefix, localPart, namespace);
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
}
}
/**
* Util method to write an attribute with the ns prefix
*/
private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
if (xmlWriter.getPrefix(namespace) == null) {
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
}
xmlWriter.writeAttribute(namespace,attName,attValue);
}
/**
* Util method to write an attribute without the ns prefix
*/
private void writeAttribute(java.lang.String namespace,java.lang.String attName,
java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
if (namespace.equals("")) {
xmlWriter.writeAttribute(attName,attValue);
} else {
registerPrefix(xmlWriter, namespace);
xmlWriter.writeAttribute(namespace,attName,attValue);
}
}
/**
* Util method to write an attribute without the ns prefix
*/
private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
java.lang.String attributeNamespace = qname.getNamespaceURI();
java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
if (attributePrefix == null) {
attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
}
java.lang.String attributeValue;
if (attributePrefix.trim().length() > 0) {
attributeValue = attributePrefix + ":" + qname.getLocalPart();
} else {
attributeValue = qname.getLocalPart();
}
if (namespace.equals("")) {
xmlWriter.writeAttribute(attName, attributeValue);
} else {
registerPrefix(xmlWriter, namespace);
xmlWriter.writeAttribute(namespace, attName, attributeValue);
}
}
/**
* method to handle Qnames
*/
private void writeQName(javax.xml.namespace.QName qname,
javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
java.lang.String namespaceURI = qname.getNamespaceURI();
if (namespaceURI != null) {
java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
if (prefix == null) {
prefix = generatePrefix(namespaceURI);
xmlWriter.writeNamespace(prefix, namespaceURI);
xmlWriter.setPrefix(prefix,namespaceURI);
}
if (prefix.trim().length() > 0){
xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
} else {
// i.e this is the default namespace
xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
}
} else {
xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
}
}
private void writeQNames(javax.xml.namespace.QName[] qnames,
javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
if (qnames != null) {
// we have to store this data until the last moment since it is not possible to write any
// namespace data after writing the character data
java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
java.lang.String namespaceURI = null;
java.lang.String prefix = null;
for (int i = 0; i < qnames.length; i++) {
if (i > 0) {
stringToWrite.append(" ");
}
namespaceURI = qnames[i].getNamespaceURI();
if (namespaceURI != null) {
prefix = xmlWriter.getPrefix(namespaceURI);
if ((prefix == null) || (prefix.length() == 0)) {
prefix = generatePrefix(namespaceURI);
xmlWriter.writeNamespace(prefix, namespaceURI);
xmlWriter.setPrefix(prefix,namespaceURI);
}
if (prefix.trim().length() > 0){
stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
} else {
stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
}
} else {
stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
}
}
xmlWriter.writeCharacters(stringToWrite.toString());
}
}
/**
* Register a namespace prefix
*/
private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
java.lang.String prefix = xmlWriter.getPrefix(namespace);
if (prefix == null) {
prefix = generatePrefix(namespace);
javax.xml.namespace.NamespaceContext nsContext = xmlWriter.getNamespaceContext();
while (true) {
java.lang.String uri = nsContext.getNamespaceURI(prefix);
if (uri == null || uri.length() == 0) {
break;
}
prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
}
return prefix;
}
/**
* databinding method to get an XML representation of this object
*
*/
public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
throws org.apache.axis2.databinding.ADBException{
java.util.ArrayList elementList = new java.util.ArrayList();
java.util.ArrayList attribList = new java.util.ArrayList();
elementList.add(new javax.xml.namespace.QName("http://www.ncbi.nlm.nih.gov/soap/eutils/elink",
"DbTo"));
if (localDbTo != null){
elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localDbTo));
} else {
throw new org.apache.axis2.databinding.ADBException("DbTo cannot be null!!");
}
elementList.add(new javax.xml.namespace.QName("http://www.ncbi.nlm.nih.gov/soap/eutils/elink",
"LinkName"));
if (localLinkName != null){
elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localLinkName));
} else {
throw new org.apache.axis2.databinding.ADBException("LinkName cannot be null!!");
}
if (localMenuTagTracker){
elementList.add(new javax.xml.namespace.QName("http://www.ncbi.nlm.nih.gov/soap/eutils/elink",
"MenuTag"));
if (localMenuTag != null){
elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localMenuTag));
} else {
throw new org.apache.axis2.databinding.ADBException("MenuTag cannot be null!!");
}
} if (localHtmlTagTracker){
elementList.add(new javax.xml.namespace.QName("http://www.ncbi.nlm.nih.gov/soap/eutils/elink",
"HtmlTag"));
if (localHtmlTag != null){
elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localHtmlTag));
} else {
throw new org.apache.axis2.databinding.ADBException("HtmlTag cannot be null!!");
}
} if (localUrlTracker){
elementList.add(new javax.xml.namespace.QName("http://www.ncbi.nlm.nih.gov/soap/eutils/elink",
"Url"));
if (localUrl==null){
throw new org.apache.axis2.databinding.ADBException("Url cannot be null!!");
}
elementList.add(localUrl);
}
elementList.add(new javax.xml.namespace.QName("http://www.ncbi.nlm.nih.gov/soap/eutils/elink",
"Priority"));
if (localPriority != null){
elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localPriority));
} else {
throw new org.apache.axis2.databinding.ADBException("Priority cannot be null!!");
}
return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
}
/**
* Factory class that keeps the parse method
*/
public static class Factory{
/**
* static method to create the object
* Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
* If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
* Postcondition: If this object is an element, the reader is positioned at its end element
* If this object is a complex type, the reader is positioned at the end element of its outer element
*/
public static LinkInfoType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
LinkInfoType object =
new LinkInfoType();
int event;
java.lang.String nillableValue = null;
java.lang.String prefix ="";
java.lang.String namespaceuri ="";
try {
while (!reader.isStartElement() && !reader.isEndElement())
reader.next();
if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){
java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
"type");
if (fullTypeName!=null){
java.lang.String nsPrefix = null;
if (fullTypeName.indexOf(":") > -1){
nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":"));
}
nsPrefix = nsPrefix==null?"":nsPrefix;
java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1);
if (!"LinkInfoType".equals(type)){
//find namespace for the prefix
java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
return (LinkInfoType)gov.nih.nlm.ncbi.www.soap.eutils.egquery.ExtensionMapper.getTypeObject(
nsUri,type,reader);
}
}
}
// Note all attributes that were handled. Used to differ normal attributes
// from anyAttributes.
java.util.Vector handledAttributes = new java.util.Vector();
reader.next();
while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
if (reader.isStartElement() && new javax.xml.namespace.QName("http://www.ncbi.nlm.nih.gov/soap/eutils/elink","DbTo").equals(reader.getName())){
nillableValue = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","nil");
if ("true".equals(nillableValue) || "1".equals(nillableValue)){
throw new org.apache.axis2.databinding.ADBException("The element: "+"DbTo" +" cannot be null");
}
java.lang.String content = reader.getElementText();
object.setDbTo(
org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
reader.next();
} // End of if for expected property start element
else{
// A start element we are not expecting indicates an invalid parameter was passed
throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getName());
}
while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
if (reader.isStartElement() && new javax.xml.namespace.QName("http://www.ncbi.nlm.nih.gov/soap/eutils/elink","LinkName").equals(reader.getName())){
nillableValue = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","nil");
if ("true".equals(nillableValue) || "1".equals(nillableValue)){
throw new org.apache.axis2.databinding.ADBException("The element: "+"LinkName" +" cannot be null");
}
java.lang.String content = reader.getElementText();
object.setLinkName(
org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
reader.next();
} // End of if for expected property start element
else{
// A start element we are not expecting indicates an invalid parameter was passed
throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getName());
}
while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
if (reader.isStartElement() && new javax.xml.namespace.QName("http://www.ncbi.nlm.nih.gov/soap/eutils/elink","MenuTag").equals(reader.getName())){
nillableValue = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","nil");
if ("true".equals(nillableValue) || "1".equals(nillableValue)){
throw new org.apache.axis2.databinding.ADBException("The element: "+"MenuTag" +" cannot be null");
}
java.lang.String content = reader.getElementText();
object.setMenuTag(
org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
reader.next();
} // End of if for expected property start element
else {
}
while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
if (reader.isStartElement() && new javax.xml.namespace.QName("http://www.ncbi.nlm.nih.gov/soap/eutils/elink","HtmlTag").equals(reader.getName())){
nillableValue = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","nil");
if ("true".equals(nillableValue) || "1".equals(nillableValue)){
throw new org.apache.axis2.databinding.ADBException("The element: "+"HtmlTag" +" cannot be null");
}
java.lang.String content = reader.getElementText();
object.setHtmlTag(
org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
reader.next();
} // End of if for expected property start element
else {
}
while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
if (reader.isStartElement() && new javax.xml.namespace.QName("http://www.ncbi.nlm.nih.gov/soap/eutils/elink","Url").equals(reader.getName())){
object.setUrl(gov.nih.nlm.ncbi.www.soap.eutils.elink.UrlType.Factory.parse(reader));
reader.next();
} // End of if for expected property start element
else {
}
while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
if (reader.isStartElement() && new javax.xml.namespace.QName("http://www.ncbi.nlm.nih.gov/soap/eutils/elink","Priority").equals(reader.getName())){
nillableValue = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","nil");
if ("true".equals(nillableValue) || "1".equals(nillableValue)){
throw new org.apache.axis2.databinding.ADBException("The element: "+"Priority" +" cannot be null");
}
java.lang.String content = reader.getElementText();
object.setPriority(
org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
reader.next();
} // End of if for expected property start element
else{
// A start element we are not expecting indicates an invalid parameter was passed
throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getName());
}
while (!reader.isStartElement() && !reader.isEndElement())
reader.next();
if (reader.isStartElement())
// A start element we are not expecting indicates a trailing invalid property
throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getName());
} catch (javax.xml.stream.XMLStreamException e) {
throw new java.lang.Exception(e);
}
return object;
}
}//end of factory class
}
|
milot-mirdita/GeMuDB
|
Vendor/NCBI eutils/src/gov/nih/nlm/ncbi/www/soap/eutils/elink/LinkInfoType.java
|
Java
|
bsd-2-clause
| 42,843
|
---
title: "Operators"
section: "Expressions"
menu:
toc:
parent: "expressions"
weight: 30
toc: true
---
## Infix Operators
Infix operators take two operands and are written between those operands. Arithmetic and comparison operators are the most common:
```pony
1 + 2
a < b
```
Pony has pretty much the same set of infix operators as other languages.
## Operator aliasing
Most infix operators in Pony are actually aliases for functions. The left operand is the receiver the function is called on and the right operand is passed as an argument. For example, the following expressions are equivalent:
```pony
x + y
x.add(y)
```
This means that `+` is not a special symbol that can only be applied to magic types. Any type can provide its own `add` function and the programmer can then use `+` with that type if they want to.
When defining your own `add` function there is no restriction on the types of the parameter or the return type. The right side of the `+` will have to match the parameter type and the whole `+` expression will have the type that `add` returns.
Here's a full example for defining a type which allows the use of `+`. This is all you need:
```pony
// Define a suitable type
class Pair
var _x: U32 = 0
var _y: U32 = 0
new create(x: U32, y: U32) =>
_x = x
_y = y
// Define a + function
fun add(other: Pair): Pair =>
Pair(_x + other._x, _y + other._y)
// Now let's use it
class Foo
fun foo() =>
var x = Pair(1, 2)
var y = Pair(3, 4)
var z = x + y
```
It is possible to overload infix operators to some degree using union types or f-bounded polymorphism, but this is beyond the scope of this tutorial. See the Pony standard library for further information.
You do not have to worry about any of this if you don't want to. You can simply use the existing infix operators for numbers just like any other language and not provide them for your own types.
The full list of infix operators that are aliases for functions is:
---
Operator | Method | Description | Note
-----------|----------------|---------------------------------|---------------
`+` | add() | Addition |
`-` | sub() | Subtraction |
`*` | mul() | Multiplication |
`/` | div() | Division |
`%` | rem() | Remainder |
`%%` | mod() | Modulo | Starting with version `0.26.1`
`<<` | shl() | Left bit shift |
`>>` | shr() | Right bit shift |
`and` | op_and() | And, both bitwise and logical |
`or` | op_or() | Or, both bitwise and logical |
`xor` | op_xor() | Xor, both bitwise and logical |
`==` | eq() | Equality |
`!=` | ne() | Non-equality |
`<` | lt() | Less than |
`<=` | le() | Less than or equal |
`>=` | ge() | Greater than or equal |
`>` | gt() | Greater than |
`>~` | gt_unsafe() | Unsafe greater than |
`+~` | add_unsafe() | Unsafe Addition |
`-~` | sub_unsafe() | Unsafe Subtraction |
`*~` | mul_unsafe() | Unsafe Multiplication |
`/~` | div_unsafe() | Unsafe Division |
`%~` | rem_unsafe() | Unsafe Remainder |
`%%~` | mod_unsafe() | Unsafe Modulo | Starting with version `0.26.1`
`<<~` | shl_unsafe() | Unsafe left bit shift |
`>>~` | shr_unsafe() | Unsafe right bit shift |
`==~` | eq_unsafe() | Unsafe equality |
`!=~` | ne_unsafe() | Unsafe non-equality |
`<~` | lt_unsafe() | Unsafe less than |
`<=~` | le_unsafe() | Unsafe less than or equal |
`>=~` | ge_unsafe() | Unsafe greater than or equal |
`+?` | add_partial()? | Partial Addition |
`-?` | sub_partial()? | Partial Subtraction |
`*?` | mul_partial()? | Partial Multiplication |
`/?` | div_partial()? | Partial Division |
`%?` | rem_partial()? | Partial Remainder |
`%%?` | mod_partial()? | Partial Modulo | Starting with version `0.26.1`
---
## Short circuiting
The `and` and `or` operators use __short circuiting__ when used with Bool variables. This means that the first operand is always evaluated, but the second is only evaluated if it can affect the result.
For `and`, if the first operand is __false__ then the second operand is not evaluated since it cannot affect the result.
For `or`, if the first operand is __true__ then the second operand is not evaluated since it cannot affect the result.
This is a special feature built into the compiler; it cannot be obtained through operator aliasing for any other type.
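For example, here is a minimal sketch of that behaviour (the `ShortCircuitDemo` class and its `noisy` method are invented for illustration and are not part of the tutorial or the standard library); a counter makes the skipped evaluation visible:
```pony
class ShortCircuitDemo
  var _evaluations: U32 = 0

  fun ref noisy(): Bool =>
    // The side effect lets us observe whether this operand was evaluated.
    _evaluations = _evaluations + 1
    true

  fun ref run(flag: Bool): U32 =>
    if flag and noisy() then None end // noisy() runs only when flag is true
    if flag or noisy() then None end  // noisy() runs only when flag is false
    _evaluations // always 1: only one of the two calls above was evaluated
```
Whichever `Bool` is passed to `run`, `_evaluations` ends up at 1, because in each condition only the operand that can still affect the result is evaluated.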
## Unary operators
The unary operators are handled in the same manner, but with only one operand. For example, the following expressions are equivalent:
```pony
-x
x.neg()
```
The full list of unary operators that are aliases for functions is:
---
Operator | Method | Description
---------|--------------|------------
- | neg() | Arithmetic negation
not | op_not() | Not, both bitwise and logical
-~ | neg_unsafe() | Unsafe arithmetic negation
---
## Precedence
In Pony, unary operators always bind stronger than any infix operators: `not a == b` will be interpreted as `(not a) == b` instead of `not (a == b)`.
When using infix operators in complex expressions a key question is the __precedence__, i.e. which operator is evaluated first. Given this expression:
```pony
1 + 2 * 3 // Compilation failed.
```
We will get a value of 9 if we evaluate the addition first and 7 if we evaluate the multiplication first. In mathematics, there are rules about the order in which to evaluate operators and most programming languages follow this approach.
The problem with this is that the programmer has to remember the order and people aren't very good at things like that. Most people will remember to do multiplication before addition, but what about left bit shifting versus bitwise and? Sometimes people misremember (or guess wrong) and that leads to bugs. Worse, those bugs are often very hard to spot.
Pony takes a different approach and outlaws infix precedence. Any expression where more than one infix operator is used __must__ use parentheses to remove the ambiguity. If you fail to do this the compiler will complain.
This means that the example above is illegal in Pony and should be rewritten as:
```pony
1 + (2 * 3) // 7
```
Repeated use of a single operator, however, is fine:
```pony
1 + 2 + 3 // 6
```
Meanwhile, mixing unary and infix operators does not need additional parentheses, as unary operators always bind more closely. So if our example above used a negative three:
```pony
1 + 2 * -3 // Compilation failed.
```
We would still need parentheses to remove the ambiguity for our infix operators like we did above, but not for the unary arithmetic negative (`-`):
```pony
1 + (2 * -3) // -5
```
We can see that it makes sense for the unary operator to be applied before either infix operator, since it acts on only a single number in the expression and is therefore never ambiguous.
Unary operators can also be applied to parentheses and act on the result of all operations in those parentheses prior to applying any infix operators outside the parentheses:
```pony
1 + -(2 * -3) // 7
```
|
CausalityLtd/pony-tutorial
|
content/expressions/ops.md
|
Markdown
|
bsd-2-clause
| 7,849
|
from django.db import models
from django.db.models.signals import post_delete, post_save
from django.dispatch import receiver
from django.utils.translation import ugettext_lazy as _
from jsonfield import JSONField
from model_utils import Choices
from model_utils.models import TimeStampedModel
from crate.web.packages.models import Package, Release, ReleaseFile
class Event(TimeStampedModel):
ACTIONS = Choices(
("package_create", _("Package Created")),
("package_delete", _("Package Deleted")),
("release_create", _("Release Created")),
("release_delete", _("Release Deleted")),
("file_add", _("File Added")),
("file_remove", _("File Removed")),
)
package = models.SlugField(max_length=150)
version = models.CharField(max_length=512, blank=True)
action = models.CharField(max_length=25, choices=ACTIONS)
data = JSONField(null=True, blank=True)
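# Signal receivers below translate package, release and release-file changes into history Event rows.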
@receiver(post_save, sender=Package)
def history_package_create(instance, created, **kwargs):
if created:
Event.objects.create(
package=instance.name,
action=Event.ACTIONS.package_create
)
@receiver(post_delete, sender=Package)
def history_package_delete(instance, **kwargs):
Event.objects.create(
package=instance.name,
action=Event.ACTIONS.package_delete
)
@receiver(post_save, sender=Release)
def history_release_update(instance, created, **kwargs):
if created:
Event.objects.create(
package=instance.package.name,
version=instance.version,
action=Event.ACTIONS.release_create
)
if instance.has_changed("hidden"):
if instance.hidden:
Event.objects.create(
package=instance.package.name,
version=instance.version,
action=Event.ACTIONS.release_delete
)
else:
Event.objects.create(
package=instance.package.name,
version=instance.version,
action=Event.ACTIONS.release_create
)
@receiver(post_save, sender=ReleaseFile)
def history_releasefile_update(instance, created, **kwargs):
e = None
if instance.has_changed("hidden"):
if instance.hidden:
e = Event.objects.create(
package=instance.release.package.name,
version=instance.release.version,
action=Event.ACTIONS.file_remove
)
if e is not None:
try:
e.data = {
"filename": instance.filename,
"digest": instance.digest,
"uri": instance.get_absolute_url(),
}
except ValueError:
pass
else:
e.save()
|
crateio/crate.web
|
crate/web/history/models.py
|
Python
|
bsd-2-clause
| 2,810
|
/**
* Datasets module includes some standard toy datasets. In addition, this module also includes various
* random sample generators that can be used to build artificial datasets of controlled size and complexity.
* @module datasets
* @example <caption>Asynchronous loading</caption>
* var qm = require('qminer');
* var datasets = qm.datasets;
*
* // Create clean base
* var base = new qm.Base({ mode: 'createClean' });
*
* // Load Iris dataset in async way
* datasets.loadIris(base, function (err, store) {
* if (err) throw err;
* // Successfully loaded database
* base.close();
* });
*
* @example <caption>Synchronous loading</caption>
* var qm = require('qminer');
* var datasets = qm.datasets;
*
* // Create clean base
* var base = new qm.Base({ mode: 'createClean' });
*
* // Load Iris in sync way.
* var Iris = datasets.loadIrisSync(base);
* // Successfully loaded database
* base.close();
*/
/**
* Loads the Iris dataset in an asynchronous way and passes the created Iris store to the callback.
* @param {module:qm.Base} base
* @param {function} callback - Called with `(err, store)` once the Iris store has been created.
*/
exports.loadIris = function (_base, callback) {
var options = {
file: __dirname + '/data/iris.csv',
store: 'Iris',
base: _base
}
_base.loadCSV(options, function (err) {
if (err) return callback(err);
var Iris = _base.store(options.store);
return callback(null, Iris)
})
}
/**
* Loads the Iris dataset in a synchronous way. Returns a link to the created Iris store.
* @param {module:qm.Base} base
* @returns {module:qm.Store} Store with Iris dataset.
*/
exports.loadIrisSync = function (_base) {
var options = {
file: __dirname + '/data/iris.csv',
store: 'Iris',
base: _base
}
_base.loadCSV(options);
return _base.store(options.store);
}
// Module description
exports.description = function () {
return ("Module includes functions to load and make datasets.");
}
|
blazs/qminer
|
nodedoc/datasetsdoc.js
|
JavaScript
|
bsd-2-clause
| 2,300
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'TtTrip.shape'
db.add_column(u'timetable_tttrip', 'shape',
self.gf('django.db.models.fields.related.ForeignKey')(to=orm['timetable.TtShape'], null=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'TtTrip.shape'
db.delete_column(u'timetable_tttrip', 'shape_id')
models = {
u'timetable.ttshape': {
'Meta': {'object_name': 'TtShape'},
'gtfs_shape_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'points': ('django.db.models.fields.TextField', [], {})
},
u'timetable.ttstop': {
'Meta': {'object_name': 'TtStop'},
'gtfs_stop_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'stop_lat': ('django.db.models.fields.FloatField', [], {}),
'stop_lon': ('django.db.models.fields.FloatField', [], {}),
'stop_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'stop_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
u'timetable.ttstoptime': {
'Meta': {'object_name': 'TtStopTime'},
'exp_arrival': ('django.db.models.fields.DateTimeField', [], {}),
'exp_departure': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'stop': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['timetable.TtStop']"}),
'stop_sequence': ('django.db.models.fields.IntegerField', [], {}),
'trip': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['timetable.TtTrip']"})
},
u'timetable.tttrip': {
'Meta': {'object_name': 'TtTrip'},
'date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'gtfs_trip_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'shape': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['timetable.TtShape']", 'null': 'True'})
}
}
complete_apps = ['timetable']
|
hasadna/OpenTrain
|
webserver/opentrain/timetable/migrations/0013_auto__add_field_tttrip_shape.py
|
Python
|
bsd-3-clause
| 2,893
|
<!-- CUMA Initialize Form -->
<div class="modal fade" id="modal_cuma" tabindex="-1" role="dialog" aria-labelledby="myModalLabel" aria-hidden="true">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
<h4 class="modal-title" id="myModalLabel">New CU Management Assessment Form</h4>
</div>
<?php print Form::open('', array('class'=>'form-horizontal', 'id' => 'newCumaForm', 'role'=>'form', 'action-url' => URL::site('faculty/cuma/new'), 'ajax-url' => URL::site('extras/ajax/check_date')));?>
<div class="modal-body">
<div class="alert alert-danger" style="display:none">
<p class="text-center" id="invalidMessage"></p>
</div>
<div class="form-group">
<label for="cuma-period" class="col-sm-4 control-label">Period</label>
<div class="col-sm-6" id="cuma-period">
<div class="input-daterange input-group">
<input type="text" class="form-control" name="start">
<span class="input-group-addon">-</span>
<input type="text" class="form-control" name="end">
</div>
</div>
</div>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
<?php print Form::submit(NULL, 'Generate', array('type'=>'submit', 'class'=>'btn btn-primary')); ?>
</div>
<?php print Form::close();?>
</div>
</div>
</div>
|
jmgalino/up-oams
|
application/views/faculty/cuma/form/initialize.php
|
PHP
|
bsd-3-clause
| 1,634
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-16 00:13
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('api', '0005_queue_name'),
]
operations = [
migrations.AlterField(
model_name='media',
name='media_service',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='api.MediaService'),
),
]
|
falcaopetri/enqueuer-api
|
api/migrations/0006_auto_20161015_2113.py
|
Python
|
bsd-3-clause
| 543
|
/*
* Copyright (c) 2004-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*/
#include "fboss/agent/SwitchStats.h"
#include "common/stats/ThreadCachedServiceData.h"
#include <folly/String.h>
using facebook::stats::SUM;
namespace facebook { namespace fboss {
const std::string kNameKeySeperator = ".";
const std::string kUp = "up";
const std::string kLinkStateFlap = "link_state.flap";
PortStats::PortStats(PortID portID, std::string portName,
SwitchStats *switchStats)
: portID_(portID),
portName_(portName),
switchStats_(switchStats) {
}
PortStats::~PortStats() {
// clear counter
clearPortStatusCounter();
}
void PortStats::setPortName(const std::string &portName) {
// clear counter
clearPortStatusCounter();
portName_ = portName;
}
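// Most of the per-port counters below simply delegate to the switch-wide
// SwitchStats instance.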
void PortStats::trappedPkt() {
switchStats_->trappedPkt();
}
void PortStats::pktDropped() {
switchStats_->pktDropped();
}
void PortStats::pktBogus() {
switchStats_->pktBogus();
}
void PortStats::pktError() {
switchStats_->pktError();
}
void PortStats::pktUnhandled() {
switchStats_->pktUnhandled();
}
void PortStats::pktToHost(uint32_t bytes) {
switchStats_->pktToHost(bytes);
}
void PortStats::arpPkt() {
switchStats_->arpPkt();
}
void PortStats::arpUnsupported() {
switchStats_->arpUnsupported();
}
void PortStats::arpNotMine() {
switchStats_->arpNotMine();
}
void PortStats::arpRequestRx() {
switchStats_->arpRequestRx();
}
void PortStats::arpRequestTx() {
switchStats_->arpRequestTx();
}
void PortStats::arpReplyRx() {
switchStats_->arpReplyRx();
}
void PortStats::arpReplyTx() {
switchStats_->arpReplyTx();
}
void PortStats::arpBadOp() {
switchStats_->arpBadOp();
}
void PortStats::ipv6NdpPkt() {
switchStats_->ipv6NdpPkt();
}
void PortStats::ipv6NdpBad() {
switchStats_->ipv6NdpBad();
}
void PortStats::ipv4Rx() {
switchStats_->ipv4Rx();
}
void PortStats::ipv4TooSmall() {
switchStats_->ipv4TooSmall();
}
void PortStats::ipv4WrongVer() {
switchStats_->ipv4WrongVer();
}
void PortStats::ipv4Nexthop() {
switchStats_->ipv4Nexthop();
}
void PortStats::ipv4Mine() {
switchStats_->ipv4Mine();
}
void PortStats::ipv4NoArp() {
switchStats_->ipv4NoArp();
}
void PortStats::ipv4TtlExceeded() {
switchStats_->ipv4TtlExceeded();
}
void PortStats::ipv6HopExceeded() {
switchStats_->ipv6HopExceeded();
}
void PortStats::udpTooSmall() {
switchStats_->udpTooSmall();
}
void PortStats::dhcpV4Pkt() {
switchStats_->dhcpV4Pkt();
}
void PortStats::dhcpV4BadPkt() {
switchStats_->dhcpV4BadPkt();
}
void PortStats::dhcpV4DropPkt() {
switchStats_->dhcpV4DropPkt();
}
void PortStats::dhcpV6Pkt() {
switchStats_->dhcpV6Pkt();
}
void PortStats::dhcpV6BadPkt() {
switchStats_->dhcpV6BadPkt();
}
void PortStats::dhcpV6DropPkt() {
switchStats_->dhcpV6DropPkt();
}
void PortStats::linkStateChange() {
// We decided not to maintain the TLTimeseries in PortStats and use tcData()
// to addStatValue based on the key name, because:
// 1) each thread has its own SwitchStats and PortStats
// 2) updating the port name needs to delete the old TLTimeseries in the thread
// that can recognize the name change.
// 3) w/o a global lock like the one in ThreadLocalStats, we might have a
// race condition when two threads want to delete the same TLTimeseries
// from the same thread.
// Using tcData() can make sure we don't have to maintain the lifecycle of
// TLTimeseries and leave ThreadLocalStats do it for us.
if (!portName_.empty()) {
tcData().addStatValue(getCounterKey(kLinkStateFlap), 1, SUM);
}
switchStats_->linkStateChange();
}
void PortStats::ipv4DstLookupFailure() {
switchStats_->ipv4DstLookupFailure();
}
void PortStats::ipv6DstLookupFailure() {
switchStats_->ipv6DstLookupFailure();
}
void PortStats::setPortStatus(bool isUp) {
if (!portName_.empty()) {
tcData().setCounter(getCounterKey(kUp), isUp);
}
}
void PortStats::clearPortStatusCounter() {
if (!portName_.empty()) {
tcData().clearCounter(getCounterKey(kUp));
}
}
std::string PortStats::getCounterKey(const std::string &key) {
return folly::to<std::string>(portName_, kNameKeySeperator, key);
}
}} // facebook::fboss
|
peterlei/fboss
|
fboss/agent/PortStats.cpp
|
C++
|
bsd-3-clause
| 4,405
|
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Reflection;
using System.Text.RegularExpressions;
namespace ShowRenamer.Extensibility
{
/// <summary>
/// Simple plugin loader that will load visible classes that implement <see cref="IPlugin"/> or a subtype.
/// </summary>
public class PluginLoader
{
/// <summary>
/// Load all plugins in the current <see cref="AppDomain"/>
/// </summary>
/// <returns>A collection of loaded plugins.</returns>
public static ICollection<IPlugin> LoadPlugins()
{
Type pluginParentType = typeof(IPlugin);
IEnumerable<Type> visiblePlugins = AppDomain.CurrentDomain.GetAssemblies().SelectMany(s => s.GetTypes()).Where(p => pluginParentType.IsAssignableFrom(p));
Debug.WriteLine($"{visiblePlugins.Count()} visible plugins.");
ICollection<IPlugin> loadedPlugins = new List<IPlugin>();
foreach(Type pluginType in visiblePlugins)
{
if (pluginType == typeof(IPlugin))
{
Debug.WriteLine("IPlugin init skipped.");
continue;
}
try
{
Debug.WriteLine($"Initialising plugin {pluginType.Name} from {pluginType.Assembly.FullName}");
IPlugin pluginInstance = (IPlugin)Activator.CreateInstance(pluginType);
Debug.WriteLine($"Plugin {pluginType.Name} loaded.");
loadedPlugins.Add(pluginInstance);
}
catch(TargetInvocationException)
{
Console.WriteLine("Failed to instantiate plugin {0} from assembly {1}.", pluginType.Name, pluginType.Assembly.FullName);
}
catch(MissingMethodException)
{
Console.WriteLine("Failed to instantiate plugin {0} from assembly {1}; no public constructor.", pluginType.Name, pluginType.Assembly.FullName);
}
}
Debug.WriteLine($"Loaded {loadedPlugins.Count} plugins.");
return loadedPlugins;
}
/// <summary>
/// Load file name providers from the given collection of plugins
/// </summary>
/// <param name="plugins">A collection of plugins to load providers from</param>
/// <returns>All loadable instances of <see cref="IFileNameProvider"/></returns>
public static IEnumerable<IFileNameProvider> LoadFileNameProviders(ICollection<IPlugin> plugins)
{
Debug.WriteLine($"Loading providers from {plugins.Count} plugins.");
List<IFileNameProvider> allProviders = new List<IFileNameProvider>();
// Regexes
IEnumerable<Regex> allPluginRegexes = plugins.SelectMany(r => r.FileNameRegexes);
Debug.WriteLine($"Found {allPluginRegexes.Count()} regexes.");
foreach (Regex r in allPluginRegexes)
{
allProviders.Add(new SimpleRegexMatcher(r));
}
Debug.WriteLine($"{allProviders.Count} providers after regexes.");
// Other providers
allProviders.AddRange(plugins.SelectMany(r => r.FileNameProviders));
Debug.WriteLine($"{allProviders.Count} providers after plugin providers.");
return allProviders;
}
/// <summary>
/// Load file name providers from all loadable plugins.
/// </summary>
/// <returns>All loadable instances of <see cref="IFileNameProvider"/></returns>
public static IEnumerable<IFileNameProvider> LoadFileNameProviders()
{
return LoadFileNameProviders(LoadPlugins());
}
}
}
|
ciarancrocker/ShowRenamer
|
ShowRenamer.Extensibility/PluginLoader.cs
|
C#
|
bsd-3-clause
| 3,819
|
# Short help
def display_summary():
print("{:<13}{}".format( 'rm', "Removes a previously copied SCM Repository" ))
# DOCOPT command line definition
USAGE="""
Removes a previously 'copied' repository
===============================================================================
usage: evie [common-opts] rm [options] <dst> <repo> <origin> <id>
evie [common-opts] rm [options] get-success-msg
evie [common-opts] rm [options] get-error-msg
Arguments:
<dst> PARENT directory for where the package was copied. The
directory is specified as a relative path to the root
of the primary repository.
<repo> Name of the repository to remove
<origin> Path/URL to the repository
<id> Label/Tag/Hash/Version of code to be removed
get-success-msg Returns a SCM specific message that informs the end user
of additional action(s) that may be required when
the command is successful
get-error-msg Returns a SCM specific message that informs the end user
of additional action(s) that may be required when
the command fails
Options:
-p PKGNAME Specifies the Package name if different from the <repo>
name
-b BRANCH Specifies the source branch in <repo>. The use/need
of this option is dependent on the <repo> SCM type.
Options:
-h, --help Display help for this command
Notes:
o The command MUST be run in the root of the primary repository.
o This command only applies to repositories previously mounted using
the 'copy' command.
"""
|
johnttaylor/Outcast
|
bin/scm/rm.py
|
Python
|
bsd-3-clause
| 1,756
|
<?php
class DiscussionsComment extends DataExtension
{
// Extend default member options
public function canView(Member $member)
{
if (!$member) {
$member = Member::currentUser();
}
// Check if this author is on the user's block list
if ($member->BlockedMembers()->exists()) {
if ($member->BlockedMembers()->find("ID", $this->owner->AuthorID)) {
return false;
}
}
return true;
}
public function onBeforeWrite()
{
if ($this->owner->BaseClass == "Discussion" && $this->owner->ID == 0) {
$discussion = Discussion::get()
->byID($this->owner->ParentID);
$discussion_author = $discussion->Author();
$author = Member::get()
->byID($this->owner->AuthorID);
// Get our default email from address
if (Discussion::config()->send_emails_from) {
$from = Discussion::config()->send_emails_from;
} else {
$from = Email::config()->admin_email;
}
// Vars for the emails
$vars = array(
"Title" => $discussion->Title,
"Author" => $author,
"Comment" => $this->owner->Comment,
'Link' => Controller::join_links(
$discussion->Link("view"),
"#comments-holder"
)
);
// Send email to discussion owner
if (
$discussion_author &&
$discussion_author->Email &&
$discussion_author->RecieveCommentEmails &&
($discussion_author->ID != $this->owner->AuthorID)
) {
$subject = _t(
"Discussions.NewCreatedReplySubject",
"{Nickname} replied to your discussion",
null,
array("Nickname" => $author->Nickname)
);
$email = new Email($from, $discussion_author->Email, $subject);
$email->setTemplate('NewCreatedReplyEmail');
$email->populateTemplate($vars);
$email->send();
}
// Send to anyone who liked this, if they want notifications
foreach ($discussion->LikedBy() as $liked) {
if ($liked->RecieveLikedReplyEmails && $liked->Email && ($liked->ID != $author->ID)) {
$subject = _t(
"Discussions.NewLikedReplySubject",
"{Nickname} replied to your liked discussion",
null,
array("Nickname" => $author->Nickname)
);
$email = new Email($from, $liked->Email, $subject);
$email->setTemplate('NewLikedReplyEmail');
$email->populateTemplate($vars);
$email->send();
}
}
}
}
}
|
i-lateral/silverstripe-discussions
|
code/extensions/DiscussionsComment.php
|
PHP
|
bsd-3-clause
| 3,055
|
#!/bin/bash
set -o nounset
set -o errexit
set -o pipefail
# I'm running this on Mac. To install the tools:
# brew install dos2unix
# I installed clang-format by downloading a binary build from http://llvm.org/releases/download.html
# and doing:
# cp ~/Downloads/clang+llvm-3.5.0-macosx-apple-darwin/bin/clang-format /usr/local/bin
# so that it ends up in /usr/local/bin/clang-format
cd src
clang-format -style="{BasedOnStyle: Mozilla, IndentWidth: 4, ColumnLimit: 100}" -i *.cpp *.h
#dos2unix -s -v *.cpp *.h
|
vehar/velociraptor8
|
scripts/clang-fmt.sh
|
Shell
|
bsd-3-clause
| 510
|
#include "talk/base/macsocketserver.h"
#include "talk/base/common.h"
#include "talk/base/logging.h"
#include "talk/base/macasyncsocket.h"
#include "talk/base/macutils.h"
#include "talk/base/thread.h"
namespace talk_base {
///////////////////////////////////////////////////////////////////////////////
// MacBaseSocketServer
///////////////////////////////////////////////////////////////////////////////
MacBaseSocketServer::MacBaseSocketServer() {
}
MacBaseSocketServer::~MacBaseSocketServer() {
}
AsyncSocket* MacBaseSocketServer::CreateAsyncSocket(int type) {
return CreateAsyncSocket(AF_INET, type);
}
AsyncSocket* MacBaseSocketServer::CreateAsyncSocket(int family, int type) {
if (SOCK_STREAM != type)
return NULL;
MacAsyncSocket* socket = new MacAsyncSocket(this, family);
if (!socket->valid()) {
delete socket;
return NULL;
}
return socket;
}
void MacBaseSocketServer::RegisterSocket(MacAsyncSocket* s) {
sockets_.insert(s);
}
void MacBaseSocketServer::UnregisterSocket(MacAsyncSocket* s) {
VERIFY(1 == sockets_.erase(s)); // found 1
}
bool MacBaseSocketServer::SetPosixSignalHandler(int signum,
void (*handler)(int)) {
Dispatcher* dispatcher = signal_dispatcher();
if (!PhysicalSocketServer::SetPosixSignalHandler(signum, handler)) {
return false;
}
// Only register the FD once, when the first custom handler is installed.
if (!dispatcher && (dispatcher = signal_dispatcher())) {
CFFileDescriptorContext ctx = { 0 };
ctx.info = this;
CFFileDescriptorRef desc = CFFileDescriptorCreate(
kCFAllocatorDefault,
dispatcher->GetDescriptor(),
false,
&MacBaseSocketServer::FileDescriptorCallback,
&ctx);
if (!desc) {
return false;
}
CFFileDescriptorEnableCallBacks(desc, kCFFileDescriptorReadCallBack);
CFRunLoopSourceRef ref =
CFFileDescriptorCreateRunLoopSource(kCFAllocatorDefault, desc, 0);
if (!ref) {
CFRelease(desc);
return false;
}
CFRunLoopAddSource(CFRunLoopGetCurrent(), ref, kCFRunLoopCommonModes);
CFRelease(desc);
CFRelease(ref);
}
return true;
}
// Used to disable socket events from waking our message queue when
// process_io is false. Does not disable signal event handling though.
void MacBaseSocketServer::EnableSocketCallbacks(bool enable) {
for (std::set<MacAsyncSocket*>::iterator it = sockets().begin();
it != sockets().end(); ++it) {
if (enable) {
(*it)->EnableCallbacks();
} else {
(*it)->DisableCallbacks();
}
}
}
void MacBaseSocketServer::FileDescriptorCallback(CFFileDescriptorRef fd,
CFOptionFlags flags,
void* context) {
MacBaseSocketServer* this_ss =
reinterpret_cast<MacBaseSocketServer*>(context);
ASSERT(this_ss);
Dispatcher* signal_dispatcher = this_ss->signal_dispatcher();
ASSERT(signal_dispatcher);
signal_dispatcher->OnPreEvent(DE_READ);
signal_dispatcher->OnEvent(DE_READ, 0);
CFFileDescriptorEnableCallBacks(fd, kCFFileDescriptorReadCallBack);
}
///////////////////////////////////////////////////////////////////////////////
// MacCFSocketServer
///////////////////////////////////////////////////////////////////////////////
void WakeUpCallback(void* info) {
MacCFSocketServer* server = static_cast<MacCFSocketServer*>(info);
ASSERT(NULL != server);
server->OnWakeUpCallback();
}
MacCFSocketServer::MacCFSocketServer()
: run_loop_(CFRunLoopGetCurrent()),
wake_up_(NULL) {
CFRunLoopSourceContext ctx;
memset(&ctx, 0, sizeof(ctx));
ctx.info = this;
ctx.perform = &WakeUpCallback;
wake_up_ = CFRunLoopSourceCreate(NULL, 0, &ctx);
ASSERT(NULL != wake_up_);
if (wake_up_) {
CFRunLoopAddSource(run_loop_, wake_up_, kCFRunLoopCommonModes);
}
}
MacCFSocketServer::~MacCFSocketServer() {
if (wake_up_) {
CFRunLoopSourceInvalidate(wake_up_);
CFRelease(wake_up_);
}
}
bool MacCFSocketServer::Wait(int cms, bool process_io) {
ASSERT(CFRunLoopGetCurrent() == run_loop_);
if (!process_io && cms == 0) {
// No op.
return true;
}
if (!process_io) {
// No way to listen to common modes and not get socket events, unless
// we disable each one's callbacks.
EnableSocketCallbacks(false);
}
SInt32 result;
if (kForever == cms) {
do {
// Would prefer to run in a custom mode that only listens to wake_up,
// but we have qtkit sending work to the main thread which is effectively
// blocked here, causing deadlock. Thus listen to the common modes.
// TODO: If QTKit becomes thread safe, do the above.
result = CFRunLoopRunInMode(kCFRunLoopDefaultMode, 10000000, false);
} while (result != kCFRunLoopRunFinished && result != kCFRunLoopRunStopped);
} else {
// TODO: In the case of 0ms wait, this will only process one event, so we
// may want to loop until it returns TimedOut.
CFTimeInterval seconds = cms / 1000.0;
result = CFRunLoopRunInMode(kCFRunLoopDefaultMode, seconds, false);
}
if (!process_io) {
// Reenable them. Hopefully this won't cause spurious callbacks or
// missing ones while they were disabled.
EnableSocketCallbacks(true);
}
if (kCFRunLoopRunFinished == result) {
return false;
}
return true;
}
void MacCFSocketServer::WakeUp() {
if (wake_up_) {
CFRunLoopSourceSignal(wake_up_);
CFRunLoopWakeUp(run_loop_);
}
}
void MacCFSocketServer::OnWakeUpCallback() {
ASSERT(run_loop_ == CFRunLoopGetCurrent());
CFRunLoopStop(run_loop_);
}
///////////////////////////////////////////////////////////////////////////////
// MacCarbonSocketServer
///////////////////////////////////////////////////////////////////////////////
const UInt32 kEventClassSocketServer = 'MCSS';
const UInt32 kEventWakeUp = 'WAKE';
const EventTypeSpec kEventWakeUpSpec[] = {
{ kEventClassSocketServer, kEventWakeUp }
};
MacCarbonSocketServer::MacCarbonSocketServer()
: event_queue_(GetCurrentEventQueue()), wake_up_(NULL) {
VERIFY(noErr == CreateEvent(NULL, kEventClassSocketServer, kEventWakeUp, 0,
kEventAttributeUserEvent, &wake_up_));
}
MacCarbonSocketServer::~MacCarbonSocketServer() {
if (wake_up_) {
ReleaseEvent(wake_up_);
}
}
bool MacCarbonSocketServer::Wait(int cms, bool process_io) {
ASSERT(GetCurrentEventQueue() == event_queue_);
// Listen to all events if we're processing I/O.
// Only listen for our wakeup event if we're not.
UInt32 num_types = 0;
const EventTypeSpec* events = NULL;
if (!process_io) {
num_types = GetEventTypeCount(kEventWakeUpSpec);
events = kEventWakeUpSpec;
}
EventTargetRef target = GetEventDispatcherTarget();
EventTimeout timeout =
(kForever == cms) ? kEventDurationForever : cms / 1000.0;
EventTimeout end_time = GetCurrentEventTime() + timeout;
bool done = false;
while (!done) {
EventRef event;
OSStatus result = ReceiveNextEvent(num_types, events, timeout, true,
&event);
if (noErr == result) {
if (wake_up_ != event) {
LOG_F(LS_VERBOSE) << "Dispatching event: " << DecodeEvent(event);
result = SendEventToEventTarget(event, target);
if ((noErr != result) && (eventNotHandledErr != result)) {
LOG_E(LS_ERROR, OS, result) << "SendEventToEventTarget";
}
} else {
done = true;
}
ReleaseEvent(event);
} else if (eventLoopTimedOutErr == result) {
ASSERT(cms != kForever);
done = true;
} else if (eventLoopQuitErr == result) {
// Ignore this... we get spurious quits for a variety of reasons.
LOG_E(LS_VERBOSE, OS, result) << "ReceiveNextEvent";
} else {
// Some strange error occurred. Log it.
LOG_E(LS_WARNING, OS, result) << "ReceiveNextEvent";
return false;
}
if (kForever != cms) {
timeout = end_time - GetCurrentEventTime();
}
}
return true;
}
void MacCarbonSocketServer::WakeUp() {
if (!IsEventInQueue(event_queue_, wake_up_)) {
RetainEvent(wake_up_);
OSStatus result = PostEventToQueue(event_queue_, wake_up_,
kEventPriorityStandard);
if (noErr != result) {
LOG_E(LS_ERROR, OS, result) << "PostEventToQueue";
}
}
}
///////////////////////////////////////////////////////////////////////////////
// MacCarbonAppSocketServer
///////////////////////////////////////////////////////////////////////////////
// Carbon is deprecated for x64. Switch to Cocoa
#if !defined(__x86_64__)
MacCarbonAppSocketServer::MacCarbonAppSocketServer()
: event_queue_(GetCurrentEventQueue()) {
// Install event handler
VERIFY(noErr == InstallApplicationEventHandler(
NewEventHandlerUPP(WakeUpEventHandler), 1, kEventWakeUpSpec, this,
&event_handler_));
// Install a timer and set it idle to begin with.
VERIFY(noErr == InstallEventLoopTimer(GetMainEventLoop(),
kEventDurationForever,
kEventDurationForever,
NewEventLoopTimerUPP(TimerHandler),
this,
&timer_));
}
MacCarbonAppSocketServer::~MacCarbonAppSocketServer() {
RemoveEventLoopTimer(timer_);
RemoveEventHandler(event_handler_);
}
OSStatus MacCarbonAppSocketServer::WakeUpEventHandler(
EventHandlerCallRef next, EventRef event, void *data) {
QuitApplicationEventLoop();
return noErr;
}
void MacCarbonAppSocketServer::TimerHandler(
EventLoopTimerRef timer, void *data) {
QuitApplicationEventLoop();
}
bool MacCarbonAppSocketServer::Wait(int cms, bool process_io) {
if (!process_io && cms == 0) {
// No op.
return true;
}
if (kForever != cms) {
// Start a timer.
OSStatus error =
SetEventLoopTimerNextFireTime(timer_, cms / 1000.0);
if (error != noErr) {
LOG(LS_ERROR) << "Failed setting next fire time.";
}
}
if (!process_io) {
// No way to listen to common modes and not get socket events, unless
// we disable each one's callbacks.
EnableSocketCallbacks(false);
}
RunApplicationEventLoop();
if (!process_io) {
// Reenable them. Hopefully this won't cause spurious callbacks or
// missing ones while they were disabled.
EnableSocketCallbacks(true);
}
return true;
}
void MacCarbonAppSocketServer::WakeUp() {
// TODO: No-op if there's already a WakeUp in flight.
EventRef wake_up;
VERIFY(noErr == CreateEvent(NULL, kEventClassSocketServer, kEventWakeUp, 0,
kEventAttributeUserEvent, &wake_up));
OSStatus result = PostEventToQueue(event_queue_, wake_up,
kEventPriorityStandard);
if (noErr != result) {
LOG_E(LS_ERROR, OS, result) << "PostEventToQueue";
}
ReleaseEvent(wake_up);
}
#endif
///////////////////////////////////////////////////////////////////////////////
// MacNotificationsSocketServer
///////////////////////////////////////////////////////////////////////////////
static const CFStringRef kNotificationName =
CFSTR("MacNotificationsSocketServer");
MacNotificationsSocketServer::MacNotificationsSocketServer()
: sent_notification_(false) {
CFNotificationCenterRef nc = CFNotificationCenterGetLocalCenter();
// Passing NULL for the observed object
CFNotificationCenterAddObserver(
nc, this, NotificationCallBack, kNotificationName, NULL,
CFNotificationSuspensionBehaviorDeliverImmediately);
}
MacNotificationsSocketServer::~MacNotificationsSocketServer() {
CFNotificationCenterRef nc = CFNotificationCenterGetLocalCenter();
CFNotificationCenterRemoveObserver(nc, this, kNotificationName, NULL);
}
bool MacNotificationsSocketServer::Wait(int cms, bool process_io) {
return cms == 0;
}
void MacNotificationsSocketServer::WakeUp() {
// We could be invoked recursively, so this stops the infinite loop
if (!sent_notification_) {
sent_notification_ = true;
CFNotificationCenterRef nc = CFNotificationCenterGetLocalCenter();
CFNotificationCenterPostNotification(nc, kNotificationName, this, NULL,
true);
sent_notification_ = false;
}
}
void MacNotificationsSocketServer::NotificationCallBack(
CFNotificationCenterRef center, void* observer, CFStringRef name,
const void* object, CFDictionaryRef userInfo) {
ASSERT(CFStringCompare(name, kNotificationName, 0) == kCFCompareEqualTo);
ASSERT(userInfo == NULL);
// We have thread messages to process.
Thread* thread = Thread::Current();
if (thread == NULL) {
// We're shutting down
return;
}
Message msg;
while (thread->Get(&msg, 0)) {
thread->Dispatch(&msg);
}
}
///////////////////////////////////////////////////////////////////////////////
} // namespace talk_base
|
healthkxy/libjingle-ios
|
talk/base/macsocketserver.cc
|
C++
|
bsd-3-clause
| 13,013
|
// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import {$} from 'chrome://resources/js/util.m.js';
import {GetSsrcFromReport, SsrcInfoManager} from './ssrc_info_manager.js';
/**
* Maintains the stats table.
* @param {SsrcInfoManager} ssrcInfoManager The source of the ssrc info.
*/
export class StatsTable {
/**
* @param {SsrcInfoManager} ssrcInfoManager The source of the ssrc info.
*/
constructor(ssrcInfoManager) {
/**
* @type {SsrcInfoManager}
* @private
*/
this.ssrcInfoManager_ = ssrcInfoManager;
}
/**
* Adds |report| to the stats table of |peerConnectionElement|.
*
* @param {!Element} peerConnectionElement The root element.
* @param {!Object} report The object containing stats, which is the object
* containing timestamp and values, which is an array of strings, whose
* even index entry is the name of the stat, and the odd index entry is
* the value.
*/
addStatsReport(peerConnectionElement, report) {
if (report.type === 'codec') {
return;
}
const statsTable = this.ensureStatsTable_(peerConnectionElement, report);
if (report.stats) {
this.addStatsToTable_(
statsTable, report.stats.timestamp, report.stats.values);
}
}
clearStatsLists(peerConnectionElement) {
const containerId = peerConnectionElement.id + '-table-container';
const container = $(containerId);
if (container) {
peerConnectionElement.removeChild(container);
this.ensureStatsTableContainer_(peerConnectionElement);
}
}
/**
* Ensure the DIV container for the stats tables is created as a child of
* |peerConnectionElement|.
*
* @param {!Element} peerConnectionElement The root element.
* @return {!Element} The stats table container.
* @private
*/
ensureStatsTableContainer_(peerConnectionElement) {
const containerId = peerConnectionElement.id + '-table-container';
let container = $(containerId);
if (!container) {
container = document.createElement('div');
container.id = containerId;
container.className = 'stats-table-container';
const head = document.createElement('div');
head.textContent = 'Stats Tables';
container.appendChild(head);
peerConnectionElement.appendChild(container);
}
return container;
}
/**
* Ensure the stats table for track specified by |report| of PeerConnection
* |peerConnectionElement| is created.
*
* @param {!Element} peerConnectionElement The root element.
* @param {!Object} report The object containing stats, which is the object
* containing timestamp and values, which is an array of strings, whose
* even index entry is the name of the stat, and the odd index entry is
* the value.
* @return {!Element} The stats table element.
* @private
*/
ensureStatsTable_(peerConnectionElement, report) {
const tableId = peerConnectionElement.id + '-table-' + report.id;
let table = $(tableId);
if (!table) {
const container = this.ensureStatsTableContainer_(peerConnectionElement);
const details = document.createElement('details');
container.appendChild(details);
const summary = document.createElement('summary');
summary.textContent = report.id + ' (' + report.type + ')';
details.appendChild(summary);
table = document.createElement('table');
details.appendChild(table);
table.id = tableId;
table.border = 1;
table.appendChild($('trth-template').content.cloneNode(true));
table.rows[0].cells[0].textContent = 'Statistics ' + report.id;
if (report.type === 'ssrc') {
table.insertRow(1);
table.rows[1].appendChild(
$('td-colspan-template').content.cloneNode(true));
this.ssrcInfoManager_.populateSsrcInfo(
table.rows[1].cells[0], GetSsrcFromReport(report));
}
}
return table;
}
/**
* Update |statsTable| with |time| and |statsData|.
*
* @param {!Element} statsTable Which table to update.
* @param {number} time The number of milliseconds since epoch.
* @param {Array<string>} statsData An array of stats name and value pairs.
* @private
*/
addStatsToTable_(statsTable, time, statsData) {
const date = new Date(time);
this.updateStatsTableRow_(statsTable, 'timestamp', date.toLocaleString());
for (let i = 0; i < statsData.length - 1; i = i + 2) {
this.updateStatsTableRow_(statsTable, statsData[i], statsData[i + 1]);
}
}
/**
* Update the value column of the stats row of |rowName| to |value|.
* A new row is created if this is the first report of this stat.
*
* @param {!Element} statsTable Which table to update.
* @param {string} rowName The name of the row to update.
* @param {string} value The new value to set.
* @private
*/
updateStatsTableRow_(statsTable, rowName, value) {
const trId = statsTable.id + '-' + rowName;
let trElement = $(trId);
const activeConnectionClass = 'stats-table-active-connection';
if (!trElement) {
trElement = document.createElement('tr');
trElement.id = trId;
statsTable.firstChild.appendChild(trElement);
const item = $('td2-template').content.cloneNode(true);
item.querySelector('td').textContent = rowName;
trElement.appendChild(item);
}
trElement.cells[1].textContent = value;
// Highlights the table for the active connection.
if (rowName === 'googActiveConnection') {
if (value === true) {
statsTable.parentElement.classList.add(activeConnectionClass);
} else {
statsTable.parentElement.classList.remove(activeConnectionClass);
}
}
}
}
|
nwjs/chromium.src
|
content/browser/webrtc/resources/stats_table.js
|
JavaScript
|
bsd-3-clause
| 5,855
|
\defgroup mrpt_img_grp [mrpt-img]
Basic computer vision data structures and tools: bitmap images, canvas, color
maps, and pinhole camera models.
[TOC]
# Library mrpt-img
This C++ library is part of MRPT and can be installed in Debian-based systems
with:
sudo apt install libmrpt-img-dev
Read also [how to import MRPT into your CMake scripts](mrpt_from_cmake.html).
Find below some examples of use.
## Image handling
The class mrpt::img::CImage represents a wrapper around OpenCV images, plus
extra functionality such as on-the-fly loading of images stored on disk upon
first usage. The `cv::Mat` object is always available, so OpenCV functions
can still be used to operate on MRPT images.
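A minimal sketch of that workflow (written against MRPT 2.x; accessor names such
as `asCvMatRef()` are taken from the CImage documentation and may differ in older
releases, so check the mrpt::img::CImage reference for your version):

    #include <mrpt/img/CImage.h>
    #include <opencv2/imgproc.hpp>

    int main()
    {
        mrpt::img::CImage img;
        img.loadFromFile("photo.jpg");   // CImage wraps an OpenCV image

        // The underlying cv::Mat stays accessible, so OpenCV can operate on it directly:
        cv::Mat& m = img.asCvMatRef();
        cv::GaussianBlur(m, m, cv::Size(5, 5), 1.0);

        img.saveToFile("photo_blurred.jpg");
        return 0;
    }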
# Library contents
|
MRPT/mrpt
|
doc/source/doxygen-docs/lib_mrpt_img.md
|
Markdown
|
bsd-3-clause
| 723
|
#include <string>
#include <iostream>
#include <istream>
#include <stdio.h> /* printf */
#include <cstring>
#include "Walls.h"
using namespace std;
int main (int argc, char* argv[]) {
bool visualise = false;
bool testMode = false;
for (int i = 1; i < argc; i++) {
if (strcmp(argv[i], "-v") == 0) visualise = true;
if (strcmp(argv[i], "-t") == 0) testMode = true;
}
std::vector<unsigned int> walls;
if (testMode == false) {
unsigned int val;
while ((std::cin.peek() !='\n') && (std::cin>>val)) {
walls.push_back(val);
}
} else {
walls = Walls::generateWalls();
Walls::printInput(walls);
}
Walls w(walls, visualise);
long long totalFilling = w.getFilling();
printf("%lld\n", totalFilling);
if (visualise) {
long long dashes = w.printWalls();
if (totalFilling == dashes) {
printf("[OK]\n");
} else printf("[FAIL]\n");
}
return 0;
}
|
hasherezade/mastercoder2014
|
task4/main.cpp
|
C++
|
bsd-3-clause
| 926
|
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"crypto"
"crypto/rand"
"crypto/rsa"
"io"
"time"
_ "crypto/sha256"
"golang.org/x/crypto/openpgp"
"golang.org/x/crypto/openpgp/armor"
"golang.org/x/crypto/openpgp/packet"
)
// A fake static time for when keys are (were) created
var KeyDate time.Time = time.Date(1979, time.April, 10, 14, 15, 0, 0, time.FixedZone("VET", -16200))
var config = &packet.Config{
DefaultHash: crypto.SHA256,
DefaultCipher: packet.CipherAES256,
DefaultCompressionAlgo: packet.CompressionZLIB,
CompressionConfig: &packet.CompressionConfig{Level: 7},
}
func prompter(keys []openpgp.Key, symmetric bool) (passphrase []byte, err error) {
return
}
func gpgEncrypt(rsaPubKeys []*rsa.PublicKey, inFile io.Reader, outFile io.Writer) {
aesKey := make([]byte, packet.CipherAES256.KeySize())
// Always check the error from crypto/rand: an unseeded or failed read must not be used as a key.
if _, err := rand.Read(aesKey); err != nil {
panic(err)
}
outArmor, err := armor.Encode(outFile, "SSH-CRYPT-MESSAGE", make(map[string]string))
if err != nil {
panic(err)
}
defer outArmor.Close()
if len(rsaPubKeys) == 0 {
panic("No keys to use")
}
for _, rsaPubKey := range rsaPubKeys {
pubKey := packet.NewRSAPublicKey(KeyDate, rsaPubKey)
err = packet.SerializeEncryptedKey(outArmor, pubKey, packet.CipherAES256, aesKey, config)
if err != nil {
panic(err)
}
}
encryptedData, err := packet.SerializeSymmetricallyEncrypted(outArmor, packet.CipherAES256, aesKey, config)
if err != nil {
panic(err)
}
defer encryptedData.Close()
hints := &openpgp.FileHints{}
var epochSeconds uint32
if !hints.ModTime.IsZero() {
epochSeconds = uint32(hints.ModTime.Unix())
}
compressedData, err := packet.SerializeCompressed(encryptedData, config.DefaultCompressionAlgo, config.CompressionConfig)
if err != nil {
panic(err)
}
defer compressedData.Close()
writer, err := packet.SerializeLiteral(compressedData, hints.IsBinary, hints.FileName, epochSeconds)
if err != nil {
panic(err)
}
defer writer.Close()
// Copy the input file to the output file, encrypting as we go.
if _, err := io.Copy(writer, inFile); err != nil {
panic(err)
}
// Note that this example is simplistic in that it omits any
// authentication of the encrypted data. If you were actually to use
// StreamReader in this manner, an attacker could flip arbitrary bits in
// the decrypted result.
}
func gpgDecrypt(rsaPrivKey *rsa.PrivateKey, inFile io.Reader, outFile io.Writer) {
privKey := packet.NewRSAPrivateKey(KeyDate, rsaPrivKey)
armorBlock, err := armor.Decode(inFile)
if err != nil {
panic(err)
}
var keyRing openpgp.EntityList
keyRing = append(keyRing, &openpgp.Entity{
PrivateKey: privKey,
PrimaryKey: packet.NewRSAPublicKey(KeyDate, rsaPrivKey.Public().(*rsa.PublicKey)),
})
md, err := openpgp.ReadMessage(armorBlock.Body, keyRing, nil, config)
if err != nil {
panic(err)
}
// Copy the input file to the output file, decrypting as we go.
if _, err := io.Copy(outFile, md.UnverifiedBody); err != nil {
panic(err)
}
// Note that this example is simplistic in that it omits any
// authentication of the encrypted data. If you were actually to use
// StreamReader in this manner, an attacker could flip arbitrary bits in
// the output.
}
|
totallylegitbiz/sshcrypt
|
openpgp.go
|
GO
|
bsd-3-clause
| 3,338
|
import instanceCreate from './instanceCreate';
import instanceGet from './instanceGet';
import instanceCommitGet from './instanceCommitsGet';
export default {
init(app) {
app.post('/api/instances', instanceCreate);
app.get('/api/instances/:instanceId', instanceGet);
app.get('/api/:instanceId/commits/tags/versionone/workitem', instanceCommitGet);
}
};
|
openAgile/CommitStream.Web
|
src/app/api/instances/es6/instancesController.js
|
JavaScript
|
bsd-3-clause
| 370
|
# ------------------------------------------------------------------------
# Makefile: header file patches for the SpecC system
# ------------------------------------------------------------------------
#
# Modifications: (most recent first)
#
# RD 12/15/14 finalized scrc_V22
# SD 12/10/14 adjustments for modern Linux support
# RD 11/01/14 supplied this tree of patches from 'scc' sources
# ------------------------------------------------------------------------
# --- macros
include ../../Makefile.macros
DIST = Makefile \
RedHatEL3.README \
RedHatEL4.README \
RedHatEL5.README \
RedHatEL6.README \
Fedora4.README \
Fedora12.README \
Ubuntu8.README
ORIG = Makefile \
RedHatEL3.README \
RedHatEL4.README \
RedHatEL5.README \
RedHatEL6.README \
Fedora4.README \
Fedora12.README \
Ubuntu8.README
PLATFORMS = RedHatEL3 \
RedHatEL4 \
RedHatEL5 \
RedHatEL6 \
Fedora4 \
Fedora12 \
Ubuntu8
DIRS_RHEL3 = bits
FILES_RHEL3 = unistd.h \
bits/huge_val.h
DIRS_RHEL4 = bits
FILES_RHEL4 = unistd.h \
bits/huge_val.h
DIRS_RHEL5 = bits
FILES_RHEL5 = unistd.h \
bits/huge_val.h \
bits/pthreadtypes.h
DIRS_RHEL6 = bits
FILES_RHEL6 = unistd.h \
features.h \
bits/nan.h \
bits/huge_val.h \
bits/huge_valf.h \
bits/pthreadtypes.h \
bits/socket.h
DIRS_FC4 = bits
FILES_FC4 = unistd.h \
bits/huge_val.h
DIRS_FC12 = bits
FILES_FC12 = unistd.h \
stdio.h \
stddef.h \
xlocale.h \
bits/byteswap.h \
bits/huge_val.h \
bits/huge_valf.h \
bits/huge_vall.h \
bits/nan.h \
bits/pthreadtypes.h
DIRS_UBT8 = bits
FILES_UBT8 = unistd.h \
bits/huge_val.h \
bits/huge_valf.h \
bits/huge_vall.h \
bits/pthreadtypes.h
# --- production rules
all:
for p in $(PLATFORMS); do \
if [ "$(PLATFORM)" = "$$p" ]; then \
$(MAKE) patch_$(PLATFORM); \
fi; \
done
install:
for p in $(PLATFORMS); do \
if [ "$(PLATFORM)" = "$$p" ]; then \
$(MAKE) install_$(PLATFORM); \
fi; \
done
uninstall:
for p in $(PLATFORMS); do \
if [ "$(PLATFORM)" = "$$p" ]; then \
$(MAKE) uninstall_$(PLATFORM); \
fi; \
done
# --- service rules
clean:
-$(RM) core *.bak *.BAK
for p in $(PLATFORMS); do \
if [ "$(PLATFORM)" = "$$p" ]; then \
$(MAKE) unpatch_$(PLATFORM); \
fi; \
done
distclean: clean
dist:
for file in $(DIST); do echo $(DISTPREFIX)/$$file >>$(DISTLIST); done
for file in $(FILES_RHEL3); do echo $(DISTPREFIX)/RedHatEL3/$$file >>$(DISTLIST); done
for file in $(FILES_RHEL4); do echo $(DISTPREFIX)/RedHatEL4/$$file >>$(DISTLIST); done
for file in $(FILES_RHEL5); do echo $(DISTPREFIX)/RedHatEL5/$$file >>$(DISTLIST); done
for file in $(FILES_RHEL6); do echo $(DISTPREFIX)/RedHatEL6/$$file >>$(DISTLIST); done
for file in $(FILES_FC4); do echo $(DISTPREFIX)/Fedora4/$$file >>$(DISTLIST); done
for file in $(FILES_FC12); do echo $(DISTPREFIX)/Fedora12/$$file >>$(DISTLIST); done
for file in $(FILES_UBT8); do echo $(DISTPREFIX)/Ubuntu8/$$file >>$(DISTLIST); done
orig:
for file in $(ORIG); do echo $(ORIGPREFIX)/$$file >>$(ORIGLIST); done
for file in $(FILES_RHEL3); do echo $(ORIGPREFIX)/RedHatEL3/$$file >>$(ORIGLIST); done
for file in $(FILES_RHEL4); do echo $(ORIGPREFIX)/RedHatEL4/$$file >>$(ORIGLIST); done
for file in $(FILES_RHEL5); do echo $(ORIGPREFIX)/RedHatEL5/$$file >>$(ORIGLIST); done
for file in $(FILES_RHEL6); do echo $(ORIGPREFIX)/RedHatEL6/$$file >>$(ORIGLIST); done
for file in $(FILES_FC4); do echo $(ORIGPREFIX)/Fedora4/$$file >>$(ORIGLIST); done
for file in $(FILES_FC12); do echo $(ORIGPREFIX)/Fedora12/$$file >>$(ORIGLIST); done
for file in $(FILES_UBT8); do echo $(ORIGPREFIX)/Ubuntu8/$$file >>$(ORIGLIST); done
# --- specific rules to patch/unpatch/install/uninstall
# (customizable for each platform)
patch_Fedora4:
for dir in $(DIRS_FC4); do \
$(MKDIR) $(SPECC_INC)/$$dir; \
done
for file in $(FILES_FC4); do \
$(RM) $(SPECC_INC)/$$file; \
$(LINK) $(SPECC_INC)/patches/$(PLATFORM)/$$file $(SPECC_INC)/$$file; \
done
unpatch_Fedora4:
for file in $(FILES_FC4); do \
$(RM) $(SPECC_INC)/$$file; \
done
for dir in $(DIRS_FC4); do \
if [ -d $(SPECC_INC)/$$dir ]; then \
$(RMDIR) $(SPECC_INC)/$$dir; \
fi; \
done
install_Fedora4:
for dir in $(DIRS_FC4); do \
$(MKDIR) $(INSTALL_PREFIX)/inc/$$dir; \
done
for file in $(FILES_FC4); do \
$(CP) $(PLATFORM)/$$file $(INSTALL_PREFIX)/inc/$$file; \
done
uninstall_Fedora4:
for file in $(FILES_FC4); do \
$(RM) $(INSTALL_PREFIX)/inc/$$file; \
done
for dir in $(DIRS_FC4); do \
if [ -d $(INSTALL_PREFIX)/inc/$$dir ]; then \
$(RMDIR) $(INSTALL_PREFIX)/inc/$$dir; \
fi; \
done
patch_Fedora12:
for dir in $(DIRS_FC12); do \
$(MKDIR) $(SPECC_INC)/$$dir; \
done
for file in $(FILES_FC12); do \
$(RM) $(SPECC_INC)/$$file; \
$(LINK) $(SPECC_INC)/patches/$(PLATFORM)/$$file $(SPECC_INC)/$$file; \
done
unpatch_Fedora12:
for file in $(FILES_FC12); do \
$(RM) $(SPECC_INC)/$$file; \
done
for dir in $(DIRS_FC12); do \
if [ -d $(SPECC_INC)/$$dir ]; then \
$(RMDIR) $(SPECC_INC)/$$dir; \
fi; \
done
install_Fedora12:
for dir in $(DIRS_FC12); do \
$(MKDIR) $(INSTALL_PREFIX)/inc/$$dir; \
done
for file in $(FILES_FC12); do \
$(CP) $(PLATFORM)/$$file $(INSTALL_PREFIX)/inc/$$file; \
done
uninstall_Fedora12:
for file in $(FILES_FC12); do \
$(RM) $(INSTALL_PREFIX)/inc/$$file; \
done
for dir in $(DIRS_FC12); do \
if [ -d $(INSTALL_PREFIX)/inc/$$dir ]; then \
$(RMDIR) $(INSTALL_PREFIX)/inc/$$dir; \
fi; \
done
patch_RedHatEL3:
for dir in $(DIRS_RHEL3); do \
$(MKDIR) $(SPECC_INC)/$$dir; \
done
for file in $(FILES_RHEL3); do \
$(RM) $(SPECC_INC)/$$file; \
$(LINK) $(SPECC_INC)/patches/$(PLATFORM)/$$file $(SPECC_INC)/$$file; \
done
unpatch_RedHatEL3:
for file in $(FILES_RHEL3); do \
$(RM) $(SPECC_INC)/$$file; \
done
for dir in $(DIRS_RHEL3); do \
if [ -d $(SPECC_INC)/$$dir ]; then \
$(RMDIR) $(SPECC_INC)/$$dir; \
fi; \
done
install_RedHatEL3:
for dir in $(DIRS_RHEL3); do \
$(MKDIR) $(INSTALL_PREFIX)/inc/$$dir; \
done
for file in $(FILES_RHEL3); do \
$(CP) $(PLATFORM)/$$file $(INSTALL_PREFIX)/inc/$$file; \
done
uninstall_RedHatEL3:
for file in $(FILES_RHEL3); do \
$(RM) $(INSTALL_PREFIX)/inc/$$file; \
done
for dir in $(DIRS_RHEL3); do \
if [ -d $(INSTALL_PREFIX)/inc/$$dir ]; then \
$(RMDIR) $(INSTALL_PREFIX)/inc/$$dir; \
fi; \
done
patch_RedHatEL4:
for dir in $(DIRS_RHEL4); do \
$(MKDIR) $(SPECC_INC)/$$dir; \
done
for file in $(FILES_RHEL4); do \
$(RM) $(SPECC_INC)/$$file; \
$(LINK) $(SPECC_INC)/patches/$(PLATFORM)/$$file $(SPECC_INC)/$$file; \
done
unpatch_RedHatEL4:
for file in $(FILES_RHEL4); do \
$(RM) $(SPECC_INC)/$$file; \
done
for dir in $(DIRS_RHEL4); do \
if [ -d $(SPECC_INC)/$$dir ]; then \
$(RMDIR) $(SPECC_INC)/$$dir; \
fi; \
done
install_RedHatEL4:
for dir in $(DIRS_RHEL4); do \
$(MKDIR) $(INSTALL_PREFIX)/inc/$$dir; \
done
for file in $(FILES_RHEL4); do \
$(CP) $(PLATFORM)/$$file $(INSTALL_PREFIX)/inc/$$file; \
done
uninstall_RedHatEL4:
for file in $(FILES_RHEL4); do \
$(RM) $(INSTALL_PREFIX)/inc/$$file; \
done
for dir in $(DIRS_RHEL4); do \
if [ -d $(INSTALL_PREFIX)/inc/$$dir ]; then \
$(RMDIR) $(INSTALL_PREFIX)/inc/$$dir; \
fi; \
done
patch_RedHatEL5:
for dir in $(DIRS_RHEL5); do \
$(MKDIR) $(SPECC_INC)/$$dir; \
done
for file in $(FILES_RHEL5); do \
$(RM) $(SPECC_INC)/$$file; \
$(LINK) $(SPECC_INC)/patches/$(PLATFORM)/$$file $(SPECC_INC)/$$file; \
done
unpatch_RedHatEL5:
for file in $(FILES_RHEL5); do \
$(RM) $(SPECC_INC)/$$file; \
done
for dir in $(DIRS_RHEL5); do \
if [ -d $(SPECC_INC)/$$dir ]; then \
$(RMDIR) $(SPECC_INC)/$$dir; \
fi; \
done
install_RedHatEL5:
for dir in $(DIRS_RHEL5); do \
$(MKDIR) $(INSTALL_PREFIX)/inc/$$dir; \
done
for file in $(FILES_RHEL5); do \
$(CP) $(PLATFORM)/$$file $(INSTALL_PREFIX)/inc/$$file; \
done
uninstall_RedHatEL5:
for file in $(FILES_RHEL5); do \
$(RM) $(INSTALL_PREFIX)/inc/$$file; \
done
for dir in $(DIRS_RHEL5); do \
if [ -d $(INSTALL_PREFIX)/inc/$$dir ]; then \
$(RMDIR) $(INSTALL_PREFIX)/inc/$$dir; \
fi; \
done
patch_RedHatEL6:
for dir in $(DIRS_RHEL6); do \
$(MKDIR) $(SPECC_INC)/$$dir; \
done
for file in $(FILES_RHEL6); do \
$(RM) $(SPECC_INC)/$$file; \
$(LINK) $(SPECC_INC)/patches/$(PLATFORM)/$$file $(SPECC_INC)/$$file; \
done
unpatch_RedHatEL6:
for file in $(FILES_RHEL6); do \
$(RM) $(SPECC_INC)/$$file; \
done
for dir in $(DIRS_RHEL6); do \
if [ -d $(SPECC_INC)/$$dir ]; then \
$(RMDIR) $(SPECC_INC)/$$dir; \
fi; \
done
install_RedHatEL6:
for dir in $(DIRS_RHEL6); do \
$(MKDIR) $(INSTALL_PREFIX)/inc/$$dir; \
done
for file in $(FILES_RHEL6); do \
$(CP) $(PLATFORM)/$$file $(INSTALL_PREFIX)/inc/$$file; \
done
uninstall_RedHatEL6:
for file in $(FILES_RHEL6); do \
$(RM) $(INSTALL_PREFIX)/inc/$$file; \
done
for dir in $(DIRS_RHEL6); do \
if [ -d $(INSTALL_PREFIX)/inc/$$dir ]; then \
$(RMDIR) $(INSTALL_PREFIX)/inc/$$dir; \
fi; \
done
patch_Ubuntu8:
for dir in $(DIRS_UBT8); do \
$(MKDIR) $(SPECC_INC)/$$dir; \
done
for file in $(FILES_UBT8); do \
$(RM) $(SPECC_INC)/$$file; \
$(LINK) $(SPECC_INC)/patches/$(PLATFORM)/$$file $(SPECC_INC)/$$file; \
done
unpatch_Ubuntu8:
for file in $(FILES_UBT8); do \
$(RM) $(SPECC_INC)/$$file; \
done
for dir in $(DIRS_UBT8); do \
if [ -d $(SPECC_INC)/$$dir ]; then \
$(RMDIR) $(SPECC_INC)/$$dir; \
fi; \
done
install_Ubuntu8:
for dir in $(DIRS_UBT8); do \
$(MKDIR) $(INSTALL_PREFIX)/inc/$$dir; \
done
for file in $(FILES_UBT8); do \
$(CP) $(PLATFORM)/$$file $(INSTALL_PREFIX)/inc/$$file; \
done
uninstall_Ubuntu8:
for file in $(FILES_UBT8); do \
$(RM) $(INSTALL_PREFIX)/inc/$$file; \
done
for dir in $(DIRS_UBT8); do \
if [ -d $(INSTALL_PREFIX)/inc/$$dir ]; then \
$(RMDIR) $(INSTALL_PREFIX)/inc/$$dir; \
fi; \
done
# --- EOF Makefile ---
|
samschauer/myscc
|
inc/patches/Makefile
|
Makefile
|
bsd-3-clause
| 10,411
|
<?php
require_once "./vendor/autoload.php";
//\TheAomx\Nodes\Indentation::$indentationCharacter = " ";
//\TheAomx\Nodes\Indentation::$indentationDepth = 2;
//\TheAomx\Nodes\Indentation::$lineBreaker = "\n";
use \TheAomx\Nodes\HtmlNode as HtmlNode;
error_reporting(E_ALL);
ini_set('display_errors', 1);
class Abbreviation {
public $abbreviation;
public $long_description;
public function __construct($abbrev, $desc) {
$this->abbreviation = $abbrev;
$this->long_description = $desc;
}
}
$abbreviations = array(
new Abbreviation("SCTP", "Stream Control Transmission Protocol"),
new Abbreviation("PR-SCTP", "Partial Reliability Extension for SCTP"),
new Abbreviation("NR-SACK", "Non-Renegable Selective Acknowledgements"),
new Abbreviation("RFC", "Request for comments"),
new Abbreviation("IUT", "Implementation under Test"),
new Abbreviation("TSN", "Transmission Sequence Number"),
new Abbreviation("cwnd", "Congestion Window Size"),
new Abbreviation("cum_tsn", "Cumulative Transmission Sequence Number"),
new Abbreviation("gaps", "Gap Ackowledgement Blocks"),
new Abbreviation("nr-gaps", "Non Renegable Gap Ackowledgement Blocks"),
);
uasort($abbreviations, function ($a, $b) {
return strcmp($a->abbreviation, $b->abbreviation);
});
class ExternalReference {
public $name, $id, $link;
function __construct($name, $id, $link) {
$this->name = $name;
$this->id = $id;
$this->link = $link;
}
}
$external_references = array(
new ExternalReference("RFC 4960 (SCTP)", "rfc4960", "https://tools.ietf.org/html/rfc4960"),
new ExternalReference("RFC 3758 (PR-SCTP)", "rfc3758", "https://www.ietf.org/rfc/rfc3758.txt"),
new ExternalReference("RFC 6458 (Sockets API Extensions for SCTP)", "rfc6458", "https://www.ietf.org/rfc/rfc6458.txt"),
new ExternalReference("RFC 7496 (Additional Policies for SCTP)", "rfc7496", "https://www.ietf.org/rfc/rfc7496.txt"),
new ExternalReference("Internet Draft - Stream Schedulers and User Message Interleaving for SCTP", "ndata05", "https://tools.ietf.org/html/draft-ietf-tsvwg-sctp-ndata-05"),
new ExternalReference("Internet Draft - Load Sharing for SCTP", "loadsharing", "https://tools.ietf.org/html/draft-tuexen-tsvwg-sctp-multipath-05"),
);
class Testsuite {
public $id, $folderName, $longName, $notice;
public $test_cases = array();
function __construct($id, $folderName, $longName, $notice = "") {
$this->id = $id;
$this->folderName = $folderName;
$this->longName = $longName;
$this->notice = $notice;
}
public function __toString() {
$html = "";
$html .= HtmlNode::get_builder("h2")->text($this->longName)->build();
if (!empty($this->notice)) {
$html .= HtmlNode::get_builder("p")->text("<strong>Notice:</strong> " . $this->notice)->build();
}
foreach ($this->test_cases as $test_case) {
$html .= $test_case;
$a = HtmlNode::get_builder("a")->attribute("href", "#overview")->
s_text("Back to Testsuite-Overview")->build();
$html .= $a;
}
return $html;
}
}
class Testcase {
public $id = "", $precondition = "", $purpose = "", $references = "";
private function build_tr_row ($name, $value) {
$tr = HtmlNode::get_builder("tr")->build();
$tr->append(HtmlNode::get_builder("td")->text($name)->build())->
append(HtmlNode::get_builder("td")->text($value)->build());
return $tr;
}
public function __toString() {
$table = HtmlNode::get_builder("table")->attribute("class", "table table-bordered test_case_table")->
attribute("id", $this->id)->build();
$tbody = HtmlNode::get_builder("tbody")->build();
$table->append($tbody);
$tr_id = $this->build_tr_row("ID", $this->id);
$tr_precondition = $this->build_tr_row("Precondition", $this->precondition);
$tr_purpose = $this->build_tr_row("Purpose", $this->purpose);
$tr_references = $this->build_tr_row("References", $this->references);
$tbody->append($tr_id)->
append($tr_precondition)->
append($tr_purpose)->
append($tr_references);
return strval($table);
}
}
class TestcaseParserStates {
public static $INIT_STATE=1, $PRECONDITION_STATE=2, $PURPOSE_STATE=3, $REFERENCES_STATE=4;
}
function parseLine($line, $state, $test_case) {
switch ($state) {
case TestcaseParserStates::$INIT_STATE:
if (preg_match("/Precondition/i", $line)) {
$state = TestcaseParserStates::$PRECONDITION_STATE;
}
break;
case TestcaseParserStates::$PRECONDITION_STATE:
if (preg_match("/Purpose/i", $line)) {
$state = TestcaseParserStates::$PURPOSE_STATE;
} else {
$test_case->precondition .= $line;
}
break;
case TestcaseParserStates::$PURPOSE_STATE:
if (preg_match("/References/i", $line)) {
$state = TestcaseParserStates::$REFERENCES_STATE;
} else {
$test_case->purpose .= $line;
}
break;
case TestcaseParserStates::$REFERENCES_STATE:
$test_case->references .= $line;
break;
}
return $state;
}
function loadTestCase ($id, $filename) {
$handle = fopen($filename, "r");
if (!$handle) {
throw new RuntimeException("test case $filename could not be opened!");
}
$test_case = new Testcase();
$test_case->id = $id;
$state = TestcaseParserStates::$INIT_STATE;
while (($raw_line = fgets($handle)) !== false) {
$line = trim($raw_line);
if ($line === "") {
continue;
}
$state = parseLine($raw_line, $state, $test_case);
}
fclose($handle);
return $test_case;
}
function loadTestCases($suite_folder_name) {
$test_cases = scandir($suite_folder_name);
$test_cases_filtered = array_filter($test_cases, function($element) use (&$suite_folder_name) {
return !($element === "." || $element === ".." || is_dir($suite_folder_name. '/'. $element));
});
$ret = array();
foreach ($test_cases_filtered as $test_case_id) {
$filename = $suite_folder_name . "/" . $test_case_id;
$test_case = loadTestCase($test_case_id, $filename);
array_push($ret, $test_case);
}
return $ret;
}
$test_suites = array(
new Testsuite("nftsp", "negotiation-forward-tsn-supported-parameter", "Negotiation of Forward-TSN-supported parameter"),
new Testsuite("ssi", "sender-side-implementation", "Sender Side Implementation", 'These test cases use the term "abandoned" like defined in <a href="https://tools.ietf.org/html/rfc3758#section-3.4">RFC 3758 [section 3.4]</a>.
This means that these test cases have to be implemented for each specific policy rule that defines when a data chunk should be considered "abandoned" for the sender.'),
new Testsuite("rsi", "receiver-side-implementation", "Receiver Side Implementation", 'Please note that the packet-loss test-cases can be applied to ordered, unordered or an mixture of both DATA-Chunks. To avoid redundant definitions of equivalent loss patterns these descriptions are so generic that they can be applied to both ordered and unordered or an mixture of both.'),
new Testsuite("error-cases", "error-cases", "Error Cases"),
new Testsuite("hwift", "handshake-with-i-forward-tsn", "Handshake with I-FORWARD-TSN"),
new Testsuite("hwnrs", "handshake-with-nr-sack", "Handshake with NR-SACK"),
new Testsuite("daswnrs", "data-sender", "Data Sender with NR-SACK"),
new Testsuite("darwnrs", "data-receiver", "Data Receiver with NR-SACK"),
);
function sort_by_testcase_id($a, $b) {
$regex = '!([^0-9.]*)(\d+)!';
$matches_a = array();
$matches_b = array();
preg_match_all($regex, $a->id, $matches_a);
preg_match_all($regex, $b->id, $matches_b);
$test_case_name_a = $matches_a[1][0];
$test_case_name_b = $matches_b[1][0];
$test_case_id_a = $matches_a[2][0];
$test_case_id_b = $matches_b[2][0];
if ($test_case_name_a !== $test_case_name_b) {
return $test_case_name_a > $test_case_name_b;
} else {
return $test_case_id_a > $test_case_id_b;
}
}
$all_test_cases = array();
foreach ($test_suites as $test_suite) {
$test_suite->test_cases = loadTestCases($test_suite->folderName);
}
uasort($test_suites[1]->test_cases, "sort_by_testcase_id");
uasort($test_suites[2]->test_cases, "sort_by_testcase_id");
uasort($test_suites[4]->test_cases, "sort_by_testcase_id");
uasort($test_suites[6]->test_cases, "sort_by_testcase_id");
uasort($test_suites[7]->test_cases, "sort_by_testcase_id");
?>
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Test-Suite for the SCTP Partial Reliability Extension</title>
<!-- Bootstrap core CSS -->
<link href="bootstrap/css/bootstrap.min.css" rel="stylesheet">
<!-- IE10 viewport hack for Surface/desktop Windows 8 bug -->
<link href="bootstrap/css/ie-bugfix.css" rel="stylesheet">
<!-- jQuery (necessary for Bootstrap's JavaScript plugins) -->
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.12.4/jquery.min.js"></script>
<script src="bootstrap/js/bootstrap.min.js"></script>
<style type="text/css">
* {
padding: 0;
margin: 0;
}
body {
margin: 10px 100px 10px 20px;
}
h1 {
margin: 20px 0px;
}
h2 {
margin: 15px 10px;
}
h3 {
margin: 12px 15px;
}
ul {
padding: 1em 2em;
list-style-type: disc;
list-style-position: outside;
list-style-image: none;
}
.overview_ol {
list-style-type: upper-roman;
padding: 1em 2em;
}
ol {
padding: 1em 2em;
}
#abbreviation_table {
border-collapse: collapse;
}
#abbreviation_table td {
border: 1px solid black;
padding: 10px;
}
.test_case_table {
/*width: 75%;*/
/*border-collapse: collapse;*/
margin: 20px 20px;
background-color: #fbfbfb;
}
.test_case_table td {
border: 1px solid black;
padding: 10px;
}
.todo {
color: red;
}
.footer {
text-align: center;
}
a {
color: blue;
}
</style>
</head>
<body>
<div class="container">
<h1>Test-Suite for the SCTP Partial Reliability Extension</h1>
<h2>Introduction</h2>
<div class="well">
<p>This document defines a test suite for the Partial Reliability extension of SCTP.
The designed test suite for PR-SCTP consists of eight different categories. A practical implementation of
these test-cases for the tool packetdrill can be found at <a href="https://github.com/nplab/PR_SCTP_Testsuite">https://github.com/nplab/PR_SCTP_Testsuite</a>.
Most test-cases were specified close to the specification of the tested extensions of SCTP. Therefore each test-case
lists the relevant references that were used to design it.
</p>
<p>
Please note that the categories <i>Sender Side Implementation</i> and <i>Receiver Side Implementation</i> can be applied to either the classic
PR-SCTP or the PR-SCTP extension with user message interleaving (see <a href="https://tools.ietf.org/html/draft-ietf-tsvwg-sctp-ndata-09">Stream Schedulers and User Message Interleaving for SCTP</a>).
If these tests are applied to the user message interleaving extension then the sid/ssn-values in the DATA-Chunks have to be converted
such that they match the new sid/mid/fsn-values in the I-DATA-Chunk (and also use the I-DATA-Chunk instead).
Also each FORWARD-TSN-Chunk has to be replaced by an equivalent I-FORWARD-TSN-Chunk. Other than these conversions
every test case of the categories <i>Sender Side Implementation</i> and <i>Receiver Side Implementation</i> can also
be applied to the Stream Schedulers and User Message Interleaving extension.
</p>
</div>
<h2 id="overview">Overview of Test-Suite-Structure</h2>
<?php
function createTestsuiteListingDivs($test_suites) {
$divs = array();
foreach ($test_suites as $test_suite) {
$div = HtmlNode::get_builder("div")->attribute("class", "col-sm-4")->build();
$panel_div = HtmlNode::get_builder("div")->attribute("class", "panel panel-default")->build();
$div->addChildNode($panel_div);
$heading_div = HtmlNode::get_builder("div")->attribute("class", "panel-heading")->build();
$body_div = HtmlNode::get_builder("div")->attribute("class", "panel-body")->build();
$panel_div->append($heading_div)->append($body_div);
$h3 = HtmlNode::get_builder("h3")->attribute("class", "panel-title")->s_text($test_suite->longName)->build();
$heading_div->append($h3);
$ul = HtmlNode::get_builder("ul")->build();
foreach ($test_suite->test_cases as $test_case) {
$li_child = HtmlNode::get_builder("li")->build();
$a_child = HtmlNode::get_builder("a")->attribute("href", "#" . $test_case->id)->s_text($test_case->id)->build();
$li_child->append($a_child);
$ul->append($li_child);
}
$body_div->append($ul);
array_push($divs, $div);
}
return $divs;
}
function renderTestsuiteListing($divs) {
$outer_div = HtmlNode::get_builder("div")->attr("class", "col-sm-12")->build();
$i = 1;
foreach ($divs as $div) {
$outer_div->append($div);
if (($i % 3) === 0) {
echo $outer_div;
$outer_div = HtmlNode::get_builder("div")->attr("class", "col-sm-12")->build();
}
$i++;
}
echo $outer_div;
}
renderTestsuiteListing(createTestsuiteListingDivs($test_suites));
?>
<div class="col-sm-6">
<h3>Abbreviations</h3>
<?php
$abbrev_table = HtmlNode::get_builder("table")->attr("class", "table table-bordered")->build();
$abbrev_table_tbody = HtmlNode::get_builder("tbody")->build();
$abbrev_table->append($abbrev_table_tbody);
foreach ($abbreviations as $abbreviation) {
$tr = HtmlNode::get_builder("tr")->build();
$td1 = HtmlNode::get_builder("td")->s_text($abbreviation->abbreviation)->build();
$td2 = HtmlNode::get_builder("td")->s_text($abbreviation->long_description)->build();
$tr->append($td1)->append($td2);
$abbrev_table_tbody->append($tr);
}
echo $abbrev_table;
?>
</div>
<div class="col-sm-6">
<h3>External references</h3>
This test suite is based upon the following documents:
<?php
$ul = HtmlNode::get_builder("div")->attr("style", "margin-top: 15px;")->attr("class", "list-group")->build();
foreach ($external_references as $external_reference) {
$a = HtmlNode::get_builder("a")->attr("class", "list-group-item")->attr("id", $external_reference->id)
->attr("href", $external_reference->link)->s_attribute("title", $external_reference->name)
->s_text($external_reference->name)->build();
$ul->append($a);
}
echo $ul;
?>
</div>
<div class="col-sm-12">
<h1>Definition of the Test-Cases</h1>
<?php
foreach ($test_suites as $test_suite) {
echo $test_suite;
}
?>
</div>
</div>
<footer class="footer">
<div class="container">
<p class="text-muted">© 2017 by Julian Cordes</p>
</div>
</footer>
</body>
</html>
|
nplab/PR_SCTP_Testsuite
|
test-spec/index.php
|
PHP
|
bsd-3-clause
| 17,222
|
<?php
namespace common\models\site;
use frontend\models\Categorynews;
use Yii;
use frontend\models\NewsComments;
/**
* This is the model class for table "news_block".
*
* @property integer $ID
* @property string $Slug
* @property string $Name
* @property string $About
* @property string $Photo
* @property string $Date
* @property integer $Views
* @property string $Text
* @property integer $Published
* @property integer $Grey
* @property integer $Partner
* @property integer $EstablishmentID
* @property integer $IndexBlock
* @property string $UpdateDate
*/
class NewsBlock extends \yii\db\ActiveRecord
{
/**
* @inheritdoc
*/
public static function tableName()
{
return 'news_block';
}
/**
* @inheritdoc
*/
public function rules()
{
return [
[['Name', 'About', 'Date', 'Text'], 'required'],
[['About', 'Photo', 'Text'], 'string'],
[['Date'], 'safe'],
[['Views', 'Published', 'Grey', 'CategorynewsID', 'Partner', 'EstablishmentID', 'IndexBlock', 'UpdateDate'], 'integer'],
[['Name'], 'string', 'max' => 200],
[['Grey', 'Views', 'CategorynewsID', 'Partner', 'EstablishmentID', 'IndexBlock', 'Published'], 'default', 'value' => 0]
];
}
public function beforeSave($insert)
{
if (parent::beforeSave($insert)) {
$this->UpdateDate = time();
return true;
}
return false;
}
public function behaviors()
{
return [
'slug' => [
'class' => 'Zelenin\yii\behaviors\Slug',
'slugAttribute' => 'Slug',
'attribute' => 'Name',
// optional params
'ensureUnique' => false,
'replacement' => '-',
'lowercase' => true,
'immutable' => true,
// If intl extension is enabled, see http://userguide.icu-project.org/transforms/general.
'transliterateOptions' => 'Russian-Latin/BGN; Any-Latin; Latin-ASCII; NFD; [:Nonspacing Mark:] Remove; NFC;'
]
];
}
/**
* @inheritdoc
*/
public function attributeLabels()
{
return [
'ID' => 'ID',
'Slug' => 'Slug',
'Name' => 'Название блока',
'About' => 'Краткое описание',
'Photo' => 'Фото',
'Date' => 'Дата и время',
'Views' => 'Просмотры',
'Text' => 'Текст',
'Published' => 'Опубликовать',
'Grey' => 'Серый дизайн блока',
'Partner' => 'Новость партнеров',
'EstablishmentID' => 'Заведение',
'IndexBlock' => 'Выводить на главной',
'UpdateDate' => 'Update Date',
];
}
public function getCategorynews() {
return Categorynews::find()->where(['ID' => $this->CategorynewsID])->one();
}
public function getComments() {
return NewsComments::find()->where(['NewsBlockID' => $this->ID, 'CommentID' => null])->count();
}
}
|
tatia-kom/gvd.my
|
common/models/site/NewsBlock.php
|
PHP
|
bsd-3-clause
| 3,238
|
<?php
/**
* Locale data for 'es_PR'.
*
* This file is automatically generated by yiic cldr command.
*
* Copyright © 1991-2007 Unicode, Inc. All rights reserved.
* Distributed under the Terms of Use in http://www.unicode.org/copyright.html.
*
* Copyright © 2008-2010 Yii Software LLC (http://www.yiiframework.com/license/)
*/
return array (
'version' => '4178',
'numberSymbols' =>
array (
'decimal' => '.',
'group' => ',',
'list' => ';',
'percentSign' => '%',
'nativeZeroDigit' => '0',
'patternDigit' => '#',
'plusSign' => '+',
'minusSign' => '-',
'exponential' => 'E',
'perMille' => '‰',
'infinity' => '∞',
'nan' => 'NaN',
),
'decimalFormat' => '#,##0.###',
'scientificFormat' => '#E0',
'percentFormat' => '#,##0%',
'currencyFormat' => '¤ #,##0.00',
'currencySymbols' =>
array (
'AFN' => 'Af',
'ANG' => 'NAf.',
'AOA' => 'Kz',
'ARA' => '₳',
'ARL' => '$L',
'ARM' => 'm$n',
'ARS' => 'AR$',
'AUD' => 'AU$',
'AWG' => 'Afl.',
'AZN' => 'man.',
'BAM' => 'KM',
'BBD' => 'Bds$',
'BDT' => 'Tk',
'BEF' => 'BF',
'BHD' => 'BD',
'BIF' => 'FBu',
'BMD' => 'BD$',
'BND' => 'BN$',
'BOB' => 'Bs',
'BOP' => '$b.',
'BRL' => 'R$',
'BSD' => 'BS$',
'BTN' => 'Nu.',
'BWP' => 'BWP',
'BZD' => 'BZ$',
'CAD' => 'CA$',
'CDF' => 'CDF',
'CHF' => 'Fr.',
'CLE' => 'Eº',
'CLP' => 'CL$',
'CNY' => 'CN¥',
'COP' => 'CO$',
'CRC' => '₡',
'CUC' => 'CUC$',
'CUP' => 'CU$',
'CVE' => 'CV$',
'CYP' => 'CY£',
'CZK' => 'Kč',
'DEM' => 'DM',
'DJF' => 'Fdj',
'DKK' => 'Dkr',
'DOP' => 'RD$',
'DZD' => 'DA',
'EEK' => 'Ekr',
'EGP' => 'EG£',
'ERN' => 'Nfk',
'ESP' => '₧',
'ETB' => 'Br',
'EUR' => '€',
'FIM' => 'mk',
'FJD' => 'FJ$',
'FKP' => 'FK£',
'FRF' => '₣',
'GBP' => '£',
'GHC' => '₵',
'GHS' => 'GH₵',
'GIP' => 'GI£',
'GMD' => 'GMD',
'GNF' => 'FG',
'GRD' => '₯',
'GTQ' => 'GTQ',
'GYD' => 'GY$',
'HKD' => 'HK$',
'HNL' => 'HNL',
'HRK' => 'kn',
'HTG' => 'HTG',
'HUF' => 'Ft',
'IDR' => 'Rp',
'IEP' => 'IR£',
'ILP' => 'I£',
'ILS' => '₪',
'INR' => 'Rs',
'ISK' => 'Ikr',
'ITL' => 'IT₤',
'JMD' => 'J$',
'JOD' => 'JD',
'JPY' => 'JP¥',
'KES' => 'Ksh',
'KMF' => 'CF',
'KRW' => '₩',
'KWD' => 'KD',
'KYD' => 'KY$',
'LAK' => '₭',
'LBP' => 'LB£',
'LKR' => 'SLRs',
'LRD' => 'L$',
'LSL' => 'LSL',
'LTL' => 'Lt',
'LVL' => 'Ls',
'LYD' => 'LD',
'MMK' => 'MMK',
'MNT' => '₮',
'MOP' => 'MOP$',
'MRO' => 'UM',
'MTL' => 'Lm',
'MTP' => 'MT£',
'MUR' => 'MURs',
'MXP' => 'MX$',
'MYR' => 'RM',
'MZM' => 'Mt',
'MZN' => 'MTn',
'NAD' => 'N$',
'NGN' => '₦',
'NIO' => 'C$',
'NLG' => 'fl',
'NOK' => 'Nkr',
'NPR' => 'NPRs',
'NZD' => 'NZ$',
'PAB' => 'B/.',
'PEI' => 'I/.',
'PEN' => 'S/.',
'PGK' => 'PGK',
'PHP' => '₱',
'PKR' => 'PKRs',
'PLN' => 'zł',
'PTE' => 'Esc',
'PYG' => '₲',
'QAR' => 'QR',
'RHD' => 'RH$',
'RON' => 'RON',
'RSD' => 'din.',
'SAR' => 'SR',
'SBD' => 'SI$',
'SCR' => 'SRe',
'SDD' => 'LSd',
'SEK' => 'Skr',
'SGD' => 'S$',
'SHP' => 'SH£',
'SKK' => 'Sk',
'SLL' => 'Le',
'SOS' => 'Ssh',
'SRD' => 'SR$',
'SRG' => 'Sf',
'STD' => 'Db',
'SVC' => 'SV₡',
'SYP' => 'SY£',
'SZL' => 'SZL',
'THB' => '฿',
'TMM' => 'TMM',
'TND' => 'DT',
'TOP' => 'T$',
'TRL' => 'TRL',
'TRY' => 'TL',
'TTD' => 'TT$',
'TWD' => 'NT$',
'TZS' => 'TSh',
'UAH' => '₴',
'UGX' => 'USh',
'USD' => '$',
'UYU' => '$U',
'VEF' => 'Bs.F.',
'VND' => '₫',
'VUV' => 'VT',
'WST' => 'WS$',
'XAF' => 'FCFA',
'XCD' => 'EC$',
'XOF' => 'CFA',
'XPF' => 'CFPF',
'YER' => 'YR',
'ZAR' => 'R',
'ZMK' => 'ZK',
'ZRN' => 'NZ',
'ZRZ' => 'ZRZ',
'ZWD' => 'Z$',
),
'monthNames' =>
array (
'wide' =>
array (
1 => 'enero',
2 => 'febrero',
3 => 'marzo',
4 => 'abril',
5 => 'mayo',
6 => 'junio',
7 => 'julio',
8 => 'agosto',
9 => 'septiembre',
10 => 'octubre',
11 => 'noviembre',
12 => 'diciembre',
),
'abbreviated' =>
array (
1 => 'ene',
2 => 'feb',
3 => 'mar',
4 => 'abr',
5 => 'may',
6 => 'jun',
7 => 'jul',
8 => 'ago',
9 => 'sep',
10 => 'oct',
11 => 'nov',
12 => 'dic',
),
),
'monthNamesSA' =>
array (
'narrow' =>
array (
1 => 'E',
2 => 'F',
3 => 'M',
4 => 'A',
5 => 'M',
6 => 'J',
7 => 'J',
8 => 'A',
9 => 'S',
10 => 'O',
11 => 'N',
12 => 'D',
),
),
'weekDayNames' =>
array (
'wide' =>
array (
0 => 'domingo',
1 => 'lunes',
2 => 'martes',
3 => 'miércoles',
4 => 'jueves',
5 => 'viernes',
6 => 'sábado',
),
'abbreviated' =>
array (
0 => 'dom',
1 => 'lun',
2 => 'mar',
3 => 'mié',
4 => 'jue',
5 => 'vie',
6 => 'sáb',
),
),
'weekDayNamesSA' =>
array (
'narrow' =>
array (
0 => 'D',
1 => 'L',
2 => 'M',
3 => 'M',
4 => 'J',
5 => 'V',
6 => 'S',
),
),
'eraNames' =>
array (
'abbreviated' =>
array (
0 => 'a.C.',
1 => 'd.C.',
),
'wide' =>
array (
0 => 'antes de Cristo',
1 => 'anno Dómini',
),
'narrow' =>
array (
0 => 'a.C.',
1 => 'd.C.',
),
),
'dateFormats' =>
array (
'full' => 'EEEE d \'de\' MMMM \'de\' y',
'long' => 'd \'de\' MMMM \'de\' y',
'medium' => 'MM/dd/yyyy',
'short' => 'MM/dd/yy',
),
'timeFormats' =>
array (
'full' => 'HH:mm:ss zzzz',
'long' => 'HH:mm:ss z',
'medium' => 'HH:mm:ss',
'short' => 'HH:mm',
),
'dateTimeFormat' => '{1} {0}',
'amName' => 'a.m.',
'pmName' => 'p.m.',
);
|
hukumonline/yii
|
framework/i18n/data/es_pr.php
|
PHP
|
bsd-3-clause
| 6,241
|
# -*- coding: utf-8 -*-
"""
DU task for ABP Table: doing jointly row BIESO and horizontal cuts
block2line edges do not cross another block.
The cut are based on baselines of text blocks.
- the labels of horizontal cuts are SIO (instead of SO in previous version)
Copyright Naver Labs Europe(C) 2018 JL Meunier
Developed for the EU project READ. The READ project has received funding
from the European Union's Horizon 2020 research and innovation programme
under grant agreement No 674943.
"""
import sys, os
import math
from lxml import etree
import collections
import numpy as np
from sklearn.pipeline import Pipeline, FeatureUnion
try: #to ease the use without proper Python installation
import TranskribusDU_version
except ImportError:
sys.path.append( os.path.dirname(os.path.dirname( os.path.abspath(sys.argv[0]) )) )
import TranskribusDU_version
from common.trace import traceln
from tasks import _checkFindColDir, _exit
from tasks.DU_CRF_Task import DU_CRF_Task
from xml_formats.PageXml import MultiPageXml
import graph.GraphModel
from crf.Edge import Edge, SamePageEdge
from crf.Graph_MultiPageXml import Graph_MultiPageXml
from crf.NodeType_PageXml import NodeType_PageXml_type_woText
#from crf.FeatureDefinition_PageXml_std_noText import FeatureDefinition_PageXml_StandardOnes_noText
from crf.FeatureDefinition import FeatureDefinition
from crf.Transformer import Transformer, TransformerListByType
from crf.Transformer import EmptySafe_QuantileTransformer as QuantileTransformer
from crf.Transformer_PageXml import NodeTransformerXYWH_v2, NodeTransformerNeighbors, Node1HotFeatures
from crf.Transformer_PageXml import Edge1HotFeatures, EdgeBooleanFeatures_v2, EdgeNumericalSelector
from crf.PageNumberSimpleSequenciality import PageNumberSimpleSequenciality
from tasks.DU_ABPTableCutAnnotator import BaselineCutAnnotator
class GraphCut(Graph_MultiPageXml):
"""
We specialize the class of graph because the computation of edges is quite specific
"""
#Cut stuff
#iModulo = 1 # map the coordinate to this modulo
fMinPageCoverage = 0.5 # minimal coverage to consider a GT table separator
iLineVisibility = 5 * 11 # a cut line sees other cut line up to N pixels downward
iBlockVisibility = 3*7*13 # a block sees neighbouring cut lines at N pixels
_lClassicNodeType = None
@classmethod
def setClassicNodeTypeList(cls, lNodeType):
"""
determine which type of node goes thru the classical way for determining
the edges (vertical or horizontal overlap, with occlusion, etc.)
"""
cls._lClassicNodeType = lNodeType
def parseDocFile(self, sFilename, iVerbose=0):
"""
Load that document as a CRF Graph.
Also set the self.doc variable!
Return a CRF Graph object
"""
self.doc = etree.parse(sFilename)
self.lNode, self.lEdge = list(), list()
self.lNodeBlock = [] # text node
self.lNodeCutLine = [] # cut line node
root = self.doc.getroot()
doer = BaselineCutAnnotator()
doer.setLabelScheme_SIO() #use SIO instead of SO labels!
#doer.setModulo(self.iModulo) # this is optional
#load the groundtruth table separators, if any, per page (1 in tABP)
ltlYlX = doer.get_separator_YX_from_DOM(root, self.fMinPageCoverage)
for (lHi, lVi) in ltlYlX:
traceln(" - found %d horizontal, %d vertical GT separators" % (len(lHi), len(lVi)))
#create DOM node reflecting the cuts
#first clean (just in case!)
n = doer.remove_cuts_from_dom(root)
if n > 0:
traceln(" - removed %d pre-existing cut lines" % n)
# if GT, then we have labelled cut lines in DOM
_ltlYCutXCut = doer.add_cut_to_DOM(root, ltlYlX=ltlYlX)
lClassicType = [nt for nt in self.getNodeTypeList() if nt in self._lClassicNodeType]
lSpecialType = [nt for nt in self.getNodeTypeList() if nt not in self._lClassicNodeType]
for (pnum, page, domNdPage) in self._iter_Page_DocNode(self.doc):
#now that we have the page, let's create the node for each type!
lClassicPageNode = [nd for nodeType in lClassicType for nd in nodeType._iter_GraphNode(self.doc, domNdPage, page) ]
lSpecialPageNode = [nd for nodeType in lSpecialType for nd in nodeType._iter_GraphNode(self.doc, domNdPage, page) ]
self.lNode.extend(lClassicPageNode) # e.g. the TextLine objects
self.lNodeBlock.extend(lClassicPageNode)
self.lNode.extend(lSpecialPageNode) # e.g. the cut lines!
self.lNodeCutLine.extend(lSpecialPageNode)
#no previous page to consider (for cross-page links...) => None
lClassicPageEdge = Edge.computeEdges(None, lClassicPageNode)
self.lEdge.extend(lClassicPageEdge)
# Now, compute edges between special and classic objects...
lSpecialPageEdge = self.computeSpecialEdges(lClassicPageNode,
lSpecialPageNode,
doer.bCutIsBeforeText)
self.lEdge.extend(lSpecialPageEdge)
#if iVerbose>=2: traceln("\tPage %5d %6d nodes %7d edges"%(pnum, len(lPageNode), len(lPageEdge)))
if iVerbose>=2:
traceln("\tPage %5d"%(pnum))
traceln("\t block: %6d nodes %7d edges (to block)" %(pnum, len(lClassicPageNode), len(lClassicPageEdge)))
traceln("\t line: %6d nodes %7d edges (from block)"%(pnum, len(lSpecialPageNode), len(lSpecialPageEdge)))
if iVerbose: traceln("\t\t (%d nodes, %d edges)"%(len(self.lNode), len(self.lEdge)) )
return self
@classmethod
    def computeSpecialEdges(cls, lClassicPageNode, lSpecialPageNode, bCutIsBeforeText):
"""
return a list of edges
"""
raise Exception("Specialize this method")
class Edge_BL(Edge):
"""Edge block-to-Line"""
pass
class Edge_LL(Edge):
"""Edge line-to-Line"""
pass
class GraphCut_H(GraphCut):
"""
Only horizontal cut lines
"""
def __init__(self):
self.showClassParam()
@classmethod
def showClassParam(cls):
try:
cls.bParamShownOnce
assert cls.bParamShownOnce == True
except:
#traceln(" - iModulo : " , cls.iModulo)
traceln(" - block_see_line : " , cls.iBlockVisibility)
traceln(" - line_see_line : " , cls.iLineVisibility)
traceln(" - fMinPageCoverage : " , cls.fMinPageCoverage)
cls.bParamShownOnce = True
def getNodeListByType(self, iTyp):
if iTyp == 0:
return self.lNodeBlock
else:
return self.lNodeCutLine
def getEdgeListByType(self, typA, typB):
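        # edges by ordered node-type pair: (block,block) -> SamePageEdge, (block,line) -> Edge_BL, (line,line) -> Edge_LL; (line,block) has none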
if typA == 0:
if typB == 0:
return (e for e in self.lEdge if isinstance(e, SamePageEdge))
else:
return (e for e in self.lEdge if isinstance(e, Edge_BL))
else:
if typB == 0:
return []
else:
return (e for e in self.lEdge if isinstance(e, Edge_LL))
@classmethod
def computeSpecialEdges(self, lClassicPageNode, lSpecialPageNode,
bCutIsBeforeText):
"""
Compute:
- edges between each block and the cut line above/across/below the block
- edges between cut lines
return a list of edges
"""
#augment the block with the coordinate of its baseline central point
for blk in lClassicPageNode:
try:
x,y = BaselineCutAnnotator.getDomBaselineXY(blk.node)
blk.x_bslne = x
blk.y_bslne = y
except IndexError:
traceln("** WARNING: no Baseline in ", blk.domid)
traceln("** Using x2 and y2 instead... :-/")
blk.x_bslne = blk.x2
blk.y_bslne = blk.y2
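        # cut lines are horizontal segments (y1 == y2); round to int because the BBox DeltaFun may have produced floats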
for cutBlk in lSpecialPageNode:
assert cutBlk.y1 == cutBlk.y2
cutBlk.y1 = int(round(cutBlk.y1)) #DeltaFun make float
cutBlk.y2 = cutBlk.y1
#block to cut line edges
lEdge = []
for blk in lClassicPageNode:
for cutBlk in lSpecialPageNode:
if blk.y_bslne == cutBlk.y1:
edge = Edge_BL(blk, cutBlk)
edge.len = 0
edge._type = 0 # Cut line is crossing the block
lEdge.append(edge)
elif abs(blk.y_bslne - cutBlk.y1) <= self.iBlockVisibility:
edge = Edge_BL(blk, cutBlk)
# experiments show that abs helps
# edge.len = (blk.y_bslne - cutBlk.y1) / self.iBlockVisibility
edge.len = abs(blk.y_bslne - cutBlk.y1) / self.iBlockVisibility
edge._type = -1 if blk.y_bslne > cutBlk.y1 else +1
lEdge.append(edge)
#sort those edge from top to bottom
lEdge.sort(key=lambda o: o.B.y1) # o.B.y1 == o.B.y2 by construction
#now filter those edges
n0 = len(lEdge)
if False:
print("--- before filtering: %d edges" % len(lEdge))
lSortedEdge = sorted(lEdge, key=lambda x: x.A.domid)
for edge in lSortedEdge:
print("Block domid=%s y1=%s y2=%s yg=%s"%(edge.A.domid, edge.A.y1, edge.A.y2, edge.A.y_bslne)
+ " %s line %s "%(["↑", "-", "↓"][1+edge._type],
edge.B.y1)
+ "domid=%s y1=%s " %(edge.B.domid, edge.B.y1)
+str(id(edge))
)
lEdge = self._filterBadEdge(lEdge, lSpecialPageNode, bCutIsBeforeText)
traceln(" - filtering: removed %d edges due to obstruction." % (n0-len(lEdge)))
if False:
print("--- After filtering: %d edges" % len(lEdge))
lSortedEdge = sorted(lEdge, key=lambda x: x.A.domid)
print(len(lSortedEdge))
for edge in lSortedEdge:
print("Block domid=%s y1=%s y2=%s yg=%s"%(edge.A.domid, edge.A.y1, edge.A.y2, edge.A.y_bslne)
+ " %s line %s "%(["↑", "-", "↓"][1+edge._type],
edge.B.y1)
+ "domid=%s y1=%s " %(edge.B.domid, edge.B.y1)
+str(id(edge))
)
if self.iLineVisibility > 0:
# Cut line to Cut line edges
lSpecialPageNode.sort(key=lambda o: o.y1)
for i, A in enumerate(lSpecialPageNode):
for B in lSpecialPageNode[i+1:]:
if B.y1 - A.y1 <= self.iLineVisibility:
edge = Edge_LL(A, B)
edge.len = (B.y1 - A.y1) / self.iLineVisibility
assert edge.len >= 0
lEdge.append(edge)
else:
break
return lEdge
@classmethod
def _filterBadEdge(cls, lEdge, lCutLine, bCutIsBeforeText, fRatio=0.25):
"""
We get
- a list of block2Line edges
- a sorted list of cut line
        But some blocks should not be connected to a line due to obstruction by
        another block.
We filter out those edges...
return a sub-list of lEdge
"""
lKeepEdge = []
def _xoverlapSrcSrc(edge, lEdge):
"""
does the source node of edge overlap with the source node of any
edge of the list?
"""
A = edge.A
for _edge in lEdge:
if A.significantXOverlap(_edge.A, fRatio): return True
return False
def _yoverlapSrcSrc(edge, lEdge):
"""
does the source node of edge overlap with the source node of any
edge of the list?
"""
A = edge.A
for _edge in lEdge:
if A.significantYOverlap(_edge.A, fRatio): return True
return False
#there are two ways for dealing with lines crossed by a block
# - either it prevents another block to link to the line (assuming an x-overlap)
# - or not (historical way)
# THIS IS THE "MODERN" way!!
#check carefully the inequality below...
if bCutIsBeforeText == True:
keep1 = 0
keep2 = 1
else:
keep1 = -1
keep2 = 0
#take each line in turn
for ndLine in lCutLine:
#--- process downward edges
#TODO: index!
lDownwardAndXingEdge = [edge for edge in lEdge \
if edge._type > keep1 and edge.B == ndLine]
if lDownwardAndXingEdge:
#sort edge by source block from closest to line block to farthest
lDownwardAndXingEdge.sort(key=lambda o: ndLine.y1 - o.A.y_bslne)
lKeepDownwardEdge = [lDownwardAndXingEdge.pop(0)]
#now keep all edges whose source does not overlap vertically with
# the source of an edge that is kept
for edge in lDownwardAndXingEdge:
if not _xoverlapSrcSrc(edge, lKeepDownwardEdge):
lKeepDownwardEdge.append(edge)
lKeepEdge.extend(lKeepDownwardEdge)
#NOTHING to do for crossing edges: they should be in the list!
# #--- keep all crossing edges
# #TODO: index!
# lCrossingEdge = [edge for edge in lEdge \
# if edge._type == 0 and edge.B == ndLine]
#
# lKeepEdge.extend(lCrossingEdge)
#--- process upward edges
#TODO: index!
lUpwarAndXingdEdge = [edge for edge in lEdge \
if edge._type < keep2 and edge.B == ndLine]
if lUpwarAndXingdEdge:
            #sort edge by source block from closest to the line to farthest
lUpwarAndXingdEdge.sort(key=lambda o: o.A.y_bslne - ndLine.y2)
lKeepUpwardEdge = [lUpwarAndXingdEdge.pop(0)]
#now keep all edges whose source does not overlap vertically with
# the source of an edge that is kept
for edge in lUpwarAndXingdEdge:
if not _xoverlapSrcSrc(edge, lKeepUpwardEdge):
lKeepUpwardEdge.append(edge)
# now we keep only the edges, excluding the crossing ones
# (already included!!)
lKeepEdge.extend(edge for edge in lKeepUpwardEdge)
        #--- (crossing edges are already in the kept list, nothing more to add)
return lKeepEdge
#------------------------------------------------------------------------------------------------------
class CutLine_NodeTransformer_v2(Transformer):
"""
features of a Cut line:
- horizontal or vertical.
"""
def transform(self, lNode):
        #6 columns: (bias, v, v*v) for horizontal cut lines, and the same 3 slots for vertical ones
        #a = np.zeros( ( len(lNode), 10 ) , dtype=np.float64) # 4 possible orientations: 0, 1, 2, 3
        a = np.zeros( ( len(lNode), 6 ) , dtype=np.float64)
for i, blk in enumerate(lNode):
page = blk.page
if abs(blk.x2 - blk.x1) > abs(blk.y1 - blk.y2):
#horizontal
v = 2*blk.y1/float(page.h) - 1 # to range -1, +1
a[i,0:3] = (1.0, v, v*v)
else:
#vertical
v = 2*blk.x1/float(page.w) - 1 # to range -1, +1
a[i,3:6] = (1.0, v, v*v)
return a
class Block2CutLine_EdgeTransformer(Transformer):
"""
features of a block to Cut line edge:
- below, crossing, above
"""
def transform(self, lEdge):
        a = np.zeros( ( len(lEdge), 3 + 3 + 3) , dtype=np.float64) # 3 relative positions (below/crossing/above) x (1-hot, len, len*len)
for i, edge in enumerate(lEdge):
z = 1 + edge._type # _type is -1 or 0 or 1
a[i, z] = 1.0
a[i, 3 + z] = edge.len # normalised on [0, 1] edge length
a[i, 6 + z] = edge.len * edge.len
return a
class CutLine2CutLine_EdgeTransformer(Transformer): # ***** USELESS *****
"""
features of a block to Cut line edge:
- below, crossing, above
"""
def transform(self, lEdge):
a = np.zeros( ( len(lEdge), 3 ) , dtype=np.float64)
for i, edge in enumerate(lEdge):
a[i,:] = (1, edge.len, edge.len * edge.len)
return a
class My_FeatureDefinition_v2(FeatureDefinition):
"""
Multitype version:
so the node_transformer actually is a list of node_transformer of length n_class
    the edge_transformer actually is a list of edge_transformer of length n_class^2
We also inherit from FeatureDefinition_T !!!
"""
n_QUANTILES = 16
def __init__(self, **kwargs):
"""
set _node_transformer, _edge_transformer, tdifNodeTextVectorizer
"""
FeatureDefinition.__init__(self)
nbTypes = self._getTypeNumber(kwargs)
block_transformer = FeatureUnion( [ #CAREFUL IF YOU CHANGE THIS - see cleanTransformers method!!!!
("xywh", Pipeline([
('selector', NodeTransformerXYWH_v2()),
#v1 ('xywh', StandardScaler(copy=False, with_mean=True, with_std=True)) #use in-place scaling
('xywh', QuantileTransformer(n_quantiles=self.n_QUANTILES, copy=False)) #use in-place scaling
])
)
, ("neighbors", Pipeline([
('selector', NodeTransformerNeighbors()),
#v1 ('neighbors', StandardScaler(copy=False, with_mean=True, with_std=True)) #use in-place scaling
('neighbors', QuantileTransformer(n_quantiles=self.n_QUANTILES, copy=False)) #use in-place scaling
])
)
, ("1hot", Pipeline([
('1hot', Node1HotFeatures()) #does the 1-hot encoding directly
])
)
])
Cut_line_transformer = CutLine_NodeTransformer_v2()
self._node_transformer = TransformerListByType([block_transformer, Cut_line_transformer])
edge_BB_transformer = FeatureUnion( [ #CAREFUL IF YOU CHANGE THIS - see cleanTransformers method!!!!
("1hot", Pipeline([
('1hot', Edge1HotFeatures(PageNumberSimpleSequenciality()))
])
)
, ("boolean", Pipeline([
('boolean', EdgeBooleanFeatures_v2())
])
)
, ("numerical", Pipeline([
('selector', EdgeNumericalSelector()),
#v1 ('numerical', StandardScaler(copy=False, with_mean=True, with_std=True)) #use in-place scaling
('numerical', QuantileTransformer(n_quantiles=self.n_QUANTILES, copy=False)) #use in-place scaling
])
)
] )
edge_BL_transformer = Block2CutLine_EdgeTransformer()
edge_LL_transformer = CutLine2CutLine_EdgeTransformer()
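        # the 4 edge transformers below are indexed by ordered type pair: (block,block), (block,line), (line,block), (line,line)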
self._edge_transformer = TransformerListByType([edge_BB_transformer,
edge_BL_transformer,
edge_BL_transformer, # useless but required
edge_LL_transformer
])
self.tfidfNodeTextVectorizer = None #tdifNodeTextVectorizer
def fitTranformers(self, lGraph,lY=None):
"""
Fit the transformers using the graphs, but TYPE BY TYPE !!!
return True
"""
self._node_transformer[0].fit([nd for g in lGraph for nd in g.getNodeListByType(0)])
self._node_transformer[1].fit([nd for g in lGraph for nd in g.getNodeListByType(1)])
self._edge_transformer[0].fit([e for g in lGraph for e in g.getEdgeListByType(0, 0)])
self._edge_transformer[1].fit([e for g in lGraph for e in g.getEdgeListByType(0, 1)])
self._edge_transformer[2].fit([e for g in lGraph for e in g.getEdgeListByType(1, 0)])
self._edge_transformer[3].fit([e for g in lGraph for e in g.getEdgeListByType(1, 1)])
return True
class DU_ABPTableRCut(DU_CRF_Task):
"""
We will do a CRF model for a DU task
, with the below labels
"""
sXmlFilenamePattern = "*[0-9].mpxml"
iBlockVisibility = None
iLineVisibility = None
#=== CONFIGURATION ====================================================================
@classmethod
def getConfiguredGraphClass(cls):
"""
In this class method, we must return a configured graph class
"""
# Textline labels
# Begin Inside End Single Other
lLabels_BIESO = ['B', 'I', 'E', 'S', 'O']
# Cut lines:
# Border Ignore Separator Outside
lLabels_SIO_Cut = ['S', 'I', 'O']
#DEFINING THE CLASS OF GRAPH WE USE
DU_GRAPH = GraphCut_H
DU_GRAPH.iBlockVisibility = cls.iBlockVisibility
DU_GRAPH.iLineVisibility = cls.iLineVisibility
# ROW
ntR = NodeType_PageXml_type_woText("row"
, lLabels_BIESO
, None
, False
, BBoxDeltaFun=lambda v: max(v * 0.066, min(5, v/3))
)
ntR.setLabelAttribute("DU_row")
ntR.setXpathExpr( (".//pc:TextLine" #how to find the nodes
, "./pc:TextEquiv") #how to get their text
)
DU_GRAPH.addNodeType(ntR)
# HEADER
ntCutH = NodeType_PageXml_type_woText("sepH"
, lLabels_SIO_Cut
, None
, False
, None # equiv. to: BBoxDeltaFun=lambda _: 0
)
ntCutH.setLabelAttribute("type")
ntCutH.setXpathExpr( ('.//pc:CutSeparator[@orient="0"]' #how to find the nodes
, "./pc:TextEquiv") #how to get their text
)
DU_GRAPH.addNodeType(ntCutH)
DU_GRAPH.setClassicNodeTypeList( [ntR ])
return DU_GRAPH
def __init__(self, sModelName, sModelDir,
iBlockVisibility = None,
iLineVisibility = None,
sComment = None,
C=None, tol=None, njobs=None, max_iter=None,
inference_cache=None):
DU_ABPTableRCut.iBlockVisibility = iBlockVisibility
DU_ABPTableRCut.iLineVisibility = iLineVisibility
DU_CRF_Task.__init__(self
, sModelName, sModelDir
, dFeatureConfig = {'row_row':{}, 'row_sepH':{},
'sepH_row':{}, 'sepH_sepH':{},
'sepH':{}, 'row':{}}
, dLearnerConfig = {
'C' : .1 if C is None else C
, 'njobs' : 4 if njobs is None else njobs
, 'inference_cache' : 50 if inference_cache is None else inference_cache
#, 'tol' : .1
, 'tol' : .05 if tol is None else tol
, 'save_every' : 50 #save every 50 iterations,for warm start
, 'max_iter' : 10 if max_iter is None else max_iter
}
, sComment=sComment
#,cFeatureDefinition=FeatureDefinition_PageXml_StandardOnes_noText
,cFeatureDefinition=My_FeatureDefinition_v2
)
#TODO: finish this!
def evalClusterByRow(self, sFilename):
"""
Evaluate the quality of the partitioning by table row, by comparing the
GT table information to the partition done automatically (thanks to the
separators added to the DOM).
"""
self.doc = etree.parse(sFilename)
root = self.doc.getroot()
# doer = BaselineCutAnnotator()
#
# #load the groundtruth table separators, if any, per page (1 in tABP)
# ltlYlX = doer.get_separator_YX_from_DOM(root, self.fMinPageCoverage)
# for (lHi, lVi) in ltlYlX:
# traceln(" - found %d horizontal, %d vertical GT separators" % (len(lHi), len(lVi)))
# #create DOM node reflecting the cuts
# #first clean (just in case!)
# n = doer.remove_cuts_from_dom(root)
# if n > 0:
# traceln(" - removed %d pre-existing cut lines" % n)
#
# # if GT, then we have labelled cut lines in DOM
# _ltlYCutXCut = doer.add_cut_to_DOM(root, ltlYlX=ltlYlX)
lClassicType = [nt for nt in self.getNodeTypeList() if nt in self._lClassicNodeType]
lSpecialType = [nt for nt in self.getNodeTypeList() if nt not in self._lClassicNodeType]
#load the block nodes per page
for (pnum, page, domNdPage) in self._iter_Page_DocNode(self.doc):
#now that we have the page, let's create the node for each type!
lClassicPageNode = [nd for nodeType in lClassicType for nd in nodeType._iter_GraphNode(self.doc, domNdPage, page) ]
lSpecialType = [nt for nt in self.getNodeTypeList() if nt not in self._lClassicNodeType]
# -- GT ---------------------------------------------
            # partition by columns and rows
dGTByRow = collections.defaultdict(list)
dGTByCol = collections.defaultdict(list)
for blk in lClassicPageNode:
cell = MultiPageXml.getAncestorByName(blk, 'TableCell')[0]
row, col, rowSpan, colSpan = [int(cell.get(sProp)) for sProp \
in ["row", "col", "rowSpan", "colSpan"] ]
# TODO: deal with span
dGTByRow[row].append(blk)
                dGTByCol[col].append(blk)
            for k,l in dGTByRow.items():
l.sort(key=lambda o: (o.x1, o.y1))
            for k,l in dGTByCol.items():
l.sort(key=lambda o: (o.y1, o.x1))
# -- Prediction ---------------------------------------------
# if options.bBaseline:
# self.bsln_mdl = self.addBaseline_LogisticRegression() #use a LR model trained by CutSearch as baseline
#=== END OF CONFIGURATION =============================================================
# def predict(self, lsColDir):
# """
# Return the list of produced files
# """
# self.sXmlFilenamePattern = "*.mpxml"
# return DU_CRF_Task.predict(self, lsColDir)
#
# def runForExternalMLMethod(self, lsColDir, storeX, applyY, bRevertEdges=False):
# """
# Return the list of produced files
# """
# self.sXmlFilenamePattern = "*.mpxml"
# return DU_CRF_Task.runForExternalMLMethod(self, lsColDir, storeX, applyY, bRevertEdges)
# ----------------------------------------------------------------------------
def main(sModelDir, sModelName, options):
doer = DU_ABPTableRCut(sModelName, sModelDir,
iBlockVisibility = options.iBlockVisibility,
iLineVisibility = options.iLineVisibility,
C = options.crf_C,
tol = options.crf_tol,
njobs = options.crf_njobs,
max_iter = options.max_iter,
inference_cache = options.crf_inference_cache)
if options.rm:
doer.rm()
return
lTrn, lTst, lRun, lFold = [_checkFindColDir(lsDir, bAbsolute=False) for lsDir in [options.lTrn, options.lTst, options.lRun, options.lFold]]
# if options.bAnnotate:
# doer.annotateDocument(lTrn)
# traceln('annotation done')
# sys.exit(0)
traceln("- classes: ", doer.getGraphClass().getLabelNameList())
    ## use a_mpxml files
#doer.sXmlFilenamePattern = doer.sLabeledXmlFilenamePattern
if options.iFoldInitNum or options.iFoldRunNum or options.bFoldFinish:
if options.iFoldInitNum:
"""
initialization of a cross-validation
"""
splitter, ts_trn, lFilename_trn = doer._nfold_Init(lFold, options.iFoldInitNum, bStoreOnDisk=True)
elif options.iFoldRunNum:
"""
Run one fold
"""
oReport = doer._nfold_RunFoldFromDisk(options.iFoldRunNum, options.warm, options.pkl)
traceln(oReport)
elif options.bFoldFinish:
tstReport = doer._nfold_Finish()
traceln(tstReport)
else:
assert False, "Internal error"
#no more processing!!
exit(0)
#-------------------
if lFold:
loTstRpt = doer.nfold_Eval(lFold, 3, .25, None, options.pkl)
sReportPickleFilename = os.path.join(sModelDir, sModelName + "__report.txt")
traceln("Results are in %s"%sReportPickleFilename)
graph.GraphModel.GraphModel.gzip_cPickle_dump(sReportPickleFilename, loTstRpt)
elif lTrn:
doer.train_save_test(lTrn, lTst, options.warm, options.pkl)
try: traceln("Baseline best estimator: %s"%doer.bsln_mdl.best_params_) #for CutSearch
except: pass
traceln(" --- CRF Model ---")
traceln(doer.getModel().getModelInfo())
elif lTst:
doer.load()
tstReport = doer.test(lTst)
traceln(tstReport)
if options.bDetailedReport:
traceln(tstReport.getDetailledReport())
sReportPickleFilename = os.path.join(sModelDir, sModelName + "__detailled_report.txt")
graph.GraphModel.GraphModel.gzip_cPickle_dump(sReportPickleFilename, tstReport)
if lRun:
if options.storeX or options.applyY:
try: doer.load()
except: pass #we only need the transformer
lsOutputFilename = doer.runForExternalMLMethod(lRun, options.storeX, options.applyY, options.bRevertEdges)
else:
doer.load()
lsOutputFilename = doer.predict(lRun)
traceln("Done, see in:\n %s"%lsOutputFilename)
# ----------------------------------------------------------------------------
if __name__ == "__main__":
version = "v.01"
usage, description, parser = DU_CRF_Task.getBasicTrnTstRunOptionParser(sys.argv[0], version)
# parser.add_option("--annotate", dest='bAnnotate', action="store_true",default=False, help="Annotate the textlines with BIES labels")
#FOR GCN
parser.add_option("--revertEdges", dest='bRevertEdges', action="store_true", help="Revert the direction of the edges")
parser.add_option("--detail", dest='bDetailedReport', action="store_true", default=False,help="Display detailed reporting (score per document)")
parser.add_option("--baseline", dest='bBaseline', action="store_true", default=False, help="report baseline method")
parser.add_option("--line_see_line", dest='iLineVisibility', action="store",
type=int, default=0,
help="seeline2line: how far in pixel can a line see another cut line?")
parser.add_option("--block_see_line", dest='iBlockVisibility', action="store",
type=int, default=273,
help="seeblock2line: how far in pixel can a block see a cut line?")
# ---
#parse the command line
(options, args) = parser.parse_args()
# ---
try:
sModelDir, sModelName = args
except Exception as e:
traceln("Specify a model folder and a model name!")
_exit(usage, 1, e)
main(sModelDir, sModelName, options)
|
Transkribus/TranskribusDU
|
TranskribusDU/tasks/TablePrototypes/DU_ABPTableRCut1SIO.py
|
Python
|
bsd-3-clause
| 34,506
|
<?php
namespace WbBase\WbTrait\Service;
use WbBase\Service\ServiceInterface;
/**
* ServiceProxyTrait
*
* @package WbBase\WbTrait\Service
* @author Źmicier Hryškieivič <zmicier@webbison.com>
*/
trait ServiceProxyTrait
{
/**
* @var ServiceInterface
*/
protected $service;
/**
* Service setter
*
* @param ServiceInterface $service Service instance.
*
     * @return $this
*/
public function setService(ServiceInterface $service)
{
$this->service = $service;
return $this;
}
/**
* Service getter
*
* @return ServiceInterface
*/
public function getService()
{
return $this->service;
}
}
|
zmicier/WbBase
|
src/WbBase/WbTrait/Service/ServiceProxyTrait.php
|
PHP
|
bsd-3-clause
| 716
|
package com.openxc.hardware.hud;
import java.io.IOException;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.Set;
import java.util.UUID;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothSocket;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.IntentFilter;
import android.content.Intent;
import android.util.Log;
/**
* The DeviceManager collects the functions required to discover and open a
* socket to the Bluetooth device.
*/
public class DeviceManager {
private final static String TAG = "DeviceManager";
private final static UUID RFCOMM_UUID = UUID.fromString(
"00001101-0000-1000-8000-00805f9b34fb");
private Context mContext;
private BluetoothAdapter mBluetoothAdapter;
private BluetoothSocket mSocket;
private BluetoothDevice mTargetDevice;
private final Lock mDeviceLock = new ReentrantLock();
private final Condition mDeviceChangedCondition =
mDeviceLock.newCondition();
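    // Guards mTargetDevice: connect() waits on this condition until discovery finds the target device.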
private BroadcastReceiver mReceiver;
/**
* The DeviceManager requires an Android Context in order to send the intent
* to enable Bluetooth if it isn't already on.
*/
public DeviceManager(Context context) throws BluetoothException {
mContext = context;
mBluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
if(mBluetoothAdapter == null) {
throw new BluetoothException();
}
}
/**
* Open an RFCOMM socket to the connected Bluetooth device.
*
     * The DeviceManager must already have found the target device, so
     * connect() needs to be called first.
*/
public BluetoothSocket setupSocket() throws BluetoothException {
if(mTargetDevice == null) {
Log.w(TAG, "Can't setup socket -- device is " + mTargetDevice);
throw new BluetoothException();
}
Log.d(TAG, "Scanning services on " + mTargetDevice);
try {
mSocket = mTargetDevice.createRfcommSocketToServiceRecord(
RFCOMM_UUID);
        } catch(IOException e) {
            Log.e(TAG, "Unable to create RFCOMM socket to " + mTargetDevice, e);
            throw new BluetoothException();
        }
try {
mSocket.connect();
return mSocket;
} catch(IOException e) {
Log.e(TAG, "Could not find required service on " + mTargetDevice);
try {
mSocket.close();
} catch(IOException e2) {}
throw new BluetoothException();
}
}
/**
* Discover and connect to the target device.
*
     * Discovery runs asynchronously, but this call blocks until a device
     * matching the target address has been found; afterwards the user
     * should call setupSocket() to get a socket connection.
*/
public void connect(String targetAddress) {
discoverDevices(targetAddress);
mDeviceLock.lock();
while(mTargetDevice == null) {
try {
mDeviceChangedCondition.await();
} catch(InterruptedException e) {}
}
mDeviceLock.unlock();
}
private void captureDevice(BluetoothDevice device) {
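        // Record the matched device, wake any thread blocked in connect(), and stop further discovery.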
mDeviceLock.lock();
mTargetDevice = device;
mDeviceChangedCondition.signal();
mDeviceLock.unlock();
if(mReceiver != null) {
mContext.unregisterReceiver(mReceiver);
mBluetoothAdapter.cancelDiscovery();
}
}
private boolean deviceDiscovered(BluetoothDevice device,
String targetAddress) {
Log.d(TAG, "Found Bluetooth device: " + device);
if(device.getAddress().equals(targetAddress)) {
Log.d(TAG, "Found matching device: " + device);
return true;
}
return false;
}
/**
* Check the list of previously paired devices and any discoverable devices
* for one matching the target address. Once a matching device is found,
* calls captureDevice to connect with it.
*/
private void discoverDevices(final String targetAddress) {
Log.d(TAG, "Starting device discovery");
Set<BluetoothDevice> pairedDevices =
mBluetoothAdapter.getBondedDevices();
for(BluetoothDevice device : pairedDevices) {
Log.d(TAG, "Found already paired device: " + device);
if(deviceDiscovered(device, targetAddress)) {
captureDevice(device);
return;
}
}
mReceiver = new BroadcastReceiver() {
public void onReceive(Context context, Intent intent) {
if(BluetoothDevice.ACTION_FOUND.equals(intent.getAction())) {
BluetoothDevice device = intent.getParcelableExtra(
BluetoothDevice.EXTRA_DEVICE);
if (device.getBondState() != BluetoothDevice.BOND_BONDED
&& deviceDiscovered(device, targetAddress)) {
captureDevice(device);
}
}
}
};
IntentFilter filter = new IntentFilter(BluetoothDevice.ACTION_FOUND);
mContext.registerReceiver(mReceiver, filter);
if(mBluetoothAdapter.isDiscovering()) {
mBluetoothAdapter.cancelDiscovery();
}
mBluetoothAdapter.startDiscovery();
}
}
|
openxc/simple-hud
|
android/driver/src/com/openxc/hardware/hud/DeviceManager.java
|
Java
|
bsd-3-clause
| 5,430
|
using System;
using System.Threading.Tasks;
using Microsoft.Extensions.Localization;
using OrchardCore.Lucene.Drivers;
using OrchardCore.Navigation;
namespace OrchardCore.Lucene
{
public class AdminMenu : INavigationProvider
{
private readonly IStringLocalizer S;
public AdminMenu(IStringLocalizer<AdminMenu> localizer)
{
S = localizer;
}
public Task BuildNavigationAsync(string name, NavigationBuilder builder)
{
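            // Only contribute entries when the "admin" navigation is being built.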
if (!String.Equals(name, "admin", StringComparison.OrdinalIgnoreCase))
{
return Task.CompletedTask;
}
builder
.Add(S["Search"], NavigationConstants.AdminMenuSearchPosition, search => search
.AddClass("search").Id("search")
.Add(S["Indexing"], S["Indexing"].PrefixPosition(), import => import
.Add(S["Lucene Indices"], S["Lucene Indices"].PrefixPosition(), indexes => indexes
.Action("Index", "Admin", new { area = "OrchardCore.Lucene" })
.Permission(Permissions.ManageIndexes)
.LocalNav())
.Add(S["Run Lucene Query"], S["Run Lucene Query"].PrefixPosition(), queries => queries
.Action("Query", "Admin", new { area = "OrchardCore.Lucene" })
.Permission(Permissions.ManageIndexes)
.LocalNav()))
.Add(S["Settings"], settings => settings
.Add(S["Search"], S["Search"].PrefixPosition(), entry => entry
.Action("Index", "Admin", new { area = "OrchardCore.Settings", groupId = LuceneSettingsDisplayDriver.GroupId })
.Permission(Permissions.ManageIndexes)
.LocalNav()
)));
return Task.CompletedTask;
}
}
}
|
xkproject/Orchard2
|
src/OrchardCore.Modules/OrchardCore.Lucene/AdminMenu.cs
|
C#
|
bsd-3-clause
| 2,000
|
/*
* Copyright 2016 Facebook, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package io.reactivesocket.tck;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface Test {
boolean pass() default true;
}
|
xytosis/reactivesocket-tck
|
src/main/java/io/reactivesocket/tck/Test.java
|
Java
|
bsd-3-clause
| 924
|
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>Customizing — Flask AppBuilder</title>
<link rel="stylesheet" href="_static/basic.css" type="text/css" />
<link rel="stylesheet" href="_static/pygments.css" type="text/css" />
<link rel="stylesheet" href="_static/bootswatch-3.3.4/flatly/bootstrap.min.css" type="text/css" />
<link rel="stylesheet" href="_static/bootstrap-sphinx.css" type="text/css" />
<script type="text/javascript">
var DOCUMENTATION_OPTIONS = {
URL_ROOT: './',
VERSION: '1.4.4',
COLLAPSE_INDEX: false,
FILE_SUFFIX: '.html',
HAS_SOURCE: true
};
</script>
<script type="text/javascript" src="_static/jquery.js"></script>
<script type="text/javascript" src="_static/underscore.js"></script>
<script type="text/javascript" src="_static/doctools.js"></script>
<script type="text/javascript" src="_static/js/jquery-1.11.0.min.js"></script>
<script type="text/javascript" src="_static/js/jquery-fix.js"></script>
<script type="text/javascript" src="_static/bootstrap-3.3.4/js/bootstrap.min.js"></script>
<script type="text/javascript" src="_static/bootstrap-sphinx.js"></script>
<link rel="top" title="Flask AppBuilder" href="index.html" />
<link rel="next" title="Templates" href="templates.html" />
<link rel="prev" title="Advanced Configuration" href="advanced.html" />
<meta charset='utf-8'>
<meta http-equiv='X-UA-Compatible' content='IE=edge,chrome=1'>
<meta name='viewport' content='width=device-width, initial-scale=1.0, maximum-scale=1'>
<meta name="apple-mobile-web-app-capable" content="yes">
</head>
<body role="document">
<a href="https://github.com/dpgaspar/Flask-AppBuilder"
class="visible-desktop hidden-xs"><img
id="gh-banner"
style="position: absolute; top: 50px; right: 0; border: 0;"
src="https://s3.amazonaws.com/github/ribbons/forkme_right_white_ffffff.png"
alt="Fork me on GitHub"></a>
<script>
// Adjust banner height.
$(function () {
var navHeight = $(".navbar .container").css("height");
$("#gh-banner").css("top", navHeight);
});
</script>
<div id="navbar" class="navbar navbar-default navbar-fixed-top">
<div class="container">
<div class="navbar-header">
<!-- .btn-navbar is used as the toggle for collapsed navbar content -->
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".nav-collapse">
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="index.html">
Flask-AppBuilder</a>
<span class="navbar-text navbar-version pull-left"><b>1.4.4</b></span>
</div>
<div class="collapse navbar-collapse nav-collapse">
<ul class="nav navbar-nav">
<li><a href="diagrams.html">Diagrams</a></li>
<li class="dropdown globaltoc-container">
<a role="button"
id="dLabelGlobalToc"
data-toggle="dropdown"
data-target="#"
href="index.html">Site <b class="caret"></b></a>
<ul class="dropdown-menu globaltoc"
role="menu"
aria-labelledby="dLabelGlobalToc"><ul>
<li class="toctree-l1"><a class="reference internal" href="intro.html">Introduction</a></li>
<li class="toctree-l1"><a class="reference internal" href="installation.html">Installation</a></li>
<li class="toctree-l1"><a class="reference internal" href="fabmanager.html">Command Line Manager</a></li>
<li class="toctree-l1"><a class="reference internal" href="config.html">Base Configuration</a></li>
</ul>
<ul>
<li class="toctree-l1"><a class="reference internal" href="views.html">Base Views</a></li>
<li class="toctree-l1"><a class="reference internal" href="quickhowto.html">Model Views (Quick How to)</a></li>
<li class="toctree-l1"><a class="reference internal" href="quickhowto_mongo.html">Model Views on MongoDB</a></li>
<li class="toctree-l1"><a class="reference internal" href="quickcharts.html">Chart Views</a></li>
<li class="toctree-l1"><a class="reference internal" href="quickfiles.html">Model Views with Files and Images</a></li>
<li class="toctree-l1"><a class="reference internal" href="quickminimal.html">Quick Minimal Application</a></li>
</ul>
<ul class="current">
<li class="toctree-l1"><a class="reference internal" href="relations.html">Model Relations</a></li>
<li class="toctree-l1"><a class="reference internal" href="actions.html">Actions</a></li>
<li class="toctree-l1"><a class="reference internal" href="advanced.html">Advanced Configuration</a></li>
<li class="toctree-l1 current"><a class="current reference internal" href="">Customizing</a></li>
<li class="toctree-l1"><a class="reference internal" href="templates.html">Templates</a></li>
</ul>
<ul>
<li class="toctree-l1"><a class="reference internal" href="generic_datasource.html">Generic Data Sources</a></li>
<li class="toctree-l1"><a class="reference internal" href="multipledbs.html">Multiple Databases</a></li>
<li class="toctree-l1"><a class="reference internal" href="i18n.html">i18n Translations</a></li>
</ul>
<ul>
<li class="toctree-l1"><a class="reference internal" href="security.html">Security</a></li>
<li class="toctree-l1"><a class="reference internal" href="user_registration.html">User Registration</a></li>
</ul>
<ul>
<li class="toctree-l1"><a class="reference internal" href="api.html">API Reference</a></li>
<li class="toctree-l1"><a class="reference internal" href="versionmigration.html">Version Migration</a></li>
<li class="toctree-l1"><a class="reference internal" href="versions.html">Versions</a></li>
</ul>
</ul>
</li>
<li class="dropdown">
<a role="button"
id="dLabelLocalToc"
data-toggle="dropdown"
data-target="#"
href="#">Page <b class="caret"></b></a>
<ul class="dropdown-menu localtoc"
role="menu"
aria-labelledby="dLabelLocalToc"><ul>
<li><a class="reference internal" href="#">Customizing</a><ul>
<li><a class="reference internal" href="#changing-themes">Changing themes</a></li>
<li><a class="reference internal" href="#changing-the-index">Changing the index</a></li>
<li><a class="reference internal" href="#changing-the-footer">Changing the Footer</a></li>
<li><a class="reference internal" href="#changing-menu-construction">Changing Menu Construction</a></li>
<li><a class="reference internal" href="#changing-widgets-and-templates">Changing Widgets and Templates</a></li>
<li><a class="reference internal" href="#change-default-view-behaviour">Change Default View Behaviour</a></li>
</ul>
</li>
</ul>
</ul>
</li>
<li>
<a href="advanced.html" title="Previous Chapter: Advanced Configuration"><span class="glyphicon glyphicon-chevron-left visible-sm"></span><span class="hidden-sm hidden-tablet">« Advanced Configu...</span>
</a>
</li>
<li>
<a href="templates.html" title="Next Chapter: Templates"><span class="glyphicon glyphicon-chevron-right visible-sm"></span><span class="hidden-sm hidden-tablet">Templates »</span>
</a>
</li>
</ul>
<form class="navbar-form navbar-right" action="search.html" method="get">
<div class="form-group">
<input type="text" name="q" class="form-control" placeholder="Search" />
</div>
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
</div>
</div>
</div>
<div class="container">
<div class="row">
<div class="col-md-12">
<div class="section" id="customizing">
<h1>Customizing<a class="headerlink" href="#customizing" title="Permalink to this headline">¶</a></h1>
<p>You can override and customize almost everything on the UI, or use the different templates and widgets already in the framework.</p>
<p>Even better, you can develop your own widgets or templates and contribute to the project.</p>
<div class="section" id="changing-themes">
<h2>Changing themes<a class="headerlink" href="#changing-themes" title="Permalink to this headline">¶</a></h2>
<p>F.A.B. comes with Bootswatch themes ready to use; to change the default Bootstrap theme, just change the APP_THEME key’s value.</p>
<ul>
<li><p class="first">On config.py (from flask-appbuilder-skeleton), using spacelab theme:</p>
<div class="highlight-python"><div class="highlight"><pre><span class="n">APP_THEME</span> <span class="o">=</span> <span class="s">"spacelab.css"</span>
</pre></div>
</div>
</li>
<li><p class="first">Not using a config.py on your applications, set the key like this:</p>
<div class="highlight-python"><div class="highlight"><pre><span class="n">app</span><span class="o">.</span><span class="n">config</span><span class="p">[</span><span class="s">'APP_THEME'</span><span class="p">]</span> <span class="o">=</span> <span class="s">"spacelab.css"</span>
</pre></div>
</div>
</li>
</ul>
<p>You can choose from the following <a class="reference external" href="https://github.com/dpgaspar/Flask-AppBuilder-Skeleton/blob/master/config.py">themes</a></p>
</div>
<div class="section" id="changing-the-index">
<h2>Changing the index<a class="headerlink" href="#changing-the-index" title="Permalink to this headline">¶</a></h2>
<p>The index can easily be overridden with your own.
You must develop your template, then define it in an IndexView and pass it to AppBuilder.</p>
<p>The default index template is very simple; you can create your own like this:</p>
<p>1 - Develop your template (on your <PROJECT_NAME>/app/templates/my_index.html):</p>
<div class="highlight-python"><div class="highlight"><pre>{% extends "appbuilder/base.html" %}
{% block content %}
<div class="jumbotron">
<div class="container">
<h1>{{_("My App on F.A.B.")}}</h1>
<p>{{_("My first app using F.A.B, bla, bla, bla")}}</p>
</div>
</div>
{% endblock %}
</pre></div>
</div>
<p>What happened here? We should always extend from “appbuilder/base.html”; this is the base template that includes all CSS and Javascript files and constructs the menu based on the user’s security definition.</p>
<p>Next we will override the “content” block; we could also override or extend other areas such as CSS and Javascript. We can even override base.html completely.</p>
<p>I’ve written the text in the content block like this:</p>
<div class="highlight-python"><div class="highlight"><pre><span class="p">{{</span><span class="n">_</span><span class="p">(</span><span class="s">"text to be translated"</span><span class="p">)}}</span>
</pre></div>
</div>
<p>This way we can use Babel to translate our index text.</p>
<p>2 - Define an IndexView</p>
<p>Define a special and simple view inherit from IndexView:</p>
<div class="highlight-python"><div class="highlight"><pre><span class="k">class</span> <span class="nc">MyIndexView</span><span class="p">(</span><span class="n">IndexView</span><span class="p">):</span>
<span class="n">index_template</span> <span class="o">=</span> <span class="s">'index.html'</span>
</pre></div>
</div>
<p>3 - Tell F.A.B to use your index view, when initializing AppBuilder:</p>
<div class="highlight-python"><div class="highlight"><pre><span class="n">appbuilder</span> <span class="o">=</span> <span class="n">AppBuilder</span><span class="p">(</span><span class="n">app</span><span class="p">,</span> <span class="n">db</span><span class="o">.</span><span class="n">session</span><span class="p">,</span> <span class="n">indexview</span><span class="o">=</span><span class="n">MyIndexView</span><span class="p">)</span>
</pre></div>
</div>
<p>Of course you can use a more complex index view: you can use any kind of view (BaseView children), and you can even
change the relative url path to whatever you want; remember to set <strong>default_view</strong> to your function.</p>
<p>You can override <strong>IndexView</strong> index function to display a different view if a user is logged in or not.</p>
</div>
<div class="section" id="changing-the-footer">
<h2>Changing the Footer<a class="headerlink" href="#changing-the-footer" title="Permalink to this headline">¶</a></h2>
<p>The default footer can easily be replaced with your own. You must develop your own template
to override the existing one.</p>
<p>Develop your jinja2 template and place it at the following relative path to override the F.A.B. footer.</p>
<p>./your_root_project_path/app/templates/appbuilder/footer.html</p>
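<p>As a minimal, purely illustrative sketch (the markup below is not the framework’s own footer, just an example of what an override could look like), such a footer.html could contain nothing more than plain HTML:</p>
<div class="highlight-python"><div class="highlight"><pre><footer class="footer">
  <div class="container">
    <p>My custom footer text</p>
  </div>
</footer>
</pre></div>
</div>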
<p>Actually you can override any given F.A.B. template.</p>
</div>
<div class="section" id="changing-menu-construction">
<h2>Changing Menu Construction<a class="headerlink" href="#changing-menu-construction" title="Permalink to this headline">¶</a></h2>
<p>You can change the way the menu is constructed by adding your own links and separators and by changing the navbar reverse property.</p>
<p>By default the menu is constructed based on your classes and rendered in a reversed navbar. Let’s take a quick look at how to easily change this.</p>
<blockquote>
<div><ul>
<li><p class="first">Change the reversed navbar style, on AppBuilder initialization:</p>
<div class="highlight-python"><div class="highlight"><pre><span class="n">appbuilder</span> <span class="o">=</span> <span class="n">AppBuilder</span><span class="p">(</span><span class="n">app</span><span class="p">,</span> <span class="n">db</span><span class="p">,</span> <span class="n">menu</span><span class="o">=</span><span class="n">Menu</span><span class="p">(</span><span class="n">reverse</span><span class="o">=</span><span class="bp">False</span><span class="p">))</span>
</pre></div>
</div>
</li>
<li><p class="first">Add your own menu links, on a default reversed navbar:</p>
<div class="highlight-python"><div class="highlight"><pre><span class="c"># Register a view, rendering a top menu without icon</span>
<span class="n">appbuilder</span><span class="o">.</span><span class="n">add_view</span><span class="p">(</span><span class="n">MyModelView</span><span class="p">,</span> <span class="s">"My View"</span><span class="p">)</span>
<span class="c"># Register a view, a submenu "Other View" from "Other" with a phone icon</span>
<span class="n">appbuilder</span><span class="o">.</span><span class="n">add_view</span><span class="p">(</span><span class="n">MyOtherModelView</span><span class="p">,</span> <span class="s">"Other View"</span><span class="p">,</span> <span class="n">icon</span><span class="o">=</span><span class="s">'fa-phone'</span><span class="p">,</span> <span class="n">category</span><span class="o">=</span><span class="s">"Others"</span><span class="p">)</span>
<span class="c"># Register a view, with label for babel support (internationalization), setup an icon for the category.</span>
<span class="n">appbuilder</span><span class="o">.</span><span class="n">add_view</span><span class="p">(</span><span class="n">MyOtherModelView</span><span class="p">,</span> <span class="s">"Other View"</span><span class="p">,</span> <span class="n">icon</span><span class="o">=</span><span class="s">'fa-phone'</span><span class="p">,</span> <span class="n">label</span><span class="o">=</span><span class="n">lazy_gettext</span><span class="p">(</span><span class="s">'Other View'</span><span class="p">),</span>
<span class="n">category</span><span class="o">=</span><span class="s">"Others"</span><span class="p">,</span> <span class="n">category_label</span><span class="o">=</span><span class="n">lazy_gettext</span><span class="p">(</span><span class="s">'Other'</span><span class="p">),</span> <span class="n">category_label</span><span class="o">=</span><span class="s">'fa-envelope'</span><span class="p">)</span>
<span class="c"># Add a link</span>
<span class="n">appbuilder</span><span class="o">.</span><span class="n">add_link</span><span class="p">(</span><span class="s">"google"</span><span class="p">,</span> <span class="n">href</span><span class="o">=</span><span class="s">"www.google.com"</span><span class="p">,</span> <span class="n">icon</span> <span class="o">=</span> <span class="s">"fa-google-plus"</span><span class="p">)</span>
</pre></div>
</div>
</li>
<li><p class="first">Add separators:</p>
<div class="highlight-python"><div class="highlight"><pre><span class="c"># Register a view, rendering a top menu without icon</span>
<span class="n">appbuilder</span><span class="o">.</span><span class="n">add_view</span><span class="p">(</span><span class="n">MyModelView1</span><span class="p">,</span> <span class="s">"My View 1"</span><span class="p">,</span> <span class="n">category</span><span class="o">=</span><span class="s">"My Views"</span><span class="p">)</span>
<span class="n">appbuilder</span><span class="o">.</span><span class="n">add_view</span><span class="p">(</span><span class="n">MyModelView2</span><span class="p">,</span> <span class="s">"My View 2"</span><span class="p">,</span> <span class="n">category</span><span class="o">=</span><span class="s">"My Views"</span><span class="p">)</span>
<span class="n">appbuilder</span><span class="o">.</span><span class="n">add_separator</span><span class="p">(</span><span class="s">"My Views"</span><span class="p">)</span>
<span class="n">appbuilder</span><span class="o">.</span><span class="n">add_view</span><span class="p">(</span><span class="n">MyModelView3</span><span class="p">,</span> <span class="s">"My View 3"</span><span class="p">,</span> <span class="n">category</span><span class="o">=</span><span class="s">"My Views"</span><span class="p">)</span>
</pre></div>
</div>
</li>
</ul>
</div></blockquote>
<p>Using the <em>label</em> argument is optional for the view name or category, but it is advised for internationalization; if you use it with Babel’s <em>lazy_gettext</em> function it will automate translation extraction.</p>
<p>The category icon and label can be set up only the first time. Internally F.A.B. has already stored them; subsequent references are made by name.</p>
</div>
<div class="section" id="changing-widgets-and-templates">
<h2>Changing Widgets and Templates<a class="headerlink" href="#changing-widgets-and-templates" title="Permalink to this headline">¶</a></h2>
<p>F.A.B. has a collection of widgets to change your views’ presentation;
you can create your own and override the defaults,
or (even better) create them and contribute to the project on git.</p>
<p>All views have templates that will display widgets in a certain layout.
For example, on the edit or show view, you can display the related list (from <em>related_views</em>) on the same page,
or as a tab (default).</p>
<div class="highlight-python"><div class="highlight"><pre><span class="k">class</span> <span class="nc">ServerDiskTypeModelView</span><span class="p">(</span><span class="n">ModelView</span><span class="p">):</span>
<span class="n">datamodel</span> <span class="o">=</span> <span class="n">SQLAInterface</span><span class="p">(</span><span class="n">ServerDiskType</span><span class="p">)</span>
<span class="n">list_columns</span> <span class="o">=</span> <span class="p">[</span><span class="s">'quantity'</span><span class="p">,</span> <span class="s">'disktype'</span><span class="p">]</span>
<span class="k">class</span> <span class="nc">ServerModelView</span><span class="p">(</span><span class="n">ModelView</span><span class="p">):</span>
<span class="n">datamodel</span> <span class="o">=</span> <span class="n">SQLAInterface</span><span class="p">(</span><span class="n">Server</span><span class="p">)</span>
<span class="n">related_views</span> <span class="o">=</span> <span class="p">[</span><span class="n">ServerDiskTypeModelView</span><span class="p">]</span>
<span class="n">show_template</span> <span class="o">=</span> <span class="s">'appbuilder/general/model/show_cascade.html'</span>
<span class="n">edit_template</span> <span class="o">=</span> <span class="s">'appbuilder/general/model/edit_cascade.html'</span>
<span class="n">list_columns</span> <span class="o">=</span> <span class="p">[</span><span class="s">'name'</span><span class="p">,</span> <span class="s">'serial'</span><span class="p">]</span>
<span class="n">order_columns</span> <span class="o">=</span> <span class="p">[</span><span class="s">'name'</span><span class="p">,</span> <span class="s">'serial'</span><span class="p">]</span>
<span class="n">search_columns</span> <span class="o">=</span> <span class="p">[</span><span class="s">'name'</span><span class="p">,</span> <span class="s">'serial'</span><span class="p">]</span>
</pre></div>
</div>
<p>The above example overrides the show and edit templates, which changes how the related lists are laid out.</p>
<a class="reference internal image-reference" href="_images/list_cascade.png"><img alt="_images/list_cascade.png" src="_images/list_cascade.png" style="width: 100%;" /></a>
<p>If you want to change the above example so that the server disks are displayed as a list, just use the available widgets:</p>
<div class="highlight-python"><div class="highlight"><pre><span class="k">class</span> <span class="nc">ServerDiskTypeModelView</span><span class="p">(</span><span class="n">ModelView</span><span class="p">):</span>
<span class="n">datamodel</span> <span class="o">=</span> <span class="n">SQLAInterface</span><span class="p">(</span><span class="n">ServerDiskType</span><span class="p">)</span>
<span class="n">list_columns</span> <span class="o">=</span> <span class="p">[</span><span class="s">'quantity'</span><span class="p">,</span> <span class="s">'disktype'</span><span class="p">]</span>
<span class="n">list_widget</span> <span class="o">=</span> <span class="n">ListBlock</span>
<span class="k">class</span> <span class="nc">ServerModelView</span><span class="p">(</span><span class="n">ModelView</span><span class="p">):</span>
<span class="n">datamodel</span> <span class="o">=</span> <span class="n">SQLAInterface</span><span class="p">(</span><span class="n">Server</span><span class="p">)</span>
<span class="n">related_views</span> <span class="o">=</span> <span class="p">[</span><span class="n">ServerDiskTypeModelView</span><span class="p">]</span>
<span class="n">show_template</span> <span class="o">=</span> <span class="s">'appbuilder/general/model/show_cascade.html'</span>
<span class="n">edit_template</span> <span class="o">=</span> <span class="s">'appbuilder/general/model/edit_cascade.html'</span>
<span class="n">list_columns</span> <span class="o">=</span> <span class="p">[</span><span class="s">'name'</span><span class="p">,</span> <span class="s">'serial'</span><span class="p">]</span>
<span class="n">order_columns</span> <span class="o">=</span> <span class="p">[</span><span class="s">'name'</span><span class="p">,</span> <span class="s">'serial'</span><span class="p">]</span>
<span class="n">search_columns</span> <span class="o">=</span> <span class="p">[</span><span class="s">'name'</span><span class="p">,</span> <span class="s">'serial'</span><span class="p">]</span>
</pre></div>
</div>
<p>We have overridden the list_widget property with the ListBlock class. It will look like this:</p>
<a class="reference internal image-reference" href="_images/list_cascade_block.png"><img alt="_images/list_cascade_block.png" src="_images/list_cascade_block.png" style="width: 100%;" /></a>
<p>You have the following widgets already available:</p>
<ul class="simple">
<li>ListWidget (default)</li>
<li>ListItem</li>
<li>ListThumbnail</li>
<li>ListBlock</li>
</ul>
<p>If you want to develop your own widgets, just look at the
<a class="reference external" href="https://github.com/dpgaspar/Flask-AppBuilder/tree/master/flask_appbuilder/templates/appbuilder/general/widgets">code</a></p>
<p>Read the docs on developing your own template widgets: <a class="reference internal" href="templates.html"><em>Templates</em></a></p>
<p>Implement your own and then create a very simple class like this one:</p>
<div class="highlight-python"><div class="highlight"><pre><span class="k">class</span> <span class="nc">MyWidgetList</span><span class="p">(</span><span class="n">ListWidget</span><span class="p">):</span>
<span class="n">template</span> <span class="o">=</span> <span class="s">'/widgets/my_widget_list.html'</span>
</pre></div>
</div>
</div>
<div class="section" id="change-default-view-behaviour">
<h2>Change Default View Behaviour<a class="headerlink" href="#change-default-view-behaviour" title="Permalink to this headline">¶</a></h2>
<p>If you want to have add, edit and list on the same page, this can be done. It can be very helpful for master/detail (inline) lists on views based on tables with very few columns.</p>
<p>All you have to do is to mix <em>CompactCRUDMixin</em> class with the <em>ModelView</em> class.</p>
<div class="highlight-python"><div class="highlight"><pre><span class="kn">from</span> <span class="nn">flask.ext.appbuilder.models.sqla.interface</span> <span class="kn">import</span> <span class="n">SQLAInterface</span>
<span class="kn">from</span> <span class="nn">flask.ext.appbuilder.views</span> <span class="kn">import</span> <span class="n">ModelView</span><span class="p">,</span> <span class="n">CompactCRUDMixin</span>
<span class="kn">from</span> <span class="nn">app.models</span> <span class="kn">import</span> <span class="n">Project</span><span class="p">,</span> <span class="n">ProjectFiles</span>
<span class="kn">from</span> <span class="nn">app</span> <span class="kn">import</span> <span class="n">appbuilder</span>
<span class="k">class</span> <span class="nc">MyInlineView</span><span class="p">(</span><span class="n">CompactCRUDMixin</span><span class="p">,</span> <span class="n">ModelView</span><span class="p">):</span>
<span class="n">datamodel</span> <span class="o">=</span> <span class="n">SQLAInterface</span><span class="p">(</span><span class="n">MyInlineTable</span><span class="p">)</span>
<span class="k">class</span> <span class="nc">MyView</span><span class="p">(</span><span class="n">ModelView</span><span class="p">):</span>
<span class="n">datamodel</span> <span class="o">=</span> <span class="n">SQLAInterface</span><span class="p">(</span><span class="n">MyViewTable</span><span class="p">)</span>
<span class="n">related_views</span> <span class="o">=</span> <span class="p">[</span><span class="n">MyInlineView</span><span class="p">]</span>
<span class="n">appbuilder</span><span class="o">.</span><span class="n">add_view</span><span class="p">(</span><span class="n">MyView</span><span class="p">,</span> <span class="s">"List My View"</span><span class="p">,</span><span class="n">icon</span> <span class="o">=</span> <span class="s">"fa-table"</span><span class="p">,</span> <span class="n">category</span> <span class="o">=</span> <span class="s">"My Views"</span><span class="p">)</span>
<span class="n">appbuilder</span><span class="o">.</span><span class="n">add_view_no_menu</span><span class="p">(</span><span class="n">MyInlineView</span><span class="p">)</span>
</pre></div>
</div>
<p>Notice the class mixin: with this configuration you will have a <em>Master View</em> with the inline view <em>MyInlineView</em>, where you can add and edit on the same page.</p>
<p>Of course you could use the mixin on <em>MyView</em> also; just make sure you use it only on ModelView classes.</p>
<p>Take a look at the example: <a class="reference external" href="https://github.com/dpgaspar/Flask-appBuilder/tree/master/examples/quickfiles">https://github.com/dpgaspar/Flask-appBuilder/tree/master/examples/quickfiles</a></p>
<a class="reference internal image-reference" href="_images/list_compact_inline.png"><img alt="_images/list_compact_inline.png" src="_images/list_compact_inline.png" style="width: 100%;" /></a>
<p>Next we will take a look at a different view behaviour: a master/detail style view, where the master is a view associated with a database table that is linked to the detail view.</p>
<p>Let’s use our quick how-to example, a simple contacts application. We have a <em>Contact</em> table related to a <em>Group</em> table.</p>
<p>Since we are using a master/detail view, first we will define the detail view (this view can be customized like the examples above):</p>
<div class="highlight-python"><div class="highlight"><pre><span class="k">class</span> <span class="nc">ContactModelView</span><span class="p">(</span><span class="n">ModelView</span><span class="p">):</span>
<span class="n">datamodel</span> <span class="o">=</span> <span class="n">SQLAInterface</span><span class="p">(</span><span class="n">Contact</span><span class="p">)</span>
</pre></div>
</div>
<p>Then we define the master/detail view, where the master is the one side of the 1-N relation:</p>
<div class="highlight-python"><div class="highlight"><pre><span class="k">class</span> <span class="nc">GroupMasterView</span><span class="p">(</span><span class="n">MasterDetailView</span><span class="p">):</span>
<span class="n">datamodel</span> <span class="o">=</span> <span class="n">SQLAInterface</span><span class="p">(</span><span class="n">Group</span><span class="p">)</span>
<span class="n">related_views</span> <span class="o">=</span> <span class="p">[</span><span class="n">ContactModelView</span><span class="p">]</span>
</pre></div>
</div>
<p>Remember you can use charts as related views; you can use them like this:</p>
<div class="highlight-python"><div class="highlight"><pre><span class="k">class</span> <span class="nc">ContactTimeChartView</span><span class="p">(</span><span class="n">TimeChartView</span><span class="p">):</span>
<span class="n">datamodel</span> <span class="o">=</span> <span class="n">SQLAInterface</span><span class="p">(</span><span class="n">Contact</span><span class="p">)</span>
<span class="n">chart_title</span> <span class="o">=</span> <span class="s">'Grouped Birth contacts'</span>
<span class="n">chart_type</span> <span class="o">=</span> <span class="s">'AreaChart'</span>
<span class="n">label_columns</span> <span class="o">=</span> <span class="n">ContactModelView</span><span class="o">.</span><span class="n">label_columns</span>
<span class="n">group_by_columns</span> <span class="o">=</span> <span class="p">[</span><span class="s">'birthday'</span><span class="p">]</span>
<span class="k">class</span> <span class="nc">GroupMasterView</span><span class="p">(</span><span class="n">MasterDetailView</span><span class="p">):</span>
<span class="n">datamodel</span> <span class="o">=</span> <span class="n">SQLAInterface</span><span class="p">(</span><span class="n">Group</span><span class="p">)</span>
<span class="n">related_views</span> <span class="o">=</span> <span class="p">[</span><span class="n">ContactModelView</span><span class="p">,</span> <span class="n">ContactTimeChartView</span><span class="p">]</span>
</pre></div>
</div>
<p>This will show a left-side menu with the <em>groups</em> and, on the right side, a list of contacts and a time chart with the number of birthdays over time for the selected group.</p>
<p>Finally register everything:</p>
<div class="highlight-python"><div class="highlight"><pre>// if Using the above example with related chart
appbuilder.add_view_no_menu(ContactTimeChartView)
appbuilder.add_view(GroupMasterView, "List Groups", icon="fa-folder-open-o", category="Contacts")
appbuilder.add_separator("Contacts")
appbuilder.add_view(ContactModelView, "List Contacts", icon="fa-envelope", category="Contacts")
</pre></div>
</div>
<a class="reference internal image-reference" href="_images/list_master_detail.png"><img alt="_images/list_master_detail.png" src="_images/list_master_detail.png" style="width: 100%;" /></a>
</div>
</div>
</div>
</div>
</div>
<footer class="footer">
<div class="container">
<p class="pull-right">
<a href="#">Back to top</a>
</p>
<p>
© Copyright 2013, Daniel Vaz Gaspar.<br/>
Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.3.1.<br/>
</p>
</div>
</footer>
</body>
</html>
|
rpiotti/Flask-AppBuilder
|
docs/_build/html/customizing.html
|
HTML
|
bsd-3-clause
| 33,021
|
// Copyright (c) 2009 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_SYNC_NOTIFIER_BASE_SSL_ADAPTER_H_
#define CHROME_BROWSER_SYNC_NOTIFIER_BASE_SSL_ADAPTER_H_
namespace talk_base {
class AsyncSocket;
class SSLAdapter;
} // namespace talk_base
namespace notifier {
// Wraps the given socket in a platform-dependent SSLAdapter
// implementation.
talk_base::SSLAdapter* CreateSSLAdapter(talk_base::AsyncSocket* socket);
// Utility template class that overrides CreateSSLAdapter() to use the
// above function.
template <class SocketFactory>
class SSLAdapterSocketFactory : public SocketFactory {
public:
virtual talk_base::SSLAdapter* CreateSSLAdapter(
talk_base::AsyncSocket* socket) {
return ::notifier::CreateSSLAdapter(socket);
}
};
} // namespace notifier
#endif // CHROME_BROWSER_SYNC_NOTIFIER_BASE_SSL_ADAPTER_H_
|
rwatson/chromium-capsicum
|
chrome/browser/sync/notifier/base/ssl_adapter.h
|
C
|
bsd-3-clause
| 967
|
package org.usfirst.frc4579.filters;
/*************************************************************************************
* AVERAGE FILTER
*
* Allows the caller to maintain a running average of sample values. Standard deviation
* is also available. The class is instantiated with the max number of samples that
* should be averaged.
*
*************************************************************************************/
public class AverageFilter {
private final int arraySize; // Number of elements in averageArray.
private int newest = -1; // Array index of the newest entry in the array.
private int numEntries = 0; // Number of entries used in the array.
private double sum = 0.0; // Current sum of array entries.
private double[] averageArray; // The array that contains the sample values.
//*************************************************************************************
// Constructor.
//*************************************************************************************
public AverageFilter(int numberOfSamples) {
this.arraySize = numberOfSamples;
this.averageArray = new double[numberOfSamples];
}
//*************************************************************************************
// Add "input" into the running average without returning the average (filtered value).
//*************************************************************************************
public void accumulate (double input) {
// Add the new value into the running sum.
sum += input;
// Compute the array index of where to put the newest input.
newest = (newest+1) % arraySize;
// if the array is full
if (numEntries == arraySize) {
// "newest" has wrapped around and now points to the oldest entry.
// Remove the oldest entry from the sum.
sum -= averageArray[newest];
}
else
// Increment the number of samples in the array.
numEntries++;
// Save the newest value.
averageArray[newest] = input;
}
//*************************************************************************************
// Add "input" into the running average and return the average (filtered value).
//*************************************************************************************
public double filter(double input) {
accumulate(input);
return average();
}
//*************************************************************************************
// Return the current running average.
//*************************************************************************************
public double average () { return sum / (double)numEntries; }
//*************************************************************************************
// Return the standard deviation of the current running average.
//*************************************************************************************
public double stdDeviation () {
double avg = average();
double stdDev = 0.0;
for (int i=0; i < numEntries; i++) stdDev += Math.pow((averageArray[i] - avg), 2.0);
stdDev = Math.sqrt(stdDev / (double)numEntries);
return stdDev;
}
}
|
RoboEagles/Robot2017
|
src/org/usfirst/frc4579/filters/AverageFilter.java
|
Java
|
bsd-3-clause
| 3,244
|
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/password_manager/core/browser/ui/bulk_leak_check_service_adapter.h"
#include <memory>
#include <tuple>
#include <vector>
#include "base/containers/span.h"
#include "base/memory/ptr_util.h"
#include "base/strings/string_piece_forward.h"
#include "base/strings/utf_string_conversions.h"
#include "base/test/gmock_move_support.h"
#include "base/test/task_environment.h"
#include "components/autofill/core/common/password_form.h"
#include "components/password_manager/core/browser/bulk_leak_check_service.h"
#include "components/password_manager/core/browser/leak_detection/bulk_leak_check.h"
#include "components/password_manager/core/browser/leak_detection/leak_detection_check_factory.h"
#include "components/password_manager/core/browser/leak_detection/mock_leak_detection_check_factory.h"
#include "components/password_manager/core/browser/test_password_store.h"
#include "components/password_manager/core/browser/ui/saved_passwords_presenter.h"
#include "components/password_manager/core/common/password_manager_pref_names.h"
#include "components/prefs/pref_registry_simple.h"
#include "components/prefs/testing_pref_service.h"
#include "components/safe_browsing/core/common/safe_browsing_prefs.h"
#include "components/signin/public/identity_manager/identity_test_environment.h"
#include "services/network/test/test_shared_url_loader_factory.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace password_manager {
namespace {
constexpr char kExampleCom[] = "https://example.com";
constexpr char kExampleOrg[] = "https://example.org";
constexpr char kUsername1[] = "alice";
constexpr char kUsername2[] = "bob";
constexpr char kPassword1[] = "f00b4r";
constexpr char kPassword2[] = "s3cr3t";
using autofill::PasswordForm;
using ::testing::ByMove;
using ::testing::NiceMock;
using ::testing::Return;
MATCHER_P(CredentialsAre, credentials, "") {
return std::equal(arg.begin(), arg.end(), credentials.get().begin(),
credentials.get().end(),
[](const auto& lhs, const auto& rhs) {
return lhs.username() == rhs.username() &&
lhs.password() == rhs.password();
});
}
MATCHER_P(SavedPasswordsAre, passwords, "") {
return std::equal(arg.begin(), arg.end(), passwords.begin(), passwords.end(),
[](const auto& lhs, const auto& rhs) {
return lhs.signon_realm == rhs.signon_realm &&
lhs.username_value == rhs.username_value &&
lhs.password_value == rhs.password_value;
});
}
PasswordForm MakeSavedPassword(base::StringPiece signon_realm,
base::StringPiece username,
base::StringPiece password) {
PasswordForm form;
form.signon_realm = std::string(signon_realm);
form.username_value = base::ASCIIToUTF16(username);
form.password_value = base::ASCIIToUTF16(password);
return form;
}
LeakCheckCredential MakeLeakCheckCredential(base::StringPiece username,
base::StringPiece password) {
return LeakCheckCredential(base::ASCIIToUTF16(username),
base::ASCIIToUTF16(password));
}
struct MockBulkLeakCheck : BulkLeakCheck {
MOCK_METHOD(void,
CheckCredentials,
(std::vector<LeakCheckCredential> credentials),
(override));
MOCK_METHOD(size_t, GetPendingChecksCount, (), (const override));
};
using NiceMockBulkLeakCheck = ::testing::NiceMock<MockBulkLeakCheck>;
class BulkLeakCheckServiceAdapterTest : public ::testing::Test {
public:
BulkLeakCheckServiceAdapterTest() {
auto factory = std::make_unique<MockLeakDetectionCheckFactory>();
factory_ = factory.get();
service_.set_leak_factory(std::move(factory));
store_->Init(/*prefs=*/nullptr);
prefs_.registry()->RegisterBooleanPref(prefs::kPasswordLeakDetectionEnabled,
true);
prefs_.registry()->RegisterBooleanPref(::prefs::kSafeBrowsingEnabled, true);
prefs_.registry()->RegisterBooleanPref(::prefs::kSafeBrowsingEnhanced,
false);
}
~BulkLeakCheckServiceAdapterTest() override {
store_->ShutdownOnUIThread();
task_env_.RunUntilIdle();
}
TestPasswordStore& store() { return *store_; }
SavedPasswordsPresenter& presenter() { return presenter_; }
MockLeakDetectionCheckFactory& factory() { return *factory_; }
PrefService& prefs() { return prefs_; }
BulkLeakCheckServiceAdapter& adapter() { return adapter_; }
void RunUntilIdle() { task_env_.RunUntilIdle(); }
private:
base::test::TaskEnvironment task_env_;
signin::IdentityTestEnvironment identity_test_env_;
scoped_refptr<TestPasswordStore> store_ =
base::MakeRefCounted<TestPasswordStore>();
SavedPasswordsPresenter presenter_{store_};
BulkLeakCheckService service_{
identity_test_env_.identity_manager(),
base::MakeRefCounted<network::TestSharedURLLoaderFactory>()};
MockLeakDetectionCheckFactory* factory_ = nullptr;
TestingPrefServiceSimple prefs_;
BulkLeakCheckServiceAdapter adapter_{&presenter_, &service_, &prefs_};
};
} // namespace
TEST_F(BulkLeakCheckServiceAdapterTest, OnCreation) {
EXPECT_EQ(0u, adapter().GetPendingChecksCount());
EXPECT_EQ(BulkLeakCheckService::State::kIdle,
adapter().GetBulkLeakCheckState());
}
// Checks that starting a leak check correctly transforms the list of saved
// passwords into LeakCheckCredentials and attaches the underlying password
// forms as user data.
TEST_F(BulkLeakCheckServiceAdapterTest, StartBulkLeakCheck) {
std::vector<PasswordForm> passwords = {
MakeSavedPassword(kExampleCom, kUsername1, kPassword1),
MakeSavedPassword(kExampleOrg, kUsername2, kPassword2)};
store().AddLogin(passwords[0]);
store().AddLogin(passwords[1]);
RunUntilIdle();
auto leak_check = std::make_unique<NiceMockBulkLeakCheck>();
std::vector<LeakCheckCredential> credentials;
EXPECT_CALL(*leak_check, CheckCredentials).WillOnce(MoveArg(&credentials));
EXPECT_CALL(factory(), TryCreateBulkLeakCheck)
.WillOnce(Return(ByMove(std::move(leak_check))));
adapter().StartBulkLeakCheck();
std::vector<LeakCheckCredential> expected;
expected.push_back(MakeLeakCheckCredential(kUsername1, kPassword1));
expected.push_back(MakeLeakCheckCredential(kUsername2, kPassword2));
EXPECT_THAT(credentials, CredentialsAre(std::cref(expected)));
}
TEST_F(BulkLeakCheckServiceAdapterTest, StartBulkLeakCheckAttachesData) {
constexpr char kKey[] = "key";
struct UserData : LeakCheckCredential::Data {
std::unique_ptr<Data> Clone() override { return std::make_unique<Data>(); }
} data;
std::vector<PasswordForm> passwords = {
MakeSavedPassword(kExampleCom, kUsername1, kPassword1)};
store().AddLogin(passwords[0]);
RunUntilIdle();
auto leak_check = std::make_unique<NiceMockBulkLeakCheck>();
std::vector<LeakCheckCredential> credentials;
EXPECT_CALL(*leak_check, CheckCredentials).WillOnce(MoveArg(&credentials));
EXPECT_CALL(factory(), TryCreateBulkLeakCheck)
.WillOnce(Return(ByMove(std::move(leak_check))));
adapter().StartBulkLeakCheck(kKey, &data);
EXPECT_NE(nullptr, credentials.at(0).GetUserData(kKey));
}
// Tests that multiple credentials with effectively the same username are
// correctly deduped before starting the leak check.
TEST_F(BulkLeakCheckServiceAdapterTest, StartBulkLeakCheckDedupes) {
std::vector<PasswordForm> passwords = {
MakeSavedPassword(kExampleCom, "alice", kPassword1),
MakeSavedPassword(kExampleCom, "ALICE", kPassword1),
MakeSavedPassword(kExampleCom, "Alice@example.com", kPassword1)};
store().AddLogin(passwords[0]);
store().AddLogin(passwords[1]);
store().AddLogin(passwords[2]);
RunUntilIdle();
auto leak_check = std::make_unique<NiceMockBulkLeakCheck>();
std::vector<LeakCheckCredential> credentials;
EXPECT_CALL(*leak_check, CheckCredentials).WillOnce(MoveArg(&credentials));
EXPECT_CALL(factory(), TryCreateBulkLeakCheck)
.WillOnce(Return(ByMove(std::move(leak_check))));
adapter().StartBulkLeakCheck();
std::vector<LeakCheckCredential> expected;
expected.push_back(MakeLeakCheckCredential("alice", kPassword1));
EXPECT_THAT(credentials, CredentialsAre(std::cref(expected)));
}
// Checks that trying to start a leak check when another check is already
// running does nothing and returns false to the caller.
TEST_F(BulkLeakCheckServiceAdapterTest, MultipleStarts) {
store().AddLogin(MakeSavedPassword(kExampleCom, "alice", kPassword1));
RunUntilIdle();
auto leak_check = std::make_unique<NiceMockBulkLeakCheck>();
auto& leak_check_ref = *leak_check;
EXPECT_CALL(leak_check_ref, CheckCredentials);
EXPECT_CALL(factory(), TryCreateBulkLeakCheck)
.WillOnce(Return(ByMove(std::move(leak_check))));
EXPECT_TRUE(adapter().StartBulkLeakCheck());
EXPECT_CALL(leak_check_ref, CheckCredentials).Times(0);
EXPECT_FALSE(adapter().StartBulkLeakCheck());
}
// Checks that stopping the leak check correctly resets the state of the bulk
// leak check.
TEST_F(BulkLeakCheckServiceAdapterTest, StopBulkLeakCheck) {
store().AddLogin(MakeSavedPassword(kExampleCom, "alice", kPassword1));
RunUntilIdle();
auto leak_check = std::make_unique<NiceMockBulkLeakCheck>();
EXPECT_CALL(*leak_check, CheckCredentials);
EXPECT_CALL(factory(), TryCreateBulkLeakCheck)
.WillOnce(Return(ByMove(std::move(leak_check))));
adapter().StartBulkLeakCheck();
EXPECT_EQ(BulkLeakCheckService::State::kRunning,
adapter().GetBulkLeakCheckState());
adapter().StopBulkLeakCheck();
EXPECT_EQ(BulkLeakCheckService::State::kCanceled,
adapter().GetBulkLeakCheckState());
}
// Tests that editing a password through the presenter does not result in
// another call to CheckCredentials with a corresponding change to the checked
// password if the corresponding prefs are not set.
TEST_F(BulkLeakCheckServiceAdapterTest, OnEditedNoPrefs) {
prefs().SetBoolean(prefs::kPasswordLeakDetectionEnabled, false);
prefs().SetBoolean(::prefs::kSafeBrowsingEnabled, false);
PasswordForm password =
MakeSavedPassword(kExampleCom, kUsername1, kPassword1);
store().AddLogin(password);
// When |password| is read back from the store, its |in_store| member will be
// set, and SavedPasswordsPresenter::EditPassword() actually depends on that.
// So set it here too.
password.in_store = PasswordForm::Store::kProfileStore;
RunUntilIdle();
EXPECT_CALL(factory(), TryCreateBulkLeakCheck).Times(0);
presenter().EditPassword(password, base::ASCIIToUTF16(kPassword2));
}
// Tests that editing a password through the presenter will result in another
// call to CheckCredentials with a corresponding change to the checked password
// if the corresponding prefs are set.
TEST_F(BulkLeakCheckServiceAdapterTest, OnEditedWithPrefs) {
PasswordForm password =
MakeSavedPassword(kExampleCom, kUsername1, kPassword1);
store().AddLogin(password);
// When |password| is read back from the store, its |in_store| member will be
// set, and SavedPasswordsPresenter::EditPassword() actually depends on that.
// So set it here too.
password.in_store = PasswordForm::Store::kProfileStore;
RunUntilIdle();
std::vector<LeakCheckCredential> expected;
expected.push_back(MakeLeakCheckCredential(kUsername1, kPassword2));
auto leak_check = std::make_unique<NiceMockBulkLeakCheck>();
EXPECT_CALL(*leak_check,
CheckCredentials(CredentialsAre(std::cref(expected))));
EXPECT_CALL(factory(), TryCreateBulkLeakCheck)
.WillOnce(Return(ByMove(std::move(leak_check))));
presenter().EditPassword(password, base::ASCIIToUTF16(kPassword2));
}
} // namespace password_manager
|
endlessm/chromium-browser
|
components/password_manager/core/browser/ui/bulk_leak_check_service_adapter_unittest.cc
|
C++
|
bsd-3-clause
| 12,088
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta name="viewport" content="width=device-width,initial-scale=1">
<meta http-equiv="x-ua-compatible" content="ie=edge">
<meta name="lang:clipboard.copy" content="Copy to clipboard">
<meta name="lang:clipboard.copied" content="Copied to clipboard">
<meta name="lang:search.language" content="en">
<meta name="lang:search.pipeline.stopwords" content="True">
<meta name="lang:search.pipeline.trimmer" content="True">
<meta name="lang:search.result.none" content="No matching documents">
<meta name="lang:search.result.one" content="1 matching document">
<meta name="lang:search.result.other" content="# matching documents">
<meta name="lang:search.tokenizer" content="[\s\-]+">
<link href="https://fonts.gstatic.com/" rel="preconnect" crossorigin>
<link href="https://fonts.googleapis.com/css?family=Roboto+Mono:400,500,700|Roboto:300,400,400i,700&display=fallback" rel="stylesheet">
<style>
body,
input {
font-family: "Roboto", "Helvetica Neue", Helvetica, Arial, sans-serif
}
code,
kbd,
pre {
font-family: "Roboto Mono", "Courier New", Courier, monospace
}
</style>
<link rel="stylesheet" href="../_static/stylesheets/application.css"/>
<link rel="stylesheet" href="../_static/stylesheets/application-palette.css"/>
<link rel="stylesheet" href="../_static/stylesheets/application-fixes.css"/>
<link rel="stylesheet" href="../_static/fonts/material-icons.css"/>
<meta name="theme-color" content="#3f51b5">
<script src="../_static/javascripts/modernizr.js"></script>
<title>statsmodels.discrete.count_model.ZeroInflatedGeneralizedPoisson.cdf — statsmodels</title>
<link rel="icon" type="image/png" sizes="32x32" href="../_static/icons/favicon-32x32.png">
<link rel="icon" type="image/png" sizes="16x16" href="../_static/icons/favicon-16x16.png">
<link rel="manifest" href="../_static/icons/site.webmanifest">
<link rel="mask-icon" href="../_static/icons/safari-pinned-tab.svg" color="#919191">
<meta name="msapplication-TileColor" content="#2b5797">
<meta name="msapplication-config" content="../_static/icons/browserconfig.xml">
<link rel="stylesheet" href="../_static/stylesheets/examples.css">
<link rel="stylesheet" href="../_static/stylesheets/deprecation.css">
<link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
<link rel="stylesheet" href="../_static/material.css" type="text/css" />
<link rel="stylesheet" type="text/css" href="../_static/graphviz.css" />
<script id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
<script src="../_static/jquery.js"></script>
<script src="../_static/underscore.js"></script>
<script src="../_static/doctools.js"></script>
<script crossorigin="anonymous" integrity="sha256-Ae2Vz/4ePdIu6ZyI/5ZGsYnb+m0JlOmKPjt6XZ9JJkA=" src="https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.4/require.min.js"></script>
<script async="async" src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.7/latest.js?config=TeX-AMS-MML_HTMLorMML"></script>
<script type="text/x-mathjax-config">MathJax.Hub.Config({"tex2jax": {"inlineMath": [["$", "$"], ["\\(", "\\)"]], "processEscapes": true, "ignoreClass": "document", "processClass": "math|output_area"}})</script>
<link rel="shortcut icon" href="../_static/favicon.ico"/>
<link rel="author" title="About these documents" href="../about.html" />
<link rel="index" title="Index" href="../genindex.html" />
<link rel="search" title="Search" href="../search.html" />
<link rel="next" title="statsmodels.discrete.count_model.ZeroInflatedGeneralizedPoisson.cov_params_func_l1" href="statsmodels.discrete.count_model.ZeroInflatedGeneralizedPoisson.cov_params_func_l1.html" />
<link rel="prev" title="statsmodels.discrete.count_model.ZeroInflatedGeneralizedPoisson" href="statsmodels.discrete.count_model.ZeroInflatedGeneralizedPoisson.html" />
</head>
<body dir=ltr
data-md-color-primary=indigo data-md-color-accent=blue>
<svg class="md-svg">
<defs data-children-count="0">
<svg xmlns="http://www.w3.org/2000/svg" width="416" height="448" viewBox="0 0 416 448" id="__github"><path fill="currentColor" d="M160 304q0 10-3.125 20.5t-10.75 19T128 352t-18.125-8.5-10.75-19T96 304t3.125-20.5 10.75-19T128 256t18.125 8.5 10.75 19T160 304zm160 0q0 10-3.125 20.5t-10.75 19T288 352t-18.125-8.5-10.75-19T256 304t3.125-20.5 10.75-19T288 256t18.125 8.5 10.75 19T320 304zm40 0q0-30-17.25-51T296 232q-10.25 0-48.75 5.25Q229.5 240 208 240t-39.25-2.75Q130.75 232 120 232q-29.5 0-46.75 21T56 304q0 22 8 38.375t20.25 25.75 30.5 15 35 7.375 37.25 1.75h42q20.5 0 37.25-1.75t35-7.375 30.5-15 20.25-25.75T360 304zm56-44q0 51.75-15.25 82.75-9.5 19.25-26.375 33.25t-35.25 21.5-42.5 11.875-42.875 5.5T212 416q-19.5 0-35.5-.75t-36.875-3.125-38.125-7.5-34.25-12.875T37 371.5t-21.5-28.75Q0 312 0 260q0-59.25 34-99-6.75-20.5-6.75-42.5 0-29 12.75-54.5 27 0 47.5 9.875t47.25 30.875Q171.5 96 212 96q37 0 70 8 26.25-20.5 46.75-30.25T376 64q12.75 25.5 12.75 54.5 0 21.75-6.75 42 34 40 34 99.5z"/></svg>
</defs>
</svg>
<input class="md-toggle" data-md-toggle="drawer" type="checkbox" id="__drawer">
<input class="md-toggle" data-md-toggle="search" type="checkbox" id="__search">
<label class="md-overlay" data-md-component="overlay" for="__drawer"></label>
<a href="#generated/statsmodels.discrete.count_model.ZeroInflatedGeneralizedPoisson.cdf" tabindex="1" class="md-skip"> Skip to content </a>
<header class="md-header" data-md-component="header">
<nav class="md-header-nav md-grid">
<div class="md-flex navheader">
<div class="md-flex__cell md-flex__cell--shrink">
<a href="../index.html" title="statsmodels"
class="md-header-nav__button md-logo">
<img src="../_static/statsmodels-logo-v2-bw.svg" height="26"
alt="statsmodels logo">
</a>
</div>
<div class="md-flex__cell md-flex__cell--shrink">
<label class="md-icon md-icon--menu md-header-nav__button" for="__drawer"></label>
</div>
<div class="md-flex__cell md-flex__cell--stretch">
<div class="md-flex__ellipsis md-header-nav__title" data-md-component="title">
<span class="md-header-nav__topic">statsmodels v0.12.2</span>
<span class="md-header-nav__topic"> statsmodels.discrete.count_model.ZeroInflatedGeneralizedPoisson.cdf </span>
</div>
</div>
<div class="md-flex__cell md-flex__cell--shrink">
<label class="md-icon md-icon--search md-header-nav__button" for="__search"></label>
<div class="md-search" data-md-component="search" role="dialog">
<label class="md-search__overlay" for="__search"></label>
<div class="md-search__inner" role="search">
<form class="md-search__form" action="../search.html" method="GET" name="search">
<input type="text" class="md-search__input" name="q" placeholder="Search"
autocapitalize="off" autocomplete="off" spellcheck="false"
data-md-component="query" data-md-state="active">
<label class="md-icon md-search__icon" for="__search"></label>
<button type="reset" class="md-icon md-search__icon" data-md-component="reset" tabindex="-1">

</button>
</form>
<div class="md-search__output">
<div class="md-search__scrollwrap" data-md-scrollfix>
<div class="md-search-result" data-md-component="result">
<div class="md-search-result__meta">
Type to start searching
</div>
<ol class="md-search-result__list"></ol>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="md-flex__cell md-flex__cell--shrink">
<div class="md-header-nav__source">
<a href="https://github.com/statsmodels/statsmodels" title="Go to repository" class="md-source" data-md-source="github">
<div class="md-source__icon">
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 24 24" width="28" height="28">
<use xlink:href="#__github" width="24" height="24"></use>
</svg>
</div>
<div class="md-source__repository">
statsmodels
</div>
</a>
</div>
</div>
<script src="../_static/javascripts/version_dropdown.js"></script>
<script>
var json_loc = "../_static/versions.json",
target_loc = "../../",
text = "Versions";
$( document ).ready( add_version_dropdown(json_loc, target_loc, text));
</script>
</div>
</nav>
</header>
<div class="md-container">
<nav class="md-tabs" data-md-component="tabs">
<div class="md-tabs__inner md-grid">
<ul class="md-tabs__list">
<li class="md-tabs__item"><a href="../user-guide.html" class="md-tabs__link">User Guide</a></li>
<li class="md-tabs__item"><a href="../discretemod.html" class="md-tabs__link">Regression with Discrete Dependent Variable</a></li>
<li class="md-tabs__item"><a href="statsmodels.discrete.count_model.ZeroInflatedGeneralizedPoisson.html" class="md-tabs__link">statsmodels.discrete.count_model.ZeroInflatedGeneralizedPoisson</a></li>
</ul>
</div>
</nav>
<main class="md-main">
<div class="md-main__inner md-grid" data-md-component="container">
<div class="md-sidebar md-sidebar--primary" data-md-component="navigation">
<div class="md-sidebar__scrollwrap">
<div class="md-sidebar__inner">
<nav class="md-nav md-nav--primary" data-md-level="0">
<label class="md-nav__title md-nav__title--site" for="__drawer">
<a href="../index.html" title="statsmodels" class="md-nav__button md-logo">
<img src="../_static/statsmodels-logo-v2-bw.svg" alt=" logo" width="48" height="48">
</a>
<a href="../index.html"
title="statsmodels">statsmodels v0.12.2</a>
</label>
<div class="md-nav__source">
<a href="https://github.com/statsmodels/statsmodels" title="Go to repository" class="md-source" data-md-source="github">
<div class="md-source__icon">
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 24 24" width="28" height="28">
<use xlink:href="#__github" width="24" height="24"></use>
</svg>
</div>
<div class="md-source__repository">
statsmodels
</div>
</a>
</div>
<ul class="md-nav__list">
<li class="md-nav__item">
<a href="../install.html" class="md-nav__link">Installing statsmodels</a>
</li>
<li class="md-nav__item">
<a href="../gettingstarted.html" class="md-nav__link">Getting started</a>
</li>
<li class="md-nav__item">
<a href="../user-guide.html" class="md-nav__link">User Guide</a>
<ul class="md-nav__list">
<li class="md-nav__item">
<a href="../user-guide.html#background" class="md-nav__link">Background</a>
</li>
<li class="md-nav__item">
<a href="../user-guide.html#regression-and-linear-models" class="md-nav__link">Regression and Linear Models</a>
<ul class="md-nav__list">
<li class="md-nav__item">
<a href="../regression.html" class="md-nav__link">Linear Regression</a>
</li>
<li class="md-nav__item">
<a href="../glm.html" class="md-nav__link">Generalized Linear Models</a>
</li>
<li class="md-nav__item">
<a href="../gee.html" class="md-nav__link">Generalized Estimating Equations</a>
</li>
<li class="md-nav__item">
<a href="../gam.html" class="md-nav__link">Generalized Additive Models (GAM)</a>
</li>
<li class="md-nav__item">
<a href="../rlm.html" class="md-nav__link">Robust Linear Models</a>
</li>
<li class="md-nav__item">
<a href="../mixed_linear.html" class="md-nav__link">Linear Mixed Effects Models</a>
</li>
<li class="md-nav__item">
<a href="../discretemod.html" class="md-nav__link">Regression with Discrete Dependent Variable</a>
</li>
<li class="md-nav__item">
<a href="../mixed_glm.html" class="md-nav__link">Generalized Linear Mixed Effects Models</a>
</li>
<li class="md-nav__item">
<a href="../anova.html" class="md-nav__link">ANOVA</a>
</li></ul>
</li>
<li class="md-nav__item">
<a href="../user-guide.html#time-series-analysis" class="md-nav__link">Time Series Analysis</a>
</li>
<li class="md-nav__item">
<a href="../user-guide.html#other-models" class="md-nav__link">Other Models</a>
</li>
<li class="md-nav__item">
<a href="../user-guide.html#statistics-and-tools" class="md-nav__link">Statistics and Tools</a>
</li>
<li class="md-nav__item">
<a href="../user-guide.html#data-sets" class="md-nav__link">Data Sets</a>
</li>
<li class="md-nav__item">
<a href="../user-guide.html#sandbox" class="md-nav__link">Sandbox</a>
</li></ul>
</li>
<li class="md-nav__item">
<a href="../examples/index.html" class="md-nav__link">Examples</a>
</li>
<li class="md-nav__item">
<a href="../api.html" class="md-nav__link">API Reference</a>
</li>
<li class="md-nav__item">
<a href="../about.html" class="md-nav__link">About statsmodels</a>
</li>
<li class="md-nav__item">
<a href="../dev/index.html" class="md-nav__link">Developer Page</a>
</li>
<li class="md-nav__item">
<a href="../release/index.html" class="md-nav__link">Release Notes</a>
</li>
</ul>
</nav>
</div>
</div>
</div>
<div class="md-sidebar md-sidebar--secondary" data-md-component="toc">
<div class="md-sidebar__scrollwrap">
<div class="md-sidebar__inner">
<nav class="md-nav md-nav--secondary">
<ul class="md-nav__list" data-md-scrollfix="">
<li class="md-nav__item"><a class="md-nav__extra_link" href="../_sources/generated/statsmodels.discrete.count_model.ZeroInflatedGeneralizedPoisson.cdf.rst.txt">Show Source</a> </li>
<li id="searchbox" class="md-nav__item"></li>
</ul>
</nav>
</div>
</div>
</div>
<div class="md-content">
<article class="md-content__inner md-typeset" role="main">
<h1 id="generated-statsmodels-discrete-count-model-zeroinflatedgeneralizedpoisson-cdf--page-root">statsmodels.discrete.count_model.ZeroInflatedGeneralizedPoisson.cdf<a class="headerlink" href="#generated-statsmodels-discrete-count-model-zeroinflatedgeneralizedpoisson-cdf--page-root" title="Permalink to this headline">¶</a></h1>
<dl class="py method">
<dt id="statsmodels.discrete.count_model.ZeroInflatedGeneralizedPoisson.cdf">
<code class="sig-prename descclassname">ZeroInflatedGeneralizedPoisson.</code><code class="sig-name descname">cdf</code><span class="sig-paren">(</span><em class="sig-param"><span class="n">X</span></em><span class="sig-paren">)</span><a class="headerlink" href="#statsmodels.discrete.count_model.ZeroInflatedGeneralizedPoisson.cdf" title="Permalink to this definition">¶</a></dt>
<dd><p>The cumulative distribution function of the model.</p>
</dd></dl>
</article>
</div>
</div>
</main>
</div>
<footer class="md-footer">
<div class="md-footer-nav">
<nav class="md-footer-nav__inner md-grid">
<a href="statsmodels.discrete.count_model.ZeroInflatedGeneralizedPoisson.html" title="statsmodels.discrete.count_model.ZeroInflatedGeneralizedPoisson"
class="md-flex md-footer-nav__link md-footer-nav__link--prev"
rel="prev">
<div class="md-flex__cell md-flex__cell--shrink">
<i class="md-icon md-icon--arrow-back md-footer-nav__button"></i>
</div>
<div class="md-flex__cell md-flex__cell--stretch md-footer-nav__title">
<span class="md-flex__ellipsis">
<span
class="md-footer-nav__direction"> Previous </span> statsmodels.discrete.count_model.ZeroInflatedGeneralizedPoisson </span>
</div>
</a>
<a href="statsmodels.discrete.count_model.ZeroInflatedGeneralizedPoisson.cov_params_func_l1.html" title="statsmodels.discrete.count_model.ZeroInflatedGeneralizedPoisson.cov_params_func_l1"
class="md-flex md-footer-nav__link md-footer-nav__link--next"
rel="next">
<div class="md-flex__cell md-flex__cell--stretch md-footer-nav__title"><span
class="md-flex__ellipsis"> <span
class="md-footer-nav__direction"> Next </span> statsmodels.discrete.count_model.ZeroInflatedGeneralizedPoisson.cov_params_func_l1 </span>
</div>
<div class="md-flex__cell md-flex__cell--shrink"><i
class="md-icon md-icon--arrow-forward md-footer-nav__button"></i>
</div>
</a>
</nav>
</div>
<div class="md-footer-meta md-typeset">
<div class="md-footer-meta__inner md-grid">
<div class="md-footer-copyright">
<div class="md-footer-copyright__highlight">
© Copyright 2009-2019, Josef Perktold, Skipper Seabold, Jonathan Taylor, statsmodels-developers.
</div>
Last updated on
Feb 02, 2021.
<br/>
Created using
<a href="http://www.sphinx-doc.org/">Sphinx</a> 3.4.3.
and
<a href="https://github.com/bashtage/sphinx-material/">Material for
Sphinx</a>
</div>
</div>
</div>
</footer>
<script src="../_static/javascripts/application.js"></script>
<script>app.initialize({version: "1.0.4", url: {base: ".."}})</script>
</body>
</html>
|
statsmodels/statsmodels.github.io
|
v0.12.2/generated/statsmodels.discrete.count_model.ZeroInflatedGeneralizedPoisson.cdf.html
|
HTML
|
bsd-3-clause
| 18,680
|
---------------------------------------------------------------------------------------------------
-- Description:
-- Check absence of HMI level resumption in case if:
-- - app has FULL level before unexpected disconnect
-- - app has been registered and switched to BACKGROUND just after unexpected disconnect
--
-- Preconditions:
-- 1) SDL and HMI are started
-- 2) App is registered and switched to FULL HMI level
--
-- Steps:
-- 1) App is disconnected unexpectedly and re-registered again
-- 2) App switched to BACKGROUND within default 3 sec. timeout
-- 3) Timeout expires and SDL starts HMI level resumption process
-- SDL does not resume app's HMI level
-- 4) App switched to FULL
-- SDL does change app's HMI level
---------------------------------------------------------------------------------------------------
--[[ Required Shared libraries ]]
local common = require("test_scripts/Resumption/HMI_Level/common")
--[[ Scenario ]]
common.Title("Preconditions")
common.Step("Clean environment", common.preconditions)
common.Step("Start SDL, HMI, connect Mobile, start Session", common.start)
common.Step("Register App 1", common.registerApp, { "DEFAULT" })
common.Step("Set HMI level App 1", common.setAppHMILevel, { "FULL" })
common.Title("Test")
common.Step("Unexpected disconnect", common.unexpectedDisconnect)
common.Step("Register App 1", common.registerApp, { "DEFAULT" })
common.Step("Set HMI level App 1", common.setAppHMILevel, { "BACKGROUND" })
common.Step("Check HMI level resumption App 1", common.checkHMILevelResumption, { nil })
common.Step("Activate App 1", common.activateApp, { "FULL" })
common.Title("Postconditions")
common.Step("Stop SDL", common.postconditions)
|
smartdevicelink/sdl_atf_test_scripts
|
test_scripts/Resumption/HMI_Level/02_single_app_LIMITED_FULL/002_07_FULL_BACKGROUND_no_resumption.lua
|
Lua
|
bsd-3-clause
| 1,698
|
package de.plushnikov.builder;
import lombok.Builder;
import lombok.ToString;
@Builder
@ToString
public class BuilderPredefined {
private String name;
private int age;
public static class FirstInnerClassDefined {
private boolean injectHere = false;
}
public static class BuilderPredefinedBuilder {
private String name;
private int someField;
public void age(int age) {
this.age = age;
}
}
public static void main(String[] args) {
BuilderPredefinedBuilder builder = BuilderPredefined.builder();
builder.name("Mascha").age(172);
System.out.println(builder);
BuilderPredefined result = builder.build();
System.out.println(result.toString());
}
}
|
mplushnikov/lombok-intellij-plugin
|
test-manual/src/main/java/de/plushnikov/builder/BuilderPredefined.java
|
Java
|
bsd-3-clause
| 712
|
body {
}
#tblData th
{
font-weight: bold;
}
.toast-top-center {
top: 80px;
}
.align-center {
text-align: center;
}
.bar-transparent {
border: 0px !important;
border-bottom-color: transparent !important;
background-image: none;
background-color:transparent !important;
border-bottom: none;
}
.align-right {
text-align:right !important;
float: right !important;
}
|
jeje87/DQMV
|
www/css/index.css
|
CSS
|
bsd-3-clause
| 412
|
#ifndef __LULESH_TYPES_H__
#define __LULESH_TYPES_H__
#include <math.h>
#include <vector>  // required for the std::vector members of struct Dom below
// Allow flexibility for arithmetic representations
// Could also support fixed point and interval arithmetic types
typedef float real4;
typedef double real8;
typedef long double real10; // 10 bytes on x86
typedef int Index_t; // array subscript and loop index
typedef real8 Real_t; // floating point representation
typedef int Int_t; // integer representation
inline real4 SQRT(real4 arg) { return sqrtf(arg); }
inline real8 SQRT(real8 arg) { return sqrt(arg); }
inline real10 SQRT(real10 arg) { return sqrtl(arg); }
inline real4 CBRT(real4 arg) { return cbrtf(arg); }
inline real8 CBRT(real8 arg) { return cbrt(arg); }
inline real10 CBRT(real10 arg) { return cbrtl(arg); }
inline real4 FABS(real4 arg) { return fabsf(arg); }
inline real8 FABS(real8 arg) { return fabs(arg); }
inline real10 FABS(real10 arg) { return fabsl(arg); }
struct Dom {
// Simulation Time
Real_t deltaTime;
Real_t totalTime;
// Ghosts
std::vector<Real_t> ng_front;
std::vector<Real_t> ng_back;
std::vector<Real_t> ng_right;
std::vector<Real_t> ng_left;
std::vector<Real_t> ng_up;
std::vector<Real_t> ng_down;
// Node centered persistent
std::vector<Real_t> m_x;
std::vector<Real_t> m_y;
std::vector<Real_t> m_z;
std::vector<Real_t> m_xd;
std::vector<Real_t> m_yd;
std::vector<Real_t> m_zd;
std::vector<Real_t> m_xdd;
std::vector<Real_t> m_ydd;
std::vector<Real_t> m_zdd;
std::vector<Real_t> m_fx;
std::vector<Real_t> m_fy;
std::vector<Real_t> m_fz;
std::vector<Real_t> m_nodalMass;
// Node centered nodesets
std::vector<Index_t> m_symmX;
std::vector<Index_t> m_symmY;
std::vector<Index_t> m_symmZ;
// Elem centered persistent
std::vector<Index_t> m_matElemlist;
std::vector<Index_t> m_nodelist;
std::vector<Index_t> m_lxim;
std::vector<Index_t> m_lxip;
std::vector<Index_t> m_letam;
std::vector<Index_t> m_letap;
std::vector<Index_t> m_lzetam;
std::vector<Index_t> m_lzetap;
std::vector<Int_t> m_elemBC;
std::vector<Real_t> m_e;
std::vector<Real_t> m_p;
std::vector<Real_t> m_q;
std::vector<Real_t> m_ql;
std::vector<Real_t> m_qq;
std::vector<Real_t> m_v;
std::vector<Real_t> m_volo;
std::vector<Real_t> m_delv;
std::vector<Real_t> m_vdov;
std::vector<Real_t> m_arealg;
std::vector<Real_t> m_ss;
std::vector<Real_t> m_elemMass;
// Elem centered temporary
std::vector<Real_t> m_dxx;
std::vector<Real_t> m_dyy;
std::vector<Real_t> m_dzz;
std::vector<Real_t> m_delv_xi;
std::vector<Real_t> m_delv_eta;
std::vector<Real_t> m_delv_zeta;
std::vector<Real_t> m_delx_xi;
std::vector<Real_t> m_delx_eta;
std::vector<Real_t> m_delx_zeta;
std::vector<Real_t> m_vnew;
std::vector<Real_t> m_determ;
// Parameters
Real_t u_cut;
Real_t hgcoef;
Real_t qstop;
Real_t monoq_max_slope;
Real_t monoq_limiter_mult;
Real_t e_cut;
Real_t p_cut;
Real_t ss4o3;
Real_t q_cut;
Real_t v_cut;
Real_t qlc_monoq;
Real_t qqc_monoq;
Real_t qqc;
Real_t eosvmax;
Real_t eosvmin;
Real_t pmin;
Real_t emin;
Real_t dvovmax;
Real_t refdens;
Real_t m_dtcourant;
Real_t m_dthydro;
};
#endif //__LULESH_TYPES_H__
|
tmcdonell/accelerate-lulesh
|
reference/emerging/charm++/lulesh_types.h
|
C
|
bsd-3-clause
| 3,229
|
//
// IDPAppDelegate.h
// OSX
//
// Created by Oleksa Korin on 10/1/15.
// Copyright (c) 2015 IDAP Group. All rights reserved.
//
#import <Cocoa/Cocoa.h>
@interface IDPAppDelegate : NSObject <NSApplicationDelegate>
@property (assign) IBOutlet NSWindow *window;
@end
|
idapgroup/IDPKit
|
Tests/OSX/OSX/IDPAppDelegate.h
|
C
|
bsd-3-clause
| 274
|
// This is the base browser library for dy.js. It is all that is needed in the
// browser for modules written for dy.js to work.
//
// In development you'd probably also want to include the dy/ext/reload
// extension to add dynamic reloading functionality. However, in production
// this is the *only* code you need (plus a `dy.load()` somewhere) for your
// dy.js-compatible code to work.
(function (window) {
var dy = function (name, dependencies, initializer) {
var self = dy.retrieveOrCreateModule(name)
// Ensure all the dependencies exist
dependencies = dependencies.map(function (dep) {
return dy.retrieveOrCreateModule(dep)
})
initializer.apply(self, dependencies)
}
// Registry of all active modules
dy.modules = {}
dy.retrieveOrCreateModule = function (name) {
if (dy.modules[name] === undefined) {
// Set up new instance
dy.modules[name] = {}
}
// Retrieve the instance of the module
return dy.modules[name]
}
// Trigger _load() hook on all loaded modules
dy.load = function () {
for (var name in dy.modules) {
if (dy.modules.hasOwnProperty(name) && dy.modules[name]._load !== undefined) {
dy.modules[name]._load()
}
}
}
if (window) {
window.dy = dy;
}
})(window);
|
dirk/dy.js
|
dy.js
|
JavaScript
|
bsd-3-clause
| 1,289
|
#include "common.h"
#define BOOST_SYSTEM_SOURCE
#include <boost/system/error_code.hpp>
#include <boost/system/detail/error_code.ipp>
|
ruslanch/memhook
|
src/memhook/boost_error_code.cpp
|
C++
|
bsd-3-clause
| 134
|
//
// AppDelegate.h
// GiottoDataViewer
//
// Created by Eiji Hayashi on 3/20/16.
// Copyright © 2016 Eiji Hayashi. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "GVLocationManager.h"
@interface AppDelegate : UIResponder <UIApplicationDelegate>
@property (strong, nonatomic) UIWindow *window;
@property (strong, nonatomic) GVLocationManager* locationManager;
@end
|
IoT-Expedition/GiottoDataViewer
|
iOSApp/GiottoDataViewer/GiottoDataViewer/AppDelegate.h
|
C
|
bsd-3-clause
| 385
|
//
// CachetHQIncidents.h
// CachetHQIncidents
//
// Created by Yoann Gini on 12/09/2018.
// Copyright © 2018 Yoann Gini (Open Source Project). All rights reserved.
//
#import <HITDevKit/HITDevKit.h>
@interface CachetHQIncidents : HITPeriodicPlugin
@end
|
ygini/Hello-IT
|
src/Plugins/CachetHQ/CachetHQIncidents/CachetHQIncidents.h
|
C
|
bsd-3-clause
| 262
|
package spec.concordion.results.exception;
import java.util.ArrayList;
import java.util.List;
import org.concordion.api.Element;
import org.concordion.integration.junit3.ConcordionTestCase;
import org.concordion.internal.command.ThrowableCaughtEvent;
import org.concordion.internal.listener.ThrowableRenderer;
import test.concordion.TestRig;
public class ExceptionTest extends ConcordionTestCase {
private List<StackTraceElement> stackTraceElements = new ArrayList<StackTraceElement>();
public void addStackTraceElement(String declaringClassName, String methodName, String filename, int lineNumber) {
if (filename.equals("null")) {
filename = null;
}
stackTraceElements.add(new StackTraceElement(declaringClassName, methodName, filename, lineNumber));
}
public String markAsException(String fragment, String expression, String errorMessage) {
Throwable t = new Throwable(errorMessage);
t.setStackTrace(stackTraceElements.toArray(new StackTraceElement[0]));
Element element = new Element((nu.xom.Element) new TestRig()
.processFragment(fragment)
.getXOMDocument()
.query("//p")
.get(0));
new ThrowableRenderer().throwableCaught(new ThrowableCaughtEvent(t, element, expression));
return element.toXML();
}
}
|
pobrelkey/xcordion
|
xcordion/spec/spec/concordion/results/exception/ExceptionTest.java
|
Java
|
bsd-3-clause
| 1,438
|
<?php
/**
* Created by PhpStorm.
* User: veysman
* Date: 01.02.15
* Time: 15:55
*/
namespace app\components\widgets;
use Yii;
use yii\base\Widget;
use app\components\EditURL;
use app\components\helper\TagHelper;
use yii\helpers\Url;
class OtherPostWidget extends ListGroupWidget
{
public $PostList=[];
public function init()
{
parent::init();
}
public function run()
{
if(count($this->PostList)>0){
foreach($this->PostList as $current){
array_push($this->Items, [
'label'=>$current['title'],
'url'=>Url::toRoute(['post/view', 'id'=>$current['id']]),
'active'=>false,
]);
}
array_push($this->Items, [
'label'=>'Все посты',
'url'=>Url::toRoute(['post/index']),
'active'=>false,
]);
parent::run();
}
}
}
|
roma-glushko/iblog2
|
components/widgets/OtherPostWidget.php
|
PHP
|
bsd-3-clause
| 970
|
#!/usr/bin/env python
import matplotlib
matplotlib.use('Agg')
import numpy as np # noqa
import pandas as pd # noqa
import pandas_ml as pdml # noqa
import pandas_ml.util.testing as tm # noqa
import sklearn.datasets as datasets # noqa
import xgboost as xgb # noqa
class TestXGBoost(tm.TestCase):
def test_objectmapper(self):
df = pdml.ModelFrame([])
self.assertIs(df.xgboost.XGBRegressor, xgb.XGBRegressor)
self.assertIs(df.xgboost.XGBClassifier, xgb.XGBClassifier)
def test_XGBClassifier(self):
iris = datasets.load_iris()
df = pdml.ModelFrame(iris)
models = ['XGBClassifier']
for model in models:
mod1 = getattr(df.xgboost, model)()
mod2 = getattr(xgb, model)()
df.fit(mod1)
mod2.fit(iris.data, iris.target)
result = df.predict(mod1)
expected = mod2.predict(iris.data)
self.assertIsInstance(result, pdml.ModelSeries)
self.assert_numpy_array_almost_equal(result.values, expected)
def test_XGBRegressor(self):
# http://scikit-learn.org/stable/auto_examples/plot_kernel_ridge_regression.html
X = 5 * np.random.rand(1000, 1)
y = np.sin(X).ravel()
# Add noise to targets
y[::5] += 3 * (0.5 - np.random.rand(X.shape[0] // 5))
df = pdml.ModelFrame(data=X, target=y)
models = ['XGBRegressor']
for model in models:
mod1 = getattr(df.xgboost, model)()
mod2 = getattr(xgb, model)()
df.fit(mod1)
mod2.fit(X, y)
result = df.predict(mod1)
expected = mod2.predict(X)
self.assertIsInstance(result, pdml.ModelSeries)
self.assert_numpy_array_almost_equal(result.values, expected)
self.assertIsInstance(df.predicted, pdml.ModelSeries)
self.assert_numpy_array_almost_equal(df.predicted.values, expected)
def test_grid_search(self):
tuned_parameters = [{'max_depth': [3, 4],
'n_estimators': [50, 100]}]
df = pdml.ModelFrame(datasets.load_digits())
cv = df.grid_search.GridSearchCV(df.xgb.XGBClassifier(), tuned_parameters, cv=5)
with tm.RNGContext(1):
df.fit(cv)
result = df.grid_search.describe(cv)
expected = pd.DataFrame({'mean': [0.89705064, 0.91764051, 0.91263216, 0.91930996],
'std': [0.03244061, 0.03259985, 0.02764891, 0.0266436],
'max_depth': [3, 3, 4, 4],
'n_estimators': [50, 100, 50, 100]},
columns=['mean', 'std', 'max_depth', 'n_estimators'])
self.assertIsInstance(result, pdml.ModelFrame)
tm.assert_frame_equal(result, expected)
def test_plotting(self):
iris = datasets.load_iris()
df = pdml.ModelFrame(iris)
df.fit(df.svm.SVC())
# raises if df.estimator is not XGBModel
with self.assertRaises(ValueError):
df.xgb.plot_importance()
with self.assertRaises(ValueError):
df.xgb.to_graphviz()
with self.assertRaises(ValueError):
df.xgb.plot_tree()
df.fit(df.xgb.XGBClassifier())
from matplotlib.axes import Axes
from graphviz import Digraph
try:
ax = df.xgb.plot_importance()
except ImportError:
import nose
# matplotlib.use doesn't work on Travis
# PYTHON=3.4 PANDAS=0.17.1 SKLEARN=0.16.1
raise nose.SkipTest()
self.assertIsInstance(ax, Axes)
assert ax.get_title() == 'Feature importance'
assert ax.get_xlabel() == 'F score'
assert ax.get_ylabel() == 'Features'
assert len(ax.patches) == 4
g = df.xgb.to_graphviz(num_trees=0)
self.assertIsInstance(g, Digraph)
ax = df.xgb.plot_tree(num_trees=0)
self.assertIsInstance(ax, Axes)
if __name__ == '__main__':
import nose
nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'],
exit=False)
|
sinhrks/pandas-ml
|
pandas_ml/xgboost/test/test_base.py
|
Python
|
bsd-3-clause
| 4,415
|
# -------------------------------------------------------------------------------------------------
# Copyright (c) 2010-2020 zsh-syntax-highlighting contributors
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted
# provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this list of conditions
# and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of
# conditions and the following disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of the zsh-syntax-highlighting contributors nor the names of its contributors
# may be used to endorse or promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# -------------------------------------------------------------------------------------------------
# -*- mode: zsh; sh-indentation: 2; indent-tabs-mode: nil; sh-basic-offset: 2; -*-
# vim: ft=zsh sw=2 ts=2 et
# -------------------------------------------------------------------------------------------------
# Define default styles.
: ${ZSH_HIGHLIGHT_STYLES[default]:=none}
: ${ZSH_HIGHLIGHT_STYLES[unknown-token]:=fg=red,bold}
: ${ZSH_HIGHLIGHT_STYLES[reserved-word]:=fg=yellow}
: ${ZSH_HIGHLIGHT_STYLES[suffix-alias]:=fg=green,underline}
: ${ZSH_HIGHLIGHT_STYLES[precommand]:=fg=green,underline}
: ${ZSH_HIGHLIGHT_STYLES[commandseparator]:=none}
: ${ZSH_HIGHLIGHT_STYLES[path]:=underline}
: ${ZSH_HIGHLIGHT_STYLES[path_pathseparator]:=}
: ${ZSH_HIGHLIGHT_STYLES[path_prefix_pathseparator]:=}
: ${ZSH_HIGHLIGHT_STYLES[globbing]:=fg=blue}
: ${ZSH_HIGHLIGHT_STYLES[history-expansion]:=fg=blue}
: ${ZSH_HIGHLIGHT_STYLES[command-substitution]:=none}
: ${ZSH_HIGHLIGHT_STYLES[command-substitution-delimiter]:=fg=magenta}
: ${ZSH_HIGHLIGHT_STYLES[process-substitution]:=none}
: ${ZSH_HIGHLIGHT_STYLES[process-substitution-delimiter]:=fg=magenta}
: ${ZSH_HIGHLIGHT_STYLES[single-hyphen-option]:=none}
: ${ZSH_HIGHLIGHT_STYLES[double-hyphen-option]:=none}
: ${ZSH_HIGHLIGHT_STYLES[back-quoted-argument]:=none}
: ${ZSH_HIGHLIGHT_STYLES[back-quoted-argument-delimiter]:=fg=magenta}
: ${ZSH_HIGHLIGHT_STYLES[single-quoted-argument]:=fg=yellow}
: ${ZSH_HIGHLIGHT_STYLES[double-quoted-argument]:=fg=yellow}
: ${ZSH_HIGHLIGHT_STYLES[dollar-quoted-argument]:=fg=yellow}
: ${ZSH_HIGHLIGHT_STYLES[rc-quote]:=fg=cyan}
: ${ZSH_HIGHLIGHT_STYLES[dollar-double-quoted-argument]:=fg=cyan}
: ${ZSH_HIGHLIGHT_STYLES[back-double-quoted-argument]:=fg=cyan}
: ${ZSH_HIGHLIGHT_STYLES[back-dollar-quoted-argument]:=fg=cyan}
: ${ZSH_HIGHLIGHT_STYLES[assign]:=none}
: ${ZSH_HIGHLIGHT_STYLES[redirection]:=fg=yellow}
: ${ZSH_HIGHLIGHT_STYLES[comment]:=fg=black,bold}
: ${ZSH_HIGHLIGHT_STYLES[named-fd]:=none}
: ${ZSH_HIGHLIGHT_STYLES[arg0]:=fg=green}
# Whether the highlighter should be called or not.
_zsh_highlight_highlighter_main_predicate()
{
# may need to remove path_prefix highlighting when the line ends
[[ $WIDGET == zle-line-finish ]] || _zsh_highlight_buffer_modified
}
# Helper to deal with tokens crossing line boundaries.
_zsh_highlight_main_add_region_highlight() {
integer start=$1 end=$2
shift 2
if (( in_alias )); then
[[ $1 == unknown-token ]] && alias_style=unknown-token
return
fi
if (( in_param )); then
if [[ $1 == unknown-token ]]; then
param_style=unknown-token
fi
if [[ -n $param_style ]]; then
return
fi
param_style=$1
return
fi
# The calculation was relative to $buf but region_highlight is relative to $BUFFER.
(( start += buf_offset ))
(( end += buf_offset ))
list_highlights+=($start $end $1)
}
_zsh_highlight_main_add_many_region_highlights() {
for 1 2 3; do
_zsh_highlight_main_add_region_highlight $1 $2 $3
done
}
_zsh_highlight_main_calculate_fallback() {
local -A fallback_of; fallback_of=(
alias arg0
suffix-alias arg0
builtin arg0
function arg0
command arg0
precommand arg0
hashed-command arg0
arg0_\* arg0
path_prefix path
# The path separator fallback won't ever be used, due to the optimisation
# in _zsh_highlight_main_highlighter_highlight_path_separators().
path_pathseparator path
path_prefix_pathseparator path_prefix
single-quoted-argument{-unclosed,}
double-quoted-argument{-unclosed,}
dollar-quoted-argument{-unclosed,}
back-quoted-argument{-unclosed,}
command-substitution{-quoted,,-unquoted,}
command-substitution-delimiter{-quoted,,-unquoted,}
command-substitution{-delimiter,}
process-substitution{-delimiter,}
back-quoted-argument{-delimiter,}
)
local needle=$1 value
reply=($1)
while [[ -n ${value::=$fallback_of[(k)$needle]} ]]; do
unset "fallback_of[$needle]" # paranoia against infinite loops
reply+=($value)
needle=$value
done
}
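# Illustrative example (not executed; assumes the default table above): a call
# like
#   _zsh_highlight_main_calculate_fallback single-quoted-argument-unclosed
# would leave reply=(single-quoted-argument-unclosed single-quoted-argument),
# i.e. an unclosed single-quoted string falls back to the plain
# single-quoted-argument style when no dedicated style is configured.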
# Get the type of a command.
#
# Uses the zsh/parameter module if available to avoid forks, and a
# wrapper around 'type -w' as fallback.
#
# If $2 is 0, do not consider aliases.
#
# The result will be stored in REPLY.
_zsh_highlight_main__type() {
integer -r aliases_allowed=${2-1}
# We won't cache replies of anything that exists as an alias at all, to
# ensure the cached value is correct regardless of $aliases_allowed.
#
# ### We probably _should_ cache them in a cache that's keyed on the value of
# ### $aliases_allowed, on the assumption that aliases are the common case.
integer may_cache=1
# Cache lookup
if (( $+_zsh_highlight_main__command_type_cache )); then
REPLY=$_zsh_highlight_main__command_type_cache[(e)$1]
if [[ -n "$REPLY" ]]; then
return
fi
fi
# Main logic
if (( $#options_to_set )); then
setopt localoptions $options_to_set;
fi
unset REPLY
if zmodload -e zsh/parameter; then
if (( $+aliases[(e)$1] )); then
may_cache=0
fi
if (( $+aliases[(e)$1] )) && (( aliases_allowed )); then
REPLY=alias
elif [[ $1 == *.* && -n ${1%.*} ]] && (( $+saliases[(e)${1##*.}] )); then
REPLY='suffix alias'
elif (( $reswords[(Ie)$1] )); then
REPLY=reserved
elif (( $+functions[(e)$1] )); then
REPLY=function
elif (( $+builtins[(e)$1] )); then
REPLY=builtin
elif (( $+commands[(e)$1] )); then
REPLY=command
# None of the special hashes had a match, so fall back to 'type -w', for
# forward compatibility with future versions of zsh that may add new command
# types.
#
# zsh 5.2 and older have a bug whereby running 'type -w ./sudo' implicitly
# runs 'hash ./sudo=/usr/local/bin/./sudo' (assuming /usr/local/bin/sudo
# exists and is in $PATH). Avoid triggering the bug, at the expense of
# falling through to the $() below, incurring a fork. (Issue #354.)
#
# The first disjunct mimics the isrelative() C call from the zsh bug.
elif { [[ $1 != */* ]] || is-at-least 5.3 } &&
# Add a subshell to avoid a zsh upstream bug; see issue #606.
# ### Remove the subshell when we stop supporting zsh 5.7.1 (I assume 5.8 will have the bugfix).
! (builtin type -w -- "$1") >/dev/null 2>&1; then
REPLY=none
fi
fi
if ! (( $+REPLY )); then
# zsh/parameter not available or had no matches.
#
# Note that 'type -w' will run 'rehash' implicitly.
#
# We 'unalias' in a subshell, so the parent shell is not affected.
#
# The colon command is there just to avoid a command substitution that
# starts with an arithmetic expression [«((…))» as the first thing inside
# «$(…)»], which is an area that has had some parsing bugs before 5.6
# (approximately).
REPLY="${$(:; (( aliases_allowed )) || unalias -- "$1" 2>/dev/null; LC_ALL=C builtin type -w -- "$1" 2>/dev/null)##*: }"
if [[ $REPLY == 'alias' ]]; then
may_cache=0
fi
fi
# Cache population
if (( may_cache )) && (( $+_zsh_highlight_main__command_type_cache )); then
_zsh_highlight_main__command_type_cache[(e)$1]=$REPLY
fi
[[ -n $REPLY ]]
return $?
}
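# Illustrative example (not executed; assumes a stock zsh with zsh/parameter
# loadable and no user-defined aliases by these names):
#   _zsh_highlight_main__type if         # REPLY=reserved
#   _zsh_highlight_main__type zmodload   # REPLY=builtin
#   _zsh_highlight_main__type nosuchcmd  # REPLY=none (the function still returns 0;
#                                        # only an empty REPLY yields a non-zero status)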
# Checks whether $1 is something that can be run.
#
# Return 0 if runnable, 1 if not runnable, 2 if trouble.
_zsh_highlight_main__is_runnable() {
if _zsh_highlight_main__type "$1"; then
[[ $REPLY != none ]]
else
return 2
fi
}
# Check whether the first argument is a redirection operator token.
# Report result via the exit code.
_zsh_highlight_main__is_redirection() {
# A redirection operator token:
# - starts with an optional single-digit number;
# - then, has a '<' or '>' character;
# - is not a process substitution [<(...) or >(...)].
# - is not a numeric glob <->
[[ $1 == (<0-9>|)(\<|\>)* ]] && [[ $1 != (\<|\>)$'\x28'* ]] && [[ $1 != *'<'*'-'*'>'* ]]
}
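# Illustrative example (not executed): the pattern above accepts tokens such as
# '>', '>>', '2>' and '<&' as redirection operators, but rejects '<(ls)'
# (process substitution) and '<1-10>' (a numeric range glob).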
# Resolve alias.
#
# Takes a single argument.
#
# The result will be stored in REPLY.
_zsh_highlight_main__resolve_alias() {
if zmodload -e zsh/parameter; then
REPLY=${aliases[$arg]}
else
REPLY="${"$(alias -- $arg)"#*=}"
fi
}
# Check that the top of $braces_stack has the expected value. If it does, set
# the style according to $2; otherwise, set style=unknown-token.
#
# $1: character expected to be at the top of $braces_stack
# $2: optional assignment to style it if matches
# return value is 0 if there is a match else 1
_zsh_highlight_main__stack_pop() {
if [[ $braces_stack[1] == $1 ]]; then
braces_stack=${braces_stack:1}
if (( $+2 )); then
style=$2
fi
return 0
else
style=unknown-token
return 1
fi
}
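# Illustrative example (not executed): while highlighting 'if true; then :; fi',
# the word 'if' pushes ':?' onto $braces_stack, 'then' pops the ':' and 'fi'
# pops the '?'; a stray 'fi' with no matching 'if' would make the pop fail and
# be styled as unknown-token.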
# Main syntax highlighting function.
_zsh_highlight_highlighter_main_paint()
{
setopt localoptions extendedglob
# At the PS3 prompt and in vared, highlight nothing.
#
# (We can't check this in _zsh_highlight_highlighter_main_predicate because
# if the predicate returns false, the previous value of region_highlight
# would be reused.)
if [[ $CONTEXT == (select|vared) ]]; then
return
fi
typeset -a ZSH_HIGHLIGHT_TOKENS_COMMANDSEPARATOR
typeset -a ZSH_HIGHLIGHT_TOKENS_CONTROL_FLOW
local -a options_to_set reply # used in callees
local REPLY
# $flags_with_argument is a set of letters, corresponding to the option letters
# that would be followed by a colon in a getopts specification.
local flags_with_argument
# $flags_sans_argument is a set of letters, corresponding to the option letters
# that wouldn't be followed by a colon in a getopts specification.
local flags_sans_argument
# $precommand_options maps precommand name to values of $flags_with_argument and
# $flags_sans_argument for that precommand, joined by a colon. (The value is NOT
# a getopt(3) spec, although it resembles one.)
#
# Currently, setting $flags_sans_argument is only important for commands that
# have a non-empty $flags_with_argument; see test-data/precommand4.zsh.
local -A precommand_options
precommand_options=(
# Precommand modifiers as of zsh 5.6.2 cf. zshmisc(1).
'-' ''
'builtin' ''
'command' :pvV
'exec' a:cl
'noglob' ''
# 'time' and 'nocorrect' shouldn't be added here; they're reserved words, not precommands.
'doas' aCu:Lns # as of OpenBSD's doas(1) dated September 4, 2016
'nice' n: # as of current POSIX spec
'pkexec' '' # doesn't take short options; immune to #121 because it's usually not passed --option flags
# Argumentless flags that can't be followed by a command: -e -h -K -k -V -v
'sudo' Cgprtu:AEHPSbilns # as of sudo 1.8.21p2
'stdbuf' ioe:
'eatmydata' ''
'catchsegv' ''
'nohup' ''
'setsid' :wc
# As of OpenSSH 8.1p1; -k is deliberately left out since it may not be followed by a command
'ssh-agent' aEPt:csDd
# suckless-tools v44
# Argumentless flags that can't be followed by a command: -v
'tabbed' gnprtTuU:cdfhs
# moreutils 0.62-1
'chronic' :ev
'ifne' :n
)
# Commands that would need to skip one positional argument:
# flock
# ssh
if [[ $zsyh_user_options[ignorebraces] == on || ${zsyh_user_options[ignoreclosebraces]:-off} == on ]]; then
local right_brace_is_recognised_everywhere=false
else
local right_brace_is_recognised_everywhere=true
fi
if [[ $zsyh_user_options[pathdirs] == on ]]; then
options_to_set+=( PATH_DIRS )
fi
ZSH_HIGHLIGHT_TOKENS_COMMANDSEPARATOR=(
'|' '||' ';' '&' '&&'
'|&'
'&!' '&|'
# ### 'case' syntax, but followed by a pattern, not by a command
# ';;' ';&' ';|'
)
# Tokens that, at (naively-determined) "command position", are followed by
# a de jure command position. All of these are reserved words.
ZSH_HIGHLIGHT_TOKENS_CONTROL_FLOW=(
$'\x7b' # block
$'\x28' # subshell
'()' # anonymous function
'while'
'until'
'if'
'then'
'elif'
'else'
'do'
'time'
'coproc'
'!' # reserved word; unrelated to $histchars[1]
)
if (( $+X_ZSH_HIGHLIGHT_DIRS_BLACKLIST )); then
print >&2 'zsh-syntax-highlighting: X_ZSH_HIGHLIGHT_DIRS_BLACKLIST is deprecated. Please use ZSH_HIGHLIGHT_DIRS_BLACKLIST.'
ZSH_HIGHLIGHT_DIRS_BLACKLIST=($X_ZSH_HIGHLIGHT_DIRS_BLACKLIST)
unset X_ZSH_HIGHLIGHT_DIRS_BLACKLIST
fi
_zsh_highlight_main_highlighter_highlight_list -$#PREBUFFER '' 1 "$PREBUFFER$BUFFER"
# 'end' is a reserved word, so use end_ for the loop variable instead
local start end_ style
for start end_ style in $reply; do
(( start >= end_ )) && { print -r -- >&2 "zsh-syntax-highlighting: BUG: _zsh_highlight_highlighter_main_paint: start($start) >= end($end_)"; return }
(( end_ <= 0 )) && continue
(( start < 0 )) && start=0 # having start<0 is normal with e.g. multiline strings
_zsh_highlight_main_calculate_fallback $style
_zsh_highlight_add_highlight $start $end_ $reply
done
}
# $1 is the offset of $4 from the parent buffer. Added to the returned highlights.
# $2 is the initial braces_stack (for a closing paren).
# $3 is 1 if $4 contains the end of $BUFFER, else 0.
# $4 is the buffer to highlight.
# Returns:
# $REPLY: $buf[REPLY] is the last character parsed.
# $reply is an array of region_highlight additions.
# exit code is 0 if the braces_stack is empty, 1 otherwise.
_zsh_highlight_main_highlighter_highlight_list()
{
integer start_pos end_pos=0 buf_offset=$1 has_end=$3
# alias_style is the style to apply to an alias once in_alias=0
# Usually 'alias' but set to 'unknown-token' if any word expanded from
# the alias would be highlighted as unknown-token
# param_style is analogous for parameter expansions
local alias_style param_style arg buf=$4 highlight_glob=true style
local in_array_assignment=false # true between 'a=(' and the matching ')'
# in_alias is the number of shifts of $args remaining until arg=$args[1] next
# pops a word that came from BUFFER rather than from an alias expansion.
# in_param is analogous for parameter expansions
integer in_alias=0 in_param=0 len=$#buf
local -a match mbegin mend list_highlights
# seen_alias is a map of aliases already seen to avoid loops like alias a=b b=a
local -A seen_alias
# Pattern for parameter names
readonly parameter_name_pattern='([A-Za-z_][A-Za-z0-9_]*|[0-9]+)'
list_highlights=()
# "R" for round
# "Q" for square
# "Y" for curly
# "T" for [[ ]]
# "S" for $( )
# "D" for do/done
# "$" for 'end' (matches 'foreach' always; also used with cshjunkiequotes in repeat/while)
# "?" for 'if'/'fi'; also checked by 'elif'/'else'
# ":" for 'then'
local braces_stack=$2
# State machine
#
# The states are:
# - :start: Command word
# - :start_of_pipeline: Start of a 'pipeline' as defined in zshmisc(1).
# Only valid when :start: is present
# - :sudo_opt: A leading-dash option to a precommand, whether it takes an
# argument or not. (Example: sudo's "-u" or "-i".)
# - :sudo_arg: The argument to a precommand's leading-dash option,
# when given as a separate word; i.e., "foo" in "-u foo" (two
# words) but not in "-ufoo" (one word).
# - :regular: "Not a command word", and command delimiters are permitted.
# Mainly used to detect premature termination of commands.
# - :always: The word 'always' in the «{ foo } always { bar }» syntax.
#
# When the kind of a word is not yet known, $this_word / $next_word may contain
# multiple states. For example, after "sudo -i", the next word may be either
# another --flag or a command name, hence the state would include both ':start:'
# and ':sudo_opt:'.
#
# The tokens are always added with both leading and trailing colons to serve as
# word delimiters (an improvised array); [[ $x == *':foo:'* ]] and x=${x//:foo:/}
# will DTRT regardless of how many elements or repetitions $x has.
#
# Handling of redirections: upon seeing a redirection token, we must stall
# the current state --- that is, the value of $this_word --- for two iterations
# (one for the redirection operator, one for the word following it representing
# the redirection target). Therefore, we set $in_redirection to 2 upon seeing a
# redirection operator, decrement it each iteration, and stall the current state
# when it is non-zero. Thus, upon reaching the next word (the one that follows
# the redirection operator and target), $this_word will still contain values
# appropriate for the word immediately following the word that preceded the
# redirection operator.
#
# The "the previous word was a redirection operator" state is not communicated
# to the next iteration via $next_word/$this_word as usual, but via
# $in_redirection. The value of $next_word from the iteration that processed
# the operator is discarded.
#
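# Illustrative example (not executed): in 'echo > file arg', $this_word is
# stalled via $in_redirection while '>' and 'file' are processed, so by the
# time 'arg' is reached it is analysed exactly as it would have been had the
# redirection not been present.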
local this_word next_word=':start::start_of_pipeline:'
integer in_redirection
# Processing buffer
local proc_buf="$buf"
local -a args
if [[ $zsyh_user_options[interactivecomments] == on ]]; then
args=(${(zZ+c+)buf})
else
args=(${(z)buf})
fi
while (( $#args )); do
arg=$args[1]
shift args
if (( in_alias )); then
(( in_alias-- ))
if (( in_alias == 0 )); then
seen_alias=()
# start_pos and end_pos are of the alias (previous $arg) here
_zsh_highlight_main_add_region_highlight $start_pos $end_pos $alias_style
fi
fi
if (( in_param )); then
(( in_param-- ))
if (( in_param == 0 )); then
# start_pos and end_pos are of the '$foo' word (previous $arg) here
_zsh_highlight_main_add_region_highlight $start_pos $end_pos $param_style
param_style=""
fi
fi
# Initialize this_word and next_word.
if (( in_redirection == 0 )); then
this_word=$next_word
next_word=':regular:'
else
# Stall $next_word.
(( --in_redirection ))
fi
# Initialize per-"simple command" [zshmisc(1)] variables:
#
# $style how to highlight $arg
# $in_array_assignment boolean flag for "between '(' and ')' of array assignment"
# $highlight_glob boolean flag for "'noglob' is in effect"
#
style=unknown-token
if [[ $this_word == *':start:'* ]]; then
in_array_assignment=false
if [[ $arg == 'noglob' ]]; then
highlight_glob=false
fi
fi
if (( in_alias == 0 && in_param == 0 )); then
# Compute the new $start_pos and $end_pos, skipping over whitespace in $buf.
[[ "$proc_buf" = (#b)(#s)(([ $'\t']|\\$'\n')#)* ]]
# The first, outer parenthesis
integer offset="${#match[1]}"
(( start_pos = end_pos + offset ))
(( end_pos = start_pos + $#arg ))
# Compute the new $proc_buf. We advance it
# (chop off characters from the beginning)
# beyond what end_pos points to, by skipping
# as many characters as end_pos was advanced.
#
# end_pos was advanced by $offset (via start_pos)
# and by $#arg. Note the `start_pos=$end_pos`
# below.
#
# As for the [,len]. We could use [,len-start_pos+offset]
# here, but to make it easier on eyes, we use len and
# rely on the fact that Zsh simply handles that. The
# length of proc_buf is len-start_pos+offset because
# we're chopping it to match current start_pos, so its
# length matches the previous value of start_pos.
#
# Why [,-1] is slower than [,length] isn't clear.
proc_buf="${proc_buf[offset + $#arg + 1,len]}"
fi
# Handle the INTERACTIVE_COMMENTS option.
#
# We use the (Z+c+) flag so the entire comment is presented as one token in $arg.
if [[ $zsyh_user_options[interactivecomments] == on && $arg[1] == $histchars[3] ]]; then
if [[ $this_word == *(':regular:'|':start:')* ]]; then
style=comment
else
style=unknown-token # prematurely terminated
fi
_zsh_highlight_main_add_region_highlight $start_pos $end_pos $style
# Stall this arg
in_redirection=1
continue
fi
if [[ $this_word == *':start:'* ]] && ! (( in_redirection )); then
# Expand aliases.
# An alias is ineligible for expansion while it's being expanded (see #652/#653).
_zsh_highlight_main__type "$arg" "$(( ! ${+seen_alias[$arg]} ))"
local res="$REPLY"
if [[ $res == "alias" ]]; then
# Mark insane aliases as unknown-token (cf. #263).
if [[ $arg == ?*=* ]]; then
(( in_alias == 0 )) && in_alias=1
_zsh_highlight_main_add_region_highlight $start_pos $end_pos unknown-token
continue
fi
seen_alias[$arg]=1
_zsh_highlight_main__resolve_alias $arg
local -a alias_args
# Elision is desired in case alias x=''
if [[ $zsyh_user_options[interactivecomments] == on ]]; then
alias_args=(${(zZ+c+)REPLY})
else
alias_args=(${(z)REPLY})
fi
args=( $alias_args $args )
if (( in_alias == 0 )); then
alias_style=alias
# Add one because we will in_alias-- on the next loop iteration so
# this iteration should be considered in in_alias as well
(( in_alias += $#alias_args + 1 ))
else
# This arg is already included in the count, so no need to + 1.
(( in_alias += $#alias_args ))
fi
(( in_redirection++ )) # Stall this arg
continue
else
_zsh_highlight_main_highlighter_expand_path $arg
_zsh_highlight_main__type "$REPLY" 0
res="$REPLY"
fi
fi
# Analyse the current word.
if _zsh_highlight_main__is_redirection $arg ; then
if (( in_redirection == 1 )); then
# The condition excludes the case that BUFFER='{foo}>&2' and we're on the '>&'.
_zsh_highlight_main_add_region_highlight $start_pos $end_pos unknown-token
else
in_redirection=2
_zsh_highlight_main_add_region_highlight $start_pos $end_pos redirection
fi
continue
elif [[ $arg == '{'${~parameter_name_pattern}'}' ]] && _zsh_highlight_main__is_redirection $args[1]; then
# named file descriptor: {foo}>&2
in_redirection=3
_zsh_highlight_main_add_region_highlight $start_pos $end_pos named-fd
continue
fi
# Expand parameters.
#
# ### For now, expand just '$foo' or '${foo}', possibly with braces, but with
# ### no other features of the parameter expansion syntax. (No ${(x)foo},
# ### no ${foo[x]}, no ${foo:-x}.)
() {
# That's not entirely correct --- if the parameter's value happens to be a reserved
# word, the parameter expansion will be highlighted as a reserved word --- but that
# incorrectness is outweighed by the usability improvement of permitting the use of
# parameters that refer to commands, functions, and builtins.
local -a match mbegin mend
local MATCH; integer MBEGIN MEND
local parameter_name
local -a words
if [[ $arg[1] == '$' ]] && [[ ${arg[2]} == '{' ]] && [[ ${arg[-1]} == '}' ]]; then
parameter_name=${${arg:2}%?}
elif [[ $arg[1] == '$' ]]; then
parameter_name=${arg:1}
fi
if [[ $res == none ]] && zmodload -e zsh/parameter &&
[[ ${parameter_name} =~ ^${~parameter_name_pattern}$ ]] &&
(( ${+parameters[(e)${MATCH}]} )) && [[ ${parameters[(e)$MATCH]} != *special* ]]
then
# Set $arg.
case ${(tP)MATCH} in
(*array*|*assoc*)
words=( ${(P)MATCH} )
;;
(*)
# scalar, presumably
words=( ${(P)MATCH} )
;;
esac
(( in_param = 1 + $#words ))
args=( $words $args )
arg=$args[1]
_zsh_highlight_main__type "$arg" 0
res=$REPLY
fi
}
# Parse the sudo command line
if (( ! in_redirection )); then
if [[ $this_word == *':sudo_opt:'* ]]; then
if [[ -n $flags_with_argument ]] &&
{ [[ -n $flags_sans_argument ]] && [[ $arg == '-'[$flags_sans_argument]#[$flags_with_argument] ]] ||
[[ $arg == '-'[$flags_with_argument] ]] }; then
# Flag that requires an argument
this_word=${this_word//:start:/}
next_word=':sudo_arg:'
elif [[ -n $flags_with_argument ]] &&
{ [[ -n $flags_sans_argument ]] && [[ $arg == '-'[$flags_sans_argument]#[$flags_with_argument]* ]] ||
[[ $arg == '-'[$flags_with_argument]* ]] }; then
# Argument attached in the same word
this_word=${this_word//:start:/}
next_word+=':start:'
next_word+=':sudo_opt:'
elif [[ -n $flags_sans_argument ]] &&
[[ $arg == '-'[$flags_sans_argument]# ]]; then
# Flag that requires no argument
this_word=':sudo_opt:'
next_word+=':start:'
next_word+=':sudo_opt:'
elif [[ $arg == '-'* ]]; then
# Unknown flag. We don't know whether it takes an argument or not,
# so modify $next_word as we do for flags that require no argument.
# With that behaviour, if the flag in fact takes no argument we'll
# highlight the inner command word correctly, and if it does take an
# argument we'll highlight the command word correctly if the argument
# was given in the same shell word as the flag (as in '-uphy1729' or
# '--user=phy1729' without spaces).
this_word=':sudo_opt:'
next_word+=':start:'
next_word+=':sudo_opt:'
else
# Not an option flag; nothing to do. (If the command line is
# syntactically valid, ${this_word//:sudo_opt:/} should be
# non-empty now.)
this_word=${this_word//:sudo_opt:/}
fi
elif [[ $this_word == *':sudo_arg:'* ]]; then
next_word+=':sudo_opt:'
next_word+=':start:'
fi
fi
# The Great Fork: is this a command word? Is this a non-command word?
if [[ -n ${(M)ZSH_HIGHLIGHT_TOKENS_COMMANDSEPARATOR:#"$arg"} ]]; then
if _zsh_highlight_main__stack_pop T || _zsh_highlight_main__stack_pop Q; then
# Missing closing square bracket(s)
style=unknown-token
elif [[ $this_word == *':regular:'* ]]; then
# This highlights empty commands (semicolon follows nothing) as an error.
# Zsh accepts them, though.
style=commandseparator
else
style=unknown-token
fi
if [[ $arg == ';' ]] && $in_array_assignment; then
# literal newline inside an array assignment
next_word=':regular:'
else
next_word=':start:'
highlight_glob=true
if [[ $arg != '|' && $arg != '|&' ]]; then
next_word+=':start_of_pipeline:'
fi
fi
elif ! (( in_redirection)) && [[ $this_word == *':always:'* && $arg == 'always' ]]; then
# try-always construct
style=reserved-word # de facto a reserved word, although not de jure
highlight_glob=true
next_word=':start::start_of_pipeline:' # only left brace is allowed, apparently
elif ! (( in_redirection)) && [[ $this_word == *':start:'* ]]; then # $arg is the command word
if (( ${+precommand_options[$arg]} )) && _zsh_highlight_main__is_runnable $arg; then
style=precommand
flags_with_argument=${precommand_options[$arg]%:*}
flags_sans_argument=${precommand_options[$arg]#*:}
next_word=${next_word//:regular:/}
next_word+=':sudo_opt:'
next_word+=':start:'
else
case $res in
reserved) # reserved word
style=reserved-word
# Match braces and handle special cases.
case $arg in
(time|nocorrect)
next_word=${next_word//:regular:/}
next_word+=':start:'
;;
($'\x7b')
braces_stack='Y'"$braces_stack"
;;
($'\x7d')
# We're at command word, so no need to check $right_brace_is_recognised_everywhere
_zsh_highlight_main__stack_pop 'Y' reserved-word
if [[ $style == reserved-word ]]; then
next_word+=':always:'
fi
;;
($'\x5b\x5b')
braces_stack='T'"$braces_stack"
;;
('do')
braces_stack='D'"$braces_stack"
;;
('done')
_zsh_highlight_main__stack_pop 'D' reserved-word
;;
('if')
braces_stack=':?'"$braces_stack"
;;
('then')
_zsh_highlight_main__stack_pop ':' reserved-word
;;
('elif')
if [[ ${braces_stack[1]} == '?' ]]; then
braces_stack=':'"$braces_stack"
else
style=unknown-token
fi
;;
('else')
if [[ ${braces_stack[1]} == '?' ]]; then
:
else
style=unknown-token
fi
;;
('fi')
_zsh_highlight_main__stack_pop '?'
;;
('foreach')
braces_stack='$'"$braces_stack"
;;
('end')
_zsh_highlight_main__stack_pop '$' reserved-word
;;
('repeat')
# skip the repeat-count word
in_redirection=2
# The redirection mechanism assumes $this_word describes the word
# following the redirection. Make it so.
#
# That word can be a command word with shortloops (`repeat 2 ls`)
# or a command separator (`repeat 2; ls` or `repeat 2; do ls; done`).
#
# The repeat-count word will be handled like a redirection target.
this_word=':start::regular:'
;;
('!')
if [[ $this_word != *':start_of_pipeline:'* ]]; then
style=unknown-token
else
# '!' reserved word at start of pipeline; style already set above
fi
;;
esac
;;
'suffix alias') style=suffix-alias;;
alias) :;;
builtin) style=builtin
[[ $arg == $'\x5b' ]] && braces_stack='Q'"$braces_stack"
;;
function) style=function;;
command) style=command;;
hashed) style=hashed-command;;
none) if (( ! in_param )) && _zsh_highlight_main_highlighter_check_assign; then
_zsh_highlight_main_add_region_highlight $start_pos $end_pos assign
local i=$(( arg[(i)=] + 1 ))
if [[ $arg[i] == '(' ]]; then
in_array_assignment=true
else
# assignment to a scalar parameter.
# (For array assignments, the command doesn't start until the ")" token.)
#
# Discard :start_of_pipeline:, if present, as '!' is not valid
# after assignments.
next_word+=':start:'
if (( i <= $#arg )); then
() {
local highlight_glob=false
[[ $zsyh_user_options[globassign] == on ]] && highlight_glob=true
_zsh_highlight_main_highlighter_highlight_argument $i
}
fi
fi
continue
elif (( ! in_param )) &&
[[ $arg[0,1] = $histchars[0,1] ]] && (( $#arg[0,2] == 2 )); then
style=history-expansion
elif (( ! in_param )) &&
[[ $arg[0,1] == $histchars[2,2] ]]; then
style=history-expansion
elif (( ! in_param )) &&
[[ $arg[1,2] == '((' ]]; then
# Arithmetic evaluation.
#
# Note: prior to zsh-5.1.1-52-g4bed2cf (workers/36669), the ${(z)...}
# splitter would only output the '((' token if the matching '))' had
# been typed. Therefore, under those versions of zsh, BUFFER="(( 42"
# would be highlighted as an error until the matching "))" are typed.
#
# We highlight just the opening parentheses, as a reserved word; this
# is how [[ ... ]] is highlighted, too.
_zsh_highlight_main_add_region_highlight $start_pos $((start_pos + 2)) reserved-word
if [[ $arg[-2,-1] == '))' ]]; then
_zsh_highlight_main_add_region_highlight $((end_pos - 2)) $end_pos reserved-word
fi
continue
elif (( ! in_param )) &&
[[ $arg == '()' ]]; then
# anonymous function
style=reserved-word
elif (( ! in_param )) &&
[[ $arg == $'\x28' ]]; then
# subshell
style=reserved-word
braces_stack='R'"$braces_stack"
elif (( ! in_param )) &&
[[ $arg == $'\x29' ]]; then
# end of subshell or command substitution
if _zsh_highlight_main__stack_pop 'S'; then
REPLY=$start_pos
reply=($list_highlights)
return 0
fi
_zsh_highlight_main__stack_pop 'R' reserved-word
else
if _zsh_highlight_main_highlighter_check_path $arg; then
style=$REPLY
else
style=unknown-token
fi
fi
;;
*) _zsh_highlight_main_add_region_highlight $start_pos $end_pos arg0_$res
continue
;;
esac
fi
if [[ -n ${(M)ZSH_HIGHLIGHT_TOKENS_CONTROL_FLOW:#"$arg"} ]]; then
next_word=':start::start_of_pipeline:'
fi
else # $arg is a non-command word
case $arg in
$'\x29') # subshell or end of array assignment
if $in_array_assignment; then
style=assign
in_array_assignment=false
next_word+=':start:'
elif (( in_redirection )); then
style=unknown-token
else
if _zsh_highlight_main__stack_pop 'S'; then
REPLY=$start_pos
reply=($list_highlights)
return 0
fi
_zsh_highlight_main__stack_pop 'R' reserved-word
fi;;
$'\x28\x29') # possibly a function definition
if (( in_redirection )) || $in_array_assignment; then
style=unknown-token
else
if [[ $zsyh_user_options[multifuncdef] == on ]] || false # TODO: or if the previous word was a command word
then
next_word+=':start::start_of_pipeline:'
fi
style=reserved-word
fi
;;
*) if false; then
elif [[ $arg = $'\x7d' ]] && $right_brace_is_recognised_everywhere; then
# Parsing rule for '}', quoted from zshmisc(1):
#
# Additionally, `tt(})' is recognized in any position if neither the
# tt(IGNORE_BRACES) option nor the tt(IGNORE_CLOSE_BRACES) option is set.
if (( in_redirection )) || $in_array_assignment; then
style=unknown-token
else
_zsh_highlight_main__stack_pop 'Y' reserved-word
if [[ $style == reserved-word ]]; then
next_word+=':always:'
fi
fi
elif [[ $arg[0,1] = $histchars[0,1] ]] && (( $#arg[0,2] == 2 )); then
style=history-expansion
elif [[ $arg == $'\x5d\x5d' ]] && _zsh_highlight_main__stack_pop 'T' reserved-word; then
:
elif [[ $arg == $'\x5d' ]] && _zsh_highlight_main__stack_pop 'Q' builtin; then
:
else
_zsh_highlight_main_highlighter_highlight_argument 1 $(( 1 != in_redirection ))
continue
fi
;;
esac
fi
_zsh_highlight_main_add_region_highlight $start_pos $end_pos $style
done
(( in_alias == 1 )) && in_alias=0 _zsh_highlight_main_add_region_highlight $start_pos $end_pos $alias_style
(( in_param == 1 )) && in_param=0 _zsh_highlight_main_add_region_highlight $start_pos $end_pos $param_style
[[ "$proc_buf" = (#b)(#s)(([[:space:]]|\\$'\n')#) ]]
REPLY=$(( end_pos + ${#match[1]} - 1 ))
reply=($list_highlights)
return $(( $#braces_stack > 0 ))
}
# Check whether $arg is a variable assignment
_zsh_highlight_main_highlighter_check_assign()
{
setopt localoptions extended_glob
[[ $arg == [[:alpha:]_][[:alnum:]_]#(|\[*\])(|[+])=* ]] ||
[[ $arg == [0-9]##(|[+])=* ]]
}
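# Illustrative example (not executed): the patterns above accept 'foo=bar',
# 'foo+=bar', 'arr[3]=x' and '4=four', but not '=bar' or 'foo-bar=baz'.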
_zsh_highlight_main_highlighter_highlight_path_separators()
{
local pos style_pathsep
style_pathsep=$1_pathseparator
reply=()
[[ -z "$ZSH_HIGHLIGHT_STYLES[$style_pathsep]" || "$ZSH_HIGHLIGHT_STYLES[$1]" == "$ZSH_HIGHLIGHT_STYLES[$style_pathsep]" ]] && return 0
for (( pos = start_pos; $pos <= end_pos; pos++ )) ; do
if [[ $BUFFER[pos] == / ]]; then
reply+=($((pos - 1)) $pos $style_pathsep)
fi
done
}
# Check if $1 is a path.
# If yes, return 0 and in $REPLY the style to use.
# Else, return non-zero (and the contents of $REPLY are undefined).
_zsh_highlight_main_highlighter_check_path()
{
_zsh_highlight_main_highlighter_expand_path "$1"
local expanded_path="$REPLY" tmp_path
REPLY=path
[[ -z $expanded_path ]] && return 1
# Check if this is a blacklisted path
if [[ $expanded_path[1] == / ]]; then
tmp_path=$expanded_path
else
tmp_path=$PWD/$expanded_path
fi
tmp_path=$tmp_path:a
while [[ $tmp_path != / ]]; do
[[ -n ${(M)ZSH_HIGHLIGHT_DIRS_BLACKLIST:#$tmp_path} ]] && return 1
tmp_path=$tmp_path:h
done
[[ -L $expanded_path ]] && return 0
[[ -e $expanded_path ]] && return 0
# Search the path in CDPATH
local cdpath_dir
for cdpath_dir in $cdpath ; do
[[ -e "$cdpath_dir/$expanded_path" ]] && return 0
done
# If dirname($1) doesn't exist, neither does $1.
[[ ! -d ${expanded_path:h} ]] && return 1
# If this word ends the buffer, check if it's the prefix of a valid path.
if (( has_end && (len == end_pos) )) &&
[[ $WIDGET != zle-line-finish ]]; then
local -a tmp
tmp=( ${expanded_path}*(N) )
(( $#tmp > 0 )) && REPLY=path_prefix && return 0
fi
# It's not a path.
return 1
}
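# Illustrative example (not executed; assumes a typical filesystem): '/usr/bin'
# would be highlighted as path, '/usr/bi' typed at the end of the buffer as
# path_prefix, and any path under a directory listed in
# ZSH_HIGHLIGHT_DIRS_BLACKLIST would not be highlighted as a path at all.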
# Highlight an argument and possibly special chars in quotes starting at $1 in $arg
# This command will at least highlight $1 to end_pos with the default style
# If $2 is set to 0, the argument cannot be highlighted as an option.
_zsh_highlight_main_highlighter_highlight_argument()
{
local base_style=default i=$1 option_eligible=${2:-1} path_eligible=1 ret start style
local -a highlights
local -a match mbegin mend
local MATCH; integer MBEGIN MEND
case "$arg[i]" in
'%')
if [[ $arg[i+1] == '?' ]]; then
(( i += 2 ))
fi
;;
'-')
if (( option_eligible )); then
if [[ $arg[i+1] == - ]]; then
base_style=double-hyphen-option
else
base_style=single-hyphen-option
fi
path_eligible=0
fi
;;
'=')
if [[ $arg[i+1] == $'\x28' ]]; then
(( i += 2 ))
_zsh_highlight_main_highlighter_highlight_list $(( start_pos + i - 1 )) S $has_end $arg[i,-1]
ret=$?
(( i += REPLY ))
highlights+=(
$(( start_pos + $1 - 1 )) $(( start_pos + i )) process-substitution
$(( start_pos + $1 - 1 )) $(( start_pos + $1 + 1 )) process-substitution-delimiter
$reply
)
if (( ret == 0 )); then
highlights+=($(( start_pos + i - 1 )) $(( start_pos + i )) process-substitution-delimiter)
fi
fi
esac
for (( ; i <= $#arg ; i += 1 )); do
case "$arg[$i]" in
"\\") (( i += 1 )); continue;;
"'")
_zsh_highlight_main_highlighter_highlight_single_quote $i
(( i = REPLY ))
highlights+=($reply)
;;
'"')
_zsh_highlight_main_highlighter_highlight_double_quote $i
(( i = REPLY ))
highlights+=($reply)
;;
'`')
_zsh_highlight_main_highlighter_highlight_backtick $i
(( i = REPLY ))
highlights+=($reply)
;;
'$')
if [[ $arg[i+1] != "'" ]]; then
path_eligible=0
fi
if [[ $arg[i+1] == "'" ]]; then
_zsh_highlight_main_highlighter_highlight_dollar_quote $i
(( i = REPLY ))
highlights+=($reply)
continue
elif [[ $arg[i+1] == $'\x28' ]]; then
start=$i
(( i += 2 ))
_zsh_highlight_main_highlighter_highlight_list $(( start_pos + i - 1 )) S $has_end $arg[i,-1]
ret=$?
(( i += REPLY ))
highlights+=(
$(( start_pos + start - 1)) $(( start_pos + i )) command-substitution-unquoted
$(( start_pos + start - 1)) $(( start_pos + start + 1)) command-substitution-delimiter-unquoted
$reply
)
if (( ret == 0 )); then
highlights+=($(( start_pos + i - 1)) $(( start_pos + i )) command-substitution-delimiter-unquoted)
fi
continue
fi
while [[ $arg[i+1] == [\^=~#+] ]]; do
(( i += 1 ))
done
if [[ $arg[i+1] == [*@#?$!-] ]]; then
(( i += 1 ))
fi;;
[\<\>])
if [[ $arg[i+1] == $'\x28' ]]; then # \x28 = open paren
start=$i
(( i += 2 ))
_zsh_highlight_main_highlighter_highlight_list $(( start_pos + i - 1 )) S $has_end $arg[i,-1]
ret=$?
(( i += REPLY ))
highlights+=(
$(( start_pos + start - 1)) $(( start_pos + i )) process-substitution
$(( start_pos + start - 1)) $(( start_pos + start + 1 )) process-substitution-delimiter
$reply
)
if (( ret == 0 )); then
highlights+=($(( start_pos + i - 1)) $(( start_pos + i )) process-substitution-delimiter)
fi
continue
fi
;|
*)
if $highlight_glob && [[ ${arg[$i]} =~ ^[*?] || ${arg:$i-1} =~ ^\<[0-9]*-[0-9]*\> ]]; then
highlights+=($(( start_pos + i - 1 )) $(( start_pos + i + $#MATCH - 1)) globbing)
(( i += $#MATCH - 1 ))
path_eligible=0
else
continue
fi
;;
esac
done
if (( path_eligible )) && _zsh_highlight_main_highlighter_check_path $arg[$1,-1]; then
base_style=$REPLY
_zsh_highlight_main_highlighter_highlight_path_separators $base_style
highlights+=($reply)
fi
highlights=($(( start_pos + $1 - 1 )) $end_pos $base_style $highlights)
_zsh_highlight_main_add_many_region_highlights $highlights
}
# Quote Helper Functions
#
# $arg is expected to be set to the current argument
# $start_pos is expected to be set to the start of $arg in $BUFFER
# $1 is the index in $arg which starts the quote
# $REPLY is returned as the end of quote index in $arg
# $reply is returned as an array of region_highlight additions
# Highlight single-quoted strings
_zsh_highlight_main_highlighter_highlight_single_quote()
{
local arg1=$1 i q=\' style
i=$arg[(ib:arg1+1:)$q]
reply=()
if [[ $zsyh_user_options[rcquotes] == on ]]; then
while [[ $arg[i+1] == "'" ]]; do
reply+=($(( start_pos + i - 1 )) $(( start_pos + i + 1 )) rc-quote)
(( i++ ))
i=$arg[(ib:i+1:)$q]
done
fi
if [[ $arg[i] == "'" ]]; then
style=single-quoted-argument
else
# If unclosed, i points past the end
(( i-- ))
style=single-quoted-argument-unclosed
fi
reply=($(( start_pos + arg1 - 1 )) $(( start_pos + i )) $style $reply)
REPLY=$i
}
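# Illustrative example (not executed): with setopt rcquotes, the word 'it''s'
# is one single-quoted string and the inner '' pair is additionally highlighted
# with the rc-quote style; without rcquotes the same text is highlighted as two
# adjacent single-quoted strings.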
# Highlight special chars inside double-quoted strings
_zsh_highlight_main_highlighter_highlight_double_quote()
{
local -a breaks match mbegin mend saved_reply
local MATCH; integer last_break=$(( start_pos + $1 - 1 )) MBEGIN MEND
local i j k ret style
reply=()
for (( i = $1 + 1 ; i <= $#arg ; i += 1 )) ; do
(( j = i + start_pos - 1 ))
(( k = j + 1 ))
case "$arg[$i]" in
'"') break;;
'`') saved_reply=($reply)
_zsh_highlight_main_highlighter_highlight_backtick $i
(( i = REPLY ))
reply=($saved_reply $reply)
continue
;;
'$' ) style=dollar-double-quoted-argument
# Look for an alphanumeric parameter name.
if [[ ${arg:$i} =~ ^([A-Za-z_][A-Za-z0-9_]*|[0-9]+) ]] ; then
(( k += $#MATCH )) # highlight the parameter name
(( i += $#MATCH )) # skip past it
elif [[ ${arg:$i} =~ ^[{]([A-Za-z_][A-Za-z0-9_]*|[0-9]+)[}] ]] ; then
(( k += $#MATCH )) # highlight the parameter name and braces
(( i += $#MATCH )) # skip past it
elif [[ $arg[i+1] == '$' ]]; then
# $$ - pid
(( k += 1 )) # highlight both dollar signs
(( i += 1 )) # don't consider the second one as introducing another parameter expansion
elif [[ $arg[i+1] == [-#*@?] ]]; then
# $#, $*, $@, $?, $- - like $$ above
(( k += 1 )) # highlight both dollar signs
(( i += 1 )) # don't consider the second one as introducing another parameter expansion
elif [[ $arg[i+1] == $'\x28' ]]; then
breaks+=( $last_break $(( start_pos + i - 1 )) )
(( i += 2 ))
saved_reply=($reply)
_zsh_highlight_main_highlighter_highlight_list $(( start_pos + i - 1 )) S $has_end $arg[i,-1]
ret=$?
(( i += REPLY ))
last_break=$(( start_pos + i ))
reply=(
$saved_reply
$j $(( start_pos + i )) command-substitution-quoted
$j $(( j + 2 )) command-substitution-delimiter-quoted
$reply
)
if (( ret == 0 )); then
reply+=($(( start_pos + i - 1 )) $(( start_pos + i )) command-substitution-delimiter-quoted)
fi
continue
else
continue
fi
;;
"\\") style=back-double-quoted-argument
if [[ \\\`\"\$${histchars[1]} == *$arg[$i+1]* ]]; then
(( k += 1 )) # Color following char too.
(( i += 1 )) # Skip parsing the escaped char.
else
continue
fi
;;
($histchars[1]) # ! - may be a history expansion
if [[ $arg[i+1] != ('='|$'\x28'|$'\x7b'|[[:blank:]]) ]]; then
style=history-expansion
else
continue
fi
;;
*) continue ;;
esac
reply+=($j $k $style)
done
if [[ $arg[i] == '"' ]]; then
style=double-quoted-argument
else
# If unclosed, i points past the end
(( i-- ))
style=double-quoted-argument-unclosed
fi
(( last_break != start_pos + i )) && breaks+=( $last_break $(( start_pos + i )) )
saved_reply=($reply)
reply=()
for 1 2 in $breaks; do
(( $1 != $2 )) && reply+=($1 $2 $style)
done
reply+=($saved_reply)
REPLY=$i
}
# Highlight special chars inside dollar-quoted strings
_zsh_highlight_main_highlighter_highlight_dollar_quote()
{
local -a match mbegin mend
local MATCH; integer MBEGIN MEND
local i j k style
local AA
integer c
reply=()
for (( i = $1 + 2 ; i <= $#arg ; i += 1 )) ; do
(( j = i + start_pos - 1 ))
(( k = j + 1 ))
case "$arg[$i]" in
"'") break;;
"\\") style=back-dollar-quoted-argument
for (( c = i + 1 ; c <= $#arg ; c += 1 )); do
[[ "$arg[$c]" != ([0-9xXuUa-fA-F]) ]] && break
done
AA=$arg[$i+1,$c-1]
# Matching for HEX and OCT values like \0xA6, \xA6 or \012
if [[ "$AA" =~ "^(x|X)[0-9a-fA-F]{1,2}"
|| "$AA" =~ "^[0-7]{1,3}"
|| "$AA" =~ "^u[0-9a-fA-F]{1,4}"
|| "$AA" =~ "^U[0-9a-fA-F]{1,8}"
]]; then
(( k += $#MATCH ))
(( i += $#MATCH ))
else
if (( $#arg > $i+1 )) && [[ $arg[$i+1] == [xXuU] ]]; then
# \x not followed by hex digits is probably an error
style=unknown-token
fi
(( k += 1 )) # Color following char too.
(( i += 1 )) # Skip parsing the escaped char.
fi
;;
*) continue ;;
esac
reply+=($j $k $style)
done
if [[ $arg[i] == "'" ]]; then
style=dollar-quoted-argument
else
# If unclosed, i points past the end
(( i-- ))
style=dollar-quoted-argument-unclosed
fi
reply=($(( start_pos + $1 - 1 )) $(( start_pos + i )) $style $reply)
REPLY=$i
}
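# Illustrative example (not executed): in $'\x41\012', both escapes match the
# patterns above, so '\x41' and '\012' are each highlighted as
# back-dollar-quoted-argument escape sequences.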
# Highlight backtick substitutions
_zsh_highlight_main_highlighter_highlight_backtick()
{
# buf is the contents of the backticks with a layer of backslashes removed.
# last is the index of arg for the start of the string to be copied into buf.
# It is either one past the beginning backtick or one past the last backslash.
# offset is a count of consumed \ (the delta between buf and arg).
# offsets is an array indexed by buf offset of when the delta between buf and arg changes.
# It is sparse, so search backwards to the last value
local buf highlight style=back-quoted-argument-unclosed style_end
local -i arg1=$1 end_ i=$1 last offset=0 start subshell_has_end=0
local -a highlight_zone highlights offsets
reply=()
last=$(( arg1 + 1 ))
# Remove one layer of backslashes and find the end
while i=$arg[(ib:i+1:)[\\\\\`]]; do # find the next \ or `
if (( i > $#arg )); then
buf=$buf$arg[last,i]
offsets[i-arg1-offset]='' # So we never index past the end
(( i-- ))
subshell_has_end=$(( has_end && (start_pos + i == len) ))
break
fi
if [[ $arg[i] == '\' ]]; then
(( i++ ))
# POSIX XCU 2.6.3
if [[ $arg[i] == ('$'|'`'|'\') ]]; then
buf=$buf$arg[last,i-2]
(( offset++ ))
# offsets is relative to buf, so adjust by -arg1
offsets[i-arg1-offset]=$offset
else
buf=$buf$arg[last,i-1]
fi
else # it's an unquoted ` and this is the end
style=back-quoted-argument
style_end=back-quoted-argument-delimiter
buf=$buf$arg[last,i-1]
offsets[i-arg1-offset]='' # So we never index past the end
break
fi
last=$i
done
_zsh_highlight_main_highlighter_highlight_list 0 '' $subshell_has_end $buf
# Munge the reply to account for removed backslashes
for start end_ highlight in $reply; do
start=$(( start_pos + arg1 + start + offsets[(Rb:start:)?*] ))
end_=$(( start_pos + arg1 + end_ + offsets[(Rb:end_:)?*] ))
highlights+=($start $end_ $highlight)
if [[ $highlight == back-quoted-argument-unclosed && $style == back-quoted-argument ]]; then
# An inner backtick command substitution is unclosed, but this level is closed
style_end=unknown-token
fi
done
reply=(
$(( start_pos + arg1 - 1 )) $(( start_pos + i )) $style
$(( start_pos + arg1 - 1 )) $(( start_pos + arg1 )) back-quoted-argument-delimiter
$highlights
)
if (( $#style_end )); then
reply+=($(( start_pos + i - 1)) $(( start_pos + i )) $style_end)
fi
REPLY=$i
}
# Called with a single positional argument.
# Perform filename expansion (tilde expansion) on the argument and set $REPLY to the expanded value.
#
# Does not perform filename generation (globbing).
_zsh_highlight_main_highlighter_expand_path()
{
(( $# == 1 )) || print -r -- >&2 "zsh-syntax-highlighting: BUG: _zsh_highlight_main_highlighter_expand_path: called without argument"
# The $~1 syntax normally performs filename generation, but not when it's on the right-hand side of ${x:=y}.
setopt localoptions nonomatch
unset REPLY
: ${REPLY:=${(Q)${~1}}}
}
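# Illustrative example (not executed): _zsh_highlight_main_highlighter_expand_path '~/foo'
# would set REPLY to something like /home/username/foo (depending on $HOME),
# without performing any globbing on the argument.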
# -------------------------------------------------------------------------------------------------
# Main highlighter initialization
# -------------------------------------------------------------------------------------------------
_zsh_highlight_main__precmd_hook() {
_zsh_highlight_main__command_type_cache=()
}
autoload -Uz add-zsh-hook
if add-zsh-hook precmd _zsh_highlight_main__precmd_hook 2>/dev/null; then
# Initialize command type cache
typeset -gA _zsh_highlight_main__command_type_cache
else
print -r -- >&2 'zsh-syntax-highlighting: Failed to load add-zsh-hook. Some speed optimizations will not be used.'
# Make sure the cache is unset
unset _zsh_highlight_main__command_type_cache
fi
typeset -ga ZSH_HIGHLIGHT_DIRS_BLACKLIST
| x-way/zsh-syntax-highlighting | highlighters/main/main-highlighter.zsh | Shell | bsd-3-clause | 56,819 |
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <fcntl.h>
#include <linux/videodev2.h>
#include <poll.h>
#include <sys/eventfd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/callback.h"
#include "base/callback_helpers.h"
#include "base/command_line.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/numerics/safe_conversions.h"
#include "base/strings/stringprintf.h"
#include "content/common/gpu/media/v4l2_slice_video_decode_accelerator.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/media_switches.h"
#include "ui/gl/scoped_binders.h"
#define LOGF(level) LOG(level) << __FUNCTION__ << "(): "
#define DVLOGF(level) DVLOG(level) << __FUNCTION__ << "(): "
#define NOTIFY_ERROR(x) \
do { \
LOG(ERROR) << "Setting error state:" << x; \
SetErrorState(x); \
} while (0)
#define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value) \
do { \
if (device_->Ioctl(type, arg) != 0) { \
PLOG(ERROR) << __FUNCTION__ << "(): ioctl() failed: " << #type; \
return value; \
} \
} while (0)
#define IOCTL_OR_ERROR_RETURN(type, arg) \
IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0))
#define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \
IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false)
#define IOCTL_OR_LOG_ERROR(type, arg) \
do { \
if (device_->Ioctl(type, arg) != 0) \
PLOG(ERROR) << __FUNCTION__ << "(): ioctl() failed: " << #type; \
} while (0)
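// Illustrative note: these wrappers are used throughout this file; for
// example, IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps) issues the
// ioctl via device_->Ioctl() and makes the enclosing function return false on
// failure, whereas IOCTL_OR_LOG_ERROR only logs the failure and continues.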
namespace content {
class V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface
: public base::RefCounted<V4L2DecodeSurface> {
public:
using ReleaseCB = base::Callback<void(int)>;
V4L2DecodeSurface(int32 bitstream_id,
int input_record,
int output_record,
const ReleaseCB& release_cb);
// Mark the surface as decoded. This will also release all references, as
// they are not needed anymore.
void SetDecoded();
bool decoded() const { return decoded_; }
int32 bitstream_id() const { return bitstream_id_; }
int input_record() const { return input_record_; }
int output_record() const { return output_record_; }
uint32_t config_store() const { return config_store_; }
// Take references to each reference surface and keep them until the
// target surface is decoded.
void SetReferenceSurfaces(
const std::vector<scoped_refptr<V4L2DecodeSurface>>& ref_surfaces);
std::string ToString() const;
private:
friend class base::RefCounted<V4L2DecodeSurface>;
~V4L2DecodeSurface();
int32 bitstream_id_;
int input_record_;
int output_record_;
uint32_t config_store_;
bool decoded_;
ReleaseCB release_cb_;
std::vector<scoped_refptr<V4L2DecodeSurface>> reference_surfaces_;
DISALLOW_COPY_AND_ASSIGN(V4L2DecodeSurface);
};
V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface::V4L2DecodeSurface(
int32 bitstream_id,
int input_record,
int output_record,
const ReleaseCB& release_cb)
: bitstream_id_(bitstream_id),
input_record_(input_record),
output_record_(output_record),
config_store_(input_record + 1),
decoded_(false),
release_cb_(release_cb) {
}
V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface::~V4L2DecodeSurface() {
DVLOGF(5) << "Releasing output record id=" << output_record_;
release_cb_.Run(output_record_);
}
void V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface::SetReferenceSurfaces(
const std::vector<scoped_refptr<V4L2DecodeSurface>>& ref_surfaces) {
DCHECK(reference_surfaces_.empty());
reference_surfaces_ = ref_surfaces;
}
void V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface::SetDecoded() {
DCHECK(!decoded_);
decoded_ = true;
// We can now drop references to all reference surfaces for this surface
// as we are done with decoding.
reference_surfaces_.clear();
}
std::string V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface::ToString()
const {
std::string out;
base::StringAppendF(&out, "Buffer %d -> %d. ", input_record_, output_record_);
base::StringAppendF(&out, "Reference surfaces:");
for (const auto& ref : reference_surfaces_) {
DCHECK_NE(ref->output_record(), output_record_);
base::StringAppendF(&out, " %d", ref->output_record());
}
return out;
}
V4L2SliceVideoDecodeAccelerator::InputRecord::InputRecord()
: input_id(-1),
address(nullptr),
length(0),
bytes_used(0),
at_device(false) {
}
V4L2SliceVideoDecodeAccelerator::OutputRecord::OutputRecord()
: at_device(false),
at_client(false),
picture_id(-1),
egl_image(EGL_NO_IMAGE_KHR),
egl_sync(EGL_NO_SYNC_KHR),
cleared(false) {
}
struct V4L2SliceVideoDecodeAccelerator::BitstreamBufferRef {
BitstreamBufferRef(
base::WeakPtr<VideoDecodeAccelerator::Client>& client,
const scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy,
base::SharedMemory* shm,
size_t size,
int32 input_id);
~BitstreamBufferRef();
const base::WeakPtr<VideoDecodeAccelerator::Client> client;
const scoped_refptr<base::MessageLoopProxy> client_message_loop_proxy;
const scoped_ptr<base::SharedMemory> shm;
const size_t size;
off_t bytes_used;
const int32 input_id;
};
V4L2SliceVideoDecodeAccelerator::BitstreamBufferRef::BitstreamBufferRef(
base::WeakPtr<VideoDecodeAccelerator::Client>& client,
const scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy,
base::SharedMemory* shm,
size_t size,
int32 input_id)
: client(client),
client_message_loop_proxy(client_message_loop_proxy),
shm(shm),
size(size),
bytes_used(0),
input_id(input_id) {
}
V4L2SliceVideoDecodeAccelerator::BitstreamBufferRef::~BitstreamBufferRef() {
if (input_id >= 0) {
DVLOGF(5) << "returning input_id: " << input_id;
client_message_loop_proxy->PostTask(
FROM_HERE,
base::Bind(&VideoDecodeAccelerator::Client::NotifyEndOfBitstreamBuffer,
client, input_id));
}
}
struct V4L2SliceVideoDecodeAccelerator::EGLSyncKHRRef {
EGLSyncKHRRef(EGLDisplay egl_display, EGLSyncKHR egl_sync);
~EGLSyncKHRRef();
EGLDisplay const egl_display;
EGLSyncKHR egl_sync;
};
V4L2SliceVideoDecodeAccelerator::EGLSyncKHRRef::EGLSyncKHRRef(
EGLDisplay egl_display,
EGLSyncKHR egl_sync)
: egl_display(egl_display), egl_sync(egl_sync) {
}
V4L2SliceVideoDecodeAccelerator::EGLSyncKHRRef::~EGLSyncKHRRef() {
// We don't check for eglDestroySyncKHR failures, because if we get here
// with a valid sync object, something went wrong and we are getting
// destroyed anyway.
if (egl_sync != EGL_NO_SYNC_KHR)
eglDestroySyncKHR(egl_display, egl_sync);
}
struct V4L2SliceVideoDecodeAccelerator::PictureRecord {
PictureRecord(bool cleared, const media::Picture& picture);
~PictureRecord();
bool cleared; // Whether the texture is cleared and safe to render from.
media::Picture picture; // The decoded picture.
};
V4L2SliceVideoDecodeAccelerator::PictureRecord::PictureRecord(
bool cleared,
const media::Picture& picture)
: cleared(cleared), picture(picture) {
}
V4L2SliceVideoDecodeAccelerator::PictureRecord::~PictureRecord() {
}
class V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator
: public H264Decoder::H264Accelerator {
public:
V4L2H264Accelerator(V4L2SliceVideoDecodeAccelerator* v4l2_dec);
~V4L2H264Accelerator() override;
// H264Decoder::H264Accelerator implementation.
scoped_refptr<H264Picture> CreateH264Picture() override;
bool SubmitFrameMetadata(const media::H264SPS* sps,
const media::H264PPS* pps,
const H264DPB& dpb,
const H264Picture::Vector& ref_pic_listp0,
const H264Picture::Vector& ref_pic_listb0,
const H264Picture::Vector& ref_pic_listb1,
const scoped_refptr<H264Picture>& pic) override;
bool SubmitSlice(const media::H264PPS* pps,
const media::H264SliceHeader* slice_hdr,
const H264Picture::Vector& ref_pic_list0,
const H264Picture::Vector& ref_pic_list1,
const scoped_refptr<H264Picture>& pic,
const uint8_t* data,
size_t size) override;
bool SubmitDecode(const scoped_refptr<H264Picture>& pic) override;
bool OutputPicture(const scoped_refptr<H264Picture>& pic) override;
void Reset() override;
private:
// Max size of reference list.
static const size_t kDPBIndicesListSize = 32;
void H264PictureListToDPBIndicesList(const H264Picture::Vector& src_pic_list,
uint8_t dst_list[kDPBIndicesListSize]);
void H264DPBToV4L2DPB(
const H264DPB& dpb,
std::vector<scoped_refptr<V4L2DecodeSurface>>* ref_surfaces);
scoped_refptr<V4L2DecodeSurface> H264PictureToV4L2DecodeSurface(
const scoped_refptr<H264Picture>& pic);
size_t num_slices_;
V4L2SliceVideoDecodeAccelerator* v4l2_dec_;
// TODO(posciak): This should be queried from hardware once supported.
static const size_t kMaxSlices = 16;
struct v4l2_ctrl_h264_slice_param v4l2_slice_params_[kMaxSlices];
struct v4l2_ctrl_h264_decode_param v4l2_decode_param_;
DISALLOW_COPY_AND_ASSIGN(V4L2H264Accelerator);
};
class V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator
: public VP8Decoder::VP8Accelerator {
public:
V4L2VP8Accelerator(V4L2SliceVideoDecodeAccelerator* v4l2_dec);
~V4L2VP8Accelerator() override;
// VP8Decoder::VP8Accelerator implementation.
scoped_refptr<VP8Picture> CreateVP8Picture() override;
bool SubmitDecode(const scoped_refptr<VP8Picture>& pic,
const media::Vp8FrameHeader* frame_hdr,
const scoped_refptr<VP8Picture>& last_frame,
const scoped_refptr<VP8Picture>& golden_frame,
const scoped_refptr<VP8Picture>& alt_frame) override;
bool OutputPicture(const scoped_refptr<VP8Picture>& pic) override;
private:
scoped_refptr<V4L2DecodeSurface> VP8PictureToV4L2DecodeSurface(
const scoped_refptr<VP8Picture>& pic);
V4L2SliceVideoDecodeAccelerator* v4l2_dec_;
DISALLOW_COPY_AND_ASSIGN(V4L2VP8Accelerator);
};
// Codec-specific subclasses of software decoder picture classes.
// This allows us to keep decoders oblivious of our implementation details.
class V4L2H264Picture : public H264Picture {
public:
V4L2H264Picture(const scoped_refptr<
V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>& dec_surface);
V4L2H264Picture* AsV4L2H264Picture() override { return this; }
scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
dec_surface() {
return dec_surface_;
}
private:
~V4L2H264Picture() override;
scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
dec_surface_;
DISALLOW_COPY_AND_ASSIGN(V4L2H264Picture);
};
V4L2H264Picture::V4L2H264Picture(const scoped_refptr<
V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>& dec_surface)
: dec_surface_(dec_surface) {
}
V4L2H264Picture::~V4L2H264Picture() {
}
class V4L2VP8Picture : public VP8Picture {
public:
V4L2VP8Picture(const scoped_refptr<
V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>& dec_surface);
V4L2VP8Picture* AsV4L2VP8Picture() override { return this; }
scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
dec_surface() {
return dec_surface_;
}
private:
~V4L2VP8Picture() override;
scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
dec_surface_;
DISALLOW_COPY_AND_ASSIGN(V4L2VP8Picture);
};
V4L2VP8Picture::V4L2VP8Picture(const scoped_refptr<
V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>& dec_surface)
: dec_surface_(dec_surface) {
}
V4L2VP8Picture::~V4L2VP8Picture() {
}
V4L2SliceVideoDecodeAccelerator::V4L2SliceVideoDecodeAccelerator(
const scoped_refptr<V4L2Device>& device,
EGLDisplay egl_display,
EGLContext egl_context,
const base::WeakPtr<Client>& io_client,
const base::Callback<bool(void)>& make_context_current,
const scoped_refptr<base::MessageLoopProxy>& io_message_loop_proxy)
: input_planes_count_(0),
output_planes_count_(0),
child_message_loop_proxy_(base::MessageLoopProxy::current()),
io_message_loop_proxy_(io_message_loop_proxy),
io_client_(io_client),
device_(device),
decoder_thread_("V4L2SliceVideoDecodeAcceleratorThread"),
device_poll_thread_("V4L2SliceVideoDecodeAcceleratorDevicePollThread"),
input_streamon_(false),
input_buffer_queued_count_(0),
output_streamon_(false),
output_buffer_queued_count_(0),
video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN),
output_format_fourcc_(0),
state_(kUninitialized),
decoder_flushing_(false),
decoder_resetting_(false),
surface_set_change_pending_(false),
picture_clearing_count_(0),
pictures_assigned_(false, false),
make_context_current_(make_context_current),
egl_display_(egl_display),
egl_context_(egl_context),
weak_this_factory_(this) {
weak_this_ = weak_this_factory_.GetWeakPtr();
}
V4L2SliceVideoDecodeAccelerator::~V4L2SliceVideoDecodeAccelerator() {
DVLOGF(2);
DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
DCHECK(!decoder_thread_.IsRunning());
DCHECK(!device_poll_thread_.IsRunning());
DCHECK(input_buffer_map_.empty());
DCHECK(output_buffer_map_.empty());
}
void V4L2SliceVideoDecodeAccelerator::NotifyError(Error error) {
if (!child_message_loop_proxy_->BelongsToCurrentThread()) {
child_message_loop_proxy_->PostTask(
FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::NotifyError,
weak_this_, error));
return;
}
if (client_) {
client_->NotifyError(error);
client_ptr_factory_.reset();
}
}
bool V4L2SliceVideoDecodeAccelerator::Initialize(
media::VideoCodecProfile profile,
VideoDecodeAccelerator::Client* client) {
DVLOGF(3) << "profile: " << profile;
DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
DCHECK_EQ(state_, kUninitialized);
client_ptr_factory_.reset(
new base::WeakPtrFactory<VideoDecodeAccelerator::Client>(client));
client_ = client_ptr_factory_->GetWeakPtr();
video_profile_ = profile;
if (video_profile_ >= media::H264PROFILE_MIN &&
video_profile_ <= media::H264PROFILE_MAX) {
h264_accelerator_.reset(new V4L2H264Accelerator(this));
decoder_.reset(new H264Decoder(h264_accelerator_.get()));
} else if (video_profile_ >= media::VP8PROFILE_MIN &&
video_profile_ <= media::VP8PROFILE_MAX) {
vp8_accelerator_.reset(new V4L2VP8Accelerator(this));
decoder_.reset(new VP8Decoder(vp8_accelerator_.get()));
} else {
DLOG(ERROR) << "Unsupported profile " << video_profile_;
return false;
}
// TODO(posciak): This needs to be queried once supported.
input_planes_count_ = 1;
output_planes_count_ = 1;
if (egl_display_ == EGL_NO_DISPLAY) {
LOG(ERROR) << "Initialize(): could not get EGLDisplay";
return false;
}
// We need the context to be initialized to query extensions.
if (!make_context_current_.Run()) {
LOG(ERROR) << "Initialize(): could not make context current";
return false;
}
if (!gfx::g_driver_egl.ext.b_EGL_KHR_fence_sync) {
LOG(ERROR) << "Initialize(): context does not have EGL_KHR_fence_sync";
return false;
}
// Capabilities check.
struct v4l2_capability caps;
const __u32 kCapsRequired =
V4L2_CAP_VIDEO_CAPTURE_MPLANE |
V4L2_CAP_VIDEO_OUTPUT_MPLANE |
V4L2_CAP_STREAMING;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"
", caps check failed: 0x" << std::hex << caps.capabilities;
return false;
}
if (!SetupFormats())
return false;
if (!decoder_thread_.Start()) {
DLOG(ERROR) << "Initialize(): device thread failed to start";
return false;
}
decoder_thread_proxy_ = decoder_thread_.message_loop_proxy();
state_ = kInitialized;
// InitializeTask will NOTIFY_ERROR on failure.
decoder_thread_proxy_->PostTask(
FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::InitializeTask,
base::Unretained(this)));
DVLOGF(1) << "V4L2SliceVideoDecodeAccelerator initialized";
return true;
}
void V4L2SliceVideoDecodeAccelerator::InitializeTask() {
DVLOGF(3);
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
DCHECK_EQ(state_, kInitialized);
if (!CreateInputBuffers())
NOTIFY_ERROR(PLATFORM_FAILURE);
// Output buffers will be created once the decoder gives us information
// about their size and required count.
state_ = kDecoding;
}
void V4L2SliceVideoDecodeAccelerator::Destroy() {
DVLOGF(3);
DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
if (decoder_thread_.IsRunning()) {
decoder_thread_proxy_->PostTask(
FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DestroyTask,
base::Unretained(this)));
// Wait for tasks to finish/early-exit.
decoder_thread_.Stop();
}
delete this;
DVLOGF(3) << "Destroyed";
}
void V4L2SliceVideoDecodeAccelerator::DestroyTask() {
DVLOGF(3);
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
state_ = kError;
decoder_->Reset();
decoder_current_bitstream_buffer_.reset();
while (!decoder_input_queue_.empty())
decoder_input_queue_.pop();
// Stop streaming and the device_poll_thread_.
StopDevicePoll(false);
DestroyInputBuffers();
DestroyOutputs(false);
DCHECK(surfaces_at_device_.empty());
DCHECK(surfaces_at_display_.empty());
DCHECK(decoder_display_queue_.empty());
}
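// Negotiate formats with the driver: the OUTPUT (bitstream input) queue is
// set to the codec-specific slice pixel format, and the CAPTURE (decoded
// frames) queue to the first enumerated format we can create EGLImages from.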
bool V4L2SliceVideoDecodeAccelerator::SetupFormats() {
DCHECK_EQ(state_, kUninitialized);
__u32 input_format_fourcc =
V4L2Device::VideoCodecProfileToV4L2PixFmt(video_profile_, true);
if (!input_format_fourcc) {
NOTREACHED();
return false;
}
size_t input_size;
if (base::CommandLine::ForCurrentProcess()->HasSwitch(
switches::kIgnoreResolutionLimitsForAcceleratedVideoDecode))
input_size = kInputBufferMaxSizeFor4k;
else
input_size = kInputBufferMaxSizeFor1080p;
struct v4l2_format format;
memset(&format, 0, sizeof(format));
format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
format.fmt.pix_mp.pixelformat = input_format_fourcc;
format.fmt.pix_mp.plane_fmt[0].sizeimage = input_size;
format.fmt.pix_mp.num_planes = input_planes_count_;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
// We have to set up the format for output, because the driver may not allow
// changing it once we start streaming; whether it can support our chosen
// output format or not may depend on the input format.
struct v4l2_fmtdesc fmtdesc;
memset(&fmtdesc, 0, sizeof(fmtdesc));
fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
output_format_fourcc_ = 0;
while (device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
if (device_->CanCreateEGLImageFrom(fmtdesc.pixelformat)) {
output_format_fourcc_ = fmtdesc.pixelformat;
break;
}
++fmtdesc.index;
}
if (output_format_fourcc_ == 0) {
LOG(ERROR) << "Could not find a usable output format";
return false;
}
// Only set fourcc for output; resolution, etc., will come from the
// driver once it extracts it from the stream.
memset(&format, 0, sizeof(format));
format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
format.fmt.pix_mp.pixelformat = output_format_fourcc_;
format.fmt.pix_mp.num_planes = output_planes_count_;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
return true;
}
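// Allocate kNumInputBuffers MMAP buffers on the OUTPUT queue and mmap() each
// one; these hold the bitstream data we copy in via SubmitSlice().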
bool V4L2SliceVideoDecodeAccelerator::CreateInputBuffers() {
DVLOGF(3);
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
DCHECK(!input_streamon_);
DCHECK(input_buffer_map_.empty());
struct v4l2_requestbuffers reqbufs;
memset(&reqbufs, 0, sizeof(reqbufs));
reqbufs.count = kNumInputBuffers;
reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
reqbufs.memory = V4L2_MEMORY_MMAP;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
if (reqbufs.count < kNumInputBuffers) {
PLOG(ERROR) << "Could not allocate enough output buffers";
return false;
}
input_buffer_map_.resize(reqbufs.count);
for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
free_input_buffers_.push_back(i);
// Query for the MEMORY_MMAP pointer.
struct v4l2_plane planes[VIDEO_MAX_PLANES];
struct v4l2_buffer buffer;
memset(&buffer, 0, sizeof(buffer));
memset(planes, 0, sizeof(planes));
buffer.index = i;
buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
buffer.memory = V4L2_MEMORY_MMAP;
buffer.m.planes = planes;
buffer.length = input_planes_count_;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
void* address = device_->Mmap(nullptr,
buffer.m.planes[0].length,
PROT_READ | PROT_WRITE,
MAP_SHARED,
buffer.m.planes[0].m.mem_offset);
if (address == MAP_FAILED) {
PLOG(ERROR) << "CreateInputBuffers(): mmap() failed";
return false;
}
input_buffer_map_[i].address = address;
input_buffer_map_[i].length = buffer.m.planes[0].length;
}
return true;
}
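// Allocate CAPTURE buffers for decoded pictures. Their count and resolution
// come from the decoder; we then request picture buffers from the client and
// block on pictures_assigned_ until AssignPictureBuffers() signals it.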
bool V4L2SliceVideoDecodeAccelerator::CreateOutputBuffers() {
DVLOGF(3);
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
DCHECK(!output_streamon_);
DCHECK(output_buffer_map_.empty());
DCHECK(surfaces_at_display_.empty());
DCHECK(surfaces_at_device_.empty());
visible_size_ = decoder_->GetPicSize();
size_t num_pictures = decoder_->GetRequiredNumOfPictures();
DCHECK_GT(num_pictures, 0u);
DCHECK(!visible_size_.IsEmpty());
struct v4l2_format format;
memset(&format, 0, sizeof(format));
format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
format.fmt.pix_mp.pixelformat = output_format_fourcc_;
format.fmt.pix_mp.width = visible_size_.width();
format.fmt.pix_mp.height = visible_size_.height();
  format.fmt.pix_mp.num_planes = output_planes_count_;
if (device_->Ioctl(VIDIOC_S_FMT, &format) != 0) {
PLOG(ERROR) << "Failed setting format to: " << output_format_fourcc_;
NOTIFY_ERROR(PLATFORM_FAILURE);
return false;
}
coded_size_.SetSize(base::checked_cast<int>(format.fmt.pix_mp.width),
base::checked_cast<int>(format.fmt.pix_mp.height));
DCHECK_EQ(coded_size_.width() % 16, 0);
DCHECK_EQ(coded_size_.height() % 16, 0);
if (!gfx::Rect(coded_size_).Contains(gfx::Rect(visible_size_))) {
LOG(ERROR) << "Got invalid adjusted coded size: " << coded_size_.ToString();
return false;
}
struct v4l2_requestbuffers reqbufs;
memset(&reqbufs, 0, sizeof(reqbufs));
reqbufs.count = num_pictures;
reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
reqbufs.memory = V4L2_MEMORY_MMAP;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
if (reqbufs.count < num_pictures) {
PLOG(ERROR) << "Could not allocate enough output buffers";
return false;
}
output_buffer_map_.resize(reqbufs.count);
DVLOGF(3) << "buffer_count=" << output_buffer_map_.size()
<< ", visible size=" << visible_size_.ToString()
<< ", coded size=" << coded_size_.ToString();
child_message_loop_proxy_->PostTask(
FROM_HERE,
base::Bind(&VideoDecodeAccelerator::Client::ProvidePictureBuffers,
client_, output_buffer_map_.size(), coded_size_,
device_->GetTextureTarget()));
// Wait for the client to call AssignPictureBuffers() on the Child thread.
// We do this, because if we continue decoding without finishing buffer
// allocation, we may end up Resetting before AssignPictureBuffers arrives,
// resulting in unnecessary complications and subtle bugs.
pictures_assigned_.Wait();
return true;
}
void V4L2SliceVideoDecodeAccelerator::DestroyInputBuffers() {
DVLOGF(3);
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread() ||
!decoder_thread_.IsRunning());
DCHECK(!input_streamon_);
for (auto& input_record : input_buffer_map_) {
if (input_record.address != nullptr)
device_->Munmap(input_record.address, input_record.length);
}
struct v4l2_requestbuffers reqbufs;
memset(&reqbufs, 0, sizeof(reqbufs));
reqbufs.count = 0;
reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
reqbufs.memory = V4L2_MEMORY_MMAP;
IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
input_buffer_map_.clear();
free_input_buffers_.clear();
}
void V4L2SliceVideoDecodeAccelerator::DismissPictures(
std::vector<int32> picture_buffer_ids,
base::WaitableEvent* done) {
DVLOGF(3);
DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
for (auto picture_buffer_id : picture_buffer_ids) {
DVLOGF(1) << "dismissing PictureBuffer id=" << picture_buffer_id;
client_->DismissPictureBuffer(picture_buffer_id);
}
done->Signal();
}
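// Runs on device_poll_thread_ and blocks in the driver until a buffer is
// ready (or a poll interrupt is requested), then defers all further
// processing back to the decoder thread via ServiceDeviceTask().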
void V4L2SliceVideoDecodeAccelerator::DevicePollTask(bool poll_device) {
DVLOGF(4);
DCHECK_EQ(device_poll_thread_.message_loop(), base::MessageLoop::current());
bool event_pending;
if (!device_->Poll(poll_device, &event_pending)) {
NOTIFY_ERROR(PLATFORM_FAILURE);
return;
}
  // All processing should happen on ServiceDeviceTask(), since we shouldn't
  // touch decoder state from this thread.
decoder_thread_proxy_->PostTask(
FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::ServiceDeviceTask,
base::Unretained(this)));
}
void V4L2SliceVideoDecodeAccelerator::ServiceDeviceTask() {
DVLOGF(4);
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
// ServiceDeviceTask() should only ever be scheduled from DevicePollTask().
Dequeue();
SchedulePollIfNeeded();
}
void V4L2SliceVideoDecodeAccelerator::SchedulePollIfNeeded() {
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
if (!device_poll_thread_.IsRunning()) {
DVLOGF(2) << "Device poll thread stopped, will not schedule poll";
return;
}
DCHECK(input_streamon_ || output_streamon_);
if (input_buffer_queued_count_ + output_buffer_queued_count_ == 0) {
DVLOGF(4) << "No buffers queued, will not schedule poll";
return;
}
DVLOGF(4) << "Scheduling device poll task";
device_poll_thread_.message_loop()->PostTask(
FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DevicePollTask,
base::Unretained(this), true));
DVLOGF(2) << "buffer counts: "
<< "INPUT[" << decoder_input_queue_.size() << "]"
<< " => DEVICE["
<< free_input_buffers_.size() << "+"
<< input_buffer_queued_count_ << "/"
<< input_buffer_map_.size() << "]->["
<< free_output_buffers_.size() << "+"
<< output_buffer_queued_count_ << "/"
<< output_buffer_map_.size() << "]"
<< " => DISPLAYQ[" << decoder_display_queue_.size() << "]"
<< " => CLIENT[" << surfaces_at_display_.size() << "]";
}
void V4L2SliceVideoDecodeAccelerator::Enqueue(
const scoped_refptr<V4L2DecodeSurface>& dec_surface) {
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
const int old_inputs_queued = input_buffer_queued_count_;
const int old_outputs_queued = output_buffer_queued_count_;
if (!EnqueueInputRecord(dec_surface->input_record(),
dec_surface->config_store())) {
DVLOGF(1) << "Failed queueing an input buffer";
NOTIFY_ERROR(PLATFORM_FAILURE);
return;
}
if (!EnqueueOutputRecord(dec_surface->output_record())) {
DVLOGF(1) << "Failed queueing an output buffer";
NOTIFY_ERROR(PLATFORM_FAILURE);
return;
}
bool inserted =
surfaces_at_device_.insert(std::make_pair(dec_surface->output_record(),
dec_surface)).second;
DCHECK(inserted);
if (old_inputs_queued == 0 && old_outputs_queued == 0)
SchedulePollIfNeeded();
}
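// Dequeue completed buffers from both queues until the driver returns EAGAIN,
// recycling input buffers and marking the corresponding surfaces as decoded.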
void V4L2SliceVideoDecodeAccelerator::Dequeue() {
DVLOGF(3);
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
struct v4l2_buffer dqbuf;
struct v4l2_plane planes[VIDEO_MAX_PLANES];
while (input_buffer_queued_count_ > 0) {
DCHECK(input_streamon_);
memset(&dqbuf, 0, sizeof(dqbuf));
memset(&planes, 0, sizeof(planes));
dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    dqbuf.memory = V4L2_MEMORY_MMAP;
dqbuf.m.planes = planes;
dqbuf.length = input_planes_count_;
if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
if (errno == EAGAIN) {
// EAGAIN if we're just out of buffers to dequeue.
break;
}
PLOG(ERROR) << "ioctl() failed: VIDIOC_DQBUF";
NOTIFY_ERROR(PLATFORM_FAILURE);
return;
}
InputRecord& input_record = input_buffer_map_[dqbuf.index];
DCHECK(input_record.at_device);
input_record.at_device = false;
ReuseInputBuffer(dqbuf.index);
input_buffer_queued_count_--;
DVLOGF(4) << "Dequeued input=" << dqbuf.index
<< " count: " << input_buffer_queued_count_;
}
while (output_buffer_queued_count_ > 0) {
DCHECK(output_streamon_);
memset(&dqbuf, 0, sizeof(dqbuf));
memset(&planes, 0, sizeof(planes));
dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
dqbuf.memory = V4L2_MEMORY_MMAP;
dqbuf.m.planes = planes;
dqbuf.length = output_planes_count_;
if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
if (errno == EAGAIN) {
// EAGAIN if we're just out of buffers to dequeue.
break;
}
PLOG(ERROR) << "ioctl() failed: VIDIOC_DQBUF";
NOTIFY_ERROR(PLATFORM_FAILURE);
return;
}
OutputRecord& output_record = output_buffer_map_[dqbuf.index];
DCHECK(output_record.at_device);
output_record.at_device = false;
output_buffer_queued_count_--;
DVLOGF(3) << "Dequeued output=" << dqbuf.index
<< " count " << output_buffer_queued_count_;
V4L2DecodeSurfaceByOutputId::iterator it =
surfaces_at_device_.find(dqbuf.index);
    if (it == surfaces_at_device_.end()) {
      DLOG(ERROR) << "Got invalid surface from device.";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }
it->second->SetDecoded();
surfaces_at_device_.erase(it);
}
// A frame was decoded, see if we can output it.
TryOutputSurfaces();
ProcessPendingEventsIfNeeded();
}
void V4L2SliceVideoDecodeAccelerator::ProcessPendingEventsIfNeeded() {
// Process pending events, if any, in the correct order.
// We always first process the surface set change, as it is an internal
// event from the decoder and interleaving it with external requests would
// put the decoder in an undefined state.
FinishSurfaceSetChangeIfNeeded();
// Process external (client) requests.
FinishFlushIfNeeded();
FinishResetIfNeeded();
}
void V4L2SliceVideoDecodeAccelerator::ReuseInputBuffer(int index) {
DVLOGF(4) << "Reusing input buffer, index=" << index;
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
DCHECK_LT(index, static_cast<int>(input_buffer_map_.size()));
InputRecord& input_record = input_buffer_map_[index];
DCHECK(!input_record.at_device);
input_record.input_id = -1;
input_record.bytes_used = 0;
DCHECK_EQ(std::count(free_input_buffers_.begin(), free_input_buffers_.end(),
index), 0);
free_input_buffers_.push_back(index);
}
void V4L2SliceVideoDecodeAccelerator::ReuseOutputBuffer(int index) {
DVLOGF(4) << "Reusing output buffer, index=" << index;
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
DCHECK_LT(index, static_cast<int>(output_buffer_map_.size()));
OutputRecord& output_record = output_buffer_map_[index];
DCHECK(!output_record.at_device);
DCHECK(!output_record.at_client);
DCHECK_EQ(std::count(free_output_buffers_.begin(), free_output_buffers_.end(),
index), 0);
free_output_buffers_.push_back(index);
ScheduleDecodeBufferTaskIfNeeded();
}
bool V4L2SliceVideoDecodeAccelerator::EnqueueInputRecord(
int index,
uint32_t config_store) {
DVLOGF(3);
DCHECK_LT(index, static_cast<int>(input_buffer_map_.size()));
DCHECK_GT(config_store, 0u);
// Enqueue an input (VIDEO_OUTPUT) buffer for an input video frame.
InputRecord& input_record = input_buffer_map_[index];
DCHECK(!input_record.at_device);
struct v4l2_buffer qbuf;
struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES];
memset(&qbuf, 0, sizeof(qbuf));
memset(qbuf_planes, 0, sizeof(qbuf_planes));
qbuf.index = index;
qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
qbuf.memory = V4L2_MEMORY_MMAP;
qbuf.m.planes = qbuf_planes;
qbuf.m.planes[0].bytesused = input_record.bytes_used;
qbuf.length = input_planes_count_;
qbuf.config_store = config_store;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
input_record.at_device = true;
input_buffer_queued_count_++;
DVLOGF(4) << "Enqueued input=" << qbuf.index
<< " count: " << input_buffer_queued_count_;
return true;
}
bool V4L2SliceVideoDecodeAccelerator::EnqueueOutputRecord(int index) {
DVLOGF(3);
DCHECK_LT(index, static_cast<int>(output_buffer_map_.size()));
// Enqueue an output (VIDEO_CAPTURE) buffer.
OutputRecord& output_record = output_buffer_map_[index];
DCHECK(!output_record.at_device);
DCHECK(!output_record.at_client);
DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR);
DCHECK_NE(output_record.picture_id, -1);
if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
// If we have to wait for completion, wait. Note that
// free_output_buffers_ is a FIFO queue, so we always wait on the
// buffer that has been in the queue the longest.
if (eglClientWaitSyncKHR(egl_display_, output_record.egl_sync, 0,
EGL_FOREVER_KHR) == EGL_FALSE) {
// This will cause tearing, but is safe otherwise.
DVLOGF(1) << "eglClientWaitSyncKHR failed!";
}
if (eglDestroySyncKHR(egl_display_, output_record.egl_sync) != EGL_TRUE) {
LOGF(ERROR) << "eglDestroySyncKHR failed!";
NOTIFY_ERROR(PLATFORM_FAILURE);
return false;
}
output_record.egl_sync = EGL_NO_SYNC_KHR;
}
struct v4l2_buffer qbuf;
struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES];
memset(&qbuf, 0, sizeof(qbuf));
memset(qbuf_planes, 0, sizeof(qbuf_planes));
qbuf.index = index;
qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
qbuf.memory = V4L2_MEMORY_MMAP;
qbuf.m.planes = qbuf_planes;
qbuf.length = output_planes_count_;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
output_record.at_device = true;
output_buffer_queued_count_++;
DVLOGF(4) << "Enqueued output=" << qbuf.index
<< " count: " << output_buffer_queued_count_;
return true;
}
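// Start streaming on both queues and spin up the device poll thread, which
// waits on the device in DevicePollTask().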
bool V4L2SliceVideoDecodeAccelerator::StartDevicePoll() {
DVLOGF(3) << "Starting device poll";
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
DCHECK(!device_poll_thread_.IsRunning());
// Start up the device poll thread and schedule its first DevicePollTask().
if (!device_poll_thread_.Start()) {
DLOG(ERROR) << "StartDevicePoll(): Device thread failed to start";
NOTIFY_ERROR(PLATFORM_FAILURE);
return false;
}
if (!input_streamon_) {
__u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMON, &type);
input_streamon_ = true;
}
if (!output_streamon_) {
__u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMON, &type);
output_streamon_ = true;
}
device_poll_thread_.message_loop()->PostTask(
FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DevicePollTask,
base::Unretained(this), true));
return true;
}
bool V4L2SliceVideoDecodeAccelerator::StopDevicePoll(bool keep_input_state) {
DVLOGF(3) << "Stopping device poll";
if (decoder_thread_.IsRunning())
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
// Signal the DevicePollTask() to stop, and stop the device poll thread.
if (!device_->SetDevicePollInterrupt()) {
PLOG(ERROR) << "SetDevicePollInterrupt(): failed";
NOTIFY_ERROR(PLATFORM_FAILURE);
return false;
}
device_poll_thread_.Stop();
DVLOGF(3) << "Device poll thread stopped";
// Clear the interrupt now, to be sure.
if (!device_->ClearDevicePollInterrupt()) {
NOTIFY_ERROR(PLATFORM_FAILURE);
return false;
}
if (!keep_input_state) {
if (input_streamon_) {
__u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
}
input_streamon_ = false;
}
if (output_streamon_) {
__u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
}
output_streamon_ = false;
if (!keep_input_state) {
for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
InputRecord& input_record = input_buffer_map_[i];
if (input_record.at_device) {
input_record.at_device = false;
ReuseInputBuffer(i);
input_buffer_queued_count_--;
}
}
DCHECK_EQ(input_buffer_queued_count_, 0);
}
// STREAMOFF makes the driver drop all buffers without decoding and DQBUFing,
// so we mark them all as at_device = false and clear surfaces_at_device_.
for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
OutputRecord& output_record = output_buffer_map_[i];
if (output_record.at_device) {
output_record.at_device = false;
output_buffer_queued_count_--;
}
}
surfaces_at_device_.clear();
DCHECK_EQ(output_buffer_queued_count_, 0);
// Drop all surfaces that were awaiting decode before being displayed,
// since we've just cancelled all outstanding decodes.
while (!decoder_display_queue_.empty())
decoder_display_queue_.pop();
DVLOGF(3) << "Device poll stopped";
return true;
}
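// Client entry point for new bitstream buffers; called on the IO thread and
// trampolined to the decoder thread as DecodeTask().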
void V4L2SliceVideoDecodeAccelerator::Decode(
const media::BitstreamBuffer& bitstream_buffer) {
DVLOGF(3) << "input_id=" << bitstream_buffer.id()
<< ", size=" << bitstream_buffer.size();
DCHECK(io_message_loop_proxy_->BelongsToCurrentThread());
decoder_thread_proxy_->PostTask(
FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DecodeTask,
base::Unretained(this), bitstream_buffer));
}
void V4L2SliceVideoDecodeAccelerator::DecodeTask(
const media::BitstreamBuffer& bitstream_buffer) {
DVLOGF(3) << "input_id=" << bitstream_buffer.id()
<< " size=" << bitstream_buffer.size();
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
scoped_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef(
io_client_, io_message_loop_proxy_,
new base::SharedMemory(bitstream_buffer.handle(), true),
bitstream_buffer.size(), bitstream_buffer.id()));
if (!bitstream_record->shm->Map(bitstream_buffer.size())) {
LOGF(ERROR) << "Could not map bitstream_buffer";
NOTIFY_ERROR(UNREADABLE_INPUT);
return;
}
DVLOGF(3) << "mapped at=" << bitstream_record->shm->memory();
decoder_input_queue_.push(
linked_ptr<BitstreamBufferRef>(bitstream_record.release()));
ScheduleDecodeBufferTaskIfNeeded();
}
bool V4L2SliceVideoDecodeAccelerator::TrySetNewBistreamBuffer() {
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
DCHECK(!decoder_current_bitstream_buffer_);
if (decoder_input_queue_.empty())
return false;
decoder_current_bitstream_buffer_.reset(
decoder_input_queue_.front().release());
decoder_input_queue_.pop();
if (decoder_current_bitstream_buffer_->input_id == kFlushBufferId) {
// This is a buffer we queued for ourselves to trigger flush at this time.
InitiateFlush();
return false;
}
const uint8_t* const data = reinterpret_cast<const uint8_t*>(
decoder_current_bitstream_buffer_->shm->memory());
const size_t data_size = decoder_current_bitstream_buffer_->size;
decoder_->SetStream(data, data_size);
return true;
}
void V4L2SliceVideoDecodeAccelerator::ScheduleDecodeBufferTaskIfNeeded() {
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
if (state_ == kDecoding) {
decoder_thread_proxy_->PostTask(
FROM_HERE,
base::Bind(&V4L2SliceVideoDecodeAccelerator::DecodeBufferTask,
base::Unretained(this)));
}
}
void V4L2SliceVideoDecodeAccelerator::DecodeBufferTask() {
DVLOGF(3);
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
if (state_ != kDecoding) {
DVLOGF(3) << "Early exit, not in kDecoding";
return;
}
while (true) {
AcceleratedVideoDecoder::DecodeResult res;
res = decoder_->Decode();
switch (res) {
case AcceleratedVideoDecoder::kAllocateNewSurfaces:
DVLOGF(2) << "Decoder requesting a new set of surfaces";
InitiateSurfaceSetChange();
return;
case AcceleratedVideoDecoder::kRanOutOfStreamData:
decoder_current_bitstream_buffer_.reset();
if (!TrySetNewBistreamBuffer())
return;
break;
case AcceleratedVideoDecoder::kRanOutOfSurfaces:
// No more surfaces for the decoder, we'll come back once we have more.
DVLOGF(4) << "Ran out of surfaces";
return;
case AcceleratedVideoDecoder::kDecodeError:
DVLOGF(1) << "Error decoding stream";
NOTIFY_ERROR(PLATFORM_FAILURE);
return;
}
}
}
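// Begin switching to a new set of output buffers (e.g. on a resolution
// change). The actual switch is performed in FinishSurfaceSetChangeIfNeeded()
// once the device has returned all surfaces it holds.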
void V4L2SliceVideoDecodeAccelerator::InitiateSurfaceSetChange() {
DVLOGF(2);
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
DCHECK_EQ(state_, kDecoding);
state_ = kIdle;
DCHECK(!surface_set_change_pending_);
surface_set_change_pending_ = true;
FinishSurfaceSetChangeIfNeeded();
}
void V4L2SliceVideoDecodeAccelerator::FinishSurfaceSetChangeIfNeeded() {
DVLOGF(2);
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
if (!surface_set_change_pending_ || !surfaces_at_device_.empty())
return;
DCHECK_EQ(state_, kIdle);
DCHECK(decoder_display_queue_.empty());
// All output buffers should've been returned from decoder and device by now.
// The only remaining owner of surfaces may be display (client), and we will
// dismiss them when destroying output buffers below.
DCHECK_EQ(free_output_buffers_.size() + surfaces_at_display_.size(),
output_buffer_map_.size());
// Keep input queue running while we switch outputs.
if (!StopDevicePoll(true)) {
NOTIFY_ERROR(PLATFORM_FAILURE);
return;
}
// This will return only once all buffers are dismissed and destroyed.
// This does not wait until they are displayed however, as display retains
// references to the buffers bound to textures and will release them
// after displaying.
if (!DestroyOutputs(true)) {
NOTIFY_ERROR(PLATFORM_FAILURE);
return;
}
if (!CreateOutputBuffers()) {
NOTIFY_ERROR(PLATFORM_FAILURE);
return;
}
if (!StartDevicePoll()) {
NOTIFY_ERROR(PLATFORM_FAILURE);
return;
}
DVLOGF(3) << "Surface set change finished";
surface_set_change_pending_ = false;
state_ = kDecoding;
ScheduleDecodeBufferTaskIfNeeded();
}
bool V4L2SliceVideoDecodeAccelerator::DestroyOutputs(bool dismiss) {
DVLOGF(3);
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
std::vector<EGLImageKHR> egl_images_to_destroy;
std::vector<int32> picture_buffers_to_dismiss;
if (output_buffer_map_.empty())
return true;
  for (const auto& output_record : output_buffer_map_) {
DCHECK(!output_record.at_device);
if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
if (eglDestroySyncKHR(egl_display_, output_record.egl_sync) != EGL_TRUE)
DVLOGF(1) << "eglDestroySyncKHR failed.";
}
if (output_record.egl_image != EGL_NO_IMAGE_KHR) {
child_message_loop_proxy_->PostTask(
FROM_HERE,
base::Bind(base::IgnoreResult(&V4L2Device::DestroyEGLImage), device_,
egl_display_, output_record.egl_image));
}
picture_buffers_to_dismiss.push_back(output_record.picture_id);
}
if (dismiss) {
DVLOGF(2) << "Scheduling picture dismissal";
base::WaitableEvent done(false, false);
child_message_loop_proxy_->PostTask(
FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DismissPictures,
weak_this_, picture_buffers_to_dismiss, &done));
done.Wait();
}
// At this point client can't call ReusePictureBuffer on any of the pictures
// anymore, so it's safe to destroy.
return DestroyOutputBuffers();
}
bool V4L2SliceVideoDecodeAccelerator::DestroyOutputBuffers() {
DVLOGF(3);
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread() ||
!decoder_thread_.IsRunning());
DCHECK(!output_streamon_);
DCHECK(surfaces_at_device_.empty());
DCHECK(decoder_display_queue_.empty());
DCHECK_EQ(surfaces_at_display_.size() + free_output_buffers_.size(),
output_buffer_map_.size());
if (output_buffer_map_.empty())
return true;
// It's ok to do this, client will retain references to textures, but we are
// not interested in reusing the surfaces anymore.
// This will prevent us from reusing old surfaces in case we have some
// ReusePictureBuffer() pending on ChildThread already. It's ok to ignore
// them, because we have already dismissed them (in DestroyOutputs()).
for (const auto& surface_at_display : surfaces_at_display_) {
size_t index = surface_at_display.second->output_record();
DCHECK_LT(index, output_buffer_map_.size());
OutputRecord& output_record = output_buffer_map_[index];
DCHECK(output_record.at_client);
output_record.at_client = false;
}
surfaces_at_display_.clear();
DCHECK_EQ(free_output_buffers_.size(), output_buffer_map_.size());
free_output_buffers_.clear();
output_buffer_map_.clear();
struct v4l2_requestbuffers reqbufs;
memset(&reqbufs, 0, sizeof(reqbufs));
reqbufs.count = 0;
reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
reqbufs.memory = V4L2_MEMORY_MMAP;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
return true;
}
void V4L2SliceVideoDecodeAccelerator::AssignPictureBuffers(
const std::vector<media::PictureBuffer>& buffers) {
DVLOGF(3);
DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
if (buffers.size() != output_buffer_map_.size()) {
DLOG(ERROR) << "Failed to provide requested picture buffers. "
<< "(Got " << buffers.size()
<< ", requested " << output_buffer_map_.size() << ")";
NOTIFY_ERROR(INVALID_ARGUMENT);
return;
}
if (!make_context_current_.Run()) {
DLOG(ERROR) << "could not make context current";
NOTIFY_ERROR(PLATFORM_FAILURE);
return;
}
gfx::ScopedTextureBinder bind_restore(GL_TEXTURE_EXTERNAL_OES, 0);
// It's safe to manipulate all the buffer state here, because the decoder
// thread is waiting on pictures_assigned_.
DCHECK(free_output_buffers_.empty());
for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
DCHECK(buffers[i].size() == coded_size_);
OutputRecord& output_record = output_buffer_map_[i];
DCHECK(!output_record.at_device);
DCHECK(!output_record.at_client);
DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR);
DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
DCHECK_EQ(output_record.picture_id, -1);
DCHECK_EQ(output_record.cleared, false);
EGLImageKHR egl_image = device_->CreateEGLImage(egl_display_,
egl_context_,
buffers[i].texture_id(),
coded_size_,
i,
output_format_fourcc_,
output_planes_count_);
if (egl_image == EGL_NO_IMAGE_KHR) {
LOGF(ERROR) << "Could not create EGLImageKHR";
// Ownership of EGLImages allocated in previous iterations of this loop
// has been transferred to output_buffer_map_. After we error-out here
// the destructor will handle their cleanup.
NOTIFY_ERROR(PLATFORM_FAILURE);
return;
}
output_record.egl_image = egl_image;
output_record.picture_id = buffers[i].id();
free_output_buffers_.push_back(i);
DVLOGF(3) << "buffer[" << i << "]: picture_id=" << output_record.picture_id;
}
pictures_assigned_.Signal();
}
void V4L2SliceVideoDecodeAccelerator::ReusePictureBuffer(
int32 picture_buffer_id) {
DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
DVLOGF(4) << "picture_buffer_id=" << picture_buffer_id;
if (!make_context_current_.Run()) {
LOGF(ERROR) << "could not make context current";
NOTIFY_ERROR(PLATFORM_FAILURE);
return;
}
EGLSyncKHR egl_sync =
eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL);
if (egl_sync == EGL_NO_SYNC_KHR) {
LOGF(ERROR) << "eglCreateSyncKHR() failed";
NOTIFY_ERROR(PLATFORM_FAILURE);
return;
}
scoped_ptr<EGLSyncKHRRef> egl_sync_ref(
new EGLSyncKHRRef(egl_display_, egl_sync));
decoder_thread_proxy_->PostTask(
FROM_HERE,
base::Bind(&V4L2SliceVideoDecodeAccelerator::ReusePictureBufferTask,
base::Unretained(this), picture_buffer_id,
base::Passed(&egl_sync_ref)));
}
void V4L2SliceVideoDecodeAccelerator::ReusePictureBufferTask(
int32 picture_buffer_id,
scoped_ptr<EGLSyncKHRRef> egl_sync_ref) {
DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id;
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
V4L2DecodeSurfaceByPictureBufferId::iterator it =
surfaces_at_display_.find(picture_buffer_id);
if (it == surfaces_at_display_.end()) {
// It's possible that we've already posted a DismissPictureBuffer for this
// picture, but it has not yet executed when this ReusePictureBuffer was
// posted to us by the client. In that case just ignore this (we've already
// dismissed it and accounted for that) and let the sync object get
// destroyed.
DVLOGF(3) << "got picture id=" << picture_buffer_id
<< " not in use (anymore?).";
return;
}
OutputRecord& output_record = output_buffer_map_[it->second->output_record()];
if (output_record.at_device || !output_record.at_client) {
DVLOGF(1) << "picture_buffer_id not reusable";
NOTIFY_ERROR(INVALID_ARGUMENT);
return;
}
DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
DCHECK(!output_record.at_device);
output_record.at_client = false;
output_record.egl_sync = egl_sync_ref->egl_sync;
// Take ownership of the EGLSync.
egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR;
surfaces_at_display_.erase(it);
}
void V4L2SliceVideoDecodeAccelerator::Flush() {
DVLOGF(3);
DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
decoder_thread_proxy_->PostTask(
FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::FlushTask,
base::Unretained(this)));
}
void V4L2SliceVideoDecodeAccelerator::FlushTask() {
DVLOGF(3);
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
if (!decoder_input_queue_.empty()) {
// We are not done with pending inputs, so queue an empty buffer,
// which - when reached - will trigger flush sequence.
decoder_input_queue_.push(
linked_ptr<BitstreamBufferRef>(new BitstreamBufferRef(
io_client_, io_message_loop_proxy_, nullptr, 0, kFlushBufferId)));
return;
}
// No more inputs pending, so just finish flushing here.
InitiateFlush();
}
void V4L2SliceVideoDecodeAccelerator::InitiateFlush() {
DVLOGF(3);
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
DCHECK(!decoder_flushing_);
DCHECK_EQ(state_, kDecoding);
state_ = kIdle;
// This will trigger output for all remaining surfaces in the decoder.
// However, not all of them may be decoded yet (they would be queued
// in hardware then).
if (!decoder_->Flush()) {
DVLOGF(1) << "Failed flushing the decoder.";
NOTIFY_ERROR(PLATFORM_FAILURE);
return;
}
// Put the decoder in an idle state, ready to resume.
decoder_->Reset();
decoder_flushing_ = true;
decoder_thread_proxy_->PostTask(
FROM_HERE,
base::Bind(&V4L2SliceVideoDecodeAccelerator::FinishFlushIfNeeded,
base::Unretained(this)));
}
void V4L2SliceVideoDecodeAccelerator::FinishFlushIfNeeded() {
DVLOGF(3);
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
if (!decoder_flushing_ || !surfaces_at_device_.empty())
return;
DCHECK_EQ(state_, kIdle);
// At this point, all remaining surfaces are decoded and dequeued, and since
// we have already scheduled output for them in InitiateFlush(), their
// respective PictureReady calls have been posted (or they have been queued on
// pending_picture_ready_). So at this time, once we SendPictureReady(),
// we will have all remaining PictureReady() posted to the client and we
// can post NotifyFlushDone().
DCHECK(decoder_display_queue_.empty());
// Decoder should have already returned all surfaces and all surfaces are
// out of hardware. There can be no other owners of input buffers.
DCHECK_EQ(free_input_buffers_.size(), input_buffer_map_.size());
SendPictureReady();
child_message_loop_proxy_->PostTask(
FROM_HERE, base::Bind(&Client::NotifyFlushDone, client_));
decoder_flushing_ = false;
DVLOGF(3) << "Flush finished";
state_ = kDecoding;
ScheduleDecodeBufferTaskIfNeeded();
}
void V4L2SliceVideoDecodeAccelerator::Reset() {
DVLOGF(3);
DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
decoder_thread_proxy_->PostTask(
FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::ResetTask,
base::Unretained(this)));
}
void V4L2SliceVideoDecodeAccelerator::ResetTask() {
DVLOGF(3);
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
if (decoder_resetting_) {
// This is a bug in the client, multiple Reset()s before NotifyResetDone()
// are not allowed.
NOTREACHED() << "Client should not be requesting multiple Reset()s";
return;
}
DCHECK_EQ(state_, kDecoding);
state_ = kIdle;
// Put the decoder in an idle state, ready to resume.
decoder_->Reset();
decoder_resetting_ = true;
// Drop all remaining inputs.
decoder_current_bitstream_buffer_.reset();
while (!decoder_input_queue_.empty())
decoder_input_queue_.pop();
FinishResetIfNeeded();
}
void V4L2SliceVideoDecodeAccelerator::FinishResetIfNeeded() {
DVLOGF(3);
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
if (!decoder_resetting_ || !surfaces_at_device_.empty())
return;
DCHECK_EQ(state_, kIdle);
DCHECK(!decoder_flushing_);
SendPictureReady();
// Drop any pending outputs.
while (!decoder_display_queue_.empty())
decoder_display_queue_.pop();
// At this point we can have no input buffers in the decoder, because we
// Reset()ed it in ResetTask(), and have not scheduled any new Decode()s
// having been in kIdle since. We don't have any surfaces in the HW either -
// we just checked that surfaces_at_device_.empty(), and inputs are tied
// to surfaces. Since there can be no other owners of input buffers, we can
// simply mark them all as available.
DCHECK_EQ(input_buffer_queued_count_, 0);
free_input_buffers_.clear();
for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
DCHECK(!input_buffer_map_[i].at_device);
ReuseInputBuffer(i);
}
decoder_resetting_ = false;
child_message_loop_proxy_->PostTask(
FROM_HERE, base::Bind(&Client::NotifyResetDone, client_));
DVLOGF(3) << "Reset finished";
state_ = kDecoding;
ScheduleDecodeBufferTaskIfNeeded();
}
void V4L2SliceVideoDecodeAccelerator::SetErrorState(Error error) {
  // We can touch state_ only if this is the decoder thread or the
  // decoder thread isn't running.
if (decoder_thread_.IsRunning() &&
!decoder_thread_proxy_->BelongsToCurrentThread()) {
decoder_thread_proxy_->PostTask(
FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::SetErrorState,
base::Unretained(this), error));
return;
}
// Post NotifyError only if we are already initialized, as the API does
// not allow doing so before that.
if (state_ != kError && state_ != kUninitialized)
NotifyError(error);
state_ = kError;
}
V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::V4L2H264Accelerator(
V4L2SliceVideoDecodeAccelerator* v4l2_dec)
: num_slices_(0), v4l2_dec_(v4l2_dec) {
DCHECK(v4l2_dec_);
}
V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::~V4L2H264Accelerator() {
}
scoped_refptr<H264Picture>
V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::CreateH264Picture() {
scoped_refptr<V4L2DecodeSurface> dec_surface = v4l2_dec_->CreateSurface();
if (!dec_surface)
return nullptr;
return new V4L2H264Picture(dec_surface);
}
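// Convert a list of H264Pictures into the DPB index list expected by the
// driver, padding unused entries with VIDEO_MAX_FRAME.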
void V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::
H264PictureListToDPBIndicesList(const H264Picture::Vector& src_pic_list,
uint8_t dst_list[kDPBIndicesListSize]) {
size_t i;
for (i = 0; i < src_pic_list.size() && i < kDPBIndicesListSize; ++i) {
const scoped_refptr<H264Picture>& pic = src_pic_list[i];
dst_list[i] = pic ? pic->dpb_position : VIDEO_MAX_FRAME;
}
while (i < kDPBIndicesListSize)
dst_list[i++] = VIDEO_MAX_FRAME;
}
void V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::H264DPBToV4L2DPB(
const H264DPB& dpb,
std::vector<scoped_refptr<V4L2DecodeSurface>>* ref_surfaces) {
memset(v4l2_decode_param_.dpb, 0, sizeof(v4l2_decode_param_.dpb));
size_t i = 0;
for (const auto& pic : dpb) {
if (i >= arraysize(v4l2_decode_param_.dpb)) {
DVLOG(1) << "Invalid DPB size";
break;
}
struct v4l2_h264_dpb_entry& entry = v4l2_decode_param_.dpb[i++];
scoped_refptr<V4L2DecodeSurface> dec_surface =
H264PictureToV4L2DecodeSurface(pic);
entry.buf_index = dec_surface->output_record();
entry.frame_num = pic->frame_num;
entry.pic_num = pic->pic_num;
entry.top_field_order_cnt = pic->top_field_order_cnt;
entry.bottom_field_order_cnt = pic->bottom_field_order_cnt;
entry.flags = (pic->ref ? V4L2_H264_DPB_ENTRY_FLAG_ACTIVE : 0) |
(pic->long_term ? V4L2_H264_DPB_ENTRY_FLAG_LONG_TERM : 0);
ref_surfaces->push_back(dec_surface);
}
}
bool V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::SubmitFrameMetadata(
const media::H264SPS* sps,
const media::H264PPS* pps,
const H264DPB& dpb,
const H264Picture::Vector& ref_pic_listp0,
const H264Picture::Vector& ref_pic_listb0,
const H264Picture::Vector& ref_pic_listb1,
const scoped_refptr<H264Picture>& pic) {
struct v4l2_ext_control ctrl;
std::vector<struct v4l2_ext_control> ctrls;
struct v4l2_ctrl_h264_sps v4l2_sps;
memset(&v4l2_sps, 0, sizeof(v4l2_sps));
  v4l2_sps.constraint_set_flags =
      (sps->constraint_set0_flag ? V4L2_H264_SPS_CONSTRAINT_SET0_FLAG : 0) |
      (sps->constraint_set1_flag ? V4L2_H264_SPS_CONSTRAINT_SET1_FLAG : 0) |
      (sps->constraint_set2_flag ? V4L2_H264_SPS_CONSTRAINT_SET2_FLAG : 0) |
      (sps->constraint_set3_flag ? V4L2_H264_SPS_CONSTRAINT_SET3_FLAG : 0) |
      (sps->constraint_set4_flag ? V4L2_H264_SPS_CONSTRAINT_SET4_FLAG : 0) |
      (sps->constraint_set5_flag ? V4L2_H264_SPS_CONSTRAINT_SET5_FLAG : 0);
#define SPS_TO_V4L2SPS(a) v4l2_sps.a = sps->a
SPS_TO_V4L2SPS(profile_idc);
SPS_TO_V4L2SPS(level_idc);
SPS_TO_V4L2SPS(seq_parameter_set_id);
SPS_TO_V4L2SPS(chroma_format_idc);
SPS_TO_V4L2SPS(bit_depth_luma_minus8);
SPS_TO_V4L2SPS(bit_depth_chroma_minus8);
SPS_TO_V4L2SPS(log2_max_frame_num_minus4);
SPS_TO_V4L2SPS(pic_order_cnt_type);
SPS_TO_V4L2SPS(log2_max_pic_order_cnt_lsb_minus4);
SPS_TO_V4L2SPS(offset_for_non_ref_pic);
SPS_TO_V4L2SPS(offset_for_top_to_bottom_field);
SPS_TO_V4L2SPS(num_ref_frames_in_pic_order_cnt_cycle);
static_assert(arraysize(v4l2_sps.offset_for_ref_frame) ==
arraysize(sps->offset_for_ref_frame),
"offset_for_ref_frame arrays must be same size");
for (size_t i = 0; i < arraysize(v4l2_sps.offset_for_ref_frame); ++i)
v4l2_sps.offset_for_ref_frame[i] = sps->offset_for_ref_frame[i];
SPS_TO_V4L2SPS(max_num_ref_frames);
SPS_TO_V4L2SPS(pic_width_in_mbs_minus1);
SPS_TO_V4L2SPS(pic_height_in_map_units_minus1);
#undef SPS_TO_V4L2SPS
#define SET_V4L2_SPS_FLAG_IF(cond, flag) \
v4l2_sps.flags |= ((sps->cond) ? (flag) : 0)
SET_V4L2_SPS_FLAG_IF(separate_colour_plane_flag,
V4L2_H264_SPS_FLAG_SEPARATE_COLOUR_PLANE);
SET_V4L2_SPS_FLAG_IF(qpprime_y_zero_transform_bypass_flag,
V4L2_H264_SPS_FLAG_QPPRIME_Y_ZERO_TRANSFORM_BYPASS);
SET_V4L2_SPS_FLAG_IF(delta_pic_order_always_zero_flag,
V4L2_H264_SPS_FLAG_DELTA_PIC_ORDER_ALWAYS_ZERO);
SET_V4L2_SPS_FLAG_IF(gaps_in_frame_num_value_allowed_flag,
V4L2_H264_SPS_FLAG_GAPS_IN_FRAME_NUM_VALUE_ALLOWED);
SET_V4L2_SPS_FLAG_IF(frame_mbs_only_flag, V4L2_H264_SPS_FLAG_FRAME_MBS_ONLY);
SET_V4L2_SPS_FLAG_IF(mb_adaptive_frame_field_flag,
V4L2_H264_SPS_FLAG_MB_ADAPTIVE_FRAME_FIELD);
SET_V4L2_SPS_FLAG_IF(direct_8x8_inference_flag,
V4L2_H264_SPS_FLAG_DIRECT_8X8_INFERENCE);
#undef SET_V4L2_SPS_FLAG_IF
memset(&ctrl, 0, sizeof(ctrl));
ctrl.id = V4L2_CID_MPEG_VIDEO_H264_SPS;
ctrl.size = sizeof(v4l2_sps);
ctrl.p_h264_sps = &v4l2_sps;
ctrls.push_back(ctrl);
struct v4l2_ctrl_h264_pps v4l2_pps;
memset(&v4l2_pps, 0, sizeof(v4l2_pps));
#define PPS_TO_V4L2PPS(a) v4l2_pps.a = pps->a
PPS_TO_V4L2PPS(pic_parameter_set_id);
PPS_TO_V4L2PPS(seq_parameter_set_id);
PPS_TO_V4L2PPS(num_slice_groups_minus1);
PPS_TO_V4L2PPS(num_ref_idx_l0_default_active_minus1);
PPS_TO_V4L2PPS(num_ref_idx_l1_default_active_minus1);
PPS_TO_V4L2PPS(weighted_bipred_idc);
PPS_TO_V4L2PPS(pic_init_qp_minus26);
PPS_TO_V4L2PPS(pic_init_qs_minus26);
PPS_TO_V4L2PPS(chroma_qp_index_offset);
PPS_TO_V4L2PPS(second_chroma_qp_index_offset);
#undef PPS_TO_V4L2PPS
#define SET_V4L2_PPS_FLAG_IF(cond, flag) \
v4l2_pps.flags |= ((pps->cond) ? (flag) : 0)
SET_V4L2_PPS_FLAG_IF(entropy_coding_mode_flag,
V4L2_H264_PPS_FLAG_ENTROPY_CODING_MODE);
SET_V4L2_PPS_FLAG_IF(
bottom_field_pic_order_in_frame_present_flag,
V4L2_H264_PPS_FLAG_BOTTOM_FIELD_PIC_ORDER_IN_FRAME_PRESENT);
SET_V4L2_PPS_FLAG_IF(weighted_pred_flag, V4L2_H264_PPS_FLAG_WEIGHTED_PRED);
SET_V4L2_PPS_FLAG_IF(deblocking_filter_control_present_flag,
V4L2_H264_PPS_FLAG_DEBLOCKING_FILTER_CONTROL_PRESENT);
SET_V4L2_PPS_FLAG_IF(constrained_intra_pred_flag,
V4L2_H264_PPS_FLAG_CONSTRAINED_INTRA_PRED);
SET_V4L2_PPS_FLAG_IF(redundant_pic_cnt_present_flag,
V4L2_H264_PPS_FLAG_REDUNDANT_PIC_CNT_PRESENT);
SET_V4L2_PPS_FLAG_IF(transform_8x8_mode_flag,
V4L2_H264_PPS_FLAG_TRANSFORM_8X8_MODE);
SET_V4L2_PPS_FLAG_IF(pic_scaling_matrix_present_flag,
V4L2_H264_PPS_FLAG_PIC_SCALING_MATRIX_PRESENT);
#undef SET_V4L2_PPS_FLAG_IF
memset(&ctrl, 0, sizeof(ctrl));
ctrl.id = V4L2_CID_MPEG_VIDEO_H264_PPS;
ctrl.size = sizeof(v4l2_pps);
ctrl.p_h264_pps = &v4l2_pps;
ctrls.push_back(ctrl);
struct v4l2_ctrl_h264_scaling_matrix v4l2_scaling_matrix;
memset(&v4l2_scaling_matrix, 0, sizeof(v4l2_scaling_matrix));
static_assert(arraysize(v4l2_scaling_matrix.scaling_list_4x4) <=
arraysize(pps->scaling_list4x4) &&
arraysize(v4l2_scaling_matrix.scaling_list_4x4[0]) <=
arraysize(pps->scaling_list4x4[0]) &&
arraysize(v4l2_scaling_matrix.scaling_list_8x8) <=
arraysize(pps->scaling_list8x8) &&
arraysize(v4l2_scaling_matrix.scaling_list_8x8[0]) <=
arraysize(pps->scaling_list8x8[0]),
"scaling_lists must be of correct size");
for (size_t i = 0; i < arraysize(v4l2_scaling_matrix.scaling_list_4x4); ++i) {
for (size_t j = 0; j < arraysize(v4l2_scaling_matrix.scaling_list_4x4[i]);
++j) {
v4l2_scaling_matrix.scaling_list_4x4[i][j] = pps->scaling_list4x4[i][j];
}
}
for (size_t i = 0; i < arraysize(v4l2_scaling_matrix.scaling_list_8x8); ++i) {
for (size_t j = 0; j < arraysize(v4l2_scaling_matrix.scaling_list_8x8[i]);
++j) {
v4l2_scaling_matrix.scaling_list_8x8[i][j] = pps->scaling_list8x8[i][j];
}
}
memset(&ctrl, 0, sizeof(ctrl));
ctrl.id = V4L2_CID_MPEG_VIDEO_H264_SCALING_MATRIX;
ctrl.size = sizeof(v4l2_scaling_matrix);
ctrl.p_h264_scal_mtrx = &v4l2_scaling_matrix;
ctrls.push_back(ctrl);
scoped_refptr<V4L2DecodeSurface> dec_surface =
H264PictureToV4L2DecodeSurface(pic);
struct v4l2_ext_controls ext_ctrls;
memset(&ext_ctrls, 0, sizeof(ext_ctrls));
ext_ctrls.count = ctrls.size();
ext_ctrls.controls = &ctrls[0];
ext_ctrls.config_store = dec_surface->config_store();
v4l2_dec_->SubmitExtControls(&ext_ctrls);
H264PictureListToDPBIndicesList(ref_pic_listp0,
v4l2_decode_param_.ref_pic_list_p0);
H264PictureListToDPBIndicesList(ref_pic_listb0,
v4l2_decode_param_.ref_pic_list_b0);
H264PictureListToDPBIndicesList(ref_pic_listb1,
v4l2_decode_param_.ref_pic_list_b1);
std::vector<scoped_refptr<V4L2DecodeSurface>> ref_surfaces;
H264DPBToV4L2DPB(dpb, &ref_surfaces);
dec_surface->SetReferenceSurfaces(ref_surfaces);
return true;
}
bool V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::SubmitSlice(
const media::H264PPS* pps,
const media::H264SliceHeader* slice_hdr,
const H264Picture::Vector& ref_pic_list0,
const H264Picture::Vector& ref_pic_list1,
const scoped_refptr<H264Picture>& pic,
const uint8_t* data,
size_t size) {
if (num_slices_ == kMaxSlices) {
LOGF(ERROR) << "Over limit of supported slices per frame";
return false;
}
struct v4l2_ctrl_h264_slice_param& v4l2_slice_param =
v4l2_slice_params_[num_slices_++];
memset(&v4l2_slice_param, 0, sizeof(v4l2_slice_param));
v4l2_slice_param.size = size;
#define SHDR_TO_V4L2SPARM(a) v4l2_slice_param.a = slice_hdr->a
SHDR_TO_V4L2SPARM(header_bit_size);
SHDR_TO_V4L2SPARM(first_mb_in_slice);
SHDR_TO_V4L2SPARM(slice_type);
SHDR_TO_V4L2SPARM(pic_parameter_set_id);
SHDR_TO_V4L2SPARM(colour_plane_id);
SHDR_TO_V4L2SPARM(frame_num);
SHDR_TO_V4L2SPARM(idr_pic_id);
SHDR_TO_V4L2SPARM(pic_order_cnt_lsb);
SHDR_TO_V4L2SPARM(delta_pic_order_cnt_bottom);
SHDR_TO_V4L2SPARM(delta_pic_order_cnt0);
SHDR_TO_V4L2SPARM(delta_pic_order_cnt1);
SHDR_TO_V4L2SPARM(redundant_pic_cnt);
SHDR_TO_V4L2SPARM(dec_ref_pic_marking_bit_size);
SHDR_TO_V4L2SPARM(cabac_init_idc);
SHDR_TO_V4L2SPARM(slice_qp_delta);
SHDR_TO_V4L2SPARM(slice_qs_delta);
SHDR_TO_V4L2SPARM(disable_deblocking_filter_idc);
SHDR_TO_V4L2SPARM(slice_alpha_c0_offset_div2);
SHDR_TO_V4L2SPARM(slice_beta_offset_div2);
SHDR_TO_V4L2SPARM(num_ref_idx_l0_active_minus1);
SHDR_TO_V4L2SPARM(num_ref_idx_l1_active_minus1);
SHDR_TO_V4L2SPARM(pic_order_cnt_bit_size);
#undef SHDR_TO_V4L2SPARM
#define SET_V4L2_SPARM_FLAG_IF(cond, flag) \
v4l2_slice_param.flags |= ((slice_hdr->cond) ? (flag) : 0)
SET_V4L2_SPARM_FLAG_IF(field_pic_flag, V4L2_SLICE_FLAG_FIELD_PIC);
SET_V4L2_SPARM_FLAG_IF(bottom_field_flag, V4L2_SLICE_FLAG_BOTTOM_FIELD);
SET_V4L2_SPARM_FLAG_IF(direct_spatial_mv_pred_flag,
V4L2_SLICE_FLAG_DIRECT_SPATIAL_MV_PRED);
SET_V4L2_SPARM_FLAG_IF(sp_for_switch_flag, V4L2_SLICE_FLAG_SP_FOR_SWITCH);
#undef SET_V4L2_SPARM_FLAG_IF
struct v4l2_h264_pred_weight_table* pred_weight_table =
&v4l2_slice_param.pred_weight_table;
if (((slice_hdr->IsPSlice() || slice_hdr->IsSPSlice()) &&
pps->weighted_pred_flag) ||
(slice_hdr->IsBSlice() && pps->weighted_bipred_idc == 1)) {
pred_weight_table->luma_log2_weight_denom =
slice_hdr->luma_log2_weight_denom;
pred_weight_table->chroma_log2_weight_denom =
slice_hdr->chroma_log2_weight_denom;
struct v4l2_h264_weight_factors* factorsl0 =
&pred_weight_table->weight_factors[0];
for (int i = 0; i < 32; ++i) {
factorsl0->luma_weight[i] =
slice_hdr->pred_weight_table_l0.luma_weight[i];
factorsl0->luma_offset[i] =
slice_hdr->pred_weight_table_l0.luma_offset[i];
for (int j = 0; j < 2; ++j) {
factorsl0->chroma_weight[i][j] =
slice_hdr->pred_weight_table_l0.chroma_weight[i][j];
factorsl0->chroma_offset[i][j] =
slice_hdr->pred_weight_table_l0.chroma_offset[i][j];
}
}
if (slice_hdr->IsBSlice()) {
struct v4l2_h264_weight_factors* factorsl1 =
&pred_weight_table->weight_factors[1];
for (int i = 0; i < 32; ++i) {
factorsl1->luma_weight[i] =
slice_hdr->pred_weight_table_l1.luma_weight[i];
factorsl1->luma_offset[i] =
slice_hdr->pred_weight_table_l1.luma_offset[i];
for (int j = 0; j < 2; ++j) {
factorsl1->chroma_weight[i][j] =
slice_hdr->pred_weight_table_l1.chroma_weight[i][j];
factorsl1->chroma_offset[i][j] =
slice_hdr->pred_weight_table_l1.chroma_offset[i][j];
}
}
}
}
H264PictureListToDPBIndicesList(ref_pic_list0,
v4l2_slice_param.ref_pic_list0);
H264PictureListToDPBIndicesList(ref_pic_list1,
v4l2_slice_param.ref_pic_list1);
scoped_refptr<V4L2DecodeSurface> dec_surface =
H264PictureToV4L2DecodeSurface(pic);
v4l2_decode_param_.nal_ref_idc = slice_hdr->nal_ref_idc;
// TODO(posciak): Don't add start code back here, but have it passed from
// the parser.
size_t data_copy_size = size + 3;
scoped_ptr<uint8_t[]> data_copy(new uint8_t[data_copy_size]);
memset(data_copy.get(), 0, data_copy_size);
data_copy[2] = 0x01;
memcpy(data_copy.get() + 3, data, size);
return v4l2_dec_->SubmitSlice(dec_surface->input_record(), data_copy.get(),
data_copy_size);
}
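// Append a chunk of slice data to the input buffer backing the given input
// record.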
bool V4L2SliceVideoDecodeAccelerator::SubmitSlice(int index,
const uint8_t* data,
size_t size) {
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
InputRecord& input_record = input_buffer_map_[index];
if (input_record.bytes_used + size > input_record.length) {
DVLOGF(1) << "Input buffer too small";
return false;
}
memcpy(static_cast<uint8_t*>(input_record.address) + input_record.bytes_used,
data, size);
input_record.bytes_used += size;
return true;
}
bool V4L2SliceVideoDecodeAccelerator::SubmitExtControls(
struct v4l2_ext_controls* ext_ctrls) {
DCHECK_GT(ext_ctrls->config_store, 0u);
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_EXT_CTRLS, ext_ctrls);
return true;
}
bool V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::SubmitDecode(
const scoped_refptr<H264Picture>& pic) {
scoped_refptr<V4L2DecodeSurface> dec_surface =
H264PictureToV4L2DecodeSurface(pic);
v4l2_decode_param_.num_slices = num_slices_;
v4l2_decode_param_.idr_pic_flag = pic->idr;
v4l2_decode_param_.top_field_order_cnt = pic->top_field_order_cnt;
v4l2_decode_param_.bottom_field_order_cnt = pic->bottom_field_order_cnt;
struct v4l2_ext_control ctrl;
std::vector<struct v4l2_ext_control> ctrls;
memset(&ctrl, 0, sizeof(ctrl));
ctrl.id = V4L2_CID_MPEG_VIDEO_H264_SLICE_PARAM;
ctrl.size = sizeof(v4l2_slice_params_);
ctrl.p_h264_slice_param = v4l2_slice_params_;
ctrls.push_back(ctrl);
memset(&ctrl, 0, sizeof(ctrl));
ctrl.id = V4L2_CID_MPEG_VIDEO_H264_DECODE_PARAM;
ctrl.size = sizeof(v4l2_decode_param_);
ctrl.p_h264_decode_param = &v4l2_decode_param_;
ctrls.push_back(ctrl);
struct v4l2_ext_controls ext_ctrls;
memset(&ext_ctrls, 0, sizeof(ext_ctrls));
ext_ctrls.count = ctrls.size();
ext_ctrls.controls = &ctrls[0];
ext_ctrls.config_store = dec_surface->config_store();
v4l2_dec_->SubmitExtControls(&ext_ctrls);
Reset();
v4l2_dec_->DecodeSurface(dec_surface);
return true;
}
bool V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::OutputPicture(
const scoped_refptr<H264Picture>& pic) {
scoped_refptr<V4L2DecodeSurface> dec_surface =
H264PictureToV4L2DecodeSurface(pic);
v4l2_dec_->SurfaceReady(dec_surface);
return true;
}
void V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::Reset() {
num_slices_ = 0;
memset(&v4l2_decode_param_, 0, sizeof(v4l2_decode_param_));
memset(&v4l2_slice_params_, 0, sizeof(v4l2_slice_params_));
}
scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::
H264PictureToV4L2DecodeSurface(const scoped_refptr<H264Picture>& pic) {
V4L2H264Picture* v4l2_pic = pic->AsV4L2H264Picture();
CHECK(v4l2_pic);
return v4l2_pic->dec_surface();
}
V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::V4L2VP8Accelerator(
V4L2SliceVideoDecodeAccelerator* v4l2_dec)
: v4l2_dec_(v4l2_dec) {
DCHECK(v4l2_dec_);
}
V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::~V4L2VP8Accelerator() {
}
scoped_refptr<VP8Picture>
V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::CreateVP8Picture() {
scoped_refptr<V4L2DecodeSurface> dec_surface = v4l2_dec_->CreateSurface();
if (!dec_surface)
return nullptr;
return new V4L2VP8Picture(dec_surface);
}
#define ARRAY_MEMCPY_CHECKED(to, from) \
do { \
static_assert(sizeof(to) == sizeof(from), \
#from " and " #to " arrays must be of same size"); \
memcpy(to, from, sizeof(to)); \
} while (0)
static void FillV4L2SegmentationHeader(
const media::Vp8SegmentationHeader& vp8_sgmnt_hdr,
struct v4l2_vp8_sgmnt_hdr* v4l2_sgmnt_hdr) {
#define SET_V4L2_SGMNT_HDR_FLAG_IF(cond, flag) \
v4l2_sgmnt_hdr->flags |= ((vp8_sgmnt_hdr.cond) ? (flag) : 0)
SET_V4L2_SGMNT_HDR_FLAG_IF(segmentation_enabled,
V4L2_VP8_SEGMNT_HDR_FLAG_ENABLED);
SET_V4L2_SGMNT_HDR_FLAG_IF(update_mb_segmentation_map,
V4L2_VP8_SEGMNT_HDR_FLAG_UPDATE_MAP);
SET_V4L2_SGMNT_HDR_FLAG_IF(update_segment_feature_data,
V4L2_VP8_SEGMNT_HDR_FLAG_UPDATE_FEATURE_DATA);
#undef SET_V4L2_SGMNT_HDR_FLAG_IF
v4l2_sgmnt_hdr->segment_feature_mode = vp8_sgmnt_hdr.segment_feature_mode;
ARRAY_MEMCPY_CHECKED(v4l2_sgmnt_hdr->quant_update,
vp8_sgmnt_hdr.quantizer_update_value);
ARRAY_MEMCPY_CHECKED(v4l2_sgmnt_hdr->lf_update,
vp8_sgmnt_hdr.lf_update_value);
ARRAY_MEMCPY_CHECKED(v4l2_sgmnt_hdr->segment_probs,
vp8_sgmnt_hdr.segment_prob);
}
static void FillV4L2LoopfilterHeader(
const media::Vp8LoopFilterHeader& vp8_loopfilter_hdr,
struct v4l2_vp8_loopfilter_hdr* v4l2_lf_hdr) {
#define SET_V4L2_LF_HDR_FLAG_IF(cond, flag) \
v4l2_lf_hdr->flags |= ((vp8_loopfilter_hdr.cond) ? (flag) : 0)
SET_V4L2_LF_HDR_FLAG_IF(loop_filter_adj_enable, V4L2_VP8_LF_HDR_ADJ_ENABLE);
SET_V4L2_LF_HDR_FLAG_IF(mode_ref_lf_delta_update,
V4L2_VP8_LF_HDR_DELTA_UPDATE);
#undef SET_V4L2_LF_HDR_FLAG_IF
#define LF_HDR_TO_V4L2_LF_HDR(a) v4l2_lf_hdr->a = vp8_loopfilter_hdr.a;
LF_HDR_TO_V4L2_LF_HDR(type);
LF_HDR_TO_V4L2_LF_HDR(level);
LF_HDR_TO_V4L2_LF_HDR(sharpness_level);
#undef LF_HDR_TO_V4L2_LF_HDR
ARRAY_MEMCPY_CHECKED(v4l2_lf_hdr->ref_frm_delta_magnitude,
vp8_loopfilter_hdr.ref_frame_delta);
ARRAY_MEMCPY_CHECKED(v4l2_lf_hdr->mb_mode_delta_magnitude,
vp8_loopfilter_hdr.mb_mode_delta);
}
static void FillV4L2QuantizationHeader(
const media::Vp8QuantizationHeader& vp8_quant_hdr,
struct v4l2_vp8_quantization_hdr* v4l2_quant_hdr) {
v4l2_quant_hdr->y_ac_qi = vp8_quant_hdr.y_ac_qi;
v4l2_quant_hdr->y_dc_delta = vp8_quant_hdr.y_dc_delta;
v4l2_quant_hdr->y2_dc_delta = vp8_quant_hdr.y2_dc_delta;
v4l2_quant_hdr->y2_ac_delta = vp8_quant_hdr.y2_ac_delta;
v4l2_quant_hdr->uv_dc_delta = vp8_quant_hdr.uv_dc_delta;
v4l2_quant_hdr->uv_ac_delta = vp8_quant_hdr.uv_ac_delta;
}
static void FillV4L2EntropyHeader(
const media::Vp8EntropyHeader& vp8_entropy_hdr,
struct v4l2_vp8_entropy_hdr* v4l2_entropy_hdr) {
ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->coeff_probs,
vp8_entropy_hdr.coeff_probs);
ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->y_mode_probs,
vp8_entropy_hdr.y_mode_probs);
ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->uv_mode_probs,
vp8_entropy_hdr.uv_mode_probs);
ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->mv_probs,
vp8_entropy_hdr.mv_probs);
}
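// Translate the parsed VP8 frame header into a v4l2_ctrl_vp8_frame_hdr,
// submit it through the extended controls interface, then queue the frame
// data and reference surfaces for decoding.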
bool V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::SubmitDecode(
const scoped_refptr<VP8Picture>& pic,
const media::Vp8FrameHeader* frame_hdr,
const scoped_refptr<VP8Picture>& last_frame,
const scoped_refptr<VP8Picture>& golden_frame,
const scoped_refptr<VP8Picture>& alt_frame) {
struct v4l2_ctrl_vp8_frame_hdr v4l2_frame_hdr;
memset(&v4l2_frame_hdr, 0, sizeof(v4l2_frame_hdr));
#define FHDR_TO_V4L2_FHDR(a) v4l2_frame_hdr.a = frame_hdr->a
FHDR_TO_V4L2_FHDR(key_frame);
FHDR_TO_V4L2_FHDR(version);
FHDR_TO_V4L2_FHDR(width);
FHDR_TO_V4L2_FHDR(horizontal_scale);
FHDR_TO_V4L2_FHDR(height);
FHDR_TO_V4L2_FHDR(vertical_scale);
FHDR_TO_V4L2_FHDR(sign_bias_golden);
FHDR_TO_V4L2_FHDR(sign_bias_alternate);
FHDR_TO_V4L2_FHDR(prob_skip_false);
FHDR_TO_V4L2_FHDR(prob_intra);
FHDR_TO_V4L2_FHDR(prob_last);
FHDR_TO_V4L2_FHDR(prob_gf);
FHDR_TO_V4L2_FHDR(bool_dec_range);
FHDR_TO_V4L2_FHDR(bool_dec_value);
FHDR_TO_V4L2_FHDR(bool_dec_count);
#undef FHDR_TO_V4L2_FHDR
#define SET_V4L2_FRM_HDR_FLAG_IF(cond, flag) \
v4l2_frame_hdr.flags |= ((frame_hdr->cond) ? (flag) : 0)
SET_V4L2_FRM_HDR_FLAG_IF(is_experimental,
V4L2_VP8_FRAME_HDR_FLAG_EXPERIMENTAL);
SET_V4L2_FRM_HDR_FLAG_IF(show_frame, V4L2_VP8_FRAME_HDR_FLAG_SHOW_FRAME);
SET_V4L2_FRM_HDR_FLAG_IF(mb_no_skip_coeff,
V4L2_VP8_FRAME_HDR_FLAG_MB_NO_SKIP_COEFF);
#undef SET_V4L2_FRM_HDR_FLAG_IF
FillV4L2SegmentationHeader(frame_hdr->segmentation_hdr,
&v4l2_frame_hdr.sgmnt_hdr);
FillV4L2LoopfilterHeader(frame_hdr->loopfilter_hdr, &v4l2_frame_hdr.lf_hdr);
FillV4L2QuantizationHeader(frame_hdr->quantization_hdr,
&v4l2_frame_hdr.quant_hdr);
FillV4L2EntropyHeader(frame_hdr->entropy_hdr, &v4l2_frame_hdr.entropy_hdr);
v4l2_frame_hdr.first_part_size =
base::checked_cast<__u32>(frame_hdr->first_part_size);
v4l2_frame_hdr.first_part_offset =
base::checked_cast<__u32>(frame_hdr->first_part_offset);
v4l2_frame_hdr.macroblock_bit_offset =
base::checked_cast<__u32>(frame_hdr->macroblock_bit_offset);
v4l2_frame_hdr.num_dct_parts = frame_hdr->num_of_dct_partitions;
static_assert(arraysize(v4l2_frame_hdr.dct_part_sizes) ==
arraysize(frame_hdr->dct_partition_sizes),
"DCT partition size arrays must have equal number of elements");
for (size_t i = 0; i < frame_hdr->num_of_dct_partitions &&
i < arraysize(v4l2_frame_hdr.dct_part_sizes); ++i)
v4l2_frame_hdr.dct_part_sizes[i] = frame_hdr->dct_partition_sizes[i];
scoped_refptr<V4L2DecodeSurface> dec_surface =
VP8PictureToV4L2DecodeSurface(pic);
std::vector<scoped_refptr<V4L2DecodeSurface>> ref_surfaces;
if (last_frame) {
scoped_refptr<V4L2DecodeSurface> last_frame_surface =
VP8PictureToV4L2DecodeSurface(last_frame);
v4l2_frame_hdr.last_frame = last_frame_surface->output_record();
ref_surfaces.push_back(last_frame_surface);
} else {
v4l2_frame_hdr.last_frame = VIDEO_MAX_FRAME;
}
if (golden_frame) {
scoped_refptr<V4L2DecodeSurface> golden_frame_surface =
VP8PictureToV4L2DecodeSurface(golden_frame);
v4l2_frame_hdr.golden_frame = golden_frame_surface->output_record();
ref_surfaces.push_back(golden_frame_surface);
} else {
v4l2_frame_hdr.golden_frame = VIDEO_MAX_FRAME;
}
if (alt_frame) {
scoped_refptr<V4L2DecodeSurface> alt_frame_surface =
VP8PictureToV4L2DecodeSurface(alt_frame);
v4l2_frame_hdr.alt_frame = alt_frame_surface->output_record();
ref_surfaces.push_back(alt_frame_surface);
} else {
v4l2_frame_hdr.alt_frame = VIDEO_MAX_FRAME;
}
struct v4l2_ext_control ctrl;
memset(&ctrl, 0, sizeof(ctrl));
ctrl.id = V4L2_CID_MPEG_VIDEO_VP8_FRAME_HDR;
ctrl.size = sizeof(v4l2_frame_hdr);
ctrl.p_vp8_frame_hdr = &v4l2_frame_hdr;
struct v4l2_ext_controls ext_ctrls;
memset(&ext_ctrls, 0, sizeof(ext_ctrls));
ext_ctrls.count = 1;
ext_ctrls.controls = &ctrl;
ext_ctrls.config_store = dec_surface->config_store();
if (!v4l2_dec_->SubmitExtControls(&ext_ctrls))
return false;
dec_surface->SetReferenceSurfaces(ref_surfaces);
if (!v4l2_dec_->SubmitSlice(dec_surface->input_record(), frame_hdr->data,
frame_hdr->frame_size))
return false;
v4l2_dec_->DecodeSurface(dec_surface);
return true;
}
bool V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::OutputPicture(
const scoped_refptr<VP8Picture>& pic) {
scoped_refptr<V4L2DecodeSurface> dec_surface =
VP8PictureToV4L2DecodeSurface(pic);
v4l2_dec_->SurfaceReady(dec_surface);
return true;
}
scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::
VP8PictureToV4L2DecodeSurface(const scoped_refptr<VP8Picture>& pic) {
V4L2VP8Picture* v4l2_pic = pic->AsV4L2VP8Picture();
CHECK(v4l2_pic);
return v4l2_pic->dec_surface();
}
void V4L2SliceVideoDecodeAccelerator::DecodeSurface(
const scoped_refptr<V4L2DecodeSurface>& dec_surface) {
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
DVLOGF(3) << "Submitting decode for surface: " << dec_surface->ToString();
Enqueue(dec_surface);
}
void V4L2SliceVideoDecodeAccelerator::SurfaceReady(
const scoped_refptr<V4L2DecodeSurface>& dec_surface) {
DVLOGF(3);
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
decoder_display_queue_.push(dec_surface);
TryOutputSurfaces();
}
void V4L2SliceVideoDecodeAccelerator::TryOutputSurfaces() {
while (!decoder_display_queue_.empty()) {
scoped_refptr<V4L2DecodeSurface> dec_surface =
decoder_display_queue_.front();
if (!dec_surface->decoded())
break;
decoder_display_queue_.pop();
OutputSurface(dec_surface);
}
}
void V4L2SliceVideoDecodeAccelerator::OutputSurface(
const scoped_refptr<V4L2DecodeSurface>& dec_surface) {
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
OutputRecord& output_record =
output_buffer_map_[dec_surface->output_record()];
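  // Remember the surface, keyed by picture id, while its picture is out at
  // the client.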
bool inserted =
surfaces_at_display_.insert(std::make_pair(output_record.picture_id,
dec_surface)).second;
DCHECK(inserted);
DCHECK(!output_record.at_client);
DCHECK(!output_record.at_device);
DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR);
DCHECK_NE(output_record.picture_id, -1);
output_record.at_client = true;
media::Picture picture(output_record.picture_id, dec_surface->bitstream_id(),
gfx::Rect(visible_size_), false);
DVLOGF(3) << dec_surface->ToString()
<< ", bitstream_id: " << picture.bitstream_buffer_id()
<< ", picture_id: " << picture.picture_buffer_id();
pending_picture_ready_.push(PictureRecord(output_record.cleared, picture));
SendPictureReady();
output_record.cleared = true;
}
scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
V4L2SliceVideoDecodeAccelerator::CreateSurface() {
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
DCHECK_EQ(state_, kDecoding);
if (free_input_buffers_.empty() || free_output_buffers_.empty())
return nullptr;
int input = free_input_buffers_.front();
free_input_buffers_.pop_front();
int output = free_output_buffers_.front();
free_output_buffers_.pop_front();
InputRecord& input_record = input_buffer_map_[input];
DCHECK_EQ(input_record.bytes_used, 0u);
DCHECK_EQ(input_record.input_id, -1);
DCHECK(decoder_current_bitstream_buffer_ != nullptr);
input_record.input_id = decoder_current_bitstream_buffer_->input_id;
scoped_refptr<V4L2DecodeSurface> dec_surface = new V4L2DecodeSurface(
decoder_current_bitstream_buffer_->input_id, input, output,
base::Bind(&V4L2SliceVideoDecodeAccelerator::ReuseOutputBuffer,
base::Unretained(this)));
DVLOGF(4) << "Created surface " << input << " -> " << output;
return dec_surface;
}
void V4L2SliceVideoDecodeAccelerator::SendPictureReady() {
DVLOGF(3);
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
bool resetting_or_flushing = (decoder_resetting_ || decoder_flushing_);
while (!pending_picture_ready_.empty()) {
bool cleared = pending_picture_ready_.front().cleared;
const media::Picture& picture = pending_picture_ready_.front().picture;
if (cleared && picture_clearing_count_ == 0) {
DVLOGF(4) << "Posting picture ready to IO for: "
<< picture.picture_buffer_id();
      // This picture is already cleared. Post it to the IO thread to reduce
      // latency. This should be the common case once all pictures have been
      // cleared at startup.
io_message_loop_proxy_->PostTask(
FROM_HERE, base::Bind(&Client::PictureReady, io_client_, picture));
pending_picture_ready_.pop();
} else if (!cleared || resetting_or_flushing) {
DVLOGF(3) << "cleared=" << pending_picture_ready_.front().cleared
<< ", decoder_resetting_=" << decoder_resetting_
<< ", decoder_flushing_=" << decoder_flushing_
<< ", picture_clearing_count_=" << picture_clearing_count_;
DVLOGF(4) << "Posting picture ready to GPU for: "
<< picture.picture_buffer_id();
// If the picture is not cleared, post it to the child thread because it
// has to be cleared in the child thread. A picture only needs to be
      // cleared once. If the decoder is resetting or flushing, send all
      // pictures to ensure PictureReady arrives before the reset or flush is
      // done.
child_message_loop_proxy_->PostTaskAndReply(
FROM_HERE, base::Bind(&Client::PictureReady, client_, picture),
// Unretained is safe. If Client::PictureReady gets to run, |this| is
          // alive. Destroy() will wait for the decoder thread to finish.
base::Bind(&V4L2SliceVideoDecodeAccelerator::PictureCleared,
base::Unretained(this)));
picture_clearing_count_++;
pending_picture_ready_.pop();
} else {
// This picture is cleared. But some pictures are about to be cleared on
// the child thread. To preserve the order, do not send this until those
// pictures are cleared.
break;
}
}
}
void V4L2SliceVideoDecodeAccelerator::PictureCleared() {
DVLOGF(3) << "clearing count=" << picture_clearing_count_;
DCHECK(decoder_thread_proxy_->BelongsToCurrentThread());
DCHECK_GT(picture_clearing_count_, 0);
picture_clearing_count_--;
SendPictureReady();
}
bool V4L2SliceVideoDecodeAccelerator::CanDecodeOnIOThread() {
return true;
}
} // namespace content
|
fujunwei/chromium-crosswalk
|
content/common/gpu/media/v4l2_slice_video_decode_accelerator.cc
|
C++
|
bsd-3-clause
| 88,604
|
#main{
padding-left:30px;
}
|
yhong/nf_hack
|
resources/css/default/main.css
|
CSS
|
bsd-3-clause
| 34
|
package org.consec.oauth2.authzserver.common;
public class Constants {
public static final String SESSION_PARAM_OWNER_UUID = "owner_uuid";
public static final String SESSION_PARAM_AUTHENTICATED = "authenticated";
}
|
consec/ConSec
|
oauth2/oauth2-authz-server/src/main/java/org/consec/oauth2/authzserver/common/Constants.java
|
Java
|
bsd-3-clause
| 224
|
# Copyright (c) 2020, Apple Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-3-clause license that can be
# found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause
import itertools
import numpy as np
import pytest
from coremltools.converters.mil.mil import Builder as mb
from coremltools.converters.mil.testing_utils import (
assert_model_is_valid,
get_op_types_in_program,
apply_pass_and_basic_check,
)
@pytest.mark.parametrize("op_type, pos, val", itertools.product(['add', 'mul', 'floor_div', 'pow', 'real_div', 'sub'], ['x', 'y'], [0, 1, [0, 0, 0, 0], [1, 1, 1, 1]]))
def test_elementwise_elimination(op_type, pos, val):
if 'div' in op_type and np.prod(val) == 0:
return
if 'pow' in op_type and (val != 0 or val != 1):
return
test_op = getattr(mb, op_type)
@mb.program(input_specs=[mb.TensorSpec(shape=(2, 4))])
def prog(x):
if pos == "x":
r1 = test_op(x=val, y=x)
else:
r1 = test_op(x=x, y=val)
return mb.relu(x=r1)
prev_prog, prev_block, block = apply_pass_and_basic_check(
prog, "common::noop_elimination"
)
original_program = [op_type, "relu"]
new_program = original_program
if op_type in {'add'}:
if val == 0 or val == [0, 0, 0, 0]:
new_program = ["relu"]
elif op_type in {'mul'}:
if val == 1 or val == [1, 1, 1, 1]:
new_program = ["relu"]
elif op_type in {'real_div'}:
# TODO(rdar://79925291): Remove this branch and add `real_div` to the
# following elif once fp32 casts for `real_div` are no longer required.
original_program = ["cast"] + original_program
new_program = original_program
if pos == 'y' and (val == 1 or val == [1, 1, 1, 1]):
new_program = ["cast", "relu"]
elif op_type in {'pow', 'floor_div'}:
if pos == 'y' and (val == 1 or val == [1, 1, 1, 1]):
new_program = ["relu"]
elif op_type in {'sub'}:
if pos == 'y' and (val == 0 or val == [0, 0, 0, 0]):
new_program = ["relu"]
assert get_op_types_in_program(prev_prog) == original_program
assert get_op_types_in_program(prog) == new_program
assert_model_is_valid(
prog,
{"x": (2, 4)},
expected_output_shapes={block.outputs[0].name: (2, 4)},
)
def test_elementwise_broadcast():
@mb.program(input_specs=[mb.TensorSpec(shape=[4])])
def prog(x):
r1 = mb.add(x=x, y=[[0, 0, 0, 0], [0, 0, 0, 0]])
return mb.relu(x=r1)
prev_prog, prev_block, block = apply_pass_and_basic_check(
prog, "common::noop_elimination"
)
original_program = ["add", "relu"]
assert get_op_types_in_program(prev_prog) == original_program
assert get_op_types_in_program(prog) == original_program
assert_model_is_valid(
prog,
{"x": [4]},
expected_output_shapes={block.outputs[0].name: (2, 4)},
)
def test_reshape_elimination():
@mb.program(input_specs=[mb.TensorSpec(shape=(2, 4))])
def prog(x):
r1 = mb.reshape(x=x, shape=[1, 8])
r2 = mb.reshape(x=r1, shape=[1, 8])
return mb.relu(x=r1)
prev_prog, prev_block, block = apply_pass_and_basic_check(
prog, "common::noop_elimination"
)
assert get_op_types_in_program(prev_prog) == ["reshape", "reshape", "relu"]
assert get_op_types_in_program(prog) == ["reshape", "relu"]
assert_model_is_valid(
prog,
{"x": (2, 4)},
expected_output_shapes={block.outputs[0].name: (1, 8)},
)
def test_oneway_split_elimination():
@mb.program(input_specs=[mb.TensorSpec(shape=(2, 4))])
def prog(x):
r1 = mb.split(x=x, num_splits=1, axis=-1)
return mb.relu(x=r1)
prev_prog, prev_block, block = apply_pass_and_basic_check(
prog, "common::noop_elimination"
)
assert get_op_types_in_program(prev_prog) == ["split", "relu"]
assert get_op_types_in_program(prog) == ["relu"]
assert_model_is_valid(
prog,
{"x": (2, 4)},
expected_output_shapes={block.outputs[0].name: (2, 4)},
)
def test_full_split_elimination():
@mb.program(input_specs=[mb.TensorSpec(shape=(2, 4))])
def prog(x):
r1 = mb.split(x=x, split_sizes=[4], axis=-1)
return mb.relu(x=r1)
prev_prog, prev_block, block = apply_pass_and_basic_check(
prog, "common::noop_elimination"
)
assert get_op_types_in_program(prev_prog) == ["split", "relu"]
assert get_op_types_in_program(prog) == ["relu"]
assert_model_is_valid(
prog,
{"x": (2, 4)},
expected_output_shapes={block.outputs[0].name: (2, 4)},
)
def test_slicebysize_full_elimination():
@mb.program(input_specs=[mb.TensorSpec(shape=(2, 4))])
def prog(x):
r1 = mb.slice_by_size(x=x, begin=[0, 0], size=[2, 4])
return mb.relu(x=r1)
prev_prog, prev_block, block = apply_pass_and_basic_check(
prog, "common::noop_elimination"
)
assert get_op_types_in_program(prev_prog) == ["slice_by_size", "relu"]
assert get_op_types_in_program(prog) == ["relu"]
assert_model_is_valid(
prog,
{"x": (2, 4)},
expected_output_shapes={block.outputs[0].name: (2, 4)},
)
def test_slicebysize_to_end_elimination():
@mb.program(input_specs=[mb.TensorSpec(shape=(2, 4))])
def prog(x):
r1 = mb.slice_by_size(x=x, begin=[0, 0], size=[-1, -1])
return mb.relu(x=r1)
prev_prog, prev_block, block = apply_pass_and_basic_check(
prog, "common::noop_elimination"
)
assert get_op_types_in_program(prev_prog) == ["slice_by_size", "relu"]
assert get_op_types_in_program(prog) == ["relu"]
assert_model_is_valid(
prog,
{"x": (2, 4)},
expected_output_shapes={block.outputs[0].name: (2, 4)},
)
def test_slicebyindex_full_elimination():
@mb.program(input_specs=[mb.TensorSpec(shape=(2, 4))])
def prog(x):
r1 = mb.slice_by_index(x=x, begin=[0, 0], end=[2, 4])
return mb.relu(x=r1)
prev_prog, prev_block, block = apply_pass_and_basic_check(
prog, "common::noop_elimination"
)
assert get_op_types_in_program(prev_prog) == ["slice_by_index", "relu"]
assert get_op_types_in_program(prog) == ["relu"]
assert_model_is_valid(
prog,
{"x": (2, 4)},
expected_output_shapes={block.outputs[0].name: (2, 4)},
)
@pytest.mark.parametrize("begin_mask, end_mask",
itertools.product(itertools.product([True, False],[True, False]),
itertools.product([True, False],[True, False])))
def test_slicebyindex_mask_elimination(begin_mask, end_mask):
@mb.program(input_specs=[mb.TensorSpec(shape=(4, 4))])
def prog(x):
begin = [1, 1]
end = [1, 1]
for i in range(2):
if not begin_mask[i]:
begin[i] = 0
if not end_mask[i]:
end[i] = 4
r1 = mb.slice_by_index(x=x, begin=begin, end=end, begin_mask=begin_mask, end_mask=end_mask)
return mb.relu(x=r1)
prev_prog, prev_block, block = apply_pass_and_basic_check(
prog, "common::noop_elimination"
)
assert get_op_types_in_program(prev_prog) == ["slice_by_index", "relu"]
assert get_op_types_in_program(prog) == ["relu"]
assert_model_is_valid(
prog,
{"x": (4, 4)},
expected_output_shapes={block.outputs[0].name: (4, 4)},
)
def test_pad_elimination():
@mb.program(input_specs=[mb.TensorSpec(shape=(2, 4))])
def prog(x):
r1 = mb.pad(x=x, pad=[0, 0, 0, 0])
return mb.relu(x=r1)
prev_prog, prev_block, block = apply_pass_and_basic_check(
prog, "common::noop_elimination"
)
assert get_op_types_in_program(prev_prog) == ["pad", "relu"]
assert get_op_types_in_program(prog) == ["relu"]
assert_model_is_valid(
prog,
{"x": (2, 4)},
expected_output_shapes={block.outputs[0].name: (2, 4)},
)
def test_keep_pad():
@mb.program(input_specs=[mb.TensorSpec(shape=(2, 4))])
def prog(x):
r1 = mb.pad(x=x, pad=[4, 4, 2, 2])
return mb.relu(x=r1)
prev_prog, prev_block, block = apply_pass_and_basic_check(
prog, "common::noop_elimination"
)
assert get_op_types_in_program(prev_prog) == ["pad", "relu"]
assert get_op_types_in_program(prog) == ["pad", "relu"]
assert_model_is_valid(
prog,
{"x": (2, 4)},
expected_output_shapes={block.outputs[0].name: (10, 8)},
)
def test_tile_elimination():
@mb.program(input_specs=[mb.TensorSpec(shape=(2, 4))])
def prog(x):
r1 = mb.tile(x=x, reps=[1, 1])
return mb.relu(x=r1)
prev_prog, prev_block, block = apply_pass_and_basic_check(
prog, "common::noop_elimination"
)
assert get_op_types_in_program(prev_prog) == ["tile", "relu"]
assert get_op_types_in_program(prog) == ["relu"]
assert_model_is_valid(
prog,
{"x": (2, 4)},
expected_output_shapes={block.outputs[0].name: (2, 4)},
)
def test_keep_tile():
@mb.program(input_specs=[mb.TensorSpec(shape=(2, 4))])
def prog(x):
r1 = mb.tile(x=x, reps=[2, 2])
return mb.relu(x=r1)
prev_prog, prev_block, block = apply_pass_and_basic_check(
prog, "common::noop_elimination"
)
assert get_op_types_in_program(prev_prog) == ["tile", "relu"]
assert get_op_types_in_program(prog) == ["tile", "relu"]
assert_model_is_valid(
prog,
{"x": (2, 4)},
expected_output_shapes={block.outputs[0].name: (4, 8)},
)
def test_upsample_nearest_neighbor_elimination():
@mb.program(input_specs=[mb.TensorSpec(shape=(3, 2, 4))])
def prog(x):
r1 = mb.upsample_nearest_neighbor(x=x)
return mb.relu(x=r1)
prev_prog, prev_block, block = apply_pass_and_basic_check(
prog, "common::noop_elimination"
)
assert get_op_types_in_program(prev_prog) == ["upsample_nearest_neighbor", "relu"]
assert get_op_types_in_program(prog) == ["relu"]
assert_model_is_valid(
prog,
{"x": (3, 2, 4)},
expected_output_shapes={block.outputs[0].name: (3, 2, 4)},
)
def test_upsample_bilinear_elimination():
@mb.program(input_specs=[mb.TensorSpec(shape=(3, 2, 4))])
def prog(x):
r1 = mb.upsample_bilinear(x=x)
return mb.relu(x=r1)
prev_prog, prev_block, block = apply_pass_and_basic_check(
prog, "common::noop_elimination"
)
assert get_op_types_in_program(prev_prog) == ["upsample_bilinear", "relu"]
assert get_op_types_in_program(prog) == ["relu"]
assert_model_is_valid(
prog,
{"x": (3, 2, 4)},
expected_output_shapes={block.outputs[0].name: (3, 2, 4)},
)
def test_resize_bilinear_elimination():
@mb.program(input_specs=[mb.TensorSpec(shape=(3, 2, 4))])
def prog(x):
r1 = mb.resize_bilinear(x=x, target_size_height=2, target_size_width=4)
return mb.relu(x=r1)
prev_prog, prev_block, block = apply_pass_and_basic_check(
prog, "common::noop_elimination"
)
assert get_op_types_in_program(prev_prog) == ["resize_bilinear", "relu"]
assert get_op_types_in_program(prog) == ["relu"]
assert_model_is_valid(
prog,
{"x": (3, 2, 4)},
expected_output_shapes={block.outputs[0].name: (3, 2, 4)},
)
def test_crop_elimination():
@mb.program(input_specs=[mb.TensorSpec(shape=(3, 2, 4))])
def prog(x):
r1 = mb.crop(x=x, crop_height=[0, 0], crop_width=[0, 0])
return mb.relu(x=r1)
prev_prog, prev_block, block = apply_pass_and_basic_check(
prog, "common::noop_elimination"
)
assert get_op_types_in_program(prev_prog) == ["crop", "relu"]
assert get_op_types_in_program(prog) == ["relu"]
assert_model_is_valid(
prog,
{"x": (3, 2, 4)},
expected_output_shapes={block.outputs[0].name: (3, 2, 4)},
)
def test_linear_elimination():
@mb.program(input_specs=[mb.TensorSpec(shape=(2, 4))])
def prog(x):
r1 = mb.linear_activation(x=x, alpha=1.0, beta=0.0)
return mb.relu(x=r1)
prev_prog, prev_block, block = apply_pass_and_basic_check(
prog, "common::noop_elimination"
)
assert get_op_types_in_program(prev_prog) == ["linear_activation", "relu"]
assert get_op_types_in_program(prog) == ["relu"]
assert_model_is_valid(
prog,
{"x": (2, 4)},
expected_output_shapes={block.outputs[0].name: (2, 4)},
)
def test_transpose_elimination():
@mb.program(input_specs=[mb.TensorSpec(shape=(2, 3, 4))])
def prog(x):
r1 = mb.transpose(x=x, perm=[0, 1, 2])
return mb.relu(x=r1)
prev_prog, prev_block, block = apply_pass_and_basic_check(
prog, "common::noop_elimination"
)
assert get_op_types_in_program(prev_prog) == ["transpose", "relu"]
assert get_op_types_in_program(prog) == ["relu"]
assert_model_is_valid(
prog,
{"x": (2, 3, 4)},
expected_output_shapes={block.outputs[0].name: (2, 3, 4)},
)
|
apple/coremltools
|
coremltools/converters/mil/mil/passes/test_noop_elimination.py
|
Python
|
bsd-3-clause
| 13,225
|
---
layout: organization
category: local
title: Casabe House
impact_area: The Elderly
keywords:
location_services:
location_offices:
website: www.nycservice.org/organizations/893
description:
mission: |
A senior citizen’s residence at 143 East 120th Street in East Harlem
cash_grants:
grants:
service_opp:
services:
learn:
cont_relationship:
salutation:
first_name:
last_name:
title_contact_person:
city: New York
state: NY
address: |
150 East 121st Street
New York NY 10035
lat: 40.801479
lng: -73.938485
phone: 212-410-6030
ext:
fax: 212-996-8134
email:
preferred_contact:
contact_person_intro:
---
|
flipside-org/penny-harvest
|
_posts/organizations/2015-01-12-O228.md
|
Markdown
|
bsd-3-clause
| 637
|
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE190_Integer_Overflow__int_fgets_square_05.c
Label Definition File: CWE190_Integer_Overflow__int.label.xml
Template File: sources-sinks-05.tmpl.c
*/
/*
* @description
* CWE: 190 Integer Overflow
* BadSource: fgets Read data from the console using fgets()
* GoodSource: Set data to a small, non-zero number (two)
* Sinks: square
* GoodSink: Ensure there will not be an overflow before squaring data
* BadSink : Square data, which can lead to overflow
* Flow Variant: 05 Control flow: if(staticTrue) and if(staticFalse)
*
* */
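/* Note on the GoodSink check used below: squaring a 32-bit int is safe when
 * abs(data) <= sqrt(INT_MAX) (about 46340), so the fixed variants verify
 * abs(data) < (long)sqrt((double)INT_MAX) before multiplying; the extra
 * data > INT_MIN test avoids calling abs(INT_MIN), which is undefined. */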
#include "std_testcase.h"
#define CHAR_ARRAY_SIZE (3 * sizeof(data) + 2)
#include <math.h>
/* The two variables below are not defined as "const", but are never
assigned any other value, so a tool should be able to identify that
reads of these will always return their initialized values. */
static int staticTrue = 1; /* true */
static int staticFalse = 0; /* false */
#ifndef OMITBAD
void CWE190_Integer_Overflow__int_fgets_square_05_bad()
{
int data;
/* Initialize data */
data = 0;
if(staticTrue)
{
{
char inputBuffer[CHAR_ARRAY_SIZE] = "";
/* POTENTIAL FLAW: Read data from the console using fgets() */
if (fgets(inputBuffer, CHAR_ARRAY_SIZE, stdin) != NULL)
{
/* Convert to int */
data = atoi(inputBuffer);
}
else
{
printLine("fgets() failed.");
}
}
}
if(staticTrue)
{
{
/* POTENTIAL FLAW: if (data*data) > INT_MAX, this will overflow */
int result = data * data;
printIntLine(result);
}
}
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* goodB2G1() - use badsource and goodsink by changing the second staticTrue to staticFalse */
static void goodB2G1()
{
int data;
/* Initialize data */
data = 0;
if(staticTrue)
{
{
char inputBuffer[CHAR_ARRAY_SIZE] = "";
/* POTENTIAL FLAW: Read data from the console using fgets() */
if (fgets(inputBuffer, CHAR_ARRAY_SIZE, stdin) != NULL)
{
/* Convert to int */
data = atoi(inputBuffer);
}
else
{
printLine("fgets() failed.");
}
}
}
if(staticFalse)
{
/* INCIDENTAL: CWE 561 Dead Code, the code below will never run */
printLine("Benign, fixed string");
}
else
{
/* FIX: Add a check to prevent an overflow from occurring */
if (data > INT_MIN && abs(data) < (long)sqrt((double)INT_MAX))
{
int result = data * data;
printIntLine(result);
}
else
{
printLine("data value is too large to perform arithmetic safely.");
}
}
}
/* goodB2G2() - use badsource and goodsink by reversing the blocks in the second if */
static void goodB2G2()
{
int data;
/* Initialize data */
data = 0;
if(staticTrue)
{
{
char inputBuffer[CHAR_ARRAY_SIZE] = "";
/* POTENTIAL FLAW: Read data from the console using fgets() */
if (fgets(inputBuffer, CHAR_ARRAY_SIZE, stdin) != NULL)
{
/* Convert to int */
data = atoi(inputBuffer);
}
else
{
printLine("fgets() failed.");
}
}
}
if(staticTrue)
{
/* FIX: Add a check to prevent an overflow from occurring */
if (data > INT_MIN && abs(data) < (long)sqrt((double)INT_MAX))
{
int result = data * data;
printIntLine(result);
}
else
{
printLine("data value is too large to perform arithmetic safely.");
}
}
}
/* goodG2B1() - use goodsource and badsink by changing the first staticTrue to staticFalse */
static void goodG2B1()
{
int data;
/* Initialize data */
data = 0;
if(staticFalse)
{
/* INCIDENTAL: CWE 561 Dead Code, the code below will never run */
printLine("Benign, fixed string");
}
else
{
/* FIX: Use a small, non-zero value that will not cause an integer overflow in the sinks */
data = 2;
}
if(staticTrue)
{
{
/* POTENTIAL FLAW: if (data*data) > INT_MAX, this will overflow */
int result = data * data;
printIntLine(result);
}
}
}
/* goodG2B2() - use goodsource and badsink by reversing the blocks in the first if */
static void goodG2B2()
{
int data;
/* Initialize data */
data = 0;
if(staticTrue)
{
/* FIX: Use a small, non-zero value that will not cause an integer overflow in the sinks */
data = 2;
}
if(staticTrue)
{
{
/* POTENTIAL FLAW: if (data*data) > INT_MAX, this will overflow */
int result = data * data;
printIntLine(result);
}
}
}
void CWE190_Integer_Overflow__int_fgets_square_05_good()
{
goodB2G1();
goodB2G2();
goodG2B1();
goodG2B2();
}
#endif /* OMITGOOD */
/* Below is the main(). It is only used when building this testcase on
its own for testing or for building a binary to use in testing binary
analysis tools. It is not used when compiling all the testcases as one
application, which is how source code analysis tools are tested. */
#ifdef INCLUDEMAIN
int main(int argc, char * argv[])
{
/* seed randomness */
srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
printLine("Calling good()...");
CWE190_Integer_Overflow__int_fgets_square_05_good();
printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
printLine("Calling bad()...");
CWE190_Integer_Overflow__int_fgets_square_05_bad();
printLine("Finished bad()");
#endif /* OMITBAD */
return 0;
}
#endif
|
JianpingZeng/xcc
|
xcc/test/juliet/testcases/CWE190_Integer_Overflow/s03/CWE190_Integer_Overflow__int_fgets_square_05.c
|
C
|
bsd-3-clause
| 6,233
|
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<meta name="generator" content="Doxygen 1.8.10"/>
<title>HE_Mesh: Member List</title>
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="navtree.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="resize.js"></script>
<script type="text/javascript" src="navtreedata.js"></script>
<script type="text/javascript" src="navtree.js"></script>
<script type="text/javascript">
$(document).ready(initResizable);
$(window).load(resizeHeight);
</script>
<link href="search/search.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="search/searchdata.js"></script>
<script type="text/javascript" src="search/search.js"></script>
<script type="text/javascript">
$(document).ready(function() { init_search(); });
</script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
<tbody>
<tr style="height: 56px;">
<td id="projectalign" style="padding-left: 0.5em;">
<div id="projectname">HE_Mesh
 <span id="projectnumber">6.0.1</span>
</div>
</td>
</tr>
</tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.8.10 -->
<script type="text/javascript">
var searchBox = new SearchBox("searchBox", "search",false,'Search');
</script>
<div id="navrow1" class="tabs">
<ul class="tablist">
<li><a href="index.html"><span>Main Page</span></a></li>
<li><a href="namespaces.html"><span>Packages</span></a></li>
<li class="current"><a href="annotated.html"><span>Classes</span></a></li>
<li><a href="files.html"><span>Files</span></a></li>
<li>
<div id="MSearchBox" class="MSearchBoxInactive">
<span class="left">
<img id="MSearchSelect" src="search/mag_sel.png"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
alt=""/>
<input type="text" id="MSearchField" value="Search" accesskey="S"
onfocus="searchBox.OnSearchFieldFocus(true)"
onblur="searchBox.OnSearchFieldFocus(false)"
onkeyup="searchBox.OnSearchFieldChange(event)"/>
</span><span class="right">
<a id="MSearchClose" href="javascript:searchBox.CloseResultsWindow()"><img id="MSearchCloseImg" border="0" src="search/close.png" alt=""/></a>
</span>
</div>
</li>
</ul>
</div>
<div id="navrow2" class="tabs2">
<ul class="tablist">
<li><a href="annotated.html"><span>Class List</span></a></li>
<li><a href="classes.html"><span>Class Index</span></a></li>
<li><a href="hierarchy.html"><span>Class Hierarchy</span></a></li>
<li><a href="functions.html"><span>Class Members</span></a></li>
</ul>
</div>
</div><!-- top -->
<div id="side-nav" class="ui-resizable side-nav-resizable">
<div id="nav-tree">
<div id="nav-tree-contents">
<div id="nav-sync" class="sync"></div>
</div>
</div>
<div id="splitbar" style="-moz-user-select:none;"
class="ui-resizable-handle">
</div>
</div>
<script type="text/javascript">
$(document).ready(function(){initNavTree('classwblut_1_1hemesh_1_1_h_e_t___mesh_op_1_1_h_e_t___intersection_result.html','');});
</script>
<div id="doc-content">
<!-- window showing the filter options -->
<div id="MSearchSelectWindow"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
onkeydown="return searchBox.OnSearchSelectKey(event)">
</div>
<!-- iframe showing the search results (closed by default) -->
<div id="MSearchResultsWindow">
<iframe src="javascript:void(0)" frameborder="0"
name="MSearchResults" id="MSearchResults">
</iframe>
</div>
<div class="header">
<div class="headertitle">
<div class="title">wblut.hemesh.HET_MeshOp.HET_IntersectionResult Member List</div> </div>
</div><!--header-->
<div class="contents">
<p>This is the complete list of members for <a class="el" href="classwblut_1_1hemesh_1_1_h_e_t___mesh_op_1_1_h_e_t___intersection_result.html">wblut.hemesh.HET_MeshOp.HET_IntersectionResult</a>, including all inherited members.</p>
<table class="directory">
<tr class="even"><td class="entry"><a class="el" href="classwblut_1_1hemesh_1_1_h_e_t___mesh_op_1_1_h_e_t___intersection_result.html#a3ad241c7c8a175873c8ff429be676da9">getFace1</a>()</td><td class="entry"><a class="el" href="classwblut_1_1hemesh_1_1_h_e_t___mesh_op_1_1_h_e_t___intersection_result.html">wblut.hemesh.HET_MeshOp.HET_IntersectionResult</a></td><td class="entry"></td></tr>
<tr><td class="entry"><a class="el" href="classwblut_1_1hemesh_1_1_h_e_t___mesh_op_1_1_h_e_t___intersection_result.html#a9aa5bc0670b19e16f75f3acbb1041388">getFace2</a>()</td><td class="entry"><a class="el" href="classwblut_1_1hemesh_1_1_h_e_t___mesh_op_1_1_h_e_t___intersection_result.html">wblut.hemesh.HET_MeshOp.HET_IntersectionResult</a></td><td class="entry"></td></tr>
<tr class="even"><td class="entry"><a class="el" href="classwblut_1_1hemesh_1_1_h_e_t___mesh_op_1_1_h_e_t___intersection_result.html#a8c45aeee2203feb1c905a2570f8eece7">getSegment</a>()</td><td class="entry"><a class="el" href="classwblut_1_1hemesh_1_1_h_e_t___mesh_op_1_1_h_e_t___intersection_result.html">wblut.hemesh.HET_MeshOp.HET_IntersectionResult</a></td><td class="entry"></td></tr>
<tr><td class="entry"><a class="el" href="classwblut_1_1hemesh_1_1_h_e_t___mesh_op_1_1_h_e_t___intersection_result.html#a477ff2abccd45335744be36f240d5dba">HET_IntersectionResult</a>(final HE_Face f1, final HE_Face f2, final WB_Segment seg)</td><td class="entry"><a class="el" href="classwblut_1_1hemesh_1_1_h_e_t___mesh_op_1_1_h_e_t___intersection_result.html">wblut.hemesh.HET_MeshOp.HET_IntersectionResult</a></td><td class="entry"></td></tr>
</table></div><!-- contents -->
</div><!-- doc-content -->
<!-- start footer part -->
<div id="nav-path" class="navpath"><!-- id is needed for treeview function! -->
<ul>
<li class="footer">Generated on Tue Dec 19 2017 21:20:58 for HE_Mesh by
<a href="http://www.doxygen.org/index.html">
<img class="footer" src="doxygen.png" alt="doxygen"/></a> 1.8.10 </li>
</ul>
</div>
</body>
</html>
|
DweebsUnited/CodeMonkey
|
resources/hemesh/ref/html/classwblut_1_1hemesh_1_1_h_e_t___mesh_op_1_1_h_e_t___intersection_result-members.html
|
HTML
|
bsd-3-clause
| 6,761
|
* Hackage: <http://hackage.haskell.org/package/linearEqSolver>
* GitHub: <http://github.com/LeventErkok/linearEqSolver>
* Latest Hackage released version: 2.1
### Version 2.1, 2019-5-13
* Update to compile with more recent versions of SBV. Thanks to
Colin McKibben for reporting the breakage.
### Version 2.0, 2017-10-25
* Use defaultSMTConfig exported from SBV
* All-solution variants now take the max-number of solutions requested,
following the corresponding changes in SBV itself. Thanks to Mitchell Rosen
for reporting.
### Version 1.3, 2014-08-27
* Use the Solver type from SBV directly for picking the solver, avoiding bit-rot.
* Adjust SBV dependency to >= 3.1, to get proper access to Solver type
### Version 1.2, 2013-01-02
* Allow both CVC4 and Z3 to be used as the SMT solver.
* Adjust SBV dependency to >= 2.9, to get access to CVC4.
### Version 1.1, 2012-10-22
* Add solvers over rationals, in addition to just integers.
* Adjust SBV dependency to >= 2.7 as we depend on the new Real
instance for the AlgReal type.
### Version 1.0, 2012-10-18
* Initial release, contains solver for integer linear equations.
|
LeventErkok/linearEqSolver
|
CHANGES.md
|
Markdown
|
bsd-3-clause
| 1,171
|
import logging
import os
# URL to clone product_details JSON files from.
# Include trailing slash.
PROD_DETAILS_URL = 'http://svn.mozilla.org/libs/product-details/json/'
# Target dir to drop JSON files into (must be writable)
PROD_DETAILS_DIR = os.path.join(os.path.dirname(__file__), 'json')
# log level.
LOG_LEVEL = logging.INFO
|
pmclanahan/django-mozilla-product-details
|
product_details/settings_defaults.py
|
Python
|
bsd-3-clause
| 335
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.plasmarobotics.jim.sensors;
import edu.wpi.first.wpilibj.AnalogChannel;
/**
 * This class reads an ultrasonic rangefinder as an analog input
* @author jim
*
*/
public class SonicRange {
private AnalogChannel sensor;
private double rangeConstant;
/**
* Creates an instance of SonicRange at the specified channel with
* default rangeConstant of .0098
     * @param channel Analog channel of the sensor
*/
public SonicRange(int channel){
sensor = new AnalogChannel(channel);
rangeConstant = .0098;
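        // 0.0098 V per inch (roughly 9.8 mV/inch); which analog ultrasonic
        // sensor this default was tuned for is an assumption, not documented
        // here.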
}
/**
     * Creates an instance of SonicRange at the specified channel along
     * with a constant used to convert voltage to range
     * @param channel Analog channel of the sensor
* @param rangeConstant Constant used with voltage to calculate distance
*/
public SonicRange(int channel, double rangeConstant){
sensor = new AnalogChannel(channel);
this.rangeConstant = rangeConstant;
}
/**
* Sets the rangeConstant of the sensor
* @param rangeConstant Constant used with voltage to calculate distance
*/
public void setRangeConstant(float rangeConstant){
this.rangeConstant = rangeConstant;
}
/**
* gets the current voltage of the sensor
* @return the voltage of the sensor
*/
public double getVoltage(){
return sensor.getVoltage();
}
/**
* Determines how far away the sensor is from the target
* @return Distance (inches)
*/
public double getDistance(){
return sensor.getVoltage()/rangeConstant;
}
}
|
dumbnut7410/Frobo
|
src/org/plasmarobotics/jim/sensors/SonicRange.java
|
Java
|
bsd-3-clause
| 1,803
|
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.cronet_sample_apk;
import android.util.Log;
/**
* Cronet Library Loader.
*/
public class LibraryLoader {
private static final String TAG = "LibraryLoader";
private static Boolean sInitialized = false;
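    // Note: access to sInitialized is not synchronized, so this assumes
    // ensureInitialized() is only called from a single thread.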
public static void ensureInitialized() throws UnsatisfiedLinkError {
if (sInitialized)
return;
sInitialized = true;
System.loadLibrary("cronet");
Log.i(TAG, "libcronet initialization success.");
}
}
|
patrickm/chromium.src
|
net/cronet/android/sample/src/org/chromium/cronet_sample_apk/LibraryLoader.java
|
Java
|
bsd-3-clause
| 653
|
/* image replacement */
.graphic, #prevBtn, #nextBtn, #slider1prev, #slider1next{
margin:0;
padding:0;
display:block;
overflow:hidden;
text-indent:-8000px;
}
/* Easy Slider */
#slider ul, #slider li,
#slider2 ul, #slider2 li{
margin:0;
padding:0;
list-style:none;
}
#slider2{margin-top:1em;}
#slider li, #slider2 li{
/*
define width and height of list item (slide)
entire slider area will adjust according to the parameters provided here
*/
margin-top: 16px;
width:710px;
height:200px;
overflow:hidden;
}
#prevBtn, #nextBtn,
#slider1next, #slider1prev{
display:block;
width:30px;
height:77px;
position:absolute;
left:-30px;
top:71px;
z-index:1000;
}
#nextBtn, #slider1next{
left:696px;
}
#prevBtn a, #nextBtn a,
#slider1next a, #slider1prev a{
display:block;
position:relative;
width:30px;
height:77px;
background:url(../images/btn_prev.gif) no-repeat 0 0;
}
#nextBtn a, #slider1next a{
background:url(../images/btn_next.gif) no-repeat 0 0;
}
/* numeric controls */
ol#controls{
margin:-2em 0;
padding-left: 10px;
height:28px;
}
ol#controls li{
margin:0 10px 0 0;
padding:0;
float:left;
list-style:none;
height:28px;
line-height:28px;
}
ol#controls li a{
float:left;
height:28px;
line-height:28px;
border:1px solid #ccc;
background:#DAF3F8;
color:#555;
padding:0 10px;
text-decoration:none;
}
ol#controls li.current a{
background:#5DC9E1;
color:#fff;
}
ol#controls li a:focus, #prevBtn a:focus, #nextBtn a:focus{outline:none;}
/* // Easy Slider */
|
irvingoliveira/SIGEPROC
|
public/css/screen.css
|
CSS
|
bsd-3-clause
| 1,687
|
"""
Luminous Efficiency Functions Spectral Distributions
====================================================
Defines the luminous efficiency functions computation related objects.
References
----------
- :cite:`Wikipedia2005d` : Wikipedia. (2005). Mesopic weighting function.
Retrieved June 20, 2014, from
http://en.wikipedia.org/wiki/Mesopic_vision#Mesopic_weighting_function
"""
from __future__ import annotations
from colour.colorimetry import (
SDS_LEFS_PHOTOPIC,
SDS_LEFS_SCOTOPIC,
SpectralDistribution,
SpectralShape,
)
from colour.colorimetry.datasets.lefs import DATA_MESOPIC_X
from colour.hints import (
Floating,
FloatingOrArrayLike,
FloatingOrNDArray,
Literal,
Optional,
Union,
cast,
)
from colour.utilities import closest, optional, validate_method
__author__ = "Colour Developers"
__copyright__ = "Copyright 2013 Colour Developers"
__license__ = "New BSD License - https://opensource.org/licenses/BSD-3-Clause"
__maintainer__ = "Colour Developers"
__email__ = "colour-developers@colour-science.org"
__status__ = "Production"
__all__ = [
"mesopic_weighting_function",
"sd_mesopic_luminous_efficiency_function",
]
def mesopic_weighting_function(
wavelength: FloatingOrArrayLike,
L_p: Floating,
source: Union[Literal["Blue Heavy", "Red Heavy"], str] = "Blue Heavy",
method: Union[Literal["MOVE", "LRC"], str] = "MOVE",
photopic_lef: Optional[SpectralDistribution] = None,
scotopic_lef: Optional[SpectralDistribution] = None,
) -> FloatingOrNDArray:
"""
Calculate the mesopic weighting function factor :math:`V_m` at given
wavelength :math:`\\lambda` using the photopic luminance :math:`L_p`.
Parameters
----------
wavelength
Wavelength :math:`\\lambda` to calculate the mesopic weighting function
factor.
L_p
Photopic luminance :math:`L_p`.
source
Light source colour temperature.
method
Method to calculate the weighting factor.
    photopic_lef
        :math:`V(\\lambda)` photopic luminous efficiency function, defaults to
        the *CIE 1924 Photopic Standard Observer*.
    scotopic_lef
        :math:`V^\\prime(\\lambda)` scotopic luminous efficiency function,
        defaults to the *CIE 1951 Scotopic Standard Observer*.
Returns
-------
:class:`numpy.floating` or :class:`numpy.ndarray`
Mesopic weighting function factor :math:`V_m`.
References
----------
:cite:`Wikipedia2005d`
Examples
--------
>>> mesopic_weighting_function(500, 0.2) # doctest: +ELLIPSIS
0.7052200...
"""
photopic_lef = cast(
SpectralDistribution,
optional(
photopic_lef,
SDS_LEFS_PHOTOPIC["CIE 1924 Photopic Standard Observer"],
),
)
scotopic_lef = cast(
SpectralDistribution,
optional(
scotopic_lef,
SDS_LEFS_SCOTOPIC["CIE 1951 Scotopic Standard Observer"],
),
)
source = validate_method(
source,
["Blue Heavy", "Red Heavy"],
'"{0}" light source colour temperature is invalid, '
"it must be one of {1}!",
)
method = validate_method(method, ["MOVE", "LRC"])
mesopic_x_luminance_values = sorted(DATA_MESOPIC_X.keys())
index = mesopic_x_luminance_values.index(
closest(mesopic_x_luminance_values, L_p)
)
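    # ``L_p`` is snapped to the closest tabulated adaptation luminance before
    # looking up the photopic/scotopic mixing ratio ``x``.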
x = DATA_MESOPIC_X[mesopic_x_luminance_values[index]][source][method]
V_m = (1 - x) * scotopic_lef[wavelength] + x * photopic_lef[wavelength]
return V_m
def sd_mesopic_luminous_efficiency_function(
L_p: Floating,
source: Union[Literal["Blue Heavy", "Red Heavy"], str] = "Blue Heavy",
method: Union[Literal["MOVE", "LRC"], str] = "MOVE",
photopic_lef: Optional[SpectralDistribution] = None,
scotopic_lef: Optional[SpectralDistribution] = None,
) -> SpectralDistribution:
"""
Return the mesopic luminous efficiency function :math:`V_m(\\lambda)` for
given photopic luminance :math:`L_p`.
Parameters
----------
L_p
Photopic luminance :math:`L_p`.
source
Light source colour temperature.
method
Method to calculate the weighting factor.
    photopic_lef
        :math:`V(\\lambda)` photopic luminous efficiency function, defaults to
        the *CIE 1924 Photopic Standard Observer*.
    scotopic_lef
        :math:`V^\\prime(\\lambda)` scotopic luminous efficiency function,
        defaults to the *CIE 1951 Scotopic Standard Observer*.
Returns
-------
:class:`colour.SpectralDistribution`
Mesopic luminous efficiency function :math:`V_m(\\lambda)`.
References
----------
:cite:`Wikipedia2005d`
Examples
--------
>>> from colour.utilities import numpy_print_options
>>> with numpy_print_options(suppress=True):
... sd_mesopic_luminous_efficiency_function(0.2) # doctest: +ELLIPSIS
SpectralDistribution([[ 380. , 0.000424 ...],
[ 381. , 0.0004781...],
[ 382. , 0.0005399...],
[ 383. , 0.0006122...],
[ 384. , 0.0006961...],
[ 385. , 0.0007929...],
[ 386. , 0.000907 ...],
[ 387. , 0.0010389...],
[ 388. , 0.0011923...],
[ 389. , 0.0013703...],
[ 390. , 0.0015771...],
[ 391. , 0.0018167...],
[ 392. , 0.0020942...],
[ 393. , 0.0024160...],
[ 394. , 0.0027888...],
[ 395. , 0.0032196...],
[ 396. , 0.0037222...],
[ 397. , 0.0042957...],
[ 398. , 0.0049531...],
[ 399. , 0.0057143...],
[ 400. , 0.0065784...],
[ 401. , 0.0075658...],
[ 402. , 0.0086912...],
[ 403. , 0.0099638...],
[ 404. , 0.0114058...],
[ 405. , 0.0130401...],
[ 406. , 0.0148750...],
[ 407. , 0.0169310...],
[ 408. , 0.0192211...],
[ 409. , 0.0217511...],
[ 410. , 0.0245342...],
[ 411. , 0.0275773...],
[ 412. , 0.0309172...],
[ 413. , 0.0345149...],
[ 414. , 0.0383998...],
[ 415. , 0.0425744...],
[ 416. , 0.0471074...],
[ 417. , 0.0519322...],
[ 418. , 0.0570541...],
[ 419. , 0.0625466...],
[ 420. , 0.0683463...],
[ 421. , 0.0745255...],
[ 422. , 0.0809440...],
[ 423. , 0.0877344...],
[ 424. , 0.0948915...],
[ 425. , 0.1022731...],
[ 426. , 0.109877 ...],
[ 427. , 0.1178421...],
[ 428. , 0.1260316...],
[ 429. , 0.1343772...],
[ 430. , 0.143017 ...],
[ 431. , 0.1518128...],
[ 432. , 0.1608328...],
[ 433. , 0.1700088...],
[ 434. , 0.1792726...],
[ 435. , 0.1886934...],
[ 436. , 0.1982041...],
[ 437. , 0.2078032...],
[ 438. , 0.2174184...],
[ 439. , 0.2271147...],
[ 440. , 0.2368196...],
[ 441. , 0.2464623...],
[ 442. , 0.2561153...],
[ 443. , 0.2657160...],
[ 444. , 0.2753387...],
[ 445. , 0.2848520...],
[ 446. , 0.2944648...],
[ 447. , 0.3034902...],
[ 448. , 0.3132347...],
[ 449. , 0.3223257...],
[ 450. , 0.3314513...],
[ 451. , 0.3406129...],
[ 452. , 0.3498117...],
[ 453. , 0.3583617...],
[ 454. , 0.3676377...],
[ 455. , 0.3762670...],
[ 456. , 0.3849392...],
[ 457. , 0.3936540...],
[ 458. , 0.4024077...],
[ 459. , 0.4111965...],
[ 460. , 0.4193298...],
[ 461. , 0.4281803...],
[ 462. , 0.4363804...],
[ 463. , 0.4453117...],
[ 464. , 0.4542949...],
[ 465. , 0.4626509...],
[ 466. , 0.4717570...],
[ 467. , 0.4809300...],
[ 468. , 0.4901776...],
[ 469. , 0.4995075...],
[ 470. , 0.5096145...],
[ 471. , 0.5191293...],
[ 472. , 0.5294259...],
[ 473. , 0.5391316...],
[ 474. , 0.5496217...],
[ 475. , 0.5602103...],
[ 476. , 0.5702197...],
[ 477. , 0.5810207...],
[ 478. , 0.5919093...],
[ 479. , 0.6028683...],
[ 480. , 0.6138806...],
[ 481. , 0.6249373...],
[ 482. , 0.6360619...],
[ 483. , 0.6465989...],
[ 484. , 0.6579538...],
[ 485. , 0.6687841...],
[ 486. , 0.6797939...],
[ 487. , 0.6909887...],
[ 488. , 0.7023827...],
[ 489. , 0.7133032...],
[ 490. , 0.7244513...],
[ 491. , 0.7358470...],
[ 492. , 0.7468118...],
[ 493. , 0.7580294...],
[ 494. , 0.7694964...],
[ 495. , 0.7805225...],
[ 496. , 0.7917805...],
[ 497. , 0.8026123...],
[ 498. , 0.8130793...],
[ 499. , 0.8239297...],
[ 500. , 0.8352251...],
[ 501. , 0.8456342...],
[ 502. , 0.8564818...],
[ 503. , 0.8676921...],
[ 504. , 0.8785021...],
[ 505. , 0.8881489...],
[ 506. , 0.8986405...],
[ 507. , 0.9079322...],
[ 508. , 0.9174255...],
[ 509. , 0.9257739...],
[ 510. , 0.9350656...],
[ 511. , 0.9432365...],
[ 512. , 0.9509063...],
[ 513. , 0.9586931...],
[ 514. , 0.9658413...],
[ 515. , 0.9722825...],
[ 516. , 0.9779924...],
[ 517. , 0.9836106...],
[ 518. , 0.9883465...],
[ 519. , 0.9920964...],
[ 520. , 0.9954436...],
[ 521. , 0.9976202...],
[ 522. , 0.9993457...],
[ 523. , 1. ...],
[ 524. , 0.9996498...],
[ 525. , 0.9990487...],
[ 526. , 0.9975356...],
[ 527. , 0.9957615...],
[ 528. , 0.9930143...],
[ 529. , 0.9899559...],
[ 530. , 0.9858741...],
[ 531. , 0.9814453...],
[ 532. , 0.9766885...],
[ 533. , 0.9709363...],
[ 534. , 0.9648947...],
[ 535. , 0.9585832...],
[ 536. , 0.952012 ...],
[ 537. , 0.9444916...],
[ 538. , 0.9367089...],
[ 539. , 0.9293506...],
[ 540. , 0.9210429...],
[ 541. , 0.9124772...],
[ 542. , 0.9036604...],
[ 543. , 0.8945958...],
[ 544. , 0.8845999...],
[ 545. , 0.8750500...],
[ 546. , 0.8659457...],
[ 547. , 0.8559224...],
[ 548. , 0.8456846...],
[ 549. , 0.8352499...],
[ 550. , 0.8253229...],
[ 551. , 0.8152079...],
[ 552. , 0.8042205...],
[ 553. , 0.7944209...],
[ 554. , 0.7837466...],
[ 555. , 0.7735680...],
[ 556. , 0.7627808...],
[ 557. , 0.7522710...],
[ 558. , 0.7417549...],
[ 559. , 0.7312909...],
[ 560. , 0.7207983...],
[ 561. , 0.7101939...],
[ 562. , 0.6996362...],
[ 563. , 0.6890656...],
[ 564. , 0.6785599...],
[ 565. , 0.6680593...],
[ 566. , 0.6575697...],
[ 567. , 0.6471578...],
[ 568. , 0.6368208...],
[ 569. , 0.6264871...],
[ 570. , 0.6161541...],
[ 571. , 0.6058896...],
[ 572. , 0.5957000...],
[ 573. , 0.5855937...],
[ 574. , 0.5754412...],
[ 575. , 0.5653883...],
[ 576. , 0.5553742...],
[ 577. , 0.5454680...],
[ 578. , 0.5355972...],
[ 579. , 0.5258267...],
[ 580. , 0.5160152...],
[ 581. , 0.5062322...],
[ 582. , 0.4965595...],
[ 583. , 0.4868746...],
[ 584. , 0.4773299...],
[ 585. , 0.4678028...],
[ 586. , 0.4583704...],
[ 587. , 0.4489722...],
[ 588. , 0.4397606...],
[ 589. , 0.4306131...],
[ 590. , 0.4215446...],
[ 591. , 0.4125681...],
[ 592. , 0.4037550...],
[ 593. , 0.3950359...],
[ 594. , 0.3864104...],
[ 595. , 0.3778777...],
[ 596. , 0.3694405...],
[ 597. , 0.3611074...],
[ 598. , 0.3528596...],
[ 599. , 0.3447056...],
[ 600. , 0.3366470...],
[ 601. , 0.3286917...],
[ 602. , 0.3208410...],
[ 603. , 0.3130808...],
[ 604. , 0.3054105...],
[ 605. , 0.2978225...],
[ 606. , 0.2903027...],
[ 607. , 0.2828727...],
[ 608. , 0.2755311...],
[ 609. , 0.2682900...],
[ 610. , 0.2611478...],
[ 611. , 0.2541176...],
[ 612. , 0.2471885...],
[ 613. , 0.2403570...],
[ 614. , 0.2336057...],
[ 615. , 0.2269379...],
[ 616. , 0.2203527...],
[ 617. , 0.2138465...],
[ 618. , 0.2073946...],
[ 619. , 0.2009789...],
[ 620. , 0.1945818...],
[ 621. , 0.1881943...],
[ 622. , 0.1818226...],
[ 623. , 0.1754987...],
[ 624. , 0.1692476...],
[ 625. , 0.1630876...],
[ 626. , 0.1570257...],
[ 627. , 0.151071 ...],
[ 628. , 0.1452469...],
[ 629. , 0.1395845...],
[ 630. , 0.1341087...],
[ 631. , 0.1288408...],
[ 632. , 0.1237666...],
[ 633. , 0.1188631...],
[ 634. , 0.1141075...],
[ 635. , 0.1094766...],
[ 636. , 0.1049613...],
[ 637. , 0.1005679...],
[ 638. , 0.0962924...],
[ 639. , 0.0921296...],
[ 640. , 0.0880778...],
[ 641. , 0.0841306...],
[ 642. , 0.0802887...],
[ 643. , 0.0765559...],
[ 644. , 0.0729367...],
[ 645. , 0.0694345...],
[ 646. , 0.0660491...],
[ 647. , 0.0627792...],
[ 648. , 0.0596278...],
[ 649. , 0.0565970...],
[ 650. , 0.0536896...],
[ 651. , 0.0509068...],
[ 652. , 0.0482444...],
[ 653. , 0.0456951...],
[ 654. , 0.0432510...],
[ 655. , 0.0409052...],
[ 656. , 0.0386537...],
[ 657. , 0.0364955...],
[ 658. , 0.0344285...],
[ 659. , 0.0324501...],
[ 660. , 0.0305579...],
[ 661. , 0.0287496...],
[ 662. , 0.0270233...],
[ 663. , 0.0253776...],
[ 664. , 0.0238113...],
[ 665. , 0.0223226...],
[ 666. , 0.0209086...],
[ 667. , 0.0195688...],
[ 668. , 0.0183056...],
[ 669. , 0.0171216...],
[ 670. , 0.0160192...],
[ 671. , 0.0149986...],
[ 672. , 0.0140537...],
[ 673. , 0.0131784...],
[ 674. , 0.0123662...],
[ 675. , 0.0116107...],
[ 676. , 0.0109098...],
[ 677. , 0.0102587...],
[ 678. , 0.0096476...],
[ 679. , 0.0090665...],
[ 680. , 0.0085053...],
[ 681. , 0.0079567...],
[ 682. , 0.0074229...],
[ 683. , 0.0069094...],
[ 684. , 0.0064213...],
[ 685. , 0.0059637...],
[ 686. , 0.0055377...],
[ 687. , 0.0051402...],
[ 688. , 0.00477 ...],
[ 689. , 0.0044263...],
[ 690. , 0.0041081...],
[ 691. , 0.0038149...],
[ 692. , 0.0035456...],
[ 693. , 0.0032984...],
[ 694. , 0.0030718...],
[ 695. , 0.0028639...],
[ 696. , 0.0026738...],
[ 697. , 0.0025000...],
[ 698. , 0.0023401...],
[ 699. , 0.0021918...],
[ 700. , 0.0020526...],
[ 701. , 0.0019207...],
[ 702. , 0.001796 ...],
[ 703. , 0.0016784...],
[ 704. , 0.0015683...],
[ 705. , 0.0014657...],
[ 706. , 0.0013702...],
[ 707. , 0.001281 ...],
[ 708. , 0.0011976...],
[ 709. , 0.0011195...],
[ 710. , 0.0010464...],
[ 711. , 0.0009776...],
[ 712. , 0.0009131...],
[ 713. , 0.0008525...],
[ 714. , 0.0007958...],
[ 715. , 0.0007427...],
[ 716. , 0.0006929...],
[ 717. , 0.0006462...],
[ 718. , 0.0006026...],
[ 719. , 0.0005619...],
[ 720. , 0.0005240...],
[ 721. , 0.0004888...],
[ 722. , 0.0004561...],
[ 723. , 0.0004255...],
[ 724. , 0.0003971...],
[ 725. , 0.0003704...],
[ 726. , 0.0003455...],
[ 727. , 0.0003221...],
[ 728. , 0.0003001...],
[ 729. , 0.0002796...],
[ 730. , 0.0002604...],
[ 731. , 0.0002423...],
[ 732. , 0.0002254...],
[ 733. , 0.0002095...],
[ 734. , 0.0001947...],
[ 735. , 0.0001809...],
[ 736. , 0.0001680...],
[ 737. , 0.0001560...],
[ 738. , 0.0001449...],
[ 739. , 0.0001345...],
[ 740. , 0.0001249...],
[ 741. , 0.0001159...],
[ 742. , 0.0001076...],
[ 743. , 0.0000999...],
[ 744. , 0.0000927...],
[ 745. , 0.0000862...],
[ 746. , 0.0000801...],
[ 747. , 0.0000745...],
[ 748. , 0.0000693...],
[ 749. , 0.0000646...],
[ 750. , 0.0000602...],
[ 751. , 0.0000561...],
[ 752. , 0.0000523...],
[ 753. , 0.0000488...],
[ 754. , 0.0000456...],
[ 755. , 0.0000425...],
[ 756. , 0.0000397...],
[ 757. , 0.0000370...],
[ 758. , 0.0000346...],
[ 759. , 0.0000322...],
[ 760. , 0.0000301...],
[ 761. , 0.0000281...],
[ 762. , 0.0000262...],
[ 763. , 0.0000244...],
[ 764. , 0.0000228...],
[ 765. , 0.0000213...],
[ 766. , 0.0000198...],
[ 767. , 0.0000185...],
[ 768. , 0.0000173...],
[ 769. , 0.0000161...],
[ 770. , 0.0000150...],
[ 771. , 0.0000140...],
[ 772. , 0.0000131...],
[ 773. , 0.0000122...],
[ 774. , 0.0000114...],
[ 775. , 0.0000106...],
[ 776. , 0.0000099...],
[ 777. , 0.0000092...],
[ 778. , 0.0000086...],
[ 779. , 0.0000080...],
[ 780. , 0.0000075...]],
interpolator=SpragueInterpolator,
interpolator_kwargs={},
extrapolator=Extrapolator,
extrapolator_kwargs={...})
"""
photopic_lef = cast(
SpectralDistribution,
optional(
photopic_lef,
SDS_LEFS_PHOTOPIC["CIE 1924 Photopic Standard Observer"],
),
)
scotopic_lef = cast(
SpectralDistribution,
optional(
scotopic_lef,
SDS_LEFS_SCOTOPIC["CIE 1951 Scotopic Standard Observer"],
),
)
shape = SpectralShape(
max([photopic_lef.shape.start, scotopic_lef.shape.start]),
min([photopic_lef.shape.end, scotopic_lef.shape.end]),
max([photopic_lef.shape.interval, scotopic_lef.shape.interval]),
)
wavelengths = shape.range()
sd = SpectralDistribution(
mesopic_weighting_function(
wavelengths, L_p, source, method, photopic_lef, scotopic_lef
),
wavelengths,
name=f"{L_p} Lp Mesopic Luminous Efficiency Function",
)
return sd.normalise()
|
colour-science/colour
|
colour/colorimetry/lefs.py
|
Python
|
bsd-3-clause
| 30,125
|
<?php
namespace app\models;
use Yii;
use yii\base\Model;
use app\models\general\GeneralLabel;
use app\models\general\GeneralMessage;
class IsnLaporanPerkhidmatanBiomekanikAtlet extends Model
{
public $tarikh_dari;
public $tarikh_hingga;
public $format;
public function rules()
{
return [
[['format'], 'required', 'message' => GeneralMessage::yii_validation_required],
[['tarikh_dari', 'tarikh_hingga'], 'safe'],
[['tarikh_hingga'], 'compare', 'compareAttribute'=>'tarikh_dari', 'operator'=>'>=', 'skipOnEmpty'=>true, 'message' => GeneralMessage::yii_validation_compare],
];
}
public function attributeLabels()
{
return [
'tarikh_dari' => GeneralLabel::tarikh_dari,
'tarikh_hingga' => GeneralLabel::tarikh_hingga,
'format' => GeneralLabel::format,
];
}
}
|
hung101/kbs
|
frontend/models/IsnLaporanPerkhidmatanBiomekanikAtlet.php
|
PHP
|
bsd-3-clause
| 902
|
{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances #-}
module Lambda.DataType.SExpr where
import DeepControl.Applicative
import DeepControl.Monad
import Lambda.DataType.Common
import Lambda.DataType.PatternMatch (PM)
import qualified Lambda.DataType.PatternMatch as PM
import Lambda.DataType.Type (Type((:->)))
import qualified Lambda.DataType.Type as Ty
import Util.Pseudo
import qualified Util.LISP as L
import Data.List (unwords, intersperse)
import Data.Foldable (fold)
-- | sugared expression
data SExpr = BOOL Bool MSP
| INT Integer MSP
| CHAR Char MSP
| UNIT MSP
| VAR Name MSP -- variable
| OPR String MSP -- symbolic operator
-- tuple
| TUPLE [SExpr] MSP
| TPLPrj SExpr Index
-- tag
| TAGPrj SExpr (Name, Index)
--
| FIX SExpr MSP
-- syntax
| IF SExpr SExpr SExpr MSP
| CASE SExpr [(PM, SExpr)] MSP
-- sentence
| TYPESig (Name, Type) MSP
| DEF Name [([PM], SExpr, MSP)]
| BNF Name [(Name, [Type])] MSP
-- quote
| QUT SExpr MSP -- quote
| QQUT SExpr MSP -- quasi-quote
| UNQUT SExpr MSP -- unquote
-- syntactic-sugar
| APP SExpr [SExpr] MSP -- application
| APPSeq [SExpr] MSP -- infix application sequence
| LAM [(PM, Type)] SExpr MSP -- lambda
| LAMM [(PM, Type)] SExpr MSP -- lambda-macro
| AS (SExpr, Type) MSP -- ascription
| LET (PM, Type) SExpr SExpr MSP
| LETREC (Name, Type) SExpr SExpr MSP
-- list
| NIL MSP
| CONS SExpr SExpr MSP
| HEAD SExpr
| TAIL SExpr
deriving (Eq)
unit = UNIT Nothing
var name = VAR name Nothing
bool b = BOOL b Nothing
int n = INT n Nothing
char c = CHAR c Nothing
nil = NIL Nothing
cons a d = CONS a d Nothing
tuple xs = TUPLE xs Nothing
fix x = FIX x Nothing
app x xs = APP x xs Nothing
appseq xs = APPSeq xs Nothing
lam ps x = LAM ps x Nothing
lamm ps x = LAMM ps x Nothing
let_ p x1 x2 = LET p x1 x2 Nothing
letrec p x1 x2 = LETREC p x1 x2 Nothing
instance L.LISP SExpr where
car (CONS a _ _) = a
car _ = error "car: empty structure"
cdr (CONS _ d _) = d
cdr _ = error "cdr: empty structure"
cons a d = CONS a d Nothing
nil = NIL Nothing
isCell (CONS _ _ _) = True
isCell _ = False
instance Show SExpr where
-- value
show (BOOL bool _) = show bool
show (INT n _) = show n
show (CHAR c _) = "'"++ [c] ++"'"
-- variable
show (UNIT _) = "_"
show (VAR name _) = if isSymbolic name
then "("++ name ++")"
else name
show (OPR sym _) = sym
-- tuple
show (TUPLE xs _) = show |$> xs
>- (intersperse ", " >-> fold)
>- \s -> "("++ s ++ ")"
-- apply
show (FIX lambda _) = "(fix " ++ show lambda ++")"
show (TPLPrj x n) = show x ++"."++ show n
show (TAGPrj x (name,n)) = "("++ show x ++")."++ name ++"["++ show n ++"]"
-- syntax
show (IF e1 e2 e3 _) = "if "++ show e1 ++" then "++ show e2 ++" else "++ show e3
show (CASE e pairs _) = "case "++ show e ++" of "++ (pairs <$| (\(pm,e) -> show pm ++" -> "++ show e)
>- (intersperse " | " >-> fold))
-- sentence
show (TYPESig (name, ty) _) = if isSymbolic name
then "("++ name ++") :: "++ show ty
else name ++" :: "++ show ty
show (DEF name defs) = fold $ intersperse "\n" $ defs <$| showdef
where
showdef ([],s,_) = showname ++" = "++ show s
showdef (pms,s,_) = showname ++" "++ (unwords $ pms <$| show) ++" = "++ show s
showname = if isSymbolic name
then "("++ name ++")"
else name
show (BNF name tags _) = "data "++ name ++" = "++ ((showTag |$> tags) >- (intersperse " | " >-> fold))
where
showTag (name, []) = name
showTag (name, tys) = name ++" "++ ((show |$> tys) >- (intersperse " " >-> fold))
-- quote
show (QUT t _) = "{"++ show t ++ "}"
show (QQUT t _) = "`"++ show t
show (UNQUT t _) = ","++ show t
-- syntactic-sugar
show (APP e [] _) = error "APP: empty args"
show (APP e args _) = "("++ show e ++" "++ (unwords $ args <$| show) ++")"
show (APPSeq [] _) = error "APPSeq: empty seq"
show (APPSeq (x:[]) _) = show x
show (APPSeq seq _) = "("++ (unwords $ seq <$| show) ++")"
show (LAM [] e _) = error "LAM: empty params"
show (LAM params e _) = "(\\"++ showParams ++"."++ show e ++")"
where
showParams = fold $ params <$| (\(pm, ty) -> show pm ++ showType ty)
>- intersperse " "
where
showType ty@(_ :-> _) = "::"++ "("++ show ty ++")"
showType Ty.UNIT = ""
showType ty = "::"++ show ty
show (LAMM [] e _) = error "LAMM: empty params"
show (LAMM params e _) = "("++ showParams ++ show e ++")"
where
showParams = fold $ params <$| (\(pm, ty) -> "#"++ show pm ++ showType ty ++ ".")
where
showType ty@(_ :-> _) = "::"++ "("++ show ty ++")"
showType Ty.UNIT = ""
showType ty = "::"++ show ty
show (AS (e, ty@(_ :-> _)) _) = show e ++"::("++ show ty ++")"
show (AS (e, ty) _) = show e ++"::"++ show ty
show (LET (pm,Ty.UNIT) e1 e2 _) = "let "++ show pm ++" = "++ show e1 ++" in "++ show e2
show (LET (pm,ty) e1 e2 _) = "let "++ show pm ++"::"++ show ty ++" = "++ show e1 ++" in "++ show e2
show (LETREC (var,ty) e1 e2 _) = "letrec "++ var ++"::"++ show ty ++" = "++ show e1 ++" in "++ show e2
-- list
show (NIL _) = "[]"
show x@(CONS a d _) =
if L.isList x
then case a of
CHAR _ _ -> show $ toString $ L.toList x
_ -> show $ L.toList x
else showParens a ++ ":"++ showParens d
where
toString [] = []
toString ((CHAR c _):cs) = c : toString cs
showParens x@(LAM _ _ _) = "("++ show x ++")"
showParens x@(FIX _ _) = "("++ show x ++")"
showParens x@(APP _ _ _) = "("++ show x ++")"
showParens x@(IF _ _ _ _) = "("++ show x ++")"
showParens x@(CASE _ _ _) = "("++ show x ++")"
showParens x = show x
show (HEAD x) = "(head " ++ show x ++")"
show (TAIL x) = "(tail " ++ show x ++")"
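-- Illustrative examples for the Show instance above (a sketch, not from this
-- file; it assumes 'isSymbolic "x"' is False, 'Ty.UNIT' renders as an empty
-- type annotation, and 'L.toList' flattens a proper list in the obvious way):
--   show (lam [(var "x", Ty.UNIT)] (var "x"))   ==>  "(\x.x)"
--   show (cons (int 1) (cons (int 2) nil))      ==>  "[1,2]"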
| ocean0yohsuke/Simply-Typed-Lambda | src/Lambda/DataType/SExpr.hs | Haskell | bsd-3-clause | 6,954 |
/**
*============================================================================
* The Ohio State University Research Foundation, The University of Chicago -
* Argonne National Laboratory, Emory University, SemanticBits LLC,
* and Ekagra Software Technologies Ltd.
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/cagrid-portal/LICENSE.txt for details.
*============================================================================
**/
/**
*
*/
package gov.nih.nci.cagrid.portal.portlet.discovery.details;
import gov.nih.nci.cagrid.portal.portlet.AbstractActionResponseHandlerCommandController;
import gov.nih.nci.cagrid.portal.portlet.discovery.DiscoveryModel;
import javax.portlet.ActionRequest;
import javax.portlet.ActionResponse;
import org.springframework.beans.factory.annotation.Required;
import org.springframework.validation.BindException;
/**
* @author <a href="mailto:joshua.phillips@semanticbits.com">Joshua Phillips</a>
*
*/
public abstract class AbstractDiscoverySelectDetailsController extends AbstractActionResponseHandlerCommandController {
private DiscoveryModel discoveryModel;
/**
*
*/
public AbstractDiscoverySelectDetailsController() {
}
/**
* @param commandClass
*/
public AbstractDiscoverySelectDetailsController(Class commandClass) {
super(commandClass);
}
/**
* @param commandClass
* @param commandName
*/
public AbstractDiscoverySelectDetailsController(Class commandClass, String commandName) {
super(commandClass, commandName);
}
protected void doHandleAction(ActionRequest request, ActionResponse response, Object obj, BindException errors) throws Exception{
doSelect(getDiscoveryModel(), ((SelectDetailsCommand)obj).getSelectedId());
}
protected abstract void doSelect(DiscoveryModel model, Integer selectedId);
@Required
public DiscoveryModel getDiscoveryModel() {
return discoveryModel;
}
public void setDiscoveryModel(DiscoveryModel discoveryModel) {
this.discoveryModel = discoveryModel;
}
}
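// Illustrative sketch (a hypothetical subclass, not part of this file): a concrete
// controller implements doSelect() to record the chosen id on the shared model, e.g.
//
//   public class ServiceSelectDetailsController
//           extends AbstractDiscoverySelectDetailsController {
//       protected void doSelect(DiscoveryModel model, Integer selectedId) {
//           model.setSelectedServiceId(selectedId); // setter name is an assumption
//       }
//   }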
| NCIP/cagrid-portal | cagrid-portal/portlets/src/java/gov/nih/nci/cagrid/portal/portlet/discovery/details/AbstractDiscoverySelectDetailsController.java | Java | bsd-3-clause | 2,042 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "net/quic/test_tools/quic_test_utils.h"
#include "base/stl_util.h"
#include "base/strings/string_number_conversions.h"
#include "net/quic/crypto/crypto_framer.h"
#include "net/quic/crypto/crypto_handshake.h"
#include "net/quic/crypto/crypto_utils.h"
#include "net/quic/crypto/null_encrypter.h"
#include "net/quic/crypto/quic_decrypter.h"
#include "net/quic/crypto/quic_encrypter.h"
#include "net/quic/quic_framer.h"
#include "net/quic/quic_packet_creator.h"
#include "net/quic/quic_utils.h"
#include "net/quic/test_tools/quic_connection_peer.h"
#include "net/spdy/spdy_frame_builder.h"
using base::StringPiece;
using std::max;
using std::min;
using std::string;
using testing::_;
namespace net {
namespace test {
namespace {
// No-op alarm implementation used by MockHelper.
class TestAlarm : public QuicAlarm {
public:
explicit TestAlarm(QuicAlarm::Delegate* delegate)
: QuicAlarm(delegate) {
}
virtual void SetImpl() OVERRIDE {}
virtual void CancelImpl() OVERRIDE {}
};
} // namespace
MockFramerVisitor::MockFramerVisitor() {
// By default, we want to accept packets.
ON_CALL(*this, OnProtocolVersionMismatch(_))
.WillByDefault(testing::Return(false));
// By default, we want to accept packets.
ON_CALL(*this, OnUnauthenticatedHeader(_))
.WillByDefault(testing::Return(true));
ON_CALL(*this, OnUnauthenticatedPublicHeader(_))
.WillByDefault(testing::Return(true));
ON_CALL(*this, OnPacketHeader(_))
.WillByDefault(testing::Return(true));
ON_CALL(*this, OnStreamFrame(_))
.WillByDefault(testing::Return(true));
ON_CALL(*this, OnAckFrame(_))
.WillByDefault(testing::Return(true));
ON_CALL(*this, OnCongestionFeedbackFrame(_))
.WillByDefault(testing::Return(true));
ON_CALL(*this, OnStopWaitingFrame(_))
.WillByDefault(testing::Return(true));
ON_CALL(*this, OnRstStreamFrame(_))
.WillByDefault(testing::Return(true));
ON_CALL(*this, OnConnectionCloseFrame(_))
.WillByDefault(testing::Return(true));
ON_CALL(*this, OnGoAwayFrame(_))
.WillByDefault(testing::Return(true));
}
MockFramerVisitor::~MockFramerVisitor() {
}
bool NoOpFramerVisitor::OnProtocolVersionMismatch(QuicVersion version) {
return false;
}
bool NoOpFramerVisitor::OnUnauthenticatedPublicHeader(
const QuicPacketPublicHeader& header) {
return true;
}
bool NoOpFramerVisitor::OnUnauthenticatedHeader(
const QuicPacketHeader& header) {
return true;
}
bool NoOpFramerVisitor::OnPacketHeader(const QuicPacketHeader& header) {
return true;
}
bool NoOpFramerVisitor::OnStreamFrame(const QuicStreamFrame& frame) {
return true;
}
bool NoOpFramerVisitor::OnAckFrame(const QuicAckFrame& frame) {
return true;
}
bool NoOpFramerVisitor::OnCongestionFeedbackFrame(
const QuicCongestionFeedbackFrame& frame) {
return true;
}
bool NoOpFramerVisitor::OnStopWaitingFrame(
const QuicStopWaitingFrame& frame) {
return true;
}
bool NoOpFramerVisitor::OnRstStreamFrame(
const QuicRstStreamFrame& frame) {
return true;
}
bool NoOpFramerVisitor::OnConnectionCloseFrame(
const QuicConnectionCloseFrame& frame) {
return true;
}
bool NoOpFramerVisitor::OnGoAwayFrame(const QuicGoAwayFrame& frame) {
return true;
}
bool NoOpFramerVisitor::OnWindowUpdateFrame(
const QuicWindowUpdateFrame& frame) {
return true;
}
bool NoOpFramerVisitor::OnBlockedFrame(const QuicBlockedFrame& frame) {
return true;
}
FramerVisitorCapturingFrames::FramerVisitorCapturingFrames() : frame_count_(0) {
}
FramerVisitorCapturingFrames::~FramerVisitorCapturingFrames() {
Reset();
}
void FramerVisitorCapturingFrames::Reset() {
STLDeleteElements(&stream_data_);
stream_frames_.clear();
frame_count_ = 0;
  ack_.reset();
  feedback_.reset();
  stop_waiting_.reset();
rst_.reset();
close_.reset();
goaway_.reset();
version_negotiation_packet_.reset();
}
bool FramerVisitorCapturingFrames::OnPacketHeader(
const QuicPacketHeader& header) {
header_ = header;
frame_count_ = 0;
return true;
}
bool FramerVisitorCapturingFrames::OnStreamFrame(const QuicStreamFrame& frame) {
// Make a copy of the frame and store a copy of underlying string, since
// frame.data may not exist outside this callback.
stream_data_.push_back(frame.GetDataAsString());
QuicStreamFrame frame_copy = frame;
frame_copy.data.Clear();
frame_copy.data.Append(const_cast<char*>(stream_data_.back()->data()),
stream_data_.back()->size());
stream_frames_.push_back(frame_copy);
++frame_count_;
return true;
}
bool FramerVisitorCapturingFrames::OnAckFrame(const QuicAckFrame& frame) {
ack_.reset(new QuicAckFrame(frame));
++frame_count_;
return true;
}
bool FramerVisitorCapturingFrames::OnCongestionFeedbackFrame(
const QuicCongestionFeedbackFrame& frame) {
feedback_.reset(new QuicCongestionFeedbackFrame(frame));
++frame_count_;
return true;
}
bool FramerVisitorCapturingFrames::OnStopWaitingFrame(
const QuicStopWaitingFrame& frame) {
stop_waiting_.reset(new QuicStopWaitingFrame(frame));
++frame_count_;
return true;
}
bool FramerVisitorCapturingFrames::OnRstStreamFrame(
const QuicRstStreamFrame& frame) {
rst_.reset(new QuicRstStreamFrame(frame));
++frame_count_;
return true;
}
bool FramerVisitorCapturingFrames::OnConnectionCloseFrame(
const QuicConnectionCloseFrame& frame) {
close_.reset(new QuicConnectionCloseFrame(frame));
++frame_count_;
return true;
}
bool FramerVisitorCapturingFrames::OnGoAwayFrame(const QuicGoAwayFrame& frame) {
goaway_.reset(new QuicGoAwayFrame(frame));
++frame_count_;
return true;
}
void FramerVisitorCapturingFrames::OnVersionNegotiationPacket(
const QuicVersionNegotiationPacket& packet) {
version_negotiation_packet_.reset(new QuicVersionNegotiationPacket(packet));
frame_count_ = 0;
}
FramerVisitorCapturingPublicReset::FramerVisitorCapturingPublicReset() {
}
FramerVisitorCapturingPublicReset::~FramerVisitorCapturingPublicReset() {
}
void FramerVisitorCapturingPublicReset::OnPublicResetPacket(
const QuicPublicResetPacket& public_reset) {
public_reset_packet_ = public_reset;
}
MockConnectionVisitor::MockConnectionVisitor() {
}
MockConnectionVisitor::~MockConnectionVisitor() {
}
MockHelper::MockHelper() {
}
MockHelper::~MockHelper() {
}
const QuicClock* MockHelper::GetClock() const {
return &clock_;
}
QuicRandom* MockHelper::GetRandomGenerator() {
return &random_generator_;
}
QuicAlarm* MockHelper::CreateAlarm(QuicAlarm::Delegate* delegate) {
return new TestAlarm(delegate);
}
void MockHelper::AdvanceTime(QuicTime::Delta delta) {
clock_.AdvanceTime(delta);
}
MockConnection::MockConnection(bool is_server)
: QuicConnection(kTestConnectionId,
IPEndPoint(TestPeerIPAddress(), kTestPort),
new testing::NiceMock<MockHelper>(),
new testing::NiceMock<MockPacketWriter>(),
is_server, QuicSupportedVersions()),
writer_(QuicConnectionPeer::GetWriter(this)),
helper_(helper()) {
}
MockConnection::MockConnection(IPEndPoint address,
bool is_server)
: QuicConnection(kTestConnectionId, address,
new testing::NiceMock<MockHelper>(),
new testing::NiceMock<MockPacketWriter>(),
is_server, QuicSupportedVersions()),
writer_(QuicConnectionPeer::GetWriter(this)),
helper_(helper()) {
}
MockConnection::MockConnection(QuicConnectionId connection_id,
bool is_server)
: QuicConnection(connection_id,
IPEndPoint(TestPeerIPAddress(), kTestPort),
new testing::NiceMock<MockHelper>(),
new testing::NiceMock<MockPacketWriter>(),
is_server, QuicSupportedVersions()),
writer_(QuicConnectionPeer::GetWriter(this)),
helper_(helper()) {
}
MockConnection::MockConnection(bool is_server,
const QuicVersionVector& supported_versions)
: QuicConnection(kTestConnectionId,
IPEndPoint(TestPeerIPAddress(), kTestPort),
new testing::NiceMock<MockHelper>(),
new testing::NiceMock<MockPacketWriter>(),
is_server, supported_versions),
writer_(QuicConnectionPeer::GetWriter(this)),
helper_(helper()) {
}
MockConnection::~MockConnection() {
}
void MockConnection::AdvanceTime(QuicTime::Delta delta) {
static_cast<MockHelper*>(helper())->AdvanceTime(delta);
}
PacketSavingConnection::PacketSavingConnection(bool is_server)
: MockConnection(is_server) {
}
PacketSavingConnection::PacketSavingConnection(
bool is_server,
const QuicVersionVector& supported_versions)
: MockConnection(is_server, supported_versions) {
}
PacketSavingConnection::~PacketSavingConnection() {
STLDeleteElements(&packets_);
STLDeleteElements(&encrypted_packets_);
}
bool PacketSavingConnection::SendOrQueuePacket(
EncryptionLevel level,
const SerializedPacket& packet,
TransmissionType transmission_type) {
packets_.push_back(packet.packet);
QuicEncryptedPacket* encrypted = QuicConnectionPeer::GetFramer(this)->
EncryptPacket(level, packet.sequence_number, *packet.packet);
encrypted_packets_.push_back(encrypted);
return true;
}
MockSession::MockSession(QuicConnection* connection)
: QuicSession(connection, DefaultQuicConfig()) {
ON_CALL(*this, WritevData(_, _, _, _, _))
.WillByDefault(testing::Return(QuicConsumedData(0, false)));
}
MockSession::~MockSession() {
}
TestSession::TestSession(QuicConnection* connection,
const QuicConfig& config)
: QuicSession(connection, config),
crypto_stream_(NULL) {
}
TestSession::~TestSession() {}
void TestSession::SetCryptoStream(QuicCryptoStream* stream) {
crypto_stream_ = stream;
}
QuicCryptoStream* TestSession::GetCryptoStream() {
return crypto_stream_;
}
MockPacketWriter::MockPacketWriter() {
}
MockPacketWriter::~MockPacketWriter() {
}
MockSendAlgorithm::MockSendAlgorithm() {
}
MockSendAlgorithm::~MockSendAlgorithm() {
}
MockLossAlgorithm::MockLossAlgorithm() {
}
MockLossAlgorithm::~MockLossAlgorithm() {
}
MockAckNotifierDelegate::MockAckNotifierDelegate() {
}
MockAckNotifierDelegate::~MockAckNotifierDelegate() {
}
namespace {
string HexDumpWithMarks(const char* data, int length,
const bool* marks, int mark_length) {
static const char kHexChars[] = "0123456789abcdef";
static const int kColumns = 4;
const int kSizeLimit = 1024;
if (length > kSizeLimit || mark_length > kSizeLimit) {
LOG(ERROR) << "Only dumping first " << kSizeLimit << " bytes.";
length = min(length, kSizeLimit);
mark_length = min(mark_length, kSizeLimit);
}
string hex;
for (const char* row = data; length > 0;
row += kColumns, length -= kColumns) {
for (const char *p = row; p < row + 4; ++p) {
if (p < row + length) {
const bool mark =
(marks && (p - data) < mark_length && marks[p - data]);
hex += mark ? '*' : ' ';
hex += kHexChars[(*p & 0xf0) >> 4];
hex += kHexChars[*p & 0x0f];
hex += mark ? '*' : ' ';
} else {
hex += " ";
}
}
hex = hex + " ";
for (const char *p = row; p < row + 4 && p < row + length; ++p)
hex += (*p >= 0x20 && *p <= 0x7f) ? (*p) : '.';
hex = hex + '\n';
}
return hex;
}
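// Illustrative output of HexDumpWithMarks (a sketch; the exact spacing follows
// the loops above): for data = "abcd", length = 4 and no marks, it returns the
// single row " 61  62  63  64   abcd\n"; a marked byte is wrapped in '*', so
// the same byte would render as "*61*" instead of " 61 ".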
} // namespace
IPAddressNumber TestPeerIPAddress() { return Loopback4(); }
QuicVersion QuicVersionMax() { return QuicSupportedVersions().front(); }
QuicVersion QuicVersionMin() { return QuicSupportedVersions().back(); }
IPAddressNumber Loopback4() {
IPAddressNumber addr;
CHECK(ParseIPLiteralToNumber("127.0.0.1", &addr));
return addr;
}
QuicEncryptedPacket* ConstructEncryptedPacket(
QuicConnectionId connection_id,
bool version_flag,
bool reset_flag,
QuicPacketSequenceNumber sequence_number,
const string& data) {
QuicPacketHeader header;
header.public_header.connection_id = connection_id;
header.public_header.connection_id_length = PACKET_8BYTE_CONNECTION_ID;
header.public_header.version_flag = version_flag;
header.public_header.reset_flag = reset_flag;
header.public_header.sequence_number_length = PACKET_6BYTE_SEQUENCE_NUMBER;
header.packet_sequence_number = sequence_number;
header.entropy_flag = false;
header.entropy_hash = 0;
header.fec_flag = false;
header.is_in_fec_group = NOT_IN_FEC_GROUP;
header.fec_group = 0;
QuicStreamFrame stream_frame(1, false, 0, MakeIOVector(data));
QuicFrame frame(&stream_frame);
QuicFrames frames;
frames.push_back(frame);
QuicFramer framer(QuicSupportedVersions(), QuicTime::Zero(), false);
scoped_ptr<QuicPacket> packet(
framer.BuildUnsizedDataPacket(header, frames).packet);
EXPECT_TRUE(packet != NULL);
QuicEncryptedPacket* encrypted = framer.EncryptPacket(ENCRYPTION_NONE,
sequence_number,
*packet);
EXPECT_TRUE(encrypted != NULL);
return encrypted;
}
void CompareCharArraysWithHexError(
const string& description,
const char* actual,
const int actual_len,
const char* expected,
const int expected_len) {
EXPECT_EQ(actual_len, expected_len);
const int min_len = min(actual_len, expected_len);
const int max_len = max(actual_len, expected_len);
scoped_ptr<bool[]> marks(new bool[max_len]);
bool identical = (actual_len == expected_len);
for (int i = 0; i < min_len; ++i) {
if (actual[i] != expected[i]) {
marks[i] = true;
identical = false;
} else {
marks[i] = false;
}
}
for (int i = min_len; i < max_len; ++i) {
marks[i] = true;
}
if (identical) return;
ADD_FAILURE()
<< "Description:\n"
<< description
<< "\n\nExpected:\n"
<< HexDumpWithMarks(expected, expected_len, marks.get(), max_len)
<< "\nActual:\n"
<< HexDumpWithMarks(actual, actual_len, marks.get(), max_len);
}
bool DecodeHexString(const base::StringPiece& hex, std::string* bytes) {
bytes->clear();
if (hex.empty())
return true;
std::vector<uint8> v;
if (!base::HexStringToBytes(hex.as_string(), &v))
return false;
if (!v.empty())
bytes->assign(reinterpret_cast<const char*>(&v[0]), v.size());
return true;
}
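// Illustrative usage of DecodeHexString (a sketch): DecodeHexString("0a0b", &s)
// leaves s holding the two bytes 0x0a 0x0b and returns true; an empty hex
// string clears s and also returns true.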
static QuicPacket* ConstructPacketFromHandshakeMessage(
QuicConnectionId connection_id,
const CryptoHandshakeMessage& message,
bool should_include_version) {
CryptoFramer crypto_framer;
scoped_ptr<QuicData> data(crypto_framer.ConstructHandshakeMessage(message));
QuicFramer quic_framer(QuicSupportedVersions(), QuicTime::Zero(), false);
QuicPacketHeader header;
header.public_header.connection_id = connection_id;
header.public_header.reset_flag = false;
header.public_header.version_flag = should_include_version;
header.packet_sequence_number = 1;
header.entropy_flag = false;
header.entropy_hash = 0;
header.fec_flag = false;
header.fec_group = 0;
QuicStreamFrame stream_frame(kCryptoStreamId, false, 0,
MakeIOVector(data->AsStringPiece()));
QuicFrame frame(&stream_frame);
QuicFrames frames;
frames.push_back(frame);
return quic_framer.BuildUnsizedDataPacket(header, frames).packet;
}
QuicPacket* ConstructHandshakePacket(QuicConnectionId connection_id,
QuicTag tag) {
CryptoHandshakeMessage message;
message.set_tag(tag);
return ConstructPacketFromHandshakeMessage(connection_id, message, false);
}
size_t GetPacketLengthForOneStream(
QuicVersion version,
bool include_version,
QuicSequenceNumberLength sequence_number_length,
InFecGroup is_in_fec_group,
size_t* payload_length) {
*payload_length = 1;
const size_t stream_length =
NullEncrypter().GetCiphertextSize(*payload_length) +
QuicPacketCreator::StreamFramePacketOverhead(
version, PACKET_8BYTE_CONNECTION_ID, include_version,
sequence_number_length, is_in_fec_group);
const size_t ack_length = NullEncrypter().GetCiphertextSize(
QuicFramer::GetMinAckFrameSize(
version, sequence_number_length, PACKET_1BYTE_SEQUENCE_NUMBER)) +
GetPacketHeaderSize(PACKET_8BYTE_CONNECTION_ID, include_version,
sequence_number_length, is_in_fec_group);
if (stream_length < ack_length) {
*payload_length = 1 + ack_length - stream_length;
}
return NullEncrypter().GetCiphertextSize(*payload_length) +
QuicPacketCreator::StreamFramePacketOverhead(
version, PACKET_8BYTE_CONNECTION_ID, include_version,
sequence_number_length, is_in_fec_group);
}
TestEntropyCalculator::TestEntropyCalculator() { }
TestEntropyCalculator::~TestEntropyCalculator() { }
QuicPacketEntropyHash TestEntropyCalculator::EntropyHash(
QuicPacketSequenceNumber sequence_number) const {
return 1u;
}
MockEntropyCalculator::MockEntropyCalculator() { }
MockEntropyCalculator::~MockEntropyCalculator() { }
QuicConfig DefaultQuicConfig() {
QuicConfig config;
config.SetDefaults();
return config;
}
QuicVersionVector SupportedVersions(QuicVersion version) {
QuicVersionVector versions;
versions.push_back(version);
return versions;
}
} // namespace test
} // namespace net
| anirudhSK/chromium | net/quic/test_tools/quic_test_utils.cc | C++ | bsd-3-clause | 17,601 |
/*-
* Copyright (C) 2012-2015 Intel Corporation
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#include <sys/cdefs.h>
__FBSDID("$FreeBSD$");
#include <sys/param.h>
#include <sys/systm.h>
#include <sys/buf.h>
#include <sys/bus.h>
#include <sys/conf.h>
#include <sys/ioccom.h>
#include <sys/proc.h>
#include <sys/smp.h>
#include <sys/uio.h>
#include <dev/pci/pcireg.h>
#include <dev/pci/pcivar.h>
#include "nvme_private.h"
static void nvme_ctrlr_construct_and_submit_aer(struct nvme_controller *ctrlr,
struct nvme_async_event_request *aer);
static int
nvme_ctrlr_allocate_bar(struct nvme_controller *ctrlr)
{
ctrlr->resource_id = PCIR_BAR(0);
ctrlr->resource = bus_alloc_resource(ctrlr->dev, SYS_RES_MEMORY,
&ctrlr->resource_id, 0, ~0, 1, RF_ACTIVE);
if(ctrlr->resource == NULL) {
nvme_printf(ctrlr, "unable to allocate pci resource\n");
return (ENOMEM);
}
ctrlr->bus_tag = rman_get_bustag(ctrlr->resource);
ctrlr->bus_handle = rman_get_bushandle(ctrlr->resource);
ctrlr->regs = (struct nvme_registers *)ctrlr->bus_handle;
/*
* The NVMe spec allows for the MSI-X table to be placed behind
* BAR 4/5, separate from the control/doorbell registers. Always
* try to map this bar, because it must be mapped prior to calling
* pci_alloc_msix(). If the table isn't behind BAR 4/5,
* bus_alloc_resource() will just return NULL which is OK.
*/
ctrlr->bar4_resource_id = PCIR_BAR(4);
ctrlr->bar4_resource = bus_alloc_resource(ctrlr->dev, SYS_RES_MEMORY,
&ctrlr->bar4_resource_id, 0, ~0, 1, RF_ACTIVE);
return (0);
}
static void
nvme_ctrlr_construct_admin_qpair(struct nvme_controller *ctrlr)
{
struct nvme_qpair *qpair;
uint32_t num_entries;
qpair = &ctrlr->adminq;
num_entries = NVME_ADMIN_ENTRIES;
TUNABLE_INT_FETCH("hw.nvme.admin_entries", &num_entries);
/*
* If admin_entries was overridden to an invalid value, revert it
* back to our default value.
*/
if (num_entries < NVME_MIN_ADMIN_ENTRIES ||
num_entries > NVME_MAX_ADMIN_ENTRIES) {
nvme_printf(ctrlr, "invalid hw.nvme.admin_entries=%d "
"specified\n", num_entries);
num_entries = NVME_ADMIN_ENTRIES;
}
/*
* The admin queue's max xfer size is treated differently than the
* max I/O xfer size. 16KB is sufficient here - maybe even less?
*/
nvme_qpair_construct(qpair,
0, /* qpair ID */
0, /* vector */
num_entries,
NVME_ADMIN_TRACKERS,
ctrlr);
}
static int
nvme_ctrlr_construct_io_qpairs(struct nvme_controller *ctrlr)
{
struct nvme_qpair *qpair;
union cap_lo_register cap_lo;
int i, num_entries, num_trackers;
num_entries = NVME_IO_ENTRIES;
TUNABLE_INT_FETCH("hw.nvme.io_entries", &num_entries);
/*
* NVMe spec sets a hard limit of 64K max entries, but
* devices may specify a smaller limit, so we need to check
* the MQES field in the capabilities register.
*/
cap_lo.raw = nvme_mmio_read_4(ctrlr, cap_lo);
num_entries = min(num_entries, cap_lo.bits.mqes+1);
num_trackers = NVME_IO_TRACKERS;
TUNABLE_INT_FETCH("hw.nvme.io_trackers", &num_trackers);
num_trackers = max(num_trackers, NVME_MIN_IO_TRACKERS);
num_trackers = min(num_trackers, NVME_MAX_IO_TRACKERS);
/*
* No need to have more trackers than entries in the submit queue.
* Note also that for a queue size of N, we can only have (N-1)
* commands outstanding, hence the "-1" here.
*/
num_trackers = min(num_trackers, (num_entries-1));
ctrlr->ioq = malloc(ctrlr->num_io_queues * sizeof(struct nvme_qpair),
M_NVME, M_ZERO | M_WAITOK);
for (i = 0; i < ctrlr->num_io_queues; i++) {
qpair = &ctrlr->ioq[i];
/*
* Admin queue has ID=0. IO queues start at ID=1 -
* hence the 'i+1' here.
*
* For I/O queues, use the controller-wide max_xfer_size
* calculated in nvme_attach().
*/
nvme_qpair_construct(qpair,
i+1, /* qpair ID */
ctrlr->msix_enabled ? i+1 : 0, /* vector */
num_entries,
num_trackers,
ctrlr);
if (ctrlr->per_cpu_io_queues)
bus_bind_intr(ctrlr->dev, qpair->res, i);
}
return (0);
}
static void
nvme_ctrlr_fail(struct nvme_controller *ctrlr)
{
int i;
ctrlr->is_failed = TRUE;
nvme_qpair_fail(&ctrlr->adminq);
for (i = 0; i < ctrlr->num_io_queues; i++)
nvme_qpair_fail(&ctrlr->ioq[i]);
nvme_notify_fail_consumers(ctrlr);
}
void
nvme_ctrlr_post_failed_request(struct nvme_controller *ctrlr,
struct nvme_request *req)
{
mtx_lock(&ctrlr->lock);
STAILQ_INSERT_TAIL(&ctrlr->fail_req, req, stailq);
mtx_unlock(&ctrlr->lock);
taskqueue_enqueue(ctrlr->taskqueue, &ctrlr->fail_req_task);
}
static void
nvme_ctrlr_fail_req_task(void *arg, int pending)
{
struct nvme_controller *ctrlr = arg;
struct nvme_request *req;
mtx_lock(&ctrlr->lock);
while (!STAILQ_EMPTY(&ctrlr->fail_req)) {
req = STAILQ_FIRST(&ctrlr->fail_req);
STAILQ_REMOVE_HEAD(&ctrlr->fail_req, stailq);
nvme_qpair_manual_complete_request(req->qpair, req,
NVME_SCT_GENERIC, NVME_SC_ABORTED_BY_REQUEST, TRUE);
}
mtx_unlock(&ctrlr->lock);
}
static int
nvme_ctrlr_wait_for_ready(struct nvme_controller *ctrlr, int desired_val)
{
int ms_waited;
union cc_register cc;
union csts_register csts;
cc.raw = nvme_mmio_read_4(ctrlr, cc);
csts.raw = nvme_mmio_read_4(ctrlr, csts);
if (cc.bits.en != desired_val) {
nvme_printf(ctrlr, "%s called with desired_val = %d "
"but cc.en = %d\n", __func__, desired_val, cc.bits.en);
return (ENXIO);
}
ms_waited = 0;
while (csts.bits.rdy != desired_val) {
DELAY(1000);
if (ms_waited++ > ctrlr->ready_timeout_in_ms) {
nvme_printf(ctrlr, "controller ready did not become %d "
"within %d ms\n", desired_val, ctrlr->ready_timeout_in_ms);
return (ENXIO);
}
csts.raw = nvme_mmio_read_4(ctrlr, csts);
}
return (0);
}
static void
nvme_ctrlr_disable(struct nvme_controller *ctrlr)
{
union cc_register cc;
union csts_register csts;
cc.raw = nvme_mmio_read_4(ctrlr, cc);
csts.raw = nvme_mmio_read_4(ctrlr, csts);
if (cc.bits.en == 1 && csts.bits.rdy == 0)
nvme_ctrlr_wait_for_ready(ctrlr, 1);
cc.bits.en = 0;
nvme_mmio_write_4(ctrlr, cc, cc.raw);
DELAY(5000);
nvme_ctrlr_wait_for_ready(ctrlr, 0);
}
static int
nvme_ctrlr_enable(struct nvme_controller *ctrlr)
{
union cc_register cc;
union csts_register csts;
union aqa_register aqa;
cc.raw = nvme_mmio_read_4(ctrlr, cc);
csts.raw = nvme_mmio_read_4(ctrlr, csts);
if (cc.bits.en == 1) {
if (csts.bits.rdy == 1)
return (0);
else
return (nvme_ctrlr_wait_for_ready(ctrlr, 1));
}
nvme_mmio_write_8(ctrlr, asq, ctrlr->adminq.cmd_bus_addr);
DELAY(5000);
nvme_mmio_write_8(ctrlr, acq, ctrlr->adminq.cpl_bus_addr);
DELAY(5000);
aqa.raw = 0;
/* acqs and asqs are 0-based. */
aqa.bits.acqs = ctrlr->adminq.num_entries-1;
aqa.bits.asqs = ctrlr->adminq.num_entries-1;
nvme_mmio_write_4(ctrlr, aqa, aqa.raw);
DELAY(5000);
cc.bits.en = 1;
cc.bits.css = 0;
cc.bits.ams = 0;
cc.bits.shn = 0;
cc.bits.iosqes = 6; /* SQ entry size == 64 == 2^6 */
cc.bits.iocqes = 4; /* CQ entry size == 16 == 2^4 */
/* This evaluates to 0, which is according to spec. */
cc.bits.mps = (PAGE_SIZE >> 13);
nvme_mmio_write_4(ctrlr, cc, cc.raw);
DELAY(5000);
return (nvme_ctrlr_wait_for_ready(ctrlr, 1));
}
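/*
 * Worked example for the register setup above (an illustrative sketch, not
 * taken from this file): with an admin queue of 128 entries, acqs == asqs ==
 * 127 (0x7f), since both fields are 0-based; with PAGE_SIZE == 4096,
 * cc.bits.mps == (4096 >> 13) == 0, i.e. a memory page size of 2^(12+0)
 * bytes; iosqes == 6 and iocqes == 4 encode the 64-byte SQ and 16-byte CQ
 * entry sizes as powers of two.
 */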
int
nvme_ctrlr_hw_reset(struct nvme_controller *ctrlr)
{
int i;
nvme_admin_qpair_disable(&ctrlr->adminq);
for (i = 0; i < ctrlr->num_io_queues; i++)
nvme_io_qpair_disable(&ctrlr->ioq[i]);
DELAY(100*1000);
nvme_ctrlr_disable(ctrlr);
return (nvme_ctrlr_enable(ctrlr));
}
void
nvme_ctrlr_reset(struct nvme_controller *ctrlr)
{
int cmpset;
cmpset = atomic_cmpset_32(&ctrlr->is_resetting, 0, 1);
if (cmpset == 0 || ctrlr->is_failed)
/*
* Controller is already resetting or has failed. Return
* immediately since there is no need to kick off another
* reset in these cases.
*/
return;
taskqueue_enqueue(ctrlr->taskqueue, &ctrlr->reset_task);
}
static int
nvme_ctrlr_identify(struct nvme_controller *ctrlr)
{
struct nvme_completion_poll_status status;
status.done = FALSE;
nvme_ctrlr_cmd_identify_controller(ctrlr, &ctrlr->cdata,
nvme_completion_poll_cb, &status);
while (status.done == FALSE)
pause("nvme", 1);
if (nvme_completion_is_error(&status.cpl)) {
nvme_printf(ctrlr, "nvme_identify_controller failed!\n");
return (ENXIO);
}
/*
* Use MDTS to ensure our default max_xfer_size doesn't exceed what the
* controller supports.
*/
if (ctrlr->cdata.mdts > 0)
ctrlr->max_xfer_size = min(ctrlr->max_xfer_size,
ctrlr->min_page_size * (1 << (ctrlr->cdata.mdts)));
return (0);
}
static int
nvme_ctrlr_set_num_qpairs(struct nvme_controller *ctrlr)
{
struct nvme_completion_poll_status status;
int cq_allocated, i, sq_allocated;
status.done = FALSE;
nvme_ctrlr_cmd_set_num_queues(ctrlr, ctrlr->num_io_queues,
nvme_completion_poll_cb, &status);
while (status.done == FALSE)
pause("nvme", 1);
if (nvme_completion_is_error(&status.cpl)) {
nvme_printf(ctrlr, "nvme_set_num_queues failed!\n");
return (ENXIO);
}
/*
* Data in cdw0 is 0-based.
* Lower 16-bits indicate number of submission queues allocated.
* Upper 16-bits indicate number of completion queues allocated.
*/
sq_allocated = (status.cpl.cdw0 & 0xFFFF) + 1;
cq_allocated = (status.cpl.cdw0 >> 16) + 1;
/*
* Check that the controller was able to allocate the number of
* queues we requested. If not, revert to one IO queue pair.
*/
if (sq_allocated < ctrlr->num_io_queues ||
cq_allocated < ctrlr->num_io_queues) {
/*
* Destroy extra IO queue pairs that were created at
* controller construction time but are no longer
* needed. This will only happen when a controller
* supports fewer queues than MSI-X vectors. This
* is not the normal case, but does occur with the
* Chatham prototype board.
*/
for (i = 1; i < ctrlr->num_io_queues; i++)
nvme_io_qpair_destroy(&ctrlr->ioq[i]);
ctrlr->num_io_queues = 1;
ctrlr->per_cpu_io_queues = 0;
}
return (0);
}
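/*
 * Worked example for the cdw0 decoding above (an illustrative sketch): if the
 * controller completes the command with cdw0 == 0x00030003, then sq_allocated
 * == 0x0003 + 1 == 4 and cq_allocated == 0x0003 + 1 == 4, so a request for up
 * to 4 IO queue pairs is satisfied and no extra queue pairs are destroyed.
 */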
static int
nvme_ctrlr_create_qpairs(struct nvme_controller *ctrlr)
{
struct nvme_completion_poll_status status;
struct nvme_qpair *qpair;
int i;
for (i = 0; i < ctrlr->num_io_queues; i++) {
qpair = &ctrlr->ioq[i];
status.done = FALSE;
nvme_ctrlr_cmd_create_io_cq(ctrlr, qpair, qpair->vector,
nvme_completion_poll_cb, &status);
while (status.done == FALSE)
pause("nvme", 1);
if (nvme_completion_is_error(&status.cpl)) {
nvme_printf(ctrlr, "nvme_create_io_cq failed!\n");
return (ENXIO);
}
status.done = FALSE;
nvme_ctrlr_cmd_create_io_sq(qpair->ctrlr, qpair,
nvme_completion_poll_cb, &status);
while (status.done == FALSE)
pause("nvme", 1);
if (nvme_completion_is_error(&status.cpl)) {
nvme_printf(ctrlr, "nvme_create_io_sq failed!\n");
return (ENXIO);
}
}
return (0);
}
static int
nvme_ctrlr_construct_namespaces(struct nvme_controller *ctrlr)
{
struct nvme_namespace *ns;
int i, status;
for (i = 0; i < ctrlr->cdata.nn; i++) {
ns = &ctrlr->ns[i];
status = nvme_ns_construct(ns, i+1, ctrlr);
if (status != 0)
return (status);
}
return (0);
}
static boolean_t
is_log_page_id_valid(uint8_t page_id)
{
switch (page_id) {
case NVME_LOG_ERROR:
case NVME_LOG_HEALTH_INFORMATION:
case NVME_LOG_FIRMWARE_SLOT:
return (TRUE);
}
return (FALSE);
}
static uint32_t
nvme_ctrlr_get_log_page_size(struct nvme_controller *ctrlr, uint8_t page_id)
{
uint32_t log_page_size;
switch (page_id) {
case NVME_LOG_ERROR:
log_page_size = min(
sizeof(struct nvme_error_information_entry) *
ctrlr->cdata.elpe,
NVME_MAX_AER_LOG_SIZE);
break;
case NVME_LOG_HEALTH_INFORMATION:
log_page_size = sizeof(struct nvme_health_information_page);
break;
case NVME_LOG_FIRMWARE_SLOT:
log_page_size = sizeof(struct nvme_firmware_page);
break;
default:
log_page_size = 0;
break;
}
return (log_page_size);
}
static void
nvme_ctrlr_log_critical_warnings(struct nvme_controller *ctrlr,
union nvme_critical_warning_state state)
{
if (state.bits.available_spare == 1)
nvme_printf(ctrlr, "available spare space below threshold\n");
if (state.bits.temperature == 1)
nvme_printf(ctrlr, "temperature above threshold\n");
if (state.bits.device_reliability == 1)
nvme_printf(ctrlr, "device reliability degraded\n");
if (state.bits.read_only == 1)
nvme_printf(ctrlr, "media placed in read only mode\n");
if (state.bits.volatile_memory_backup == 1)
nvme_printf(ctrlr, "volatile memory backup device failed\n");
if (state.bits.reserved != 0)
nvme_printf(ctrlr,
"unknown critical warning(s): state = 0x%02x\n", state.raw);
}
static void
nvme_ctrlr_async_event_log_page_cb(void *arg, const struct nvme_completion *cpl)
{
struct nvme_async_event_request *aer = arg;
struct nvme_health_information_page *health_info;
/*
* If the log page fetch for some reason completed with an error,
* don't pass log page data to the consumers. In practice, this case
* should never happen.
*/
if (nvme_completion_is_error(cpl))
nvme_notify_async_consumers(aer->ctrlr, &aer->cpl,
aer->log_page_id, NULL, 0);
else {
if (aer->log_page_id == NVME_LOG_HEALTH_INFORMATION) {
health_info = (struct nvme_health_information_page *)
aer->log_page_buffer;
nvme_ctrlr_log_critical_warnings(aer->ctrlr,
health_info->critical_warning);
/*
* Critical warnings reported through the
* SMART/health log page are persistent, so
* clear the associated bits in the async event
* config so that we do not receive repeated
* notifications for the same event.
*/
aer->ctrlr->async_event_config.raw &=
~health_info->critical_warning.raw;
nvme_ctrlr_cmd_set_async_event_config(aer->ctrlr,
aer->ctrlr->async_event_config, NULL, NULL);
}
/*
* Pass the cpl data from the original async event completion,
* not the log page fetch.
*/
nvme_notify_async_consumers(aer->ctrlr, &aer->cpl,
aer->log_page_id, aer->log_page_buffer, aer->log_page_size);
}
/*
* Repost another asynchronous event request to replace the one
* that just completed.
*/
nvme_ctrlr_construct_and_submit_aer(aer->ctrlr, aer);
}
static void
nvme_ctrlr_async_event_cb(void *arg, const struct nvme_completion *cpl)
{
struct nvme_async_event_request *aer = arg;
if (nvme_completion_is_error(cpl)) {
/*
* Do not retry failed async event requests. This avoids
* infinite loops where a new async event request is submitted
* to replace the one just failed, only to fail again and
* perpetuate the loop.
*/
return;
}
/* Associated log page is in bits 23:16 of completion entry dw0. */
aer->log_page_id = (cpl->cdw0 & 0xFF0000) >> 16;
nvme_printf(aer->ctrlr, "async event occurred (log page id=0x%x)\n",
aer->log_page_id);
if (is_log_page_id_valid(aer->log_page_id)) {
aer->log_page_size = nvme_ctrlr_get_log_page_size(aer->ctrlr,
aer->log_page_id);
memcpy(&aer->cpl, cpl, sizeof(*cpl));
nvme_ctrlr_cmd_get_log_page(aer->ctrlr, aer->log_page_id,
NVME_GLOBAL_NAMESPACE_TAG, aer->log_page_buffer,
aer->log_page_size, nvme_ctrlr_async_event_log_page_cb,
aer);
/* Wait to notify consumers until after log page is fetched. */
} else {
nvme_notify_async_consumers(aer->ctrlr, cpl, aer->log_page_id,
NULL, 0);
/*
* Repost another asynchronous event request to replace the one
* that just completed.
*/
nvme_ctrlr_construct_and_submit_aer(aer->ctrlr, aer);
}
}
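/*
 * Worked example for the log page extraction above (an illustrative sketch):
 * a completion with cpl->cdw0 == 0x00020000 yields log_page_id ==
 * (0x00020000 & 0xFF0000) >> 16 == 0x02; if that value matches
 * NVME_LOG_HEALTH_INFORMATION, the health log page is fetched before the
 * consumers are notified.
 */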
static void
nvme_ctrlr_construct_and_submit_aer(struct nvme_controller *ctrlr,
struct nvme_async_event_request *aer)
{
struct nvme_request *req;
aer->ctrlr = ctrlr;
req = nvme_allocate_request_null(nvme_ctrlr_async_event_cb, aer);
aer->req = req;
/*
* Disable timeout here, since asynchronous event requests should by
* nature never be timed out.
*/
req->timeout = FALSE;
req->cmd.opc = NVME_OPC_ASYNC_EVENT_REQUEST;
nvme_ctrlr_submit_admin_request(ctrlr, req);
}
static void
nvme_ctrlr_configure_aer(struct nvme_controller *ctrlr)
{
struct nvme_completion_poll_status status;
struct nvme_async_event_request *aer;
uint32_t i;
ctrlr->async_event_config.raw = 0xFF;
ctrlr->async_event_config.bits.reserved = 0;
status.done = FALSE;
nvme_ctrlr_cmd_get_feature(ctrlr, NVME_FEAT_TEMPERATURE_THRESHOLD,
0, NULL, 0, nvme_completion_poll_cb, &status);
while (status.done == FALSE)
pause("nvme", 1);
if (nvme_completion_is_error(&status.cpl) ||
(status.cpl.cdw0 & 0xFFFF) == 0xFFFF ||
(status.cpl.cdw0 & 0xFFFF) == 0x0000) {
nvme_printf(ctrlr, "temperature threshold not supported\n");
ctrlr->async_event_config.bits.temperature = 0;
}
nvme_ctrlr_cmd_set_async_event_config(ctrlr,
ctrlr->async_event_config, NULL, NULL);
/* aerl is a zero-based value, so we need to add 1 here. */
ctrlr->num_aers = min(NVME_MAX_ASYNC_EVENTS, (ctrlr->cdata.aerl+1));
for (i = 0; i < ctrlr->num_aers; i++) {
aer = &ctrlr->aer[i];
nvme_ctrlr_construct_and_submit_aer(ctrlr, aer);
}
}
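/*
 * Worked example for the AER count above (an illustrative sketch): a
 * controller reporting cdata.aerl == 3 permits 4 outstanding requests (the
 * field is 0-based), so num_aers == min(NVME_MAX_ASYNC_EVENTS, 4) and that
 * many asynchronous event requests are posted.
 */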
static void
nvme_ctrlr_configure_int_coalescing(struct nvme_controller *ctrlr)
{
ctrlr->int_coal_time = 0;
TUNABLE_INT_FETCH("hw.nvme.int_coal_time",
&ctrlr->int_coal_time);
ctrlr->int_coal_threshold = 0;
TUNABLE_INT_FETCH("hw.nvme.int_coal_threshold",
&ctrlr->int_coal_threshold);
nvme_ctrlr_cmd_set_interrupt_coalescing(ctrlr, ctrlr->int_coal_time,
ctrlr->int_coal_threshold, NULL, NULL);
}
static void
nvme_ctrlr_start(void *ctrlr_arg)
{
struct nvme_controller *ctrlr = ctrlr_arg;
int i;
nvme_qpair_reset(&ctrlr->adminq);
for (i = 0; i < ctrlr->num_io_queues; i++)
nvme_qpair_reset(&ctrlr->ioq[i]);
nvme_admin_qpair_enable(&ctrlr->adminq);
if (nvme_ctrlr_identify(ctrlr) != 0) {
nvme_ctrlr_fail(ctrlr);
return;
}
if (nvme_ctrlr_set_num_qpairs(ctrlr) != 0) {
nvme_ctrlr_fail(ctrlr);
return;
}
if (nvme_ctrlr_create_qpairs(ctrlr) != 0) {
nvme_ctrlr_fail(ctrlr);
return;
}
if (nvme_ctrlr_construct_namespaces(ctrlr) != 0) {
nvme_ctrlr_fail(ctrlr);
return;
}
nvme_ctrlr_configure_aer(ctrlr);
nvme_ctrlr_configure_int_coalescing(ctrlr);
for (i = 0; i < ctrlr->num_io_queues; i++)
nvme_io_qpair_enable(&ctrlr->ioq[i]);
}
void
nvme_ctrlr_start_config_hook(void *arg)
{
struct nvme_controller *ctrlr = arg;
nvme_ctrlr_start(ctrlr);
config_intrhook_disestablish(&ctrlr->config_hook);
ctrlr->is_initialized = 1;
nvme_notify_new_controller(ctrlr);
}
static void
nvme_ctrlr_reset_task(void *arg, int pending)
{
struct nvme_controller *ctrlr = arg;
int status;
nvme_printf(ctrlr, "resetting controller\n");
status = nvme_ctrlr_hw_reset(ctrlr);
/*
* Use pause instead of DELAY, so that we yield to any nvme interrupt
* handlers on this CPU that were blocked on a qpair lock. We want
* all nvme interrupts completed before proceeding with restarting the
* controller.
*
* XXX - any way to guarantee the interrupt handlers have quiesced?
*/
pause("nvmereset", hz / 10);
if (status == 0)
nvme_ctrlr_start(ctrlr);
else
nvme_ctrlr_fail(ctrlr);
atomic_cmpset_32(&ctrlr->is_resetting, 1, 0);
}
static void
nvme_ctrlr_intx_handler(void *arg)
{
struct nvme_controller *ctrlr = arg;
nvme_mmio_write_4(ctrlr, intms, 1);
nvme_qpair_process_completions(&ctrlr->adminq);
if (ctrlr->ioq[0].cpl)
nvme_qpair_process_completions(&ctrlr->ioq[0]);
nvme_mmio_write_4(ctrlr, intmc, 1);
}
static int
nvme_ctrlr_configure_intx(struct nvme_controller *ctrlr)
{
ctrlr->num_io_queues = 1;
ctrlr->per_cpu_io_queues = 0;
ctrlr->rid = 0;
ctrlr->res = bus_alloc_resource_any(ctrlr->dev, SYS_RES_IRQ,
&ctrlr->rid, RF_SHAREABLE | RF_ACTIVE);
if (ctrlr->res == NULL) {
nvme_printf(ctrlr, "unable to allocate shared IRQ\n");
return (ENOMEM);
}
bus_setup_intr(ctrlr->dev, ctrlr->res,
INTR_TYPE_MISC | INTR_MPSAFE, NULL, nvme_ctrlr_intx_handler,
ctrlr, &ctrlr->tag);
if (ctrlr->tag == NULL) {
nvme_printf(ctrlr, "unable to setup intx handler\n");
return (ENOMEM);
}
return (0);
}
static void
nvme_pt_done(void *arg, const struct nvme_completion *cpl)
{
struct nvme_pt_command *pt = arg;
bzero(&pt->cpl, sizeof(pt->cpl));
pt->cpl.cdw0 = cpl->cdw0;
pt->cpl.status = cpl->status;
pt->cpl.status.p = 0;
mtx_lock(pt->driver_lock);
wakeup(pt);
mtx_unlock(pt->driver_lock);
}
int
nvme_ctrlr_passthrough_cmd(struct nvme_controller *ctrlr,
struct nvme_pt_command *pt, uint32_t nsid, int is_user_buffer,
int is_admin_cmd)
{
struct nvme_request *req;
struct mtx *mtx;
struct buf *buf = NULL;
int ret = 0;
if (pt->len > 0) {
if (pt->len > ctrlr->max_xfer_size) {
nvme_printf(ctrlr, "pt->len (%d) "
"exceeds max_xfer_size (%d)\n", pt->len,
ctrlr->max_xfer_size);
return EIO;
}
if (is_user_buffer) {
/*
* Ensure the user buffer is wired for the duration of
* this passthrough command.
*/
PHOLD(curproc);
buf = getpbuf(NULL);
buf->b_saveaddr = buf->b_data;
buf->b_data = pt->buf;
buf->b_bufsize = pt->len;
buf->b_iocmd = pt->is_read ? BIO_READ : BIO_WRITE;
#ifdef NVME_UNMAPPED_BIO_SUPPORT
if (vmapbuf(buf, 1) < 0) {
#else
if (vmapbuf(buf) < 0) {
#endif
ret = EFAULT;
goto err;
}
req = nvme_allocate_request_vaddr(buf->b_data, pt->len,
nvme_pt_done, pt);
} else
req = nvme_allocate_request_vaddr(pt->buf, pt->len,
nvme_pt_done, pt);
} else
req = nvme_allocate_request_null(nvme_pt_done, pt);
req->cmd.opc = pt->cmd.opc;
req->cmd.cdw10 = pt->cmd.cdw10;
req->cmd.cdw11 = pt->cmd.cdw11;
req->cmd.cdw12 = pt->cmd.cdw12;
req->cmd.cdw13 = pt->cmd.cdw13;
req->cmd.cdw14 = pt->cmd.cdw14;
req->cmd.cdw15 = pt->cmd.cdw15;
req->cmd.nsid = nsid;
if (is_admin_cmd)
mtx = &ctrlr->lock;
else
mtx = &ctrlr->ns[nsid-1].lock;
mtx_lock(mtx);
pt->driver_lock = mtx;
if (is_admin_cmd)
nvme_ctrlr_submit_admin_request(ctrlr, req);
else
nvme_ctrlr_submit_io_request(ctrlr, req);
mtx_sleep(pt, mtx, PRIBIO, "nvme_pt", 0);
mtx_unlock(mtx);
pt->driver_lock = NULL;
err:
if (buf != NULL) {
relpbuf(buf, NULL);
PRELE(curproc);
}
return (ret);
}
static int
nvme_ctrlr_ioctl(struct cdev *cdev, u_long cmd, caddr_t arg, int flag,
struct thread *td)
{
struct nvme_controller *ctrlr;
struct nvme_pt_command *pt;
ctrlr = cdev->si_drv1;
switch (cmd) {
case NVME_RESET_CONTROLLER:
nvme_ctrlr_reset(ctrlr);
break;
case NVME_PASSTHROUGH_CMD:
pt = (struct nvme_pt_command *)arg;
return (nvme_ctrlr_passthrough_cmd(ctrlr, pt, pt->cmd.nsid,
1 /* is_user_buffer */, 1 /* is_admin_cmd */));
default:
return (ENOTTY);
}
return (0);
}
static struct cdevsw nvme_ctrlr_cdevsw = {
.d_version = D_VERSION,
.d_flags = 0,
.d_ioctl = nvme_ctrlr_ioctl
};
int
nvme_ctrlr_construct(struct nvme_controller *ctrlr, device_t dev)
{
union cap_lo_register cap_lo;
union cap_hi_register cap_hi;
int i, per_cpu_io_queues, rid;
int num_vectors_requested, num_vectors_allocated;
int status, timeout_period;
ctrlr->dev = dev;
mtx_init(&ctrlr->lock, "nvme ctrlr lock", NULL, MTX_DEF);
status = nvme_ctrlr_allocate_bar(ctrlr);
if (status != 0)
return (status);
/*
* Software emulators may set the doorbell stride to something
* other than zero, but this driver is not set up to handle that.
*/
cap_hi.raw = nvme_mmio_read_4(ctrlr, cap_hi);
if (cap_hi.bits.dstrd != 0)
return (ENXIO);
ctrlr->min_page_size = 1 << (12 + cap_hi.bits.mpsmin);
/* Get ready timeout value from controller, in units of 500ms. */
cap_lo.raw = nvme_mmio_read_4(ctrlr, cap_lo);
ctrlr->ready_timeout_in_ms = cap_lo.bits.to * 500;
timeout_period = NVME_DEFAULT_TIMEOUT_PERIOD;
TUNABLE_INT_FETCH("hw.nvme.timeout_period", &timeout_period);
timeout_period = min(timeout_period, NVME_MAX_TIMEOUT_PERIOD);
timeout_period = max(timeout_period, NVME_MIN_TIMEOUT_PERIOD);
ctrlr->timeout_period = timeout_period;
nvme_retry_count = NVME_DEFAULT_RETRY_COUNT;
TUNABLE_INT_FETCH("hw.nvme.retry_count", &nvme_retry_count);
per_cpu_io_queues = 1;
TUNABLE_INT_FETCH("hw.nvme.per_cpu_io_queues", &per_cpu_io_queues);
ctrlr->per_cpu_io_queues = per_cpu_io_queues ? TRUE : FALSE;
if (ctrlr->per_cpu_io_queues)
ctrlr->num_io_queues = mp_ncpus;
else
ctrlr->num_io_queues = 1;
ctrlr->force_intx = 0;
TUNABLE_INT_FETCH("hw.nvme.force_intx", &ctrlr->force_intx);
ctrlr->enable_aborts = 0;
TUNABLE_INT_FETCH("hw.nvme.enable_aborts", &ctrlr->enable_aborts);
ctrlr->msix_enabled = 1;
if (ctrlr->force_intx) {
ctrlr->msix_enabled = 0;
goto intx;
}
/* One vector per IO queue, plus one vector for admin queue. */
num_vectors_requested = ctrlr->num_io_queues + 1;
/*
* If we cannot even allocate 2 vectors (one for admin, one for
* I/O), then revert to INTx.
*/
if (pci_msix_count(dev) < 2) {
ctrlr->msix_enabled = 0;
goto intx;
} else if (pci_msix_count(dev) < num_vectors_requested) {
ctrlr->per_cpu_io_queues = FALSE;
ctrlr->num_io_queues = 1;
num_vectors_requested = 2; /* one for admin, one for I/O */
}
num_vectors_allocated = num_vectors_requested;
if (pci_alloc_msix(dev, &num_vectors_allocated) != 0) {
ctrlr->msix_enabled = 0;
goto intx;
} else if (num_vectors_allocated < num_vectors_requested) {
if (num_vectors_allocated < 2) {
pci_release_msi(dev);
ctrlr->msix_enabled = 0;
goto intx;
} else {
ctrlr->per_cpu_io_queues = FALSE;
ctrlr->num_io_queues = 1;
/*
* Release whatever vectors were allocated, and just
* reallocate the two needed for the admin and single
* I/O qpair.
*/
num_vectors_allocated = 2;
pci_release_msi(dev);
if (pci_alloc_msix(dev, &num_vectors_allocated) != 0)
panic("could not reallocate any vectors\n");
if (num_vectors_allocated != 2)
panic("could not reallocate 2 vectors\n");
}
}
/*
* On earlier FreeBSD releases, there are reports that
* pci_alloc_msix() can return successfully with all vectors
* requested, but a subsequent bus_alloc_resource_any()
* for one of those vectors fails. This issue occurs more
* readily with multiple devices using per-CPU vectors.
* To workaround this issue, try to allocate the resources now,
* and fall back to INTx if we cannot allocate all of them.
* This issue cannot be reproduced on more recent versions of
* FreeBSD which have increased the maximum number of MSI-X
* vectors, but adding the workaround makes it easier for
* vendors wishing to import this driver into kernels based on
* older versions of FreeBSD.
*/
for (i = 0; i < num_vectors_allocated; i++) {
rid = i + 1;
ctrlr->msi_res[i] = bus_alloc_resource_any(ctrlr->dev,
SYS_RES_IRQ, &rid, RF_ACTIVE);
if (ctrlr->msi_res[i] == NULL) {
ctrlr->msix_enabled = 0;
while (i > 0) {
i--;
bus_release_resource(ctrlr->dev,
SYS_RES_IRQ,
rman_get_rid(ctrlr->msi_res[i]),
ctrlr->msi_res[i]);
}
pci_release_msi(dev);
nvme_printf(ctrlr, "could not obtain all MSI-X "
"resources, reverting to intx\n");
break;
}
}
intx:
if (!ctrlr->msix_enabled)
nvme_ctrlr_configure_intx(ctrlr);
ctrlr->max_xfer_size = NVME_MAX_XFER_SIZE;
nvme_ctrlr_construct_admin_qpair(ctrlr);
status = nvme_ctrlr_construct_io_qpairs(ctrlr);
if (status != 0)
return (status);
ctrlr->cdev = make_dev(&nvme_ctrlr_cdevsw, device_get_unit(dev),
UID_ROOT, GID_WHEEL, 0600, "nvme%d", device_get_unit(dev));
if (ctrlr->cdev == NULL)
return (ENXIO);
ctrlr->cdev->si_drv1 = (void *)ctrlr;
ctrlr->taskqueue = taskqueue_create("nvme_taskq", M_WAITOK,
taskqueue_thread_enqueue, &ctrlr->taskqueue);
taskqueue_start_threads(&ctrlr->taskqueue, 1, PI_DISK, "nvme taskq");
ctrlr->is_resetting = 0;
ctrlr->is_initialized = 0;
ctrlr->notification_sent = 0;
TASK_INIT(&ctrlr->reset_task, 0, nvme_ctrlr_reset_task, ctrlr);
TASK_INIT(&ctrlr->fail_req_task, 0, nvme_ctrlr_fail_req_task, ctrlr);
STAILQ_INIT(&ctrlr->fail_req);
ctrlr->is_failed = FALSE;
return (0);
}
void
nvme_ctrlr_destruct(struct nvme_controller *ctrlr, device_t dev)
{
int i;
/*
* Notify the controller of a shutdown, even though this is due to
* a driver unload, not a system shutdown (this path is not invoked
* during shutdown). This ensures the controller receives a
* shutdown notification in case the system is shutdown before
* reloading the driver.
*/
nvme_ctrlr_shutdown(ctrlr);
nvme_ctrlr_disable(ctrlr);
taskqueue_free(ctrlr->taskqueue);
for (i = 0; i < NVME_MAX_NAMESPACES; i++)
nvme_ns_destruct(&ctrlr->ns[i]);
if (ctrlr->cdev)
destroy_dev(ctrlr->cdev);
for (i = 0; i < ctrlr->num_io_queues; i++) {
nvme_io_qpair_destroy(&ctrlr->ioq[i]);
}
free(ctrlr->ioq, M_NVME);
nvme_admin_qpair_destroy(&ctrlr->adminq);
if (ctrlr->resource != NULL) {
bus_release_resource(dev, SYS_RES_MEMORY,
ctrlr->resource_id, ctrlr->resource);
}
if (ctrlr->bar4_resource != NULL) {
bus_release_resource(dev, SYS_RES_MEMORY,
ctrlr->bar4_resource_id, ctrlr->bar4_resource);
}
if (ctrlr->tag)
bus_teardown_intr(ctrlr->dev, ctrlr->res, ctrlr->tag);
if (ctrlr->res)
bus_release_resource(ctrlr->dev, SYS_RES_IRQ,
rman_get_rid(ctrlr->res), ctrlr->res);
if (ctrlr->msix_enabled)
pci_release_msi(dev);
}
void
nvme_ctrlr_shutdown(struct nvme_controller *ctrlr)
{
union cc_register cc;
union csts_register csts;
int ticks = 0;
cc.raw = nvme_mmio_read_4(ctrlr, cc);
cc.bits.shn = NVME_SHN_NORMAL;
nvme_mmio_write_4(ctrlr, cc, cc.raw);
csts.raw = nvme_mmio_read_4(ctrlr, csts);
while ((csts.bits.shst != NVME_SHST_COMPLETE) && (ticks++ < 5*hz)) {
pause("nvme shn", 1);
csts.raw = nvme_mmio_read_4(ctrlr, csts);
}
if (csts.bits.shst != NVME_SHST_COMPLETE)
nvme_printf(ctrlr, "did not complete shutdown within 5 seconds "
"of notification\n");
}
void
nvme_ctrlr_submit_admin_request(struct nvme_controller *ctrlr,
struct nvme_request *req)
{
nvme_qpair_submit_request(&ctrlr->adminq, req);
}
void
nvme_ctrlr_submit_io_request(struct nvme_controller *ctrlr,
struct nvme_request *req)
{
struct nvme_qpair *qpair;
if (ctrlr->per_cpu_io_queues)
qpair = &ctrlr->ioq[curcpu];
else
qpair = &ctrlr->ioq[0];
nvme_qpair_submit_request(qpair, req);
}
device_t
nvme_ctrlr_get_device(struct nvme_controller *ctrlr)
{
return (ctrlr->dev);
}
const struct nvme_controller_data *
nvme_ctrlr_get_data(struct nvme_controller *ctrlr)
{
return (&ctrlr->cdata);
}
| jrobhoward/SCADAbase | sys/dev/nvme/nvme_ctrlr.c | C | bsd-3-clause | 31,739 |
<?php
namespace backend\controllers;
use Yii;
use common\models\User;
use common\models\Post;
use common\models\PostSearch;
use yii\web\Controller;
use yii\web\NotFoundHttpException;
use yii\filters\VerbFilter;
use yii\web\UploadedFile;
use common\models\Asset;
/**
* PostController implements the CRUD actions for Post model.
*/
class PostController extends Controller
{
public function behaviors()
{
return [
'verbs' => [
'class' => VerbFilter::className(),
'actions' => [
'delete' => ['post'],
],
],
'access' => [
'class' => \yii\filters\AccessControl::className(),
'rules' => [
[
'allow' => true,
'roles' => ['admin'],
],
],
],
];
}
/**
* Lists all Post models.
* @return mixed
*/
public function actionIndex()
{
$searchModel = new PostSearch();
$dataProvider = $searchModel->search(Yii::$app->request->queryParams);
return $this->render('index', [
'searchModel' => $searchModel,
'dataProvider' => $dataProvider,
]);
}
/**
* Displays a single Post model.
* @param integer $id
* @return mixed
*/
public function actionView($id)
{
$model = $this->findModel($id);
$mainImage = $model->getMainImage();
$sliderImages = $model->getSliderImages();
return $this->render('view', compact('model', 'mainImage', 'sliderImages'));
}
/**
* Creates a new Post model.
* If creation is successful, the browser will be redirected to the 'view' page.
* @return mixed
*/
public function actionCreate()
{
$model = new Post();
$model->status = true;
$user = Yii::$app->user->identity;
$mainImage = $model->getMainImage();
$sliderImages = $model->getSliderImages();
if ($model->load(Yii::$app->request->post()) && $model->validate()) {
$model->user_id = $user->id;
$model->save();
// Main Image
$mainImage->uploadedFile = UploadedFile::getInstance($model, 'mainImageFile');
$mainImage->cropData = $model->mainImageCropData;
$mainImage->assetable_type = $mainImage::ASSETABLE_POST;
$mainImage->assetable_id = $model->id;
$mainImage->saveCroppedAsset();
// Save images
$newSliderImageFiles = UploadedFile::getInstances($model, 'sliderImageFiles');
foreach ($newSliderImageFiles as $file)
{
$asset = new Asset;
$asset->uploadedFile = $file;
$asset->assetable_type = $asset::ASSETABLE_POST;
$asset->thumbnail = $asset::THUMBNAIL_CONTENT;
$asset->assetable_id = $model->id;
$asset->saveAsset();
}
return $this->redirect(['view', 'id' => $model->id]);
} else {
return $this->render('create', compact('model', 'mainImage', 'sliderImages'));
}
}
/**
* Updates an existing Post model.
* If update is successful, the browser will be redirected to the 'view' page.
* @param integer $id
* @return mixed
*/
public function actionUpdate($id)
{
$model = $this->findModel($id);
$mainImage = $model->getMainImage();
$sliderImages = $model->getSliderImages();
$assetKeys = [];
foreach ($sliderImages as $asset) {
$assetKeys[] = $asset->id;
}
$model->sliderImageKeys = implode(';', $assetKeys);
if ($model->load(Yii::$app->request->post()) && $model->validate()) {
// Main Image
if($model->mainImageRemove) {
$mainImage->delete();
}
else {
$mainImage->uploadedFile = UploadedFile::getInstance($model, 'mainImageFile');
$mainImage->cropData = $model->mainImageCropData;
$mainImage->assetable_type = $mainImage::ASSETABLE_POST;
$mainImage->assetable_id = $model->id;
$mainImage->saveCroppedAsset();
}
// Slider Images
// Remove selected images
$currentAssetKeys = explode(';', $model->sliderImageKeys);
if(count($currentAssetKeys) > 0)
{
foreach ($sliderImages as $asset) {
if(!in_array($asset->id, $currentAssetKeys))
{
$asset->delete();
}
}
}
            // Remove assets whose underlying files no longer exist on disk
foreach($sliderImages as $asset)
{
if(!file_exists($asset->getFilePath()))
{
$asset->delete();
}
}
// Save images
$newSliderImageFiles = UploadedFile::getInstances($model, 'sliderImageFiles');
// var_dump($currentAssetKeys);
// var_dump($newSliderImageFiles);
// die;
foreach ($newSliderImageFiles as $file)
{
$asset = new Asset;
$asset->uploadedFile = $file;
$asset->assetable_type = $asset::ASSETABLE_POST;
$asset->thumbnail = $asset::THUMBNAIL_CONTENT;
$asset->assetable_id = $model->id;
$asset->saveAsset();
}
$model->save();
return $this->redirect(['view', 'id' => $model->id]);
} else {
return $this->render('update', compact('model', 'mainImage', 'sliderImages'));
}
}
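    /*
     * Illustrative note on the key format used above: sliderImageKeys is the
     * ';'-separated list of Asset ids built via implode(';', $assetKeys),
     * e.g. "12;15;18" (the ids here are made up). Assets whose ids are missing
     * from that list when the form is posted back are deleted, and any newly
     * uploaded slider files are attached afterwards.
     */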
/**
* Deletes an existing Post model.
* If deletion is successful, the browser will be redirected to the 'index' page.
* @param integer $id
* @return mixed
*/
public function actionDelete($id)
{
$post = $this->findModel($id);
$assets = Asset::find()->where(['assetable_type' => Asset::ASSETABLE_POST ,'assetable_id' => $id])->all();
foreach ($assets as $asset) {
$asset->delete();
}
$post->delete();
return $this->redirect(['index']);
}
/**
     * Stub action for image delete requests; returns an empty JSON response.
*
* @return mixed
*/
public function actionImageDelete()
{
Yii::$app->response->format = \yii\web\Response::FORMAT_JSON;
return [];
}
/**
* Finds the Post model based on its primary key value.
* If the model is not found, a 404 HTTP exception will be thrown.
* @param integer $id
* @return Post the loaded model
* @throws NotFoundHttpException if the model cannot be found
*/
protected function findModel($id)
{
if (($model = Post::findOne($id)) !== null) {
return $model;
} else {
throw new NotFoundHttpException('The requested page does not exist.');
}
}
}
| pro100olgert/lm | backend/controllers/PostController.php | PHP | bsd-3-clause | 7,197 |
<!DOCTYPE html>
<html class="writer-html5" lang="en" >
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>S5P_L2_CLOUD_CAL — HARP 1.14 documentation</title>
<link rel="stylesheet" href="../_static/css/theme.css" type="text/css" />
<link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
<link rel="stylesheet" href="../_static/custom.css" type="text/css" />
<!--[if lt IE 9]>
<script src="../_static/js/html5shiv.min.js"></script>
<![endif]-->
<script type="text/javascript" id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
<script src="../_static/jquery.js"></script>
<script src="../_static/underscore.js"></script>
<script src="../_static/doctools.js"></script>
<script src="../_static/language_data.js"></script>
<script async="async" src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.7/latest.js?config=TeX-AMS-MML_HTMLorMML"></script>
<script type="text/javascript" src="../_static/js/theme.js"></script>
<link rel="index" title="Index" href="../genindex.html" />
</head>
<body class="wy-body-for-nav">
<div class="wy-grid-for-nav">
<nav data-toggle="wy-nav-shift" class="wy-nav-side">
<div class="wy-side-scroll">
<div class="wy-side-nav-search" >
<a href="../index.html" class="icon icon-home"> HARP
</a>
<div class="version">
1.14
</div>
</div>
<div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
<ul>
<li class="toctree-l1"><a class="reference internal" href="../install.html">Installation</a></li>
<li class="toctree-l1"><a class="reference internal" href="../conventions/index.html">Conventions</a></li>
<li class="toctree-l1"><a class="reference internal" href="../algorithms/index.html">Algorithms</a></li>
<li class="toctree-l1"><a class="reference internal" href="../operations.html">Operations</a></li>
<li class="toctree-l1"><a class="reference internal" href="index.html">Ingestion definitions</a></li>
<li class="toctree-l1"><a class="reference internal" href="../libharp.html">C library</a></li>
<li class="toctree-l1"><a class="reference internal" href="../idl.html">IDL interface</a></li>
<li class="toctree-l1"><a class="reference internal" href="../matlab.html">MATLAB interface</a></li>
<li class="toctree-l1"><a class="reference internal" href="../python.html">Python interface</a></li>
<li class="toctree-l1"><a class="reference internal" href="../R.html">R interface</a></li>
<li class="toctree-l1"><a class="reference internal" href="../tools.html">Command line tools</a></li>
</ul>
</div>
</div>
</nav>
<section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
<nav class="wy-nav-top" aria-label="top navigation">
<i data-toggle="wy-nav-top" class="fa fa-bars"></i>
<a href="../index.html">HARP</a>
</nav>
<div class="wy-nav-content">
<div class="rst-content">
<div role="navigation" aria-label="breadcrumbs navigation">
<ul class="wy-breadcrumbs">
<li><a href="../index.html" class="icon icon-home"></a> »</li>
<li>S5P_L2_CLOUD_CAL</li>
<li class="wy-breadcrumbs-aside">
</li>
</ul>
<hr/>
</div>
<div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article">
<div itemprop="articleBody">
<div class="section" id="s5p-l2-cloud-cal">
<h1>S5P_L2_CLOUD_CAL</h1>
<div class="section" id="variables">
<h2>Variables</h2>
<p>The table below lists the variables that are present in the HARP product that results from an ingestion of <code class="docutils literal notranslate"><span class="pre">S5P_L2_CLOUD_CAL</span></code> data.</p>
<table class="colwidths-given docutils align-default">
<colgroup>
<col style="width: 25%" />
<col style="width: 5%" />
<col style="width: 15%" />
<col style="width: 15%" />
<col style="width: 40%" />
</colgroup>
<thead>
<tr class="row-odd"><th class="head"><p>field name</p></th>
<th class="head"><p>type</p></th>
<th class="head"><p>dimensions</p></th>
<th class="head"><p>unit</p></th>
<th class="head"><p>description</p></th>
</tr>
</thead>
<tbody>
<tr class="row-even"><td><p><strong>scan_subindex</strong></p></td>
<td><p>int16</p></td>
<td><p>{<em>time</em>}</p></td>
<td></td>
<td><p>pixel index (0-based) within the scanline</p></td>
</tr>
<tr class="row-odd"><td><p><strong>datetime_start</strong></p></td>
<td><p>double</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[seconds since 2010-01-01]</p></td>
<td><p>start time of the measurement</p></td>
</tr>
<tr class="row-even"><td><p><strong>datetime_length</strong></p></td>
<td><p>double</p></td>
<td></td>
<td><p>[s]</p></td>
<td><p>duration of the measurement</p></td>
</tr>
<tr class="row-odd"><td><p><strong>orbit_index</strong></p></td>
<td><p>int32</p></td>
<td></td>
<td></td>
<td><p>absolute orbit number</p></td>
</tr>
<tr class="row-even"><td><p><strong>validity</strong></p></td>
<td><p>int32</p></td>
<td><p>{<em>time</em>}</p></td>
<td></td>
<td><p>processing quality flag</p></td>
</tr>
<tr class="row-odd"><td><p><strong>latitude</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[degree_north]</p></td>
<td><p>latitude of the ground pixel center (WGS84)</p></td>
</tr>
<tr class="row-even"><td><p><strong>longitude</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[degree_east]</p></td>
<td><p>longitude of the ground pixel center (WGS84)</p></td>
</tr>
<tr class="row-odd"><td><p><strong>latitude_bounds</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>, 4}</p></td>
<td><p>[degree_north]</p></td>
<td><p>latitudes of the ground pixel corners (WGS84)</p></td>
</tr>
<tr class="row-even"><td><p><strong>longitude_bounds</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>, 4}</p></td>
<td><p>[degree_east]</p></td>
<td><p>longitudes of the ground pixel corners (WGS84)</p></td>
</tr>
<tr class="row-odd"><td><p><strong>sensor_latitude</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[degree_north]</p></td>
<td><p>latitude of the geodetic sub-satellite point (WGS84)</p></td>
</tr>
<tr class="row-even"><td><p><strong>sensor_longitude</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[degree_east]</p></td>
<td><p>longitude of the geodetic sub-satellite point (WGS84)</p></td>
</tr>
<tr class="row-odd"><td><p><strong>sensor_altitude</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[m]</p></td>
<td><p>altitude of the satellite with respect to the geodetic sub-satellite point (WGS84)</p></td>
</tr>
<tr class="row-even"><td><p><strong>solar_zenith_angle</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[degree]</p></td>
<td><p>zenith angle of the Sun at the ground pixel location (WGS84); angle measured away from the vertical</p></td>
</tr>
<tr class="row-odd"><td><p><strong>solar_azimuth_angle</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[degree]</p></td>
<td><p>azimuth angle of the Sun at the ground pixel location (WGS84); angle measured East-of-North</p></td>
</tr>
<tr class="row-even"><td><p><strong>sensor_zenith_angle</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[degree]</p></td>
<td><p>zenith angle of the satellite at the ground pixel location (WGS84); angle measured away from the vertical</p></td>
</tr>
<tr class="row-odd"><td><p><strong>sensor_azimuth_angle</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[degree]</p></td>
<td><p>azimuth angle of the satellite at the ground pixel location (WGS84); angle measured East-of-North</p></td>
</tr>
<tr class="row-even"><td><p><strong>cloud_fraction</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[]</p></td>
<td><p>retrieved fraction of horizontal area occupied by clouds using the OCRA/ROCINN CAL model</p></td>
</tr>
<tr class="row-odd"><td><p><strong>cloud_fraction_uncertainty</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[]</p></td>
<td><p>uncertainty of the retrieved fraction of horizontal area occupied by clouds using the OCRA/ROCINN CAL model</p></td>
</tr>
<tr class="row-even"><td><p><strong>cloud_fraction_validity</strong></p></td>
<td><p>int8</p></td>
<td><p>{<em>time</em>}</p></td>
<td></td>
<td><p>continuous quality descriptor, varying between 0 (no data) and 100 (full quality data)</p></td>
</tr>
<tr class="row-odd"><td><p><strong>cloud_fraction_apriori</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[]</p></td>
<td><p>effective radiometric cloud fraction a priori</p></td>
</tr>
<tr class="row-even"><td><p><strong>cloud_base_pressure</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[Pa]</p></td>
<td><p>cloud base pressure calculated using the OCRA/ROCINN CAL model</p></td>
</tr>
<tr class="row-odd"><td><p><strong>cloud_base_pressure_uncertainty</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[Pa]</p></td>
<td><p>error of the cloud base pressure calculated using the OCRA/ROCINN CAL model</p></td>
</tr>
<tr class="row-even"><td><p><strong>cloud_base_height</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[m]</p></td>
<td><p>cloud base height calculated using the OCRA/ROCINN CAL model</p></td>
</tr>
<tr class="row-odd"><td><p><strong>cloud_base_height_uncertainty</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[m]</p></td>
<td><p>error of the cloud base height calculated using the OCRA/ROCINN CAL model</p></td>
</tr>
<tr class="row-even"><td><p><strong>cloud_top_pressure</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[Pa]</p></td>
<td><p>retrieved atmospheric pressure at the level of cloud top using the OCRA/ROCINN CAL model</p></td>
</tr>
<tr class="row-odd"><td><p><strong>cloud_top_pressure_uncertainty</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[Pa]</p></td>
<td><p>uncertainty of the retrieved atmospheric pressure at the level of cloud top using the OCRA/ROCINN CAL model</p></td>
</tr>
<tr class="row-even"><td><p><strong>cloud_top_height</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[m]</p></td>
<td><p>retrieved altitude of the cloud top using the OCRA/ROCINN CAL model</p></td>
</tr>
<tr class="row-odd"><td><p><strong>cloud_top_height_uncertainty</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[m]</p></td>
<td><p>uncertainty of the altitude of the cloud top using the OCRA/ROCINN CAL model</p></td>
</tr>
<tr class="row-even"><td><p><strong>cloud_top_temperature</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[K]</p></td>
<td><p>atmospheric temperature at cloud top level using the OCRA/ROCINN CAL model</p></td>
</tr>
<tr class="row-odd"><td><p><strong>cloud_optical_depth</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[]</p></td>
<td><p>retrieved cloud optical depth using the OCRA/ROCINN CAL model</p></td>
</tr>
<tr class="row-even"><td><p><strong>cloud_optical_depth_uncertainty</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[]</p></td>
<td><p>uncertainty of the retrieved cloud optical depth using the OCRA/ROCINN CAL model</p></td>
</tr>
<tr class="row-odd"><td><p><strong>cloud_type</strong></p></td>
<td><p>int8</p></td>
<td><p>{<em>time</em>}</p></td>
<td></td>
<td><p>phase of the retrieved cloud; enumeration values: clear_sky (0), liquid_water_clouds (1), ice_clouds (2)</p></td>
</tr>
<tr class="row-even"><td><p><strong>surface_albedo</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[]</p></td>
<td><p>surface albedo fitted using the OCRA/ROCINN CAL model</p></td>
</tr>
<tr class="row-odd"><td><p><strong>surface_albedo_uncertainty</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[]</p></td>
<td><p>uncertainty of the surface albedo fitted using the OCRA/ROCINN CAL model</p></td>
</tr>
<tr class="row-even"><td><p><strong>surface_altitude</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[m]</p></td>
<td><p>surface altitude</p></td>
</tr>
<tr class="row-odd"><td><p><strong>surface_altitude_uncertainty</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[m]</p></td>
<td><p>surface altitude precision</p></td>
</tr>
<tr class="row-even"><td><p><strong>surface_pressure</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[Pa]</p></td>
<td><p>surface pressure</p></td>
</tr>
<tr class="row-odd"><td><p><strong>surface_meridional_wind_velocity</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[m/s]</p></td>
<td><p>northward wind</p></td>
</tr>
<tr class="row-even"><td><p><strong>surface_zonal_wind_velocity</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[m/s]</p></td>
<td><p>eastward wind</p></td>
</tr>
<tr class="row-odd"><td><p><strong>snow_ice_type</strong></p></td>
<td><p>int8</p></td>
<td><p>{<em>time</em>}</p></td>
<td></td>
<td><p>surface snow/ice type; enumeration values: snow_free_land (0), sea_ice (1), permanent_ice (2), snow (3), ocean (4)</p></td>
</tr>
<tr class="row-even"><td><p><strong>sea_ice_fraction</strong></p></td>
<td><p>float</p></td>
<td><p>{<em>time</em>}</p></td>
<td><p>[]</p></td>
<td><p>sea-ice concentration (as a fraction)</p></td>
</tr>
<tr class="row-odd"><td><p><strong>index</strong></p></td>
<td><p>int32</p></td>
<td><p>{<em>time</em>}</p></td>
<td></td>
<td><p>zero-based index of the sample within the source product</p></td>
</tr>
</tbody>
</table>
</div>
<div class="section" id="ingestion-options">
<h2>Ingestion options</h2>
<p>The table below lists the available ingestion options for <code class="docutils literal notranslate"><span class="pre">S5P_L2_CLOUD</span></code> products.</p>
<table class="colwidths-given docutils align-default">
<colgroup>
<col style="width: 15%" />
<col style="width: 25%" />
<col style="width: 60%" />
</colgroup>
<thead>
<tr class="row-odd"><th class="head"><p>option name</p></th>
<th class="head"><p>legal values</p></th>
<th class="head"><p>description</p></th>
</tr>
</thead>
<tbody>
<tr class="row-even"><td><p>model</p></td>
<td><p>CAL, CRB</p></td>
<td><p>whether to retrieve the cloud properties from the CAL model or the CRB model; option values are ‘CAL’ (default) and ‘CRB’</p></td>
</tr>
</tbody>
</table>
<p>This definition is only applicable when: model=CAL or model unset</p>
</div>
<div class="section" id="mapping-description">
<h2>Mapping description</h2>
<p>The table below details where and how each variable was retrieved from the input product.</p>
<table class="docutils align-default">
<colgroup>
<col style="width: 15%" />
<col style="width: 6%" />
<col style="width: 79%" />
</colgroup>
<thead>
<tr class="row-odd"><th class="head"><p>field name</p></th>
<th class="head" colspan="2"><p>mapping description</p></th>
</tr>
</thead>
<tbody>
<tr class="row-even"><td><p><strong>scan_subindex</strong></p></td>
<td><p><em>description</em></p></td>
<td><p>the scanline and pixel dimensions are collapsed into a temporal dimension; the index of the pixel within the scanline is computed as the index on the temporal dimension modulo the number of ground pixels per scanline</p></td>
</tr>
<tr class="row-odd"><td rowspan="2"><p><strong>datetime_start</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/time, /PRODUCT/delta_time[]</strong></p></td>
</tr>
<tr class="row-even"><td><p><em>description</em></p></td>
<td><p>time converted from milliseconds since a reference time (given as seconds since 2010-01-01) to seconds since 2010-01-01 (using 86400 seconds per day)</p></td>
</tr>
<tr class="row-odd"><td rowspan="2"><p><strong>datetime_length</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/@time_coverage_resolution</strong></p></td>
</tr>
<tr class="row-even"><td><p><em>description</em></p></td>
<td><p>the measurement length is parsed assuming the ISO 8601 ‘PT%(interval_seconds)fS’ format</p></td>
</tr>
<tr class="row-odd"><td><p><strong>orbit_index</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/@orbit</strong></p></td>
</tr>
<tr class="row-even"><td rowspan="2"><p><strong>validity</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/SUPPORT_DATA/DETAILED_RESULTS/processing_quality_flags[]</strong></p></td>
</tr>
<tr class="row-odd"><td><p><em>description</em></p></td>
<td><p>the uint32 data is cast to int32</p></td>
</tr>
<tr class="row-even"><td><p><strong>latitude</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/latitude[]</strong></p></td>
</tr>
<tr class="row-odd"><td><p><strong>longitude</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/longitude[]</strong></p></td>
</tr>
<tr class="row-even"><td><p><strong>latitude_bounds</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds[]</strong></p></td>
</tr>
<tr class="row-odd"><td><p><strong>longitude_bounds</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds[]</strong></p></td>
</tr>
<tr class="row-even"><td rowspan="2"><p><strong>sensor_latitude</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/SUPPORT_DATA/GEOLOCATIONS/satellite_latitude[]</strong></p></td>
</tr>
<tr class="row-odd"><td><p><em>description</em></p></td>
<td><p>the satellite latitude associated with a scanline is repeated for each pixel in the scanline</p></td>
</tr>
<tr class="row-even"><td rowspan="2"><p><strong>sensor_longitude</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/SUPPORT_DATA/GEOLOCATIONS/satellite_longitude[]</strong></p></td>
</tr>
<tr class="row-odd"><td><p><em>description</em></p></td>
<td><p>the satellite longitude associated with a scanline is repeated for each pixel in the scanline</p></td>
</tr>
<tr class="row-even"><td rowspan="2"><p><strong>sensor_altitude</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/SUPPORT_DATA/GEOLOCATIONS/satellite_altitude[]</strong></p></td>
</tr>
<tr class="row-odd"><td><p><em>description</em></p></td>
<td><p>the satellite altitude associated with a scanline is repeated for each pixel in the scanline</p></td>
</tr>
<tr class="row-even"><td><p><strong>solar_zenith_angle</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/SUPPORT_DATA/GEOLOCATIONS/solar_zenith_angle[]</strong></p></td>
</tr>
<tr class="row-odd"><td><p><strong>solar_azimuth_angle</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/SUPPORT_DATA/GEOLOCATIONS/solar_azimuth_angle[]</strong></p></td>
</tr>
<tr class="row-even"><td><p><strong>sensor_zenith_angle</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/SUPPORT_DATA/GEOLOCATIONS/viewing_zenith_angle[]</strong></p></td>
</tr>
<tr class="row-odd"><td><p><strong>sensor_azimuth_angle</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/SUPPORT_DATA/GEOLOCATIONS/viewing_azimuth_angle[]</strong></p></td>
</tr>
<tr class="row-even"><td><p><strong>cloud_fraction</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/cloud_fraction[]</strong></p></td>
</tr>
<tr class="row-odd"><td><p><strong>cloud_fraction_uncertainty</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/cloud_fraction_precision[]</strong></p></td>
</tr>
<tr class="row-even"><td><p><strong>cloud_fraction_validity</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/qa_value</strong></p></td>
</tr>
<tr class="row-odd"><td><p><strong>cloud_fraction_apriori</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/SUPPORT_DATA/DETAILED_RESULTS/cloud_fraction_apriori[]</strong></p></td>
</tr>
<tr class="row-even"><td><p><strong>cloud_base_pressure</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/cloud_base_pressure[]</strong></p></td>
</tr>
<tr class="row-odd"><td><p><strong>cloud_base_pressure_uncertainty</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/cloud_base_pressure_precision[]</strong></p></td>
</tr>
<tr class="row-even"><td><p><strong>cloud_base_height</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/cloud_base_height[]</strong></p></td>
</tr>
<tr class="row-odd"><td><p><strong>cloud_base_height_uncertainty</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/cloud_base_height_precision[]</strong></p></td>
</tr>
<tr class="row-even"><td><p><strong>cloud_top_pressure</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/cloud_top_pressure[]</strong></p></td>
</tr>
<tr class="row-odd"><td><p><strong>cloud_top_pressure_uncertainty</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/cloud_top_pressure_precision[]</strong></p></td>
</tr>
<tr class="row-even"><td><p><strong>cloud_top_height</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/cloud_top_height[]</strong></p></td>
</tr>
<tr class="row-odd"><td><p><strong>cloud_top_height_uncertainty</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/cloud_top_height_precision[]</strong></p></td>
</tr>
<tr class="row-even"><td rowspan="3"><p><strong>cloud_top_temperature</strong></p></td>
<td><p><em>available</em></p></td>
<td><p>optional</p></td>
</tr>
<tr class="row-odd"><td><p><em>condition</em></p></td>
<td><p>processor version >= 02.00.00</p></td>
</tr>
<tr class="row-even"><td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/SUPPORT_DATA/DETAILED_RESULTS/cloud_top_temperature[]</strong></p></td>
</tr>
<tr class="row-odd"><td><p><strong>cloud_optical_depth</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/cloud_optical_thickness[]</strong></p></td>
</tr>
<tr class="row-even"><td><p><strong>cloud_optical_depth_uncertainty</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/cloud_optical_thickness_precision[]</strong></p></td>
</tr>
<tr class="row-odd"><td rowspan="4"><p><strong>cloud_type</strong></p></td>
<td><p><em>available</em></p></td>
<td><p>optional</p></td>
</tr>
<tr class="row-even"><td><p><em>condition</em></p></td>
<td><p>processor version >= 02.00.00</p></td>
</tr>
<tr class="row-odd"><td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/SUPPORT_DATA/DETAILED_RESULTS/cloud_phase[]</strong></p></td>
</tr>
<tr class="row-even"><td><p><em>description</em></p></td>
<td><p>values are mapped to signed values, so 255 (undefined cloud phase) becomes -1</p></td>
</tr>
<tr class="row-odd"><td><p><strong>surface_albedo</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/SUPPORT_DATA/DETAILED_RESULTS/surface_albedo_fitted[]</strong></p></td>
</tr>
<tr class="row-even"><td><p><strong>surface_albedo_uncertainty</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/SUPPORT_DATA/DETAILED_RESULTS/surface_albedo_fitted_precision[]</strong></p></td>
</tr>
<tr class="row-odd"><td><p><strong>surface_altitude</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/SUPPORT_DATA/INPUT_DATA/surface_altitude[]</strong></p></td>
</tr>
<tr class="row-even"><td><p><strong>surface_altitude_uncertainty</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/SUPPORT_DATA/INPUT_DATA/surface_altitude_precision[]</strong></p></td>
</tr>
<tr class="row-odd"><td><p><strong>surface_pressure</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/SUPPORT_DATA/INPUT_DATA/surface_pressure[]</strong></p></td>
</tr>
<tr class="row-even"><td rowspan="3"><p><strong>surface_meridional_wind_velocity</strong></p></td>
<td><p><em>available</em></p></td>
<td><p>optional</p></td>
</tr>
<tr class="row-odd"><td><p><em>condition</em></p></td>
<td><p>processor version >= 02.00.00</p></td>
</tr>
<tr class="row-even"><td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/SUPPORT_DATA/INPUT_DATA/northward_wind[]</strong></p></td>
</tr>
<tr class="row-odd"><td rowspan="3"><p><strong>surface_zonal_wind_velocity</strong></p></td>
<td><p><em>available</em></p></td>
<td><p>optional</p></td>
</tr>
<tr class="row-even"><td><p><em>condition</em></p></td>
<td><p>processor version >= 02.00.00</p></td>
</tr>
<tr class="row-odd"><td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/SUPPORT_DATA/INPUT_DATA/eastward_wind[]</strong></p></td>
</tr>
<tr class="row-even"><td rowspan="2"><p><strong>snow_ice_type</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/SUPPORT_DATA/DETAILED_RESULTS/snow_ice_flag_nise[]</strong></p></td>
</tr>
<tr class="row-odd"><td><p><em>description</em></p></td>
<td><p>0: snow_free_land (0), 1-100: sea_ice (1), 101: permanent_ice (2), 103: snow (3), 255: ocean (4), other values map to -1</p></td>
</tr>
<tr class="row-even"><td rowspan="2"><p><strong>sea_ice_fraction</strong></p></td>
<td><p><em>path</em></p></td>
<td><p><strong>/PRODUCT/SUPPORT_DATA/DETAILED_RESULTS/snow_ice_flag_nise[]</strong></p></td>
</tr>
<tr class="row-odd"><td><p><em>description</em></p></td>
<td><p>if 1 <= snow_ice_flag <= 100 then snow_ice_flag/100.0 else 0.0</p></td>
</tr>
</tbody>
</table>
</div>
</div>
</div>
</div>
<footer>
<hr/>
<div role="contentinfo">
<p>
© Copyright 2015-2021 S[&]T, The Netherlands.
</p>
</div>
</footer>
</div>
</div>
</section>
</div>
<script type="text/javascript">
jQuery(function () {
SphinxRtdTheme.Navigation.enable(true);
});
</script>
</body>
</html>
|
stcorp/harp
|
doc/html/ingestions/S5P_L2_CLOUD_CAL.html
|
HTML
|
bsd-3-clause
| 26,706
|
package jme3_test;
import com.jme3.app.SimpleApplication;
import com.jme3.material.Material;
import com.jme3.math.Vector3f;
import com.jme3.scene.Geometry;
import com.jme3.scene.shape.Box;
import com.jme3.math.ColorRGBA;
public class HelloJME3 extends SimpleApplication {
public static void main(String[] args){
HelloJME3 app = new HelloJME3();
app.start();
}
@Override
public void simpleInitApp() {
Box b = new Box(Vector3f.ZERO, 1, 1, 1);
Geometry geom = new Geometry("Box", b);
Material mat = new Material(assetManager,
"Common/MatDefs/Misc/Unshaded.j3md");
mat.setColor("Color", ColorRGBA.Blue);
geom.setMaterial(mat);
rootNode.attachChild(geom);
}
}
|
jarek-przygodzki/jME3-OSGi
|
test/jME3 Test/src/jme3_test/HelloJME3.java
|
Java
|
bsd-3-clause
| 756
|
import qt
class CollapsibleMultilineText(qt.QTextEdit):
"""Text field that expands when it gets the focus and remain collapsed otherwise"""
def __init__(self):
super(CollapsibleMultilineText, self).__init__()
self.minHeight = 20
self.maxHeight = 50
self.setFixedHeight(self.minHeight)
def focusInEvent(self, event):
# super(CollapsibleMultilineText, self).focusInEvent(event)
self.setFixedHeight(self.maxHeight)
def focusOutEvent(self, event):
# super(CollapsibleMultilineText, self).focusOutEvent(event)
self.setFixedHeight(self.minHeight)
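# Illustrative usage only (assumes this runs inside 3D Slicer, where the `qt`
# bindings imported above are available; `parentLayout` is a placeholder for an
# existing QLayout, not part of this module):
#
#   text = CollapsibleMultilineText()
#   parentLayout.addWidget(text)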
|
acil-bwh/SlicerCIP
|
Scripted/CIP_/CIP/ui/CollapsibleMultilineText.py
|
Python
|
bsd-3-clause
| 595
|
<?php
namespace PragmaRX\Health\Checkers;
use Illuminate\Filesystem\Filesystem;
use PragmaRX\Health\Support\Result;
class Writable extends Base
{
protected $filesystem;
/**
* Check resource.
*
* @return Result
*/
public function check()
{
foreach ($this->target->paths as $path) {
if (! $this->getFilesystem()->isWritable($path)) {
return $this->makeResult(
false,
sprintf($this->target->getErrorMessage(), $path)
);
}
}
return $this->makeHealthyResult();
}
public function getFilesystem()
{
if ($this->filesystem) {
return $this->filesystem;
}
return $this->filesystem = app(Filesystem::class);
}
}
|
antonioribeiro/health
|
src/Checkers/Writable.php
|
PHP
|
bsd-3-clause
| 815
|
# Distributed under the OSI-approved BSD 3-Clause License. See accompanying
# file Copyright.txt or https://cmake.org/licensing for details.
#.rst:
# CPack
# -----
#
# Build binary and source package installers.
#
# Variables common to all CPack generators
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# The
# CPack module generates binary and source installers in a variety of
# formats using the cpack program. Inclusion of the CPack module adds
# two new targets to the resulting makefiles, package and
# package_source, which build the binary and source installers,
# respectively. The generated binary installers contain everything
# installed via CMake's INSTALL command (and the deprecated
# INSTALL_FILES, INSTALL_PROGRAMS, and INSTALL_TARGETS commands).
#
# For certain kinds of binary installers (including the graphical
# installers on Mac OS X and Windows), CPack generates installers that
# allow users to select individual application components to install.
# See CPackComponent module for that.
#
# The CPACK_GENERATOR variable has different meanings in different
# contexts. In your CMakeLists.txt file, CPACK_GENERATOR is a *list of
# generators*: when run with no other arguments, CPack will iterate over
# that list and produce one package for each generator. In a
# CPACK_PROJECT_CONFIG_FILE, though, CPACK_GENERATOR is a *string naming
# a single generator*. If you need per-cpack-generator logic to
# control *other* cpack settings, then you need a
# CPACK_PROJECT_CONFIG_FILE.
#
# The CMake source tree itself contains a CPACK_PROJECT_CONFIG_FILE.
# See the top level file CMakeCPackOptions.cmake.in for an example.
#
# If set, the CPACK_PROJECT_CONFIG_FILE is included automatically on a
# per-generator basis. It need only contain overrides.
#
# Here's how it works:
#
# * cpack runs
# * it includes CPackConfig.cmake
# * it iterates over the generators listed in that file's
# CPACK_GENERATOR list variable (unless told to use just a
# specific one via -G on the command line...)
# * foreach generator, it then
#
# - sets CPACK_GENERATOR to the one currently being iterated
# - includes the CPACK_PROJECT_CONFIG_FILE
# - produces the package for that generator
#
# This is the key: For each generator listed in CPACK_GENERATOR in
# CPackConfig.cmake, cpack will *reset* CPACK_GENERATOR internally to
# *the one currently being used* and then include the
# CPACK_PROJECT_CONFIG_FILE.
#
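# For illustration only (a hypothetical override file, not shipped with this
# module), a CPACK_PROJECT_CONFIG_FILE could contain per-generator logic such
# as:
#
#   if(CPACK_GENERATOR STREQUAL "NSIS")
#     set(CPACK_PACKAGE_INSTALL_DIRECTORY "MyApp 1.0")
#   elseif(CPACK_GENERATOR STREQUAL "DEB")
#     set(CPACK_PACKAGE_FILE_NAME "myapp-1.0-amd64")
#   endif()
#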
# Before including this CPack module in your CMakeLists.txt file, there
# are a variety of variables that can be set to customize the resulting
# installers. The most commonly-used variables are listed below; a minimal
# usage sketch follows the list:
#
# .. variable:: CPACK_PACKAGE_NAME
#
# The name of the package (or application). If not specified, defaults to
# the project name.
#
# .. variable:: CPACK_PACKAGE_VENDOR
#
# The name of the package vendor (e.g., "Kitware").
#
# .. variable:: CPACK_PACKAGE_DIRECTORY
#
# The directory in which CPack is doing its packaging. If it is not set
# then this will default (internally) to the build dir. This variable may
# be defined in a CPack config file or from the cpack command line option
# "-B". If set, the command line option overrides the value found in the
# config file.
#
# .. variable:: CPACK_PACKAGE_VERSION_MAJOR
#
# Package major Version
#
# .. variable:: CPACK_PACKAGE_VERSION_MINOR
#
# Package minor Version
#
# .. variable:: CPACK_PACKAGE_VERSION_PATCH
#
# Package patch Version
#
# .. variable:: CPACK_PACKAGE_DESCRIPTION_FILE
#
# A text file used to describe the project. Used, for example, in the
# introduction screen of a CPack-generated Windows installer to describe
# the project.
#
# .. variable:: CPACK_PACKAGE_DESCRIPTION_SUMMARY
#
# Short description of the project (only a few words). Default value is::
#
# ${PROJECT_DESCRIPTION}
#
# if a DESCRIPTION has been given to the project() call, or a
# CMake-generated string based on PROJECT_NAME otherwise.
#
# .. variable:: CPACK_PACKAGE_FILE_NAME
#
# The name of the package file to generate, not including the
# extension. For example, cmake-2.6.1-Linux-i686. The default value is::
#
# ${CPACK_PACKAGE_NAME}-${CPACK_PACKAGE_VERSION}-${CPACK_SYSTEM_NAME}.
#
# .. variable:: CPACK_PACKAGE_INSTALL_DIRECTORY
#
# Installation directory on the target system. This may be used by some
# CPack generators like NSIS to create an installation directory, e.g.,
# "CMake 2.5" below the installation prefix. All installed elements will be
# put inside this directory.
#
# .. variable:: CPACK_PACKAGE_ICON
#
# A branding image that will be displayed inside the installer (used by GUI
# installers).
#
# .. variable:: CPACK_PACKAGE_CHECKSUM
#
# An algorithm that will be used to generate an additional file with the
# checksum of the package. The output file name will be::
#
# ${CPACK_PACKAGE_FILE_NAME}.${CPACK_PACKAGE_CHECKSUM}
#
# Supported algorithms are those listed by the
# :ref:`string(\<HASH\>) <Supported Hash Algorithms>`
# command.
#
# .. variable:: CPACK_PROJECT_CONFIG_FILE
#
# CPack-time project CPack configuration file. This file is included at cpack
# time, once per generator after CPack has set CPACK_GENERATOR to the
# actual generator being used. It allows per-generator setting of CPACK_*
# variables at cpack time.
#
# .. variable:: CPACK_RESOURCE_FILE_LICENSE
#
# License to be embedded in the installer. It will typically be displayed
# to the user by the produced installer (often with an explicit "Accept"
# button, for graphical installers) prior to installation. This license
# file is NOT added to the installed files but is used by some CPack generators
# like NSIS. If you want to install a license file (it may be the same as this
# one) along with your project, you must add an appropriate CMake INSTALL
# command in your CMakeLists.txt.
#
# .. variable:: CPACK_RESOURCE_FILE_README
#
# ReadMe file to be embedded in the installer. It typically describes in
# some detail the purpose of the project during the installation. Not all
# CPack generators use this file.
#
# .. variable:: CPACK_RESOURCE_FILE_WELCOME
#
# Welcome file to be embedded in the installer. It welcomes users to this
# installer. Typically used in the graphical installers on Windows and Mac
# OS X.
#
# .. variable:: CPACK_MONOLITHIC_INSTALL
#
# Disables the component-based installation mechanism. When set, the
# component specification is ignored and all installed items are put in a
# single "MONOLITHIC" package. Some CPack generators do monolithic
# packaging by default and may be asked to do component packaging by
# setting CPACK_<GENNAME>_COMPONENT_INSTALL to 1/TRUE.
#
# .. variable:: CPACK_GENERATOR
#
# List of CPack generators to use. If not specified, CPack will create a
# set of options CPACK_BINARY_<GENNAME> (e.g., CPACK_BINARY_NSIS) allowing
# the user to enable/disable individual generators. This variable may be
# used on the command line as well as in::
#
# cpack -D CPACK_GENERATOR="ZIP;TGZ" /path/to/build/tree
#
# .. variable:: CPACK_OUTPUT_CONFIG_FILE
#
# The name of the CPack binary configuration file. This file is the CPack
# configuration generated by the CPack module for binary
# installers. Defaults to CPackConfig.cmake.
#
# .. variable:: CPACK_PACKAGE_EXECUTABLES
#
# Lists each of the executables and associated text label to be used to
# create Start Menu shortcuts. For example, setting this to the list
# ccmake;CMake will create a shortcut named "CMake" that will execute the
# installed executable ccmake. Not all CPack generators use it (at least
# NSIS, WIX and OSXX11 do).
#
# .. variable:: CPACK_STRIP_FILES
#
# List of files to be stripped. Starting with CMake 2.6.0 CPACK_STRIP_FILES
# will be a boolean variable which enables stripping of all files (a list
# of files evaluates to TRUE in CMake, so this change is compatible).
#
# .. variable:: CPACK_VERBATIM_VARIABLES
#
# If set to TRUE, values of variables prefixed with CPACK_ will be escaped
# before being written to the configuration files, so that the cpack program
# receives them exactly as they were specified. If not, characters like quotes
# and backslashes can cause parsing errors or alter the value received by the
# cpack program. Defaults to FALSE for backwards compatibility.
#
# * Mandatory : NO
# * Default : FALSE
#
# The following CPack variables are specific to source packages, and
# will not affect binary packages:
#
# .. variable:: CPACK_SOURCE_PACKAGE_FILE_NAME
#
# The name of the source package. For example cmake-2.6.1.
#
# .. variable:: CPACK_SOURCE_STRIP_FILES
#
# List of files in the source tree that will be stripped. Starting with
# CMake 2.6.0 CPACK_SOURCE_STRIP_FILES will be a boolean variable which
# enables stripping of all files (a list of files evaluates to TRUE in
# CMake, so this change is compatible).
#
# .. variable:: CPACK_SOURCE_GENERATOR
#
# List of generators used for the source packages. As with CPACK_GENERATOR,
# if this is not specified then CPack will create a set of options (e.g.,
# CPACK_SOURCE_ZIP) allowing users to select which packages will be
# generated.
#
# .. variable:: CPACK_SOURCE_OUTPUT_CONFIG_FILE
#
# The name of the CPack source configuration file. This file is the CPack
# configuration generated by the CPack module for source
# installers. Defaults to CPackSourceConfig.cmake.
#
# .. variable:: CPACK_SOURCE_IGNORE_FILES
#
# Pattern of files in the source tree that won't be packaged when building
# a source package. This is a list of regular expression patterns (that
# must be properly escaped), e.g.,
# /CVS/;/\\.svn/;\\.swp$;\\.#;/#;.*~;cscope.*
#
# The following variables are for advanced uses of CPack:
#
# .. variable:: CPACK_CMAKE_GENERATOR
#
# What CMake generator should be used if the project is a CMake
# project. Defaults to the value of CMAKE_GENERATOR; few users will want to
# change this setting.
#
# .. variable:: CPACK_INSTALL_CMAKE_PROJECTS
#
# List of four values that specify what project to install. The four values
# are: Build directory, Project Name, Project Component, Directory. If
# omitted, CPack will build an installer that installs everything.
#
# .. variable:: CPACK_SYSTEM_NAME
#
# System name, defaults to the value of ${CMAKE_SYSTEM_NAME}.
#
# .. variable:: CPACK_PACKAGE_VERSION
#
# Package full version, used internally. By default, this is built from
# CPACK_PACKAGE_VERSION_MAJOR, CPACK_PACKAGE_VERSION_MINOR, and
# CPACK_PACKAGE_VERSION_PATCH.
#
# .. variable:: CPACK_TOPLEVEL_TAG
#
# Directory for the installed files.
#
# .. variable:: CPACK_INSTALL_COMMANDS
#
# Extra commands to install components.
#
# .. variable:: CPACK_INSTALLED_DIRECTORIES
#
# Extra directories to install.
#
# .. variable:: CPACK_PACKAGE_INSTALL_REGISTRY_KEY
#
# Registry key used when installing this project. This is only used by
# installers for Windows. The default value is based on the installation
# directory.
#
# .. variable:: CPACK_CREATE_DESKTOP_LINKS
#
# List of desktop links to create.
# Each desktop link requires a corresponding start menu shortcut
# as created by :variable:`CPACK_PACKAGE_EXECUTABLES`.
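#
# A minimal usage sketch (illustrative only; the values below are placeholders,
# not defaults of this module): set the variables first, then include CPack.
#
#   set(CPACK_PACKAGE_NAME "MyApp")
#   set(CPACK_PACKAGE_VERSION_MAJOR "1")
#   set(CPACK_PACKAGE_VERSION_MINOR "0")
#   set(CPACK_PACKAGE_VERSION_PATCH "0")
#   set(CPACK_GENERATOR "TGZ;ZIP")
#   include(CPack)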
# Define this var in order to avoid (or warn about) multiple inclusion
if(CPack_CMake_INCLUDED)
message(WARNING "CPack.cmake has already been included!!")
else()
set(CPack_CMake_INCLUDED 1)
endif()
# Pick a configuration file
set(cpack_input_file "${CMAKE_ROOT}/Templates/CPackConfig.cmake.in")
if(EXISTS "${CMAKE_SOURCE_DIR}/CPackConfig.cmake.in")
set(cpack_input_file "${CMAKE_SOURCE_DIR}/CPackConfig.cmake.in")
endif()
set(cpack_source_input_file "${CMAKE_ROOT}/Templates/CPackConfig.cmake.in")
if(EXISTS "${CMAKE_SOURCE_DIR}/CPackSourceConfig.cmake.in")
set(cpack_source_input_file "${CMAKE_SOURCE_DIR}/CPackSourceConfig.cmake.in")
endif()
# Backward compatibility
# Include CPackComponent macros if it has not already been included before.
include(CPackComponent)
# Macro for setting values if a user did not overwrite them
# Mangles CMake-special characters. Only kept for backwards compatibility.
macro(cpack_set_if_not_set name value)
message(DEPRECATION "cpack_set_if_not_set is obsolete; do not use.")
_cpack_set_default("${name}" "${value}")
endmacro()
# cpack_encode_variables - Function to encode variables for the configuration file
# find any variable that starts with CPACK and create a variable
# _CPACK_OTHER_VARIABLES_ that contains SET commands for
# each cpack variable. _CPACK_OTHER_VARIABLES_ is then
# used as an @ replacement in configure_file for the CPackConfig.
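# For illustration (hypothetical value): if CPACK_PACKAGE_NAME were "MyApp",
# _CPACK_OTHER_VARIABLES_ would contain a line of the form
#   SET(CPACK_PACKAGE_NAME "MyApp")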
function(cpack_encode_variables)
set(commands "")
get_cmake_property(res VARIABLES)
foreach(var ${res})
if(var MATCHES "^CPACK")
if(CPACK_VERBATIM_VARIABLES)
_cpack_escape_for_cmake(value "${${var}}")
else()
set(value "${${var}}")
endif()
string(APPEND commands "\nSET(${var} \"${value}\")")
endif()
endforeach()
set(_CPACK_OTHER_VARIABLES_ "${commands}" PARENT_SCOPE)
endfunction()
# Internal use functions
function(_cpack_set_default name value)
if(NOT DEFINED "${name}")
set("${name}" "${value}" PARENT_SCOPE)
endif()
endfunction()
function(_cpack_escape_for_cmake var value)
string(REGEX REPLACE "([\\\$\"])" "\\\\\\1" escaped "${value}")
set("${var}" "${escaped}" PARENT_SCOPE)
endfunction()
# Set the package name
_cpack_set_default(CPACK_PACKAGE_NAME "${CMAKE_PROJECT_NAME}")
_cpack_set_default(CPACK_PACKAGE_VERSION_MAJOR "0")
_cpack_set_default(CPACK_PACKAGE_VERSION_MINOR "1")
_cpack_set_default(CPACK_PACKAGE_VERSION_PATCH "1")
_cpack_set_default(CPACK_PACKAGE_VERSION
"${CPACK_PACKAGE_VERSION_MAJOR}.${CPACK_PACKAGE_VERSION_MINOR}.${CPACK_PACKAGE_VERSION_PATCH}")
_cpack_set_default(CPACK_PACKAGE_VENDOR "Humanity")
if(CMAKE_PROJECT_DESCRIPTION)
_cpack_set_default(CPACK_PACKAGE_DESCRIPTION_SUMMARY
"${CMAKE_PROJECT_DESCRIPTION}")
else()
_cpack_set_default(CPACK_PACKAGE_DESCRIPTION_SUMMARY
"${CMAKE_PROJECT_NAME} built using CMake")
endif()
_cpack_set_default(CPACK_PACKAGE_DESCRIPTION_FILE
"${CMAKE_ROOT}/Templates/CPack.GenericDescription.txt")
_cpack_set_default(CPACK_RESOURCE_FILE_LICENSE
"${CMAKE_ROOT}/Templates/CPack.GenericLicense.txt")
_cpack_set_default(CPACK_RESOURCE_FILE_README
"${CMAKE_ROOT}/Templates/CPack.GenericDescription.txt")
_cpack_set_default(CPACK_RESOURCE_FILE_WELCOME
"${CMAKE_ROOT}/Templates/CPack.GenericWelcome.txt")
_cpack_set_default(CPACK_MODULE_PATH "${CMAKE_MODULE_PATH}")
if(CPACK_NSIS_ENABLE_UNINSTALL_BEFORE_INSTALL)
set(CPACK_NSIS_ENABLE_UNINSTALL_BEFORE_INSTALL ON)
endif()
if(CPACK_NSIS_MODIFY_PATH)
set(CPACK_NSIS_MODIFY_PATH ON)
endif()
set(__cpack_system_name ${CMAKE_SYSTEM_NAME})
if(__cpack_system_name MATCHES "Windows")
if(CMAKE_SIZEOF_VOID_P EQUAL 8)
set(__cpack_system_name win64)
else()
set(__cpack_system_name win32)
endif()
endif()
_cpack_set_default(CPACK_SYSTEM_NAME "${__cpack_system_name}")
# Root dir: default value should be the string literal "$PROGRAMFILES"
# for backwards compatibility. Projects may set this value to anything.
# When creating 64 bit binaries we set the default value to "$PROGRAMFILES64"
if("x${__cpack_system_name}" STREQUAL "xwin64")
set(__cpack_root_default "$PROGRAMFILES64")
else()
set(__cpack_root_default "$PROGRAMFILES")
endif()
_cpack_set_default(CPACK_NSIS_INSTALL_ROOT "${__cpack_root_default}")
# <project>-<major>.<minor>.<patch>-<release>-<platform>.<pkgtype>
_cpack_set_default(CPACK_PACKAGE_FILE_NAME
"${CPACK_PACKAGE_NAME}-${CPACK_PACKAGE_VERSION}-${CPACK_SYSTEM_NAME}")
_cpack_set_default(CPACK_PACKAGE_INSTALL_DIRECTORY
"${CPACK_PACKAGE_NAME} ${CPACK_PACKAGE_VERSION}")
_cpack_set_default(CPACK_PACKAGE_INSTALL_REGISTRY_KEY
"${CPACK_PACKAGE_INSTALL_DIRECTORY}")
_cpack_set_default(CPACK_PACKAGE_DEFAULT_LOCATION "/")
_cpack_set_default(CPACK_PACKAGE_RELOCATABLE "true")
# always force to exactly "true" or "false" for CPack.Info.plist.in:
if(CPACK_PACKAGE_RELOCATABLE)
set(CPACK_PACKAGE_RELOCATABLE "true")
else()
set(CPACK_PACKAGE_RELOCATABLE "false")
endif()
macro(cpack_check_file_exists file description)
if(NOT EXISTS "${file}")
message(SEND_ERROR "CPack ${description} file: \"${file}\" could not be found.")
endif()
endmacro()
cpack_check_file_exists("${CPACK_PACKAGE_DESCRIPTION_FILE}" "package description")
cpack_check_file_exists("${CPACK_RESOURCE_FILE_LICENSE}" "license resource")
cpack_check_file_exists("${CPACK_RESOURCE_FILE_README}" "readme resource")
cpack_check_file_exists("${CPACK_RESOURCE_FILE_WELCOME}" "welcome resource")
macro(cpack_optional_append _list _cond _item)
if(${_cond})
set(${_list} ${${_list}} ${_item})
endif()
endmacro()
#.rst:
# .. variable:: CPACK_BINARY_<GENNAME>
#
# CPack generated options for binary generators. The CPack.cmake module
# generates (when CPACK_GENERATOR is not set) a set of CMake options (see
# CMake option command) which may then be used to select the CPack
# generator(s) to be used when launching the package target.
#
# Provide options to choose generators. We might check here if the required
# tools for the generators exist and set the defaults according to the results.
if(NOT CPACK_GENERATOR)
if(UNIX)
if(CYGWIN)
option(CPACK_BINARY_CYGWIN "Enable to build Cygwin binary packages" ON)
else()
if(APPLE)
option(CPACK_BINARY_BUNDLE "Enable to build OSX bundles" OFF)
option(CPACK_BINARY_DRAGNDROP "Enable to build OSX Drag And Drop package" OFF)
option(CPACK_BINARY_OSXX11 "Enable to build OSX X11 packages" OFF)
option(CPACK_BINARY_PACKAGEMAKER "Enable to build PackageMaker packages" OFF)
option(CPACK_BINARY_PRODUCTBUILD "Enable to build productbuild packages" OFF)
else()
option(CPACK_BINARY_TZ "Enable to build TZ packages" ON)
endif()
option(CPACK_BINARY_DEB "Enable to build Debian packages" OFF)
option(CPACK_BINARY_NSIS "Enable to build NSIS packages" OFF)
option(CPACK_BINARY_RPM "Enable to build RPM packages" OFF)
option(CPACK_BINARY_STGZ "Enable to build STGZ packages" ON)
option(CPACK_BINARY_TBZ2 "Enable to build TBZ2 packages" OFF)
option(CPACK_BINARY_TGZ "Enable to build TGZ packages" ON)
option(CPACK_BINARY_TXZ "Enable to build TXZ packages" OFF)
endif()
else()
option(CPACK_BINARY_7Z "Enable to build 7-Zip packages" OFF)
option(CPACK_BINARY_NSIS "Enable to build NSIS packages" ON)
option(CPACK_BINARY_WIX "Enable to build WiX packages" OFF)
option(CPACK_BINARY_ZIP "Enable to build ZIP packages" OFF)
endif()
option(CPACK_BINARY_IFW "Enable to build IFW packages" OFF)
cpack_optional_append(CPACK_GENERATOR CPACK_BINARY_7Z 7Z)
cpack_optional_append(CPACK_GENERATOR CPACK_BINARY_BUNDLE Bundle)
cpack_optional_append(CPACK_GENERATOR CPACK_BINARY_CYGWIN CygwinBinary)
cpack_optional_append(CPACK_GENERATOR CPACK_BINARY_DEB DEB)
cpack_optional_append(CPACK_GENERATOR CPACK_BINARY_DRAGNDROP DragNDrop)
cpack_optional_append(CPACK_GENERATOR CPACK_BINARY_IFW IFW)
cpack_optional_append(CPACK_GENERATOR CPACK_BINARY_NSIS NSIS)
cpack_optional_append(CPACK_GENERATOR CPACK_BINARY_OSXX11 OSXX11)
cpack_optional_append(CPACK_GENERATOR CPACK_BINARY_PACKAGEMAKER PackageMaker)
cpack_optional_append(CPACK_GENERATOR CPACK_BINARY_PRODUCTBUILD productbuild)
cpack_optional_append(CPACK_GENERATOR CPACK_BINARY_RPM RPM)
cpack_optional_append(CPACK_GENERATOR CPACK_BINARY_STGZ STGZ)
cpack_optional_append(CPACK_GENERATOR CPACK_BINARY_TBZ2 TBZ2)
cpack_optional_append(CPACK_GENERATOR CPACK_BINARY_TGZ TGZ)
cpack_optional_append(CPACK_GENERATOR CPACK_BINARY_TXZ TXZ)
cpack_optional_append(CPACK_GENERATOR CPACK_BINARY_TZ TZ)
cpack_optional_append(CPACK_GENERATOR CPACK_BINARY_WIX WIX)
cpack_optional_append(CPACK_GENERATOR CPACK_BINARY_ZIP ZIP)
endif()
# Provide options to choose source generators
if(NOT CPACK_SOURCE_GENERATOR)
if(UNIX)
if(CYGWIN)
option(CPACK_SOURCE_CYGWIN "Enable to build Cygwin source packages" ON)
else()
option(CPACK_SOURCE_RPM "Enable to build RPM source packages" OFF)
option(CPACK_SOURCE_TBZ2 "Enable to build TBZ2 source packages" ON)
option(CPACK_SOURCE_TGZ "Enable to build TGZ source packages" ON)
option(CPACK_SOURCE_TXZ "Enable to build TXZ source packages" ON)
option(CPACK_SOURCE_TZ "Enable to build TZ source packages" ON)
option(CPACK_SOURCE_ZIP "Enable to build ZIP source packages" OFF)
endif()
else()
option(CPACK_SOURCE_7Z "Enable to build 7-Zip source packages" ON)
option(CPACK_SOURCE_ZIP "Enable to build ZIP source packages" ON)
endif()
cpack_optional_append(CPACK_SOURCE_GENERATOR CPACK_SOURCE_7Z 7Z)
cpack_optional_append(CPACK_SOURCE_GENERATOR CPACK_SOURCE_CYGWIN CygwinSource)
cpack_optional_append(CPACK_SOURCE_GENERATOR CPACK_SOURCE_RPM RPM)
cpack_optional_append(CPACK_SOURCE_GENERATOR CPACK_SOURCE_TBZ2 TBZ2)
cpack_optional_append(CPACK_SOURCE_GENERATOR CPACK_SOURCE_TGZ TGZ)
cpack_optional_append(CPACK_SOURCE_GENERATOR CPACK_SOURCE_TXZ TXZ)
cpack_optional_append(CPACK_SOURCE_GENERATOR CPACK_SOURCE_TZ TZ)
cpack_optional_append(CPACK_SOURCE_GENERATOR CPACK_SOURCE_ZIP ZIP)
endif()
# mark the above options as advanced
mark_as_advanced(
CPACK_BINARY_7Z
CPACK_BINARY_BUNDLE
CPACK_BINARY_CYGWIN
CPACK_BINARY_DEB
CPACK_BINARY_DRAGNDROP
CPACK_BINARY_IFW
CPACK_BINARY_NSIS
CPACK_BINARY_OSXX11
CPACK_BINARY_PACKAGEMAKER
CPACK_BINARY_PRODUCTBUILD
CPACK_BINARY_RPM
CPACK_BINARY_STGZ
CPACK_BINARY_TBZ2
CPACK_BINARY_TGZ
CPACK_BINARY_TXZ
CPACK_BINARY_TZ
CPACK_BINARY_WIX
CPACK_BINARY_ZIP
CPACK_SOURCE_7Z
CPACK_SOURCE_CYGWIN
CPACK_SOURCE_RPM
CPACK_SOURCE_TBZ2
CPACK_SOURCE_TGZ
CPACK_SOURCE_TXZ
CPACK_SOURCE_TZ
CPACK_SOURCE_ZIP
)
# Set some other variables
_cpack_set_default(CPACK_INSTALL_CMAKE_PROJECTS
"${CMAKE_BINARY_DIR};${CMAKE_PROJECT_NAME};ALL;/")
_cpack_set_default(CPACK_CMAKE_GENERATOR "${CMAKE_GENERATOR}")
_cpack_set_default(CPACK_TOPLEVEL_TAG "${CPACK_SYSTEM_NAME}")
# if the user has set CPACK_NSIS_DISPLAY_NAME remember it
if(DEFINED CPACK_NSIS_DISPLAY_NAME)
set(CPACK_NSIS_DISPLAY_NAME_SET TRUE)
endif()
# if the user has set CPACK_NSIS_DISPLAY
# explicitly, then use that as the default
# value of CPACK_NSIS_PACKAGE_NAME instead
# of CPACK_PACKAGE_INSTALL_DIRECTORY
_cpack_set_default(CPACK_NSIS_DISPLAY_NAME "${CPACK_PACKAGE_INSTALL_DIRECTORY}")
if(CPACK_NSIS_DISPLAY_NAME_SET)
_cpack_set_default(CPACK_NSIS_PACKAGE_NAME "${CPACK_NSIS_DISPLAY_NAME}")
else()
_cpack_set_default(CPACK_NSIS_PACKAGE_NAME "${CPACK_PACKAGE_INSTALL_DIRECTORY}")
endif()
_cpack_set_default(CPACK_OUTPUT_CONFIG_FILE
"${CMAKE_BINARY_DIR}/CPackConfig.cmake")
_cpack_set_default(CPACK_SOURCE_OUTPUT_CONFIG_FILE
"${CMAKE_BINARY_DIR}/CPackSourceConfig.cmake")
_cpack_set_default(CPACK_SET_DESTDIR OFF)
_cpack_set_default(CPACK_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}")
_cpack_set_default(CPACK_NSIS_INSTALLER_ICON_CODE "")
_cpack_set_default(CPACK_NSIS_INSTALLER_MUI_ICON_CODE "")
# WiX specific variables
_cpack_set_default(CPACK_WIX_SIZEOF_VOID_P "${CMAKE_SIZEOF_VOID_P}")
# set sysroot so SDK tools can be used
if(CMAKE_OSX_SYSROOT)
_cpack_set_default(CPACK_OSX_SYSROOT "${_CMAKE_OSX_SYSROOT_PATH}")
endif()
_cpack_set_default(CPACK_BUILD_SOURCE_DIRS "${CMAKE_SOURCE_DIR};${CMAKE_BINARY_DIR}")
if(DEFINED CPACK_COMPONENTS_ALL)
if(CPACK_MONOLITHIC_INSTALL)
message("CPack warning: both CPACK_COMPONENTS_ALL and CPACK_MONOLITHIC_INSTALL have been set.\nDefaulting to a monolithic installation.")
set(CPACK_COMPONENTS_ALL)
else()
# The user has provided the set of components to be installed as
# part of a component-based installation; trust her.
set(CPACK_COMPONENTS_ALL_SET_BY_USER TRUE)
endif()
else()
# If the user has not specifically requested a monolithic installer
# but has specified components in various "install" commands, tell
# CPack about those components.
if(NOT CPACK_MONOLITHIC_INSTALL)
get_cmake_property(CPACK_COMPONENTS_ALL COMPONENTS)
list(LENGTH CPACK_COMPONENTS_ALL CPACK_COMPONENTS_LEN)
if(CPACK_COMPONENTS_LEN EQUAL 1)
# Only one component: this is not a component-based installation
# (at least, it isn't a component-based installation, but may
# become one later if the user uses the cpack_add_* commands).
set(CPACK_COMPONENTS_ALL)
endif()
set(CPACK_COMPONENTS_LEN)
endif()
endif()
# CMake always generates a component named "Unspecified", which is
# used to install everything that doesn't have an explicitly-provided
# component. Since these files should always be installed, we'll make
# them hidden and required.
set(CPACK_COMPONENT_UNSPECIFIED_HIDDEN TRUE)
set(CPACK_COMPONENT_UNSPECIFIED_REQUIRED TRUE)
cpack_encode_variables()
configure_file("${cpack_input_file}" "${CPACK_OUTPUT_CONFIG_FILE}" @ONLY)
# Generate source file
_cpack_set_default(CPACK_SOURCE_INSTALLED_DIRECTORIES
"${CMAKE_SOURCE_DIR};/")
_cpack_set_default(CPACK_SOURCE_TOPLEVEL_TAG "${CPACK_SYSTEM_NAME}-Source")
_cpack_set_default(CPACK_SOURCE_PACKAGE_FILE_NAME
"${CPACK_PACKAGE_NAME}-${CPACK_PACKAGE_VERSION}-Source")
set(__cpack_source_ignore_files_default
"/CVS/;/\\.svn/;/\\.bzr/;/\\.hg/;/\\.git/;\\.swp$;\\.#;/#")
if(NOT CPACK_VERBATIM_VARIABLES)
_cpack_escape_for_cmake(__cpack_source_ignore_files_default
"${__cpack_source_ignore_files_default}")
endif()
_cpack_set_default(CPACK_SOURCE_IGNORE_FILES "${__cpack_source_ignore_files_default}")
set(CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_SOURCE_INSTALL_CMAKE_PROJECTS}")
set(CPACK_INSTALLED_DIRECTORIES "${CPACK_SOURCE_INSTALLED_DIRECTORIES}")
set(CPACK_GENERATOR "${CPACK_SOURCE_GENERATOR}")
set(CPACK_TOPLEVEL_TAG "${CPACK_SOURCE_TOPLEVEL_TAG}")
set(CPACK_PACKAGE_FILE_NAME "${CPACK_SOURCE_PACKAGE_FILE_NAME}")
set(CPACK_IGNORE_FILES "${CPACK_SOURCE_IGNORE_FILES}")
set(CPACK_STRIP_FILES "${CPACK_SOURCE_STRIP_FILES}")
set(CPACK_RPM_PACKAGE_SOURCES "ON")
cpack_encode_variables()
configure_file("${cpack_source_input_file}"
"${CPACK_SOURCE_OUTPUT_CONFIG_FILE}" @ONLY)
|
dava/dava.engine
|
Bin/CMakeWin32/share/cmake-3.9/Modules/CPack.cmake
|
CMake
|
bsd-3-clause
| 27,010
|
/**
* Copyright (c) 2011-2012 Optimax Software Ltd.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of Optimax Software, ElasticInbox, nor the names
* of its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.elasticinbox.lmtp.filter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.elasticinbox.core.message.MimeParser;
import com.elasticinbox.core.model.Message;
import com.elasticinbox.core.model.ReservedLabels;
/**
* Labels message as Spam if respective header is found.
*
* @author Rustam Aliyev
*/
public final class SpamMailFilter implements Filter<Message>
{
private static final Logger logger = LoggerFactory
.getLogger(SpamMailFilter.class);
private final static String MIME_HEADER_SPAM_VALUE = "YES";
@Override
public Message filter(Message message)
{
if (message.getMinorHeader(MimeParser.MIME_HEADER_SPAM) != null
&& message.getMinorHeader(MimeParser.MIME_HEADER_SPAM).equalsIgnoreCase(MIME_HEADER_SPAM_VALUE))
{
logger.debug("Applying filter for SPAM");
message.addLabel(ReservedLabels.SPAM.getId());
}
return message;
}
}
|
elasticinbox/elasticinbox
|
modules/lmtp/src/main/java/com/elasticinbox/lmtp/filter/SpamMailFilter.java
|
Java
|
bsd-3-clause
| 2,502
|
// Copyright 2015 The Cobalt Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef COBALT_BINDINGS_TESTING_CONSTRUCTOR_WITH_ARGUMENTS_INTERFACE_H_
#define COBALT_BINDINGS_TESTING_CONSTRUCTOR_WITH_ARGUMENTS_INTERFACE_H_
#include <string>
#include "cobalt/script/wrappable.h"
namespace cobalt {
namespace bindings {
namespace testing {
class ConstructorWithArgumentsInterface : public script::Wrappable {
public:
ConstructorWithArgumentsInterface() {}
ConstructorWithArgumentsInterface(int32_t arg1, bool arg2,
const std::string& arg3)
: arg1_(arg1), arg2_(arg2), arg3_(arg3) {}
int32_t long_arg() { return arg1_; }
bool boolean_arg() { return arg2_; }
std::string string_arg() { return arg3_; }
DEFINE_WRAPPABLE_TYPE(ConstructorWithArgumentsInterface);
private:
int32_t arg1_;
bool arg2_;
std::string arg3_;
};
} // namespace testing
} // namespace bindings
} // namespace cobalt
#endif // COBALT_BINDINGS_TESTING_CONSTRUCTOR_WITH_ARGUMENTS_INTERFACE_H_
|
youtube/cobalt
|
cobalt/bindings/testing/constructor_with_arguments_interface.h
|
C
|
bsd-3-clause
| 1,571
|
// Copyright (c) 2021, Viktor Larsson
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// * Neither the name of the copyright holder nor the
// names of its contributors may be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef POSELIB_JACOBIAN_IMPL_H_
#define POSELIB_JACOBIAN_IMPL_H_
#include "PoseLib/camera_pose.h"
#include "PoseLib/misc/colmap_models.h"
#include "PoseLib/misc/essential.h"
#include "PoseLib/types.h"
namespace poselib {
// For the accumulators we support supplying a std::vector<double> with point-wise weights for the residuals.
// In case we don't want weighted residuals, we can pass UniformWeightVector instead of filling a std::vector
// with 1.0. The multiplication is then hopefully optimized away since it always returns 1.0
// (see the illustrative sketch after these helper classes).
class UniformWeightVector {
public:
UniformWeightVector() {}
constexpr double operator[](std::size_t idx) const { return 1.0; }
};
class UniformWeightVectors { // this corresponds to std::vector<std::vector<double>> used for generalized cameras etc
public:
UniformWeightVectors() {}
constexpr const UniformWeightVector &operator[](std::size_t idx) const { return w; }
const UniformWeightVector w;
typedef UniformWeightVector value_type;
};
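// Illustrative sketch only (MyCameraModel and MyLoss below are placeholder
// type names, not part of PoseLib): per-point weights can be supplied as a
// std::vector<double>,
//
//   std::vector<double> w(points2D.size(), 1.0);
//   CameraJacobianAccumulator<MyCameraModel, MyLoss, std::vector<double>>
//       acc(points2D, points3D, camera, loss, w);
//
// Omitting the last constructor argument keeps the default-constructed
// UniformWeightVector, whose operator[] always returns 1.0.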
template <typename CameraModel, typename LossFunction, typename ResidualWeightVector = UniformWeightVector>
class CameraJacobianAccumulator {
public:
CameraJacobianAccumulator(const std::vector<Point2D> &points2D, const std::vector<Point3D> &points3D,
const Camera &cam, const LossFunction &loss,
const ResidualWeightVector &w = ResidualWeightVector())
: x(points2D), X(points3D), camera(cam), loss_fn(loss), weights(w) {}
double residual(const CameraPose &pose) const {
double cost = 0;
for (size_t i = 0; i < x.size(); ++i) {
const Eigen::Vector3d Z = pose.apply(X[i]);
// Note this assumes points that are behind the camera will stay behind the camera
// during the optimization
if (Z(2) < 0)
continue;
const double inv_z = 1.0 / Z(2);
Eigen::Vector2d p(Z(0) * inv_z, Z(1) * inv_z);
CameraModel::project(camera.params, p, &p);
const double r0 = p(0) - x[i](0);
const double r1 = p(1) - x[i](1);
const double r_squared = r0 * r0 + r1 * r1;
cost += weights[i] * loss_fn.loss(r_squared);
}
return cost;
}
// computes J.transpose() * J and J.transpose() * res
// Only computes the lower half of JtJ
size_t accumulate(const CameraPose &pose, Eigen::Matrix<double, 6, 6> &JtJ,
Eigen::Matrix<double, 6, 1> &Jtr) const {
Eigen::Matrix3d R = pose.R();
Eigen::Matrix2d Jcam;
Jcam.setIdentity(); // we initialize to identity here (this is for the calibrated case)
size_t num_residuals = 0;
for (size_t i = 0; i < x.size(); ++i) {
const Eigen::Vector3d Z = R * X[i] + pose.t;
const Eigen::Vector2d z = Z.hnormalized();
// Note this assumes points that are behind the camera will stay behind the camera
// during the optimization
if (Z(2) < 0)
continue;
// Project with intrinsics
Eigen::Vector2d zp = z;
CameraModel::project_with_jac(camera.params, z, &zp, &Jcam);
// Setup residual
Eigen::Vector2d r = zp - x[i];
const double r_squared = r.squaredNorm();
const double weight = weights[i] * loss_fn.weight(r_squared);
if (weight == 0.0) {
continue;
}
num_residuals++;
// Compute jacobian w.r.t. Z (times R)
Eigen::Matrix<double, 2, 3> dZ;
dZ.block<2, 2>(0, 0) = Jcam;
dZ.col(2) = -Jcam * z;
dZ *= 1.0 / Z(2);
dZ *= R;
const double X0 = X[i](0);
const double X1 = X[i](1);
const double X2 = X[i](2);
const double dZtdZ_0_0 = weight * dZ.col(0).dot(dZ.col(0));
const double dZtdZ_1_0 = weight * dZ.col(1).dot(dZ.col(0));
const double dZtdZ_1_1 = weight * dZ.col(1).dot(dZ.col(1));
const double dZtdZ_2_0 = weight * dZ.col(2).dot(dZ.col(0));
const double dZtdZ_2_1 = weight * dZ.col(2).dot(dZ.col(1));
const double dZtdZ_2_2 = weight * dZ.col(2).dot(dZ.col(2));
JtJ(0, 0) += X2 * (X2 * dZtdZ_1_1 - X1 * dZtdZ_2_1) + X1 * (X1 * dZtdZ_2_2 - X2 * dZtdZ_2_1);
JtJ(1, 0) += -X2 * (X2 * dZtdZ_1_0 - X0 * dZtdZ_2_1) - X1 * (X0 * dZtdZ_2_2 - X2 * dZtdZ_2_0);
JtJ(2, 0) += X1 * (X0 * dZtdZ_2_1 - X1 * dZtdZ_2_0) - X2 * (X0 * dZtdZ_1_1 - X1 * dZtdZ_1_0);
JtJ(3, 0) += X1 * dZtdZ_2_0 - X2 * dZtdZ_1_0;
JtJ(4, 0) += X1 * dZtdZ_2_1 - X2 * dZtdZ_1_1;
JtJ(5, 0) += X1 * dZtdZ_2_2 - X2 * dZtdZ_2_1;
JtJ(1, 1) += X2 * (X2 * dZtdZ_0_0 - X0 * dZtdZ_2_0) + X0 * (X0 * dZtdZ_2_2 - X2 * dZtdZ_2_0);
JtJ(2, 1) += -X2 * (X1 * dZtdZ_0_0 - X0 * dZtdZ_1_0) - X0 * (X0 * dZtdZ_2_1 - X1 * dZtdZ_2_0);
JtJ(3, 1) += X2 * dZtdZ_0_0 - X0 * dZtdZ_2_0;
JtJ(4, 1) += X2 * dZtdZ_1_0 - X0 * dZtdZ_2_1;
JtJ(5, 1) += X2 * dZtdZ_2_0 - X0 * dZtdZ_2_2;
JtJ(2, 2) += X1 * (X1 * dZtdZ_0_0 - X0 * dZtdZ_1_0) + X0 * (X0 * dZtdZ_1_1 - X1 * dZtdZ_1_0);
JtJ(3, 2) += X0 * dZtdZ_1_0 - X1 * dZtdZ_0_0;
JtJ(4, 2) += X0 * dZtdZ_1_1 - X1 * dZtdZ_1_0;
JtJ(5, 2) += X0 * dZtdZ_2_1 - X1 * dZtdZ_2_0;
JtJ(3, 3) += dZtdZ_0_0;
JtJ(4, 3) += dZtdZ_1_0;
JtJ(5, 3) += dZtdZ_2_0;
JtJ(4, 4) += dZtdZ_1_1;
JtJ(5, 4) += dZtdZ_2_1;
JtJ(5, 5) += dZtdZ_2_2;
r *= weight;
Jtr(0) += (r(0) * (X1 * dZ(0, 2) - X2 * dZ(0, 1)) + r(1) * (X1 * dZ(1, 2) - X2 * dZ(1, 1)));
Jtr(1) += (-r(0) * (X0 * dZ(0, 2) - X2 * dZ(0, 0)) - r(1) * (X0 * dZ(1, 2) - X2 * dZ(1, 0)));
Jtr(2) += (r(0) * (X0 * dZ(0, 1) - X1 * dZ(0, 0)) + r(1) * (X0 * dZ(1, 1) - X1 * dZ(1, 0)));
Jtr(3) += (dZ(0, 0) * r(0) + dZ(1, 0) * r(1));
Jtr(4) += (dZ(0, 1) * r(0) + dZ(1, 1) * r(1));
Jtr(5) += (dZ(0, 2) * r(0) + dZ(1, 2) * r(1));
}
return num_residuals;
}
CameraPose step(Eigen::Matrix<double, 6, 1> dp, const CameraPose &pose) const {
CameraPose pose_new;
// The rotation is parameterized via the lie-rep. and post-multiplication
// i.e. R(delta) = R * expm([delta]_x)
pose_new.q = quat_step_post(pose.q, dp.block<3, 1>(0, 0));
// Translation is parameterized as (negative) shift in position
// i.e. t(delta) = t + R*delta
pose_new.t = pose.t + pose.rotate(dp.block<3, 1>(3, 0));
return pose_new;
}
typedef CameraPose param_t;
static constexpr size_t num_params = 6;
private:
const std::vector<Point2D> &x;
const std::vector<Point3D> &X;
const Camera &camera;
const LossFunction &loss_fn;
const ResidualWeightVector &weights;
};
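// Illustrative sketch (not part of the original header): accumulate() only fills the lower
// half of JtJ, so a caller solves the normal equations through a self-adjoint view. A minimal,
// undamped Gauss-Newton step under these assumptions could look like the function below
// (the name is hypothetical); the actual PoseLib optimizer adds damping and convergence
// checks on top of the same pattern.
template <typename Accumulator>
typename Accumulator::param_t example_gauss_newton_step(Accumulator &accum,
                                                        const typename Accumulator::param_t &params) {
    constexpr int n = static_cast<int>(Accumulator::num_params);
    Eigen::Matrix<double, n, n> JtJ;
    Eigen::Matrix<double, n, 1> Jtr;
    JtJ.setZero();
    Jtr.setZero();
    accum.accumulate(params, JtJ, Jtr);
    // Only the lower triangle of JtJ is valid, hence the selfadjointView before factorizing.
    const Eigen::Matrix<double, n, 1> delta =
        -JtJ.template selfadjointView<Eigen::Lower>().llt().solve(Jtr);
    return accum.step(delta, params);
}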
template <typename LossFunction, typename ResidualWeightVectors = UniformWeightVectors>
class GeneralizedCameraJacobianAccumulator {
public:
GeneralizedCameraJacobianAccumulator(const std::vector<std::vector<Point2D>> &points2D,
const std::vector<std::vector<Point3D>> &points3D,
const std::vector<CameraPose> &camera_ext,
const std::vector<Camera> &camera_int, const LossFunction &l,
const ResidualWeightVectors &w = ResidualWeightVectors())
: num_cams(points2D.size()), x(points2D), X(points3D), rig_poses(camera_ext), cameras(camera_int), loss_fn(l),
weights(w) {}
double residual(const CameraPose &pose) const {
double cost = 0.0;
for (size_t k = 0; k < num_cams; ++k) {
if (x[k].size() == 0) {
continue;
}
const Camera &camera = cameras[k];
CameraPose full_pose;
full_pose.q = quat_multiply(rig_poses[k].q, pose.q);
full_pose.t = rig_poses[k].rotate(pose.t) + rig_poses[k].t;
switch (camera.model_id) {
#define SWITCH_CAMERA_MODEL_CASE(Model) \
case Model::model_id: { \
CameraJacobianAccumulator<Model, decltype(loss_fn), typename ResidualWeightVectors::value_type> accum( \
x[k], X[k], cameras[k], loss_fn, weights[k]); \
cost += accum.residual(full_pose); \
break; \
}
SWITCH_CAMERA_MODELS
#undef SWITCH_CAMERA_MODEL_CASE
}
}
return cost;
}
size_t accumulate(const CameraPose &pose, Eigen::Matrix<double, 6, 6> &JtJ,
Eigen::Matrix<double, 6, 1> &Jtr) const {
size_t num_residuals = 0;
for (size_t k = 0; k < num_cams; ++k) {
if (x[k].size() == 0) {
continue;
}
const Camera &camera = cameras[k];
CameraPose full_pose;
full_pose.q = quat_multiply(rig_poses[k].q, pose.q);
full_pose.t = rig_poses[k].rotate(pose.t) + rig_poses[k].t;
switch (camera.model_id) {
#define SWITCH_CAMERA_MODEL_CASE(Model) \
case Model::model_id: { \
CameraJacobianAccumulator<Model, decltype(loss_fn), typename ResidualWeightVectors::value_type> accum( \
x[k], X[k], cameras[k], loss_fn, weights[k]); \
num_residuals += accum.accumulate(full_pose, JtJ, Jtr); \
break; \
}
SWITCH_CAMERA_MODELS
#undef SWITCH_CAMERA_MODEL_CASE
}
}
return num_residuals;
}
CameraPose step(Eigen::Matrix<double, 6, 1> dp, const CameraPose &pose) const {
CameraPose pose_new;
pose_new.q = quat_step_post(pose.q, dp.block<3, 1>(0, 0));
pose_new.t = pose.t + pose.rotate(dp.block<3, 1>(3, 0));
return pose_new;
}
typedef CameraPose param_t;
static constexpr size_t num_params = 6;
private:
const size_t num_cams;
const std::vector<std::vector<Point2D>> &x;
const std::vector<std::vector<Point3D>> &X;
const std::vector<CameraPose> &rig_poses;
const std::vector<Camera> &cameras;
const LossFunction &loss_fn;
const ResidualWeightVectors &weights;
};
template <typename LossFunction, typename ResidualWeightVector = UniformWeightVector> class LineJacobianAccumulator {
public:
LineJacobianAccumulator(const std::vector<Line2D> &lines2D_, const std::vector<Line3D> &lines3D_,
const LossFunction &loss, const ResidualWeightVector &w = ResidualWeightVector())
: lines2D(lines2D_), lines3D(lines3D_), loss_fn(loss), weights(w) {}
double residual(const CameraPose &pose) const {
Eigen::Matrix3d R = pose.R();
double cost = 0;
for (size_t i = 0; i < lines2D.size(); ++i) {
const Eigen::Vector3d Z1 = R * lines3D[i].X1 + pose.t;
const Eigen::Vector3d Z2 = R * lines3D[i].X2 + pose.t;
Eigen::Vector3d l = Z1.cross(Z2);
l /= l.topRows<2>().norm();
const double r0 = l.dot(lines2D[i].x1.homogeneous());
const double r1 = l.dot(lines2D[i].x2.homogeneous());
const double r_squared = r0 * r0 + r1 * r1;
cost += weights[i] * loss_fn.loss(r_squared);
}
return cost;
}
// computes J.transpose() * J and J.transpose() * res
// Only computes the lower half of JtJ
size_t accumulate(const CameraPose &pose, Eigen::Matrix<double, 6, 6> &JtJ,
Eigen::Matrix<double, 6, 1> &Jtr) const {
Eigen::Matrix3d E, R;
R = pose.R();
E << pose.t.cross(R.col(0)), pose.t.cross(R.col(1)), pose.t.cross(R.col(2));
size_t num_residuals = 0;
for (size_t k = 0; k < lines2D.size(); ++k) {
const Eigen::Vector3d Z1 = R * lines3D[k].X1 + pose.t;
const Eigen::Vector3d Z2 = R * lines3D[k].X2 + pose.t;
const Eigen::Vector3d X12 = lines3D[k].X1.cross(lines3D[k].X2);
const Eigen::Vector3d dX = lines3D[k].X1 - lines3D[k].X2;
// Projected line
const Eigen::Vector3d l = Z1.cross(Z2);
// Normalized line by first two coordinates
Eigen::Vector2d alpha = l.topRows<2>();
double beta = l(2);
const double n_alpha = alpha.norm();
alpha /= n_alpha;
beta /= n_alpha;
// Compute residual
Eigen::Vector2d r;
r << alpha.dot(lines2D[k].x1) + beta, alpha.dot(lines2D[k].x2) + beta;
const double r_squared = r.squaredNorm();
const double weight = weights[k] * loss_fn.weight(r_squared);
if (weight == 0.0) {
continue;
}
num_residuals++;
Eigen::Matrix<double, 3, 6> dl_drt;
// Differentiate line with respect to rotation parameters
dl_drt.block<1, 3>(0, 0) = E.row(0).cross(dX) - R.row(0).cross(X12);
dl_drt.block<1, 3>(1, 0) = E.row(1).cross(dX) - R.row(1).cross(X12);
dl_drt.block<1, 3>(2, 0) = E.row(2).cross(dX) - R.row(2).cross(X12);
// and translation params
dl_drt.block<1, 3>(0, 3) = R.row(0).cross(dX);
dl_drt.block<1, 3>(1, 3) = R.row(1).cross(dX);
dl_drt.block<1, 3>(2, 3) = R.row(2).cross(dX);
// Differentiate normalized line w.r.t. original line
Eigen::Matrix3d dln_dl;
dln_dl.block<2, 2>(0, 0) = (Eigen::Matrix2d::Identity() - alpha * alpha.transpose()) / n_alpha;
dln_dl.block<1, 2>(2, 0) = -beta * alpha / n_alpha;
dln_dl.block<2, 1>(0, 2).setZero();
dln_dl(2, 2) = 1 / n_alpha;
// Differentiate residual w.r.t. line
Eigen::Matrix<double, 2, 3> dr_dl;
dr_dl.row(0) << lines2D[k].x1.transpose(), 1.0;
dr_dl.row(1) << lines2D[k].x2.transpose(), 1.0;
Eigen::Matrix<double, 2, 6> J = dr_dl * dln_dl * dl_drt;
// Accumulate into JtJ and Jtr
Jtr += weight * J.transpose() * r;
for (size_t i = 0; i < 6; ++i) {
for (size_t j = 0; j <= i; ++j) {
JtJ(i, j) += weight * (J.col(i).dot(J.col(j)));
}
}
}
return num_residuals;
}
CameraPose step(Eigen::Matrix<double, 6, 1> dp, const CameraPose &pose) const {
CameraPose pose_new;
// The rotation is parameterized via the lie-rep. and post-multiplication
// i.e. R(delta) = R * expm([delta]_x)
pose_new.q = quat_step_post(pose.q, dp.block<3, 1>(0, 0));
// Translation is parameterized as (negative) shift in position
// i.e. t(delta) = t + R*delta
pose_new.t = pose.t + pose.rotate(dp.block<3, 1>(3, 0));
return pose_new;
}
typedef CameraPose param_t;
static constexpr size_t num_params = 6;
private:
const std::vector<Line2D> &lines2D;
const std::vector<Line3D> &lines3D;
const LossFunction &loss_fn;
const ResidualWeightVector &weights;
};
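// Illustrative sketch (not part of the original header): the line residual used above,
// written out for a single correspondence. The projected line is the cross product of the two
// transformed endpoints, normalized so that its first two coordinates form a unit normal; the
// two residuals are then the signed distances of the observed 2D endpoints to that line.
// The function name is hypothetical and simply mirrors residual().
inline Eigen::Vector2d example_line_residual(const CameraPose &pose, const Line2D &line2D,
                                             const Line3D &line3D) {
    const Eigen::Vector3d Z1 = pose.R() * line3D.X1 + pose.t;
    const Eigen::Vector3d Z2 = pose.R() * line3D.X2 + pose.t;
    Eigen::Vector3d l = Z1.cross(Z2);
    l /= l.topRows<2>().norm(); // unit normal in the first two coordinates
    return Eigen::Vector2d(l.dot(line2D.x1.homogeneous()), l.dot(line2D.x2.homogeneous()));
}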
template <typename PointLossFunction, typename LineLossFunction, typename PointResidualsVector = UniformWeightVector,
typename LineResidualsVector = UniformWeightVector>
class PointLineJacobianAccumulator {
public:
PointLineJacobianAccumulator(const std::vector<Point2D> &points2D, const std::vector<Point3D> &points3D,
const std::vector<Line2D> &lines2D, const std::vector<Line3D> &lines3D,
const PointLossFunction &l_point, const LineLossFunction &l_line,
const PointResidualsVector &weights_pts = PointResidualsVector(),
const LineResidualsVector &weights_l = LineResidualsVector())
: pts_accum(points2D, points3D, trivial_camera, l_point, weights_pts),
line_accum(lines2D, lines3D, l_line, weights_l) {
trivial_camera.model_id = NullCameraModel::model_id;
}
double residual(const CameraPose &pose) const { return pts_accum.residual(pose) + line_accum.residual(pose); }
size_t accumulate(const CameraPose &pose, Eigen::Matrix<double, 6, 6> &JtJ,
Eigen::Matrix<double, 6, 1> &Jtr) const {
return pts_accum.accumulate(pose, JtJ, Jtr) + line_accum.accumulate(pose, JtJ, Jtr);
}
CameraPose step(Eigen::Matrix<double, 6, 1> dp, const CameraPose &pose) const {
// Both CameraJacobianAccumulator and LineJacobianAccumulator have the same step!
CameraPose pose_new;
pose_new.q = quat_step_post(pose.q, dp.block<3, 1>(0, 0));
pose_new.t = pose.t + pose.rotate(dp.block<3, 1>(3, 0));
return pose_new;
}
typedef CameraPose param_t;
static constexpr size_t num_params = 6;
private:
Camera trivial_camera;
CameraJacobianAccumulator<NullCameraModel, PointLossFunction, PointResidualsVector> pts_accum;
LineJacobianAccumulator<LineLossFunction, LineResidualsVector> line_accum;
};
template <typename LossFunction, typename ResidualWeightVector = UniformWeightVector>
class RelativePoseJacobianAccumulator {
public:
RelativePoseJacobianAccumulator(const std::vector<Point2D> &points2D_1, const std::vector<Point2D> &points2D_2,
const LossFunction &l, const ResidualWeightVector &w = ResidualWeightVector())
: x1(points2D_1), x2(points2D_2), loss_fn(l), weights(w) {}
double residual(const CameraPose &pose) const {
Eigen::Matrix3d E;
essential_from_motion(pose, &E);
double cost = 0.0;
for (size_t k = 0; k < x1.size(); ++k) {
double C = x2[k].homogeneous().dot(E * x1[k].homogeneous());
double nJc_sq = (E.block<2, 3>(0, 0) * x1[k].homogeneous()).squaredNorm() +
(E.block<3, 2>(0, 0).transpose() * x2[k].homogeneous()).squaredNorm();
double r2 = (C * C) / nJc_sq;
cost += weights[k] * loss_fn.loss(r2);
}
return cost;
}
size_t accumulate(const CameraPose &pose, Eigen::Matrix<double, 5, 5> &JtJ, Eigen::Matrix<double, 5, 1> &Jtr) {
// We start by setting up a basis for the updates in the translation (orthogonal to t)
// We find the minimum element of t and cross product with the corresponding basis vector.
// (this ensures that the first cross product is not close to the zero vector)
if (std::abs(pose.t.x()) < std::abs(pose.t.y())) {
// x < y
if (std::abs(pose.t.x()) < std::abs(pose.t.z())) {
tangent_basis.col(0) = pose.t.cross(Eigen::Vector3d::UnitX()).normalized();
} else {
tangent_basis.col(0) = pose.t.cross(Eigen::Vector3d::UnitZ()).normalized();
}
} else {
// x > y
if (std::abs(pose.t.y()) < std::abs(pose.t.z())) {
tangent_basis.col(0) = pose.t.cross(Eigen::Vector3d::UnitY()).normalized();
} else {
tangent_basis.col(0) = pose.t.cross(Eigen::Vector3d::UnitZ()).normalized();
}
}
tangent_basis.col(1) = tangent_basis.col(0).cross(pose.t).normalized();
Eigen::Matrix3d E, R;
R = pose.R();
essential_from_motion(pose, &E);
// Matrices contain the jacobians of E w.r.t. the rotation and translation parameters
Eigen::Matrix<double, 9, 3> dR;
Eigen::Matrix<double, 9, 2> dt;
// Each column is vec(E*skew(e_k)) where e_k is k:th basis vector
dR.block<3, 1>(0, 0).setZero();
dR.block<3, 1>(0, 1) = -E.col(2);
dR.block<3, 1>(0, 2) = E.col(1);
dR.block<3, 1>(3, 0) = E.col(2);
dR.block<3, 1>(3, 1).setZero();
dR.block<3, 1>(3, 2) = -E.col(0);
dR.block<3, 1>(6, 0) = -E.col(1);
dR.block<3, 1>(6, 1) = E.col(0);
dR.block<3, 1>(6, 2).setZero();
// Each column is vec(skew(tangent_basis[k])*R)
dt.block<3, 1>(0, 0) = tangent_basis.col(0).cross(R.col(0));
dt.block<3, 1>(0, 1) = tangent_basis.col(1).cross(R.col(0));
dt.block<3, 1>(3, 0) = tangent_basis.col(0).cross(R.col(1));
dt.block<3, 1>(3, 1) = tangent_basis.col(1).cross(R.col(1));
dt.block<3, 1>(6, 0) = tangent_basis.col(0).cross(R.col(2));
dt.block<3, 1>(6, 1) = tangent_basis.col(1).cross(R.col(2));
size_t num_residuals = 0;
for (size_t k = 0; k < x1.size(); ++k) {
double C = x2[k].homogeneous().dot(E * x1[k].homogeneous());
// J_C is the Jacobian of the epipolar constraint w.r.t. the image points
Eigen::Vector4d J_C;
J_C << E.block<3, 2>(0, 0).transpose() * x2[k].homogeneous(), E.block<2, 3>(0, 0) * x1[k].homogeneous();
const double nJ_C = J_C.norm();
const double inv_nJ_C = 1.0 / nJ_C;
const double r = C * inv_nJ_C;
// Compute weight from robust loss function (used in the IRLS)
const double weight = weights[k] * loss_fn.weight(r * r);
if (weight == 0.0) {
continue;
}
num_residuals++;
// Compute Jacobian of Sampson error w.r.t the fundamental/essential matrix (3x3)
Eigen::Matrix<double, 1, 9> dF;
dF << x1[k](0) * x2[k](0), x1[k](0) * x2[k](1), x1[k](0), x1[k](1) * x2[k](0), x1[k](1) * x2[k](1),
x1[k](1), x2[k](0), x2[k](1), 1.0;
const double s = C * inv_nJ_C * inv_nJ_C;
dF(0) -= s * (J_C(2) * x1[k](0) + J_C(0) * x2[k](0));
dF(1) -= s * (J_C(3) * x1[k](0) + J_C(0) * x2[k](1));
dF(2) -= s * (J_C(0));
dF(3) -= s * (J_C(2) * x1[k](1) + J_C(1) * x2[k](0));
dF(4) -= s * (J_C(3) * x1[k](1) + J_C(1) * x2[k](1));
dF(5) -= s * (J_C(1));
dF(6) -= s * (J_C(2));
dF(7) -= s * (J_C(3));
dF *= inv_nJ_C;
// and then w.r.t. the pose parameters (rotation + tangent basis for translation)
Eigen::Matrix<double, 1, 5> J;
J.block<1, 3>(0, 0) = dF * dR;
J.block<1, 2>(0, 3) = dF * dt;
// Accumulate into JtJ and Jtr
Jtr += weight * C * inv_nJ_C * J.transpose();
JtJ(0, 0) += weight * (J(0) * J(0));
JtJ(1, 0) += weight * (J(1) * J(0));
JtJ(1, 1) += weight * (J(1) * J(1));
JtJ(2, 0) += weight * (J(2) * J(0));
JtJ(2, 1) += weight * (J(2) * J(1));
JtJ(2, 2) += weight * (J(2) * J(2));
JtJ(3, 0) += weight * (J(3) * J(0));
JtJ(3, 1) += weight * (J(3) * J(1));
JtJ(3, 2) += weight * (J(3) * J(2));
JtJ(3, 3) += weight * (J(3) * J(3));
JtJ(4, 0) += weight * (J(4) * J(0));
JtJ(4, 1) += weight * (J(4) * J(1));
JtJ(4, 2) += weight * (J(4) * J(2));
JtJ(4, 3) += weight * (J(4) * J(3));
JtJ(4, 4) += weight * (J(4) * J(4));
}
return num_residuals;
}
CameraPose step(Eigen::Matrix<double, 5, 1> dp, const CameraPose &pose) const {
CameraPose pose_new;
pose_new.q = quat_step_post(pose.q, dp.block<3, 1>(0, 0));
pose_new.t = pose.t + tangent_basis * dp.block<2, 1>(3, 0);
return pose_new;
}
typedef CameraPose param_t;
static constexpr size_t num_params = 5;
private:
const std::vector<Point2D> &x1;
const std::vector<Point2D> &x2;
const LossFunction &loss_fn;
const ResidualWeightVector &weights;
Eigen::Matrix<double, 3, 2> tangent_basis;
};
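// Illustrative sketch (not part of the original header): the tangent basis built inside
// accumulate() above, extracted into a hypothetical helper. Crossing t with the unit vector of
// its smallest-magnitude component keeps the first cross product well away from zero, and the
// two resulting columns span the plane orthogonal to t (the relative translation is only
// defined up to scale, so updates live in this 2D tangent space).
inline Eigen::Matrix<double, 3, 2> example_tangent_basis(const Eigen::Vector3d &t) {
    Eigen::Vector3d e;
    if (std::abs(t.x()) < std::abs(t.y()) && std::abs(t.x()) < std::abs(t.z()))
        e = Eigen::Vector3d::UnitX();
    else if (std::abs(t.y()) < std::abs(t.z()))
        e = Eigen::Vector3d::UnitY();
    else
        e = Eigen::Vector3d::UnitZ();
    Eigen::Matrix<double, 3, 2> B;
    B.col(0) = t.cross(e).normalized();
    B.col(1) = B.col(0).cross(t).normalized();
    return B;
}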
template <typename LossFunction, typename ResidualWeightVectors = UniformWeightVectors>
class GeneralizedRelativePoseJacobianAccumulator {
public:
GeneralizedRelativePoseJacobianAccumulator(const std::vector<PairwiseMatches> &pairwise_matches,
const std::vector<CameraPose> &camera1_ext,
const std::vector<CameraPose> &camera2_ext, const LossFunction &l,
const ResidualWeightVectors &w = ResidualWeightVectors())
: matches(pairwise_matches), rig1_poses(camera1_ext), rig2_poses(camera2_ext), loss_fn(l), weights(w) {}
double residual(const CameraPose &pose) const {
double cost = 0.0;
for (size_t match_k = 0; match_k < matches.size(); ++match_k) {
const PairwiseMatches &m = matches[match_k];
Eigen::Vector4d q1 = rig1_poses[m.cam_id1].q;
Eigen::Vector3d t1 = rig1_poses[m.cam_id1].t;
Eigen::Vector4d q2 = rig2_poses[m.cam_id2].q;
Eigen::Vector3d t2 = rig2_poses[m.cam_id2].t;
CameraPose relpose;
relpose.q = quat_multiply(q2, quat_multiply(pose.q, quat_conj(q1)));
relpose.t = t2 + quat_rotate(q2, pose.t) - relpose.rotate(t1);
RelativePoseJacobianAccumulator<LossFunction, typename ResidualWeightVectors::value_type> accum(
m.x1, m.x2, loss_fn, weights[match_k]);
cost += accum.residual(relpose);
}
return cost;
}
size_t accumulate(const CameraPose &pose, Eigen::Matrix<double, 6, 6> &JtJ,
Eigen::Matrix<double, 6, 1> &Jtr) const {
Eigen::Matrix3d R = pose.R();
size_t num_residuals = 0;
for (size_t match_k = 0; match_k < matches.size(); ++match_k) {
const PairwiseMatches &m = matches[match_k];
// Cameras are
// [R1 t1]
// [R2 t2] * [R t; 0 1] = [R2*R t2+R2*t]
// Relative pose is
// [R2*R*R1' t2+R2*t-R2*R*R1'*t1]
// Essential matrix is
// [t2]_x*R2*R*R1' + [R2*t]_x*R2*R*R1' - R2*R*R1'*[t1]_x
Eigen::Vector4d q1 = rig1_poses[m.cam_id1].q;
Eigen::Matrix3d R1 = quat_to_rotmat(q1);
Eigen::Vector3d t1 = rig1_poses[m.cam_id1].t;
Eigen::Vector4d q2 = rig2_poses[m.cam_id2].q;
Eigen::Matrix3d R2 = quat_to_rotmat(q2);
Eigen::Vector3d t2 = rig2_poses[m.cam_id2].t;
CameraPose relpose;
relpose.q = quat_multiply(q2, quat_multiply(pose.q, quat_conj(q1)));
relpose.t = t2 + R2 * pose.t - relpose.rotate(t1);
Eigen::Matrix3d E;
essential_from_motion(relpose, &E);
Eigen::Matrix3d R2R = R2 * R;
Eigen::Vector3d Rt = R.transpose() * pose.t;
// The messy expressions below compute
// dRdw = [vec(S1) vec(S2) vec(S3)];
// dR = (kron(R1,skew(t2)*R2R+ R2*skew(t)*R) + kron(skew(t1)*R1,R2*R)) * dRdw
// dt = [vec(R2*R*S1*R1.') vec(R2*R*S2*R1.') vec(R2*R*S3*R1.')]
// TODO: Replace with something nice
Eigen::Matrix<double, 9, 3> dR;
Eigen::Matrix<double, 9, 3> dt;
dR(0, 0) = R2R(0, 1) * (R1(1, 2) * t1(2) - R1(2, 2) * t1(1)) -
R2R(0, 2) * (R1(1, 1) * t1(2) - R1(2, 1) * t1(1)) +
R1(0, 1) * (R2R(0, 0) * Rt(1) - R2R(0, 1) * Rt(0) - R2R(1, 2) * t2(2) + R2R(2, 2) * t2(1)) +
R1(0, 2) * (R2R(0, 0) * Rt(2) - R2R(0, 2) * Rt(0) + R2R(1, 1) * t2(2) - R2R(2, 1) * t2(1));
dR(0, 1) = R2R(0, 2) * (R1(1, 0) * t1(2) - R1(2, 0) * t1(1)) -
R2R(0, 0) * (R1(1, 2) * t1(2) - R1(2, 2) * t1(1)) -
R1(0, 0) * (R2R(0, 0) * Rt(1) - R2R(0, 1) * Rt(0) - R2R(1, 2) * t2(2) + R2R(2, 2) * t2(1)) +
R1(0, 2) * (R2R(0, 1) * Rt(2) - R2R(0, 2) * Rt(1) - R2R(1, 0) * t2(2) + R2R(2, 0) * t2(1));
dR(0, 2) = R2R(0, 0) * (R1(1, 1) * t1(2) - R1(2, 1) * t1(1)) -
R2R(0, 1) * (R1(1, 0) * t1(2) - R1(2, 0) * t1(1)) -
R1(0, 0) * (R2R(0, 0) * Rt(2) - R2R(0, 2) * Rt(0) + R2R(1, 1) * t2(2) - R2R(2, 1) * t2(1)) -
R1(0, 1) * (R2R(0, 1) * Rt(2) - R2R(0, 2) * Rt(1) - R2R(1, 0) * t2(2) + R2R(2, 0) * t2(1));
dR(1, 0) = R2R(1, 1) * (R1(1, 2) * t1(2) - R1(2, 2) * t1(1)) -
R2R(1, 2) * (R1(1, 1) * t1(2) - R1(2, 1) * t1(1)) +
R1(0, 1) * (R2R(1, 0) * Rt(1) - R2R(1, 1) * Rt(0) + R2R(0, 2) * t2(2) - R2R(2, 2) * t2(0)) +
R1(0, 2) * (R2R(1, 0) * Rt(2) - R2R(1, 2) * Rt(0) - R2R(0, 1) * t2(2) + R2R(2, 1) * t2(0));
dR(1, 1) = R2R(1, 2) * (R1(1, 0) * t1(2) - R1(2, 0) * t1(1)) -
R2R(1, 0) * (R1(1, 2) * t1(2) - R1(2, 2) * t1(1)) -
R1(0, 0) * (R2R(1, 0) * Rt(1) - R2R(1, 1) * Rt(0) + R2R(0, 2) * t2(2) - R2R(2, 2) * t2(0)) +
R1(0, 2) * (R2R(1, 1) * Rt(2) - R2R(1, 2) * Rt(1) + R2R(0, 0) * t2(2) - R2R(2, 0) * t2(0));
dR(1, 2) = R2R(1, 0) * (R1(1, 1) * t1(2) - R1(2, 1) * t1(1)) -
R2R(1, 1) * (R1(1, 0) * t1(2) - R1(2, 0) * t1(1)) -
R1(0, 0) * (R2R(1, 0) * Rt(2) - R2R(1, 2) * Rt(0) - R2R(0, 1) * t2(2) + R2R(2, 1) * t2(0)) -
R1(0, 1) * (R2R(1, 1) * Rt(2) - R2R(1, 2) * Rt(1) + R2R(0, 0) * t2(2) - R2R(2, 0) * t2(0));
dR(2, 0) = R2R(2, 1) * (R1(1, 2) * t1(2) - R1(2, 2) * t1(1)) -
R2R(2, 2) * (R1(1, 1) * t1(2) - R1(2, 1) * t1(1)) +
R1(0, 1) * (R2R(2, 0) * Rt(1) - R2R(2, 1) * Rt(0) - R2R(0, 2) * t2(1) + R2R(1, 2) * t2(0)) +
R1(0, 2) * (R2R(2, 0) * Rt(2) - R2R(2, 2) * Rt(0) + R2R(0, 1) * t2(1) - R2R(1, 1) * t2(0));
dR(2, 1) = R2R(2, 2) * (R1(1, 0) * t1(2) - R1(2, 0) * t1(1)) -
R2R(2, 0) * (R1(1, 2) * t1(2) - R1(2, 2) * t1(1)) -
R1(0, 0) * (R2R(2, 0) * Rt(1) - R2R(2, 1) * Rt(0) - R2R(0, 2) * t2(1) + R2R(1, 2) * t2(0)) +
R1(0, 2) * (R2R(2, 1) * Rt(2) - R2R(2, 2) * Rt(1) - R2R(0, 0) * t2(1) + R2R(1, 0) * t2(0));
dR(2, 2) = R2R(2, 0) * (R1(1, 1) * t1(2) - R1(2, 1) * t1(1)) -
R2R(2, 1) * (R1(1, 0) * t1(2) - R1(2, 0) * t1(1)) -
R1(0, 0) * (R2R(2, 0) * Rt(2) - R2R(2, 2) * Rt(0) + R2R(0, 1) * t2(1) - R2R(1, 1) * t2(0)) -
R1(0, 1) * (R2R(2, 1) * Rt(2) - R2R(2, 2) * Rt(1) - R2R(0, 0) * t2(1) + R2R(1, 0) * t2(0));
dR(3, 0) = R2R(0, 2) * (R1(0, 1) * t1(2) - R1(2, 1) * t1(0)) -
R2R(0, 1) * (R1(0, 2) * t1(2) - R1(2, 2) * t1(0)) +
R1(1, 1) * (R2R(0, 0) * Rt(1) - R2R(0, 1) * Rt(0) - R2R(1, 2) * t2(2) + R2R(2, 2) * t2(1)) +
R1(1, 2) * (R2R(0, 0) * Rt(2) - R2R(0, 2) * Rt(0) + R2R(1, 1) * t2(2) - R2R(2, 1) * t2(1));
dR(3, 1) = R2R(0, 0) * (R1(0, 2) * t1(2) - R1(2, 2) * t1(0)) -
R2R(0, 2) * (R1(0, 0) * t1(2) - R1(2, 0) * t1(0)) -
R1(1, 0) * (R2R(0, 0) * Rt(1) - R2R(0, 1) * Rt(0) - R2R(1, 2) * t2(2) + R2R(2, 2) * t2(1)) +
R1(1, 2) * (R2R(0, 1) * Rt(2) - R2R(0, 2) * Rt(1) - R2R(1, 0) * t2(2) + R2R(2, 0) * t2(1));
dR(3, 2) = R2R(0, 1) * (R1(0, 0) * t1(2) - R1(2, 0) * t1(0)) -
R2R(0, 0) * (R1(0, 1) * t1(2) - R1(2, 1) * t1(0)) -
R1(1, 0) * (R2R(0, 0) * Rt(2) - R2R(0, 2) * Rt(0) + R2R(1, 1) * t2(2) - R2R(2, 1) * t2(1)) -
R1(1, 1) * (R2R(0, 1) * Rt(2) - R2R(0, 2) * Rt(1) - R2R(1, 0) * t2(2) + R2R(2, 0) * t2(1));
dR(4, 0) = R2R(1, 2) * (R1(0, 1) * t1(2) - R1(2, 1) * t1(0)) -
R2R(1, 1) * (R1(0, 2) * t1(2) - R1(2, 2) * t1(0)) +
R1(1, 1) * (R2R(1, 0) * Rt(1) - R2R(1, 1) * Rt(0) + R2R(0, 2) * t2(2) - R2R(2, 2) * t2(0)) +
R1(1, 2) * (R2R(1, 0) * Rt(2) - R2R(1, 2) * Rt(0) - R2R(0, 1) * t2(2) + R2R(2, 1) * t2(0));
dR(4, 1) = R2R(1, 0) * (R1(0, 2) * t1(2) - R1(2, 2) * t1(0)) -
R2R(1, 2) * (R1(0, 0) * t1(2) - R1(2, 0) * t1(0)) -
R1(1, 0) * (R2R(1, 0) * Rt(1) - R2R(1, 1) * Rt(0) + R2R(0, 2) * t2(2) - R2R(2, 2) * t2(0)) +
R1(1, 2) * (R2R(1, 1) * Rt(2) - R2R(1, 2) * Rt(1) + R2R(0, 0) * t2(2) - R2R(2, 0) * t2(0));
dR(4, 2) = R2R(1, 1) * (R1(0, 0) * t1(2) - R1(2, 0) * t1(0)) -
R2R(1, 0) * (R1(0, 1) * t1(2) - R1(2, 1) * t1(0)) -
R1(1, 0) * (R2R(1, 0) * Rt(2) - R2R(1, 2) * Rt(0) - R2R(0, 1) * t2(2) + R2R(2, 1) * t2(0)) -
R1(1, 1) * (R2R(1, 1) * Rt(2) - R2R(1, 2) * Rt(1) + R2R(0, 0) * t2(2) - R2R(2, 0) * t2(0));
dR(5, 0) = R2R(2, 2) * (R1(0, 1) * t1(2) - R1(2, 1) * t1(0)) -
R2R(2, 1) * (R1(0, 2) * t1(2) - R1(2, 2) * t1(0)) +
R1(1, 1) * (R2R(2, 0) * Rt(1) - R2R(2, 1) * Rt(0) - R2R(0, 2) * t2(1) + R2R(1, 2) * t2(0)) +
R1(1, 2) * (R2R(2, 0) * Rt(2) - R2R(2, 2) * Rt(0) + R2R(0, 1) * t2(1) - R2R(1, 1) * t2(0));
dR(5, 1) = R2R(2, 0) * (R1(0, 2) * t1(2) - R1(2, 2) * t1(0)) -
R2R(2, 2) * (R1(0, 0) * t1(2) - R1(2, 0) * t1(0)) -
R1(1, 0) * (R2R(2, 0) * Rt(1) - R2R(2, 1) * Rt(0) - R2R(0, 2) * t2(1) + R2R(1, 2) * t2(0)) +
R1(1, 2) * (R2R(2, 1) * Rt(2) - R2R(2, 2) * Rt(1) - R2R(0, 0) * t2(1) + R2R(1, 0) * t2(0));
dR(5, 2) = R2R(2, 1) * (R1(0, 0) * t1(2) - R1(2, 0) * t1(0)) -
R2R(2, 0) * (R1(0, 1) * t1(2) - R1(2, 1) * t1(0)) -
R1(1, 0) * (R2R(2, 0) * Rt(2) - R2R(2, 2) * Rt(0) + R2R(0, 1) * t2(1) - R2R(1, 1) * t2(0)) -
R1(1, 1) * (R2R(2, 1) * Rt(2) - R2R(2, 2) * Rt(1) - R2R(0, 0) * t2(1) + R2R(1, 0) * t2(0));
dR(6, 0) = R2R(0, 1) * (R1(0, 2) * t1(1) - R1(1, 2) * t1(0)) -
R2R(0, 2) * (R1(0, 1) * t1(1) - R1(1, 1) * t1(0)) +
R1(2, 1) * (R2R(0, 0) * Rt(1) - R2R(0, 1) * Rt(0) - R2R(1, 2) * t2(2) + R2R(2, 2) * t2(1)) +
R1(2, 2) * (R2R(0, 0) * Rt(2) - R2R(0, 2) * Rt(0) + R2R(1, 1) * t2(2) - R2R(2, 1) * t2(1));
dR(6, 1) = R2R(0, 2) * (R1(0, 0) * t1(1) - R1(1, 0) * t1(0)) -
R2R(0, 0) * (R1(0, 2) * t1(1) - R1(1, 2) * t1(0)) -
R1(2, 0) * (R2R(0, 0) * Rt(1) - R2R(0, 1) * Rt(0) - R2R(1, 2) * t2(2) + R2R(2, 2) * t2(1)) +
R1(2, 2) * (R2R(0, 1) * Rt(2) - R2R(0, 2) * Rt(1) - R2R(1, 0) * t2(2) + R2R(2, 0) * t2(1));
dR(6, 2) = R2R(0, 0) * (R1(0, 1) * t1(1) - R1(1, 1) * t1(0)) -
R2R(0, 1) * (R1(0, 0) * t1(1) - R1(1, 0) * t1(0)) -
R1(2, 0) * (R2R(0, 0) * Rt(2) - R2R(0, 2) * Rt(0) + R2R(1, 1) * t2(2) - R2R(2, 1) * t2(1)) -
R1(2, 1) * (R2R(0, 1) * Rt(2) - R2R(0, 2) * Rt(1) - R2R(1, 0) * t2(2) + R2R(2, 0) * t2(1));
dR(7, 0) = R2R(1, 1) * (R1(0, 2) * t1(1) - R1(1, 2) * t1(0)) -
R2R(1, 2) * (R1(0, 1) * t1(1) - R1(1, 1) * t1(0)) +
R1(2, 1) * (R2R(1, 0) * Rt(1) - R2R(1, 1) * Rt(0) + R2R(0, 2) * t2(2) - R2R(2, 2) * t2(0)) +
R1(2, 2) * (R2R(1, 0) * Rt(2) - R2R(1, 2) * Rt(0) - R2R(0, 1) * t2(2) + R2R(2, 1) * t2(0));
dR(7, 1) = R2R(1, 2) * (R1(0, 0) * t1(1) - R1(1, 0) * t1(0)) -
R2R(1, 0) * (R1(0, 2) * t1(1) - R1(1, 2) * t1(0)) -
R1(2, 0) * (R2R(1, 0) * Rt(1) - R2R(1, 1) * Rt(0) + R2R(0, 2) * t2(2) - R2R(2, 2) * t2(0)) +
R1(2, 2) * (R2R(1, 1) * Rt(2) - R2R(1, 2) * Rt(1) + R2R(0, 0) * t2(2) - R2R(2, 0) * t2(0));
dR(7, 2) = R2R(1, 0) * (R1(0, 1) * t1(1) - R1(1, 1) * t1(0)) -
R2R(1, 1) * (R1(0, 0) * t1(1) - R1(1, 0) * t1(0)) -
R1(2, 0) * (R2R(1, 0) * Rt(2) - R2R(1, 2) * Rt(0) - R2R(0, 1) * t2(2) + R2R(2, 1) * t2(0)) -
R1(2, 1) * (R2R(1, 1) * Rt(2) - R2R(1, 2) * Rt(1) + R2R(0, 0) * t2(2) - R2R(2, 0) * t2(0));
dR(8, 0) = R2R(2, 1) * (R1(0, 2) * t1(1) - R1(1, 2) * t1(0)) -
R2R(2, 2) * (R1(0, 1) * t1(1) - R1(1, 1) * t1(0)) +
R1(2, 1) * (R2R(2, 0) * Rt(1) - R2R(2, 1) * Rt(0) - R2R(0, 2) * t2(1) + R2R(1, 2) * t2(0)) +
R1(2, 2) * (R2R(2, 0) * Rt(2) - R2R(2, 2) * Rt(0) + R2R(0, 1) * t2(1) - R2R(1, 1) * t2(0));
dR(8, 1) = R2R(2, 2) * (R1(0, 0) * t1(1) - R1(1, 0) * t1(0)) -
R2R(2, 0) * (R1(0, 2) * t1(1) - R1(1, 2) * t1(0)) -
R1(2, 0) * (R2R(2, 0) * Rt(1) - R2R(2, 1) * Rt(0) - R2R(0, 2) * t2(1) + R2R(1, 2) * t2(0)) +
R1(2, 2) * (R2R(2, 1) * Rt(2) - R2R(2, 2) * Rt(1) - R2R(0, 0) * t2(1) + R2R(1, 0) * t2(0));
dR(8, 2) = R2R(2, 0) * (R1(0, 1) * t1(1) - R1(1, 1) * t1(0)) -
R2R(2, 1) * (R1(0, 0) * t1(1) - R1(1, 0) * t1(0)) -
R1(2, 0) * (R2R(2, 0) * Rt(2) - R2R(2, 2) * Rt(0) + R2R(0, 1) * t2(1) - R2R(1, 1) * t2(0)) -
R1(2, 1) * (R2R(2, 1) * Rt(2) - R2R(2, 2) * Rt(1) - R2R(0, 0) * t2(1) + R2R(1, 0) * t2(0));
dt(0, 0) = R2R(0, 2) * R1(0, 1) - R2R(0, 1) * R1(0, 2);
dt(0, 1) = R2R(0, 0) * R1(0, 2) - R2R(0, 2) * R1(0, 0);
dt(0, 2) = R2R(0, 1) * R1(0, 0) - R2R(0, 0) * R1(0, 1);
dt(1, 0) = R2R(1, 2) * R1(0, 1) - R2R(1, 1) * R1(0, 2);
dt(1, 1) = R2R(1, 0) * R1(0, 2) - R2R(1, 2) * R1(0, 0);
dt(1, 2) = R2R(1, 1) * R1(0, 0) - R2R(1, 0) * R1(0, 1);
dt(2, 0) = R2R(2, 2) * R1(0, 1) - R2R(2, 1) * R1(0, 2);
dt(2, 1) = R2R(2, 0) * R1(0, 2) - R2R(2, 2) * R1(0, 0);
dt(2, 2) = R2R(2, 1) * R1(0, 0) - R2R(2, 0) * R1(0, 1);
dt(3, 0) = R2R(0, 2) * R1(1, 1) - R2R(0, 1) * R1(1, 2);
dt(3, 1) = R2R(0, 0) * R1(1, 2) - R2R(0, 2) * R1(1, 0);
dt(3, 2) = R2R(0, 1) * R1(1, 0) - R2R(0, 0) * R1(1, 1);
dt(4, 0) = R2R(1, 2) * R1(1, 1) - R2R(1, 1) * R1(1, 2);
dt(4, 1) = R2R(1, 0) * R1(1, 2) - R2R(1, 2) * R1(1, 0);
dt(4, 2) = R2R(1, 1) * R1(1, 0) - R2R(1, 0) * R1(1, 1);
dt(5, 0) = R2R(2, 2) * R1(1, 1) - R2R(2, 1) * R1(1, 2);
dt(5, 1) = R2R(2, 0) * R1(1, 2) - R2R(2, 2) * R1(1, 0);
dt(5, 2) = R2R(2, 1) * R1(1, 0) - R2R(2, 0) * R1(1, 1);
dt(6, 0) = R2R(0, 2) * R1(2, 1) - R2R(0, 1) * R1(2, 2);
dt(6, 1) = R2R(0, 0) * R1(2, 2) - R2R(0, 2) * R1(2, 0);
dt(6, 2) = R2R(0, 1) * R1(2, 0) - R2R(0, 0) * R1(2, 1);
dt(7, 0) = R2R(1, 2) * R1(2, 1) - R2R(1, 1) * R1(2, 2);
dt(7, 1) = R2R(1, 0) * R1(2, 2) - R2R(1, 2) * R1(2, 0);
dt(7, 2) = R2R(1, 1) * R1(2, 0) - R2R(1, 0) * R1(2, 1);
dt(8, 0) = R2R(2, 2) * R1(2, 1) - R2R(2, 1) * R1(2, 2);
dt(8, 1) = R2R(2, 0) * R1(2, 2) - R2R(2, 2) * R1(2, 0);
dt(8, 2) = R2R(2, 1) * R1(2, 0) - R2R(2, 0) * R1(2, 1);
for (size_t k = 0; k < m.x1.size(); ++k) {
double C = m.x2[k].homogeneous().dot(E * m.x1[k].homogeneous());
// J_C is the Jacobian of the epipolar constraint w.r.t. the image points
Eigen::Vector4d J_C;
J_C << E.block<3, 2>(0, 0).transpose() * m.x2[k].homogeneous(),
E.block<2, 3>(0, 0) * m.x1[k].homogeneous();
const double nJ_C = J_C.norm();
const double inv_nJ_C = 1.0 / nJ_C;
const double r = C * inv_nJ_C;
// Compute weight from robust loss function (used in the IRLS)
const double weight = weights[match_k][k] * loss_fn.weight(r * r);
if (weight == 0.0) {
continue;
}
num_residuals++;
// Compute Jacobian of Sampson error w.r.t the fundamental/essential matrix (3x3)
Eigen::Matrix<double, 1, 9> dF;
dF << m.x1[k](0) * m.x2[k](0), m.x1[k](0) * m.x2[k](1), m.x1[k](0), m.x1[k](1) * m.x2[k](0),
m.x1[k](1) * m.x2[k](1), m.x1[k](1), m.x2[k](0), m.x2[k](1), 1.0;
const double s = C * inv_nJ_C * inv_nJ_C;
dF(0) -= s * (J_C(2) * m.x1[k](0) + J_C(0) * m.x2[k](0));
dF(1) -= s * (J_C(3) * m.x1[k](0) + J_C(0) * m.x2[k](1));
dF(2) -= s * (J_C(0));
dF(3) -= s * (J_C(2) * m.x1[k](1) + J_C(1) * m.x2[k](0));
dF(4) -= s * (J_C(3) * m.x1[k](1) + J_C(1) * m.x2[k](1));
dF(5) -= s * (J_C(1));
dF(6) -= s * (J_C(2));
dF(7) -= s * (J_C(3));
dF *= inv_nJ_C;
// and then w.r.t. the pose parameters
Eigen::Matrix<double, 1, 6> J;
J.block<1, 3>(0, 0) = dF * dR;
J.block<1, 3>(0, 3) = dF * dt;
// Accumulate into JtJ and Jtr
Jtr += weight * C * inv_nJ_C * J.transpose();
for (size_t i = 0; i < 6; ++i) {
for (size_t j = 0; j <= i; ++j) {
JtJ(i, j) += weight * (J(i) * J(j));
}
}
}
}
return num_residuals;
}
CameraPose step(Eigen::Matrix<double, 6, 1> dp, const CameraPose &pose) const {
CameraPose pose_new;
pose_new.q = quat_step_post(pose.q, dp.block<3, 1>(0, 0));
pose_new.t = pose.t + pose.rotate(dp.block<3, 1>(3, 0));
return pose_new;
}
typedef CameraPose param_t;
static constexpr size_t num_params = 6;
private:
const std::vector<PairwiseMatches> &matches;
const std::vector<CameraPose> &rig1_poses;
const std::vector<CameraPose> &rig2_poses;
const LossFunction &loss_fn;
const ResidualWeightVectors &weights;
};
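// Illustrative sketch (not part of the original header): the pairwise relative pose used
// above, i.e. the composition [R2 t2] * [R t] * [R1 t1]^-1 written with the quaternion
// helpers. This mirrors the expressions inside residual() and accumulate(); the function name
// is hypothetical.
inline CameraPose example_pairwise_relative_pose(const CameraPose &pose, const CameraPose &rig1,
                                                 const CameraPose &rig2) {
    CameraPose relpose;
    relpose.q = quat_multiply(rig2.q, quat_multiply(pose.q, quat_conj(rig1.q)));
    relpose.t = rig2.t + quat_rotate(rig2.q, pose.t) - relpose.rotate(rig1.t);
    return relpose;
}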
template <typename LossFunction, typename AbsResidualsVector = UniformWeightVector,
typename RelResidualsVectors = UniformWeightVectors>
class HybridPoseJacobianAccumulator {
public:
HybridPoseJacobianAccumulator(const std::vector<Point2D> &points2D, const std::vector<Point3D> &points3D,
const std::vector<PairwiseMatches> &pairwise_matches,
const std::vector<CameraPose> &map_ext, const LossFunction &l,
const LossFunction &l_epi,
const AbsResidualsVector &weights_abs = AbsResidualsVector(),
const RelResidualsVectors &weights_rel = RelResidualsVectors())
: abs_pose_accum(points2D, points3D, trivial_camera, l, weights_abs),
gen_rel_accum(pairwise_matches, map_ext, trivial_rig, l_epi, weights_rel) {
trivial_camera.model_id = NullCameraModel::model_id;
trivial_rig.emplace_back();
}
double residual(const CameraPose &pose) const {
return abs_pose_accum.residual(pose) + gen_rel_accum.residual(pose);
}
size_t accumulate(const CameraPose &pose, Eigen::Matrix<double, 6, 6> &JtJ,
Eigen::Matrix<double, 6, 1> &Jtr) const {
return abs_pose_accum.accumulate(pose, JtJ, Jtr) + gen_rel_accum.accumulate(pose, JtJ, Jtr);
}
CameraPose step(Eigen::Matrix<double, 6, 1> dp, const CameraPose &pose) const {
CameraPose pose_new;
pose_new.q = quat_step_post(pose.q, dp.block<3, 1>(0, 0));
pose_new.t = pose.t + pose.rotate(dp.block<3, 1>(3, 0));
return pose_new;
}
typedef CameraPose param_t;
static constexpr size_t num_params = 6;
private:
Camera trivial_camera;
std::vector<CameraPose> trivial_rig;
CameraJacobianAccumulator<NullCameraModel, LossFunction, AbsResidualsVector> abs_pose_accum;
GeneralizedRelativePoseJacobianAccumulator<LossFunction, RelResidualsVectors> gen_rel_accum;
};
// This is the SVD factorization proposed by Bartoli and Sturm in
// Non-Linear Estimation of the Fundamental Matrix With Minimal Parameters, PAMI 2004
// Though we do different updates (lie vs the euler angles used in the original paper)
struct FactorizedFundamentalMatrix {
FactorizedFundamentalMatrix() {}
FactorizedFundamentalMatrix(const Eigen::Matrix3d &F) {
Eigen::JacobiSVD<Eigen::Matrix3d> svd(F, Eigen::ComputeFullV | Eigen::ComputeFullU);
Eigen::Matrix3d U = svd.matrixU();
Eigen::Matrix3d V = svd.matrixV();
if (U.determinant() < 0) {
U = -U;
}
if (V.determinant() < 0) {
V = -V;
}
qU = rotmat_to_quat(U);
qV = rotmat_to_quat(V);
Eigen::Vector3d s = svd.singularValues();
sigma = s(1) / s(0);
}
Eigen::Matrix3d F() const {
Eigen::Matrix3d U = quat_to_rotmat(qU);
Eigen::Matrix3d V = quat_to_rotmat(qV);
return U.col(0) * V.col(0).transpose() + sigma * U.col(1) * V.col(1).transpose();
}
Eigen::Vector4d qU, qV;
double sigma;
};
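// Illustrative sketch (not part of the original header): factorizing and reconstructing a
// fundamental matrix. Since only the singular-value ratio sigma = s1/s0 is stored (and s2 = 0
// for a valid rank-2 F), F() recovers the input matrix up to a scalar factor, which is all
// that matters for the epipolar constraint. The function name is hypothetical.
inline Eigen::Matrix3d example_factorize_roundtrip(const Eigen::Matrix3d &F_in) {
    FactorizedFundamentalMatrix FF(F_in);
    return FF.F(); // proportional to F_in when F_in has rank 2
}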
template <typename LossFunction, typename ResidualWeightVector = UniformWeightVector>
class FundamentalJacobianAccumulator {
public:
FundamentalJacobianAccumulator(const std::vector<Point2D> &points2D_1, const std::vector<Point2D> &points2D_2,
const LossFunction &l, const ResidualWeightVector &w = ResidualWeightVector())
: x1(points2D_1), x2(points2D_2), loss_fn(l), weights(w) {}
double residual(const FactorizedFundamentalMatrix &FF) const {
Eigen::Matrix3d F = FF.F();
double cost = 0.0;
for (size_t k = 0; k < x1.size(); ++k) {
double C = x2[k].homogeneous().dot(F * x1[k].homogeneous());
double nJc_sq = (F.block<2, 3>(0, 0) * x1[k].homogeneous()).squaredNorm() +
(F.block<3, 2>(0, 0).transpose() * x2[k].homogeneous()).squaredNorm();
double r2 = (C * C) / nJc_sq;
cost += weights[k] * loss_fn.loss(r2);
}
return cost;
}
size_t accumulate(const FactorizedFundamentalMatrix &FF, Eigen::Matrix<double, 7, 7> &JtJ,
Eigen::Matrix<double, 7, 1> &Jtr) const {
const Eigen::Matrix3d F = FF.F();
// Matrices contain the jacobians of F w.r.t. the factorized fundamental matrix (U,V,sigma)
const Eigen::Matrix3d U = quat_to_rotmat(FF.qU);
const Eigen::Matrix3d V = quat_to_rotmat(FF.qV);
const Eigen::Matrix3d d_sigma = U.col(1) * V.col(1).transpose();
Eigen::Matrix<double, 9, 7> dF_dparams;
dF_dparams << 0, F(2, 0), -F(1, 0), 0, F(0, 2), -F(0, 1), d_sigma(0, 0), -F(2, 0), 0, F(0, 0), 0, F(1, 2),
-F(1, 1), d_sigma(1, 0), F(1, 0), -F(0, 0), 0, 0, F(2, 2), -F(2, 1), d_sigma(2, 0), 0, F(2, 1), -F(1, 1),
-F(0, 2), 0, F(0, 0), d_sigma(0, 1), -F(2, 1), 0, F(0, 1), -F(1, 2), 0, F(1, 0), d_sigma(1, 1), F(1, 1),
-F(0, 1), 0, -F(2, 2), 0, F(2, 0), d_sigma(2, 1), 0, F(2, 2), -F(1, 2), F(0, 1), -F(0, 0), 0, d_sigma(0, 2),
-F(2, 2), 0, F(0, 2), F(1, 1), -F(1, 0), 0, d_sigma(1, 2), F(1, 2), -F(0, 2), 0, F(2, 1), -F(2, 0), 0,
d_sigma(2, 2);
size_t num_residuals = 0;
for (size_t k = 0; k < x1.size(); ++k) {
const double C = x2[k].homogeneous().dot(F * x1[k].homogeneous());
// J_C is the Jacobian of the epipolar constraint w.r.t. the image points
Eigen::Vector4d J_C;
J_C << F.block<3, 2>(0, 0).transpose() * x2[k].homogeneous(), F.block<2, 3>(0, 0) * x1[k].homogeneous();
const double nJ_C = J_C.norm();
const double inv_nJ_C = 1.0 / nJ_C;
const double r = C * inv_nJ_C;
// Compute weight from robust loss function (used in the IRLS)
const double weight = weights[k] * loss_fn.weight(r * r);
if (weight == 0.0) {
continue;
}
num_residuals++;
// Compute Jacobian of Sampson error w.r.t the fundamental/essential matrix (3x3)
Eigen::Matrix<double, 1, 9> dF;
dF << x1[k](0) * x2[k](0), x1[k](0) * x2[k](1), x1[k](0), x1[k](1) * x2[k](0), x1[k](1) * x2[k](1),
x1[k](1), x2[k](0), x2[k](1), 1.0;
const double s = C * inv_nJ_C * inv_nJ_C;
dF(0) -= s * (J_C(2) * x1[k](0) + J_C(0) * x2[k](0));
dF(1) -= s * (J_C(3) * x1[k](0) + J_C(0) * x2[k](1));
dF(2) -= s * (J_C(0));
dF(3) -= s * (J_C(2) * x1[k](1) + J_C(1) * x2[k](0));
dF(4) -= s * (J_C(3) * x1[k](1) + J_C(1) * x2[k](1));
dF(5) -= s * (J_C(1));
dF(6) -= s * (J_C(2));
dF(7) -= s * (J_C(3));
dF *= inv_nJ_C;
// and then w.r.t. the pose parameters (rotation + tangent basis for translation)
Eigen::Matrix<double, 1, 7> J = dF * dF_dparams;
// Accumulate into JtJ and Jtr
Jtr += weight * C * inv_nJ_C * J.transpose();
for (size_t i = 0; i < 7; ++i) {
for (size_t j = 0; j <= i; ++j) {
JtJ(i, j) += weight * (J(i) * J(j));
}
}
}
return num_residuals;
}
FactorizedFundamentalMatrix step(Eigen::Matrix<double, 7, 1> dp, const FactorizedFundamentalMatrix &F) const {
FactorizedFundamentalMatrix F_new;
F_new.qU = quat_step_pre(F.qU, dp.block<3, 1>(0, 0));
F_new.qV = quat_step_pre(F.qV, dp.block<3, 1>(3, 0));
F_new.sigma = F.sigma + dp(6);
return F_new;
}
typedef FactorizedFundamentalMatrix param_t;
static constexpr size_t num_params = 7;
private:
const std::vector<Point2D> &x1;
const std::vector<Point2D> &x2;
const LossFunction &loss_fn;
const ResidualWeightVector &weights;
};
// Non-linear refinement of transfer error |x2 - pi(H*x1)|^2, parameterized by fixing H(2,2) = 1
// I did some preliminary experiments comparing different error functions (e.g. symmetric and transfer)
// as well as other parameterizations (different affine patches, SVD as in Bartoli/Sturm, etc)
// but it does not seem to have a big impact (and is sometimes even worse)
// Implementations of these can be found at https://github.com/vlarsson/homopt
template <typename LossFunction, typename ResidualWeightVector = UniformWeightVector>
class HomographyJacobianAccumulator {
public:
HomographyJacobianAccumulator(const std::vector<Point2D> &points2D_1, const std::vector<Point2D> &points2D_2,
const LossFunction &l, const ResidualWeightVector &w = ResidualWeightVector())
: x1(points2D_1), x2(points2D_2), loss_fn(l), weights(w) {}
double residual(const Eigen::Matrix3d &H) const {
double cost = 0.0;
const double H0_0 = H(0, 0), H0_1 = H(0, 1), H0_2 = H(0, 2);
const double H1_0 = H(1, 0), H1_1 = H(1, 1), H1_2 = H(1, 2);
const double H2_0 = H(2, 0), H2_1 = H(2, 1), H2_2 = H(2, 2);
for (size_t k = 0; k < x1.size(); ++k) {
const double x1_0 = x1[k](0), x1_1 = x1[k](1);
const double x2_0 = x2[k](0), x2_1 = x2[k](1);
const double Hx1_0 = H0_0 * x1_0 + H0_1 * x1_1 + H0_2;
const double Hx1_1 = H1_0 * x1_0 + H1_1 * x1_1 + H1_2;
const double inv_Hx1_2 = 1.0 / (H2_0 * x1_0 + H2_1 * x1_1 + H2_2);
const double r0 = Hx1_0 * inv_Hx1_2 - x2_0;
const double r1 = Hx1_1 * inv_Hx1_2 - x2_1;
const double r2 = r0 * r0 + r1 * r1;
cost += weights[k] * loss_fn.loss(r2);
}
return cost;
}
size_t accumulate(const Eigen::Matrix3d &H, Eigen::Matrix<double, 8, 8> &JtJ, Eigen::Matrix<double, 8, 1> &Jtr) {
Eigen::Matrix<double, 2, 8> dH;
const double H0_0 = H(0, 0), H0_1 = H(0, 1), H0_2 = H(0, 2);
const double H1_0 = H(1, 0), H1_1 = H(1, 1), H1_2 = H(1, 2);
const double H2_0 = H(2, 0), H2_1 = H(2, 1), H2_2 = H(2, 2);
size_t num_residuals = 0;
for (size_t k = 0; k < x1.size(); ++k) {
const double x1_0 = x1[k](0), x1_1 = x1[k](1);
const double x2_0 = x2[k](0), x2_1 = x2[k](1);
const double Hx1_0 = H0_0 * x1_0 + H0_1 * x1_1 + H0_2;
const double Hx1_1 = H1_0 * x1_0 + H1_1 * x1_1 + H1_2;
const double inv_Hx1_2 = 1.0 / (H2_0 * x1_0 + H2_1 * x1_1 + H2_2);
const double z0 = Hx1_0 * inv_Hx1_2;
const double z1 = Hx1_1 * inv_Hx1_2;
const double r0 = z0 - x2_0;
const double r1 = z1 - x2_1;
const double r2 = r0 * r0 + r1 * r1;
// Compute weight from robust loss function (used in the IRLS)
const double weight = weights[k] * loss_fn.weight(r2);
if (weight == 0.0)
continue;
num_residuals++;
dH << x1_0, 0.0, -x1_0 * z0, x1_1, 0.0, -x1_1 * z0, 1.0, 0.0, // -z0,
0.0, x1_0, -x1_0 * z1, 0.0, x1_1, -x1_1 * z1, 0.0, 1.0; // -z1,
dH = dH * inv_Hx1_2;
// accumulate into JtJ and Jtr
Jtr += dH.transpose() * (weight * Eigen::Vector2d(r0, r1));
for (size_t i = 0; i < 8; ++i) {
for (size_t j = 0; j <= i; ++j) {
JtJ(i, j) += weight * dH.col(i).dot(dH.col(j));
}
}
}
return num_residuals;
}
Eigen::Matrix3d step(Eigen::Matrix<double, 8, 1> dp, const Eigen::Matrix3d &H) const {
Eigen::Matrix3d H_new = H;
Eigen::Map<Eigen::Matrix<double, 8, 1>>(H_new.data()) += dp;
return H_new;
}
typedef Eigen::Matrix3d param_t;
static constexpr size_t num_params = 8;
private:
const std::vector<Point2D> &x1;
const std::vector<Point2D> &x2;
const LossFunction &loss_fn;
const ResidualWeightVector &weights;
};
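// Illustrative sketch (not part of the original header): the transfer error |x2 - pi(H*x1)|^2
// for a single correspondence, matching the residual above. Note that step() maps only the
// first eight column-major entries of H, which is exactly how H(2,2) is kept fixed at its
// current value (typically 1). The function name is hypothetical.
inline double example_transfer_error(const Eigen::Matrix3d &H, const Eigen::Vector2d &x1,
                                     const Eigen::Vector2d &x2) {
    const Eigen::Vector3d Hx1 = H * x1.homogeneous();
    return (Hx1.hnormalized() - x2).squaredNorm();
}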
template <typename LossFunction, typename ResidualWeightVector = UniformWeightVector>
class Radial1DJacobianAccumulator {
public:
Radial1DJacobianAccumulator(const std::vector<Point2D> &points2D, const std::vector<Point3D> &points3D,
const LossFunction &l, const ResidualWeightVector &w = ResidualWeightVector())
: x(points2D), X(points3D), loss_fn(l), weights(w) {}
double residual(const CameraPose &pose) const {
double cost = 0.0;
Eigen::Matrix3d R = pose.R();
for (size_t k = 0; k < x.size(); ++k) {
Eigen::Vector2d z = (R * X[k] + pose.t).template topRows<2>().normalized();
double alpha = z.dot(x[k]);
// This assumes points will not cross the half-space during optimization
if (alpha < 0)
continue;
double r2 = (alpha * z - x[k]).squaredNorm();
cost += weights[k] * loss_fn.loss(r2);
}
return cost;
}
size_t accumulate(const CameraPose &pose, Eigen::Matrix<double, 5, 5> &JtJ,
Eigen::Matrix<double, 5, 1> &Jtr) const {
Eigen::Matrix3d R = pose.R();
size_t num_residuals = 0;
for (size_t k = 0; k < x.size(); ++k) {
Eigen::Vector3d RX = R * X[k];
const Eigen::Vector2d z = (RX + pose.t).topRows<2>();
const double n_z = z.norm();
const Eigen::Vector2d zh = z / n_z;
const double alpha = zh.dot(x[k]);
// This assumes points will not cross the half-space during optimization
if (alpha < 0)
continue;
// Setup residual
Eigen::Vector2d r = alpha * zh - x[k];
const double r_squared = r.squaredNorm();
const double weight = weights[k] * loss_fn.weight(r_squared);
if (weight == 0.0) {
continue;
}
num_residuals++;
// differentiate residual with respect to z
Eigen::Matrix2d dr_dz = (zh * x[k].transpose() + alpha * Eigen::Matrix2d::Identity()) *
(Eigen::Matrix2d::Identity() - zh * zh.transpose()) / n_z;
Eigen::Matrix<double, 2, 5> dz;
dz << 0.0, RX(2), -RX(1), 1.0, 0.0, -RX(2), 0.0, RX(0), 0.0, 1.0;
Eigen::Matrix<double, 2, 5> J = dr_dz * dz;
// Accumulate into JtJ and Jtr
Jtr += weight * J.transpose() * r;
for (size_t i = 0; i < 5; ++i) {
for (size_t j = 0; j <= i; ++j) {
JtJ(i, j) += weight * (J.col(i).dot(J.col(j)));
}
}
}
return num_residuals;
}
CameraPose step(Eigen::Matrix<double, 5, 1> dp, const CameraPose &pose) const {
CameraPose pose_new;
pose_new.q = quat_step_pre(pose.q, dp.block<3, 1>(0, 0));
pose_new.t(0) = pose.t(0) + dp(3);
pose_new.t(1) = pose.t(1) + dp(4);
return pose_new;
}
typedef CameraPose param_t;
static constexpr size_t num_params = 5;
private:
const std::vector<Point2D> &x;
const std::vector<Point3D> &X;
const LossFunction &loss_fn;
const ResidualWeightVector &weights;
};
} // namespace poselib
#endif
| vlarsson/PoseLib | PoseLib/robust/jacobian_impl.h | C | bsd-3-clause | 59,820 |
# encoding: UTF-8
require 'sequel'
require 'fileutils'
require 'uuidtools'
require_relative './user'
require_relative './table'
require_relative './log'
require_relative './visualization/member'
require_relative './table_registrar'
require_relative './quota_checker'
require_relative '../../lib/cartodb/errors'
require_relative '../../lib/cartodb/import_error_codes'
require_relative '../../lib/cartodb/metrics'
require_relative '../../lib/cartodb/mixpanel'
require_relative '../../lib/cartodb_stats'
require_relative '../../config/initializers/redis'
require_relative '../../services/importer/lib/importer'
require_relative '../connectors/importer'
require_relative '../../services/importer/lib/importer/datasource_downloader'
require_relative '../../services/datasources/lib/datasources'
require_relative '../../services/importer/lib/importer/unp'
require_relative '../../services/importer/lib/importer/post_import_handler'
require_relative '../../services/importer/lib/importer/mail_notifier'
require_relative '../../services/platform-limits/platform_limits'
include CartoDB::Datasources
class DataImport < Sequel::Model
MERGE_WITH_UNMATCHING_COLUMN_TYPES_RE = /No .*matches.*argument type.*/
attr_accessor :log, :results
# @see also the store_results() method when adding new fields
PUBLIC_ATTRIBUTES = [
'id',
'user_id',
'table_id',
'data_type',
'table_name',
'state',
'error_code',
'queue_id',
'get_error_text',
'get_error_source',
'tables_created_count',
'synchronization_id',
'service_name',
'service_item_id',
'type_guessing',
'quoted_fields_guessing',
'content_guessing',
'server',
'host',
'upload_host',
'resque_ppid',
'create_visualization',
'visualization_id',
# String field containing a json, format:
# {
# twitter_credits: Integer
# }
# No automatic conversion coded
'user_defined_limits'
]
# These attributes will get removed from public_values upon calling api_public_values
NON_API_VISIBLE_ATTRIBUTES = [
'service_item_id',
'service_name',
'server',
'host',
'upload_host',
'resque_ppid',
]
# Not all constants are used; they are kept so that we have a record of the available states
STATE_ENQUEUED = 'enqueued' # Default state for imports whose files are not yet at "import source"
STATE_PENDING = 'pending' # Default state for files already at "import source" (e.g. S3 bucket)
STATE_UNPACKING = 'unpacking'
STATE_IMPORTING = 'importing'
STATE_COMPLETE = 'complete'
STATE_UPLOADING = 'uploading'
STATE_FAILURE = 'failure'
STATE_STUCK = 'stuck'
TYPE_EXTERNAL_TABLE = 'external_table'
TYPE_FILE = 'file'
TYPE_URL = 'url'
TYPE_QUERY = 'query'
TYPE_DATASOURCE = 'datasource'
def after_initialize
instantiate_log
self.results = []
self.state ||= STATE_PENDING
end
# This before_create should only be necessary to track data imports from the old dashboard.
# New ones are already tracked during the data_import create inside the controller.
def before_create
if self.from_common_data?
self.extra_options = self.extra_options.merge({:common_data => true})
end
end
def before_save
self.logger = self.log.id unless self.logger.present?
self.updated_at = Time.now
end
def from_common_data?
if Cartodb.config[:common_data] &&
!Cartodb.config[:common_data]['username'].blank? &&
!Cartodb.config[:common_data]['host'].blank?
if !self.extra_options.has_key?('common_data') &&
self.data_source &&
self.data_source.include?("#{Cartodb.config[:common_data]['username']}.#{Cartodb.config[:common_data]['host']}")
return true
end
end
return false
end
def extra_options
return {} if self.import_extra_options.nil?
::JSON.parse(self.import_extra_options).symbolize_keys
end
def extra_options=(value)
if !value.nil?
self.import_extra_options = ::JSON.dump(value)
end
end
def dataimport_logger
@@dataimport_logger ||= Logger.new("#{Rails.root}/log/imports.log")
end
# Meant to be used when calling from API endpoints (hides some fields not needed at editor scope)
def api_public_values
public_values.reject { |key|
DataImport::NON_API_VISIBLE_ATTRIBUTES.include?(key)
}
end
def public_values
values = Hash[PUBLIC_ATTRIBUTES.map{ |attribute| [attribute, send(attribute)] }]
values.merge!('queue_id' => id)
values.merge!(success: success) if (state == STATE_COMPLETE || state == STATE_FAILURE || state == STATE_STUCK)
values
end
def run_import!
self.resque_ppid = Process.ppid
self.server = Socket.gethostname
log.append "Running on server #{self.server} with PID: #{Process.pid}"
begin
success = !!dispatch
rescue TokenExpiredOrInvalidError => ex
success = false
begin
current_user.oauths.remove(ex.service_name)
rescue => ex2
log.append "Exception removing OAuth: #{ex2.message}"
log.append ex2.backtrace
end
end
log.append 'After dispatch'
if self.results.empty?
self.error_code = 1002
self.state = STATE_FAILURE
save
end
success ? handle_success : handle_failure
Rails.logger.debug log.to_s
self
rescue CartoDB::QuotaExceeded => quota_exception
CartoDB::notify_warning_exception(quota_exception)
handle_failure(quota_exception)
self
rescue => exception
log.append "Exception: #{exception.to_s}"
log.append exception.backtrace
stacktrace = exception.to_s + exception.backtrace.join
Rollbar.report_message('Import error', 'error', error_info: stacktrace)
handle_failure(exception)
self
end
# Notice that this returns the entire error hash, not just the text
# It seems that it's only used for the rollbar reporting
def get_error_text
if self.error_code == CartoDB::NO_ERROR_CODE
CartoDB::NO_ERROR_CODE
else
self.error_code.blank? ? CartoDB::IMPORTER_ERROR_CODES[99999] : CartoDB::IMPORTER_ERROR_CODES[self.error_code]
end
end
def get_error_source
if self.error_code == CartoDB::NO_ERROR_CODE
CartoDB::NO_ERROR_CODE
else
self.error_code.blank? ? CartoDB::IMPORTER_ERROR_CODES[99999][:source] : CartoDB::IMPORTER_ERROR_CODES[self.error_code][:source]
end
end
def raise_over_table_quota_error
log.append 'Over account table limit, please upgrade'
self.error_code = 8002
self.state = STATE_FAILURE
save
raise CartoDB::QuotaExceeded, 'More tables required'
end
def mark_as_failed_if_stuck!
return false unless stuck?
log.append "Import timed out. Id:#{self.id} State:#{self.state} Created at:#{self.created_at} Running imports:#{running_import_ids}"
self.success = false
self.state = STATE_STUCK
save
CartoDB::notify_exception(
CartoDB::Importer2::GenericImportError.new('Import timed out or got stuck'),
user: current_user
)
true
end
def data_source=(data_source)
path = Rails.root.join("public#{data_source}").to_s
if data_source.nil?
self.values[:data_type] = TYPE_DATASOURCE
self.values[:data_source] = ''
elsif File.exist?(path) && !File.directory?(path)
self.values[:data_type] = TYPE_FILE
self.values[:data_source] = path
elsif Addressable::URI.parse(data_source).host.present?
self.values[:data_type] = TYPE_URL
self.values[:data_source] = data_source
end
# else SQL-based import
end
def remove_uploaded_resources
return nil unless uploaded_file
path = Rails.root.join('public', 'uploads', uploaded_file[1])
FileUtils.rm_rf(path) if Dir.exists?(path)
end
def handle_success
self.success = true
self.state = STATE_COMPLETE
table_names = results.map { |result| result.name }.select { |name| name != nil}.sort
self.table_names = table_names.join(' ')
self.tables_created_count = table_names.size
log.append "Import finished\n"
save
begin
CartoDB::PlatformLimits::Importer::UserConcurrentImportsAmount.new({
user: current_user,
redis: {
db: $users_metadata
}
})
.decrement
rescue => exception
CartoDB::Logger.info('Error decreasing concurrent import limit',
"#{exception.message} #{exception.backtrace.inspect}")
end
notify(results)
self
end
def handle_failure(supplied_exception = nil)
self.success = false
self.state = STATE_FAILURE
if !supplied_exception.nil? && supplied_exception.respond_to?(:error_code)
self.error_code = supplied_exception.error_code
end
log.append "ERROR!\n"
self.save
begin
CartoDB::PlatformLimits::Importer::UserConcurrentImportsAmount.new({
user: current_user,
redis: {
db: $users_metadata
}
})
.decrement
rescue => exception
CartoDB::Logger.info('Error decreasing concurrent import limit',
"#{exception.message} #{exception.backtrace.inspect}")
end
notify(results)
self
rescue => exception
log.append "Exception: #{exception.to_s}"
log.append exception.backtrace
self
end
def table
  # We can assume the owner is always the one who imports the data,
  # so there is no need to change to a Visualization::Collection based load.
  # TODO: better to use an association for this
::Table.new(user_table: UserTable.where(id: table_id, user_id: user_id).first)
end
def is_raster?
::JSON.parse(self.stats).select{ |item| item['type'] == '.tif' }.length > 0
end
# Calculates the maximum timeout in seconds for a given user, to be used when performing HTTP requests
# TODO: Candidate for being private if we join syncs and data imports someday
# TODO: Add timeout config (if we need to change this)
def self.http_timeout_for(user, assumed_kb_sec = 50*1024)
if user.nil? || !user.respond_to?(:quota_in_bytes)
raise ArgumentError.new('Need a User object to calculate its download speed')
end
if assumed_kb_sec < 1
raise ArgumentError.new('KB per second must be > 0')
end
(user.quota_in_bytes / assumed_kb_sec).round
end
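  # Illustrative example (not part of the original model), reading the default
  # assumed_kb_sec of 50*1024 as 50 KB/s expressed in bytes per second: a user
  # with a 250 MB quota would get roughly
  #   262_144_000 / 51_200 = 5_120 seconds (~85 minutes)
  # as the HTTP timeout.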
private
def dispatch
self.state = STATE_UPLOADING
return migrate_existing if migrate_table.present?
return from_table if table_copy.present? || from_query.present?
new_importer
rescue => exception
puts exception.to_s + exception.backtrace.join("\n")
raise
end
def running_import_ids
Resque::Worker.all.map do |worker|
next unless worker.job['queue'] == 'imports'
worker.job['payload']['args'].first['job_id'] rescue nil
end.compact
end
def public_url
return data_source unless uploaded_file
"https://#{current_user.username}.cartodb.com/#{uploaded_file[0]}"
end
def valid_uuid?(text)
!!UUIDTools::UUID.parse(text)
rescue TypeError
false
rescue ArgumentError
false
end
def before_destroy
self.remove_uploaded_resources
end
def instantiate_log
uuid = self.logger
if valid_uuid?(uuid)
self.log = CartoDB::Log.where(id: uuid.to_s).first
else
self.log = CartoDB::Log.new(
type: CartoDB::Log::TYPE_DATA_IMPORT,
user_id: current_user.id
)
self.log.save
end
end
def uploaded_file
data_source.to_s.match(/uploads\/([a-z0-9]{20})\/.*/)
end
  # A stuck job should have started but not finished, so its state should be neither complete nor failed; it should
  # have been in the queue for more than 5 minutes and it shouldn't be currently processed by any active worker
def stuck?
![STATE_ENQUEUED, STATE_PENDING, STATE_COMPLETE, STATE_FAILURE].include?(self.state) &&
self.created_at < 5.minutes.ago &&
!running_import_ids.include?(self.id)
end
def from_table
log.append 'from_table()'
number_of_tables = 1
quota_checker = CartoDB::QuotaChecker.new(current_user)
if quota_checker.will_be_over_table_quota?(number_of_tables)
raise_over_table_quota_error
end
query = table_copy ? "SELECT * FROM #{table_copy}" : from_query
new_table_name = import_from_query(table_name, query)
self.update(table_names: new_table_name)
migrate_existing(new_table_name)
self.results.push CartoDB::Importer2::Result.new(success: true, error: nil)
rescue Sequel::DatabaseError => exception
if exception.to_s =~ MERGE_WITH_UNMATCHING_COLUMN_TYPES_RE
set_merge_error(8004, exception.to_s)
else
set_merge_error(8003, exception.to_s)
end
false
end
def import_from_query(name, query)
log.append 'import_from_query()'
self.data_type = TYPE_QUERY
self.data_source = query
self.save
candidates = current_user.tables.select_map(:name)
table_name = ::Table.get_valid_table_name(name, {
connection: current_user.in_database,
database_schema: current_user.database_schema
})
current_user.in_database.run(%Q{CREATE TABLE #{table_name} AS #{query}})
if current_user.over_disk_quota?
log.append "Over storage quota. Dropping table #{table_name}"
current_user.in_database.run(%Q{DROP TABLE #{table_name}})
self.error_code = 8001
self.state = STATE_FAILURE
save
raise CartoDB::QuotaExceeded, 'More storage required'
end
table_name
end
def migrate_existing(imported_name=migrate_table, name=nil)
new_name = imported_name || name
log.append 'migrate_existing()'
table = ::Table.new
table.user_id = user_id
table.name = new_name
table.migrate_existing_table = imported_name
table.data_import_id = self.id
if table.valid?
log.append 'Table valid'
table.save
table.optimize
table.map.recalculate_bounds!
if current_user.remaining_quota < 0
log.append 'Over storage quota, removing table'
self.error_code = 8001
self.state = STATE_FAILURE
save
table.destroy
raise CartoDB::QuotaExceeded, 'More storage required'
end
refresh
self.table_id = table.id
self.table_name = table.name
log.append "Table '#{table.name}' registered"
save
true
else
reload
log.append "Table invalid: Error linking #{imported_name} to UI: " + table.errors.full_messages.join(' - ')
false
end
end
def pg_options
Rails.configuration.database_configuration[Rails.env].symbolize_keys
.merge(
user: current_user.database_username,
password: current_user.database_password,
database: current_user.database_name,
host: current_user.database_host
) {|key, o, n| n.nil? || n.empty? ? o : n}
end
def ogr2ogr_options
options = Cartodb.config.fetch(:ogr2ogr, {})
if options['binary'].nil? || options['csv_guessing'].nil?
{}
else
{
ogr2ogr_binary: options['binary'],
ogr2ogr_csv_guessing: options['csv_guessing'] && self.type_guessing,
quoted_fields_guessing: self.quoted_fields_guessing
}
end
end
def content_guessing_options
guessing_config = Cartodb.config.fetch(:importer, {}).deep_symbolize_keys.fetch(:content_guessing, {})
geocoder_config = Cartodb.config.fetch(:geocoder, {}).deep_symbolize_keys
if guessing_config[:enabled] and self.content_guessing and geocoder_config
{ guessing: guessing_config, geocoder: geocoder_config }
else
{ guessing: { enabled: false } }
end
end
def new_importer
manual_fields = {}
had_errors = false
log.append 'new_importer()'
datasource_provider = get_datasource_provider
# If we get an error while retrieving metadata, fail early
begin
downloader = get_downloader(datasource_provider)
rescue DataDownloadError => ex
had_errors = true
manual_fields = {
error_code: 1012,
log_info: ex.to_s
}
rescue ResponseError => ex
had_errors = true
manual_fields = {
error_code: 1011,
log_info: ex.to_s
}
rescue InvalidServiceError => ex
had_errors = true
manual_fields = {
error_code: 1013,
log_info: ex.to_s
}
rescue CartoDB::Importer2::FileTooBigError => ex
had_errors = true
manual_fields = {
error_code: ex.error_code,
log_info: CartoDB::IMPORTER_ERROR_CODES[ex.error_code]
}
rescue => ex
had_errors = true
manual_fields = {
error_code: 99999,
log_info: ex.to_s
}
end
if had_errors
importer = runner = datasource_provider = nil
else
tracker = lambda { |state|
self.state = state
save
}
post_import_handler = CartoDB::Importer2::PostImportHandler.new
case datasource_provider.class::DATASOURCE_NAME
when Url::ArcGIS::DATASOURCE_NAME
post_import_handler.add_fix_geometries_task
when Search::Twitter::DATASOURCE_NAME
post_import_handler.add_transform_geojson_geom_column
end
database_options = pg_options
self.host = database_options[:host]
runner = CartoDB::Importer2::Runner.new({
pg: database_options,
downloader: downloader,
log: log,
user: current_user,
unpacker: CartoDB::Importer2::Unp.new,
post_import_handler: post_import_handler
})
runner.loader_options = ogr2ogr_options.merge content_guessing_options
graphite_conf = Cartodb.config[:graphite]
unless graphite_conf.nil?
runner.set_importer_stats_options(graphite_conf['host'], graphite_conf['port'], Socket.gethostname)
end
registrar = CartoDB::TableRegistrar.new(current_user, ::Table)
quota_checker = CartoDB::QuotaChecker.new(current_user)
database = current_user.in_database
destination_schema = current_user.database_schema
importer = CartoDB::Connector::Importer.new(runner, registrar, quota_checker, database, id,
destination_schema, current_user.public_user_roles)
log.append 'Before importer run'
importer.run(tracker)
log.append 'After importer run'
end
store_results(importer, runner, datasource_provider, manual_fields)
importer.nil? ? false : importer.success?
end
# Note: Assumes that if importer is nil an error happened
# @param importer CartoDB::Connector::Importer|nil
# @param runner CartoDB::Importer2::Runner|nil
# @param datasource_provider mixed|nil
# @param manual_fields Hash
def store_results(importer=nil, runner=nil, datasource_provider=nil, manual_fields={})
if importer.nil?
set_error(manual_fields.fetch(:error_code, 99999), manual_fields.fetch(:log_info, nil))
else
self.results = importer.results
self.error_code = importer.error_code
# Table.after_create() set fields that won't be saved to the "final" data import unless specified here
self.table_name = importer.table.name if importer.success? && importer.table
self.table_id = importer.table.id if importer.success? && importer.table
if importer.success? && importer.data_import.create_visualization
self.visualization_id = importer.data_import.visualization_id
end
update_synchronization(importer)
importer.success? ? set_datasource_audit_to_complete(datasource_provider,
importer.success? && importer.table ? importer.table.id : nil)
: set_datasource_audit_to_failed(datasource_provider)
end
unless runner.nil?
self.stats = ::JSON.dump(runner.stats)
end
end
def update_synchronization(importer)
if synchronization_id
log.append "synchronization_id: #{synchronization_id}"
synchronization = CartoDB::Synchronization::Member.new(id: synchronization_id).fetch
synchronization.name = self.table_name
synchronization.log_id = log.id
if importer.success?
synchronization.state = 'success'
synchronization.error_code = nil
synchronization.error_message = nil
else
synchronization.state = 'failure'
synchronization.error_code = error_code
synchronization.error_message = get_error_text
end
log.append "importer.success? #{synchronization.state}"
synchronization.store
end
end
def get_datasource_provider
datasource_name = (service_name.nil? || service_name.size == 0) ? Url::PublicUrl::DATASOURCE_NAME : service_name
if service_item_id.nil? || service_item_id.size == 0
self.service_item_id = data_source
end
get_datasource(datasource_name, service_item_id)
end
def get_downloader(datasource_provider)
log.append "Fetching datasource #{datasource_provider.to_s} metadata for item id #{service_item_id}"
metadata = datasource_provider.get_resource_metadata(service_item_id)
if hit_platform_limit?(datasource_provider, metadata, current_user)
raise CartoDB::Importer2::FileTooBigError.new(metadata.inspect)
end
if datasource_provider.providers_download_url?
downloader = CartoDB::Importer2::Downloader.new(
(metadata[:url].present? && datasource_provider.providers_download_url?) ? metadata[:url] : data_source,
{ http_timeout: ::DataImport.http_timeout_for(current_user) }
)
log.append "File will be downloaded from #{downloader.url}"
else
log.append 'Downloading file data from datasource'
downloader = CartoDB::Importer2::DatasourceDownloader.new(
datasource_provider, metadata, { http_timeout: ::DataImport.http_timeout_for(current_user) }, log
)
end
downloader
end
def hit_platform_limit?(datasource, metadata, user)
if datasource.has_resource_size?(metadata)
CartoDB::PlatformLimits::Importer::InputFileSize.new({ user: user })
.is_over_limit!(metadata[:size])
else
false
end
end
def current_user
@current_user ||= User[user_id]
end
def notify(results)
owner = User.where(:id => self.user_id).first
imported_tables = results.select {|r| r.success }.length
failed_tables = results.length - imported_tables
import_log = {'user' => owner.username,
'state' => self.state,
'tables' => results.length,
'imported_tables' => imported_tables,
'failed_tables' => failed_tables,
'error_code' => self.error_code,
'import_timestamp' => Time.now,
'queue_server' => `hostname`.strip,
'database_host' => owner.database_host,
'service_name' => self.service_name,
'data_type' => self.data_type,
'is_sync_import' => !self.synchronization_id.nil?,
'import_time' => self.updated_at - self.created_at,
'file_stats' => ::JSON.parse(self.stats),
'resque_ppid' => self.resque_ppid,
'user_timeout' => ::DataImport.http_timeout_for(current_user),
'error_source' => get_error_source
}
if !self.extra_options.nil?
import_log['extra_options'] = self.extra_options
end
import_log.merge!(decorate_log(self))
dataimport_logger.info(import_log.to_json)
CartoDB::Importer2::MailNotifier.new(self, results, ::Resque).notify_if_needed
results.each { |result| CartoDB::Metrics.new.report(:import, payload_for(result)) }
# TODO: remove mixpanel
results.each { |result| CartoDB::Mixpanel.new.report(:import, payload_for(result)) }
end
def decorate_log(data_import)
decoration = { retrieved_items: 0}
if data_import.success && data_import.table_id
datasource = get_datasource_provider
if datasource.persists_state_via_data_import?
decoration = datasource.get_audit_stats
end
end
decoration
end
def payload_for(result=nil)
payload = {
file_url: public_url,
distinct_id: current_user.username,
username: current_user.username,
account_type: current_user.account_type,
database: current_user.database_name,
email: current_user.email,
log: log.to_s
}
payload.merge!(
name: result.name,
extension: result.extension,
success: result.success,
error_code: result.error_code,
) if result
payload.merge!(
file_url_hostname: URI.parse(public_url).hostname
) if public_url rescue nil
payload.merge!(error_title: get_error_text) if state == STATE_FAILURE
payload
end
# @param datasource_name String
# @param service_item_id String|nil
# @return mixed|nil
# @throws DataSourceError
def get_datasource(datasource_name, service_item_id)
begin
oauth = current_user.oauths.select(datasource_name)
# The tables metadata DB also stores Resque data
datasource = DatasourcesFactory.get_datasource(
datasource_name, current_user, {
http_timeout: ::DataImport.http_timeout_for(current_user),
redis_storage: $tables_metadata,
user_defined_limits: ::JSON.parse(user_defined_limits).symbolize_keys
})
datasource.report_component = Rollbar
datasource.token = oauth.token unless oauth.nil?
rescue => ex
log.append "Exception: #{ex.message}"
log.append ex.backtrace
Rollbar.report_message('Import error: ', 'error', error_info: ex.message + ex.backtrace.join)
raise CartoDB::DataSourceError.new("Datasource #{datasource_name} could not be instantiated")
end
if service_item_id.nil?
raise CartoDB::DataSourceError.new("Datasource #{datasource_name} without item id")
end
if datasource.persists_state_via_data_import?
datasource.data_import_item = self
end
datasource
end
def set_error(error_code, log_info='')
log.append("Additional error info: #{log_info}") unless log_info.empty?
self.results = [CartoDB::Importer2::Result.new(
success: false, error_code: error_code
)]
self.error_code = error_code
self.state = STATE_FAILURE
end
def set_merge_error(error_code, log_info='')
log.append("Going to set merge error with code #{error_code}")
set_error(error_code, log_info)
end
def set_datasource_audit_to_complete(datasource, table_id = nil)
if datasource.persists_state_via_data_import?
datasource.data_import_item = self
datasource.set_audit_to_completed(table_id)
end
end
def set_datasource_audit_to_failed(datasource)
if datasource.persists_state_via_data_import?
datasource.data_import_item = self
datasource.set_audit_to_failed
end
end
end
|
raquel-ucl/cartodb
|
app/models/data_import.rb
|
Ruby
|
bsd-3-clause
| 28,396
|
var logging = require('logging');
var Events = function(options) {
options = options || {};
};
Events.prototype.on = function(event, fn) {
var _this = this;
_this._events = _this._events || {};
_this._events[event] = this._events[event] || [];
_this._events[event].push(fn);
};
Events.prototype.off = function(event, fn) {
  var _this = this;
  _this._events = _this._events || {};
  if (event in _this._events === false) {
    return;
  }
  var index = _this._events[event].indexOf(fn);
  // Guard against fn not being registered: indexOf returns -1, and
  // splice(-1, 1) would otherwise remove the last handler for the event.
  if (index !== -1) {
    _this._events[event].splice(index, 1);
  }
};
Events.prototype.trigger = function(event) {
var _this = this;
_this._events = _this._events || {};
if (event in _this._events === false) {
return;
}
logging.debug("Events.trigger()", event, arguments);
for(var i = 0; i < _this._events[event].length; i++) {
_this._events[event][i].apply(_this, Array.prototype.slice.call(arguments, 1));
}
};
Events.mixin = function(dest) {
['on','off','trigger'].forEach(function(method) {
if (typeof dest === 'function') {
dest.prototype[method] = Events.prototype[method];
} else {
dest[method] = Events.prototype[method];
}
});
};
module.exports.Events = Events;
module.exports.hub = new Events();
// This defines the list of events intended to be used with the
// global event "hub".
module.exports.EVT = {
ALIGN: 'align',
CLEAR_HIGHLIGHTS: 'clear_highlights',
DELETE_ALIGNMENTS: 'delete_alignments',
BUILD_INDEX: 'build_index',
EXPORT: 'export',
LOGIN: 'login',
LOADING: 'loading',
ERROR: 'error',
NOTIFICATION: 'notification'
};
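// Illustrative usage (hypothetical names, not part of the original module): mix the event
// methods into a constructor and publish on the shared hub, e.g.
//   var Widget = function() {};
//   Events.mixin(Widget);
//   module.exports.hub.on(module.exports.EVT.ERROR, function(message) { logging.debug(message); });
//   module.exports.hub.trigger(module.exports.EVT.ERROR, 'something failed');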
|
Harvard-ATG/wordmapper
|
src/js/events.js
|
JavaScript
|
bsd-3-clause
| 1,563
|
---------------------------------------------------------------------------------------------------
-- User story: https://github.com/smartdevicelink/sdl_requirements/issues/3
-- Use case: https://github.com/smartdevicelink/sdl_requirements/blob/master/detailed_docs/RC/SetInteriorVehicleData.md
-- Item: Use Case 1: Main Flow
--
-- Requirement summary:
-- [SDL_RC] Set available control module settings SetInteriorVehicleData
--
-- Description:
-- In case:
-- 1) RC app sends SetInteriorVehicleData request with valid parameters
-- 2) and HMI didn't respond within default timeout
-- SDL must:
-- 1) Respond to App with success:false, "GENERIC_ERROR"
---------------------------------------------------------------------------------------------------
--[[ Required Shared libraries ]]
local runner = require('user_modules/script_runner')
local commonRC = require('test_scripts/RC/commonRC')
--[[ Test Configuration ]]
runner.testSettings.isSelfIncluded = false
--[[ Local Functions ]]
local function setVehicleData(pModuleType)
local cid = commonRC.getMobileSession():SendRPC("SetInteriorVehicleData", {
moduleData = commonRC.getSettableModuleControlData(pModuleType)
})
EXPECT_HMICALL("RC.SetInteriorVehicleData", {
appID = commonRC.getHMIAppId(),
moduleData = commonRC.getSettableModuleControlData(pModuleType)
})
:Do(function(_, _)
-- HMI does not respond
end)
commonRC.getMobileSession():ExpectResponse(cid, { success = false, resultCode = "GENERIC_ERROR"})
commonRC.wait(11000)
end
--[[ Scenario ]]
runner.Title("Preconditions")
runner.Step("Clean environment", commonRC.preconditions)
runner.Step("Start SDL, HMI, connect Mobile, start Session", commonRC.start)
runner.Step("RAI", commonRC.registerAppWOPTU)
runner.Step("Activate App", commonRC.activateApp)
runner.Title("Test")
for _, mod in pairs(commonRC.modules) do
runner.Step("SetInteriorVehicleData " .. mod .. " HMI does not respond", setVehicleData, { mod })
end
runner.Title("Postconditions")
runner.Step("Stop SDL", commonRC.postconditions)
|
smartdevicelink/sdl_atf_test_scripts
|
test_scripts/RC/CLIMATE_RADIO/SetInteriorVehicleData/013_GENERIC_ERROR_in_case_HMI_did_not_respond.lua
|
Lua
|
bsd-3-clause
| 2,054
|
/*================================================================================
Copyright (c) 2009 VMware, Inc. All Rights Reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of VMware, Inc. nor the names of its contributors may be used
to endorse or promote products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL VMWARE, INC. OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
================================================================================*/
package com.vmware.vim25;
/**
@author Steve Jin (sjin@vmware.com)
*/
public class VmMigratedEvent extends VmEvent
{
public HostEventArgument sourceHost;
public HostEventArgument getSourceHost()
{
return this.sourceHost;
}
public void setSourceHost(HostEventArgument sourceHost)
{
this.sourceHost=sourceHost;
}
}
|
mikem2005/vijava
|
src/com/vmware/vim25/VmMigratedEvent.java
|
Java
|
bsd-3-clause
| 1,981
|
import hashlib
import tempfile
import unittest
import shutil
import os
import sys
from testfixtures import LogCapture
from scrapy.dupefilters import RFPDupeFilter
from scrapy.http import Request
from scrapy.core.scheduler import Scheduler
from scrapy.utils.python import to_bytes
from scrapy.utils.job import job_dir
from scrapy.utils.test import get_crawler
from tests.spiders import SimpleSpider
class FromCrawlerRFPDupeFilter(RFPDupeFilter):
@classmethod
def from_crawler(cls, crawler):
debug = crawler.settings.getbool('DUPEFILTER_DEBUG')
df = cls(job_dir(crawler.settings), debug)
df.method = 'from_crawler'
return df
class FromSettingsRFPDupeFilter(RFPDupeFilter):
@classmethod
def from_settings(cls, settings):
debug = settings.getbool('DUPEFILTER_DEBUG')
df = cls(job_dir(settings), debug)
df.method = 'from_settings'
return df
class DirectDupeFilter(object):
method = 'n/a'
class RFPDupeFilterTest(unittest.TestCase):
def test_df_from_crawler_scheduler(self):
settings = {'DUPEFILTER_DEBUG': True,
'DUPEFILTER_CLASS': __name__ + '.FromCrawlerRFPDupeFilter'}
crawler = get_crawler(settings_dict=settings)
scheduler = Scheduler.from_crawler(crawler)
self.assertTrue(scheduler.df.debug)
self.assertEqual(scheduler.df.method, 'from_crawler')
def test_df_from_settings_scheduler(self):
settings = {'DUPEFILTER_DEBUG': True,
'DUPEFILTER_CLASS': __name__ + '.FromSettingsRFPDupeFilter'}
crawler = get_crawler(settings_dict=settings)
scheduler = Scheduler.from_crawler(crawler)
self.assertTrue(scheduler.df.debug)
self.assertEqual(scheduler.df.method, 'from_settings')
def test_df_direct_scheduler(self):
settings = {'DUPEFILTER_CLASS': __name__ + '.DirectDupeFilter'}
crawler = get_crawler(settings_dict=settings)
scheduler = Scheduler.from_crawler(crawler)
self.assertEqual(scheduler.df.method, 'n/a')
def test_filter(self):
dupefilter = RFPDupeFilter()
dupefilter.open()
r1 = Request('http://scrapytest.org/1')
r2 = Request('http://scrapytest.org/2')
r3 = Request('http://scrapytest.org/2')
assert not dupefilter.request_seen(r1)
assert dupefilter.request_seen(r1)
assert not dupefilter.request_seen(r2)
assert dupefilter.request_seen(r3)
dupefilter.close('finished')
def test_dupefilter_path(self):
r1 = Request('http://scrapytest.org/1')
r2 = Request('http://scrapytest.org/2')
path = tempfile.mkdtemp()
try:
df = RFPDupeFilter(path)
try:
df.open()
assert not df.request_seen(r1)
assert df.request_seen(r1)
finally:
df.close('finished')
df2 = RFPDupeFilter(path)
try:
df2.open()
assert df2.request_seen(r1)
assert not df2.request_seen(r2)
assert df2.request_seen(r2)
finally:
df2.close('finished')
finally:
shutil.rmtree(path)
def test_request_fingerprint(self):
"""Test if customization of request_fingerprint method will change
output of request_seen.
"""
r1 = Request('http://scrapytest.org/index.html')
r2 = Request('http://scrapytest.org/INDEX.html')
dupefilter = RFPDupeFilter()
dupefilter.open()
assert not dupefilter.request_seen(r1)
assert not dupefilter.request_seen(r2)
dupefilter.close('finished')
class CaseInsensitiveRFPDupeFilter(RFPDupeFilter):
def request_fingerprint(self, request):
fp = hashlib.sha1()
fp.update(to_bytes(request.url.lower()))
return fp.hexdigest()
case_insensitive_dupefilter = CaseInsensitiveRFPDupeFilter()
case_insensitive_dupefilter.open()
assert not case_insensitive_dupefilter.request_seen(r1)
assert case_insensitive_dupefilter.request_seen(r2)
case_insensitive_dupefilter.close('finished')
def test_seenreq_newlines(self):
""" Checks against adding duplicate \r to
line endings on Windows platforms. """
r1 = Request('http://scrapytest.org/1')
path = tempfile.mkdtemp()
try:
df = RFPDupeFilter(path)
df.open()
df.request_seen(r1)
df.close('finished')
with open(os.path.join(path, 'requests.seen'), 'rb') as seen_file:
line = next(seen_file).decode()
assert not line.endswith('\r\r\n')
if sys.platform == 'win32':
assert line.endswith('\r\n')
else:
assert line.endswith('\n')
finally:
shutil.rmtree(path)
def test_log(self):
with LogCapture() as l:
settings = {'DUPEFILTER_DEBUG': False,
'DUPEFILTER_CLASS': __name__ + '.FromCrawlerRFPDupeFilter'}
crawler = get_crawler(SimpleSpider, settings_dict=settings)
scheduler = Scheduler.from_crawler(crawler)
spider = SimpleSpider.from_crawler(crawler)
dupefilter = scheduler.df
dupefilter.open()
r1 = Request('http://scrapytest.org/index.html')
r2 = Request('http://scrapytest.org/index.html')
dupefilter.log(r1, spider)
dupefilter.log(r2, spider)
assert crawler.stats.get_value('dupefilter/filtered') == 2
l.check_present(('scrapy.dupefilters', 'DEBUG',
('Filtered duplicate request: <GET http://scrapytest.org/index.html>'
' - no more duplicates will be shown'
' (see DUPEFILTER_DEBUG to show all duplicates)')))
dupefilter.close('finished')
def test_log_debug(self):
with LogCapture() as l:
settings = {'DUPEFILTER_DEBUG': True,
'DUPEFILTER_CLASS': __name__ + '.FromCrawlerRFPDupeFilter'}
crawler = get_crawler(SimpleSpider, settings_dict=settings)
scheduler = Scheduler.from_crawler(crawler)
spider = SimpleSpider.from_crawler(crawler)
dupefilter = scheduler.df
dupefilter.open()
r1 = Request('http://scrapytest.org/index.html')
r2 = Request('http://scrapytest.org/index.html',
headers={'Referer': 'http://scrapytest.org/INDEX.html'}
)
dupefilter.log(r1, spider)
dupefilter.log(r2, spider)
assert crawler.stats.get_value('dupefilter/filtered') == 2
l.check_present(('scrapy.dupefilters', 'DEBUG',
('Filtered duplicate request: <GET http://scrapytest.org/index.html>'
' (referer: None)')))
l.check_present(('scrapy.dupefilters', 'DEBUG',
('Filtered duplicate request: <GET http://scrapytest.org/index.html>'
' (referer: http://scrapytest.org/INDEX.html)')))
dupefilter.close('finished')
|
eLRuLL/scrapy
|
tests/test_dupefilters.py
|
Python
|
bsd-3-clause
| 7,297
|
<!DOCTYPE html>
<html lang="pl">
<head>
<!-- Required meta tags always come first -->
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta http-equiv="x-ua-compatible" content="ie=edge">
<!-- Bootstrap CSS -->
<link rel="stylesheet" href="https://cdn.rawgit.com/twbs/bootstrap/v4-dev/dist/css/bootstrap.css">
</head>
<body>
<h1 class="text-center">Pory.</h1>
<p class="text-muted"></p>
<!-- jQuery first, then Bootstrap JS. -->
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.4/jquery.min.js"></script>
<script src="https://cdn.rawgit.com/twbs/bootstrap/v4-dev/dist/js/bootstrap.js"></script>
</body>
</html>
|
ksamborski/tcg
|
demo/questions/plazy4.html
|
HTML
|
bsd-3-clause
| 723
|
Instructions for using git-hooks to automatically update FloPy `build` in `version.py`
-----------------------------------------------
## Update files
1. Delete `pre-commit.sample` file in `.git/hooks`.
2. Copy `pre-commit.sh` in root directory to `.git/hooks` directory.
3. Remove `.sh` extension from `pre-commit.sh` in `.git/hooks` directory.
4. Make sure `.git/hooks/pre-commit` is executable using `chmod +x .git/hooks/pre-commit`, as in the sketch below.
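A minimal sketch of steps 2-4 above (assuming the commands are run from the repository root; adjust paths as needed):
```sh
# Copy the hook into place without the .sh extension, then make it executable
cp pre-commit.sh .git/hooks/pre-commit
chmod +x .git/hooks/pre-commit
```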
## Reset SourceTree to use system git
1. SourceTree will use git-hooks if the Embedded Git is git 2.9+. SourceTree version 2.5.3 uses git 2.10.0.
2. If your version of SourceTree uses an embedded git earlier than 2.9, modify the SourceTree preferences to use the system version of git. All you have to do is go to `SourceTree -> Preferences -> Git` and choose `Use System Git`, which can be found at `/usr/bin/git/`. See [https://medium.com/@onmyway133/sourcetree-and-pre-commit-hook-52545f22fe10](https://medium.com/@onmyway133/sourcetree-and-pre-commit-hook-52545f22fe10) for additional information.
|
brclark-usgs/flopy
|
docs/enable_githooks.md
|
Markdown
|
bsd-3-clause
| 1,048
|
package com.ericsson.research.trap.utils;
/*
* ##_BEGIN_LICENSE_##
* Transport Abstraction Package (trap)
* ----------
* Copyright (C) 2014 Ericsson AB
* ----------
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the Ericsson AB nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
* ##_END_LICENSE_##
*/
import java.io.UnsupportedEncodingException;
/**
 * StringUtil contains a number of Java 1.4 and Android-safe String operations that should be preferred over the Java
 * native ones. It will use the JSE implementation where available, and takes care of compatibility with Android and
 * earlier Java versions.
*
* @author Vladimir Katardjiev
*/
public abstract class StringUtil
{
private static StringUtil instance;
static
{
try
{
String name = StringUtil.class.getName() + "Impl";
Class<?> c = Class.forName(name);
instance = (StringUtil) c.newInstance();
}
catch (Throwable t)
{
System.err.println("Could not initialise StringUtilImpl");
}
}
/**
* Splits the string <i>s</i> using the character <i>c</i> as a delimiter
*
* @param s
* The string to split
* @param c
* The delimiter
* @return An array of the string parts.
*/
public static String[] split(String s, char c)
{
return instance.doSplit(s, c);
}
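    // Illustrative usage (hypothetical values): StringUtil.split("a,b,c", ',') is expected
    // to return {"a", "b", "c"}, per the contract documented above.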
abstract String[] doSplit(String s, char c);
/**
* Converts the supplied string into its UTF-8 representation.
*
* @param s
* The string to convert.
* @return The UTF-8 representation of <i>s</i>
* @throws RuntimeException
* in the rare event that the JVM does not support UTF-8. It should always support it, so this function
* wraps away the exception
*/
public static byte[] toUtfBytes(final String s)
{
try
{
byte[] b = s.getBytes("UTF-8");
return (b);
}
catch (UnsupportedEncodingException e)
{
throw new RuntimeException("This java machine is stupid. The code will die. Cake.");
}
}
/**
* Convert a UTF-8 byte array to a string.
*
* @param buffer
* The array to convert
* @return A string representing the array.
* @throws RuntimeException
* in the rare event that the JVM does not support UTF-8. It should always support it, so this function
* wraps away the exception
*/
public static String toUtfString(final byte[] buffer)
{
try
{
String s = new String(buffer, "UTF-8");
return (s);
}
catch (UnsupportedEncodingException e)
{
throw new RuntimeException("This java machine is stupid. The code will die. Cake.");
}
}
/**
* Convert a char array to UTF string
*
* @param encoded
* The UTF-16 char array
* @return A string with the array
*/
public static String toUtfString(char[] encoded)
{
String s = new String(encoded);
return s;
}
/**
* Formats the object's name and full class name to be logger-safe
*
* @param o
* The object to create the logger name for.
* @return The logger name for the object.
*/
public static String getLoggerComponent(Object o)
{
String component = o.getClass().getName().substring(o.getClass().getPackage().getName().length());
if (!component.startsWith("."))
component = "." + component;
if (component.startsWith(".."))
component = component.substring(1);
return component;
}
StringUtil()
{
}
/**
* Convert a UTF-8 byte array to a string.
*
 * @param bytes
 *            The array to convert
* @param i
* The index to start at
* @param length
* The number of bytes to convert
* @return A string representing the array.
* @throws RuntimeException
* in the rare event that the JVM does not support UTF-8. It should always support it, so this function
* wraps away the exception
*/
public static String toUtfString(byte[] bytes, int i, int length)
{
try
{
String s = new String(bytes, i, length, "UTF-8");
return (s);
}
catch (UnsupportedEncodingException e)
{
throw new RuntimeException("This java machine is stupid. The code will die. Cake.");
}
}
}
|
meledin/trap
|
trap-utils-api/src/main/java/com/ericsson/research/trap/utils/StringUtil.java
|
Java
|
bsd-3-clause
| 6,145
|
# -*- coding: utf-8 -*-
"""Provides vertical object."""
from __future__ import absolute_import
from ..entity import Entity
class Vertical(Entity):
"""docstring for Vertical."""
collection = 'verticals'
resource = 'vertical'
_relations = {
'advertiser',
}
_pull = {
'id': int,
'name': None,
'created_on': Entity._strpt,
'updated_on': Entity._strpt,
'version': int,
}
_push = _pull
def __init__(self, session, properties=None, **kwargs):
super(Vertical, self).__init__(session, properties, **kwargs)
|
Cawb07/t1-python
|
terminalone/models/vertical.py
|
Python
|
bsd-3-clause
| 596
|
<?php
/**
* Zend Framework (http://framework.zend.com/)
*
* @link http://github.com/zendframework/zf2 for the canonical source repository
* @copyright Copyright (c) 2005-2014 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
*/
namespace ZendTest\Cache\Pattern;
use Zend\Cache;
/**
* Test class
*/
class TestClassCache
{
/**
 * A counter for how often the method "bar" was called
*/
public static $fooCounter = 0;
public static function bar()
{
++static::$fooCounter;
$args = func_get_args();
echo 'foobar_output('.implode(', ', $args) . ') : ' . static::$fooCounter;
return 'foobar_return('.implode(', ', $args) . ') : ' . static::$fooCounter;
}
public static function emptyMethod() {}
}
/**
* @group Zend_Cache
*/
class ClassCacheTest extends CommonPatternTest
{
/**
* @var Zend\Cache\Storage\StorageInterface
*/
protected $_storage;
public function setUp()
{
$this->_storage = new Cache\Storage\Adapter\Memory(array(
'memory_limit' => 0
));
$this->_options = new Cache\Pattern\PatternOptions(array(
'class' => __NAMESPACE__ . '\TestClassCache',
'storage' => $this->_storage,
));
$this->_pattern = new Cache\Pattern\ClassCache();
$this->_pattern->setOptions($this->_options);
parent::setUp();
}
public function tearDown()
{
parent::tearDown();
}
public function testCallEnabledCacheOutputByDefault()
{
$this->_testCall(
'bar',
array('testCallEnabledCacheOutputByDefault', 'arg2')
);
}
public function testCallDisabledCacheOutput()
{
$this->_options->setCacheOutput(false);
$this->_testCall(
'bar',
array('testCallDisabledCacheOutput', 'arg2')
);
}
public function testGenerateKey()
{
$args = array('arg1', 2, 3.33, null);
$generatedKey = $this->_pattern->generateKey('emptyMethod', $args);
$usedKey = null;
$this->_options->getStorage()->getEventManager()->attach('setItem.pre', function ($event) use (&$usedKey) {
$params = $event->getParams();
$usedKey = $params['key'];
});
$this->_pattern->call('emptyMethod', $args);
$this->assertEquals($generatedKey, $usedKey);
}
protected function _testCall($method, array $args)
{
$returnSpec = 'foobar_return(' . implode(', ', $args) . ') : ';
$outputSpec = 'foobar_output(' . implode(', ', $args) . ') : ';
// first call - not cached
$firstCounter = TestClassCache::$fooCounter + 1;
ob_start();
ob_implicit_flush(false);
$return = call_user_func_array(array($this->_pattern, $method), $args);
$data = ob_get_clean();
$this->assertEquals($returnSpec . $firstCounter, $return);
$this->assertEquals($outputSpec . $firstCounter, $data);
// second call - cached
ob_start();
ob_implicit_flush(false);
$return = call_user_func_array(array($this->_pattern, $method), $args);
$data = ob_get_clean();
$this->assertEquals($returnSpec . $firstCounter, $return);
if ($this->_options->getCacheOutput()) {
$this->assertEquals($outputSpec . $firstCounter, $data);
} else {
$this->assertEquals('', $data);
}
}
}
|
PatidarWeb/zf2
|
tests/ZendTest/Cache/Pattern/ClassCacheTest.php
|
PHP
|
bsd-3-clause
| 3,557
|
describe("module:ng.directive:ngBindTemplate",function(){var a;beforeEach(function(){a=browser.rootEl,browser.get("./examples/example-example14/index-jquery.html")}),it("should check ng-bind",function(){var a=element(by.binding("salutation")),b=element(by.model("salutation")),c=element(by.model("name"));expect(a.getText()).toBe("Hello World!"),b.clear(),b.sendKeys("Greetings"),c.clear(),c.sendKeys("user"),expect(a.getText()).toBe("Greetings user!")})});
//# sourceMappingURL=..\..\..\..\debug\angular\docs\ptore2e\example-example14\jquery_test.min.map
|
infrabel/docs-gnap
|
node_modules/gnap-theme-gnap-angular/js/angular/docs/ptore2e/example-example14/jquery_test.min.js
|
JavaScript
|
bsd-3-clause
| 555
|
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>statsmodels.sandbox.sysreg.SUR — statsmodels 0.8.0 documentation</title>
<link rel="stylesheet" href="../_static/nature.css" type="text/css" />
<link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
<script type="text/javascript">
var DOCUMENTATION_OPTIONS = {
URL_ROOT: '../',
VERSION: '0.8.0',
COLLAPSE_INDEX: false,
FILE_SUFFIX: '.html',
HAS_SOURCE: true,
SOURCELINK_SUFFIX: '.txt'
};
</script>
<script type="text/javascript" src="../_static/jquery.js"></script>
<script type="text/javascript" src="../_static/underscore.js"></script>
<script type="text/javascript" src="../_static/doctools.js"></script>
<link rel="shortcut icon" href="../_static/statsmodels_hybi_favico.ico"/>
<link rel="author" title="About these documents" href="../about.html" />
<link rel="index" title="Index" href="../genindex.html" />
<link rel="search" title="Search" href="../search.html" />
<link rel="next" title="statsmodels.sandbox.sysreg.SUR.fit" href="statsmodels.sandbox.sysreg.SUR.fit.html" />
<link rel="prev" title="statsmodels.sandbox.regression.try_catdata.labelmeanfilter_str" href="statsmodels.sandbox.regression.try_catdata.labelmeanfilter_str.html" />
<link rel="stylesheet" href="../_static/examples.css" type="text/css" />
<link rel="stylesheet" href="../_static/facebox.css" type="text/css" />
<script type="text/javascript" src="../_static/scripts.js">
</script>
<script type="text/javascript" src="../_static/facebox.js">
</script>
</head>
<body role="document">
<div class="headerwrap">
<div class = "header">
<a href = "../index.html">
<img src="../_static/statsmodels_hybi_banner.png" alt="Logo"
style="padding-left: 15px"/></a>
</div>
</div>
<div class="related" role="navigation" aria-label="related navigation">
<h3>Navigation</h3>
<ul>
<li class="right" style="margin-right: 10px">
<a href="../genindex.html" title="General Index"
accesskey="I">index</a></li>
<li class="right" >
<a href="../py-modindex.html" title="Python Module Index"
>modules</a> |</li>
<li class="right" >
<a href="statsmodels.sandbox.sysreg.SUR.fit.html" title="statsmodels.sandbox.sysreg.SUR.fit"
accesskey="N">next</a> |</li>
<li class="right" >
<a href="statsmodels.sandbox.regression.try_catdata.labelmeanfilter_str.html" title="statsmodels.sandbox.regression.try_catdata.labelmeanfilter_str"
accesskey="P">previous</a> |</li>
<li><a href ="../install.html">Install</a></li> |
<li><a href="https://groups.google.com/group/pystatsmodels?hl=en">Support</a></li> |
<li><a href="https://github.com/statsmodels/statsmodels/issues">Bugs</a></li> |
<li><a href="../dev/index.html">Develop</a></li> |
<li><a href="../examples/index.html">Examples</a></li> |
<li><a href="../faq.html">FAQ</a></li> |
<li class="nav-item nav-item-1"><a href="../sandbox.html" accesskey="U">Sandbox</a> |</li>
</ul>
</div>
<div class="document">
<div class="documentwrapper">
<div class="bodywrapper">
<div class="body" role="main">
<div class="section" id="statsmodels-sandbox-sysreg-sur">
<h1>statsmodels.sandbox.sysreg.SUR<a class="headerlink" href="#statsmodels-sandbox-sysreg-sur" title="Permalink to this headline">¶</a></h1>
<dl class="class">
<dt id="statsmodels.sandbox.sysreg.SUR">
<em class="property">class </em><code class="descclassname">statsmodels.sandbox.sysreg.</code><code class="descname">SUR</code><span class="sig-paren">(</span><em>sys</em>, <em>sigma=None</em>, <em>dfk=None</em><span class="sig-paren">)</span><a class="reference internal" href="../_modules/statsmodels/sandbox/sysreg.html#SUR"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#statsmodels.sandbox.sysreg.SUR" title="Permalink to this definition">¶</a></dt>
<dd><p>Seemingly Unrelated Regression</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><p class="first"><strong>sys</strong> : list</p>
<blockquote>
<div><p>[endog1, exog1, endog2, exog2,...] It will be of length 2 x M,
where M is the number of equations endog = exog.</p>
</div></blockquote>
<p><strong>sigma</strong> : array-like</p>
<blockquote>
<div><p>M x M array where sigma[i,j] is the covariance between equation i and j</p>
</div></blockquote>
<p><strong>dfk</strong> : None, ‘dfk1’, or ‘dfk2’</p>
<blockquote class="last">
<div><p>Default is None. Correction for the degrees of freedom
should be specified for small samples. See the notes for more
information.</p>
</div></blockquote>
</td>
</tr>
</tbody>
</table>
<p class="rubric">Notes</p>
<p>All individual equations are assumed to be well-behaved, with homoskedastic
iid errors. This is basically an extension of GLS, using sparse matrices.</p>
<div class="math">
<p><img src="../_images/math/1cf215fed0a30e96b8ad5cd542d8eb2cf8040918.png" alt="\Sigma=\left[\begin{array}{cccc}
\sigma_{11} & \sigma_{12} & \cdots & \sigma_{1M}\\
\sigma_{21} & \sigma_{22} & \cdots & \sigma_{2M}\\
\vdots & \vdots & \ddots & \vdots\\
\sigma_{M1} & \sigma_{M2} & \cdots & \sigma_{MM}\end{array}\right]"/></p>
</div><p class="rubric">References</p>
<p>Zellner (1962), Greene (2003)</p>
<p class="rubric">Attributes</p>
<table border="1" class="docutils">
<colgroup>
<col width="12%" />
<col width="88%" />
</colgroup>
<tbody valign="top">
<tr class="row-odd"><td>cholsigmainv</td>
<td>(array) The transpose of the Cholesky decomposition of <cite>pinv_wexog</cite></td>
</tr>
<tr class="row-even"><td>df_model</td>
<td>(array) Model degrees of freedom of each equation. p_{m} - 1 where p is the number of regressors for each equation m and one is subtracted for the constant.</td>
</tr>
<tr class="row-odd"><td>df_resid</td>
<td>(array) Residual degrees of freedom of each equation. Number of observations less the number of parameters.</td>
</tr>
<tr class="row-even"><td>endog</td>
<td>(array) The LHS variables for each equation in the system. It is a M x nobs array where M is the number of equations.</td>
</tr>
<tr class="row-odd"><td>exog</td>
<td>(array) The RHS variable for each equation in the system. It is a nobs x sum(p_{m}) array. Which is just each RHS array stacked next to each other in columns.</td>
</tr>
<tr class="row-even"><td>history</td>
<td>(dict) Contains the history of fitting the model. Probably not of interest if the model is fit with <cite>igls</cite> = False.</td>
</tr>
<tr class="row-odd"><td>iterations</td>
<td>(int) The number of iterations until convergence if the model is fit iteratively.</td>
</tr>
<tr class="row-even"><td>nobs</td>
<td>(float) The number of observations of the equations.</td>
</tr>
<tr class="row-odd"><td>normalized_cov_params</td>
<td>(array) sum(p_{m}) x sum(p_{m}) array <img class="math" src="../_images/math/3c05ca99ef032ae6fc3099d0a9e8ffeeb6be356b.png" alt="\left[X^{T}\left(\Sigma^{-1}\otimes\boldsymbol{I}\right)X\right]^{-1}" style="vertical-align: -6px"/></td>
</tr>
<tr class="row-even"><td>pinv_wexog</td>
<td>(array) The pseudo-inverse of the <cite>wexog</cite></td>
</tr>
<tr class="row-odd"><td>sigma</td>
<td>(array) M x M covariance matrix of the cross-equation disturbances. See notes.</td>
</tr>
<tr class="row-even"><td>sp_exog</td>
<td>(CSR sparse matrix) Contains a block diagonal sparse matrix of the design so that exog1 ... exogM are on the diagonal.</td>
</tr>
<tr class="row-odd"><td>wendog</td>
<td>(array) M * nobs x 1 array of the endogenous variables whitened by <cite>cholsigmainv</cite> and stacked into a single column.</td>
</tr>
<tr class="row-even"><td>wexog</td>
<td>(array) M*nobs x sum(p_{m}) array of the whitened exogenous variables.</td>
</tr>
</tbody>
</table>
<p class="rubric">Methods</p>
<table border="1" class="longtable docutils">
<colgroup>
<col width="10%" />
<col width="90%" />
</colgroup>
<tbody valign="top">
<tr class="row-odd"><td><a class="reference internal" href="statsmodels.sandbox.sysreg.SUR.fit.html#statsmodels.sandbox.sysreg.SUR.fit" title="statsmodels.sandbox.sysreg.SUR.fit"><code class="xref py py-obj docutils literal"><span class="pre">fit</span></code></a>([igls, tol, maxiter])</td>
<td>igls : bool</td>
</tr>
<tr class="row-even"><td><a class="reference internal" href="statsmodels.sandbox.sysreg.SUR.initialize.html#statsmodels.sandbox.sysreg.SUR.initialize" title="statsmodels.sandbox.sysreg.SUR.initialize"><code class="xref py py-obj docutils literal"><span class="pre">initialize</span></code></a>()</td>
<td></td>
</tr>
<tr class="row-odd"><td><a class="reference internal" href="statsmodels.sandbox.sysreg.SUR.predict.html#statsmodels.sandbox.sysreg.SUR.predict" title="statsmodels.sandbox.sysreg.SUR.predict"><code class="xref py py-obj docutils literal"><span class="pre">predict</span></code></a>(design)</td>
<td></td>
</tr>
<tr class="row-even"><td><a class="reference internal" href="statsmodels.sandbox.sysreg.SUR.whiten.html#statsmodels.sandbox.sysreg.SUR.whiten" title="statsmodels.sandbox.sysreg.SUR.whiten"><code class="xref py py-obj docutils literal"><span class="pre">whiten</span></code></a>(X)</td>
<td>SUR whiten method.</td>
</tr>
</tbody>
</table>
<p class="rubric">Methods</p>
<table border="1" class="longtable docutils">
<colgroup>
<col width="10%" />
<col width="90%" />
</colgroup>
<tbody valign="top">
<tr class="row-odd"><td><a class="reference internal" href="statsmodels.sandbox.sysreg.SUR.fit.html#statsmodels.sandbox.sysreg.SUR.fit" title="statsmodels.sandbox.sysreg.SUR.fit"><code class="xref py py-obj docutils literal"><span class="pre">fit</span></code></a>([igls, tol, maxiter])</td>
<td>igls : bool</td>
</tr>
<tr class="row-even"><td><a class="reference internal" href="statsmodels.sandbox.sysreg.SUR.initialize.html#statsmodels.sandbox.sysreg.SUR.initialize" title="statsmodels.sandbox.sysreg.SUR.initialize"><code class="xref py py-obj docutils literal"><span class="pre">initialize</span></code></a>()</td>
<td></td>
</tr>
<tr class="row-odd"><td><a class="reference internal" href="statsmodels.sandbox.sysreg.SUR.predict.html#statsmodels.sandbox.sysreg.SUR.predict" title="statsmodels.sandbox.sysreg.SUR.predict"><code class="xref py py-obj docutils literal"><span class="pre">predict</span></code></a>(design)</td>
<td></td>
</tr>
<tr class="row-even"><td><a class="reference internal" href="statsmodels.sandbox.sysreg.SUR.whiten.html#statsmodels.sandbox.sysreg.SUR.whiten" title="statsmodels.sandbox.sysreg.SUR.whiten"><code class="xref py py-obj docutils literal"><span class="pre">whiten</span></code></a>(X)</td>
<td>SUR whiten method.</td>
</tr>
</tbody>
</table>
</dd></dl>
</div>
</div>
</div>
</div>
<div class="sphinxsidebar" role="navigation" aria-label="main navigation">
<div class="sphinxsidebarwrapper">
<h4>Previous topic</h4>
<p class="topless"><a href="statsmodels.sandbox.regression.try_catdata.labelmeanfilter_str.html"
title="previous chapter">statsmodels.sandbox.regression.try_catdata.labelmeanfilter_str</a></p>
<h4>Next topic</h4>
<p class="topless"><a href="statsmodels.sandbox.sysreg.SUR.fit.html"
title="next chapter">statsmodels.sandbox.sysreg.SUR.fit</a></p>
<div role="note" aria-label="source link">
<h3>This Page</h3>
<ul class="this-page-menu">
<li><a href="../_sources/generated/statsmodels.sandbox.sysreg.SUR.rst.txt"
rel="nofollow">Show Source</a></li>
</ul>
</div>
<div id="searchbox" style="display: none" role="search">
<h3>Quick search</h3>
<form class="search" action="../search.html" method="get">
<div><input type="text" name="q" /></div>
<div><input type="submit" value="Go" /></div>
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
</div>
<script type="text/javascript">$('#searchbox').show(0);</script>
</div>
</div>
<div class="clearer"></div>
</div>
<div class="footer" role="contentinfo">
© Copyright 2009-2017, Josef Perktold, Skipper Seabold, Jonathan Taylor, statsmodels-developers.
Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.5.3.
</div>
</body>
</html>
|
statsmodels/statsmodels.github.io
|
0.8.0/generated/statsmodels.sandbox.sysreg.SUR.html
|
HTML
|
bsd-3-clause
| 13,006
|
package org.daylightingsociety.wherearetheeyes;
import java.util.Date;
import java.util.HashMap;
import android.location.Location;
import android.location.LocationListener;
import android.os.Bundle;
import android.util.Log;
import com.mapbox.mapboxsdk.geometry.LatLng;
import com.mapbox.mapboxsdk.maps.MapView;
/**
* Created by milo on 3/5/16.
*
* This class is responsible for handling location updates, and kicking off background threads
* to download more pins.
*/
public class GPS implements LocationListener {
private Location position = null;
private Date lastKnown = null;
private Boolean enabled = true;
private MapView map = null;
public GPS(MapView m) {
super();
map = m;
Log.d("GPS", "Initialized");
}
public void onLocationChanged(Location loc) {
// If this is our first location update just prime everything
if( position == null || lastKnown == null ) {
Log.d("GPS", "Initial ping, downloading first pins.");
position = loc;
lastKnown = new Date();
new DownloadPinsTask().execute(new PinData(new HashMap<LatLng, Integer>(), map, position));
return;
}
// How long has it been since last check-in and how far have we moved?
float distance = loc.distanceTo(position);
Date current = new Date();
long diff = (current.getTime() - lastKnown.getTime()) / 1000; // Convert to seconds
lastKnown = current;
position = loc;
// Don't constantly re-download pins if we're standing still
// If we're walking over X meters, or it's been 30 seconds, then okay
if( distance > Constants.MIN_DISTANCE_FOR_PIN_REDOWNLOAD || diff > Constants.MIN_TIME_FOR_PIN_REDOWNLOAD ) {
Log.d("GPS", "PING! Triggering pin re-download. "
+ "Distance: "
+ Float.toString(distance)
+ " Time diff: "
+ Long.toString(diff));
new DownloadPinsTask().execute(new PinData(new HashMap<LatLng, Integer>(), map, position));
} else {
Log.d("GPS", "PING! No re-download needed. "
+ "Distance: "
+ Float.toString(distance)
+ " Time diff: "
+ Long.toString(diff));
}
}
// Redownload pins even if location hasn't changed.
public void refreshPins() {
new DownloadPinsTask().execute(new PinData(new HashMap<LatLng, Integer>(), map, position));
}
@Override
public void onProviderDisabled(String provider) {enabled = false;}
@Override
public void onProviderEnabled(String provider) {enabled = true;}
@Override
public void onStatusChanged(String provider, int status, Bundle extras) {}
public Boolean isEnabled() {
return new Boolean(enabled);
}
public Location getLocation() {
if( position == null )
return null;
return new Location(position);
}
}
|
DaylightingSociety/WhereAreTheEyes
|
Android/app/src/main/java/org/daylightingsociety/wherearetheeyes/GPS.java
|
Java
|
bsd-3-clause
| 3,050
|
/*global FormplayerFrontend */
FormplayerFrontend.module("Layout.Views", function (Views, FormplayerFrontend, Backbone, Marionette) {
Views.ProgressView = Marionette.ItemView.extend({
template: "#progress-view-template",
initialize: function(options) {
this.progressMessage = options.progressMessage;
},
templateHelpers: function() {
return {
progressMessage: this.progressMessage,
};
},
setProgress: function(done, total, duration) {
var progress = total === 0 ? 0 : done / total;
// Due to jQuery bug, can't use .animate() with % until jQuery 3.0
$(this.el).find('.js-progress-bar').css('transition', duration + 'ms');
$(this.el).find('.js-progress-bar').width(progress * 100 + '%');
if (total > 0) {
$(this.el).find('.js-subtext small').text(
gettext('Completed: ') + done + '/' + total
);
}
},
});
});
|
qedsoftware/commcare-hq
|
corehq/apps/cloudcare/static/cloudcare/js/formplayer/layout/views/progress_bar.js
|
JavaScript
|
bsd-3-clause
| 1,052
|
# job to periodically clear and refresh the cache
class OpenbisEndpointCacheRefreshJob < ApplicationJob
queue_with_priority 3
def perform(endpoint)
return unless Seek::Config.openbis_enabled
return unless endpoint&.persisted?
endpoint.refresh_metadata
end
# jobs created if due, triggered by the scheduler.rb
def self.queue_timed_jobs
return unless Seek::Config.openbis_enabled
OpenbisEndpoint.find_each do |endpoint|
endpoint.create_refresh_metadata_job if endpoint.due_cache_refresh?
end
end
end
|
seek4science/seek
|
app/jobs/openbis_endpoint_cache_refresh_job.rb
|
Ruby
|
bsd-3-clause
| 542
|
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>mne.time_frequency.cwt_morlet — MNE 0.12.0 documentation</title>
<link rel="stylesheet" href="../_static/basic.css" type="text/css" />
<link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
<link rel="stylesheet" href="../_static/gallery.css" type="text/css" />
<link rel="stylesheet" href="../_static/bootswatch-3.3.4/flatly/bootstrap.min.css" type="text/css" />
<link rel="stylesheet" href="../_static/bootstrap-sphinx.css" type="text/css" />
<link rel="stylesheet" href="../_static/style.css" type="text/css" />
<script type="text/javascript">
var DOCUMENTATION_OPTIONS = {
URL_ROOT: '../',
VERSION: '0.12.0',
COLLAPSE_INDEX: false,
FILE_SUFFIX: '.html',
HAS_SOURCE: true
};
</script>
<script type="text/javascript" src="../_static/jquery.js"></script>
<script type="text/javascript" src="../_static/underscore.js"></script>
<script type="text/javascript" src="../_static/doctools.js"></script>
<script type="text/javascript" src="https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
<script type="text/javascript" src="../_static/js/jquery-1.11.0.min.js"></script>
<script type="text/javascript" src="../_static/js/jquery-fix.js"></script>
<script type="text/javascript" src="../_static/bootstrap-3.3.4/js/bootstrap.min.js"></script>
<script type="text/javascript" src="../_static/bootstrap-sphinx.js"></script>
<link rel="shortcut icon" href="../_static/favicon.ico"/>
<link rel="top" title="MNE 0.12.0 documentation" href="../index.html" />
<link rel="up" title="Python API Reference" href="../python_reference.html" />
<link rel="next" title="mne.time_frequency.dpss_windows" href="mne.time_frequency.dpss_windows.html" />
<link rel="prev" title="mne.time_frequency.write_tfrs" href="mne.time_frequency.write_tfrs.html" />
<link href='http://fonts.googleapis.com/css?family=Open+Sans:400italic,700italic,400,700' rel='stylesheet' type='text/css'>
<script type="text/javascript">
var _gaq = _gaq || [];
_gaq.push(['_setAccount', 'UA-37225609-1']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
</script>
<script type="text/javascript">
!function(d,s,id){var js,fjs=d.getElementsByTagName(s)[0];if(!d.getElementById(id)){js=d.createElement(s);
js.id=id;js.src="http://platform.twitter.com/widgets.js";
fjs.parentNode.insertBefore(js,fjs);}}(document,"script","twitter-wjs");
</script>
<script type="text/javascript">
(function() {
var po = document.createElement('script'); po.type = 'text/javascript'; po.async = true;
po.src = 'https://apis.google.com/js/plusone.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(po, s);
})();
</script>
<link rel="canonical" href="https://mne.tools/stable/index.html" />
</head>
<body role="document">
<div id="navbar" class="navbar navbar-default navbar-fixed-top">
<div class="container">
<div class="navbar-header">
<!-- .btn-navbar is used as the toggle for collapsed navbar content -->
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".nav-collapse">
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="../index.html"><img src="../_static/mne_logo_small.png">
</a>
<span class="navbar-text navbar-version pull-left"><b>0.12.0</b></span>
</div>
<div class="collapse navbar-collapse nav-collapse">
<ul class="nav navbar-nav">
<li><a href="../getting_started.html">Get started</a></li>
<li><a href="../tutorials.html">Tutorials</a></li>
<li><a href="../auto_examples/index.html">Gallery</a></li>
<li><a href="../python_reference.html">API</a></li>
<li><a href="../manual/index.html">Manual</a></li>
<li><a href="../faq.html">FAQ</a></li>
<li class="dropdown globaltoc-container">
<a role="button"
id="dLabelGlobalToc"
data-toggle="dropdown"
data-target="#"
href="../index.html">Site <b class="caret"></b></a>
<ul class="dropdown-menu globaltoc"
role="menu"
aria-labelledby="dLabelGlobalToc"><ul>
<li class="toctree-l1"><a class="reference internal" href="../getting_started.html">Getting started</a></li>
<li class="toctree-l1"><a class="reference internal" href="../tutorials.html">Tutorials</a></li>
<li class="toctree-l1"><a class="reference internal" href="../auto_examples/index.html">Examples Gallery</a></li>
<li class="toctree-l1"><a class="reference internal" href="../faq.html">Frequently Asked Questions</a></li>
<li class="toctree-l1"><a class="reference internal" href="../contributing.html">Contribute to MNE</a></li>
</ul>
<ul class="current">
<li class="toctree-l1 current"><a class="reference internal" href="../python_reference.html">Python API Reference</a></li>
<li class="toctree-l1"><a class="reference internal" href="../manual/index.html">User Manual</a></li>
<li class="toctree-l1"><a class="reference internal" href="../whats_new.html">What’s new</a></li>
</ul>
<ul>
<li class="toctree-l1"><a class="reference internal" href="../cite.html">How to cite MNE</a></li>
<li class="toctree-l1"><a class="reference internal" href="../references.html">Related publications</a></li>
<li class="toctree-l1"><a class="reference internal" href="../cited.html">Publications from MNE users</a></li>
</ul>
</ul>
</li>
<li class="dropdown">
<a role="button"
id="dLabelLocalToc"
data-toggle="dropdown"
data-target="#"
href="#">Page <b class="caret"></b></a>
<ul class="dropdown-menu localtoc"
role="menu"
aria-labelledby="dLabelLocalToc"><ul>
<li><a class="reference internal" href="#">mne.time_frequency.cwt_morlet</a></li>
</ul>
</ul>
</li>
</ul>
<form class="navbar-form navbar-right" action="../search.html" method="get">
<div class="form-group">
<input type="text" name="q" class="form-control" placeholder="Search" />
</div>
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
</div>
</div>
</div>
<div class="container">
<div class="row">
<div class="sphinxsidebar" role="navigation" aria-label="main navigation">
<div class="sphinxsidebarwrapper">
<p class="logo"><a href="../index.html">
<img class="logo" src="../_static/mne_logo_small.png" alt="Logo"/>
</a></p><ul>
<li><a class="reference internal" href="#">mne.time_frequency.cwt_morlet</a></li>
</ul>
<li>
<a href="mne.time_frequency.write_tfrs.html" title="Previous Chapter: mne.time_frequency.write_tfrs"><span class="glyphicon glyphicon-chevron-left visible-sm"></span><span class="hidden-sm hidden-tablet">« mne.time_freq...</span>
</a>
</li>
<li>
<a href="mne.time_frequency.dpss_windows.html" title="Next Chapter: mne.time_frequency.dpss_windows"><span class="glyphicon glyphicon-chevron-right visible-sm"></span><span class="hidden-sm hidden-tablet">mne.time_freq... »</span>
</a>
</li>
<form action="../search.html" method="get">
<div class="form-group">
<input type="text" name="q" class="form-control" placeholder="Search" />
</div>
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
</div>
</div>
<div class="col-md-12 content">
<div class="section" id="mne-time-frequency-cwt-morlet">
<h1>mne.time_frequency.cwt_morlet<a class="headerlink" href="#mne-time-frequency-cwt-morlet" title="Permalink to this headline">¶</a></h1>
<dl class="function">
<dt id="mne.time_frequency.cwt_morlet">
<code class="descclassname">mne.time_frequency.</code><code class="descname">cwt_morlet</code><span class="sig-paren">(</span><em>X</em>, <em>sfreq</em>, <em>freqs</em>, <em>use_fft=True</em>, <em>n_cycles=7.0</em>, <em>zero_mean=False</em>, <em>decim=1</em><span class="sig-paren">)</span><a class="headerlink" href="#mne.time_frequency.cwt_morlet" title="Permalink to this definition">¶</a></dt>
<dd><p>Compute time-frequency decomposition with Morlet wavelets.</p>
<p>This function operates directly on numpy arrays. Consider using
<cite>tfr_morlet</cite> to process <cite>Epochs</cite> or <cite>Evoked</cite> instances.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><p class="first"><strong>X</strong> : array, shape (n_signals, n_times)</p>
<blockquote>
<div><p>Signals (one signal per row).</p>
</div></blockquote>
<p><strong>sfreq</strong> : float</p>
<blockquote>
<div><p>Sampling frequency.</p>
</div></blockquote>
<p><strong>freqs</strong> : array</p>
<blockquote>
<div><p>Array of frequencies of interest.</p>
</div></blockquote>
<p><strong>use_fft</strong> : bool</p>
<blockquote>
<div><p>If True, compute the convolution with an FFT; otherwise use a temporal convolution.</p>
</div></blockquote>
<p><strong>n_cycles</strong> : float | array of float</p>
<blockquote>
<div><p>Number of cycles. Fixed number or one per frequency.</p>
</div></blockquote>
<p><strong>zero_mean</strong> : bool</p>
<blockquote>
<div><p>If True, ensure the wavelets have zero mean.</p>
</div></blockquote>
<p><strong>decim</strong> : int | slice</p>
<blockquote>
<div><p>Decimation factor applied after the time-frequency decomposition
to reduce memory usage.
If <cite>int</cite>, returns tfr[..., ::decim].
If <cite>slice</cite>, returns tfr[..., decim].
Note that decimation may create aliasing artifacts.
Defaults to 1.</p>
</div></blockquote>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first"><strong>tfr</strong> : 3D array</p>
<blockquote class="last">
<div><p>Time-frequency decompositions, shape (n_signals, n_frequencies, n_times).</p>
</div></blockquote>
</td>
</tr>
</tbody>
</table>
<div class="admonition seealso">
<p class="first admonition-title">See also</p>
<dl class="last docutils">
<dt><a class="reference internal" href="mne.time_frequency.tfr.cwt.html#mne.time_frequency.tfr.cwt" title="mne.time_frequency.tfr.cwt"><code class="xref py py-obj docutils literal"><span class="pre">tfr.cwt</span></code></a></dt>
<dd>Compute time-frequency decomposition with user-provided wavelets</dd>
</dl>
</div>
</dd></dl>
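<p>A minimal usage sketch (added for illustration, not part of the original page; the
input array, sampling rate, and frequency grid below are assumed values):</p>
<div class="highlight-python"><div class="highlight"><pre>import numpy as np
from mne.time_frequency import cwt_morlet

sfreq = 1000.0                      # assumed sampling frequency (Hz)
data = np.random.randn(5, 2000)     # hypothetical input: 5 signals, 2 s of data
freqs = np.arange(8.0, 30.0, 2.0)   # frequencies of interest (Hz)

# Complex-valued TFR with shape (n_signals, n_frequencies, n_times)
tfr = cwt_morlet(data, sfreq, freqs, use_fft=True, n_cycles=7.0)
power = np.abs(tfr) ** 2            # power estimate from the complex coefficients
</pre></div></div>
<p>Passing <cite>decim=2</cite> instead would keep every other time sample of the result,
reducing memory usage at the risk of aliasing.</p>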
<div style='clear:both'></div></div>
</div>
</div>
</div>
<footer class="footer">
<div class="container">
<p class="pull-right">
<a href="#">Back to top</a>
<br/>
</p>
<p>
© Copyright 2012-2016, MNE Developers. Last updated on 2016-05-10.<br/>
</p>
</div>
</footer>
<script src="https://mne.tools/versionwarning.js"></script>
</body>
</html>
|
mne-tools/mne-tools.github.io
|
0.12/generated/mne.time_frequency.cwt_morlet.html
|
HTML
|
bsd-3-clause
| 11,766
|
<?php
/**
* Message translations.
*
 * This file is automatically generated by the 'yii message/extract' command.
 * It contains the localizable messages extracted from the source code.
 * You may modify this file by translating the extracted messages.
 *
 * Each array element represents the translation (value) of a message (key).
 * If the value is empty, the message is considered untranslated.
 * Messages that no longer need translation will have their translations
 * enclosed between a pair of '@@' marks.
 *
 * Message strings can use the plural forms format. Check the i18n section
 * of the guide for details.
*
* NOTE: this file must be saved in UTF-8 encoding.
*/
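/*
 * Hypothetical illustration only (not an extracted message): a value may use
 * the ICU plural format mentioned above, for example
 *
 *   'You have {n, plural, =1{one new message} other{# new messages}}.' =>
 *       'Você tem {n, plural, =1{uma nova mensagem} other{# novas mensagens}}.',
 */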
return [
'Your e-mail address has been successfully changed to {email}.' => 'Seu e-mail foi alterado com sucesso para {email}.',
];
|
LeonidLyalin/vova
|
common/humhub/protected/humhub/modules/user/messages/pt_br/views_account_changeEmailValidate.php
|
PHP
|
bsd-3-clause
| 815
|