text
stringlengths
2
99k
meta
dict
import {assert} from 'chai'; import {describe, it} from 'mocha'; import {Color} from '../model/color'; import {ColorFormatter} from './color'; describe(ColorFormatter.name, () => { it('should format color with specified stringifier', () => { const stringifier = (color: Color): string => { return String(color); }; const f = new ColorFormatter(stringifier); const c = new Color([0, 127, 255], 'rgb'); assert.strictEqual(f.format(c), stringifier(c)); }); });
{ "pile_set_name": "Github" }
/* QLogic qed NIC Driver
 * Copyright (c) 2015 QLogic Corporation
 *
 * This software is available under the terms of the GNU General Public License
 * (GPL) Version 2, available from the file COPYING in the main directory of
 * this source tree.
 */

#ifndef _QED_INIT_OPS_H
#define _QED_INIT_OPS_H

#include <linux/types.h>
#include <linux/slab.h>
#include "qed.h"

/**
 * @brief qed_init_iro_array - init iro_arr.
 *
 * Initializes the internal RAM offset (IRO) array for the device.
 *
 * @param cdev
 */
void qed_init_iro_array(struct qed_dev *cdev);

/**
 * @brief qed_init_run - Run the init-sequence.
 *
 * Executes the HW init sequence for the given phase/phase-id under the
 * supplied mode mask.
 *
 * @param p_hwfn
 * @param p_ptt
 * @param phase
 * @param phase_id
 * @param modes
 * @return _qed_status_t
 */
int qed_init_run(struct qed_hwfn *p_hwfn,
		 struct qed_ptt *p_ptt,
		 int phase,
		 int phase_id,
		 int modes);

/**
 * @brief qed_init_hwfn_allocate - Allocate RT array, Store 'values' ptrs.
 *
 * @param p_hwfn
 *
 * @return _qed_status_t
 */
int qed_init_alloc(struct qed_hwfn *p_hwfn);

/**
 * @brief qed_init_hwfn_deallocate
 *
 * Frees the resources allocated by qed_init_alloc().
 *
 * @param p_hwfn
 */
void qed_init_free(struct qed_hwfn *p_hwfn);

/**
 * @brief qed_init_clear_rt_data - Clears the runtime init array.
 *
 * @param p_hwfn
 */
void qed_init_clear_rt_data(struct qed_hwfn *p_hwfn);

/**
 * @brief qed_init_store_rt_reg - Store a configuration value in the RT array.
 *
 * @param p_hwfn
 * @param rt_offset
 * @param val
 */
void qed_init_store_rt_reg(struct qed_hwfn *p_hwfn,
			   u32 rt_offset,
			   u32 val);

/* Both macros expand to the same store; OVERWRITE_RT_REG exists so call
 * sites can document that they are intentionally replacing a prior value.
 */
#define STORE_RT_REG(hwfn, offset, val)	\
	qed_init_store_rt_reg(hwfn, offset, val)

#define OVERWRITE_RT_REG(hwfn, offset, val) \
	qed_init_store_rt_reg(hwfn, offset, val)

/**
 * @brief qed_init_store_rt_agg - Store an aggregate (array) of configuration
 *        values in the RT array starting at rt_offset.
 *
 * @param p_hwfn
 * @param rt_offset
 * @param val
 * @param size
 */
void qed_init_store_rt_agg(struct qed_hwfn *p_hwfn,
			   u32 rt_offset,
			   u32 *val,
			   size_t size);

#define STORE_RT_REG_AGG(hwfn, offset, val) \
	qed_init_store_rt_agg(hwfn, offset, (u32 *)&val, sizeof(val))

/**
 * @brief
 *      Initialize GTT global windows and set admin window
 *      related params of GTT/PTT to default values.
 *
 * @param p_hwfn
 */
void qed_gtt_init(struct qed_hwfn *p_hwfn);

#endif
{ "pile_set_name": "Github" }
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ignore
// +build aix

/*
Input to cgo -godefs. See also mkerrors.sh and mkall.sh
*/

// +godefs map struct_in_addr [4]byte /* in_addr */
// +godefs map struct_in6_addr [16]byte /* in6_addr */

package unix

/*
#include <sys/types.h>
#include <sys/time.h>
#include <sys/limits.h>
#include <sys/un.h>
#include <utime.h>
#include <sys/utsname.h>
#include <sys/poll.h>
#include <sys/resource.h>
#include <sys/stat.h>
#include <sys/statfs.h>
#include <sys/termio.h>
#include <sys/ioctl.h>

#include <termios.h>

#include <net/if.h>
#include <net/if_dl.h>
#include <netinet/in.h>
#include <netinet/icmp6.h>

#include <dirent.h>
#include <fcntl.h>

enum {
	sizeofPtr = sizeof(void*),
};

union sockaddr_all {
	struct sockaddr s1;     // this one gets used for fields
	struct sockaddr_in s2;  // these pad it out
	struct sockaddr_in6 s3;
	struct sockaddr_un s4;
	struct sockaddr_dl s5;
};

struct sockaddr_any {
	struct sockaddr addr;
	char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)];
};
*/
import "C"

// Machine characteristics

const (
	SizeofPtr      = C.sizeofPtr
	SizeofShort    = C.sizeof_short
	SizeofInt      = C.sizeof_int
	SizeofLong     = C.sizeof_long
	SizeofLongLong = C.sizeof_longlong
	PathMax        = C.PATH_MAX
)

// Basic types

type (
	_C_short     C.short
	_C_int       C.int
	_C_long      C.long
	_C_long_long C.longlong
)

type off64 C.off64_t
type off C.off_t
type Mode_t C.mode_t

// Time

type Timespec C.struct_timespec

type StTimespec C.struct_st_timespec

type Timeval C.struct_timeval

type Timeval32 C.struct_timeval32

type Timex C.struct_timex

type Time_t C.time_t

type Tms C.struct_tms

type Utimbuf C.struct_utimbuf

type Timezone C.struct_timezone

// Processes

type Rusage C.struct_rusage

type Rlimit C.struct_rlimit64

type Pid_t C.pid_t

type _Gid_t C.gid_t

type dev_t C.dev_t

// Files

type Stat_t C.struct_stat

type StatxTimestamp C.struct_statx_timestamp

type Statx_t C.struct_statx

type Dirent C.struct_dirent

// Sockets

type RawSockaddrInet4 C.struct_sockaddr_in

type RawSockaddrInet6 C.struct_sockaddr_in6

type RawSockaddrUnix C.struct_sockaddr_un

type RawSockaddr C.struct_sockaddr

type RawSockaddrAny C.struct_sockaddr_any

type _Socklen C.socklen_t

type Cmsghdr C.struct_cmsghdr

type ICMPv6Filter C.struct_icmp6_filter

type Iovec C.struct_iovec

type IPMreq C.struct_ip_mreq

type IPv6Mreq C.struct_ipv6_mreq

type IPv6MTUInfo C.struct_ip6_mtuinfo

type Linger C.struct_linger

type Msghdr C.struct_msghdr

const (
	SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in
	SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6
	SizeofSockaddrAny   = C.sizeof_struct_sockaddr_any
	SizeofSockaddrUnix  = C.sizeof_struct_sockaddr_un
	SizeofLinger        = C.sizeof_struct_linger
	SizeofIPMreq        = C.sizeof_struct_ip_mreq
	SizeofIPv6Mreq      = C.sizeof_struct_ipv6_mreq
	SizeofIPv6MTUInfo   = C.sizeof_struct_ip6_mtuinfo
	SizeofMsghdr        = C.sizeof_struct_msghdr
	SizeofCmsghdr       = C.sizeof_struct_cmsghdr
	SizeofICMPv6Filter  = C.sizeof_struct_icmp6_filter
)

// Routing and interface messages

const (
	SizeofIfMsghdr = C.sizeof_struct_if_msghdr
)

type IfMsgHdr C.struct_if_msghdr

// Misc

type FdSet C.fd_set

type Utsname C.struct_utsname

type Ustat_t C.struct_ustat

type Sigset_t C.sigset_t

const (
	AT_FDCWD            = C.AT_FDCWD
	AT_REMOVEDIR        = C.AT_REMOVEDIR
	AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW
)

// Terminal handling

type Termios C.struct_termios

type Termio C.struct_termio

type Winsize C.struct_winsize

//poll

// PollFd is declared directly as a Go struct here, unlike the surrounding
// types which are generated from their C counterparts by cgo -godefs.
type PollFd struct {
	Fd      int32
	Events  uint16
	Revents uint16
}

const (
	POLLERR    = C.POLLERR
	POLLHUP    = C.POLLHUP
	POLLIN     = C.POLLIN
	POLLNVAL   = C.POLLNVAL
	POLLOUT    = C.POLLOUT
	POLLPRI    = C.POLLPRI
	POLLRDBAND = C.POLLRDBAND
	POLLRDNORM = C.POLLRDNORM
	POLLWRBAND = C.POLLWRBAND
	POLLWRNORM = C.POLLWRNORM
)

//flock_t

type Flock_t C.struct_flock64

// Statfs

type Fsid_t C.struct_fsid_t
type Fsid64_t C.struct_fsid64_t

type Statfs_t C.struct_statfs

// NOTE(review): RNDGETENTCNT is a hard-coded ioctl request number here, not
// derived from a C header like the constants above — confirm against AIX.
const RNDGETENTCNT = 0x80045200
{ "pile_set_name": "Github" }
//------------------------------------------------------------------------------
// GB_AxB: hard-coded functions for semiring: C<M>=A*B or A'*B
//------------------------------------------------------------------------------

// SuiteSparse:GraphBLAS, Timothy A. Davis, (c) 2017-2020, All Rights Reserved.
// http://suitesparse.com   See GraphBLAS/Doc/License.txt for license.

//------------------------------------------------------------------------------

// If this file is in the Generated/ folder, do not edit it (auto-generated).

#include "GB.h"
#ifndef GBCOMPACT
#include "GB_control.h"
#include "GB_ek_slice.h"
#include "GB_bracket.h"
#include "GB_iterator.h"
#include "GB_sort.h"
#include "GB_atomics.h"
#include "GB_AxB_saxpy3.h"
#include "GB_AxB__include.h"

// The C=A*B semiring is defined by the following types and operators:

// This instantiation is the LAND_FIRST_BOOL semiring: the multiplier is
// FIRST (z = aik, the B value is ignored) and the monoid is logical AND
// (cij &= z), as encoded in the GB_MULT / GB_MULTADD macros below.

// A'*B function (dot2):     GB_Adot2B__land_first_bool
// A'*B function (dot3):     GB_Adot3B__land_first_bool
// C+=A'*B function (dot4):  GB_Adot4B__land_first_bool
// A*B function (saxpy3):    GB_Asaxpy3B__land_first_bool

// C type:   bool
// A type:   bool
// B type:   bool

// Multiply: z = aik
// Add:      cij &= z
//           'any' monoid?  0
//           atomic?        1
//           OpenMP atomic? 1
// MultAdd:  cij &= aik
// Identity: true
// Terminal: if (cij == false) break ;

#define GB_ATYPE \
    bool

#define GB_BTYPE \
    bool

#define GB_CTYPE \
    bool

// aik = Ax [pA]
#define GB_GETA(aik,Ax,pA) \
    bool aik = Ax [pA]

// bkj = Bx [pB] (unused: the FIRST multiplier never reads B values)
#define GB_GETB(bkj,Bx,pB) \
    ;

#define GB_CX(p) Cx [p]

// multiply operator
#define GB_MULT(z, x, y) \
    z = x

// multiply-add
#define GB_MULTADD(z, x, y) \
    z &= x

// monoid identity value
#define GB_IDENTITY \
    true

// break if cij reaches the terminal value (dot product only)
#define GB_DOT_TERMINAL(cij) \
    if (cij == false) break ;

// simd pragma for dot-product loop vectorization
#define GB_PRAGMA_VECTORIZE_DOT \
    ;

// simd pragma for other loop vectorization
#define GB_PRAGMA_VECTORIZE GB_PRAGMA_SIMD

// declare the cij scalar
#define GB_CIJ_DECLARE(cij) \
    bool cij

// save the value of C(i,j)
#define GB_CIJ_SAVE(cij,p) Cx [p] = cij

// cij = Cx [pC]
#define GB_GETC(cij,pC) \
    cij = Cx [pC]

// Cx [pC] = cij
#define GB_PUTC(cij,pC) \
    Cx [pC] = cij

// Cx [p] = t
#define GB_CIJ_WRITE(p,t) Cx [p] = t

// C(i,j) += t
#define GB_CIJ_UPDATE(p,t) \
    Cx [p] &= t

// x + y
#define GB_ADD_FUNCTION(x,y) \
    x & y

// type with size of GB_CTYPE, and can be used in compare-and-swap
#define GB_CTYPE_PUN \
    bool

// bit pattern for bool, 8-bit, 16-bit, and 32-bit integers
#define GB_CTYPE_BITS \
    0x1L

// 1 if monoid update can skipped entirely (the ANY monoid)
#define GB_IS_ANY_MONOID \
    0

// 1 if monoid update is EQ
#define GB_IS_EQ_MONOID \
    0

// 1 if monoid update can be done atomically, 0 otherwise
#define GB_HAS_ATOMIC \
    1

// 1 if monoid update can be done with an OpenMP atomic update, 0 otherwise
#define GB_HAS_OMP_ATOMIC \
    1

// 1 for the ANY_PAIR semirings
#define GB_IS_ANY_PAIR_SEMIRING \
    0

// 1 if PAIR is the multiply operator
#define GB_IS_PAIR_MULTIPLIER \
    0

#if GB_IS_ANY_PAIR_SEMIRING

    // result is purely symbolic; no numeric work to do.  Hx is not used.
    #define GB_HX_WRITE(i,t)
    #define GB_CIJ_GATHER(p,i)
    #define GB_HX_UPDATE(i,t)
    #define GB_CIJ_MEMCPY(p,i,len)

#else

    // Hx [i] = t
    #define GB_HX_WRITE(i,t) Hx [i] = t

    // Cx [p] = Hx [i]
    #define GB_CIJ_GATHER(p,i) Cx [p] = Hx [i]

    // Hx [i] += t
    #define GB_HX_UPDATE(i,t) \
        Hx [i] &= t

    // memcpy (&(Cx [p]), &(Hx [i]), len)
    #define GB_CIJ_MEMCPY(p,i,len) \
        memcpy (Cx +(p), Hx +(i), (len) * sizeof(bool))

#endif

// disable this semiring and use the generic case if these conditions hold
#define GB_DISABLE \
    (GxB_NO_LAND || GxB_NO_FIRST || GxB_NO_BOOL || GxB_NO_LAND_BOOL || GxB_NO_FIRST_BOOL || GxB_NO_LAND_FIRST_BOOL)

//------------------------------------------------------------------------------
// C=A'*B or C<!M>=A'*B: dot product (phase 2)
//------------------------------------------------------------------------------

GrB_Info GB_Adot2B__land_first_bool
(
    GrB_Matrix C,
    const GrB_Matrix M, const bool Mask_struct,
    const GrB_Matrix *Aslice, bool A_is_pattern,
    const GrB_Matrix B, bool B_is_pattern,
    int64_t *GB_RESTRICT B_slice,
    int64_t *GB_RESTRICT *C_counts,
    int nthreads, int naslice, int nbslice
)
{
    // C<M>=A'*B now uses dot3
    #if GB_DISABLE
    return (GrB_NO_VALUE) ;
    #else
    // actual work is done by the included template, parameterized by the
    // macros defined above
    #define GB_PHASE_2_OF_2
    #include "GB_AxB_dot2_meta.c"
    #undef GB_PHASE_2_OF_2
    return (GrB_SUCCESS) ;
    #endif
}

//------------------------------------------------------------------------------
// C<M>=A'*B: masked dot product method (phase 2)
//------------------------------------------------------------------------------

GrB_Info GB_Adot3B__land_first_bool
(
    GrB_Matrix C,
    const GrB_Matrix M, const bool Mask_struct,
    const GrB_Matrix A, bool A_is_pattern,
    const GrB_Matrix B, bool B_is_pattern,
    const GB_task_struct *GB_RESTRICT TaskList,
    const int ntasks,
    const int nthreads
)
{
    #if GB_DISABLE
    return (GrB_NO_VALUE) ;
    #else
    #include "GB_AxB_dot3_template.c"
    return (GrB_SUCCESS) ;
    #endif
}

//------------------------------------------------------------------------------
// C+=A'*B: dense dot product
//------------------------------------------------------------------------------

GrB_Info GB_Adot4B__land_first_bool
(
    GrB_Matrix C,
    const GrB_Matrix A, bool A_is_pattern,
    int64_t *GB_RESTRICT A_slice, int naslice,
    const GrB_Matrix B, bool B_is_pattern,
    int64_t *GB_RESTRICT B_slice, int nbslice,
    const int nthreads
)
{
    #if GB_DISABLE
    return (GrB_NO_VALUE) ;
    #else
    #include "GB_AxB_dot4_template.c"
    return (GrB_SUCCESS) ;
    #endif
}

//------------------------------------------------------------------------------
// C=A*B, C<M>=A*B, C<!M>=A*B: saxpy3 method (Gustavson + Hash)
//------------------------------------------------------------------------------

#include "GB_AxB_saxpy3_template.h"

GrB_Info GB_Asaxpy3B__land_first_bool
(
    GrB_Matrix C,
    const GrB_Matrix M, bool Mask_comp, const bool Mask_struct,
    const GrB_Matrix A, bool A_is_pattern,
    const GrB_Matrix B, bool B_is_pattern,
    GB_saxpy3task_struct *GB_RESTRICT TaskList,
    const int ntasks,
    const int nfine,
    const int nthreads,
    GB_Context Context
)
{
    #if GB_DISABLE
    return (GrB_NO_VALUE) ;
    #else
    #include "GB_AxB_saxpy3_template.c"
    return (GrB_SUCCESS) ;
    #endif
}

#endif
{ "pile_set_name": "Github" }
<?php
/**
 * Copyright since 2007 PrestaShop SA and Contributors
 * PrestaShop is an International Registered Trademark & Property of PrestaShop SA
 *
 * NOTICE OF LICENSE
 *
 * This source file is subject to the Open Software License (OSL 3.0)
 * that is bundled with this package in the file LICENSE.md.
 * It is also available through the world-wide-web at this URL:
 * https://opensource.org/licenses/OSL-3.0
 * If you did not receive a copy of the license and are unable to
 * obtain it through the world-wide-web, please send an email
 * to license@prestashop.com so we can send you a copy immediately.
 *
 * DISCLAIMER
 *
 * Do not edit or add to this file if you wish to upgrade PrestaShop to newer
 * versions in the future. If you wish to customize PrestaShop for your
 * needs please refer to https://devdocs.prestashop.com/ for more information.
 *
 * @author    PrestaShop SA and Contributors <contact@prestashop.com>
 * @copyright Since 2007 PrestaShop SA and Contributors
 * @license   https://opensource.org/licenses/OSL-3.0 Open Software License (OSL 3.0)
 */

namespace PrestaShop\PrestaShop\Core\Domain\Product\CommandHandler;

use PrestaShop\PrestaShop\Core\Domain\Product\Command\UpdateProductPricesCommand;

/**
 * Defines contract for the handler of UpdateProductPricesCommand.
 *
 * Implementations apply the price changes carried by the command to the
 * targeted product.
 */
interface UpdateProductPricesHandlerInterface
{
    /**
     * Handles the command that updates product prices.
     *
     * @param UpdateProductPricesCommand $command
     */
    public function handle(UpdateProductPricesCommand $command): void;
}
{ "pile_set_name": "Github" }
{
    "Version": "2.1.23.13",
    "Names": {
        "ab": "абхазаг",
        "ady": "адыгейаг",
        "ae": "авестӕ",
        "af": "африкаанс",
        "ang": "рагон англисаг",
        "ar": "араббаг",
        "av": "авайраг",
        "az": "тӕтӕйраг",
        "ba": "башкираг",
        "bg": "болгайраг",
        "bs": "босниаг",
        "bua": "бурятаг",
        "ca": "каталайнаг",
        "ce": "цӕцӕйнаг",
        "cop": "коптаг",
        "cs": "чехаг",
        "cv": "чувашаг",
        "da": "даниаг",
        "de": "немыцаг",
        "de_AT": "австралиаг немыцаг",
        "de_CH": "швейцариаг немыцаг",
        "egy": "рагон египтаг",
        "el": "бердзейнаг",
        "en": "англисаг",
        "en_AU": "австралиаг англисаг",
        "en_CA": "канадӕйаг англисаг",
        "en_GB": "бритайнаг англисаг",
        "en_US": "америкаг англисаг",
        "eo": "есперанто",
        "es": "испайнаг",
        "es_419": "латинаг америкаг испайнаг",
        "es_ES": "европӕйаг испайнаг",
        "et": "естойнаг",
        "eu": "баскаг",
        "fa": "персайнаг",
        "fi": "финнаг",
        "fil": "филиппинаг",
        "fj": "фиджи",
        "fo": "фарераг",
        "fr": "францаг",
        "fr_CA": "канадӕйаг францаг",
        "fr_CH": "швейцариаг францаг",
        "fro": "рагон францаг",
        "ga": "ирландиаг",
        "grc": "рагон бердзейнаг",
        "he": "уираг",
        "hr": "хорватаг",
        "hu": "венгериаг",
        "hy": "сомихаг",
        "inh": "мӕхъӕлон",
        "it": "италиаг",
        "ja": "япойнаг",
        "ka": "гуырдзиаг",
        "kbd": "кӕсгон",
        "krc": "бӕлхъӕрон",
        "ku": "курдаг",
        "kum": "хъуымыхъхъаг",
        "la": "латинаг",
        "lez": "лекъаг",
        "mk": "мӕчъидон",
        "os": "ирон",
        "pt": "португалиаг",
        "pt_BR": "бразилиаг португалиаг",
        "pt_PT": "европӕйаг португалиаг",
        "rom": "цигайнаг",
        "ru": "уырыссаг",
        "und": "нӕзонгӕ ӕвзаг",
        "zh": "китайаг",
        "zh_Hans": "ӕнцонгонд китайаг",
        "zh_Hant": "традицион китайаг"
    }
}
{ "pile_set_name": "Github" }
#! /bin/bash
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

########################################################################
# mozilla/security/nss/tests/lowhash/lowhash.sh
#
# Script to test basic functionality of the NSSLoHash API
#
# included from
# --------------
#   all.sh
#
# needs to work on all Linux platforms
#
# tests implemented:
#   lowhash (verify encryption cert - bugzilla bug 119059)
#
# special strings
# ---------------
#
########################################################################

errors=0

############################## lowhash_init ##############################
# local shell function to initialize this script
########################################################################
lowhash_init()
{
  SCRIPTNAME=lowhash.sh      # sourced - $0 would point to all.sh

  if [ -z "${CLEANUP}" ] ; then     # if nobody else is responsible for
      CLEANUP="${SCRIPTNAME}"       # cleaning this script will do it
  fi

  # Pull in the common test-suite environment unless it was already sourced.
  if [ -z "${INIT_SOURCED}" ] || [ "${INIT_SOURCED}" != "TRUE" ]; then
      cd ../common
      . ./init.sh
  fi

  LOWHASHDIR=../lowhash
  mkdir -p "${LOWHASHDIR}"
  # Report the kernel FIPS switch in the results header when available.
  if [ -f /proc/sys/crypto/fips_enabled ]; then
      FVAL=`cat /proc/sys/crypto/fips_enabled`
      html_head "Lowhash Tests - /proc/sys/crypto/fips_enabled is ${FVAL}"
  else
      html_head "Lowhash Tests"
  fi
  cd "${LOWHASHDIR}"
}

############################## lowhash_test ##############################
# local shell function to test basic the NSS Low Hash API both in
# FIPS 140 compliant mode and not
########################################################################
lowhash_test()
{
  if [ ! -f ${BINDIR}/lowhashtest -a \
       ! -f ${BINDIR}/lowhashtest${PROG_SUFFIX} ]; then
      # Fixed typo in the user-facing message ("plaform").
      echo "freebl lowhash not supported on this platform."
  else
      TESTS="MD5 SHA1 SHA224 SHA256 SHA384 SHA512"
      # Save the caller's FIPS setting so it can be restored afterwards.
      # (Was OLD_MODE=`echo ${NSS_FIPS}` — the echo subshell was pointless.)
      OLD_MODE="${NSS_FIPS}"
      # Run each digest test with FIPS mode both disabled and enabled.
      for fips_mode in 0 1; do
          echo "lowhashtest with fips mode=${fips_mode}"
          export NSS_FIPS=${fips_mode}
          for TEST in ${TESTS}
          do
              echo "lowhashtest ${TEST}"
              ${BINDIR}/lowhashtest ${TEST} 2>&1
              RESULT=$?
              html_msg ${RESULT} 0 "lowhashtest with fips mode=${fips_mode} for ${TEST}"
          done
      done
      # NOTE(review): if NSS_FIPS was originally unset this re-exports it as
      # an empty string rather than unsetting it — behavior preserved as-is.
      export NSS_FIPS="${OLD_MODE}"
  fi
}

############################## lowhash_cleanup ############################
# local shell function to finish this script (no exit since it might be
# sourced)
########################################################################
lowhash_cleanup()
{
  html "</TABLE><BR>"
  cd "${QADIR}"
  . common/cleanup.sh
}

################## main #################################################
lowhash_init
lowhash_test
lowhash_cleanup
echo "lowhash.sh done"
{ "pile_set_name": "Github" }
# PMHTTP [![Version](https://img.shields.io/badge/version-v4.5.0-blue.svg)](https://github.com/postmates/PMHTTP/releases/latest) ![Platforms](https://img.shields.io/badge/platforms-ios%20%7C%20osx%20%7C%20watchos%20%7C%20tvos-lightgrey.svg) ![Languages](https://img.shields.io/badge/languages-swift%20%7C%20objc-orange.svg) ![License](https://img.shields.io/badge/license-MIT%2FApache-blue.svg) [![Carthage compatible](https://img.shields.io/badge/Carthage-compatible-4BC51D.svg?style=flat)][Carthage] [![CocoaPods](https://img.shields.io/cocoapods/v/PMHTTP.svg)](http://cocoadocs.org/docsets/PMHTTP) [Carthage]: https://github.com/carthage/carthage PMHTTP is an HTTP framework built around `URLSession` and designed for Swift while retaining Obj-C compatibility. We think `URLSession` is great. But it was designed for Obj-C and it doesn't handle anything beyond the networking aspect of a request. This means no handling of JSON, and it doesn't even provide `multipart/form-data` uploads. PMHTTP leaves the networking to `URLSession` and provides everything else. Features include: * Requests can define parse handlers that execute asynchronously separately from the completion block, and requests can be canceled while parsing and the completion block sees the correct result. * First-class JSON support using [PMJSON][]. * Structured results and high-quality errors; no more treating `URLError.cancelled` as a network error. * Strongly-typed results. * Thread safety. * [Intelligent cache handling](#cache-handling). * Requests can be defined once (including a parse handler) and executed many times, just like `URLRequest`. * Configurable automatic retrying of failed requests when safe. * A configurable base URL, allowing for switching between staging and production with no change to the code constructing the requests. * Support for Basic authentication. * `multipart/form-data`, `application/x-www-form-urlencoded`, and JSON upload support. 
* Built-in request mocking support without using method swizzling. * Nothing uses the main thread, not even completion blocks, unless you explicitly ask it to. PMHTTP was designed specifically for the HTTP functionality that [Postmates][] needs. This means first-class REST support with a focus on JSON. But there's some functionality it doesn't handle which we may get around to doing at some point ([see issues][]). Pull requests are welcome. [Postmates]: https://postmates.com [PMJSON]: https://github.com/postmates/PMJSON "postmates/PMJSON on GitHub" [see issues]: https://github.com/postmates/PMHTTP/labels/TODO ### Table of Contents * [Usage](#usage) * [Setup](#setup) * [Detailed Design](#detailed-design) * [`HTTPManager`](#httpmanager) * [Environments](#environments) * [Requests](#requests) * [Network Tasks](#network-tasks) * [Network Activity Indicator](#network-activity-indicator) * [Automatic Retrying of Failed Requests](#automatic-retrying-of-failed-requests) * [Cache Handling](#cache-handling) * [Mocking](#mocking) * [Testing](#testing) * [Requirements](#requirements) * [Installation](#installation) * [License](#license) * [Version History](#version-history) ## Usage A typical GET request looks like: ```swift // https://api.example.com/v1/search?query=%s let task = HTTP.request(GET: "search", parameters: ["query": "cute cats"]) .parseAsJSON(using: { (response, json) in return try JSON.map(json.getArray(), Cat.init(json:)) }) .performRequest(withCompletionQueue: .main) { task, result in switch result { case let .success(response, cats): // Do something with the Cats. case let .error(response, error): // Handle the error. This includes network errors, JSON parse errors, // and any error thrown by Cat.init(json:). case .canceled: // The task was canceled. Ignore or handle as appropriate. } } // task can be canceled and can be queried for its state // and this can be done from any thread. 
``` A POST request might look like: ```swift // https://api.example.com/v1/submit_cat let task = HTTP.request(POST: "submit_cat", parameters: ["name": "Fluffles", "color": "tabby"]) .parseAsJSON(using: { result in // POST parse blocks take a single `result` argument because 204 No Content is a valid // response. The `result` enum vends an optional `value` property, and has a // `getValue()` method that throws an error if the response was 204 No Content. return try SubmitCatResponse(json: result.getValue()) }) .performRequest(withCompletionQueue: .main) { task, result in switch result { case let .success(response, value): // value is a SubmitCatResponse. case let .error(response, error): // Handle the error. This could be a network error, a JSON parse error, or // any error thrown by SubmitCatResponse.init(json:). case .canceled: // The task was canceled. } } ``` A `multipart/form-data` upload might look like: ```swift // https://api.example.com/v1/submit_cat with photo let req = HTTP.request(POST: "submit_cat", parameters: ["name": "Fluffles", "color": "tabby"])! // We could add the image synchronously, but it's better to be asynchronous. // Note: There is a convenience function to do this already, this is just an example. req.addMultipartBody { upload in // This block executes on a background queue. if let data = UIImageJPEGRepresentation(catPhoto, 0.9) { upload.addMultipart(data: data, withName: "photo", mimeType: "image/jpeg") } } let task = req.parseAsJSON(using: { try SubmitCatResponse(json: $0.getValue()) }) .performRequest(withCompletionQueue: .main) { task, result in // ... } ``` #### Setup You can modify the properties of the global `HTTPManager` object at any time, but to make setup easier, if your `UIApplicationDelegate` or `NSApplicationDelegate` object conforms to the `HTTPManagerConfigurable` protocol it will be asked to configure the `HTTPManager` the first time the `HTTP` global variable is accessed. 
This might look like: ```swift extension AppDelegate: HTTPManagerConfigurable { public func configure(httpManager: HTTPManager) { httpManager.environment = HTTPManager.Environment(string: /* ... */) let config = URLSessionConfiguration.default config.timeoutIntervalForRequest = 10 // PMHTTP defines a default User-Agent but we can supply our own config.HTTPAdditionalHeaders = ["User-Agent": myUserAgent] httpManager.sessionConfiguration = config if let (username, apiKey) = getAPICredentials() { httpManager.defaultCredential = URLCredential(user: username, password: apiKey, persistence: .forSession) } httpManager.defaultRetryBehavior = HTTPManagerRetryBehavior.retryNetworkFailureOrServiceUnavailable(withStrategy: .retryTwiceWithDefaultDelay) } } ``` ## Detailed Design PMHTTP was designed with 6 goals in mind: * Be as Swift-like as possible while retaining Obj-C compatibility. * Speed, with an emphasis on being concurrent by default. * Thread safety wherever it makes sense. * Explicitness and type safety. For example, PMHTTP doesn't auto-detect the return type but requires you to declare what response format you're expecting. * Correctness, which includes avoiding surprising behavior. * Make it easy to add new functionality, such as auto-retrying and network mocking. #### `HTTPManager` The overall manager class for PMHTTP is `HTTPManager`. This is the class that allows you to configure various global properties and to create new requests. Multiple managers can be created if desired, but a single global instance is provided under the global property `HTTP` (for Obj-C this is `[HTTPManager defaultManager]`). All properties and methods on this class are completely thread-safe. Configuration of the shared `HTTP` instance can be done by adopting the `HTTPManagerConfigurable` protocol on your app delegate. This protocol provides a method that can be used to configure the shared `HTTPManager` object the first time the `HTTP` property is accessed. 
This design allows you to ensure the shared instance is properly configured prior to first use even if it's used prior to the normal entry point for your application (e.g. inside some class's `+load` method). Do note, however, that this method will be executed on whatever thread is first accessing the `HTTP` property, and so it should be safe to run from any thread. **Important:** The shared `HTTP` instance is a convenience intended for use by the application. If you're writing a shared component (e.g. a framework) that uses PMHTTP, you need to carefully consider whether using `HTTP` is appropriate or whether you should be using a separate instance of `HTTPManager`. The use of `HTTP` is only appropriate if you want to automatically adopt any configuration the application provides (including environment and default credential). #### Environments `HTTPManager` has a property `environment` of type `HTTPManager.Environment`. An environment is a simple wrapper around a `URL` and represents the base URL that requests should use if the request is not made with an absolute URL. You may wish to create your own extension that looks something like: ```swift extension HTTPManager.Environment { // @nonobjc works around "a declaration cannot be both 'final' and 'dynamic'" error. @nonobjc static let Production = HTTPManager.Environment(baseURL: productionURL) @nonobjc static let Staging = HTTPManager.Environment(baseURL: stagingURL) } ``` The environment is also used to determine whether a given request should adopt the default credential configured on the `HTTPManager`. Only requests for URLs that are prefixed by the environment will use the default credential. Requests for any other URL will have no credential by default, though a credential can always be added to any request. #### Requests Requests in PMHTTP are objects. In a pure-Swift world they'd be structs/protocols, but they're objects in order to be compatible with Obj-C. 
Unlike `URLRequest`, PMHTTP requests are inherently mutable (so they're like `NSMutableURLRequest`). They're also the only public component of PMHTTP that is not thread-safe, though it is safe to access a request concurrently as long as no thread is mutating the request (which is to say, reading values from the request does not perform any internal mutation). Requests are split into a hierarchy of classes: * `HTTPManagerRequest` - The root request type, which contains parameters and methods that are applicable to all requests. * `HTTPManagerNetworkRequest` - The parent class for all requests that do not have a parse handler. * `HTTPManagerDataRequest` - The class for GET requests that do not have a parse handler. * `HTTPManagerActionRequest` - The class or parent class for POST/PUT/PATCH/DELETE requests that do not have a parse handler. * `HTTPManagerUploadFormRequest` - The class for POST/PUT/PATCH requests without a parse handler that have a body of either `application/x-www-form-urlencoded` or `multipart/form-data`. * `HTTPManagerUploadDataRequest` - The class for POST/PUT/PATCH requests without a parse handler that have a body consisting of an arbitrary `NSData`. * `HTTPManagerUploadJSONRequest` - The class for POST/PUT/PATCH requests without a parse handler that have a body consisting of a JSON value. * `HTTPManagerParseRequest<T>` - The class for any request that has a parse handler. * `HTTPManagerObjectParseRequest` - The class for requests made from Obj-C that have a parse handler. Similar to `HTTPManagerParseRequest<T>` but the parse result is always an `AnyObject?`. This hierarchy means that every class can provide only the methods/properties that make sense for all requests of that class type. For example, only `HTTPManagerUploadFormRequest` requests allow for adding multipart bodies. Requests include properties for configuring virtually every aspect of the network request. 
A few properties inherit default values from the `HTTPManager` object, though these default values can always be overridden. One property of note is `userInitiated`, which is a boolean property that should be set if the request represents some action the user is waiting on. Setting this property to `true` causes the underlying network task to be executed at a high priority and causes all background queue processing to occur using `QOS_CLASS_USER_INITIATED`. `HTTPManagerUploadFormRequest` provides support for creating `multipart/form-data` requests, which can be used for uploading files/images. These requests are implemented in a streaming fashion, so e.g. memory-mapped `NSData` objects won't be copied into a contiguous buffer, thus allowing you to upload files without concerns about memory use. `HTTPManagerRequest` conforms to `NSCopying` so copies can be made of any request if necessary. Furthermore, when attaching a parse handler to a request (and therefore converting it into an `HTTPManagerParseRequest<T>`) the original request data is copied so subsequent mutations to the original request do not affect the parse request, and when a request is executed the request data is copied so the request can be immediately mutated without affecting the executing network task. Requests are also designed such that they can be easily created and executed using a functional-style chain, as demonstrated by the [Usage](#usage) section above. Parse requests always execute their parse handler on a background queue, with no option to run on a given queue (or the main queue). This constraint exists both to encourage parsing in the background, and for simplicity, as parsing on the main queue can always be accomplished by skipping the parse handler and parsing in the completion block instead. 
Request completion blocks are similarly executed on a background queue by default (for requests with a parse handler, this will be the same queue that the parse handler executed on), although here a specific queue can be provided where the completion block should run, such as the main queue. #### Network Tasks Executing a request returns a value of type `HTTPManagerTask`. This class is the PMHTTP equivalent of `URLSessionTask` and is completely thread-safe. It provides properties for inspecting the current state of the request, including for accessing the underlying `URLSessionTask`, and it provides a `cancel()` method for canceling the request. Unlike `URLSessionTask.cancel()`, `HTTPManagerTask.cancel()` can be used to cancel a request while the parse handler is executing, not just canceling the networking portion. PMHTTP guarantees that if you execute `HTTPManagerTask.cancel()` from the same queue that the completion block is targeting, prior to the completion block itself executing, the completion block will always be given a result of `.canceled` even if it had already finished parsing before `cancel()` was invoked. This means that if you target the main queue for your completion block, you can be confident that a canceled task will never behave as though it succeeded or failed. Like `URLSessionTask`, `HTTPManagerTask` supports key-value observing (although, like `URLSessionTask`, the KVO messages will occur on some background queue). In the absence of automatic retrying, the `networkTask` property value will never change during the lifetime of the task. If automatic retrying has been configured, `networkTask` will change if the request is retried, and will broadcast any relevant key-value observing messages. #### Network Activity Indicator PMHTTP provides a callback you can use to implement support for the global network activity indicator. 
Each request object has a property `affectsNetworkActivityIndicator` (which defaults to `true`) that controls whether any tasks created from the request affect the callback. The callback itself is configured by assigning a block to `HTTPManager.networkActivityHandler`. This block is run on the main thread whenever the number of active tasks has changed. In order to display the global network activity indicator you can configure this like so: ```swift HTTPManager.networkActivityHandler = { active in UIApplication.sharedApplication().networkActivityIndicatorVisible = active > 0 } ``` #### Automatic Retrying of Failed Requests PMHTTP includes support for automatically retrying failed requests according to a configurable policy. The default retry policy can be configured with `HTTPManager.defaultRetryBehavior`, which can be overridden on individual requests with `HTTPManagerRequest.retryBehavior`. A few common retry policies are provided as convenience methods on `HTTPManagerRetryBehavior`, but any custom policy is supported as well. The convenience policies implement intelligent handling of the various `NSURLErrorDomain` errors, such as not retrying when encountering a non-transient error (such as `NSURLErrorAppTransportSecurityRequiresSecureConnection`), or retrying non-idempotent requests if the error indicates the server never received the request (e.g. `NSURLErrorCannotConnectToHost`). By default, retrying is disabled. #### Cache Handling PMHTTP implements intelligent cache handling for JSON responses. The HTTP standard allows user agents to cache responses at their discretion when the response does not include caching headers. However, this behavior is inappropriate for most REST API requests, and `URLSession` does not document its caching strategy for such responses. To handle this case, PMHTTP inspects JSON responses for appropriate caching headers and explicitly prevents responses from being cached if they do not include the appropriate cache directives. 
By default this behavior is only applied to requests created with `.parseAsJSON()`, `.parseAsJSON(using:)`, or `.decodeAsJSON(_:)`, although it can be overridden on a per-request basis (see `HTTPManagerRequest.defaultResponseCacheStoragePolicy`). Notably, requests created with `.parse(using:)` do not use this cache strategy as it would interfere with caching image requests. #### Mocking PMHTTP has built-in support for mocking network requests. This is done without swizzling (so it's safe to mock requests even in App Store builds), and it's done in a fashion that still creates a valid `URLSessionTask` (so any code that inspects `HTTPManagerTask.networkTask` will function as expected). Mocks can be registered on the `HTTPManager` as a whole, and individual requests can be independently mocked (so you can control whether a request is mocked based on more than just the URL in question). #### Testing PMHTTP itself has a comprehensive test suite, covering just about everything in the Swift API (the Obj-C–specific API is not currently tested, see [issue #7](https://github.com/postmates/PMHTTP/issues/7)). The tests are run against a custom HTTP/1.1 server implemented in the test bundle that listens on the loopback interface. This allows for testing all the functionality without any dependencies on external services and ensures the tests are very fast. The HTTP/1.1 server currently relies on [CocoaAsyncSocket][], which can be installed with `carthage bootstrap`. This dependency is not exposed to clients of PMHTTP as it's only used by the test suite. [CocoaAsyncSocket]: https://github.com/robbiehanson/CocoaAsyncSocket The HTTP/1.1 server implements just about everything that I thought was useful. It has a few minor dependencies on PMHTTP itself (most notably, it uses `HTTPManagerRequest.HTTPHeaders` instead of reimplementing the functionality), but beyond that, it could actually be pulled out and used anywhere else that an HTTP/1.1 server is required. 
However, as this server was written for the purposes of testing and not production use, it does not have any built-in mitigation of DOS attacks beyond rejecting uploads greater than 5MiB (for example, it does not impose any limit on headers, which are kept in memory, and it does not have any sort of timeout on connection duration). It also does not have any tests itself, beyond the fact that it behaves as expected when used in the PMHTTP test suite. ## Requirements Requires a minimum of iOS 8, macOS 10.10, watchOS 2.0, or tvOS 9.0. ## Installation After installation with any mechanism, you can use this by adding `import PMHTTP` to your code. ### Carthage To install using [Carthage][], add the following to your Cartfile: ``` github "postmates/PMHTTP" ~> 4.0 ``` This release supports Swift 4.0. For Swift 3.x you can use ``` github "postmates/PMHTTP" ~> 3.0 ``` ### CocoaPods To install using [CocoaPods](https://cocoapods.org), add the following to your Podfile: ``` pod "PMHTTP", "~> 4.0" ``` This release supports Swift 4.0. For Swift 3.x you can use: ``` pod "PMHTTP", "~> 3.0" ``` ## License Licensed under either of * Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) at your option. ### Contribution Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you shall be dual licensed as above, without any additional terms or conditions. ## Version History #### v4.5.0 (2019-11-27) * Add Obj-C convenience functions for creating upload requests with an `NSData` but no explicit `contentType` ([#65][]). * Fix the Obj-C `-setValue:forHeaderField:` and `-setValue:forDefaultHeaderField:` methods to take a nullable value ([#67][]). * Add `HTTPManagerRetryBehavior.init(any:)` to combine multiple retry behaviors together ([#69][]). 
* Add `HTTPManagerRequest.HTTPHeaders` methods `init(minimumCapacity:)` and `reserveCapacity(_:)` and property `capacity` ([#66][]).

[#65]: https://github.com/postmates/PMHTTP/issues/65 "Obj-C convenience functions for requests with data"
[#66]: https://github.com/postmates/PMHTTP/issues/66 "Add HTTPManagerRequest.HTTPHeaders.init(minimumCapacity:) and .reserveCapacity(_:)"
[#67]: https://github.com/postmates/PMHTTP/issues/67 "Obj-C -[HTTPManagerRequest setValue:forHeaderField:] should take nullable value"
[#69]: https://github.com/postmates/PMHTTP/issues/69 "HTTPManagerRetryBehavior should support composition"

#### v4.4.3 (2019-11-14)

* Support PMJSON 4.x in addition to PMJSON 3.x with CocoaPods. Carthage doesn't support that kind of version range so it's now just set to PMJSON 4.x only.

#### v4.4.2 (2019-08-13)

* Fix a bug with the deprecated `HTTPManagerObjectParseRequest.credential` property where assigning to the property wouldn't work.

#### v4.4.1 (2019-04-24)

* Work around a CocoaPods bug with Swift versions ([CocoaPods/CocoaPods#8635][]).

[CocoaPods/CocoaPods#8635]: https://github.com/CocoaPods/CocoaPods/issues/8635 "Unrecognized 'swift_version' key · Issue #8635 · CocoaPods/CocoaPods"

#### v4.4.0 (2019-04-23)

* Fix a bug when parsing images where we passed the wrong value for the type identifier hint, resulting in a warning being logged to the console ([#62][]).
* Add computed properties on `HTTPManagerError` for convenient access to the associated values (e.g. `.response`, `.body`, etc).
* Add computed property `HTTPManagerError.statusCode` that returns the failing status code for the error, or `nil` for `.unexpectedContentType` ([#60][]).
* Add Obj-C function `PMHTTPErrorGetStatusCode()` that returns the failing status code for the error, or `nil` for `PMHTTPErrorUnexpectedContentType` or for non-PMHTTP errors ([#60][]).
* Provide `PMHTTPStatusCodeErrorKey` user info key for more error types ([#59][]).
* Add computed property `URLResponse.isUnmockedInterceptedRequest` that can be used to test if a response comes from a request that was intercepted by the mock manager without a mock installed ([#46][]). [#46]: https://github.com/postmates/PMHTTP/issues/46 "Consider making intercepted unmocked requests return 501 instead of 500 · Issue #46 · postmates/PMHTTP" [#59]: https://github.com/postmates/PMHTTP/issues/59 "PMHTTPStatusCodeErrorKey should be used for unauthorized and unexpectedNoContent · Issue #59 · postmates/PMHTTP" [#60]: https://github.com/postmates/PMHTTP/issues/60 "HTTPManagerError should have .statusCode property · Issue #60 · postmates/PMHTTP" [#62]: https://github.com/postmates/PMHTTP/issues/62 "Unknown Hint Identifier for Image MIME Types · Issue #62 · postmates/PMHTTP" #### v4.3.3 (2019-04-07) * Update `PMHTTPErrorIsFailedResponse` to handle `PMHTTPErrorUnexpectedNoContent` and `PMHTTPErrorUnexpectedRedirect` in addition to `PMHTTPErrorFailedResponse` and `PMHTTPErrorUnauthorized`. * Fix warnings introduced by Xcode 10.2. #### v4.3.2 (2018-11-14) * Fix bug where requests constructed from a `URL` would not inherit environmental defaults (e.g. auth, headers, etc) ([#52][]). [#52]: https://github.com/postmates/PMHTTP/issues/52 "Requests constructed with relative URLs do not inherit environment configuration · Issue #52 · postmates/PMHTTP" #### v4.3.1 (2018-08-01) * Add `URLProtocol` method overloads to query and set protocol properties on `HTTPManagerRequest`s ([#43][]). [#43]: https://github.com/postmates/PMHTTP/issues/43 "Let me attach arbitrary URLProtocol properties to a request · Issue #43 · postmates/PMHTTP" #### v4.3.0 (2018-07-26) * Expose `HTTPManagerTask.userInitiated` as a public property ([#42][]). * Add another parameter to the `HTTPManager.MetricsCallback` callback. In order to retain backwards compatibility, the old initializer and property were deprecated and a new initializer and property were added with different names ([#41][]). 
[#41]: https://github.com/postmates/PMHTTP/issues/41 [#42]: https://github.com/postmates/PMHTTP/issues/42 #### v4.2.0 (2018-07-10) * Percent-encode more characters for `application/x-www-form-urlencoded` bodies and query strings. Notably, semicolon (;) is now percent-encoded, as some servers treat it as a separator. * Optimize task metrics collection such that metrics are not collected if `metricsCallback` is `nil` ([#37][]). * Extend built-in retry behaviors to support custom strategies ([#35][]). * Add `HTTPManagerRequest` properties that correspond to the `URLRequest` properties `mainDocumentURL` and `httpShouldHandleCookies` ([#40][]). [#35]: https://github.com/postmates/PMHTTP/issues/35 [#37]: https://github.com/postmates/PMHTTP/issues/37 [#40]: https://github.com/postmates/PMHTTP/issues/40 #### v4.1.1 (2018-06-21) * Add `HTTPHeaders.merge(_:uniquingKeysWith:)` and `HTTPHeaders.merging(_:uniquingKeysWith:)`. * Deprecate `HTTPHeaders.append(contentsOf:)`. * Merge header fields when calling `HTTPManagerRequest.setDefaultEnvironmentalProperties()`, giving priority to existing request header fields in the case of a conflict. #### v4.1.0 (2018-06-15) * Support mocking relative URLs when no environment has been set. * Shrink the default mock delay to 10ms. * Make `HTTPManagerRequest.headerFields` mutable in Obj-C. * Add `HTTPManager.defaultHeaderFields` which defines the default header fields to attach to requests and, like `defaultAuth`, only applies to requests within the current environment. * Declare conformance to `Equatable` for `HTTPManagerRequest.HTTPHeaders`. * Fix fatal error when using deferred multipart bodies along with `serverRequiresContentLength`. * Add `HTTPManager.metricsCallback` to collect task metrics (`URLSessionTaskMetrics`) from all tasks associated with the `HTTPManager`. #### v4.0.1 (2018-05-17) * Fix `<PMHTTP/PMHTTP.h>` so it can be imported from Obj-C++ code. #### v4.0.0 (2018-02-23) * Convert to Swift 4. 
* Add a method `HTTPManagerParseRequest.map(_:)`. * Add methods `HTTPManagerDataRequest.decodeAsJSON(_:with:options:)` and `HTTPManagerActionRequest.decodeAsJSON(_:with:options:)`. #### v3.0.5 (2017-09-11) * Extend `HTTPAuth` to support handling 403 Forbidden errors as well. #### v3.0.4 (2017-09-05) * Support Swift 3.2. * Handle `serverRequiresContentLength` correctly in `preparedURLRequest`. #### v3.0.3 (2017-08-18) * Add overloads to the request creation methods that take a `URL`. These overloads return a non-optional request. * Add new property `HTTPManagerRequest.serverRequiresContentLength`. This disables streaming body support (for JSON and multipart/mixed) and instead encodes the body synchronously so it can provide a `"Content-Length"` header to the server. There is a corresponding `HTTPManager.defaultServerRequiresContentLength` property as well. * Add a method `HTTPManagerRequest.setDefaultEnvironmentalProperties()` that sets properties to the `HTTPManager`-defined defaults that otherwise are only set if the request's path matches the environment. This is primarily intended for requests constructed using absolute paths (e.g. `HTTP.request(GET: "/foo")`) that should still use the environment defaults. Right now this method only sets `auth` and `serverRequiresContentLength`. #### v3.0.2 (2017-05-01) * Add `@discardableResult` to Obj-C `-performRequest…` methods. #### v3.0.1 (2017-03-28) * Fix Xcode 8.3 compatibility. #### v3.0.0 (2017-02-27) * Preserve network task priority when retrying tasks. * Add convenience Obj-C function `PMHTTPErrorIsFailedResponse` to test PMHTTP errors easily. * Add methods `.parseAsImage(scale:)` and `.parseAsImage(scale:using:)` to `HTTPManagerDataRequest` and `HTTPManagerActionRequest`. * When a session is reset, cancel any tasks that were created but never resumed. * Ensure that the completion block is always deallocated on either the completion queue or on the thread that created the task. 
Previously there was a very subtle race that meant the completion block could deallocate on the `URLSession`'s delegate queue instead. This only matters if your completion block captures values whose `deinit` cares about the current thread. * Expand dictionaries, arrays, and sets passed as parameters. Dictionaries produce keys of the form `"foo[bar]"` and arrays and sets just use the key multiple times (e.g. `"foo=bar&foo=qux"`). The expansion is recursive. The order of values from expanded dictionaries and sets is implementation-defined. If you want `"array[]"` syntax, then put the `"[]"` in the key itself. See the documentation comments for more details. Do note that this behavior is slightly different from what AFNetworking does. * Also expand nested `URLQueryItem`s in parameters. The resulting parameter uses dictionary syntax (`"foo[bar]"`). * Change the type signature of the Obj-C parse methods that take handlers to make the error parameter non-optional. * Provide a callback that can be used for session-level authentication challenges. This can be used to implement SSL pinning using something like [TrustKit](https://github.com/datatheorem/TrustKit). * Fix a small memory leak when retrying tasks. * Rework how authorization works. The `defaultCredential` and `credential` properties have been replaced with `defaultAuth` and `auth`, using a brand new protocol `HTTPAuth`. An implementation of Basic authentication is provided with the `HTTPBasicAuth` object. This new authentication mechanism has been designed to allow for OAuth2-style refreshes, and a helper class `HTTPRefreshableAuth` is provided to make it easy to implement refreshable authentication. #### v2.0.1 (2017-01-05) * Fix PMJSON dependency in CocoaPods podspec. #### v2.0.0 (2017-01-03) * Support `text/json` in addition to `application/json`. * Add 2 convenience methods for uploading `UIImage`s as PNG or JPEG data. * Add `objcError` property to `PMHTTPResult`. 
* Change `objcError` on `HTTPManagerTaskResult` to `Error?` instead of `NSError?`. * Fix Xcode 8.1 compatibility of unit tests. * Add optional `options` parameter to `parseAsJSON()` and `parseAsJSON(with:)`. * Add `withMultipartBody(using:)` to `HTTPManagerUploadFormRequest`. * Rename `parse(with:)`, `parseAsJSON(options:with:)`, and `addMultipartBody(with:)` to use the parameter name `using:` instead, which is more in line with Swift 3 Foundation naming conventions. #### v1.0.4 (2016-10-20) * Add more Obj-C request constructors. * Fix encoding of `+` characters in query strings and `application/x-www-form-urlencoded` bodies. #### v1.0.3 (2016-09-23) * Fix obj-c name of `HTTPManager.parsedDateHeader(from:)`. #### v1.0.2 (2016-09-22) * Add fix-its for the Swift 3 API changes. #### v1.0.1 (2016-09-12) * Adopt `CustomNSError` and deprecate the `NSError` bridging methods. * Add autoreleasepools to dispatch queues where appropriate. * Fix CocoaPods support. #### v1.0.0 (2016-09-09) * Support Swift 3.0. #### v0.9.3 (2016-09-09) * Fix building for tvOS. #### v0.9.2 (2016-09-09) * Support Swift 2.3. #### v0.9.1 (2016-08-17) * Rename Source folder to Sources. * CocoaPods support. #### v0.9 (2016-08-05) Initial release.
{ "pile_set_name": "Github" }
// Copyright 2018 The Abseil Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #include "absl/strings/charconv.h" #include <algorithm> #include <cassert> #include <cmath> #include <cstring> #include "absl/base/casts.h" #include "absl/base/internal/bits.h" #include "absl/numeric/int128.h" #include "absl/strings/internal/charconv_bigint.h" #include "absl/strings/internal/charconv_parse.h" // The macro ABSL_BIT_PACK_FLOATS is defined on x86-64, where IEEE floating // point numbers have the same endianness in memory as a bitfield struct // containing the corresponding parts. // // When set, we replace calls to ldexp() with manual bit packing, which is // faster and is unaffected by floating point environment. #ifdef ABSL_BIT_PACK_FLOATS #error ABSL_BIT_PACK_FLOATS cannot be directly set #elif defined(__x86_64__) || defined(_M_X64) #define ABSL_BIT_PACK_FLOATS 1 #endif // A note about subnormals: // // The code below talks about "normals" and "subnormals". A normal IEEE float // has a fixed-width mantissa and power of two exponent. For example, a normal // `double` has a 53-bit mantissa. Because the high bit is always 1, it is not // stored in the representation. The implicit bit buys an extra bit of // resolution in the datatype. // // The downside of this scheme is that there is a large gap between DBL_MIN and // zero. (Large, at least, relative to the different between DBL_MIN and the // next representable number). 
This gap is softened by the "subnormal" numbers,
// which have the same power-of-two exponent as DBL_MIN, but no implicit 53rd
// bit.  An all-bits-zero exponent in the encoding represents subnormals.  (Zero
// is represented as a subnormal with an all-bits-zero mantissa.)
//
// The code below, in calculations, represents the mantissa as a uint64_t.  The
// end result normally has the 53rd bit set.  It represents subnormals by using
// narrower mantissas.

namespace absl {
namespace {

// Per-float-type traits (mantissa width, exponent range, and bit-building
// helpers) used by the decimal-to-binary conversion below.  Only the `double`
// and `float` specializations are defined.
template <typename FloatType>
struct FloatTraits;

template <>
struct FloatTraits<double> {
  // The number of mantissa bits in the given float type.  This includes the
  // implied high bit.
  static constexpr int kTargetMantissaBits = 53;

  // The largest supported IEEE exponent, in our integral mantissa
  // representation.
  //
  // If `m` is the largest possible int kTargetMantissaBits bits wide, then
  // m * 2**kMaxExponent is exactly equal to DBL_MAX.
  static constexpr int kMaxExponent = 971;

  // The smallest supported IEEE normal exponent, in our integral mantissa
  // representation.
  //
  // If `m` is the smallest possible int kTargetMantissaBits bits wide, then
  // m * 2**kMinNormalExponent is exactly equal to DBL_MIN.
  static constexpr int kMinNormalExponent = -1074;

  // Returns a NaN whose payload is derived from `tagp`, as with nan(tagp).
  static double MakeNan(const char* tagp) {
    // Support nan no matter which namespace it's in.  Some platforms
    // incorrectly don't put it in namespace std.
    using namespace std;  // NOLINT
    return nan(tagp);
  }

  // Builds a nonzero floating point number out of the provided parts.
  //
  // This is intended to do the same operation as ldexp(mantissa, exponent),
  // but using purely integer math, to avoid -ffastmath and floating
  // point environment issues.  Using type punning is also faster. We fall back
  // to ldexp on a per-platform basis for portability.
  //
  // `exponent` must be between kMinNormalExponent and kMaxExponent.
  //
  // `mantissa` must either be exactly kTargetMantissaBits wide, in which case
  // a normal value is made, or it must be less narrow than that, in which case
  // `exponent` must be exactly kMinNormalExponent, and a subnormal value is
  // made.
  static double Make(uint64_t mantissa, int exponent, bool sign) {
#ifndef ABSL_BIT_PACK_FLOATS
    // Support ldexp no matter which namespace it's in.  Some platforms
    // incorrectly don't put it in namespace std.
    using namespace std;  // NOLINT
    return sign ? -ldexp(mantissa, exponent) : ldexp(mantissa, exponent);
#else
    // Low 52 bits of the encoding: the explicit mantissa bits (the implied
    // high bit is dropped below for normal values).
    constexpr uint64_t kMantissaMask =
        (uint64_t(1) << (kTargetMantissaBits - 1)) - 1;
    uint64_t dbl = static_cast<uint64_t>(sign) << 63;
    if (mantissa > kMantissaMask) {
      // Normal value.
      // Adjust by 1023 for the exponent representation bias, and an additional
      // 52 due to the implied decimal point in the IEEE mantissa
      // representation.
      dbl += uint64_t{exponent + 1023u + kTargetMantissaBits - 1} << 52;
      mantissa &= kMantissaMask;
    } else {
      // subnormal value: all exponent bits stay zero and the (narrower)
      // mantissa is stored as-is.
      assert(exponent == kMinNormalExponent);
    }
    dbl += mantissa;
    return absl::bit_cast<double>(dbl);
#endif  // ABSL_BIT_PACK_FLOATS
  }
};

// Specialization of floating point traits for the `float` type.  See the
// FloatTraits<double> specialization above for meaning of each of the following
// members and methods.
template <>
struct FloatTraits<float> {
  static constexpr int kTargetMantissaBits = 24;
  static constexpr int kMaxExponent = 104;
  static constexpr int kMinNormalExponent = -149;
  static float MakeNan(const char* tagp) {
    // Support nanf no matter which namespace it's in.  Some platforms
    // incorrectly don't put it in namespace std.
    using namespace std;  // NOLINT
    return nanf(tagp);
  }
  static float Make(uint32_t mantissa, int exponent, bool sign) {
#ifndef ABSL_BIT_PACK_FLOATS
    // Support ldexpf no matter which namespace it's in.  Some platforms
    // incorrectly don't put it in namespace std.
    using namespace std;  // NOLINT
    return sign ? -ldexpf(mantissa, exponent) : ldexpf(mantissa, exponent);
#else
    constexpr uint32_t kMantissaMask =
        (uint32_t(1) << (kTargetMantissaBits - 1)) - 1;
    uint32_t flt = static_cast<uint32_t>(sign) << 31;
    if (mantissa > kMantissaMask) {
      // Normal value.
      // Adjust by 127 for the exponent representation bias, and an additional
      // 23 due to the implied decimal point in the IEEE mantissa
      // representation.
      flt += uint32_t{exponent + 127u + kTargetMantissaBits - 1} << 23;
      mantissa &= kMantissaMask;
    } else {
      // subnormal value: all exponent bits stay zero and the (narrower)
      // mantissa is stored as-is.
      assert(exponent == kMinNormalExponent);
    }
    flt += mantissa;
    return absl::bit_cast<float>(flt);
#endif  // ABSL_BIT_PACK_FLOATS
  }
};

// Decimal-to-binary conversions require coercing powers of 10 into a mantissa
// and a power of 2.  The two helper functions Power10Mantissa(n) and
// Power10Exponent(n) perform this task.  Together, these represent a hand-
// rolled floating point value which is equal to or just less than 10**n.
//
// The return values satisfy two range guarantees:
//
//   Power10Mantissa(n) * 2**Power10Exponent(n) <= 10**n
//     < (Power10Mantissa(n) + 1) * 2**Power10Exponent(n)
//
//   2**63 <= Power10Mantissa(n) < 2**64.
//
// Lookups into the power-of-10 table must first check the Power10Overflow() and
// Power10Underflow() functions, to avoid out-of-bounds table access.
//
// Indexes into these tables are biased by -kPower10TableMin, and the table has
// values in the range [kPower10TableMin, kPower10TableMax].
extern const uint64_t kPower10MantissaTable[];
extern const int16_t kPower10ExponentTable[];

// The smallest allowed value for use with the Power10Mantissa() and
// Power10Exponent() functions below.  (If a smaller exponent is needed in
// calculations, the end result is guaranteed to underflow.)
constexpr int kPower10TableMin = -342;

// The largest allowed value for use with the Power10Mantissa() and
// Power10Exponent() functions below.  (If a larger exponent is needed in
// calculations, the end result is guaranteed to overflow.)
constexpr int kPower10TableMax = 308;

// Looks up the hand-rolled 64-bit mantissa for 10**n.  `n` must already have
// been range-checked with Power10Overflow()/Power10Underflow().
uint64_t Power10Mantissa(int n) {
  const int index = n - kPower10TableMin;
  return kPower10MantissaTable[index];
}

// Looks up the binary exponent paired with Power10Mantissa(n).
int Power10Exponent(int n) {
  const int index = n - kPower10TableMin;
  return kPower10ExponentTable[index];
}

// Returns true if n is large enough that 10**n always results in an IEEE
// overflow.
bool Power10Overflow(int n) { return kPower10TableMax < n; }

// Returns true if n is small enough that 10**n times a ParsedFloat mantissa
// always results in an IEEE underflow.
bool Power10Underflow(int n) { return n < kPower10TableMin; }

// Returns true if Power10Mantissa(n) * 2**Power10Exponent(n) represents 10**n
// exactly, i.e. the table entry for n carries no truncation error.
bool Power10Exact(int n) { return 0 <= n && n <= 27; }

// Sentinel exponent values used in a CalculatedFloat to flag a result that is
// too large, or too close to zero, for the target float type.
constexpr int kOverflow = 99999;
constexpr int kUnderflow = -99999;

// The computed conversion result for a positive (nonzero) number: the value is
// mantissa * 2**exponent, with `mantissa` treated as an integer already sized
// to the target IEEE mantissa width (narrower for subnormals).  An overflowed
// or underflowed conversion is signalled via exponent == kOverflow or
// exponent == kUnderflow.
struct CalculatedFloat {
  uint64_t mantissa = 0;
  int exponent = 0;
};

// Returns the bit width of the given uint128, i.e. 128 minus its count of
// leading zero bits.
int BitWidth(uint128 value) {
  if (Uint128High64(value) == 0) {
    // Value fits in the low word; count within 64 bits.
    return 64 - base_internal::CountLeadingZeros64(Uint128Low64(value));
  }
  return 128 - base_internal::CountLeadingZeros64(Uint128High64(value));
}

// Calculates how far to the right a mantissa needs to be shifted to create a
// properly adjusted mantissa for an IEEE floating point number.
//
// `mantissa_width` is the bit width of the mantissa to be shifted, and
// `binary_exponent` is the exponent of the number before the shift.
//
// This accounts for subnormal values, and will return a larger-than-normal
// shift if binary_exponent would otherwise be too low.
template <typename FloatType>
int NormalizedShiftSize(int mantissa_width, int binary_exponent) {
  // Shift needed to reach the target mantissa width for a normal value.
  const int normal_shift =
      mantissa_width - FloatTraits<FloatType>::kTargetMantissaBits;
  // Shift needed so the exponent does not drop below the minimum normal
  // exponent (produces a narrower, subnormal mantissa when larger).
  const int minimum_shift =
      FloatTraits<FloatType>::kMinNormalExponent - binary_exponent;
  return std::max(normal_shift, minimum_shift);
}

// Right shifts a uint128 so that it has the requested bit width.  (The
// resulting value will have 128 - bit_width leading zeroes.)  The initial
// `value` must be wider than the requested bit width.
//
// Returns the number of bits shifted.
int TruncateToBitWidth(int bit_width, uint128* value) {
  const int current_bit_width = BitWidth(*value);
  // Positive by precondition: `value` is wider than `bit_width`.
  const int shift = current_bit_width - bit_width;
  *value >>= shift;
  return shift;
}

// Checks if the given ParsedFloat represents one of the edge cases that are
// not dependent on number base: zero, infinity, or NaN.  If so, sets *value
// to the appropriate value of type FloatType, and returns true.
template <typename FloatType>
bool HandleEdgeCase(const strings_internal::ParsedFloat& input, bool negative,
                    FloatType* value) {
  if (input.type == strings_internal::FloatType::kNan) {
    // A bug in both clang and gcc would cause the compiler to optimize away the
    // buffer we are building below.  Declaring the buffer volatile avoids the
    // issue, and has no measurable performance impact in microbenchmarks.
    //
    // https://bugs.llvm.org/show_bug.cgi?id=37778
    // https://gcc.gnu.org/bugzilla/show_bug.cgi?id=86113
    constexpr ptrdiff_t kNanBufferSize = 128;
    volatile char n_char_sequence[kNanBufferSize];
    if (input.subrange_begin == nullptr) {
      // No NaN payload tag was parsed; pass an empty tag string.
      n_char_sequence[0] = '\0';
    } else {
      // Copy the parsed tag into a NUL-terminated buffer, truncating to fit.
      ptrdiff_t nan_size = input.subrange_end - input.subrange_begin;
      nan_size = std::min(nan_size, kNanBufferSize - 1);
      std::copy_n(input.subrange_begin, nan_size, n_char_sequence);
      n_char_sequence[nan_size] = '\0';
    }
    // const_cast strips the volatile qualifier so the buffer can be passed to
    // MakeNan (which takes a plain char*).
    char* nan_argument = const_cast<char*>(n_char_sequence);
    *value = negative ? -FloatTraits<FloatType>::MakeNan(nan_argument)
                      : FloatTraits<FloatType>::MakeNan(nan_argument);
    return true;
  }
  if (input.type == strings_internal::FloatType::kInfinity) {
    *value = negative ? -std::numeric_limits<FloatType>::infinity()
                      : std::numeric_limits<FloatType>::infinity();
    return true;
  }
  if (input.mantissa == 0) {
    // Zero mantissa: produce a (correctly signed) zero.
    *value = negative ? -0.0 : 0.0;
    return true;
  }
  return false;
}

// Given a CalculatedFloat result of a from_chars conversion, generate the
// correct output values.
//
// CalculatedFloat can represent an underflow or overflow, in which case the
// error code in *result is set.  Otherwise, the calculated floating point
// number is stored in *value.
template <typename FloatType>
void EncodeResult(const CalculatedFloat& calculated, bool negative,
                  absl::from_chars_result* result, FloatType* value) {
  if (calculated.exponent == kOverflow) {
    // Overflow: report out-of-range and saturate to the largest finite value.
    result->ec = std::errc::result_out_of_range;
    *value = negative ? -std::numeric_limits<FloatType>::max()
                      : std::numeric_limits<FloatType>::max();
    return;
  } else if (calculated.mantissa == 0 || calculated.exponent == kUnderflow) {
    // Underflow: report out-of-range and return a (signed) zero.
    result->ec = std::errc::result_out_of_range;
    *value = negative ? -0.0 : 0.0;
    return;
  }
  *value = FloatTraits<FloatType>::Make(calculated.mantissa,
                                        calculated.exponent, negative);
}

// Returns the given uint128 shifted to the right by `shift` bits, and rounds
// the remaining bits using round_to_nearest logic.
// (Continuation of the comment above.)  The value is returned as a
// uint64_t, since this is the type used by this library for storing calculated
// floating point mantissas.
//
// It is expected that the width of the input value shifted by `shift` will
// be the correct bit-width for the target mantissa, which is strictly narrower
// than a uint64_t.
//
// If `input_exact` is false, then a nonzero error epsilon is assumed. For
// rounding purposes, the true value being rounded is strictly greater than the
// input value. The error may represent a single lost carry bit.
//
// When input_exact, shifted bits of the form 1000000... represent a tie, which
// is broken by rounding to even -- the rounding direction is chosen so the low
// bit of the returned value is 0.
//
// When !input_exact, shifted bits of the form 10000000... represent a value
// strictly greater than one half (due to the error epsilon), and so ties are
// always broken by rounding up.
//
// When !input_exact, shifted bits of the form 01111111... are uncertain;
// the true value may or may not be greater than 10000000..., due to the
// possible lost carry bit. The correct rounding direction is unknown. In this
// case, the result is rounded down, and `output_exact` is set to false.
//
// Zero and negative values of `shift` are accepted, in which case the word is
// shifted left, as necessary.
uint64_t ShiftRightAndRound(uint128 value, int shift, bool input_exact,
                            bool* output_exact) {
  if (shift <= 0) {
    // Left shift (or no-op): no bits are discarded, so exactness is unchanged.
    *output_exact = input_exact;
    return static_cast<uint64_t>(value << -shift);
  }
  if (shift >= 128) {
    // Exponent is so small that we are shifting away all significant bits.
    // Answer will not be representable, even as a subnormal, so return a zero
    // mantissa (which represents underflow).
    *output_exact = true;
    return 0;
  }
  *output_exact = true;
  // Mask of the bits about to be shifted out, and the tie point (a single 1
  // in the highest discarded bit position).
  const uint128 shift_mask = (uint128(1) << shift) - 1;
  const uint128 halfway_point = uint128(1) << (shift - 1);
  const uint128 shifted_bits = value & shift_mask;
  value >>= shift;
  if (shifted_bits > halfway_point) {
    // Shifted bits greater than 10000... require rounding up.
    return static_cast<uint64_t>(value + 1);
  }
  if (shifted_bits == halfway_point) {
    // In exact mode, shifted bits of 10000... mean we're exactly halfway
    // between two numbers, and we must round to even. So only round up if
    // the low bit of `value` is set.
    //
    // In inexact mode, the nonzero error means the actual value is greater
    // than the halfway point and we must always round up.
    if ((value & 1) == 1 || !input_exact) {
      ++value;
    }
    return static_cast<uint64_t>(value);
  }
  if (!input_exact && shifted_bits == halfway_point - 1) {
    // Rounding direction is unclear, due to error.  Caller must fall back to
    // exact big-integer arithmetic (MustRoundUp below).
    *output_exact = false;
  }
  // Otherwise, round down.
  return static_cast<uint64_t>(value);
}

// Checks if a floating point guess needs to be rounded up, using high precision
// math.
//
// `guess_mantissa` and `guess_exponent` represent a candidate guess for the
// number represented by `parsed_decimal`.
//
// The exact number represented by `parsed_decimal` must lie between the two
// numbers:
//   A = `guess_mantissa * 2**guess_exponent`
//   B = `(guess_mantissa + 1) * 2**guess_exponent`
//
// This function returns false if `A` is the better guess, and true if `B` is
// the better guess, with rounding ties broken by rounding to even.
bool MustRoundUp(uint64_t guess_mantissa, int guess_exponent,
                 const strings_internal::ParsedFloat& parsed_decimal) {
  // 768 is the number of digits needed in the worst case. We could determine a
  // better limit dynamically based on the value of parsed_decimal.exponent.
  // This would optimize pathological input cases only. (Sane inputs won't have
  // hundreds of digits of mantissa.)
  absl::strings_internal::BigUnsigned<84> exact_mantissa;
  int exact_exponent = exact_mantissa.ReadFloatMantissa(parsed_decimal, 768);

  // Adjust the `guess` arguments to be halfway between A and B.
  guess_mantissa = guess_mantissa * 2 + 1;
  guess_exponent -= 1;

  // In our comparison:
  // lhs = exact = exact_mantissa * 10**exact_exponent
  //             = exact_mantissa * 5**exact_exponent * 2**exact_exponent
  // rhs = guess = guess_mantissa * 2**guess_exponent
  //
  // Because we are doing integer math, we can't directly deal with negative
  // exponents. We instead move these to the other side of the inequality.
  absl::strings_internal::BigUnsigned<84>& lhs = exact_mantissa;
  int comparison;
  if (exact_exponent >= 0) {
    lhs.MultiplyByFiveToTheNth(exact_exponent);
    absl::strings_internal::BigUnsigned<84> rhs(guess_mantissa);
    // There are powers of 2 on both sides of the inequality; reduce this to
    // a single bit-shift.
    if (exact_exponent > guess_exponent) {
      lhs.ShiftLeft(exact_exponent - guess_exponent);
    } else {
      rhs.ShiftLeft(guess_exponent - exact_exponent);
    }
    comparison = Compare(lhs, rhs);
  } else {
    // Move the power of 5 to the other side of the equation, giving us:
    // lhs = exact_mantissa * 2**exact_exponent
    // rhs = guess_mantissa * 5**(-exact_exponent) * 2**guess_exponent
    absl::strings_internal::BigUnsigned<84> rhs =
        absl::strings_internal::BigUnsigned<84>::FiveToTheNth(-exact_exponent);
    rhs.MultiplyBy(guess_mantissa);
    if (exact_exponent > guess_exponent) {
      lhs.ShiftLeft(exact_exponent - guess_exponent);
    } else {
      rhs.ShiftLeft(guess_exponent - exact_exponent);
    }
    comparison = Compare(lhs, rhs);
  }
  if (comparison < 0) {
    return false;
  } else if (comparison > 0) {
    return true;
  } else {
    // When lhs == rhs, the decimal input is exactly between A and B.
    // Round towards even -- round up only if the low bit of the initial
    // `guess_mantissa` was a 1. We shifted guess_mantissa left 1 bit at
    // the beginning of this function, so test the 2nd bit here.
    return (guess_mantissa & 2) == 2;
  }
}

// Constructs a CalculatedFloat from a given mantissa and exponent, but
// with the following normalizations applied:
//
// If rounding has caused mantissa to increase just past the allowed bit
// width, shift and adjust exponent.
//
// If exponent is too high, sets kOverflow.
//
// If mantissa is zero (representing a non-zero value not representable, even
// as a subnormal), sets kUnderflow.
template <typename FloatType>
CalculatedFloat CalculatedFloatFromRawValues(uint64_t mantissa, int exponent) {
  CalculatedFloat result;
  if (mantissa == uint64_t(1) << FloatTraits<FloatType>::kTargetMantissaBits) {
    // Rounding carried into a new top bit; renormalize by one position.
    mantissa >>= 1;
    exponent += 1;
  }
  if (exponent > FloatTraits<FloatType>::kMaxExponent) {
    result.exponent = kOverflow;
  } else if (mantissa == 0) {
    result.exponent = kUnderflow;
  } else {
    result.exponent = exponent;
    result.mantissa = mantissa;
  }
  return result;
}

// Converts a hexadecimal ParsedFloat to a CalculatedFloat.  The mantissa is
// already binary, so only a shift-and-round into the target width is needed.
template <typename FloatType>
CalculatedFloat CalculateFromParsedHexadecimal(
    const strings_internal::ParsedFloat& parsed_hex) {
  uint64_t mantissa = parsed_hex.mantissa;
  int exponent = parsed_hex.exponent;
  int mantissa_width = 64 - base_internal::CountLeadingZeros64(mantissa);
  const int shift = NormalizedShiftSize<FloatType>(mantissa_width, exponent);
  bool result_exact;
  exponent += shift;
  mantissa = ShiftRightAndRound(mantissa, shift,
                                /* input exact= */ true, &result_exact);
  // ParseFloat handles rounding in the hexadecimal case, so we don't have to
  // check `result_exact` here.
  return CalculatedFloatFromRawValues<FloatType>(mantissa, exponent);
}

// Converts a decimal ParsedFloat to a CalculatedFloat, multiplying the parsed
// mantissa by a tabulated binary approximation of its power of ten and
// rounding the 128-bit product into the target mantissa width.
template <typename FloatType>
CalculatedFloat CalculateFromParsedDecimal(
    const strings_internal::ParsedFloat& parsed_decimal) {
  CalculatedFloat result;

  // Large or small enough decimal exponents will always result in overflow
  // or underflow.
  if (Power10Underflow(parsed_decimal.exponent)) {
    result.exponent = kUnderflow;
    return result;
  } else if (Power10Overflow(parsed_decimal.exponent)) {
    result.exponent = kOverflow;
    return result;
  }

  // Otherwise convert our power of 10 into a power of 2 times an integer
  // mantissa, and multiply this by our parsed decimal mantissa.
  uint128 wide_binary_mantissa = parsed_decimal.mantissa;
  wide_binary_mantissa *= Power10Mantissa(parsed_decimal.exponent);
  int binary_exponent = Power10Exponent(parsed_decimal.exponent);

  // Discard bits that are inaccurate due to truncation error. The magic
  // `mantissa_width` constants below are justified in charconv_algorithm.md.
  // They represent the number of bits in `wide_binary_mantissa` that are
  // guaranteed to be unaffected by error propagation.
  bool mantissa_exact;
  int mantissa_width;
  if (parsed_decimal.subrange_begin) {
    // Truncated mantissa
    mantissa_width = 58;
    mantissa_exact = false;
    binary_exponent +=
        TruncateToBitWidth(mantissa_width, &wide_binary_mantissa);
  } else if (!Power10Exact(parsed_decimal.exponent)) {
    // Exact mantissa, truncated power of ten
    mantissa_width = 63;
    mantissa_exact = false;
    binary_exponent +=
        TruncateToBitWidth(mantissa_width, &wide_binary_mantissa);
  } else {
    // Product is exact
    mantissa_width = BitWidth(wide_binary_mantissa);
    mantissa_exact = true;
  }

  // Shift into a FloatType-sized mantissa, and round to nearest.
  const int shift =
      NormalizedShiftSize<FloatType>(mantissa_width, binary_exponent);
  bool result_exact;
  binary_exponent += shift;
  uint64_t binary_mantissa = ShiftRightAndRound(wide_binary_mantissa, shift,
                                                mantissa_exact, &result_exact);
  if (!result_exact) {
    // We could not determine the rounding direction using int128 math. Use
    // full resolution math instead.
    if (MustRoundUp(binary_mantissa, binary_exponent, parsed_decimal)) {
      binary_mantissa += 1;
    }
  }

  return CalculatedFloatFromRawValues<FloatType>(binary_mantissa,
                                                 binary_exponent);
}

// Shared implementation behind the double and float overloads of from_chars.
// Parses [first, last) per `fmt_flags`, handling an optional leading '-',
// an optional "0x"/"0X" hexfloat prefix (when the hex flag is not set), and
// the NaN/infinity/zero edge cases.
template <typename FloatType>
from_chars_result FromCharsImpl(const char* first, const char* last,
                                FloatType& value, chars_format fmt_flags) {
  from_chars_result result;
  result.ptr = first;  // overwritten on successful parse
  result.ec = std::errc();

  bool negative = false;
  if (first != last && *first == '-') {
    ++first;
    negative = true;
  }
  // If the `hex` flag is *not* set, then we will accept a 0x prefix and try
  // to parse a hexadecimal float.
  if ((fmt_flags & chars_format::hex) == chars_format{} && last - first >= 2 &&
      *first == '0' && (first[1] == 'x' || first[1] == 'X')) {
    const char* hex_first = first + 2;
    strings_internal::ParsedFloat hex_parse =
        strings_internal::ParseFloat<16>(hex_first, last, fmt_flags);
    if (hex_parse.end == nullptr ||
        hex_parse.type != strings_internal::FloatType::kNumber) {
      // Either we failed to parse a hex float after the "0x", or we read
      // "0xinf" or "0xnan" which we don't want to match.
      //
      // However, a std::string that begins with "0x" also begins with "0",
      // which is normally a valid match for the number zero. So we want these
      // strings to match zero unless fmt_flags is `scientific`. (This flag
      // means an exponent is required, which the std::string "0" does not
      // have.)
      if (fmt_flags == chars_format::scientific) {
        result.ec = std::errc::invalid_argument;
      } else {
        // Match just the leading "0" as the number zero.
        result.ptr = first + 1;
        value = negative ? -0.0 : 0.0;
      }
      return result;
    }
    // We matched a value.
    result.ptr = hex_parse.end;
    if (HandleEdgeCase(hex_parse, negative, &value)) {
      return result;
    }
    CalculatedFloat calculated =
        CalculateFromParsedHexadecimal<FloatType>(hex_parse);
    EncodeResult(calculated, negative, &result, &value);
    return result;
  }
  // Otherwise, we choose the number base based on the flags.
  if ((fmt_flags & chars_format::hex) == chars_format::hex) {
    // Explicit hex mode: no "0x" prefix is expected.
    strings_internal::ParsedFloat hex_parse =
        strings_internal::ParseFloat<16>(first, last, fmt_flags);
    if (hex_parse.end == nullptr) {
      result.ec = std::errc::invalid_argument;
      return result;
    }
    result.ptr = hex_parse.end;
    if (HandleEdgeCase(hex_parse, negative, &value)) {
      return result;
    }
    CalculatedFloat calculated =
        CalculateFromParsedHexadecimal<FloatType>(hex_parse);
    EncodeResult(calculated, negative, &result, &value);
    return result;
  } else {
    // Decimal mode.
    strings_internal::ParsedFloat decimal_parse =
        strings_internal::ParseFloat<10>(first, last, fmt_flags);
    if (decimal_parse.end == nullptr) {
      result.ec = std::errc::invalid_argument;
      return result;
    }
    result.ptr = decimal_parse.end;
    if (HandleEdgeCase(decimal_parse, negative, &value)) {
      return result;
    }
    CalculatedFloat calculated =
        CalculateFromParsedDecimal<FloatType>(decimal_parse);
    EncodeResult(calculated, negative, &result, &value);
    return result;
  }
  // NOTE(review): unreachable — both branches above return.
  return result;
}
}  // namespace

from_chars_result from_chars(const char* first, const char* last, double& value,
                             chars_format fmt) {
  return FromCharsImpl(first, last, value, fmt);
}

from_chars_result from_chars(const char* first, const char* last, float& value,
                             chars_format fmt) {
  return FromCharsImpl(first, last, value, fmt);
}

namespace {

// Table of powers of 10, from kPower10TableMin to kPower10TableMax.
//
// kPower10MantissaTable[i - kPower10TableMin] stores the 64-bit mantissa (high
// bit always on), and kPower10ExponentTable[i - kPower10TableMin] stores the
// power-of-two exponent. For a given number i, this gives the unique mantissa
// and exponent such that mantissa * 2**exponent <= 10**i < (mantissa + 1) *
// 2**exponent.
const uint64_t kPower10MantissaTable[] = { 0xeef453d6923bd65aU, 0x9558b4661b6565f8U, 0xbaaee17fa23ebf76U, 0xe95a99df8ace6f53U, 0x91d8a02bb6c10594U, 0xb64ec836a47146f9U, 0xe3e27a444d8d98b7U, 0x8e6d8c6ab0787f72U, 0xb208ef855c969f4fU, 0xde8b2b66b3bc4723U, 0x8b16fb203055ac76U, 0xaddcb9e83c6b1793U, 0xd953e8624b85dd78U, 0x87d4713d6f33aa6bU, 0xa9c98d8ccb009506U, 0xd43bf0effdc0ba48U, 0x84a57695fe98746dU, 0xa5ced43b7e3e9188U, 0xcf42894a5dce35eaU, 0x818995ce7aa0e1b2U, 0xa1ebfb4219491a1fU, 0xca66fa129f9b60a6U, 0xfd00b897478238d0U, 0x9e20735e8cb16382U, 0xc5a890362fddbc62U, 0xf712b443bbd52b7bU, 0x9a6bb0aa55653b2dU, 0xc1069cd4eabe89f8U, 0xf148440a256e2c76U, 0x96cd2a865764dbcaU, 0xbc807527ed3e12bcU, 0xeba09271e88d976bU, 0x93445b8731587ea3U, 0xb8157268fdae9e4cU, 0xe61acf033d1a45dfU, 0x8fd0c16206306babU, 0xb3c4f1ba87bc8696U, 0xe0b62e2929aba83cU, 0x8c71dcd9ba0b4925U, 0xaf8e5410288e1b6fU, 0xdb71e91432b1a24aU, 0x892731ac9faf056eU, 0xab70fe17c79ac6caU, 0xd64d3d9db981787dU, 0x85f0468293f0eb4eU, 0xa76c582338ed2621U, 0xd1476e2c07286faaU, 0x82cca4db847945caU, 0xa37fce126597973cU, 0xcc5fc196fefd7d0cU, 0xff77b1fcbebcdc4fU, 0x9faacf3df73609b1U, 0xc795830d75038c1dU, 0xf97ae3d0d2446f25U, 0x9becce62836ac577U, 0xc2e801fb244576d5U, 0xf3a20279ed56d48aU, 0x9845418c345644d6U, 0xbe5691ef416bd60cU, 0xedec366b11c6cb8fU, 0x94b3a202eb1c3f39U, 0xb9e08a83a5e34f07U, 0xe858ad248f5c22c9U, 0x91376c36d99995beU, 0xb58547448ffffb2dU, 0xe2e69915b3fff9f9U, 0x8dd01fad907ffc3bU, 0xb1442798f49ffb4aU, 0xdd95317f31c7fa1dU, 0x8a7d3eef7f1cfc52U, 0xad1c8eab5ee43b66U, 0xd863b256369d4a40U, 0x873e4f75e2224e68U, 0xa90de3535aaae202U, 0xd3515c2831559a83U, 0x8412d9991ed58091U, 0xa5178fff668ae0b6U, 0xce5d73ff402d98e3U, 0x80fa687f881c7f8eU, 0xa139029f6a239f72U, 0xc987434744ac874eU, 0xfbe9141915d7a922U, 0x9d71ac8fada6c9b5U, 0xc4ce17b399107c22U, 0xf6019da07f549b2bU, 0x99c102844f94e0fbU, 0xc0314325637a1939U, 0xf03d93eebc589f88U, 0x96267c7535b763b5U, 0xbbb01b9283253ca2U, 0xea9c227723ee8bcbU, 0x92a1958a7675175fU, 0xb749faed14125d36U, 
0xe51c79a85916f484U, 0x8f31cc0937ae58d2U, 0xb2fe3f0b8599ef07U, 0xdfbdcece67006ac9U, 0x8bd6a141006042bdU, 0xaecc49914078536dU, 0xda7f5bf590966848U, 0x888f99797a5e012dU, 0xaab37fd7d8f58178U, 0xd5605fcdcf32e1d6U, 0x855c3be0a17fcd26U, 0xa6b34ad8c9dfc06fU, 0xd0601d8efc57b08bU, 0x823c12795db6ce57U, 0xa2cb1717b52481edU, 0xcb7ddcdda26da268U, 0xfe5d54150b090b02U, 0x9efa548d26e5a6e1U, 0xc6b8e9b0709f109aU, 0xf867241c8cc6d4c0U, 0x9b407691d7fc44f8U, 0xc21094364dfb5636U, 0xf294b943e17a2bc4U, 0x979cf3ca6cec5b5aU, 0xbd8430bd08277231U, 0xece53cec4a314ebdU, 0x940f4613ae5ed136U, 0xb913179899f68584U, 0xe757dd7ec07426e5U, 0x9096ea6f3848984fU, 0xb4bca50b065abe63U, 0xe1ebce4dc7f16dfbU, 0x8d3360f09cf6e4bdU, 0xb080392cc4349decU, 0xdca04777f541c567U, 0x89e42caaf9491b60U, 0xac5d37d5b79b6239U, 0xd77485cb25823ac7U, 0x86a8d39ef77164bcU, 0xa8530886b54dbdebU, 0xd267caa862a12d66U, 0x8380dea93da4bc60U, 0xa46116538d0deb78U, 0xcd795be870516656U, 0x806bd9714632dff6U, 0xa086cfcd97bf97f3U, 0xc8a883c0fdaf7df0U, 0xfad2a4b13d1b5d6cU, 0x9cc3a6eec6311a63U, 0xc3f490aa77bd60fcU, 0xf4f1b4d515acb93bU, 0x991711052d8bf3c5U, 0xbf5cd54678eef0b6U, 0xef340a98172aace4U, 0x9580869f0e7aac0eU, 0xbae0a846d2195712U, 0xe998d258869facd7U, 0x91ff83775423cc06U, 0xb67f6455292cbf08U, 0xe41f3d6a7377eecaU, 0x8e938662882af53eU, 0xb23867fb2a35b28dU, 0xdec681f9f4c31f31U, 0x8b3c113c38f9f37eU, 0xae0b158b4738705eU, 0xd98ddaee19068c76U, 0x87f8a8d4cfa417c9U, 0xa9f6d30a038d1dbcU, 0xd47487cc8470652bU, 0x84c8d4dfd2c63f3bU, 0xa5fb0a17c777cf09U, 0xcf79cc9db955c2ccU, 0x81ac1fe293d599bfU, 0xa21727db38cb002fU, 0xca9cf1d206fdc03bU, 0xfd442e4688bd304aU, 0x9e4a9cec15763e2eU, 0xc5dd44271ad3cdbaU, 0xf7549530e188c128U, 0x9a94dd3e8cf578b9U, 0xc13a148e3032d6e7U, 0xf18899b1bc3f8ca1U, 0x96f5600f15a7b7e5U, 0xbcb2b812db11a5deU, 0xebdf661791d60f56U, 0x936b9fcebb25c995U, 0xb84687c269ef3bfbU, 0xe65829b3046b0afaU, 0x8ff71a0fe2c2e6dcU, 0xb3f4e093db73a093U, 0xe0f218b8d25088b8U, 0x8c974f7383725573U, 0xafbd2350644eeacfU, 0xdbac6c247d62a583U, 0x894bc396ce5da772U, 
0xab9eb47c81f5114fU, 0xd686619ba27255a2U, 0x8613fd0145877585U, 0xa798fc4196e952e7U, 0xd17f3b51fca3a7a0U, 0x82ef85133de648c4U, 0xa3ab66580d5fdaf5U, 0xcc963fee10b7d1b3U, 0xffbbcfe994e5c61fU, 0x9fd561f1fd0f9bd3U, 0xc7caba6e7c5382c8U, 0xf9bd690a1b68637bU, 0x9c1661a651213e2dU, 0xc31bfa0fe5698db8U, 0xf3e2f893dec3f126U, 0x986ddb5c6b3a76b7U, 0xbe89523386091465U, 0xee2ba6c0678b597fU, 0x94db483840b717efU, 0xba121a4650e4ddebU, 0xe896a0d7e51e1566U, 0x915e2486ef32cd60U, 0xb5b5ada8aaff80b8U, 0xe3231912d5bf60e6U, 0x8df5efabc5979c8fU, 0xb1736b96b6fd83b3U, 0xddd0467c64bce4a0U, 0x8aa22c0dbef60ee4U, 0xad4ab7112eb3929dU, 0xd89d64d57a607744U, 0x87625f056c7c4a8bU, 0xa93af6c6c79b5d2dU, 0xd389b47879823479U, 0x843610cb4bf160cbU, 0xa54394fe1eedb8feU, 0xce947a3da6a9273eU, 0x811ccc668829b887U, 0xa163ff802a3426a8U, 0xc9bcff6034c13052U, 0xfc2c3f3841f17c67U, 0x9d9ba7832936edc0U, 0xc5029163f384a931U, 0xf64335bcf065d37dU, 0x99ea0196163fa42eU, 0xc06481fb9bcf8d39U, 0xf07da27a82c37088U, 0x964e858c91ba2655U, 0xbbe226efb628afeaU, 0xeadab0aba3b2dbe5U, 0x92c8ae6b464fc96fU, 0xb77ada0617e3bbcbU, 0xe55990879ddcaabdU, 0x8f57fa54c2a9eab6U, 0xb32df8e9f3546564U, 0xdff9772470297ebdU, 0x8bfbea76c619ef36U, 0xaefae51477a06b03U, 0xdab99e59958885c4U, 0x88b402f7fd75539bU, 0xaae103b5fcd2a881U, 0xd59944a37c0752a2U, 0x857fcae62d8493a5U, 0xa6dfbd9fb8e5b88eU, 0xd097ad07a71f26b2U, 0x825ecc24c873782fU, 0xa2f67f2dfa90563bU, 0xcbb41ef979346bcaU, 0xfea126b7d78186bcU, 0x9f24b832e6b0f436U, 0xc6ede63fa05d3143U, 0xf8a95fcf88747d94U, 0x9b69dbe1b548ce7cU, 0xc24452da229b021bU, 0xf2d56790ab41c2a2U, 0x97c560ba6b0919a5U, 0xbdb6b8e905cb600fU, 0xed246723473e3813U, 0x9436c0760c86e30bU, 0xb94470938fa89bceU, 0xe7958cb87392c2c2U, 0x90bd77f3483bb9b9U, 0xb4ecd5f01a4aa828U, 0xe2280b6c20dd5232U, 0x8d590723948a535fU, 0xb0af48ec79ace837U, 0xdcdb1b2798182244U, 0x8a08f0f8bf0f156bU, 0xac8b2d36eed2dac5U, 0xd7adf884aa879177U, 0x86ccbb52ea94baeaU, 0xa87fea27a539e9a5U, 0xd29fe4b18e88640eU, 0x83a3eeeef9153e89U, 0xa48ceaaab75a8e2bU, 0xcdb02555653131b6U, 
0x808e17555f3ebf11U, 0xa0b19d2ab70e6ed6U, 0xc8de047564d20a8bU, 0xfb158592be068d2eU, 0x9ced737bb6c4183dU, 0xc428d05aa4751e4cU, 0xf53304714d9265dfU, 0x993fe2c6d07b7fabU, 0xbf8fdb78849a5f96U, 0xef73d256a5c0f77cU, 0x95a8637627989aadU, 0xbb127c53b17ec159U, 0xe9d71b689dde71afU, 0x9226712162ab070dU, 0xb6b00d69bb55c8d1U, 0xe45c10c42a2b3b05U, 0x8eb98a7a9a5b04e3U, 0xb267ed1940f1c61cU, 0xdf01e85f912e37a3U, 0x8b61313bbabce2c6U, 0xae397d8aa96c1b77U, 0xd9c7dced53c72255U, 0x881cea14545c7575U, 0xaa242499697392d2U, 0xd4ad2dbfc3d07787U, 0x84ec3c97da624ab4U, 0xa6274bbdd0fadd61U, 0xcfb11ead453994baU, 0x81ceb32c4b43fcf4U, 0xa2425ff75e14fc31U, 0xcad2f7f5359a3b3eU, 0xfd87b5f28300ca0dU, 0x9e74d1b791e07e48U, 0xc612062576589ddaU, 0xf79687aed3eec551U, 0x9abe14cd44753b52U, 0xc16d9a0095928a27U, 0xf1c90080baf72cb1U, 0x971da05074da7beeU, 0xbce5086492111aeaU, 0xec1e4a7db69561a5U, 0x9392ee8e921d5d07U, 0xb877aa3236a4b449U, 0xe69594bec44de15bU, 0x901d7cf73ab0acd9U, 0xb424dc35095cd80fU, 0xe12e13424bb40e13U, 0x8cbccc096f5088cbU, 0xafebff0bcb24aafeU, 0xdbe6fecebdedd5beU, 0x89705f4136b4a597U, 0xabcc77118461cefcU, 0xd6bf94d5e57a42bcU, 0x8637bd05af6c69b5U, 0xa7c5ac471b478423U, 0xd1b71758e219652bU, 0x83126e978d4fdf3bU, 0xa3d70a3d70a3d70aU, 0xccccccccccccccccU, 0x8000000000000000U, 0xa000000000000000U, 0xc800000000000000U, 0xfa00000000000000U, 0x9c40000000000000U, 0xc350000000000000U, 0xf424000000000000U, 0x9896800000000000U, 0xbebc200000000000U, 0xee6b280000000000U, 0x9502f90000000000U, 0xba43b74000000000U, 0xe8d4a51000000000U, 0x9184e72a00000000U, 0xb5e620f480000000U, 0xe35fa931a0000000U, 0x8e1bc9bf04000000U, 0xb1a2bc2ec5000000U, 0xde0b6b3a76400000U, 0x8ac7230489e80000U, 0xad78ebc5ac620000U, 0xd8d726b7177a8000U, 0x878678326eac9000U, 0xa968163f0a57b400U, 0xd3c21bcecceda100U, 0x84595161401484a0U, 0xa56fa5b99019a5c8U, 0xcecb8f27f4200f3aU, 0x813f3978f8940984U, 0xa18f07d736b90be5U, 0xc9f2c9cd04674edeU, 0xfc6f7c4045812296U, 0x9dc5ada82b70b59dU, 0xc5371912364ce305U, 0xf684df56c3e01bc6U, 0x9a130b963a6c115cU, 
0xc097ce7bc90715b3U, 0xf0bdc21abb48db20U, 0x96769950b50d88f4U, 0xbc143fa4e250eb31U, 0xeb194f8e1ae525fdU, 0x92efd1b8d0cf37beU, 0xb7abc627050305adU, 0xe596b7b0c643c719U, 0x8f7e32ce7bea5c6fU, 0xb35dbf821ae4f38bU, 0xe0352f62a19e306eU, 0x8c213d9da502de45U, 0xaf298d050e4395d6U, 0xdaf3f04651d47b4cU, 0x88d8762bf324cd0fU, 0xab0e93b6efee0053U, 0xd5d238a4abe98068U, 0x85a36366eb71f041U, 0xa70c3c40a64e6c51U, 0xd0cf4b50cfe20765U, 0x82818f1281ed449fU, 0xa321f2d7226895c7U, 0xcbea6f8ceb02bb39U, 0xfee50b7025c36a08U, 0x9f4f2726179a2245U, 0xc722f0ef9d80aad6U, 0xf8ebad2b84e0d58bU, 0x9b934c3b330c8577U, 0xc2781f49ffcfa6d5U, 0xf316271c7fc3908aU, 0x97edd871cfda3a56U, 0xbde94e8e43d0c8ecU, 0xed63a231d4c4fb27U, 0x945e455f24fb1cf8U, 0xb975d6b6ee39e436U, 0xe7d34c64a9c85d44U, 0x90e40fbeea1d3a4aU, 0xb51d13aea4a488ddU, 0xe264589a4dcdab14U, 0x8d7eb76070a08aecU, 0xb0de65388cc8ada8U, 0xdd15fe86affad912U, 0x8a2dbf142dfcc7abU, 0xacb92ed9397bf996U, 0xd7e77a8f87daf7fbU, 0x86f0ac99b4e8dafdU, 0xa8acd7c0222311bcU, 0xd2d80db02aabd62bU, 0x83c7088e1aab65dbU, 0xa4b8cab1a1563f52U, 0xcde6fd5e09abcf26U, 0x80b05e5ac60b6178U, 0xa0dc75f1778e39d6U, 0xc913936dd571c84cU, 0xfb5878494ace3a5fU, 0x9d174b2dcec0e47bU, 0xc45d1df942711d9aU, 0xf5746577930d6500U, 0x9968bf6abbe85f20U, 0xbfc2ef456ae276e8U, 0xefb3ab16c59b14a2U, 0x95d04aee3b80ece5U, 0xbb445da9ca61281fU, 0xea1575143cf97226U, 0x924d692ca61be758U, 0xb6e0c377cfa2e12eU, 0xe498f455c38b997aU, 0x8edf98b59a373fecU, 0xb2977ee300c50fe7U, 0xdf3d5e9bc0f653e1U, 0x8b865b215899f46cU, 0xae67f1e9aec07187U, 0xda01ee641a708de9U, 0x884134fe908658b2U, 0xaa51823e34a7eedeU, 0xd4e5e2cdc1d1ea96U, 0x850fadc09923329eU, 0xa6539930bf6bff45U, 0xcfe87f7cef46ff16U, 0x81f14fae158c5f6eU, 0xa26da3999aef7749U, 0xcb090c8001ab551cU, 0xfdcb4fa002162a63U, 0x9e9f11c4014dda7eU, 0xc646d63501a1511dU, 0xf7d88bc24209a565U, 0x9ae757596946075fU, 0xc1a12d2fc3978937U, 0xf209787bb47d6b84U, 0x9745eb4d50ce6332U, 0xbd176620a501fbffU, 0xec5d3fa8ce427affU, 0x93ba47c980e98cdfU, 0xb8a8d9bbe123f017U, 0xe6d3102ad96cec1dU, 
0x9043ea1ac7e41392U, 0xb454e4a179dd1877U, 0xe16a1dc9d8545e94U, 0x8ce2529e2734bb1dU, 0xb01ae745b101e9e4U, 0xdc21a1171d42645dU, 0x899504ae72497ebaU, 0xabfa45da0edbde69U, 0xd6f8d7509292d603U, 0x865b86925b9bc5c2U, 0xa7f26836f282b732U, 0xd1ef0244af2364ffU, 0x8335616aed761f1fU, 0xa402b9c5a8d3a6e7U, 0xcd036837130890a1U, 0x802221226be55a64U, 0xa02aa96b06deb0fdU, 0xc83553c5c8965d3dU, 0xfa42a8b73abbf48cU, 0x9c69a97284b578d7U, 0xc38413cf25e2d70dU, 0xf46518c2ef5b8cd1U, 0x98bf2f79d5993802U, 0xbeeefb584aff8603U, 0xeeaaba2e5dbf6784U, 0x952ab45cfa97a0b2U, 0xba756174393d88dfU, 0xe912b9d1478ceb17U, 0x91abb422ccb812eeU, 0xb616a12b7fe617aaU, 0xe39c49765fdf9d94U, 0x8e41ade9fbebc27dU, 0xb1d219647ae6b31cU, 0xde469fbd99a05fe3U, 0x8aec23d680043beeU, 0xada72ccc20054ae9U, 0xd910f7ff28069da4U, 0x87aa9aff79042286U, 0xa99541bf57452b28U, 0xd3fa922f2d1675f2U, 0x847c9b5d7c2e09b7U, 0xa59bc234db398c25U, 0xcf02b2c21207ef2eU, 0x8161afb94b44f57dU, 0xa1ba1ba79e1632dcU, 0xca28a291859bbf93U, 0xfcb2cb35e702af78U, 0x9defbf01b061adabU, 0xc56baec21c7a1916U, 0xf6c69a72a3989f5bU, 0x9a3c2087a63f6399U, 0xc0cb28a98fcf3c7fU, 0xf0fdf2d3f3c30b9fU, 0x969eb7c47859e743U, 0xbc4665b596706114U, 0xeb57ff22fc0c7959U, 0x9316ff75dd87cbd8U, 0xb7dcbf5354e9beceU, 0xe5d3ef282a242e81U, 0x8fa475791a569d10U, 0xb38d92d760ec4455U, 0xe070f78d3927556aU, 0x8c469ab843b89562U, 0xaf58416654a6babbU, 0xdb2e51bfe9d0696aU, 0x88fcf317f22241e2U, 0xab3c2fddeeaad25aU, 0xd60b3bd56a5586f1U, 0x85c7056562757456U, 0xa738c6bebb12d16cU, 0xd106f86e69d785c7U, 0x82a45b450226b39cU, 0xa34d721642b06084U, 0xcc20ce9bd35c78a5U, 0xff290242c83396ceU, 0x9f79a169bd203e41U, 0xc75809c42c684dd1U, 0xf92e0c3537826145U, 0x9bbcc7a142b17ccbU, 0xc2abf989935ddbfeU, 0xf356f7ebf83552feU, 0x98165af37b2153deU, 0xbe1bf1b059e9a8d6U, 0xeda2ee1c7064130cU, 0x9485d4d1c63e8be7U, 0xb9a74a0637ce2ee1U, 0xe8111c87c5c1ba99U, 0x910ab1d4db9914a0U, 0xb54d5e4a127f59c8U, 0xe2a0b5dc971f303aU, 0x8da471a9de737e24U, 0xb10d8e1456105dadU, 0xdd50f1996b947518U, 0x8a5296ffe33cc92fU, 0xace73cbfdc0bfb7bU, 
0xd8210befd30efa5aU, 0x8714a775e3e95c78U, 0xa8d9d1535ce3b396U, 0xd31045a8341ca07cU, 0x83ea2b892091e44dU, 0xa4e4b66b68b65d60U, 0xce1de40642e3f4b9U, 0x80d2ae83e9ce78f3U, 0xa1075a24e4421730U, 0xc94930ae1d529cfcU, 0xfb9b7cd9a4a7443cU, 0x9d412e0806e88aa5U, 0xc491798a08a2ad4eU, 0xf5b5d7ec8acb58a2U, 0x9991a6f3d6bf1765U, 0xbff610b0cc6edd3fU, 0xeff394dcff8a948eU, 0x95f83d0a1fb69cd9U, 0xbb764c4ca7a4440fU, 0xea53df5fd18d5513U, 0x92746b9be2f8552cU, 0xb7118682dbb66a77U, 0xe4d5e82392a40515U, 0x8f05b1163ba6832dU, 0xb2c71d5bca9023f8U, 0xdf78e4b2bd342cf6U, 0x8bab8eefb6409c1aU, 0xae9672aba3d0c320U, 0xda3c0f568cc4f3e8U, 0x8865899617fb1871U, 0xaa7eebfb9df9de8dU, 0xd51ea6fa85785631U, 0x8533285c936b35deU, 0xa67ff273b8460356U, 0xd01fef10a657842cU, 0x8213f56a67f6b29bU, 0xa298f2c501f45f42U, 0xcb3f2f7642717713U, 0xfe0efb53d30dd4d7U, 0x9ec95d1463e8a506U, 0xc67bb4597ce2ce48U, 0xf81aa16fdc1b81daU, 0x9b10a4e5e9913128U, 0xc1d4ce1f63f57d72U, 0xf24a01a73cf2dccfU, 0x976e41088617ca01U, 0xbd49d14aa79dbc82U, 0xec9c459d51852ba2U, 0x93e1ab8252f33b45U, 0xb8da1662e7b00a17U, 0xe7109bfba19c0c9dU, 0x906a617d450187e2U, 0xb484f9dc9641e9daU, 0xe1a63853bbd26451U, 0x8d07e33455637eb2U, 0xb049dc016abc5e5fU, 0xdc5c5301c56b75f7U, 0x89b9b3e11b6329baU, 0xac2820d9623bf429U, 0xd732290fbacaf133U, 0x867f59a9d4bed6c0U, 0xa81f301449ee8c70U, 0xd226fc195c6a2f8cU, 0x83585d8fd9c25db7U, 0xa42e74f3d032f525U, 0xcd3a1230c43fb26fU, 0x80444b5e7aa7cf85U, 0xa0555e361951c366U, 0xc86ab5c39fa63440U, 0xfa856334878fc150U, 0x9c935e00d4b9d8d2U, 0xc3b8358109e84f07U, 0xf4a642e14c6262c8U, 0x98e7e9cccfbd7dbdU, 0xbf21e44003acdd2cU, 0xeeea5d5004981478U, 0x95527a5202df0ccbU, 0xbaa718e68396cffdU, 0xe950df20247c83fdU, 0x91d28b7416cdd27eU, 0xb6472e511c81471dU, 0xe3d8f9e563a198e5U, 0x8e679c2f5e44ff8fU, }; const int16_t kPower10ExponentTable[] = { -1200, -1196, -1193, -1190, -1186, -1183, -1180, -1176, -1173, -1170, -1166, -1163, -1160, -1156, -1153, -1150, -1146, -1143, -1140, -1136, -1133, -1130, -1127, -1123, -1120, -1117, -1113, -1110, -1107, -1103, 
-1100, -1097, -1093, -1090, -1087, -1083, -1080, -1077, -1073, -1070, -1067, -1063, -1060, -1057, -1053, -1050, -1047, -1043, -1040, -1037, -1034, -1030, -1027, -1024, -1020, -1017, -1014, -1010, -1007, -1004, -1000, -997, -994, -990, -987, -984, -980, -977, -974, -970, -967, -964, -960, -957, -954, -950, -947, -944, -940, -937, -934, -931, -927, -924, -921, -917, -914, -911, -907, -904, -901, -897, -894, -891, -887, -884, -881, -877, -874, -871, -867, -864, -861, -857, -854, -851, -847, -844, -841, -838, -834, -831, -828, -824, -821, -818, -814, -811, -808, -804, -801, -798, -794, -791, -788, -784, -781, -778, -774, -771, -768, -764, -761, -758, -754, -751, -748, -744, -741, -738, -735, -731, -728, -725, -721, -718, -715, -711, -708, -705, -701, -698, -695, -691, -688, -685, -681, -678, -675, -671, -668, -665, -661, -658, -655, -651, -648, -645, -642, -638, -635, -632, -628, -625, -622, -618, -615, -612, -608, -605, -602, -598, -595, -592, -588, -585, -582, -578, -575, -572, -568, -565, -562, -558, -555, -552, -549, -545, -542, -539, -535, -532, -529, -525, -522, -519, -515, -512, -509, -505, -502, -499, -495, -492, -489, -485, -482, -479, -475, -472, -469, -465, -462, -459, -455, -452, -449, -446, -442, -439, -436, -432, -429, -426, -422, -419, -416, -412, -409, -406, -402, -399, -396, -392, -389, -386, -382, -379, -376, -372, -369, -366, -362, -359, -356, -353, -349, -346, -343, -339, -336, -333, -329, -326, -323, -319, -316, -313, -309, -306, -303, -299, -296, -293, -289, -286, -283, -279, -276, -273, -269, -266, -263, -259, -256, -253, -250, -246, -243, -240, -236, -233, -230, -226, -223, -220, -216, -213, -210, -206, -203, -200, -196, -193, -190, -186, -183, -180, -176, -173, -170, -166, -163, -160, -157, -153, -150, -147, -143, -140, -137, -133, -130, -127, -123, -120, -117, -113, -110, -107, -103, -100, -97, -93, -90, -87, -83, -80, -77, -73, -70, -67, -63, -60, -57, -54, -50, -47, -44, -40, -37, -34, -30, -27, -24, -20, -17, -14, -10, -7, -4, 0, 3, 6, 10, 
13, 16, 20, 23, 26, 30, 33, 36, 39, 43, 46, 49, 53, 56, 59, 63, 66, 69, 73, 76, 79, 83, 86, 89, 93, 96, 99, 103, 106, 109, 113, 116, 119, 123, 126, 129, 132, 136, 139, 142, 146, 149, 152, 156, 159, 162, 166, 169, 172, 176, 179, 182, 186, 189, 192, 196, 199, 202, 206, 209, 212, 216, 219, 222, 226, 229, 232, 235, 239, 242, 245, 249, 252, 255, 259, 262, 265, 269, 272, 275, 279, 282, 285, 289, 292, 295, 299, 302, 305, 309, 312, 315, 319, 322, 325, 328, 332, 335, 338, 342, 345, 348, 352, 355, 358, 362, 365, 368, 372, 375, 378, 382, 385, 388, 392, 395, 398, 402, 405, 408, 412, 415, 418, 422, 425, 428, 431, 435, 438, 441, 445, 448, 451, 455, 458, 461, 465, 468, 471, 475, 478, 481, 485, 488, 491, 495, 498, 501, 505, 508, 511, 515, 518, 521, 524, 528, 531, 534, 538, 541, 544, 548, 551, 554, 558, 561, 564, 568, 571, 574, 578, 581, 584, 588, 591, 594, 598, 601, 604, 608, 611, 614, 617, 621, 624, 627, 631, 634, 637, 641, 644, 647, 651, 654, 657, 661, 664, 667, 671, 674, 677, 681, 684, 687, 691, 694, 697, 701, 704, 707, 711, 714, 717, 720, 724, 727, 730, 734, 737, 740, 744, 747, 750, 754, 757, 760, 764, 767, 770, 774, 777, 780, 784, 787, 790, 794, 797, 800, 804, 807, 810, 813, 817, 820, 823, 827, 830, 833, 837, 840, 843, 847, 850, 853, 857, 860, 863, 867, 870, 873, 877, 880, 883, 887, 890, 893, 897, 900, 903, 907, 910, 913, 916, 920, 923, 926, 930, 933, 936, 940, 943, 946, 950, 953, 956, 960, }; } // namespace } // namespace absl
{ "pile_set_name": "Github" }
// // Generated by class-dump 3.5 (64 bit) (Debug version compiled Oct 25 2017 03:49:04). // // class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by Steve Nygard. // #import <AppKit/NSPopover.h> @interface TPopover : NSPopover { } - (void)updatePreferredEdge; - (id)positioningWindow; - (void)showRelativeToRect:(struct CGRect)arg1 ofView:(id)arg2 preferredEdge:(unsigned long long)arg3; @end
{ "pile_set_name": "Github" }
# CH system, PBE0 and HSE06, interatomic distance optimization ndtset 3 #DATASET 1 PBE (the pseudopotentials for C and H are with PBE XC) #DATASET 2 PBE0 ixc2 41 getwfk2 1 getxred2 1 #DATASET 3 HSE06 ixc3 -428 getwfk3 2 getxred3 2 #Common acell 5 2*4 Angstrom diemac 1.0d0 diemix 0.5d0 ecut 13 ionmov 2 ntime 15 tolrff 0.02 tolmxf 5.0d-5 kptopt 1 ngkpt 1 1 1 shiftk 3*0.0 istwfk 1 natom 2 nsppol 2 spinmagntarget 1 nband 4 nbdbuf 0 nstep 25 ntypat 2 typat 1 2 xcart -0.562 0 0 0.562 0 0 Angstrom znucl 6 1 #%%<BEGIN TEST_INFO> #%% [setup] #%% executable = abinit #%% [files] #%% files_to_test = #%% t68.out, tolnlines = 35, tolabs = 3.000e-03, tolrel = 1.500e-01, fld_options = -easy #%% psp_files = C.psp8, H.psp8 #%% [paral_info] #%% max_nprocs = 1 #%% [extra_info] #%% authors = X. Gonze #%% keywords = #%% description = #%% CH molecule spin-polarized, in a big box. #%% Norm conserving : first PBE, then PBE0, then HSE06. Interatomic distance optimization. #%% Test the geometry convergence in the hybrid functional case #%% The results obtained with underconverged ecut (ecut 15) and box size (4 3 3 Angstrom) #%% compare favourably with those mentioned in Arnardi's report, as well as with those of VASP. #%% Values for the distance d, in Angstrom, for PBE, PBE0 and HSE06 respectively : #%% Present calculation (ecut 13, 5 4 4 box) : 1.138, 1.130, 1.131 Angstrom #%% Better calculation (ecut 18, 7 6 6 box) : 1.135, 1.124, 1.125 Angstrom #%% ABINIT Arnardi report (ecut acell unknown): 1.136, 1.124, 1.124 Angstrom #%% VASP Arnardi report (ecut acell unknown) : 1.136, 1.124, NA Angstrom #%% Experimental : 1.120 Angstrom. #%% topics = Hybrids #%%<END TEST_INFO>
{ "pile_set_name": "Github" }
rules_version = '2'; service cloud.firestore { match /databases/{database}/documents { // Messages: // - Anyone can read. // - Authenticated users can add and edit messages. // - Validation: Check name is same as auth token and text length below 300 char or that imageUrl is a URL. // - Deletes are not allowed. match /messages/{messageId} { allow read; allow create, update: if request.auth != null && request.resource.data.name == request.auth.token.name && (request.resource.data.text is string && request.resource.data.text.size() <= 300 || request.resource.data.imageUrl is string && request.resource.data.imageUrl.matches('https?://.*')); allow delete: if false; } // FCM Tokens: // - Anyone can write their token. // - Reading list of tokens is not allowed. match /fcmTokens/{token} { allow read: if false; allow write; } } }
{ "pile_set_name": "Github" }
##List of Known Dependencies ###ORB-SLAM2 version 1.0 In this document we list all the pieces of code included by ORB-SLAM2 and linked libraries which are not property of the authors of ORB-SLAM2. #####Code in **src** and **include** folders * *ORBextractor.cc*. This is a modified version of orb.cpp of OpenCV library. The original code is BSD licensed. * *PnPsolver.h, PnPsolver.cc*. This is a modified version of the epnp.h and epnp.cc of Vincent Lepetit. This code can be found in popular BSD licensed computer vision libraries as [OpenCV](https://github.com/Itseez/opencv/blob/master/modules/calib3d/src/epnp.cpp) and [OpenGV](https://github.com/laurentkneip/opengv/blob/master/src/absolute_pose/modules/Epnp.cpp). The original code is FreeBSD. * Function *ORBmatcher::DescriptorDistance* in *ORBmatcher.cc*. The code is from: http://graphics.stanford.edu/~seander/bithacks.html#CountBitsSetParallel. The code is in the public domain. #####Code in Thirdparty folder * All code in **DBoW2** folder. This is a modified version of [DBoW2](https://github.com/dorian3d/DBoW2) and [DLib](https://github.com/dorian3d/DLib) library. All files included are BSD licensed. * All code in **g2o** folder. This is a modified version of [g2o](https://github.com/RainerKuemmerle/g2o). All files included are BSD licensed. #####Library dependencies * **Pangolin (visualization and user interface)**. https://github.com/stevenlovegrove/Pangolin * **OpenCV**. BSD licensed. * **Eigen3**. For versions greater than 3.1.1 is MPL2, earlier versions are LGPLv3. * **BLAS** (required by g2o). [Freely-available software](http://www.netlib.org/blas/#_licensing). * **LAPACK**(required by g2o). BSD licensed. * **ROS (Optional, only if you build Examples/ROS)**. BSD licensed. In the manifest.xml the only declared package dependencies are roscpp, tf, sensor_msgs, image_transport, cv_bridge, which are all BSD licensed. Updated: 23/01/2016 Raúl Mur Artal
{ "pile_set_name": "Github" }
# core HAL config file for servos # first load the core RT modules that will be needed # kinematics loadrt [KINS]KINEMATICS # motion controller, get name and thread periods from ini file loadrt [EMCMOT]EMCMOT servo_period_nsec=[EMCMOT]SERVO_PERIOD num_joints=[KINS]JOINTS # PID module, for three PID loops loadrt pid num_chan=3 # hook functions to realtime thread addf motion-command-handler servo-thread addf motion-controller servo-thread addf pid.0.do-pid-calcs servo-thread addf pid.1.do-pid-calcs servo-thread addf pid.2.do-pid-calcs servo-thread # connect position feedback net Xpos-fb joint.0.motor-pos-fb => pid.0.feedback net Ypos-fb joint.1.motor-pos-fb => pid.1.feedback net Zpos-fb joint.2.motor-pos-fb => pid.2.feedback # create PID to DAC output signals net Xoutput <= pid.0.output net Youtput <= pid.1.output net Zoutput <= pid.2.output # set PID loop output limits to +/-1.00 setp pid.0.maxoutput [JOINT_0]MAX_VELOCITY setp pid.1.maxoutput [JOINT_1]MAX_VELOCITY setp pid.2.maxoutput [JOINT_2]MAX_VELOCITY # set PID loop gains from inifile # the values below come from the ini setp pid.0.Pgain [JOINT_0]P setp pid.0.Igain [JOINT_0]I setp pid.0.Dgain [JOINT_0]D setp pid.0.bias [JOINT_0]BIAS setp pid.0.FF0 [JOINT_0]FF0 setp pid.0.FF1 [JOINT_0]FF1 setp pid.0.FF2 [JOINT_0]FF2 # deadband should be just over 1 count setp pid.0.deadband [JOINT_0]DEADBAND setp pid.1.Pgain [JOINT_1]P setp pid.1.Igain [JOINT_1]I setp pid.1.Dgain [JOINT_1]D setp pid.1.bias [JOINT_1]BIAS setp pid.1.FF0 [JOINT_1]FF0 setp pid.1.FF1 [JOINT_1]FF1 setp pid.1.FF2 [JOINT_1]FF2 # deadband should be just over 1 count setp pid.1.deadband [JOINT_1]DEADBAND setp pid.2.Pgain [JOINT_2]P setp pid.2.Igain [JOINT_2]I setp pid.2.Dgain [JOINT_2]D setp pid.2.bias [JOINT_2]BIAS setp pid.2.FF0 [JOINT_2]FF0 setp pid.2.FF1 [JOINT_2]FF1 setp pid.2.FF2 [JOINT_2]FF2 # deadband should be just over 1 count setp pid.2.deadband [JOINT_2]DEADBAND # position command signals net Xpos-cmd joint.0.motor-pos-cmd => pid.0.command net 
Ypos-cmd joint.1.motor-pos-cmd => pid.1.command net Zpos-cmd joint.2.motor-pos-cmd => pid.2.command # joint enable signals net Xenable joint.0.amp-enable-out => pid.0.enable net Yenable joint.1.amp-enable-out => pid.1.enable net Zenable joint.2.amp-enable-out => pid.2.enable
{ "pile_set_name": "Github" }
this exists to make sure that the git/svn bridge creates the folder.
{ "pile_set_name": "Github" }
// // Generated by class-dump 3.5 (64 bit) (Debug version compiled Oct 15 2018 10:31:50). // // class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by Steve Nygard. // #import <EventKit/EKProtocolObject-Protocol.h> @class NSString, NSURL; @protocol EKProtocolSharee <EKProtocolObject> @property(readonly, nonatomic) BOOL isCurrentUserForScheduling; @property(readonly, nonatomic) BOOL isCurrentUserForSharing; @property(readonly, copy, nonatomic) NSString *name; @property(readonly, copy, nonatomic) NSURL *URL; - (NSString *)accessLevel; - (NSString *)status; @end
{ "pile_set_name": "Github" }
Ext.define('Ext.theme.triton.grid.column.Column', { override: 'Ext.grid.column.Column', compatibility: Ext.isIE8, onTitleMouseOver: function() { var triggerEl = this.triggerEl; this.callParent(arguments); if (triggerEl) { triggerEl.syncRepaint(); } } });
{ "pile_set_name": "Github" }
.. _component_layers: ``component_layers`` -------------------- .. automodule:: neuroptica.component_layers :members: :special-members: :show-inheritance:
{ "pile_set_name": "Github" }
create or replace function pgq.register_consumer( x_queue_name text, x_consumer_id text) returns integer as $$ -- ---------------------------------------------------------------------- -- Function: pgq.register_consumer(2) -- -- Subscribe consumer on a queue. -- -- From this moment forward, consumer will see all events in the queue. -- -- Parameters: -- x_queue_name - Name of queue -- x_consumer_name - Name of consumer -- -- Returns: -- 0 - if already registered -- 1 - if new registration -- Calls: -- pgq.register_consumer_at(3) -- Tables directly manipulated: -- None -- ---------------------------------------------------------------------- begin return pgq.register_consumer_at(x_queue_name, x_consumer_id, NULL); end; $$ language plpgsql security definer; create or replace function pgq.register_consumer_at( x_queue_name text, x_consumer_name text, x_tick_pos bigint) returns integer as $$ -- ---------------------------------------------------------------------- -- Function: pgq.register_consumer_at(3) -- -- Extended registration, allows to specify tick_id. -- -- Note: -- For usage in special situations. -- -- Parameters: -- x_queue_name - Name of a queue -- x_consumer_name - Name of consumer -- x_tick_pos - Tick ID -- -- Returns: -- 0/1 whether consumer has already registered. 
-- Calls: -- None -- Tables directly manipulated: -- update/insert - pgq.subscription -- ---------------------------------------------------------------------- declare tmp text; last_tick bigint; x_queue_id integer; x_consumer_id integer; queue integer; sub record; begin select queue_id into x_queue_id from pgq.queue where queue_name = x_queue_name; if not found then raise exception 'Event queue not created yet'; end if; -- get consumer and create if new select co_id into x_consumer_id from pgq.consumer where co_name = x_consumer_name for update; if not found then insert into pgq.consumer (co_name) values (x_consumer_name); x_consumer_id := currval('pgq.consumer_co_id_seq'); end if; -- if particular tick was requested, check if it exists if x_tick_pos is not null then perform 1 from pgq.tick where tick_queue = x_queue_id and tick_id = x_tick_pos; if not found then raise exception 'cannot reposition, tick not found: %', x_tick_pos; end if; end if; -- check if already registered select sub_last_tick, sub_batch into sub from pgq.subscription where sub_consumer = x_consumer_id and sub_queue = x_queue_id; if found then if x_tick_pos is not null then -- if requested, update tick pos and drop partial batch update pgq.subscription set sub_last_tick = x_tick_pos, sub_batch = null, sub_next_tick = null, sub_active = now() where sub_consumer = x_consumer_id and sub_queue = x_queue_id; end if; -- already registered return 0; end if; -- new registration if x_tick_pos is null then -- start from current tick select tick_id into last_tick from pgq.tick where tick_queue = x_queue_id order by tick_queue desc, tick_id desc limit 1; if not found then raise exception 'No ticks for this queue. Please run ticker on database.'; end if; else last_tick := x_tick_pos; end if; -- register insert into pgq.subscription (sub_queue, sub_consumer, sub_last_tick) values (x_queue_id, x_consumer_id, last_tick); return 1; end; $$ language plpgsql security definer;
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android" xmlns:app="http://schemas.android.com/apk/res-auto" android:layout_width="match_parent" android:layout_height="wrap_content" android:orientation="vertical" android:padding="8dp"> <android.support.v7.widget.CardView android:id="@+id/card_view" android:layout_width="match_parent" android:layout_height="170dp" android:foreground="?android:attr/selectableItemBackground" android:clickable="true"> <!-- app:cardCornerRadius="3dp" app:cardElevation="4dp" --> <LinearLayout android:layout_width="match_parent" android:layout_height="wrap_content" android:orientation="horizontal"> <!--书的封面--> <ImageView android:id="@+id/img_cover" android:layout_width="120dp" android:layout_height="160dp" android:layout_marginBottom="@dimen/margin_4" android:layout_marginTop="@dimen/margin_4"/> <!--书籍相关信息--> <LinearLayout android:orientation="vertical" android:layout_width="wrap_content" android:layout_height="wrap_content" android:layout_marginLeft="@dimen/margin_10" android:layout_marginTop="@dimen/margin_4"> <!--标题和作者信息--> <TextView android:id="@+id/tv_title" android:layout_width="wrap_content" android:layout_height="wrap_content" android:textStyle="bold" android:textSize="@dimen/textSize_normal"/> <TextView android:id="@+id/tv_author" android:layout_width="wrap_content" android:layout_height="wrap_content" android:textColor="@color/accent" android:textSize="@dimen/textSize_small" android:layout_marginTop="@dimen/margin_10"/> <!--评价人数和评分--> <LinearLayout android:orientation="horizontal" android:layout_width="match_parent" android:layout_height="wrap_content" android:layout_marginTop="@dimen/margin_10"> <TextView android:id="@+id/tv_book_rate" android:layout_width="wrap_content" android:layout_height="wrap_content"/> <TextView android:id="@+id/tv_rate_num" android:layout_width="wrap_content" android:layout_height="wrap_content" android:layout_marginLeft="50dp"/> </LinearLayout> 
</LinearLayout> </LinearLayout> </android.support.v7.widget.CardView> </LinearLayout>
{ "pile_set_name": "Github" }
var assert = require('assert'); var uuid = require('../'); var log = console.log; var generators = { v1: uuid.v1, v4: uuid.v4 }; var UUID_FORMAT = { v1: /[0-9a-f]{8}-[0-9a-f]{4}-1[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}/i, v4: /[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}/i }; var N = 1e4; // Get %'age an actual value differs from the ideal value function divergence(actual, ideal) { return Math.round(100*100*(actual - ideal)/ideal)/100; } function rate(msg, t) { log(msg + ': ' + (N / (Date.now() - t) * 1e3 | 0) + ' uuids\/second'); } for (var version in generators) { var counts = {}, max = 0; var generator = generators[version]; var format = UUID_FORMAT[version]; log('\nSanity check ' + N + ' ' + version + ' uuids'); for (var i = 0, ok = 0; i < N; i++) { id = generator(); if (!format.test(id)) { throw Error(id + ' is not a valid UUID string'); } if (id != uuid.unparse(uuid.parse(id))) { assert(fail, id + ' is not a valid id'); } // Count digits for our randomness check if (version == 'v4') { var digits = id.replace(/-/g, '').split(''); for (var j = digits.length-1; j >= 0; j--) { var c = digits[j]; max = Math.max(max, counts[c] = (counts[c] || 0) + 1); } } } // Check randomness for v4 UUIDs if (version == 'v4') { // Limit that we get worried about randomness. (Purely empirical choice, this!) var limit = 2*100*Math.sqrt(1/N); log('\nChecking v4 randomness. 
Distribution of Hex Digits (% deviation from ideal)'); for (var i = 0; i < 16; i++) { var c = i.toString(16); var bar = '', n = counts[c], p = Math.round(n/max*100|0); // 1-3,5-8, and D-F: 1:16 odds over 30 digits var ideal = N*30/16; if (i == 4) { // 4: 1:1 odds on 1 digit, plus 1:16 odds on 30 digits ideal = N*(1 + 30/16); } else if (i >= 8 && i <= 11) { // 8-B: 1:4 odds on 1 digit, plus 1:16 odds on 30 digits ideal = N*(1/4 + 30/16); } else { // Otherwise: 1:16 odds on 30 digits ideal = N*30/16; } var d = divergence(n, ideal); // Draw bar using UTF squares (just for grins) var s = n/max*50 | 0; while (s--) bar += '='; assert(Math.abs(d) < limit, c + ' |' + bar + '| ' + counts[c] + ' (' + d + '% < ' + limit + '%)'); } } } // Perf tests for (var version in generators) { log('\nPerformance testing ' + version + ' UUIDs'); var generator = generators[version]; var buf = new uuid.BufferClass(16); for (var i = 0, t = Date.now(); i < N; i++) generator(); rate('uuid.' + version + '()', t); for (var i = 0, t = Date.now(); i < N; i++) generator('binary'); rate('uuid.' + version + '(\'binary\')', t); for (var i = 0, t = Date.now(); i < N; i++) generator('binary', buf); rate('uuid.' + version + '(\'binary\', buffer)', t); }
{ "pile_set_name": "Github" }
2 2 1 9 9 1 10 10 1 13 13 1 14 14 1 15 15 1 22 2 2 29 9 2 30 10 2 33 13 2 34 14 2 35 15 2 42 2 3 45 5 3 49 9 3 50 10 3 53 13 3 54 14 3 55 15 3 62 2 4 69 9 4 70 10 4 73 13 4 74 14 4 75 15 4
{ "pile_set_name": "Github" }
"Utility functions used by the btm_matcher module" from . import pytree from .pgen2 import grammar, token from .pygram import pattern_symbols, python_symbols syms = pattern_symbols pysyms = python_symbols tokens = grammar.opmap token_labels = token TYPE_ANY = -1 TYPE_ALTERNATIVES = -2 TYPE_GROUP = -3 class MinNode(object): """This class serves as an intermediate representation of the pattern tree during the conversion to sets of leaf-to-root subpatterns""" def __init__(self, type=None, name=None): self.type = type self.name = name self.children = [] self.leaf = False self.parent = None self.alternatives = [] self.group = [] def __repr__(self): return str(self.type) + ' ' + str(self.name) def leaf_to_root(self): """Internal method. Returns a characteristic path of the pattern tree. This method must be run for all leaves until the linear subpatterns are merged into a single""" node = self subp = [] while node: if node.type == TYPE_ALTERNATIVES: node.alternatives.append(subp) if len(node.alternatives) == len(node.children): #last alternative subp = [tuple(node.alternatives)] node.alternatives = [] node = node.parent continue else: node = node.parent subp = None break if node.type == TYPE_GROUP: node.group.append(subp) #probably should check the number of leaves if len(node.group) == len(node.children): subp = get_characteristic_subpattern(node.group) node.group = [] node = node.parent continue else: node = node.parent subp = None break if node.type == token_labels.NAME and node.name: #in case of type=name, use the name instead subp.append(node.name) else: subp.append(node.type) node = node.parent return subp def get_linear_subpattern(self): """Drives the leaf_to_root method. The reason that leaf_to_root must be run multiple times is because we need to reject 'group' matches; for example the alternative form (a | b c) creates a group [b c] that needs to be matched. 
Since matching multiple linear patterns overcomes the automaton's capabilities, leaf_to_root merges each group into a single choice based on 'characteristic'ity, i.e. (a|b c) -> (a|b) if b more characteristic than c Returns: The most 'characteristic'(as defined by get_characteristic_subpattern) path for the compiled pattern tree. """ for l in self.leaves(): subp = l.leaf_to_root() if subp: return subp def leaves(self): "Generator that returns the leaves of the tree" for child in self.children: for x in child.leaves(): yield x if not self.children: yield self def reduce_tree(node, parent=None): """ Internal function. Reduces a compiled pattern tree to an intermediate representation suitable for feeding the automaton. This also trims off any optional pattern elements(like [a], a*). """ new_node = None #switch on the node type if node.type == syms.Matcher: #skip node = node.children[0] if node.type == syms.Alternatives : #2 cases if len(node.children) <= 2: #just a single 'Alternative', skip this node new_node = reduce_tree(node.children[0], parent) else: #real alternatives new_node = MinNode(type=TYPE_ALTERNATIVES) #skip odd children('|' tokens) for child in node.children: if node.children.index(child)%2: continue reduced = reduce_tree(child, new_node) if reduced is not None: new_node.children.append(reduced) elif node.type == syms.Alternative: if len(node.children) > 1: new_node = MinNode(type=TYPE_GROUP) for child in node.children: reduced = reduce_tree(child, new_node) if reduced: new_node.children.append(reduced) if not new_node.children: # delete the group if all of the children were reduced to None new_node = None else: new_node = reduce_tree(node.children[0], parent) elif node.type == syms.Unit: if (isinstance(node.children[0], pytree.Leaf) and node.children[0].value == '('): #skip parentheses return reduce_tree(node.children[1], parent) if ((isinstance(node.children[0], pytree.Leaf) and node.children[0].value == '[') or (len(node.children)>1 and 
hasattr(node.children[1], "value") and node.children[1].value == '[')): #skip whole unit if its optional return None leaf = True details_node = None alternatives_node = None has_repeater = False repeater_node = None has_variable_name = False for child in node.children: if child.type == syms.Details: leaf = False details_node = child elif child.type == syms.Repeater: has_repeater = True repeater_node = child elif child.type == syms.Alternatives: alternatives_node = child if hasattr(child, 'value') and child.value == '=': # variable name has_variable_name = True #skip variable name if has_variable_name: #skip variable name, '=' name_leaf = node.children[2] if hasattr(name_leaf, 'value') and name_leaf.value == '(': # skip parenthesis name_leaf = node.children[3] else: name_leaf = node.children[0] #set node type if name_leaf.type == token_labels.NAME: #(python) non-name or wildcard if name_leaf.value == 'any': new_node = MinNode(type=TYPE_ANY) else: if hasattr(token_labels, name_leaf.value): new_node = MinNode(type=getattr(token_labels, name_leaf.value)) else: new_node = MinNode(type=getattr(pysyms, name_leaf.value)) elif name_leaf.type == token_labels.STRING: #(python) name or character; remove the apostrophes from #the string value name = name_leaf.value.strip("'") if name in tokens: new_node = MinNode(type=tokens[name]) else: new_node = MinNode(type=token_labels.NAME, name=name) elif name_leaf.type == syms.Alternatives: new_node = reduce_tree(alternatives_node, parent) #handle repeaters if has_repeater: if repeater_node.children[0].value == '*': #reduce to None new_node = None elif repeater_node.children[0].value == '+': #reduce to a single occurence i.e. 
do nothing pass else: #TODO: handle {min, max} repeaters raise NotImplementedError pass #add children if details_node and new_node is not None: for child in details_node.children[1:-1]: #skip '<', '>' markers reduced = reduce_tree(child, new_node) if reduced is not None: new_node.children.append(reduced) if new_node: new_node.parent = parent return new_node def get_characteristic_subpattern(subpatterns): """Picks the most characteristic from a list of linear patterns Current order used is: names > common_names > common_chars """ if not isinstance(subpatterns, list): return subpatterns if len(subpatterns)==1: return subpatterns[0] # first pick out the ones containing variable names subpatterns_with_names = [] subpatterns_with_common_names = [] common_names = ['in', 'for', 'if' , 'not', 'None'] subpatterns_with_common_chars = [] common_chars = "[]().,:" for subpattern in subpatterns: if any(rec_test(subpattern, lambda x: type(x) is str)): if any(rec_test(subpattern, lambda x: isinstance(x, str) and x in common_chars)): subpatterns_with_common_chars.append(subpattern) elif any(rec_test(subpattern, lambda x: isinstance(x, str) and x in common_names)): subpatterns_with_common_names.append(subpattern) else: subpatterns_with_names.append(subpattern) if subpatterns_with_names: subpatterns = subpatterns_with_names elif subpatterns_with_common_names: subpatterns = subpatterns_with_common_names elif subpatterns_with_common_chars: subpatterns = subpatterns_with_common_chars # of the remaining subpatterns pick out the longest one return max(subpatterns, key=len) def rec_test(sequence, test_func): """Tests test_func on all items of sequence and items of included sub-iterables""" for x in sequence: if isinstance(x, (list, tuple)): for y in rec_test(x, test_func): yield y else: yield test_func(x)
{ "pile_set_name": "Github" }
Sequel.migration do up do create_table :events do primary_key :id Integer :parent_id String :user, :null => false Time :timestamp, :index => true, :null => false String :action, :null => false String :object_type, :null => false String :object_name String :error, :text => true String :task String :deployment String :instance String :context_json, :text => true end end down do drop_table :events end end
{ "pile_set_name": "Github" }
# Utils Here are some utils, which may be used to do tasks around the fine dust sensor project. ## External utils [Stress Test Scripts for MQTT using Python and Ansible](https://github.com/bluewindthings/mqtt-stress)
{ "pile_set_name": "Github" }
// Copyright 2017 The Upspin Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // Package disk provides a storage.Storage that stores data on local disk. package disk // import "upspin.io/cloud/storage/disk" import ( "io/ioutil" "os" "path/filepath" "strings" "upspin.io/cloud/storage" "upspin.io/cloud/storage/disk/internal/local" "upspin.io/errors" "upspin.io/upspin" ) // New initializes and returns a disk-backed storage.Storage with the given // options. The single, required option is "basePath" that must be an absolute // path under which all objects should be stored. func New(opts *storage.Opts) (storage.Storage, error) { const op errors.Op = "cloud/storage/disk.New" base, ok := opts.Opts["basePath"] if !ok { return nil, errors.E(op, "the basePath option must be specified") } if err := os.MkdirAll(base, 0700); err != nil { return nil, errors.E(op, errors.IO, err) } if err := guaranteeNewEncoding(base); err != nil { return nil, errors.E(op, errors.IO, err) } return &storageImpl{base: base}, nil } // guaranteeNewEncoding makes sure we are using the new, safe path encoding. // If we're not, it prints a recipe to update it and errors out. func guaranteeNewEncoding(base string) error { // Make sure the disk tree is or will be using the new path encoding. // Three cases: // 1) Directory is empty. Use new encoding, and add "++" directory. // 2) Directory contains subdirectory "++". Use new encoding. // 3) Directory is non-empty and does not contain "++". Give error. // The "++" directory is used as an indicator that we are using the new // encoding. This might hold storage one day but will never exist if // using the old one, so it serves as a good marker. plusDir := filepath.Join(base, "++") empty, err := isEmpty(base) if err != nil { return err } if empty { // New directory tree. Create the "++" directory as a marker. return os.MkdirAll(plusDir, 0700) } // Directory is not empty. 
It must contain "++". if _, err := os.Stat(plusDir); err != nil { // Return a very long error explaining what to do. format := "Base directory %[1]q uses a deprecated path encoding.\n" + "It must be updated before serving again.\n" + "To update, move the tree aside to a backup location, and run:\n" + "\tgo run upspin.io/cloud/storage/disk/convert.go -old=<backup-location> -new=%[1]q\n" + "Then restart the server.\n" return errors.Errorf(format, base) } return nil } // isEmpty reports whether the directory is empty. // The directory must exist; we have already created it if we needed to. func isEmpty(dir string) (bool, error) { fd, err := os.Open(dir) if err != nil { return true, err } defer fd.Close() names, err := fd.Readdirnames(0) if err != nil { return true, err } return len(names) == 0, nil } func init() { storage.Register("Disk", New) } type storageImpl struct { base string } var ( _ storage.Storage = (*storageImpl)(nil) _ storage.Lister = (*storageImpl)(nil) ) // LinkBase implements storage.Storage. func (s *storageImpl) LinkBase() (base string, err error) { return "", upspin.ErrNotSupported } // Download implements storage.Storage. func (s *storageImpl) Download(ref string) ([]byte, error) { const op errors.Op = "cloud/storage/disk.Download" b, err := ioutil.ReadFile(s.path(ref)) if os.IsNotExist(err) { return nil, errors.E(op, errors.NotExist, errors.Str(ref)) } else if err != nil { return nil, errors.E(op, errors.IO, err) } return b, nil } // Put implements storage.Storage. func (s *storageImpl) Put(ref string, contents []byte) error { const op errors.Op = "cloud/storage/disk.Put" p := s.path(ref) if err := os.MkdirAll(filepath.Dir(p), 0700); err != nil { return errors.E(op, errors.IO, err) } if err := ioutil.WriteFile(p, contents, 0600); err != nil { return errors.E(op, errors.IO, err) } return nil } // Delete implements storage.Storage. 
func (s *storageImpl) Delete(ref string) error { const op errors.Op = "cloud/storage/disk.Delete" if err := os.Remove(s.path(ref)); os.IsNotExist(err) { return errors.E(op, errors.NotExist, errors.Str(ref)) } else if err != nil { return errors.E(op, errors.IO, err) } return nil } var maxRefsPerCall = 1000 // A variable so that it may be overridden by tests. // List implements storage.Lister. func (s *storageImpl) List(token string) (refs []upspin.ListRefsItem, next string, err error) { const op errors.Op = "cloud/storage/disk.List" err = filepath.Walk(s.base, func(path string, fi os.FileInfo, err error) error { if err != nil { return err } // Convert path into its base path. path = strings.TrimPrefix(strings.TrimPrefix(path, s.base), string(filepath.Separator)) // Ignore the root. if path == "" { return nil } // Stop walking when we've gathered enough refs. if len(refs) >= maxRefsPerCall { if next == "" { next = path } return filepath.SkipDir } // Don't process paths that come before our pagination token. if path < token { if fi.IsDir() && !strings.HasPrefix(token, path) { // Don't descend into irrelevant directories. return filepath.SkipDir } return nil } if fi.IsDir() { // Nothing more to do for directories. return nil } // Convert the file path into its reference name // and append it to refs. ref, err := local.Ref(path) if err != nil { return err } refs = append(refs, upspin.ListRefsItem{ Ref: upspin.Reference(ref), Size: fi.Size(), }) return nil }) if err != nil { return nil, "", errors.E(op, err) } return refs, next, nil } // path returns the absolute path that should contain ref. func (s *storageImpl) path(ref string) string { return local.Path(s.base, ref) }
{ "pile_set_name": "Github" }
export {Scrollbar} from './Scrollbar.js';
{ "pile_set_name": "Github" }
package org.openstack4j.model.compute; import java.util.Date; import org.openstack4j.model.ModelEntity; /** * An OpenStack Keypair is an SSH Key * * @author Jeremy Unruh,whaon */ public interface Keypair extends ModelEntity { /** * The name associated with the keypair * * @return the name of the keypair */ String getName(); /** * The public SSH key * * @return the public key */ String getPublicKey(); /** * The private key associated with this keypair. Only populated on create when a public key is not specified and is auto-generated * by the server * * @return the private key */ String getPrivateKey(); /** * @return the server fingerprint */ String getFingerprint(); /** * * @return the user_id for a keypair. */ String getUserId(); /** * * @return is deleted */ Boolean getDeleted(); /** * time created * @return */ Date getCreatedAt(); /** * time updated * @return */ Date getUpdatedAt(); /** * time deleted * @return */ Date getDeletedAt(); /** * * @return id of the keypair */ Integer getId(); }
{ "pile_set_name": "Github" }
--- external help file: PSWriteWord-help.xml Module Name: PSWriteWord online version: schema: 2.0.0 --- # Get-WordFooter ## SYNOPSIS {{Fill in the Synopsis}} ## SYNTAX ``` Get-WordFooter [[-WordDocument] <Container>] [[-Type] <String>] [[-Supress] <Boolean>] [<CommonParameters>] ``` ## DESCRIPTION {{Fill in the Description}} ## EXAMPLES ### Example 1 ```powershell PS C:\> {{ Add example code here }} ``` {{ Add example description here }} ## PARAMETERS ### -Supress {{Fill Supress Description}} ```yaml Type: Boolean Parameter Sets: (All) Aliases: Required: False Position: 2 Default value: None Accept pipeline input: False Accept wildcard characters: False ``` ### -Type {{Fill Type Description}} ```yaml Type: String Parameter Sets: (All) Aliases: Accepted values: All, First, Even, Odd Required: False Position: 1 Default value: None Accept pipeline input: False Accept wildcard characters: False ``` ### -WordDocument {{Fill WordDocument Description}} ```yaml Type: Container Parameter Sets: (All) Aliases: Required: False Position: 0 Default value: None Accept pipeline input: False Accept wildcard characters: False ``` ### CommonParameters This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). ## INPUTS ### None ## OUTPUTS ### System.Object ## NOTES ## RELATED LINKS
{ "pile_set_name": "Github" }
require('../../modules/es6.string.iterator'); require('../../modules/es6.array.from'); module.exports = require('../../modules/_core').Array.from;
{ "pile_set_name": "Github" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <link rel="stylesheet" type="text/css" href="css/tips.css"> </head> <body> <h1>SpecialCharacters(特殊字符) <a href='https://www.pingfangx.com/xx/translation/feedback?from=tips'>[汉化反馈]</a></h1> <p>您可以使用以撇号开头的 ASCII 值替换 XML 或 HTML 文件中的特殊字符。要做到这一点,选择一个字符,然后在主菜单选择 <span class="control">Edit | Encode XML/HTML Special Characters</span>。</p> </body> </html>
{ "pile_set_name": "Github" }
#if defined(TILES) #include "sdl_font.h" #include "output.h" #if defined(_WIN32) # if 1 // HACK: Hack to prevent reordering of #include "platform_win.h" by IWYU # include "platform_win.h" # endif # include <shlwapi.h> # if !defined(strcasecmp) # define strcasecmp StrCmpI # endif #else # include <strings.h> // for strcasecmp #endif #define dbg(x) DebugLog((x),D_SDL) << __FILE__ << ":" << __LINE__ << ": " // Check if text ends with suffix static bool ends_with( const std::string &text, const std::string &suffix ) { return text.length() >= suffix.length() && strcasecmp( text.c_str() + text.length() - suffix.length(), suffix.c_str() ) == 0; } static void font_folder_list( std::ostream &fout, const std::string &path, std::set<std::string> &bitmap_fonts ) { for( const std::string &f : get_files_from_path( "", path, true, false ) ) { TTF_Font_Ptr fnt( TTF_OpenFont( f.c_str(), 12 ) ); if( !fnt ) { continue; } // TTF_FontFaces returns a long, so use that // NOLINTNEXTLINE(cata-no-long) long nfaces = 0; nfaces = TTF_FontFaces( fnt.get() ); fnt.reset(); // NOLINTNEXTLINE(cata-no-long) for( long i = 0; i < nfaces; i++ ) { const TTF_Font_Ptr fnt( TTF_OpenFontIndex( f.c_str(), 12, i ) ); if( !fnt ) { continue; } // Add font family char *fami = TTF_FontFaceFamilyName( fnt.get() ); if( fami != nullptr ) { fout << fami; } else { continue; } // Add font style char *style = TTF_FontFaceStyleName( fnt.get() ); bool isbitmap = ends_with( f, ".fon" ); if( style != nullptr && !isbitmap && strcasecmp( style, "Regular" ) != 0 ) { fout << " " << style; } if( isbitmap ) { std::set<std::string>::iterator it; it = bitmap_fonts.find( std::string( fami ) ); if( it == bitmap_fonts.end() ) { // First appearance of this font family bitmap_fonts.insert( fami ); } else { // Font in set. Add filename to family string size_t start = f.find_last_of( "/\\" ); size_t end = f.find_last_of( '.' 
); if( start != std::string::npos && end != std::string::npos ) { fout << " [" << f.substr( start + 1, end - start - 1 ) + "]"; } else { dbg( D_INFO ) << "Skipping wrong font file: \"" << f << "\""; } } } fout << std::endl; // Add filename and font index fout << f << std::endl; fout << i << std::endl; // We use only 1 style in bitmap fonts. if( isbitmap ) { break; } } } } static void save_font_list() { try { std::set<std::string> bitmap_fonts; write_to_file( PATH_INFO::fontlist(), [&]( std::ostream & fout ) { font_folder_list( fout, PATH_INFO::user_font(), bitmap_fonts ); font_folder_list( fout, PATH_INFO::fontdir(), bitmap_fonts ); #if defined(_WIN32) constexpr UINT max_dir_len = 256; char buf[max_dir_len]; const UINT dir_len = GetSystemWindowsDirectory( buf, max_dir_len ); if( dir_len == 0 ) { throw std::runtime_error( "GetSystemWindowsDirectory failed" ); } else if( dir_len >= max_dir_len ) { throw std::length_error( "GetSystemWindowsDirectory failed due to insufficient buffer" ); } font_folder_list( fout, buf + std::string( "\\fonts" ), bitmap_fonts ); #elif defined(_APPLE_) && defined(_MACH_) /* // Well I don't know how osx actually works .... font_folder_list(fout, "/System/Library/Fonts", bitmap_fonts); font_folder_list(fout, "/Library/Fonts", bitmap_fonts); wordexp_t exp; wordexp("~/Library/Fonts", &exp, 0); font_folder_list(fout, exp.we_wordv[0], bitmap_fonts); wordfree(&exp);*/ #else // Other POSIX-ish systems font_folder_list( fout, "/usr/share/fonts", bitmap_fonts ); font_folder_list( fout, "/usr/local/share/fonts", bitmap_fonts ); char *home; if( ( home = getenv( "HOME" ) ) ) { std::string userfontdir = home; userfontdir += "/.fonts"; font_folder_list( fout, userfontdir, bitmap_fonts ); } #endif } ); } catch( const std::exception &err ) { // This is called during startup, the UI system may not be initialized (because that // needs the font file in order to load the font for it). 
dbg( D_ERROR ) << "Faied to create fontlist file \"" << PATH_INFO::fontlist() << "\": " << err.what(); } } static cata::optional<std::string> find_system_font( const std::string &name, int &faceIndex ) { const std::string fontlist_path = PATH_INFO::fontlist(); std::ifstream fin( fontlist_path.c_str() ); if( !fin.is_open() ) { // Write out fontlist to the new location. save_font_list(); } if( fin.is_open() ) { std::string fname; std::string fpath; std::string iline; while( getline( fin, fname ) && getline( fin, fpath ) && getline( fin, iline ) ) { if( 0 == strcasecmp( fname.c_str(), name.c_str() ) ) { faceIndex = atoi( iline.c_str() ); return fpath; } } } return cata::nullopt; } // bitmap font size test // return face index that has this size or below static int test_face_size( const std::string &f, int size, int faceIndex ) { const TTF_Font_Ptr fnt( TTF_OpenFontIndex( f.c_str(), size, faceIndex ) ); if( fnt ) { char *style = TTF_FontFaceStyleName( fnt.get() ); if( style != nullptr ) { int faces = TTF_FontFaces( fnt.get() ); for( int i = faces - 1; i >= 0; i-- ) { const TTF_Font_Ptr tf( TTF_OpenFontIndex( f.c_str(), size, i ) ); char *ts = nullptr; if( tf ) { if( nullptr != ( ts = TTF_FontFaceStyleName( tf.get() ) ) ) { if( 0 == strcasecmp( ts, style ) && TTF_FontHeight( tf.get() ) <= size ) { return i; } } } } } } return faceIndex; } std::unique_ptr<Font> Font::load_font( SDL_Renderer_Ptr &renderer, SDL_PixelFormat_Ptr &format, const std::string &typeface, int fontsize, int width, int height, const palette_array &palette, const bool fontblending ) { if( ends_with( typeface, ".bmp" ) || ends_with( typeface, ".png" ) ) { // Seems to be an image file, not a font. // Try to load as bitmap font from user font dir, then from font dir. 
try { return std::unique_ptr<Font>( std::make_unique<BitmapFont>( renderer, format, width, height, palette, PATH_INFO::user_font() + typeface ) ); } catch( std::exception & ) { try { return std::unique_ptr<Font>( std::make_unique<BitmapFont>( renderer, format, width, height, palette, PATH_INFO::fontdir() + typeface ) ); } catch( std::exception &err ) { dbg( D_ERROR ) << "Failed to load " << typeface << ": " << err.what(); // Continue to load as truetype font } } } // Not loaded as bitmap font (or it failed), try to load as truetype try { return std::unique_ptr<Font>( std::make_unique<CachedTTFFont>( width, height, palette, typeface, fontsize, fontblending ) ); } catch( std::exception &err ) { dbg( D_ERROR ) << "Failed to load " << typeface << ": " << err.what(); } return nullptr; } // line_id is one of the LINE_*_C constants // FG is a curses color void Font::draw_ascii_lines( SDL_Renderer_Ptr &renderer, GeometryRenderer_Ptr &geometry, unsigned char line_id, const point &p, unsigned char color ) const { SDL_Color sdl_color = palette[color]; switch( line_id ) { // box bottom/top side (horizontal line) case LINE_OXOX_C: geometry->horizontal_line( renderer, p + point( 0, ( height / 2 ) ), p.x + width, 1, sdl_color ); break; // box left/right side (vertical line) case LINE_XOXO_C: geometry->vertical_line( renderer, p + point( ( width / 2 ), 0 ), p.y + height, 2, sdl_color ); break; // box top left case LINE_OXXO_C: geometry->horizontal_line( renderer, p + point( ( width / 2 ), ( height / 2 ) ), p.x + width, 1, sdl_color ); geometry->vertical_line( renderer, p + point( ( width / 2 ), ( height / 2 ) ), p.y + height, 2, sdl_color ); break; // box top right case LINE_OOXX_C: geometry->horizontal_line( renderer, p + point( 0, ( height / 2 ) ), p.x + ( width / 2 ), 1, sdl_color ); geometry->vertical_line( renderer, p + point( ( width / 2 ), ( height / 2 ) ), p.y + height, 2, sdl_color ); break; // box bottom right case LINE_XOOX_C: geometry->horizontal_line( renderer, p + 
point( 0, ( height / 2 ) ), p.x + ( width / 2 ), 1, sdl_color ); geometry->vertical_line( renderer, p + point( ( width / 2 ), 0 ), p.y + ( height / 2 ) + 1, 2, sdl_color ); break; // box bottom left case LINE_XXOO_C: geometry->horizontal_line( renderer, p + point( ( width / 2 ), ( height / 2 ) ), p.x + width, 1, sdl_color ); geometry->vertical_line( renderer, p + point( ( width / 2 ), 0 ), p.y + ( height / 2 ) + 1, 2, sdl_color ); break; // box bottom north T (left, right, up) case LINE_XXOX_C: geometry->horizontal_line( renderer, p + point( 0, ( height / 2 ) ), p.x + width, 1, sdl_color ); geometry->vertical_line( renderer, p + point( ( width / 2 ), 0 ), p.y + ( height / 2 ), 2, sdl_color ); break; // box bottom east T (up, right, down) case LINE_XXXO_C: geometry->vertical_line( renderer, p + point( ( width / 2 ), 0 ), p.y + height, 2, sdl_color ); geometry->horizontal_line( renderer, p + point( ( width / 2 ), ( height / 2 ) ), p.x + width, 1, sdl_color ); break; // box bottom south T (left, right, down) case LINE_OXXX_C: geometry->horizontal_line( renderer, p + point( 0, ( height / 2 ) ), p.x + width, 1, sdl_color ); geometry->vertical_line( renderer, p + point( ( width / 2 ), ( height / 2 ) ), p.y + height, 2, sdl_color ); break; // box X (left down up right) case LINE_XXXX_C: geometry->horizontal_line( renderer, p + point( 0, ( height / 2 ) ), p.x + width, 1, sdl_color ); geometry->vertical_line( renderer, p + point( ( width / 2 ), 0 ), p.y + height, 2, sdl_color ); break; // box bottom east T (left, down, up) case LINE_XOXX_C: geometry->vertical_line( renderer, p + point( ( width / 2 ), 0 ), p.y + height, 2, sdl_color ); geometry->horizontal_line( renderer, p + point( 0, ( height / 2 ) ), p.x + ( width / 2 ), 1, sdl_color ); break; default: break; } } CachedTTFFont::CachedTTFFont( const int w, const int h, const palette_array &palette, std::string typeface, int fontsize, const bool fontblending ) : Font( w, h, palette ) , fontblending( fontblending ) { const 
std::string original_typeface = typeface; int faceIndex = 0; if( const cata::optional<std::string> sysfnt = find_system_font( typeface, faceIndex ) ) { typeface = *sysfnt; dbg( D_INFO ) << "Using font [" + typeface + "] found in the system."; } if( !file_exist( typeface ) ) { faceIndex = 0; typeface = PATH_INFO::user_font() + original_typeface + ".ttf"; dbg( D_INFO ) << "Using compatible font [" + typeface + "] found in user font dir."; } //make fontdata compatible with wincurse if( !file_exist( typeface ) ) { faceIndex = 0; typeface = PATH_INFO::fontdir() + original_typeface + ".ttf"; dbg( D_INFO ) << "Using compatible font [" + typeface + "] found in font dir."; } //different default font with wincurse if( !file_exist( typeface ) ) { faceIndex = 0; typeface = PATH_INFO::fontdir() + "unifont.ttf"; dbg( D_INFO ) << "Using fallback font [" + typeface + "] found in font dir."; } dbg( D_INFO ) << "Loading truetype font [" + typeface + "]."; if( fontsize <= 0 ) { fontsize = height - 1; } // SDL_ttf handles bitmap fonts size incorrectly if( typeface.length() > 4 && strcasecmp( typeface.substr( typeface.length() - 4 ).c_str(), ".fon" ) == 0 ) { faceIndex = test_face_size( typeface, fontsize, faceIndex ); } font.reset( TTF_OpenFontIndex( typeface.c_str(), fontsize, faceIndex ) ); if( !font ) { throw std::runtime_error( TTF_GetError() ); } TTF_SetFontStyle( font.get(), TTF_STYLE_NORMAL ); } SDL_Texture_Ptr CachedTTFFont::create_glyph( SDL_Renderer_Ptr &renderer, const std::string &ch, const int color ) { const auto function = fontblending ? 
TTF_RenderUTF8_Blended : TTF_RenderUTF8_Solid; SDL_Surface_Ptr sglyph( function( font.get(), ch.c_str(), windowsPalette[color] ) ); if( !sglyph ) { dbg( D_ERROR ) << "Failed to create glyph for " << ch << ": " << TTF_GetError(); return nullptr; } /* SDL interprets each pixel as a 32-bit number, so our masks must depend on the endianness (byte order) of the machine */ #if SDL_BYTEORDER == SDL_BIG_ENDIAN static const Uint32 rmask = 0xff000000; static const Uint32 gmask = 0x00ff0000; static const Uint32 bmask = 0x0000ff00; static const Uint32 amask = 0x000000ff; #else static const Uint32 rmask = 0x000000ff; static const Uint32 gmask = 0x0000ff00; static const Uint32 bmask = 0x00ff0000; static const Uint32 amask = 0xff000000; #endif const int wf = utf8_wrapper( ch ).display_width(); // Note: bits per pixel must be 8 to be synchronized with the surface // that TTF_RenderGlyph above returns. This is important for SDL_BlitScaled SDL_Surface_Ptr surface = CreateRGBSurface( 0, width * wf, height, 32, rmask, gmask, bmask, amask ); SDL_Rect src_rect = { 0, 0, sglyph->w, sglyph->h }; SDL_Rect dst_rect = { 0, 0, width * wf, height }; if( src_rect.w < dst_rect.w ) { dst_rect.x = ( dst_rect.w - src_rect.w ) / 2; dst_rect.w = src_rect.w; } else if( src_rect.w > dst_rect.w ) { src_rect.x = ( src_rect.w - dst_rect.w ) / 2; src_rect.w = dst_rect.w; } if( src_rect.h < dst_rect.h ) { dst_rect.y = ( dst_rect.h - src_rect.h ) / 2; dst_rect.h = src_rect.h; } else if( src_rect.h > dst_rect.h ) { src_rect.y = ( src_rect.h - dst_rect.h ) / 2; src_rect.h = dst_rect.h; } if( !printErrorIf( SDL_BlitSurface( sglyph.get(), &src_rect, surface.get(), &dst_rect ) != 0, "SDL_BlitSurface failed" ) ) { sglyph = std::move( surface ); } return CreateTextureFromSurface( renderer, sglyph ); } bool CachedTTFFont::isGlyphProvided( const std::string &ch ) const { return TTF_GlyphIsProvided( font.get(), UTF8_getch( ch ) ); } void CachedTTFFont::OutputChar( SDL_Renderer_Ptr &renderer, GeometryRenderer_Ptr &, 
const std::string &ch, const point &p, unsigned char color, const float opacity ) { key_t key {ch, static_cast<unsigned char>( color & 0xf )}; auto it = glyph_cache_map.find( key ); if( it == std::end( glyph_cache_map ) ) { cached_t new_entry { create_glyph( renderer, key.codepoints, key.color ), static_cast<int>( width * utf8_wrapper( key.codepoints ).display_width() ) }; it = glyph_cache_map.insert( std::make_pair( std::move( key ), std::move( new_entry ) ) ).first; } const cached_t &value = it->second; if( !value.texture ) { // Nothing we can do here )-: return; } SDL_Rect rect {p.x, p.y, value.width, height}; if( opacity != 1.0f ) { SDL_SetTextureAlphaMod( value.texture.get(), opacity * 255.0f ); } RenderCopy( renderer, value.texture, nullptr, &rect ); if( opacity != 1.0f ) { SDL_SetTextureAlphaMod( value.texture.get(), 255 ); } } BitmapFont::BitmapFont( SDL_Renderer_Ptr &renderer, SDL_PixelFormat_Ptr &format, const int w, const int h, const palette_array &palette, const std::string &typeface_path ) : Font( w, h, palette ) { dbg( D_INFO ) << "Loading bitmap font [" + typeface_path + "]."; SDL_Surface_Ptr asciiload = load_image( typeface_path.c_str() ); cata_assert( asciiload ); if( asciiload->w * asciiload->h < ( width * height * 256 ) ) { throw std::runtime_error( "bitmap for font is to small" ); } Uint32 key = SDL_MapRGB( asciiload->format, 0xFF, 0, 0xFF ); SDL_SetColorKey( asciiload.get(), SDL_TRUE, key ); SDL_Surface_Ptr ascii_surf[std::tuple_size<decltype( ascii )>::value]; ascii_surf[0].reset( SDL_ConvertSurface( asciiload.get(), format.get(), 0 ) ); SDL_SetSurfaceRLE( ascii_surf[0].get(), 1 ); asciiload.reset(); for( size_t a = 1; a < std::tuple_size<decltype( ascii )>::value; ++a ) { ascii_surf[a].reset( SDL_ConvertSurface( ascii_surf[0].get(), format.get(), 0 ) ); SDL_SetSurfaceRLE( ascii_surf[a].get(), 1 ); } for( size_t a = 0; a < std::tuple_size<decltype( ascii )>::value - 1; ++a ) { SDL_LockSurface( ascii_surf[a].get() ); int size = 
ascii_surf[a]->h * ascii_surf[a]->w; Uint32 *pixels = static_cast<Uint32 *>( ascii_surf[a]->pixels ); Uint32 color = ( windowsPalette[a].r << 16 ) | ( windowsPalette[a].g << 8 ) | windowsPalette[a].b; for( int i = 0; i < size; i++ ) { if( pixels[i] == 0xFFFFFF ) { pixels[i] = color; } } SDL_UnlockSurface( ascii_surf[a].get() ); } tilewidth = ascii_surf[0]->w / width; //convert ascii_surf to SDL_Texture for( size_t a = 0; a < std::tuple_size<decltype( ascii )>::value; ++a ) { ascii[a] = CreateTextureFromSurface( renderer, ascii_surf[a] ); } } void BitmapFont::draw_ascii_lines( SDL_Renderer_Ptr &renderer, GeometryRenderer_Ptr &geometry, unsigned char line_id, const point &p, unsigned char color ) const { BitmapFont *t = const_cast<BitmapFont *>( this ); switch( line_id ) { // box bottom/top side (horizontal line) case LINE_OXOX_C: t->OutputChar( renderer, geometry, 0xcd, p, color ); break; // box left/right side (vertical line) case LINE_XOXO_C: t->OutputChar( renderer, geometry, 0xba, p, color ); break; // box top left case LINE_OXXO_C: t->OutputChar( renderer, geometry, 0xc9, p, color ); break; // box top right case LINE_OOXX_C: t->OutputChar( renderer, geometry, 0xbb, p, color ); break; // box bottom right case LINE_XOOX_C: t->OutputChar( renderer, geometry, 0xbc, p, color ); break; // box bottom left case LINE_XXOO_C: t->OutputChar( renderer, geometry, 0xc8, p, color ); break; // box bottom north T (left, right, up) case LINE_XXOX_C: t->OutputChar( renderer, geometry, 0xca, p, color ); break; // box bottom east T (up, right, down) case LINE_XXXO_C: t->OutputChar( renderer, geometry, 0xcc, p, color ); break; // box bottom south T (left, right, down) case LINE_OXXX_C: t->OutputChar( renderer, geometry, 0xcb, p, color ); break; // box X (left down up right) case LINE_XXXX_C: t->OutputChar( renderer, geometry, 0xce, p, color ); break; // box bottom east T (left, down, up) case LINE_XOXX_C: t->OutputChar( renderer, geometry, 0xb9, p, color ); break; default: break; } 
} bool BitmapFont::isGlyphProvided( const std::string &ch ) const { const uint32_t t = UTF8_getch( ch ); switch( t ) { case LINE_XOXO_UNICODE: case LINE_OXOX_UNICODE: case LINE_XXOO_UNICODE: case LINE_OXXO_UNICODE: case LINE_OOXX_UNICODE: case LINE_XOOX_UNICODE: case LINE_XXXO_UNICODE: case LINE_XXOX_UNICODE: case LINE_XOXX_UNICODE: case LINE_OXXX_UNICODE: case LINE_XXXX_UNICODE: return true; default: return t < 256; } } void BitmapFont::OutputChar( SDL_Renderer_Ptr &renderer, GeometryRenderer_Ptr &geometry, const std::string &ch, const point &p, unsigned char color, const float opacity ) { const int t = UTF8_getch( ch ); BitmapFont::OutputChar( renderer, geometry, t, p, color, opacity ); } void BitmapFont::OutputChar( SDL_Renderer_Ptr &renderer, GeometryRenderer_Ptr &geometry, const int t, const point &p, unsigned char color, const float opacity ) { if( t <= 256 ) { SDL_Rect src; src.x = ( t % tilewidth ) * width; src.y = ( t / tilewidth ) * height; src.w = width; src.h = height; SDL_Rect rect; rect.x = p.x; rect.y = p.y; rect.w = width; rect.h = height; if( opacity != 1.0f ) { SDL_SetTextureAlphaMod( ascii[color].get(), opacity * 255 ); } RenderCopy( renderer, ascii[color], &src, &rect ); if( opacity != 1.0f ) { SDL_SetTextureAlphaMod( ascii[color].get(), 255 ); } } else { unsigned char uc = 0; switch( t ) { case LINE_XOXO_UNICODE: uc = LINE_XOXO_C; break; case LINE_OXOX_UNICODE: uc = LINE_OXOX_C; break; case LINE_XXOO_UNICODE: uc = LINE_XXOO_C; break; case LINE_OXXO_UNICODE: uc = LINE_OXXO_C; break; case LINE_OOXX_UNICODE: uc = LINE_OOXX_C; break; case LINE_XOOX_UNICODE: uc = LINE_XOOX_C; break; case LINE_XXXO_UNICODE: uc = LINE_XXXO_C; break; case LINE_XXOX_UNICODE: uc = LINE_XXOX_C; break; case LINE_XOXX_UNICODE: uc = LINE_XOXX_C; break; case LINE_OXXX_UNICODE: uc = LINE_OXXX_C; break; case LINE_XXXX_UNICODE: uc = LINE_XXXX_C; break; default: return; } draw_ascii_lines( renderer, geometry, uc, p, color ); } } FontFallbackList::FontFallbackList( 
SDL_Renderer_Ptr &renderer, SDL_PixelFormat_Ptr &format, const int w, const int h, const palette_array &palette, const std::vector<std::string> &typefaces, const int fontsize, const bool fontblending ) : Font( w, h, palette ) { for( const std::string &typeface : typefaces ) { std::unique_ptr<Font> font = Font::load_font( renderer, format, typeface, fontsize, w, h, palette, fontblending ); if( !font ) { throw std::runtime_error( "Cannot load font " + typeface ); } fonts.emplace_back( std::move( font ) ); } if( fonts.empty() ) { throw std::runtime_error( "Typeface list is empty" ); } } bool FontFallbackList::isGlyphProvided( const std::string & ) const { return true; } void FontFallbackList::OutputChar( SDL_Renderer_Ptr &renderer, GeometryRenderer_Ptr &geometry, const std::string &ch, const point &p, unsigned char color, const float opacity ) { auto cached = glyph_font.find( ch ); if( cached == glyph_font.end() ) { for( auto it = fonts.begin(); it != fonts.end(); ++it ) { if( std::next( it ) == fonts.end() || ( *it )->isGlyphProvided( ch ) ) { cached = glyph_font.emplace( ch, it ).first; } } } ( *cached->second )->OutputChar( renderer, geometry, ch, p, color, opacity ); } #endif // TILES
{ "pile_set_name": "Github" }
/** * @file <argos3/plugins/simulator/visualizations/qt-opengl/qtopengl_render.h> * * @author Carlo Pinciroli - <ilpincy@gmail.com> */ #ifndef QT_OPENGL_RENDER_H #define QT_OPENGL_RENDER_H namespace argos { class CQTOpenGLRender; class CQTOpenGLApplication; } #include <argos3/core/simulator/visualization/visualization.h> #include <argos3/plugins/simulator/visualizations/qt-opengl/qtopengl_main_window.h> #ifdef ARGOS_WITH_LUA #include <argos3/plugins/simulator/visualizations/qt-opengl/qtopengl_lua_main_window.h> #endif namespace argos { class CQTOpenGLRender : public CVisualization { public: CQTOpenGLRender() : m_pcApplication(NULL), m_pcMainWindow(NULL), m_ppcOptions(NULL), m_nOptionNum(0) #ifdef ARGOS_WITH_LUA , m_pcQTOpenGLLuaMainWindow(NULL), m_bLuaEditor(false) #endif {} virtual ~CQTOpenGLRender() {} virtual void Init(TConfigurationNode& t_tree); virtual void Execute(); virtual void Reset() {} virtual void Destroy(); CQTOpenGLMainWindow& GetMainWindow(); #ifdef ARGOS_WITH_LUA CQTOpenGLLuaMainWindow& GetLuaMainWindow(); #endif private: CQTOpenGLApplication* m_pcApplication; CQTOpenGLMainWindow* m_pcMainWindow; char** m_ppcOptions; SInt32 m_nOptionNum; TConfigurationNode m_tConfTree; #ifdef ARGOS_WITH_LUA CQTOpenGLLuaMainWindow* m_pcQTOpenGLLuaMainWindow; bool m_bLuaEditor; #endif }; } #endif
{ "pile_set_name": "Github" }
1762-01-01 1763-01-01 1764-01-01 1765-01-01 1766-01-01 1767-01-01 1768-01-01 1769-01-01 1770-01-01 1771-01-01 1772-01-01 1773-01-01 1774-01-01 1775-01-01 1776-01-01 1777-01-01 1778-01-01 1779-01-01 1780-01-01 1781-01-01 1782-01-01 1783-01-01 1784-01-01 1785-01-01 1786-01-01 1787-01-01 1788-01-01 1789-01-01 1790-01-01 1791-01-01 1792-01-01 1793-01-01 1794-01-01 1795-01-01 1796-01-01 1797-01-01 1798-01-01 1799-01-01 1800-01-01 1801-01-01 1802-01-01 1803-01-01 1804-01-01 1805-01-01 1806-01-01 1807-01-01 1808-01-01 1809-01-01 1810-01-01 1811-01-01 1812-01-01 1813-01-01 1814-01-01 1815-01-01 1816-01-01 1817-01-01 1818-01-01 1819-01-01 1820-01-01 1821-01-01 1822-01-01 1823-01-01 1824-01-01 1825-01-01 1826-01-01 1827-01-01 1828-01-01 1829-01-01 1830-01-01 1831-01-01 1832-01-01 1833-01-01 1834-01-01 1835-01-01 1836-01-01 1837-01-01 1838-01-01 1839-01-01 1840-01-01 1841-01-01 1842-01-01 1843-01-01 1844-01-01 1845-01-01 1846-01-01 1847-01-01 1848-01-01 1849-01-01 1850-01-01 1851-01-01 1852-01-01 1853-01-01 1854-01-01 1855-01-01 1856-01-01 1857-01-01 1858-01-01 1859-01-01 1860-01-01 1861-01-01 1862-01-01 1863-01-01 1864-01-01 1865-01-01 1866-01-01 1867-01-01 1868-01-01 1869-01-01 1870-01-01 1871-01-01 1872-01-01 1873-01-01 1874-01-01 1875-01-01 1876-01-01 1877-01-01 1878-01-01 1879-01-01 1880-01-01 1881-01-01 1882-01-01 1883-01-01 1884-01-01 1885-01-01 1886-01-01 1887-01-01 1888-01-01 1889-01-01 1890-01-01 1891-01-01 1892-01-01 1893-01-01 1894-01-01 1895-01-01 1896-01-01 1897-01-01 1898-01-01 1899-01-01 1900-01-01 1901-01-01 1902-01-01 1903-01-01 1904-01-01 1905-01-01 1906-01-01 1907-01-01 1908-01-01 1909-01-01 1910-01-01 1911-01-01 1912-01-01 1913-01-01 1914-01-01 1915-01-01 1916-01-01 1917-01-01 1918-01-01 1919-01-01 1920-01-01 1921-01-01 1922-01-01 1923-01-01 1924-01-01 1925-01-01 1926-01-01 1927-01-01 1928-01-01 1929-01-01 1930-01-01 1931-01-01 1932-01-01 1933-01-01 1934-01-01 1935-01-01 1936-01-01 1937-01-01 1938-01-01 1939-01-01 1940-01-01 1941-01-01 1942-01-01 
1943-01-01 1944-01-01 1945-01-01 1946-01-01 1947-01-01 1948-01-01 1949-01-01 1950-01-01 1951-01-01 1952-01-01 1953-01-01 1954-01-01 1955-01-01 1956-01-01 1957-01-01 1958-01-01 1959-01-01 1960-01-01 1961-01-01 1962-01-01 1963-01-01 1964-01-01 1965-01-01 1966-01-01 1967-01-01 1968-01-01 1969-01-01 1970-01-01 1971-01-01 1972-01-01 1973-01-01 1974-01-01 1975-01-01 1976-01-01 1977-01-01 1978-01-01 1979-01-01 1980-01-01 1981-01-01 1982-01-01 1983-01-01 1984-01-01 1985-01-01 1986-01-01 1987-01-01 1988-01-01 1989-01-01 1990-01-01 1991-01-01 1992-01-01 1993-01-01 1994-01-01 1995-01-01 1996-01-01 1997-01-01 1998-01-01 1999-01-01 2000-01-01 2001-01-01 2002-01-01 2003-01-01 2004-01-01 2005-01-01 2006-01-01 2007-01-01 2008-01-01 2009-01-01 2010-01-01 2011-01-01
{ "pile_set_name": "Github" }
; RUN: opt < %s -globalopt -S | FileCheck %s target datalayout = "E-p:64:64:64-a0:0:8-f32:32:32-f64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:32:64-v64:64:64-v128:128:128" %struct.foo = type { i32, i32 } @X = internal global %struct.foo* null ; CHECK: @X.f0 ; CHECK: @X.f1 define void @bar(i64 %Size) nounwind noinline { entry: %mallocsize = mul i64 8, %Size ; <i64> [#uses=1] ; CHECK: mul i64 %Size, 4 %malloccall = tail call i8* @malloc(i64 %mallocsize) ; <i8*> [#uses=1] %.sub = bitcast i8* %malloccall to %struct.foo* ; <%struct.foo*> [#uses=1] store %struct.foo* %.sub, %struct.foo** @X, align 4 ret void } declare noalias i8* @malloc(i64) define i32 @baz() nounwind readonly noinline { bb1.thread: %0 = load %struct.foo** @X, align 4 br label %bb1 bb1: ; preds = %bb1, %bb1.thread %i.0.reg2mem.0 = phi i32 [ 0, %bb1.thread ], [ %indvar.next, %bb1 ] %sum.0.reg2mem.0 = phi i32 [ 0, %bb1.thread ], [ %3, %bb1 ] %1 = getelementptr %struct.foo* %0, i32 %i.0.reg2mem.0, i32 0 %2 = load i32* %1, align 4 %3 = add i32 %2, %sum.0.reg2mem.0 %indvar.next = add i32 %i.0.reg2mem.0, 1 %exitcond = icmp eq i32 %indvar.next, 1200 br i1 %exitcond, label %bb2, label %bb1 bb2: ; preds = %bb1 ret i32 %3 }
{ "pile_set_name": "Github" }
h2 Error: #{statusCode} #{statusText} if stack code= stack
{ "pile_set_name": "Github" }
/* NSObject+CardDAV.h - this file is part of SOGo * * Copyright (C) 2007-2015 Inverse inc. * * This file is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2, or (at your option) * any later version. * * This file is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; see the file COPYING. If not, write to * the Free Software Foundation, Inc., 59 Temple Place - Suite 330, * Boston, MA 02111-1307, USA. */ #ifndef __Contacts_NSObject_CardDAV_H__ #define __Contacts_NSObject_CardDAV_H__ #import "SOGoContactFolder.h" @interface SOGoFolder (CardDAV) <SOGoContactFolder> - (id) davAddressbookQuery: (id) queryContext; @end #endif
{ "pile_set_name": "Github" }
/**************************************************************************** ** ** Copyright (C) 2016 The Qt Company Ltd. ** Contact: https://www.qt.io/licensing/ ** ** This file is part of the QtNetwork module of the Qt Toolkit. ** ** $QT_BEGIN_LICENSE:LGPL$ ** Commercial License Usage ** Licensees holding valid commercial Qt licenses may use this file in ** accordance with the commercial license agreement provided with the ** Software or, alternatively, in accordance with the terms contained in ** a written agreement between you and The Qt Company. For licensing terms ** and conditions see https://www.qt.io/terms-conditions. For further ** information use the contact form at https://www.qt.io/contact-us. ** ** GNU Lesser General Public License Usage ** Alternatively, this file may be used under the terms of the GNU Lesser ** General Public License version 3 as published by the Free Software ** Foundation and appearing in the file LICENSE.LGPL3 included in the ** packaging of this file. Please review the following information to ** ensure the GNU Lesser General Public License version 3 requirements ** will be met: https://www.gnu.org/licenses/lgpl-3.0.html. ** ** GNU General Public License Usage ** Alternatively, this file may be used under the terms of the GNU ** General Public License version 2.0 or (at your option) the GNU General ** Public license version 3 or any later version approved by the KDE Free ** Qt Foundation. The licenses are as published by the Free Software ** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3 ** included in the packaging of this file. Please review the following ** information to ensure the GNU General Public License requirements will ** be met: https://www.gnu.org/licenses/gpl-2.0.html and ** https://www.gnu.org/licenses/gpl-3.0.html. 
** ** $QT_END_LICENSE$ ** ****************************************************************************/ #include "qabstractsocketengine_p.h" #include "qnativesocketengine_p.h" #include "qmutex.h" #include "qnetworkproxy.h" QT_BEGIN_NAMESPACE class QSocketEngineHandlerList : public QList<QSocketEngineHandler*> { public: QMutex mutex; }; Q_GLOBAL_STATIC(QSocketEngineHandlerList, socketHandlers) QSocketEngineHandler::QSocketEngineHandler() { if (!socketHandlers()) return; QMutexLocker locker(&socketHandlers()->mutex); socketHandlers()->prepend(this); } QSocketEngineHandler::~QSocketEngineHandler() { if (!socketHandlers()) return; QMutexLocker locker(&socketHandlers()->mutex); socketHandlers()->removeAll(this); } QAbstractSocketEnginePrivate::QAbstractSocketEnginePrivate() : socketError(QAbstractSocket::UnknownSocketError) , hasSetSocketError(false) , socketErrorString(QLatin1String(QT_TRANSLATE_NOOP(QSocketLayer, "Unknown error"))) , socketState(QAbstractSocket::UnconnectedState) , socketType(QAbstractSocket::UnknownSocketType) , socketProtocol(QAbstractSocket::UnknownNetworkLayerProtocol) , localPort(0) , peerPort(0) , inboundStreamCount(0) , outboundStreamCount(0) , receiver(nullptr) { } QAbstractSocketEngine::QAbstractSocketEngine(QObject *parent) : QObject(*new QAbstractSocketEnginePrivate(), parent) { } QAbstractSocketEngine::QAbstractSocketEngine(QAbstractSocketEnginePrivate &dd, QObject* parent) : QObject(dd, parent) { } QAbstractSocketEngine *QAbstractSocketEngine::createSocketEngine(QAbstractSocket::SocketType socketType, const QNetworkProxy &proxy, QObject *parent) { #ifndef QT_NO_NETWORKPROXY // proxy type must have been resolved by now if (proxy.type() == QNetworkProxy::DefaultProxy) return nullptr; #endif QMutexLocker locker(&socketHandlers()->mutex); for (int i = 0; i < socketHandlers()->size(); i++) { if (QAbstractSocketEngine *ret = socketHandlers()->at(i)->createSocketEngine(socketType, proxy, parent)) return ret; } #ifndef QT_NO_NETWORKPROXY // only 
NoProxy can have reached here if (proxy.type() != QNetworkProxy::NoProxy) return nullptr; #endif return new QNativeSocketEngine(parent); } QAbstractSocketEngine *QAbstractSocketEngine::createSocketEngine(qintptr socketDescripter, QObject *parent) { QMutexLocker locker(&socketHandlers()->mutex); for (int i = 0; i < socketHandlers()->size(); i++) { if (QAbstractSocketEngine *ret = socketHandlers()->at(i)->createSocketEngine(socketDescripter, parent)) return ret; } return new QNativeSocketEngine(parent); } QAbstractSocket::SocketError QAbstractSocketEngine::error() const { return d_func()->socketError; } QString QAbstractSocketEngine::errorString() const { return d_func()->socketErrorString; } void QAbstractSocketEngine::setError(QAbstractSocket::SocketError error, const QString &errorString) const { Q_D(const QAbstractSocketEngine); d->socketError = error; d->socketErrorString = errorString; } void QAbstractSocketEngine::setReceiver(QAbstractSocketEngineReceiver *receiver) { d_func()->receiver = receiver; } void QAbstractSocketEngine::readNotification() { if (QAbstractSocketEngineReceiver *receiver = d_func()->receiver) receiver->readNotification(); } void QAbstractSocketEngine::writeNotification() { if (QAbstractSocketEngineReceiver *receiver = d_func()->receiver) receiver->writeNotification(); } void QAbstractSocketEngine::exceptionNotification() { if (QAbstractSocketEngineReceiver *receiver = d_func()->receiver) receiver->exceptionNotification(); } void QAbstractSocketEngine::closeNotification() { if (QAbstractSocketEngineReceiver *receiver = d_func()->receiver) receiver->closeNotification(); } void QAbstractSocketEngine::connectionNotification() { if (QAbstractSocketEngineReceiver *receiver = d_func()->receiver) receiver->connectionNotification(); } #ifndef QT_NO_NETWORKPROXY void QAbstractSocketEngine::proxyAuthenticationRequired(const QNetworkProxy &proxy, QAuthenticator *authenticator) { if (QAbstractSocketEngineReceiver *receiver = d_func()->receiver) 
receiver->proxyAuthenticationRequired(proxy, authenticator); } #endif QAbstractSocket::SocketState QAbstractSocketEngine::state() const { return d_func()->socketState; } void QAbstractSocketEngine::setState(QAbstractSocket::SocketState state) { d_func()->socketState = state; } QAbstractSocket::SocketType QAbstractSocketEngine::socketType() const { return d_func()->socketType; } void QAbstractSocketEngine::setSocketType(QAbstractSocket::SocketType socketType) { d_func()->socketType = socketType; } QAbstractSocket::NetworkLayerProtocol QAbstractSocketEngine::protocol() const { return d_func()->socketProtocol; } void QAbstractSocketEngine::setProtocol(QAbstractSocket::NetworkLayerProtocol protocol) { d_func()->socketProtocol = protocol; } QHostAddress QAbstractSocketEngine::localAddress() const { return d_func()->localAddress; } void QAbstractSocketEngine::setLocalAddress(const QHostAddress &address) { d_func()->localAddress = address; } quint16 QAbstractSocketEngine::localPort() const { return d_func()->localPort; } void QAbstractSocketEngine::setLocalPort(quint16 port) { d_func()->localPort = port; } QHostAddress QAbstractSocketEngine::peerAddress() const { return d_func()->peerAddress; } void QAbstractSocketEngine::setPeerAddress(const QHostAddress &address) { d_func()->peerAddress = address; } quint16 QAbstractSocketEngine::peerPort() const { return d_func()->peerPort; } void QAbstractSocketEngine::setPeerPort(quint16 port) { d_func()->peerPort = port; } int QAbstractSocketEngine::inboundStreamCount() const { return d_func()->inboundStreamCount; } int QAbstractSocketEngine::outboundStreamCount() const { return d_func()->outboundStreamCount; } QT_END_NAMESPACE #include "moc_qabstractsocketengine_p.cpp"
{ "pile_set_name": "Github" }
"use strict"; // This icon file is generated automatically. // tslint:disable Object.defineProperty(exports, "__esModule", { value: true }); var ToolFilled = { "name": "tool", "theme": "filled", "icon": { "tag": "svg", "attrs": { "viewBox": "64 64 896 896", "focusable": "false" }, "children": [{ "tag": "path", "attrs": { "d": "M865.3 244.7c-.3-.3-61.1 59.8-182.1 180.6l-84.9-84.9 180.9-180.9c-95.2-57.3-217.5-42.6-296.8 36.7A244.42 244.42 0 00419 432l1.8 6.7-283.5 283.4c-6.2 6.2-6.2 16.4 0 22.6l141.4 141.4c6.2 6.2 16.4 6.2 22.6 0l283.3-283.3 6.7 1.8c83.7 22.3 173.6-.9 236-63.3 79.4-79.3 94.1-201.6 38-296.6z" } }] } }; exports.default = ToolFilled;
{ "pile_set_name": "Github" }
//@private export class EasySAXParser { constructor(); parse(xml: string): void; on(name: string, cb: Function): void; ns(root: string, ns: any): void; public angularSyntax: boolean; }
{ "pile_set_name": "Github" }
/* * LeadTek Y04G0051 remote controller keytable * * Copyright (C) 2010 Antti Palosaari <crope@iki.fi> * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. */ #include <media/rc-map.h> #include <linux/module.h> static struct rc_map_table leadtek_y04g0051[] = { { 0x0300, KEY_POWER2 }, { 0x0303, KEY_SCREEN }, { 0x0304, KEY_RIGHT }, { 0x0305, KEY_1 }, { 0x0306, KEY_2 }, { 0x0307, KEY_3 }, { 0x0308, KEY_LEFT }, { 0x0309, KEY_4 }, { 0x030a, KEY_5 }, { 0x030b, KEY_6 }, { 0x030c, KEY_UP }, { 0x030d, KEY_7 }, { 0x030e, KEY_8 }, { 0x030f, KEY_9 }, { 0x0310, KEY_DOWN }, { 0x0311, KEY_AGAIN }, { 0x0312, KEY_0 }, { 0x0313, KEY_OK }, /* 1st ok */ { 0x0314, KEY_MUTE }, { 0x0316, KEY_OK }, /* 2nd ok */ { 0x031e, KEY_VIDEO }, /* 2nd video */ { 0x031b, KEY_AUDIO }, { 0x031f, KEY_TEXT }, { 0x0340, KEY_SLEEP }, { 0x0341, KEY_DOT }, { 0x0342, KEY_REWIND }, { 0x0343, KEY_PLAY }, { 0x0344, KEY_FASTFORWARD }, { 0x0345, KEY_TIME }, { 0x0346, KEY_STOP }, /* 2nd stop */ { 0x0347, KEY_RECORD }, { 0x0348, KEY_CAMERA }, { 0x0349, KEY_ESC }, { 0x034a, KEY_NEW }, { 0x034b, KEY_RED }, { 0x034c, KEY_GREEN }, { 0x034d, KEY_YELLOW }, { 0x034e, KEY_BLUE }, { 0x034f, KEY_MENU }, { 0x0350, KEY_STOP }, /* 1st stop */ { 0x0351, KEY_CHANNEL }, { 0x0352, KEY_VIDEO }, /* 1st video */ { 0x0353, KEY_EPG }, { 0x0354, KEY_PREVIOUS }, { 0x0355, KEY_NEXT }, { 0x0356, KEY_TV 
}, { 0x035a, KEY_VOLUMEDOWN }, { 0x035b, KEY_CHANNELUP }, { 0x035e, KEY_VOLUMEUP }, { 0x035f, KEY_CHANNELDOWN }, }; static struct rc_map_list leadtek_y04g0051_map = { .map = { .scan = leadtek_y04g0051, .size = ARRAY_SIZE(leadtek_y04g0051), .rc_type = RC_TYPE_NEC, .name = RC_MAP_LEADTEK_Y04G0051, } }; static int __init init_rc_map_leadtek_y04g0051(void) { return rc_map_register(&leadtek_y04g0051_map); } static void __exit exit_rc_map_leadtek_y04g0051(void) { rc_map_unregister(&leadtek_y04g0051_map); } module_init(init_rc_map_leadtek_y04g0051) module_exit(exit_rc_map_leadtek_y04g0051) MODULE_LICENSE("GPL"); MODULE_AUTHOR("Antti Palosaari <crope@iki.fi>");
{ "pile_set_name": "Github" }
// Copyright (c) Josef Pihrt. All rights reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System.Collections.Generic; using System.Diagnostics; using System.Linq; using System.Text; using System.Threading; using System.Threading.Tasks; using Microsoft.CodeAnalysis; using Microsoft.CodeAnalysis.CSharp; using Microsoft.CodeAnalysis.CSharp.Syntax; using Roslynator.CSharp; using Roslynator.CSharp.Analysis.Documentation; namespace Roslynator.CSharp.Refactorings.Documentation { internal abstract class DocumentationCommentRefactoring<TNode> where TNode : SyntaxNode { public abstract XmlTag Tag { get; } public abstract bool ShouldBeBefore(XmlTag tag); public abstract string GetName(TNode node); public abstract ElementInfo<TNode> CreateInfo(TNode node, int insertIndex, NewLinePosition newLinePosition); public async Task<Document> RefactorAsync( Document document, DocumentationCommentTriviaSyntax comment, CancellationToken cancellationToken = default) { cancellationToken.ThrowIfCancellationRequested(); MemberDeclarationSyntax memberDeclaration = comment.FirstAncestor<MemberDeclarationSyntax>(); SeparatedSyntaxList<TNode> typeParameters = GetSyntaxList(memberDeclaration); List<ElementInfo<TNode>> infos = GetElementInfos(comment, typeParameters); string newTrivia = GetNewTrivia(comment, infos); SyntaxTriviaList triviaList = SyntaxFactory.ParseLeadingTrivia(newTrivia); if (triviaList.Any()) { SyntaxTrivia firstTrivia = triviaList[0]; if (firstTrivia.HasStructure && (firstTrivia.GetStructure() is DocumentationCommentTriviaSyntax newComment)) { newComment = newComment.WithFormatterAnnotation(); return await document.ReplaceNodeAsync(comment, newComment, cancellationToken).ConfigureAwait(false); } } Debug.Fail(""); return document; } protected abstract SeparatedSyntaxList<TNode> GetSyntaxList(SyntaxNode node); private string GetNewTrivia( DocumentationCommentTriviaSyntax comment, List<ElementInfo<TNode>> 
elementInfos) { var sb = new StringBuilder(); string text = comment.ToFullString(); int start = comment.FullSpan.Start; int startIndex = 0; string elementName = XmlTagMapper.GetName(Tag); foreach (IGrouping<int, ElementInfo<TNode>> grouping in elementInfos .OrderBy(f => f.InsertIndex) .GroupBy(f => f.InsertIndex)) { int endIndex = grouping.Key - start; sb.Append(text, startIndex, endIndex - startIndex); foreach (ElementInfo<TNode> elementInfo in grouping) { if (elementInfo.NewLinePosition == NewLinePosition.Beginning) sb.AppendLine(); sb.Append("/// <") .Append(elementName) .Append(" name=\"") .Append(elementInfo.Name) .Append("\"></") .Append(elementName) .Append(">"); if (elementInfo.NewLinePosition == NewLinePosition.End) sb.AppendLine(); } startIndex = endIndex; } sb.Append(text, startIndex, text.Length - startIndex); return sb.ToString(); } private List<ElementInfo<TNode>> GetElementInfos( DocumentationCommentTriviaSyntax comment, SeparatedSyntaxList<TNode> nodes) { Dictionary<string, XmlElementSyntax> dic = CreateNameElementMap(comment); var elementInfos = new List<ElementInfo<TNode>>(); for (int i = 0; i < nodes.Count; i++) { if (!dic.ContainsKey(GetName(nodes[i]))) { int insertIndex = -1; var newLinePosition = NewLinePosition.Beginning; for (int j = i - 1; j >= 0; j--) { if (dic.TryGetValue(GetName(nodes[j]), out XmlElementSyntax element)) { insertIndex = element.FullSpan.End; break; } } if (insertIndex == -1) { for (int j = i + 1; j < nodes.Count; j++) { if (dic.TryGetValue(GetName(nodes[j]), out XmlElementSyntax element)) { XmlElementSyntax previousElement = GetPreviousElement(comment, element); if (previousElement != null) { insertIndex = previousElement.FullSpan.End; } else { insertIndex = comment.FullSpan.Start; newLinePosition = NewLinePosition.End; } break; } } } if (insertIndex == -1) { insertIndex = GetDefaultIndex(comment); if (insertIndex == comment.FullSpan.Start) newLinePosition = NewLinePosition.End; } ElementInfo<TNode> elementInfo = 
CreateInfo(nodes[i], insertIndex, newLinePosition); elementInfos.Add(elementInfo); } } return elementInfos; } private static XmlElementSyntax GetPreviousElement(DocumentationCommentTriviaSyntax comment, XmlElementSyntax element) { SyntaxList<XmlNodeSyntax> content = comment.Content; int index = content.IndexOf(element); for (int i = index - 1; i >= 0; i--) { if (content[i].IsKind(SyntaxKind.XmlElement)) return (XmlElementSyntax)content[i]; } return null; } public Dictionary<string, XmlElementSyntax> CreateNameElementMap(DocumentationCommentTriviaSyntax comment) { var dic = new Dictionary<string, XmlElementSyntax>(); foreach (XmlElementSyntax element in comment.Elements(Tag)) { string name = element.GetAttributeValue("name"); if (!dic.ContainsKey(name)) dic.Add(name, element); } return dic; } private int GetDefaultIndex(DocumentationCommentTriviaSyntax comment) { SyntaxList<XmlNodeSyntax> content = comment.Content; for (int i = content.Count - 1; i >= 0; i--) { if (content[i] is XmlElementSyntax xmlElement) { XmlTag tag = xmlElement.GetTag(); if (tag == Tag || ShouldBeBefore(tag)) { return content[i].FullSpan.End; } } } return comment.FullSpan.Start; } } }
{ "pile_set_name": "Github" }
import unittest.mock as mock import cv2 import numpy as np from apps.rendering.benchmark import renderingbenchmark from golem.testutils import TempDirFixture class TestBenchmark(TempDirFixture): def setUp(self): super(self.__class__, self).setUp() self.benchmark = renderingbenchmark.RenderingBenchmark() def test_verify_img(self): filepath = self.temp_file_name("img.png") resolution = self.benchmark.task_definition.resolution with open(filepath, "wb"): img = np.zeros((resolution[1], resolution[0], 3), np.uint8) cv2.imwrite(filepath, img) self.assertTrue(self.benchmark.verify_img(filepath)) with open(filepath, "wb"): img = np.zeros((resolution[1]+1, resolution[0], 3), np.uint8) cv2.imwrite(filepath, img) self.assertFalse(self.benchmark.verify_img(filepath)) def test_broken_image(self): filepath = self.temp_file_name("broken.png") with open(filepath, "w") as f: f.write('notanimage,notanimageatall') with mock.patch('apps.rendering.benchmark.renderingbenchmark.logger') \ as m: self.assertFalse(self.benchmark.verify_img(filepath)) m.exception.assert_called_once() def test_verify_log(self): def verify_log(file_content): filepath = self.temp_file_name("log.log") fd = open(filepath, "w") fd.write(file_content) fd.close() return self.benchmark.verify_log(filepath) for fc in ["Error", "ERROR", "error", "blaErRor", "bla ERRor bla"]: self.assertFalse(verify_log(fc)) for fc in ["123", "erro r", "asd sda", "sad 12 sad;"]: self.assertTrue(verify_log(fc)) def test_verify_result(self): """Wether verify_result calls correct methods.""" with mock.patch.multiple(self.benchmark, verify_img=mock.DEFAULT, verify_log=mock.DEFAULT) as mocks: self.assertTrue(self.benchmark.verify_result(['a.txt', 'b.gif'])) self.assertEqual(mocks['verify_img'].call_count, 0) self.assertEqual(mocks['verify_log'].call_count, 0) for m in mocks.values(): m.return_value = True paths = [ '/mnt/dummy/image.png', '../important.log', ] self.assertTrue(self.benchmark.verify_result(paths)) 
mocks['verify_img'].assert_called_once_with(paths[0]) mocks['verify_log'].assert_called_once_with(paths[1]) for m in mocks.values(): m.reset_mock() m.return_value = False self.assertFalse(self.benchmark.verify_result([paths[0]])) self.assertFalse(self.benchmark.verify_result([paths[1]])) mocks['verify_img'].assert_called_once_with(paths[0]) mocks['verify_log'].assert_called_once_with(paths[1]) def test_find_resources(self): """Simplistic test of basic implementation.""" self.assertEqual(self.benchmark.find_resources(), set())
{ "pile_set_name": "Github" }
'use strict'; Object.defineProperty(exports, '__esModule', { value: true }); var prefix = 'fab'; var iconName = 'cc-mastercard'; var width = 576; var height = 512; var ligatures = []; var unicode = 'f1f1'; var svgPathData = 'M482.9 410.3c0 6.8-4.6 11.7-11.2 11.7-6.8 0-11.2-5.2-11.2-11.7 0-6.5 4.4-11.7 11.2-11.7 6.6 0 11.2 5.2 11.2 11.7zm-310.8-11.7c-7.1 0-11.2 5.2-11.2 11.7 0 6.5 4.1 11.7 11.2 11.7 6.5 0 10.9-4.9 10.9-11.7-.1-6.5-4.4-11.7-10.9-11.7zm117.5-.3c-5.4 0-8.7 3.5-9.5 8.7h19.1c-.9-5.7-4.4-8.7-9.6-8.7zm107.8.3c-6.8 0-10.9 5.2-10.9 11.7 0 6.5 4.1 11.7 10.9 11.7 6.8 0 11.2-4.9 11.2-11.7 0-6.5-4.4-11.7-11.2-11.7zm105.9 26.1c0 .3.3.5.3 1.1 0 .3-.3.5-.3 1.1-.3.3-.3.5-.5.8-.3.3-.5.5-1.1.5-.3.3-.5.3-1.1.3-.3 0-.5 0-1.1-.3-.3 0-.5-.3-.8-.5-.3-.3-.5-.5-.5-.8-.3-.5-.3-.8-.3-1.1 0-.5 0-.8.3-1.1 0-.5.3-.8.5-1.1.3-.3.5-.3.8-.5.5-.3.8-.3 1.1-.3.5 0 .8 0 1.1.3.5.3.8.3 1.1.5s.2.6.5 1.1zm-2.2 1.4c.5 0 .5-.3.8-.3.3-.3.3-.5.3-.8 0-.3 0-.5-.3-.8-.3 0-.5-.3-1.1-.3h-1.6v3.5h.8V426h.3l1.1 1.4h.8l-1.1-1.3zM576 81v352c0 26.5-21.5 48-48 48H48c-26.5 0-48-21.5-48-48V81c0-26.5 21.5-48 48-48h480c26.5 0 48 21.5 48 48zM64 220.6c0 76.5 62.1 138.5 138.5 138.5 27.2 0 53.9-8.2 76.5-23.1-72.9-59.3-72.4-171.2 0-230.5-22.6-15-49.3-23.1-76.5-23.1-76.4-.1-138.5 62-138.5 138.2zm224 108.8c70.5-55 70.2-162.2 0-217.5-70.2 55.3-70.5 162.6 0 217.5zm-142.3 76.3c0-8.7-5.7-14.4-14.7-14.7-4.6 0-9.5 1.4-12.8 6.5-2.4-4.1-6.5-6.5-12.2-6.5-3.8 0-7.6 1.4-10.6 5.4V392h-8.2v36.7h8.2c0-18.9-2.5-30.2 9-30.2 10.2 0 8.2 10.2 8.2 30.2h7.9c0-18.3-2.5-30.2 9-30.2 10.2 0 8.2 10 8.2 30.2h8.2v-23zm44.9-13.7h-7.9v4.4c-2.7-3.3-6.5-5.4-11.7-5.4-10.3 0-18.2 8.2-18.2 19.3 0 11.2 7.9 19.3 18.2 19.3 5.2 0 9-1.9 11.7-5.4v4.6h7.9V392zm40.5 25.6c0-15-22.9-8.2-22.9-15.2 0-5.7 11.9-4.8 18.5-1.1l3.3-6.5c-9.4-6.1-30.2-6-30.2 8.2 0 14.3 22.9 8.3 22.9 15 0 6.3-13.5 5.8-20.7.8l-3.5 6.3c11.2 7.6 32.6 6 32.6-7.5zm35.4 9.3l-2.2-6.8c-3.8 2.1-12.2 4.4-12.2-4.1v-16.6h13.1V392h-13.1v-11.2h-8.2V392h-7.6v7.3h7.6V416c0 17.6 17.3 14.4 22.6 
10.9zm13.3-13.4h27.5c0-16.2-7.4-22.6-17.4-22.6-10.6 0-18.2 7.9-18.2 19.3 0 20.5 22.6 23.9 33.8 14.2l-3.8-6c-7.8 6.4-19.6 5.8-21.9-4.9zm59.1-21.5c-4.6-2-11.6-1.8-15.2 4.4V392h-8.2v36.7h8.2V408c0-11.6 9.5-10.1 12.8-8.4l2.4-7.6zm10.6 18.3c0-11.4 11.6-15.1 20.7-8.4l3.8-6.5c-11.6-9.1-32.7-4.1-32.7 15 0 19.8 22.4 23.8 32.7 15l-3.8-6.5c-9.2 6.5-20.7 2.6-20.7-8.6zm66.7-18.3H408v4.4c-8.3-11-29.9-4.8-29.9 13.9 0 19.2 22.4 24.7 29.9 13.9v4.6h8.2V392zm33.7 0c-2.4-1.2-11-2.9-15.2 4.4V392h-7.9v36.7h7.9V408c0-11 9-10.3 12.8-8.4l2.4-7.6zm40.3-14.9h-7.9v19.3c-8.2-10.9-29.9-5.1-29.9 13.9 0 19.4 22.5 24.6 29.9 13.9v4.6h7.9v-51.7zm7.6-75.1v4.6h.8V302h1.9v-.8h-4.6v.8h1.9zm6.6 123.8c0-.5 0-1.1-.3-1.6-.3-.3-.5-.8-.8-1.1-.3-.3-.8-.5-1.1-.8-.5 0-1.1-.3-1.6-.3-.3 0-.8.3-1.4.3-.5.3-.8.5-1.1.8-.5.3-.8.8-.8 1.1-.3.5-.3 1.1-.3 1.6 0 .3 0 .8.3 1.4 0 .3.3.8.8 1.1.3.3.5.5 1.1.8.5.3 1.1.3 1.4.3.5 0 1.1 0 1.6-.3.3-.3.8-.5 1.1-.8.3-.3.5-.8.8-1.1.3-.6.3-1.1.3-1.4zm3.2-124.7h-1.4l-1.6 3.5-1.6-3.5h-1.4v5.4h.8v-4.1l1.6 3.5h1.1l1.4-3.5v4.1h1.1v-5.4zm4.4-80.5c0-76.2-62.1-138.3-138.5-138.3-27.2 0-53.9 8.2-76.5 23.1 72.1 59.3 73.2 171.5 0 230.5 22.6 15 49.5 23.1 76.5 23.1 76.4.1 138.5-61.9 138.5-138.4z'; exports.definition = { prefix: prefix, iconName: iconName, icon: [ width, height, ligatures, unicode, svgPathData ]}; exports.faCcMastercard = exports.definition; exports.prefix = prefix; exports.iconName = iconName; exports.width = width; exports.height = height; exports.ligatures = ligatures; exports.unicode = unicode; exports.svgPathData = svgPathData;
{ "pile_set_name": "Github" }
#N canvas 98 339 450 300 10; #X obj 63 113 keynameonly a; #X obj 173 113 keynameonly b; #X obj 283 113 keynameonly c; #X floatatom 63 137 5 0 0 0 - - -; #X floatatom 173 137 5 0 0 0 - - -; #X floatatom 283 137 5 0 0 0 - - -; #X obj 63 162 bng 15 250 50 0 empty empty empty 17 7 0 10 -262144 -1 -1; #X obj 173 162 bng 15 250 50 0 empty empty empty 17 7 0 10 -262144 -1 -1; #X obj 283 162 bng 15 250 50 0 empty empty empty 17 7 0 10 -262144 -1 -1; #X text 16 15 keynameonly - only listen for keystrokes from one key ; #X text 279 271 2008 Luke Iannini; #X connect 0 0 3 0; #X connect 1 0 4 0; #X connect 2 0 5 0; #X connect 3 0 6 0; #X connect 4 0 7 0; #X connect 5 0 8 0;
{ "pile_set_name": "Github" }
// no PCH // RUN: %clang_cc1 -fopenmp -fnoopenmp-use-tls -emit-llvm -include %s -include %s %s -o - | FileCheck %s // with PCH // RUN: %clang_cc1 -fopenmp -fnoopenmp-use-tls -emit-llvm -chain-include %s -chain-include %s %s -o - | FileCheck %s // no PCH // RUN: %clang_cc1 -fopenmp -emit-llvm -include %s -include %s %s -o - | FileCheck %s -check-prefix=CHECK-TLS-1 // RUN: %clang_cc1 -fopenmp -emit-llvm -include %s -include %s %s -o - | FileCheck %s -check-prefix=CHECK-TLS-2 // with PCH // RUN: %clang_cc1 -fopenmp -emit-llvm -chain-include %s -chain-include %s %s -o - | FileCheck %s -check-prefix=CHECK-TLS-1 // RUN: %clang_cc1 -fopenmp -emit-llvm -chain-include %s -chain-include %s %s -o - | FileCheck %s -check-prefix=CHECK-TLS-2 #if !defined(PASS1) #define PASS1 extern "C" int* malloc (int size); int *a = malloc(20); #elif !defined(PASS2) #define PASS2 #pragma omp threadprivate(a) #else // CHECK: call {{.*}} @__kmpc_threadprivate_register( // CHECK-TLS-1: @{{a|\"\\01\?a@@3PE?AHE?A\"}} = {{.*}}thread_local {{.*}}global {{.*}}i32* // CHECK-LABEL: foo // CHECK-TLS-LABEL: foo int foo() { return *a; // CHECK: call {{.*}} @__kmpc_global_thread_num( // CHECK: call {{.*}} @__kmpc_threadprivate_cached( // CHECK-TLS-1: call {{.*}} @{{_ZTW1a|\"\\01\?\?__Ea@@YAXXZ\"}}() } // CHECK-TLS-2: define {{.*}} @{{_ZTW1a|\"\\01\?\?__Ea@@YAXXZ\"}}() #endif
{ "pile_set_name": "Github" }
#!/usr/bin/env ruby -wW1 $: << '.' $: << '../lib' if __FILE__ == $0 while (i = ARGV.index('-I')) x,path = ARGV.slice!(i, 2) $: << path end end require 'optparse' require 'stringio' require 'multi_xml' begin require 'libxml' rescue Exception => e end begin require 'nokogiri' rescue Exception => e end begin require 'ox' rescue Exception => e end $verbose = 0 $parsers = [] $iter = 10 opts = OptionParser.new opts.on("-v", "increase verbosity") { $verbose += 1 } opts.on("-p", "--parser [String]", String, "parser to test") { |p| $parsers = [p] } opts.on("-i", "--iterations [Int]", Integer, "iterations") { |i| $iter = i } opts.on("-h", "--help", "Show this display") { puts opts; Process.exit!(0) } files = opts.parse(ARGV) if $parsers.empty? $parsers << 'libxml' if defined?(::LibXML) $parsers << 'nokogiri' if defined?(::Nokogiri) $parsers << 'ox' if defined?(::Ox) end files.each do |filename| times = { } xml = File.read(filename) $parsers.each do |p| MultiXml.parser = p start = Time.now $iter.times do |i| io = StringIO.new(xml) MultiXml.parse(io) end dt = Time.now - start times[p] = Time.now - start end times.each do |p,t| puts "%8s took %0.3f seconds to parse %s %d times." % [p, t, filename, $iter] end end
{ "pile_set_name": "Github" }
<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> <html> <head> <title>Recombine Tags And Decorations</title> <script src="extractSourceSpans.js"></script> <script src="numberLines.js"></script> <script src="recombineTagsAndDecorations.js"></script> <script src="http://github.com/douglascrockford/JSON-js/raw/master/json2.js"></script> <link rel="stylesheet" href="../src/prettify.css" type="text/css" /> <style> .ok { background: #dfd } .error, .failure { background: #fdd } .error { white-space: pre } td { font-family: monospace } tr { vertical-align: top } </style> </head> <body> <h1>Recombine Tags And Decorations</h1> <table border="1" cellpadding="2" cellspacing="0"> <tr><th colspan="4">Test Single Decoration</th></tr> <tr> <td><code class="testinput">"Hello, World!"</code></td> <td class="decorations">[0, 'str']</td> <td><code><span class="str">"Hello, World!"</span></code></td> </tr> <tr><th colspan="4">Test Single Span</th></tr> <tr> <td><code class="testinput">print "Hello, &lt;World&gt;!";</code></td> <td class="decorations">[0, 'kwd', 5, 'pln', 6, 'str', 14, 'tag', 21, 'str', 23, 'pun']</td> <td><code><span class="kwd">print</span><span class="pln"> </span><span class="str">"Hello, </span><span class="tag">&lt;World&gt;</span><span class="str">!"</span><span class="pun">;</span></code></td> </tr> <tr><th colspan="4">Test Interleaved</th></tr> <tr> <td><code class="testinput">print "Hello, &lt;<b>World</b>&gt;!";</code></td> <td class="decorations">[0, 'kwd', 5, 'pln', 6, 'str', 14, 'tag', 21, 'str', 23, 'pun']</td> <td><code><span class="kwd">print</span><span class="pln"> </span><span class="str">"Hello, </span><span class="tag">&lt;</span><b><span class="tag">World</span></b><span class="tag">&gt;</span><span class="str">!"</span><span class="pun">;</span></code></td> </tr> </table> <script> if (!document.body.getElementsByClassName) { document.body.getElementsByClassName = function (className) { className = className.replace(/\s+/g, ' 
').replace(/^\s*|\s*$/g, ' '); var results = []; function walk(node) { if (node.nodeType !== 1) { return; } // This test should be order-insensitive. if ((' ' + node.className + ' ').indexOf(className) >= 0) { results[results.length] = node; } for (var child = node.firstChild; child; child = child.nextSibling) { walk(child); } } walk(document.body); return results; }; } setTimeout(function () { var testInputs = Array.prototype.slice.call( document.body.getElementsByClassName('testinput'), 0); for (var i = 0, n = testInputs.length; i < n; ++i) { var testInput = testInputs[i]; var decorationsNode = testInput.parentNode.nextSibling; while (decorationsNode.nodeType !== 1) { decorationsNode = decorationsNode.nextSibling; } var testResult = decorationsNode.nextSibling; while (testResult.nodeType !== 1) { testResult = testResult.nextSibling; } var actual = document.createElement('TD'); testResult.parentNode.appendChild(actual); var clone = testInput.cloneNode(true); clone.className = ''; // IE clone.removeAttribute('class'); // Not IE. actual.appendChild(clone); var job = extractSourceSpans(clone); job.decorations = eval(decorationsNode.innerText || decorationsNode.textContent); try { recombineTagsAndDecorations(job); var passed = testResult.innerHTML === actual.innerHTML; if (!passed) { console.log(JSON.stringify(testResult.innerHTML) + ' !==\n' + JSON.stringify(actual.innerHTML)); } actual.className = passed ? 'ok' : 'failure'; } catch (ex) { actual.className = 'error'; actual.appendChild(document.createTextNode( 'Error: ' + (ex.message || ex) + '\n' + ex.stack)); } actual.className += ' actual'; } }, 0)</script> <hr> <address></address> <!-- hhmts start --> Last modified: Tue Mar 29 10:41:34 PDT 2011 <!-- hhmts end --> </body> </html>
{ "pile_set_name": "Github" }
#include "../../src/gui/dialogs/qabstractpagesetupdialog.h"
{ "pile_set_name": "Github" }
/******************************* User Overrides *******************************/
{ "pile_set_name": "Github" }
diff -uNr stellarium-0.8.0-orig/src/s_gui.h stellarium-0.8.0/src/s_gui.h --- stellarium-0.8.0-orig/src/s_gui.h 2006-04-29 17:13:47.000000000 +0200 +++ stellarium-0.8.0/src/s_gui.h 2006-05-06 23:31:25.000000000 +0200 @@ -510,7 +510,7 @@ private: callback<void> onChangeCallback; - void ScrollBar::adjustSize(void); + void adjustSize(void); Button scrollBt; bool vertical; unsigned int scrollOffset, scrollSize; @@ -837,7 +837,7 @@ class City { public: - City::City(const string& _name = "", const string& _state = "", const string& _country = "", + City(const string& _name = "", const string& _state = "", const string& _country = "", double _longitude = 0.f, double _latitude = 0.f, float zone = 0, int _showatzoom = 0, int _altitude = 0); void addCity(const string& _name = "", const string& _state = "", const string& _country = "", double _longitude = 0.f, double _latitude = 0.f, float zone = 0, int _showatzoom = 0, int _altitude = 0);
{ "pile_set_name": "Github" }
E 1532126087 tags: Greedy, Array, DP, Sequence DP, Subarray time: O(m) space: O(1) 给一串数组, unsorted, can have negative/positive num. 找数组中间 subarray 数字之和的最小值 #### DP - 看到 min value, 至少考虑dp: - Consider last num: min sum will be (preMinSum + curr, or curr) - Use preMinSum to cache previouly calcualted min sum, also compare with +curr. - Have a global min to track: because the preMinSum can be dis-continuous. - 也可以写成 dp[i] 但是没什么必要 ``` /* Given an array of integers, find the subarray with smallest sum. Return the sum of the subarray. Example For [1, -1, -2, 1], return -3 Note The subarray should contain at least one integer. Tags Expand Greedy LintCode Copyright Subarray Array */ /* DP, Sequence DP Consider last num: min sum will be (preMinSum + curr, or curr) Use preMinSum to cache previouly calcualted min sum, also compare with +curr. Have a global min to track: because the preMinSum can be dis-continuous. */ public class Solution { public int minSubArray(List<Integer> nums) { if (nums == null || nums.size() == 0) return Integer.MAX_VALUE; int preMinSum = 0, min = Integer.MAX_VALUE; for (int num : nums) { preMinSum = Math.min(num, preMinSum + num); min = Math.min(min, preMinSum); } return min; } } /* Thoughts: Note: sub-array has order. It's not sub-set 1. On each index: decide to add with nums.get(i), to use the new lowest value nums.get(i). That means: If the new value is negative (it has decresing impact on sum) and the sum is larger than new value, just use the new value. In another case, if sum has been nagative, so sum + new value will be even smaller, then use sum. 2. Every time compare the currMin with the overall minimum value, call it minRst. Note: remember to pre-set init value for curMin, minRst. 
*/ public class Solution { /** * @param nums: a list of integers * @return: A integer indicate the sum of minimum subarray */ public int minSubArray(ArrayList<Integer> nums) { if (nums == null || nums.size() == 0) { return 0; } int curMin = nums.get(0); int minRst = nums.get(0); for (int i = 1; i < nums.size(); i++) { curMin = Math.min(nums.get(i), curMin + nums.get(i)); minRst = Math.min(curMin, minRst); } return minRst; } } ```
{ "pile_set_name": "Github" }
INCLUDE 'VICMAIN_FOR' SUBROUTINE MAIN44 C C 2 MAY 89 ...REA... INITIAL RELEASE C 12 AUG 93 ...REA... PORT TO UNIX C REAL BUF(800) CHARACTER*80 PR C open input CALL XVUNIT(INUNIT,'INP',1,ISTAT,' ') CALL XVOPEN(INUNIT,ISTAT,'OPEN_ACT','SA','IO_ACT','SA', + 'U_FORMAT','REAL',' ') C CALL XVMESSAGE(' ',' ') CALL XVMESSAGE( + 'Tiepoint New Line New Sample Old Line Old Sample',' ') CALL XVMESSAGE( + '-------- --- ---- --- ------ --- ---- --- ------',' ') C DO I=1,10 CALL XVREAD(INUNIT,BUF,ISTAT,' ') DO N=1,200 J = 4*N-3 C IF (BUF(J) .EQ. 0.0 .AND. BUF(J+1) .EQ. 0.0) GO TO 100 WRITE (PR,50) N,BUF(J),BUF(J+1),BUF(J+2),BUF(J+3) 50 FORMAT(I6,F11.2,F12.2,F14.2,F12.2) CALL XVMESSAGE(PR,' ') END DO END DO C 100 CONTINUE CALL XVMESSAGE(' ',' ') RETURN END
{ "pile_set_name": "Github" }
{ "name": "jfk-files", "version": "1.0.0", "description": "Azure Search Demo based on declassified JFK Files", "main": "app.js", "config": { "build": "dist", "remote": "GH-Lemoncode", "msg": "Github Deploy in gh-pages" }, "scripts": { "start": "env-cmd .env if-env NODE_ENV=production && npm run start:prod || npm run start:dev", "start:dev": "env-cmd .env cross-env NODE_ENV=development webpack-dev-server --config=webpack.dev.config.js", "start:prod": "env-cmd .env cross-env NODE_ENV=production node server", "clean": "rimraf dist", "build": "env-cmd .env if-env NODE_ENV=production && npm run build:prod || npm run build:dev", "build:dev": "npm run clean && env-cmd .env cross-env NODE_ENV=development webpack --config=webpack.dev.config.js", "build:prod": "npm run clean && env-cmd .env cross-env NODE_ENV=production webpack -p --config=webpack.prod.config.js" }, "author": "Javier Calzado", "license": "ISC", "dependencies": { "babel-polyfill": "^6.26.0", "d3": "4.13.0", "downshift": "^1.28.0", "express": "^4.16.3", "lodash.throttle": "^4.1.1", "material-ui": "1.0.0-beta.33", "material-ui-icons": "1.0.0-beta.17", "material-ui-pickers": "1.0.0-beta.15.1", "moment": "^2.20.1", "paginator": "^1.0.0", "react": "^16.2.0", "react-dom": "^16.2.0", "react-router-dom": "^4.2.2" }, "devDependencies": { "@types/d3": "4.13.0", "@types/history": "^4.6.1", "@types/node": "^9.6.2", "@types/qs": "^6.5.1", "@types/react": "^16.0.21", "@types/react-dom": "^16.0.2", "@types/react-router-dom": "^4.2.0", "awesome-typescript-loader": "^3.3.0", "babel-core": "^6.26.0", "babel-minify-webpack-plugin": "^0.3.1", "babel-preset-env": "^1.6.1", "copy-webpack-plugin": "^4.5.1", "cross-env": "^5.1.4", "css-loader": "~0.28.7", "env-cmd": "^7.0.0", "extract-text-webpack-plugin": "^3.0.2", "file-loader": "~1.1.5", "html-webpack-plugin": "~2.30.1", "if-env": "^1.0.4", "node-sass": "^4.7.2", "qs": "^6.5.1", "resolve-url-loader": "^2.3.0", "rimraf": "^2.6.2", "sass-loader": "^6.0.6", "style-loader": 
"~0.19.0", "typescript": "^2.8.3", "url-loader": "~0.6.2", "webpack": "~3.8.1", "webpack-dev-server": "^2.9.4", "webpack-merge": "^4.1.1" } }
{ "pile_set_name": "Github" }
.class final Lcn/com/smartdevices/bracelet/gps/services/W; .super Landroid/content/BroadcastReceiver; # instance fields .field final synthetic a:Lcn/com/smartdevices/bracelet/gps/services/U; # direct methods .method private constructor <init>(Lcn/com/smartdevices/bracelet/gps/services/U;)V .locals 0 iput-object p1, p0, Lcn/com/smartdevices/bracelet/gps/services/W;->a:Lcn/com/smartdevices/bracelet/gps/services/U; invoke-direct {p0}, Landroid/content/BroadcastReceiver;-><init>()V return-void .end method .method synthetic constructor <init>(Lcn/com/smartdevices/bracelet/gps/services/U;Lcn/com/smartdevices/bracelet/gps/services/V;)V .locals 0 invoke-direct {p0, p1}, Lcn/com/smartdevices/bracelet/gps/services/W;-><init>(Lcn/com/smartdevices/bracelet/gps/services/U;)V return-void .end method # virtual methods .method public onReceive(Landroid/content/Context;Landroid/content/Intent;)V .locals 4 invoke-virtual {p2}, Landroid/content/Intent;->getAction()Ljava/lang/String; move-result-object v0 sget-object v1, Lcom/xiaomi/hm/health/bt/bleservice/BLEService;->h:Ljava/lang/String; invoke-virtual {v1, v0}, Ljava/lang/String;->equals(Ljava/lang/Object;)Z move-result v0 if-eqz v0, :cond_0 sget-object v0, Lcom/xiaomi/hm/health/bt/bleservice/BLEService;->p:Ljava/lang/String; invoke-virtual {p2, v0}, Landroid/content/Intent;->getParcelableExtra(Ljava/lang/String;)Landroid/os/Parcelable; move-result-object v0 check-cast v0, Lcom/xiaomi/hm/health/bt/bleservice/HwConnStatus; const-string v1, "Step" new-instance v2, Ljava/lang/StringBuilder; invoke-direct {v2}, Ljava/lang/StringBuilder;-><init>()V const-string v3, "INTENT_ACTION_CONN_STATUS_CHANGED status = " invoke-virtual {v2, v3}, Ljava/lang/StringBuilder;->append(Ljava/lang/String;)Ljava/lang/StringBuilder; move-result-object v2 invoke-virtual {v0}, Lcom/xiaomi/hm/health/bt/bleservice/HwConnStatus;->a()I move-result v3 invoke-virtual {v2, v3}, Ljava/lang/StringBuilder;->append(I)Ljava/lang/StringBuilder; move-result-object v2 
const-string v3, ",isRTModeEnabled = " invoke-virtual {v2, v3}, Ljava/lang/StringBuilder;->append(Ljava/lang/String;)Ljava/lang/StringBuilder; move-result-object v2 iget-object v3, p0, Lcn/com/smartdevices/bracelet/gps/services/W;->a:Lcn/com/smartdevices/bracelet/gps/services/U; invoke-virtual {v3}, Lcn/com/smartdevices/bracelet/gps/services/U;->e()Z move-result v3 invoke-virtual {v2, v3}, Ljava/lang/StringBuilder;->append(Z)Ljava/lang/StringBuilder; move-result-object v2 invoke-virtual {v2}, Ljava/lang/StringBuilder;->toString()Ljava/lang/String; move-result-object v2 invoke-static {v1, v2}, Lcn/com/smartdevices/bracelet/q;->d(Ljava/lang/String;Ljava/lang/String;)V invoke-virtual {v0}, Lcom/xiaomi/hm/health/bt/bleservice/HwConnStatus;->h()Z move-result v0 if-eqz v0, :cond_0 iget-object v0, p0, Lcn/com/smartdevices/bracelet/gps/services/W;->a:Lcn/com/smartdevices/bracelet/gps/services/U; invoke-virtual {v0}, Lcn/com/smartdevices/bracelet/gps/services/U;->e()Z move-result v0 if-eqz v0, :cond_0 iget-object v0, p0, Lcn/com/smartdevices/bracelet/gps/services/W;->a:Lcn/com/smartdevices/bracelet/gps/services/U; const/4 v1, 0x1 invoke-virtual {v0, v1}, Lcn/com/smartdevices/bracelet/gps/services/U;->a(Z)Z :cond_0 return-void .end method
{ "pile_set_name": "Github" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd"> <html> <head> <meta http-equiv="content-type" content="text/html; charset=utf-8" /> <link rel="shortcut icon" type="image/ico" href="http://www.datatables.net/favicon.ico" /> <title>DataTables example</title> <style type="text/css" title="currentStyle"> @import "../../media/css/demo_page.css"; @import "../../media/css/demo_table_jui.css"; @import "../examples_support/themes/smoothness/jquery-ui-1.8.4.custom.css"; </style> <script type="text/javascript" language="javascript" src="../../media/js/jquery.js"></script> <script type="text/javascript" language="javascript" src="../../media/js/jquery.dataTables.js"></script> <script type="text/javascript" charset="utf-8"> $(document).ready(function() { $('#example').dataTable( { "sScrollY": 200, "bJQueryUI": true, "sPaginationType": "full_numbers" } ); } ); </script> </head> <body id="dt_example"> <div id="container"> <div class="full_width big"> DataTables vertical scrolling with jQuery UI ThemeRoller example </div> <h1>Preamble</h1> <p>This example is an extension of the vertical scrolling example, showing DataTables ability to be themed by jQuery UI's ThemeRoller.</p> <h1>Live example</h1> <div id="demo"> <table cellpadding="0" cellspacing="0" border="0" class="display" id="example"> <thead> <tr> <th>Rendering engine</th> <th>Browser</th> <th>Platform(s)</th> <th>Engine version</th> <th width="12%">CSS grade</th> </tr> </thead> <tfoot> <tr> <th>Rendering engine</th> <th>Browser</th> <th>Platform(s)</th> <th>Engine version</th> <th>CSS grade</th> </tr> </tfoot> <tbody> <tr class="odd gradeX"> <td>Trident</td> <td>Internet Explorer 4.0</td> <td>Win 95+</td> <td class="center">4</td> <td class="center">X</td> </tr> <tr class="odd gradeC"> <td>Trident</td> <td>Internet Explorer 5.0</td> <td>Win 95+</td> <td class="center">5</td> <td class="center">C</td> </tr> <tr class="odd gradeA"> <td>Trident</td> <td>Internet Explorer 5.5</td> 
<td>Win 95+</td> <td class="center">5.5</td> <td class="center">A</td> </tr> <tr class="odd gradeA"> <td>Trident</td> <td>Internet Explorer 6</td> <td>Win 98+</td> <td class="center">6</td> <td class="center">A</td> </tr> <tr class="odd gradeA"> <td>Trident</td> <td>Internet Explorer 7</td> <td>Win XP SP2+</td> <td class="center">7</td> <td class="center">A</td> </tr> <tr class="odd gradeA"> <td>Trident</td> <td>AOL browser (AOL desktop)</td> <td>Win XP</td> <td class="center">6</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Gecko</td> <td>Firefox 1.0</td> <td>Win 98+ / OSX.2+</td> <td class="center">1.7</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Gecko</td> <td>Firefox 1.5</td> <td>Win 98+ / OSX.2+</td> <td class="center">1.8</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Gecko</td> <td>Firefox 2.0</td> <td>Win 98+ / OSX.2+</td> <td class="center">1.8</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Gecko</td> <td>Firefox 3.0</td> <td>Win 2k+ / OSX.3+</td> <td class="center">1.9</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Gecko</td> <td>Camino 1.0</td> <td>OSX.2+</td> <td class="center">1.8</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Gecko</td> <td>Camino 1.5</td> <td>OSX.3+</td> <td class="center">1.8</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Gecko</td> <td>Netscape 7.2</td> <td>Win 95+ / Mac OS 8.6-9.2</td> <td class="center">1.7</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Gecko</td> <td>Netscape Browser 8</td> <td>Win 98SE+</td> <td class="center">1.7</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Gecko</td> <td>Netscape Navigator 9</td> <td>Win 98+ / OSX.2+</td> <td class="center">1.8</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Gecko</td> <td>Mozilla 1.0</td> <td>Win 95+ / OSX.1+</td> <td class="center">1</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Gecko</td> <td>Mozilla 1.1</td> <td>Win 
95+ / OSX.1+</td> <td class="center">1.1</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Gecko</td> <td>Mozilla 1.2</td> <td>Win 95+ / OSX.1+</td> <td class="center">1.2</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Gecko</td> <td>Mozilla 1.3</td> <td>Win 95+ / OSX.1+</td> <td class="center">1.3</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Gecko</td> <td>Mozilla 1.4</td> <td>Win 95+ / OSX.1+</td> <td class="center">1.4</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Gecko</td> <td>Mozilla 1.5</td> <td>Win 95+ / OSX.1+</td> <td class="center">1.5</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Gecko</td> <td>Mozilla 1.6</td> <td>Win 95+ / OSX.1+</td> <td class="center">1.6</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Gecko</td> <td>Mozilla 1.7</td> <td>Win 98+ / OSX.1+</td> <td class="center">1.7</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Gecko</td> <td>Mozilla 1.8</td> <td>Win 98+ / OSX.1+</td> <td class="center">1.8</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Gecko</td> <td>Seamonkey 1.1</td> <td>Win 98+ / OSX.2+</td> <td class="center">1.8</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Gecko</td> <td>Epiphany 2.20</td> <td>Gnome</td> <td class="center">1.8</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Webkit</td> <td>Safari 1.2</td> <td>OSX.3</td> <td class="center">125.5</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Webkit</td> <td>Safari 1.3</td> <td>OSX.3</td> <td class="center">312.8</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Webkit</td> <td>Safari 2.0</td> <td>OSX.4+</td> <td class="center">419.3</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Webkit</td> <td>Safari 3.0</td> <td>OSX.4+</td> <td class="center">522.1</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Webkit</td> <td>OmniWeb 5.5</td> <td>OSX.4+</td> <td class="center">420</td> <td 
class="center">A</td> </tr> <tr class="gradeA"> <td>Webkit</td> <td>iPod Touch / iPhone</td> <td>iPod</td> <td class="center">420.1</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Webkit</td> <td>S60</td> <td>S60</td> <td class="center">413</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Presto</td> <td>Opera 7.0</td> <td>Win 95+ / OSX.1+</td> <td class="center">-</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Presto</td> <td>Opera 7.5</td> <td>Win 95+ / OSX.2+</td> <td class="center">-</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Presto</td> <td>Opera 8.0</td> <td>Win 95+ / OSX.2+</td> <td class="center">-</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Presto</td> <td>Opera 8.5</td> <td>Win 95+ / OSX.2+</td> <td class="center">-</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Presto</td> <td>Opera 9.0</td> <td>Win 95+ / OSX.3+</td> <td class="center">-</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Presto</td> <td>Opera 9.2</td> <td>Win 88+ / OSX.3+</td> <td class="center">-</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Presto</td> <td>Opera 9.5</td> <td>Win 88+ / OSX.3+</td> <td class="center">-</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Presto</td> <td>Opera for Wii</td> <td>Wii</td> <td class="center">-</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Presto</td> <td>Nokia N800</td> <td>N800</td> <td class="center">-</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>Presto</td> <td>Nintendo DS browser</td> <td>Nintendo DS</td> <td class="center">8.5</td> <td class="center">C/A<sup>1</sup></td> </tr> <tr class="gradeC"> <td>KHTML</td> <td>Konqureror 3.1</td> <td>KDE 3.1</td> <td class="center">3.1</td> <td class="center">C</td> </tr> <tr class="gradeA"> <td>KHTML</td> <td>Konqureror 3.3</td> <td>KDE 3.3</td> <td class="center">3.3</td> <td class="center">A</td> </tr> <tr class="gradeA"> <td>KHTML</td> 
<td>Konqureror 3.5</td> <td>KDE 3.5</td> <td class="center">3.5</td> <td class="center">A</td> </tr> <tr class="gradeX"> <td>Tasman</td> <td>Internet Explorer 4.5</td> <td>Mac OS 8-9</td> <td class="center">-</td> <td class="center">X</td> </tr> <tr class="gradeC"> <td>Tasman</td> <td>Internet Explorer 5.1</td> <td>Mac OS 7.6-9</td> <td class="center">1</td> <td class="center">C</td> </tr> <tr class="gradeC"> <td>Tasman</td> <td>Internet Explorer 5.2</td> <td>Mac OS 8-X</td> <td class="center">1</td> <td class="center">C</td> </tr> <tr class="gradeA"> <td>Misc</td> <td>NetFront 3.1</td> <td>Embedded devices</td> <td class="center">-</td> <td class="center">C</td> </tr> <tr class="gradeA"> <td>Misc</td> <td>NetFront 3.4</td> <td>Embedded devices</td> <td class="center">-</td> <td class="center">A</td> </tr> <tr class="gradeX"> <td>Misc</td> <td>Dillo 0.8</td> <td>Embedded devices</td> <td class="center">-</td> <td class="center">X</td> </tr> <tr class="gradeX"> <td>Misc</td> <td>Links</td> <td>Text only</td> <td class="center">-</td> <td class="center">X</td> </tr> <tr class="gradeX"> <td>Misc</td> <td>Lynx</td> <td>Text only</td> <td class="center">-</td> <td class="center">X</td> </tr> <tr class="gradeC"> <td>Misc</td> <td>IE Mobile</td> <td>Windows Mobile 6</td> <td class="center">-</td> <td class="center">C</td> </tr> <tr class="gradeC"> <td>Misc</td> <td>PSP browser</td> <td>PSP</td> <td class="center">-</td> <td class="center">C</td> </tr> <tr class="gradeU"> <td>Other browsers</td> <td>All others</td> <td>-</td> <td class="center">-</td> <td class="center">U</td> </tr> </tbody> </table> </div> <div class="spacer"></div> <h1>Initialisation code</h1> <pre class="brush: js;">$(document).ready(function() { $('#example').dataTable( { "sScrollY": 200, "bJQueryUI": true, "sPaginationType": "full_numbers" } ); } );</pre> <style type="text/css"> @import "../examples_support/syntax/css/shCore.css"; </style> <script type="text/javascript" language="javascript" 
src="../examples_support/syntax/js/shCore.js"></script> <h1>Other examples</h1> <div class="demo_links"> <h2>Basic initialisation</h2> <ul> <li><a href="../basic_init/zero_config.html">Zero configuration</a></li> <li><a href="../basic_init/filter_only.html">Feature enablement</a></li> <li><a href="../basic_init/table_sorting.html">Sorting data</a></li> <li><a href="../basic_init/multi_col_sort.html">Multi-column sorting</a></li> <li><a href="../basic_init/multiple_tables.html">Multiple tables</a></li> <li><a href="../basic_init/hidden_columns.html">Hidden columns</a></li> <li><a href="../basic_init/complex_header.html">Complex headers - grouping with colspan</a></li> <li><a href="../basic_init/dom.html">DOM positioning</a></li> <li><a href="../basic_init/state_save.html">State saving</a></li> <li><a href="../basic_init/alt_pagination.html">Alternative pagination styles</a></li> <li>Scrolling: <br> <a href="../basic_init/scroll_x.html">Horizontal</a> / <a href="../basic_init/scroll_y.html">Vertical</a> / <a href="../basic_init/scroll_xy.html">Both</a> / <a href="../basic_init/scroll_y_theme.html">Themed</a> / <a href="../basic_init/scroll_y_infinite.html">Infinite</a> </li> <li><a href="../basic_init/language.html">Change language information (internationalisation)</a></li> <li><a href="../basic_init/themes.html">ThemeRoller themes (Smoothness)</a></li> </ul> <h2>Advanced initialisation</h2> <ul> <li>Events: <br> <a href="../advanced_init/events_live.html">Live events</a> / <a href="../advanced_init/events_pre_init.html">Pre-init</a> / <a href="../advanced_init/events_post_init.html">Post-init with fnGetNodes</a> </li> <li><a href="../advanced_init/column_render.html">Column rendering</a></li> <li><a href="../advanced_init/html_sort.html">Sorting without HTML tags</a></li> <li><a href="../advanced_init/dom_multiple_elements.html">Multiple table controls (sDom)</a></li> <li><a href="../advanced_init/length_menu.html">Defining length menu options</a></li> <li><a 
href="../advanced_init/complex_header.html">Complex headers and hidden columns</a></li> <li><a href="../advanced_init/dom_toolbar.html">Custom toolbar (element) around table</a></li> <li><a href="../advanced_init/highlight.html">Row highlighting with CSS</a></li> <li><a href="../advanced_init/row_grouping.html">Row grouping</a></li> <li><a href="../advanced_init/row_callback.html">Row callback</a></li> <li><a href="../advanced_init/footer_callback.html">Footer callback</a></li> <li><a href="../advanced_init/sorting_control.html">Control sorting direction of columns</a></li> <li><a href="../advanced_init/language_file.html">Change language information from a file (internationalisation)</a></li> </ul> <h2>API</h2> <ul> <li><a href="../api/add_row.html">Dynamically add a new row</a></li> <li><a href="../api/multi_filter.html">Individual column filtering (using "input" elements)</a></li> <li><a href="../api/multi_filter_select.html">Individual column filtering (using "select" elements)</a></li> <li><a href="../api/highlight.html">Highlight rows and columns</a></li> <li><a href="../api/row_details.html">Show and hide details about a particular record</a></li> <li><a href="../api/select_row.html">User selectable rows (multiple rows)</a></li> <li><a href="../api/select_single_row.html">User selectable rows (single row) and delete rows</a></li> <li><a href="../api/editable.html">Editable rows (with jEditable)</a></li> <li><a href="../api/form.html">Submit form with elements in table</a></li> <li><a href="../api/counter_column.html">Index column (static number column)</a></li> <li><a href="../api/show_hide.html">Show and hide columns dynamically</a></li> <li><a href="../api/api_in_init.html">API function use in initialisation object (callback)</a></li> <li><a href="../api/tabs_and_scrolling.html">DataTables scrolling and tabs</a></li> <li><a href="../api/regex.html">Regular expression filtering</a></li> </ul> </div> <div class="demo_links"> <h2>Data sources</h2> <ul> <li><a 
href="../data_sources/dom.html">DOM</a></li> <li><a href="../data_sources/js_array.html">Javascript array</a></li> <li><a href="../data_sources/ajax.html">Ajax source</a></li> <li><a href="../data_sources/server_side.html">Server side processing</a></li> </ul> <h2>Server-side processing</h2> <ul> <li><a href="../server_side/server_side.html">Obtain server-side data</a></li> <li><a href="../server_side/custom_vars.html">Add extra HTTP variables</a></li> <li><a href="../server_side/post.html">Use HTTP POST</a></li> <li><a href="../server_side/ids.html">Automatic addition of IDs and classes to rows</a></li> <li><a href="../server_side/object_data.html">Reading table data from objects</a></li> <li><a href="../server_side/row_details.html">Show and hide details about a particular record</a></li> <li><a href="../server_side/select_rows.html">User selectable rows (multiple rows)</a></li> <li><a href="../server_side/jsonp.html">JSONP for a cross domain data source</a></li> <li><a href="../server_side/editable.html">jEditable integration with DataTables</a></li> <li><a href="../server_side/defer_loading.html">Deferred loading of Ajax data</a></li> <li><a href="../server_side/column_ordering.html">Custom column ordering (in callback data)</a></li> <li><a href="../server_side/pipeline.html">Pipelining data (reduce Ajax calls for paging)</a></li> </ul> <h2>Ajax data source</h2> <ul> <li><a href="../ajax/ajax.html">Ajax sourced data (array of arrays)</a></li> <li><a href="../ajax/objects.html">Ajax sourced data (array of objects)</a></li> <li><a href="../ajax/defer_render.html">Deferred DOM creation for extra speed</a></li> <li><a href="../ajax/null_data_source.html">Empty data source columns</a></li> <li><a href="../ajax/custom_data_property.html">Use a data source other than aaData (the default)</a></li> <li><a href="../ajax/objects_subarrays.html">Read column data from sub-arrays</a></li> <li><a href="../ajax/deep.html">Read column data from deeply nested properties</a></li> 
</ul> <h2>Plug-ins</h2> <ul> <li><a href="../plug-ins/plugin_api.html">Add custom API functions</a></li> <li><a href="../plug-ins/sorting_plugin.html">Sorting and automatic type detection</a></li> <li><a href="../plug-ins/sorting_sType.html">Sorting without automatic type detection</a></li> <li><a href="../plug-ins/paging_plugin.html">Custom pagination controls</a></li> <li><a href="../plug-ins/range_filtering.html">Range filtering / custom filtering</a></li> <li><a href="../plug-ins/dom_sort.html">Live DOM sorting</a></li> <li><a href="../plug-ins/html_sort.html">Automatic HTML type detection</a></li> </ul> </div> <div id="footer" class="clear" style="text-align:center;"> <p> Please refer to the <a href="http://www.datatables.net/usage">DataTables documentation</a> for full information about its API properties and methods.<br> Additionally, there are a wide range of <a href="http://www.datatables.net/extras">extras</a> and <a href="http://www.datatables.net/plug-ins">plug-ins</a> which extend the capabilities of DataTables. </p> <span style="font-size:10px;"> DataTables designed and created by <a href="http://www.sprymedia.co.uk">Allan Jardine</a> &copy; 2007-2011<br> DataTables is dual licensed under the <a href="http://www.datatables.net/license_gpl2">GPL v2 license</a> or a <a href="http://www.datatables.net/license_bsd">BSD (3-point) license</a>. </span> </div> </div> </body> </html>
{ "pile_set_name": "Github" }
fileFormatVersion: 2 guid: a8c276c26e30a4c04a6192b62b9017c7 timeCreated: 1443394083 licenseType: Free NativeFormatImporter: userData: assetBundleName: assetBundleVariant:
{ "pile_set_name": "Github" }
package org.orcid.core.adapter.v3; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.util.Date; import java.util.Set; import javax.annotation.Resource; import org.junit.Test; import org.junit.runner.RunWith; import org.orcid.jaxb.model.v3.release.common.Source; import org.orcid.jaxb.model.v3.release.common.SourceClientId; import org.orcid.jaxb.model.v3.release.notification.Notification; import org.orcid.jaxb.model.v3.release.notification.NotificationType; import org.orcid.jaxb.model.v3.release.notification.amended.NotificationAmended; import org.orcid.jaxb.model.v3.release.notification.custom.NotificationCustom; import org.orcid.jaxb.model.v3.release.notification.permission.AuthorizationUrl; import org.orcid.jaxb.model.v3.release.notification.permission.Item; import org.orcid.jaxb.model.v3.release.notification.permission.ItemType; import org.orcid.jaxb.model.v3.release.notification.permission.Items; import org.orcid.jaxb.model.v3.release.notification.permission.NotificationPermission; import org.orcid.jaxb.model.v3.release.record.ExternalID; import org.orcid.persistence.jpa.entities.NotificationAddItemsEntity; import org.orcid.persistence.jpa.entities.NotificationAmendedEntity; import org.orcid.persistence.jpa.entities.NotificationCustomEntity; import org.orcid.persistence.jpa.entities.NotificationEntity; import org.orcid.persistence.jpa.entities.NotificationItemEntity; import org.orcid.test.OrcidJUnit4ClassRunner; import org.orcid.utils.DateFieldsOnBaseEntityUtils; import org.orcid.utils.DateUtils; import org.springframework.test.context.ContextConfiguration; /** * * @author Will Simpson * */ @RunWith(OrcidJUnit4ClassRunner.class) @ContextConfiguration(locations = { "classpath:orcid-core-context.xml" }) public class JpaJaxbNotificationAdapterTest { @Resource(name = "jpaJaxbNotificationAdapterV3") private 
JpaJaxbNotificationAdapter jpaJaxbNotificationAdapter; @Test public void testToNotificationCustomEntity() { NotificationCustom notification = new NotificationCustom(); notification.setNotificationType(NotificationType.CUSTOM); notification.setSubject("Test subject"); NotificationEntity notificationEntity = jpaJaxbNotificationAdapter.toNotificationEntity(notification); assertNotNull(notificationEntity); assertEquals(org.orcid.jaxb.model.notification_v2.NotificationType.CUSTOM.name(), notificationEntity.getNotificationType()); assertEquals("Test subject", notification.getSubject()); } @Test public void testCustomEntityToNotification() throws IllegalAccessException { NotificationCustomEntity notificationEntity = new NotificationCustomEntity(); DateFieldsOnBaseEntityUtils.setDateFields(notificationEntity, DateUtils.convertToDate("2014-01-01T09:17:56")); notificationEntity.setId(123L); notificationEntity.setNotificationType(org.orcid.jaxb.model.notification_v2.NotificationType.CUSTOM.name()); notificationEntity.setSubject("Test subject"); notificationEntity.setReadDate(DateUtils.convertToDate("2014-03-04T17:43:06")); Date date = DateUtils.convertToDate("2015-06-05T10:15:20"); DateFieldsOnBaseEntityUtils.setDateFields(notificationEntity, date); Notification notification = jpaJaxbNotificationAdapter.toNotification(notificationEntity); assertNotNull(notification); assertTrue(notification instanceof NotificationCustom); NotificationCustom notificationCustom = (NotificationCustom) notification; assertNotNull(notificationCustom.getCreatedDate()); assertEquals(DateUtils.convertToDate("2015-06-05T10:15:20"), DateUtils.convertToDate(notificationCustom.getCreatedDate())); assertEquals(NotificationType.CUSTOM, notification.getNotificationType()); assertEquals("Test subject", notificationCustom.getSubject()); assertTrue(notification.getCreatedDate().toXMLFormat().startsWith("2015-06-05T10:15:20.000")); 
assertTrue(notification.getReadDate().toXMLFormat().startsWith("2014-03-04T17:43:06.000")); } @Test public void testToNotificationPermissionEntity() { NotificationPermission notification = new NotificationPermission(); notification.setNotificationType(NotificationType.PERMISSION); notification.setCreatedDate(DateUtils.convertToXMLGregorianCalendar(new Date())); String authorizationUrlString = "https://orcid.org/oauth/authorize?client_id=APP-U4UKCNSSIM1OCVQY&amp;response_type=code&amp;scope=/orcid-works/create&amp;redirect_uri=http://somethirdparty.com"; AuthorizationUrl url = new AuthorizationUrl(); notification.setAuthorizationUrl(url); notification.setNotificationIntro("This is the intro"); notification.setNotificationSubject("This is the subject"); Source source = new Source(); notification.setSource(source); SourceClientId clientId = new SourceClientId(); source.setSourceClientId(clientId); clientId.setPath("APP-5555-5555-5555-5555"); url.setUri(authorizationUrlString); Items activities = new Items(); notification.setItems(activities); Item activity = new Item(); activities.getItems().add(activity); activity.setItemType(ItemType.WORK); activity.setItemName("Latest Research Article"); ExternalID extId = new ExternalID(); activity.setExternalIdentifier(extId); extId.setType("doi"); extId.setValue("1234/abc123"); NotificationEntity notificationEntity = jpaJaxbNotificationAdapter.toNotificationEntity(notification); assertTrue(notificationEntity instanceof NotificationAddItemsEntity); NotificationAddItemsEntity addActivitiesEntity = (NotificationAddItemsEntity) notificationEntity; assertNotNull(notificationEntity); assertNull(notificationEntity.getDateCreated()); assertNull(notificationEntity.getLastModified()); assertEquals(org.orcid.jaxb.model.notification_v2.NotificationType.PERMISSION.name(), notificationEntity.getNotificationType()); assertEquals(authorizationUrlString, addActivitiesEntity.getAuthorizationUrl()); 
assertEquals(notification.getNotificationIntro(), notificationEntity.getNotificationIntro()); assertEquals(notification.getNotificationSubject(),notificationEntity.getNotificationSubject()); // Source assertNull(notificationEntity.getSourceId()); assertNull(notificationEntity.getClientSourceId()); assertNull(notificationEntity.getElementSourceId()); Set<NotificationItemEntity> activityEntities = addActivitiesEntity.getNotificationItems(); assertNotNull(activityEntities); assertEquals(1, activityEntities.size()); NotificationItemEntity activityEntity = activityEntities.iterator().next(); assertEquals(org.orcid.jaxb.model.notification.permission_v2.ItemType.WORK.name(), activityEntity.getItemType()); assertEquals("Latest Research Article", activityEntity.getItemName()); assertEquals("DOI", activityEntity.getExternalIdType()); assertEquals("1234/abc123", activityEntity.getExternalIdValue()); } @Test public void testToNotificationAmendedEntity() { NotificationAmended notification = new NotificationAmended(); notification.setNotificationType(NotificationType.AMENDED); notification.setCreatedDate(DateUtils.convertToXMLGregorianCalendar(new Date())); Source source = new Source(); notification.setSource(source); SourceClientId clientId = new SourceClientId(); source.setSourceClientId(clientId); clientId.setPath("APP-5555-5555-5555-5555"); Items activities = new Items(); notification.setItems(activities); Item activity = new Item(); activities.getItems().add(activity); activity.setItemType(ItemType.WORK); activity.setItemName("Latest Research Article"); ExternalID extId = new ExternalID(); activity.setExternalIdentifier(extId); extId.setType("doi"); extId.setValue("1234/abc123"); NotificationEntity notificationEntity = jpaJaxbNotificationAdapter.toNotificationEntity(notification); assertNull(notificationEntity.getDateCreated()); assertNull(notificationEntity.getLastModified()); assertTrue(notificationEntity instanceof NotificationAmendedEntity); NotificationAmendedEntity 
notificationAmendedEntity = (NotificationAmendedEntity) notificationEntity; assertNotNull(notificationEntity); assertEquals(org.orcid.jaxb.model.notification_v2.NotificationType.AMENDED.name(), notificationEntity.getNotificationType()); // Source assertNull(notificationAmendedEntity.getSourceId()); assertNull(notificationAmendedEntity.getClientSourceId()); assertNull(notificationAmendedEntity.getElementSourceId()); } }
{ "pile_set_name": "Github" }
<%= render(PageComponent.new(subject: @organization, tabs: TabsComponent.new(tabs: organization_tabs(@organization)), sidebar: true, sidebar_partial: "/pixels/sidebar")) do |component| %> <% component.with(:header) do %> <%= render(Page::HeaderComponent.new(title: @organization.name, sidebar: true, buttons: [ layout_button(link: new_pixel_path(@organization), icon: "fas fa-plus", title: "Add pixel", display: authorized_user.can_edit_organization_users?(@organization)), ])) %> <% end %> <% component.with(:body) do %> <%= render CardComponent.new do %> <%= render "/pixels/table", organization: @organization, pixels: @pixels, pagy: @pagy %> <% end %> <% end %> <% end %>
{ "pile_set_name": "Github" }
// Copyright (C) 2003-2004 Jeremy B. Maitin-Shepard. // Copyright (C) 2005-2008 Daniel James. // Distributed under the Boost Software License, Version 1.0. (See accompanying // file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) // See http://www.boost.org/libs/unordered for documentation #ifndef AUTOBOOST_UNORDERED_MAP_HPP_INCLUDED #define AUTOBOOST_UNORDERED_MAP_HPP_INCLUDED #include <autoboost/config.hpp> #if defined(AUTOBOOST_HAS_PRAGMA_ONCE) #pragma once #endif #include <autoboost/unordered/unordered_map.hpp> #endif // AUTOBOOST_UNORDERED_MAP_HPP_INCLUDED
{ "pile_set_name": "Github" }
/* Copyright 2003-2013 Joaquin M Lopez Munoz.
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * See http://www.boost.org/libs/multi_index for library home page.
 */

#ifndef BOOST_MULTI_INDEX_DETAIL_INDEX_SAVER_HPP
#define BOOST_MULTI_INDEX_DETAIL_INDEX_SAVER_HPP

#if defined(_MSC_VER)
#pragma once
#endif

#include <boost/config.hpp> /* keep it first to prevent nasty warns in MSVC */
#include <boost/multi_index/detail/index_matcher.hpp>
#include <boost/noncopyable.hpp>
#include <boost/serialization/nvp.hpp>
#include <cstddef>

namespace boost{

namespace multi_index{

namespace detail{

/* index_saver accepts a base sequence of previously saved elements
 * and saves a possibly reordered subsequence in an efficient manner,
 * serializing only the information needed to rearrange the subsequence
 * based on the original order of the base.
 * multi_index_container is in charge of supplying the info about the
 * base sequence, and each index can subsequently save itself using the
 * const interface of index_saver.
 */

template<typename Node,typename Allocator>
class index_saver:private noncopyable
{
public:
  index_saver(const Allocator& al,std::size_t size):alg(al,size){}

  /* Serialize a node of the base sequence and register it with the
   * matching algorithm so its original position is known at save() time.
   */
  template<class Archive>
  void add(Node* node,Archive& ar,const unsigned int)
  {
    ar<<serialization::make_nvp("position",*node);
    alg.add(node);
  }

  /* Serialize a node without registering it with the matching algorithm. */
  template<class Archive>
  void add_track(Node* node,Archive& ar,const unsigned int)
  {
    ar<<serialization::make_nvp("position",*node);
  }

  template<typename IndexIterator,class Archive>
  void save(
    IndexIterator first,IndexIterator last,Archive& ar,
    const unsigned int)const
  {
    /* calculate ordered positions */

    alg.execute(first,last);

    /* Given a consecutive subsequence of displaced elements
     * x1,...,xn, the following information is serialized:
     *
     *   p0,p1,...,pn,0
     *
     * where pi is a pointer to xi and p0 is a pointer to the element
     * preceding x1. Clearly, from this information it is possible to
     * restore the original order on loading time. If x1 is the first
     * element in the sequence, the following is serialized instead:
     *
     *   p1,p1,...,pn,0
     *
     * For each subsequence of n elements, n+2 pointers are serialized.
     * An optimization policy is applied: consider for instance the
     * sequence
     *
     *   a,B,c,D
     *
     * where B and D are displaced, but c is in its correct position.
     * Applying the schema described above we would serialize 6 pointers:
     *
     *  p(a),p(B),0
     *  p(c),p(D),0
     *
     * but this can be reduced to 5 pointers by treating c as a displaced
     * element:
     *
     *   p(a),p(B),p(c),p(D),0
     */

    std::size_t last_saved=3; /* distance to last pointer saved */
    for(IndexIterator it=first,prev=first;it!=last;prev=it++,++last_saved){
      if(!alg.is_ordered(get_node(it))){
        /* start (or extend) a displaced run: emit the preceding element
         * first unless it was just emitted on the previous iteration
         */
        if(last_saved>1)save_node(get_node(prev),ar);
        save_node(get_node(it),ar);
        last_saved=0;
      }
      else if(last_saved==2)save_node(null_node(),ar); /* close the run */
    }
    if(last_saved<=2)save_node(null_node(),ar);

    /* marks the end of the serialization info for [first,last) */

    save_node(null_node(),ar);
  }

private:
  template<typename IndexIterator>
  static Node* get_node(IndexIterator it)
  {
    return it.get_node();
  }

  static Node* null_node(){return 0;}

  template<typename Archive>
  static void save_node(Node* node,Archive& ar)
  {
    ar<<serialization::make_nvp("pointer",node);
  }

  index_matcher::algorithm<Node,Allocator> alg;
};

} /* namespace multi_index::detail */

} /* namespace multi_index */

} /* namespace boost */

#endif
{ "pile_set_name": "Github" }
Name ARB_texture_border_clamp Name Strings GL_ARB_texture_border_clamp Contact Patrick R. Brown, Intel Corporation (patrick.r.brown 'at' intel.com) Notice Copyright (c) 2000-2013 The Khronos Group Inc. Copyright terms at http://www.khronos.org/registry/speccopyright.html Specification Update Policy Khronos-approved extension specifications are updated in response to issues and bugs prioritized by the Khronos OpenGL Working Group. For extensions which have been promoted to a core Specification, fixes will first appear in the latest version of that core Specification, and will eventually be backported to the extension document. This policy is described in more detail at https://www.khronos.org/registry/OpenGL/docs/update_policy.php Status Complete. Approved by the ARB, 20 June 2000 Version 1.0, 22 June 2000 Number ARB Extension #13 Dependencies OpenGL 1.0 is required. This extension is written against the OpenGL 1.2.1 Specification. This extension is based on and intended to replace GL_SGIS_texture_border_clamp. Overview The base OpenGL provides clamping such that the texture coordinates are limited to exactly the range [0,1]. When a texture coordinate is clamped using this algorithm, the texture sampling filter straddles the edge of the texture image, taking 1/2 its sample values from within the texture image, and the other 1/2 from the texture border. It is sometimes desirable for a texture to be clamped to the border color, rather than to an average of the border and edge colors. This extension defines an additional texture clamping algorithm. CLAMP_TO_BORDER_ARB clamps texture coordinates at all mipmap levels such that NEAREST and LINEAR filters return only the color of the border texels. IP Status No known IP issues. Issues (1) Is this formulation correct for higher-order texture filters (e.g., cubic or anisotropic filters)? RESOLVED: No. A more appropriate formulation would clamp the texture coordinates in texel space. New Procedures and Functions None. 
New Tokens Accepted by the <param> parameter of TexParameteri and TexParameterf, and by the <params> parameter of TexParameteriv and TexParameterfv, when their <pname> parameter is TEXTURE_WRAP_S, TEXTURE_WRAP_T, or TEXTURE_WRAP_R: CLAMP_TO_BORDER_ARB 0x812D Additions to Chapter 2 of the OpenGL 1.2.1 Specification (OpenGL Operation) None. Additions to Chapter 3 of the OpenGL 1.2.1 Specification (Rasterization) Modify Table 3.17, p. 124, editing only the following lines: Name Type Legal Values ============== ======= ==================== TEXTURE_WRAP_S integer CLAMP, CLAMP_TO_EDGE, REPEAT, CLAMP_TO_BORDER_ARB TEXTURE_WRAP_T integer CLAMP, CLAMP_TO_EDGE, REPEAT, CLAMP_TO_BORDER_ARB TEXTURE_WRAP_R integer CLAMP, CLAMP_TO_EDGE, REPEAT, CLAMP_TO_BORDER_ARB Modify Section 3.8.4, Texture Wrap Modes, p.124 (add at the end of the section, p. 125) CLAMP_TO_BORDER_ARB clamps texture coordinates at all mipmaps such that the texture filter always samples border texels for fragments whose corresponding texture coordinate is sufficiently far outside the range [0,1]. The color returned when clamping is derived only from the border texels of the texture image, or from the constant border color if the texture image does not have a border. Texture coordinates are clamped to the range [min, max]. The minimum value is defined as min = -1 / 2N where N is the size (not including borders) of the one-, two-, or three-dimensional texture image in the direction of clamping. The maximum value is defined as max = 1 - min so that clamping is always symmetric about the [0,1] mapped range of a texture coordinate. Additions to Chapter 4 of the OpenGL 1.2.1 Specification (Per-Fragment Operations and the Frame Buffer) None. Additions to Chapter 5 of the OpenGL 1.2.1 Specification (Special Functions) None. Additions to Chapter 6 of the OpenGL 1.2.1 Specification (State and State Requests) None. Additions to Appendix A of the OpenGL 1.2.1 Specification (Invariance) None. 
Additions to the AGL/GLX/WGL Specifications None. GLX Protocol None. Errors None. New State Only the type information changes for these parameters. (table 6.13, p. 203) Initial Get Value Type Get Command Value Description Sec. Attribute --------- ---- ----------- ------- ----------- ---- --------- TEXTURE_WRAP_S 3+ x Z4 GetTexParameter REPEAT Texture wrap 3.8 texture TEXTURE_WRAP_T 3+ x Z4 GetTexParameter REPEAT Texture wrap 3.8 texture TEXTURE_WRAP_R 3+ x Z4 GetTexParameter REPEAT Texture wrap 3.8 texture Revision History 1.0, 06/22/2000 prbrown1: Added issue w.r.t. higher order filters. 0.2, 05/23/2000 prbrown1: Removed dependency on SGIS_texture_filter4 per ARB guidelines. 0.1, 05/02/2000 prbrown1: Initial revision -- mostly stolen from GL_SGIS_texture_border_clamp.
{ "pile_set_name": "Github" }
/* Copyright The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package v1 // This file contains a collection of methods that can be used from go-restful to // generate Swagger API documentation for its models. Please read this PR for more // information on the implementation: https://github.com/emicklei/go-restful/pull/215 // // TODOs are ignored from the parser (e.g. TODO(andronat):... || TODO:...) if and only if // they are on one line! For multiple line or blocks that you want to ignore use ---. // Any context after a --- is ignored. // // Those methods can be generated by using hack/update-generated-swagger-docs.sh // AUTO-GENERATED FUNCTIONS START HERE. DO NOT EDIT. var map_Job = map[string]string{ "": "Job represents the configuration of a single job.", "metadata": "Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata", "spec": "Specification of the desired behavior of a job. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#spec-and-status", "status": "Current status of a job. 
More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#spec-and-status", } func (Job) SwaggerDoc() map[string]string { return map_Job } var map_JobCondition = map[string]string{ "": "JobCondition describes current state of a job.", "type": "Type of job condition, Complete or Failed.", "status": "Status of the condition, one of True, False, Unknown.", "lastProbeTime": "Last time the condition was checked.", "lastTransitionTime": "Last time the condition transit from one status to another.", "reason": "(brief) reason for the condition's last transition.", "message": "Human readable message indicating details about last transition.", } func (JobCondition) SwaggerDoc() map[string]string { return map_JobCondition } var map_JobList = map[string]string{ "": "JobList is a collection of jobs.", "metadata": "Standard list metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata", "items": "items is the list of Jobs.", } func (JobList) SwaggerDoc() map[string]string { return map_JobList } var map_JobSpec = map[string]string{ "": "JobSpec describes how the job execution will look like.", "parallelism": "Specifies the maximum desired number of pods the job should run at any given time. The actual number of pods running in steady state will be less than this number when ((.spec.completions - .status.successful) < .spec.parallelism), i.e. when the work left to do is less than max parallelism. More info: https://kubernetes.io/docs/concepts/workloads/controllers/jobs-run-to-completion/", "completions": "Specifies the desired number of successfully finished pods the job should be run with. Setting to nil means that the success of any pod signals the success of all pods, and allows parallelism to have any positive value. Setting to 1 means that parallelism is limited to 1 and the success of that pod signals the success of the job. 
More info: https://kubernetes.io/docs/concepts/workloads/controllers/jobs-run-to-completion/", "activeDeadlineSeconds": "Specifies the duration in seconds relative to the startTime that the job may be active before the system tries to terminate it; value must be positive integer", "backoffLimit": "Specifies the number of retries before marking this job failed. Defaults to 6", "selector": "A label query over pods that should match the pod count. Normally, the system sets this field for you. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/#label-selectors", "manualSelector": "manualSelector controls generation of pod labels and pod selectors. Leave `manualSelector` unset unless you are certain what you are doing. When false or unset, the system pick labels unique to this job and appends those labels to the pod template. When true, the user is responsible for picking unique labels and specifying the selector. Failure to pick a unique label may cause this and other jobs to not function correctly. However, You may see `manualSelector=true` in jobs that were created with the old `extensions/v1beta1` API. More info: https://kubernetes.io/docs/concepts/workloads/controllers/jobs-run-to-completion/#specifying-your-own-pod-selector", "template": "Describes the pod that will be created when executing a job. More info: https://kubernetes.io/docs/concepts/workloads/controllers/jobs-run-to-completion/", "ttlSecondsAfterFinished": "ttlSecondsAfterFinished limits the lifetime of a Job that has finished execution (either Complete or Failed). If this field is set, ttlSecondsAfterFinished after the Job finishes, it is eligible to be automatically deleted. When the Job is being deleted, its lifecycle guarantees (e.g. finalizers) will be honored. If this field is unset, the Job won't be automatically deleted. If this field is set to zero, the Job becomes eligible to be deleted immediately after it finishes. 
This field is alpha-level and is only honored by servers that enable the TTLAfterFinished feature.", } func (JobSpec) SwaggerDoc() map[string]string { return map_JobSpec } var map_JobStatus = map[string]string{ "": "JobStatus represents the current state of a Job.", "conditions": "The latest available observations of an object's current state. More info: https://kubernetes.io/docs/concepts/workloads/controllers/jobs-run-to-completion/", "startTime": "Represents time when the job was acknowledged by the job controller. It is not guaranteed to be set in happens-before order across separate operations. It is represented in RFC3339 form and is in UTC.", "completionTime": "Represents time when the job was completed. It is not guaranteed to be set in happens-before order across separate operations. It is represented in RFC3339 form and is in UTC.", "active": "The number of actively running pods.", "succeeded": "The number of pods which reached phase Succeeded.", "failed": "The number of pods which reached phase Failed.", } func (JobStatus) SwaggerDoc() map[string]string { return map_JobStatus } // AUTO-GENERATED FUNCTIONS END HERE
{ "pile_set_name": "Github" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_162) on Sun Mar 11 19:57:10 CST 2018 --> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>Uses of Class com.alibaba.druid.mock.MockParameterMetaData (druid 1.1.9 API)</title> <meta name="date" content="2018-03-11"> <link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Class com.alibaba.druid.mock.MockParameterMetaData (druid 1.1.9 API)"; } } catch(err) { } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../com/alibaba/druid/mock/MockParameterMetaData.html" title="class in com.alibaba.druid.mock">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?com/alibaba/druid/mock/class-use/MockParameterMetaData.html" target="_top">Frames</a></li> <li><a 
href="MockParameterMetaData.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Class com.alibaba.druid.mock.MockParameterMetaData" class="title">Uses of Class<br>com.alibaba.druid.mock.MockParameterMetaData</h2> </div> <div class="classUseContainer">No usage of com.alibaba.druid.mock.MockParameterMetaData</div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../com/alibaba/druid/mock/MockParameterMetaData.html" title="class in com.alibaba.druid.mock">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?com/alibaba/druid/mock/class-use/MockParameterMetaData.html" target="_top">Frames</a></li> <li><a href="MockParameterMetaData.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul 
class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &#169; 2013&#x2013;2018 <a href="http://code.alibabatech.com/">Alibaba Group</a>. All rights reserved.</small></p> </body> </html>
{ "pile_set_name": "Github" }
/*
 * Copyright 2014-2020 Real Logic Limited.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.agrona.concurrent;

import org.agrona.MutableDirectBuffer;

/**
 * Abstraction over a range of buffer types that allows fields to be accessed with memory ordering semantics.
 */
public interface AtomicBuffer extends MutableDirectBuffer
{
    /**
     * Verify that the underlying buffer is correctly aligned to prevent word tearing and other ordering issues.
     *
     * @throws IllegalStateException if the alignment is not correct.
     */
    void verifyAlignment();

    /**
     * Get the value at a given index with volatile semantics.
     *
     * @param index in bytes from which to get.
     * @return the value for at a given index.
     */
    long getLongVolatile(int index);

    /**
     * Put a value to a given index with volatile semantics.
     *
     * @param index in bytes for where to put.
     * @param value for at a given index.
     */
    void putLongVolatile(int index, long value);

    /**
     * Put a value to a given index with ordered store semantics.
     *
     * @param index in bytes for where to put.
     * @param value for at a given index.
     */
    void putLongOrdered(int index, long value);

    /**
     * Add a value to a given index with ordered store semantics. Use a negative increment to decrement.
     *
     * @param index     in bytes for where to put.
     * @param increment by which the value at the index will be adjusted.
     * @return the previous value at the index.
     */
    long addLongOrdered(int index, long increment);

    /**
     * Atomic compare and set of a long given an expected value.
     *
     * @param index         in bytes for where to put.
     * @param expectedValue to be compared against.
     * @param updateValue   to be exchanged.
     * @return set successful or not.
     */
    boolean compareAndSetLong(int index, long expectedValue, long updateValue);

    /**
     * Atomically exchange a value at a location returning the previous contents.
     *
     * @param index in bytes for where to put.
     * @param value for at a given index.
     * @return previous value at the index.
     */
    long getAndSetLong(int index, long value);

    /**
     * Atomically add a delta to a value at a location returning the previous contents.
     * To decrement a negative delta can be provided.
     *
     * @param index in bytes for where to put.
     * @param delta to be added to the value at the index.
     * @return previous value.
     */
    long getAndAddLong(int index, long delta);

    /**
     * Get the value at a given index with volatile semantics.
     *
     * @param index in bytes from which to get.
     * @return the value for at a given index.
     */
    int getIntVolatile(int index);

    /**
     * Put a value to a given index with volatile semantics.
     *
     * @param index in bytes for where to put.
     * @param value for at a given index.
     */
    void putIntVolatile(int index, int value);

    /**
     * Put a value to a given index with ordered store semantics.
     *
     * @param index in bytes for where to put.
     * @param value for at a given index.
     */
    void putIntOrdered(int index, int value);

    /**
     * Add a value to a given index with ordered store semantics. Use a negative increment to decrement.
     *
     * @param index     in bytes for where to put.
     * @param increment by which the value at the index will be adjusted.
     * @return the previous value at the index.
     */
    int addIntOrdered(int index, int increment);

    /**
     * Atomic compare and set of an int given an expected value.
     *
     * @param index         in bytes for where to put.
     * @param expectedValue to be compared against.
     * @param updateValue   to be exchanged.
     * @return successful or not.
     */
    boolean compareAndSetInt(int index, int expectedValue, int updateValue);

    /**
     * Atomically exchange a value at a location returning the previous contents.
     *
     * @param index in bytes for where to put.
     * @param value for at a given index.
     * @return previous value.
     */
    int getAndSetInt(int index, int value);

    /**
     * Atomically add a delta to a value at a location returning the previous contents.
     * To decrement a negative delta can be provided.
     *
     * @param index in bytes for where to put.
     * @param delta to be added to the value at the index.
     * @return previous value.
     */
    int getAndAddInt(int index, int delta);

    /**
     * Get the value at a given index with volatile semantics.
     *
     * @param index in bytes from which to get.
     * @return the value for at a given index.
     */
    short getShortVolatile(int index);

    /**
     * Put a value to a given index with volatile semantics.
     *
     * @param index in bytes for where to put.
     * @param value for at a given index.
     */
    void putShortVolatile(int index, short value);

    /**
     * Get the value at a given index with volatile semantics.
     *
     * @param index in bytes from which to get.
     * @return the value for at a given index.
     */
    char getCharVolatile(int index);

    /**
     * Put a value to a given index with volatile semantics.
     *
     * @param index in bytes for where to put.
     * @param value for at a given index.
     */
    void putCharVolatile(int index, char value);

    /**
     * Get the value at a given index with volatile semantics.
     *
     * @param index in bytes from which to get.
     * @return the value for at a given index.
     */
    byte getByteVolatile(int index);

    /**
     * Put a value to a given index with volatile semantics.
     *
     * @param index in bytes for where to put.
     * @param value for at a given index.
     */
    void putByteVolatile(int index, byte value);
}
{ "pile_set_name": "Github" }
/* Arduino SdFat Library
 * Copyright (C) 2009 by William Greiman
 *
 * This file is part of the Arduino SdFat Library
 *
 * This Library is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This Library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with the Arduino SdFat Library.  If not, see
 * <http://www.gnu.org/licenses/>.
 */
#include "SdFat.h"
#ifdef __AVR__
#include <avr/pgmspace.h>
#endif
#include <Arduino.h>
//------------------------------------------------------------------------------
// Optional user callback used to stamp directory entries with date/time.
// NULL means "no callback installed" - FAT_DEFAULT_DATE/TIME are used instead.
void (*SdFile::dateTime_)(uint16_t* date, uint16_t* time) = NULL;
#if ALLOW_DEPRECATED_FUNCTIONS
// Legacy reference-style callback kept for source compatibility.
// suppress cpplint warnings with NOLINT comment
void (*SdFile::oldDateTime_)(uint16_t& date, uint16_t& time) = NULL;  // NOLINT
#endif  // ALLOW_DEPRECATED_FUNCTIONS
//------------------------------------------------------------------------------
// Add one cluster to the file's chain, making it the current cluster.
// Returns true on success, false if the volume has no free contiguous cluster.
uint8_t SdFile::addCluster() {
  if (!vol_->allocContiguous(1, &curCluster_)) return false;

  // if first cluster of file link to directory entry
  if (firstCluster_ == 0) {
    firstCluster_ = curCluster_;
    // directory entry must be rewritten by sync() to record the new chain head
    flags_ |= F_FILE_DIR_DIRTY;
  }
  return true;
}
//------------------------------------------------------------------------------
// Add a cluster to a directory file and zero the cluster.
// return with first block of cluster in the cache
uint8_t SdFile::addDirCluster(void) {
  if (!addCluster()) return false;

  // zero data in cluster; insure first cluster is in cache
  // (loop runs backwards so the cluster's FIRST block is cached on exit)
  uint32_t block = vol_->clusterStartBlock(curCluster_);
  for (uint8_t i = vol_->blocksPerCluster_; i != 0; i--) {
    if (!SdVolume::cacheZeroBlock(block + i - 1)) return false;
  }
  // Increase directory file size by cluster size (512-byte blocks)
  fileSize_ += 512UL << vol_->clusterSizeShift_;
  return true;
}
//------------------------------------------------------------------------------
// cache a file's directory entry
// return pointer to cached entry or null for failure
dir_t* SdFile::cacheDirEntry(uint8_t action) {
  if (!SdVolume::cacheRawBlock(dirBlock_, action)) return NULL;
  return SdVolume::cacheBuffer_.dir + dirIndex_;
}
//------------------------------------------------------------------------------
/**
 * Close a file and force cached data and directory information
 * to be written to the storage device.
 *
 * \return The value one, true, is returned for success and
 * the value zero, false, is returned for failure.
 * Reasons for failure include no file is open or an I/O error.
 */
uint8_t SdFile::close(void) {
  if (!sync())return false;
  type_ = FAT_FILE_TYPE_CLOSED;
  return true;
}
//------------------------------------------------------------------------------
/**
 * Check for contiguous file and return its raw block range.
 *
 * \param[out] bgnBlock the first block address for the file.
 * \param[out] endBlock the last block address for the file.
 *
 * \return The value one, true, is returned for success and
 * the value zero, false, is returned for failure.
 * Reasons for failure include file is not contiguous, file has zero length
 * or an I/O error occurred.
*/ uint8_t SdFile::contiguousRange(uint32_t* bgnBlock, uint32_t* endBlock) { // error if no blocks if (firstCluster_ == 0) return false; for (uint32_t c = firstCluster_; ; c++) { uint32_t next; if (!vol_->fatGet(c, &next)) return false; // check for contiguous if (next != (c + 1)) { // error if not end of chain if (!vol_->isEOC(next)) return false; *bgnBlock = vol_->clusterStartBlock(firstCluster_); *endBlock = vol_->clusterStartBlock(c) + vol_->blocksPerCluster_ - 1; return true; } } } //------------------------------------------------------------------------------ /** * Create and open a new contiguous file of a specified size. * * \note This function only supports short DOS 8.3 names. * See open() for more information. * * \param[in] dirFile The directory where the file will be created. * \param[in] fileName A valid DOS 8.3 file name. * \param[in] size The desired file size. * * \return The value one, true, is returned for success and * the value zero, false, is returned for failure. * Reasons for failure include \a fileName contains * an invalid DOS 8.3 file name, the FAT volume has not been initialized, * a file is already open, the file already exists, the root * directory is full or an I/O error. * */ uint8_t SdFile::createContiguous(SdFile* dirFile, const char* fileName, uint32_t size) { // don't allow zero length file if (size == 0) return false; if (!open(dirFile, fileName, O_CREAT | O_EXCL | O_RDWR)) return false; // calculate number of clusters needed uint32_t count = ((size - 1) >> (vol_->clusterSizeShift_ + 9)) + 1; // allocate clusters if (!vol_->allocContiguous(count, &firstCluster_)) { remove(); return false; } fileSize_ = size; // insure sync() will update dir entry flags_ |= F_FILE_DIR_DIRTY; return sync(); } //------------------------------------------------------------------------------ /** * Return a files directory entry * * \param[out] dir Location for return of the files directory entry. 
 *
 * \return The value one, true, is returned for success and
 * the value zero, false, is returned for failure.
 */
uint8_t SdFile::dirEntry(dir_t* dir) {
  // make sure fields on SD are correct
  if (!sync()) return false;

  // read entry
  dir_t* p = cacheDirEntry(SdVolume::CACHE_FOR_READ);
  if (!p) return false;

  // copy to caller's struct
  memcpy(dir, p, sizeof(dir_t));
  return true;
}
//------------------------------------------------------------------------------
/**
 * Format the name field of \a dir into the 13 byte array
 * \a name in standard 8.3 short name format.
 *
 * \param[in] dir The directory structure containing the name.
 * \param[out] name A 13 byte char array for the formatted name.
 */
void SdFile::dirName(const dir_t& dir, char* name) {
  uint8_t j = 0;
  // 8.3 name is stored as 11 bytes, blank padded, with no dot
  for (uint8_t i = 0; i < 11; i++) {
    if (dir.name[i] == ' ')continue;
    if (i == 8) name[j++] = '.';
    name[j++] = dir.name[i];
  }
  name[j] = 0;
}
//------------------------------------------------------------------------------
/** List directory contents to Serial.
 *
 * \param[in] flags The inclusive OR of
 *
 * LS_DATE - %Print file modification date
 *
 * LS_SIZE - %Print file size.
 *
 * LS_R - Recursive list of subdirectories.
 *
 * \param[in] indent Amount of space before file name. Used for recursive
 * list to indicate subdirectory level.
 */
void SdFile::ls(uint8_t flags, uint8_t indent) {
  dir_t* p;

  rewind();
  while ((p = readDirCache())) {
    // done if past last used entry
    if (p->name[0] == DIR_NAME_FREE) break;

    // skip deleted entry and entries for . and ..
    if (p->name[0] == DIR_NAME_DELETED || p->name[0] == '.') continue;

    // only list subdirectories and files
    if (!DIR_IS_FILE_OR_SUBDIR(p)) continue;

    // print any indent spaces
    for (int8_t i = 0; i < indent; i++) Serial.print(' ');

    // print file name with possible blank fill (14 columns when date/size follow)
    printDirName(*p, flags & (LS_DATE | LS_SIZE) ? 14 : 0);

    // print modify date/time if requested
    if (flags & LS_DATE) {
      printFatDate(p->lastWriteDate);
      Serial.print(' ');
      printFatTime(p->lastWriteTime);
    }
    // print size if requested
    if (!DIR_IS_SUBDIR(p) && (flags & LS_SIZE)) {
      Serial.print(' ');
      Serial.print(p->fileSize);
    }
    Serial.println();

    // list subdirectory content if requested
    if ((flags & LS_R) && DIR_IS_SUBDIR(p)) {
      // readDirCache advanced position past the entry; back up one (32 bytes/entry)
      uint16_t index = curPosition()/32 - 1;
      SdFile s;
      if (s.open(this, index, O_READ)) s.ls(flags, indent + 2);
      // restore our position after the recursive listing moved the cache
      seekSet(32 * (index + 1));
    }
  }
}
//------------------------------------------------------------------------------
// Format directory name field from an 8.3 name string.
// Returns true and fills the 11-byte blank-padded field, or false if the
// string is not a valid DOS 8.3 name.
uint8_t SdFile::make83Name(const char* str, uint8_t* name) {
  uint8_t c;
  uint8_t n = 7;  // max index for part before dot
  uint8_t i = 0;
  // blank fill name and extension
  while (i < 11) name[i++] = ' ';
  i = 0;
  while ((c = *str++) != '\0') {
    if (c == '.') {
      if (n == 10) return false;  // only one dot allowed
      n = 10;  // max index for full 8.3 name
      i = 8;   // place for extension
    } else {
      // illegal FAT characters
      uint8_t b;
#if defined(__AVR__)
      PGM_P p = PSTR("|<>^+=?/[];,*\"\\");
      while ((b = pgm_read_byte(p++))) if (b == c) return false;
#elif defined(__arm__)
      const uint8_t valid[] = "|<>^+=?/[];,*\"\\";
      const uint8_t *p = valid;
      while ((b = *p++)) if (b == c) return false;
#endif
      // check size and only allow ASCII printable characters
      if (i > n || c < 0X21 || c > 0X7E)return false;
      // only upper case allowed in 8.3 names - convert lower to upper
      name[i++] = c < 'a' || c > 'z' ? c : c + ('A' - 'a');
    }
  }
  // must have a file name, extension is optional
  return name[0] != ' ';
}
//------------------------------------------------------------------------------
/** Make a new directory.
 *
 * \param[in] dir An open SdFat instance for the directory that will contain
 * the new directory.
 *
 * \param[in] dirName A valid 8.3 DOS name for the new directory.
* * \return The value one, true, is returned for success and * the value zero, false, is returned for failure. * Reasons for failure include this SdFile is already open, \a dir is not a * directory, \a dirName is invalid or already exists in \a dir. */ uint8_t SdFile::makeDir(SdFile* dir, const char* dirName) { dir_t d; // create a normal file if (!open(dir, dirName, O_CREAT | O_EXCL | O_RDWR)) return false; // convert SdFile to directory flags_ = O_READ; type_ = FAT_FILE_TYPE_SUBDIR; // allocate and zero first cluster if (!addDirCluster())return false; // force entry to SD if (!sync()) return false; // cache entry - should already be in cache due to sync() call dir_t* p = cacheDirEntry(SdVolume::CACHE_FOR_WRITE); if (!p) return false; // change directory entry attribute p->attributes = DIR_ATT_DIRECTORY; // make entry for '.' memcpy(&d, p, sizeof(d)); for (uint8_t i = 1; i < 11; i++) d.name[i] = ' '; d.name[0] = '.'; // cache block for '.' and '..' uint32_t block = vol_->clusterStartBlock(firstCluster_); if (!SdVolume::cacheRawBlock(block, SdVolume::CACHE_FOR_WRITE)) return false; // copy '.' to block memcpy(&SdVolume::cacheBuffer_.dir[0], &d, sizeof(d)); // make entry for '..' d.name[1] = '.'; if (dir->isRoot()) { d.firstClusterLow = 0; d.firstClusterHigh = 0; } else { d.firstClusterLow = dir->firstCluster_ & 0XFFFF; d.firstClusterHigh = dir->firstCluster_ >> 16; } // copy '..' to block memcpy(&SdVolume::cacheBuffer_.dir[1], &d, sizeof(d)); // set position after '..' curPosition_ = 2 * sizeof(d); // write first block return SdVolume::cacheFlush(); } //------------------------------------------------------------------------------ /** * Open a file or directory by name. * * \param[in] dirFile An open SdFat instance for the directory containing the * file to be opened. * * \param[in] fileName A valid 8.3 DOS name for a file to be opened. 
 *
 * \param[in] oflag Values for \a oflag are constructed by a bitwise-inclusive
 * OR of flags from the following list
 *
 * O_READ - Open for reading.
 *
 * O_RDONLY - Same as O_READ.
 *
 * O_WRITE - Open for writing.
 *
 * O_WRONLY - Same as O_WRITE.
 *
 * O_RDWR - Open for reading and writing.
 *
 * O_APPEND - If set, the file offset shall be set to the end of the
 * file prior to each write.
 *
 * O_CREAT - If the file exists, this flag has no effect except as noted
 * under O_EXCL below. Otherwise, the file shall be created
 *
 * O_EXCL - If O_CREAT and O_EXCL are set, open() shall fail if the file exists.
 *
 * O_SYNC - Call sync() after each write.  This flag should not be used with
 * write(uint8_t), write_P(PGM_P), writeln_P(PGM_P), or the Arduino Print class.
 * These functions do character at a time writes so sync() will be called
 * after each byte.
 *
 * O_TRUNC - If the file exists and is a regular file, and the file is
 * successfully opened and is not read only, its length shall be truncated to 0.
 *
 * \note Directory files must be opened read only.  Write and truncation is
 * not allowed for directory files.
 *
 * \return The value one, true, is returned for success and
 * the value zero, false, is returned for failure.
 * Reasons for failure include this SdFile is already open, \a difFile is not
 * a directory, \a fileName is invalid, the file does not exist
 * or can't be opened in the access mode specified by oflag.
 */
uint8_t SdFile::open(SdFile* dirFile, const char* fileName, uint8_t oflag) {
  uint8_t dname[11];
  dir_t* p;

  // error if already open
  if (isOpen())return false;

  if (!make83Name(fileName, dname)) return false;
  vol_ = dirFile->vol_;
  dirFile->rewind();

  // bool for empty entry found
  uint8_t emptyFound = false;

  // search for file (dir entries are 32 bytes; 16 entries per 512-byte block)
  while (dirFile->curPosition_ < dirFile->fileSize_) {
    uint8_t index = 0XF & (dirFile->curPosition_ >> 5);
    p = dirFile->readDirCache();
    if (p == NULL) return false;

    if (p->name[0] == DIR_NAME_FREE || p->name[0] == DIR_NAME_DELETED) {
      // remember first empty slot so a new file can reuse it
      if (!emptyFound) {
        emptyFound = true;
        dirIndex_ = index;
        dirBlock_ = SdVolume::cacheBlockNumber_;
      }
      // done if no entries follow
      if (p->name[0] == DIR_NAME_FREE) break;
    } else if (!memcmp(dname, p->name, 11)) {
      // don't open existing file if O_CREAT and O_EXCL
      if ((oflag & (O_CREAT | O_EXCL)) == (O_CREAT | O_EXCL)) return false;

      // open found file
      return openCachedEntry(0XF & index, oflag);
    }
  }
  // only create file if O_CREAT and O_WRITE
  if ((oflag & (O_CREAT | O_WRITE)) != (O_CREAT | O_WRITE)) return false;

  // cache found slot or add cluster if end of file
  if (emptyFound) {
    p = cacheDirEntry(SdVolume::CACHE_FOR_WRITE);
    if (!p) return false;
  } else {
    // FAT16 root directory has a fixed size and cannot grow
    if (dirFile->type_ == FAT_FILE_TYPE_ROOT16) return false;

    // add and zero cluster for dirFile - first cluster is in cache for write
    if (!dirFile->addDirCluster()) return false;

    // use first entry in cluster
    dirIndex_ = 0;
    p = SdVolume::cacheBuffer_.dir;
  }
  // initialize as empty file
  memset(p, 0, sizeof(dir_t));
  memcpy(p->name, dname, 11);

  // set timestamps
  if (dateTime_) {
    // call user function
    dateTime_(&p->creationDate, &p->creationTime);
  } else {
    // use default date/time
    p->creationDate = FAT_DEFAULT_DATE;
    p->creationTime = FAT_DEFAULT_TIME;
  }
  p->lastAccessDate = p->creationDate;
  p->lastWriteDate = p->creationDate;
  p->lastWriteTime = p->creationTime;

  // force write of entry to SD
  if (!SdVolume::cacheFlush()) return false;

  // open entry in cache
  return openCachedEntry(dirIndex_, oflag);
}
//------------------------------------------------------------------------------
/**
 * Open a file by index.
 *
 * \param[in] dirFile An open SdFat instance for the directory.
 *
 * \param[in] index The \a index of the directory entry for the file to be
 * opened.  The value for \a index is (directory file position)/32.
 *
 * \param[in] oflag Values for \a oflag are constructed by a bitwise-inclusive
 * OR of flags O_READ, O_WRITE, O_TRUNC, and O_SYNC.
 *
 * See open() by fileName for definition of flags and return values.
 *
 */
uint8_t SdFile::open(SdFile* dirFile, uint16_t index, uint8_t oflag) {
  // error if already open
  if (isOpen())return false;

  // don't open existing file if O_CREAT and O_EXCL - user call error
  if ((oflag & (O_CREAT | O_EXCL)) == (O_CREAT | O_EXCL)) return false;

  vol_ = dirFile->vol_;

  // seek to location of entry (32 bytes per directory entry)
  if (!dirFile->seekSet(32 * index)) return false;

  // read entry into cache
  dir_t* p = dirFile->readDirCache();
  if (p == NULL) return false;

  // error if empty slot or '.' or '..'
  if (p->name[0] == DIR_NAME_FREE ||
      p->name[0] == DIR_NAME_DELETED || p->name[0] == '.') {
    return false;
  }
  // open cached entry
  return openCachedEntry(index & 0XF, oflag);
}
//------------------------------------------------------------------------------
// open a cached directory entry.
Assumes vol_ is initializes uint8_t SdFile::openCachedEntry(uint8_t dirIndex, uint8_t oflag) { // location of entry in cache dir_t* p = SdVolume::cacheBuffer_.dir + dirIndex; // write or truncate is an error for a directory or read-only file if (p->attributes & (DIR_ATT_READ_ONLY | DIR_ATT_DIRECTORY)) { if (oflag & (O_WRITE | O_TRUNC)) return false; } // remember location of directory entry on SD dirIndex_ = dirIndex; dirBlock_ = SdVolume::cacheBlockNumber_; // copy first cluster number for directory fields firstCluster_ = (uint32_t)p->firstClusterHigh << 16; firstCluster_ |= p->firstClusterLow; // make sure it is a normal file or subdirectory if (DIR_IS_FILE(p)) { fileSize_ = p->fileSize; type_ = FAT_FILE_TYPE_NORMAL; } else if (DIR_IS_SUBDIR(p)) { if (!vol_->chainSize(firstCluster_, &fileSize_)) return false; type_ = FAT_FILE_TYPE_SUBDIR; } else { return false; } // save open flags for read/write flags_ = oflag & (O_ACCMODE | O_SYNC | O_APPEND); // set to start of file curCluster_ = 0; curPosition_ = 0; // truncate file to zero length if requested if (oflag & O_TRUNC) return truncate(0); return true; } //------------------------------------------------------------------------------ /** * Open a volume's root directory. * * \param[in] vol The FAT volume containing the root directory to be opened. * * \return The value one, true, is returned for success and * the value zero, false, is returned for failure. * Reasons for failure include the FAT volume has not been initialized * or it a FAT12 volume. 
*/ uint8_t SdFile::openRoot(SdVolume* vol) { // error if file is already open if (isOpen()) return false; if (vol->fatType() == 16) { type_ = FAT_FILE_TYPE_ROOT16; firstCluster_ = 0; fileSize_ = 32 * vol->rootDirEntryCount(); } else if (vol->fatType() == 32) { type_ = FAT_FILE_TYPE_ROOT32; firstCluster_ = vol->rootDirStart(); if (!vol->chainSize(firstCluster_, &fileSize_)) return false; } else { // volume is not initialized or FAT12 return false; } vol_ = vol; // read only flags_ = O_READ; // set to start of file curCluster_ = 0; curPosition_ = 0; // root has no directory entry dirBlock_ = 0; dirIndex_ = 0; return true; } //------------------------------------------------------------------------------ /** %Print the name field of a directory entry in 8.3 format to Serial. * * \param[in] dir The directory structure containing the name. * \param[in] width Blank fill name if length is less than \a width. */ void SdFile::printDirName(const dir_t& dir, uint8_t width) { uint8_t w = 0; for (uint8_t i = 0; i < 11; i++) { if (dir.name[i] == ' ')continue; if (i == 8) { Serial.print('.'); w++; } Serial.write(dir.name[i]); w++; } if (DIR_IS_SUBDIR(&dir)) { Serial.print('/'); w++; } while (w < width) { Serial.print(' '); w++; } } //------------------------------------------------------------------------------ /** %Print a directory date field to Serial. * * Format is yyyy-mm-dd. * * \param[in] fatDate The date field from a directory entry. */ void SdFile::printFatDate(uint16_t fatDate) { Serial.print(FAT_YEAR(fatDate)); Serial.print('-'); printTwoDigits(FAT_MONTH(fatDate)); Serial.print('-'); printTwoDigits(FAT_DAY(fatDate)); } //------------------------------------------------------------------------------ /** %Print a directory time field to Serial. * * Format is hh:mm:ss. * * \param[in] fatTime The time field from a directory entry. 
*/ void SdFile::printFatTime(uint16_t fatTime) { printTwoDigits(FAT_HOUR(fatTime)); Serial.print(':'); printTwoDigits(FAT_MINUTE(fatTime)); Serial.print(':'); printTwoDigits(FAT_SECOND(fatTime)); } //------------------------------------------------------------------------------ /** %Print a value as two digits to Serial. * * \param[in] v Value to be printed, 0 <= \a v <= 99 */ void SdFile::printTwoDigits(uint8_t v) { char str[3]; str[0] = '0' + v/10; str[1] = '0' + v % 10; str[2] = 0; Serial.print(str); } //------------------------------------------------------------------------------ /** * Read data from a file starting at the current position. * * \param[out] buf Pointer to the location that will receive the data. * * \param[in] nbyte Maximum number of bytes to read. * * \return For success read() returns the number of bytes read. * A value less than \a nbyte, including zero, will be returned * if end of file is reached. * If an error occurs, read() returns -1. Possible errors include * read() called before a file has been opened, corrupt file system * or an I/O error occurred. 
*/ int16_t SdFile::read(void* buf, uint16_t nbyte) { uint8_t* dst = reinterpret_cast<uint8_t*>(buf); // error if not open or write only if (!isOpen() || !(flags_ & O_READ)) return -1; // max bytes left in file if (nbyte > (fileSize_ - curPosition_)) nbyte = fileSize_ - curPosition_; // amount left to read uint16_t toRead = nbyte; while (toRead > 0) { uint32_t block; // raw device block number uint16_t offset = curPosition_ & 0X1FF; // offset in block if (type_ == FAT_FILE_TYPE_ROOT16) { block = vol_->rootDirStart() + (curPosition_ >> 9); } else { uint8_t blockOfCluster = vol_->blockOfCluster(curPosition_); if (offset == 0 && blockOfCluster == 0) { // start of new cluster if (curPosition_ == 0) { // use first cluster in file curCluster_ = firstCluster_; } else { // get next cluster from FAT if (!vol_->fatGet(curCluster_, &curCluster_)) return -1; } } block = vol_->clusterStartBlock(curCluster_) + blockOfCluster; } uint16_t n = toRead; // amount to be read from current block if (n > (512 - offset)) n = 512 - offset; // no buffering needed if n == 512 or user requests no buffering if ((unbufferedRead() || n == 512) && block != SdVolume::cacheBlockNumber_) { if (!vol_->readData(block, offset, n, dst)) return -1; dst += n; } else { // read block to cache and copy data to caller if (!SdVolume::cacheRawBlock(block, SdVolume::CACHE_FOR_READ)) return -1; uint8_t* src = SdVolume::cacheBuffer_.data + offset; uint8_t* end = src + n; while (src != end) *dst++ = *src++; } curPosition_ += n; toRead -= n; } return nbyte; } //------------------------------------------------------------------------------ /** * Read the next directory entry from a directory file. * * \param[out] dir The dir_t struct that will receive the data. * * \return For success readDir() returns the number of bytes read. * A value of zero will be returned if end of file is reached. * If an error occurs, readDir() returns -1. 
Possible errors include * readDir() called before a directory has been opened, this is not * a directory file or an I/O error occurred. */ int8_t SdFile::readDir(dir_t* dir) { int8_t n; // if not a directory file or miss-positioned return an error if (!isDir() || (0X1F & curPosition_)) return -1; while ((n = read(dir, sizeof(dir_t))) == sizeof(dir_t)) { // last entry if DIR_NAME_FREE if (dir->name[0] == DIR_NAME_FREE) break; // skip empty entries and entry for . and .. if (dir->name[0] == DIR_NAME_DELETED || dir->name[0] == '.') continue; // return if normal file or subdirectory if (DIR_IS_FILE_OR_SUBDIR(dir)) return n; } // error, end of file, or past last entry return n < 0 ? -1 : 0; } //------------------------------------------------------------------------------ // Read next directory entry into the cache // Assumes file is correctly positioned dir_t* SdFile::readDirCache(void) { // error if not directory if (!isDir()) return NULL; // index of entry in cache uint8_t i = (curPosition_ >> 5) & 0XF; // use read to locate and cache block if (read() < 0) return NULL; // advance to next entry curPosition_ += 31; // return pointer to entry return (SdVolume::cacheBuffer_.dir + i); } //------------------------------------------------------------------------------ /** * Remove a file. * * The directory entry and all data for the file are deleted. * * \note This function should not be used to delete the 8.3 version of a * file that has a long name. For example if a file has the long name * "New Text Document.txt" you should not delete the 8.3 name "NEWTEX~1.TXT". * * \return The value one, true, is returned for success and * the value zero, false, is returned for failure. * Reasons for failure include the file read-only, is a directory, * or an I/O error occurred. 
*/ uint8_t SdFile::remove(void) { // free any clusters - will fail if read-only or directory if (!truncate(0)) return false; // cache directory entry dir_t* d = cacheDirEntry(SdVolume::CACHE_FOR_WRITE); if (!d) return false; // mark entry deleted d->name[0] = DIR_NAME_DELETED; // set this SdFile closed type_ = FAT_FILE_TYPE_CLOSED; // write entry to SD return SdVolume::cacheFlush(); } //------------------------------------------------------------------------------ /** * Remove a file. * * The directory entry and all data for the file are deleted. * * \param[in] dirFile The directory that contains the file. * \param[in] fileName The name of the file to be removed. * * \note This function should not be used to delete the 8.3 version of a * file that has a long name. For example if a file has the long name * "New Text Document.txt" you should not delete the 8.3 name "NEWTEX~1.TXT". * * \return The value one, true, is returned for success and * the value zero, false, is returned for failure. * Reasons for failure include the file is a directory, is read only, * \a dirFile is not a directory, \a fileName is not found * or an I/O error occurred. */ uint8_t SdFile::remove(SdFile* dirFile, const char* fileName) { SdFile file; if (!file.open(dirFile, fileName, O_WRITE)) return false; return file.remove(); } //------------------------------------------------------------------------------ /** Remove a directory file. * * The directory file will be removed only if it is empty and is not the * root directory. rmDir() follows DOS and Windows and ignores the * read-only attribute for the directory. * * \note This function should not be used to delete the 8.3 version of a * directory that has a long name. For example if a directory has the * long name "New folder" you should not delete the 8.3 name "NEWFOL~1". * * \return The value one, true, is returned for success and * the value zero, false, is returned for failure. 
 * Reasons for failure include the file is not a directory, is the root
 * directory, is not empty, or an I/O error occurred.
 */
uint8_t SdFile::rmDir(void) {
  // must be open subdirectory
  if (!isSubDir()) return false;

  rewind();

  // make sure directory is empty
  while (curPosition_ < fileSize_) {
    dir_t* p = readDirCache();
    if (p == NULL) return false;
    // done if past last used entry
    if (p->name[0] == DIR_NAME_FREE) break;
    // skip empty slot or '.' or '..'
    if (p->name[0] == DIR_NAME_DELETED || p->name[0] == '.') continue;
    // error not empty
    if (DIR_IS_FILE_OR_SUBDIR(p)) return false;
  }
  // convert empty directory to normal file for remove
  type_ = FAT_FILE_TYPE_NORMAL;
  flags_ |= O_WRITE;
  return remove();
}
//------------------------------------------------------------------------------
/** Recursively delete a directory and all contained files.
 *
 * This is like the Unix/Linux 'rm -rf *' if called with the root directory
 * hence the name.
 *
 * Warning - This will remove all contents of the directory including
 * subdirectories.  The directory will then be removed if it is not root.
 * The read-only attribute for files will be ignored.
 *
 * \note This function should not be used to delete the 8.3 version of
 * a directory that has a long name.  See remove() and rmDir().
 *
 * \return The value one, true, is returned for success and
 * the value zero, false, is returned for failure.
 */
uint8_t SdFile::rmRfStar(void) {
  rewind();
  while (curPosition_ < fileSize_) {
    SdFile f;

    // remember position (entry index; entries are 32 bytes)
    uint16_t index = curPosition_/32;

    dir_t* p = readDirCache();
    if (!p) return false;

    // done if past last entry
    if (p->name[0] == DIR_NAME_FREE) break;

    // skip empty slot or '.' or '..'
    if (p->name[0] == DIR_NAME_DELETED || p->name[0] == '.') continue;

    // skip if part of long file name or volume label in root
    if (!DIR_IS_FILE_OR_SUBDIR(p)) continue;

    if (!f.open(this, index, O_READ)) return false;
    if (f.isSubDir()) {
      // recursively delete
      if (!f.rmRfStar()) return false;
    } else {
      // ignore read-only
      f.flags_ |= O_WRITE;
      if (!f.remove()) return false;
    }
    // position to next entry if required
    // (the recursive call may have moved our position via the shared cache)
    if (curPosition_ != (32*(index + 1))) {
      if (!seekSet(32*(index + 1))) return false;
    }
  }
  // don't try to delete root
  if (isRoot()) return true;
  return rmDir();
}
//------------------------------------------------------------------------------
/**
 * Sets a file's position.
 *
 * \param[in] pos The new position in bytes from the beginning of the file.
 *
 * \return The value one, true, is returned for success and
 * the value zero, false, is returned for failure.
 */
uint8_t SdFile::seekSet(uint32_t pos) {
  // error if file not open or seek past end of file
  if (!isOpen() || pos > fileSize_) return false;

  if (type_ == FAT_FILE_TYPE_ROOT16) {
    // FAT16 root is contiguous - no chain to follow
    curPosition_ = pos;
    return true;
  }
  if (pos == 0) {
    // set position to start of file
    curCluster_ = 0;
    curPosition_ = 0;
    return true;
  }
  // calculate cluster index for cur and new position
  uint32_t nCur = (curPosition_ - 1) >> (vol_->clusterSizeShift_ + 9);
  uint32_t nNew = (pos - 1) >> (vol_->clusterSizeShift_ + 9);

  if (nNew < nCur || curPosition_ == 0) {
    // must follow chain from first cluster
    curCluster_ = firstCluster_;
  } else {
    // advance from curPosition
    nNew -= nCur;
  }
  while (nNew--) {
    if (!vol_->fatGet(curCluster_, &curCluster_)) return false;
  }
  curPosition_ = pos;
  return true;
}
//------------------------------------------------------------------------------
/**
 * The sync() call causes all modified data and directory fields
 * to be written to the storage device.
 *
 * \return The value one, true, is returned for success and
 * the value zero, false, is returned for failure.
* Reasons for failure include a call to sync() before a file has been * opened or an I/O error. */ uint8_t SdFile::sync(void) { // only allow open files and directories if (!isOpen()) return false; if (flags_ & F_FILE_DIR_DIRTY) { dir_t* d = cacheDirEntry(SdVolume::CACHE_FOR_WRITE); if (!d) return false; // do not set filesize for dir files if (!isDir()) d->fileSize = fileSize_; // update first cluster fields d->firstClusterLow = firstCluster_ & 0XFFFF; d->firstClusterHigh = firstCluster_ >> 16; // set modify time if user supplied a callback date/time function if (dateTime_) { dateTime_(&d->lastWriteDate, &d->lastWriteTime); d->lastAccessDate = d->lastWriteDate; } // clear directory dirty flags_ &= ~F_FILE_DIR_DIRTY; } return SdVolume::cacheFlush(); } //------------------------------------------------------------------------------ /** * Set a file's timestamps in its directory entry. * * \param[in] flags Values for \a flags are constructed by a bitwise-inclusive * OR of flags from the following list * * T_ACCESS - Set the file's last access date. * * T_CREATE - Set the file's creation date and time. * * T_WRITE - Set the file's last write/modification date and time. * * \param[in] year Valid range 1980 - 2107 inclusive. * * \param[in] month Valid range 1 - 12 inclusive. * * \param[in] day Valid range 1 - 31 inclusive. * * \param[in] hour Valid range 0 - 23 inclusive. * * \param[in] minute Valid range 0 - 59 inclusive. * * \param[in] second Valid range 0 - 59 inclusive * * \note It is possible to set an invalid date since there is no check for * the number of days in a month. * * \note * Modify and access timestamps may be overwritten if a date time callback * function has been set by dateTimeCallback(). * * \return The value one, true, is returned for success and * the value zero, false, is returned for failure. 
 */
uint8_t SdFile::timestamp(uint8_t flags, uint16_t year, uint8_t month,
         uint8_t day, uint8_t hour, uint8_t minute, uint8_t second) {
  if (!isOpen()
    || year < 1980
    || year > 2107
    || month < 1
    || month > 12
    || day < 1
    || day > 31
    || hour > 23
    || minute > 59
    || second > 59) {
      return false;
  }
  dir_t* d = cacheDirEntry(SdVolume::CACHE_FOR_WRITE);
  if (!d) return false;

  uint16_t dirDate = FAT_DATE(year, month, day);
  uint16_t dirTime = FAT_TIME(hour, minute, second);
  if (flags & T_ACCESS) {
    d->lastAccessDate = dirDate;
  }
  if (flags & T_CREATE) {
    d->creationDate = dirDate;
    d->creationTime = dirTime;
    // seems to be units of 1/100 second not 1/10 as Microsoft states
    d->creationTimeTenths = second & 1 ? 100 : 0;
  }
  if (flags & T_WRITE) {
    d->lastWriteDate = dirDate;
    d->lastWriteTime = dirTime;
  }
  SdVolume::cacheSetDirty();
  return sync();
}
//------------------------------------------------------------------------------
/**
 * Truncate a file to a specified length.  The current file position
 * will be maintained if it is less than or equal to \a length otherwise
 * it will be set to end of file.
 *
 * \param[in] length The desired length for the file.
 *
 * \return The value one, true, is returned for success and
 * the value zero, false, is returned for failure.
 * Reasons for failure include file is read only, file is a directory,
 * \a length is greater than the current file size or an I/O error occurs.
 */
uint8_t SdFile::truncate(uint32_t length) {
  // error if not a normal file or read-only
  if (!isFile() || !(flags_ & O_WRITE)) return false;

  // error if length is greater than current size
  if (length > fileSize_) return false;

  // fileSize and length are zero - nothing to do
  if (fileSize_ == 0) return true;

  // remember position for seek after truncation
  uint32_t newPos = curPosition_ > length ? length : curPosition_;

  // position to last cluster in truncated file
  if (!seekSet(length)) return false;

  if (length == 0) {
    // free all clusters
    if (!vol_->freeChain(firstCluster_)) return false;
    firstCluster_ = 0;
  } else {
    uint32_t toFree;
    if (!vol_->fatGet(curCluster_, &toFree)) return false;

    if (!vol_->isEOC(toFree)) {
      // free extra clusters
      if (!vol_->freeChain(toFree)) return false;

      // current cluster is end of chain
      if (!vol_->fatPutEOC(curCluster_)) return false;
    }
  }
  fileSize_ = length;

  // need to update directory entry
  flags_ |= F_FILE_DIR_DIRTY;

  if (!sync()) return false;

  // set file to correct position
  return seekSet(newPos);
}
//------------------------------------------------------------------------------
/**
 * Write data to an open file.
 *
 * \note Data is moved to the cache but may not be written to the
 * storage device until sync() is called.
 *
 * \param[in] buf Pointer to the location of the data to be written.
 *
 * \param[in] nbyte Number of bytes to write.
 *
 * \return For success write() returns the number of bytes written, always
 * \a nbyte.  If an error occurs, write() returns -1.  Possible errors
 * include write() is called before a file has been opened, write is called
 * for a read-only file, device is full, a corrupt file system or an I/O error.
* */ size_t SdFile::write(const void* buf, uint16_t nbyte) { // convert void* to uint8_t* - must be before goto statements const uint8_t* src = reinterpret_cast<const uint8_t*>(buf); // number of bytes left to write - must be before goto statements uint16_t nToWrite = nbyte; // error if not a normal file or is read-only if (!isFile() || !(flags_ & O_WRITE)) goto writeErrorReturn; // seek to end of file if append flag if ((flags_ & O_APPEND) && curPosition_ != fileSize_) { if (!seekEnd()) goto writeErrorReturn; } while (nToWrite > 0) { uint8_t blockOfCluster = vol_->blockOfCluster(curPosition_); uint16_t blockOffset = curPosition_ & 0X1FF; if (blockOfCluster == 0 && blockOffset == 0) { // start of new cluster if (curCluster_ == 0) { if (firstCluster_ == 0) { // allocate first cluster of file if (!addCluster()) goto writeErrorReturn; } else { curCluster_ = firstCluster_; } } else { uint32_t next; if (!vol_->fatGet(curCluster_, &next)) return false; if (vol_->isEOC(next)) { // add cluster if at end of chain if (!addCluster()) goto writeErrorReturn; } else { curCluster_ = next; } } } // max space in block uint16_t n = 512 - blockOffset; // lesser of space and amount to write if (n > nToWrite) n = nToWrite; // block for data write uint32_t block = vol_->clusterStartBlock(curCluster_) + blockOfCluster; if (n == 512) { // full block - don't need to use cache // invalidate cache if block is in cache if (SdVolume::cacheBlockNumber_ == block) { SdVolume::cacheBlockNumber_ = 0XFFFFFFFF; } if (!vol_->writeBlock(block, src)) goto writeErrorReturn; src += 512; } else { if (blockOffset == 0 && curPosition_ >= fileSize_) { // start of new block don't need to read into cache if (!SdVolume::cacheFlush()) goto writeErrorReturn; SdVolume::cacheBlockNumber_ = block; SdVolume::cacheSetDirty(); } else { // rewrite part of block if (!SdVolume::cacheRawBlock(block, SdVolume::CACHE_FOR_WRITE)) { goto writeErrorReturn; } } uint8_t* dst = SdVolume::cacheBuffer_.data + blockOffset; uint8_t* 
end = dst + n; while (dst != end) *dst++ = *src++; } nToWrite -= n; curPosition_ += n; } if (curPosition_ > fileSize_) { // update fileSize and insure sync will update dir entry fileSize_ = curPosition_; flags_ |= F_FILE_DIR_DIRTY; } else if (dateTime_ && nbyte) { // insure sync will update modified date and time flags_ |= F_FILE_DIR_DIRTY; } if (flags_ & O_SYNC) { if (!sync()) goto writeErrorReturn; } return nbyte; writeErrorReturn: // return for write error //writeError = true; setWriteError(); return 0; } //------------------------------------------------------------------------------ /** * Write a byte to a file. Required by the Arduino Print class. * * Use SdFile::writeError to check for errors. */ size_t SdFile::write(uint8_t b) { return write(&b, 1); } //------------------------------------------------------------------------------ /** * Write a string to a file. Used by the Arduino Print class. * * Use SdFile::writeError to check for errors. */ size_t SdFile::write(const char* str) { return write(str, strlen(str)); } #ifdef __AVR__ //------------------------------------------------------------------------------ /** * Write a PROGMEM string to a file. * * Use SdFile::writeError to check for errors. */ void SdFile::write_P(PGM_P str) { for (uint8_t c; (c = pgm_read_byte(str)); str++) write(c); } //------------------------------------------------------------------------------ /** * Write a PROGMEM string followed by CR/LF to a file. * * Use SdFile::writeError to check for errors. */ void SdFile::writeln_P(PGM_P str) { write_P(str); println(); } #endif
{ "pile_set_name": "Github" }
<?xml version='1.0' encoding='UTF-8' standalone='yes'?> <assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'> <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3"> <security> <requestedPrivileges> <requestedExecutionLevel level='asInvoker' uiAccess='false' /> </requestedPrivileges> </security> </trustInfo> </assembly>
{ "pile_set_name": "Github" }
<?xml version='1.0'?> <gl_extension name="GL_EXT_draw_instanced" reg_no="327" prefer="GL_ARB_draw_instanced"> <functions> <function name="glDrawArraysInstancedEXT" return="void"> <params> <param type="GLenum" name="mode"/> <param type="GLint" name="start"/> <param type="GLsizei" name="count"/> <param type="GLsizei" name="primcount"/> </params> </function> <function name="glDrawElementsInstancedEXT" return="void"> <params> <param type="GLenum" name="mode"/> <param type="GLsizei" name="count"/> <param type="GLenum" name="type"/> <param type="const GLvoid*" name="indices"/> <param type="GLsizei" name="primcount"/> </params> </function> </functions> </gl_extension>
{ "pile_set_name": "Github" }
--- title: Tutorials and training for SharePoint Development description: Different tutorials and other resources around SharePoint development. ms.date: 02/27/2020 ms.prod: sharepoint localization_priority: Priority --- # Tutorials and training material for SharePoint Development Here are the different tutorials and training assets available for you to get started on building SharePoint Framework solutions for SharePoint, Microsoft Teams, and Office clients. These tutorials and training modules include both written and a video format. ## Setting up your development environment - [Set up your Office 365 tenant](https://docs.microsoft.com/sharepoint/dev/spfx/set-up-your-developer-tenant) | [video](https://www.youtube.com/watch?v=yc1IYgYp7qQ&list=PLR9nK3mnD-OXvSWvS2zglCzz4iplhVrKq) - Includes how to get a free tenant from Microsoft 365 developer program - [Set up development environment](https://docs.microsoft.com/sharepoint/dev/spfx/set-up-your-development-environment) | [video](https://www.youtube.com/watch?v=-2-jWsEa2Yw&list=PLR9nK3mnD-OXvSWvS2zglCzz4iplhVrKq&index=2) - Install needed tooling to get started ## Getting started tutorials for SharePoint Framework - [Getting started with SharePoint Framework client-side web parts](https://docs.microsoft.com/sharepoint/dev/spfx/web-parts/get-started/build-a-hello-world-web-part) | [video](https://www.youtube.com/watch?v=_O2Re5uRLoo&list=PLR9nK3mnD-OXvSWvS2zglCzz4iplhVrKq&index=3) - Tutorial series with 4 parts - [Using Microsoft Graph APIs in your solution](https://docs.microsoft.com/sharepoint/dev/spfx/web-parts/get-started/using-microsoft-graph-apis) | [video](https://www.youtube.com/watch?v=tHzbh5JoC-A&list=PLR9nK3mnD-OXvSWvS2zglCzz4iplhVrKq&index=7) - [Building Microsoft Teams tab using SharePoint Framework](https://docs.microsoft.com/sharepoint/dev/spfx/web-parts/get-started/using-web-part-as-ms-teams-tab) | [video](https://www.youtube.com/watch?v=JoTAC2i-XeU&list=PLR9nK3mnD-OXvSWvS2zglCzz4iplhVrKq&index=8) - [Add 
jQueryUI Accordion to your SharePoint client-side web part](https://docs.microsoft.com/sharepoint/dev/spfx/web-parts/get-started/add-jqueryui-accordion-to-web-part) | [video](https://www.youtube.com/watch?v=N0C9azIyiTo&list=PLR9nK3mnD-OXvSWvS2zglCzz4iplhVrKq&index=10) - [Use Office UI Fabric React components in your SharePoint client-side web part](https://docs.microsoft.com/sharepoint/dev/spfx/web-parts/get-started/use-fabric-react-components) | [video](https://www.youtube.com/watch?v=kNrYd8nYaZY&list=PLR9nK3mnD-OXvSWvS2zglCzz4iplhVrKq&index=11) - [Tutorial for creating Outlook Web App extension using SharePoint Framework - Preview](https://docs.microsoft.com/sharepoint/dev/spfx/web-parts/get-started/office-addins-tutorial) | [video](https://www.youtube.com/watch?v=QtGjTAjbIKU&list=PLR9nK3mnD-OXvSWvS2zglCzz4iplhVrKq&index=9) ## Getting started tutorials for SharePoint Framework extensions - [Getting started with SharePoint Framework Extensions](https://docs.microsoft.com/sharepoint/dev/spfx/extensions/get-started/build-a-hello-world-extension) | [video](https://www.youtube.com/watch?v=DnfRIl2YN8g&list=PLR9nK3mnD-OXtWO5AIIr7nCR3sWutACpV) - Tutorial series with 4 parts - [Build your first Field Customizer extension](https://docs.microsoft.com/sharepoint/dev/spfx/extensions/get-started/building-simple-field-customizer) | [video](https://www.youtube.com/watch?v=mBZ7Sq_KfDA&list=PLR9nK3mnD-OXtWO5AIIr7nCR3sWutACpV&index=5) - [Build your first ListView Command Set extension](https://docs.microsoft.com/sharepoint/dev/spfx/extensions/get-started/building-simple-cmdset-with-dialog-api) | [video](https://www.youtube.com/watch?v=uaUGtLrNbRA&list=PLR9nK3mnD-OXtWO5AIIr7nCR3sWutACpV&index=6) ## Additional tutorials for SharePoint Framework - [Provision SharePoint assets from your SharePoint client-side web part](https://docs.microsoft.com/sharepoint/dev/spfx/web-parts/get-started/provision-sp-assets-from-package) | 
[video](https://www.youtube.com/watch?v=09uoG6Voeew&list=PLR9nK3mnD-OXvSWvS2zglCzz4iplhVrKq&index=12) - [Deploy your SharePoint client-side web part to Azure CDN](https://docs.microsoft.com/sharepoint/dev/spfx/web-parts/get-started/deploy-web-part-to-cdn) - [Consume the Microsoft Graph in the SharePoint Framework](https://docs.microsoft.com/sharepoint/dev/spfx/use-aad-tutorial) - [Consume enterprise APIs secured with Azure AD in SharePoint Framework](https://docs.microsoft.com/sharepoint/dev/spfx/use-aadhttpclient-enterpriseapi) - [Consume multi-tenant enterprise APIs secured with Azure AD in SharePoint Framework](https://docs.microsoft.com/sharepoint/dev/spfx/use-aadhttpclient-enterpriseapi-multitenant) | Migration tutorials from classic customizations to SharePoint Framework - [Migrating from Edit Control Block (ECB) menu item to SharePoint Framework Extensions](https://docs.microsoft.com/sharepoint/dev/spfx/extensions/guidance/migrate-from-ecb-to-spfx-extensions) - [Migrating from JSLink to SharePoint Framework Extensions](https://docs.microsoft.com/sharepoint/dev/spfx/extensions/guidance/migrate-from-jslink-to-spfx-extensions) - [Migrating from UserCustomAction to SharePoint Framework Extensions](https://docs.microsoft.com/sharepoint/dev/spfx/extensions/guidance/migrate-from-usercustomactions-to-spfx-extensions) ## Getting started training modules for SharePoint Framework These are "presentations in a box" that include slides, demos, videos & hands-on labs for self-paced learning or for user group presentations. You can find more detailed description of this training package from https://aka.ms/spfx-training. 
- [Getting Started with the SharePoint Framework](https://github.com/SharePoint/sp-dev-training-spfx-getting-started) | [video](https://www.youtube.com/watch?v=_Pt5cnU4MpU&index=1&list=PLR9nK3mnD-OV-RPXQ3Lco845qoEy7VJoc) - [Developing with the SharePoint Framework: Web Parts](https://github.com/SharePoint/sp-dev-training-spfx-web-parts) | [video](https://www.youtube.com/watch?v=m1l_sgSwKek&list=PLR9nK3mnD-OV-RPXQ3Lco845qoEy7VJoc&index=2) - [Working with the Web Part Property Pane](https://github.com/SharePoint/sp-dev-training-spfx-webpart-proppane) | [video](https://www.youtube.com/watch?v=4QLY6z3RGug&list=PLR9nK3mnD-OV-RPXQ3Lco845qoEy7VJoc) - [Working with SharePoint Content](https://github.com/SharePoint/sp-dev-training-spfx-spcontent) | [video](https://www.youtube.com/watch?v=0OiC7AzoCVI&list=PLR9nK3mnD-OV-RPXQ3Lco845qoEy7VJoc) - [Getting Started with SharePoint Framework Extensions](https://github.com/SharePoint/sp-dev-training-spfx-extensions) | [video](https://www.youtube.com/watch?v=85DlxhbIK9I&list=PLR9nK3mnD-OV-RPXQ3Lco845qoEy7VJoc) - [Leverage the Microsoft Graph & 3rd Party APIs](https://github.com/SharePoint/sp-dev-training-spfx-graph-3rdpartyapis) | [video](https://www.youtube.com/watch?v=0zVtDn0ckBM&list=PLR9nK3mnD-OV-RPXQ3Lco845qoEy7VJoc) - [Using React and Office UI Fabric React Components](https://github.com/SharePoint/sp-dev-training-spfx-react-fabric) | [video](https://www.youtube.com/watch?v=TlSGdDZmrTM&list=PLR9nK3mnD-OV-RPXQ3Lco845qoEy7VJoc&index=7) - [Deploying SharePoint Framework Components to Production](https://github.com/SharePoint/sp-dev-training-spfx-deployment) | [video](https://www.youtube.com/watch?v=DLi6ZviEIJ8&list=PLR9nK3mnD-OV-RPXQ3Lco845qoEy7VJoc&index=8) - [Build Microsoft Teams customizations using SharePoint Framework](https://github.com/SharePoint/sp-dev-training-spfx-teams-dev) | [video](https://www.youtube.com/watch?v=Yfs3-qawJfA&list=PLR9nK3mnD-OV-RPXQ3Lco845qoEy7VJoc&index=9) ## Other resources and assets - [Microsoft 
365 & SharePoint Community YouTube channel](https://aka.ms/spdev-videos) - [SharePoint Framework web part samples](https://aka.ms/spfx-webparts) - [SharePoint Framework extension samples](https://aka.ms/spfx-extensions)
{ "pile_set_name": "Github" }
// // Generated by class-dump 3.5 (64 bit) (Debug version compiled Jun 9 2015 22:53:21). // // class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2014 by Steve Nygard. // #import "TView.h" @interface TFakeTagsMenuItemView : TView { _Bool _selected; struct TNSRef<NSView, void> _selectionView; struct TNSRef<TTextField, void> _titleField; } + (id)fakeMenuItemViewWithFrame:(struct CGRect)arg1; @property(nonatomic) _Bool selected; // @synthesize selected=_selected; - (id).cxx_construct; - (void).cxx_destruct; - (void)mouseExited:(id)arg1; - (void)mouseEntered:(id)arg1; - (void)mouseUp:(id)arg1; - (void)mouseDown:(id)arg1; - (void)layout; - (void)initCommon; @end
{ "pile_set_name": "Github" }
// ======================================================================== // $Id: WebApplicationContextMBean.java,v 1.11 2005/08/13 00:01:27 gregwilkins Exp $ // Copyright 1999-2004 Mort Bay Consulting Pty. Ltd. // ------------------------------------------------------------------------ // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // ======================================================================== package org.browsermob.proxy.jetty.jetty.servlet.jmx; import org.apache.commons.logging.Log; import org.browsermob.proxy.jetty.jetty.servlet.WebApplicationContext; import org.browsermob.proxy.jetty.log.LogFactory; import org.browsermob.proxy.jetty.util.LifeCycleEvent; import org.browsermob.proxy.jetty.util.LifeCycleListener; import org.browsermob.proxy.jetty.util.LogSupport; import javax.management.MBeanException; import javax.management.MBeanServer; import javax.management.ObjectName; import java.util.HashMap; import java.util.Iterator; import java.util.Map; /* ------------------------------------------------------------ */ /** Web Application MBean. * Note that while Web Applications are HttpContexts, the MBean is * not derived from HttpContextMBean as they are managed differently. 
* * @version $Revision: 1.11 $ * @author Greg Wilkins (gregw) */ public class WebApplicationContextMBean extends ServletHttpContextMBean { private static final Log log = LogFactory.getLog(WebApplicationContextMBean.class); private WebApplicationContext _webappContext; private Map _configurations = new HashMap(); /* ------------------------------------------------------------ */ /** Constructor. * @exception MBeanException */ public WebApplicationContextMBean() throws MBeanException {} /* ------------------------------------------------------------ */ protected void defineManagedResource() { super.defineManagedResource(); defineAttribute("displayName",false); defineAttribute("defaultsDescriptor",true); defineAttribute("WAR",true); defineAttribute("extractWAR",true); _webappContext=(WebApplicationContext)getManagedResource(); _webappContext.addEventListener(new LifeCycleListener() { public void lifeCycleStarting (LifeCycleEvent event) {} public void lifeCycleStarted (LifeCycleEvent event) { getConfigurations(); } public void lifeCycleFailure (LifeCycleEvent event) {} public void lifeCycleStopping (LifeCycleEvent event) {} public void lifeCycleStopped (LifeCycleEvent event) { destroyConfigurations(); } }); } /** postRegister * Register mbeans for all of the jsr77 servlet stats * @see javax.management.MBeanRegistration#postRegister(java.lang.Boolean) */ public void postRegister(Boolean ok) { super.postRegister(ok); getConfigurations(); } /**postDeregister * Unregister mbeans we created for the Configuration objects. 
* @see javax.management.MBeanRegistration#postDeregister() */ public void postDeregister () { destroyConfigurations (); super.postDeregister(); } /**getConfigurations * Make mbeans for all of the Configurations applied to the * WebApplicationContext * @return */ public ObjectName[] getConfigurations () { return getComponentMBeans(_webappContext.getConfigurations(),_configurations); } public void destroyConfigurations () { MBeanServer mbeanServer = getMBeanServer(); Iterator itor = _configurations.values().iterator(); while (itor.hasNext()) { try { ObjectName o = (ObjectName)itor.next(); log.debug("Unregistering: "+o); if (null!=mbeanServer) mbeanServer.unregisterMBean((ObjectName)o); } catch (Exception e) { log.warn(LogSupport.EXCEPTION, e); } } _configurations.clear(); } }
{ "pile_set_name": "Github" }
Red [ Title: "Red base environment definitions" Author: "Nenad Rakocevic" File: %operators.red Tabs: 4 Rights: "Copyright (C) 2011-2018 Red Foundation. All rights reserved." License: { Distributed under the Boost Software License, Version 1.0. See https://github.com/red/red/blob/master/BSL-License.txt } ] ;-- #load temporary directive is used to workaround REBOL LOAD limitations on some words #do keep [to-set-word "+"] make op! :add #do keep [to-set-word "-"] make op! :subtract #do keep [to-set-word "*"] make op! :multiply #do keep [to-set-word "/"] make op! :divide #do keep [to-set-word "//"] make op! :modulo #do keep [to-set-word "%"] make op! :remainder #do keep [to-set-word "="] make op! :equal? #do keep [to-set-word "<>"] make op! :not-equal? #do keep [to-set-word "=="] make op! :strict-equal? #do keep [to-set-word "=?"] make op! :same? #do keep [to-set-word "<"] make op! :lesser? #do keep [to-set-word ">"] make op! :greater? #do keep [to-set-word "<="] make op! :lesser-or-equal? #do keep [to-set-word ">="] make op! :greater-or-equal? #do keep [to-set-word "<<"] make op! :shift-left #do keep [to-set-word ">>"] make op! :shift-right #do keep [to-set-word ">>>"] make op! :shift-logical #do keep [to-set-word "**"] make op! :power and: make op! :and~ or: make op! :or~ xor: make op! :xor~
{ "pile_set_name": "Github" }
load --table Shops [ {"_key":"nezu-no-taiyaki", "name":"根津のたいやき", "latitude":128592911, "longitude":503145263, "location":"128592911x503145263", "latitude_in_degree":35.720253, "longitude_in_degree":139.762573, "tags":["たいやき","天然"], "area":"area0005"}, {"_key":"taiyaki-kataoka", "name":"たい焼 カタオカ", "latitude":128565076, "longitude":502976128, "location":"128565076x502976128", "latitude_in_degree":35.712521, "longitude_in_degree":139.715591, "tags":["たいやき"]}, {"_key":"soba-taiyaki-ku", "name":"そばたいやき空", "latitude":128461363, "longitude":502772717, "location":"128461363x502772717", "latitude_in_degree":35.683712, "longitude_in_degree":139.659088, "tags":["たいやき"]}, {"_key":"kuruma", "name":"車", "latitude":128597458, "longitude":502942345, "location":"128597458x502942345", "latitude_in_degree":35.721516, "longitude_in_degree":139.706207, "tags":["たいやき"]}, {"_key":"hirose-ya", "name":"広瀬屋", "latitude":128573438, "longitude":502868189, "location":"128573438x502868189", "latitude_in_degree":35.714844, "longitude_in_degree":139.685608, "tags":["たいやき"]}, {"_key":"sazare", "name":"さざれ", "latitude":128572751, "longitude":502866155, "location":"128572751x502866155", "latitude_in_degree":35.714653, "longitude_in_degree":139.685043, "tags":["たいやき"]}, {"_key":"omede-taiyaki", "name":"おめで鯛焼き本舗錦糸町東急店", "latitude":128521858, "longitude":503341754, "location":"128521858x503341754", "latitude_in_degree":35.700516, "longitude_in_degree":139.817154, "tags":["たいやき"]}, {"_key":"onaga-ya", "name":"尾長屋 錦糸町店", "latitude":128513714, "longitude":503319780, "location":"128513714x503319780", "latitude_in_degree":35.698254, "longitude_in_degree":139.811050, "tags":["たいやき","白"]}, {"_key":"shiro-ya", "name":"たいやき工房白家 阿佐ヶ谷店", "latitude":128539861, "longitude":502699000, "location":"128539861x502699000", "latitude_in_degree":35.705517, "longitude_in_degree":139.638611, "tags":["たいやき","白"]}, {"_key":"fuji-ya", "name":"たいやき本舗 藤家 阿佐ヶ谷店", "latitude":128534177, "longitude":502693614, 
"location":"128534177x502693614", "latitude_in_degree":35.703938, "longitude_in_degree":139.637115, "tags":["たいやき","白"]}, {"_key":"miyoshi", "name":"みよし", "latitude":128320340, "longitude":502334363, "location":"128320340x502334363", "latitude_in_degree":35.644539, "longitude_in_degree":139.537323, "tags":["たいやき"]}, {"_key":"juju-ya", "name":"寿々屋 菓子", "latitude":128264119, "longitude":502904718, "location":"128264119x502904718", "latitude_in_degree":35.628922, "longitude_in_degree":139.695755, "tags":["たいやき"]}, {"_key":"tatsumi-ya", "name":"たい焼き \/ たつみや", "latitude":128395804, "longitude":502699165, "location":"128395804x502699165", "latitude_in_degree":35.665501, "longitude_in_degree":139.638657, "tags":["たいやき"]}, {"_key":"tetsuji", "name":"たい焼き鉄次 大丸東京店", "latitude":128451283, "longitude":503166852, "location":"128451283x503166852", "latitude_in_degree":35.680912, "longitude_in_degree":139.768570, "tags":["たいやき"], "categories":["category0003", "category0001"]}, {"_key":"gazuma-ya", "name":"吾妻屋", "latitude":128522941, "longitude":502731353, "location":"128522941x502731353", "latitude_in_degree":35.700817, "longitude_in_degree":139.647598, "tags":["たいやき"]}, {"_key":"honma-mon", "name":"ほんま門", "latitude":128601850, "longitude":502749263, "location":"128601850x502749263", "latitude_in_degree":35.722736, "longitude_in_degree":139.652573, "tags":["たこやき"], "area":"area0014"}, {"_key":"naniwa-ya", "name":"浪花家", "latitude":128628220, "longitude":503266442, "location":"128628220x503266442", "latitude_in_degree":35.730061, "longitude_in_degree":139.796234, "tags":["たいやき","天然"]}, {"_key":"kuro-dai", "name":"代官山たい焼き黒鯛", "latitude":128341242, "longitude":502937402, "location":"128341242x502937402", "latitude_in_degree":35.650345, "longitude_in_degree":139.704834, "tags":["たいやき"]}, {"_key":"daruma", "name":"たいやき神田達磨 八重洲店", "latitude":128453260, "longitude":503174156, "location":"128453260x503174156", "location1":"35.68157x139.76587", "location2":"35.68481x139.76681", 
"location3":"35.68276x139.77411", "latitude_in_degree":35.681461, "longitude_in_degree":139.770599, "tags":["たいやき"]}, {"_key":"yanagi-ya", "name":"柳屋 たい焼き", "latitude":128467228, "longitude":503222332, "location":"128467228x503222332", "latitude_in_degree":35.685341, "longitude_in_degree":139.783981, "tags":["たいやき","天然"], "area":"area0002"}, {"_key":"sharaku", "name":"たい焼き写楽", "latitude":128581088, "longitude":503261446, "location":"128581088x503261446", "latitude_in_degree":35.716969, "longitude_in_degree":139.794846, "tags":["たいやき","天然"]}, {"_key":"takane", "name":"たかね 和菓子", "latitude":128514964, "longitude":502419287, "location":"128514964x502419287", "latitude_in_degree":35.698601, "longitude_in_degree":139.560913, "tags":["たいやき","天然","和菓子"]}, {"_key":"chiyoda", "name":"たい焼き ちよだ", "latitude":128313364, "longitude":502750141, "location":"128313364x502750141", "latitude_in_degree":35.642601, "longitude_in_degree":139.652817, "tags":["たいやき"]}, {"_key":"da-ka-po", "name":"ダ・カーポ", "latitude":128258446, "longitude":503018482, "location":"128258446x503018482", "latitude_in_degree":35.627346, "longitude_in_degree":139.727356, "tags":["たいやき","カレー"]}, {"_key":"matsushima-ya", "name":"松島屋", "latitude":128306002, "longitude":503054572, "location":"128306002x503054572", "latitude_in_degree":35.640556, "longitude_in_degree":139.737381, "tags":["和菓子"], "categories":["和食"]}, {"_key":"kazuya", "name":"銀座 かずや", "latitude":128424629, "longitude":503139222, "location":"128424629x503139222", "latitude_in_degree":35.673508, "longitude_in_degree":139.760895, "tags":["和菓子"], "categories":["和食"]}, {"_key":"furuya-kogane-an", "name":"ふるや古賀音庵 和菓子", "latitude":128450171, "longitude":502833856, "location":"128450171x502833856", "latitude_in_degree":35.680603, "longitude_in_degree":139.676071, "tags":["和菓子"], "categories":["和食"]}, {"_key":"hachi-no-ie", "name":"蜂の家 自由が丘本店", "latitude":128188876, "longitude":502805182, "location":"128188876x502805182", "latitude_in_degree":35.608021, 
"longitude_in_degree":139.668106, "tags":["和菓子"], "categories":["和食"]}, {"_key":"azuki-chan", "name":"薄皮たい焼き あづきちゃん", "latitude":128309436, "longitude":502823531, "location":"128309436x502823531", "latitude_in_degree":35.641510, "longitude_in_degree":139.673203, "tags":["たいやき"]}, {"_key":"kuriko-an", "name":"横浜 くりこ庵 浅草店", "latitude":128563247, "longitude":503268584, "location":"128563247x503268584", "location1":"35.71916x139.79849", "latitude_in_degree":35.712013, "longitude_in_degree":139.796829, "tags":["たいやき"]}, {"_key":"yume-no-aru-machi-no-taiyaki-ya-san", "name":"夢ある街のたいやき屋さん戸越銀座店", "latitude":128218316, "longitude":502965086, "location":"128218316x502965086", "latitude_in_degree":35.616199, "longitude_in_degree":139.712524, "tags":["たいやき"]}, {"_key":"naze-ya", "name":"何故屋", "latitude":128192540, "longitude":502796999, "location":"128192540x502796999", "latitude_in_degree":35.609039, "longitude_in_degree":139.665833, "tags":["たいやき"]}, {"_key":"sanoki-ya", "name":"築地 さのきや", "latitude":128397312, "longitude":503174596, "location":"128397312x503174596", "latitude_in_degree":35.665920, "longitude_in_degree":139.770721, "tags":["たいやき","マグロ"], "categories":null, "area":"area0002"}, {"_key":"shigeta", "name":"しげ田", "latitude":128421454, "longitude":503208983, "location":"128421454x503208983", "latitude_in_degree":35.672626, "longitude_in_degree":139.780273, "tags":["たいやき","和菓子"], "area":"area0002"}, {"_key":"nishimi-ya", "name":"にしみや 甘味処", "latitude":128418570, "longitude":503188661, "location":"128418570x503188661", "latitude_in_degree":35.671825, "longitude_in_degree":139.774628, "tags":["たいやき","おでん"]}, {"_key":"hiiragi", "name":"たいやきひいらぎ", "latitude":128331724, "longitude":502961461, "location":"128331724x502961461", "latitude_in_degree":35.647701, "longitude_in_degree":139.711517, "tags":["たいやき"], "area":"area0013"} ]
{ "pile_set_name": "Github" }
package io.swagger.codegen.languages; import io.swagger.codegen.CodegenConfig; import io.swagger.codegen.CodegenConstants; import io.swagger.codegen.CodegenType; import io.swagger.codegen.CodegenOperation; import io.swagger.codegen.DefaultCodegen; import io.swagger.codegen.SupportingFile; import io.swagger.models.properties.ArrayProperty; import io.swagger.models.properties.MapProperty; import io.swagger.models.properties.Property; import java.io.File; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import org.apache.commons.lang3.StringUtils; public class SilexServerCodegen extends DefaultCodegen implements CodegenConfig { protected String invokerPackage; protected String groupId = "io.swagger"; protected String artifactId = "swagger-server"; protected String artifactVersion = "1.0.0"; public SilexServerCodegen() { super(); invokerPackage = camelize("SwaggerServer"); String packagePath = "SwaggerServer"; modelPackage = packagePath + "/lib/models"; apiPackage = packagePath + "/lib"; outputFolder = "generated-code/php-silex"; // no model, api files modelTemplateFiles.clear(); apiTemplateFiles.clear(); embeddedTemplateDir = templateDir = "php-silex"; setReservedWordsLowerCase( Arrays.asList( "__halt_compiler", "abstract", "and", "array", "as", "break", "callable", "case", "catch", "class", "clone", "const", "continue", "declare", "default", "die", "do", "echo", "else", "elseif", "empty", "enddeclare", "endfor", "endforeach", "endif", "endswitch", "endwhile", "eval", "exit", "extends", "final", "for", "foreach", "function", "global", "goto", "if", "implements", "include", "include_once", "instanceof", "insteadof", "interface", "isset", "list", "namespace", "new", "or", "print", "private", "protected", "public", "require", "require_once", "return", "static", "switch", "throw", "trait", "try", "unset", "use", "var", "while", "xor") ); additionalProperties.put(CodegenConstants.INVOKER_PACKAGE, 
// (continuation of the constructor from the previous chunk: remaining
//  codegen configuration — additionalProperties, type system, template files)
                invokerPackage);
        additionalProperties.put(CodegenConstants.GROUP_ID, groupId);
        additionalProperties.put(CodegenConstants.ARTIFACT_ID, artifactId);
        additionalProperties.put(CodegenConstants.ARTIFACT_VERSION, artifactVersion);

        // Types PHP treats as scalars/primitives for codegen purposes.
        // ref: http://php.net/manual/en/language.types.intro.php
        languageSpecificPrimitives = new HashSet<String>(
                Arrays.asList(
                        "boolean",
                        "int",
                        "integer",
                        "double",
                        "float",
                        "string",
                        "object",
                        "DateTime",
                        "mixed",
                        "number")
        );

        instantiationTypes.put("array", "array");
        instantiationTypes.put("map", "map");

        // Swagger/OpenAPI primitive -> PHP type mapping.
        // ref: https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types
        typeMapping = new HashMap<String, String>();
        typeMapping.put("integer", "int");
        typeMapping.put("long", "int");
        typeMapping.put("float", "float");
        typeMapping.put("double", "double");
        typeMapping.put("string", "string");
        typeMapping.put("byte", "int");
        typeMapping.put("boolean", "boolean");
        typeMapping.put("date", "DateTime");
        typeMapping.put("datetime", "DateTime");
        typeMapping.put("file", "string");
        typeMapping.put("map", "map");
        typeMapping.put("array", "array");
        typeMapping.put("list", "array");
        typeMapping.put("object", "object");
        //TODO binary should be mapped to byte array
        // mapped to String as a workaround
        typeMapping.put("binary", "string");

        // Static (non-model, non-api) files emitted with every generated project.
        supportingFiles.add(new SupportingFile("README.mustache", packagePath.replace('/', File.separatorChar), "README.md"));
        supportingFiles.add(new SupportingFile("composer.json", packagePath.replace('/', File.separatorChar), "composer.json"));
        supportingFiles.add(new SupportingFile("index.mustache", packagePath.replace('/', File.separatorChar), "index.php"));
        supportingFiles.add(new SupportingFile(".htaccess", packagePath.replace('/', File.separatorChar), ".htaccess"));
    }

    /** Identifies this generator as a server-side generator. */
    @Override
    public CodegenType getTag() {
        return CodegenType.SERVER;
    }

    /** Generator name as used on the CLI (e.g. {@code -l php-silex}). */
    @Override
    public String getName() {
        return "php-silex";
    }

    /** One-line human-readable description shown in the generator listing. */
    @Override
    public String getHelp() {
        return "Generates a PHP Silex server library.";
    }

    /**
     * Escapes an identifier that collides with a reserved word: uses an
     * explicit mapping when one is configured, otherwise prefixes "_".
     */
    @Override
    public String escapeReservedWord(String name) {
        if(this.reservedWordsMappings().containsKey(name)) {
            return this.reservedWordsMappings().get(name);
        }
        return "_" + name;
    }

    /** Output folder for generated API classes, with platform-specific separators. */
    @Override
    public String apiFileFolder() {
        return (outputFolder + "/" + apiPackage()).replace('/', File.separatorChar);
    }

    /** Output folder for generated model classes, with platform-specific separators. */
    @Override
    public String modelFileFolder() {
        return (outputFolder + "/" + modelPackage()).replace('/', File.separatorChar);
    }

    /**
     * Renders container types in phpdoc-style notation:
     * arrays as {@code type[inner]} and maps as {@code type[string,inner]}.
     */
    @Override
    public String getTypeDeclaration(Property p) {
        if (p instanceof ArrayProperty) {
            ArrayProperty ap = (ArrayProperty) p;
            Property inner = ap.getItems();
            return getSwaggerType(p) + "[" + getTypeDeclaration(inner) + "]";
        } else if (p instanceof MapProperty) {
            MapProperty mp = (MapProperty) p;
            Property inner = mp.getAdditionalProperties();
            return getSwaggerType(p) + "[string," + getTypeDeclaration(inner) + "]";
        }
        return super.getTypeDeclaration(p);
    }

    /**
     * Maps a swagger type to its PHP type: primitives and known container
     * types pass through unchanged; anything else is treated as a model name.
     */
    @Override
    public String getSwaggerType(Property p) {
        String swaggerType = super.getSwaggerType(p);
        String type = null;
        if (typeMapping.containsKey(swaggerType)) {
            type = typeMapping.get(swaggerType);
            if (languageSpecificPrimitives.contains(type)) {
                return type;
            } else if (instantiationTypes.containsKey(type)) {
                return type;
            }
        } else {
            type = swaggerType;
        }
        if (type == null) {
            return null;
        }
        return toModelName(type);
    }

    /** All properties default to PHP {@code null} regardless of declared type. */
    @Override
    public String toDefaultValue(Property p) {
        return "null";
    }

    /**
     * Converts a property name to snake_case, prefixing "_" when the result
     * would start with a digit (illegal as a PHP identifier).
     */
    @Override
    public String toVarName(String name) {
        // return the name in underscore style
        // PhoneNumber => phone_number
        name = underscore(name);

        // FIXME: a parameter should not be assigned. Also declare the methods parameters as 'final'.
        // parameter name starting with number won't compile
        // need to escape it by appending _ at the beginning
        if (name.matches("^\\d.*")) {
            name = "_" + name;
        }
        return name;
    }

    /** Parameter names follow the same rules as variable names. */
    @Override
    public String toParamName(String name) {
        // should be the same as variable name
        return toVarName(name);
    }

    /** Converts a schema name to a CamelCase PHP model class name. */
    @Override
    public String toModelName(String name) {
        // model name cannot use reserved keyword
        if (isReservedWord(name)) {
            // NOTE(review): the return value of escapeReservedWord(name) is
            // discarded, so reserved words are NOT actually renamed here —
            // likely should be `name = escapeReservedWord(name);`. Confirm
            // against sibling codegen classes before changing.
            escapeReservedWord(name); // e.g. return => _return
        }
        // camelize the model name
        // phone_number => PhoneNumber
        return camelize(name);
    }

    /** Model file names match the model class name. */
    @Override
    public String toModelFilename(String name) {
        // should be the same as the model name
        return toModelName(name);
    }

    /** Strips single quotes from text interpolated into generated code. */
    @Override
    public String escapeQuotationMark(String input) {
        // remove ' to avoid code injection
        return input.replace("'", "");
    }

    /** Defuses block-comment delimiters so text cannot terminate a generated comment. */
    @Override
    public String escapeUnsafeCharacters(String input) {
        return input.replace("*/", "*_/").replace("/*", "/_*");
    }

    /**
     * Post-processes operations before templating: camelizes each {...}
     * path variable segment in every operation's path.
     */
    @Override
    public Map<String, Object> postProcessOperations(Map<String, Object> objs) {
        Map<String, Object> operations = (Map<String, Object>) objs.get("operations");
        List<CodegenOperation> operationList = (List<CodegenOperation>) operations.get("operation");
        for (CodegenOperation op : operationList) {
            String path = new String(op.path); // NOTE(review): `new String(...)` copy is redundant
            // split with limit -1 so trailing empty segments are preserved
            String[] items = path.split("/", -1);
            // NOTE(review): opsPath and pathParamIndex are never used below —
            // apparent leftovers; candidates for removal.
            String opsPath = "";
            int pathParamIndex = 0;

            for (int i = 0; i < items.length; ++i) {
                if (items[i].matches("^\\{(.*)\\}$")) { // wrap in {}
                    // camelize path variable
                    items[i] = "{" + camelize(items[i].substring(1, items[i].length()-1), true) + "}";
                }
            }

            op.path = StringUtils.join(items, "/");
        }

        return objs;
    }
}
{ "pile_set_name": "Github" }
# Image for the qqzone web server (Python 3.6).
FROM python:3.6

# MAINTAINER is deprecated since Docker 1.13; LABEL is the supported replacement.
LABEL maintainer="maicius"

WORKDIR /qqzone

# Copy only the dependency manifest (and the font) first so the expensive
# pip layer below stays cached until requirements.txt itself changes.
COPY requirements.txt /qqzone
COPY Songti.ttc /usr/share/fonts

RUN pip install -i https://pypi.tuna.tsinghua.edu.cn/simple -r requirements.txt

# Copy the application source last to keep the earlier layers cacheable.
COPY . /qqzone

# Exec form: python runs as PID 1 and receives signals (e.g. SIGTERM on
# `docker stop`) directly instead of being wrapped in /bin/sh.
CMD ["python", "src/web/server.py"]
{ "pile_set_name": "Github" }
/// <reference types="node" />
import { FileTransformer } from "builder-util/out/fs";
import { Stats } from "fs-extra-p";
import { FileMatcher } from "../fileMatcher";
import { Packager } from "../packager";
/**
 * A concrete set of files resolved from a {@link FileMatcher}: a source
 * root, a destination root, and the matched files with their stat metadata.
 */
export interface ResolvedFileSet {
    /** Source root directory the files were resolved under. */
    src: string;
    /** Destination root the files are to be placed into. */
    destination: string;
    /**
     * Matched file paths.
     * NOTE(review): presumably relative to `src` — confirm against the
     * implementation before relying on this.
     */
    files: Array<string>;
    /** Per-file `Stats`, keyed by file path. */
    metadata: Map<string, Stats>;
    /**
     * In-memory replacement contents produced by the transformer, keyed by
     * index into `files`; absent/null when nothing was transformed.
     * NOTE(review): key semantics inferred from the `number` key type —
     * verify in the implementation.
     */
    transformedFiles?: Map<number, string | Buffer> | null;
}
/**
 * Resolves each matcher into a {@link ResolvedFileSet}, running `transformer`
 * over candidate files. `isElectronCompile` toggles electron-compile-specific
 * handling — see the implementation for details.
 */
export declare function computeFileSets(matchers: Array<FileMatcher>, transformer: FileTransformer, packager: Packager, isElectronCompile: boolean): Promise<Array<ResolvedFileSet>>;
/** Returns `s` ending with a path separator (no-op if already terminated — see implementation). */
export declare function ensureEndSlash(s: string): string;
{ "pile_set_name": "Github" }
NS1 input.foo /^ns NS1$/;" n x input.foo /^define x {$/;" d namespace:NS1 y input.foo /^ define y {$/;" d definition:NS1.x v_y_0 input.foo /^ var v_y_0$/;" v definition:NS1.x.y v_y_1 input.foo /^ var v_y_1$/;" v definition:NS1.x.y z input.foo /^ define z {$/;" d definition:NS1.x v_z_0 input.foo /^ var v_z_0$/;" v definition:NS1.x.z v_z_1 input.foo /^ var v_z_1$/;" v definition:NS1.x.z a input.foo /^ define a {$/;" d definition:NS1.x.z NS2 input.foo /^ns NS2$/;" n p input.foo /^define p {$/;" d namespace:NS2 q input.foo /^ define q {$/;" d definition:NS2.p v_g1 input.foo /^var v_g1$/;" v d_g input.foo /^define d_g {$/;" d v_l input.foo /^ var v_l$/;" v definition:d_g NS3 input.foo /^ns NS3$/;" n PACKAGE input.foo /^package PACKAGE$/;" p namespace:NS3 p input.foo /^define p {$/;" d package:NS3.PACKAGE q input.foo /^ define q {$/;" d definition:NS3.PACKAGE.p L input.foo /^ var L$/;" v definition:NS3.PACKAGE.p.q v_g2 input.foo /^var v_g2$/;" v namespace:NS3
{ "pile_set_name": "Github" }
/*
 * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "api/rtp_headers.h"

namespace webrtc {

// Default state: every optional header extension is marked absent
// (has* == false) and its value zeroed; video rotation/content type get
// their explicit "unset" enumerators.
RTPHeaderExtension::RTPHeaderExtension()
    : hasTransmissionTimeOffset(false),
      transmissionTimeOffset(0),
      hasAbsoluteSendTime(false),
      absoluteSendTime(0),
      hasTransportSequenceNumber(false),
      transportSequenceNumber(0),
      hasAudioLevel(false),
      voiceActivity(false),
      audioLevel(0),
      hasVideoRotation(false),
      videoRotation(kVideoRotation_0),
      hasVideoContentType(false),
      videoContentType(VideoContentType::UNSPECIFIED),
      has_video_timing(false) {}

// Copy operations are member-wise; defaulted here (rather than in the
// header) so the definitions live in this translation unit.
RTPHeaderExtension::RTPHeaderExtension(const RTPHeaderExtension& other) =
    default;

RTPHeaderExtension& RTPHeaderExtension::operator=(
    const RTPHeaderExtension& other) = default;

// Default state: fixed RTP header fields zeroed/false, CSRC array
// value-initialized, and the extension block default-constructed
// (all extensions absent).
RTPHeader::RTPHeader()
    : markerBit(false),
      payloadType(0),
      sequenceNumber(0),
      timestamp(0),
      ssrc(0),
      numCSRCs(0),
      arrOfCSRCs(),
      paddingLength(0),
      headerLength(0),
      payload_type_frequency(0),
      extension() {}

RTPHeader::RTPHeader(const RTPHeader& other) = default;

RTPHeader& RTPHeader::operator=(const RTPHeader& other) = default;

}  // namespace webrtc
{ "pile_set_name": "Github" }
/*! * jQuery UI Draggable 1.8.24 * * Copyright 2012, AUTHORS.txt (http://jqueryui.com/about) * Dual licensed under the MIT or GPL Version 2 licenses. * http://jquery.org/license * * http://docs.jquery.com/UI/Draggables * * Depends: * jquery.ui.core.js * jquery.ui.mouse.js * jquery.ui.widget.js */ (function( $, undefined ) { $.widget("ui.draggable", $.ui.mouse, { widgetEventPrefix: "drag", options: { addClasses: true, appendTo: "parent", axis: false, connectToSortable: false, containment: false, cursor: "auto", cursorAt: false, grid: false, handle: false, helper: "original", iframeFix: false, opacity: false, refreshPositions: false, revert: false, revertDuration: 500, scope: "default", scroll: true, scrollSensitivity: 20, scrollSpeed: 20, snap: false, snapMode: "both", snapTolerance: 20, stack: false, zIndex: false }, _create: function() { if (this.options.helper == 'original' && !(/^(?:r|a|f)/).test(this.element.css("position"))) this.element[0].style.position = 'relative'; (this.options.addClasses && this.element.addClass("ui-draggable")); (this.options.disabled && this.element.addClass("ui-draggable-disabled")); this._mouseInit(); }, destroy: function() { if(!this.element.data('draggable')) return; this.element .removeData("draggable") .unbind(".draggable") .removeClass("ui-draggable" + " ui-draggable-dragging" + " ui-draggable-disabled"); this._mouseDestroy(); return this; }, _mouseCapture: function(event) { var o = this.options; // among others, prevent a drag on a resizable-handle if (this.helper || o.disabled || $(event.target).is('.ui-resizable-handle')) return false; //Quit if we're not on a valid handle this.handle = this._getHandle(event); if (!this.handle) return false; if ( o.iframeFix ) { $(o.iframeFix === true ? 
"iframe" : o.iframeFix).each(function() { $('<div class="ui-draggable-iframeFix" style="background: #fff;"></div>') .css({ width: this.offsetWidth+"px", height: this.offsetHeight+"px", position: "absolute", opacity: "0.001", zIndex: 1000 }) .css($(this).offset()) .appendTo("body"); }); } return true; }, _mouseStart: function(event) { var o = this.options; //Create and append the visible helper this.helper = this._createHelper(event); this.helper.addClass("ui-draggable-dragging"); //Cache the helper size this._cacheHelperProportions(); //If ddmanager is used for droppables, set the global draggable if($.ui.ddmanager) $.ui.ddmanager.current = this; /* * - Position generation - * This block generates everything position related - it's the core of draggables. */ //Cache the margins of the original element this._cacheMargins(); //Store the helper's css position this.cssPosition = this.helper.css("position"); this.scrollParent = this.helper.scrollParent(); //The element's absolute position on the page minus margins this.offset = this.positionAbs = this.element.offset(); this.offset = { top: this.offset.top - this.margins.top, left: this.offset.left - this.margins.left }; $.extend(this.offset, { click: { //Where the click happened, relative to the element left: event.pageX - this.offset.left, top: event.pageY - this.offset.top }, parent: this._getParentOffset(), relative: this._getRelativeOffset() //This is a relative to absolute position minus the actual position calculation - only used for relative positioned helper }); //Generate the original position this.originalPosition = this.position = this._generatePosition(event); this.originalPageX = event.pageX; this.originalPageY = event.pageY; //Adjust the mouse offset relative to the helper if 'cursorAt' is supplied (o.cursorAt && this._adjustOffsetFromHelper(o.cursorAt)); //Set a containment if given in the options if(o.containment) this._setContainment(); //Trigger event + callbacks if(this._trigger("start", event) === 
false) { this._clear(); return false; } //Recache the helper size this._cacheHelperProportions(); //Prepare the droppable offsets if ($.ui.ddmanager && !o.dropBehaviour) $.ui.ddmanager.prepareOffsets(this, event); this._mouseDrag(event, true); //Execute the drag once - this causes the helper not to be visible before getting its correct position //If the ddmanager is used for droppables, inform the manager that dragging has started (see #5003) if ( $.ui.ddmanager ) $.ui.ddmanager.dragStart(this, event); return true; }, _mouseDrag: function(event, noPropagation) { //Compute the helpers position this.position = this._generatePosition(event); this.positionAbs = this._convertPositionTo("absolute"); //Call plugins and callbacks and use the resulting position if something is returned if (!noPropagation) { var ui = this._uiHash(); if(this._trigger('drag', event, ui) === false) { this._mouseUp({}); return false; } this.position = ui.position; } if(!this.options.axis || this.options.axis != "y") this.helper[0].style.left = this.position.left+'px'; if(!this.options.axis || this.options.axis != "x") this.helper[0].style.top = this.position.top+'px'; if($.ui.ddmanager) $.ui.ddmanager.drag(this, event); return false; }, _mouseStop: function(event) { //If we are using droppables, inform the manager about the drop var dropped = false; if ($.ui.ddmanager && !this.options.dropBehaviour) dropped = $.ui.ddmanager.drop(this, event); //if a drop comes from outside (a sortable) if(this.dropped) { dropped = this.dropped; this.dropped = false; } //if the original element is no longer in the DOM don't bother to continue (see #8269) var element = this.element[0], elementInDom = false; while ( element && (element = element.parentNode) ) { if (element == document ) { elementInDom = true; } } if ( !elementInDom && this.options.helper === "original" ) return false; if((this.options.revert == "invalid" && !dropped) || (this.options.revert == "valid" && dropped) || this.options.revert === true || 
($.isFunction(this.options.revert) && this.options.revert.call(this.element, dropped))) { var self = this; $(this.helper).animate(this.originalPosition, parseInt(this.options.revertDuration, 10), function() { if(self._trigger("stop", event) !== false) { self._clear(); } }); } else { if(this._trigger("stop", event) !== false) { this._clear(); } } return false; }, _mouseUp: function(event) { //Remove frame helpers $("div.ui-draggable-iframeFix").each(function() { this.parentNode.removeChild(this); }); //If the ddmanager is used for droppables, inform the manager that dragging has stopped (see #5003) if( $.ui.ddmanager ) $.ui.ddmanager.dragStop(this, event); return $.ui.mouse.prototype._mouseUp.call(this, event); }, cancel: function() { if(this.helper.is(".ui-draggable-dragging")) { this._mouseUp({}); } else { this._clear(); } return this; }, _getHandle: function(event) { var handle = !this.options.handle || !$(this.options.handle, this.element).length ? true : false; $(this.options.handle, this.element) .find("*") .andSelf() .each(function() { if(this == event.target) handle = true; }); return handle; }, _createHelper: function(event) { var o = this.options; var helper = $.isFunction(o.helper) ? $(o.helper.apply(this.element[0], [event])) : (o.helper == 'clone' ? this.element.clone().removeAttr('id') : this.element); if(!helper.parents('body').length) helper.appendTo((o.appendTo == 'parent' ? 
this.element[0].parentNode : o.appendTo)); if(helper[0] != this.element[0] && !(/(fixed|absolute)/).test(helper.css("position"))) helper.css("position", "absolute"); return helper; }, _adjustOffsetFromHelper: function(obj) { if (typeof obj == 'string') { obj = obj.split(' '); } if ($.isArray(obj)) { obj = {left: +obj[0], top: +obj[1] || 0}; } if ('left' in obj) { this.offset.click.left = obj.left + this.margins.left; } if ('right' in obj) { this.offset.click.left = this.helperProportions.width - obj.right + this.margins.left; } if ('top' in obj) { this.offset.click.top = obj.top + this.margins.top; } if ('bottom' in obj) { this.offset.click.top = this.helperProportions.height - obj.bottom + this.margins.top; } }, _getParentOffset: function() { //Get the offsetParent and cache its position this.offsetParent = this.helper.offsetParent(); var po = this.offsetParent.offset(); // This is a special case where we need to modify a offset calculated on start, since the following happened: // 1. The position of the helper is absolute, so it's position is calculated based on the next positioned parent // 2. 
The actual offset parent is a child of the scroll parent, and the scroll parent isn't the document, which means that // the scroll is included in the initial calculation of the offset of the parent, and never recalculated upon drag if(this.cssPosition == 'absolute' && this.scrollParent[0] != document && $.ui.contains(this.scrollParent[0], this.offsetParent[0])) { po.left += this.scrollParent.scrollLeft(); po.top += this.scrollParent.scrollTop(); } if((this.offsetParent[0] == document.body) //This needs to be actually done for all browsers, since pageX/pageY includes this information || (this.offsetParent[0].tagName && this.offsetParent[0].tagName.toLowerCase() == 'html' && $.browser.msie)) //Ugly IE fix po = { top: 0, left: 0 }; return { top: po.top + (parseInt(this.offsetParent.css("borderTopWidth"),10) || 0), left: po.left + (parseInt(this.offsetParent.css("borderLeftWidth"),10) || 0) }; }, _getRelativeOffset: function() { if(this.cssPosition == "relative") { var p = this.element.position(); return { top: p.top - (parseInt(this.helper.css("top"),10) || 0) + this.scrollParent.scrollTop(), left: p.left - (parseInt(this.helper.css("left"),10) || 0) + this.scrollParent.scrollLeft() }; } else { return { top: 0, left: 0 }; } }, _cacheMargins: function() { this.margins = { left: (parseInt(this.element.css("marginLeft"),10) || 0), top: (parseInt(this.element.css("marginTop"),10) || 0), right: (parseInt(this.element.css("marginRight"),10) || 0), bottom: (parseInt(this.element.css("marginBottom"),10) || 0) }; }, _cacheHelperProportions: function() { this.helperProportions = { width: this.helper.outerWidth(), height: this.helper.outerHeight() }; }, _setContainment: function() { var o = this.options; if(o.containment == 'parent') o.containment = this.helper[0].parentNode; if(o.containment == 'document' || o.containment == 'window') this.containment = [ o.containment == 'document' ? 
0 : $(window).scrollLeft() - this.offset.relative.left - this.offset.parent.left, o.containment == 'document' ? 0 : $(window).scrollTop() - this.offset.relative.top - this.offset.parent.top, (o.containment == 'document' ? 0 : $(window).scrollLeft()) + $(o.containment == 'document' ? document : window).width() - this.helperProportions.width - this.margins.left, (o.containment == 'document' ? 0 : $(window).scrollTop()) + ($(o.containment == 'document' ? document : window).height() || document.body.parentNode.scrollHeight) - this.helperProportions.height - this.margins.top ]; if(!(/^(document|window|parent)$/).test(o.containment) && o.containment.constructor != Array) { var c = $(o.containment); var ce = c[0]; if(!ce) return; var co = c.offset(); var over = ($(ce).css("overflow") != 'hidden'); this.containment = [ (parseInt($(ce).css("borderLeftWidth"),10) || 0) + (parseInt($(ce).css("paddingLeft"),10) || 0), (parseInt($(ce).css("borderTopWidth"),10) || 0) + (parseInt($(ce).css("paddingTop"),10) || 0), (over ? Math.max(ce.scrollWidth,ce.offsetWidth) : ce.offsetWidth) - (parseInt($(ce).css("borderLeftWidth"),10) || 0) - (parseInt($(ce).css("paddingRight"),10) || 0) - this.helperProportions.width - this.margins.left - this.margins.right, (over ? Math.max(ce.scrollHeight,ce.offsetHeight) : ce.offsetHeight) - (parseInt($(ce).css("borderTopWidth"),10) || 0) - (parseInt($(ce).css("paddingBottom"),10) || 0) - this.helperProportions.height - this.margins.top - this.margins.bottom ]; this.relative_container = c; } else if(o.containment.constructor == Array) { this.containment = o.containment; } }, _convertPositionTo: function(d, pos) { if(!pos) pos = this.position; var mod = d == "absolute" ? 1 : -1; var o = this.options, scroll = this.cssPosition == 'absolute' && !(this.scrollParent[0] != document && $.ui.contains(this.scrollParent[0], this.offsetParent[0])) ? 
this.offsetParent : this.scrollParent, scrollIsRootNode = (/(html|body)/i).test(scroll[0].tagName); return { top: ( pos.top // The absolute mouse position + this.offset.relative.top * mod // Only for relative positioned nodes: Relative offset from element to offset parent + this.offset.parent.top * mod // The offsetParent's offset without borders (offset + border) - ($.browser.safari && $.browser.version < 526 && this.cssPosition == 'fixed' ? 0 : ( this.cssPosition == 'fixed' ? -this.scrollParent.scrollTop() : ( scrollIsRootNode ? 0 : scroll.scrollTop() ) ) * mod) ), left: ( pos.left // The absolute mouse position + this.offset.relative.left * mod // Only for relative positioned nodes: Relative offset from element to offset parent + this.offset.parent.left * mod // The offsetParent's offset without borders (offset + border) - ($.browser.safari && $.browser.version < 526 && this.cssPosition == 'fixed' ? 0 : ( this.cssPosition == 'fixed' ? -this.scrollParent.scrollLeft() : scrollIsRootNode ? 0 : scroll.scrollLeft() ) * mod) ) }; }, _generatePosition: function(event) { var o = this.options, scroll = this.cssPosition == 'absolute' && !(this.scrollParent[0] != document && $.ui.contains(this.scrollParent[0], this.offsetParent[0])) ? this.offsetParent : this.scrollParent, scrollIsRootNode = (/(html|body)/i).test(scroll[0].tagName); var pageX = event.pageX; var pageY = event.pageY; /* * - Position constraining - * Constrain the position to a mix of grid, containment. 
*/ if(this.originalPosition) { //If we are not dragging yet, we won't check for options var containment; if(this.containment) { if (this.relative_container){ var co = this.relative_container.offset(); containment = [ this.containment[0] + co.left, this.containment[1] + co.top, this.containment[2] + co.left, this.containment[3] + co.top ]; } else { containment = this.containment; } if(event.pageX - this.offset.click.left < containment[0]) pageX = containment[0] + this.offset.click.left; if(event.pageY - this.offset.click.top < containment[1]) pageY = containment[1] + this.offset.click.top; if(event.pageX - this.offset.click.left > containment[2]) pageX = containment[2] + this.offset.click.left; if(event.pageY - this.offset.click.top > containment[3]) pageY = containment[3] + this.offset.click.top; } if(o.grid) { //Check for grid elements set to 0 to prevent divide by 0 error causing invalid argument errors in IE (see ticket #6950) var top = o.grid[1] ? this.originalPageY + Math.round((pageY - this.originalPageY) / o.grid[1]) * o.grid[1] : this.originalPageY; pageY = containment ? (!(top - this.offset.click.top < containment[1] || top - this.offset.click.top > containment[3]) ? top : (!(top - this.offset.click.top < containment[1]) ? top - o.grid[1] : top + o.grid[1])) : top; var left = o.grid[0] ? this.originalPageX + Math.round((pageX - this.originalPageX) / o.grid[0]) * o.grid[0] : this.originalPageX; pageX = containment ? (!(left - this.offset.click.left < containment[0] || left - this.offset.click.left > containment[2]) ? left : (!(left - this.offset.click.left < containment[0]) ? 
left - o.grid[0] : left + o.grid[0])) : left; } } return { top: ( pageY // The absolute mouse position - this.offset.click.top // Click offset (relative to the element) - this.offset.relative.top // Only for relative positioned nodes: Relative offset from element to offset parent - this.offset.parent.top // The offsetParent's offset without borders (offset + border) + ($.browser.safari && $.browser.version < 526 && this.cssPosition == 'fixed' ? 0 : ( this.cssPosition == 'fixed' ? -this.scrollParent.scrollTop() : ( scrollIsRootNode ? 0 : scroll.scrollTop() ) )) ), left: ( pageX // The absolute mouse position - this.offset.click.left // Click offset (relative to the element) - this.offset.relative.left // Only for relative positioned nodes: Relative offset from element to offset parent - this.offset.parent.left // The offsetParent's offset without borders (offset + border) + ($.browser.safari && $.browser.version < 526 && this.cssPosition == 'fixed' ? 0 : ( this.cssPosition == 'fixed' ? -this.scrollParent.scrollLeft() : scrollIsRootNode ? 
0 : scroll.scrollLeft() )) ) }; }, _clear: function() { this.helper.removeClass("ui-draggable-dragging"); if(this.helper[0] != this.element[0] && !this.cancelHelperRemoval) this.helper.remove(); //if($.ui.ddmanager) $.ui.ddmanager.current = null; this.helper = null; this.cancelHelperRemoval = false; }, // From now on bulk stuff - mainly helpers _trigger: function(type, event, ui) { ui = ui || this._uiHash(); $.ui.plugin.call(this, type, [event, ui]); if(type == "drag") this.positionAbs = this._convertPositionTo("absolute"); //The absolute position has to be recalculated after plugins return $.Widget.prototype._trigger.call(this, type, event, ui); }, plugins: {}, _uiHash: function(event) { return { helper: this.helper, position: this.position, originalPosition: this.originalPosition, offset: this.positionAbs }; } }); $.extend($.ui.draggable, { version: "1.8.24" }); $.ui.plugin.add("draggable", "connectToSortable", { start: function(event, ui) { var inst = $(this).data("draggable"), o = inst.options, uiSortable = $.extend({}, ui, { item: inst.element }); inst.sortables = []; $(o.connectToSortable).each(function() { var sortable = $.data(this, 'sortable'); if (sortable && !sortable.options.disabled) { inst.sortables.push({ instance: sortable, shouldRevert: sortable.options.revert }); sortable.refreshPositions(); // Call the sortable's refreshPositions at drag start to refresh the containerCache since the sortable container cache is used in drag and needs to be up to date (this will ensure it's initialised as well as being kept in step with any changes that might have happened on the page). 
sortable._trigger("activate", event, uiSortable); } }); }, stop: function(event, ui) { //If we are still over the sortable, we fake the stop event of the sortable, but also remove helper var inst = $(this).data("draggable"), uiSortable = $.extend({}, ui, { item: inst.element }); $.each(inst.sortables, function() { if(this.instance.isOver) { this.instance.isOver = 0; inst.cancelHelperRemoval = true; //Don't remove the helper in the draggable instance this.instance.cancelHelperRemoval = false; //Remove it in the sortable instance (so sortable plugins like revert still work) //The sortable revert is supported, and we have to set a temporary dropped variable on the draggable to support revert: 'valid/invalid' if(this.shouldRevert) this.instance.options.revert = true; //Trigger the stop of the sortable this.instance._mouseStop(event); this.instance.options.helper = this.instance.options._helper; //If the helper has been the original item, restore properties in the sortable if(inst.options.helper == 'original') this.instance.currentItem.css({ top: 'auto', left: 'auto' }); } else { this.instance.cancelHelperRemoval = false; //Remove the helper in the sortable instance this.instance._trigger("deactivate", event, uiSortable); } }); }, drag: function(event, ui) { var inst = $(this).data("draggable"), self = this; var checkPos = function(o) { var dyClick = this.offset.click.top, dxClick = this.offset.click.left; var helperTop = this.positionAbs.top, helperLeft = this.positionAbs.left; var itemHeight = o.height, itemWidth = o.width; var itemTop = o.top, itemLeft = o.left; return $.ui.isOver(helperTop + dyClick, helperLeft + dxClick, itemTop, itemLeft, itemHeight, itemWidth); }; $.each(inst.sortables, function(i) { //Copy over some variables to allow calling the sortable's native _intersectsWith this.instance.positionAbs = inst.positionAbs; this.instance.helperProportions = inst.helperProportions; this.instance.offset.click = inst.offset.click; 
if(this.instance._intersectsWith(this.instance.containerCache)) { //If it intersects, we use a little isOver variable and set it once, so our move-in stuff gets fired only once if(!this.instance.isOver) { this.instance.isOver = 1; //Now we fake the start of dragging for the sortable instance, //by cloning the list group item, appending it to the sortable and using it as inst.currentItem //We can then fire the start event of the sortable with our passed browser event, and our own helper (so it doesn't create a new one) this.instance.currentItem = $(self).clone().removeAttr('id').appendTo(this.instance.element).data("sortable-item", true); this.instance.options._helper = this.instance.options.helper; //Store helper option to later restore it this.instance.options.helper = function() { return ui.helper[0]; }; event.target = this.instance.currentItem[0]; this.instance._mouseCapture(event, true); this.instance._mouseStart(event, true, true); //Because the browser event is way off the new appended portlet, we modify a couple of variables to reflect the changes this.instance.offset.click.top = inst.offset.click.top; this.instance.offset.click.left = inst.offset.click.left; this.instance.offset.parent.left -= inst.offset.parent.left - this.instance.offset.parent.left; this.instance.offset.parent.top -= inst.offset.parent.top - this.instance.offset.parent.top; inst._trigger("toSortable", event); inst.dropped = this.instance.element; //draggable revert needs that //hack so receive/update callbacks work (mostly) inst.currentItem = inst.element; this.instance.fromOutside = inst; } //Provided we did all the previous steps, we can fire the drag event of the sortable on every draggable drag, when it intersects with the sortable if(this.instance.currentItem) this.instance._mouseDrag(event); } else { //If it doesn't intersect with the sortable, and it intersected before, //we fake the drag stop of the sortable, but make sure it doesn't remove the helper by using cancelHelperRemoval 
if(this.instance.isOver) { this.instance.isOver = 0; this.instance.cancelHelperRemoval = true; //Prevent reverting on this forced stop this.instance.options.revert = false; // The out event needs to be triggered independently this.instance._trigger('out', event, this.instance._uiHash(this.instance)); this.instance._mouseStop(event, true); this.instance.options.helper = this.instance.options._helper; //Now we remove our currentItem, the list group clone again, and the placeholder, and animate the helper back to it's original size this.instance.currentItem.remove(); if(this.instance.placeholder) this.instance.placeholder.remove(); inst._trigger("fromSortable", event); inst.dropped = false; //draggable revert needs that } }; }); } }); $.ui.plugin.add("draggable", "cursor", { start: function(event, ui) { var t = $('body'), o = $(this).data('draggable').options; if (t.css("cursor")) o._cursor = t.css("cursor"); t.css("cursor", o.cursor); }, stop: function(event, ui) { var o = $(this).data('draggable').options; if (o._cursor) $('body').css("cursor", o._cursor); } }); $.ui.plugin.add("draggable", "opacity", { start: function(event, ui) { var t = $(ui.helper), o = $(this).data('draggable').options; if(t.css("opacity")) o._opacity = t.css("opacity"); t.css('opacity', o.opacity); }, stop: function(event, ui) { var o = $(this).data('draggable').options; if(o._opacity) $(ui.helper).css('opacity', o._opacity); } }); $.ui.plugin.add("draggable", "scroll", { start: function(event, ui) { var i = $(this).data("draggable"); if(i.scrollParent[0] != document && i.scrollParent[0].tagName != 'HTML') i.overflowOffset = i.scrollParent.offset(); }, drag: function(event, ui) { var i = $(this).data("draggable"), o = i.options, scrolled = false; if(i.scrollParent[0] != document && i.scrollParent[0].tagName != 'HTML') { if(!o.axis || o.axis != 'x') { if((i.overflowOffset.top + i.scrollParent[0].offsetHeight) - event.pageY < o.scrollSensitivity) i.scrollParent[0].scrollTop = scrolled = 
i.scrollParent[0].scrollTop + o.scrollSpeed; else if(event.pageY - i.overflowOffset.top < o.scrollSensitivity) i.scrollParent[0].scrollTop = scrolled = i.scrollParent[0].scrollTop - o.scrollSpeed; } if(!o.axis || o.axis != 'y') { if((i.overflowOffset.left + i.scrollParent[0].offsetWidth) - event.pageX < o.scrollSensitivity) i.scrollParent[0].scrollLeft = scrolled = i.scrollParent[0].scrollLeft + o.scrollSpeed; else if(event.pageX - i.overflowOffset.left < o.scrollSensitivity) i.scrollParent[0].scrollLeft = scrolled = i.scrollParent[0].scrollLeft - o.scrollSpeed; } } else { if(!o.axis || o.axis != 'x') { if(event.pageY - $(document).scrollTop() < o.scrollSensitivity) scrolled = $(document).scrollTop($(document).scrollTop() - o.scrollSpeed); else if($(window).height() - (event.pageY - $(document).scrollTop()) < o.scrollSensitivity) scrolled = $(document).scrollTop($(document).scrollTop() + o.scrollSpeed); } if(!o.axis || o.axis != 'y') { if(event.pageX - $(document).scrollLeft() < o.scrollSensitivity) scrolled = $(document).scrollLeft($(document).scrollLeft() - o.scrollSpeed); else if($(window).width() - (event.pageX - $(document).scrollLeft()) < o.scrollSensitivity) scrolled = $(document).scrollLeft($(document).scrollLeft() + o.scrollSpeed); } } if(scrolled !== false && $.ui.ddmanager && !o.dropBehaviour) $.ui.ddmanager.prepareOffsets(i, event); } }); $.ui.plugin.add("draggable", "snap", { start: function(event, ui) { var i = $(this).data("draggable"), o = i.options; i.snapElements = []; $(o.snap.constructor != String ? 
( o.snap.items || ':data(draggable)' ) : o.snap).each(function() { var $t = $(this); var $o = $t.offset(); if(this != i.element[0]) i.snapElements.push({ item: this, width: $t.outerWidth(), height: $t.outerHeight(), top: $o.top, left: $o.left }); }); }, drag: function(event, ui) { var inst = $(this).data("draggable"), o = inst.options; var d = o.snapTolerance; var x1 = ui.offset.left, x2 = x1 + inst.helperProportions.width, y1 = ui.offset.top, y2 = y1 + inst.helperProportions.height; for (var i = inst.snapElements.length - 1; i >= 0; i--){ var l = inst.snapElements[i].left, r = l + inst.snapElements[i].width, t = inst.snapElements[i].top, b = t + inst.snapElements[i].height; //Yes, I know, this is insane ;) if(!((l-d < x1 && x1 < r+d && t-d < y1 && y1 < b+d) || (l-d < x1 && x1 < r+d && t-d < y2 && y2 < b+d) || (l-d < x2 && x2 < r+d && t-d < y1 && y1 < b+d) || (l-d < x2 && x2 < r+d && t-d < y2 && y2 < b+d))) { if(inst.snapElements[i].snapping) (inst.options.snap.release && inst.options.snap.release.call(inst.element, event, $.extend(inst._uiHash(), { snapItem: inst.snapElements[i].item }))); inst.snapElements[i].snapping = false; continue; } if(o.snapMode != 'inner') { var ts = Math.abs(t - y2) <= d; var bs = Math.abs(b - y1) <= d; var ls = Math.abs(l - x2) <= d; var rs = Math.abs(r - x1) <= d; if(ts) ui.position.top = inst._convertPositionTo("relative", { top: t - inst.helperProportions.height, left: 0 }).top - inst.margins.top; if(bs) ui.position.top = inst._convertPositionTo("relative", { top: b, left: 0 }).top - inst.margins.top; if(ls) ui.position.left = inst._convertPositionTo("relative", { top: 0, left: l - inst.helperProportions.width }).left - inst.margins.left; if(rs) ui.position.left = inst._convertPositionTo("relative", { top: 0, left: r }).left - inst.margins.left; } var first = (ts || bs || ls || rs); if(o.snapMode != 'outer') { var ts = Math.abs(t - y1) <= d; var bs = Math.abs(b - y2) <= d; var ls = Math.abs(l - x1) <= d; var rs = Math.abs(r - x2) <= 
d; if(ts) ui.position.top = inst._convertPositionTo("relative", { top: t, left: 0 }).top - inst.margins.top; if(bs) ui.position.top = inst._convertPositionTo("relative", { top: b - inst.helperProportions.height, left: 0 }).top - inst.margins.top; if(ls) ui.position.left = inst._convertPositionTo("relative", { top: 0, left: l }).left - inst.margins.left; if(rs) ui.position.left = inst._convertPositionTo("relative", { top: 0, left: r - inst.helperProportions.width }).left - inst.margins.left; } if(!inst.snapElements[i].snapping && (ts || bs || ls || rs || first)) (inst.options.snap.snap && inst.options.snap.snap.call(inst.element, event, $.extend(inst._uiHash(), { snapItem: inst.snapElements[i].item }))); inst.snapElements[i].snapping = (ts || bs || ls || rs || first); }; } }); $.ui.plugin.add("draggable", "stack", { start: function(event, ui) { var o = $(this).data("draggable").options; var group = $.makeArray($(o.stack)).sort(function(a,b) { return (parseInt($(a).css("zIndex"),10) || 0) - (parseInt($(b).css("zIndex"),10) || 0); }); if (!group.length) { return; } var min = parseInt(group[0].style.zIndex) || 0; $(group).each(function(i) { this.style.zIndex = min + i; }); this[0].style.zIndex = min + group.length; } }); $.ui.plugin.add("draggable", "zIndex", { start: function(event, ui) { var t = $(ui.helper), o = $(this).data("draggable").options; if(t.css("zIndex")) o._zIndex = t.css("zIndex"); t.css('zIndex', o.zIndex); }, stop: function(event, ui) { var o = $(this).data("draggable").options; if(o._zIndex) $(ui.helper).css('zIndex', o._zIndex); } }); })(jQuery);
{ "pile_set_name": "Github" }
{ "name": "Analytics", "description": "", "version": "4.17.0", "dependencies": { "system": { "type": "SAE", "version": "4.2.0" } } }
{ "pile_set_name": "Github" }
// ASM: a very small and fast Java bytecode manipulation framework // Copyright (c) 2000-2011 INRIA, France Telecom // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // 1. Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // 2. Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // 3. Neither the name of the copyright holders nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. package org.mvel2.asm; /** * A visitor to visit a Java annotation. The methods of this class must be called in the following * order: ( {@code visit} | {@code visitEnum} | {@code visitAnnotation} | {@code visitArray} )* * {@code visitEnd}. 
* * @author Eric Bruneton * @author Eugene Kuleshov */ public abstract class AnnotationVisitor { /** * The ASM API version implemented by this visitor. The value of this field must be one of {@link * Opcodes#ASM4}, {@link Opcodes#ASM5}, {@link Opcodes#ASM6} or {@link Opcodes#ASM7}. */ protected final int api; /** The annotation visitor to which this visitor must delegate method calls. May be null. */ protected AnnotationVisitor av; /** * Constructs a new {@link AnnotationVisitor}. * * @param api the ASM API version implemented by this visitor. Must be one of {@link * Opcodes#ASM4}, {@link Opcodes#ASM5}, {@link Opcodes#ASM6} or {@link Opcodes#ASM7}. */ public AnnotationVisitor(final int api) { this(api, null); } /** * Constructs a new {@link AnnotationVisitor}. * * @param api the ASM API version implemented by this visitor. Must be one of {@link * Opcodes#ASM4}, {@link Opcodes#ASM5}, {@link Opcodes#ASM6} or {@link Opcodes#ASM7}. * @param annotationVisitor the annotation visitor to which this visitor must delegate method * calls. May be null. */ public AnnotationVisitor(final int api, final AnnotationVisitor annotationVisitor) { if (api != Opcodes.ASM6 && api != Opcodes.ASM5 && api != Opcodes.ASM4 && api != Opcodes.ASM7) { throw new IllegalArgumentException(); } this.api = api; this.av = annotationVisitor; } /** * Visits a primitive value of the annotation. * * @param name the value name. * @param value the actual value, whose type must be {@link Byte}, {@link Boolean}, {@link * Character}, {@link Short}, {@link Integer} , {@link Long}, {@link Float}, {@link Double}, * {@link String} or {@link Type} of {@link Type#OBJECT} or {@link Type#ARRAY} sort. This * value can also be an array of byte, boolean, short, char, int, long, float or double values * (this is equivalent to using {@link #visitArray} and visiting each array element in turn, * but is more convenient). 
*/ public void visit(final String name, final Object value) { if (av != null) { av.visit(name, value); } } /** * Visits an enumeration value of the annotation. * * @param name the value name. * @param descriptor the class descriptor of the enumeration class. * @param value the actual enumeration value. */ public void visitEnum(final String name, final String descriptor, final String value) { if (av != null) { av.visitEnum(name, descriptor, value); } } /** * Visits a nested annotation value of the annotation. * * @param name the value name. * @param descriptor the class descriptor of the nested annotation class. * @return a visitor to visit the actual nested annotation value, or {@literal null} if this * visitor is not interested in visiting this nested annotation. <i>The nested annotation * value must be fully visited before calling other methods on this annotation visitor</i>. */ public AnnotationVisitor visitAnnotation(final String name, final String descriptor) { if (av != null) { return av.visitAnnotation(name, descriptor); } return null; } /** * Visits an array value of the annotation. Note that arrays of primitive types (such as byte, * boolean, short, char, int, long, float or double) can be passed as value to {@link #visit * visit}. This is what {@link ClassReader} does. * * @param name the value name. * @return a visitor to visit the actual array value elements, or {@literal null} if this visitor * is not interested in visiting these values. The 'name' parameters passed to the methods of * this visitor are ignored. <i>All the array values must be visited before calling other * methods on this annotation visitor</i>. */ public AnnotationVisitor visitArray(final String name) { if (av != null) { return av.visitArray(name); } return null; } /** Visits the end of the annotation. */ public void visitEnd() { if (av != null) { av.visitEnd(); } } }
{ "pile_set_name": "Github" }
--- name: Question about: Got a question? ask it here title: "[question]" labels: question assignees: '' ---
{ "pile_set_name": "Github" }
/* Copyright 2018 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package manager import ( "context" "fmt" "net" "net/http" "sync" "time" "github.com/prometheus/client_golang/prometheus/promhttp" "k8s.io/apimachinery/pkg/api/meta" "k8s.io/apimachinery/pkg/runtime" "k8s.io/client-go/rest" "k8s.io/client-go/tools/leaderelection" "k8s.io/client-go/tools/leaderelection/resourcelock" "k8s.io/client-go/tools/record" "sigs.k8s.io/controller-runtime/pkg/cache" "sigs.k8s.io/controller-runtime/pkg/client" "sigs.k8s.io/controller-runtime/pkg/healthz" logf "sigs.k8s.io/controller-runtime/pkg/internal/log" "sigs.k8s.io/controller-runtime/pkg/metrics" "sigs.k8s.io/controller-runtime/pkg/recorder" "sigs.k8s.io/controller-runtime/pkg/runtime/inject" "sigs.k8s.io/controller-runtime/pkg/webhook" ) const ( // Values taken from: https://github.com/kubernetes/apiserver/blob/master/pkg/apis/config/v1alpha1/defaults.go defaultLeaseDuration = 15 * time.Second defaultRenewDeadline = 10 * time.Second defaultRetryPeriod = 2 * time.Second defaultReadinessEndpoint = "/readyz" defaultLivenessEndpoint = "/healthz" ) var log = logf.RuntimeLog.WithName("manager") type controllerManager struct { // config is the rest.config used to talk to the apiserver. Required. config *rest.Config // scheme is the scheme injected into Controllers, EventHandlers, Sources and Predicates. Defaults // to scheme.scheme. 
scheme *runtime.Scheme // leaderElectionRunnables is the set of Controllers that the controllerManager injects deps into and Starts. // These Runnables are managed by lead election. leaderElectionRunnables []Runnable // nonLeaderElectionRunnables is the set of webhook servers that the controllerManager injects deps into and Starts. // These Runnables will not be blocked by lead election. nonLeaderElectionRunnables []Runnable cache cache.Cache // TODO(directxman12): Provide an escape hatch to get individual indexers // client is the client injected into Controllers (and EventHandlers, Sources and Predicates). client client.Client // apiReader is the reader that will make requests to the api server and not the cache. apiReader client.Reader // fieldIndexes knows how to add field indexes over the Cache used by this controller, // which can later be consumed via field selectors from the injected client. fieldIndexes client.FieldIndexer // recorderProvider is used to generate event recorders that will be injected into Controllers // (and EventHandlers, Sources and Predicates). recorderProvider recorder.Provider // resourceLock forms the basis for leader election resourceLock resourcelock.Interface // mapper is used to map resources to kind, and map kind and version. mapper meta.RESTMapper // metricsListener is used to serve prometheus metrics metricsListener net.Listener // healthProbeListener is used to serve liveness probe healthProbeListener net.Listener // Readiness probe endpoint name readinessEndpointName string // Liveness probe endpoint name livenessEndpointName string // Readyz probe handler readyzHandler *healthz.Handler // Healthz probe handler healthzHandler *healthz.Handler mu sync.Mutex started bool startedLeader bool healthzStarted bool // NB(directxman12): we don't just use an error channel here to avoid the situation where the // error channel is too small and we end up blocking some goroutines waiting to report their errors. 
// errSignal lets us track when we should stop because an error occurred errSignal *errSignaler // internalStop is the stop channel *actually* used by everything involved // with the manager as a stop channel, so that we can pass a stop channel // to things that need it off the bat (like the Channel source). It can // be closed via `internalStopper` (by being the same underlying channel). internalStop <-chan struct{} // internalStopper is the write side of the internal stop channel, allowing us to close it. // It and `internalStop` should point to the same channel. internalStopper chan<- struct{} startCache func(stop <-chan struct{}) error // port is the port that the webhook server serves at. port int // host is the hostname that the webhook server binds to. host string // CertDir is the directory that contains the server key and certificate. // if not set, webhook server would look up the server key and certificate in // {TempDir}/k8s-webhook-server/serving-certs certDir string webhookServer *webhook.Server // leaseDuration is the duration that non-leader candidates will // wait to force acquire leadership. leaseDuration time.Duration // renewDeadline is the duration that the acting master will retry // refreshing leadership before giving up. renewDeadline time.Duration // retryPeriod is the duration the LeaderElector clients should wait // between tries of actions. retryPeriod time.Duration } type errSignaler struct { // errSignal indicates that an error occurred, when closed. It shouldn't // be written to. 
errSignal chan struct{} // err is the received error err error mu sync.Mutex } func (r *errSignaler) SignalError(err error) { r.mu.Lock() defer r.mu.Unlock() if err == nil { // non-error, ignore log.Error(nil, "SignalError called without an (with a nil) error, which should never happen, ignoring") return } if r.err != nil { // we already have an error, don't try again return } // save the error and report it r.err = err close(r.errSignal) } func (r *errSignaler) Error() error { r.mu.Lock() defer r.mu.Unlock() return r.err } func (r *errSignaler) GotError() chan struct{} { r.mu.Lock() defer r.mu.Unlock() return r.errSignal } // Add sets dependencies on i, and adds it to the list of Runnables to start. func (cm *controllerManager) Add(r Runnable) error { cm.mu.Lock() defer cm.mu.Unlock() // Set dependencies on the object if err := cm.SetFields(r); err != nil { return err } var shouldStart bool // Add the runnable to the leader election or the non-leaderelection list if leRunnable, ok := r.(LeaderElectionRunnable); ok && !leRunnable.NeedLeaderElection() { shouldStart = cm.started cm.nonLeaderElectionRunnables = append(cm.nonLeaderElectionRunnables, r) } else { shouldStart = cm.startedLeader cm.leaderElectionRunnables = append(cm.leaderElectionRunnables, r) } if shouldStart { // If already started, start the controller go func() { if err := r.Start(cm.internalStop); err != nil { cm.errSignal.SignalError(err) } }() } return nil } func (cm *controllerManager) SetFields(i interface{}) error { if _, err := inject.ConfigInto(cm.config, i); err != nil { return err } if _, err := inject.ClientInto(cm.client, i); err != nil { return err } if _, err := inject.APIReaderInto(cm.apiReader, i); err != nil { return err } if _, err := inject.SchemeInto(cm.scheme, i); err != nil { return err } if _, err := inject.CacheInto(cm.cache, i); err != nil { return err } if _, err := inject.InjectorInto(cm.SetFields, i); err != nil { return err } if _, err := 
inject.StopChannelInto(cm.internalStop, i); err != nil { return err } if _, err := inject.MapperInto(cm.mapper, i); err != nil { return err } return nil } // AddHealthzCheck allows you to add Healthz checker func (cm *controllerManager) AddHealthzCheck(name string, check healthz.Checker) error { cm.mu.Lock() defer cm.mu.Unlock() if cm.healthzStarted { return fmt.Errorf("unable to add new checker because healthz endpoint has already been created") } if cm.healthzHandler == nil { cm.healthzHandler = &healthz.Handler{Checks: map[string]healthz.Checker{}} } cm.healthzHandler.Checks[name] = check return nil } // AddReadyzCheck allows you to add Readyz checker func (cm *controllerManager) AddReadyzCheck(name string, check healthz.Checker) error { cm.mu.Lock() defer cm.mu.Unlock() if cm.healthzStarted { return fmt.Errorf("unable to add new checker because readyz endpoint has already been created") } if cm.readyzHandler == nil { cm.readyzHandler = &healthz.Handler{Checks: map[string]healthz.Checker{}} } cm.readyzHandler.Checks[name] = check return nil } func (cm *controllerManager) GetConfig() *rest.Config { return cm.config } func (cm *controllerManager) GetClient() client.Client { return cm.client } func (cm *controllerManager) GetScheme() *runtime.Scheme { return cm.scheme } func (cm *controllerManager) GetFieldIndexer() client.FieldIndexer { return cm.fieldIndexes } func (cm *controllerManager) GetCache() cache.Cache { return cm.cache } func (cm *controllerManager) GetEventRecorderFor(name string) record.EventRecorder { return cm.recorderProvider.GetEventRecorderFor(name) } func (cm *controllerManager) GetRESTMapper() meta.RESTMapper { return cm.mapper } func (cm *controllerManager) GetAPIReader() client.Reader { return cm.apiReader } func (cm *controllerManager) GetWebhookServer() *webhook.Server { if cm.webhookServer == nil { cm.webhookServer = &webhook.Server{ Port: cm.port, Host: cm.host, CertDir: cm.certDir, } if err := cm.Add(cm.webhookServer); err != nil { 
panic("unable to add webhookServer to the controller manager") } } return cm.webhookServer } func (cm *controllerManager) serveMetrics(stop <-chan struct{}) { var metricsPath = "/metrics" handler := promhttp.HandlerFor(metrics.Registry, promhttp.HandlerOpts{ ErrorHandling: promhttp.HTTPErrorOnError, }) // TODO(JoelSpeed): Use existing Kubernetes machinery for serving metrics mux := http.NewServeMux() mux.Handle(metricsPath, handler) server := http.Server{ Handler: mux, } // Run the server go func() { log.Info("starting metrics server", "path", metricsPath) if err := server.Serve(cm.metricsListener); err != nil && err != http.ErrServerClosed { cm.errSignal.SignalError(err) } }() // Shutdown the server when stop is closed select { case <-stop: if err := server.Shutdown(context.Background()); err != nil { cm.errSignal.SignalError(err) } } } func (cm *controllerManager) serveHealthProbes(stop <-chan struct{}) { cm.mu.Lock() mux := http.NewServeMux() if cm.readyzHandler != nil { mux.Handle(cm.readinessEndpointName, http.StripPrefix(cm.readinessEndpointName, cm.readyzHandler)) } if cm.healthzHandler != nil { mux.Handle(cm.livenessEndpointName, http.StripPrefix(cm.livenessEndpointName, cm.healthzHandler)) } server := http.Server{ Handler: mux, } // Run server go func() { if err := server.Serve(cm.healthProbeListener); err != nil && err != http.ErrServerClosed { cm.errSignal.SignalError(err) } }() cm.healthzStarted = true cm.mu.Unlock() // Shutdown the server when stop is closed select { case <-stop: if err := server.Shutdown(context.Background()); err != nil { cm.errSignal.SignalError(err) } } } func (cm *controllerManager) Start(stop <-chan struct{}) error { // join the passed-in stop channel as an upstream feeding into cm.internalStopper defer close(cm.internalStopper) // initialize this here so that we reset the signal channel state on every start cm.errSignal = &errSignaler{errSignal: make(chan struct{})} // Metrics should be served whether the controller is leader or 
not. // (If we don't serve metrics for non-leaders, prometheus will still scrape // the pod but will get a connection refused) if cm.metricsListener != nil { go cm.serveMetrics(cm.internalStop) } // Serve health probes if cm.healthProbeListener != nil { go cm.serveHealthProbes(cm.internalStop) } go cm.startNonLeaderElectionRunnables() if cm.resourceLock != nil { err := cm.startLeaderElection() if err != nil { return err } } else { go cm.startLeaderElectionRunnables() } select { case <-stop: // We are done return nil case <-cm.errSignal.GotError(): // Error starting a controller return cm.errSignal.Error() } } func (cm *controllerManager) startNonLeaderElectionRunnables() { cm.mu.Lock() defer cm.mu.Unlock() cm.waitForCache() // Start the non-leaderelection Runnables after the cache has synced for _, c := range cm.nonLeaderElectionRunnables { // Controllers block, but we want to return an error if any have an error starting. // Write any Start errors to a channel so we can return them ctrl := c go func() { if err := ctrl.Start(cm.internalStop); err != nil { cm.errSignal.SignalError(err) } // we use %T here because we don't have a good stand-in for "name", // and the full runnable might not serialize (mutexes, etc) log.V(1).Info("non-leader-election runnable finished", "runnable type", fmt.Sprintf("%T", ctrl)) }() } } func (cm *controllerManager) startLeaderElectionRunnables() { cm.mu.Lock() defer cm.mu.Unlock() cm.waitForCache() // Start the leader election Runnables after the cache has synced for _, c := range cm.leaderElectionRunnables { // Controllers block, but we want to return an error if any have an error starting. 
// Write any Start errors to a channel so we can return them ctrl := c go func() { if err := ctrl.Start(cm.internalStop); err != nil { cm.errSignal.SignalError(err) } // we use %T here because we don't have a good stand-in for "name", // and the full runnable might not serialize (mutexes, etc) log.V(1).Info("leader-election runnable finished", "runnable type", fmt.Sprintf("%T", ctrl)) }() } cm.startedLeader = true } func (cm *controllerManager) waitForCache() { if cm.started { return } // Start the Cache. Allow the function to start the cache to be mocked out for testing if cm.startCache == nil { cm.startCache = cm.cache.Start } go func() { if err := cm.startCache(cm.internalStop); err != nil { cm.errSignal.SignalError(err) } }() // Wait for the caches to sync. // TODO(community): Check the return value and write a test cm.cache.WaitForCacheSync(cm.internalStop) cm.started = true } func (cm *controllerManager) startLeaderElection() (err error) { l, err := leaderelection.NewLeaderElector(leaderelection.LeaderElectionConfig{ Lock: cm.resourceLock, LeaseDuration: cm.leaseDuration, RenewDeadline: cm.renewDeadline, RetryPeriod: cm.retryPeriod, Callbacks: leaderelection.LeaderCallbacks{ OnStartedLeading: func(_ context.Context) { cm.startLeaderElectionRunnables() }, OnStoppedLeading: func() { // Most implementations of leader election log.Fatal() here. // Since Start is wrapped in log.Fatal when called, we can just return // an error here which will cause the program to exit. cm.errSignal.SignalError(fmt.Errorf("leader election lost")) }, }, }) if err != nil { return err } ctx, cancel := context.WithCancel(context.Background()) go func() { select { case <-cm.internalStop: cancel() case <-ctx.Done(): } }() // Start the leader elector process go l.Run(ctx) return nil }
{ "pile_set_name": "Github" }
op { name: "QueueEnqueueV2" input_arg { name: "handle" type: DT_RESOURCE } input_arg { name: "components" type_list_attr: "Tcomponents" } attr { name: "Tcomponents" type: "list(type)" has_minimum: true minimum: 1 } attr { name: "timeout_ms" type: "int" default_value { i: -1 } } is_stateful: true }
{ "pile_set_name": "Github" }
//===----------------------------------------------------------------------===// // // This source file is part of the Soto for AWS open source project // // Copyright (c) 2017-2020 the Soto project authors // Licensed under Apache License v2.0 // // See LICENSE.txt for license information // See CONTRIBUTORS.txt for the list of Soto project authors // // SPDX-License-Identifier: Apache-2.0 // //===----------------------------------------------------------------------===// // THIS FILE IS AUTOMATICALLY GENERATED by https://github.com/swift-aws/soto/blob/main/CodeGenerator/Sources/CodeGenerator/main.swift. DO NOT EDIT. @_exported import SotoCore /* Client object for interacting with AWS CodeStar service. AWS CodeStar This is the API reference for AWS CodeStar. This reference provides descriptions of the operations and data types for the AWS CodeStar API along with usage examples. You can use the AWS CodeStar API to work with: Projects and their resources, by calling the following: DeleteProject, which deletes a project. DescribeProject, which lists the attributes of a project. ListProjects, which lists all projects associated with your AWS account. ListResources, which lists the resources associated with a project. ListTagsForProject, which lists the tags associated with a project. TagProject, which adds tags to a project. UntagProject, which removes tags from a project. UpdateProject, which updates the attributes of a project. Teams and team members, by calling the following: AssociateTeamMember, which adds an IAM user to the team for a project. DisassociateTeamMember, which removes an IAM user from the team for a project. ListTeamMembers, which lists all the IAM users in the team for a project, including their roles and attributes. UpdateTeamMember, which updates a team member's attributes in a project. Users, by calling the following: CreateUserProfile, which creates a user profile that contains data associated with the user across all projects. 
DeleteUserProfile, which deletes all user profile information across all projects. DescribeUserProfile, which describes the profile of a user. ListUserProfiles, which lists all user profiles. UpdateUserProfile, which updates the profile for a user. */ public struct CodeStar: AWSService { // MARK: Member variables public let client: AWSClient public let config: AWSServiceConfig // MARK: Initialization /// Initialize the CodeStar client /// - parameters: /// - client: AWSClient used to process requests /// - region: Region of server you want to communicate with. This will override the partition parameter. /// - partition: AWS partition where service resides, standard (.aws), china (.awscn), government (.awsusgov). /// - endpoint: Custom endpoint URL to use instead of standard AWS servers /// - timeout: Timeout value for HTTP requests public init( client: AWSClient, region: SotoCore.Region? = nil, partition: AWSPartition = .aws, endpoint: String? = nil, timeout: TimeAmount? = nil, byteBufferAllocator: ByteBufferAllocator = ByteBufferAllocator(), options: AWSServiceConfig.Options = [] ) { self.client = client self.config = AWSServiceConfig( region: region, partition: region?.partition ?? partition, amzTarget: "CodeStar_20170419", service: "codestar", serviceProtocol: .json(version: "1.1"), apiVersion: "2017-04-19", endpoint: endpoint, possibleErrorTypes: [CodeStarErrorType.self], timeout: timeout, byteBufferAllocator: byteBufferAllocator, options: options ) } // MARK: API Calls /// Adds an IAM user to the team for an AWS CodeStar project. public func associateTeamMember(_ input: AssociateTeamMemberRequest, on eventLoop: EventLoop? = nil, logger: Logger = AWSClient.loggingDisabled) -> EventLoopFuture<AssociateTeamMemberResult> { return self.client.execute(operation: "AssociateTeamMember", path: "/", httpMethod: .POST, serviceConfig: self.config, input: input, on: eventLoop, logger: logger) } /// Creates a project, including project resources. 
This action creates a project based on a submitted project request. A set of source code files and a toolchain template file can be included with the project request. If these are not provided, an empty project is created. public func createProject(_ input: CreateProjectRequest, on eventLoop: EventLoop? = nil, logger: Logger = AWSClient.loggingDisabled) -> EventLoopFuture<CreateProjectResult> { return self.client.execute(operation: "CreateProject", path: "/", httpMethod: .POST, serviceConfig: self.config, input: input, on: eventLoop, logger: logger) } /// Creates a profile for a user that includes user preferences, such as the display name and email address assocciated with the user, in AWS CodeStar. The user profile is not project-specific. Information in the user profile is displayed wherever the user's information appears to other users in AWS CodeStar. public func createUserProfile(_ input: CreateUserProfileRequest, on eventLoop: EventLoop? = nil, logger: Logger = AWSClient.loggingDisabled) -> EventLoopFuture<CreateUserProfileResult> { return self.client.execute(operation: "CreateUserProfile", path: "/", httpMethod: .POST, serviceConfig: self.config, input: input, on: eventLoop, logger: logger) } /// Deletes a project, including project resources. Does not delete users associated with the project, but does delete the IAM roles that allowed access to the project. public func deleteProject(_ input: DeleteProjectRequest, on eventLoop: EventLoop? = nil, logger: Logger = AWSClient.loggingDisabled) -> EventLoopFuture<DeleteProjectResult> { return self.client.execute(operation: "DeleteProject", path: "/", httpMethod: .POST, serviceConfig: self.config, input: input, on: eventLoop, logger: logger) } /// Deletes a user profile in AWS CodeStar, including all personal preference data associated with that profile, such as display name and email address. It does not delete the history of that user, for example the history of commits made by that user. 
public func deleteUserProfile(_ input: DeleteUserProfileRequest, on eventLoop: EventLoop? = nil, logger: Logger = AWSClient.loggingDisabled) -> EventLoopFuture<DeleteUserProfileResult> { return self.client.execute(operation: "DeleteUserProfile", path: "/", httpMethod: .POST, serviceConfig: self.config, input: input, on: eventLoop, logger: logger) } /// Describes a project and its resources. public func describeProject(_ input: DescribeProjectRequest, on eventLoop: EventLoop? = nil, logger: Logger = AWSClient.loggingDisabled) -> EventLoopFuture<DescribeProjectResult> { return self.client.execute(operation: "DescribeProject", path: "/", httpMethod: .POST, serviceConfig: self.config, input: input, on: eventLoop, logger: logger) } /// Describes a user in AWS CodeStar and the user attributes across all projects. public func describeUserProfile(_ input: DescribeUserProfileRequest, on eventLoop: EventLoop? = nil, logger: Logger = AWSClient.loggingDisabled) -> EventLoopFuture<DescribeUserProfileResult> { return self.client.execute(operation: "DescribeUserProfile", path: "/", httpMethod: .POST, serviceConfig: self.config, input: input, on: eventLoop, logger: logger) } /// Removes a user from a project. Removing a user from a project also removes the IAM policies from that user that allowed access to the project and its resources. Disassociating a team member does not remove that user's profile from AWS CodeStar. It does not remove the user from IAM. public func disassociateTeamMember(_ input: DisassociateTeamMemberRequest, on eventLoop: EventLoop? = nil, logger: Logger = AWSClient.loggingDisabled) -> EventLoopFuture<DisassociateTeamMemberResult> { return self.client.execute(operation: "DisassociateTeamMember", path: "/", httpMethod: .POST, serviceConfig: self.config, input: input, on: eventLoop, logger: logger) } /// Lists all projects in AWS CodeStar associated with your AWS account. public func listProjects(_ input: ListProjectsRequest, on eventLoop: EventLoop? 
= nil, logger: Logger = AWSClient.loggingDisabled) -> EventLoopFuture<ListProjectsResult> { return self.client.execute(operation: "ListProjects", path: "/", httpMethod: .POST, serviceConfig: self.config, input: input, on: eventLoop, logger: logger) } /// Lists resources associated with a project in AWS CodeStar. public func listResources(_ input: ListResourcesRequest, on eventLoop: EventLoop? = nil, logger: Logger = AWSClient.loggingDisabled) -> EventLoopFuture<ListResourcesResult> { return self.client.execute(operation: "ListResources", path: "/", httpMethod: .POST, serviceConfig: self.config, input: input, on: eventLoop, logger: logger) } /// Gets the tags for a project. public func listTagsForProject(_ input: ListTagsForProjectRequest, on eventLoop: EventLoop? = nil, logger: Logger = AWSClient.loggingDisabled) -> EventLoopFuture<ListTagsForProjectResult> { return self.client.execute(operation: "ListTagsForProject", path: "/", httpMethod: .POST, serviceConfig: self.config, input: input, on: eventLoop, logger: logger) } /// Lists all team members associated with a project. public func listTeamMembers(_ input: ListTeamMembersRequest, on eventLoop: EventLoop? = nil, logger: Logger = AWSClient.loggingDisabled) -> EventLoopFuture<ListTeamMembersResult> { return self.client.execute(operation: "ListTeamMembers", path: "/", httpMethod: .POST, serviceConfig: self.config, input: input, on: eventLoop, logger: logger) } /// Lists all the user profiles configured for your AWS account in AWS CodeStar. public func listUserProfiles(_ input: ListUserProfilesRequest, on eventLoop: EventLoop? = nil, logger: Logger = AWSClient.loggingDisabled) -> EventLoopFuture<ListUserProfilesResult> { return self.client.execute(operation: "ListUserProfiles", path: "/", httpMethod: .POST, serviceConfig: self.config, input: input, on: eventLoop, logger: logger) } /// Adds tags to a project. public func tagProject(_ input: TagProjectRequest, on eventLoop: EventLoop? 
= nil, logger: Logger = AWSClient.loggingDisabled) -> EventLoopFuture<TagProjectResult> { return self.client.execute(operation: "TagProject", path: "/", httpMethod: .POST, serviceConfig: self.config, input: input, on: eventLoop, logger: logger) } /// Removes tags from a project. public func untagProject(_ input: UntagProjectRequest, on eventLoop: EventLoop? = nil, logger: Logger = AWSClient.loggingDisabled) -> EventLoopFuture<UntagProjectResult> { return self.client.execute(operation: "UntagProject", path: "/", httpMethod: .POST, serviceConfig: self.config, input: input, on: eventLoop, logger: logger) } /// Updates a project in AWS CodeStar. public func updateProject(_ input: UpdateProjectRequest, on eventLoop: EventLoop? = nil, logger: Logger = AWSClient.loggingDisabled) -> EventLoopFuture<UpdateProjectResult> { return self.client.execute(operation: "UpdateProject", path: "/", httpMethod: .POST, serviceConfig: self.config, input: input, on: eventLoop, logger: logger) } /// Updates a team member's attributes in an AWS CodeStar project. For example, you can change a team member's role in the project, or change whether they have remote access to project resources. public func updateTeamMember(_ input: UpdateTeamMemberRequest, on eventLoop: EventLoop? = nil, logger: Logger = AWSClient.loggingDisabled) -> EventLoopFuture<UpdateTeamMemberResult> { return self.client.execute(operation: "UpdateTeamMember", path: "/", httpMethod: .POST, serviceConfig: self.config, input: input, on: eventLoop, logger: logger) } /// Updates a user's profile in AWS CodeStar. The user profile is not project-specific. Information in the user profile is displayed wherever the user's information appears to other users in AWS CodeStar. public func updateUserProfile(_ input: UpdateUserProfileRequest, on eventLoop: EventLoop? 
= nil, logger: Logger = AWSClient.loggingDisabled) -> EventLoopFuture<UpdateUserProfileResult> { return self.client.execute(operation: "UpdateUserProfile", path: "/", httpMethod: .POST, serviceConfig: self.config, input: input, on: eventLoop, logger: logger) } }
{ "pile_set_name": "Github" }
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> <!-- Copyright 2004-2020 H2 Group. Multiple-Licensed under the MPL 2.0, and the EPL 1.0 (https://h2database.com/html/license.html). Initial Developer: H2 Group --> <html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en"> <head><meta http-equiv="Content-Type" content="text/html;charset=utf-8" /><title> Javadoc package documentation </title></head><body style="font: 9pt/130% Tahoma, Arial, Helvetica, sans-serif; font-weight: normal;"><p> A LIRS cache implementation. </p></body></html>
{ "pile_set_name": "Github" }
var t = require("tap") var origCwd = process.cwd() process.chdir(__dirname) var glob = require('../') var path = require('path') t.test('.', function (t) { glob('/b*/**', { globDebug: true, root: '.' }, function (er, matches) { t.ifError(er) t.like(matches, []) t.end() }) }) t.test('a', function (t) { console.error("root=" + path.resolve('a')) glob('/b*/**', { globDebug: true, root: path.resolve('a') }, function (er, matches) { t.ifError(er) var wanted = [ '/b', '/b/c', '/b/c/d', '/bc', '/bc/e', '/bc/e/f' ].map(function (m) { return path.join(path.resolve('a'), m).replace(/\\/g, '/') }) t.like(matches, wanted) t.end() }) }) t.test('root=a, cwd=a/b', function (t) { glob('/b*/**', { globDebug: true, root: 'a', cwd: path.resolve('a/b') }, function (er, matches) { t.ifError(er) t.like(matches, [ '/b', '/b/c', '/b/c/d', '/bc', '/bc/e', '/bc/e/f' ].map(function (m) { return path.join(path.resolve('a'), m).replace(/\\/g, '/') })) t.end() }) }) t.test('cd -', function (t) { process.chdir(origCwd) t.end() })
{ "pile_set_name": "Github" }
#!/bin/bash USAGE() { echo "usage: $0 [--liveheap][-nz|--nozip][-i|--interval] <pid>" } if [ $# -lt 1 ]; then USAGE exit -1 fi BASEDIR=/tmp/vjtools LOGDIR=${BASEDIR}/vjdump SLEEP_TIME=1 CLOSE_COMPRESS=0 NEED_HEAP_DUMP=0 PID="$1" while true; do case "$1" in -i|--interval) SLEEP_TIME="$2"; PID="$3"; shift 1;; -nz|--nozip) CLOSE_COMPRESS=1; PID="$2"; shift;; --liveheap) NEED_HEAP_DUMP=1; PID="$2"; shift;; *) break;; esac done CMD="$1" shift START() { if [[ x"$PID" == x ]]; then echo -e "The pid is empty, please enter pid". exit -1 else echo -e "The pid is ${PID}" fi # try to find $JAVA_HOME if not set if [ -z "$JAVA_HOME" ] ; then JAVA_HOME=`readlink -f \`which java 2>/dev/null\` 2>/dev/null | \ sed 's/\jre\/bin\/java//' | sed 's/\/bin\/java//'` fi if [ ! -f "$JAVA_HOME/bin/jstack" ] ; then echo -e "\033[31m\$JAVA_HOME not found. please export JAVA_HOME manually.\033[0m" exit -1 fi # clean all history logs rm -rf ${LOGDIR}/*.log ${LOGDIR}/*jmap_dump_live-*.bin mkdir -p ${LOGDIR} DATE=$(date "+%Y%m%d%H%M%S") echo -e "\033[34m$(date '+%Y-%m-%d %H:%M:%S') vjdump begin. command interval is ${SLEEP_TIME}s.\033[0m" # jstack echo -e "$(date '+%Y-%m-%d %H:%M:%S') Begin to process jstack." JSTACK_LOG=${LOGDIR}/jstack-${PID}-${DATE}.log ${JAVA_HOME}/bin/jstack -l $PID > ${JSTACK_LOG} if [[ $? != 0 ]]; then echo -e "\033[31mprocess jstack error.\033[0m" fi echo -e "$(date '+%Y-%m-%d %H:%M:%S') Finish to process jstack." sleep ${SLEEP_TIME} # vjtop VJTOP_SCRIPT=vjtop.sh which $VJTOP_SCRIPT 2>/dev/null if [[ $? == 0 ]]; then echo -e "$(date '+%Y-%m-%d %H:%M:%S') Begin to process vjtop." echo -e "It will take 3 seconds, please wait." VJTOP_LOG=${LOGDIR}/vjtop-${PID}-${DATE}.log $VJTOP_SCRIPT -n 3 -d 1 $PID > ${VJTOP_LOG} if [[ $? != 0 ]]; then echo -e "\033[31mprocess vjtop error.\033[0m" fi echo -e "$(date '+%Y-%m-%d %H:%M:%S') Finish to process vjtop." else # no vjtop, use other replacement # jinfo -flags $PID echo -e "$(date '+%Y-%m-%d %H:%M:%S') Begin to process jinfo -flags." 
JINFO_FLAGS_LOG=${LOGDIR}/jinfo-flags-${PID}-${DATE}.log ${JAVA_HOME}/bin/jinfo -flags $PID 1>${JINFO_FLAGS_LOG} 2>&1 if [[ $? != 0 ]]; then echo -e "\033[31mprocess jinfo -flags error.\033[0m" fi echo -e "$(date '+%Y-%m-%d %H:%M:%S') Finish to process jinfo -flags." #jmap -heap echo -e "$(date '+%Y-%m-%d %H:%M:%S') Begin to process jmap -heap." JMAP_HEAP_LOG=${LOGDIR}/jmap_heap-${PID}-${DATE}.log ${JAVA_HOME}/bin/jmap -heap $PID > ${JMAP_HEAP_LOG} if [[ $? != 0 ]]; then echo -e "\033[31mprocess jmap -heap error.\033[0m" fi echo -e "$(date '+%Y-%m-%d %H:%M:%S') Finish to process jmap -heap." fi # jmap -histo echo -e "$(date '+%Y-%m-%d %H:%M:%S') Begin to process jmap -histo." JMAP_HISTO_LOG=${LOGDIR}/jmap_histo-${PID}-${DATE}.log ${JAVA_HOME}/bin/jmap -histo $PID > ${JMAP_HISTO_LOG} if [[ $? != 0 ]]; then echo -e "\033[31mprocess jmap -histo error.\033[0m" fi echo -e "$(date '+%Y-%m-%d %H:%M:%S') Finish to process jmap -histo." sleep ${SLEEP_TIME} # jmap -histo:live echo -e "$(date '+%Y-%m-%d %H:%M:%S') Begin to process jmap -histo:live." JMAP_HISTO_LIVE_LOG=${LOGDIR}/jmap_histo_live-${PID}-${DATE}.log ${JAVA_HOME}/bin/jmap -histo:live $PID > ${JMAP_HISTO_LIVE_LOG} if [[ $? != 0 ]]; then echo -e "\033[31mprocess jmap -histo:live error.\033[0m" fi echo -e "$(date '+%Y-%m-%d %H:%M:%S') Finish to process jmap -histo:live." sleep ${SLEEP_TIME} # jmap -dump:live if [[ $NEED_HEAP_DUMP == 1 ]]; then JMAP_DUMP_FILE=${LOGDIR}/jmap_dump_live-${PID}-${DATE}.bin echo -e "$(date '+%Y-%m-%d %H:%M:%S') Begin to process jmap -dump:live." ${JAVA_HOME}/bin/jmap -dump:live,format=b,file=${JMAP_DUMP_FILE} $PID if [[ $? != 0 ]]; then echo -e "\033[31mprocess jmap -dump:live error.\033[0m" fi echo -e "$(date '+%Y-%m-%d %H:%M:%S') Finish to process jmap -dump:live." sleep ${SLEEP_TIME} fi # gc log echo -e "$(date '+%Y-%m-%d %H:%M:%S') Begin to process gc log." GCLOG=$(strings /proc/${PID}/cmdline |grep '\-Xloggc' |cut -d : -f 2) if [[ x"$GCLOG" == x ]]; then echo -e "No GC log existing." 
else # "\cp" means unalias cp, it can cover files without prompting \cp -rf $GCLOG ${LOGDIR}/ if [[ $? != 0 ]]; then echo -e "copy gc log error." fi fi echo -e "$(date '+%Y-%m-%d %H:%M:%S') Finish to process gc log." # packaging if [[ $CLOSE_COMPRESS == 1 ]]; then echo -e "The zip option is closed, no zip package will be generated." else echo -e "$(date '+%Y-%m-%d %H:%M:%S') Begin to zip all files." # zip files without heap dump if [ -x "$(command -v zip)" ]; then COMPRESS_FILE=${BASEDIR}/vjdump-${PID}-${DATE}.zip zip -j ${COMPRESS_FILE} ${LOGDIR}/*.log else COMPRESS_FILE=${BASEDIR}/vjdump-${PID}-${DATE}.tar.gz (cd ${LOGDIR} && tar -zcvf ${COMPRESS_FILE} *.log) fi if [[ $? != 0 ]]; then echo -e "\033[31mzip files error.\033[0m" else echo -e "zip files success, the zip file is \033[34m${ZIP_FILE}\033[0m" fi echo -e "$(date '+%Y-%m-%d %H:%M:%S') Finish to zip all files." if [[ $NEED_HEAP_DUMP == 1 ]]; then # compress all files echo -e "$(date '+%Y-%m-%d %H:%M:%S') Begin to zip files which include dump file." if [ -x "$(command -v zip)" ]; then COMPRESS_FILE_WITH_HEAP_DUMP=${BASEDIR}/vjdump-with-heap-${PID}-${DATE}.zip zip -j ${COMPRESS_FILE_WITH_HEAP_DUMP} ${LOGDIR}/*.log ${JMAP_DUMP_FILE} else COMPRESS_FILE_WITH_HEAP_DUMP=${BASEDIR}/vjdump-with-heap-${PID}-${DATE}.tar.gz (cd ${LOGDIR} && tar -zcvf ${COMPRESS_FILE_WITH_HEAP_DUMP} *.log *.bin) fi if [[ $? != 0 ]]; then echo -e "\033[31mzip files which include dump file error.\033[0m" else echo -e "zip files which include dump file success, the zip path is \033[34m${ZIP_FILE_WITH_HEAP_DUMP}\033[0m" fi echo -e "$(date '+%Y-%m-%d %H:%M:%S') Finish to zip files which include dump file." fi fi echo -e "\033[34m$(date '+%Y-%m-%d %H:%M:%S') vjdump finish. \033[0m" } case "$CMD" in help) USAGE;; *) START;; esac
{ "pile_set_name": "Github" }
/* * Copyright 2020 Confluent Inc. * * Licensed under the Confluent Community License (the "License"); you may not use * this file except in compliance with the License. You may obtain a copy of the * License at * * http://www.confluent.io/confluent-community-license * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package io.confluent.ksql.parser.tree; import static io.confluent.ksql.schema.ksql.SystemColumns.WINDOWSTART_NAME; import static java.util.Optional.of; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.junit.Assert.assertThrows; import io.confluent.ksql.execution.expression.tree.Expression; import io.confluent.ksql.execution.expression.tree.StringLiteral; import io.confluent.ksql.parser.NodeLocation; import io.confluent.ksql.parser.exception.ParseFailedException; import java.util.Optional; import org.junit.Test; public class SingleColumnTest { private static final Optional<NodeLocation> A_LOCATION = Optional.empty(); private static final Expression AN_EXPRESSION = new StringLiteral("foo"); @Test public void shouldThrowIfAliasIsSystemColumnName() { // When: final Exception e = assertThrows( ParseFailedException.class, () -> new SingleColumn(A_LOCATION, AN_EXPRESSION, of(WINDOWSTART_NAME)) ); // Then: assertThat(e.getMessage(), containsString("is a reserved system column name.")); } }
{ "pile_set_name": "Github" }
<html> <head> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/> <meta name="tabpage" content="jobs"/> <meta name="layout" content="base"/> <title>%{-- - Copyright 2016 SimplifyOps, Inc. (http://simplifyops.com) - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. --}% <g:appTitle/> - <g:message code="upload.job.page.title" /></title> <script type="text/javascript"> jQuery(function(){ jQuery('.act_job_action_dropdown').click(function(){ var id=jQuery(this).data('jobId'); var el=jQuery(this).parent().find('.dropdown-menu'); el.load(_genUrl(appLinks.scheduledExecutionActionMenuFragment,{id:id,jobDeleteSingle:true})); }); jQuery('#xmlBatch').on('change', function () { if (this.files.length == 1) { if (this.files[0].name.match(/\.ya?ml$/i)) { jQuery('input[name=fileformat][value=yaml]').prop('checked', true); } else if (this.files[0].name.match(/\.xml$/i)) { jQuery('input[name=fileformat][value=xml]').prop('checked', true); } } }); }); </script> </head> <body> <tmpl:uploadForm /> </body> </html>
{ "pile_set_name": "Github" }
//////////////////////////////////////////////////////////////////////////////// // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to You under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // //////////////////////////////////////////////////////////////////////////////// package org.apache.royale.jewel.supportClasses.card { import org.apache.royale.jewel.HContainer; /** * The CardHeader class is a header used in Jewel Card component where title, icons * or actions (i.e: buttons, icons) can be located. * * Actions are placed horizontally by default, and can be separated using * BarSection for left, middle and right sections. * * @langversion 3.0 * @playerversion Flash 10.2 * @playerversion AIR 2.6 * @productversion Royale 0.9.7 */ public class CardHeader extends HContainer { /** * constructor. * * @langversion 3.0 * @playerversion Flash 10.2 * @playerversion AIR 2.6 * @productversion Royale 0.9.7 */ public function CardHeader() { super(); typeNames = "card-header"; } } }
{ "pile_set_name": "Github" }
(require 'clojure-mode) (require 'clojure-mode-extra-font-locking) (defadvice clojure-test-run-tests (before save-first activate) (save-buffer)) (defadvice nrepl-load-current-buffer (before save-first activate) (save-buffer)) (require 'clj-refactor) (setq cljr-favor-prefix-notation nil) (setq cljr-favor-private-functions nil) (cljr-add-keybindings-with-modifier "C-s-") (define-key clj-refactor-map (kbd "C-x C-r") 'cljr-rename-file) (define-key clojure-mode-map (kbd "C-:") 'hippie-expand-lines) (define-key clojure-mode-map (kbd "C-\"") 'clojure-toggle-keyword-string) (define-key clojure-mode-map [remap paredit-forward] 'clojure-forward-logical-sexp) (define-key clojure-mode-map [remap paredit-backward] 'clojure-backward-logical-sexp) ;; kaocha (require 'kaocha-runner) (defun kaocha-runner-run-relevant-tests () (when (cljr--project-depends-on-p "kaocha") (if (clj--is-test? (buffer-file-name)) (kaocha-runner--run-tests (kaocha-runner--testable-sym (cider-current-ns) nil (eq major-mode 'clojurescript-mode)) nil t) (let ((original-buffer (current-buffer))) (save-window-excursion (let* ((file (clj-other-file-name)) (alternative-file (clj-find-alternative-name file))) (cond ((file-exists-p file) (find-file file)) ((file-exists-p alternative-file) (find-file alternative-file)))) (when (clj--is-test? 
(buffer-file-name)) (kaocha-runner--run-tests (kaocha-runner--testable-sym (cider-current-ns) nil (eq major-mode 'clojurescript-mode)) nil t original-buffer))))))) (add-hook 'cider-file-loaded-hook #'kaocha-runner-run-relevant-tests) (define-key clojure-mode-map (kbd "C-c k t") 'kaocha-runner-run-test-at-point) (define-key clojure-mode-map (kbd "C-c k r") 'kaocha-runner-run-tests) (define-key clojure-mode-map (kbd "C-c k a") 'kaocha-runner-run-all-tests) (define-key clojure-mode-map (kbd "C-c k w") 'kaocha-runner-show-warnings) (define-key clojure-mode-map (kbd "C-c k h") 'kaocha-runner-hide-windows) (defun enable-clojure-mode-stuff () (clj-refactor-mode 1)) (add-hook 'clojure-mode-hook 'enable-clojure-mode-stuff) (require 'symbol-focus) (define-key clojure-mode-map (kbd "M-s-f") 'sf/focus-at-point) (defun clj-duplicate-top-level-form () (interactive) (save-excursion (cljr--goto-toplevel) (insert (cljr--extract-sexp) "\n") (cljr--just-one-blank-line))) (define-key clojure-mode-map (kbd "M-s-d") 'clj-duplicate-top-level-form) (add-to-list 'cljr-project-clean-functions 'cleanup-buffer) (define-key clojure-mode-map (kbd "C->") 'cljr-thread) (define-key clojure-mode-map (kbd "C-<") 'cljr-unwind) (define-key clojure-mode-map (kbd "s-j") 'clj-jump-to-other-file) (define-key clojure-mode-map (kbd "C-.") 'clj-hippie-expand-no-case-fold) (defun backslash () (interactive) (insert "\\")) (define-key clojure-mode-map (kbd "H-7") 'backslash) (defun clj-hippie-expand-no-case-fold () (interactive) (let ((old-syntax (char-to-string (char-syntax ?/)))) (modify-syntax-entry ?/ " ") (hippie-expand-no-case-fold) (modify-syntax-entry ?/ old-syntax))) (require 'cider) ;; don't kill the REPL when printing large data structures (setq cider-print-options '(("length" 80) ("level" 20) ("right-margin" 80))) ;; save files when evaluating them (setq cider-save-file-on-load t) (define-key cider-repl-mode-map (kbd "<home>") nil) (define-key cider-repl-mode-map (kbd "C-,") 'complete-symbol) 
(define-key cider-mode-map (kbd "C-,") 'complete-symbol) (define-key cider-mode-map (kbd "C-c C-q") 'nrepl-close) (define-key cider-mode-map (kbd "C-c C-Q") 'cider-quit) (defun cider-repl-command (cmd) (set-buffer (cider-current-repl-buffer)) (goto-char (point-max)) (insert cmd) (cider-repl-return)) (defun cider-repl-restart () "Assumes that tools.namespace is used to reload everything on the classpath (which is why we save buffers first)" (interactive) (save-some-buffers) (cider-repl-command "(user/reset!)")) (defun cider-repl-compile-and-restart () "Compile the current file and restart the app" (interactive) (cider-load-current-buffer) (cider-repl-command "(user/restart!)")) (defun cider-repl-run-clj-test () "Run the clojure.test tests in the current namespace" (interactive) (cider-load-current-buffer) (cider-repl-command "(run-tests)")) (define-key cider-mode-map (kbd "C-c M-r") 'cider-repl-reset) (define-key cider-mode-map (kbd "C-c M-k") 'cider-repl-compile-and-restart) (define-key cider-mode-map (kbd "C-c t") 'cider-repl-run-clj-test) (defun cider-find-and-clear-repl-buffer () (interactive) (cider-find-and-clear-repl-output t)) (define-key cider-mode-map (kbd "C-c C-l") 'cider-find-and-clear-repl-buffer) (define-key cider-repl-mode-map (kbd "C-c C-l") 'cider-repl-clear-buffer) (setq cljr-clojure-test-declaration "[clojure.test :refer [deftest is testing]]") ;; indent [quiescent.dom :as d] specially (define-clojure-indent (forcat 1) (d/a 1) (d/button 1) (d/code 1) (d/div 1) (d/form 1) (d/fieldset 1) (d/h1 1) (d/h2 1) (d/h3 1) (d/h4 1) (d/h5 1) (d/hr 1) (d/img 1) (d/label 1) (d/li 1) (d/option 1) (d/p 1) (d/clipPath 1) (d/pre 1) (d/select 1) (d/small 1) (d/span 1) (d/strong 1) (d/style 1) (d/ul 1) (d/ol 1) (d/svg 1) (d/g 1) (d/table 1) (d/tbody 1) (d/thead 1) (d/tr 1) (d/td 1) (d/linearGradient 1) (dd/measure! 2) (dog/measure! 
2) (e/block 1) (e/flex 1) (e/prose 1) (e/container 1) (e/hero-container 1) (e/value 1) (e/section 1) (e/section-prose 1) (e/section-header 1) (e/page 1) (e/instructions 1) (e/setup-header 1) (e/h1 1) (e/h1-light 1) (e/h2 1) (e/h3 1) (e/h4 1) (e/p 1) (e/block 1) (e/grid 1) (e/grid-box 1) (e/grid-section 1) (l/padded 1) (l/lightly-padded 1) (l/padded-all 1) (l/bubble-grid 1) (l/slider 1) (l/spread-vert 1) (l/bottom-fixed 1) (l/centered 1) (l/vert-space 1) (c/box 1) (c/square 1) (c/box-with-subsection 1) (c/embossed-section 1) (c/embossed 1) (c/group 1) (c/list 1) (c/split 1) (e/Page 1) (test-within 1) (add-watch 2) (async 1) (testing-async 1) (transaction 2)) ;; Don't warn me about the dangers of clj-refactor, fire the missiles! (setq cljr-warn-on-eval nil) ;; Use figwheel for cljs repl (setq cider-cljs-lein-repl "(do (use 'figwheel-sidecar.repl-api) (start-figwheel!) (cljs-repl))") ;; Indent and highlight more commands (put-clojure-indent 'match 'defun) ;; Hide nrepl buffers when switching buffers (switch to by prefixing with space) (setq nrepl-hide-special-buffers t) ;; Enable error buffer popping also in the REPL: (setq cider-repl-popup-stacktraces t) ;; Specify history file (setq cider-history-file "~/.emacs.d/nrepl-history") ;; auto-select the error buffer when it's displayed (setq cider-auto-select-error-buffer t) ;; Prevent the auto-display of the REPL buffer in a separate window after connection is established (setq cider-repl-pop-to-buffer-on-connect nil) ;; Pretty print results in repl (setq cider-repl-use-pretty-printing t) ;; Don't prompt for symbols (setq cider-prompt-for-symbol nil) ;; Enable eldoc in Clojure buffers (add-hook 'cider-mode-hook #'eldoc-mode) ;; Some expectations features (defun my-toggle-expect-focused () (interactive) (save-excursion (search-backward "(expect" (cljr--point-after 'cljr--goto-toplevel)) (forward-word) (if (looking-at "-focused") (paredit-forward-kill-word) (insert "-focused")))) (defun my-remove-all-focused () 
(interactive) (save-excursion (goto-char (point-min)) (while (search-forward "(expect-focused" nil t) (delete-char -8)))) (define-key clj-refactor-map (cljr--key-pairs-with-modifier "C-s-" "xf") 'my-toggle-expect-focused) (define-key clj-refactor-map (cljr--key-pairs-with-modifier "C-s-" "xr") 'my-remove-all-focused) ;; Focus tests (defun my-toggle-focused-test () (interactive) (save-excursion (search-backward "(deftest " (cljr--point-after 'cljr--goto-toplevel)) (forward-word) (if (looking-at " ^:test-refresh/focus") (kill-sexp) (insert " ^:test-refresh/focus")))) (defun my-blur-all-tests () (interactive) (save-excursion (goto-char (point-min)) (while (search-forward " ^:test-refresh/focus" nil t) (delete-region (match-beginning 0) (match-end 0))))) (define-key clj-refactor-map (cljr--key-pairs-with-modifier "C-s-" "ft") 'my-toggle-focused-test) (define-key clj-refactor-map (cljr--key-pairs-with-modifier "C-s-" "bt") 'my-blur-all-tests) ;; Cycle between () {} [] (defun live-delete-and-extract-sexp () "Delete the sexp and return it." 
(interactive) (let* ((begin (point))) (forward-sexp) (let* ((result (buffer-substring-no-properties begin (point)))) (delete-region begin (point)) result))) (defun live-cycle-clj-coll () "convert the coll at (point) from (x) -> {x} -> [x] -> (x) recur" (interactive) (let* ((original-point (point))) (while (and (> (point) 1) (not (equal "(" (buffer-substring-no-properties (point) (+ 1 (point))))) (not (equal "{" (buffer-substring-no-properties (point) (+ 1 (point))))) (not (equal "[" (buffer-substring-no-properties (point) (+ 1 (point)))))) (backward-char)) (cond ((equal "(" (buffer-substring-no-properties (point) (+ 1 (point)))) (insert "{" (substring (live-delete-and-extract-sexp) 1 -1) "}")) ((equal "{" (buffer-substring-no-properties (point) (+ 1 (point)))) (insert "[" (substring (live-delete-and-extract-sexp) 1 -1) "]")) ((equal "[" (buffer-substring-no-properties (point) (+ 1 (point)))) (insert "(" (substring (live-delete-and-extract-sexp) 1 -1) ")")) ((equal 1 (point)) (message "beginning of file reached, this was probably a mistake."))) (goto-char original-point))) (define-key clojure-mode-map (kbd "C-`") 'live-cycle-clj-coll) ;; Warn about missing nREPL instead of doing stupid things (defun nrepl-warn-when-not-connected () (interactive) (message "Oops! You're not connected to an nREPL server. 
Please run M-x cider or M-x cider-jack-in to connect.")) (define-key clojure-mode-map (kbd "C-M-x") 'nrepl-warn-when-not-connected) (define-key clojure-mode-map (kbd "C-x C-e") 'nrepl-warn-when-not-connected) (define-key clojure-mode-map (kbd "C-c C-e") 'nrepl-warn-when-not-connected) (define-key clojure-mode-map (kbd "C-c C-l") 'nrepl-warn-when-not-connected) (define-key clojure-mode-map (kbd "C-c C-r") 'nrepl-warn-when-not-connected) (define-key clojure-mode-map (kbd "C-c C-z") 'nrepl-warn-when-not-connected) (define-key clojure-mode-map (kbd "C-c C-k") 'nrepl-warn-when-not-connected) (define-key clojure-mode-map (kbd "C-c C-n") 'nrepl-warn-when-not-connected) (define-key clojure-mode-map (kbd "C-c C-p") 'nrepl-warn-when-not-connected) (define-key clojure-mode-map (kbd "C-c C-q") 'nrepl-warn-when-not-connected) (setq cljr-magic-require-namespaces '(("io" . "clojure.java.io") ("set" . "clojure.set") ("str" . "clojure.string") ("walk" . "clojure.walk") ("zip" . "clojure.zip") ("time" . "clj-time.core") ("log" . "clojure.tools.logging") ("json" . 
"cheshire.core"))) ;; refer all from expectations (setq cljr-expectations-test-declaration "[expectations :refer :all]") ;; Add requires to blank devcards files (defun cljr--find-source-ns-of-devcard-ns (test-ns test-file) (let* ((ns-chunks (split-string test-ns "[.]" t)) (test-name (car (last ns-chunks))) (src-dir-name (s-replace "devcards/" "src/" (file-name-directory test-file))) (replace-underscore (-partial 's-replace "_" "-")) (src-ns (car (--filter (or (s-prefix-p it test-name) (s-suffix-p it test-name)) (-map (lambda (file-name) (funcall replace-underscore (file-name-sans-extension file-name))) (directory-files src-dir-name)))))) (when src-ns (mapconcat 'identity (append (butlast ns-chunks) (list src-ns)) ".")))) (defun clj--find-devcards-component-name () (or (ignore-errors (with-current-buffer (find-file-noselect (clj--src-file-name-from-cards (buffer-file-name))) (save-excursion (goto-char (point-max)) (search-backward "defcomponent ") (clojure-forward-logical-sexp) (skip-syntax-forward " ") (let ((beg (point)) (end (progn (re-search-forward "\\w+") (point)))) (buffer-substring-no-properties beg end))))) "")) (defun cljr--add-card-declarations () (save-excursion (let* ((ns (clojure-find-ns)) (source-ns (cljr--find-source-ns-of-devcard-ns ns (buffer-file-name)))) (cljr--insert-in-ns ":require") (when source-ns (insert "[" source-ns " :refer [" (clj--find-devcards-component-name) "]]")) (cljr--insert-in-ns ":require") (insert (if (cljr--project-depends-on-p "reagent") "[devcards.core :refer-macros [defcard-rg]]" "[devcards.core :refer-macros [defcard]]"))) (indent-region (point-min) (point-max)))) (defun cljr--add-ns-if-blank-clj-file () (ignore-errors (when (and cljr-add-ns-to-blank-clj-files (cljr--clojure-ish-filename-p (buffer-file-name)) (= (point-min) (point-max))) (insert (format "(ns %s)\n\n" (clojure-expected-ns))) (when (cljr--in-tests-p) (cljr--add-test-declarations)) (when (clj--is-card? 
(buffer-file-name)) (cljr--add-card-declarations))))) (defun clojure-mode-indent-top-level-form (&optional cleanup-buffer?) (interactive "P") (if cleanup-buffer? (cleanup-buffer) (save-excursion (cljr--goto-toplevel) (indent-region (point) (progn (paredit-forward) (point)))))) (define-key clojure-mode-map (vector 'remap 'cleanup-buffer) 'clojure-mode-indent-top-level-form) (defun clojure-mode-paredit-wrap (pre post) (unless (looking-back "[ #\(\[\{]" 1) (insert " ")) (let ((beg (point)) (end nil)) (insert pre) (save-excursion (clojure-forward-logical-sexp 1) (insert post) (setq end (point))) (indent-region beg end))) (defun clojure-mode-paredit-wrap-square () (interactive) (clojure-mode-paredit-wrap "[" "]")) (defun clojure-mode-paredit-wrap-round () (interactive) (clojure-mode-paredit-wrap "(" ")")) (defun clojure-mode-paredit-wrap-curly () (interactive) (clojure-mode-paredit-wrap "{" "}")) (defun clojure-mode-paredit-wrap-round-from-behind () (interactive) (clojure-backward-logical-sexp 1) (clojure-mode-paredit-wrap "(" ")")) (define-key clojure-mode-map (vector 'remap 'paredit-wrap-round) 'clojure-mode-paredit-wrap-round) (define-key clojure-mode-map (vector 'remap 'paredit-wrap-square) 'clojure-mode-paredit-wrap-square) (define-key clojure-mode-map (vector 'remap 'paredit-wrap-curly) 'clojure-mode-paredit-wrap-curly) (define-key clojure-mode-map (vector 'remap 'paredit-wrap-round-from-behind) 'clojure-mode-paredit-wrap-round-from-behind) (defun cider-switch-to-any-repl-buffer (&optional set-namespace) "Switch to current REPL buffer, when possible in an existing window. The type of the REPL is inferred from the mode of current buffer. 
With a prefix arg SET-NAMESPACE sets the namespace in the REPL buffer to that of the namespace in the Clojure source buffer" (interactive "P") (cider--switch-to-repl-buffer (cider-current-repl "any" t) set-namespace)) (define-key clojure-mode-map (kbd "C-c z") 'cider-switch-to-any-repl-buffer) ;; Make q quit out of find-usages to previous window config (defadvice cljr-find-usages (before setup-grep activate) (window-configuration-to-register ?$)) ;; ------------ ;; TODO: Loot more stuff from: ;; - https://github.com/overtone/emacs-live/blob/master/packs/dev/clojure-pack/config/paredit-conf.el (setq cljr-inject-dependencies-at-jack-in nil) (setq sayid-inject-dependencies-at-jack-in nil) ;; eval-current-sexp while also including any surrounding lets with C-x M-e (defun my/cider-looking-at-lets? () (or (looking-at "(let ") (looking-at "(letfn ") (looking-at "(when-let ") (looking-at "(if-let "))) (defun my/cider-collect-lets (&optional max-point) (let* ((beg-of-defun (save-excursion (beginning-of-defun) (point))) (lets nil)) (save-excursion (while (not (= (point) beg-of-defun)) (paredit-backward-up 1) (when (my/cider-looking-at-lets?) (save-excursion (let ((beg (point))) (paredit-forward-down 1) (paredit-forward 2) (when (and max-point (< max-point (point))) (goto-char max-point)) (setq lets (cons (concat (buffer-substring-no-properties beg (point)) (if max-point "]" "")) lets)))))) lets))) (defun my/inside-let-block? () (save-excursion (paredit-backward-up 2) (my/cider-looking-at-lets?))) (defun my/cider-eval-including-lets (&optional output-to-current-buffer) "Evaluates the current sexp form, wrapped in all parent lets." (interactive "P") (let* ((beg-of-sexp (save-excursion (paredit-backward 1) (point))) (code (buffer-substring-no-properties beg-of-sexp (point))) (lets (my/cider-collect-lets (when (my/inside-let-block?) 
(save-excursion (paredit-backward 2) (point))))) (code (concat (s-join " " lets) " " code (s-repeat (length lets) ")")))) (cider-interactive-eval code (when output-to-current-buffer (cider-eval-print-handler)) nil (cider--nrepl-pr-request-map)))) (define-key clojure-mode-map (kbd "C-x M-e") 'my/cider-eval-including-lets) (provide 'setup-clojure-mode)
{ "pile_set_name": "Github" }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.gobblin.data.management.copy.entities;

import java.util.Map;

import org.apache.gobblin.commit.CommitStep;


/**
 * A {@link CommitStepCopyEntity} whose step will be executed after publishing files.
 * The priority sets an order among {@link PostPublishStep} in which they will be executed.
 */
public class PostPublishStep extends CommitStepCopyEntity {

  /**
   * All arguments are forwarded unchanged to the {@link CommitStepCopyEntity} constructor.
   *
   * @param fileSet            file set this entity belongs to.
   * @param additionalMetadata additional metadata attached to this entity.
   * @param step               the {@link CommitStep} to execute after publish.
   * @param priority           ordering value among {@link PostPublishStep}s (see class javadoc).
   */
  public PostPublishStep(String fileSet, Map<String, String> additionalMetadata, CommitStep step, int priority) {
    super(fileSet, additionalMetadata, step, priority);
  }

  /** Returns a human-readable description of this step, including its priority and the wrapped step. */
  @Override
  public String explain() {
    return String.format("Post publish step with priority %s: %s", this.getPriority(), getStep().toString());
  }
}
{ "pile_set_name": "Github" }
# Migration that creates (and can drop) the +electronics+ table.
class CreateElectronics < ActiveRecord::Migration
  def self.up
    # :force => true drops any pre-existing +electronics+ table first.
    create_table :electronics, :force => true do |t|
      t.column :name, :string
      t.column :manufacturer, :string
      t.column :features, :string
      t.column :category, :string
      # NOTE(review): price is stored as a :string, not a decimal --
      # presumably deliberate (free-form price text); confirm before
      # changing the column type.
      t.column :price, :string
    end
  end

  def self.down
    drop_table :electronics
  end
end
{ "pile_set_name": "Github" }
# Ceres Solver - A fast non-linear least squares minimizer # Copyright 2015 Google Inc. All rights reserved. # http://ceres-solver.org/ # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # * Neither the name of Google Inc. nor the names of its contributors may be # used to endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # # Author: alexs.mac@gmail.com (Alex Stewart) # # FindGlog.cmake - Find Google glog logging library. # # This module defines the following variables: # # GLOG_FOUND: TRUE iff glog is found. # GLOG_INCLUDE_DIRS: Include directories for glog. # GLOG_LIBRARIES: Libraries required to link glog. 
#
# The following variables control the behaviour of this module:
#
# GLOG_INCLUDE_DIR_HINTS: List of additional directories in which to
#                         search for glog includes, e.g: /timbuktu/include.
# GLOG_LIBRARY_DIR_HINTS: List of additional directories in which to
#                         search for glog libraries, e.g: /timbuktu/lib.
#
# The following variables are also defined by this module, but in line with
# CMake recommended FindPackage() module style should NOT be referenced directly
# by callers (use the plural variables detailed above instead).  These variables
# do however affect the behaviour of the module via FIND_[PATH/LIBRARY]() which
# are NOT re-called (i.e. search for library is not repeated) if these variables
# are set with valid values _in the CMake cache_.  This means that if these
# variables are set directly in the cache, either by the user in the CMake GUI,
# or by the user passing -DVAR=VALUE directives to CMake when called (which
# explicitly defines a cache variable), then they will be used verbatim,
# bypassing the HINTS variables and other hard-coded search locations.
#
# GLOG_INCLUDE_DIR: Include directory for glog, not including the
#                   include directory of any dependencies.
# GLOG_LIBRARY: glog library, not including the libraries of any
#               dependencies.

# Reset CALLERS_CMAKE_FIND_LIBRARY_PREFIXES to its value when
# FindGlog was invoked.
macro(GLOG_RESET_FIND_LIBRARY_PREFIX)
  if (MSVC)
    set(CMAKE_FIND_LIBRARY_PREFIXES "${CALLERS_CMAKE_FIND_LIBRARY_PREFIXES}")
  endif (MSVC)
endmacro(GLOG_RESET_FIND_LIBRARY_PREFIX)

# Called if we failed to find glog or any of it's required dependencies,
# unsets all public (designed to be used externally) variables and reports
# error message at priority depending upon [REQUIRED/QUIET/<NONE>] argument.
macro(GLOG_REPORT_NOT_FOUND REASON_MSG)
  unset(GLOG_FOUND)
  unset(GLOG_INCLUDE_DIRS)
  unset(GLOG_LIBRARIES)
  # Make results of search visible in the CMake GUI if glog has not
  # been found so that user does not have to toggle to advanced view.
  mark_as_advanced(CLEAR GLOG_INCLUDE_DIR
                         GLOG_LIBRARY)

  # Restore the caller's CMAKE_FIND_LIBRARY_PREFIXES before bailing out.
  glog_reset_find_library_prefix()

  # Note <package>_FIND_[REQUIRED/QUIETLY] variables defined by FindPackage()
  # use the camelcase library name, not uppercase.
  if (Glog_FIND_QUIETLY)
    message(STATUS "Failed to find glog - " ${REASON_MSG} ${ARGN})
  elseif (Glog_FIND_REQUIRED)
    message(FATAL_ERROR "Failed to find glog - " ${REASON_MSG} ${ARGN})
  else()
    # Neither QUIETLY nor REQUIRED, use no priority which emits a message
    # but continues configuration and allows generation.
    message("-- Failed to find glog - " ${REASON_MSG} ${ARGN})
  endif ()
endmacro(GLOG_REPORT_NOT_FOUND)

# Handle possible presence of lib prefix for libraries on MSVC, see
# also GLOG_RESET_FIND_LIBRARY_PREFIX().
if (MSVC)
  # Preserve the caller's original values for CMAKE_FIND_LIBRARY_PREFIXES
  # s/t we can set it back before returning.
  set(CALLERS_CMAKE_FIND_LIBRARY_PREFIXES "${CMAKE_FIND_LIBRARY_PREFIXES}")
  # The empty string in this list is important, it represents the case when
  # the libraries have no prefix (shared libraries / DLLs).
  set(CMAKE_FIND_LIBRARY_PREFIXES "lib" "" "${CMAKE_FIND_LIBRARY_PREFIXES}")
endif (MSVC)

# Search user-installed locations first, so that we prefer user installs
# to system installs where both exist.
list(APPEND GLOG_CHECK_INCLUDE_DIRS
  /usr/local/include
  /usr/local/homebrew/include # Mac OS X
  /opt/local/var/macports/software # Mac OS X.
  /opt/local/include
  /usr/include)
# Windows (for C:/Program Files prefix).
list(APPEND GLOG_CHECK_PATH_SUFFIXES
  glog/include glog/Include Glog/include Glog/Include)

list(APPEND GLOG_CHECK_LIBRARY_DIRS
  /usr/local/lib
  /usr/local/homebrew/lib # Mac OS X.
  /opt/local/lib
  /usr/lib)
# Windows (for C:/Program Files prefix).
list(APPEND GLOG_CHECK_LIBRARY_SUFFIXES
  glog/lib glog/Lib Glog/lib Glog/Lib)

# Search supplied hint directories first if supplied.
# Hint directories are listed before the hard-coded search dirs, so they
# take precedence.
find_path(GLOG_INCLUDE_DIR
  NAMES glog/logging.h
  PATHS ${GLOG_INCLUDE_DIR_HINTS}
  ${GLOG_CHECK_INCLUDE_DIRS}
  PATH_SUFFIXES ${GLOG_CHECK_PATH_SUFFIXES})
if (NOT GLOG_INCLUDE_DIR OR
    NOT EXISTS ${GLOG_INCLUDE_DIR})
  glog_report_not_found(
    "Could not find glog include directory, set GLOG_INCLUDE_DIR "
    "to directory containing glog/logging.h")
endif (NOT GLOG_INCLUDE_DIR OR NOT EXISTS ${GLOG_INCLUDE_DIR})

find_library(GLOG_LIBRARY
  NAMES glog
  PATHS ${GLOG_LIBRARY_DIR_HINTS}
  ${GLOG_CHECK_LIBRARY_DIRS}
  PATH_SUFFIXES ${GLOG_CHECK_LIBRARY_SUFFIXES})
if (NOT GLOG_LIBRARY OR
    NOT EXISTS ${GLOG_LIBRARY})
  glog_report_not_found(
    "Could not find glog library, set GLOG_LIBRARY "
    "to full path to libglog.")
endif (NOT GLOG_LIBRARY OR NOT EXISTS ${GLOG_LIBRARY})

# Mark internally as found, then verify.  GLOG_REPORT_NOT_FOUND() unsets
# if called.
set(GLOG_FOUND TRUE)

# Glog does not seem to provide any record of the version in its
# source tree, thus cannot extract version.

# Catch case when caller has set GLOG_INCLUDE_DIR in the cache / GUI and
# thus FIND_[PATH/LIBRARY] are not called, but specified locations are
# invalid, otherwise we would report the library as found.
if (GLOG_INCLUDE_DIR AND
    NOT EXISTS ${GLOG_INCLUDE_DIR}/glog/logging.h)
  glog_report_not_found(
    "Caller defined GLOG_INCLUDE_DIR:"
    " ${GLOG_INCLUDE_DIR} does not contain glog/logging.h header.")
endif (GLOG_INCLUDE_DIR AND NOT EXISTS ${GLOG_INCLUDE_DIR}/glog/logging.h)

# TODO: This regex for glog library is pretty primitive, we use lowercase
#       for comparison to handle Windows using CamelCase library names, could
#       this check be better?
string(TOLOWER "${GLOG_LIBRARY}" LOWERCASE_GLOG_LIBRARY)
if (GLOG_LIBRARY AND
    NOT "${LOWERCASE_GLOG_LIBRARY}" MATCHES ".*glog[^/]*")
  glog_report_not_found(
    "Caller defined GLOG_LIBRARY: "
    "${GLOG_LIBRARY} does not match glog.")
endif (GLOG_LIBRARY AND NOT "${LOWERCASE_GLOG_LIBRARY}" MATCHES ".*glog[^/]*")

# Set standard CMake FindPackage variables if found.
if (GLOG_FOUND)
  set(GLOG_INCLUDE_DIRS ${GLOG_INCLUDE_DIR})
  set(GLOG_LIBRARIES ${GLOG_LIBRARY})
endif (GLOG_FOUND)

# Restore the caller's CMAKE_FIND_LIBRARY_PREFIXES (no-op unless MSVC).
glog_reset_find_library_prefix()

# Handle REQUIRED / QUIET optional arguments.
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Glog DEFAULT_MSG
  GLOG_INCLUDE_DIRS GLOG_LIBRARIES)

# Only mark internal variables as advanced if we found glog, otherwise
# leave them visible in the standard GUI for the user to set manually.
if (GLOG_FOUND)
  mark_as_advanced(FORCE GLOG_INCLUDE_DIR
                         GLOG_LIBRARY)
endif (GLOG_FOUND)
{ "pile_set_name": "Github" }
// Color Utility


// Options
//
// $iota-utils-color            Enables generation of the color utility
//                              classes.  Previously this flag was declared
//                              but never consulted, so the classes were
//                              emitted unconditionally; it now actually
//                              guards the output.  `!default` added so
//                              consumers can override it before import,
//                              matching the sibling options below.
// $iota-utils-color-namespace  Class-name prefix for the generated utilities.
// $iota-utils-color-names      Map of color-name -> color-value pairs; one
//                              utility class is generated per entry.
$iota-utils-color           : true !default;
$iota-utils-color-namespace : 'color-' !default;
$iota-utils-color-names     : () !default;


// Helper Local Variables

// Full class prefix: global utilities namespace + this utility's namespace.
$iota-utils-color-var-color: $iota-global-utilities-namespace + $iota-utils-color-namespace;


// Color Utilities
//
// Emits `.{prefix}{name} { color: {value} !important; }` for each entry in
// $iota-utils-color-names.  Default behavior is unchanged (flag defaults to
// true); setting $iota-utils-color to false now suppresses the output.
@if $iota-utils-color {
  @each $color-name, $color-value in $iota-utils-color-names {
    .#{$iota-utils-color-var-color + $color-name} {
      color: #{$color-value} !important;
    }
  }
}
{ "pile_set_name": "Github" }
/* Base page typography and reset. */
body {
  margin: 0;
  padding: 0;
  border: 0;
  color: #1E1E1E;
  font-size: 13px;
  font-family: "Segoe UI", Helvetica, Arial, sans-serif;
  line-height: 1.45;
  word-wrap: break-word;
}

/* General & 'Reset' Stuff */

.container {
  width: 1100px;
  margin: 0 auto;
}

section {
  display: block;
  margin: 0;
}

h1, h2, h3, h4, h5, h6 {
  margin: 0;
}

table, tr {
  width: 1100px;
  padding: 0px;
  vertical-align: top;
}

/* Header, <header>
   header - container
   h1     - project name
   h2     - project description
*/

#header {
  color: #FFF;
  background: #68217a;
  position: relative;
}

h1, h2 {
  font-family: "Segoe UI Light", "Segoe UI", Helvetica, Arial, sans-serif;
  line-height: 1;
  margin: 0 18px; /* fixed stray double semicolon */
  padding: 0;
}

#header h1 {
  font-size: 3.4em;
  padding-top: 18px;
  font-weight: normal;
  margin-left: 15px; /* overrides the 18px horizontal margin above */
}

#header h2 {
  font-size: 1.5em;
  margin-top: 10px;
  padding-bottom: 18px;
  font-weight: normal;
}

/* Main content: two-column flex layout (#lpanel + #rpanel). */
#main_content {
  width: 100%;
  display: flex;
  flex-direction: row;
}

h1, h2, h3, h4, h5, h6 {
  font-weight: bolder;
}

#main_content h1 {
  font-size: 1.8em;
  margin-top: 34px;
}

#main_content h1:first-child {
  margin-top: 30px;
}

#main_content h2 {
  font-size: 1.8em;
}

p, ul {
  margin: 11px 18px;
}

#main_content a {
  color: #06C;
  text-decoration: none;
}

ul {
  margin-top: 13px;
  margin-left: 18px;
  padding-left: 0;
}

ul li {
  margin-left: 18px;
  padding-left: 0;
}

#lpanel {
  width: 870px;
  float: left;
}

#rpanel ul {
  list-style-type: none;
}

#rpanel ul li {
  line-height: 1.8em;
}

#rpanel {
  background: #e7e7e7;
  width: 230px;
}
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <string xmlns="http://tempuri.org/">{ "Info": [ { "IsSuccess": "True", "InAddress": "臺中市西區向上路一段162號", "InSRS": "EPSG:4326", "InFuzzyType": "[單雙號機制]+[最近門牌號機制]", "InFuzzyBuffer": "0", "InIsOnlyFullMatch": "False", "InIsLockCounty": "True", "InIsLockTown": "False", "InIsLockVillage": "False", "InIsLockRoadSection": "False", "InIsLockLane": "False", "InIsLockAlley": "False", "InIsLockArea": "False", "InIsSameNumber_SubNumber": "True", "InCanIgnoreVillage": "True", "InCanIgnoreNeighborhood": "True", "InReturnMaxCount": "0", "OutTotal": "1", "OutMatchType": "完全比對", "OutMatchCode": "[臺中市]\tFULL:1", "OutTraceInfo": "[臺中市]\t { 完全比對 } 找到符合的門牌地址" } ], "AddressList": [ { "FULL_ADDR": "臺中市西區中興里21鄰向上路一段162號", "COUNTY": "臺中市", "TOWN": "西區", "VILLAGE": "中興里", "NEIGHBORHOOD": "21鄰", "ROAD": "向上路", "SECTION": "1", "LANE": "", "ALLEY": "", "SUB_ALLEY": "", "TONG": "", "NUMBER": "162號", "X": 120.661249, "Y": 24.146898 } ] }</string>
{ "pile_set_name": "Github" }
/******************************************************************

         Copyright 1993, 1994 by Digital Equipment Corporation, Maynard, Massachusetts,

                        All Rights Reserved

Permission to use, copy, modify, and distribute this software and its
documentation for any purpose and without fee is hereby granted,
provided that the above copyright notice appear in all copies and that
both that copyright notice and this permission notice appear in
supporting documentation, and that the names of Digital or MIT not be
used in advertising or publicity pertaining to distribution of the
software without specific, written prior permission.

DIGITAL DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL
DIGITAL BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
SOFTWARE.

Author: Hiroyuki Miyamoto   Digital Equipment Corporation
                            miyamoto@jrd.dec.com

This version tidied and debugged by Steve Underwood May 1999

******************************************************************/

#ifndef FRAMEMGR_H
#define FRAMEMGR_H

#include <X11/Xmd.h>
#include <X11/Xlib.h>
#include <stdio.h>

/* VAX C (but not DEC C) spells external linkage with globalref/globaldef;
 * everywhere else plain extern is used. */
#if defined(VAXC) && !defined(__DECC)
#define xim_externalref globalref
#define xim_externaldef globaldef
#else
#define xim_externalref extern
#define xim_externaldef
#endif

/* Definitions for FrameMgr */

/* Bit that distinguishes the COUNTER_* variants in XimFrameType below. */
#define COUNTER_MASK 0x10

typedef enum {
    BIT8 = 0x1,            /* {CARD8* | INT8*} */
    BIT16 = 0x2,           /* {CARD16* | INT16*} */
    BIT32 = 0x3,           /* {CARD32* | INT32*} */
    BIT64 = 0x4,           /* {CARD64* | INT64*} */
    BARRAY = 0x5,          /* int*, void* */
    ITER = 0x6,            /* int* */
    POINTER = 0x7,         /* specifies next item is a PTR_ITEM */
    PTR_ITEM = 0x8,        /* specifies the item has a pointer */
    /* BOGUS - POINTER and PTR_ITEM
     * In the current implementation, PTR_ITEM should be lead by
     * POINTER. But actually, it's just redundant logically. Someone
     * may remove this redundancy and POINTER from the enum member but he
     * should also modify the logic in FrameMgr program.
     */
    PADDING = 0x9,         /* specifies that a padding is needed.
                            * This requires extra data in data field.
                            */
    EOL = 0xA,             /* specifies the end of list */
    COUNTER_BIT8 = COUNTER_MASK | 0x1,
    COUNTER_BIT16 = COUNTER_MASK | 0x2,
    COUNTER_BIT32 = COUNTER_MASK | 0x3,
    COUNTER_BIT64 = COUNTER_MASK | 0x4
} XimFrameType;

/* Convenient macro */
#define _FRAME(a) {a, NULL}
#define _PTR(p) {PTR_ITEM, (void *)p}

/* PADDING's usage of data field
 * B15-B8 : Shows the number of effective items.
 * B7-B0 : Shows padding unit. ex) 04 shows 4 unit padding.
 */
#define _PAD2(n) {PADDING, (void*)((n)<<8|2)}
#define _PAD4(n) {PADDING, (void*)((n)<<8|4)}

/* Counter kinds encoded in the low byte of a counter item's data field. */
#define FmCounterByte 0
#define FmCounterNumber 1
/* Build a counter frame item: offset in the high bits, counter kind in
 * the low byte (see FmCounterByte / FmCounterNumber above). */
#define _BYTE_COUNTER(type, offset) \
        {(COUNTER_MASK|type), (void*)((offset)<<8|FmCounterByte)}
#define _NUMBER_COUNTER(type, offset) \
        {(COUNTER_MASK|type), (void*)((offset)<<8|FmCounterNumber)}

/* One item of a frame description; frames are arrays of these terminated
 * by an EOL item. */
typedef struct _XimFrame {
    XimFrameType type;
    void* data;   /* For PTR_ITEM and PADDING */
} XimFrameRec, *XimFrame;

/* Status codes returned by the FrameMgr* functions below. */
typedef enum {
    FmSuccess,
    FmEOD,
    FmInvalidCall,
    FmBufExist,
    FmCannotCalc,
    FmNoMoreData
} FmStatus;

/* Opaque handle; struct _FrameMgr is defined in the implementation. */
typedef struct _FrameMgr *FrameMgr;

FrameMgr FrameMgrInit(XimFrame frame, char* area, Bool byte_swap);
void FrameMgrInitWithData(FrameMgr fm, XimFrame frame, void* area,
                          Bool byte_swap);
void FrameMgrFree(FrameMgr fm);
FmStatus FrameMgrSetBuffer(FrameMgr, void*);
FmStatus _FrameMgrPutToken(FrameMgr, void*, int);
FmStatus _FrameMgrGetToken(FrameMgr, void*, int);
FmStatus FrameMgrSetSize(FrameMgr, int);
FmStatus FrameMgrSetIterCount(FrameMgr, int);
FmStatus FrameMgrSetTotalSize(FrameMgr, int);
int FrameMgrGetTotalSize(FrameMgr);
int FrameMgrGetSize(FrameMgr);
FmStatus FrameMgrSkipToken(FrameMgr, int);
void FrameMgrReset(FrameMgr);
Bool FrameMgrIsIterLoopEnd(FrameMgr, FmStatus*);

/* Wrappers that pass sizeof(obj), so callers hand in the object itself
 * rather than a pointer/size pair. */
#define FrameMgrPutToken(fm, obj) _FrameMgrPutToken((fm), &(obj), sizeof(obj))
#define FrameMgrGetToken(fm, obj) _FrameMgrGetToken((fm), &(obj), sizeof(obj))

#endif /* FRAMEMGR_H */
// kate: indent-mode cstyle; space-indent on; indent-width 0;
{ "pile_set_name": "Github" }
119c119 < >>control query shape exchange(scan('t025mdam', MDAM_COLUMNS ALL)); --- > >>control query shape scan('t025mdam', MDAM_COLUMNS ALL); 2094c2094,2097 < --- SQL command prepared. --- > *** ERROR[2105] This query could not be compiled because of incompatible Control Query Shape (CQS) specifications. Inspect the CQS in effect. > > *** ERROR[8822] The statement was not prepared. > 2099,2102c2102 < (EXPR) (EXPR) (EXPR) (EXPR) (EXPR) < ---------- < < 3 65 5 50 21 --- > *** ERROR[15017] Statement S1D2A was not found. 2104d2103 < --- 1 row(s) selected. 2125,2128c2124 < (EXPR) (EXPR) (EXPR) (EXPR) (EXPR) < ---------- < < 0 ? ? ? ? --- > *** ERROR[15017] Statement S1D2A was not found. 2130d2125 < --- 1 row(s) selected.
{ "pile_set_name": "Github" }