text
stringlengths
2
99k
meta
dict
name: "Joseph Pierri" twitter: "" bio: "Joseph Pierri is a Senior Software Engineer at PagerDuty, based in Toronto. He is a member of the Core Team at PagerDuty, and has had the opportunity to work on a variety of interesting projects over the 3+ years that he’s been there, ranging from feature-work to backend systems to infrastructure components. Joseph has worked as a full-time software professional for over 8 years, in both the Finance and SaaS industries. In his spare time, he likes running and poring over baseball statistics."
{ "pile_set_name": "Github" }
# Capture-recapture summary data.
#
# `marr` appears to be an m-array for a CJS-style mark-recapture model:
# rows = release occasions (11), columns 1-11 = first-recapture occasions,
# column 12 = number never recaptured (note the nonzero final column and the
# lower-triangular layout) — TODO confirm against the model that consumes it.
# Values are laid out column-by-column below (column-major, matching R's
# default fill order).
marr <- matrix(
  c(
    5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,    # recaptured first at occasion 2
    8, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0,    # occasion 3
    1, 2, 8, 0, 0, 0, 0, 0, 0, 0, 0,    # occasion 4
    0, 1, 4, 10, 0, 0, 0, 0, 0, 0, 0,   # occasion 5
    0, 0, 2, 3, 17, 0, 0, 0, 0, 0, 0,   # occasion 6
    0, 0, 0, 0, 2, 12, 0, 0, 0, 0, 0,   # occasion 7
    0, 0, 0, 1, 0, 3, 19, 0, 0, 0, 0,   # occasion 8
    0, 0, 0, 0, 0, 0, 2, 6, 0, 0, 0,    # occasion 9
    0, 0, 0, 0, 0, 1, 2, 5, 16, 0, 0,   # occasion 10
    0, 0, 0, 0, 0, 0, 0, 2, 4, 15, 0,   # occasion 11
    0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 12,   # occasion 12
    16, 23, 33, 27, 26, 36, 21, 40, 18, 36, 39  # never recaptured
  ),
  nrow = 11,
  ncol = 12
)

# Number of capture occasions (matches ncol(marr)).
n_occasions <- 12L
{ "pile_set_name": "Github" }
/*--------------------------------*- C++ -*----------------------------------*\ | ========= | | | \\ / F ield | OpenFOAM: The Open Source CFD Toolbox | | \\ / O peration | Version: 2.3.0 | | \\ / A nd | Web: www.OpenFOAM.org | | \\/ M anipulation | | \*---------------------------------------------------------------------------*/ FoamFile { version 2.0; format ascii; class dictionary; object changeDictionaryDict; } // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // dictionaryReplacement { boundary { minY { type patch; } minZ { type patch; } maxZ { type patch; } } T { internalField uniform 300; boundaryField { ".*" { type zeroGradient; value uniform 300; } heater_to_topAir { type compressible::turbulentTemperatureRadCoupledMixed; Tnbr T; kappa solidThermo; QrNbr Qr; Qr none; kappaName none; value uniform 300; } heater_to_bottomAir { type compressible::turbulentTemperatureRadCoupledMixed; Tnbr T; kappa solidThermo; QrNbr Qr; Qr none; kappaName none; value uniform 300; } "heater_to_.*" { type compressible::turbulentTemperatureCoupledBaffleMixed; Tnbr T; kappa solidThermo; kappaName none; value uniform 300; } minY { type fixedValue; value uniform 500; } } } } // ************************************************************************* //
{ "pile_set_name": "Github" }
package com.jvms.i18neditor.swing.text;

import java.awt.event.ActionEvent;

import javax.swing.text.JTextComponent;
import javax.swing.text.TextAction;

/**
 * An action implementation useful for deleting text.
 *
 * <p>Deletes the current selection of the focused text component by replacing
 * it with the empty string. If no text component has focus (so
 * {@link TextAction#getFocusedComponent()} returns {@code null}), the action
 * is a no-op instead of throwing a {@code NullPointerException}.
 *
 * @author Jacob van Mourik
 */
public class DeleteAction extends TextAction {
    private static final long serialVersionUID = -7933405670677160997L;

    /**
     * Creates a delete action with the given name.
     *
     * @param name the name used to identify this action
     */
    public DeleteAction(String name) {
        super(name);
    }

    @Override
    public void actionPerformed(ActionEvent e) {
        JTextComponent component = getFocusedComponent();
        // getFocusedComponent() may return null when no text component has
        // ever had focus; guard to avoid an NPE on a stray action firing.
        if (component != null) {
            component.replaceSelection("");
        }
    }
}
{ "pile_set_name": "Github" }
<?php /** * Copyright © OXID eSales AG. All rights reserved. * See LICENSE file for license details. */ namespace OxidEsales\EshopCommunity\Application\Model; use OxidEsales\Eshop\Core\DatabaseProvider; use oxRegistry; use Exception; use oxDb; use OxidEsales\Eshop\Core\Registry; use OxidEsales\Eshop\Core\Database\Adapter\DatabaseInterface; use OxidEsales\Eshop\Core\Exception\DatabaseErrorException; /** * Article list manager. * Collects list of article according to collection rules (categories, etc.). */ class ArticleList extends \OxidEsales\Eshop\Core\Model\ListModel { /** * @var string SQL addon for sorting */ protected $_sCustomSorting; /** * List Object class name * * @var string */ protected $_sObjectsInListName = 'oxarticle'; /** * Set to true if Select Lists should be laoded * * @var bool */ protected $_blLoadSelectLists = false; /** * Set Custom Sorting, simply an order by.... * * @param string $sSorting Custom sorting */ public function setCustomSorting($sSorting) { $this->_sCustomSorting = $sSorting; } /** * Call enableSelectLists() for loading select lists in lst articles */ public function enableSelectLists() { $this->_blLoadSelectLists = true; } /** * @inheritdoc * In addition to the parent method, this method includes profiling. * * @param string $sql SQL select statement or prepared statement * @param array $parameters Parameters to be used in a prepared statement */ public function selectString($sql, array $parameters = []) { startProfile("loadinglists"); parent::selectString($sql, $parameters); stopProfile("loadinglists"); } /** * Get history article id's from session or cookie. 
* * @return array */ public function getHistoryArticles() { $session = \OxidEsales\Eshop\Core\Registry::getSession(); if ($aArticlesIds = $session->getVariable('aHistoryArticles')) { return $aArticlesIds; } elseif ($sArticlesIds = \OxidEsales\Eshop\Core\Registry::getUtilsServer()->getOxCookie('aHistoryArticles')) { return explode('|', $sArticlesIds); } } /** * Set history article id's to session or cookie * * @param array $aArticlesIds array history article ids */ public function setHistoryArticles($aArticlesIds) { $session = \OxidEsales\Eshop\Core\Registry::getSession(); if ($session->getId()) { $session->setVariable('aHistoryArticles', $aArticlesIds); // clean cookie, if session started \OxidEsales\Eshop\Core\Registry::getUtilsServer()->setOxCookie('aHistoryArticles', ''); } else { \OxidEsales\Eshop\Core\Registry::getUtilsServer()->setOxCookie('aHistoryArticles', implode('|', $aArticlesIds)); } } /** * Loads up to 4 history (normally recently seen) articles from session, and adds $sArtId to history. * Returns article id array. * * @param string $sArtId Article ID * @param int $iCnt product count */ public function loadHistoryArticles($sArtId, $iCnt = 4) { $aHistoryArticles = $this->getHistoryArticles(); $aHistoryArticles[] = $sArtId; // removing duplicates $aHistoryArticles = array_unique($aHistoryArticles); if (count($aHistoryArticles) > ($iCnt + 1)) { array_shift($aHistoryArticles); } $this->setHistoryArticles($aHistoryArticles); //remove current article and return array //asignment =, not == if (($iCurrentArt = array_search($sArtId, $aHistoryArticles)) !== false) { unset($aHistoryArticles[$iCurrentArt]); } $aHistoryArticles = array_values($aHistoryArticles); $this->loadIds($aHistoryArticles); $this->sortByIds($aHistoryArticles); } /** * sort this list by given order. 
* * @param array $aIds ordered ids */ public function sortByIds($aIds) { $this->_aOrderMap = array_flip($aIds); uksort($this->_aArray, [$this, '_sortByOrderMapCallback']); } /** * callback function only used from sortByIds * * @param string $key1 1st key * @param string $key2 2nd key * * @see oxArticleList::sortByIds * * @return int * @deprecated underscore prefix violates PSR12, will be renamed to "sortByOrderMapCallback" in next major */ protected function _sortByOrderMapCallback($key1, $key2) // phpcs:ignore PSR2.Methods.MethodDeclaration.Underscore { if (isset($this->_aOrderMap[$key1])) { if (isset($this->_aOrderMap[$key2])) { $iDiff = $this->_aOrderMap[$key2] - $this->_aOrderMap[$key1]; if ($iDiff > 0) { return -1; } elseif ($iDiff < 0) { return 1; } else { return 0; } } else { // first is here, but 2nd is not - 1st gets more priority return -1; } } elseif (isset($this->_aOrderMap[$key2])) { // first is not here, but 2nd is - 2nd gets more priority return 1; } else { // both unset, equal return 0; } } /** * Loads newest shops articles from DB. * * @param int $iLimit Select limit */ public function loadNewestArticles($iLimit = null) { //has module? $myConfig = \OxidEsales\Eshop\Core\Registry::getConfig(); if (!$myConfig->getConfigParam('bl_perfLoadPriceForAddList')) { $this->getBaseObject()->disablePriceLoad(); } $this->_aArray = []; switch ($myConfig->getConfigParam('iNewestArticlesMode')) { case 0: // switched off, do nothing break; case 1: // manually entered $this->loadActionArticles('oxnewest', $iLimit); break; case 2: $sArticleTable = getViewName('oxarticles'); if ($myConfig->getConfigParam('blNewArtByInsert')) { $sType = 'oxinsert'; } else { $sType = 'oxtimestamp'; } $sSelect = "select * from $sArticleTable "; $sSelect .= "where oxparentid = '' and " . $this->getBaseObject()->getSqlActiveSnippet() . 
" and oxissearch = 1 order by $sType desc "; if (!($iLimit = (int) $iLimit)) { $iLimit = $myConfig->getConfigParam('iNrofNewcomerArticles'); } $sSelect .= "limit " . $iLimit; $this->selectString($sSelect); break; } } /** * Load top 5 articles * * @param int $iLimit Select limit */ public function loadTop5Articles($iLimit = null) { //has module? $myConfig = \OxidEsales\Eshop\Core\Registry::getConfig(); if (!$myConfig->getConfigParam('bl_perfLoadPriceForAddList')) { $this->getBaseObject()->disablePriceLoad(); } switch ($myConfig->getConfigParam('iTop5Mode')) { case 0: // switched off, do nothing break; case 1: // manually entered $this->loadActionArticles('oxtop5', $iLimit); break; case 2: $sArticleTable = getViewName('oxarticles'); //by default limit 5 $sLimit = ($iLimit > 0) ? "limit " . $iLimit : 'limit 5'; $sSelect = "select * from $sArticleTable "; $sSelect .= "where " . $this->getBaseObject()->getSqlActiveSnippet() . " and $sArticleTable.oxissearch = 1 "; $sSelect .= "and $sArticleTable.oxparentid = '' and $sArticleTable.oxsoldamount>0 "; $sSelect .= "order by $sArticleTable.oxsoldamount desc $sLimit"; $this->selectString($sSelect); break; } } /** * Loads shop AktionArticles. * * @param string $sActionID Action id * @param int $iLimit Select limit * * @return null */ public function loadActionArticles($sActionID, $iLimit = null) { // Performance if (!trim($sActionID)) { return; } $sShopID = Registry::getConfig()->getShopId(); $sActionID = strtolower($sActionID); //echo $sSelect; $oBaseObject = $this->getBaseObject(); $sArticleTable = $oBaseObject->getViewName(); $sArticleFields = $oBaseObject->getSelectFields(); $oBase = oxNew(\OxidEsales\Eshop\Application\Model\Actions::class); $sActiveSql = $oBase->getSqlActiveSnippet(); $sViewName = $oBase->getViewName(); $sLimit = ($iLimit > 0) ? "limit " . 
$iLimit : ''; $sSelect = "select $sArticleFields from oxactions2article left join $sArticleTable on $sArticleTable.oxid = oxactions2article.oxartid left join $sViewName on $sViewName.oxid = oxactions2article.oxactionid where oxactions2article.oxshopid = :oxshopid and oxactions2article.oxactionid = :oxactionid and $sActiveSql and $sArticleTable.oxid is not null and " . $oBaseObject->getSqlActiveSnippet() . " order by oxactions2article.oxsort $sLimit"; $this->selectString($sSelect, [ ':oxshopid' => $sShopID, ':oxactionid' => $sActionID ]); } /** * Loads article cross selling * * @param string $sArticleId Article id * * @return null */ public function loadArticleCrossSell($sArticleId) { $myConfig = \OxidEsales\Eshop\Core\Registry::getConfig(); // Performance if (!$myConfig->getConfigParam('bl_perfLoadCrossselling')) { return null; } $oBaseObject = $this->getBaseObject(); $sArticleTable = $oBaseObject->getViewName(); $sSelect = "SELECT $sArticleTable.* FROM $sArticleTable INNER JOIN oxobject2article ON oxobject2article.oxobjectid=$sArticleTable.oxid WHERE oxobject2article.oxarticlenid = :oxarticlenid AND {$oBaseObject->getSqlActiveSnippet()} ORDER BY oxobject2article.oxsort"; // #525 bidirectional cross selling if ($myConfig->getConfigParam('blBidirectCross')) { $sSelect = " ( SELECT $sArticleTable.*, O2A1.OXSORT as sorting FROM $sArticleTable INNER JOIN oxobject2article AS O2A1 on ( O2A1.oxobjectid = $sArticleTable.oxid AND O2A1.oxarticlenid = :oxarticlenid ) WHERE 1 AND " . $oBaseObject->getSqlActiveSnippet() . " AND ($sArticleTable.oxid != :oxarticlenid) ) UNION ( SELECT $sArticleTable.*, O2A2.OXSORT as sorting FROM $sArticleTable INNER JOIN oxobject2article AS O2A2 ON ( O2A2.oxarticlenid = $sArticleTable.oxid AND O2A2.oxobjectid = :oxarticlenid ) WHERE 1 AND " . $oBaseObject->getSqlActiveSnippet() . 
" AND ($sArticleTable.oxid != :oxarticlenid) ) ORDER BY sorting"; } $this->setSqlLimit(0, $myConfig->getConfigParam('iNrofCrossellArticles')); $this->selectString($sSelect, [ ':oxarticlenid' => $sArticleId ]); } /** * Loads article accessories * * @param string $sArticleId Article id * * @return null */ public function loadArticleAccessoires($sArticleId) { $myConfig = \OxidEsales\Eshop\Core\Registry::getConfig(); // Performance if (!$myConfig->getConfigParam('bl_perfLoadAccessoires')) { return; } $oBaseObject = $this->getBaseObject(); $sArticleTable = $oBaseObject->getViewName(); $sSelect = "select $sArticleTable.* from oxaccessoire2article left join $sArticleTable on oxaccessoire2article.oxobjectid=$sArticleTable.oxid "; $sSelect .= "where oxaccessoire2article.oxarticlenid = :oxarticlenid "; $sSelect .= " and $sArticleTable.oxid is not null and " . $oBaseObject->getSqlActiveSnippet(); //sorting articles $sSelect .= " order by oxaccessoire2article.oxsort"; $this->selectString($sSelect, [ ':oxarticlenid' => $sArticleId ]); } /** * Loads only ID's and create Fake objects for cmp_categories. * * @param string $sCatId Category tree ID * @param array $aSessionFilter Like array ( catid => array( attrid => value,...)) */ public function loadCategoryIds($sCatId, $aSessionFilter) { $sArticleTable = $this->getBaseObject()->getViewName(); $sSelect = $this->_getCategorySelect($sArticleTable . 
'.oxid as oxid', $sCatId, $aSessionFilter); $this->_createIdListFromSql($sSelect); } /** * Loads articles for the give Category * * @param string $sCatId Category tree ID * @param array $aSessionFilter Like array ( catid => array( attrid => value,...)) * @param int $iLimit Limit * * @return integer total Count of Articles in this Category */ public function loadCategoryArticles($sCatId, $aSessionFilter, $iLimit = null) { $sArticleFields = $this->getBaseObject()->getSelectFields(); $sSelect = $this->_getCategorySelect($sArticleFields, $sCatId, $aSessionFilter); // calc count - we can not use count($this) here as we might have paging enabled // #1970C - if any filters are used, we can not use cached category article count $iArticleCount = null; if ($aSessionFilter) { $iArticleCount = \OxidEsales\Eshop\Core\DatabaseProvider::getDb()->getOne($this->_getCategoryCountSelect($sCatId, $aSessionFilter)); } if ($iLimit = (int) $iLimit) { $sSelect .= " LIMIT $iLimit"; } $this->selectString($sSelect); if ($iArticleCount !== null) { return $iArticleCount; } // this select is FAST so no need to hazzle here with getNrOfArticles() return \OxidEsales\Eshop\Core\Registry::getUtilsCount()->getCatArticleCount($sCatId); } /** * Loads articles for the recommlist * * @deprecated since v5.3 (2016-06-17); Listmania will be moved to an own module. * * @param string $sRecommId Recommlist ID * @param string $sArticlesFilter Additional filter for recommlist's items */ public function loadRecommArticles($sRecommId, $sArticlesFilter = null) { $sSelect = $this->_getArticleSelect($sRecommId, $sArticlesFilter); $this->selectString($sSelect); } /** * Loads only ID's and create Fake objects. * * @deprecated since v5.3 (2016-06-17); Listmania will be moved to an own module. 
* * @param string $sRecommId Recommlist ID * @param string $sArticlesFilter Additional filter for recommlist's items */ public function loadRecommArticleIds($sRecommId, $sArticlesFilter) { $sSelect = $this->_getArticleSelect($sRecommId, $sArticlesFilter); $sArtView = getViewName('oxarticles'); $sPartial = substr($sSelect, strpos($sSelect, ' from ')); $sSelect = "select distinct $sArtView.oxid $sPartial "; $this->_createIdListFromSql($sSelect); } /** * Returns the appropriate SQL select * * @deprecated since v5.3 (2016-06-17); Listmania will be moved to an own module. * * @param string $sRecommId Recommlist ID * @param string $sArticlesFilter Additional filter for recommlist's items * * @return string */ protected function _getArticleSelect($sRecommId, $sArticlesFilter = null) // phpcs:ignore PSR2.Methods.MethodDeclaration.Underscore { $sRecommId = \OxidEsales\Eshop\Core\DatabaseProvider::getDb()->quote($sRecommId); $sArtView = getViewName('oxarticles'); $sSelect = "select distinct $sArtView.*, oxobject2list.oxdesc from oxobject2list "; $sSelect .= "left join $sArtView on oxobject2list.oxobjectid = $sArtView.oxid "; $sSelect .= "where (oxobject2list.oxlistid = $sRecommId) " . $sArticlesFilter; return $sSelect; } /** * Loads only ID's and create Fake objects for cmp_categories. * * @param string $sSearchStr Search string * @param string $sSearchCat Search within category * @param string $sSearchVendor Search within vendor * @param string $sSearchManufacturer Search within manufacturer */ public function loadSearchIds($sSearchStr = '', $sSearchCat = '', $sSearchVendor = '', $sSearchManufacturer = '') { $oDb = \OxidEsales\Eshop\Core\DatabaseProvider::getDb(); $sSearchCat = $sSearchCat ? $sSearchCat : null; $sSearchVendor = $sSearchVendor ? $sSearchVendor : null; $sSearchManufacturer = $sSearchManufacturer ? 
$sSearchManufacturer : null; $sWhere = null; if ($sSearchStr) { $sWhere = $this->_getSearchSelect($sSearchStr); } $sArticleTable = getViewName('oxarticles'); // longdesc field now is kept on different table $sDescJoin = $this->getDescriptionJoin(); // load the articles $sSelect = "select $sArticleTable.oxid, $sArticleTable.oxtimestamp from $sArticleTable $sDescJoin where "; // must be additional conditions in select if searching in category if ($sSearchCat) { $sO2CView = getViewName('oxobject2category'); $sSelect = "select $sArticleTable.oxid from $sO2CView as oxobject2category, $sArticleTable $sDescJoin "; $sSelect .= "where oxobject2category.oxcatnid=" . $oDb->quote($sSearchCat) . " and oxobject2category.oxobjectid=$sArticleTable.oxid and "; } $sSelect .= $this->getBaseObject()->getSqlActiveSnippet(); $sSelect .= " and $sArticleTable.oxparentid = '' and $sArticleTable.oxissearch = 1 "; // #671 if ($sSearchVendor) { $sSelect .= " and $sArticleTable.oxvendorid = " . $oDb->quote($sSearchVendor) . " "; } if ($sSearchManufacturer) { $sSelect .= " and $sArticleTable.oxmanufacturerid = " . $oDb->quote($sSearchManufacturer) . " "; } $sSelect .= $sWhere; if ($this->_sCustomSorting) { $sSelect .= " order by {$this->_sCustomSorting} "; } $this->_createIdListFromSql($sSelect); } /** * Loads Id list of appropriate price products * * @param float $dPriceFrom Starting price * @param float $dPriceTo Max price */ public function loadPriceIds($dPriceFrom, $dPriceTo) { $sSelect = $this->_getPriceSelect($dPriceFrom, $dPriceTo); $this->_createIdListFromSql($sSelect); } /** * Loads articles, that price is bigger than passed $dPriceFrom and smaller * than passed $dPriceTo. Returns count of selected articles. 
* * @param double $dPriceFrom Price from * @param double $dPriceTo Price to * @param object $oCategory Active category object * * @return integer */ public function loadPriceArticles($dPriceFrom, $dPriceTo, $oCategory = null) { $sSelect = $this->_getPriceSelect($dPriceFrom, $dPriceTo); startProfile("loadPriceArticles"); $this->selectString($sSelect); stopProfile("loadPriceArticles"); if (!$oCategory) { return $this->count(); } return \OxidEsales\Eshop\Core\Registry::getUtilsCount()->getPriceCatArticleCount($oCategory->getId(), $dPriceFrom, $dPriceTo); } /** * Loads Products for specified vendor * * @param string $sVendorId Vendor id */ public function loadVendorIDs($sVendorId) { $sSelect = $this->_getVendorSelect($sVendorId); $this->_createIdListFromSql($sSelect); } /** * Loads Products for specified Manufacturer * * @param string $sManufacturerId Manufacturer id */ public function loadManufacturerIDs($sManufacturerId) { $sSelect = $this->_getManufacturerSelect($sManufacturerId); $this->_createIdListFromSql($sSelect); } /** * Loads articles that belongs to vendor, passed by parameter $sVendorId. * Returns count of selected articles. * * @param string $sVendorId Vendor ID * @param object $oVendor Active vendor object * * @return integer */ public function loadVendorArticles($sVendorId, $oVendor = null) { $sSelect = $this->_getVendorSelect($sVendorId); $this->selectString($sSelect); return \OxidEsales\Eshop\Core\Registry::getUtilsCount()->getVendorArticleCount($sVendorId); } /** * Loads articles that belongs to Manufacturer, passed by parameter $sManufacturerId. * Returns count of selected articles. 
* * @param string $sManufacturerId Manufacturer ID * @param object $oManufacturer Active Manufacturer object * * @return integer */ public function loadManufacturerArticles($sManufacturerId, $oManufacturer = null) { $sSelect = $this->_getManufacturerSelect($sManufacturerId); $this->selectString($sSelect); return \OxidEsales\Eshop\Core\Registry::getUtilsCount()->getManufacturerArticleCount($sManufacturerId); } /** * Load the list by article ids * * @param array $aIds Article ID array * * @return null */ public function loadIds($aIds) { if (!count($aIds)) { $this->clear(); return; } $oBaseObject = $this->getBaseObject(); $sArticleTable = $oBaseObject->getViewName(); $sArticleFields = $oBaseObject->getSelectFields(); $oxIdsSql = implode(',', \OxidEsales\Eshop\Core\DatabaseProvider::getDb()->quoteArray($aIds)); $sSelect = "select $sArticleFields from $sArticleTable "; $sSelect .= "where $sArticleTable.oxid in ( " . $oxIdsSql . " ) and "; $sSelect .= $oBaseObject->getSqlActiveSnippet(); $this->selectString($sSelect); } /** * Loads the article list by orders ids * * @param array $aOrders user orders array * * @return null */ public function loadOrderArticles($aOrders) { if (!count($aOrders)) { $this->clear(); return; } foreach ($aOrders as $iKey => $oOrder) { $aOrdersIds[] = $oOrder->getId(); } $oBaseObject = $this->getBaseObject(); $sArticleTable = $oBaseObject->getViewName(); $sArticleFields = $oBaseObject->getSelectFields(); $sArticleFields = str_replace("`$sArticleTable`.`oxid`", "`oxorderarticles`.`oxartid` AS `oxid`", $sArticleFields); $sSelect = "SELECT $sArticleFields FROM oxorderarticles "; $sSelect .= "left join $sArticleTable on oxorderarticles.oxartid = $sArticleTable.oxid "; $sSelect .= "WHERE oxorderarticles.oxorderid IN ( '" . implode("','", $aOrdersIds) . 
"' ) "; $sSelect .= "order by $sArticleTable.oxid "; $this->selectString($sSelect); // not active or not available products must not have button "tobasket" $sNow = date('Y-m-d H:i:s'); foreach ($this as $oArticle) { if ( !$oArticle->oxarticles__oxactive->value && ( $oArticle->oxarticles__oxactivefrom->value > $sNow || $oArticle->oxarticles__oxactiveto->value < $sNow ) ) { $oArticle->setBuyableState(false); } } } /** * Loads list of low stock state products * * @param array $aBasketContents product ids array */ public function loadStockRemindProducts($aBasketContents) { if (is_array($aBasketContents) && count($aBasketContents)) { $oDb = \OxidEsales\Eshop\Core\DatabaseProvider::getDb(); foreach ($aBasketContents as $oBasketItem) { $aArtIds[] = $oDb->quote($oBasketItem->getProductId()); } $oBaseObject = $this->getBaseObject(); $sFieldNames = $oBaseObject->getSelectFields(); $sTable = $oBaseObject->getViewName(); // fetching actual db stock state and reminder status $sQ = "select {$sFieldNames} from {$sTable} where {$sTable}.oxid in ( " . implode(",", $aArtIds) . " ) and oxremindactive = '1' and oxstock <= oxremindamount"; $this->selectString($sQ); // updating stock reminder state if ($this->count()) { $sQ = "update {$sTable} set oxremindactive = '2' where :tableName in ( " . implode(",", $aArtIds) . " ) and oxremindactive = '1' and oxstock <= oxremindamount"; $oDb->execute($sQ, [':tableName' => $sTable . '.oxid']); } } } /** * Calculates, updates and returns next price renew time * * @return int */ public function renewPriceUpdateTime() { $iTimeToUpdate = $this->fetchNextUpdateTime(); // next day? 
$iCurrUpdateTime = \OxidEsales\Eshop\Core\Registry::getUtilsDate()->getTime(); $iNextUpdateTime = $iCurrUpdateTime + 3600 * 24; // renew next update time if (!$iTimeToUpdate || $iTimeToUpdate > $iNextUpdateTime) { $iTimeToUpdate = $iNextUpdateTime; } \OxidEsales\Eshop\Core\Registry::getConfig()->saveShopConfVar("num", "iTimeToUpdatePrices", $iTimeToUpdate); return $iTimeToUpdate; } /** * Updates prices where new price > 0, update time != '0000-00-00 00:00:00' * and <= CURRENT_TIMESTAMP. Returns update execution state (result of \OxidEsales\Eshop\Core\DatabaseProvider::execute()) * * @param bool $blForceUpdate if true, forces price update without timeout check, default value is FALSE * * @throws Exception * * @return mixed */ public function updateUpcomingPrices($blForceUpdate = false) { $blUpdated = false; if ($blForceUpdate || $this->_canUpdatePrices()) { // Transaction picks master automatically (see ESDEV-3804 and ESDEV-3822). $database = \OxidEsales\Eshop\Core\DatabaseProvider::getDb(); $database->startTransaction(); try { $sCurrUpdateTime = date("Y-m-d H:i:s", \OxidEsales\Eshop\Core\Registry::getUtilsDate()->getTime()); // Collect article id's for later recalculation. 
$sQ = "SELECT `oxid` FROM `oxarticles` WHERE `oxupdatepricetime` > 0 AND `oxupdatepricetime` <= :oxupdatepricetime"; $aUpdatedArticleIds = $database->getCol($sQ, [ ':oxupdatepricetime' => $sCurrUpdateTime ]); // updating oxarticles $blUpdated = $this->updateOxArticles($sCurrUpdateTime, $database); // renew update time in case update is not forced if (!$blForceUpdate) { $this->renewPriceUpdateTime(); } $database->commitTransaction(); } catch (Exception $exception) { $database->rollbackTransaction(); throw $exception; } // recalculate oxvarminprice and oxvarmaxprice for parent if (is_array($aUpdatedArticleIds)) { foreach ($aUpdatedArticleIds as $sArticleId) { $oArticle = oxNew(\OxidEsales\Eshop\Application\Model\Article::class); $oArticle->load($sArticleId); $oArticle->onChange(); } } $this->updateArticles($aUpdatedArticleIds); } return $blUpdated; } /** * fills the list simply with keys of the oxid and the position as value for the given sql * * @param string $sSql SQL select * @deprecated underscore prefix violates PSR12, will be renamed to "createIdListFromSql" in next major */ protected function _createIdListFromSql($sSql) // phpcs:ignore PSR2.Methods.MethodDeclaration.Underscore { $rs = \OxidEsales\Eshop\Core\DatabaseProvider::getDb(\OxidEsales\Eshop\Core\DatabaseProvider::FETCH_MODE_ASSOC)->select($sSql); if ($rs != false && $rs->count() > 0) { while (!$rs->EOF) { $rs->fields = array_change_key_case($rs->fields, CASE_LOWER); $this[$rs->fields['oxid']] = $rs->fields['oxid']; //only the oxid $rs->fetchRow(); } } } /** * Returns sql to fetch ids of articles fitting current filter * * @param string $sCatId category id * @param array $aFilter filters for this category * * @return string * @deprecated underscore prefix violates PSR12, will be renamed to "getFilterIdsSql" in next major */ protected function _getFilterIdsSql($sCatId, $aFilter) // phpcs:ignore PSR2.Methods.MethodDeclaration.Underscore { $sO2CView = getViewName('oxobject2category'); $sO2AView = 
getViewName('oxobject2attribute'); $sFilter = ''; $iCnt = 0; $oDb = \OxidEsales\Eshop\Core\DatabaseProvider::getDb(); foreach ($aFilter as $sAttrId => $sValue) { if ($sValue) { if ($sFilter) { $sFilter .= ' or '; } $sValue = $oDb->quote($sValue); $sAttrId = $oDb->quote($sAttrId); $sFilter .= "( oa.oxattrid = {$sAttrId} and oa.oxvalue = {$sValue} )"; $iCnt++; } } if ($sFilter) { $sFilter = "WHERE $sFilter "; } $sFilterSelect = "select oc.oxobjectid as oxobjectid, count(*) as cnt from "; $sFilterSelect .= "(SELECT * FROM $sO2CView WHERE $sO2CView.oxcatnid = '$sCatId' GROUP BY $sO2CView.oxobjectid, $sO2CView.oxcatnid) as oc "; $sFilterSelect .= "INNER JOIN $sO2AView as oa ON ( oa.oxobjectid = oc.oxobjectid ) "; return $sFilterSelect . "{$sFilter} GROUP BY oa.oxobjectid HAVING cnt = $iCnt "; } /** * Returns filtered articles sql "oxid in (filtered ids)" part * * @param string $sCatId category id * @param array $aFilter filters for this category * * @return string * @deprecated underscore prefix violates PSR12, will be renamed to "getFilterSql" in next major */ protected function _getFilterSql($sCatId, $aFilter) // phpcs:ignore PSR2.Methods.MethodDeclaration.Underscore { $sArticleTable = getViewName('oxarticles'); $aIds = \OxidEsales\Eshop\Core\DatabaseProvider::getDb(\OxidEsales\Eshop\Core\DatabaseProvider::FETCH_MODE_ASSOC)->getAll($this->_getFilterIdsSql($sCatId, $aFilter)); $sIds = ''; if ($aIds) { foreach ($aIds as $aArt) { if ($sIds) { $sIds .= ', '; } $sIds .= \OxidEsales\Eshop\Core\DatabaseProvider::getDb()->quote(current($aArt)); } if ($sIds) { $sFilterSql = " and $sArticleTable.oxid in ( $sIds ) "; } // bug fix #0001695: if no articles found return false } elseif (!(current($aFilter) == '' && count(array_unique($aFilter)) == 1)) { $sFilterSql = " and false "; } return $sFilterSql; } /** * Creates SQL Statement to load Articles, etc. * * @param string $sFields Fields which are loaded e.g. "oxid" or "*" etc. 
* @param string $sCatId Category tree ID * @param array $aSessionFilter Like array ( catid => array( attrid => value,...)) * * @return string SQL * @deprecated underscore prefix violates PSR12, will be renamed to "getCategorySelect" in next major */ protected function _getCategorySelect($sFields, $sCatId, $aSessionFilter) // phpcs:ignore PSR2.Methods.MethodDeclaration.Underscore { $sArticleTable = getViewName('oxarticles'); $sO2CView = getViewName('oxobject2category'); // ---------------------------------- // sorting $sSorting = ''; if ($this->_sCustomSorting) { $sSorting = " {$this->_sCustomSorting} , "; } // ---------------------------------- // filtering ? $sFilterSql = ''; $iLang = \OxidEsales\Eshop\Core\Registry::getLang()->getBaseLanguage(); if ($aSessionFilter && isset($aSessionFilter[$sCatId][$iLang])) { $sFilterSql = $this->_getFilterSql($sCatId, $aSessionFilter[$sCatId][$iLang]); } $oDb = \OxidEsales\Eshop\Core\DatabaseProvider::getDb(); $sSelect = "SELECT $sFields, $sArticleTable.oxtimestamp FROM $sO2CView as oc left join $sArticleTable ON $sArticleTable.oxid = oc.oxobjectid WHERE " . $this->getBaseObject()->getSqlActiveSnippet() . " and $sArticleTable.oxparentid = '' and oc.oxcatnid = " . $oDb->quote($sCatId) . " $sFilterSql ORDER BY $sSorting oc.oxpos, oc.oxobjectid "; return $sSelect; } /** * Creates SQL Statement to load Articles Count, etc. * * @param string $sCatId Category tree ID * @param array $aSessionFilter Like array ( catid => array( attrid => value,...)) * * @return string SQL * @deprecated underscore prefix violates PSR12, will be renamed to "getCategoryCountSelect" in next major */ protected function _getCategoryCountSelect($sCatId, $aSessionFilter) // phpcs:ignore PSR2.Methods.MethodDeclaration.Underscore { $sArticleTable = getViewName('oxarticles'); $sO2CView = getViewName('oxobject2category'); // ---------------------------------- // filtering ? 
$sFilterSql = ''; $iLang = \OxidEsales\Eshop\Core\Registry::getLang()->getBaseLanguage(); if ($aSessionFilter && isset($aSessionFilter[$sCatId][$iLang])) { $sFilterSql = $this->_getFilterSql($sCatId, $aSessionFilter[$sCatId][$iLang]); } $oDb = \OxidEsales\Eshop\Core\DatabaseProvider::getDb(); $sSelect = "SELECT COUNT(*) FROM $sO2CView as oc left join $sArticleTable ON $sArticleTable.oxid = oc.oxobjectid WHERE " . $this->getBaseObject()->getSqlActiveSnippet() . " and $sArticleTable.oxparentid = '' and oc.oxcatnid = " . $oDb->quote($sCatId) . " $sFilterSql "; return $sSelect; } /** * Forms and returns SQL query string for search in DB. * * @param string $sSearchString searching string * * @return string * @deprecated underscore prefix violates PSR12, will be renamed to "getSearchSelect" in next major */ protected function _getSearchSelect($sSearchString) // phpcs:ignore PSR2.Methods.MethodDeclaration.Underscore { // check if it has string at all if (!$sSearchString || !str_replace(' ', '', $sSearchString)) { return ''; } $oDb = \OxidEsales\Eshop\Core\DatabaseProvider::getDb(); $myConfig = \OxidEsales\Eshop\Core\Registry::getConfig(); $sArticleTable = $this->getBaseObject()->getViewName(); $aSearch = explode(' ', $sSearchString); $sSearch = ' and ( '; $blSep = false; // #723 if ($myConfig->getConfigParam('blSearchUseAND')) { $sSearchSep = ' and '; } else { $sSearchSep = ' or '; } $aSearchCols = $myConfig->getConfigParam('aSearchCols'); $myUtilsString = \OxidEsales\Eshop\Core\Registry::getUtilsString(); foreach ($aSearch as $sSearchString) { if (!strlen($sSearchString)) { continue; } if ($blSep) { $sSearch .= $sSearchSep; } $blSep2 = false; $sSearch .= '( '; $sUml = $myUtilsString->prepareStrForSearch($sSearchString); foreach ($aSearchCols as $sField) { if ($blSep2) { $sSearch .= ' or '; } // as long description now is on different table table must differ $sSearchTable = $this->getSearchTableName($sArticleTable, $sField); $sSearch .= $sSearchTable . '.' . $sField . 
' like ' . $oDb->quote('%' . $sSearchString . '%') . ' '; if ($sUml) { $sSearch .= ' or ' . $sSearchTable . '.' . $sField . ' like ' . $oDb->quote('%' . $sUml . '%'); } $blSep2 = true; } $sSearch .= ' ) '; $blSep = true; } $sSearch .= ' ) '; return $sSearch; } /** * Builds SQL for selecting articles by price * * @param double $dPriceFrom Starting price * @param double $dPriceTo Max price * * @return string * @deprecated underscore prefix violates PSR12, will be renamed to "getPriceSelect" in next major */ protected function _getPriceSelect($dPriceFrom, $dPriceTo) // phpcs:ignore PSR2.Methods.MethodDeclaration.Underscore { $oBaseObject = $this->getBaseObject(); $sArticleTable = $oBaseObject->getViewName(); $sSelectFields = $oBaseObject->getSelectFields(); $sSelect = "select {$sSelectFields} from {$sArticleTable} where oxvarminprice >= 0 "; $sSelect .= $dPriceTo ? "and oxvarminprice <= " . (double) $dPriceTo . " " : " "; $sSelect .= $dPriceFrom ? "and oxvarminprice >= " . (double) $dPriceFrom . " " : " "; $sSelect .= " and " . $oBaseObject->getSqlActiveSnippet() . " and {$sArticleTable}.oxissearch = 1"; if (!$this->_sCustomSorting) { $sSelect .= " order by {$sArticleTable}.oxvarminprice asc , {$sArticleTable}.oxid"; } else { $sSelect .= " order by {$this->_sCustomSorting}, {$sArticleTable}.oxid "; } return $sSelect; } /** * Builds vendor select SQL statement * * @param string $sVendorId Vendor ID * * @return string * @deprecated underscore prefix violates PSR12, will be renamed to "getVendorSelect" in next major */ protected function _getVendorSelect($sVendorId) // phpcs:ignore PSR2.Methods.MethodDeclaration.Underscore { $sArticleTable = getViewName('oxarticles'); $oBaseObject = $this->getBaseObject(); $sFieldNames = $oBaseObject->getSelectFields(); $sSelect = "select $sFieldNames from $sArticleTable "; $sSelect .= "where $sArticleTable.oxvendorid = " . \OxidEsales\Eshop\Core\DatabaseProvider::getDb()->quote($sVendorId) . " "; $sSelect .= " and " . 
$oBaseObject->getSqlActiveSnippet() . " and $sArticleTable.oxparentid = '' "; if ($this->_sCustomSorting) { $sSelect .= " ORDER BY {$this->_sCustomSorting} "; } return $sSelect; } /** * Builds Manufacturer select SQL statement * * @param string $sManufacturerId Manufacturer ID * * @return string * @deprecated underscore prefix violates PSR12, will be renamed to "getManufacturerSelect" in next major */ protected function _getManufacturerSelect($sManufacturerId) // phpcs:ignore PSR2.Methods.MethodDeclaration.Underscore { $sArticleTable = getViewName('oxarticles'); $oBaseObject = $this->getBaseObject(); $sFieldNames = $oBaseObject->getSelectFields(); $sSelect = "select $sFieldNames from $sArticleTable "; $sSelect .= "where $sArticleTable.oxmanufacturerid = " . \OxidEsales\Eshop\Core\DatabaseProvider::getDb()->quote($sManufacturerId) . " "; $sSelect .= " and " . $oBaseObject->getSqlActiveSnippet() . " and $sArticleTable.oxparentid = '' "; if ($this->_sCustomSorting) { $sSelect .= " ORDER BY {$this->_sCustomSorting} "; } return $sSelect; } /** * Checks if price update can be executed - current time > next price update time * * @return bool * @deprecated underscore prefix violates PSR12, will be renamed to "canUpdatePrices" in next major */ protected function _canUpdatePrices() // phpcs:ignore PSR2.Methods.MethodDeclaration.Underscore { $oConfig = \OxidEsales\Eshop\Core\Registry::getConfig(); $blCan = false; // crontab is off? if (!$oConfig->getConfigParam("blUseCron")) { $iTimeToUpdate = $oConfig->getConfigParam("iTimeToUpdatePrices"); if (!$iTimeToUpdate || $iTimeToUpdate <= \OxidEsales\Eshop\Core\Registry::getUtilsDate()->getTime()) { $blCan = true; } } return $blCan; } /** * Method fetches next update time for renewing price update time. * * @return string */ protected function fetchNextUpdateTime() { // Function is called inside a transaction or from admin backend which uses master connection only. 
// Transaction picks master automatically (see ESDEV-3804 and ESDEV-3822). $database = \OxidEsales\Eshop\Core\DatabaseProvider::getDb(); // fetching next update time $sQ = $this->getQueryToFetchNextUpdateTime(); $iTimeToUpdate = $database->getOne(sprintf($sQ, "`oxarticles`")); return $iTimeToUpdate; } /** * Returns query to fetch next update time. * * @return string */ protected function getQueryToFetchNextUpdateTime() { return "select unix_timestamp( oxupdatepricetime ) from %s where oxupdatepricetime > 0 order by oxupdatepricetime asc"; } /** * Updates article. * * @param string $sCurrUpdateTime * @param DatabaseInterface $oDb * * @return mixed */ protected function updateOxArticles($sCurrUpdateTime, $oDb) { $sQ = $this->getQueryToUpdateOxArticle($sCurrUpdateTime); $blUpdated = $oDb->execute(sprintf($sQ, "`oxarticles`")); return $blUpdated; } /** * Method returns query to update article. * * @param string $sCurrUpdateTime * * @return string */ protected function getQueryToUpdateOxArticle($sCurrUpdateTime) { $sQ = "UPDATE %s SET `oxprice` = IF( `oxupdateprice` > 0, `oxupdateprice`, `oxprice` ), `oxpricea` = IF( `oxupdatepricea` > 0, `oxupdatepricea`, `oxpricea` ), `oxpriceb` = IF( `oxupdatepriceb` > 0, `oxupdatepriceb`, `oxpriceb` ), `oxpricec` = IF( `oxupdatepricec` > 0, `oxupdatepricec`, `oxpricec` ), `oxupdatepricetime` = 0, `oxupdateprice` = 0, `oxupdatepricea` = 0, `oxupdatepriceb` = 0, `oxupdatepricec` = 0 WHERE `oxupdatepricetime` > 0 AND `oxupdatepricetime` <= '{$sCurrUpdateTime}'"; return $sQ; } /** * Method is used for overloading. * * @param array $aUpdatedArticleIds */ protected function updateArticles($aUpdatedArticleIds) { } /** * Get description join. Needed in case of searching for data in table oxartextends or its views. 
* * @return string */ protected function getDescriptionJoin() { $table = Registry::get(\OxidEsales\Eshop\Core\TableViewNameGenerator::class)->getViewName('oxarticles'); $descriptionJoin = ''; $searchColumns = \OxidEsales\Eshop\Core\Registry::getConfig()->getConfigParam('aSearchCols'); if (is_array($searchColumns) && in_array('oxlongdesc', $searchColumns)) { $viewName = getViewName('oxartextends'); $descriptionJoin = " LEFT JOIN $viewName ON {$viewName}.oxid={$table}.oxid "; } return $descriptionJoin; } /** * Get search table name. * Needed in case of searching for data in table oxartextends or its views. * * @param string $table * @param string $field Chose table depending on field. * * @return string */ protected function getSearchTableName($table, $field) { $searchTable = $table; if ($field == 'oxlongdesc') { $searchTable = Registry::get(\OxidEsales\Eshop\Core\TableViewNameGenerator::class)->getViewName('oxartextends'); } return $searchTable; } }
{ "pile_set_name": "Github" }
/*-----------------------------------------------------------------------------+ Copyright (c) 2010-2010: Joachim Faulhaber +------------------------------------------------------------------------------+ Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENCE.txt or copy at http://www.boost.org/LICENSE_1_0.txt) +-----------------------------------------------------------------------------*/ #ifndef BOOST_ICL_ASSOCIATIVE_INTERVAL_CONTAINER_HPP_JOFA_101023 #define BOOST_ICL_ASSOCIATIVE_INTERVAL_CONTAINER_HPP_JOFA_101023 #include <boost/icl/impl_config.hpp> #include <boost/icl/concept/comparable.hpp> #include <boost/icl/concept/joinable.hpp> #include <boost/icl/concept/container.hpp> #include <boost/icl/concept/interval_associator_base.hpp> #include <boost/icl/concept/interval_set.hpp> #include <boost/icl/concept/interval_map.hpp> #include <boost/icl/concept/interval_associator.hpp> #include <boost/icl/iterator.hpp> #endif
{ "pile_set_name": "Github" }
// Fixed, full-viewport-height sidebar pinned to the left edge.
.sidebar {
  overflow: hidden;
  height: 100vh;
  border-right: 1px solid #ddd;
  position: fixed;
  left: 0;
  top: 0;
}

// Search input area at the top of the sidebar.
.search-box {
  padding: 15px 20px;
  border-bottom: 1px solid #ddd;
}

// Muted icon (black at 25% alpha) shown inside the search box.
.search-icon {
  color: rgba($color: #000000, $alpha: 0.25);
}

// Scrollable middle section listing workspaces.
// NOTE(review): the 136px offset presumably equals the combined height of
// .search-box and .actions — confirm if either of those changes size.
.workspace-list {
  overflow: auto;
  width: 256px;
  height: calc(100vh - 136px);
  padding: 0;
  margin-bottom: 0;
}

// Action buttons pinned at the bottom of the sidebar.
.actions {
  padding: 20px;
  text-align: center;
  border-top: 1px solid #ddd;
}
{ "pile_set_name": "Github" }
// Unit tests for the <ColumnForm /> component: a form with a title input plus
// "Add" (confirm) and "Cancel" buttons, exercised via React Testing Library.
import React from 'react'
import { render, fireEvent } from '@testing-library/react'

import ColumnForm from './'

describe('<ColumnForm />', () => {
  let subject, onConfirm, onCancel

  // Renders a fresh component with mock callbacks so each test is isolated.
  function mount() {
    onConfirm = jest.fn()
    onCancel = jest.fn()

    subject = render(<ColumnForm onConfirm={onConfirm} onCancel={onCancel} />)
  }

  beforeEach(mount)
  afterEach(() => {
    // Drop references so state cannot leak between tests.
    subject = onConfirm = onCancel = undefined
  })

  it('renders an input asking for a column title', () => {
    expect(subject.container.querySelector('input')).toBeInTheDocument()
  })

  it('focus on the input', () => {
    expect(subject.container.querySelector('input')).toHaveFocus()
  })

  describe('when the user clicks confirm the input', () => {
    describe('when the user has typed a column title', () => {
      beforeEach(() => {
        // Type a title, then confirm via the "Add" button.
        fireEvent.change(subject.container.querySelector('input'), {
          target: { value: 'Column Title' }
        })
        fireEvent.click(subject.queryByText('Add'))
      })

      it('calls the onConfirm prop passing the column title', () => {
        expect(onConfirm).toHaveBeenCalledTimes(1)
        expect(onConfirm).toHaveBeenCalledWith('Column Title')
      })

      it('does not call the onCancel prop', () => {
        expect(onCancel).not.toHaveBeenCalled()
      })
    })

    describe('when the user has not typed a column title', () => {
      beforeEach(() => {
        // Confirm with an empty input: the component must ignore the click.
        fireEvent.click(subject.queryByText('Add'))
      })

      it('does not call the onConfirm prop', () => {
        expect(onConfirm).not.toHaveBeenCalled()
      })

      it('does not call the onCancel prop', () => {
        expect(onCancel).not.toHaveBeenCalled()
      })
    })
  })

  describe('when the user cancels the input', () => {
    beforeEach(() => {
      fireEvent.click(subject.queryByText('Cancel'))
    })

    it('calls the onCancel prop', () => {
      expect(onCancel).toHaveBeenCalledTimes(1)
    })

    it('does not call the onConfirm prop', () => {
      expect(onConfirm).not.toHaveBeenCalled()
    })
  })
})
{ "pile_set_name": "Github" }
package org.nifty.examples.processing;

/**
 * Minimal example application that prints a greeting to standard output.
 */
public class App {

    /**
     * Program entry point.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(final String[] args) {
        final String greeting = "Hello World!";
        System.out.println(greeting);
    }
}
{ "pile_set_name": "Github" }
/dts-v1/; /memreserve/ 0xdeadbeef00000000 0x100000; /memreserve/ 123456789 010000; / { compatible = "test_tree1"; prop-int = <0xdeadbeef>; prop-str = "hello world"; subnode@1 { compatible = "subnode1"; subsubnode { compatible = "subsubnode1", "subsubnode"; prop-int = <0xdeadbeef>; }; ss1 { }; }; subnode@2 { linux,phandle = <0x2000>; prop-int = <123456789>; subsubnode@0 { phandle = <0x2001>; compatible = "subsubnode2", "subsubnode"; prop-int = <0726746425>; }; ss2 { }; }; };
{ "pile_set_name": "Github" }
508345e32a9382f6b1e95895536ca200da6855e0
{ "pile_set_name": "Github" }
/* * Copyright (c) 2012, Michael Lehn, Klaus Pototzky * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1) Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2) Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * 3) Neither the name of the FLENS development group nor the names of * its contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ /* Based on * SUBROUTINE DGBTRS( TRANS, N, KL, KU, NRHS, AB, LDAB, IPIV, B, LDB, $ INFO ) SUBROUTINE ZGBTRS( TRANS, N, KL, KU, NRHS, AB, LDAB, IPIV, B, LDB, $ INFO ) * * -- LAPACK routine (version 3.3.1) -- * -- LAPACK is a software package provided by Univ. of Tennessee, -- * -- Univ. of California Berkeley, Univ. 
of Colorado Denver and NAG Ltd..-- * -- April 2011 -- * */ #ifndef FLENS_LAPACK_GB_TRS_H #define FLENS_LAPACK_GB_TRS_H 1 #include <flens/lapack/typedefs.h> #include <flens/matrixtypes/matrixtypes.h> #include <flens/vectortypes/vectortypes.h> namespace flens { namespace lapack { //== (gb)trs =================================================================== // // Real and complex variant // template <typename MA, typename VPIV, typename MB> typename RestrictTo<IsGbMatrix<MA>::value && IsIntegerDenseVector<VPIV>::value && IsGeMatrix<MB>::value, void>::Type trs(Transpose trans, const MA &A, const VPIV &piv, MB &&B); //== Variant for convenience: Rhs b is vector ================================= // // General Matrix: Rhs b is vector // template <typename MA, typename VPIV, typename VB> typename RestrictTo<IsGbMatrix<MA>::value && IsIntegerDenseVector<VPIV>::value && IsDenseVector<VB>::value, void>::Type trs(Transpose trans, const MA &A, const VPIV &piv, VB &&b); } } // namespace lapack, flens #endif // FLENS_LAPACK_GB_TRS_H
{ "pile_set_name": "Github" }
// Regression test: exercises for-in enumeration when the enumerated object is
// swapped for a different one partway through the loop. The loop must keep
// iterating the property names of the ORIGINAL object while reads (o[s]) go
// through the replacement.
function foo(o_) {
    var o = o_;
    var result = 0;
    for (var s in o) {
        result += o[s];
        // After the running total reaches 3, replace the target object with one
        // that has the same property names but different values.
        if (result >= 3)
            o = {0:1, 1:2, b:4, a:3};
    }
    return result;
}
// NOTE(review): noInline is presumably a test-harness builtin (e.g. the
// JavaScriptCore jsc shell) that prevents inlining so foo compiles on its own.
noInline(foo);

// Warm the function up (10000 calls) so it tiers up to the optimizing JIT,
// verifying the result every iteration.
// Expected trace for {0:0, 1:1, a:2, b:3}: 0 + 1 + 2 = 3 triggers the swap,
// then the remaining key "b" reads 4 from the new object, giving 7.
for (var i = 0; i < 10000; ++i) {
    var result = foo({0:0, 1:1, a:2, b:3});
    if (result != 7)
        throw "Error: bad result: " + result;
}
{ "pile_set_name": "Github" }
package de.vogella.recursion; public class Factorial { public static long factor(long n) { // Base case if (n == 1) { return n; } // Reductions steps return n * factor(n - 1); } public static void main(String[] args) { assert (factor(5) == 120); assert (factor(6) == 720); } }
{ "pile_set_name": "Github" }
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN"> <html> <head> <meta http-equiv="Content-Type" content="text/html;charset=utf-8"> <title>DBIRecord</title> <META HTTP-EQUIV="EXPIRES" CONTENT=0> <link rel="stylesheet" href="../../../../docs.css"> </head> <body> <br> <h1>Io Reference</h1> <br><br><br> <br><br><br> <a class='column' href='../../index.html'>Databases</a> &nbsp;&nbsp;<font color=#ccc>/</font>&nbsp;&nbsp; <a class='column' href='../index.html'>DBI</a> &nbsp;&nbsp;<font color=#ccc>/</font>&nbsp;&nbsp; <b>DBIRecord</b> <br><br><br> <br><br><br> <table border=0 cellspacing=0 style="margin-left:8em; width:40em; line-height:1.2em;"> <tr> <td align=right></td> <td></td> <td>A DBI Record. When utilizing `foreach' or `populate' methods of a DBIResult object, you can pass an optional Object cloned from DBIRecord. This object will be populated with the row contents making it possible to write objects that represent your SQL results. A simple example would be: <pre> Person := DBIRecord clone do (fullName := method(firstName.." "..lastName)) q := conn query("SELECT id, firstName, lastName FROM people") q foreach(Person, p, writeln("Name = ", p fullName)) </pre> As you can see, fullName was not in the SQL query, however, a dynamic method in your Person class. DBIRecord in and of itself provides no real functionality. It simply acts as an Object and stores the values from the SQL query into a Map. You can access the field information: <pre> o := r populate(Person) o firstName // would retrieve the firstName value of the SQL query o setFirstName("John") // would update the object's firstName value to be John </pre> Do not confuse the above example as updating the actual database. The call to setFirstName <i>only</i> updates the objects representation of firstName. 
</td></tr> <tr><td colspan=3>&nbsp;</td></tr> <tr><td colspan=3>&nbsp;</td></tr> <tr><td colspan=3>&nbsp;</td></tr> <tr> <td align=right> </td> <td></td> <td> <hr align=left color=#ddd height=1> <br><br> <font color=#888> <a name="DBIRecord-forward"></a><b> forward </b> <p> <div class=slotDescription> Private method that implements getting and setting values. </font> </div> <a name="DBIRecord-with"></a><b> with(aMap) </b> <p> <div class=slotDescription> Returns a new DBIRecord instance for the given value map. </div> </td> </tr> </table> <br><br><br><br><br> </body> </html>
{ "pile_set_name": "Github" }
//===- LangOptions.h - C Language Family Language Options -------*- C++ -*-===// // // The LLVM Compiler Infrastructure // // This file is distributed under the University of Illinois Open Source // License. See LICENSE.TXT for details. // //===----------------------------------------------------------------------===// // /// \file /// \brief Defines the clang::LangOptions interface. // //===----------------------------------------------------------------------===// #ifndef LLVM_CLANG_BASIC_LANGOPTIONS_H #define LLVM_CLANG_BASIC_LANGOPTIONS_H #include "clang/Basic/CommentOptions.h" #include "clang/Basic/LLVM.h" #include "clang/Basic/ObjCRuntime.h" #include "clang/Basic/Sanitizers.h" #include "clang/Basic/Visibility.h" #include "llvm/ADT/StringRef.h" #include "llvm/ADT/Triple.h" #include <string> #include <vector> namespace clang { /// Bitfields of LangOptions, split out from LangOptions in order to ensure that /// this large collection of bitfields is a trivial class type. class LangOptionsBase { public: // Define simple language options (with no accessors). #define LANGOPT(Name, Bits, Default, Description) unsigned Name : Bits; #define ENUM_LANGOPT(Name, Type, Bits, Default, Description) #include "clang/Basic/LangOptions.def" protected: // Define language options of enumeration type. These are private, and will // have accessors (below). #define LANGOPT(Name, Bits, Default, Description) #define ENUM_LANGOPT(Name, Type, Bits, Default, Description) \ unsigned Name : Bits; #include "clang/Basic/LangOptions.def" }; /// \brief Keeps track of the various options that can be /// enabled, which controls the dialect of C or C++ that is accepted. class LangOptions : public LangOptionsBase { public: using Visibility = clang::Visibility; enum GCMode { NonGC, GCOnly, HybridGC }; enum StackProtectorMode { SSPOff, SSPOn, SSPStrong, SSPReq }; enum SignedOverflowBehaviorTy { // Default C standard behavior. 
SOB_Undefined, // -fwrapv SOB_Defined, // -ftrapv SOB_Trapping }; // FIXME: Unify with TUKind. enum CompilingModuleKind { /// Not compiling a module interface at all. CMK_None, /// Compiling a module from a module map. CMK_ModuleMap, /// Compiling a C++ modules TS module interface unit. CMK_ModuleInterface }; enum PragmaMSPointersToMembersKind { PPTMK_BestCase, PPTMK_FullGeneralitySingleInheritance, PPTMK_FullGeneralityMultipleInheritance, PPTMK_FullGeneralityVirtualInheritance }; enum DefaultCallingConvention { DCC_None, DCC_CDecl, DCC_FastCall, DCC_StdCall, DCC_VectorCall, DCC_RegCall }; enum AddrSpaceMapMangling { ASMM_Target, ASMM_On, ASMM_Off }; enum MSVCMajorVersion { MSVC2010 = 16, MSVC2012 = 17, MSVC2013 = 18, MSVC2015 = 19 }; /// Clang versions with different platform ABI conformance. enum class ClangABI { /// Attempt to be ABI-compatible with code generated by Clang 3.8.x /// (SVN r257626). This causes <1 x long long> to be passed in an /// integer register instead of an SSE register on x64_64. Ver3_8, /// Attempt to be ABI-compatible with code generated by Clang 4.0.x /// (SVN r291814). This causes move operations to be ignored when /// determining whether a class type can be passed or returned directly. Ver4, /// Attempt to be ABI-compatible with code generated by Clang 6.0.x /// (SVN r321711). This causes determination of whether a type is /// standard-layout to ignore collisions between empty base classes /// and between base classes and member subobjects, which affects /// whether we reuse base class tail padding in some ABIs. Ver6, /// Conform to the underlying platform's C and C++ ABIs as closely /// as we can. Latest }; enum FPContractModeKind { // Form fused FP ops only where result will not be affected. FPC_Off, // Form fused FP ops according to FP_CONTRACT rules. FPC_On, // Aggressively fuse FP ops (E.g. FMA). FPC_Fast }; public: /// \brief Set of enabled sanitizers. 
SanitizerSet Sanitize; /// \brief Paths to blacklist files specifying which objects /// (files, functions, variables) should not be instrumented. std::vector<std::string> SanitizerBlacklistFiles; /// \brief Paths to the XRay "always instrument" files specifying which /// objects (files, functions, variables) should be imbued with the XRay /// "always instrument" attribute. /// WARNING: This is a deprecated field and will go away in the future. std::vector<std::string> XRayAlwaysInstrumentFiles; /// \brief Paths to the XRay "never instrument" files specifying which /// objects (files, functions, variables) should be imbued with the XRay /// "never instrument" attribute. /// WARNING: This is a deprecated field and will go away in the future. std::vector<std::string> XRayNeverInstrumentFiles; /// \brief Paths to the XRay attribute list files, specifying which objects /// (files, functions, variables) should be imbued with the appropriate XRay /// attribute(s). std::vector<std::string> XRayAttrListFiles; clang::ObjCRuntime ObjCRuntime; std::string ObjCConstantStringClass; /// \brief The name of the handler function to be called when -ftrapv is /// specified. /// /// If none is specified, abort (GCC-compatible behaviour). std::string OverflowHandler; /// The module currently being compiled as speficied by -fmodule-name. std::string ModuleName; /// \brief The name of the current module, of which the main source file /// is a part. If CompilingModule is set, we are compiling the interface /// of this module, otherwise we are compiling an implementation file of /// it. This starts as ModuleName in case -fmodule-name is provided and /// changes during compilation to reflect the current module. std::string CurrentModule; /// \brief The names of any features to enable in module 'requires' decls /// in addition to the hard-coded list in Module.cpp and the target features. /// /// This list is sorted. 
std::vector<std::string> ModuleFeatures; /// \brief Options for parsing comments. CommentOptions CommentOpts; /// \brief A list of all -fno-builtin-* function names (e.g., memset). std::vector<std::string> NoBuiltinFuncs; /// \brief Triples of the OpenMP targets that the host code codegen should /// take into account in order to generate accurate offloading descriptors. std::vector<llvm::Triple> OMPTargetTriples; /// \brief Name of the IR file that contains the result of the OpenMP target /// host code generation. std::string OMPHostIRFile; /// \brief Indicates whether the front-end is explicitly told that the /// input is a header file (i.e. -x c-header). bool IsHeaderFile = false; LangOptions(); // Define accessors/mutators for language options of enumeration type. #define LANGOPT(Name, Bits, Default, Description) #define ENUM_LANGOPT(Name, Type, Bits, Default, Description) \ Type get##Name() const { return static_cast<Type>(Name); } \ void set##Name(Type Value) { Name = static_cast<unsigned>(Value); } #include "clang/Basic/LangOptions.def" /// Are we compiling a module interface (.cppm or module map)? bool isCompilingModule() const { return getCompilingModule() != CMK_None; } /// Do we need to track the owning module for a local declaration? bool trackLocalOwningModule() const { return isCompilingModule() || ModulesLocalVisibility || ModulesTS; } bool isSignedOverflowDefined() const { return getSignedOverflowBehavior() == SOB_Defined; } bool isSubscriptPointerArithmetic() const { return ObjCRuntime.isSubscriptPointerArithmetic() && !ObjCSubscriptingLegacyRuntime; } bool isCompatibleWithMSVC(MSVCMajorVersion MajorVersion) const { return MSCompatibilityVersion >= MajorVersion * 10000000U; } /// \brief Reset all of the options that are not considered when building a /// module. void resetNonModularOptions(); /// \brief Is this a libc/libm function that is no longer recognized as a /// builtin because a -fno-builtin-* option has been specified? 
bool isNoBuiltinFunc(StringRef Name) const; /// \brief True if any ObjC types may have non-trivial lifetime qualifiers. bool allowsNonTrivialObjCLifetimeQualifiers() const { return ObjCAutoRefCount || ObjCWeak; } bool assumeFunctionsAreConvergent() const { return (CUDA && CUDAIsDevice) || OpenCL; } }; /// \brief Floating point control options class FPOptions { public: FPOptions() : fp_contract(LangOptions::FPC_Off) {} // Used for serializing. explicit FPOptions(unsigned I) : fp_contract(static_cast<LangOptions::FPContractModeKind>(I)) {} explicit FPOptions(const LangOptions &LangOpts) : fp_contract(LangOpts.getDefaultFPContractMode()) {} bool allowFPContractWithinStatement() const { return fp_contract == LangOptions::FPC_On; } bool allowFPContractAcrossStatement() const { return fp_contract == LangOptions::FPC_Fast; } void setAllowFPContractWithinStatement() { fp_contract = LangOptions::FPC_On; } void setAllowFPContractAcrossStatement() { fp_contract = LangOptions::FPC_Fast; } void setDisallowFPContract() { fp_contract = LangOptions::FPC_Off; } /// Used to serialize this. unsigned getInt() const { return fp_contract; } private: /// Adjust BinaryOperator::FPFeatures to match the bit-field size of this. unsigned fp_contract : 2; }; /// \brief Describes the kind of translation unit being processed. enum TranslationUnitKind { /// \brief The translation unit is a complete translation unit. TU_Complete, /// \brief The translation unit is a prefix to a translation unit, and is /// not complete. TU_Prefix, /// \brief The translation unit is a module. TU_Module }; } // namespace clang #endif // LLVM_CLANG_BASIC_LANGOPTIONS_H
{ "pile_set_name": "Github" }
{ "name": "Red Hat", "website": "https://www.redhat.com", "matches": [ { "search": "headers[server]", "regexp": "Red Hat" }, { "search": "headers[x-powered-by]", "regexp": "Red Hat" } ] }
{ "pile_set_name": "Github" }
/*
 * Scilab ( http://www.scilab.org/ ) - This file is part of Scilab
 * Copyright (C) DIGITEO - 2009 - Allan CORNET
 *
 * Copyright (C) 2012 - 2016 - Scilab Enterprises
 *
 * This file is hereby licensed under the terms of the GNU GPL v2.0,
 * pursuant to article 5.3.4 of the CeCILL v.2.1.
 * This file was originally licensed under the terms of the CeCILL v2.1,
 * and continues to be available under such terms.
 * For more information, see the COPYING file which you should have received
 * along with this program.
 *
 */
/*--------------------------------------------------------------------------*/
/*
 * Defines GRAPHIC_EXPORT_IMPEXP, the shared-library symbol visibility macro
 * for the graphic_export module.
 *
 * On MSVC it expands to __declspec(dllexport) when building the DLL itself
 * (GRAPHIC_EXPORT_EXPORTS defined) and __declspec(dllimport) for consumers;
 * on all other compilers it expands to nothing.
 */
#ifndef __DYNLIB_GRAPHIC_EXPORT_H__
#define __DYNLIB_GRAPHIC_EXPORT_H__

#ifdef _MSC_VER
#ifdef GRAPHIC_EXPORT_EXPORTS
#define GRAPHIC_EXPORT_IMPEXP __declspec(dllexport)
#else
#define GRAPHIC_EXPORT_IMPEXP __declspec(dllimport)
#endif
#else
#define GRAPHIC_EXPORT_IMPEXP
#endif

#endif /* __DYNLIB_GRAPHIC_EXPORT_H__ */
/*--------------------------------------------------------------------------*/
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <resources> <!-- ****** These strings are for toasts, snackbars, buttons, menu options items, and other strings that are used in individual example activities ****** --> <!-- Spinner for color picker example--> <string-array name="layer_spinner_array"> <item>Aigua</item> <item>Edifici</item> </string-array> <!--Default style names--> <string name="menu_map_style_streets">Carrers</string> <string name="menu_map_style_dark">Fosc</string> <string name="menu_map_style_light">Clar</string> <string name="menu_map_style_outdoors">A l\'aire lliure</string> <string name="menu_map_style_satellite">Satèl·lit</string> <string name="menu_map_style_satellite_streets">Carrers satèl·lit</string> <!--Move map instruction for toast--> <string name="move_map_instruction">Desplaça el mapa per afegir place\nmarker a la posició desitjada</string> <!--Zoom in and out map instruction for toast--> <string name="zoom_map_in_and_out_instruction">Apropa\'t i allunya\'t per veure com canvien les xifres a les agrupacions</string> <!--Geocode widget hint text--> <!--Tap on feature box instruction--> <string name="tap_on_feature_box_instruction">Toqueu al requadre delimitador</string> <!--Fragment below textview in support fragment activity--> <string name="fragment_in_card_below">Contenidor de fragment de mapa a la targeta de sota</string> <!--Offline manager button text--> <string name="download_button_text">Descarrega</string> <string name="list_button_text">Llista</string> <!--Basic user location permission toasts--> <string name="user_location_permission_explanation">Aquesta app necessita permisos per mostrar la seva funcionalitat.</string> <string name="user_location_permission_not_granted">No heu proporcionat permisos de localització.</string> <!-- Settings dialog --> <string name="settings_dialog_title">Ajustos</string> <string name="settings_dialog_positive_button_text">Desa</string> <string 
name="settings_dialog_negative_button_text">Cancel·lar</string> <string name="analytics_opt_out_textview">No consentiment a les analísi de seguiment</string> <string name="log_out_of_account_button">Sortir del compte de Mapbox</string> <string name="log_out_toast_confirm">Sessió tancada amb èxit</string> <!--Basic offline map--> <string name="basic_offline_deleted_toast">Mapa offlilne de Yosemite esborrat</string> <!--Marker strings for MarkerViewPluginActivity activity--> <!--Marker strings for DrawMarkerActivity activity--> <string name="draw_marker_options_title">Hola Món!</string> <string name="draw_marker_options_snippet">Benvingut al meu marcador.</string> <!--Marker strings for GeocodingActivity activity--> <!--Marker strings for DirectionsActivity activity--> <string name="directions_activity_toast_message">La ruta té %1$f metres.</string> <!--Strings for OfflineManager activity--> <string name="dialog_title">Nom de la nova regió</string> <string name="dialog_message">Descarrega la regió de mapa que estàs veient</string> <string name="dialog_positive_button">Descarrega</string> <string name="dialog_negative_button">Cancel·la</string> <string name="dialog_toast">El nom de la regió no pot ser buit.</string> <string name="end_progress_success">Regió descarregada amb èxit.</string> <string name="toast_no_regions_yet">Encara no tens cap regió.</string> <string name="toast_region_deleted">Regio esborrada</string> <string name="navigate_positive_button">Vés a </string> <string name="navigate_neutral_button_title">Esborra</string> <string name="navigate_negative_button_title">Cancel·la</string> <string name="navigate_title">Llista</string> <string name="region_name">Regió %1$d</string> <string name="set_region_name_hint">Posa el nom</string> <!--SimpleOfflineMapActivity activity--> <string name="simple_offline_end_progress_success">Regió descarregada amb èxit.</string> <!--FeatureCountActivity activity--> <string 
name="feature_count_snackbar_feature_size">%1$delements a la caixa</string> <!--QueryFeatureActivity activity--> <string name="query_feature_marker_title">Propietats:</string> <string name="query_feature_no_properties_found">Cap propietat per aquest element</string> <!--LocationPickerActivity activity--> <string name="location_picker_select_location_button_cancel">Cancel·la</string> <string name="location_picker_select_location_button_select">Escull una ubicació</string> <string name="location_picker_dropped_marker_snippet_no_results">Cap resultat</string> <!--PulsingLayerOpacityColorActivity activity--> <string name="fab_title_hotels">Hotels</string> <string name="fab_title_parks">Parcs</string> <string name="fab_title_attractions">Atraccions</string> <!--OffRouteActivity activity--> <!--LanguageSwitchActivity activity--> <string name="language_switch_options_menu_language_english">Anglès</string> <string name="language_switch_options_menu_language_french">Francès</string> <string name="language_switch_options_menu_language_russian">Rus</string> <string name="language_switch_options_menu_language_german">Alemany</string> <string name="language_switch_options_menu_language_spanish">Espanyol</string> <!--LanguageSwitchActivity activity--> <!--Tap on map instruction for toast--> <string name="tap_on_map_instruction">Toqueu en qualsevol lloc al mapa</string> <string name="san_francisco">San Francisco</string> <string name="los_angeles">Los Angeles</string> <string name="seattle">Seattle</string> <string name="new_orleans">Nova Orleans</string> <string name="chicago">Xicago</string> <string name="philadelphia">Filadèlfia</string> <string name="new_york">Nova York</string> <string name="atlanta">Atlanta</string> <string name="portland">Portland</string> <string name="denver">Denver</string> <string name="minneapolis">Minneapolis</string> <string name="miami">Miami</string> <string name="extrusions_category">Extrusions 3D</string> <string 
name="plugins_category">Connectors</string> <!--Landing activity--> <string name="tv_create_account_button">Crea un compte</string> <string name="tv_sign_in_to_account_button">Iniciar sessió</string> <string name="landing_cta_tv_text">Crea o entra el teu compte Mapbox per veure les darreres demostracions de Mapbox</string> <string name="skip_for_now_bottom_button">Omet per ara</string> <string name="whoops_error_dialog_title">Whoops!</string> <string name="whoops_error_dialog_message">Alguna cosa ha fallat amb l\'inici de sessió! Si us plau neteja la caché i les galetes del teu navegador.</string> <string name="whoops_error_dialog_ok_positive_button">D\'acord</string> <!--Loading activity--> <string name="loading_textview">Carregant&#8230;</string> <!--Space station toast--> <string name="space_station_toast">Apropa\'t a l\'estació espacial per veure-la en moviment</string> <!-- Extrusion light activity menu --> <string name="change_anchor">Canvia l\'àncora</string> <string name="change_intensity">Canvia la intensitat</string> </resources>
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <resources xmlns:ns1="urn:oasis:names:tc:xliff:document:1.2"> <string msgid="2518680582564677258" name="common_google_play_services_unknown_issue">"Google Play ಸೇವೆಗಳಲ್ಲಿ <ns1:g id="APP_NAME">%1$s</ns1:g> ಸಮಸ್ಯೆಯನ್ನು ಹೊಂದಿದೆ. ದಯವಿಟ್ಟು ಮತ್ತೆ ಪ್ರಯತ್ನಿಸಿ."</string> </resources>
{ "pile_set_name": "Github" }
// transform/transform-common.cc // Copyright 2009-2011 Saarland University; Microsoft Corporation // See ../../COPYING for clarification regarding multiple authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED // WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, // MERCHANTABLITY OR NON-INFRINGEMENT. // See the Apache 2 License for the specific language governing permissions and // limitations under the License. #include <vector> #include "base/kaldi-common.h" #include "transform/transform-common.h" namespace kaldi { void AffineXformStats::Init(int32 dim, int32 num_gs) { if (dim == 0) { if (num_gs != 0) { KALDI_WARN << "Ignoring 'num_gs' (=" << num_gs << ") argument since " << "dim = 0."; } beta_ = 0.0; K_.Resize(0, 0); G_.clear(); dim_ = 0; } else { beta_ = 0.0; K_.Resize(dim, dim + 1, kSetZero); G_.resize(num_gs); for (int32 i = 0; i < num_gs; i++) G_[i].Resize(dim + 1, kSetZero); dim_ = dim; } } void AffineXformStats::Write(std::ostream &out, bool binary) const { WriteToken(out, binary, "<DIMENSION>"); WriteBasicType(out, binary, dim_); if (!binary) out << '\n'; WriteToken(out, binary, "<BETA>"); WriteBasicType(out, binary, beta_); if (!binary) out << '\n'; WriteToken(out, binary, "<K>"); Matrix<BaseFloat> tmp_k(K_); tmp_k.Write(out, binary); WriteToken(out, binary, "<G>"); int32 g_size = static_cast<int32>(G_.size()); WriteBasicType(out, binary, g_size); if (!binary) out << '\n'; for (std::vector< SpMatrix<double> >::const_iterator itr = G_.begin(), end = G_.end(); itr != end; ++itr) { SpMatrix<BaseFloat> tmp_g(*itr); tmp_g.Write(out, binary); } } void AffineXformStats::Read(std::istream &in, bool 
binary, bool add) { ExpectToken(in, binary, "<DIMENSION>"); ReadBasicType(in, binary, &dim_); ExpectToken(in, binary, "<BETA>"); ReadBasicType(in, binary, &beta_); ExpectToken(in, binary, "<K>"); Matrix<BaseFloat> tmp_k; tmp_k.Read(in, binary); K_.Resize(tmp_k.NumRows(), tmp_k.NumCols()); if (add) { Matrix<double> tmp_k_d(tmp_k); K_.AddMat(1.0, tmp_k_d, kNoTrans); } else { K_.CopyFromMat(tmp_k, kNoTrans); } ExpectToken(in, binary, "<G>"); int32 g_size; ReadBasicType(in, binary, &g_size); G_.resize(g_size); SpMatrix<BaseFloat> tmp_g; SpMatrix<double> tmp_g_d; if (add) { tmp_g_d.Resize(tmp_g.NumRows()); } for (size_t i = 0; i < G_.size(); i++) { tmp_g.Read(in, binary, false /*no add*/); G_[i].Resize(tmp_g.NumRows()); if (add) { tmp_g_d.CopyFromSp(tmp_g); G_[i].AddSp(1.0, tmp_g_d); } else { G_[i].CopyFromSp(tmp_g); } } } void AffineXformStats::SetZero() { beta_ = 0.0; K_.SetZero(); for (std::vector< SpMatrix<double> >::iterator it = G_.begin(), end = G_.end(); it != end; ++it) { it->SetZero(); } } void AffineXformStats::CopyStats(const AffineXformStats &other) { KALDI_ASSERT(G_.size() == other.G_.size()); KALDI_ASSERT(dim_ == other.dim_); beta_ = other.beta_; K_.CopyFromMat(other.K_, kNoTrans); for (size_t i = 0; i < G_.size(); i++) G_[i].CopyFromSp(other.G_[i]); } void AffineXformStats::Add(const AffineXformStats &other) { KALDI_ASSERT(G_.size() == other.G_.size()); KALDI_ASSERT(dim_ == other.dim_); beta_ += other.beta_; K_.AddMat(1.0, other.K_, kNoTrans); for (size_t i = 0; i < G_.size(); i++) G_[i].AddSp(1.0, other.G_[i]); } bool ComposeTransforms(const Matrix<BaseFloat> &a, const Matrix<BaseFloat> &b, bool b_is_affine, Matrix<BaseFloat> *c) { if (b.NumRows() == 0 || a.NumCols() == 0) { KALDI_WARN << "Empty matrix in ComposeTransforms"; return false; } if (a.NumCols() == b.NumRows()) { c->Resize(a.NumRows(), b.NumCols()); c->AddMatMat(1.0, a, kNoTrans, b, kNoTrans, 0.0); // c = a * b. return true; } else if (a.NumCols() == b.NumRows()+1) { // a is affine. 
if (b_is_affine) { // append 0 0 0 0 ... 1 to b and multiply. Matrix<BaseFloat> b_ext(b.NumRows()+1, b.NumCols()); SubMatrix<BaseFloat> b_part(b_ext, 0, b.NumRows(), 0, b.NumCols()); b_part.CopyFromMat(b); b_ext(b.NumRows(), b.NumCols()-1) = 1.0; // so the last row is 0 0 0 0 ... 0 1 c->Resize(a.NumRows(), b.NumCols()); c->AddMatMat(1.0, a, kNoTrans, b_ext, kNoTrans, 0.0); // c = a * b_ext. } else { // extend b by 1 row and column with all zeros except a 1 on diagonal. Matrix<BaseFloat> b_ext(b.NumRows()+1, b.NumCols()+1); SubMatrix<BaseFloat> b_part(b_ext, 0, b.NumRows(), 0, b.NumCols()); b_part.CopyFromMat(b); b_ext(b.NumRows(), b.NumCols()) = 1.0; // so the last row is 0 0 0 0 ... 0 1; // rest of last column is zero (this is the offset term) c->Resize(a.NumRows(), b.NumCols()+1); c->AddMatMat(1.0, a, kNoTrans, b_ext, kNoTrans, 0.0); // c = a * b_ext. } return true; } else { KALDI_ERR << "ComposeTransforms: mismatched dimensions, a has " << a.NumCols() << " columns and b has " << b.NumRows() << " rows."; // this is fatal. return false; } } void ApplyAffineTransform(const MatrixBase<BaseFloat> &xform, VectorBase<BaseFloat> *vec) { int32 dim = xform.NumRows(); KALDI_ASSERT(dim > 0 && xform.NumCols() == dim+1 && vec->Dim() == dim); Vector<BaseFloat> tmp(dim+1); SubVector<BaseFloat> tmp_part(tmp, 0, dim); tmp_part.CopyFromVec(*vec); tmp(dim) = 1.0; // next line is: vec = 1.0 * xform * tmp + 0.0 * vec vec->AddMatVec(1.0, xform, kNoTrans, tmp, 0.0); } } // namespace kaldi
{ "pile_set_name": "Github" }
<?xml version='1.0' encoding='UTF-8' ?> <rss version="2.0" xmlns:dc="http://purl.org/dc/elements/1.1/"> <channel> <title>limetorrents.cc - RSS Feed</title> <link>http://www.limetorrents.cc/</link> <description>Latest Torrents RSS.</description> <language>en-us</language> <pubDate>Thu, 16 Feb 2017 05:48:36 +0200</pubDate> <lastBuildDate>Thu, 16 Feb 2017 05:48:36 +0200</lastBuildDate> <docs>http://blogs.law.harvard.edu/tech/rss</docs> <generator>limetorrents.cc RSS Generator 1.1</generator> <item> <title>The Expanse 2x04 (720p-HDTV-x264-SVA)[VTV]</title> <guid isPermaLink='true'>http://www.limetorrents.cc/The-Expanse-2x04-(720p-HDTV-x264-SVA)[VTV]-torrent-8643587.html</guid> <pubDate>16 Feb 2017 05:24:26 +0300</pubDate> <category>TV shows</category> <link>http://www.limetorrents.cc/The-Expanse-2x04-(720p-HDTV-x264-SVA)[VTV]-torrent-8643587.html</link> <size>880496711</size> <description> <![CDATA[ Category: <a href="http://www.limetorrents.cc/browse-torrents/TV-shows/">TV shows</a><br /> Seeds: 0<br />Leechers: 0<br />Size: 839.71 MB<br /><br /><a href="http://www.limetorrents.cc/The-Expanse-2x04-(720p-HDTV-x264-SVA)[VTV]-torrent-8643587.html">More @ limetorrents.cc</a><br /> ]]> </description> <comments>http://www.limetorrents.cc/The-Expanse-2x04-(720p-HDTV-x264-SVA)[VTV]-torrent-8643587.html</comments> <category domain="http://www.limetorrents.cc/browse-torrents/TV shows">TV shows</category> <enclosure url="http://itorrents.org/torrent/51C578C9823DD58F6EEA287C368ED935843D63AB.torrent?title=The-Expanse-2x04-(720p-HDTV-x264-SVA)[VTV]" length="880496711" type="application/x-bittorrent" /> </item> <item> <title>Criminal Minds S12E13 720p HDTV x264-FLEET[PRiME]</title> <guid isPermaLink='true'>http://www.limetorrents.cc/Criminal-Minds-S12E13-720p-HDTV-x264-FLEET[PRiME]-torrent-8643586.html</guid> <pubDate>16 Feb 2017 05:20:49 +0300</pubDate> <category>TV shows</category> 
<link>http://www.limetorrents.cc/Criminal-Minds-S12E13-720p-HDTV-x264-FLEET[PRiME]-torrent-8643586.html</link> <size>940818158</size> <description> <![CDATA[ Category: <a href="http://www.limetorrents.cc/browse-torrents/TV-shows/">TV shows</a><br /> Seeds: 0<br />Leechers: 0<br />Size: 897.23 MB<br /><br /><a href="http://www.limetorrents.cc/Criminal-Minds-S12E13-720p-HDTV-x264-FLEET[PRiME]-torrent-8643586.html">More @ limetorrents.cc</a><br /> ]]> </description> <comments>http://www.limetorrents.cc/Criminal-Minds-S12E13-720p-HDTV-x264-FLEET[PRiME]-torrent-8643586.html</comments> <category domain="http://www.limetorrents.cc/browse-torrents/TV shows">TV shows</category> <enclosure url="http://itorrents.org/torrent/C7EBCBE53A82E7C8F0826417F5174C8709DB9DC0.torrent?title=Criminal-Minds-S12E13-720p-HDTV-x264-FLEET[PRiME]" length="940818158" type="application/x-bittorrent" /> </item> <item> <title>Legion S01E02 720p HDTV x264-AVS[PRiME]</title> <guid isPermaLink='true'>http://www.limetorrents.cc/Legion-S01E02-720p-HDTV-x264-AVS[PRiME]-torrent-8643585.html</guid> <pubDate>16 Feb 2017 05:20:48 +0300</pubDate> <category>TV shows</category> <link>http://www.limetorrents.cc/Legion-S01E02-720p-HDTV-x264-AVS[PRiME]-torrent-8643585.html</link> <size>1320654292</size> <description> <![CDATA[ Category: <a href="http://www.limetorrents.cc/browse-torrents/TV-shows/">TV shows</a><br /> Seeds: 0<br />Leechers: 0<br />Size: 1.23 GB<br /><br /><a href="http://www.limetorrents.cc/Legion-S01E02-720p-HDTV-x264-AVS[PRiME]-torrent-8643585.html">More @ limetorrents.cc</a><br /> ]]> </description> <comments>http://www.limetorrents.cc/Legion-S01E02-720p-HDTV-x264-AVS[PRiME]-torrent-8643585.html</comments> <category domain="http://www.limetorrents.cc/browse-torrents/TV shows">TV shows</category> <enclosure url="http://itorrents.org/torrent/ED2903DB3F4B3D728D2E7091C33B6F502A0FB5D4.torrent?title=Legion-S01E02-720p-HDTV-x264-AVS[PRiME]" length="1320654292" type="application/x-bittorrent" /> </item> 
<item> <title>Suits S06E14 HDTV x264-SVA[PRiME]</title> <guid isPermaLink='true'>http://www.limetorrents.cc/Suits-S06E14-HDTV-x264-SVA[PRiME]-torrent-8643579.html</guid> <pubDate>16 Feb 2017 05:11:58 +0300</pubDate> <category>TV shows</category> <link>http://www.limetorrents.cc/Suits-S06E14-HDTV-x264-SVA[PRiME]-torrent-8643579.html</link> <size>212274667</size> <description> <![CDATA[ Category: <a href="http://www.limetorrents.cc/browse-torrents/TV-shows/">TV shows</a><br /> Seeds: 0<br />Leechers: 0<br />Size: 202.44 MB<br /><br /><a href="http://www.limetorrents.cc/Suits-S06E14-HDTV-x264-SVA[PRiME]-torrent-8643579.html">More @ limetorrents.cc</a><br /> ]]> </description> <comments>http://www.limetorrents.cc/Suits-S06E14-HDTV-x264-SVA[PRiME]-torrent-8643579.html</comments> <category domain="http://www.limetorrents.cc/browse-torrents/TV shows">TV shows</category> <enclosure url="http://itorrents.org/torrent/5E412B3200773684AEDBEBF9B053ED58180279DD.torrent?title=Suits-S06E14-HDTV-x264-SVA[PRiME]" length="212274667" type="application/x-bittorrent" /> </item> <item> <title>The Expanse S02E04 HDTV x264-SVA[PRiME]</title> <guid isPermaLink='true'>http://www.limetorrents.cc/The-Expanse-S02E04-HDTV-x264-SVA[PRiME]-torrent-8643578.html</guid> <pubDate>16 Feb 2017 05:11:57 +0300</pubDate> <category>TV shows</category> <link>http://www.limetorrents.cc/The-Expanse-S02E04-HDTV-x264-SVA[PRiME]-torrent-8643578.html</link> <size>269445781</size> <description> <![CDATA[ Category: <a href="http://www.limetorrents.cc/browse-torrents/TV-shows/">TV shows</a><br /> Seeds: 0<br />Leechers: 0<br />Size: 256.96 MB<br /><br /><a href="http://www.limetorrents.cc/The-Expanse-S02E04-HDTV-x264-SVA[PRiME]-torrent-8643578.html">More @ limetorrents.cc</a><br /> ]]> </description> <comments>http://www.limetorrents.cc/The-Expanse-S02E04-HDTV-x264-SVA[PRiME]-torrent-8643578.html</comments> <category domain="http://www.limetorrents.cc/browse-torrents/TV shows">TV shows</category> <enclosure 
url="http://itorrents.org/torrent/7E552CD2D99E43C34FBB233E3BAF0C1ECD416C76.torrent?title=The-Expanse-S02E04-HDTV-x264-SVA[PRiME]" length="269445781" type="application/x-bittorrent" /> </item> </channel> </rss>
{ "pile_set_name": "Github" }
import {NgModule} from '@angular/core'; import {ReactiveFormsModule} from '@angular/forms'; import {MatButtonModule} from '@angular/material/button'; import {MatIconModule} from '@angular/material/icon'; import {MatInputModule} from '@angular/material/input'; import {MatStepperModule} from '@angular/material/stepper'; import {StepperEditableExample} from './stepper-editable/stepper-editable-example'; import {StepperErrorsExample} from './stepper-errors/stepper-errors-example'; import { StepperLabelPositionBottomExample } from './stepper-label-position-bottom/stepper-label-position-bottom-example'; import {StepperOptionalExample} from './stepper-optional/stepper-optional-example'; import {StepperOverviewExample} from './stepper-overview/stepper-overview-example'; import {StepperStatesExample} from './stepper-states/stepper-states-example'; import {StepperVerticalExample} from './stepper-vertical/stepper-vertical-example'; export { StepperEditableExample, StepperErrorsExample, StepperLabelPositionBottomExample, StepperOptionalExample, StepperOverviewExample, StepperStatesExample, StepperVerticalExample, }; const EXAMPLES = [ StepperEditableExample, StepperErrorsExample, StepperLabelPositionBottomExample, StepperOptionalExample, StepperOverviewExample, StepperStatesExample, StepperVerticalExample, ]; @NgModule({ imports: [ MatButtonModule, MatIconModule, MatInputModule, MatStepperModule, ReactiveFormsModule, ], declarations: EXAMPLES, exports: EXAMPLES, entryComponents: EXAMPLES, }) export class StepperExamplesModule { }
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <root> <!-- Microsoft ResX Schema Version 2.0 The primary goals of this format is to allow a simple XML format that is mostly human readable. The generation and parsing of the various data types are done through the TypeConverter classes associated with the data types. Example: ... ado.net/XML headers & schema ... <resheader name="resmimetype">text/microsoft-resx</resheader> <resheader name="version">2.0</resheader> <resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader> <resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader> <data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data> <data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data> <data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64"> <value>[base64 mime encoded serialized .NET Framework object]</value> </data> <data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64"> <value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value> <comment>This is a comment</comment> </data> There are any number of "resheader" rows that contain simple name/value pairs. Each data row contains a name, and value. The row also contains a type or mimetype. Type corresponds to a .NET class that support text/value conversion through the TypeConverter architecture. Classes that don't support this are serialized and stored with the mimetype set. The mimetype is used for serialized objects, and tells the ResXResourceReader how to depersist the object. This is currently not extensible. For a given mimetype the value must be set accordingly: Note - application/x-microsoft.net.object.binary.base64 is the format that the ResXResourceWriter will generate, however the reader can read any of the formats listed below. 
mimetype: application/x-microsoft.net.object.binary.base64 value : The object must be serialized with : System.Runtime.Serialization.Formatters.Binary.BinaryFormatter : and then encoded with base64 encoding. mimetype: application/x-microsoft.net.object.soap.base64 value : The object must be serialized with : System.Runtime.Serialization.Formatters.Soap.SoapFormatter : and then encoded with base64 encoding. mimetype: application/x-microsoft.net.object.bytearray.base64 value : The object must be serialized into a byte array : using a System.ComponentModel.TypeConverter : and then encoded with base64 encoding. --> <xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata"> <xsd:import namespace="http://www.w3.org/XML/1998/namespace" /> <xsd:element name="root" msdata:IsDataSet="true"> <xsd:complexType> <xsd:choice maxOccurs="unbounded"> <xsd:element name="metadata"> <xsd:complexType> <xsd:sequence> <xsd:element name="value" type="xsd:string" minOccurs="0" /> </xsd:sequence> <xsd:attribute name="name" use="required" type="xsd:string" /> <xsd:attribute name="type" type="xsd:string" /> <xsd:attribute name="mimetype" type="xsd:string" /> <xsd:attribute ref="xml:space" /> </xsd:complexType> </xsd:element> <xsd:element name="assembly"> <xsd:complexType> <xsd:attribute name="alias" type="xsd:string" /> <xsd:attribute name="name" type="xsd:string" /> </xsd:complexType> </xsd:element> <xsd:element name="data"> <xsd:complexType> <xsd:sequence> <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" /> <xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" /> </xsd:sequence> <xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" /> <xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" /> <xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" /> <xsd:attribute ref="xml:space" /> </xsd:complexType> </xsd:element> 
<xsd:element name="resheader"> <xsd:complexType> <xsd:sequence> <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" /> </xsd:sequence> <xsd:attribute name="name" type="xsd:string" use="required" /> </xsd:complexType> </xsd:element> </xsd:choice> </xsd:complexType> </xsd:element> </xsd:schema> <resheader name="resmimetype"> <value>text/microsoft-resx</value> </resheader> <resheader name="version"> <value>2.0</value> </resheader> <resheader name="reader"> <value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value> </resheader> <resheader name="writer"> <value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value> </resheader> </root>
{ "pile_set_name": "Github" }
/* * Copyright 2018 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.api.tasks.diagnostics.internal.graph.nodes; import java.util.List; public interface Section { String getDescription(); List<Section> getChildren(); }
{ "pile_set_name": "Github" }
version https://git-lfs.github.com/spec/v1 oid sha256:b5d5fcd320c019c754b7e4704110b3f5a3c2ceff9d28f7d5eedf9cddd6e67f73 size 11125
{ "pile_set_name": "Github" }
// Copyright 2009 the Sputnik authors. All rights reserved. // This code is governed by the BSD license found in the LICENSE file. /** * @name: S11.8.4_A4.11; * @section: 11.8.4, 11.8.5; * @assertion: If y is a prefix of x, return true; * @description: x and y are string primitives; */ //CHECK#1 if (("x" >= "x") !== true) { $ERROR('#1: ("x" >= "x") === true'); } //CHECK#2 if (("x" >= "") !== true) { $ERROR('#2: ("x" >= "") === true'); } //CHECK#3 if (("abcd" >= "ab") !== true) { $ERROR('#3: ("abcd" >= ab") === true'); } //CHECK#4 if (("abc\u0064" >= "abcd") !== true) { $ERROR('#4: ("abc\\u0064" >= abc") === true'); } //CHECK#5 if (("x" + "y" >= "x") !== true) { $ERROR('#5: ("x" + "y" >= "x") === true'); } //CHECK#6 var x = "x"; if ((x + 'y' >= x) !== true) { $ERROR('#6: var x = "x"; (x + "y" >= x) === true'); } //CHECK#7 if (("a\u0000a" >= "a\u0000") !== true) { $ERROR('#7: ("a\\u0000a" >= "a\\u0000") === true'); } //CHECK#8 if ((" x" >= "x") !== false) { $ERROR('#8: (" x" >= "x") === false'); }
{ "pile_set_name": "Github" }
MIT License Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
{ "pile_set_name": "Github" }
#include <brdb/brdb_value.hxx> #include <boxm2/view/boxm2_trajectory.h> BRDB_VALUE_INSTANTIATE(boxm2_trajectory_sptr, "boxm2_trajectory_sptr");
{ "pile_set_name": "Github" }
9b465a89facef8f053783fba72a753893258c529
{ "pile_set_name": "Github" }
<template> <div id="app"> <router-view></router-view> </div> </template> <script> export default { name: 'app' } </script>
{ "pile_set_name": "Github" }
{"text": ["richjeanneret", ":", "major", "drops", "in", "u", ".", "s", ".", "equities", "create", "better", "valuations", ";", "this", "could", "strengthen", "buying", "opportunities", "for", "those", "who", "take", "risk", "$", "d", "\u2026"], "created_at": "Mon Aug 24 16:21:37 +0000 2015", "user_id_str": "2862599829"} {"text": ["l", "$", "d", "\ud83d\ude08"], "created_at": "Mon Aug 24 12:40:44 +0000 2015", "user_id_str": "590596817"} {"text": ["rt", "AT_USER", "a", "$", "ap", "rocky", "|", "l", "$", "d", "URL"], "created_at": "Mon Aug 24 04:43:28 +0000 2015", "user_id_str": "1081784671"} {"text": ["rt", "AT_USER", "a", "$", "ap", "rocky", "|", "l", "$", "d", "URL"], "created_at": "Mon Aug 24 06:47:48 +0000 2015", "user_id_str": "1093445768"} {"text": ["rt", "AT_USER", "a", "$", "ap", "rocky", "|", "l", "$", "d", "URL"], "created_at": "Mon Aug 24 04:38:46 +0000 2015", "user_id_str": "2395283701"} {"text": ["rt", "AT_USER", "AT_USER", "l", "$", "d", "x", "AT_USER"], "created_at": "Mon Aug 24 07:38:33 +0000 2015", "user_id_str": "1214352314"} {"text": ["rt", "AT_USER", "a", "$", "ap", "rocky", "|", "l", "$", "d", "URL"], "created_at": "Mon Aug 24 19:49:37 +0000 2015", "user_id_str": "2661429878"} {"text": ["rt", "AT_USER", "a", "$", "ap", "rocky", "|", "l", "$", "d", "URL"], "created_at": "Mon Aug 24 05:26:32 +0000 2015", "user_id_str": "869811306"} {"text": ["rt", "AT_USER", "a", "$", "ap", "rocky", "|", "l", "$", "d", "URL"], "created_at": "Mon Aug 24 05:28:37 +0000 2015", "user_id_str": "2429731950"} {"text": ["rt", "AT_USER", "a", "$", "ap", "rocky", "|", "l", "$", "d", "URL"], "created_at": "Mon Aug 24 05:34:59 +0000 2015", "user_id_str": "416647048"} {"text": ["rt", "AT_USER", "a", "$", "ap", "rocky", "|", "l", "$", "d", "URL"], "created_at": "Mon Aug 24 05:36:41 +0000 2015", "user_id_str": "322987611"} {"text": ["i", "played", "l", "$", "d", "in", "the", "car", "with", "my", "dad", "and", "he", "asked", "what", "it", "meant", "so", "i", "told", "him", "love", 
"summer", "dreams"], "created_at": "Mon Aug 24 00:57:56 +0000 2015", "user_id_str": "1358239268"} {"text": ["rt", "AT_USER", "a", "$", "ap", "rocky", "-", "l", "$", "d", "URL"], "created_at": "Mon Aug 24 06:30:48 +0000 2015", "user_id_str": "1104004676"} {"text": ["rt", "AT_USER", "a", "$", "ap", "rocky", "|", "l", "$", "d", "URL"], "created_at": "Mon Aug 24 18:19:15 +0000 2015", "user_id_str": "555398938"}
{ "pile_set_name": "Github" }
/* Copyright The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Code generated by client-gen. DO NOT EDIT. package fake import ( "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/watch" "k8s.io/client-go/discovery" fakediscovery "k8s.io/client-go/discovery/fake" clientset "k8s.io/client-go/kubernetes" admissionregistrationv1 "k8s.io/client-go/kubernetes/typed/admissionregistration/v1" fakeadmissionregistrationv1 "k8s.io/client-go/kubernetes/typed/admissionregistration/v1/fake" admissionregistrationv1beta1 "k8s.io/client-go/kubernetes/typed/admissionregistration/v1beta1" fakeadmissionregistrationv1beta1 "k8s.io/client-go/kubernetes/typed/admissionregistration/v1beta1/fake" appsv1 "k8s.io/client-go/kubernetes/typed/apps/v1" fakeappsv1 "k8s.io/client-go/kubernetes/typed/apps/v1/fake" appsv1beta1 "k8s.io/client-go/kubernetes/typed/apps/v1beta1" fakeappsv1beta1 "k8s.io/client-go/kubernetes/typed/apps/v1beta1/fake" appsv1beta2 "k8s.io/client-go/kubernetes/typed/apps/v1beta2" fakeappsv1beta2 "k8s.io/client-go/kubernetes/typed/apps/v1beta2/fake" auditregistrationv1alpha1 "k8s.io/client-go/kubernetes/typed/auditregistration/v1alpha1" fakeauditregistrationv1alpha1 "k8s.io/client-go/kubernetes/typed/auditregistration/v1alpha1/fake" authenticationv1 "k8s.io/client-go/kubernetes/typed/authentication/v1" fakeauthenticationv1 "k8s.io/client-go/kubernetes/typed/authentication/v1/fake" authenticationv1beta1 "k8s.io/client-go/kubernetes/typed/authentication/v1beta1" 
fakeauthenticationv1beta1 "k8s.io/client-go/kubernetes/typed/authentication/v1beta1/fake" authorizationv1 "k8s.io/client-go/kubernetes/typed/authorization/v1" fakeauthorizationv1 "k8s.io/client-go/kubernetes/typed/authorization/v1/fake" authorizationv1beta1 "k8s.io/client-go/kubernetes/typed/authorization/v1beta1" fakeauthorizationv1beta1 "k8s.io/client-go/kubernetes/typed/authorization/v1beta1/fake" autoscalingv1 "k8s.io/client-go/kubernetes/typed/autoscaling/v1" fakeautoscalingv1 "k8s.io/client-go/kubernetes/typed/autoscaling/v1/fake" autoscalingv2beta1 "k8s.io/client-go/kubernetes/typed/autoscaling/v2beta1" fakeautoscalingv2beta1 "k8s.io/client-go/kubernetes/typed/autoscaling/v2beta1/fake" autoscalingv2beta2 "k8s.io/client-go/kubernetes/typed/autoscaling/v2beta2" fakeautoscalingv2beta2 "k8s.io/client-go/kubernetes/typed/autoscaling/v2beta2/fake" batchv1 "k8s.io/client-go/kubernetes/typed/batch/v1" fakebatchv1 "k8s.io/client-go/kubernetes/typed/batch/v1/fake" batchv1beta1 "k8s.io/client-go/kubernetes/typed/batch/v1beta1" fakebatchv1beta1 "k8s.io/client-go/kubernetes/typed/batch/v1beta1/fake" batchv2alpha1 "k8s.io/client-go/kubernetes/typed/batch/v2alpha1" fakebatchv2alpha1 "k8s.io/client-go/kubernetes/typed/batch/v2alpha1/fake" certificatesv1beta1 "k8s.io/client-go/kubernetes/typed/certificates/v1beta1" fakecertificatesv1beta1 "k8s.io/client-go/kubernetes/typed/certificates/v1beta1/fake" coordinationv1 "k8s.io/client-go/kubernetes/typed/coordination/v1" fakecoordinationv1 "k8s.io/client-go/kubernetes/typed/coordination/v1/fake" coordinationv1beta1 "k8s.io/client-go/kubernetes/typed/coordination/v1beta1" fakecoordinationv1beta1 "k8s.io/client-go/kubernetes/typed/coordination/v1beta1/fake" corev1 "k8s.io/client-go/kubernetes/typed/core/v1" fakecorev1 "k8s.io/client-go/kubernetes/typed/core/v1/fake" discoveryv1alpha1 "k8s.io/client-go/kubernetes/typed/discovery/v1alpha1" fakediscoveryv1alpha1 "k8s.io/client-go/kubernetes/typed/discovery/v1alpha1/fake" eventsv1beta1 
"k8s.io/client-go/kubernetes/typed/events/v1beta1" fakeeventsv1beta1 "k8s.io/client-go/kubernetes/typed/events/v1beta1/fake" extensionsv1beta1 "k8s.io/client-go/kubernetes/typed/extensions/v1beta1" fakeextensionsv1beta1 "k8s.io/client-go/kubernetes/typed/extensions/v1beta1/fake" networkingv1 "k8s.io/client-go/kubernetes/typed/networking/v1" fakenetworkingv1 "k8s.io/client-go/kubernetes/typed/networking/v1/fake" networkingv1beta1 "k8s.io/client-go/kubernetes/typed/networking/v1beta1" fakenetworkingv1beta1 "k8s.io/client-go/kubernetes/typed/networking/v1beta1/fake" nodev1alpha1 "k8s.io/client-go/kubernetes/typed/node/v1alpha1" fakenodev1alpha1 "k8s.io/client-go/kubernetes/typed/node/v1alpha1/fake" nodev1beta1 "k8s.io/client-go/kubernetes/typed/node/v1beta1" fakenodev1beta1 "k8s.io/client-go/kubernetes/typed/node/v1beta1/fake" policyv1beta1 "k8s.io/client-go/kubernetes/typed/policy/v1beta1" fakepolicyv1beta1 "k8s.io/client-go/kubernetes/typed/policy/v1beta1/fake" rbacv1 "k8s.io/client-go/kubernetes/typed/rbac/v1" fakerbacv1 "k8s.io/client-go/kubernetes/typed/rbac/v1/fake" rbacv1alpha1 "k8s.io/client-go/kubernetes/typed/rbac/v1alpha1" fakerbacv1alpha1 "k8s.io/client-go/kubernetes/typed/rbac/v1alpha1/fake" rbacv1beta1 "k8s.io/client-go/kubernetes/typed/rbac/v1beta1" fakerbacv1beta1 "k8s.io/client-go/kubernetes/typed/rbac/v1beta1/fake" schedulingv1 "k8s.io/client-go/kubernetes/typed/scheduling/v1" fakeschedulingv1 "k8s.io/client-go/kubernetes/typed/scheduling/v1/fake" schedulingv1alpha1 "k8s.io/client-go/kubernetes/typed/scheduling/v1alpha1" fakeschedulingv1alpha1 "k8s.io/client-go/kubernetes/typed/scheduling/v1alpha1/fake" schedulingv1beta1 "k8s.io/client-go/kubernetes/typed/scheduling/v1beta1" fakeschedulingv1beta1 "k8s.io/client-go/kubernetes/typed/scheduling/v1beta1/fake" settingsv1alpha1 "k8s.io/client-go/kubernetes/typed/settings/v1alpha1" fakesettingsv1alpha1 "k8s.io/client-go/kubernetes/typed/settings/v1alpha1/fake" storagev1 
"k8s.io/client-go/kubernetes/typed/storage/v1" fakestoragev1 "k8s.io/client-go/kubernetes/typed/storage/v1/fake" storagev1alpha1 "k8s.io/client-go/kubernetes/typed/storage/v1alpha1" fakestoragev1alpha1 "k8s.io/client-go/kubernetes/typed/storage/v1alpha1/fake" storagev1beta1 "k8s.io/client-go/kubernetes/typed/storage/v1beta1" fakestoragev1beta1 "k8s.io/client-go/kubernetes/typed/storage/v1beta1/fake" "k8s.io/client-go/testing" ) // NewSimpleClientset returns a clientset that will respond with the provided objects. // It's backed by a very simple object tracker that processes creates, updates and deletions as-is, // without applying any validations and/or defaults. It shouldn't be considered a replacement // for a real clientset and is mostly useful in simple unit tests. func NewSimpleClientset(objects ...runtime.Object) *Clientset { o := testing.NewObjectTracker(scheme, codecs.UniversalDecoder()) for _, obj := range objects { if err := o.Add(obj); err != nil { panic(err) } } cs := &Clientset{tracker: o} cs.discovery = &fakediscovery.FakeDiscovery{Fake: &cs.Fake} cs.AddReactor("*", "*", testing.ObjectReaction(o)) cs.AddWatchReactor("*", func(action testing.Action) (handled bool, ret watch.Interface, err error) { gvr := action.GetResource() ns := action.GetNamespace() watch, err := o.Watch(gvr, ns) if err != nil { return false, nil, err } return true, watch, nil }) return cs } // Clientset implements clientset.Interface. Meant to be embedded into a // struct to get a default implementation. This makes faking out just the method // you want to test easier. 
type Clientset struct { testing.Fake discovery *fakediscovery.FakeDiscovery tracker testing.ObjectTracker } func (c *Clientset) Discovery() discovery.DiscoveryInterface { return c.discovery } func (c *Clientset) Tracker() testing.ObjectTracker { return c.tracker } var _ clientset.Interface = &Clientset{} // AdmissionregistrationV1 retrieves the AdmissionregistrationV1Client func (c *Clientset) AdmissionregistrationV1() admissionregistrationv1.AdmissionregistrationV1Interface { return &fakeadmissionregistrationv1.FakeAdmissionregistrationV1{Fake: &c.Fake} } // AdmissionregistrationV1beta1 retrieves the AdmissionregistrationV1beta1Client func (c *Clientset) AdmissionregistrationV1beta1() admissionregistrationv1beta1.AdmissionregistrationV1beta1Interface { return &fakeadmissionregistrationv1beta1.FakeAdmissionregistrationV1beta1{Fake: &c.Fake} } // AppsV1 retrieves the AppsV1Client func (c *Clientset) AppsV1() appsv1.AppsV1Interface { return &fakeappsv1.FakeAppsV1{Fake: &c.Fake} } // AppsV1beta1 retrieves the AppsV1beta1Client func (c *Clientset) AppsV1beta1() appsv1beta1.AppsV1beta1Interface { return &fakeappsv1beta1.FakeAppsV1beta1{Fake: &c.Fake} } // AppsV1beta2 retrieves the AppsV1beta2Client func (c *Clientset) AppsV1beta2() appsv1beta2.AppsV1beta2Interface { return &fakeappsv1beta2.FakeAppsV1beta2{Fake: &c.Fake} } // AuditregistrationV1alpha1 retrieves the AuditregistrationV1alpha1Client func (c *Clientset) AuditregistrationV1alpha1() auditregistrationv1alpha1.AuditregistrationV1alpha1Interface { return &fakeauditregistrationv1alpha1.FakeAuditregistrationV1alpha1{Fake: &c.Fake} } // AuthenticationV1 retrieves the AuthenticationV1Client func (c *Clientset) AuthenticationV1() authenticationv1.AuthenticationV1Interface { return &fakeauthenticationv1.FakeAuthenticationV1{Fake: &c.Fake} } // AuthenticationV1beta1 retrieves the AuthenticationV1beta1Client func (c *Clientset) AuthenticationV1beta1() authenticationv1beta1.AuthenticationV1beta1Interface { return 
&fakeauthenticationv1beta1.FakeAuthenticationV1beta1{Fake: &c.Fake} } // AuthorizationV1 retrieves the AuthorizationV1Client func (c *Clientset) AuthorizationV1() authorizationv1.AuthorizationV1Interface { return &fakeauthorizationv1.FakeAuthorizationV1{Fake: &c.Fake} } // AuthorizationV1beta1 retrieves the AuthorizationV1beta1Client func (c *Clientset) AuthorizationV1beta1() authorizationv1beta1.AuthorizationV1beta1Interface { return &fakeauthorizationv1beta1.FakeAuthorizationV1beta1{Fake: &c.Fake} } // AutoscalingV1 retrieves the AutoscalingV1Client func (c *Clientset) AutoscalingV1() autoscalingv1.AutoscalingV1Interface { return &fakeautoscalingv1.FakeAutoscalingV1{Fake: &c.Fake} } // AutoscalingV2beta1 retrieves the AutoscalingV2beta1Client func (c *Clientset) AutoscalingV2beta1() autoscalingv2beta1.AutoscalingV2beta1Interface { return &fakeautoscalingv2beta1.FakeAutoscalingV2beta1{Fake: &c.Fake} } // AutoscalingV2beta2 retrieves the AutoscalingV2beta2Client func (c *Clientset) AutoscalingV2beta2() autoscalingv2beta2.AutoscalingV2beta2Interface { return &fakeautoscalingv2beta2.FakeAutoscalingV2beta2{Fake: &c.Fake} } // BatchV1 retrieves the BatchV1Client func (c *Clientset) BatchV1() batchv1.BatchV1Interface { return &fakebatchv1.FakeBatchV1{Fake: &c.Fake} } // BatchV1beta1 retrieves the BatchV1beta1Client func (c *Clientset) BatchV1beta1() batchv1beta1.BatchV1beta1Interface { return &fakebatchv1beta1.FakeBatchV1beta1{Fake: &c.Fake} } // BatchV2alpha1 retrieves the BatchV2alpha1Client func (c *Clientset) BatchV2alpha1() batchv2alpha1.BatchV2alpha1Interface { return &fakebatchv2alpha1.FakeBatchV2alpha1{Fake: &c.Fake} } // CertificatesV1beta1 retrieves the CertificatesV1beta1Client func (c *Clientset) CertificatesV1beta1() certificatesv1beta1.CertificatesV1beta1Interface { return &fakecertificatesv1beta1.FakeCertificatesV1beta1{Fake: &c.Fake} } // CoordinationV1beta1 retrieves the CoordinationV1beta1Client func (c *Clientset) CoordinationV1beta1() 
coordinationv1beta1.CoordinationV1beta1Interface { return &fakecoordinationv1beta1.FakeCoordinationV1beta1{Fake: &c.Fake} } // CoordinationV1 retrieves the CoordinationV1Client func (c *Clientset) CoordinationV1() coordinationv1.CoordinationV1Interface { return &fakecoordinationv1.FakeCoordinationV1{Fake: &c.Fake} } // CoreV1 retrieves the CoreV1Client func (c *Clientset) CoreV1() corev1.CoreV1Interface { return &fakecorev1.FakeCoreV1{Fake: &c.Fake} } // DiscoveryV1alpha1 retrieves the DiscoveryV1alpha1Client func (c *Clientset) DiscoveryV1alpha1() discoveryv1alpha1.DiscoveryV1alpha1Interface { return &fakediscoveryv1alpha1.FakeDiscoveryV1alpha1{Fake: &c.Fake} } // EventsV1beta1 retrieves the EventsV1beta1Client func (c *Clientset) EventsV1beta1() eventsv1beta1.EventsV1beta1Interface { return &fakeeventsv1beta1.FakeEventsV1beta1{Fake: &c.Fake} } // ExtensionsV1beta1 retrieves the ExtensionsV1beta1Client func (c *Clientset) ExtensionsV1beta1() extensionsv1beta1.ExtensionsV1beta1Interface { return &fakeextensionsv1beta1.FakeExtensionsV1beta1{Fake: &c.Fake} } // NetworkingV1 retrieves the NetworkingV1Client func (c *Clientset) NetworkingV1() networkingv1.NetworkingV1Interface { return &fakenetworkingv1.FakeNetworkingV1{Fake: &c.Fake} } // NetworkingV1beta1 retrieves the NetworkingV1beta1Client func (c *Clientset) NetworkingV1beta1() networkingv1beta1.NetworkingV1beta1Interface { return &fakenetworkingv1beta1.FakeNetworkingV1beta1{Fake: &c.Fake} } // NodeV1alpha1 retrieves the NodeV1alpha1Client func (c *Clientset) NodeV1alpha1() nodev1alpha1.NodeV1alpha1Interface { return &fakenodev1alpha1.FakeNodeV1alpha1{Fake: &c.Fake} } // NodeV1beta1 retrieves the NodeV1beta1Client func (c *Clientset) NodeV1beta1() nodev1beta1.NodeV1beta1Interface { return &fakenodev1beta1.FakeNodeV1beta1{Fake: &c.Fake} } // PolicyV1beta1 retrieves the PolicyV1beta1Client func (c *Clientset) PolicyV1beta1() policyv1beta1.PolicyV1beta1Interface { return &fakepolicyv1beta1.FakePolicyV1beta1{Fake: 
&c.Fake} } // RbacV1 retrieves the RbacV1Client func (c *Clientset) RbacV1() rbacv1.RbacV1Interface { return &fakerbacv1.FakeRbacV1{Fake: &c.Fake} } // RbacV1beta1 retrieves the RbacV1beta1Client func (c *Clientset) RbacV1beta1() rbacv1beta1.RbacV1beta1Interface { return &fakerbacv1beta1.FakeRbacV1beta1{Fake: &c.Fake} } // RbacV1alpha1 retrieves the RbacV1alpha1Client func (c *Clientset) RbacV1alpha1() rbacv1alpha1.RbacV1alpha1Interface { return &fakerbacv1alpha1.FakeRbacV1alpha1{Fake: &c.Fake} } // SchedulingV1alpha1 retrieves the SchedulingV1alpha1Client func (c *Clientset) SchedulingV1alpha1() schedulingv1alpha1.SchedulingV1alpha1Interface { return &fakeschedulingv1alpha1.FakeSchedulingV1alpha1{Fake: &c.Fake} } // SchedulingV1beta1 retrieves the SchedulingV1beta1Client func (c *Clientset) SchedulingV1beta1() schedulingv1beta1.SchedulingV1beta1Interface { return &fakeschedulingv1beta1.FakeSchedulingV1beta1{Fake: &c.Fake} } // SchedulingV1 retrieves the SchedulingV1Client func (c *Clientset) SchedulingV1() schedulingv1.SchedulingV1Interface { return &fakeschedulingv1.FakeSchedulingV1{Fake: &c.Fake} } // SettingsV1alpha1 retrieves the SettingsV1alpha1Client func (c *Clientset) SettingsV1alpha1() settingsv1alpha1.SettingsV1alpha1Interface { return &fakesettingsv1alpha1.FakeSettingsV1alpha1{Fake: &c.Fake} } // StorageV1beta1 retrieves the StorageV1beta1Client func (c *Clientset) StorageV1beta1() storagev1beta1.StorageV1beta1Interface { return &fakestoragev1beta1.FakeStorageV1beta1{Fake: &c.Fake} } // StorageV1 retrieves the StorageV1Client func (c *Clientset) StorageV1() storagev1.StorageV1Interface { return &fakestoragev1.FakeStorageV1{Fake: &c.Fake} } // StorageV1alpha1 retrieves the StorageV1alpha1Client func (c *Clientset) StorageV1alpha1() storagev1alpha1.StorageV1alpha1Interface { return &fakestoragev1alpha1.FakeStorageV1alpha1{Fake: &c.Fake} }
{ "pile_set_name": "Github" }
var lodash = require('./wrapperLodash'); /** * Creates a `lodash` wrapper instance that wraps `value` with explicit method * chain sequences enabled. The result of such sequences must be unwrapped * with `_#value`. * * @static * @memberOf _ * @since 1.3.0 * @category Seq * @param {*} value The value to wrap. * @returns {Object} Returns the new `lodash` wrapper instance. * @example * * var users = [ * { 'user': 'barney', 'age': 36 }, * { 'user': 'fred', 'age': 40 }, * { 'user': 'pebbles', 'age': 1 } * ]; * * var youngest = _ * .chain(users) * .sortBy('age') * .map(function(o) { * return o.user + ' is ' + o.age; * }) * .head() * .value(); * // => 'pebbles is 1' */ function chain(value) { var result = lodash(value); result.__chain__ = true; return result; } module.exports = chain;
{ "pile_set_name": "Github" }
import sys sys.path.append('..') from SETTINGS import *; tag = 11; fcn_img_size=196; net_version = 2; heart_delta_multiplier = 1.8; para_ss = 150; do_cv = False; num_epochs = 250; shift = 10; rotation = 15; scale = 0.15; no_contour_type = 'L';#other versions worse, so it's fixed to 'L'
{ "pile_set_name": "Github" }
#!/usr/bin/env node var mkdirp = require('../'); var minimist = require('minimist'); var fs = require('fs'); var argv = minimist(process.argv.slice(2), { alias: { m: 'mode', h: 'help' }, string: [ 'mode' ] }); if (argv.help) { fs.createReadStream(__dirname + '/usage.txt').pipe(process.stdout); return; } var paths = argv._.slice(); var mode = argv.mode ? parseInt(argv.mode, 8) : undefined; (function next () { if (paths.length === 0) return; var p = paths.shift(); if (mode === undefined) mkdirp(p, cb) else mkdirp(p, mode, cb) function cb (err) { if (err) { console.error(err.message); process.exit(1); } else next(); } })();
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> <plist version="1.0"> <dict> <key>CFBundleDevelopmentRegion</key> <string>$(DEVELOPMENT_LANGUAGE)</string> <key>CFBundleExecutable</key> <string>$(EXECUTABLE_NAME)</string> <key>CFBundleIdentifier</key> <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string> <key>CFBundleInfoDictionaryVersion</key> <string>6.0</string> <key>CFBundleName</key> <string>$(PRODUCT_NAME)</string> <key>CFBundlePackageType</key> <string>$(PRODUCT_BUNDLE_PACKAGE_TYPE)</string> <key>CFBundleShortVersionString</key> <string>$(MARKETING_VERSION)</string> <key>CFBundleVersion</key> <string>$(CURRENT_PROJECT_VERSION)</string> </dict> </plist>
{ "pile_set_name": "Github" }
export { default } from 'shared/helpers/lower-case';
{ "pile_set_name": "Github" }
/* * (C) Copyright 2012 Nuxeo SA (http://nuxeo.com/) and others. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * Contributors: * Antoine Taillefer <ataillefer@nuxeo.com> */ package org.nuxeo.drive.operations; import java.io.IOException; import javax.mail.internet.ParseException; import org.nuxeo.drive.adapter.FileItem; import org.nuxeo.drive.adapter.FileSystemItem; import org.nuxeo.drive.service.FileSystemItemManager; import org.nuxeo.ecm.automation.OperationContext; import org.nuxeo.ecm.automation.core.Constants; import org.nuxeo.ecm.automation.core.annotations.Context; import org.nuxeo.ecm.automation.core.annotations.Operation; import org.nuxeo.ecm.automation.core.annotations.OperationMethod; import org.nuxeo.ecm.automation.core.annotations.Param; import org.nuxeo.ecm.core.api.Blob; import org.nuxeo.ecm.core.api.Blobs; import org.nuxeo.runtime.api.Framework; /** * Updates the document backing the {@link FileSystemItem} with the given id with the input blob. * * @author Antoine Taillefer */ @Operation(id = NuxeoDriveUpdateFile.ID, category = Constants.CAT_SERVICES, label = "Nuxeo Drive: Update file", description = "Update the document backing the file system item with the given id with the input blob." 
// + " Return the file system item backed by the updated document as a JSON blob.") public class NuxeoDriveUpdateFile { public static final String ID = "NuxeoDrive.UpdateFile"; @Context protected OperationContext ctx; @Param(name = "id", description = "Id of the file system item backed by the document to update.") protected String id; // NOSONAR /** * @since 6.0 */ @Param(name = "parentId", required = false, description = "Optional id of the file system item backed by the parent container of the document to update." // + " For optimization purpose.") protected String parentId; @OperationMethod public Blob run(Blob blob) throws ParseException, IOException { FileSystemItemManager fileSystemItemManager = Framework.getService(FileSystemItemManager.class); NuxeoDriveOperationHelper.normalizeMimeTypeAndEncoding(blob); FileItem fileItem; if (parentId == null) { fileItem = fileSystemItemManager.updateFile(id, blob, ctx.getPrincipal()); } else { fileItem = fileSystemItemManager.updateFile(id, parentId, blob, ctx.getPrincipal()); } return Blobs.createJSONBlobFromValue(fileItem); } }
{ "pile_set_name": "Github" }
package com.polidea.rxandroidble2; import android.bluetooth.BluetoothGatt; import androidx.annotation.NonNull; import com.polidea.rxandroidble2.internal.connection.RxBleGattCallback; import com.polidea.rxandroidble2.internal.serialization.ConnectionOperationQueue; import io.reactivex.Observable; import io.reactivex.Observer; import io.reactivex.Scheduler; /** * Represents a custom operation that will be enqueued for future execution within the client instance. */ public interface RxBleCustomOperation<T> { /** * Return an observable that implement a custom operation using low-level Android BLE API. * <p> * The {@link Observable} returned by this method will be subscribed to by the {@link ConnectionOperationQueue} * when it determines that the custom operation should be the next to be run. * <p> * The method receives everything needed to access the low-level Android BLE API objects mainly the * {@link BluetoothGatt} to interact with Android BLE GATT operations and {@link RxBleGattCallback} * to be notified when GATT operations completes. * <p> * Every event emitted by the returned {@link Observable} will be forwarded to the observable * returned by {@link RxBleConnection#queue(RxBleCustomOperation)} * <p> * As the implementer, your contract is to return an {@link Observable} that completes at some * point in time. When the returned observable terminates, either via the {@link Observer#onComplete()} or * {@link Observer#onError(Throwable)} callback, the {@link ConnectionOperationQueue} queue's lock is released so that * queue operations can continue. * <p> * You <b>must</b> ensure the returned {@link Observable} do terminate either via {@code onCompleted} * or {@code onError(Throwable)}. Otherwise, the internal queue orchestrator will wait forever for * your {@link Observable} to complete and the it will not continue to process queued operations. 
* * @param bluetoothGatt The Android API GATT instance * @param rxBleGattCallback The internal Rx ready bluetooth gatt callback to be notified of GATT operations * @param scheduler The ClientOperationQueue scheduler used to asObservable operation * @throws Throwable Any exception that your custom operation might throw */ @NonNull Observable<T> asObservable(BluetoothGatt bluetoothGatt, RxBleGattCallback rxBleGattCallback, Scheduler scheduler) throws Throwable; }
{ "pile_set_name": "Github" }
/* *Copyright 2018 T Mobile, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); You may not use * this file except in compliance with the License. A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or * implied. See the License for the specific language governing permissions and * limitations under the License. */ import { Component, OnInit, OnDestroy, Output, EventEmitter } from '@angular/core'; import { Subscription } from 'rxjs/Subscription'; import { environment } from './../../../../environments/environment'; import { LoggerService } from '../../../shared/services/logger.service'; import { AssetGroupObservableService } from '../../../core/services/asset-group-observable.service'; import { GridOptions } from 'ag-grid'; import { CommonResponseService } from '../../../shared/services/common-response.service'; import { UtilsService } from '../../../shared/services/utils.service'; import * as _ from 'lodash'; @Component({ selector: 'app-vulnerability-summary-table', templateUrl: './vulnerability-summary-table.component.html', styleUrls: ['./vulnerability-summary-table.component.css'], providers: [CommonResponseService] }) export class VulnerabilitySummaryTableComponent implements OnInit, OnDestroy { selectedAssetGroup: string; private errorMessage = 'apiResponseError'; getContextMenuItems: any; gridApi: any; gridColumnApi: any; columns: any = []; initComplete = false; showtable = false; gridOptions: GridOptions; private subscriptionToAssetGroup: Subscription; private dataSubscription: Subscription; @Output() errorOccurred = new EventEmitter(); errorValue = 0; constructor( private commonResponseService: CommonResponseService, private assetGroupObservableService: AssetGroupObservableService, private logger: LoggerService, 
private utils: UtilsService ) { this.gridOptions = <GridOptions>{}; this.gridOptions.columnDefs = []; this.gridOptions.rowData = []; this.getContextMenuItems = function getContextMenuItems(params) { const result = [ 'toolPanel', 'separator', 'copy', 'separator', 'csvExport', 'separator', 'autoSizeAll', 'resetColumns' ]; return result; }; this.subscriptionToAssetGroup = this.assetGroupObservableService.getAssetGroup().subscribe( assetGroupName => { this.showtable = false; this.selectedAssetGroup = assetGroupName; setTimeout(() => { this.showtable = true; this.updateComponent(); }, 10); }); } ngOnInit() { } updateComponent() { this.errorValue = 0; this.getData(); } getData() { if (this.dataSubscription) { this.dataSubscription.unsubscribe(); } const payload = {}; const queryParam = { 'ag': this.selectedAssetGroup }; this.errorValue = 0; const tableUrl = environment.vulnerabilitySummary.url; const tableMethod = environment.vulnerabilitySummary.method; this.errorValue = 0; this.dataSubscription = this.commonResponseService.getData(tableUrl, tableMethod, payload, queryParam).subscribe( response => { try { if (this.utils.checkIfAPIReturnedDataIsEmpty(response.distribution)) { this.errorOccurred.emit(); this.errorValue = -1; this.errorMessage = 'vulnerabilityMessage'; } else { this.errorValue = 1; this.processData(response); } } catch (e) { this.errorOccurred.emit(); this.errorValue = -1; this.errorMessage = 'jsError'; this.logger.log('error', e); } }, error => { this.errorOccurred.emit(); this.errorValue = -1; this.errorMessage = 'apiResponseError'; this.logger.log('error', error); }); } downloadCsv() { this.gridApi.exportDataAsCsv({fileName: 'Vulnerability Summary.csv'}); } processData(data) { this.columns = []; const ObjArr = data.distribution.severityinfo; const listofColumns = Object.keys(ObjArr[0]); const columns = _.pull(listofColumns, 'severity', 'count'); this.columns = columns; let eachObj = {}; this.gridOptions.columnDefs = []; this.gridOptions.rowData = []; 
const objProperties = { minWidth: 182, maxWidth: 800 }; for ( let i = 0; i < columns.length; i++) { if (columns[i].toLowerCase() === 'severitylevel') { eachObj = { pinned: 'left', lockPosition: true, field: columns[i], headerName: 'Severity', minWidth: 182, maxWidth: 800, order: 1 }; } else if (columns[i].toLowerCase() === 'appcount') { eachObj = { field: columns[i], headerName: 'Applications', order: 3 }; Object.assign(eachObj, objProperties); } else if (columns[i].toLowerCase() === 'uniquevulncount') { eachObj = { field: columns[i], headerName: 'Unique Vulnerabilities', order: 2, minWidth: 182 }; Object.assign(eachObj, objProperties); } else if (columns[i].toLowerCase() === 'hostcount') { eachObj = { field: columns[i], headerName: 'Servers', order: 4 }; Object.assign(eachObj, objProperties); } else if (columns[i].toLowerCase() === 'vulninstancecount') { eachObj = { field: columns[i], headerName: 'Vulnerability Instances', order: 5, minWidth: 182 }; Object.assign(eachObj, objProperties); } else { eachObj = { field: columns[i], headerName: columns[i], minWidth: 182, maxWidth: 800 }; } this.gridOptions.columnDefs.push(eachObj); } // sortobject as per 'order' property set. this.gridOptions.columnDefs.sort((a, b) => { return a['order'] - b['order']; }); // get sum of each column let sum_vulnerability = 0; let sum_applications = 0; let sum_servers = 0; let sum_instances = 0; data.distribution.severityinfo.reduce(function(total, currentValue) { if (currentValue['uniqueVulnCount']) { sum_vulnerability = sum_vulnerability + currentValue['uniqueVulnCount']; } if (currentValue['appCount']) { sum_applications = sum_applications + currentValue['appCount']; } if (currentValue['hostCount']) { sum_servers = sum_servers + currentValue['hostCount']; } if (currentValue['vulnInstanceCount']) { sum_instances = sum_instances + currentValue['vulnInstanceCount']; } }, []); const updateRowData = []; // delete rows which are not required to display. 
for ( let j = 0; j < data.distribution.severityinfo.length; j++) { delete data.distribution.severityinfo[j].severity; delete data.distribution.severityinfo[j].count; updateRowData.push(data.distribution.severityinfo[j]); } this.gridOptions.rowData = updateRowData; this.gridOptions.getRowStyle = function(params) { if (params.data.severitylevel === 'TOTAL') { return { 'font-weight': 700 }; } }; if (this.gridApi) { this.gridApi.setColumnDefs(this.gridOptions.columnDefs); this.gridApi.setRowData(this.gridOptions.rowData); this.onresize(); } } onresize() { if (this.columns.length < 6 && this.columns.length > 0) { setTimeout(() => { this.gridApi.sizeColumnsToFit(); }, 3); } else { this.autoSizeAll(); } } onGridReady(params) { this.gridApi = params.api; this.gridColumnApi = params.columnApi; } autoSizeAll() { const allColumnIds = []; if (this.gridColumnApi) { this.gridColumnApi.getAllColumns().forEach(function(column) { allColumnIds.push(column.colId); }); this.gridColumnApi.autoSizeColumns(allColumnIds); } } ngOnDestroy() { try { if (this.subscriptionToAssetGroup) { this.subscriptionToAssetGroup.unsubscribe(); } if (this.dataSubscription) { this.dataSubscription.unsubscribe(); } } catch (error) { this.logger.log('error', '--- Error while unsubscribing ---'); } } }
{ "pile_set_name": "Github" }
/* ChibiOS - Copyright (C) 2006..2015 Giovanni Di Sirio Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ /* * **** This file incorporates work covered by the following copyright and **** * **** permission notice: **** * * Copyright (c) 2001-2004 Swedish Institute of Computer Science. * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * 3. The name of the author may not be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT * SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT * OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING * IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY * OF SUCH DAMAGE. * * This file is part of the lwIP TCP/IP stack. * * Author: Adam Dunkels <adam@sics.se> * */ #ifndef __PERF_H__ #define __PERF_H__ #define PERF_START #define PERF_STOP(x) #endif /* __PERF_H__ */
{ "pile_set_name": "Github" }
/* Copyright (c) 2011-2012, The Linux Foundation. All rights reserved. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License version 2 and * only version 2 as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. */ #ifndef __ASM_HARDWARE_CP14_H #define __ASM_HARDWARE_CP14_H #include <linux/types.h> /* Accessors for CP14 registers */ #define dbg_read(reg) RCP14_##reg() #define dbg_write(val, reg) WCP14_##reg(val) #define etm_read(reg) RCP14_##reg() #define etm_write(val, reg) WCP14_##reg(val) /* MRC14 and MCR14 */ #define MRC14(op1, crn, crm, op2) \ ({ \ uint32_t val; \ asm volatile("mrc p14, "#op1", %0, "#crn", "#crm", "#op2 : "=r" (val)); \ val; \ }) #define MCR14(val, op1, crn, crm, op2) \ ({ \ asm volatile("mcr p14, "#op1", %0, "#crn", "#crm", "#op2 : : "r" (val));\ }) /* Debug Registers * * Available only in DBGv7 * DBGECR, DBGDSCCR, DBGDSMCR, DBGDRCR * * Available only in DBGv7.1 * DBGBXVRm, DBGOSDLR, DBGDEVID2, DBGDEVID1 * * Read only * DBGDIDR, DBGDSCRint, DBGDTRRXint, DBGDRAR, DBGOSLSR, DBGOSSRR, DBGPRSR, * DBGPRSR, DBGDSAR, DBGAUTHSTATUS, DBGDEVID2, DBGDEVID1, DBGDEVID * * Write only * DBGDTRTXint, DBGOSLAR */ #define RCP14_DBGDIDR() MRC14(0, c0, c0, 0) #define RCP14_DBGDSCRint() MRC14(0, c0, c1, 0) #define RCP14_DBGDTRRXint() MRC14(0, c0, c5, 0) #define RCP14_DBGWFAR() MRC14(0, c0, c6, 0) #define RCP14_DBGVCR() MRC14(0, c0, c7, 0) #define RCP14_DBGECR() MRC14(0, c0, c9, 0) #define RCP14_DBGDSCCR() MRC14(0, c0, c10, 0) #define RCP14_DBGDSMCR() MRC14(0, c0, c11, 0) #define RCP14_DBGDTRRXext() MRC14(0, c0, c0, 2) #define RCP14_DBGDSCRext() MRC14(0, c0, c2, 2) #define RCP14_DBGDTRTXext() MRC14(0, c0, c3, 2) #define RCP14_DBGDRCR() MRC14(0, c0, c4, 2) #define 
RCP14_DBGBVR0() MRC14(0, c0, c0, 4) #define RCP14_DBGBVR1() MRC14(0, c0, c1, 4) #define RCP14_DBGBVR2() MRC14(0, c0, c2, 4) #define RCP14_DBGBVR3() MRC14(0, c0, c3, 4) #define RCP14_DBGBVR4() MRC14(0, c0, c4, 4) #define RCP14_DBGBVR5() MRC14(0, c0, c5, 4) #define RCP14_DBGBVR6() MRC14(0, c0, c6, 4) #define RCP14_DBGBVR7() MRC14(0, c0, c7, 4) #define RCP14_DBGBVR8() MRC14(0, c0, c8, 4) #define RCP14_DBGBVR9() MRC14(0, c0, c9, 4) #define RCP14_DBGBVR10() MRC14(0, c0, c10, 4) #define RCP14_DBGBVR11() MRC14(0, c0, c11, 4) #define RCP14_DBGBVR12() MRC14(0, c0, c12, 4) #define RCP14_DBGBVR13() MRC14(0, c0, c13, 4) #define RCP14_DBGBVR14() MRC14(0, c0, c14, 4) #define RCP14_DBGBVR15() MRC14(0, c0, c15, 4) #define RCP14_DBGBCR0() MRC14(0, c0, c0, 5) #define RCP14_DBGBCR1() MRC14(0, c0, c1, 5) #define RCP14_DBGBCR2() MRC14(0, c0, c2, 5) #define RCP14_DBGBCR3() MRC14(0, c0, c3, 5) #define RCP14_DBGBCR4() MRC14(0, c0, c4, 5) #define RCP14_DBGBCR5() MRC14(0, c0, c5, 5) #define RCP14_DBGBCR6() MRC14(0, c0, c6, 5) #define RCP14_DBGBCR7() MRC14(0, c0, c7, 5) #define RCP14_DBGBCR8() MRC14(0, c0, c8, 5) #define RCP14_DBGBCR9() MRC14(0, c0, c9, 5) #define RCP14_DBGBCR10() MRC14(0, c0, c10, 5) #define RCP14_DBGBCR11() MRC14(0, c0, c11, 5) #define RCP14_DBGBCR12() MRC14(0, c0, c12, 5) #define RCP14_DBGBCR13() MRC14(0, c0, c13, 5) #define RCP14_DBGBCR14() MRC14(0, c0, c14, 5) #define RCP14_DBGBCR15() MRC14(0, c0, c15, 5) #define RCP14_DBGWVR0() MRC14(0, c0, c0, 6) #define RCP14_DBGWVR1() MRC14(0, c0, c1, 6) #define RCP14_DBGWVR2() MRC14(0, c0, c2, 6) #define RCP14_DBGWVR3() MRC14(0, c0, c3, 6) #define RCP14_DBGWVR4() MRC14(0, c0, c4, 6) #define RCP14_DBGWVR5() MRC14(0, c0, c5, 6) #define RCP14_DBGWVR6() MRC14(0, c0, c6, 6) #define RCP14_DBGWVR7() MRC14(0, c0, c7, 6) #define RCP14_DBGWVR8() MRC14(0, c0, c8, 6) #define RCP14_DBGWVR9() MRC14(0, c0, c9, 6) #define RCP14_DBGWVR10() MRC14(0, c0, c10, 6) #define RCP14_DBGWVR11() MRC14(0, c0, c11, 6) #define RCP14_DBGWVR12() MRC14(0, c0, c12, 
6) #define RCP14_DBGWVR13() MRC14(0, c0, c13, 6) #define RCP14_DBGWVR14() MRC14(0, c0, c14, 6) #define RCP14_DBGWVR15() MRC14(0, c0, c15, 6) #define RCP14_DBGWCR0() MRC14(0, c0, c0, 7) #define RCP14_DBGWCR1() MRC14(0, c0, c1, 7) #define RCP14_DBGWCR2() MRC14(0, c0, c2, 7) #define RCP14_DBGWCR3() MRC14(0, c0, c3, 7) #define RCP14_DBGWCR4() MRC14(0, c0, c4, 7) #define RCP14_DBGWCR5() MRC14(0, c0, c5, 7) #define RCP14_DBGWCR6() MRC14(0, c0, c6, 7) #define RCP14_DBGWCR7() MRC14(0, c0, c7, 7) #define RCP14_DBGWCR8() MRC14(0, c0, c8, 7) #define RCP14_DBGWCR9() MRC14(0, c0, c9, 7) #define RCP14_DBGWCR10() MRC14(0, c0, c10, 7) #define RCP14_DBGWCR11() MRC14(0, c0, c11, 7) #define RCP14_DBGWCR12() MRC14(0, c0, c12, 7) #define RCP14_DBGWCR13() MRC14(0, c0, c13, 7) #define RCP14_DBGWCR14() MRC14(0, c0, c14, 7) #define RCP14_DBGWCR15() MRC14(0, c0, c15, 7) #define RCP14_DBGDRAR() MRC14(0, c1, c0, 0) #define RCP14_DBGBXVR0() MRC14(0, c1, c0, 1) #define RCP14_DBGBXVR1() MRC14(0, c1, c1, 1) #define RCP14_DBGBXVR2() MRC14(0, c1, c2, 1) #define RCP14_DBGBXVR3() MRC14(0, c1, c3, 1) #define RCP14_DBGBXVR4() MRC14(0, c1, c4, 1) #define RCP14_DBGBXVR5() MRC14(0, c1, c5, 1) #define RCP14_DBGBXVR6() MRC14(0, c1, c6, 1) #define RCP14_DBGBXVR7() MRC14(0, c1, c7, 1) #define RCP14_DBGBXVR8() MRC14(0, c1, c8, 1) #define RCP14_DBGBXVR9() MRC14(0, c1, c9, 1) #define RCP14_DBGBXVR10() MRC14(0, c1, c10, 1) #define RCP14_DBGBXVR11() MRC14(0, c1, c11, 1) #define RCP14_DBGBXVR12() MRC14(0, c1, c12, 1) #define RCP14_DBGBXVR13() MRC14(0, c1, c13, 1) #define RCP14_DBGBXVR14() MRC14(0, c1, c14, 1) #define RCP14_DBGBXVR15() MRC14(0, c1, c15, 1) #define RCP14_DBGOSLSR() MRC14(0, c1, c1, 4) #define RCP14_DBGOSSRR() MRC14(0, c1, c2, 4) #define RCP14_DBGOSDLR() MRC14(0, c1, c3, 4) #define RCP14_DBGPRCR() MRC14(0, c1, c4, 4) #define RCP14_DBGPRSR() MRC14(0, c1, c5, 4) #define RCP14_DBGDSAR() MRC14(0, c2, c0, 0) #define RCP14_DBGITCTRL() MRC14(0, c7, c0, 4) #define RCP14_DBGCLAIMSET() MRC14(0, c7, c8, 6) 
#define RCP14_DBGCLAIMCLR() MRC14(0, c7, c9, 6) #define RCP14_DBGAUTHSTATUS() MRC14(0, c7, c14, 6) #define RCP14_DBGDEVID2() MRC14(0, c7, c0, 7) #define RCP14_DBGDEVID1() MRC14(0, c7, c1, 7) #define RCP14_DBGDEVID() MRC14(0, c7, c2, 7) #define WCP14_DBGDTRTXint(val) MCR14(val, 0, c0, c5, 0) #define WCP14_DBGWFAR(val) MCR14(val, 0, c0, c6, 0) #define WCP14_DBGVCR(val) MCR14(val, 0, c0, c7, 0) #define WCP14_DBGECR(val) MCR14(val, 0, c0, c9, 0) #define WCP14_DBGDSCCR(val) MCR14(val, 0, c0, c10, 0) #define WCP14_DBGDSMCR(val) MCR14(val, 0, c0, c11, 0) #define WCP14_DBGDTRRXext(val) MCR14(val, 0, c0, c0, 2) #define WCP14_DBGDSCRext(val) MCR14(val, 0, c0, c2, 2) #define WCP14_DBGDTRTXext(val) MCR14(val, 0, c0, c3, 2) #define WCP14_DBGDRCR(val) MCR14(val, 0, c0, c4, 2) #define WCP14_DBGBVR0(val) MCR14(val, 0, c0, c0, 4) #define WCP14_DBGBVR1(val) MCR14(val, 0, c0, c1, 4) #define WCP14_DBGBVR2(val) MCR14(val, 0, c0, c2, 4) #define WCP14_DBGBVR3(val) MCR14(val, 0, c0, c3, 4) #define WCP14_DBGBVR4(val) MCR14(val, 0, c0, c4, 4) #define WCP14_DBGBVR5(val) MCR14(val, 0, c0, c5, 4) #define WCP14_DBGBVR6(val) MCR14(val, 0, c0, c6, 4) #define WCP14_DBGBVR7(val) MCR14(val, 0, c0, c7, 4) #define WCP14_DBGBVR8(val) MCR14(val, 0, c0, c8, 4) #define WCP14_DBGBVR9(val) MCR14(val, 0, c0, c9, 4) #define WCP14_DBGBVR10(val) MCR14(val, 0, c0, c10, 4) #define WCP14_DBGBVR11(val) MCR14(val, 0, c0, c11, 4) #define WCP14_DBGBVR12(val) MCR14(val, 0, c0, c12, 4) #define WCP14_DBGBVR13(val) MCR14(val, 0, c0, c13, 4) #define WCP14_DBGBVR14(val) MCR14(val, 0, c0, c14, 4) #define WCP14_DBGBVR15(val) MCR14(val, 0, c0, c15, 4) #define WCP14_DBGBCR0(val) MCR14(val, 0, c0, c0, 5) #define WCP14_DBGBCR1(val) MCR14(val, 0, c0, c1, 5) #define WCP14_DBGBCR2(val) MCR14(val, 0, c0, c2, 5) #define WCP14_DBGBCR3(val) MCR14(val, 0, c0, c3, 5) #define WCP14_DBGBCR4(val) MCR14(val, 0, c0, c4, 5) #define WCP14_DBGBCR5(val) MCR14(val, 0, c0, c5, 5) #define WCP14_DBGBCR6(val) MCR14(val, 0, c0, c6, 5) #define 
WCP14_DBGBCR7(val) MCR14(val, 0, c0, c7, 5) #define WCP14_DBGBCR8(val) MCR14(val, 0, c0, c8, 5) #define WCP14_DBGBCR9(val) MCR14(val, 0, c0, c9, 5) #define WCP14_DBGBCR10(val) MCR14(val, 0, c0, c10, 5) #define WCP14_DBGBCR11(val) MCR14(val, 0, c0, c11, 5) #define WCP14_DBGBCR12(val) MCR14(val, 0, c0, c12, 5) #define WCP14_DBGBCR13(val) MCR14(val, 0, c0, c13, 5) #define WCP14_DBGBCR14(val) MCR14(val, 0, c0, c14, 5) #define WCP14_DBGBCR15(val) MCR14(val, 0, c0, c15, 5) #define WCP14_DBGWVR0(val) MCR14(val, 0, c0, c0, 6) #define WCP14_DBGWVR1(val) MCR14(val, 0, c0, c1, 6) #define WCP14_DBGWVR2(val) MCR14(val, 0, c0, c2, 6) #define WCP14_DBGWVR3(val) MCR14(val, 0, c0, c3, 6) #define WCP14_DBGWVR4(val) MCR14(val, 0, c0, c4, 6) #define WCP14_DBGWVR5(val) MCR14(val, 0, c0, c5, 6) #define WCP14_DBGWVR6(val) MCR14(val, 0, c0, c6, 6) #define WCP14_DBGWVR7(val) MCR14(val, 0, c0, c7, 6) #define WCP14_DBGWVR8(val) MCR14(val, 0, c0, c8, 6) #define WCP14_DBGWVR9(val) MCR14(val, 0, c0, c9, 6) #define WCP14_DBGWVR10(val) MCR14(val, 0, c0, c10, 6) #define WCP14_DBGWVR11(val) MCR14(val, 0, c0, c11, 6) #define WCP14_DBGWVR12(val) MCR14(val, 0, c0, c12, 6) #define WCP14_DBGWVR13(val) MCR14(val, 0, c0, c13, 6) #define WCP14_DBGWVR14(val) MCR14(val, 0, c0, c14, 6) #define WCP14_DBGWVR15(val) MCR14(val, 0, c0, c15, 6) #define WCP14_DBGWCR0(val) MCR14(val, 0, c0, c0, 7) #define WCP14_DBGWCR1(val) MCR14(val, 0, c0, c1, 7) #define WCP14_DBGWCR2(val) MCR14(val, 0, c0, c2, 7) #define WCP14_DBGWCR3(val) MCR14(val, 0, c0, c3, 7) #define WCP14_DBGWCR4(val) MCR14(val, 0, c0, c4, 7) #define WCP14_DBGWCR5(val) MCR14(val, 0, c0, c5, 7) #define WCP14_DBGWCR6(val) MCR14(val, 0, c0, c6, 7) #define WCP14_DBGWCR7(val) MCR14(val, 0, c0, c7, 7) #define WCP14_DBGWCR8(val) MCR14(val, 0, c0, c8, 7) #define WCP14_DBGWCR9(val) MCR14(val, 0, c0, c9, 7) #define WCP14_DBGWCR10(val) MCR14(val, 0, c0, c10, 7) #define WCP14_DBGWCR11(val) MCR14(val, 0, c0, c11, 7) #define WCP14_DBGWCR12(val) MCR14(val, 0, c0, c12, 7) 
#define WCP14_DBGWCR13(val) MCR14(val, 0, c0, c13, 7) #define WCP14_DBGWCR14(val) MCR14(val, 0, c0, c14, 7) #define WCP14_DBGWCR15(val) MCR14(val, 0, c0, c15, 7) #define WCP14_DBGBXVR0(val) MCR14(val, 0, c1, c0, 1) #define WCP14_DBGBXVR1(val) MCR14(val, 0, c1, c1, 1) #define WCP14_DBGBXVR2(val) MCR14(val, 0, c1, c2, 1) #define WCP14_DBGBXVR3(val) MCR14(val, 0, c1, c3, 1) #define WCP14_DBGBXVR4(val) MCR14(val, 0, c1, c4, 1) #define WCP14_DBGBXVR5(val) MCR14(val, 0, c1, c5, 1) #define WCP14_DBGBXVR6(val) MCR14(val, 0, c1, c6, 1) #define WCP14_DBGBXVR7(val) MCR14(val, 0, c1, c7, 1) #define WCP14_DBGBXVR8(val) MCR14(val, 0, c1, c8, 1) #define WCP14_DBGBXVR9(val) MCR14(val, 0, c1, c9, 1) #define WCP14_DBGBXVR10(val) MCR14(val, 0, c1, c10, 1) #define WCP14_DBGBXVR11(val) MCR14(val, 0, c1, c11, 1) #define WCP14_DBGBXVR12(val) MCR14(val, 0, c1, c12, 1) #define WCP14_DBGBXVR13(val) MCR14(val, 0, c1, c13, 1) #define WCP14_DBGBXVR14(val) MCR14(val, 0, c1, c14, 1) #define WCP14_DBGBXVR15(val) MCR14(val, 0, c1, c15, 1) #define WCP14_DBGOSLAR(val) MCR14(val, 0, c1, c0, 4) #define WCP14_DBGOSSRR(val) MCR14(val, 0, c1, c2, 4) #define WCP14_DBGOSDLR(val) MCR14(val, 0, c1, c3, 4) #define WCP14_DBGPRCR(val) MCR14(val, 0, c1, c4, 4) #define WCP14_DBGITCTRL(val) MCR14(val, 0, c7, c0, 4) #define WCP14_DBGCLAIMSET(val) MCR14(val, 0, c7, c8, 6) #define WCP14_DBGCLAIMCLR(val) MCR14(val, 0, c7, c9, 6) /* ETM Registers * * Available only in ETMv3.3, 3.4, 3.5 * ETMASICCR, ETMTECR2, ETMFFRR, ETMVDEVR, ETMVDCR1, ETMVDCR2, ETMVDCR3, * ETMDCVRn, ETMDCMRn * * Available only in ETMv3.5 as read only * ETMIDR2 * * Available only in ETMv3.5, PFTv1.0, 1.1 * ETMTSEVR, ETMVMIDCVR, ETMPDCR * * Read only * ETMCCR, ETMSCR, ETMIDR, ETMCCER, ETMOSLSR * ETMLSR, ETMAUTHSTATUS, ETMDEVID, ETMDEVTYPE, ETMPIDR4, ETMPIDR5, ETMPIDR6, * ETMPIDR7, ETMPIDR0, ETMPIDR1, ETMPIDR2, ETMPIDR2, ETMPIDR3, ETMCIDR0, * ETMCIDR1, ETMCIDR2, ETMCIDR3 * * Write only * ETMOSLAR, ETMLAR * Note: ETMCCER[11] controls WO nature of certain 
regs. Refer ETM arch spec. */ #define RCP14_ETMCR() MRC14(1, c0, c0, 0) #define RCP14_ETMCCR() MRC14(1, c0, c1, 0) #define RCP14_ETMTRIGGER() MRC14(1, c0, c2, 0) #define RCP14_ETMASICCR() MRC14(1, c0, c3, 0) #define RCP14_ETMSR() MRC14(1, c0, c4, 0) #define RCP14_ETMSCR() MRC14(1, c0, c5, 0) #define RCP14_ETMTSSCR() MRC14(1, c0, c6, 0) #define RCP14_ETMTECR2() MRC14(1, c0, c7, 0) #define RCP14_ETMTEEVR() MRC14(1, c0, c8, 0) #define RCP14_ETMTECR1() MRC14(1, c0, c9, 0) #define RCP14_ETMFFRR() MRC14(1, c0, c10, 0) #define RCP14_ETMFFLR() MRC14(1, c0, c11, 0) #define RCP14_ETMVDEVR() MRC14(1, c0, c12, 0) #define RCP14_ETMVDCR1() MRC14(1, c0, c13, 0) #define RCP14_ETMVDCR2() MRC14(1, c0, c14, 0) #define RCP14_ETMVDCR3() MRC14(1, c0, c15, 0) #define RCP14_ETMACVR0() MRC14(1, c0, c0, 1) #define RCP14_ETMACVR1() MRC14(1, c0, c1, 1) #define RCP14_ETMACVR2() MRC14(1, c0, c2, 1) #define RCP14_ETMACVR3() MRC14(1, c0, c3, 1) #define RCP14_ETMACVR4() MRC14(1, c0, c4, 1) #define RCP14_ETMACVR5() MRC14(1, c0, c5, 1) #define RCP14_ETMACVR6() MRC14(1, c0, c6, 1) #define RCP14_ETMACVR7() MRC14(1, c0, c7, 1) #define RCP14_ETMACVR8() MRC14(1, c0, c8, 1) #define RCP14_ETMACVR9() MRC14(1, c0, c9, 1) #define RCP14_ETMACVR10() MRC14(1, c0, c10, 1) #define RCP14_ETMACVR11() MRC14(1, c0, c11, 1) #define RCP14_ETMACVR12() MRC14(1, c0, c12, 1) #define RCP14_ETMACVR13() MRC14(1, c0, c13, 1) #define RCP14_ETMACVR14() MRC14(1, c0, c14, 1) #define RCP14_ETMACVR15() MRC14(1, c0, c15, 1) #define RCP14_ETMACTR0() MRC14(1, c0, c0, 2) #define RCP14_ETMACTR1() MRC14(1, c0, c1, 2) #define RCP14_ETMACTR2() MRC14(1, c0, c2, 2) #define RCP14_ETMACTR3() MRC14(1, c0, c3, 2) #define RCP14_ETMACTR4() MRC14(1, c0, c4, 2) #define RCP14_ETMACTR5() MRC14(1, c0, c5, 2) #define RCP14_ETMACTR6() MRC14(1, c0, c6, 2) #define RCP14_ETMACTR7() MRC14(1, c0, c7, 2) #define RCP14_ETMACTR8() MRC14(1, c0, c8, 2) #define RCP14_ETMACTR9() MRC14(1, c0, c9, 2) #define RCP14_ETMACTR10() MRC14(1, c0, c10, 2) #define 
RCP14_ETMACTR11() MRC14(1, c0, c11, 2) #define RCP14_ETMACTR12() MRC14(1, c0, c12, 2) #define RCP14_ETMACTR13() MRC14(1, c0, c13, 2) #define RCP14_ETMACTR14() MRC14(1, c0, c14, 2) #define RCP14_ETMACTR15() MRC14(1, c0, c15, 2) #define RCP14_ETMDCVR0() MRC14(1, c0, c0, 3) #define RCP14_ETMDCVR2() MRC14(1, c0, c2, 3) #define RCP14_ETMDCVR4() MRC14(1, c0, c4, 3) #define RCP14_ETMDCVR6() MRC14(1, c0, c6, 3) #define RCP14_ETMDCVR8() MRC14(1, c0, c8, 3) #define RCP14_ETMDCVR10() MRC14(1, c0, c10, 3) #define RCP14_ETMDCVR12() MRC14(1, c0, c12, 3) #define RCP14_ETMDCVR14() MRC14(1, c0, c14, 3) #define RCP14_ETMDCMR0() MRC14(1, c0, c0, 4) #define RCP14_ETMDCMR2() MRC14(1, c0, c2, 4) #define RCP14_ETMDCMR4() MRC14(1, c0, c4, 4) #define RCP14_ETMDCMR6() MRC14(1, c0, c6, 4) #define RCP14_ETMDCMR8() MRC14(1, c0, c8, 4) #define RCP14_ETMDCMR10() MRC14(1, c0, c10, 4) #define RCP14_ETMDCMR12() MRC14(1, c0, c12, 4) #define RCP14_ETMDCMR14() MRC14(1, c0, c14, 4) #define RCP14_ETMCNTRLDVR0() MRC14(1, c0, c0, 5) #define RCP14_ETMCNTRLDVR1() MRC14(1, c0, c1, 5) #define RCP14_ETMCNTRLDVR2() MRC14(1, c0, c2, 5) #define RCP14_ETMCNTRLDVR3() MRC14(1, c0, c3, 5) #define RCP14_ETMCNTENR0() MRC14(1, c0, c4, 5) #define RCP14_ETMCNTENR1() MRC14(1, c0, c5, 5) #define RCP14_ETMCNTENR2() MRC14(1, c0, c6, 5) #define RCP14_ETMCNTENR3() MRC14(1, c0, c7, 5) #define RCP14_ETMCNTRLDEVR0() MRC14(1, c0, c8, 5) #define RCP14_ETMCNTRLDEVR1() MRC14(1, c0, c9, 5) #define RCP14_ETMCNTRLDEVR2() MRC14(1, c0, c10, 5) #define RCP14_ETMCNTRLDEVR3() MRC14(1, c0, c11, 5) #define RCP14_ETMCNTVR0() MRC14(1, c0, c12, 5) #define RCP14_ETMCNTVR1() MRC14(1, c0, c13, 5) #define RCP14_ETMCNTVR2() MRC14(1, c0, c14, 5) #define RCP14_ETMCNTVR3() MRC14(1, c0, c15, 5) #define RCP14_ETMSQ12EVR() MRC14(1, c0, c0, 6) #define RCP14_ETMSQ21EVR() MRC14(1, c0, c1, 6) #define RCP14_ETMSQ23EVR() MRC14(1, c0, c2, 6) #define RCP14_ETMSQ31EVR() MRC14(1, c0, c3, 6) #define RCP14_ETMSQ32EVR() MRC14(1, c0, c4, 6) #define RCP14_ETMSQ13EVR() 
MRC14(1, c0, c5, 6) #define RCP14_ETMSQR() MRC14(1, c0, c7, 6) #define RCP14_ETMEXTOUTEVR0() MRC14(1, c0, c8, 6) #define RCP14_ETMEXTOUTEVR1() MRC14(1, c0, c9, 6) #define RCP14_ETMEXTOUTEVR2() MRC14(1, c0, c10, 6) #define RCP14_ETMEXTOUTEVR3() MRC14(1, c0, c11, 6) #define RCP14_ETMCIDCVR0() MRC14(1, c0, c12, 6) #define RCP14_ETMCIDCVR1() MRC14(1, c0, c13, 6) #define RCP14_ETMCIDCVR2() MRC14(1, c0, c14, 6) #define RCP14_ETMCIDCMR() MRC14(1, c0, c15, 6) #define RCP14_ETMIMPSPEC0() MRC14(1, c0, c0, 7) #define RCP14_ETMIMPSPEC1() MRC14(1, c0, c1, 7) #define RCP14_ETMIMPSPEC2() MRC14(1, c0, c2, 7) #define RCP14_ETMIMPSPEC3() MRC14(1, c0, c3, 7) #define RCP14_ETMIMPSPEC4() MRC14(1, c0, c4, 7) #define RCP14_ETMIMPSPEC5() MRC14(1, c0, c5, 7) #define RCP14_ETMIMPSPEC6() MRC14(1, c0, c6, 7) #define RCP14_ETMIMPSPEC7() MRC14(1, c0, c7, 7) #define RCP14_ETMSYNCFR() MRC14(1, c0, c8, 7) #define RCP14_ETMIDR() MRC14(1, c0, c9, 7) #define RCP14_ETMCCER() MRC14(1, c0, c10, 7) #define RCP14_ETMEXTINSELR() MRC14(1, c0, c11, 7) #define RCP14_ETMTESSEICR() MRC14(1, c0, c12, 7) #define RCP14_ETMEIBCR() MRC14(1, c0, c13, 7) #define RCP14_ETMTSEVR() MRC14(1, c0, c14, 7) #define RCP14_ETMAUXCR() MRC14(1, c0, c15, 7) #define RCP14_ETMTRACEIDR() MRC14(1, c1, c0, 0) #define RCP14_ETMIDR2() MRC14(1, c1, c2, 0) #define RCP14_ETMVMIDCVR() MRC14(1, c1, c0, 1) #define RCP14_ETMOSLSR() MRC14(1, c1, c1, 4) /* not available in PFTv1.1 */ #define RCP14_ETMOSSRR() MRC14(1, c1, c2, 4) #define RCP14_ETMPDCR() MRC14(1, c1, c4, 4) #define RCP14_ETMPDSR() MRC14(1, c1, c5, 4) #define RCP14_ETMITCTRL() MRC14(1, c7, c0, 4) #define RCP14_ETMCLAIMSET() MRC14(1, c7, c8, 6) #define RCP14_ETMCLAIMCLR() MRC14(1, c7, c9, 6) #define RCP14_ETMLSR() MRC14(1, c7, c13, 6) #define RCP14_ETMAUTHSTATUS() MRC14(1, c7, c14, 6) #define RCP14_ETMDEVID() MRC14(1, c7, c2, 7) #define RCP14_ETMDEVTYPE() MRC14(1, c7, c3, 7) #define RCP14_ETMPIDR4() MRC14(1, c7, c4, 7) #define RCP14_ETMPIDR5() MRC14(1, c7, c5, 7) #define 
RCP14_ETMPIDR6() MRC14(1, c7, c6, 7) #define RCP14_ETMPIDR7() MRC14(1, c7, c7, 7) #define RCP14_ETMPIDR0() MRC14(1, c7, c8, 7) #define RCP14_ETMPIDR1() MRC14(1, c7, c9, 7) #define RCP14_ETMPIDR2() MRC14(1, c7, c10, 7) #define RCP14_ETMPIDR3() MRC14(1, c7, c11, 7) #define RCP14_ETMCIDR0() MRC14(1, c7, c12, 7) #define RCP14_ETMCIDR1() MRC14(1, c7, c13, 7) #define RCP14_ETMCIDR2() MRC14(1, c7, c14, 7) #define RCP14_ETMCIDR3() MRC14(1, c7, c15, 7) #define WCP14_ETMCR(val) MCR14(val, 1, c0, c0, 0) #define WCP14_ETMTRIGGER(val) MCR14(val, 1, c0, c2, 0) #define WCP14_ETMASICCR(val) MCR14(val, 1, c0, c3, 0) #define WCP14_ETMSR(val) MCR14(val, 1, c0, c4, 0) #define WCP14_ETMTSSCR(val) MCR14(val, 1, c0, c6, 0) #define WCP14_ETMTECR2(val) MCR14(val, 1, c0, c7, 0) #define WCP14_ETMTEEVR(val) MCR14(val, 1, c0, c8, 0) #define WCP14_ETMTECR1(val) MCR14(val, 1, c0, c9, 0) #define WCP14_ETMFFRR(val) MCR14(val, 1, c0, c10, 0) #define WCP14_ETMFFLR(val) MCR14(val, 1, c0, c11, 0) #define WCP14_ETMVDEVR(val) MCR14(val, 1, c0, c12, 0) #define WCP14_ETMVDCR1(val) MCR14(val, 1, c0, c13, 0) #define WCP14_ETMVDCR2(val) MCR14(val, 1, c0, c14, 0) #define WCP14_ETMVDCR3(val) MCR14(val, 1, c0, c15, 0) #define WCP14_ETMACVR0(val) MCR14(val, 1, c0, c0, 1) #define WCP14_ETMACVR1(val) MCR14(val, 1, c0, c1, 1) #define WCP14_ETMACVR2(val) MCR14(val, 1, c0, c2, 1) #define WCP14_ETMACVR3(val) MCR14(val, 1, c0, c3, 1) #define WCP14_ETMACVR4(val) MCR14(val, 1, c0, c4, 1) #define WCP14_ETMACVR5(val) MCR14(val, 1, c0, c5, 1) #define WCP14_ETMACVR6(val) MCR14(val, 1, c0, c6, 1) #define WCP14_ETMACVR7(val) MCR14(val, 1, c0, c7, 1) #define WCP14_ETMACVR8(val) MCR14(val, 1, c0, c8, 1) #define WCP14_ETMACVR9(val) MCR14(val, 1, c0, c9, 1) #define WCP14_ETMACVR10(val) MCR14(val, 1, c0, c10, 1) #define WCP14_ETMACVR11(val) MCR14(val, 1, c0, c11, 1) #define WCP14_ETMACVR12(val) MCR14(val, 1, c0, c12, 1) #define WCP14_ETMACVR13(val) MCR14(val, 1, c0, c13, 1) #define WCP14_ETMACVR14(val) MCR14(val, 1, c0, c14, 1) 
#define WCP14_ETMACVR15(val) MCR14(val, 1, c0, c15, 1) #define WCP14_ETMACTR0(val) MCR14(val, 1, c0, c0, 2) #define WCP14_ETMACTR1(val) MCR14(val, 1, c0, c1, 2) #define WCP14_ETMACTR2(val) MCR14(val, 1, c0, c2, 2) #define WCP14_ETMACTR3(val) MCR14(val, 1, c0, c3, 2) #define WCP14_ETMACTR4(val) MCR14(val, 1, c0, c4, 2) #define WCP14_ETMACTR5(val) MCR14(val, 1, c0, c5, 2) #define WCP14_ETMACTR6(val) MCR14(val, 1, c0, c6, 2) #define WCP14_ETMACTR7(val) MCR14(val, 1, c0, c7, 2) #define WCP14_ETMACTR8(val) MCR14(val, 1, c0, c8, 2) #define WCP14_ETMACTR9(val) MCR14(val, 1, c0, c9, 2) #define WCP14_ETMACTR10(val) MCR14(val, 1, c0, c10, 2) #define WCP14_ETMACTR11(val) MCR14(val, 1, c0, c11, 2) #define WCP14_ETMACTR12(val) MCR14(val, 1, c0, c12, 2) #define WCP14_ETMACTR13(val) MCR14(val, 1, c0, c13, 2) #define WCP14_ETMACTR14(val) MCR14(val, 1, c0, c14, 2) #define WCP14_ETMACTR15(val) MCR14(val, 1, c0, c15, 2) #define WCP14_ETMDCVR0(val) MCR14(val, 1, c0, c0, 3) #define WCP14_ETMDCVR2(val) MCR14(val, 1, c0, c2, 3) #define WCP14_ETMDCVR4(val) MCR14(val, 1, c0, c4, 3) #define WCP14_ETMDCVR6(val) MCR14(val, 1, c0, c6, 3) #define WCP14_ETMDCVR8(val) MCR14(val, 1, c0, c8, 3) #define WCP14_ETMDCVR10(val) MCR14(val, 1, c0, c10, 3) #define WCP14_ETMDCVR12(val) MCR14(val, 1, c0, c12, 3) #define WCP14_ETMDCVR14(val) MCR14(val, 1, c0, c14, 3) #define WCP14_ETMDCMR0(val) MCR14(val, 1, c0, c0, 4) #define WCP14_ETMDCMR2(val) MCR14(val, 1, c0, c2, 4) #define WCP14_ETMDCMR4(val) MCR14(val, 1, c0, c4, 4) #define WCP14_ETMDCMR6(val) MCR14(val, 1, c0, c6, 4) #define WCP14_ETMDCMR8(val) MCR14(val, 1, c0, c8, 4) #define WCP14_ETMDCMR10(val) MCR14(val, 1, c0, c10, 4) #define WCP14_ETMDCMR12(val) MCR14(val, 1, c0, c12, 4) #define WCP14_ETMDCMR14(val) MCR14(val, 1, c0, c14, 4) #define WCP14_ETMCNTRLDVR0(val) MCR14(val, 1, c0, c0, 5) #define WCP14_ETMCNTRLDVR1(val) MCR14(val, 1, c0, c1, 5) #define WCP14_ETMCNTRLDVR2(val) MCR14(val, 1, c0, c2, 5) #define WCP14_ETMCNTRLDVR3(val) MCR14(val, 1, c0, c3, 
5) #define WCP14_ETMCNTENR0(val) MCR14(val, 1, c0, c4, 5) #define WCP14_ETMCNTENR1(val) MCR14(val, 1, c0, c5, 5) #define WCP14_ETMCNTENR2(val) MCR14(val, 1, c0, c6, 5) #define WCP14_ETMCNTENR3(val) MCR14(val, 1, c0, c7, 5) #define WCP14_ETMCNTRLDEVR0(val) MCR14(val, 1, c0, c8, 5) #define WCP14_ETMCNTRLDEVR1(val) MCR14(val, 1, c0, c9, 5) #define WCP14_ETMCNTRLDEVR2(val) MCR14(val, 1, c0, c10, 5) #define WCP14_ETMCNTRLDEVR3(val) MCR14(val, 1, c0, c11, 5) #define WCP14_ETMCNTVR0(val) MCR14(val, 1, c0, c12, 5) #define WCP14_ETMCNTVR1(val) MCR14(val, 1, c0, c13, 5) #define WCP14_ETMCNTVR2(val) MCR14(val, 1, c0, c14, 5) #define WCP14_ETMCNTVR3(val) MCR14(val, 1, c0, c15, 5) #define WCP14_ETMSQ12EVR(val) MCR14(val, 1, c0, c0, 6) #define WCP14_ETMSQ21EVR(val) MCR14(val, 1, c0, c1, 6) #define WCP14_ETMSQ23EVR(val) MCR14(val, 1, c0, c2, 6) #define WCP14_ETMSQ31EVR(val) MCR14(val, 1, c0, c3, 6) #define WCP14_ETMSQ32EVR(val) MCR14(val, 1, c0, c4, 6) #define WCP14_ETMSQ13EVR(val) MCR14(val, 1, c0, c5, 6) #define WCP14_ETMSQR(val) MCR14(val, 1, c0, c7, 6) #define WCP14_ETMEXTOUTEVR0(val) MCR14(val, 1, c0, c8, 6) #define WCP14_ETMEXTOUTEVR1(val) MCR14(val, 1, c0, c9, 6) #define WCP14_ETMEXTOUTEVR2(val) MCR14(val, 1, c0, c10, 6) #define WCP14_ETMEXTOUTEVR3(val) MCR14(val, 1, c0, c11, 6) #define WCP14_ETMCIDCVR0(val) MCR14(val, 1, c0, c12, 6) #define WCP14_ETMCIDCVR1(val) MCR14(val, 1, c0, c13, 6) #define WCP14_ETMCIDCVR2(val) MCR14(val, 1, c0, c14, 6) #define WCP14_ETMCIDCMR(val) MCR14(val, 1, c0, c15, 6) #define WCP14_ETMIMPSPEC0(val) MCR14(val, 1, c0, c0, 7) #define WCP14_ETMIMPSPEC1(val) MCR14(val, 1, c0, c1, 7) #define WCP14_ETMIMPSPEC2(val) MCR14(val, 1, c0, c2, 7) #define WCP14_ETMIMPSPEC3(val) MCR14(val, 1, c0, c3, 7) #define WCP14_ETMIMPSPEC4(val) MCR14(val, 1, c0, c4, 7) #define WCP14_ETMIMPSPEC5(val) MCR14(val, 1, c0, c5, 7) #define WCP14_ETMIMPSPEC6(val) MCR14(val, 1, c0, c6, 7) #define WCP14_ETMIMPSPEC7(val) MCR14(val, 1, c0, c7, 7) /* can be read only in ETMv3.4, 
ETMv3.5 */ #define WCP14_ETMSYNCFR(val) MCR14(val, 1, c0, c8, 7) #define WCP14_ETMEXTINSELR(val) MCR14(val, 1, c0, c11, 7) #define WCP14_ETMTESSEICR(val) MCR14(val, 1, c0, c12, 7) #define WCP14_ETMEIBCR(val) MCR14(val, 1, c0, c13, 7) #define WCP14_ETMTSEVR(val) MCR14(val, 1, c0, c14, 7) #define WCP14_ETMAUXCR(val) MCR14(val, 1, c0, c15, 7) #define WCP14_ETMTRACEIDR(val) MCR14(val, 1, c1, c0, 0) #define WCP14_ETMIDR2(val) MCR14(val, 1, c1, c2, 0) #define WCP14_ETMVMIDCVR(val) MCR14(val, 1, c1, c0, 1) #define WCP14_ETMOSLAR(val) MCR14(val, 1, c1, c0, 4) /* not available in PFTv1.1 */ #define WCP14_ETMOSSRR(val) MCR14(val, 1, c1, c2, 4) #define WCP14_ETMPDCR(val) MCR14(val, 1, c1, c4, 4) #define WCP14_ETMPDSR(val) MCR14(val, 1, c1, c5, 4) #define WCP14_ETMITCTRL(val) MCR14(val, 1, c7, c0, 4) #define WCP14_ETMCLAIMSET(val) MCR14(val, 1, c7, c8, 6) #define WCP14_ETMCLAIMCLR(val) MCR14(val, 1, c7, c9, 6) /* writes to this from CP14 interface are ignored */ #define WCP14_ETMLAR(val) MCR14(val, 1, c7, c12, 6) #endif
{ "pile_set_name": "Github" }
\version "2.10.3" violoncello = { \set Staff.instrumentName = "Violoncello." \set Staff.midiInstrument = "cello" \clef bass \key d \major \time 4/4 R1 R1 R1 R1 R1 R1 R1 R1 R1 R1 a,8\f r a, r a, r a, r R1 a,8\f r a, r a, r a, r a,4\p a, r b, r cis r d r4 g, r2 r4 a,4 r2 d16\p d d d d d d d d8 r8 r4 R1 a,16 a, a, a, a, a, a, a, a,8 r r4 R1 d16 d d d d d d d d8 r r4 R1 R1 a4_\markup { \italic "dimin." }( fis d b,) a,1\pp~ a,1 bes,16\ff bes, bes, bes, bes, bes, bes, bes, bes,8 r bes, r bes,8 r r4 r2 f16 f f f f f f f f8 r f r fis!4 r r2 g16 g g g g g g g g8 r r4 cis!16 cis cis cis cis cis cis cis cis8 r r4 d16 d d d d8 r8 bes,16 bes, bes, bes, bes,8 r a,4 r4 r2 r4 e16\sf f g a f8 r8 d16\sf e f d a,8 r8 r4 r2 r4 e16\sf f g a f8 r8 d16\sf e f d a,8 r8 r4 r2 R1 R1 R1 d4\p^\markup { \italic "pizz." } r4 r2 R1 a,4 r4 r2 R1 d4 r4 r2 R1 g,4 r a, r d4 r r2^\markup { "arco" } \set tupletSpannerDuration = #(ly:make-moment 1 4) \times 2/3 { a8_\markup { \italic "sempre p" }[ bes a] g[ a g] f[ g f] e[ f e] d[ cis d] e[ d e] f[ g f] d[ e d] e[ f e] d[ e d] cis[ b, cis] d[ cis d] cis[ d cis] a,[ cis e] a[ a a] a[ a a] a[ bes a] g[ a g] f[ g f] e[ f e] d[ e d] e[ f e] f[ g f] d[ e f] g[ a g] f[ g f] e[ f e] c[ d e] f[ c a,] f,[ a, c] f[ f f] f[ f f] ees[ f ees] d[ ees d] cis![ d cis] a,[ b,! cis] d[ f a] d'[ a f] d[ e! d] c[ d c] \setTextCresc bes,\<[ d g] bes[ bes bes] c[ e g] c'[ c' c'] f[ c a,] f,[ a, c] f[ f f] f[ f f] g,[ bes, e] g[ g g] a,\!\f[ cis! 
e] a\sf[ a a] } R1 fis,!4\pp\staccato( fis,\staccato fis,\staccato fis,\staccato) g,1 fis,4\staccato( fis,\staccato fis,\staccato fis,\staccato) g,1 \setTextCresc fis,8\< fis fis fis fis fis fis fis fis fis fis fis fis fis fis fis g,\!\f g g g g g g g g,\f g g g g g g g fis,8\ff fis fis fis fis8 r8 r4 g,8 g g g g4 r4 a,4\f r a,\f r a,\f r a,\f r d4\ff r r2 R1 a,2( cis4 e g gis a g8 e) d4 r4 r2 R1 a,2( cis4 e g gis a\sf g8 e) d4( fis a\sf g8 e) d4( fis a2\sf\>)~ a4\!\p g( e cis) a,1~ a,1~ a,1 a,4\f r r2 a,4\f r r2 a,4\p r r2 R1 R1 R1 R1 R1 R1 a,1\p d4 r4 r2 R1 R1 R1 R1 R1 R1 R1 R1 R1 a,8\sfz r a, r a, r a, r R1 a,8\sfz r a, r a, r a, r a,4\p a, r b, r cis r d r4 g, r2 r4 a, r2 d16\p d d d d d d d d8 r8 r4 R1 a,16 a, a, a, a, a, a, a, a,8 r8 r4 R1 d16\f d d d d d d d d8 r r4 R1 a,16 a, a, a, a, a, a, a, a,8 r r4 R1 d16\p d d d d d d d d8 r r4 R1 c16 c c c c c c c c8 r r4 R1 f16 f f f f f f f f2~ f1~ f1~ f1 e4 r r2 r4 r8 e8 a4 r8 a8 e4 r r2 r4 r8 e8 a[ a] r a e4\f r8 a, e4 r8 a, e4 r r2 R1 R1 R1 R1 a4\p^\markup { "pizz." } r r2 R1 e4 r r2 R1 a4 r r2 R1 d4 r e r a, r r2 a,4 r a, r a, r r a gis a e a e4 r r2 a,4 r a, r a, r r a f r g! r c r r2 d4 r e r a, a r g! \setTextCresc f4\< r g r c r r2 d4 r e\!\f r R1 cis'!4\p\staccato( cis'\staccato cis'\staccato cis'\staccato) d'1 cis'4\staccato( cis'\staccato cis'\staccato cis'\staccato) d'1 \set crescendoText = \markup { \italic "poco cresc." } \set crescendoSpanner = #'dashed-line cis'4\< cis' r cis' r cis' r cis' d8[ d d d] d4 r r8 d8[ d d] d4 r cis4\!\f r r2 d4\f r r2 e4\f r e r e r e r a,4\p r r2 R1 e,2( gis,4 b, d dis e d8 b,) a,4 r r2 R1 e,2( gis,4 b, d dis e d8 b,) a,4( cis fis e8 cis) b,4( fis a g!8 e) d4( fis a g8 e) d4 r d r e r e r a4_\markup { \italic "poco cresc." } r e r a4 r fis r b r a r d r a, r d4\f r r2 R1 dis4\f r r2 R1 e4\p r4 r2 R1 R1 R1 R1 R1 R1 R1 f,1\pp R1 R1 g,8\pp r g, r g, r g, r c4 c r a, r d r f e4 r r2 R1 R1 R1 R1 R1 R1 R1 R1 R1 e1_\markup { \italic "cresc." 
} f16\ff f f f f f f f f8 r f r f8 r r4 r2 c16 c c c c c c c c8 r c r cis!8 r r4 r2 d16 d d d d d d d d8 r r4 gis,16 gis, gis, gis, gis, gis, gis, gis, gis,8 r r4 a,16 a a a a8 r f,16 f f f f8 r e8 r r4 r2 r4 b,16\sf c d e c8 r a,16\sf b, c a, e8 r r4 r2 r4 b,16\sf c d e c8 r a,16\sf b, c a, e8 r r4 r2 R1 R1 R1 a,4\p^\markup { "pizz." } r4 r2 R1 e4 r r2 R1 a,4 r r2 R1 d4 r e r a,4 r r2 a,2\ff^\markup { "arco." } a,2 a,1 gis,4( a, e, a,) e,1 a,2 a,2 a,1 f,2 g,!2 c1 d2 e2 a,4_\markup { \italic "sempre ff" }( a2 g!4) f2 g2 c1 d2 e2 R1 cis!4\staccato_\markup { \italic "sempre f" }( cis\staccato cis\staccato cis\staccato) d1 cis4\staccato( cis\staccato cis\staccato cis\staccato) c8[ c c c] c[ c c c] c8[ c c c] c[ c c c] c8\sf[ c c c] c[ c c c] c\sf[ c c c] c[ c c c] c8[ c c c] c4 r f,8[ f f f] f4 r4 g,4 r g, r g, r g, r c4_\markup { \italic "sempre f" } r4 r2 R1 g,2( b,4 d f fis g f8 d) c4 r r2 R1 g,2( b,4 d f4 fis g\sf f8 d) c4( e g\sf f8 d) c4( e g2\sf\>)~ g4\!( f4 d b,) g,1~ g,1~ g,1 g,4\f r r2 g,4\f r r2 g,4\p r r2 R1 R1 R1 R1 R1 R1 R1 R1 R1 R1 g,1\p fis,8 r fis, r fis, r fis, r fis,1~ fis,1~ fis,1~ fis,4 r r2 b,1~ b,1~ b,1 b,8 r b, r b, r b, r e4 r r2 e8 r e r e r e r a,4 r r2 a,8 r a, r a, r a, r d4 r r2 d8 r d r d r d r g,4 r g,8[ g, g, g,] g,4 r g,8[ g, g, g,] c4 r c8[ c c c] c4 r c8[ c c c] r4 d8 d r4 d8 d r4 c8 c r4 c8 c bes,4 r g,8[ g, g, g,] g,4 r g,8[ g, g, g,] c4 r c8[ c c c] c4 r c8[ c c c] d4_\markup { \italic "cresc." } r r2 d4 r r2 d4\f r r2 R1 R1 R1 g,1\pp bes,1 a,1 d, g, g f1_\markup { \italic "cresc." } bes,\pp ees g f bes, ees ees cis!_\markup { \italic "cresc." } a,\pp d1 f e! a, d2 r4 d d2 r4 d cis!2 r4 cis c2 r4 c b,!4\staccato( b,\staccato b,\staccato b,\staccato) bes,4 bes, bes, bes, a,1\pp~ a,~ a, ~ a,~ a,8 r r4 r2 R1 r4 cis8^\markup { "pizz." } r cis r cis r r2 r4 a,8\f^\markup { "arco" } r8 d8\ff r d r d r d r d1\sf~ d2. d4_\markup { "sempre ff" }( g fis e d) a,8 r a, r a, r a, r a,1~ a,2. 
a,4 a,4 a, a, a, d a, d r R1 a,8 r a, r a, r a, r R1 a,8 r a, r a, r a, r a,4_\markup { "sempre f" } a, r b, r cis r d g,4 r r2 a,4 r r2 d16 d d d d d d d d8 r r4 R1 a,16 a, a, a, a, a, a, a, a,8 r r4 R1 d16\p d d d d d d d d8 r r4 R1 a,16 a, a, a, a, a, a, a, a,8 r r4 R1 d16 d d d d d d d d8 r r4 R1 R1 r4 b4( g e) d1 d1 g,4 r r2 d'4\p( b g e) d1~ d1 dis16 dis dis dis dis dis dis dis dis8 r r4 R1 e16 e e e e e e e e8 r r4 R1 a,2~ a,8([ b, cis d] e[ fis g a] b[ cis') a,\staccato a,\staccato] bes,1~ bes,1 a,4 r r2 r4 r8 a, d4 r8 d a,4 r r2 r4 r8 a, d d r d a,4\f r8 d a,4 r8 d a,4 r r2 R1 R1 R1 R1 d4\p^\markup { "pizz." } r4 r2 R1 cis4 d a, d a,4 r r2 d4 r r2 R1 g,4 r a, r d4 r r2 d4 r d r d r r2 cis4 d a, d a,4 r r2 d4 r d r d r r d bes, r c r f, r r f, g,4 r a, r d d' r c' \setTextCresc bes4\< r c' r f r r f g, r a,4\!\f r4 R1 \clef tenor fis'4\p\staccato( fis'\staccato fis'\staccato fis'\staccato) g'1 fis'4\staccato( fis'\staccato fis'\staccato fis'\staccato) g'1 \set crescendoText = \markup { \italic "poco cresc." } \set crescendoSpanner = #'dashed-line fis'4\< fis' r fis' r fis' r fis' \clef bass g8[ g g g] g4 r r8 g[ g g] g4 r fis4\f r r2 g,4 r r2 a,4 r a, r a, r a, r d4\p r r2 R1 a,2( cis4 e g gis a g8 e) d4 r r2 R1 a,2( cis4 e g gis a g8 e) d4( fis b a8 fis) e4( b d' c'8 a) g4( b d' c'8 a) g4 r g r a4 r a, r \set crescendoText = \markup { \italic "poco cresc." } \set crescendoSpanner = #'dashed-line d4\< r4 a, r d r b, r e r d r g r d\! r g4\f r r2 R1 gis,4 r r2 R1 a,4\p r r2 R1 R1 R1 R1 R1 R1 R1 bes,1\pp R1 R1 c8\pp r c r c r c r f4 f r d r g r bes a4 r r2 R1 R1 R1 R1 R1 R1 R1 R1 a,1_\markup { \italic "cresc." 
} bes,16\ff bes, bes, bes, bes, bes, bes, bes, bes,8 r bes, r bes,4 r r2 f16 f f f f f f f f8 r f r fis!4 r r2 g16 g g g g g g g g8 r r4 cis!16 cis cis cis cis cis cis cis cis8 r r4 d16 d d d d8 r bes,16 bes, bes, bes, bes,8 r a,4 r4 r2 r4 e16\sf f g a f8 r8 d16\sf e f d a,8 r8 r4 r2 r4 e16\sf f g a f8 r8 d16\sf e f d a,8 r8 d16 e f d e8 r e16 f g e f8 r fis,!16\ff g, a, fis, g,8 r gis,16 a, b, gis, a,2^\trill^\fermata r2^\fermata d4\p^\markup { "pizz." } r4 r2 d4 r r2 cis4 d a, d a,4 r r2 d4 r r2 d4 r r2 g,4 r a, r d4 r r2 g,4 r a, r d4 r r2 g,4_\markup { \italic "dimin." } r r2 a,4 r r2 R1 R1 a,2( cis4 e g gis a g8 e) d4 r r2 R1 a,2( cis4 e g gis a g8 e) \setTextCresc d4\<( fis a g8 e) d4( fis a g8 e) d4( fis a\!\f g8 e) d4\ff r d r d4 r r2 \bar "|." }
{ "pile_set_name": "Github" }
<div id="playground" style="visibility:hidden"><h1 id="_H1">FCKW3CRange Test</h1><p id="_Para">This is <b id="_B">some</b> text.</p><p>Another paragraph.</p></div> <div id="playground2" style="visibility: hidden"> <p id="_P"> This document contains various markup features commonly used by content editors or "<span id="_Span" lang="fr">r&eacute;dacteurs de contenu</span>" as they are called in <a id="_A" href="http://en.wikipedia.org/wiki/France" title="Wikipedia article about France"> France</a>.<br /> It is important that a <acronym id="_Acronym" title="what you see is what you get">WYSIWYG</acronym> tool has features that are easily available for the editor. If not, there is a risk that content won't receive <strong id="_Strong">proper</strong> markup. Examples of commonly found content are:</p> </div>
{ "pile_set_name": "Github" }
@component('mail::message')
# Team Deleted

This team no longer exists.

@component('mail::panel')
Some features and services may not be accessible if your subscription was based on the team plan. Log in to your account to see the changes.
@endcomponent

@component('mail::button', ['url' => route('account.index')])
View my account
@endcomponent

Thanks,<br>
{{ config('app.name') }}
@endcomponent
{ "pile_set_name": "Github" }
[ { "time": "06:14", "quote_first": "It's ", "quote_time_case": "06:13", "quote_last": " .........Ma says I ought to be wrapped up in Rug already, Old Nick might possibly come.", "title": "Room", "author": "Emma Donoghue" } ]
{ "pile_set_name": "Github" }
#!/usr/bin/env python

"""
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

import string

from lib.core.enums import PRIORITY

__priority__ = PRIORITY.LOWEST

def tamper(payload, **kwargs):
    """
    Unicode-escapes non-encoded characters in a given payload (not processing already encoded)

    Notes:
        * Useful to bypass weak filtering and/or WAFs in JSON contexts

    >>> tamper('SELECT FIELD FROM TABLE')
    '\\\\u0053\\\\u0045\\\\u004C\\\\u0045\\\\u0043\\\\u0054\\\\u0020\\\\u0046\\\\u0049\\\\u0045\\\\u004C\\\\u0044\\\\u0020\\\\u0046\\\\u0052\\\\u004F\\\\u004D\\\\u0020\\\\u0054\\\\u0041\\\\u0042\\\\u004C\\\\u0045'
    """

    # Falsy payloads (None, "") are returned untouched, matching the
    # pass-through convention of sqlmap tamper scripts.
    if not payload:
        return payload

    # Accumulate escaped chunks in a list and join once at the end instead
    # of repeatedly concatenating strings (avoids quadratic behavior).
    chunks = []
    i = 0
    length = len(payload)

    while i < length:
        # A '%XY' sequence with two hex digits is treated as an
        # already-URL-encoded byte: re-emit it as '\u00XY' (preserving the
        # original hex-digit case) and consume all three characters.
        if payload[i] == '%' and i < length - 2 and payload[i + 1:i + 2] in string.hexdigits and payload[i + 2:i + 3] in string.hexdigits:
            chunks.append("\\u00%s" % payload[i + 1:i + 3])
            i += 3
        else:
            # Any other character is unicode-escaped from its code point,
            # uppercase and zero-padded to four hex digits.
            chunks.append('\\u%.4X' % ord(payload[i]))
            i += 1

    return "".join(chunks)
{ "pile_set_name": "Github" }
<!-- Copyright (C) 2010 Orbeon, Inc. This program is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. The full text of the license is available at http://www.gnu.org/copyleft/lesser.html --> <?orbeon-serializer status-code="500"?> <html xsl:version="2.0" xmlns="http://www.w3.org/1999/xhtml" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:p="http://www.orbeon.com/oxf/pipeline"> <xsl:variable name="css-uri" select="p:split(normalize-space(p:property('oxf.fr.css.uri.*.*')))" as="xs:string*"/> <xsl:variable name="custom-css-uri" select="p:split(normalize-space(p:property('oxf.fr.css.custom.uri.*.*')))" as="xs:string*"/> <head> <title>Orbeon Forms Error</title> <!-- Form Runner CSS stylesheets --> <xsl:for-each select="$css-uri, $custom-css-uri"> <link rel="stylesheet" href="{.}" type="text/css" media="all"/> </xsl:for-each> </head> <body> <div id="fr-view" class="fr-view"> <div id="doc4"> <div class="fr-header"> <!-- Logo --> <div class="fr-logo"> <img src="/apps/fr/style/orbeon-logo-trimmed-transparent-30.png" alt="Logo"/> </div> </div> <div id="bd" class="fr-container"> <div id="yui-main"> <div class="yui-b"> <div class="yui-g fr-top"> <h1 class="fr-form-title"> Orbeon Forms Error </h1> </div> <div class="yui-g fr-separator">&#160;</div> <div class="yui-g fr-body"> <div class="fr-error-message"> <a name="fr-form"/> <p> Oops, something bad happened! </p> <p> We apologize about that. Please let us know about this error by sending an email to <a href="mailto:info@orbeon.com">info@orbeon.com</a>. 
We would definitely appreciate it! </p> <p> You can then do the following: </p> <ul> <li>If this error occurred when you followed a link, press your browser's Back button.</li> <li> If the above does not work, try reloading the page: <ul> <li> With Firefox: hold down the <code>shift</code> key and click the Reload button in your browser toolbar. </li> <li> With Safari and Chrome: click the Reload button in your browser toolbar. </li> <li> With Internet Explorer: hold down the <code>control</code> key and click the Reload button in your browser toolbar. </li> </ul> </li> <li>Return to the <a href="http://www.orbeon.com/">Orbeon web site</a>.</li> <li>Return to the <a href="/">Orbeon Forms demos</a>.</li> </ul> </div> </div> <div class="yui-g fr-separator">&#160;</div> </div> </div> </div> <div id="ft" class="fr-footer"> <xsl:variable name="version" as="xs:string?" select="version:versionStringIfAllowedOrEmpty()" xmlns:version="java:org.orbeon.oxf.common.Version"/> <xsl:if test="$version"> <div class="fr-orbeon-version"><xsl:value-of select="$version"/></div> </xsl:if> </div> </div> </div> </body> </html>
{ "pile_set_name": "Github" }
/* * Tencent is pleased to support the open source community by making wwsearch * available. * * Copyright (C) 2018-present Tencent. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * https://opensource.org/licenses/Apache-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ #pragma once #include "doc_iterator.h" #include "search_slice.h" #include "serialize.h" #include "storage_type.h" namespace wwsearch { // 1 byte in header // no order insert. struct DocListHeader { // version = 0 -> default,fixed size uint8_t version : 4; // flag for version used uint8_t flag : 3; uint8_t extend : 1; DocListHeader() : version(0), flag(0), extend(0) {} } __attribute__((packed)); typedef uint8_t DocumentState; enum kDocumentIDState { kDocumentStateOK = 0, kDocumentStateDelete = 1 }; class DocListWriterCodec : public SerializeAble { public: virtual ~DocListWriterCodec() {} virtual void AddDocID(const DocumentID& doc_id, DocumentState state) = 0; virtual std::string DebugString() = 0; }; class DocListReaderCodec : public DocIdSetIterator { private: size_t priority_; // used for merge. public: virtual ~DocListReaderCodec() {} virtual DocumentState& State() = 0; inline void SetPriority(size_t v) { this->priority_ = v; } inline size_t GetPriority() { return this->priority_; } private: }; typedef bool (*DocListReaderCodecGreater)(DocListReaderCodec* left, DocListReaderCodec* right); } // namespace wwsearch
{ "pile_set_name": "Github" }
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """Namespace of all TIR analysis utils.""" # pylint: disable=wildcard-import, invalid-name from .analysis import *
{ "pile_set_name": "Github" }
# mach: crisv0 crisv3 crisv8 crisv10 crisv32 # output: 2\n2\nffff\nffffffff\n5432f789\n2\n2\nffff\nffff\nffff\nf789\n2\n2\nff\nff\n89\nfeda4953\nfeda4962\n0\n0\n .include "testutils.inc" start moveq -1,r3 moveq 2,r4 bound.d r4,r3 test_move_cc 0 0 0 0 checkr3 2 moveq 2,r3 moveq -1,r4 bound.d r4,r3 test_move_cc 0 0 0 0 checkr3 2 move.d 0xffff,r4 move.d r4,r3 bound.d r4,r3 test_move_cc 0 0 0 0 checkr3 ffff moveq -1,r4 move.d r4,r3 bound.d r4,r3 test_move_cc 1 0 0 0 checkr3 ffffffff move.d 0x5432f789,r4 move.d 0x78134452,r3 bound.d r4,r3 test_move_cc 0 0 0 0 checkr3 5432f789 moveq -1,r3 moveq 2,r4 bound.w r4,r3 test_move_cc 0 0 0 0 checkr3 2 moveq 2,r3 moveq -1,r4 bound.w r4,r3 test_move_cc 0 0 0 0 checkr3 2 moveq -1,r3 bound.w r3,r3 test_move_cc 0 0 0 0 checkr3 ffff move.d 0xffff,r4 move.d r4,r3 bound.w r4,r3 test_move_cc 0 0 0 0 checkr3 ffff move.d 0xfedaffff,r4 move.d r4,r3 bound.w r4,r3 test_move_cc 0 0 0 0 checkr3 ffff move.d 0x5432f789,r4 move.d 0x78134452,r3 bound.w r4,r3 test_move_cc 0 0 0 0 checkr3 f789 moveq -1,r3 moveq 2,r4 bound.b r4,r3 test_move_cc 0 0 0 0 checkr3 2 moveq 2,r3 moveq -1,r4 bound.b r4,r3 test_move_cc 0 0 0 0 checkr3 2 move.d 0xff,r4 move.d r4,r3 bound.b r4,r3 test_move_cc 0 0 0 0 checkr3 ff move.d 0xfeda49ff,r4 move.d r4,r3 bound.b r4,r3 test_move_cc 0 0 0 0 checkr3 ff move.d 0x5432f789,r4 move.d 0x78134452,r3 bound.b r4,r3 test_move_cc 0 0 0 0 checkr3 89 move.d 0xfeda4956,r3 move.d 0xfeda4953,r4 bound.d r4,r3 test_move_cc 1 0 0 0 checkr3 feda4953 move.d 0xfeda4962,r3 move.d 0xfeda4963,r4 bound.d r4,r3 test_move_cc 1 0 0 0 checkr3 feda4962 move.d 0xfeda4956,r3 move.d 0,r4 bound.d r4,r3 test_move_cc 0 1 0 0 checkr3 0 move.d 0xfeda4956,r4 move.d 0,r3 bound.d r4,r3 test_move_cc 0 1 0 0 checkr3 0 quit
{ "pile_set_name": "Github" }
var baseGet = require('./_baseGet'); /** * Gets the value at `path` of `object`. If the resolved value is * `undefined`, the `defaultValue` is returned in its place. * * @static * @memberOf _ * @since 3.7.0 * @category Object * @param {Object} object The object to query. * @param {Array|string} path The path of the property to get. * @param {*} [defaultValue] The value returned for `undefined` resolved values. * @returns {*} Returns the resolved value. * @example * * var object = { 'a': [{ 'b': { 'c': 3 } }] }; * * _.get(object, 'a[0].b.c'); * // => 3 * * _.get(object, ['a', '0', 'b', 'c']); * // => 3 * * _.get(object, 'a.b.c', 'default'); * // => 'default' */ function get(object, path, defaultValue) { var result = object == null ? undefined : baseGet(object, path); return result === undefined ? defaultValue : result; } module.exports = get;
{ "pile_set_name": "Github" }
if nil == ccexp then return end ccexp.VideoPlayerEvent = { PLAYING = 0, PAUSED = 1, STOPPED= 2, COMPLETED =3, }
{ "pile_set_name": "Github" }
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. // Efficient JavaScript based implementation of a linked hash map used as a // backing map for constant maps and the [LinkedHashMap] patch part of dart._js_helper; // DDC-specific, just use Object-backed maps //const _USE_ES6_MAPS = const bool.fromEnvironment("dart2js.use.es6.maps"); class JsLinkedHashMap<K, V> implements LinkedHashMap<K, V>, InternalMap { int _length = 0; // The hash map contents are divided into three parts: one part for // string keys, one for numeric keys, and one for the rest. String // and numeric keys map directly to their linked cells, but the rest // of the entries are stored in bucket lists of the form: // // [cell-0, cell-1, ...] // // where all keys in the same bucket share the same hash code. var _strings; var _nums; var _rest; // The keys and values are stored in cells that are linked together // to form a double linked list. LinkedHashMapCell/*<K, V>*/ _first; LinkedHashMapCell/*<K, V>*/ _last; // We track the number of modifications done to the key set of the // hash map to be able to throw when the map is modified while being // iterated over. int _modifications = 0; // static bool get _supportsEs6Maps { // return JS('returns:bool;depends:none;effects:none;throws:never;gvn:true', // 'typeof Map != "undefined"'); // } JsLinkedHashMap(); /// If ES6 Maps are available returns a linked hash-map backed by an ES6 Map. // @ForceInline() factory JsLinkedHashMap.es6() { // return (_USE_ES6_MAPS && JsLinkedHashMap._supportsEs6Maps) // ? 
new Es6LinkedHashMap<K, V>() // : new JsLinkedHashMap<K, V>(); return new JsLinkedHashMap<K, V>(); } int get length => _length; bool get isEmpty => _length == 0; bool get isNotEmpty => !isEmpty; Iterable<K> get keys { return new LinkedHashMapKeyIterable<K>(this); } Iterable<V> get values { return new MappedIterable<K, V>(keys, (each) => this[each]); } bool containsKey(Object key) { if (_isStringKey(key)) { var strings = _strings; if (strings == null) return false; return _containsTableEntry(strings, key); } else if (_isNumericKey(key)) { var nums = _nums; if (nums == null) return false; return _containsTableEntry(nums, key); } else { return internalContainsKey(key); } } bool internalContainsKey(Object key) { var rest = _rest; if (rest == null) return false; var bucket = _getBucket(rest, key); return internalFindBucketIndex(bucket, key) >= 0; } bool containsValue(Object value) { return keys.any((each) => this[each] == value); } void addAll(Map<K, V> other) { other.forEach((K key, V value) { this[key] = value; }); } V operator [](Object key) { if (_isStringKey(key)) { var strings = _strings; if (strings == null) return null; LinkedHashMapCell/*<K, V>*/ cell = _getTableCell(strings, key); return (cell == null) ? null : cell.hashMapCellValue; } else if (_isNumericKey(key)) { var nums = _nums; if (nums == null) return null; LinkedHashMapCell/*<K, V>*/ cell = _getTableCell(nums, key); return (cell == null) ? 
null : cell.hashMapCellValue; } else { return internalGet(key); } } V internalGet(Object key) { var rest = _rest; if (rest == null) return null; var bucket = _getBucket(rest, key); int index = internalFindBucketIndex(bucket, key); if (index < 0) return null; LinkedHashMapCell/*<K, V>*/ cell = JS('var', '#[#]', bucket, index); return cell.hashMapCellValue; } void operator []=(K key, V value) { if (_isStringKey(key)) { var strings = _strings; if (strings == null) _strings = strings = _newHashTable(); _addHashTableEntry(strings, key, value); } else if (_isNumericKey(key)) { var nums = _nums; if (nums == null) _nums = nums = _newHashTable(); _addHashTableEntry(nums, key, value); } else { internalSet(key, value); } } void internalSet(K key, V value) { var rest = _rest; if (rest == null) _rest = rest = _newHashTable(); var hash = internalComputeHashCode(key); var bucket = _getTableBucket(rest, hash); if (bucket == null) { LinkedHashMapCell/*<K, V>*/ cell = _newLinkedCell(key, value); _setTableEntry(rest, hash, JS('var', '[#]', cell)); } else { int index = internalFindBucketIndex(bucket, key); if (index >= 0) { LinkedHashMapCell/*<K, V>*/ cell = JS('var', '#[#]', bucket, index); cell.hashMapCellValue = value; } else { LinkedHashMapCell/*<K, V>*/ cell = _newLinkedCell(key, value); JS('void', '#.push(#)', bucket, cell); } } } V putIfAbsent(K key, V ifAbsent()) { if (containsKey(key)) return this[key]; V value = ifAbsent(); this[key] = value; return value; } V remove(Object key) { if (_isStringKey(key)) { return _removeHashTableEntry(_strings, key); } else if (_isNumericKey(key)) { return _removeHashTableEntry(_nums, key); } else { return internalRemove(key); } } V internalRemove(Object key) { var rest = _rest; if (rest == null) return null; var bucket = _getBucket(rest, key); int index = internalFindBucketIndex(bucket, key); if (index < 0) return null; // Use splice to remove the [cell] element at the index and // unlink the cell before returning its value. 
LinkedHashMapCell/*<K, V>*/ cell = JS('var', '#.splice(#, 1)[0]', bucket, index); _unlinkCell(cell); // TODO(kasperl): Consider getting rid of the bucket list when // the length reaches zero. return cell.hashMapCellValue; } void clear() { if (_length > 0) { _strings = _nums = _rest = _first = _last = null; _length = 0; _modified(); } } void forEach(void action(K key, V value)) { LinkedHashMapCell/*<K, V>*/ cell = _first; int modifications = _modifications; while (cell != null) { action(cell.hashMapCellKey, cell.hashMapCellValue); if (modifications != _modifications) { throw new ConcurrentModificationError(this); } cell = cell._next; } } void _addHashTableEntry(var table, K key, V value) { LinkedHashMapCell/*<K, V>*/ cell = _getTableCell(table, key); if (cell == null) { _setTableEntry(table, key, _newLinkedCell(key, value)); } else { cell.hashMapCellValue = value; } } V _removeHashTableEntry(var table, Object key) { if (table == null) return null; LinkedHashMapCell/*<K, V>*/ cell = _getTableCell(table, key); if (cell == null) return null; _unlinkCell(cell); _deleteTableEntry(table, key); return cell.hashMapCellValue; } void _modified() { // Value cycles after 2^30 modifications so that modification counts are // always unboxed (Smi) values. Modification detection will be missed if you // make exactly some multiple of 2^30 modifications between advances of an // iterator. _modifications = (_modifications + 1) & 0x3ffffff; } // Create a new cell and link it in as the last one in the list. LinkedHashMapCell/*<K, V>*/ _newLinkedCell(K key, V value) { LinkedHashMapCell/*<K, V>*/ cell = new LinkedHashMapCell/*<K, V>*/(key, value); if (_first == null) { _first = _last = cell; } else { LinkedHashMapCell/*<K, V>*/ last = _last; cell._previous = last; _last = last._next = cell; } _length++; _modified(); return cell; } // Unlink the given cell from the linked list of cells. 
void _unlinkCell(LinkedHashMapCell/*<K, V>*/ cell) { LinkedHashMapCell/*<K, V>*/ previous = cell._previous; LinkedHashMapCell/*<K, V>*/ next = cell._next; if (previous == null) { assert(cell == _first); _first = next; } else { previous._next = next; } if (next == null) { assert(cell == _last); _last = previous; } else { next._previous = previous; } _length--; _modified(); } static bool _isStringKey(var key) { return key is String; } static bool _isNumericKey(var key) { // Only treat unsigned 30-bit integers as numeric keys. This way, // we avoid converting them to strings when we use them as keys in // the JavaScript hash table object. return key is num && JS('bool', '(# & 0x3ffffff) === #', key, key); } int internalComputeHashCode(var key) { // We force the hash codes to be unsigned 30-bit integers to avoid // issues with problematic keys like '__proto__'. Another option // would be to throw an exception if the hash code isn't a number. return JS('int', '# & 0x3ffffff', key.hashCode); } List<dynamic/*=LinkedHashMapCell<K, V>*/ > _getBucket(var table, var key) { var hash = internalComputeHashCode(key); return _getTableBucket(table, hash); } int internalFindBucketIndex(var bucket, var key) { if (bucket == null) return -1; int length = JS('int', '#.length', bucket); for (int i = 0; i < length; i++) { LinkedHashMapCell/*<K, V>*/ cell = JS('var', '#[#]', bucket, i); if (cell.hashMapCellKey == key) return i; } return -1; } String toString() => Maps.mapToString(this); /*=LinkedHashMapCell<K, V>*/ _getTableCell(var table, var key) { return JS('var', '#[#]', table, key); } /*=List<LinkedHashMapCell<K, V>>*/ _getTableBucket(var table, var key) { return JS('var', '#[#]', table, key); } void _setTableEntry(var table, var key, var value) { assert(value != null); JS('void', '#[#] = #', table, key, value); } void _deleteTableEntry(var table, var key) { JS('void', 'delete #[#]', table, key); } bool _containsTableEntry(var table, var key) { LinkedHashMapCell/*<K, V>*/ cell = 
_getTableCell(table, key); return cell != null; } _newHashTable() { // Create a new JavaScript object to be used as a hash table. Use // Object.create to avoid the properties on Object.prototype // showing up as entries. var table = JS('var', 'Object.create(null)'); // Attempt to force the hash table into 'dictionary' mode by // adding a property to it and deleting it again. var temporaryKey = '<non-identifier-key>'; _setTableEntry(table, temporaryKey, table); _deleteTableEntry(table, temporaryKey); return table; } } class Es6LinkedHashMap<K, V> extends JsLinkedHashMap<K, V> { @override /*=LinkedHashMapCell<K, V>*/ _getTableCell(var table, var key) { return JS('var', '#.get(#)', table, key); } @override /*=List<LinkedHashMapCell<K, V>>*/ _getTableBucket(var table, var key) { return JS('var', '#.get(#)', table, key); } @override void _setTableEntry(var table, var key, var value) { JS('void', '#.set(#, #)', table, key, value); } @override void _deleteTableEntry(var table, var key) { JS('void', '#.delete(#)', table, key); } @override bool _containsTableEntry(var table, var key) { return JS('bool', '#.has(#)', table, key); } @override _newHashTable() { return JS('var', 'new Map()'); } } class LinkedHashMapCell<K, V> { final dynamic/*=K*/ hashMapCellKey; dynamic/*=V*/ hashMapCellValue; LinkedHashMapCell/*<K, V>*/ _next; LinkedHashMapCell/*<K, V>*/ _previous; LinkedHashMapCell(this.hashMapCellKey, this.hashMapCellValue); } class LinkedHashMapKeyIterable<E> extends Iterable<E> implements EfficientLength { final dynamic/*=JsLinkedHashMap<E, dynamic>*/ _map; LinkedHashMapKeyIterable(this._map); int get length => _map._length; bool get isEmpty => _map._length == 0; Iterator<E> get iterator { return new LinkedHashMapKeyIterator<E>(_map, _map._modifications); } bool contains(Object element) { return _map.containsKey(element); } void forEach(void f(E element)) { LinkedHashMapCell/*<E, dynamic>*/ cell = _map._first; int modifications = _map._modifications; while (cell != null) { 
f(cell.hashMapCellKey); if (modifications != _map._modifications) { throw new ConcurrentModificationError(_map); } cell = cell._next; } } } class LinkedHashMapKeyIterator<E> implements Iterator<E> { final dynamic/*=JsLinkedHashMap<E, dynamic>*/ _map; final int _modifications; LinkedHashMapCell/*<E, dynamic>*/ _cell; E _current; LinkedHashMapKeyIterator(this._map, this._modifications) { _cell = _map._first; } E get current => _current; bool moveNext() { if (_modifications != _map._modifications) { throw new ConcurrentModificationError(_map); } else if (_cell == null) { _current = null; return false; } else { _current = _cell.hashMapCellKey; _cell = _cell._next; return true; } } }
{ "pile_set_name": "Github" }
// Copyright (c) .NET Foundation. All rights reserved. // Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.Linq.Expressions; using JetBrains.Annotations; using Microsoft.EntityFrameworkCore.Metadata; using Microsoft.EntityFrameworkCore.Utilities; namespace Microsoft.EntityFrameworkCore.Cosmos.Query.Internal { /// <summary> /// This is an internal API that supports the Entity Framework Core infrastructure and not subject to /// the same compatibility standards as public APIs. It may be changed or removed without notice in /// any release. You should only use it directly in your code with extreme caution and knowing that /// doing so can result in application failures when updating to a new Entity Framework Core release. /// </summary> public class ReadItemExpression : Expression { private const string RootAlias = "c"; /// <summary> /// This is an internal API that supports the Entity Framework Core infrastructure and not subject to /// the same compatibility standards as public APIs. It may be changed or removed without notice in /// any release. You should only use it directly in your code with extreme caution and knowing that /// doing so can result in application failures when updating to a new Entity Framework Core release. /// </summary> public override Type Type => typeof(object); /// <summary> /// This is an internal API that supports the Entity Framework Core infrastructure and not subject to /// the same compatibility standards as public APIs. It may be changed or removed without notice in /// any release. You should only use it directly in your code with extreme caution and knowing that /// doing so can result in application failures when updating to a new Entity Framework Core release. 
/// </summary> public override ExpressionType NodeType => ExpressionType.Extension; /// <summary> /// This is an internal API that supports the Entity Framework Core infrastructure and not subject to /// the same compatibility standards as public APIs. It may be changed or removed without notice in /// any release. You should only use it directly in your code with extreme caution and knowing that /// doing so can result in application failures when updating to a new Entity Framework Core release. /// </summary> public virtual string Container { get; } /// <summary> /// This is an internal API that supports the Entity Framework Core infrastructure and not subject to /// the same compatibility standards as public APIs. It may be changed or removed without notice in /// any release. You should only use it directly in your code with extreme caution and knowing that /// doing so can result in application failures when updating to a new Entity Framework Core release. /// </summary> public virtual ProjectionExpression ProjectionExpression { get; } /// <summary> /// This is an internal API that supports the Entity Framework Core infrastructure and not subject to /// the same compatibility standards as public APIs. It may be changed or removed without notice in /// any release. You should only use it directly in your code with extreme caution and knowing that /// doing so can result in application failures when updating to a new Entity Framework Core release. /// </summary> public virtual IEntityType EntityType { get; } /// <summary> /// This is an internal API that supports the Entity Framework Core infrastructure and not subject to /// the same compatibility standards as public APIs. It may be changed or removed without notice in /// any release. You should only use it directly in your code with extreme caution and knowing that /// doing so can result in application failures when updating to a new Entity Framework Core release. 
/// </summary> public virtual IDictionary<IProperty, string> PropertyParameters { get; } /// <summary> /// This is an internal API that supports the Entity Framework Core infrastructure and not subject to /// the same compatibility standards as public APIs. It may be changed or removed without notice in /// any release. You should only use it directly in your code with extreme caution and knowing that /// doing so can result in application failures when updating to a new Entity Framework Core release. /// </summary> public ReadItemExpression( [NotNull] IEntityType entityType, [NotNull] IDictionary<IProperty, string> propertyParameters) { Check.NotNull(entityType, nameof(entityType)); Check.NotNull(propertyParameters, nameof(propertyParameters)); Container = entityType.GetContainer(); ProjectionExpression = new ProjectionExpression( new EntityProjectionExpression( entityType, new RootReferenceExpression(entityType, RootAlias)), RootAlias); EntityType = entityType; PropertyParameters = propertyParameters; } } }
{ "pile_set_name": "Github" }
import os import json import shutil import logging import tensorflow as tf from conlleval import return_report models_path = "./models" eval_path = "./evaluation" eval_temp = os.path.join(eval_path, "temp") eval_script = os.path.join(eval_path, "conlleval") def get_logger(log_file): logger = logging.getLogger(log_file) logger.setLevel(logging.DEBUG) fh = logging.FileHandler(log_file) fh.setLevel(logging.DEBUG) ch = logging.StreamHandler() ch.setLevel(logging.INFO) formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") ch.setFormatter(formatter) fh.setFormatter(formatter) logger.addHandler(ch) logger.addHandler(fh) return logger # def test_ner(results, path): # """ # Run perl script to evaluate model # """ # script_file = "conlleval" # output_file = os.path.join(path, "ner_predict.utf8") # result_file = os.path.join(path, "ner_result.utf8") # with open(output_file, "w") as f: # to_write = [] # for block in results: # for line in block: # to_write.append(line + "\n") # to_write.append("\n") # # f.writelines(to_write) # os.system("perl {} < {} > {}".format(script_file, output_file, result_file)) # eval_lines = [] # with open(result_file) as f: # for line in f: # eval_lines.append(line.strip()) # return eval_lines def test_ner(results, path): """ Run perl script to evaluate model """ output_file = os.path.join(path, "ner_predict.utf8") with open(output_file, "w",encoding='utf8') as f: to_write = [] for block in results: for line in block: to_write.append(line + "\n") to_write.append("\n") f.writelines(to_write) eval_lines = return_report(output_file) return eval_lines def print_config(config, logger): """ Print configuration of the model """ for k, v in config.items(): logger.info("{}:\t{}".format(k.ljust(15), v)) def make_path(params): """ Make folders for training and evaluation """ if not os.path.isdir(params.result_path): os.makedirs(params.result_path) if not os.path.isdir(params.ckpt_path): os.makedirs(params.ckpt_path) if not 
os.path.isdir("log"): os.makedirs("log") def clean(params): """ Clean current folder remove saved model and training log """ if os.path.isfile(params.vocab_file): os.remove(params.vocab_file) if os.path.isfile(params.map_file): os.remove(params.map_file) if os.path.isdir(params.ckpt_path): shutil.rmtree(params.ckpt_path) if os.path.isdir(params.summary_path): shutil.rmtree(params.summary_path) if os.path.isdir(params.result_path): shutil.rmtree(params.result_path) if os.path.isdir("log"): shutil.rmtree("log") if os.path.isdir("__pycache__"): shutil.rmtree("__pycache__") if os.path.isfile(params.config_file): os.remove(params.config_file) if os.path.isfile(params.vocab_file): os.remove(params.vocab_file) def save_config(config, config_file): """ Save configuration of the model parameters are stored in json format """ with open(config_file, "w", encoding="utf8") as f: json.dump(config, f, ensure_ascii=False, indent=4) def load_config(config_file): """ Load configuration of the model parameters are stored in json format """ with open(config_file, encoding="utf8") as f: return json.load(f) def convert_to_text(line): """ Convert conll data to text """ to_print = [] for item in line: try: if item[0] == " ": to_print.append(" ") continue word, gold, tag = item.split(" ") if tag[0] in "SB": to_print.append("[") to_print.append(word) if tag[0] in "SE": to_print.append("@" + tag.split("-")[-1]) to_print.append("]") except: print(list(item)) return "".join(to_print) def save_model(sess, model, path, logger): checkpoint_path = os.path.join(path, "ner.ckpt") model.saver.save(sess, checkpoint_path) logger.info("model saved") def create_model(session, Model_class, path, load_vec, config, id_to_char, logger): # create model, reuse parameters if exists model = Model_class(config) ckpt = tf.train.get_checkpoint_state(path) if ckpt and tf.train.checkpoint_exists(ckpt.model_checkpoint_path): logger.info("Reading model parameters from %s" % ckpt.model_checkpoint_path) 
model.saver.restore(session, ckpt.model_checkpoint_path) else: logger.info("Created model with fresh parameters.") session.run(tf.global_variables_initializer()) if config["pre_emb"]: emb_weights = session.run(model.char_lookup.read_value()) emb_weights = load_vec(config["emb_file"],id_to_char, config["char_dim"], emb_weights) session.run(model.char_lookup.assign(emb_weights)) logger.info("Load pre-trained embedding.") return model def result_to_json(string, tags): item = {"string": string, "entities": []} entity_name = "" entity_start = 0 idx = 0 for char, tag in zip(string, tags): if tag[0] == "S": item["entities"].append({"word": char, "start": idx, "end": idx+1, "type":tag[2:]}) elif tag[0] == "B": entity_name += char entity_start = idx elif tag[0] == "I": entity_name += char elif tag[0] == "E": entity_name += char item["entities"].append({"word": entity_name, "start": entity_start, "end": idx + 1, "type": tag[2:]}) entity_name = "" else: entity_name = "" entity_start = idx idx += 1 return item #import os #import json #import shutil #import logging #import codecs #import tensorflow as tf #from conlleval import return_report #models_path = "./models" #eval_path = "./evaluation" #eval_temp = os.path.join(eval_path, "temp") #eval_script = os.path.join(eval_path, "conlleval") #def get_logger(log_file): #logger = logging.getLogger(log_file) #logger.setLevel(logging.DEBUG) #fh = logging.FileHandler(log_file) #fh.setLevel(logging.DEBUG) #ch = logging.StreamHandler() #ch.setLevel(logging.INFO) #formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") #ch.setFormatter(formatter) #fh.setFormatter(formatter) #logger.addHandler(ch) #logger.addHandler(fh) #return logger ## def test_ner(results, path): ## """ ## Run perl script to evaluate model ## """ ## script_file = "conlleval" ## output_file = os.path.join(path, "ner_predict.utf8") ## result_file = os.path.join(path, "ner_result.utf8") ## with open(output_file, "w") as f: ## to_write = [] ## 
for block in results: ## for line in block: ## to_write.append(line + "\n") ## to_write.append("\n") ## ## f.writelines(to_write) ## os.system("perl {} < {} > {}".format(script_file, output_file, result_file)) ## eval_lines = [] ## with open(result_file) as f: ## for line in f: ## eval_lines.append(line.strip()) ## return eval_lines #def make_path(params): #""" #Make folders for training and evaluation #""" #if not os.path.isdir(params.result_path): #os.makedirs(params.result_path) #if not os.path.isdir(params.ckpt_path): #os.makedirs(params.ckpt_path) #if not os.path.isdir("log"): #os.makedirs("log") #def load_config(config_file): #""" #Load configuration of the model #parameters are stored in json format #""" #with codecs.open(config_file,'r', encoding="utf-8") as f: #return json.load(f) #def create_model(session, Model_class, path, load_vec, config, id_to_char, logger): ## create model, reuse parameters if exists #model = Model_class(config) #ckpt = tf.train.get_checkpoint_state(path) #if ckpt and tf.train.checkpoint_exists(ckpt.model_checkpoint_path): #logger.info("Reading model parameters from %s" % ckpt.model_checkpoint_path) #model.saver.restore(session, ckpt.model_checkpoint_path) #return model #def result_to_json(string, tags): #item = {"string": string, "entities": []} #entity_name = "" #entity_start = 0 #idx = 0 #for char, tag in zip(string, tags): #if tag[0] == "S": #item["entities"].append({"word": char, "start": idx, "end": idx+1, "type":tag[2:]}) #elif tag[0] == "B": #entity_name += char #entity_start = idx #elif tag[0] == "I": #entity_name += char #elif tag[0] == "E": #entity_name += char #item["entities"].append({"word": entity_name, "start": entity_start, "end": idx + 1, "type": tag[2:]}) #entity_name = "" #else: #entity_name = "" #entity_start = idx #idx += 1 #return item
{ "pile_set_name": "Github" }
import { defineMessages } from 'react-intl'; export default defineMessages({ navigation: { id: 'cboard.components.Settings.Navigation.navigation', defaultMessage: 'Navigation' }, enable: { id: 'cboard.components.Settings.Navigation.enable', defaultMessage: 'Enable context aware back button' }, enableSecondary: { id: 'cboard.components.Settings.Navigation.enableSecondary', defaultMessage: 'Shows big back buttons on top of the boards' }, quickUnlock: { id: 'cboard.components.Settings.Navigation.quickUnlock', defaultMessage: 'Enable quick settings unlock' }, quickUnlockSecondary: { id: 'cboard.components.Settings.Navigation.quickUnlockSecondary', defaultMessage: 'Unlocks the settings with a single click' }, outputRemove: { id: 'cboard.components.Settings.Navigation.outputRemove', defaultMessage: 'Remove symbols from the output bar' }, outputRemoveSecondary: { id: 'cboard.components.Settings.Navigation.outputRemoveSecondary', defaultMessage: 'Shows a "x" buttton on each symbol in order to remove it' }, vocalizeFolders: { id: 'cboard.components.Settings.Navigation.vocalizeFolders', defaultMessage: 'Enable folder vocalization' }, vocalizeFoldersSecondary: { id: 'cboard.components.Settings.Navigation.vocalizeFoldersSecondary', defaultMessage: 'Reads a folder\'s name out loud when clicked' } });
{ "pile_set_name": "Github" }
@import templates_top; template<typename T> class Vector { public: void push_back(const T&); }; template<typename T> class List; template<> class List<bool> { public: void push_back(int); }; namespace N { template<typename T> class Set { public: void insert(T); }; } constexpr unsigned List<int>::*size_right = &List<int>::size; List<int> list_right = { 0, 12 }; typedef List<int> ListInt_right; template <typename T> void pendingInstantiationEmit(T) {} void triggerPendingInstantiationToo() { pendingInstantiationEmit(12); } void redeclDefinitionEmit(){} typedef Outer<int>::Inner OuterIntInner_right; int defineListDoubleRight() { List<double> ld; ld.push_back(0.0); return ld.size; } inline void defineListLongRight() { List<long> ll; } template<typename T> struct MergePatternDecl; void outOfLineInlineUseRightF(void (OutOfLineInline<int>::*)() = &OutOfLineInline<int>::f); void outOfLineInlineUseRightG(void (OutOfLineInline<int>::*)() = &OutOfLineInline<int>::g); void outOfLineInlineUseRightH(void (OutOfLineInline<int>::*)() = &OutOfLineInline<int>::h); inline int *getStaticDataMemberRight() { return WithUndefinedStaticDataMember<int[]>::undefined; } inline WithAttributes<int> make_with_attributes_right() { return WithAttributes<int>(); }
{ "pile_set_name": "Github" }
// CodeMirror, copyright (c) by Marijn Haverbeke and others // Distributed under an MIT license: https://codemirror.net/LICENSE /* Gherkin mode - http://www.cukes.info/ Report bugs/issues here: https://github.com/codemirror/CodeMirror/issues */ // Following Objs from Brackets implementation: https://github.com/tregusti/brackets-gherkin/blob/master/main.js //var Quotes = { // SINGLE: 1, // DOUBLE: 2 //}; //var regex = { // keywords: /(Feature| {2}(Scenario|In order to|As|I)| {4}(Given|When|Then|And))/ //}; (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS mod(require("../../lib/codemirror")); else if (typeof define == "function" && define.amd) // AMD define(["../../lib/codemirror"], mod); else // Plain browser env mod(CodeMirror); })(function(CodeMirror) { "use strict"; CodeMirror.defineMode("gherkin", function () { return { startState: function () { return { lineNumber: 0, tableHeaderLine: false, allowFeature: true, allowBackground: false, allowScenario: false, allowSteps: false, allowPlaceholders: false, allowMultilineArgument: false, inMultilineString: false, inMultilineTable: false, inKeywordLine: false }; }, token: function (stream, state) { if (stream.sol()) { state.lineNumber++; state.inKeywordLine = false; if (state.inMultilineTable) { state.tableHeaderLine = false; if (!stream.match(/\s*\|/, false)) { state.allowMultilineArgument = false; state.inMultilineTable = false; } } } stream.eatSpace(); if (state.allowMultilineArgument) { // STRING if (state.inMultilineString) { if (stream.match('"""')) { state.inMultilineString = false; state.allowMultilineArgument = false; } else { stream.match(/.*/); } return "string"; } // TABLE if (state.inMultilineTable) { if (stream.match(/\|\s*/)) { return "bracket"; } else { stream.match(/[^\|]*/); return state.tableHeaderLine ? 
"header" : "string"; } } // DETECT START if (stream.match('"""')) { // String state.inMultilineString = true; return "string"; } else if (stream.match("|")) { // Table state.inMultilineTable = true; state.tableHeaderLine = true; return "bracket"; } } // LINE COMMENT if (stream.match(/#.*/)) { return "comment"; // TAG } else if (!state.inKeywordLine && stream.match(/@\S+/)) { return "tag"; // FEATURE } else if (!state.inKeywordLine && state.allowFeature && stream.match(/(機能|功能|フィーチャ|기능|โครงหลัก|ความสามารถ|ความต้องการทางธุรกิจ|ಹೆಚ್ಚಳ|గుణము|ਮੁਹਾਂਦਰਾ|ਨਕਸ਼ ਨੁਹਾਰ|ਖਾਸੀਅਤ|रूप लेख|وِیژگی|خاصية|תכונה|Функціонал|Функция|Функционалност|Функционал|Үзенчәлеклелек|Свойство|Особина|Мөмкинлек|Могућност|Λειτουργία|Δυνατότητα|Właściwość|Vlastnosť|Trajto|Tính năng|Savybė|Pretty much|Požiadavka|Požadavek|Potrzeba biznesowa|Özellik|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Hwæt|Hwaet|Funzionalità|Funktionalitéit|Funktionalität|Funkcja|Funkcionalnost|Funkcionalitāte|Funkcia|Fungsi|Functionaliteit|Funcționalitate|Funcţionalitate|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Fīča|Feature|Eiginleiki|Egenskap|Egenskab|Característica|Caracteristica|Business Need|Aspekt|Arwedd|Ahoy matey!|Ability):/)) { state.allowScenario = true; state.allowBackground = true; state.allowPlaceholders = false; state.allowSteps = false; state.allowMultilineArgument = false; state.inKeywordLine = true; return "keyword"; // BACKGROUND } else if (!state.inKeywordLine && state.allowBackground && stream.match(/(背景|배경|แนวคิด|ಹಿನ್ನೆಲೆ|నేపథ్యం|ਪਿਛੋਕੜ|पृष्ठभूमि|زمینه|الخلفية|רקע|Тарих|Предыстория|Предистория|Позадина|Передумова|Основа|Контекст|Кереш|Υπόβαθρο|Założenia|Yo\-ho\-ho|Tausta|Taust|Situācija|Rerefons|Pozadina|Pozadie|Pozadí|Osnova|Latar Belakang|Kontext|Konteksts|Kontekstas|Kontekst|Háttér|Hannergrond|Grundlage|Geçmiş|Fundo|Fono|First off|Dis is what went down|Dasar|Contexto|Contexte|Context|Contesto|Cenário de Fundo|Cenario de Fundo|Cefndir|Bối 
cảnh|Bakgrunnur|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|Ær|Aer|Achtergrond):/)) { state.allowPlaceholders = false; state.allowSteps = true; state.allowBackground = false; state.allowMultilineArgument = false; state.inKeywordLine = true; return "keyword"; // SCENARIO OUTLINE } else if (!state.inKeywordLine && state.allowScenario && stream.match(/(場景大綱|场景大纲|劇本大綱|剧本大纲|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|시나리오 개요|สรุปเหตุการณ์|โครงสร้างของเหตุการณ์|ವಿವರಣೆ|కథనం|ਪਟਕਥਾ ਰੂਪ ਰੇਖਾ|ਪਟਕਥਾ ਢਾਂਚਾ|परिदृश्य रूपरेखा|سيناريو مخطط|الگوی سناریو|תבנית תרחיש|Сценарийның төзелеше|Сценарий структураси|Структура сценарію|Структура сценария|Структура сценарија|Скица|Рамка на сценарий|Концепт|Περιγραφή Σεναρίου|Wharrimean is|Template Situai|Template Senario|Template Keadaan|Tapausaihio|Szenariogrundriss|Szablon scenariusza|Swa hwær swa|Swa hwaer swa|Struktura scenarija|Structură scenariu|Structura scenariu|Skica|Skenario konsep|Shiver me timbers|Senaryo taslağı|Schema dello scenario|Scenariomall|Scenariomal|Scenario Template|Scenario Outline|Scenario Amlinellol|Scenārijs pēc parauga|Scenarijaus šablonas|Reckon it's like|Raamstsenaarium|Plang vum Szenario|Plan du Scénario|Plan du scénario|Osnova scénáře|Osnova Scenára|Náčrt Scenáru|Náčrt Scénáře|Náčrt Scenára|MISHUN SRSLY|Menggariskan Senario|Lýsing Dæma|Lýsing Atburðarásar|Konturo de la scenaro|Koncept|Khung tình huống|Khung kịch bản|Forgatókönyv vázlat|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l'escenari|Esbozo do escenario|Delineação do Cenário|Delineacao do Cenario|All y'all|Abstrakt Scenario|Abstract Scenario):/)) { state.allowPlaceholders = true; state.allowSteps = true; state.allowMultilineArgument = false; state.inKeywordLine = true; return "keyword"; // EXAMPLES } else if (state.allowScenario && stream.match(/(例子|例|サンプル|예|ชุดของเหตุการณ์|ชุดของตัวอย่าง|ಉದಾಹರಣೆಗಳು|ఉదాహరణలు|ਉਦਾਹਰਨਾਂ|उदाहरण|نمونه 
ها|امثلة|דוגמאות|Үрнәкләр|Сценарији|Примеры|Примери|Приклади|Мисоллар|Мисаллар|Σενάρια|Παραδείγματα|You'll wanna|Voorbeelden|Variantai|Tapaukset|Se þe|Se the|Se ðe|Scenarios|Scenariji|Scenarijai|Przykłady|Primjeri|Primeri|Příklady|Príklady|Piemēri|Példák|Pavyzdžiai|Paraugs|Örnekler|Juhtumid|Exemplos|Exemples|Exemple|Exempel|EXAMPLZ|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|Dữ liệu|Dead men tell no tales|Dæmi|Contoh|Cenários|Cenarios|Beispiller|Beispiele|Atburðarásir):/)) { state.allowPlaceholders = false; state.allowSteps = true; state.allowBackground = false; state.allowMultilineArgument = true; return "keyword"; // SCENARIO } else if (!state.inKeywordLine && state.allowScenario && stream.match(/(場景|场景|劇本|剧本|シナリオ|시나리오|เหตุการณ์|ಕಥಾಸಾರಾಂಶ|సన్నివేశం|ਪਟਕਥਾ|परिदृश्य|سيناريو|سناریو|תרחיש|Сценарій|Сценарио|Сценарий|Пример|Σενάριο|Tình huống|The thing of it is|Tapaus|Szenario|Swa|Stsenaarium|Skenario|Situai|Senaryo|Senario|Scenaro|Scenariusz|Scenariu|Scénario|Scenario|Scenarijus|Scenārijs|Scenarij|Scenarie|Scénář|Scenár|Primer|MISHUN|Kịch bản|Keadaan|Heave to|Forgatókönyv|Escenario|Escenari|Cenário|Cenario|Awww, look mate|Atburðarás):/)) { state.allowPlaceholders = false; state.allowSteps = true; state.allowBackground = false; state.allowMultilineArgument = false; state.inKeywordLine = true; return "keyword"; // STEPS } else if (!state.inKeywordLine && state.allowSteps && stream.match(/(那麼|那么|而且|當|当|并且|同時|同时|前提|假设|假設|假定|假如|但是|但し|並且|もし|ならば|ただし|しかし|かつ|하지만|조건|먼저|만일|만약|단|그리고|그러면|และ |เมื่อ |แต่ |ดังนั้น |กำหนดให้ |ಸ್ಥಿತಿಯನ್ನು |ಮತ್ತು |ನೀಡಿದ |ನಂತರ |ಆದರೆ |మరియు |చెప్పబడినది |కాని |ఈ పరిస్థితిలో |అప్పుడు |ਪਰ |ਤਦ |ਜੇਕਰ |ਜਿਵੇਂ ਕਿ |ਜਦੋਂ |ਅਤੇ |यदि |परन्तु |पर |तब |तदा |तथा |जब |चूंकि |किन्तु |कदा |और |अगर |و |هنگامی |متى |لكن |عندما |ثم |بفرض |با فرض |اما |اذاً |آنگاه |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Һәм |Унда |Тоді |Тогда |То |Также |Та |Пусть |Припустимо, що |Припустимо |Онда |Но |Нехай |Нәтиҗәдә |Лекин |Ләкин |Коли |Когда |Когато |Када |Кад |К тому же |І 
|И |Задато |Задати |Задате |Если |Допустим |Дано |Дадено |Вә |Ва |Бирок |Әмма |Әйтик |Әгәр |Аммо |Али |Але |Агар |А також |А |Τότε |Όταν |Και |Δεδομένου |Αλλά |Þurh |Þegar |Þa þe |Þá |Þa |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Za předpokladu |Za predpokladu |Youse know when youse got |Youse know like when |Yna |Yeah nah |Y'know |Y |Wun |Wtedy |When y'all |When |Wenn |WEN |wann |Ve |Và |Und |Un |ugeholl |Too right |Thurh |Thì |Then y'all |Then |Tha the |Tha |Tetapi |Tapi |Tak |Tada |Tad |Stel |Soit |Siis |Și |Şi |Si |Sed |Se |Så |Quando |Quand |Quan |Pryd |Potom |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Niin |Nhưng |När |Når |Mutta |Men |Mas |Maka |Majd |Mając |Mais |Maar |mä |Ma |Lorsque |Lorsqu'|Logo |Let go and haul |Kun |Kuid |Kui |Kiedy |Khi |Ketika |Kemudian |Keď |Když |Kaj |Kai |Kada |Kad |Jeżeli |Jeśli |Ja |It's just unbelievable |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y'all |Given |Gitt |Gegeven |Gegeben seien |Gegeben sei |Gdy |Gangway! |Fakat |Étant donnés |Etant donnés |Étant données |Etant données |Étant donnée |Etant donnée |Étant donné |Etant donné |Et |És |Entonces |Entón |Então |Entao |En |Eğer ki |Ef |Eeldades |E |Ðurh |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Diberi |Dengan |Den youse gotta |DEN |De |Dato |Dați fiind |Daţi fiind |Dati fiind |Dati |Date fiind |Date |Data |Dat fiind |Dar |Dann |dann |Dan |Dados |Dado |Dadas |Dada |Ða ðe |Ða |Cuando |Cho |Cando |Când |Cand |Cal |But y'all |But at the end of the day I reckon |BUT |But |Buh |Blimey! |Biết |Bet |Bagi |Aye |awer |Avast! 
|Atunci |Atesa |Atès |Apabila |Anrhegedig a |Angenommen |And y'all |And |AN |An |an |Amikor |Amennyiben |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Ak |Adott |Ac |Aber |A zároveň |A tiež |A taktiež |A také |A |a |7 |\* )/)) { state.inStep = true; state.allowPlaceholders = true; state.allowMultilineArgument = true; state.inKeywordLine = true; return "keyword"; // INLINE STRING } else if (stream.match(/"[^"]*"?/)) { return "string"; // PLACEHOLDER } else if (state.allowPlaceholders && stream.match(/<[^>]*>?/)) { return "variable"; // Fall through } else { stream.next(); stream.eatWhile(/[^@"<#]/); return null; } } }; }); CodeMirror.defineMIME("text/x-feature", "gherkin"); });
{ "pile_set_name": "Github" }
// // ******************************************************************** // * License and Disclaimer * // * * // * The Geant4 software is copyright of the Copyright Holders of * // * the Geant4 Collaboration. It is provided under the terms and * // * conditions of the Geant4 Software License, included in the file * // * LICENSE and available at http://cern.ch/geant4/license . These * // * include a list of copyright holders. * // * * // * Neither the authors of this software system, nor their employing * // * institutes,nor the agencies providing financial support for this * // * work make any representation or warranty, express or implied, * // * regarding this software system or assume any liability for its * // * use. Please see the license in the file LICENSE and URL above * // * for the full disclaimer and the limitation of liability. * // * * // * This code implementation is the result of the scientific and * // * technical work of the GEANT4 collaboration. * // * By using, copying, modifying or distributing the software (or * // * any work based on the software) you agree to acknowledge its * // * use in resulting scientific publications, and indicate your * // * acceptance of all terms of the Geant4 Software license. 
* // ******************************************************************** // // // ------------------------------------------------------------------- // GEANT 4 class implementation file // // CERN, Geneva, Switzerland // // File name: G4KaonPlusField.cc // // Author: Alessandro Brunengo (Alessandro.Brunengo@ge.infn.it) // // Creation date: 5 June 2000 // ------------------------------------------------------------------- #include "G4KaonPlusField.hh" #include "G4PhysicalConstants.hh" #include "G4SystemOfUnits.hh" #include "G4NucleiProperties.hh" #include "G4VNuclearDensity.hh" #include "G4FermiMomentum.hh" #include "G4KaonPlus.hh" #include "G4HadTmpUtil.hh" #include "G4Pow.hh" G4KaonPlusField::G4KaonPlusField(G4V3DNucleus * nucleus, G4double coeff) : G4VNuclearField(nucleus) { theCoeff = coeff; } G4KaonPlusField::~G4KaonPlusField() { } G4double G4KaonPlusField::GetField(const G4ThreeVector & aPosition) { // Field is 0 out of the nucleus! if(aPosition.mag() >= radius) return 0.0; G4double kaonMass = G4KaonPlus::KaonPlus()->GetPDGMass(); G4int A = theNucleus->GetMassNumber(); G4int Z = theNucleus->GetCharge(); G4double bindingEnergy = G4NucleiProperties::GetBindingEnergy(A, Z); G4double nucleusMass = Z*proton_mass_c2+(A-Z)*neutron_mass_c2+bindingEnergy; G4double reducedMass = kaonMass*nucleusMass/(kaonMass+nucleusMass); G4double density = theNucleus->GetNuclearDensity()->GetDensity(aPosition); return -2.*pi*hbarc*hbarc/reducedMass*(2.0)*theCoeff*density+GetBarrier(); } G4double G4KaonPlusField::GetBarrier() { G4int A = theNucleus->GetMassNumber(); G4int Z = theNucleus->GetCharge(); G4double coulombBarrier = (1.44/1.14) * MeV * Z / (1.0 + G4Pow::GetInstance()->Z13(A)); return coulombBarrier; }
{ "pile_set_name": "Github" }
# generated automatically by aclocal 1.13.4 -*- Autoconf -*- # Copyright (C) 1996-2013 Free Software Foundation, Inc. # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. m4_ifndef([AC_CONFIG_MACRO_DIRS], [m4_defun([_AM_CONFIG_MACRO_DIRS], [])m4_defun([AC_CONFIG_MACRO_DIRS], [_AM_CONFIG_MACRO_DIRS($@)])]) m4_ifndef([AC_AUTOCONF_VERSION], [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl m4_if(m4_defn([AC_AUTOCONF_VERSION]), [2.69],, [m4_warning([this file was generated for autoconf 2.69. You have another version of autoconf. It may work, but is not guaranteed to. If you have problems, you may need to regenerate the build system entirely. To do so, use the procedure documented by the package, typically 'autoreconf'.])]) # pkg.m4 - Macros to locate and utilise pkg-config. -*- Autoconf -*- # serial 1 (pkg-config-0.24) # # Copyright © 2004 Scott James Remnant <scott@netsplit.com>. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
# # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that program. # PKG_PROG_PKG_CONFIG([MIN-VERSION]) # ---------------------------------- AC_DEFUN([PKG_PROG_PKG_CONFIG], [m4_pattern_forbid([^_?PKG_[A-Z_]+$]) m4_pattern_allow([^PKG_CONFIG(_(PATH|LIBDIR|SYSROOT_DIR|ALLOW_SYSTEM_(CFLAGS|LIBS)))?$]) m4_pattern_allow([^PKG_CONFIG_(DISABLE_UNINSTALLED|TOP_BUILD_DIR|DEBUG_SPEW)$]) AC_ARG_VAR([PKG_CONFIG], [path to pkg-config utility]) AC_ARG_VAR([PKG_CONFIG_PATH], [directories to add to pkg-config's search path]) AC_ARG_VAR([PKG_CONFIG_LIBDIR], [path overriding pkg-config's built-in search path]) if test "x$ac_cv_env_PKG_CONFIG_set" != "xset"; then AC_PATH_TOOL([PKG_CONFIG], [pkg-config]) fi if test -n "$PKG_CONFIG"; then _pkg_min_version=m4_default([$1], [0.9.0]) AC_MSG_CHECKING([pkg-config is at least version $_pkg_min_version]) if $PKG_CONFIG --atleast-pkgconfig-version $_pkg_min_version; then AC_MSG_RESULT([yes]) else AC_MSG_RESULT([no]) PKG_CONFIG="" fi fi[]dnl ])# PKG_PROG_PKG_CONFIG # PKG_CHECK_EXISTS(MODULES, [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND]) # # Check to see whether a particular set of modules exists. Similar # to PKG_CHECK_MODULES(), but does not set variables or print errors. 
# # Please remember that m4 expands AC_REQUIRE([PKG_PROG_PKG_CONFIG]) # only at the first occurence in configure.ac, so if the first place # it's called might be skipped (such as if it is within an "if", you # have to call PKG_CHECK_EXISTS manually # -------------------------------------------------------------- AC_DEFUN([PKG_CHECK_EXISTS], [AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl if test -n "$PKG_CONFIG" && \ AC_RUN_LOG([$PKG_CONFIG --exists --print-errors "$1"]); then m4_default([$2], [:]) m4_ifvaln([$3], [else $3])dnl fi]) # _PKG_CONFIG([VARIABLE], [COMMAND], [MODULES]) # --------------------------------------------- m4_define([_PKG_CONFIG], [if test -n "$$1"; then pkg_cv_[]$1="$$1" elif test -n "$PKG_CONFIG"; then PKG_CHECK_EXISTS([$3], [pkg_cv_[]$1=`$PKG_CONFIG --[]$2 "$3" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes ], [pkg_failed=yes]) else pkg_failed=untried fi[]dnl ])# _PKG_CONFIG # _PKG_SHORT_ERRORS_SUPPORTED # ----------------------------- AC_DEFUN([_PKG_SHORT_ERRORS_SUPPORTED], [AC_REQUIRE([PKG_PROG_PKG_CONFIG]) if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi[]dnl ])# _PKG_SHORT_ERRORS_SUPPORTED # PKG_CHECK_MODULES(VARIABLE-PREFIX, MODULES, [ACTION-IF-FOUND], # [ACTION-IF-NOT-FOUND]) # # # Note that if there is a possibility the first call to # PKG_CHECK_MODULES might not happen, you should be sure to include an # explicit call to PKG_PROG_PKG_CONFIG in your configure.ac # # # -------------------------------------------------------------- AC_DEFUN([PKG_CHECK_MODULES], [AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl AC_ARG_VAR([$1][_CFLAGS], [C compiler flags for $1, overriding pkg-config])dnl AC_ARG_VAR([$1][_LIBS], [linker flags for $1, overriding pkg-config])dnl pkg_failed=no AC_MSG_CHECKING([for $1]) _PKG_CONFIG([$1][_CFLAGS], [cflags], [$2]) _PKG_CONFIG([$1][_LIBS], [libs], [$2]) m4_define([_PKG_TEXT], [Alternatively, you may set the environment variables $1[]_CFLAGS and $1[]_LIBS 
to avoid the need to call pkg-config. See the pkg-config man page for more details.]) if test $pkg_failed = yes; then AC_MSG_RESULT([no]) _PKG_SHORT_ERRORS_SUPPORTED if test $_pkg_short_errors_supported = yes; then $1[]_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$2" 2>&1` else $1[]_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$2" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$$1[]_PKG_ERRORS" >&AS_MESSAGE_LOG_FD m4_default([$4], [AC_MSG_ERROR( [Package requirements ($2) were not met: $$1_PKG_ERRORS Consider adjusting the PKG_CONFIG_PATH environment variable if you installed software in a non-standard prefix. _PKG_TEXT])[]dnl ]) elif test $pkg_failed = untried; then AC_MSG_RESULT([no]) m4_default([$4], [AC_MSG_FAILURE( [The pkg-config script could not be found or is too old. Make sure it is in your PATH or set the PKG_CONFIG environment variable to the full path to pkg-config. _PKG_TEXT To get pkg-config, see <http://pkg-config.freedesktop.org/>.])[]dnl ]) else $1[]_CFLAGS=$pkg_cv_[]$1[]_CFLAGS $1[]_LIBS=$pkg_cv_[]$1[]_LIBS AC_MSG_RESULT([yes]) $3 fi[]dnl ])# PKG_CHECK_MODULES # PKG_INSTALLDIR(DIRECTORY) # ------------------------- # Substitutes the variable pkgconfigdir as the location where a module # should install pkg-config .pc files. By default the directory is # $libdir/pkgconfig, but the default can be changed by passing # DIRECTORY. The user can override through the --with-pkgconfigdir # parameter. 
AC_DEFUN([PKG_INSTALLDIR], [m4_pushdef([pkg_default], [m4_default([$1], ['${libdir}/pkgconfig'])]) m4_pushdef([pkg_description], [pkg-config installation directory @<:@]pkg_default[@:>@]) AC_ARG_WITH([pkgconfigdir], [AS_HELP_STRING([--with-pkgconfigdir], pkg_description)],, [with_pkgconfigdir=]pkg_default) AC_SUBST([pkgconfigdir], [$with_pkgconfigdir]) m4_popdef([pkg_default]) m4_popdef([pkg_description]) ]) dnl PKG_INSTALLDIR # PKG_NOARCH_INSTALLDIR(DIRECTORY) # ------------------------- # Substitutes the variable noarch_pkgconfigdir as the location where a # module should install arch-independent pkg-config .pc files. By # default the directory is $datadir/pkgconfig, but the default can be # changed by passing DIRECTORY. The user can override through the # --with-noarch-pkgconfigdir parameter. AC_DEFUN([PKG_NOARCH_INSTALLDIR], [m4_pushdef([pkg_default], [m4_default([$1], ['${datadir}/pkgconfig'])]) m4_pushdef([pkg_description], [pkg-config arch-independent installation directory @<:@]pkg_default[@:>@]) AC_ARG_WITH([noarch-pkgconfigdir], [AS_HELP_STRING([--with-noarch-pkgconfigdir], pkg_description)],, [with_noarch_pkgconfigdir=]pkg_default) AC_SUBST([noarch_pkgconfigdir], [$with_noarch_pkgconfigdir]) m4_popdef([pkg_default]) m4_popdef([pkg_description]) ]) dnl PKG_NOARCH_INSTALLDIR # Copyright (C) 2002-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_AUTOMAKE_VERSION(VERSION) # ---------------------------- # Automake X.Y traces this macro to ensure aclocal.m4 has been # generated from the m4 files accompanying Automake X.Y. # (This private macro should not be called outside this file.) AC_DEFUN([AM_AUTOMAKE_VERSION], [am__api_version='1.13' dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to dnl require some minimum version. Point them to the right macro. 
m4_if([$1], [1.13.4], [], [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl ]) # _AM_AUTOCONF_VERSION(VERSION) # ----------------------------- # aclocal traces this macro to find the Autoconf version. # This is a private macro too. Using m4_define simplifies # the logic in aclocal, which can simply ignore this definition. m4_define([_AM_AUTOCONF_VERSION], []) # AM_SET_CURRENT_AUTOMAKE_VERSION # ------------------------------- # Call AM_AUTOMAKE_VERSION and AM_AUTOMAKE_VERSION so they can be traced. # This function is AC_REQUIREd by AM_INIT_AUTOMAKE. AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION], [AM_AUTOMAKE_VERSION([1.13.4])dnl m4_ifndef([AC_AUTOCONF_VERSION], [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl _AM_AUTOCONF_VERSION(m4_defn([AC_AUTOCONF_VERSION]))]) # AM_AUX_DIR_EXPAND -*- Autoconf -*- # Copyright (C) 2001-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # For projects using AC_CONFIG_AUX_DIR([foo]), Autoconf sets # $ac_aux_dir to '$srcdir/foo'. In other projects, it is set to # '$srcdir', '$srcdir/..', or '$srcdir/../..'. # # Of course, Automake must honor this variable whenever it calls a # tool from the auxiliary directory. The problem is that $srcdir (and # therefore $ac_aux_dir as well) can be either absolute or relative, # depending on how configure is run. This is pretty annoying, since # it makes $ac_aux_dir quite unusable in subdirectories: in the top # source directory, any form will work fine, but in subdirectories a # relative path needs to be adjusted first. 
# # $ac_aux_dir/missing # fails when called from a subdirectory if $ac_aux_dir is relative # $top_srcdir/$ac_aux_dir/missing # fails if $ac_aux_dir is absolute, # fails when called from a subdirectory in a VPATH build with # a relative $ac_aux_dir # # The reason of the latter failure is that $top_srcdir and $ac_aux_dir # are both prefixed by $srcdir. In an in-source build this is usually # harmless because $srcdir is '.', but things will broke when you # start a VPATH build or use an absolute $srcdir. # # So we could use something similar to $top_srcdir/$ac_aux_dir/missing, # iff we strip the leading $srcdir from $ac_aux_dir. That would be: # am_aux_dir='\$(top_srcdir)/'`expr "$ac_aux_dir" : "$srcdir//*\(.*\)"` # and then we would define $MISSING as # MISSING="\${SHELL} $am_aux_dir/missing" # This will work as long as MISSING is not called from configure, because # unfortunately $(top_srcdir) has no meaning in configure. # However there are other variables, like CC, which are often used in # configure, and could therefore not use this "fixed" $ac_aux_dir. # # Another solution, used here, is to always expand $ac_aux_dir to an # absolute PATH. The drawback is that using absolute paths prevent a # configured tree to be moved without reconfiguration. AC_DEFUN([AM_AUX_DIR_EXPAND], [dnl Rely on autoconf to set up CDPATH properly. AC_PREREQ([2.50])dnl # expand $ac_aux_dir to an absolute path am_aux_dir=`cd $ac_aux_dir && pwd` ]) # AM_CONDITIONAL -*- Autoconf -*- # Copyright (C) 1997-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_CONDITIONAL(NAME, SHELL-CONDITION) # ------------------------------------- # Define a conditional. 
AC_DEFUN([AM_CONDITIONAL], [AC_PREREQ([2.52])dnl m4_if([$1], [TRUE], [AC_FATAL([$0: invalid condition: $1])], [$1], [FALSE], [AC_FATAL([$0: invalid condition: $1])])dnl AC_SUBST([$1_TRUE])dnl AC_SUBST([$1_FALSE])dnl _AM_SUBST_NOTMAKE([$1_TRUE])dnl _AM_SUBST_NOTMAKE([$1_FALSE])dnl m4_define([_AM_COND_VALUE_$1], [$2])dnl if $2; then $1_TRUE= $1_FALSE='#' else $1_TRUE='#' $1_FALSE= fi AC_CONFIG_COMMANDS_PRE( [if test -z "${$1_TRUE}" && test -z "${$1_FALSE}"; then AC_MSG_ERROR([[conditional "$1" was never defined. Usually this means the macro was only invoked conditionally.]]) fi])]) # Copyright (C) 1999-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # There are a few dirty hacks below to avoid letting 'AC_PROG_CC' be # written in clear, in which case automake, when reading aclocal.m4, # will think it sees a *use*, and therefore will trigger all it's # C support machinery. Also note that it means that autoscan, seeing # CC etc. in the Makefile, will ask for an AC_PROG_CC use... # _AM_DEPENDENCIES(NAME) # ---------------------- # See how the compiler implements dependency checking. # NAME is "CC", "CXX", "OBJC", "OBJCXX", "UPC", or "GJC". # We try a few techniques and use that to set a single cache variable. # # We don't AC_REQUIRE the corresponding AC_PROG_CC since the latter was # modified to invoke _AM_DEPENDENCIES(CC); we would have a circular # dependency, and given that the user is not expected to run this macro, # just rely on AC_PROG_CC. 
AC_DEFUN([_AM_DEPENDENCIES], [AC_REQUIRE([AM_SET_DEPDIR])dnl AC_REQUIRE([AM_OUTPUT_DEPENDENCY_COMMANDS])dnl AC_REQUIRE([AM_MAKE_INCLUDE])dnl AC_REQUIRE([AM_DEP_TRACK])dnl m4_if([$1], [CC], [depcc="$CC" am_compiler_list=], [$1], [CXX], [depcc="$CXX" am_compiler_list=], [$1], [OBJC], [depcc="$OBJC" am_compiler_list='gcc3 gcc'], [$1], [OBJCXX], [depcc="$OBJCXX" am_compiler_list='gcc3 gcc'], [$1], [UPC], [depcc="$UPC" am_compiler_list=], [$1], [GCJ], [depcc="$GCJ" am_compiler_list='gcc3 gcc'], [depcc="$$1" am_compiler_list=]) AC_CACHE_CHECK([dependency style of $depcc], [am_cv_$1_dependencies_compiler_type], [if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named 'D' -- because '-MD' means "put the output # in D". rm -rf conftest.dir mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. cp "$am_depcomp" conftest.dir cd conftest.dir # We will build objects and dependencies in a subdirectory because # it helps to detect inapplicable dependency modes. For instance # both Tru64's cc and ICC support -MD to output dependencies as a # side effect of compilation, but ICC will put the dependencies in # the current directory while Tru64 will put them in the object # directory. 
mkdir sub am_cv_$1_dependencies_compiler_type=none if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n ['s/^#*\([a-zA-Z0-9]*\))$/\1/p'] < ./depcomp` fi am__universal=false m4_case([$1], [CC], [case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac], [CXX], [case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac]) for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and # we should not choose a depcomp mode which is confused by this. # # We need to recreate these files for each test, as the compiler may # overwrite some of them when testing with obscure command lines. # This happens at least with the AIX C compiler. : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with # Solaris 10 /bin/sh. echo '/* dummy */' > sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf # We check with '-c' and '-o' for the sake of the "dashmstdout" # mode. It turns out that the SunPro C++ compiler does not properly # handle '-M -o', and we need to detect this. Also, some Intel # versions had trouble with output in subdirs. am__obj=sub/conftest.${OBJEXT-o} am__minus_obj="-o $am__obj" case $depmode in gcc) # This depmode causes a compiler race in universal mode. test "$am__universal" = false || continue ;; nosideeffect) # After this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested. if test "x$enable_dependency_tracking" = xyes; then continue else break fi ;; msvc7 | msvc7msys | msvisualcpp | msvcmsys) # This compiler won't grok '-c -o', but also, the minuso test has # not run yet. These depmodes are late enough in the game, and # so weak that their functioning should not be impacted. 
am__obj=conftest.${OBJEXT-o} am__minus_obj= ;; none) break ;; esac if depmode=$depmode \ source=sub/conftest.c object=$am__obj \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && grep $am__obj sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message # that says an option was ignored or not supported. # When given -MP, icc 7.0 and 7.1 complain thusly: # icc: Command line warning: ignoring option '-M'; no argument required # The diagnosis changed in icc 8.0: # icc: Command line remark: option '-MP' not supported if (grep 'ignoring option' conftest.err || grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else am_cv_$1_dependencies_compiler_type=$depmode break fi fi done cd .. rm -rf conftest.dir else am_cv_$1_dependencies_compiler_type=none fi ]) AC_SUBST([$1DEPMODE], [depmode=$am_cv_$1_dependencies_compiler_type]) AM_CONDITIONAL([am__fastdep$1], [ test "x$enable_dependency_tracking" != xno \ && test "$am_cv_$1_dependencies_compiler_type" = gcc3]) ]) # AM_SET_DEPDIR # ------------- # Choose a directory name for dependency files. # This macro is AC_REQUIREd in _AM_DEPENDENCIES. 
AC_DEFUN([AM_SET_DEPDIR], [AC_REQUIRE([AM_SET_LEADING_DOT])dnl AC_SUBST([DEPDIR], ["${am__leading_dot}deps"])dnl ]) # AM_DEP_TRACK # ------------ AC_DEFUN([AM_DEP_TRACK], [AC_ARG_ENABLE([dependency-tracking], [dnl AS_HELP_STRING( [--enable-dependency-tracking], [do not reject slow dependency extractors]) AS_HELP_STRING( [--disable-dependency-tracking], [speeds up one-time build])]) if test "x$enable_dependency_tracking" != xno; then am_depcomp="$ac_aux_dir/depcomp" AMDEPBACKSLASH='\' am__nodep='_no' fi AM_CONDITIONAL([AMDEP], [test "x$enable_dependency_tracking" != xno]) AC_SUBST([AMDEPBACKSLASH])dnl _AM_SUBST_NOTMAKE([AMDEPBACKSLASH])dnl AC_SUBST([am__nodep])dnl _AM_SUBST_NOTMAKE([am__nodep])dnl ]) # Generate code to set up dependency tracking. -*- Autoconf -*- # Copyright (C) 1999-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # _AM_OUTPUT_DEPENDENCY_COMMANDS # ------------------------------ AC_DEFUN([_AM_OUTPUT_DEPENDENCY_COMMANDS], [{ # Older Autoconf quotes --file arguments for eval, but not when files # are listed without --file. Let's play safe and only enable the eval # if we detect the quoting. case $CONFIG_FILES in *\'*) eval set x "$CONFIG_FILES" ;; *) set x $CONFIG_FILES ;; esac shift for mf do # Strip MF so we end up with the name of the file. mf=`echo "$mf" | sed -e 's/:.*$//'` # Check whether this is an Automake generated Makefile or not. # We used to match only the files named 'Makefile.in', but # some people rename them; so instead we look at the file content. # Grep'ing the first line is not enough: some people post-process # each Makefile.in and add a new line on top of each file to say so. # Grep'ing the whole file is not good either: AIX grep has a line # limit of 2048, but all sed's we know have understand at least 4000. 
if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then dirpart=`AS_DIRNAME("$mf")` else continue fi # Extract the definition of DEPDIR, am__include, and am__quote # from the Makefile without running 'make'. DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` test -z "$DEPDIR" && continue am__include=`sed -n 's/^am__include = //p' < "$mf"` test -z "$am__include" && continue am__quote=`sed -n 's/^am__quote = //p' < "$mf"` # Find all dependency output files, they are included files with # $(DEPDIR) in their names. We invoke sed twice because it is the # simplest approach to changing $(DEPDIR) to its actual value in the # expansion. for file in `sed -n " s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g'`; do # Make sure the directory exists. test -f "$dirpart/$file" && continue fdir=`AS_DIRNAME(["$file"])` AS_MKDIR_P([$dirpart/$fdir]) # echo "creating $dirpart/$file" echo '# dummy' > "$dirpart/$file" done done } ])# _AM_OUTPUT_DEPENDENCY_COMMANDS # AM_OUTPUT_DEPENDENCY_COMMANDS # ----------------------------- # This macro should only be invoked once -- use via AC_REQUIRE. # # This code is only required when automatic dependency tracking # is enabled. FIXME. This creates each '.P' file that we will # need in order to bootstrap the dependency handling code. AC_DEFUN([AM_OUTPUT_DEPENDENCY_COMMANDS], [AC_CONFIG_COMMANDS([depfiles], [test x"$AMDEP_TRUE" != x"" || _AM_OUTPUT_DEPENDENCY_COMMANDS], [AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"]) ]) # Do all the work for Automake. -*- Autoconf -*- # Copyright (C) 1996-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This macro actually does too much. Some checks are only needed if # your package does certain things. But this isn't really a big deal. 
# AM_INIT_AUTOMAKE(PACKAGE, VERSION, [NO-DEFINE])
# AM_INIT_AUTOMAKE([OPTIONS])
# -----------------------------------------------
# The call with PACKAGE and VERSION arguments is the old style
# call (pre autoconf-2.50), which is being phased out.  PACKAGE
# and VERSION should now be passed to AC_INIT and removed from
# the call to AM_INIT_AUTOMAKE.
# We support both call styles for the transition.  After
# the next Automake release, Autoconf can make the AC_INIT
# arguments mandatory, and then we can depend on a new Autoconf
# release and drop the old call support.
AC_DEFUN([AM_INIT_AUTOMAKE],
[AC_PREREQ([2.65])dnl
dnl Autoconf wants to disallow AM_ names.  We explicitly allow
dnl the ones we care about.
m4_pattern_allow([^AM_[A-Z]+FLAGS$])dnl
AC_REQUIRE([AM_SET_CURRENT_AUTOMAKE_VERSION])dnl
AC_REQUIRE([AC_PROG_INSTALL])dnl
if test "`cd $srcdir && pwd`" != "`pwd`"; then
  # Use -I$(srcdir) only when $(srcdir) != ., so that make's output
  # is not polluted with repeated "-I."
  AC_SUBST([am__isrc], [' -I$(srcdir)'])_AM_SUBST_NOTMAKE([am__isrc])dnl
  # test to see if srcdir already configured
  if test -f $srcdir/config.status; then
    AC_MSG_ERROR([source directory already configured; run "make distclean" there first])
  fi
fi

# test whether we have cygpath
if test -z "$CYGPATH_W"; then
  if (cygpath --version) >/dev/null 2>/dev/null; then
    CYGPATH_W='cygpath -w'
  else
    CYGPATH_W=echo
  fi
fi
AC_SUBST([CYGPATH_W])

# Define the identity of the package.
dnl Distinguish between old-style and new-style calls.
m4_ifval([$2],
[AC_DIAGNOSE([obsolete],
             [$0: two- and three-arguments forms are deprecated.])
m4_ifval([$3], [_AM_SET_OPTION([no-define])])dnl
 AC_SUBST([PACKAGE], [$1])dnl
 AC_SUBST([VERSION], [$2])],
[_AM_SET_OPTIONS([$1])dnl
dnl Diagnose old-style AC_INIT with new-style AM_AUTOMAKE_INIT.
m4_if(
  m4_ifdef([AC_PACKAGE_NAME], [ok]):m4_ifdef([AC_PACKAGE_VERSION], [ok]),
  [ok:ok],,
  [m4_fatal([AC_INIT should be called with package and version arguments])])dnl
 AC_SUBST([PACKAGE], ['AC_PACKAGE_TARNAME'])dnl
 AC_SUBST([VERSION], ['AC_PACKAGE_VERSION'])])dnl

_AM_IF_OPTION([no-define],,
[AC_DEFINE_UNQUOTED([PACKAGE], ["$PACKAGE"], [Name of package])
 AC_DEFINE_UNQUOTED([VERSION], ["$VERSION"], [Version number of package])])dnl

# Some tools Automake needs.
AC_REQUIRE([AM_SANITY_CHECK])dnl
AC_REQUIRE([AC_ARG_PROGRAM])dnl
AM_MISSING_PROG([ACLOCAL], [aclocal-${am__api_version}])
AM_MISSING_PROG([AUTOCONF], [autoconf])
AM_MISSING_PROG([AUTOMAKE], [automake-${am__api_version}])
AM_MISSING_PROG([AUTOHEADER], [autoheader])
AM_MISSING_PROG([MAKEINFO], [makeinfo])
AC_REQUIRE([AM_PROG_INSTALL_SH])dnl
AC_REQUIRE([AM_PROG_INSTALL_STRIP])dnl
AC_REQUIRE([AC_PROG_MKDIR_P])dnl
# For better backward compatibility.  To be removed once Automake 1.9.x
# dies out for good.  For more background, see:
# <http://lists.gnu.org/archive/html/automake/2012-07/msg00001.html>
# <http://lists.gnu.org/archive/html/automake/2012-07/msg00014.html>
AC_SUBST([mkdir_p], ['$(MKDIR_P)'])
# We need awk for the "check" target.  The system "awk" is bad on
# some platforms.
AC_REQUIRE([AC_PROG_AWK])dnl
AC_REQUIRE([AC_PROG_MAKE_SET])dnl
AC_REQUIRE([AM_SET_LEADING_DOT])dnl
_AM_IF_OPTION([tar-ustar], [_AM_PROG_TAR([ustar])],
              [_AM_IF_OPTION([tar-pax], [_AM_PROG_TAR([pax])],
                             [_AM_PROG_TAR([v7])])])
_AM_IF_OPTION([no-dependencies],,
[AC_PROVIDE_IFELSE([AC_PROG_CC],
                  [_AM_DEPENDENCIES([CC])],
                  [m4_define([AC_PROG_CC],
                             m4_defn([AC_PROG_CC])[_AM_DEPENDENCIES([CC])])])dnl
AC_PROVIDE_IFELSE([AC_PROG_CXX],
                  [_AM_DEPENDENCIES([CXX])],
                  [m4_define([AC_PROG_CXX],
                             m4_defn([AC_PROG_CXX])[_AM_DEPENDENCIES([CXX])])])dnl
AC_PROVIDE_IFELSE([AC_PROG_OBJC],
                  [_AM_DEPENDENCIES([OBJC])],
                  [m4_define([AC_PROG_OBJC],
                             m4_defn([AC_PROG_OBJC])[_AM_DEPENDENCIES([OBJC])])])dnl
AC_PROVIDE_IFELSE([AC_PROG_OBJCXX],
                  [_AM_DEPENDENCIES([OBJCXX])],
                  [m4_define([AC_PROG_OBJCXX],
                             m4_defn([AC_PROG_OBJCXX])[_AM_DEPENDENCIES([OBJCXX])])])dnl
])
AC_REQUIRE([AM_SILENT_RULES])dnl
dnl The testsuite driver may need to know about EXEEXT, so add the
dnl 'am__EXEEXT' conditional if _AM_COMPILER_EXEEXT was seen.  This
dnl macro is hooked onto _AC_COMPILER_EXEEXT early, see below.
AC_CONFIG_COMMANDS_PRE(dnl
[m4_provide_if([_AM_COMPILER_EXEEXT],
  [AM_CONDITIONAL([am__EXEEXT], [test -n "$EXEEXT"])])])dnl
])

dnl Hook into '_AC_COMPILER_EXEEXT' early to learn its expansion.  Do not
dnl add the conditional right here, as _AC_COMPILER_EXEEXT may be further
dnl mangled by Autoconf and run in a shell conditional statement.
m4_define([_AC_COMPILER_EXEEXT],
m4_defn([_AC_COMPILER_EXEEXT])[m4_provide([_AM_COMPILER_EXEEXT])])

# When config.status generates a header, we must update the stamp-h file.
# This file resides in the same directory as the config header
# that is generated.  The stamp files are numbered to have different names.

# Autoconf calls _AC_AM_CONFIG_HEADER_HOOK (when defined) in the
# loop where config.status creates the headers, so we can generate
# our stamp files there.
AC_DEFUN([_AC_AM_CONFIG_HEADER_HOOK],
[# Compute $1's index in $config_headers.
_am_arg=$1
_am_stamp_count=1
for _am_header in $config_headers :; do
  case $_am_header in
    $_am_arg | $_am_arg:* )
      break ;;
    * )
      _am_stamp_count=`expr $_am_stamp_count + 1` ;;
  esac
done
echo "timestamp for $_am_arg" >`AS_DIRNAME(["$_am_arg"])`/stamp-h[]$_am_stamp_count])

# Copyright (C) 2001-2013 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.

# AM_PROG_INSTALL_SH
# ------------------
# Define $install_sh.
AC_DEFUN([AM_PROG_INSTALL_SH],
[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl
if test x"${install_sh}" != xset; then
  case $am_aux_dir in
  *\ * | *\	*)
    install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;;
  *)
    install_sh="\${SHELL} $am_aux_dir/install-sh"
  esac
fi
AC_SUBST([install_sh])])

# Copyright (C) 2003-2013 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.

# Check whether the underlying file-system supports filenames
# with a leading dot.  For instance MS-DOS doesn't.
AC_DEFUN([AM_SET_LEADING_DOT],
[rm -rf .tst 2>/dev/null
mkdir .tst 2>/dev/null
if test -d .tst; then
  am__leading_dot=.
else
  am__leading_dot=_
fi
rmdir .tst 2>/dev/null
AC_SUBST([am__leading_dot])])

# Add --enable-maintainer-mode option to configure.         -*- Autoconf -*-
# From Jim Meyering

# Copyright (C) 1996-2013 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.

# AM_MAINTAINER_MODE([DEFAULT-MODE])
# ----------------------------------
# Control maintainer-specific portions of Makefiles.
# Default is to disable them, unless 'enable' is passed literally.
# For symmetry, 'disable' may be passed as well.
# Anyway, the user
# can override the default with the --enable/--disable switch.
AC_DEFUN([AM_MAINTAINER_MODE],
[m4_case(m4_default([$1], [disable]),
       [enable], [m4_define([am_maintainer_other], [disable])],
       [disable], [m4_define([am_maintainer_other], [enable])],
       [m4_define([am_maintainer_other], [enable])
        m4_warn([syntax], [unexpected argument to AM@&t@_MAINTAINER_MODE: $1])])
AC_MSG_CHECKING([whether to enable maintainer-specific portions of Makefiles])
  dnl maintainer-mode's default is 'disable' unless 'enable' is passed
  AC_ARG_ENABLE([maintainer-mode],
    [AS_HELP_STRING([--]am_maintainer_other[-maintainer-mode],
      am_maintainer_other[ make rules and dependencies not useful
      (and sometimes confusing) to the casual installer])],
    [USE_MAINTAINER_MODE=$enableval],
    [USE_MAINTAINER_MODE=]m4_if(am_maintainer_other, [enable], [no], [yes]))
  AC_MSG_RESULT([$USE_MAINTAINER_MODE])
  AM_CONDITIONAL([MAINTAINER_MODE], [test $USE_MAINTAINER_MODE = yes])
  MAINT=$MAINTAINER_MODE_TRUE
  AC_SUBST([MAINT])dnl
]
)

# Check to see how 'make' treats includes.	            -*- Autoconf -*-

# Copyright (C) 2001-2013 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.

# AM_MAKE_INCLUDE()
# -----------------
# Check to see how make treats includes.
AC_DEFUN([AM_MAKE_INCLUDE],
[am_make=${MAKE-make}
# (The recipe line in this here-doc must start with a literal TAB.)
cat > confinc << 'END'
am__doit:
	@echo this is the am__doit target
.PHONY: am__doit
END
# If we don't find an include directive, just comment out the code.
AC_MSG_CHECKING([for style of include used by $am_make])
am__include="#"
am__quote=
_am_result=none
# First try GNU make style include.
echo "include confinc" > confmf
# Ignore all kinds of additional output from 'make'.
case `$am_make -s -f confmf 2> /dev/null` in #(
*the\ am__doit\ target*)
  am__include=include
  am__quote=
  _am_result=GNU
  ;;
esac
# Now try BSD make style include.
if test "$am__include" = "#"; then
   echo '.include "confinc"' > confmf
   case `$am_make -s -f confmf 2> /dev/null` in #(
   *the\ am__doit\ target*)
     am__include=.include
     am__quote="\""
     _am_result=BSD
     ;;
   esac
fi
AC_SUBST([am__include])
AC_SUBST([am__quote])
AC_MSG_RESULT([$_am_result])
rm -f confinc confmf
])

# Copyright (C) 1999-2013 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.

# AM_PROG_CC_C_O
# --------------
# Like AC_PROG_CC_C_O, but changed for automake.
AC_DEFUN([AM_PROG_CC_C_O],
[AC_REQUIRE([AC_PROG_CC_C_O])dnl
AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl
AC_REQUIRE_AUX_FILE([compile])dnl
# FIXME: we rely on the cache variable name because
# there is no other way.
set dummy $CC
am_cc=`echo $[2] | sed ['s/[^a-zA-Z0-9_]/_/g;s/^[0-9]/_/']`
eval am_t=\$ac_cv_prog_cc_${am_cc}_c_o
if test "$am_t" != yes; then
   # Losing compiler, so override with the script.
   # FIXME: It is wrong to rewrite CC.
   # But if we don't then we get into trouble of one sort or another.
   # A longer-term fix would be to have automake use am__CC in this case,
   # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)"
   CC="$am_aux_dir/compile $CC"
fi
dnl Make sure AC_PROG_CC is never called again, or it will override our
dnl setting of CC.
m4_define([AC_PROG_CC],
          [m4_fatal([AC_PROG_CC cannot be called after AM_PROG_CC_C_O])])
])

# Fake the existence of programs that GNU maintainers use.  -*- Autoconf -*-

# Copyright (C) 1997-2013 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# AM_MISSING_PROG(NAME, PROGRAM)
# ------------------------------
# Define NAME to run PROGRAM, falling back on the 'missing' wrapper
# when the tool is unavailable on the build machine.
AC_DEFUN([AM_MISSING_PROG],
[AC_REQUIRE([AM_MISSING_HAS_RUN])
$1=${$1-"${am_missing_run}$2"}
AC_SUBST($1)])

# AM_MISSING_HAS_RUN
# ------------------
# Define MISSING if not defined so far and test if it is modern enough.
# If it is, set am_missing_run to use it, otherwise, to nothing.
AC_DEFUN([AM_MISSING_HAS_RUN],
[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl
AC_REQUIRE_AUX_FILE([missing])dnl
if test x"${MISSING+set}" != xset; then
  case $am_aux_dir in
  *\ * | *\	*)
    MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;;
  *)
    MISSING="\${SHELL} $am_aux_dir/missing" ;;
  esac
fi
# Use eval to expand $SHELL
if eval "$MISSING --is-lightweight"; then
  am_missing_run="$MISSING "
else
  am_missing_run=
  AC_MSG_WARN(['missing' script is too old or missing])
fi
])

# Helper functions for option handling.                     -*- Autoconf -*-

# Copyright (C) 2001-2013 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.

# _AM_MANGLE_OPTION(NAME)
# -----------------------
AC_DEFUN([_AM_MANGLE_OPTION],
[[_AM_OPTION_]m4_bpatsubst($1, [[^a-zA-Z0-9_]], [_])])

# _AM_SET_OPTION(NAME)
# --------------------
# Set option NAME.  Presently that only means defining a flag for this option.
AC_DEFUN([_AM_SET_OPTION],
[m4_define(_AM_MANGLE_OPTION([$1]), [1])])

# _AM_SET_OPTIONS(OPTIONS)
# ------------------------
# OPTIONS is a space-separated list of Automake options.
AC_DEFUN([_AM_SET_OPTIONS],
[m4_foreach_w([_AM_Option], [$1], [_AM_SET_OPTION(_AM_Option)])])

# _AM_IF_OPTION(OPTION, IF-SET, [IF-NOT-SET])
# -------------------------------------------
# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise.
AC_DEFUN([_AM_IF_OPTION],
[m4_ifset(_AM_MANGLE_OPTION([$1]), [$2], [$3])])

# Copyright (C) 1999-2013 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.

# AM_PATH_PYTHON([MINIMUM-VERSION], [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
# ---------------------------------------------------------------------------
# Adds support for distributing Python modules and packages.  To
# install modules, copy them to $(pythondir), using the python_PYTHON
# automake variable.  To install a package with the same name as the
# automake package, install to $(pkgpythondir), or use the
# pkgpython_PYTHON automake variable.
#
# The variables $(pyexecdir) and $(pkgpyexecdir) are provided as
# locations to install python extension modules (shared libraries).
# Another macro is required to find the appropriate flags to compile
# extension modules.
#
# If your package is configured with a different prefix to python,
# users will have to add the install directory to the PYTHONPATH
# environment variable, or create a .pth file (see the python
# documentation for details).
#
# If the MINIMUM-VERSION argument is passed, AM_PATH_PYTHON will
# cause an error if the version of python installed on the system
# doesn't meet the requirement.  MINIMUM-VERSION should consist of
# numbers and dots only.
AC_DEFUN([AM_PATH_PYTHON],
 [
  dnl Find a Python interpreter.  Python versions prior to 2.0 are not
  dnl supported. (2.0 was released on October 16, 2000).
  m4_define_default([_AM_PYTHON_INTERPRETER_LIST],
[python python2 python3 python3.3 python3.2 python3.1 python3.0 python2.7 dnl
 python2.6 python2.5 python2.4 python2.3 python2.2 python2.1 python2.0])

  AC_ARG_VAR([PYTHON], [the Python interpreter])

  m4_if([$1],[],[
    dnl No version check is needed.
    # Find any Python interpreter.
    if test -z "$PYTHON"; then
      AC_PATH_PROGS([PYTHON], _AM_PYTHON_INTERPRETER_LIST, :)
    fi
    am_display_PYTHON=python
  ], [
    dnl A version check is needed.
    if test -n "$PYTHON"; then
      # If the user set $PYTHON, use it and don't search something else.
      AC_MSG_CHECKING([whether $PYTHON version is >= $1])
      AM_PYTHON_CHECK_VERSION([$PYTHON], [$1],
                              [AC_MSG_RESULT([yes])],
                              [AC_MSG_RESULT([no])
                               AC_MSG_ERROR([Python interpreter is too old])])
      am_display_PYTHON=$PYTHON
    else
      # Otherwise, try each interpreter until we find one that satisfies
      # VERSION.
      AC_CACHE_CHECK([for a Python interpreter with version >= $1],
        [am_cv_pathless_PYTHON],[
        for am_cv_pathless_PYTHON in _AM_PYTHON_INTERPRETER_LIST none; do
          test "$am_cv_pathless_PYTHON" = none && break
          AM_PYTHON_CHECK_VERSION([$am_cv_pathless_PYTHON], [$1], [break])
        done])
      # Set $PYTHON to the absolute path of $am_cv_pathless_PYTHON.
      if test "$am_cv_pathless_PYTHON" = none; then
        PYTHON=:
      else
        AC_PATH_PROG([PYTHON], [$am_cv_pathless_PYTHON])
      fi
      am_display_PYTHON=$am_cv_pathless_PYTHON
    fi
  ])

  if test "$PYTHON" = :; then
  dnl Run any user-specified action, or abort.
    m4_default([$3], [AC_MSG_ERROR([no suitable Python interpreter found])])
  else

  dnl Query Python for its version number.  Getting [:3] seems to be
  dnl the best way to do this; it's what "site.py" does in the standard
  dnl library.
  dnl NOTE(review): slicing sys.version to 3 chars misreports Python >= 3.10
  dnl (yields "3.1"); newer Automake uses sys.version_info.  Prefer
  dnl regenerating this file with a current aclocal over hand-patching.

  AC_CACHE_CHECK([for $am_display_PYTHON version], [am_cv_python_version],
    [am_cv_python_version=`$PYTHON -c "import sys; sys.stdout.write(sys.version[[:3]])"`])
  AC_SUBST([PYTHON_VERSION], [$am_cv_python_version])

  dnl Use the values of $prefix and $exec_prefix for the corresponding
  dnl values of PYTHON_PREFIX and PYTHON_EXEC_PREFIX.  These are made
  dnl distinct variables so they can be overridden if need be.  However,
  dnl general consensus is that you shouldn't need this ability.

  AC_SUBST([PYTHON_PREFIX], ['${prefix}'])
  AC_SUBST([PYTHON_EXEC_PREFIX], ['${exec_prefix}'])

  dnl At times (like when building shared libraries) you may want
  dnl to know which OS platform Python thinks this is.

  AC_CACHE_CHECK([for $am_display_PYTHON platform], [am_cv_python_platform],
    [am_cv_python_platform=`$PYTHON -c "import sys; sys.stdout.write(sys.platform)"`])
  AC_SUBST([PYTHON_PLATFORM], [$am_cv_python_platform])

  # Just factor out some code duplication.
  am_python_setup_sysconfig="\
import sys
# Prefer sysconfig over distutils.sysconfig, for better compatibility
# with python 3.x.  See automake bug#10227.
try:
    import sysconfig
except ImportError:
    can_use_sysconfig = 0
else:
    can_use_sysconfig = 1
# Can't use sysconfig in CPython 2.7, since it's broken in virtualenvs:
# <https://github.com/pypa/virtualenv/issues/118>
try:
    from platform import python_implementation
    if python_implementation() == 'CPython' and sys.version[[:3]] == '2.7':
        can_use_sysconfig = 0
except ImportError:
    pass"

  dnl Set up 4 directories:

  dnl pythondir -- where to install python scripts.  This is the
  dnl   site-packages directory, not the python standard library
  dnl   directory like in previous automake betas.  This behavior
  dnl   is more consistent with lispdir.m4 for example.
  dnl Query distutils for this directory.
  AC_CACHE_CHECK([for $am_display_PYTHON script directory],
    [am_cv_python_pythondir],
    [if test "x$prefix" = xNONE
     then
       am_py_prefix=$ac_default_prefix
     else
       am_py_prefix=$prefix
     fi
     am_cv_python_pythondir=`$PYTHON -c "
$am_python_setup_sysconfig
if can_use_sysconfig:
    sitedir = sysconfig.get_path('purelib', vars={'base':'$am_py_prefix'})
else:
    from distutils import sysconfig
    sitedir = sysconfig.get_python_lib(0, 0, prefix='$am_py_prefix')
sys.stdout.write(sitedir)"`
     case $am_cv_python_pythondir in
     $am_py_prefix*)
       am__strip_prefix=`echo "$am_py_prefix" | sed 's|.|.|g'`
       am_cv_python_pythondir=`echo "$am_cv_python_pythondir" | sed "s,^$am__strip_prefix,$PYTHON_PREFIX,"`
       ;;
     *)
       case $am_py_prefix in
         /usr|/System*) ;;
         *)
          am_cv_python_pythondir=$PYTHON_PREFIX/lib/python$PYTHON_VERSION/site-packages
          ;;
       esac
       ;;
     esac
    ])
  AC_SUBST([pythondir], [$am_cv_python_pythondir])

  dnl pkgpythondir -- $PACKAGE directory under pythondir.  Was
  dnl   PYTHON_SITE_PACKAGE in previous betas, but this naming is
  dnl   more consistent with the rest of automake.

  AC_SUBST([pkgpythondir], [\${pythondir}/$PACKAGE])

  dnl pyexecdir -- directory for installing python extension modules
  dnl   (shared libraries)
  dnl Query distutils for this directory.
  dnl NOTE(review): 'platbase' below is seeded from $am_py_prefix rather
  dnl than $am_py_exec_prefix; this matches upstream Automake 1.13 (later
  dnl releases changed it) -- confirm before relying on split prefixes.
  AC_CACHE_CHECK([for $am_display_PYTHON extension module directory],
    [am_cv_python_pyexecdir],
    [if test "x$exec_prefix" = xNONE
     then
       am_py_exec_prefix=$am_py_prefix
     else
       am_py_exec_prefix=$exec_prefix
     fi
     am_cv_python_pyexecdir=`$PYTHON -c "
$am_python_setup_sysconfig
if can_use_sysconfig:
    sitedir = sysconfig.get_path('platlib', vars={'platbase':'$am_py_prefix'})
else:
    from distutils import sysconfig
    sitedir = sysconfig.get_python_lib(1, 0, prefix='$am_py_prefix')
sys.stdout.write(sitedir)"`
     case $am_cv_python_pyexecdir in
     $am_py_exec_prefix*)
       am__strip_prefix=`echo "$am_py_exec_prefix" | sed 's|.|.|g'`
       am_cv_python_pyexecdir=`echo "$am_cv_python_pyexecdir" | sed "s,^$am__strip_prefix,$PYTHON_EXEC_PREFIX,"`
       ;;
     *)
       case $am_py_exec_prefix in
         /usr|/System*) ;;
         *)
           am_cv_python_pyexecdir=$PYTHON_EXEC_PREFIX/lib/python$PYTHON_VERSION/site-packages
           ;;
       esac
       ;;
     esac
    ])
  AC_SUBST([pyexecdir], [$am_cv_python_pyexecdir])

  dnl pkgpyexecdir -- $(pyexecdir)/$(PACKAGE)

  AC_SUBST([pkgpyexecdir], [\${pyexecdir}/$PACKAGE])

  dnl Run any user-specified action.
  $2
  fi

])


# AM_PYTHON_CHECK_VERSION(PROG, VERSION, [ACTION-IF-TRUE], [ACTION-IF-FALSE])
# ---------------------------------------------------------------------------
# Run ACTION-IF-TRUE if the Python interpreter PROG has version >= VERSION.
# Run ACTION-IF-FALSE otherwise.
# This test uses sys.hexversion instead of the string equivalent (first
# word of sys.version), in order to cope with versions such as 2.2c1.
# This supports Python 2.0 or higher. (2.0 was released on October 16, 2000).
AC_DEFUN([AM_PYTHON_CHECK_VERSION],
 [prog="import sys
# split strings by '.' and convert to numeric.  Append some zeros
# because we need at least 4 digits for the hex conversion.
# map returns an iterator in Python 3.0 and a list in 2.x
minver = list(map(int, '$2'.split('.'))) + [[0, 0, 0]]
minverhex = 0
# xrange is not present in Python 3.0 and range returns an iterator
for i in list(range(0, 4)):
  minverhex = (minverhex << 8) + minver[[i]]
sys.exit(sys.hexversion < minverhex)"
  AS_IF([AM_RUN_LOG([$1 -c "$prog"])], [$3], [$4])])

# Copyright (C) 2001-2013 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.

# AM_RUN_LOG(COMMAND)
# -------------------
# Run COMMAND, save the exit status in ac_status, and log it.
# (This has been adapted from Autoconf's _AC_RUN_LOG macro.)
AC_DEFUN([AM_RUN_LOG],
[{ echo "$as_me:$LINENO: $1" >&AS_MESSAGE_LOG_FD
   ($1) >&AS_MESSAGE_LOG_FD 2>&AS_MESSAGE_LOG_FD
   ac_status=$?
   echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD
   (exit $ac_status); }])

# Check to make sure that the build environment is sane.    -*- Autoconf -*-

# Copyright (C) 1996-2013 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.

# AM_SANITY_CHECK
# ---------------
AC_DEFUN([AM_SANITY_CHECK],
[AC_MSG_CHECKING([whether build environment is sane])
# Reject unsafe characters in $srcdir or the absolute working directory
# name.  Accept space and tab only in the latter.
am_lf='
'
case `pwd` in
  *[[\\\"\#\$\&\'\`$am_lf]]*)
    AC_MSG_ERROR([unsafe absolute working directory name]);;
esac
case $srcdir in
  *[[\\\"\#\$\&\'\`$am_lf\ \	]]*)
    AC_MSG_ERROR([unsafe srcdir value: '$srcdir']);;
esac

# Do 'set' in a subshell so we don't clobber the current shell's
# arguments.  Must try -L first in case configure is actually a
# symlink; some systems play weird games with the mod time of symlinks
# (eg FreeBSD returns the mod time of the symlink's containing
# directory).
if (
   am_has_slept=no
   for am_try in 1 2; do
     echo "timestamp, slept: $am_has_slept" > conftest.file
     set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null`
     if test "$[*]" = "X"; then
        # -L didn't work.
        set X `ls -t "$srcdir/configure" conftest.file`
     fi
     if test "$[*]" != "X $srcdir/configure conftest.file" \
        && test "$[*]" != "X conftest.file $srcdir/configure"; then

        # If neither matched, then we have a broken ls.  This can happen
        # if, for instance, CONFIG_SHELL is bash and it inherits a
        # broken ls alias from the environment.  This has actually
        # happened.  Such a system could not be considered "sane".
        AC_MSG_ERROR([ls -t appears to fail.  Make sure there is not a broken
  alias in your environment])
     fi
     if test "$[2]" = conftest.file || test $am_try -eq 2; then
       break
     fi
     # Just in case.
     sleep 1
     am_has_slept=yes
   done
   test "$[2]" = conftest.file
   )
then
   # Ok.
   :
else
   AC_MSG_ERROR([newly created file is older than distributed files!
Check your system clock])
fi
AC_MSG_RESULT([yes])
# If we didn't sleep, we still need to ensure time stamps of config.status and
# generated files are strictly newer.
am_sleep_pid=
if grep 'slept: no' conftest.file >/dev/null 2>&1; then
  ( sleep 1 ) &
  am_sleep_pid=$!
fi
AC_CONFIG_COMMANDS_PRE(
  [AC_MSG_CHECKING([that generated files are newer than configure])
   if test -n "$am_sleep_pid"; then
     # Hide warnings about reused PIDs.
     wait $am_sleep_pid 2>/dev/null
   fi
   AC_MSG_RESULT([done])])
rm -f conftest.file
])

# Copyright (C) 2009-2013 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# AM_SILENT_RULES([DEFAULT])
# --------------------------
# Enable less verbose build rules; with the default set to DEFAULT
# ("yes" being less verbose, "no" or empty being verbose).
AC_DEFUN([AM_SILENT_RULES],
[AC_ARG_ENABLE([silent-rules], [dnl
AS_HELP_STRING(
  [--enable-silent-rules],
  [less verbose build output (undo: "make V=1")])
AS_HELP_STRING(
  [--disable-silent-rules],
  [verbose build output (undo: "make V=0")])dnl
])
case $enable_silent_rules in @%:@ (((
  yes) AM_DEFAULT_VERBOSITY=0;;
   no) AM_DEFAULT_VERBOSITY=1;;
    *) AM_DEFAULT_VERBOSITY=m4_if([$1], [yes], [0], [1]);;
esac
dnl
dnl A few 'make' implementations (e.g., NonStop OS and NextStep)
dnl do not support nested variable expansions.
dnl See automake bug#9928 and bug#10237.
am_make=${MAKE-make}
dnl (The '@$(TRUE)' recipe line below must start with a literal TAB.)
AC_CACHE_CHECK([whether $am_make supports nested variables],
   [am_cv_make_support_nested_variables],
   [if AS_ECHO([['TRUE=$(BAR$(V))
BAR0=false
BAR1=true
V=1
am__doit:
	@$(TRUE)
.PHONY: am__doit']]) | $am_make -f - >/dev/null 2>&1; then
  am_cv_make_support_nested_variables=yes
else
  am_cv_make_support_nested_variables=no
fi])
if test $am_cv_make_support_nested_variables = yes; then
  dnl Using '$V' instead of '$(V)' breaks IRIX make.
  AM_V='$(V)'
  AM_DEFAULT_V='$(AM_DEFAULT_VERBOSITY)'
else
  AM_V=$AM_DEFAULT_VERBOSITY
  AM_DEFAULT_V=$AM_DEFAULT_VERBOSITY
fi
AC_SUBST([AM_V])dnl
AM_SUBST_NOTMAKE([AM_V])dnl
AC_SUBST([AM_DEFAULT_V])dnl
AM_SUBST_NOTMAKE([AM_DEFAULT_V])dnl
AC_SUBST([AM_DEFAULT_VERBOSITY])dnl
AM_BACKSLASH='\'
AC_SUBST([AM_BACKSLASH])dnl
_AM_SUBST_NOTMAKE([AM_BACKSLASH])dnl
])

# Copyright (C) 2001-2013 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.

# AM_PROG_INSTALL_STRIP
# ---------------------
# One issue with vendor 'install' (even GNU) is that you can't
# specify the program used to strip binaries.  This is especially
# annoying in cross-compiling environments, where the build's strip
# is unlikely to handle the host's binaries.
# Fortunately install-sh will honor a STRIPPROG variable, so we
# always use install-sh in "make install-strip", and initialize
# STRIPPROG with the value of the STRIP variable (set by the user).
AC_DEFUN([AM_PROG_INSTALL_STRIP],
[AC_REQUIRE([AM_PROG_INSTALL_SH])dnl
# Installed binaries are usually stripped using 'strip' when the user
# run "make install-strip".  However 'strip' might not be the right
# tool to use in cross-compilation environments, therefore Automake
# will honor the 'STRIP' environment variable to overrule this program.
dnl Don't test for $cross_compiling = yes, because it might be 'maybe'.
if test "$cross_compiling" != no; then
  AC_CHECK_TOOL([STRIP], [strip], :)
fi
INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s"
AC_SUBST([INSTALL_STRIP_PROGRAM])])

# Copyright (C) 2006-2013 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.

# _AM_SUBST_NOTMAKE(VARIABLE)
# ---------------------------
# Prevent Automake from outputting VARIABLE = @VARIABLE@ in Makefile.in.
# This macro is traced by Automake.
AC_DEFUN([_AM_SUBST_NOTMAKE])

# AM_SUBST_NOTMAKE(VARIABLE)
# --------------------------
# Public sister of _AM_SUBST_NOTMAKE.
AC_DEFUN([AM_SUBST_NOTMAKE], [_AM_SUBST_NOTMAKE($@)])

# Check how to create a tarball.                            -*- Autoconf -*-

# Copyright (C) 2004-2013 Free Software Foundation, Inc.
#
# This file is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.

# _AM_PROG_TAR(FORMAT)
# --------------------
# Check how to create a tarball in format FORMAT.
# FORMAT should be one of 'v7', 'ustar', or 'pax'.
# # Substitute a variable $(am__tar) that is a command # writing to stdout a FORMAT-tarball containing the directory # $tardir. # tardir=directory && $(am__tar) > result.tar # # Substitute a variable $(am__untar) that extract such # a tarball read from stdin. # $(am__untar) < result.tar # AC_DEFUN([_AM_PROG_TAR], [# Always define AMTAR for backward compatibility. Yes, it's still used # in the wild :-( We should find a proper way to deprecate it ... AC_SUBST([AMTAR], ['$${TAR-tar}']) # We'll loop over all known methods to create a tar archive until one works. _am_tools='gnutar m4_if([$1], [ustar], [plaintar]) pax cpio none' m4_if([$1], [v7], [am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -'], [m4_case([$1], [ustar], [# The POSIX 1988 'ustar' format is defined with fixed-size fields. # There is notably a 21 bits limit for the UID and the GID. In fact, # the 'pax' utility can hang on bigger UID/GID (see automake bug#8343 # and bug#13588). am_max_uid=2097151 # 2^21 - 1 am_max_gid=$am_max_uid # The $UID and $GID variables are not portable, so we need to resort # to the POSIX-mandated id(1) utility. Errors in the 'id' calls # below are definitely unexpected, so allow the users to see them # (that is, avoid stderr redirection). am_uid=`id -u || echo unknown` am_gid=`id -g || echo unknown` AC_MSG_CHECKING([whether UID '$am_uid' is supported by ustar format]) if test $am_uid -le $am_max_uid; then AC_MSG_RESULT([yes]) else AC_MSG_RESULT([no]) _am_tools=none fi AC_MSG_CHECKING([whether GID '$am_gid' is supported by ustar format]) if test $am_gid -le $am_max_gid; then AC_MSG_RESULT([yes]) else AC_MSG_RESULT([no]) _am_tools=none fi], [pax], [], [m4_fatal([Unknown tar format])]) AC_MSG_CHECKING([how to create a $1 tar archive]) # Go ahead even if we have the value already cached. We do so because we # need to set the values for the 'am__tar' and 'am__untar' variables. 
_am_tools=${am_cv_prog_tar_$1-$_am_tools} for _am_tool in $_am_tools; do case $_am_tool in gnutar) for _am_tar in tar gnutar gtar; do AM_RUN_LOG([$_am_tar --version]) && break done am__tar="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$$tardir"' am__tar_="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$tardir"' am__untar="$_am_tar -xf -" ;; plaintar) # Must skip GNU tar: if it does not support --format= it doesn't create # ustar tarball either. (tar --version) >/dev/null 2>&1 && continue am__tar='tar chf - "$$tardir"' am__tar_='tar chf - "$tardir"' am__untar='tar xf -' ;; pax) am__tar='pax -L -x $1 -w "$$tardir"' am__tar_='pax -L -x $1 -w "$tardir"' am__untar='pax -r' ;; cpio) am__tar='find "$$tardir" -print | cpio -o -H $1 -L' am__tar_='find "$tardir" -print | cpio -o -H $1 -L' am__untar='cpio -i -H $1 -d' ;; none) am__tar=false am__tar_=false am__untar=false ;; esac # If the value was cached, stop now. We just wanted to have am__tar # and am__untar set. test -n "${am_cv_prog_tar_$1}" && break # tar/untar a dummy directory, and stop if the command works. rm -rf conftest.dir mkdir conftest.dir echo GrepMe > conftest.dir/file AM_RUN_LOG([tardir=conftest.dir && eval $am__tar_ >conftest.tar]) rm -rf conftest.dir if test -s conftest.tar; then AM_RUN_LOG([$am__untar <conftest.tar]) AM_RUN_LOG([cat conftest.dir/file]) grep GrepMe conftest.dir/file >/dev/null 2>&1 && break fi done rm -rf conftest.dir AC_CACHE_VAL([am_cv_prog_tar_$1], [am_cv_prog_tar_$1=$_am_tool]) AC_MSG_RESULT([$am_cv_prog_tar_$1])]) AC_SUBST([am__tar]) AC_SUBST([am__untar]) ]) # _AM_PROG_TAR m4_include([m4macros/attributes.m4]) m4_include([m4macros/glibtests.m4]) m4_include([m4macros/gtk-doc.m4]) m4_include([m4macros/libtool.m4]) m4_include([m4macros/ltoptions.m4]) m4_include([m4macros/ltsugar.m4]) m4_include([m4macros/ltversion.m4]) m4_include([m4macros/lt~obsolete.m4]) m4_include([acinclude.m4])
{ "pile_set_name": "Github" }
-- TFS spell script: sixteen Combat objects sharing one circular hit area,
-- each applying a shield-skill attribute condition of a different strength
-- (0..15 percent) for 10 seconds. onCastSpell executes one at random.

-- The area matrix is identical for every combat, so build it (and the combat
-- area) once up front instead of on every loop iteration. The original also
-- leaked `arr` as a global variable; it is local here.
local arr = {
    {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
    {0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0},
    {0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0},
    {0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0},
    {0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0},
    {0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0},
    {0, 1, 1, 1, 1, 1, 3, 1, 1, 1, 1, 1, 0},
    {0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0},
    {0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0},
    {0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0},
    {0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0},
    {0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0},
    {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
}
local area = createCombatArea(arr)

local combat = {}
for i = 0, 15 do
    combat[i] = Combat()
    combat[i]:setParameter(COMBAT_PARAM_EFFECT, CONST_ME_HITAREA)
    -- 10-second attribute condition; `i` is the shield-percent bonus.
    local condition = Condition(CONDITION_ATTRIBUTES)
    condition:setParameter(CONDITION_PARAM_TICKS, 10000)
    condition:setParameter(CONDITION_PARAM_SKILL_SHIELDPERCENT, i)
    combat[i]:setArea(area)
    combat[i]:addCondition(condition)
end

-- Entry point called by the server: pick a random strength variant (0..15)
-- and execute it on the caster.
function onCastSpell(creature, var)
    return combat[math.random(0, 15)]:execute(creature, var)
end
{ "pile_set_name": "Github" }
package com.airbnb.aerosolve.training

import java.io.BufferedWriter
import java.io.OutputStreamWriter
import java.util

import com.airbnb.aerosolve.core.{ModelRecord, ModelHeader, FeatureVector, Example}
import com.airbnb.aerosolve.core.models.LinearModel
import com.airbnb.aerosolve.core.util.Util
import com.typesafe.config.Config
import org.slf4j.{LoggerFactory, Logger}
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.rdd.RDD
import scala.collection.mutable.HashMap
import scala.collection.mutable.HashSet
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.Buffer
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import scala.util.Random
import scala.math.abs
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.fs.Path
import org.apache.hadoop.conf.Configuration
// NOTE(review): duplicate import of org.apache.hadoop.fs.Path (also above) —
// harmless, but could be removed.
import org.apache.hadoop.fs.Path

// Feature-selection utilities based on pointwise mutual information (PMI)
// between discrete features and the positive label.
object FeatureSelection {
  private final val log: Logger = LoggerFactory.getLogger("FeatureSelection")
  // Sentinel (family, name) key used to accumulate the global
  // (total count, positive count) denominator across all examples.
  val allKey : (String, String) = ("$ALL", "$POS")

  // Given a RDD compute the pointwise mutual information between
  // the positive label and the discrete features.
  //
  // examples     : raw examples; flattened to pointwise form via LinearRankerUtils.
  // rankKey      : float-feature family holding the label score.
  // posThreshold : label values strictly above this count as positive.
  // minPosCount  : features with fewer positives than this are dropped.
  // newCrosses   : if true, also score synthetic "<NEW>"-joined feature crosses.
  //
  // Returns ((family, name), PMI) where PMI = log2(P(feature | positive) / P(feature)).
  def pointwiseMutualInformation(examples : RDD[Example],
                                 config : Config,
                                 key : String,
                                 rankKey : String,
                                 posThreshold : Double,
                                 minPosCount : Double,
                                 newCrosses : Boolean) : RDD[((String, String), Double)] = {
    val pointwise = LinearRankerUtils.makePointwise(examples, config, key, rankKey)
    val features = pointwise
      .mapPartitions(part => {
        // The tuple2 is var, var | positive
        // i.e. per-feature (total occurrence count, occurrences in positives).
        val output = scala.collection.mutable.HashMap[(String, String), (Double, Double)]()
        part.foreach(example => {
          val featureVector = example.example.get(0)
          // NOTE(review): assumes rankKey is present with at least one value in
          // every example's first feature vector — .head throws otherwise.
          val isPos = if (featureVector.floatFeatures.get(rankKey).asScala.head._2 > posThreshold) 1.0
          else 0.0
          // Global denominator: every example counts once in both slots'
          // totals (all._2 tracks the number of positives via isPos below).
          val all : (Double, Double) = output.getOrElse(allKey, (0.0, 0.0))
          output.put(allKey, (all._1 + 1.0, all._2 + 1.0 * isPos))
          val features : Array[(String, String)] = LinearRankerUtils.getFeatures(featureVector)
          if (newCrosses) {
            // Count ordered crosses of distinct families (i._1 < j._1 avoids
            // duplicates and self-crosses) under synthetic "<NEW>" names.
            for (i <- features) {
              for (j <- features) {
                if (i._1 < j._1) {
                  val key = ("%s<NEW>%s".format(i._1, j._1), "%s<NEW>%s".format(i._2, j._2))
                  val x = output.getOrElse(key, (0.0, 0.0))
                  output.put(key, (x._1 + 1.0, x._2 + 1.0 * isPos))
                }
              }
            }
          }
          // Count each individual feature.
          for (feature <- features) {
            val x = output.getOrElse(feature, (0.0, 0.0))
            output.put(feature, (x._1 + 1.0, x._2 + 1.0 * isPos))
          }
        })
        output.iterator
      })
      .reduceByKey((a, b) => (a._1 + b._1, a._2 + b._2))
      .filter(x => x._2._2 >= minPosCount)
    // Pull back the global totals accumulated under allKey; used as the
    // denominators for P(feature) and P(feature | positive).
    val allCount = features.filter(x => x._1.equals(allKey)).take(1).head
    features.map(x => {
      val prob = x._2._1 / allCount._2._1
      val probPos = x._2._2 / allCount._2._2
      (x._1, math.log(probPos / prob) / math.log(2.0))
    })
  }

  // Returns the maximum entropy per family
  // (the single feature per family whose PMI has the largest magnitude).
  def maxEntropy(input : RDD[((String, String), Double)]) : RDD[((String, String), Double)] = {
    input
      .map(x => (x._1._1, (x._1._2, x._2)))
      .reduceByKey((a, b) => if (math.abs(a._2) > math.abs(b._2)) a else b)
      .map(x => ((x._1, x._2._1), x._2._2))
  }
}
{ "pile_set_name": "Github" }
{ /*************************************************************************** encloseselectiondlg.pas ----------------------- ***************************************************************************/ *************************************************************************** * * * This source is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * * This code is distributed in the hope that it will be useful, but * * WITHOUT ANY WARRANTY; without even the implied warranty of * * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * * General Public License for more details. * * * * A copy of the GNU General Public License is available on the World * * Wide Web at <http://www.gnu.org/copyleft/gpl.html>. You can also * * obtain it by writing to the Free Software Foundation, * * Inc., 51 Franklin Street - Fifth Floor, Boston, MA 02110-1335, USA. 
* * * *************************************************************************** Author: Mattias Gaertner Abstract: Dialog to setup parameters of the enclose selection function } unit EncloseSelectionDlg; {$mode objfpc}{$H+} interface uses Classes, SysUtils, // LCL Forms, Controls, Graphics, Dialogs, ExtCtrls, ButtonPanel, // LazUtils LazUTF8, LazTracer, LazStringUtils, // CodeTools BasicCodeTools, CodeToolManager, SourceChanger, // IDE LazarusIDEStrConsts; type TEncloseSelectionType = ( estTryFinally, estTryExcept, estBeginEnd, estForBeginEnd, estWhileDoBeginEnd, estRepeatUntil, estWith, estPascalComment, estRegionArea ); { TEncloseSelectionDialog } TEncloseSelectionDialog = class(TForm) ButtonPanel: TButtonPanel; TypeRadiogroup: TRADIOGROUP; procedure EncloseSelectionDialogCREATE(Sender: TObject); private public function GetEncloseType: TEncloseSelectionType; end; function ShowEncloseSelectionDialog(out TheType: TEncloseSelectionType ): TModalResult; function EncloseSelectionTypeDescription(TheType: TEncloseSelectionType ): string; procedure GetEncloseSelectionParams(TheType: TEncloseSelectionType; out Template: string); procedure EncloseTextSelection(const Template: string; Source: TStrings; SelectionStart, SelectionEnd: TPoint; out NewSelection: string; out NewCursor: TPoint); implementation {$R *.lfm} function EncloseSelectionTypeDescription(TheType: TEncloseSelectionType): string; begin Result:=''; case TheType of estTryFinally: Result:='Try..Finally'; estTryExcept: Result:='Try..Except'; estBeginEnd: Result:='Begin..End'; estForBeginEnd: Result:='For | do begin..end'; estWhileDoBeginEnd: Result:='While | do begin..end'; estRepeatUntil: Result:='Repeat..Until |'; estWith: Result:='With | do begin..end'; estPascalComment: Result:='{..}'; estRegionArea: Result:='{$REGION ''|''}..{$ENDREGION}'; else RaiseGDBException('EncloseSelectionTypeDescription'); end; end; function ShowEncloseSelectionDialog(out TheType: TEncloseSelectionType ): TModalResult; var 
TheDialog: TEncloseSelectionDialog; begin TheType:=estBeginEnd; TheDialog:=TEncloseSelectionDialog.Create(nil); Result:=TheDialog.ShowModal; if Result=mrOk then TheType:=TheDialog.GetEncloseType; TheDialog.Free; end; procedure GetEncloseSelectionParams(TheType: TEncloseSelectionType; out Template: string); begin case TheType of estTryFinally: Template:='try'+LineEnding +' <selection>'+LineEnding +'finally'+LineEnding +' |'+LineEnding +'end;'+LineEnding; estTryExcept: Template:='try'+LineEnding +' <selection>'+LineEnding +'except'+LineEnding +' |'+LineEnding +'end;'+LineEnding; estBeginEnd: Template:='begin'+LineEnding +' |<selection>'+LineEnding +'end;'+LineEnding; estForBeginEnd: Template:='for | do begin'+LineEnding +' <selection>'+LineEnding +'end;'+LineEnding; estWhileDoBeginEnd: Template:='while | do begin'+LineEnding +' <selection>'+LineEnding +'end;'+LineEnding; estRepeatUntil: Template:='repeat'+LineEnding +' <selection>'+LineEnding +'until |;'+LineEnding; estWith: Template:='with | do begin'+LineEnding +' <selection>'+LineEnding +'end;'+LineEnding; estPascalComment: Template:='{'+LineEnding +' |<selection>'+LineEnding +'}'+LineEnding; estRegionArea: Template:='{$REGION ''|''}'+LineEnding +' <selection>'+LineEnding +'{$ENDREGION}'+LineEnding; else RaiseGDBException('GetEnclosedSelectionParams'); end; end; procedure EncloseTextSelection(const Template: string; Source: TStrings; SelectionStart, SelectionEnd: TPoint; out NewSelection: string; out NewCursor: TPoint); var TemplateLen: Integer; TemplatePos: Integer; LastWrittenTemplatePos: Integer; NewSelect: TMemoryStream; Y: Integer; X: Integer; OldSelectionIndent: Integer; TemplateIndent: Integer; CutLastLineBreak: Boolean; CutPos: Integer; procedure AddBeautified(const s: string); var NewStr: String; LengthOfLastLine: integer; LineEndCnt: Integer; CurIndent: Integer; FirstLineIndent: Integer; EndPos: Integer; begin if s='' then exit; NewStr:=s; CurIndent:=OldSelectionIndent; if NewSelect.Position=0 then begin 
FirstLineIndent:=OldSelectionIndent-SelectionStart.X+1; if FirstLineIndent<0 then FirstLineIndent:=0; NewStr:=GetIndentStr(FirstLineIndent)+NewStr; dec(CurIndent,FirstLineIndent); if CurIndent<0 then CurIndent:=0; end; //debugln('AddBeautified A X=',X,' Y=',Y,' CurIndent=',CurIndent,' NewStr="',NewStr,'"'); dec(CurIndent,GetLineIndent(NewStr,1)); if CurIndent<0 then CurIndent:=0; NewStr:=CodeToolBoss.SourceChangeCache.BeautifyCodeOptions.BeautifyStatement( NewStr,CurIndent, [bcfIndentExistingLineBreaks,bcfDoNotIndentFirstLine]); LineEndCnt:=LineEndCount(NewStr,LengthOfLastLine); if (TemplatePos>TemplateLen) then begin // cut indent at end of template if LineEndCnt>0 then begin EndPos:=length(NewStr); while (EndPos>=1) and (NewStr[EndPos]=' ') do dec(EndPos); NewStr:=copy(NewStr,1,length(NewStr)-CurIndent); LineEndCnt:=LineEndCount(NewStr,LengthOfLastLine); end; end; inc(Y,LineEndCnt); if LineEndCnt=0 then inc(X,LengthOfLastLine) else X:=LengthOfLastLine+1; if (LineEndCnt>0) or (NewSelect.Position=0) then TemplateIndent:=GetLineIndent(NewStr,length(NewStr)+1); //debugln('AddBeautified B X=',X,' Y=',Y,' TemplateIndent=',TemplateIndent,' LengthOfLastLine=',LengthOfLastLine,' NewStr="',NewSTr,'"'); NewSelect.Write(NewStr[1],length(NewStr)); end; procedure FlushTemplate; var FromPos: Integer; ToPos: Integer; begin FromPos:=LastWrittenTemplatePos+1; ToPos:=TemplatePos-1; if ToPos>TemplateLen then ToPos:=TemplateLen; if FromPos<=ToPos then AddBeautified(copy(Template,FromPos,ToPos-FromPos+1)); LastWrittenTemplatePos:=ToPos; end; procedure CalculateCursorPos; begin NewCursor:=Point(X,Y); end; procedure InsertSelection; var CurY: Integer; CurLine: string; IndentStr: String; MinX: Integer; MaxX: Integer; l: Integer; begin IndentStr:=GetIndentStr(TemplateIndent-OldSelectionIndent); for CurY:=SelectionStart.Y to SelectionEnd.Y do begin CurLine:=Source[CurY-1]; //debugln(['InsertSelection CurY=',CurY,' CurLine="',dbgstr(CurLine),'"']); MinX:=1; MaxX:=length(CurLine)+1; if 
(CurY=SelectionStart.Y) then begin MinX:=SelectionStart.X; if MinX<=OldSelectionIndent then MinX:=OldSelectionIndent+1; if MinX>MaxX then MinX:=MaxX; end; if (CurY=SelectionEnd.Y) and (MaxX>SelectionEnd.X) then MaxX:=SelectionEnd.X; //debugln(['InsertSelection CurY=',CurY,' Range=',MinX,'-',MaxX,' Indent="',length(IndentStr),'" "',copy(CurLine,MinX,MaxX-MinX),'"']); X:=1; // write indent if (IndentStr<>'') and (CurY<>SelectionStart.Y) then begin NewSelect.Write(IndentStr[1],length(IndentStr)); inc(X,length(IndentStr)); end; // write line l:=MaxX-MinX; if l>0 then begin NewSelect.Write(CurLine[MinX],l); inc(X,l); end; // write line break and adjust cursor if CurY<SelectionEnd.Y then begin NewSelect.Write(EndOfLine[1],length(EndOfLine)); inc(Y); X:=1; end; end; end; procedure ParseMacro; var MacroNameStart: Integer; MacroNameEnd: Integer; function MacroNameIs(const Name: string): boolean; begin Result:=CompareText(@Template[MacroNameStart],MacroNameEnd-MacroNameStart, @Name[1],length(Name),false)=0; end; begin FlushTemplate; inc(TemplatePos); MacroNameStart:=TemplatePos; while (TemplatePos<=TemplateLen) and (Template[TemplatePos] in ['a'..'z','A'..'Z','_','0'..'9']) do inc(TemplatePos); MacroNameEnd:=TemplatePos; if (TemplatePos<=TemplateLen) and (Template[TemplatePos]='>') then begin LastWrittenTemplatePos:=TemplatePos; inc(TemplatePos); if MacroNameIs('Selection') then begin InsertSelection; end; end; end; procedure GetOldSelectionIndent; var CurY: Integer; CurLine: string; CurIndent: Integer; begin OldSelectionIndent:=0; CurY:=SelectionStart.Y; while CurY<Source.Count do begin CurLine:=Source[CurY-1]; CurIndent:=GetLineIndent(CurLine,1); if CurIndent<length(CurLine) then begin OldSelectionIndent:=CurIndent; break; end; inc(CurY); end; end; begin //debugln(['EncloseTextSelection A ',SelectionStart.X,',',SelectionStart.Y,'-',SelectionEnd.X,',',SelectionEnd.Y,' indent=',Indent,' Template="',Template,'"']); NewSelection:=''; NewCursor:=Point(0,0); 
CutLastLineBreak:=true; if (SelectionEnd.X=1) and (SelectionEnd.Y>SelectionStart.Y) then begin CutLastLineBreak:=false; dec(SelectionEnd.Y); if SelectionEnd.Y<Source.Count then SelectionEnd.X:=length(Source[SelectionEnd.Y-1])+1; end; NewSelect:=TMemoryStream.Create; NewCursor:=SelectionStart; X:=NewCursor.X; Y:=NewCursor.Y; GetOldSelectionIndent; TemplateIndent:=OldSelectionIndent; try TemplateLen:=length(Template); TemplatePos:=1; LastWrittenTemplatePos:=TemplatePos-1; while TemplatePos<=TemplateLen do begin case Template[TemplatePos] of '\': begin FlushTemplate; LastWrittenTemplatePos:=TemplatePos; inc(TemplatePos,2); end; '|': begin FlushTemplate; CalculateCursorPos; LastWrittenTemplatePos:=TemplatePos; inc(TemplatePos); end; '<': ParseMacro; else inc(TemplatePos); end; end; FlushTemplate; finally SetLength(NewSelection,NewSelect.Size); if NewSelection<>'' then begin NewSelect.Position:=0; NewSelect.Read(NewSelection[1],length(NewSelection)); //debugln(['EncloseTextSelection CutLastLineBreak=',CutLastLineBreak,' NewSelection="',NewSelection,'"']); if CutLastLineBreak then begin CutPos:=length(NewSelection); if NewSelection[CutPos] in [#10,#13] then begin dec(CutPos); if (CutPos>=1) and (NewSelection[CutPos] in [#10,#13]) and (NewSelection[CutPos]<>NewSelection[CutPos+1]) then begin dec(CutPos); end; NewSelection:=copy(NewSelection,1,CutPos); end; end; end; NewSelect.Free; end; end; { TEncloseSelectionDialog } procedure TEncloseSelectionDialog.EncloseSelectionDialogCREATE(Sender: TObject); var t: TEncloseSelectionType; begin Caption:=lisKMEncloseSelection; TypeRadiogroup.Caption:=lisChooseStructureToEncloseSelection; with TypeRadiogroup.Items do begin BeginUpdate; for t:=Low(TEncloseSelectionType) to High(TEncloseSelectionType) do Add(EncloseSelectionTypeDescription(t)); EndUpdate; end; TypeRadiogroup.ItemIndex:=0; end; function TEncloseSelectionDialog.GetEncloseType: TEncloseSelectionType; var i: Integer; begin i:=TypeRadiogroup.ItemIndex; for 
Result:=Low(TEncloseSelectionType) to High(TEncloseSelectionType) do if UTF8CompareText(TypeRadiogroup.Items[i], EncloseSelectionTypeDescription(Result))=0 then exit; RaiseGDBException('TEncloseSelectionDialog.GetEncloseType'); end; end.
{ "pile_set_name": "Github" }
<!DOCTYPE html>
<html>
<head>
    <meta charset="utf-8" />
    <title>Kepler's dilemma</title>
    <link rel="stylesheet" href="../css/main.css"/>
    <style>
        img {
            position: absolute;
        }
        img[alt="moon"] {
            top: 0;
            left: 0;
        }
        img[alt="earth"] {
            top: 128px;
            left: 128px;
        }
        .box-animation {
            position: relative;
        }
        button {
            display: block;
            margin: auto;
        }
    </style>
</head>
<body>
    <p>The output is partially printed on the console</p>
    <button id="button-animate">Animate</button>
    <div class="box-animation">
        <img src="images/moon.png" alt="moon"/>
        <img src="images/earth.png" alt="earth"/>
    </div>
    <script src="../js/jquery-1.11.3.min.js"></script>
    <script>
        // Left-pad a value with zeros to the requested width.
        // Replaces the four hand-written nested-ternary padding expressions
        // of the original formatDate, which were repetitive and error-prone.
        function zeroPad(value, width) {
            var text = '' + value;
            while (text.length < width) {
                text = '0' + text;
            }
            return text;
        }

        // Format a Date as "HH:MM:SS.mmm" (24-hour, zero-padded) for the
        // console timestamps below. Output is identical to the original.
        function formatDate(date) {
            return zeroPad(date.getHours(), 2) + ':' +
                zeroPad(date.getMinutes(), 2) + ':' +
                zeroPad(date.getSeconds(), 2) + '.' +
                zeroPad(date.getMilliseconds(), 3);
        }

        // Demonstrates that jQuery .animate() queues rather than blocks:
        // all five log lines print (nearly) at the same instant, while the
        // four animations run one after another, 2.5 s each.
        $('#button-animate').click(function() {
            var $moonImage = $('img[alt="moon"]');
            console.log('At ' + formatDate(new Date()) + ' 1');
            $moonImage.animate({left: '+=256'}, 2500);
            console.log('At ' + formatDate(new Date()) + ' 2');
            $moonImage.animate({top: '+=256'}, 2500);
            console.log('At ' + formatDate(new Date()) + ' 3');
            $moonImage.animate({left: '-=256'}, 2500);
            console.log('At ' + formatDate(new Date()) + ' 4');
            $moonImage.animate({top: '-=256'}, 2500);
            console.log('At ' + formatDate(new Date()) + ' 5');
        });

        /* callback alternative
        var $moonImage = $('img[alt="moon"]');
        $('#button-animate').click(function() {
            $moonImage.animate({left: '+=256'}, 2500, function(){
                $moonImage.animate({top: '+=256'}, 2500, function(){
                    $moonImage.animate({left: '-=256'}, 2500, function(){
                        $moonImage.animate({top: '-=256'}, 2500);
                    });
                });
            });
        });
        */
    </script>
</body>
</html>
{ "pile_set_name": "Github" }
2013-12-26 David Anderson * pro_section.c, pro_types.c, pro_vars.c, pro_weaks.c, pro_forms.c, pro_frame.c,pro_funcs.c,pro_init.c,pro_line.c,pro_macinfo.c, pro_pubnames.c,pro_reloc.c,pro_reloc_stream.c,pro_reloc_symbolic.c, pro_expr.h,pro_macinfo.h,pro_reloc.h,pro_reloc_stream.h, pro_reloc_symbolic.h,pro_section.h,pro_types.h,pro_util.h: Remove trailing whitespace from the code. 2013-11-24 David Anderson * pro_expr.c(dwarf_add_expr_gen): Moved use of a pointer to be after where the pointer tested for NULL. * pro_forms.c(local_add_AT_address): Moved use of a pointer to be after where the pointer tested for NULL. Both bugs found by the STACK code analysis tool from MIT. 2013-10-31 David Anderson * dwarf_error.c: Added DW_DLE_AT_FIXUP_NULL and DW_DLE_AT_FIXUP_DUP. * libdwarf.h.in: Added those two defines plus dwarf_add_AT_reference_b() and dwarf_fixup_AT_reference_die() for CLASS REFERENCE attribute handling. Also, dwarf_add_AT_with_ref_sig8() for refsig8 references. * libdwarf2p.1.mm: version 1.34 20 October. Document new interfaces. Fixed typo in dwarf_add_AT_targ_address_b() documentation. * libdwarf2p.1.pdf: Regenerated. * pro_die.c. Added commentary. * pro_forms.c: Added reference support with dwarf_add_AT_reference_b() and refactoring to avoid duplicating code. Added dwarf_add_AT_with_ref_sig8() * pro_section.c: now only adds DW_AT_sibling automatically if it was not present already. Still only adds it in the same automatically-selected places. Some renaming of local variables for clarity around line 1743. One line reformatted, it was looking very odd, line 2217. 2013-10-17 David Anderson * dwarf_error.c: Add DW_DLE_DEBUGPUBTYPES_ERROR string. * libdwarf.h.in: Add DW_DLE_DEBUGPUBTYPES_ERROR and fix DW_DLE_LAST. Add dwarf_add_pubtype() interface. * libdwarf2.1.mm: References to Dwarf_Pubtype changed to Dwarf_Type, using the same interface type as for .debug_types (an SGI extension). Now at rev 2.15. Clarified that pubnames and pubtypes only apply to .debug_info. 
* libdwarf2.1.pdf: Regenerated. * pro_opaque.h: Added DEBUG_PUBTYPES and updated NUM_DEBUG_SECTIONS. * pro_pubnames.c: Added dwarf_add_pubtype() support. * pro_section.c: Added support for pubtypes. 2013-10-14 David Anderson * libdwarf.h.in: Dwarf_Callback_Func_c, Dwarf_Callback_Func_b and Dwarf_Callback_Func name argument was lacking a const qualifier leading to compiler warnings. Now has const qualifier. 2013-08-15 David Anderson * dwarf_alloc.c: Now uses dwarf_printf instead of printf. And frees the dwarf_printf dp_buffer if appropriate. * dwarf_line.c, dwarf_print_lines.c: Now use dwarf_printf instead of printf. * dwarf_line.c: Update copyright year. * dwarf_opaque.h: Add de_printf_callback to Dwarf_Debug struct. Add dwarf_printf to function prototypes. * dwarf_util.c: Implement dwarf_printf. * libdwarf.h.in: Now specifies struct Dwarf_Printf_Callback_Info_s and dwarf_register_printf_callback(); * libdwarf2.1.mm: Version 2.14. Fixed three tables with too-long lines. Documented the new printf callback registration functions. * libdwarf2.1.pdf: Regenerated. 2013-08-13 David Anderson * dwarf_init_finish.c: * dwarf_query.c: Added dwarf_highpc_b() so consumers can deal with DW_AT_high_pc with form constant (a feature of DWARF4). * libdwarf.h.in: Added dwarf_highpc_b(), dwarf_add_AT_any_value_sleb(), dwarf_add_AT_any_value_uleb(). to give producers more flexibility. Moved the Dwarf_Form_Class declaration closer to the head of the file so the new function prototypes can reference it. * libdwarf2.1.mm: Version 2.13. Added dwarf_highpc_b(). * libdwarf2.1.pdf: Regenerated. * libdwarf2p.1.mm: Version 1.33. Documents dwarf_add_AT_any_value_sleb() and dwarf_add_AT_any_value_uleb(). Fixes a one-character typo that was truncating the document severely. * libdwarf2p.1.pdf: Regenerated. * pro_forms.c: Implements dwarf_add_AT_any_value_sleb() and dwarf_add_AT_any_value_uleb(). 
2013-08-09 David Anderson * dwarf_init_finish.c: Spelling, change _dwarf_assume_string_bad-> _dwarf_assume_string_in_bound to reflect the actual meaning. * dwarf_alloc.c: Change a debug message for DWARF_SIMPLE_MALLOC to write to stdout, not stderr. All non-fatal messages now print to stdout. * libdwarf2.1.mm: Now version 2.12. Corrected the description of dwarf_set_stringcheck(). * libdwarf2.1.pdf: Regenerated. 2013-08-08 David Anderson * dwarf_form.c: When a FORM_string attribute is in debug_types it is now correctly dealt with. 2013-08-07 David Anderson * dwarf_init_finish.c: Changed a nonfatal error to write it to stdout instead of stderr to make it easier to understand the context of the error (which involves the number of debug sections, not something anyone should ever see). 2013-07-28 David Anderson * dwarf_abbrev.c, dwarf_frame.c, dwarf_frame3.c,dwarf_line.c, pro_reloc_stream.c, pro_section.c: Rename local variables to avoid compiler warnings about local variables reusing outer scope (including global) names. * dwarf_elf_access.c: Add AARCH64 support. * dwarf_reloc_arm.h: Add AARCH64 support. * libdwarf2.1.mm: dwarf_highpc() documentation admits it does not work properly for DWARF4 in all cases. dwarf_highpc() needs to be fixed. * libdwarf2.1.pdf: Regenerated 2013-06-08 David Anderson * libdwarf2.1.mm: Improved the documentation of dwarf_highpc() function to suggest how to interpret the value (pc or offset). * libdwarf2.1.pdf: Regenerated. 2013-03-08 David Anderson * dwarf_elf_access.c: Now we handle K10M and L10M as having relocations named as in x86, x86_64. 2013-02-07 David Anderson * dwarf_elf_access.c: FreeBSD did not have R_PPC64_ADDR32 as a relocation name, so changed one name to be the name usable in Ubuntu and FreeBSD, R_PPC_ADDR32. 2013-02-01 David Anderson * libdwarf2.1.mm: Improved the documentation of the badly-named functions dwarf_whatform() and dwarf_whatform_direct(). * libdwarf2.1.pdf: Regenerated. 
* libdwarf.h.in: The arguments to dwarf_whatform[_direct]() are renamed for clarity. They are commented out, so this is just improving documentation. 2013-01-20 David Anderson * libdwarf.h: Removed. * libdwarf.h.in: Added. Identical content to standard libdwarf. * configure.in: Generates libdwarf.h. Notices if struct _Elf in libelf.h and generates libdwarf.h appropriately. * configure: Regenerated * README now mentions the libdwarf.h generation at configure time. 2013-01-28 David Anderson * dwarf_frame.c: Fix a macro so it does not test unsigned numbers as being less than zero. Fixes a compiler warning. * malloc_check.c: Add void as the parameter list of a parameter-less function. Fixes a compiler warning. 2013-01-26 David Anderson * libdwarf.h: Remove the lc_number3 field to restore interface binary compatibility with earlier releases. * dwarf_loc.c, dwarf_loc.h: No longer uses the removed lc_number3 or lr_number3 fields. 2013-01-25 David Anderson * dwarf_alloc.c: Changed some local names to avoid compiler warnings about redefining names in inner contexts. * dwarf_frame2.c: Changed some local names to avoid compiler warnings about redefining names in inner contexts. Added const to some declarations to avoid warnings about const being cast away. * dwarf_init_finish.c: Added const to some declarations to avoid warnings about const being cast away. Changed some local names to avoid compiler warnings about redefining names in inner contexts. * dwarf_line.c, dwarf_print_lines.c: Added const to some declarations to avoid warnings about const being cast away. Added static to function definition to reflect its use and to avoid warning about lack of a visible prototype. * gennames.c: Using C89/90 void to declare functions with no arguments to avoid compiler warnings about using old style declarations. Changed some local names to avoid compiler warnings about redefining names in inner contexts. Added const to some declarations to avoid warnings about const being cast away. 
* pro_incl.h: WRITE_UNALIGNED macros now cast with const to avoid warnings about const being cast away. * pro_macinfo.c,pro_section.c,pro_section.h: Added const to some declarations to avoid warnings about const being cast away. 2013-01-25 David Anderson * common.c: Add 'const' on string declarations to avoid compiler warnings. * dwarf_loc.h: Add the new field lc_number3 to handle DW_OP_GNU_const_type properly. * dwarf_loc.c: Handle DW_OP_GNU_const_type properly. * libdwarf.h: Add lr_number3 so we can handle DW_OP_GNU_const_type properly. This destroys binary compatibility. Not a good idea. See Jan 26, above. 2013-01-25 David Anderson * dwarf_loc.c: Use cc_length_size, not cc_length to get the offset size. Nasty bug. * dwarf_opaque.h: Change commentary to clarify the cc_length field content to hopefully avoid making that mistake again. 2013-01-15 David Anderson * dwarf.h: defines for some added DW_OP_GNU operators * dwarf_loc.c: Added support for some DW_OP_GNU operators. * config.h.in, configure.in: Define and set HAVE_STRUCT_UNDERSCORE_ELF as FreeBSD names struct _Elf instead of struct Elf. * configure: Regenerated. * dwarf_alloc.c: Initialize a local var at declaration, add const to array of strings type declaration. * dwarf_alloc.h: Change ah_structs_per_chunk to Dwarf_Sword to eliminate a compiler warning. * dwarf_arange.c: Change a couple Dwarf_Unsigned to Dwarf_Signed to eliminate a compiler warning. * dwarf_die_deliv.c: Change local to Dwarf_Unsigned to eliminate signed compare warning (and actually signed was wrong anyway!). * dwarf_error.c: Fix comparison to eliminate signed/unsigned compare warning. * dwarf_form.c: Index variable changed to unsigned to eliminate signed/unsigned comparison warning. * dwarf_frame.c: Local variables changed to unsigned to eliminate signed/unsigned comparison warnings. * dwarf_frame3.c: Local variables changed to unsigned to eliminate signed/unsigned comparison warnings. 
* dwarf_init_finish.c: Local variable changed to unsigned to eliminate signed/unsigned comparison warning. * dwarf_leb.c: Local variable changed to unsigned to eliminate signed/unsigned comparison warning. * dwarf_line.c: Changed index variable to unsigned to eliminate signed/unsigned comparison warning. * dwarf_loc.c: Delete two unused local variables. * dwarf_loc.c: Delete two unused local variables. * dwarf_macro.c: Fixed comparisons eliminate signed/unsigned comparison warning. * dwarf_opaque.h: Changed cc_abbrev_offset and de_fde_count to unsigned (which they should have been all along) to eliminate signed/unsigned comparison warnings. * dwarf_print_lines.c: Local variable changed to unsigned to eliminate signed/unsigned comparison warning. * dwarf_util.c: Add include of pro_encode_nm.h to avoid a compiler warning. Changed index variable to unsigned to eliminate signed/unsigned comparison warning. * libdwarf.h: Add ability to handle struct _Elf. * pro_alloc.c: Move an include of malloc.h as it is not needed if stdlib.h is present. * pro_forms.c: Changed index variable to unsigned to eliminate signed/unsigned comparison warning. * pro_util.h: Add specific allowance for FreeBSD include and relocation.
{ "pile_set_name": "Github" }
# Server-Side Request Forgery (SSRF) vulnerable Lab This repository contain PHP codes which are vulnerable to Server-Side Request Forgery (SSRF) attack. I would like to say Thank You to @albinowax, AKReddy, Vivek Sir (For being great personalities who always supported me), Andrew Sir - @vanderaj (for his encouraging words) and those researchers who contirubuted in DNS rebinding attack based research ![](https://github.com/incredibleindishell/SSRF_Vulnerable_lab/blob/master/images/ssrf_lab.gif?raw=true) Vulnerable codes are meant to demonstrate SSRF for below mentioned 5 scenarios: <b> 1. Application code that fetches and display the content of the specified file</b> In programming languages, there are functions which can fetch the contents of locally saved file. These functions may be capable of fetching the content from remote URLs as well local files (e.g file_get_contents in PHP). This functionality can be abused if application is not prepending any string to the user supplied data to fetch the content from a file i.e application is not prepeding and directory name or path to the user supplied data. In this case, these data fetching function can process the schemes like "http://" or "file://". When user specifies the remote URL in place of file name like "http://localhost", the data fetching function extract the data from the specified URL. In case if application is prepending any data string (for example any directory name) to user data, "http://" or "file://" scheme won't work and exploitation of SSRF vulnerability is not possible. <a href="https://github.com/incredibleindishell/SSRF_Vulnerable_lab/tree/master/file_content_fetch">Guide to Exploitation of Scenario 1</a> <b> 2. Application provides interface to connect to Remote Host</b> Web application has interfaces that allow an user to specify the any IP with any port. Here the application has functionality which tries to connect to service like "MySQL", "LDAP" etc. 
The application expects the user to specify the remote server hostname/IP, username and password in input fields. The application then tries to connect to the remote server over the specified port. In this scenario, the application tries to communicate with a remote service listening on a specific port. When the vulnerable code has functionality to connect to a server like MySQL and the user specifies the SMB port, the vulnerable application will try to communicate with the SMB service using MySQL service packets. Even though the port is open, we are not able to communicate with the service due to the difference in the way of communication. This behaviour can be exploited to perform internal network scanning, not just to enumerate IPs but also ports on those live IPs.

<a href="https://github.com/incredibleindishell/SSRF_Vulnerable_lab/tree/master/Remote_host_connect_interface">Guide to Exploitation of Scenario 2</a>

<b> 3. Application with File Download Functionality</b>

In this case, an attacker can exploit this functionality to perform IP scanning inside the network where the application server is hosted. The function which performs the task of downloading a file from the server can download files not just from the local server but also from an SMB path. This is something which can help an attacker to figure out the Windows-based machines in the network. A web application hosted on Windows OS will process the SMB path as well if the file download functionality processes user input without prepending any data.

<a href="https://github.com/incredibleindishell/SSRF_Vulnerable_lab/tree/master/File_Download">Guide to Exploitation of Scenario 3</a>

<b> 4. Bypassing IP blacklisting using DNS Based Spoofing</b>

The script has functionality which allows the user to fetch data from a remote URL. The user needs to specify the remote URL with any IP or domain name. The script performs a check on whether the user-specified input is "localhost", an "Internal IP" or a "Reserved IP".
If domain/IP specified by user is blacklisted, script will not fetch the content and stop processing. <a href="https://github.com/incredibleindishell/SSRF_Vulnerable_lab/tree/master/DNS-Spoofing-based-Bypass">Guide to Exploitation of Scenario 4</a> <b> 5. Bypassing IP blacklisting using DNS Rebinding Technique</b> Application has implemented black listing of not just internal and private range IPs but also rsolve the user supplied domain to its IP and again perform check if resolved is black listed or not. In this case, DNS based spoofing trick will also not work to access the content hosted on internal/Reserved IP. Application code perform domain resolution to its IP and again perform black listed IP check for the resolved IP. <a href="https://github.com/incredibleindishell/SSRF_Vulnerable_lab/tree/master/DNS%20Rebinding%20based%20Bypass">Guide to Exploitation of Scenario 5</a> Ofcourse,<br><b>--==[[ With Love From IndiShell ]]==--</b> <img src="https://web.archive.org/web/20140704135452/freesmileys.org/smileys/smiley-flag010.gif"> --==[[ Greetz To ]]==-- Guru ji zero, Code breaker ICA, root_devil, google_warrior, INX_r0ot, Darkwolf indishell, Baba, Silent poison India, Magnum sniper, ethicalnoob Indishell, Reborn India, L0rd Crus4d3r, cool toad, Hackuin, Alicks, mike waals, cyber gladiator, Cyber Ace, Golden boy INDIA, d3, rafay baloch, nag256 Ketan Singh, AR AR, saad abbasi, Minhal Mehdi, Raj bhai ji, Hacking queen, lovetherisk, D2, Bikash Dash and rest of the Team INDISHELL --==[[Love to]]==-- My Father, my Ex Teacher, cold fire hacker, Mannu, ViKi, Ashu bhai ji, Soldier Of God, Bhuppi, Gujjar PCP Mohit, Ffe, Shardhanand, Budhaoo, Jagriti, Hacker fantastic, Jennifer Arcuri, Thecolonial, Anurag Bhai Ji and Don(Deepika kaushik)
{ "pile_set_name": "Github" }
#include "macro_helper_test.h"

// Macro that, for a given suffix `name`, opens namespace `ns`, defines an
// internal-linkage constant `t1` plus two token-pasted globals initialized
// from it, and then re-exports `t2_##name` into the enclosing scope via a
// using-declaration.
// NOTE(review): presumably this file is a fixture exercising tooling on
// token-pasted names declared inside macros — confirm against
// macro_helper_test.h.
#define DEFINE(name) \
namespace ns { \
static const bool t1 = false; \
bool t2_##name = t1; \
bool t3_##name = t1; \
} \
using ns::t2_##name;

// Expands to ns::t2_test / ns::t3_test and `using ns::t2_test;`.
DEFINE(test)

// Ordinary (non-macro) definition so the translation unit also contains a
// plain function.
void f1() {}
{ "pile_set_name": "Github" }
/* GIMP - The GNU Image Manipulation Program
 * Copyright (C) 1995 Spencer Kimball and Peter Mattis
 *
 * gimpvectors-warp.c
 * Copyright (C) 2005 Bill Skaggs <weskaggs@primate.ucdavis.edu>
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <https://www.gnu.org/licenses/>.
 */

#include "config.h"

#include <gdk-pixbuf/gdk-pixbuf.h>
#include <gegl.h>

#include "vectors-types.h"

#include "libgimpmath/gimpmath.h"

#include "core/gimp-utils.h"
#include "core/gimpcoords.h"

#include "gimpanchor.h"
#include "gimpstroke.h"
#include "gimpvectors.h"
#include "gimpvectors-warp.h"

/* Arc-length precision handed to gimp_stroke_get_point_at_dist().  */
#define EPSILON 0.2
/* Finite-difference step (in arc-length units) used to estimate the
 * stroke's tangent direction.  */
#define DX 2.0

static void   gimp_stroke_warp_point   (GimpStroke  *stroke,
                                        gdouble      x,
                                        gdouble      y,
                                        GimpCoords  *point_warped,
                                        gdouble      y_offset,
                                        gdouble      x_len);

static void   gimp_vectors_warp_stroke (GimpVectors *vectors,
                                        GimpStroke  *stroke,
                                        gdouble      y_offset);

/* Warp (x, y): treat x as an arc-length position along the concatenated
 * strokes of `vectors` and y as a signed offset from the path.  Walks the
 * stroke list to find which stroke contains x, then delegates to
 * gimp_stroke_warp_point() with x made stroke-relative.  If `vectors` has
 * no strokes at all, the result is clamped to (0, 0).  */
void
gimp_vectors_warp_point (GimpVectors *vectors,
                         GimpCoords  *point,
                         GimpCoords  *point_warped,
                         gdouble      y_offset)
{
  gdouble     x = point->x;
  gdouble     y = point->y;
  gdouble     len;
  GList      *list;
  GimpStroke *stroke;

  /* Find the stroke containing arc-length position x; positions past the
   * end fall through to the last stroke (the `! list->next` case).  */
  for (list = vectors->strokes->head;
       list;
       list = g_list_next (list))
    {
      stroke = list->data;

      len = gimp_vectors_stroke_get_length (vectors, stroke);

      if (x < len || ! list->next)
        break;

      x -= len;
    }

  if (! list)
    {
      /* Empty path: nothing to warp onto.  */
      point_warped->x = 0;
      point_warped->y = 0;
      return;
    }

  gimp_stroke_warp_point (stroke, x, y, point_warped, y_offset, len);
}

/* Map (x, y) onto `stroke`: x is arc length along the stroke, y - y_offset
 * is displacement along the stroke's normal at that position.  Past the end
 * of the stroke (x + DX >= x_len) the mapping is extended linearly using the
 * end point's tangent/normal frame.  On failure to evaluate the stroke the
 * output falls back to (0, 0) or to the on-stroke point already stored.  */
static void
gimp_stroke_warp_point (GimpStroke *stroke,
                        gdouble     x,
                        gdouble     y,
                        GimpCoords *point_warped,
                        gdouble     y_offset,
                        gdouble     x_len)
{
  GimpCoords point_zero  = { 0, };
  GimpCoords point_minus = { 0, };
  GimpCoords point_plus  = { 0, };
  gdouble    slope;
  gdouble    dx, dy, nx, ny, len;

  if (x + DX >= x_len)
    {
      gdouble tx, ty;

      /* Beyond (or at) the stroke end: anchor at the end point...  */
      if (! gimp_stroke_get_point_at_dist (stroke, x_len, EPSILON,
                                           &point_zero, &slope))
        {
          point_warped->x = 0;
          point_warped->y = 0;
          return;
        }

      point_warped->x = point_zero.x;
      point_warped->y = point_zero.y;

      /* ...and estimate the end tangent with a backward difference.  */
      if (! gimp_stroke_get_point_at_dist (stroke, x_len - DX, EPSILON,
                                           &point_minus, &slope))
        return;

      dx = point_zero.x - point_minus.x;
      dy = point_zero.y - point_minus.y;

      len = hypot (dx, dy);

      /* Degenerate (near-zero) tangent: keep the anchor point as-is.  */
      if (len < 0.01)
        return;

      tx = dx / len;
      ty = dy / len;

      /* Normal = tangent rotated 90 degrees.  */
      nx = - dy / len;
      ny =   dx / len;

      /* Linear extension: overshoot along the tangent, offset along the
       * normal.  */
      point_warped->x += tx * (x - x_len) + nx * (y - y_offset);
      point_warped->y += ty * (x - x_len) + ny * (y - y_offset);

      return;
    }

  /* Interior of the stroke: evaluate the point at arc length x...  */
  if (! gimp_stroke_get_point_at_dist (stroke, x, EPSILON,
                                       &point_zero, &slope))
    {
      point_warped->x = 0;
      point_warped->y = 0;
      return;
    }

  point_warped->x = point_zero.x;
  point_warped->y = point_zero.y;

  /* ...and estimate the tangent with a central difference over +/- DX.  */
  if (! gimp_stroke_get_point_at_dist (stroke, x - DX, EPSILON,
                                       &point_minus, &slope))
    return;

  if (! gimp_stroke_get_point_at_dist (stroke, x + DX, EPSILON,
                                       &point_plus, &slope))
    return;

  dx = point_plus.x - point_minus.x;
  dy = point_plus.y - point_minus.y;

  len = hypot (dx, dy);

  if (len < 0.01)
    return;

  nx = - dy / len;
  ny =   dx / len;

  /* Displace the on-stroke point along the normal.  */
  point_warped->x = point_zero.x + nx * (y - y_offset);
  point_warped->y = point_zero.y + ny * (y - y_offset);
}

/* Warp every anchor of `stroke` in place (input and output coords alias).  */
static void
gimp_vectors_warp_stroke (GimpVectors *vectors,
                          GimpStroke  *stroke,
                          gdouble      y_offset)
{
  GList *list;

  for (list = stroke->anchors->head; list; list = g_list_next (list))
    {
      GimpAnchor *anchor = list->data;

      gimp_vectors_warp_point (vectors,
                               &anchor->position, &anchor->position,
                               y_offset);
    }
}

/* Warp all strokes of `vectors_in` onto the path described by `vectors`,
 * mutating `vectors_in` in place.  */
void
gimp_vectors_warp_vectors (GimpVectors *vectors,
                           GimpVectors *vectors_in,
                           gdouble      y_offset)
{
  GList *list;

  for (list = vectors_in->strokes->head;
       list;
       list = g_list_next (list))
    {
      GimpStroke *stroke = list->data;

      gimp_vectors_warp_stroke (vectors, stroke, y_offset);
    }
}
{ "pile_set_name": "Github" }
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */

package com.google.api.services.clouderrorreporting.v1beta1.model;

/**
 * An error event which is returned by the Error Reporting system.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Stackdriver Error Reporting API. For a detailed
 * explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class ErrorEvent extends com.google.api.client.json.GenericJson {

  /**
   * Data about the context in which the error occurred.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private ErrorContext context;

  /**
   * Time when the event occurred as provided in the error report. If the report did not contain a
   * timestamp, the time the error was received by the Error Reporting system is used.
   * The value may be {@code null}.
   */
  // NOTE(review): declared as unqualified String while `message` below uses
  // java.lang.String — a generator inconsistency; behaviorally identical.
  @com.google.api.client.util.Key
  private String eventTime;

  /**
   * The stack trace that was reported or logged by the service.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String message;

  /**
   * The `ServiceContext` for which this error was reported.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private ServiceContext serviceContext;

  /**
   * Data about the context in which the error occurred.
   * @return value or {@code null} for none
   */
  public ErrorContext getContext() {
    return context;
  }

  /**
   * Data about the context in which the error occurred.
   * @param context context or {@code null} for none
   */
  public ErrorEvent setContext(ErrorContext context) {
    this.context = context;
    return this;
  }

  /**
   * Time when the event occurred as provided in the error report. If the report did not contain a
   * timestamp, the time the error was received by the Error Reporting system is used.
   * @return value or {@code null} for none
   */
  public String getEventTime() {
    return eventTime;
  }

  /**
   * Time when the event occurred as provided in the error report. If the report did not contain a
   * timestamp, the time the error was received by the Error Reporting system is used.
   * @param eventTime eventTime or {@code null} for none
   */
  public ErrorEvent setEventTime(String eventTime) {
    this.eventTime = eventTime;
    return this;
  }

  /**
   * The stack trace that was reported or logged by the service.
   * @return value or {@code null} for none
   */
  public java.lang.String getMessage() {
    return message;
  }

  /**
   * The stack trace that was reported or logged by the service.
   * @param message message or {@code null} for none
   */
  public ErrorEvent setMessage(java.lang.String message) {
    this.message = message;
    return this;
  }

  /**
   * The `ServiceContext` for which this error was reported.
   * @return value or {@code null} for none
   */
  public ServiceContext getServiceContext() {
    return serviceContext;
  }

  /**
   * The `ServiceContext` for which this error was reported.
   * @param serviceContext serviceContext or {@code null} for none
   */
  public ErrorEvent setServiceContext(ServiceContext serviceContext) {
    this.serviceContext = serviceContext;
    return this;
  }

  // Typed pass-throughs so fluent chains keep the ErrorEvent type.
  @Override
  public ErrorEvent set(String fieldName, Object value) {
    return (ErrorEvent) super.set(fieldName, value);
  }

  @Override
  public ErrorEvent clone() {
    return (ErrorEvent) super.clone();
  }

}
{ "pile_set_name": "Github" }
{ "created_at": "2015-02-27T22:29:20.544925", "description": "a php written mysql workbench file reader to transform the database scheme to useful other schemes like Doctrine", "fork": false, "full_name": "johmue/mysql-workbench-schema-exporter", "language": "PHP", "updated_at": "2015-02-27T23:44:24.913067" }
{ "pile_set_name": "Github" }
## Problem

This problem was asked by Google.

A coin was flipped 1000 times, and 550 times it showed up heads. Do you think the coin is biased? Why or why not?

## Solution

Because the sample size of flips is large (1000), we can apply the Central Limit Theorem. Since each individual flip is a Bernoulli random variable, we can assume it has a probability p of showing up heads. Then we want to test whether p is 0.5 (i.e. whether the coin is fair).

The Central Limit Theorem allows us to approximate the total number of heads seen as being normally distributed. More specifically, the number of heads seen follows a Binomial distribution, since it is a sum of Bernoulli random variables. If the coin is not biased (p = 0.5), then we have the following expected number of heads, and variance on that outcome:

```
\mu = np = 1000 \cdot 0.5 = 500
```

```
\sigma^2 = np(1-p) = 1000 \cdot 0.5 \cdot 0.5 = 250
\sigma = \sqrt{250} \approx 16
```

Since this mean and standard deviation specify the normal distribution, we can calculate the corresponding z-score for 550 heads:

```
z = \frac{550 - 500}{16} \approx 3.1 > 3
```

This means that, if the coin were fair, the event of seeing 550 heads should occur with a < 1% chance under normality assumptions. Therefore, the coin is likely biased.
{ "pile_set_name": "Github" }
CORE main.c --dump-c ^EXIT=0$ ^SIGNAL=0$ -- ^warning: ignoring -- This test should be run via chain.sh, which will try to recompile the dumped C code. Missing/incomplete typedef output would cause a failure.
{ "pile_set_name": "Github" }
{ "name": "merge", "version": "1.2.0", "author": { "name": "yeikos", "url": "http://www.yeikos.com" }, "description": "Merge multiple objects into one, optionally creating a new cloned object. Similar to the jQuery.extend but more flexible. Works in Node.js and the browser.", "main": "merge.js", "license": "MIT", "homepage": "https://github.com/yeikos/js.merge", "repository": { "type": "git", "url": "git+https://github.com/yeikos/js.merge.git" }, "keywords": [ "merge", "recursive", "extend", "clone", "object", "browser" ], "scripts": { "test": "cd tests; node index.js" }, "readme": "# Merge\r\n\r\nMerge multiple objects into one, optionally creating a new cloned object.\r\nSimilar to the jQuery.extend but more flexible. Works in Node.js and the\r\nbrowser.\r\n\r\n## Node.js Usage\r\n\r\n```sh\r\nnpm install merge --save\r\n```\r\n\r\n```js\r\nvar merge = require('merge'), original, cloned;\r\n\r\nconsole.log(merge({one:'hello'}, {two: 'world'}));\r\n// -> {\"one\": \"hello\", \"two\": \"world\"}\r\n\r\noriginal = { x: { y: 1 } };\r\ncloned = merge(true, original);\r\ncloned.x.y++;\r\n\r\nconsole.log(original.x.y, cloned.x.y);\r\n// -> 1, 2\r\n\r\nconsole.log(merge.recursive(true, original, { x: { z: 2 } }));\r\n// -> {\"x\": { \"y\": 1, \"z\": 2 } }\r\n\r\n```\r\n\r\n## Browser Usage\r\n\r\n```html\r\n<script src=\"http://files.yeikos.com/merge.js\"></script>\r\n<script>\r\n\tvar original, cloned;\r\n\r\n\tconsole.log(merge({one:'hello'}, {two: 'world'}));\r\n\t// -> {\"one\": \"hello\", \"two\": \"world\"}\r\n\r\n\toriginal = { x: { y: 1 } };\r\n\tcloned = merge(true, original);\r\n\tcloned.x.y++;\r\n\r\n\tconsole.log(original.x.y, cloned.x.y);\r\n\t// -> 1, 2\r\n\r\n\tconsole.log(merge.recursive(true, original, { x: { z: 2 } }));\r\n\t// -> {\"x\": { \"y\": 1, \"z\": 2 } }\r\n\r\n</script>\r\n```\r\n\r\n## Tests\r\n\r\n```sh\r\nnpm test\r\n```\r\n", "readmeFilename": "README.md", "bugs": { "url": "https://github.com/yeikos/js.merge/issues" }, "_id": 
"merge@1.2.0", "_shasum": "7531e39d4949c281a66b8c5a6e0265e8b05894da", "_resolved": "https://registry.npmjs.org/merge/-/merge-1.2.0.tgz", "_from": "merge@>=1.1.3 <2.0.0" }
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <resources> <string name="oauth_client_id">android-crm</string> <string name="oauth_client_secret">123456</string> <string name="oauth_access_token_callback_uri">x-com-jl-crm-android://crm-oauth-response</string> <string name="oauth_token_uri">/oauth/token</string> <string name="oauth_authorize_uri">/oauth/authorize</string> <string name="oauth_scope">read,write</string> <string name="shared_preferences">CrmPreferences</string> <color name="spring_welcome_bg_color">#69d54c</color> <string name="base_uri_emulator">http://10.0.2.2:8080/</string> <string name="base_uri_qa">http://192.168.2.9:8080/</string> </resources>
{ "pile_set_name": "Github" }
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Set everything to be logged to the file target/unit-tests.log log4j.rootCategory=INFO, file log4j.appender.file=org.apache.log4j.FileAppender log4j.appender.file.append=true log4j.appender.file.file=target/unit-tests.log log4j.appender.file.layout=org.apache.log4j.PatternLayout log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n # Ignore messages below warning level from Jetty, because it's a bit verbose log4j.logger.org.spark_project.jetty=WARN
{ "pile_set_name": "Github" }
//
//  BAKSendableRequest.h
//  BackchannelSDK
//
//  Created by Soroush Khanlou on 5/20/15.
//  Copyright (c) 2015 Backchannel. All rights reserved.
//

#import <Foundation/Foundation.h>

@protocol BAKRequestTemplate, BAKRequestBuilder;

// Wraps a request description and exposes a single send entry point that
// reports back through success/failure blocks.
@interface BAKSendableRequest : NSObject

// Initialize with a prebuilt request template...
- (instancetype)initWithRequestTemplate:(id<BAKRequestTemplate>)template;
// ...or with a builder that can produce one.
- (instancetype)initWithRequestBuilder:(id<BAKRequestBuilder>)requestBuilder;

// The template describing the request to send.
// NOTE(review): `template` is a C++ keyword, so this header will fail to
// compile if imported from an Objective-C++ (.mm) file — confirm consumers.
@property (nonatomic) id<BAKRequestTemplate> template;

// Sends the request; `result` is untyped (id) here — its concrete type is
// determined elsewhere (presumably by the template), so can't tell from
// this header.
- (void)sendRequestWithSuccessBlock:(void (^)(id result))successBlock failureBlock:(void (^)(NSError *error))failureBlock;

@end
{ "pile_set_name": "Github" }
# Angular ngrx-data ## What is _ngrx-data_? The [`ngrx-data` library](https://github.com/johnpapa/angular-ngrx-data) makes it easier to write an Angular application that manages [entity](https://github.com/johnpapa/angular-ngrx-data/blob/master/docs/faq.md#entity) data with [ngrx](https://github.com/ngrx/platform/blob/master/README.md) in a "reactive" style, following the [redux](https://redux.js.org/) pattern. ## Why use it? Many applications have substantial "domain models" with 10s or 100s of entity types. Instances of these entity types are created, retrieved, updated, and deleted (CRUD). If you've tried to manage your entity data with _ngrx_, you've discovered that you have to write a lot of code for each entity type. For each type, you've written _actions_, _action-creators_, _reducers_, _effects_, _dispatchers_, and _selectors_ as well as the HTTP GET, PUT, POST, and DELETE methods. This is a ton of repetitive code to write, maintain, and test. This library is _one_ way to radically reduce the amount of "boilerplate" necessary to manage entities with _ngrx_. ## How it works See the [repository README](https://github.com/johnpapa/angular-ngrx-data/blob/master/docs/README.md). ## Problems or Suggestions [Open an issue here](https://github.com/johnpapa/angular-ngrx-data/issues)
{ "pile_set_name": "Github" }
namespace nUpdate.Administration.UI.Dialogs
{
    // Designer-generated half of the StatisticsServerDialog partial class:
    // a fixed-size dialog with a toolbar (add/edit/delete), an owner-drawn
    // server list, and a "no servers" placeholder label.  Event handlers
    // referenced below live in the other half of the partial class.
    partial class StatisticsServerDialog
    {
        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.IContainer components = null;

        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing && (components != null))
            {
                components.Dispose();
            }
            base.Dispose(disposing);
        }

        #region Windows Form Designer generated code

        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            this.components = new System.ComponentModel.Container();
            System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(StatisticsServerDialog));
            this.serverList = new nUpdate.Administration.UI.Controls.ServerList();
            this.imageList1 = new System.Windows.Forms.ImageList(this.components);
            this.toolStrip1 = new System.Windows.Forms.ToolStrip();
            this.addServerButton = new System.Windows.Forms.ToolStripButton();
            this.toolStripSeparator1 = new System.Windows.Forms.ToolStripSeparator();
            this.editServerButton = new System.Windows.Forms.ToolStripButton();
            this.toolStripSeparator2 = new System.Windows.Forms.ToolStripSeparator();
            this.deleteServerButton = new System.Windows.Forms.ToolStripButton();
            this.noServersLabel = new System.Windows.Forms.Label();
            this.toolStrip1.SuspendLayout();
            this.SuspendLayout();
            // 
            // serverList
            // 
            this.serverList.BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle;
            this.serverList.DrawMode = System.Windows.Forms.DrawMode.OwnerDrawFixed;
            this.serverList.FormattingEnabled = true;
            this.serverList.IntegralHeight = false;
            this.serverList.ItemHeight = 60;
            this.serverList.Location = new System.Drawing.Point(0, 26);
            this.serverList.Name = "serverList";
            this.serverList.Size = new System.Drawing.Size(479, 244);
            this.serverList.TabIndex = 8;
            // 
            // imageList1
            // 
            this.imageList1.ImageStream = ((System.Windows.Forms.ImageListStreamer)(resources.GetObject("imageList1.ImageStream")));
            this.imageList1.TransparentColor = System.Drawing.Color.Transparent;
            this.imageList1.Images.SetKeyName(0, "Visualpharm-Hardware-Server.ico");
            // 
            // toolStrip1
            // 
            this.toolStrip1.BackColor = System.Drawing.Color.Transparent;
            this.toolStrip1.Items.AddRange(new System.Windows.Forms.ToolStripItem[] {
            this.addServerButton,
            this.toolStripSeparator1,
            this.editServerButton,
            this.toolStripSeparator2,
            this.deleteServerButton});
            this.toolStrip1.Location = new System.Drawing.Point(0, 0);
            this.toolStrip1.Name = "toolStrip1";
            this.toolStrip1.RenderMode = System.Windows.Forms.ToolStripRenderMode.System;
            this.toolStrip1.Size = new System.Drawing.Size(479, 25);
            this.toolStrip1.TabIndex = 9;
            this.toolStrip1.Text = "toolStrip1";
            // 
            // addServerButton
            // 
            this.addServerButton.Image = ((System.Drawing.Image)(resources.GetObject("addServerButton.Image")));
            this.addServerButton.ImageTransparentColor = System.Drawing.Color.Magenta;
            this.addServerButton.Name = "addServerButton";
            this.addServerButton.Size = new System.Drawing.Size(92, 22);
            this.addServerButton.Text = "Add a server";
            this.addServerButton.Click += new System.EventHandler(this.addServerButton_Click);
            // 
            // toolStripSeparator1
            // 
            this.toolStripSeparator1.Name = "toolStripSeparator1";
            this.toolStripSeparator1.Size = new System.Drawing.Size(6, 25);
            // 
            // editServerButton
            // 
            this.editServerButton.Image = ((System.Drawing.Image)(resources.GetObject("editServerButton.Image")));
            this.editServerButton.ImageTransparentColor = System.Drawing.Color.Magenta;
            this.editServerButton.Name = "editServerButton";
            this.editServerButton.Size = new System.Drawing.Size(81, 22);
            this.editServerButton.Text = "Edit server";
            this.editServerButton.Click += new System.EventHandler(this.editServerButton_Click);
            // 
            // toolStripSeparator2
            // 
            this.toolStripSeparator2.Name = "toolStripSeparator2";
            this.toolStripSeparator2.Size = new System.Drawing.Size(6, 25);
            // 
            // deleteServerButton
            // 
            this.deleteServerButton.Image = ((System.Drawing.Image)(resources.GetObject("deleteServerButton.Image")));
            this.deleteServerButton.ImageTransparentColor = System.Drawing.Color.Magenta;
            this.deleteServerButton.Name = "deleteServerButton";
            this.deleteServerButton.Size = new System.Drawing.Size(94, 22);
            this.deleteServerButton.Text = "Delete server";
            this.deleteServerButton.Click += new System.EventHandler(this.deleteServerButton_Click);
            // 
            // noServersLabel
            // 
            this.noServersLabel.AutoSize = true;
            this.noServersLabel.Location = new System.Drawing.Point(159, 128);
            this.noServersLabel.Name = "noServersLabel";
            this.noServersLabel.Size = new System.Drawing.Size(153, 13);
            this.noServersLabel.TabIndex = 10;
            this.noServersLabel.Text = "No statistic servers available.";
            this.noServersLabel.Visible = false;
            // 
            // StatisticsServerDialog
            // 
            this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
            this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
            this.ClientSize = new System.Drawing.Size(479, 270);
            this.Controls.Add(this.noServersLabel);
            this.Controls.Add(this.toolStrip1);
            this.Controls.Add(this.serverList);
            this.Font = new System.Drawing.Font("Segoe UI", 8.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
            this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
            this.Icon = ((System.Drawing.Icon)(resources.GetObject("$this.Icon")));
            this.KeyPreview = true;
            this.MaximizeBox = false;
            this.Name = "StatisticsServerDialog";
            this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent;
            this.Text = "Statistic servers - {0}";
            this.Load += new System.EventHandler(this.StatisticsServerDialog_Load);
            this.KeyDown += new System.Windows.Forms.KeyEventHandler(this.StatisticsServerDialog_KeyDown);
            this.toolStrip1.ResumeLayout(false);
            this.toolStrip1.PerformLayout();
            this.ResumeLayout(false);
            this.PerformLayout();

        }

        #endregion

        // Designer-managed control fields; do not rename without updating the
        // .resx resources and the event handlers in the other partial file.
        private Controls.ServerList serverList;
        private System.Windows.Forms.ImageList imageList1;
        private System.Windows.Forms.ToolStrip toolStrip1;
        private System.Windows.Forms.ToolStripButton addServerButton;
        private System.Windows.Forms.ToolStripSeparator toolStripSeparator1;
        private System.Windows.Forms.ToolStripButton editServerButton;
        private System.Windows.Forms.ToolStripSeparator toolStripSeparator2;
        private System.Windows.Forms.ToolStripButton deleteServerButton;
        private System.Windows.Forms.Label noServersLabel;
    }
}
{ "pile_set_name": "Github" }
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. // See LICENSE.txt for license information. import React from 'react'; import {shallow} from 'enzyme'; import FeatureDiscovery from 'components/admin_console/feature_discovery/feature_discovery'; describe('components/feature_discovery', () => { describe('FeatureDiscovery', () => { test('should match snapshot', () => { const wrapper = shallow( <FeatureDiscovery featureName='test' titleID='translation.test.title' titleDefault='Foo' copyID='translation.test.copy' copyDefault={'Bar'} learnMoreURL='https://test.mattermost.com/secondary/' imgPath='foo/bar.png' // eslint-disable-next-line @typescript-eslint/naming-convention stats={{TOTAL_USERS: 20}} actions={{ requestTrialLicense: jest.fn(), getLicenseConfig: jest.fn(), }} />, ); expect(wrapper).toMatchSnapshot(); }); }); });
{ "pile_set_name": "Github" }
/* Copyright 2020 Ben Roesner (keycapsss.com) * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ #include "plaid_pad.h"
{ "pile_set_name": "Github" }
//
// Code (inline and block)
// --------------------------------------------------
// NOTE(review): all @-variables here are expected to come from the theme's
// variables file (standard Bootstrap LESS layout) — confirm against the
// surrounding project.


// Inline and block code styles
code,
kbd,
pre,
samp {
  font-family: @font-family-monospace;
}

// Inline code
code {
  padding: 2px 4px;
  font-size: 90%;
  color: @code-color;
  background-color: @code-bg;
  white-space: nowrap;
  border-radius: @border-radius-base;
}

// User input typically entered via keyboard
kbd {
  padding: 2px 4px;
  font-size: 90%;
  color: @kbd-color;
  background-color: @kbd-bg;
  border-radius: @border-radius-small;
  // Subtle bottom inset gives the "physical key" look.
  box-shadow: inset 0 -1px 0 rgba(0,0,0,.25);
}

// Blocks of code
pre {
  display: block;
  padding: ((@line-height-computed - 1) / 2);
  margin: 0 0 (@line-height-computed / 2);
  font-size: (@font-size-base - 1); // 14px to 13px
  line-height: @line-height-base;
  word-break: break-all;
  word-wrap: break-word;
  color: @pre-color;
  background-color: @pre-bg;
  border: 1px solid @pre-border-color;
  border-radius: @border-radius-base;

  // Account for some code outputs that place code tags in pre tags
  code {
    padding: 0;
    font-size: inherit;
    color: inherit;
    white-space: pre-wrap;
    background-color: transparent;
    border-radius: 0;
  }
}

// Enable scrollable blocks of code
.pre-scrollable {
  max-height: @pre-scrollable-max-height;
  overflow-y: scroll;
}
{ "pile_set_name": "Github" }
{ "name": "google map", "author": "fofa", "version": "0.1.0", "matches": [ { "search": "body", "text": "maps.googleapis.com/maps/api/js" } ] }
{ "pile_set_name": "Github" }
<?php
/**
 * WeChat (WeiXin) configuration helper.
 */

/**
 * Build a WSTWechat client configured from the global application settings.
 *
 * Uses require_once instead of require: this function loads a class file,
 * and a plain require would re-include it on every call, triggering a fatal
 * "cannot redeclare class WSTWechat" error the second time WXAdmin() runs.
 *
 * @return WSTWechat client bound to the configured WeiXin AppId/AppKey.
 */
function WXAdmin(){
	require_once 'ThinkPHP/Library/Vendor/Wechat/WSTWechat.class.php';
	return new WSTWechat($GLOBALS['CONFIG']['WeiXinAppId'], $GLOBALS['CONFIG']['WeiXinAppKey']);
}
{ "pile_set_name": "Github" }
shok ==== http://shok.io The shok command shell is a non-POSIX interactive command language interpreter with an expressive scripting language. It intends to be a modern, discoverable shell intended for every-day command invocation, process management, and filesystem manipulation. status ====== shok is in its early stages of initial development. It has the framework for a shell, a lexer, a parser, and an "evaluator" (type-checking, AST execution, program invocation). If it compiles, it may let you change directories and run commands but not write programs, or much else really. Most core features have yet to be implemented. All language attributes are suitable for discussion and replacement. Nevertheless, it is progressing quickly and steadily. Get involved! See http://shok.io for details. Description of the code layout and steps forward are coming soon. todo ==== Immediate hacking priorities: 1. functions / methods 2. trivial implementations of a few basic objects 3. string literals 4. basic interactive niceties: left/right/home/end, ^L 5. comments
{ "pile_set_name": "Github" }
from .htmlcomponents import * from .chartcomponents import * from .gridcomponents import * from addict import Dict from io import StringIO try: import pandas as pd _has_pandas = True except: _has_pandas = False # https://pandas.pydata.org/pandas-docs/stable/development/extending.html if _has_pandas: @pd.api.extensions.register_dataframe_accessor("jp") class JustPyAccessor: def __init__(self, df, **kwargs): self._validate(df) self.df = df @staticmethod def _validate(obj): pass @staticmethod def make_pairs_list(x_data, y_data): return list(map(list, itertools.zip_longest(x_data, y_data))) def _get_column(self, col_spec): if isinstance(col_spec, int): col = self.df.iloc[:, col_spec] elif isinstance(col_spec, str): col = self.df[col_spec] else: raise TypeError('Column specification for plotting must be integer or string') col = col.replace([np.inf, -np.inf], [sys.float_info.max, -sys.float_info.max]) # Convert nan to None return col.where((pd.notnull(col)), None) def plot(self, x, y, **kwargs): kind = kwargs.get('kind', 'column') chart = HighCharts(**kwargs) categories = kwargs.get('categories', True) o = chart.options o.chart.type = kind o.chart.zoomType = 'xy' o.chart.panning = True o.chart.panKey = 'shift' o.title.text = kwargs.get('title', '') o.subtitle.text = kwargs.get('subtitle', '') o.plotOptions.series.stacking = kwargs.get('stacking', '') # either normal or percent if kind not in ['scatter'] and categories: o.xAxis.categories = list(self._get_column(x)) o.series = [] for col in y: s = Dict() if kind not in ['scatter'] and categories: s.data = list(self._get_column(col)) else: s.data = self.make_pairs_list(self._get_column(x),self._get_column(col)) s.name = self.df.columns[col] if isinstance(col, int) else col s.type = kind o.series.append(s) return chart def ag_grid(self, **kwargs): grid = AgGrid(**kwargs) grid.load_pandas_frame(self.df) return grid def table(self, **kwargs): headers = list(self.df.columns) table_data = self.df.to_numpy().tolist() 
table_data.insert(0, headers) return(AutoTable(values=table_data, **kwargs)) def read_csv_from_string(csv_string, *args): return pd.read_csv(StringIO(csv_string), *args) class LinkedChartGrid(Div): def __init__(self, df, x, y, **kwargs): super().__init__(**kwargs) self.df = df self.x = x self.y = y self.kind = kwargs.get('kind', 'column') self.stacking = kwargs.get('stacking', '') self.title = kwargs.get('title', '') self.subtitle = kwargs.get('subtitle', '') self.set_classes('flex flex-col') self.chart = df.jp.plot(x, y, a=self, classes='m-2 p-2 border', kind=self.kind, stacking=self.stacking, title=self.title, subtitle=self.subtitle) self.grid = df.jp.ag_grid(a=self) self.grid.parent = self for event_name in ['sortChanged', 'filterChanged', 'columnMoved', 'rowDragEnd']: self.grid.on(event_name, self.grid_change) @staticmethod def grid_change(self, msg): self.parent.df = read_csv_from_string(msg.data) c = self.parent.df.jp.plot(self.parent.x, self.parent.y, kind=self.parent.kind, title=self.parent.title, subtitle=self.parent.subtitle, stacking=self.parent.stacking) self.parent.chart.options = c.options
{ "pile_set_name": "Github" }
using System; using System.Collections.Generic; using System.Diagnostics; using System.IO; using System.Net; using System.Net.Http; using System.Net.Security; using System.Runtime.InteropServices; using System.Security.Cryptography.X509Certificates; using System.Threading.Tasks; using EventStore.Common.Options; using EventStore.Common.Utils; using EventStore.Core.Authentication; using EventStore.Core.Authentication.InternalAuthentication; using EventStore.Core.Authorization; using EventStore.Core.Bus; using EventStore.Core.Cluster.Settings; using EventStore.Core.Messages; using EventStore.Core.Services.Gossip; using EventStore.Core.Services.Monitoring; using EventStore.Core.Tests.Http; using EventStore.Core.Tests.Services.Transport.Tcp; using EventStore.Core.TransactionLog.Checkpoint; using EventStore.Core.TransactionLog.Chunks; using EventStore.Core.TransactionLog.FileNamingStrategy; using EventStore.Core.Services.Transport.Http.Controllers; using EventStore.Core.Util; using EventStore.Core.Data; using Microsoft.AspNetCore.Hosting; using Microsoft.AspNetCore.Server.Kestrel.Core; using Microsoft.AspNetCore.Server.Kestrel.Https; using Microsoft.AspNetCore.TestHost; using ILogger = Serilog.ILogger; namespace EventStore.Core.Tests.Helpers { public class MiniClusterNode { public static int RunCount = 0; public static readonly Stopwatch RunningTime = new Stopwatch(); public static readonly Stopwatch StartingTime = new Stopwatch(); public static readonly Stopwatch StoppingTime = new Stopwatch(); public const int ChunkSize = 1024 * 1024; public const int CachedChunkSize = ChunkSize + ChunkHeader.Size + ChunkFooter.Size; private static readonly ILogger Log = Serilog.Log.ForContext<MiniClusterNode>(); public IPEndPoint InternalTcpEndPoint { get; private set; } public IPEndPoint InternalTcpSecEndPoint { get; private set; } public IPEndPoint ExternalTcpEndPoint { get; private set; } public IPEndPoint ExternalTcpSecEndPoint { get; private set; } public IPEndPoint HttpEndPoint 
{ get; private set; } public readonly int DebugIndex; public readonly ClusterVNode Node; public readonly TFChunkDb Db; private readonly string _dbPath; private readonly bool _isReadOnlyReplica; private readonly TaskCompletionSource<bool> _started = new TaskCompletionSource<bool>(); private readonly TaskCompletionSource<bool> _adminUserCreated = new TaskCompletionSource<bool>(); public Task Started => _started.Task; public Task AdminUserCreated => _adminUserCreated.Task; public VNodeState NodeState = VNodeState.Unknown; private readonly IWebHost _host; private TestServer _kestrelTestServer; private static bool EnableHttps() { return !RuntimeInformation.IsOSPlatform(OSPlatform.OSX); } public MiniClusterNode( string pathname, int debugIndex, IPEndPoint internalTcp, IPEndPoint internalTcpSec, IPEndPoint externalTcp, IPEndPoint externalTcpSec, IPEndPoint httpEndPoint, EndPoint[] gossipSeeds, ISubsystem[] subsystems = null, int? chunkSize = null, int? cachedChunkSize = null, bool enableTrustedAuth = false, bool skipInitializeStandardUsersCheck = true, int memTableSize = 1000, bool inMemDb = true, bool disableFlushToDisk = false, bool readOnlyReplica = false) { if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) { AppContext.SetSwitch("System.Net.Http.SocketsHttpHandler.Http2UnencryptedSupport", true); //TODO JPB Remove this sadness when dotnet core supports kestrel + http2 on macOS } RunningTime.Start(); RunCount += 1; DebugIndex = debugIndex; _dbPath = Path.Combine( pathname, string.Format( "mini-cluster-node-db-{0}-{1}-{2}", externalTcp.Port, externalTcpSec.Port, httpEndPoint.Port)); Directory.CreateDirectory(_dbPath); FileStreamExtensions.ConfigureFlush(disableFlushToDisk); Db = new TFChunkDb( CreateDbConfig(chunkSize ?? ChunkSize, _dbPath, cachedChunkSize ?? 
CachedChunkSize, inMemDb)); InternalTcpEndPoint = internalTcp; InternalTcpSecEndPoint = internalTcpSec; ExternalTcpEndPoint = externalTcp; ExternalTcpSecEndPoint = externalTcpSec; HttpEndPoint = httpEndPoint; var useHttps = EnableHttps(); var certificate = useHttps ? ssl_connections.GetServerCertificate() : null; var trustedRootCertificates = useHttps ? new X509Certificate2Collection(ssl_connections.GetRootCertificate()) : null; var singleVNodeSettings = new ClusterVNodeSettings( Guid.NewGuid(), debugIndex, () => new ClusterNodeOptions(), InternalTcpEndPoint, InternalTcpSecEndPoint, ExternalTcpEndPoint, ExternalTcpSecEndPoint, HttpEndPoint, new Data.GossipAdvertiseInfo( InternalTcpEndPoint.ToDnsEndPoint(), InternalTcpSecEndPoint.ToDnsEndPoint(), ExternalTcpEndPoint.ToDnsEndPoint(), ExternalTcpSecEndPoint.ToDnsEndPoint(), HttpEndPoint.ToDnsEndPoint(), null, null, 0, null, 0, 0), enableTrustedAuth, certificate, trustedRootCertificates, Opts.CertificateReservedNodeCommonNameDefault, 1, false, "", gossipSeeds, TFConsts.MinFlushDelayMs, 3, 2, 2, TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(10), false, false,TimeSpan.FromHours(1), StatsStorage.None, 0, new AuthenticationProviderFactory(components => new InternalAuthenticationProviderFactory(components)), new AuthorizationProviderFactory(components => new LegacyAuthorizationProviderFactory(components.MainQueue)), disableScavengeMerging: true, scavengeHistoryMaxAge: 30, adminOnPublic: true, statsOnPublic: true, gossipOnPublic: true, gossipInterval: TimeSpan.FromSeconds(2), gossipAllowedTimeDifference: TimeSpan.FromSeconds(1), gossipTimeout: TimeSpan.FromSeconds(3), extTcpHeartbeatTimeout: TimeSpan.FromSeconds(2), extTcpHeartbeatInterval: TimeSpan.FromSeconds(2), intTcpHeartbeatTimeout: TimeSpan.FromSeconds(2), intTcpHeartbeatInterval: TimeSpan.FromSeconds(2), deadMemberRemovalPeriod: TimeSpan.FromSeconds(1800), verifyDbHash: false, maxMemtableEntryCount: memTableSize, hashCollisionReadLimit: 
Opts.HashCollisionReadLimitDefault, startStandardProjections: false, disableHTTPCaching: false, logHttpRequests: false, connectionPendingSendBytesThreshold: Opts.ConnectionPendingSendBytesThresholdDefault, connectionQueueSizeThreshold: Opts.ConnectionQueueSizeThresholdDefault, readOnlyReplica: readOnlyReplica, ptableMaxReaderCount: Constants.PTableMaxReaderCountDefault, enableExternalTCP: true, disableHttps: !useHttps, enableAtomPubOverHTTP: true); _isReadOnlyReplica = readOnlyReplica; Log.Information( "\n{0,-25} {1} ({2}/{3}, {4})\n" + "{5,-25} {6} ({7})\n" + "{8,-25} {9} ({10}-bit)\n" + "{11,-25} {12}\n" + "{13,-25} {14}\n" + "{15,-25} {16}\n" + "{17,-25} {18}\n" + "{19,-25} {20}\n\n", "ES VERSION:", VersionInfo.Version, VersionInfo.Branch, VersionInfo.Hashtag, VersionInfo.Timestamp, "OS:", OS.OsFlavor, Environment.OSVersion, "RUNTIME:", OS.GetRuntimeVersion(), Marshal.SizeOf(typeof(IntPtr)) * 8, "GC:", GC.MaxGeneration == 0 ? "NON-GENERATION (PROBABLY BOEHM)" : string.Format("{0} GENERATIONS", GC.MaxGeneration + 1), "DBPATH:", _dbPath, "ExTCP ENDPOINT:", ExternalTcpEndPoint, "ExTCP SECURE ENDPOINT:", ExternalTcpSecEndPoint, "ExHTTP ENDPOINT:", HttpEndPoint); Node = new ClusterVNode(Db, singleVNodeSettings, infoControllerBuilder: new InfoControllerBuilder() , subsystems: subsystems, gossipSeedSource: new KnownEndpointGossipSeedSource(gossipSeeds)); Node.HttpService.SetupController(new TestController(Node.MainQueue)); _host = new WebHostBuilder() .UseKestrel(o => { o.Listen(HttpEndPoint, options => { if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) { options.Protocols = HttpProtocols.Http2; } else { options.UseHttps(new HttpsConnectionAdapterOptions { ServerCertificate = certificate, ClientCertificateMode = ClientCertificateMode.AllowCertificate, ClientCertificateValidation = (certificate, chain, sslPolicyErrors) => { var (isValid, error) = ClusterVNode.ValidateClientCertificateWithTrustedRootCerts(certificate, chain, sslPolicyErrors, () => 
trustedRootCertificates); if (!isValid && error != null) { Log.Error("Client certificate validation error: {e}", error); } return isValid; } }); } }); }) .UseStartup(Node.Startup) .Build(); _kestrelTestServer = new TestServer(new WebHostBuilder() .UseKestrel() .UseStartup(Node.Startup)); } public void Start() { StartingTime.Start(); Node.MainBus.Subscribe( new AdHocHandler<SystemMessage.StateChangeMessage>(m => { NodeState = _isReadOnlyReplica ? VNodeState.ReadOnlyLeaderless : VNodeState.Unknown; })); if (!_isReadOnlyReplica) { Node.MainBus.Subscribe( new AdHocHandler<SystemMessage.BecomeLeader>(m => { NodeState = VNodeState.Leader; _started.TrySetResult(true); })); Node.MainBus.Subscribe( new AdHocHandler<SystemMessage.BecomeFollower>(m => { NodeState = VNodeState.Follower; _started.TrySetResult(true); })); } else { Node.MainBus.Subscribe( new AdHocHandler<SystemMessage.BecomeReadOnlyReplica>(m => { NodeState = VNodeState.ReadOnlyReplica; _started.TrySetResult(true); })); } Node.MainBus.Subscribe( new AdHocHandler<AuthenticationMessage.AuthenticationProviderInitialized>(m => { _adminUserCreated.TrySetResult(true); })); _host.Start(); Node.Start(); } public HttpClient CreateHttpClient() { return new HttpClient(_kestrelTestServer.CreateHandler()); } public async Task Shutdown(bool keepDb = false) { StoppingTime.Start(); _kestrelTestServer?.Dispose(); await Node.StopAsync().WithTimeout(TimeSpan.FromSeconds(20)); _host?.Dispose(); if (!keepDb) TryDeleteDirectory(_dbPath); StoppingTime.Stop(); RunningTime.Stop(); } public void WaitIdle() { #if DEBUG Node.QueueStatsManager.WaitIdle(); #endif } private void TryDeleteDirectory(string directory) { try { Directory.Delete(directory, true); } catch (Exception e) { Debug.WriteLine("Failed to remove directory {0}", directory); Debug.WriteLine(e); } } private TFChunkDbConfig CreateDbConfig(int chunkSize, string dbPath, long chunksCacheSize, bool inMemDb) { ICheckpoint writerChk; ICheckpoint chaserChk; ICheckpoint epochChk; 
ICheckpoint truncateChk; ICheckpoint replicationCheckpoint = new InMemoryCheckpoint(-1); ICheckpoint indexCheckpoint = new InMemoryCheckpoint(-1); if (inMemDb) { writerChk = new InMemoryCheckpoint(Checkpoint.Writer); chaserChk = new InMemoryCheckpoint(Checkpoint.Chaser); epochChk = new InMemoryCheckpoint(Checkpoint.Epoch, initValue: -1); truncateChk = new InMemoryCheckpoint(Checkpoint.Truncate, initValue: -1); } else { var writerCheckFilename = Path.Combine(dbPath, Checkpoint.Writer + ".chk"); var chaserCheckFilename = Path.Combine(dbPath, Checkpoint.Chaser + ".chk"); var epochCheckFilename = Path.Combine(dbPath, Checkpoint.Epoch + ".chk"); var truncateCheckFilename = Path.Combine(dbPath, Checkpoint.Truncate + ".chk"); writerChk = new MemoryMappedFileCheckpoint(writerCheckFilename, Checkpoint.Writer, cached: true); chaserChk = new MemoryMappedFileCheckpoint(chaserCheckFilename, Checkpoint.Chaser, cached: true); epochChk = new MemoryMappedFileCheckpoint( epochCheckFilename, Checkpoint.Epoch, cached: true, initValue: -1); truncateChk = new MemoryMappedFileCheckpoint( truncateCheckFilename, Checkpoint.Truncate, cached: true, initValue: -1); } var nodeConfig = new TFChunkDbConfig( dbPath, new VersionedPatternFileNamingStrategy(dbPath, "chunk-"), chunkSize, chunksCacheSize, writerChk, chaserChk, epochChk, truncateChk, replicationCheckpoint, indexCheckpoint, Constants.TFChunkInitialReaderCountDefault, Constants.TFChunkMaxReaderCountDefault, inMemDb); return nodeConfig; } } }
{ "pile_set_name": "Github" }
/** * * "A Collection of Useful C++ Classes for Digital Signal Processing" * By Vinnie Falco and Bernd Porr * * Official project location: * https://github.com/berndporr/iir1 * * See Documentation.cpp for contact information, notes, and bibliography. * * ----------------------------------------------------------------- * * License: MIT License (http://www.opensource.org/licenses/mit-license.php) * Copyright (c) 2009 by Vinnie Falco * Copyright (c) 2011 by Bernd Porr * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
**/ #include "Common.h" #include "Butterworth.h" namespace Iir { namespace Butterworth { AnalogLowPass::AnalogLowPass () : m_numPoles (-1) { setNormal (0, 1); } void AnalogLowPass::design (int numPoles) { if (m_numPoles != numPoles) { m_numPoles = numPoles; reset (); const double n2 = 2 * numPoles; const int pairs = numPoles / 2; for (int i = 0; i < pairs; ++i) { complex_t c = std::polar (1., doublePi_2 + (2 * i + 1) * doublePi / n2); addPoleZeroConjugatePairs (c, infinity()); } if (numPoles & 1) add (-1, infinity()); } } //------------------------------------------------------------------------------ AnalogLowShelf::AnalogLowShelf () : m_numPoles (-1) { setNormal (doublePi, 1); } void AnalogLowShelf::design (int numPoles, double gainDb) { if (m_numPoles != numPoles || m_gainDb != gainDb) { m_numPoles = numPoles; m_gainDb = gainDb; reset (); const double n2 = numPoles * 2; const double g = pow (pow (10., gainDb/20), 1. / n2); const double gp = -1. / g; const double gz = -g; const int pairs = numPoles / 2; for (int i = 1; i <= pairs; ++i) { const double theta = doublePi * (0.5 - (2 * i - 1) / n2); addPoleZeroConjugatePairs (std::polar (gp, theta), std::polar (gz, theta)); } if (numPoles & 1) add (gp, gz); } } //------------------------------------------------------------------------------ void LowPassBase::setup (int order, double sampleRate, double cutoffFrequency) { m_analogProto.design (order); LowPassTransform (cutoffFrequency / sampleRate, m_digitalProto, m_analogProto); Cascade::setLayout (m_digitalProto); } void HighPassBase::setup (int order, double sampleRate, double cutoffFrequency) { m_analogProto.design (order); HighPassTransform (cutoffFrequency / sampleRate, m_digitalProto, m_analogProto); Cascade::setLayout (m_digitalProto); } void BandPassBase::setup (int order, double sampleRate, double centerFrequency, double widthFrequency) { m_analogProto.design (order); BandPassTransform (centerFrequency / sampleRate, widthFrequency / sampleRate, m_digitalProto, 
m_analogProto); Cascade::setLayout (m_digitalProto); } void BandStopBase::setup (int order, double sampleRate, double centerFrequency, double widthFrequency) { m_analogProto.design (order); BandStopTransform (centerFrequency / sampleRate, widthFrequency / sampleRate, m_digitalProto, m_analogProto); Cascade::setLayout (m_digitalProto); } void LowShelfBase::setup (int order, double sampleRate, double cutoffFrequency, double gainDb) { m_analogProto.design (order, gainDb); LowPassTransform (cutoffFrequency / sampleRate, m_digitalProto, m_analogProto); Cascade::setLayout (m_digitalProto); } void HighShelfBase::setup (int order, double sampleRate, double cutoffFrequency, double gainDb) { m_analogProto.design (order, gainDb); HighPassTransform (cutoffFrequency / sampleRate, m_digitalProto, m_analogProto); Cascade::setLayout (m_digitalProto); } void BandShelfBase::setup (int order, double sampleRate, double centerFrequency, double widthFrequency, double gainDb) { m_analogProto.design (order, gainDb); BandPassTransform (centerFrequency / sampleRate, widthFrequency / sampleRate, m_digitalProto, m_analogProto); // HACK! m_digitalProto.setNormal (((centerFrequency/sampleRate) < 0.25) ? doublePi : 0, 1); Cascade::setLayout (m_digitalProto); } } }
{ "pile_set_name": "Github" }
--- title: "Sorting vs Hashing" output: github_document editor_options: chunk_output_type: console --- ```{r, include = FALSE} knitr::opts_chunk$set(collapse = TRUE, comment = "#> ") knitr::opts_chunk$set(fig.width = 10) # prevent press() noise and autoplot() noise knitr::opts_chunk$set(message = FALSE, warning = FALSE) ``` Investigates performance of `vec_order()` vs current implementation of `vec_unique()`, which is based on hashing and a dictionary. It might be worth switching to use the sort based approach of `vec_order()`. ## Setup ```{r} library(vctrs) library(rlang) library(stringr) library(ggplot2) library(dplyr) ``` ```{r} # Generate `size` random words of varying string sizes new_dictionary <- function(size, min_length, max_length) { lengths <- rlang::seq2(min_length, max_length) stringi::stri_rand_strings( size, sample(lengths, size = size, replace = TRUE) ) } ``` ```{r} # Work around bench_expr bug where vectorized attribute isn't being sliced # https://github.com/r-lib/bench/pull/90 filter_bench <- function(.data, ...) { out <- dplyr::mutate(.data, rn = row_number()) %>% dplyr::filter(...) # patch up bench_expr which <- out$rn desc <- attr(.data$expression, "description") attr(out$expression, "description") <- desc[which] out$rn <- NULL out } ``` ```{r} plot_bench <- function(df, title = waiver()) { df %>% ggplot(aes(x = n_groups, y = as.numeric(median))) + geom_point(aes(color = as.character(expression))) + facet_wrap(~ size, labeller = label_both, nrow = 1) + scale_x_log10() + scale_y_log10() + labs( x = "Number of groups (log10)", y = "Time (log10 seconds)", color = "Type", title = title ) } ``` ## Compare with `vec_unique_loc()` It is worth comparing to `vec_unique_loc()`, which is the most bare bones of the uniqueness functions, to test whether or not uniqueness-by-sorting can be faster than uniqueness-by-hashing. In a branch, I hacked together an implementation of `vec_unique_loc()` based on `vec_order()`. 
It takes approximately the same amount of time as `vec_order()` itself, so I will just use `vec_order()` as a proxy for the sorting approach. ## Integers ### Test 1 - Integers - Varying total size (small) - Varying group size ```{r} set.seed(123) size <- 10 ^ (1:4) n_groups <- 10 ^ (1:6) df <- bench::press( size = size, n_groups = n_groups, { x <- sample(n_groups, size, replace = TRUE) bench::mark( vec_order(x), vec_unique_loc(x), iterations = 100, check = FALSE ) } ) ``` Performance is generally the same for small sizes ```{r, echo=FALSE} plot_bench(df, "Integers (small)") ``` However, `size = 10000` seems to already show `vec_order()` being faster. ```{r} df[-1] %>% filter(size == 10000) ``` ### Test 2 - Integers - Varying total size (large) - Varying number of groups ```{r} set.seed(123) size <- 10 ^ (5:7) n_groups <- 10 ^ (1:6) df <- bench::press( size = size, n_groups = n_groups, { x <- sample(n_groups, size, replace = TRUE) bench::mark( vec_order(x), vec_unique_loc(x), iterations = 20, check = FALSE ) } ) ``` As the total size increases, `vec_order()` starts to do better. ```{r, echo = FALSE} plot_bench(df, "Integers (large)") ``` ### Test 3 This benchmark shows how much better `vec_order()` scales for large size and large number of groups. For integers it is always faster, and scales extremely well. 
```{r} set.seed(123) size <- 10 ^ 8 n_groups <- 10 ^ (1:7) df <- bench::press( size = size, n_groups = n_groups, { x <- sample(n_groups, size, replace = TRUE) bench::mark( vec_order(x), vec_unique_loc(x), iterations = 20, check = FALSE ) } ) ``` ```{r, echo = FALSE} df %>% ggplot(aes(x = n_groups, y = as.numeric(median))) + geom_point(aes(color = as.character(expression))) + geom_line(aes(color = as.character(expression))) + scale_x_log10() + labs( x = "Number of groups (log10)", y = "Time (seconds)", color = "Type", title = "Integers - Size=1e8, Varying n_groups" ) ``` ## Doubles ### Test 1 - Doubles - Varying total size (small) - Varying group size ```{r} set.seed(123) size <- 10 ^ (1:4) n_groups <- 10 ^ (1:6) df <- bench::press( size = size, n_groups = n_groups, { x <- sample(n_groups, size, replace = TRUE) + 0 bench::mark( vec_order(x), vec_unique_loc(x), iterations = 100, check = FALSE ) } ) ``` `vec_order()` is generally a bit slower for these smaller sizes, but it scales much better with large sizes and larger number of groups. See the next test. ```{r, echo=FALSE} plot_bench(df, "Doubles (small size)") ``` ### Test 2 This benchmark shows how much better `vec_order()` scales for large size and large number of groups. For doubles it is slower up front, but scales much better. 
```{r} set.seed(123) size <- 10 ^ 8 n_groups <- 10 ^ (1:7) df <- bench::press( size = size, n_groups = n_groups, { x <- sample(n_groups, size, replace = TRUE) + 0 bench::mark( vec_order(x), vec_unique_loc(x), iterations = 20, check = FALSE ) } ) ``` ```{r, echo = FALSE} df %>% ggplot(aes(x = n_groups, y = as.numeric(median))) + geom_point(aes(color = as.character(expression))) + geom_line(aes(color = as.character(expression))) + scale_x_log10() + labs( x = "Number of groups (log10)", y = "Time (seconds)", color = "Type", title = "Doubles - Size=1e8, Varying n_groups" ) ``` ## Characters ### Test 1 Currently string ordering is much slower than `vec_unique_loc()` (especially when most strings are unique) due to all of the allocations that are required + the fact that it does a radix ordering of unique strings and then an integer radix ordering after that. I am confident that the C level part of `vec_order()` could gain a `sort_character = false` option that would do a much faster counting sort in order-of-first-appearance that utilizes the truelengths in a different way. It wouldn't sort strings at all, so should be very fast. This is what `cgroup()` does in `base::order()`, which is not currently implemented in `vec_order()` because I didn't have a use for it until now. https://github.com/wch/r-source/blob/8d7ac4699fba640d030703fa010b66bf26054cbd/src/main/radixsort.c#L1051 Very large set of strings with 10 groups - Don't notice much of a difference between the two here, because there aren't many unique strings. ```{r} set.seed(123) size <- 1e7 n_groups <- 10 dict <- new_dictionary(n_groups, min_length = 5, max_length = 20) x <- sample(dict, size, TRUE) bench::mark(vec_order(x), vec_unique_loc(x), iterations = 10, check = FALSE) ``` Very large set of completely random strings - Extremely large difference, because `vec_order()` is doing way too much work to actually sort the strings. 
```{r} set.seed(123) n_groups <- 1e7 x <- new_dictionary(n_groups, min_length = 5, max_length = 20) bench::mark(vec_order(x), vec_unique_loc(x), iterations = 10, check = FALSE) ``` ## Multiple columns ### Test 1 3 integer columns, each with 20 groups. 1e7 total size. ```{r} set.seed(123) size <- 1e7L n_groups <- 20 n_cols <- 3 cols <- replicate(n_cols, sample(n_groups, size, TRUE), simplify = FALSE) names(cols) <- seq_along(cols) df <- vctrs::new_data_frame(cols, size) bench::mark( vec_order(df), vec_unique_loc(df), iterations = 10, check = FALSE ) ``` ### Test 2 Same as before but with character columns. We do worse here because as mentioned before, we do too much work in `vec_order()` right now with character vectors. ```{r} set.seed(123) size <- 1e7L n_groups <- 20 n_cols <- 3 cols <- replicate( n_cols, { dict <- new_dictionary(n_groups, 5, 20) sample(dict, size, TRUE) }, simplify = FALSE ) names(cols) <- seq_along(cols) df <- vctrs::new_data_frame(cols, size) bench::mark( vec_order(df), vec_unique_loc(df), iterations = 5, check = FALSE ) ``` ### Test 3 20 integer columns, each with 2 groups. 1e7 total size. ```{r} set.seed(123) size <- 1e7L n_groups <- 2 n_cols <- 20 cols <- replicate(n_cols, sample(n_groups, size, TRUE), simplify = FALSE) names(cols) <- seq_along(cols) df <- vctrs::new_data_frame(cols, size) bench::mark( vec_order(df), vec_unique_loc(df), iterations = 5, check = FALSE ) ```
{ "pile_set_name": "Github" }
#!/usr/bin/perl -w #use POSIX; #use POSIX qw(setsid); #use POSIX qw(:errno_h :fcntl_h); use strict; use LWP::Simple; #should have 605 mutations? my $build = 'hg18'; my $genomeVarFile = 'gvARdb.txt'; my $posFile = "gvPosARdb.$build.txt"; my $linkFile = 'gvLinkARdb.txt'; my $attFile = 'gvAttrARdb.txt'; my $badFile = 'unparsedARdb.txt'; my $converter = '../convert_coors2'; #my $geneFile = 'M20132.psl'; my $geneFile = "M20132.$build.psl"; my $faFile = 'M20132.fa'; my $strand; my %aa = ( 'Ala' => 'A', 'Val' => 'V', 'Leu' => 'L', 'Ile' => 'I', 'Pro' => 'P', 'Phe' => 'F', 'Trp' => 'W', 'Met' => 'M', 'Gly' => 'G', 'Ser' => 'S', 'Thr' => 'T', 'Cys' => 'C', 'Tyr' => 'Y', 'Asn' => 'N', 'Gln' => 'Q', 'Asp' => 'D', 'Glu' => 'E', 'Lys' => 'K', 'Arg' => 'R', 'His' => 'H', 'Stop' => 'X' ); my $fh; my $fh2; my $fh3; my $fh4; my $bfh; open($fh, ">", $genomeVarFile) or die "Couldn't open output file, $!\n"; open($fh2, ">", $posFile) or die "Couldn't open output file2, $!\n"; #no links for now open($fh3, ">", $linkFile) or die "Couldn't open output file3, $!\n"; open($fh4, ">", $attFile) or die "Couldn't open output file4, $!\n"; open($bfh, ">", $badFile) or die "Couldn't open output fileb, $!\n"; my $fcnt = 0; my $bcnt = 0; my $verCnt = 0; my %pheno; my %phenoId; #phenotype and ID to make unique? 
my %disease; #only set disease-associated once print "Writing files $genomeVarFile, $posFile, $attFile and $badFile\n"; #ARdb data #ACC Phenotype Mutation type exon pathogenicity CpG PositionFrom amino acid Toamino acid nucleotide position From nucleotide To nucleotide Bmax kD k Thermolabile comments Domain External genitalia Family history Reference Exon 1 tracts Poly Gln # Poly Gly # Poly Gly my $srcId = 'ARdb'; my %pkey; #IDs are primary key in gv table my %name; my %id; #use first id as ID for mutation, data per patient while (<>) { chomp; my $line = $_; if (!defined $line or $line eq '' or $line =~ /^\s*#/) { next; } parseDataLine($line); } #print out disease foreach my $k (keys %disease) { my $t = $disease{$k}; if ($t eq 'both') { $t = 'likely to be phenotype-associated'; } print $fh4 "$k\tdisease\t$t\n"; } close $fh or die "Couldn't close outfile, $!\n"; close $fh2 or die "Couldn't close outfile2, $!\n"; close $fh3 or die "Couldn't close outfile3, $!\n"; close $fh4 or die "Couldn't close outfile4, $!\n"; close $bfh or die "Couldn't close outfileb, $!\n"; my $unicnt = scalar (keys %name); print "found numbers for $fcnt entries\n", "found $unicnt unique mutation names\n", "found $bcnt entries that couldn't parse\n", "verified sequence on $verCnt names\n"; print "Found unique phenotypes:\n", join(", ", keys %pheno), "\n"; exit 0; sub parseDataLine { my $line = shift @_; my @f = split(/\t/, $line); foreach (@f) { s/^\s+//; s/\s+$//; } #0=id, 1=phenotype, 2=mutations type, 3=location, 9=base, 10=oldnts 11=new #4=disease association? #protein: 6=position, 7=from amino acid, 8=to amino acid my @chr; my $disease; if ($f[9] && ($f[10] or $f[11])) { $f[2] = lc $f[2]; if ($f[2] =~ /subst/) { $f[2] = 'substitution'; } #abrev elsif ($f[2] =~ /dupli/) { $f[2] = 'duplication'; } elsif ($f[2] eq 'splice') { $f[2] = 'unknown'; } #leave insertion and deletion if ($f[3] =~ /intron/) { $f[3] = 'intron'; } #has domain attached? 
# Continuation of the per-record dispatch begun above: classify the variant,
# build an HGVS-style name, map it to chromosome coordinates, and emit the
# BED / attribute / link rows.  Writes to the file-level handles $fh..$fh4
# and $bfh (bad-record log) and updates the file-level counters/hashes.
elsif ($f[3] =~ /^\d+\s+\w+$/) { $f[3] = 'exon'; } #by number
elsif ($f[3] =~ /^\d+\s*$/) { $f[3] = 'exon'; } #by number without domain
else { $f[3] = 'unknown' . $f[3] . '?'; }
#build hgvs protein name? cols 6-8
my $pname;
if ($f[4]) {
    $f[4] =~ s/^\s+//;
    $f[4] =~ s/\s+$//;
    if ($f[4] eq '*') { $disease{"ARdb_$f[0]"} = 'phenotype-associated'; }#else treat as unknown
}
# Need codon number plus old/new residue columns to continue.
if (!$f[6] or !$f[7] or !defined $f[8]) { print $bfh $line, "\n"; $bcnt++; return; }
if (!$f[2]) { die "Couldn't find type in $line\n"; }
if ($f[2] eq 'substitution') {
    if (!exists $aa{$f[7]} or !exists $aa{$f[8]}) {
        die "No amino acid symbol for $f[7] or $f[8]\n";
    }
    #$pname = 'AR p.' . $aa{$f[7]} . $f[6] . $aa{$f[8]};
    $pname = make_sub_name(@f);
}elsif ($f[2] eq 'deletion') {
    $pname = make_del_name(@f);
} elsif ($f[2] eq 'insertion') {
    $pname = make_ins_name(@f);
} elsif ($f[2] eq 'duplication') {
    $pname = make_dup_name(@f);
} else { #bad mutation type
    print $bfh $line, "\n"; $bcnt++; return;
}
if (!$pname) { print $bfh $line, "\n"; $bcnt++; return; }
$name{$pname}++;
# Column 9 is the genomic position; a non-digit means we must recover
# coordinates from the constructed name instead.
if ($f[9] =~ /\D/) {
    if ($f[2] eq 'deletion' && $pname) {
        #get coordinates from name
        @chr = convert_hgvs_name($pname);
    }
    if (!@chr or $chr[0] =~ /ERROR/) { print $bfh $line, "\n"; $bcnt++; return; }
}else {
    @chr = convert_number($f[9]);
    $chr[2] = $chr[1];
    if ($f[2] eq 'insertion') { $chr[1]--; } #inserted before nt listed
}
if ($chr[0] =~ /ERROR/) { print $bfh $line, "\n"; $bcnt++; return; }
#adjust to ucsc numbers? (UCSC BED starts are 0-based)
$chr[1]--;
if ($name{$pname} == 1) {
    #check sequence based on hgvs style name
    my $ch = sequenceCheck($pname);
    if (!$ch) { print $bfh $line, "\n"; $bcnt++; return; }
    elsif ($ch == 2) { $verCnt++; }
    if (exists $pkey{"ARdb_$f[0]"}) {
        print "WARNING duplicate ID ARdb_$f[0] for $pname\n";
    }else { $pkey{"ARdb_$f[0]"} = 1; }
    print $fh2 "$chr[0]\t$chr[1]\t$chr[2]\tARdb_$f[0]\t$strand\t$pname\n";
    print $fh "ARdb_$f[0]\t$pname\t$srcId\t$f[2]\t$f[3]\t1\n";
    #test and add only valid links
    addLink($f[0]);
    $id{$pname} = "ARdb_$f[0]";
    #attr table id, classId, nameId, value (255chars)
    #phenotype=class 6,
    #name = ? male pseudohermaphroditism ? reproductive system phenotype
    #cancers? tumorigenesis ? prostate tumor ? mammary adenocarcinoma ?
    if ($f[1]) { add_phenotype($id{$pname}, $f[1]); }
    #add protein change if available
    if ($f[6] && $f[7] && $f[8]) {
        $f[6] =~ s/^0+//; #remove leading zeros
        print $fh4 "ARdb_$f[0]\tprotEffect\tAR p.$f[7]$f[6]$f[8]\n";
        if (!exists $disease{"ARdb_$f[0]"} && $f[7] ne $f[8]) {
            $disease{"ARdb_$f[0]"} = 'likely to be phenotype-associated';
        }
    }
    if (!exists $disease{"ARdb_$f[0]"} && $f[2] =~ /deletion|insertion/ && $f[3] eq 'exon') {
        $disease{"ARdb_$f[0]"} = 'likely to be phenotype-associated';
    }
    print $fh3 "ARdb_$f[0]\tgeneVarsDis\tomimTitle2\t313700\n";
    $fcnt++;
}else {
    #print $fh3 "$id{$pname}\t$f[0]\t$srcId\n";
    if ($f[1]) { add_phenotype($id{$pname}, $f[1]); }
}
}elsif ($f[6] && defined $f[7] && defined $f[8] && $f[2] eq 'Substitut.') {
#protein substitution
$f[2] = 'substitution';
$f[6] =~ s/^0+//; #remove leading zeros
my $pname = make_sub_name(@f);
my @chr = ('chrX'); #AR gene is on chrX
my @c;
#my @c = convert_aa_number($f[6]);
push(@chr, @c);
if (!$chr[1]) { #unable to convert
    print $bfh $line, "\n"; $bcnt++;
}else {
    $name{$pname}++;
    #adjust to ucsc numbers
    $chr[1]--;
    if ($name{$pname} == 1) {
        if (exists $pkey{"ARdb_$f[0]"}) {
            print "WARNING duplicate ID ARdb_$f[0] for $pname\n";
        }else { $pkey{"ARdb_$f[0]"} = 1; }
        print $fh2 "$chr[0]\t$chr[1]\t$chr[2]\tARdb_$f[0]\t$strand\t$pname\n";
        print $fh "ARdb_$f[0]\t$pname\t$srcId\t$f[2]\t$f[3]\t1\n";
        $id{$pname} = "ARdb_$f[0]";
        print $fh3 "ARdb_$f[0]\tgeneVarsDis\tomimTitle2\t313700\n";
        $fcnt++;
    }
    if ($f[1]) { add_phenotype($id{$pname}, $f[1]); }
}
}else {
    print $bfh $line, "\n"; $bcnt++;
}
}
####End

# Run the external coordinate converter for a genomic position on the
# M20132 reference and return (chrom, position).  Sets the file-level
# $strand as a side effect.  Returns an "ERROR ..." string in slot 0 on
# failure so callers can pattern-match it.
sub convert_number {
my $pos = shift; #genomic position
my @c;
my $myfh;
open($myfh, "$converter $geneFile M20132 $pos 2>&1 |") or die "ERROR Couldn't run convert_coors, $!\n";
while (<$myfh>) {
    if (/ERROR/) { $c[0] = $_; last; }
    /is mapped to (\w+\d*) (\d+) (\+|\-)/;
    $c[1] = $2;
    $c[0] = $1;
    $strand = $3;
}
if (!$c[0]) {
    close $myfh;
    return("ERROR couldn't convert '$pos'", undef);
}
close $myfh or die "ERROR Couldn't finish convert_coors run on $pos got $c[0], $!\n";
return @c;
}
####End

# Map an amino-acid number (Met = 1) to the 1-based genomic start/end of
# its codon, using the exon coordinates listed in the BED gene file.
sub convert_aa_number {
my $pos = shift; #amino acid number Met=1
my $gst;
my $gend;
my $aa = 0;
my $aafh;
my @coors;
open ($aafh, $geneFile) or die "Couldn't open gene file, $!\n";
while (<$aafh>) { #BED: chrom start stop name score strand ..
    chomp;
    if (!defined) { next; }
    # FIX: anchor the comment test; the old unanchored /\s*#/ skipped any
    # line merely containing '#', not just comment lines.
    if (/^\s*#/) { next; } #comment
    my @f = split(/\s+/);
    if (scalar @f < 6) { die "ERROR bad gene file format $_\n"; }
    $f[1]++; #switch to 1 based start
    #AR gene is + strand
    push(@coors, ($f[1]..$f[2]));
}
close $aafh or die "Problem closing geneFile, $!\n";
#A of ATG is at 363 of GenBank file
splice(@coors, 0, 362);
# FIX: the old bound ($i <= scalar @coors) walked past the end of @coors
# and could return undef coordinates from $coors[$i]/$coors[$i+2];
# only iterate over complete codons.
for(my $i=0; $i + 2 < scalar @coors; $i+=3) {
    $aa++; #amino acid position
    if ($pos == $aa) {
        $gst = $coors[$i];
        $gend = $coors[$i+2];
        last;
    }
}
return ($gst, $gend); #both 1 based numbers
}
####End

#this generates an HGVS style name for substitutions
# Prefers a genomic name (g.POSold>new) built from cols 9-11; falls back
# to a protein name (AR p.XxxNYyy) from cols 6-8.
sub make_sub_name {
my @c = @_; #cols 9-11 dna, 6-8 protein
my $name;
if ($c[9] && $c[10] && $c[11]) {
    my $oldnt;
    my $newnt;
    # split into single characters to find the first mismatching base
    my @nt = split(/ */, $c[10]);
    my @nt2 = split(/ */, $c[11]);
    my $i = 0;
    foreach my $n (@nt) {
        if ($n ne $nt2[$i]) {
            $oldnt = $n;
            $newnt = $nt2[$i];
            last;
        }
        $i++;
    }
    if ($oldnt) {
        $name = 'M20132.1 g.' . $c[9] . $oldnt . '>' . $newnt;
    }
}
if (!$name) {
    $name = 'AR p.' . $aa{$c[7]} . $c[6] . $aa{$c[8]};
}
return $name;
}
####End of subroutine make_sub_name

#make a name for deletions
sub make_del_name {
my @c = @_;
my $name;
if ($c[9] && $c[9] =~ /^\d+$/) {
    # single-nucleotide deletion, recognized from the free-text comment column
    if ($c[16] =~ /Single nt\. del|1 nt\. del|1 nt del|1nt. deletion|single nt deletion/) {
        $name = 'M20132.1 g.' . $c[9] . 'del';
    }
    #else { print STDERR "What deleted? $c[9], $c[16]\n"; }
}elsif ($c[9] =~ /^(\d+)\-(\d+)$/ && defined $c[8] && $c[8] eq '0') {
    # whole-codon (3 nt) deletion given as a start-end range
    my $s = $1;
    my $d = $2;
    my $e = $s + 2;
    if ($e =~ /$d$/) {
        $name = 'M20132.1 g.' . $s . '_' . $e . 'del';
    }
    #else {print STDERR "Wrong number? $c[9] $s $e\n"; }
}elsif (defined $c[8] && $c[8] eq '0') {
    # no usable genomic position: name the deleted residue instead
    $name = 'AR p.' . $c[6] . 'del' . $aa{$c[7]};
}else {
    print STDERR "Couldn't do deletion with $c[6] $c[9] $c[16]\n";
}
return $name;
}
####End

#get coordinates from hgvs_name that is built
sub convert_hgvs_name {
my $name = shift;
my @c;
# NOTE(review): the dots in this pattern are unescaped metacharacters;
# harmless in practice but worth confirming against the name format.
if ($name =~ /M20132.1 g.(\d+)_(\d+)/) {
    my $s = $1;
    my $e = $2;
    my $chr;
    ($chr, $s) = convert_number($s);
    ($chr, $e) = convert_number($e);
    @c = ($chr, $s, $e);
}else {
    return("ERROR no coordinates", undef, undef);
}
return @c;
}
####End

#make a name for the insertions
sub make_ins_name {
my @c = @_;
my $name;
if ($c[9]) {
    # HGVS insertions are named between the flanking positions
    $name = 'M20132.1 g.' . ($c[9] - 1) . '_' . $c[9] . 'ins';
    #get nt(s) inserted?
    if ($c[11] !~ /\+/ && length $c[10] != length $c[11]) {
        # subtract the original bases from the new string, leaving the insert
        my @nt = split(/ */, $c[10]);
        foreach my $n (@nt) {
            $c[11] =~ s/$n//;
        }
        if ($c[11] && $c[11] ne '') { $name .= $c[11]; }
    }elsif ($c[11] =~ /\+([ACTG]+)/) {
        $name .= $1;
    }
}else {
    $name = 'AR p.' . $c[6] . 'ins' . $aa{$c[7]};
}
return $name;
}
####End

#make a name for a duplication
sub make_dup_name {
my @c = @_;
my $name;
if ($c[7] && !$c[8]) {
    #must have duplicated this protein
    $name = 'AR p.' . $c[6] . 'dup' . $aa{$c[7]};
}
#for now all others are hand edits using comment
return $name;
}
####End

#add phenotype
# Normalize a free-text phenotype string and write phenoCommon (and, for
# 'Normal', phenoOfficial) attribute rows, once per (id, phenotype) pair.
sub add_phenotype {
my $id = shift;
my $ph = shift;
#clean up phenotypes
if (!defined $id) { return; }
$ph =~ s/>/ /g;
$ph =~ s/\s+/ /g;
$ph =~ s/\s+$//;
$ph =~ s/^\s+//;
$ph =~ s/Cancer/cancer/;
my @p;
if ($ph =~ / and /) {
    @p = split(/ and /, $ph);
    for (my $i = 1; $i <= $#p; $i++) {
        $p[$i] = ucfirst($p[$i]);
    }
}else { $p[0] = $ph; }
#attr table id, classId, nameId, value (255chars)
foreach my $t (@p) {
    if (!$t) { next; }
    if ($t =~ /\?/) { next; } # uncertain phenotype: skip
    if (!exists $phenoId{$id . $t}) {
        $phenoId{$id . $t}++;
        #spell out AIS? Androgen Insensitivity Syndrome? constitutional androgen insensitivity?
        print $fh4 "$id\tphenoCommon\t$t\n";
        if ($t eq 'Normal') {
            print $fh4 "$id\tphenoOfficial\tno phenotype detected [MP:0002169]\n";
        }
        #phenotype of 1 patient doesn't prove association
        #if ($t eq 'Normal' && (!exists $disease{$id} or
        #$disease{$id} =~ /likely/)) {
        #$disease{$id} = 'not phenotype-associated';
        #}elsif ($t eq 'Normal' && $disease{$id} eq 'phenotype-associated') {
        #$disease{$id} = 'both';
        #}elsif ($t ne 'Normal' && (!exists $disease{$id} or
        #$disease{$id} =~ /likely/)) {
        #$disease{$id} = 'phenotype-associated';
        #}elsif ($t ne 'Normal' && $disease{$id} =~ /not phenotype-associated/) {
        #$disease{$id} = 'both';
        #}
    }
    $pheno{$t}++;
}
}
####End

# Run the external sequenceCheck tool on an HGVS name.
# Returns: undef = couldn't check, 0 = sequence mismatch (logged to $bfh),
#          2 = verified match, 1 = no verdict from the tool.
sub sequenceCheck {
my $name = shift @_;
my $verified = 0;
my $bad;
if ($name =~ /'/) { return undef; } #illegal char can't check
open(CFH, "../sequenceCheck $faFile '$name' 2>&1 |") or die "Couldn't run sequenceCheck, $!\n";
while (<CFH>) {
    chomp;
    if (/Sequence doesn't match/) { $bad = $_; }
    elsif (/Sequence matched$/) { $verified++; }
}
## close(CFH);
##
## my $status = $?;
##
## if (WIFEXITED($status)) {
## print "\$? = $status; exited, status=", WEXITSTATUS($status), "\n";
## } elsif (WIFSIGNALED($status)) {
## print "\$? = $status; signalled, signal=", WTERMSIG($status), "\n";
## } elsif (WIFSTOPPED($status)) {
## print "\$? = $status; stopped, signal=", WSTOPSIG($status), "\n";
## } else {
## die "Too bad. I don't know what's going on\n";
## }
close(CFH) or die "Couldn't finish sequenceCheck with ../sequenceCheck $faFile '$name', $!:exit status $?\n";
if ($bad) {
    print $bfh "#$bad\n";
    return 0;
}elsif ($verified) {
    return 2;
}else { return 1; }
}
####End

# Probe the EBI SRS entry for this ID and emit a srcLink row only if the
# page fetches cleanly.  NOTE(review): get() is presumably LWP::Simple's,
# imported at the top of the file — confirm.
sub addLink {
my $id = shift;
my $url = "http://srs.ebi.ac.uk/srsbin/cgi-bin/wgetz?-id+sessionId+-e+[ANDROGENR:'ARID']+-enum+1";
$url =~ s/ARID/$id/;
my $page = get($url);
if ($page && $page !~ /error/i) {
    print $fh3 "ARdb_$id\tsrcLink\t$srcId\t$id\t\n";
}
}
####End

#how to do links:
#http://srs.ebi.ac.uk/srsbin/cgi-bin/wgetz?-id+sessionId+-e+[ANDROGENR:'0002']+-enum+1
#not all entries (<= 0515)
{ "pile_set_name": "Github" }
import unittest

from flatlib import const
from flatlib.chart import Chart
from flatlib.datetime import Datetime
from flatlib.geopos import GeoPos


class ChartTests(unittest.TestCase):
    """Regression tests for flatlib Chart behavior."""

    def setUp(self):
        # Fixed fixture: 2015-03-13 17:00 UTC at 38n32 / 8w54.
        self.date = Datetime('2015/03/13', '17:00', '+00:00')
        self.pos = GeoPos('38n32', '8w54')

    def test_solar_return_hsys(self):
        """Solar return charts must maintain original house system."""
        natal = Chart(self.date, self.pos, hsys=const.HOUSES_MORINUS)
        solar_return = natal.solarReturn(2018)
        self.assertEqual(natal.hsys, solar_return.hsys)
{ "pile_set_name": "Github" }
# Host AP (Intersil Prism2/2.5/3) driver options.
# Fix: the HOSTAP select list contained a duplicate "select CRYPTO_ECB";
# the redundant second occurrence has been removed.
config HOSTAP
	tristate "IEEE 802.11 for Host AP (Prism2/2.5/3 and WEP/TKIP/CCMP)"
	select WIRELESS_EXT
	select WEXT_SPY
	select WEXT_PRIV
	select CRYPTO
	select CRYPTO_ARC4
	select CRYPTO_ECB
	select CRYPTO_AES
	select CRYPTO_MICHAEL_MIC
	select CRC32
	select LIB80211
	select LIB80211_CRYPT_WEP
	select LIB80211_CRYPT_TKIP
	select LIB80211_CRYPT_CCMP
	---help---
	  Shared driver code for IEEE 802.11b wireless cards based on
	  Intersil Prism2/2.5/3 chipset. This driver supports so called
	  Host AP mode that allows the card to act as an IEEE 802.11
	  access point.

	  See <http://hostap.epitest.fi/> for more information about the
	  Host AP driver configuration and tools. This site includes
	  information and tools (hostapd and wpa_supplicant) for WPA/WPA2
	  support.

	  This option includes the base Host AP driver code that is shared by
	  different hardware models. You will also need to enable support for
	  PLX/PCI/CS version of the driver to actually use the driver.

	  The driver can be compiled as a module and it will be called
	  hostap.

config HOSTAP_FIRMWARE
	bool "Support downloading firmware images with Host AP driver"
	depends on HOSTAP
	---help---
	  Configure Host AP driver to include support for firmware image
	  download. This option by itself only enables downloading to the
	  volatile memory, i.e. the card RAM. This option is required to
	  support cards that don't have firmware in flash, such as D-Link
	  DWL-520 rev E and D-Link DWL-650 rev P.

	  Firmware image downloading needs a user space tool, prism2_srec.
	  It is available from http://hostap.epitest.fi/.

config HOSTAP_FIRMWARE_NVRAM
	bool "Support for non-volatile firmware download"
	depends on HOSTAP_FIRMWARE
	---help---
	  Allow Host AP driver to write firmware images to the non-volatile
	  card memory, i.e. flash memory that survives power cycling.
	  Enable this option if you want to be able to change card firmware
	  permanently.

	  Firmware image downloading needs a user space tool, prism2_srec.
	  It is available from http://hostap.epitest.fi/.

config HOSTAP_PLX
	tristate "Host AP driver for Prism2/2.5/3 in PLX9052 PCI adaptors"
	depends on PCI && HOSTAP
	---help---
	  Host AP driver's version for Prism2/2.5/3 PC Cards in PLX9052 based
	  PCI adaptors.

	  "Host AP support for Prism2/2.5/3 IEEE 802.11b" is required for this
	  driver and its help text includes more information about the Host AP
	  driver.

	  The driver can be compiled as a module and will be named
	  hostap_plx.

config HOSTAP_PCI
	tristate "Host AP driver for Prism2.5 PCI adaptors"
	depends on PCI && HOSTAP
	---help---
	  Host AP driver's version for Prism2.5 PCI adaptors.

	  "Host AP support for Prism2/2.5/3 IEEE 802.11b" is required for this
	  driver and its help text includes more information about the Host AP
	  driver.

	  The driver can be compiled as a module and will be named
	  hostap_pci.

config HOSTAP_CS
	tristate "Host AP driver for Prism2/2.5/3 PC Cards"
	depends on PCMCIA && HOSTAP
	---help---
	  Host AP driver's version for Prism2/2.5/3 PC Cards.

	  "Host AP support for Prism2/2.5/3 IEEE 802.11b" is required for this
	  driver and its help text includes more information about the Host AP
	  driver.

	  The driver can be compiled as a module and will be named
	  hostap_cs.
{ "pile_set_name": "Github" }
// SPDX-License-Identifier: GPL-2.0 /* * u_hid.h * * Utility definitions for the hid function * * Copyright (c) 2014 Samsung Electronics Co., Ltd. * http://www.samsung.com * * Author: Andrzej Pietrasiewicz <andrzej.p@samsung.com> */ #ifndef U_HID_H #define U_HID_H #include <linux/usb/composite.h> struct f_hid_opts { struct usb_function_instance func_inst; int minor; unsigned char subclass; unsigned char protocol; unsigned short report_length; unsigned short report_desc_length; unsigned char *report_desc; bool report_desc_alloc; /* * Protect the data form concurrent access by read/write * and create symlink/remove symlink. */ struct mutex lock; int refcnt; }; int ghid_setup(struct usb_gadget *g, int count); void ghid_cleanup(void); #endif /* U_HID_H */
{ "pile_set_name": "Github" }
/**
 * Designed and developed by Aidan Follestad (@afollestad)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.afollestad.aesthetic.views

import android.content.Context
import android.util.AttributeSet
import androidx.core.widget.NestedScrollView
import com.afollestad.aesthetic.Aesthetic.Companion.get
import com.afollestad.aesthetic.utils.EdgeGlowUtil.setEdgeGlowColor
import com.afollestad.aesthetic.utils.distinctToMainThread
import com.afollestad.aesthetic.utils.subscribeTo
import com.afollestad.aesthetic.utils.unsubscribeOnDetach

/** @author Aidan Follestad (afollestad) */
class AestheticNestedScrollView(
  context: Context,
  attrs: AttributeSet? = null
) : NestedScrollView(context, attrs) {

  /** Tints this view's over-scroll edge-glow with the given color. */
  private fun applyEdgeGlowColor(color: Int) {
    setEdgeGlowColor(this, color)
  }

  override fun onAttachedToWindow() {
    super.onAttachedToWindow()
    // Track the theme accent color while attached; the subscription is
    // released automatically once the view detaches.
    get().colorAccent()
        .distinctToMainThread()
        .subscribeTo(::applyEdgeGlowColor)
        .unsubscribeOnDetach(this)
  }
}
{ "pile_set_name": "Github" }
--  Shared scalar and record type declarations for SPARK-checked units.
package Types is
   pragma SPARK_Mode;

   --  Unconstrained Float aliases (distinct names for documentation only).
   subtype Float_1_T is Float;
   subtype Float_2_T is Float;

   --  7-digit float constrained to +/-180.0 — presumably degrees; confirm.
   type Float_Range_T is digits 7 range -180.0 .. 180.0;

   --  Bounded integer used for the record components below.
   type Int_Range_T is range -30_000 .. 30_000;

   --  Aggregate of one ranged float and four ranged integers.
   type Record_T is record
      A : Float_Range_T;
      B : Int_Range_T;
      C : Int_Range_T;
      D : Int_Range_T;
      E : Int_Range_T;
   end record;

end Types;
{ "pile_set_name": "Github" }
<?php

/*
 * This file is part of Respect/Validation.
 *
 * (c) Alexandre Gomes Gaigalas <alexandre@gaigalas.net>
 *
 * For the full copyright and license information, please view the "LICENSE.md"
 * file that was distributed with this source code.
 */

namespace Respect\Validation\Rules;

/**
 * @group rule
 * @covers Respect\Validation\Rules\PostalCode
 * @covers Respect\Validation\Exceptions\PostalCodeException
 */
class PostalCodeTest extends \PHPUnit_Framework_TestCase
{
    public function testShouldUsePatternAccordingToCountryCode()
    {
        $countryCode = 'BR';

        $validator = new PostalCode($countryCode);

        // The compiled regex must be the per-country entry from the table.
        $this->assertEquals($validator->postalCodes[$countryCode], $validator->regex);
    }

    public function testShouldNotBeCaseSensitiveWhenChoosingPatternAccordingToCountryCode()
    {
        $upperCase = new PostalCode('BR');
        $lowerCase = new PostalCode('br');

        $this->assertEquals($upperCase->regex, $lowerCase->regex);
    }

    public function testShouldUseDefaultPatternWhenCountryCodeDoesNotHavePostalCode()
    {
        // Zimbabwe has no postal-code entry, so the fallback pattern applies.
        $validator = new PostalCode('ZW');

        $this->assertEquals(PostalCode::DEFAULT_PATTERN, $validator->regex);
    }

    public function testShouldValidateEmptyStringsWhenUsingDefaultPattern()
    {
        $validator = new PostalCode('ZW');

        $this->assertTrue($validator->validate(''));
    }

    public function testShouldNotValidateNonEmptyStringsWhenUsingDefaultPattern()
    {
        $validator = new PostalCode('ZW');

        $this->assertFalse($validator->validate(' '));
    }

    /**
     * @expectedException Respect\Validation\Exceptions\ComponentException
     * @expectedExceptionMessage Cannot validate postal code from "Whatever" country
     */
    public function testShouldThrowsExceptionWhenCountryCodeIsNotValid()
    {
        new PostalCode('Whatever');
    }

    /**
     * @dataProvider validPostalCodesProvider
     */
    public function testShouldValidatePatternAccordingToTheDefinedCountryCode($countryCode, $postalCode)
    {
        $validator = new PostalCode($countryCode);

        $this->assertTrue($validator->validate($postalCode));
    }

    public function validPostalCodesProvider()
    {
        return [
            ['BR', '02179-000'],
            ['BR', '02179000'],
            ['GB', 'GIR 0AA'],
            ['GB', 'PR1 9LY'],
            ['US', '02179'],
            ['YE', ''],
            ['PL', '99-300'],
        ];
    }

    /**
     * @dataProvider invalidPostalCodesProvider
     */
    public function testShouldNotValidatePatternAccordingToTheDefinedCountryCode($countryCode, $postalCode)
    {
        $validator = new PostalCode($countryCode);

        $this->assertFalse($validator->validate($postalCode));
    }

    /**
     * @expectedException Respect\Validation\Exceptions\PostalCodeException
     * @expectedExceptionMessage "02179-000" must be a valid postal code on "US"
     */
    public function testShouldThrowsPostalCodeExceptionWhenValidationFails()
    {
        $validator = new PostalCode('US');
        $validator->check('02179-000');
    }

    public function invalidPostalCodesProvider()
    {
        return [
            ['BR', '02179'],
            ['BR', '02179.000'],
            ['GB', 'GIR 00A'],
            ['GB', 'GIR0AA'],
            ['GB', 'PR19LY'],
            ['US', '021 79'],
            ['YE', '02179'],
            ['PL', '99300'],
        ];
    }
}
{ "pile_set_name": "Github" }
{ "comments": [ { "content": "# Internet Tethering\n\nThere is a large class of applications known as IoT (Internet of Things) when the hardware connects to the Internet to send or receive data from some server. For example, log the temperature and moisture for smart greenhouses, get the current time, weather, send third-party service API request, and so on.\n\nIn this chapter, we are going to fetch the current date/time from the XOD Cloud API.\n\nThe `xod-cloud/basics/now` node sends a request to the XOD Cloud, parses the response, and outputs it as a `datetime` type value.\n\nThis node has an input pin `INET`. This pin has to be connected to an internet provider. The Internet can be provided:\n\n- by the controller board\n (such as ESP8266)\n- by an extension module\n (such as W5500 Ethernet Shield)\n- **by your PC**. This trick is known as Internet tethering and works in simulation and debug modes", "id": "HyGHq2IhU", "position": { "units": "slots", "x": 1, "y": 0 }, "size": { "height": 6, "units": "slots", "width": 7 } }, { "content": "Note, we’ve placed a `select` node here to get a reasonable value until we receive the result from the cloud. Otherwise, we would get a default datetime value, which is equal to `1970-01-01 00:00:00`, and it might be confusing.\n\n## Excercise\nUpdate the current datetime value by sending pulses.\n1. Place a `tweak-pulse` node.\n2. Link it to the `INIT` pin\n3. To make a value in the `watch` node properly, you need to provide the third possible value `\"Waiting pulse\"`. Drag the handle of the `select` node to the right to add one more pair of input pins\n4. Bind `\"Waiting pulse\"` and \"On Boot\" to the latest pair of pins.\n5. Link `tweak-pulse` to the `S2` pin\n6. 
Simulate the patch and send pulse using the `tweak-pulse` node.", "id": "S1aXcWd6U", "position": { "units": "slots", "x": 16, "y": 1 }, "size": { "height": 5, "units": "slots", "width": 7 } } ], "links": [ { "id": "Ska3H5DCL", "input": { "nodeId": "H1Bjn28h8", "pinKey": "r1lFErc828" }, "output": { "nodeId": "HyhhS5wR8", "pinKey": "SyveJHHBL" } }, { "id": "r1Mpo-_TI", "input": { "nodeId": "SkxnjWOp8", "pinKey": "S10qrR6UZ" }, "output": { "nodeId": "H1v2hnL3L", "pinKey": "BkvDEzR1Om" } }, { "id": "rJX6sWdpU", "input": { "nodeId": "SkxnjWOp8", "pinKey": "rkmiHCaIZ" }, "output": { "nodeId": "H1Bjn28h8", "pinKey": "Sk0OelypI" } }, { "id": "ry9nn2Lh8", "input": { "nodeId": "H1v2hnL3L", "pinKey": "Hy_D4G0JOX" }, "output": { "nodeId": "H1Bjn28h8", "pinKey": "ryFe8qI3U" } }, { "id": "ryH3oW_68", "input": { "nodeId": "B1an3hU3L", "pinKey": "HkXK-dGob" }, "output": { "nodeId": "SkxnjWOp8", "pinKey": "S1yaHC6UW" } } ], "nodes": [ { "id": "B1an3hU3L", "position": { "units": "slots", "x": 10, "y": 4 }, "size": { "height": 1, "units": "slots", "width": 5 }, "type": "xod/debug/watch" }, { "boundLiterals": { "HyF4Hq828": "On Boot" }, "id": "H1Bjn28h8", "position": { "units": "slots", "x": 10, "y": 1 }, "type": "xod-cloud/basics/now" }, { "id": "H1v2hnL3L", "position": { "units": "slots", "x": 10, "y": 2 }, "type": "xod/datetime/format-timestamp" }, { "id": "HyhhS5wR8", "position": { "units": "slots", "x": 10, "y": 0 }, "type": "xod/debug/tethering-inet" }, { "boundLiterals": { "rJUjrCTUb": "On Boot", "rygjH06LW": "\"Fetching...\"" }, "id": "SkxnjWOp8", "position": { "units": "slots", "x": 10, "y": 3 }, "type": "xod/core/select" } ] }
{ "pile_set_name": "Github" }
# Vendored build rule for github.com/PuerkitoBio/urlesc; importmap keeps the
# vendored copy distinct from any top-level dependency with the same path.
load("@io_bazel_rules_go//go:def.bzl", "go_library")

go_library(
    name = "go_default_library",
    srcs = ["urlesc.go"],
    importmap = "k8s.io/kops/vendor/github.com/PuerkitoBio/urlesc",
    importpath = "github.com/PuerkitoBio/urlesc",
    visibility = ["//visibility:public"],
)
{ "pile_set_name": "Github" }
package com.bin.david.smartchart; import android.content.res.Resources; import android.graphics.DashPathEffect; import android.os.Bundle; import android.support.v7.app.AppCompatActivity; import android.widget.Toast; import com.daivd.chart.component.axis.BaseAxis; import com.daivd.chart.component.base.IComponent; import com.daivd.chart.core.Bar3DChart; import com.daivd.chart.data.ChartData; import com.daivd.chart.provider.component.level.LevelLine; import com.daivd.chart.data.BarData; import com.daivd.chart.data.style.FontStyle; import com.daivd.chart.data.style.PointStyle; import com.daivd.chart.listener.OnClickColumnListener; import com.daivd.chart.provider.component.mark.BubbleMarkView; import com.daivd.chart.provider.component.point.LegendPoint; import com.daivd.chart.provider.component.point.Point; import java.util.ArrayList; import java.util.List; public class Bar3DChartActivity extends AppCompatActivity { private Bar3DChart bar3DChart; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_3dbar); bar3DChart = (Bar3DChart) findViewById(R.id.columnChart); Resources res = getResources(); FontStyle.setDefaultTextSpSize(this,12); List<String> chartYDataList = new ArrayList<>(); chartYDataList.add("Tokyo"); chartYDataList.add("Paris"); chartYDataList.add("Hong Kong"); chartYDataList.add("Singapore"); List<BarData> ColumnDatas = new ArrayList<>(); ArrayList<Double> tempList1 = new ArrayList<>(); tempList1.add(26d); tempList1.add(35d); tempList1.add(40d); tempList1.add(10d); BarData columnData1 = new BarData("Temperature","℃",getResources().getColor(R.color.arc3),tempList1); ArrayList<Double> humidityList = new ArrayList<>(); humidityList.add(60d); humidityList.add(50d); humidityList.add(30d); humidityList.add(65d); BarData columnData2 = new BarData("Humidity","RH%",getResources().getColor(R.color.arc2),humidityList); ColumnDatas.add(columnData1); ColumnDatas.add(columnData2); 
ChartData<BarData> chartData = new ChartData<>("3D bar chart",chartYDataList,ColumnDatas); bar3DChart.setChartData(chartData); bar3DChart.startChartAnim(1000); bar3DChart.setZoom(true); bar3DChart.setShowChartName(true); //设置标题样式 FontStyle fontStyle = bar3DChart.getChartTitle().getFontStyle(); fontStyle.setTextColor(res.getColor(R.color.arc23)); fontStyle.setTextSpSize(this,15); bar3DChart.getProvider().setOpenMark(true); bar3DChart.getProvider().setOpenCross(true); LevelLine levelLine = new LevelLine(20); DashPathEffect effects = new DashPathEffect(new float[] { 1, 2, 4, 8}, 1); levelLine.getLineStyle().setWidth(this,1).setColor(res.getColor(R.color.arc23)).setEffect(effects); bar3DChart.getProvider().addLevelLine(levelLine); bar3DChart.getProvider().setMarkView(new BubbleMarkView(this)); LegendPoint legendPoint = (LegendPoint) bar3DChart.getLegend().getPoint(); PointStyle style = legendPoint.getPointStyle(); style.setShape(PointStyle.CIRCLE); BaseAxis vaxis = bar3DChart.getLeftVerticalAxis(); vaxis.setDrawGrid(true); bar3DChart.getLeftVerticalAxis().getGridStyle().setEffect(effects); vaxis.getGridStyle().setColor(R.color.arc_inteval); bar3DChart.getLegend().setDirection(IComponent.TOP); bar3DChart.setOnClickColumnListener(new OnClickColumnListener<BarData>() { @Override public void onClickColumn(BarData lineData, int position) { Toast.makeText(Bar3DChartActivity.this,lineData.getChartYDataList().get(position)+lineData.getUnit(),Toast.LENGTH_SHORT).show(); } }); } }
{ "pile_set_name": "Github" }