repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
hajime9652/observations | observations/r/tobacco.py | <gh_stars>100-1000
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
import numpy as np
import os
import sys
from observations.util import maybe_download_and_extract
def tobacco(path):
  """Households Tobacco Budget Share.

  A Belgian cross-section from 1995-96 with 2724 household observations.
  Source: National Institute of Statistics (NIS), Belgium.

  Columns:
    occupation: factor (bluecol, whitecol, inactself), the last level
      being inactive and self-employed.
    region: factor (flanders, wallon, brussels).
    nkids: number of kids of more than two years old.
    nkids2: number of kids of less than two years old.
    nadults: number of adults in household.
    lnx: log of total expenditures.
    stobacco: budget share of tobacco.
    salcohol: budget share of alcohol.
    age: age in brackets (0-4).

  Args:
    path: str.
      Path to a directory which either already stores the file or into
      which it will be downloaded and extracted. Filename is
      `tobacco.csv`.

  Returns:
    Tuple of np.ndarray `x_train` with 2724 rows and 9 columns and
    dictionary `metadata` of column headers (feature names).
  """
  # Imported lazily so the package does not hard-require pandas at import time.
  import pandas as pd
  path = os.path.expanduser(path)
  filename = 'tobacco.csv'
  csv_path = os.path.join(path, filename)
  # Fetch the dataset only on first use; subsequent calls read the cached copy.
  if not os.path.exists(csv_path):
    url = 'http://dustintran.com/data/r/Ecdat/Tobacco.csv'
    maybe_download_and_extract(path, url,
                               save_file_name='tobacco.csv',
                               resume=False)
  data = pd.read_csv(csv_path, index_col=0, parse_dates=True)
  return data.values, {'columns': data.columns}
|
Fmendoza91/Proyecto-Final | www/wordpress/wp-content/themes/astra/hover.js | <reponame>Fmendoza91/Proyecto-Final<filename>www/wordpress/wp-content/themes/astra/hover.js
jQuery(document).ready(function($) {
  // Remember the original full-size product image URL so it can be
  // restored when the pointer leaves a thumbnail.
  var originalSrc = $( 'a.woocommerce-main-image' ).attr( 'href' );

  // While hovering a gallery thumbnail, preview it as the main image;
  // on mouse-out, put the original image back.
  $(".thumbnails a").hover(
    function() {
      $('.woocommerce-main-image img').attr('src', $(this).attr('href'));
    },
    function() {
      $('.woocommerce-main-image img').attr('src', originalSrc);
    }
  );
});
shivalinga/Gooru-Web | src/main/java/org/ednovo/gooru/server/service/SearchServiceImpl.java | <gh_stars>0
/*******************************************************************************
* Copyright 2013 Ednovo d/b/a Gooru. All rights reserved.
*
* http://www.goorulearning.org/
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
******************************************************************************/
/**
*
*/
package org.ednovo.gooru.server.service;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import org.ednovo.gooru.client.PlaceTokens;
import org.ednovo.gooru.client.service.SearchService;
import org.ednovo.gooru.server.annotation.ServiceURL;
import org.ednovo.gooru.server.deserializer.AutoCompleteDeSerializer;
import org.ednovo.gooru.server.deserializer.AutoSearchKeyWordDeSerializer;
import org.ednovo.gooru.server.deserializer.CollectionItemsResultDeSerializer;
import org.ednovo.gooru.server.deserializer.CollectionSearchResultDeSerializer;
import org.ednovo.gooru.server.deserializer.ResourceSearchResultDeSerializer;
import org.ednovo.gooru.server.deserializer.SearchFilterDeSerialier;
import org.ednovo.gooru.server.deserializer.ShareDeSerializer;
import org.ednovo.gooru.server.request.JsonResponseRepresentation;
import org.ednovo.gooru.server.request.ServiceProcessor;
import org.ednovo.gooru.server.request.ShareUrlToken;
import org.ednovo.gooru.server.request.UrlToken;
import org.ednovo.gooru.server.serializer.JsonDeserializer;
import org.ednovo.gooru.shared.exception.GwtException;
import org.ednovo.gooru.shared.exception.ServerDownException;
import org.ednovo.gooru.shared.model.code.CodeDo;
import org.ednovo.gooru.shared.model.code.StandardsLevel1DO;
import org.ednovo.gooru.shared.model.code.StandardsLevel2DO;
import org.ednovo.gooru.shared.model.code.StandardsLevel3DO;
import org.ednovo.gooru.shared.model.code.StandardsLevel4DO;
import org.ednovo.gooru.shared.model.library.ProfileLibraryListDo;
import org.ednovo.gooru.shared.model.search.AutoSuggestKeywordSearchDo;
import org.ednovo.gooru.shared.model.search.CollectionItemSearchResultDo;
import org.ednovo.gooru.shared.model.search.CollectionSearchResultDo;
import org.ednovo.gooru.shared.model.search.ResourceSearchResultDo;
import org.ednovo.gooru.shared.model.search.SearchDo;
import org.ednovo.gooru.shared.model.search.SearchFilterDo;
import org.ednovo.gooru.shared.util.StringUtil;
import org.restlet.ext.json.JsonRepresentation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.fasterxml.jackson.core.type.TypeReference;
/**
* @author <NAME>
*
*/
/**
 * Server-side implementation of {@link SearchService}.
 *
 * Builds REST URLs against the configured search/REST endpoints (helpers such
 * as getSearchEndPoint()/getRestEndPoint() come from {@code BaseServiceImpl}),
 * issues the HTTP calls through {@link ServiceProcessor}, and hands the JSON
 * responses to the injected deserializers.
 */
@Service("searchService")
@ServiceURL("/searchService")
public class SearchServiceImpl extends BaseServiceImpl implements SearchService {

    private static final long serialVersionUID = 4286188874651640611L;

    // Literal tokens used when assembling request URLs and parameter maps.
    private static final String RESOURCE = "resource";
    private static final String SCOLLECTION = "scollection";
    private static final String SINGLE = "single";
    private static final String REAL_URL = "realUrl";
    private static final String STANDARD = "standard";
    private static final String TRUE = "true";
    private static final String MY_STRING = "my";
    private static final String TYPE = "type";
    private static final String SHARETYPE = "shareType";
    private static final String CLASSPAGE = "classpage";
    private static final String SHORTEN_URL = "shortenUrl";
    private static final String FLT_CODE_ID = "flt.codeId";
    private static final String COURSE_CODE_ID = "id";
    private static final String FLT_SOURCE_CODE_ID = "flt.sourceCodeId";
    private static final String COLLECTION_EDIT_EVENT = "collection-edit";
    private static final String HTTPS = "https";
    private static final String HTTP = "http";

    // Deserializers that map raw JSON responses onto shared model DOs.
    @Autowired
    private CollectionSearchResultDeSerializer collectionSearchResultDeSerializer;
    @Autowired
    private ResourceSearchResultDeSerializer resourceSearchResultDeSerializer;
    @Autowired
    private CollectionItemsResultDeSerializer collectionItemsResultDeSerializer;
    @Autowired
    private AutoCompleteDeSerializer autoCompleteDeSerializer;
    @Autowired
    private SearchFilterDeSerialier searchFilterDeSerialier;
    @Autowired
    private ShareDeSerializer shareDeSerializer;
    @Autowired
    private AutoSearchKeyWordDeSerializer autoSearchKeyWordDeSerializer;

    // Last search terms, reused by the "no result" suggestion calls below.
    // NOTE(review): mutable instance state on a Spring singleton service looks
    // unsafe under concurrent requests — confirm before relying on it.
    String query;
    String collectionQuery;

    /**
     * Fetches the available search filters for the given search type
     * ("resource" for resource search, "scollection" for everything else).
     */
    @Override
    public SearchFilterDo getSearchFilters(String type) {
        if (type.equalsIgnoreCase(PlaceTokens.RESOURCE_SEARCH)) {
            type = RESOURCE;
        } else {
            type = SCOLLECTION;
        }
        JsonRepresentation jsonRep = null;
        String url = UrlGenerator.generateUrl(getSearchEndPoint(), UrlToken.SEARCH_FILTER, getLoggedInSessionToken(), type);
        JsonResponseRepresentation jsonResponseRep = ServiceProcessor.get(url, getSearchUsername(), getSearchPassword());
        jsonRep = jsonResponseRep.getJsonRepresentation();
        return searchFilterDeSerialier.deserializeSearchFilter(jsonRep, type);
    }

    /**
     * Runs a resource search. Escapes '&amp;' in filter values, rewrites the
     * UI "category" filter into the API's "flt.resourceFormat" parameter, and
     * deserializes the hits into {@code searchDo}. Any failure yields an
     * empty result object instead of an exception.
     */
    @Override
    public SearchDo<ResourceSearchResultDo> getResourceSearchResults(SearchDo<ResourceSearchResultDo> searchDo) {
        SearchDo<ResourceSearchResultDo> searchDOEmpty = new SearchDo<ResourceSearchResultDo>();
        String query1 = searchDo.getSearchQuery();
        // Remember the term for getSuggestSearchResultForResourceNoResult().
        query = query1;
        try {
            if (searchDo.getFilters() != null) {
                // Pre-escape ampersands so filter values survive URL assembly.
                for (String key : searchDo.getFilters().keySet()) {
                    String value = searchDo.getFilters().get(key);
                    value = value.replaceAll("&", "%26");
                    searchDo.getFilters().put(key, value);
                }
            }
            JsonRepresentation jsonRep = null;
            Map<String, String> filtersMap = searchDo.getFilters();
            if (filtersMap != null) {
                // Map the UI-facing "category" filter onto the API parameter:
                // "All" means no format filter; "Website" is called "webpage"
                // by the backend.
                String category = filtersMap.get("category");
                if (category != null && category.equalsIgnoreCase("All")) {
                    filtersMap.remove("category");
                }
                else if (category != null) {
                    if (category.equalsIgnoreCase("Website")) {
                        category = category.replaceAll("Website", "webpage");
                        filtersMap.remove("category");
                        filtersMap.put("flt.resourceFormat", category);
                    }
                    else {
                        filtersMap.remove("category");
                        filtersMap.put("flt.resourceFormat", category);
                    }
                }
            }
            String url = UrlGenerator.generateUrl(getSearchEndPoint(), UrlToken.RESOURCE_SEARCH, filtersMap, getLoggedInSessionToken(), query, searchDo.getPageNum() + "", searchDo.getPageSize() + "", SINGLE, "false", TRUE, TRUE);
            if (getSearchEndPoint().contains(HTTPS)) {
                url = appendHttpsURL(url);
            }
            System.out.println("getResourceSearchResults:::" + url);
            JsonResponseRepresentation jsonResponseRep = ServiceProcessor.get(url, getSearchUsername(), getSearchPassword());
            jsonRep = jsonResponseRep.getJsonRepresentation();
            try {
                resourceSearchResultDeSerializer.deserialize(jsonRep, searchDo);
            }
            catch (Exception e)
            {
                // NOTE(review): deserialization failures are swallowed; the
                // partially-populated searchDo is still returned.
            }
            return searchDo;
        } catch (Exception e) {
            // NOTE(review): swallowed; callers receive an empty SearchDo.
        }
        return searchDOEmpty;
    }

    /** Appends the protocol-support hint expected by the search backend. */
    private String appendHttpsURL(String url) {
        url = url + "&protocolSupported=http,https";
        return url;
    }

    /**
     * Runs a collection search; same escaping and error-swallowing behavior
     * as {@link #getResourceSearchResults}.
     */
    @Override
    public SearchDo<CollectionSearchResultDo> getCollectionSearchResults(SearchDo<CollectionSearchResultDo> searchDo) {
        SearchDo<CollectionSearchResultDo> searchDOEmpty = new SearchDo<CollectionSearchResultDo>();
        String query1 = searchDo.getSearchQuery();
        // Remembered for later "no result" collection suggestions.
        collectionQuery = query1;
        try {
            if (searchDo.getFilters() != null) {
                for (String key : searchDo.getFilters().keySet()) {
                    String value = searchDo.getFilters().get(key);
                    value = value.replaceAll("&", "%26");
                    searchDo.getFilters().put(key, value);
                }
            }
            JsonRepresentation jsonRep = null;
            String url = UrlGenerator.generateUrl(getSearchEndPoint(), UrlToken.SIMPLE_COLLECTION_SEARCH, searchDo.getFilters(), getLoggedInSessionToken(), collectionQuery, searchDo.getPageNum() + "", searchDo.getPageSize() + "", MY_STRING);
            if (getSearchEndPoint().contains(HTTPS)) {
                url = appendHttpsURL(url);
            }
            System.out.println("getCollectionSearchResults.." + url);
            JsonResponseRepresentation jsonResponseRep = ServiceProcessor.get(url, getSearchUsername(), getSearchPassword());
            jsonRep = jsonResponseRep.getJsonRepresentation();
            collectionSearchResultDeSerializer.deserialize(jsonRep, searchDo);
            return searchDo;
        } catch (Exception e) {
            // NOTE(review): swallowed; callers receive an empty SearchDo.
        }
        return searchDOEmpty;
    }

    /** Lists the items contained in the collection with the given id. */
    @Override
    public SearchDo<CollectionItemSearchResultDo> getCollectionItems(String collectionId) throws GwtException {
        JsonRepresentation jsonRep = null;
        SearchDo<CollectionItemSearchResultDo> searchDo = new SearchDo<CollectionItemSearchResultDo>();
        String url = UrlGenerator.generateUrl(getSearchEndPoint(), UrlToken.COLLECTION_ITEMS_LIST, collectionId, getLoggedInSessionToken());
        JsonResponseRepresentation jsonResponseRep = ServiceProcessor.get(url, getSearchUsername(), getSearchPassword());
        jsonRep = jsonResponseRep.getJsonRepresentation();
        collectionItemsResultDeSerializer.deserializeCollectionItems(jsonRep, searchDo);
        return searchDo;
    }

    /** Lists the collections that contain the resource named in the query. */
    @Override
    public SearchDo<CollectionSearchResultDo> getResourceCollections(SearchDo<CollectionSearchResultDo> searchDo) {
        JsonRepresentation jsonRep = null;
        String url = UrlGenerator.generateUrl(getSearchEndPoint(), UrlToken.RESOURCE_COLLECTION_LIST, getLoggedInSessionToken(), searchDo.getPageNum() + "", searchDo.getPageSize() + "", searchDo.getSearchQuery());
        JsonResponseRepresentation jsonResponseRep = ServiceProcessor.get(url, getSearchUsername(), getSearchPassword());
        jsonRep = jsonResponseRep.getJsonRepresentation();
        collectionSearchResultDeSerializer.deserialize(jsonRep, searchDo);
        return searchDo;
    }

    /** Auto-suggests resource sources matching the (URL-encoded) query. */
    @Override
    public SearchDo<String> getSuggestSource(SearchDo<String> searchDo) {
        JsonRepresentation jsonRep = null;
        // NOTE(review): single-argument URLEncoder.encode uses the platform
        // default charset — confirm UTF-8 is intended.
        String url = UrlGenerator.generateUrl(getSearchEndPoint(), UrlToken.SEARCH_SUGGEST_SOURCE, getLoggedInSessionToken(), URLEncoder.encode(searchDo.getSearchQuery()), searchDo.getPageSize() + "", searchDo.getPageNum() + "");
        JsonResponseRepresentation jsonResponseRep = ServiceProcessor.get(url, getSearchUsername(), getSearchPassword());
        jsonRep = jsonResponseRep.getJsonRepresentation();
        searchDo.setSearchResults(autoCompleteDeSerializer.deserializeSource(jsonRep));
        return searchDo;
    }

    /**
     * Auto-suggests curriculum standards. Falls back to the "standard" type
     * when none is set; an optional flt.codeId filter narrows the results.
     */
    @Override
    public SearchDo<CodeDo> getSuggestStandard(SearchDo<CodeDo> searchDo) {
        JsonRepresentation jsonRep = null;
        String url = UrlGenerator.generateUrl(getSearchEndPoint(), UrlToken.SEARCH_SUGGEST_STANDARD, searchDo.getType() != null ? searchDo.getType() : STANDARD, getLoggedInSessionToken(), searchDo.getSearchQuery(), searchDo.getPageSize() + "", searchDo.getPageNum() + "");
        if (searchDo.getFilters() != null && searchDo.getFilters().size() > 0) {
            url = url + "&" + FLT_CODE_ID + "=" + searchDo.getFilters().get(FLT_CODE_ID);
        }
        JsonResponseRepresentation jsonResponseRep = ServiceProcessor.get(url, getSearchUsername(), getSearchPassword());
        jsonRep = jsonResponseRep.getJsonRepresentation();
        searchDo.setSearchResults(autoCompleteDeSerializer.deserializeStandards(jsonRep));
        return searchDo;
    }

    /** Auto-suggests standards restricted to a course id ("id" filter). */
    @Override
    public SearchDo<CodeDo> getSuggestStandardByFilterCourseId(SearchDo<CodeDo> searchDo) {
        JsonRepresentation jsonRep = null;
        String url = UrlGenerator.generateUrl(getRestEndPoint(), UrlToken.SUGGEST_STANDARD_BY_FILTER, getLoggedInSessionToken(), searchDo.getSearchQuery());
        if (searchDo.getFilters() != null && searchDo.getFilters().size() > 0) {
            url = url + "&" + COURSE_CODE_ID + "=" + searchDo.getFilters().get(COURSE_CODE_ID);
        }
        System.out.println("getSuggestStandardByFilterCourseId:::::::" + url);
        JsonResponseRepresentation jsonResponseRep = ServiceProcessor.get(url, getSearchUsername(), getSearchPassword());
        jsonRep = jsonResponseRep.getJsonRepresentation();
        searchDo.setSearchResults(autoCompleteDeSerializer.deserializeStandards(jsonRep));
        return searchDo;
    }

    /**
     * Builds the real share/embed URL for a resource, classpage, or
     * collection and asks the REST endpoint for a shortened version of it.
     * When the incoming request is HTTPS the shortened URL is rewritten to
     * HTTPS as well.
     */
    @Override
    public Map<String, String> getShortenShareUrl(String contentGooruOid, Map<String, String> params) {
        JsonRepresentation jsonRep = null;
        Map<String, String> shortenUrl = new HashMap<String, String>();
        // Pick the play URL template by content type and share mode
        // (embed vs. plain share).
        if (params.get(TYPE).equalsIgnoreCase(PlaceTokens.RESOURCE_SEARCH)) {
            if (params.get(SHARETYPE).equalsIgnoreCase("embed")) {
                params.put(REAL_URL, UrlGenerator.generateUrl(getHomeEndPoint() + "/" + ShareUrlToken.RESOURCE_PLAY_URL.getUrl() + "%26embed=true", contentGooruOid, RESOURCE));
            } else {
                params.put(REAL_URL, UrlGenerator.generateUrl(getHomeEndPoint() + "/" + ShareUrlToken.RESOURCE_PLAY_URL.getUrl() + "%26share=true", contentGooruOid, RESOURCE));
            }
        } else if (params.get(TYPE).equalsIgnoreCase(PlaceTokens.EDIT_CLASSPAGE))
            params.put(REAL_URL, UrlGenerator.generateUrl(getHomeEndPoint() + "/" + ShareUrlToken.CLASSPAGE.getUrl(), contentGooruOid, CLASSPAGE));
        else {
            if (params.get(SHARETYPE).equalsIgnoreCase("embed")) {
                params.put(REAL_URL, UrlGenerator.generateUrl(getHomeEndPoint() + "/" + ShareUrlToken.COLLECTION_PLAY_EMBEDED_URL.getUrl(), contentGooruOid));
            } else {
                params.put(REAL_URL, UrlGenerator.generateUrl(getHomeEndPoint() + "/" + ShareUrlToken.COLLECTION_PLAY_URL.getUrl() + "%26share=true", contentGooruOid));
            }
        }
        String url = UrlGenerator.generateUrl(getRestEndPoint(), UrlToken.SHARE_SHORTEN_URL, params, contentGooruOid, getLoggedInSessionToken());
        JsonResponseRepresentation jsonResponseRep = ServiceProcessor.get(url, getRestUsername(), getRestPassword());
        jsonRep = jsonResponseRep.getJsonRepresentation();
        try {
            shortenUrl = shareDeSerializer.deserializeShortenUrl(jsonRep);
        }
        catch (Exception e)
        {
            // NOTE(review): swallowed; an empty map is returned on failure.
        }
        if (getHttpRequest().getScheme().equalsIgnoreCase(HTTPS)) {
            shortenUrl.put(SHORTEN_URL, shortenUrl.get(SHORTEN_URL).replaceAll(HTTP, HTTPS));
        }
        return shortenUrl;
    }

    /**
     * Variant of {@link #getShortenShareUrl} used when assigning content to a
     * classpage: adds a collection-play branch that embeds the classpage item
     * id and study/share flags in the URL.
     */
    @Override
    public Map<String, String> getShortenShareUrlforAssign(String contentGooruOid, Map<String, String> params, String classpageItemId) {
        JsonRepresentation jsonRep = null;
        Map<String, String> shortenUrl = new HashMap<String, String>();
        if (params.get(TYPE).equalsIgnoreCase(PlaceTokens.RESOURCE_SEARCH)) {
            if (params.get(SHARETYPE).equalsIgnoreCase("embed")) {
                params.put(REAL_URL, UrlGenerator.generateUrl(getHomeEndPoint() + "/" + ShareUrlToken.RESOURCE_PLAY_URL.getUrl() + "%26embed=true", contentGooruOid, RESOURCE));
            } else {
                params.put(REAL_URL, UrlGenerator.generateUrl(getHomeEndPoint() + "/" + ShareUrlToken.RESOURCE_PLAY_URL.getUrl() + "%26share=true", contentGooruOid, RESOURCE));
            }
        } else if (params.get(TYPE).equalsIgnoreCase(PlaceTokens.EDIT_CLASSPAGE))
            params.put(REAL_URL, UrlGenerator.generateUrl(getHomeEndPoint() + "/" + ShareUrlToken.CLASSPAGE.getUrl(), contentGooruOid, CLASSPAGE));
        else if (params.get(TYPE).equalsIgnoreCase(PlaceTokens.COLLECTION_PLAY))
            params.put(REAL_URL, UrlGenerator.generateUrl(getHomeEndPoint() + "/" + ShareUrlToken.COLLECTION_PLAY_CLASSPAGE_URL.getUrl() + "%26page=study%26share=true", contentGooruOid, classpageItemId));
        else {
            if (params.get(SHARETYPE).equalsIgnoreCase("embed")) {
                params.put(REAL_URL, UrlGenerator.generateUrl(getHomeEndPoint() + "/" + ShareUrlToken.COLLECTION_PLAY_EMBEDED_URL.getUrl(), contentGooruOid));
            } else {
                params.put(REAL_URL, UrlGenerator.generateUrl(getHomeEndPoint() + "/" + ShareUrlToken.COLLECTION_PLAY_URLAssign.getUrl() + "%26share=true", contentGooruOid));
            }
        }
        String url = UrlGenerator.generateUrl(getRestEndPoint(), UrlToken.SHARE_SHORTEN_URL, params, contentGooruOid, getLoggedInSessionToken());
        JsonResponseRepresentation jsonResponseRep = ServiceProcessor.get(url, getRestUsername(), getRestPassword());
        jsonRep = jsonResponseRep.getJsonRepresentation();
        try {
            shortenUrl = shareDeSerializer.deserializeShortenUrl(jsonRep);
        }
        catch (Exception e)
        {
            // NOTE(review): swallowed; an empty map is returned on failure.
        }
        if (getHttpRequest().getScheme().equalsIgnoreCase(HTTPS)) {
            shortenUrl.put(SHORTEN_URL, shortenUrl.get(SHORTEN_URL).replaceAll(HTTP, HTTPS));
        }
        return shortenUrl;
    }

    /**
     * Builds the Google sign-in URL with the given callback token, escaping
     * the characters ('#', '!', '&') that would break the callback parameter.
     */
    @Override
    public String getGoogleSignin(String parms) {
        parms = parms.replaceAll("#", "%23");
        parms = parms.replaceAll("!", "%21");
        parms = parms.replaceAll("&", "%26");
        String gSigninUrl = getGoogleSignin() + "domain=gmail.com&callBackUrl=" + parms;
        return gSigninUrl;
    }

    /**
     * Builds the Google sign-in URL whose callback is a place token on the
     * home endpoint, expanded with the given parameters.
     */
    @Override
    public String getGoogleSignin(String placeToken, Map<String, String> parms) {
        String callback = StringUtil.generateMessage(getHomeEndPoint() + "#" + placeToken, parms);
        callback = callback.replaceAll("#", "%23");
        callback = callback.replaceAll("!", "%21");
        callback = callback.replaceAll("&", "%26");
        String gSigninUrl = getGoogleSignin() + "domain=gmail.com&callBackUrl=" + callback;
        return gSigninUrl;
    }

    /** Builds the Google Drive picker URL with an escaped callback URL. */
    @Override
    public String getGoogleDrive(String url, Map<String, String> parms) {
        String callback = StringUtil.generateMessage(url, parms);
        callback = callback.replaceAll("#", "%23");
        callback = callback.replaceAll("!", "%21");
        callback = callback.replaceAll("&", "%26");
        String gDriveUrl = getDriveGoogle() + "?emailId=" + parms.get("emailId") + "&callBackUrl=" + callback;
        return gDriveUrl;
    }

    /** Prefixes the given path/params with the home endpoint. */
    @Override
    public String getCollectionPlayDirectLink(String params) {
        String directLink = "";
        directLink = getHomeEndPoint() + params;
        return directLink;
    }

    /** Exposes the configured home endpoint to the client. */
    @Override
    public String getHomeEndPointUrl() {
        return getHomeEndPoint();
    }

    /**
     * Suggests resources for the last resource-search term (the {@code query}
     * field) when a search returned no results. Failures yield an empty
     * result object.
     */
    @Override
    public SearchDo<ResourceSearchResultDo> getSuggestSearchResultForResourceNoResult(SearchDo<ResourceSearchResultDo> searchDo) {
        JsonRepresentation jsonRep = null;
        SearchDo<ResourceSearchResultDo> searchDOEmpty = new SearchDo<ResourceSearchResultDo>();
        try {
            String url = UrlGenerator.generateUrl(getSearchEndPoint(), UrlToken.SEARCH_SUGGEST_NO_RESULT, searchDo.getFilters(), getLoggedInSessionToken(), query);
            JsonResponseRepresentation jsonResponseRep = ServiceProcessor.get(url, getSearchUsername(), getSearchPassword());
            jsonRep = jsonResponseRep.getJsonRepresentation();
            resourceSearchResultDeSerializer.deserialize(jsonRep, searchDo);
            return searchDo;
        } catch (Exception e) {
            // NOTE(review): swallowed; callers receive an empty SearchDo.
        }
        return searchDOEmpty;
    }

    /**
     * Auto-suggests search keywords for the query; page size and page number
     * are fixed at 5 and 1.
     */
    @Override
    public SearchDo<AutoSuggestKeywordSearchDo> getSuggestedAutokeyword(
            SearchDo<AutoSuggestKeywordSearchDo> searchDo) throws GwtException {
        String pageSize = "5";
        String pageNumber = "1";
        JsonRepresentation jsonRep = null;
        String url = UrlGenerator.generateUrl(getSearchEndPoint(), UrlToken.SEARCH_AUTO_SUGGEST_KEYWORD, getLoggedInSessionToken(), searchDo.getSearchQuery(), pageSize, searchDo.getType(), pageNumber);
        JsonResponseRepresentation jsonResponseRep = ServiceProcessor.get(url, getSearchUsername(), getSearchPassword());
        jsonRep = jsonResponseRep.getJsonRepresentation();
        searchDo.setSearchResults(autoSearchKeyWordDeSerializer.deserializeAutoKeyword(jsonRep));
        return searchDo;
    }

    /** Auto-suggests aggregators for the (URL-encoded) query. */
    @Override
    public SearchDo<String> getSuggestedAggregator(SearchDo<String> searchDo)
            throws GwtException {
        JsonRepresentation jsonRep = null;
        String url = UrlGenerator.generateUrl(getSearchEndPoint(), UrlToken.SEARCH_SUGGEST_AGGREGATOR, getLoggedInSessionToken(), URLEncoder.encode(searchDo.getSearchQuery()), searchDo.getPageSize() + "", searchDo.getPageNum() + "");
        JsonResponseRepresentation jsonResponseRep = ServiceProcessor.get(url, getSearchUsername(), getSearchPassword());
        jsonRep = jsonResponseRep.getJsonRepresentation();
        searchDo.setSearchResults(autoCompleteDeSerializer.deserializeAggregator(jsonRep));
        return searchDo;
    }

    /**
     * Suggests resources to add while editing the collection identified by
     * {@code contentGorruOid}. Applies the same filter escaping/"category"
     * rewriting as {@link #getResourceSearchResults}; failures yield an empty
     * result object.
     */
    @Override
    public SearchDo<ResourceSearchResultDo> getCollectionSuggestedResourceSearchResults(
            SearchDo<ResourceSearchResultDo> searchDo, String contentGorruOid)
            throws GwtException {
        SearchDo<ResourceSearchResultDo> searchDOEmpty = new SearchDo<ResourceSearchResultDo>();
        String query1 = searchDo.getSearchQuery();
        query = query1;
        try {
            if (searchDo.getFilters() != null) {
                for (String key : searchDo.getFilters().keySet()) {
                    String value = searchDo.getFilters().get(key);
                    value = value.replaceAll("&", "%26");
                    searchDo.getFilters().put(key, value);
                }
            }
            JsonRepresentation jsonRep = null;
            Map<String, String> filtersMap = searchDo.getFilters();
            if (filtersMap != null) {
                String category = filtersMap.get("category");
                if (category != null && category.equalsIgnoreCase("All")) {
                    filtersMap.remove("category");
                }
                else if (category != null) {
                    if (category.equalsIgnoreCase("Website")) {
                        category = category.replaceAll("Website", "webpage");
                        filtersMap.remove("category");
                        filtersMap.put("flt.resourceFormat", category);
                    }
                    else {
                        filtersMap.remove("category");
                        filtersMap.put("flt.resourceFormat", category);
                    }
                }
            }
            String url = UrlGenerator.generateUrl(getHomeEndPoint(), UrlToken.SEARCH_SUGGEST_RESOURCES, getLoggedInSessionToken(), URLEncoder.encode(searchDo.getSearchQuery()), COLLECTION_EDIT_EVENT, contentGorruOid);
            if (getSearchEndPoint().contains(HTTPS)) {
                url = appendHttpsURL(url);
            }
            System.out.println("search end point url::::::" + url);
            JsonResponseRepresentation jsonResponseRep = ServiceProcessor.get(url, getSearchUsername(), getSearchPassword());
            jsonRep = jsonResponseRep.getJsonRepresentation();
            try {
                resourceSearchResultDeSerializer.deserializeSuggestedResources(jsonRep, searchDo);
            }
            catch (Exception e)
            {
                // NOTE(review): deserialization failures are swallowed.
            }
            return searchDo;
        } catch (Exception e) {
            // NOTE(review): swallowed; callers receive an empty SearchDo.
        }
        return searchDOEmpty;
    }

    /** Fetches level-1 standards for the given level order and label. */
    @Override
    public ArrayList<StandardsLevel1DO> getFirstLevelStandards(String levelOrder, String standardLabel) {
        JsonRepresentation jsonRep = null;
        ArrayList<StandardsLevel1DO> standardLevelArry = new ArrayList<StandardsLevel1DO>();
        String url = UrlGenerator.generateUrl(getRestEndPoint(), UrlToken.V2_LevelWiseStandards, levelOrder, standardLabel, getLoggedInSessionToken());
        if (getSearchEndPoint().contains(HTTPS)) {
            url = appendHttpsURL(url);
        }
        JsonResponseRepresentation jsonResponseRep = ServiceProcessor.get(url, getSearchUsername(), getSearchPassword());
        jsonRep = jsonResponseRep.getJsonRepresentation();
        try {
            if (jsonRep != null && jsonRep.getSize() != -1) {
                standardLevelArry = JsonDeserializer.deserialize(jsonRep.getJsonArray().toString(), new TypeReference<ArrayList<StandardsLevel1DO>>() {});
                return standardLevelArry;
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return standardLevelArry;
    }

    /** Fetches level-2 standards for the given level order and label. */
    @Override
    public ArrayList<StandardsLevel2DO> getSecondLevelStandards(String levelOrder, String standardLabel) {
        JsonRepresentation jsonRep = null;
        ArrayList<StandardsLevel2DO> standardLevelArry = new ArrayList<StandardsLevel2DO>();
        String url = UrlGenerator.generateUrl(getRestEndPoint(), UrlToken.V2_LevelWiseStandards, levelOrder, standardLabel, getLoggedInSessionToken());
        if (getSearchEndPoint().contains(HTTPS)) {
            url = appendHttpsURL(url);
        }
        JsonResponseRepresentation jsonResponseRep = ServiceProcessor.get(url, getSearchUsername(), getSearchPassword());
        jsonRep = jsonResponseRep.getJsonRepresentation();
        try {
            if (jsonRep != null && jsonRep.getSize() != -1) {
                standardLevelArry = JsonDeserializer.deserialize(jsonRep.getJsonArray().toString(), new TypeReference<ArrayList<StandardsLevel2DO>>() {});
                return standardLevelArry;
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return standardLevelArry;
    }

    /** Fetches level-3 standards for the given level order and label. */
    @Override
    public ArrayList<StandardsLevel3DO> getThirdLevelStandards(String levelOrder, String standardLabel) {
        JsonRepresentation jsonRep = null;
        ArrayList<StandardsLevel3DO> standardLevelArry = new ArrayList<StandardsLevel3DO>();
        String url = UrlGenerator.generateUrl(getRestEndPoint(), UrlToken.V2_LevelWiseStandards, levelOrder, standardLabel, getLoggedInSessionToken());
        if (getSearchEndPoint().contains(HTTPS)) {
            url = appendHttpsURL(url);
        }
        JsonResponseRepresentation jsonResponseRep = ServiceProcessor.get(url, getSearchUsername(), getSearchPassword());
        jsonRep = jsonResponseRep.getJsonRepresentation();
        try {
            if (jsonRep != null && jsonRep.getSize() != -1) {
                standardLevelArry = JsonDeserializer.deserialize(jsonRep.getJsonArray().toString(), new TypeReference<ArrayList<StandardsLevel3DO>>() {});
                return standardLevelArry;
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return standardLevelArry;
    }

    /** Fetches level-4 standards for the given level order and label. */
    @Override
    public ArrayList<StandardsLevel4DO> getFourthLevelStandards(String levelOrder, String standardLabel) {
        JsonRepresentation jsonRep = null;
        ArrayList<StandardsLevel4DO> standardLevelArry = new ArrayList<StandardsLevel4DO>();
        String url = UrlGenerator.generateUrl(getRestEndPoint(), UrlToken.V2_LevelWiseStandards, levelOrder, standardLabel, getLoggedInSessionToken());
        if (getSearchEndPoint().contains(HTTPS)) {
            url = appendHttpsURL(url);
        }
        System.out.println("getFourthLevelStandards:::" + url);
        JsonResponseRepresentation jsonResponseRep = ServiceProcessor.get(url, getSearchUsername(), getSearchPassword());
        jsonRep = jsonResponseRep.getJsonRepresentation();
        try {
            if (jsonRep != null && jsonRep.getSize() != -1) {
                standardLevelArry = JsonDeserializer.deserialize(jsonRep.getJsonArray().toString(), new TypeReference<ArrayList<StandardsLevel4DO>>() {});
                return standardLevelArry;
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return standardLevelArry;
    }

    /**
     * Auto-suggests standards restricted by the source code id
     * ("flt.sourceCodeId") filter.
     */
    @Override
    public SearchDo<CodeDo> getSuggestStandardByFilterCourseIdsource(
            SearchDo<CodeDo> searchDo) throws GwtException, ServerDownException {
        JsonRepresentation jsonRep = null;
        String url = UrlGenerator.generateUrl(getRestEndPoint(), UrlToken.SUGGEST_STANDARD_BY_FILTER_Source_CodeId, getLoggedInSessionToken(), searchDo.getSearchQuery());
        System.out.println("searchDo.getFilters():::::::" + searchDo.getFilters());
        if (searchDo.getFilters() != null && searchDo.getFilters().size() > 0) {
            url = url + "&" + FLT_SOURCE_CODE_ID + "=" + searchDo.getFilters().get(FLT_SOURCE_CODE_ID);
        }
        System.out.println("getSuggestStandardByFiltersource course id 1 CourseId api new:::::::" + url);
        JsonResponseRepresentation jsonResponseRep = ServiceProcessor.get(url, getSearchUsername(), getSearchPassword());
        jsonRep = jsonResponseRep.getJsonRepresentation();
        searchDo.setSearchResults(autoCompleteDeSerializer.deserializeStandards(jsonRep));
        return searchDo;
    }
}
|
shaojiankui/iOS10-Runtime-Headers | PrivateFrameworks/CoreKnowledge.framework/CoreKnowledge.CKClosure.h | /* Generated by RuntimeBrowser
Image: /System/Library/PrivateFrameworks/CoreKnowledge.framework/CoreKnowledge
*/
/* RuntimeBrowser dump of the Swift-backed class CoreKnowledge.CKClosure:
   an NSCoding-capable closure wrapper tying an executable action to a
   CKEntity and its owning CKKnowledgeStore. */
@interface CoreKnowledge.CKClosure : NSObject {
    /* NOTE(review): ivar types are emitted as `void` by the runtime dump;
       the real Swift storage types are not recoverable from this header. */
    void entity;
    void store;
}

/* Entity this closure operates on (mangled Swift class CKEntity). */
@property (nonatomic, readonly) _TtC13CoreKnowledge8CKEntity *entity;
/* Stable identifier, also used by the NSCoder round-trip below. */
@property (nonatomic, readonly) NSString *identifier;
/* Knowledge store that owns the entity (mangled Swift class CKKnowledgeStore). */
@property (nonatomic, readonly) _TtC13CoreKnowledge16CKKnowledgeStore *store;

- (id /* block */).cxx_destruct;
- (void)encodeWithCoder:(id)arg1;
- (id)entity;
/* Runs the wrapped closure. */
- (void)execute;
- (id)identifier;
- (id)init;
- (id)initWithCoder:(id)arg1;
- (id)store;

@end
|
gadge/foba | packages/objects/object-number/resources/ArmSales.js | <gh_stars>0
// https://www.sipri.org/databases/armsindustry
// in millions USD
// Arms sales per company (SIPRI Arms Industry Database, see link above),
// keyed by a CamelCase company identifier, values in millions of USD.
// Entries are listed in descending order of sales.
export const ArmSales = {
  LockheedMartin: 47260,
  Boeing: 29150,
  NorthropGrumman: 26190,
  Raytheon: 23440,
  GeneralDynamics: 22000,
  BAESystemsUK: 21210,
  AirbusGroup: 11650,
  BAESystemsUS: 10800,
  Leonardo: 9820,
  AlmazAntey: 9640,
  Thales: 9470,
  UnitedTechnologies: 9310,
  L3Technologies: 8250,
  HuntingtonIngallsInd: 7200,
  HoneywellIntr: 5430,
  UnitedAircraft: 5420,
  Leidos: 5000,
  Harris: 4970,
  UnitedShipbuilding: 4700,
  BoozAllenHamilton: 4680,
  RollsRoyce: 4680,
  NavalGroup: 4220,
  Rheinmetall: 3800,
  MBDA: 3780,
  GeneralElectric: 3650,
  MitsubishiHeavyInd: 3620,
  TacticalMissiles: 3600,
  Textron: 3500,
  ElbitSystems: 3500,
  CACIIntr: 3490,
  Saab: 3240,
  Safran: 3240,
  Sandia: 3200,
  BabcockIntr: 3180,
  UnitedEngine: 2950,
  DassaultAviation: 2930,
  ScienceApplicationsIntr: 2800,
  AECOM: 2770,
  GeneralAtomics: 2750,
  HindustanAeronautics: 2740,
  AirbusHelicoptersInc: 2730,
  IsraelAerospaceInd: 2650,
  HighPrecisionSystems: 2630,
  RockwellCollins: 2630,
  KBR: 2600,
  Perspecta: 2590,
  Rafael: 2540,
  RussianElectronics: 2330,
  HanwhaAerospace: 2320,
  CEA: 2300,
  KawasakiHeavyInd: 2260,
  BellHelicopterTextron: 2030,
  Bechtel: 2000,
  Fincantieri: 1900,
  Oshkosh: 1850,
  RussianHelicopters: 1810,
  KRET: 1770,
  ASELSAN: 1740,
  KraussMaffeiWegmann: 1680,
  IndianOrdnanceFactories: 1650,
  ThyssenKrupp: 1650,
  Cobham: 1590,
  DynCorpIntr: 1560,
  KoreaAerospaceInd: 1550,
  STEngineering: 1540,
  BharatElectronics: 1460,
  ManTechIntr: 1430,
  UralVagonZavod: 1370,
  JacobsEngineering: 1370,
  Fluor: 1350,
  LIGNex1: 1340,
  TransDigm: 1330,
  GKN: 1320,
  MelroseInd: 1320,
  UnitedLaunchAlliance: 1320,
  UkrOboronProm: 1300,
  Fujitsu: 1270,
  SercoGroup: 1260,
  PGZ: 1250,
  TeledyneTechnologies: 1240,
  Navantia: 1240,
  Hensoldt: 1240,
  Vectrus: 1230,
  AerojetRocketdyne: 1220,
  AustalUSA: 1160,
  Austal: 1140,
  SierraNevada: 1100,
  IHI: 1090,
  Nexter: 1080,
  TurkishAerospaceInd: 1070,
  BWXTechnologies: 1070,
  Engility: 1070,
  CAE: 1010,
  MIT: 980,
  Meggitt: 970,
  CurtissWright: 970,
  TheAerospace: 970,
  DevonportRoyalDockyard: 940,
  Ball: 930,
  Moog: 920,
  QinetiQ: 910,
  RUAG: 900,
  ViaSat: 860,
  MitsubishiElectric: 860,
  Arconic: 840,
  NEC: 840,
  Amphenol: 820,
}
|
SwarmUS/HiveConnect | src/network/src/ros/include/NetworkOutputStream.h | #ifndef HIVE_CONNECT_NETWORKOUTPUTSTREAM_H
#define HIVE_CONNECT_NETWORKOUTPUTSTREAM_H
#include "INetworkOutputStream.h"
#include "logger/ILogger.h"
// ROS-side implementation of INetworkOutputStream.
// Sends raw byte buffers to a destination over a socket held as a raw fd.
class NetworkOutputStream : public INetworkOutputStream {
  public:
    NetworkOutputStream(ILogger& logger);
    ~NetworkOutputStream() override;

    // Send `length` bytes from `data`; returns true on success.
    bool send(const uint8_t* data, uint16_t length) override;
    // Select the destination to send to; returns true on success.
    // NOTE(review): the uint32_t presumably encodes an address or agent id —
    // confirm against INetworkOutputStream's contract.
    bool setDestination(uint32_t destination) override;
    // Close the underlying socket; returns true on success.
    bool close() override;

  private:
    ILogger& m_logger;   // injected logger, not owned
    int m_socketFd;      // underlying socket file descriptor
};
#endif // HIVE_CONNECT_NETWORKOUTPUTSTREAM_H
|
pldg/gpe | lib/parse-prefix.js | const {
isObjectLiteral
} = require('./utils/utils');
function parsePrefix(prefix) {
if (!prefix) {
prefix = {};
} else if (!isObjectLiteral(prefix)) {
throw new Error('`prefix` must be an object literal');
}
const keys = Object.keys(prefix);
// Attributes that can be prefixed
const attributes = [
'class',
'alt',
'media',
'sizes',
'srcset',
'src',
'type'
];
if (keys.some(k => attributes.indexOf(k) === -1)) {
throw new Error('Invalid prefix key');
}
for (const attribute of attributes) {
if (!prefix[attribute]) prefix[attribute] = '';
}
return prefix;
}
module.exports = parsePrefix; |
moneyhoardermike/MHG.Network | x/launch/keeper/msg_request_add_validator_test.go | <filename>x/launch/keeper/msg_request_add_validator_test.go
package keeper_test
import (
"testing"
testkeeper "github.com/tendermint/spn/testutil/keeper"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/stretchr/testify/require"
"github.com/tendermint/spn/testutil/sample"
"github.com/tendermint/spn/x/launch/types"
profiletypes "github.com/tendermint/spn/x/profile/types"
)
// TestMsgRequestAddValidator is a table-driven test of the
// RequestAddValidator message handler. The fixture builds four chains under
// an active coordinator (one launch-triggered, one orphaned) plus one chain
// under a disabled coordinator, then checks request IDs, auto-approval for
// coordinator-signed requests, and the expected error for each failure path.
func TestMsgRequestAddValidator(t *testing.T) {
	var (
		invalidChain     = uint64(1000)
		coordAddr        = sample.Address(r)
		coordDisableAddr = sample.Address(r)
		addr1            = sample.Address(r)
		addr2            = sample.Address(r)
		addr3            = sample.Address(r)
		sdkCtx, tk, ts   = testkeeper.NewTestSetup(t)
		ctx              = sdk.WrapSDKContext(sdkCtx)
	)

	// Active coordinator that owns the four main fixture chains.
	coordID := tk.ProfileKeeper.AppendCoordinator(sdkCtx, profiletypes.Coordinator{
		Address: coordAddr,
		Active:  true,
	})
	chains := createNChainForCoordinator(tk.LaunchKeeper, sdkCtx, coordID, 4)
	// chains[0]: launch already triggered -> requests must be rejected.
	chains[0].LaunchTriggered = true
	tk.LaunchKeeper.SetChain(sdkCtx, chains[0])
	// chains[1]: points at a nonexistent coordinator -> chain is inactive.
	chains[1].CoordinatorID = 99999
	tk.LaunchKeeper.SetChain(sdkCtx, chains[1])

	// One chain owned by a disabled coordinator, for the inactive path.
	coordDisableID := tk.ProfileKeeper.AppendCoordinator(sdkCtx, profiletypes.Coordinator{
		Address: coordDisableAddr,
		Active:  false,
	})
	disableChain := createNChainForCoordinator(tk.LaunchKeeper, sdkCtx, coordDisableID, 1)

	for _, tc := range []struct {
		name        string
		msg         types.MsgRequestAddValidator
		wantID      uint64 // expected request ID when the request is queued
		wantApprove bool   // expected auto-approval (coordinator-signed requests)
		err         error  // expected error, nil on success
	}{
		{
			name: "invalid chain",
			msg:  sample.MsgRequestAddValidator(r, sample.Address(r), addr1, invalidChain),
			err:  types.ErrChainNotFound,
		},
		{
			name: "chain with triggered launch",
			msg:  sample.MsgRequestAddValidator(r, sample.Address(r), addr1, chains[0].LaunchID),
			err:  types.ErrTriggeredLaunch,
		},
		{
			name: "chain without coordinator",
			msg:  sample.MsgRequestAddValidator(r, sample.Address(r), addr1, chains[1].LaunchID),
			err:  types.ErrChainInactive,
		},
		{
			name:   "request to a chain 3",
			msg:    sample.MsgRequestAddValidator(r, sample.Address(r), addr1, chains[2].LaunchID),
			wantID: 1,
		},
		{
			name:   "second request to a chain 3",
			msg:    sample.MsgRequestAddValidator(r, sample.Address(r), addr2, chains[2].LaunchID),
			wantID: 2,
		},
		{
			name:   "request to a chain 4",
			msg:    sample.MsgRequestAddValidator(r, sample.Address(r), addr1, chains[3].LaunchID),
			wantID: 1,
		},
		{
			name:        "request from coordinator is pre-approved",
			msg:         sample.MsgRequestAddValidator(r, coordAddr, addr3, chains[3].LaunchID),
			wantApprove: true,
		},
		{
			// Same validator added twice by the coordinator -> duplicate error.
			name:        "failing request from coordinator",
			msg:         sample.MsgRequestAddValidator(r, coordAddr, addr3, chains[3].LaunchID),
			err:         types.ErrValidatorAlreadyExist,
			wantApprove: true,
		},
		{
			name: "fail if the coordinator of the chain is disabled",
			msg:  sample.MsgRequestAddValidator(r, sample.Address(r), sample.Address(r), disableChain[0].LaunchID),
			err:  profiletypes.ErrCoordInactive,
		},
	} {
		t.Run(tc.name, func(t *testing.T) {
			got, err := ts.LaunchSrv.RequestAddValidator(ctx, &tc.msg)
			if tc.err != nil {
				require.ErrorIs(t, tc.err, err)
				return
			}
			require.NoError(t, err)
			require.Equal(t, tc.wantID, got.RequestID)
			require.Equal(t, tc.wantApprove, got.AutoApproved)
			if !tc.wantApprove {
				// Queued request: verify it was stored with its content intact.
				request, found := tk.LaunchKeeper.GetRequest(sdkCtx, tc.msg.LaunchID, got.RequestID)
				require.True(t, found, "request not found")
				require.Equal(t, tc.wantID, request.RequestID)
				require.Equal(t, tc.msg.Creator, request.Creator)

				content := request.Content.GetGenesisValidator()
				require.NotNil(t, content)
				require.Equal(t, tc.msg.ValAddress, content.Address)
				require.Equal(t, tc.msg.LaunchID, content.LaunchID)
				require.True(t, tc.msg.SelfDelegation.Equal(content.SelfDelegation))
				require.Equal(t, tc.msg.GenTx, content.GenTx)
				require.Equal(t, tc.msg.Peer, content.Peer)
				require.Equal(t, tc.msg.ConsPubKey, content.ConsPubKey)
			} else {
				// Auto-approved request: the genesis validator is added directly.
				_, found := tk.LaunchKeeper.GetGenesisValidator(sdkCtx, tc.msg.LaunchID, tc.msg.ValAddress)
				require.True(t, found, "genesis validator not found")
			}
		})
	}
}
|
kylearon/test1 | com.soartech.simjr.core/src/main/java/com/soartech/simjr/ui/pvd/PanAnimator.java | /*
* Copyright (c) 2010, Soar Technology, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of Soar Technology, Inc. nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without the specific prior written permission of Soar Technology, Inc.
*
* THIS SOFTWARE IS PROVIDED BY SOAR TECHNOLOGY, INC. AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL SOAR TECHNOLOGY, INC. OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
* USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Created on Nov 12, 2008
*/
package com.soartech.simjr.ui.pvd;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.Timer;
import com.soartech.math.Vector3;
import com.soartech.shapesystem.SimplePosition;
import com.soartech.shapesystem.swing.SwingCoordinateTransformer;
/**
* @author ray
*/
/**
 * Animates the pan offset of a {@link SwingCoordinateTransformer} so that a
 * requested world position ends up at the center of the viewport, either
 * instantly ({@link #jumpToPosition}) or smoothly over a series of Swing
 * timer ticks ({@link #panToPosition}).
 *
 * Fixes over the previous revision: the screen-centering offset computation
 * was duplicated verbatim in both public methods (now extracted to
 * {@link #computeCenteringOffset}), and panToPosition could divide by a zero
 * step count, storing NaN step deltas.
 */
public class PanAnimator
{
    private final SwingCoordinateTransformer transformer;
    private final Timer animationTimer;

    // Remaining animation ticks and the per-tick pan deltas in pixels.
    private int animationSteps;
    private double animationPanDx, animationPanDy;

    /**
     * @param transformer the transformer whose pan offset this animator drives
     */
    public PanAnimator(SwingCoordinateTransformer transformer)
    {
        this.transformer = transformer;

        // 50 ms period -> 20 animation frames per second.
        animationTimer = new Timer(50, new ActionListener() {
            public void actionPerformed(ActionEvent e)
            {
                animate(true);
            }});
    }

    /**
     * @return true while a smooth pan animation is in progress
     */
    public boolean isAnimating()
    {
        return animationTimer.isRunning();
    }

    /**
     * Apply one animation step: shift the pan offset by the per-tick delta,
     * stop the timer after the final step, and optionally repaint.
     */
    private void animate(boolean doRepaint)
    {
        transformer.setPanOffset(transformer.getPanOffsetX() + animationPanDx,
                                 transformer.getPanOffsetY() + animationPanDy);
        animationSteps--;
        if(animationSteps == 0)
        {
            animationTimer.stop();
        }
        if (doRepaint)
        {
            transformer.getComponent().repaint();
        }
    }

    /**
     * Compute the screen-space offset that, added to the current pan offset,
     * brings world position p to the center of the viewport.
     *
     * @param p the position to center, in meters
     * @return a two-element array {dx, dy} in pixels
     */
    private double[] computeCenteringOffset(Vector3 p)
    {
        // Where p currently falls on screen under the active transform ...
        SimplePosition currentPosition = transformer.metersToScreen(p.x, p.y);

        // ... and where we want it: the center of the viewport.
        double desiredX = transformer.getSize().getWidth() / 2;
        double desiredY = transformer.getSize().getHeight() / 2;

        return new double[] { desiredX - currentPosition.x,
                              desiredY - currentPosition.y };
    }

    /**
     * Move position p instantly to the center of the screen. Repaint
     * the component only if doRepaint is true.
     *
     * @param p The point to move to the center, in meters.
     * @param doRepaint Whether to repaint after the move.
     */
    public void jumpToPosition(Vector3 p, boolean doRepaint)
    {
        if(isAnimating())
        {
            animationTimer.stop();
        }

        double[] offset = computeCenteringOffset(p);

        // A single animation step applied immediately.
        animationSteps = 1;
        animationPanDx = offset[0];
        animationPanDy = offset[1];
        animate(doRepaint);
    }

    /**
     * Move position p gradually to the center of the screen, repainting
     * the component along the way.
     *
     * @param p The point to move to the center, in meters.
     */
    public void panToPosition(Vector3 p)
    {
        if(isAnimating())
        {
            animationTimer.stop();
        }

        double[] offset = computeCenteringOffset(p);
        double distance = new Vector3(offset[0], offset[1], 0.0).length();
        if(distance == 0.0)
        {
            // Already centered. The old code divided by a zero step count
            // here and stored NaN deltas (harmless only because the timer
            // never started); bail out explicitly instead.
            return;
        }

        // Pan rate is proportional to distance, so every pan takes the same
        // wall-clock time: distance / (2 * distance) = 0.5 seconds.
        double pixelsPerSecond = distance * 2;
        double updatesPerSecond = 1000 / animationTimer.getDelay();
        animationSteps = (int) ((distance / pixelsPerSecond) * updatesPerSecond);
        if(animationSteps > 0)
        {
            // Assign the deltas only once a positive step count is known, so
            // these fields can never hold NaN or infinity.
            animationPanDx = offset[0] / animationSteps;
            animationPanDy = offset[1] / animationSteps;
            animationTimer.start();
        }
    }
}
|
mbits-libs/libarch | src/zip/stream.cc | <filename>src/zip/stream.cc
// Copyright (c) 2020 midnightBITS
// This code is licensed under MIT license (see LICENSE for details)
#include <arch/zip/stream.hh>
namespace arch::zip {
	// Takes ownership of the opened zip entry handle; the stream status is
	// forwarded to the dos_stream_mixin base.
	stream::stream(zip_file&& handle, io::status const& status)
	    : io::dos_stream_mixin{status}, handle_{std::move(handle)} {}

	// Ensure the entry handle is released even if close() was never called.
	stream::~stream() { close(); }

	// Releasing the unique handle closes the underlying zip entry.
	stream::close() is idempotent: resetting an empty handle is a no-op.
	void stream::close() { handle_.reset(); }

	// Read up to bytes.size() bytes from the entry into `bytes`.
	// Returns the number of bytes actually read; 0 on end-of-entry AND on
	// libzip read error (negative return) — callers cannot distinguish the
	// two from the return value alone.
	std::size_t stream::read(std::span<std::byte> bytes) {
		if (bytes.empty()) return 0;
		auto ret = zip_fread(handle_.get(), bytes.data(), bytes.size());
		if (ret < 0) return 0;
		return static_cast<size_t>(ret);
	}
}  // namespace arch::zip
|
alexrenz/bosen-2 | src/petuum_ps_common/util/vector_clock_mt.hpp | // author: jinliang
#pragma once
#include <petuum_ps_common/util/vector_clock.hpp>
#include <petuum_ps_common/util/lock.hpp>
#include <boost/noncopyable.hpp>
#include <glog/logging.h>
#include <boost/thread.hpp>
#include <vector>
#include <cstdint>
namespace petuum {

// VectorClock is a thread-safe extension of VectorClockST.
// All mutators and accessors synchronize through a single SharedMutex, so
// concurrent Tick/get_clock calls from multiple threads are safe.
class VectorClockMT : public VectorClock, boost::noncopyable {
public:
  VectorClockMT();
  // Construct with an initial set of clock ids (each starting at 0).
  explicit VectorClockMT(const std::vector<int32_t>& ids);

  // Override VectorClock
  // Register a new clock `id`, starting at `clock`.
  void AddClock(int32_t id, int32_t clock = 0);
  // Advance clock `id` by one; see base class for the return value contract.
  int32_t Tick(int32_t id);
  // Advance clock `id` up to `clock`.
  int32_t TickUntil(int32_t id, int32_t clock);

  // Accessor to a particular clock.
  int32_t get_clock(int32_t id) const;
  // Minimum over all registered clocks (the "slowest" participant).
  int32_t get_min_clock() const;

private:
  // Lock for slowest record
  mutable SharedMutex mutex_;
};

}  // namespace petuum
|
unsat/d | benchmark/c/nla/mannadiv.c | #include <stdio.h>
#include <stdlib.h>
/* No-op instrumentation hooks: vassume states a precondition and vtrace1
 * records the program state at the loop head. They are consumed by an
 * external invariant-inference tool, not by this program, so their bodies
 * and the commented-out assertions below must stay exactly as written. */
void vassume(int b){}
void vtrace1(int q, int a, int b, int x, int y){}
//void vtrace2(int q, int a, int x, int y){}

/* Manna's division benchmark: computes q = floor(x / y) by counting down
 * b from x while a cycles 0..y-1, maintaining the documented loop
 * invariant q*y + a + b == x. Returns the quotient q. */
int mainQ(int x, int y){
     vassume(x >= 0);
     vassume(y >= 1); //can also be y!=0, but CIVL will give disj

     int q, a, b;
     q = 0;
     a = 0;
     b = x;

     while(1) {
	  //assert(q* y + a + b == x);
	  vtrace1(q, a, b, x, y);
	  if(!(b != 0)) break;

	  if (a + 1 == y) {
	       q = q + 1;
	       a = 0;
	       b = b - 1;
	  }
	  else {
	       a = a + 1;
	       b = b - 1;
	  }
     }
     //vtrace2(q, a, x, y);
     //assert(q == x / y);
     return q;
}
/* Entry point: parses x and y from the command line and runs the benchmark.
 * Fixed: declared int (void main is non-standard C) and guards against
 * missing arguments before dereferencing argv[1]/argv[2]. */
int main(int argc, char **argv){
     if (argc < 3) {
          fprintf(stderr, "usage: %s x y\n", argv[0]);
          return 1;
     }
     mainQ(atoi(argv[1]), atoi(argv[2]));
     return 0;
}
|
Java-Publications/vaadin-001-charts-jumpstart | modules/gui/uilogic/src/test/java/junit/org/rapidpm/vaadin/jumpstart/gui/uilogic/security/LoginServiceImplTest.java | <gh_stars>0
package junit.org.rapidpm.vaadin.jumpstart.gui.uilogic.security;
import java.util.Optional;
import org.junit.Assert;
import org.junit.Test;
import org.rapidpm.ddi.DI;
import org.rapidpm.vaadin.jumpstart.gui.uilogic.security.LoginService;
import org.rapidpm.vaadin.jumpstart.gui.uilogic.security.User;
import junit.org.rapidpm.vaadin.jumpstart.gui.uilogic.BaseDDITest;
/**
* Created by svenruppert on 06.04.17.
*/
public class LoginServiceImplTest extends BaseDDITest {

  // null username AND null password -> no Optional value at all
  @Test
  public void validateUser001()
      throws Exception {
    final Optional<User> user = DI.activateDI(LoginService.class)
        .loadUser(null, null);
    Assert.assertFalse(user.isPresent());
  }

  // null username with a non-null password -> empty Optional
  @Test
  public void validateUser002()
      throws Exception {
    final Optional<User> user = DI.activateDI(LoginService.class)
        .loadUser(null, "XX");
    Assert.assertFalse(user.isPresent());
  }

  // non-null username with a null password -> empty Optional
  @Test
  public void validateUser003()
      throws Exception {
    final Optional<User> user = DI.activateDI(LoginService.class)
        .loadUser("XX", null);
    Assert.assertFalse(user.isPresent());
  }

  // valid username with an empty password -> the NO_USER sentinel
  // (the Optional is present here, unlike the null-argument cases above)
  @Test
  public void validateUser004()
      throws Exception {
    final Optional<User> user = DI.activateDI(LoginService.class)
        .loadUser("admin", "");
    Assert.assertEquals(User.NO_USER, user.get());
  }

  // empty username with a valid password -> the NO_USER sentinel
  @Test
  public void validateUser005()
      throws Exception {
    final Optional<User> user = DI.activateDI(LoginService.class)
        .loadUser("", "admin");
    Assert.assertEquals(User.NO_USER, user.get());
  }

  // valid credentials -> a real user is resolved
  @Test
  public void validateUser006()
      throws Exception {
    final Optional<User> user = DI.activateDI(LoginService.class)
        .loadUser("admin", "admin");
    Assert.assertTrue(user.isPresent());
  }
}
lechium/iOS1351Headers | System/Library/PrivateFrameworks/SpringBoard.framework/SpringBoard-Structs.h | <reponame>lechium/iOS1351Headers
/*
* This header is generated by classdump-dyld 1.5
* on Wednesday, October 27, 2021 at 3:23:48 PM Mountain Standard Time
* Operating System: Version 13.5.1 (Build 17F80)
* Image Source: /System/Library/PrivateFrameworks/SpringBoard.framework/SpringBoard
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>. Updated by <NAME>.
*/
typedef struct CGSize {
double width;
double height;
} CGSize;
typedef struct __IOHIDEvent* IOHIDEventRef;
typedef struct __CFBoolean* CFBooleanRef;
typedef struct __GSEvent* GSEventRef;
typedef struct {
BOOL field1;
BOOL field2;
BOOL field3;
BOOL field4;
BOOL field5;
double field6;
int field7;
} SCD_Struct_SB4;
typedef struct _NSZone* NSZoneRef;
typedef struct UIRectCornerRadii {
double topLeft;
double bottomLeft;
double bottomRight;
double topRight;
} UIRectCornerRadii;
typedef struct CGPoint {
double x;
double y;
} CGPoint;
typedef struct CGRect {
CGPoint origin;
CGSize size;
} CGRect;
typedef struct __CTServerConnection* CTServerConnectionRef;
typedef struct __SCDynamicStore* SCDynamicStoreRef;
typedef struct __CFString* CFStringRef;
typedef struct {
int field1;
int field2;
} SCD_Struct_SB12;
typedef struct os_unfair_lock_s {
unsigned _os_unfair_lock_opaque;
} os_unfair_lock_s;
typedef struct CGColor* CGColorRef;
typedef struct {
unsigned field1;
unsigned field2;
unsigned field3;
unsigned field4;
unsigned field5;
int field6;
} SCD_Struct_SB15;
typedef struct {
os_unfair_lock_s field1;
SCD_Struct_SB15 field2;
} SCD_Struct_SB16;
typedef struct {
unsigned field1[8];
} SCD_Struct_SB17;
typedef struct {
long long field1;
CGPoint field2;
CGPoint field3;
} SCD_Struct_SB18;
typedef struct NSRange {
unsigned long long location;
unsigned long long length;
} NSRange;
typedef struct __CFRunLoop* CFRunLoopRef;
typedef struct __WiFiManagerClient* WiFiManagerClientRef;
typedef struct __WiFiDeviceClient* WiFiDeviceClientRef;
typedef struct __WiFiNetwork* WiFiNetworkRef;
typedef struct IONotificationPort* IONotificationPortRef;
typedef struct BKSDisplayBrightnessTransaction* BKSDisplayBrightnessTransactionRef;
typedef struct UIEdgeInsets {
double top;
double left;
double bottom;
double right;
} UIEdgeInsets;
typedef struct tcp_connection_fallback_watch_s* tcp_connection_fallback_watch_sRef;
typedef struct MGNotificationTokenStruct* MGNotificationTokenStructRef;
typedef struct {
long long field1;
long long field2;
double field3;
} SCD_Struct_SB29;
typedef struct {
long long startStyle;
long long endStyle;
double transitionFraction;
} SCD_Struct_SB30;
typedef struct {
BOOL valid;
SCD_Struct_SB30 styleTransitionState;
} SCD_Struct_SB31;
typedef struct {
BOOL field1;
SCD_Struct_SB30 field2;
} SCD_Struct_SB32;
typedef struct __CFRunLoopObserver* CFRunLoopObserverRef;
typedef struct SBDragPreviewShadowParameters {
double shadowOpacity;
double shadowRadius;
CGSize shadowOffset;
} SBDragPreviewShadowParameters;
typedef struct __CFArray* CFArrayRef;
typedef struct AWNotification_s* AWNotification_sRef;
typedef struct _CARenderImage* CARenderImageRef;
typedef struct {
unsigned secondaryDisplayItemRole : 1;
} SCD_Struct_SB38;
typedef struct {
BOOL field1[42];
char field2[64];
char field3[64];
char field4[256];
int field5;
int field6;
int field7;
int field8;
char field9[100];
char field10[100];
char field11[100];
char field12[100];
char field13[100];
char field14[2][1024];
unsigned field15;
unsigned field16;
unsigned field17 : 1;
unsigned field18 : 1;
int field19;
int field20;
unsigned field21 : 1;
unsigned field22;
unsigned field23;
int field24;
unsigned field25;
char field26[150];
int field27;
int field28;
unsigned field29 : 1;
unsigned field30 : 1;
unsigned field31 : 1;
char field32[256];
unsigned field33 : 1;
unsigned field34 : 1;
unsigned field35 : 1;
unsigned field36 : 1;
unsigned field37 : 2;
unsigned field38 : 1;
unsigned field39;
unsigned field40 : 1;
unsigned field41 : 1;
unsigned field42 : 1;
char field43[256];
char field44[256];
char field45[100];
unsigned field46 : 1;
unsigned field47 : 1;
unsigned field48 : 1;
unsigned field49 : 1;
double field50;
unsigned field51 : 1;
unsigned field52 : 1;
char field53[100];
char field54[100];
} SCD_Struct_SB39;
typedef struct CGImage* CGImageRef;
typedef struct {
double value;
BOOL inclusive;
} SCD_Struct_SB41;
typedef struct {
SCD_Struct_SB41 start;
SCD_Struct_SB41 end;
} SCD_Struct_SB42;
typedef struct {
BOOL field1[42];
unsigned field2 : 1;
unsigned field3 : 1;
unsigned field4 : 1;
unsigned field5 : 1;
unsigned field6 : 1;
unsigned field7 : 1;
unsigned field8 : 1;
unsigned field9 : 1;
unsigned field10 : 2;
unsigned field11 : 1;
unsigned field12 : 1;
unsigned field13 : 1;
unsigned field14 : 1;
unsigned field15 : 1;
unsigned field16 : 1;
unsigned field17 : 1;
unsigned field18 : 1;
unsigned field19 : 1;
unsigned field20 : 1;
unsigned field21 : 1;
unsigned field22 : 1;
unsigned field23 : 1;
unsigned field24 : 1;
unsigned field25 : 1;
unsigned field26 : 1;
unsigned field27 : 1;
unsigned field28;
unsigned field29 : 1;
unsigned field30 : 1;
unsigned field31 : 1;
unsigned field32 : 1;
unsigned field33 : 1;
unsigned field34 : 1;
unsigned field35 : 1;
SCD_Struct_SB39 field36;
} SCD_Struct_SB43;
typedef struct {
unsigned long long field1;
id field2;
unsigned long long field3;
unsigned long long field4[5];
} SCD_Struct_SB44;
typedef struct SBReachabilityActivationContext {
CGPoint location;
CGPoint translation;
CGPoint velocity;
CGRect viewBounds;
BOOL everTranslatedUpwards;
} SBReachabilityActivationContext;
typedef struct {
BOOL itemIsEnabled[42];
char timeString[64];
char shortTimeString[64];
char dateString[256];
int gsmSignalStrengthRaw;
int secondaryGsmSignalStrengthRaw;
int gsmSignalStrengthBars;
int secondaryGsmSignalStrengthBars;
char serviceString[100];
char secondaryServiceString[100];
char serviceCrossfadeString[100];
char secondaryServiceCrossfadeString[100];
char serviceImages[2][100];
char operatorDirectory[1024];
unsigned serviceContentType;
unsigned secondaryServiceContentType;
unsigned cellLowDataModeActive : 1;
unsigned secondaryCellLowDataModeActive : 1;
int wifiSignalStrengthRaw;
int wifiSignalStrengthBars;
unsigned wifiLowDataModeActive : 1;
unsigned dataNetworkType;
unsigned secondaryDataNetworkType;
int batteryCapacity;
unsigned batteryState;
char batteryDetailString[150];
int bluetoothBatteryCapacity;
int thermalColor;
unsigned thermalSunlightMode : 1;
unsigned slowActivity : 1;
unsigned syncActivity : 1;
char activityDisplayId[256];
unsigned bluetoothConnected : 1;
unsigned displayRawGSMSignal : 1;
unsigned displayRawWifiSignal : 1;
unsigned locationIconType : 1;
unsigned voiceControlIconType : 2;
unsigned quietModeInactive : 1;
unsigned tetheringConnectionCount;
unsigned batterySaverModeActive : 1;
unsigned deviceIsRTL : 1;
unsigned lock : 1;
char breadcrumbTitle[256];
char breadcrumbSecondaryTitle[256];
char personName[100];
unsigned electronicTollCollectionAvailable : 1;
unsigned radarAvailable : 1;
unsigned wifiLinkWarning : 1;
unsigned wifiSearching : 1;
double backgroundActivityDisplayStartDate;
unsigned shouldShowEmergencyOnlyStatus : 1;
unsigned secondaryCellularConfigured : 1;
char primaryServiceBadgeString[100];
char secondaryServiceBadgeString[100];
} SCD_Struct_SB46;
typedef struct {
BOOL usedQuickActionButton;
BOOL scrolledInNotifications;
BOOL longLookedNotification;
BOOL interactedNotifcation;
BOOL clearedNotifcations;
BOOL activatedCamera;
BOOL prewarmedCameraFalsePositive;
BOOL cameraSwipeFailed;
BOOL activatedControlCenter;
BOOL activatedSpotlight;
BOOL unlockDismissed;
} SCD_Struct_SB47;
typedef struct CGAffineTransform {
double a;
double b;
double c;
double d;
double tx;
double ty;
} CGAffineTransform;
typedef struct SBWindowLevelRange_struct {
double start;
double end;
} SBWindowLevelRange_struct;
|
abelhOrihuela/hawk | lib/frontend/router/link.js | <filename>lib/frontend/router/link.js
import React from 'react'
import env from '~base/env-variables'
import { Link } from 'react-router-dom'
const AppLink = (props) => {
var to = props.to
if (env.PREFIX) {
to = env.PREFIX + to
}
return <Link {...props} to={to} />
}
export default AppLink
|
uk-gov-mirror/ministryofjustice.wmt-worker | migrations/app/20170902000000_app_drop_grade_code_in_offender_manager.js | <reponame>uk-gov-mirror/ministryofjustice.wmt-worker
exports.up = function (knex, promise) {
const sql = 'ALTER TABLE app.offender_manager DROP COLUMN grade_code'
return knex.schema
.raw('SET ARITHABORT ON')
.raw(sql)
}
exports.down = function (knex, Promise) {
const sql = 'ALTER TABLE app.offender_manager ADD grade_code varchar(5)'
return knex.schema
.raw('SET ARITHABORT ON')
.raw(sql)
}
|
hardik0899/Competitive_Programming | USACO/Star League/Advanced Searching Techniques 2/lites/sol.cpp | #define __USE_MINGW_ANSI_STDIO 0
#include <iostream>
#include <iomanip>
#include <stdio.h>
#include <stdlib.h>
#include <vector>
#include <algorithm>
#include <queue>
#include <map>
#include <unordered_map>
#include <set>
#include <unordered_set>
#include <stack>
#include <deque>
#include <string.h>
#include <math.h>
using namespace std;
// Contest constants.
#define PI 4.0*atan(1.0)
// Fixed: the original defined epsilon as "0.000000001a" — the stray 'a'
// suffix makes the macro an invalid token sequence. It only compiled
// because the macro was never expanded anywhere in this file.
#define epsilon 0.000000001
#define INF 1000000000000000000
#define MOD 1000000007
// Segment tree with lazy propagation over N lights (USACO "lites"):
// `sum` counts lights that are ON inside a node's range, `lazy` is a
// pending toggle (XOR) flag that applies to the node's whole subtree.
struct Node{
    int sum, lazy;
    Node(){ sum = 0; lazy = 0; }
};

int N, M;
Node tree [400010];  // 4*N nodes for N up to 100010

// Children of node p in the implicit binary-heap layout (root = 1).
int left(int p){ return p<<1; }
int right(int p){ return (p<<1)+1; }

// Resolve p's pending toggle: flip its ON-count for range [L, R] and
// defer the toggle to both children (unless p is a leaf).
void pushDown(int p, int L, int R){
    if(tree[p].lazy == 0) return;
    tree[p].sum = R-L+1-tree[p].sum;
    int li = left(p), ri = right(p);
    if(L != R){
        tree[li].lazy ^= 1;
        tree[ri].lazy ^= 1;
    }
    tree[p].lazy = 0;
}

// p's ON-count with its own pending toggle taken into account,
// without mutating the tree.
int evalSum(int p, int L, int R){
    if(tree[p].lazy) return R-L+1-tree[p].sum;
    else return tree[p].sum;
}

// Recompute p's count from its (lazily-adjusted) children.
void pullUp(int p, int L, int R){
    int li = left(p), ri = right(p);
    tree[p].sum = evalSum(li, L, (L+R)/2)+evalSum(ri, (L+R)/2+1, R);
}

// Toggle every light in [i, j]; node p covers [L, R].
void update(int p, int L, int R, int i, int j){
    if(L > R || i > R || j < L) return;
    if(L >= i && R <= j){ tree[p].lazy ^= 1; return; }
    pushDown(p, L, R);
    int li = left(p), ri = right(p);
    update(li, L, (L+R)/2, i, j); update(ri, (L+R)/2+1, R, i, j);
    pullUp(p, L, R);
}

// Count lights that are ON in [i, j]; node p covers [L, R].
int query(int p, int L, int R, int i, int j){
    if(L > R || i > R || j < L) return 0;
    if(L >= i && R <= j) return evalSum(p, L, R);
    pushDown(p, L, R);
    int li = left(p), ri = right(p);
    // Accumulated in long long, but the count fits in int (<= N), so the
    // implicit narrowing on return is safe here.
    long long ret = query(li, L, (L+R)/2, i, j)+query(ri, (L+R)/2+1, R, i, j);
    pullUp(p, L, R);
    return ret;
}

int main(){
    //freopen("fcount.in", "r", stdin); freopen("fcount.out", "w", stdout);
    ios_base::sync_with_stdio(0); cin.tie(0); cout << fixed << setprecision(18);
    // Input: N lights (1-based, all off) and M operations, each "z x y":
    //   z == 0 -> toggle lights x..y;  otherwise -> print the lit count in x..y.
    cin >> N >> M;
    for(int i = 0; i < M; i++){
        int x, y, z; cin >> z >> x >> y; x--; y--;
        if(z == 0) update(1, 0, N-1, x, y);
        else cout << query(1, 0, N-1, x, y) << '\n';
    }
    return 0;
}
|
tommcandrew/eventful-calendar | client/src/0-components/MyAccount.js | import React, { useContext } from "react";
import AuthContext from "../2-context/AuthContext";
import LanguageContext from "../2-context/LanguageContext";
import { logOutTextOptions } from "../3-data/siteText";
import { Redirect } from "react-router-dom";
import axios from "axios";
// Account panel showing the signed-in user's name/email and a logout button.
// `setShowMyAccount` toggles the panel's visibility from the parent.
const MyAccount = ({ setShowMyAccount }) => {
  const { language } = useContext(LanguageContext);
  const { userName, userEmail, setAuthenticated } = useContext(AuthContext);

  const handleLogout = () => {
    // The shared demo account gets its events wiped on logout so the next
    // demo visitor starts from a clean calendar (fire-and-forget request).
    if (userEmail === "<EMAIL>") {
      axios.delete("/api/removeDemoEvents");
    }
    setAuthenticated(false);
    localStorage.removeItem("my-token");
    // NOTE(review): returning <Redirect/> from a click handler does not
    // render it, so this line has no effect; navigation presumably happens
    // because setAuthenticated(false) re-renders a route guard — confirm.
    return <Redirect to="/" />;
  };

  return (
    <div className="my-account">
      {/* × close control — hides the panel without logging out */}
      <span
        className="my-account__close-button"
        onClick={() => setShowMyAccount(false)}
      >
        &times;
      </span>
      {/* Avatar placeholder: first letter of the user's name */}
      <div className="my-account__profile-pic">
        <span className="my-account__user-initial">
          {userName.substr(0, 1)}
        </span>
      </div>
      <div className="my-account__user-details">
        <h3 className="my-account__user-name">{userName}</h3>
        <p className="my-account__user-email">{userEmail}</p>
      </div>
      {/* Button label is localised via the current language */}
      <button className="my-account__logout" onClick={handleLogout}>
        {logOutTextOptions[language]}
      </button>
    </div>
  );
};
export default MyAccount;
|
hzjane/HUSTER-CS | 系统综合能力—虚拟机/ics2019/nemu/src/isa/riscv32/decode.c | <gh_stars>100-1000
#include "cpu/decode.h"
#include "rtl/rtl.h"
// decode operand helper
// make_DopHelper(name) expands to the signature of decode_op_<name>(), which
// fills in one Operand: its type, its value (via the RTL pseudo-ops), and a
// printable form for the disassembly string.
#define make_DopHelper(name) void concat(decode_op_, name) (Operand *op, uint32_t val, bool load_val)

/* Immediate operand: store `val` as the operand's immediate and load it
 * into op->val. */
static inline make_DopHelper(i) {
  op->type = OP_TYPE_IMM;
  op->imm = val;
  rtl_li(&op->val, op->imm);

  print_Dop(op->str, OP_STR_SIZE, "%d", op->imm);
}

/* Register operand: `val` is the register index; the register's current
 * value is read only when load_val is set (destinations skip the read). */
static inline make_DopHelper(r) {
  op->type = OP_TYPE_REG;
  op->reg = val;
  if (load_val) {
    rtl_lr(&op->val, op->reg, 4);
  }

  print_Dop(op->str, OP_STR_SIZE, "%s", reg_name(op->reg, 4));
}

/* U-type (e.g. lui/auipc): 20-bit upper immediate shifted into bits 31..12,
 * destination register in rd. */
make_DHelper(U) {
  decode_op_i(id_src, decinfo.isa.instr.imm31_12 << 12, true);
  decode_op_r(id_dest, decinfo.isa.instr.rd, false);

  print_Dop(id_src->str, OP_STR_SIZE, "0x%x", decinfo.isa.instr.imm31_12);
}

/* I-type: rs1 + 12-bit immediate (simm11_0), destination in rd. */
make_DHelper(I){
  decode_op_r(id_src,decinfo.isa.instr.rs1,true);
  decode_op_i(id_src2,decinfo.isa.instr.simm11_0,true);
  print_Dop(id_src->str,OP_STR_SIZE,"0x%x",decinfo.isa.instr.rs1);
  print_Dop(id_src2->str,OP_STR_SIZE,"0x%x",decinfo.isa.instr.simm11_0);
  decode_op_r(id_dest,decinfo.isa.instr.rd,false);
}

/* J-type (jal): reassemble the scattered offset bits
 * [20|19:12|11|10:1] into a byte offset (bit 0 is always zero). */
make_DHelper(J){
  int32_t offset=(decinfo.isa.instr.simm20<<20)|(decinfo.isa.instr.imm19_12<<12)|(decinfo.isa.instr.imm11_<<11)|(decinfo.isa.instr.imm10_1<<1);
  decode_op_i(id_src,offset,true);
  print_Dop(id_src->str,OP_STR_SIZE,"0x%x",offset);
  decode_op_r(id_dest,decinfo.isa.instr.rd,false);
}

/* B-type (branches): two source registers plus the branch offset
 * reassembled from bits [12|11|10:5|4:1] into id_dest. */
make_DHelper(B){
  decode_op_r(id_src,decinfo.isa.instr.rs1,true);
  decode_op_r(id_src2,decinfo.isa.instr.rs2,true);
  print_Dop(id_src->str,OP_STR_SIZE,"0x%x",decinfo.isa.instr.rs1);
  print_Dop(id_src2->str,OP_STR_SIZE,"0x%x",decinfo.isa.instr.rs2);
  int32_t offset=(decinfo.isa.instr.simm12<<12)|(decinfo.isa.instr.imm11<<11)|(decinfo.isa.instr.imm10_5<<5)|(decinfo.isa.instr.imm4_1<<1);
  decode_op_i(id_dest,offset,true);
}

/* R-type: two source registers, destination in rd. */
make_DHelper(R){
  decode_op_r(id_src,decinfo.isa.instr.rs1,true);
  decode_op_r(id_src2,decinfo.isa.instr.rs2,true);
  print_Dop(id_src->str,OP_STR_SIZE,"0x%x",decinfo.isa.instr.rs1);
  print_Dop(id_src2->str,OP_STR_SIZE,"0x%x",decinfo.isa.instr.rs2);
  decode_op_r(id_dest,decinfo.isa.instr.rd,false);
}

/* Loads: effective address = rs1 + simm11_0, stored into id_src->addr;
 * destination register in rd. */
make_DHelper(ld) {
  decode_op_r(id_src, decinfo.isa.instr.rs1, true);
  decode_op_i(id_src2, decinfo.isa.instr.simm11_0, true);
  print_Dop(id_src->str, OP_STR_SIZE, "%d(%s)", id_src2->val, reg_name(id_src->reg, 4));
  rtl_add(&id_src->addr, &id_src->val, &id_src2->val);
  decode_op_r(id_dest, decinfo.isa.instr.rd, false);
}

/* Stores: S-type immediate is split across bits [11:5] and [4:0];
 * effective address = rs1 + imm, value to store comes from rs2. */
make_DHelper(st) {
  decode_op_r(id_src, decinfo.isa.instr.rs1, true);
  int32_t simm = (decinfo.isa.instr.simm11_5 << 5) | decinfo.isa.instr.imm4_0;
  decode_op_i(id_src2, simm, true);
  print_Dop(id_src->str, OP_STR_SIZE, "%d(%s)", id_src2->val, reg_name(id_src->reg, 4));
  rtl_add(&id_src->addr, &id_src->val, &id_src2->val);
  decode_op_r(id_dest, decinfo.isa.instr.rs2, true);
}
|
lechium/tvOS10Headers | System/Library/Frameworks/MobileCoreServices.framework/LSApplicationRestrictionsManager.h | <gh_stars>1-10
/*
* This header is generated by classdump-dyld 1.0
* on Wednesday, March 22, 2017 at 9:01:19 AM Mountain Standard Time
* Operating System: Version 10.1 (Build 14U593)
* Image Source: /System/Library/Frameworks/MobileCoreServices.framework/MobileCoreServices
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>.
*/
@protocol OS_dispatch_queue;
@class MCProfileConnection, NSObject, NSNumber, NSMutableSet, NSSet;
// Class-dump generated interface: singleton that tracks MDM/parental
// restriction state (whitelist/blacklist, removed system apps, rating
// cap) via an MCProfileConnection, serializing access on a queue.
@interface LSApplicationRestrictionsManager : NSObject {
MCProfileConnection* _connection;
// Serializes reads/writes of the cached restriction sets below.
NSObject*<OS_dispatch_queue> _restrictionsAccessQueue;
NSNumber* _whitelistState;
NSMutableSet* _pendingChanges;
NSSet* _whitelistedBundleIDs;
NSSet* _blacklistedBundleIDs;
NSSet* _restrictedBundleIDs;
NSSet* _removedSystemApplications;
NSNumber* _maximumRating;
}
@property (getter=isWhitelistEnabled,readonly) BOOL whitelistEnabled;
@property (getter=isOpenInRestrictionInEffect,readonly) BOOL openInRestrictionInEffect;
@property (getter=isAdTrackingEnabled,readonly) BOOL adTrackingEnabled;
@property (readonly) NSNumber * maximumRating; //@synthesize maximumRating=_maximumRating - In the implementation block
@property (readonly) NSSet * restrictedBundleIDs; //@synthesize restrictedBundleIDs=_restrictedBundleIDs - In the implementation block
@property (readonly) NSSet * blacklistedBundleIDs; //@synthesize blacklistedBundleIDs=_blacklistedBundleIDs - In the implementation block
@property (readonly) NSSet * removedSystemApplications; //@synthesize removedSystemApplications=_removedSystemApplications - In the implementation block
@property (readonly) NSSet * whitelistedBundleIDs; //@synthesize whitelistedBundleIDs=_whitelistedBundleIDs - In the implementation block
// Singleton accessor and the identifiers of currently active restrictions.
+(id)sharedInstance;
+(id)activeRestrictionIdentifiers;
-(NSSet *)removedSystemApplications;
-(BOOL)isApplicationRemoved:(id)arg1 ;
-(id)identifierForRemovedAppPrompt:(id)arg1 ;
-(BOOL)isAppExtensionRestricted:(id)arg1 ;
-(BOOL)isApplicationRestricted:(id)arg1 ;
// Handlers for ManagedConfiguration change notifications.
-(void)handleMCEffectiveSettingsChanged;
-(void)handleMCRemovedSystemAppsChanged;
-(void)clearAllValues;
-(BOOL)isWhitelistEnabled;
-(id)_LSResolveIdentifiers:(id)arg1 ;
-(void)setWhitelistedBundleIDs:(NSSet *)arg1 ;
-(void)setBlacklistedBundleIDs:(NSSet *)arg1 ;
-(void)setRestrictedBundleIDs:(NSSet *)arg1 ;
-(void)setRemovedSystemApplications:(NSSet *)arg1 ;
-(NSSet *)whitelistedBundleIDs;
-(id)calculateSetDifference:(id)arg1 and:(id)arg2 ;
-(NSSet *)restrictedBundleIDs;
-(void)clearPendingChanges;
-(void)addPendingChanges:(id)arg1 ;
-(BOOL)isApplicationRestricted:(id)arg1 checkFeatureRestrictions:(BOOL)arg2 ;
-(BOOL)isFeatureAllowed:(unsigned long long)arg1 ;
// NOTE(review): singular name while the ivar/property are plural —
// preserved verbatim from the class dump.
-(id)blacklistedBundleID;
-(BOOL)setApplication:(id)arg1 removed:(BOOL)arg2 ;
-(BOOL)isAdTrackingEnabled;
-(NSSet *)blacklistedBundleIDs;
-(BOOL)isRatingAllowed:(id)arg1 ;
-(BOOL)isOpenInRestrictionInEffect;
-(void)dealloc;
-(id)init;
-(id)allowedOpenInAppBundleIDsAfterApplyingFilterToAppBundleIDs:(id)arg1 originatingAppBundleID:(id)arg2 originatingAccountIsManaged:(BOOL)arg3 ;
-(NSNumber *)maximumRating;
-(void)beginListeningForChanges;
-(id)pendingChanges;
@end
|
BitmanNL/traffictower-cms | assets/admin/grocery_crud/js/jquery_plugins/config/jquery.tine_mce.config.js | $(function() {
// minimal editor
tinymce.init(document.cms_tinymce_minimal);
//full editor
tinymce.init(document.cms_tinymce_full);
}); |
efleurine/chantons | src/Containers/Welcome.js | import React, { Component } from "react";
import { View, Text } from "react-native";
import PropTypes from "prop-types";
class WelcomeScreen extends Component {
static navigationOptions = {
title: "Welcome"
};
static propTypes = {
navigation: PropTypes.object.isRequired
};
componentDidMount() {
setTimeout(() => this.props.navigation.navigate("Registration"), 2000);
}
render() {
return (
<View>
<Text>Chantons est une application etc.</Text>
<Text>You see this message because you are a fist time user</Text>
</View>
);
}
}
export default WelcomeScreen;
|
OpenTrustGroup/fuchsia | sdk/lib/fidl-service/cpp/service_watcher.cc | <reponame>OpenTrustGroup/fuchsia<filename>sdk/lib/fidl-service/cpp/service_watcher.cc
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <fuchsia/io/cpp/fidl.h>
#include <lib/fdio/directory.h>
#include <lib/fidl-service/cpp/service_directory.h>
#include <lib/fidl-service/cpp/service_watcher.h>
namespace fidl {
// Starts watching |svcdir| for service-instance add/remove events.
// Opens a fuchsia.io directory watcher over a clone of the directory
// channel and schedules an async wait on |dispatcher|; events are then
// delivered to OnWatchedEvent(). Returns the first failing status.
zx_status_t ServiceWatcher::Begin(const ServiceDirectoryBase& svcdir,
async_dispatcher_t* dispatcher) {
zx::channel client_end, server_end;
zx_status_t status = zx::channel::create(0, &client_end, &server_end);
if (status != ZX_OK) {
return status;
}
SynchronousInterfacePtr<fuchsia::io::Directory> dir;
// Clone the directory handle so the caller keeps its own connection.
dir.Bind(zx::channel(fdio_service_clone(svcdir.channel().get())));
zx_status_t fidl_status;
// Request existing entries plus future additions and removals.
status = dir->Watch(fuchsia::io::WATCH_MASK_EXISTING | fuchsia::io::WATCH_MASK_ADDED |
fuchsia::io::WATCH_MASK_REMOVED,
0, std::move(server_end), &fidl_status);
if (status != ZX_OK) {
return status;
} else if (fidl_status != ZX_OK) {
return fidl_status;
}
// Pre-size the read buffer to the protocol's maximum message size.
buf_.resize(fuchsia::io::MAX_BUF);
client_end_ = std::move(client_end);
wait_.set_object(client_end_.get());
wait_.set_trigger(ZX_CHANNEL_READABLE);
return wait_.Begin(dispatcher);
}
zx_status_t ServiceWatcher::Cancel() { return wait_.Cancel(); }
// Async wait handler: drains one watcher message from the channel,
// invokes |callback_(event, name)| once per entry, then re-arms the
// wait. Stops silently on wait error or channel read failure.
void ServiceWatcher::OnWatchedEvent(async_dispatcher_t* dispatcher, async::WaitBase* wait,
zx_status_t status, const zx_packet_signal_t* signal) {
if (status != ZX_OK || !(signal->observed & ZX_CHANNEL_READABLE)) {
return;
}
uint32_t size = buf_.size();
status = client_end_.read(0, buf_.data(), nullptr, size, 0, &size, nullptr);
if (status != ZX_OK) {
return;
}
// Wire format per entry: 1 byte event code, 1 byte name length, name.
for (auto i = buf_.begin(), end = buf_.begin() + size; std::distance(i, end) > 2;) {
// Process message structure, as described by fuchsia::io::WatchedEvent.
uint8_t event = *i++;
uint8_t len = *i++;
// Restrict the length to the remaining size of the buffer.
len = std::min<uint8_t>(len, std::max(0l, std::distance(i, end)));
// If the entry is valid, invoke the callback.
// (The "." self-entry is skipped; real instance names are reported.)
if (len != 1 || *i != '.') {
std::string instance(reinterpret_cast<char*>(i.base()), len);
callback_(event, std::move(instance));
}
i += len;
}
wait_.Begin(dispatcher);
}
} // namespace fidl
|
moni-roy/COPC | Online Judges/LeetCode/June LeetCoding Challenge/Week-1/Queue Reconstruction by Height.py | <gh_stars>1-10
class Solution:
    def reconstructQueue(self, people: List[List[int]]) -> List[List[int]]:
        """Rebuild the queue described by ``[height, k]`` pairs.

        Greedy strategy: process people tallest first (ties broken by
        smaller ``k``); each person is inserted at index ``k``, which is
        correct because everyone already placed is at least as tall.
        """
        queue: List[List[int]] = []
        for person in sorted(people, key=lambda p: (-p[0], p[1])):
            queue.insert(person[1], person)
        return queue
|
smartcommunitylab/sco.playgo | playandgo/src/main/webapp/js/gamification.js | var gamificationConsole = angular.module('gameconsole', [ 'ui.bootstrap', 'ngScrollable', 'ngMask', 'textAngular']);
// Directive: evaluates the expression bound via `dl-enter-key` when the
// user presses Enter inside the element.
gamificationConsole.directive('dlEnterKey', function() {
  return function(scope, element, attrs) {
    element.bind("keydown keypress", function(event) {
      var key = event.which || event.keyCode;
      if (key !== 13) {
        return;
      }
      scope.$apply(function() {
        scope.$eval(attrs.dlEnterKey);
      });
      event.preventDefault();
    });
  };
});
gamificationConsole.controller('GameCtrl', function($scope, $timeout, $http) {
// ---- Controller state: player lists, current selection, map overlay
// layers, and the filter form backing fields. ----
$scope.users = [];
$scope.banned = [];
$scope.userMap = {};
$scope.selectedUser = null;
$scope.selectedItinerary = null;
$scope.selectedInstance = null;
$scope.layers = [];
$scope.fixpaths = false;
// $scope.showroutes = false;
$scope.selectedRoutes = [];
$scope.relatedLayers = [];
$scope.removeoutliers = false;
$scope.eventsMarkers = new Map();
// Default filter window: the previous full Saturday-to-Saturday week
// (uses the Datejs library's fluent date API).
$scope.fromDate = Date.today().previous().saturday().previous().saturday();
$scope.toDate = Date.today().next().saturday().add(-1).minute();
$scope.openedFrom = false;
$scope.openedTo = false;
$scope.excludeZeroPoints = false;
$scope.allDates = false;
$scope.toCheck = false;
$scope.unapprovedOnly = false;
$scope.pendingOnly = false;
$scope.approvedList = [{name: 'All', value : false}, {name: 'Modified', value : true}];
$scope.filterApproved = $scope.approvedList[0];
$scope.scores = "";
$scope.tripsStats = {total : 0, valid : 0, invalid: 0, pending: 0}
$scope.validities = ['', 'PENDING', 'INVALID', 'VALID'];
$scope.filterUserId = ""
$scope.filterTravelId = ""
$scope.rankingType = "NONE"
$scope.maxRanking = 50
$scope.filterMean = ""
$scope.format = 'EEE MMM dd HH:mm';
$scope.dateOptions = {
startingDay: 1
};
// Maps the UI transport labels onto itinerary leg transport types.
$scope.ttypeconversion = {
"BUS" : "BUS",
"TRAIN" : "TRAIN",
"BOAT": "BOAT",
"BIKE" : "BICYCLE",
"WALK" : "WALK"
}
// Seed the date-picker inputs once the DOM has rendered.
$timeout(function() {
document.getElementById('fromDate').value = $scope.fromDate.toString('ddd MMM dd HH:mm');
document.getElementById('toDate').value = $scope.toDate.toString('ddd MMM dd HH:mm');
});
// spin.js configuration for the busy indicator.
var spinOpts = {
lines: 13 // The number of lines to draw
, length: 28 // The length of each line
, width: 14 // The line thickness
, radius: 42 // The radius of the inner circle
, scale: 1 // Scales overall size of the spinner
, corners: 1 // Corner roundness (0..1)
, color: '#000' // #rgb or #rrggbb or array of colors
, opacity: 0.25 // Opacity of the lines
, rotate: 0 // The rotation offset
, direction: 1 // 1: clockwise, -1: counterclockwise
, speed: 1 // Rounds per second
, trail: 60 // Afterglow percentage
, fps: 20 // Frames per second when using setTimeout() as a
// fallback for CSS
, zIndex: 2e9 // The z-index (defaults to 2000000000)
, className: 'spinner' // The CSS class to assign to the spinner
, top: '66%' // Top position relative to parent
, left: '50%' // Left position relative to parent
, shadow: false // Whether to render a shadow
, hwaccel: false // Whether to use hardware acceleration
, position: 'absolute' // Element positioning
}
var target = document.getElementById('console')
var spinner = new Spinner(spinOpts);
// Fetches the app id, the game state, and the filtered per-user trip
// statistics; rebuilds $scope.users/banned/userTotals and the
// aggregate tripsStats counters.
var load = function() {
$http.get("console/appId").success(function(data) {
$scope.appId = data;
spinner.spin(target);
$scope.state = {};
$http.get("console/state").then(function(state) {
$scope.state = state.data;
});
$http.get("console/users?excludeZeroPoints=" + $scope.excludeZeroPoints + "&unapprovedOnly=" + $scope.unapprovedOnly + "&pendingOnly=" + $scope.pendingOnly + "&toCheck=" + $scope.toCheck + ($scope.allDates ? "" : ("&fromDate=" + $scope.fromDate.getTime() + "&toDate=" + $scope.toDate.getTime())) + "&filterUserId=" + $scope.filterUserId + "&filterTravelId=" + $scope.filterTravelId + "&rankingType=" + $scope.rankingType + "&maxRanking=" + $scope.maxRanking + "&mean=" + $scope.filterMean, {"headers" : { "appId" : $scope.appId}}).then(function(data) {
var users = [];
var banned = [];
$scope.userTotals = {};
$scope.tripsStats = {total : 0, valid : 0, invalid: 0, pending: 0}
data.data.forEach(function(descr) {
users.push(descr.userId);
if (descr.banned) {
banned.push(descr.userId);
}
// pendingPercent = pending share of the non-invalid trips.
var vp = descr.valid + descr.pending;
var pp = 0;
if (vp != 0) {
pp = Math.round(100 * descr.pending / vp);
}
$scope.userTotals[descr.userId] = {
"total" : descr.total,
"valid" : (descr.valid),
"invalid" : (descr.invalid),
"pending" : (descr.pending),
"pendingPercent" : pp
};
$scope.tripsStats.total += descr.total;
$scope.tripsStats.valid += descr.valid;
$scope.tripsStats.invalid += descr.invalid;
$scope.tripsStats.pending += descr.pending;
});
$scope.users = users;
$scope.banned = banned;
// Drop the cached per-user itineraries; they are refetched lazily.
$scope.userMap = {};
spinner.stop();
});
});
}
load();
// Toggles game active/inactive on the server after user confirmation.
$scope.toggleStatus = function() {
if (confirm('Are you sure')) {
$http.put("console/" + ($scope.state.send ? "deactivate" : "activate")).then(function() {
$http.get("console/state").then(function(state) {
$scope.state = state.data;
});
});
}
}
// Toggles notification sending on the server after user confirmation.
$scope.toggleSend = function() {
if (confirm('Are you sure')) {
$http.put("console/" + ($scope.state.send ? "deactivatesend" : "activatesend")).then(function() {
$http.get("console/state").then(function(state) {
$scope.state = state.data;
});
});
}
}
// Toggles selection of a user; on first expansion lazily fetches and
// caches the user's itineraries with the current filter applied.
$scope.selectUser = function(user) {
if ($scope.selectedUser == user)
$scope.selectedUser = null;
else {
$scope.selectedUser = user;
if (!$scope.userMap[user]) {
spinner.spin(target);
// console.log($scope.allDates);
$http.get("console/useritinerary/" + user + "?excludeZeroPoints=" + $scope.excludeZeroPoints + "&unapprovedOnly=" + $scope.unapprovedOnly + "&pendingOnly=" + $scope.pendingOnly + "&toCheck=" + $scope.toCheck + ($scope.allDates ? "" : ("&fromDate=" + $scope.fromDate.getTime() + "&toDate=" + $scope.toDate.getTime())) + "&filterUserId=" + $scope.filterUserId + "&filterTravelId=" + $scope.filterTravelId + "&mean=" + $scope.filterMean, {"headers" : { "appId" : $scope.appId}}).then(function(data) {
$scope.userMap[user] = data.data;
spinner.stop();
});
}
}
// Changing user always clears the itinerary selection and map overlays.
$scope.selectedItinerary = null;
$scope.selectedInstance = null;
resetLayers();
}
// Selects an itinerary and renders its tracked instance on the map.
$scope.selectItinerary = function(itinerary) {
$scope.selectedItinerary = itinerary;
$scope.selectInstance(itinerary.instance);
}
// CSS color for a user's valid/invalid counters: red grows with the
// invalid share, green with the valid share; black when every trip is
// still pending (nothing decided yet).
$scope.validColor = function(totals) {
	var notPending = totals.total - totals.pending;
	if (notPending != 0) {
		var r = 127 + Math.floor(128 * Math.pow(totals.invalid / totals.total, 1.5));
		var g = 0 + Math.floor(255 * ((totals.valid) / totals.total));
		return "color:rgb(" + r + "," + g + "," + 64 + ")";
	} else {
		return "color:rgb(0,0,0)";
	}
}
// CSS color for the pending counter: orange when anything is pending,
// black otherwise. (The previous version also computed an unused
// 128*pow(pending/total, 1.5) value — dead code, removed.)
$scope.pendingColor = function(totals) {
	if (totals.pending > 0) {
		return "color:rgb(255,192,128)";
	} else {
		return "color:rgb(0,0,0)";
	}
}
// Removes every overlay (tracks, markers, related-trip lines) from the
// map and clears the layer bookkeeping arrays.
var resetLayers = function() {
$scope.layers.forEach(function(l) {
l.setMap(null);
});
$scope.layers = [];
$scope.relatedLayers.forEach(function(l) {
l.setMap(null);
});
$scope.relatedLayers = [];
}
// Re-runs server-side validation for the trips matching the current
// filter, then reloads the user list.
$scope.revalidate = function() {
spinner.spin(target);
// $http.post("console/validate?fromDate=" + $scope.fromDate.getTime() +
// "&toDate=" + $scope.toDate.getTime() + "&excludeZeroPoints=" +
// $scope.excludeZeroPoints + "&toCheck=" + $scope.toCheck, {}, {"headers" : {
// "appId" : $scope.appId}}).then(function(data) {
$http.post("console/validate?excludeZeroPoints=" + $scope.excludeZeroPoints + "&toCheck=" + $scope.toCheck + "&pendingOnly=" + $scope.pendingOnly + ($scope.allDates ? "" : ("&fromDate=" + $scope.fromDate.getTime() + "&toDate=" + $scope.toDate.getTime())) + "&filterUserId=" + $scope.filterUserId + "&filterTravelId=" + $scope.filterTravelId + "&mean=" + $scope.filterMean, {}, {"headers" : { "appId" : $scope.appId}}).then(function(data) {
load();
spinner.stop();
});
}
// Triggers a server-side synchronization, then reloads the view.
$scope.synchronize = function() {
spinner.spin(target);
$http.post("console/synchronize", {}, {"headers" : { "appId" : $scope.appId}}).then(function(data) {
load();
spinner.stop();
});
}
// $scope.switchCurrentValidity = function(toggle) {
// if (toggle) {
// $http.post("console/itinerary/switchValidity/" + $scope.selectedInstance.id +
// "?value=" + $scope.selectedInstance.changedValidity, {}, {"headers" : {
// "appId" : $scope.appId}}).then(function(data) {
// $scope.selectedInstance.changedValidity = data.data.changedValidity;
// $scope.reselectInstance();
// });
// } else {
// $scope.selectedInstance.changedValidity =
// !$scope.selectedInstance.changedValidity;
// }
// }
// Selects the instance and opens the validity-change confirmation modal.
$scope.openSwitchValidity = function(instance) {
$scope.selectInstance(instance);
$('#confirmModal1').modal();
}
// Opens the modal used to override per-mean distances.
$scope.openChangeDistances = function(instance) {
$('#distancesModal').modal();
}
// Persists the manually chosen validity for the selected instance and
// re-renders it.
$scope.switchCurrentValidity = function() {
$http.post("console/itinerary/changeValidity/" + $scope.selectedInstance.id + "?value=" + $scope.selectedInstance.changedValidity, {}, {"headers" : { "appId" : $scope.appId}}).then(function(data) {
$scope.selectedInstance.changedValidity = data.data.changedValidity;
$scope.reselectInstance();
});
// if (toggle) {
// $http.post("console/itinerary/switchValidity/" + $scope.selectedInstance.id +
// "?value=" + $scope.selectedInstance.changedValidity, {}, {"headers" : {
// "appId" : $scope.appId}}).then(function(data) {
// $scope.selectedInstance.changedValidity = data.data.changedValidity;
// $scope.reselectInstance();
// });
// } else {
// $scope.selectedInstance.changedValidity =
// !$scope.selectedInstance.changedValidity;
// }
}
// Saves manually overridden distances for an instance (empty object
// clears the override).
$scope.overrideDistances = function(instance) {
var distances = instance.overriddenDistances != null && instance.overriddenDistances.length != 0 ? instance.overriddenDistances : {}
$http.post("console/itinerary/overrideDistances/" + instance.id, distances, {"headers" : { "appId" : $scope.appId}}).then(function(data) {
});
}
// Flips the instance's "to check" flag on the server and mirrors the
// server's answer locally.
$scope.toggleToCheck = function(instance) {
$http.post("console/itinerary/toCheck/" + instance.id + "?value=" + !instance.toCheck, {}, {"headers" : { "appId" : $scope.appId}}).then(function(data) {
instance.toCheck = data.data.toCheck;
});
}
// $scope.toggleApproved = function(instance) {
// $http.post("console/itinerary/approve/" + instance.id + "?value=" +
// !instance.approved, {}, {"headers" : { "appId" :
// $scope.appId}}).then(function(data) {
// instance.approved = data.data.approved;
// });
// }
// Approves every trip matching the current filter, then reloads.
$scope.approveAll = function() {
// spinner.spin(target);
$http.post("console/approveFiltered?excludeZeroPoints=" + $scope.excludeZeroPoints + "&toCheck=" + $scope.toCheck + "&pendingOnly=" + $scope.pendingOnly + ($scope.allDates ? "" : ("&fromDate=" + $scope.fromDate.getTime() + "&toDate=" + $scope.toDate.getTime())) + "&filterUserId=" + $scope.filterUserId + "&filterTravelId=" + $scope.filterTravelId + "&mean=" + $scope.filterMean, {}, {"headers" : { "appId" : $scope.appId}}).then(function(data) {
load();
spinner.stop();
});
}
// Requests a server-side report generation for the date range.
$scope.report = function() {
spinner.spin(target);
$http.get("console/report" + ($scope.allDates ? "" : ("?fromDate=" + $scope.fromDate.getTime() + "&toDate=" + $scope.toDate.getTime())), {"headers" : { "appId" : $scope.appId}}).then(function(data) {
load();
spinner.stop();
});
}
// Resets the filter form and every filter field back to its default.
$scope.clearFilter = function() {
document.getElementById("parsForm").reset();
$scope.allDates = false;
$scope.excludeZeroPoints = false;
$scope.toCheck = false;
$scope.pendingOnly = false;
$scope.filterUserId = "";
$scope.filterTravelId = "";
$scope.fromDate = Date.today().previous().saturday().previous().saturday();
$scope.toDate = Date.today().next().saturday().add(-1).minute();
$scope.rankingType = "NONE"
$scope.maxRanking = 50
$timeout(function() {
document.getElementById('fromDate').value = $scope.fromDate.toString('ddd MMM dd HH:mm');
document.getElementById('toDate').value = $scope.toDate.toString('ddd MMM dd HH:mm');
});
}
// True when the given transport type cannot apply to the instance:
// it is neither the free-tracking transport nor used by any planned leg.
$scope.isTTypeDisabled = function(instance, ttype) {
if (instance == null) {
return true;
}
if (ttype == instance.freeTrackingTransport) {
return false;
}
if (instance.itinerary != null) {
// NOTE(review): `i` and `lg` are assigned without var/let and leak
// to the enclosing scope — confirm this is unintentional.
for (i = 0; i < instance.itinerary.data.leg.length; i++) {
lg = instance.itinerary.data.leg[i];
// if (ttype.toUpperCase() == lg.transport.type) {
// return false;
// }
// console.log(ttype.toUpperCase() + " = " +
// $scope.ttypeconversion[ttype.toUpperCase()] + " / " + lg.transport.type);
if ($scope.ttypeconversion[ttype.toUpperCase()] == lg.transport.type) {
return false;
}
}
}
return true;
}
// $scope.getValidityStyle = function(instance) {
// if ((instance.valid & !instance.switchValidity) | (!instance.valid &
// instance.switchValidity)) {
// return true;
// }
// if ((!instance.valid & !instance.switchValidity) | (instance.valid &
// instance.switchValidity)) {
// return false;
// }
// }
// Style predicates mapping the effective validity onto CSS classes.
$scope.hasValidStyle = function(instance) {
var style = $scope.getActualValidity(instance) == 'VALID';
return style;
}
$scope.hasInvalidStyle = function(instance) {
var style = $scope.getActualValidity(instance) == 'INVALID';
return style;
}
$scope.hasPendingStyle = function(instance) {
var style = $scope.getActualValidity(instance) == 'PENDING';
return style;
}
// Effective validity: a manual override (changedValidity) wins over the
// automatic validation result.
$scope.getActualValidity = function(instance) {
if (instance.changedValidity) {
return instance.changedValidity;
}
return instance.validationResult.travelValidity;
}
$scope.reload = function() {
load();
}
// Re-renders the currently selected instance (e.g. after an update).
$scope.reselectInstance = function() {
if ($scope.selectedInstance != null) {
$scope.selectInstance($scope.selectedInstance);
}
}
// When the "show all points" checkbox is ticked, drops a numbered
// marker on every geolocation event of the selected instance.
$scope.showAllPoints = function() {
if (document.getElementById("showAllPoints").checked) {
for (var i = 0; i < $scope.selectedInstance.geolocationEvents.length; i++) {
var e = $scope.selectedInstance.geolocationEvents[i];
var pos = {'lat' : e.latitude, 'lng' : e.longitude};
var s = e.latitude + "_" + e.longitude;
var m = $scope.createMarkerObject(pos, 'circle2', i + 1, true);
$scope.layers.push(m);
}
// $scope.eventsMarkers.set(s,m);
}
}
// Toggles a single event marker at (lat,lng), keyed by coordinate pair.
$scope.newEventMarker = function(lat,lng, i) {
var pos = {'lat' : lat, 'lng' : lng};
var s = lat + "_" + lng;
if (!$scope.eventsMarkers.has(s)) {
var m = $scope.createMarkerObject(pos, 'circle', i);
$scope.layers.push(m);
$scope.eventsMarkers.set(s,m);
} else {
var m = $scope.eventsMarkers.get(s);
m.setMap(null);
$scope.eventsMarkers.delete(s);
}
}
// Toggles a transit route's membership in the selected-routes list.
$scope.toggleRoute = function(route) {
var index = $scope.selectedRoutes.indexOf(route);
if (index != -1) {
$scope.selectedRoutes.splice(index, 1);
} else {
$scope.selectedRoutes.push(route);
}
}
// Sets a route's membership to an explicit boolean value.
$scope.setRoute = function(route, value) {
var index = $scope.selectedRoutes.indexOf(route);
if (index != -1 && !value) {
$scope.selectedRoutes.splice(index, 1);
} else if (index == -1 && value) {
$scope.selectedRoutes.push(route);
}
}
// Mirrors the master "all routes" checkbox onto every route checkbox
// and the selection list.
$scope.toggleAllRoutes = function() {
var checkbox = document.getElementById("allRoutesCheckbox");
var checkboxes = document.getElementsByClassName("routeCheckbox");
var i;
for (i = 0; i < checkboxes.length; i++) {
checkboxes[i].checked = checkbox.checked;
$scope.setRoute(checkboxes[i].id, checkbox.checked);
}
}
// Toggles display of the trips related to the selected itinerary
// (green polylines); acts as show/hide based on current layer state.
$scope.toggleRelated = function() {
if ($scope.relatedLayers.length == 0){
$scope.selectedItinerary.related.forEach(function(rel) {
var line = new google.maps.Polyline({
path : rel.geolocationEvents.map(function(g) {
return {lat: g.latitude, lng: g.longitude};
}),
strokeColor : 'green',
strokeOpacity : 0.8,
strokeWeight : 3,
map : $scope.map
});
$scope.relatedLayers.push(line);
});
} else {
$scope.relatedLayers.forEach(function(layer) {
layer.setMap(null);
});
$scope.relatedLayers = [];
}
}
// Renders a tracked trip instance on the map: sorts its geolocation
// events, derives activity legs, optionally fixes the path (outlier
// removal + accuracy-based smoothing), draws transit route overlays and
// the planned itinerary, and recomputes the free-tracking score string.
$scope.selectInstance = function(instance) {
// Selecting a different instance resets the route checkbox state.
if ($scope.selectedInstance != instance) {
$scope.selectedRoutes = [];
var checkboxes = document.getElementsByClassName("routeCheckbox");
var i;
for (i = 0; i < checkboxes.length; i++) {
checkboxes[i].checked = false;
}
}
$scope.selectedInstance = instance;
// NOTE(review): assigned without var — presumably meant to reset
// $scope.eventsMarkers; confirm.
eventsMarkers = new Map();
resetLayers();
// SHOW TRACKED DATA
var coordinates = [];
instance.geolocationEvents.sort(function(a, b) {
return a.recorded_at - b.recorded_at;
});
var bounds = new google.maps.LatLngBounds();
// Group consecutive events with the same activity type into "legs";
// 'unknown' events always start a leg of their own.
instance.legs = [];
var lastLeg = {
activity_type : null
};
var cx = 0;
var cy = 0;
instance.geolocationEvents.forEach(function(e) {
cx += e.longitude;
cy += e.latitude;
var p = {
lat : e.latitude,
lng : e.longitude,
acc: e.accuracy,
recorded_at : e.recorded_at
};
coordinates.push(p);
bounds.extend(new google.maps.LatLng(p.lat, p.lng));
var type = e.activity_type;
if (type != lastLeg.activity_type && type != 'unknown') {
var leg = angular.copy(e);
leg.count = 1;
instance.legs.push(leg);
lastLeg = leg;
} else if (type == 'unknown') {
var leg = angular.copy(e);
leg.count = 1;
instance.legs.push(leg);
} else {
lastLeg.count++;
lastLeg.recorded_till = e.recorded_at;
}
});
// coordinates.splice(0,1);
// coordinates.splice(coordinates.length-1,1);
// Score string shows "[raw, fixed]" when path fixing is enabled.
if (instance.freeTrackingTransport) {
$scope.scores = "[" + computeFreeTrackingScore(coordinates, instance.freeTrackingTransport);
}
if ($scope.fixpaths) {
// Prefer the server-computed snapped polyline when available;
// otherwise clean the raw track locally.
var polyline = instance.validationResult.validationStatus.polyline
if (polyline) {
var path = google.maps.geometry.encoding.decodePath(polyline);
var line = new google.maps.Polyline({
path : path,
strokeColor : 'DeepPink',
strokeOpacity : 0.8,
strokeWeight : 5,
map : $scope.map
});
$scope.layers.push(line);
} else {
coordinates = removeOutliers(coordinates);
coordinates = transform(coordinates);
}
if (instance.freeTrackingTransport) {
$scope.scores += "," + computeFreeTrackingScore(coordinates, instance.freeTrackingTransport) + "]";
}
} else {
$scope.scores += ",??]";
}
if (!instance.freeTrackingTransport) {
$scope.scores = "";
}
$scope.map.fitBounds(bounds);
newMarker(coordinates[0], 'ic_start');
newMarker(coordinates[coordinates.length - 1], 'ic_stop');
// Overlay candidate transit routes (train/boat when "all routes" is
// ticked; bus routes individually selected by checkbox).
if (instance.routesPolylines != null) {
if (instance.routesPolylines["train"] != null && document.getElementById("allRoutesCheckbox").checked) {
instance.routesPolylines["train"].forEach(function(polyline) {
var path = google.maps.geometry.encoding.decodePath(polyline);
var line = new google.maps.Polyline({
path : path,
strokeColor : 'brown',
strokeOpacity : 0.8,
strokeWeight : 5,
map : $scope.map
});
$scope.layers.push(line);
line = new google.maps.Polyline({
path : path,
strokeColor : 'black',
strokeOpacity : 1,
strokeWeight : 1,
map : $scope.map
});
$scope.layers.push(line);
});
}
if (instance.routesPolylines["boat"] != null && document.getElementById("allRoutesCheckbox").checked) {
instance.routesPolylines["boat"].forEach(function(polyline) {
var path = google.maps.geometry.encoding.decodePath(polyline);
var line = new google.maps.Polyline({
path : path,
strokeColor : 'brown',
strokeOpacity : 0.8,
strokeWeight : 5,
map : $scope.map
});
$scope.layers.push(line);
line = new google.maps.Polyline({
path : path,
strokeColor : 'black',
strokeOpacity : 1,
strokeWeight : 1,
map : $scope.map
});
$scope.layers.push(line);
});
}
if (instance.routesPolylines["bus"] != null) {
Object.keys(instance.routesPolylines["bus"]).forEach(function(route) {
var index = $scope.selectedRoutes.indexOf(route);
if (index != -1) {
var path = google.maps.geometry.encoding.decodePath(instance.routesPolylines["bus"][route]);
var line = new google.maps.Polyline({
path : path,
strokeColor : 'orange',
strokeOpacity : 0.8,
strokeWeight : 5,
map : $scope.map
});
$scope.layers.push(line);
line = new google.maps.Polyline({
path : path,
strokeColor : 'black',
strokeOpacity : 1,
strokeWeight : 1,
map : $scope.map
});
$scope.layers.push(line);
var m1 = $scope.createMarkerObject(path[0], 'circle_start', route, true);
$scope.layers.push(m1);
m1 = $scope.createMarkerObject(path[path.length - 1], 'circle_stop', route, true);
$scope.layers.push(m1);
}
});
}
}
// The tracked path itself (semi-transparent when showing fixed path).
var opacity = $scope.fixpaths ? 0.75 : 1.0;
var path = new google.maps.Polyline({
path : coordinates,
geodesic : true,
strokeColor : 'blue',
strokeOpacity : opacity,
strokeWeight : 2
});
$scope.layers.push(path);
path.setMap($scope.map);
// $SHOW PLANNED DATA
if (instance.itinerary != null) {
instance.itinerary.data.leg.forEach(function(leg) {
var path = google.maps.geometry.encoding.decodePath(leg.legGeometery.points);
var line = new google.maps.Polyline({
path : path,
strokeColor : 'green',
strokeOpacity : 0.8,
strokeWeight : 2,
map : $scope.map
});
newMarker(path[0], 'step');
$scope.layers.push(line);
});
}
$scope.showAllPoints();
}
// Creates a marker, registers it as a removable layer, and returns it.
var newMarker = function(pos, icon, i) {
var m = $scope.createMarkerObject(pos, icon, i);
$scope.layers.push(m);
return m;
};
// Builds a google.maps.Marker at pos with the given icon; when i is
// provided it is rendered as a label whose font size shrinks with the
// label length. `bottom` pushes the marker below others via zIndex.
$scope.createMarkerObject = function(pos, icon, i, bottom) {
var m;
if (i) {
// var sz = 32 - Math.pow(("" + i).length,1.1) * 3 + "px";
var len = ("" + i).length;
var sz = (len < 2 ? 22 : len < 4 ? 14 : len < 6 ? 8 : 6) + "px";
m = new google.maps.Marker({
position : pos,
icon : '../img/' + icon + '.png',
label : { text : "" + i, fontWeight : "bold", fontSize : sz, color: "DarkRed"},
map : $scope.map,
draggable : false,
labelContent : "A",
labelAnchor : new google.maps.Point(3, 30),
labelClass : "labels",
// NOTE(review): `? :` binds looser than `+`, so this evaluates as
// (10 + i + bottom) ? 0 : 100 — likely not the intended layering.
zIndex : (10 + i + bottom ? 0 : 100)
// draggable: true
});
} else {
m = new google.maps.Marker({
position : pos,
icon : '../img/' + icon + '.png',
map : $scope.map,
draggable : false,
labelContent : "A",
labelAnchor : new google.maps.Point(3, 30),
labelClass : "labels",
zIndex : 0
});
}
return m;
}
// Degrees-to-radians helper used by the haversine computation below.
if (typeof (Number.prototype.toRad) === "undefined") {
	Number.prototype.toRad = function () {
		return this * Math.PI / 180;
	}
}
// Haversine great-circle distance between two {lat, lng} points, in km.
// Returns false (and logs) when either point lacks a finite coordinate.
// Fix: the previous truthiness check (pt[0] && pt[1]) wrongly rejected
// the valid coordinates 0° latitude / 0° longitude.
function dist(p1, p2) {
	function hasCoords(p) {
		return p != null && p.lat != null && p.lng != null &&
			isFinite(Number(p.lat)) && isFinite(Number(p.lng));
	}
	if (!hasCoords(p1) || !hasCoords(p2)) {
		console.log('cannot calculate distance!');
		return false;
	}
	var lat1 = Number(p1.lat);
	var lon1 = Number(p1.lng);
	var lat2 = Number(p2.lat);
	var lon2 = Number(p2.lng);
	var R = 6371; // km
	// var R = 3958.76; // miles
	var dLat = (lat2 - lat1).toRad();
	var dLon = (lon2 - lon1).toRad();
	var rLat1 = lat1.toRad();
	var rLat2 = lat2.toRad();
	var a = Math.sin(dLat / 2) * Math.sin(dLat / 2) +
		Math.sin(dLon / 2) * Math.sin(dLon / 2) * Math.cos(rLat1) * Math.cos(rLat2);
	var c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));
	return R * c;
}
// Re-samples a track: for every consecutive point pair, emits two
// accuracy-weighted intermediate points (see transformPair).
function transform(array) {
var res = [];
for (var i = 1; i < array.length; i++) {
transformPair(array[i-1], array[i], res, dist);
}
return res;
}
// Interpolates a coordinate between v1 and v2, shrinking the segment by
// the two accuracy radii a1/a2 (metres) over distance d (km). When the
// accuracy circles overlap the segment, both ends collapse to one
// weighted point.
function compute(v1, a1, v2, a2, d) {
if ((a1 + a2)/1000 > d) {
var v = a1 > a2 ? (v2 - (v2-v1)*a2/a1) : (v1+ (v2-v1)*a1/a2);
return [v,v];
}
return [v1 + (v2-v1)*a1/d/1000, v2 - (v2-v1)*a2/d/1000];
}
// Accuracy-weighted latitude pair, normalised so p1.lat <= p2.lat.
function computeLats(p1, p2, d) {
if (p1.lat > p2.lat) {
var res = computeLats(p2,p1, d);
return [res[1],res[0]];
}
return compute(p1.lat, p1.acc, p2.lat, p2.acc, d);
}
// Accuracy-weighted longitude pair, normalised so p1.lng <= p2.lng.
function computeLngs(p1, p2, d) {
if (p1.lng > p2.lng) {
var res = computeLngs(p2, p1, d);
return [res[1],res[0]];
}
return compute(p1.lng, p1.acc, p2.lng, p2.acc, d);
}
// Pushes the two interpolated points for segment p1->p2 into res;
// zero-length segments (distFunc == 0) are dropped.
function transformPair(p1, p2, res, distFunc) {
var d = distFunc(p1,p2);
if (d != 0) {
var lats = computeLats(p1,p2,d);
var lngs = computeLngs(p1,p2,d);
res.push({lat: lats[0], lng: lngs[0], acc: (p1.acc + p2.acc) / 2, recorded_at: (p1.recorded_at + p2.recorded_at) / 2});
res.push({lat: lats[1], lng: lngs[1], acc: (p1.acc + p2.acc) / 2, recorded_at: (p1.recorded_at + p2.recorded_at) / 2});
}
}
// Removes GPS outliers from a track: a point is dropped when the angle
// it forms with its neighbours is nearly degenerate (a spike), or when
// it implies a speed over 4x the track's average. The first/last points
// have dedicated speed-only checks.
// Fix: outlier indices were collected with `if (index)`, which is falsy
// for index 0, so a spiking FIRST point could never be removed even
// though the `index = 0` branch exists; now tested against null.
// (Also dropped the unused `removed` local.)
function removeOutliers(array) {
var distance = 0;
var itArray = array;
var res = [];
// Total distance over forward-in-time segments, in metres.
for (var i = 1; i < itArray.length; i++) {
var d = dist(itArray[i], itArray[i - 1]) * 1000;
t = itArray[i].recorded_at - itArray[i - 1].recorded_at;
if (t > 0) {
distance += d;
}
}
var t = (itArray[itArray.length - 1].recorded_at - itArray[0].recorded_at) / 1000;
var avgSpeed = (3.6 * distance / t); // km/h
var toRemove = [];
// console.log(">" + itArray.length);
res = [];
toRemove = [];
for (var i = 1; i < itArray.length - 1; i++) {
// Speeds (km/h) into and out of point i.
var d1 = dist(itArray[i], itArray[i - 1]) * 1000;
var t1 = (itArray[i].recorded_at - itArray[i - 1].recorded_at) / 1000;
var s1 = 0;
if (t1 > 0) {
s1 = (3.6 * d1 / t1);
}
var d2 = dist(itArray[i], itArray[i + 1]) * 1000;
var t2 = (itArray[i + 1].recorded_at - itArray[i].recorded_at) / 1000;
var s2 = 0;
if (t2 > 0) {
s2 = (3.6 * d2 / t2);
}
var index = null;
// Angle at point i (law of cosines); near-zero means an out-and-back spike.
var d3 = dist(itArray[i - 1], itArray[i + 1]) * 1000;
var a = Math.acos((d1 * d1 + d2 * d2 - d3 * d3) / (2 * d1 * d2));
if (a < 0.017453292519943 * 3) {
index = i;
}
if (a < 0.017453292519943 * 30 && s1 > 4 * avgSpeed && s2 > 4 * avgSpeed && i != 1 && i != itArray.length - 2) {
index = i;
} else if (s1 > 4 * avgSpeed && i == 1) {
index = 0;
} else if (s2 > 4 * avgSpeed && i == itArray.length - 2) {
index = itArray.length - 1;
}
if (index !== null) {
// console.log(index);
toRemove.push(index);
}
}
// Keep everything that was not flagged.
for (var i = 0; i < itArray.length; i++) {
if (toRemove.indexOf(i) == -1) {
res.push(itArray[i]);
}
}
itArray = res;
// console.log("<" + res.length);
return res;
}
// Gamification score for a free-tracked trip: total track length (km)
// capped per transport (walk: 3.5 km x10, with a 250 m minimum;
// bike: 7 km x5), then multiplied by 1.5 and truncated to an integer.
function computeFreeTrackingScore(array, ttype) {
var distance = 0;
var score = 0.0;
for (var i = 1; i < array.length; i++) {
var d = dist(array[i], array[i - 1]);
distance += d;
}
if (ttype == "walk") {
score = (distance < 0.25 ? 0 : Math.min(3.5, distance)) * 10;
} else if (ttype == "bike") {
score = Math.min(7, distance) * 5;
}
score *= 1.5;
return parseInt(score);
}
// Opens one date picker ($from = true for the from-date) and closes the
// other; stops the click from bubbling into the document handler.
$scope.toggleOpen = function($event, $from) {
$event.preventDefault();
$event.stopPropagation();
if ($from) {
$scope.openedFrom = !$scope.openedFrom;
$scope.openedTo = false;
} else {
$scope.openedTo = !$scope.openedTo;
$scope.openedFrom = false;
}
};
// Sizes the side panel and creates the Google map centred on Trento.
$scope.initMap = function() {
document.getElementById("left-scrollable").style.height = (window.innerHeight - 283) + "px";
// document.getElementById("right-scrollable").style.height =
// (window.innerHeight / 2 - 60) + "px";
if (!document.getElementById('map'))
return;
var ll = null;
var mapOptions = null;
ll = {
lat : 46.073769,
lng : 11.125985
};
mapOptions = {
zoom : 15,
center : ll,
gestureHandling: 'greedy'
}
$scope.map = new google.maps.Map(document.getElementById('map'), mapOptions);
}
// Human-readable trip kind: planned, shared (driver/passenger), or
// free tracking with its transport.
$scope.tripType = function(itinerary) {
if (itinerary.instance.itinerary != null ) {
return 'Planned'
}
if ($scope.isShared(itinerary.instance)) {
return 'Shared - ' + ($scope.isDriver(itinerary.instance.sharedTravelId) ? 'Driver' : 'Passenger') + ' (' + itinerary.instance.sharedTravelId+ ')';
}
return ('Free tracking - ' + itinerary.instance.freeTrackingTransport);
}
$scope.isShared = function(instance) {
return instance.sharedTravelId != null;
}
// Shared-travel ids starting with 'D' belong to the driver.
$scope.isDriver = function(id) {
return id.indexOf('D') == 0;
}
$scope.initMap();
})
// Players list + CSV ranking export controller.
gamificationConsole.controller('UsersCtrl', function($scope, $timeout, $http) {
// Fetches `url` and triggers a browser download of the body as
// `name`.csv via a temporary anchor + object URL.
// NOTE(review): the inner `const url` shadows the parameter, and the
// object URL is never revoked (URL.revokeObjectURL) — small leak.
var download = function(url, name) {
$http.get(url).success(function(data){
const downloadLink = document.createElement("a");
const blob = new Blob([data],{type: 'text/csv'});
const url = URL.createObjectURL(blob);
downloadLink.href = url;
downloadLink.download = name+'.csv';
document.body.appendChild(downloadLink);
downloadLink.click();
document.body.removeChild(downloadLink);
});
}
// Load players sorted by numeric id.
$http.get('console/players').success(function(data){
if (data) {
data.sort(function(a,b){
return parseInt(a.id) - parseInt(b.id);
});
}
$scope.users = data;
});
// Top-50 exports: previous week, a chosen week, and the global ranking.
$scope.loadLast50 = function() {
download('console/rating?rankingType=PREVIOUS&count=50', 'week50');
}
$scope.loadWeek = function() {
var date = prompt('Specify reference date (yyyy-MM-dd)');
download('console/rating?date='+date+'&count=50', 'week50');
}
$scope.loadGlobal50 = function() {
download('console/rating?rankingType=GLOBAL&count=50', 'global50');
}
})
// Event check-in screen: loads the available events and the player list,
// and lets an operator check a player in to the selected event.
gamificationConsole.controller('CheckinCtrl', function($scope, $timeout, $http) {
    // Currently selected event name; placeholder until events are loaded.
    $scope.event = 'checkin';
    $http.get('console/checkin/events').success(function(data){
        $scope.events = data;
        if (data) {
            $scope.event = data[0];
            // Load players only once an event is known, then flag who is
            // already checked in for it.
            $http.get('console/players').success(function(data) {
                if (data) {
                    data.sort(function(a,b){
                        return parseInt(a.id) - parseInt(b.id);
                    });
                }
                $scope.updateCheckIn(data);
                $scope.users = data;
            });
        }
    });
    // Recompute every player's checkedIn flag against the selected event.
    $scope.updateCheckIn = function(data){
        data.forEach(function(p) {
            p.checkedIn = false;
            if (p.eventsCheckIn) {
                p.eventsCheckIn.forEach(function(e) {
                    if(e.name === $scope.event) {
                        p.checkedIn = true;
                    }
                });
            }
        });
    }
    $scope.changeEvent = function() {
        $scope.updateCheckIn($scope.users);
    }
    // Remember the player and ask for confirmation before checking in.
    $scope.checkIn = function(p) {
        $scope.currentPlayer = p;
        $('#confirmModalCheckin').modal();
    }
    // Confirmed: persist the check-in and refresh the player's local state
    // from the server response.
    $scope.doCheckIn = function() {
        var p = $scope.currentPlayer;
        $http.put('console/players/'+p.id+'/checkin/'+$scope.event,{}).success(function(data) {
            var checkedIn = false;
            if (data.eventsCheckIn) {
                data.eventsCheckIn.forEach(function(e) {
                    if(e.name === $scope.event) {
                        checkedIn = true;
                    }
                });
            }
            p.eventsCheckIn = data.eventsCheckIn;
            p.checkedIn = checkedIn;
            $scope.currentPlayer = null;
        });
    }
})
// Bulk e-mail screen: edit the message template and send it to everybody
// or to an explicit recipient list.
gamificationConsole.controller('EmailCtrl', function($scope, $timeout, $http) {
    // all: send to everybody; emails: explicit recipients; html: body,
    // pre-filled from the server-side template below.
    $scope.data = {all: false, emails: [], html: null};
    $http.get('console/email/template').success(function(data){
        if (data) {
            $scope.data.html = data.template;
        }
    });
    // Ask for confirmation -- unless no recipient is selected at all.
    $scope.send = function(){
        if (!$scope.data.all && $scope.data.emails.length == 0) return;
        $('#confirmModalSend').modal();
    }
    // Confirmed: submit the payload and report success or failure.
    $scope.doSend = function() {
        $http.put('console/email',$scope.data).then(function(data) {
            $('#confirmModalSent').modal();
        }, function(){
            alert('Error sending data');
        });
    }
})
.directive('toggle', function() {
return {
span : function(scope, element, attrs) {
if (attrs.toggle == "tooltip") {
$(element).tooltip();
}
if (attrs.toggle == "popover") {
$(element).popover();
}
}
};
})
|
ArielLily/ClickHouse | dbms/src/DataStreams/tests/union_stream.cpp | #include <iostream>
#include <iomanip>
#include <IO/WriteBufferFromFileDescriptor.h>
#include <Storages/System/StorageSystemNumbers.h>
#include <DataStreams/LimitBlockInputStream.h>
#include <DataStreams/UnionBlockInputStream.h>
#include <DataStreams/BlockExtraInfoInputStream.h>
#include <DataStreams/IBlockOutputStream.h>
#include <DataStreams/copyData.h>
#include <DataTypes/DataTypesNumber.h>
#include <DataTypes/DataTypeString.h>
#include <Columns/ColumnString.h>
#include <Columns/ColumnsNumber.h>
#include <Interpreters/Context.h>
using namespace DB;
/// Reads `number` from three limited/offset streams over system.numbers,
/// unions them with 2 worker threads and dumps the result to stderr in
/// TabSeparated format.
void test1()
{
    Context context = Context::createGlobal();

    StoragePtr table = StorageSystemNumbers::create("numbers", false);

    Names column_names;
    column_names.push_back("number");

    QueryProcessingStage::Enum stage1;
    QueryProcessingStage::Enum stage2;
    QueryProcessingStage::Enum stage3;

    BlockInputStreams streams;
    // Each source is capped to 30 rows, starting at a different offset.
    streams.emplace_back(std::make_shared<LimitBlockInputStream>(table->read(column_names, 0, context, stage1, 1, 1)[0], 30, 30000));
    streams.emplace_back(std::make_shared<LimitBlockInputStream>(table->read(column_names, 0, context, stage2, 1, 1)[0], 30, 2000));
    streams.emplace_back(std::make_shared<LimitBlockInputStream>(table->read(column_names, 0, context, stage3, 1, 1)[0], 30, 100));

    UnionBlockInputStream<> union_stream(streams, nullptr, 2);

    WriteBufferFromFileDescriptor wb(STDERR_FILENO);
    Block sample = table->getSampleBlock();
    BlockOutputStreamPtr out = context.getOutputFormat("TabSeparated", wb, sample);

    // Drain the union and format every block as it arrives.
    while (Block block = union_stream.read())
    {
        out->write(block);
        wb.next();
    }
}
/// Same as test1(), but every source stream is tagged with BlockExtraInfo
/// (host / resolved address / port / user). The union runs in ExtraInfo
/// mode so the origin of each block can be appended as extra columns.
void test2()
{
    Context context = Context::createGlobal();

    StoragePtr table = StorageSystemNumbers::create("numbers", false);

    Names column_names;
    column_names.push_back("number");

    QueryProcessingStage::Enum stage1;
    QueryProcessingStage::Enum stage2;
    QueryProcessingStage::Enum stage3;

    BlockExtraInfo extra_info1;
    extra_info1.host = "host1";
    extra_info1.resolved_address = "127.0.0.1";
    extra_info1.port = 9000;
    extra_info1.user = "user1";

    BlockExtraInfo extra_info2;
    extra_info2.host = "host2";
    extra_info2.resolved_address = "127.0.0.2";
    extra_info2.port = 9001;
    extra_info2.user = "user2";

    BlockExtraInfo extra_info3;
    extra_info3.host = "host3";
    extra_info3.resolved_address = "127.0.0.3";
    extra_info3.port = 9003;
    extra_info3.user = "user3";

    BlockInputStreams streams;

    BlockInputStreamPtr stream1 = std::make_shared<LimitBlockInputStream>(table->read(column_names, 0, context, stage1, 1, 1)[0], 30, 30000);
    stream1 = std::make_shared<BlockExtraInfoInputStream>(stream1, extra_info1);
    streams.emplace_back(stream1);

    BlockInputStreamPtr stream2 = std::make_shared<LimitBlockInputStream>(table->read(column_names, 0, context, stage2, 1, 1)[0], 30, 2000);
    stream2 = std::make_shared<BlockExtraInfoInputStream>(stream2, extra_info2);
    streams.emplace_back(stream2);

    BlockInputStreamPtr stream3 = std::make_shared<LimitBlockInputStream>(table->read(column_names, 0, context, stage3, 1, 1)[0], 30, 100);
    stream3 = std::make_shared<BlockExtraInfoInputStream>(stream3, extra_info3);
    streams.emplace_back(stream3);

    UnionBlockInputStream<StreamUnionMode::ExtraInfo> union_stream(streams, nullptr, 2);

    // Output header: the data column plus the four extra-info columns.
    auto getSampleBlock = []()
    {
        Block block;
        ColumnWithTypeAndName col;

        col.name = "number";
        col.type = std::make_shared<DataTypeUInt64>();
        col.column = col.type->createColumn();
        block.insert(col);

        col.name = "host_name";
        col.type = std::make_shared<DataTypeString>();
        col.column = col.type->createColumn();
        block.insert(col);

        col.name = "host_address";
        col.type = std::make_shared<DataTypeString>();
        col.column = col.type->createColumn();
        block.insert(col);

        col.name = "port";
        col.type = std::make_shared<DataTypeUInt16>();
        col.column = col.type->createColumn();
        block.insert(col);

        col.name = "user";
        col.type = std::make_shared<DataTypeString>();
        col.column = col.type->createColumn();
        block.insert(col);

        return block;
    };

    WriteBufferFromFileDescriptor wb(STDERR_FILENO);
    Block sample = getSampleBlock();
    BlockOutputStreamPtr out = context.getOutputFormat("TabSeparated", wb, sample);

    while (Block block = union_stream.read())
    {
        const auto & col = block.safeGetByPosition(0);
        auto extra_info = union_stream.getBlockExtraInfo();

        ColumnPtr host_name_column = std::make_shared<ColumnString>();
        ColumnPtr host_address_column = std::make_shared<ColumnString>();
        ColumnPtr port_column = std::make_shared<ColumnUInt16>();
        ColumnPtr user_column = std::make_shared<ColumnString>();

        size_t row_count = block.rows();
        for (size_t i = 0; i < row_count; ++i)
        {
            // BUGFIX: host name and resolved address were swapped here --
            // the "host_name" column was filled with the resolved address
            // and "host_address" with the host name.
            host_name_column->insert(extra_info.host);
            host_address_column->insert(extra_info.resolved_address);
            port_column->insert(static_cast<UInt64>(extra_info.port));
            user_column->insert(extra_info.user);
        }

        Block out_block;

        out_block.insert(ColumnWithTypeAndName(col.column->clone(), col.type, col.name));
        out_block.insert(ColumnWithTypeAndName(host_name_column, std::make_shared<DataTypeString>(), "host_name"));
        out_block.insert(ColumnWithTypeAndName(host_address_column, std::make_shared<DataTypeString>(), "host_address"));
        out_block.insert(ColumnWithTypeAndName(port_column, std::make_shared<DataTypeUInt16>(), "port"));
        out_block.insert(ColumnWithTypeAndName(user_column, std::make_shared<DataTypeString>(), "user"));

        out->write(out_block);
        wb.next();
    }
}
/// Runs both union-stream smoke tests; prints the exception (with stack
/// trace) and exits non-zero if either one throws.
int main(int argc, char ** argv)
{
    try
    {
        test1();
        test2();
    }
    catch (const Exception & e)
    {
        std::cerr << e.what() << ", " << e.displayText() << std::endl
            << std::endl
            << "Stack trace:" << std::endl
            << e.getStackTrace().toString();
        return 1;
    }

    return 0;
}
|
uibcdf/MolModSAKs | molsysmt/physchem/mass.py | import numpy as np
from molsysmt import puw
def mass(molecular_system, target='atom', selection='all'):
    """Return atomic masses aggregated at the requested level.

    target='atom' gives one mass per selected atom; group/component/
    molecule/chain/entity give the summed mass of each element's atoms;
    'system' gives the total mass of all atoms. Results carry the unit
    declared alongside the physical mass table.
    """
    from molsysmt.basic import get
    from molsysmt.physico_chemical_properties.atoms.mass import physical, units
    from molsysmt._private._digestion import digest_target

    target = digest_target(target)
    mass_table = physical

    collected = []
    if target == 'atom':
        for atom_type in get(molecular_system, target=target, selection=selection, atom_type=True):
            collected.append(mass_table[atom_type.capitalize()])
    elif target in ('group', 'component', 'molecule', 'chain', 'entity'):
        # One entry per element: the sum over that element's atom types.
        for element_atom_types in get(molecular_system, target=target, selection=selection, atom_type=True):
            collected.append(np.sum([mass_table[name.capitalize()] for name in element_atom_types]))
    elif target == 'system':
        all_atom_types = get(molecular_system, target='atom', selection='all', atom_type=True)
        collected.append(np.sum([mass_table[name.capitalize()] for name in all_atom_types]))

    if target == 'system':
        # Scalar quantity rather than a one-element array.
        return collected[0] * puw.unit(units)
    return puw.quantity(np.array(collected), units)
|
xxm1995/bootx-platform | bootx-common-starters/common-starter-audit-log/src/main/java/cn/bootx/starter/audit/log/service/LoginLogService.java | <filename>bootx-common-starters/common-starter-audit-log/src/main/java/cn/bootx/starter/audit/log/service/LoginLogService.java
package cn.bootx.starter.audit.log.service;
import cn.bootx.common.core.rest.PageResult;
import cn.bootx.common.core.rest.param.PageParam;
import cn.bootx.starter.audit.log.dto.LoginLogDto;
import cn.bootx.starter.audit.log.param.LoginLogParam;
import org.springframework.scheduling.annotation.Async;
/**
 * Login log service.
 * @author xxm
 * @date 2021/12/2
 */
public interface LoginLogService {

    /**
     * Add a login log entry (runs asynchronously on the shared executor).
     */
    @Async("asyncExecutor")
    void add(LoginLogParam loginLog);

    /**
     * Get a single login log by its id.
     */
    LoginLogDto findById(Long id);

    /**
     * Query login logs page by page, filtered by the given parameters.
     */
    PageResult<LoginLogDto> page(PageParam pageParam, LoginLogParam loginLogParam);

    /**
     * Delete the login log with the given id.
     */
    void delete(Long id);
}
|
Chrisaiki/sqrl | app/shared/components/Global/Message/Contract/Transfer.js | // @flow
import React, { Component } from 'react';
import { Header, List } from 'semantic-ui-react';
import { translate } from 'react-i18next';
import WalletMessageContractBase from './Base';
// Renders the confirmation summary for a `transfer` contract action in
// the wallet dialog. All copy comes from the `contracts` i18n namespace,
// with `data` supplying the interpolation values.
class WalletMessageContractTransfer extends Component<Props> {
  render() {
    const { data, t } = this.props;
    return (
      <WalletMessageContractBase>
        <Header>
          {t('system_transfer_title')}
        </Header>
        <p>{t('system_transfer_1', data)}</p>
        <List style={{ marginLeft: '2em' }}>
          <List.Item>{t('system_transfer_2', data)}</List.Item>
          <List.Item>{t('system_transfer_3', data)}</List.Item>
          <List.Item>{t('system_transfer_4', data)}</List.Item>
        </List>
        <p>{t('system_transfer_5', data)}</p>
        <p>{t('system_transfer_6', data)}</p>
      </WalletMessageContractBase>
    );
  }
}
export default translate('contracts')(WalletMessageContractTransfer);
|
RavensburgOP/core | homeassistant/components/meteo_france/sensor.py | """Support for Meteo-France raining forecast sensor."""
import logging
from meteofrance_api.helpers import (
get_warning_text_status_from_indice_color,
readeable_phenomenoms_dict,
)
from homeassistant.components.sensor import SensorEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ATTRIBUTION
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
from homeassistant.util import dt as dt_util
from .const import (
ATTR_NEXT_RAIN_1_HOUR_FORECAST,
ATTR_NEXT_RAIN_DT_REF,
ATTRIBUTION,
COORDINATOR_ALERT,
COORDINATOR_FORECAST,
COORDINATOR_RAIN,
DOMAIN,
ENTITY_API_DATA_PATH,
ENTITY_DEVICE_CLASS,
ENTITY_ENABLE,
ENTITY_ICON,
ENTITY_NAME,
ENTITY_UNIT,
MANUFACTURER,
MODEL,
SENSOR_TYPES,
)
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities
) -> None:
    """Set up the Meteo-France sensor platform."""
    domain_data = hass.data[DOMAIN][entry.entry_id]
    coordinator_forecast = domain_data[COORDINATOR_FORECAST]
    coordinator_rain = domain_data[COORDINATOR_RAIN]
    coordinator_alert = domain_data[COORDINATOR_ALERT]

    entities = []
    for sensor_type in SENSOR_TYPES:
        # Rain and alert sensors exist only when their coordinator does.
        if sensor_type == "next_rain":
            if coordinator_rain:
                entities.append(MeteoFranceRainSensor(sensor_type, coordinator_rain))
            continue
        if sensor_type == "weather_alert":
            if coordinator_alert:
                entities.append(MeteoFranceAlertSensor(sensor_type, coordinator_alert))
            continue
        # Probability sensors need the optional probability forecast data.
        if sensor_type in ("rain_chance", "freeze_chance", "snow_chance"):
            if coordinator_forecast.data.probability_forecast:
                entities.append(MeteoFranceSensor(sensor_type, coordinator_forecast))
            else:
                _LOGGER.warning(
                    "Sensor %s skipped for %s as data is missing in the API",
                    sensor_type,
                    coordinator_forecast.data.position["name"],
                )
            continue
        entities.append(MeteoFranceSensor(sensor_type, coordinator_forecast))

    async_add_entities(entities, False)
class MeteoFranceSensor(CoordinatorEntity, SensorEntity):
    """Representation of a Meteo-France sensor."""

    def __init__(self, sensor_type: str, coordinator: DataUpdateCoordinator) -> None:
        """Initialize the Meteo-France sensor."""
        super().__init__(coordinator)
        self._type = sensor_type
        # Forecast coordinators carry the monitored place under `position`;
        # derive the entity name and a lat/lon-based unique id from it.
        if hasattr(self.coordinator.data, "position"):
            city_name = self.coordinator.data.position["name"]
            self._name = f"{city_name} {SENSOR_TYPES[self._type][ENTITY_NAME]}"
            self._unique_id = f"{self.coordinator.data.position['lat']},{self.coordinator.data.position['lon']}_{self._type}"

    @property
    def unique_id(self):
        """Return the unique id."""
        return self._unique_id

    @property
    def name(self):
        """Return the name."""
        return self._name

    @property
    def device_info(self):
        """Return the device info."""
        return {
            "identifiers": {(DOMAIN, self.platform.config_entry.unique_id)},
            "name": self.coordinator.name,
            "manufacturer": MANUFACTURER,
            "model": MODEL,
            "entry_type": "service",
        }

    @property
    def state(self):
        """Return the state."""
        # SENSOR_TYPES stores the value's location in the coordinator data
        # as a colon-separated attribute/key path.
        path = SENSOR_TYPES[self._type][ENTITY_API_DATA_PATH].split(":")
        data = getattr(self.coordinator.data, path[0])

        # Specific case for probability forecast
        if path[0] == "probability_forecast":
            if len(path) == 3:
                # This is a fix compared to other entities as the first index
                # is always null in the API result for unknown reason
                value = _find_first_probability_forecast_not_null(data, path)
            else:
                value = data[0][path[1]]

        # General case
        else:
            if len(path) == 3:
                value = data[path[1]][path[2]]
            else:
                value = data[path[1]]

        if self._type in ("wind_speed", "wind_gust"):
            # convert API wind speed from m/s to km/h
            value = round(value * 3.6)

        return value

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return SENSOR_TYPES[self._type][ENTITY_UNIT]

    @property
    def icon(self):
        """Return the icon."""
        return SENSOR_TYPES[self._type][ENTITY_ICON]

    @property
    def device_class(self):
        """Return the device class."""
        return SENSOR_TYPES[self._type][ENTITY_DEVICE_CLASS]

    @property
    def entity_registry_enabled_default(self) -> bool:
        """Return if the entity should be enabled when first added to the entity registry."""
        return SENSOR_TYPES[self._type][ENTITY_ENABLE]

    @property
    def extra_state_attributes(self):
        """Return the state attributes."""
        return {ATTR_ATTRIBUTION: ATTRIBUTION}
class MeteoFranceRainSensor(MeteoFranceSensor):
    """Representation of a Meteo-France rain sensor."""

    @property
    def state(self):
        """Return the UTC timestamp of the next forecast rain, if any."""
        # Scan the forecast cadrans for the first one announcing rain.
        for cadran in self.coordinator.data.forecast:
            if cadran["rain"] > 1:
                return dt_util.utc_from_timestamp(cadran["dt"]).isoformat()
        return None

    @property
    def extra_state_attributes(self):
        """Return the state attributes."""
        reference_dt = self.coordinator.data.forecast[0]["dt"]
        # Map each cadran to "<minutes from reference> min" -> description.
        forecast_by_offset = {
            f"{int((item['dt'] - reference_dt) / 60)} min": item["desc"]
            for item in self.coordinator.data.forecast
        }
        return {
            ATTR_NEXT_RAIN_DT_REF: dt_util.utc_from_timestamp(reference_dt).isoformat(),
            ATTR_NEXT_RAIN_1_HOUR_FORECAST: forecast_by_offset,
            ATTR_ATTRIBUTION: ATTRIBUTION,
        }
class MeteoFranceAlertSensor(MeteoFranceSensor):
    """Representation of a Meteo-France alert sensor."""

    def __init__(self, sensor_type: str, coordinator: DataUpdateCoordinator) -> None:
        """Initialize the Meteo-France sensor."""
        super().__init__(sensor_type, coordinator)
        # Alert coordinators have no `position`; name the entity after the
        # department code instead and reuse that name as the unique id.
        dept_code = self.coordinator.data.domain_id
        self._name = f"{dept_code} {SENSOR_TYPES[self._type][ENTITY_NAME]}"
        self._unique_id = self._name

    @property
    def state(self):
        """Return the department-wide warning text."""
        max_color = self.coordinator.data.get_domain_max_color()
        return get_warning_text_status_from_indice_color(max_color)

    @property
    def extra_state_attributes(self):
        """Return the state attributes."""
        attributes = dict(
            readeable_phenomenoms_dict(self.coordinator.data.phenomenons_max_colors)
        )
        attributes[ATTR_ATTRIBUTION] = ATTRIBUTION
        return attributes
def _find_first_probability_forecast_not_null(
probability_forecast: list, path: list
) -> int:
"""Search the first not None value in the first forecast elements."""
for forecast in probability_forecast[0:3]:
if forecast[path[1]][path[2]] is not None:
return forecast[path[1]][path[2]]
# Default return value if no value founded
return None
|
UnderflowDevelopment/RiseClient | src/main/java/dev/rise/script/block/impl/event/impl/packet/PacketReceiveBlock.java | <gh_stars>1-10
package dev.rise.script.block.impl.event.impl.packet;
import dev.rise.event.impl.packet.PacketReceiveEvent;
import dev.rise.script.block.api.BlockData;
import dev.rise.script.block.impl.event.EventBlock;
/**
 * Script event block that fires whenever the client receives a network
 * packet; wraps {@code PacketReceiveEvent} for the scripting system.
 */
@BlockData(name = "onPacketReceive", description = "Instructions inside this event listener will get ran when the client receives a packet.")
public final class PacketReceiveBlock extends EventBlock {

    /** Registers this block against the packet-receive event type. */
    public PacketReceiveBlock() {
        super(PacketReceiveEvent.class);
    }
}
|
extnet/docs5.ext.net | mobile/d0/d47/class_ext_1_1_net_1_1_mobile_1_1_m_v_c_1_1_presence_validator_attribute.js | <reponame>extnet/docs5.ext.net<filename>mobile/d0/d47/class_ext_1_1_net_1_1_mobile_1_1_m_v_c_1_1_presence_validator_attribute.js
// Auto-generated Doxygen navigation data for the PresenceValidatorAttribute
// class page: each entry is [display name, target href, child entries].
var class_ext_1_1_net_1_1_mobile_1_1_m_v_c_1_1_presence_validator_attribute =
[
    [ "CreateValidator", "d0/d47/class_ext_1_1_net_1_1_mobile_1_1_m_v_c_1_1_presence_validator_attribute.html#a372faa1e9bcc7427f525bda2f4863057", null ],
    [ "AllowEmpty", "d0/d47/class_ext_1_1_net_1_1_mobile_1_1_m_v_c_1_1_presence_validator_attribute.html#aea5d1623205abba528e0c52d065aacbc", null ]
];
kami-z88/courier | account/modules/mixin.py | <filename>account/modules/mixin.py
from django.db import models
class NaiveHierarchyManager(models.Manager):
    """Manager exposing the roots (nodes without a parent) of a hierarchy."""

    def get_roots(self):
        # Roots are rows whose parent FK is NULL.
        # NOTE(review): get_query_set() is the pre-Django-1.6 spelling; on
        # modern Django this must be get_queryset() -- confirm the targeted
        # Django version before renaming.
        return self.get_query_set().filter(parent__isnull=True)
class NaiveHierarchy(models.Model):
    """Abstract model giving subclasses a simple adjacency-list hierarchy."""

    # Self-referential FK; a NULL parent marks a root node.
    # NOTE(review): no on_delete argument -- mandatory from Django 2.0 on;
    # confirm the targeted Django version.
    parent = models.ForeignKey('self', null=True, blank=True)

    # Custom manager providing get_roots(); kept under a non-default name.
    tree = NaiveHierarchyManager()

    def get_children(self):
        # Direct children only (one query).
        return self._default_manager.filter(parent=self)

    def get_descendants(self):
        # Recursively collect the whole subtree; issues one query per node,
        # which is the "naive" in the class name.
        descendants = set(self.get_children())
        for node in list(descendants):
            descendants.update(node.get_descendants())
        return descendants

    class Meta:
        abstract = True
|
cuvent/react-native-navigation | lib/ios/ElementFrameTransition.h | <gh_stars>10-100
#import <Foundation/Foundation.h>
#import "ElementBaseTransition.h"
// Transition that animates an element between two frame rectangles.
@interface ElementFrameTransition : ElementBaseTransition

// Frame at the start of the transition.
@property (nonatomic) CGRect from;
// Frame at the end of the transition.
@property (nonatomic) CGRect to;

@end
|
IBM/wc-jpa-modeler | workspace/JPAPortUtil/src/com/ibm/commerce/jpa/port/handlers/AccessBeanSubclassSearchHandler.java | package com.ibm.commerce.jpa.port.handlers;
/*
*-----------------------------------------------------------------
* Copyright 2018
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*-----------------------------------------------------------------
*/
import org.eclipse.core.commands.AbstractHandler;
import org.eclipse.core.commands.ExecutionEvent;
import org.eclipse.core.commands.ExecutionException;
import com.ibm.commerce.jpa.port.AccessBeanSubclassSearchJob;
/**
* This handler is invoked by clicking on the "Access Bean Subclass Search" menu option in this plugin.
*
* It launches a wizard which invokes the AccessBeanSubclassSearchJob when finished.
*
* @see org.eclipse.core.commands.IHandler
* @see org.eclipse.core.commands.AbstractHandler
*/
/**
 * Handles the "Access Bean Subclass Search" menu command by scheduling an
 * {@link AccessBeanSubclassSearchJob} on the Eclipse job manager.
 *
 * @see org.eclipse.core.commands.IHandler
 * @see org.eclipse.core.commands.AbstractHandler
 */
public class AccessBeanSubclassSearchHandler extends AbstractHandler {
    /**
     * Default constructor.
     */
    public AccessBeanSubclassSearchHandler() {
    }

    /**
     * Executes the command: creates the background search job and hands it
     * to the platform scheduler. Always returns {@code null}, as required
     * for handlers without a result.
     */
    public Object execute(ExecutionEvent event) throws ExecutionException {
        final AccessBeanSubclassSearchJob searchJob = new AccessBeanSubclassSearchJob();
        searchJob.schedule();
        return null;
    }
}
|
Rkhoiwal/Competitive-prog-Archive | uva/volume-101/minesweeper.cpp | #include <iostream>
#include <utility>
#include <vector>
using namespace std;
constexpr char mine_square {'*'};
// Decouple C++ streams from C stdio and untie cin from cout to speed up
// bulk console I/O.
inline void use_io_optimizations()
{
    ios_base::sync_with_stdio(false);
    cin.tie(nullptr);
}
// Return the coordinates of the 8 squares surrounding (row, column);
// positions may fall outside the field and must be bounds-checked by the
// caller.
inline std::vector<std::pair<int, int>> adjacent_positions(int row, int column)
{
    // Neighbour offsets, listed in the same order as the original
    // hand-written initializer.
    static const int row_offsets[] {-1, 1, 0, 0, -1, 1, -1, 1};
    static const int col_offsets[] { 0, 0, -1, 1, -1, 1, 1, -1};

    std::vector<std::pair<int, int>> neighbours;
    neighbours.reserve(8);
    for (int k = 0; k < 8; ++k)
    {
        neighbours.emplace_back(row + row_offsets[k], column + col_offsets[k]);
    }
    return neighbours;
}
// True when (x, y) lies inside a field of the given height and width.
inline bool is_valid_position(int x, int y, int height, int width)
{
    const bool row_in_range = (0 <= x) && (x < height);
    const bool col_in_range = (0 <= y) && (y < width);
    return row_in_range && col_in_range;
}
// Print one solved field to stdout: a "Field #N:" header followed by one
// line per row of squares.
void print_field(std::vector<std::vector<char>>& field, int field_number)
{
    std::cout << "Field #" << field_number << ':' << '\n';

    // Iterate by const reference: the original range-for copied every row
    // vector by value, which is needless work on large fields.
    for (const std::vector<char>& row : field)
    {
        for (char square : row)
        {
            std::cout << square;
        }

        std::cout << '\n';
    }
}
// Reads minesweeper fields until the terminating "0 0" pair, replaces each
// safe square with its adjacent-mine count and prints the solved fields,
// blank-line separated.
int main()
{
    use_io_optimizations();

    int field_number {1};

    int height;
    int width;
    while (cin >> height >> width && height != 0 && width != 0)
    {
        // Every square starts as a '0' adjacent-mine counter.
        vector<vector<char>> field(height, vector<char>(width, '0'));

        for (int i {0}; i < height; ++i)
        {
            for (int j {0}; j < width; ++j)
            {
                char square;
                cin >> square;

                if (square == mine_square)
                {
                    field[i][j] = mine_square;

                    // Bump the counter of every in-bounds, non-mine
                    // neighbour of this mine.
                    for (pair<int, int> position : adjacent_positions(i, j))
                    {
                        int x {position.first};
                        int y {position.second};

                        if (is_valid_position(x, y, height, width) &&
                            field[x][y] != mine_square)
                        {
                            ++field[x][y];
                        }
                    }
                }
            }
        }

        // Blank line between consecutive fields.
        if (field_number > 1)
        {
            cout << '\n';
        }

        print_field(field, field_number++);
    }

    return 0;
}
|
NTrevisani/cmssw | Geometry/HGCalGeometry/interface/HGCalGeometryLoader.h | <filename>Geometry/HGCalGeometry/interface/HGCalGeometryLoader.h
#ifndef GeometryHGCalGeometryHGCalGeometryLoader_h
#define GeometryHGCalGeometryHGCalGeometryLoader_h
#include "Geometry/HGCalGeometry/interface/HGCalGeometry.h"
#include "Geometry/HGCalCommonData/interface/HGCalGeometryMode.h"
#include "Geometry/CaloGeometry/interface/CaloCellGeometry.h"
#include "Geometry/CaloGeometry/interface/FlatHexagon.h"
class HGCalTopology;
class HGCalGeometry;
// Builds an HGCal calorimeter geometry (cell shapes and positions) from a
// given HGCalTopology.
class HGCalGeometryLoader {
public:
  typedef CaloCellGeometry::CCGFloat CCGFloat;
  typedef std::vector<float> ParmVec;

  HGCalGeometryLoader();
  ~HGCalGeometryLoader();

  // Allocates and fills a new HGCalGeometry.
  // NOTE(review): raw pointer return -- presumably the caller takes
  // ownership; confirm against the call sites.
  HGCalGeometry* build(const HGCalTopology&);

private:
  // Adds one cell's geometry for the given shape parameters and transform.
  void buildGeom(const ParmVec&, const HepGeom::Transform3D&, const DetId&, HGCalGeometry*, int mode);

  // Presumably 2/sqrt(3), a hexagon geometry factor -- named so; confirm
  // against the constructor's initialization.
  const double twoBysqrt3_;
  int parametersPerShape_;
};
#endif
|
deltaautonomy/delta_perception | segmentation/libs/graph_utils.py | <reponame>deltaautonomy/delta_perception
r"""Utils for working with variables and proto defs."""
import re
import collections
from tensorflow.python.ops.variables import Variable
from tensorflow.python import pywrap_tensorflow
from tensorflow.core.framework import node_def_pb2
from tensorflow.python.training import saver as saver_lib
from tensorflow.python.framework import ops
GraphTraversalState = collections.namedtuple(
"GraphTraversalState", ["already_visited", "output_node_stack"])
def remove_ref_from_node_name(node_name):
    """Strip a trailing '/read' (variable read op) from a node name."""
    suffix = "/read"
    if node_name.endswith(suffix):
        return node_name[:-len(suffix)]
    return node_name
def node_name_matches(node_name, search_str):
    """Return the prefix of node_name before 'search_str:<N>', else None.

    A leading control-dependency marker '^' is ignored. Note search_str is
    interpolated into the regex verbatim, so regex metacharacters in it are
    interpreted as such.
    """
    name = node_name[1:] if node_name.startswith("^") else node_name
    match = re.search(r"(.*)%s:\d+$" % search_str, name)
    return match.group(1) if match else None
def node_name_from_input(node_name):
    """Normalize an input reference to a plain node name.

    Drops a leading control-dependency '^' and a trailing ':<output index>'.
    """
    name = node_name[1:] if node_name.startswith("^") else node_name
    match = re.search(r"(.*)?:\d+$", name)
    return match.group(1) if match else name
def clear_node_devices(input_graph_def_nodes):
    """Reset the device placement of every node to unassigned."""
    for graph_node in input_graph_def_nodes:
        graph_node.device = ""
def create_var_const_map(session, var_names):
    """Evaluate each named variable's ':0' tensor; map name -> value."""
    return {name: session.run(name + ":0") for name in var_names}
def create_nodes_map(graph):
    """Index the graph's nodes by name, rejecting duplicate names."""
    nodes_map = {}
    for graph_node in graph.node:
        if graph_node.name in nodes_map:
            raise ValueError("Duplicate node names detected.")
        nodes_map[graph_node.name] = graph_node
    return nodes_map
def create_constant_node(name, value, dtype, shape=None):
    """Build a Const NodeDef carrying the given tensor value and dtype."""
    const_node = create_node("Const", name, [])
    set_attr_dtype(const_node, "dtype", dtype)
    set_attr_tensor(const_node, "value", value, dtype, shape)
    return const_node
def create_node(op, name, inputs):
    """Build a bare NodeDef with the given op, name and input names."""
    new_node = node_def_pb2.NodeDef()
    new_node.op = op
    new_node.name = name
    new_node.input.extend(inputs)
    return new_node
def copy_variable_ref_to_graph(input_graph, output_graph,
                               var_ref, init_value, scope=''):
    """Recreate a variable from input_graph inside output_graph.

    The new variable is initialized from init_value, keeps (a possibly
    scope-prefixed version of) the original name and re-joins the original
    variable's collections; returns the new Variable.
    """
    # New name = original name without the ':0' output suffix, under scope.
    if scope != '':
        new_name = (
            scope + '/' + var_ref.name[:var_ref.name.index(':')])
    else:
        new_name = var_ref.name[:var_ref.name.index(':')]
    # Carry over collection memberships. Global/trainable collections keep
    # their canonical names; any other collection is scoped, except when no
    # scope is requested.
    collections = []
    for name, collection in input_graph._collections.items():
        if var_ref in collection:
            if (name == ops.GraphKeys.GLOBAL_VARIABLES or
                name == ops.GraphKeys.TRAINABLE_VARIABLES or
                scope == ''):
                collections.append(name)
            else:
                collections.append(scope + '/' + name)
    # Preserve trainability of the source variable.
    trainable = (var_ref in input_graph.get_collection(
        ops.GraphKeys.TRAINABLE_VARIABLES))
    with output_graph.as_default():
        new_var = Variable(
            init_value,
            trainable,
            name=new_name,
            collections=collections,
            validate_shape=False)
    # validate_shape=False above, so pin the static shape explicitly.
    new_var.set_shape(init_value.shape)
    return new_var
def add_variable_to_graph(output_graph, var_name, init_value,
                          trainable=True, collections=None, scope=''):
    """Create a new Variable named var_name (under scope) in output_graph.

    The variable is initialized from init_value; its static shape is pinned
    explicitly since validate_shape is disabled. Returns the new Variable.
    """
    # BUGFIX: the default used to be the shared mutable `collections=[]`.
    # None is translated back to [] here, because passing None through to
    # tf.Variable would instead mean "use the default GLOBAL_VARIABLES
    # collection" -- a behavior change we must avoid.
    if collections is None:
        collections = []
    if scope != '':
        new_name = scope + '/' + var_name
    else:
        new_name = var_name
    with output_graph.as_default():
        new_var = Variable(
            init_value,
            trainable,
            name=new_name,
            collections=collections,
            validate_shape=False)
        new_var.set_shape(init_value.shape)
    return new_var
def get_vars_from_checkpoint(session, checkpoint, checkpoint_version):
    """Restore `checkpoint` into `session`; map variable names to tensors.

    Imports the checkpoint's meta graph, then pairs every checkpointed
    variable with the matching ':0' tensor in the session's graph.

    NOTE(review): checkpoint_version is never used in this body -- confirm
    whether V1/V2 handling was intended here.
    """
    var_list = {}
    meta_graph_file = checkpoint + '.meta'
    saver = saver_lib.import_meta_graph(meta_graph_file)
    reader = pywrap_tensorflow.NewCheckpointReader(checkpoint)
    var_to_shape_map = reader.get_variable_to_shape_map()
    for key in sorted(var_to_shape_map):
        try:
            tensor_name = key + ":0"
            tensor = session.graph.get_tensor_by_name(
                tensor_name)
        except KeyError:
            # Checkpoint entries with no matching tensor in the current
            # graph are silently skipped.
            continue
        var_list[key] = tensor
    saver.restore(session, checkpoint)
    return var_list
|
enthought/supplement | supplement/project.py | <filename>supplement/project.py
import sys, os
from os.path import abspath, join, isdir, isfile, exists, dirname
import logging
from .tree import AstProvider
from .module import ModuleProvider, PackageResolver
from .watcher import DummyMonitor
from .calls import CallDB
class Project(object):
    """A supplement project: a source tree plus its configuration.

    Aggregates the providers (modules, ASTs, packages), the docstring
    processors, the hook registry and the call database used to resolve
    names inside the project.
    """

    def __init__(self, root, config=None, monitor=None):
        """Create a project rooted at `root`.

        config may provide 'sources', 'libs' and 'hooks' entries;
        monitor defaults to a no-op file monitor.
        """
        self.root = root
        self.config = config or {}
        self._refresh_paths()
        self.monitor = monitor or DummyMonitor()
        self.ast_provider = AstProvider()
        self.module_providers = {
            'default': ModuleProvider()
        }
        self.package_resolver = PackageResolver()
        self.docstring_processors = []
        self.registered_hooks = set()
        for hook_name in self.config.get('hooks', []):
            self.register_hook(hook_name)
        self.calldb = CallDB(self)

    def _refresh_paths(self):
        """Rebuild the source and module search paths from the config."""
        self.sources = []
        self.paths = []

        if 'sources' in self.config:
            for p in self.config['sources']:
                p = join(abspath(self.root), p)
                self.paths.append(p)
                self.sources.append(p)
        else:
            self.paths.append(abspath(self.root))
            self.sources.append(abspath(self.root))

        for p in self.config.get('libs', []):
            self.paths.append(p)

        self.paths.extend(sys.path)

    def get_module(self, name, filename=None):
        """Return module `name`; a 'ctx:name' prefix selects a provider."""
        assert name
        ctx, sep, name = name.partition(':')
        if not sep:
            ctx, name = 'default', ctx

        if filename:
            return self.module_providers[ctx].get(self, name, filename)
        else:
            return self.module_providers[ctx].get(self, name)

    def get_ast(self, module):
        return self.ast_provider.get(module)

    def get_possible_imports(self, start, filename=None):
        """Return names importable under the (possibly empty) prefix `start`."""
        result = set()
        if not start:
            paths = self.paths
            result.update(r for r, m in sys.modules.items() if m)
        else:
            m = self.get_module(start, filename)
            sub_package_prefix = m.module.__name__ + '.'
            # BUGFIX: was sys.modules.iteritems(), a Python-2-only API and
            # inconsistent with the .items() call in the branch above.
            for name, module in sys.modules.items():
                if module and name.startswith(sub_package_prefix):
                    result.add(name[len(sub_package_prefix):])

            try:
                paths = m.module.__path__
            except AttributeError:
                paths = []

        for path in paths:
            if not exists(path) or not isdir(path):
                continue

            path = abspath(path)
            for name in os.listdir(path):
                if name == '__init__.py':
                    continue

                filename = join(path, name)
                if isdir(filename):
                    # A directory is importable only if it is a package.
                    if isfile(join(filename, '__init__.py')):
                        result.add(name)
                else:
                    if any(map(name.endswith, ('.py', '.so'))):
                        result.add(name.rpartition('.')[0])

        return result

    def register_hook(self, name):
        """Import and init hook module `name` once; failures are logged."""
        if name not in self.registered_hooks:
            try:
                __import__(name)
                sys.modules[name].init(self)
            # BUGFIX: narrowed from a bare `except:` so that e.g.
            # KeyboardInterrupt/SystemExit are no longer swallowed.
            except Exception:
                logging.getLogger(__name__).exception('[%s] hook register failed' % name)
            else:
                self.registered_hooks.add(name)

    def add_docstring_processor(self, processor):
        self.docstring_processors.append(processor)

    def add_module_provider(self, ctx, provider):
        self.module_providers[ctx] = provider

    def add_override_processor(self, override):
        self.module_providers['default'].add_override(override)

    def process_docstring(self, docstring, obj):
        """Run processors until one returns a replacement for obj."""
        for p in self.docstring_processors:
            result = p(docstring, obj)
            if result is not None:
                return result

        return obj

    def get_filename(self, name, rel=None):
        """Resolve `name`: '/x' is project-root-relative, else rel-sibling."""
        if name.startswith('/'):
            return join(self.root, name[1:])

        return join(dirname(rel), name)
purang-fintech/seppb | src/main/java/com/pr/sepp/utils/jenkins/pool/JenkinsPoolConfig.java | package com.pr.sepp.utils.jenkins.pool;
import com.pr.sepp.utils.jenkins.model.JenkinsProperties;
import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
/**
 * Object-pool configuration for pooled Jenkins client connections.
 */
public class JenkinsPoolConfig extends GenericObjectPoolConfig {

    /** Builds the pool configuration from externalized Jenkins properties. */
    public JenkinsPoolConfig(JenkinsProperties jenkinsProperties) {
        setTestWhileIdle(jenkinsProperties.isTestWhileIdle());
        setMinEvictableIdleTimeMillis(jenkinsProperties.getMinEvictableIdleTimeMillis());
        setTimeBetweenEvictionRunsMillis(jenkinsProperties.getTimeBetweenEvictionRunsMillis());
        setEvictorShutdownTimeoutMillis(jenkinsProperties.getEvictorShutdownTimeoutMillis());
        setNumTestsPerEvictionRun(jenkinsProperties.getNumTestsPerEvictionRun());
        setTestOnBorrow(jenkinsProperties.isTestOnBorrow());
        setTestOnReturn(jenkinsProperties.isTestOnReturn());
        // Maximum number of connections
        setMaxTotal(jenkinsProperties.getMaxTotal());
        setMaxIdle(jenkinsProperties.getMaxIdle());
        setMaxWaitMillis(jenkinsProperties.getMaxWaitMillis());
    }

    /** Default configuration used when no properties are supplied. */
    public JenkinsPoolConfig() {
        setTestWhileIdle(true);
        setMinEvictableIdleTimeMillis(240_000);
        setTimeBetweenEvictionRunsMillis(30_000);
        setEvictorShutdownTimeoutMillis(5_000);
        setSoftMinEvictableIdleTimeMillis(5_000);
        setNumTestsPerEvictionRun(-1);
        setTestOnBorrow(true);
        setTestOnReturn(true);
        // Maximum number of connections
        setMaxTotal(6);
        setMaxIdle(6);
        setMaxWaitMillis(4_000);
    }
}
|
jlmucb/cloudproxy | cpvmm/vmm/guest/guest_cpu/guest_cpu_switch.c | <reponame>jlmucb/cloudproxy
/*
* Copyright (c) 2013 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "file_codes.h"
#define VMM_DEADLOOP() VMM_DEADLOOP_LOG(GUEST_CPU_SWITCH_C)
#define VMM_ASSERT(__condition) VMM_ASSERT_LOG(GUEST_CPU_SWITCH_C, __condition)
#include "guest_cpu_internal.h"
#include "vmx_ctrl_msrs.h"
#include "gpm_api.h"
#include "guest.h"
#include "vmx_asm.h"
#include "ipc.h"
#include "vmm_dbg.h"
#include "vmm_events_data.h"
#include "vmcs_merge_split.h"
#include "vmcs_api.h"
#include "vmexit_cr_access.h"
#include "pat_manager.h"
#include "vmx_nmi.h"
#include "host_cpu.h"
#include "vmdb.h"
#include "vmcs_init.h"
#include "unrestricted_guest.h"
#include "fvs.h"
#ifdef JLMDEBUG
#include "jlmdebug.h"
#endif
extern BOOLEAN is_ib_registered(void);
#ifdef VMCALL_NOT_ALLOWED_FROM_RING_1_TO_3
extern BOOLEAN gcpu_inject_invalid_opcode_exception(GUEST_CPU_HANDLE gcpu);
#endif
// do not report warning on unused params
#pragma warning( disable: 4100 )
// Decide on important events
// Action to take with the instruction emulator when resuming this guest CPU.
typedef enum _GCPU_RESUME_EMULATOR_ACTION {
    GCPU_RESUME_EMULATOR_ACTION_DO_NOTHING = 0,
    GCPU_RESUME_EMULATOR_ACTION_START_EMULATOR
} GCPU_RESUME_EMULATOR_ACTION;

// Action to take with the VMM-owned flat page tables when resuming.
typedef enum _GCPU_RESUME_FLAT_PT_ACTION {
    GCPU_RESUME_FLAT_PT_ACTION_DO_NOTHING = 0,
    GCPU_RESUME_FLAT_PT_ACTION_INSTALL_32_BIT_PT,
    GCPU_RESUME_FLAT_PT_ACTION_INSTALL_64_BIT_PT,
    GCPU_RESUME_FLAT_PT_ACTION_REMOVE
} GCPU_RESUME_FLAT_PT_ACTION;

// Combined pre-resume decision produced by gcpu_decide_on_resume_actions().
typedef struct _GCPU_RESUME_ACTION {
    GCPU_RESUME_EMULATOR_ACTION emulator;
    GCPU_RESUME_FLAT_PT_ACTION flat_pt;
} GCPU_RESUME_ACTION;

// Hardware enforcements tracked as a bitmask in gcpu->hw_enforcements.
typedef enum { // bit values
    VMCS_HW_ENFORCE_EMULATOR = 1,
    VMCS_HW_ENFORCE_FLAT_PT = 2,
    VMCS_HW_ENFORCE_CACHE_DISABLED = 4,
} VMCS_HW_ENFORCEMENT_ID;

// Per-host-CPU flag controlling VMCS software shadowing (defined elsewhere).
extern BOOLEAN vmcs_sw_shadow_disable[];
static VMM_STATUS gcpu_set_hw_enforcement(GUEST_CPU_HANDLE gcpu, VMCS_HW_ENFORCEMENT_ID enforcement);
static VMM_STATUS gcpu_remove_hw_enforcement(GUEST_CPU_HANDLE gcpu, VMCS_HW_ENFORCEMENT_ID enforcement);
static void gcpu_apply_hw_enforcements(GUEST_CPU_HANDLE gcpu);
// TRUE when the given enforcement bit is currently set for this gcpu.
#define gcpu_hw_enforcement_is_active( gcpu, enforcement) (((gcpu)->hw_enforcements & enforcement) != 0)
// Track guest attempts to toggle CR0.CD: arm the cache-disabled hardware
// enforcement when the guest requests CD=1, and drop the enforcement again
// when the guest clears CD.
static void gcpu_cache_disabled_support( const GUEST_CPU_HANDLE gcpu, BOOLEAN CD_value_requested)
{
    BOOLEAN enforcement_on =
        gcpu_hw_enforcement_is_active(gcpu, VMCS_HW_ENFORCE_CACHE_DISABLED);

    if (1 == CD_value_requested) {
        // cache disabled - WSM does not support this!
        if (!enforcement_on) {
            VMM_LOG(mask_anonymous, level_trace,
                    "Guest %d:%d trying to set CD = 1\n",
                    guest_vcpu(gcpu)->guest_id,
                    guest_vcpu(gcpu)->guest_cpu_id);
            gcpu_set_hw_enforcement(gcpu, VMCS_HW_ENFORCE_CACHE_DISABLED);
        }
        return;
    }

    if (enforcement_on) {
        VMM_LOG(mask_anonymous, level_trace, "Guest %d:%d removing CD = 0 enforcement\n",
                guest_vcpu(gcpu)->guest_id, guest_vcpu(gcpu)->guest_cpu_id);
        gcpu_remove_hw_enforcement(gcpu, VMCS_HW_ENFORCE_CACHE_DISABLED);
    }
}
// Receives cr0 and efer guest-visible values
// returns TRUE is something should be done + description of what should be
// done
// Receives cr0 and efer guest-visible values
// returns TRUE is something should be done + description of what should be
// done
//
// Examines the guest-visible CR0/EFER values and the current GCPU mode to
// decide which pre-resume actions (start emulator, install/remove flat page
// tables) are needed. *action is only meaningful when TRUE is returned.
static BOOLEAN gcpu_decide_on_resume_actions(const GUEST_CPU_HANDLE gcpu, UINT64 cr0_value,
                     UINT64 efer_value, GCPU_RESUME_ACTION* action)
{
    EM64T_CR0 cr0;
    IA32_EFER_S efer;
    BOOLEAN do_something = FALSE;
    BOOLEAN PE, PG, CD, LME;
    VMM_ASSERT( gcpu );
    VMM_ASSERT( action );
    if (IS_MODE_EMULATOR(gcpu)) {
        // if we under emulator, emulator will take care for everything
        return FALSE;
    }
    action->emulator = GCPU_RESUME_EMULATOR_ACTION_DO_NOTHING;
    action->flat_pt = GCPU_RESUME_FLAT_PT_ACTION_DO_NOTHING;
    // now we in NATIVE mode only
    if (IS_STATE_INACTIVE(GET_CACHED_ACTIVITY_STATE(gcpu))) {
        // if we are in the wait-for-SIPI mode - do nothing
        return FALSE;
    }
    // Decode the architectural bits this decision depends on.
    cr0.Uint64 = cr0_value;
    efer.Uint64 = efer_value;
    PE = (cr0.Bits.PE == 1);
    PG = (cr0.Bits.PG == 1);
    CD = (cr0.Bits.CD == 1);
    LME = (efer.Bits.LME == 1);
    if (CD && global_policy_is_cache_dis_virtualized()) {
        EM64T_CR0 real_cr0;
        VMM_DEBUG_CODE(
            const VIRTUAL_CPU_ID *vcpu = guest_vcpu(gcpu);
            VMM_LOG(mask_anonymous, level_trace,"Guest %d:%d trying to set CD = 1\n", (int) vcpu->guest_id, (int) vcpu->guest_cpu_id);
        );
        // CD = 1 is not allowed.
        // Force the real (hardware) CR0.CD back to 0; the guest-visible value
        // is handled separately by gcpu_cache_disabled_support() below.
        real_cr0.Uint64 = gcpu_get_control_reg(gcpu, IA32_CTRL_CR0);
        real_cr0.Bits.CD = 0;
        gcpu_set_control_reg(gcpu, IA32_CTRL_CR0, real_cr0.Uint64);
    }
    // Run emulator explicitly
    if( GET_EXPLICIT_EMULATOR_REQUEST_FLAG(gcpu) ) {
        if (IS_MODE_UNRESTRICTED_GUEST(gcpu) ) {
            gcpu_clr_unrestricted_guest(gcpu);
        }
        // if we start emulator, emulator will take care for everything
        action->emulator = GCPU_RESUME_EMULATOR_ACTION_START_EMULATOR;
        return TRUE;
    }
    // We have UG on all the time, except during Task Switch
    if (is_unrestricted_guest_supported()) {
        if (!IS_MODE_UNRESTRICTED_GUEST(gcpu)) {
            unrestricted_guest_enable(gcpu);
        }
    }
    if (PE == FALSE) {
        // Real mode: without Unrestricted Guest the hardware cannot run the
        // guest directly, so the emulator must take over.
        // if we start emulator, emulator will take care for everything
        if (!is_unrestricted_guest_supported()) {
            action->emulator = GCPU_RESUME_EMULATOR_ACTION_START_EMULATOR;
            do_something = TRUE;
        }
        return do_something;
    }
    // now PE is 1
    if (!is_unrestricted_guest_supported()) {
        if (PG == FALSE) {
            // paging is off -> we need flat page tables.
            if ((LME == FALSE) && (!GET_FLAT_PAGES_TABLES_32_FLAG(gcpu))) {
                do_something = TRUE;
                action->flat_pt = GCPU_RESUME_FLAT_PT_ACTION_INSTALL_32_BIT_PT;
            }
            // special case - Paging is OFF but Long Mode Enable (LME) is ON
            // -> switch from 32bit to 64 bit page tables even is 32bit exist
            if ((LME == TRUE) && (!GET_FLAT_PAGES_TABLES_64_FLAG(gcpu))) {
                do_something = TRUE;
                action->flat_pt = GCPU_RESUME_FLAT_PT_ACTION_INSTALL_64_BIT_PT;
            }
        }
        // Paging is ON
        else {
            if(IS_FLAT_PT_INSTALLED(gcpu)) {
                do_something = TRUE;
                action->flat_pt = GCPU_RESUME_FLAT_PT_ACTION_REMOVE;
            }
        }
    }
    if (global_policy_is_cache_dis_virtualized()) {
        gcpu_cache_disabled_support( gcpu, CD );
    }
    return do_something;
}
// Working with flat page tables
// called each time before resume if flat page tables are active
// Working with flat page tables
// called each time before resume if flat page tables are active
//
// Re-asserts the CR3/CR4/CR0 values the VMM-owned flat page tables require.
// The write order (CR3, then CR4.PAE/PSE, then CR0.PG) is preserved as-is —
// do not reorder without verifying against the VMCS update path.
static void gcpu_enforce_flat_memory_setup( GUEST_CPU* gcpu )
{
    EM64T_CR4 cr4;
    EM64T_CR0 cr0;
    VMM_ASSERT( IS_FLAT_PT_INSTALLED( gcpu ) );
    VMM_ASSERT( gcpu->active_flat_pt_hpa );
    // Point the hardware CR3 at the VMM-built flat page tables.
    gcpu_set_control_reg( gcpu, IA32_CTRL_CR3, gcpu->active_flat_pt_hpa );
    cr4.Uint64 = gcpu_get_control_reg( gcpu, IA32_CTRL_CR4 );
    cr0.Uint64 = gcpu_get_control_reg( gcpu, IA32_CTRL_CR0 );
    // set required bits
    // note: CR4.PAE ... are listed in the GCPU_CR4_VMM_CONTROLLED_BITS
    // so their real values will not be visible by guest
    if (! (cr4.Bits.PAE && cr4.Bits.PSE)) {
        cr4.Bits.PAE = 1;
        cr4.Bits.PSE = 1;
        gcpu_set_control_reg( gcpu, IA32_CTRL_CR4, cr4.Uint64 );
    }
    // note: CR0.PG ... are listed in the GCPU_CR0_VMM_CONTROLLED_BITS
    // so their real values will not be visible by guest
    if (! cr0.Bits.PG) {
        cr0.Bits.PG = 1;
        gcpu_set_control_reg( gcpu, IA32_CTRL_CR0, cr0.Uint64 );
    }
}
// Build and install VMM-owned flat page tables of the requested flavor
// (32-bit or 64-bit) for a guest running with paging off. Saves the guest's
// own CR3 on first install so it can be restored later, and arms the
// FLAT_PT hardware enforcement so the setup is re-applied on every resume.
static void gcpu_install_flat_memory( GUEST_CPU* gcpu,
                      GCPU_RESUME_FLAT_PT_ACTION pt_type )
{
    BOOLEAN gpm_flat_page_tables_ok = FALSE;
    if (IS_FLAT_PT_INSTALLED(gcpu)) {
        // Switching flavors: drop the previously built tables first.
        fpt_destroy_flat_page_tables( gcpu->active_flat_pt_handle );
    }
    else {
        // first time install - save current user CR3
        if (INVALID_CR3_SAVED_VALUE == gcpu->save_area.gp.reg[CR3_SAVE_AREA]) {
            gcpu->save_area.gp.reg[CR3_SAVE_AREA]= gcpu_get_control_reg(gcpu, IA32_CTRL_CR3);
        }
    }
    if (GCPU_RESUME_FLAT_PT_ACTION_INSTALL_32_BIT_PT == pt_type) {
        // 32-bit tables: the create API returns a 32-bit HPA, widened here
        // into the 64-bit active_flat_pt_hpa field.
        UINT32 cr3_hpa;
        gpm_flat_page_tables_ok =
            fpt_create_32_bit_flat_page_tables(gcpu,
                    &(gcpu->active_flat_pt_handle), &cr3_hpa );
        gcpu->active_flat_pt_hpa = cr3_hpa;
        CLR_FLAT_PAGES_TABLES_64_FLAG(gcpu);
        SET_FLAT_PAGES_TABLES_32_FLAG(gcpu);
    }
    else if (GCPU_RESUME_FLAT_PT_ACTION_INSTALL_64_BIT_PT == pt_type) {
        gpm_flat_page_tables_ok =
            fpt_create_64_bit_flat_page_tables(gcpu, &(gcpu->active_flat_pt_handle),
                    &(gcpu->active_flat_pt_hpa) );
        CLR_FLAT_PAGES_TABLES_32_FLAG(gcpu);
        SET_FLAT_PAGES_TABLES_64_FLAG(gcpu);
    }
    else {
        VMM_LOG(mask_anonymous, level_trace,"Unknown Flat Page Tables type: %d\n", pt_type);
        VMM_DEADLOOP();
    }
    VMM_ASSERT( gpm_flat_page_tables_ok );
    gcpu_set_hw_enforcement(gcpu, VMCS_HW_ENFORCE_FLAT_PT);
}
// Tear down the VMM-owned flat page tables and restore the guest's own
// CR3/CR4 values, raising CR-write events so registered handlers observe
// the restored values. Clears the FLAT_PT enforcement and mode flags.
static void gcpu_destroy_flat_memory( GUEST_CPU* gcpu )
{
    EM64T_CR4 user_cr4;
    RAISE_EVENT_RETVAL event_retval;
    if (IS_FLAT_PT_INSTALLED(gcpu)) {
        fpt_destroy_flat_page_tables( gcpu->active_flat_pt_handle );
        gcpu->active_flat_pt_hpa = 0;
    }
    // now we should restore the original PAE and PSE bits
    // actually we should ask uVMM-based application about this by
    // issuing appropriate event
    user_cr4.Uint64 = gcpu_get_guest_visible_control_reg( gcpu, IA32_CTRL_CR4 );
    gcpu_set_control_reg( gcpu, IA32_CTRL_CR4, user_cr4.Uint64 );
    event_retval = cr_raise_write_events( gcpu, IA32_CTRL_CR4, user_cr4.Uint64 );
    VMM_ASSERT(event_retval != EVENT_NOT_HANDLED);
    // Restore the CR3 that was saved when the flat tables were installed.
    gcpu_set_control_reg( gcpu, IA32_CTRL_CR3, gcpu->save_area.gp.reg[CR3_SAVE_AREA] );
    event_retval = cr_raise_write_events( gcpu, IA32_CTRL_CR3, gcpu->save_area.gp.reg[CR3_SAVE_AREA] );
    VMM_ASSERT(event_retval != EVENT_NOT_HANDLED);
    gcpu_remove_hw_enforcement(gcpu, VMCS_HW_ENFORCE_FLAT_PT);
    CLR_FLAT_PAGES_TABLES_32_FLAG(gcpu);
    CLR_FLAT_PAGES_TABLES_64_FLAG(gcpu);
}
// Rebuild the active flat page tables after the guest physical memory map
// (GPM) was modified, keeping the same 32/64-bit flavor. No-op when flat
// tables are not installed.
void gcpu_physical_memory_modified( GUEST_CPU_HANDLE gcpu )
{
    BOOLEAN gpm_flat_page_tables_ok = FALSE;
    // this function is called after somebody modified guest physical memory
    // renew flat page tables if required
    if (! IS_FLAT_PT_INSTALLED(gcpu)) {
        return;
    }
    fpt_destroy_flat_page_tables( gcpu->active_flat_pt_handle );
    if (GET_FLAT_PAGES_TABLES_32_FLAG(gcpu)) {
        // 32-bit create API returns a 32-bit HPA; widen into the 64-bit field.
        UINT32 cr3_hpa;
        gpm_flat_page_tables_ok =
            fpt_create_32_bit_flat_page_tables(gcpu, &(gcpu->active_flat_pt_handle),
                        &cr3_hpa );
        gcpu->active_flat_pt_hpa = cr3_hpa;
    }
    else if (GET_FLAT_PAGES_TABLES_64_FLAG(gcpu)) {
        gpm_flat_page_tables_ok =
            fpt_create_64_bit_flat_page_tables(gcpu, &(gcpu->active_flat_pt_handle),
                        &(gcpu->active_flat_pt_hpa) );
    }
    else {
        // IS_FLAT_PT_INSTALLED was true but neither flavor flag is set —
        // internal state inconsistency, cannot continue.
        VMM_LOG(mask_anonymous, level_trace,"Unknown Flat Page Tables type during FPT update after GPM modification\n");
        VMM_DEADLOOP();
    }
    VMM_ASSERT( gpm_flat_page_tables_ok );
}
// Perform pre-resume actions
// Perform pre-resume actions
//
// Executes the decision produced by gcpu_decide_on_resume_actions():
// optionally starts the emulator, then installs/removes flat page tables.
static void gcpu_perform_resume_actions( GUEST_CPU* gcpu,
                     const GCPU_RESUME_ACTION* action )
{
    VMM_ASSERT( gcpu );
    VMM_ASSERT( IS_MODE_NATIVE(gcpu) );
    VMM_ASSERT( action );
#ifdef ENABLE_EMULATOR
    if (action->emulator == GCPU_RESUME_EMULATOR_ACTION_START_EMULATOR) {
        emul_start_guest_execution( gcpu_emulator_handle(gcpu) );
        gcpu_set_hw_enforcement(gcpu, VMCS_HW_ENFORCE_EMULATOR);
        CLR_EXPLICIT_EMULATOR_REQUEST_FLAG(gcpu);
        SET_MODE_EMULATOR(gcpu);  // enable redirection of set/get to emulator
        // Decision logic never combines emulator start with a flat-PT action.
        VMM_ASSERT( action->flat_pt == GCPU_RESUME_FLAT_PT_ACTION_DO_NOTHING );
    }
#endif
    switch (action->flat_pt) {
      case GCPU_RESUME_FLAT_PT_ACTION_INSTALL_32_BIT_PT:
        gcpu_install_flat_memory( gcpu, GCPU_RESUME_FLAT_PT_ACTION_INSTALL_32_BIT_PT );
        break;
      case GCPU_RESUME_FLAT_PT_ACTION_INSTALL_64_BIT_PT:
        gcpu_install_flat_memory( gcpu, GCPU_RESUME_FLAT_PT_ACTION_INSTALL_64_BIT_PT );
        break;
      case GCPU_RESUME_FLAT_PT_ACTION_REMOVE:
        gcpu_destroy_flat_memory( gcpu );
        break;
      case GCPU_RESUME_FLAT_PT_ACTION_DO_NOTHING:
        break;
      default:
        VMM_LOG(mask_anonymous, level_trace,"Unknown GCPU pre-resume flat_pt action value: %d\n", action->flat_pt);
        VMM_DEADLOOP();
    }
}
// Context switching
// perform full state save before switching to another guest
// Full state save before this hardware CPU switches to another guest:
// force-capture any lazily cached state, then detach the VMCS.
void gcpu_swap_out( GUEST_CPU_HANDLE gcpu )
{
    // FP/SIMD and debug-register state are cached lazily; capture whatever
    // has not been saved yet before the guest loses the CPU.
    if (!GET_FX_STATE_CACHED_FLAG(gcpu)) {
        cache_fx_state(gcpu);
    }
    if (!GET_DEBUG_REGS_CACHED_FLAG(gcpu)) {
        cache_debug_registers(gcpu);
    }
    // Detach this guest's VMCS from the physical CPU.
    vmcs_deactivate(gcpu_get_vmcs(gcpu));
}
// perform state restore after switching from another guest
// State restore after switching from another guest: rebind the per-host-CPU
// assembler save area to the incoming guest, attach its VMCS, and mark all
// cached guest state as modified so it is reloaded on demand.
void gcpu_swap_in( const GUEST_CPU_HANDLE gcpu )
{
    // make global assembler save area for this host CPU point to new guest
    g_guest_regs_save_area[hw_cpu_id()] = &(gcpu->save_area);
    vmcs_activate(gcpu_get_vmcs(gcpu));
    SET_ALL_MODIFIED(gcpu);
}
// Initialize gcpu environment for each VMEXIT
// Must be the first gcpu call in each VMEXIT
// Initialize gcpu environment for each VMEXIT
// Must be the first gcpu call in each VMEXIT
//
// Snapshots CR2/CR8 (which the guest can change without causing a VMEXIT),
// invalidates the software VMCS cache, and invalidates the stored
// guest-visible CR3 when CR3 is not virtualized.
void gcpu_vmexit_start( const GUEST_CPU_HANDLE gcpu )
{
    VMCS_OBJECT* vmcs = gcpu_get_vmcs(gcpu);
    // save current
    gcpu->save_area.gp.reg[ CR2_SAVE_AREA ] = hw_read_cr2();
    // CR3 should not be saved because guest asccess CR3 always causes VmExit and
    // should be cached by CR3-access handler
    gcpu->save_area.gp.reg[ CR8_SAVE_AREA ] = hw_read_cr8();
    if (!vmcs_sw_shadow_disable[hw_cpu_id()]) {
        CLR_ALL_CACHED(gcpu);
        vmcs_clear_cache( vmcs );
    }
    // if CR3 is not virtualized, update
    // internal storage with user-visible guest value
    if (IS_MODE_NATIVE(gcpu) && !IS_FLAT_PT_INSTALLED (gcpu) &&
        !gcpu_cr3_virtualized( gcpu )) {
        gcpu_set_guest_visible_control_reg( gcpu, IA32_CTRL_CR3, INVALID_CR3_SAVED_VALUE );
    }
}
// Raise CR0/CR4/EFER/CR3 write events after a guest-level (merge/split)
// change so registered handlers observe the merged values.
//
// BUG FIX: the original CR3 branch compared optional->visible_cr3 against
// `value` while it still held the EFER reading, and only fetched CR3 inside
// the branch. The CR3 value is now read first, mirroring the CR0/CR4 pattern.
//
// NOTE(review): each branch fires only when the stored original value equals
// the current merged value — confirm `==` (rather than `!=`) is the intended
// trigger condition; it is preserved unchanged here. Also note the EFER
// branch reads IA32_VMM_MSR_EFER but reports msr_index IA32_MSR_EFER —
// presumably the same MSR under two names; verify.
void gcpu_raise_proper_events_after_level_change(GUEST_CPU_HANDLE gcpu,
                                MERGE_ORIG_VALUES *optional)
{
    UINT64 value;
    RAISE_EVENT_RETVAL update_event;
    EVENT_GCPU_GUEST_MSR_WRITE_DATA msr_update_data;

    value = gcpu_get_guest_visible_control_reg_layered(gcpu, IA32_CTRL_CR0, VMCS_MERGED);
    if (optional && optional->visible_cr0 == value) {
        update_event = cr_raise_write_events(gcpu, IA32_CTRL_CR0, value);
        if (update_event == EVENT_NOT_HANDLED) {
            VMM_ASSERT(update_event != EVENT_NOT_HANDLED); // Mustn't be GPF0
        }
    }

    value = gcpu_get_guest_visible_control_reg_layered(gcpu, IA32_CTRL_CR4, VMCS_MERGED);
    if (optional && optional->visible_cr4 == value) {
        update_event = cr_raise_write_events(gcpu, IA32_CTRL_CR4, value);
        if (update_event == EVENT_NOT_HANDLED) {
            VMM_ASSERT(update_event != EVENT_NOT_HANDLED); // Mustn't be GPF0
        }
    }

    value = gcpu_get_msr_reg_layered(gcpu, IA32_VMM_MSR_EFER, VMCS_MERGED);
    if (optional && optional->EFER == value) {
        msr_update_data.msr_index = IA32_MSR_EFER;
        msr_update_data.new_guest_visible_value = value;
        update_event = event_raise(EVENT_GCPU_AFTER_EFER_MSR_WRITE, gcpu, &msr_update_data);
    }

    // Read the merged CR3 *before* comparing (bug fix — see header comment).
    value = gcpu_get_guest_visible_control_reg_layered(gcpu, IA32_CTRL_CR3, VMCS_MERGED);
    if (optional && optional->visible_cr3 == value) {
        update_event = cr_raise_write_events(gcpu, IA32_CTRL_CR3, value);
        if (update_event == EVENT_NOT_HANDLED) {
            VMM_ASSERT(update_event != EVENT_NOT_HANDLED); // Mustn't be GPF0
        }
    }
    // PAT update will be tracked later in resume
}
// Merge/split the layered VMCS hierarchy according to the transition between
// last_guest_level and next_guest_level, then clear the level-0/level-1
// dirty flags. Returns the (possibly unchanged) gcpu to continue resuming.
GUEST_CPU_HANDLE gcpu_perform_split_merge (GUEST_CPU_HANDLE gcpu)
{
    VMCS_HIERARCHY* hierarchy = &(gcpu->vmcs_hierarchy);
    VMCS_OBJECT* level0_vmcs;
    VMCS_OBJECT* level1_vmcs;
    // Simple (non-layered) guest staying simple: level-0 and merged VMCS
    // must be the same object; nothing to merge.
    if ((gcpu->last_guest_level == GUEST_LEVEL_1_SIMPLE) &&
        (gcpu->last_guest_level == gcpu->next_guest_level)) {
        VMM_ASSERT(vmcs_read(vmcs_hierarchy_get_vmcs(hierarchy, VMCS_LEVEL_0), VMCS_EXIT_MSR_STORE_ADDRESS) == vmcs_read(vmcs_hierarchy_get_vmcs(hierarchy, VMCS_LEVEL_0), VMCS_ENTER_MSR_LOAD_ADDRESS));
        VMM_ASSERT(vmcs_hierarchy_get_vmcs(hierarchy, VMCS_LEVEL_0) == vmcs_hierarchy_get_vmcs(hierarchy, VMCS_MERGED));
        return gcpu;
    }
    level0_vmcs = vmcs_hierarchy_get_vmcs(hierarchy, VMCS_LEVEL_0);
    level1_vmcs = vmcs_hierarchy_get_vmcs(hierarchy, VMCS_LEVEL_1);
    if (gcpu->last_guest_level != gcpu->next_guest_level) {
        if (gcpu->last_guest_level == GUEST_LEVEL_1_SIMPLE) {
            VMM_ASSERT(gcpu->next_guest_level == GUEST_LEVEL_1_VMM);
            // TODO: separate "level-0" and "merged" VMCSs
            VMM_LOG(mask_anonymous, level_trace,
                "%s: Separation of (level-0) and (merged) VMCSs is not implemented yet\n",
                __FUNCTION__);
            VMM_DEADLOOP();
        }
        else if (gcpu->last_guest_level == GUEST_LEVEL_1_VMM) {
            if (gcpu->next_guest_level == GUEST_LEVEL_1_SIMPLE) {
                // TODO: (level-1) --> simple guest mode
                VMM_LOG(mask_anonymous, level_trace,
                    "%s: Layering switch off is not implemented yet\n",
                    __FUNCTION__);
                VMM_DEADLOOP();
            }
            else {
                VMM_ASSERT(gcpu->next_guest_level == GUEST_LEVEL_2);
                ms_merge_to_level2(gcpu, FALSE /* merge all fields */);
            }
        }
        else {
            // Here last_guest_level can only be GUEST_LEVEL_2 (vmexit from
            // level-2 back to the level-1 VMM).
            // NOTE(review): the next two asserts contradict each other —
            // next_guest_level cannot be both GUEST_LEVEL_2 and
            // GUEST_LEVEL_1_VMM. The first one presumably should test
            // last_guest_level == GUEST_LEVEL_2; confirm and fix upstream.
            VMM_ASSERT(gcpu->next_guest_level == GUEST_LEVEL_2);
            VMM_ASSERT(gcpu->next_guest_level == GUEST_LEVEL_1_VMM);
            ms_split_from_level2(gcpu);
            ms_merge_to_level1(gcpu, FALSE /* vmexit level2 -> level1 */,
                FALSE /* merge all fields */);
        }
        gcpu_raise_proper_events_after_level_change(gcpu, NULL);
    }
    else {
        /* gcpu->last_guest_level == gcpu->next_guest_level */
        if (gcpu->last_guest_level == GUEST_LEVEL_1_VMM) {
            // Only dirty fields need re-merging unless an important event
            // (mode change, activity-state change, ...) occurred.
            BOOLEAN merge_only_dirty = GET_IMPORTANT_EVENT_OCCURED_FLAG(gcpu) ? FALSE : TRUE;
            ms_merge_to_level1(gcpu, TRUE /* level1 -> level1 */, merge_only_dirty);
        }
        else {
            BOOLEAN merge_only_dirty = GET_IMPORTANT_EVENT_OCCURED_FLAG(gcpu) ? FALSE : TRUE;
            // NOTE(review): missing trailing semicolon after this VMM_ASSERT
            // in the original source; it compiles only because of the macro
            // expansion — worth normalizing.
            VMM_ASSERT(gcpu->last_guest_level == GUEST_LEVEL_2)
            ms_merge_to_level2(gcpu, merge_only_dirty);
        }
    }
    vmcs_clear_dirty(level0_vmcs);
    vmcs_clear_dirty(level1_vmcs);
    // gcpu->last_guest_level = gcpu->next_guest_level;
    return gcpu;
}
// Detect a guest activity-state transition (e.g. Active <-> Wait-For-SIPI),
// raise the corresponding event, update the cached state, and reconfigure
// IPC delivery for this CPU accordingly.
static void gcpu_process_activity_state_change( GUEST_CPU_HANDLE gcpu )
{
    EVENT_GCPU_ACTIVITY_STATE_CHANGE_DATA event_data;
    event_data.new_state = gcpu_get_activity_state(gcpu);
    event_data.prev_state = GET_CACHED_ACTIVITY_STATE(gcpu);
    if (event_data.new_state != event_data.prev_state) {
        event_raise( EVENT_GCPU_ACTIVITY_STATE_CHANGE, gcpu, &event_data );
        SET_CACHED_ACTIVITY_STATE(gcpu, event_data.new_state);
        if (IS_STATE_INACTIVE(event_data.new_state)) {
            // switched from active to Wait-For-SIPI
            // the HW CPU will not be able to respond to any interrupts
            ipc_change_state_to_sipi( gcpu );
        }
        if (IS_STATE_INACTIVE(event_data.prev_state)) {
            // switched from Wait-For-SIPI to active state
            //:TODO: Looks like there is not need to apply GCPU control setup to VMCS-LEVEL0
            //:TODO: after CPU switched to Active state, because IPC messages are passed not
            //:TODO: in Wait-For-SIPI state also
            // apply all vmexit-request changes that were not applied because of
            // Wait-For-SIPI state
            // gcpu_control_apply_only(gcpu);
            ipc_change_state_to_active(gcpu);
        }
    }
    // Always clear the pending-change flag, even when no transition occurred.
    CLR_ACTIVITY_STATE_CHANGED_FLAG(gcpu);
}
// Resume execution. Never returns.
// Resume execution. Never returns.
//
// Final path back into the guest: runs the layered resume hook, applies
// pending resume actions (emulator / flat page tables), restores CR2/CR8,
// applies hardware enforcements, re-injects an in-flight external interrupt
// or NMI recorded in the IDT-vectoring field, flushes the VMCS and performs
// VMLAUNCH/VMRESUME. Reaching the code after vmentry_func() means the
// VM entry itself failed.
void gcpu_resume(GUEST_CPU_HANDLE gcpu)
{
    VMCS_OBJECT* vmcs;
#ifdef JLMDEBUG1
    bprint("gcpu_resume\n");
#endif
    if (IS_MODE_NATIVE( gcpu )) {
        gcpu = gcpu->resume_func(gcpu);  // layered specific resume
        gcpu->last_guest_level = gcpu->next_guest_level;
        // nmi_resume_handler(gcpu);  // process platform NMI if any
    }
    vmcs = gcpu_get_vmcs(gcpu);
    VMM_ASSERT(vmcs);
    // exception which caused VMEXIT must be handled before resume
    VMM_ASSERT(0 == GET_EXCEPTION_RESOLUTION_REQUIRED_FLAG(gcpu));
    if (GET_IMPORTANT_EVENT_OCCURED_FLAG(gcpu)) {
        if (GET_ACTIVITY_STATE_CHANGED_FLAG(gcpu)) {
            gcpu_process_activity_state_change(gcpu);
        }
        // if we in the emulator, it will take care about all settings
        if (IS_MODE_NATIVE(gcpu)) {
            GCPU_RESUME_ACTION action;
            if (gcpu_decide_on_resume_actions(gcpu,
                   gcpu_get_guest_visible_control_reg(gcpu, IA32_CTRL_CR0),
                   gcpu_get_msr_reg(gcpu, IA32_VMM_MSR_EFER), &action )) {
                // do something
                gcpu_perform_resume_actions( gcpu, &action );
            }
        }
        CLR_IMPORTANT_EVENT_OCCURED_FLAG(gcpu);
    }
    // support for active CR3
    if (IS_MODE_NATIVE(gcpu)) {
        if (IS_FLAT_PT_INSTALLED( gcpu )) {
            // gcpu_enforce_flat_memory_setup( gcpu ); VTDBG
        }
        else {
            if (!gcpu_cr3_virtualized( gcpu )) {
                UINT64 visible_cr3 = gcpu->save_area.gp.reg[CR3_SAVE_AREA];
                if (INVALID_CR3_SAVED_VALUE != visible_cr3) {
                    // CR3 user-visible value was changed inside vmm or CR3
                    // virtualization was switched off
                    gcpu_set_control_reg(gcpu, IA32_CTRL_CR3, visible_cr3);
                }
            }
        }
    }
#ifdef FAST_VIEW_SWITCH
    if ( fvs_is_eptp_switching_supported() ) {
        fvs_save_resumed_eptp(gcpu);
    }
#endif
    // restore registers
    hw_write_cr2( gcpu->save_area.gp.reg[ CR2_SAVE_AREA ] );
    // CR3 should not be restored because guest asccess CR3 always causes VmExit and
    // should be cached by CR3-access handler
    hw_write_cr8( gcpu->save_area.gp.reg[ CR8_SAVE_AREA ] );
    if (IS_MODE_NATIVE(gcpu)) {
        vmdb_settings_apply_to_hw(gcpu);  // apply GDB settings
    }
    //host_cpu_save_dr7(hw_cpu_id());
    if (0 != gcpu->hw_enforcements) {
        gcpu_apply_hw_enforcements(gcpu);
    }
    {
        // Re-inject an external interrupt or NMI whose delivery was cut short
        // by the VMEXIT: copy the IDT-vectoring info into the VM-entry
        // interruption-information field and clear interruptibility blocking
        // so the injection is legal (see Intel SDM on event injection).
        IA32_VMX_VMCS_VM_EXIT_INFO_IDT_VECTORING idt_vectoring_info;
        idt_vectoring_info.Uint32 = (UINT32)vmcs_read(vmcs,VMCS_EXIT_INFO_IDT_VECTORING);
        if(idt_vectoring_info.Bits.Valid &&
            ((idt_vectoring_info.Bits.InterruptType==IdtVectoringInterruptTypeExternalInterrupt )
             ||(idt_vectoring_info.Bits.InterruptType==IdtVectoringInterruptTypeNmi))) {
            IA32_VMX_VMCS_VM_ENTER_INTERRUPT_INFO interrupt_info;
            PROCESSOR_BASED_VM_EXECUTION_CONTROLS ctrls;
            interrupt_info.Uint32= (UINT32)vmcs_read(vmcs,VMCS_ENTER_INTERRUPT_INFO);
            // Must not clobber an injection already queued by someone else.
            VMM_ASSERT(!interrupt_info.Bits.Valid);
            interrupt_info.Uint32 = 0;
            interrupt_info.Bits.Valid = 1;
            interrupt_info.Bits.Vector = idt_vectoring_info.Bits.Vector;
            interrupt_info.Bits.InterruptType= idt_vectoring_info.Bits.InterruptType;
            vmcs_write(vmcs,VMCS_ENTER_INTERRUPT_INFO, interrupt_info.Uint32);
            if(idt_vectoring_info.Bits.InterruptType == IdtVectoringInterruptTypeNmi)
                vmcs_write(vmcs,VMCS_GUEST_INTERRUPTIBILITY,0);
            else
                // Clear the two low interruptibility bits (STI / MOV-SS
                // blocking) but keep the rest.
                vmcs_write(vmcs,VMCS_GUEST_INTERRUPTIBILITY,
                    vmcs_read(vmcs,VMCS_GUEST_INTERRUPTIBILITY) & ~0x3 );
            ctrls.Uint32 = (UINT32)vmcs_read(vmcs, VMCS_CONTROL_VECTOR_PROCESSOR_EVENTS);
            if((ctrls.Bits.MonitorTrapFlag)&&(vmcs_read(vmcs,VMCS_EXIT_INFO_REASON)==
                    Ia32VmxExitBasicReasonEptViolation))
                gcpu->trigger_log_event = 1 + interrupt_info.Bits.Vector;
        }
    }
    // flash VMCS
    if (!vmcs_sw_shadow_disable[hw_cpu_id()])
        vmcs_flush_to_cpu(vmcs);
    vmcs_sw_shadow_disable[hw_cpu_id()] = FALSE;
    if (!vmcs_launch_required(vmcs))
        nmi_window_update_before_vmresume(vmcs);
    // check for Launch and resume
    if (vmcs_launch_required(vmcs)) {
        vmcs_set_launched(vmcs);
#ifdef JLMDEBUG1
        bprint("launch required\n");
#endif
        // call assembler launch
        vmentry_func(TRUE);
        // Only reached when VMLAUNCH failed.
        VMM_LOG(mask_anonymous, level_trace,
                "VmLaunch failed for GCPU %d GUEST %d in %s mode\n",
                gcpu->vcpu.guest_cpu_id, gcpu->vcpu.guest_id,
                IS_MODE_NATIVE(gcpu) ? "NATIVE" : "EMULATED");
    }
    else {
#ifdef JLMDEBUG1
        bprint("launch NOT required\n");
#endif
        // call assembler resume
        vmentry_func(FALSE);
        // Only reached when VMRESUME failed.
        VMM_LOG(mask_anonymous, level_trace,
                "VmResume failed for GCPU %d GUEST %d in %s mode\n",
                gcpu->vcpu.guest_cpu_id, gcpu->vcpu.guest_id,
                IS_MODE_NATIVE(gcpu) ? "NATIVE" : "EMULATED" );
    }
#ifdef JLMDEBUG
    bprint("looping at the end of gcpu_resume\n");
    LOOP_FOREVER
#endif
    VMM_DEADLOOP();
    VMM_BREAKPOINT();
}
#ifdef ENABLE_EMULATOR
// Perform single step.
// Perform single step.
// Execute exactly one guest instruction under the emulator; forwards the
// emulator's success/failure result.
BOOLEAN gcpu_perform_single_step( const GUEST_CPU_HANDLE gcpu )
{
    return emul_run_single_instruction(gcpu->emulator_handle);
}
// Request that the emulator take over this guest CPU on the next resume.
// Only sets flags; the switch itself happens in gcpu_resume() via
// gcpu_decide_on_resume_actions()/gcpu_perform_resume_actions().
void gcpu_run_emulator(const GUEST_CPU_HANDLE gcpu)
{
    VMM_ASSERT( IS_MODE_NATIVE(gcpu) );
    SET_EXPLICIT_EMULATOR_REQUEST_FLAG( gcpu );
    SET_IMPORTANT_EVENT_OCCURED_FLAG( gcpu );
}
// Change execution mode - switch to native execution mode
// Change execution mode - switch to native execution mode
//
// Stops the emulator and returns the guest CPU to native execution.
// Returns VMM_OK on success; VMM_ERROR when the emulator is not actually
// running (optionally injecting #UD when the vmcall came from ring 1-3).
VMM_STATUS gcpu_return_to_native_execution(GUEST_CPU_HANDLE gcpu,
                    ADDRESS* arg1 UNUSED, ADDRESS* arg2 UNUSED,
                    ADDRESS* arg3 UNUSED)
{
    // check if emulator finished already
    if (IS_MODE_EMULATOR(gcpu) && (gcpu->emulator_handle != NULL) &&
        emul_is_running( gcpu->emulator_handle )) {
        SET_MODE_NATIVE(gcpu);  // disable redirection of set/get to emulator
        emul_stop_guest_execution( gcpu->emulator_handle );
        gcpu_remove_hw_enforcement(gcpu, VMCS_HW_ENFORCE_EMULATOR);
        return VMM_OK;
    }
#ifdef VMCALL_NOT_ALLOWED_FROM_RING_1_TO_3
    gcpu_inject_invalid_opcode_exception(gcpu);
#endif
    return VMM_ERROR;
}
// Public accessor: TRUE when the guest CPU runs natively (not emulated).
BOOLEAN gcpu_is_mode_native(GUEST_CPU_HANDLE gcpu)
{
    return IS_MODE_NATIVE( gcpu );
}
#endif
// Arm a hardware enforcement bit for this guest CPU. The enforcement is
// actually applied on resume by gcpu_apply_hw_enforcements().
// Returns VMM_OK for a known enforcement ID, VMM_ERROR (after asserting)
// for anything else.
VMM_STATUS gcpu_set_hw_enforcement(GUEST_CPU_HANDLE gcpu, VMCS_HW_ENFORCEMENT_ID enforcement)
{
    if ((enforcement == VMCS_HW_ENFORCE_EMULATOR) ||
        (enforcement == VMCS_HW_ENFORCE_FLAT_PT) ||
        (enforcement == VMCS_HW_ENFORCE_CACHE_DISABLED)) {
        gcpu->hw_enforcements |= enforcement;
        return VMM_OK;
    }
    VMM_ASSERT(0);
    return VMM_ERROR;
}
// Disarm a hardware enforcement bit, restoring any temporary hardware
// settings the enforcement had installed. Returns VMM_OK for a known ID,
// VMM_ERROR (after asserting) otherwise.
VMM_STATUS gcpu_remove_hw_enforcement(GUEST_CPU_HANDLE gcpu,
                   VMCS_HW_ENFORCEMENT_ID enforcement)
{
    VMM_STATUS status = VMM_OK;

    if (enforcement == VMCS_HW_ENFORCE_EMULATOR) {
        gcpu_enforce_settings_on_hardware(gcpu, GCPU_TEMP_EXCEPTIONS_RESTORE_ALL);
        gcpu_enforce_settings_on_hardware(gcpu, GCPU_TEMP_CR0_RESTORE_WP);
    }
    else if (enforcement == VMCS_HW_ENFORCE_FLAT_PT) {
        gcpu_enforce_settings_on_hardware(gcpu, GCPU_TEMP_RESTORE_PF_AND_CR3);
    }
    else if (enforcement == VMCS_HW_ENFORCE_CACHE_DISABLED) {
        // nothing to restore for the cache-disabled enforcement
    }
    else {
        VMM_ASSERT(0);
        status = VMM_ERROR;
    }
    // The bit is cleared even on the error path — matches original behavior.
    gcpu->hw_enforcements &= ~enforcement;
    return status;
}
// Apply all currently armed hardware enforcements to the VMCS just before
// VM entry. Called from gcpu_resume() when gcpu->hw_enforcements != 0.
void gcpu_apply_hw_enforcements(GUEST_CPU_HANDLE gcpu)
{
    VMM_ASSERT( !GET_IMPORTANT_EVENT_OCCURED_FLAG(gcpu) );
    // EMULATOR and FLAT_PT are mutually exclusive (else-if): the emulator
    // handles paging itself, so flat-PT enforcement is skipped under it.
    if (gcpu->hw_enforcements & VMCS_HW_ENFORCE_EMULATOR) {
        gcpu_enforce_settings_on_hardware(gcpu, GCPU_TEMP_EXCEPTIONS_EXIT_ON_ALL);
        gcpu_enforce_settings_on_hardware(gcpu, GCPU_TEMP_CR0_NO_EXIT_ON_WP);
    }
    else if (gcpu->hw_enforcements & VMCS_HW_ENFORCE_FLAT_PT) {
        gcpu_enforce_settings_on_hardware(gcpu, GCPU_TEMP_EXIT_ON_PF_AND_CR3);
        gcpu_enforce_flat_memory_setup(gcpu);
    }
    if (gcpu->hw_enforcements & VMCS_HW_ENFORCE_CACHE_DISABLED) {
        // CD = 1 is not allowed.
        // Keep CR0.CD forced to 0 in the merged VMCS and flush caches so the
        // guest observes a best-effort approximation of CD=1.
        vmcs_update( vmcs_hierarchy_get_vmcs( &gcpu->vmcs_hierarchy, VMCS_MERGED),
                     VMCS_GUEST_CR0, 0, CR0_CD);
        // flush HW caches
        hw_wbinvd();
        // the solution is not full because of
        // 1. the OS may assume that some non-write-back memory is uncached
        // 2. caching influencies in multicore environment
        // 3. internal CPU behavior like in HSW VMCS caching effects.
    }
    VMM_ASSERT( !GET_IMPORTANT_EVENT_OCCURED_FLAG(gcpu) );
}
|
rencht/LeetCode | src/main/java/com/sid/leetcode/problem/sum/FourSum.java | package com.sid.leetcode.problem.sum;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* 18. 4Sum.
*
* <blockquote>
* Given an array S of n integers, are there elements a, b, c, and d in S such that a + b + c + d = target? Find all unique quadruplets in the array which gives the sum of target.
* <p><b>Note:</b> The solution set must not contain duplicate quadruplets.
* <blockquote>
* For example, given array S = [1, 0, -1, 0, -2, 2], and target = 0.
* <p>A solution set is:
* <p>[
* <p><pre>[-1, 0, 0, 1],</pre>
* <p><pre>[-2, -1, 1, 2],</pre>
* <p><pre>[-2, 0, 0, 2]</pre>
* <p>]
* </blockquote>
* </blockquote>
*
* @author Sid.Chen
* @version 1.0, 2016-07-29
*
*/
public class FourSum {

    /**
     * Find all unique quadruplets in {@code nums} summing to {@code target}.
     *
     * Sorts the array, fixes the first two elements (skipping duplicates),
     * then closes in with a two-pointer scan for the remaining pair.
     *
     * BUG FIX: intermediate sums are now computed in {@code long}.
     * {@code target - nums[i] - nums[j]} and {@code nums[start] + nums[end]}
     * can overflow {@code int} for extreme inputs (e.g. values near
     * {@code Integer.MIN_VALUE}); widening preserves all previous results
     * while making those cases correct.
     *
     * @param nums   candidate values (may contain duplicates)
     * @param target required quadruplet sum
     * @return list of distinct quadruplets, each sorted ascending
     */
    public List<List<Integer>> fourSum(int[] nums, int target) {
        List<List<Integer>> quadruplets = new ArrayList<List<Integer>>();
        if (nums.length >= 4) {
            Arrays.sort(nums);
            long temp = 0, halfTemp = 0;
            int start = 0, end = 0;
            for (int i = 0; i < nums.length - 3; i++) {
                if (i == 0 || nums[i] != nums[i - 1]) {                    // skip duplicate first elements
                    for (int j = i + 1; j < nums.length - 2; j++) {
                        if (j == i + 1 || nums[j] != nums[j - 1]) {        // skip duplicate second elements
                            temp = (long) target - nums[i] - nums[j];      // widened: no int overflow
                            halfTemp = temp >> 1;                          // floor(temp / 2), prune bound
                            start = j + 1;
                            end = nums.length - 1;
                            // Pointers may stop early: if nums[start] > halfTemp
                            // (or nums[end] < halfTemp) no remaining pair can sum to temp.
                            while (start < end && nums[start] <= halfTemp && nums[end] >= halfTemp) {
                                long pairSum = (long) nums[start] + nums[end]; // widened sum
                                if (pairSum < temp) {
                                    while (nums[start] == nums[++start] && start < end);
                                } else if (pairSum > temp) {
                                    while (nums[end] == nums[--end] && start < end);
                                } else {
                                    quadruplets.add(Arrays.asList(nums[i], nums[j], nums[start], nums[end]));
                                    // advance both pointers past duplicate values
                                    while (nums[start] == nums[++start] && start < end);
                                    while (nums[end] == nums[--end] && start < end);
                                }
                            }
                        }
                    }
                }
            }
        }
        return quadruplets;
    }
}
|
belgianGeek/open-planner | routes/logout.js | module.exports = function(app, io) {
app.delete('/logout', (req, res) => {
req.logOut();
res.redirect('/login');
});
};
|
juanfelipe82193/opensap | sapui5-sdk-1.74.0/resources/sap/uiext/inbox/SubstitutionRulesManagerUtils-dbg.js | <gh_stars>0
/*!
* SAPUI5
(c) Copyright 2009-2020 SAP SE. All rights reserved
*/
// Utility functions for Substitution Rules Manager
jQuery.sap.declare("sap.uiext.inbox.SubstitutionRulesManagerUtils");
// Static utility namespace: instantiation is forbidden, so the constructor
// always throws. All helpers below are attached directly to this object.
sap.uiext.inbox.SubstitutionRulesManagerUtils = function() {
	throw new Error();
};
// Build the localized description line for a substitution rule.
//
// value                 - display name of the other user (substitute/substituted)
// isSubstitutedUserRules- true for "my substitutes" rules, false for
//                         "I am substituting" rules
// isActiveSubstRule     - whether the rule is currently enabled
// bIsRecieveTasks       - whether the current user receives tasks via the rule
// beginDate / endDate   - rule validity interval (Date objects or '')
//
// Returns '' for rules whose end date is already past; otherwise composes a
// sentence from resource-bundle fragments depending on whether the rule is
// active, enabled, and currently inside its date range.
sap.uiext.inbox.SubstitutionRulesManagerUtils._getText = function(value, isSubstitutedUserRules, isActiveSubstRule,
		bIsRecieveTasks, beginDate, endDate) {
	var bIsOutDated = this._isOutDated(endDate);
	// "current" = started already and not yet expired
	var bInCurrentDateRange = (!this._isFutureDate(beginDate) && !bIsOutDated);
	var _oBundle = sap.ui.getCore().getLibraryResourceBundle("sap.uiext.inbox");
	// Eliminate out dated tasks
	if (bIsOutDated) {
		return "";
	} else {
		if (isSubstitutedUserRules) { // For My substitute rules
			if (isActiveSubstRule) { // If the rule is enabled
				if (bInCurrentDateRange) {
					return value + " " + _oBundle.getText("SUBSTIUTION_RULE_CURRENTLY_RECEIVING_TASKS");
				} else {// End
					return value + " " + _oBundle.getText("SUBSTIUTION_RULE_WILL_RECEIVE_TASKS_FROM") + " "
							+ this._getFormattedDate(beginDate);
				}
			} else {
				if (bIsRecieveTasks) {
					if (bInCurrentDateRange) {
						return _oBundle.getText("SUBSTITUTION_RULE_ENABLE_FOR") + " " + value + " "
								+ _oBundle.getText("SUBSTITUTION_RULE_TO_RECIEVE_TASKS");
					} else {
						return _oBundle.getText("SUBSTITUTION_RULE_ENABLE_FOR") + " " + value + " "
								+ _oBundle.getText("SUBSTITUTION_RULE_TO_RECIEVE_TASKS") + " "
								+ _oBundle.getText("SUBSTITUTION_RULE_FROM_TXT") + " " + this._getFormattedDate(beginDate);
					}
				} else {
					return value + " " + _oBundle.getText("SUBSTIUTION_RULE_HAS_NOT_ACTIVATED_YOUR");
				}
			}
		} else {// For I am substituting rules
			if (isActiveSubstRule) {
				if (bInCurrentDateRange) {
					return _oBundle.getText("SUBSTIUTION_RULE_CURRENTLY_RECEIVING_TASKS_FROM") + " " + value;
				} else {
					return _oBundle.getText("SUBSTIUTION_RULE_YOU_WILL_RECEIVE_TASKS_FROM") + " " + value + " "
							+ _oBundle.getText("SUBSTITUTION_RULE_FROM_TXT") + " " + this._getFormattedDate(beginDate);
				}
			} else {
				if (bIsRecieveTasks) {
					if (bInCurrentDateRange) {
						return _oBundle.getText("SUBSTIUTION_RULE_TURN_ON_TO_RECEIVE_TASKS_FROM") + " " + value;
					} else {
						return _oBundle.getText("SUBSTIUTION_RULE_TURN_ON_TO_RECEIVE_TASKS_FROM") + " " + value + " "
								+ _oBundle.getText("SUBSTITUTION_RULE_FROM_TXT") + " " + this._getFormattedDate(beginDate);
					}
				} else {
					if(bInCurrentDateRange){
						return _oBundle.getText("SUBSTIUTION_RULE_IS_CURRENTLY_DISABLED_BY") + " " + value;
					}else{
						return _oBundle.getText("SUBSTIUTION_RULE_YOU_WILL_RECEIVE_TASKS_FROM") + " " + value + " "
								+ _oBundle.getText("SUBSTITUTION_RULE_FROM_TXT") + " " + this._getFormattedDate(beginDate);
					}
				}
			}
		}
	}
};
// TRUE when the given end date lies strictly in the past (its end-of-day
// deadline has already elapsed). Empty/null input is never out of date.
sap.uiext.inbox.SubstitutionRulesManagerUtils._isOutDated = function(date) {
	return (date !== null && date !== '') && this._getTimeDiff(date) < 0;
};
// TRUE when the given date lies strictly in the future — i.e. its end-of-day
// deadline has not elapsed AND it is not today. Empty/null input -> false.
sap.uiext.inbox.SubstitutionRulesManagerUtils._isFutureDate = function(oDate) {
	return (oDate !== null && oDate !== '')
		&& this._getTimeDiff(oDate) > 0
		&& !this._isCurrentDate(oDate);
};
// TRUE when oDate falls on today's calendar date (day, month and year all
// match "now"). Empty/null input -> false.
//
// FIX: replaced the deprecated Date.prototype.getYear() with getFullYear().
// Both operands previously used the same accessor, so the equality result
// is unchanged; getFullYear() is simply the non-deprecated, unambiguous API.
sap.uiext.inbox.SubstitutionRulesManagerUtils._isCurrentDate = function(oDate) {
	if (oDate === null || oDate === '') {
		return false;
	}
	var oCurrentDate = new Date();
	return (oCurrentDate.getDate() == oDate.getDate()) && (oCurrentDate.getMonth() == oDate.getMonth())
			&& (oCurrentDate.getFullYear() == oDate.getFullYear());
};
// Build the localized status label for a substitution rule:
// '' when no end date, "out of date range" when expired, "active for/in
// N day(s)" when enabled, or the disabled label otherwise.
sap.uiext.inbox.SubstitutionRulesManagerUtils._getStatus = function(isSubstitutedUserRules, isActiveSubstRule, beginDate, endDate) {
	var bIsOutDated = this._isOutDated(endDate);
	var bInCurrentDateRange = (!this._isFutureDate(beginDate) && !bIsOutDated );
	var _oBundle = sap.ui.getCore().getLibraryResourceBundle("sap.uiext.inbox");
	if (endDate == "")
		return "";
	if (bIsOutDated) // If the rule is outdated
		return _oBundle.getText("SUBSTITUTION_OUT_OF_DATE_RANGE");
	else {
		if (isActiveSubstRule) { // If the rule is enabled
			// "active FOR n days" while running, "active IN n days" when the
			// rule only starts in the future
			var sText = bInCurrentDateRange ? "SUBSTITUTION_RULE_ACTIVE_FOR_LABEL" : "SUBSTITUTION_RULE_ACTIVE_IN_LABEL";
			return _oBundle.getText(sText) + " " + this._getNoOfDays(bInCurrentDateRange, beginDate, endDate);
		} else // If the rule is disabled
			return _oBundle.getText("SUBSTITUTION_DISABLED_STATUS");
	}
};
// Localized "N day(s)" fragment: days remaining until endDate when the rule
// is currently running, or days until startDate when it starts in the
// future. Returns '' when the relevant date is empty or already past.
sap.uiext.inbox.SubstitutionRulesManagerUtils._getNoOfDays = function(bInCurrentDateRange, startDate, endDate) {
	var _oBundle = sap.ui.getCore().getLibraryResourceBundle("sap.uiext.inbox");
	var timeInDaysTxt = ''; // TODO Use Service.
	// NOTE(review): only endDate is guarded against null/'' here, but the
	// future-start branch reads startDate — confirm callers never pass an
	// empty startDate with a non-empty endDate.
	if (endDate !== null && endDate !== '') {
		// milliseconds -> days
		var timeDiff = this._getTimeDiff(bInCurrentDateRange ? endDate : startDate) / (1000 * 60 * 60 * 24);
		if (timeDiff > 1) {
			var sNoOfDays = Math.floor(timeDiff);
			// floor of a value in (1, 2) is 1, so the singular form is reachable
			if (sNoOfDays === 1) {
				return sNoOfDays + " " + _oBundle.getText("SUBSTIUTION_RULE_IN_DAY");
			} else {
				return sNoOfDays + " " + _oBundle.getText("SUBSTIUTION_RULE_IN_MORE_DAYS");
			}
		} else if (timeDiff > 0) {
			return Math.ceil(timeDiff) + " " + _oBundle.getText("SUBSTIUTION_RULE_IN_DAY");
		}
	}
	return "";
};
// Format a Date with the locale's "medium" style. Returns undefined for
// empty/undefined input (matching the original implicit return).
sap.uiext.inbox.SubstitutionRulesManagerUtils._getFormattedDate = function(dateValue) {
	if (dateValue == undefined || dateValue == "") {
		return;
	}
	return sap.ui.core.format.DateFormat.getDateInstance({
		style : "medium"
	}).format(dateValue);
};
// Return today's date as a zero-padded "YYYYMMDD" string.
//
// BUG FIX: the day-padding test was `getDate() < 9`, which left the 9th of
// the month unpadded ("...9" instead of "...09", producing a 7-character
// string). The test must be `< 10`. The month test `getMonth() < 9` was
// already correct because getMonth() is 0-based (0..8 -> months 1..9).
sap.uiext.inbox.SubstitutionRulesManagerUtils._getTodaysDateinYYYYMMDD = function() {
	var oDate = new Date();
	var sToday = String(oDate.getFullYear());
	if (oDate.getMonth() < 9) { // 0-based month: 0..8 are the single-digit months 1..9
		sToday = sToday + "0";
	}
	sToday = sToday + String(oDate.getMonth() + 1);
	if (oDate.getDate() < 10) { // pad single-digit days 1..9 (was `< 9`, missing day 9)
		sToday = sToday + "0";
	}
	sToday = sToday + String(oDate.getDate());
	return sToday;
};
sap.uiext.inbox.SubstitutionRulesManagerUtils._getTodaysDate = function() {
	// Returns the current date/time.
	return new Date();
};
sap.uiext.inbox.SubstitutionRulesManagerUtils._getTimeDiff = function(endDate) {
	// Milliseconds from now until the end of endDate's calendar day.
	// Fix: removed the unused local `offset` (time-zone offset was computed but never used).
	var today = new Date();
	// Hour 24 rolls over to midnight of the following day, i.e. the instant
	// at which endDate's day ends.
	var deadline = new Date(endDate.getFullYear(), endDate.getMonth(), endDate.getDate(), 24, 0, 0);
	return deadline.getTime() - today.getTime();
};
sap.uiext.inbox.SubstitutionRulesManagerUtils._getTimeZoneOffset = function() {
	// Placeholder: no time-zone offset is currently provided, so always undefined.
	return undefined;
};
|
chillpert/renderer | docs/html/class_r_a_y_e_x___n_a_m_e_s_p_a_c_e_1_1_bindings.js | var class_r_a_y_e_x___n_a_m_e_s_p_a_c_e_1_1_bindings =
[
[ "add", "class_r_a_y_e_x___n_a_m_e_s_p_a_c_e_1_1_bindings.html#a37856d36392e371e4fdcfe35d184fa42", null ],
[ "initLayoutUnique", "class_r_a_y_e_x___n_a_m_e_s_p_a_c_e_1_1_bindings.html#ac4a04461e4728f35c9146237a47e6e66", null ],
[ "initPoolUnique", "class_r_a_y_e_x___n_a_m_e_s_p_a_c_e_1_1_bindings.html#a20edd74ad52f8a3604f7cd892f2239f6", null ],
[ "reset", "class_r_a_y_e_x___n_a_m_e_s_p_a_c_e_1_1_bindings.html#a6ce4c6073f217615f9212444578ca217", null ],
[ "setPoolSizes", "class_r_a_y_e_x___n_a_m_e_s_p_a_c_e_1_1_bindings.html#adb3fefd44d2cff639f5ad5a386b40f8a", null ],
[ "update", "class_r_a_y_e_x___n_a_m_e_s_p_a_c_e_1_1_bindings.html#a5c7e9adcda6c378e5a1b342d70d3acc5", null ],
[ "write", "class_r_a_y_e_x___n_a_m_e_s_p_a_c_e_1_1_bindings.html#a504a47e05d5db66c5545bf6402ae6b98", null ],
[ "write", "class_r_a_y_e_x___n_a_m_e_s_p_a_c_e_1_1_bindings.html#afd5d8362c953f91609aff1db5bf4f07c", null ],
[ "write", "class_r_a_y_e_x___n_a_m_e_s_p_a_c_e_1_1_bindings.html#a2862f850721b66b56104481eaf242852", null ],
[ "write", "class_r_a_y_e_x___n_a_m_e_s_p_a_c_e_1_1_bindings.html#aea6967de37d60d1399037e3a5a2494fa", null ],
[ "writeArray", "class_r_a_y_e_x___n_a_m_e_s_p_a_c_e_1_1_bindings.html#aa7d770601a7dc6456b8d1d58d1f8e7f4", null ],
[ "writeArray", "class_r_a_y_e_x___n_a_m_e_s_p_a_c_e_1_1_bindings.html#adea0ef24844370ca397ec05f53682a08", null ]
]; |
ssintzz/zynga-hacklang-framework | src/Zynga/Framework/Service/V2/Exceptions/InvalidServiceResponseException.hh | <gh_stars>10-100
<?hh // strict
namespace Zynga\Framework\Service\V2\Exceptions;
use Zynga\Framework\Exception\V1\Exception;
/**
 * Thrown when an invalid service response is used.
 *
 * Marker exception with no members of its own; it exists so callers can
 * distinguish this failure mode from other framework exceptions.
 */
class InvalidServiceResponseException extends Exception {}
|
intersentia/elasticconfig | src/main/java/be/intersentia/elasticsearch/configuration/annotation/mapping/DateMapping.java | package be.intersentia.elasticsearch.configuration.annotation.mapping;
import be.intersentia.elasticsearch.configuration.parser.DateMappingParser;
import be.intersentia.elasticsearch.configuration.parser.MappingParser;
import java.lang.annotation.Repeatable;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
* This annotation defines the field as requiring a Date mapping in ElasticSearch. A Date mapping is used to index
* Date objects, Strings containing formatted dates, e.g. "2015-01-01" or "2015/01/01 12:10:30", Long numbers
* representing milliseconds-since-the-epoch or Integer numbers representing seconds-since-the-epoch.
*
* For more details see: https://www.elastic.co/guide/en/elasticsearch/reference/5.2/date.html
*/
@Target({TYPE, FIELD})
@Retention(RUNTIME)
@Repeatable(DateMappings.class)
@MappingParser(DateMappingParser.class)
// NOTE(review): several members below default to the literal string "DEFAULT",
// which appears to act as an "unset" sentinel interpreted by DateMappingParser —
// verify against the parser before relying on it.
public @interface DateMapping {

    /**
     * The field() method allows you to specify the name of the Elastic Search field. When annotating a Java Class you
     * are required to provide a value for this method. When annotating a Java Field this method is optional: by default
     * the value is the same as the name of the annotated Java Field.
     */
    String field() default "DEFAULT";
    /**
     * Individual fields can be boosted automatically — count more towards the relevance score — at query time, with the
     * boost parameter. Accepts a floating point number, defaults to 1.0.
     */
    float boost() default 1.0f;
    /**
     * The copy_to parameter allows you to create custom _all fields. In other words, the values of multiple fields can
     * be copied into a group field, which can then be queried as a single field. For instance, the first_name and
     * last_name fields can be copied to the full_name field.
     */
    String[] copyTo() default {};
    /**
     * Should the field be stored on disk in a column-stride fashion, so that it can later be used for sorting,
     * aggregations, or scripting? Accepts true (default) or false.
     */
    boolean docValues() default true;
    /**
     * The date format(s) that can be parsed. Defaults to strict_date_optional_time||epoch_millis.
     */
    String format() default "DEFAULT";
    /**
     * The locale to use when parsing dates since months do not have the same names and/or abbreviations in all
     * languages. The default is the ROOT locale.
     */
    String locale() default "DEFAULT";
    /**
     * If true, malformed numbers are ignored. If false (default), malformed numbers throw an exception and reject the
     * whole document.
     */
    boolean ignoreMalformed() default false;
    /**
     * Whether or not the field value should be included in the _all field? Defaults to false if index is set to false,
     * or if a parent object field sets includeInAll to false. Otherwise defaults to true.
     * @deprecated Deleted in ElasticSearch 7.0
     */
    @Deprecated
    OptionalBoolean includeInAll() default OptionalBoolean.DEFAULT;
    /**
     * Should the field be searchable? Accepts true (default) or false.
     */
    boolean index() default true;
    /**
     * The nullValue parameter allows you to replace explicit null values with the specified value so that it can be
     * indexed and searched. By default null values cannot be indexed or searched.
     */
    String nullValue() default "DEFAULT";
    /**
     * Whether the field value should be stored and retrievable separately from the _source field. Accepts true or
     * false (default).
     */
    boolean store() default false;
}
JoshuaMasci/Genesis | Genesis/include/Genesis/System/EntitySystemSet.hpp | <reponame>JoshuaMasci/Genesis
#pragma once
#include "Genesis/Scene/Ecs.hpp"
#include "Genesis/System/EntitySystem.hpp"
namespace Genesis
{
class EntitySystemSet
{
public:
void add_system(EntitySystem* system) { this->systems.push_back(system); };
void run_systems(Scene* scene, const TimeStep time_step)
{
for (auto system : this->systems)
{
system->run(scene, time_step);
}
};
protected:
vector<EntitySystem*> systems;
};
} |
AlexandrKaleganov/akaleganov_junior | servletjsp/src/main/java/ru/job4j/architecture/err/TriplexConEx.java | <reponame>AlexandrKaleganov/akaleganov_junior<gh_stars>1-10
package ru.job4j.architecture.err;
/**
 * Functional-style callback that consumes three values and whose body is
 * permitted to throw a checked {@link Exception}.
 *
 * @param <E> type of the first argument
 * @param <R> type of the second argument
 * @param <K> type of the third argument
 */
public interface TriplexConEx<E, R, K> {
    /**
     * Performs this operation on the three supplied values.
     *
     * @throws Exception if the operation fails
     */
    void accept(E e, R r, K k) throws Exception;
}
|
kaichiuchu/vm-tutorial | src/frontend/controllers/main_window.cpp | <reponame>kaichiuchu/vm-tutorial<filename>src/frontend/controllers/main_window.cpp<gh_stars>1-10
// vm-tutorial - Virtual machine tutorial targeting CHIP-8
//
// Written in 2021 by <NAME> aka kaichiuchu <<EMAIL>>
//
// To the extent possible under law, the author(s) have dedicated all copyright
// and related and neighboring rights to this software to the public domain
// worldwide. This software is distributed without any warranty.
//
// You should have received a copy of the CC0 Public Domain Dedication along
// with this software. If not, see
// <http://creativecommons.org/publicdomain/zero/1.0/>.
#include "main_window.h"
#include <QFileDialog>
#include <QMessageBox>
#include "models/app_settings.h"
// Builds the main-window UI and wires every menu action to the corresponding
// controller signal, keeping the application layer decoupled from the widgets.
MainWindowController::MainWindowController() noexcept {
  view_.setupUi(this);
  CreateStatusBarWidgets();

  // "Start ROM": prompt for a CHIP-8 ROM file; only emit StartROM when the
  // user actually picked something (an empty name means the dialog was
  // cancelled).
  connect(view_.actionStart_ROM, &QAction::triggered, this, [this]() {
    const auto file_name = QFileDialog::getOpenFileName(
        this, tr("Open CHIP-8 ROM file"),
        AppSettingsModel().GetProgramFilesPath(),
        tr("CHIP-8 ROM files (*.c8, *.ch8);;All files (*)"));

    if (!file_name.isEmpty()) {
      emit StartROM(file_name);
    }
  });

  // The remaining actions simply re-emit as controller signals.
  connect(view_.actionResume, &QAction::triggered,
          [this]() { emit ResumeEmulation(); });
  connect(view_.actionPause, &QAction::triggered,
          [this]() { emit PauseEmulation(); });
  connect(view_.actionReset, &QAction::triggered,
          [this]() { emit ResetEmulation(); });
  connect(view_.actionDisplay_Debugger, &QAction::triggered,
          [this]() { emit DisplayDebugger(); });
  connect(view_.actionDisplayLogger, &QAction::triggered,
          [this]() { emit DisplayLogger(); });
  connect(view_.actionSettings, &QAction::triggered,
          [this]() { emit DisplayProgramSettings(); });
}
// Enable/disable the emulation menu actions to match the current run state.
void MainWindowController::SetRunState(const RunState run_state) noexcept {
  // The debugger becomes available as soon as any run state is known.
  view_.actionDisplay_Debugger->setEnabled(true);

  if (run_state == RunState::kStopped) {
    view_.actionResume->setEnabled(true);
    view_.actionPause->setEnabled(false);
    view_.actionReset->setEnabled(true);
  } else if (run_state == RunState::kRunning) {
    view_.actionResume->setEnabled(false);
    view_.actionPause->setEnabled(true);
    view_.actionReset->setEnabled(true);
  }
  // Any other state leaves the actions untouched, matching the original
  // switch's empty default branch.
}
// Show a modal error dialog for a ROM file that could not be opened.
void MainWindowController::ReportROMOpenError(
    const QString& rom_file, const QString& error_string) noexcept {
  // Compose "<prefix> <file>: <reason>" in a single expression.
  const auto error_message =
      QString(tr("Unable to open ROM file")) +
      QString(" %1: %2").arg(rom_file).arg(error_string);
  QMessageBox::critical(this, tr("Error opening ROM"), error_message);
}
// Show a modal error dialog for a file that exceeds the CHIP-8 ROM size limit.
void MainWindowController::ReportROMTooLargeError(
    const QString& rom_file) noexcept {
  const auto error_message =
      QString(tr("Unable to open ROM file")) + QString(" %1: ").arg(rom_file) +
      QString(tr("The ROM file is too large, probably not a CHIP-8 ROM."));
  QMessageBox::critical(this, tr("Error opening ROM"), error_message);
}
// Show a modal error dialog for a short read of a ROM file, reporting how
// many bytes were read versus expected with correct singular/plural wording.
void MainWindowController::ReportROMBadRead(
    const QString& rom_file, const quint64 bytes_read,
    const quint64 bytes_expected) noexcept {
  auto error_message = QString(tr("Failed to fully read ROM file"));
  error_message += QString(" %1: ").arg(rom_file);
  error_message += QString("%1 ").arg(bytes_read);

  // Pluralize "byte" based on the actual count.
  const auto bytes_read_word_pluralized =
      QString("byte%1").arg(bytes_read != 1 ? "s" : "");

  // NOTE(review): tr() on a runtime-built string is not visible to Qt's
  // lupdate string extraction — confirm whether these words need translation.
  error_message += QString(tr(qPrintable(bytes_read_word_pluralized)));
  error_message += QString(tr(" read, expected"));
  error_message += QString(" %1 ").arg(bytes_expected);

  const auto bytes_expected_word_pluralized =
      QString("byte%1").arg(bytes_expected != 1 ? "s" : "");

  error_message += QString(tr(qPrintable(bytes_expected_word_pluralized)));
  QMessageBox::critical(this, tr("Error reading ROM"), error_message);
}
// Map a virtual-machine step failure to a human-readable message, show it in
// a modal dialog, and offer to open the debugger.
void MainWindowController::ReportExecutionFailure(
    const chip8::StepResult step_result) noexcept {
  QString step_result_message;

  // Translate the step result enum into user-facing text.
  switch (step_result) {
    case chip8::StepResult::kInvalidMemoryLocation:
      step_result_message = tr("Invalid memory location");
      break;

    case chip8::StepResult::kInvalidInstruction:
      step_result_message = tr("Invalid instruction");
      break;

    case chip8::StepResult::kInvalidKey:
      step_result_message = tr("Invalid key specified");
      break;

    case chip8::StepResult::kInvalidSpriteLocation:
      step_result_message = tr("Invalid sprite location");
      break;

    case chip8::StepResult::kStackUnderflow:
      step_result_message = tr("Stack underflow");
      break;

    case chip8::StepResult::kStackOverflow:
      step_result_message = tr("Stack overflow");
      break;

    default:
      // Only non-failure results should be unmapped; reaching this indicates
      // a caller passed a result this dialog was never meant to report.
      step_result_message = tr("This should never have happened!");
      break;
  }
  auto error_message = QString(tr(
      "The virtual machine encountered a problem running the guest program:"));
  error_message += QString(" %1.\n\n").arg(step_result_message);
  error_message += tr("Open debugger?");

  const auto user_response =
      QMessageBox::critical(this, tr("Execution failure"), error_message,
                            QMessageBox::Yes | QMessageBox::No);

  // Open the debugger only on explicit user confirmation.
  if (user_response == QMessageBox::Yes) {
    emit DisplayDebugger();
  }
}
void MainWindowController::SetWindowTitleGuestProgramInfo(
const QString& program_file_name) noexcept {
setWindowTitle(QString{"vm-tutorial - running %1"}.arg(program_file_name));
}
void MainWindowController::UpdateFPSInfo(const unsigned int current_fps,
const unsigned int target_fps,
const double average_fps) noexcept {
const auto average_fps_text = QString::number(average_fps, 'f', 2);
fps_info_->setText(QString{"FPS: %1/%2 (avg. %3ms)"}
.arg(current_fps)
.arg(target_fps)
.arg(average_fps_text));
}
// Returns the OpenGL widget used as the emulator's rendering surface.
// The widget is owned by the UI; callers must not delete it.
Renderer* MainWindowController::GetRenderer() const noexcept {
  return view_.openGLWidget;
}
// Creates the permanent status-bar widgets (currently just the FPS label).
void MainWindowController::CreateStatusBarWidgets() noexcept {
  // Parented to the status bar, so Qt's object tree owns and frees it.
  fps_info_ = new QLabel(view_.statusBar);
  view_.statusBar->addPermanentWidget(fps_info_);
}
// Forward bound key presses to the virtual machine; defer everything else to
// the default QMainWindow handling.
void MainWindowController::keyPressEvent(QKeyEvent* key_event) noexcept {
  const auto key = key_event->key();

  if (key_bindings_.count(key) != 0) {
    // Known binding: emit the mapped CHIP-8 key.
    emit CHIP8KeyPress(key_bindings_[key]);
    return;
  }
  QMainWindow::keyPressEvent(key_event);
}
// Forward bound key releases to the virtual machine; defer everything else to
// the default QMainWindow handling.
void MainWindowController::keyReleaseEvent(QKeyEvent* key_event) noexcept {
  const auto key = key_event->key();

  if (!key_bindings_.count(key)) {
    // BUG FIX: this previously called QMainWindow::keyPressEvent(), so
    // unbound key *releases* were delivered to the base class as presses.
    QMainWindow::keyReleaseEvent(key_event);
    return;
  }

  emit CHIP8KeyRelease(key_bindings_[key]);
}
josephcrosmanplays532/Wrapper-Offline-1.2.3- | utilities/avidemux/include/avidemux/2.7/ADM_coreJobs/ADM_coreJobs_export.h | <filename>utilities/avidemux/include/avidemux/2.7/ADM_coreJobs/ADM_coreJobs_export.h
/* Export/visibility macros for the ADM_coreJobs shared library.
 * NOTE(review): this follows the layout of a CMake-generated export header —
 * confirm whether it is generated before hand-editing. */
#ifndef ADM_COREJOBS_EXPORT_H
#define ADM_COREJOBS_EXPORT_H

#ifdef ADM_COREJOBS_STATIC_DEFINE
/* Static build: symbols need no import/export decoration. */
#  define ADM_COREJOBS_EXPORT
#  define ADM_COREJOBS_NO_EXPORT
#else
#  ifndef ADM_COREJOBS_EXPORT
#    ifdef ADM_coreJobs_EXPORTS
        /* We are building this library */
#      ifdef _WIN32
#        define ADM_COREJOBS_EXPORT __declspec(dllexport)
#      else
#        define ADM_COREJOBS_EXPORT __attribute__((visibility("default")))
#      endif
#    else
        /* We are using this library */
#      ifdef _WIN32
#        define ADM_COREJOBS_EXPORT __declspec(dllimport)
#      else
#        define ADM_COREJOBS_EXPORT __attribute__((visibility("default")))
#      endif
#    endif
#  endif

/* Symbols marked NO_EXPORT stay internal to the shared object on ELF targets. */
#  ifndef ADM_COREJOBS_NO_EXPORT
#    ifdef _WIN32
#      define ADM_COREJOBS_NO_EXPORT
#    else
#      define ADM_COREJOBS_NO_EXPORT __attribute__((visibility("hidden")))
#    endif
#  endif
#endif

/* Deprecation markers, per-compiler. */
#ifndef ADM_COREJOBS_DEPRECATED
#  ifdef __GNUC__
#    define ADM_COREJOBS_DEPRECATED __attribute__ ((__deprecated__))
#    define ADM_COREJOBS_DEPRECATED_EXPORT ADM_COREJOBS_EXPORT __attribute__ ((__deprecated__))
#    define ADM_COREJOBS_DEPRECATED_NO_EXPORT ADM_COREJOBS_NO_EXPORT __attribute__ ((__deprecated__))
#  elif defined(_WIN32)
#    define ADM_COREJOBS_DEPRECATED __declspec(deprecated)
#    define ADM_COREJOBS_DEPRECATED_EXPORT ADM_COREJOBS_EXPORT __declspec(deprecated)
#    define ADM_COREJOBS_DEPRECATED_NO_EXPORT ADM_COREJOBS_NO_EXPORT __declspec(deprecated)
#  endif
#endif

#define DEFINE_NO_DEPRECATED 0
#if DEFINE_NO_DEPRECATED
#  define ADM_COREJOBS_NO_DEPRECATED
#endif

#endif
|
stlava/akita-cli | trace/backend_collector_test.go | <reponame>stlava/akita-cli
package trace
import (
"encoding/base64"
"net/url"
"sync"
"testing"
"time"
"github.com/golang/mock/gomock"
"github.com/golang/protobuf/proto"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
mockrest "github.com/akitasoftware/akita-cli/rest/mock"
pb "github.com/akitasoftware/akita-ir/go/api_spec"
"github.com/akitasoftware/akita-libs/akid"
"github.com/akitasoftware/akita-libs/akinet"
kgxapi "github.com/akitasoftware/akita-libs/api_schema"
"github.com/akitasoftware/akita-libs/batcher"
"github.com/akitasoftware/akita-libs/spec_util"
)
// Fixed service and learn-session identifiers shared by the tests in this file.
var (
	fakeSvc = akid.NewServiceID(uuid.Must(uuid.Parse("8b2cf196-87fe-4e53-a6b9-1452d7efb863")))
	fakeLrn = akid.NewLearnSessionID(uuid.Must(uuid.Parse("2b5dd735-9fc0-4365-93e8-74bf86d3f853")))
)
// witnessRecorder accumulates the witnesses decoded from mocked upload calls.
type witnessRecorder struct {
	witnesses []*pb.Witness // in upload order
}
// Record a call to LearnClient.AsyncReportsUpload.
//
// Test helper used as a gomock Do callback: decodes every base64- and
// protobuf-encoded witness in the upload request and appends it to
// wr.witnesses. Panics on any decode error, which aborts the test.
func (wr *witnessRecorder) recordAsyncReportsUpload(args ...interface{}) {
	// args mirror the mocked AsyncReportsUpload signature; the request is arg 2.
	reports := args[2].(*kgxapi.UploadReportsRequest)
	for _, r := range reports.Witnesses {
		bs, err := base64.URLEncoding.DecodeString(r.WitnessProto)
		if err != nil {
			panic(err)
		}

		w := &pb.Witness{}
		if err := proto.Unmarshal(bs, w); err != nil {
			panic(err)
		}
		wr.witnesses = append(wr.witnesses, w)
	}
}
// Make sure we obfuscate values before uploading.
//
// Feeds one request/response pair through the collector and compares the
// uploaded witness against a golden value whose string/number payloads are the
// expected obfuscated forms (the base64-looking map keys and string values
// below are the obfuscation output, not the cleartext inputs).
func TestObfuscate(t *testing.T) {
	ctrl := gomock.NewController(t)
	mockClient := mockrest.NewMockLearnClient(ctrl)
	defer ctrl.Finish()

	// Capture every uploaded witness instead of sending it anywhere.
	var rec witnessRecorder
	mockClient.
		EXPECT().
		AsyncReportsUpload(gomock.Any(), gomock.Any(), gomock.Any()).
		Do(rec.recordAsyncReportsUpload).
		AnyTimes().
		Return(nil)

	streamID := uuid.New()

	req := akinet.ParsedNetworkTraffic{
		Content: akinet.HTTPRequest{
			StreamID: streamID,
			Seq:      1203,
			Method:   "POST",
			URL: &url.URL{
				Path: "/v1/doggos",
			},
			Host: "example.com",
			Header: map[string][]string{
				"Content-Type": {"application/json"},
			},
			Body: []byte(`{"name": "prince", "number": 6119717375543385000}`),
		},
	}
	resp := akinet.ParsedNetworkTraffic{
		Content: akinet.HTTPResponse{
			StreamID:   streamID,
			Seq:        1203,
			StatusCode: 200,
			Header: map[string][]string{
				"Content-Type": {"application/json"},
			},
			Body: []byte(`{"homes": ["burbank, ca", "jeuno, ak", "versailles"]}`),
		},
	}

	col := NewBackendCollector(fakeSvc, fakeLrn, mockClient, nil)
	assert.NoError(t, col.Process(req))
	assert.NoError(t, col.Process(resp))
	// Close flushes the batched upload, triggering recordAsyncReportsUpload.
	assert.NoError(t, col.Close())

	expectedWitnesses := []*pb.Witness{
		&pb.Witness{
			Method: &pb.Method{
				Id: &pb.MethodID{
					ApiType: pb.ApiType_HTTP_REST,
				},
				Args: map[string]*pb.Data{
					"BuVeSzMAimw=": newTestBodySpecFromStruct(0, pb.HTTPBody_JSON, map[string]*pb.Data{
						"name": dataFromPrimitive(spec_util.NewPrimitiveString(
							"lgkXNsG1k7-cxarrFoo-MmhjoRP3YOXV3C0k6rrKy2A="),
						),
						"number": dataFromPrimitive(spec_util.NewPrimitiveInt64(8191886688482385179)),
					}),
				},
				Responses: map[string]*pb.Data{
					"Ye1yQe9ylz0=": newTestBodySpecFromStruct(200, pb.HTTPBody_JSON, map[string]*pb.Data{
						"homes": dataFromList(
							dataFromPrimitive(spec_util.NewPrimitiveString(
								"hZwXhGMIxoOotCt-Cu4toMf9g8CpZnOdUe3bPxEn_Sg="),
							),
							dataFromPrimitive(spec_util.NewPrimitiveString(
								"ESrSgUKxboEvBrJrfm6z9xQKnegYZ_YUcOaZ4il3ytY="),
							),
							dataFromPrimitive(spec_util.NewPrimitiveString(
								"M7hhiIKycdahIkwhrHNl9gDQSxzbbcElQMyvDOPiJhI="),
							),
						),
					}),
				},
				Meta: &pb.MethodMeta{
					Meta: &pb.MethodMeta_Http{
						Http: &pb.HTTPMethodMeta{
							Method:       "POST",
							PathTemplate: "/v1/doggos",
							Host:         "example.com",
						},
					},
				},
			},
		},
	}

	// Compare via text-marshalled protos for readable failure diffs.
	for i := range expectedWitnesses {
		expected := proto.MarshalTextString(expectedWitnesses[i])
		actual := proto.MarshalTextString(rec.witnesses[i])
		assert.Equal(t, expected, actual)
	}
}
// dataFromPrimitive wraps a primitive value in a pb.Data message.
func dataFromPrimitive(p *pb.Primitive) *pb.Data {
	return &pb.Data{Value: &pb.Data_Primitive{Primitive: p}}
}
// dataFromStruct wraps a field map in a pb.Data struct message.
func dataFromStruct(fields map[string]*pb.Data) *pb.Data {
	return &pb.Data{Value: &pb.Data_Struct{Struct: &pb.Struct{Fields: fields}}}
}
// dataFromList wraps the given elements in a pb.Data list message.
func dataFromList(elems ...*pb.Data) *pb.Data {
	return &pb.Data{Value: &pb.Data_List{List: &pb.List{Elems: elems}}}
}
// newTestBodySpecFromStruct builds an HTTP-body pb.Data from a struct field map.
func newTestBodySpecFromStruct(statusCode int, contentType pb.HTTPBody_ContentType, s map[string]*pb.Data) *pb.Data {
	return newTestBodySpecFromData(statusCode, contentType, dataFromStruct(s))
}
// newTestBodySpecFromData attaches HTTP body metadata (status code and content
// type) to d in place and returns it.
func newTestBodySpecFromData(statusCode int, contentType pb.HTTPBody_ContentType, d *pb.Data) *pb.Data {
	d.Meta = newBodyDataMeta(statusCode, contentType)
	return d
}
// newBodyDataMeta builds the DataMeta for an HTTP body with the given response
// code and content type.
func newBodyDataMeta(responseCode int, contentType pb.HTTPBody_ContentType) *pb.DataMeta {
	return newDataMeta(&pb.HTTPMeta{
		Location: &pb.HTTPMeta_Body{
			Body: &pb.HTTPBody{
				ContentType: contentType,
			},
		},
		ResponseCode: int32(responseCode),
	})
}
// newDataMeta wraps HTTP metadata in a pb.DataMeta message.
func newDataMeta(httpM *pb.HTTPMeta) *pb.DataMeta {
	return &pb.DataMeta{
		Meta: &pb.DataMeta_Http{
			Http: httpM,
		},
	}
}
// Verify processing latency computation.
//
// The request finishes arriving at start+2ms and the response is first seen at
// start+10ms, so the reported processing latency must be 8ms.
func TestTiming(t *testing.T) {
	ctrl := gomock.NewController(t)
	mockClient := mockrest.NewMockLearnClient(ctrl)
	defer ctrl.Finish()

	// Capture the uploaded witness instead of sending it anywhere.
	var rec witnessRecorder
	mockClient.
		EXPECT().
		AsyncReportsUpload(gomock.Any(), gomock.Any(), gomock.Any()).
		Do(rec.recordAsyncReportsUpload).
		AnyTimes().
		Return(nil)

	streamID := uuid.New()
	startTime := time.Now()

	req := akinet.ParsedNetworkTraffic{
		Content: akinet.HTTPRequest{
			StreamID: streamID,
			Seq:      1203,
			Method:   "GET",
			URL: &url.URL{
				Path: "/v1/doggos",
			},
			Host: "example.com",
		},
		ObservationTime: startTime,
		FinalPacketTime: startTime.Add(2 * time.Millisecond),
	}
	resp := akinet.ParsedNetworkTraffic{
		Content: akinet.HTTPResponse{
			StreamID:   streamID,
			Seq:        1203,
			StatusCode: 200,
		},
		ObservationTime: startTime.Add(10 * time.Millisecond),
		FinalPacketTime: startTime.Add(13 * time.Millisecond),
	}

	col := NewBackendCollector(fakeSvc, fakeLrn, mockClient, nil)
	assert.NoError(t, col.Process(req))
	assert.NoError(t, col.Process(resp))
	// Close flushes the upload batch so rec is populated.
	assert.NoError(t, col.Close())

	assert.Equal(t, 1, len(rec.witnesses))
	actual := rec.witnesses[0]

	meta := spec_util.HTTPMetaFromMethod(actual.Method)
	assert.NotNil(t, meta)
	// 10ms (response first seen) - 2ms (request fully received) = 8ms.
	assert.InDelta(t, 8.0, meta.ProcessingLatency, 0.001)
}
// Demonstrate race condition with multiple interfaces.
//
// Two goroutines (simulating two capture interfaces) feed request/response
// pairs into one BackendCollector concurrently; run under `go test -race` to
// surface unsynchronized access.
func TestMultipleInterfaces(t *testing.T) {
	ctrl := gomock.NewController(t)
	mockClient := mockrest.NewMockLearnClient(ctrl)
	defer ctrl.Finish()

	mockClient.EXPECT().
		AsyncReportsUpload(gomock.Any(), gomock.Any(), gomock.Any()).
		AnyTimes().
		Return(nil)

	bc := NewBackendCollector(fakeSvc, fakeLrn, mockClient, nil)

	var wg sync.WaitGroup

	// fakeTrace simulates one interface producing `count` request/response
	// pairs with sequence numbers startSeq, startSeq+1, ...
	fakeTrace := func(count int, startSeq int) {
		defer wg.Done() // robust even if Process ever panics
		for i := 0; i < count; i++ {
			streamID := uuid.New()
			// BUG FIX: Seq was previously the loop-invariant start_seq+count,
			// giving every pair the same sequence number; use startSeq+i so
			// each iteration gets a distinct one.
			seq := startSeq + i

			// Re-using the example above
			req := akinet.ParsedNetworkTraffic{
				Content: akinet.HTTPRequest{
					StreamID: streamID,
					Seq:      seq,
					Method:   "POST",
					URL: &url.URL{
						Path: "/v1/doggos",
					},
					Host: "example.com",
					Header: map[string][]string{
						"Content-Type": {"application/json"},
					},
					Body: []byte(`{"name": "prince", "number": 6119717375543385000}`),
				},
			}
			bc.Process(req)

			resp := akinet.ParsedNetworkTraffic{
				Content: akinet.HTTPResponse{
					StreamID:   streamID,
					Seq:        seq,
					StatusCode: 200,
					Header: map[string][]string{
						"Content-Type": {"application/json"},
					},
					Body: []byte(`{"homes": ["burbank, ca", "jeuno, ak", "versailles"]}`),
				},
			}
			bc.Process(resp)
		}
	}

	wg.Add(2)
	go fakeTrace(100, 1000)
	go fakeTrace(200, 2000)
	wg.Wait()
}
// Demonstrate that periodic flush exits.
//
// Builds a minimal BackendCollector whose flushDone channel is already
// closed; periodicFlush must observe that and return immediately rather
// than looping forever.
func TestFlushExit(t *testing.T) {
	b := &BackendCollector{}
	b.uploadReportBatch = batcher.NewInMemory(
		func(_ []interface{}) {}, // no-op upload processor
		uploadBatchMaxSize,
		uploadBatchFlushDuration,
	)
	b.flushDone = make(chan struct{})
	close(b.flushDone)

	b.periodicFlush()

	// Test should exit immediately
}
|
lechium/iOS1351Headers | System/Library/Frameworks/HealthKitUI.framework/HKGLView.h | /*
* This header is generated by classdump-dyld 1.5
* on Wednesday, October 27, 2021 at 3:18:24 PM Mountain Standard Time
* Operating System: Version 13.5.1 (Build 17F80)
* Image Source: /System/Library/Frameworks/HealthKitUI.framework/HealthKitUI
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>. Updated by <NAME>.
*/
#import <HealthKitUI/HealthKitUI-Structs.h>
#import <UIKitCore/UIView.h>
@class EAGLContext, UIScreen, CADisplayLink;
// OpenGL ES-backed UIView driven by a CADisplayLink.
// NOTE(review): this header was reconstructed by classdump (see the generated
// banner above); method semantics are inferred from names only — verify
// against actual behavior before depending on them.
@interface HKGLView : UIView {

	EAGLContext* _context;
	UIScreen* _screen;
	CADisplayLink* _displayLink;
	BOOL _displayLinkPaused;
	BOOL _viewWillMoveToWindow;
	double _lastUpdateTime;
	unsigned _latestDrawError;
	int _drawableWidth;
	int _drawableHeight;
	BOOL _shouldDeleteFramebuffer;
	unsigned _resolveColorRenderbuffer;
	unsigned _multisampleFramebuffer;
	unsigned _multisampleColorRenderbuffer;
	unsigned _resolveFramebuffer;
	unsigned _depthRenderbuffer;
	BOOL _contextPushed;
	BOOL _viewSnapshottingActive;
	BOOL _synchronizesWithCA;
	BOOL _use4XMSAA;
	BOOL _shouldBypassApplicationStateChecking;
	BOOL _viewIsVisible;
	float _preferredFramesPerSecond;
	int _drawableDepthFormat;

}

@property (assign,nonatomic) BOOL synchronizesWithCA;                            //@synthesize synchronizesWithCA=_synchronizesWithCA - In the implementation block
@property (assign,getter=isPaused,nonatomic) BOOL paused; 
@property (assign,nonatomic) float preferredFramesPerSecond;                     //@synthesize preferredFramesPerSecond=_preferredFramesPerSecond - In the implementation block
@property (assign,nonatomic) BOOL use4XMSAA;                                     //@synthesize use4XMSAA=_use4XMSAA - In the implementation block
@property (assign,nonatomic) int drawableDepthFormat;                            //@synthesize drawableDepthFormat=_drawableDepthFormat - In the implementation block
@property (assign,nonatomic) BOOL shouldBypassApplicationStateChecking;          //@synthesize shouldBypassApplicationStateChecking=_shouldBypassApplicationStateChecking - In the implementation block
@property (nonatomic,readonly) double timeSinceLastUpdate; 
@property (nonatomic,readonly) BOOL viewIsVisible;                               //@synthesize viewIsVisible=_viewIsVisible - In the implementation block
+(Class)layerClass;
+(void)clearCachedProgramForVertexShader:(id)arg1 fragmentShader:(id)arg2 ;
-(void)dealloc;
-(void)_update;
-(BOOL)isPaused;
-(id)initWithContext:(id)arg1 ;
-(void)update;
-(id)snapshot;
-(void)setPaused:(BOOL)arg1 ;
-(void)setFrame:(CGRect)arg1 ;
-(id)initWithFrame:(CGRect)arg1 ;
-(void)layoutSubviews;
-(void)didMoveToWindow;
-(void)setContentScaleFactor:(double)arg1 ;
-(void)displayLayer:(id)arg1 ;
-(void)willMoveToSuperview:(id)arg1 ;
-(void)willMoveToWindow:(id)arg1 ;
-(BOOL)_canDrawContent;
-(void)setPreferredFramesPerSecond:(float)arg1 ;
-(id)initWithFrame:(CGRect)arg1 context:(id)arg2 ;
-(void)setDrawableDepthFormat:(int)arg1 ;
-(BOOL)_controlsOwnScaleFactor;
-(float)preferredFramesPerSecond;
-(BOOL)viewIsVisible;
-(void)_createDisplayLinkForScreen:(id)arg1 ;
-(void)_deleteFramebuffer;
-(int)drawableDepthFormat;
-(void)_updateScreenIfChanged;
-(double)timeSinceLastUpdate;
-(void)setShouldBypassApplicationStateChecking:(BOOL)arg1 ;
-(void)_invalidateLastUpdateTime;
-(void)_updateGLLayerIsAsynchronous;
-(void)performWithContext:(/*^block*/id)arg1 ;
-(void)_pauseByNotification:(id)arg1 ;
-(void)_resumeByNotification:(id)arg1 ;
-(void)_viewSnapshottingWillBegin;
-(void)_viewSnapshottingDidEnd;
-(BOOL)_isLastUpdateTimeValid;
-(id)_context_generateSnapshot;
-(BOOL)_shouldAllowRendering;
-(void)_context_displayAndPresentFramebuffer:(BOOL)arg1 ;
-(void)_context_checkAndRepairFramebuffer;
-(unsigned)drawInRect:(CGRect)arg1 withContext:(id)arg2 ;
-(void)loadVertexShaderSource:(id)arg1 fragmentShaderSource:(id)arg2 forProgram:(unsigned*)arg3 ;
-(void)_displayLinkFired;
-(void)_context_deleteFramebuffer;
-(BOOL)_context_createFramebuffer;
-(void)_context_prepareFramebuffer:(BOOL*)arg1 ;
-(void)_context_drawRect:(CGRect)arg1 ;
-(void)_context_resolveAndDiscardFramebuffer;
-(BOOL)_context_presentFramebuffer;
-(void)setSynchronizesWithCA:(BOOL)arg1 ;
-(void)setUse4XMSAA:(BOOL)arg1 ;
-(void)loadVertexShader:(id)arg1 fragmentShader:(id)arg2 inBundle:(id)arg3 forProgram:(unsigned*)arg4 cache:(BOOL)arg5 ;
-(BOOL)synchronizesWithCA;
-(BOOL)use4XMSAA;
-(BOOL)shouldBypassApplicationStateChecking;
@end
|
a1730/avaje-inject | inject-test/src/test/java/org/example/coffee/factory/DFact.java | <reponame>a1730/avaje-inject<gh_stars>10-100
package org.example.coffee.factory;
/** Default implementation of {@link IDFact} with no additional behaviour. */
public class DFact implements IDFact {
}
|
zhuochu/mand-mobile-rn | samples/demo/radio/base.demo.js | import * as React from 'react'
import { View, Text } from 'react-native'
import { MDRadio, MDButton } from 'mand-mobile-rn'
import styles from './styles'
export default class BaseCheckDemo extends React.Component {
constructor(props) {
super(props);
this.state = {
selected: null
}
}
render() {
return (
<View style={styles.container}>
<Text>单选项选中状态:{this.state.selected}</Text>
<MDButton
style={{ marginTop: 5, marginBottom: 5 }}
size='small'
onPress={() => { this.setState({ selected: null }) }}>
取消 Raido 选中
</MDButton>
<MDRadio
value="email"
label="Email"
selected={this.state.selected}
onChange={(checked, value) => { this.setState({ selected: value }) }}
/>
<MDRadio
value="phone"
label="Phone"
selected={this.state.selected}
onChange={(checked, value) => { this.setState({ selected: value }) }}
/>
<MDRadio
value="mail"
label="Mail"
disabled
selected={this.state.selected}
onChange={(checked, value) => { this.setState({ selected: value }) }}
/>
</View>
)
}
}
|
dbflute/dbflute-intro | src/main/java/org/dbflute/intro/app/logic/exception/DirNotFoundException.java | <gh_stars>1-10
/*
* Copyright 2014-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.dbflute.intro.app.logic.exception;
/**
 * The exception thrown when a directory is not found.
 * Carries the offending directory path so callers can report it.
 * @author prprmurakami
 * @author jflute
 */
public class DirNotFoundException extends Exception {

    private static final long serialVersionUID = 1L;

    // Path of the directory that could not be found; basically not null.
    private final String dirPath; // basically not null

    /**
     * @param msg the exception message. (NotNull)
     * @param dirPath the path of the directory that was not found. (NotNull)
     */
    public DirNotFoundException(String msg, String dirPath) {
        super(msg);
        this.dirPath = dirPath;
    }

    /**
     * @return the path of the directory that was not found. (NotNull)
     */
    public String getDirPath() {
        return dirPath;
    }
}
|
sascha-andres/devenv | internal/interactive/cmds/repo_commit/is_responsible.go | package repo_commit
// IsResponsible reports whether this command handles the given command name
// ("commit" or its alias "ci").
func (c Command) IsResponsible(commandName string) bool {
	switch commandName {
	case "commit", "ci":
		return true
	default:
		return false
	}
}
|
CS150Student/PacManFirmware | PacMan_CANOpen/docs/html/dir_465c8a410b787ccece2cc89dfe6b85b1.js | var dir_465c8a410b787ccece2cc89dfe6b85b1 =
[
[ "CAN_Files", "dir_c79ad1a9b46b2034a0d44f91061db848.html", "dir_c79ad1a9b46b2034a0d44f91061db848" ],
[ "Display", "dir_a7f313cbf1c1eb810b58abda35ed74f1.html", "dir_a7f313cbf1c1eb810b58abda35ed74f1" ],
[ "Fonts", "dir_62e8ca88ace6de22d757c76ff27ebec6.html", "dir_62e8ca88ace6de22d757c76ff27ebec6" ],
[ "GxGDEH029A1", "dir_17c9c1a2d8a292466f4f46e5dc1356d7.html", "dir_17c9c1a2d8a292466f4f46e5dc1356d7" ],
[ "GxIO", "dir_0a772f0805a8df677a5d7cdb16f408be.html", "dir_0a772f0805a8df677a5d7cdb16f408be" ],
[ "imglib", "dir_1194a368b312d1de1a6212148bd15738.html", "dir_1194a368b312d1de1a6212148bd15738" ],
[ "Adafruit_GFX.cpp", "_adafruit___g_f_x_8cpp.html", "_adafruit___g_f_x_8cpp" ],
[ "Adafruit_GFX.h", "_adafruit___g_f_x_8h.html", [
[ "Adafruit_GFX", "class_adafruit___g_f_x.html", "class_adafruit___g_f_x" ],
[ "Adafruit_GFX_Button", "class_adafruit___g_f_x___button.html", "class_adafruit___g_f_x___button" ],
[ "GFXcanvas1", "class_g_f_xcanvas1.html", "class_g_f_xcanvas1" ],
[ "GFXcanvas8", "class_g_f_xcanvas8.html", "class_g_f_xcanvas8" ],
[ "GFXcanvas16", "class_g_f_xcanvas16.html", "class_g_f_xcanvas16" ]
] ],
[ "Adafruit_SPITFT.cpp", "_adafruit___s_p_i_t_f_t_8cpp.html", "_adafruit___s_p_i_t_f_t_8cpp" ],
[ "Adafruit_SPITFT.h", "_adafruit___s_p_i_t_f_t_8h.html", "_adafruit___s_p_i_t_f_t_8h" ],
[ "Adafruit_SPITFT_Macros.h", "_adafruit___s_p_i_t_f_t___macros_8h.html", null ],
[ "BitmapGraphics.h", "_bitmap_graphics_8h.html", "_bitmap_graphics_8h" ],
[ "gfxfont.h", "gfxfont_8h.html", [
[ "GFXglyph", "struct_g_f_xglyph.html", "struct_g_f_xglyph" ],
[ "GFXfont", "struct_g_f_xfont.html", "struct_g_f_xfont" ]
] ],
[ "glcdfont.c", "glcdfont_8c.html", "glcdfont_8c" ],
[ "GxEPD.cpp", "_gx_e_p_d_8cpp.html", null ],
[ "GxEPD.h", "_gx_e_p_d_8h.html", "_gx_e_p_d_8h" ],
[ "GxFont_GFX.cpp", "_gx_font___g_f_x_8cpp.html", "_gx_font___g_f_x_8cpp" ],
[ "GxFont_GFX.h", "_gx_font___g_f_x_8h.html", [
[ "GxFont_GFX", "class_gx_font___g_f_x.html", "class_gx_font___g_f_x" ]
] ]
]; |
lab11/M-ulator | simulator/core/isa/arm-v7-m/push.c | /* Mulator - An extensible {e,si}mulator
* Copyright 2011-2020 <NAME> <<EMAIL>>
*
* Licensed under either of the Apache License, Version 2.0
* or the MIT license, at your option.
*/
#include "core/isa/opcodes.h"
#include "core/isa/decode_helpers.h"
#include "core/operations/push.h"
// arm-v7-m
// PUSH, encoding T2: pushes a register list drawn from R0-R12 plus LR.
static void push_t2(uint32_t inst) {
	// Bits <12:0> encode R0-R12 (bit 13 is not encodable in T2).
	uint16_t register_list = inst & 0x1fff;
	// Bit 14 (M) selects LR.
	bool M = (inst >> 14) & 0x1;

	uint16_t registers = (M << 14) | register_list;

	// Fewer than two listed registers is UNPREDICTABLE for this encoding.
	if (hamming(registers) < 2)
		CORE_ERR_unpredictable("Too few regs to push\n");

	OP_DECOMPILE("PUSH<c> <registers>", registers);
	push(registers);
}
// arm-v7-m
/* PUSH (encoding T3): push a single register Rt.
 * Encoded as a one-hot register list so the common push() operation
 * can be reused.
 */
static void push_t3(uint32_t inst) {
	uint8_t rt = (inst >> 12) & 0xf;	// inst[15:12]: register to push
	uint16_t registers = (1 << rt);		// one-hot list for push()
	// Rt may not be SP or PC (BadReg); such encodings are UNPREDICTABLE.
	if (BadReg(rt))
		CORE_ERR_unpredictable("bad reg\n");
	OP_DECOMPILE("PUSH<c> <registers>", registers);
	push(registers);
}
/* Registers both PUSH decoders with the opcode dispatch table at load
 * time (GCC constructor attribute). The first mask gives the bits that
 * must be 1, the second the bits that must be 0; bits covered by neither
 * mask are wildcards (the register list / Rt fields below).
 */
__attribute__ ((constructor))
static void register_opcodes_arm_v7_m_push(void) {
	// 1110 1001 0010 1101 0x0x xxxx xxxx xxxx
	register_opcode_mask_32(0xe92d0000, 0x16d2a000, push_t2);
	// 1111 1000 0100 1101 xxxx 1101 0000 0100 (t3)
	register_opcode_mask_32(0xf84d0d04, 0x07b202fb, push_t3);
}
|
bob0bob/jmonkeyengine | jme3-lwjgl/src/main/java/com/jme3/system/lwjgl/LwjglAbstractDisplay.java | <reponame>bob0bob/jmonkeyengine<gh_stars>0
/*
* Copyright (c) 2009-2021 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme3.system.lwjgl;
import com.jme3.input.JoyInput;
import com.jme3.input.KeyInput;
import com.jme3.input.MouseInput;
import com.jme3.input.TouchInput;
import com.jme3.input.lwjgl.JInputJoyInput;
import com.jme3.input.lwjgl.LwjglKeyInput;
import com.jme3.input.lwjgl.LwjglMouseInput;
import com.jme3.system.AppSettings;
import com.jme3.system.JmeSystem;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.lwjgl.LWJGLException;
import org.lwjgl.Sys;
import org.lwjgl.opengl.Display;
import org.lwjgl.opengl.OpenGLException;
import org.lwjgl.opengl.Util;
/**
 * Common scaffolding for LWJGL2-based contexts (windowed/fullscreen display
 * or AWT canvas). Implements {@link Runnable}: {@link #run()} is the render
 * thread's entry point, driving initialization, the per-frame loop, focus
 * event dispatch, and shutdown. Subclasses supply the concrete context
 * creation/destruction ({@link #createContext}, {@link #destroyContext}).
 */
public abstract class LwjglAbstractDisplay extends LwjglContext implements Runnable {
    private static final Logger logger = Logger.getLogger(LwjglAbstractDisplay.class.getName());
    // Set to true (via destroy()) to ask the render thread to exit its loop.
    protected AtomicBoolean needClose = new AtomicBoolean(false);
    // Display.isActive() observed on the previous frame, so gainFocus()/
    // loseFocus() fire only on transitions.
    protected boolean wasActive = false;
    // Framerate cap in frames per second; 0 disables the cap.
    protected int frameRate = 0;
    // When false, the loop throttles to ~20 FPS and skips buffer swaps.
    protected boolean autoFlush = true;
    // Whether Display.update() may be called each frame to swap buffers.
    protected boolean allowSwapBuffers = false;
    /**
     * @return Type.Display or Type.Canvas
     */
    @Override
    public abstract Type getType();
    /**
     * Set the title if it's a windowed display
     * @param title the desired title
     */
    @Override
    public abstract void setTitle(String title);
    /**
     * Restart if it's a windowed or full-screen display.
     */
    @Override
    public abstract void restart();
    /**
     * Apply the settings, changing resolution, etc.
     * @param settings the AppSettings to apply
     * @throws LWJGLException for various error conditions
     */
    protected abstract void createContext(AppSettings settings) throws LWJGLException;
    /**
     * Destroy the context.
     */
    protected abstract void destroyContext();
    /**
     * Does LWJGL display initialization in the OpenGL thread
     *
     * @return true if successful, otherwise false
     */
    protected boolean initInThread() {
        try {
            if (!JmeSystem.isLowPermissions()) {
                // Enable uncaught exception handler only for current thread
                Thread.currentThread().setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
                    @Override
                    public void uncaughtException(Thread thread, Throwable thrown) {
                        listener.handleError("Uncaught exception thrown in "+thread.toString(), thrown);
                        if (needClose.get()) {
                            // listener.handleError() has requested the
                            // context to close. Satisfy request.
                            deinitInThread();
                        }
                    }
                });
            }
            // For canvas, this will create a Pbuffer,
            // allowing us to query information.
            // When the canvas context becomes available, it will
            // be replaced seamlessly.
            createContext(settings);
            printContextInitInfo();
            created.set(true);
            super.internalCreate();
        } catch (Exception ex) {
            // Best-effort teardown of a partially created display.
            try {
                if (Display.isCreated()) {
                    Display.destroy();
                }
            } catch (Exception ex2){
                logger.log(Level.WARNING, null, ex2);
            }
            listener.handleError("Failed to create display", ex);
            // NOTE(review): Object.notifyAll() is only legal while holding the
            // object's monitor; presumably the waiting side in LwjglContext
            // accounts for this path -- confirm before relying on it.
            createdLock.notifyAll(); // Release the lock, so start(true) doesn't deadlock.
            return false; // if we failed to create display, do not continue
        }
        listener.initialize();
        return true;
    }
    /**
     * Checks for pending OpenGL errors, routing any to the system listener.
     * Always returns true so it can be used inside an assert statement
     * (i.e. the check is compiled out when assertions are disabled).
     */
    protected boolean checkGLError() {
        try {
            Util.checkGLError();
        } catch (OpenGLException ex){
            listener.handleError("An OpenGL error has occurred!", ex);
        }
        // NOTE: Always return true since this is used in an "assert" statement
        return true;
    }
    /**
     * execute one iteration of the render loop in the OpenGL thread
     */
    protected void runLoop(){
        if (!created.get())
            throw new IllegalStateException();
        listener.update();
        // All this does is call update().
        // If the canvas is not active, there's no need to waste time
        // doing that.
        if (renderable.get()){
            assert checkGLError();
            // calls swap buffers, etc.
            try {
                if (allowSwapBuffers && autoFlush) {
                    Display.update(false);
                }
            } catch (Throwable ex){
                listener.handleError("Error while swapping buffers", ex);
            }
        }
        // Throttle to ~20 FPS when auto-flush is off (e.g. paused canvas).
        int frameRateCap;
        if (autoFlush) {
            frameRateCap = frameRate;
        } else {
            frameRateCap = 20;
        }
        if (frameRateCap > 0) {
            // Cap framerate
            Display.sync(frameRateCap);
        }
        // check input after we synchronize with framerate.
        // this reduces input lag.
        if (renderable.get()){
            Display.processMessages();
        }
        // Subclasses just call GLObjectManager. Clean up objects here.
        // It is safe ... for now.
        renderer.postFrame();
    }
    /**
     * De-initialize in the OpenGL thread.
     */
    protected void deinitInThread(){
        destroyContext();
        listener.destroy();
        logger.fine("Display destroyed.");
        super.internalDestroy();
    }
    /**
     * Render-thread entry point: initializes the display, then loops
     * (dispatching close requests and focus transitions, then one frame
     * via runLoop()) until needClose is set, and finally tears down.
     */
    @Override
    public void run(){
        if (listener == null) {
            throw new IllegalStateException("SystemListener is not set on context!"
                    + "Must set with JmeContext.setSystemListener().");
        }
        loadNatives();
        logger.log(Level.FINE, "Using LWJGL {0}", Sys.getVersion());
        if (!initInThread()) {
            logger.log(Level.SEVERE, "Display initialization failed. Cannot continue.");
            return;
        }
        while (true){
            if (renderable.get()){
                if (Display.isCloseRequested())
                    listener.requestClose(false);
                // Fire focus events only when the active state changes.
                if (wasActive != Display.isActive()) {
                    if (!wasActive) {
                        listener.gainFocus();
                        timer.reset();
                        wasActive = true;
                    } else {
                        listener.loseFocus();
                        wasActive = false;
                    }
                }
            }
            runLoop();
            if (needClose.get())
                break;
        }
        deinitInThread();
    }
    // Lazily created joystick input (JInput-backed).
    @Override
    public JoyInput getJoyInput() {
        if (joyInput == null){
            joyInput = new JInputJoyInput();
        }
        return joyInput;
    }
    // Lazily created mouse input bound to this context.
    @Override
    public MouseInput getMouseInput() {
        if (mouseInput == null){
            mouseInput = new LwjglMouseInput(this);
        }
        return mouseInput;
    }
    // Lazily created keyboard input bound to this context.
    @Override
    public KeyInput getKeyInput() {
        if (keyInput == null){
            keyInput = new LwjglKeyInput(this);
        }
        return keyInput;
    }
    // Touch input is not supported on desktop LWJGL.
    @Override
    public TouchInput getTouchInput() {
        return null;
    }
    @Override
    public void setAutoFlushFrames(boolean enabled){
        this.autoFlush = enabled;
    }
    /**
     * Requests the render thread to shut down; optionally blocks until the
     * context has been fully destroyed.
     */
    @Override
    public void destroy(boolean waitFor) {
        if (needClose.get()) {
            return; // Already destroyed
        }
        needClose.set(true);
        if (waitFor)
            waitFor(false);
    }
}
|
HynemanKan/bilibot_Vertx | src/main/java/per/hynemankan/vertx/bilibot/handlers/common/ExceptionHandler.java | package per.hynemankan.vertx.bilibot.handlers.common;
import io.vertx.core.Handler;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class ExceptionHandler implements Handler<Throwable> {
@Override
public void handle(Throwable event) {
//打印错误信息
log.error("Exception:", event);
}
}
|
igorrozzu/cp | resources/assets/js/src/components/forms/CinemaForm.js | <filename>resources/assets/js/src/components/forms/CinemaForm.js
import React from 'react';
import RaisedButton from 'material-ui/RaisedButton';
import { TextValidator, ValidatorForm } from 'react-material-ui-form-validator';
export default class CinemaForm extends React.Component {
constructor(props) {
super(props);
this.state = {
formData: {
"name": this.props.cinema.name || "",
"manager": this.props.cinema.manager || "",
"address": this.props.cinema.address || "",
"phone": this.props.cinema.phone || "",
},
submitted: false,
};
this.handleChange = this.handleChange.bind(this);
this.handleSubmit = this.handleSubmit.bind(this);
}
handleChange(event) {
const { formData } = this.state;
formData[event.target.name] = event.target.value;
this.setState({ formData });
}
handleSubmit() {
this.setState({ submitted: true }, () => {
setTimeout(() => this.setState({ submitted: false }), 5000);
});
this.props.handleSubmit(this.state.formData);
}
render() {
const { formData, submitted } = this.state;
return (
<ValidatorForm
ref="form"
className={"simple-form"}
onSubmit={this.handleSubmit}>
<h1>{this.props.action} cinema</h1>
<TextValidator
floatingLabelText="Name"
onChange={this.handleChange}
name="name"
value={formData.name}
validators={['required']}
errorMessages={['this field is required']}
/>
<br />
<TextValidator
floatingLabelText="Manager"
onChange={this.handleChange}
name="manager"
value={formData.manager}
validators={['required']}
errorMessages={['this field is required']}
/>
<br />
<TextValidator
floatingLabelText="Address"
onChange={this.handleChange}
name="address"
value={formData.address}
validators={['required']}
errorMessages={[
'this field is required',
]}
/>
<br />
<TextValidator
floatingLabelText="Phone"
onChange={this.handleChange}
name="phone"
value={formData.phone}
validators={['required', 'isNumber']}
errorMessages={[
'this field is required',
'phone must be a number'
]}
/>
<br />
<RaisedButton
type="submit"
label={
(submitted && 'Your form is submitted!')
|| (!submitted && this.props.action)
}
disabled={submitted}
/>
</ValidatorForm>
);
}
} |
biloocabba/new_KnCare-Rest | src/main/java/com/knits/kncare/config/CustomClientErrorHandler.java | package com.knits.kncare.config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.client.ClientHttpResponse;
import org.springframework.web.client.ResponseErrorHandler;
import java.io.IOException;
public class CustomClientErrorHandler implements ResponseErrorHandler {
private final Logger LOG = LoggerFactory.getLogger(CustomClientErrorHandler.class);
@Override
public boolean hasError(ClientHttpResponse clientHttpResponse) throws IOException {
return clientHttpResponse.getStatusCode().is4xxClientError();
}
@Override
public void handleError(ClientHttpResponse clientHttpResponse) throws IOException {
LOG.error("CustomClientErrorHandler | HTTP Status Code: " + clientHttpResponse.getStatusCode().value());
}
} |
nanovc/nanovc-java | memory/src/main/java/io/nanovc/memory/reflective/ReflectiveObjectNanoRepo.java | /*
MIT License
https://opensource.org/licenses/MIT
Copyright 2020 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package io.nanovc.memory.reflective;
import io.nanovc.*;
import io.nanovc.areas.ByteArrayHashMapArea;
import io.nanovc.areas.StringAreaAPI;
import io.nanovc.clocks.ClockWithVMNanos;
import io.nanovc.comparisons.HashMapComparisonHandler;
import io.nanovc.content.ByteArrayContent;
import io.nanovc.differences.HashMapDifferenceHandler;
import io.nanovc.indexes.HashWrapperByteArrayIndex;
import io.nanovc.memory.MemoryCommit;
import io.nanovc.memory.MemorySearchQuery;
import io.nanovc.memory.MemorySearchResults;
import io.nanovc.merges.DiffFromCommonAncestorMergeHandler;
import io.nanovc.merges.LastWinsMergeHandler;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
/**
* A fully self contained nano repository for version control of arbitrary objects using reflection.
* Use this class for general purpose storage of history.
* NOTE: This is a rudimentary implementation right now.
* Only simple reflection is supported meaning that it serializes String fields straight on the class.
* See {@link ReflectiveObjectMemoryRepoEngineBase#serializeObjectToContentArea(Object, AreaAPI, ContentFactory, ReflectiveObjectMemoryRepoBase)}
* If you want more control, see the {@link ReflectiveObjectMemoryRepoHandler} instead.
*/
public class ReflectiveObjectNanoRepo extends ReflectiveObjectMemoryRepo
implements ReflectiveObjectMemoryRepoHandlerAPI<
ByteArrayContent,
ByteArrayHashMapArea,
MemoryCommit,
MemorySearchQuery,
MemorySearchResults,
ReflectiveObjectMemoryRepo,
ReflectiveObjectMemoryRepoEngineAPI<
ByteArrayContent,
ByteArrayHashMapArea,
MemoryCommit,
MemorySearchQuery,
MemorySearchResults,
ReflectiveObjectMemoryRepo
>
>
{
    /**
     * The common engine to use for {@link ReflectiveObjectNanoRepo}'s.
     * NOTE: the COMMON_* singletons below are shared by every repo instance
     * in the process.
     */
    public static final ReflectiveObjectMemoryRepoEngine COMMON_ENGINE = new ReflectiveObjectMemoryRepoEngine();
    /**
     * The engine to use for this {@link ReflectiveObjectNanoRepo}.
     */
    protected ReflectiveObjectMemoryRepoEngineAPI<
        ByteArrayContent,
        ByteArrayHashMapArea,
        MemoryCommit,
        MemorySearchQuery,
        MemorySearchResults,
        ReflectiveObjectMemoryRepo
        > engine = COMMON_ENGINE;
    /**
     * A common clock that is used as the default for Nano Repos.
     */
    public static final ClockWithVMNanos COMMON_CLOCK = new ClockWithVMNanos();
    /**
     * The clock that we use when we create commits.
     */
    private ClockAPI<? extends TimestampAPI> clock = COMMON_CLOCK;
    /**
     * A common difference handler that is used as the default for Nano Repos.
     */
    public static final DifferenceHandlerAPI<? extends DifferenceEngineAPI> COMMON_DIFFERENCE_HANDLER = HashMapDifferenceHandler.COMMON_DIFFERENCE_HANDLER;
    /**
     * The handler to use for {@link DifferenceAPI}s between {@link AreaAPI}s of {@link ContentAPI}.
     */
    protected DifferenceHandlerAPI<? extends DifferenceEngineAPI> differenceHandler = COMMON_DIFFERENCE_HANDLER;
    /**
     * A common comparison handler that is used as the default for Nano Repos.
     */
    public static final ComparisonHandlerAPI<? extends ComparisonEngineAPI> COMMON_COMPARISON_HANDLER = HashMapComparisonHandler.COMMON_COMPARISON_HANDLER;
    /**
     * The handler to use for {@link ComparisonAPI}s between {@link AreaAPI}s of {@link ContentAPI}.
     */
    protected ComparisonHandlerAPI<? extends ComparisonEngineAPI> comparisonHandler = COMMON_COMPARISON_HANDLER;
    /**
     * A common merge handler that is used as the default for Nano Repos.
     */
    public static final MergeHandlerAPI<? extends MergeEngineAPI> COMMON_MERGE_HANDLER = DiffFromCommonAncestorMergeHandler.COMMON_MERGE_HANDLER;
    /**
     * The handler to use for merging commits.
     */
    protected MergeHandlerAPI<? extends MergeEngineAPI> mergeHandler = COMMON_MERGE_HANDLER;
    /**
     * The byte array index to use for managing the in-memory byte arrays that get created in a repo.
     * This index allows us to re-use arrays in memory.
     * The index gives us Value-Equality semantics for byte[] lookups.
     * Unlike the fields above, this has no static default: it is created by
     * ensureDependenciesExist() when no index is supplied to a constructor.
     */
    protected ByteArrayIndex byteArrayIndex;
    /**
     * Creates a new Memory Nano Repo.
     * All collaborators (engine, clock, handlers, byte array index) take
     * their shared defaults via ensureDependenciesExist().
     */
    public ReflectiveObjectNanoRepo()
    {
        ensureDependenciesExist();
    }
    /**
     * Creates a new Memory Nano Repo.
     * All other collaborators take their shared defaults.
     *
     * @param byteArrayIndex The index to use when committing content.
     */
    public ReflectiveObjectNanoRepo(ByteArrayIndex byteArrayIndex)
    {
        this.byteArrayIndex = byteArrayIndex;
        ensureDependenciesExist();
    }
    /**
     * Creates a new Memory Nano Repo.
     * Any argument passed as null is replaced by a default in
     * ensureDependenciesExist().
     *
     * @param byteArrayIndex The byte array index to reuse. This allows us to keep a shared pool of byte arrays for the content that is created. This index could be shared across multiple repos to save memory. Plug in an alternative handler or use {@link HashWrapperByteArrayIndex}.
     * @param engine The engine to use for the version control functionality. All of the version control logic is delegated to this engine. You can plug in an alternative engine to modify the behaviour for this repo. Plug in an alternative handler or use {@link #COMMON_ENGINE}.
     * @param clock The clock to use when creating commits for this repo. Plug in an alternative handler or use {@link #COMMON_CLOCK}.
     * @param differenceHandler The handler to use when computing differences between commits. Plug in an alternative handler or use {@link #COMMON_DIFFERENCE_HANDLER}.
     * @param comparisonHandler The handler to use when computing comparisons between commits. Plug in an alternative handler or use {@link #COMMON_COMPARISON_HANDLER}.
     * @param mergeHandler The handler to use when merging commits. Plug in an alternative handler or use {@link #COMMON_MERGE_HANDLER}.
     */
    public ReflectiveObjectNanoRepo(ByteArrayIndex byteArrayIndex, ReflectiveObjectMemoryRepoEngineAPI<
        ByteArrayContent,
        ByteArrayHashMapArea,
        MemoryCommit,
        MemorySearchQuery,
        MemorySearchResults,
        ReflectiveObjectMemoryRepo
        > engine, ClockAPI<? extends TimestampAPI> clock, DifferenceHandlerAPI<? extends DifferenceEngineAPI> differenceHandler, ComparisonHandlerAPI<? extends ComparisonEngineAPI> comparisonHandler, MergeHandlerAPI<? extends MergeEngineAPI> mergeHandler)
    {
        this.byteArrayIndex = byteArrayIndex;
        this.engine = engine;
        this.clock = clock;
        this.differenceHandler = differenceHandler;
        this.comparisonHandler = comparisonHandler;
        this.mergeHandler = mergeHandler;
        ensureDependenciesExist();
    }
    /**
     * Replaces any null collaborator with a usable default so the repo is
     * always fully wired after construction.
     * NOTE(review): the null-fallbacks here are not the same instances as
     * the field defaults -- in particular a null mergeHandler becomes a
     * LastWinsMergeHandler, whereas the field default is the
     * DiffFromCommonAncestor-based COMMON_MERGE_HANDLER. Presumably
     * intentional, but confirm before relying on merge behavior when
     * passing explicit nulls.
     */
    private void ensureDependenciesExist()
    {
        // Make sure we have a repo engine:
        if (this.engine == null)
        {
            // Create the default engine:
            this.engine = COMMON_ENGINE;
        }
        // Make sure we have a byte array index:
        if (this.byteArrayIndex == null)
        {
            // Create a new byte array index:
            this.byteArrayIndex = engine.createByteArrayIndex();
        }
        // Make sure we have a clock:
        if (this.clock == null)
        {
            // Create a new clock:
            this.clock = engine.createClock();
        }
        // Make sure that we have the difference handler:
        if (this.differenceHandler == null)
        {
            // Create the new difference handler:
            this.differenceHandler = new HashMapDifferenceHandler();
        }
        // Make sure that we have the comparison handler:
        if (this.comparisonHandler == null)
        {
            // Create the new comparison handler:
            this.comparisonHandler = new HashMapComparisonHandler();
        }
        // Make sure that we have a merge handler:
        if (this.mergeHandler == null)
        {
            // Create a new merge handler:
            this.mergeHandler = new LastWinsMergeHandler();
        }
    }
    /**
     * Creates a new area where content can be placed.
     *
     * @return A new content area that can be used for committing.
     */
    @Override
    public ByteArrayHashMapArea createArea()
    {
        // Delegate to the engine, supplying the concrete area factory.
        return this.getEngine().createArea(ByteArrayHashMapArea::new);
    }
    /**
     * Commit the given content to the repo.
     *
     * @param contentAreaToCommit The content area to commit to version control.
     * @param message The commit message.
     * @param commitTags The commit tags to add to this commit. This allows an arbitrary amount of information to be associated with this commit. See {@link CommitTags} for helper methods here. Any {@link StringAreaAPI} can be used here.
     * @return The commit for this content.
     */
    @Override
    public MemoryCommit commit(ByteArrayHashMapArea contentAreaToCommit, String message, StringAreaAPI commitTags)
    {
        // Delegate to the engine with this repo's index and clock.
        return this.getEngine().commit(contentAreaToCommit, message, commitTags, this, this.byteArrayIndex, this.clock);
    }
    /**
     * Commit the given content to the repo.
     * It tracks the given commit as the parent.
     *
     * @param contentAreaToCommit The content area to commit to version control.
     * @param message The commit message.
     * @param commitTags The commit tags to add to this commit. This allows an arbitrary amount of information to be associated with this commit. See {@link CommitTags} for helper methods here. Any {@link StringAreaAPI} can be used here.
     * @param parentCommit The parent commit that we want to make this commit from.
     * @return The commit for this content.
     */
    @Override
    public MemoryCommit commit(ByteArrayHashMapArea contentAreaToCommit, String message, StringAreaAPI commitTags, MemoryCommit parentCommit)
    {
        // Delegate to the engine, recording the single parent commit.
        return this.getEngine().commit(contentAreaToCommit, message, commitTags, this, this.byteArrayIndex, this.clock, parentCommit);
    }
    /**
     * Commit the given content to the repo.
     * It tracks the given commits as the parents.
     *
     * @param contentAreaToCommit The content area to commit to version control.
     * @param message The commit message.
     * @param commitTags The commit tags to add to this commit. This allows an arbitrary amount of information to be associated with this commit. See {@link CommitTags} for helper methods here. Any {@link StringAreaAPI} can be used here.
     * @param firstParentCommit The parent commit that we want to make this commit from.
     * @param otherParentCommits The other parents to have in addition to the first parent commit.
     * @return The commit for this content area.
     */
    @Override
    public MemoryCommit commit(ByteArrayHashMapArea contentAreaToCommit, String message, StringAreaAPI commitTags, MemoryCommit firstParentCommit, MemoryCommit... otherParentCommits)
    {
        // Wrap the varargs tail as a list for the engine's list-based overload.
        return this.getEngine().commit(contentAreaToCommit, message, commitTags, this, this.byteArrayIndex, this.clock, firstParentCommit, Arrays.asList(otherParentCommits));
    }
    /**
     * Commit the given content to the repo.
     * It tracks the given commits as the parents.
     *
     * @param contentAreaToCommit The content area to commit to version control.
     * @param message The commit message.
     * @param commitTags The commit tags to add to this commit. This allows an arbitrary amount of information to be associated with this commit. See {@link CommitTags} for helper methods here. Any {@link StringAreaAPI} can be used here.
     * @param parentCommits The parents of this commit. Consider using the other overloads when there is are one or a few parent commits.
     * @return The commit for this content area.
     */
    @Override public MemoryCommit commit(ByteArrayHashMapArea contentAreaToCommit, String message, StringAreaAPI commitTags, List<MemoryCommit> parentCommits)
    {
        // Determine how many parent commits there are to decide how to route this to the engine:
        if (parentCommits == null)
        {
            // There is no list of parent commits.
            return this.getEngine().commit(contentAreaToCommit, message, commitTags, this, this.byteArrayIndex, this.clock);
        }
        else
        {
            // There is a list of parent commits.
            // Determine how to pass the list to the engine as efficiently as possible:
            // (0 and 1 element lists avoid the subList allocation below)
            switch (parentCommits.size())
            {
                case 0:
                    return this.getEngine().commit(contentAreaToCommit, message, commitTags, this, this.byteArrayIndex, this.clock);
                case 1:
                    return this.getEngine().commit(contentAreaToCommit, message, commitTags, this, this.byteArrayIndex, this.clock, parentCommits.get(0));
                default:
                    return this.getEngine().commit(contentAreaToCommit, message, commitTags, this, this.byteArrayIndex, this.clock, parentCommits.get(0), parentCommits.subList(1, parentCommits.size()));
            }
        }
    }
    /**
     * Commit the given content to the repo.
     * It tracks the given commits as the parents.
     *
     * @param contentAreaToCommit The content area to commit to version control.
     * @param message The commit message.
     * @param commitTags The commit tags to add to this commit. This allows an arbitrary amount of information to be associated with this commit. See {@link CommitTags} for helper methods here. Any {@link StringAreaAPI} can be used here.
     * @param firstParentCommit The parent commit that we want to make this commit from.
     * @param otherParentCommits The other parents to have in addition to the first parent commit.
     * @return The commit for this content area.
     */
    @Override
    public MemoryCommit commit(ByteArrayHashMapArea contentAreaToCommit, String message, StringAreaAPI commitTags, MemoryCommit firstParentCommit, List<MemoryCommit> otherParentCommits)
    {
        // Delegate to the engine, recording all given parents.
        return this.getEngine().commit(contentAreaToCommit, message, commitTags, this, this.byteArrayIndex, this.clock, firstParentCommit, otherParentCommits);
    }
    /**
     * Commit the given content to the given branch in the the repo.
     *
     * @param contentAreaToCommit The content area to commit to version control.
     * @param branch The branch to commit to. If the branch doesn't exist, it is created.
     * @param message The commit message.
     * @param commitTags The commit tags to add to this commit. This allows an arbitrary amount of information to be associated with this commit. See {@link CommitTags} for helper methods here. Any {@link StringAreaAPI} can be used here.
     * @return The commit for this content.
     */
    @Override
    public MemoryCommit commitToBranch(ByteArrayHashMapArea contentAreaToCommit, String branch, String message, StringAreaAPI commitTags)
    {
        // Delegate to the engine, which also advances/creates the branch tip.
        return this.getEngine().commitToBranch(contentAreaToCommit, branch, message, commitTags, this, this.byteArrayIndex, this.clock);
    }
    /**
     * Creates a new branch with the given name and makes it point at the given commit.
     * If the repo already has a branch with this name then it is updated to point at this commit.
     *
     * @param commit The commit where the new branch should be created.
     * @param branchName The name of the branch to create at the commit.
     */
    @Override
    public void createBranchAtCommit(MemoryCommit commit, String branchName)
    {
        // Delegate to the engine, mutating this repo's branch map.
        this.getEngine().createBranchAtCommit(commit, branchName, this);
    }
    /**
     * Removes the branch with the given name from the repo.
     *
     * @param branchName The name of the branch to remove.
     */
    @Override public void removeBranch(String branchName)
    {
        // Delegate to the engine; commits themselves are not deleted.
        this.getEngine().removeBranch(this, branchName);
    }
    /**
     * Gets the latest commit for the branch with the given name.
     *
     * @param branchName The name of the branch to get the latest commit for.
     * @return The latest commit for the given branch. Null if there is no branch with the given name.
     */
    @Override
    public MemoryCommit getLatestCommitForBranch(String branchName)
    {
        // Delegate the branch-tip lookup to the engine.
        return this.getEngine().getLatestCommitForBranch(branchName, this);
    }
    /**
     * Checks out the content for the given commit into the given content area.
     *
     * @param commit The commit to check out.
     * @param areaToUpdate The area to update with the content for the commit.
     */
    @Override
    public void checkoutIntoArea(MemoryCommit commit, ByteArrayHashMapArea areaToUpdate)
    {
        // Delegate to the engine, supplying the concrete content factory.
        this.getEngine().checkoutIntoArea(commit, this, areaToUpdate, ByteArrayContent::new);
    }
    /**
     * Checks out the content for the given commit into a new content area.
     *
     * @param commit The commit to check out.
     * @return A new content area with the content from the checkout.
     */
    @Override
    public ByteArrayHashMapArea checkout(MemoryCommit commit)
    {
        // Delegate to the engine, supplying the area and content factories.
        return this.getEngine().checkout(commit, this, ByteArrayHashMapArea::new, ByteArrayContent::new);
    }
    /**
     * Tags the commit with the given name.
     * This tag name can be used to reference a specific commit in the history, independently of the branches.
     *
     * @param commit The commit to tag with a name.
     * @param tagName The name of the tag to give to this commit.
     */
    @Override
    public void tagCommit(MemoryCommit commit, String tagName)
    {
        // Delegate to the engine, mutating this repo's tag map.
        this.getEngine().tagCommit(this, commit, tagName);
    }
    /**
     * Gets the commit with the given tag name.
     *
     * @param tagName The name of the tagged commit.
     * @return The commit with the given tag name. Null if there is no tag with this name.
     */
    @Override
    public MemoryCommit getCommitForTag(String tagName)
    {
        // Delegate the tag lookup to the engine.
        return this.getEngine().getCommitForTag(this, tagName);
    }
    /**
     * Removes the tag with the given name from the repo.
     *
     * @param tagName The name of the tag to remove. If this tag doesn't exist then nothing happens.
     */
    @Override
    public void removeTag(String tagName)
    {
        // Delegate to the engine; the tagged commit itself is untouched.
        this.getEngine().removeTag(this, tagName);
    }
    /**
     * Computes a difference between the given areas.
     * The areas could have come from anywhere.
     *
     * @param fromArea The first area to find differences from.
     * @param toArea The second area to find differences to.
     * @return The differences between the given areas.
     */
    @Override
    public DifferenceAPI computeDifferenceBetweenAreas(AreaAPI<? extends ByteArrayContent> fromArea, AreaAPI<? extends ByteArrayContent> toArea)
    {
        // Delegate to the engine using this repo's configured difference handler.
        return this.getEngine().computeDifferenceBetweenAreas(fromArea, toArea, this.differenceHandler);
    }
    /**
     * Computes a difference between the given commits.
     * It is assumed that the commits come from this repo.
     *
     * @param fromCommit The first commit to find differences from.
     * @param toCommit The second commit to find differences to.
     * @return The differences between the given commits.
     */
    @Override
    public DifferenceAPI computeDifferenceBetweenCommits(MemoryCommit fromCommit, MemoryCommit toCommit)
    {
        // Delegate to the engine; the factories let it check out both commits.
        return this.getEngine().computeDifferenceBetweenCommits(fromCommit, toCommit, this.differenceHandler, this, ByteArrayHashMapArea::new, ByteArrayContent::new);
    }
    /**
     * Computes a difference between the given branches.
     *
     * @param fromBranchName The first branch to find differences from.
     * @param toBranchName The second branch to find differences to.
     * @return The differences between the given branches.
     */
    @Override
    public DifferenceAPI computeDifferenceBetweenBranches(String fromBranchName, String toBranchName)
    {
        // Delegate to the engine, which resolves each branch to its tip commit.
        return this.getEngine().computeDifferenceBetweenBranches(fromBranchName, toBranchName, this.differenceHandler, this, ByteArrayHashMapArea::new, ByteArrayContent::new);
    }
    /**
     * Computes a comparison between the given areas.
     * The areas could have come from anywhere.
     *
     * @param fromArea The first area to find comparisons from.
     * @param toArea The second area to find comparisons to.
     * @return The comparisons between the given areas.
     */
    @Override
    public ComparisonAPI computeComparisonBetweenAreas(AreaAPI<? extends ByteArrayContent> fromArea, AreaAPI<? extends ByteArrayContent> toArea)
    {
        // Delegate to the engine using this repo's configured comparison handler.
        return this.getEngine().computeComparisonBetweenAreas(fromArea, toArea, this.comparisonHandler);
    }
    /**
     * Computes a comparison between the given commits.
     * It is assumed that the commits come from this repo.
     *
     * @param fromCommit The first commit to find comparisons from.
     * @param toCommit The second commit to find comparisons to.
     * @return The comparisons between the given commits.
     */
    @Override
    public ComparisonAPI computeComparisonBetweenCommits(MemoryCommit fromCommit, MemoryCommit toCommit)
    {
        // Delegate to the engine; the factories let it check out both commits.
        return this.getEngine().computeComparisonBetweenCommits(fromCommit, toCommit, this.comparisonHandler, this, ByteArrayHashMapArea::new, ByteArrayContent::new);
    }
    /**
     * Computes a comparison between the given branches.
     *
     * @param fromBranchName The first branch to find comparisons from.
     * @param toBranchName The second branch to find comparisons to.
     * @return The comparisons between the given branches.
     */
    @Override
    public ComparisonAPI computeComparisonBetweenBranches(String fromBranchName, String toBranchName)
    {
        // Delegate to the engine, which resolves each branch to its tip commit.
        return this.getEngine().computeComparisonBetweenBranches(fromBranchName, toBranchName, this.comparisonHandler, this, ByteArrayHashMapArea::new, ByteArrayContent::new);
    }
/**
* Commit the given object to the repo.
*
* @param objectToCommit The object to commit to the repo.
* @param message The commit message.
* @param commitTags The commit tags to add to this commit. This allows an arbitrary amount of information to be associated with this commit. See {@link CommitTags} for helper methods here. Any {@link StringAreaAPI} can be used here.
* @return The commit for this content.
*/
@Override
public MemoryCommit commitObject(Object objectToCommit, String message, StringAreaAPI commitTags)
{
return this.getEngine().commitObject(objectToCommit, message, commitTags, this, this.byteArrayIndex, this.clock, ByteArrayHashMapArea::new, ByteArrayContent::new);
}
/**
* Commit the given object to the repo.
* It tracks the given commit as the parent.
*
* @param objectToCommit The object to commit to the repo.
* @param message The commit message.
* @param commitTags The commit tags to add to this commit. This allows an arbitrary amount of information to be associated with this commit. See {@link CommitTags} for helper methods here. Any {@link StringAreaAPI} can be used here.
* @param parentCommit The parent commit that we want to make this commit from.
* @return The commit for this content.
*/
@Override
public MemoryCommit commitObject(Object objectToCommit, String message, StringAreaAPI commitTags, MemoryCommit parentCommit)
{
return this.getEngine().commitObject(objectToCommit, message, commitTags, this, this.byteArrayIndex, this.clock, ByteArrayHashMapArea::new, ByteArrayContent::new, parentCommit);
}
/**
* Commit the given object to the repo.
* It tracks the given commits as the parents.
*
* @param objectToCommit The object to commit to the repo.
* @param message The commit message.
* @param commitTags The commit tags to add to this commit. This allows an arbitrary amount of information to be associated with this commit. See {@link CommitTags} for helper methods here. Any {@link StringAreaAPI} can be used here.
* @param firstParentCommit The parent commit that we want to make this commit from.
* @param otherParentCommits The other parents to have in addition to the first parent commit.
* @return The commit for this content area.
*/
@Override
public MemoryCommit commitObject(Object objectToCommit, String message, StringAreaAPI commitTags, MemoryCommit firstParentCommit, MemoryCommit... otherParentCommits)
{
return this.getEngine().commitObject(objectToCommit, message,commitTags, this, this.byteArrayIndex, this.clock, ByteArrayHashMapArea::new, ByteArrayContent::new, firstParentCommit, Arrays.asList(otherParentCommits));
}
/**
* Commit the given object to the repo.
* It tracks the given commits as the parents.
*
* @param objectToCommit The object to commit to the repo.
* @param message The commit message.
* @param commitTags The commit tags to add to this commit. This allows an arbitrary amount of information to be associated with this commit. See {@link CommitTags} for helper methods here. Any {@link StringAreaAPI} can be used here.
* @param firstParentCommit The parent commit that we want to make this commit from.
* @param otherParentCommits The other parents to have in addition to the first parent commit.
* @return The commit for this content area.
*/
@Override
public MemoryCommit commitObject(Object objectToCommit, String message, StringAreaAPI commitTags, MemoryCommit firstParentCommit, List<MemoryCommit> otherParentCommits)
{
return this.getEngine().commitObject(objectToCommit, message, commitTags, this, this.byteArrayIndex, this.clock, ByteArrayHashMapArea::new, ByteArrayContent::new, firstParentCommit, otherParentCommits);
}
/**
* Commit the given object to the given branch in the the repo.
*
* @param objectToCommit The object to commit to the repo.
* @param branch The branch to commit to. If the branch doesn't exist, it is created.
* @param message The commit message.
* @param commitTags The commit tags to add to this commit. This allows an arbitrary amount of information to be associated with this commit. See {@link CommitTags} for helper methods here. Any {@link StringAreaAPI} can be used here.
* @return The commit for this content.
*/
@Override
public MemoryCommit commitObjectToBranch(Object objectToCommit, String branch, String message, StringAreaAPI commitTags)
{
return this.getEngine().commitObjectToBranch(objectToCommit, branch, message, commitTags, this, this.byteArrayIndex, this.clock, ByteArrayHashMapArea::new, ByteArrayContent::new);
}
/**
* Checks out the object for the given commit.
*
* @param memoryCommit The commit to check out.
* @return A new object of the expected type from the checkout.
*/
@Override
public Object checkoutObject(MemoryCommit memoryCommit)
{
return this.getEngine().checkoutObject(memoryCommit, this, ByteArrayHashMapArea::new, ByteArrayContent::new);
}
/**
* Gets the set of branch names in the repo.
*
* @return The set of branch names in the repo. If there are no branches in the repo then an empty set is returned.
*/
@Override
public Set<String> getBranchNames()
{
return this.getEngine().getBranchNames(this);
}
/**
* Gets the set of tag names in the repo.
*
* @return The set of tag names in the repo. If there are no tags in the repo then an empty set is returned.
*/
@Override
public Set<String> getTagNames()
{
return this.getEngine().getTagNames(this);
}
/**
* Gets the handler to use for {@link DifferenceAPI}s between {@link AreaAPI}s of {@link ContentAPI}.
*
* @return The handler to use for {@link DifferenceAPI}s between {@link AreaAPI}s of {@link ContentAPI}.
*/
@Override
public DifferenceHandlerAPI<? extends DifferenceEngineAPI> getDifferenceHandler()
{
return this.differenceHandler;
}
/**
* Sets the handler to use for {@link DifferenceAPI}s between {@link AreaAPI}s of {@link ContentAPI}.
*
* @param differenceHandler The handler to use for {@link DifferenceAPI}s between {@link AreaAPI}s of {@link ContentAPI}.
*/
@Override
public void setDifferenceHandler(DifferenceHandlerAPI<? extends DifferenceEngineAPI> differenceHandler)
{
this.differenceHandler = differenceHandler;
}
/**
* Gets the handler to use for {@link ComparisonAPI}s between {@link AreaAPI}s of {@link ContentAPI}.
*
* @return The handler to use for {@link ComparisonAPI}s between {@link AreaAPI}s of {@link ContentAPI}.
*/
@Override
public ComparisonHandlerAPI<? extends ComparisonEngineAPI> getComparisonHandler()
{
return this.comparisonHandler;
}
/**
* Sets the handler to use for {@link ComparisonAPI}s between {@link AreaAPI}s of {@link ContentAPI}.
*
* @param comparisonHandler The handler to use for {@link ComparisonAPI}s between {@link AreaAPI}s of {@link ContentAPI}.
*/
@Override
public void setComparisonHandler(ComparisonHandlerAPI<? extends ComparisonEngineAPI> comparisonHandler)
{
this.comparisonHandler = comparisonHandler;
}
/**
* Gets the handler to use for merges.
*
* @return The handler to use for merges.
*/
@Override
public MergeHandlerAPI<? extends MergeEngineAPI> getMergeHandler()
{
return this.mergeHandler;
}
/**
* Sets the handler to use for merges.
*
* @param mergeHandler The handler to use for merges.
*/
@Override
public void setMergeHandler(MergeHandlerAPI<? extends MergeEngineAPI> mergeHandler)
{
this.mergeHandler = mergeHandler;
}
/**
* Prepares a reusable search query from the given search definition.
* This search query can be thought of as the compiled/prepared search query.
* The same search query can be run for multiple repo's without needing to recompute the search query each time.
*
* @param searchQueryDefinition The definition of the search to perform.
* @return The query for the search. This query can be evaluated multiple times on different repos. The query needs to be evaluated to get the results.
*/
@Override
public MemorySearchQuery prepareSearchQuery(SearchQueryDefinitionAPI searchQueryDefinition)
{
return this.getEngine().prepareSearchQuery(searchQueryDefinition);
}
/**
* Searches for commits that match the given search query.
* Use this when you want to reuse the search query
*
* @param searchQuery The search query to reuse for this search.
* @return The query for the search. This query can be evaluated multiple times on different repos. The query needs to be evaluated to get the results.
*/
@Override
public MemorySearchResults searchWithQuery(MemorySearchQuery searchQuery)
{
return this.getEngine().searchWithQuery(searchQuery, null, this, ByteArrayHashMapArea::new, ByteArrayContent::new);
}
/**
* Searches for commits that match the given search query.
* Use this when you want to reuse the search query
*
* @param searchQuery The search query to reuse for this search.
* @param overrideParameters Parameters to override the defaults of the search query with. Pass null to use the parameters in the search query.
* @return The query for the search. This query can be evaluated multiple times on different repos. The query needs to be evaluated to get the results.
*/
@Override
public MemorySearchResults searchWithQuery(MemorySearchQuery searchQuery, SearchParametersAPI overrideParameters)
{
return this.getEngine().searchWithQuery(searchQuery, overrideParameters, this, ByteArrayHashMapArea::new, ByteArrayContent::new);
}
/**
* Searches for commits that match the given search definition.
*
* @param searchQueryDefinition The definition of the search to perform.
* @return The query for the search. This query can be evaluated multiple times on different repos. The query needs to be evaluated to get the results.
*/
@Override
public MemorySearchResults search(SearchQueryDefinitionAPI searchQueryDefinition)
{
MemorySearchQuery searchQuery = prepareSearchQuery(searchQueryDefinition);
return searchWithQuery(searchQuery);
}
/**
* Searches for commits that match the given search definition.
*
* @param searchQueryDefinition The definition of the search to perform.
* @param overrideParameters Parameters to override the defaults of the search definition with. Pass null to use the parameters in the search definition.
* @return The query for the search. This query can be evaluated multiple times on different repos. The query needs to be evaluated to get the results.
*/
@Override
public MemorySearchResults search(SearchQueryDefinitionAPI searchQueryDefinition, SearchParametersAPI overrideParameters)
{
MemorySearchQuery searchQuery = prepareSearchQuery(searchQueryDefinition);
return searchWithQuery(searchQuery, overrideParameters);
}
/**
* Merges one branch into another.
* The merge handler is used to resolve any merge conflicts if there are any.
*
* @param destinationBranchName The branch that we should merge into.
* @param sourceBranchName The branch that we should merge from.
* @param message The commit message to use for the merge.
* @param commitTags The commit tags to add to this commit. This allows an arbitrary amount of information to be associated with this commit. See {@link CommitTags} for helper methods here. Any {@link StringAreaAPI} can be used here.
* @return The commit that was performed for the merge.
*/
@Override
public MemoryCommit mergeIntoBranchFromAnotherBranch(String destinationBranchName, String sourceBranchName, String message, StringAreaAPI commitTags)
{
return this.getEngine().mergeIntoBranchFromAnotherBranch(destinationBranchName, sourceBranchName, message, commitTags, this.mergeHandler, this.comparisonHandler, this.differenceHandler, this, ByteArrayHashMapArea::new, ByteArrayContent::new, this.byteArrayIndex, this.clock);
}
/**
* Merges a commit into a branch.
* The merge handler is used to resolve any merge conflicts if there are any.
*
* @param destinationBranchName The branch that we should merge into.
* @param sourceCommit The commit that we should merge from.
* @param message The commit message to use for the merge.
* @param commitTags The commit tags to add to this commit. This allows an arbitrary amount of information to be associated with this commit. See {@link CommitTags} for helper methods here. Any {@link StringAreaAPI} can be used here.
* @return The commit that was performed for the merge.
*/
@Override public MemoryCommit mergeIntoBranchFromCommit(String destinationBranchName, MemoryCommit sourceCommit, String message, StringAreaAPI commitTags)
{
return this.getEngine().mergeIntoBranchFromCommit(destinationBranchName, sourceCommit, message, commitTags, this.mergeHandler, this.comparisonHandler, this.differenceHandler, this, ByteArrayHashMapArea::new, ByteArrayContent::new, this.byteArrayIndex, this.clock);
}
/**
* Merges a branch into a commit.
* The merge handler is used to resolve any merge conflicts if there are any.
*
* @param destinationCommit The commit that we should merge into.
* @param sourceBranchName The branch that we should merge from.
* @param message The commit message to use for the merge.
* @param commitTags The commit tags to add to this commit. This allows an arbitrary amount of information to be associated with this commit. See {@link CommitTags} for helper methods here. Any {@link StringAreaAPI} can be used here.
* @return The commit that was performed for the merge.
*/
@Override public MemoryCommit mergeIntoCommitFromBranch(MemoryCommit destinationCommit, String sourceBranchName, String message, StringAreaAPI commitTags)
{
return this.getEngine().mergeIntoCommitFromBranch(destinationCommit, sourceBranchName, message, commitTags, this.mergeHandler, this.comparisonHandler, this.differenceHandler, this, ByteArrayHashMapArea::new, ByteArrayContent::new, this.byteArrayIndex, this.clock);
}
/**
* Merges a commit into another commit.
* The merge handler is used to resolve any merge conflicts if there are any.
*
* @param destinationCommit The commit that we should merge into.
* @param sourceCommit The commit that we should merge from.
* @param message The commit message to use for the merge.
* @param commitTags The commit tags to add to this commit. This allows an arbitrary amount of information to be associated with this commit. See {@link CommitTags} for helper methods here. Any {@link StringAreaAPI} can be used here.
* @return The commit that was performed for the merge.
*/
@Override public MemoryCommit mergeCommits(MemoryCommit destinationCommit, MemoryCommit sourceCommit, String message, StringAreaAPI commitTags)
{
return this.getEngine().mergeCommits(destinationCommit, sourceCommit, message, commitTags, this.mergeHandler, this.comparisonHandler, this.differenceHandler, this, ByteArrayHashMapArea::new, ByteArrayContent::new, this.byteArrayIndex, this.clock);
}
/**
* Casts or clones the given area to the specific type required by this repo handler.
*
* @param areaToCastOrClone The area to cast if it is already the required type or to clone if it is a different area type.
* @return A compatible area for the repo handler which is either a cast of the same instance or a completely new clone of it if it is an incompatible type.
*/
@Override
public ByteArrayHashMapArea castOrCloneArea(AreaAPI<? extends ContentAPI> areaToCastOrClone)
{
return this.getEngine().castOrCloneArea(areaToCastOrClone, this::createArea, ByteArrayContent::new, this.byteArrayIndex);
}
/**
* Gets the repo that is being handled.
*
* @return The repo that is being handled.
*/
@Override
public ReflectiveObjectMemoryRepo getRepo()
{
return this;
}
/**
* Sets the repo that is being handled.
*
* @param repo The repo that is being handled.
*/
@Override
public void setRepo(ReflectiveObjectMemoryRepo repo)
{
throw new IllegalArgumentException("Cannot set a nano repo to another nano repo. That doesn't make sense for Object Oriented nano repos.");
}
/**
* Gets the engine that is used to work with the repo.
* An alternate (but compatible) engine can be plugged in to modify the algorithm being used for working with the repo.
*
* @return The engine that is used to work with the repo.
*/
@Override
public ReflectiveObjectMemoryRepoEngineAPI<
ByteArrayContent,
ByteArrayHashMapArea,
MemoryCommit,
MemorySearchQuery,
MemorySearchResults,
ReflectiveObjectMemoryRepo
> getEngine()
{
return this.engine;
}
/**
* Sets the engine that is used to work with the repo.
* An alternate (but compatible) engine can be plugged in to modify the algorithm being used for working with the repo.
*
* @param engine The engine that is used to work with the repo.
*/
@Override
public void setEngine(ReflectiveObjectMemoryRepoEngineAPI<
ByteArrayContent,
ByteArrayHashMapArea,
MemoryCommit,
MemorySearchQuery,
MemorySearchResults,
ReflectiveObjectMemoryRepo
> engine)
{
this.engine = engine;
}
/**
* Gets the clock to use for creating timestamps.
*
* @return The clock to use for creating timestamps.
*/
@Override public ClockAPI<? extends TimestampAPI> getClock()
{
return this.clock;
}
/**
* Sets the clock to use for creating timestamps.
*
* @param clock The clock to use for creating timestamps.
*/
@Override public void setClock(ClockAPI<? extends TimestampAPI> clock)
{
this.clock = clock;
}
}
|
sfneal/mysql-toolkit | mysql/toolkit/components/__init__.py | <reponame>sfneal/mysql-toolkit<filename>mysql/toolkit/components/__init__.py<gh_stars>1-10
from mysql.toolkit.components.connector import Connector
from mysql.toolkit.components.manipulate import Manipulate
from mysql.toolkit.components.structure import Structure
from mysql.toolkit.components.operations import Operations
# Public API of the components package (supports `from ... import *`).
__all__ = ['Connector', 'Manipulate', 'Structure', 'Operations']
|
tommac7/hydroshare | hs_communities/migrations/0004_remove_topic_order.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.18 on 2019-09-04 13:16
from __future__ import unicode_literals
from django.db import migrations
from hs_communities.models import Topic
def populate_topics(apps, schema_editor):
    """Seed the ``Topic`` table with the default list of community topics.

    Uses the *historical* model from the app registry rather than the live
    ``hs_communities.models.Topic`` import: the live model reflects the
    current code, which may have fields this migration's schema does not,
    breaking the migration when run against an older database state.

    Args:
        apps: Historical app registry supplied by ``migrations.RunPython``.
        schema_editor: Database schema editor (unused).
    """
    # Historical model matching the schema at this point in the migration graph.
    Topic = apps.get_model('hs_communities', 'Topic')
    topics = ["Air Temperature", "Barometric Pressure", "Chlorophyll", "Climate", "Diatoms",
              "Digital Elevation Model (DEM)",
              "Dissolved Organic Matter (DOM)", "Ecosystem model", "Electrical Conductivity",
              "Flux Tower", "Geology",
              "Geomorphology", "Geophysics", "GIS / Map Data",
              "Ground Penetrating Radar (GPR)", "Groundwater Chemistry",
              "Groundwater Depth", "Groundwater Temperatures", "Hydropedologic Properties",
              "Land Cover", "Land Use History",
              "LiDAR", "Lysimeter Water Samples Chemistry", "Matric Potential",
              "Meteorology", "Nutrient Fluxes",
              "Overland Water Chemistry", "Ozone", "Photographic Imagery", "Piezometer",
              "Precipitation",
              "Precipitation Chemistry", "Rainfall Chemistry", "Regolith Survey",
              "Reservoir Height", "Rock Moisture",
              "Sap Flow", "Sediment Transport", "Seismic Refraction", "Snow Depth",
              "Snow Pits", "Snow Survey",
              "Soil Biogeochemistry", "Soil Electrical Resistivity",
              "Soil Evapotranspiration", "Soil Gas",
              "Soil Geochemistry", "Soil Invertebrates", "Soil Microbes",
              "Soil Mineralogy", "Soil Moisture",
              "Soil Porewater Chemistry", "Soil Porosity", "Soil Redox Potential",
              "Soil Respiration",
              "Soil Survey",
              "Soil Temperature", "Soil Texture", "Soil Water", "Soil Water Chemistry",
              "Solar Radiation", "Stable Isotopes",
              "Stage", "Stream Ecology", "Stream Suspended Sediment",
              "Stream Water Chemistry", "Stream Water Temperatures",
              "Streamflow / Discharge", "Surface Water Chemistry",
              "Throughfall Chemistry", "Topographic Carbon Storage",
              "Tree Growth & Physiology", "Vegetation", "Water Potential",
              "Well Water Levels"]
    for name in topics:
        topic = Topic()
        topic.name = name
        topic.save()
class Migration(migrations.Migration):
    """Drop the unused ``order`` field from ``Topic`` and seed default topics."""

    dependencies = [
        ('hs_communities', '0003_auto_20190903_1548'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='topic',
            name='order',
        ),
        # noop reverse makes the migration reversible; the seeded rows are
        # simply left in place when migrating backwards.
        migrations.RunPython(populate_topics, migrations.RunPython.noop),
    ]
|
LabCryptoOrg/fence | src/client/default_message_receiver.cc | <filename>src/client/default_message_receiver.cc<gh_stars>1-10
#include <sstream>
#include <chrono>
#include <thread>
#include <org/labcrypto/abettor/fs.h>
#include <org/labcrypto/abettor++/conf/config_manager.h>
#include <org/labcrypto/abettor++/date/helper.h>
#include <org/labcrypto/hottentot/runtime/configuration.h>
#include <org/labcrypto/hottentot/runtime/logger.h>
#include <org/labcrypto/hottentot/runtime/utils.h>
#include <org/labcrypto/hottentot/runtime/proxy/proxy.h>
#include <org/labcrypto/hottentot/runtime/proxy/proxy_runtime.h>
#include <fence/message.h>
#include <org/labcrypto/fence/client/default_message_receiver.h>
#include <org/labcrypto/fence/client/runtime.h>
namespace org {
namespace labcrypto {
namespace fence {
namespace client {
/**
 * Initializes logging, the hottentot proxy runtime, the fence client runtime,
 * and starts the background receiver thread for this receiver's pop label.
 *
 * @param argc Command-line argument count, forwarded to the runtimes.
 * @param argv Command-line argument vector, forwarded to the runtimes.
 */
void
DefaultMessageReceiver::Init (
  int argc,
  char **argv
) {
  ::org::labcrypto::hottentot::runtime::Logger::Init();
  ::org::labcrypto::hottentot::runtime::proxy::ProxyRuntime::Init(argc, argv);
  if (::org::labcrypto::hottentot::runtime::Configuration::Verbose()) {
    ::org::labcrypto::hottentot::runtime::Logger::GetOut() <<
      "[" << ::org::labcrypto::abettor::date::helper::GetCurrentUTCTimeString() << "]: " <<
      "Proxy runtime is initialized." << std::endl;
  }
  Runtime::RegisterWorkDirPath(workDirPath_);
  runtime_ = Runtime::GetRuntime(workDirPath_);
  // FIX: the original called runtime_->Init(...) twice in a row; one call suffices.
  runtime_->Init(workDirPath_, argc, argv);
  receiverThread_ = new ReceiverThread(fenceHost_, fencePort_, popLabel_, workDirPath_, runtime_);
  receiverThread_->Start();
}
/**
 * Stops the background receiver thread and shuts the shared runtime down.
 * Teardown order matters: the thread must stop before it is deleted, and the
 * runtime it uses must outlive it.
 */
void
DefaultMessageReceiver::Shutdown() {
  // Stop the background thread before destroying it.
  receiverThread_->Shutdown();
  delete receiverThread_;
  // Shut the shared runtime down last; the thread may use it until stopped.
  runtime_->Shutdown();
}
/**
 * Returns all deliverable messages for this receiver's pop label.
 *
 * Two sources are combined, under the runtime's main lock:
 *  1. Messages that were popped earlier but whose acknowledgement timed out
 *     (older than ackTimeout_) are re-delivered from the "pna" directory and
 *     their timeout clock is restarted.
 *  2. Freshly received message ids are loaded from the "r" directory, copied
 *     to "a", moved to "pna" (popped-not-acked) and timestamped in "pnat".
 *
 * Callers own the returned Message pointers.
 *
 * FIX: the original file contained mojibake — the two
 * "(ORG_LABCRYPTO_ABETTOR_data)&currentTime" casts had been corrupted to
 * "¤tTime" ("&curr" eaten by an HTML entity), which cannot compile.
 *
 * @return Newly allocated client messages; empty when nothing is pending.
 */
std::vector<Message*>
DefaultMessageReceiver::GetMessages () {
  std::vector<Message*> messages;
  {
    std::lock_guard<std::mutex> guard(runtime_->mainLock_);
    // --- Pass 1: re-deliver popped-but-unacked messages whose ack timed out. ---
    if (runtime_->poppedButNotAcked_.find(popLabel_) != runtime_->poppedButNotAcked_.end()) {
      if (runtime_->poppedButNotAcked_[popLabel_]->size() > 0) {
        for (std::map<uint64_t, uint64_t>::iterator it = runtime_->poppedButNotAcked_[popLabel_]->begin();
             it != runtime_->poppedButNotAcked_[popLabel_]->end();
             it++) {
          uint64_t currentTime = time(NULL);
          if ((currentTime - it->second) > ackTimeout_) {
            std::stringstream ss;
            ss << it->first;
            ORG_LABCRYPTO_ABETTOR_data data;
            ORG_LABCRYPTO_ABETTOR_length dataLength;
            ORG_LABCRYPTO_ABETTOR__fs__read_file_with_base_dir (
              (ORG_LABCRYPTO_ABETTOR_path)(workDirPath_ + "/pna").c_str(),
              (ORG_LABCRYPTO_ABETTOR_string)ss.str().c_str(),
              &data,
              &dataLength
            );
            ::org::labcrypto::fence::Message message;
            message.Deserialize(data, dataLength);
            free(data);
            /* ------------------------------------------ */
            ::org::labcrypto::fence::client::Message *clientMessage =
              new ::org::labcrypto::fence::client::Message;
            clientMessage->SetId (it->first);
            if (message.GetRelId().GetValue() != 0) {
              // Map the server-side rel id back to the client-side id via the
              // ".cid" mapping file written at send time (in "s/").
              std::stringstream crss;
              crss << message.GetRelId().GetValue();
              uint64_t clientRelId;
              if (ORG_LABCRYPTO_ABETTOR__fs__file_exists (
                    (ORG_LABCRYPTO_ABETTOR_path)(workDirPath_ + "/s").c_str(),
                    (ORG_LABCRYPTO_ABETTOR_string)(crss.str() + ".cid").c_str()
                  )
              ) {
                ORG_LABCRYPTO_ABETTOR__fs__read_file_into_buffer (
                  (ORG_LABCRYPTO_ABETTOR_path)(workDirPath_ + "/s/" + crss.str() + ".cid").c_str(),
                  (ORG_LABCRYPTO_ABETTOR_data)&clientRelId,
                  0
                );
                clientMessage->SetRelId (clientRelId);
              } else {
                clientMessage->SetRelId (0);
              }
            } else {
              clientMessage->SetRelId (0);
            }
            clientMessage->SetLabel(popLabel_);
            clientMessage->SetContent (
              ByteArray (
                message.GetContent().GetValue(),
                message.GetContent().GetLength()
              )
            );
            messages.push_back(clientMessage);
            /* ------------------------------------------ */
            // Restart the ack-timeout clock for this message.
            // (The original redeclared currentTime here, shadowing the outer one.)
            currentTime = time(NULL);
            ORG_LABCRYPTO_ABETTOR__fs__write_to_file (
              (ORG_LABCRYPTO_ABETTOR_path)(workDirPath_ + "/pnat").c_str(),
              (ORG_LABCRYPTO_ABETTOR_string)ss.str().c_str(),
              (ORG_LABCRYPTO_ABETTOR_data)&currentTime,
              sizeof(currentTime)
            );
            if (runtime_->poppedButNotAcked_.find(popLabel_) == runtime_->poppedButNotAcked_.end()) {
              runtime_->poppedButNotAcked_.insert(
                std::pair<std::string, std::map<uint64_t, uint64_t>*>(
                  popLabel_, new std::map<uint64_t, uint64_t>()));
            }
            (*(runtime_->poppedButNotAcked_[popLabel_]))[it->first] = currentTime;
          }
        }
      }
    }
    // --- Pass 2: deliver freshly received messages. ---
    if (runtime_->received_.find(popLabel_) == runtime_->received_.end()) {
      return messages;
    }
    if (runtime_->received_[popLabel_]->size() == 0) {
      return messages;
    }
    std::deque<uint64_t> receivedIds = std::move(*(runtime_->received_[popLabel_]));
    // A moved-from container is valid but unspecified; make it deterministically empty.
    runtime_->received_[popLabel_]->clear();
    for (uint64_t i = 0; i < receivedIds.size(); i++) {
      std::stringstream ss;
      ss << receivedIds[i];
      ORG_LABCRYPTO_ABETTOR_data data;
      ORG_LABCRYPTO_ABETTOR_length dataLength;
      ORG_LABCRYPTO_ABETTOR__fs__read_file_with_base_dir (
        (ORG_LABCRYPTO_ABETTOR_path)(workDirPath_ + "/r").c_str(),
        (ORG_LABCRYPTO_ABETTOR_string)ss.str().c_str(),
        &data,
        &dataLength
      );
      ::org::labcrypto::fence::Message message;
      message.Deserialize(data, dataLength);
      free(data);
      /* ------------------------------------------ */
      ::org::labcrypto::fence::client::Message *clientMessage =
        new ::org::labcrypto::fence::client::Message;
      clientMessage->SetId (receivedIds[i]);
      if (message.GetRelId().GetValue() != 0) {
        std::stringstream crss;
        crss << message.GetRelId().GetValue();
        uint64_t clientRelId;
        if (ORG_LABCRYPTO_ABETTOR__fs__file_exists (
              (ORG_LABCRYPTO_ABETTOR_path)(workDirPath_ + "/s").c_str(),
              (ORG_LABCRYPTO_ABETTOR_string)(crss.str() + ".cid").c_str()
            )
        ) {
          ORG_LABCRYPTO_ABETTOR__fs__read_file_into_buffer (
            (ORG_LABCRYPTO_ABETTOR_path)(workDirPath_ + "/s/" + crss.str() + ".cid").c_str(),
            (ORG_LABCRYPTO_ABETTOR_data)&clientRelId,
            0
          );
          clientMessage->SetRelId (clientRelId);
        } else {
          clientMessage->SetRelId (0);
        }
      } else {
        clientMessage->SetRelId (0);
      }
      clientMessage->SetLabel(popLabel_);
      clientMessage->SetContent (
        ByteArray (
          message.GetContent().GetValue(),
          message.GetContent().GetLength()
        )
      );
      messages.push_back(clientMessage);
      /* ------------------------------------------ */
      // Archive a copy in "a", move the original into "pna" and stamp "pnat".
      ORG_LABCRYPTO_ABETTOR__fs__copy_file (
        (ORG_LABCRYPTO_ABETTOR_path)(workDirPath_ + "/r").c_str(),
        (ORG_LABCRYPTO_ABETTOR_string)ss.str().c_str(),
        (ORG_LABCRYPTO_ABETTOR_path)(workDirPath_ + "/a").c_str(),
        (ORG_LABCRYPTO_ABETTOR_string)ss.str().c_str()
      );
      ORG_LABCRYPTO_ABETTOR__fs__move_file (
        (ORG_LABCRYPTO_ABETTOR_path)(workDirPath_ + "/r").c_str(),
        (ORG_LABCRYPTO_ABETTOR_string)ss.str().c_str(),
        (ORG_LABCRYPTO_ABETTOR_path)(workDirPath_ + "/pna").c_str(),
        (ORG_LABCRYPTO_ABETTOR_string)ss.str().c_str()
      );
      uint64_t currentTime = time(NULL);
      ORG_LABCRYPTO_ABETTOR__fs__write_to_file (
        (ORG_LABCRYPTO_ABETTOR_path)(workDirPath_ + "/pnat").c_str(),
        (ORG_LABCRYPTO_ABETTOR_string)ss.str().c_str(),
        (ORG_LABCRYPTO_ABETTOR_data)&currentTime,
        sizeof(currentTime)
      );
      if (runtime_->poppedButNotAcked_.find(popLabel_) == runtime_->poppedButNotAcked_.end()) {
        runtime_->poppedButNotAcked_.insert(
          std::pair<std::string, std::map<uint64_t, uint64_t>*>(
            popLabel_, new std::map<uint64_t, uint64_t>()));
      }
      (*(runtime_->poppedButNotAcked_[popLabel_]))[receivedIds[i]] = currentTime;
    }
  }
  return messages;
}
/**
 * Acknowledges the given message ids for this receiver's pop label.
 *
 * For each id still pending in "pna" (popped-not-acked): removes it from the
 * in-memory pending map, moves its file from "pna" to "pa" (popped-acked) and
 * writes the ack time into "pnat". Unknown ids are silently skipped.
 *
 * FIX: the original file contained mojibake — the
 * "(ORG_LABCRYPTO_ABETTOR_data)&currentTime" cast had been corrupted to
 * "¤tTime", which cannot compile.
 *
 * @param ids The message ids to acknowledge.
 */
void
DefaultMessageReceiver::Ack (
  std::vector<uint64_t> ids
) {
  {
    std::lock_guard<std::mutex> guard(runtime_->mainLock_);
    for (uint32_t i = 0; i < ids.size(); i++) {
      uint64_t messageId = ids[i];
      std::stringstream ss;
      ss << messageId;
      if (ORG_LABCRYPTO_ABETTOR__fs__file_exists (
            (ORG_LABCRYPTO_ABETTOR_path)(workDirPath_ + "/pna").c_str(),
            (ORG_LABCRYPTO_ABETTOR_string)ss.str().c_str()
          )
      ) {
        ORG_LABCRYPTO_ABETTOR_data data;
        ORG_LABCRYPTO_ABETTOR_length dataLength;
        ORG_LABCRYPTO_ABETTOR__fs__read_file_with_base_dir (
          (ORG_LABCRYPTO_ABETTOR_path)(workDirPath_ + "/pna").c_str(),
          (ORG_LABCRYPTO_ABETTOR_string)ss.str().c_str(),
          &data,
          &dataLength
        );
        // NOTE(review): the deserialized message is never used below — this
        // read looks like legacy code; kept for behavior parity.
        ::org::labcrypto::fence::Message message;
        message.Deserialize(data, dataLength);
        free(data);
        if (runtime_->poppedButNotAcked_.find(popLabel_)
            != runtime_->poppedButNotAcked_.end()) {
          runtime_->poppedButNotAcked_[popLabel_]->erase(messageId);
        }
        ORG_LABCRYPTO_ABETTOR__fs__move_file (
          (ORG_LABCRYPTO_ABETTOR_path)(workDirPath_ + "/pna").c_str(),
          (ORG_LABCRYPTO_ABETTOR_string)ss.str().c_str(),
          (ORG_LABCRYPTO_ABETTOR_path)(workDirPath_ + "/pa").c_str(),
          (ORG_LABCRYPTO_ABETTOR_string)ss.str().c_str()
        );
        uint64_t currentTime = time(NULL);
        ORG_LABCRYPTO_ABETTOR__fs__write_to_file (
          (ORG_LABCRYPTO_ABETTOR_path)(workDirPath_ + "/pnat").c_str(),
          (ORG_LABCRYPTO_ABETTOR_string)ss.str().c_str(),
          (ORG_LABCRYPTO_ABETTOR_data)&currentTime,
          sizeof(currentTime)
        );
      }
    }
  }
}
} // END NAMESAPCE client
} // END NAMESPACE fence
} // END NAMESPACE labcrypto
} // END NAMESPACE org |
oisindoherty3/drem | tests/unit/transform/test_sa_statistics.py | <reponame>oisindoherty3/drem
from typing import Dict
import geopandas as gpd
import pandas as pd
import pytest
from geopandas.testing import assert_geodataframe_equal
from icontract import ViolationError
from pandas.testing import assert_frame_equal
from shapely.geometry import Point
from shapely.geometry import Polygon
from drem.transform.sa_statistics import _convert_columns_to_dict
from drem.transform.sa_statistics import _extract_column_names_via_glossary
from drem.transform.sa_statistics import _extract_rows_from_glossary
from drem.transform.sa_statistics import _get_columns
from drem.transform.sa_statistics import _link_small_areas_to_postcodes
from drem.transform.sa_statistics import _melt_columns
from drem.transform.sa_statistics import _merge_with_geometries
from drem.transform.sa_statistics import _pivot_table
from drem.transform.sa_statistics import _rename_columns_via_glossary
from drem.transform.sa_statistics import _replace_substring_in_column
from drem.transform.sa_statistics import _split_column_in_two_on_substring
from drem.transform.sa_statistics import _strip_column
@pytest.fixture
def raw_glossary() -> pd.DataFrame:
    """Build a raw Small Area Statistics glossary fixture.

    Returns:
        pd.DataFrame: Glossary with theme, column-name and field-description
            columns (trailing spaces in theme names mirror the raw data).
    """
    themes = [
        "Table 1",
        "Private households by type of accommodation ",
        "Table 2",
        "Permanent private households by year built ",
        "Table 5",
        "Permanent private households by central heating ",
        "Table 3",
        "Number of households with internet ",
    ]
    column_names = [
        "T6_1_HB_H",
        "T6_1_FA_H",
        "T6_2_PRE19H",
        "T6_2_PRE19P",
        "T6_5_NCH",
        "T6_5_OCH",
        "T15_3_B",
        "T15_3_OTH",
    ]
    descriptions = [
        "House/Bungalow (No. of households)",
        "Flat/Apartment (No. of households)",
        "Pre 1919 (No. of households)",
        "Pre 1919 (No. of persons)",
        "No central heating",
        "Oil",
        "Broadband",
        "Other",
    ]
    return pd.DataFrame(
        {
            "Tables Within Themes": themes,
            "Column Names": column_names,
            "Description of Field": descriptions,
        },
    )
@pytest.fixture
def year_built_glossary() -> Dict[str, str]:
    """Map year-built column encodings to human-readable descriptions.

    Returns:
        Dict[str, str]: Year-built glossary fixture.
    """
    glossary = {
        "T6_2_PRE19H": "Pre 1919 (No. of households)",
        "T6_2_PRE19P": "Pre 1919 (No. of persons)",
    }
    return glossary
@pytest.fixture
def raw_statistics() -> pd.DataFrame:
    """Create a single-row raw Small Area Statistics fixture.

    Returns:
        pd.DataFrame: One small area with accommodation, year-built and
            central-heating columns.
    """
    return pd.DataFrame(
        {
            "GEOGID": ["SA2017_017001001"],
            "T6_1_HB_H": [2],
            "T6_1_FA_H": [3],
            "T6_2_PRE19H": [10],
            "T6_2_PRE19P": [20],
            "T6_5_NCH": [7],
            # FIX: was a bare scalar (12) relying on pandas broadcasting while
            # every sibling column is a one-element list; same resulting frame.
            "T6_5_OCH": [12],
        },
    )
def test_extracted_year_built_table_from_glossary_matches_expected(
    raw_glossary: pd.DataFrame,
) -> None:
    """Extracting the year-built rows from the glossary yields the expected table.

    Args:
        raw_glossary (pd.DataFrame): Raw glossary table fixture
    """
    expected = pd.DataFrame(
        {
            "Tables Within Themes": [
                "Table 2",
                "Permanent private households by year built ",
            ],
            "Column Names": ["T6_2_PRE19H", "T6_2_PRE19P"],
            "Description of Field": [
                "Pre 1919 (No. of households)",
                "Pre 1919 (No. of persons)",
            ],
        },
    )

    actual = _extract_rows_from_glossary.run(
        raw_glossary,
        target="Tables Within Themes",
        table_name="Permanent private households by year built ",
    )

    assert_frame_equal(actual, expected)
def test_extracted_boiler_type_table_from_glossary_matches_expected(
    raw_glossary: pd.DataFrame,
) -> None:
    """Extracting the central-heating rows from the glossary yields the expected table.

    Args:
        raw_glossary (pd.DataFrame): Raw glossary table fixture
    """
    expected = pd.DataFrame(
        {
            "Tables Within Themes": [
                "Table 5",
                "Permanent private households by central heating ",
            ],
            "Column Names": ["T6_5_NCH", "T6_5_OCH"],
            "Description of Field": ["No central heating", "Oil"],
        },
    )

    actual = _extract_rows_from_glossary.run(
        raw_glossary,
        target="Tables Within Themes",
        table_name="Permanent private households by central heating ",
    )

    assert_frame_equal(actual, expected)
def test_convert_columns_to_dict_as_expected(
    year_built_glossary: Dict[str, str],
) -> None:
    """Extract 2 DataFrame columns and convert them to a Dict.

    The annotation was corrected: the ``year_built_glossary`` fixture
    returns ``Dict[str, str]``, not ``pd.DataFrame``.

    Args:
        year_built_glossary (Dict[str, str]): Year built glossary
    """
    raw_year_built_glossary = pd.DataFrame(
        {
            "Tables Within Themes": [
                "Table 2",
                "Permanent private households by year built ",
            ],
            "Column Names": ["T6_2_PRE19H", "T6_2_PRE19P"],
            "Description of Field": [
                "Pre 1919 (No. of households)",
                "Pre 1919 (No. of persons)",
            ],
        },
    )
    expected_output = year_built_glossary
    output = _convert_columns_to_dict.run(
        raw_year_built_glossary,
        column_name_index="Column Names",
        column_name_values="Description of Field",
    )
    assert output == expected_output
def test_extract_year_built_column_names_via_glossary(
    raw_statistics: pd.DataFrame, year_built_glossary: Dict[str, str],
) -> None:
    """Extract year built column names from DataFrame via glossary.

    Args:
        raw_statistics (pd.DataFrame): Raw Statistics
        year_built_glossary (Dict[str, str]): Year built glossary
    """
    # Only the glossary-keyed columns plus the requested `GEOGID` column
    # should survive extraction.
    expected_output = pd.DataFrame(
        {"GEOGID": ["SA2017_017001001"], "T6_2_PRE19H": [10], "T6_2_PRE19P": [20]},
    )
    output = _extract_column_names_via_glossary.run(
        raw_statistics, year_built_glossary, additional_columns=["GEOGID"],
    )
    assert_frame_equal(output, expected_output)
def test_extract_column_names_via_glossary_raises_error_with_unknown_columns(
    raw_statistics: pd.DataFrame, year_built_glossary: Dict[str, str],
) -> None:
    """Raise error with unknown additional columns.

    Args:
        raw_statistics (pd.DataFrame): Raw Statistics
        year_built_glossary (Dict[str, str]): Year built glossary
    """
    # `additional_columns` must name existing DataFrame columns; an unknown
    # name should trip the task's contract check (ViolationError).
    with pytest.raises(ViolationError):
        _extract_column_names_via_glossary.run(
            raw_statistics, year_built_glossary, additional_columns=["I break things"],
        )
def test_extract_column_names_via_glossary_raises_error_with_unknown_glossary(
    raw_statistics: pd.DataFrame,
) -> None:
    """Raise error with a dictionary with keys not in DataFrame columns.

    Args:
        raw_statistics (pd.DataFrame): Raw Statistics
    """
    # Glossary keys are used as column selectors, so keys absent from the
    # DataFrame should also trip the contract check.
    hot_potato_glossary = {"I break things": "in pandas"}
    with pytest.raises(ViolationError):
        _extract_column_names_via_glossary.run(
            raw_statistics, hot_potato_glossary, additional_columns=["GEOGID"],
        )
def test_rename_columns_via_glossary(year_built_glossary: Dict[str, str]) -> None:
    """Rename year built column names via glossary.

    Args:
        year_built_glossary (Dict[str, str]): Year built glossary
    """
    input_frame = pd.DataFrame(
        {"GEOGID": ["SA2017_017001001"], "T6_2_PRE19H": [10], "T6_2_PRE19P": [20]},
    )
    renamed = _rename_columns_via_glossary.run(input_frame, year_built_glossary)
    expected = pd.DataFrame(
        {
            "GEOGID": ["SA2017_017001001"],
            "Pre 1919 (No. of households)": [10],
            "Pre 1919 (No. of persons)": [20],
        },
    )
    assert_frame_equal(renamed, expected)
def test_melt_columns() -> None:
    """Melt selected columns to rows."""
    before_melt = pd.DataFrame(
        {
            "GEOGID": ["SA2017_017001001"],
            "Pre 1919 (No. of households)": [10],
            "Pre 1919 (No. of persons)": [25],
        },
    )
    # Melting turns each non-id column into a (variable, value) row pair,
    # repeating the id column.
    expected_output = pd.DataFrame(
        {
            "GEOGID": ["SA2017_017001001", "SA2017_017001001"],
            "variable": ["Pre 1919 (No. of households)", "Pre 1919 (No. of persons)"],
            "value": [10, 25],
        },
    )
    output = _melt_columns.run(before_melt, id_vars="GEOGID")
    assert_frame_equal(output, expected_output)
def test_split_column_in_two_on_substring() -> None:
    """Split column in two on substring."""
    before_split = pd.DataFrame(
        {
            "GEOGID": ["SA2017_017001001"],
            "variable": ["Pre 1919 (No. of households)"],
            "value": [10],
        },
    )
    # Splitting on the literal "(" keeps the delimiter out of both halves;
    # the original `variable` column is retained alongside the new columns.
    expected_output = pd.DataFrame(
        {
            "GEOGID": ["SA2017_017001001"],
            "variable": ["Pre 1919 (No. of households)"],
            "value": [10],
            "raw_year_built": ["Pre 1919 "],
            "raw_households_and_persons": ["No. of households)"],
        },
    )
    output = _split_column_in_two_on_substring.run(
        before_split,
        target="variable",
        pat=r"(",
        left_column_name="raw_year_built",
        right_column_name="raw_households_and_persons",
    )
    assert_frame_equal(output, expected_output)
def test_replace_substring_in_column() -> None:
    """Replace substring in column."""
    before_removal = pd.DataFrame({"dirty_column": ["No. of households)"]})
    expected_output = pd.DataFrame(
        {"dirty_column": ["No. of households)"], "clean_column": ["households"]},
    )
    # Strips the "No. of " prefix and the trailing ")".
    # NOTE(review): the "." in the pattern is unescaped, so it matches any
    # character -- slightly looser than the literal text, but harmless here.
    output = _replace_substring_in_column.run(
        before_removal,
        target="dirty_column",
        result="clean_column",
        pat=r"(No. of )|(\))",
        repl="",
    )
    assert_frame_equal(output, expected_output)
def test_strip_column() -> None:
    """Strip column strings of whitespace."""
    frame = pd.DataFrame({"dirty_column": ["Pre 1919 "]})
    result = _strip_column.run(frame, target="dirty_column", result="clean_column")
    expected = pd.DataFrame(
        {"dirty_column": ["Pre 1919 "], "clean_column": ["Pre 1919"]},
    )
    assert_frame_equal(result, expected)
def test_pivot_table() -> None:
    """Pivot table to expected format."""
    before_pivot_table = pd.DataFrame(
        {
            "GEOGID": ["SA2017_017001001", "SA2017_017001001"],
            "households_and_persons": ["households", "persons"],
            "year_built": ["Pre 1919", "Pre 1919"],
            "value": [4, 20],
        },
    )
    # Pivoting widens the long-format rows back into one row per
    # (GEOGID, year_built) with a column per households_and_persons value.
    expected_output = pd.DataFrame(
        {
            "GEOGID": ["SA2017_017001001"],
            "year_built": ["Pre 1919"],
            "households": [4],
            "persons": [20],
        },
    )
    output = _pivot_table.run(
        before_pivot_table,
        index=["GEOGID", "year_built"],
        values="value",
        columns="households_and_persons",
    )
    # check_like=True: column order is irrelevant after the pivot.
    assert_frame_equal(output, expected_output, check_like=True)
def test_merge_with_geometries() -> None:
    """Geometries are added to Statistics."""
    statistics: pd.DataFrame = pd.DataFrame(
        {"small_area": [111], "very_important_data": [42]},
    )
    dublin_geometries: gpd.GeoDataFrame = gpd.GeoDataFrame(
        {"small_area": [111], "geometry": [Point((0, 0))]},
    )
    # Merging on `small_area` should attach the geometry column while
    # preserving the statistics columns.
    expected_output: gpd.GeoDataFrame = gpd.GeoDataFrame(
        {"small_area": [111], "geometry": [Point((0, 0))], "very_important_data": [42]},
    )
    output = _merge_with_geometries.run(statistics, dublin_geometries, on="small_area")
    assert_geodataframe_equal(output, expected_output)
def test_link_small_areas_to_postcodes() -> None:
    """Small Areas that are 'mostly' in Postcode are linked to Postcode."""
    small_areas = gpd.GeoDataFrame(
        {
            "period_built": ["before 1919", "after 2010"],
            "households": [3, 4],
            "people": [10, 12],
            "small_area": [1, 2],
            "geometry": [
                Polygon([(1, 0), (1, 1), (3, 1)]),
                Polygon([(1, 0), (1, 1), (0, 1)]),
            ],
        },
    )
    postcodes = gpd.GeoDataFrame(
        {
            "postcodes": ["Co. Dublin", "Dublin 1"],
            "geometry": [
                # Lower-left triangle (x + y <= 3): contains all of
                # small_area 2 and most of small_area 1.
                Polygon([(0, 0), (3, 0), (0, 3)]),
                # Upper-right triangle (x + y >= 3): touches only a small
                # sliver of small_area 1 and none of small_area 2.
                Polygon([(3, 3), (0, 3), (3, 0)]),
            ],
        },
    )
    # Both Small Areas should therefore be linked to "Co. Dublin".
    expected_output = gpd.GeoDataFrame(
        {
            "period_built": ["before 1919", "after 2010"],
            "households": [3, 4],
            "people": [10, 12],
            "small_area": [1, 2],
            "geometry": [
                Polygon([(1, 0), (1, 1), (3, 1)]),
                Polygon([(1, 0), (1, 1), (0, 1)]),
            ],
            "postcodes": ["Co. Dublin", "Co. Dublin"],
        },
    )
    output = _link_small_areas_to_postcodes.run(small_areas, postcodes)
    assert_geodataframe_equal(output, expected_output, check_like=True)
def test_get_columns_raises_error_if_passed_nonexistent_column_name() -> None:
    """Raise error if passed non-existent column name."""
    sample = pd.DataFrame({"my_name_is": ["what"]})
    requested = ["my_name_is", "i_dont_exist"]
    with pytest.raises(ViolationError):
        _get_columns.run(sample, requested)
|
joe62/swzql | Server/Trunk/models/online.js | <filename>Server/Trunk/models/online.js
/* jshint indent: 1 */
module.exports = function(sequelize, DataTypes) {
return sequelize.define('online', {
ID: {
type: DataTypes.INTEGER(11),
allowNull: false,
primaryKey: true
},
usr: {
type: DataTypes.STRING(24),
allowNull: false
},
logintm: {
type: DataTypes.DATE,
allowNull: false
},
status: {
type: DataTypes.INTEGER(4),
allowNull: true
},
ip: {
type: DataTypes.STRING(32),
allowNull: true
}
}, {
tableName: 'online',
timestamps: false
});
};
|
homembaixinho/exercism | cpp/scrabble-score/scrabble_score.cpp | <reponame>homembaixinho/exercism<gh_stars>0
#include "scrabble_score.h"
#include <map>
using namespace std;
namespace scrabble_score {

// Letter -> point value lookup (standard Scrabble tile values).
std::map<char, int> table = {
    {'a', 1}, {'e', 1}, {'i', 1}, {'o', 1}, {'u', 1}, {'l', 1}, {'n', 1}, {'r', 1}, {'s', 1}, {'t', 1},
    {'d', 2}, {'g', 2},
    {'b', 3}, {'c', 3}, {'m', 3}, {'p', 3},
    {'f', 4}, {'h', 4}, {'v', 4}, {'w', 4}, {'y', 4},
    {'k', 5},
    {'j', 8}, {'x', 8},
    {'q', 10}, {'z', 10}
};

// Return the Scrabble score of `word`, case-insensitively.
// Characters without a tile value (digits, punctuation, ...) score 0.
int score(std::string word) {
    int total = 0;
    for (char c : word) {
        // Cast to unsigned char first: passing a negative char value to
        // tolower() is undefined behavior.
        char key = static_cast<char>(std::tolower(static_cast<unsigned char>(c)));
        // Use find() instead of operator[]: the original inserted a
        // 0-valued entry into the shared table for every unknown character.
        auto it = table.find(key);
        if (it != table.end())
            total += it->second;
    }
    return total;
}

}
|
yijunyu/demo-fast | datasets/github_cpp_10/10/100.cpp |
// Compute the Levenshtein (edit) distance between s and t with the classic
// Wagner–Fischer dynamic program.  d[i][j] is the distance between the
// first i characters of s and the first j characters of t.
// Time O(|s|*|t|), space O(|s|*|t|).
int levenshteinDistance(const std::string& s, const std::string& t) {
    const std::size_t m = s.size();
    const std::size_t n = t.size();
    // Loop indices are std::size_t: the original compared a signed int
    // against size(), a signed/unsigned mismatch.
    std::vector<std::vector<int>> d(m + 1, std::vector<int>(n + 1, 0));
    // Distance between a prefix and the empty string is the prefix length.
    for (std::size_t i = 0; i <= m; ++i) d[i][0] = static_cast<int>(i);
    for (std::size_t j = 0; j <= n; ++j) d[0][j] = static_cast<int>(j);
    for (std::size_t i = 1; i <= m; ++i) {
        for (std::size_t j = 1; j <= n; ++j) {
            const int cost = (s[i - 1] == t[j - 1]) ? 0 : 1;
            d[i][j] = std::min({ d[i - 1][j] + 1,        // deletion
                                 d[i][j - 1] + 1,        // insertion
                                 d[i - 1][j - 1] + cost  // substitute/match
                               });
        }
    }
    return d[m][n];
}
nagineni/chromium-crosswalk | content/renderer/scoped_clipboard_writer_glue.h | <filename>content/renderer/scoped_clipboard_writer_glue.h
// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CONTENT_RENDERER_SCOPED_CLIPBOARD_WRITER_GLUE_H_
#define CONTENT_RENDERER_SCOPED_CLIPBOARD_WRITER_GLUE_H_
#include "ui/base/clipboard/scoped_clipboard_writer.h"
#include "base/memory/scoped_ptr.h"
#include "content/renderer/clipboard_client.h"
namespace content {
// RAII clipboard writer that routes writes through a renderer-side
// ClipboardClient instead of touching the system clipboard directly.
class ScopedClipboardWriterGlue
    : public ui::ScopedClipboardWriter {
 public:
  explicit ScopedClipboardWriterGlue(ClipboardClient* client);
  virtual ~ScopedClipboardWriterGlue();
  // Writes a bitmap of the given size from a raw pixel buffer.
  void WriteBitmapFromPixels(const void* pixels, const gfx::Size& size);
 private:
  // Per-write state owned by the client; lives for the writer's lifetime.
  scoped_ptr<ClipboardClient::WriteContext> context_;
  DISALLOW_COPY_AND_ASSIGN(ScopedClipboardWriterGlue);
};
} // namespace content
#endif // CONTENT_RENDERER_SCOPED_CLIPBOARD_WRITER_GLUE_H_
|
dvan44/web-homework | webapp/src/components/num/Num.js | <filename>webapp/src/components/num/Num.js
import React from 'react'
import { number } from 'prop-types'
import { numberToRoman } from '../../utilities/roman-numeral-converter'
export function Num ({ number }) {
const showAsNumeral = JSON.parse(window.sessionStorage.getItem('showAsNumeral'))
let valueToDisplay = number
if (showAsNumeral) {
valueToDisplay = numberToRoman(number)
}
return (
<span>{valueToDisplay}</span>
)
}
Num.propTypes = {
number: number
}
|
scintiller/OnlineJudge | problem/migrations/0020_auto_20190618_0145.py | # Generated by Django 2.1.7 on 2019-06-18 01:45
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Make ``Comment.reply_to`` optional (blank/null self-referencing FK)."""

    dependencies = [
        ('problem', '0019_comment'),
    ]

    operations = [
        # reply_to may now be blank/null (e.g. a top-level comment) while
        # still cascading deletes from the parent Comment.
        migrations.AlterField(
            model_name='comment',
            name='reply_to',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='problem.Comment'),
        ),
    ]
|
junglegodlion/design_pattern | src/main/java/com/geely/design/principle/singleresponsibility/AppTest2.java | <filename>src/main/java/com/geely/design/principle/singleresponsibility/AppTest2.java<gh_stars>0
package com.geely.design.principle.singleresponsibility;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
public class AppTest2 {
    /**
     * Counts the words in a text file.
     *
     * A "word" is a maximal run of ASCII letters; every other character is
     * treated as a separator.
     */
    public static void main(String[] args) throws IOException {
        StringBuffer sb = new StringBuffer("");
        // try-with-resources closes the BufferedReader (and the underlying
        // FileReader) even if readLine() throws -- the original leaked both
        // streams on an exception path.
        try (BufferedReader br = new BufferedReader(new FileReader("E:\\infos.txt"))) {
            String line = null;
            while ((line = br.readLine()) != null) {
                sb.append(line);
                sb.append(" ");
            }
        }
        // split() produces a leading empty token when the text begins with a
        // separator, so words.length over-counted by one in that case; count
        // only non-empty tokens instead.
        int count = 0;
        for (String word : sb.toString().split("[^a-zA-Z]+")) {
            if (!word.isEmpty()) {
                count++;
            }
        }
        System.out.println(count);
    }
}
|
denisbohm/firefly-ice-firmware | src/fd_hal_rtc.h | <filename>src/fd_hal_rtc.h<gh_stars>1-10
#ifndef FD_HAL_RTC_H
#define FD_HAL_RTC_H

#include "fd_gpio.h"
#include "fd_rtc.h"
#include "fd_time.h"

#include <stdint.h>

/* Hardware abstraction layer for the real-time clock. */

/* Time value from retained state (presumably preserved across
 * sleep/reset -- confirm against the implementation). */
fd_time_t fd_hal_rtc_get_time_retained(void);

void fd_hal_rtc_initialize(void);

/* Route an RTC clock signal to/from a GPIO pin. */
void fd_hal_rtc_enable_pin_input(const fd_rtc_t *rtc, fd_gpio_t gpio);
void fd_hal_rtc_disable_pin_input(const fd_rtc_t *rtc, fd_gpio_t gpio);
void fd_hal_rtc_enable_pin_output(const fd_rtc_t *rtc, fd_gpio_t gpio);
void fd_hal_rtc_disable_pin_output(const fd_rtc_t *rtc, fd_gpio_t gpio);

/* Low-power transitions. */
void fd_hal_rtc_sleep(void);
void fd_hal_rtc_wake(void);

/* UTC offset in seconds (signed). */
void fd_hal_rtc_set_utc_offset(int32_t utc_offset);
int32_t fd_hal_rtc_get_utc_offset(void);

void fd_hal_rtc_set_time(fd_time_t time);
uint32_t fd_hal_rtc_get_seconds(void);
fd_time_t fd_hal_rtc_get_time(void);
fd_time_t fd_hal_rtc_get_accurate_time(void);

/* Countdown timer managed by the RTC. */
void fd_hal_rtc_set_countdown(uint32_t countdown);
uint32_t fd_hal_rtc_get_countdown(void);

uint32_t fd_hal_rtc_get_tick(void);

#endif
|
mikini/c19 | content/data.js | <gh_stars>10-100
// Locale-keyed banner strings for the data page.
// NOTE(review): the English key is 'en-UK' rather than the BCP-47 tag
// 'en-GB' -- callers must use the same key, so leave as-is unless all
// lookups are updated together.
export default {
  'da-DK': {
    banner: 'Detaljerede data fra selvrapporteringen vil blive tilgængelige, så snart vi har indsamlet tilstrækkeligt',
  },
  'en-UK': {
    banner: 'Detailed data collected from self reporting will be accessible when we have enough data',
  },
};
|
cbworden/shakemap | shakemap/coremods/coverage.py | <filename>shakemap/coremods/coverage.py
# stdlib imports
import os.path
import json
# third party imports
import numpy as np
from scipy.ndimage import gaussian_filter
from impactutils.io.smcontainers import ShakeMapOutputContainer
from openquake.hazardlib import imt
# local imports
from .base import CoreModule, Contents
from shakemap.utils.config import get_config_paths
from shakelib.utils.imt_string import oq_to_file
# Not really relevant, but seemingly necessary
COMPONENT = "GREATER_OF_TWO_HORIZONTAL"
class CoverageModule(CoreModule):
    """
    coverage -- Create JSON coverage(s) of the ground motion layers.

    Writes CovJSON files at three resolutions (high/medium/low) for every
    IMT found in the shake_result HDF container.
    """

    command_name = "coverage"
    # Output products this module is responsible for.
    targets = [
        r"products/coverage_h\.json",
        r"products/coverage_m\.json",
        r"products/coverage_l\.json",
    ]
    # (input file, required?) pairs.
    dependencies = [("products/shake_result.hdf", True)]

    def __init__(self, eventid):
        super(CoverageModule, self).__init__(eventid)
        self.contents = Contents("JSON Coverages", "coverages", eventid)

    def execute(self):
        """Create high, medium, and low resolution coverage of the mapped
        parameters.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, "current", "products")
        if not os.path.isdir(datadir):
            raise NotADirectoryError(f"{datadir} is not a valid directory.")
        datafile = os.path.join(datadir, "shake_result.hdf")
        if not os.path.isfile(datafile):
            raise FileNotFoundError(f"{datafile} does not exist.")

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)

        # get all of the grid layers and the geodict
        if container.getDataType() != "grid":
            raise NotImplementedError(
                "coverage module can only function on "
                "gridded data, not sets of points"
            )
        imtlist = container.getIMTs()
        for imtype in imtlist:
            # IMT identifiers are stored as "<component>/<imt>".
            component, imtype = imtype.split("/")
            fileimt = oq_to_file(imtype)
            oqimt = imt.from_string(imtype)
            imtdict = container.getIMTGrids(imtype, component)
            grid_data = imtdict["mean"]
            metadata = imtdict["mean_metadata"]
            if imtype == "MMI":
                # NOTE(review): the trailing commas make `description` and
                # `property_id` one-element tuples, so the CovJSON
                # "description"/"id" fields are serialized as lists of one
                # string -- likely unintended; confirm downstream consumers.
                description = ("Modified Mercalli Intensity",)
                property_id = (
                    "https://earthquake.usgs.gov/learn/topics/mercalli.php",
                )  # noqa
                decimals = 1
            elif imtype == "PGA":
                description = ("Peak Ground Acceleration",)
                units = 'natural logarithm of "g"'
                symbol = "ln(g)"
                decimals = 2
            elif imtype == "PGV":
                description = ("Peak Ground Velocity",)
                units = "natural logarithm of centimeters per second"
                symbol = "ln(cm/s)"
                decimals = 2
            elif imtype.startswith("SA"):
                description = (str(oqimt.period) + "-second Spectral Acceleration",)
                units = 'natural logarithm of "g"'
                symbol = "ln(g)"
                decimals = 2
            else:
                raise TypeError("Unknown IMT in coverage module")

            # NOTE(review): `property_id` is only assigned in the MMI branch
            # but is used below for every IMT -- for non-MMI layers it reuses
            # a stale value from an earlier iteration, or raises NameError if
            # MMI is not processed first.  Confirm and fix upstream.

            # Three passes: high = full resolution; medium = sigma=1 Gaussian
            # smoothing then 2x decimation; low = sigma=2 then 4x decimation.
            for i in range(3):
                if i == 0:
                    resolution = "high"
                    fgrid = grid_data
                    decimation = 1
                elif i == 1:
                    resolution = "medium"
                    fgrid = gaussian_filter(grid_data, sigma=1)
                    decimation = 2
                elif i == 2:
                    resolution = "low"
                    fgrid = gaussian_filter(grid_data, sigma=2)
                    decimation = 4

                rgrid = fgrid[::decimation, ::decimation]
                ny, nx = rgrid.shape
                # Round, then flip rows so the flattened value order matches
                # the axis start at ymin (assumes row 0 is the northernmost
                # row -- TODO confirm grid orientation).
                rnd_grd = np.flipud(np.around(rgrid, decimals=decimals)).flatten()
                if imtype == "MMI":
                    # Clamp to the valid intensity range.
                    rnd_grd = np.clip(rnd_grd, 1.0, 10.0)
                xstart = metadata["xmin"]
                xstop = metadata["xmin"] + (nx - 1) * decimation * metadata["dx"]
                ystart = metadata["ymin"]
                ystop = metadata["ymin"] + (ny - 1) * decimation * metadata["dy"]
                coverage = {
                    "type": "Coverage",
                    "domain": {
                        "type": "Domain",
                        "domainType": "Grid",
                        "axes": {
                            "x": {"start": xstart, "stop": xstop, "num": nx},
                            "y": {"start": ystart, "stop": ystop, "num": ny},
                        },
                        "referencing": [
                            {
                                "coordinates": ["x", "y"],
                                "system": {
                                    "type": "GeographicCRS",
                                    "id": "http://www.opengis.net/def/crs/OGC/1.3/CRS84",  # noqa
                                },
                            }
                        ],
                    },
                    "parameters": {
                        imtype: {
                            "type": "Parameter",
                            "description": {"en": description},
                            "observedProperty": {
                                "id": property_id,
                                "label": {"en": imtype},
                            },
                        }
                    },
                    "ranges": {
                        imtype: {
                            "type": "NdArray",
                            "dataType": "float",
                            "axisNames": ["y", "x"],
                            "shape": [ny, nx],
                            "values": rnd_grd.tolist(),
                        }
                    },
                }
                if imtype == "MMI":
                    # MMI carries a display palette instead of a unit block.
                    coverage["parameters"]["MMI"]["preferredPalette"] = {
                        "colors": [
                            "rgb(255, 255, 255)",
                            "rgb(255, 255, 255)",
                            "rgb(191, 204, 255)",
                            "rgb(160, 230, 255)",
                            "rgb(128, 255, 255)",
                            "rgb(122, 255, 147)",
                            "rgb(255, 255, 0)",
                            "rgb(255, 200, 0)",
                            "rgb(255, 145, 0)",
                            "rgb(255, 0, 0)",
                            "rgb(200, 0, 0)",
                        ],
                        "extent": [0, 10],
                        "interpolation": "linear",
                    }
                else:
                    coverage["parameters"][imtype]["unit"] = {
                        "label": {"en": units},
                        "symbol": {
                            "value": symbol,
                            "type": "http://www.opengis.net/def/uom/UCUM/",
                        },
                    }
                if component == "GREATER_OF_TWO_HORIZONTAL":
                    fname = f"coverage_{fileimt}_{resolution}_res.covjson"
                else:
                    fname = f"coverage_{fileimt}_{resolution}_{component}_res.covjson"
                filepath = os.path.join(datadir, fname)
                with open(filepath, "w") as outfile:
                    json.dump(coverage, outfile, separators=(",", ":"))
                self.contents.addFile(
                    imtype + "_" + resolution + "_res_coverage",
                    resolution + "-res " + imtype.upper() + " Coverage",
                    "Coverage of " + resolution + " resolution " + imtype,
                    fname,
                    "application/json",
                )
        container.close()
|
whuang022nccu/IndriLab | include/indri/Collection.hpp | /*==========================================================================
* Copyright (c) 2003-2004 University of Massachusetts. All Rights Reserved.
*
* Use of the Lemur Toolkit for Language Modeling and Information Retrieval
* is subject to the terms of the software license set forth in the LICENSE
* file included with this software, and also available at
* http://www.lemurproject.org/license.html
*
*==========================================================================
*/
//
// Collection
//
// 11 May 2004 -- tds
//
#ifndef INDRI_COLLECTION_HPP
#define INDRI_COLLECTION_HPP
#include "indri/ObjectHandler.hpp"
#include "indri/ParsedDocument.hpp"
#include "lemur/IndexTypes.hpp"
namespace indri
{
  namespace collection
  {
    // Abstract document store: maps internal document IDs to the parsed
    // documents added at index time.
    class Collection {
    public:
      virtual ~Collection() {};
      // Store `document` under `documentID`.
      virtual void addDocument( lemur::api::DOCID_T documentID, indri::api::ParsedDocument* document ) = 0;
      // Fetch the document previously stored under `documentID`.
      virtual indri::api::ParsedDocument* retrieve( lemur::api::DOCID_T documentID ) = 0;
    };
  }
}
#endif // INDRI_COLLECTION_HPP
|
DDMGNI/viVlasov1D | vlasov/solvers/preconditioner/setup.py | #!/usr/bin/env python
#$ python setup.py build_ext --inplace
from vlasov.setup_inc import *
def _make_extension(module_name):
    """Build an Extension for `module_name`.pyx with the shared build flags."""
    return Extension(
        module_name,
        sources=[module_name + ".pyx"],
        include_dirs=INCLUDE_DIRS,
        libraries=LIBRARIES,
        library_dirs=LIBRARY_DIRS,
        runtime_library_dirs=LIBRARY_DIRS,
        extra_compile_args=CARGS,
        extra_link_args=LARGS,
    )


# "TensorProductDiagonal" was present but commented out in the original
# module list; it remains disabled here.
_MODULE_NAMES = [
    "TensorProduct",
    "TensorProductKinetic",
    "TensorProductKineticFast",
    "TensorProductKineticSciPy",
    "TensorProductPotential",
    "TensorProductPotentialFast",
    "TensorProductPotentialSciPy",
]

ext_modules = [_make_extension(name) for name in _MODULE_NAMES]


setup(
    name='PETSc Variational Vlasov-Poisson Solver',
    cmdclass={'build_ext': build_ext},
    ext_modules=ext_modules,
)
|
joezqren/google-cloud-cpp | google/cloud/accesscontextmanager/internal/access_context_manager_metadata_decorator.cc | // Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated by the Codegen C++ plugin.
// If you make any local changes, they will be lost.
// source: google/identity/accesscontextmanager/v1/access_context_manager.proto
#include "google/cloud/accesscontextmanager/internal/access_context_manager_metadata_decorator.h"
#include "google/cloud/internal/api_client_header.h"
#include "google/cloud/status_or.h"
#include <google/identity/accesscontextmanager/v1/access_context_manager.grpc.pb.h>
#include <memory>
namespace google {
namespace cloud {
namespace accesscontextmanager_internal {
GOOGLE_CLOUD_CPP_INLINE_NAMESPACE_BEGIN
// Decorator around the stub: every forwarded call is stamped with the
// x-goog-request-params and x-goog-api-client headers before delegation.
// NOTE(review): this file is codegen output ("Generated by the Codegen C++
// plugin"); local edits will be overwritten on regeneration.
AccessContextManagerMetadata::AccessContextManagerMetadata(
    std::shared_ptr<AccessContextManagerStub> child)
    : child_(std::move(child)),
      api_client_header_(
          google::cloud::internal::ApiClientHeader("generator")) {}
StatusOr<google::identity::accesscontextmanager::v1::ListAccessPoliciesResponse>
AccessContextManagerMetadata::ListAccessPolicies(
grpc::ClientContext& context,
google::identity::accesscontextmanager::v1::ListAccessPoliciesRequest const&
request) {
SetMetadata(context, {});
return child_->ListAccessPolicies(context, request);
}
StatusOr<google::identity::accesscontextmanager::v1::AccessPolicy>
AccessContextManagerMetadata::GetAccessPolicy(
grpc::ClientContext& context,
google::identity::accesscontextmanager::v1::GetAccessPolicyRequest const&
request) {
SetMetadata(context, "name=" + request.name());
return child_->GetAccessPolicy(context, request);
}
future<StatusOr<google::longrunning::Operation>>
AccessContextManagerMetadata::AsyncCreateAccessPolicy(
google::cloud::CompletionQueue& cq,
std::unique_ptr<grpc::ClientContext> context,
google::identity::accesscontextmanager::v1::AccessPolicy const& request) {
SetMetadata(*context, {});
return child_->AsyncCreateAccessPolicy(cq, std::move(context), request);
}
future<StatusOr<google::longrunning::Operation>>
AccessContextManagerMetadata::AsyncUpdateAccessPolicy(
google::cloud::CompletionQueue& cq,
std::unique_ptr<grpc::ClientContext> context,
google::identity::accesscontextmanager::v1::UpdateAccessPolicyRequest const&
request) {
SetMetadata(*context, "policy.name=" + request.policy().name());
return child_->AsyncUpdateAccessPolicy(cq, std::move(context), request);
}
future<StatusOr<google::longrunning::Operation>>
AccessContextManagerMetadata::AsyncDeleteAccessPolicy(
google::cloud::CompletionQueue& cq,
std::unique_ptr<grpc::ClientContext> context,
google::identity::accesscontextmanager::v1::DeleteAccessPolicyRequest const&
request) {
SetMetadata(*context, "name=" + request.name());
return child_->AsyncDeleteAccessPolicy(cq, std::move(context), request);
}
StatusOr<google::identity::accesscontextmanager::v1::ListAccessLevelsResponse>
AccessContextManagerMetadata::ListAccessLevels(
grpc::ClientContext& context,
google::identity::accesscontextmanager::v1::ListAccessLevelsRequest const&
request) {
SetMetadata(context, "parent=" + request.parent());
return child_->ListAccessLevels(context, request);
}
StatusOr<google::identity::accesscontextmanager::v1::AccessLevel>
AccessContextManagerMetadata::GetAccessLevel(
grpc::ClientContext& context,
google::identity::accesscontextmanager::v1::GetAccessLevelRequest const&
request) {
SetMetadata(context, "name=" + request.name());
return child_->GetAccessLevel(context, request);
}
future<StatusOr<google::longrunning::Operation>>
AccessContextManagerMetadata::AsyncCreateAccessLevel(
google::cloud::CompletionQueue& cq,
std::unique_ptr<grpc::ClientContext> context,
google::identity::accesscontextmanager::v1::CreateAccessLevelRequest const&
request) {
SetMetadata(*context, "parent=" + request.parent());
return child_->AsyncCreateAccessLevel(cq, std::move(context), request);
}
future<StatusOr<google::longrunning::Operation>>
AccessContextManagerMetadata::AsyncUpdateAccessLevel(
google::cloud::CompletionQueue& cq,
std::unique_ptr<grpc::ClientContext> context,
google::identity::accesscontextmanager::v1::UpdateAccessLevelRequest const&
request) {
SetMetadata(*context, "access_level.name=" + request.access_level().name());
return child_->AsyncUpdateAccessLevel(cq, std::move(context), request);
}
future<StatusOr<google::longrunning::Operation>>
AccessContextManagerMetadata::AsyncDeleteAccessLevel(
google::cloud::CompletionQueue& cq,
std::unique_ptr<grpc::ClientContext> context,
google::identity::accesscontextmanager::v1::DeleteAccessLevelRequest const&
request) {
SetMetadata(*context, "name=" + request.name());
return child_->AsyncDeleteAccessLevel(cq, std::move(context), request);
}
future<StatusOr<google::longrunning::Operation>>
AccessContextManagerMetadata::AsyncReplaceAccessLevels(
google::cloud::CompletionQueue& cq,
std::unique_ptr<grpc::ClientContext> context,
google::identity::accesscontextmanager::v1::
ReplaceAccessLevelsRequest const& request) {
SetMetadata(*context, "parent=" + request.parent());
return child_->AsyncReplaceAccessLevels(cq, std::move(context), request);
}
StatusOr<
google::identity::accesscontextmanager::v1::ListServicePerimetersResponse>
AccessContextManagerMetadata::ListServicePerimeters(
grpc::ClientContext& context,
google::identity::accesscontextmanager::v1::
ListServicePerimetersRequest const& request) {
SetMetadata(context, "parent=" + request.parent());
return child_->ListServicePerimeters(context, request);
}
StatusOr<google::identity::accesscontextmanager::v1::ServicePerimeter>
AccessContextManagerMetadata::GetServicePerimeter(
grpc::ClientContext& context,
google::identity::accesscontextmanager::v1::
GetServicePerimeterRequest const& request) {
SetMetadata(context, "name=" + request.name());
return child_->GetServicePerimeter(context, request);
}
future<StatusOr<google::longrunning::Operation>>
AccessContextManagerMetadata::AsyncCreateServicePerimeter(
google::cloud::CompletionQueue& cq,
std::unique_ptr<grpc::ClientContext> context,
google::identity::accesscontextmanager::v1::
CreateServicePerimeterRequest const& request) {
SetMetadata(*context, "parent=" + request.parent());
return child_->AsyncCreateServicePerimeter(cq, std::move(context), request);
}
future<StatusOr<google::longrunning::Operation>>
AccessContextManagerMetadata::AsyncUpdateServicePerimeter(
google::cloud::CompletionQueue& cq,
std::unique_ptr<grpc::ClientContext> context,
google::identity::accesscontextmanager::v1::
UpdateServicePerimeterRequest const& request) {
SetMetadata(*context,
"service_perimeter.name=" + request.service_perimeter().name());
return child_->AsyncUpdateServicePerimeter(cq, std::move(context), request);
}
future<StatusOr<google::longrunning::Operation>>
AccessContextManagerMetadata::AsyncDeleteServicePerimeter(
google::cloud::CompletionQueue& cq,
std::unique_ptr<grpc::ClientContext> context,
google::identity::accesscontextmanager::v1::
DeleteServicePerimeterRequest const& request) {
SetMetadata(*context, "name=" + request.name());
return child_->AsyncDeleteServicePerimeter(cq, std::move(context), request);
}
future<StatusOr<google::longrunning::Operation>>
AccessContextManagerMetadata::AsyncReplaceServicePerimeters(
google::cloud::CompletionQueue& cq,
std::unique_ptr<grpc::ClientContext> context,
google::identity::accesscontextmanager::v1::
ReplaceServicePerimetersRequest const& request) {
SetMetadata(*context, "parent=" + request.parent());
return child_->AsyncReplaceServicePerimeters(cq, std::move(context), request);
}
future<StatusOr<google::longrunning::Operation>>
AccessContextManagerMetadata::AsyncCommitServicePerimeters(
google::cloud::CompletionQueue& cq,
std::unique_ptr<grpc::ClientContext> context,
google::identity::accesscontextmanager::v1::
CommitServicePerimetersRequest const& request) {
SetMetadata(*context, "parent=" + request.parent());
return child_->AsyncCommitServicePerimeters(cq, std::move(context), request);
}
StatusOr<google::identity::accesscontextmanager::v1::
ListGcpUserAccessBindingsResponse>
AccessContextManagerMetadata::ListGcpUserAccessBindings(
grpc::ClientContext& context,
google::identity::accesscontextmanager::v1::
ListGcpUserAccessBindingsRequest const& request) {
SetMetadata(context, "parent=" + request.parent());
return child_->ListGcpUserAccessBindings(context, request);
}
StatusOr<google::identity::accesscontextmanager::v1::GcpUserAccessBinding>
AccessContextManagerMetadata::GetGcpUserAccessBinding(
grpc::ClientContext& context,
google::identity::accesscontextmanager::v1::
GetGcpUserAccessBindingRequest const& request) {
SetMetadata(context, "name=" + request.name());
return child_->GetGcpUserAccessBinding(context, request);
}
future<StatusOr<google::longrunning::Operation>>
AccessContextManagerMetadata::AsyncCreateGcpUserAccessBinding(
google::cloud::CompletionQueue& cq,
std::unique_ptr<grpc::ClientContext> context,
google::identity::accesscontextmanager::v1::
CreateGcpUserAccessBindingRequest const& request) {
SetMetadata(*context, "parent=" + request.parent());
return child_->AsyncCreateGcpUserAccessBinding(cq, std::move(context),
request);
}
future<StatusOr<google::longrunning::Operation>>
AccessContextManagerMetadata::AsyncUpdateGcpUserAccessBinding(
google::cloud::CompletionQueue& cq,
std::unique_ptr<grpc::ClientContext> context,
google::identity::accesscontextmanager::v1::
UpdateGcpUserAccessBindingRequest const& request) {
SetMetadata(*context, "gcp_user_access_binding.name=" +
request.gcp_user_access_binding().name());
return child_->AsyncUpdateGcpUserAccessBinding(cq, std::move(context),
request);
}
future<StatusOr<google::longrunning::Operation>>
AccessContextManagerMetadata::AsyncDeleteGcpUserAccessBinding(
google::cloud::CompletionQueue& cq,
std::unique_ptr<grpc::ClientContext> context,
google::identity::accesscontextmanager::v1::
DeleteGcpUserAccessBindingRequest const& request) {
SetMetadata(*context, "name=" + request.name());
return child_->AsyncDeleteGcpUserAccessBinding(cq, std::move(context),
request);
}
future<StatusOr<google::longrunning::Operation>>
AccessContextManagerMetadata::AsyncGetOperation(
google::cloud::CompletionQueue& cq,
std::unique_ptr<grpc::ClientContext> context,
google::longrunning::GetOperationRequest const& request) {
SetMetadata(*context, "name=" + request.name());
return child_->AsyncGetOperation(cq, std::move(context), request);
}
future<Status> AccessContextManagerMetadata::AsyncCancelOperation(
google::cloud::CompletionQueue& cq,
std::unique_ptr<grpc::ClientContext> context,
google::longrunning::CancelOperationRequest const& request) {
SetMetadata(*context, "name=" + request.name());
return child_->AsyncCancelOperation(cq, std::move(context), request);
}
// Attaches the per-call routing header (x-goog-request-params) and the
// client telemetry header (x-goog-api-client) to the outgoing gRPC context.
void AccessContextManagerMetadata::SetMetadata(
    grpc::ClientContext& context, std::string const& request_params) {
  context.AddMetadata("x-goog-request-params", request_params);
  context.AddMetadata("x-goog-api-client", api_client_header_);
}
GOOGLE_CLOUD_CPP_INLINE_NAMESPACE_END
} // namespace accesscontextmanager_internal
} // namespace cloud
} // namespace google
|
jyi/ITSP | dataset/Lab-11/3299/305632_correct.c | /*numPass=7, numTotal=7
Verdict:ACCEPTED, Visibility:1, Input:"5
1 2 3 4 5", ExpOutput:"1 2 3 4 5
", Output:"1 2 3 4 5 "
Verdict:ACCEPTED, Visibility:1, Input:"5
5 4 3 2 1", ExpOutput:"1 2 3 4 5
", Output:"1 2 3 4 5 "
Verdict:ACCEPTED, Visibility:1, Input:"4
1 3 2 1", ExpOutput:"1 1 2 3
", Output:"1 1 2 3 "
Verdict:ACCEPTED, Visibility:0, Input:"100
49 28 45 49 61 14 82 12 44 52 43 61 9 74 89 44 47 25 28 80 23 95 17 69 49 54 38 77 10 47 14 65 21 48 84 16 40 66 40 16 84 88 76 10 22 27 100 41 71 77 57 52 83 97 54 42 30 96 56 65 95 39 44 35 59 48 11 45 62 94 12 51 58 17 87 10 46 59 78 21 74 84 50 40 87 50 68 55 53 1 48 30 37 16 42 10 47 40 32 40", ExpOutput:"1 9 10 10 10 10 11 12 12 14 14 16 16 16 17 17 21 21 22 23 25 27 28 28 30 30 32 35 37 38 39 40 40 40 40 40 41 42 42 43 44 44 44 45 45 46 47 47 47 48 48 48 49 49 49 50 50 51 52 52 53 54 54 55 56 57 58 59 59 61 61 62 65 65 66 68 69 71 74 74 76 77 77 78 80 82 83 84 84 84 87 87 88 89 94 95 95 96 97 100
", Output:"1 9 10 10 10 10 11 12 12 14 14 16 16 16 17 17 21 21 22 23 25 27 28 28 30 30 32 35 37 38 39 40 40 40 40 40 41 42 42 43 44 44 44 45 45 46 47 47 47 48 48 48 49 49 49 50 50 51 52 52 53 54 54 55 56 57 58 59 59 61 61 62 65 65 66 68 69 71 74 74 76 77 77 78 80 82 83 84 84 84 87 87 88 89 94 95 95 96 97 100 "
Verdict:ACCEPTED, Visibility:0, Input:"100
57 67 48 54 70 64 47 23 33 67 21 68 13 51 96 94 92 100 12 42 11 32 51 61 24 100 26 23 6 93 34 26 42 49 39 53 72 79 42 69 19 55 63 48 91 52 99 30 73 99 48 99 53 95 58 3 3 69 56 56 49 34 1 96 32 6 16 11 13 83 31 43 20 100 33 39 65 89 17 15 18 62 12 56 42 48 61 99 54 5 97 24 100 38 58 96 45 29 36 12", ExpOutput:"1 3 3 5 6 6 11 11 12 12 12 13 13 15 16 17 18 19 20 21 23 23 24 24 26 26 29 30 31 32 32 33 33 34 34 36 38 39 39 42 42 42 42 43 45 47 48 48 48 48 49 49 51 51 52 53 53 54 54 55 56 56 56 57 58 58 61 61 62 63 64 65 67 67 68 69 69 70 72 73 79 83 89 91 92 93 94 95 96 96 96 97 99 99 99 99 100 100 100 100
", Output:"1 3 3 5 6 6 11 11 12 12 12 13 13 15 16 17 18 19 20 21 23 23 24 24 26 26 29 30 31 32 32 33 33 34 34 36 38 39 39 42 42 42 42 43 45 47 48 48 48 48 49 49 51 51 52 53 53 54 54 55 56 56 56 57 58 58 61 61 62 63 64 65 67 67 68 69 69 70 72 73 79 83 89 91 92 93 94 95 96 96 96 97 99 99 99 99 100 100 100 100 "
Verdict:ACCEPTED, Visibility:0, Input:"1
42", ExpOutput:"42
", Output:"42 "
Verdict:ACCEPTED, Visibility:0, Input:"0", ExpOutput:"
", Output:""
*/
#include <stdio.h>
#include <stdlib.h>
/*
 * Length of a zero-terminated int array: counts elements until one equals
 * 0 ('\0'). NOTE(review): unused by the rest of this program, and only
 * safe if the array really contains a terminating 0 -- the arrays
 * allocated in main() are not terminated that way.
 */
int len(int a[]){
    int i=0;
    while(a[i]!='\0'){
        i++;
    }
    return i;
}
/* Exchange the elements at positions i and j of array a. */
void swap(int a[],int i, int j){
    int held = a[j];
    a[j] = a[i];
    a[i] = held;
}
/*
 * Partition a[0..n-1] around the pivot a[0].  After the call the pivot sits
 * at its final sorted index (the return value); every element left of it is
 * < pivot and every element right of it is >= pivot -- the contract
 * quicksort() below relies on.
 *
 * Fix: in the original, the outer while loop could terminate with its own
 * condition false, reaching the end of a non-void function without a
 * return statement (undefined behaviour).  This Lomuto-style rewrite
 * returns on every path.
 */
int partition(int a[], int n){
    int pivot = a[0];
    int store = 0;          /* last index holding a value < pivot */
    int i;
    for (i = 1; i < n; i++) {
        if (a[i] < pivot) {
            int tmp;
            store++;
            tmp = a[store];
            a[store] = a[i];
            a[i] = tmp;
        }
    }
    /* move the pivot from slot 0 into its final position */
    a[0] = a[store];
    a[store] = pivot;
    return store;
}
/*
 * Recursive quicksort of a[0..n-1]: partition around a[0], then sort the
 * two halves.  The pivot element itself is already in its final position,
 * so it is excluded from both recursive calls.
 */
void quicksort(int a[],int n){
    int pindex;
    if(n<=1){
        return;
    }
    else{
        pindex=partition(a,n);
        quicksort(a,pindex);
        quicksort(a+pindex+1,n-pindex-1);
    }
}
/*
 * Entry point: reads a count n followed by n integers from stdin, sorts
 * them with quicksort() and prints them space-separated (trailing space
 * kept to match the expected outputs in the header comment).
 *
 * Fixes vs. the original: the scalars n and i are plain locals instead of
 * being pointlessly heap-allocated, malloc() is used with <stdlib.h> in
 * scope (the original relied on an implicit declaration), and input /
 * allocation failures are handled instead of invoking undefined behaviour.
 */
int main(){
    int n = 0;
    int i;
    int *a;
    if (scanf("%d", &n) != 1 || n < 0) {
        return 0;       /* no count: nothing to sort, matches "" output */
    }
    a = (int*)malloc((size_t)n * sizeof(int));
    if (a == NULL && n > 0) {
        return 1;       /* allocation failed */
    }
    for (i = 0; i < n; i++) {
        scanf("%d", &a[i]);
    }
    quicksort(a, n);
    for (i = 0; i < n; i++) {
        printf("%d ", a[i]);
    }
    free(a);
    return 0;
}
PigThinkingTec/Application | src/main/java/com/pigthinkingtec/framework/dbaccess/DBNullObject.java | package com.pigthinkingtec.framework.dbaccess;
/**
* NULLをあらわすオブジェクトクラス
*
* @author <EMAIL>
* @version $Revision: 1.1 $ $Date: 2009/11/13 06:58:21 $
* @history
*
*
*/
/**
 * Immutable marker object representing a typed SQL NULL: it carries only
 * the SQL type code of the column the NULL value belongs to.
 */
public class DBNullObject {

    /** SQL type code of the NULL value. */
    private final int sqlType;

    /**
     * Creates a NULL marker for the given SQL type.
     *
     * @param sqlType SQL type code
     */
    public DBNullObject(int sqlType) {
        this.sqlType = sqlType;
    }

    /**
     * Returns the SQL type code.
     *
     * @return int
     */
    public int getSqlType() {
        return sqlType;
    }
}
|
tallycheck/data-support | meta-info/src/main/java/com/taoswork/tallycheck/info/descriptor/field/base/IBasicFieldInfoRW.java | package com.taoswork.tallycheck.info.descriptor.field.base;
import com.taoswork.tallycheck.info.descriptor.field.IBasicFieldInfo;
import com.taoswork.tallycheck.info.descriptor.field.IFieldInfo;
/**
 * Writable companion of {@link IBasicFieldInfo}: adds setters toggling
 * whether the described field supports sorting and filtering.  Each setter
 * returns an {@link IFieldInfo} (fluent style).
 *
 * Created by <NAME> on 2015/10/24.
 */
public interface IBasicFieldInfoRW extends IBasicFieldInfo {
    /** Enables/disables sort support for this field. */
    IFieldInfo setSupportSort(boolean supportSort);
    /** Enables/disables filter support for this field. */
    IFieldInfo setSupportFilter(boolean supportFilter);
}
|
qsyttkx/geek_time_cpp | 30/boost_coroutine2/fibonacci_coroutine2.cpp | #include <iostream> // std::cout/endl
#include <stdint.h> // uint64_t
#include <boost/coroutine2/all.hpp> // boost::coroutines2
typedef boost::coroutines2::coroutine<const uint64_t> coro_t;
// Coroutine body: yields the Fibonacci numbers 1, 1, 2, 3, 5, ... to the
// pulling side, one value per resumption, forever.  Termination is the
// consumer's responsibility (see main).
void fibonacci(coro_t::push_type& yield)
{
    uint64_t prev = 0;
    uint64_t curr = 1;
    for (;;) {
        yield(curr);
        uint64_t next = prev + curr;
        prev = curr;
        curr = next;
    }
}
// Pulls Fibonacci numbers lazily from the coroutine above and prints all
// values below 10000.  fixedsize_stack() supplies the coroutine's stack;
// the range only ends via the break, since fibonacci() never returns.
int main()
{
    for (auto i : coro_t::pull_type(
             boost::coroutines2::fixedsize_stack(),
             fibonacci)) {
        if (i >= 10000) {
            break;
        }
        std::cout << i << std::endl;
    }
}
|
pmesnier/openssl | engines/e_loader_attic_err.c | <gh_stars>1000+
/*
* Generated by util/mkerr.pl DO NOT EDIT
* Copyright 1995-2021 The OpenSSL Project Authors. All Rights Reserved.
*
* Licensed under the Apache License 2.0 (the "License"). You may not use
* this file except in compliance with the License. You can obtain a copy
* in the file LICENSE in the source distribution or at
* https://www.openssl.org/source/license.html
*/
#include <openssl/err.h>
#include "e_loader_attic_err.h"
#ifndef OPENSSL_NO_ERR
/*
 * Reason-code -> human-readable-string table for the attic loader engine.
 * Library field of ERR_PACK is 0 here; the real library code is assigned
 * at registration time.  Terminated by the {0, NULL} sentinel that
 * ERR_load_strings() expects.
 */
static ERR_STRING_DATA ATTIC_str_reasons[] = {
    {ERR_PACK(0, 0, ATTIC_R_AMBIGUOUS_CONTENT_TYPE), "ambiguous content type"},
    {ERR_PACK(0, 0, ATTIC_R_BAD_PASSWORD_READ), "bad password read"},
    {ERR_PACK(0, 0, ATTIC_R_ERROR_VERIFYING_PKCS12_MAC),
    "error verifying pkcs12 mac"},
    {ERR_PACK(0, 0, ATTIC_R_INIT_FAILED), "init failed"},
    {ERR_PACK(0, 0, ATTIC_R_PASSPHRASE_CALLBACK_ERROR),
    "passphrase callback error"},
    {ERR_PACK(0, 0, ATTIC_R_PATH_MUST_BE_ABSOLUTE), "path must be absolute"},
    {ERR_PACK(0, 0, ATTIC_R_SEARCH_ONLY_SUPPORTED_FOR_DIRECTORIES),
    "search only supported for directories"},
    {ERR_PACK(0, 0, ATTIC_R_UI_PROCESS_INTERRUPTED_OR_CANCELLED),
    "ui process interrupted or cancelled"},
    {ERR_PACK(0, 0, ATTIC_R_UNSUPPORTED_CONTENT_TYPE),
    "unsupported content type"},
    {ERR_PACK(0, 0, ATTIC_R_UNSUPPORTED_SEARCH_TYPE),
    "unsupported search type"},
    {ERR_PACK(0, 0, ATTIC_R_URI_AUTHORITY_UNSUPPORTED),
    "uri authority unsupported"},
    {0, NULL}
};
#endif
/* Dynamically assigned OpenSSL error-library code (0 until first use) and a
 * guard flag so the string table is registered at most once. */
static int lib_code = 0;
static int error_loaded = 0;
/*
 * Registers the ATTIC error strings with OpenSSL; idempotent.  Always
 * returns 1, matching the ERR_load_*_strings() convention.
 */
static int ERR_load_ATTIC_strings(void)
{
    if (lib_code == 0)
        lib_code = ERR_get_next_error_library();
    if (!error_loaded) {
#ifndef OPENSSL_NO_ERR
        ERR_load_strings(lib_code, ATTIC_str_reasons);
#endif
        error_loaded = 1;
    }
    return 1;
}
/*
 * Unregisters the ATTIC error strings again; safe to call when nothing was
 * loaded (the error_loaded guard makes it a no-op).
 */
static void ERR_unload_ATTIC_strings(void)
{
    if (error_loaded) {
#ifndef OPENSSL_NO_ERR
        ERR_unload_strings(lib_code, ATTIC_str_reasons);
#endif
        error_loaded = 0;
    }
}
/*
 * Raises an ATTIC error: records the reason code and the source location.
 * The 'function' argument is unused because no function name is recorded
 * (ERR_set_debug is passed NULL for it).
 */
static void ERR_ATTIC_error(int function, int reason, const char *file, int line)
{
    if (lib_code == 0)
        lib_code = ERR_get_next_error_library();
    ERR_raise(lib_code, reason);
    ERR_set_debug(file, line, NULL);
}
|
lalit-satapathy/beats | x-pack/winlogbeat/module/powershell/test/powershell_windows_test.go | // Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
// or more contributor license agreements. Licensed under the Elastic License;
// you may not use this file except in compliance with the Elastic License.
package test
import (
"strings"
"testing"
"github.com/elastic/beats/v7/x-pack/winlogbeat/module"
"github.com/elastic/go-sysinfo/providers/windows"
// Register required processors.
_ "github.com/elastic/beats/v7/libbeat/cmd/instance"
_ "github.com/elastic/beats/v7/libbeat/processors/timestamp"
)
// Ignore these fields because they can be different on different versions
// of windows.  Extended at runtime by TestPowerShell for OS-specific
// rendering differences.
var ignoreFields = []string{
	"message",
}
// TestPowerShell runs every sample .evtx file under testdata through the
// winlogbeat module pipeline and compares the rendered events against the
// golden files, skipping the fields listed in ignoreFields.
func TestPowerShell(t *testing.T) {
	// FIXME: We do not get opcode strings in the XML on Windows 2022, so ignore that
	// field there. Only apply this to that platform to avoid regressions elsewhere.
	// This means that golden values should be generated on a non-2022 version of
	// Windows to ensure that this field is properly rendered. This is checked in
	// the module.TestPipeline function.
	//
	// See https://github.com/elastic/beats/issues/31490 for tracking issue.
	os, err := windows.OperatingSystem()
	if err != nil {
		t.Fatalf("failed to get operating system info: %v", err)
	}
	t.Logf("running tests on %s", os.Name)
	if strings.Contains(os.Name, "2022") {
		ignoreFields = append(ignoreFields, "winlog.opcode")
		t.Log("ignoring winlog.opcode")
	}
	module.TestPipeline(t, "testdata/*.evtx", module.WithFieldFilter(ignoreFields))
}
|
imgui-works/MoravaEngine_opengl_vulkan_2d_3d_game_engine | MoravaEngine/src/Framebuffer/FramebufferTextureCubemap.h | #pragma once
#include "Framebuffer/FramebufferTexture.h"
#include "Texture/MoravaTexture.h"
#include <string>
/**
 * Framebuffer attachment backed by a cubemap texture.  Specializes
 * FramebufferTexture with cubemap-specific specification setup and
 * OpenGL creation/bind/unbind logic.
 */
class FramebufferTextureCubemap : public FramebufferTexture
{
public:
	FramebufferTextureCubemap();
	// width/height per face; attachmentFormat selects the pixel format,
	// orderID the attachment slot ordering.
	FramebufferTextureCubemap(unsigned int width, unsigned int height, bool isMultisample,
		AttachmentFormat attachmentFormat, unsigned int orderID);
	// Construct directly from an explicit texture specification.
	FramebufferTextureCubemap(MoravaTexture::Specification spec, unsigned int orderID);
	virtual ~FramebufferTextureCubemap() override;
	virtual void InitSpecification() override;
	virtual void OpenGLCreate() override;
	virtual void Bind(unsigned int slot = 0) const override;
	virtual void Unbind() override;
};
|
lucky401/vue_template | src/utils/mutator.js | /**
* Mutate your state with this way:
* commit('MUTATION_NAME', {accessor: 'a.b.c', value: 'cValue'})
* Interpolated as:
* state.a.b.c = 'cvalue'
*
* @param {String} state Current state of vuex.
* @param {Object} payload Mutation payload. eg: {accessor: 'a.b.c.d', value: 'dValue'}
*
* @return {void}
*/
export default function (state, payload) {
const { accessor, value } = payload;
const accessors = accessor.split('.');
const field = accessors.pop();
accessors.reduce((object, index) => object[index], state)[field] = value;
}
|
niraveen/kafka-connect-file-pulse | connect-file-pulse-api/src/main/java/io/streamthoughts/kafka/connect/filepulse/data/internal/TypeConverter.java | /*
* Copyright 2019-2020 StreamThoughts.
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.streamthoughts.kafka.connect.filepulse.data.internal;
import io.streamthoughts.kafka.connect.filepulse.data.DataException;
import java.io.Serializable;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.Objects;
import java.util.Optional;
import java.util.regex.Pattern;
/**
* Class which can be used to convert an object to a specific type.
*/
public class TypeConverter implements Serializable {
private static final String BOOLEAN_TRUE = "true";
private static final String BOOLEAN_FALSE = "false";
private static final String MIN_LONG_STR_NO_SIGN = String.valueOf(Long.MIN_VALUE).substring(1);
private static final String MAX_LONG_STR = String.valueOf(Long.MAX_VALUE);
public static Collection getArray(final Object value) throws IllegalArgumentException {
Objects.requireNonNull(value, "value can't be null");
if (Collection.class.isAssignableFrom(value.getClass())) {
return (Collection<?>) value;
} else if (value instanceof Object[]) {
return Arrays.asList((Object[]) value);
}
throw new DataException(
String.format("'%s' is not assignable to Collection: \"%s\"", value.getClass(), value)
);
}
public static Boolean getBool(final Object value) throws IllegalArgumentException {
Boolean result = null;
if (value == null) {
result = false;
}
if (value instanceof String) {
String s = (String) value;
if (s.length() == 1 && Character.isDigit(s.charAt(0))) {
int digit = (int) s.charAt(0);
result = digit > 0;
} else {
result = s.equalsIgnoreCase(BOOLEAN_TRUE) ||
s.equalsIgnoreCase("yes") ||
s.equalsIgnoreCase("y");
}
}
if (value instanceof Boolean) {
result = (Boolean) value;
}
if (result == null) {
throw new DataException(String.format("Cannot parse boolean content from \"%s\"", value));
}
return result;
}
public static Short getShort(final Object value) throws IllegalArgumentException {
Objects.requireNonNull(value, "value can't be null");
if (value instanceof String && isIntegerNumber((String) value)) {
return new BigDecimal(value.toString()).shortValue();
}
if (value instanceof Number) {
Number number = (Number) value;
return number.shortValue();
}
throw new DataException(String.format("Cannot parse 32-bits int content from \"%s\"", value));
}
public static Integer getInt(final Object value) throws IllegalArgumentException {
Objects.requireNonNull(value, "value can't be null");
if (value instanceof String && isIntegerNumber((String) value)) {
return new BigDecimal(value.toString()).intValue();
}
if (value instanceof Number) {
Number number = (Number) value;
return number.intValue();
}
throw new DataException(String.format("Cannot parse 32-bits int content from \"%s\"", value));
}
public static Long getLong(final Object value) throws IllegalArgumentException {
Objects.requireNonNull(value, "value can't be null");
if (value instanceof String) {
final String trimmed = ((String) value).trim();
if (isIntegerNumber(trimmed)) {
return new BigDecimal(trimmed).longValue();
}
}
if (value instanceof Number) {
Number number = (Number) value;
return number.longValue();
}
throw new DataException(String.format("Cannot parse 64-bits long content from \"%s\"", value));
}
public static Float getFloat(final Object value) throws IllegalArgumentException {
Objects.requireNonNull(value, "value can't be null");
if (value instanceof String) {
return getBigDecimal(value)
.map(BigDecimal::floatValue)
.orElseThrow(() ->
new DataException(String.format("Cannot parse 64-bits double content from \"%s\"", value))
);
}
if (value instanceof Number) {
Number number = (Number) value;
return number.floatValue();
}
throw new DataException(String.format("Cannot parse 32-bits float content from \"%s\"", value));
}
private static Optional<BigDecimal> getBigDecimal(final Object value) {
try {
return Optional.of(new BigDecimal(value.toString().replace(",", ".")));
} catch (NumberFormatException e) {
return Optional.empty();
}
}
public static Double getDouble(final Object value) throws IllegalArgumentException {
Objects.requireNonNull(value, "value can't be null");
if (value instanceof String) {
return getBigDecimal(value)
.map(BigDecimal::doubleValue)
.orElseThrow(() ->
new DataException(String.format("Cannot parse 64-bits double content from \"%s\"", value))
);
}
if (value instanceof Number) {
Number number = (Number) value;
return number.doubleValue();
}
throw new DataException(String.format("Cannot parse 64-bits double content from \"%s\"", value));
}
public static Date getDate(final Object value) throws IllegalArgumentException {
Objects.requireNonNull(value, "value can't be null");
if (value instanceof Date) {
return (Date) value;
}
if (value instanceof Number) {
Number number = (Number) value;
return new Date(number.longValue());
}
if (value instanceof String && isIntegerNumber((String) value)) {
return new Date(Long.parseLong((String) value));
}
throw new DataException(String.format("Cannot parse Date content from \"%s\"", value));
}
public static String getString(final Object value) {
if (value instanceof ByteBuffer) {
return StandardCharsets.UTF_8.decode((ByteBuffer) value).toString();
}
return (value != null) ? value.toString() : null;
}
public static byte[] getBytes(final Object value) {
if (value instanceof ByteBuffer) {
return ((ByteBuffer) value).array();
}
if (value instanceof String) {
return ((String) value).getBytes(StandardCharsets.UTF_8);
}
if (value.getClass().isArray()) {
return (byte[]) value;
}
throw new DataException(String.format("Cannot parse byte[] from \"%s\"", value));
}
public static BigDecimal getDecimal(final Object value) {
Objects.requireNonNull(value, "value can't be null");
String result = null;
if (value instanceof Double) {
result = String.valueOf(value);
}
if (value instanceof Integer) {
result = String.valueOf(value);
}
if (value instanceof String) {
result = (String) value;
}
if (result == null) {
throw new DataException(
String.format("Cannot parse decimal content from \"%s\"", value));
}
if (result.trim().length() == 0) {
return null;
}
return getBigDecimal(value)
.orElseThrow(() ->
new DataException(String.format("Cannot parse decimal content from \"%s\"", value))
);
}
public static boolean isBooleanString(final String text) {
return BOOLEAN_TRUE.equalsIgnoreCase(text) || BOOLEAN_FALSE.equalsIgnoreCase(text);
}
public static boolean isIntegerNumber(final String text) {
if (text.isEmpty())
return false;
// skip leading negative sign, do NOT allow leading plus
char c = text.charAt(0);
final int start = c == '-' ? 1 : 0;
if (start == 1 && text.length() == 1)
return false;
for (int i = start; i < text.length(); i++) {
c = text.charAt(i);
if (Character.digit(c, 10) < 0) {
return false;
}
}
return true;
}
public static boolean isDoubleNumber(final String text) {
return text != null && DOUBLE_REGEX_MATCHER.matcher(text).matches();
}
public static boolean isInLongRange(String s) {
final boolean negative = s.charAt(0) == '-';
final String cmp = negative ? MIN_LONG_STR_NO_SIGN : MAX_LONG_STR;
if (negative) {
s = s.substring(1);
}
int cmpLen = cmp.length();
int alen = s.length();
if (alen < cmpLen) {
return true;
} else if (alen > cmpLen) {
return false;
} else {
for(int i = 0; i < cmpLen; ++i) {
int diff = s.charAt(i) - cmp.charAt(i);
if (diff != 0) {
return diff < 0;
}
}
return true;
}
}
/**
* The regexp suggested by the {@link Double#valueOf(String)}.
*/
private static final String Digits = "(\\p{Digit}+)";
private static final String HexDigits = "(\\p{XDigit}+)";
// an exponent is 'e' or 'E' followed by an optionally
// signed decimal integer.
private static final String Exp = "[eE][+-]?" + Digits;
private static final String fpRegex =
"[\\x00-\\x20]*" + // Optional leading "whitespace"
"[+-]?(" + // Optional sign character
"NaN|" + // "NaN" string
"Infinity|" + // "Infinity" string
// A decimal floating-point string representing a finite positive
// number without a leading sign has at most five basic pieces:
// Digits . Digits ExponentPart FloatTypeSuffix
//
// Since this method allows integer-only strings as input
// in addition to strings of floating-point literals, the
// two sub-patterns below are simplifications of the grammar
// productions from section 3.10.2 of
// The Java Language Specification.
// Digits ._opt Digits_opt ExponentPart_opt FloatTypeSuffix_opt
"(((" + Digits + "(\\.)?(" + Digits + "?)(" + Exp + ")?)|" +
// . Digits ExponentPart_opt FloatTypeSuffix_opt
"(\\.(" + Digits + ")(" + Exp + ")?)|" +
// Hexadecimal strings
"((" +
// 0[xX] HexDigits ._opt BinaryExponent FloatTypeSuffix_opt
"(0[xX]" + HexDigits + "(\\.)?)|" +
// 0[xX] HexDigits_opt . HexDigits BinaryExponent FloatTypeSuffix_opt
"(0[xX]" + HexDigits + "?(\\.)" + HexDigits + ")" +
")[pP][+-]?" + Digits + "))" +
"[fFdD]?))" +
"[\\x00-\\x20]*";// Optional trailing "whitespace"
private static final Pattern DOUBLE_REGEX_MATCHER = Pattern.compile(fpRegex);
} |
hoangdo94/nodebb-plugin-tuchanloan | lib/controllers/skills.js | "use strict";
var skills = require('../skills'),
skillsController = {};
/**
 * Route handler for the RPG skills page: loads the current user's skill
 * data and renders it; if loading fails, redirects to the character page.
 */
skillsController.render = function(req, res, callback) {
  skills.view(req.user.uid, function(err, data) {
    if (!err) {
      return res.render('rpg/skills', data);
    }
    require.main.require('./src/controllers/helpers').redirect(res, '/rpg/character');
  });
};
module.exports = skillsController; |
signaldust/dust-toolkit | dust/widgets/logview.h | #pragma once
namespace dust
{
    // Simple read-only multi-line text control
    //
    // Stores raw UTF-8 bytes in an append-only buffer, measures/draws them
    // with a (lazily loaded) default monospace font, and fires onClickError
    // when the user clicks a line shaped like "<file>:<line>:<col>:".
    struct LogView : Panel
    {
        Font    _font;
        unsigned tabStop = 8;   // tab width, in widths of ' '
        ARGB fgColor;
        ARGB bgColor;
        // called when clicking lines looking like <filename>:<num>:<num>:
        // this is what at least clang errors look like
        std::function<void(const char*,int,int)> onClickError
            = [](const char*,int,int){};
        LogView()
        {
            style.rule = LayoutStyle::FILL;
            bgColor = theme.bgColor;
            fgColor = theme.fgColor;
            sizeX = 0;
            sizeY = 0;
        }
        // Drop all content and re-enable the automatic scroll-to-bottom.
        void clear()
        {
            buffer.clear();
            stopScroll = false;
            recalculateSize();
        }
        // Append n raw (UTF-8) bytes to the log and re-layout.
        void append(const char * txt, unsigned n)
        {
            buffer.insert(buffer.end(), txt, txt + n);
            recalculateSize();
        }
        // FIXME: make this use components like labels
        // Returns the font, lazily loading a default monospace face on
        // first use (needs a window for the DPI).
        Font & getFont()
        {
            if(_font.valid()) return _font;
            // fall-back if no font can be found
            Window * win = getWindow();
            if(win)
            {
                // default to monospace even if we can handle proportional
                _font.loadDefaultFont(7, win->getDPI(), true);
                recalculateSize();
            }
            return _font;
        }
        // Keep the font (and hence the layout) in sync with the window DPI.
        void ev_dpi(float dpi)
        {
            Font & font = getFont();
            if(font->parameters.dpi != dpi)
            {
                font.setDPI(dpi);
                recalculateSize();
            }
        }
        // FIXME: can we have TextBuffer implement utf8 iterator directly?
        // Recompute sizeX/sizeY by measuring every decoded character, then
        // keep the view pinned to the bottom unless the user has scrolled.
        void recalculateSize()
        {
            Font & font = getFont();
            if(!font.valid()) return;
            int lines = 1, lineHeight = (int)ceil(font->getLineHeight());
            float w = 0, x = 0;
            // assume (for now) all digits are same size
            // this is safe for most sensible fonts
            float dw = font->getCharAdvanceW('0');   // NOTE(review): unused
            // use space width for tabStops
            // only matters for proportional fonts
            float sw = font->getCharAdvanceW(' ');
            utf8::Decoder decoder;
            for(auto byte : buffer)
            {
                // keep going until we have a full char
                if(!decoder.next(byte)) continue;
                auto ch = decoder.ch;
                // check newlines
                if(ch == '\n') { x = 0; ++lines; continue; }
                // check for tabs
                if(ch == '\t')
                {
                    x += tabStop*sw - fmod(x, tabStop*sw);
                    continue;
                }
                x += font->getCharAdvanceW(decoder.ch);
                if(w < x) w = x;
            }
            // handle trailing invalid unicode
            if(decoder.state != utf8::ACCEPT)
            {
                x += font->getCharAdvanceW(decoder.ch);
                if(w < x) w = x;
            }
            sizeX = (int) ceilf(w);
            sizeY = lines * lineHeight;
            reflow(); // do reflow first so we can hope to scroll
            // reset to very bottom unless manually scrolled
            if(!stopScroll) scrollToView(0, sizeY);
        }
        int ev_size_x(float dpi) { return sizeX; }
        int ev_size_y(float dpi) { return sizeY; }
        void ev_mouse_exit()
        {
            hoverLine = -1;
        }
        // Mouse handling: a wheel event releases the bottom-pin (and is
        // passed to the parent for actual scrolling), movement tracks the
        // hovered line, and a left click parses the clicked line as
        // "<file>:<line>:<col>:" and fires onClickError on success.
        bool ev_mouse(const MouseEvent & e)
        {
            Font & font = getFont();
            if(!font.valid()) return false;
            // pass scroll to parent
            if(e.type == MouseEvent::tScroll)
            {
                stopScroll = true;
                return false;
            }
            if(e.type == MouseEvent::tMove && !e.button)
            {
                hoverLine = int(e.y - font->getDescent())
                    / (int)ceil(font->getLineHeight());
                redraw();
            }
            // try to parse for error positions
            if(e.type == MouseEvent::tDown && e.button == 1)
            {
                int line = 0, lineHeight = (int)ceil(font->getLineHeight());
                int wantLine = int(e.y - font->getDescent())
                    / (int)ceil(font->getLineHeight());
                int colons = 0;
                std::vector<uint8_t> filename;
                int errLine = 0, errCol = 0;
                // scan bytes until the clicked line, splitting on ':'
                for(auto byte : buffer)
                {
                    // check newlines
                    if(byte == '\n') { ++line; continue; }
                    // found the correct line
                    if(line == wantLine)
                    {
                        if(byte == ':') { ++colons; continue; }
                        switch(colons)
                        {
                        case 0: filename.push_back(byte); break;
                        case 1:
                            if(byte < '0' || byte > '9') return true;
                            errLine = errLine * 10 + byte - '0';
                            break;
                        case 2:
                            if(byte < '0' || byte > '9') return true;
                            errCol = errCol * 10 + byte - '0';
                            break;
                        case 3:
                            // after the third ':' we have a full match;
                            // NUL-terminate the filename and notify
                            if(!filename.size()) return true;
                            filename.push_back(0);
                            onClickError(
                                (const char*)filename.data(), errLine, errCol);
                            return true;
                        }
                    }
                }
            }
            return true;
        }
        // Paint: clears the background, highlights the hovered line, then
        // decodes and draws every character, skipping lines fully outside
        // the clip rect.
        void render(RenderContext & rc)
        {
            Font & font = getFont();
            if(!font.valid()) return;
            // for drawing margins, undo parent's contentOffsetX
            // this keeps margin fixed when parent is scrolling
            // using context offset ensures stable float rounding
            RenderContext rcMargin(rc,
                -getParent()->getLayout().contentOffsetX, 0);
            rc.clear(bgColor);
            int line = 0, lineHeight = (int)ceil(font->getLineHeight());
            float dw = font->getCharAdvanceW('0');   // NOTE(review): unused
            float sw = font->getCharAdvanceW(' ');
            float x = 0, y = lineHeight - font->getDescent();
            // don't bother with outputting characters
            // that are above or below the current view
            const Rect & clip = rc.getClipRect();
            ARGB midColor = color::lerp(bgColor, fgColor, 0x40);
            int linePx = (int) getWindow()->pt();
            if(line == hoverLine)
                rc.fillRect(paint::Color(midColor),
                    0, int(y)+2*linePx, layout.w, linePx);
            utf8::Decoder decoder;
            for(auto byte : buffer)
            {
                // keep going until we have a full char
                if(!decoder.next(byte)) continue;
                auto ch = decoder.ch;
                int lineY = (int) (y-font->getAscent());
                bool skipHidden = (lineY > clip.y1
                    || lineY + lineHeight < clip.y0);
                // check newlines
                if(ch == '\n')
                {
                    x = 0; ++line;
                    y += lineHeight;
                    if(line == hoverLine)
                        rc.fillRect(paint::Color(midColor),
                            0, int(y)+2*linePx, layout.w, linePx);
                    continue;
                }
                // check for tabs
                if(ch == '\t')
                {
                    x += tabStop*sw - fmod(x, tabStop*sw);
                    continue;
                }
                // actual rendering, we can skip this part
                if(skipHidden) continue;
                x += rc.drawChar(font, decoder.ch,
                    paint::Color(fgColor), x, y);
            }
            // handle trailing invalid unicode
            if(decoder.state != utf8::ACCEPT)
            {
                x += rc.drawChar(font, utf8::invalid,
                    paint::Color(fgColor), x, y);
            }
        }
    protected:
        std::vector<char>   buffer;     // raw UTF-8 log contents
        int sizeX;                      // measured content width (px)
        int sizeY;                      // measured content height (px)
        int hoverLine = -1;             // line index under cursor, -1 = none
        bool stopScroll = false;        // user scrolled: stop pinning bottom
    };
};
|
NovaSBE-DSKC/predict-campaing-sucess-rate | dskc/clean/text.py | import math
import re
import pandas as pd
from dskc._util.pandas import df_to_list_w_column_idx
from dskc.clean import insert_next_to
from dskc.io import get_root_path
def _get_stopwords():
    """Read the word-cloud stopword file (one word per line) and return the
    stripped words, plus two hard-coded extras.
    """
    stopwords_path = get_root_path() + "/dskc/visualization/graphs/types/word_cloud/stopwords.txt"
    with open(stopwords_path, encoding="utf-8") as f:
        stopwords = [line.strip() for line in f]
    # Appended here because adding these words to the file did not take
    # effect (reason unknown).
    stopwords.append("a")
    stopwords.append("é")
    return stopwords
def get_text_from(series, stop_words=None):
    """Tokenize each entry of *series* into a list of lowercased words.

    '!', '?' and '-' are replaced with spaces, apostrophes and colons are
    stripped, and tokens that are stopwords or shorter than two characters
    are dropped.

    Args:
        series: iterable of texts; None/NaN entries are skipped entirely.
        stop_words: optional extra stopwords; the default list from
            ``_get_stopwords()`` is always applied as well.

    Returns:
        list of word lists, one per non-skipped row.

    Fixes vs. the original:
      * ``stop_words=[]`` was a mutable default extended in place, so
        stopwords accumulated across calls and leaked into lists passed
        by callers;
      * ``type(text) == float`` missed numpy floats (which subclass
        ``float``), letting literal "nan" tokens slip through.
    """
    blocked = set(stop_words) if stop_words else set()
    blocked.update(_get_stopwords())

    rows = []
    for text in series:
        # skip empty / missing rows
        if not text or (isinstance(text, float) and math.isnan(text)):
            continue
        # normalize punctuation
        text = re.sub('[!?-]', ' ', str(text))
        text = text.replace("'", "")
        text = text.replace(":", "")
        words = []
        for word in text.split():
            word = str(word).lower()
            if len(word) <= 1:
                continue
            if word not in blocked:
                words.append(word)
        rows.append(words)
    return rows
def _get_lexicon():
    """Load the English/Portuguese sentiment lexicon as a DataFrame."""
    lexicon_path = get_root_path() + "/dskc/clean/lexicons/en_pt_lex.csv"  # todo json
    return pd.read_csv(lexicon_path)
def sentiment_calculator(word_list, lexicon):
    """Return the net sentiment of *word_list*.

    The score is the number of words whose lexicon entry is "positive"
    minus the number whose entry is "negative"; words absent from
    *lexicon* (or carrying any other label) contribute nothing.
    """
    score = 0
    for word in word_list:
        label = lexicon.get(word)
        if label == "positive":
            score += 1
        elif label == "negative":
            score -= 1
    return score
def get_sentiment(series):
    """Score each entry of *series* against the EN/PT sentiment lexicon.

    Returns:
        pd.Series of integer scores (positive minus negative word counts),
        one per row kept by ``get_text_from``.
        NOTE(review): rows skipped there as empty/NaN shorten the result
        relative to *series* -- confirm callers align indexes.
    """
    lexicon, c_idx, columns = df_to_list_w_column_idx(_get_lexicon())  # refactor to be json
    # word -> sentiment label lookup table
    lexicon_dict = {}
    for row in lexicon:
        lexicon_dict[row[c_idx["word"]]] = row[c_idx["sentiment"]]
    words_list = get_text_from(series)
    results = [sentiment_calculator(x, lexicon_dict) for x in words_list]
    return pd.Series(results)
def set_sentiment(df, column, sufix="_sentiment"):
    """Compute sentiment scores for ``df[column]`` and insert them into *df*
    as a new column named ``column + sufix``, placed next to *column*.
    (The parameter is spelled "sufix" in the public interface; kept as-is.)
    """
    results = get_sentiment(df[column])
    insert_next_to(df, column + sufix, column, results)
def get_text(series, stop_words=None):
    """Flatten *series* into one space-joined string of filtered words.

    '!' and '?' are replaced with spaces, each entry is split on '; ',
    ', ', newlines and single spaces, tokens are lowercased, and
    stopwords and single-character tokens are dropped.

    Args:
        series: iterable of texts; None/NaN entries are skipped.
        stop_words: optional extra stopwords; the default list from
            ``_get_stopwords()`` is always applied as well.

    Fixes vs. the original:
      * ``stop_words=[]`` was a mutable default extended in place, so
        stopwords accumulated across calls and caller-supplied lists were
        mutated;
      * ``type(text) == float`` missed numpy floats, letting literal "nan"
        tokens through.
    """
    blocked = set(stop_words) if stop_words else set()
    blocked.update(_get_stopwords())

    words = []
    for text in series:
        # skip empty / missing rows
        if not text or (isinstance(text, float) and math.isnan(text)):
            continue
        # normalize punctuation, then split on the known separators
        text = re.sub('[!?]', ' ', str(text))
        for word in re.split('; |, |\n| ', str(text)):
            word = str(word).lower()
            if len(word) <= 1:
                continue
            if word not in blocked:
                words.append(word)
    return " ".join(words)
def most_freq_words(series, stop_words=None):
    """Return a DataFrame with columns ``Word`` and ``Frequency`` listing
    the words of *series* sorted by descending frequency.

    Fixes vs. the original: frequencies were computed with ``list.count``
    inside a loop over the unique words (O(unique * total)); a single
    ``Counter`` pass is O(total).  A copy of *stop_words* is forwarded so
    ``get_text`` cannot mutate a caller-supplied list.
    """
    from collections import Counter  # local import keeps module imports untouched
    text = get_text(series, stop_words=list(stop_words) if stop_words else [])
    counts = Counter(text.split())
    words = list(counts)
    return pd.DataFrame({'Word': words,
                         'Frequency': [counts[w] for w in words]}).sort_values(
        by='Frequency', ascending=False).reset_index(drop=True)
def categories_from_word_series(df, series, categories=10, stop_words=[], prefix="CAT_"):
    """Bucket the rows of *df* by the top-N most frequent words of column
    *series*.

    Adds a column named ``prefix + SERIES`` (upper-cased column name) that
    holds the first top word contained in the row's text (case-insensitive
    substring match), or 'outros' ("other") when none matches.

    Args:
        df: DataFrame to modify in place (also returned).
        series: name of the text column to categorize.
        categories: number of top words to use as category labels.
        stop_words: extra stopwords forwarded to the frequency count.
        prefix: prefix for the new category column's name.
    """
    # Selecting the top categories
    categories = most_freq_words(df[series], stop_words=stop_words).iloc[0:categories]
    # assigning "new?series?name" to the new pd column not working
    df[prefix + str(series).upper()] = 'outros'
    # earlier (more frequent) categories win: only rows still 'outros' are relabeled
    for cat in categories['Word']:
        df.loc[(df[series].str.contains(cat, na=False, case=False)) & (
                df[prefix + str(series).upper()] == 'outros'), prefix + str(series).upper()] = cat
    return df
|
Epxoxy/LiveRoku-Java | src/danmaku/KeepAliveHandler.java | package danmaku;
import java.util.Random;
import danmaku.packet.Packet;
import danmaku.packet.PacketMsgType;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
/**
 * Netty inbound handler that keeps a danmaku (live-comment) connection
 * alive: on channel activation it sends the room handshake packet, then
 * starts a background thread that sends a heartbeat packet every 30
 * seconds until the channel goes inactive.
 */
public class KeepAliveHandler extends ChannelInboundHandlerAdapter{
	// Room/channel id embedded in the handshake payload.
	private final int channelId;
	// Consecutive send failures tolerated before the heartbeat loop exits.
	private int retryTimes = 3;
	// Set in channelActive, cleared in channelInactive; read by the
	// heartbeat thread as its run condition.
	// NOTE(review): not volatile -- visibility across threads relies on
	// the interrupt in stopThread(); confirm this is intended.
	private boolean isActive = false;
	private Thread heartbeat = null;
	public KeepAliveHandler(int channelId) {
		this.channelId = channelId;
	}
	/** Builds a minimal packet of the given type carrying the payload. */
	private Packet simplePacket(int packetType, String payload) {
		Packet packet = new Packet();
		packet.devType = 1;
		packet.packetType = packetType;
		packet.device = 1;
		packet.payload = payload;
		return packet;
	}
	/**
	 * Sends the room handshake (with a random temporary uid), then starts
	 * the heartbeat thread.  Any earlier heartbeat thread is interrupted
	 * first.  Closes the channel if the handshake cannot be written.
	 */
	@Override
	public void channelActive(ChannelHandlerContext ctx) throws Exception {
		this.isActive = true;
		//Handshake
		System.out.println("Invoke KeepAliveHandler.channelActive(ctx)");
		long tmpUid = (long) (1e14 + 2e14 * new Random ().nextDouble ());
		String payload = "{ \"roomid\":" + channelId + ", \"uid\":" + tmpUid + "}";
		try {
			ctx.writeAndFlush (simplePacket(PacketMsgType.Handshake, payload));
		} catch (Exception e) {
			e.printStackTrace ();
			ctx.close ();
			return;
		}
		stopThread(heartbeat);
		heartbeat = new Thread() {
			@Override
			public void run() {
				int errorTimes = 0;
				Packet ping = simplePacket(PacketMsgType.Heartbeat,"");
				while (isActive) {
					try {
						ctx.writeAndFlush (ping);
						System.out.println("Heartbeat...");
					} catch (Exception e) {
						e.printStackTrace ();
						// give up after retryTimes consecutive failures
						if (errorTimes > retryTimes) break;
						++errorTimes;
						continue;
					}
					try {
						Thread.sleep(30000);
					} catch (InterruptedException e) {
						e.printStackTrace();
					}
				}
				// loop ended (inactive or too many errors): close the channel
				ctx.close ();
			}
		};
		heartbeat.start();
		super.channelActive(ctx);
	}
	/** Stops the heartbeat loop when the connection goes down. */
	@Override
	public void channelInactive(ChannelHandlerContext ctx) throws Exception {
		this.isActive = false;
		System.out.println("channelInactive");
		stopThread(heartbeat);
		super.channelInactive(ctx);
	}
	/** Interrupts the given thread, swallowing any interrupt failure. */
	private void stopThread(Thread thread) {
		if(thread!= null) {
			try {
				thread.interrupt();
			}catch (Exception e) {
				e.printStackTrace ();
			}
		}
	}
}
|
CCasusensa/SuperSS-Dev | Server Lib/Game Server/GAME/login_manager.hpp | <filename>Server Lib/Game Server/GAME/login_manager.hpp
// Arquivo login_manager.hpp
// Criado em 01/05/2018 as 17:28 por Acrisio
// Definição da classe LoginManager
#pragma once
#ifndef _STDA_LOGIN_MANAGER_HPP
#define _STDA_LOGIN_MANAGER_HPP
#if defined(_WIN32)
#include <Windows.h>
#elif defined(__linux__)
#include "../../Projeto IOCP/UTIL/WinPort.h"
#include <pthread.h>
#include <unistd.h>
#endif
#include "../../Projeto IOCP/PANGYA_DB/pangya_db.h"
#include "login_task.hpp"
#include <vector>
#include "../SESSION/player.hpp"
#include "../../Projeto IOCP/THREAD POOL/thread.h"
namespace stdA {
	// Coordinates asynchronous login tasks for the game server: one
	// LoginTask per connecting player, database replies routed back via
	// SQLDBResponse(), finished tasks reaped by a watcher thread.
	class LoginManager {
	public:
		LoginManager();
		~LoginManager();
		// Creates and registers a login task for _session; the task stays
		// owned by the manager (see deleteTask / the watcher thread).
		LoginTask* createTask(player& _session, KeysOfLogin& _kol, player_info& _pi, ClientVersion& _cv, void* _gs);
		// Unregisters and destroys a task created by createTask().
		void deleteTask(LoginTask* _task);
		// pangya_db callback: dispatches an async DB reply (_msg_id) to
		// the task passed through _arg.
		static void SQLDBResponse(uint32_t _msg_id, pangya_db& _pangya_db, void* _arg);
		// True when the server INI allows the same account to log in twice.
		static bool canSameIDLogin();
		// Client version string the server side expects (from the INI).
		static const std::string& getClientVersionSideServer();
#if defined(_WIN32)
		// Thread trampoline: _lpParameter is the LoginManager instance.
		static DWORD WINAPI CALLBACK _checkTaskFinish(LPVOID _lpParameter);
#elif defined(__linux__)
		static void* _checkTaskFinish(LPVOID _lpParameter);
#endif
	protected:
		// Reads m_same_id_login / m_client_version from the INI file.
		void loadIni();
		// Destroys all pending tasks.
		void clear();
		// Number of currently registered tasks (thread-safe accessor).
		size_t getSize();
	protected:
		std::vector< LoginTask* > v_task;	// tasks in flight, guarded by m_cs
		thread *m_pThread;			// watcher thread running checkTaskFinish()
#if defined(_WIN32)
		DWORD checkTaskFinish();
#elif defined(__linux__)
		void* checkTaskFinish();
#endif
	protected:
		static bool m_same_id_login;
		static std::string m_client_version;
		// Shutdown flag polled by the watcher thread (atomic via
		// Interlocked* on Win32).
#if defined(_WIN32)
		LONG volatile m_check_task_finish_shutdown;
#elif defined(__linux__)
		uint32_t volatile m_check_task_finish_shutdown;
#endif
		// Protects v_task.
#if defined(_WIN32)
		CRITICAL_SECTION m_cs;
#elif defined(__linux__)
		pthread_mutex_t m_cs;
#endif
	};
}
#endif //!_STDA_LOGIN_MANAGER_HPP
|
RokKos/eol-cloth | External/mosek/9.1/tools/examples/c/logistic.c | ////
// Copyright: Copyright (c) MOSEK ApS, Denmark. All rights reserved.
//
// File: logistic.c
//
// Purpose: Implements logistic regression with regulatization.
//
// Demonstrates using the exponential cone and log-sum-exp in Optimizer API.
#include "mosek.h" /* Include the MOSEK definition file. */
#define MSKCALL(x) if (res==MSK_RES_OK) res = (x);
/* Log-stream callback handed to MOSEK: forwards solver output to stdout. */
static void MSKAPI printstr(void *handle,
                            const char str[])
{
  fputs(str, stdout);
} /* printstr */
/* With MSK_BK_FR bounds the numeric bound value is ignored; 0.0 is a
   conventional placeholder for "infinity" in the MOSEK C examples. */
const double inf = 0.0;

// t >= log( 1 + exp(u) )
// t_i >= log( 1 + exp(u_i) ), i = 0..n-1
// Adds auxiliary variables and constraints
/*
 * Models the softplus inequality with exponential cones:
 *   z1 + z2 = 1,  z1 >= exp(v1) with v1 = u - t,  z2 >= exp(v2) with v2 = -t
 * which is equivalent to exp(u-t) + exp(-t) <= 1, i.e. t >= log(1+exp(u)).
 * Each MSK_CT_PEXP cone (x0,x1,x2) enforces x0 >= x1*exp(x2/x1); the q1/q2
 * variables are fixed to 1 to play the x1 role.
 * t and u are the indices of the first of n consecutive task variables.
 */
MSKrescodee softplus(MSKtask_t task, MSKint32t t, MSKint32t u, int n)
{
  MSKint32t nvar, ncon;
  MSKint32t z1, z2, v1, v2, q1, q2;
  MSKint32t zcon, v1con, v2con;
  /* 7 nonzeros per sample i (see the loop below). */
  MSKint32t *subi = (MSKint32t*) calloc(7*n, sizeof(MSKint32t));
  MSKint32t *subj = (MSKint32t*) calloc(7*n, sizeof(MSKint32t));
  MSKrealt  *aval = (MSKrealt*) calloc(7*n, sizeof(MSKrealt));
  int k = 0, i = 0;
  MSKrescodee res = MSK_RES_OK;
  MSKCALL(MSK_getnumvar(task, &nvar));
  MSKCALL(MSK_getnumcon(task, &ncon));
  /* Six blocks of n new variables, laid out back to back after nvar. */
  z1 = nvar, z2 = nvar+n, v1 = nvar+2*n, v2 = nvar+3*n, q1 = nvar+4*n, q2 = nvar+5*n;
  zcon = ncon, v1con = ncon+n, v2con=ncon+2*n;
  MSKCALL(MSK_appendvars(task, 6*n));
  MSKCALL(MSK_appendcons(task, 3*n));
  // Linear constraints
  for(i = 0; i < n; i++)
  {
    // z1 + z2 = 1
    subi[k] = zcon+i;  subj[k] = z1+i;  aval[k] = 1;  k++;
    subi[k] = zcon+i;  subj[k] = z2+i;  aval[k] = 1;  k++;
    // u - t - v1 = 0
    subi[k] = v1con+i; subj[k] = u+i;   aval[k] = 1;  k++;
    subi[k] = v1con+i; subj[k] = t+i;   aval[k] = -1; k++;
    subi[k] = v1con+i; subj[k] = v1+i;  aval[k] = -1; k++;
    // - t - v2 = 0
    subi[k] = v2con+i; subj[k] = t+i;   aval[k] = -1; k++;
    subi[k] = v2con+i; subj[k] = v2+i;  aval[k] = -1; k++;
  }
  MSKCALL(MSK_putaijlist(task, 7*n, subi, subj, aval));
  /* First n constraints fixed to 1 (z1+z2=1), the rest fixed to 0. */
  MSKCALL(MSK_putconboundsliceconst(task, ncon, ncon+n, MSK_BK_FX, 1, 1));
  MSKCALL(MSK_putconboundsliceconst(task, ncon+n, ncon+3*n, MSK_BK_FX, 0, 0));
  // Bounds for variables
  /* z1,z2,v1,v2 free; q1,q2 fixed to 1 (the x1 member of each cone). */
  MSKCALL(MSK_putvarboundsliceconst(task, nvar, nvar+4*n, MSK_BK_FR, -inf, inf));
  MSKCALL(MSK_putvarboundsliceconst(task, nvar+4*n, nvar+6*n, MSK_BK_FX, 1, 1));
  // Cones
  for(i = 0; i < n && res == MSK_RES_OK; i++)
  {
    MSKint32t csub[3];
    csub[0] = z1+i; csub[1] = q1+i; csub[2] = v1+i;
    MSKCALL(MSK_appendcone(task, MSK_CT_PEXP, 0.0, 3, csub));
    csub[0] = z2+i; csub[1] = q2+i; csub[2] = v2+i;
    MSKCALL(MSK_appendcone(task, MSK_CT_PEXP, 0.0, 3, csub));
  }
  free(subi); free(subj); free(aval);
  return res;
}
// Model logistic regression (regularized with full 2-norm of theta)
// X    - n x d matrix of data points
// y    - length n vector classifying training points
// lamb - regularization parameter
//
// Solves min_theta  sum_i log(1+exp(-y_i * theta'x_i)) + lamb*||theta||_2
// using the softplus() exponential-cone model above, and writes the d
// optimal coefficients into thetaVal.
//
// Fix: the MOSEK task was previously leaked; it is now deleted before
// returning.
MSKrescodee logisticRegression(MSKenv_t       env,
                               int            n,    // num samples
                               int            d,    // dimension
                               double        *X,
                               int           *y,
                               double         lamb,
                               double        *thetaVal)   // result
{
  MSKrescodee res = MSK_RES_OK;
  MSKrescodee trm = MSK_RES_OK;
  MSKtask_t task = NULL;
  MSKint32t nvar = 1+d+2*n;
  /* Variable layout: [r; theta(d); t(n); u(n)] */
  MSKint32t r = 0, theta = 1, t = 1+d, u = 1+d+n;
  int i = 0;

  MSKCALL(MSK_maketask(env, 0, 0, &task));
  MSKCALL(MSK_linkfunctotaskstream(task, MSK_STREAM_LOG, NULL, printstr));

  // Variables [r; theta; t; u]
  MSKCALL(MSK_appendvars(task, nvar));
  MSKCALL(MSK_putvarboundsliceconst(task, 0, nvar, MSK_BK_FR, -inf, inf));

  // Constraints: theta'*X +/- u = 0
  MSKCALL(MSK_appendcons(task, n));
  MSKCALL(MSK_putconboundsliceconst(task, 0, n, MSK_BK_FX, 0, 0));

  // Objective lambda*r + sum(t)
  MSKCALL(MSK_putcj(task, r, lamb));
  for(i = 0; i < n && res == MSK_RES_OK; i++)
    MSKCALL(MSK_putcj(task, t+i, 1.0));

  // The X block in theta'*X +/- u = 0; the sign of the u coefficient
  // encodes the class label y[i].
  {
    MSKint32t *subi = (MSKint32t*) calloc(d*n+n, sizeof(MSKint32t));
    MSKint32t *subj = (MSKint32t*) calloc(d*n+n, sizeof(MSKint32t));
    MSKrealt  *aval = (MSKrealt*) calloc(d*n+n, sizeof(MSKrealt));
    int j, k;

    for(i = 0; i < n; i++)
    {
      for(j = 0; j < d; j++)
      {
        k = i * d + j;
        subi[k] = i; subj[k] = theta+j; aval[k] = X[k];
      }
      subi[d*n+i] = i; subj[d*n+i] = u+i;
      if (y[i]) aval[d*n+i] = 1; else aval[d*n+i] = -1;
    }
    MSKCALL(MSK_putaijlist(task, n*d+n, subi, subj, aval));
    free(subi); free(subj); free(aval);
  }

  // Softplus function constraints: t_i >= log(1+exp(u_i))
  MSKCALL(softplus(task, t, u, n));

  // Regularization: r >= ||theta||_2 via a quadratic cone over [r; theta]
  MSKCALL(MSK_appendconeseq(task, MSK_CT_QUAD, 0.0, 1+d, r));

  // Solution
  MSKCALL(MSK_optimizetrm(task, &trm));
  MSKCALL(MSK_solutionsummary(task, MSK_STREAM_MSG));
  MSKCALL(MSK_getxxslice(task, MSK_SOL_ITR, theta, theta+d, thetaVal));

  /* Fix: release the task (previously leaked on every call). */
  if (task != NULL)
    MSK_deletetask(&task);
  return res;
}
/* Example driver: learn a degree-2 polynomial classifier that separates
   the inside of a circle from the outside on a 30x30 grid, then print the
   six fitted coefficients.
   Fix: the MOSEK environment was previously leaked; it is now deleted
   before exiting. */
int main()
{
  MSKenv_t env = NULL;
  MSKrescodee res = MSK_RES_OK;

  MSKCALL(MSK_makeenv(&env, NULL));

  // Test: detect and approximate a circle using degree 2 polynomials
  {
    int n = 30;
    double X[6*30*30];
    int Y[30*30];
    int i,j;
    double theta[6];

    for(i=0; i<n; i++)
    for(j=0; j<n; j++)
    {
      int k = i*n+j;
      double x = -1 + 2.0*i/(n-1);
      double y = -1 + 2.0*j/(n-1);
      /* Feature vector: [1, x, y, xy, x^2, y^2]. */
      X[6*k+0] = 1.0; X[6*k+1] = x; X[6*k+2] = y; X[6*k+3] = x*y;
      X[6*k+4] = x*x; X[6*k+5] = y*y;
      Y[k] = (x*x+y*y>=0.69) ? 1 : 0;
    }

    MSKCALL(logisticRegression(env, n*n, 6, X, Y, 0.1, theta));

    if (res == MSK_RES_OK)
      for(i=0;i<6;i++) printf("%.4f\n", theta[i]);
  }

  /* Fix: release the environment (previously leaked). */
  if (env != NULL)
    MSK_deleteenv(&env);
  return res;
}
alexiosc/megistos | src/system/emud/emud.c | /*****************************************************************************\
** **
** FILE: emud (orig. ttysnoops) **
** AUTHORS: Alexios (hack only) **
** REVISION: B, February 95 **
** PURPOSE: The new emulation daemon **
** NOTES: This is based on ttysnoop 0.12 alpha by <NAME>. **
** Ttysnoop copyrights etc belong to the author. **
** **
** Emud uses a pseudo-tty to emulate a user session. It must **
** be run by the super-user, for obvious reasons of security. **
** Once it forks, it spawns bbslogin. Hence it's execl()'d by **
** bbsgetty to handle the line once a connection is made. **
** **
** All the output sent to the user is logged in **
** /bbs/etc/.log-tty*. A FIFO (/bbs/etc/.emu-tty*) is provided **
** for emulating the user's input (writing to the FIFO has the **
** same effect as the user typing at their keyboard). The PID **
** of the daemon is stored in /bbs/etc/.emud-tty*.pid. **
** **
** Environment variables passed on: CHANNEL contains the tty **
** where the user logged from (without the "/dev/" string). **
** EMUINP contains the pseudo-tty used by the daemon. EMUINP **
** is also the controlling tty of all subsequent processes. **
** **
** LEGALESE: **
** **
** This program is free software; you can redistribute it and/or modify **
** it under the terms of the GNU General Public License as published by **
** the Free Software Foundation; either version 2 of the License, or (at **
** your option) any later version. **
** **
** This program is distributed in the hope that it will be useful, but **
** WITHOUT ANY WARRANTY; without even the implied warranty of **
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU **
** General Public License for more details. **
** **
** You should have received a copy of the GNU General Public License **
** along with this program; if not, write to the Free Software **
** Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. **
** **
\*****************************************************************************/
/*
* $Id: emud.c,v 2.0 2004/09/13 19:44:53 alexios Exp $
*
* $Log: emud.c,v $
* Revision 2.0 2004/09/13 19:44:53 alexios
* Stepped version to recover CVS repository after near-catastrophic disk
* crash.
*
* Revision 1.8 2004/05/21 23:53:42 alexios
* Fixed permissions problem that led to user emulation failing.
*
* Revision 1.7 2004/05/21 20:08:23 alexios
* Removed system(3) command to call mkfifo(1), replaced it with the
* mkfifo(3) function. Also, removed hardwired, system(3)-based chown
* operation to bbs.bbs in favour of using the chown(2) system call and
* the appropriate BBS instance UID and GIDs. This may fix serious
* permission-related bugs.
*
* Revision 1.6 2004/05/03 05:42:49 alexios
* Added code to differentiate between SSH and telnet connections and
* apply the appropriate timeout to each.
*
* Revision 1.5 2004/02/29 16:35:16 alexios
* Ran through megistos-config --oh.
*
* Revision 1.4 2001/04/22 14:49:07 alexios
* Merged in leftover 0.99.2 changes and additional bug fixes.
*
* Revision 1.11 2000/01/06 11:44:46 alexios
* Slight corrections to one comment. Reduced the buffer size from
* 16000 bytes to 8181 (+1) bytes. The extra buffer size is not
* needed and, after all, buffer size was not at all related to
* the overruns experienced during non-blocking I/O.
*
* Revision 1.10 1999/08/13 17:06:35 alexios
* Emud now checks for line timeouts in telnet lines. In the
* future, it may be extended to handle all timeouts, too.
*
* Revision 1.9 1999/07/28 23:15:19 alexios
* Fixed POSIX incompatibility problem.
*
* Revision 1.8 1999/07/18 22:03:34 alexios
* Added debugging info (#ifdef'ed out). Minor fixes and
* cosmetic changes.
*
* Revision 1.7 1998/12/27 16:27:54 alexios
* Added autoconf support. Added support for new channel_getstatus().
* Other minor fixes.
*
* Revision 1.6 1998/08/14 11:59:29 alexios
* Reduced the buffer size to one page (4k). Everything seems
* to work ok now.
*
* Revision 1.5 1998/07/26 21:13:52 alexios
* Moved emud to stable status.
*
* Revision 1.4 1998/07/24 10:27:20 alexios
* Migrated to bbslib 0.6.
*
* Revision 1.3 1998/03/10 10:17:42 alexios
* Added keyboard translation (reverse translation to that used
* for outputting stuff to the user). Rearranged receiving and
* transmitting a bit, so that each end gets data in the right
* encoding. Changed an occurence of "/bbs/bin" to BINDIR.
*
* Revision 1.2 1997/11/06 20:16:19 alexios
* Added GPL legalese to the top of this file.
*
* Revision 1.1 1997/11/03 00:48:51 alexios
* Trimmed includes so that we don't read in useless header files.
* Beautified code a bit. Added a few comments. Added code to read
* in translation tables from disk using function readxlation().
* Added code to translate text on the fly for both user and
* any emulating operators.
*
* Revision 1.0 1997/09/14 18:35:09 alexios
* Initial revision
*
*
*/
static const char rcsinfo[] =
"$Id: emud.c,v 2.0 2004/09/13 19:44:53 alexios Exp $";
/*
ttysnoops.c
ttysnoops sets up a pseudo-tty for the specified program and
redirects its stdin/stdout/stderr to/from the current ctty
and a specified snoop device (usually another tty or a socket)
v0.00 4-1-94 <NAME> - first version
v0.01 6-1-94 "" - added /etc/snooptab file
v0.02 9-1-94 "" - added socket support
v0.10 8-8-94 "" - various bug fixes
v0.11 9-8-94 "" - password authentication added
v0.11a 23-8-94 <NAME> - shadow support hacked in
v0.12 6-9-94 <NAME> - added suspend/resume key
*/
#define WANT_STDIO_H 1
#define WANT_STDLIB_H 1
#define WANT_STDARG_H 1
#define WANT_UNISTD_H 1
#define WANT_STRING_H 1
#define WANT_FCNTL_H 1
#define WANT_GRP_H 1
#define WANT_PWD_H 1
#define WANT_SYS_SHM_H 1
#define WANT_SYS_STAT_H 1
#define WANT_SYS_IOCTL_H 1
#define WANT_SYS_SOCKET_H 1
#define WANT_NETINET_IN_H 1
#define WANT_TIME_H 1
#define WANT_WAIT_H 1
#define WANT_TERMIOS_H 1
#include <bbsinclude.h>
#include <sys/socket.h>
#include <mbk/mbk_sysvar.h>
#include <megistos/config.h>
#include <megistos/bbs.h>
/* #define DEBUG 1 */

/* Size of the shuttle buffer used for all reads/writes in main(). */
#define BUFF_SIZE 8191
char buff[BUFF_SIZE + 1];	/* +1 so it can be NUL-terminated for xlation */
int authpid = -1;
int emuin = -1;			/* fd of the .emu-<tty> input-emulation FIFO */
int fdmax = 0;			/* highest fd, for select() */
int pid;			/* pid of the forked bbslogin child */
int idlzapt = 0;		/* idle timeout in seconds, 0 = disabled */
int via_telnet = 0;		/* nonzero when stdin is a socket (telnet) */
char ptynam[32];		/* leaf name of the allocated pty slave */
char fname[128], tty[32];	/* scratch path buffer; channel leaf name */
char relogfn[256];		/* path of the .relogon-<tty> trigger file */
/* Keyboard (input) and screen (output) character translation tables,
   loaded from XLATIONFILE by readxlation(). */
char kbdxlation[NUMXLATIONS][256];
char xlation[NUMXLATIONS][256];
struct sockaddr_in peer;	/* remote address filled in by checktelnet() */
struct emuqueue *emuq;		/* shared-memory output snapshot (makeshm) */
struct stat st;
/* Saved termios states so the ttys can be restored at exit. */
#define TTY_STORE 16
static struct termios orig_tty_state[TTY_STORE];
static int sttyfds[TTY_STORE];
int bbsuid, bbsgid;		/* BBS instance ids, read from env by makeshm() */
/* Record this daemon's pid in <BBSRUNDIR>/emud-<tty>.pid, readable only
   by root.  Failure to create the file is silently ignored. */
void
storepid ()
{
	char pidfile[256];
	FILE *fp;

	sprintf (pidfile, "%s/emud-%s.pid", mkfname (BBSRUNDIR), tty);
	fp = fopen (pidfile, "w");
	if (fp == NULL)
		return;
	fprintf (fp, "%d", getpid ());
	fclose (fp);
	chmod (pidfile, 0600);
	chown (pidfile, 0, 0);
}
/* Tell the BBS daemon, via its command pipe, that a getty-style process
   with our pid now owns this channel.  An unopenable pipe is ignored. */
void
notifybbsd ()
{
	FILE *pipe_fp = fopen (mkfname (BBSDPIPEFILE), "w");

	if (pipe_fp == NULL)
		return;
	fprintf (pipe_fp, "getty %s %d\n", tty, getpid ());
	fclose (pipe_fp);
}
/* Print a printf-style error message prefixed with "emud: " on stderr and
   terminate the daemon with exit status 1 (running the atexit handlers).
   Fix: the varargs list is now released with va_end(), as the C standard
   requires, before exiting. */
void
errorf (char *fmt, ...)
{
	va_list args;

	va_start (args, fmt);
	fprintf (stderr, "emud: ");
	vfprintf (stderr, fmt, args);
	va_end (args);
	exit (1);
}
/* Return the basename component of path: everything after the last '/',
   or path itself when it contains no slash.  Used to turn a device path
   such as "/dev/ttyS0" into the bare channel name "ttyS0". */
char *
leafname (char *path)
{
/* The #undef below deliberately forces the portable implementation: the
   getpt()-based variant was never finished (note its #error), and the
   #ifndef HAVE_GETPT blocks in find_ptyxx()/fork_pty() below rely on the
   macro staying undefined from this point on. */
#undef HAVE_GETPT
#ifndef HAVE_GETPT
	/* Idiomatic fix: strrchr() replaces the manual last-slash scan. */
	char *slash = strrchr (path, '/');

	return slash != NULL ? slash + 1 : path;
#else
# error "This won't work, please fix lots of things first!"
	char *devdir = DEVDIR "/";

	if (!strncmp (path, devdir, strlen (devdir)))
		return path + strlen (devdir);
	return path;	/* fix: previously fell off the end here (UB) */
#endif /* HAVE_GETPT */
}
/* Mark every slot of the saved-termios table as unused so stty_raw() can
   start recording original terminal states. */
void
stty_initstore (void)
{
	int slot;

	for (slot = 0; slot < TTY_STORE; slot++)
		sttyfds[slot] = -1;
}
/* set tty on fd into raw mode */
/* Saves the terminal's current termios state (so stty_orig() can restore
   it) and switches it to raw 8-bit mode: no echo, no signal keys, no
   canonical line editing, no input/output post-processing, and read()
   returning as soon as one byte is available.
   Returns 0 on success, -1 if tcgetattr/tcsetattr fails. */
int
stty_raw (int fd)
{
	struct termios tty_state;
	int i;

	if (tcgetattr (fd, &tty_state) < 0)
		return (-1);
	/* Remember the original state in the first free slot.  If all
	   TTY_STORE slots are taken, the state is silently not saved. */
	for (i = 0; i < TTY_STORE; i++)
		if (sttyfds[i] == -1) {
			orig_tty_state[i] = tty_state;
			sttyfds[i] = fd;
			break;
		}
	tty_state.c_lflag &= ~(ICANON | IEXTEN | ISIG | ECHO);
	tty_state.c_iflag &= ~(ICRNL | INPCK | ISTRIP | IXON | BRKINT);
	tty_state.c_oflag &= ~OPOST;
	tty_state.c_cflag |= CS8;
	tty_state.c_cc[VMIN] = 1;	/* return after a single byte... */
	tty_state.c_cc[VTIME] = 0;	/* ...with no inter-byte timer */
	if (tcsetattr (fd, TCSAFLUSH, &tty_state) < 0)
		return (-1);
	return (0);
}
/* Restore every terminal recorded by stty_raw() to its original termios
   state and release its slot.  Registered with atexit() in main(). */
void
stty_orig (void)
{
	int slot;

	for (slot = 0; slot < TTY_STORE; slot++) {
		if (sttyfds[slot] == -1)
			continue;
		tcsetattr (sttyfds[slot], TCSAFLUSH, &orig_tty_state[slot]);
		sttyfds[slot] = -1;
	}
}
/* Create the per-channel SysV shared memory segment that holds the
   struct emuqueue output snapshot, record its id in .shmid-<tty>, attach
   it as emuq, and hand ownership of segment and files to the BBS
   instance's uid/gid (taken from the environment set up by rc.bbs). */
void
makeshm (char *tty)
{
	char fname[256];
	FILE *fp;
	int shmid = 0;
	struct shmid_ds buf;
	char *s;
	if ((s = getenv ("BBSUID")) == NULL) {
		error_fatal ("Environment improperly set. The rc.bbs script is broken.");
	} else bbsuid = atoi (s);
	if ((s = getenv ("BBSGID")) == NULL) {
		error_fatal ("Environment improperly set. The rc.bbs script is broken.");
	} else bbsgid = atoi (s);
	/* Create shared memory */
	/* The pid file written by storepid() doubles as the ftok() key path.
	   With IPC_EXCL, a pre-existing segment makes us return silently. */
	sprintf (fname, "%s/emud-%s.pid", mkfname (BBSRUNDIR), tty);
	if ((shmid =
	     shmget (ftok (fname, 'E'), 16384,
		     IPC_CREAT | IPC_EXCL | 0660)) == -1)
		return;
	sprintf (fname, "%s/.shmid-%s", mkfname (BBSETCDIR), tty);
	/* Write shared memory ID to disk */
	if ((fp = fopen (fname, "w")) == NULL)
		return;
	fprintf (fp, "%012d", shmid);
	fclose (fp);
	/* Attach to shared memory */
	/* NOTE(review): shmat() reports failure with (void *)-1, not NULL,
	   so this check may never fire — confirm and fix separately. */
	if ((emuq = (struct emuqueue *) shmat (shmid, NULL, 0)) == NULL) {
		errorf ("unable to attach shared memory.\n");
	}
	/* Create the emuqueue data structure */
	bzero (emuq, sizeof (struct emuqueue));
	emuq->xlation = -1;	/* -1: no translation table selected yet */
	/* Make the BBS instance UID and GID the owner of the shared memory
	 * block */
	if (shmctl (shmid, IPC_STAT, &buf) < 0) {
		errorf ("unable to IPC_STAT shared memory (errno=%d)", errno);
	} else {
		buf.shm_perm.uid = bbsuid;
		buf.shm_perm.gid = bbsgid;
		if (shmctl (shmid, IPC_SET, &buf) < 0) {
			errorf ("unable to IPC_SET shared memory (errno=%d)",
				errno);
		}
	}
	/* Mark the block for destruction when we're done with it. */
	shmctl (shmid, IPC_RMID, &buf);
	/* Fix file permissions */
	chown (fname, bbsuid, bbsgid);
	sprintf (fname, "%s/emud-%s.pid", mkfname (BBSRUNDIR), tty);
	chown (fname, bbsuid, bbsgid);
	chmod (fname, 0444);
}
/* Tear the session down: mark the channel as logged out, remove the
   user's online marker files, leave a "_<user>" logout marker, and kill
   the whole process group.  Installed both as the SIGHUP/SIGINT/SIGQUIT
   handler and as an atexit() handler in main(). */
void
gracefulexit ()
{
	char fname[256];
	channel_status_t status;
	putchar (0);
	channel_getstatus (tty, &status);
	mod_init (INI_OUTPUT | INI_SYSVARS | INI_TTYNUM);
	sprintf (fname, "%s/%s", mkfname (ONLINEDIR), status.user);
	unlink (fname);
	sprintf (fname, "%s/[SIGNUP-%s]", mkfname (ONLINEDIR), tty);
	unlink (fname);
	sprintf (fname, "%s/_%s", mkfname (ONLINEDIR), status.user);
	close (creat (fname, 0660));
	status.baud = 0;
	status.result = LSR_LOGOUT;
	status.user[0] = 0;
	channel_setstatus (tty, &status);
	/* Kill every process in our group (the bbslogin child included). */
	kill (-getpid (), SIGKILL);
	/* NOTE(review): this function is also registered with atexit(), and
	   calling exit() from within an atexit handler is undefined behavior
	   in C; in practice the SIGKILL above ends the group first, but this
	   should be confirmed and untangled. */
	exit (0);
}
/* find & open a pty to be used by the pty-master */
/* Scans the BSD-style names /dev/pty[srpq][f..0] for an openable master.
   On success the buffer is rewritten in place to the matching slave name
   ("pty.." -> "tty..") and the master fd is returned; -1 on failure.
   Fix: the getpt() branch previously fell off the end of this non-void
   function when getpt() failed, which is undefined behavior; it now
   returns -1. */
int
find_ptyxx (char *ptyname)
{
#ifndef HAVE_GETPT
	int fd, i, j;
	static char *s1 = "srpq", *s2 = "fedcba9876543210";

	strcpy (ptyname, DEVDIR "/ptyxx");
	for (i = 0; s1[i]; i++) {
		ptyname[8] = s1[i];	/* the group letter */
		for (j = 0; s2[j]; j++) {
			ptyname[9] = s2[j];	/* the unit digit */
			if ((fd = open (ptyname, O_RDWR)) >= 0) {
				ptyname[5] = 't';	/* master -> slave name */
				return (fd);
			}
			/* No such device node at all: stop scanning. */
			if (errno == ENOENT)
				return (-1);
		}
	}
	return (-1);
#else
	int fd = getpt ();

	if (fd >= 0)
		return fd;
	return (-1);	/* fix: previously fell through with no return */
#endif /* HAVE_GETPT */
}
/* Open the slave side of the pty pair for the client process.  The slave
   node is first chowned to us (group "tty" when it exists) and given
   mode rw--w----.  On failure the master fd is closed and -1 returned. */
int
find_ttyxx (char *ttyname, int ptyfd)
{
	struct group *grp = getgrnam ("tty");
	int gid = (grp != NULL) ? grp->gr_gid : -1;
	int fd;

	chown (ttyname, getuid (), gid);
	chmod (ttyname, S_IRUSR | S_IWUSR | S_IWGRP);
	fd = open (ttyname, O_RDWR);
	if (fd >= 0)
		return fd;
	close (ptyfd);
	return (-1);
}
/* fork off the pty-client and redirect its stdin/out/err to the pty */
/* Copies the termios settings and window size of the calling tty onto a
   freshly allocated pty pair and forks.  In the child, a new session is
   started and the pty slave becomes the controlling terminal on fds
   0/1/2.  Returns the child's pid in the parent and 0 in the child; the
   master fd is returned through ptyfd and the slave's leaf name through
   ttynam (also exported as $EMUPTY).  Any failure aborts via errorf(). */
int
fork_pty (int *ptyfd, char *ttynam)
{
	struct termios term;
	struct winsize twin;
	int ttyfd;
	char name[32];
	tcgetattr (STDIN_FILENO, &term);
	ioctl (STDIN_FILENO, TIOCGWINSZ, (char *) &twin);
	if ((*ptyfd = find_ptyxx (name)) < 0)
		errorf ("No free pty.\n");
	strcpy (ttynam, leafname (name));
	setenv ("EMUPTY", ttynam, 1);
	if ((pid = fork ()) < 0)
		errorf ("Can't fork.\n");
	if (pid == 0) {		/* child */
		/* New session: the slave we open next becomes our ctty. */
		if (setsid () < 0)
			errorf ("setsid failed.\n");
		if ((ttyfd = find_ttyxx (name, *ptyfd)) < 0)
			errorf ("can't open tty.\n");
		close (*ptyfd);
		/* Mirror the real terminal's settings onto the slave. */
		if (tcsetattr (ttyfd, TCSANOW, &term) < 0)
			errorf ("can't set termios.\n");
		if (ioctl (ttyfd, TIOCSWINSZ, &twin) < 0)
			errorf ("can't set winsize.\n");
		if (dup2 (ttyfd, STDIN_FILENO) != STDIN_FILENO)
			errorf ("can't dup2 into stdin.\n");
		if (dup2 (ttyfd, STDOUT_FILENO) != STDOUT_FILENO)
			errorf ("can't dup2 into stdout.\n");
		if (dup2 (ttyfd, STDERR_FILENO) != STDERR_FILENO)
			errorf ("can't dup2 into stderr.\n");
		if (ttyfd > STDERR_FILENO)
			close (ttyfd);
	}
	return (pid);
}
/* Remove any stale injoth FIFO for this channel and make sure the input
   emulation FIFO (.emu-<tty>) exists, world-writable and owned by the
   BBS instance ids.  An already existing FIFO is kept untouched. */
void
makepipe ()
{
	struct stat s;
	char fname[256];
	char command[256];	/* NOTE(review): appears unused — confirm */
	sprintf (fname, "%s/.injoth-%s", mkfname (ONLINEDIR), tty);
	unlink (fname);
	sprintf (fname, "%s/.emu-%s", mkfname (EMUFIFODIR), tty);
	/* Existing FIFO: leave mode and ownership as they are. */
	if (!stat (fname, &s)) return;
	if (mkfifo (fname, 0666) != 0) {
		errorf ("unable to create pipe %s: %s", fname, strerror (errno));
	}
	if (chmod (fname, 0666) != 0) {
		errorf ("unable to chmod(2) %s: %s", fname, strerror (errno));
	}
	/* NOTE(review): bbs_uid/bbs_gid are not the file-scope bbsuid/bbsgid
	   that makeshm() fills in; presumably they come from the bbs headers
	   — confirm they are initialized before this function runs. */
	if (chown (fname, bbs_uid, bbs_gid) != 0) {
		errorf ("unable to chown(2) %s: %s", fname, strerror (errno));
	}
}
/* (Re)load the output and keyboard character translation tables from
   XLATIONFILE into the xlation/kbdxlation globals.  Any failure aborts
   the daemon via errorf().  Also installed as the SIGUSR1 handler so the
   tables can be refreshed at runtime. */
void
readxlation ()
{
	FILE *fp = fopen (mkfname (XLATIONFILE), "r");

	if (fp == NULL) {
		int saved = errno;

		errorf ("unable to open %s (errno=%d)\n",
			mkfname (XLATIONFILE), saved);
	}
	if (fread (xlation, sizeof (xlation), 1, fp) != 1) {
		int saved = errno;

		errorf ("unable to read %s (errno=%d)\n",
			mkfname (XLATIONFILE), saved);
	}
	if (fread (kbdxlation, sizeof (kbdxlation), 1, fp) != 1) {
		int saved = errno;

		errorf ("unable to read %s (errno=%d)\n",
			mkfname (XLATIONFILE), saved);
	}
	fclose (fp);
}
/* Work out the idle timeout (seconds) for this channel and store it in
   idlzapt; 0 disables timeout handling in main().  Only channels flagged
   TTF_TELNET get a timeout.  An SSH connection is recognized by BAUD=-1
   in the environment and uses the IDLZAPS sysvar; plain telnet uses
   IDLZAPT.  Both sysvars are given in minutes, hence the *60. */
static void
inittimeout ()
{
	char * baud = getenv("BAUD");
	idlzapt = 0;
	mod_init (INI_TTYNUM);
	if ((chan_getnum (tty) >= 0) &&
	    (chan_last->flags & TTF_TELNET)) {
		promptblock_t *msg = msg_open ("sysvar");
		/* Is this an SSH connection? */
		if ((baud != NULL) && (strcmp (baud, "-1") == 0))
			idlzapt = msg_int (IDLZAPS, 0, 32767) * 60;
		/* No. It's a telnet connection. */
		else
			idlzapt = msg_int (IDLZAPT, 0, 32767) * 60;
		msg_close (msg);
	}
	mod_done (INI_TTYNUM);
}
/* Check if we're running on a socket (i.e. serving a telnet connection).
   Sets via_telnet and fills in the peer address.
   Fix: getpeername() takes a socklen_t * length argument and a
   struct sockaddr * — passing an int * and an unconverted sockaddr_in *
   is a type mismatch on modern libcs. */
static void
checktelnet ()
{
	socklen_t len = sizeof (peer);
	int res;

	/* Try to get information on the other end. If we fail, then we'll
	   just assume this isn't a telnet connection. */
	res = getpeername (0, (struct sockaddr *) &peer, &len);
	via_telnet = (res == 0);
	if (via_telnet) {
		printf ("CONNECTING THROUGH TELNET.\n\n\n\n");
		sleep (5);
	}
}
/* main program */
/* Daemon entry point.  Must run as root on a tty.  Sets up the channel
   (pid file, shared memory, FIFO, timeouts, signal handlers), forks
   bbslogin on a pty pair, then shuttles data in a select() loop between:
   stdin (the user, via kbdxlation), the .emu FIFO (operator-emulated
   input, via xlation) and the pty master (session output, mirrored both
   to the user and to the shared-memory emuq ring for snooping).  The
   .relogon-<tty> trigger file restarts bbslogin; inactivity beyond
   idlzapt seconds ends the session via gracefulexit(). */
int
main (int argc, char *argv[])
{
	time_t tmout = time (NULL);	/* time of last user keystroke */
	fd_set readset;
	int ptyfd, n, sel = 0;
	if (getuid ())
		errorf ("Only root can execute this daemon.\n");
	if (!isatty (STDIN_FILENO))
		errorf ("stdin is not a tty.\n");
	/* do init stuff */
	readxlation ();
	strcpy (tty, leafname (ttyname (STDIN_FILENO)));
	storepid ();
	makeshm (tty);
	inittimeout ();
	makepipe ();
	checktelnet ();
	setenv ("CHANNEL", tty, 1);
	setenv ("BAUD", "0", 0);	/* Why is this here? */
	signal (SIGPIPE, SIG_IGN);
#if 0
	/* This is a no-no under POSIX and Linux. Look at signal(2). */
	/* signal (SIGCHLD, SIG_IGN); */
#endif
	signal (SIGHUP, gracefulexit);
	signal (SIGINT, gracefulexit);
	signal (SIGQUIT, gracefulexit);
	signal (SIGUSR1, readxlation);
	stty_initstore ();
	atexit (stty_orig);
	atexit (gracefulexit);
	/* fork off the client and load the new image */
relogon:
	if (fork_pty (&ptyfd, ptynam) == 0) {
		/* exec the real pty-client program */
		setenv ("BBSPREFIX", mkfname (""), 1);
		setenv ("PREFIX", mkfname (""), 1);
		execl (mkfname (BINDIR "/bbslogin"), "bbslogin", "-", NULL);
		errorf ("can't exec bbslogin\n");
	}
	notifybbsd ();
	/* put stdin into raw mode */
	stty_raw (STDIN_FILENO);
	/* calc (initial) max file descriptor to use in select() */
	fdmax = max (STDIN_FILENO, ptyfd);
	/* open snoop-device and put it into raw mode */
	/* NOTE(review): the FIFO is opened from BBSETCDIR here but was
	   created in EMUFIFODIR by makepipe() — presumably the two macros
	   expand to the same directory; confirm. */
	sprintf (fname, "%s/.emu-%s", mkfname (BBSETCDIR), tty);
	if ((emuin = open (fname, O_RDONLY | O_NDELAY)) < 0)
		errorf ("can't open inp_buffer emulation FIFO %s\n", fname);
	fdmax = max (fdmax, emuin);
	sprintf (relogfn, "%s/.relogon-%s", mkfname (ONLINEDIR), tty);
	while (1) {
		/* Relogon trigger file: kill the child and restart it. */
		if (!stat (relogfn, &st)) {
			bbsdcommand ("relogon", tty, "");
			unlink (relogfn);
			if (!kill (pid, SIGKILL))
				wait (NULL);
			stty_orig ();
			channel_setresult (tty, LSR_RELOGON);
			goto relogon;
		}
		do {
			struct timeval tv;
			tv.tv_sec = 5;
			tv.tv_usec = 0;
			FD_ZERO (&readset);
			FD_SET (STDIN_FILENO, &readset);
			FD_SET (ptyfd, &readset);
			FD_SET (emuin, &readset);
			errno = 0;
			sel = select (fdmax + 1, &readset, NULL, NULL, &tv);
			tv.tv_sec = errno;
#ifdef DEBUG
			fprintf (stderr, "\n\r---(res=%d err=%d,%s) ", sel,
				 tv.tv_sec, strerror (tv.tv_sec));
			if (FD_ISSET (STDIN_FILENO, &readset))
				fprintf (stderr, "[userinp] ");
			if (FD_ISSET (ptyfd, &readset))
				fprintf (stderr, "[output] ");
			if (FD_ISSET (emuin, &readset))
				fprintf (stderr, "[emuinp] ");
#endif
		} while (sel == -1);
		if (sel == 0) {
			/* For efficiency, we only check for timeouts whenever select times out
			   itself. It has a 5s timeout which is good enough granularity for
			   checking timeouts measured in minutes. This reduces the number of
			   time() system calls. */
			if (idlzapt)
				if (tmout + idlzapt < time (NULL))
					gracefulexit ();
			continue;
		}
#ifdef DEBUG
		fprintf (stderr, "1");
#endif
		if (sel == -1 /* && errno != EINTR */ ) {
			printf ("select failed; errno = %d\n", errno);
			fflush (stdout);
		}
#ifdef DEBUG
		fprintf (stderr, "2");
#endif
		/* User keystrokes: translate (keyboard table) and forward to
		   the session; this is the only path that resets the idle
		   timer. */
		if (FD_ISSET (STDIN_FILENO, &readset)) {
			n = read (STDIN_FILENO, buff, BUFF_SIZE);
			if (emuq->xlation > 0) {
				buff[n] = 0;
				if (emuq->xlation > 0)
					faststgxlate (buff,
						      kbdxlation[emuq->
								 xlation]);
			}
			write (ptyfd, buff, n);
			tmout = time (NULL);
		}
#ifdef DEBUG
		fprintf (stderr, "3");
#endif
		/* Operator-emulated input from the FIFO; EOF means the last
		   writer closed it, so reopen for the next one. */
		if ((emuin >= 0) && FD_ISSET (emuin, &readset)) {
			int i;
			errno = 0;
			n = read (emuin, buff, BUFF_SIZE);
			i = errno;
#ifdef DEBUG
			fprintf (stderr, "3A(%d,%d,%s)", n, i, strerror (i));
#endif
			if (n == 0) {
#ifdef DEBUG
				fprintf (stderr, "[closing]");
#endif
				close (emuin);
				sprintf (fname, "%s/.emu-%s",
					 mkfname (BBSETCDIR), tty);
				if ((emuin =
				     open (fname, O_RDONLY | O_NDELAY)) < 0)
					errorf
					    ("can't open inp_buffer emulation FIFO %s\n",
					     fname);
				fdmax = max (fdmax, emuin);
			} else {
				if (emuq->xlation > 0) {
					buff[n] = 0;
					if (emuq->xlation > 0)
						faststgxlate (buff,
							      xlation[emuq->
								      xlation]);
				}
				write (ptyfd, buff, n);
			}
		}
#ifdef DEBUG
		fprintf (stderr, "4");
#endif
		/* Session output: mirror into the shared-memory ring (for
		   snooping), translate, and send on to the user.  EOF from
		   the pty means the child is gone: exit (atexit handlers
		   run gracefulexit/stty_orig). */
		if (FD_ISSET (ptyfd, &readset)) {
			register int i;
			if ((n = read (ptyfd, buff, BUFF_SIZE)) < 1)
				exit (0);
#ifdef DEBUG
			fprintf (stderr, "5");
#endif
			for (i = 0; i < n; i++) {
				emuq->queue[emuq->index] = buff[i];
				emuq->index =
				    (emuq->index + 1) % sizeof (emuq->queue);
			}
#ifdef DEBUG
			fprintf (stderr, "6");
#endif
			if (emuq->xlation > 0) {
				buff[n] = 0;
				if (emuq->xlation > 0)
					faststgxlate (buff,
						      xlation[emuq->xlation]);
			}
#ifdef DEBUG
			fprintf (stderr, "7");
#endif
			write (STDOUT_FILENO, buff, n);
		}
	}
}
/* End of File */
|
predictiveworks/deep-graph | src/main/scala/de/kp/works/graph/analytics/Closeness.scala | package de.kp.works.graph.analytics
/*
* Copyright (c) 2019 - 2021 Dr. Krusche & Partner PartG. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
* @author <NAME>, Dr. Krusche & Partner PartG
*
*/
import ml.sparkling.graph.api.operators.IterativeComputation.wholeGraphBucket
import ml.sparkling.graph.api.operators.measures.VertexMeasureConfiguration
import ml.sparkling.graph.operators.measures.vertex.closeness.{Closeness => ClosenessML}
import org.apache.spark.graphx._
import org.apache.spark.sql.{DataFrame, Row}
import scala.reflect.ClassTag
/**
* This is the base wrapper for the Sparkling Graph
* [Closeness] Operator. Note, with respect to an
* integration into a GraphFrame based environment,
* this class specifies the 2nd computation stage.
*
* This class requires a vertex that is specified as
* (VertexId [=Long], Any), and an edge specified as
* (VertexId, VertexId, Numeric).
*
*/
class Closeness[VD: ClassTag, ED: ClassTag]
  extends BaseAnalytics {
  // Defaults to computing over the whole graph as a single bucket.
  private var vertexMeasureConfiguration: VertexMeasureConfiguration[VD, ED] =
    new VertexMeasureConfiguration[VD, ED]( wholeGraphBucket[VD, ED])
  /**
   * Fluent setter overriding the default whole-graph vertex measure
   * configuration used by [[transform]].
   */
  def setVertexMeasureCfg(value:VertexMeasureConfiguration[VD, ED]): Closeness[VD, ED] = {
    vertexMeasureConfiguration = value
    this
  }
  /**
   * Closeness centrality measure is defined as inverted sum
   * of distances (d(y,x)) from given node to all other nodes.
   *
   * Distance is defined as length of shortest path.
   *
   * Measure can be understood as how far away from other nodes
   * given node is located.
   *
   * Returns a DataFrame with one row per vertex, following the
   * (vertex id: Long, measure: Double) measure schema.
   */
  def transform(g:Graph[VD, ED])(implicit num:Numeric[ED]):DataFrame = {
    /**
     * This method computes the closeness of the vertices
     * of the provided graph.
     */
    val result:Graph[Double, ED] = ClosenessML.compute(g, vertexMeasureConfiguration)
    /*
     * The result of this method represents a VertexRDD
     * with (VertexId, Double (closeness)
     */
    val rdd = result.vertices
      .map{case(vid:VertexId, measure:Double) =>
        Row(vid.toLong, measure)
      }
    session.createDataFrame(rdd, measureSchema)
  }
}
|
mholtrop/slic | src/StdHepEventSource.cc | <reponame>mholtrop/slic
#include "StdHepEventSource.hh"
// SLIC
#include "LcioManager.hh"
#include "MCParticleManager.hh"
// LCIO
#include "UTIL/LCTOOLS.h"
namespace slic {
// Event source that feeds StdHep files to the simulation; the actual
// reading is delegated to a StdHepGenerator created per input file.
StdHepEventSource::StdHepEventSource(const std::string& fname) :
        EventSourceWithInputFile("StdHepEventSource", fname), m_eventGenerator(0) {
}
// Hand the already-read event data to Geant4's G4Event.
void StdHepEventSource::generate(G4Event* anEvent) {
    //std::cout << "StdHepEventSource::generate" << std::endl;
    m_eventGenerator->generateEvent(anEvent);
}
// open the current file
void StdHepEventSource::open() {
    /* Initialize the event generator using the current file name. */
    m_eventGenerator = new StdHepGenerator(getFilename());
    m_fileIsOpen = true;
}
// close the current file
void StdHepEventSource::close() {
    delete m_eventGenerator;
    m_fileIsOpen = false;
}
// read the next event
// A null particle collection after the read signals end of file (m_eof).
void StdHepEventSource::readNextEvent() {
    m_eventGenerator->readNextEvent();
    if (m_eventGenerator->getCurrentParticleCollection() == 0) {
        m_eof = true;
    }
}
// Dump the current MCParticle collection via the LCIO utilities, if any.
void StdHepEventSource::printCurrentEvent() {
    if (MCParticleManager::instance()->getMCParticleCollection() != NULL)
        UTIL::LCTOOLS::printMCParticles(MCParticleManager::instance()->getMCParticleCollection());
}
void StdHepEventSource::beginRun(const G4Run* aRun) {
    // do superclass setup
    EventSourceWithInputFile::beginRun(aRun);
}
void StdHepEventSource::beginEvent(const G4Event* anEvent) {
    //std::cout << "StdHepEventSource::beginEvent" << std::endl;
    // read an event
    EventSourceWithInputFile::beginEvent(anEvent);
}
}
|
fedepal450/ROAR-RL_FullControl | ROAR_Jetson/vive/models.py | <gh_stars>1-10
from pydantic import BaseModel, Field, validator
import math
class ViveTrackerMessage(BaseModel):
    """Single pose/velocity sample reported by one Vive tracker device."""

    valid: int = Field(default=0)
    x: float = Field(default=0)
    y: float = Field(default=0)
    z: float = Field(default=0)
    roll: float = Field(default=0)
    pitch: float = Field(default=0)
    yaw: float = Field(default=0)
    device_name: str = Field(default="tracker_1")
    vel_x: float = Field(default=0)
    vel_y: float = Field(default=0)
    vel_z: float = Field(default=0)

    def __repr__(self):
        # Three groups (position | attitude | velocity), each value rounded
        # to 5 decimal places for readability.
        position = f"x: {round(self.x, 5)}, y: {round(self.y, 5)}, z: {round(self.z, 5)}"
        attitude = f"pitch: {round(self.pitch, 5)}, yaw: {round(self.yaw, 5)}, roll: {round(self.roll, 5)}"
        velocity = f"vel_x: {round(self.vel_x, 5)}, vel_y: {round(self.vel_y, 5)}, vel_z: {round(self.vel_z, 5)}"
        return f"{self.device_name} -> {position} | {attitude} | {velocity}"

    def __str__(self):
        return repr(self)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.