code stringlengths 3 1.01M | repo_name stringlengths 5 116 | path stringlengths 3 311 | language stringclasses 30 values | license stringclasses 15 values | size int64 3 1.01M |
|---|---|---|---|---|---|
package com.axj.apiw.model;
import java.util.Date;
/**
 * Model bean for a "first reply" text record: the text payload plus owner,
 * WeChat account, soft-delete flag and create/modify timestamps.
 * Plain mutable JavaBean — no validation is performed in the accessors.
 */
public class FirstReplyText {

    // Primary key of this record.
    private long id;
    // Id of the owning user.
    private long userId;
    // WeChat account identifier the reply text belongs to.
    private String wechatId;
    // Creation timestamp.
    private Date createTime;
    // Last-modification timestamp.
    private Date modifyTime;
    // Soft-delete marker (semantics defined by the persistence layer).
    private int delFlag;
    // The reply text itself.
    private String txt;

    /** @return the primary key */
    public long getId() {
        return id;
    }

    /** @param id the primary key */
    public void setId(long id) {
        this.id = id;
    }

    /** @return the owning user's id */
    public long getUserId() {
        return userId;
    }

    /** @param userId the owning user's id */
    public void setUserId(long userId) {
        this.userId = userId;
    }

    /** @return the WeChat account identifier */
    public String getWechatId() {
        return wechatId;
    }

    /** @param wechatId the WeChat account identifier */
    public void setWechatId(String wechatId) {
        this.wechatId = wechatId;
    }

    /** @return the creation timestamp */
    public Date getCreateTime() {
        return createTime;
    }

    /** @param createTime the creation timestamp */
    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }

    /** @return the last-modification timestamp */
    public Date getModifyTime() {
        return modifyTime;
    }

    /** @param modifyTime the last-modification timestamp */
    public void setModifyTime(Date modifyTime) {
        this.modifyTime = modifyTime;
    }

    /** @return the soft-delete flag */
    public int getDelFlag() {
        return delFlag;
    }

    /** @param delFlag the soft-delete flag */
    public void setDelFlag(int delFlag) {
        this.delFlag = delFlag;
    }

    /** @return the reply text */
    public String getTxt() {
        return txt;
    }

    /** @param txt the reply text */
    public void setTxt(String txt) {
        this.txt = txt;
    }
}
| xiangtao/dxwx-transfer | src/main/java/com/axj/apiw/model/FirstReplyText.java | Java | apache-2.0 | 1,115 |
using BrockAllen.MembershipReboot;
using BrockAllen.MembershipReboot.Ef;
using BrockAllen.MembershipReboot.Relational;
namespace Ef6CoreTest.Db
{
/// <summary>
/// EF6 database context for MembershipReboot's relational user-account and group model.
/// </summary>
public class Ef6DbContext : MembershipRebootDbContext<RelationalUserAccount, RelationalGroup>
{
    /// <summary>
    /// Uses the "MembershipReboot" connection-string name by chaining to the
    /// string-accepting constructor.
    /// </summary>
    public Ef6DbContext()
        : this("MembershipReboot")
    {
    }

    /// <summary>
    /// Accepts a connection-string name or a raw connection string, forwarded to the base context.
    /// </summary>
    public Ef6DbContext(string nameOrConnectionString)
        : base(nameOrConnectionString)
    {
    }
}
} | kwaxi/Ef6CoreTest | Ef6CoreTest.Db/Ef6DbContext.cs | C# | apache-2.0 | 449 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="zh">
<head>
<!-- Generated by javadoc (1.8.0_91) on Thu Jul 13 16:16:26 CST 2017 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>com.webside.thread (webside 0.0.1-SNAPSHOT API)</title>
<meta name="date" content="2017-07-13">
<link rel="stylesheet" type="text/css" href="../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="com.webside.thread (webside 0.0.1-SNAPSHOT API)";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>您的浏览器已禁用 JavaScript。</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="跳过导航链接">跳过导航链接</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="导航">
<li><a href="../../../overview-summary.html">概览</a></li>
<li class="navBarCell1Rev">程序包</li>
<li>类</li>
<li><a href="package-use.html">使用</a></li>
<li><a href="package-tree.html">树</a></li>
<li><a href="../../../deprecated-list.html">已过时</a></li>
<li><a href="../../../index-all.html">索引</a></li>
<li><a href="../../../help-doc.html">帮助</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../com/webside/shiro/util/package-summary.html">上一个程序包</a></li>
<li><a href="../../../com/webside/user/controller/package-summary.html">下一个程序包</a></li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?com/webside/thread/package-summary.html" target="_top">框架</a></li>
<li><a href="package-summary.html" target="_top">无框架</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../allclasses-noframe.html">所有类</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h1 title="程序包" class="title">程序包 com.webside.thread</h1>
</div>
<div class="contentContainer">
<ul class="blockList">
<li class="blockList">
<table class="typeSummary" border="0" cellpadding="3" cellspacing="0" summary="类概要表, 列表类和解释">
<caption><span>类概要</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">类</th>
<th class="colLast" scope="col">说明</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="../../../com/webside/thread/ThreadPool.html" title="com.webside.thread中的类">ThreadPool</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="../../../com/webside/thread/WorkerItem.html" title="com.webside.thread中的类">WorkerItem</a></td>
<td class="colLast"> </td>
</tr>
</tbody>
</table>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="跳过导航链接">跳过导航链接</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="导航">
<li><a href="../../../overview-summary.html">概览</a></li>
<li class="navBarCell1Rev">程序包</li>
<li>类</li>
<li><a href="package-use.html">使用</a></li>
<li><a href="package-tree.html">树</a></li>
<li><a href="../../../deprecated-list.html">已过时</a></li>
<li><a href="../../../index-all.html">索引</a></li>
<li><a href="../../../help-doc.html">帮助</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../com/webside/shiro/util/package-summary.html">上一个程序包</a></li>
<li><a href="../../../com/webside/user/controller/package-summary.html">下一个程序包</a></li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?com/webside/thread/package-summary.html" target="_top">框架</a></li>
<li><a href="package-summary.html" target="_top">无框架</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../allclasses-noframe.html">所有类</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2017. All rights reserved.</small></p>
</body>
</html>
| ofpteam/ofp | target/apidocs/com/webside/thread/package-summary.html | HTML | apache-2.0 | 5,093 |
'use strict';
/**
* Module dependencies.
*/
const path = require('path');
const extend = require('util')._extend;
const development = require('./env/development');
const test = require('./env/test');
const production = require('./env/production');
const notifier = {
service: 'postmark',
APN: false,
email: true, // true
actions: ['comment'],
tplPath: path.join(__dirname, '..', 'app/mailer/templates'),
key: 'POSTMARK_KEY'
};
const defaults = {
root: path.join(__dirname, '..'),
notifier: notifier
};
/**
* Expose
*/
module.exports = {
development: extend(development, defaults),
test: extend(test, defaults),
production: extend(production, defaults)
}[process.env.NODE_ENV || 'development'];
| muhammad-saleh/weightlyio | server/config/config.js | JavaScript | apache-2.0 | 749 |
<!DOCTYPE html>
<html lang="en">
<head><link rel="stylesheet" href="simple.css"></head>
<body><div>hi there</div></body>
</html> | applitools/demo | DomSnapshot/simple.html | HTML | apache-2.0 | 128 |
package opc
// White
// Set all pixels to white.
import (
"github.com/longears/pixelslinger/colorutils"
"github.com/longears/pixelslinger/config"
"github.com/longears/pixelslinger/midi"
)
// MakePatternWhite builds the "white" pattern thread: every pixel gets the same
// color, chosen from the MIDI hue knob and faded toward pure white by the morph
// knob. The locations slice is accepted for API symmetry but not used.
func MakePatternWhite(locations []float64) ByteThread {
	return func(bytesIn chan []byte, bytesOut chan []byte, midiState *midi.MidiState) {
		for frame := range bytesIn {
			// Knob values arrive as 0..127; normalize to 0..1.
			hue := float64(midiState.ControllerValues[config.HUE_KNOB]) / 127.0
			whiteness := float64(midiState.ControllerValues[config.MORPH_KNOB]) / 127.0

			// Fully saturated, half-lightness color at the chosen hue...
			r, g, b := colorutils.HslToRgb(hue, 1.0, 0.5)
			// ...then linearly interpolate each channel toward 1.0 (white).
			r += (1 - r) * whiteness
			g += (1 - g) * whiteness
			b += (1 - b) * whiteness

			// Paint every pixel (3 bytes each) with the same color.
			pixelCount := len(frame) / 3
			for i := 0; i < pixelCount; i++ {
				frame[i*3+0] = colorutils.FloatToByte(r)
				frame[i*3+1] = colorutils.FloatToByte(g)
				frame[i*3+2] = colorutils.FloatToByte(b)
			}
			bytesOut <- frame
		}
	}
}
| longears/pixelslinger | opc/pattern-white.go | GO | apache-2.0 | 945 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_26) on Mon Sep 12 10:43:51 CEST 2011 -->
<TITLE>
it.essepuntato.facility.list
</TITLE>
<META NAME="date" CONTENT="2011-09-12">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../stylesheet.css" TITLE="Style">
</HEAD>
<BODY BGCOLOR="white">
<FONT size="+1" CLASS="FrameTitleFont">
<A HREF="../../../../it/essepuntato/facility/list/package-summary.html" target="classFrame">it.essepuntato.facility.list</A></FONT>
<TABLE BORDER="0" WIDTH="100%" SUMMARY="">
<TR>
<TD NOWRAP><FONT size="+1" CLASS="FrameHeadingFont">
Classes</FONT>
<FONT CLASS="FrameItemFont">
<BR>
<A HREF="ListFacility.html" title="class in it.essepuntato.facility.list" target="classFrame">ListFacility</A></FONT></TD>
</TR>
</TABLE>
</BODY>
</HTML>
| essepuntato/EarmarkDataStructure | doc/it/essepuntato/facility/list/package-frame.html | HTML | apache-2.0 | 907 |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.yy.base.dao.test;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.MissingResourceException;
import java.util.ResourceBundle;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeanUtils;
import org.springframework.test.jpa.AbstractJpaTests;
/**
*
* @author yyi
*/
/**
 * Base class for DAO integration tests. Wires the Spring application contexts by name
 * and optionally loads a per-test-class ResourceBundle that can be used to populate
 * model beans via {@link #populate(Object)}.
 *
 * @author yyi
 */
public abstract class BaseDaoTestCase extends AbstractJpaTests {
    /**
     * Log variable for all child classes. Uses LoggerFactory.getLogger(getClass()) from SLF4J.
     */
    protected final Logger log = LoggerFactory.getLogger(getClass());

    /**
     * ResourceBundle loaded from src/test/resources/${package.name}/ClassName.properties (if exists)
     */
    protected ResourceBundle rb;

    /**
     * Sets AutowireMode to AUTOWIRE_BY_NAME and configures all context files needed to tests DAOs.
     * @return String array of Spring context files.
     */
    @Override
    protected String[] getConfigLocations() {
        setAutowireMode(AUTOWIRE_BY_NAME);
        return new String[] {
            // "classpath:/applicationContext-resources.xml",
            // "classpath:/applicationContext-dao.xml",
            "classpath*:/applicationContext.xml", // for modular projects
            "classpath:**/applicationContext*.xml" // for web projects
        };
    }

    /**
     * Default constructor - populates "rb" variable if properties file exists for the class in
     * src/test/resources.
     */
    public BaseDaoTestCase() {
        // Since a ResourceBundle is not required for each class, just
        // do a simple check to see if one exists
        String className = this.getClass().getName();
        try {
            rb = ResourceBundle.getBundle(className);
        } catch (MissingResourceException mre) {
            //log.warn("No resource bundle found for: " + className);
        }
    }

    /**
     * Utility method to populate a javabean-style object with values
     * from a Properties file.
     *
     * @param obj the model object to populate
     * @return Object populated object (same instance as {@code obj})
     * @throws Exception if introspection or a setter invocation fails
     */
    protected Object populate(Object obj) throws Exception {
        // Collect all key/value pairs from the per-class resource bundle.
        Map<String, String> map = new HashMap<String, String>();
        for (Enumeration<String> keys = rb.getKeys(); keys.hasMoreElements();) {
            String key = keys.nextElement();
            map.put(key, rb.getString(key));
        }
        // BUG FIX: the previous implementation called Spring's
        // BeanUtils.copyProperties(map, obj). Spring's signature is
        // (source, target) and it copies matching *bean properties* of the
        // source object; a HashMap exposes none of the target's properties,
        // so nothing was ever copied. Populate the bean explicitly via
        // JavaBeans introspection instead.
        PropertyDescriptor[] descriptors =
            Introspector.getBeanInfo(obj.getClass()).getPropertyDescriptors();
        for (PropertyDescriptor pd : descriptors) {
            String value = map.get(pd.getName());
            // Resource bundles hold String values; only String-compatible,
            // writable properties are populated — no type conversion is done.
            if (value != null && pd.getWriteMethod() != null
                    && pd.getPropertyType().isAssignableFrom(String.class)) {
                pd.getWriteMethod().invoke(obj, value);
            }
        }
        return obj;
    }
}
| yyitsz/myjavastudio | app-fw/app-base/src/main/java/org/yy/base/dao/test/BaseDaoTestCase.java | Java | apache-2.0 | 2,761 |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.externalSystem.service.project.manage;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.externalSystem.model.*;
import com.intellij.openapi.externalSystem.model.project.ModuleData;
import com.intellij.openapi.externalSystem.model.project.ProjectData;
import com.intellij.openapi.externalSystem.service.project.IdeModifiableModelsProvider;
import com.intellij.openapi.externalSystem.service.project.IdeModifiableModelsProviderImpl;
import com.intellij.openapi.externalSystem.service.project.ProjectDataManager;
import com.intellij.openapi.externalSystem.util.DisposeAwareProjectChange;
import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil;
import com.intellij.openapi.externalSystem.util.ExternalSystemBundle;
import com.intellij.openapi.externalSystem.util.ExternalSystemUtil;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.impl.ProjectImpl;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.NotNullLazyValue;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.ExceptionUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.ContainerUtilRt;
import com.intellij.util.containers.MultiMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import static com.intellij.util.containers.ContainerUtil.map2Array;
/**
* Aggregates all {@link ProjectDataService#EP_NAME registered data services} and provides entry points for project data management.
*
* @author Denis Zhdanov
* @since 4/16/13 11:38 AM
*/
public class ProjectDataManagerImpl implements ProjectDataManager {

  private static final Logger LOG = Logger.getInstance(ProjectDataManagerImpl.class);

  // Marker stored on a DataNode once its payload has been prepared for use,
  // letting ensureTheDataIsReadyToUse() skip nodes processed earlier.
  private static final com.intellij.openapi.util.Key<Boolean> DATA_READY =
    com.intellij.openapi.util.Key.create("externalSystem.data.ready");

  // Lazily built index: external-system data key -> services registered for that key,
  // sorted by ExternalSystemApiUtil.orderAwareSort.
  @NotNull private final NotNullLazyValue<Map<Key<?>, List<ProjectDataService<?, ?>>>> myServices;

  /** Returns the application-level instance registered for {@link ProjectDataManager}. */
  public static ProjectDataManagerImpl getInstance() {
    ProjectDataManager service = ServiceManager.getService(ProjectDataManager.class);
    return (ProjectDataManagerImpl)service;
  }

  public ProjectDataManagerImpl() {
    // Build the key -> services index on first access, grouping every extension
    // registered under ProjectDataService.EP_NAME by its target data key.
    myServices = new NotNullLazyValue<Map<Key<?>, List<ProjectDataService<?, ?>>>>() {
      @NotNull
      @Override
      protected Map<Key<?>, List<ProjectDataService<?, ?>>> compute() {
        Map<Key<?>, List<ProjectDataService<?, ?>>> result = ContainerUtilRt.newHashMap();
        for (ProjectDataService<?, ?> service : ProjectDataService.EP_NAME.getExtensions()) {
          List<ProjectDataService<?, ?>> services = result.get(service.getTargetDataKey());
          if (services == null) {
            result.put(service.getTargetDataKey(), services = ContainerUtilRt.newArrayList());
          }
          services.add(service);
        }

        // Respect inter-service ordering constraints within each key's list.
        for (List<ProjectDataService<?, ?>> services : result.values()) {
          ExternalSystemApiUtil.orderAwareSort(services);
        }
        return result;
      }
    };
  }

  /**
   * Imports the given data nodes into the IDE project: groups the nodes by data key,
   * feeds each group to its registered services, runs post-import tasks, commits the
   * modifiable models and finally runs on-success tasks (in reverse registration order).
   * On any failure the models provider is disposed and the error is rethrown unchecked.
   */
  @SuppressWarnings("unchecked")
  @Override
  public void importData(@NotNull Collection<DataNode<?>> nodes,
                         @NotNull Project project,
                         @NotNull IdeModifiableModelsProvider modelsProvider,
                         boolean synchronous) {
    if (project.isDisposed()) return;

    // Group the whole node tree by key; keys with registered services but no incoming
    // nodes still get an empty entry so their services can remove stale data.
    MultiMap<Key<?>, DataNode<?>> grouped = ExternalSystemApiUtil.recursiveGroup(nodes);
    for (Key<?> key : myServices.getValue().keySet()) {
      if (!grouped.containsKey(key)) {
        grouped.put(key, Collections.emptyList());
      }
    }

    final Collection<DataNode<?>> projects = grouped.get(ProjectKeys.PROJECT);

    // only one project(can be multi-module project) expected for per single import
    assert projects.size() == 1 || projects.isEmpty();

    final DataNode<ProjectData> projectNode = (DataNode<ProjectData>)ContainerUtil.getFirstItem(projects);
    final ProjectData projectData;
    ProjectSystemId projectSystemId;
    if (projectNode != null) {
      projectData = projectNode.getData();
      projectSystemId = projectNode.getData().getOwner();
      ExternalProjectsDataStorage.getInstance(project).saveInclusionSettings(projectNode);
    }
    else {
      // No project node: fall back to the first module node to identify the owning system.
      projectData = null;
      DataNode<ModuleData> aModuleNode = (DataNode<ModuleData>)ContainerUtil.getFirstItem(grouped.get(ProjectKeys.MODULE));
      projectSystemId = aModuleNode != null ? aModuleNode.getData().getOwner() : null;
    }

    if (projectSystemId != null) {
      ExternalSystemUtil.scheduleExternalViewStructureUpdate(project, projectSystemId);
    }

    List<Runnable> onSuccessImportTasks = ContainerUtil.newSmartList();
    try {
      final Set<Map.Entry<Key<?>, Collection<DataNode<?>>>> entries = grouped.entrySet();

      // Report per-key progress when an indicator is available.
      final ProgressIndicator indicator = ProgressManager.getInstance().getProgressIndicator();
      if (indicator != null) {
        indicator.setIndeterminate(false);
      }
      final int size = entries.size();
      int count = 0;
      List<Runnable> postImportTasks = ContainerUtil.newSmartList();
      for (Map.Entry<Key<?>, Collection<DataNode<?>>> entry : entries) {
        if (indicator != null) {
          String message = ExternalSystemBundle.message(
            "progress.update.text", projectSystemId != null ? projectSystemId.getReadableName() : "",
            "Refresh " + getReadableText(entry.getKey()));
          indicator.setText(message);
          indicator.setFraction((double)count++ / size);
        }
        doImportData(entry.getKey(), entry.getValue(), projectData, project, modelsProvider, postImportTasks, onSuccessImportTasks);
      }

      for (Runnable postImportTask : postImportTasks) {
        postImportTask.run();
      }

      commit(modelsProvider, project, synchronous, "Imported data");
      if (indicator != null) {
        indicator.setIndeterminate(true);
      }

      // Notify listeners that the import finished; null path when no project node was present.
      project.getMessageBus().syncPublisher(ProjectDataImportListener.TOPIC)
        .onImportFinished(projectData != null ? projectData.getLinkedExternalProjectPath() : null);
    }
    catch (Throwable t) {
      // Roll back uncommitted model changes before propagating the failure.
      dispose(modelsProvider, project, synchronous);
      ExceptionUtil.rethrowAllAsUnchecked(t);
    }

    // Success-only tasks run last, in reverse order of registration.
    for (Runnable onSuccessImportTask : ContainerUtil.reverse(onSuccessImportTasks)) {
      onSuccessImportTask.run();
    }
  }

  /**
   * Converts a camel-case key name into a lower-case, space-separated phrase
   * for progress messages (e.g. "ModuleData" -> "module data").
   */
  @NotNull
  private static String getReadableText(@NotNull Key key) {
    StringBuilder buffer = new StringBuilder();
    String s = key.toString();
    for (int i = 0; i < s.length(); i++) {
      char currChar = s.charAt(i);
      if (Character.isUpperCase(currChar)) {
        if (i != 0) {
          buffer.append(' ');
        }
        buffer.append(StringUtil.toLowerCase(currChar));
      }
      else {
        buffer.append(currChar);
      }
    }
    return buffer.toString();
  }

  /** Convenience overload creating a fresh {@link IdeModifiableModelsProviderImpl}. */
  @Override
  public <T> void importData(@NotNull Collection<DataNode<T>> nodes, @NotNull Project project, boolean synchronous) {
    Collection<DataNode<?>> dummy = ContainerUtil.newSmartList();
    dummy.addAll(nodes);
    importData(dummy, project, new IdeModifiableModelsProviderImpl(project), synchronous);
  }

  /** Convenience overload importing a single node with the supplied models provider. */
  @Override
  public <T> void importData(@NotNull DataNode<T> node,
                             @NotNull Project project,
                             @NotNull IdeModifiableModelsProvider modelsProvider,
                             boolean synchronous) {
    Collection<DataNode<?>> dummy = ContainerUtil.newSmartList();
    dummy.add(node);
    importData(dummy, project, modelsProvider, synchronous);
  }

  /** Convenience overload importing a single node with a fresh models provider. */
  @Override
  public <T> void importData(@NotNull DataNode<T> node,
                             @NotNull Project project,
                             boolean synchronous) {
    importData(node, project, new IdeModifiableModelsProviderImpl(project), synchronous);
  }

  /**
   * Imports all nodes of one data key: splits them into active vs. ignored nodes,
   * lets every registered service import the active ones and remove orphan/ignored
   * data, and queues post-import and on-success tasks for services extending
   * {@link AbstractProjectDataService}.
   */
  @SuppressWarnings("unchecked")
  private <T> void doImportData(@NotNull Key<T> key,
                                @NotNull Collection<DataNode<?>> nodes,
                                @Nullable final ProjectData projectData,
                                @NotNull final Project project,
                                @NotNull final IdeModifiableModelsProvider modelsProvider,
                                @NotNull final List<Runnable> postImportTasks,
                                @NotNull final List<Runnable> onSuccessImportTasks) {
    if (project.isDisposed()) return;
    if (project instanceof ProjectImpl) {
      assert ((ProjectImpl)project).isComponentsCreated();
    }

    // Partition the incoming nodes: only nodes matching this key are considered,
    // and ignored ones are routed to removal rather than import.
    final List<DataNode<T>> toImport = ContainerUtil.newSmartList();
    final List<DataNode<T>> toIgnore = ContainerUtil.newSmartList();

    for (DataNode node : nodes) {
      if (!key.equals(node.getKey())) continue;

      if (node.isIgnored()) {
        toIgnore.add(node);
      }
      else {
        toImport.add(node);
      }
    }

    ensureTheDataIsReadyToUse((Collection)toImport);

    final List<ProjectDataService<?, ?>> services = myServices.getValue().get(key);
    if (services == null) {
      LOG.warn(String.format(
        "Can't import data nodes '%s'. Reason: no service is registered for key %s. Available services for %s",
        toImport, key, myServices.getValue().keySet()
      ));
    }
    else {
      for (ProjectDataService<?, ?> service : services) {
        final long importStartTime = System.currentTimeMillis();
        ((ProjectDataService)service).importData(toImport, projectData, project, modelsProvider);
        if (LOG.isDebugEnabled()) {
          final long importTimeInMs = (System.currentTimeMillis() - importStartTime);
          LOG.debug(String.format("Service %s imported data in %d ms", service.getClass().getSimpleName(), importTimeInMs));
        }

        // With a project node available, also compute orphan IDE data and remove it
        // together with the explicitly ignored nodes.
        if (projectData != null) {
          ensureTheDataIsReadyToUse((Collection)toIgnore);
          final long removeStartTime = System.currentTimeMillis();
          final Computable<Collection<?>> orphanIdeDataComputable =
            ((ProjectDataService)service).computeOrphanData(toImport, projectData, project, modelsProvider);
          ((ProjectDataService)service).removeData(orphanIdeDataComputable, toIgnore, projectData, project, modelsProvider);
          if (LOG.isDebugEnabled()) {
            final long removeTimeInMs = (System.currentTimeMillis() - removeStartTime);
            LOG.debug(String.format("Service %s computed and removed data in %d ms", service.getClass().getSimpleName(), removeTimeInMs));
          }
        }
      }
    }

    // Defer postProcess/onSuccessImport callbacks; the caller runs them after all
    // keys have been imported (and, for on-success, after a successful commit).
    if (services != null && projectData != null) {
      postImportTasks.add(() -> {
        for (ProjectDataService<?, ?> service : services) {
          if (service instanceof AbstractProjectDataService) {
            final long taskStartTime = System.currentTimeMillis();
            ((AbstractProjectDataService)service).postProcess(toImport, projectData, project, modelsProvider);
            if (LOG.isDebugEnabled()) {
              final long taskTimeInMs = (System.currentTimeMillis() - taskStartTime);
              LOG.debug(String.format("Service %s run post import task in %d ms", service.getClass().getSimpleName(), taskTimeInMs));
            }
          }
        }
      });
      onSuccessImportTasks.add(() -> {
        for (ProjectDataService<?, ?> service : services) {
          if (service instanceof AbstractProjectDataService) {
            final long taskStartTime = System.currentTimeMillis();
            ((AbstractProjectDataService)service).onSuccessImport(project);
            if (LOG.isDebugEnabled()) {
              final long taskTimeInMs = (System.currentTimeMillis() - taskStartTime);
              LOG.debug(String.format("Service %s run post import task in %d ms", service.getClass().getSimpleName(), taskTimeInMs));
            }
          }
        }
      });
    }
  }

  /**
   * Prepares the whole subtree rooted at {@code dataNode} for use, visiting each node
   * once; nodes already flagged with {@link #DATA_READY} are skipped.
   */
  @Override
  public void ensureTheDataIsReadyToUse(@Nullable DataNode dataNode) {
    if (dataNode == null) return;
    if (Boolean.TRUE.equals(dataNode.getUserData(DATA_READY))) return;

    ExternalSystemApiUtil.visit(dataNode, dataNode1 -> {
      prepareDataToUse(dataNode1);
      dataNode1.putUserData(DATA_READY, Boolean.TRUE);
    });
  }

  /**
   * Asks every service registered for {@code key} to remove the given data, then
   * commits the models; on failure the models provider is disposed and the error
   * is rethrown unchecked.
   */
  @SuppressWarnings("unchecked")
  public <E, I> void removeData(@NotNull Key<E> key,
                                @NotNull Collection<I> toRemove,
                                @NotNull final Collection<DataNode<E>> toIgnore,
                                @NotNull final ProjectData projectData,
                                @NotNull Project project,
                                @NotNull final IdeModifiableModelsProvider modelsProvider,
                                boolean synchronous) {
    try {
      List<ProjectDataService<?, ?>> services = myServices.getValue().get(key);
      for (ProjectDataService service : services) {
        final long removeStartTime = System.currentTimeMillis();
        service.removeData(new Computable.PredefinedValueComputable<Collection>(toRemove), toIgnore, projectData, project, modelsProvider);
        if (LOG.isDebugEnabled()) {
          final long removeTimeInMs = System.currentTimeMillis() - removeStartTime;
          LOG.debug(String.format("Service %s removed data in %d ms", service.getClass().getSimpleName(), removeTimeInMs));
        }
      }

      commit(modelsProvider, project, synchronous, "Removed data");
    }
    catch (Throwable t) {
      dispose(modelsProvider, project, synchronous);
      ExceptionUtil.rethrowAllAsUnchecked(t);
    }
  }

  /** Convenience overload creating a fresh {@link IdeModifiableModelsProviderImpl}. */
  public <E, I> void removeData(@NotNull Key<E> key,
                                @NotNull Collection<I> toRemove,
                                @NotNull final Collection<DataNode<E>> toIgnore,
                                @NotNull final ProjectData projectData,
                                @NotNull Project project,
                                boolean synchronous) {
    removeData(key, toRemove, toIgnore, projectData, project, new IdeModifiableModelsProviderImpl(project), synchronous);
  }

  /** Stores the latest external-project data for a still-open project. */
  public void updateExternalProjectData(@NotNull Project project, @NotNull ExternalProjectInfo externalProjectInfo) {
    if (!project.isDisposed()) {
      ExternalProjectsManagerImpl.getInstance(project).updateExternalProjectData(externalProjectInfo);
    }
  }

  /** Returns the cached external-project info, or null when the project is disposed or nothing is cached. */
  @Nullable
  @Override
  public ExternalProjectInfo getExternalProjectData(@NotNull Project project,
                                                    @NotNull ProjectSystemId projectSystemId,
                                                    @NotNull String externalProjectPath) {
    return !project.isDisposed() ? ExternalProjectsDataStorage.getInstance(project).get(projectSystemId, externalProjectPath) : null;
  }

  /** Lists all cached external-project infos for the given system; empty when the project is disposed. */
  @NotNull
  @Override
  public Collection<ExternalProjectInfo> getExternalProjectsData(@NotNull Project project, @NotNull ProjectSystemId projectSystemId) {
    if (!project.isDisposed()) {
      return ExternalProjectsDataStorage.getInstance(project).list(projectSystemId);
    }
    else {
      return ContainerUtil.emptyList();
    }
  }

  // Bulk variant of ensureTheDataIsReadyToUse(DataNode).
  private void ensureTheDataIsReadyToUse(@NotNull Collection<DataNode<?>> nodes) {
    for (DataNode<?> node : nodes) {
      ensureTheDataIsReadyToUse(node);
    }
  }

  /**
   * Deserializes a node's payload using the class loaders of the services registered
   * for its key; on failure the node's data is cleared rather than left half-prepared.
   */
  private void prepareDataToUse(@NotNull DataNode dataNode) {
    final Map<Key<?>, List<ProjectDataService<?, ?>>> servicesByKey = myServices.getValue();
    List<ProjectDataService<?, ?>> services = servicesByKey.get(dataNode.getKey());
    if (services != null) {
      try {
        dataNode.prepareData(map2Array(services, ClassLoader.class, service -> service.getClass().getClassLoader()));
      }
      catch (Exception e) {
        LOG.debug(e);
        dataNode.clear(true);
      }
    }
  }

  // Commits the modifiable models inside a project-change action, logging the duration.
  private static void commit(@NotNull final IdeModifiableModelsProvider modelsProvider,
                             @NotNull Project project,
                             boolean synchronous,
                             @NotNull final String commitDesc) {
    ExternalSystemApiUtil.executeProjectChangeAction(synchronous, new DisposeAwareProjectChange(project) {
      @Override
      public void execute() {
        final long startTime = System.currentTimeMillis();
        modelsProvider.commit();
        final long timeInMs = System.currentTimeMillis() - startTime;
        LOG.debug(String.format("%s committed in %d ms", commitDesc, timeInMs));
      }
    });
  }

  // Disposes the modifiable models (discarding pending changes) inside a project-change action.
  private static void dispose(@NotNull final IdeModifiableModelsProvider modelsProvider,
                              @NotNull Project project,
                              boolean synchronous) {
    ExternalSystemApiUtil.executeProjectChangeAction(synchronous, new DisposeAwareProjectChange(project) {
      @Override
      public void execute() {
        modelsProvider.dispose();
      }
    });
  }
}
| signed/intellij-community | platform/external-system-impl/src/com/intellij/openapi/externalSystem/service/project/manage/ProjectDataManagerImpl.java | Java | apache-2.0 | 17,677 |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.ads.googleads.v10.services;
import com.google.api.core.BetaApi;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.protobuf.AbstractMessage;
import io.grpc.ServerServiceDefinition;
import java.util.List;
import javax.annotation.Generated;
@BetaApi
@Generated("by gapic-generator-java")
public class MockCampaignLabelService implements MockGrpcService {
  /** Delegate implementation that records incoming requests and replays queued responses. */
  private final MockCampaignLabelServiceImpl serviceImpl = new MockCampaignLabelServiceImpl();

  public MockCampaignLabelService() {}

  /** Returns every request received so far, in arrival order. */
  @Override
  public List<AbstractMessage> getRequests() {
    return serviceImpl.getRequests();
  }

  /** Queues a canned response for a subsequent call. */
  @Override
  public void addResponse(AbstractMessage response) {
    serviceImpl.addResponse(response);
  }

  /** Queues an exception to be thrown by a subsequent call. */
  @Override
  public void addException(Exception exception) {
    serviceImpl.addException(exception);
  }

  /** Exposes the delegate as a gRPC service definition for the in-process server. */
  @Override
  public ServerServiceDefinition getServiceDefinition() {
    return serviceImpl.bindService();
  }

  /** Clears recorded requests and queued responses/exceptions. */
  @Override
  public void reset() {
    serviceImpl.reset();
  }
}
| googleads/google-ads-java | google-ads-stubs-v10/src/test/java/com/google/ads/googleads/v10/services/MockCampaignLabelService.java | Java | apache-2.0 | 1,650 |
/**
*
* Copyright (c) 2021 Project CHIP Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "ChannelManager.h"
#include "TvApp-JNI.h"
#include <app-common/zap-generated/ids/Clusters.h>
#include <jni.h>
#include <lib/core/CHIPSafeCasts.h>
#include <lib/support/CHIPJNIError.h>
#include <lib/support/JniReferences.h>
#include <lib/support/JniTypeWrappers.h>
using namespace chip;
using namespace chip::app;
using namespace chip::app::Clusters::Channel;
using namespace chip::Uint8;
/** @brief Channel Cluster Init
*
* This function is called when a specific cluster is initialized. It gives the
* application an opportunity to take care of cluster initialization procedures.
* It is called exactly once for each endpoint where cluster is present.
*
* @param endpoint Ver.: always
*
*/
void emberAfChannelClusterInitCallback(EndpointId endpoint)
{
    // Forward the Channel cluster's init event to the Java layer through the
    // TV app JNI bridge so the app can install its delegate for this endpoint.
    ChipLogProgress(Zcl, "TV Android App: Channel::PostClusterInit");
    TvAppJNIMgr().PostClusterInit(chip::app::Clusters::Channel::Id, endpoint);
}
// Creates a ChannelManager delegate bound to the given Java manager object and
// installs it as the Channel cluster delegate for the endpoint.
void ChannelManager::NewManager(jint endpoint, jobject manager)
{
    ChipLogProgress(Zcl, "TV Android App: Channel::SetDefaultDelegate");
    // The delegate is intentionally not freed here: SetDefaultDelegate retains
    // it for the lifetime of the endpoint.
    ChannelManager * delegate = new ChannelManager();
    delegate->InitializeWithObjects(manager);
    chip::app::Clusters::Channel::SetDefaultDelegate(static_cast<EndpointId>(endpoint), delegate);
}
// Encodes the Channel cluster's ChannelList attribute by fetching the array of
// ChannelInfo objects from the Java ChannelManager delegate over JNI and
// translating each element into a Matter ChannelInfo struct.
//
// Returns CHIP_ERROR_INCORRECT_STATE if the delegate or its method handle is
// missing (or a Java exception occurs), CHIP_JNI_ERROR_NO_ENV without a JNI
// environment, otherwise the result of the list encoding.
CHIP_ERROR ChannelManager::HandleGetChannelList(AttributeValueEncoder & aEncoder)
{
    CHIP_ERROR err = CHIP_NO_ERROR;
    JNIEnv * env = JniReferences::GetInstance().GetEnvForCurrentThread();

    ChipLogProgress(Zcl, "Received ChannelManager::HandleGetChannelList");
    VerifyOrExit(mChannelManagerObject != nullptr, err = CHIP_ERROR_INCORRECT_STATE);
    VerifyOrExit(mGetChannelListMethod != nullptr, err = CHIP_ERROR_INCORRECT_STATE);
    VerifyOrExit(env != NULL, err = CHIP_JNI_ERROR_NO_ENV);

    return aEncoder.EncodeList([this, env](const auto & encoder) -> CHIP_ERROR {
        jobjectArray channelInfoList = (jobjectArray) env->CallObjectMethod(mChannelManagerObject, mGetChannelListMethod);
        if (env->ExceptionCheck())
        {
            // A pending Java exception must be cleared before making further JNI calls.
            ChipLogError(Zcl, "Java exception in ChannelManager::HandleGetChannelList");
            env->ExceptionDescribe();
            env->ExceptionClear();
            return CHIP_ERROR_INCORRECT_STATE;
        }

        // NOTE(review): channelInfoList is not null-checked before
        // GetArrayLength -- assumes the Java side never returns null; confirm.
        jint length = env->GetArrayLength(channelInfoList);

        for (jint i = 0; i < length; i++)
        {
            chip::app::Clusters::Channel::Structs::ChannelInfo::Type channelInfo;
            jobject channelObject = env->GetObjectArrayElement(channelInfoList, i);
            jclass channelClass = env->GetObjectClass(channelObject);

            // Optional string fields: only populated when the Java field is non-null.
            // NOTE(review): each JniUtfString below is destroyed at the end of its
            // if-block while channelInfo keeps a CharSpan into its buffer, which is
            // then read by encoder.Encode() -- confirm the backing storage is still
            // valid at that point.
            jfieldID getCallSignField = env->GetFieldID(channelClass, "callSign", "Ljava/lang/String;");
            jstring jcallSign = static_cast<jstring>(env->GetObjectField(channelObject, getCallSignField));
            if (jcallSign != NULL)
            {
                JniUtfString callsign(env, jcallSign);
                channelInfo.callSign = Optional<CharSpan>(callsign.charSpan());
            }

            jfieldID getNameField = env->GetFieldID(channelClass, "name", "Ljava/lang/String;");
            jstring jname = static_cast<jstring>(env->GetObjectField(channelObject, getNameField));
            if (jname != NULL)
            {
                JniUtfString name(env, jname);
                channelInfo.name = Optional<CharSpan>(name.charSpan());
            }

            jfieldID getJaffiliateCallSignField = env->GetFieldID(channelClass, "affiliateCallSign", "Ljava/lang/String;");
            jstring jaffiliateCallSign = static_cast<jstring>(env->GetObjectField(channelObject, getJaffiliateCallSignField));
            if (jaffiliateCallSign != NULL)
            {
                JniUtfString affiliateCallSign(env, jaffiliateCallSign);
                channelInfo.affiliateCallSign = Optional<CharSpan>(affiliateCallSign.charSpan());
            }

            // Numeric channel identifiers (Java int fields narrowed to uint16_t).
            jfieldID majorNumField = env->GetFieldID(channelClass, "majorNumber", "I");
            jint jmajorNum = env->GetIntField(channelObject, majorNumField);
            channelInfo.majorNumber = static_cast<uint16_t>(jmajorNum);

            jfieldID minorNumField = env->GetFieldID(channelClass, "minorNumber", "I");
            jint jminorNum = env->GetIntField(channelObject, minorNumField);
            channelInfo.minorNumber = static_cast<uint16_t>(jminorNum);

            ReturnErrorOnFailure(encoder.Encode(channelInfo));
        }
        return CHIP_NO_ERROR;
    });

exit:
    // Only reached via the VerifyOrExit guards above.
    if (err != CHIP_NO_ERROR)
    {
        ChipLogError(Zcl, "ChannelManager::getChannelList status error: %s", err.AsString());
    }
    return err;
}
// Encodes the Channel cluster's Lineup attribute from the Java delegate's
// ChannelLineupInfo object.
//
// Returns CHIP_ERROR_INCORRECT_STATE if the delegate or method handle is
// missing, CHIP_JNI_ERROR_NO_ENV without a JNI environment, otherwise the
// result of encoding the lineup struct.
CHIP_ERROR ChannelManager::HandleGetLineup(AttributeValueEncoder & aEncoder)
{
    chip::app::Clusters::Channel::Structs::LineupInfo::Type lineupInfo;

    CHIP_ERROR err = CHIP_NO_ERROR;
    JNIEnv * env = JniReferences::GetInstance().GetEnvForCurrentThread();

    ChipLogProgress(Zcl, "Received ChannelManager::HandleGetLineup");
    VerifyOrExit(mChannelManagerObject != nullptr, err = CHIP_ERROR_INCORRECT_STATE);
    VerifyOrExit(mGetLineupMethod != nullptr, err = CHIP_ERROR_INCORRECT_STATE);
    VerifyOrExit(env != NULL, err = CHIP_JNI_ERROR_NO_ENV);

    {
        jobject channelLineupObject = env->CallObjectMethod(mChannelManagerObject, mGetLineupMethod);
        jclass channelLineupClazz = env->GetObjectClass(channelLineupObject);

        // Optional string fields: set only when the corresponding Java field is non-null.
        // NOTE(review): each JniUtfString below is destroyed at the end of its
        // if-block while lineupInfo keeps a CharSpan into its buffer, later read
        // by aEncoder.Encode() -- confirm the backing storage is still valid then.
        jfieldID operatorNameFild = env->GetFieldID(channelLineupClazz, "operatorName", "Ljava/lang/String;");
        jstring joperatorName = static_cast<jstring>(env->GetObjectField(channelLineupObject, operatorNameFild));
        if (joperatorName != NULL)
        {
            JniUtfString operatorName(env, joperatorName);
            lineupInfo.operatorName = operatorName.charSpan();
        }

        jfieldID lineupNameFild = env->GetFieldID(channelLineupClazz, "lineupName", "Ljava/lang/String;");
        jstring jlineupName = static_cast<jstring>(env->GetObjectField(channelLineupObject, lineupNameFild));
        if (jlineupName != NULL)
        {
            JniUtfString lineupName(env, jlineupName);
            lineupInfo.lineupName = Optional<CharSpan>(lineupName.charSpan());
        }

        jfieldID postalCodeFild = env->GetFieldID(channelLineupClazz, "postalCode", "Ljava/lang/String;");
        jstring jpostalCode = static_cast<jstring>(env->GetObjectField(channelLineupObject, postalCodeFild));
        if (jpostalCode != NULL)
        {
            JniUtfString postalCode(env, jpostalCode);
            lineupInfo.postalCode = Optional<CharSpan>(postalCode.charSpan());
        }

        // Java int mapped directly onto the Matter LineupInfoTypeEnum.
        jfieldID lineupInfoTypeFild = env->GetFieldID(channelLineupClazz, "lineupInfoTypeEnum", "I");
        jint jlineupInfoType = (env->GetIntField(channelLineupObject, lineupInfoTypeFild));
        lineupInfo.lineupInfoType = static_cast<app::Clusters::Channel::LineupInfoTypeEnum>(jlineupInfoType);

        err = aEncoder.Encode(lineupInfo);
    }

exit:
    if (err != CHIP_NO_ERROR)
    {
        ChipLogError(Zcl, "ChannelManager::getChannelLineup status error: %s", err.AsString());
    }
    return err;
}
// Encodes the Channel cluster's CurrentChannel attribute by reading a single
// ChannelInfo object from the Java delegate over JNI.
//
// Returns CHIP_ERROR_INCORRECT_STATE if the delegate or method handle is
// missing, CHIP_JNI_ERROR_NO_ENV without a JNI environment, otherwise the
// result of encoding the channel struct.
CHIP_ERROR ChannelManager::HandleGetCurrentChannel(AttributeValueEncoder & aEncoder)
{
    chip::app::Clusters::Channel::Structs::ChannelInfo::Type channelInfo;

    CHIP_ERROR err = CHIP_NO_ERROR;
    JNIEnv * env = JniReferences::GetInstance().GetEnvForCurrentThread();

    ChipLogProgress(Zcl, "Received ChannelManager::HandleGetCurrentChannel");
    VerifyOrExit(mChannelManagerObject != nullptr, err = CHIP_ERROR_INCORRECT_STATE);
    VerifyOrExit(mGetCurrentChannelMethod != nullptr, err = CHIP_ERROR_INCORRECT_STATE);
    VerifyOrExit(env != NULL, err = CHIP_JNI_ERROR_NO_ENV);

    {
        jobject channelInfoObject = env->CallObjectMethod(mChannelManagerObject, mGetCurrentChannelMethod);
        jclass channelClass = env->GetObjectClass(channelInfoObject);

        // Optional string fields: only set when the Java field is non-null.
        // NOTE(review): same JniUtfString lifetime caveat as HandleGetChannelList --
        // the CharSpans may outlive their backing buffers before Encode(); confirm.
        jfieldID getCallSignField = env->GetFieldID(channelClass, "callSign", "Ljava/lang/String;");
        jstring jcallSign = static_cast<jstring>(env->GetObjectField(channelInfoObject, getCallSignField));
        if (jcallSign != NULL)
        {
            JniUtfString callsign(env, jcallSign);
            channelInfo.callSign = Optional<CharSpan>(callsign.charSpan());
        }

        jfieldID getNameField = env->GetFieldID(channelClass, "name", "Ljava/lang/String;");
        jstring jname = static_cast<jstring>(env->GetObjectField(channelInfoObject, getNameField));
        if (jname != NULL)
        {
            JniUtfString name(env, jname);
            channelInfo.name = Optional<CharSpan>(name.charSpan());
        }

        jfieldID getJaffiliateCallSignField = env->GetFieldID(channelClass, "affiliateCallSign", "Ljava/lang/String;");
        jstring jaffiliateCallSign = static_cast<jstring>(env->GetObjectField(channelInfoObject, getJaffiliateCallSignField));
        if (jaffiliateCallSign != NULL)
        {
            JniUtfString affiliateCallSign(env, jaffiliateCallSign);
            channelInfo.affiliateCallSign = Optional<CharSpan>(affiliateCallSign.charSpan());
        }

        // Numeric channel identifiers (Java int fields narrowed to uint16_t).
        jfieldID majorNumField = env->GetFieldID(channelClass, "majorNumber", "I");
        jint jmajorNum = env->GetIntField(channelInfoObject, majorNumField);
        channelInfo.majorNumber = static_cast<uint16_t>(jmajorNum);

        jfieldID minorNumField = env->GetFieldID(channelClass, "minorNumber", "I");
        jint jminorNum = env->GetIntField(channelInfoObject, minorNumField);
        channelInfo.minorNumber = static_cast<uint16_t>(jminorNum);

        err = aEncoder.Encode(channelInfo);
    }

exit:
    if (err != CHIP_NO_ERROR)
    {
        ChipLogError(Zcl, "ChannelManager::HandleGetCurrentChannel status error: %s", err.AsString());
    }
    return err;
}
// Handles the ChangeChannel command: asks the Java delegate to switch to the
// channel matching `match` and reports the delegate's status through the
// command response helper.  On any guard failure or Java exception, returns
// without invoking the helper.
void ChannelManager::HandleChangeChannel(CommandResponseHelper<ChangeChannelResponseType> & helper, const CharSpan & match)
{
    std::string name(match.data(), match.size());
    JNIEnv * env = JniReferences::GetInstance().GetEnvForCurrentThread();

    ChangeChannelResponseType response;
    // NOTE(review): "data response" is a placeholder payload -- confirm whether
    // real response data should be produced here.
    response.data = chip::MakeOptional(CharSpan::fromCharString("data response"));

    ChipLogProgress(Zcl, "Received ChannelManager::HandleChangeChannel name %s", name.c_str());
    VerifyOrExit(mChannelManagerObject != nullptr, ChipLogError(Zcl, "mChannelManagerObject null"));
    VerifyOrExit(mChangeChannelMethod != nullptr, ChipLogError(Zcl, "mChangeChannelMethod null"));
    VerifyOrExit(env != NULL, ChipLogError(Zcl, "env null"));

    {
        UtfString jniname(env, name.c_str());
        env->ExceptionClear();
        jobject channelObject = env->CallObjectMethod(mChannelManagerObject, mChangeChannelMethod, jniname.jniValue());
        if (env->ExceptionCheck())
        {
            ChipLogError(DeviceLayer, "Java exception in ChannelManager::HandleChangeChannel");
            env->ExceptionDescribe();
            env->ExceptionClear();
            goto exit;
        }

        // Map the Java ChannelInfo.status int onto the Matter StatusEnum.
        jclass channelClass = env->GetObjectClass(channelObject);
        jfieldID getStatusField = env->GetFieldID(channelClass, "status", "I");
        jint jstatus = env->GetIntField(channelObject, getStatusField);
        response.status = static_cast<app::Clusters::Channel::StatusEnum>(jstatus);
        helper.Success(response);
    }

exit:
    return;
}
// Handles ChangeChannelByNumber: forwards the major/minor channel numbers to
// the Java delegate.  Returns the delegate's boolean result, or false if the
// guards fail or a Java exception is raised.
bool ChannelManager::HandleChangeChannelByNumber(const uint16_t & majorNumber, const uint16_t & minorNumber)
{
    jboolean ret = JNI_FALSE;
    JNIEnv * env = JniReferences::GetInstance().GetEnvForCurrentThread();

    ChipLogProgress(Zcl, "Received ChannelManager::HandleChangeChannelByNumber majorNumber %d, minorNumber %d", majorNumber,
                    minorNumber);
    VerifyOrExit(mChannelManagerObject != nullptr, ChipLogError(Zcl, "mChannelManagerObject null"));
    VerifyOrExit(mChangeChannelByNumberMethod != nullptr, ChipLogError(Zcl, "mChangeChannelByNumberMethod null"));
    VerifyOrExit(env != NULL, ChipLogError(Zcl, "env null"));

    env->ExceptionClear();
    ret = env->CallBooleanMethod(mChannelManagerObject, mChangeChannelByNumberMethod, static_cast<jint>(majorNumber),
                                 static_cast<jint>(minorNumber));
    if (env->ExceptionCheck())
    {
        ChipLogError(DeviceLayer, "Java exception in ChannelManager::changeChannelByNumber");
        env->ExceptionDescribe();
        env->ExceptionClear();
        return false;
    }

exit:
    return static_cast<bool>(ret);
}
// Handles SkipChannel: asks the Java delegate to skip forward `count`
// channels.  Returns the delegate's boolean result, or false if the guards
// fail or a Java exception is raised.
bool ChannelManager::HandleSkipChannel(const uint16_t & count)
{
    jboolean ret = JNI_FALSE;
    JNIEnv * env = JniReferences::GetInstance().GetEnvForCurrentThread();

    ChipLogProgress(Zcl, "Received ChannelManager::HandleSkipChannel count %d", count);
    VerifyOrExit(mChannelManagerObject != nullptr, ChipLogError(Zcl, "mChannelManagerObject null"));
    VerifyOrExit(mSkipChannelMethod != nullptr, ChipLogError(Zcl, "mSkipChannelMethod null"));
    VerifyOrExit(env != NULL, ChipLogError(Zcl, "env null"));

    env->ExceptionClear();
    ret = env->CallBooleanMethod(mChannelManagerObject, mSkipChannelMethod, static_cast<jint>(count));
    if (env->ExceptionCheck())
    {
        ChipLogError(DeviceLayer, "Java exception in ChannelManager::HandleSkipChannel");
        env->ExceptionDescribe();
        env->ExceptionClear();
        return false;
    }

exit:
    return static_cast<bool>(ret);
}
// Binds this delegate to a Java ChannelManager object: takes a global
// reference to it and caches the method IDs used by the handlers above.
// Missing methods are logged but non-fatal, so partially-implemented Java
// managers still work for the methods they do provide.
void ChannelManager::InitializeWithObjects(jobject managerObject)
{
    JNIEnv * env = JniReferences::GetInstance().GetEnvForCurrentThread();
    VerifyOrReturn(env != nullptr, ChipLogError(Zcl, "Failed to GetEnvForCurrentThread for ChannelManager"));

    // Global ref keeps the Java object alive across JNI calls/threads.
    mChannelManagerObject = env->NewGlobalRef(managerObject);
    VerifyOrReturn(mChannelManagerObject != nullptr, ChipLogError(Zcl, "Failed to NewGlobalRef ChannelManager"));

    jclass managerClass = env->GetObjectClass(mChannelManagerObject);
    VerifyOrReturn(managerClass != nullptr, ChipLogError(Zcl, "Failed to get ChannelManager Java class"));

    // Looks up one method on the manager class; on failure, logs and clears the
    // pending NoSuchMethodError so subsequent lookups still run.
    auto getMethod = [env, managerClass](const char * name, const char * signature) -> jmethodID {
        jmethodID method = env->GetMethodID(managerClass, name, signature);
        if (method == nullptr)
        {
            ChipLogError(Zcl, "Failed to access ChannelManager '%s' method", name);
            env->ExceptionClear();
        }
        return method;
    };

    mGetChannelListMethod        = getMethod("getChannelList", "()[Lcom/tcl/chip/tvapp/ChannelInfo;");
    mGetLineupMethod             = getMethod("getLineup", "()Lcom/tcl/chip/tvapp/ChannelLineupInfo;");
    mGetCurrentChannelMethod     = getMethod("getCurrentChannel", "()Lcom/tcl/chip/tvapp/ChannelInfo;");
    mChangeChannelMethod         = getMethod("changeChannel", "(Ljava/lang/String;)Lcom/tcl/chip/tvapp/ChannelInfo;");
    mChangeChannelByNumberMethod = getMethod("changeChannelByNumber", "(II)Z");
    mSkipChannelMethod           = getMethod("skipChannel", "(I)Z");
}
| project-chip/connectedhomeip | examples/tv-app/android/java/ChannelManager.cpp | C++ | apache-2.0 | 16,428 |
* add `--endpoint` arg
| joprice/dynamite | notes/0.0.2.markdown | Markdown | apache-2.0 | 26 |
#include "Globals.h"
#include "BlockDoor.h"
#include "../Entities/Player.h"
// Trivial constructor: forwards the block type to the generic block handler
// base class.
cBlockDoorHandler::cBlockDoorHandler(BLOCKTYPE a_BlockType)
    : super(a_BlockType)
{
}
// When one half of a door is destroyed, removes the other half so no orphaned
// door block is left behind.
void cBlockDoorHandler::OnDestroyed(cChunkInterface & a_ChunkInterface, cWorldInterface & a_WorldInterface, int a_BlockX, int a_BlockY, int a_BlockZ)
{
    // Meta bit 0x08 marks the upper half; the matching half is directly below
    // an upper block, or directly above a lower one.
    NIBBLETYPE OldMeta = a_ChunkInterface.GetBlockMeta(a_BlockX, a_BlockY, a_BlockZ);
    int OtherY = ((OldMeta & 8) != 0) ? (a_BlockY - 1) : (a_BlockY + 1);

    if (IsDoorBlockType(a_ChunkInterface.GetBlock(a_BlockX, OtherY, a_BlockZ)))
    {
        a_ChunkInterface.FastSetBlock(a_BlockX, OtherY, a_BlockZ, E_BLOCK_AIR, 0);
    }
}
// Toggles a door when a player right-clicks it.
void cBlockDoorHandler::OnUse(cChunkInterface & a_ChunkInterface, cWorldInterface & a_WorldInterface, cPlayer * a_Player, int a_BlockX, int a_BlockY, int a_BlockZ, eBlockFace a_BlockFace, int a_CursorX, int a_CursorY, int a_CursorZ)
{
    UNUSED(a_WorldInterface);
    UNUSED(a_BlockFace);
    UNUSED(a_CursorX);
    UNUSED(a_CursorY);
    UNUSED(a_CursorZ);

    // Guard clause: only wooden doors respond to use here.
    if (a_ChunkInterface.GetBlock(a_BlockX, a_BlockY, a_BlockZ) != E_BLOCK_WOODEN_DOOR)
    {
        return;
    }

    ChangeDoor(a_ChunkInterface, a_BlockX, a_BlockY, a_BlockZ);
    // Effect 1003 is broadcast to everyone except the acting player's client
    // (presumably it produces the toggle sound locally -- confirm).
    a_Player->GetWorld()->BroadcastSoundParticleEffect(1003, a_BlockX, a_BlockY, a_BlockZ, 0, a_Player->GetClientHandle());
}
// Re-sends both halves of the door to the client, undoing any client-side
// predicted open/close after a cancelled right-click.
//
// Fix: the original marked a_ChunkInterface with UNUSED() even though it is
// read via GetBlockMeta() below; the genuinely-unused parameter is
// a_BlockFace, which is now the one marked instead.
void cBlockDoorHandler::OnCancelRightClick(cChunkInterface & a_ChunkInterface, cWorldInterface & a_WorldInterface, cPlayer * a_Player, int a_BlockX, int a_BlockY, int a_BlockZ, eBlockFace a_BlockFace)
{
    UNUSED(a_BlockFace);

    a_WorldInterface.SendBlockTo(a_BlockX, a_BlockY, a_BlockZ, a_Player);
    NIBBLETYPE Meta = a_ChunkInterface.GetBlockMeta(a_BlockX, a_BlockY, a_BlockZ);

    if (Meta & 0x8)
    {
        // Current block is top of the door; the other half is below
        a_WorldInterface.SendBlockTo(a_BlockX, a_BlockY - 1, a_BlockZ, a_Player);
    }
    else
    {
        // Current block is bottom of the door; the other half is above
        a_WorldInterface.SendBlockTo(a_BlockX, a_BlockY + 1, a_BlockZ, a_Player);
    }
}
// Rotates the door meta 90 degrees counter-clockwise.  Only the bottom half
// (0x08 clear) stores the facing; the top half's meta is rotation-invariant.
NIBBLETYPE cBlockDoorHandler::MetaRotateCCW(NIBBLETYPE a_Meta)
{
    return ((a_Meta & 0x08) != 0) ? a_Meta : super::MetaRotateCCW(a_Meta);
}
// Rotates the door meta 90 degrees clockwise.  Only the bottom half (0x08
// clear) stores the facing; the top half's meta is rotation-invariant.
NIBBLETYPE cBlockDoorHandler::MetaRotateCW(NIBBLETYPE a_Meta)
{
    return ((a_Meta & 0x08) != 0) ? a_Meta : super::MetaRotateCW(a_Meta);
}
// Mirrors the door meta around the XY plane (swaps North and South facings).
NIBBLETYPE cBlockDoorHandler::MetaMirrorXY(NIBBLETYPE a_Meta)
{
    // Top panels (0x08 set) carry hinge data, not orientation; leave untouched.
    // NOTE: hinges on double doors cannot be mirrored here, because the facing
    // lives in the bottom tile while the hinge lives in the top tile, and this
    // function only ever sees one tile at a time.
    if ((a_Meta & 0x08) != 0)
    {
        return a_Meta;
    }

    NIBBLETYPE StateBits = a_Meta & 0x0C;  // open / closed flags (bits 1100)
    NIBBLETYPE Facing    = a_Meta & 0x03;  // orientation (bits 0011)

    if (Facing == 0x03)  // South -> North
    {
        return StateBits | 0x01;
    }
    if (Facing == 0x01)  // North -> South
    {
        return StateBits | 0x03;
    }
    // East / West facings are unaffected by an XY mirror.
    return a_Meta;
}
// Mirrors the door meta around the YZ plane (swaps East and West facings).
NIBBLETYPE cBlockDoorHandler::MetaMirrorYZ(NIBBLETYPE a_Meta)
{
    // Top panels (0x08 set) carry hinge data, not orientation; leave untouched.
    // NOTE: hinges on double doors cannot be mirrored here, because the facing
    // lives in the bottom tile while the hinge lives in the top tile, and this
    // function only ever sees one tile at a time.
    if ((a_Meta & 0x08) != 0)
    {
        return a_Meta;
    }

    NIBBLETYPE StateBits = a_Meta & 0x0C;  // open / closed flags (bits 1100)
    NIBBLETYPE Facing    = a_Meta & 0x03;  // orientation (bits 0011)

    if (Facing == 0x00)  // West -> East
    {
        return StateBits | 0x02;
    }
    if (Facing == 0x02)  // East -> West
    {
        return StateBits | 0x00;
    }
    // North / South facings are unaffected by a YZ mirror.
    return a_Meta;
}
| jammet/MCServer | src/Blocks/BlockDoor.cpp | C++ | apache-2.0 | 4,656 |
# Entrega del cuarto hito
**Alumno:**
- Roberto Morcillo Jiménez: [@robermorji](https://github.com/robermorji)
## Submódulo Gesco: GescoChat
**Enlace al repositorio del submódulo:** [https://github.com/Gescosolution/GescoChat](https://github.com/Gescosolution/GescoChat)
**Enlace al despliegue de la aplicación en Heroku:**
[https://gesco-chat.herokuapp.com/](https://gesco-chat.herokuapp.com/)
**Enlace al último _Release_ de la aplicación:** [https://github.com/Gescosolution/GescoChat/releases/tag/gescoChat_v0.3](https://github.com/Gescosolution/GescoChat/releases/tag/gescoChat_v0.3)
**Enlace al repositorio de Docker:**
[https://hub.docker.com/r/robermorji/planning-task/](https://hub.docker.com/r/robermorji/planning-task/)
## Submódulo Gesco: UserManagement
**Enlace al repositorio del submódulo:** [https://github.com/Gescosolution/Gesco-UserManagement](https://github.com/Gescosolution/Gesco-UserManagement)
**Enlace al despliegue de la aplicación en _OpenShift_:** [https://gescousrmanagement-jfrancisco.rhcloud.com/](https://gescousrmanagement-jfrancisco.rhcloud.com/)
**Enlace al último _Release_ de la aplicación:** [https://github.com/Gescosolution/Gesco-UserManagement/releases/tag/v0.4](https://github.com/Gescosolution/Gesco-UserManagement/releases/tag/v0.4)
**Enlace al contendor disponible en _Docker Hub_:** [https://hub.docker.com/r/jfrancisco4490/gesco-usermanagement/](https://hub.docker.com/r/jfrancisco4490/gesco-usermanagement/)
**Enlace a la documentación del proyecto actualizada:** [Herramientas de Construcción/Despliegue](https://github.com/Gescosolution/Gesco-UserManagement/blob/master/docs/project/Herramientas.md)
**Alumno:**
- Abel Josué Francisco Agra: [@jfrancisco4490](https://github.com/jfrancisco4490)
## Submódulo de PeriódicoInteractivo: estructura de la aplicación y extracción de noticias
Alumno: [José Cristóbal López Zafra](https://github.com/JCristobal)
[**Enlace hacia documentación del hito**](https://github.com/JCristobal/ProjectCC#entorno-de-pruebas-mediante-contenedores-docker)
[**Dockerfile del contenedor**](https://github.com/JCristobal/ubuntu-periodicointeractivo/blob/master/Dockerfile)
[**Contenedor disponible en DockerHub**](https://hub.docker.com/r/jcristobal/ubuntu-periodicointeractivo/)
## Submódulo de PeriódicoInteractivo: Análisis y Representación de datos
Alumno: [Miguel Fernández Villegas](https://github.com/miguelfervi)
[**Enlace hacia documentación del hito**](https://github.com/miguelfervi/ProjectCC#entorno-de-pruebas-mediante-contenedores-docker)
[**Dockerfile del contenedor**](https://github.com/miguelfervi/ubuntu-periodicointeractivo/blob/master/Dockerfile)
## Creación de una infraestructura para Cloud Robotics
**Alumno**
José Ángel Segura Muros: [@leontes](https://github.com/Leontes)
[**Enlace al repositorio del proyecto**](https://github.com/Leontes/Roboearth-Cloud-Infrastructure)
[**Dockerfile**](https://github.com/Leontes/Roboearth-Cloud-Infrastructure/blob/master/Dokerfile)
[**Contenedor base de Dockerfile usado**](https://hub.docker.com/_/ros/)
## Sistema de Mensajeria Instantanea (OpenSecureChat)
**Alumno**
Luis Alberto Segura Delgado: [@segura2010](https://github.com/segura2010)
[**Enlace al repositorio del proyecto**](https://github.com/segura2010/CC-Proyecto-OpenSecureChat)
[**Enlace a la documentación del Hito**](https://github.com/segura2010/CC-Proyecto-OpenSecureChat/blob/master/hitos_proyecto/hito_4.md)
[**Enlace al último _Release_ de la aplicación**](https://github.com/segura2010/CC-Proyecto-OpenSecureChat/releases/tag/0.0.2)
[**Dockerfile**](https://github.com/segura2010/CC-Proyecto-OpenSecureChat/blob/master/Dockerfile)
[**Contenedor disponible en DockerHub**](https://hub.docker.com/r/segura2010/cc-proyecto-opensecurechat/)
[**Entorno de pruebas disponible en BlueMix**](http://osc.eu-gb.mybluemix.net/)
## Submódulo Red social ETSIIT: Gestión de usuarios
**Enlace al repositorio de la aplicación:** [Repositorio](https://github.com/alcasla/ProyectoCloudComputing)
**Enlace al repositorio Docker:** [Repositorio](https://github.com/alcasla/ProyectoCloudComputing-Docker)
**Proceso de realización** [Hito4](https://github.com/alcasla/clases-CC-2015-16/blob/master/ejercicios/AlbertoCastilloLamas/Tema3_Contenedores/Ejercicio12.md)
| alcasla/clases-CC-2015-16 | proyecto/4.md | Markdown | apache-2.0 | 4,326 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright 2014 Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import datetime
import json
import re
from six.moves import urllib
import httmock
import requests
import six
from girder.constants import SettingKey
from tests import base
def setUpModule():
    # Enable the oauth plugin before the Girder test server starts, so that its
    # routes and settings are registered for every test in this module.
    base.enabledPlugins.append('oauth')
    base.startServer()
def tearDownModule():
    # Shut down the shared test server started in setUpModule.
    base.stopServer()
class OauthTest(base.TestCase):
    def setUp(self):
        """Create an admin user and reset the simulated provider account type."""
        base.TestCase.setUp(self)
        # girder.plugins is not available until setUp is running
        global PluginSettings
        from girder.plugins.oauth.constants import PluginSettings
        self.adminUser = self.model('user').createUser(
            email='admin@mail.com',
            login='admin',
            firstName='first',
            lastName='last',
            password='password',
            admin=True
        )
        # Specifies which test account (typically "new" or "existing") a
        # redirect to a provider will simulate authentication for
        self.accountType = None
def testDeriveLogin(self):
"""
Unit tests the _deriveLogin method of the provider classes.
"""
from girder.plugins.oauth.providers.base import ProviderBase
login = ProviderBase._deriveLogin('1234@mail.com', 'John', 'Doe')
self.assertEqual(login, 'johndoe')
login = ProviderBase._deriveLogin('hello.world.foo@mail.com', 'A', 'B')
self.assertEqual(login, 'helloworldfoo')
login = ProviderBase._deriveLogin('hello.world@mail.com', 'A', 'B', 'user2')
self.assertEqual(login, 'user2')
login = ProviderBase._deriveLogin('admin@admin.com', 'A', 'B', 'admin')
self.assertEqual(login, 'admin1')
    def _testOauth(self, providerInfo):
        """
        Drive a complete OAuth login flow against a simulated provider.

        ``providerInfo`` describes one provider: its settings keys, the regex
        its redirect URL must match, and the fake 'existing'/'new' accounts
        that the mocked provider endpoints honor (``self.accountType`` selects
        which one the mock authenticates).
        """
        # Close registration to start off, and simulate a new user
        self.model('setting').set(SettingKey.REGISTRATION_POLICY, 'closed')
        self.accountType = 'new'
        # We should get an empty listing when no providers are set up
        params = {
            'key': PluginSettings.PROVIDERS_ENABLED,
            'value': []
        }
        resp = self.request(
            '/system/setting', user=self.adminUser, method='PUT', params=params)
        self.assertStatusOk(resp)
        resp = self.request('/oauth/provider', exception=True, params={
            'redirect': 'http://localhost/#foo/bar',
            'list': True
        })
        self.assertStatusOk(resp)
        self.assertFalse(resp.json)
        # Turn on provider, but don't set other settings
        params = {
            'list': json.dumps([{
                'key': PluginSettings.PROVIDERS_ENABLED,
                'value': [providerInfo['id']]
            }])
        }
        resp = self.request(
            '/system/setting', user=self.adminUser, method='PUT', params=params)
        self.assertStatusOk(resp)
        resp = self.request('/oauth/provider', exception=True, params={
            'redirect': 'http://localhost/#foo/bar'})
        self.assertStatus(resp, 500)
        # Set up provider normally
        params = {
            'list': json.dumps([
                {
                    'key': PluginSettings.PROVIDERS_ENABLED,
                    'value': [providerInfo['id']]
                }, {
                    'key': providerInfo['client_id']['key'],
                    'value': providerInfo['client_id']['value']
                }, {
                    'key': providerInfo['client_secret']['key'],
                    'value': providerInfo['client_secret']['value']
                }
            ])
        }
        resp = self.request(
            '/system/setting', user=self.adminUser, method='PUT',
            params=params)
        self.assertStatusOk(resp)
        # No need to re-fetch and test all of these settings values; they will
        # be implicitly tested later
        # Make sure that if no list param is passed, we receive the old format
        resp = self.request('/oauth/provider', params={
            'redirect': 'http://localhost/#foo/bar'
        })
        self.assertStatusOk(resp)
        self.assertIsInstance(resp.json, dict)
        self.assertEqual(len(resp.json), 1)
        self.assertIn(providerInfo['name'], resp.json)
        self.assertRegexpMatches(
            resp.json[providerInfo['name']],
            providerInfo['url_re'])

        # This will need to be called several times, to get fresh tokens
        # (each call mints a new CSRF state token embedded in the URL).
        def getProviderResp():
            resp = self.request('/oauth/provider', params={
                'redirect': 'http://localhost/#foo/bar',
                'list': True
            })
            self.assertStatusOk(resp)
            self.assertIsInstance(resp.json, list)
            self.assertEqual(len(resp.json), 1)
            providerResp = resp.json[0]
            self.assertSetEqual(
                set(six.viewkeys(providerResp)),
                {'id', 'name', 'url'})
            self.assertEqual(providerResp['id'], providerInfo['id'])
            self.assertEqual(providerResp['name'], providerInfo['name'])
            self.assertRegexpMatches(
                providerResp['url'],
                providerInfo['url_re'])
            # The state param is "<csrfTokenId>.<redirect>"; check both halves.
            redirectParams = urllib.parse.parse_qs(
                urllib.parse.urlparse(providerResp['url']).query)
            csrfTokenParts = redirectParams['state'][0].partition('.')
            token = self.model('token').load(
                csrfTokenParts[0], force=True, objectId=False)
            self.assertLess(
                token['expires'],
                datetime.datetime.utcnow() + datetime.timedelta(days=0.30))
            self.assertEqual(
                csrfTokenParts[2],
                'http://localhost/#foo/bar')
            return providerResp

        # Try the new format listing
        getProviderResp()
        # Try callback, for a non-existant provider
        resp = self.request('/oauth/foobar/callback')
        self.assertStatus(resp, 400)
        # Try callback, without providing any params
        resp = self.request('/oauth/%s/callback' % providerInfo['id'])
        self.assertStatus(resp, 400)
        # Try callback, providing params as though the provider failed
        resp = self.request(
            '/oauth/%s/callback' % providerInfo['id'],
            params={
                'code': None,
                'error': 'some_custom_error',
            }, exception=True)
        self.assertStatus(resp, 502)
        self.assertEqual(
            resp.json['message'],
            "Provider returned error: 'some_custom_error'.")

        # This will need to be called several times, to use fresh tokens.
        # Follows the provider URL (served by the httmock handlers) and parses
        # the params out of the 302 redirect back to our callback endpoint.
        def getCallbackParams(providerResp):
            resp = requests.get(providerResp['url'], allow_redirects=False)
            self.assertEqual(resp.status_code, 302)
            callbackLoc = urllib.parse.urlparse(resp.headers['location'])
            self.assertEqual(
                callbackLoc.path,
                r'/api/v1/oauth/%s/callback' % providerInfo['id'])
            callbackLocQuery = urllib.parse.parse_qs(callbackLoc.query)
            self.assertNotHasKeys(callbackLocQuery, ('error',))
            callbackParams = {
                key: val[0] for key, val in six.viewitems(callbackLocQuery)
            }
            return callbackParams

        # Call (simulated) external provider
        getCallbackParams(getProviderResp())
        # Try callback, with incorrect CSRF token
        params = getCallbackParams(getProviderResp())
        params['state'] = 'something_wrong'
        resp = self.request('/oauth/%s/callback' % providerInfo['id'],
                            params=params)
        self.assertStatus(resp, 403)
        self.assertTrue(
            resp.json['message'].startswith('Invalid CSRF token'))
        # Try callback, with expired CSRF token
        params = getCallbackParams(getProviderResp())
        token = self.model('token').load(
            params['state'].partition('.')[0], force=True, objectId=False)
        token['expires'] -= datetime.timedelta(days=1)
        self.model('token').save(token)
        resp = self.request('/oauth/%s/callback' % providerInfo['id'],
                            params=params)
        self.assertStatus(resp, 403)
        self.assertTrue(
            resp.json['message'].startswith('Expired CSRF token'))
        # Try callback, with a valid CSRF token but no redirect
        params = getCallbackParams(getProviderResp())
        params['state'] = params['state'].partition('.')[0]
        resp = self.request('/oauth/%s/callback' % providerInfo['id'],
                            params=params)
        self.assertStatus(resp, 400)
        self.assertTrue(
            resp.json['message'].startswith('No redirect location'))
        # Try callback, with incorrect code
        params = getCallbackParams(getProviderResp())
        params['code'] = 'something_wrong'
        resp = self.request('/oauth/%s/callback' % providerInfo['id'],
                            params=params)
        self.assertStatus(resp, 502)
        # Try callback, with real parameters from provider, but still for the
        # 'new' account
        params = getCallbackParams(getProviderResp())
        resp = self.request('/oauth/%s/callback' % providerInfo['id'],
                            params=params)
        self.assertStatus(resp, 400)
        self.assertTrue(
            resp.json['message'].startswith(
                'Registration on this instance is closed.'))

        # This will need to be called several times, and will do a normal login
        # end-to-end, verifying the resulting session cookie and user document.
        def doOauthLogin(accountType):
            self.accountType = accountType
            params = getCallbackParams(getProviderResp())
            resp = self.request('/oauth/%s/callback' % providerInfo['id'],
                                params=params, isJson=False)
            self.assertStatus(resp, 303)
            self.assertEqual(resp.headers['Location'],
                             'http://localhost/#foo/bar')
            self.assertTrue('girderToken' in resp.cookie)
            resp = self.request('/user/me',
                                token=resp.cookie['girderToken'].value)
            self.assertStatusOk(resp)
            self.assertEqual(resp.json['email'],
                             providerInfo['accounts'][accountType]['user']['email'])
            self.assertEqual(resp.json['login'],
                             providerInfo['accounts'][accountType]['user']['login'])
            self.assertEqual(resp.json['firstName'],
                             providerInfo['accounts'][accountType]['user']['firstName'])
            self.assertEqual(resp.json['lastName'],
                             providerInfo['accounts'][accountType]['user']['lastName'])

        # Try callback for the 'existing' account, which should succeed
        doOauthLogin('existing')
        # Try callback for the 'new' account, with open registration
        self.model('setting').set(SettingKey.REGISTRATION_POLICY, 'open')
        doOauthLogin('new')
        # Password login for 'new' OAuth-only user should fail gracefully
        newUser = providerInfo['accounts']['new']['user']
        resp = self.request('/user/authentication',
                            basicAuth='%s:mypasswd' % newUser['login'])
        self.assertStatus(resp, 400)
        self.assertTrue(
            resp.json['message'].startswith('You don\'t have a password.'))
        # Reset password for 'new' OAuth-only user should work
        self.assertTrue(base.mockSmtp.isMailQueueEmpty())
        resp = self.request('/user/password/temporary',
                            method='PUT', params={
                                'email': providerInfo['accounts']['new']['user']['email']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['message'], 'Sent temporary access email.')
        self.assertTrue(base.mockSmtp.waitForMail())
        msg = base.mockSmtp.getMail()
        # Pull out the auto-generated token from the email
        search = re.search('<a href="(.*)">', msg)
        link = search.group(1)
        linkParts = link.split('/')
        userId = linkParts[-3]
        tokenId = linkParts[-1]
        tempToken = self.model('token').load(
            tokenId, force=True, objectId=False)
        resp = self.request('/user/password/temporary/' + userId,
                            method='GET', params={
                                'token': tokenId})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['user']['login'], newUser['login'])
        # We should now be able to change the password
        resp = self.request('/user/password',
                            method='PUT', user=resp.json['user'], params={
                                'old': tokenId,
                                'new': 'mypasswd'})
        self.assertStatusOk(resp)
        # The temp token should get deleted on password change
        # NOTE(review): tempToken here is the loaded token document, not its id
        # string -- confirm Token.load() accepts a document argument.
        token = self.model('token').load(tempToken, force=True, objectId=False)
        self.assertEqual(token, None)
        # Password login for 'new' OAuth-only user should now succeed
        resp = self.request('/user/authentication',
                            basicAuth='%s:mypasswd' % newUser['login'])
        self.assertStatusOk(resp)
@httmock.all_requests
def mockOtherRequest(self, url, request):
raise Exception('Unexpected url %s' % str(request.url))
    def testGoogleOauth(self):
        """
        Exercise the full Google OAuth login flow against mocked Google
        endpoints, for both an 'existing' Girder user and a 'new' one
        (see _testOauth for the shared flow driven by this providerInfo).
        """
        # Static description of the provider plus the two test accounts; the
        # nested mocks below validate requests against these values.
        providerInfo = {
            'id': 'google',
            'name': 'Google',
            'client_id': {
                'key': PluginSettings.GOOGLE_CLIENT_ID,
                'value': 'google_test_client_id'
            },
            'client_secret': {
                'key': PluginSettings.GOOGLE_CLIENT_SECRET,
                'value': 'google_test_client_secret'
            },
            'allowed_callback_re':
                r'^http://127\.0\.0\.1(?::\d+)?/api/v1/oauth/google/callback$',
            'url_re': r'^https://accounts\.google\.com/o/oauth2/auth',
            'accounts': {
                'existing': {
                    'auth_code': 'google_existing_auth_code',
                    'access_token': 'google_existing_test_token',
                    'user': {
                        'login': self.adminUser['login'],
                        'email': self.adminUser['email'],
                        'firstName': self.adminUser['firstName'],
                        'lastName': self.adminUser['lastName'],
                        'oauth': {
                            'provider': 'google',
                            'id': '5326'
                        }
                    }
                },
                'new': {
                    'auth_code': 'google_new_auth_code',
                    'access_token': 'google_new_test_token',
                    'user': {
                        # this login will be created internally by _deriveLogin
                        'login': 'googleuser',
                        'email': 'google_user@mail.com',
                        'firstName': 'John',
                        'lastName': 'Doe',
                        'oauth': {
                            'provider': 'google',
                            'id': '9876'
                        }
                    }
                }
            }
        }

        # Mock of Google's authorization endpoint: validates the outgoing
        # request parameters and redirects back to the callback with a code.
        @httmock.urlmatch(scheme='https', netloc='^accounts.google.com$',
                          path='^/o/oauth2/auth$', method='GET')
        def mockGoogleRedirect(url, request):
            try:
                params = urllib.parse.parse_qs(url.query)
                self.assertEqual(
                    params['response_type'],
                    ['code'])
                self.assertEqual(
                    params['access_type'],
                    ['online'])
                self.assertEqual(
                    params['scope'],
                    ['profile email'])
            except (KeyError, AssertionError) as e:
                return {
                    'status_code': 400,
                    'content': json.dumps({
                        'error': repr(e)
                    })
                }
            try:
                self.assertEqual(
                    params['client_id'],
                    [providerInfo['client_id']['value']])
            except (KeyError, AssertionError) as e:
                return {
                    'status_code': 401,
                    'content': json.dumps({
                        'error': repr(e)
                    })
                }
            try:
                self.assertRegexpMatches(
                    params['redirect_uri'][0],
                    providerInfo['allowed_callback_re'])
                state = params['state'][0]
                # Nothing to test for state, since provider doesn't care
            except (KeyError, AssertionError) as e:
                return {
                    'status_code': 400,
                    'content': json.dumps({
                        'error': repr(e)
                    })
                }
            # Simulate the provider redirecting back to Girder's callback.
            returnQuery = urllib.parse.urlencode({
                'state': state,
                'code': providerInfo['accounts'][self.accountType]['auth_code']
            })
            return {
                'status_code': 302,
                'headers': {
                    'Location': '%s?%s' % (params['redirect_uri'][0],
                                           returnQuery)
                }
            }

        # Mock of Google's token endpoint: exchanges the auth code for the
        # matching account's access token.
        @httmock.urlmatch(scheme='https', netloc='^accounts.google.com$',
                          path='^/o/oauth2/token$', method='POST')
        def mockGoogleToken(url, request):
            try:
                params = urllib.parse.parse_qs(request.body)
                self.assertEqual(
                    params['client_id'],
                    [providerInfo['client_id']['value']])
            except (KeyError, AssertionError) as e:
                return {
                    'status_code': 401,
                    'content': json.dumps({
                        'error': repr(e)
                    })
                }
            try:
                self.assertEqual(
                    params['grant_type'],
                    ['authorization_code'])
                self.assertEqual(
                    params['client_secret'],
                    [providerInfo['client_secret']['value']])
                self.assertRegexpMatches(
                    params['redirect_uri'][0],
                    providerInfo['allowed_callback_re'])
                # Find the account whose auth code matches the request.
                for account in six.viewvalues(providerInfo['accounts']):
                    if account['auth_code'] == params['code'][0]:
                        break
                else:
                    self.fail()
            except (KeyError, AssertionError) as e:
                return {
                    'status_code': 400,
                    'content': json.dumps({
                        'error': repr(e)
                    })
                }
            return json.dumps({
                'token_type': 'Bearer',
                'access_token': account['access_token'],
                'expires_in': 3546,
                'id_token': 'google_id_token'
            })

        # Mock of the Google+ people API: returns profile data for the
        # account matching the Bearer token.
        @httmock.urlmatch(scheme='https', netloc='^www.googleapis.com$',
                          path='^/plus/v1/people/me$', method='GET')
        def mockGoogleApi(url, request):
            try:
                for account in six.viewvalues(providerInfo['accounts']):
                    if 'Bearer %s' % account['access_token'] == \
                            request.headers['Authorization']:
                        break
                else:
                    self.fail()
                params = urllib.parse.parse_qs(url.query)
                self.assertSetEqual(
                    set(params['fields'][0].split(',')),
                    {'id', 'emails', 'name'})
            except AssertionError as e:
                return {
                    'status_code': 401,
                    'content': json.dumps({
                        'error': repr(e)
                    })
                }
            # The 'account' email is listed second to verify that the plugin
            # picks the right one rather than just the first entry.
            return json.dumps({
                'id': account['user']['oauth']['id'],
                'name': {
                    'givenName': account['user']['firstName'],
                    'familyName': account['user']['lastName']
                },
                'emails': [
                    {
                        'type': 'other',
                        'value': 'secondary@email.com'
                    }, {
                        'type': 'account',
                        'value': account['user']['email']
                    }
                ]
            })

        with httmock.HTTMock(
            mockGoogleRedirect,
            mockGoogleToken,
            mockGoogleApi,
            # Must keep "mockOtherRequest" last
            self.mockOtherRequest
        ):
            self._testOauth(providerInfo)
    def testGithubOauth(self):
        """
        Exercise the full GitHub OAuth login flow against mocked GitHub
        endpoints, for both an 'existing' Girder user and a 'new' one
        (see _testOauth for the shared flow driven by this providerInfo).
        """
        # Static description of the provider plus the two test accounts; the
        # nested mocks below validate requests against these values.
        providerInfo = {
            'id': 'github',
            'name': 'GitHub',
            'client_id': {
                'key': PluginSettings.GITHUB_CLIENT_ID,
                'value': 'github_test_client_id'
            },
            'client_secret': {
                'key': PluginSettings.GITHUB_CLIENT_SECRET,
                'value': 'github_test_client_secret'
            },
            'allowed_callback_re':
                r'^http://127\.0\.0\.1(?::\d+)?/api/v1/oauth/github/callback$',
            'url_re': r'^https://github\.com/login/oauth/authorize',
            'accounts': {
                'existing': {
                    'auth_code': 'github_existing_auth_code',
                    'access_token': 'github_existing_test_token',
                    'user': {
                        'login': self.adminUser['login'],
                        'email': self.adminUser['email'],
                        'firstName': self.adminUser['firstName'],
                        'lastName': self.adminUser['lastName'],
                        'oauth': {
                            'provider': 'github',
                            'id': '2399'
                        }
                    }
                },
                'new': {
                    'auth_code': 'github_new_auth_code',
                    'access_token': 'github_new_test_token',
                    'user': {
                        # login may be provided externally by GitHub; for
                        # simplicity here, do not use a username with whitespace
                        # or underscores
                        'login': 'jane83',
                        'email': 'github_user@mail.com',
                        'firstName': 'Jane',
                        'lastName': 'Doe',
                        'oauth': {
                            'provider': 'github',
                            'id': 1234
                        }
                    }
                }
            }
        }

        # Mock of GitHub's authorization endpoint: validates the outgoing
        # request and redirects back to the callback with a code (or error).
        @httmock.urlmatch(scheme='https', netloc='^github.com$',
                          path='^/login/oauth/authorize$', method='GET')
        def mockGithubRedirect(url, request):
            redirectUri = None
            try:
                params = urllib.parse.parse_qs(url.query)
                # Check redirect_uri first, so other errors can still redirect
                redirectUri = params['redirect_uri'][0]
                self.assertEqual(
                    params['client_id'],
                    [providerInfo['client_id']['value']])
            except (KeyError, AssertionError) as e:
                return {
                    'status_code': 404,
                    'content': json.dumps({
                        'error': repr(e)
                    })
                }
            try:
                self.assertRegexpMatches(
                    redirectUri,
                    providerInfo['allowed_callback_re'])
                state = params['state'][0]
                # Nothing to test for state, since provider doesn't care
                self.assertEqual(
                    params['scope'],
                    ['user:email'])
            except (KeyError, AssertionError) as e:
                returnQuery = urllib.parse.urlencode({
                    'error': repr(e),
                })
            else:
                returnQuery = urllib.parse.urlencode({
                    'state': state,
                    'code': providerInfo['accounts'][self.accountType]['auth_code']
                })
            return {
                'status_code': 302,
                'headers': {
                    'Location': '%s?%s' % (redirectUri, returnQuery)
                }
            }

        # Mock of GitHub's token endpoint: exchanges the auth code for the
        # matching account's access token (errors are returned in the body).
        @httmock.urlmatch(scheme='https', netloc='^github.com$',
                          path='^/login/oauth/access_token$', method='POST')
        def mockGithubToken(url, request):
            try:
                self.assertEqual(request.headers['Accept'], 'application/json')
                params = urllib.parse.parse_qs(request.body)
                self.assertEqual(
                    params['client_id'],
                    [providerInfo['client_id']['value']])
            except (KeyError, AssertionError) as e:
                return {
                    'status_code': 404,
                    'content': json.dumps({
                        'error': repr(e)
                    })
                }
            try:
                # Find the account whose auth code matches the request.
                for account in six.viewvalues(providerInfo['accounts']):
                    if account['auth_code'] == params['code'][0]:
                        break
                else:
                    self.fail()
                self.assertEqual(
                    params['client_secret'],
                    [providerInfo['client_secret']['value']])
                self.assertRegexpMatches(
                    params['redirect_uri'][0],
                    providerInfo['allowed_callback_re'])
            except (KeyError, AssertionError) as e:
                returnBody = json.dumps({
                    'error': repr(e),
                    'error_description': repr(e)
                })
            else:
                returnBody = json.dumps({
                    'token_type': 'bearer',
                    'access_token': account['access_token'],
                    'scope': 'user:email'
                })
            return {
                'status_code': 200,
                'headers': {
                    'Content-Type': 'application/json'
                },
                'content': returnBody
            }

        # Mock of GitHub's user API: returns profile data for the account
        # matching the token in the Authorization header.
        @httmock.urlmatch(scheme='https', netloc='^api.github.com$',
                          path='^/user$', method='GET')
        def mockGithubApiUser(url, request):
            try:
                for account in six.viewvalues(providerInfo['accounts']):
                    if 'token %s' % account['access_token'] == \
                            request.headers['Authorization']:
                        break
                else:
                    self.fail()
            except AssertionError as e:
                return {
                    'status_code': 401,
                    'content': json.dumps({
                        'message': repr(e)
                    })
                }
            return json.dumps({
                'id': account['user']['oauth']['id'],
                'login': account['user']['login'],
                'name': '%s %s' % (account['user']['firstName'],
                                   account['user']['lastName'])
            })

        # Mock of GitHub's email API: the primary address is listed second to
        # verify that the plugin selects it rather than the first entry.
        @httmock.urlmatch(scheme='https', netloc='^api.github.com$',
                          path='^/user/emails$', method='GET')
        def mockGithubApiEmail(url, request):
            try:
                for account in six.viewvalues(providerInfo['accounts']):
                    if 'token %s' % account['access_token'] == \
                            request.headers['Authorization']:
                        break
                else:
                    self.fail()
            except AssertionError as e:
                return {
                    'status_code': 401,
                    'content': json.dumps({
                        'message': repr(e)
                    })
                }
            return json.dumps([
                {
                    'primary': False,
                    'email': 'secondary@email.com',
                    'verified': True
                }, {
                    'primary': True,
                    'email': account['user']['email'],
                    'verified': True
                }
            ])

        with httmock.HTTMock(
            mockGithubRedirect,
            mockGithubToken,
            mockGithubApiUser,
            mockGithubApiEmail,
            # Must keep "mockOtherRequest" last
            self.mockOtherRequest
        ):
            self._testOauth(providerInfo)
| opadron/girder | plugins/oauth/plugin_tests/oauth_test.py | Python | apache-2.0 | 29,618 |
/*
* Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
* Copyright [2016-2019] EMBL-European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Copyright (C) 2003 EBI, GRL
*
* This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation,
* Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.ensembl.healthcheck.testcase.generic;
import java.sql.Connection;
import java.util.Collection;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.ensembl.healthcheck.DatabaseRegistryEntry;
import org.ensembl.healthcheck.DatabaseType;
import org.ensembl.healthcheck.ReportManager;
import org.ensembl.healthcheck.Team;
import org.ensembl.healthcheck.testcase.Priority;
import org.ensembl.healthcheck.testcase.SingleDatabaseTestCase;
import org.ensembl.healthcheck.util.DBUtils;
/**
* Check that all the non-optional meta keys listed in the production database are present, and that all the meta keys are valid.
*/
public class ProductionMeta extends SingleDatabaseTestCase {

    /**
     * Constructor.
     */
    public ProductionMeta() {
        setDescription("Check that all the non-optional meta keys listed in the production database are present, and that all the meta keys are valid.");
        setPriority(Priority.AMBER);
        setEffect("Unknown/incorrect meta keys.");
        setTeamResponsible(Team.GENEBUILD);
    }

    /**
     * This test Does not apply to sangervega dbs
     */
    public void types() {
        removeAppliesToType(DatabaseType.SANGER_VEGA);
    }

    /**
     * Run the test.
     *
     * @param dbre
     *            The database to use.
     * @return true if the test passed.
     *
     */
    @SuppressWarnings("unchecked")
    public boolean run(DatabaseRegistryEntry dbre) {

        boolean result = true;

        Connection con = dbre.getConnection();
        DatabaseRegistryEntry prodDbre = getProductionDatabase();

        // we'll use a different query depending on the database type; also some keys are only for certain species
        String databaseType = dbre.getType().getName(); // will be core, otherfeatures etc

        // Meta keys actually present in the database under test. Queried only
        // once: the previous implementation ran this identical query twice.
        List<String> dbMetaKeys = DBUtils.getColumnValuesList(con, "SELECT DISTINCT(meta_key) FROM meta");

        // First check that keys present in the database are all valid and current
        List<String> validMetaKeys = fetchProductionMetaKeys(prodDbre, databaseType, false);

        // remove the list of valid keys from the list of keys in the database, the remainder (if any) are invalid
        Collection<String> dbOnly = (Collection<String>) CollectionUtils.subtract(dbMetaKeys, validMetaKeys);

        if (!dbOnly.isEmpty()) {
            for (String key : dbOnly) {
                String msg = String.format("Meta key '%s' is not in the allowed meta key list from production", key);
                ReportManager.problem(this, con, msg);
            }
            result = false;
        } else {
            ReportManager.correct(this, con, "Set of meta keys matches the current valid list in the production database.");
        }

        // now check that all non-optional keys in the production database appear here
        List<String> requiredMetaKeys = fetchProductionMetaKeys(prodDbre, databaseType, true);

        // remove the keys in the database from the non-optional list; any remaining in the non-optional list are
        // missing from the database
        Collection<String> productionOnly = (Collection<String>) CollectionUtils.subtract(requiredMetaKeys, dbMetaKeys);

        if (!productionOnly.isEmpty()) {
            for (String key : productionOnly) {
                String msg = String.format("Missing required meta key: %s", key);
                ReportManager.problem(this, con, msg);
            }
            result = false;
        } else {
            ReportManager.correct(this, con, "All current required meta keys are present.");
        }

        return result;

    } // run

    /**
     * Fetches the names of the current meta keys for the given database type
     * from the production database.
     *
     * @param prodDbre
     *            production database registry entry
     * @param databaseType
     *            database type name (core, otherfeatures etc)
     * @param requiredOnly
     *            if true, restrict the result to non-optional keys
     * @return list of matching meta key names
     */
    private List<String> fetchProductionMetaKeys(DatabaseRegistryEntry prodDbre, String databaseType, boolean requiredOnly) {
        StringBuilder sql = new StringBuilder();
        sql.append("SELECT mk.name FROM meta_key mk ");
        sql.append("WHERE FIND_IN_SET('").append(databaseType).append("', mk.db_type) > 0 ");
        sql.append("AND mk.is_current = 1");
        if (requiredOnly) {
            sql.append(" AND mk.is_optional = 0");
        }
        return DBUtils.getColumnValuesList(prodDbre.getConnection(), sql.toString());
    }

} // ProductionMeta
| Ensembl/ensj-healthcheck | src/org/ensembl/healthcheck/testcase/generic/ProductionMeta.java | Java | apache-2.0 | 5,428 |
/*
* Copyright 2017 Anton Wierenga
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.antonwierenga.sqltemplatecli.domain
/** Immutable holder for a template's name. */
class Template(val name: String)
| antonwierenga/sql-template-cli | src/main/scala/com/antonwierenga/sqltemplatecli/domain/Template.scala | Scala | apache-2.0 | 686 |
using System;
using System.Collections.Generic;
using System.Data;
using Afrodite.Abstract;
using Afrodite.Common;
using System.Linq;
using CsvHelper;
using System.IO;
using Afrodite.Concrete;
namespace Afrodite
{
public class DatabaseStateLogger : IStateLogger, IDisposable
{
private readonly IDbConnection connection;
public DatabaseStateLogger (IDbConnection connection, string stateTableName)
{
this.connection = connection;
if (!connection.TableExists (stateTableName))
{
throw new InvalidOperationException (string.Format ("{0} doesn't exists.", stateTableName));
}
}
public void SaveState<T> (IComponentState<T> state)
{
var cmd = connection.CreateCommand ();
cmd.CommandText =
"insert into {0} (lbs_statusdate,loadballancer_machines_lbm_id," +
"lbs_cpuussage,lbs_freememory,lbs_freediskspace) " +
"values ($1,$2,$3,$4,$5)";
IDbDataParameter param = cmd.CreateParameter ();
param.DbType = DbType.DateTime;
param.ParameterName = "1";
param.Value = DateTime.UtcNow;
cmd.Parameters.Add (param);
param = cmd.CreateParameter ();
param.DbType = DbType.UInt64;
param.ParameterName = "2";
param.Value = state.MachineNumber;
cmd.Parameters.Add (param);
param = cmd.CreateParameter ();
param.DbType = DbType.Double;
param.ParameterName = "3";
param.Value = state.CpuUsages.Average (x => (double)x.Value);
cmd.Parameters.Add (param);
param = cmd.CreateParameter ();
param.DbType = DbType.UInt64;
param.ParameterName = "4";
param.Value = state.AviableMemory;
cmd.Parameters.Add (param);
param = cmd.CreateParameter ();
param.DbType = DbType.UInt64;
param.ParameterName = "5";
param.Value = state.FreeDiskSpace;
cmd.Parameters.Add (param);
cmd.ExecuteNonQuery ();
}
public IEnumerable<IComponentState<object>> GetAllStates ()
{
throw new NotImplementedException ();
}
public void Dispose ()
{
connection.Dispose ();
}
}
} | wjch-krl/Affrodite | Afrodite/DatabaseStateLogger.cs | C# | apache-2.0 | 1,972 |
#include "mocks.h"
#include <memory>
#include "source/common/stats/symbol_table_impl.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
using testing::_;
using testing::Invoke;
using testing::NiceMock;
using testing::ReturnPointee;
using testing::ReturnRef;
namespace Envoy {
namespace Stats {
// Wires the mock's accessors to read the public used_/value_/latch_ members,
// so tests drive behavior by assigning those fields directly.
MockCounter::MockCounter() {
  ON_CALL(*this, used()).WillByDefault(ReturnPointee(&used_));
  ON_CALL(*this, value()).WillByDefault(ReturnPointee(&value_));
  ON_CALL(*this, latch()).WillByDefault(ReturnPointee(&latch_));
}
MockCounter::~MockCounter() = default;
// Starts unused with value 0 in Accumulate import mode; accessors reflect the
// current values of the corresponding public members.
MockGauge::MockGauge() : used_(false), value_(0), import_mode_(ImportMode::Accumulate) {
  ON_CALL(*this, used()).WillByDefault(ReturnPointee(&used_));
  ON_CALL(*this, value()).WillByDefault(ReturnPointee(&value_));
  ON_CALL(*this, importMode()).WillByDefault(ReturnPointee(&import_mode_));
}
MockGauge::~MockGauge() = default;
// Accessors reflect the public used_/value_ members, as with the other mocks.
MockTextReadout::MockTextReadout() {
  ON_CALL(*this, used()).WillByDefault(ReturnPointee(&used_));
  ON_CALL(*this, value()).WillByDefault(ReturnPointee(&value_));
}
MockTextReadout::~MockTextReadout() = default;
// unit() reflects the public unit_ member. recordValue() forwards each value
// to the associated store's sinks, if a store_ has been assigned by the test.
MockHistogram::MockHistogram() {
  ON_CALL(*this, unit()).WillByDefault(ReturnPointee(&unit_));
  ON_CALL(*this, recordValue(_)).WillByDefault(Invoke([this](uint64_t value) {
    if (store_ != nullptr) {
      store_->deliverHistogramToSinks(*this, value);
    }
  }));
}
MockHistogram::~MockHistogram() = default;
// Like MockHistogram, but additionally serves both interval and cumulative
// statistics from the same histogram_stats_ object.
MockParentHistogram::MockParentHistogram() {
  ON_CALL(*this, used()).WillByDefault(ReturnPointee(&used_));
  ON_CALL(*this, unit()).WillByDefault(ReturnPointee(&unit_));
  ON_CALL(*this, recordValue(_)).WillByDefault(Invoke([this](uint64_t value) {
    if (store_ != nullptr) {
      store_->deliverHistogramToSinks(*this, value);
    }
  }));
  ON_CALL(*this, intervalStatistics()).WillByDefault(ReturnRef(*histogram_stats_));
  ON_CALL(*this, cumulativeStatistics()).WillByDefault(ReturnRef(*histogram_stats_));
}
MockParentHistogram::~MockParentHistogram() = default;
// Snapshot accessors return references to the mock's member containers, which
// tests populate directly.
MockMetricSnapshot::MockMetricSnapshot() {
  ON_CALL(*this, counters()).WillByDefault(ReturnRef(counters_));
  ON_CALL(*this, gauges()).WillByDefault(ReturnRef(gauges_));
  ON_CALL(*this, histograms()).WillByDefault(ReturnRef(histograms_));
}
MockMetricSnapshot::~MockMetricSnapshot() = default;
// These mocks need no default wiring; ctor/dtor are defaulted out-of-line.
MockSink::MockSink() = default;
MockSink::~MockSink() = default;
MockSinkPredicates::MockSinkPredicates() = default;
MockSinkPredicates::~MockSinkPredicates() = default;
// counter()/gauge() always return the fixed counter_/gauge_ members.
// histogram() lazily creates a NiceMock<MockHistogram> (owned via
// histograms_), tagging it with the requested name/unit and this store so its
// recordValue() can deliver to sinks. histogramFromString() delegates to the
// real TestStore implementation.
MockStore::MockStore() {
  ON_CALL(*this, counter(_)).WillByDefault(ReturnRef(counter_));
  ON_CALL(*this, gauge(_, _)).WillByDefault(ReturnRef(gauge_));
  ON_CALL(*this, histogram(_, _))
      .WillByDefault(Invoke([this](const std::string& name, Histogram::Unit unit) -> Histogram& {
        auto* histogram = new NiceMock<MockHistogram>(); // symbol_table_);
        histogram->name_ = name;
        histogram->unit_ = unit;
        histogram->store_ = this;
        histograms_.emplace_back(histogram);
        return *histogram;
      }));
  ON_CALL(*this, histogramFromString(_, _))
      .WillByDefault(Invoke([this](const std::string& name, Histogram::Unit unit) -> Histogram& {
        return TestUtil::TestStore::histogramFromString(name, unit);
      }));
}
MockStore::~MockStore() = default;
MockIsolatedStatsStore::MockIsolatedStatsStore() = default;
MockIsolatedStatsStore::~MockIsolatedStatsStore() = default;
MockStatsMatcher::MockStatsMatcher() = default;
MockStatsMatcher::~MockStatsMatcher() = default;
} // namespace Stats
} // namespace Envoy
| lyft/envoy | test/mocks/stats/mocks.cc | C++ | apache-2.0 | 3,539 |
{% extends "base.html" %}
{% block content %}
<h2>Links</h2>
<p>
Take a gander at these links to some of our friends, interesting and useful resources, and more.
</p>
<div class="row">
<div class="col-sm-6">
<h3>Site members:</h3>
<ul>
<li>
<p>
Roland Taylor:<br>
<a href="http://about.me/rolandixor">http://about.me/rolandixor</a>
</p>
</li>
<li>
<p>
Joshua P. Bell:<br>
<a href="http://www.joshuapbell.com/">http://www.joshuapbell.com/</a>
</p>
</li>
<li>
<p>
James Gifford:<br>
<a href="https://github.com/jrgifford">https://github.com/jrgifford</a>
</p>
</li>
<li>
<p>
Nathan Osman:<br>
<a href="http://quickmediasolutions.com/">http://quickmediasolutions.com/</a>
</p>
</li>
<li>
<p>
Amith K.K.:<br>
<a href="http://amithkk.com/">http://amithkk.com/</a>
</p>
</li>
<li>
<p>
Nitin Venkatesh:<br>
<a href="http://nitstorm.github.io/">http://nitstorm.github.io/</a>
</p>
</li>
</ul>
</div>
<div class="col-sm-6">
<h3>Social pages:</h3>
<p>
Drop us a line on your favorite social network.
</p>
<p>
<a href="https://www.facebook.com/pages/2buntucom/129981420418132"
title="2buntu on Facebook"
target="_blank">
<span class="fa fa-facebook fa-fw"></span>
Facebook
</a>
</p>
<p>
<a href="https://plus.google.com/+2buntu"
title="2buntu on Google+"
target="_blank">
<span class="fa fa-google-plus fa-fw"></span>
Google+
</a>
</p>
<p>
<a href="https://twitter.com/2buntu"
title="2buntu on Twitter"
target="_blank">
<span class="fa fa-twitter fa-fw"></span>
Twitter
</a>
</p>
</div>
</div>
{% endblock %}
| 2buntu/2buntu-blog | twobuntu/templates/pages/links.html | HTML | apache-2.0 | 2,769 |
/*
* JBoss, Home of Professional Open Source
* Copyright 2010, Red Hat, Inc., and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.weld.tck;
import jakarta.enterprise.context.spi.Context;
import org.jboss.cdi.tck.spi.Contexts;
import org.jboss.weld.Container;
import org.jboss.weld.context.ApplicationContext;
import org.jboss.weld.context.DependentContext;
import org.jboss.weld.context.ManagedContext;
import org.jboss.weld.context.RequestContext;
import org.jboss.weld.context.http.HttpRequestContext;
import org.jboss.weld.util.ForwardingContext;
public class ContextsImpl implements Contexts<Context> {
public RequestContext getRequestContext() {
return Container.instance().deploymentManager().instance().select(HttpRequestContext.class).get();
}
public void setActive(Context context) {
context = ForwardingContext.unwrap(context);
if (context instanceof ManagedContext) {
((ManagedContext) context).activate();
} else if (context instanceof ApplicationContext) {
// No-op, always active
} else {
throw new UnsupportedOperationException();
}
}
public void setInactive(Context context) {
context = ForwardingContext.unwrap(context);
if (context instanceof ManagedContext) {
((ManagedContext) context).deactivate();
} else {
throw new UnsupportedOperationException();
}
}
public DependentContext getDependentContext() {
return Container.instance().deploymentManager().instance().select(DependentContext.class).get();
}
public void destroyContext(Context context) {
context = ForwardingContext.unwrap(context);
if (context instanceof ManagedContext) {
ManagedContext managedContext = (ManagedContext) context;
managedContext.invalidate();
managedContext.deactivate();
managedContext.activate();
} else if (context instanceof ApplicationContext) {
((ApplicationContext) context).invalidate();
} else {
throw new UnsupportedOperationException();
}
}
}
| weld/core | porting-package/src/main/java/org/jboss/weld/tck/ContextsImpl.java | Java | apache-2.0 | 2,825 |
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package org.ofbiz.service.engine;
import static org.ofbiz.base.util.UtilGenerics.cast;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.script.ScriptContext;
import javax.script.ScriptException;
import org.ofbiz.base.util.Assert;
import org.ofbiz.base.util.Debug;
import org.ofbiz.base.util.ScriptUtil;
import org.ofbiz.service.DispatchContext;
import org.ofbiz.service.GenericServiceException;
import org.ofbiz.service.ModelService;
import org.ofbiz.service.ServiceDispatcher;
import org.ofbiz.service.ServiceUtil;
/**
* Generic Script Service Engine. This service engine uses the javax.script package (JSR-223) to invoke scripts or script functions.
* <p>The script service engine will put the following artifacts in the script engine's bindings:<br />
* <ul>
* <li><code>parameters</code> - the service attributes <code>Map</code></li>
* <li><code>dctx</code> - a <code>DispatchContext</code> instance</li>
* <li><code>dispatcher</code> - a <code>LocalDispatcher</code> instance</li>
* <li><code>delegator</code> - a <code>Delegator</code> instance</li>
* </ul></p>
* <p>If the service definition includes an invoke attribute, then the matching script function/method will be called
* with a single argument - the bindings <code>Map</code>.</p>
*/
public final class ScriptEngine extends GenericAsyncEngine {
public static final String module = ScriptEngine.class.getName();
private static final Set<String> protectedKeys = createProtectedKeys();
private static Set<String> createProtectedKeys() {
Set<String> newSet = new HashSet<String>();
/* Commenting out for now because some scripts write to the parameters Map - which should not be allowed.
newSet.add(ScriptUtil.PARAMETERS_KEY);
*/
newSet.add("dctx");
newSet.add("dispatcher");
newSet.add("delegator");
return Collections.unmodifiableSet(newSet);
}
public ScriptEngine(ServiceDispatcher dispatcher) {
super(dispatcher);
}
@Override
public Map<String, Object> runSync(String localName, ModelService modelService, Map<String, Object> context) throws GenericServiceException {
Assert.notNull("localName", localName, "modelService.location", modelService.location, "context", context);
Map<String, Object> params = new HashMap<String, Object>();
params.putAll(context);
context.put(ScriptUtil.PARAMETERS_KEY, params);
DispatchContext dctx = dispatcher.getLocalContext(localName);
context.put("dctx", dctx);
context.put("dispatcher", dctx.getDispatcher());
context.put("delegator", dispatcher.getDelegator());
try {
ScriptContext scriptContext = ScriptUtil.createScriptContext(context, protectedKeys);
Object resultObj = ScriptUtil.executeScript(getLocation(modelService), modelService.invoke, scriptContext, null);
if (resultObj == null) {
resultObj = scriptContext.getAttribute(ScriptUtil.RESULT_KEY);
}
if (resultObj != null && resultObj instanceof Map<?, ?>) {
return cast(resultObj);
}
Map<String, Object> result = ServiceUtil.returnSuccess();
result.putAll(modelService.makeValid(scriptContext.getBindings(ScriptContext.ENGINE_SCOPE), "OUT"));
return result;
} catch (ScriptException se) {
return ServiceUtil.returnError(se.getMessage());
} catch (Exception e) {
Debug.logWarning(e, "Error invoking service " + modelService.name + ": ", module);
throw new GenericServiceException(e);
}
}
@Override
public void runSyncIgnore(String localName, ModelService modelService, Map<String, Object> context) throws GenericServiceException {
runSync(localName, modelService, context);
}
}
| ofbizfriends/vogue | framework/service/src/org/ofbiz/service/engine/ScriptEngine.java | Java | apache-2.0 | 4,921 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0) on Fri Aug 09 15:47:06 EDT 2013 -->
<title>Uses of Class org.drip.product.creator.CDSBuilder</title>
<meta name="date" content="2013-08-09">
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.drip.product.creator.CDSBuilder";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../org/drip/product/creator/CDSBuilder.html" title="class in org.drip.product.creator">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?org/drip/product/creator/\class-useCDSBuilder.html" target="_top">Frames</a></li>
<li><a href="CDSBuilder.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class org.drip.product.creator.CDSBuilder" class="title">Uses of Class<br>org.drip.product.creator.CDSBuilder</h2>
</div>
<div class="classUseContainer">No usage of org.drip.product.creator.CDSBuilder</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../org/drip/product/creator/CDSBuilder.html" title="class in org.drip.product.creator">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?org/drip/product/creator/class-use/CDSBuilder.html" target="_top">Frames</a></li>
<li><a href="CDSBuilder.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
| tectronics/splinelibrary | 2.2/docs/Javadoc/org/drip/product/creator/class-use/CDSBuilder.html | HTML | apache-2.0 | 4,242 |
/*
All functions related to creating a Slider item.
*/
// Module-level counter that hands out unique Slider id suffixes.
var slideCount = 1;
// Advances the counter; call once per slider that gets created.
function addSlide() {
    slideCount += 1;
}
// Returns the id suffix to use for the next slider.
function getSlide() {
    return slideCount;
}
// Adds the Slider as well as the 'Edit' and 'delete' buttons.
// Creates a new Slider row (a jQuery UI slider plus 'Edit' and delete
// buttons) and appends it to the content pane of the selected tab.
function slider() {
    var listItem = document.createElement("LI");
    listItem.setAttribute('class', 'base');
    // The slider div gets a unique id derived from the global counter.
    var sliderDiv = document.createElement("DIV");
    sliderDiv.setAttribute('class', 'slideStuff');
    var sliderId = "slider_" + String(getSlide());
    addSlide();
    sliderDiv.setAttribute('id', sliderId);
    var editButton = document.createElement('button');
    editButton.setAttribute('content', 'test content');
    editButton.setAttribute('class', 'properties');
    editButton.innerHTML = 'Edit';
    var deleteButton = document.createElement('button');
    deleteButton.setAttribute('class', 'delete');
    deleteButton.innerHTML = 'x';
    listItem.appendChild(sliderDiv);
    listItem.appendChild(editButton);
    listItem.appendChild(deleteButton);
    // Attach the row to the content container of the selected tab.
    var tabId = String(getSelectedTabId());
    var containerId = document.getElementById(tabId).children[0].id;
    document.getElementById(containerId).appendChild(listItem);
    // Turn every .slideStuff div into a jQuery UI slider widget.
    $(function() {
        $(".slideStuff").slider();
    });
    // If the visible tab already holds form fields, rebuild them.
    var activePanelId = $("#tabs .ui-tabs-panel:visible").attr("id");
    var fields = document.getElementById(activePanelId).getElementsByClassName("fieldClass");
    if (fields.length > 0) {
        remake();
    }
}
//Is called when the Edit button is clicked. Creates the appropriate Properties.
/*
 * Builds the Properties panel ("list_3") for a Slider widget: a title row
 * plus a textbox that edits the slider's label. The label is stored as a
 * trailing text node inside the slider's parent <li>.
 *
 * Fixes vs. original: the second `var node` redeclaration (which shadowed
 * the title <li> already appended above) is renamed, and the unused
 * `linebreak` element is removed.
 *
 * myValue - id of the slider's .slideStuff div whose properties are edited.
 */
function slideProps(myValue) {
    document.getElementById("list_3").innerHTML = "";
    // Properties title.
    var titleItem = document.createElement("LI");
    var titleText = document.createTextNode("Slider");
    var el_span = document.createElement('span');
    el_span.setAttribute('class', 'propLabel');
    el_span.appendChild(titleText);
    titleItem.appendChild(el_span);
    document.getElementById("list_3").appendChild(titleItem);
    // Textbox for Label change.
    var labelItem = document.createElement("LI");
    var label = document.createTextNode("Label: ");
    var input = document.createElement('input');
    input.setAttribute('type', 'text');
    input.setAttribute('id', 'selector');
    // Pre-fill the textbox with the slider's existing label, if one was
    // previously stored as child node index 3 of the slider's parent.
    var elem = document.getElementById(myValue).parentNode;
    var name = "";
    var existingLabel = elem.childNodes[3];
    if (typeof existingLabel !== 'undefined') {
        if (existingLabel.nodeType == 3) {
            name = String(existingLabel.data);
        }
    }
    if (name != "") {
        input.setAttribute('value', name);
    }
    input.addEventListener("change", function() {
        setValues(myValue, input);
    });
    // Replaces the slider's trailing label text node with the new value.
    function setValues(myVal, field) {
        var newLabel = document.createTextNode(String(field.value));
        var parent = document.getElementById(myVal).parentNode;
        if (parent.lastChild.nodeType == 3) {
            parent.lastChild.remove();
        }
        document.getElementById(myVal).parentNode.appendChild(newLabel);
    }
    labelItem.appendChild(label);
    labelItem.appendChild(input);
    document.getElementById("list_3").appendChild(labelItem);
}
| kevcorky/Task-Builder | javascript/widgets/Slider.js | JavaScript | apache-2.0 | 3,104 |
package com.bitgate.nucleus.render;
import com.bitgate.nucleus.render.tag.DocumentTag;
import com.bitgate.nucleus.util.VariableStore;
/**
 * Bean tracking the state of a document that is actively being rendered.
 * Consumed by the {@link DocumentTag}.
 */
public class RenderContext {
    /**
     * Indicates the rendering mode.
     */
    public enum RenderMode {
        /** The render is live output, not the body of a variable. */
        LIVE,
        /** The render takes place inside a sub-tag within the document. */
        INLINE
    }
    private String contentType;
    private String uri;
    private String path;
    private final VariableStore variableStore;
    private final VariableStore formVariableStore;
    private RenderMode renderMode;
    /**
     * Creates a live-mode context with a fresh, empty variable store.
     */
    public RenderContext() {
        this(new VariableStore(), RenderMode.LIVE);
    }
    /**
     * Creates an inline-mode context whose variables are copied from the
     * supplied store; used when rendering inside a sub-tag.
     *
     * @param store {@link VariableStore} whose contents are copied.
     */
    public RenderContext(VariableStore store) {
        this(new VariableStore(store.getStore()), RenderMode.INLINE);
    }
    /** Shared initialization for both public constructors. */
    private RenderContext(VariableStore variables, RenderMode mode) {
        this.contentType = "text/html";
        this.uri = null;
        this.path = null;
        this.variableStore = variables;
        this.formVariableStore = new VariableStore();
        this.renderMode = mode;
    }
    /**
     * Retrieves the current content type.
     *
     * @return {@link String} containing the content type (defaults to {@code text/html}).
     */
    public String getContentType() {
        return contentType;
    }
    /**
     * Sets the current content type.
     *
     * @param contentType {@link String} containing the content type.
     */
    public void setContentType(String contentType) {
        this.contentType = contentType;
    }
    /**
     * Retrieves the URI of the original request.
     *
     * @return {@link String} containing the original request URI.
     */
    public String getUri() {
        return uri;
    }
    /**
     * Sets the URI of the original request.
     *
     * @param uri {@code String} containing the URI; {@code null} indicates inline content.
     */
    public void setUri(String uri) {
        this.uri = uri;
    }
    /**
     * Retrieves the path to the content being rendered.
     *
     * @return {@link String} containing the path of the content.
     */
    public String getPath() {
        return path;
    }
    /**
     * Sets the path for the content being rendered.
     *
     * @param path {@code String} containing the path; {@code null} indicates inline content.
     */
    public void setPath(String path) {
        this.path = path;
    }
    /**
     * Retrieves the current {@link VariableStore}.
     *
     * @return Current {@link VariableStore} object.
     */
    public VariableStore getVariableStore() {
        return variableStore;
    }
    /**
     * Retrieves the {@link VariableStore} holding posted form (GET) variables.
     *
     * @return Current {@link VariableStore} object containing form variables.
     */
    public VariableStore getFormVariableStore() {
        return formVariableStore;
    }
    /**
     * Retrieves the current rendering mode.
     *
     * @return {@link RenderMode}
     */
    public RenderMode getRenderMode() {
        return renderMode;
    }
    /**
     * Sets the current render mode.
     *
     * @param renderMode {@link RenderMode} to set.
     */
    public void setRenderMode(RenderMode renderMode) {
        this.renderMode = renderMode;
    }
}
| KenSuenobu/nucleus | nucleus-render/src/main/java/com/bitgate/nucleus/render/RenderContext.java | Java | apache-2.0 | 3,510 |
/*
* Copyright (C) 2020-2021 Arm Limited or its affiliates. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the License); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* ----------------------------------------------------------------------
* Project: CMSIS NN Library
* Title: arm_nn_vec_mat_mult_t_s8
* Description: s8 vector by matrix (transposed) multiplication
*
* $Date: 19. August 2021
* $Revision: V.2.5.2
*
* Target Processor: Cortex-M
*
* -------------------------------------------------------------------- */
#include "third_party/cmsis/CMSIS/NN/Include/arm_nnsupportfunctions.h"
/**
* @ingroup groupSupport
*/
/**
* @addtogroup NNBasicMath
* @{
*/
/*
* s8 vector(lhs) by matrix (transposed) multiplication
*
* Refer header file for details.
*
*/
arm_status arm_nn_vec_mat_mult_t_s8(const q7_t *lhs,
                                    const q7_t *rhs,
                                    const q31_t *bias,
                                    q7_t *dst,
                                    const int32_t lhs_offset,
                                    const int32_t rhs_offset,
                                    const int32_t dst_offset,
                                    const int32_t dst_multiplier,
                                    const int32_t dst_shift,
                                    const int32_t rhs_cols,
                                    const int32_t rhs_rows,
                                    const int32_t activation_min,
                                    const int32_t activation_max)
{
    (void)rhs_offset; /* Unused by all three implementations below. */
#if defined(ARM_MATH_MVEI)
    /* MVE (Helium) path: three rhs rows per outer iteration, 16 int8 lanes
       per vector op, with tail predication (vctp8q) on the column count. */
    const int32_t row_loop_cnt = rhs_rows / 3;
    for (int i_row_loop_cnt = 0; i_row_loop_cnt < row_loop_cnt; i_row_loop_cnt++)
    {
        int32_t acc_0 = 0;
        int32_t acc_1 = 0;
        int32_t acc_2 = 0;
        const int32_t col_loop_cnt = (rhs_cols + 15) / 16;
        const int8_t *lhs_vec = lhs;
        const int8_t *rhs_0 = rhs;
        const int8_t *rhs_1 = rhs + rhs_cols;
        const int8_t *rhs_2 = rhs + 2 * rhs_cols;
        /* Row sums let the lhs_offset contribution be folded in once after
           the column loop instead of per element. */
        int32_t rhs_sum_0 = 0;
        int32_t rhs_sum_1 = 0;
        int32_t rhs_sum_2 = 0;
        uint32_t col_cnt = (uint32_t)rhs_cols;
        for (int i = 0; i < col_loop_cnt; i++)
        {
            mve_pred16_t p = vctp8q(col_cnt);
            col_cnt -= 16;
            const int8x16_t input = vldrbq_z_s8(lhs_vec, p);
            const int8x16_t ker_0 = vldrbq_z_s8(rhs_0, p);
            rhs_sum_0 = vaddvaq_p_s8(rhs_sum_0, ker_0, p);
            acc_0 = vmladavaq_p_s8(acc_0, ker_0, input, p);
            const int8x16_t ker_1 = vldrbq_z_s8(rhs_1, p);
            rhs_sum_1 = vaddvaq_p_s8(rhs_sum_1, ker_1, p);
            acc_1 = vmladavaq_p_s8(acc_1, ker_1, input, p);
            const int8x16_t ker_2 = vldrbq_z_s8(rhs_2, p);
            rhs_sum_2 = vaddvaq_p_s8(rhs_sum_2, ker_2, p);
            acc_2 = vmladavaq_p_s8(acc_2, ker_2, input, p);
            lhs_vec += 16;
            rhs_0 += 16;
            rhs_1 += 16;
            rhs_2 += 16;
        }
        rhs += 3 * rhs_cols;
        /* Gather the three row accumulators into one vector; lane 3 unused. */
        int32x4_t acc = {acc_0, acc_1, acc_2, 0};
        mve_pred16_t p = vctp32q(3);
        if (bias)
        {
            int32x4_t b = vldrwq_z_s32(bias, p);
            acc = vaddq_m_s32(vuninitializedq_s32(), acc, b, p);
            bias += 3;
        }
        /* Fold in the lhs offset, requantize, add dst offset and clamp. */
        const int32x4_t rhs_sum = {rhs_sum_0, rhs_sum_1, rhs_sum_2, 0};
        acc += vdupq_n_s32(lhs_offset) * rhs_sum;
        acc = arm_requantize_mve(acc, dst_multiplier, dst_shift);
        acc = vaddq_s32(acc, vdupq_n_s32(dst_offset));
        acc = vmaxq_s32(acc, vdupq_n_s32(activation_min));
        acc = vminq_s32(acc, vdupq_n_s32(activation_max));
        vstrbq_p_s32(dst, acc, p);
        dst += 3;
    }
    /* Remaining 0..2 rows, processed one at a time. */
    const int loop_cnt = rhs_rows % 3;
    for (int i_row_loop_cnt = 0; i_row_loop_cnt < loop_cnt; i_row_loop_cnt++)
    {
        int32_t acc_0 = 0;
        const int32_t col_loop_cnt = (rhs_cols + 15) / 16;
        const int8_t *lhs_vec = lhs;
        const int8_t *rhs_0 = rhs;
        int32_t rhs_sum_0 = 0;
        uint32_t col_cnt = (uint32_t)rhs_cols;
        for (int i = 0; i < col_loop_cnt; i++)
        {
            mve_pred16_t p = vctp8q(col_cnt);
            col_cnt -= 16;
            const int8x16_t input = vldrbq_z_s8(lhs_vec, p);
            const int8x16_t ker_0 = vldrbq_z_s8(rhs_0, p);
            rhs_sum_0 = vaddvaq_p_s8(rhs_sum_0, ker_0, p);
            acc_0 = vmladavaq_p_s8(acc_0, ker_0, input, p);
            lhs_vec += 16;
            rhs_0 += 16;
        }
        rhs += rhs_cols;
        if (bias)
        {
            acc_0 += *bias;
            bias++;
        }
        const int32_t offsets = rhs_sum_0 * lhs_offset;
        acc_0 += offsets;
        acc_0 = arm_nn_requantize(acc_0, dst_multiplier, dst_shift);
        acc_0 += dst_offset;
        // Clamp the result
        acc_0 = MAX(acc_0, activation_min);
        *dst = MIN(acc_0, activation_max);
        dst++;
    }
#elif defined(ARM_MATH_DSP)
    /* DSP-extension path: two rows per outer iteration; four int8 values
       are read at a time and processed as packed 16-bit pairs via SMLAD. */
    const int32_t row_loop_cnt = rhs_rows / 2;
    const int16_t lhs_offset_s16 = (int16_t)lhs_offset;
    /* lhs_offset duplicated into both 16-bit halves for SXTAB16. */
    const uint32_t lhs_offset_s16x2 = __PKHBT(lhs_offset_s16, lhs_offset_s16, 16);
    for (int32_t i = 0; i < row_loop_cnt; i++)
    {
        int32_t acc_0 = 0;
        int32_t acc_1 = 0;
        if (bias)
        {
            acc_0 = *bias++;
            acc_1 = *bias++;
        }
        const int32_t col_loop_cnt = rhs_cols / 4;
        const int8_t *lhs_vec = lhs;
        const int8_t *rhs_0 = rhs;
        const int8_t *rhs_1 = rhs + rhs_cols;
        rhs += 2 * rhs_cols;
        for (int j = col_loop_cnt; j != 0; j--)
        {
            int32_t vec_0 = arm_nn_read_q7x4_ia(&lhs_vec);
            /* Sign-extend odd/even int8 lanes to int16 pairs, adding the
               lhs offset in the same instruction. */
            int32_t vec_1 = __SXTAB16_RORn(lhs_offset_s16x2, (uint32_t)vec_0, 8);
            vec_0 = __SXTAB16(lhs_offset_s16x2, vec_0);
            int32_t ker_0 = arm_nn_read_q7x4_ia(&rhs_0);
            int32_t ker_1 = __SXTB16_RORn((uint32_t)ker_0, 8);
            ker_0 = __SXTB16(ker_0);
            acc_0 = __SMLAD(ker_1, vec_1, acc_0);
            acc_0 = __SMLAD(ker_0, vec_0, acc_0);
            ker_0 = arm_nn_read_q7x4_ia(&rhs_1);
            ker_1 = __SXTB16_RORn((uint32_t)ker_0, 8);
            ker_0 = __SXTB16(ker_0);
            acc_1 = __SMLAD(ker_1, vec_1, acc_1);
            acc_1 = __SMLAD(ker_0, vec_0, acc_1);
        }
        /* Scalar tail for the remaining 0..3 columns. */
        for (int k = col_loop_cnt * 4; k < rhs_cols; k++)
        {
            const int32_t lhs_temp = (*lhs_vec + lhs_offset);
            lhs_vec++;
            acc_0 += lhs_temp * (*rhs_0);
            rhs_0++;
            acc_1 += lhs_temp * (*rhs_1);
            rhs_1++;
        }
        acc_0 = arm_nn_requantize(acc_0, dst_multiplier, dst_shift);
        acc_1 = arm_nn_requantize(acc_1, dst_multiplier, dst_shift);
        // Add offset
        acc_0 += dst_offset;
        acc_1 += dst_offset;
        // Clamp the result
        acc_0 = MAX(acc_0, activation_min);
        acc_0 = MIN(acc_0, activation_max);
        acc_1 = MAX(acc_1, activation_min);
        acc_1 = MIN(acc_1, activation_max);
        *dst++ = (q7_t)acc_0;
        *dst++ = (q7_t)acc_1;
    }
    /* Odd row count: process the final row on its own. */
    if (rhs_rows & 0x1)
    {
        int32_t acc_0 = 0;
        if (bias)
        {
            acc_0 = *bias++;
        }
        const int32_t col_loop_cnt = rhs_cols / 4;
        const int8_t *lhs_vec = lhs;
        const int8_t *rhs_0 = rhs;
        for (int i = col_loop_cnt; i != 0; i--)
        {
            int32_t vec_0 = arm_nn_read_q7x4_ia(&lhs_vec);
            int32_t vec_1 = __SXTAB16_RORn(lhs_offset_s16x2, (uint32_t)vec_0, 8);
            vec_0 = __SXTAB16(lhs_offset_s16x2, vec_0);
            int32_t ker_0 = arm_nn_read_q7x4_ia(&rhs_0);
            int32_t ker_1 = __SXTB16_RORn((uint32_t)ker_0, 8);
            ker_0 = __SXTB16(ker_0);
            acc_0 = __SMLAD(ker_1, vec_1, acc_0);
            acc_0 = __SMLAD(ker_0, vec_0, acc_0);
        }
        for (int j = col_loop_cnt * 4; j < rhs_cols; j++)
        {
            const int32_t lhs_temp = (*lhs_vec + lhs_offset);
            lhs_vec++;
            acc_0 += lhs_temp * (*rhs_0);
            rhs_0++;
        }
        acc_0 = arm_nn_requantize(acc_0, dst_multiplier, dst_shift);
        // Add offset
        acc_0 += dst_offset;
        // Clamp the result
        acc_0 = MAX(acc_0, activation_min);
        acc_0 = MIN(acc_0, activation_max);
        *dst++ = (q7_t)acc_0;
    }
#else
    /* Portable C fallback: three rows per outer iteration, scalar MACs. */
    const int32_t row_loop_cnt = rhs_rows / 3;
    for (int i_row_loop_cnt = 0; i_row_loop_cnt < row_loop_cnt; i_row_loop_cnt++)
    {
        const q7_t *lhs_ptr = lhs;
        const q7_t *rhs_ptr_0 = &rhs[0];
        const q7_t *rhs_ptr_1 = &rhs[rhs_cols];
        const q7_t *rhs_ptr_2 = &rhs[rhs_cols * 2];
        q31_t res00 = 0;
        q31_t res01 = 0;
        q31_t res02 = 0;
        if (bias)
        {
            res00 = *bias++;
            res01 = *bias++;
            res02 = *bias++;
        }
        for (int32_t rhs_cols_idx = 0; rhs_cols_idx < rhs_cols; ++rhs_cols_idx)
        {
            const q31_t rhs_value0 = (int8_t)*rhs_ptr_0;
            const q31_t rhs_value1 = (int8_t)*rhs_ptr_1;
            const q31_t rhs_value2 = (int8_t)*rhs_ptr_2;
            const q31_t lhs_value = (int8_t)*lhs_ptr + lhs_offset;
            res00 += lhs_value * rhs_value0;
            res01 += lhs_value * rhs_value1;
            res02 += lhs_value * rhs_value2;
            ++rhs_ptr_0;
            ++rhs_ptr_1;
            ++rhs_ptr_2;
            ++lhs_ptr;
        }
        // Quantize down
        res00 = arm_nn_requantize(res00, dst_multiplier, dst_shift);
        res01 = arm_nn_requantize(res01, dst_multiplier, dst_shift);
        res02 = arm_nn_requantize(res02, dst_multiplier, dst_shift);
        // Add offset
        res00 += dst_offset;
        res01 += dst_offset;
        res02 += dst_offset;
        // Clamp the result
        res00 = MAX(res00, activation_min);
        res00 = MIN(res00, activation_max);
        res01 = MAX(res01, activation_min);
        res01 = MIN(res01, activation_max);
        res02 = MAX(res02, activation_min);
        res02 = MIN(res02, activation_max);
        *dst++ = (q7_t)res00;
        *dst++ = (q7_t)res01;
        *dst++ = (q7_t)res02;
        rhs += 3 * rhs_cols;
    }
    /* Remaining 0..2 rows. */
    const int loop_cnt = rhs_rows % 3;
    for (int i_loop_cnt = 0; i_loop_cnt < loop_cnt; i_loop_cnt++)
    {
        const q7_t *lhs_ptr = &lhs[0];
        const q7_t *rhs_ptr = &rhs[0];
        q31_t res00 = 0;
        if (bias)
        {
            res00 = *bias++;
        }
        for (int32_t rhs_cols_idx = 0; rhs_cols_idx < rhs_cols; ++rhs_cols_idx)
        {
            q31_t rhs_value0 = (int8_t)rhs_ptr[0];
            q31_t lhs_value = (int8_t)lhs_ptr[0] + lhs_offset;
            res00 += lhs_value * rhs_value0;
            ++rhs_ptr;
            ++lhs_ptr;
        }
        // Quantize down
        res00 = arm_nn_requantize(res00, dst_multiplier, dst_shift);
        // Add offset
        res00 += dst_offset;
        // Clamp the result
        res00 = MAX(res00, activation_min);
        res00 = MIN(res00, activation_max);
        *dst++ = (q7_t)res00;
        rhs += rhs_cols;
    }
#endif
    return ARM_MATH_SUCCESS;
}
/**
* @} end of NNBasicMath group
*/
| tensorflow/tflite-micro-arduino-examples | src/third_party/cmsis/CMSIS/NN/Source/NNSupportFunctions/arm_nn_vec_mat_mult_t_s8.c | C | apache-2.0 | 11,802 |
# Bulbophyllum flexiliscapum Summerh. SPECIES
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | mdoering/backbone | life/Plantae/Magnoliophyta/Liliopsida/Asparagales/Orchidaceae/Bulbophyllum/Bulbophyllum saltatorium/ Syn. Bulbophyllum flexiliscapum/README.md | Markdown | apache-2.0 | 192 |
# #!/usr/bin/env python
#
# import nlopt # THIS IS NOT A PACKAGE!
# import numpy as np
#
# print(('nlopt version='+nlopt.__version__))
#
# def f(x, grad):
# F=x[0]
# L=x[1]
# E=x[2]
# I=x[3]
# D=F*L**3/(3.*E*I)
# return D
#
# n = 4
# opt = nlopt.opt(nlopt.LN_COBYLA, n)
# opt.set_min_objective(f)
# lb = np.array([40., 50., 30e3, 1.])
# ub = np.array([60., 60., 40e3, 10.])
# x = (lb+ub)/2.
# opt.set_lower_bounds(lb)
# opt.set_upper_bounds(ub)
# opt.set_xtol_rel(1e-3)
# opt.set_ftol_rel(1e-3)
# xopt = opt.optimize(x)
#
# opt_val = opt.last_optimum_value()
# result = opt.last_optimize_result()
# print(('opt_result='+str(result)))
# print(('optimizer='+str(xopt)))
# print(('opt_val='+str(opt_val)))
| PMBio/limix | External/nlopt/test/test_std.py | Python | apache-2.0 | 727 |
// Copyright (c) Oleg Zudov. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
// This file is based on or incorporates material from the project Selenium, licensed under the Apache License, Version 2.0. More info in THIRD-PARTY-NOTICES file.
using System.Collections.ObjectModel;
using System.Threading;
using System.Threading.Tasks;
namespace Zu.AsyncWebDriver
{
    /// <summary>
    /// Extension methods that let element searches be chained directly off a
    /// pending <see cref="Task{TResult}"/> of <see cref="ISearchContext"/>:
    /// the context task is awaited first, then the search is performed on it.
    /// </summary>
    public static class TaskISearchContextExtensions
    {
        /// <summary>Awaits the search context, then finds the first matching element.</summary>
        /// <param name="elementTask">Task producing the context to search within.</param>
        /// <param name="by">Locator describing the element to find.</param>
        /// <param name="cancellationToken">Token used to cancel the search.</param>
        public static async Task<IWebElement> FindElement(this Task<ISearchContext> elementTask, By by,
            CancellationToken cancellationToken = new CancellationToken())
        {
            var el = await elementTask.ConfigureAwait(false);
            return await el.FindElement(by, cancellationToken).ConfigureAwait(false);
        }
        /// <summary>Awaits the search context, then finds all matching elements.</summary>
        /// <param name="elementTask">Task producing the context to search within.</param>
        /// <param name="by">Locator describing the elements to find.</param>
        /// <param name="cancellationToken">Token used to cancel the search.</param>
        public static async Task<ReadOnlyCollection<IWebElement>> FindElements(this Task<ISearchContext> elementTask, By by,
            CancellationToken cancellationToken = new CancellationToken())
        {
            var el = await elementTask.ConfigureAwait(false);
            return await el.FindElements(by, cancellationToken).ConfigureAwait(false);
        }
    }
}
// Appears to be an auto-generated Titanium Alloy style map for the
// CreationSudoku view (id-keyed Ti.UI style entries ordered by priority);
// presumably regenerated by the Alloy compiler, so avoid hand edits.
module.exports = [{"isId":true,"priority":100000.0002,"key":"Container","style":{}},{"isId":true,"priority":100000.0003,"key":"windowActivity","style":{backgroundColor:"#166181",}},{"isId":true,"priority":100000.0004,"key":"activityIndicator","style":{style:Ti.UI.ActivityIndicatorStyle.DARK,indicatorColor:"White",color:"Black",height:Ti.UI.SIZE,width:Ti.UI.SIZE,}},{"isId":true,"priority":100101.0005,"key":"activityIndicator","style":{style:Ti.UI.iPhone.ActivityIndicatorStyle.DARK,indicatorColor:"White",height:Ti.UI.SIZE,width:Ti.UI.SIZE,}}];
# Copyright 2015 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import os
from perfkitbenchmarker import flags
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.vm_util import POLL_INTERVAL
FLAGS = flags.FLAGS
# OpenStack connection flags. Each default falls back to the corresponding
# standard OpenStack CLI environment variable when it is set.
flags.DEFINE_string('openstack_auth_url',
                    os.environ.get('OS_AUTH_URL', 'http://localhost:5000'),
                    ('Url for Keystone authentication service, defaults to '
                     '$OS_AUTH_URL. Required for discovery of other OpenStack '
                     'service URLs.'))
flags.DEFINE_string('openstack_username',
                    os.getenv('OS_USERNAME', 'admin'),
                    'OpenStack login username, defaults to $OS_USERNAME.')
flags.DEFINE_string('openstack_tenant',
                    os.getenv('OS_TENANT_NAME', 'admin'),
                    'OpenStack tenant name, defaults to $OS_TENANT_NAME.')
flags.DEFINE_string('openstack_password_file',
                    os.getenv('OPENSTACK_PASSWORD_FILE',
                              '~/.config/openstack-password.txt'),
                    'Path to file containing the openstack password, '
                    'defaults to $OPENSTACK_PASSWORD_FILE. Alternatively, '
                    'setting the password itself in $OS_PASSWORD is also '
                    'supported.')
flags.DEFINE_string('openstack_nova_endpoint_type',
                    os.getenv('NOVA_ENDPOINT_TYPE', 'publicURL'),
                    'OpenStack Nova endpoint type, '
                    'defaults to $NOVA_ENDPOINT_TYPE.')
class KeystoneAuth(object):
    """Lazily-authenticating Keystone (identity v2.0) helper.

    Usage example:
        auth = KeystoneAuth(auth_url, auth_tenant, auth_user, auth_password)
        token = auth.get_token()
        tenant_id = auth.get_tenant_id()

    token and tenant_id are required to use all OpenStack python clients.
    """

    def __init__(self, url, tenant, user, password):
        self._url = url
        self._tenant = tenant
        self._user = user
        self._password = password
        self._connection = None
        self._session = None

    def GetConnection(self):
        """Returns an authenticated keystone client, creating it on first use."""
        if self._connection is None:
            import keystoneclient.v2_0.client as ksclient
            self._connection = ksclient.Client(
                auth_url=self._url,
                username=self._user,
                password=self._password,
                tenant=self._tenant)
            self._connection.authenticate()
        return self._connection

    def get_token(self):
        """Returns an auth token usable by the other OpenStack clients."""
        return self.GetConnection().get_token(self._session)

    def get_tenant_id(self):
        """Returns the tenant id for the configured credentials."""
        raw_token = self.GetConnection().get_raw_token_from_identity_service(
            auth_url=self._url,
            username=self._user,
            password=self._password,
            tenant_name=self._tenant
        )
        return raw_token['token']['tenant']['id']
class NovaClient(object):
    """Authenticated nova client wrapper.

    Attribute lookups this class does not define are delegated to the
    wrapped novaclient instance, so callers can treat this object as a
    drop-in nova client.

    Fixes vs. original: the unreachable trailing ``raise`` in GetPassword
    is removed (the preceding try block always returns or raises), and the
    duplicated client construction shared by __init__ and reconnect is
    factored into a single private helper.
    """

    def __getattribute__(self, item):
        try:
            return super(NovaClient, self).__getattribute__(item)
        except AttributeError:
            # Delegate unknown attributes to the wrapped nova client.
            return self.__client.__getattribute__(item)

    def GetPassword(self):
        """Returns the OpenStack admin password.

        For compatibility with the Nova CLI, the 'OS'-prefixed environment
        variable takes precedence when present; otherwise the password is
        read from the file named by --openstack_password_file.

        Raises:
            Exception: if no password is configured or the file is unreadable.
        """
        error_msg = ('No OpenStack password specified. '
                     'Either set the environment variable OS_PASSWORD to the '
                     'admin password, or provide the name of a file '
                     'containing the password using the OPENSTACK_PASSWORD_FILE '
                     'environment variable or --openstack_password_file flag.')
        password = os.getenv('OS_PASSWORD')
        if password is not None:
            return password
        try:
            with open(os.path.expanduser(FLAGS.openstack_password_file)) as pwfile:
                return pwfile.readline().rstrip()
        except IOError as e:
            raise Exception(error_msg + ' ' + str(e))

    def __init__(self):
        self.url = FLAGS.openstack_auth_url
        self.user = FLAGS.openstack_username
        self.tenant = FLAGS.openstack_tenant
        self.endpoint_type = FLAGS.openstack_nova_endpoint_type
        self.password = self.GetPassword()
        self._Connect()

    def reconnect(self):
        """Re-authenticates and rebuilds the wrapped nova client."""
        self._Connect()

    def _Connect(self):
        """Authenticates with Keystone and (re)creates the nova client."""
        from novaclient import client as noclient
        self.__auth = KeystoneAuth(self.url, self.tenant, self.user,
                                   self.password)
        self.__client = noclient.Client('2',
                                        auth_url=self.url,
                                        username=self.user,
                                        auth_token=self.__auth.get_token(),
                                        tenant_id=self.__auth.get_tenant_id(),
                                        endpoint_type=self.endpoint_type,
                                        )
class AuthException(Exception):
    """Wrapper for NovaClient auth exceptions.

    Raised in place of novaclient's Unauthorized so that the vm_util.Retry
    decorator used by retry_authorization treats the failure as retryable.
    """
    pass
def retry_authorization(max_retries=1, poll_interval=POLL_INTERVAL):
    # Decorator factory: retries the wrapped nova call when novaclient
    # raises Unauthorized (e.g. an expired token), reconnecting the shared
    # client before re-raising as the retryable AuthException.
    def decored(function):
        @vm_util.Retry(max_retries=max_retries,
                       poll_interval=poll_interval,
                       retryable_exceptions=AuthException,
                       log_errors=False)
        @functools.wraps(function)
        def decor(*args, **kwargs):
            # Imported lazily so novaclient is only required at call time.
            from novaclient.exceptions import Unauthorized
            try:
                return function(*args, **kwargs)
            except Unauthorized as e:
                # NOTE(review): NovaClient.instance is never assigned in this
                # module -- presumably set up by the caller; verify.
                NovaClient.instance.reconnect()
                raise AuthException(str(e))
        return decor
    return decored
| syed/PerfKitBenchmarker | perfkitbenchmarker/openstack/utils.py | Python | apache-2.0 | 6,884 |
/*
Copyright 2015-2018 Orchidware Studios LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#include "DefaultOverlay.hpp"
#ifdef UWP
#include "pch.h"
#include "Engine\Constants.hpp"
#include "LogicEngine\RuleConstants.hpp"
#include "Engine\Include\Tools\ToolFactory.hpp"
#include "OrchiOne\Include\Model\Tools\Sword.hpp"
#include "OrchiOne\Include\Model\Tools\SwordParameter.hpp"
#include "OrchiOne\Include\Model\Tools\Boomerang.hpp"
#include "OrchiOne\Include\Model\Tools\BoomerangParameter.hpp"
#include "OrchiOne\Include\Model\Tools\Bomb.hpp"
#include "OrchiOne\Include\Model\Tools\BombParameter.hpp"
#include "OrchiOne\Include\Model\Tools\Arrow.hpp"
#include "OrchiOne\Include\Model\Tools\ArrowParameter.hpp"
#include "OrchiOne\Include\Model\Tools\Pebble.hpp"
#include "OrchiOne\Include\Model\Tools\PebbleParameter.hpp"
#include "Engine\Include\Utils\Utils.hpp"
#include "Engine\Include\Config\Config.hpp"
#include "Engine\Constants.hpp"
#include "Engine\Include\Content\GameWindow.hpp"
#elif defined(IOS)
#include "Constants.hpp"
#include "RuleConstants.hpp"
#include "ToolFactory.hpp"
#include "Sword.hpp"
#include "SwordParameter.hpp"
#include "Boomerang.hpp"
#include "BoomerangParameter.hpp"
#include "Bomb.hpp"
#include "BombParameter.hpp"
#include "Arrow.hpp"
#include "ArrowParameter.hpp"
#include "Utils.hpp"
#include "Config.hpp"
#include "Constants.hpp"
#include "GameWindow.hpp"
#endif
//
//
//
// Constructor. Wraps an existing controller; input queries are delegated
// to it, with tool-spawning behaviors layered on top by this overlay.
DefaultOverlay::DefaultOverlay(Controller * pController)
    : ControllerOverlay(pController)
{
}
//
//
//
// Reports whether the wrapped controller is currently connected.
bool DefaultOverlay::GetIsConnected()
{
    return m_pController->GetIsConnected();
}
//
//
//
// Per-frame poll: forwards Update() to the wrapped controller, then samples
// every button, trigger, thumbstick, the keypad and the keyboard so the
// overlay behaviors (tool spawning, diagnostics toggle) can react.
// Returns the wrapped controller's Update() result.
bool DefaultOverlay::Update(
    ControllerParameter * pParam,
    Controller *)
{
    bool bRetVal = false;
    // For the TouchScreenController, this does nothing.
    bRetVal = m_pController->Update(pParam, m_pController);
    CheckAButton(pParam);
    CheckBButton(pParam);
    CheckXButton(pParam);
    CheckYButton(pParam);
    CheckLTButton(pParam);
    CheckLBButton(pParam);
    CheckRTButton(pParam);
    CheckRBButton(pParam);
    CheckLeftThumbstick(pParam);
    CheckRightThumbstick(pParam);
    CheckDirectionalKeypad(pParam);
    CheckKey(pParam);
    return bRetVal;
}
//
//
//
// A button: on a low-to-high transition, spawns a Pebble at the player's
// current location travelling along the player's heading, and pushes it
// onto the current subdivision's tool layer. Returns the state change.
int DefaultOverlay::CheckAButton(
    ControllerParameter * pParam)
{
    int nButtonStateChange =
        m_pController->CheckAButton(pParam);
    if (nButtonStateChange == TO_HIGH)
    {
        // Seed the tool with the player's heading and (x, y) location.
        PebbleParameter pebbleParameter
        {
            pParam->GetPlayer(),
            FLOAT(pParam->GetPlayer()->GetAttributes()->LookupValue(A_HEADING, A_HEADING_VALUE)),
            float2
            {
                FLOAT(pParam->GetPlayer()->GetAttributes()->LookupValue(A_LOCATION, A_LOCATION_X)),
                FLOAT(pParam->GetPlayer()->GetAttributes()->LookupValue(A_LOCATION, A_LOCATION_Y)),
            }
        };
        // Clone the prototypical pebble (original comment said "sword";
        // copy/paste leftover) and place it on the tool layer. The tool is
        // expected to be removed from the layer when it disappears.
        Tool * pPebble =
            ToolFactory::Clone(Pebble::PEBBLE, &pebbleParameter);
        pParam->GetSubdivision()->Set(LAYER_TOOLS, pPebble);
    }
    return nButtonStateChange;
}
//
//
//
// B button: on a low-to-high transition, drops a Bomb at the player's
// current location along the player's heading. Returns the state change.
int DefaultOverlay::CheckBButton(
    ControllerParameter * pParam)
{
    int nButtonStateChange =
        m_pController->CheckBButton(pParam);
    if (nButtonStateChange == TO_HIGH)
    {
        // Seed the tool with the player's heading and (x, y) location.
        BombParameter bombParameter
        {
            pParam->GetPlayer(),
            FLOAT(pParam->GetPlayer()->GetAttributes()->LookupValue(A_HEADING, A_HEADING_VALUE)),
            float2
            {
                FLOAT(pParam->GetPlayer()->GetAttributes()->LookupValue(A_LOCATION, A_LOCATION_X)),
                FLOAT(pParam->GetPlayer()->GetAttributes()->LookupValue(A_LOCATION, A_LOCATION_Y)),
            }
        };
        // Clone the prototypical bomb (original comment said "sword";
        // copy/paste leftover) and place it on the tool layer.
        Tool * pBomb =
            ToolFactory::Clone(Bomb::BOMB, &bombParameter);
        pParam->GetSubdivision()->Set(LAYER_TOOLS, pBomb);
    }
    return nButtonStateChange;
}
//
//
//
// X button: on a low-to-high transition, throws a Boomerang from the
// player's current location along the player's heading. Returns the
// state change.
int DefaultOverlay::CheckXButton(
    ControllerParameter * pParam)
{
    int nButtonStateChange =
        m_pController->CheckXButton(pParam);
    if (nButtonStateChange == TO_HIGH)
    {
        BoomerangParameter boomerangParameter
        {
            pParam->GetPlayer(),
            FLOAT(pParam->GetPlayer()->GetAttributes()->LookupValue(A_HEADING, A_HEADING_VALUE)),
            float2
            {
                FLOAT(pParam->GetPlayer()->GetAttributes()->LookupValue(A_LOCATION, A_LOCATION_X)),
                FLOAT(pParam->GetPlayer()->GetAttributes()->LookupValue(A_LOCATION, A_LOCATION_Y)),
            },
            // NOTE(review): trailing flag's meaning is not visible here --
            // presumably "outbound"/return-to-player behavior; verify in
            // BoomerangParameter.
            true
        };
        // Clone the prototypical boomerang (original comment said "sword";
        // copy/paste leftover) and place it on the tool layer.
        Tool * pBoomerang =
            ToolFactory::Clone(
                Boomerang::BOOMERANG,
                &boomerangParameter);
        pParam->GetSubdivision()->Set(LAYER_TOOLS, pBoomerang);
    }
    return nButtonStateChange;
}
//
//
//
// Y button: on a low-to-high transition, fires an Arrow from the player's
// current location along the player's heading. Returns the state change.
int DefaultOverlay::CheckYButton(
    ControllerParameter * pParam)
{
    int nButtonStateChange =
        m_pController->CheckYButton(pParam);
    if (nButtonStateChange == TO_HIGH)
    {
        ArrowParameter arrowParameter
        {
            pParam->GetPlayer(),
            FLOAT(pParam->GetPlayer()->GetAttributes()->LookupValue(A_HEADING, A_HEADING_VALUE)),
            float2
            {
                FLOAT(pParam->GetPlayer()->GetAttributes()->LookupValue(A_LOCATION, A_LOCATION_X)),
                FLOAT(pParam->GetPlayer()->GetAttributes()->LookupValue(A_LOCATION, A_LOCATION_Y)),
            }
        };
        // Clone the prototypical arrow (original comment said "sword";
        // copy/paste leftover) and place it on the tool layer.
        Tool * pArrow =
            ToolFactory::Clone(Arrow::ARROW, &arrowParameter);
        pParam->GetSubdivision()->Set(LAYER_TOOLS, pArrow);
    }
    return nButtonStateChange;
}
//
//
//
// Left bumper: pure delegation to the wrapped controller; no overlay
// behavior is attached.
void DefaultOverlay::CheckLBButton(
    ControllerParameter * pParam)
{
    m_pController->CheckLBButton(pParam);
}
//
//
//
// Right bumper: on a low-to-high transition, toggles the global diagnostic
// overlay mode. Returns the button state change.
int DefaultOverlay::CheckRBButton(
    ControllerParameter * pParam)
{
    int nButtonStateChange =
        m_pController->CheckRBButton(pParam);
    if (nButtonStateChange == TO_HIGH)
    {
        // Toggle the flag with a single negation instead of the original
        // read/branch/write sequence; behavior is identical.
        auto pConfig = Config::GetInstance();
        pConfig->SetDiagnosticMode(!pConfig->IsDiagnosticMode());
    }
    return nButtonStateChange;
}
//
//
//
// Right trigger: pure delegation; returns the trigger's analog value.
double DefaultOverlay::CheckRTButton(
    ControllerParameter * pParam)
{
    return m_pController->CheckRTButton(pParam);
}
//
//
//
// Left trigger: pure delegation; returns the trigger's analog value.
double DefaultOverlay::CheckLTButton(
    ControllerParameter * pParam)
{
    return m_pController->CheckLTButton(pParam);
}
//
// Called by Update, and nothing else
//
// Left thumbstick: pure delegation. Per the note in the original source,
// this is called by Update() and nothing else.
bool DefaultOverlay::CheckLeftThumbstick(
    ControllerParameter * pParam)
{
    return m_pController->CheckLeftThumbstick(pParam);
}
//
//
//
// Right thumbstick: pure delegation; no overlay behavior is attached.
void DefaultOverlay::CheckRightThumbstick(
    ControllerParameter * pParam)
{
    m_pController->CheckRightThumbstick(pParam);
}
//
//
//
void DefaultOverlay::CheckDirectionalKeypad(
ControllerParameter * pParam)
{
m_pController->CheckDirectionalKeypad(pParam);
}
//
//
//
void DefaultOverlay::CheckKey(
ControllerParameter * pParam)
{
m_pController->CheckKey(pParam);
}
| orchidwarestudios/orchis-isle-directx | adventures-of-orchi/OrchiOne/Input/DefaultOverlay.cpp | C++ | apache-2.0 | 7,149 |
<%#
Copyright 2013-2017 the original author or authors from the StackStack project.
This file is part of the StackStack project, see http://www.jhipster.tech/
for more information.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-%>
package <%=packageName%>.config;
/**
* Application constants.
*/
public final class Constants {

    /** Regular expression that acceptable login names must match. */
    public static final String LOGIN_REGEX = "^[_'.@A-Za-z0-9-]*$";

    /** Login of the internal account used for system-initiated actions. */
    public static final String SYSTEM_ACCOUNT = "system";

    /** Login used to represent an unauthenticated (anonymous) user. */
    public static final String ANONYMOUS_USER = "anonymoususer";

    /** Utility class: prevent instantiation. */
    private Constants() {
    }
}
| siliconharborlabs/generator-jhipster | generators/server/templates/src/main/java/package/config/_Constants.java | Java | apache-2.0 | 1,085 |
/*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#include <aws/ec2/model/DescribeFpgaImagesResponse.h>
#include <aws/core/utils/xml/XmlSerializer.h>
#include <aws/core/AmazonWebServiceResult.h>
#include <aws/core/utils/StringUtils.h>
#include <aws/core/utils/logging/LogMacros.h>
#include <utility>
using namespace Aws::EC2::Model;
using namespace Aws::Utils::Xml;
using namespace Aws::Utils::Logging;
using namespace Aws::Utils;
using namespace Aws;
// Default-construct an empty response (no FPGA images, empty next token).
DescribeFpgaImagesResponse::DescribeFpgaImagesResponse()
{
}

// Construct from a raw service result by delegating to operator=.
DescribeFpgaImagesResponse::DescribeFpgaImagesResponse(const AmazonWebServiceResult<XmlDocument>& result)
{
  *this = result;
}
// Parse the DescribeFpgaImages XML payload into this response object.
// The payload may either have "DescribeFpgaImagesResponse" as the document
// root or carry it as a child node; both layouts are handled below.
DescribeFpgaImagesResponse& DescribeFpgaImagesResponse::operator =(const AmazonWebServiceResult<XmlDocument>& result)
{
  const XmlDocument& xmlDocument = result.GetPayload();
  XmlNode rootNode = xmlDocument.GetRootElement();
  XmlNode resultNode = rootNode;
  if (rootNode.GetName() != "DescribeFpgaImagesResponse")
  {
    resultNode = rootNode.FirstChild("DescribeFpgaImagesResponse");
  }

  if(!resultNode.IsNull())
  {
    // Collect every <fpgaImageSet><item> entry into m_fpgaImages.
    XmlNode fpgaImagesNode = resultNode.FirstChild("fpgaImageSet");
    if(!fpgaImagesNode.IsNull())
    {
      XmlNode fpgaImagesMember = fpgaImagesNode.FirstChild("item");
      while(!fpgaImagesMember.IsNull())
      {
        m_fpgaImages.push_back(fpgaImagesMember);
        fpgaImagesMember = fpgaImagesMember.NextNode("item");
      }
    }
    // Pagination token for the next page of results, if present.
    XmlNode nextTokenNode = resultNode.FirstChild("nextToken");
    if(!nextTokenNode.IsNull())
    {
      m_nextToken = StringUtils::Trim(nextTokenNode.GetText().c_str());
    }
  }

  // Response metadata (request id) always hangs off the document root.
  XmlNode responseMetadataNode = rootNode.FirstChild("ResponseMetadata");
  m_responseMetadata = responseMetadataNode;
  AWS_LOGSTREAM_DEBUG("Aws::EC2::Model::DescribeFpgaImagesResponse", "x-amzn-request-id: " << m_responseMetadata.GetRequestId() );

  return *this;
}
| svagionitis/aws-sdk-cpp | aws-cpp-sdk-ec2/source/model/DescribeFpgaImagesResponse.cpp | C++ | apache-2.0 | 2,402 |
/**
*
*/
package org.flowvisor.events;
import org.flowvisor.events.FVEvent;
import java.nio.channels.*;
/**
* Event: underlying socket has pending I/O
*
* @author capveg
*
*/
public class FVIOEvent extends FVEvent {
	// Selection key of the channel that has pending I/O.
	SelectionKey sk;

	/**
	 * Create an I/O event for the given selection key.
	 *
	 * @param sk  selection key whose channel has pending I/O
	 * @param src event handler that generated the event
	 * @param dst event handler the event is delivered to
	 */
	public FVIOEvent(SelectionKey sk, FVEventHandler src, FVEventHandler dst) {
		super(src, dst);
		this.sk = sk;
	}

	/**
	 * @return the selection key whose channel has pending I/O
	 */
	public SelectionKey getSelectionKey() {
		return sk;
	}
}
| routeflow/AutomaticConfigurationRouteFlow | FLOWVISOR/src/org/flowvisor/events/FVIOEvent.java | Java | apache-2.0 | 419 |
using System;
using Talifun.Commander.Command.Esb;
namespace Talifun.Commander.Command.PicasaUploader.Command.Response
{
	/// <summary>
	/// Response message indicating that the PicasaUploader workflow has executed.
	/// </summary>
	public class ExecutedPicasaUploaderWorkflowMessage : CorrelatedMessageBase<ExecutedPicasaUploaderWorkflowMessage>, IExecutedPicasaUploaderWorkflowMessage
	{
		/// <summary>Exception captured from the workflow run, if any was recorded.</summary>
		public Exception Error { get; set; }

		/// <summary>Whether the workflow run was cancelled before completion.</summary>
		public bool Cancelled { get; set; }
	}
} | taliesins/talifun-commander | src/Talifun.Commander.Command.PicasaUploader/Command/Response/ExecutedPicasaUploaderWorkflowMessage.cs | C# | apache-2.0 | 364 |
# Premna areolata Merr. SPECIES
#### Status
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | mdoering/backbone | life/Plantae/Magnoliophyta/Magnoliopsida/Lamiales/Lamiaceae/Premna/Premna areolata/README.md | Markdown | apache-2.0 | 171 |
/**
* Copyright 2007-2016, Kaazing Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaazing.nuklei.shell;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static org.kaazing.nuklei.Configuration.DIRECTORY_PROPERTY_NAME;
import java.net.InetSocketAddress;
import java.util.Properties;
import java.util.Scanner;
import org.kaazing.nuklei.Configuration;
import org.kaazing.nuklei.http.internal.HttpController;
import org.kaazing.nuklei.reaktor.internal.Reaktor;
import org.kaazing.nuklei.tcp.internal.TcpController;
import org.kaazing.nuklei.ws.internal.WsController;
public final class Main
{
    /**
     * Launches a Reaktor and wires a TCP &lt;-&gt; HTTP &lt;-&gt; WebSocket echo
     * pipeline on localhost:8080, then blocks until a line is read from stdin.
     *
     * @param args command line arguments (unused)
     * @throws Exception if launching the reaktor or any bind/route operation fails
     */
    public static void main(final String[] args) throws Exception
    {
        final Properties properties = new Properties();
        properties.setProperty(DIRECTORY_PROPERTY_NAME, "target/controller-example");

        Configuration config = new Configuration(properties);

        try (Reaktor reaktor = Reaktor.launch(config, n -> true, c -> true))
        {
            TcpController tcpctl = reaktor.controller(TcpController.class);
            WsController wsctl = reaktor.controller(WsController.class);
            HttpController httpctl = reaktor.controller(HttpController.class);

            // Bind each nukleus on the initiating (0x21) and reply (0x22) sides.
            long tcpInitRef = tcpctl.bind(0x21).get();
            long httpInitRef = httpctl.bind(0x21).get();
            long wsInitRef = wsctl.bind(0x21).get();
            long wsReplyRef = wsctl.bind(0x22).get();
            long httpReplyRef = httpctl.bind(0x22).get();
            long tcpReplyRef = tcpctl.bind(0x22).get();

            // Request path: TCP -> HTTP -> WS.
            tcpctl.route("any", tcpInitRef, "http", httpInitRef, new InetSocketAddress("localhost", 8080)).get();
            httpctl.route("tcp", httpInitRef, "ws", wsInitRef, singletonMap(":path", "/")).get();
            wsctl.route("http", wsInitRef, "ws", wsReplyRef, null).get();
            // Reply path: WS -> HTTP -> TCP.
            wsctl.route("ws", wsReplyRef, "http", httpReplyRef, null).get();
            httpctl.route("ws", httpReplyRef, "tcp", tcpReplyRef, emptyMap()).get();
            // Await completion like every other route call above; previously
            // this result was dropped, so the final route could still be
            // pending when the "bound" message was printed.
            tcpctl.route("http", tcpReplyRef, "any", 0, null).get();

            System.out.println("echo bound to ws://localhost:8080/");

            // Keep the reaktor alive until the user presses Enter.
            try (Scanner scanner = new Scanner(System.in))
            {
                scanner.nextLine();
            }
        }
    }

    private Main()
    {
    }
}
| jfallows/nuklei | shell/src/main/java/org/kaazing/nuklei/shell/Main.java | Java | apache-2.0 | 2,888 |
#pragma once
#include "Map.h"
// Load an existing map from the given map file name (see load()).
Map::Map(string mapName){
	imagePath = "resources/images/";
	load(mapName);
}

// Build an empty (all-blank) map of the requested size in tiles.
Map::Map(int mapWidth, int mapHeight){
	imagePath = "resources/images/";
	hasStart = false;
	hasEnd = false;
	setMapSize(mapWidth, mapHeight);
	blankMap();
}

Map::~Map(){
}
// Width of the map in tiles.
int Map::getMapWidth() const{
	return mapWidth;
}

// Height of the map in tiles.
int Map::getMapHeight() const{
	return mapHeight;
}

// Tile at (tileX, tileY), or nullptr when the position is out of bounds.
const Tile* const Map::getTile(const int tileX, const int tileY) const{
	if(!outOfBounds(tileX, tileY))
		return tileMap[tileX][tileY].get();
	else
		return nullptr;
}

// Copy of the ordered walking path (start tile first, built by addTile).
deque<const Tile* const> Map::getMapPath() const{
	return mapPath;
}
// Resize the tile and game-object grids to mapWidth x mapHeight.
// NOTE(review): when the requested size exceeds MAX_MAP_WIDTH/HEIGHT the
// grids are left untouched and only a message is printed.
void Map::setMapSize(const int mapWidth, const int mapHeight){
	if(mapWidth <= MAX_MAP_WIDTH && mapHeight <= MAX_MAP_HEIGHT){
		this->mapWidth = mapWidth;
		this->mapHeight = mapHeight;
		//resize 1st dimension of vector to mapWidth (number of tiles width-wise)
		tileMap.resize(mapWidth);
		mapObjects.resize(mapWidth);
		//resize 2nd dimension of vector to mapHeight(number of tiles height-wise)
		for(int i = 0; i < mapWidth; ++i)
		{
			tileMap.at(i).resize(mapHeight);
			mapObjects.at(i).resize(mapHeight);
		}
	}
	else
		cout << "Invalid map size! Max size is: " << MAX_MAP_WIDTH << ", " << MAX_MAP_HEIGHT << endl;
}
// Discard all map state: path bookkeeping, start/end flags, tiles and
// game objects. The grids must be re-sized (setMapSize) before reuse.
void Map::resetMap(){
	hasStart = false;
	hasEnd = false;
	mapPath.clear();
	mapObjects.clear();
	mapObjects.shrink_to_fit();
	tileMap.clear();
	tileMap.shrink_to_fit();
}
// Fill every cell with a blank tile. Cells that already hold a tile are
// cleared through removeTile so path bookkeeping stays consistent.
void Map::blankMap(){
	for(int tileX = 0; tileX < mapWidth; ++tileX){
		for(int tileY = 0 ; tileY < mapHeight; ++tileY){
			if(tileMap[tileX][tileY] == nullptr)
				tileMap[tileX][tileY].reset(new Tile(imagePath + "blank.png", tileX, tileY));
			else
				removeTile(tileX, tileY);
		}
	}
}
// Replace every remaining EMPTY tile with a scenery tile.
void Map::fillMap(){
	for(int tileX = 0 ; tileX < mapWidth ; ++tileX){
		for(int tileY = 0 ; tileY < mapHeight ; ++tileY){
			if(tileMap[tileX][tileY]->getType() == Tile::EMPTY)
				tileMap[tileX][tileY].reset(new SceneryTile(imagePath + "scenery.png", tileX, tileY));
		}
	}
}

// Inverse of fillMap: turn every SCENERY tile back into a blank tile.
void Map::unfillMap(){
	for(int tileX = 0 ; tileX < mapWidth ; ++tileX){
		for(int tileY = 0 ; tileY < mapHeight ; ++tileY){
			if(tileMap[tileX][tileY]->getType() == Tile::SCENERY)
				tileMap[tileX][tileY].reset(new Tile(imagePath + "blank.png", tileX, tileY));
		}
	}
}
// A map is valid when it has a start tile, an end tile, and no EMPTY
// tiles remain. Prints a diagnostic for each failed condition.
bool Map::isMapValid(){
	// Scan for a remaining EMPTY tile, stopping at the first one found.
	// (The original inner `break` only left the inner loop, so the scan
	// always walked every remaining column.)
	bool hasEmpty = false;
	for(int i = 0 ; i < mapWidth && !hasEmpty ; ++i){
		for(int j = 0 ; j < mapHeight ; ++j){
			if(tileMap[i][j]->getType() == Tile::EMPTY){
				hasEmpty = true;
				break;
			}
		}
	}
	if(!hasStart){
		cout << "missing Start tile!" << endl;
	}
	if(!hasEnd){
		cout << "missing End tile!" << endl;
	}
	if(hasEmpty){
		cout << "map still has Empty tiles!" << endl;
	}
	return hasStart && hasEnd && !hasEmpty;
}
//checks adjacent tiles to path to be placed, returns true if it is a valid placement
bool Map::validPathPlacement(const int tileX, const int tileY){
int adjacentTileCtr = 0;
bool isNextTile = false;
if(!outOfBounds(tileX+1, tileY) && (tileMap[tileX+1][tileY]->getType() == Tile::PATH || tileMap[tileX+1][tileY]->getType() == Tile::START)){
if(mapPath.back()->getTileX() == tileX+1 && mapPath.back()->getTileY() == tileY)
isNextTile = true;
++adjacentTileCtr;
}
if(!outOfBounds(tileX, tileY+1) && (tileMap[tileX][tileY+1]->getType() == Tile::PATH || tileMap[tileX][tileY+1]->getType() == Tile::START)){
if(mapPath.back()->getTileX() == tileX && mapPath.back()->getTileY() == tileY+1)
isNextTile = true;
++adjacentTileCtr;
}
if(!outOfBounds(tileX-1, tileY) && (tileMap[tileX-1][tileY]->getType() == Tile::PATH || tileMap[tileX-1][tileY]->getType() == Tile::START)){
if(mapPath.back()->getTileX() == tileX-1 && mapPath.back()->getTileY() == tileY)
isNextTile = true;
++adjacentTileCtr;
}
if(!outOfBounds(tileX, tileY-1) && (tileMap[tileX][tileY-1]->getType() == Tile::PATH || tileMap[tileX][tileY-1]->getType() == Tile::START)){
if(mapPath.back()->getTileX() == tileX && mapPath.back()->getTileY() == tileY-1)
isNextTile = true;
++adjacentTileCtr;
}
return adjacentTileCtr < 2 && isNextTile;
}
//returns true if position is outside of map boundaries
bool Map::outOfBounds(const int tileX, const int tileY) const{
	// In bounds means 0 <= tileX < mapWidth and 0 <= tileY < mapHeight.
	return tileX < 0 || tileX >= mapWidth || tileY < 0 || tileY >= mapHeight;
}
//private conversion function: tile-type name (as stored in the XML map
//files) -> Tile::TYPE enum. Unknown names map to Tile::EMPTY.
Tile::TYPE Map::convertType(const string tileTypeString) const{
	if(tileTypeString == "start")
		return Tile::START;
	else if(tileTypeString == "end")
		return Tile::END;
	else if(tileTypeString == "path")
		return Tile::PATH;
	else if(tileTypeString == "scenery")
		return Tile::SCENERY;
	else if(tileTypeString == "dead")
		return Tile::DEAD;
	else
		return Tile::EMPTY;
}
//private conversion function: Tile::TYPE enum -> tile-type name used in
//the XML map files. Must stay the inverse of the overload above.
string Map::convertType(Tile::TYPE type) const{
	if(type == Tile::START)
		return "start";
	else if(type == Tile::END)
		return "end";
	else if(type == Tile::PATH)
		return "path";
	else if(type == Tile::SCENERY)
		return "scenery";
	else if(type == Tile::DEAD)
		return "dead";
	else
		return "empty";
}
//creates tile and adds it according to its type. If path, start, or end; adds tile to mapPath queue accordingly
//Returns true when a tile was actually placed, false otherwise.
bool Map::addTile(int tileX, int tileY, const Tile::TYPE tileType){
	// Reject out-of-bounds coordinates up front. (Previously an
	// out-of-bounds call skipped both guarded branches and still fell
	// through to `return true`, reporting success without placing.)
	if(outOfBounds(tileX, tileY)){
		cout << "Tile out of bounds" << endl;
		return false;
	}
	if(getTile(tileX, tileY)->getType() != Tile::EMPTY){
		cout << "Tile not empty" << endl;
		return false;
	}
	switch(tileType){
		case(Tile::START):{
			// Only one start tile, and it must be placed before the end.
			if(hasEnd){
				cout << "Remove end tile first" << endl;
				return false;
			}
			else if(hasStart){
				cout << "Start tile already exists" << endl;
				return false;
			}
			tileMap[tileX][tileY].reset(new StartTile(imagePath + "start.png", tileX, tileY));
			mapPath.push_back(getTile(tileX, tileY));
			hasStart = true;
			break;
		}
		case(Tile::END):{
			// The end tile seals the path: it must extend the current tail.
			if(hasEnd){
				cout << "Remove end tile first" << endl;
				return false;
			}
			else if(!hasStart){
				cout << "Place a start tile first" << endl;
				return false;
			}
			if(validPathPlacement(tileX, tileY)){
				tileMap[tileX][tileY].reset(new EndTile(imagePath + "end.png", tileX, tileY));
				mapPath.push_back(getTile(tileX, tileY));
				hasEnd = true;
			}
			else
			{
				cout << "Not a valid end placement" << endl;
				return false;
			}
			break;
		}
		case(Tile::PATH):{
			// Path tiles may only be appended while the path is still open.
			if(hasEnd){
				cout << "Remove end tile first" << endl;
				return false;
			}
			else if(!hasStart){
				cout << "Place a start tile first" << endl;
				return false;
			}
			if(validPathPlacement(tileX, tileY)){
				tileMap[tileX][tileY].reset(new PathTile(imagePath + "path.png", tileX, tileY));
				mapPath.push_back(getTile(tileX, tileY));
			}
			else
			{
				cout << "Not a valid path placement" << endl;
				return false;
			}
			break;
		}
		case(Tile::DEAD):{
			tileMap[tileX][tileY].reset(new DeadTile(imagePath + "dead.png", tileX, tileY));
			break;
		}
		case(Tile::SCENERY):{
			tileMap[tileX][tileY].reset(new SceneryTile(imagePath + "scenery.png", tileX, tileY));
			break;
		}
		default:{
			cout << "No tile could be placed" << endl;
			return false;
			break;
		}
	}
	return true;
}
//removes tile at position specified, and if it is in the mapPath queue, removes it.
//if removing a path tile would invalidate the map path, all tiles in the queue that appear after removed tile
//are deleted from the queue and the map
void Map::removeTile(const int tileX, const int tileY){
	// Nothing to do for empty cells (or out-of-bounds positions).
	if(!outOfBounds(tileX, tileY) && getTile(tileX, tileY)->getType() == Tile::EMPTY)
		return;
	// Removing the start tears down the entire path (and any game objects
	// sitting on it), since the whole path depends on it.
	if(!outOfBounds(tileX, tileY) && getTile(tileX, tileY)->getType() == Tile::START){
		while(!mapPath.empty()){
			int thisX = mapPath.back()->getTileX();
			int thisY = mapPath.back()->getTileY();
			mapPath.pop_back();
			tileMap[thisX][thisY].reset(new Tile(imagePath + "blank.png", thisX, thisY));
			removeGameObject(thisX, thisY);
		}
		mapPath.shrink_to_fit();
		hasStart = false;
		hasEnd = false;
		return;
	}
	// The end tile is always the tail of mapPath: pop just that one entry.
	if(!outOfBounds(tileX, tileY) && getTile(tileX, tileY)->getType() == Tile::END){
		int thisX = mapPath.back()->getTileX();
		int thisY = mapPath.back()->getTileY();
		mapPath.pop_back();
		tileMap[thisX][thisY].reset(new Tile(imagePath + "blank.png", thisX, thisY));
		removeGameObject(thisX, thisY);
		mapPath.shrink_to_fit();
		hasEnd = false;
		return;
	}
	// Removing a middle path tile also removes every tile placed after it,
	// because the remaining path would otherwise be disconnected.
	if(!outOfBounds(tileX, tileY) && getTile(tileX, tileY)->getType() == Tile::PATH){
		while(!(mapPath.back()->getTileX() == tileX && mapPath.back()->getTileY() == tileY)){
			int thisX = mapPath.back()->getTileX();
			int thisY = mapPath.back()->getTileY();
			if(getTile(thisX,thisY)->getType() == Tile::END)
				hasEnd = false;
			mapPath.pop_back();
			tileMap[thisX][thisY].reset(new Tile(imagePath + "blank.png", thisX, thisY));
			removeGameObject(thisX, thisY);
		}
		mapPath.shrink_to_fit();
		// Finally remove the requested tile itself (now the path tail).
		if(mapPath.back()->getTileX() == tileX && mapPath.back()->getTileY() == tileY){
			int thisX = mapPath.back()->getTileX();
			int thisY = mapPath.back()->getTileY();
			mapPath.pop_back();
			tileMap[thisX][thisY].reset(new Tile(imagePath + "blank.png", thisX, thisY));
			removeGameObject(thisX, thisY);
			mapPath.shrink_to_fit();
			return;
		}
	}
	// Scenery and dead tiles are standalone: just blank the cell.
	if(!outOfBounds(tileX, tileY) && (getTile(tileX, tileY)->getType() == Tile::SCENERY || getTile(tileX, tileY)->getType() == Tile::DEAD)){
		tileMap[tileX][tileY].reset(new Tile(imagePath + "blank.png", tileX, tileY));
		removeGameObject(tileX, tileY);
	}
}
// Serialize the map to resources/maps/<filename>.xml using rapidxml.
// Layout: a <map width= height=> root, one <pathTile> per path entry (in
// walking order), then one <tile> per scenery/dead cell.
// NOTE: rapidxml keeps raw char pointers into caller-owned memory, so every
// stringified attribute value is pushed onto `store` to keep it alive until
// the document has been written out.
void Map::save(const string filename){
	using namespace rapidxml;
	if(!isMapValid()){
		cout << "Map not valid, can't be saved" << endl;
		return;
	}
	xml_document<> xmlDoc;
	ostringstream os;
	stack<string> store;
	const char* value;
	// Root node carries the map dimensions as attributes.
	os << mapWidth;
	store.push(os.str());
	value = store.top().c_str();
	xml_node<>* root = xmlDoc.allocate_node(node_element, "map");
	root->append_attribute(xmlDoc.allocate_attribute("width", value));
	os.str(string());
	os << mapHeight;
	store.push(os.str());
	value = store.top().c_str();
	root->append_attribute(xmlDoc.allocate_attribute("height", value));
	os.str(string());
	xmlDoc.append_node(root);
	// Emit the path tiles first, preserving their walking order.
	deque<const Tile* const> path = getMapPath();
	xml_node<>* pathTile;
	while(!path.empty()){
		pathTile = xmlDoc.allocate_node(node_element, "pathTile");
		int x = path.front()->getTileX();
		int y = path.front()->getTileY();
		string tileType = convertType(path.front()->getType());
		string textureID = path.front()->getFileName();
		path.pop_front();
		os << x;
		store.push(os.str());
		value = store.top().c_str();
		pathTile->append_attribute(xmlDoc.allocate_attribute("x", value));
		os.str(string());
		os << y;
		store.push(os.str());
		value = store.top().c_str();
		pathTile->append_attribute(xmlDoc.allocate_attribute("y", value));
		os.str(string());
		os << tileType;
		store.push(os.str());
		value = store.top().c_str();
		pathTile->append_attribute(xmlDoc.allocate_attribute("tileType", value));
		os.str(string());
		os << textureID;
		store.push(os.str());
		value = store.top().c_str();
		pathTile->append_attribute(xmlDoc.allocate_attribute("textureID", value));
		os.str(string());
		root->append_node(pathTile);
	}
	// Then emit every scenery/dead tile; blank cells are not stored.
	xml_node<>* tile;
	for(int tileX = 0; tileX < mapWidth ; ++tileX){
		for(int tileY = 0; tileY < mapHeight ; ++tileY){
			if(tileMap[tileX][tileY]->getType() == Tile::SCENERY ||
				tileMap[tileX][tileY]->getType() == Tile::DEAD){
				tile = xmlDoc.allocate_node(node_element, "tile");
				string tileType = convertType(tileMap[tileX][tileY]->getType());
				string textureID = tileMap[tileX][tileY]->getFileName();
				os << tileX;
				store.push(os.str());
				value = store.top().c_str();
				tile->append_attribute(xmlDoc.allocate_attribute("x", value));
				os.str(string());
				os << tileY;
				store.push(os.str());
				value = store.top().c_str();
				tile->append_attribute(xmlDoc.allocate_attribute("y", value));
				os.str(string());
				os << tileType;
				store.push(os.str());
				value = store.top().c_str();
				tile->append_attribute(xmlDoc.allocate_attribute("tileType", value));
				os.str(string());
				os << textureID;
				store.push(os.str());
				value = store.top().c_str();
				tile->append_attribute(xmlDoc.allocate_attribute("textureID", value));
				os.str(string());
				root->append_node(tile);
			}
		}
	}
	value = nullptr;
	os.clear();
	// Write the document out, truncating any existing file of the same name.
	std::ofstream thisMap;
	if(int(filename.size()) > 0)
		thisMap.open("resources/maps/" + filename + ".xml", std::ofstream::out | std::ofstream::trunc);
	if(!thisMap.is_open()){
		xmlDoc.clear();
		cout << "Map could not be saved" << endl;
		return;
	}
	thisMap << xmlDoc;
	thisMap.close();
	xmlDoc.clear();
	cout << "Saved successfully" << endl;
}
// Load a map previously written by save() from resources/maps/<filename>.
// Silently returns when the file cannot be opened. Tiles are re-placed via
// addTile, which rebuilds the mapPath bookkeeping in document order.
void Map::load(string filename){
	using namespace rapidxml;
	std::ifstream inFile;
	inFile.open("resources/maps/" + filename);
	if(!inFile.is_open())
		return;
	//Dump contents of file into a string
	string xmlString;
	string line;
	while(getline(inFile,line))
		xmlString +=line;
	inFile.close();
	//Convert string to rapidxml readable char* (parse mutates the buffer,
	//so it must stay alive for the lifetime of xmlLoad)
	vector<char> xmlData = vector<char>(xmlString.begin(), xmlString.end());
	xmlData.push_back('\0');
	//Create a parsed document with &xmlData[0] which is the char*
	xml_document<> xmlLoad;
	xmlLoad.parse<parse_no_data_nodes>(&xmlData[0]);
	//Get the root node
	xml_node<>* root = xmlLoad.first_node();
	//Get map attributes
	int mapW = atoi(root->first_attribute("width")->value());
	int mapH = atoi(root->first_attribute("height")->value());
	//Reset and initialize blank map
	resetMap();
	setMapSize(mapW, mapH);
	blankMap();
	//Go through each tile (both <pathTile> and <tile> elements)
	xml_node<>* tile = root->first_node();
	while(tile)
	{
		//Get all the attributes
		int tileX = atoi(tile->first_attribute("x")->value());
		int tileY = atoi(tile->first_attribute("y")->value());
		string tileTypeString = tile->first_attribute("tileType")->value();
		string textureID = tile->first_attribute("textureID")->value();
		Tile::TYPE tileType = convertType(tileTypeString);
		addTile(tileX, tileY, tileType);
		tile = tile->next_sibling();
	}
	xmlLoad.clear();
	cout << "Map loaded successfully" << endl;
}
//implementations for game objects similar to implementation for tile objects
// Game object at (tileX, tileY), or nullptr when out of bounds or the cell
// holds no object.
const GameObject* const Map::getGameObject(const int tileX, const int tileY){
	if(!outOfBounds(tileX, tileY))
		return mapObjects[tileX][tileY].get();
	else
		return nullptr;
}

// Destroy the game object at (tileX, tileY), if any.
void Map::removeGameObject(const int tileX, const int tileY){
	if(!outOfBounds(tileX, tileY) && mapObjects[tileX][tileY] != nullptr)
		mapObjects[tileX][tileY].reset(nullptr);
}
// Spawn a critter on a walkable tile (PATH, START or END). The sprite is
// positioned at tile coordinates * 32 (32 appears to be the tile size in
// pixels — confirm against the tile assets).
void Map::placeCritter(const string textureID, const int tileX, int const tileY){
	if(!outOfBounds(tileX, tileY) && (getTile(tileX, tileY)->getType() == Tile::PATH || getTile(tileX, tileY)->getType() == Tile::START || getTile(tileX, tileY)->getType() == Tile::END)){
		mapObjects[tileX][tileY].reset(new GameObject());
		mapObjects[tileX][tileY]->load(textureID);
		mapObjects[tileX][tileY]->setPosition(float(tileX*32), float(tileY*32));
	}
	else
		cout << "Critter not placed" << endl;
}

// Spawn a tower; towers may only be placed on SCENERY tiles.
void Map::placeTower(const string textureID, const int tileX, const int tileY){
	if(!outOfBounds(tileX, tileY) && (getTile(tileX, tileY)->getType() == Tile::SCENERY)){
		mapObjects[tileX][tileY].reset(new GameObject());
		mapObjects[tileX][tileY]->load(textureID);
		mapObjects[tileX][tileY]->setPosition(float(tileX*32), float(tileY*32));
	}
	else
		cout << "Tower not placed" << endl;
}
//draws map to game window
void Map::draw(sf::RenderWindow& gameWindow){
	// For each cell, render the tile first, then any game object occupying
	// the same cell so objects appear on top of the terrain.
	for(int x = 0; x < mapWidth; ++x){
		for(int y = 0; y < mapHeight; ++y){
			if(tileMap[x][y] != nullptr)
				tileMap[x][y]->draw(gameWindow);
			if(mapObjects[x][y] != nullptr)
				mapObjects[x][y]->draw(gameWindow);
		}
	}
}
# -*- coding: utf-8 -*-
#imports
from linkedin import linkedin
import easygui
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
import requests
def enum(*sequential, **named):
    """Create a simple enum-like class.

    Positional names are numbered 0..n-1 in order; keyword arguments give
    explicit name/value pairs. Returns a new class whose class attributes
    are the enum members.
    """
    members = {name: index for index, name in enumerate(sequential)}
    members.update(named)
    return type('Enum', (), members)
# Run modes for the extractor, selected via the -mode command line flag.
Mode = enum('PREVIEW', 'EDIT', 'REFRESH')

mode = 0
paramslist = []
key = ''
i = 0

# easygui dialog configuration: the four credential fields shown to the user.
msg = "Enter Required Information"
title = "Linkedin Extractor"
fieldNames = ["Consumer Key","Consumer Secret",
"User Key","User Secret"]
fieldValues = [] # we start with blanks for the values
for i in range(4):
    fieldValues.append(i)

# Parse the command line: -mode <preview|edit|refresh>, -size <n>,
# -params <key=hexvalue;key=hexvalue;...> (split into paramslist).
for i in range(len(sys.argv)):
    if str(sys.argv[i]).lower() == "-mode" and (i + 1) < len(sys.argv):
        if str(sys.argv[i + 1]).lower() == "preview":
            mode = Mode.PREVIEW
        elif str(sys.argv[i + 1]).lower() == "edit":
            mode = Mode.EDIT
        elif str(sys.argv[i + 1]).lower() == "refresh":
            mode = Mode.REFRESH
    elif str(sys.argv[i]).lower() == "-size":
        size = int(sys.argv[i + 1])
    elif str(sys.argv[i]).lower() == "-params":
        params = str(sys.argv[i + 1])
        paramslist = params.split(';')
    # NOTE(review): this increment is a no-op — `i` is reassigned by the
    # for loop on every iteration.
    i += 1
def setArgs(fieldValues):
    """Reset all four credential fields to empty strings.

    Mutates ``fieldValues`` in place and also returns it, matching how the
    PREVIEW mode uses the result. (The previous version also assigned four
    unused locals, which have been removed.)
    """
    fieldValues[0] = ''
    fieldValues[1] = ''
    fieldValues[2] = ''
    fieldValues[3] = ''
    return fieldValues
def parseArgs(fieldValues):
    """Populate fieldValues from the global ';'-separated ``paramslist``.

    Each entry looks like ``name=hexvalue``; the value is hex-decoded into
    the matching slot of ``fieldValues``. On any decode/parse failure the
    slot is filled with an ``ENTER_...`` placeholder instead. Returns the
    (mutated) ``fieldValues`` list.
    """
    # Map recognized parameter name -> (fieldValues index, placeholder).
    fields = {
        'consumer_key': (0, 'ENTER_CONSUMER_KEY'),
        'consumer_secret': (1, 'ENTER_CONSUMER_SECRET'),
        'user_token': (2, 'ENTER_USER_TOKEN'),
        'user_secret': (3, 'ENTER_USER_SECRET'),
    }
    for param in paramslist:
        target = fields.get(param.split('=')[0].lower())
        if target is None:
            continue
        index, placeholder = target
        # Bare except kept deliberately: it also covers a missing '='
        # (IndexError) and malformed hex, exactly as before.
        try:
            fieldValues[index] = param.split('=')[1].decode('hex')
        except:
            fieldValues[index] = placeholder
    return fieldValues
def getScreenInput(fieldValues):
    """Show the credentials dialog and re-prompt until every field is filled.

    Returns the list of four entered values, or None if the user cancelled.
    """
    fieldValues = easygui.multenterbox(msg = msg, title = title, fields = fieldNames, values = fieldValues )
    # make sure that none of the fields was left blank
    while 1:
        if fieldValues == None: break
        errmsg = ""
        for i in range(len(fieldNames)):
            if fieldValues[i].strip() == "":
                errmsg += ('"%s" is a required field.\n\n' % fieldNames[i])
        if errmsg == "":
            break # no problems found
        # Re-show the dialog with the accumulated error text as the prompt.
        fieldValues = easygui.multenterbox(errmsg, title, fieldNames, fieldValues)
    return fieldValues
def printData(fieldValues):
    """Emit the Lumira data-source header and the user's LinkedIn
    connections (first name, last name, location) as CSV on stdout.

    ``fieldValues`` holds [consumer key, consumer secret, user token,
    user secret]; None means the user cancelled the credentials dialog,
    in which case an error record is emitted instead.
    """
    if fieldValues != None:
        CONSUMER_KEY = fieldValues[0]
        CONSUMER_SECRET = fieldValues[1]
        USER_TOKEN = fieldValues[2]
        USER_SECRET = fieldValues[3]
        RETURN_URL = ''
        # The DSInfo block echoes the credentials hex-encoded so the host
        # application can pass them back on refresh via -params.
        print "beginDSInfo"
        print """fileName;#;true
        csv_first_row_has_column_names;true;true;
        csv_separator;|;true
        csv_number_grouping;,;true
        csv_number_decimal;.;true
        csv_date_format;d.M.yyyy;true"""
        print ''.join(['consumer_key;', str(fieldValues[0]).encode('hex'), ';true'])
        print ''.join(['consumer_secret;', str(fieldValues[1]).encode('hex'), ';true'])
        print ''.join(['user_token;', str(fieldValues[2]).encode('hex'), ';true'])
        print ''.join(['user_secret;', str(fieldValues[3]).encode('hex'), ';true'])
        print "endDSInfo"
        print "beginData"
        print 'First_Name, Last_Name, Location'
        #try:
        # Instantiate the developer authentication class
        auth = linkedin.LinkedInDeveloperAuthentication(CONSUMER_KEY, CONSUMER_SECRET,
        USER_TOKEN, USER_SECRET,
        RETURN_URL,
        permissions=linkedin.PERMISSIONS.enums.values())
        # Pass it in to the app...
        app = linkedin.LinkedInApplication(auth)
        try:
            connections = app.get_connections()
        except requests.ConnectionError:
            easygui.msgbox('Connection Error, Extension Doesnt Support Proxies Yet')
        #print connections
        # One CSV row per connection; commas are stripped from the values so
        # they cannot break the comma-separated output.
        for c in connections['values']:
            #if c.has_key('location')]
            try:
                print ''.join([c['firstName'].replace(',', ''), ',']),
            except:
                print ''.join(['None', ', ']),
            try:
                print ''.join([c['lastName'].replace(',', ''), ',']),
            except:
                print ''.join(['None', ', ']),
            try:
                print ''.join([c['location']['name'].replace(',', '')])
            except:
                print ''.join(['None'])
        print "endData"
    else:
        # User cancelled: emit empty DSInfo and an error record.
        print "beginDSInfo"
        print "endDSInfo"
        print "beginData"
        print """Error
        User Cancelled"""
        print "endData"
# Dispatch on the requested mode:
#   PREVIEW - start from blank credentials, prompt the user, print data
#   EDIT    - start from the passed-in -params, prompt the user, print data
#   REFRESH - non-interactive: use the passed-in -params directly
if mode == Mode.PREVIEW:
    fieldValues = setArgs(fieldValues)
    #easygui.textbox(msg = 'preview1', text = sys.argv)
    fieldValues = getScreenInput(fieldValues)
    #easygui.textbox(msg = 'preview2', text = fieldValues)
    printData(fieldValues)
elif mode == Mode.EDIT:
    #easygui.textbox(msg = 'edit1', text = sys.argv)
    fieldValues = parseArgs(fieldValues)
    #easygui.textbox(msg = 'edit2', text = fieldValues)
    fieldValues = getScreenInput(fieldValues)
    #easygui.textbox(msg = 'edit2', text = fieldValues)
    printData(fieldValues)
elif mode == Mode.REFRESH:
    fieldValues = parseArgs(fieldValues)
    #easygui.textbox(msg = 'refresh1', text = sys.argv)
    printData(fieldValues)
| SAP/lumira-extension-da-linkedin | source/LinkedinExtractor.py | Python | apache-2.0 | 6,132 |
package coordinate
import (
"fmt"
"testing"
)
// TestEuclideanDistance checks EuclideanDistance against a hand-computed
// straight-line distance.
func TestEuclideanDistance(t *testing.T) {
	tests := []struct {
		c1   []float64
		c2   []float64
		want float64
	}{
		// 3-4-5 right triangle: distance between (2,-1) and (-2,2) is 5.
		{[]float64{2, -1}, []float64{-2, 2}, 5},
	}

	for _, test := range tests {
		c1 := New(test.c1...)
		c2 := New(test.c2...)
		t.Run(fmt.Sprintf("Euclidean distance between %s and %s", c1, c2), func(t *testing.T) {
			got := EuclideanDistance(c1, c2)
			if test.want != got {
				t.Errorf("Wanted %v got %v", test.want, got)
			}
		})
	}
}
// TestManhattanDistance checks ManhattanDistance (|dx| + |dy|) and also
// verifies the distance is symmetric in its arguments.
func TestManhattanDistance(t *testing.T) {
	tests := []struct {
		x1       float64
		y1       float64
		x2       float64
		y2       float64
		expected float64
	}{
		{0, 0, 1, 1, 2},
		{0, 0, 2, 2, 4},
		{-1, -1, 1, 1, 4},
		{1, 1, 1, 1, 0},
	}

	for i, test := range tests {
		start := New(test.x1, test.y1)
		end := New(test.x2, test.y2)

		d := ManhattanDistance(start, end)
		if d != test.expected {
			t.Errorf("tests[%d] expected %f got %f", i, test.expected, d)
		}

		// Symmetry: swapping the endpoints must give the same distance.
		d = ManhattanDistance(end, start)
		if d != test.expected {
			t.Errorf("tests[%d] expected %f got %f", i, test.expected, d)
		}
	}
}
// TestAddSubtractCoordinates checks Coordinate.String formatting plus the
// Add and Subtract operations against precomputed results.
func TestAddSubtractCoordinates(t *testing.T) {
	tests := []struct {
		x float64
		y float64

		diffX float64
		diffY float64

		addX float64
		addY float64

		subX float64
		subY float64
	}{
		{1, 1, 0, -1, 1, 0, 1, 2},
		{1, 1, 0, -1, 1, 0, 1, 2},
		{1, 1, 0, -1, 1, 0, 1, 2},
		{1, 1, -1, 0, 0, 1, 2, 1},
	}

	for i, test := range tests {
		coord := New(test.x, test.y)
		// NOTE(review): diffX holds a full (diffX, diffY) coordinate — the
		// variable name is misleading.
		diffX := New(test.diffX, test.diffY)

		result := coord.Add(diffX)
		expected := New(test.addX, test.addY)

		// String must render as "(x,y)" with %f-formatted components.
		wantStr := fmt.Sprintf("(%f,%f)", test.x, test.y)
		gotStr := fmt.Sprintf("%s", coord)
		if wantStr != gotStr {
			t.Errorf("tests[%d] expected %s got %s", i, wantStr, gotStr)
		}

		if !result.Equal(expected) {
			t.Errorf("tests[%d] Add expected %s got %s", i, expected, result)
		}

		result = coord.Subtract(diffX)
		expected = New(test.subX, test.subY)
		if !result.Equal(expected) {
			t.Errorf("tests[%d] Subtract expected %s got %s", i, expected, result)
		}
	}
}
// TestSegmentCoincident checks Segment.Coincident in both argument orders,
// since coincidence must be symmetric.
func TestSegmentCoincident(t *testing.T) {
	tests := []struct {
		name string
		s1   *Segment
		s2   *Segment
		want bool
	}{
		{"true", NewSegment(New(0, 0), New(0, 1)), NewSegment(New(0, 0), New(0, 1)), true},
		{"false", NewSegment(New(0, 2), New(0, 1)), NewSegment(New(0, 0), New(0, 1)), false},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			// Exercise both s1.Coincident(s2) and s2.Coincident(s1).
			for _, s := range [][]*Segment{{test.s1, test.s2}, {test.s2, test.s1}} {
				got := s[0].Coincident(s[1])
				if test.want != got {
					t.Errorf("Wanted (%v).Coincident(%v) == %v got %v", s[0], s[1], test.want, got)
				}
			}
		})
	}
}
// TestSegmentIntersection checks Segment.Intersection for 3D segments:
// both the boolean "do they intersect" result and, when they do, the
// intersection point itself.
func TestSegmentIntersection(t *testing.T) {
	tests := []struct {
		name             string
		l1               *Segment
		l2               *Segment
		wantIntersection Coordinate
		wantIntersect    bool
	}{
		{"does not intersect", NewSegment(New(5, 2, -1), New(6, 0, -4)), NewSegment(New(2, 0, 4), New(3, 2, 3)), nil, false},
		{"intersect", NewSegment(New(3, 6, 5), New(15, -18, -31)), NewSegment(New(1, -2, 5), New(12, 20, -6)), New(4, 4, 2), true},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			gotIntersection, gotIntersect := test.l1.Intersection(test.l2)
			if gotIntersect == test.wantIntersect {
				// Only compare the point when an intersection was expected.
				if gotIntersect && !gotIntersection.Equal(test.wantIntersection) {
					t.Errorf("Wanted intersection %v got %v", test.wantIntersection, gotIntersection)
				}
			} else {
				t.Errorf("Wanted intersect to be %v got %v", test.wantIntersect, gotIntersect)
			}
		})
	}
}
| abates/AdventOfCode | coordinate/coordinate_test.go | GO | apache-2.0 | 3,612 |
/*
** Copyright 2014 Centreon
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
** http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
**
** For more information : contact@centreon.com
*/
#include "com/centreon/broker/notification/notification_scheduler.hh"
#include <QMutexLocker>
#include <limits>
#include "com/centreon/broker/notification/node_cache.hh"
#include "com/centreon/broker/notification/state.hh"
using namespace com::centreon::broker::notification;
using namespace com::centreon::broker::notification::objects;
/**
 *  Default constructor.
 *
 *  Only initializes members; the scheduling thread itself is spawned
 *  later by start().
 *
 *  @param[in] st     The state object (kept by reference).
 *  @param[in] cache  The data cache object (kept by reference).
 */
notification_scheduler::notification_scheduler(state& st, node_cache& cache)
    : _should_exit{false},
      _general_mutex{},
      _state(st),
      _cache(cache),
      _started_flag{false} {}
/**
 *  Called by the notification thread when it starts.
 *
 *  Loops until exit() is requested: sleeps until the next scheduled
 *  action is due (or indefinitely while the queue is empty), then
 *  processes the pending actions.
 */
void notification_scheduler::run() {
  while (1) {
    // Lock the general mutex used by the notification scheduler.
    std::unique_lock<std::mutex> lock(_general_mutex);

    // Wait until the first action in the queue - or forever until awakened
    // if the queue is empty.
    time_t first_time = _queue.get_first_time();
    time_t now = ::time(NULL);
    unsigned long wait_for = first_time == time_t(-1)
                                 ? std::numeric_limits<unsigned long>::max()
                                 : (first_time >= now) ? (first_time - now) * 1000
                                                       : 0;
    log_v2::notification()->debug(
        "notification: scheduler sleeping for {} seconds", wait_for / 1000.0);
    if (first_time == time_t(-1))
      // Empty queue: wait without a timeout instead of passing a
      // near-ULONG_MAX duration to wait_for(), whose deadline
      // computation can overflow with such huge values.
      _general_condition.wait(lock);
    else
      _general_condition.wait_for(lock, std::chrono::milliseconds(wait_for));
    log_v2::notification()->debug("notification: scheduler waking up");

    // The should exit flag was set - exit.
    if (_should_exit)
      break;

    // _process_actions() unlocks _general_mutex itself once the queued
    // actions have been moved to a local queue.  Relinquish ownership
    // here so the unique_lock destructor does not unlock the mutex a
    // second time (undefined behavior).
    lock.release();

    // Process the actions and release the mutex.
    _process_actions();
  }
}
/**
 *  Start the notification scheduler thread.
 *
 *  Note: this only spawns the thread and flags it as started; it does
 *  not block until the thread has actually begun running.
 */
void notification_scheduler::start() {
  _thread = std::thread(&notification_scheduler::run, this);
  _started_flag = true;
}
/**
 *  Wait for the notification scheduler thread to terminate.
 *
 *  Joins the thread and clears the started flag.  Call exit() first to
 *  request termination, otherwise this blocks indefinitely.
 */
void notification_scheduler::wait() {
  _thread.join();
  _started_flag = false;
}
/**
 *  Ask gracefully for the notification thread to exit.
 *
 *  Thread-safe: sets the exit flag under the scheduler mutex and wakes
 *  the scheduling thread so it can observe the flag and leave its loop.
 *  Does not wait for the thread to finish; use wait() for that.
 */
void notification_scheduler::exit() throw() {
  // Set the should exit flag.
  {
    std::lock_guard<std::mutex> lock(_general_mutex);
    _should_exit = true;

    // Wake the notification scheduling thread.
    _general_condition.notify_all();
  }
}
/**
 *  @brief Add an action to the internal queue.
 *
 *  Can be called outside or inside the notif thread context.
 *
 *  @param at The time of the action.
 *  @param a  The action.
 */
void notification_scheduler::add_action_to_queue(time_t at, action a) {
  std::lock_guard<std::mutex> lock(_general_mutex);

  // The scheduling thread only needs to be woken when this action becomes
  // the next one to run: either it is scheduled before the current head
  // of the queue, or the queue was empty.
  time_t first_time(_queue.get_first_time());
  bool need_to_wake((first_time > at) || ((time_t)-1 == first_time));

  _queue.run(at, a);

  // Wake the notification scheduling thread if needed.
  if (need_to_wake)
    _general_condition.notify_all();
}
/**
 *  @brief Remove all the actions associated to a node.
 *
 *  Called outside the notif thread context.
 *
 *  @param[in] id The id of the node.
 */
void notification_scheduler::remove_actions_of_node(objects::node_id id) {
  std::lock_guard<std::mutex> lock(_general_mutex);

  // Remember the current head of the queue so we can detect whether the
  // removals changed it.
  time_t old_first_time(_queue.get_first_time());

  // Erase every action that targets this node.
  std::vector<const action*> actions(_queue.get_actions_of_node(id));
  for (const action* a : actions)
    _queue.remove(*a);

  // If the first event was among the deleted ones, wake the scheduling
  // thread so it can recompute its sleep delay.
  if (_queue.get_first_time() != old_first_time)
    _general_condition.notify_all();
}
/**
 *  @brief Called repeatedly by the notification thread to process actions.
 *
 *  This method releases the mutex as soon as possible to prevent long
 *  mutex locking.
 *
 *  Precondition: _general_mutex is locked by the calling thread (see
 *  run()).  The mutex is unlocked here once the pending actions have
 *  been moved to a local queue.
 *
 *  NOTE(review): because this method unlocks _general_mutex directly,
 *  the caller must relinquish any RAII ownership of the mutex before
 *  calling (otherwise the mutex ends up unlocked twice) — confirm the
 *  caller handles this.
 */
void notification_scheduler::_process_actions() {
  // Move the global queue to a local queue and release the mutex.
  // That way, we can add new actions in an external thread while this thread
  // is processing those actions.
  run_queue local_queue;
  time_t now = ::time(NULL);
  _queue.move_to_queue(local_queue, now);
  _general_mutex.unlock();

  // Iterate on the local queue.
  for (run_queue::iterator it(local_queue.begin()), end(local_queue.end());
       it != end; ++it) {
    // The action processing can add other actions to the queue.
    std::vector<std::pair<time_t, action> > spawned_actions;
    {
      // Lock the state mutex (released at end of this scope).
      std::unique_ptr<QReadLocker> lock(_state.read_lock());

      // Process the action.
      it->second->process_action(_state, _cache, spawned_actions);
    }

    // Add the spawned actions to the queue.
    _schedule_actions(spawned_actions);
  }
}
/**
* Schedule several actions.
*
* @param[in] actions The actions to schedule.
*/
void notification_scheduler::_schedule_actions(
std::vector<std::pair<time_t, action> > const& actions) {
for (std::vector<std::pair<time_t, action> >::const_iterator
it(actions.begin()),
end(actions.end());
it != end; ++it)
add_action_to_queue(it->first, it->second);
}
| centreon/centreon-broker | notification/src/notification_scheduler.cc | C++ | apache-2.0 | 6,086 |
package org.carrot2.elasticsearch;
import java.io.IOException;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchWrapperException;
import org.elasticsearch.common.io.stream.StreamInput;
/** Generic exception implementing {@link org.elasticsearch.ElasticsearchWrapperException} */
@SuppressWarnings("serial")
public class ClusteringException extends ElasticsearchException
    implements ElasticsearchWrapperException {

  /** Wraps another throwable as the root cause of a clustering failure. */
  public ClusteringException(Throwable cause) {
    super(cause);
  }

  /** Creates an exception with a formatted message; {@code args} fill {@code {}} placeholders. */
  public ClusteringException(String msg, Object... args) {
    super(msg, args);
  }

  /** Creates an exception with a formatted message and a root cause. */
  public ClusteringException(String msg, Throwable cause, Object... args) {
    super(msg, cause, args);
  }

  /** Deserializes the exception from a stream (Elasticsearch wire protocol). */
  public ClusteringException(StreamInput in) throws IOException {
    super(in);
  }
}
| carrot2/elasticsearch-carrot2 | src/main/java/org/carrot2/elasticsearch/ClusteringException.java | Java | apache-2.0 | 814 |
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.ntp.snippets;
import android.graphics.Bitmap;
import android.support.annotation.Nullable;
import org.chromium.base.DiscardableReferencePool.DiscardableReference;
import org.chromium.chrome.browser.suggestions.OfflinableSuggestion;
import java.io.File;
/**
 * Represents the data for an article card on the NTP.
 */
public class SnippetArticle implements OfflinableSuggestion {
    /** The category of this article. */
    public final int mCategory;

    /** The identifier for this article within the category - not necessarily unique globally. */
    public final String mIdWithinCategory;

    /** The title of this article. */
    public final String mTitle;

    /** The canonical publisher name (e.g., New York Times). */
    public final String mPublisher;

    /** The snippet preview text. */
    public final String mPreviewText;

    /** The URL of this article. This may be an AMP url. */
    public final String mUrl;

    /** The time when this article was published. */
    public final long mPublishTimestampMilliseconds;

    /** The score expressing relative quality of the article for the user. */
    public final float mScore;

    /**
     * The time when the article was fetched from the server. This field is only used for remote
     * suggestions.
     */
    public final long mFetchTimestampMilliseconds;

    /** The rank of this article within its section. -1 until assigned via {@link #setRank}. */
    private int mPerSectionRank = -1;

    /** The global rank of this article in the complete list. -1 until assigned via
     * {@link #setRank}. */
    private int mGlobalRank = -1;

    /** Bitmap of the thumbnail, fetched lazily, when the RecyclerView wants to show the snippet. */
    private DiscardableReference<Bitmap> mThumbnailBitmap;

    /** Stores whether impression of this article has been tracked already. */
    private boolean mImpressionTracked;

    /** Whether the linked article represents an asset download. */
    private boolean mIsAssetDownload;

    /** The GUID of the asset download (only for asset download articles). */
    private String mAssetDownloadGuid;

    /** The path to the asset download (only for asset download articles). */
    private File mAssetDownloadFile;

    /** The mime type of the asset download (only for asset download articles). */
    private String mAssetDownloadMimeType;

    /** The tab id of the corresponding tab (only for recent tab articles). */
    private int mRecentTabId;

    /** The offline id of the corresponding offline page, if any. */
    private Long mOfflinePageOfflineId;

    /**
     * Creates a SnippetArticleListItem object that will hold the data.
     */
    public SnippetArticle(int category, String idWithinCategory, String title, String publisher,
            String previewText, String url, long publishTimestamp, float score,
            long fetchTimestamp) {
        mCategory = category;
        mIdWithinCategory = idWithinCategory;
        mTitle = title;
        mPublisher = publisher;
        mPreviewText = previewText;
        mUrl = url;
        mPublishTimestampMilliseconds = publishTimestamp;
        mScore = score;
        mFetchTimestampMilliseconds = fetchTimestamp;
    }

    @Override
    public boolean equals(Object other) {
        // Fast path for self-comparison; identity implies equality here.
        if (this == other) return true;
        if (!(other instanceof SnippetArticle)) return false;
        SnippetArticle rhs = (SnippetArticle) other;
        // Identity is (category, id-within-category); must stay consistent with hashCode().
        return mCategory == rhs.mCategory && mIdWithinCategory.equals(rhs.mIdWithinCategory);
    }

    @Override
    public int hashCode() {
        return mCategory ^ mIdWithinCategory.hashCode();
    }

    /**
     * Returns this article's thumbnail as a {@link Bitmap}. Can return {@code null} as it is
     * initially unset (or if the discardable reference has been released).
     */
    public Bitmap getThumbnailBitmap() {
        return mThumbnailBitmap == null ? null : mThumbnailBitmap.get();
    }

    /** Sets the thumbnail bitmap for this article. */
    public void setThumbnailBitmap(DiscardableReference<Bitmap> bitmap) {
        mThumbnailBitmap = bitmap;
    }

    /** Returns whether to track an impression for this article. */
    public boolean trackImpression() {
        // Track UMA only upon the first impression per life-time of this object.
        if (mImpressionTracked) return false;
        mImpressionTracked = true;
        return true;
    }

    /** @return whether a snippet is a remote suggestion. */
    public boolean isArticle() {
        return mCategory == KnownCategories.ARTICLES;
    }

    /** @return whether a snippet is either offline page or asset download. */
    public boolean isDownload() {
        return mCategory == KnownCategories.DOWNLOADS;
    }

    /** @return whether a snippet is asset download. */
    public boolean isAssetDownload() {
        return mIsAssetDownload;
    }

    /**
     * @return the GUID of the asset download. May only be called if {@link #mIsAssetDownload} is
     * {@code true} (which implies that this snippet belongs to the DOWNLOADS category).
     */
    public String getAssetDownloadGuid() {
        assert isDownload();
        assert mIsAssetDownload;
        return mAssetDownloadGuid;
    }

    /**
     * @return the asset download path. May only be called if {@link #mIsAssetDownload} is
     * {@code true} (which implies that this snippet belongs to the DOWNLOADS category).
     */
    public File getAssetDownloadFile() {
        assert isDownload();
        assert mIsAssetDownload;
        return mAssetDownloadFile;
    }

    /**
     * @return the mime type of the asset download. May only be called if {@link #mIsAssetDownload}
     * is {@code true} (which implies that this snippet belongs to the DOWNLOADS category).
     */
    public String getAssetDownloadMimeType() {
        assert isDownload();
        assert mIsAssetDownload;
        return mAssetDownloadMimeType;
    }

    /**
     * Marks the article suggestion as an asset download with the given path and mime type. May only
     * be called if this snippet belongs to DOWNLOADS category.
     */
    public void setAssetDownloadData(String downloadGuid, String filePath, String mimeType) {
        assert isDownload();
        mIsAssetDownload = true;
        mAssetDownloadGuid = downloadGuid;
        mAssetDownloadFile = new File(filePath);
        mAssetDownloadMimeType = mimeType;
    }

    /**
     * Marks the article suggestion as an offline page download with the given id. May only
     * be called if this snippet belongs to DOWNLOADS category.
     */
    public void setOfflinePageDownloadData(long offlinePageId) {
        assert isDownload();
        mIsAssetDownload = false;
        setOfflinePageOfflineId(offlinePageId);
    }

    @Override
    public boolean requiresExactOfflinePage() {
        return isDownload() || isRecentTab();
    }

    public boolean isRecentTab() {
        return mCategory == KnownCategories.RECENT_TABS;
    }

    /**
     * @return the corresponding recent tab id. May only be called if this snippet is a recent tab
     * article.
     */
    public int getRecentTabId() {
        assert isRecentTab();
        return mRecentTabId;
    }

    /**
     * Sets tab id and offline page id for recent tab articles. May only be called if this snippet
     * is a recent tab article.
     */
    public void setRecentTabData(int tabId, long offlinePageId) {
        assert isRecentTab();
        mRecentTabId = tabId;
        setOfflinePageOfflineId(offlinePageId);
    }

    @Override
    public String getUrl() {
        return mUrl;
    }

    @Override
    public void setOfflinePageOfflineId(@Nullable Long offlineId) {
        mOfflinePageOfflineId = offlineId;
    }

    @Override
    @Nullable
    public Long getOfflinePageOfflineId() {
        return mOfflinePageOfflineId;
    }

    @Override
    public String toString() {
        // For debugging purposes. Displays the first 42 characters of the title.
        return String.format("{%s, %1.42s}", getClass().getSimpleName(), mTitle);
    }

    public void setRank(int perSectionRank, int globalRank) {
        mPerSectionRank = perSectionRank;
        mGlobalRank = globalRank;
    }

    public int getGlobalRank() {
        return mGlobalRank;
    }

    public int getPerSectionRank() {
        return mPerSectionRank;
    }
}
| mogoweb/365browser | app/src/main/java/org/chromium/chrome/browser/ntp/snippets/SnippetArticle.java | Java | apache-2.0 | 8,388 |
using System;
using System.ComponentModel;
using DesktopWidgets.Helpers;
using DesktopWidgets.Properties;
namespace DesktopWidgets.Actions
{
/// <summary>
/// Widget action that mutes, unmutes, or toggles the mute state of the
/// target widget for a configurable duration.
/// </summary>
public class WidgetMuteUnmuteAction : WidgetActionBase
{
    /// <summary>How long a mute triggered by this action lasts. Defaults to the app-wide setting.</summary>
    [DisplayName("Duration")]
    public TimeSpan Duration { get; set; } = Settings.Default.MuteDuration;

    /// <summary>Whether to toggle, mute, or unmute. Defaults to toggle.</summary>
    [DisplayName("Mode")]
    public MuteMode Mode { get; set; } = MuteMode.Toggle;

    /// <summary>Dispatches to the widget's mute helper according to <see cref="Mode"/>.</summary>
    protected override void ExecuteAction()
    {
        base.ExecuteAction();
        switch (Mode)
        {
            case MuteMode.Toggle:
                WidgetId.ToggleMute(Duration);
                break;
            case MuteMode.Mute:
                WidgetId.Mute(Duration);
                break;
            case MuteMode.Unmute:
                // Unmuting is immediate; Duration does not apply.
                WidgetId.Unmute();
                break;
        }
    }
}
} | danielchalmers/DesktopWidgets | DesktopWidgets/Actions/WidgetMuteUnmuteAction.cs | C# | apache-2.0 | 909 |
# Discovery
* See [Element Discovery](element-discovery.md)
```
CheckBox check = window.getCheckBox(Search.getBuilder(0).build());
check.toggle();
try {
ToggleState state = check.getToggleState();
logger.info("State: " + state);
} catch (Exception ex) {
logger.info("Failed to get toggle state");
}
```
# Methods
## Name
Gets the name of the checkbox control
## Value
Gets the value of the checkbox, i.e. the text associated with it
## Toggle
Toggles the checked state of the checkbox
## ToggleState
Gets the current toggle state of the checkbox
| mmarquee/ui-automation | docs/checkbox.md | Markdown | apache-2.0 | 469 |
/*************************************************************
*
* MathJax/fonts/HTML-CSS/TeX/png/Main/Regular/SuppMathOperators.js
*
* Defines the image size data needed for the HTML-CSS OutputJax
* to display mathematics using fallback images when the fonts
* are not available to the client browser.
*
* ---------------------------------------------------------------------
*
* Copyright (c) 2009-2013 The MathJax Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
MathJax.OutputJax["HTML-CSS"].defineImageData({
MathJax_Main: {
0x2a3f: [
// AMALGAMATION OR COPRODUCT
[5, 5, 0],
[6, 6, 0],
[8, 7, 0],
[9, 9, 0],
[10, 10, 0],
[12, 12, 0],
[15, 14, 0],
[17, 16, 0],
[20, 19, 0],
[24, 23, 0],
[29, 27, 0],
[34, 32, 0],
[40, 38, 0],
[48, 45, 0]
],
0x2aaf: [
// PRECEDES ABOVE SINGLE-LINE EQUALS SIGN
[5, 6, 1],
[6, 7, 1],
[7, 9, 2],
[9, 10, 2],
[10, 11, 2],
[12, 14, 3],
[14, 16, 3],
[17, 18, 3],
[20, 22, 4],
[23, 26, 5],
[28, 31, 6],
[33, 37, 7],
[39, 44, 8],
[46, 52, 10]
],
0x2ab0: [
// SUCCEEDS ABOVE SINGLE-LINE EQUALS SIGN
[5, 6, 1],
[6, 7, 1],
[7, 9, 2],
[9, 10, 2],
[10, 11, 2],
[12, 14, 3],
[14, 16, 3],
[17, 19, 4],
[20, 22, 4],
[23, 26, 5],
[28, 31, 6],
[33, 37, 7],
[39, 44, 8],
[46, 52, 10]
]
}
});
MathJax.Ajax.loadComplete(
MathJax.OutputJax["HTML-CSS"].imgDir +
"/Main/Regular" +
MathJax.OutputJax["HTML-CSS"].imgPacked +
"/SuppMathOperators.js"
);
| GerHobbelt/MathJax | fonts/HTML-CSS/TeX/png/Main/Regular/unpacked/SuppMathOperators.js | JavaScript | apache-2.0 | 2,217 |
/**
* @license Apache-2.0
*
* Copyright (c) 2018 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
var randu = require( '@stdlib/random/base/randu' );
var cdf = require( './../lib' );
// Randomly drawn distribution parameters and evaluation point.
var alpha;
var beta;
var x;
var y;
var i;

// Evaluate the gamma CDF at random support values and parameters and
// print each result. (Note: the three randu() draws happen in a fixed
// order, so the printed sequence is reproducible for a given seed.)
for ( i = 0; i < 10; i++ ) {
	x = randu() * 3.0; // evaluation point on [0,3)
	alpha = randu() * 5.0; // shape parameter on [0,5)
	beta = randu() * 5.0; // rate parameter on [0,5)
	y = cdf( x, alpha, beta );
	console.log( 'x: %d, α: %d, β: %d, F(x;α,β): %d', x.toFixed( 4 ), alpha.toFixed( 4 ), beta.toFixed( 4 ), y.toFixed( 4 ) );
}
| stdlib-js/stdlib | lib/node_modules/@stdlib/stats/base/dists/gamma/cdf/examples/index.js | JavaScript | apache-2.0 | 1,016 |
# Mycosphaerella pyri (Auersw.) Boerema, 1970 SPECIES
#### Status
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
Neth. Jl Pl. Path. 76(3): 166 (1970)
#### Original name
Sphaerella pyri Auersw., 1869
### Remarks
null | mdoering/backbone | life/Fungi/Ascomycota/Dothideomycetes/Capnodiales/Mycosphaerellaceae/Mycosphaerella/Mycosphaerella pyri/README.md | Markdown | apache-2.0 | 258 |
/* $NetBSD: dsn_print.c,v 1.1.1.1 2009/06/23 10:08:45 tron Exp $ */
/*++
/* NAME
/* dsn_print
/* SUMMARY
/* write DSN structure to stream
/* SYNOPSIS
/* #include <dsn_print.h>
/*
/* int dsn_print(print_fn, stream, flags, ptr)
/* ATTR_PRINT_MASTER_FN print_fn;
/* VSTREAM *stream;
/* int flags;
/* void *ptr;
/* DESCRIPTION
/* dsn_print() writes a DSN structure to the named stream using
/* the specified attribute print routine. dsn_print() is meant
/* to be passed as a call-back to attr_print(), thusly:
/*
/* ... ATTR_TYPE_FUNC, dsn_print, (void *) dsn, ...
/* DIAGNOSTICS
/* Fatal: out of memory.
/* LICENSE
/* .ad
/* .fi
/* The Secure Mailer license must be distributed with this software.
/* AUTHOR(S)
/* Wietse Venema
/* IBM T.J. Watson Research
/* P.O. Box 704
/* Yorktown Heights, NY 10598, USA
/*--*/
/* System library. */
#include <sys_defs.h>
/* Utility library. */
#include <attr.h>
/* Global library. */
#include <mail_proto.h>
#include <dsn_print.h>
/* dsn_print - write DSN to stream */

int dsn_print(ATTR_PRINT_MASTER_FN print_fn, VSTREAM *fp,
	              int flags, void *ptr)
{
    DSN *dsn = (DSN *) ptr;

    /*
     * The attribute order is determined by backwards compatibility. It can
     * be sanitized after all the ad-hoc DSN read/write code is replaced.
     * The print function's status is returned directly to the caller.
     */
    return (print_fn(fp, flags | ATTR_FLAG_MORE,
		     ATTR_TYPE_STR, MAIL_ATTR_DSN_STATUS, dsn->status,
		     ATTR_TYPE_STR, MAIL_ATTR_DSN_DTYPE, dsn->dtype,
		     ATTR_TYPE_STR, MAIL_ATTR_DSN_DTEXT, dsn->dtext,
		     ATTR_TYPE_STR, MAIL_ATTR_DSN_MTYPE, dsn->mtype,
		     ATTR_TYPE_STR, MAIL_ATTR_DSN_MNAME, dsn->mname,
		     ATTR_TYPE_STR, MAIL_ATTR_DSN_ACTION, dsn->action,
		     ATTR_TYPE_STR, MAIL_ATTR_WHY, dsn->reason,
		     ATTR_TYPE_END));
}
| execunix/vinos | external/ibm-public/postfix/dist/src/global/dsn_print.c | C | apache-2.0 | 1,782 |
// Plato.NET
// Copyright (c) 2017 ReflectSoftware Inc.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Reflection;
using System.Runtime.InteropServices;
[assembly: AssemblyTitle("Plato.Cache")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
[assembly: Guid("a5ddd990-8ef2-450f-a5b1-f7130826f9c4")]
| reflectsoftware/Plato.NET | src/Plato.Cache/Properties/AssemblyInfo.cs | C# | apache-2.0 | 476 |
GWTRecorder
===========
GWT/Java Wrapper for [Recorderjs](https://github.com/mattdiamond/Recorderjs)
TODO
----
- support buffer
- support samplerate | akjava/GWTRecorderjs | README.md | Markdown | apache-2.0 | 160 |
// THIS FILE WAS AUTO-GENERATED BY ADKGEN -- DO NOT MODIFY!
//
// Copyright (c)1998-2011 Pearson Education, Inc. or its affiliate(s).
// All rights reserved.
//
using System;
using System.Text;
using System.Security.Permissions;
using System.Runtime.Serialization;
using OpenADK.Library;
using OpenADK.Library.Global;
using OpenADK.Library.au.Common;
namespace OpenADK.Library.au.Student{
/// <summary>A TermInfo</summary>
/// <remarks>
///
/// <para>Author: Generated by adkgen</para>
/// <para>Version: 2.6</para>
/// <para>Since: 2.3</para>
/// </remarks>
[Serializable]
public class TermInfo : SifDataObject
{
/// <summary>
/// Creates an instance of a TermInfo
/// </summary>
public TermInfo() : base( Adk.SifVersion, StudentDTD.TERMINFO ){}
/// <summary>
/// Constructor that accepts values for all mandatory fields
/// </summary>
///<param name="refId">The ID (GUID) that uniquely identifies this TermInfo entity.</param>
///<param name="schoolInfoRefId">The ID (GUID) that identifies the school where the term is used.</param>
///<param name="schoolYear">A SchoolYear</param>
///<param name="startDate">Starting date of the term.</param>
///<param name="endDate">Ending date of the term.</param>
///
public TermInfo( string refId, string schoolInfoRefId, int? schoolYear, DateTime? startDate, DateTime? endDate ) : base( Adk.SifVersion, StudentDTD.TERMINFO )
{
this.RefId = refId;
this.SchoolInfoRefId = schoolInfoRefId;
this.SchoolYear = schoolYear;
this.StartDate = startDate;
this.EndDate = endDate;
}
/// <summary>
/// Constructor used by the .Net Serialization formatter
/// </summary>
[SecurityPermission( SecurityAction.Demand, SerializationFormatter=true )]
protected TermInfo( SerializationInfo info, StreamingContext context ) : base( info, context ) {}
/// <summary>
/// Gets the metadata fields that make up the key of this object
/// </summary>
/// <value>
/// an array of metadata fields that make up the object's key
/// </value>
public override IElementDef[] KeyFields {
get { return new IElementDef[] { StudentDTD.TERMINFO_REFID }; }
}
/// <summary>
/// Gets or sets the value of the <c>RefId</c> attribute.
/// </summary>
/// <value> The <c>RefId</c> attribute of this object.</value>
/// <remarks>
/// <para>The SIF specification defines the meaning of this attribute as: "The ID (GUID) that uniquely identifies this TermInfo entity."</para>
/// <para>Version: 2.6</para>
/// <para>Since: 2.3</para>
/// </remarks>
public override string RefId
{
get
{
return (string) GetSifSimpleFieldValue( StudentDTD.TERMINFO_REFID ) ;
}
set
{
SetFieldValue( StudentDTD.TERMINFO_REFID, new SifString( value ), value );
}
}
/// <summary>
/// Gets or sets the value of the <c><SchoolInfoRefId></c> element.
/// </summary>
/// <value> The <c>SchoolInfoRefId</c> element of this object.</value>
/// <remarks>
/// <para>The SIF specification defines the meaning of this element as: "The ID (GUID) that identifies the school where the term is used."</para>
/// <para>Version: 2.6</para>
/// <para>Since: 2.3</para>
/// </remarks>
public string SchoolInfoRefId
{
get
{
return (string) GetSifSimpleFieldValue( StudentDTD.TERMINFO_SCHOOLINFOREFID ) ;
}
set
{
SetFieldValue( StudentDTD.TERMINFO_SCHOOLINFOREFID, new SifString( value ), value );
}
}
/// <summary>
/// Gets or sets the value of the <c><SchoolYear></c> element.
/// </summary>
/// <value> The <c>SchoolYear</c> element of this object.</value>
/// <remarks>
/// <para>Version: 2.6</para>
/// <para>Since: 2.3</para>
/// </remarks>
public int? SchoolYear
{
get
{
return (int?) GetSifSimpleFieldValue( StudentDTD.TERMINFO_SCHOOLYEAR ) ;
}
set
{
SetFieldValue( StudentDTD.TERMINFO_SCHOOLYEAR, new SifInt( value ), value );
}
}
/// <summary>
/// Gets or sets the value of the <c><StartDate></c> element.
/// </summary>
/// <value> The <c>StartDate</c> element of this object.</value>
/// <remarks>
/// <para>The SIF specification defines the meaning of this element as: "Starting date of the term."</para>
/// <para>Version: 2.6</para>
/// <para>Since: 2.3</para>
/// </remarks>
public DateTime? StartDate
{
get
{
return (DateTime?) GetSifSimpleFieldValue( StudentDTD.TERMINFO_STARTDATE ) ;
}
set
{
SetFieldValue( StudentDTD.TERMINFO_STARTDATE, new SifDate( value ), value );
}
}
/// <summary>
/// Gets or sets the value of the <c><EndDate></c> element.
/// </summary>
/// <value> The <c>EndDate</c> element of this object.</value>
/// <remarks>
/// <para>The SIF specification defines the meaning of this element as: "Ending date of the term."</para>
/// <para>Version: 2.6</para>
/// <para>Since: 2.3</para>
/// </remarks>
public DateTime? EndDate
{
get
{
return (DateTime?) GetSifSimpleFieldValue( StudentDTD.TERMINFO_ENDDATE ) ;
}
set
{
SetFieldValue( StudentDTD.TERMINFO_ENDDATE, new SifDate( value ), value );
}
}
/// <summary>
/// Gets or sets the value of the <c><Description></c> element.
/// </summary>
/// <value> The <c>Description</c> element of this object.</value>
/// <remarks>
/// <para>The SIF specification defines the meaning of this element as: "Text-based description of the term."</para>
/// <para>Version: 2.6</para>
/// <para>Since: 2.3</para>
/// </remarks>
public string Description
{
get
{
return (string) GetSifSimpleFieldValue( StudentDTD.TERMINFO_DESCRIPTION ) ;
}
set
{
SetFieldValue( StudentDTD.TERMINFO_DESCRIPTION, new SifString( value ), value );
}
}
/// <summary>
/// Gets or sets the value of the <c><RelativeDuration></c> element.
/// </summary>
/// <value> The <c>RelativeDuration</c> element of this object.</value>
/// <remarks>
/// <para>Version: 2.6</para>
/// <para>Since: 2.3</para>
/// </remarks>
public decimal? RelativeDuration
{
get
{
return (decimal?) GetSifSimpleFieldValue( StudentDTD.TERMINFO_RELATIVEDURATION ) ;
}
set
{
SetFieldValue( StudentDTD.TERMINFO_RELATIVEDURATION, new SifDecimal( value ), value );
}
}
/// <summary>
/// Gets or sets the value of the <c><TermCode></c> element.
/// </summary>
/// <value> The <c>TermCode</c> element of this object.</value>
/// <remarks>
/// <para>The SIF specification defines the meaning of this element as: "Locally-defined code."</para>
/// <para>Version: 2.6</para>
/// <para>Since: 2.3</para>
/// </remarks>
public string TermCode
{
get
{
return (string) GetSifSimpleFieldValue( StudentDTD.TERMINFO_TERMCODE ) ;
}
set
{
SetFieldValue( StudentDTD.TERMINFO_TERMCODE, new SifString( value ), value );
}
}
/// <summary>
/// Gets or sets the value of the <c><Track></c> element.
/// </summary>
/// <value> The <c>Track</c> element of this object.</value>
/// <remarks>
/// <para>The SIF specification defines the meaning of this element as: "The name, description, or code of the track that contains this term. This is used when there are multiple tracks within a school. For instance, kindergarten commonly has a different set of terms than other grades within the school."</para>
/// <para>Version: 2.6</para>
/// <para>Since: 2.3</para>
/// </remarks>
public string Track
{
get
{
return (string) GetSifSimpleFieldValue( StudentDTD.TERMINFO_TRACK ) ;
}
set
{
SetFieldValue( StudentDTD.TERMINFO_TRACK, new SifString( value ), value );
}
}
/// <summary>
/// Gets or sets the value of the <c><TermSpan></c> element.
/// </summary>
/// <value> The <c>TermSpan</c> element of this object.</value>
/// <remarks>
/// <para>The SIF specification defines the meaning of this element as: "What sort of Session this TermSpan equates to."</para>
/// <para>Version: 2.6</para>
/// <para>Since: 2.3</para>
/// </remarks>
public string TermSpan
{
get
{
return GetFieldValue( StudentDTD.TERMINFO_TERMSPAN );
}
set
{
SetField( StudentDTD.TERMINFO_TERMSPAN, value );
}
}
/// <summary>
/// Sets the value of the <c><TermSpan></c> element.
/// </summary>
/// <param name="val">A TermInfoSessionType object</param>
/// <remarks>
/// <para>The SIF specification defines the meaning of this element as: "What sort of Session this TermSpan equates to."</para>
/// <para>Version: 2.6</para>
/// <para>Since: 2.3</para>
/// </remarks>
public void SetTermSpan( TermInfoSessionType val )
{
SetField( StudentDTD.TERMINFO_TERMSPAN, val );
}
/// <summary>
/// Gets or sets the value of the <c><MarkingTerm></c> element.
/// </summary>
/// <value> The <c>MarkingTerm</c> element of this object.</value>
/// <remarks>
/// <para>The SIF specification defines the meaning of this element as: "Does this TermInfo represent a marking period?"</para>
/// <para>Version: 2.6</para>
/// <para>Since: 2.3</para>
/// </remarks>
public string MarkingTerm
{
get
{
return GetFieldValue( StudentDTD.TERMINFO_MARKINGTERM );
}
set
{
SetField( StudentDTD.TERMINFO_MARKINGTERM, value );
}
}
/// <summary>
/// Sets the value of the <c><MarkingTerm></c> element.
/// </summary>
/// <param name="val">A AUCodeSetsYesOrNoCategoryType object</param>
/// <remarks>
/// <para>The SIF specification defines the meaning of this element as: "Does this TermInfo represent a marking period?"</para>
/// <para>Version: 2.6</para>
/// <para>Since: 2.3</para>
/// </remarks>
public void SetMarkingTerm( AUCodeSetsYesOrNoCategoryType val )
{
SetField( StudentDTD.TERMINFO_MARKINGTERM, val );
}
/// <summary>
/// Gets or sets the value of the <c><SchedulingTerm></c> element.
/// </summary>
/// <value> The <c>SchedulingTerm</c> element of this object.</value>
/// <remarks>
/// <para>The SIF specification defines the meaning of this element as: "Does this TermInfo represent a scheduling term?"</para>
/// <para>Version: 2.6</para>
/// <para>Since: 2.3</para>
/// </remarks>
public string SchedulingTerm
{
get
{
return GetFieldValue( StudentDTD.TERMINFO_SCHEDULINGTERM );
}
set
{
SetField( StudentDTD.TERMINFO_SCHEDULINGTERM, value );
}
}
/// <summary>
/// Sets the value of the <c><SchedulingTerm></c> element.
/// </summary>
/// <param name="val">A AUCodeSetsYesOrNoCategoryType object</param>
/// <remarks>
/// <para>The SIF specification defines the meaning of this element as: "Does this TermInfo represent a scheduling term?"</para>
/// <para>Version: 2.6</para>
/// <para>Since: 2.3</para>
/// </remarks>
public void SetSchedulingTerm( AUCodeSetsYesOrNoCategoryType val )
{
SetField( StudentDTD.TERMINFO_SCHEDULINGTERM, val );
}
/// <summary>
/// Gets or sets the value of the <c><AttendanceTerm></c> element.
/// </summary>
/// <value> The <c>AttendanceTerm</c> element of this object.</value>
/// <remarks>
/// <para>The SIF specification defines the meaning of this element as: "Does this TermInfo represent an attendance term?"</para>
/// <para>Version: 2.6</para>
/// <para>Since: 2.3</para>
/// </remarks>
public string AttendanceTerm
{
get
{
return GetFieldValue( StudentDTD.TERMINFO_ATTENDANCETERM );
}
set
{
SetField( StudentDTD.TERMINFO_ATTENDANCETERM, value );
}
}
/// <summary>
/// Sets the value of the <c><AttendanceTerm></c> element.
/// </summary>
/// <param name="val">A AUCodeSetsYesOrNoCategoryType object</param>
/// <remarks>
/// <para>The SIF specification defines the meaning of this element as: "Does this TermInfo represent an attendance term?"</para>
/// <para>Version: 2.6</para>
/// <para>Since: 2.3</para>
/// </remarks>
public void SetAttendanceTerm( AUCodeSetsYesOrNoCategoryType val )
{
SetField( StudentDTD.TERMINFO_ATTENDANCETERM, val );
}
}}
| open-adk/OpenADK-csharp | src/au/sdo/Student/TermInfo.cs | C# | apache-2.0 | 11,792 |
import {Binding, resolveForwardRef, Injectable} from 'angular2/di';
import {
Type,
isBlank,
isPresent,
BaseException,
normalizeBlank,
stringify,
isArray,
isPromise
} from 'angular2/src/facade/lang';
import {Promise, PromiseWrapper} from 'angular2/src/facade/async';
import {List, ListWrapper, Map, MapWrapper} from 'angular2/src/facade/collection';
import {DirectiveResolver} from './directive_resolver';
import {AppProtoView} from './view';
import {ElementBinder} from './element_binder';
import {ProtoViewRef} from './view_ref';
import {DirectiveBinding} from './element_injector';
import {TemplateResolver} from './template_resolver';
import {View} from '../annotations_impl/view';
import {ComponentUrlMapper} from './component_url_mapper';
import {ProtoViewFactory} from './proto_view_factory';
import {UrlResolver} from 'angular2/src/services/url_resolver';
import * as renderApi from 'angular2/src/render/api';
/**
 * Memoizes the compiled {@link AppProtoView} for each component type.
 *
 * Compilation is expensive and may recurse (a component can appear inside its
 * own template), so results are cached here to avoid duplicate work and to
 * break cyclic dependencies. Component views and bootstrap host views are
 * tracked in separate maps.
 */
@Injectable()
export class CompilerCache {
  _cache: Map<Type, AppProtoView> = new Map();
  _hostCache: Map<Type, AppProtoView> = new Map();

  /** Stores the compiled component view for `component`. */
  set(component: Type, protoView: AppProtoView): void {
    this._cache.set(component, protoView);
  }

  /** Returns the cached component view, or null when none has been stored. */
  get(component: Type): AppProtoView {
    return normalizeBlank(this._cache.get(component));
  }

  /** Stores the compiled host view for `component`. */
  setHost(component: Type, protoView: AppProtoView): void {
    this._hostCache.set(component, protoView);
  }

  /** Returns the cached host view, or null when none has been stored. */
  getHost(component: Type): AppProtoView {
    return normalizeBlank(this._hostCache.get(component));
  }

  /** Drops every cached entry (both component and host views). */
  clear(): void {
    this._cache.clear();
    this._hostCache.clear();
  }
}
/**
 * Compiles component templates into application {@link AppProtoView}s,
 * delegating the render-side work to a {@link renderApi.RenderCompiler} and
 * caching results in a {@link CompilerCache}.
 *
 * @exportedAs angular2/view
 */
@Injectable()
export class Compiler {
  private _reader: DirectiveResolver;
  private _compilerCache: CompilerCache;
  // In-flight compilations, keyed by component type, used to de-duplicate
  // concurrent requests for the same component.
  private _compiling: Map<Type, Promise<AppProtoView>>;
  private _templateResolver: TemplateResolver;
  private _componentUrlMapper: ComponentUrlMapper;
  private _urlResolver: UrlResolver;
  // Base URL against which component URLs are resolved.
  private _appUrl: string;
  private _render: renderApi.RenderCompiler;
  private _protoViewFactory: ProtoViewFactory;

  constructor(reader: DirectiveResolver, cache: CompilerCache, templateResolver: TemplateResolver,
              componentUrlMapper: ComponentUrlMapper, urlResolver: UrlResolver,
              render: renderApi.RenderCompiler, protoViewFactory: ProtoViewFactory) {
    this._reader = reader;
    this._compilerCache = cache;
    this._compiling = new Map();
    this._templateResolver = templateResolver;
    this._componentUrlMapper = componentUrlMapper;
    this._urlResolver = urlResolver;
    this._appUrl = urlResolver.resolve(null, './');
    this._render = render;
    this._protoViewFactory = protoViewFactory;
  }

  /**
   * Normalizes a directive reference — a plain type, a DI {@link Binding}, or
   * an already-resolved {@link DirectiveBinding} — into a DirectiveBinding.
   */
  private _bindDirective(directiveTypeOrBinding): DirectiveBinding {
    if (directiveTypeOrBinding instanceof DirectiveBinding) {
      return directiveTypeOrBinding;
    } else if (directiveTypeOrBinding instanceof Binding) {
      let annotation = this._reader.resolve(directiveTypeOrBinding.token);
      return DirectiveBinding.createFromBinding(directiveTypeOrBinding, annotation);
    } else {
      let annotation = this._reader.resolve(directiveTypeOrBinding);
      return DirectiveBinding.createFromType(directiveTypeOrBinding, annotation);
    }
  }

  // Create a hostView as if the compiler encountered <hostcmp></hostcmp>.
  // Used for bootstrapping. The host view is cached separately from the
  // component view (see CompilerCache.getHost/setHost).
  compileInHost(componentTypeOrBinding: Type | Binding): Promise<ProtoViewRef> {
    var componentBinding = this._bindDirective(componentTypeOrBinding);
    Compiler._assertTypeIsComponent(componentBinding);

    var directiveMetadata = componentBinding.metadata;
    var hostPvPromise;
    var component = <Type>componentBinding.key.token;
    var hostAppProtoView = this._compilerCache.getHost(component);
    if (isPresent(hostAppProtoView)) {
      hostPvPromise = PromiseWrapper.resolve(hostAppProtoView);
    } else {
      hostPvPromise = this._render.compileHost(directiveMetadata)
                          .then((hostRenderPv) => {
                            return this._compileNestedProtoViews(componentBinding, hostRenderPv,
                                                                [componentBinding]);
                          });
    }
    return hostPvPromise.then((hostAppProtoView) => { return new ProtoViewRef(hostAppProtoView); });
  }

  /**
   * Compiles a component into an AppProtoView. Returns a plain AppProtoView
   * synchronously when it is already cached (required so recursive components
   * terminate), the in-flight promise when a compilation is already underway,
   * and otherwise starts a new compilation and returns its promise.
   */
  private _compile(componentBinding: DirectiveBinding): Promise<AppProtoView>| AppProtoView {
    var component = <Type>componentBinding.key.token;
    var protoView = this._compilerCache.get(component);
    if (isPresent(protoView)) {
      // The component has already been compiled into an AppProtoView,
      // returns a plain AppProtoView, not wrapped inside of a Promise.
      // Needed for recursive components.
      return protoView;
    }

    var pvPromise = this._compiling.get(component);
    if (isPresent(pvPromise)) {
      // The component is already being compiled, attach to the existing Promise
      // instead of re-compiling the component.
      // It happens when a template references a component multiple times.
      return pvPromise;
    }

    var template = this._templateResolver.resolve(component);
    var directives = this._flattenDirectives(template);
    // Fail fast on anything in the directives list that is neither a type
    // nor a binding (e.g. an unresolved forward reference).
    for (var i = 0; i < directives.length; i++) {
      if (!Compiler._isValidDirective(directives[i])) {
        throw new BaseException(
            `Unexpected directive value '${stringify(directives[i])}' on the View of component '${stringify(component)}'`);
      }
    }

    var boundDirectives =
        ListWrapper.map(directives, (directive) => this._bindDirective(directive));

    var renderTemplate = this._buildRenderTemplate(component, template, boundDirectives);
    pvPromise =
        this._render.compile(renderTemplate)
            .then((renderPv) => {
              return this._compileNestedProtoViews(componentBinding, renderPv, boundDirectives);
            });

    this._compiling.set(component, pvPromise);
    return pvPromise;
  }

  /**
   * Turns a render proto view into AppProtoViews, registers the root one in
   * the cache *before* recursing (so self-referencing components resolve to
   * the cached entry instead of recompiling forever), then compiles every
   * nested component and wires the result into its element binder.
   */
  private _compileNestedProtoViews(componentBinding, renderPv, directives): Promise<AppProtoView>|
      AppProtoView {
    var protoViews =
        this._protoViewFactory.createAppProtoViews(componentBinding, renderPv, directives);
    var protoView = protoViews[0];
    if (isPresent(componentBinding)) {
      var component = componentBinding.key.token;
      if (renderPv.type === renderApi.ViewType.COMPONENT) {
        // Populate the cache before compiling the nested components,
        // so that components can reference themselves in their template.
        this._compilerCache.set(component, protoView);
        MapWrapper.delete(this._compiling, component);
      } else {
        this._compilerCache.setHost(component, protoView);
      }
    }

    var nestedPVPromises = [];
    ListWrapper.forEach(this._collectComponentElementBinders(protoViews), (elementBinder) => {
      var nestedComponent = elementBinder.componentDirective;
      var elementBinderDone =
          (nestedPv: AppProtoView) => { elementBinder.nestedProtoView = nestedPv; };
      // _compile may return synchronously (cached / recursive case) or a promise.
      var nestedCall = this._compile(nestedComponent);
      if (isPromise(nestedCall)) {
        nestedPVPromises.push((<Promise<AppProtoView>>nestedCall).then(elementBinderDone));
      } else {
        elementBinderDone(<AppProtoView>nestedCall);
      }
    });

    if (nestedPVPromises.length > 0) {
      return PromiseWrapper.all(nestedPVPromises).then((_) => protoView);
    } else {
      return protoView;
    }
  }

  /** Collects every element binder (across all proto views) that hosts a component. */
  private _collectComponentElementBinders(protoViews: List<AppProtoView>): List<ElementBinder> {
    var componentElementBinders = [];
    ListWrapper.forEach(protoViews, (protoView) => {
      ListWrapper.forEach(protoView.elementBinders, (elementBinder) => {
        if (isPresent(elementBinder.componentDirective)) {
          componentElementBinders.push(elementBinder);
        }
      });
    });
    return componentElementBinders;
  }

  /**
   * Builds the render-side ViewDefinition for a component, resolving the
   * template URL and style URLs against the component's own URL.
   */
  private _buildRenderTemplate(component, view, directives): renderApi.ViewDefinition {
    var componentUrl =
        this._urlResolver.resolve(this._appUrl, this._componentUrlMapper.getUrl(component));
    var templateAbsUrl = null;
    var styleAbsUrls = null;
    if (isPresent(view.templateUrl)) {
      templateAbsUrl = this._urlResolver.resolve(componentUrl, view.templateUrl);
    } else if (isPresent(view.template)) {
      // Note: If we have an inline template, we also need to send
      // the url for the component to the render so that it
      // is able to resolve urls in stylesheets.
      templateAbsUrl = componentUrl;
    }
    if (isPresent(view.styleUrls)) {
      styleAbsUrls =
          ListWrapper.map(view.styleUrls, url => this._urlResolver.resolve(componentUrl, url));
    }
    return new renderApi.ViewDefinition({
      componentId: stringify(component),
      templateAbsUrl: templateAbsUrl, template: view.template,
      styleAbsUrls: styleAbsUrls,
      styles: view.styles,
      directives: ListWrapper.map(directives, directiveBinding => directiveBinding.metadata)
    });
  }

  /** Returns the view's directives as a flat list (empty when none are declared). */
  private _flattenDirectives(template: View): List<Type> {
    if (isBlank(template.directives)) return [];
    var directives = [];
    this._flattenList(template.directives, directives);
    return directives;
  }

  /** Recursively flattens nested directive lists, resolving forward references. */
  private _flattenList(tree: List<any>, out: List<Type | Binding | List<any>>): void {
    for (var i = 0; i < tree.length; i++) {
      var item = resolveForwardRef(tree[i]);
      if (isArray(item)) {
        this._flattenList(item, out);
      } else {
        out.push(item);
      }
    }
  }

  /** True when `value` is something _bindDirective can handle. */
  private static _isValidDirective(value: Type | Binding): boolean {
    return isPresent(value) && (value instanceof Type || value instanceof Binding);
  }

  /** Throws unless the binding's metadata marks it as a component (not a plain directive). */
  private static _assertTypeIsComponent(directiveBinding: DirectiveBinding): void {
    if (directiveBinding.metadata.type !== renderApi.DirectiveMetadata.COMPONENT_TYPE) {
      throw new BaseException(
          `Could not load '${stringify(directiveBinding.key.token)}' because it is not a component.`);
    }
  }
}
| hswolff/angular | modules/angular2/src/core/compiler/compiler.ts | TypeScript | apache-2.0 | 10,193 |
// Code generated by protoc-gen-gogo.
// source: google/logging/v2/log_entry.proto
// DO NOT EDIT!
package google_logging_v2
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import _ "github.com/gengo/grpc-gateway/third_party/googleapis/google/api"
import google_api3 "go.pedge.io/pb/gogo/google/api"
import google_logging_type "go.pedge.io/pb/gogo/google/logging/type"
import google_logging_type1 "go.pedge.io/pb/gogo/google/logging/type"
import google_protobuf1 "go.pedge.io/pb/gogo/google/protobuf"
import google_protobuf2 "go.pedge.io/pb/gogo/google/protobuf"
import google_protobuf3 "go.pedge.io/pb/gogo/google/protobuf"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
const _ = proto.GoGoProtoPackageIsVersion1
// An individual entry in a log.
type LogEntry struct {
	// Required. The resource name of the log to which this log entry
	// belongs. The format of the name is
	// `projects/<project-id>/logs/<log-id%gt;`. Examples:
	// `"projects/my-projectid/logs/syslog"`,
	// `"projects/1234567890/logs/library.googleapis.com%2Fbook_log"`.
	//
	// The log ID part of resource name must be less than 512 characters
	// long and can only include the following characters: upper and
	// lower case alphanumeric characters: [A-Za-z0-9]; and punctuation
	// characters: forward-slash, underscore, hyphen, and period.
	// Forward-slash (`/`) characters in the log ID must be URL-encoded.
	LogName string `protobuf:"bytes,12,opt,name=log_name,json=logName,proto3" json:"log_name,omitempty"`
	// Required. The monitored resource associated with this log entry.
	// Example: a log entry that reports a database error would be
	// associated with the monitored resource designating the particular
	// database that reported the error.
	Resource *google_api3.MonitoredResource `protobuf:"bytes,8,opt,name=resource" json:"resource,omitempty"`
	// Required. The log entry payload, which can be one of multiple types.
	//
	// Types that are valid to be assigned to Payload:
	//	*LogEntry_ProtoPayload
	//	*LogEntry_TextPayload
	//	*LogEntry_JsonPayload
	//
	// Read the value via the GetProtoPayload / GetTextPayload / GetJsonPayload
	// accessors, or with a type switch on GetPayload().
	Payload isLogEntry_Payload `protobuf_oneof:"payload"`
	// Optional. The time the event described by the log entry occurred. If
	// omitted, Cloud Logging will use the time the log entry is written.
	Timestamp *google_protobuf3.Timestamp `protobuf:"bytes,9,opt,name=timestamp" json:"timestamp,omitempty"`
	// Optional. The severity of the log entry. The default value is
	// `LogSeverity.DEFAULT`.
	Severity google_logging_type1.LogSeverity `protobuf:"varint,10,opt,name=severity,proto3,enum=google.logging.type.LogSeverity" json:"severity,omitempty"`
	// Optional. A unique ID for the log entry. If you provide this field, the
	// logging service considers other log entries in the same log with the same
	// ID as duplicates which can be removed.
	// If omitted, Cloud Logging will generate a unique ID for this log entry.
	InsertId string `protobuf:"bytes,4,opt,name=insert_id,json=insertId,proto3" json:"insert_id,omitempty"`
	// Optional. Information about the HTTP request associated with this log entry,
	// if applicable.
	HttpRequest *google_logging_type.HttpRequest `protobuf:"bytes,7,opt,name=http_request,json=httpRequest" json:"http_request,omitempty"`
	// Optional. A set of user-defined (key, value) data that provides additional
	// information about the log entry.
	Labels map[string]string `protobuf:"bytes,11,rep,name=labels" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// Optional. Information about an operation associated with the log entry, if
	// applicable.
	Operation *LogEntryOperation `protobuf:"bytes,15,opt,name=operation" json:"operation,omitempty"`
}
// Reset resets m to the zero LogEntry.
func (m *LogEntry) Reset() { *m = LogEntry{} }

// String renders m in the compact proto text format.
func (m *LogEntry) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks *LogEntry as a protobuf message.
func (*LogEntry) ProtoMessage() {}

// Descriptor returns the raw (gzipped) file descriptor bytes and the
// message's index path within that file.
func (*LogEntry) Descriptor() ([]byte, []int) { return fileDescriptorLogEntry, []int{0} }

// isLogEntry_Payload is the interface implemented by the wrapper types that
// may populate the LogEntry.Payload oneof.
type isLogEntry_Payload interface {
	isLogEntry_Payload()
}

// LogEntry_ProtoPayload wraps a protobuf Any payload for the Payload oneof.
type LogEntry_ProtoPayload struct {
	ProtoPayload *google_protobuf1.Any `protobuf:"bytes,2,opt,name=proto_payload,json=protoPayload,oneof"`
}

// LogEntry_TextPayload wraps a plain-text payload for the Payload oneof.
type LogEntry_TextPayload struct {
	TextPayload string `protobuf:"bytes,3,opt,name=text_payload,json=textPayload,proto3,oneof"`
}

// LogEntry_JsonPayload wraps a structured (Struct) payload for the Payload oneof.
type LogEntry_JsonPayload struct {
	JsonPayload *google_protobuf2.Struct `protobuf:"bytes,6,opt,name=json_payload,json=jsonPayload,oneof"`
}

func (*LogEntry_ProtoPayload) isLogEntry_Payload() {}
func (*LogEntry_TextPayload) isLogEntry_Payload() {}
func (*LogEntry_JsonPayload) isLogEntry_Payload() {}

// GetPayload returns whichever payload wrapper is set (nil when unset);
// safe to call on a nil receiver.
func (m *LogEntry) GetPayload() isLogEntry_Payload {
	if m != nil {
		return m.Payload
	}
	return nil
}
// GetResource returns the monitored resource, or nil; safe on a nil receiver.
func (m *LogEntry) GetResource() *google_api3.MonitoredResource {
	if m != nil {
		return m.Resource
	}
	return nil
}

// GetProtoPayload returns the payload when it is set as a proto payload, else nil.
func (m *LogEntry) GetProtoPayload() *google_protobuf1.Any {
	if x, ok := m.GetPayload().(*LogEntry_ProtoPayload); ok {
		return x.ProtoPayload
	}
	return nil
}

// GetTextPayload returns the payload when it is set as a text payload, else "".
func (m *LogEntry) GetTextPayload() string {
	if x, ok := m.GetPayload().(*LogEntry_TextPayload); ok {
		return x.TextPayload
	}
	return ""
}

// GetJsonPayload returns the payload when it is set as a JSON payload, else nil.
func (m *LogEntry) GetJsonPayload() *google_protobuf2.Struct {
	if x, ok := m.GetPayload().(*LogEntry_JsonPayload); ok {
		return x.JsonPayload
	}
	return nil
}

// GetTimestamp returns the event timestamp, or nil; safe on a nil receiver.
func (m *LogEntry) GetTimestamp() *google_protobuf3.Timestamp {
	if m != nil {
		return m.Timestamp
	}
	return nil
}

// GetHttpRequest returns the associated HTTP request info, or nil; safe on a nil receiver.
func (m *LogEntry) GetHttpRequest() *google_logging_type.HttpRequest {
	if m != nil {
		return m.HttpRequest
	}
	return nil
}

// GetLabels returns the user-defined labels map, or nil; safe on a nil receiver.
func (m *LogEntry) GetLabels() map[string]string {
	if m != nil {
		return m.Labels
	}
	return nil
}

// GetOperation returns the associated operation info, or nil; safe on a nil receiver.
func (m *LogEntry) GetOperation() *LogEntryOperation {
	if m != nil {
		return m.Operation
	}
	return nil
}
// XXX_OneofFuncs is for the internal use of the proto package.
// It exposes the marshal/unmarshal/size helpers for the Payload oneof, plus
// the list of wrapper types, so the runtime can handle the oneof generically.
func (*LogEntry) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
	return _LogEntry_OneofMarshaler, _LogEntry_OneofUnmarshaler, _LogEntry_OneofSizer, []interface{}{
		(*LogEntry_ProtoPayload)(nil),
		(*LogEntry_TextPayload)(nil),
		(*LogEntry_JsonPayload)(nil),
	}
}

// _LogEntry_OneofMarshaler encodes whichever payload wrapper is set as its
// tagged field (2 = proto, 3 = text, 6 = json); a nil payload writes nothing.
func _LogEntry_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
	m := msg.(*LogEntry)
	// payload
	switch x := m.Payload.(type) {
	case *LogEntry_ProtoPayload:
		_ = b.EncodeVarint(2<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.ProtoPayload); err != nil {
			return err
		}
	case *LogEntry_TextPayload:
		_ = b.EncodeVarint(3<<3 | proto.WireBytes)
		_ = b.EncodeStringBytes(x.TextPayload)
	case *LogEntry_JsonPayload:
		_ = b.EncodeVarint(6<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.JsonPayload); err != nil {
			return err
		}
	case nil:
	default:
		return fmt.Errorf("LogEntry.Payload has unexpected type %T", x)
	}
	return nil
}

// _LogEntry_OneofUnmarshaler decodes fields 2, 3 and 6 into the corresponding
// payload wrapper; it reports (false, nil) for any other tag so the caller
// falls back to normal field handling.
func _LogEntry_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
	m := msg.(*LogEntry)
	switch tag {
	case 2: // payload.proto_payload
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(google_protobuf1.Any)
		err := b.DecodeMessage(msg)
		m.Payload = &LogEntry_ProtoPayload{msg}
		return true, err
	case 3: // payload.text_payload
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		x, err := b.DecodeStringBytes()
		m.Payload = &LogEntry_TextPayload{x}
		return true, err
	case 6: // payload.json_payload
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(google_protobuf2.Struct)
		err := b.DecodeMessage(msg)
		m.Payload = &LogEntry_JsonPayload{msg}
		return true, err
	default:
		return false, nil
	}
}

// _LogEntry_OneofSizer returns the encoded size (tag + length prefix + body)
// of whichever payload wrapper is set; an unset payload contributes zero.
func _LogEntry_OneofSizer(msg proto.Message) (n int) {
	m := msg.(*LogEntry)
	// payload
	switch x := m.Payload.(type) {
	case *LogEntry_ProtoPayload:
		s := proto.Size(x.ProtoPayload)
		n += proto.SizeVarint(2<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(s))
		n += s
	case *LogEntry_TextPayload:
		n += proto.SizeVarint(3<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(len(x.TextPayload)))
		n += len(x.TextPayload)
	case *LogEntry_JsonPayload:
		s := proto.Size(x.JsonPayload)
		n += proto.SizeVarint(6<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(s))
		n += s
	case nil:
	default:
		panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
	}
	return n
}
// Additional information about a potentially long-running operation with which
// a log entry is associated.
type LogEntryOperation struct {
	// Required. An arbitrary operation identifier. Log entries with the
	// same identifier are assumed to be part of the same operation.
	//
	Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"`
	// Required. An arbitrary producer identifier. The combination of
	// `id` and `producer` must be globally unique. Examples for `producer`:
	// `"MyDivision.MyBigCompany.com"`, "github.com/MyProject/MyApplication"`.
	//
	Producer string `protobuf:"bytes,2,opt,name=producer,proto3" json:"producer,omitempty"`
	// Optional. Set this to True if this is the first log entry in the operation.
	First bool `protobuf:"varint,3,opt,name=first,proto3" json:"first,omitempty"`
	// Optional. Set this to True if this is the last log entry in the operation.
	Last bool `protobuf:"varint,4,opt,name=last,proto3" json:"last,omitempty"`
}

// Reset resets m to the zero LogEntryOperation.
func (m *LogEntryOperation) Reset() { *m = LogEntryOperation{} }

// String renders m in the compact proto text format.
func (m *LogEntryOperation) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks *LogEntryOperation as a protobuf message.
func (*LogEntryOperation) ProtoMessage() {}

// Descriptor returns the raw (gzipped) file descriptor bytes and the
// message's index path within that file.
func (*LogEntryOperation) Descriptor() ([]byte, []int) { return fileDescriptorLogEntry, []int{1} }
// init registers the generated message types with the proto runtime under
// their fully-qualified names so they can be resolved by name at runtime.
func init() {
	proto.RegisterType((*LogEntry)(nil), "google.logging.v2.LogEntry")
	proto.RegisterType((*LogEntryOperation)(nil), "google.logging.v2.LogEntryOperation")
}
var fileDescriptorLogEntry = []byte{
// 548 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0x7c, 0x93, 0x51, 0x6f, 0xd3, 0x30,
0x14, 0x85, 0xe9, 0xda, 0xb5, 0xc9, 0x6d, 0x37, 0x98, 0x35, 0x44, 0x16, 0x86, 0x28, 0x1b, 0x82,
0x3d, 0xb9, 0x52, 0x79, 0xd9, 0x00, 0x09, 0x51, 0x84, 0x34, 0xa4, 0x01, 0x93, 0xc7, 0x7b, 0x95,
0xb6, 0x5e, 0x66, 0x48, 0xed, 0xe0, 0x38, 0x15, 0xfd, 0x41, 0xfc, 0x4f, 0x6c, 0xc7, 0x4e, 0xab,
0x16, 0xed, 0xcd, 0xd7, 0xfe, 0x8e, 0xcf, 0xcd, 0xf1, 0x0d, 0xbc, 0x48, 0x85, 0x48, 0x33, 0x3a,
0xc8, 0x44, 0x9a, 0x32, 0x9e, 0x0e, 0x16, 0x43, 0xb3, 0x1c, 0x53, 0xae, 0xe4, 0x12, 0xe7, 0x52,
0x28, 0x81, 0x0e, 0x2a, 0x04, 0x3b, 0x04, 0x2f, 0x86, 0xf1, 0xb1, 0x53, 0x25, 0x39, 0x1b, 0x24,
0x9c, 0x0b, 0x95, 0x28, 0x26, 0x78, 0x51, 0x09, 0xe2, 0xd3, 0xb5, 0xd3, 0xb9, 0xe0, 0x4c, 0x09,
0x49, 0x67, 0x63, 0x49, 0x0b, 0x51, 0xca, 0x29, 0x75, 0xd0, 0xab, 0x0d, 0x63, 0xb5, 0xcc, 0xe9,
0xe0, 0x4e, 0xa9, 0x5c, 0x83, 0xbf, 0x4b, 0x5a, 0xa8, 0xfb, 0x38, 0xd3, 0x62, 0x41, 0x17, 0x54,
0x32, 0xe5, 0xba, 0x8c, 0x8f, 0x1c, 0x67, 0xab, 0x49, 0x79, 0xab, 0xfb, 0xf2, 0x47, 0xc7, 0x9b,
0x47, 0x85, 0x92, 0xe5, 0xd4, 0x1b, 0x3c, 0xdf, 0x3c, 0x55, 0x6c, 0xae, 0xed, 0x93, 0x79, 0x5e,
0x01, 0x27, 0x7f, 0x77, 0x21, 0xb8, 0x12, 0xe9, 0x67, 0x13, 0x09, 0x3a, 0x82, 0xc0, 0x98, 0xf3,
0x64, 0x4e, 0xa3, 0x5e, 0xbf, 0x71, 0x16, 0x92, 0x8e, 0xae, 0xbf, 0xe9, 0x12, 0x5d, 0x40, 0xe0,
0xbf, 0x31, 0x0a, 0xf4, 0x51, 0x77, 0xf8, 0x0c, 0xbb, 0xe8, 0x74, 0x12, 0xf8, 0xab, 0x4f, 0x82,
0x38, 0x88, 0xd4, 0x38, 0x7a, 0x07, 0x7b, 0xd6, 0x6b, 0x9c, 0x27, 0xcb, 0x4c, 0x24, 0xb3, 0x68,
0xc7, 0xea, 0x0f, 0xbd, 0xde, 0xf7, 0x86, 0x3f, 0xf2, 0xe5, 0xe5, 0x03, 0xd2, 0xb3, 0xf5, 0x75,
0xc5, 0xa2, 0x53, 0xe8, 0x29, 0xfa, 0x47, 0xd5, 0xda, 0xa6, 0x69, 0x4b, 0x53, 0x5d, 0xb3, 0xeb,
0xa1, 0xf7, 0xd0, 0xfb, 0x59, 0x08, 0x5e, 0x43, 0x6d, 0x6b, 0xf0, 0x64, 0xcb, 0xe0, 0xc6, 0x46,
0x63, 0xd4, 0x06, 0xf7, 0xea, 0x73, 0x08, 0xeb, 0x54, 0xa2, 0xd0, 0x4a, 0xe3, 0x2d, 0xe9, 0x0f,
0x4f, 0x90, 0x15, 0xac, 0x7d, 0x03, 0xff, 0x50, 0x11, 0x68, 0xe1, 0xfe, 0xb0, 0x8f, 0x37, 0xe6,
0xc9, 0xbc, 0x28, 0xd6, 0x01, 0xdf, 0x38, 0x8e, 0xd4, 0x0a, 0xf4, 0x14, 0x42, 0xc6, 0x0b, 0x2a,
0xd5, 0x98, 0xcd, 0xa2, 0x96, 0x8d, 0x3b, 0xa8, 0x36, 0xbe, 0xcc, 0xd0, 0x27, 0xe8, 0xad, 0xcf,
0x4b, 0xd4, 0xb1, 0x7d, 0xfd, 0xff, 0xfa, 0x4b, 0x0d, 0x92, 0x8a, 0x23, 0xdd, 0xbb, 0x55, 0x81,
0x3e, 0x40, 0x3b, 0x4b, 0x26, 0x34, 0x2b, 0xa2, 0x6e, 0xbf, 0xa9, 0xe5, 0xaf, 0xf1, 0xd6, 0xb4,
0x63, 0xff, 0xf8, 0xf8, 0xca, 0x92, 0x76, 0x4d, 0x9c, 0x0c, 0x8d, 0x20, 0x14, 0x39, 0x95, 0xf6,
0x07, 0x88, 0x1e, 0xda, 0x16, 0x5e, 0xde, 0x73, 0xc7, 0x77, 0xcf, 0x92, 0x95, 0x2c, 0xbe, 0x80,
0xee, 0xda, 0xd5, 0xe8, 0x11, 0x34, 0x7f, 0xd1, 0x65, 0xd4, 0xb0, 0xdf, 0x6b, 0x96, 0xe8, 0x10,
0x76, 0x17, 0x49, 0x56, 0x52, 0x3b, 0x17, 0x21, 0xa9, 0x8a, 0xb7, 0x3b, 0xe7, 0x8d, 0x51, 0x08,
0x1d, 0xf7, 0xa4, 0x27, 0x0c, 0x0e, 0xb6, 0x5c, 0xd0, 0x3e, 0xec, 0xe8, 0xe8, 0xaa, 0xab, 0xf4,
0x0a, 0xc5, 0x10, 0xe8, 0x07, 0x9b, 0x95, 0x53, 0x2a, 0xdd, 0x65, 0x75, 0x6d, 0x5c, 0x6e, 0x99,
0xd4, 0x49, 0x9a, 0x09, 0x0a, 0x48, 0x55, 0x20, 0x04, 0xad, 0x2c, 0xd1, 0x9b, 0x2d, 0xbb, 0x69,
0xd7, 0xa3, 0x33, 0x78, 0x3c, 0x15, 0xf3, 0xed, 0xcf, 0x1c, 0xed, 0xf9, 0x0e, 0xae, 0xed, 0x84,
0x36, 0x26, 0x6d, 0x3b, 0x1e, 0x6f, 0xfe, 0x05, 0x00, 0x00, 0xff, 0xff, 0x9b, 0x74, 0x3a, 0x8e,
0x68, 0x04, 0x00, 0x00,
}
| kubeup/archon | vendor/go.pedge.io/pb/gogo/google/logging/v2/log_entry.pb.go | GO | apache-2.0 | 13,614 |
package teststate.core
import teststate.data._
import teststate.typeclass.Profunctor
object Types {

  /** A `Sack` over input `A` whose leaves are either a named [[Failure]] of `E` or a value of `B`. */
  type SackE[-A, +B, +E] = Sack[A, NamedError[Failure[E]] Or B]

  /** Type-lambda that fixes a check's input to the observation/state pair `OS[O, S]`:
    * exposes `C[OS[O, S], E]` as `T[O, S, E]`. */
  type CheckShape1[C[-_, _]] = ({ type T[-O, -S, E] = C[OS[O, S], E] })

  /** A sack of checks of shape `C` over input `A`, with named errors at the leaves. */
  type CheckShapeA[C[-_, _], -A, E] = SackE[A, C[A, E], E]

  /** [[CheckShapeA]] specialised to the observation/state pair `OS[O, S]`. */
  type CheckShape [C[-_, _], -O, -S, E] = CheckShapeA[C, OS[O, S], E]

  // Specialisations of CheckShape for the three check kinds.
  type Points [-O, -S, E] = CheckShape[Point , O, S, E]
  type Arounds [-O, -S, E] = CheckShape[Around , O, S, E]
  type Invariants[-O, -S, E] = CheckShape[Invariant, O, S, E]

  // OS →ˢ (NamedError E | OS →ᶜ E)
  /** Profunctor syntax (dimap etc.) for a [[CheckShape]]. */
  implicit def checkShapeProfunctorOps[C[-_, _], O, S, E](a: CheckShape[C, O, S, E]): Profunctor.Ops[Sack, OS[O, S], NamedError[Failure[E]] Or C[OS[O, S], E]] =
    new Profunctor.Ops[Sack, OS[O, S], NamedError[Failure[E]] Or C[OS[O, S], E]](a)

  /** Profunctor syntax (dimap etc.) for a [[CheckShapeA]]. */
  implicit def checkShapeAProfunctorOps[C[-_, _], A, E](a: CheckShapeA[C, A, E]): Profunctor.Ops[Sack, A, NamedError[Failure[E]] Or C[A, E]] =
    new Profunctor.Ops[Sack, A, NamedError[Failure[E]] Or C[A, E]](a)
}
| japgolly/test-state | core/shared/src/main/scala/teststate/core/Types.scala | Scala | apache-2.0 | 1,069 |
/**
* Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.engine.depgraph;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.ComputationTargetSpecification;
import com.opengamma.engine.function.ParameterizedFunction;
import com.opengamma.engine.target.ComputationTargetType;
import com.opengamma.engine.test.MockFunction;
import com.opengamma.engine.value.ValueProperties;
import com.opengamma.engine.value.ValuePropertyNames;
import com.opengamma.engine.value.ValueRequirement;
import com.opengamma.engine.value.ValueSpecification;
import com.opengamma.util.test.TestGroup;
/**
* Tests the support classes for {@link ResolutionFailure}
*/
@Test(groups = TestGroup.UNIT)
public class ResolutionFailureTest {
private int _count;
private static final class DebugResolutionFailureVisitor extends ResolutionFailureVisitor<String> {
@Override
protected String visitRecursiveRequirement(final ValueRequirement valueRequirement) {
return "recursiveRequirement=" + valueRequirement;
}
@Override
protected String visitFailedFunction(final ValueRequirement valueRequirement, final String function, final ValueSpecification desiredOutput,
final Map<ValueSpecification, ValueRequirement> satisfied, final Set<ResolutionFailure> unsatisfied) {
final StringBuilder sb = new StringBuilder();
sb.append("failedFunction=").append(valueRequirement).append(",").append(function).append(",").append(desiredOutput).append(",")
.append(satisfied);
for (final ResolutionFailure requirement : unsatisfied) {
sb.append(",").append(requirement.accept(this));
}
return sb.toString();
}
@Override
protected String visitGetRequirementsFailed(final ValueRequirement valueRequirement, final String function, final ValueSpecification desiredOutput) {
return "getRequirementsFailed=" + valueRequirement + "," + function + "," + desiredOutput;
}
@Override
protected String visitSuccessfulFunction(final ValueRequirement valueRequirement, final String function, final ValueSpecification desiredOutput,
final Map<ValueSpecification, ValueRequirement> satisfied) {
return "successfulFunction=" + valueRequirement + "," + function + "," + desiredOutput + "," + satisfied;
}
@Override
protected String visitGetAdditionalRequirementsFailed(final ValueRequirement valueRequirement, final String function,
final ValueSpecification desiredOutput,
final Map<ValueSpecification, ValueRequirement> requirements) {
return "getAdditionalRequirementsFailed=" + valueRequirement + "," + function + "," + desiredOutput + "," + requirements;
}
@Override
protected String visitNoFunctions(final ValueRequirement valueRequirement) {
return "noFunctions=" + valueRequirement;
}
@Override
protected String visitCouldNotResolve(final ValueRequirement valueRequirement) {
return "couldNotResolve=" + valueRequirement;
}
@Override
protected String visitUnsatisfied(final ValueRequirement valueRequirement) {
return "unsatisfied=" + valueRequirement;
}
@Override
protected String visitMarketDataMissing(final ValueRequirement valueRequirement) {
return "marketDataMissing=" + valueRequirement;
}
@Override
protected String visitBlacklistSuppressed(final ValueRequirement valueRequirement, final String function, final ValueSpecification desiredOutput,
final Map<ValueSpecification, ValueRequirement> requirements) {
return "blacklistSuppressed=" + valueRequirement + "," + function + "," + desiredOutput + "," + requirements;
}
@Override
protected String visitGetResultsFailed(final ValueRequirement valueRequirement, final String function, final ValueSpecification desiredOutput,
final Map<ValueSpecification, ValueRequirement> requirements) {
return "getResultsFailed=" + valueRequirement + "," + function + "," + desiredOutput + "," + requirements;
}
@Override
protected String visitLateResolutionFailure(final ValueRequirement valueRequirement, final String function, final ValueSpecification desiredOutput,
final Map<ValueSpecification, ValueRequirement> requirements) {
return "lateResolutionFailure=" + valueRequirement + "," + function + "," + desiredOutput + "," + requirements;
}
}
private ValueRequirement valueRequirement() {
return new ValueRequirement("Foo" + _count++, ComputationTargetSpecification.NULL);
}
private ParameterizedFunction parameterizedFunction() {
final ComputationTarget target = new ComputationTarget(ComputationTargetType.NULL, null);
final MockFunction function = new MockFunction(target);
return new ParameterizedFunction(function, function.getDefaultParameters());
}
private ValueSpecification valueSpecification(final ValueRequirement requirement) {
return new ValueSpecification(requirement.getValueName(),
requirement.getTargetReference().getSpecification(), ValueProperties.with(ValuePropertyNames.FUNCTION, "Mock").get());
}
private void assertEquals(final ResolutionFailure failure, final List<String> expected) {
final ResolutionFailureGatherer<String> gatherer = new ResolutionFailureGatherer<>(new DebugResolutionFailureVisitor());
failure.accept(gatherer);
Assert.assertEquals(gatherer.getResults(), expected);
}
private void assertEquals(final ResolutionFailure failure, final String... expected) {
assertEquals(failure, Arrays.asList(expected));
}
public void testRecursiveRequirement() {
final ValueRequirement requirement = valueRequirement();
assertEquals(ResolutionFailureImpl.recursiveRequirement(requirement), "recursiveRequirement=" + requirement);
}
public void testFunctionApplication() {
final ValueRequirement req1 = valueRequirement();
final ValueRequirement req2 = valueRequirement();
final ValueRequirement req3 = valueRequirement();
final ParameterizedFunction function = parameterizedFunction();
final ValueSpecification spec1 = valueSpecification(req1);
final ValueSpecification spec2 = valueSpecification(req2);
assertEquals(ResolutionFailureImpl.functionApplication(req1, function, spec1).getRequirementsFailed(),
"getRequirementsFailed=" + req1 + ",mock," + spec1);
assertEquals(ResolutionFailureImpl.functionApplication(req1, function, spec1).requirement(req2, null),
"failedFunction=" + req1 + ",mock," + spec1 + ",{},[unsatisfied=" + req2 + "]");
assertEquals(ResolutionFailureImpl.functionApplication(req1, function, spec1).requirement(req2, ResolutionFailureImpl.recursiveRequirement(req2)),
"failedFunction=" + req1 + ",mock," + spec1 + ",{},[recursiveRequirement=" + req2 + "]");
assertEquals(ResolutionFailureImpl.functionApplication(req1, function, spec1).requirements(Collections.singletonMap(spec2, req2)),
"successfulFunction=" + req1 + ",mock," + spec1 + ",{" + spec2 + "=" + req2 + "}");
assertEquals(ResolutionFailureImpl.functionApplication(req1, function, spec1).requirements(Collections.singletonMap(spec2, req2)).requirement(req3, null),
"failedFunction=" + req1 + ",mock," + spec1 + ",{" + spec2 + "=" + req2 + "},[unsatisfied=" + req3 + "]");
assertEquals(ResolutionFailureImpl.functionApplication(req1, function, spec1).requirements(Collections.singletonMap(spec2, req2))
.getAdditionalRequirementsFailed(),
"getAdditionalRequirementsFailed=" + req1 + ",mock," + spec1 + ",{" + spec2 + "=" + req2 + "}");
assertEquals(ResolutionFailureImpl.functionApplication(req1, function, spec1).requirements(Collections.singletonMap(spec2, req2)).getResultsFailed(),
"getResultsFailed=" + req1 + ",mock," + spec1 + ",{" + spec2 + "=" + req2 + "}");
assertEquals(ResolutionFailureImpl.functionApplication(req1, function, spec1).requirements(Collections.singletonMap(spec2, req2)).suppressed(),
"blacklistSuppressed=" + req1 + ",mock," + spec1 + ",{" + spec2 + "=" + req2 + "}");
assertEquals(ResolutionFailureImpl.functionApplication(req1, function, spec1).requirements(Collections.singletonMap(spec2, req2)).lateResolutionFailure(),
"lateResolutionFailure=" + req1 + ",mock," + spec1 + ",{" + spec2 + "=" + req2 + "}");
assertEquals(ResolutionFailureImpl.functionApplication(req1, function, spec1).requirements(Collections.singletonMap(spec2, req2))
.additionalRequirement(req3, null),
"failedFunction=" + req1 + ",mock," + spec1 + ",{" + spec2 + "=" + req2 + "},[unsatisfied=" + req3 + "]");
assertEquals(
ResolutionFailureImpl.functionApplication(req1, function, spec1).requirements(Collections.singletonMap(spec2, req2))
.additionalRequirement(req3, ResolutionFailureImpl.recursiveRequirement(req3)),
"failedFunction=" + req1 + ",mock," + spec1 + ",{" + spec2 + "=" + req2 + "},[recursiveRequirement=" + req3 + "]");
}
public void testNoFunctions() {
final ValueRequirement requirement = valueRequirement();
assertEquals(ResolutionFailureImpl.noFunctions(requirement), "noFunctions=" + requirement);
}
public void testCouldNotResolve() {
    // A "could not resolve" failure renders as its kind followed by the requirement.
    final ValueRequirement req = valueRequirement();
    final String expected = "couldNotResolve=" + req;
    assertEquals(ResolutionFailureImpl.couldNotResolve(req), expected);
}
public void testUnsatisfied() {
    // An "unsatisfied" failure renders as its kind followed by the requirement.
    final ValueRequirement req = valueRequirement();
    final String expected = "unsatisfied=" + req;
    assertEquals(ResolutionFailureImpl.unsatisfied(req), expected);
}
public void testMarketDataMissing() {
    // A "market data missing" failure renders as its kind followed by the requirement.
    final ValueRequirement req = valueRequirement();
    final String expected = "marketDataMissing=" + req;
    assertEquals(ResolutionFailureImpl.marketDataMissing(req), expected);
}
}
| McLeodMoores/starling | projects/engine/src/test/java/com/opengamma/engine/depgraph/ResolutionFailureTest.java | Java | apache-2.0 | 10,098 |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="generator" content="rustdoc">
<meta name="description" content="API documentation for the Rust `get_program_build_info` fn in crate `ocl_core`.">
<meta name="keywords" content="rust, rustlang, rust-lang, get_program_build_info">
<title>ocl_core::get_program_build_info - Rust</title>
<link rel="stylesheet" type="text/css" href="../normalize.css">
<link rel="stylesheet" type="text/css" href="../rustdoc.css">
<link rel="stylesheet" type="text/css" href="../main.css">
</head>
<body class="rustdoc fn">
<!--[if lte IE 8]>
<div class="warning">
This old browser is unsupported and will most likely display funky
things.
</div>
<![endif]-->
<nav class="sidebar">
<p class='location'><a href='index.html'>ocl_core</a></p><script>window.sidebarCurrent = {name: 'get_program_build_info', ty: 'fn', relpath: ''};</script><script defer src="sidebar-items.js"></script>
</nav>
<nav class="sub">
<form class="search-form js-only">
<div class="search-container">
<input class="search-input" name="search"
autocomplete="off"
placeholder="Click or press ‘S’ to search, ‘?’ for more options…"
type="search">
</div>
</form>
</nav>
<section id='main' class="content">
<h1 class='fqn'><span class='in-band'>Function <a href='index.html'>ocl_core</a>::<wbr><a class="fn" href=''>get_program_build_info</a></span><span class='out-of-band'><span id='render-detail'>
<a id="toggle-all-docs" href="javascript:void(0)" title="collapse all docs">
[<span class='inner'>−</span>]
</a>
</span><a class='srclink' href='../src/ocl_core/functions.rs.html#1595-1634' title='goto source code'>[src]</a></span></h1>
<pre class='rust fn'>pub fn get_program_build_info<D: <a class="trait" href="../ocl_core/types/abs/trait.ClDeviceIdPtr.html" title="trait ocl_core::types::abs::ClDeviceIdPtr">ClDeviceIdPtr</a> + <a class="trait" href="https://doc.rust-lang.org/nightly/core/fmt/trait.Debug.html" title="trait core::fmt::Debug">Debug</a>>(<br> obj: &<a class="struct" href="../ocl_core/types/abs/struct.Program.html" title="struct ocl_core::types::abs::Program">Program</a>, <br> device_obj: D, <br> request: <a class="enum" href="../ocl_core/enum.ProgramBuildInfo.html" title="enum ocl_core::ProgramBuildInfo">ProgramBuildInfo</a><br>) -> <a class="enum" href="../ocl_core/types/enums/enum.ProgramBuildInfoResult.html" title="enum ocl_core::types::enums::ProgramBuildInfoResult">ProgramBuildInfoResult</a></pre><div class='docblock'><p>Get program build info.</p>
</div></section>
<section id='search' class="content hidden"></section>
<section class="footer"></section>
<aside id="help" class="hidden">
<div>
<h1 class="hidden">Help</h1>
<div class="shortcuts">
<h2>Keyboard Shortcuts</h2>
<dl>
<dt>?</dt>
<dd>Show this help dialog</dd>
<dt>S</dt>
<dd>Focus the search field</dd>
<dt>⇤</dt>
<dd>Move up in search results</dd>
<dt>⇥</dt>
<dd>Move down in search results</dd>
<dt>⏎</dt>
<dd>Go to active search result</dd>
<dt>+</dt>
<dd>Collapse/expand all sections</dd>
</dl>
</div>
<div class="infos">
<h2>Search Tricks</h2>
<p>
Prefix searches with a type followed by a colon (e.g.
<code>fn:</code>) to restrict the search to a given type.
</p>
<p>
Accepted types are: <code>fn</code>, <code>mod</code>,
<code>struct</code>, <code>enum</code>,
<code>trait</code>, <code>type</code>, <code>macro</code>,
and <code>const</code>.
</p>
<p>
Search functions by type signature (e.g.
<code>vec -> usize</code> or <code>* -> vec</code>)
</p>
</div>
</div>
</aside>
<script>
window.rootPath = "../";
window.currentCrate = "ocl_core";
</script>
<script src="../jquery.js"></script>
<script src="../main.js"></script>
<script defer src="../search-index.js"></script>
</body>
</html> | liebharc/clFFT | docs/bindings/ocl_core/fn.get_program_build_info.html | HTML | apache-2.0 | 4,945 |
/**
* hujiawei 15/4/25
* <p/>
* 模拟
* <p/>
* https://leetcode.com/problems/valid-sudoku/
*/
public class ValidSudoku_36 {

    public static void main(String[] args) {
    }

    /**
     * Checks whether a partially-filled 9x9 sudoku board is valid: no digit
     * repeats within any row, column, or 3x3 sub-box. Empty cells are '.'.
     *
     * Single pass over the board, marking each digit as seen per row, per
     * column, and per box; a second sighting anywhere means the board is
     * invalid. Behaviour matches the three-phase row/column/box scan it
     * replaces.
     *
     * @param board 9x9 grid of '1'..'9' or '.'
     * @return true if no rule is violated by the filled cells
     */
    public boolean isValidSudoku(char[][] board) {
        boolean[][] seenInRow = new boolean[9][9];
        boolean[][] seenInCol = new boolean[9][9];
        boolean[][] seenInBox = new boolean[9][9];
        for (int r = 0; r < 9; r++) {
            for (int c = 0; c < 9; c++) {
                char cell = board[r][c];
                if (cell == '.') {
                    continue; // empty cells impose no constraint
                }
                int digit = cell - '1';
                // Boxes are numbered 0..8 in row-major order of 3x3 tiles.
                int box = (r / 3) * 3 + c / 3;
                if (seenInRow[r][digit] || seenInCol[c][digit] || seenInBox[box][digit]) {
                    return false;
                }
                seenInRow[r][digit] = true;
                seenInCol[c][digit] = true;
                seenInBox[box][digit] = true;
            }
        }
        return true;
    }
}
| hujiaweibujidao/XSolutions | java/leetcode/ValidSudoku_36.java | Java | apache-2.0 | 1,507 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.server;
import java.util.List;
import org.apache.activemq.artemis.core.filter.Filter;
public interface Consumer
{
   /**
    * Offers a message reference to this consumer for delivery.
    * <p>
    * There was a change on semantic during 2.3 here.<br/>
    * We now first accept the message, and the actual deliver is done as part of
    * {@link #proceedDeliver(MessageReference)}. This is to avoid holding a lock on the queues while
    * the delivery is being accomplished To avoid a lock on the queue in case of misbehaving
    * consumers.
    * <p>
    * This should return busy if handle is called before proceed deliver is called
    *
    * @param reference the message reference being offered
    * @return the handling outcome (e.g. busy if a previous handle has not yet been followed by proceedDeliver)
    * @throws Exception if the consumer fails while accepting the reference
    */
   HandleStatus handle(MessageReference reference) throws Exception;

   /**
    * This will proceed with the actual delivery.
    * Notice that handle should hold a readLock and proceedDelivery should release the readLock
    * any lock operation on Consumer should also get a writeLock on the readWriteLock
    * to guarantee there are no pending deliveries
    *
    * @param reference the message reference previously accepted by {@link #handle(MessageReference)}
    * @throws Exception if the delivery fails
    */
   void proceedDeliver(MessageReference reference) throws Exception;

   /**
    * Returns the filter used to select messages for this consumer, if any.
    */
   Filter getFilter();

   /**
    * Returns the messages currently in "in-delivery" mode for this consumer.
    * (Note: earlier versions populated a caller-supplied list; this variant
    * returns the list instead.)
    *
    * @return the references currently being delivered
    */
   List<MessageReference> getDeliveringMessages();

   /**
    * Returns a debugging description of this consumer.
    */
   String debug();

   /**
    * This method will create a string representation meant for management operations.
    * This is different from the toString method that's meant for debugging and will contain information that regular users won't understand well
    * @return a management-friendly description of this consumer
    */
   String toManagementString();

   /**
    * disconnect the consumer
    */
   void disconnect();
}
| jbertram/activemq-artemis-old | artemis-server/src/main/java/org/apache/activemq/artemis/core/server/Consumer.java | Java | apache-2.0 | 2,567 |
/*
* Copyright 2014 CIRDLES.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cirdles.topsoil.plot;
import java.util.List;
import java.util.Map;
/**
* A generalized plot that can express itself as a {@link javafx.scene.Node}.
*
* @author John Zeringue
*/
public interface Plot extends Displayable {

    /**
     * Returns the data backing this plot, one map per data point/row.
     */
    List<Map<String, Object>> getData();

    /**
     * Replaces the data backing this plot.
     *
     * @param data one map per data point/row
     */
    void setData(List<Map<String, Object>> data);

    /**
     * Returns the plot's configuration properties (axis labels, styling, etc.
     * — exact keys are implementation-defined).
     */
    Map<String, Object> getProperties();

    /**
     * Replaces the plot's configuration properties.
     *
     * @param properties implementation-defined property map
     */
    void setProperties(Map<String, Object> properties);

}
| johnzeringue/Topsoil | core/src/main/java/org/cirdles/topsoil/plot/Plot.java | Java | apache-2.0 | 1,027 |
package it.unibz.krdb.obda.reformulation.tests;
/*
* #%L
* ontop-quest-owlapi3
* %%
* Copyright (C) 2009 - 2014 Free University of Bozen-Bolzano
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import it.unibz.krdb.obda.io.ModelIOManager;
import it.unibz.krdb.obda.io.QueryIOManager;
import it.unibz.krdb.obda.model.OBDADataFactory;
import it.unibz.krdb.obda.model.OBDAModel;
import it.unibz.krdb.obda.model.impl.OBDADataFactoryImpl;
import it.unibz.krdb.obda.owlrefplatform.core.QuestConstants;
import it.unibz.krdb.obda.owlrefplatform.core.QuestPreferences;
import it.unibz.krdb.obda.owlrefplatform.owlapi3.*;
import it.unibz.krdb.obda.querymanager.QueryController;
import it.unibz.krdb.obda.querymanager.QueryControllerEntity;
import it.unibz.krdb.obda.querymanager.QueryControllerQuery;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.LinkedList;
import java.util.List;
import static org.junit.Assert.assertFalse;
/**
* The following tests take the Stock exchange scenario and execute the queries
* of the scenario to validate the results. The validation is simple, we only
* count the number of distinct tuples returned by each query, which we know in
* advance.
*
* We execute the scenario in different modes, virtual, classic, with and
* without optimizations.
*
* The data is obtained from an inmemory database with the stock exchange
* tuples. If the scenario is run in classic, this data gets imported
* automatically by the reasoner.
*/
@Ignore // GUOHUI: 2016-01-16 SI+Mapping mode is disabled
public class TreeWitnessRewriterH2Test {

    // TODO We need to extend this test to import the contents of the mappings
    // into OWL and repeat everything taking form OWL

    private OBDADataFactory fac;
    private Connection conn;

    Logger log = LoggerFactory.getLogger(this.getClass());
    private OBDAModel obdaModel = null;
    private OWLOntology ontology;

    List<TestQuery> testQueries = new LinkedList<TestQuery>();

    final String testCase = "twr-predicate";
    final String owlfile = "src/test/resources/test/treewitness/" + testCase + ".owl";
    final String obdafile = "src/test/resources/test/treewitness/" + testCase + ".obda";
    final String qfile = "src/test/resources/test/treewitness/" + testCase + ".q";

    /*
     * Expected number of distinct tuples returned by each query in the .q
     * file, in file order (-1 marks a query that is expected to fail).
     */
    final int[] tuples = {
            7, 0, 4, 1,                            // Simple queries group
            1, 2, 2, 1, 4, 3, 3,                   // CQs group
            0, 1,                                  // Literal
            0, -1, 2,                              // String: Incompatible, Invalid, OK
            0, 2, 2, 0, 2, 2, 0, 0, 0,             // Integer
            0, 1, 1, 0, 1, 1, 0, 1, 1,             // Decimal
            0, 2, 2, 0, 2, 2, 0, 0, 0,             // Double
            0, 0, 0, -1, -1, -1, -1, -1, 1,        // Date time
            0, 0, 0, 0, 5, 5, -1, 0, 5, -1, -1, 5, // Boolean
            2, 5,                                  // FILTER: String (EQ, NEQ)
            2, 5, 5, 7, 0, 2,                      // FILTER: Integer (EQ, NEQ, GT, GTE, LT, LTE)
            1, 3, 2, 3, 1, 2,                      // FILTER: Decimal (EQ, NEQ, GT, GTE, LT, LTE)
            2, 0, 0, 2, 0, 2,                      // FILTER: Double (EQ, NEQ, GT, GTE, LT, LTE)
            1, 3, 2, 3, 1, 2,                      // FILTER: Date Time (EQ, NEQ, GT, GTE, LT, LTE)
            5, 5,                                  // FILTER: Boolean (EQ, NEQ)
            10,                                    // FILTER: LangMatches
            1, 2, 1, 3, 2,                         // Nested boolean expression
            3, 3, 5, 5, 3, 7, 7, 7, 3, 10          // Query modifiers: LIMIT, OFFSET, ORDER BY
    };

    /** A scenario query paired with the number of distinct tuples it should return. */
    public class TestQuery {
        public String id = "";
        public String query = "";
        public int distinctTuples = -1;
    }

    /** Outcome of executing one test query. */
    public class Result {
        public String id = "";
        public String query = "";
        public int distinctTuples = -1;
        public long timeelapsed = -1;
    }

    @Before
    public void setUp() throws Exception {
        /*
         * Initializing an in-memory H2 database with the stock exchange data.
         */
        String url = "jdbc:h2:mem:questjunitdb";
        String username = "sa";
        String password = "";

        fac = OBDADataFactoryImpl.getInstance();
        conn = DriverManager.getConnection(url, username, password);
        executeSqlScript("src/test/resources/test/treewitness/bsbm-schema-create-mysql.sql");

        // Loading the OWL file
        OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
        ontology = manager.loadOntologyFromOntologyDocument((new File(owlfile)));

        // Loading the OBDA data
        obdaModel = fac.getOBDAModel();
        ModelIOManager ioManager = new ModelIOManager(obdaModel);
        ioManager.load(new File(obdafile));
    }

    @After
    public void tearDown() throws Exception {
        dropTables();
        conn.close();
    }

    private void dropTables() throws SQLException, IOException {
        executeSqlScript("src/test/resources/test/treewitness/bsbm-schema-drop-mysql.sql");
    }

    /**
     * Reads the SQL script at {@code path}, executes it as a single update and
     * commits. Consolidates the duplicated read/execute code that previously
     * appeared in both setUp and dropTables, and uses try-with-resources so
     * the reader and statement are closed even on failure (the setUp variant
     * never closed its Statement at all).
     *
     * @param path file-system path of the SQL script
     */
    private void executeSqlScript(String path) throws SQLException, IOException {
        StringBuilder sql = new StringBuilder();
        try (BufferedReader in = new BufferedReader(new FileReader(path))) {
            String line = in.readLine();
            while (line != null) {
                sql.append(line);
                line = in.readLine();
            }
        }
        try (Statement st = conn.createStatement()) {
            st.executeUpdate(sql.toString());
        }
        conn.commit();
    }

    /**
     * Loads the queries from the .q file and pairs each with its expected
     * distinct-tuple count taken positionally from {@code answer}.
     */
    private void prepareTestQueries(int[] answer) throws Exception {
        QueryController qcontroller = new QueryController();
        QueryIOManager qman = new QueryIOManager(qcontroller);
        qman.load(new File(qfile));

        int counter = 0;
        for (QueryControllerEntity entity : qcontroller.getElements()) {
            if (!(entity instanceof QueryControllerQuery))
                continue;
            QueryControllerQuery query = (QueryControllerQuery) entity;
            TestQuery tq = new TestQuery();
            tq.id = query.getID();
            tq.query = query.getQuery();
            tq.distinctTuples = answer[counter];
            testQueries.add(tq);
            counter += 1;
        }
    }

    /**
     * Executes every loaded query against a reasoner configured with the given
     * preferences and fails if any query raises an exception. Note: the
     * per-query comparison against the expected tuple counts is currently
     * disabled; only execution failures are detected.
     */
    private void runTests(QuestPreferences p) throws Exception {
        // Creating a new instance of the reasoner
        QuestOWLFactory factory = new QuestOWLFactory();
        QuestOWLConfiguration config = QuestOWLConfiguration.builder().preferences(p).obdaModel(obdaModel).build();
        QuestOWL reasoner = factory.createReasoner(ontology, config);

        // Now we are ready for querying
        QuestOWLStatement st = reasoner.getStatement();

        List<Result> summaries = new LinkedList<TreeWitnessRewriterH2Test.Result>();

        boolean fail = false;
        int qc = 0;
        for (TestQuery tq : testQueries) {
            log.debug("Executing query: {}", qc);
            String query = tq.query;
            log.debug("Query: {}", query);
            qc += 1;

            int count = 0;
            long start = System.currentTimeMillis();
            long end = 0;
            try {
                if (query.toLowerCase().contains("select")) {
                    // SELECT queries are counted row by row.
                    QuestOWLResultSet rs = st.executeTuple(query);
                    end = System.currentTimeMillis();
                    while (rs.nextRow()) {
                        count += 1;
                    }
                } else {
                    // CONSTRUCT/DESCRIBE queries return a graph.
                    List<OWLAxiom> list = st.executeGraph(query);
                    count += list.size();
                }
            } catch (Exception e) {
                // Record the failure but keep executing the remaining queries.
                fail = true;
                log.debug(e.getMessage(), e);
                end = System.currentTimeMillis();
                count = -1;
            }
            Result summary = new Result();
            summary.id = tq.id;
            summary.query = query;
            summary.timeelapsed = end - start;
            summary.distinctTuples = count;
            summaries.add(summary);
        }

        /* Closing resources */
        st.close();
        reasoner.dispose();

        assertFalse(fail);
    }

    @Test
    public void testViEqSig() throws Exception {
        prepareTestQueries(tuples);

        QuestPreferences p = new QuestPreferences();
        p.setCurrentValueOf(QuestPreferences.REFORMULATION_TECHNIQUE, QuestConstants.TW);
        p.setCurrentValueOf(QuestPreferences.DBTYPE, QuestConstants.SEMANTIC_INDEX);
        p.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.CLASSIC);
        p.setCurrentValueOf(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
        p.setCurrentValueOf(QuestPreferences.OBTAIN_FROM_ONTOLOGY, "true");
        p.setProperty("rewrite", "true");

        runTests(p);
    }
}
| eschwert/ontop | quest-owlapi3/src/test/java/it/unibz/krdb/obda/reformulation/tests/TreeWitnessRewriterH2Test.java | Java | apache-2.0 | 11,075 |
package com.github.monchenkoid.project_9_10_14.backend;
import com.google.appengine.api.datastore.*;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
/**
@author Iryna Monchenko
@version on 17.02.2015
*/
public class GlossaryServlet extends HttpServlet {

    /**
     * Stores a glossary entry (name + description) in the datastore and
     * redirects back to the glossary page.
     */
    @Override
    public void doPost(HttpServletRequest req, HttpServletResponse resp) throws IOException {
        // All glossary entries hang off a single well-known ancestor key.
        Key parentKey = KeyFactory.createKey("Socionics", "default");

        String name = req.getParameter("name");
        String description = req.getParameter("description");

        // NOTE(review): the kind name "Glosary" is misspelled, but it must
        // match already-stored entities — do not rename without a migration.
        Entity entry = new Entity("Glosary", name, parentKey);
        entry.setProperty("name", name);
        entry.setProperty("description", description);

        DatastoreService datastore = DatastoreServiceFactory.getDatastoreService();
        datastore.put(entry);

        resp.sendRedirect("/glossary.jsp");
    }

    /**
     * Intentionally empty: the glossary listing is rendered by glossary.jsp.
     */
    @Override
    public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
    }
}
| MonchenkoID/Project_9_10_14 | Project_9_10_14/backend/src/main/java/com/github/monchenkoid/project_9_10_14/backend/GlossaryServlet.java | Java | apache-2.0 | 1,247 |
package com.excilys.cdb.cli;
import java.util.List;
public interface Console<T> {

    /**
     * Add data.
     *
     * @param t the element to add
     */
    void add(T t);

    /**
     * Delete data.
     *
     * @param id identifier of the element to delete
     */
    void delete(String id);

    /**
     * Display data.
     *
     * @return the elements to be displayed
     */
    List<T> display();

    /**
     * Update data. Takes no arguments — presumably the implementation prompts
     * the user for the element to update (TODO confirm against implementations).
     */
    void update();

    /**
     * Looks up a single element by its identifier.
     *
     * @param id identifier of the element
     * @return the matching element (behaviour when absent is implementation-defined)
     */
    T findById(String id);
}
| gpuget/training-java | computer-database/console/src/main/java/com/excilys/cdb/cli/Console.java | Java | apache-2.0 | 352 |
/*
Copyright 2014 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// A basic integration test for the service.
// Assumes that there is a pre-existing etcd server running on localhost.
package main
import (
"fmt"
"io/ioutil"
"net"
"net/http"
"net/http/httptest"
"os"
"reflect"
"runtime"
"strconv"
"strings"
"sync"
"time"
"github.com/GoogleCloudPlatform/kubernetes/pkg/api"
"github.com/GoogleCloudPlatform/kubernetes/pkg/api/errors"
"github.com/GoogleCloudPlatform/kubernetes/pkg/api/latest"
"github.com/GoogleCloudPlatform/kubernetes/pkg/api/testapi"
"github.com/GoogleCloudPlatform/kubernetes/pkg/apiserver"
"github.com/GoogleCloudPlatform/kubernetes/pkg/client"
nodeControllerPkg "github.com/GoogleCloudPlatform/kubernetes/pkg/cloudprovider/controller"
replicationControllerPkg "github.com/GoogleCloudPlatform/kubernetes/pkg/controller"
"github.com/GoogleCloudPlatform/kubernetes/pkg/kubelet/dockertools"
kubeletServer "github.com/GoogleCloudPlatform/kubernetes/pkg/kubelet/server"
"github.com/GoogleCloudPlatform/kubernetes/pkg/kubelet/volume/empty_dir"
"github.com/GoogleCloudPlatform/kubernetes/pkg/labels"
"github.com/GoogleCloudPlatform/kubernetes/pkg/master"
"github.com/GoogleCloudPlatform/kubernetes/pkg/probe"
"github.com/GoogleCloudPlatform/kubernetes/pkg/service"
"github.com/GoogleCloudPlatform/kubernetes/pkg/util"
"github.com/GoogleCloudPlatform/kubernetes/pkg/util/wait"
"github.com/GoogleCloudPlatform/kubernetes/plugin/pkg/admission/admit"
"github.com/GoogleCloudPlatform/kubernetes/plugin/pkg/scheduler"
_ "github.com/GoogleCloudPlatform/kubernetes/plugin/pkg/scheduler/algorithmprovider"
"github.com/GoogleCloudPlatform/kubernetes/plugin/pkg/scheduler/factory"
"github.com/coreos/go-etcd/etcd"
"github.com/golang/glog"
)
var (
fakeDocker1, fakeDocker2 dockertools.FakeDockerClient
)
// fakeKubeletClient stands in for the kubelet client used by the master,
// serving pod status by proxying to one of the two in-process kubelets.
type fakeKubeletClient struct{}

// GetPodStatus fetches pod status from the kubelet serving the given host.
// Because both kubelets answer on 127.0.0.1 but on different ports, the host
// name is used only to pick the port (10250 vs 10251).
func (fakeKubeletClient) GetPodStatus(host, podNamespace, podID string) (api.PodStatusResult, error) {
	glog.V(3).Infof("Trying to get container info for %v/%v/%v", host, podNamespace, podID)
	// This is a horrible hack to get around the fact that we can't provide
	// different port numbers per kubelet...
	var c client.PodInfoGetter
	switch host {
	case "localhost":
		c = &client.HTTPKubeletClient{
			Client: http.DefaultClient,
			Port:   10250,
		}
	case "127.0.0.1":
		c = &client.HTTPKubeletClient{
			Client: http.DefaultClient,
			Port:   10251,
		}
	default:
		// Any other host is a programming error in the test harness.
		glog.Fatalf("Can't get info for: '%v', '%v - %v'", host, podNamespace, podID)
	}
	r, err := c.GetPodStatus("127.0.0.1", podNamespace, podID)
	if err != nil {
		return r, err
	}
	// Fake up readiness and a pod IP so waiters treat the pod as running.
	r.Status.PodIP = "1.2.3.4"
	m := make(api.PodInfo)
	// Maps can't be mutated in place while ranging over values, so rebuild
	// the info map with every container marked ready.
	for k, v := range r.Status.Info {
		v.Ready = true
		v.PodIP = "1.2.3.4"
		m[k] = v
	}
	r.Status.Info = m
	return r, nil
}

// HealthCheck always reports the kubelet as healthy.
func (fakeKubeletClient) HealthCheck(host string) (probe.Result, error) {
	return probe.Success, nil
}
type delegateHandler struct {
delegate http.Handler
}
func (h *delegateHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) {
if h.delegate != nil {
h.delegate.ServeHTTP(w, req)
return
}
w.WriteHeader(http.StatusNotFound)
}
// startComponents brings up an in-process cluster — etcd-backed master,
// scheduler, endpoint/replication/node controllers and two fake-docker
// kubelets — and returns the URL of the API server.
func startComponents(manifestURL string) (apiServerURL string) {
	// Setup
	servers := []string{}
	glog.Infof("Creating etcd client pointing to %v", servers)
	machineList := []string{"localhost", "127.0.0.1"}

	handler := delegateHandler{}
	apiServer := httptest.NewServer(&handler)

	// Wipe any pre-existing etcd state, retrying with a growing backoff.
	etcdClient := etcd.NewClient(servers)
	sleep := 4 * time.Second
	ok := false
	for i := 0; i < 3; i++ {
		keys, err := etcdClient.Get("/", false, false)
		if err != nil {
			glog.Warningf("Unable to list root etcd keys: %v", err)
			if i < 2 {
				time.Sleep(sleep)
				// BUGFIX: the old `sleep = sleep * sleep` multiplied a
				// Duration by a Duration; (4s)^2 in nanoseconds overflows
				// int64 and yields a negative (i.e. zero) sleep. Grow the
				// backoff multiplicatively instead.
				sleep = sleep * 4
			}
			continue
		}
		for _, node := range keys.Node.Nodes {
			if _, err := etcdClient.Delete(node.Key, true); err != nil {
				glog.Fatalf("Unable delete key: %v", err)
			}
		}
		ok = true
		break
	}
	if !ok {
		glog.Fatalf("Failed to connect to etcd")
	}

	cl := client.NewOrDie(&client.Config{Host: apiServer.URL, Version: testapi.Version()})

	helper, err := master.NewEtcdHelper(etcdClient, "")
	if err != nil {
		glog.Fatalf("Unable to get etcd helper: %v", err)
	}

	// Master
	// BUGFIX: TrimLeft treats "http://" as a *set* of characters and can also
	// strip leading 'h'/'t'/'p' characters of the host; TrimPrefix removes
	// the scheme exactly once.
	host, port, err := net.SplitHostPort(strings.TrimPrefix(apiServer.URL, "http://"))
	if err != nil {
		glog.Fatalf("Unable to parse URL '%v': %v", apiServer.URL, err)
	}
	portNumber, err := strconv.Atoi(port)
	if err != nil {
		glog.Fatalf("Nonnumeric port? %v", err)
	}
	publicAddress := net.ParseIP(host)
	if publicAddress == nil {
		glog.Fatalf("no public address for %s", host)
	}

	// Create a master and install handlers into mux.
	m := master.New(&master.Config{
		Client:            cl,
		EtcdHelper:        helper,
		KubeletClient:     fakeKubeletClient{},
		EnableLogsSupport: false,
		APIPrefix:         "/api",
		Authorizer:        apiserver.NewAlwaysAllowAuthorizer(),
		AdmissionControl:  admit.NewAlwaysAdmit(),
		ReadWritePort:     portNumber,
		ReadOnlyPort:      portNumber,
		PublicAddress:     publicAddress,
		CacheTimeout:      2 * time.Second,
	})
	handler.delegate = m.Handler

	// Scheduler
	schedulerConfigFactory := factory.NewConfigFactory(cl)
	schedulerConfig, err := schedulerConfigFactory.Create()
	if err != nil {
		glog.Fatalf("Couldn't create scheduler config: %v", err)
	}
	scheduler.New(schedulerConfig).Run()

	endpoints := service.NewEndpointController(cl)
	go util.Forever(func() { endpoints.SyncServiceEndpoints() }, time.Second*10)

	controllerManager := replicationControllerPkg.NewReplicationManager(cl)

	// Prove that controllerManager's watch works by making it not sync until after this
	// test is over. (Hopefully we don't take 10 minutes!)
	controllerManager.Run(10 * time.Minute)

	nodeResources := &api.NodeResources{}
	nodeController := nodeControllerPkg.NewNodeController(nil, "", machineList, nodeResources, cl, fakeKubeletClient{}, 10, 5*time.Minute)
	nodeController.Run(5*time.Second, true)

	// Kubelet (localhost)
	testRootDir := makeTempDirOrDie("kubelet_integ_1.")
	glog.Infof("Using %s as root dir for kubelet #1", testRootDir)
	kubeletServer.SimpleRunKubelet(cl, nil, &fakeDocker1, machineList[0], testRootDir, manifestURL, "127.0.0.1", 10250, api.NamespaceDefault, empty_dir.ProbeVolumePlugins())

	// Kubelet (machine)
	// Create a second kubelet so that the guestbook example's two redis slaves both
	// have a place they can schedule.
	testRootDir = makeTempDirOrDie("kubelet_integ_2.")
	glog.Infof("Using %s as root dir for kubelet #2", testRootDir)
	kubeletServer.SimpleRunKubelet(cl, nil, &fakeDocker2, machineList[1], testRootDir, "", "127.0.0.1", 10251, api.NamespaceDefault, empty_dir.ProbeVolumePlugins())

	return apiServer.URL
}
// makeTempDirOrDie creates a fresh directory under /tmp with the given prefix
// and returns its path, aborting the whole process on failure.
func makeTempDirOrDie(prefix string) string {
	dir, err := ioutil.TempDir("/tmp", prefix)
	if err != nil {
		glog.Fatalf("Can't make a temp rootdir: %v", err)
	}
	if err := os.MkdirAll(dir, 0750); err != nil {
		glog.Fatalf("Can't mkdir(%q): %v", dir, err)
	}
	return dir
}
// podsOnMinions returns true when all of the selected pods exist on a minion.
// The condition polls via the fake kubelet client; the *client.Client
// parameter is currently unused.
func podsOnMinions(c *client.Client, pods api.PodList) wait.ConditionFunc {
	podInfo := fakeKubeletClient{}
	return func() (bool, error) {
		for i := range pods.Items {
			host, id, namespace := pods.Items[i].Status.Host, pods.Items[i].Name, pods.Items[i].Namespace
			// An empty host means the pod has not been scheduled yet; keep polling.
			if len(host) == 0 {
				return false, nil
			}
			// An error here means the kubelet cannot report status for the
			// pod yet; treat it as "not ready" rather than a hard failure.
			if _, err := podInfo.GetPodStatus(host, namespace, id); err != nil {
				glog.Infof("GetPodStatus error: %v", err)
				return false, nil
			}
		}
		return true, nil
	}
}
// endpointsSet returns a wait condition that is satisfied once the named
// service exposes exactly endpointCount endpoints.
func endpointsSet(c *client.Client, serviceNamespace, serviceID string, endpointCount int) wait.ConditionFunc {
	return func() (bool, error) {
		eps, err := c.Endpoints(serviceNamespace).Get(serviceID)
		if err != nil {
			// Endpoints object not available yet; keep polling.
			return false, nil
		}
		return len(eps.Endpoints) == endpointCount, nil
	}
}
// podExists returns a wait condition that is satisfied once the named pod can
// be fetched from the API server.
func podExists(c *client.Client, podNamespace string, podID string) wait.ConditionFunc {
	return func() (bool, error) {
		if _, err := c.Pods(podNamespace).Get(podID); err != nil {
			return false, nil
		}
		return true, nil
	}
}
// runReplicationControllerTest creates the example replication controller and
// waits until its pods are created, scheduled, and reporting status on their
// minions, aborting the process on any failure or timeout.
func runReplicationControllerTest(c *client.Client) {
	data, err := ioutil.ReadFile("api/examples/controller.json")
	if err != nil {
		glog.Fatalf("Unexpected error: %v", err)
	}
	var controller api.ReplicationController
	if err := api.Scheme.DecodeInto(data, &controller); err != nil {
		glog.Fatalf("Unexpected error: %v", err)
	}

	glog.Infof("Creating replication controllers")
	updated, err := c.ReplicationControllers("test").Create(&controller)
	if err != nil {
		glog.Fatalf("Unexpected error: %v", err)
	}
	glog.Infof("Done creating replication controllers")

	// Give the controllers some time to actually create the pods
	if err := wait.Poll(time.Second, time.Second*30, client.ControllerHasDesiredReplicas(c, updated)); err != nil {
		glog.Fatalf("FAILED: pods never created %v", err)
	}

	// wait for minions to indicate they have info about the desired pods
	pods, err := c.Pods("test").List(labels.Set(updated.Spec.Selector).AsSelector())
	if err != nil {
		glog.Fatalf("FAILED: unable to get pods to list: %v", err)
	}
	if err := wait.Poll(time.Second, time.Second*30, podsOnMinions(c, *pods)); err != nil {
		glog.Fatalf("FAILED: pods never started running %v", err)
	}

	glog.Infof("Pods created")
}
// runAPIVersionsTest verifies the API server advertises exactly the expected
// API versions, aborting the process on mismatch.
func runAPIVersionsTest(c *client.Client) {
	versions, err := c.ServerAPIVersions()
	if err != nil {
		glog.Fatalf("failed to get api versions: %v", err)
	}
	want := []string{"v1beta1", "v1beta2"}
	if got := versions.Versions; !reflect.DeepEqual(want, got) {
		glog.Fatalf("Expected version list '%v', got '%v'", want, got)
	}
	glog.Infof("Version test passed")
}
// runSelfLinkTestOnNamespace creates a service in the given namespace and
// verifies that the self links on the created object, on the service list,
// and on each list item can all be fetched, aborting the process on failure.
func runSelfLinkTestOnNamespace(c *client.Client, namespace string) {
	var svc api.Service
	// Create a throwaway service to probe self links with.
	err := c.Post().
		NamespaceIfScoped(namespace, len(namespace) > 0).
		Resource("services").Body(
		&api.Service{
			ObjectMeta: api.ObjectMeta{
				Name:      "selflinktest",
				Namespace: namespace,
				Labels: map[string]string{
					"name": "selflinktest",
				},
			},
			Spec: api.ServiceSpec{
				Port: 12345,
				// This is here because validation requires it.
				Selector: map[string]string{
					"foo": "bar",
				},
				Protocol:        "TCP",
				SessionAffinity: "None",
			},
		},
	).Do().Into(&svc)
	if err != nil {
		glog.Fatalf("Failed creating selflinktest service: %v", err)
	}
	// TODO: this is not namespace aware
	// Fetch the object back via its own self link.
	err = c.Get().RequestURI(svc.SelfLink).Do().Into(&svc)
	if err != nil {
		glog.Fatalf("Failed listing service with supplied self link '%v': %v", svc.SelfLink, err)
	}

	// Fetch the list, then fetch the list again via the list's self link.
	var svcList api.ServiceList
	err = c.Get().NamespaceIfScoped(namespace, len(namespace) > 0).Resource("services").Do().Into(&svcList)
	if err != nil {
		glog.Fatalf("Failed listing services: %v", err)
	}

	err = c.Get().RequestURI(svcList.SelfLink).Do().Into(&svcList)
	if err != nil {
		glog.Fatalf("Failed listing services with supplied self link '%v': %v", svcList.SelfLink, err)
	}

	// Finally, fetch our service via the self link of its list entry.
	found := false
	for i := range svcList.Items {
		item := &svcList.Items[i]
		if item.Name != "selflinktest" {
			continue
		}
		found = true
		err = c.Get().RequestURI(item.SelfLink).Do().Into(&svc)
		if err != nil {
			glog.Fatalf("Failed listing service with supplied self link '%v': %v", item.SelfLink, err)
		}
		break
	}
	if !found {
		glog.Fatalf("never found selflinktest service in namespace %s", namespace)
	}
	glog.Infof("Self link test passed in namespace %s", namespace)

	// TODO: Should test PUT at some point, too.
}
// runAtomicPutTest verifies optimistic concurrency on PUT: several goroutines
// each add one label to the same service's selector, retrying on conflict,
// and at the end the selector must contain every label — i.e. no writer may
// silently clobber another writer's update.
func runAtomicPutTest(c *client.Client) {
	var svc api.Service
	err := c.Post().Resource("services").Body(
		&api.Service{
			TypeMeta: api.TypeMeta{
				APIVersion: latest.Version,
			},
			ObjectMeta: api.ObjectMeta{
				Name: "atomicservice",
				Labels: map[string]string{
					"name": "atomicService",
				},
			},
			Spec: api.ServiceSpec{
				Port: 12345,
				// This is here because validation requires it.
				Selector: map[string]string{
					"foo": "bar",
				},
				Protocol: "TCP",
				SessionAffinity: "None",
			},
		},
	).Do().Into(&svc)
	if err != nil {
		glog.Fatalf("Failed creating atomicService: %v", err)
	}
	glog.Info("Created atomicService")
	// The expected final selector: the initial pair plus five generated ones.
	testLabels := labels.Set{
		"foo": "bar",
	}
	for i := 0; i < 5; i++ {
		// a: z, b: y, etc...
		testLabels[string([]byte{byte('a' + i)})] = string([]byte{byte('z' - i)})
	}
	var wg sync.WaitGroup
	wg.Add(len(testLabels))
	for label, value := range testLabels {
		// One writer goroutine per label; each runs a read-modify-write
		// loop until its PUT succeeds without a conflict.
		go func(l, v string) {
			for {
				glog.Infof("Starting to update (%s, %s)", l, v)
				var tmpSvc api.Service
				err := c.Get().
					Resource("services").
					Name(svc.Name).
					Do().
					Into(&tmpSvc)
				if err != nil {
					glog.Errorf("Error getting atomicService: %v", err)
					continue
				}
				if tmpSvc.Spec.Selector == nil {
					tmpSvc.Spec.Selector = map[string]string{l: v}
				} else {
					tmpSvc.Spec.Selector[l] = v
				}
				glog.Infof("Posting update (%s, %s)", l, v)
				err = c.Put().Resource("services").Name(svc.Name).Body(&tmpSvc).Do().Error()
				if err != nil {
					if errors.IsConflict(err) {
						glog.Infof("Conflict: (%s, %s)", l, v)
						// This is what we expect.
						continue
					}
					glog.Errorf("Unexpected error putting atomicService: %v", err)
					continue
				}
				break
			}
			glog.Infof("Done update (%s, %s)", l, v)
			wg.Done()
		}(label, value)
	}
	wg.Wait()
	// All writers finished: the selector must now equal testLabels exactly.
	if err := c.Get().Resource("services").Name(svc.Name).Do().Into(&svc); err != nil {
		glog.Fatalf("Failed getting atomicService after writers are complete: %v", err)
	}
	if !reflect.DeepEqual(testLabels, labels.Set(svc.Spec.Selector)) {
		glog.Fatalf("Selector PUTs were not atomic: wanted %v, got %v", testLabels, svc.Spec.Selector)
	}
	glog.Info("Atomic PUTs work.")
}
// runMasterServiceTest verifies that the master's built-in "kubernetes" (RW)
// and "kubernetes-ro" (RO) services exist in the default namespace and that
// each of them has at least one endpoint. Missing endpoints or a failed
// lookup abort the test run.
func runMasterServiceTest(client *client.Client) {
	// Give the master's service sync loops time to create the services.
	time.Sleep(12 * time.Second)
	var svcList api.ServiceList
	err := client.Get().
		Namespace("default").
		Resource("services").
		Do().
		Into(&svcList)
	if err != nil {
		glog.Fatalf("unexpected error listing services: %v", err)
	}
	var foundRW, foundRO bool
	found := util.StringSet{}
	for i := range svcList.Items {
		found.Insert(svcList.Items[i].Name)
		switch svcList.Items[i].Name {
		case "kubernetes":
			foundRW = true
		case "kubernetes-ro":
			foundRO = true
		}
	}
	if foundRW {
		checkMasterServiceEndpoints(client, "kubernetes")
	} else {
		glog.Errorf("no RW service found: %v", found)
	}
	if foundRO {
		checkMasterServiceEndpoints(client, "kubernetes-ro")
	} else {
		glog.Errorf("no RO service found: %v", found)
	}
	if !foundRW || !foundRO {
		glog.Fatalf("Kubernetes service test failed: %v", found)
	}
	glog.Infof("Master service test passed.")
}

// checkMasterServiceEndpoints fetches the endpoints object for the named
// service in the "default" namespace and aborts the test run if the lookup
// fails or the service has no endpoints. Extracted from the previously
// duplicated RW/RO branches of runMasterServiceTest.
func checkMasterServiceEndpoints(client *client.Client, name string) {
	var ep api.Endpoints
	err := client.Get().
		Namespace("default").
		Resource("endpoints").
		Name(name).
		Do().
		Into(&ep)
	if err != nil {
		glog.Fatalf("unexpected error listing endpoints for kubernetes service: %v", err)
	}
	if len(ep.Endpoints) == 0 {
		glog.Fatalf("no endpoints for kubernetes service: %v", ep)
	}
}
// runServiceTest exercises service/endpoint wiring: it creates a pod with a
// known label, creates services selecting that label in two namespaces,
// and checks that endpoints appear only for services in the pod's namespace.
// Finally it lists services across all namespaces and verifies the expected
// set of names. Any failure aborts the test run.
func runServiceTest(client *client.Client) {
	pod := &api.Pod{
		ObjectMeta: api.ObjectMeta{
			Name: "foo",
			Labels: map[string]string{
				"name": "thisisalonglabel",
			},
		},
		Spec: api.PodSpec{
			Containers: []api.Container{
				{
					Name: "c1",
					Image: "foo",
					Ports: []api.Port{
						{ContainerPort: 1234},
					},
					ImagePullPolicy: "PullIfNotPresent",
				},
			},
			RestartPolicy: api.RestartPolicy{Always: &api.RestartPolicyAlways{}},
			DNSPolicy: api.DNSClusterFirst,
		},
		Status: api.PodStatus{
			PodIP: "1.2.3.4",
		},
	}
	pod, err := client.Pods(api.NamespaceDefault).Create(pod)
	if err != nil {
		glog.Fatalf("Failed to create pod: %v, %v", pod, err)
	}
	if err := wait.Poll(time.Second, time.Second*20, podExists(client, pod.Namespace, pod.Name)); err != nil {
		glog.Fatalf("FAILED: pod never started running %v", err)
	}
	// Service in the default namespace selecting the pod's label.
	svc1 := &api.Service{
		ObjectMeta: api.ObjectMeta{Name: "service1"},
		Spec: api.ServiceSpec{
			Selector: map[string]string{
				"name": "thisisalonglabel",
			},
			Port: 8080,
			Protocol: "TCP",
			SessionAffinity: "None",
		},
	}
	svc1, err = client.Services(api.NamespaceDefault).Create(svc1)
	if err != nil {
		glog.Fatalf("Failed to create service: %v, %v", svc1, err)
	}
	// create an identical service in the default namespace
	svc3 := &api.Service{
		ObjectMeta: api.ObjectMeta{Name: "service1"},
		Spec: api.ServiceSpec{
			Selector: map[string]string{
				"name": "thisisalonglabel",
			},
			Port: 8080,
			Protocol: "TCP",
			SessionAffinity: "None",
		},
	}
	svc3, err = client.Services("other").Create(svc3)
	if err != nil {
		glog.Fatalf("Failed to create service: %v, %v", svc3, err)
	}
	// The default-namespace service should pick up the pod as an endpoint.
	if err := wait.Poll(time.Second, time.Second*20, endpointsSet(client, svc1.Namespace, svc1.Name, 1)); err != nil {
		glog.Fatalf("FAILED: unexpected endpoints: %v", err)
	}
	// A second service with the same port.
	svc2 := &api.Service{
		ObjectMeta: api.ObjectMeta{Name: "service2"},
		Spec: api.ServiceSpec{
			Selector: map[string]string{
				"name": "thisisalonglabel",
			},
			Port: 8080,
			Protocol: "TCP",
			SessionAffinity: "None",
		},
	}
	svc2, err = client.Services(api.NamespaceDefault).Create(svc2)
	if err != nil {
		glog.Fatalf("Failed to create service: %v, %v", svc2, err)
	}
	if err := wait.Poll(time.Second, time.Second*20, endpointsSet(client, svc2.Namespace, svc2.Name, 1)); err != nil {
		glog.Fatalf("FAILED: unexpected endpoints: %v", err)
	}
	// The "other"-namespace service must NOT see the default-namespace pod.
	if ok, err := endpointsSet(client, svc3.Namespace, svc3.Name, 0)(); !ok || err != nil {
		glog.Fatalf("FAILED: service in other namespace should have no endpoints: %v %v", ok, err)
	}
	// Cross-namespace listing should contain the two master services plus
	// the three services created above.
	svcList, err := client.Services(api.NamespaceAll).List(labels.Everything())
	if err != nil {
		glog.Fatalf("Failed to list services across namespaces: %v", err)
	}
	names := util.NewStringSet()
	for _, svc := range svcList.Items {
		names.Insert(fmt.Sprintf("%s/%s", svc.Namespace, svc.Name))
	}
	if !names.HasAll("default/kubernetes", "default/kubernetes-ro", "default/service1", "default/service2", "other/service1") {
		glog.Fatalf("Unexpected service list: %#v", names)
	}
	glog.Info("Service test passed.")
}
// testFunc is the signature shared by all sub-tests run from main.
type testFunc func(*client.Client)

// main starts all components in-process (apiserver, controllers, kubelets
// backed by fake Docker clients), runs the sub-tests in parallel against the
// apiserver, and finally checks that the fake kubelets attempted to create
// exactly the expected set of containers.
func main() {
	util.InitFlags()
	runtime.GOMAXPROCS(runtime.NumCPU())
	util.ReallyCrash = true
	util.InitLogs()
	defer util.FlushLogs()
	// Watchdog: abort the whole process if the test hangs.
	go func() {
		defer util.FlushLogs()
		time.Sleep(3 * time.Minute)
		glog.Fatalf("This test has timed out.")
	}()
	manifestURL := ServeCachedManifestFile()
	apiServerURL := startComponents(manifestURL)
	// Ok. we're good to go.
	glog.Infof("API Server started on %s", apiServerURL)
	// Wait for the synchronization threads to come up.
	time.Sleep(time.Second * 10)
	kubeClient := client.NewOrDie(&client.Config{Host: apiServerURL, Version: testapi.Version()})
	// Run tests in parallel
	testFuncs := []testFunc{
		runReplicationControllerTest,
		runAtomicPutTest,
		runServiceTest,
		runAPIVersionsTest,
		runMasterServiceTest,
		func(c *client.Client) {
			runSelfLinkTestOnNamespace(c, "")
			runSelfLinkTestOnNamespace(c, "other")
		},
	}
	var wg sync.WaitGroup
	wg.Add(len(testFuncs))
	for i := range testFuncs {
		f := testFuncs[i]
		go func() {
			f(kubeClient)
			wg.Done()
		}()
	}
	wg.Wait()
	// Check that kubelet tried to make the pods.
	// Using a set to list unique creation attempts. Our fake is
	// really stupid, so kubelet tries to create these multiple times.
	createdPods := util.StringSet{}
	for _, p := range fakeDocker1.Created {
		// The last 8 characters are random, so slice them off.
		if n := len(p); n > 8 {
			createdPods.Insert(p[:n-8])
		}
	}
	for _, p := range fakeDocker2.Created {
		// The last 8 characters are random, so slice them off.
		if n := len(p); n > 8 {
			createdPods.Insert(p[:n-8])
		}
	}
	// We expect 9: 2 pod infra containers + 2 pods from the replication controller +
	//              1 pod infra container + 2 pods from the URL +
	//              1 pod infra container + 1 pod from the service test.
	if len(createdPods) != 9 {
		glog.Fatalf("Unexpected list of created pods:\n\n%#v\n\n%#v\n\n%#v\n\n", createdPods.List(), fakeDocker1.Created, fakeDocker2.Created)
	}
	glog.Infof("OK - found created pods: %#v", createdPods.List())
}
// ServeCachedManifestFile serves a file for kubelet to read.
// It starts an httptest server that answers GET /manifest with the
// testManifestFile constant and returns the full manifest URL.
func ServeCachedManifestFile() (servingAddress string) {
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.URL.Path == "/manifest" {
			w.Write([]byte(testManifestFile))
			return
		}
		// Any other path is unexpected in this harness and kills the whole
		// process. NOTE(review): glog.Fatalf exits, so the http.NotFound
		// call below is unreachable dead code.
		glog.Fatalf("Got request: %#v\n", r)
		http.NotFound(w, r)
	}))

	return server.URL + "/manifest"
}
const (
	// This is copied from, and should be kept in sync with:
	// https://raw.githubusercontent.com/GoogleCloudPlatform/container-vm-guestbook-redis-python/master/manifest.yaml
	// Note that kubelet complains about these containers not having a self link.
	// The two containers defined here (redis + guestbook) are what main()
	// counts when it verifies the number of pods the kubelets created.
	testManifestFile = `version: v1beta2
id: container-vm-guestbook
containers:
  - name: redis
    image: dockerfile/redis
    volumeMounts:
      - name: redis-data
        mountPath: /data

  - name: guestbook
    image: google/guestbook-python-redis
    ports:
      - name: www
        hostPort: 80
        containerPort: 80

volumes:
  - name: redis-data`
)
| knodir/kubernetes | cmd/integration/integration.go | GO | apache-2.0 | 22,027 |
package agent.fuzzy;
import agent.Action;
import agent.State;
import env.CueNames;
import proc.grid.GridWorld;
import reward.Reward;
import Jama.Matrix;
import action.ActionSelection;
import agent.AgentInternalConstants;
import agent.fuzzy.set.AFuzzySet;
import agent.fuzzy.set.ATrapezoidFuzzySet;
import agent.fuzzy.set.LeftOpenSideFuzzySet;
import agent.fuzzy.set.RightOpenSideFuzzySet;
import agent.kalman.KalmanFilterAgent;
import agent.kalman.MuVarQValue;
import env.Cue;
import env.Environment;
import java.util.HashMap;
import java.util.Map;
/**
 * A Kalman-filter reinforcement-learning agent whose state features are fuzzy
 * membership values over a 2-D grid position.
 * <p>
 * The constructor builds an array of trapezoid fuzzy sets tiling the x/y
 * displacement range (with open-ended sets at the extremes); updateValue()
 * performs a Kalman update of the weight vector Theta using feature vectors
 * built from those memberships.
 * <p>
 * NOTE(review): {@code numberOfSets} is read in the constructor before it is
 * assigned locally — presumably the super constructor invokes
 * {@link #getNumberOfBasisFuncs()}, which sets it to 4; confirm before
 * changing construction order.
 */
public class FuzzyKalmanAgent extends KalmanFilterAgent {
    private static final long serialVersionUID = 5295108835776727798L;
    // Fuzzy sets over displacement; first half covers x>=0, second half x<0.
    private AFuzzySet[] fuzzySet;
    private GridWorld gridWorld;
    // Number of fuzzy sets per axis; set to 4 by getNumberOfBasisFuncs().
    private int numberOfSets;
    private double currentXPos;
    private double currentYPos;
    private double prevXPos;
    private double prevYPos;
    // Counts updateValue() calls since the last getStepsToPrey() read.
    private int stepsToPrey;
    // When true, use a fixed 0.1 learning rate instead of the Kalman gain.
    private boolean simple;
    /**
     * Builds the fuzzy-set tiling.
     *
     * @param xdim   grid width; each trapezoid spans 10% of it
     * @param simple if true, updates use a fixed step size, not the Kalman gain
     */
    public FuzzyKalmanAgent(Environment environment,
            ActionSelection actionSelection,
            AgentInternalConstants parameterValues, int xdim, boolean simple) {
        super(environment, actionSelection, parameterValues);
        int halfNumberOfSets = numberOfSets / 2;
        double precentOfGrid = 0.1;
        double lengthOfSet = xdim * precentOfGrid;
        double steepRatio = 6;
        this.simple = simple;
        fuzzySet = new AFuzzySet[halfNumberOfSets * 2];
        double p2 = 0;
        // Innermost pair of sets straddling zero (positive and negative side).
        fuzzySet[0] = new ATrapezoidFuzzySet(0 - lengthOfSet / steepRatio / 2,
                lengthOfSet / steepRatio / 2, 0 + lengthOfSet - lengthOfSet
                / steepRatio, 0 + lengthOfSet);
        fuzzySet[halfNumberOfSets] = new ATrapezoidFuzzySet(-lengthOfSet, 0
                - lengthOfSet + lengthOfSet / steepRatio, -lengthOfSet
                / steepRatio / 2, lengthOfSet / steepRatio / 2);
        // Interior sets, shifted outward by lengthOfSet each iteration.
        for (int i = 1; i < halfNumberOfSets - 1; i++) {
            p2 += lengthOfSet;
            fuzzySet[i] = new ATrapezoidFuzzySet(p2 - lengthOfSet / steepRatio,
                    p2, p2 + lengthOfSet - lengthOfSet / steepRatio, p2
                    + lengthOfSet);
            fuzzySet[i + halfNumberOfSets] = new ATrapezoidFuzzySet(-p2
                    - lengthOfSet,
                    -p2 - lengthOfSet + lengthOfSet / steepRatio, -p2, -p2
                    + lengthOfSet / steepRatio);
        }
        // Outermost sets are open-ended so every displacement has membership.
        fuzzySet[halfNumberOfSets - 1] = new RightOpenSideFuzzySet(p2
                - lengthOfSet / steepRatio, p2);
        fuzzySet[+halfNumberOfSets + halfNumberOfSets - 1] = new LeftOpenSideFuzzySet(
                -p2, -p2 + lengthOfSet / steepRatio);
    }
    /**
     * Returns the size of the feature/weight vector:
     * numberOfSets^2 grid features times the number of actions.
     * Side effect: fixes numberOfSets to 4.
     */
    @Override
    public int getNumberOfBasisFuncs() {
        numberOfSets = 4;
        return numberOfSets * numberOfSets * getNumberOfActions();
    }
    /** Maps a (fuzzy-set-x, fuzzy-set-y, action) triple to a flat index into Theta. */
    protected int getIndex(int x, int y, Action action) {
        int state = x * numberOfSets + y;
        return state * getNumberOfActions() + getOrdinalAction(action);
    }
    /**
     * Kalman (or fixed-rate, when {@code simple}) update of the weight vector
     * Theta and its covariance P from a temporal-difference error based on the
     * reward, then copies the per-state-action means/variances into the
     * agent's Q-values.
     */
    @Override
    protected void updateValue(Reward r, double transitionDelay) {
        stepsToPrey++;
        gridWorld = (GridWorld) environment.getProcedure();
        observationNoise = 0.08;
        currentXPos = gridWorld.getXpos();
        currentYPos = gridWorld.getYpos();
        // Discount factor raised to the elapsed transition delay.
        double df = Math.pow(this.df, transitionDelay);
        P = P.plus(P.times(DiffusionNoise));
        // H1: discounted features of the current (x, y, action);
        // H2: features of the previous (x, y, action).
        Matrix H1 = new Matrix(getNumberOfBasisFuncs(), 1, 0), H2 = new Matrix(
                getNumberOfBasisFuncs(), 1, 0);
        for (int x = 0; x < numberOfSets; x++)
            for (int y = 0; y < numberOfSets; y++) {
                H1.set(getIndex(x, y, getCurrentAction()), 0,
                        fuzzySet[x].getMembership(currentXPos)
                        * fuzzySet[y].getMembership(currentYPos) * df);
                H2.set(getIndex(x, y, getPrevAction()), 0,
                        fuzzySet[x].getMembership(prevXPos)
                        * fuzzySet[y].getMembership(prevYPos) * 1);
            }
        Matrix H = H2.minus(H1);
        // TD error: reward minus predicted value difference.
        double delta = r.getMagnitude() - H.transpose().times(Theta).get(0, 0);
        Matrix P_theta = P.times(H);
        double P_r = H.transpose().times(P).times(H).get(0, 0)
                + observationNoise;
        K = P_theta.times(1 / P_r);
        if (simple)
            Theta = Theta.plus(H.times(0.1).times(delta));
        else
            Theta = Theta.plus(K.times(delta));
        P = P.minus(K.times(K.transpose()).times(P_r));
        // Mirror Theta/P back into the Q-value objects.
        for (State s: getStates()) {
            for (Action i: getActions()) {
                if (getQValue(s, i) != null) {
                    ((MuVarQValue) getQValue(s, i)).setEmu(Theta.get(
                            getStateActionIndex(s, i), 0));
                    ((MuVarQValue) getQValue(s, i)).SetVarMu(P.get(getStateActionIndex(
                            s, i), getStateActionIndex(s, i)));
                }
            }
        }
        prevXPos = currentXPos;
        prevYPos = currentYPos;
    }
    /** Always resolves to the single initial state; cues are ignored. */
    @Override
    public State resolveState(Cue[] cue) {
        return addState(Cue.get(CueNames.InitialState));
    }
    /** Debug dump of Theta for every (x, y, action) cell, positive then negative halves. */
    public void logQTable() {
        for (int y = 0; y < numberOfSets / 2; y++)
            for (int x = 0; x < numberOfSets / 2; x++)
                for (Action a: getActions()) {
                    System.out.println("x : " + x + " y : " + y + " a : "
                            + a + " value : "
                            + Theta.get(getIndex(x, y, a), 0));
                }
        for (int y = numberOfSets / 2; y < numberOfSets; y++)
            for (int x = numberOfSets / 2; x < numberOfSets; x++)
                for (Action a: getActions()) {
                    System.out.println("x : " + (x - numberOfSets / 2)
                            + " y : " + (y - numberOfSets / 2) + " a : "
                            + a + " value : "
                            + Theta.get(getIndex(x, y, a), 0));
                }
    }
    public double getCurrentXPos() {
        return currentXPos;
    }
    public double getCurrentYPos() {
        return currentYPos;
    }
    /**
     * Returns, per action, the value predicted at the current position:
     * the membership-weighted sum of Theta entries over all fuzzy-set pairs.
     */
    public Map<Action, Double> getActionValues() {
        Map<Action, Double > values = new HashMap<Action, Double>();
        for (Action a: getActions())
            for (int x = 0; x < numberOfSets; x++)
                for (int y = 0; y < numberOfSets; y++) {
                    if (values.get(a) == null)
                        values.put(a, 0D);
                    values.put(a, values.get(a) + fuzzySet[x].getMembership(currentXPos)
                            * fuzzySet[y].getMembership(currentYPos)
                            * Theta.get(getIndex(x, y, a), 0));
                }
        return values;
    }
    @Override
    public void newTrial() {
        super.newTrial();
    }
    /** Returns the step count accumulated since the last call and resets it to zero. */
    public int getStepsToPrey() {
        int k = stepsToPrey;
        stepsToPrey = 0;
        return k;
    }
}
| adezfouli/JRLSim | src/agent/fuzzy/FuzzyKalmanAgent.java | Java | apache-2.0 | 6,000 |
package com.yingjunyu.GetInfo.beans;
/**
* Created by yingjunyu on 2016/8/11.
*/
/**
 * Plain data holder for a stock-index quote. All values are kept as the raw
 * strings received from the data source.
 */
public class StockBean {
    private String sname;    // index name
    private String curdot;   // current index value
    private String curprice; // change in points
    private String rate;     // percentage change
    public String getSname() {
        return sname;
    }
    public void setSname(String sname) {
        this.sname = sname;
    }
    public String getCurdot() {
        return curdot;
    }
    public void setCurdot(String curdot) {
        this.curdot = curdot;
    }
    public String getCurprice() {
        return curprice;
    }
    public void setCurprice(String curprice) {
        this.curprice = curprice;
    }
    public String getRate() {
        return rate;
    }
    public void setRate(String rate) {
        this.rate = rate;
    }
}
| yingjunyu/getinfo | app/src/main/java/com/yingjunyu/GetInfo/beans/StockBean.java | Java | apache-2.0 | 844 |
# AUTOGENERATED FILE
FROM balenalib/imx8mm-var-dart-debian:sid-run
# remove several traces of debian python
RUN apt-get purge -y python.*
# http://bugs.python.org/issue19846
# > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK.
ENV LANG C.UTF-8
# install python dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
ca-certificates \
netbase \
&& rm -rf /var/lib/apt/lists/*
# key 63C7CC90: public key "Simon McVittie <smcv@pseudorandom.co.uk>" imported
# key 3372DCFA: public key "Donald Stufft (dstufft) <donald@stufft.io>" imported
RUN gpg --batch --keyserver keyring.debian.org --recv-keys 4DE8FF2A63C7CC90 \
&& gpg --batch --keyserver keyserver.ubuntu.com --recv-key 6E3CBCE93372DCFA \
&& gpg --batch --keyserver keyserver.ubuntu.com --recv-keys 0x52a43a1e4b77b059
ENV PYTHON_VERSION 3.10.0
# if this is called "PIP_VERSION", pip explodes with "ValueError: invalid truth value '<VERSION>'"
ENV PYTHON_PIP_VERSION 21.2.4
ENV SETUPTOOLS_VERSION 58.0.0
RUN set -x \
&& buildDeps=' \
curl \
' \
&& apt-get update && apt-get install -y $buildDeps --no-install-recommends && rm -rf /var/lib/apt/lists/* \
&& curl -SLO "http://resin-packages.s3.amazonaws.com/python/v$PYTHON_VERSION/Python-$PYTHON_VERSION.linux-aarch64-libffi3.3.tar.gz" \
&& echo "47c9b05b54b91145dc2b000ef0e86c159a208a3ae13ec1f43cf0a8ead2aadd91 Python-$PYTHON_VERSION.linux-aarch64-libffi3.3.tar.gz" | sha256sum -c - \
&& tar -xzf "Python-$PYTHON_VERSION.linux-aarch64-libffi3.3.tar.gz" --strip-components=1 \
&& rm -rf "Python-$PYTHON_VERSION.linux-aarch64-libffi3.3.tar.gz" \
&& ldconfig \
&& if [ ! -e /usr/local/bin/pip3 ]; then : \
&& curl -SLO "https://raw.githubusercontent.com/pypa/get-pip/430ba37776ae2ad89f794c7a43b90dc23bac334c/get-pip.py" \
&& echo "19dae841a150c86e2a09d475b5eb0602861f2a5b7761ec268049a662dbd2bd0c get-pip.py" | sha256sum -c - \
&& python3 get-pip.py \
&& rm get-pip.py \
; fi \
&& pip3 install --no-cache-dir --upgrade --force-reinstall pip=="$PYTHON_PIP_VERSION" setuptools=="$SETUPTOOLS_VERSION" \
&& find /usr/local \
\( -type d -a -name test -o -name tests \) \
-o \( -type f -a -name '*.pyc' -o -name '*.pyo' \) \
-exec rm -rf '{}' + \
&& cd / \
&& rm -rf /usr/src/python ~/.cache
# make some useful symlinks that are expected to exist
RUN cd /usr/local/bin \
&& ln -sf pip3 pip \
&& { [ -e easy_install ] || ln -s easy_install-* easy_install; } \
&& ln -sf idle3 idle \
&& ln -sf pydoc3 pydoc \
&& ln -sf python3 python \
&& ln -sf python3-config python-config
# set PYTHONPATH to point to dist-packages
ENV PYTHONPATH /usr/lib/python3/dist-packages:$PYTHONPATH
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@python.sh" \
&& echo "Running test-stack@python" \
&& chmod +x test-stack@python.sh \
&& bash test-stack@python.sh \
&& rm -rf test-stack@python.sh
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v8 \nOS: Debian Sid \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nPython v3.10.0, Pip v21.2.4, Setuptools v58.0.0 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& cp /bin/sh /bin/sh.real \
&& mv /bin/sh-shim /bin/sh | resin-io-library/base-images | balena-base-images/python/imx8mm-var-dart/debian/sid/3.10.0/run/Dockerfile | Dockerfile | apache-2.0 | 4,090 |
/***************************************************************
*
* Copyright (C) 1990-2007, Condor Team, Computer Sciences Department,
* University of Wisconsin-Madison, WI.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
***************************************************************/
#include "classad/common.h"
#include "classad/indexfile.h"
#include <iostream>
using namespace std;
BEGIN_NAMESPACE( classad )
int IndexFile::
dump_index()
{
index_itr_type m=Index.begin();
cout << "in dump index the length= " << Index.size() << std::endl;
while (m!=Index.end()){
cout << "dump index key= " << m->first << " offset=" << m->second << endl;
m++;
};
return 1;
}
#ifdef _MSC_VER
#pragma warning(push)
#pragma warning(disable : 4996) // the seek, read, open, close, fileno, etc are deprecated, use _seek, etc instead.
#endif
// Compacts the storage file: copies every live (non-tombstoned) record into a
// temp file, updates the in-memory offsets as it goes, then atomically
// renames the temp file over the storage file.  Returns false on any I/O
// failure, setting CondorErrno/CondorErrMsg where the original did.
//
// NOTE(review): after the rename, the member fd `filed` still refers to the
// pre-compaction file, while the updated offsets describe the new file;
// callers appear to rely on reopening — confirm before relying on `filed`
// immediately after a truncate.
bool IndexFile::
TruncateStorageFile()
{
	int cur_set = 0;                         // next write offset in the new file
	int new_filed;
	const char* filename = "temp_file";      // was non-const char*: writing through
	const char* logfilename = "storagefile"; // such a pointer is undefined behavior
	index_itr_type ptr;
	if( ( new_filed = open(filename, O_RDWR | O_CREAT | O_APPEND, 0600 )) < 0 ) {
		CondorErrno = ERR_CACHE_FILE_ERROR;
		CondorErrMsg = "internal error: unable to create the temp file in truncating storagefile";
		return( false );
	};
	for (ptr=Index.begin();ptr!=Index.end();ptr++){
		// Read one newline-terminated record from the old file.
		lseek(filed,ptr->second,SEEK_SET);
		char k[1];
		string m;
		int l;
		while ((l=read(filed,k,1))>0){
			string n(k,1);
			if (n=="\n"){
				break;
			} else {
				m=m+n;
			}
		}
		m=m+'\n';
		// Records starting with '*' are tombstones; copy only live ones.
		if (m[0]!='*'){
			if (write(new_filed,(void *)(m.c_str()),m.size())<0){
				close(new_filed);   // was leaked on this error path
				return false;
			} else {
				// Bug fix: the original called fsync(filed) — syncing the
				// file being *read* — instead of the file being written.
				ptr->second=cur_set;
				cur_set+=m.size();
			}
		}
	}
	fsync(new_filed);
	close(new_filed);   // was never closed (fd leak)
	if( rename(filename, logfilename) < 0 ) {
		CondorErrno = ERR_CACHE_FILE_ERROR;
		char buf[10];
		sprintf( buf, "%d", errno );
		CondorErrMsg = "failed to truncate storagefile: rename("
			+ string(filename) + " , "
			+ string(logfilename) +", errno="
			+ string(buf);
		return( false );
	}
	return true;
}
// Position the shared iterator at the first index entry.  On success the
// entry's key is written through `key` and its file offset returned;
// returns -1 if the index is empty.
int IndexFile::
First(string &key)
{
	index_itr = Index.begin();
	if (index_itr == Index.end()) {
		return -1;
	}
	key = index_itr->first;
	return index_itr->second;
}
// Advance the shared iterator to the next entry; returns its offset and
// writes its key through `key`, or returns -1 past the end.
// NOTE(review): must be preceded by a call to First(); advancing an
// uninitialized or end iterator is undefined behavior (same as original).
int IndexFile::
Next(string &key)
{
	++index_itr;
	if (index_itr == Index.end()) {
		return -1;
	}
	key = index_itr->first;
	return index_itr->second;
}
// Read the newline-terminated record stored at `offset` in the storage file
// and return it without the trailing newline.  Records whose first byte is
// '*' are tombstones for deleted classads; for those — and when no storage
// file is attached — an empty string is returned.
// The unnamed first (key) parameter is unused; it is kept only so the
// signature stays compatible with existing callers.
string IndexFile::
GetClassadFromFile(string, int offset)
{
	if (filed == 0) {
		return "";
	}
	// Bug fix: the original stored lseek's result in a local that was never
	// read (set-but-unused warning); the return value is not needed.
	lseek(filed, offset, SEEK_SET);
	char k[1];
	string m;
	while (read(filed, k, 1) > 0) {
		string n(k, 1);
		if (n == "\n") {
			break;
		}
		m = m + n;
	}
	// Guard the tombstone check: indexing m[0] on an empty string was
	// undefined behavior in the original.
	if (!m.empty() && m[0] != '*') {
		return m;
	}
	return "";
}
// Record (or overwrite) the storage-file offset associated with `key`.
// Always succeeds.
bool IndexFile::
UpdateIndex(string key, int offset)
{
	Index[key] = offset;
	return true;
}
// Attach this index to an already-open storage-file descriptor and start
// from an empty in-memory index.
void IndexFile::
Init(int file_handler)
{
	Index.clear();
	filed = file_handler;
}
// Persist `ad` as the record for `key`: tombstone any existing record, then
// append the new one (newline-terminated) at the end of the storage file and
// record its offset in the in-memory index.  Returns false if the append
// fails.
bool IndexFile::
WriteBack(string key, string ad)
{
	DeleteFromStorageFile(key);
	int k = lseek(filed, 0, SEEK_END);
	ad = ad + "\n";
	if (write(filed, (void *)(ad.c_str()), ad.size()) < 0) {
		return false;
	}
	fsync(filed);
	// Bug fix: the original set Index[key] before the write, leaving the
	// index pointing at a nonexistent record when the write failed.
	Index[key] = k;
	return true;
}
// Look up `key` in the in-memory index; on success store its storage-file
// offset into `ptr.offset` and return true, otherwise return false.
bool IndexFile::
FindInFile(string key, tag &ptr)
{
	index_itr_type it = Index.find(key);
	if (it == Index.end()) {
		return false;
	}
	ptr.offset = it->second;
	return true;
}
// Tombstone the on-disk record for `key` (overwrite its first byte with '*')
// and drop the key from the in-memory index.  Returns false if the key is
// unknown or the overwrite cannot be completed.
bool IndexFile::
DeleteFromStorageFile(string key)
{
	index_itr_type i = Index.find(key);
	if (i == Index.end()) {
		return false;
	}
	int offset = i->second;
	lseek(filed, offset, SEEK_SET);
	// Read the whole record so we rewrite it at its original length.
	char k[1];
	string m;
	while (read(filed, k, 1) > 0) {
		string n(k, 1);
		if (n == "\n") {
			break;
		}
		m = m + n;
	}
	if (m.empty()) {
		// Defensive: m[0] below would be undefined on an empty record.
		return false;
	}
	m[0] = '*';
	m = m + '\n';
	lseek(filed, offset, SEEK_SET);
	// Bug fix: the original ignored write()'s return value; a failed
	// overwrite left a live record on disk while the in-memory index
	// had already forgotten the key.
	if (write(filed, (void *)(m.c_str()), m.size()) < 0) {
		return false;
	}
	fsync(filed);
	Index.erase(key);
	return true;
}
#ifdef _MSC_VER
#pragma warning(pop) // the seek, read, open, close, fileno, etc are deprecated, use _seek, etc instead.
#endif
END_NAMESPACE
| clalancette/condor-dcloud | src/classad/indexfile.cpp | C++ | apache-2.0 | 4,606 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_45) on Mon Jun 24 22:25:08 UTC 2013 -->
<META http-equiv="Content-Type" content="text/html; charset=UTF-8">
<TITLE>
Uses of Class org.apache.hadoop.hbase.regionserver.KeyValueHeap (HBase 0.94.9 API)
</TITLE>
<META NAME="date" CONTENT="2013-06-24">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.apache.hadoop.hbase.regionserver.KeyValueHeap (HBase 0.94.9 API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../org/apache/hadoop/hbase/regionserver/KeyValueHeap.html" title="class in org.apache.hadoop.hbase.regionserver"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?org/apache/hadoop/hbase/regionserver//class-useKeyValueHeap.html" target="_top"><B>FRAMES</B></A>
<A HREF="KeyValueHeap.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Class<br>org.apache.hadoop.hbase.regionserver.KeyValueHeap</B></H2>
</CENTER>
No usage of org.apache.hadoop.hbase.regionserver.KeyValueHeap
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../org/apache/hadoop/hbase/regionserver/KeyValueHeap.html" title="class in org.apache.hadoop.hbase.regionserver"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?org/apache/hadoop/hbase/regionserver//class-useKeyValueHeap.html" target="_top"><B>FRAMES</B></A>
<A HREF="KeyValueHeap.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
Copyright © 2013 <a href="http://www.apache.org/">The Apache Software Foundation</a>. All Rights Reserved.
</BODY>
</HTML>
| zqxjjj/NobidaBase | docs/apidocs/org/apache/hadoop/hbase/regionserver/class-use/KeyValueHeap.html | HTML | apache-2.0 | 6,282 |
package com.vijaysharma.gezzoo.utilities;
import com.googlecode.objectify.stringifier.Stringifier;
/**
 * Objectify {@link Stringifier} converting between {@link Long} values and
 * their decimal string representation (both directions throw on null input,
 * and fromString throws NumberFormatException on non-numeric text).
 */
public class LongStringifier implements Stringifier<Long>
{
    @Override
    public String toString(Long obj) {
        return Long.toString(obj);
    }

    @Override
    public Long fromString(String str) {
        return Long.valueOf(str);
    }
}
| vijaysharm/gezzoo-java | src/main/java/com/vijaysharma/gezzoo/utilities/LongStringifier.java | Java | apache-2.0 | 321 |
package io.kamara.firebase.quickstart.android.demo;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.drawable.BitmapDrawable;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore;
import android.provider.Settings;
import android.support.annotation.NonNull;
import android.support.design.widget.Snackbar;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.firebase.ui.auth.ErrorCodes;
import com.firebase.ui.auth.IdpResponse;
import com.firebase.ui.auth.ResultCodes;
import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseUser;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.storage.FirebaseStorage;
import com.google.firebase.storage.OnProgressListener;
import com.google.firebase.storage.StorageMetadata;
import com.google.firebase.storage.StorageReference;
import com.google.firebase.storage.UploadTask;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import io.kamara.firebase.quickstart.android.R;
import io.kamara.firebase.quickstart.android.util.Utility;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
/**
 * Demo screen exercising the Firebase products wired into this sample:
 * FirebaseUI sign-in/sign-out, Cloud Storage photo upload with progress
 * reporting, and persisting the uploaded photo's metadata ("story") to the
 * Realtime Database. Views are bound with ButterKnife.
 */
public class DemoActivity extends AppCompatActivity {

    /** Request code for the photo-picker intent started by {@link #selectPhoto()}. */
    private final int SELECT_PHOTO = 1;

    @BindView(R.id.authBtn)
    Button mAuthButton;

    @BindView(R.id.mainContainer)
    RelativeLayout mMainContainer;

    @BindView(R.id.photoImageView)
    ImageView mPhotoImageView;

    @BindView(R.id.progressBar)
    TextView mProgressTextView;

    private FirebaseAuth mFirebaseAuth;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_demo);
        ButterKnife.bind(this);
        mFirebaseAuth = FirebaseAuth.getInstance();
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Auth state may have changed while this activity was paused
        // (e.g. after returning from the FirebaseUI sign-in flow).
        updateAuthUI();
    }

    /** Toggles authentication: signs out the current user, or starts sign-in. */
    @OnClick(R.id.authBtn)
    protected void authenticateUser() {
        if (mFirebaseAuth.getCurrentUser() != null) {
            Utility.signOut(this, new Utility.SignOutDelegate() {
                @Override
                public void onSignOut() {
                    showSnackBar("Sign-out successful");
                    mAuthButton.setText("Sign-In");
                }
            });
        } else {
            Utility.requestSignIn(this);
        }
    }

    /** Opens the photo picker, requesting storage permission first if needed. */
    @OnClick(R.id.photoImageView)
    protected void selectPhoto() {
        boolean storagePermission = Utility.hasReadStoragePermission(this);
        if (storagePermission) {
            startPhotoPickIntent();
        } else {
            Utility.requestStoragePermission(this);
        }
    }

    /**
     * Compresses the currently displayed photo to JPEG and uploads it to
     * Cloud Storage. Requires a signed-in user.
     */
    @OnClick(R.id.uploadBtn)
    protected void uploadPhoto() {
        if (mFirebaseAuth.getCurrentUser() != null) {
            // NOTE(review): the drawing cache is built but never read — the
            // bitmap below is taken from the drawable, not the cache. Confirm
            // these two calls are still needed before removing them.
            mPhotoImageView.setDrawingCacheEnabled(true);
            mPhotoImageView.buildDrawingCache();
            Bitmap bitmap = ((BitmapDrawable) mPhotoImageView.getDrawable()).getBitmap();
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            bitmap.compress(Bitmap.CompressFormat.JPEG, 100, baos);
            uploadStory(baos.toByteArray());
        } else {
            // Fixed typo in the user-facing message ("Sing-In" -> "Sign-In").
            showSnackBar("Sign-In required, please Sign-In");
        }
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        if (requestCode == Utility.READ_EXTERNAL_STORAGE_REQUEST) {
            if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                startPhotoPickIntent();
            } else {
                // Explain why the permission is needed, then either re-request
                // it or point the user at the app settings screen.
                if (Utility.shouldShowReadStorageRequestPermissionRationale(this)) {
                    new AlertDialog.Builder(this)
                            .setTitle("Permission required")
                            .setMessage("Storage permission is needed to select photo on your device!")
                            .setPositiveButton("Allow", new DialogInterface.OnClickListener() {
                                public void onClick(DialogInterface dialog, int which) {
                                    Utility.requestStoragePermission(DemoActivity.this);
                                    dialog.dismiss();
                                }
                            })
                            .setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
                                @Override
                                public void onClick(DialogInterface dialog, int which) {
                                    showPermissionDeniedSnackBar();
                                    dialog.dismiss();
                                }
                            })
                            .show();
                }
            }
        } else {
            super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        }
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        switch (requestCode) {
            case Utility.RC_SIGN_IN:
                IdpResponse response = IdpResponse.fromResultIntent(data);
                handleSignInResponse(resultCode, response);
                break;
            case SELECT_PHOTO:
                if (resultCode == RESULT_OK) {
                    mPhotoImageView.setImageBitmap(null);
                    Uri photoUri = data.getData();
                    try {
                        Bitmap bitmap = MediaStore.Images.Media.getBitmap(this.getContentResolver(), photoUri);
                        mPhotoImageView.setImageBitmap(bitmap);
                    } catch (IOException e) {
                        e.printStackTrace();
                        showSnackBar("Error picking photo");
                    }
                } else {
                    showSnackBar("Error picking photo");
                }
                break;
        }
    }

    /**
     * Maps a FirebaseUI sign-in result to either a UI refresh (success) or an
     * explanatory snackbar (cancellation / known error codes / fallback).
     */
    private void handleSignInResponse(int resultCode, IdpResponse response) {
        if (resultCode == ResultCodes.OK) {
            updateAuthUI();
            return;
        }
        if (response == null) {
            // FirebaseUI returns a null response when the user backed out.
            showSnackBar("Sign in cancelled");
            return;
        }
        if (response.getErrorCode() == ErrorCodes.NO_NETWORK) {
            showSnackBar("No internet connection");
            return;
        }
        if (response.getErrorCode() == ErrorCodes.UNKNOWN_ERROR) {
            showSnackBar("Unknown error");
            return;
        }
        showSnackBar("Unknown sign_in response");
    }

    /** Syncs the greeting snackbar and auth button label with the current user. */
    private void updateAuthUI() {
        FirebaseUser currentUser = mFirebaseAuth.getCurrentUser();
        String snackBarText;
        String authButtonText;
        if (currentUser != null) {
            snackBarText = "Welcome, " + currentUser.getDisplayName();
            authButtonText = "Sign-Out";
        } else {
            snackBarText = "You are not Signed-In";
            authButtonText = "Sign-In";
        }
        mAuthButton.setText(authButtonText);
        showSnackBar(snackBarText);
    }

    /** Launches the system chooser for picking an image. */
    private void startPhotoPickIntent() {
        Intent intent = Utility.newImagePickerIntent();
        startActivityForResult(Intent.createChooser(intent, "Select photo"), SELECT_PHOTO);
    }

    private void showSnackBar(String message) {
        Snackbar.make(mMainContainer, message, Snackbar.LENGTH_LONG).show();
    }

    /** Snackbar with a "Settings" action deep-linking to this app's settings page. */
    private void showPermissionDeniedSnackBar() {
        Snackbar snackbar = Snackbar.make(mMainContainer, "Storage permission required!", Snackbar.LENGTH_LONG);
        snackbar.setAction("Settings", new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent intent = new Intent();
                intent.setAction(Settings.ACTION_APPLICATION_DETAILS_SETTINGS);
                Context context = mMainContainer.getContext();
                Uri uri = Uri.fromParts("package", context.getPackageName(), null);
                intent.setData(uri);
                context.startActivity(intent);
            }
        });
        snackbar.show();
    }

    /**
     * Uploads JPEG bytes to Cloud Storage under
     * {@code <uid>/<millis>}, reporting progress to the progress TextView and
     * chaining into {@link #saveStory} on success.
     */
    private void uploadStory(byte[] data) {
        mProgressTextView.setText("");
        StorageMetadata metadata = new StorageMetadata.Builder()
                .setContentType("image/jpeg")
                .build();
        // Millisecond timestamp keeps successive uploads from colliding.
        String filename = String.valueOf(System.currentTimeMillis());
        StorageReference storageReference = FirebaseStorage.getInstance().getReference();
        StorageReference photoReference = storageReference.child(FirebaseAuth.getInstance().getCurrentUser().getUid())
                .child(filename);
        photoReference.putBytes(data, metadata)
                .addOnSuccessListener(new OnSuccessListener<UploadTask.TaskSnapshot>() {
                    @Override
                    public void onSuccess(UploadTask.TaskSnapshot taskSnapshot) {
                        saveStory(taskSnapshot);
                    }
                })
                .addOnFailureListener(new OnFailureListener() {
                    @Override
                    public void onFailure(@NonNull Exception e) {
                        showSnackBar("Failed to upload photo");
                    }
                })
                .addOnProgressListener(new OnProgressListener<UploadTask.TaskSnapshot>() {
                    @Override
                    public void onProgress(UploadTask.TaskSnapshot snapshot) {
                        int progress = (int) ((100.0 * snapshot.getBytesTransferred()) / snapshot.getTotalByteCount());
                        mProgressTextView.setText("Upload is " + progress + "% done");
                    }
                });
    }

    /**
     * Writes a new Story record (download URL + storage path) under
     * {@code stories/} in the Realtime Database and confirms via dialog.
     */
    private void saveStory(UploadTask.TaskSnapshot snapshot) {
        DatabaseReference stories = FirebaseDatabase.getInstance().getReference("stories").push();
        Story story = new Story(snapshot.getDownloadUrl().toString(), snapshot.getMetadata().getPath(), "My Awesome Photo", null);
        stories.setValue(story)
                .addOnSuccessListener(new OnSuccessListener<Void>() {
                    @Override
                    public void onSuccess(Void aVoid) {
                        new AlertDialog.Builder(DemoActivity.this)
                                .setTitle("Congratulation!")
                                .setMessage("Photo successfully saved to Database.\nGo to the Firebase console and verify photo upload in storage, " +
                                        "new story and user added to database & authentication respectively!\nHappy Coding")
                                .setPositiveButton("Exit Demo", new DialogInterface.OnClickListener() {
                                    public void onClick(DialogInterface dialog, int which) {
                                        finish();
                                        dialog.dismiss();
                                    }
                                })
                                .setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
                                    @Override
                                    public void onClick(DialogInterface dialog, int which) {
                                        dialog.dismiss();
                                    }
                                })
                                .show();
                    }
                })
                .addOnFailureListener(new OnFailureListener() {
                    @Override
                    public void onFailure(@NonNull Exception e) {
                        showSnackBar("Failed to save photo to Database");
                    }
                });
    }

    /**
     * Database model for an uploaded photo. Public fields and the no-arg
     * constructor are required by Firebase's DataSnapshot deserialization.
     */
    private class Story {
        public String downloadUrl;
        public String title;
        public String uuid;
        public String filePath;

        public Story() {
            // Default constructor required for calls to DataSnapshot.getValue(Story.class)
        }

        public Story(String downloadUrl, String filePath, String title, String uuid) {
            this.downloadUrl = downloadUrl;
            this.filePath = filePath;
            this.title = title;
            this.uuid = uuid;
        }
    }
}
| GDG-Trondheim/firebase-ui-quickstart-android | app/src/main/java/io/kamara/firebase/quickstart/android/demo/DemoActivity.java | Java | apache-2.0 | 13,076 |
using System.Linq;
using RealtyInvest.Common.ServiceResult;
using RealtyInvest.DataModel.Models;
using RealtyInvest.DataModel.UnitsOfWorks;
using RealtyInvest.DataModel.ViewModels;
using RealtyInvest.DataModel.ViewModels.Manage;
namespace RealtyInvest.Core.Services.Impl
{
public class RealtySearchService : IRealtySearchService
{
private readonly IUnitOfWorkFactory _factory;
public RealtySearchService(IUnitOfWorkFactory factory)
{
_factory = factory;
}
public ServiceResult<SearchResult[]> Search(SearchModel model)
{
ServiceResult<SearchResult[]> result = new ServiceResult<SearchResult[]>();
using (var uow = _factory.CreateUnitOfWork())
{
result.Value = new SearchResult[]
{
new SearchResult {Price = 10, RealtyName = "Dom v tsarskom sele", PictureUrl = "http://storage.googleapis.com/bd-ua-01/buildings/11762.jpg" },
new SearchResult {Price = 10, RealtyName = "Dom v tsarskom sele", PictureUrl = "http://storage.googleapis.com/bd-ua-01/buildings/11762.jpg" },
new SearchResult {Price = 10, RealtyName = "Dom v tsarskom sele", PictureUrl = "http://storage.googleapis.com/bd-ua-01/buildings/11762.jpg" },
new SearchResult {Price = 10, RealtyName = "Dom v tsarskom sele", PictureUrl = "http://storage.googleapis.com/bd-ua-01/buildings/11762.jpg" },
new SearchResult {Price = 10, RealtyName = "Dom v tsarskom sele", PictureUrl = "http://storage.googleapis.com/bd-ua-01/buildings/11762.jpg" },
new SearchResult {Price = 10, RealtyName = "Dom v tsarskom sele", PictureUrl = "http://storage.googleapis.com/bd-ua-01/buildings/11762.jpg" }
};
result.ServiceStatus = Status.Success;
}
return result;
}
public ServiceResult<SearchResult[]> AutoSearch(string userid, string text)
{
ServiceResult<SearchResult[]> result = new ServiceResult<SearchResult[]>();
using (var uow = _factory.CreateUnitOfWork())
{
result.Value = uow.RealEstateRepository
.All(x => x.Owner.Id == userid &&
(x.Location.City.Contains(text) || x.Location.Country.Contains(text)
|| x.Name.Contains(text) || x.Owner.UserName.Contains(text) || x.Description.Contains(text)))
.Select(x => new SearchResult
{
RealtyId = x.Id,
Description = x.Description,
Location = x.Location,
RealtyName = x.Name,
PictureUrl = x.MainPictureUrl,
Price = x.Price
}).ToArray();
result.ServiceStatus = Status.Success;
}
return result;
}
}
} | quaternion1994/RealtyInvest | RealtyInvest.Core/Services/Impl/RealtySearchService.cs | C# | apache-2.0 | 3,011 |
///
/// Copyright (c) 2016 Dropbox, Inc. All rights reserved.
///
/// Auto-generated by Stone, do not modify.
///
#import <Foundation/Foundation.h>
#import "DBSerializableProtocol.h"
#import "DBTEAMMembersDeactivateArg.h"
@class DBTEAMMembersRemoveArg;
@class DBTEAMUserSelectorArg;
#pragma mark - API Object
///
/// The `MembersRemoveArg` struct.
///
/// This class implements the `DBSerializable` protocol (serialize and
/// deserialize instance methods), which is required for all Obj-C SDK API route
/// objects.
///
@interface DBTEAMMembersRemoveArg : DBTEAMMembersDeactivateArg <DBSerializable, NSCopying>

#pragma mark - Instance fields

/// If provided, files from the deleted member account will be transferred to
/// this user.
@property (nonatomic, readonly) DBTEAMUserSelectorArg * _Nullable transferDestId;

/// If provided, errors during the transfer process will be sent via email to
/// this user. If the transfer_dest_id argument was provided, then this argument
/// must be provided as well.
@property (nonatomic, readonly) DBTEAMUserSelectorArg * _Nullable transferAdminId;

/// Downgrade the member to a Basic account. The user will retain the email
/// address associated with their Dropbox account and data in their account
/// that is not restricted to team members. In order to keep the account the
/// argument wipe_data should be set to False.
///
/// NOTE(review): declared `_Nonnull` here but nullable in the designated
/// initializer below — presumably a default value is substituted when nil is
/// passed; confirm against the generated implementation / the Stone spec.
@property (nonatomic, readonly) NSNumber * _Nonnull keepAccount;

#pragma mark - Constructors

///
/// Full constructor for the struct (exposes all instance variables).
///
/// @param user Identity of user to remove/suspend.
/// @param wipeData If provided, controls if the user's data will be deleted on
/// their linked devices.
/// @param transferDestId If provided, files from the deleted member account
/// will be transferred to this user.
/// @param transferAdminId If provided, errors during the transfer process will
/// be sent via email to this user. If the transfer_dest_id argument was
/// provided, then this argument must be provided as well.
/// @param keepAccount Downgrade the member to a Basic account. The user will
/// retain the email address associated with their Dropbox account and data in
/// their account that is not restricted to team members. In order to keep the
/// account the argument wipe_data should be set to False.
///
/// @return An initialized instance.
///
- (nonnull instancetype)initWithUser:(DBTEAMUserSelectorArg * _Nonnull)user
                            wipeData:(NSNumber * _Nullable)wipeData
                      transferDestId:(DBTEAMUserSelectorArg * _Nullable)transferDestId
                     transferAdminId:(DBTEAMUserSelectorArg * _Nullable)transferAdminId
                         keepAccount:(NSNumber * _Nullable)keepAccount;

///
/// Convenience constructor (exposes only non-nullable instance variables with
/// no default value).
///
/// @param user Identity of user to remove/suspend.
///
/// @return An initialized instance.
///
- (nonnull instancetype)initWithUser:(DBTEAMUserSelectorArg * _Nonnull)user;

@end

#pragma mark - Serializer Object

///
/// The serialization class for the `MembersRemoveArg` struct.
///
@interface DBTEAMMembersRemoveArgSerializer : NSObject

///
/// Serializes `DBTEAMMembersRemoveArg` instances.
///
/// @param instance An instance of the `DBTEAMMembersRemoveArg` API object.
///
/// @return A json-compatible dictionary representation of the
/// `DBTEAMMembersRemoveArg` API object.
///
+ (NSDictionary * _Nonnull)serialize:(DBTEAMMembersRemoveArg * _Nonnull)instance;

///
/// Deserializes `DBTEAMMembersRemoveArg` instances.
///
/// @param dict A json-compatible dictionary representation of the
/// `DBTEAMMembersRemoveArg` API object.
///
/// @return An instantiation of the `DBTEAMMembersRemoveArg` object.
///
+ (DBTEAMMembersRemoveArg * _Nonnull)deserialize:(NSDictionary * _Nonnull)dict;

@end
| vntodorova/Notes | Notes/Pods/ObjectiveDropboxOfficial/Source/ObjectiveDropboxOfficial/Shared/Generated/ApiObjects/Team/Headers/DBTEAMMembersRemoveArg.h | C | apache-2.0 | 3,881 |
# Copyright (c) 2012-2014 Snowplow Analytics Ltd. All rights reserved.
#
# This program is licensed to you under the Apache License Version 2.0,
# and you may not use this file except in compliance with the Apache License Version 2.0.
# You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the Apache License Version 2.0 is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
# Author:: Alex Dean (mailto:support@snowplowanalytics.com)
# Copyright:: Copyright (c) 2012-2014 Snowplow Analytics Ltd
# License:: Apache License Version 2.0
require 'sluice'
require 'contracts'
# Ruby module to support the load of Snowplow events into Redshift
module Snowplow
  module StorageLoader
    module RedshiftLoader
      include Contracts

      # Constants for the load process
      EVENT_FIELD_SEPARATOR = "\\t"

      # Used to find the altered enriched events
      ALTERED_ENRICHED_PATTERN = /(run=[0-9\-]+\/atomic-events)/

      # Versions 0.5.0 and earlier of Hadoop Shred don't copy atomic.events into the shredded bucket
      OLD_ENRICHED_PATTERN = /0\.[0-5]\.[0-9]/

      # Bundles the SQL needed to load, analyze and vacuum one table
      SqlStatements = Struct.new(:copy, :analyze, :vacuum)

      # Loads the Snowplow event files and shredded type
      # files into Redshift.
      #
      # Parameters:
      # +config+:: the configuration options
      # +target+:: the configuration for this specific target
      # +snowplow_tracking_enabled+:: whether we should emit Snowplow events for this
      Contract Hash, Hash, Bool => nil
      def self.load_events_and_shredded_types(config, target, snowplow_tracking_enabled)
        puts "Loading Snowplow events and shredded types into #{target[:name]} (Redshift cluster)..."

        s3 = Sluice::Storage::S3::new_fog_s3_from(
          config[:aws][:s3][:region],
          config[:aws][:access_key_id],
          config[:aws][:secret_access_key])

        # First let's get our statements for shredding (if any)
        shredded_statements = get_shredded_statements(config, target, s3)

        # Now let's get the manifest statement
        manifest_statement = get_manifest_statement(target[:table], shredded_statements.length)

        # Build our main transaction, consisting of COPY and COPY FROM JSON
        # statements, and potentially also a set of table ANALYZE statements.
        atomic_events_location = if OLD_ENRICHED_PATTERN.match(config[:enrich][:versions][:hadoop_shred])
                                   :enriched
                                 else
                                   :shredded
                                 end

        copy_statements = if atomic_events_location == :shredded
            # Hadoop Shred > 0.5.0 writes atomic-events alongside the shredded
            # types, so atomic events are loaded from the shredded bucket.
            loc = Sluice::Storage::S3::Location.new(config[:aws][:s3][:buckets][:shredded][:good])
            altered_enriched_filepath = Sluice::Storage::S3::list_files(s3, loc).find { |file|
              ALTERED_ENRICHED_PATTERN.match(file.key)
            }
            if altered_enriched_filepath.nil?
              raise DatabaseLoadError, 'Cannot find atomic-events directory in shredded/good'
            end
            # Of the form "run=xxx/atomic-events"
            altered_enriched_subdirectory = ALTERED_ENRICHED_PATTERN.match(altered_enriched_filepath.key)[1]
            [build_copy_from_tsv_statement(config, config[:aws][:s3][:buckets][:shredded][:good] + altered_enriched_subdirectory, target[:table], target[:maxerror])]
          else
            [build_copy_from_tsv_statement(config, config[:aws][:s3][:buckets][:enriched][:good], target[:table], target[:maxerror])]
          end + shredded_statements.map(&:copy) + [manifest_statement]

        # Only used to scrub secrets out of error messages
        credentials = [config[:aws][:access_key_id], config[:aws][:secret_access_key]]

        status = PostgresLoader.execute_transaction(target, copy_statements)
        unless status == []
          raw_error_message = "#{status[1]} error executing COPY statements: #{status[0]}: #{status[2]}"
          error_message = Sanitization.sanitize_message(raw_error_message, credentials)
          if snowplow_tracking_enabled
            Monitoring::Snowplow.instance.track_load_failed(error_message)
          end
          raise DatabaseLoadError, error_message
        end

        if snowplow_tracking_enabled
          Monitoring::Snowplow.instance.track_load_succeeded()
        end

        # If vacuum is requested, build a set of VACUUM statements
        # and execute them in series. VACUUMs cannot be performed
        # inside of a transaction
        if config[:include].include?('vacuum')
          vacuum_statements = [build_vacuum_statement(target[:table])] + shredded_statements.map(&:vacuum).uniq
          vacuum_status = PostgresLoader.execute_queries(target, vacuum_statements)
          unless vacuum_status == []
            raise DatabaseLoadError, Sanitization.sanitize_message("#{vacuum_status[1]} error executing VACUUM statements: #{vacuum_status[0]}: #{vacuum_status[2]}", credentials)
          end
        end

        # ANALYZE statements should be executed after VACUUM statements.
        unless config[:skip].include?('analyze')
          analyze_statements = [build_analyze_statement(target[:table])] + shredded_statements.map(&:analyze).uniq
          analyze_status = PostgresLoader.execute_transaction(target, analyze_statements)
          unless analyze_status == []
            raise DatabaseLoadError, Sanitization.sanitize_message("#{analyze_status[1]} error executing ANALYZE statements: #{analyze_status[0]}: #{analyze_status[2]}", credentials)
          end
        end

        nil
      end

      private

      # Generates an array of SQL statements for loading
      # the shredded types.
      #
      # Parameters:
      # +config+:: the configuration options
      # +target+:: the configuration for this specific target
      # +s3+:: the Fog object for accessing S3
      Contract Hash, Hash, FogStorage => ArrayOf[SqlStatements]
      def self.get_shredded_statements(config, target, s3)
        if config[:skip].include?('shred') # No shredded types to load
          []
        else
          schema = extract_schema(target[:table])
          ShreddedType.discover_shredded_types(s3, config[:aws][:s3][:buckets][:shredded][:good], schema).map { |st|
            jsonpaths_file = st.discover_jsonpaths_file(s3, config[:aws][:s3][:buckets][:jsonpath_assets])
            if jsonpaths_file.nil?
              raise DatabaseLoadError, "Cannot find JSON Paths file to load #{st.s3_objectpath} into #{st.table}"
            end
            SqlStatements.new(
              build_copy_from_json_statement(config, st.s3_objectpath, jsonpaths_file, st.table, target[:maxerror]),
              build_analyze_statement(st.table),
              build_vacuum_statement(st.table)
            )
          }
        end
      end

      # Generates the SQL statement for updating the
      # manifest table
      #
      # Parameters:
      # +events_table+:: the name of the events table being loaded
      # +shredded_cardinality+:: the number of shredded child events and contexts tables loaded in this run
      Contract String, Num => String
      def self.get_manifest_statement(events_table, shredded_cardinality)
        s = extract_schema(events_table)
        schema = if s.nil? then "" else "#{s}." end

        "INSERT INTO #{schema}manifest
          SELECT etl_tstamp, sysdate AS commit_tstamp, count(*) AS event_count, #{shredded_cardinality} AS shredded_cardinality
          FROM #{events_table}
          WHERE etl_tstamp IS NOT null
          GROUP BY 1
          ORDER BY etl_tstamp DESC
          LIMIT 1;
        "
      end

      # Looks at the events table to determine if there's
      # a schema we should use for the shredded type tables.
      #
      # Parameters:
      # +events_table+:: the events table to load into
      Contract String => Maybe[String]
      def self.extract_schema(events_table)
        parts = events_table.split(/\./)
        if parts.size > 1 then parts[0] else nil end
      end

      # Replaces an initial "s3n" with "s3" in an S3 path
      Contract String => String
      def self.fix_s3_path(path)
        path.gsub(/^s3n/, 's3')
      end

      # Constructs the COPY statement to load the enriched
      # event TSV files into Redshift.
      #
      # Parameters:
      # +config+:: the configuration options
      # +s3_objectpath+:: the S3 path to the files containing
      #                   this shredded type
      # +table+:: the name of the table to load, including
      #           optional schema
      # +maxerror+:: how many errors to allow for this COPY
      Contract Hash, String, String, Num => String
      def self.build_copy_from_tsv_statement(config, s3_objectpath, table, maxerror)

        # Assemble the relevant parameters for the bulk load query
        credentials = get_credentials(config)
        compression_format = get_compression_format(config[:enrich][:output_compression])
        fixed_objectpath = fix_s3_path(s3_objectpath)

        # Bug fix: this previously checked only for the misspelt 'compudate',
        # so the documented 'compupdate' option never took effect. Both
        # spellings are now honored for backward compatibility.
        comprows =
          if (config[:include] & ['compupdate', 'compudate']).empty?
            ""
          else
            "COMPUPDATE COMPROWS #{config[:comprows]}"
          end

        "COPY #{table} FROM '#{fixed_objectpath}' CREDENTIALS '#{credentials}' REGION AS '#{config[:aws][:s3][:region]}' DELIMITER '#{EVENT_FIELD_SEPARATOR}' MAXERROR #{maxerror} EMPTYASNULL FILLRECORD TRUNCATECOLUMNS #{comprows} TIMEFORMAT 'auto' ACCEPTINVCHARS #{compression_format};"
      end

      # Constructs the COPY FROM JSON statement required for
      # loading a shredded JSON into a dedicated table; also
      # returns the table name.
      #
      # Parameters:
      # +config+:: the configuration options
      # +s3_objectpath+:: the S3 path to the files containing
      #                   this shredded type
      # +jsonpaths_file+:: the file on S3 containing the JSON Path
      #                    statements to load the JSON
      # +table+:: the name of the table to load, including
      #           optional schema
      # +maxerror+:: how many errors to allow for this COPY
      Contract Hash, String, String, String, Num => String
      def self.build_copy_from_json_statement(config, s3_objectpath, jsonpaths_file, table, maxerror)
        credentials = get_credentials(config)
        compression_format = get_compression_format(config[:enrich][:output_compression])
        fixed_objectpath = fix_s3_path(s3_objectpath)

        # TODO: what about COMPUPDATE/ROWS?
        "COPY #{table} FROM '#{fixed_objectpath}' CREDENTIALS '#{credentials}' JSON AS '#{jsonpaths_file}' REGION AS '#{config[:aws][:s3][:region]}' MAXERROR #{maxerror} TRUNCATECOLUMNS TIMEFORMAT 'auto' ACCEPTINVCHARS #{compression_format};"
      end

      # Builds an ANALYZE statement for the
      # given table.
      #
      # Parameters:
      # +table+:: the name of the table to analyze
      Contract String => String
      def self.build_analyze_statement(table)
        "ANALYZE #{table};"
      end

      # Builds a VACUUM statement for the
      # given table.
      #
      # Parameters:
      # +table+:: the name of the table to vacuum
      Contract String => String
      def self.build_vacuum_statement(table)
        "VACUUM SORT ONLY #{table};"
      end

      # Constructs the credentials expression for a
      # Redshift COPY statement.
      #
      # Parameters:
      # +config+:: the configuration options
      Contract Hash => String
      def self.get_credentials(config)
        "aws_access_key_id=#{config[:aws][:access_key_id]};aws_secret_access_key=#{config[:aws][:secret_access_key]}"
      end

      # Returns the compression format clause for a
      # Redshift COPY statement.
      #
      # Fixes the previously non-exhaustive match: an unrecognized codec now
      # fails fast with a clear error instead of returning nil (which violated
      # this method's String contract).
      #
      # Parameters:
      # +output_codec+:: the output codec
      Contract String => String
      def self.get_compression_format(output_codec)
        case output_codec
        when 'NONE'
          ''
        when 'GZIP'
          'GZIP'
        else
          raise ArgumentError, "Unsupported enrich output_compression: #{output_codec.inspect} (expected 'NONE' or 'GZIP')"
        end
      end

    end
  end
end
| atotech/snowplow | 4-storage/storage-loader/lib/snowplow-storage-loader/redshift_loader.rb | Ruby | apache-2.0 | 12,191 |
//{{NO_DEPENDENCIES}}
// Microsoft Developer Studio generated include file.
// Used by Joystick.rc
//
#define IDI_MAIN 102
#define IDD_JOYST_IMM 103
#define IDR_ACCELERATOR1 103
#define IDC_CLOSE 1001
#define IDC_X_AXIS 1010
#define IDC_Y_AXIS 1011
#define IDC_Z_AXIS 1012
#define IDC_X_AXIS_TEXT 1013
#define IDC_Y_AXIS_TEXT 1014
#define IDC_Z_AXIS_TEXT 1015
#define IDC_X_ROT_TEXT 1016
#define IDC_Y_ROT_TEXT 1017
#define IDC_Z_ROT_TEXT 1018
#define IDC_SLIDER0_TEXT 1019
#define IDC_X_ROT 1020
#define IDC_Y_ROT 1021
#define IDC_Z_ROT 1022
#define IDC_SLIDER1_TEXT 1023
#define IDC_POV0_TEXT 1024
#define IDC_POV1_TEXT 1025
#define IDC_POV2_TEXT 1026
#define IDC_POV3_TEXT 1027
#define IDC_SLIDER0 1030
#define IDC_SLIDER1 1031
#define IDC_POV 1040
#define IDC_POV0 1040
#define IDC_BUTTONS 1041
#define IDC_POV1 1042
#define IDC_POV2 1043
#define IDC_POV3 1044
// Next default values for new objects
//
#ifdef APSTUDIO_INVOKED
#ifndef APSTUDIO_READONLY_SYMBOLS
#define _APS_NEXT_RESOURCE_VALUE 104
#define _APS_NEXT_COMMAND_VALUE 40001
#define _APS_NEXT_CONTROL_VALUE 1025
#define _APS_NEXT_SYMED_VALUE 101
#endif
#endif
| MorrigansWings/GamePhysics | PlanetSim/DirectX/Samples/C++/DirectInput/Joystick/resource.h | C | apache-2.0 | 1,744 |
package org.pikater.core.ontology.subtrees.datasource;
import jade.content.AgentAction;
/**
 * Ontology agent action requesting the filesystem path of a data source.
 *
 * Carries the id of the task that produced the data source and the kind of
 * data source requested; the receiving agent resolves these to a path
 * (resolution logic is not visible here). Originally authored by Kuba,
 * 2.5.2014.
 */
public class GetDataSourcePath implements AgentAction {

	/** Serialization version for JADE content transport. */
	private static final long serialVersionUID = 2808234460031853327L;

	// Identifier of the task whose data source path is requested.
	private String taskId;
	// Kind of data source requested; semantics defined by the receiver.
	private String type;

	/** @return the id of the task that produced the data source */
	public String getTaskId() {
		return taskId;
	}
	public void setTaskId(String taskId) {
		this.taskId = taskId;
	}

	/** @return the requested data source type */
	public String getType() {
		return type;
	}
	public void setType(String type) {
		this.type = type;
	}
}
| tomkren/pikater | src/org/pikater/core/ontology/subtrees/datasource/GetDataSourcePath.java | Java | apache-2.0 | 611 |
/*
* Copyright (c) 2005 - 2014, WSO2 Inc. (http://www.wso2.org) All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.wso2.siddhi.query.api.expression.function;
import org.wso2.siddhi.query.api.expression.Expression;
import java.util.Arrays;
/**
 * Expression node representing a named function applied to a list of
 * parameter expressions (e.g. {@code avg(price)}).
 */
public class AttributeFunction extends Expression {
    protected String functionName;
    protected Expression[] parameters;

    public AttributeFunction(String functionName, Expression... parameters) {
        this.functionName = functionName;
        this.parameters = parameters;
    }

    /** @return the function's name */
    public String getFunction() {
        return functionName;
    }

    public void setParameters(Expression[] parameters) {
        this.parameters = parameters;
    }

    /** @return the parameter expressions passed to the function */
    public Expression[] getParameters() {
        return parameters;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("AttributeFunction{");
        sb.append("functionName='").append(functionName).append('\'');
        sb.append(", parameters=").append(Arrays.toString(parameters));
        sb.append('}');
        return sb.toString();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        AttributeFunction other = (AttributeFunction) o;
        boolean sameName = (functionName == null)
                ? other.functionName == null
                : functionName.equals(other.functionName);
        return sameName && Arrays.equals(parameters, other.parameters);
    }

    @Override
    public int hashCode() {
        // Arrays.hashCode(null) == 0, matching the previous null guard.
        int hash = (functionName == null) ? 0 : functionName.hashCode();
        return 31 * hash + Arrays.hashCode(parameters);
    }
} | sacjaya/siddhi-3 | modules/siddhi-query-api/src/main/java/org/wso2/siddhi/query/api/expression/function/AttributeFunction.java | Java | apache-2.0 | 2,320 |
package org.gradle.test.performance.mediummonolithicjavaproject.p264;
import org.gradle.test.performance.mediummonolithicjavaproject.p263.Production5279;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Setter/getter round-trip tests for the generated bean {@code Production5282}:
 * each test assigns one property and asserts the getter returns the same
 * value/instance.
 *
 * NOTE(review): this class appears to be auto-generated performance-test
 * scaffolding (package and repo naming suggest a Gradle benchmark project);
 * prefer regenerating over hand-editing.
 */
public class Test5282 {

    Production5282 objectUnderTest = new Production5282();

    @Test
    public void testProperty0() {
        Production5279 value = new Production5279();
        objectUnderTest.setProperty0(value);
        assertEquals(value, objectUnderTest.getProperty0());
    }

    @Test
    public void testProperty1() {
        Production5280 value = new Production5280();
        objectUnderTest.setProperty1(value);
        assertEquals(value, objectUnderTest.getProperty1());
    }

    @Test
    public void testProperty2() {
        Production5281 value = new Production5281();
        objectUnderTest.setProperty2(value);
        assertEquals(value, objectUnderTest.getProperty2());
    }

    @Test
    public void testProperty3() {
        String value = "value";
        objectUnderTest.setProperty3(value);
        assertEquals(value, objectUnderTest.getProperty3());
    }

    @Test
    public void testProperty4() {
        String value = "value";
        objectUnderTest.setProperty4(value);
        assertEquals(value, objectUnderTest.getProperty4());
    }

    @Test
    public void testProperty5() {
        String value = "value";
        objectUnderTest.setProperty5(value);
        assertEquals(value, objectUnderTest.getProperty5());
    }

    @Test
    public void testProperty6() {
        String value = "value";
        objectUnderTest.setProperty6(value);
        assertEquals(value, objectUnderTest.getProperty6());
    }

    @Test
    public void testProperty7() {
        String value = "value";
        objectUnderTest.setProperty7(value);
        assertEquals(value, objectUnderTest.getProperty7());
    }

    @Test
    public void testProperty8() {
        String value = "value";
        objectUnderTest.setProperty8(value);
        assertEquals(value, objectUnderTest.getProperty8());
    }

    @Test
    public void testProperty9() {
        String value = "value";
        objectUnderTest.setProperty9(value);
        assertEquals(value, objectUnderTest.getProperty9());
    }
} | oehme/analysing-gradle-performance | my-app/src/test/java/org/gradle/test/performance/mediummonolithicjavaproject/p264/Test5282.java | Java | apache-2.0 | 2,259 |
"""Support for Lutron Caseta scenes."""
from typing import Any
from homeassistant.components.scene import Scene
from .const import BRIDGE_LEAP, DOMAIN as CASETA_DOMAIN
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up the Lutron Caseta scene platform.

    Adds scenes from the Caseta bridge associated with the config_entry as
    scene entities.
    """
    data = hass.data[CASETA_DOMAIN][config_entry.entry_id]
    bridge = data[BRIDGE_LEAP]
    scenes = bridge.get_scenes()
    # scenes maps scene id -> scene data; build one entity per scene.
    entities = [LutronCasetaScene(scenes[scene_id], bridge) for scene_id in scenes]
    async_add_entities(entities, True)
class LutronCasetaScene(Scene):
    """A single scene exposed by a Lutron Caseta bridge."""

    def __init__(self, scene, bridge):
        """Store the scene metadata and the bridge used to activate it.

        scene: mapping with at least "name" and "scene_id" keys.
        bridge: Caseta bridge client able to activate scenes.
        """
        self._bridge = bridge
        self._scene_id = scene["scene_id"]
        self._scene_name = scene["name"]

    @property
    def name(self):
        """Name of the scene as reported by the bridge."""
        return self._scene_name

    async def async_activate(self, **kwargs: Any) -> None:
        """Ask the bridge to activate this scene."""
        await self._bridge.activate_scene(self._scene_id)
| turbokongen/home-assistant | homeassistant/components/lutron_caseta/scene.py | Python | apache-2.0 | 1,243 |
/**
* Copyright (C) 2009 Progress Software, Inc. All rights reserved.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.mop;
import java.io.File;
import java.io.IOException;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.TreeSet;
import junit.framework.TestCase;
import org.fusesource.mop.support.Database;
/**
* @author chirino
*/
/**
 * Exercises {@code Database} persistence across close/reopen cycles:
 * artifacts installed in two separate sessions must both be findable by
 * artifactId in a third, read-only session.  (Note: "Directroy" is the
 * spelling used by the Database API itself.)
 */
public class DatabaseTest extends TestCase {

    public void testDatabase() throws IOException {
        // Phase 1: start from a clean on-disk database and install two
        // org.fusesource.mop artifacts.
        Database database = new Database();
        File directroy = new File("./target/test-data");
        database.setDirectroy(directroy);
        database.delete();
        database.open(false);
        LinkedHashSet<String> artifacts = new LinkedHashSet<String>();
        artifacts.add("org.fusesource.mop:test:jar:1.0");
        artifacts.add("org.fusesource.mop:foo:jar:1.0");
        database.install(artifacts);
        database.close();

        // Phase 2: reopen the same directory and install two org.test
        // artifacts; state from phase 1 must survive the close/reopen.
        database = new Database();
        database.setDirectroy(directroy);
        database.open(false);
        artifacts = new LinkedHashSet<String>();
        artifacts.add("org.test:test:jar:1.0");
        artifacts.add("org.test:other:jar:1.0");
        database.install(artifacts);
        database.close();

        // Phase 3: reopen read-only and verify that a lookup by the shared
        // artifactId "test" returns exactly the two matching artifacts,
        // one from each earlier session.
        database = new Database();
        database.setDirectroy(directroy);
        database.open(true);
        Set<String> rc = new TreeSet<String>(database.findByArtifactId("test"));
        artifacts = new LinkedHashSet<String>();
        artifacts.add("org.fusesource.mop:test:jar:1.0");
        artifacts.add("org.test:test:jar:1.0");
        assertEquals(artifacts,rc);
        database.close();
    }
}
<?php
function forums_delete($course) {
global $db;
$sql = "SELECT * FROM ".TABLE_PREFIX."forums_courses WHERE course_id=$course";
$f_result = mysql_query($sql, $db);
while ($forum = mysql_fetch_assoc($f_result)) {
$forum_id = $forum['forum_id'];
$sql = "SELECT COUNT(*) AS cnt FROM ".TABLE_PREFIX."forums_courses WHERE forum_id=$forum_id";
$result = mysql_query($sql, $db);
$row = mysql_fetch_assoc($result);
if ($row['cnt'] == 1) {
$sql = "SELECT post_id FROM ".TABLE_PREFIX."forums_threads WHERE forum_id=$forum_id";
$result = mysql_query($sql, $db);
while ($row = mysql_fetch_assoc($result)) {
$sql = "DELETE FROM ".TABLE_PREFIX."forums_accessed WHERE post_id=$row[post_id]";
$result2 = mysql_query($sql, $db);
}
$sql = "DELETE FROM ".TABLE_PREFIX."forums_subscriptions WHERE forum_id=$forum_id";
$result = mysql_query($sql, $db);
$sql = "DELETE FROM ".TABLE_PREFIX."forums_threads WHERE forum_id=$forum_id";
$result = mysql_query($sql, $db);
$sql = "DELETE FROM ".TABLE_PREFIX."forums WHERE forum_id=$forum_id";
$result = mysql_query($sql, $db);
$sql = "DELETE FROM ".TABLE_PREFIX."forums_courses WHERE forum_id=$forum_id";
$result = mysql_query($sql, $db);
} else if ($row['cnt'] > 1) {
// this is a shared forum:
// debug('unsubscribe all the students who will not be able to access this forum anymore.');
$sql = "SELECT course_id FROM ".TABLE_PREFIX."forums_courses WHERE forum_id=$forum[forum_id] AND course_id <> $course";
$result2 = mysql_query($sql, $db);
while ($row2 = mysql_fetch_assoc($result2)) {
$courses[] = $row2['course_id'];
}
$courses_list = implode(',', $courses);
// list of all the students who are in other courses as well
$sql = "SELECT member_id FROM ".TABLE_PREFIX."course_enrollment WHERE course_id IN ($courses_list)";
$result2 = mysql_query($sql, $db);
while ($row2 = mysql_fetch_assoc($result2)) {
$students[] = $row2['member_id'];
}
$students_list = implode(',', $students);
if ($students_list) {
// remove the subscriptions
$sql = "SELECT post_id FROM ".TABLE_PREFIX."forums_threads WHERE forum_id=$forum[forum_id]";
$result2 = mysql_query($sql, $db);
while ($row2 = mysql_fetch_array($result2)) {
$sql = "DELETE FROM ".TABLE_PREFIX."forums_accessed WHERE post_id=$row2[post_id] AND member_id NOT IN ($students_list)";
$result3 = mysql_query($sql, $db);
}
$sql = "DELETE FROM ".TABLE_PREFIX."forums_subscriptions WHERE forum_id=$forum[forum_id] AND member_id NOT IN ($students_list)";
$result3 = mysql_query($sql, $db);
}
$sql = "DELETE FROM ".TABLE_PREFIX."forums_courses WHERE forum_id=$forum[forum_id] AND course_id=$course";
$result = mysql_query($sql, $db);
}
}
$sql = "OPTIMIZE TABLE ".TABLE_PREFIX."forums_threads";
$result = mysql_query($sql, $db);
}
?> | CaviereFabien/Test | ATutor/mods/_standard/forums/module_delete.php | PHP | apache-2.0 | 2,969 |
#!/usr/bin/env python
###
# This script sets up a Spark cluster on Google Compute Engine
# Sigmoidanalytics.com
###
from __future__ import with_statement
import logging
import os
import pipes
import random
import shutil
import subprocess
import sys
import tempfile
import time
import commands
import urllib2
from optparse import OptionParser
from sys import stderr
import shlex
import getpass
import threading
import json
###
# Make sure gcutil is installed and authenticated
# Usage: spark_gce.py <project> <no-slaves> <slave-type> <master-type> <identity-file> <zone> <cluster-name>
# Usage: spark_gce.py <project> <cluster-name> destroy
###
# Cluster configuration; module-level globals populated by read_args()
# from the command line before any other function runs.
identity_file = ""   # path to the SSH private key used for all remote commands
slave_no = ""        # number of slave instances (int after read_args)
slave_type = ""      # GCE machine type for slaves
master_type = ""     # GCE machine type for the master
zone = ""            # GCE zone for all instances
cluster_name = ""    # prefix for instance/network/disk names
username = ""        # local user name, reused as the remote SSH user
project = ""         # GCE project id
def read_args():
    # Parse sys.argv and populate the module-level configuration globals.
    # Two invocation forms are supported:
    #   spark_gce.py <project> <no-slaves> <slave-type> <master-type> <identity-file> <zone> <cluster-name>
    #   spark_gce.py <project> <cluster-name> destroy
    # The "destroy" form deletes the cluster's instances and exits here.
    global identity_file
    global slave_no
    global slave_type
    global master_type
    global zone
    global cluster_name
    global username
    global project
    if len(sys.argv) == 8:
        # Launch form: capture everything needed to create the cluster.
        project = sys.argv[1]
        slave_no = int(sys.argv[2])
        slave_type = sys.argv[3]
        master_type = sys.argv[4]
        identity_file = sys.argv[5]
        zone = sys.argv[6]
        cluster_name = sys.argv[7]
        username = getpass.getuser()
    elif len(sys.argv) == 4 and sys.argv[3].lower() == "destroy":
        # Destroy form: delete every instance named "<cluster>-master" or
        # containing "<cluster>-slave", then exit the script.
        print 'Destroying cluster ' + sys.argv[2]
        project = sys.argv[1]
        cluster_name = sys.argv[2]
        try:
            command = 'gcloud compute --project ' + project + ' instances list --format json'
            output = subprocess.check_output(command, shell=True)
            data = json.loads(output)
            master_nodes=[]
            slave_nodes=[]
            for instance in data:
                try:
                    host_name = instance['name']
                    host_ip = instance['networkInterfaces'][0]['accessConfigs'][0]['natIP']
                    if host_name == cluster_name + '-master':
                        command = 'gcloud compute instances delete ' + host_name + ' --project ' + project
                        command = shlex.split(command)
                        subprocess.call(command)
                    elif cluster_name + '-slave' in host_name:
                        command = 'gcloud compute instances delete ' + host_name + ' --project ' + project
                        command = shlex.split(command)
                        subprocess.call(command)
                except:
                    # Instance without an external NAT IP or with an
                    # unexpected JSON shape - skip it.
                    pass
        except:
            print "Failed to Delete instances"
            sys.exit(1)
        sys.exit(0)
    else:
        print '# Usage: spark_gce.py <project> <no-slaves> <slave-type> <master-type> <identity-file> <zone> <cluster-name>'
        print '# Usage: spark_gce.py <project> <cluster-name> destroy'
        sys.exit(0)
def setup_network():
print '[ Setting up Network & Firewall Entries ]'
try:
command = 'gcloud compute --project=' + project + ' networks create "' + cluster_name + '-network" --range "10.240.0.0/16"'
command = shlex.split(command)
subprocess.call(command)
#Uncomment the above and comment the below section if you don't want to open all ports for public.
command = 'gcloud compute firewall-rules delete internal --project '+ project
command = 'gcloud compute firewall-rules create internal --network ' + cluster_name + '-network --allow tcp udp icmp --project '+ project
command = shlex.split(command)
subprocess.call(command)
except OSError:
print "Failed to setup Network & Firewall. Exiting.."
sys.exit(1)
def launch_master():
    # Create the master GCE instance (CentOS 6 image, standard boot disk)
    # on the cluster's private network.
    print '[ Launching Master ]'
    command = 'gcloud compute --project "' + project + '" instances create "' + cluster_name + '-master" --zone "' + zone + '" --machine-type "' + master_type + '" --network "' + cluster_name + '-network" --maintenance-policy "MIGRATE" --scopes "https://www.googleapis.com/auth/devstorage.read_only" --image "https://www.googleapis.com/compute/v1/projects/centos-cloud/global/images/centos-6-v20141218" --boot-disk-type "pd-standard" --boot-disk-device-name "' + cluster_name + '-md"'
    command = shlex.split(command)
    subprocess.call(command)
def launch_slaves():
    # Create slave instances "<cluster>-slave1" .. "<cluster>-slaveN"
    # sequentially, with the same image/network settings as the master.
    print '[ Launching Slaves ]'
    for s_id in range(1,slave_no+1):
        command = 'gcloud compute --project "' + project + '" instances create "' + cluster_name + '-slave' + str(s_id) + '" --zone "' + zone + '" --machine-type "' + slave_type + '" --network "' + cluster_name + '-network" --maintenance-policy "MIGRATE" --scopes "https://www.googleapis.com/auth/devstorage.read_only" --image "https://www.googleapis.com/compute/v1/projects/centos-cloud/global/images/centos-6-v20141218" --boot-disk-type "pd-standard" --boot-disk-device-name "' + cluster_name + '-s' + str(s_id) + 'd"'
        command = shlex.split(command)
        subprocess.call(command)
def launch_cluster():
    # Provision the whole cluster: network + firewall, then master, then slaves.
    print '[ Creating the Cluster ]'
    setup_network()
    launch_master()
    launch_slaves()
def check_gcloud():
    # Verify that the `gcloud` CLI is installed and runnable before doing
    # anything else; exits with status 1 and setup instructions otherwise.
    myexec = "gcloud"
    print '[ Verifying gcloud ]'
    try:
        subprocess.call([myexec, 'info'])
    except OSError:
        print "%s executable not found. \n# Make sure gcloud is installed and authenticated\nPlease follow https://cloud.google.com/compute/docs/gcloud-compute/" % myexec
        sys.exit(1)
def get_cluster_ips():
    """Return ([master_ips], [slave_ips]) for the cluster via `gcloud ... list`."""
    output = subprocess.check_output(
        'gcloud compute --project ' + project + ' instances list --format json',
        shell=True)
    instances = json.loads(output)

    master_nodes = []
    slave_nodes = []
    for instance in instances:
        try:
            host_name = instance['name']
            host_ip = instance['networkInterfaces'][0]['accessConfigs'][0]['natIP']
        except:
            # No external NAT IP / unexpected JSON shape: skip this instance.
            continue
        if host_name == cluster_name + '-master':
            master_nodes.append(host_ip)
        elif cluster_name + '-slave' in host_name:
            slave_nodes.append(host_ip)

    # Return all the instances
    return (master_nodes, slave_nodes)
def enable_sudo(master,command):
    # Run `command` on `master` over ssh with a forced TTY (-t), which lets
    # sudo work on images whose sudoers file sets "requiretty".
    # The triple-quoted block below is leftover disabled code from the
    # original author; as the first statement it is a no-op string literal.
    '''
    ssh_command(master,"echo \"import os\" > setuid.py ")
    ssh_command(master,"echo \"import sys\" >> setuid.py")
    ssh_command(master,"echo \"import commands\" >> setuid.py")
    ssh_command(master,"echo \"command=sys.argv[1]\" >> setuid.py")
    ssh_command(master,"echo \"os.setuid(os.geteuid())\" >> setuid.py")
    ssh_command(master,"echo \"print commands.getstatusoutput(\"command\")\" >> setuid.py")
    '''
    os.system("ssh -i " + identity_file + " -t -o 'UserKnownHostsFile=/dev/null' -o 'CheckHostIP=no' -o 'StrictHostKeyChecking no' "+ username + "@" + master + " '" + command + "'")
def ssh_thread(host, command):
    # Thread entry point: run `command` on `host` through enable_sudo().
    enable_sudo(host, command)
def install_java(master_nodes,slave_nodes):
print '[ Installing Java and Development Tools ]'
master = master_nodes[0]
master_thread = threading.Thread(target=ssh_thread, args=(master,"sudo yum install -y java-1.7.0-openjdk;sudo yum install -y java-1.7.0-openjdk-devel;sudo yum groupinstall \'Development Tools\' -y"))
master_thread.start()
#ssh_thread(master,"sudo yum install -y java-1.7.0-openjdk")
for slave in slave_nodes:
slave_thread = threading.Thread(target=ssh_thread, args=(slave,"sudo yum install -y java-1.7.0-openjdk;sudo yum install -y java-1.7.0-openjdk-devel;sudo yum groupinstall \'Development Tools\' -y"))
slave_thread.start()
#ssh_thread(slave,"sudo yum install -y java-1.7.0-openjdk")
slave_thread.join()
master_thread.join()
def ssh_command(host, command):
    # Run `command` on `host` over ssh, skipping all host-key checks.
    # Output is captured (and discarded) by commands.getstatusoutput.
    ssh_opts = ("-o 'UserKnownHostsFile=/dev/null' -o 'CheckHostIP=no' "
                "-o 'StrictHostKeyChecking no'")
    commands.getstatusoutput(
        "ssh -i " + identity_file + " " + ssh_opts + " " +
        username + "@" + host + " '" + command + "'")
def deploy_keys(master_nodes,slave_nodes):
    # Generate an RSA keypair on the master, authorize it locally, then ship
    # the whole ~/.ssh directory (as a tarball) to every slave so all nodes
    # can ssh to each other without prompts. known_hosts is pre-seeded via
    # ssh-keyscan to avoid interactive host-key confirmations.
    print '[ Generating SSH Keys on Master ]'
    key_file = os.path.basename(identity_file)
    master = master_nodes[0]
    ssh_command(master,"ssh-keygen -q -t rsa -N \"\" -f ~/.ssh/id_rsa")
    ssh_command(master,"cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys")
    # Copy the local identity file to the master so it can scp to slaves.
    os.system("scp -i " + identity_file + " -oUserKnownHostsFile=/dev/null -oCheckHostIP=no -oStrictHostKeyChecking=no -o 'StrictHostKeyChecking no' "+ identity_file + " " + username + "@" + master + ":")
    ssh_command(master,"chmod 600 " + key_file)
    ssh_command(master,"tar czf .ssh.tgz .ssh")
    # Seed known_hosts with the master's own IP and hostname.
    ssh_command(master,"ssh-keyscan -H $(/sbin/ifconfig eth0 | grep \"inet addr:\" | cut -d: -f2 | cut -d\" \" -f1) >> ~/.ssh/known_hosts")
    ssh_command(master,"ssh-keyscan -H $(cat /etc/hosts | grep $(/sbin/ifconfig eth0 | grep \"inet addr:\" | cut -d: -f2 | cut -d\" \" -f1) | cut -d\" \" -f2) >> ~/.ssh/known_hosts")
    print '[ Transfering SSH keys to slaves ]'
    for slave in slave_nodes:
        # master pushes its .ssh bundle to the slave, which unpacks it.
        print commands.getstatusoutput("ssh -i " + identity_file + " -oUserKnownHostsFile=/dev/null -oCheckHostIP=no -oStrictHostKeyChecking=no " + username + "@" + master + " 'scp -i " + key_file + " -oStrictHostKeyChecking=no .ssh.tgz " + username +"@" + slave + ":'")
        ssh_command(slave,"tar xzf .ssh.tgz")
        ssh_command(master,"ssh-keyscan -H " + slave + " >> ~/.ssh/known_hosts")
        ssh_command(slave,"ssh-keyscan -H $(cat /etc/hosts | grep $(/sbin/ifconfig eth0 | grep \"inet addr:\" | cut -d: -f2 | cut -d\" \" -f1) | cut -d\" \" -f2) >> ~/.ssh/known_hosts")
        ssh_command(slave,"ssh-keyscan -H $(/sbin/ifconfig eth0 | grep \"inet addr:\" | cut -d: -f2 | cut -d\" \" -f1) >> ~/.ssh/known_hosts")
def attach_drive(master_nodes,slave_nodes):
print '[ Adding new 500GB drive on Master ]'
master = master_nodes[0]
command='gcloud compute --project="' + project + '" disks create "' + cluster_name + '-m-disk" --size 500GB --type "pd-standard" --zone ' + zone
command = shlex.split(command)
subprocess.call(command)
command = 'gcloud compute --project="' + project + '" instances attach-disk ' + cluster_name + '-master --device-name "' + cluster_name + '-m-disk" --disk ' + cluster_name + '-m-disk --zone ' + zone
command = shlex.split(command)
subprocess.call(command)
master_thread = threading.Thread(target=ssh_thread, args=(master,"sudo mkfs.ext3 /dev/disk/by-id/google-"+ cluster_name + "-m-disk " + " -F < /dev/null"))
master_thread.start()
print '[ Adding new 500GB drive on Slaves ]'
i = 1
for slave in slave_nodes:
master = slave
command='gcloud compute --project="' + project + '" disks create "' + cluster_name + '-s' + str(i) + '-disk" --size 500GB --type "pd-standard" --zone ' + zone
command = shlex.split(command)
subprocess.call(command)
command = 'gcloud compute --project="' + project + '" instances attach-disk ' + cluster_name + '-slave' + str(i) + ' --disk ' + cluster_name + '-s' + str(i) + '-disk --device-name "' + cluster_name + '-s' + str(i) + '-disk" --zone ' + zone
command = shlex.split(command)
subprocess.call(command)
slave_thread = threading.Thread(target=ssh_thread, args=(slave,"sudo mkfs.ext3 /dev/disk/by-id/google-" + cluster_name + "-s" + str(i) + "-disk -F < /dev/null"))
slave_thread.start()
i=i+1
slave_thread.join()
master_thread.join()
print '[ Mounting new Volume ]'
enable_sudo(master_nodes[0],"sudo mount /dev/disk/by-id/google-"+ cluster_name + "-m-disk /mnt")
enable_sudo(master_nodes[0],"sudo chown " + username + ":" + username + " /mnt")
i=1
for slave in slave_nodes:
enable_sudo(slave,"sudo mount /dev/disk/by-id/google-"+ cluster_name + "-s" + str(i) +"-disk /mnt")
enable_sudo(slave,"sudo chown " + username + ":" + username + " /mnt")
i=i+1
print '[ All volumns mounted, will be available at /mnt ]'
def setup_spark(master_nodes,slave_nodes):
    # Download Spark 1.2.0 + Scala onto the master, write spark-env.sh and
    # the slaves file, rsync the installation to every slave, start the
    # standalone cluster and then set up HDFS via setup_hadoop().
    print '[ Downloading Binaries ]'
    master = master_nodes[0]
    ssh_command(master,"rm -fr sigmoid")
    ssh_command(master,"mkdir sigmoid")
    ssh_command(master,"cd sigmoid;wget https://s3.amazonaws.com/sigmoidanalytics-builds/spark/1.2.0/spark-1.2.0-bin-cdh4.tgz")
    ssh_command(master,"cd sigmoid;wget https://s3.amazonaws.com/sigmoidanalytics-builds/spark/0.9.1/gce/scala.tgz")
    ssh_command(master,"cd sigmoid;tar zxf spark-1.2.0-bin-cdh4.tgz;rm spark-1.2.0-bin-cdh4.tgz")
    ssh_command(master,"cd sigmoid;tar zxf scala.tgz;rm scala.tgz")
    print '[ Updating Spark Configurations ]'
    # Build spark-env.sh line by line; PUT_MASTER_IP_HERE is substituted
    # with the master's private IP further below.
    ssh_command(master,"cd sigmoid;cd spark-1.2.0-bin-cdh4/conf;cp spark-env.sh.template spark-env.sh")
    ssh_command(master,"cd sigmoid;cd spark-1.2.0-bin-cdh4/conf;echo 'export SCALA_HOME=\"/home/`whoami`/sigmoid/scala\"' >> spark-env.sh")
    ssh_command(master,"cd sigmoid;cd spark-1.2.0-bin-cdh4/conf;echo 'export SPARK_MEM=2454m' >> spark-env.sh")
    ssh_command(master,"cd sigmoid;cd spark-1.2.0-bin-cdh4/conf;echo \"SPARK_JAVA_OPTS+=\\\" -Dspark.local.dir=/mnt/spark \\\"\" >> spark-env.sh")
    ssh_command(master,"cd sigmoid;cd spark-1.2.0-bin-cdh4/conf;echo 'export SPARK_JAVA_OPTS' >> spark-env.sh")
    ssh_command(master,"cd sigmoid;cd spark-1.2.0-bin-cdh4/conf;echo 'export SPARK_MASTER_IP=PUT_MASTER_IP_HERE' >> spark-env.sh")
    ssh_command(master,"cd sigmoid;cd spark-1.2.0-bin-cdh4/conf;echo 'export MASTER=spark://PUT_MASTER_IP_HERE:7077' >> spark-env.sh")
    ssh_command(master,"cd sigmoid;cd spark-1.2.0-bin-cdh4/conf;echo 'export JAVA_HOME=/usr/lib/jvm/java-1.7.0-openjdk.x86_64' >> spark-env.sh")
    for slave in slave_nodes:
        ssh_command(master,"echo " + slave + " >> sigmoid/spark-1.2.0-bin-cdh4/conf/slaves")
    # Replace the placeholder with the master's actual eth0 address.
    ssh_command(master,"sed -i \"s/PUT_MASTER_IP_HERE/$(/sbin/ifconfig eth0 | grep \"inet addr:\" | cut -d: -f2 | cut -d\" \" -f1)/g\" sigmoid/spark-1.2.0-bin-cdh4/conf/spark-env.sh")
    ssh_command(master,"chmod +x sigmoid/spark-1.2.0-bin-cdh4/conf/spark-env.sh")
    print '[ Rsyncing Spark to all slaves ]'
    #Change permissions
    enable_sudo(master,"sudo chown " + username + ":" + username + " /mnt")
    i=1
    for slave in slave_nodes:
        enable_sudo(slave,"sudo chown " + username + ":" + username + " /mnt")
    for slave in slave_nodes:
        ssh_command(master,"rsync -za /home/" + username + "/sigmoid " + slave + ":")
        ssh_command(slave,"mkdir /mnt/spark")
    ssh_command(master,"mkdir /mnt/spark")
    print '[ Starting Spark Cluster ]'
    ssh_command(master,"sigmoid/spark-1.2.0-bin-cdh4/sbin/start-all.sh")
    #setup_shark(master_nodes,slave_nodes)
    setup_hadoop(master_nodes,slave_nodes)
    print "\n\nSpark Master Started, WebUI available at : http://" + master + ":8080"
def setup_hadoop(master_nodes,slave_nodes):
    # Install Hadoop 2.0.0-cdh4.2.0 on the master, write its configuration
    # (env vars in .bashrc, *-site.xml files, slaves list), rsync it to all
    # slaves, format the namenode and start HDFS.
    master = master_nodes[0]
    print '[ Downloading hadoop ]'
    ssh_command(master,"cd sigmoid;wget https://s3.amazonaws.com/sigmoidanalytics-builds/hadoop/hadoop-2.0.0-cdh4.2.0.tar.gz")
    ssh_command(master,"cd sigmoid;tar zxf hadoop-2.0.0-cdh4.2.0.tar.gz")
    ssh_command(master,"cd sigmoid;rm hadoop-2.0.0-cdh4.2.0.tar.gz")
    print '[ Configuring Hadoop ]'
    #Configure .bashrc
    ssh_command(master,"echo '#HADOOP_CONFS' >> .bashrc")
    ssh_command(master,"echo 'export JAVA_HOME=/usr/lib/jvm/java-1.7.0-openjdk.x86_64' >> .bashrc")
    ssh_command(master,"echo 'export HADOOP_INSTALL=/home/`whoami`/sigmoid/hadoop-2.0.0-cdh4.2.0' >> .bashrc")
    ssh_command(master,"echo 'export PATH=$PATH:\$HADOOP_INSTALL/bin' >> .bashrc")
    ssh_command(master,"echo 'export PATH=$PATH:\$HADOOP_INSTALL/sbin' >> .bashrc")
    ssh_command(master,"echo 'export HADOOP_MAPRED_HOME=\$HADOOP_INSTALL' >> .bashrc")
    ssh_command(master,"echo 'export HADOOP_COMMON_HOME=\$HADOOP_INSTALL' >> .bashrc")
    ssh_command(master,"echo 'export HADOOP_HDFS_HOME=\$HADOOP_INSTALL' >> .bashrc")
    ssh_command(master,"echo 'export YARN_HOME=\$HADOOP_INSTALL' >> .bashrc")
    #Remove *-site.xmls
    ssh_command(master,"cd sigmoid/hadoop-2.0.0-cdh4.2.0;rm etc/hadoop/core-site.xml")
    ssh_command(master,"cd sigmoid/hadoop-2.0.0-cdh4.2.0;rm etc/hadoop/yarn-site.xml")
    ssh_command(master,"cd sigmoid/hadoop-2.0.0-cdh4.2.0;rm etc/hadoop/hdfs-site.xml")
    #Download Our Confs
    ssh_command(master,"cd sigmoid/hadoop-2.0.0-cdh4.2.0/etc/hadoop/;wget https://s3.amazonaws.com/sigmoidanalytics-builds/spark/0.9.1/gce/configs/core-site.xml")
    ssh_command(master,"cd sigmoid/hadoop-2.0.0-cdh4.2.0/etc/hadoop/;wget https://s3.amazonaws.com/sigmoidanalytics-builds/spark/0.9.1/gce/configs/hdfs-site.xml")
    ssh_command(master,"cd sigmoid/hadoop-2.0.0-cdh4.2.0/etc/hadoop/;wget https://s3.amazonaws.com/sigmoidanalytics-builds/spark/0.9.1/gce/configs/mapred-site.xml")
    ssh_command(master,"cd sigmoid/hadoop-2.0.0-cdh4.2.0/etc/hadoop/;wget https://s3.amazonaws.com/sigmoidanalytics-builds/spark/0.9.1/gce/configs/yarn-site.xml")
    #Config Core-site: substitute the master's private IP for PUT-MASTER-IP
    ssh_command(master,"sed -i \"s/PUT-MASTER-IP/$(/sbin/ifconfig eth0 | grep \"inet addr:\" | cut -d: -f2 | cut -d\" \" -f1)/g\" sigmoid/hadoop-2.0.0-cdh4.2.0/etc/hadoop/core-site.xml")
    #Create data/node dirs
    ssh_command(master,"mkdir -p /mnt/hadoop/hdfs/namenode;mkdir -p /mnt/hadoop/hdfs/datanode")
    #Config slaves
    ssh_command(master,"cd sigmoid/hadoop-2.0.0-cdh4.2.0/etc/hadoop/;rm slaves")
    for slave in slave_nodes:
        ssh_command(master,"cd sigmoid/hadoop-2.0.0-cdh4.2.0/etc/hadoop/;echo " + slave + " >> slaves")
    print '[ Rsyncing with Slaves ]'
    #Rsync everything
    for slave in slave_nodes:
        ssh_command(master,"rsync -za /home/" + username + "/sigmoid " + slave + ":")
        ssh_command(slave,"mkdir -p /mnt/hadoop/hdfs/namenode;mkdir -p /mnt/hadoop/hdfs/datanode")
        ssh_command(master,"rsync -za /home/" + username + "/.bashrc " + slave + ":")
    print '[ Formating namenode ]'
    #Format namenode
    ssh_command(master,"sigmoid/hadoop-2.0.0-cdh4.2.0/bin/hdfs namenode -format")
    print '[ Starting DFS ]'
    #Start dfs
    ssh_command(master,"sigmoid/hadoop-2.0.0-cdh4.2.0/sbin/start-dfs.sh")
def setup_shark(master_nodes,slave_nodes):
    # Install Hive 0.11 + Shark 0.9 on the master, write shark-env.sh,
    # rsync to the slaves and start a shark server on port 10000.
    # NOTE: currently unused - the call in setup_spark() is commented out;
    # the config still points at spark-0.9.1 paths, not 1.2.0.
    master = master_nodes[0]
    print '[ Downloading Shark binaries ]'
    ssh_command(master,"cd sigmoid;wget https://s3.amazonaws.com/spark-ui/hive-0.11.0-bin.tgz")
    ssh_command(master,"cd sigmoid;wget https://s3.amazonaws.com/spark-ui/shark-0.9-hadoop-2.0.0-mr1-cdh4.2.0.tar.gz")
    ssh_command(master,"cd sigmoid;tar zxf hive-0.11.0-bin.tgz")
    ssh_command(master,"cd sigmoid;tar zxf shark-0.9-hadoop-2.0.0-mr1-cdh4.2.0.tar.gz")
    ssh_command(master,"rm sigmoid/hive-0.11.0-bin.tgz")
    ssh_command(master,"rm sigmoid/shark-0.9-hadoop-2.0.0-mr1-cdh4.2.0.tar.gz")
    print '[ Configuring Shark ]'
    # Build shark-env.sh; PUT_MASTER_IP_HERE is substituted below.
    ssh_command(master,"cd sigmoid/shark/;echo \"export SHARK_MASTER_MEM=1g\" > conf/shark-env.sh")
    ssh_command(master,"cd sigmoid/shark/;echo \"SPARK_JAVA_OPTS+=\\\" -Dspark.kryoserializer.buffer.mb=10 \\\"\" >> conf/shark-env.sh")
    ssh_command(master,"cd sigmoid/shark/;echo \"export SPARK_JAVA_OPTS\" >> conf/shark-env.sh")
    ssh_command(master,"cd sigmoid/shark/;echo \"export HIVE_HOME=/home/`whoami`/sigmoid/hive-0.11.0-bin\" >> conf/shark-env.sh")
    ssh_command(master,"cd sigmoid/shark/;echo \"export SPARK_JAVA_OPTS\" >> conf/shark-env.sh")
    ssh_command(master,"cd sigmoid/shark/;echo \"export MASTER=spark://PUT_MASTER_IP_HERE:7077\" >> conf/shark-env.sh")
    ssh_command(master,"cd sigmoid/shark/;echo \"export SPARK_HOME=/home/`whoami`/sigmoid/spark-0.9.1-bin-cdh4\" >> conf/shark-env.sh")
    ssh_command(master,"mkdir /mnt/tachyon")
    ssh_command(master,"cd sigmoid/shark/;echo \"export TACHYON_MASTER=PUT_MASTER_IP_HERE:19998\" >> conf/shark-env.sh")
    ssh_command(master,"cd sigmoid/shark/;echo \"export TACHYON_WAREHOUSE_PATH=/mnt/tachyon\" >> conf/shark-env.sh")
    ssh_command(master,"cd sigmoid/shark/;echo \"source /home/`whoami`/sigmoid/spark-0.9.1-bin-cdh4/conf/spark-env.sh\" >> conf/shark-env.sh")
    ssh_command(master,"sed -i \"s/PUT_MASTER_IP_HERE/$(/sbin/ifconfig eth0 | grep \"inet addr:\" | cut -d: -f2 | cut -d\" \" -f1)/g\" sigmoid/shark/conf/shark-env.sh")
    ssh_command(master,"chmod +x sigmoid/shark/conf/shark-env.sh")
    print '[ Rsyncing Shark on slaves ]'
    for slave in slave_nodes:
        ssh_command(master,"rsync -za /home/" + username + "/sigmoid " + slave + ":")
    print '[ Starting Shark Server ]'
    ssh_command(master,"cd sigmoid/shark/;./bin/shark --service sharkserver 10000 > log.txt 2>&1 &")
def show_banner():
    # Fetch and print the project banner (best-effort; wget writes straight
    # to the terminal).
    os.system("wget -qO- https://s3.amazonaws.com/sigmoidanalytics-builds/spark/0.9.1/gce/configs/banner")
def real_main():
    # End-to-end cluster bring-up: parse args, provision GCE resources,
    # then configure every node over ssh.
    show_banner()
    print "[ Script Started ]"
    #Read the arguments
    read_args()
    #Make sure gcloud is accessible.
    check_gcloud()
    #Launch the cluster
    launch_cluster()
    #Wait some time for machines to bootup
    print '[ Waiting 120 Seconds for Machines to start up ]'
    time.sleep(120)
    #Get Master/Slave IP Addresses
    (master_nodes, slave_nodes) = get_cluster_ips()
    #Install Java and build-essential
    install_java(master_nodes,slave_nodes)
    #Generate SSH keys and deploy
    deploy_keys(master_nodes,slave_nodes)
    #Attach a new empty drive and format it
    attach_drive(master_nodes,slave_nodes)
    #Set up Spark/Shark/Hadoop
    setup_spark(master_nodes,slave_nodes)
def main():
    # Top-level error barrier: report failures on stderr instead of dumping
    # an unhandled traceback.
    try:
        real_main()
    except Exception as e:
        print >> stderr, "\nError:\n", e

if __name__ == "__main__":
    main()
| sigmoidanalytics/spark_gce | spark_gce.py | Python | apache-2.0 | 20,428 |
using System;
using System.Reflection;
using System.Management;
using System.Linq;
using System.Diagnostics;
using System.Collections.Generic;
namespace Acrolinx.Sdk.Sidebar.Util
{
    /// <summary>
    /// Exposes metadata attributes of a given assembly (version, product,
    /// copyright, ...) plus static helpers describing the host OS and the
    /// current application.
    /// </summary>
    public class AssemblyUtil
    {
        // Assembly whose attributes are surfaced by the properties below.
        private readonly Assembly asm;

        public AssemblyUtil(Assembly asm)
        {
            this.asm = asm;
        }

        #region Assembly Attribute Accessors

        /// <summary>Local file-system path of the assembly, derived from its CodeBase URI.</summary>
        public string AssemblyPath
        {
            get
            {
                return new Uri(this.asm.CodeBase).LocalPath;
            }
        }

        /// <summary>Assembly version string (e.g. "1.2.3.4").</summary>
        public string AssemblyVersion
        {
            get
            {
                return this.asm.GetName().Version.ToString();
            }
        }

        /// <summary>Value of AssemblyDescriptionAttribute, or "" if absent.</summary>
        public string AssemblyDescription
        {
            get
            {
                object[] attributes = this.asm.GetCustomAttributes(typeof(AssemblyDescriptionAttribute), false);
                if (attributes.Length == 0)
                {
                    return "";
                }
                return ((AssemblyDescriptionAttribute)attributes[0]).Description;
            }
        }

        /// <summary>Value of AssemblyProductAttribute, or "" if absent.</summary>
        public string AssemblyProduct
        {
            get
            {
                object[] attributes = this.asm.GetCustomAttributes(typeof(AssemblyProductAttribute), false);
                if (attributes.Length == 0)
                {
                    return "";
                }
                return ((AssemblyProductAttribute)attributes[0]).Product;
            }
        }

        /// <summary>Value of AssemblyCopyrightAttribute, or "" if absent.</summary>
        public string AssemblyCopyright
        {
            get
            {
                object[] attributes = this.asm.GetCustomAttributes(typeof(AssemblyCopyrightAttribute), false);
                if (attributes.Length == 0)
                {
                    return "";
                }
                return ((AssemblyCopyrightAttribute)attributes[0]).Copyright;
            }
        }

        /// <summary>Value of AssemblyCompanyAttribute, or "" if absent.</summary>
        public string AssemblyCompany
        {
            get
            {
                object[] attributes = this.asm.GetCustomAttributes(typeof(AssemblyCompanyAttribute), false);
                if (attributes.Length == 0)
                {
                    return "";
                }
                return ((AssemblyCompanyAttribute)attributes[0]).Company;
            }
        }
        #endregion

        /// <summary>
        /// Describes the host OS: keys "osName" (WMI Caption, or "Unknown"),
        /// "version" (OS version plus " 64 bit"/" 32 bit") and "osId"
        /// (lower-cased, dot-separated form of the name).
        /// </summary>
        public static Dictionary<string, string> OSInfo()
        {
            var osInfo = new Dictionary<string, string>();
            bool is64bit = Environment.Is64BitOperatingSystem;
            // WMI query for the marketing name of the OS (e.g. "Microsoft Windows 10 Pro").
            var name = (from t in new ManagementObjectSearcher("SELECT Caption FROM Win32_OperatingSystem").Get().Cast<ManagementObject>() select t.GetPropertyValue("Caption")).FirstOrDefault();
            var osName = name != null ? name.ToString() : "Unknown";
            osInfo.Add("osName", osName);
            var architecture = is64bit ? " 64 bit" : " 32 bit";
            var osVersion = Environment.OSVersion.ToString() + architecture;
            osInfo.Add("version", osVersion);
            osInfo.Add("osId", osName.Trim().Replace(" ", ".").ToLower());
            return osInfo;
        }

        /// <summary>
        /// Describes the current process executable via its file-version
        /// resource: keys "applicationName" (last word of the description),
        /// "productName", "version" (file version plus process bitness) and
        /// "appId" (lower-cased, dot-separated description).
        /// </summary>
        public static Dictionary<string, string> AppInfo()
        {
            var appInfo = new Dictionary<string, string>();
            FileVersionInfo fvi = FileVersionInfo.GetVersionInfo(System.Diagnostics.Process.GetCurrentProcess().MainModule.FileName);
            // IntPtr.Size reflects the bitness of the running process, not the OS.
            var architecture = (IntPtr.Size * 8).ToString() + " bit";
            appInfo.Add("applicationName", fvi.FileDescription.Split(' ').Last());
            appInfo.Add("productName", fvi.FileDescription);
            appInfo.Add("version", fvi.FileVersion + " " + architecture);
            appInfo.Add("appId", fvi.FileDescription.Trim().Replace(" ", ".").ToLower());
            return appInfo;
        }
    }
}
| acrolinx/sidebar-sdk-dotnet | Acrolinx.Sidebar/Util/AssemblyUtil.cs | C# | apache-2.0 | 3,841 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jasper.compiler;
import static org.apache.jasper.JasperMessages.MESSAGES;
import java.util.HashMap;
import org.apache.jasper.JasperException;
/**
* Repository of {page, request, session, application}-scoped beans
*
* @author Mandar Raje
* @author Remy Maucherat
*/
public class BeanRepository {

    // Maps bean id -> fully-qualified class name of the bean's declared type.
    private final HashMap<String, String> beanTypes;
    // Class loader used to resolve type names in getBeanType().
    private final ClassLoader loader;
    // Dispatcher used to report translation-time errors.
    private final ErrorDispatcher errDispatcher;

    /**
     * Constructor.
     * @param loader The class loader
     * @param err The error dispatcher that will be used to report errors
     */
    public BeanRepository(ClassLoader loader, ErrorDispatcher err) {
        this.loader = loader;
        this.errDispatcher = err;
        beanTypes = new HashMap<>();
    }

    /**
     * Registers a jsp:useBean declaration after validating its scope.
     *
     * @param n     the jsp:useBean node (used for error reporting)
     * @param s     the bean id
     * @param type  fully-qualified class name of the bean's type
     * @param scope one of "page", "request", "session", "application",
     *              or {@code null} (treated as the default scope)
     * @throws JasperException if the scope is not one of the allowed values
     */
    public void addBean(Node.UseBean n, String s, String type, String scope)
        throws JasperException {

        if (!(scope == null || scope.equals("page") || scope.equals("request")
                || scope.equals("session") || scope.equals("application"))) {
            errDispatcher.jspError(n, MESSAGES.badScopeForUseBean());
        }

        beanTypes.put(s, type);
    }

    /**
     * Resolves the {@link Class} of a previously registered bean.
     *
     * @param bean the bean id passed to {@link #addBean}
     * @return the loaded class of the bean's declared type
     * @throws JasperException if the class cannot be loaded
     */
    public Class<?> getBeanType(String bean)
        throws JasperException {
        Class<?> clazz = null;
        try {
            clazz = loader.loadClass(beanTypes.get(bean));
        } catch (ClassNotFoundException ex) {
            throw new JasperException (ex);
        }
        return clazz;
    }

    /**
     * Returns whether a bean with the given id has been registered.
     */
    public boolean checkVariable(String bean) {
        return beanTypes.containsKey(bean);
    }

}
| undertow-io/jastow | src/main/java/org/apache/jasper/compiler/BeanRepository.java | Java | apache-2.0 | 2,396 |
.tab-content{
    border: 1px solid #ddd;
    border-top: 0 !important;
}
thead th{
    padding: 20px 0 20px 10px !important;
    width: 12.5%;
}
thead tr{
    background: #E7E5E6;
}
td{
    /* Fixed: non-zero lengths need a unit - a bare "10" is invalid CSS and
       the declaration was being dropped by the browser. */
    padding-left: 10px !important;
    /* Fixed: the original read "0 !important:;" - the stray colon made the
       declaration invalid, so it was silently ignored. */
    padding-right: 0 !important;
}
| xuyuqin/xuyuqin | src/main/resources/static/public/yun_css/member/orderconfirm.css | CSS | apache-2.0 | 240 |
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, softwar
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for classification of real numbers."""
__author__ = 'Sean Lip'
from core.tests import test_utils
from extensions.rules import real
class RealRuleUnitTests(test_utils.GenericTestBase):
    """Tests for rules operating on Real objects."""

    def test_equals_rule(self):
        """Equality is numeric, so 3 and 3.0 match the same answers."""
        self.assertTrue(real.Equals(3).eval(3))
        self.assertTrue(real.Equals(3.0).eval(3))
        self.assertFalse(real.Equals(4).eval(3))

    def test_is_less_than_rule(self):
        """Strict less-than, exercising int/float combinations."""
        for cutoff, value in [(4, 3), (4, 3.0), (4.0, 3.0)]:
            self.assertTrue(real.IsLessThan(cutoff).eval(value))
        for cutoff, value in [(3, 3), (3.0, 3.0), (3.0, 4.0), (3, 4)]:
            self.assertFalse(real.IsLessThan(cutoff).eval(value))

    def test_is_greater_than_rule(self):
        """Strict greater-than, exercising int/float combinations."""
        for cutoff, value in [(3, 4), (3.0, 4), (3.0, 4.0)]:
            self.assertTrue(real.IsGreaterThan(cutoff).eval(value))
        for cutoff, value in [(3, 3), (3.0, 3.0), (4.0, 3.0), (4, 3)]:
            self.assertFalse(real.IsGreaterThan(cutoff).eval(value))

    def test_is_less_than_or_equal_to_rule(self):
        rule = real.IsLessThanOrEqualTo(3)
        self.assertTrue(rule.eval(2))
        self.assertTrue(rule.eval(3))
        self.assertFalse(rule.eval(4))

    def test_is_greater_than_or_equal_to_rule(self):
        rule = real.IsGreaterThanOrEqualTo(3)
        self.assertTrue(rule.eval(4))
        self.assertTrue(rule.eval(3))
        self.assertFalse(rule.eval(2))

    def test_is_inclusively_between_rule(self):
        # Constructing with lower > upper is rejected outright.
        with self.assertRaises(AssertionError):
            real.IsInclusivelyBetween(2, 1)

        rule = real.IsInclusivelyBetween(1, 3)
        self.assertTrue(rule.eval(2))
        self.assertTrue(rule.eval(1))
        self.assertTrue(rule.eval(3))
        self.assertTrue(rule.eval(1.0))
        self.assertFalse(rule.eval(3.001))

    def test_is_within_tolerance_rule(self):
        # Tolerance 0.5 around 0: the boundary itself is accepted.
        rule = real.IsWithinTolerance(0.5, 0)
        self.assertTrue(rule.eval(0))
        self.assertTrue(rule.eval(0.5))
        self.assertFalse(rule.eval(0.51))
| won0089/oppia | extensions/rules/real_test.py | Python | apache-2.0 | 2,944 |
#region License
//
// Dasher
//
// Copyright 2015-2017 Drew Noakes
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// More information about this project is available at:
//
// https://github.com/drewnoakes/dasher
//
#endregion
using System;
using System.Diagnostics.CodeAnalysis;
namespace Dasher
{
/// <summary>
/// Commonly used to signify an empty message.
/// </summary>
/// <remarks>
/// <see cref="Empty"/> is mainly used with generic wrapper types. Consider
/// a message envelope such as <c>Envelope<T></c> for which header
/// fields apply, but no body is required. In such a case you can serialise
/// and deserialise <c>Envelope<Empty></c> successfully.
/// <para />
/// On the wire, <c>Empty</c> is serialised as a MsgPack map with zero elements.
/// Conceptually this is a complex type with no fields.
/// <para />
/// A serialised <c>Empty</c> value can be deserialised as a complex type if
/// all parameters have default values, allowing versioning of contracts over time.
/// <para />
/// This class may not be instantiated.
/// </remarks>
[SuppressMessage("ReSharper", "ConvertToStaticClass")]
public sealed class Empty
{
private Empty()
{
throw new NotSupportedException("Not for instantiation.");
}
}
} | drewnoakes/dasher | Dasher/Empty.cs | C# | apache-2.0 | 1,903 |
// NOTE: This file was generated by the ServiceGenerator.
// ----------------------------------------------------------------------------
// API:
// Version History API (versionhistory/v1)
// Description:
// Version History API - Prod
// Documentation:
// https://developers.chrome.com/versionhistory
#if SWIFT_PACKAGE || GTLR_USE_MODULAR_IMPORT
@import GoogleAPIClientForRESTCore;
#elif GTLR_BUILT_AS_FRAMEWORK
#import "GTLR/GTLRService.h"
#else
#import "GTLRService.h"
#endif
#if GTLR_RUNTIME_VERSION != 3000
#error This file was generated by a different version of ServiceGenerator which is incompatible with this GTLR library source.
#endif
// Generated comments include content from the discovery document; avoid them
// causing warnings since clang's checks are some what arbitrary.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdocumentation"
NS_ASSUME_NONNULL_BEGIN
/**
* Service for executing Version History API queries.
*
* Version History API - Prod
*/
@interface GTLRVersionHistoryService : GTLRService
// No new methods
// Clients should create a standard query with any of the class methods in
// GTLRVersionHistoryQuery.h. The query can the be sent with GTLRService's
// execute methods,
//
// - (GTLRServiceTicket *)executeQuery:(GTLRQuery *)query
// completionHandler:(void (^)(GTLRServiceTicket *ticket,
// id object, NSError *error))handler;
// or
// - (GTLRServiceTicket *)executeQuery:(GTLRQuery *)query
// delegate:(id)delegate
// didFinishSelector:(SEL)finishedSelector;
//
// where finishedSelector has a signature of:
//
// - (void)serviceTicket:(GTLRServiceTicket *)ticket
// finishedWithObject:(id)object
// error:(NSError *)error;
//
// The object passed to the completion handler or delegate method
// is a subclass of GTLRObject, determined by the query method executed.
@end
NS_ASSUME_NONNULL_END
#pragma clang diagnostic pop
| google/google-api-objectivec-client-for-rest | Source/GeneratedServices/VersionHistory/GTLRVersionHistoryService.h | C | apache-2.0 | 2,053 |
<div id="semillaNeta" style="display: none" class="ui-state-error ui-corner-all">
<div class="cantidades" style="float: left;width: 14em;text-align: left">
<label style="width: 97%;">Número de Etiquetas</label>
</div>
<div>
<input type="text" id="nroetiquetas" class="number" maxlength="3" size="5"/>
</div>
<div style="width:20em"> </div>
<div class="cantidades" style="float: left;width: 14em;text-align: left">
<label style="width: 83%;">Capacidad de bolsa (entre 30 Kg, a 75 Kg.)</label>
</div>
<div style="float: left">
<input id="capacidad" class="number" type="text" size="5" maxlength="5"/>
</div>
<div style="margin-bottom: 1em;width:20em"> </div>
<div style="margin-bottom: 1em;width:20em"> </div>
<div class="cantidades" style="width: 14em;text-align: left">
<label>Total semilla Neta</label>
</div>
<div style="float: left">
<input id="total" class="total not-edit" type="hidden" style="float:right" readonly="readonly"/>
<button id="calcular-semilla-neta" class="btn btn-success">
Calcular
</button>
</div>
</div>
<script>
$(document).ready(function(){
$("input#nroetiquetas,input#capacidad").numeric();
$("input#capacidad").on("blur",function(){
if ($(this).val()<30 || $(this).val()>75){
$(this).val(40);
}
});
});
</script> | ewill15/usfx | vista/dialogos/semillaNeta.html | HTML | apache-2.0 | 1,384 |
package ru.stqa.pft.sandbox;
/**
* Created by jnorina on 30/04/2017.
*/
public class Rectangle {
public double a;
public double b;
public Rectangle (double a, double b){
this.a=a;
this.b=b;
}
public double area(){
return this.a*this.b;
}
}
| ynorina/java_pft | sandbox/src/main/java/ru/stqa/pft/sandbox/Rectangle.java | Java | apache-2.0 | 292 |
/*
* Copyright (c) 2013 Miles Sabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless
package ops
object tuple {
import shapeless.ops.{ hlist => hl }
  /**
   * Type class witnessing that this tuple is composite and providing access to head and tail.
   *
   * `H` is the type of the first element and `T` is the tuple type of the remaining elements.
   * Available only for tuples with at least one element.
   *
   * @author Miles Sabin
   */
  trait IsComposite[P] {
    type H
    type T
    def head(p : P) : H
    def tail(p : P) : T
  }
  object IsComposite {
    // Summoner; refines the result with the instance's concrete H and T members via Aux.
    def apply[P](implicit isComp: IsComposite[P]): Aux[P, isComp.H, isComp.T] = isComp
    type Aux[P, H0, T0] = IsComposite[P] { type H = H0; type T = T0 }
    // Derived by viewing the tuple as an HList (Generic), destructuring it (IsHCons)
    // and tupling the tail back up (Tupler).
    implicit def isComposite[P, L <: HList, H0, T <: HList]
      (implicit gen: Generic.Aux[P, L], isHCons: hl.IsHCons.Aux[L, H0, T], tp: hl.Tupler[T]): Aux[P, H0, tp.Out] =
      new IsComposite[P] {
        type H = H0
        type T = tp.Out
        def head(p: P): H = isHCons.head(gen.to(p))
        def tail(p: P): T = tp(isHCons.tail(gen.to(p)))
      }
  }
  /**
   * Type class supporting prepending to this tuple.
   *
   * `Out` is the tuple type obtained by prepending all elements of `T` to those of `U`.
   *
   * @author Miles Sabin
   */
  trait Prepend[T, U] extends DepFn2[T, U]
  object Prepend {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, U](implicit prepend: Prepend[T, U]): Aux[T, U, prepend.Out] = prepend
    type Aux[T, U, Out0] = Prepend[T, U] { type Out = Out0 }
    // Derived by converting both tuples to HLists, prepending at the HList level,
    // then tupling the result back up.
    implicit def prepend[T, L1 <: HList, U, L2 <: HList, L3 <: HList]
      (implicit gent: Generic.Aux[T, L1], genu: Generic.Aux[U, L2], prepend: hl.Prepend.Aux[L1, L2, L3], tp: hl.Tupler[L3]): Aux[T, U, tp.Out] =
        new Prepend[T, U] {
          type Out = tp.Out
          def apply(t: T, u: U): Out = prepend(gent.to(t), genu.to(u)).tupled
        }
  }
  /**
   * Type class supporting reverse prepending to this tuple.
   *
   * `Out` is the tuple type obtained by prepending the elements of `T` in reverse order to those of `U`.
   *
   * @author Miles Sabin
   */
  trait ReversePrepend[T, U] extends DepFn2[T, U]
  object ReversePrepend {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, U](implicit prepend: ReversePrepend[T, U]): Aux[T, U, prepend.Out] = prepend
    type Aux[T, U, Out0] = ReversePrepend[T, U] { type Out = Out0 }
    // Derived via Generic (tuple -> HList), hl.ReversePrepend and Tupler (HList -> tuple).
    implicit def prepend[T, L1 <: HList, U, L2 <: HList, L3 <: HList]
      (implicit gent: Generic.Aux[T, L1], genu: Generic.Aux[U, L2], prepend: hl.ReversePrepend.Aux[L1, L2, L3], tp: hl.Tupler[L3]): Aux[T, U, tp.Out] =
        new ReversePrepend[T, U] {
          type Out = tp.Out
          def apply(t: T, u: U): Out = prepend(gent.to(t), genu.to(u)).tupled
        }
  }
  /**
   * Type class supporting access to the ''nth'' element of this tuple. Available only if this tuple has at least
   * ''n'' elements.
   *
   * `Out` is the type of the element at (zero-based) index `N`.
   *
   * @author Miles Sabin
   */
  trait At[T, N <: Nat] extends DepFn1[T]
  object At {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, N <: Nat](implicit at: At[T, N]): Aux[T, N, at.Out] = at
    type Aux[T, N <: Nat, Out0] = At[T, N] { type Out = Out0 }
    // Derived by delegating to hl.At on the tuple's Generic HList representation.
    implicit def at[T, L1 <: HList, N <: Nat]
      (implicit gen: Generic.Aux[T, L1], at: hl.At[L1, N]): Aux[T, N, at.Out] =
      new At[T, N] {
        type Out = at.Out
        def apply(t: T): Out = at(gen.to(t))
      }
  }
  /**
   * Type class supporting access to the last element of this tuple. Available only if this tuple has at least one
   * element.
   *
   * @author Miles Sabin
   */
  trait Last[T] extends DepFn1[T]
  object Last {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T](implicit last: Last[T]): Aux[T, last.Out] = last
    type Aux[T, Out0] = Last[T] { type Out = Out0 }
    // Derived by delegating to hl.Last on the tuple's Generic HList representation.
    implicit def last[T, L <: HList]
      (implicit gen: Generic.Aux[T, L], last: hl.Last[L]): Aux[T, last.Out] =
      new Last[T] {
        type Out = last.Out
        def apply(t: T): Out = gen.to(t).last
      }
  }
  /**
   * Type class supporting access to all but the last element of this tuple. Available only if this tuple has at
   * least one element.
   *
   * `Out` is the tuple type of all elements except the last.
   *
   * @author Miles Sabin
   */
  trait Init[T] extends DepFn1[T]
  object Init {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T](implicit init: Init[T]): Aux[T, init.Out] = init
    type Aux[T, Out0] = Init[T] { type Out = Out0 }
    // Derived via Generic (tuple -> HList), hl.Init and Tupler (HList -> tuple).
    implicit def init[T, L1 <: HList, L2 <: HList]
      (implicit gen: Generic.Aux[T, L1], init: hl.Init.Aux[L1, L2], tp: hl.Tupler[L2]): Aux[T, tp.Out] =
      new Init[T] {
        type Out = tp.Out
        def apply(t: T): Out = init(gen.to(t)).tupled
      }
  }
  /**
   * Type class supporting access to the first element of this tuple of type `U`. Available only if this tuple
   * contains an element of type `U`.
   *
   * Note that `Out` is fixed to `U` here, so no Out0 refinement is needed in the Aux alias.
   *
   * @author Miles Sabin
   */
  trait Selector[T, U] extends DepFn1[T] { type Out = U }
  object Selector {
    // Summoner.
    def apply[T, U](implicit selector: Selector[T, U]): Aux[T, U] = selector
    type Aux[T, U] = Selector[T, U]
    // Derived by delegating to hl.Selector on the tuple's Generic HList representation.
    implicit def select[T, L <: HList, U]
      (implicit gen: Generic.Aux[T, L], selector: hl.Selector[L, U]): Aux[T, U] =
      new Selector[T, U] {
        def apply(t: T): U = gen.to(t).select[U]
      }
  }
  /**
   * Type class supporting access to the all elements of this tuple of type `U`.
   *
   * `Out` is the tuple type of exactly the elements of type `U`, in their original order.
   *
   * @author Miles Sabin
   */
  trait Filter[T, U] extends DepFn1[T]
  object Filter {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, U](implicit filter: Filter[T, U]): Aux[T, U, filter.Out] = filter
    type Aux[T, U, Out0] = Filter[T, U] { type Out = Out0 }
    // Derived via Generic (tuple -> HList), hl.Filter and Tupler (HList -> tuple).
    implicit def filterTuple[T, L1 <: HList, U, L2 <: HList]
      (implicit gen: Generic.Aux[T, L1], filter: hl.Filter.Aux[L1, U, L2], tp: hl.Tupler[L2]): Aux[T, U, tp.Out] = new Filter[T, U] {
      type Out = tp.Out
      def apply(t: T): Out = tp(filter(gen.to(t)))
    }
  }
  /**
   * Type class supporting access to the all elements of this tuple of type different than `U`.
   *
   * `Out` is the tuple type of exactly the elements whose type is not `U`, in their original order.
   *
   * @author Miles Sabin
   */
  trait FilterNot[T, U] extends DepFn1[T]
  object FilterNot {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, U](implicit filter: FilterNot[T, U]): Aux[T, U, filter.Out] = filter
    type Aux[T, U, Out0] = FilterNot[T, U] { type Out = Out0 }
    // Derived via Generic (tuple -> HList), hl.FilterNot and Tupler (HList -> tuple).
    implicit def filterNotTuple[T, L1 <: HList, U, L2 <: HList]
      (implicit gen: Generic.Aux[T, L1], filterNot: hl.FilterNot.Aux[L1, U, L2], tp: hl.Tupler[L2]): Aux[T, U, tp.Out] = new FilterNot[T, U] {
      type Out = tp.Out
      def apply(t: T): Out = tp(filterNot(gen.to(t)))
    }
  }
  /**
   * Type class supporting removal of an element from this tuple. Available only if this tuple contains an
   * element of type `U`.
   *
   * `Out` is a pair of the removed element and the tuple of the remaining elements.
   *
   * @author Miles Sabin
   */
  trait Remove[T, U] extends DepFn1[T]
  object Remove {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, E](implicit remove: Remove[T, E]): Aux[T, E, remove.Out] = remove
    type Aux[T, U, Out0] = Remove[T, U] { type Out = Out0 }
    // Derived via Generic, hl.Remove (which yields (U, remainder)) and Tupler on the remainder.
    implicit def removeTuple[T, L1 <: HList, U, L2 <: HList]
      (implicit gen: Generic.Aux[T, L1], remove: hl.Remove.Aux[L1, U, (U, L2)], tp: hl.Tupler[L2]): Aux[T, U, (U, tp.Out)] = new Remove[T, U] {
      type Out = (U, tp.Out)
      def apply(t: T): Out = { val (u, rem) = remove(gen.to(t)) ; (u, tp(rem)) }
    }
  }
  /**
   * Type class supporting removal of a sublist from this tuple. Available only if this tuple contains a
   * sublist of type `SL`.
   *
   * The elements of `SL` do not have to be contiguous in this tuple.
   *
   * `Out` is a pair of the removed sublist (as a tuple) and the tuple of the remaining elements.
   *
   * @author Miles Sabin
   */
  trait RemoveAll[T, S] extends DepFn1[T]
  object RemoveAll {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, S](implicit remove: RemoveAll[T, S]): Aux[T, S, remove.Out] = remove
    type Aux[T, S, Out0] = RemoveAll[T, S] { type Out = Out0 }
    // Derived by converting both the tuple and the sublist-tuple to HLists, removing at
    // the HList level, then tupling both the removed part and the remainder back up.
    implicit def removeAllTuple[T, ST, SL <: HList, L1 <: HList, L2 <: HList]
      (implicit gent: Generic.Aux[T, L1], gens: Generic.Aux[ST, SL], removeAll: hl.RemoveAll.Aux[L1, SL, (SL, L2)], tp: hl.Tupler[L2]): Aux[T, ST, (ST, tp.Out)] =
      new RemoveAll[T, ST] {
        type Out = (ST, tp.Out)
        def apply(t: T): Out = { val (e, rem) = removeAll(gent.to(t)) ; (gens.from(e), tp(rem)) }
      }
  }
  /**
   * Type class supporting replacement of the first element of type U from this tuple with an element of type V.
   * Available only if this tuple contains an element of type `U`.
   *
   * `Out` is a pair of the replaced element and the updated tuple.
   *
   * @author Miles Sabin
   */
  trait Replacer[T, U, V] extends DepFn2[T, U]
  object Replacer {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, U, V](implicit replacer: Replacer[T, U, V]): Aux[T, U, V, replacer.Out] = replacer
    type Aux[T, U, V, Out0] = Replacer[T, U, V] { type Out = Out0 }
    // Derived via Generic, hl.Replacer (yielding (replaced, updated HList)) and Tupler.
    implicit def replaceTuple[T, L1 <: HList, U, V, L2 <: HList]
      (implicit gen: Generic.Aux[T, L1], replace: hl.Replacer.Aux[L1, V, U, (V, L2)], tp: hl.Tupler[L2]): Aux[T, U, V, (V, tp.Out)] = new Replacer[T, U, V] {
      type Out = (V, tp.Out)
      def apply(t: T, u: U): Out = { val (v, rep) = replace(gen.to(t), u) ; (v, tp(rep)) }
    }
  }
  /**
   * Type class supporting replacement of the Nth element of this tuple with an element of type V. Available only if
   * this tuple contains at least N elements.
   *
   * `Out` is a pair of the replaced element and the updated tuple.
   *
   * @author Miles Sabin
   */
  trait ReplaceAt[T, N <: Nat, U] extends DepFn2[T, U]
  object ReplaceAt {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, N <: Nat, V](implicit replacer: ReplaceAt[T, N, V]): Aux[T, N, V, replacer.Out] = replacer
    type Aux[T, N <: Nat, U, Out0] = ReplaceAt[T, N, U] { type Out = Out0 }
    // Derived via Generic, hl.ReplaceAt (yielding (replaced, updated HList)) and Tupler.
    implicit def replaceTuple[T, L1 <: HList, N <: Nat, U, V, L2 <: HList]
      (implicit gen: Generic.Aux[T, L1], replaceAt: hl.ReplaceAt.Aux[L1, N, U, (V, L2)], tp: hl.Tupler[L2]): Aux[T, N, U, (V, tp.Out)] = new ReplaceAt[T, N, U] {
      type Out = (V, tp.Out)
      def apply(t: T, u: U): Out = { val (v, rep) = replaceAt(gen.to(t), u) ; (v, tp(rep)) }
    }
  }
  /**
   * Type class supporting replacement of the first element of type U from this tuple with the result of
   * its transformation via a given function into a new element of type V.
   * Available only if this tuple contains an element of type `U`.
   *
   * `Out` is a pair of the original (pre-transformation) element and the updated tuple.
   *
   * @author Howard Branch
   */
  trait Modifier[T, U, V] extends DepFn2[T, U => V]
  object Modifier {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, U, V](implicit modifier: Modifier[T, U, V]): Aux[T, U, V, modifier.Out] = modifier
    type Aux[T, U, V, Out0] = Modifier[T, U, V] { type Out = Out0 }
    // Derived via Generic, hl.Modifier (yielding (original, updated HList)) and Tupler.
    implicit def modifyTuple[T, L1 <: HList, U, V, L2 <: HList]
      (implicit gen: Generic.Aux[T, L1], modify: hl.Modifier.Aux[L1, U, V, (U, L2)], tp: hl.Tupler[L2]): Aux[T, U, V, (U, tp.Out)] = new Modifier[T, U, V] {
      type Out = (U, tp.Out)
      def apply(t: T, f: U => V): Out = { val (u, rep) = modify(gen.to(t), f) ; (u, tp(rep)) }
    }
  }
  /**
   * Type class supporting retrieval of the first ''n'' elements of this tuple. Available only if this tuple has at
   * least ''n'' elements.
   *
   * `Out` is the tuple type of the first `N` elements.
   *
   * @author Miles Sabin
   */
  trait Take[T, N <: Nat] extends DepFn1[T]
  object Take {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, N <: Nat](implicit take: Take[T, N]): Aux[T, N, take.Out] = take
    type Aux[T, N <: Nat, Out0] = Take[T, N] { type Out = Out0 }
    // Derived via Generic (tuple -> HList), hl.Take and Tupler (HList -> tuple).
    implicit def tupleTake[T, L1 <: HList, N <: Nat, L2 <: HList]
      (implicit gen: Generic.Aux[T, L1], take: hl.Take.Aux[L1, N, L2], tp: hl.Tupler[L2]): Aux[T, N, tp.Out] =
      new Take[T, N] {
        type Out = tp.Out
        def apply(t: T): tp.Out = tp(take(gen.to(t)))
      }
  }
  /**
   * Type class supporting removal of the first ''n'' elements of this tuple. Available only if this tuple has at
   * least ''n'' elements.
   *
   * `Out` is the tuple type of the elements remaining after dropping the first `N`.
   *
   * @author Miles Sabin
   */
  trait Drop[T, N <: Nat] extends DepFn1[T]
  object Drop {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, N <: Nat](implicit drop: Drop[T, N]): Aux[T, N, drop.Out] = drop
    type Aux[T, N <: Nat, Out0] = Drop[T, N] { type Out = Out0 }
    // Derived via Generic (tuple -> HList), hl.Drop and Tupler (HList -> tuple).
    implicit def tupleDrop[T, L1 <: HList, N <: Nat, L2 <: HList]
      (implicit gen: Generic.Aux[T, L1], drop: hl.Drop.Aux[L1, N, L2], tp: hl.Tupler[L2]): Aux[T, N, tp.Out] =
      new Drop[T, N] {
        type Out = tp.Out
        def apply(t: T): tp.Out = tp(drop(gen.to(t)))
      }
  }
  /**
   * Type class supporting splitting this tuple at the ''nth'' element returning the prefix and suffix as a pair.
   * Available only if this tuple has at least ''n'' elements.
   *
   * @author Miles Sabin
   */
  trait Split[T, N <: Nat] extends DepFn1[T]
  object Split {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, N <: Nat](implicit split: Split[T, N]): Aux[T, N, split.Out] = split
    type Aux[T, N <: Nat, Out0] = Split[T, N] { type Out = Out0 }
    // Derived by splitting the HList representation, then tupling prefix and suffix
    // independently (tpp/tps) into a pair of tuples.
    implicit def tupleSplit[T, L <: HList, N <: Nat, LP <: HList, LS <: HList]
      (implicit
        gen: Generic.Aux[T, L],
        split: hl.Split.Aux[L, N, LP, LS],
        tpp: hl.Tupler[LP],
        tps: hl.Tupler[LS]
      ): Aux[T, N, (tpp.Out, tps.Out)] =
      new Split[T, N] {
        type Out = (tpp.Out, tps.Out)
        // split.product yields prefix :: suffix :: HNil; destructure and tuple each part.
        def apply(t: T): Out = { val p :: s :: HNil = split.product(gen.to(t)) ; (tpp(p), tps(s)) }
      }
  }
  /**
   * Type class supporting splitting this tuple at the ''nth'' element returning the reverse prefix and suffix as a
   * pair. Available only if this tuple has at least ''n'' elements.
   *
   * @author Miles Sabin
   */
  trait ReverseSplit[T, N <: Nat] extends DepFn1[T]
  object ReverseSplit {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, N <: Nat](implicit split: ReverseSplit[T, N]): Aux[T, N, split.Out] = split
    type Aux[T, N <: Nat, Out0] = ReverseSplit[T, N] { type Out = Out0 }
    // Derived by reverse-splitting the HList representation, then tupling the
    // reversed prefix and the suffix independently (tpp/tps).
    implicit def tupleReverseSplit[T, L <: HList, N <: Nat, LP <: HList, LS <: HList]
      (implicit
        gen: Generic.Aux[T, L],
        split: hl.ReverseSplit.Aux[L, N, LP, LS],
        tpp: hl.Tupler[LP],
        tps: hl.Tupler[LS]
      ): Aux[T, N, (tpp.Out, tps.Out)] =
      new ReverseSplit[T, N] {
        type Out = (tpp.Out, tps.Out)
        // split.product yields prefix :: suffix :: HNil; destructure and tuple each part.
        def apply(t: T): Out = { val p :: s :: HNil = split.product(gen.to(t)) ; (tpp(p), tps(s)) }
      }
  }
  /**
   * Type class supporting splitting this tuple at the first occurrence of an element of type `U` returning the prefix
   * and suffix as a pair. Available only if this tuple contains an element of type `U`.
   *
   * @author Miles Sabin
   */
  trait SplitLeft[T, U] extends DepFn1[T]
  object SplitLeft {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, U](implicit split: SplitLeft[T, U]): Aux[T, U, split.Out] = split
    type Aux[T, U, Out0] = SplitLeft[T, U] { type Out = Out0 }
    // Derived by splitting the HList representation at the first U, then tupling
    // prefix and suffix independently (tpp/tps).
    implicit def tupleSplitLeft[T, L <: HList, U, LP <: HList, LS <: HList]
      (implicit
        gen: Generic.Aux[T, L],
        split: hl.SplitLeft.Aux[L, U, LP, LS],
        tpp: hl.Tupler[LP],
        tps: hl.Tupler[LS]
      ): Aux[T, U, (tpp.Out, tps.Out)] =
      new SplitLeft[T, U] {
        type Out = (tpp.Out, tps.Out)
        // split.product yields prefix :: suffix :: HNil; destructure and tuple each part.
        def apply(t: T): Out = { val p :: s :: HNil = split.product(gen.to(t)) ; (tpp(p), tps(s)) }
      }
  }
  /**
   * Type class supporting splitting this tuple at the first occurrence of an element of type `U` returning the reverse
   * prefix and suffix as a pair. Available only if this tuple contains an element of type `U`.
   *
   * @author Miles Sabin
   */
  trait ReverseSplitLeft[T, U] extends DepFn1[T]
  object ReverseSplitLeft {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, U](implicit split: ReverseSplitLeft[T, U]): Aux[T, U, split.Out] = split
    type Aux[T, U, Out0] = ReverseSplitLeft[T, U] { type Out = Out0 }
    // Derived by reverse-splitting the HList representation at the first U, then
    // tupling the reversed prefix and the suffix independently (tpp/tps).
    implicit def tupleReverseSplitLeft[T, L <: HList, U, LP <: HList, LS <: HList]
      (implicit
        gen: Generic.Aux[T, L],
        split: hl.ReverseSplitLeft.Aux[L, U, LP, LS],
        tpp: hl.Tupler[LP],
        tps: hl.Tupler[LS]
      ): Aux[T, U, (tpp.Out, tps.Out)] =
      new ReverseSplitLeft[T, U] {
        type Out = (tpp.Out, tps.Out)
        // split.product yields prefix :: suffix :: HNil; destructure and tuple each part.
        def apply(t: T): Out = { val p :: s :: HNil = split.product(gen.to(t)) ; (tpp(p), tps(s)) }
      }
  }
  /**
   * Type class supporting splitting this tuple at the last occurrence of an element of type `U` returning the prefix
   * and suffix as a pair. Available only if this tuple contains an element of type `U`.
   *
   * @author Miles Sabin
   */
  trait SplitRight[T, U] extends DepFn1[T]
  object SplitRight {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, U](implicit split: SplitRight[T, U]): Aux[T, U, split.Out] = split
    type Aux[T, U, Out0] = SplitRight[T, U] { type Out = Out0 }
    // Derived by splitting the HList representation at the last U, then tupling
    // prefix and suffix independently (tpp/tps).
    implicit def tupleSplitRight[T, L <: HList, U, LP <: HList, LS <: HList]
      (implicit
        gen: Generic.Aux[T, L],
        split: hl.SplitRight.Aux[L, U, LP, LS],
        tpp: hl.Tupler[LP],
        tps: hl.Tupler[LS]
      ): Aux[T, U, (tpp.Out, tps.Out)] =
      new SplitRight[T, U] {
        type Out = (tpp.Out, tps.Out)
        // split.product yields prefix :: suffix :: HNil; destructure and tuple each part.
        def apply(t: T): Out = { val p :: s :: HNil = split.product(gen.to(t)) ; (tpp(p), tps(s)) }
      }
  }
  /**
   * Type class supporting splitting this tuple at the last occurrence of an element of type `U` returning the reverse
   * prefix and suffix as a pair. Available only if this tuple contains an element of type `U`.
   *
   * @author Miles Sabin
   */
  trait ReverseSplitRight[T, U] extends DepFn1[T]
  object ReverseSplitRight {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, U](implicit split: ReverseSplitRight[T, U]): Aux[T, U, split.Out] = split
    type Aux[T, U, Out0] = ReverseSplitRight[T, U] { type Out = Out0 }
    // Derived by reverse-splitting the HList representation at the last U, then
    // tupling the reversed prefix and the suffix independently (tpp/tps).
    implicit def tupleReverseSplitRight[T, L <: HList, U, LP <: HList, LS <: HList]
      (implicit
        gen: Generic.Aux[T, L],
        split: hl.ReverseSplitRight.Aux[L, U, LP, LS],
        tpp: hl.Tupler[LP],
        tps: hl.Tupler[LS]
      ): Aux[T, U, (tpp.Out, tps.Out)] =
      new ReverseSplitRight[T, U] {
        type Out = (tpp.Out, tps.Out)
        // split.product yields prefix :: suffix :: HNil; destructure and tuple each part.
        def apply(t: T): Out = { val p :: s :: HNil = split.product(gen.to(t)) ; (tpp(p), tps(s)) }
      }
  }
  /**
   * Type class supporting reversing this tuple.
   *
   * `Out` is the tuple type with the element types of `T` in reverse order.
   *
   * @author Miles Sabin
   */
  trait Reverse[T] extends DepFn1[T]
  object Reverse {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T](implicit reverse: Reverse[T]): Aux[T, reverse.Out] = reverse
    type Aux[T, Out0] = Reverse[T] { type Out = Out0 }
    // Derived via Generic (tuple -> HList), hl.Reverse and Tupler (HList -> tuple).
    // NOTE(review): the `Out` type parameter of this method appears unused — presumably
    // retained for binary/source compatibility; confirm before removing.
    implicit def tupleReverseAux[T, L1 <: HList, L2 <: HList, Out]
      (implicit gen: Generic.Aux[T, L1], reverse: hl.Reverse.Aux[L1, L2], tp: hl.Tupler[L2]): Aux[T, tp.Out] =
      new Reverse[T] {
        type Out = tp.Out
        def apply(t: T): tp.Out = tp(reverse(gen.to(t)))
      }
  }
  /**
   * Type class supporting mapping a higher ranked function over this tuple.
   *
   * `P` is the singleton type of the polymorphic function value being mapped.
   *
   * @author Miles Sabin
   */
  trait Mapper[T, P] extends DepFn1[T]
  object Mapper {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, P](implicit mapper: Mapper[T, P]): Aux[T, P, mapper.Out] = mapper
    type Aux[T, P, Out0] = Mapper[T, P] { type Out = Out0 }
    // Derived via Generic (tuple -> HList), hl.Mapper and Tupler (HList -> tuple).
    implicit def mapper[T, P, L1 <: HList, L2 <: HList]
      (implicit gen: Generic.Aux[T, L1], mapper: hl.Mapper.Aux[P, L1, L2], tp: hl.Tupler[L2]): Aux[T, P, tp.Out] =
      new Mapper[T, P] {
        type Out = tp.Out
        def apply(t: T): tp.Out = tp(mapper(gen.to(t)))
      }
  }
  /**
   * Type class supporting flatmapping a higher ranked function over this tuple.
   *
   * `P` is the singleton type of the polymorphic function value being flatmapped; it is
   * expected to map each element to a tuple, whose elements are flattened into the result.
   *
   * @author Miles Sabin
   */
  trait FlatMapper[T, P] extends DepFn1[T]
  object FlatMapper {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, P](implicit mapper: FlatMapper[T, P]): Aux[T, P, mapper.Out] = mapper
    import poly.Compose
    type Aux[T, P, Out0] = FlatMapper[T, P] { type Out = Out0 }
    // Derived by composing P with productElements so that each tuple produced by P is
    // converted to an HList before hl.FlatMapper flattens, then tupling the result.
    implicit def mapper[T, P, L1 <: HList, L2 <: HList]
      (implicit gen: Generic.Aux[T, L1], mapper: hl.FlatMapper.Aux[Compose[productElements.type, P], L1, L2], tp: hl.Tupler[L2]): Aux[T, P, tp.Out] =
      new FlatMapper[T, P] {
        type Out = tp.Out
        def apply(t: T): tp.Out = tp(mapper(gen.to(t)))
      }
  }
  /**
   * Type class supporting mapping a constant valued function over this tuple.
   *
   * `Out` is a tuple of the same arity as `T` with every element replaced by the constant of type `C`.
   *
   * @author Miles Sabin
   */
  trait ConstMapper[T, C] extends DepFn2[T, C]
  object ConstMapper {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, C](implicit mapper: ConstMapper[T, C]): Aux[T, C, mapper.Out] = mapper
    type Aux[T, C, Out0] = ConstMapper[T, C] { type Out = Out0 }
    // Derived via Generic (tuple -> HList), hl.ConstMapper and Tupler (HList -> tuple).
    implicit def mapper[T, C, L1 <: HList, L2 <: HList]
      (implicit gen: Generic.Aux[T, L1], mapper: hl.ConstMapper.Aux[C, L1, L2], tp: hl.Tupler[L2]): Aux[T, C, tp.Out] =
      new ConstMapper[T, C] {
        type Out = tp.Out
        def apply(t: T, c: C): tp.Out = tp(mapper(c, gen.to(t)))
      }
  }
/**
* Type class supporting mapping a polymorphic function over this tuple and then folding the result using a
* monomorphic function value.
*
* @author Miles Sabin
*/
trait MapFolder[T, R, P] { // Nb. Not a dependent function signature
def apply(t: T, in: R, op: (R, R) => R): R
}
object MapFolder {
def apply[T, R, P](implicit folder: MapFolder[T, R, P]) = folder
implicit def mapper[T, L <: HList, R, P]
(implicit gen: Generic.Aux[T, L], mapper: hl.MapFolder[L, R, P]): MapFolder[T, R, P] =
new MapFolder[T, R, P] {
def apply(t: T, in: R, op: (R, R) => R): R = mapper(gen.to(t), in, op)
}
}
  /**
   * Type class supporting left-folding a polymorphic binary function over this tuple.
   *
   * `U` is the type of the initial accumulator value; `P` the singleton type of the folding function.
   *
   * @author Miles Sabin
   */
  trait LeftFolder[T, U, P] extends DepFn2[T, U]
  object LeftFolder {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, U, P](implicit folder: LeftFolder[T, U, P]): Aux[T, U, P, folder.Out] = folder
    type Aux[T, U, P, Out0] = LeftFolder[T, U, P] { type Out = Out0 }
    // Derived by delegating to hl.LeftFolder on the tuple's Generic HList representation;
    // no Tupler is needed since the fold result is a single value.
    implicit def folder[T, L <: HList, U, P]
      (implicit gen: Generic.Aux[T, L], folder: hl.LeftFolder[L, U, P]): Aux[T, U, P, folder.Out] =
      new LeftFolder[T, U, P] {
        type Out = folder.Out
        def apply(t: T, u: U): Out = folder(gen.to(t), u)
      }
  }
  /**
   * Type class supporting right-folding a polymorphic binary function over this tuple.
   *
   * `U` is the type of the initial accumulator value; `P` the singleton type of the folding function.
   *
   * @author Miles Sabin
   */
  trait RightFolder[T, U, P] extends DepFn2[T, U]
  object RightFolder {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, U, P](implicit folder: RightFolder[T, U, P]): Aux[T, U, P, folder.Out] = folder
    type Aux[T, U, P, Out0] = RightFolder[T, U, P] { type Out = Out0 }
    // Derived by delegating to hl.RightFolder on the tuple's Generic HList representation;
    // no Tupler is needed since the fold result is a single value.
    implicit def folder[T, L <: HList, U, P]
      (implicit gen: Generic.Aux[T, L], folder: hl.RightFolder[L, U, P]): Aux[T, U, P, folder.Out] =
      new RightFolder[T, U, P] {
        type Out = folder.Out
        def apply(t: T, u: U): Out = folder(gen.to(t), u)
      }
  }
  /**
   * Type class supporting left-reducing a polymorphic binary function over this tuple.
   *
   * Unlike a fold, a reduce takes no initial value and so requires a non-empty tuple.
   *
   * @author Miles Sabin
   */
  trait LeftReducer[T, P] extends DepFn1[T]
  object LeftReducer {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, P](implicit reducer: LeftReducer[T, P]): Aux[T, P, reducer.Out] = reducer
    type Aux[T, P, Out0] = LeftReducer[T, P] { type Out = Out0 }
    // Derived by delegating to hl.LeftReducer on the tuple's Generic HList representation.
    implicit def folder[T, L <: HList, P]
      (implicit gen: Generic.Aux[T, L], folder: hl.LeftReducer[L, P]): Aux[T, P, folder.Out] =
      new LeftReducer[T, P] {
        type Out = folder.Out
        def apply(t: T): Out = folder(gen.to(t))
      }
  }
  /**
   * Type class supporting right-reducing a polymorphic binary function over this tuple.
   *
   * Unlike a fold, a reduce takes no initial value and so requires a non-empty tuple.
   *
   * @author Miles Sabin
   */
  trait RightReducer[T, P] extends DepFn1[T]
  object RightReducer {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, P](implicit reducer: RightReducer[T, P]): Aux[T, P, reducer.Out] = reducer
    type Aux[T, P, Out0] = RightReducer[T, P] { type Out = Out0 }
    // Derived by delegating to hl.RightReducer on the tuple's Generic HList representation.
    implicit def folder[T, L <: HList, P]
      (implicit gen: Generic.Aux[T, L], folder: hl.RightReducer[L, P]): Aux[T, P, folder.Out] =
      new RightReducer[T, P] {
        type Out = folder.Out
        def apply(t: T): Out = folder(gen.to(t))
      }
  }
  /**
   * Type class supporting transposing this tuple.
   *
   * `T` is expected to be a tuple of tuples; `Out` is the transposed tuple of tuples.
   *
   * @author Miles Sabin
   */
  trait Transposer[T] extends DepFn1[T]
  object Transposer {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T](implicit transposer: Transposer[T]): Aux[T, transposer.Out] = transposer
    type Aux[T, Out0] = Transposer[T] { type Out = Out0 }
    // Derived in four steps: convert the outer tuple to an HList (gen), convert each
    // inner tuple to an HList (mpe), transpose at the HList-of-HLists level (tps),
    // convert each inner HList back to a tuple (mtp), then tuple the outer HList (tp).
    implicit def transpose[T, L1 <: HList, L2 <: HList, L3 <: HList, L4 <: HList]
      (implicit
        gen: Generic.Aux[T, L1],
        mpe: hl.Mapper.Aux[productElements.type, L1, L2],
        tps: hl.Transposer.Aux[L2, L3],
        mtp: hl.Mapper.Aux[tupled.type, L3, L4],
        tp: hl.Tupler[L4]
      ): Aux[T, tp.Out] =
      new Transposer[T] {
        type Out = tp.Out
        def apply(t: T): Out = ((gen.to(t) map productElements).transpose map tupled).tupled
      }
  }
  /**
   * Type class supporting zipping this tuple of monomorphic function values with its argument tuple of
   * correspondingly typed function arguments returning the result of each application as a tuple. Available only if
   * there is evidence that the corresponding function and argument elements have compatible types.
   *
   * @author Miles Sabin
   */
  trait ZipApply[FT, AT] extends DepFn2[FT, AT]
  object ZipApply {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[FT, AT](implicit zip: ZipApply[FT, AT]): Aux[FT, AT, zip.Out] = zip
    type Aux[FT, AT, Out0] = ZipApply[FT, AT] { type Out = Out0 }
    // Derived by converting both the function tuple and the argument tuple to HLists,
    // applying pairwise via hl.ZipApply, then tupling the results.
    implicit def zipApply[FT, FL <: HList, AT, AL <: HList, RL <: HList]
      (implicit
        genf: Generic.Aux[FT, FL],
        gena: Generic.Aux[AT, AL],
        zip: hl.ZipApply.Aux[FL, AL, RL],
        tp: hl.Tupler[RL]
      ): Aux[FT, AT, tp.Out] =
      new ZipApply[FT, AT] {
        type Out = tp.Out
        def apply(ft: FT, at: AT): Out = (genf.to(ft) zipApply gena.to(at)).tupled
      }
  }
  /**
   * Type class supporting zipping this tuple with a tuple of tuples returning a tuple of tuples with each
   * element of this tuple prepended to the corresponding tuple element of the argument tuple.
   *
   * @author Miles Sabin
   */
  trait ZipOne[H, T] extends DepFn2[H, T]
  object ZipOne {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[H, T](implicit zip: ZipOne[H, T]): Aux[H, T, zip.Out] = zip
    type Aux[H, T, Out0] = ZipOne[H, T] { type Out = Out0 }
    // Derived by converting the head tuple and the tail tuple-of-tuples to HLists
    // (inner tuples via productElements), zipping one level with hl.ZipOne, then
    // converting the inner HLists back to tuples (mtp) and the outer HList to a tuple (tp).
    implicit def zipOne[HT, HL <: HList, TT, TL <: HList, TLL <: HList, RLL <: HList, RL <: HList]
      (implicit
        genh: Generic.Aux[HT, HL],
        gent: Generic.Aux[TT, TL],
        mpet: hl.Mapper.Aux[productElements.type, TL, TLL],
        zone: hl.ZipOne.Aux[HL, TLL, RLL],
        mtp: hl.Mapper.Aux[tupled.type, RLL, RL],
        tp: hl.Tupler[RL]
      ): Aux[HT, TT, tp.Out] =
      new ZipOne[HT, TT] {
        type Out = tp.Out
        def apply(h: HT, t: TT): Out = ((genh.to(h) zipOne (gent.to(t) map productElements)) map tupled).tupled
      }
  }
  /**
   * Type class supporting zipping a tuple with a constant, resulting in a tuple of tuples of the form
   * ({element from input tuple}, {supplied constant})
   *
   * @author Miles Sabin
   */
  trait ZipConst[T, C] extends DepFn2[T, C]
  object ZipConst {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, C](implicit zip: ZipConst[T, C]): Aux[T, C, zip.Out] = zip
    type Aux[T, C, Out0] = ZipConst[T, C] { type Out = Out0 }
    // Derived via Generic (tuple -> HList), hl.ZipConst and Tupler (HList -> tuple).
    implicit def zipConst[T, C, L1 <: HList, L2 <: HList]
      (implicit gen: Generic.Aux[T, L1], zipper: hl.ZipConst.Aux[C, L1, L2], tp: hl.Tupler[L2]): Aux[T, C, tp.Out] =
      new ZipConst[T, C] {
        type Out = tp.Out
        def apply(t: T, c: C): tp.Out = tp(zipper(c, gen.to(t)))
      }
  }
  /**
   * Type class supporting unification of this tuple.
   *
   * `Out` is a tuple of the same arity with every element widened to the elements' least upper bound.
   *
   * @author Miles Sabin
   */
  trait Unifier[T] extends DepFn1[T]
  object Unifier {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T](implicit unifier: Unifier[T]): Aux[T, unifier.Out] = unifier
    type Aux[T, Out0] = Unifier[T] { type Out = Out0 }
    // Derived via Generic (tuple -> HList), hl.Unifier and Tupler (HList -> tuple).
    implicit def unifier[T, L1 <: HList, L2 <: HList]
      (implicit gen: Generic.Aux[T, L1], unifier: hl.Unifier.Aux[L1, L2], tp: hl.Tupler[L2]): Aux[T, tp.Out] =
      new Unifier[T] {
        type Out = tp.Out
        def apply(t: T): Out = unifier(gen.to(t)).tupled
      }
  }
  /**
   * Type class supporting unification of all elements that are subtypes of `B` in this tuple to `B`, with all other
   * elements left unchanged.
   *
   * @author Miles Sabin
   */
  trait SubtypeUnifier[T, B] extends DepFn1[T]
  object SubtypeUnifier {
    // Summoner; exposes the instance's Out type through the Aux alias.
    def apply[T, B](implicit unifier: SubtypeUnifier[T, B]): Aux[T, B, unifier.Out] = unifier
    type Aux[T, B, Out0] = SubtypeUnifier[T, B] { type Out = Out0 }
    // Derived via Generic (tuple -> HList), hl.SubtypeUnifier and Tupler (HList -> tuple).
    implicit def subtypeUnifier[T, B, L1 <: HList, L2 <: HList]
      (implicit gen: Generic.Aux[T, L1], unifier: hl.SubtypeUnifier.Aux[L1, B, L2], tp: hl.Tupler[L2]): Aux[T, B, tp.Out] =
      new SubtypeUnifier[T, B] {
        type Out = tp.Out
        def apply(t: T): Out = unifier(gen.to(t)).tupled
      }
  }
/**
* Type class supporting computing the type-level Nat corresponding to the length of this tuple.
*
* @author Miles Sabin
*/
trait Length[T] extends DepFn1[T]
object Length {
def apply[T](implicit length: Length[T]): Aux[T, length.Out] = length
type Aux[T, Out0] = Length[T] { type Out = Out0 }
implicit def length[T, L <: HList]
(implicit gen: Generic.Aux[T, L], length: hl.Length[L]): Aux[T, length.Out] =
new Length[T] {
type Out = length.Out
def apply(t: T): Out = length()
}
}
/**
 * Type class supporting conversion of this tuple to a `M` with elements typed as the least upper bound
 * of the types of the elements of this tuple.
 *
 * @author Alexandre Archambault
 */
trait ToTraversable[T, M[_]] extends DepFn1[T] {
  // Least upper bound of the element types; the result type is M[Lub].
  type Lub
  type Out = M[Lub]
}

object ToTraversable {
  // Summoner, fixing Lub to the bound computed by the resolved instance.
  def apply[T, M[_]]
    (implicit toTraversable: ToTraversable[T, M]): Aux[T, M, toTraversable.Lub] = toTraversable

  // Aux alias exposing the dependent Lub type.
  type Aux[T, M[_], Lub0] = ToTraversable[T, M] { type Lub = Lub0 }

  // Unit is treated as the empty tuple, yielding an empty M[Nothing].
  implicit def toTraversableNothing[M[_]](implicit tt: hl.ToTraversable.Aux[HNil, M, Nothing]): Aux[Unit, M, Nothing] =
    new ToTraversable[Unit, M] {
      type Lub = Nothing
      def apply(t: Unit) = tt(HNil)
    }

  // General case: convert the tuple to an HList and delegate to its
  // ToTraversable instance.
  implicit def toTraversable[T, L <: HList, M[_], Lub]
    (implicit gen: Generic.Aux[T, L], toTraversable: hl.ToTraversable.Aux[L, M, Lub]): Aux[T, M, Lub] =
      new ToTraversable[T, M] {
        type Lub = toTraversable.Lub
        def apply(t: T) = gen.to(t).to[M]
      }
}

/**
 * Type class supporting conversion of this tuple to a `List` with elements typed as the least upper bound
 * of the types of the elements of this tuple.
 *
 * Provided for backward compatibility.
 *
 * @author Miles Sabin
 */
trait ToList[T, Lub] extends DepFn1[T]

object ToList {
  // Aux alias exposing the dependent Out type.
  type Aux[T, Lub, Out0] = ToList[T, Lub] { type Out = Out0 }

  // Thin wrapper delegating to ToTraversable with M = List.
  // NOTE(review): the L type parameter appears unused here — presumably a
  // leftover from an earlier formulation; confirm before removing.
  implicit def toList[T, L <: HList, Lub]
    (implicit toTraversable: ToTraversable.Aux[T, List, Lub]): Aux[T, Lub, List[Lub]] =
      new ToList[T, Lub] {
        type Out = List[Lub]
        def apply(t: T) = toTraversable(t)
      }
}

/**
 * Type class supporting conversion of this tuple to an `Array` with elements typed as the least upper bound
 * of the types of the elements of this tuple.
 *
 * Provided for backward compatibility.
 *
 * @author Miles Sabin
 */
trait ToArray[T, Lub] extends DepFn1[T]

object ToArray {
  // Aux alias exposing the dependent Out type.
  type Aux[T, Lub, Out0] = ToArray[T, Lub] { type Out = Out0 }

  // Thin wrapper delegating to ToTraversable with M = Array.
  implicit def toArray[T, L <: HList, Lub]
    (implicit toTraversable: ToTraversable.Aux[T, Array, Lub]): Aux[T, Lub, Array[Lub]] =
      new ToArray[T, Lub] {
        type Out = Array[Lub]
        def apply(t: T) = toTraversable(t)
      }
}

/**
 * Type class supporting conversion of this tuple to a `Sized[M[Lub], N]` with elements typed as
 * the least upper bound Lub of the types of the elements of this tuple.
 *
 * @author Alexandre Archambault
 */
trait ToSized[T, M[_]] extends DepFn1[T]

object ToSized {
  // Summoner.
  def apply[T, M[_]](implicit toSized: ToSized[T, M]): Aux[T, M, toSized.Out] = toSized

  // Aux alias exposing the dependent Out type.
  type Aux[T, M[_], Out0] = ToSized[T, M] { type Out = Out0 }

  // Tuple -> HList via Generic, then delegate to the HList ToSized.
  implicit def toSized[T, L <: HList, M[_]]
    (implicit gen: Generic.Aux[T, L], toSized: hl.ToSized[L, M]): Aux[T, M, toSized.Out] =
      new ToSized[T, M] {
        type Out = toSized.Out
        def apply(t: T) = gen.to(t).toSized[M]
      }
}
/**
 * Type class witnessing that this tuple can be collected with a 'Poly' to produce a new tuple
 *
 * @author Stacy Curl
 */
trait Collect[T, P <: Poly] extends DepFn1[T]

object Collect {
  // Summoner.
  def apply[T, P <: Poly](implicit collect: Collect[T, P]): Aux[T, P, collect.Out] = collect

  // Aux alias exposing the dependent Out type.
  type Aux[T, P <: Poly, Out0] = Collect[T, P] { type Out = Out0 }

  // Tuple -> HList via Generic, collect with the Poly, then re-tuple.
  implicit def collect[T, L <: HList, L2 <: HList, P <: Poly]
    (implicit gen: Generic.Aux[T, L], collect: hl.Collect.Aux[L, P, L2], tp: hl.Tupler[L2])
      : Aux[T, P, tp.Out] = new Collect[T, P] {
        type Out = tp.Out
        def apply(t: T): tp.Out = tp(collect(gen.to(t)))
      }
}

/**
 * Type class supporting the calculation of every permutation of this tuple
 *
 * @author Stacy Curl
 */
trait Permutations[T] extends DepFn1[T]

object Permutations {
  // Summoner.
  def apply[T](implicit permutations: Permutations[T]): Aux[T, permutations.Out] = permutations

  // Aux alias exposing the dependent Out type.
  type Aux[T, Out0] = Permutations[T] { type Out = Out0 }

  // Permute the HList representation, map each resulting HList back to a
  // tuple (tupled), then tuple the outer HList of permutations.
  implicit def permutations[T, L <: HList, L2 <: HList, L3 <: HList]
    (implicit gen: Generic.Aux[T, L], collect: hl.Permutations.Aux[L, L2],
      mapper: hl.Mapper.Aux[tupled.type, L2, L3], tp: hl.Tupler[L3]
    ): Aux[T, tp.Out] = new Permutations[T] {
      type Out = tp.Out
      def apply(t: T): Out = tp(collect(gen.to(t)).map(tupled))
    }
}

/**
 * Type class supporting rotating a tuple left
 *
 * @author Stacy Curl
 */
trait RotateLeft[T, N <: Nat] extends DepFn1[T]

object RotateLeft {
  // Summoner.
  def apply[T, N <: Nat](implicit rotateLeft: RotateLeft[T, N]): Aux[T, N, rotateLeft.Out] = rotateLeft

  // Aux alias exposing the dependent Out type.
  type Aux[T, N <: Nat, Out0] = RotateLeft[T, N] { type Out = Out0 }

  // Tuple -> HList via Generic, rotate left by N, then re-tuple.
  implicit def tupleRotateLeft[T, N <: Nat, L <: HList, L2 <: HList]
    (implicit gen: Generic.Aux[T, L], rotateLeft: hl.RotateLeft.Aux[L, N, L2], tp: hl.Tupler[L2])
      : Aux[T, N, tp.Out] = new RotateLeft[T, N] {
        type Out = tp.Out
        def apply(t: T): Out = tp(rotateLeft(gen.to(t)))
      }
}

/**
 * Type class supporting rotating a tuple right
 *
 * @author Stacy Curl
 */
trait RotateRight[T, N <: Nat] extends DepFn1[T]

object RotateRight {
  // Summoner.
  def apply[T, N <: Nat](implicit rotateRight: RotateRight[T, N]): Aux[T, N, rotateRight.Out] = rotateRight

  // Aux alias exposing the dependent Out type.
  type Aux[T, N <: Nat, Out0] = RotateRight[T, N] { type Out = Out0 }

  // Tuple -> HList via Generic, rotate right by N, then re-tuple.
  implicit def tupleRotateRight[T, N <: Nat, L <: HList, L2 <: HList]
    (implicit gen: Generic.Aux[T, L], rotateRight: hl.RotateRight.Aux[L, N, L2], tp: hl.Tupler[L2])
      : Aux[T, N, tp.Out] = new RotateRight[T, N] {
        type Out = tp.Out
        def apply(t: T): Out = tp(rotateRight(gen.to(t)))
      }
}
/**
 * Type class supporting left-scanning a binary polymorphic function over this tuple.
 *
 * @author Owein Reese
 */
trait LeftScanner[T, In, P <: Poly] extends DepFn2[T, In]

object LeftScanner{
  // Summoner.
  def apply[T, In, P <: Poly](implicit scanL: LeftScanner[T, In, P]): Aux[T, In, P, scanL.Out] = scanL

  // Aux alias exposing the dependent Out type.
  type Aux[T, In, P <: Poly, Out0] = LeftScanner[T, In, P] { type Out = Out0 }

  // Tuple -> HList via Generic, left-scan with P from the seed `in`, re-tuple.
  implicit def scanner[T, L <: HList, In, P <: Poly, R <: HList]
    (implicit gen: Generic.Aux[T, L],
      scanL: hl.LeftScanner.Aux[L, In, P, R],
      tp: hl.Tupler[R]
    ): Aux[T, In, P, tp.Out] =
      new LeftScanner[T, In, P] {
        type Out = tp.Out
        def apply(t: T, in: In): Out = tp(scanL(gen.to(t), in))
      }
}

/**
 * Type class supporting right-scanning a binary polymorphic function over this tuple.
 *
 * @author Owein Reese
 */
trait RightScanner[T, In, P <: Poly] extends DepFn2[T, In]

object RightScanner{
  // Summoner.
  def apply[T, In, P <: Poly](implicit scanR: RightScanner[T, In, P]): Aux[T, In, P, scanR.Out] = scanR

  // Aux alias exposing the dependent Out type.
  type Aux[T, In, P <: Poly, Out0] = RightScanner[T, In, P] { type Out = Out0 }

  // Tuple -> HList via Generic, right-scan with P from the seed `in`, re-tuple.
  implicit def scanner[T, L <: HList, In, P <: Poly, R <: HList]
    (implicit gen: Generic.Aux[T, L],
      scanR: hl.RightScanner.Aux[L, In, P, R],
      tp: hl.Tupler[R]
    ): Aux[T, In, P, tp.Out] =
      new RightScanner[T, In, P] {
        type Out = tp.Out
        def apply(t: T, in: In): Out = tp(scanR(gen.to(t), in))
      }
}

/**
 * Type class supporting producing a tuple of shape `N` filled with elements of type `A`.
 *
 * @author Alexandre Archambault
 */
trait Fill[N, A] extends DepFn1[A]

object Fill {
  // Summoner.
  def apply[N, A](implicit fill: Fill[N, A]) = fill

  // Aux alias exposing the dependent Out type.
  type Aux[N, A, Out0] = Fill[N, A] { type Out = Out0 }

  // One-dimensional fill: build an HList of N copies, then tuple it.
  // NOTE(review): the P type parameter appears unused here — presumably a
  // leftover; confirm before removing.
  implicit def fill1[N <: Nat, A, L <: HList, P]
    (implicit fill: hlist.Fill.Aux[N, A, L], tupler: hlist.Tupler[L]): Aux[N, A, tupler.Out] =
      new Fill[N, A] {
        type Out = tupler.Out
        def apply(elem: A) = tupler(fill(elem))
      }

  // Two-dimensional fill: an (N1, N2) shape is N1 copies of an N2-fill.
  implicit def fill2[A, N1 <: Nat, N2 <: Nat, SubOut]
    (implicit subFill: Fill.Aux[N2, A, SubOut], fill: Fill[N1, SubOut]): Aux[(N1, N2), A, fill.Out] =
      new Fill[(N1, N2), A] {
        type Out = fill.Out
        def apply(elem: A) = fill(subFill(elem))
      }
}

/**
 * Type class supporting the patching of a tuple.
 *
 * @author Owein Reese
 */
trait Patcher[N <: Nat, M <: Nat, T, InT] extends DepFn2[T, InT]

object Patcher{
  // Summoner.
  def apply[N <: Nat, M <: Nat, T, InT](implicit patch: Patcher[N, M, T, InT]) = patch

  // Convert both the target tuple and the patch tuple to HLists, apply the
  // HList Patcher (replace M elements at index N), then re-tuple.
  implicit def tuplePatch[N <: Nat, M <: Nat, T, L <: HList, InT, InL <: HList, OutL <: HList]
    (implicit gen: Generic.Aux[T, L],
      genIn: Generic.Aux[InT, InL],
      patch: hl.Patcher.Aux[N, M, L, InL, OutL],
      tp: hl.Tupler[OutL]) =
      new Patcher[N, M, T, InT]{
        type Out = tp.Out
        def apply(t: T, in: InT) = tp(patch(gen.to(t), genIn.to(in)))
      }
}
}
| japgolly/shapeless | core/src/main/scala/shapeless/ops/tuples.scala | Scala | apache-2.0 | 37,785 |
#!/usr/bin/env bash
# Release script: build from a clean state and publish artifacts to Bintray.
# -DcommitTag=true presumably makes the build tag the current commit for the
# release — confirm against the Gradle build scripts.
./gradlew clean assemble bintray -DcommitTag=true
# Push the commit together with any tag the build created.
git push --follow-tags
| alibaba/atlas | atlas-core/publish.sh | Shell | apache-2.0 | 93 |
/*
* Copyright © 2013-2018 camunda services GmbH and various authors (info@camunda.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.impl.bpmn.parser;
import java.io.Serializable;
/**
* @author Daniel Meyer
*/
public class CompensateEventDefinition implements Serializable {

  private static final long serialVersionUID = 1L;

  // Id of the activity this compensation refers to, as parsed from the BPMN
  // compensateEventDefinition's activityRef attribute. NOTE(review): a null
  // value presumably means "compensate all eligible activities in scope" —
  // confirm against the BPMN parser that populates this object.
  protected String activityRef;

  // Whether the throwing event waits for compensation to complete before
  // continuing (BPMN waitForCompletion attribute).
  protected boolean waitForCompletion;

  /** @return the id of the referenced activity, or {@code null} if none was set */
  public String getActivityRef() {
    return activityRef;
  }

  /** @param activityRef the id of the activity to compensate */
  public void setActivityRef(String activityRef) {
    this.activityRef = activityRef;
  }

  /** @return whether the throwing event waits for compensation to complete */
  public boolean isWaitForCompletion() {
    return waitForCompletion;
  }

  /** @param waitForCompletion whether to wait for compensation to complete */
  public void setWaitForCompletion(boolean waitForCompletion) {
    this.waitForCompletion = waitForCompletion;
  }

}
| xasx/camunda-bpm-platform | engine/src/main/java/org/camunda/bpm/engine/impl/bpmn/parser/CompensateEventDefinition.java | Java | apache-2.0 | 1,307 |
# Geranium sanguineum var. latilobum Rochel VARIETY
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | mdoering/backbone | life/Plantae/Magnoliophyta/Magnoliopsida/Geraniales/Geraniaceae/Geranium/Geranium sanguineum/ Syn. Geranium sanguineum latilobum/README.md | Markdown | apache-2.0 | 198 |
#
# Copyright 2012 Christian Giacomi http://www.christiangiacomi.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module R2do

  # Abstract base class for r2do commands. Concrete commands subclass this
  # and override #execute (and optionally #help).
  class Command

    # @return [String] the value for the command switch.
    attr_reader :short

    # @return [String] the name of this command.
    attr_reader :extended

    # @return [String] the description for the command.
    attr_reader :description

    # Creates an instance of a Command.
    #
    # @param [String] short the short option name for this command
    # @param [String] extended the full option name for this command
    # @param [String] description the command's description
    # @raise [ArgumentError] if any of the arguments is nil
    def initialize(short, extended, description)
      # `||` instead of low-precedence `or` so the guard binds as intended.
      if short.nil? || extended.nil? || description.nil?
        raise ArgumentError, 'short, extended and description are required'
      end

      @short = short
      @extended = extended
      @description = description
    end

    # Executes the callback of this command.
    #
    # @param [Array] args the collection of arguments
    # @return [void]
    # @raise [ScriptError] always; subclasses must override this method.
    def execute(args)
      raise ScriptError, "Cannot call execute on an abstract command"
    end

    # @return [String] a generic help message; subclasses should override.
    def help()
      "No help available for this command."
    end

    # Returns a string representation of this Command.
    #
    # @return [String] the representation of this Command
    def to_s()
      "%2s, %-10s \t# %s" % [@short, @extended, @description]
    end

  end
end
<?php
require_once "logging.php";
class Saasu
{
  // Base endpoint of the Saasu REST API (version r1).
  private $baseUrl = "https://secure.saasu.com/webservices/rest/r1/";
  // Uid of the Saasu file (organisation) to operate on.
  private $fileUid;
  // Logging helper (project-local logging.php); writes one file per day.
  private $log;
  // REST resource name; set by each public method before sending a request.
  private $resourceType = "Tasks";
  // Web-service access key used to authenticate every request.
  private $wsAccessKey;

  /**
   * Creates the API client and opens today's log file.
   *
   * @param string $wsAccessKey Saasu web-service access key
   * @param string $fileUid     uid of the Saasu data file
   */
  function __construct($wsAccessKey, $fileUid)
  {
    $this->wsAccessKey = $wsAccessKey;
    $this->fileUid = $fileUid;

    $this->log = new Logging();
    // set path and name of log file (one file per day)
    $time = @date('Y-m-d');
    $this->log->lfile("./logs/$time.txt");
  }

  function __destruct()
  {
    // close log file
    $this->log->lclose();
  }

  /**
   * Looks up a contact uid by email address.
   *
   * @param string $emailAddress email to search for
   * @return mixed the matching contactUid element, or null when not found
   */
  function getContact($emailAddress)
  {
    $contactUid = null;

    $xml = $this->getContactList();
    if ($xml)
    {
      foreach ($xml->children()->children() as $contact)
      {
        if ($contact->emailAddress == $emailAddress)
        {
          $contactUid = $contact->contactUid;
          break;
        }
      }
    }

    return $contactUid;
  }

  /**
   * Fetches the full contact list.
   *
   * @param bool $rawXml when true, return the raw response body
   * @return mixed SimpleXMLElement on success (default); the raw response
   *               body when $rawXml is true or the request failed
   */
  function getContactList($rawXml = false)
  {
    $this->resourceType = "contactList";

    $response = $this->sendRequest(false, "");
    if (($rawXml == false) && ($response['errno'] == 0))
    {
      $xml = simplexml_load_string($response['content']);
    }
    else
    {
      $xml = $response['content'];
    }

    return $xml;
  }

  /**
   * Inserts a new (active, customer) contact.
   *
   * @param string $givenName    contact's given name
   * @param string $familyName   contact's family name
   * @param string $emailAddress contact's email
   * @param string $phone        contact's main phone number
   * @return mixed the uid of the inserted contact, or null on failure
   */
  function insertContact($givenName, $familyName, $emailAddress, $phone)
  {
    $contactUid = null;
    $this->resourceType = "tasks";

    $xmlRequest = "<?xml version=\"1.0\" encoding=\"utf-8\"?>
<tasks>
<insertContact>
<contact uid=\"0\">
<givenName><![CDATA[$givenName]]></givenName>
<familyName><![CDATA[$familyName]]></familyName>
<email><![CDATA[$emailAddress]]></email>
<mainPhone><![CDATA[$phone]]></mainPhone>
<isActive>true</isActive>
<isPartner>false</isPartner>
<isCustomer>true</isCustomer>
<isSupplier>false</isSupplier>
</contact>
</insertContact>
</tasks>";

    $response = $this->sendRequest(true, $xmlRequest);
    if ($response['errno'] == 0)
    {
      // NOTE(review): assumes a successful transport always yields an
      // insertContactResult element; an API-level error response would
      // trigger a PHP notice here — confirm against the Saasu API.
      $xml = simplexml_load_string($response['content']);
      foreach ($xml->insertContactResult[0]->attributes() as $name => $value)
      {
        if ($name == "insertedEntityUid")
        {
          $contactUid = $value;
          break;
        }
      }
    }

    return $contactUid;
  }

  // not being used yet
  function updateContact()
  {
    $this->resourceType = "tasks";
    // TODO - finish later on, if needed. The request below is a sample
    // payload only; it is never sent.
    $xmlRequest = "<?xml version=\"1.0\" encoding=\"utf-8\"?>
<tasks>
<updateContact>
<contact uid=\"22730\" lastUpdatedUid=\"AAAAAAAVA8A=\">
<givenName><![CDATA[Mary]]></givenName>
<familyName><![CDATA[Smith]]></familyName>
<email><![CDATA[mary.smith@mrandmrssmith.com.au]]></email>
<mainPhone><![CDATA[02 4444 4444]]></mainPhone>
<isActive>true</isActive>
</contact>
</updateContact>
</tasks>";
  }

  /**
   * Creates a single-item sale invoice and emails it to the contact.
   *
   * @param mixed  $contactUid  uid of the contact to invoice
   * @param mixed  $accountUid  uid of the income account to book against
   * @param string $dateDue     due date in d/m/Y format
   * @param mixed  $totalAmount total amount including tax
   * @param string $purchaseOrderNumberExtension purchase order number
   * @param string $description invoice summary / line item description
   * @param string $notes       invoice notes
   * @return mixed uid of the inserted invoice, or "" on failure
   */
  function insertInvoice($contactUid, $accountUid, $dateDue, $totalAmount,
    $purchaseOrderNumberExtension, $description, $notes)
  {
    $invoiceUid = "";
    $this->resourceType = "Tasks";

    // NOTE(review): createFromFormat() returns false for a malformed date,
    // which would make the format() call below fail — confirm callers
    // always pass d/m/Y dates.
    $formatDate = DateTime::createFromFormat('d/m/Y', $dateDue);
    $dateDue = $formatDate->format('Y-m-d');

    $xmlRequest = "<insertInvoice emailToContact=\"true\">
<invoice uid=\"0\">
<transactionType>S</transactionType>
<date><![CDATA[$dateDue]]></date>
<contactUid>$contactUid</contactUid>
<folderUid>0</folderUid>
<summary><![CDATA[$description]]></summary>
<notes><![CDATA[$notes]]></notes>
<requiresFollowUp>false</requiresFollowUp>
<dueOrExpiryDate><![CDATA[$dateDue]]></dueOrExpiryDate>
<layout>S</layout>
<status>I</status>
<invoiceNumber><Auto Number></invoiceNumber>
<purchaseOrderNumber><![CDATA[$purchaseOrderNumberExtension]]></purchaseOrderNumber>
<invoiceItems>
<serviceInvoiceItem>
<description><![CDATA[$description]]></description>
<!--accountUid>1073198</accountUid-->
<accountUid>$accountUid</accountUid>
<taxCode>G1</taxCode>
<totalAmountInclTax>$totalAmount</totalAmountInclTax>
</serviceInvoiceItem>
</invoiceItems>
<isSent>false</isSent>
</invoice>
<createAsAdjustmentNote>false</createAsAdjustmentNote>
</insertInvoice>";

    $response = $this->sendRequest(true, $xmlRequest);
    if ($response['errno'] == 0)
    {
      $xml = simplexml_load_string($response['content']);
      if ($xml->errors)
      {
        // API-level failure: collect every error message for the report.
        $errors = "begin: ";
        foreach ($xml->errors as $error)
        {
          $errors = $errors . " " . $error->message;
        }

        $this->reportInvoiceError($errors, $xmlRequest, $response);
      }
      else
      {
        foreach ($xml->insertInvoiceResult[0]->attributes() as $name => $value)
        {
          if ($name == "insertedEntityUid")
          {
            $invoiceUid = $value;
            break;
          }
        }
      }
    }
    else
    {
      // Transport-level (cURL) failure. Previously this branch referenced
      // an undefined $errors variable; report the cURL error instead.
      $this->reportInvoiceError($response['errmsg'], $xmlRequest, $response);
    }

    return $invoiceUid;
  }

  /**
   * Logs an invoice-creation failure and emails the details to support.
   *
   * @param string $errors     human-readable error description
   * @param string $xmlRequest the request payload that failed
   * @param array  $response   the full response array from sendRequest()
   * @return void
   */
  private function reportInvoiceError($errors, $xmlRequest, $response)
  {
    $this->log->lwrite("problem with creating invoice, (xml-errors) errors: $errors");
    $output = var_export($response, true);
    $globalRequest = var_export($_REQUEST, true);
    $this->log->lwrite("xmlRequest: $xmlRequest");
    $this->log->lwrite("request: $globalRequest");
    $this->log->lwrite("response: $output");

    $message = "problem with creating invoice:\n".
      "errors: (xml-errors) $errors\n".
      "xmlRequest: $xmlRequest\n".
      "request: $globalRequest\n".
      "response: $output\n";
    mail("james@methodit.co.jp, jamesjohnmcguire@gmail.com, daniel@methodit.co.jp",
      "Sassu: problem with creating invoice", $message,
      "From: Aussie Airport Parking <bookings@aussieairportparking.com.au>\nX-Mailer: PHP/" . phpversion());
  }

  /**
   * Builds the request url for the current resource, including credentials.
   *
   * @return string the full request url
   */
  private function makeUrl()
  {
    return "$this->baseUrl$this->resourceType?wsaccesskey=$this->wsAccessKey&fileuid=$this->fileUid";
  }

  /**
   * Sends a GET or POST request to the current resource endpoint.
   *
   * @param bool   $isPost whether to send a POST (true) or a GET (false)
   * @param string $fields POST body; ignored for GET requests
   * @return array curl_getinfo() array extended with 'errno', 'errmsg'
   *               and 'content' entries
   */
  private function sendRequest($isPost, $fields)
  {
    $url = $this->makeUrl();
    $curlObject = curl_init($url);

    if (true == $isPost)
    {
      curl_setopt($curlObject, CURLOPT_POST, 1);
      curl_setopt($curlObject, CURLOPT_POSTFIELDS, $fields);
    }

    curl_setopt($curlObject, CURLOPT_RETURNTRANSFER, true); // return web page
    curl_setopt($curlObject, CURLOPT_HEADER, false);        // don't return headers
    curl_setopt($curlObject, CURLOPT_FOLLOWLOCATION, true); // follow redirects
    curl_setopt($curlObject, CURLOPT_ENCODING, "");         // handle all encodings
    curl_setopt($curlObject, CURLOPT_USERAGENT, "Saasu Client"); // who am i
    curl_setopt($curlObject, CURLOPT_AUTOREFERER, true);    // set referer on redirect
    curl_setopt($curlObject, CURLOPT_CONNECTTIMEOUT, 120);  // timeout on connect
    curl_setopt($curlObject, CURLOPT_TIMEOUT, 120);         // timeout on response
    curl_setopt($curlObject, CURLOPT_MAXREDIRS, 10);        // stop after 10 redirects
    curl_setopt($curlObject, CURLOPT_SSL_VERIFYPEER, TRUE);
    curl_setopt($curlObject, CURLINFO_HEADER_OUT, TRUE);

    $content = curl_exec($curlObject);
    $err = curl_errno($curlObject);
    $errmsg = curl_error($curlObject);
    $header = curl_getinfo($curlObject);
    curl_close($curlObject);

    $header['errno'] = $err;
    $header['errmsg'] = $errmsg;
    $header['content'] = $content;

    return $header;
  }
} // end class
?>
| jamesjohnmcguire/Saasu | Saasu.php | PHP | apache-2.0 | 8,552 |
//src: test/specs/fixtures/whitespace.tag
// Auto-generated riot-compiler output for the fixture above — do not edit by
// hand; regenerate from the source .tag instead.
// NOTE(review): the ⁗ characters are presumably the compiler's quote
// placeholders used by this fixture's expected output — confirm against the
// compiler's escaping rules before "fixing" them.
riot.tag2('my-tag', '<div style=" top:0; left:0" a=" " expr="{{ foo:\'bar\', bar:⁗\'⁗ }}"> Foo\' </div> <p></p>', 'p { display: none; }', 'style=" top:0; left:0" a=" " expr="{{ foo:⁗bar⁗ }}"', function(opts) {
this.click = function(e)
{}.bind(this)
});
| eric1foard/riot-pvfm | node_modules/riot-compiler/test/specs/expect/whitespace.js | JavaScript | apache-2.0 | 335 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_31) on Tue Mar 17 10:23:13 EDT 2015 -->
<title>ShapeBug</title>
<meta name="date" content="2015-03-17">
<link rel="stylesheet" type="text/css" href="stylesheet.css" title="Style">
<script type="text/javascript" src="script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="ShapeBug";
}
}
catch(err) {
}
//-->
var methods = {"i0":10,"i1":6,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10};
var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"],8:["t4","Concrete Methods"]};
var altColor = "altColor";
var rowColor = "rowColor";
var tableTab = "tableTab";
var activeTableTab = "activeTableTab";
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="deprecated-list.html">Deprecated</a></li>
<li><a href="index-all.html">Index</a></li>
<li><a href="help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev Class</li>
<li><a href="ZBugInherited.html" title="class in <Unnamed>"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="index.html?ShapeBug.html" target="_top">Frames</a></li>
<li><a href="ShapeBug.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li><a href="#constructor.summary">Constr</a> | </li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li><a href="#constructor.detail">Constr</a> | </li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<h2 title="Class ShapeBug" class="title">Class ShapeBug</h2>
</div>
<div class="contentContainer">
<ul class="inheritance">
<li>java.lang.Object</li>
<li>
<ul class="inheritance">
<li>info.gridworld.actor.Actor</li>
<li>
<ul class="inheritance">
<li>info.gridworld.actor.Bug</li>
<li>
<ul class="inheritance">
<li>ShapeBug</li>
</ul>
</li>
</ul>
</li>
</ul>
</li>
</ul>
<div class="description">
<ul class="blockList">
<li class="blockList">
<dl>
<dt>Direct Known Subclasses:</dt>
<dd><a href="ZBugInherited.html" title="class in <Unnamed>">ZBugInherited</a></dd>
</dl>
<hr>
<br>
<pre>public abstract class <span class="typeNameLabel">ShapeBug</span>
extends info.gridworld.actor.Bug</pre>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- ======== CONSTRUCTOR SUMMARY ======== -->
<ul class="blockList">
<li class="blockList"><a name="constructor.summary">
<!-- -->
</a>
<h3>Constructor Summary</h3>
<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation">
<caption><span>Constructors</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Constructor and Description</th>
</tr>
<tr class="altColor">
<td class="colOne"><code><span class="memberNameLink"><a href="ShapeBug.html#ShapeBug-int-">ShapeBug</a></span>(int sideLength)</code>
<div class="block">Constructs a generic shape bug.</div>
</td>
</tr>
</table>
</li>
</ul>
<!-- ========== METHOD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="method.summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd"> </span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd"> </span></span><span id="t3" class="tableTab"><span><a href="javascript:show(4);">Abstract Methods</a></span><span class="tabEnd"> </span></span><span id="t4" class="tableTab"><span><a href="javascript:show(8);">Concrete Methods</a></span><span class="tabEnd"> </span></span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tr id="i0" class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="ShapeBug.html#act--">act</a></span>()</code>
<div class="block">Moves to the next location of the shape.</div>
</td>
</tr>
<tr id="i1" class="rowColor">
<td class="colFirst"><code>abstract void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="ShapeBug.html#endOfSide--">endOfSide</a></span>()</code>
<div class="block">Method for determining whether an open shape has reached the
end of its side, thus determining whether or not it has to
turn its direction.</div>
</td>
</tr>
<tr id="i2" class="altColor">
<td class="colFirst"><code>int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="ShapeBug.html#getRotateCount--">getRotateCount</a></span>()</code>
<div class="block">Access the value in <code>rotateCount</code>.</div>
</td>
</tr>
<tr id="i3" class="rowColor">
<td class="colFirst"><code>int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="ShapeBug.html#getSideLength--">getSideLength</a></span>()</code>
<div class="block">Access the value in <code>sideLength</code>.</div>
</td>
</tr>
<tr id="i4" class="altColor">
<td class="colFirst"><code>int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="ShapeBug.html#getSteps--">getSteps</a></span>()</code>
<div class="block">Access the value in <code>steps</code>.</div>
</td>
</tr>
<tr id="i5" class="rowColor">
<td class="colFirst"><code>boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="ShapeBug.html#keepMoving--">keepMoving</a></span>()</code>
<div class="block">Determine whether or not the bug should continue to move, if
necessary.</div>
</td>
</tr>
<tr id="i6" class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="ShapeBug.html#setSideLength-int-">setSideLength</a></span>(int sideLength)</code>
<div class="block">Set the value in <code>sideLength</code>.</div>
</td>
</tr>
</table>
<ul class="blockList">
<li class="blockList"><a name="methods.inherited.from.class.info.gridworld.actor.Bug">
<!-- -->
</a>
<h3>Methods inherited from class info.gridworld.actor.Bug</h3>
<code>canMove, move, turn</code></li>
</ul>
<ul class="blockList">
<li class="blockList"><a name="methods.inherited.from.class.info.gridworld.actor.Actor">
<!-- -->
</a>
<h3>Methods inherited from class info.gridworld.actor.Actor</h3>
<code>getColor, getDirection, getGrid, getLocation, moveTo, putSelfInGrid, removeSelfFromGrid, setColor, setDirection, toString</code></li>
</ul>
<ul class="blockList">
<li class="blockList"><a name="methods.inherited.from.class.java.lang.Object">
<!-- -->
</a>
<h3>Methods inherited from class java.lang.Object</h3>
<code>clone, equals, finalize, getClass, hashCode, notify, notifyAll, wait, wait, wait</code></li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
<div class="details">
<ul class="blockList">
<li class="blockList">
<!-- ========= CONSTRUCTOR DETAIL ======== -->
<ul class="blockList">
<li class="blockList"><a name="constructor.detail">
<!-- -->
</a>
<h3>Constructor Detail</h3>
<a name="ShapeBug-int-">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>ShapeBug</h4>
<pre>public ShapeBug(int sideLength)</pre>
<div class="block">Constructs a generic shape bug.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>sideLength</code> - the side length</dd>
</dl>
</li>
</ul>
</li>
</ul>
<!-- ============ METHOD DETAIL ========== -->
<ul class="blockList">
<li class="blockList"><a name="method.detail">
<!-- -->
</a>
<h3>Method Detail</h3>
<a name="act--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>act</h4>
<pre>public void act()</pre>
<div class="block">Moves to the next location of the shape.</div>
<dl>
<dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
<dd><code>act</code> in class <code>info.gridworld.actor.Bug</code></dd>
</dl>
</li>
</ul>
<a name="keepMoving--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>keepMoving</h4>
<pre>public boolean keepMoving()</pre>
<div class="block">Determine whether or not the bug should continue to move, if
necessary. Ultimately, it is up to the child class to decide
how to implement this. If a shape does not need this method,
then it will default to true and have no effect on the class.</div>
</li>
</ul>
<a name="endOfSide--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>endOfSide</h4>
<pre>public abstract void endOfSide()</pre>
<div class="block">Method for determining whether an open shape has reached the
end of its side, thus determining whether or not it has to
turn its direction. This is up to the child to truly define.</div>
</li>
</ul>
<a name="getRotateCount--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getRotateCount</h4>
<pre>public int getRotateCount()</pre>
<div class="block">Access the value in <code>rotateCount</code>.</div>
</li>
</ul>
<a name="getSideLength--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getSideLength</h4>
<pre>public int getSideLength()</pre>
<div class="block">Access the value in <code>sideLength</code>.</div>
</li>
</ul>
<a name="getSteps--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getSteps</h4>
<pre>public int getSteps()</pre>
<div class="block">Access the value in <code>steps</code>.</div>
</li>
</ul>
<a name="setSideLength-int-">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>setSideLength</h4>
<pre>public void setSideLength(int sideLength)</pre>
<div class="block">Set the value in <code>sideLength</code>.</div>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
</div>
<!-- ========= END OF CLASS DATA ========= -->
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="deprecated-list.html">Deprecated</a></li>
<li><a href="index-all.html">Index</a></li>
<li><a href="help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev Class</li>
<li><a href="ZBugInherited.html" title="class in <Unnamed>"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="index.html?ShapeBug.html" target="_top">Frames</a></li>
<li><a href="ShapeBug.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li><a href="#constructor.summary">Constr</a> | </li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li><a href="#constructor.detail">Constr</a> | </li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
| jflory7/SFHSAPCompSci2015 | src/main/java/com/justinwflory/labs/gridworld/projects/boxBug/doc/ShapeBug.html | HTML | apache-2.0 | 13,285 |
# Impatiens holstii Engl. & Warb. ex Engl. SPECIES
#### Status
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | mdoering/backbone | life/Plantae/Magnoliophyta/Magnoliopsida/Ericales/Balsaminaceae/Impatiens/Impatiens holstii/README.md | Markdown | apache-2.0 | 190 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.