repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
FREEZX/StudentHackathon
|
public/js/stores/LibraryStore.js
|
<reponame>FREEZX/StudentHackathon<filename>public/js/stores/LibraryStore.js<gh_stars>0
'use strict';

// Mithril supplies m.prop getter/setters and the redraw-computation hooks.
var m = require('mithril.elements');
var _ = require('lodash');

// Client-side store holding the login flag and the user's book library.
var LibraryStore = {
  loggedin: m.prop(false),
  library: []
};

// (Re)populate LibraryStore.library with the hard-coded demo data.
// Wrapped in start/endComputation so Mithril redraws once data is set.
// TODO: replace the hard-coded list with a real request, e.g.:
//   primus.request('/article/list').then(function(data){
//     LibraryStore.articles = data;
//     m.endComputation();
//   });
LibraryStore.loadLibrary = function () {
  m.startComputation();
  var bookNames = [
    '<NAME>',
    '<NAME>',
    '<NAME>',
    'Оперативни системи',
    '<NAME>',
    '<NAME>'
  ];
  LibraryStore.library = _.map(bookNames, function (bookName, index) {
    return {
      _id: String(index),
      name: bookName
    };
  });
  m.endComputation();
};

// Populate the store immediately when the module is loaded.
LibraryStore.loadLibrary();

module.exports = LibraryStore;
|
dev-raul/facul-fitCard-mobile
|
src/pages/DashBoard/ItemListStudant/styles.js
|
<filename>src/pages/DashBoard/ItemListStudant/styles.js
// Styled components for one student row in the dashboard list.
// NOTE(review): uses styled-components/native, so these render as React
// Native primitives (TouchableOpacity / View / Text).
import styled from 'styled-components/native';
// Tappable row container: children laid out horizontally with space between.
export const Container = styled.TouchableOpacity`
align-self: stretch;
flex-direction: row;
align-items: center;
justify-content: space-between;
margin: 3px 0px;
padding: 5px;
`;
// Left-hand group (e.g. avatar + name) inside the row.
export const StudantView = styled.View`
align-items: center;
flex-direction: row;
`;
// Student label text, offset from the element to its left.
export const StudantText = styled.Text`
margin-left: 15px;
font-size: 16px;
font-weight: 400;
`;
|
MissThee/springboot-mybatis-jwt-shiro
|
main-project/manage-form/src/main/java/com/github/form/service/imp/letter/dictionary/DicResultTypeImp.java
|
<filename>main-project/manage-form/src/main/java/com/github/form/service/imp/letter/dictionary/DicResultTypeImp.java
package com.github.form.service.imp.letter.dictionary;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.github.common.db.entity.primary.DicResultType;
import com.github.form.db.mapper.primary.letter.dictionary.DicResultTypeMapper;
import com.github.form.models.dto.letter.dictionary.DicCommonInsertDTO;
import com.github.form.models.dto.letter.dictionary.DicCommonUpdateDTO;
import com.github.form.service.interf.letter.dictionary.DicResultTypeService;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import ma.glasnost.orika.MapperFacade;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
/**
 * <p>
 * Service implementation for the petition-office handling-result
 * dictionary (DicResultType): CRUD with logical deletion.
 * </p>
 *
 * @author WORK,MT
 * @since 2019-06-03
 */
@Service
public class DicResultTypeImp extends ServiceImpl<DicResultTypeMapper, DicResultType> implements DicResultTypeService {
    // Mapper for direct DB access; Orika MapperFacade converts DTOs to entities.
    private final DicResultTypeMapper dicResultTypeMapper;
    private final MapperFacade mapperFacade;

    @Autowired
    public DicResultTypeImp(MapperFacade mapperFacade, DicResultTypeMapper dicResultTypeMapper) {
        this.mapperFacade = mapperFacade;
        this.dicResultTypeMapper = dicResultTypeMapper;
    }

    // Inserts one dictionary entry mapped from the DTO; returns the generated id.
    @Override
    public Integer insertOne(DicCommonInsertDTO dicCommonInsertDTO) {
        DicResultType dicResultType = mapperFacade.map(dicCommonInsertDTO, DicResultType.class);
        dicResultTypeMapper.insert(dicResultType);
        return dicResultType.getId();
    }

    // Logical delete: flips the is_delete flag instead of removing the row.
    // Returns false when id is null or no row was updated.
    @Override
    public Boolean deleteOne(Integer id) {
        if (id == null) {
            return false;
        }
        return dicResultTypeMapper.updateById(new DicResultType().setId(id).setIsDelete(true)) > 0;
    }

    // Updates one entry from the DTO; returns whether a row was changed.
    @Override
    public Boolean updateOne(DicCommonUpdateDTO dicCommonUpdateDTO) {
        DicResultType dicResultType = mapperFacade.map(dicCommonUpdateDTO, DicResultType.class);
        return (dicResultTypeMapper.updateById(dicResultType) > 0);
    }

    // Lists entries filtered by deletion flag, ordered by index number ascending.
    @Override
    public List<DicResultType> selectList(Boolean isDelete) {
        QueryWrapper<DicResultType> queryWrapper = new QueryWrapper<>();
        queryWrapper.eq(DicResultType.IS_DELETE, isDelete)
                .orderBy(true, true, DicResultType.INDEX_NUMBER);
        return dicResultTypeMapper.selectList(queryWrapper);
    }
}
|
rohitagarwal0910/vvp-cnf-validation-scripts
|
ice_validator/tests/test_contrail_vn_resource_id.py
|
# -*- coding: utf8 -*-
# ============LICENSE_START====================================================
# org.onap.vvp/validation-scripts
# ===================================================================
# Copyright © 2019 AT&T Intellectual Property. All rights reserved.
# ===================================================================
#
# Unless otherwise specified, all software contained herein is licensed
# under the Apache License, Version 2.0 (the "License");
# you may not use this software except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
#
# Unless otherwise specified, all documentation contained herein is licensed
# under the Creative Commons License, Attribution 4.0 Intl. (the "License");
# you may not use this documentation except in compliance with the License.
# You may obtain a copy of the License at
#
# https://creativecommons.org/licenses/by/4.0/
#
# Unless required by applicable law or agreed to in writing, documentation
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ============LICENSE_END============================================
#
#
"""
neutron net resource id
"""
import pytest
from .helpers import validates
from .structures import Heat
from .structures import ContrailV2VirtualNetworkProcessor
VERSION = "2.0.0"
# pylint: disable=invalid-name
@validates("R-99110")
def test_neutron_net_resource_id(yaml_file):
    """
    A VNF's Heat Orchestration Template's Resource
    OS::ContrailV2::VirtualNetwork Resource ID
    **MUST** use the naming convention
    1) int_{network-role}_network
    or
    2) int_{network-role}_RVN`` where RVN represents Resource Virtual
    """
    heat = Heat(filepath=yaml_file)
    processor_class = ContrailV2VirtualNetworkProcessor
    rtype = processor_class.resource_type
    vn_resources = heat.get_resource_by_type(rtype)
    if not vn_resources:
        pytest.skip("No %s resources found" % rtype)
    processor = processor_class()
    # collect every resource id that fails the naming-convention match
    bad = [
        rid
        for rid in vn_resources
        if not processor.get_rid_match_tuple(rid)[0]
    ]
    assert not bad, "%s resource ids %s do not match %s" % (
        rtype,
        bad,
        processor.get_rid_patterns().values(),
    )
|
opensim-org/opensim-gui
|
Gui/opensim/modeling/src/org/opensim/modeling/MovingPathPoint.java
|
/* ----------------------------------------------------------------------------
* This file was automatically generated by SWIG (http://www.swig.org).
* Version 4.0.2
*
* Do not make changes to this file unless you know what you are doing--modify
* the SWIG interface file instead.
* ----------------------------------------------------------------------------- */
package org.opensim.modeling;
/**
* A class implementing a moving muscle point, which is a muscle point that<br>
* moves in a body's reference frame as a function of a coordinate.<br>
* <br>
* @author <NAME><br>
* @version 1.0
*/
public class MovingPathPoint extends AbstractPathPoint {
  // Pointer to the wrapped native C++ object; 0 once delete() has run.
  private transient long swigCPtr;

  // Wraps an existing native object; cMemoryOwn says whether Java owns it.
  public MovingPathPoint(long cPtr, boolean cMemoryOwn) {
    super(opensimSimulationJNI.MovingPathPoint_SWIGUpcast(cPtr), cMemoryOwn);
    swigCPtr = cPtr;
  }

  // Returns the native pointer of obj, or 0 for null.
  public static long getCPtr(MovingPathPoint obj) {
    return (obj == null) ? 0 : obj.swigCPtr;
  }

  @SuppressWarnings("deprecation")
  protected void finalize() {
    delete();
  }

  // Frees the native object if Java owns it, then clears the pointer.
  // Idempotent: calling it again is a no-op.
  public synchronized void delete() {
    if (swigCPtr != 0) {
      if (swigCMemOwn) {
        swigCMemOwn = false;
        opensimSimulationJNI.delete_MovingPathPoint(swigCPtr);
      }
      swigCPtr = 0;
    }
    super.delete();
  }

  // Returns obj rewrapped as MovingPathPoint, or null if the native
  // downcast fails (obj is not actually a MovingPathPoint).
  public static MovingPathPoint safeDownCast(OpenSimObject obj) {
    long cPtr = opensimSimulationJNI.MovingPathPoint_safeDownCast(OpenSimObject.getCPtr(obj), obj);
    return (cPtr == 0) ? null : new MovingPathPoint(cPtr, false);
  }

  public void assign(OpenSimObject aObject) {
    opensimSimulationJNI.MovingPathPoint_assign(swigCPtr, this, OpenSimObject.getCPtr(aObject), aObject);
  }

  public static String getClassName() {
    return opensimSimulationJNI.MovingPathPoint_getClassName();
  }

  // Deep-copies the native object; the Java wrapper owns the new copy.
  public OpenSimObject clone() {
    long cPtr = opensimSimulationJNI.MovingPathPoint_clone(swigCPtr, this);
    return (cPtr == 0) ? null : new MovingPathPoint(cPtr, true);
  }

  public String getConcreteClassName() {
    return opensimSimulationJNI.MovingPathPoint_getConcreteClassName(swigCPtr, this);
  }

  // ---- Property "x_location" (a Function of the x-coordinate):
  // SWIG-generated copy/get/upd/set/append/construct accessors. ----
  public void copyProperty_x_location(MovingPathPoint source) {
    opensimSimulationJNI.MovingPathPoint_copyProperty_x_location(swigCPtr, this, MovingPathPoint.getCPtr(source), source);
  }

  public Function get_x_location(int i) {
    return new Function(opensimSimulationJNI.MovingPathPoint_get_x_location__SWIG_0(swigCPtr, this, i), false);
  }

  public Function upd_x_location(int i) {
    return new Function(opensimSimulationJNI.MovingPathPoint_upd_x_location__SWIG_0(swigCPtr, this, i), false);
  }

  public void set_x_location(int i, Function value) {
    opensimSimulationJNI.MovingPathPoint_set_x_location__SWIG_0(swigCPtr, this, i, Function.getCPtr(value), value);
  }

  public int append_x_location(Function value) {
    return opensimSimulationJNI.MovingPathPoint_append_x_location(swigCPtr, this, Function.getCPtr(value), value);
  }

  public void constructProperty_x_location() {
    opensimSimulationJNI.MovingPathPoint_constructProperty_x_location__SWIG_0(swigCPtr, this);
  }

  public void constructProperty_x_location(Function initValue) {
    opensimSimulationJNI.MovingPathPoint_constructProperty_x_location__SWIG_1(swigCPtr, this, Function.getCPtr(initValue), initValue);
  }

  public Function get_x_location() {
    return new Function(opensimSimulationJNI.MovingPathPoint_get_x_location__SWIG_1(swigCPtr, this), false);
  }

  public Function upd_x_location() {
    return new Function(opensimSimulationJNI.MovingPathPoint_upd_x_location__SWIG_1(swigCPtr, this), false);
  }

  public void set_x_location(Function value) {
    opensimSimulationJNI.MovingPathPoint_set_x_location__SWIG_1(swigCPtr, this, Function.getCPtr(value), value);
  }

  // ---- Property "y_location": same accessor pattern as x_location. ----
  public void copyProperty_y_location(MovingPathPoint source) {
    opensimSimulationJNI.MovingPathPoint_copyProperty_y_location(swigCPtr, this, MovingPathPoint.getCPtr(source), source);
  }

  public Function get_y_location(int i) {
    return new Function(opensimSimulationJNI.MovingPathPoint_get_y_location__SWIG_0(swigCPtr, this, i), false);
  }

  public Function upd_y_location(int i) {
    return new Function(opensimSimulationJNI.MovingPathPoint_upd_y_location__SWIG_0(swigCPtr, this, i), false);
  }

  public void set_y_location(int i, Function value) {
    opensimSimulationJNI.MovingPathPoint_set_y_location__SWIG_0(swigCPtr, this, i, Function.getCPtr(value), value);
  }

  public int append_y_location(Function value) {
    return opensimSimulationJNI.MovingPathPoint_append_y_location(swigCPtr, this, Function.getCPtr(value), value);
  }

  public void constructProperty_y_location() {
    opensimSimulationJNI.MovingPathPoint_constructProperty_y_location__SWIG_0(swigCPtr, this);
  }

  public void constructProperty_y_location(Function initValue) {
    opensimSimulationJNI.MovingPathPoint_constructProperty_y_location__SWIG_1(swigCPtr, this, Function.getCPtr(initValue), initValue);
  }

  public Function get_y_location() {
    return new Function(opensimSimulationJNI.MovingPathPoint_get_y_location__SWIG_1(swigCPtr, this), false);
  }

  public Function upd_y_location() {
    return new Function(opensimSimulationJNI.MovingPathPoint_upd_y_location__SWIG_1(swigCPtr, this), false);
  }

  public void set_y_location(Function value) {
    opensimSimulationJNI.MovingPathPoint_set_y_location__SWIG_1(swigCPtr, this, Function.getCPtr(value), value);
  }

  // ---- Property "z_location": same accessor pattern as x_location. ----
  public void copyProperty_z_location(MovingPathPoint source) {
    opensimSimulationJNI.MovingPathPoint_copyProperty_z_location(swigCPtr, this, MovingPathPoint.getCPtr(source), source);
  }

  public Function get_z_location(int i) {
    return new Function(opensimSimulationJNI.MovingPathPoint_get_z_location__SWIG_0(swigCPtr, this, i), false);
  }

  public Function upd_z_location(int i) {
    return new Function(opensimSimulationJNI.MovingPathPoint_upd_z_location__SWIG_0(swigCPtr, this, i), false);
  }

  public void set_z_location(int i, Function value) {
    opensimSimulationJNI.MovingPathPoint_set_z_location__SWIG_0(swigCPtr, this, i, Function.getCPtr(value), value);
  }

  public int append_z_location(Function value) {
    return opensimSimulationJNI.MovingPathPoint_append_z_location(swigCPtr, this, Function.getCPtr(value), value);
  }

  public void constructProperty_z_location() {
    opensimSimulationJNI.MovingPathPoint_constructProperty_z_location__SWIG_0(swigCPtr, this);
  }

  public void constructProperty_z_location(Function initValue) {
    opensimSimulationJNI.MovingPathPoint_constructProperty_z_location__SWIG_1(swigCPtr, this, Function.getCPtr(initValue), initValue);
  }

  public Function get_z_location() {
    return new Function(opensimSimulationJNI.MovingPathPoint_get_z_location__SWIG_1(swigCPtr, this), false);
  }

  public Function upd_z_location() {
    return new Function(opensimSimulationJNI.MovingPathPoint_upd_z_location__SWIG_1(swigCPtr, this), false);
  }

  public void set_z_location(Function value) {
    opensimSimulationJNI.MovingPathPoint_set_z_location__SWIG_1(swigCPtr, this, Function.getCPtr(value), value);
  }

  // ---- Socket "x_coordinate" / "y_coordinate" / "z_coordinate":
  // property-index accessors and connect helpers generated by SWIG. ----
  public void setPropertyIndex_socket_x_coordinate(SWIGTYPE_p_PropertyIndex value) {
    opensimSimulationJNI.MovingPathPoint_PropertyIndex_socket_x_coordinate_set(swigCPtr, this, SWIGTYPE_p_PropertyIndex.getCPtr(value));
  }

  public SWIGTYPE_p_PropertyIndex getPropertyIndex_socket_x_coordinate() {
    return new SWIGTYPE_p_PropertyIndex(opensimSimulationJNI.MovingPathPoint_PropertyIndex_socket_x_coordinate_get(swigCPtr, this), true);
  }

  public void connectSocket_x_coordinate(OpenSimObject object) {
    opensimSimulationJNI.MovingPathPoint_connectSocket_x_coordinate(swigCPtr, this, OpenSimObject.getCPtr(object), object);
  }

  public void setPropertyIndex_socket_y_coordinate(SWIGTYPE_p_PropertyIndex value) {
    opensimSimulationJNI.MovingPathPoint_PropertyIndex_socket_y_coordinate_set(swigCPtr, this, SWIGTYPE_p_PropertyIndex.getCPtr(value));
  }

  public SWIGTYPE_p_PropertyIndex getPropertyIndex_socket_y_coordinate() {
    return new SWIGTYPE_p_PropertyIndex(opensimSimulationJNI.MovingPathPoint_PropertyIndex_socket_y_coordinate_get(swigCPtr, this), true);
  }

  public void connectSocket_y_coordinate(OpenSimObject object) {
    opensimSimulationJNI.MovingPathPoint_connectSocket_y_coordinate(swigCPtr, this, OpenSimObject.getCPtr(object), object);
  }

  public void setPropertyIndex_socket_z_coordinate(SWIGTYPE_p_PropertyIndex value) {
    opensimSimulationJNI.MovingPathPoint_PropertyIndex_socket_z_coordinate_set(swigCPtr, this, SWIGTYPE_p_PropertyIndex.getCPtr(value));
  }

  public SWIGTYPE_p_PropertyIndex getPropertyIndex_socket_z_coordinate() {
    return new SWIGTYPE_p_PropertyIndex(opensimSimulationJNI.MovingPathPoint_PropertyIndex_socket_z_coordinate_get(swigCPtr, this), true);
  }

  public void connectSocket_z_coordinate(OpenSimObject object) {
    opensimSimulationJNI.MovingPathPoint_connectSocket_z_coordinate(swigCPtr, this, OpenSimObject.getCPtr(object), object);
  }

  // Creates a fresh native MovingPathPoint owned by this wrapper.
  public MovingPathPoint() {
    this(opensimSimulationJNI.new_MovingPathPoint(), true);
  }

  public void updateFromXMLNode(SWIGTYPE_p_SimTK__Xml__Element aNode, int versionNumber) {
    opensimSimulationJNI.MovingPathPoint_updateFromXMLNode(swigCPtr, this, SWIGTYPE_p_SimTK__Xml__Element.getCPtr(aNode), versionNumber);
  }

  // ---- Coordinate queries and setters for the x/y/z driving coordinates. ----
  public boolean hasXCoordinate() {
    return opensimSimulationJNI.MovingPathPoint_hasXCoordinate(swigCPtr, this);
  }

  public boolean hasYCoordinate() {
    return opensimSimulationJNI.MovingPathPoint_hasYCoordinate(swigCPtr, this);
  }

  public boolean hasZCoordinate() {
    return opensimSimulationJNI.MovingPathPoint_hasZCoordinate(swigCPtr, this);
  }

  public Coordinate getXCoordinate() {
    return new Coordinate(opensimSimulationJNI.MovingPathPoint_getXCoordinate(swigCPtr, this), false);
  }

  public Coordinate getYCoordinate() {
    return new Coordinate(opensimSimulationJNI.MovingPathPoint_getYCoordinate(swigCPtr, this), false);
  }

  public Coordinate getZCoordinate() {
    return new Coordinate(opensimSimulationJNI.MovingPathPoint_getZCoordinate(swigCPtr, this), false);
  }

  public void setXCoordinate(Coordinate coordinate) {
    opensimSimulationJNI.MovingPathPoint_setXCoordinate(swigCPtr, this, Coordinate.getCPtr(coordinate), coordinate);
  }

  public void setYCoordinate(Coordinate coordinate) {
    opensimSimulationJNI.MovingPathPoint_setYCoordinate(swigCPtr, this, Coordinate.getCPtr(coordinate), coordinate);
  }

  public void setZCoordinate(Coordinate coordinate) {
    opensimSimulationJNI.MovingPathPoint_setZCoordinate(swigCPtr, this, Coordinate.getCPtr(coordinate), coordinate);
  }

  public boolean isActive(State s) {
    return opensimSimulationJNI.MovingPathPoint_isActive(swigCPtr, this, State.getCPtr(s), s);
  }

  /**
   * Get the local location of the MovingPathPoint in its Frame
   */
  public Vec3 getLocation(State s) {
    return new Vec3(opensimSimulationJNI.MovingPathPoint_getLocation(swigCPtr, this, State.getCPtr(s), s), true);
  }

  /**
   * Get the local velocity of the MovingPathPoint w.r.t to and <br>
   * expressed in its Frame. To get the velocity of the point w.r.t.<br>
   * and expressed in Ground, call getVelocityInGround().
   */
  public Vec3 getVelocity(State s) {
    return new Vec3(opensimSimulationJNI.MovingPathPoint_getVelocity(swigCPtr, this, State.getCPtr(s), s), true);
  }

  // Derivative of the point's location with respect to the coordinate value.
  public Vec3 getdPointdQ(State s) {
    return new Vec3(opensimSimulationJNI.MovingPathPoint_getdPointdQ(swigCPtr, this, State.getCPtr(s), s), true);
  }

  /**
   * Scale the underlying MultiplierFunctions associated with the<br>
   * MovingPathPoint.
   */
  public void extendScale(State s, ScaleSet scaleSet) {
    opensimSimulationJNI.MovingPathPoint_extendScale(swigCPtr, this, State.getCPtr(s), s, ScaleSet.getCPtr(scaleSet), scaleSet);
  }
}
|
Ron423c/chromium
|
chrome/browser/chromeos/full_restore/full_restore_arc_task_handler.cc
|
<filename>chrome/browser/chromeos/full_restore/full_restore_arc_task_handler.cc<gh_stars>0
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/chromeos/full_restore/full_restore_arc_task_handler.h"
#include "chrome/browser/chromeos/full_restore/full_restore_arc_task_handler_factory.h"
#include "chrome/browser/profiles/profile.h"
#include "components/full_restore/full_restore_save_handler.h"
namespace chromeos {
namespace full_restore {

// static
FullRestoreArcTaskHandler* FullRestoreArcTaskHandler::GetForProfile(
    Profile* profile) {
  return FullRestoreArcTaskHandlerFactory::GetForProfile(profile);
}

FullRestoreArcTaskHandler::FullRestoreArcTaskHandler(Profile* profile) {
  ArcAppListPrefs* prefs = ArcAppListPrefs::Get(profile);
  // Prefs may be unavailable for this profile; in that case the handler
  // simply never observes ARC task events.
  if (!prefs)
    return;
  arc_prefs_observer_.Observe(prefs);
}

FullRestoreArcTaskHandler::~FullRestoreArcTaskHandler() = default;

// Notification from the observed ArcAppListPrefs (see Observe() in the
// constructor): resolve the app id and forward the new task to the
// full-restore save handler.
void FullRestoreArcTaskHandler::OnTaskCreated(int task_id,
                                              const std::string& package_name,
                                              const std::string& activity,
                                              const std::string& intent) {
  const std::string app_id = ArcAppListPrefs::GetAppId(package_name, activity);
  ::full_restore::FullRestoreSaveHandler::GetInstance()->OnTaskCreated(app_id,
                                                                       task_id);
}

// Forward ARC task destruction to the full-restore save handler.
void FullRestoreArcTaskHandler::OnTaskDestroyed(int task_id) {
  ::full_restore::FullRestoreSaveHandler::GetInstance()->OnTaskDestroyed(
      task_id);
}

}  // namespace full_restore
}  // namespace chromeos
|
GeorgeK95/JavaOOPBasics
|
Defining Classes - Exercises/src/J_FamilyTree/Person.java
|
<reponame>GeorgeK95/JavaOOPBasics
package J_FamilyTree;
import java.util.ArrayList;
import java.util.List;
/**
* Created by George-Lenovo on 6/29/2017.
*/
/**
 * A node in a family tree: a person with a name, a date string, and links
 * to parent and child Person nodes.
 */
class Person {
    private String name;
    private String date;
    private List<Person> parents;
    private List<Person> children;

    public Person(String name, String date) {
        this.setName(name);
        this.setDate(date);
        this.parents = new ArrayList<>();
        this.children = new ArrayList<>();
    }

    /**
     * Renders this person followed by a "Parents:" section and a
     * "Children:" section, one relative per line.
     */
    @Override
    public String toString() {
        final String newline = System.getProperty("line.separator");
        StringBuilder result = new StringBuilder();
        result.append(this.getName()).append(' ').append(this.getDate()).append(newline);
        result.append("Parents:").append(newline);
        for (Person parent : this.parents) {
            result.append(parent.getName()).append(' ').append(parent.getDate()).append(newline);
        }
        result.append("Children:").append(newline);
        for (Person child : this.children) {
            result.append(child.getName()).append(' ').append(child.getDate()).append(newline);
        }
        return result.toString();
    }

    void addChild(Person child) {
        this.children.add(child);
    }

    void addParent(Person parent) {
        this.parents.add(parent);
    }

    String getName() {
        return name;
    }

    private void setName(String name) {
        this.name = name;
    }

    String getDate() {
        return date;
    }

    private void setDate(String date) {
        this.date = date;
    }

    List<Person> getParents() {
        return parents;
    }

    private void setParents(List<Person> parents) {
        this.parents = parents;
    }

    List<Person> getChildren() {
        return children;
    }

    private void setChildren(List<Person> children) {
        this.children = children;
    }
}
|
smokhov/comp477-samples
|
src/Demos/Mocap/wxmv-src/bvh.cpp
|
///////////////////////////////////////////////////////////////////////////////
//
// bvh.cpp
//
// Purpose: Implementation of classes for Biovision Hierarchy (BVH) file
// format loading.
// Classes: BvhFile.
//
// Created: <NAME>, 13/10/2003
//
///////////////////////////////////////////////////////////////////////////////
// #define DEBUG
#include <string>
#include <sstream>
#include <iostream>
#include <fstream>
#include <stdexcept>
#include <list>
#include <vector>
using namespace std;
#include "base.h"
#include "algebra.h"
#include "motion.h"
#include "skeleton.h"
#include "bvh.h"
///////////////////////////////////////////////////////////////////////////////
//
// Used BVH file format gramatics:
//
// bvh -> hierarchy motion end-of-file
// hierarchy -> "HIERARCHY" root
// root -> "ROOT" name { joint-data }
// joint -> "JOINT" name { joint-data }
// joint-data -> offset [channels] (joint/end-site)+
// end-site -> "End" "Site" { offset }
// offset -> "OFFSET" double double double
// channels -> "CHANNELS" int (Xposition/Yposition/Zposition/Xrotation/Yrotation/Zrotation)*
// motion -> "MOTION" "Frames:" int "Frame" "Time:" double motion-data
// motion-data -> (double)+
//
// where
//
// [item] means an optional item
// "token" means token (without quotes) in the input file
// foo/bar means either foo or bar
// (item)* means any number (including zero) of items
// (item)+ means at least one item
// { } means { or } in the input file
//
///////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////
//
// class BvhFile - public methods
//
///////////////////////////////////////////////////////////////////////////////
// Loads an animation from the file. Creates a skeleton hierarchy of
// joints and a recorded motion consisting of all degrees-of-freedom.
// Pointers to the newly created skeleton and the motion are stored to
// parameters' destination (NULL on failure). Reports a message if an error
// occurred and returns whether the file has been loaded successfully.
bool BvhFile::Load(Joint** skeleton, RecordedMotion** motion) throw()
{
    bool ok = true;
#ifdef DEBUG
    cerr << "Loading BVH file " << filename << "..." << endl;
#endif
    // open file (clear() resets stream state in case of a previous Load)
    file.clear();
    file.open(filename.c_str());
    if (!file)
    {
        stringstream error;
        error << "Unable to open file " << filename;
        ErrorMessage(error.str());
        // signal failure through the out-parameters as well
        *skeleton = NULL;
        *motion = NULL;
        return false;
    }
    // reset parser state before building new structures
    m_skeleton = NULL;
    m_motion = NULL;
    tridof_names.clear();
    // parse, tracking the current line number for error messages
    line = 1;
    try {
        parse_bvh();
    }
    // delete created structures and show message if a parse error occurred
    catch (parse_error &pe)
    {
        delete m_skeleton;
        delete m_motion;
        m_skeleton = NULL;
        m_motion = NULL;
        stringstream error;
        error << "Parse error at " << filename << ':' << line << ": "
            << pe.what();
        ErrorMessage(error.str());
        ok = false;
    }
    // store pointers to created structures (NULL if parsing failed)
    *skeleton = m_skeleton;
    *motion = m_motion;
#ifdef DEBUG
    if (ok) cerr << "...OK (BVH file loaded)" << endl;
#endif
    // close file
    file.close();
    return ok;
}
///////////////////////////////////////////////////////////////////////////////
//
// class BvhFile - private methods
//
///////////////////////////////////////////////////////////////////////////////
// Read one token from file, skipping all whitespace (' ', '\t', '\n') while
// counting newlines for error reporting; the special "end-of-file" token is
// returned at the end of file so the parser can match it explicitly
// (see take_token / parse_bvh).
void BvhFile::read_next_token() throw(parse_error)
{
    char c;
    // skip leading whitespace while counting lines
    while (file.get(c) && strchr(" \t\n", c))
        if (c == '\n') line++;
    // test end of file
    if (!file)
    {
        // BUG FIX: the sentinel must be exactly "end-of-file"; any other
        // value makes take_token("end-of-file") in parse_bvh() always throw.
        token = "end-of-file";
        return;
    }
    // not EOF: put the first non-whitespace character back and read the token
    file.unget();
    file >> token;
}
// Report whether the most recently read token equals the expected one.
bool BvhFile::token_is(const string& tok) throw()
{
    return token == tok;
}
// If the current token is the specified one, read the next token; otherwise
// throw a parse_error describing the mismatch.
void BvhFile::take_token(const string& tok) throw(parse_error)
{
    if (token_is(tok)) read_next_token();
    else
    {
        stringstream error;
        // the "end-of-file" sentinel gets a friendlier message than a quoted token
        if (tok != "end-of-file") error << "Token \"" << tok << '"';
        else error << "end of file";
        error << " expected, but \"" << token << "\" found instead";
        throw parse_error(error.str());
    }
}
// Store the current token into str and read the next token.
void BvhFile::token_to_string(string& str) throw(parse_error)
{
    str = token;
    read_next_token();
}
// Convert the current token to double and read the next token. If the
// conversion failed (strtod range error via errno, or trailing junk after
// the number), throw a parse_error.
void BvhFile::token_to_double(double& num) throw(parse_error)
{
    char *c;
    errno = 0;  // strtod reports over/underflow through errno
    num = strtod(token.c_str(), &c);
    // c must point one past the last character: the whole token was a number
    if (errno || c != token.c_str() + token.length())
    {
        stringstream error;
        error << "\"" << token << "\" cannot be converted to double";
        throw parse_error(error.str());
    }
    read_next_token();
}
// Convert the current token to int and read the next token. If the
// conversion failed, throw a parse_error.
void BvhFile::token_to_int(int& num) throw(parse_error)
{
    stringstream s;
    s << token;
    s >> num;
    // stream failure means the token was not a valid integer
    if (!s)
    {
        stringstream error;
        error << "\"" << token << "\" cannot be converted to int";
        throw parse_error(error.str());
    }
    read_next_token();
}
// bvh -> hierarchy motion end-of-file
// Top-level parse: skeleton hierarchy, then motion data, then nothing else.
void BvhFile::parse_bvh() throw(parse_error)
{
    // prime the tokenizer with the first token
    read_next_token();
    parse_hierarchy();
    parse_motion();
    // the whole file must have been consumed
    take_token("end-of-file");
}
// hierarchy -> "HIERARCHY" root
void BvhFile::parse_hierarchy() throw(parse_error)
{
    take_token("HIERARCHY");
    parse_root();
}
// root -> "ROOT" name { joint-data }
void BvhFile::parse_root() throw(parse_error)
{
    take_token("ROOT");
    token_to_string(joint_name);
    take_token("{");
    // triDOF counting for the whole skeleton starts at the root
    n_tridofs = 0;
    // NULL parent marks the root joint
    parse_joint_data(NULL);
    take_token("}");
}
// joint -> "JOINT" name { joint-data }
void BvhFile::parse_joint(Joint *parent) throw(parse_error)
{
    take_token("JOINT");
    token_to_string(joint_name);
    take_token("{");
    parse_joint_data(parent);
    take_token("}");
}
// joint-data -> offset [channels] (joint/end-site)+
// Parses one joint's body, creates the Joint and links it under parent (or
// stores it as the skeleton root when parent is NULL), then recurses into
// the child joints / end sites.
void BvhFile::parse_joint_data(Joint *parent) throw(parse_error)
{
    parse_offset();
    // -1 marks "no channel of this kind declared for this joint"
    joint_position_tridof = joint_rotation_tridof = -1;
    if (token_is("CHANNELS")) parse_channels();
    // create the joint
    Joint *joint = new Joint(joint_name, joint_offset, identity_quaternion,
        joint_position_tridof, joint_rotation_tridof);
    // link joint to the hierarchy or set it as a root
    if (parent)
        parent->AddChild(joint);
    else
        m_skeleton = joint;
    // the grammar requires at least one child joint or end site
    do {
        if (token_is("JOINT")) parse_joint(joint);
        else if (token_is("End")) parse_end_site(joint);
        else
        {
            stringstream error;
            error << "\"JOINT\" or \"End Site\" expected, but \"" << token
                << "\" found instead";
            throw parse_error(error.str());
        }
    }
    while (token_is("JOINT") || token_is("End"));
}
// end-site -> "End" "Site" { offset }
void BvhFile::parse_end_site(Joint *parent) throw(parse_error)
{
    take_token("End");
    take_token("Site");
    // an end-site has no name; derive one as parent joint's end-effector
    joint_name = parent->name + "-EF";
    take_token("{");
    parse_offset();
    // create the joint (end-effector); -1/-1 = no position/rotation triDOF
    Joint *joint = new Joint(joint_name, joint_offset, identity_quaternion,
        -1, -1);
    // link joint to the hierarchy or set it as a root
    if (parent)
        parent->AddChild(joint);
    else
        m_skeleton = joint; // Note: this should never occur
    take_token("}");
}
// offset -> "OFFSET" double double double
// Reads the joint offset into the joint_offset member (x, y, z order).
void BvhFile::parse_offset() throw(parse_error)
{
    take_token("OFFSET");
    token_to_double(joint_offset.x);
    token_to_double(joint_offset.y);
    token_to_double(joint_offset.z);
}
// channels -> "CHANNELS" int (Xposition/Yposition/Zposition/Xrotation/Yrotation/Zrotation)*
// Only 3 channels (rotation) or 6 channels (position then rotation) are
// supported; positions must appear in XYZ order, rotations in ZXY order.
// Registers one triDOF name per channel triple and records its index on
// the joint being parsed.
void BvhFile::parse_channels() throw(parse_error)
{
    int declared_channels;
    take_token("CHANNELS");
    token_to_int(declared_channels);
    if (declared_channels != 3 && declared_channels != 6)
        throw parse_error("Only 3 or 6 channels per joint is supported");
    if (declared_channels == 6)
    {
        // position channels are expected first and in XYZ order
        take_token("Xposition");
        take_token("Yposition");
        take_token("Zposition");
        // add the position triDOF name to the list
        tridof_names.push_back(joint_name + ".position");
        // set the index of joint's triDOF
        joint_position_tridof = n_tridofs;
        n_tridofs++;
    }
    // rotation channels are expected then in ZXY order
    take_token("Zrotation");
    take_token("Xrotation");
    take_token("Yrotation");
    // add the rotation triDOF name to the list
    tridof_names.push_back(joint_name + ".rotation");
    // set the index of joint's triDOF
    joint_rotation_tridof = n_tridofs;
    n_tridofs++;
}
// motion -> "MOTION" "Frames:" int "Frame" "Time:" double motion-data
void BvhFile::parse_motion() throw(parse_error)
{
    take_token("MOTION");
    take_token("Frames:");
    token_to_int(n_frames);
    take_token("Frame");
    take_token("Time:");
    token_to_double(frame_time);  // seconds per frame
    parse_motion_data();
}
// motion-data -> (double)*
void BvhFile::parse_motion_data() throw(parse_error)
{
if (n_frames <= 0 || n_tridofs <= 0)
{
stringstream error;
error << "Number of frames and number of degrees-of-freedom must be > 0";
throw parse_error(error.str());
}
#ifdef DEBUG
cerr << "Reading motion data...";
#endif
// read sampled motion data, create one array for each DOF
int n_dofs = 3 * n_tridofs;
double* sampled_motion[n_dofs];
for (int i = 0; i < n_dofs; i++)
sampled_motion[i] = new double[n_frames];
for (int i = 0; i < n_frames; i++)
for (int j = 0; j < n_dofs; j++)
token_to_double(sampled_motion[j][i]);
#ifdef DEBUG
cerr << "OK" << endl;
#endif
// allocate array of pointers to triDOFs
TriDOF **tridof = new (TriDOF*)[n_tridofs];
double duration = (n_frames - 1) * frame_time;
list<string>::iterator l = tridof_names.begin();
// create individual triDOFs
for (int i = 0; i < n_tridofs; i++, l++)
{
// if tridof name constais ".rotation" create a rotational triDOF
// note that dofs in sampled motion are in ZXY order
if (l->find(".rotation", 0) != string::npos)
tridof[i] = new RotationalTriDOF(*l, n_frames,
sampled_motion[3*i+1], sampled_motion[3*i+2],
sampled_motion[3*i]);
// else create a translational triDOF in XYZ order
else
tridof[i] = new TranslationalTriDOF(*l, n_frames,
sampled_motion[3*i], sampled_motion[3*i+1],
sampled_motion[3*i+2]);
}
// create motion name without path and extension
string motion_name = filename;
StripExtension(motion_name);
string::size_type i = motion_name.find_last_of("/\\");
if (i != string::npos)
motion_name = motion_name.substr(i + 1, motion_name.length());
// create a motion
m_motion = new RecordedMotion(motion_name, duration, n_tridofs, tridof);
}
// Create a simple one-bone skeleton with one triDOF for testing purposes;
// may be called from OnOpen instead of reading a chosen BVH file.
bool create_test(Joint** skeleton, RecordedMotion** motion)
{
    // create skeleton: root with one rotation triDOF (index 0) and one child
    Joint *root = new Joint("root", zero_vector, identity_quaternion, -1, 0);
    Joint *son = new Joint("son", Vector(0.0, 50.0, 0.0), identity_quaternion, -1, -1);
    root->AddChild(son);
    *skeleton = root;
    // create motion: a 10-frame ramp on the first DOF.
    // n_frames is const so the array below has a constant size and is
    // standard C++ (the original used a non-standard variable-length array).
    const int n_frames = 10;
    double duration = 15.0;
    double sampled_motion[3][n_frames];
    for (int i = 0; i < n_frames; i++)
    {
        sampled_motion[0][i] = i * 40.0;
        sampled_motion[1][i] = 0;
        sampled_motion[2][i] = 0;
    }
    // BUG FIX: "new (TriDOF*)[1]" is ill-formed standard C++; the array
    // new-expression is spelled "new TriDOF*[1]".
    TriDOF **tridof = new TriDOF*[1];
    /* tridof[0] = new TranslationalTriDOF("test_3dof", n_frames,
            sampled_motion[0], sampled_motion[1],
            sampled_motion[2]);
    */
    tridof[0] = new RotationalTriDOF("test_3dof", n_frames,
        sampled_motion[0], sampled_motion[1],
        sampled_motion[2]);
    // NOTE(review): passes n_frames where parse_motion_data passes n_tridofs
    // as RecordedMotion's triDOF count, and hands stack arrays to the triDOF —
    // confirm RecordedMotion/TriDOF copy the data rather than keep pointers.
    *motion = new RecordedMotion("test_motion", duration, n_frames, tridof);
    return true;
}
|
AYCHB/BudgetMaster
|
src/main/java/de/deadlocker8/budgetmaster/authentication/UserService.java
|
<gh_stars>0
package de.deadlocker8.budgetmaster.authentication;
import de.deadlocker8.budgetmaster.ProgramArgs;
import de.deadlocker8.budgetmaster.accounts.AccountService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.stereotype.Service;
@Service
public class UserService
{
    private final Logger LOGGER = LoggerFactory.getLogger(this.getClass());

    public static final String DEFAULT_PASSWORD = "<PASSWORD>";

    /**
     * Ensures exactly one login user exists at startup.
     * <p>
     * When the application is launched with {@code --resetPassword}, all
     * stored users are wiped first. Whenever the user table is empty (either
     * freshly installed or just wiped), a default user with the BCrypt-hashed
     * default password is created and the currently selected account is
     * re-selected.
     */
    @Autowired
    public UserService(UserRepository userRepository, AccountService accountService)
    {
        final boolean resetRequested = ProgramArgs.getArgs().contains("--resetPassword");
        if(resetRequested)
        {
            LOGGER.info("Password reset");
            userRepository.deleteAll();
        }

        // Guard clause: nothing to seed when at least one user already exists.
        if(!userRepository.findAll().isEmpty())
        {
            return;
        }

        final BCryptPasswordEncoder passwordEncoder = new BCryptPasswordEncoder();
        final String hashedDefaultPassword = passwordEncoder.encode(DEFAULT_PASSWORD);
        userRepository.save(new User("Default", hashedDefaultPassword));
        LOGGER.info("Created default user");
        accountService.selectAccount(accountService.getRepository().findByIsSelected(true).getID());
    }
}
|
victorlmneves/minipress
|
_packages/app/src/create-app.js
|
<gh_stars>10-100
import Vue from 'vue'
import createRouter from './router'
import Layouts from '#minipress/layouts'
import AppEnhancers from '#minipress/app-enhancers'
import Components from '#minipress/components'
import rootOptions from './root-options.vue'
import Router from 'vue-router'
// Build-time constant injected by the bundler (typically something like
// '.minipress/.temp'); undefined at lint time, hence the disable below.
// eslint-disable-next-line no-undef
const MINIPRESS_TEMP_DIR = /** @type {string} */(__MINIPRESS_TEMP_DIR__)

// Always enable the Vue devtools for this app.
Vue.config.devtools = true

// Expose $minipress everywhere: every component can reach the root
// instance via this.$minipress.
Vue.mixin({
  beforeCreate() {
    this.$minipress = this.$root
  }
})
/**
 * Builds the root Vue application and its router for the given context.
 * @param {*} context - build/runtime context handed to the router factory
 * @returns {{ app: Vue, router: Router }} the root instance and its router
 */
export function createApp(context) {
  // Dedicated Vue instance used as an event hub for scroll handling.
  const scrollBus = new Vue()

  // Install the router plus the generated component/layout plugins.
  for (const plugin of [Router, Components, Layouts]) {
    Vue.use(plugin)
  }

  const router = createRouter(context, scrollBus)

  // Let user-supplied enhancers hook into the Vue constructor.
  AppEnhancers({ Vue })

  const app = new Vue({ ...rootOptions, router })
  return { app, router }
}
// Webpack HMR: accept in-place updates to the generated site-data and
// routes modules (the empty callback marks the update as handled so it
// does not bubble up and trigger a full page reload).
if (module.hot) {
  const createTempPath = name => `./${MINIPRESS_TEMP_DIR}/${name}/index.js`
  // MINIPRESS_TEMP_DIR is something like this: '.minipress/.temp'
  const paths = [
    createTempPath('site-data'),
    createTempPath('routes')
  ]
  module.hot.accept(paths, () => { })
}
|
megahertz0/android_thunder
|
dex_src/com/xunlei/thundersniffer/sniff/sniffer/ai.java
|
package com.xunlei.thundersniffer.sniff.sniffer;
import android.text.TextUtils;
import com.xunlei.xiazaibao.BuildConfig;
import java.util.ArrayList;
/**
 * Keyword matcher (decompiled, obfuscated names kept to preserve the class's
 * interface). A keyword string is split into lower-cased fragments; {@link #a}
 * then tests whether enough fragments occur in a candidate string.
 */
final class ai {
    /** The original keyword string. */
    private String a;
    /** Lower-cased fragments extracted from {@link #a}. */
    private String[] b;
    /** Fragments post-processed through {@code ar.c} (conversion unknown here). */
    private String[] c;

    /**
     * @param str keyword to match against; must be non-empty
     * @throws IllegalArgumentException when {@code str} is null or empty
     */
    public ai(String str) throws IllegalArgumentException {
        this.a = str;
        if (TextUtils.isEmpty(str)) {
            throw new IllegalArgumentException("Word must not be empty");
        }
        this.b = b(this.a);
        this.c = c(this.a);
    }

    /**
     * Splits {@code str} on whitespace/plus and extracts lower-cased
     * fragments: runs of Latin-1 characters become substrings, every
     * non-Latin character becomes its own one-character fragment.
     * Returns null when nothing was extracted.
     */
    private static String[] b(String str) {
        String[] fragments;
        // FIX: the original tested TextUtils.isEmpty(str) twice in a row;
        // the duplicate branch was unreachable and has been merged.
        if (TextUtils.isEmpty(str)) {
            fragments = null;
        } else {
            ArrayList<String> parts = new ArrayList<String>();
            String[] tokens = str.split("[\\s\\+]");
            for (String token : tokens) {
                int lastLatin = -1;
                int firstLatin = -1;
                for (int pos = 0; pos < token.length(); pos++) {
                    char ch = token.charAt(pos);
                    if (ch <= '\u00ff') {
                        if (firstLatin < 0) {
                            firstLatin = pos;
                        }
                        lastLatin = pos;
                    } else {
                        if (lastLatin >= firstLatin && lastLatin >= 0) {
                            // NOTE(review): substring end is exclusive, so the
                            // character at lastLatin is dropped. This faithfully
                            // preserves the decompiled logic (d() counts the
                            // same way) -- confirm against the original source
                            // before "fixing" to lastLatin + 1.
                            parts.add(token.substring(firstLatin, lastLatin));
                            lastLatin = -1;
                            firstLatin = -1;
                        }
                        // FIX: the decompiled source referenced an undefined
                        // register variable (r11) here; the intended value is
                        // the current non-Latin character, mirroring the
                        // per-character counting in d().
                        parts.add(String.valueOf(ch));
                    }
                }
                if (lastLatin >= firstLatin && lastLatin >= 0) {
                    parts.add(token.substring(firstLatin, lastLatin));
                }
            }
            fragments = parts.isEmpty() ? null : parts.toArray(new String[parts.size()]);
        }
        // Lower-case every extracted fragment.
        if (fragments != null && fragments.length > 0) {
            for (int i = 0; i < fragments.length; i++) {
                fragments[i] = fragments[i].toLowerCase();
            }
        }
        return fragments;
    }

    /**
     * Builds the secondary fragment list: consecutive fragments left
     * unchanged by {@code ar.c} are concatenated; converted fragments are
     * emitted on their own. Returns null when {@link #b} yields nothing.
     */
    private static String[] c(String str) {
        String[] fragments = b(str);
        if (fragments == null || fragments.length <= 0) {
            return null;
        }
        ArrayList<String> result = new ArrayList<String>();
        // BuildConfig.VERSION_NAME stands in for "" in the decompiled code
        // (decompiler constant-pool artifact); kept for fidelity.
        String pending = BuildConfig.VERSION_NAME;
        int length = fragments.length;
        for (int i = 0; i < fragments.length; i++) {
            String lower = fragments[i].toLowerCase();
            String converted = ar.c(lower);
            if (lower.equals(converted)) {
                pending = pending + lower;
                if (i == length - 1) {
                    result.add(pending);
                    break;
                }
            } else {
                if (!pending.isEmpty()) {
                    result.add(pending);
                    pending = BuildConfig.VERSION_NAME;
                }
                result.add(converted);
            }
        }
        return (String[]) result.toArray(new String[result.size()]);
    }

    /**
     * Returns true when enough fragments (primary, or failing that,
     * secondary) occur in {@code str}: more than 20% of the fragment list
     * and more than 10% of the candidate's own fragment count d(str).
     */
    public final boolean a(String str) {
        if (this.b == null || this.b.length == 0 || TextUtils.isEmpty(str)) {
            return false;
        }
        // FIX: the decompiled source declared this as Object and then called
        // String methods on it, which does not compile; typed as String.
        String lower = str.toLowerCase();
        int hits = 0;
        for (String fragment : this.b) {
            if (lower.contains(fragment)) {
                hits++;
            }
        }
        boolean matched = hits > 0
                && (((double) hits) * 1.0d) / ((double) this.b.length) > 0.2d
                && (((double) hits) * 1.0d) / ((double) d(lower)) > 0.1d;
        if (matched) {
            return matched;
        }
        // Fall back to the ar.c-converted fragment list.
        if (this.c == null || this.c.length == 0 || TextUtils.isEmpty(lower)) {
            return false;
        }
        String lower2 = lower.toLowerCase();
        int hits2 = 0;
        for (String fragment : this.c) {
            if (lower2.contains(fragment)) {
                hits2++;
            }
        }
        return hits2 > 0
                && (((double) hits2) * 1.0d) / ((double) this.c.length) > 0.2d
                && (((double) hits2) * 1.0d) / ((double) d(lower2)) > 0.1d;
    }

    /**
     * Counts fragments in {@code str} using the same run/char rules as
     * {@link #b}. NOTE(review): this splits on the regex "\s\+" (whitespace
     * followed by '+') while b() splits on the class "[\s\+]"; the
     * inconsistency is preserved from the decompiled source -- confirm intent.
     */
    private static int d(String str) {
        if (TextUtils.isEmpty(str)) {
            return 0;
        }
        String[] tokens = str.split("\\s\\+");
        int count = 0;
        for (String token : tokens) {
            int lastLatin = -1;
            int firstLatin = -1;
            for (int pos = 0; pos < token.length(); pos++) {
                if (token.charAt(pos) <= '\u00ff') {
                    if (firstLatin < 0) {
                        firstLatin = pos;
                    }
                    lastLatin = pos;
                } else {
                    if (lastLatin >= firstLatin && lastLatin >= 0) {
                        count++;
                        lastLatin = -1;
                        firstLatin = -1;
                    }
                    count++;
                }
            }
            if (lastLatin >= firstLatin && lastLatin >= 0) {
                count++;
            }
        }
        return count;
    }
}
|
ry99/jfxhacc
|
engine/src/main/java/com/ostrichemulators/jfxhacc/model/impl/PayeeImpl.java
|
<reponame>ry99/jfxhacc
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.ostrichemulators.jfxhacc.model.impl;
import com.ostrichemulators.jfxhacc.model.Payee;
import com.ostrichemulators.jfxhacc.model.vocabulary.Payees;
import org.openrdf.model.URI;
/**
 * Payee domain object: a named entity identified by an RDF URI and typed
 * as {@code Payees.TYPE}. All behavior is inherited from
 * {@link NamedIDableImpl}.
 *
 * @author ryan
 */
public class PayeeImpl extends NamedIDableImpl implements Payee {

    /**
     * @param id   RDF URI identifying this payee
     * @param name human-readable payee name
     */
    public PayeeImpl( URI id, String name ) {
        super( Payees.TYPE, id, name );
    }
}
|
MagicPrince666/rt-boot
|
rt-boot/soc/mtk.mips/mt7628/clock/clock.c
|
/*
* The early init code for MIPS
* Copyright ZhaoXiaowei 2018
* Github:github.com/zhaohengbo
*/
#include <kernel/rtthread.h>
#include <global/global.h>
#include <soc/mt7628/mt7628_mmap.h>
#include <soc/mt7628/mt7628_clock.h>
/*
 * Derive the CPU and bus clock frequencies from the SoC clock configuration
 * register and publish them in the global rtboot_data. All frequencies are
 * in Hz and scaled down by CPU_FRAC_DIV.
 */
void mt7628_clock_init(void)
{
	rt_uint32_t reg;
	rt_uint32_t mips_cpu_feq,mips_bus_feq;

	reg = __REG(RALINK_CLKCFG0_REG);
	if (reg & (0x1<<1)) {
		/* CLKCFG0 bit1 set: CPU runs from the 480 MHz source */
		mips_cpu_feq = (480*1000*1000)/CPU_FRAC_DIV;
	}else if (reg & 0x1) {
		/* CLKCFG0 bit0 set: CPU clocked from the external crystal;
		 * SYS_CGF0 bit6 apparently selects 40 MHz vs 25 MHz crystal
		 * (presumably an XTAL frequency strap -- confirm in datasheet) */
		mips_cpu_feq = ((__REG(RALINK_SYS_CGF0_REG)>>6)&0x1) ? (40*1000*1000)/CPU_FRAC_DIV \
		: (25*1000*1000)/CPU_FRAC_DIV;
	}else {
		/* Neither bypass bit set: PLL output, again selected by
		 * SYS_CGF0 bit6 (580 MHz vs 575 MHz) */
		if ((__REG(RALINK_SYS_CGF0_REG)>>6)&0x1)
			mips_cpu_feq = (580*1000*1000)/CPU_FRAC_DIV;
		else
			mips_cpu_feq = (575*1000*1000)/CPU_FRAC_DIV;
	}
	/* Bus clock is a fixed 1/3 of the CPU clock */
	mips_bus_feq = mips_cpu_feq/3;

	rtboot_data.cpu_clk = mips_cpu_feq;
	rtboot_data.bus_clk = mips_bus_feq;
	rtboot_data.system_frequency = rtboot_data.cpu_clk;
}
|
IMsistemas/aqua-riego
|
public/app/controllers/InvetarioItemKardex.js
|
// AngularJS controller for the inventory/kardex screen: loads warehouses and
// category filters, lists paginated inventory and shows the kardex
// (weighted-average stock card) for a selected product.
app.controller('Kardex', function($scope, $http, API_URL) {
    $scope.FechaK=now(); // Load the current day by default
    $scope.FechaF=now();
    $scope.FechaI=first();
    $scope.Bodegas=[]; // warehouse list
    $scope.Categoria1=[]; // category list (level 1)
    $scope.Categoria2=[]; // category list (level 2, subcategories)
    $scope.CategoriaItem="";
    $scope.SubCategoriaItem="";
    $scope.BodegaItem="";
    $scope.Inventario=[];
    $scope.Kardex=[];
    ///---
    // Pagination callback: reload the requested inventory page.
    $scope.pageChanged = function(newPage) {
        $scope.initLoad(newPage);
    };
    // Loads one page of inventory. Validates (via modal warnings) that a
    // date and a warehouse are selected before calling the API.
    $scope.initLoad = function(pageNumber){
        /*if ($scope.busqueda == undefined) {
        var search = null;
        } else var search = $scope.busqueda;
        var filtros = {
        search: search
        };*/
        /*$http.get(API_URL + 'DocumentoVenta/getAllFitros?page=' + pageNumber + '&filter=' + JSON.stringify(filtros))
        .success(function(response){
        /*$scope.Allventas=response;
        console.log(response);*/
        /*$scope.Allventas = response.data;
        $scope.totalItems = response.total;
        });*/
        var filtro={
            Fecha: convertDatetoDB($("#FechaK").val()),
            Categoria: $scope.CategoriaItem,
            SubCategria: $scope.SubCategoriaItem,
            Bodega : $scope.BodegaItem,
            Search: $scope.search,
            Tipo: 'B'
        };
        if($("#FechaK").val()!=""){
            if($scope.BodegaItem!=""){
                console.log(API_URL);
                // Paginated variant: filter goes in the query string.
                $http.get(API_URL + 'procesoskardex/loadinventario?page=' + pageNumber + '&filter='+JSON.stringify(filtro))
                .success(function(response){
                    $scope.Inventario=response.data;
                    $scope.totalItems = response.total;
                });
            }else{
                QuitarClasesMensaje();
                $("#titulomsm").addClass("btn-warning");
                $scope.Mensaje="Seleccione Una Bodega";
                $("#msm").modal("show");
            }
        }else{
            QuitarClasesMensaje();
            $("#titulomsm").addClass("btn-warning");
            $scope.Mensaje="Seleccione Un Fecha";
            $("#msm").modal("show");
        }
    };
    ///---
    // Loads the warehouse dropdown options.
    $scope.CargarBodegas=function(){
        $http.get(API_URL + 'procesoskardex/loadbodegas')
        .success(function(response){
            $scope.Bodegas=response;
        });
    };
    ///---
    // Loads level-1 categories.
    $scope.CargarCategoriaNivel1=function(){
        $http.get(API_URL + 'procesoskardex/loadcategoria')
        .success(function(response){
            $scope.Categoria1=response;
        });
    };
    ///---
    // Loads the subcategories of the currently selected level-1 category.
    $scope.CargarCategoriaNivel2=function(){
        if($scope.CategoriaItem!=""){
            $http.get(API_URL + 'procesoskardex/loadsubcategoria/'+$scope.CategoriaItem)
            .success(function(response){
                $scope.Categoria2=response;
            });
        }
    };
    ///---
    $scope.search="";
    // Non-paginated inventory load with the same date/warehouse validation
    // as initLoad.
    $scope.CargarInventario=function () {
        var filtro={
            Fecha: convertDatetoDB($("#FechaK").val()),
            Categoria: $scope.CategoriaItem,
            SubCategria: $scope.SubCategoriaItem,
            Bodega : $scope.BodegaItem,
            Search: $scope.search,
            Tipo: 'B'
        };
        if($("#FechaK").val()!=""){
            if($scope.BodegaItem!=""){
                $scope.EnviarFiltroInventario(filtro);
            }else{
                QuitarClasesMensaje();
                $("#titulomsm").addClass("btn-warning");
                $scope.Mensaje="Seleccione Una Bodega";
                $("#msm").modal("show");
            }
        }else{
            QuitarClasesMensaje();
            $("#titulomsm").addClass("btn-warning");
            $scope.Mensaje="Seleccione Un Fecha";
            $("#msm").modal("show");
        }
    };
    // NOTE(review): unlike initLoad, the filter travels as a path segment
    // here rather than a query parameter -- confirm the API route expects
    // this shape.
    $scope.EnviarFiltroInventario=function(data){
        $http.get(API_URL + 'procesoskardex/loadinventario/'+JSON.stringify(data))
        .success(function(response){
            $scope.Inventario=response;
        });
    };
    ///---
    $scope.ActivasInactivas="A";
    $scope.itemproductobodega={};
    // Opens the weighted-average kardex modal for a product/warehouse row
    // and loads its entries for the selected date range.
    $scope.RegistroKardexPP=function (item) {
        $("#RegistroKardePromedioPonderado").modal("show");
        var filtro={
            FechaI: convertDatetoDB($("#FechaI").val()),
            FechaF: convertDatetoDB($("#FechaF").val()),
            idproducto_bodega: item.idproducto_bodega,
            Estado: $scope.ActivasInactivas,
            Tipo: 'B'
        };
        $scope.itemproductobodega=item;
        $http.get(API_URL + 'procesoskardex/loadkardex/'+JSON.stringify(filtro))
        .success(function(response){
            $scope.Kardex=response;
        });
    };
    // Refreshes the kardex modal for the product that is already selected
    // (e.g. after changing the date range or active/inactive toggle).
    $scope.RegistroKardexPPActualizar=function () {
        $("#RegistroKardePromedioPonderado").modal("show");
        var filtro={
            FechaI: convertDatetoDB($("#FechaI").val()),
            FechaF: convertDatetoDB($("#FechaF").val()),
            idproducto_bodega: $scope.itemproductobodega.idproducto_bodega,
            Estado: $scope.ActivasInactivas,
            Tipo: 'B'
        };
        $http.get(API_URL + 'procesoskardex/loadkardex/'+JSON.stringify(filtro))
        .success(function(response){
            $scope.Kardex=response;
        });
    };
});
/**
 * Converts between the UI date format (DD/MM/YYYY) and the DB format
 * (YYYY-MM-DD).
 * @param {string} now - date string to convert
 * @param {*} [revert] - when given (non-null), converts DB -> UI instead
 * @returns {string} the converted date string
 */
function convertDatetoDB(now, revert) {
    // `== null` matches both null and undefined, preserving the original
    // loose `== undefined` check.
    if (revert == null) {
        const [day, month, year] = now.split('/');
        return `${year}-${month}-${day}`;
    }
    const [year, month, day] = now.split('-');
    return `${day}/${month}/${year}`;
}
/**
 * Returns today's date formatted as DD/MM/YYYY (zero-padded).
 * @returns {string}
 */
function now() {
    const today = new Date();
    const day = String(today.getDate()).padStart(2, '0');
    // getMonth() is 0-based, hence the +1.
    const month = String(today.getMonth() + 1).padStart(2, '0');
    return `${day}/${month}/${today.getFullYear()}`;
}
/**
 * Returns January 1st of the current year as DD/MM/YYYY.
 * @returns {string}
 */
function first() {
    return `01/01/${new Date().getFullYear()}`;
}
/**
 * Left-pads a numeric value with zeros: the value is padded to 5 characters
 * and an additional leading "0" is always prepended, e.g. 7 -> "000007",
 * 12345 -> "012345", 123456 -> "0123456".
 * FIX: the original used an undeclared loop variable (implicit global `x`)
 * and left `completa` undefined for empty input; this version is equivalent
 * for all non-empty inputs.
 * @param {number|string} valor - value to pad
 * @returns {string} padded string
 */
function completarNumer(valor) {
    return '0' + String(valor).padStart(5, '0');
}
/**
 * Removes every bootstrap contextual class from the message-modal title so
 * callers can apply a single fresh one before showing the modal.
 */
function QuitarClasesMensaje() {
    // jQuery's removeClass accepts a space-separated list, so one call
    // replaces the original five identical calls.
    $("#titulomsm").removeClass("btn-primary btn-warning btn-success btn-info btn-danger");
}
// Initialize every .datepicker input as a Spanish DD/MM/YYYY picker;
// ignoreReadonly lets the calendar open even on readonly inputs.
$(document).ready(function(){
    $('.datepicker').datetimepicker({
        locale: 'es',
        format: 'DD/MM/YYYY',
        ignoreReadonly: true
    });
});
|
boku-inc/boku-http-auth
|
integration-tests/src/test/java/com/boku/auth/http/it/ClientTest.java
|
package com.boku.auth.http.it;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import com.boku.auth.http.AuthorizationHeader;
import com.boku.auth.http.it.support.Servlets;
import com.boku.auth.http.client.BokuAPIClientResponse;
import com.boku.auth.http.client.exception.BokuAPIClientException;
import com.boku.auth.http.client.exception.InvalidAPIEntityException;
import org.apache.http.HttpStatus;
import org.apache.http.client.HttpResponseException;
import org.hamcrest.Matchers;
import org.hamcrest.text.IsEqualIgnoringCase;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import com.boku.auth.http.it.support.HttpRequestHandler;
/**
* Assuming the server component is working properly, test all client functionality against it.
*/
public class ClientTest extends CWAIntegrationTestBase {

    @Rule
    public ExpectedException exception = ExpectedException.none();

    // Register the endpoints shared by most tests: one unauthenticated echo,
    // plus authenticated echo and ping handlers.
    public ClientTest() {
        env.server.addServlet("/echo", Servlets.noAuth(new Servlets.EchoHandler()));
        env.server.addServlet("/auth/echo", Servlets.withAuth(env.authContextProvider, new Servlets.EchoHandler()));
        env.server.addServlet("/auth/ping", Servlets.withAuth(env.authContextProvider, new Servlets.PingHandler()));
    }

    // POST without auth: headers, query string and UTF-8 body echo back intact.
    @Test
    public void testRoundTripPOSTNoAuth() throws IOException {
        final String requestText = "five, and unicode文字列もある";
        String responseText = env.client
            .post(url("/echo?qparam=1"))
            .withHeader("X-BOKU-Test", "2")
            .withHeader("X-BOKU-Test", "3")
            .withHeader("X-BOKU-Something", "4")
            .withEntityString(requestText)
            .execute();
        Assert.assertEquals(
            "POST /echo?qparam=1\n" +
            "Content-Type: text/plain; charset=UTF-8\n" +
            "X-BOKU-Something: 4\n" +
            "X-BOKU-Test: 2\n" +
            "X-BOKU-Test: 3\n" +
            "\n" +
            requestText,
            responseText
        );
    }

    // POST with auth: the Authorization header is added and the round trip
    // succeeds (header value itself is masked before comparing).
    @Test
    public void testRoundTripPOSTWithAuth() throws IOException {
        String requestText = "five, and unicode文字列もある";
        String responseText = env.client
            .post(url("/auth/echo?qparam=1"))
            .withAuthorization(authorization("X-BOKU-Test", "X-DoesntExist"))
            .withHeader("X-BOKU-Test", "2")
            .withHeader("X-boku-test", "3")
            .withHeader("X-BOKU-Something", "4")
            .withEntityString(requestText)
            .execute();
        Assert.assertNotNull(responseText);
        Assert.assertEquals(
            "POST /auth/echo?qparam=1\n" +
            "Authorization: auth contents\n" +
            "Content-Type: text/plain; charset=UTF-8\n" +
            "X-BOKU-Something: 4\n" +
            "X-BOKU-Test: 2\n" +
            "X-BOKU-Test: 3\n" +
            "\n" +
            requestText,
            responseText.replaceFirst("Authorization: [^\n]+", "Authorization: auth contents")
        );
    }

    // Basic verb coverage against the authenticated ping endpoint.
    @Test
    public void testGET() throws IOException {
        String responseText = env.client
            .get(url("/auth/ping"))
            .withAuthorization(authorization())
            .execute();
        Assert.assertEquals("GET: pong", responseText);
    }

    @Test
    public void testPUT() throws IOException {
        String responseText = env.client
            .put(url("/auth/ping"))
            .withAuthorization(authorization())
            .withEntityString("putty")
            .execute();
        Assert.assertEquals("PUT 'putty': pong", responseText);
    }

    @Test
    public void testDELETE() throws IOException {
        String responseText = env.client
            .delete(url("/auth/ping"))
            .withAuthorization(authorization())
            .execute();
        Assert.assertEquals("DELETE: pong", responseText);
    }

    // By default a missing X-SignedResponse header must fail the call.
    @Test
    public void testNoSignatureReturned() throws IOException {
        env.server.addServlet("/no-signature", Servlets.noAuth(new Servlets.PingHandler()));
        exception.expect(BokuAPIClientException.class);
        exception.expectMessage(Matchers.containsString("Got HTTP/1.1 200 OK with 0 X-SignedResponse headers, expected 1!"));
        env.client
            .get(url("/no-signature"))
            .withAuthorization(authorization())
            .execute();
    }

    // With the signed-response requirement disabled, an unsigned response is OK.
    @Test
    public void testNoResponseSignatureRequired() throws IOException {
        env.server.addServlet("/no-signature", Servlets.noAuth(new Servlets.PingHandler()));
        String responseText = env.client
            .get(url("/no-signature"))
            .withAuthorization(authorization())
            .withOptionRequireSignedResponse(false)
            .execute();
        Assert.assertEquals("GET: pong", responseText);
    }

    // A response signed with an invalid signature (the request's own header
    // echoed back) must fail verification.
    @Test
    public void testInvalidSignatureReturned() throws IOException {
        env.server.addServlet("/invalid-signature", Servlets.noAuth(
            new HttpRequestHandler() {
                @Override
                public void handle(HttpServletRequest req, HttpServletResponse resp, byte[] requestEntity) throws IOException {
                    new Servlets.PingHandler().handle(req, resp, requestEntity);
                    AuthorizationHeader ah = AuthorizationHeader.parse(req.getHeader(AuthorizationHeader.REQUEST_HEADER));
                    resp.setHeader(AuthorizationHeader.RESPONSE_HEADER, ah.toString());
                }
            }
        ));
        exception.expect(BokuAPIClientException.class);
        exception.expectMessage(Matchers.containsString("Failed to verify signature of HTTP/1.1 200 OK response"));
        env.client
            .get(url("/invalid-signature"))
            .withAuthorization(authorization())
            .execute();
    }

    // Even with the requirement disabled, a signature that IS present but
    // invalid must still fail.
    @Test
    public void testSignatureNotRequiredButInvalidSignatureReturned() throws IOException {
        env.server.addServlet("/invalid-signature", Servlets.noAuth(
            new HttpRequestHandler() {
                @Override
                public void handle(HttpServletRequest req, HttpServletResponse resp, byte[] requestEntity) throws IOException {
                    new Servlets.PingHandler().handle(req, resp, requestEntity);
                    AuthorizationHeader ah = AuthorizationHeader.parse(req.getHeader(AuthorizationHeader.REQUEST_HEADER));
                    resp.setHeader(AuthorizationHeader.RESPONSE_HEADER, ah.toString());
                }
            }
        ));
        exception.expect(BokuAPIClientException.class);
        exception.expectMessage(Matchers.containsString("Failed to verify signature of HTTP/1.1 200 OK response"));
        env.client
            .get(url("/invalid-signature"))
            .withAuthorization(authorization())
            .withOptionRequireSignedResponse(false)
            .execute();
    }

    // A body whose bytes are not valid for the declared charset must still
    // authenticate; the text comes out garbled but the call succeeds.
    @Test
    public void testServerReturnsMojibake() throws IOException {
        final String text = "僕は文字化けになりたくないんだよぉ〜!";
        final byte[] sjis = text.getBytes("shift-jis");
        final String garbled = new String(sjis, StandardCharsets.UTF_8); // This is where parts of the SJIS data get lost
        // Check that our input is actually lossy
        {
            String mojibake = new String(garbled.getBytes(StandardCharsets.UTF_8), "shift-jis");
            Assert.assertNotEquals(text, mojibake);
        }
        env.server.addServlet("/auth/mojibake", Servlets.noAuth(
            new HttpRequestHandler() {
                @Override
                public void handle(HttpServletRequest req, HttpServletResponse resp, byte[] requestEntity) throws IOException {
                    resp.setHeader("Content-Type", "text/plain; charset=UTF-8");
                    resp.getOutputStream().write(sjis);
                }
            }
        ));
        String result = env.client
            .get(url("/auth/mojibake"))
            .withAuthorization(authorization())
            .execute();
        // Garbled, but auth should have no problem
        Assert.assertEquals(new String(sjis, StandardCharsets.UTF_8), result);
    }

    // Small JAXB payload type used by the (un)marshalling tests below.
    @XmlRootElement
    @XmlAccessorType(XmlAccessType.FIELD)
    private static class X {
        String a;
        String b;
    }

    // Entity objects are marshalled to XML with the expected Content-Type.
    @Test
    public void testMarshalling() throws IOException {
        X x = new X();
        x.a = "foo";
        x.b = "bar";
        String responseText = env.client
            .post(url("/auth/echo"))
            .withHeader("X-BOKU-Test", "2")
            .withAuthorization(authorization())
            .withEntity(x)
            .execute();
        Assert.assertNotNull(responseText);
        Assert.assertEquals(
            "POST /auth/echo\n" +
            "Authorization: auth contents\n" +
            "Content-Type: application/xml; charset=\"UTF-8\"\n" +
            "X-BOKU-Test: 2\n" +
            "\n" +
            "<x><a>foo</a><b>bar</b></x>",
            responseText.replaceFirst("Authorization: [^\n]+", "Authorization: auth contents")
        );
    }

    // Responses are unmarshalled into the requested type.
    @Test
    public void testUnmarshalling() throws IOException {
        env.server.addServlet("/auth/json", Servlets.withAuth(
            env.authContextProvider, new HttpRequestHandler() {
                @Override
                public void handle(HttpServletRequest req, HttpServletResponse resp, byte[] requestEntity) throws IOException {
                    resp.setContentType("application/json; charset=UTF-8");
                    resp.getOutputStream().print("<x><a>foo</a><b>bar</b></x>");
                }
            }
        ));
        X x = env.client.get(url("/auth/json"))
            .withAuthorization(authorization())
            .execute(X.class);
        Assert.assertNotNull(x);
        Assert.assertEquals("foo", x.a);
        Assert.assertEquals("bar", x.b);
    }

    // A body that cannot be unmarshalled raises InvalidAPIEntityException.
    @Test
    public void testUnmarshalFails() throws IOException {
        env.server.addServlet("/auth/json", Servlets.withAuth(
            env.authContextProvider, new HttpRequestHandler() {
                @Override
                public void handle(HttpServletRequest req, HttpServletResponse resp, byte[] requestEntity) throws IOException {
                    resp.setContentType("application/json; charset=UTF-8");
                    resp.getOutputStream().print("this is not xml!");
                }
            }
        ));
        exception.expect(InvalidAPIEntityException.class);
        env.client.get(url("/auth/json"))
            .withAuthorization(authorization())
            .execute(X.class);
    }

    // Requesting BokuAPIClientResponse exposes the raw entity, content type
    // and charset.
    @Test
    public void testRawResponseEntity() throws IOException {
        BokuAPIClientResponse response = env.client
            .post(url("/auth/ping"))
            .withAuthorization(authorization())
            .withEntityString("test")
            .execute(BokuAPIClientResponse.class);
        Assert.assertNotNull(response);
        BokuAPIClientResponse.Entity entity = response.getEntity();
        Assert.assertNotNull(entity);
        Assert.assertEquals("text/plain", entity.getContentType().getMimeType());
        Assert.assertNotNull(entity.getCharset());
        Assert.assertEquals("UTF-8", entity.getCharset().name());
        String expectedString = "POST 'test': pong";
        Assert.assertArrayEquals(expectedString.getBytes(entity.getCharset()), entity.getData());
        Assert.assertEquals(expectedString, entity.getDataAsText());
    }

    // Binary responses without any Content-Type still authenticate.
    @Test
    public void testNoCharsetAuthSucceeds() throws IOException {
        env.server.addServlet("/auth/broken", Servlets.withAuth(
            env.authContextProvider, new HttpRequestHandler() {
                @Override
                public void handle(HttpServletRequest req, HttpServletResponse resp, byte[] requestEntity) throws IOException {
                    resp.getOutputStream().write(new byte[]{(byte)0xAA, (byte)0xBB, (byte)0xCC, (byte)0xDD});
                }
            }
        ));
        BokuAPIClientResponse response = env.client
            .get(url("/auth/broken"))
            .withAuthorization(authorization())
            .execute(BokuAPIClientResponse.class);
        Assert.assertNotNull(response);
        BokuAPIClientResponse.Entity entity = response.getEntity();
        Assert.assertNotNull(entity);
        Assert.assertEquals("application/octet-stream", entity.getContentType().getMimeType());
        Assert.assertArrayEquals(new byte[]{(byte)0xAA, (byte)0xBB, (byte)0xCC, (byte)0xDD}, entity.getData());
    }

    // A text Content-Type without an explicit charset decodes as UTF-8.
    @Test
    public void testNoCharsetDefaultsUTF8() throws IOException {
        env.server.addServlet("/auth/broken", Servlets.withAuth(
            env.authContextProvider, new HttpRequestHandler() {
                @Override
                public void handle(HttpServletRequest req, HttpServletResponse resp, byte[] requestEntity) throws IOException {
                    resp.setContentType("text/plain");
                    resp.getOutputStream().write("This is a test! これはテスト!".getBytes(StandardCharsets.UTF_8));
                }
            }
        ));
        BokuAPIClientResponse response = env.client
            .get(url("/auth/broken"))
            .withAuthorization(authorization())
            .execute(BokuAPIClientResponse.class);
        Assert.assertEquals("text/plain", response.getEntity().getContentType().toString());
        Assert.assertEquals("This is a test! これはテスト!", response.getEntity().getDataAsText());
    }

    // An explicit non-UTF-8 charset in the Content-Type is honored.
    @Test
    public void testAlternateCharsetWorks() throws IOException {
        final String altCharset = "shift-jis";
        final String respText = "sjisテスト";
        env.server.addServlet("/auth/altcharset", Servlets.withAuth(
            env.authContextProvider, new HttpRequestHandler() {
                @Override
                public void handle(HttpServletRequest req, HttpServletResponse resp, byte[] requestEntity) throws IOException {
                    resp.setContentType("text/plain; charset=" + altCharset);
                    resp.getOutputStream().write(respText.getBytes(altCharset));
                }
            }
        ));
        String response = env.client
            .get(url("/auth/altcharset"))
            .withAuthorization(authorization())
            .execute();
        Assert.assertEquals(respText, response);
    }

    // 204 (no entity) and 200 with an empty body are distinguishable.
    @Test
    public void testNoResponseEntity() throws IOException {
        env.server.addServlet("/auth/no-entity", Servlets.withAuth(
            env.authContextProvider, new HttpRequestHandler() {
                @Override
                public void handle(HttpServletRequest req, HttpServletResponse resp, byte[] requestEntity) {
                    resp.setStatus(HttpStatus.SC_NO_CONTENT);
                }
            }
        ));
        env.server.addServlet("/auth/zero-entity", Servlets.withAuth(
            env.authContextProvider, new HttpRequestHandler() {
                @Override
                public void handle(HttpServletRequest req, HttpServletResponse resp, byte[] requestEntity) {
                }
            }
        ));
        // No entity
        Assert.assertNull(
            env.client
                .get(url("/auth/no-entity"))
                .withAuthorization(authorization())
                .execute()
        );
        // No entity using BokuAPIClientResponse
        BokuAPIClientResponse response = env.client
            .get(url("/auth/no-entity"))
            .withAuthorization(authorization())
            .execute(BokuAPIClientResponse.class);
        Assert.assertNotNull(response);
        Assert.assertNull(response.getEntity());
        // Zero-length entity
        response = env.client
            .get(url("/auth/zero-entity"))
            .withAuthorization(authorization())
            .execute(BokuAPIClientResponse.class);
        Assert.assertNotNull(response);
        Assert.assertNotNull(response.getEntity());
        Assert.assertEquals(0, response.getEntity().getData().length);
    }

    // 5xx responses surface as HttpResponseException including the body.
    @Test
    public void testHTTP500() throws IOException {
        env.server.addServlet("/auth/error", Servlets.withAuth(
            env.authContextProvider, new HttpRequestHandler() {
                @Override
                public void handle(HttpServletRequest req, HttpServletResponse resp, byte[] requestEntity) throws IOException {
                    resp.setStatus(HttpStatus.SC_INTERNAL_SERVER_ERROR);
                    resp.setContentType("text/plain; charset=UTF-8");
                    resp.getOutputStream().print("Ohnoes");
                }
            }
        ));
        exception.expect(HttpResponseException.class);
        exception.expectMessage(new IsEqualIgnoringCase("HTTP/1.1 500 Server Error: text/plain; charset=UTF-8[Ohnoes]"));
        env.client
            .get(url("/auth/error"))
            .withAuthorization(authorization())
            .execute();
    }

    // Redirects are NOT followed; a 301 is reported as an error.
    @Test
    public void testHTTP301() throws IOException {
        env.server.addServlet("/auth/redirect", Servlets.withAuth(
            env.authContextProvider, new HttpRequestHandler() {
                @Override
                public void handle(HttpServletRequest req, HttpServletResponse resp, byte[] requestEntity) throws IOException {
                    resp.setStatus(HttpStatus.SC_MOVED_PERMANENTLY);
                    resp.setHeader("Location", "/auth/somewhereelse");
                    resp.setContentType("text/plain; charset=UTF-8");
                    resp.getOutputStream().print("Moved!");
                }
            }
        ));
        exception.expect(HttpResponseException.class);
        exception.expectMessage(new IsEqualIgnoringCase("HTTP/1.1 301 Moved Permanently: text/plain; charset=UTF-8[Moved!]"));
        env.client
            .get(url("/auth/redirect"))
            .withAuthorization(authorization())
            .execute();
    }

    // With the raw-response API, 4xx statuses do not throw and the error
    // entity remains accessible and unmarshalable.
    @Test
    public void testHTTPErrorAPIResponse() throws IOException {
        env.server.addServlet("/auth/conflict", Servlets.withAuth(
            env.authContextProvider, new HttpRequestHandler() {
                @Override
                public void handle(HttpServletRequest req, HttpServletResponse resp, byte[] requestEntity) throws IOException {
                    resp.setStatus(HttpStatus.SC_CONFLICT);
                    resp.setContentType("application/json; charset=UTF-8");
                    resp.getOutputStream().print("<x><a>foo</a><b>bar</b></x>");
                }
            }
        ));
        BokuAPIClientResponse response = env.client
            .get(url("/auth/conflict"))
            .withAuthorization(authorization())
            .execute(BokuAPIClientResponse.class);
        Assert.assertNotNull(response);
        Assert.assertEquals(409, response.getStatusLine().getStatusCode());
        BokuAPIClientResponse.Entity entity = response.getEntity();
        Assert.assertNotNull(entity);
        Assert.assertEquals("<x><a>foo</a><b>bar</b></x>", entity.getDataAsText());
        X x = entity.getDataAs(X.class);
        Assert.assertNotNull(x);
        Assert.assertEquals("foo", x.a);
        Assert.assertEquals("bar", x.b);
    }

    // Response header access is case-insensitive and preserves multiplicity.
    @Test
    public void testHeaders() throws IOException {
        env.server.addServlet("/auth/headers", Servlets.withAuth(
            env.authContextProvider, new HttpRequestHandler() {
                @Override
                public void handle(HttpServletRequest req, HttpServletResponse resp, byte[] requestEntity) throws IOException {
                    resp.setStatus(HttpStatus.SC_NO_CONTENT);
                    resp.addHeader("X-BOKU-Test", "1");
                    resp.addHeader("X-BOKU-Test", "2");
                    resp.addHeader("X-BOKU-Something", "3");
                }
            }
        ));
        BokuAPIClientResponse response = env.client
            .get(url("/auth/headers"))
            .withAuthorization(authorization())
            .execute(BokuAPIClientResponse.class);
        Assert.assertNotNull(response);
        assertHeader(response, "X-BOKU-Test", 2, "1", "2");
        assertHeader(response, "X-BOKU-Something", 1, "3", "3");
        Assert.assertFalse(response.containsHeader("X-Nothing"));
        Assert.assertEquals(0, response.getHeaders("X-Nothing").length);
        Assert.assertNull(response.getFirstHeader("X-Nothing"));
        Assert.assertNull(response.getLastHeader("X-Nothing"));
        Assert.assertNotNull(response.getAllHeaders());
        Assert.assertTrue(response.getAllHeaders().length >= 3);
    }

    // Asserts a header's count, first and last value under exact-, upper-
    // and lower-case spellings of its name.
    private static void assertHeader(BokuAPIClientResponse response, String header, int count, String first, String last) {
        String[] variants = new String[] {
            header, header.toUpperCase(), header.toLowerCase()
        };
        for (String variant : variants) {
            Assert.assertTrue(variant, response.containsHeader(variant));
            Assert.assertEquals(variant, count, response.getHeaders(variant).length);
            Assert.assertEquals(variant, first, response.getFirstHeader(variant).getValue());
            Assert.assertEquals(variant, last, response.getLastHeader(variant).getValue());
        }
    }
}
|
wayshall/onetwo
|
core/modules/boot/src/main/java/org/onetwo/boot/module/timer/BaseRedisLockTask.java
|
package org.onetwo.boot.module.timer;
import org.onetwo.boot.module.redis.RedisLockRunner;
import org.onetwo.common.log.JFishLoggerFactory;
import org.slf4j.Logger;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.integration.redis.util.RedisLockRegistry;
/**
 * A cron expression is a string of 6 or 7 space-separated fields, each with
 * its own meaning: second, minute, hour, day-of-month, month, day-of-week,
 * and an optional year.
 * Spring's scheduler only supports 6-field expressions (no year field);
 * supplying more than six raises an error.
 *
 * Field syntax:
 *  *  : matches any value, e.g. "*" in the seconds field fires every second;
 *  ?  : "no specific value"; only valid in day-of-month and day-of-week --
 *       when one of the two is given a value, set the other to "?" to avoid conflicts;
 *  -  : a range, e.g. "5-20" in the minutes field fires every minute from 5 to 20;
 *  /  : start/interval, e.g. "5/20" in the minutes field fires at minutes 5, 25 and 45;
 *  ,  : a list, e.g. "5,20" in the minutes field fires at minutes 5 and 20;
 *  L  : "last"; only valid in day-of-week and day-of-month, e.g. "1L" in
 *       day-of-week means the last Sunday of the month;
 *  W  : the working day (Mon-Fri) nearest the given day-of-month; the search
 *       never crosses into another month;
 *  LW : combined, the last working day of the month, i.e. its last Friday;
 *  #  : the n-th weekday of the month, only in day-of-month, e.g. "1#3" means
 *       the third Sunday of the month.
 * @author <NAME>
 * <br/>
 */
public class BaseRedisLockTask implements InitializingBean {

    // Key namespace so job locks do not collide with other redis entries.
    private static final String LOCKER_PREFIX = "TimerJobLock:";

    protected final Logger logger = JFishLoggerFactory.getLogger(getClass());

    @Autowired
    private RedisLockRegistry redisLockRegistry;

    // Redis key guarding this task: only one node may run it at a time.
    private String taskLockKey;
    // Lock timeout as a duration string (e.g. "3m").
    private String taskLockTimeout = "3m";

    public BaseRedisLockTask(String taskLockKey) {
        super();
        this.taskLockKey = LOCKER_PREFIX + taskLockKey;
    }

    @Override
    public void afterPropertiesSet() throws Exception {
        // Assert.notNull(redisLockRegistry, "redisLockRegistry not found!");
    }

    /**
     * Runs {@code action} only if the distributed lock can be acquired;
     * otherwise logs that another node is already executing the task and
     * skips this invocation. (Log messages are in Chinese: "task started" /
     * "task finished" / "a task is already running, skipping this trigger".)
     */
    protected void executeTaskInLock(String name, Runnable action) {
        getRedisLockRunner().tryLock(() -> {
            logger.info("\n---------------[{}]开始执行任务……", name);
            action.run();
            logger.info("\n---------------[{}]结束执行任务……\n", name);
            return null;
        }, () -> {
            logger.info("[{}]有正在执行的任务,忽略本次任务调度……", name);
            return null;
        });
    }

    // Template entry point for subclasses; the lock key doubles as the task name.
    public void doTask() {
        this.executeTaskInLock(taskLockKey, ()->{
        });
    }

    // Builds a fresh lock runner bound to this task's key and timeout.
    protected RedisLockRunner getRedisLockRunner(){
        RedisLockRunner redisLockRunner = RedisLockRunner.createLocker(redisLockRegistry, taskLockKey, taskLockTimeout);
        return redisLockRunner;
    }

    public void setTaskLockTimeout(String taskLockTimeout) {
        this.taskLockTimeout = taskLockTimeout;
    }

    public String getTaskLockKey() {
        return taskLockKey;
    }
}
|
chocholee/arm4j
|
arm4j-weixin/src/main/java/com/arm4j/weixin/request/user/response/UserListResponse.java
|
<filename>arm4j-weixin/src/main/java/com/arm4j/weixin/request/user/response/UserListResponse.java
package com.arm4j.weixin.request.user.response;
import com.alibaba.fastjson.annotation.JSONField;
/**
 * Response payload of the WeChat user-list endpoint, deserialized with
 * fastjson. Fields mirror the JSON keys: total / count / data / next_openid.
 */
public class UserListResponse {

    /** Total number reported by the endpoint. */
    @JSONField(name = "total")
    private Integer total;

    public Integer getTotal() {
        return this.total;
    }

    public void setTotal(Integer total) {
        this.total = total;
    }

    /** Number of entries returned in this page. */
    @JSONField(name = "count")
    private Integer count;

    public Integer getCount() {
        return this.count;
    }

    public void setCount(Integer count) {
        this.count = count;
    }

    /** Page payload (list of user identifiers). */
    @JSONField(name = "data")
    private UserListDataResponse data;

    public UserListDataResponse getData() {
        return this.data;
    }

    public void setData(UserListDataResponse data) {
        this.data = data;
    }

    /** Pagination cursor: the open id to start the next page from. */
    @JSONField(name = "next_openid")
    private String nextOpenId;

    public String getNextOpenId() {
        return this.nextOpenId;
    }

    public void setNextOpenId(String nextOpenId) {
        this.nextOpenId = nextOpenId;
    }
}
|
Dalma-Systems/broker
|
broker-api-service/src/main/java/com/dalma/broker/service/exception/warehouse/WarehouseNotFoundException.java
|
package com.dalma.broker.service.exception.warehouse;
import com.dalma.broker.base.error.exception.RestResponseException;
import lombok.extern.slf4j.Slf4j;
import org.springframework.http.HttpStatus;
import static com.dalma.broker.service.error.WarehouseExceptionError.NOT_FOUND;
// Thrown when a warehouse lookup fails; carries the NOT_FOUND error code from
// WarehouseExceptionError and maps to an HTTP 400 response.
@Slf4j
public class WarehouseNotFoundException extends RestResponseException {

    private static final long serialVersionUID = 3544621144649085589L;

    public WarehouseNotFoundException() {
        super(NOT_FOUND.getMessage());
        // Logged at construction time so every instantiation leaves a trace,
        // even if the exception is later swallowed.
        log.error(NOT_FOUND.getMessage());
    }

    @Override
    public HttpStatus getHttpStatus() {
        // NOTE(review): returns 400 BAD_REQUEST despite the "not found"
        // name — confirm 404 was not intended.
        return HttpStatus.BAD_REQUEST;
    }

    @Override
    public String getErrorCode() {
        return NOT_FOUND.code();
    }
}
|
magcicada/energy-meters
|
src/main/java/com/vladmarica/energymeters/client/model/TexturedQuadCache.java
|
<reponame>magcicada/energy-meters<gh_stars>1-10
package com.vladmarica.energymeters.client.model;
import com.vladmarica.energymeters.EnergyMetersMod;
import java.util.HashMap;
import java.util.Map;
import net.minecraft.client.renderer.block.model.BakedQuad;
import net.minecraft.client.renderer.block.model.BakedQuadRetextured;
import net.minecraft.client.renderer.texture.TextureAtlasSprite;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.ResourceLocation;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
@SideOnly(Side.CLIENT)
public class TexturedQuadCache {

    // Singleton: the quad/sprite maps are global client-side render state.
    public static final TexturedQuadCache INSTANCE = new TexturedQuadCache();

    // Texture location -> stitched atlas sprite, supplied via setTextureMap.
    private Map<ResourceLocation, TextureAtlasSprite> spriteMap;
    // Base (untextured) cube quad per face, supplied via setCubeQuadMap.
    private Map<EnumFacing, BakedQuad> cubeQuadMap;
    // Per-face cache of already-retextured quads, keyed by texture location.
    private Map<EnumFacing, Map<ResourceLocation, BakedQuadRetextured>> texturedQuadCache = new HashMap<>();

    /**
     * Returns the cube quad for {@code side} retextured with {@code texture},
     * building and caching it on the first request.
     *
     * NOTE(review): assumes setCubeQuadMap() has already been called — it is
     * what populates the per-side inner maps; otherwise get(side) is null
     * here and this method throws a NullPointerException.
     */
    public BakedQuadRetextured getBakedQuad(EnumFacing side, ResourceLocation texture) {
        // Check cache first
        if (this.texturedQuadCache.get(side).containsKey(texture)) {
            // EnergyMetersMod.LOGGER.debug("TexturedQuadCache hit for {}-{}", side.getName(), texture);
            return this.texturedQuadCache.get(side).get(texture);
        }

        // On a cache miss, build and store the requested retextured quad.
        // EnergyMetersMod.LOGGER.debug("TexturedQuadCache miss for {}-{}", side.getName(), texture);
        BakedQuadRetextured retexturedQuad = buildRetexturedQuad(side, texture);
        this.texturedQuadCache.get(side).put(texture, retexturedQuad);
        return retexturedQuad;
    }

    // Re-skins the base quad for the given face with the atlas sprite
    // registered for the texture location.
    private BakedQuadRetextured buildRetexturedQuad(EnumFacing side, ResourceLocation texture) {
        BakedQuad originalQuad = cubeQuadMap.get(side);
        TextureAtlasSprite sprite = spriteMap.get(texture);
        return new BakedQuadRetextured(originalQuad, sprite);
    }

    public void setTextureMap(Map<ResourceLocation, TextureAtlasSprite> spriteMap) {
        this.spriteMap = spriteMap;
    }

    /**
     * Installs the per-face base quads and resets the retextured-quad cache.
     * Exactly one quad per face is required.
     */
    public void setCubeQuadMap(Map<EnumFacing, BakedQuad> cubeQuadMap) {
        if (cubeQuadMap.size() != 6) {
            throw new RuntimeException(
                String.format("Cube quad map has %d quads, expected 6", cubeQuadMap.size()));
        }
        this.cubeQuadMap = cubeQuadMap;
        // Create an empty inner cache for every face present in the map.
        for (EnumFacing side : this.cubeQuadMap.keySet()) {
            this.texturedQuadCache.put(side, new HashMap<>());
        }
    }

    // Private: use INSTANCE.
    private TexturedQuadCache() {}
}
|
suansuancu/avalonWxj
|
src/vtree/voidTag.js
|
<reponame>suansuancu/avalonWxj<filename>src/vtree/voidTag.js
// HTML void elements: tags that never take a closing tag or children.
// Stored as a plain lookup map (tag -> 1) for cheap membership tests.
export var voidTag = [
  'area', 'base', 'basefont', 'bgsound', 'br', 'col', 'command', 'embed',
  'frame', 'hr', 'img', 'input', 'keygen', 'link', 'meta', 'param',
  'source', 'track', 'wbr'
].reduce(function (map, tag) {
  map[tag] = 1
  return map
}, {})
|
CS-SI/Rugged
|
src/main/java/org/orekit/rugged/refraction/AtmosphericComputationParameters.java
|
/* Copyright 2013-2020 CS GROUP
* Licensed to CS GROUP (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.rugged.refraction;
import org.orekit.rugged.errors.RuggedException;
import org.orekit.rugged.errors.RuggedMessages;
import org.orekit.rugged.linesensor.LineSensor;
import org.orekit.rugged.utils.GridCreation;
/**
* Atmospheric refraction computation parameters.
* Defines for inverse location a set of parameters in order to be able to perform the computation.
* @author <NAME>
* @since 2.1
*/
public class AtmosphericComputationParameters {

    /** Margin for definition of the interpolation grid.
     * To be inside the min line and max line range to avoid problem with inverse location grid computation. */
    private static final int MARGIN_LINE = 10;

    /** Default value for pixel step. */
    private static final int DEFAULT_STEP_PIXEL = 100;
    /** Default value for line step. */
    private static final int DEFAULT_STEP_LINE = 100;

    /** Actual values for pixel step in case default are overwritten. */
    private int pixelStep;
    /** Actual values for line step in case default are overwritten. */
    private int lineStep;

    // Definition of grids for sensor (u = along pixel; v = along line)
    /** Linear grid in pixel. */
    private double[] uGrid;
    /** Linear grid in line. */
    private double[] vGrid;
    /** Size of uGrid = nbPixelGrid. */
    private int nbPixelGrid;
    /** Size of vGrid = nbLineGrid. */
    private int nbLineGrid;

    // Definition of the associated sensor
    /** Current min line. NaN until configureCorrectionGrid is called. */
    private double minLineSensor = Double.NaN;
    /** Current max line. NaN until configureCorrectionGrid is called. */
    private double maxLineSensor = Double.NaN;
    /** Current sensor name. Null until configureCorrectionGrid is called. */
    private String sensorName = null;

    /**
     * Default constructor. Starts with the default pixel/line steps; use
     * {@link #setGridSteps(int, int)} to override them before configuring
     * the grid.
     */
    public AtmosphericComputationParameters() {
        this.pixelStep = DEFAULT_STEP_PIXEL;
        this.lineStep = DEFAULT_STEP_LINE;
    }

    /** Configuration of the interpolation grid. This grid is associated to the given sensor,
     * with the given min and max lines.
     * @param sensor line sensor
     * @param minLine min line defined for the inverse location
     * @param maxLine max line defined for the inverse location
     */
    public void configureCorrectionGrid(final LineSensor sensor, final int minLine, final int maxLine) {

        // Keep information about the sensor and the required search lines.
        // Needed to test if the grid is initialized with this context.
        this.minLineSensor = minLine;
        this.maxLineSensor = maxLine;
        this.sensorName = sensor.getName();

        // Compute the number of pixels and lines for the grid (round value is sufficient)
        final int sensorNbPxs = sensor.getNbPixels();
        this.nbPixelGrid = sensorNbPxs / this.pixelStep;

        // check the validity of the min and max lines:
        // after removing MARGIN_LINE lines on each side, the range must
        // still hold at least two line steps
        if ((maxLine - minLine + 1 - 2 * MARGIN_LINE) < 2 * this.lineStep) {
            final String info = ": (maxLine - minLine + 1 - 2*" + MARGIN_LINE + ") < 2*" + this.lineStep;
            throw new RuggedException(RuggedMessages.INVALID_RANGE_FOR_LINES, minLine, maxLine, info);
        }
        this.nbLineGrid = (maxLine - minLine + 1 - 2 * MARGIN_LINE) / this.lineStep;

        // Compute the linear grids in pixel (u index) and line (v index)
        this.uGrid = GridCreation.createLinearGrid(0, sensorNbPxs - 1, this.nbPixelGrid);
        this.vGrid = GridCreation.createLinearGrid(minLine + MARGIN_LINE, maxLine - MARGIN_LINE, this.nbLineGrid);
    }

    /**
     * Set the grid steps in pixel and line (used to compute inverse location).
     * Overwrite the default values, for time optimization if necessary.
     * @param gridPixelStep grid pixel step for the inverse location computation (must be > 0)
     * @param gridLineStep grid line step for the inverse location computation (must be > 0)
     */
    public void setGridSteps(final int gridPixelStep, final int gridLineStep) {

        if (gridPixelStep <= 0) {
            final String reason = " pixelStep <= 0";
            throw new RuggedException(RuggedMessages.INVALID_STEP, gridPixelStep, reason);
        }
        if (gridLineStep <= 0) {
            final String reason = " lineStep <= 0";
            throw new RuggedException(RuggedMessages.INVALID_STEP, gridLineStep, reason);
        }
        this.pixelStep = gridPixelStep;
        this.lineStep = gridLineStep;
    }

    /**
     * @return the size of pixel grid
     */
    public int getNbPixelGrid() {
        return nbPixelGrid;
    }

    /**
     * @return the size of line grid
     */
    public int getNbLineGrid() {
        return nbLineGrid;
    }

    /**
     * @return the pixel grid (defensive copy; callers may modify it freely)
     */
    public double[] getUgrid() {
        return uGrid.clone();
    }

    /**
     * @return the line grid (defensive copy; callers may modify it freely)
     */
    public double[] getVgrid() {
        return vGrid.clone();
    }

    /**
     * @return the min line used to compute the current grids
     */
    public double getMinLineSensor() {
        return minLineSensor;
    }

    /**
     * @return the max line used to compute the current grids
     */
    public double getMaxLineSensor() {
        return maxLineSensor;
    }

    /**
     * @return the sensor name used to compute the current grids
     */
    public String getSensorName() {
        return sensorName;
    }
}
|
transposit/graalpython
|
graalpython/com.oracle.graal.python/src/com/oracle/graal/python/nodes/call/CallDispatchNode.java
|
/*
* Copyright (c) 2017, 2019, Oracle and/or its affiliates.
* Copyright (c) 2014, Regents of the University of California
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice, this list of
* conditions and the following disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.oracle.graal.python.nodes.call;
import com.oracle.graal.python.PythonLanguage;
import com.oracle.graal.python.builtins.objects.code.PCode;
import com.oracle.graal.python.builtins.objects.function.PBuiltinFunction;
import com.oracle.graal.python.builtins.objects.function.PFunction;
import com.oracle.graal.python.nodes.builtins.FunctionNodes.GetFunctionCodeNode;
import com.oracle.graal.python.runtime.PythonOptions;
import com.oracle.truffle.api.Assumption;
import com.oracle.truffle.api.RootCallTarget;
import com.oracle.truffle.api.dsl.Cached;
import com.oracle.truffle.api.dsl.GenerateUncached;
import com.oracle.truffle.api.dsl.ImportStatic;
import com.oracle.truffle.api.dsl.ReportPolymorphism;
import com.oracle.truffle.api.dsl.Specialization;
import com.oracle.truffle.api.frame.Frame;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.nodes.Node;
/**
 * Truffle node that dispatches a call to a PFunction or PBuiltinFunction.
 * The DSL specializations form an inline cache that degrades from
 * "identity-cached function in a single context" to "cached call target"
 * to a fully generic uncached invoke.
 */
@ImportStatic(PythonOptions.class)
@ReportPolymorphism
@GenerateUncached
public abstract class CallDispatchNode extends Node {

    protected static FunctionInvokeNode createInvokeNode(PFunction callee) {
        return FunctionInvokeNode.create(callee);
    }

    protected static FunctionInvokeNode createInvokeNode(PBuiltinFunction callee) {
        return FunctionInvokeNode.create(callee);
    }

    protected static CallTargetInvokeNode createCtInvokeNode(PFunction callee) {
        return CallTargetInvokeNode.create(callee);
    }

    protected static CallTargetInvokeNode createCtInvokeNode(PBuiltinFunction callee) {
        return CallTargetInvokeNode.create(callee);
    }

    public static CallDispatchNode create() {
        return CallDispatchNodeGen.create();
    }

    public static CallDispatchNode getUncached() {
        return CallDispatchNodeGen.getUncached();
    }

    // Assumption that only a single context exists; guards specializations
    // that cache function objects directly.
    protected Assumption singleContextAssumption() {
        return PythonLanguage.getCurrent().singleContextAssumption;
    }

    public final Object executeCall(VirtualFrame frame, PFunction callee, Object[] arguments) {
        return executeInternal(frame, callee, arguments);
    }

    public final Object executeCall(VirtualFrame frame, PBuiltinFunction callee, Object[] arguments) {
        return executeInternal(frame, callee, arguments);
    }

    protected abstract Object executeInternal(Frame frame, PFunction callee, Object[] arguments);

    protected abstract Object executeInternal(Frame frame, PBuiltinFunction callee, Object[] arguments);

    // We only have a single context and this function never changed its code
    @Specialization(guards = {"callee == cachedCallee"}, limit = "getCallSiteInlineCacheMaxDepth()", assumptions = {"singleContextAssumption()", "cachedCallee.getCodeStableAssumption()"})
    protected Object callFunctionCached(VirtualFrame frame, @SuppressWarnings("unused") PFunction callee, Object[] arguments,
                    @SuppressWarnings("unused") @Cached("callee") PFunction cachedCallee,
                    @Cached("createInvokeNode(cachedCallee)") FunctionInvokeNode invoke) {
        return invoke.execute(frame, arguments);
    }

    // We only have a single context and this function changed its code before, but now it's
    // constant
    protected PCode getCode(GetFunctionCodeNode getFunctionCodeNode, PFunction function) {
        return getFunctionCodeNode.execute(function);
    }

    @Specialization(guards = {"callee == cachedCallee", "getCode(getFunctionCodeNode, callee) == cachedCode"}, limit = "getCallSiteInlineCacheMaxDepth()", assumptions = {"singleContextAssumption()"})
    protected Object callFunctionCachedCode(VirtualFrame frame, @SuppressWarnings("unused") PFunction callee, Object[] arguments,
                    @SuppressWarnings("unused") @Cached("callee") PFunction cachedCallee,
                    @SuppressWarnings("unused") @Cached("create()") GetFunctionCodeNode getFunctionCodeNode,
                    @SuppressWarnings("unused") @Cached("getCode(getFunctionCodeNode, callee)") PCode cachedCode,
                    @Cached("createInvokeNode(cachedCallee)") FunctionInvokeNode invoke) {
        return invoke.execute(frame, arguments);
    }

    // We have multiple contexts, don't cache the objects so that contexts can be cleaned up
    @Specialization(guards = {"callee.getCallTarget() == ct"}, limit = "getCallSiteInlineCacheMaxDepth()", replaces = "callFunctionCachedCode")
    protected Object callFunctionCachedCt(VirtualFrame frame, PFunction callee, Object[] arguments,
                    @SuppressWarnings("unused") @Cached("callee.getCallTarget()") RootCallTarget ct,
                    @Cached("createCtInvokeNode(callee)") CallTargetInvokeNode invoke) {
        // Globals and closure are read from the callee at call time, not cached.
        return invoke.execute(frame, callee.getGlobals(), callee.getClosure(), arguments);
    }

    // Single context: cache the builtin function instance itself.
    @Specialization(guards = {"callee == cachedCallee"}, limit = "getCallSiteInlineCacheMaxDepth()", assumptions = "singleContextAssumption()")
    protected Object callBuiltinFunctionCached(VirtualFrame frame, @SuppressWarnings("unused") PBuiltinFunction callee, Object[] arguments,
                    @SuppressWarnings("unused") @Cached("callee") PBuiltinFunction cachedCallee,
                    @Cached("createInvokeNode(cachedCallee)") FunctionInvokeNode invoke) {
        return invoke.execute(frame, arguments);
    }

    // Multiple contexts: cache only the call target for builtins.
    @Specialization(guards = "callee.getCallTarget() == ct", limit = "getCallSiteInlineCacheMaxDepth()")
    protected Object callBuiltinFunctionCachedCt(VirtualFrame frame, @SuppressWarnings("unused") PBuiltinFunction callee, Object[] arguments,
                    @SuppressWarnings("unused") @Cached("callee.getCallTarget()") RootCallTarget ct,
                    @Cached("createCtInvokeNode(callee)") CallTargetInvokeNode invoke) {
        // Builtin functions carry no globals/closure, hence the nulls.
        return invoke.execute(frame, null, null, arguments);
    }

    // Generic fallback once the function inline caches overflow.
    @Specialization(replaces = {"callFunctionCached", "callFunctionCachedCode", "callFunctionCachedCt"})
    protected Object callFunctionUncached(Frame frame, PFunction callee, Object[] arguments,
                    @Cached GenericInvokeNode invoke) {
        return invoke.executeInternal(frame, callee, arguments);
    }

    // Generic fallback once the builtin inline caches overflow.
    @Specialization(replaces = {"callBuiltinFunctionCached", "callBuiltinFunctionCachedCt"})
    protected Object callBuiltinFunctionUncached(Frame frame, PBuiltinFunction callee, Object[] arguments,
                    @Cached GenericInvokeNode invoke) {
        return invoke.executeInternal(frame, callee, arguments);
    }
}
|
lechium/tvOS145Headers
|
System/Library/PrivateFrameworks/Announce.framework/ANAnnounce.h
|
<filename>System/Library/PrivateFrameworks/Announce.framework/ANAnnounce.h
/*
* This header is generated by classdump-dyld 1.5
* on Wednesday, April 28, 2021 at 9:10:49 PM Mountain Standard Time
* Operating System: Version 14.5 (Build 18L204)
* Image Source: /System/Library/PrivateFrameworks/Announce.framework/Announce
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>. Updated by <NAME>.
*/
@protocol ANAnnounceDelegate;
@class NSXPCConnection, NSUUID, NSArray;
// Client-side facade of the Announce XPC service, as reconstructed by
// classdump-dyld. Declarations only — behavior notes below are inferred
// from names and should be confirmed against the binary.
@interface ANAnnounce : NSObject {

	id<ANAnnounceDelegate> _delegate;   // weak delegate backing ivar
	NSXPCConnection* _connection;       // XPC connection to the announce daemon
	NSUUID* _endpointIdentifier;        // identity of this endpoint

}

@property (nonatomic,readonly) NSXPCConnection * connection;              //@synthesize connection=_connection - In the implementation block
@property (nonatomic,readonly) NSArray * unplayedAnnouncements;
@property (nonatomic,readonly) NSArray * receivedAnnouncements;
@property (nonatomic,readonly) NSUUID * endpointIdentifier;               //@synthesize endpointIdentifier=_endpointIdentifier - In the implementation block
@property (assign,nonatomic,__weak) id<ANAnnounceDelegate> delegate;      //@synthesize delegate=_delegate - In the implementation block
-(id)init;
-(void)dealloc;
-(void)invalidate;
-(id<ANAnnounceDelegate>)delegate;
-(void)setDelegate:(id<ANAnnounceDelegate>)arg1 ;
-(NSXPCConnection *)connection;
-(void)sendRequest:(id)arg1 completion:(/*^block*/id)arg2 ;
-(NSUUID *)endpointIdentifier;
-(void)localParticipant:(/*^block*/id)arg1 ;
-(void)mockAnnouncement:(id)arg1 forHomeWithName:(id)arg2 playbackDeadline:(id)arg3 completion:(/*^block*/id)arg4 ;
-(void)lastPlayedAnnouncementInfo:(/*^block*/id)arg1 ;
-(void)getScanningDeviceCandidates:(/*^block*/id)arg1 ;
-(NSArray *)unplayedAnnouncements;
-(id)homeNamesForContext:(id)arg1 ;
-(BOOL)isLocalDeviceInRoom:(id)arg1 ;
-(BOOL)isEndpointWithUUID:(id)arg1 inRoomWithName:(id)arg2 ;
-(id)initWithEndpointIdentifier:(id)arg1 ;
-(void)sendAnnouncement:(id)arg1 toRoomsWithNames:(id)arg2 andZonesWithNames:(id)arg3 inHomeWithName:(id)arg4 completion:(/*^block*/id)arg5 ;
-(void)_sendRequestLegacy:(id)arg1 completion:(/*^block*/id)arg2 ;
-(void)sendAnnouncement:(id)arg1 toRoomsWithIDs:(id)arg2 andZonesWithIDs:(id)arg3 inHomeWithID:(id)arg4 completion:(/*^block*/id)arg5 ;
-(void)broadcastReply:(id)arg1 forAnnouncementID:(id)arg2 completion:(/*^block*/id)arg3 ;
-(void)sendAnnouncement:(id)arg1 toHomeWithName:(id)arg2 completion:(/*^block*/id)arg3 ;
-(void)sendAnnouncement:(id)arg1 toHomeWithID:(id)ar2 completion:(/*^block*/id)arg3 ;
-(void)sendReply:(id)arg1 forAnnouncement:(id)arg2 completion:(/*^block*/id)arg3 ;
-(void)broadcastReply:(id)arg1 forAnnouncement:(id)arg2 completion:(/*^block*/id)arg3 ;
-(void)broadcastReply:(id)arg1 completion:(/*^block*/id)arg2 ;
-(void)receivedAnnouncementIDs:(/*^block*/id)arg1 ;
-(id)receivedAnnouncementIDs;
-(void)announcementForID:(id)arg1 reply:(/*^block*/id)arg2 ;
-(id)announcementForID:(id)arg1 ;
-(void)getReceivedAnnouncementsWithCompletionHandler:(/*^block*/id)arg1 ;
-(NSArray *)receivedAnnouncements;
-(id)contextFromAnnouncement:(id)arg1 ;
-(BOOL)isAnnounceEnabledForAnyAccessoryInHome:(id)arg1 ;
-(BOOL)isAnnounceEnabledForAnyAccessoryOrUserInHome:(id)arg1 ;
-(void)prewarmWithHandler:(/*^block*/id)arg1 ;
-(void)sendAnnouncement:(id)arg1 toRoomsWithNames:(id)arg2 inHomeWithName:(id)arg3 completion:(/*^block*/id)arg4 ;
-(void)sendAnnouncement:(id)arg1 toZonesWithNames:(id)arg2 inHomeWithName:(id)arg3 completion:(/*^block*/id)arg4 ;
-(void)sendAnnouncement:(id)arg1 toRoomsWithIDs:(id)arg2 inHomeWithID:(id)arg3 completion:(/*^block*/id)arg4 ;
-(void)sendAnnouncement:(id)arg1 toZonesWithIDs:(id)arg2 inHomeWithID:(id)arg3 completion:(/*^block*/id)arg4 ;
@end
|
Bpowers4/turicreate
|
src/external/boost/boost_1_68_0/libs/spirit/example/support/utree/error_handler.hpp
|
/*==============================================================================
Copyright (c) 2001-2011 <NAME>
Copyright (c) 2010-2011 <NAME>
Distributed under the Boost Software License, Version 1.0. (See accompanying
file BOOST_LICENSE_1_0.rst or copy at http://www.boost.org/LICENSE_1_0.txt)
==============================================================================*/
#if !defined(BOOST_SPIRIT_UTREE_EXAMPLE_ERROR_HANDLER_HPP)
#define BOOST_SPIRIT_UTREE_EXAMPLE_ERROR_HANDLER_HPP
#include <string>
#include <sstream>
#include <boost/spirit/home/support/info.hpp>
#include <boost/spirit/include/support_line_pos_iterator.hpp>
namespace sexpr
{
using boost::spirit::info;
template <typename Out>
struct print_info
{
typedef boost::spirit::utf8_string string;
print_info(Out& out) : out(out), first(true) {}
void element(string const& tag, string const& value, int) const
{
if (!first) {
out << ' ';
first = false;
}
if (value == "")
out << tag;
else
out << "\"" << value << '"';
}
Out& out;
mutable bool first;
};
    // Exception thrown on a parse expectation failure. Its what() message is
    // itself an s-expression of the form:
    //   (exception "<source>" <line> '(expected_component (<info...>)))
    struct expected_component : std::exception
    {
        std::string msg;

        expected_component(std::string const& source, std::size_t line
          , info const& w)
        {
            using boost::spirit::basic_info_walker;

            std::ostringstream oss;
            oss << "(exception \"" << source << "\" ";

            // `line` is unsigned; callers may pass -1 for "unknown", which
            // wraps to the max value — so compare against -1 and print a
            // literal -1 instead of the wrapped number.
            if (line == -1)
                oss << -1;
            else
                oss << line;

            oss << " '(expected_component (";

            // Walk the info tree, printing each element via print_info.
            print_info<std::ostringstream> pr(oss);
            basic_info_walker<print_info<std::ostringstream> >
                walker(pr, w.tag, 0);
            boost::apply_visitor(walker, w.value);

            oss << ")))";

            msg = oss.str();
        }

        virtual ~expected_component() throw() {}

        virtual char const* what() const throw()
        {
            return msg.c_str();
        }
    };
template <typename Iterator>
struct error_handler
{
template <typename, typename, typename, typename>
struct result
{
typedef void type;
};
std::string source;
error_handler(std::string const& source_ = "<string>") : source(source_) {}
void operator()(Iterator first, Iterator last, Iterator err_pos
, info const& what) const
{
using boost::spirit::get_line;
Iterator eol = err_pos;
std::size_t line = get_line(err_pos);
throw expected_component(source, line, what);
}
};
} // sexpr
#endif // BOOST_SPIRIT_UTREE_EXAMPLE_ERROR_HANDLER_HPP
|
OpenFermentor/Web-Monitor
|
src/presentation/router.js
|
import React, { Component } from 'react'
import { BrowserRouter, Route } from 'react-router-dom'
import MainNavigation from './navigation'
import Dashboard from './dashboard'
import SetUp from './set_up'
import Experiment from './experiment'
import Alerts from './alerts'
// True when this build is the remote (hosted) web client, selected at build
// time via the REACT_APP_REMOTE_WEB_APPLICATION env var.
const isRemoteClient = process.env.REACT_APP_REMOTE_WEB_APPLICATION === 'true'

// Routes for the local client: includes the calibration screen.
const LOCAL_CLIENT_ROUTES = [{
  path: '/',
  exact: true,
  component: Dashboard,
  title: 'Monitor'
}, {
  path: '/experiments',
  component: Experiment,
  title: 'Experimentos'
}, {
  path: '/calibration',
  component: SetUp,
  title: 'Calibración'
}]

// Routes for the remote client: same as local minus calibration.
const REMOTE_CLIENT_ROUTES = [{
  path: '/',
  exact: true,
  component: Dashboard,
  title: 'Monitor'
}, {
  path: '/experiments',
  component: Experiment,
  title: 'Experimentos'
}]

const ROUTES = isRemoteClient ? REMOTE_CLIENT_ROUTES : LOCAL_CLIENT_ROUTES

// Feature flags derived from the client type: the remote client hides all
// experiment-mutation features but shows the user menu.
export const FUNCTIONALITY_ACCESS = {
  showStartExperiment: !isRemoteClient,
  showExperimentCreation: !isRemoteClient,
  showExperimentEdition: !isRemoteClient,
  showExperimentFinalization: !isRemoteClient,
  showAddExternalReading: !isRemoteClient,
  showUserMenu: isRemoteClient
}
// Top-level router: renders the navigation bar, one <Route> per entry in
// ROUTES (which depends on local vs. remote client), and the global alert
// overlay.
export default class Router extends Component {
  render () {
    return (
      <BrowserRouter>
        <div className='base'>
          <div className='mainWrapper'>
            <MainNavigation routes={ROUTES} />
            <div className='mainContent'>
              { ROUTES.map(({ path, component, exact }, index) => (
                <Route
                  key={index}
                  path={path}
                  exact={exact}
                  component={component}
                />
              ))}
            </div>
          </div>
          <Alerts />
        </div>
      </BrowserRouter>
    )
  }
}
|
emmt/TiPi
|
src/commands/EdgePreservingDeconvolutionCommand.java
|
/*
* This file is part of TiPi (a Toolkit for Inverse Problems and Imaging)
* developed by the MitiV project.
*
* Copyright (c) 2014 the MiTiV project, http://mitiv.univ-lyon1.fr/
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package commands;
import java.io.IOException;
import java.io.PrintStream;
import java.util.List;
import java.util.Locale;
import org.kohsuke.args4j.Argument;
import org.kohsuke.args4j.CmdLineException;
import org.kohsuke.args4j.CmdLineParser;
import org.kohsuke.args4j.Option;
import mitiv.array.ArrayUtils;
import mitiv.array.ShapedArray;
import mitiv.base.Shape;
import mitiv.invpb.EdgePreservingDeconvolution;
import mitiv.io.ColorModel;
import mitiv.io.DataFormat;
import mitiv.optim.OptimTask;
import mitiv.utils.FFTUtils;
public class EdgePreservingDeconvolutionCommand {
    // NOTE(review): assigned but not read anywhere in the visible code.
    private PrintStream stream = System.out;

    // --- input data and PSF ------------------------------------------------
    @Option(name = "-init", usage = "Name of initial image file.", metaVar = "INIT")
    private String initName = null;

    @Option(name = "-psf", usage = "Name of point spread function file.", metaVar = "FILENAME")
    private String psfName = null;

    @Option(name = "-normalize", usage = "Normalize the point spread function.")
    private boolean normalizePSF = false;

    // --- noise / weighting model (weights override gain+noise) -------------
    @Option(name = "-weights", usage = "Name statistical weights file.", metaVar = "FILENAME")
    private String weightsName = null;

    @Option(name = "-noise", usage = "Standard deviation of the noise.", metaVar = "SIGMA")
    private double sigma = Double.NaN;

    @Option(name = "-gain", usage = "Detector gain.", metaVar = "GAMMA")
    private double gamma = Double.NaN;

    @Option(name = "-invalid", usage = "Name of invalid data file.", metaVar = "FILENAME")
    private String invalidName = null;

    // --- regularization ----------------------------------------------------
    @Option(name = "-mu", usage = "Regularization level.", metaVar = "MU")
    private double mu = 10.0;

    @Option(name = "-tau", usage = "Edge threshold.", metaVar = "TAU")
    private double tau = 1.0;

    // --- optimizer settings -------------------------------------------------
    @Option(name = "-gatol", usage = "Absolute gradient tolerance for the convergence.", metaVar = "GATOL")
    private double gatol = 0.0;

    @Option(name = "-grtol", usage = "Relative gradient tolerance for the convergence.", metaVar = "GRTOL")
    private double grtol = 1e-3;

    @Option(name = "-mem", usage = "If M > 0, use quasi-Newton method with M previous steps; otherwise, use non-linear conjugate gradient.", metaVar = "M")
    private int limitedMemorySize = 5;

    @Option(name = "-min", usage = "Lower bound for the variables.", metaVar = "LOWER")
    private double lowerBound = Double.NEGATIVE_INFINITY;

    @Option(name = "-max", usage = "Upper bound for the variables.", metaVar = "UPPER")
    private double upperBound = Double.POSITIVE_INFINITY;

    @Option(name = "-single", usage = "Force single precision.")
    private boolean single = false;

    // --- run control --------------------------------------------------------
    @Option(name = "-help", aliases = {"--help", "-h", "-?"}, usage = "Display help.")
    private boolean help;

    @Option(name = "-verbose", usage = "Verbose mode.")
    private boolean verbose = false;

    @Option(name = "-debug", usage = "Debug mode.")
    private boolean debug = false;

    @Option(name = "-maxiter", usage = "Maximum number of iterations, -1 for no limits.")
    private int maxiter = 200;

    @Option(name = "-maxeval", usage = "Maximum number of evaluations, -1 for no limits.")
    private int maxeval = -1;

    // --- padding of the result ----------------------------------------------
    @Option(name = "-pad", usage = "Padding method.", metaVar = "\"auto\"|\"min\"|NUMBER")
    private String paddingMethod = "auto";

    @Option(name = "-fill", usage = "Value for padding.", metaVar = "VALUE")
    private double fillValue = Double.NaN;

    @Option(name = "-crop", usage = "Crop result to same size as input.")
    private boolean crop = false;

    // Positional arguments: INPUT and OUTPUT file names.
    @Argument
    private List<String> arguments;
public static ShapedArray loadData(String name, boolean single) {
ShapedArray arr = DataFormat.load(name);
ColorModel colorModel = ColorModel.guessColorModel(arr);
if (colorModel == ColorModel.NONE) {
return (single ? arr.toFloat() : arr.toDouble());
} else {
return (single
? ColorModel.filterImageAsFloat(arr, ColorModel.GRAY)
: ColorModel.filterImageAsDouble(arr, ColorModel.GRAY));
}
}
    /**
     * Prints the usage message and terminates the process with the given
     * status code. On success (code 0) the full option list goes to stdout;
     * on error only a short hint goes to stderr.
     */
    static private void usage(CmdLineParser parser, int code) {
        PrintStream stream = (code == 0 ? System.out : System.err);
        stream.println("Usage: deconv [OPTIONS] INPUT OUTPUT");
        if (code == 0) {
            stream.println("Options:");
            parser.getProperties().withUsageWidth(80);
            parser.printUsage(stream);
        } else {
            stream.println("Try option -help for a more complete description of options.");
        }
        System.exit(code);   // never returns
    }
    /**
     * Command-line entry point: parses the options, configures and runs the
     * edge-preserving deconvolution solver, then writes the best solution.
     *
     * @param args command line arguments (options followed by INPUT OUTPUT).
     */
    public static void main(String[] args) {

        // Switch to "US" locale to avoid problems with number formats.
        Locale.setDefault(Locale.US);

        // Parse options.
        EdgePreservingDeconvolutionCommand job = new EdgePreservingDeconvolutionCommand();
        CmdLineParser parser = new CmdLineParser(job);
        try {
            parser.parseArgument(args);
        } catch (CmdLineException e) {
            System.err.format("Error: %s\n", e.getMessage());
            usage(parser, 1);
        }
        if (job.help) {
            usage(parser, 0);
        }

        // Deal with remaining arguments.
        int size = (job.arguments == null ? 0 : job.arguments.size());
        if (size != 2) {
            System.err.format("Too %s arguments.\n", (size < 2 ? "few" : "many"));
            usage(parser, 1);
        }
        String inputName = job.arguments.get(0);
        String outputName = job.arguments.get(1);

        EdgePreservingDeconvolution solver = new EdgePreservingDeconvolution();

        try {
            // Read the blurred data and the PSF.
            solver.setForceSinglePrecision(job.single);
            solver.setData(loadData(inputName, job.single));
            if (job.psfName != null) {
                solver.setPSF(loadData(job.psfName, job.single), job.normalizePSF);
            }

            // Deal with the weights.  An explicit weight map supersedes the
            // detector noise/gain model.
            System.err.format("sigma = %g, gamma = %g\n", job.sigma, job.gamma);
            if (job.weightsName != null) {
                if (! isnan(job.sigma) || ! isnan(job.gamma)) {
                    System.err.println("Warning: options `-gain` and `-noise` are ignored when `-weights` is specified.");
                }
                solver.setWeights(loadData(job.weightsName, job.single));
            } else {
                if (isnan(job.sigma) && ! isnan(job.gamma)) {
                    System.err.println("Warning: option `-gain` alone is ignored, use it with `-noise`.");
                }
                solver.setDetectorNoise(job.sigma);
                solver.setDetectorGain(job.gamma);
            }

            // Deal with bad pixels.
            if (job.invalidName != null) {
                // FIXME: there should be a way to load a mask (i.e. as a boolean array)
                solver.setBads(loadData(job.invalidName, job.single));
            }

            // Compute dimensions of result.  Each dimension is rounded up to
            // an FFT-friendly size according to the chosen padding method.
            Shape dataShape = solver.getData().getShape();
            Shape psfShape = solver.getPSF().getShape();
            int rank = dataShape.rank();
            int[] objDims = new int[rank];
            if (job.paddingMethod.equals("auto")) {
                // Full padding: avoids any wrap-around from the circular convolution.
                for (int k = 0; k < rank; ++k) {
                    int dataDim = dataShape.dimension(k);
                    int psfDim = psfShape.dimension(k);
                    objDims[k] = FFTUtils.bestDimension(dataDim + psfDim - 1);
                }
            } else if (job.paddingMethod.equals("min")) {
                // Minimal padding: just large enough for both data and PSF.
                for (int k = 0; k < rank; ++k) {
                    int dataDim = dataShape.dimension(k);
                    int psfDim = psfShape.dimension(k);
                    objDims[k] = FFTUtils.bestDimension(Math.max(dataDim, psfDim));
                }
            } else {
                // Explicit margin given as a nonnegative integer.
                int pad;
                try {
                    pad = Integer.parseInt(job.paddingMethod);
                } catch (NumberFormatException ex) {
                    throw new IllegalArgumentException("Invalid value for option `-pad`, must be \"auto\", \"min\" or an integer");
                }
                if (pad < 0) {
                    throw new IllegalArgumentException("Padding value must be nonnegative");
                }
                for (int k = 0; k < rank; ++k) {
                    int dataDim = dataShape.dimension(k);
                    int psfDim = psfShape.dimension(k);
                    objDims[k] = FFTUtils.bestDimension(Math.max(dataDim, psfDim) + pad);
                }
            }
            solver.setObjectShape(objDims);
            solver.setFillValue(job.fillValue);

            // Result and initial solution.
            if (job.initName != null) {
                solver.setInitialSolution(loadData(job.initName, job.single));
            }

            // Transfer the remaining command-line settings to the solver.
            solver.setAbsoluteTolerance(job.gatol);
            solver.setRelativeTolerance(job.grtol);
            solver.setLowerBound(job.lowerBound);
            solver.setUpperBound(job.upperBound);
            solver.setLimitedMemorySize(Math.max(0, job.limitedMemorySize));
            solver.setRegularizationLevel(job.mu);
            solver.setEdgeThreshold(job.tau);
            solver.setMaximumIterations(job.maxiter);
            solver.setMaximumEvaluations(job.maxeval);
            solver.setDebug(job.debug);
            solver.setSaveBest(true);

            // Main optimization loop: iterate until convergence, a warning,
            // or an error.
            OptimTask task = solver.start();
            while (true) {
                if (task == OptimTask.ERROR) {
                    fatal(solver.getReason());
                }
                if (task == OptimTask.WARNING) {
                    warn(solver.getReason());
                    break;
                }
                if (job.verbose && (task == OptimTask.NEW_X || task == OptimTask.FINAL_X)) {
                    double elapsed = solver.getElapsedTime();
                    int evaluations = solver.getEvaluations();
                    int iterations = solver.getIterations();
                    solver.getRestarts();
                    job.stream.format("iter: %4d    eval: %4d    time: %7.3f s.    fx = %22.16e    |gx| = %8.2e\n",
                            iterations, evaluations,
                            elapsed, solver.getCost(),
                            solver.getGradient().norm2());
                    if (task == OptimTask.FINAL_X) {
                        job.stream.format("Total time in cost function: %.3f s (%.3f ms/eval.)\n",
                                elapsed, (evaluations > 0 ? 1e3*elapsed/evaluations : 0.0));
                    }
                    //                if (fdata instanceof WeightedConvolutionCost) {
                    //                    WeightedConvolutionCost f = fdata;
                    //                    elapsed = f.getElapsedTimeInFFT();
                    //                    System.out.format("Total time in FFT: %.3f s (%.3f ms/eval.)\n",
                    //                            elapsed, (evaluations > 0 ? 1e3*elapsed/evaluations : 0.0));
                    //                    elapsed = f.getElapsedTime() - elapsed;
                    //                    System.out.format("Total time in other parts of the convolution operator: %.3f s (%.3f ms/eval.)\n",
                    //                            elapsed, (evaluations > 0 ? 1e3*elapsed/evaluations : 0.0));
                    //                }
                }
                if (task == OptimTask.FINAL_X) {
                    break;
                }
                task = solver.iterate();
            }
        } catch (RuntimeException e) {
            fatal(e.getMessage());
        }

        // Save the best solution found, optionally cropped back to the
        // original data shape.
        try {
            ShapedArray arr = solver.getBestSolution().asShapedArray();
            if (job.crop) {
                arr = ArrayUtils.crop(arr, solver.getData().getShape());
            }
            DataFormat.save(arr, outputName);
        } catch (final IOException e) {
            if (job.debug) {
                e.printStackTrace();
            }
            fatal("Failed to write output image (" + e.getMessage() + ")");
        }
        System.exit(0);
    }
private final static boolean isnan(double x) {
return Double.isNaN(x);
}
private static void fatal(String mesg) {
System.err.format("Error: %s.\n", mesg);
System.exit(1);
}
private static void warn(String mesg) {
System.err.format("Warning: %s.\n", mesg);
}
}
|
atp42/jks-ros-pkg
|
teleop_and_haptics/haptic_sandbox/src/haptic_sandbox/abstract_interaction_tool.h
|
#ifndef _ABSTRACT_INTERACTION_TOOL_H_
#define _ABSTRACT_INTERACTION_TOOL_H_
#include <haptic_sandbox/tf_scenegraph_object.h>
#include <haptic_sandbox/abstract_handle.h>
#include <Eigen/Geometry>
namespace something {
typedef tf::Vector3 Vector3;
typedef tf::Quaternion Quaternion;
typedef tf::Transform Transform;
// Base class for an "interaction tool": a scene-graph node that owns a
// child handle and exposes button state plus force/torque set-points.
// Concrete device back-ends are expected to subclass this.
class AbstractInteractionTool: public tf::SceneGraphNode{

public:
  // Methods only!

    // Constructor: registers this node in the scene graph and creates the
    // child handle (see init()).
    AbstractInteractionTool(const std::string &frame_id,
                 tf::TransformListener *tfl, tf::TransformBroadcaster *tfb)
        : SceneGraphNode(frame_id, tfl, tfb)
    {
        init();
    }

    virtual ~AbstractInteractionTool()
    {
        // The handle is owned by this tool (allocated in init()).
        if(handle_) delete handle_;
    }

    // Allocates the child handle and attaches it to this node.
    // NOTE(review): handle_ is only assigned here and is not null-initialized
    // beforehand; calling init() a second time would leak the first handle --
    // confirm this is only ever called once (from the constructor).
    void init()
    {
        handle_ = new something::AbstractHandle(transform_.child_frame_id_ + "_handle", tfl_, tfb_);
        addChild(handle_);
    }

    // Accessor for the (tool-owned) handle node.
    const something::AbstractHandle* getHandle()
    {
        return handle_;
    }

    // Read the state of the binary switches on the tool.
    // Out-of-range indices report "not pressed" rather than failing.
    bool getToolButtonState(const unsigned int &index) const
    {
        if(index >= getToolButtonCount()) return false;
        return button_state_[index];
    }

    // Get the number of buttons available on the tool.
    unsigned int getToolButtonCount() const
    {
        return (unsigned int)button_state_.size();
    }

    // Set the force applied to the tool (cached; subclasses forward to hardware).
    virtual void setToolForce(const Vector3 &force) { last_tool_force_ = force; }

    // Set the torque applied to the tool (cached; subclasses forward to hardware).
    virtual void setToolTorque(const Vector3 &torque) { last_tool_torque_ = torque; }

    // Set the force and torque applied to the tool
    virtual void setToolForceAndTorque(const Vector3 &force, const Vector3 &torque) { setToolForce(force); setToolTorque(torque); }

//    // Set the gripper force on the tool
//    virtual void setToolGripperForce(const float &force);

protected:
    // Methods

    // Resizes the button-state vector; newly added buttons start released.
    void setToolButtonCount(const unsigned int &count)
    {
        button_state_.resize(count, false);
    }

    // Records the pressed/released state of one button; logs and ignores
    // out-of-range indices.
    void setToolButtonState(const size_t &index, const bool &state)
    {
        if(index >= button_state_.size())
        {
            ROS_ERROR("Can't set button %zd state, max size is %zd", index, button_state_.size());
            return;
        }
        button_state_[index] = state;
    }

//    virtual void updateDevice()
//    {
//        // Here is where, for example, we:
//        // readDevicePosition();
//        // getAttachedFramePosition();
//        // computeVirtualCouplingForce();
//        // sendDeviceForce();
//    }

    // Members

    // Child handle node; owned by this object (deleted in the destructor).
    something::AbstractHandle *handle_;

    // Most recent force/torque set-points cached by the setters above.
    Vector3 last_tool_force_;
    Vector3 last_tool_torque_;

    // Pressed/released state for each tool button.
    std::vector<bool> button_state_;
};
} // namespace something
#endif
|
zouyoujin/jpscloud
|
jpscloud-common/src/main/java/com/jpscloud/common/utils/excelRd/ExcelRdRow.java
|
<reponame>zouyoujin/jpscloud<filename>jpscloud-common/src/main/java/com/jpscloud/common/utils/excelRd/ExcelRdRow.java
package com.jpscloud.common.utils.excelRd;
import java.util.ArrayList;
import java.util.List;
/**
 * One row of an Excel sheet being read: an append-only list of cell values.
 * The backing list is created lazily on the first {@link #addCell(Object)}.
 */
public class ExcelRdRow {

    /** Cell values in column order; {@code null} until a cell is added. */
    private List<Object> row;

    /** @return the cell values, or {@code null} when the row is still empty. */
    public List<Object> getRow() {
        return row;
    }

    /** Appends one cell value, creating the backing list on first use. */
    protected void addCell(Object cellContent) {
        if (row == null) {
            row = new ArrayList<Object>();
        }
        row.add(cellContent);
    }
}
|
vincentfung13/TwitterRepManagement
|
twitter_services/tweet_processing/classifying/classifiers.py
|
<filename>twitter_services/tweet_processing/classifying/classifiers.py
from TwitterRepManagement import settings
import nltk.classify
from sklearn import cross_validation
from sklearn.svm import LinearSVC
if __name__ == '__main__':
    # Bootstrap Django *before* the model imports below: importing
    # twitter_services.models fails unless DJANGO_SETTINGS_MODULE is set
    # and django.setup() has been called.  Only needed when this module
    # is run as a script.
    import os
    import django
    os.environ['DJANGO_SETTINGS_MODULE'] = 'TwitterRepManagement.settings'
    django.setup()
from twitter_services.models import TweetTrainingSet
from twitter_services.tweet_processing.normalizing import TweetNormalizer
class DimensionClassifier:
    """Classifies a tweet into a reputation dimension.

    A linear SVM over bag-of-words presence features, built from the
    training tweets stored in ``TweetTrainingSet``.
    """

    def __init__(self):
        # Build a dictionary of every token in the training corpus.
        training_tweets = [obj.tweet for obj in TweetTrainingSet.objects.all()]
        dictionary = []
        for training_tweet in training_tweets:
            dictionary.extend(TweetNormalizer.get_tokens(training_tweet, json=True))

        # Feature vocabulary: frequency ranks 100..1999 (the 100 most common
        # tokens are skipped as near-stopwords).
        self.word_feature = [entry[0] for entry in list(nltk.FreqDist(dictionary).most_common(2000))][100:2000]
        self.feature_sets = [(self.__extract_feature__(tweet), tweet.get('reputation_dimension'))
                             for tweet in training_tweets if tweet.get('reputation_dimension') is not None]

        # Initialize (but do not yet train) the SVM-backed classifier.
        self.classifier = nltk.classify.SklearnClassifier(LinearSVC())
        self.trained = False

    def train(self, **kwargs):
        """Train on ``kwargs['training_set']`` if provided, else on all data."""
        # dict.has_key() was removed in Python 3; the `in` operator behaves
        # identically and also works on Python 2.
        if 'training_set' not in kwargs:
            self.classifier.train(self.feature_sets)
        else:
            self.classifier.train(kwargs['training_set'])
        self.trained = True

    def classify(self, document):
        """Return the predicted reputation dimension, training lazily."""
        if not self.trained:
            self.train()
        return self.classifier.classify(self.__extract_feature__(document))

    def __extract_feature__(self, tweet):
        # Bag-of-words presence features over the fixed vocabulary.
        tweet_words = TweetNormalizer.get_tokens(tweet, json=True)
        features = {}
        for word in self.word_feature:
            features['contains({})'.format(word)] = (word in tweet_words)
        return features
# This classifier is used to identify spam tweets
class SpamDetector(object):
    """Linear-SVM spam detector trained on the bundled SMS spam corpus."""

    def __init__(self):
        corpus_path = settings.BASE_DIR + '/twitter_services/tweet_processing/classifying/SpamCollection.txt'

        # Read labelled samples: "<label>\t<text>" per line, capped at the
        # first 2501 lines of the corpus.
        labelled = []
        with open(corpus_path, 'r') as spam_collection:
            for line_no, line in enumerate(spam_collection):
                if line_no > 2500:
                    break
                parts = line.strip().split('\t')
                labelled.append((TweetNormalizer.get_tokens(parts[1]), parts[0]))

        # Feature vocabulary: the 2000 most frequent tokens overall.
        every_token = []
        for tokens, _ in labelled:
            every_token.extend(tokens)
        self.word_feature = [entry[0] for entry in list(nltk.FreqDist(every_token).most_common(2000))]

        feature_sets = [(self.__extract_feature__(tokens, is_token=True), label)
                        for tokens, label in labelled]

        # Train the SVM up front; the capped corpus keeps this fast.
        self.classifier = nltk.classify.SklearnClassifier(LinearSVC()).train(feature_sets)

    # Return true if the tweet is spam and false otherwise
    def is_spam(self, text):
        return self.classifier.classify(self.__extract_feature__(text)).lower() == 'spam'

    def __extract_feature__(self, data, is_token=False):
        # Accept either raw text or a pre-tokenized list.
        tokens = data if is_token else TweetNormalizer.get_tokens(data, json=True)
        features = {}
        for word in self.word_feature:
            features[u'contains({})'.format(word)] = (word in tokens)
        return features
if __name__ == '__main__':
    # Trying to apply cross validation
    feature_sets = DimensionClassifier().feature_sets
    cv = cross_validation.KFold(len(feature_sets), n_folds=100, shuffle=True, random_state=None)
    for traincv, testcv in cv:
        classifier_cv = DimensionClassifier()
        # NOTE(review): slicing from the first to the last fold index only
        # approximates the fold -- with shuffle=True the index arrays are not
        # guaranteed contiguous; confirm this is intentional.
        classifier_cv.train(training_set=feature_sets[traincv[0]:traincv[len(traincv)-1]])
        print 'SVM accuracy:', nltk.classify.util.accuracy(classifier_cv.classifier,
                                                           feature_sets[testcv[0]:testcv[len(testcv)-1]])
|
xroah/fs-cp
|
lib/rmdir/rmdirSync.js
|
"use strict"
const fs = require("fs");
const path = require("path");
const supportRetry = require("./supportRetry");
const shouldRetry = require("./shouldRetry");
// Removes a single (empty) directory, retrying up to options.maxRetries
// times when the first attempt fails with a retriable error (as decided
// by shouldRetry).  Throws the last error if every attempt fails.
function rmdir(dirPath, options) {
    const maxRetries = options.maxRetries || 0;
    let err;
    try {
        fs.rmdirSync(dirPath);
    } catch (error) {
        err = error;
    }
    if (shouldRetry(err) && maxRetries) {
        let count = 0;
        let retryDelay = options.retryDelay || 0;
        let now = Date.now();
        do {
            let _now = Date.now();
            // Busy-wait: this is a synchronous API, so the delay between
            // retries is implemented by spinning until retryDelay has
            // elapsed since the previous attempt.
            if (_now - now >= retryDelay) {
                count++;
                now = _now;
                try {
                    fs.rmdirSync(dirPath);
                    err = null;  // success: clear the recorded failure
                    break;
                } catch (error) {
                    err = error;
                }
            }
        } while (count < maxRetries)
    }
    if (err) {
        throw err;
    }
}
// Recursively removes a directory tree, depth-first: delete every child,
// then remove the (now empty) directory itself via rmdir().
// Note: the original special-cased an empty directory with an early
// return, but iterating an empty listing is a no-op, so the general path
// covers it — the redundant branch has been removed.
function rmSubs(parentPath, options) {
    for (const file of fs.readdirSync(parentPath)) {
        const filePath = path.join(parentPath, file);
        // lstat (not stat) so symlinks are unlinked, never followed.
        const stat = fs.lstatSync(filePath);
        if (stat.isDirectory()) {
            rmSubs(filePath, options);
        } else {
            fs.unlinkSync(filePath);
        }
    }
    rmdir(parentPath, options);
}
// Synchronous directory removal with optional `recursive` support.
// When the runtime's fs.rmdirSync already understands retry options,
// the call is delegated wholesale (it also handles `recursive`);
// otherwise recursion is emulated with rmSubs().
function rmdirSync(dirPath, options) {
    const opts = options || {};
    const { recursive, ...others } = opts;
    if (supportRetry) {
        // Native support: fs.rmdirSync accepts recursive/maxRetries/retryDelay.
        return rmdir(dirPath, opts);
    }
    if (recursive) {
        rmSubs(dirPath, others);
    } else {
        rmdir(dirPath, opts);
    }
}
module.exports = rmdirSync;
|
wenmobo/WBCommonDevelopTools
|
WB_UITextField/ThirdLib/InputKit/Vsersion v1.1.11/InputKit/TXMatchManager.h
|
//
// TXMatchManager.h
// InputKit
//
// Created by tingxins on 02/06/2017.
// Copyright © 2017 tingxins. All rights reserved.
//
#import <Foundation/Foundation.h>
// Static helpers that validate candidate input text against regular
// expressions, used by InputKit's limited text components.
@interface TXMatchManager : NSObject

/**
 Matches `matchStr` against the built-in "price" pattern.

 @param component Component whose input is being limited.
 @param matchStr  Candidate string to validate.
 @return YES when the string matches, NO otherwise.
 */
+ (BOOL)matchLimitedTextTypePriceWithComponent:(id)component
                                         value:(NSString *)matchStr;

/**
 Matches `matchStr` against a single caller-supplied regular expression.

 @param regEx Custom regEx.
 */
+ (BOOL)matchLimitedTextTypeCustomWithRegEx:(NSString *)regEx
                                  component:(id)component
                                      value:(NSString *)matchStr;

/**
 @param regExs The string must match ALL regular expressions in the array
               (results are AND-ed together).
 */
+ (BOOL)matchLimitedTextTypeCustomWithRegExs:(NSArray *)regExs
                                   component:(id)component
                                       value:(NSString *)matchStr;

/**
 Builds the text that would result from replacing `range` of
 `originalText` with `replaceText`.
 */
+ (NSString *)getMatchContentWithOriginalText:(NSString *)originalText
                                  replaceText:(NSString *)replaceText
                                        range:(NSRange)range;

@end
|
sourcecode-reloaded/Xming
|
extras/Mesa/src/mesa/drivers/dri/i915/i915_program.h
|
/**************************************************************************
*
* Copyright 2003 Tungsten Graphics, Inc., <NAME>, Texas.
* All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sub license, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice (including the
* next paragraph) shall be included in all copies or substantial portions
* of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
* IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
* ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
**************************************************************************/
#ifndef I915_PROGRAM_H
#define I915_PROGRAM_H
#include "i915_context.h"
#include "i915_reg.h"
/* Having zero and one in here makes the definition of swizzle a lot
* easier.
*/
#define UREG_TYPE_SHIFT 29
#define UREG_NR_SHIFT 24
#define UREG_CHANNEL_X_NEGATE_SHIFT 23
#define UREG_CHANNEL_X_SHIFT 20
#define UREG_CHANNEL_Y_NEGATE_SHIFT 19
#define UREG_CHANNEL_Y_SHIFT 16
#define UREG_CHANNEL_Z_NEGATE_SHIFT 15
#define UREG_CHANNEL_Z_SHIFT 12
#define UREG_CHANNEL_W_NEGATE_SHIFT 11
#define UREG_CHANNEL_W_SHIFT 8
#define UREG_CHANNEL_ZERO_NEGATE_MBZ 5
#define UREG_CHANNEL_ZERO_SHIFT 4
#define UREG_CHANNEL_ONE_NEGATE_MBZ 1
#define UREG_CHANNEL_ONE_SHIFT 0
#define UREG_BAD 0xffffffff /* not a valid ureg */
#define X SRC_X
#define Y SRC_Y
#define Z SRC_Z
#define W SRC_W
#define ZERO SRC_ZERO
#define ONE SRC_ONE
/* Construct a ureg:
*/
#define UREG( type, nr ) (((type)<< UREG_TYPE_SHIFT) | \
((nr) << UREG_NR_SHIFT) | \
(X << UREG_CHANNEL_X_SHIFT) | \
(Y << UREG_CHANNEL_Y_SHIFT) | \
(Z << UREG_CHANNEL_Z_SHIFT) | \
(W << UREG_CHANNEL_W_SHIFT) | \
(ZERO << UREG_CHANNEL_ZERO_SHIFT) | \
(ONE << UREG_CHANNEL_ONE_SHIFT))
#define GET_CHANNEL_SRC( reg, channel ) ((reg<<(channel*4)) & (0xf<<20))
#define CHANNEL_SRC( src, channel ) (src>>(channel*4))
#define GET_UREG_TYPE(reg) (((reg)>>UREG_TYPE_SHIFT)&REG_TYPE_MASK)
#define GET_UREG_NR(reg)   (((reg)>>UREG_NR_SHIFT)&REG_NR_MASK)
#define UREG_XYZW_CHANNEL_MASK 0x00ffff00
/* One neat thing about the UREG representation:
*/
/* Rewrites the four XYZW channel selectors of a ureg: channel slot i of
 * the result selects whatever the caller-chosen source channel selected
 * in the original register.  All other bits are preserved.
 */
static __inline int swizzle( int reg, int x, int y, int z, int w )
{
   int result = reg & ~UREG_XYZW_CHANNEL_MASK;
   result |= CHANNEL_SRC( GET_CHANNEL_SRC( reg, x ), 0 );
   result |= CHANNEL_SRC( GET_CHANNEL_SRC( reg, y ), 1 );
   result |= CHANNEL_SRC( GET_CHANNEL_SRC( reg, z ), 2 );
   result |= CHANNEL_SRC( GET_CHANNEL_SRC( reg, w ), 3 );
   return result;
}
/* Another neat thing about the UREG representation:
*/
/* Toggles the per-channel negate bits of a ureg: a 1 in x/y/z/w flips
 * the negate flag of the corresponding channel, a 0 leaves it alone.
 */
static __inline int negate( int reg, int x, int y, int z, int w )
{
   int flips = 0;
   flips |= (x & 1) << UREG_CHANNEL_X_NEGATE_SHIFT;
   flips |= (y & 1) << UREG_CHANNEL_Y_NEGATE_SHIFT;
   flips |= (z & 1) << UREG_CHANNEL_Z_NEGATE_SHIFT;
   flips |= (w & 1) << UREG_CHANNEL_W_NEGATE_SHIFT;
   return reg ^ flips;
}
extern GLuint i915_get_temp( struct i915_fragment_program *p );
extern GLuint i915_get_utemp( struct i915_fragment_program *p );
extern void i915_release_utemps( struct i915_fragment_program *p );
extern GLuint i915_emit_texld( struct i915_fragment_program *p,
GLuint dest,
GLuint destmask,
GLuint sampler,
GLuint coord,
GLuint op );
extern GLuint i915_emit_arith( struct i915_fragment_program *p,
GLuint op,
GLuint dest,
GLuint mask,
GLuint saturate,
GLuint src0,
GLuint src1,
GLuint src2 );
extern GLuint i915_emit_decl( struct i915_fragment_program *p,
GLuint type, GLuint nr, GLuint d0_flags );
extern GLuint i915_emit_const1f( struct i915_fragment_program *p,
GLfloat c0 );
extern GLuint i915_emit_const2f( struct i915_fragment_program *p,
GLfloat c0, GLfloat c1 );
extern GLuint i915_emit_const4fv( struct i915_fragment_program *p,
const GLfloat *c );
extern GLuint i915_emit_const4f( struct i915_fragment_program *p,
GLfloat c0, GLfloat c1,
GLfloat c2, GLfloat c3 );
extern GLuint i915_emit_param4fv( struct i915_fragment_program *p,
const GLfloat *values );
extern void i915_program_error( struct i915_fragment_program *p,
const GLubyte *msg );
extern void i915_init_program( i915ContextPtr i915,
struct i915_fragment_program *p );
extern void i915_upload_program( i915ContextPtr i915,
struct i915_fragment_program *p );
extern void i915_fini_program( struct i915_fragment_program *p );
#endif
|
CiscoDevNet/ydk-go
|
ydk/models/cisco_ios_xr/ptp_pd_cfg/ptp_pd_cfg.go
|
<reponame>CiscoDevNet/ydk-go
// This module contains a collection of YANG definitions
// for Cisco IOS-XR ptp-pd package configuration.
//
// This module contains definitions
// for the following management objects:
// log-servo-root: Servo Log for Platform
//
// Copyright (c) 2013-2018 by Cisco Systems, Inc.
// All rights reserved.
package ptp_pd_cfg
import (
"fmt"
"github.com/CiscoDevNet/ydk-go/ydk"
"github.com/CiscoDevNet/ydk-go/ydk/types"
"github.com/CiscoDevNet/ydk-go/ydk/types/yfilter"
"github.com/CiscoDevNet/ydk-go/ydk/models/cisco_ios_xr"
"reflect"
)
// init registers the LogServoRoot top-level entity with the YDK runtime,
// under both its namespace-qualified and module-qualified names, so that
// payloads referencing either form can be decoded.
func init() {
	ydk.YLogDebug(fmt.Sprintf("Registering top level entities for package ptp_pd_cfg"))
	ydk.RegisterEntity("{http://cisco.com/ns/yang/Cisco-IOS-XR-ptp-pd-cfg log-servo-root}", reflect.TypeOf(LogServoRoot{}))
	ydk.RegisterEntity("Cisco-IOS-XR-ptp-pd-cfg:log-servo-root", reflect.TypeOf(LogServoRoot{}))
}
// LogServoRoot
// Servo Log for Platform
type LogServoRoot struct {
	EntityData types.CommonEntityData
	YFilter yfilter.YFilter // filter applied to this entity during CRUD operations

	// Enable Servo change log events. The type is bool.
	ServoEventEnable interface{}
}
// GetEntityData populates and returns the YDK metadata describing this
// entity: YANG names, bundle/namespace tables, segment and absolute paths,
// and the (empty) children plus single-leaf maps.
func (logServoRoot *LogServoRoot) GetEntityData() *types.CommonEntityData {
	logServoRoot.EntityData.YFilter = logServoRoot.YFilter
	logServoRoot.EntityData.YangName = "log-servo-root"
	logServoRoot.EntityData.BundleName = "cisco_ios_xr"
	logServoRoot.EntityData.ParentYangName = "Cisco-IOS-XR-ptp-pd-cfg"
	logServoRoot.EntityData.SegmentPath = "Cisco-IOS-XR-ptp-pd-cfg:log-servo-root"
	logServoRoot.EntityData.AbsolutePath = logServoRoot.EntityData.SegmentPath
	logServoRoot.EntityData.CapabilitiesTable = cisco_ios_xr.GetCapabilities()
	logServoRoot.EntityData.NamespaceTable = cisco_ios_xr.GetNamespaces()
	logServoRoot.EntityData.BundleYangModelsLocation = cisco_ios_xr.GetModelsPath()
	logServoRoot.EntityData.Children = types.NewOrderedMap()
	logServoRoot.EntityData.Leafs = types.NewOrderedMap()
	logServoRoot.EntityData.Leafs.Append("servo-event-enable", types.YLeaf{"ServoEventEnable", logServoRoot.ServoEventEnable})
	logServoRoot.EntityData.YListKeys = []string {}

	return &(logServoRoot.EntityData)
}
|
eamanu/HistorialClinica-LaRioja
|
back-end/hospital-api/src/main/java/net/pladema/medicalconsultation/diary/repository/entity/Diary.java
|
package net.pladema.medicalconsultation.diary.repository.entity;
import java.time.LocalDate;
import java.time.LocalDateTime;
import javax.persistence.Column;
import javax.persistence.Embedded;
import javax.persistence.Entity;
import javax.persistence.EntityListeners;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import ar.lamansys.sgx.shared.auditable.DeleteableEntity;
import ar.lamansys.sgx.shared.auditable.entity.Deleteable;
import ar.lamansys.sgx.shared.auditable.entity.SGXAuditListener;
import ar.lamansys.sgx.shared.auditable.entity.SGXAuditableEntity;
import org.hibernate.annotations.ColumnDefault;
@Entity
@Table(name = "diary")
@EntityListeners(SGXAuditListener.class)
@Getter
@Setter
@ToString
public class Diary extends SGXAuditableEntity implements DeleteableEntity<Integer> {

    private static final long serialVersionUID = -7771492167471325392L;

    @Id
    @Column(name = "id")
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Integer id;

    @Column(name = "healthcare_professional_id", nullable = false)
    private Integer healthcareProfessionalId;

    @Column(name = "doctors_office_id", nullable = false)
    private Integer doctorsOfficeId;

    @Column(name = "start_date", nullable = false)
    private LocalDate startDate;

    @Column(name = "end_date", nullable = false)
    private LocalDate endDate;

    @Column(name = "appointment_duration", nullable = false)
    private Short appointmentDuration;

    @Column(name = "automatic_renewal", nullable = false)
    @ColumnDefault("false")
    private boolean automaticRenewal = false;

    @Column(name = "days_before_renew", columnDefinition = "smallint default 0", nullable = false)
    private Short daysBeforeRenew = 0;

    @Column(name = "professional_asign_shift", nullable = false)
    @ColumnDefault("false")
    private boolean professionalAsignShift = false;

    @Column(name = "include_holiday", nullable = false)
    @ColumnDefault("false")
    private boolean includeHoliday = false;

    @Column(name = "active", nullable = false)
    @ColumnDefault("true")
    private boolean active = true;

    // Embedded soft-delete audit record (who/when/flag).
    @Embedded
    private Deleteable deleteable = new Deleteable();

    @Override
    public Integer getDeleteBy() {
        return deleteable == null ? null : deleteable.getDeletedBy();
    }

    @Override
    public void setDeleteBy(Integer user) {
        ensureDeleteable().setDeletedBy(user);
    }

    @Override
    public LocalDateTime getDeletedOn() {
        return deleteable == null ? null : deleteable.getDeletedOn();
    }

    @Override
    public void setDeletedOn(LocalDateTime dateTime) {
        ensureDeleteable().setDeletedOn(dateTime);
    }

    @Override
    public boolean isDeleted() {
        return deleteable != null && deleteable.isDeleted();
    }

    @Override
    public void setDeleted(Boolean deleted) {
        ensureDeleteable().setDeleted(deleted);
    }

    /** Lazily (re)creates the embedded audit record; never returns null. */
    private Deleteable ensureDeleteable() {
        if (deleteable == null) {
            deleteable = new Deleteable();
        }
        return deleteable;
    }
}
|
rahulraj/Cryptology
|
DeciphererTester/MonoalphabetAnalyzer.java
|
<filename>DeciphererTester/MonoalphabetAnalyzer.java
import java.util.LinkedList;
import java.util.ArrayList;
public class MonoalphabetAnalyzer
{
private String cipher;
private char[][] keys;
private LetterCounter counter;
private String plain;
private int myNumber; // for debug purposes
public MonoalphabetAnalyzer(String cip)
{
cipher = cip.toUpperCase();
keys = new char[2][26];
// 2 rows, 26 columns
// upper row is the cipher letters
// lower is their plain equivalents
for (int c = 0; c < keys[0].length; c++)
{
keys[0][c] = (char)(c + 65);
keys[1][c] = '?';
}
counter = new LetterCounter(cipher);
counter.getFrequencies(); // stores frequency data in counter
plain = "";
myNumber = 0;
//solveFreqs(); // fills keys, initializes plain in the process
}
public ArrayList<Character> getUnsolvedLetters()
{
// returns an ArrayList of all the CIPHERTEXT letters in this monoalphabet that have not been solved
ArrayList<Character> unsolved = new ArrayList<Character>();
for (int loc = 0; loc < keys[0].length; loc++)
{
if (keys[1][loc] == '?')
{
// unsolved letter
unsolved.add(new Character(keys[0][loc]));
}
}
return unsolved;
}
public void setNumber(int num)
{
myNumber = num;
}
public char getPlainLetterAt(int index)
{
return plain.charAt(index);
}
public char getCipherLetterAt(int index)
{
return cipher.charAt(index);
}
public int getLength()
{
return cipher.length();
}
public String getCipher()
{
return cipher;
}
public void solveAsCaesar()
{
// find the letter identified as E
int letterLoc = -1;
for (int loc = 0; loc < keys[1].length; loc++)
{
if (keys[1][loc] == 'E')
{
letterLoc = loc;
loc = keys[1].length; // break out
}
}
if (letterLoc == -1)
{
// E was not found, use T instead
for (int loc = 0; loc < keys[1].length; loc++)
{
if (keys[1][loc] == 'T')
{
letterLoc = loc;
loc = keys[1].length; // break out
}
}
}
int encShift = makeMod26(keys[0][letterLoc] - keys[1][letterLoc]);
int decShift = makeMod26(26 - encShift);
StringBuilder pl = new StringBuilder("");
for (int index = 0; index < cipher.length(); index++)
{
char aCipLetter = cipher.charAt(index);
int toConvert = aCipLetter - 64; // convert to a range of 1 to 26
toConvert += decShift;
toConvert = makeMod26(toConvert);
toConvert += 64;
pl.append((char)toConvert);
}
plain = pl.toString();
}
private int makeMod26(int anInt)
{
while (anInt >= 26)
{
anInt -= 26;
}
while (anInt < 0)
{
anInt += 26;
}
return anInt;
}
public void makeFirstGuess()
{
// make an initial first guess as to the identitiy of a subsitution ciphertext
// this is based purely on single letter frequencies and WILL NOT be accurate!
// it requires refinement!
char[] plainFreqRanks = {'E', 'T', 'N', 'R', 'I', 'O', 'A', 'S', 'D', 'H', 'L', 'C', 'F',
'P', 'U', 'M', 'Y', 'G', 'W', 'V', 'B', 'X', 'K', 'Q', 'J', 'Z'};
for (int freqRank = 1; freqRank <= 26; freqRank++)
{
char toCheck = counter.getLetterOfFrequency(freqRank);
keys[1][toCheck - 65] = plainFreqRanks[freqRank - 1];
}
makePlaintext();
}
public void solveFreqs()
{
/**
* Fills some of the basic keys, with frequency analysis, the more complex ones have to wait
*/
solveE();
solveT();
solveO();
solveA();
//solveL();
//solveS();
//solveH();
//solveN();
//solveD();
/**
* Solve for letters based on the knowledge of other letters
*/
//solveR();
//solveU();
//solveI();
//solveY();
//solveG();
//solveQ();
//solveM();
//solveFAndW();
//solveC();
//solveVAndB();
makePlaintext();
}
public LinkedList<Integer> getOccurancesOf(char toFind)
{
// gets all occurances of toFind in the PLAINTEXT!
LinkedList<Integer> occuranceList = new LinkedList<Integer>();
int startIndex = 0;
int strIndex = plain.indexOf(toFind, startIndex);
while (strIndex != -1)
{
//System.out.print("String found at index " + (strIndex + 1));
occuranceList.add(strIndex);
startIndex = strIndex;
strIndex = plain.indexOf(toFind, startIndex + 1);
}
return occuranceList;
}
public boolean letterIsSolved(char letter)
{
// searches for the plaintext letter (letter being the input)
// if it is tied to a ciphertext letter already, then return true
for (int loc = 0; loc < keys[1].length; loc++)
{
if (keys[1][loc] == letter)
{
return true;
}
}
return false;
}
public boolean cipherIsSolved(char cipLetter)
{
// checks to see if the cipLetter ciphertext letter has a plaintext letter with it
// returns true if so
return (keys[1][cipLetter - 65] != '?');
}
public void setPlainLetterAs(char plain, char cip)
{
keys[1][cip - 65] = plain;
}
public char getCipherFor(char plain)
{
for (int loc = 0; loc < keys[0].length; loc++)
{
if (keys[1][loc] == plain)
{
return keys[0][loc];
}
}
return '?';
}
private void solveE()
{
/*
* Attempt to identify E's cipher letter
* It is the most common letter
* The 2nd most common repeat
* The 2nd letter in "HE" (the 2nd most common digraph)
* The 1st letter in "ER" (another common digraph) this has been the 3rd, 4th, and 5th most in the samples
* (ER clue has not yet been implemented)
* And the 3rd letter in "THE" (the most common trigraph)
*/
char[] poss = new char[1]; //possibilities for the cipher letter
poss[0] = counter.getLetterOfFrequency(1);
//poss[1] = counter.getRepeatOfFrequency(2).getLetters().charAt(0);
//poss[2] = counter.getPairOfFrequency(2).getLetters().charAt(1);
//poss[3] = counter.getTrigraphOfFrequency(1).getThird();
char cip = getMostCommon(poss);
if (cip != '?')
{
keys[1][cip - 65] = 'E';
}
}
private void solveT()
{
/*
* Attempt to identify T's cipher
* 2nd most common letter
* 1st letter in "TH" (the most common digraph)
* 1st letter in "THE" (the most common trigraph)
*/
char[] poss = new char[1];
poss[0] = counter.getLetterOfFrequency(2);
//poss[1] = counter.getPairOfFrequency(1).getLetters().charAt(0);
//poss[2] = counter.getTrigraphOfFrequency(1).getFirst();
char cip = getMostCommon(poss);
if (cip != '?' && keys[1][cip - 65] == '?')
{
// if conditions are
// necessary to ensure that cip is definite and avoid "collisions" with other determined keys
keys[1][cip - 65] = 'T';
}
}
private void solveO()
{
/*
* Attempt to ID O's cipher
* Implemented:
* 3rd most common letter
* 3rd most common repeat
* 1st letter of "OU" which could be the 4th - 6th most common digraph
*
* Not implemented yet:
* 2nd letter in "YOU" (3rd-5th most common trigraph)
*/
char [] poss = new char[1];
poss[0] = counter.getLetterOfFrequency(3);
//poss[1] = counter.getRepeatOfFrequency(3).getLetters().charAt(0);
//poss[2] = counter.getPairOfFrequency(4).getLetters().charAt(0);
//poss[3] = counter.getPairOfFrequency(5).getLetters().charAt(0);
//poss[4] = counter.getPairOfFrequency(6).getLetters().charAt(0);
char cip = getMostCommon(poss);
if (cip != '?' && keys[1][cip - 65] == '?')
{
keys[1][cip - 65] = 'O';
}
}
private void solveA()
{
/*
* Attempt to ID A's cipher
* Implemented:
* 4th most common letter
* 1st letter in "AND" (2nd most common trigraph)
* 1st letter in "AN" (3rd or 4th most common digraph)
*/
char[] poss = new char[1];
poss[0] = counter.getLetterOfFrequency(4);
//poss[1] = counter.getTrigraphOfFrequency(2).getFirst();
//poss[2] = counter.getPairOfFrequency(3).getLetters().charAt(0);
//poss[3] = counter.getPairOfFrequency(4).getLetters().charAt(0);
char cip = getMostCommon(poss);
if (cip != '?' && keys[1][cip - 65] == '?')
{
keys[1][cip - 65] = 'A';
}
}
public void solveI()
{
// most common not already solved when calling this methods
char iCip = counter.getHighestFreqUnknownLetter(this);
keys[1][iCip - 65] = 'I';
/*for (int freqRank = 1; freqRank <= 26; freqRank--)
{
char letterOfFreq = counter.getLetterOfFrequency(freqRank);
if ((letterOfFreq - 65) == -33)
{
System.out.println("Breakpoint here.");
}
if (keys[1][letterOfFreq - 65] == '?')
{
keys[1][letterOfFreq - 65] = 'I';
return; // terminates method
}
}*/
}
private char getMostCommon(char[] chars)
{
    /*
     * Return the character occurring most often in the array, or '?' when
     * two or more DISTINCT characters tie for the highest count (i.e. no
     * single candidate satisfied a plurality of the heuristics).
     */
    int[] counts = new int[chars.length]; // counts[i] = occurrences of chars[i]
    for (int i = 0; i < chars.length; i++)
    {
        for (char candidate : chars)
        {
            if (chars[i] == candidate)
            {
                counts[i]++;
            }
        }
    }
    // Locate the first index holding the maximum count.
    int bestIndex = 0;
    int bestCount = 0;
    for (int i = 0; i < counts.length; i++)
    {
        if (counts[i] > bestCount)
        {
            bestCount = counts[i];
            bestIndex = i;
        }
    }
    // A different character matching the same count means there is no
    // plurality winner — report indefinite.
    for (int i = 0; i < counts.length; i++)
    {
        if (counts[i] == bestCount && chars[i] != chars[bestIndex])
        {
            return '?';
        }
    }
    return chars[bestIndex];
}
public void makePlaintext()
{
    /*
     * Decode the stored ciphertext using the current key table. Characters
     * outside A-Z (spaces, punctuation, digits) are copied through unchanged.
     */
    StringBuilder decoded = new StringBuilder();
    for (int i = 0; i < cipher.length(); i++)
    {
        char c = cipher.charAt(i);
        boolean isUppercaseLetter = c >= 'A' && c <= 'Z';
        decoded.append(isUppercaseLetter ? keys[1][c - 65] : c);
    }
    plain = decoded.toString();
}
public String getKeys()
{
    /*
     * Render the current plaintext->ciphertext mapping, one pair per line.
     * Uses StringBuilder instead of repeated String '+=' concatenation,
     * which is O(n^2) inside a loop.
     */
    StringBuilder output = new StringBuilder("<Plaintext> : <Ciphertext>\n");
    for (int col = 0; col < keys[0].length; col++)
    {
        output.append(keys[1][col]).append(" : ").append(keys[0][col]).append("\n");
    }
    return output.toString();
}
public String toString() // debug purposes
{
    // Human-readable identifier used while debugging multiple alphabets.
    final String label = "Monoalphabet number ";
    return label + myNumber;
}
}
|
janeirodigital/shapetrees-java
|
shapetrees-java-core/src/main/java/com/janeirodigital/shapetrees/core/vocabularies/ShapeTreeVocabulary.java
|
<filename>shapetrees-java-core/src/main/java/com/janeirodigital/shapetrees/core/vocabularies/ShapeTreeVocabulary.java
package com.janeirodigital.shapetrees.core.vocabularies;
/**
 * IRI constants for the Shape Trees vocabulary, built on the shared
 * {@code Namespaces.SHAPETREE} prefix. Pure constant holder — not instantiable.
 */
public final class ShapeTreeVocabulary {
// Private constructor prevents instantiation of this utility class.
private ShapeTreeVocabulary() {
}
public static final String HAS_ROOT_ASSIGNMENT = Namespaces.SHAPETREE + "hasRootAssignment";
public static final String MANAGES_RESOURCE = Namespaces.SHAPETREE + "manages";
public static final String ASSIGNS_SHAPE_TREE = Namespaces.SHAPETREE + "assigns";
public static final String REFERENCES_SHAPE_TREE = Namespaces.SHAPETREE + "referencesShapeTree";
public static final String SHAPETREE_MANAGER = Namespaces.SHAPETREE + "Manager";
public static final String SHAPETREE_ASSIGNMENT = Namespaces.SHAPETREE + "Assignment";
public static final String EXPECTS_TYPE = Namespaces.SHAPETREE + "expectsType";
public static final String REFERENCES = Namespaces.SHAPETREE + "references";
public static final String VIA_SHAPE_PATH = Namespaces.SHAPETREE + "viaShapePath";
public static final String VIA_PREDICATE = Namespaces.SHAPETREE + "viaPredicate";
public static final String CONTAINS = Namespaces.SHAPETREE + "contains";
public static final String HAS_ASSIGNMENT = Namespaces.SHAPETREE + "hasAssignment";
public static final String SHAPE = Namespaces.SHAPETREE + "shape";
public static final String FOCUS_NODE = Namespaces.SHAPETREE + "focusNode";
public static final String CONTAINER = Namespaces.SHAPETREE + "Container";
public static final String RESOURCE = Namespaces.SHAPETREE + "Resource";
public static final String NON_RDF_RESOURCE = Namespaces.SHAPETREE + "NonRDFResource";
}
|
martinChenZ/spring-boot-demo
|
leetcode/src/main/java/com/easy/leetcode/Sub21.java
|
package com.easy.leetcode;
/*
21. 合并两个有序链表
将两个有序链表合并为一个新的有序链表并返回。新链表是通过拼接给定的两个链表的所有节点组成的。
示例:
输入:1->2->4, 1->3->4
输出:1->1->2->3->4->4
来源:力扣(LeetCode)
链接:https://leetcode-cn.com/problems/merge-two-sorted-lists
*/
public class Sub21 {
    /**
     * Demo driver: builds the two sample lists from the problem statement,
     * merges them, and prints the merged chain.
     */
    public static void main(String[] args) {
        Solution_21 solution = new Solution_21();
        // Build list 1: 1 -> 2 -> 4
        ListNode first = new ListNode(1);
        first.next = new ListNode(2);
        first.next.next = new ListNode(4);
        // Build list 2: 1 -> 3 -> 4
        ListNode second = new ListNode(1);
        second.next = new ListNode(3);
        second.next.next = new ListNode(4);
        ListNode merged = solution.mergeTwoLists(first, second);
        System.out.println("合并两个有序链表结果为:");
        do {
            System.out.print(merged.val + "->");
            merged = merged.next;
        } while (merged != null);
    }
}
class Solution_21 {
    /**
     * Merge two sorted linked lists into a new sorted list.
     * Nodes are COPIED (fresh ListNode instances); the input lists are not
     * spliced or mutated. Ties take the node from {@code l1} first (stable).
     */
    public ListNode mergeTwoLists(ListNode l1, ListNode l2) {
        ListNode dummy = new ListNode(0); // sentinel head, discarded at the end
        ListNode tail = dummy;
        while (l1 != null || l2 != null) {
            boolean takeFromFirst;
            if (l1 == null) {
                takeFromFirst = false;
            } else if (l2 == null) {
                takeFromFirst = true;
            } else {
                takeFromFirst = l1.val <= l2.val;
            }
            if (takeFromFirst) {
                tail.next = new ListNode(l1.val);
                l1 = l1.next;
            } else {
                tail.next = new ListNode(l2.val);
                l2 = l2.next;
            }
            tail = tail.next;
        }
        return dummy.next;
    }
}
|
Garinmckayl/researchhub-web
|
pages/post/create/index.js
|
import AskQuestionForm from "~/components/Paper/AskQuestionForm";
import Head from "~/components/Head";
import React, { Fragment, useEffect } from "react";
import { useRouter } from "next/router";
import { css, StyleSheet } from "aphrodite";
export default function Index() {
const router = useRouter();
return (
<Fragment>
<Head title={`Upload Paper`} description="Upload paper to ResearchHub" />
<div className={css(styles.background)}>
<div className={css(styles.content)}>
<div className={css(styles.title)}>Ask a Question</div>
<AskQuestionForm />
</div>
</div>
</Fragment>
);
}
// Aphrodite styles for the question-creation page.
const styles = StyleSheet.create({
  // Full-height light backdrop; centers the content column horizontally.
  background: {
    backgroundColor: "#FCFCFC",
    display: "flex",
    flexDirection: "column",
    justifyContent: "flex-start",
    alignItems: "center",
    scrollBehavior: "smooth",
    position: "relative",
    minHeight: "100vh",
    paddingTop: "45px",
  },
  // Page heading ("Ask a Question").
  title: {
    display: "flex",
    justifyContent: "flex-start",
    fontWeight: 500,
    fontSize: "30px",
    lineHeight: "38px",
    width: "100%",
    marginBottom: "30px",
  },
  // Vertical stack holding the heading and the form.
  content: {
    display: "flex",
    flexDirection: "column",
  },
});
|
konsorten/ktn-build-info
|
ver/git_test.go
|
<filename>ver/git_test.go
package ver
import (
"os"
"testing"
log "github.com/sirupsen/logrus"
)
// TestRunGitHere reads version information from the working copy's own git
// repository and logs the resolved revision.
func TestRunGitHere(t *testing.T) {
	log.SetLevel(log.DebugLevel)

	// try read revision
	vi, err := TryReadFromGit()
	if err != nil {
		t.Fatal(err)
	}
	// TryReadFromGit returns (nil, nil) when no repository is found (see
	// TestRunGitNoRepository); guard against a nil dereference below.
	if vi == nil {
		t.Fatal("expected version information from the working copy's git repository")
	}

	t.Logf("Revision: %v", vi.Revision)
}
// TestRunGitNoRepository verifies that TryReadFromGit reports "no version
// information" (nil result, nil error) when run outside any git repository.
func TestRunGitNoRepository(t *testing.T) {
	log.SetLevel(log.DebugLevel)

	// change to test data dir
	currDir, _ := os.Getwd()
	os.Chdir(os.TempDir())
	// restore the original working directory when the test finishes
	defer os.Chdir(currDir)

	// try read revision
	vi, err := TryReadFromGit()
	if err != nil {
		t.Fatal(err)
	}

	if vi != nil {
		t.Fatal("No version information was expected")
	}
}
|
mcx/ml_logger
|
ml-dash-server/ml_dash/schema/files/videos.py
|
from os.path import split
from graphene import ObjectType, relay, String
from ml_dash import schema
class File(ObjectType):
    """Graphene/Relay node representing a single file or directory entry."""
    class Meta:
        interfaces = relay.Node,
    # Display name of the entry (basename of the path-like relay id).
    name = String(description='name of the directory')
    # description = String(description='string serialized data')
    # experiments = List(lambda: schema.Experiments)

    @classmethod
    def get_node(cls, info, id):
        # Relay node resolver: reconstruct the File from its global id.
        return get_file(id)
class FileConnection(relay.Connection):
    """Relay connection type for paginating over File nodes."""
    class Meta:
        node = File
def get_file(id):
    """Build a File node for the given relay id (a path-like string).

    The leading character of ``id`` is dropped and the basename of the
    remainder becomes the node's display name.
    """
    # path = os.path.join(Args.logdir, id[1:])
    relative = id[1:]
    _, basename = split(relative)
    return File(id=id, name=basename)
|
urlofmar/cradle
|
tests/fs/app_dirs.cpp
|
<filename>tests/fs/app_dirs.cpp
#include <cradle/fs/app_dirs.h>
#include <filesystem>
#include <cradle/utilities/testing.h>
#include <cradle/fs/file_io.h>
#include <cradle/fs/utilities.h>
#include <cradle/utilities/environment.h>
using namespace cradle;
#ifdef _WIN32
TEST_CASE("Windows app directories", "[fs][app_dirs]")
{
    // These are hard to test for several reasons: the results are dependent
    // on Windows version and user name, much of what's being tested is
    // related to inter-user permissions, their effects are impossible to
    // encapsulate within the test directory, etc.
    // So for now, I'm leaving these untested at the unit test level and
    // hoping that higher-level testing will be sufficient.
    // (Intentionally empty test body.)
}
#else
// Exercises XDG Base Directory resolution for config/cache/log directories:
// missing variables, HOME-only defaults, relative-path rejection, explicit
// overrides, and the config search path ordering.
TEST_CASE("XDG app directories", "[fs][app_dirs]")
{
    auto author = some(string("not_used_here"));
    auto app = string("cradle_xdg_test_case_app");
    // Keep everything we're doing local to the test directory.
    auto cwd = std::filesystem::current_path();
    auto home_dir = cwd / "xdg_home";
    reset_directory(home_dir);
    // If none of the relevant environment variables are set, it should be an
    // error to get the app directories.
    set_environment_variable("HOME", "");
    set_environment_variable("XDG_CONFIG_HOME", "");
    set_environment_variable("XDG_CONFIG_DIRS", "");
    set_environment_variable("XDG_CACHE_HOME", "");
    REQUIRE_THROWS(get_user_config_dir(author, app));
    REQUIRE_THROWS(get_user_cache_dir(author, app));
    REQUIRE_THROWS(get_user_logs_dir(author, app));
    // If only HOME is set, the results should be based on that.
    set_environment_variable("HOME", home_dir.string());
    // config
    auto default_config_dir = home_dir / ".config" / app;
    REQUIRE(get_user_config_dir(author, app) == default_config_dir);
    REQUIRE(exists(default_config_dir));
    reset_directory(home_dir);
    // cache
    auto default_cache_dir = home_dir / ".cache" / app;
    REQUIRE(get_user_cache_dir(author, app) == default_cache_dir);
    REQUIRE(exists(default_cache_dir));
    reset_directory(home_dir);
    // logs
    auto default_logs_dir = home_dir / ".local" / "share" / app / "logs";
    REQUIRE(get_user_logs_dir(author, app) == default_logs_dir);
    REQUIRE(exists(default_logs_dir));
    reset_directory(home_dir);
    // If we try getting the config search path now, it should be empty
    // because the app directory doesn't exist.
    REQUIRE(get_config_search_path(author, app) == std::vector<file_path>());
    // If we create the user config directory, it should then be part of
    // the path.
    get_user_config_dir(author, app);
    REQUIRE(
        get_config_search_path(author, app)
        == std::vector<file_path>({get_user_config_dir(author, app)}));
    reset_directory(home_dir);
    // Check that relative paths aren't used.
    // (The XDG spec says relative paths in XDG_* variables must be ignored.)
    // config
    set_environment_variable("XDG_CONFIG_HOME", "abc/def");
    REQUIRE(get_user_config_dir(author, app) == default_config_dir);
    // cache
    set_environment_variable("XDG_CACHE_HOME", "abc/def");
    REQUIRE(get_user_cache_dir(author, app) == default_cache_dir);
    // data/logs
    set_environment_variable("XDG_DATA_HOME", "abc/def");
    REQUIRE(get_user_logs_dir(author, app) == default_logs_dir);
    // Set some custom directories and check that they're used.
    // config
    auto custom_config_dir = cwd / "xdg_config";
    reset_directory(custom_config_dir);
    set_environment_variable("XDG_CONFIG_HOME", custom_config_dir.string());
    REQUIRE(get_user_config_dir(author, app) == custom_config_dir / app);
    // cache
    auto custom_cache_dir = cwd / "xdg_cache";
    reset_directory(custom_cache_dir);
    set_environment_variable("XDG_CACHE_HOME", custom_cache_dir.string());
    REQUIRE(get_user_cache_dir(author, app) == custom_cache_dir / app);
    // data/logs
    auto custom_data_dir = cwd / "xdg_data";
    reset_directory(custom_data_dir);
    set_environment_variable("XDG_DATA_HOME", custom_data_dir.string());
    REQUIRE(get_user_logs_dir(author, app) == custom_data_dir / app / "logs");
    // Also add some config dirs (including some relative ones and some
    // without app dirs) and check that the search path is adjusted correctly.
    auto system_config_dir_a = cwd / "xdg_sys_config_a";
    reset_directory(system_config_dir_a);
    create_directory(system_config_dir_a / app);
    auto system_config_dir_b = cwd / "xdg_sys_config_b";
    reset_directory(system_config_dir_b);
    auto system_config_dir_c = cwd / "xdg_sys_config_c";
    reset_directory(system_config_dir_c);
    create_directory(system_config_dir_c / app);
    set_environment_variable(
        "XDG_CONFIG_DIRS",
        system_config_dir_b.string() + ":" + system_config_dir_a.string()
            + ":xdg_sys_config_c");
    // Only existing directories that contain the app subdirectory appear,
    // in XDG_CONFIG_HOME-then-XDG_CONFIG_DIRS order.
    REQUIRE(
        get_config_search_path(author, app)
        == std::vector<file_path>(
            {custom_config_dir / app, system_config_dir_a / app}));
    // This isn't really implemented, but check that it's doing the correct
    // fallback.
    REQUIRE(
        get_shared_cache_dir(author, app) == get_user_cache_dir(author, app));
}
// Verifies that search_in_path returns the first directory (in path order)
// that actually contains the requested file.
TEST_CASE("search paths", "[fs][app_dirs]")
{
    auto cwd = std::filesystem::current_path();
    auto search_dir = cwd / "search_paths";
    reset_directory(search_dir);
    create_directory(search_dir / "a");
    create_directory(search_dir / "b");
    create_directory(search_dir / "c");
    // foo.txt exists only under a/ and c/.
    dump_string_to_file(search_dir / "a" / "foo.txt", "foo");
    dump_string_to_file(search_dir / "c" / "foo.txt", "foo");
    // Note: "d" doesn't exist and "b" is empty; "c" precedes "a".
    auto search_path = std::vector<file_path>(
        {search_dir,
         search_dir / "b",
         search_dir / "d",
         search_dir / "c",
         search_dir / "a"});
    REQUIRE(
        search_in_path(search_path, "foo.txt")
        == search_dir / "c" / "foo.txt");
}
|
jinlongliu/AliOS-Things
|
platform/mcu/csky/csi/csi_driver/sanechips/common/include/zx29_trng.h
|
/**
* File: zx2975100_trng.h
* Brief: Implementation of Sanechips trng
*
* Copyright (C) 2017 Sanechips Technology Co., Ltd.
* Author:
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#ifndef ZX297100_TRNG_H
#define ZX297100_TRNG_H
/*******************************************************************************
* Include header files *
*******************************************************************************/
#include "drv_trng.h"
#include "soc.h"
/*******************************************************************************
* Macro definitions *
*******************************************************************************/
#define SEC_MOD_CLKEN0 (ZX29_AP_CRPM_BASE +0x080)
/*******************************************************************************
* Type definitions *
*******************************************************************************/
/*
 * Memory-mapped register layout of the TRNG peripheral. Field order and
 * sizes mirror the hardware register map exactly (byte offsets noted in the
 * per-field comments) — do not reorder or repack.
 */
struct trng_reg_t {
    uint32_t in_output[4]; /*0x00~0c*/
    uint32_t sta_intack; /*0x10*/
    uint32_t conctrl; /*0x14*/
    uint32_t config; /*0x18*/
    uint32_t alarmcnt; /*0x1c*/
    uint32_t fro_enable; /*0x20*/
    uint32_t fro_detune; /*0x24*/
    uint32_t alarm_mask; /*0x28*/
    uint32_t alarm_stop; /*0x2c*/
    uint32_t lfsr0; /*0x30*/
    uint32_t lfsr1; /*0x34*/
    uint32_t lfsr2; /*0x38*/
    uint32_t count; /*0x3c*/
    uint32_t run_cnt; /*0x40*/
    uint32_t run_1; /*0x44*/
    uint32_t run_2; /*0x48*/
    uint32_t run_3; /*0x4c*/
    uint32_t run_4; /*0x50*/
    uint32_t run_5; /*0x54*/
    uint32_t run_6; /*0x58*/
    uint32_t monobit_cnt; /*0x5c*/
    uint32_t POKER_3_0; /*0x60*/
    uint32_t POKER_7_4; /*0x64*/
    uint32_t POKER_B_8; /*0x68*/
    uint32_t POKER_F_C; /*0x6c*/
    uint32_t test; /*0x70*/
    uint32_t block_cnt; /*0x74*/
    uint32_t options; /*0x78*/
    uint32_t eip_rev; /*0x7c*/
    uint32_t endian; /*0x80*/
    uint32_t fro_tst; /*0x84*/
    uint32_t fsm; /*0x88*/
    uint32_t strap; /*0x8c*/
};
/*
 * Buffer RAM size selection codes. Numeric values are hardware-defined —
 * do not change. Names encode the size (e.g. Buffer_512bit = 512-bit RAM).
 */
enum buf_ram_size_t {
    Buffer_NoRam = 0,
    Buffer_Resev = 1, /* reserved */
    Buffer_512bit = 2,
    Buffer_1Kbit = 3,
    Buffer_2Kbit = 4,
    Buffer_4Kbit = 5,
    Buffer_8Kbit = 6,
    Buffer_16Kbit = 7,
    Buffer_End /* sentinel: number of valid codes */
};
/*
 * Bit positions within the `conctrl` register. Values are hardware-defined
 * bit indices — do not change.
 */
enum conctrl_bit_t {
    Ready_mask = 0,
    Shutdown_oflo_mask = 1,
    Stuck_out_mask = 2,
    Noise_fail_mask = 3,
    Run_fail_mask = 4,
    Long_run_fail_mask = 5,
    Poker_fail_mask = 6,
    Monobit_fail_mask = 7,
    Test_mode = 8,
    Enable_trng = 10,
    Post_proc_en = 12,
    Re_seed = 15,
    conctrlbit_end /* sentinel */
};
#endif //#ifndef ZX297100_TRNG_H
|
ai-lab-science/LEGORoboticsPython
|
Demos/TOF_Demo.py
|
#!/usr/bin/env pybricks-micropython
# Demo: continuously print time-of-flight distance and color sensor readings
# on an EV3 brick until the DOWN button is pressed.
from pybricks import ev3brick as brick
from pybricks.parameters import (Port, Button)
from pybricks.tools import print, wait
from pybricks.ev3devices import ColorSensor
# Add the Tools folder to PYTHONPATH. Not necessary if TOF.py is in the same folder.
import sys
sys.path.append("/home/robot/LEGORoboticsPython/Tools")
from TOF import TOF
# Configure the sensor ports.
tof = TOF(Port.S2)
c = ColorSensor(Port.S3)
# Poll both sensors until the DOWN button is pressed.
while not Button.DOWN in brick.buttons():
    result = 'TOF: ' + str(tof.distance())
    print(result)
    print("Color"+str(c.color()))
# Release the TOF sensor handle.
tof.close()
|
flyingjoe/aerofiles
|
setup.py
|
import os
from setuptools import setup, find_packages
GITHUB_URL = 'https://github.com/Turbo87/aerofiles/'
def read(*paths):
    """Build a file path from *paths* and return the contents."""
    joined = os.path.join(*paths)
    with open(joined, 'r') as handle:
        return handle.read()
# Package metadata; executed when pip/setuptools runs this file.
setup(
    name='aerofiles',
    version='1.0.0',
    description='waypoint, task, tracklog readers and writers for aviation',
    # Long description is taken verbatim from the README.
    long_description=read('README.rst'),
    url=GITHUB_URL,
    license='MIT',
    author='<NAME>',
    author_email='<EMAIL>',
    classifiers=[
        'Development Status :: 1 - Planning',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Scientific/Engineering :: GIS',
    ],
    # Ship every package except the test tree.
    packages=find_packages(exclude=['tests*']),
)
|
EijiGard/marketplace
|
webapp/src/components/TxStatus/TxStatusAsset/utils.js
|
import { txUtils } from 'decentraland-eth'
import { buildCoordinate } from 'shared/parcel'
// True when `tx` is a still-pending transaction whose payload coordinates
// target the given parcel.
export function isParcelPendingTransaction(parcel, tx) {
  if (tx.status !== txUtils.TRANSACTION_STATUS.pending) {
    return false
  }
  const txCoordinate = buildCoordinate(tx.payload.x, tx.payload.y)
  return txCoordinate === parcel.id
}
|
esteng/guiding-multi-step
|
learning/datasets/aux_data_providers.py
|
import torch
import torch.nn.functional as F
from torch.autograd import Variable
import numpy as np
import cv2
from scipy.ndimage.filters import gaussian_filter
from data_io.env import load_env_config, load_path, load_env_img
from data_io.env import load_template
from env_config.definitions.landmarks import get_landmark_name_to_index, NUM_LANDMARKS, get_null_landmark_name
from env_config.definitions.nlp_templates import N_LANDMARKS, N_SIDES, get_side_name2idx
from data_io.instructions import split_instruction, clean_instruction, words_to_terms, load_landmark_alignments, get_instruction_segment
import learning.datasets.top_down_dataset as tdd
from learning.datasets.masking import get_obs_mask_every_n_and_segstart, get_obs_mask_segstart
from learning.datasets.dynamic_ground_truth import get_dynamic_ground_truth_v2
from learning.inputs.vision import standardize_image, standardize_2d_prob_dist
from learning.models.semantic_map.pinhole_camera_inv import PinholeCameraProjection
from data_io.units import UnrealUnits
from transformations import cf_to_img, pos_m_to_px
from visualization import Presenter
from learning.inputs.pose import Pose, get_noisy_poses_np, stack_poses_np
import parameters.parameter_server as P
from utils.simple_profiler import SimpleProfiler
"""
This file contains auxiliary data providers.
When rolling out the oracle in the environment, we collect a dataset of trajectories of samples.
Each sample is an image, pose and instruction, along with metadata about which environment and instruction
segment the sample came from.
Given this metadata, the functions in this file are used to collect and return various auxiliary data about
the corresponding samples, such as landmark positions, goal locations, and ground-truth trajectory maps.
"""
# Registry keys under which each auxiliary data provider publishes its outputs.
PROVIDER_LM_POS_DATA = "lm_pos_lm_indices_fpv"
PROVIDER_GOAL_POS = "goal_loc"
PROVIDER_TRAJECTORY_GROUND_TRUTH_STATIC = "trajectory_map_static"
PROVIDER_TRAJECTORY_GROUND_TRUTH_DYNAMIC = "trajectory_map_dynamic"
PROVIDER_TRAJECTORY_GROUND_TRUTH_DYNAMIC_NOISY = "trajectory_map_dynamic_noisy"
PROVIDER_LANG_TEMPLATE = "mentioned_tplt"
PROVIDER_TOP_DOWN_IMAGES = "top_down_images"
PROVIDER_ROT_TOP_DOWN = "rot_top_down"
PROVIDER_LANDMARKS_MENTIONED = "lm_mentioned"
PROVIDER_TRAJ_HISTORY = "past_trajectory_map"
PROVIDER_NOISY_POSES = "noisy_poses"
PROVIDER_START_POSES = "start_poses"
PROVIDER_POSE_NOISE = "pose_noise"

# Landmark placement configuration; consumed by get_landmark_locations_airsim.
LANDMARKS_ON_FLOOR = True
ADD_NULL_LANDMARK = True
def draw_null_landmark_pos(landmark_positions):
    """
    Given an array of real landmark positions, find a place far enough away
    from all other landmarks.

    :param landmark_positions: sequence of landmark positions; the first two
        components of each are interpreted as x/y
    :return: np.ndarray [x, y, 0] at least 20% of the world size away from
        every provided landmark
    """
    world_size = P.get_current_parameters()["Setup"]["world_size_m"]
    dst_t = world_size * 0.2
    # With no landmarks to avoid, any position is acceptable. The original
    # code raised ValueError here (min() of an empty sequence).
    if len(landmark_positions) == 0:
        new_pos = np.random.uniform(0, world_size, 2)
        return np.asarray([new_pos[0], new_pos[1], 0])
    # Rejection-sample until the drawn point clears the distance threshold.
    while True:
        new_pos = np.random.uniform(0, world_size, 2)
        closest = min(np.linalg.norm(new_pos - p[:2]) for p in landmark_positions)
        if closest > dst_t:
            return np.asarray([new_pos[0], new_pos[1], 0])
def get_landmark_locations_airsim(config_json):
    """Extract landmark names, classifier indices, and 3D positions from an
    environment config dict.

    Reads the parallel lists ``landmarkName``/``xPos``/``zPos`` from
    ``config_json``, converts each 2D position to AirSim coordinates, and
    optionally appends a synthetic "null" landmark (ADD_NULL_LANDMARK).
    Returns (names, classifier indices, 3D positions).
    """
    landmark_names = []
    landmark_positions = []
    units = UnrealUnits()
    for i, landmarkName in enumerate(config_json["landmarkName"]):
        x_pos = config_json["xPos"][i]
        # NOTE(review): the config's "zPos" is used as the 2nd planar
        # coordinate here — presumably the config uses x/z as the ground
        # plane; confirm against the environment generator.
        y_pos = config_json["zPos"][i]
        pt = np.asarray([x_pos, y_pos])
        pt_as = np.zeros(3)
        pt_as[0:2] = units.pos2d_to_as(pt)
        # TODO: Grab this from the parameter server
        pt_as[2] = 0.0 if LANDMARKS_ON_FLOOR else -1.0 # Landmarks assumed to be floating 1m above ground.
        landmark_names.append(landmarkName)
        landmark_positions.append(pt_as)
    if ADD_NULL_LANDMARK:
        # Synthetic landmark placed far from all real ones.
        null_pos = draw_null_landmark_pos(landmark_positions)
        landmark_names.append(get_null_landmark_name())
        landmark_positions.append(null_pos)
    name2idx = get_landmark_name_to_index(add_empty=ADD_NULL_LANDMARK)
    landmark_indices = [name2idx[name] for name in landmark_names]
    return landmark_names, landmark_indices, landmark_positions
def get_mentioned_landmarks_nl(str_instruction):
    """Find the landmarks mentioned in a natural-language instruction.

    Uses the landmark-alignment thesaurus to map instruction words to terms
    and terms to landmark groundings. Returns (names, classifier indices);
    both empty if the thesaurus is unavailable.
    """
    thesaurus = load_landmark_alignments()
    if thesaurus is None:
        return [], []
    split_instr = split_instruction(clean_instruction(str_instruction))
    word2term = thesaurus["word2term"]
    term_groundings = thesaurus["term_groundings"]
    lm_name2index = get_landmark_name_to_index()
    # Map each word in the instruction to its corresponding term:
    split_instr_terms = words_to_terms(split_instr, word2term)
    mentioned_landmark_names = set()
    # For each term, find all the landmarks that have been mentioned
    for term in split_instr_terms:
        for landmark_name in term_groundings[term]["landmarks"]:
            mentioned_landmark_names.add(landmark_name)
    mentioned_landmark_names = list(mentioned_landmark_names)
    mentioned_landmark_indices = [lm_name2index[name] for name in mentioned_landmark_names]
    return mentioned_landmark_names, mentioned_landmark_indices
def any_string_is_substring(stringlist, str):
    """Return True if any string in ``stringlist`` occurs as a substring of ``str``.

    Fixes an off-by-one in the original, which used ``str.find(referent) > 0``
    and therefore missed referents matching at index 0 (e.g. an instruction
    that *starts* with the landmark name).
    """
    return any(referent in str for referent in stringlist)
def get_mentioned_landmarks_tplt(str_instruction):
    """Find landmarks referenced by a templated instruction string.

    A landmark counts as mentioned when any of its referent strings appears
    in the instruction. Returns (names, classifier indices).
    """
    name_to_index = get_landmark_name_to_index()
    mentioned = {name for name, referents in N_LANDMARKS.items()
                 if any_string_is_substring(referents, str_instruction)}
    mentioned_names = list(mentioned)
    mentioned_indices = [name_to_index[name] for name in mentioned_names]
    return mentioned_names, mentioned_indices
def get_mentioned_landmark_side_tplt(env_id):
    """Look up the landmark and side referenced by an environment's template.

    Returns (landmark name, landmark index, side name, side index).
    """
    template = load_template(env_id)
    landmark_name = template["landmark1"]
    side_name = template["side"]
    landmark_idx = get_landmark_name_to_index()[landmark_name]
    side_idx = get_side_name2idx()[side_name]
    return landmark_name, landmark_idx, side_name, side_idx
def get_mentioned_sides_tplt(str_instruction):
    """Return (side_name, index) of the first side (in sorted key order)
    whose referents appear in the instruction, or (0, 0) when none match."""
    for index, side_name in enumerate(sorted(N_SIDES)):
        if any_string_is_substring(N_SIDES[side_name], str_instruction):
            return side_name, index
    return 0, 0
def get_top_down_image_env(env_id, map_w, map_h, img_w, img_h):
    """
    To be called externally to retrieve a top-down environment image oriented with the start of the requested segment
    :param env_id: environment id
    :return: float tensor of shape (1, C, H, W) — standardized top-down image,
        zero-padded on the bottom/right when (img_w, img_h) > (map_w, map_h)
    """
    env_image_in = load_env_img(env_id, map_w, map_h)

    # If we need to return a bigger image resolution than we loaded
    if map_w != img_w or map_h != img_h:
        # Pad with zeros: the loaded map occupies the top-left corner.
        env_image = np.zeros([img_h, img_w, env_image_in.shape[2]])
        env_image[0:map_h, 0:map_w, :] = env_image_in
    else:
        env_image = env_image_in

    #path_img = cf_to_img(path, [env_image.shape[0], env_image.shape[1]])
    #self.plot_path_on_img(env_image, path_img)

    env_image = standardize_image(env_image)
    env_img_t = torch.from_numpy(env_image).unsqueeze(0).float()
    #presenter = Presenter()
    #presenter.show_image(env_img_t[0], "data_img", torch=True, scale=1)
    return env_img_t
def get_top_down_ground_truth_static_ego(env_id, start_idx, img_w, img_h, map_w, map_h):
    """
    Returns the ground-truth label rotated into the egocentric frame of the
    segment start (position + yaw at ``start_idx``).

    :param env_id: environment id
    :param start_idx: index into the path where the segment starts
    :param img_w: output image width
    :param img_h: output image height
    :param map_w: map width used for coordinate conversion
    :param map_h: map height used for coordinate conversion
    :return: float tensor (1, 2, H, W): channel 0 = path distribution,
        channel 1 = endpoint distribution, each standardized separately
    """
    path = load_path(env_id)
    #instruction_segments = [self.all_instr[env_id][set_idx]["instructions"][seg_idx]]

    start_pt, dir_yaw = tdd.get_start_pt_and_yaw(path, start_idx, map_w, map_h, 0)
    affine = tdd.get_affine_matrix(start_pt, dir_yaw, img_w, img_h)
    seg_labels = np.zeros([img_w, img_h, 2]).astype(float)
    path_in_img = cf_to_img(path, np.array([map_w, map_h]))
    #gauss_sigma = map_w / 96
    gauss_sigma = map_w / 32

    seg_labels[:, :, 0] = tdd.plot_path_on_img(seg_labels[:, :, 0], path_in_img)
    if len(path_in_img) > 1:
        seg_labels[:, :, 1] = tdd.plot_dot_on_img(seg_labels[:, :, 1], path_in_img[-1], gauss_sigma)

    # Rotate into the egocentric frame, then blur both channels.
    seg_labels_rot = tdd.apply_affine(seg_labels, affine, img_w, img_h)
    seg_labels_rot[:, :, 0] = gaussian_filter(seg_labels_rot[:, :, 0], gauss_sigma)
    seg_labels_rot[:, :, 1] = gaussian_filter(seg_labels_rot[:, :, 1], gauss_sigma)

    # Debug visualization was left enabled (DEBUG = True) in the original,
    # which blocked execution on cv2.waitKey(0). Disabled to match the
    # sibling ground-truth functions, which ship with DEBUG = False.
    DEBUG = False
    if DEBUG:
        cv2.imshow("l_traj", seg_labels_rot[:, :, 0])
        cv2.imshow("l_endpt", seg_labels_rot[:, :, 1])
        cv2.waitKey(0)

    # Standardize both channels separately (each has mean zero, unit variance)
    seg_labels_path = standardize_2d_prob_dist(seg_labels_rot[:, :, 0:1])
    seg_labels_endpt = standardize_2d_prob_dist(seg_labels_rot[:, :, 1:2])

    seg_labels_rot = np.concatenate((seg_labels_path, seg_labels_endpt), axis=0)

    seg_labels_t = torch.from_numpy(seg_labels_rot).unsqueeze(0).float()
    return seg_labels_t
def resolve_and_get_ground_truth_static_global(env_id, set_idx, seg_idx, map_size_px, world_size_px):
    """Resolve an instruction segment to its path index range, then build the
    static global ground-truth label for that range."""
    segment = get_instruction_segment(env_id, set_idx, seg_idx)
    return get_top_down_ground_truth_static_global(
        env_id, segment["start_idx"], segment["end_idx"],
        map_size_px, map_size_px, world_size_px, world_size_px)
def get_top_down_ground_truth_static_global(env_id, start_idx, end_idx, img_w, img_h, map_w, map_h):
    """
    Returns the ground-truth label oriented in the global map frame
    :param env_id: environment id
    :param start_idx: index of the first path point of the segment
    :param end_idx: index one past the last path point of the segment
    :param img_w: output image width
    :param img_h: output image height
    :param map_w: map width used for coordinate conversion
    :param map_h: map height used for coordinate conversion
    :return: float tensor (1, 2, H, W): channel 0 = path distribution,
        channel 1 = endpoint distribution, each standardized separately
    """
    path = load_path(env_id)
    path = path[start_idx:end_idx]
    #instruction_segments = [self.all_instr[env_id][set_idx]["instructions"][seg_idx]]

    seg_labels = np.zeros([img_w, img_h, 2]).astype(float)
    path_in_img = cf_to_img(path, np.array([map_w, map_h]))
    gauss_sigma = map_w / 96

    seg_labels[:, :, 0] = tdd.plot_path_on_img(seg_labels[:, :, 0], path_in_img)
    if len(path_in_img) > 1:
        # Endpoint channel only makes sense with at least two path points.
        seg_labels[:, :, 1] = tdd.plot_dot_on_img(seg_labels[:, :, 1], path_in_img[-1], gauss_sigma)

    seg_labels[:, :, 0] = gaussian_filter(seg_labels[:, :, 0], gauss_sigma)
    seg_labels[:, :, 1] = gaussian_filter(seg_labels[:, :, 1], gauss_sigma)

    # Standardize both channels separately (each has mean zero, unit variance)
    seg_labels_path = standardize_2d_prob_dist(seg_labels[:, :, 0:1])
    seg_labels_endpt = standardize_2d_prob_dist(seg_labels[:, :, 1:2])

    DEBUG = False
    if DEBUG:
        cv2.imshow("l_traj", seg_labels_path[0, :, :])
        cv2.imshow("l_endpt", seg_labels_endpt[0, :, :])
        cv2.waitKey(10)

    seg_labels = np.concatenate((seg_labels_path, seg_labels_endpt), axis=0)

    seg_labels_t = torch.from_numpy(seg_labels).unsqueeze(0).float()
    return seg_labels_t
def get_top_down_ground_truth_dynamic_global(env_id, start_idx, end_idx, drone_pos_as, img_w, img_h, map_w, map_h):
    """
    Returns the ground-truth label oriented in the global map frame
    :param env_id: environment id
    :param start_idx: index of the first path point of the segment
    :param end_idx: index one past the last path point of the segment
    :param drone_pos_as: current drone position in AirSim coordinates; the
        ground-truth path is re-planned ("dynamic") from this position
    :param img_w: output image width
    :param img_h: output image height
    :param map_w: map width used for coordinate conversion
    :param map_h: map height used for coordinate conversion
    :return: float tensor (1, 2, H, W): channel 0 = path distribution,
        channel 1 = endpoint distribution, each standardized separately
    """
    PROFILE = False
    prof = SimpleProfiler(False, PROFILE)
    path = load_path(env_id, anno=True)
    #print(len(path), start_idx, end_idx)
    path = path[start_idx:end_idx]
    #instruction_segments = [self.all_instr[env_id][set_idx]["instructions"][seg_idx]]

    prof.tick("load_path")
    units = UnrealUnits(1.0)
    drone_pos_cf = units.pos3d_from_as(drone_pos_as)

    #print("Dynamic ground truth for ", env_id, start_idx, end_idx)
    # Re-plan the ground-truth path from the drone's current 2D position.
    gt_dynamic = get_dynamic_ground_truth_v2(path, drone_pos_cf[:2])
    #Presenter().plot_path(env_id, [path[start_idx:end_idx], gt_dynamic])

    prof.tick("gen_gt_path")

    seg_labels = np.zeros([img_w, img_h, 2]).astype(float)
    path_in_img = cf_to_img(gt_dynamic, np.array([map_w, map_h]))
    gauss_sigma = map_w / 96

    seg_labels[:, :, 0] = tdd.plot_path_on_img(seg_labels[:, :, 0], path_in_img)
    if len(path_in_img) > 1:
        seg_labels[:, :, 1] = tdd.plot_dot_on_img(seg_labels[:, :, 1], path_in_img[-1], gauss_sigma)

    prof.tick("plot_path")

    seg_labels[:, :, 0] = gaussian_filter(seg_labels[:, :, 0], gauss_sigma)
    seg_labels[:, :, 1] = gaussian_filter(seg_labels[:, :, 1], gauss_sigma)

    # Standardize both channels separately (each has mean zero, unit variance)
    seg_labels_path = standardize_2d_prob_dist(seg_labels[:, :, 0:1])
    seg_labels_endpt = standardize_2d_prob_dist(seg_labels[:, :, 1:2])

    prof.tick("process_img")

    DEBUG = False
    if DEBUG:
        gt_path_in_img = cf_to_img(path, np.asarray([map_w, map_h]))
        dbg_labels_gt = np.zeros([img_w, img_h, 1])
        dbg_labels_gt[:, :, 0] = tdd.plot_path_on_img(dbg_labels_gt[:, :, 0], gt_path_in_img)
        Presenter().show_image(dbg_labels_gt, "dbg", torch=False, waitkey=10, scale=4)
        Presenter().show_image(torch.from_numpy(seg_labels_path), "l_path", torch=True, waitkey=10, scale=4)
        Presenter().show_image(torch.from_numpy(seg_labels_endpt), "l_endp", torch=True, waitkey=100, scale=4)

    seg_labels = np.concatenate((seg_labels_path, seg_labels_endpt), axis=0)

    seg_labels_t = torch.from_numpy(seg_labels).unsqueeze(0).float()

    prof.tick("prep_out")
    prof.print_stats()

    return seg_labels_t
def __get_goal_location_airsim(goal):
    """Convert a 2D goal position from config coordinates to AirSim coordinates."""
    units = UnrealUnits()
    pt = np.asarray([goal[0], goal[1]])
    pt_as = np.zeros(2)
    pt_as[0:2] = units.pos2d_to_as(pt)
    return pt_as
def provider_lm_pos_lm_indices_fpv(segment_data, data):
    """
    Data provider that gives the positions and indices of all landmarks visible in the FPV image.
    :param segment_data: segment dataset for which to provide data
    :param data: unused here; part of the provider interface
    :return: ("lm_pos", lm_pos) - lm_pos is a list (over timesteps) of lists (over landmarks visible in image) of the
    landmark locations in image pixel coordinates
    ("lm_indices", lm_indices) - lm_indices is a list (over timesteps) of lists (over landmarks visible in image)
    of the landmark indices for every landmark included in lm_pos. These are the landmark classifier labels
    """
    env_id = segment_data[0]["metadata"]["env_id"]
    domain = segment_data[0]["metadata"]["domain"]

    #if INSTRUCTIONS_FROM_FILE:
    #    env_instr = load_instructions(env_id)

    conf_json = load_env_config(env_id)
    all_landmark_indices = get_landmark_name_to_index()
    landmark_names, landmark_indices, landmark_pos = get_landmark_locations_airsim(conf_json)

    # Camera/projection parameters come from whichever model config is active.
    params = P.get_current_parameters().get("Model") or P.get_current_parameters().get("ModelPVN").get("Stage1")
    projector = PinholeCameraProjection(
        map_size_px=params["global_map_size"],
        world_size_px=params["world_size_px"],
        world_size_m=params["world_size_m"],
        img_x=params["img_w"],
        img_y=params["img_h"],
        cam_fov=params["cam_h_fov"],
        domain=domain,
        use_depth=False
    )
    traj_len = len(segment_data)

    lm_pos_fpv = []
    lm_indices = []
    lm_mentioned = []
    lm_pos_map = []

    for timestep in range(traj_len):
        t_lm_pos_fpv = []
        t_lm_indices = []
        t_lm_mentioned = []
        t_lm_pos_map = []

        if segment_data[timestep]["state"] is not None:
            cam_pos = segment_data[timestep]["state"].get_cam_pos_3d()
            cam_rot = segment_data[timestep]["state"].get_cam_rot()

            instruction_str = segment_data[timestep]["instruction"]
            mentioned_landmark_names, mentioned_landmark_indices = get_mentioned_landmarks_nl(instruction_str)

            for i, landmark_in_world in enumerate(landmark_pos):
                landmark_idx = landmark_indices[i]
                landmark_in_img, landmark_in_cam, status = projector.world_point_to_image(cam_pos, cam_rot, landmark_in_world)
                this_lm_mentioned = 1 if landmark_idx in mentioned_landmark_indices else 0

                # This is None if the landmark is behind the camera.
                if landmark_in_img is not None:
                    # presenter.save_image(images[timestep], name="tmp.png", torch=True, draw_point=landmark_in_img)
                    t_lm_pos_fpv.append(landmark_in_img[0:2])
                    t_lm_pos_map.append(landmark_in_world[0:2])
                    t_lm_indices.append(landmark_idx)
                    t_lm_mentioned.append(this_lm_mentioned)

        # Convert to tensors when at least one landmark is visible; otherwise
        # record None for this timestep.
        if len(t_lm_pos_fpv) > 0:
            t_lm_pos_fpv = torch.from_numpy(np.asarray(t_lm_pos_fpv)).float()
            t_lm_pos_map = torch.from_numpy(np.asarray(t_lm_pos_map)).float()
            t_lm_indices = torch.from_numpy(np.asarray(t_lm_indices)).long()
            t_lm_mentioned = torch.from_numpy(np.asarray(t_lm_mentioned)).long()
        else:
            t_lm_pos_fpv = None
            t_lm_pos_map = None
            t_lm_indices = None
            t_lm_mentioned = None

        lm_pos_fpv.append(t_lm_pos_fpv)
        lm_pos_map.append(t_lm_pos_map)
        lm_indices.append(t_lm_indices)
        lm_mentioned.append(t_lm_mentioned)

    return [("lm_pos_fpv", lm_pos_fpv), ("lm_indices", lm_indices), ("lm_mentioned", lm_mentioned), ("lm_pos_map", lm_pos_map)]
def provider_goal_pos_map(segment_data, data):
    """
    Data provider that gives the goal location (the endpoint of the current instruction
    segment's path) for every timestep, in AirSim coordinates.
    :param segment_data: segment dataset for which to provide data
    :param data: dict of previously computed auxiliary data (unused here)
    :return: [("goal_loc", goal_loc_t)] where goal_loc_t is an Sx2 float tensor of
        goal positions; timesteps with no data get a (0.0, 0.0) placeholder.
    """
    env_id = segment_data[0]["metadata"]["env_id"]
    path = load_path(env_id)
    traj_len = len(segment_data)
    goal_loc = []
    for timestep in range(traj_len):
        # Missing timesteps contribute a zero placeholder so the output stays Sx2.
        if segment_data[timestep] is None:
            goal_loc.append(np.asarray([0.0, 0.0]))
            continue
        set_idx = segment_data[timestep]["metadata"]["set_idx"]
        seg_idx = segment_data[timestep]["metadata"]["seg_idx"]
        seg = get_instruction_segment(env_id, set_idx, seg_idx)
        end_idx = seg["end_idx"]
        # Clamp to the last path point if the segment's end index runs past the path.
        if end_idx < len(path):
            end_pt = path[end_idx]
        else:
            end_pt = path[-1]
        goal_as = __get_goal_location_airsim(end_pt)
        goal_loc.append(goal_as)
    goal_loc = np.asarray(goal_loc)
    goal_loc_t = torch.from_numpy(goal_loc).float()
    return [("goal_loc", goal_loc_t)]
def provider_mentioned_lang_template(segment_data, data):
    """
    Provides, for every timestep, the landmark index and side index mentioned by the
    segment's templated instruction. Timesteps without data get a 0 placeholder.
    :param segment_data: segment dataset for which to provide data
    :param data: dict of previously computed auxiliary data (unused here)
    :return: [("lm_mentioned_tplt", ...), ("side_mentioned_tplt", ...)] - two S-length
        long tensors of per-timestep landmark / side indices.
    """
    # TODO: for natural language, we'll use the NL functions above, instead of the tplt ones
    env_id = segment_data[0]["metadata"]["env_id"]
    _, lm_idx, _, side_idx = get_mentioned_landmark_side_tplt(env_id)
    # The template mentions one landmark/side per segment, so every valid timestep
    # carries the same pair of indices.
    lm_per_step = [lm_idx if sample is not None else 0 for sample in segment_data]
    side_per_step = [side_idx if sample is not None else 0 for sample in segment_data]
    lm_t = torch.from_numpy(np.asarray(lm_per_step))
    side_t = torch.from_numpy(np.asarray(side_per_step))
    return [("lm_mentioned_tplt", lm_t), ("side_mentioned_tplt", side_t)]
def provider_trajectory_ground_truth(segment_data, data, kind="static"):
    """
    Provides top-down ground-truth visitation maps for the planning timesteps of a segment.
    :param segment_data: segment dataset for which to provide data
    :param data: dict of previously computed auxiliary data; for kind="dynamic_noisy"
        it must already contain "noisy_poses" (see provider_noisy_poses).
    :param kind: "static" (environment frame), "dynamic" (current true pose frame) or
        "dynamic_noisy" (perturbed pose frame for data augmentation).
    :return: [("traj_ground_truth", labels), ("plan_mask", obs_mask), ("firstseg_mask", firstseg_mask)]
        where labels is an SxCxHxW tensor with one entry per planning timestep, and the
        masks flag planning timesteps / first-in-segment timesteps respectively.
    """
    # For now, use only the first label
    traj_len = len(segment_data)
    env_id = segment_data[0]["metadata"]["env_id"]
    labels = []
    # TODO: This could be more general than PVN model, but for now it's really not gonna be
    model_params = P.get_current_parameters()["ModelPVN"]["Stage1"]
    plan_every_n_steps = model_params["plan_every_n_steps"]
    #m_size = model_params["local_map_size"]
    m_size = model_params["global_map_size"]
    w_size = model_params["world_size_px"]
    # True for planning timesteps, False for the other timesteps
    obs_mask = get_obs_mask_every_n_and_segstart(plan_every_n_steps, segment_data)
    firstseg_mask = get_obs_mask_segstart(segment_data)
    for timestep in range(traj_len):
        # TODO: Shouldn't do this for every single timestep, otherwise it takes really long!
        # Ground truth is only computed for planning timesteps (obs_mask True).
        if segment_data[timestep] is not None and obs_mask[timestep]:
            md = segment_data[timestep]["metadata"]
            seg = get_instruction_segment(md["env_id"], md["set_idx"], md["seg_idx"])
            start_idx = seg["start_idx"]
            end_idx = seg["end_idx"]
            if kind == "dynamic":
                # Use the drone's true position at this timestep as the frame origin.
                pos = segment_data[timestep]["state"].state[9:12]
                labels_t = get_top_down_ground_truth_dynamic_global(env_id, start_idx, end_idx, pos, m_size, m_size, w_size, w_size)
            elif kind == "dynamic_noisy":
                assert "noisy_poses" in data, "Noisy poses must be computed before computing dynamic ground truth!"
                pos = data["noisy_poses"][timestep].position
                labels_t = get_top_down_ground_truth_dynamic_global(env_id, start_idx, end_idx, pos, m_size, m_size, w_size, w_size)
            elif kind == "static":
                labels_t = get_top_down_ground_truth_static_global(env_id, start_idx, end_idx, m_size, m_size, w_size, w_size)
            else:
                raise Exception("Unknown trajectory ground truth kind")
            # append CxHxW
            labels.append(labels_t[0])
            # TODO: for natural language, we'll use the NL functions above, instead of the tlpt ones
            #else:
            #    labels.append(labels[-1])
    # create labels SxCxHxW
    labels = torch.stack(labels, dim=0)
    return [("traj_ground_truth", labels), ("plan_mask", obs_mask), ("firstseg_mask", firstseg_mask)]
def provider_trajectory_ground_truth_static(segment_data, data):
    """Ground-truth visitation maps in the static (environment) frame."""
    return provider_trajectory_ground_truth(segment_data, data, kind="static")
def provider_trajectory_ground_truth_dynamic(segment_data, data):
    """Ground-truth visitation maps in the frame of the drone's true pose."""
    return provider_trajectory_ground_truth(segment_data, data, kind="dynamic")
def provider_trajectory_ground_truth_dynamic_noisy(segment_data, data):
    """Ground-truth visitation maps in the frame of the noise-perturbed pose
    (requires data["noisy_poses"] to be computed first)."""
    return provider_trajectory_ground_truth(segment_data, data, kind="dynamic_noisy")
def provider_top_down_images(segment_data, data):
    """
    Provides a single top-down image of the whole environment for this segment.
    :param segment_data: segment dataset for which to provide data
    :param data: dict of previously computed auxiliary data (unused here)
    :return: [("top_down_images", top_down_images_t)] where top_down_images_t is an
        SxCxHxW tensor with S == 1 (one global image per segment).
    """
    env_id = segment_data.metadata[0]["env_id"]
    # env_image is CxHxW
    env_image = get_top_down_image_env(env_id, 256, 256, 512, 512)[0]
    # Only one global image is needed per segment. The previous implementation
    # built it through a degenerate range(1) loop and kept two dead locals
    # (traj_len, prev_seg); this is the same result, computed directly.
    top_down_images_t = torch.stack([env_image], dim=0)  # SxCxHxW, S=1
    return [("top_down_images", top_down_images_t)]
def provider_rot_top_down_images(segment_data, data):
    """
    Provides, for every timestep with metadata, a top-down environment image and a
    ground-truth trajectory label map, both rotated into the frame defined by the
    segment's start point and initial heading.
    :param segment_data: segment dataset for which to provide data
    :param data: dict of previously computed auxiliary data (unused here)
    :return: [("top_down_images", tdimg_t), ("traj_ground_truth", tdlab_t)]
    """
    env_id = segment_data.metadata[0]["env_id"]
    path = load_path(env_id)
    env_image = load_env_img(env_id, 256, 256)
    top_down_images = []
    top_down_labels = []
    for md in segment_data.metadata:
        # Metadata lists are padded with None at the end; stop at the first gap.
        if md is None:
            break
        set_idx = md["set_idx"]
        seg_idx = md["seg_idx"]
        instr_seg = get_instruction_segment(env_id, set_idx, seg_idx)
        start_idx = instr_seg["start_idx"]
        end_idx = instr_seg["end_idx"]
        # Build an affine transform that maps the map into the start-pose frame.
        start_pt, dir_yaw = tdd.get_start_pt_and_yaw(path, start_idx, 256, 256, 0)
        affine = tdd.get_affine_matrix(start_pt, dir_yaw, 512, 512)
        seg_img_t = tdd.gen_top_down_image(env_image, affine, 512, 512, 256, 256)
        seg_labels_t = tdd.gen_top_down_labels(path[start_idx:end_idx], affine, 512, 512, 256, 256, True, True)
        # Downsample labels 8x with max-pooling so peaks survive the reduction.
        seg_labels_t = F.max_pool2d(Variable(seg_labels_t), 8).data
        top_down_images.append(seg_img_t)
        top_down_labels.append(seg_labels_t)
    tdimg_t = torch.cat(top_down_images, dim=0)
    tdlab_t = torch.cat(top_down_labels, dim=0)
    return [("top_down_images", tdimg_t), ("traj_ground_truth", tdlab_t)]
def provider_landmarks_mentioned(segment_data, data):
    """
    Provides, for every valid timestep, which landmarks are mentioned in that
    timestep's natural-language instruction.
    :param segment_data: segment dataset for which to provide data
    :param data: dict of previously computed auxiliary data (unused here)
    :return: [("lang_lm_mentioned_indices", ...)] - per-timestep lists of mentioned landmark indices
             [("lang_lm_mentioned_names", ...)] - per-timestep lists of mentioned landmark names
             [("lang_lm_mentioned", ...)] - SxNUM_LANDMARKS 0/1 long tensor marking mentions
    """
    traj_len = len(segment_data)
    mentioned_lm_indices = []
    mentioned_lm_names = []
    mentioned_lm_stack = []
    for timestep in range(traj_len):
        if segment_data[timestep] is not None:
            mentioned_lm_t = torch.zeros([NUM_LANDMARKS]).long()
            instruction_str = segment_data[timestep]["instruction"]
            mentioned_landmark_names, mentioned_landmark_indices = get_mentioned_landmarks_nl(instruction_str)
            mentioned_lm_indices.append(mentioned_landmark_indices)
            # BUGFIX: previously this appended the accumulator list to itself
            # (mentioned_lm_names.append(mentioned_lm_names)) instead of the names.
            mentioned_lm_names.append(mentioned_landmark_names)
            # BUGFIX: mark the landmarks mentioned at THIS timestep; previously the
            # loop always read mentioned_lm_indices[0] (the first timestep's indices).
            for index in mentioned_landmark_indices:
                mentioned_lm_t[index] = 1
            mentioned_lm_stack.append(mentioned_lm_t)
    mentioned_lms_t = torch.stack(mentioned_lm_stack, dim=0)
    return [("lang_lm_mentioned_indices", mentioned_lm_indices),
            ("lang_lm_mentioned_names", mentioned_lm_names),
            ("lang_lm_mentioned", mentioned_lms_t)]
def provider_past_trajectory(segment_data, data):
    """
    Provides, for every timestep, a 64x64 map showing the trajectory flown so far,
    drawn incrementally on a shared canvas (each timestep's map includes all
    previous motion).
    :param segment_data: segment dataset for which to provide data
    :param data: dict of previously computed auxiliary data (unused here)
    :return: [("past_trajectory_map", canvases_t)] - SxHxW tensor of canvases
    """
    canvas = np.zeros((64, 64))
    canvases_t = []
    last_pos = None
    for timestep in range(len(segment_data)):
        # BUGFIX: access the state like every other provider does;
        # segment_data.state[timestep] is not how these samples are indexed.
        state = segment_data[timestep]["state"]
        if state is None:
            break
        pos_as = state.state[9:12]
        pos_map = pos_m_to_px(pos_as[np.newaxis, :], img_size_px=32)[0]
        # BUGFIX: only draw once two points are available. Previously `coords` was
        # undefined on the first iteration (NameError) and `last_pos != None`
        # compared a numpy array elementwise (ambiguous truth value).
        if last_pos is not None:
            tdd.plot_path_on_img(canvas, [last_pos, pos_map])
        last_pos = pos_map
        # NOTE(review): debug visualization left from the original; will fail on
        # headless machines - consider removing.
        cv2.imshow("past_traje", canvas)
        # Copy so later drawing does not mutate earlier timesteps' tensors.
        canvases_t.append(torch.from_numpy(canvas.copy()))
    canvases_t = torch.stack(canvases_t, dim=0)
    return [("past_trajectory_map", canvases_t)]
def provider_noisy_poses(segment_data, data):
    """
    This provider returns noisy poses of type learning.inputs.Pose
    These noisy poses are used during training to rotate the semantic map by a random angle before predicting visitation
    probabilities as a form of data augmentation.
    :param segment_data: segment dataset for which to provide data
    :param data: dict of previously computed auxiliary data (unused here)
    :return: [("noisy_poses", noisy_poses_t)] - perturbed poses as torch tensors
    """
    traj_len = len(segment_data)
    last_pos = None
    clean_poses = []
    model_params = P.get_current_parameters()["ModelPVN"]["Stage1"]
    # If set, every timestep in a segment uses the segment's first pose instead of its own.
    use_first_pose = model_params["predict_in_start_frame"]
    seg_idx = -1
    first_step = 0
    for timestep in range(traj_len):
        if segment_data[timestep]["state"] is None:
            break
        # Track where the current instruction segment begins.
        if segment_data[timestep]["metadata"]["seg_idx"] != seg_idx:
            first_step = timestep
            seg_idx = segment_data[timestep]["metadata"]["seg_idx"]
        if use_first_pose:
            # X["state"] is a DroneState object
            # state[9:12] is position, state[12:16] is rotation (quaternion) -
            # same slicing convention as the other providers in this module.
            pos_as = segment_data[first_step]["state"].state[9:12]
            rot_as = segment_data[first_step]["state"].state[12:16]
        else:
            pos_as = segment_data[timestep]["state"].state[9:12]
            rot_as = segment_data[timestep]["state"].state[12:16]
        clean_pose = Pose(pos_as, rot_as)
        clean_poses.append(clean_pose)
    params = P.get_current_parameters()["Data"]
    # Perturb the clean poses with configured position/rotation noise variances.
    noisy_poses = get_noisy_poses_np(clean_poses, params["noisy_pos_variance"], params["noisy_rot_variance"])
    noisy_poses_t = noisy_poses.to_torch()
    return [("noisy_poses", noisy_poses_t)]
def provider_start_poses(segment_data, data):
    """
    Provides, for every timestep, the pose at the start of the instruction segment
    that the timestep belongs to.
    :param segment_data: segment dataset for which to provide data
    :param data: dict of previously computed auxiliary data (unused here)
    :return: [("start_poses", ...)] - per-timestep segment-start poses as torch tensors
    """
    start_poses = []
    current_seg_idx = -2
    for sample in segment_data:
        if sample is None:
            break
        # On entering a new instruction segment, record its starting pose;
        # every timestep of that segment then reuses the same pose.
        if sample["metadata"]["seg_idx"] != current_seg_idx:
            current_seg_idx = sample["metadata"]["seg_idx"]
            position = sample["state"].state[9:12]
            rotation = sample["state"].state[12:16]
            segment_start_pose = Pose(position, rotation)
        start_poses.append(segment_start_pose)
    stacked_poses = stack_poses_np(start_poses)
    return [("start_poses", stacked_poses.to_torch())]
def resolve_data_provider(aux_provider_name):
    """
    Given a name of one of the auxiliary data providers, returns a function that takes a
    data segment and returns the multiple auxiliary data sources
    :param aux_provider_name: one of the PROVIDER_* constants
    :return: the corresponding provider function
    :raises ValueError: if the name does not match any known provider. (Previously an
        unknown name silently returned None, deferring the failure to the call site.)
    """
    providers = {
        PROVIDER_LM_POS_DATA: provider_lm_pos_lm_indices_fpv,
        PROVIDER_TRAJECTORY_GROUND_TRUTH_STATIC: provider_trajectory_ground_truth_static,
        PROVIDER_TRAJECTORY_GROUND_TRUTH_DYNAMIC: provider_trajectory_ground_truth_dynamic,
        PROVIDER_TRAJECTORY_GROUND_TRUTH_DYNAMIC_NOISY: provider_trajectory_ground_truth_dynamic_noisy,
        PROVIDER_GOAL_POS: provider_goal_pos_map,
        PROVIDER_LANG_TEMPLATE: provider_mentioned_lang_template,
        PROVIDER_TOP_DOWN_IMAGES: provider_top_down_images,
        PROVIDER_ROT_TOP_DOWN: provider_rot_top_down_images,
        PROVIDER_LANDMARKS_MENTIONED: provider_landmarks_mentioned,
        PROVIDER_TRAJ_HISTORY: provider_past_trajectory,
        PROVIDER_NOISY_POSES: provider_noisy_poses,
        PROVIDER_START_POSES: provider_start_poses,
    }
    try:
        return providers[aux_provider_name]
    except KeyError:
        raise ValueError("Unknown auxiliary data provider: {}".format(aux_provider_name))
def get_aux_label_names(aux_provider_names):
    """
    Returns the names of all auxiliary labels produced by the given providers,
    in provider order (duplicates are kept, matching the providers' outputs).
    :param aux_provider_names: list of PROVIDER_* constants
    :return: list of label name strings
    """
    labels_by_provider = {
        PROVIDER_LM_POS_DATA: ["lm_pos_fpv", "lm_pos_map", "lm_indices", "lm_mentioned"],
        PROVIDER_GOAL_POS: ["goal_loc"],
        PROVIDER_TRAJECTORY_GROUND_TRUTH_STATIC: ["traj_ground_truth", "plan_mask", "firstseg_mask"],
        PROVIDER_TRAJECTORY_GROUND_TRUTH_DYNAMIC: ["traj_ground_truth", "plan_mask", "firstseg_mask"],
        PROVIDER_TRAJECTORY_GROUND_TRUTH_DYNAMIC_NOISY: ["traj_ground_truth", "plan_mask", "firstseg_mask"],
        PROVIDER_LANG_TEMPLATE: ["lm_mentioned_tplt", "side_mentioned_tplt"],
        PROVIDER_TOP_DOWN_IMAGES: ["top_down_images"],
        PROVIDER_ROT_TOP_DOWN: ["top_down_images", "traj_ground_truth"],
        PROVIDER_LANDMARKS_MENTIONED: ["lang_lm_mentioned", "lang_lm_mentioned_indices", "lang_lm_mentioned_names"],
        PROVIDER_TRAJ_HISTORY: ["past_trajectory_map"],
        PROVIDER_NOISY_POSES: ["noisy_poses"],
        PROVIDER_START_POSES: ["start_poses"],
    }
    label_names = []
    for provider in aux_provider_names:
        # Unknown providers contribute no labels, matching the original elif chain.
        label_names += labels_by_provider.get(provider, [])
    return label_names
def get_stackable_label_names(aux_provider_names):
    """
    Returns a list of label names that can be stacked as tensors within the collate function.
    Some labels are variable length, some are lists and can't be trivially stacked.
    This should basically include all data that's in form of uniform-length tensors
    :param aux_provider_names: list of PROVIDER_* constants
    :return: list of label name strings
    """
    stackable_by_provider = {
        PROVIDER_LANG_TEMPLATE: ["lm_mentioned_tplt", "side_mentioned_tplt"],
        PROVIDER_TOP_DOWN_IMAGES: ["top_down_images"],
        PROVIDER_TRAJECTORY_GROUND_TRUTH_STATIC: ["traj_ground_truth"],
        PROVIDER_TRAJECTORY_GROUND_TRUTH_DYNAMIC: ["traj_ground_truth"],
        PROVIDER_TRAJECTORY_GROUND_TRUTH_DYNAMIC_NOISY: ["traj_ground_truth"],
        PROVIDER_ROT_TOP_DOWN: ["top_down_images", "traj_ground_truth"],
        PROVIDER_LANDMARKS_MENTIONED: ["lang_lm_mentioned"],
        PROVIDER_TRAJ_HISTORY: ["past_trajectory_map"],
        # PROVIDER_NOISY_POSES and PROVIDER_START_POSES produce non-stackable
        # pose objects and intentionally contribute nothing here.
    }
    label_names = []
    for provider in aux_provider_names:
        label_names += stackable_by_provider.get(provider, [])
    return label_names
|
gandis0713/vtk-js
|
Sources/Rendering/OpenGL/SphereMapper/index.js
|
import { mat4 } from 'gl-matrix';
import { ObjectType } from 'vtk.js/Sources/Rendering/OpenGL/BufferObject/Constants';
import macro from 'vtk.js/Sources/macro';
import vtkBufferObject from 'vtk.js/Sources/Rendering/OpenGL/BufferObject';
import * as vtkMath from 'vtk.js/Sources/Common/Core/Math';
import vtkShaderProgram from 'vtk.js/Sources/Rendering/OpenGL/ShaderProgram';
import vtkOpenGLPolyDataMapper from 'vtk.js/Sources/Rendering/OpenGL/PolyDataMapper';
import vtkSphereMapperVS from 'vtk.js/Sources/Rendering/OpenGL/glsl/vtkSphereMapperVS.glsl';
import vtkPolyDataFS from 'vtk.js/Sources/Rendering/OpenGL/glsl/vtkPolyDataFS.glsl';
const { vtkErrorMacro } = macro;
// ----------------------------------------------------------------------------
// vtkOpenGLSphereMapper methods
// ----------------------------------------------------------------------------
/**
 * vtkOpenGLSphereMapper renders points as sphere "impostors": each point is
 * expanded into a single screen-facing triangle whose fragment shader
 * ray-casts a sphere, producing correct shading and per-pixel depth without
 * sphere geometry. Attaches its methods onto the given publicAPI/model pair.
 */
function vtkOpenGLSphereMapper(publicAPI, model) {
  // Set our className
  model.classHierarchy.push('vtkOpenGLSphereMapper');

  // Capture 'parentClass' api for internal use
  const superClass = { ...publicAPI };

  // Use the sphere-impostor vertex shader with the standard poly-data
  // fragment shader (which is then patched in replaceShaderValues below).
  publicAPI.getShaderTemplate = (shaders, ren, actor) => {
    shaders.Vertex = vtkSphereMapperVS;
    shaders.Fragment = vtkPolyDataFS;
    shaders.Geometry = '';
  };

  // Patch the template shaders: inject the uniforms/varyings the impostor
  // needs and replace the depth/normal sections with the ray-cast code.
  publicAPI.replaceShaderValues = (shaders, ren, actor) => {
    let VSSource = shaders.Vertex;
    let FSSource = shaders.Fragment;

    VSSource = vtkShaderProgram.substitute(VSSource, '//VTK::Camera::Dec', [
      'uniform mat4 VCPCMatrix;\n',
      'uniform mat4 MCVCMatrix;',
    ]).result;

    FSSource = vtkShaderProgram.substitute(FSSource, '//VTK::PositionVC::Dec', [
      'varying vec4 vertexVCVSOutput;',
    ]).result;

    // we create vertexVC below, so turn off the default
    // implementation
    FSSource = vtkShaderProgram.substitute(
      FSSource,
      '//VTK::PositionVC::Impl',
      ['vec4 vertexVC = vertexVCVSOutput;\n']
    ).result;

    // for lights kit and positional the VCPC matrix is already defined
    // so don't redefine it
    const replacement = [
      'uniform float invertedDepth;\n',
      'uniform int cameraParallel;\n',
      'varying float radiusVCVSOutput;\n',
      'varying vec3 centerVCVSOutput;\n',
      'uniform mat4 VCPCMatrix;\n',
    ];
    FSSource = vtkShaderProgram.substitute(
      FSSource,
      '//VTK::Normal::Dec',
      replacement
    ).result;

    // Per-fragment depth write: WebGL1 needs the EXT_frag_depth extension;
    // WebGL2 has gl_FragDepth built in (checked second so it takes priority).
    let fragString = '';
    if (model.context.getExtension('EXT_frag_depth')) {
      fragString = 'gl_FragDepthEXT = (pos.z / pos.w + 1.0) / 2.0;\n';
    }
    if (model.openGLRenderWindow.getWebgl2()) {
      fragString = 'gl_FragDepth = (pos.z / pos.w + 1.0) / 2.0;\n';
    }
    // GLSL ray/sphere intersection: solve the quadratic for the eye ray
    // against a unit sphere (coordinates pre-scaled by the radius), discard
    // misses, and derive the normal and view-coordinate hit point.
    FSSource = vtkShaderProgram.substitute(FSSource, '//VTK::Depth::Impl', [
      // compute the eye position and unit direction
      ' vec3 EyePos;\n',
      ' vec3 EyeDir;\n',
      ' if (cameraParallel != 0) {\n',
      ' EyePos = vec3(vertexVC.x, vertexVC.y, vertexVC.z + 3.0*radiusVCVSOutput);\n',
      ' EyeDir = vec3(0.0,0.0,-1.0); }\n',
      ' else {\n',
      ' EyeDir = vertexVC.xyz;\n',
      ' EyePos = vec3(0.0,0.0,0.0);\n',
      ' float lengthED = length(EyeDir);\n',
      ' EyeDir = normalize(EyeDir);\n',
      // we adjust the EyePos to be closer if it is too far away
      // to prevent floating point precision noise
      ' if (lengthED > radiusVCVSOutput*3.0) {\n',
      ' EyePos = vertexVC.xyz - EyeDir*3.0*radiusVCVSOutput; }\n',
      ' }\n',
      // translate to Sphere center
      ' EyePos = EyePos - centerVCVSOutput;\n',
      // scale to radius 1.0
      ' EyePos = EyePos/radiusVCVSOutput;\n',
      // find the intersection
      ' float b = 2.0*dot(EyePos,EyeDir);\n',
      ' float c = dot(EyePos,EyePos) - 1.0;\n',
      ' float d = b*b - 4.0*c;\n',
      ' vec3 normalVCVSOutput = vec3(0.0,0.0,1.0);\n',
      ' if (d < 0.0) { discard; }\n',
      ' else {\n',
      ' float t = (-b - invertedDepth*sqrt(d))*0.5;\n',
      // compute the normal, for unit sphere this is just
      // the intersection point
      ' normalVCVSOutput = invertedDepth*normalize(EyePos + t*EyeDir);\n',
      // compute the intersection point in VC
      ' vertexVC.xyz = normalVCVSOutput*radiusVCVSOutput + centerVCVSOutput;\n',
      ' }\n',
      // compute the pixel's depth
      // ' normalVCVSOutput = vec3(0,0,1);\n'
      ' vec4 pos = VCPCMatrix * vertexVC;\n',
      fragString,
    ]).result;

    // Strip out the normal line -- the normal is computed as part of the depth
    FSSource = vtkShaderProgram.substitute(FSSource, '//VTK::Normal::Impl', '')
      .result;

    if (model.haveSeenDepthRequest) {
      // special depth impl
      // Encodes the computed depth into the red/green channels as a 16-bit
      // value when a depth-image render is requested.
      FSSource = vtkShaderProgram.substitute(FSSource, '//VTK::ZBuffer::Impl', [
        'if (depthRequest == 1) {',
        'float computedZ = (pos.z / pos.w + 1.0) / 2.0;',
        'float iz = floor(computedZ * 65535.0 + 0.1);',
        'float rf = floor(iz/256.0)/255.0;',
        'float gf = mod(iz,256.0)/255.0;',
        'gl_FragData[0] = vec4(rf, gf, 0.0, 1.0); }',
      ]).result;
    }

    shaders.Vertex = VSSource;
    shaders.Fragment = FSSource;

    superClass.replaceShaderValues(shaders, ren, actor);
  };

  // Binds the per-vertex corner offsets and the invertedDepth uniform, then
  // defers to the parent class for the remaining mapper parameters.
  publicAPI.setMapperShaderParameters = (cellBO, ren, actor) => {
    // Rebind the offset attribute only when the VBO or shader changed since
    // the attributes were last updated, and only if the shader uses it.
    if (
      cellBO.getCABO().getElementCount() &&
      (model.VBOBuildTime > cellBO.getAttributeUpdateTime().getMTime() ||
        cellBO.getShaderSourceTime().getMTime() >
          cellBO.getAttributeUpdateTime().getMTime()) &&
      cellBO.getProgram().isAttributeUsed('offsetMC')
    ) {
      if (
        !cellBO.getVAO().addAttributeArray(
          cellBO.getProgram(),
          cellBO.getCABO(),
          'offsetMC',
          12, // 12:this->VBO->ColorOffset+sizeof(float)
          cellBO.getCABO().getStride(),
          model.context.FLOAT,
          2,
          false
        )
      ) {
        vtkErrorMacro("Error setting 'offsetMC' in shader VAO.");
      }
    }
    if (cellBO.getProgram().isUniformUsed('invertedDepth')) {
      cellBO
        .getProgram()
        .setUniformf('invertedDepth', model.invert ? -1.0 : 1.0);
    }
    superClass.setMapperShaderParameters(cellBO, ren, actor);
  };

  // Uploads the camera matrices (view->projection and model->view) and the
  // parallel-projection flag used by the fragment shader's ray setup.
  publicAPI.setCameraShaderParameters = (cellBO, ren, actor) => {
    const program = cellBO.getProgram();
    const cam = ren.getActiveCamera();
    const keyMats = model.openGLCamera.getKeyMatrices(ren);
    if (program.isUniformUsed('VCPCMatrix')) {
      program.setUniformMatrix('VCPCMatrix', keyMats.vcpc);
    }
    if (program.isUniformUsed('MCVCMatrix')) {
      if (!actor.getIsIdentity()) {
        // Fold the actor's model matrix into world->view before uploading.
        const actMats = model.openGLActor.getKeyMatrices();
        const tmp4 = new Float64Array(16);
        mat4.multiply(tmp4, keyMats.wcvc, actMats.mcwc);
        program.setUniformMatrix('MCVCMatrix', tmp4);
      } else {
        program.setUniformMatrix('MCVCMatrix', keyMats.wcvc);
      }
    }
    if (program.isUniformUsed('cameraParallel')) {
      cellBO
        .getProgram()
        .setUniformi('cameraParallel', cam.getParallelProjection());
    }
  };

  // Every sphere impostor is drawn as triangles regardless of representation.
  publicAPI.getOpenGLMode = (rep, type) => model.context.TRIANGLES;

  // Packs one triangle (3 vertices) per input point into the VBO. Each vertex
  // carries the point position plus a 2D corner offset; colors, if present,
  // are packed into a separate byte VBO.
  publicAPI.buildBufferObjects = (ren, actor) => {
    const poly = model.currentInput;

    if (poly === null) {
      return;
    }

    model.renderable.mapScalars(poly, 1.0);
    const c = model.renderable.getColorMapColors();

    const vbo = model.primitives[model.primTypes.Tris].getCABO();

    const pointData = poly.getPointData();
    const points = poly.getPoints();
    const numPoints = points.getNumberOfPoints();
    const pointArray = points.getData();

    const pointSize = 5; // x,y,z,orientation1,orientation2
    let scales = null;

    // Optional per-point radius array overrides the mapper's uniform radius.
    if (
      model.renderable.getScaleArray() != null &&
      pointData.hasArray(model.renderable.getScaleArray())
    ) {
      scales = pointData.getArray(model.renderable.getScaleArray()).getData();
    }

    let colorData = null;
    let colorComponents = 0;
    let packedUCVBO = null;
    if (c) {
      colorComponents = c.getNumberOfComponents();
      vbo.setColorOffset(0);
      vbo.setColorBOStride(4);
      colorData = c.getData();
      // 3 vertices per point, 4 bytes (RGBA) per vertex.
      packedUCVBO = new Uint8Array(3 * numPoints * 4);
      if (!vbo.getColorBO()) {
        vbo.setColorBO(vtkBufferObject.newInstance());
      }
      vbo.getColorBO().setOpenGLRenderWindow(model.openGLRenderWindow);
    } else if (vbo.getColorBO()) {
      vbo.setColorBO(null);
    }
    vbo.setColorComponents(colorComponents);

    // 5 floats per vertex, 3 vertices per point.
    const packedVBO = new Float32Array(pointSize * numPoints * 3);
    vbo.setStride(pointSize * 4);
    const cos30 = Math.cos(vtkMath.radiansFromDegrees(30.0));
    let pointIdx = 0;
    let colorIdx = 0;

    //
    // Generate points and point data for sides
    //
    let vboIdx = 0;
    let ucIdx = 0;
    for (let i = 0; i < numPoints; ++i) {
      let radius = model.renderable.getRadius();
      if (scales) {
        radius = scales[i];
      }

      // The three corner offsets (-2r*cos30,-r), (2r*cos30,-r), (0,2r) form an
      // equilateral triangle large enough to contain the sphere's projection.
      pointIdx = i * 3;
      packedVBO[vboIdx++] = pointArray[pointIdx++];
      packedVBO[vboIdx++] = pointArray[pointIdx++];
      packedVBO[vboIdx++] = pointArray[pointIdx++];
      packedVBO[vboIdx++] = -2.0 * radius * cos30;
      packedVBO[vboIdx++] = -radius;
      if (colorData) {
        colorIdx = i * colorComponents;
        packedUCVBO[ucIdx++] = colorData[colorIdx];
        packedUCVBO[ucIdx++] = colorData[colorIdx + 1];
        packedUCVBO[ucIdx++] = colorData[colorIdx + 2];
        packedUCVBO[ucIdx++] = colorData[colorIdx + 3];
      }

      pointIdx = i * 3;
      packedVBO[vboIdx++] = pointArray[pointIdx++];
      packedVBO[vboIdx++] = pointArray[pointIdx++];
      packedVBO[vboIdx++] = pointArray[pointIdx++];
      packedVBO[vboIdx++] = 2.0 * radius * cos30;
      packedVBO[vboIdx++] = -radius;
      if (colorData) {
        packedUCVBO[ucIdx++] = colorData[colorIdx];
        packedUCVBO[ucIdx++] = colorData[colorIdx + 1];
        packedUCVBO[ucIdx++] = colorData[colorIdx + 2];
        packedUCVBO[ucIdx++] = colorData[colorIdx + 3];
      }

      pointIdx = i * 3;
      packedVBO[vboIdx++] = pointArray[pointIdx++];
      packedVBO[vboIdx++] = pointArray[pointIdx++];
      packedVBO[vboIdx++] = pointArray[pointIdx++];
      packedVBO[vboIdx++] = 0.0;
      packedVBO[vboIdx++] = 2.0 * radius;
      if (colorData) {
        packedUCVBO[ucIdx++] = colorData[colorIdx];
        packedUCVBO[ucIdx++] = colorData[colorIdx + 1];
        packedUCVBO[ucIdx++] = colorData[colorIdx + 2];
        packedUCVBO[ucIdx++] = colorData[colorIdx + 3];
      }
    }
    vbo.setElementCount(vboIdx / pointSize);
    vbo.upload(packedVBO, ObjectType.ARRAY_BUFFER);
    if (c) {
      vbo.getColorBO().upload(packedUCVBO, ObjectType.ARRAY_BUFFER);
    }
    model.VBOBuildTime.modified();
  };
}
// ----------------------------------------------------------------------------
// Object factory
// ----------------------------------------------------------------------------

// Default model values for new instances; the sphere mapper adds no
// properties beyond those of vtkOpenGLPolyDataMapper.
const DEFAULT_VALUES = {};

// ----------------------------------------------------------------------------
/**
 * Decorates a publicAPI/model pair with vtkOpenGLSphereMapper behavior:
 * defaults are applied first, then the parent poly-data mapper is extended,
 * and finally the sphere-mapper methods are attached.
 */
export function extend(publicAPI, model, initialValues = {}) {
  Object.assign(model, { ...DEFAULT_VALUES, ...initialValues });

  // Inherit the generic OpenGL poly-data mapper API
  vtkOpenGLPolyDataMapper.extend(publicAPI, model, initialValues);

  // Attach the sphere-mapper specific methods
  vtkOpenGLSphereMapper(publicAPI, model);
}
// ----------------------------------------------------------------------------

// Factory: creates a fresh vtkOpenGLSphereMapper instance.
export const newInstance = macro.newInstance(extend, 'vtkOpenGLSphereMapper');

// ----------------------------------------------------------------------------

export default { newInstance, extend };
|
acgist/demo
|
esc/esc-eureka-gateway/src/main/java/com/acgist/config/FilterConfig.java
|
package com.acgist.config;
import org.springframework.context.annotation.Configuration;
@Configuration
public class FilterConfig {

	// Zuul filter configuration. All bean definitions below are kept as
	// commented-out reference code for non-annotation-based registration.

	// Register the filter manually when annotation-based registration is not used.
//	@Bean
//	public AuthoFilter authoFilter() {
//		return new AuthoFilter();
//	}

//	// Automatically scan the filter/pre and filter/post directories to load Groovy filters.
//	@Bean
//	@Autowired
//	public FilterLoader filterLoader(ZuulFilterConfig config) {
//		FilterLoader loader = FilterLoader.getInstance();
//		loader.setCompiler(new GroovyCompiler());
//		FilterFileManager.setFilenameFilter(new GroovyFileFilter());
//		try {
//			FilterFileManager.init(config.getInterval(), config.getRoot() + "/pre", config.getRoot() + "/post");
//		} catch (Exception e) {
//			// TODO: proper exception handling
//			throw new RuntimeException(e);
//		}
//		return loader;
//	}

}
|
SoftwareVerde/dev-tokens
|
src/main/java/com/softwareverde/bitcoindotcom/AddressUtxos.java
|
package com.softwareverde.bitcoindotcom;
import com.softwareverde.bitcoin.address.Address;
import com.softwareverde.bitcoin.address.AddressInflater;
import com.softwareverde.cryptography.hash.sha256.Sha256Hash;
import com.softwareverde.json.Json;
import com.softwareverde.logging.Logger;
import java.util.ArrayList;
import java.util.List;
/**
 * The set of unspent transaction outputs (UTXOs) for a single address, as
 * parsed from a bitcoin.com-style JSON response.
 */
public class AddressUtxos {
    protected List<Utxo> _utxos;                 // unspent outputs for the address
    protected Address _address;                  // the address (parsed from "legacyAddress")
    protected String _lockingScriptHex;          // locking script ("scriptPubKey") as hex
    protected String _lockingScriptAssembly;     // locking script in assembly form ("asm")

    /**
     * Parses an AddressUtxos from the given JSON.
     * Expected shape: { "utxos": [...], "legacyAddress": ..., "scriptPubKey": ..., "asm": ... }.
     * Returns null (after logging a warning) if "utxos" is missing, not an
     * array, or contains a non-object entry.
     */
    public static AddressUtxos fromJson(final Json json) {
        final AddressUtxos addressUtxos = new AddressUtxos();

        final Json utxosJson = json.getOrNull("utxos", Json.Types.ARRAY);
        if (utxosJson == null) {
            Logger.warn("Invalid UTXO List JSON: expected utxos to be an array.");
            return null;
        }

        final List<Utxo> utxos = new ArrayList<Utxo>();
        for (int i = 0; i < utxosJson.length(); ++i) {
            final Json utxoJson = utxosJson.getOrNull(i, Json.Types.OBJECT);
            if (utxoJson == null) {
                Logger.warn("Invalid UTXO List JSON: null UTXO object in position " + i);
                return null;
            }

            // Per-UTXO fields from the API response.
            final String transactionHashString = utxoJson.getOrNull("txid", Json.Types.STRING);
            final Integer outputIndex = utxoJson.getOrNull("vout", Json.Types.INTEGER);
            final Double amount = utxoJson.getOrNull("amount", Json.Types.DOUBLE);
            final Long satoshis = utxoJson.getOrNull("satoshis", Json.Types.LONG);
            final Long blockHeight = utxoJson.getOrNull("height", Json.Types.LONG);
            final Long confirmations = utxoJson.getOrNull("confirmations", Json.Types.LONG);

            final Sha256Hash transactionHash = Sha256Hash.fromHexString(transactionHashString);

            final Utxo utxo = new Utxo();
            utxo.setTransactionHash(transactionHash);
            utxo.setOutputIndex(outputIndex);
            utxo.setAmount(amount);
            utxo.setSatoshis(satoshis);
            utxo.setBlockHeight(blockHeight);
            utxo.setConfirmations(confirmations);
            utxos.add(utxo);
        }

        final AddressInflater addressInflater = new AddressInflater();
        final String addressString = json.getOrNull("legacyAddress", Json.Types.STRING);

        addressUtxos._utxos = utxos;
        // NOTE(review): assumes "legacyAddress" is present; a missing value is
        // passed straight to fromBase58Check - confirm the inflater tolerates null.
        addressUtxos._address = addressInflater.fromBase58Check(addressString);
        addressUtxos._lockingScriptHex = json.getOrNull("scriptPubKey", Json.Types.STRING);
        addressUtxos._lockingScriptAssembly = json.getOrNull("asm", Json.Types.STRING);

        return addressUtxos;
    }

    /** The unspent outputs belonging to the address. */
    public List<Utxo> getUtxos() {
        return _utxos;
    }

    /** The address these UTXOs belong to (may be null if parsing failed). */
    public Address getAddress() {
        return _address;
    }

    /** The locking script as a hex string ("scriptPubKey"). */
    public String getLockingScriptHex() {
        return _lockingScriptHex;
    }

    /** The locking script in assembly form ("asm"). */
    public String getLockingScriptAssembly() {
        return _lockingScriptAssembly;
    }
}
|
aannex-ccri/geomesa
|
geomesa-accumulo/geomesa-accumulo-datastore/src/main/scala/org/locationtech/geomesa/accumulo/iterators/IteratorTrigger.scala
|
/***********************************************************************
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.accumulo.iterators
import com.typesafe.scalalogging.slf4j.Logging
import org.geotools.data.DataUtilities
import org.geotools.factory.Hints
import org.geotools.process.vector.TransformProcess
import org.locationtech.geomesa.accumulo.index.QueryHints._
import org.locationtech.geomesa.accumulo.index._
import org.locationtech.geomesa.utils.geotools.RichAttributeDescriptors.RichAttributeDescriptor
import org.locationtech.geomesa.utils.stats.IndexCoverage
import org.opengis.feature.`type`.AttributeDescriptor
import org.opengis.feature.simple.SimpleFeatureType
import org.opengis.filter.Filter
import org.opengis.filter.expression.{Function, PropertyName}
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
// ADT describing which iterator stack should be configured for a query.
sealed trait IteratorChoice

// spatio-temporal index choices
// Index-only scan: the requested data can be decoded from index entries alone.
case object IndexOnlyIterator extends IteratorChoice
// Full spatio-temporal scan: features must be fully decoded.
case object SpatioTemporalIterator extends IteratorChoice

// attribute index choices
// Attribute-index scan joined back against the record table.
case object RecordJoinIterator extends IteratorChoice

// iterator: which iterator stack to run
// hasTransformOrFilter: whether a transform and/or non-trivial ECQL filter must be applied
// transformCoversFilter: whether the filtered attributes are a subset of the transform's attributes
case class IteratorConfig(iterator: IteratorChoice, hasTransformOrFilter: Boolean, transformCoversFilter: Boolean)
object IteratorTrigger extends Logging {
/**
* Convenience class for inspecting simple feature types
*
*/
implicit class IndexAttributeNames(sft: SimpleFeatureType) {
def geoName = sft.getGeometryDescriptor.getLocalName
def attributeNameHandler(attributeKey: String, attributeDefault:String): Option[String] = {
// try to get the name from the user data, which may not exist, then check if the attribute exists
val nameFromUserData = Option(sft.getUserData.get(attributeKey)).map { _.toString }.filter { attributePresent }
// check if an attribute with this name(which was sometimes used) exists.
val nameFromOldDefault = Some(attributeKey).filter { attributePresent }
// check if an attribute with the default name exists
val nameFromCurrentDefault = Some(attributeDefault).filter { attributePresent }
nameFromUserData orElse nameFromOldDefault orElse nameFromCurrentDefault
}
def attributePresent(attributeKey: String): Boolean = Option(sft.getDescriptor(attributeKey)).isDefined
def indexAttributeNames = IndexValueEncoder.getIndexValueFields(sft)
}
/**
* Scans the ECQL, query, and sourceSFTspec and determines which Iterators should be configured.
*/
def chooseIterator(filter: Filter, ecql: Option[Filter], hints: Hints, sourceSFT: SimpleFeatureType): IteratorConfig = {
val transformsCoverFilter = doTransformsCoverFilters(hints, filter)
if (useIndexOnlyIterator(ecql, hints, sourceSFT)) {
// if the transforms cover the filtered attributes, we can decode into the transformed feature
// otherwise, we need to decode into the original feature, apply the filter, and then transform
IteratorConfig(IndexOnlyIterator, hasTransformOrFilter = false, transformsCoverFilter)
} else {
IteratorConfig(SpatioTemporalIterator, useSimpleFeatureFilteringIterator(ecql, hints), transformsCoverFilter)
}
}
/**
 * Scans the ECQL predicate and the transform definition in order to determine if only index
 * attributes are used/requested, and thus the IndexIterator can be used.
 *
 * @param ecqlPredicate optional ECQL filter from the query
 * @param hints query hints (consulted for the density flag and the transform definition)
 * @param sft the source feature type
 * @param indexedAttribute attribute backing an attribute index, if this is an attribute-index scan
 */
def useIndexOnlyIterator(ecqlPredicate: Option[Filter],
                         hints: Hints,
                         sft: SimpleFeatureType,
                         indexedAttribute: Option[String] = None): Boolean =
  if (hints.isDensityQuery) {
    // the Density Iterator is run in place of the SFFI. If it is requested we keep the SFFI
    // config in the stack, and do NOT run the IndexIterator.
    false
  } else if (indexedAttribute.exists(a => sft.getDescriptor(a).getIndexCoverage() == IndexCoverage.FULL)) {
    // the attribute index is a covering index, so we can use the index iterator regardless
    true
  } else {
    // get transforms if they exist
    val transformDefs = hints.getTransformDefinition
    // if a transform exists, check whether it is simple enough to be handled by the
    // IndexIterator; if no transform exists, default to Some(false)
    val isIndexTransform = transformDefs
      .map(tDef => isOneToOneIndexTransformation(tDef, sft, indexedAttribute))
      .orElse(Some(false))
    // if the ecql predicate exists, check that it is a trivial filter that does nothing
    val isPassThroughFilter = ecqlPredicate.map(passThroughFilter)
    // both checks (where present) must hold; an absent predicate imposes no constraint
    (isIndexTransform ++ isPassThroughFilter).forall(_ == true)
  }
/**
 * Tests whether the attributes being filtered on are a subset of the attribute transforms requested.
 * If so, then we can optimize by decoding each feature directly to the transformed spec, vs decoding
 * to the original spec and then transforming.
 *
 * @param hints query hints holding the (optional) transform definition
 * @param filter the filter whose attributes must be covered by the transform
 * @return true if there is no transform, or if every filtered attribute appears in the transform
 */
def doTransformsCoverFilters(hints: Hints, filter: Filter): Boolean =
  hints.getTransformDefinition.map { transformString =>
    val filterAttributes = getFilterAttributes(filter) // attributes we are filtering on
    val transforms: Seq[String] = // names of the attributes the transform contains
      TransformProcess.toDefinition(transformString).asScala
        .flatMap { _.expression match {
          // a plain property reference covers exactly that property
          case p if p.isInstanceOf[PropertyName] => Seq(p.asInstanceOf[PropertyName].getPropertyName)
          // a function transform covers the properties it takes as parameters
          case f if f.isInstanceOf[Function] =>
            f.asInstanceOf[Function].getParameters.asScala
              .collect {
                case p if p.isInstanceOf[PropertyName] => p.asInstanceOf[PropertyName].getPropertyName
              }
          // anything else covers nothing - log it so the fallback path is visible
          case u =>
            logger.warn(s"Unhandled transform: $u")
            Seq.empty
        }
      }
    filterAttributes.forall(transforms.contains(_))
  }.getOrElse(true)
/**
 * Scans the ECQL predicate, the transform definition and the density key to determine whether
 * the SimpleFeatureFilteringIterator (or the DensityIterator that takes its place) must be run.
 */
def useSimpleFeatureFilteringIterator(ecqlPredicate: Option[Filter], hints: Hints): Boolean = {
  // a transform always requires the SFFI
  val hasTransform = hints.getTransformDefinition.isDefined
  // a filter only matters if it is not a trivial pass-through
  val hasRealFilter = ecqlPredicate.exists(f => !passThroughFilter(f))
  // the Density Iterator is run in place of the SFFI, so a density query keeps the SFFI config
  hasTransform || hasRealFilter || hints.isDensityQuery
}
/**
 * Tests if the transformation is a one-to-one transform of index attributes:
 * this allows selection and renaming of index attributes only.
 */
def isOneToOneIndexTransformation(transformDefs: String,
                                  schema: SimpleFeatureType,
                                  indexedAttribute: Option[String]): Boolean = {
  // convert to TransformProcess definitions
  val definitions = TransformProcess.toDefinition(transformDefs).asScala
  // fields available in the index, plus the indexed attribute itself (if any)
  val indexFields = IndexValueEncoder.getIndexValueFields(schema) ++ indexedAttribute
  // every expression must be a bare reference to a non-multi-valued index field
  // (multi-valued attributes only get partially encoded in the index)
  definitions.forall { definition =>
    val name = definition.expression.toString
    indexFields.contains(name) && !schema.getDescriptor(name).isMultiValued
  }
}
/**
 * Tests if the filter is a trivial filter that does nothing, i.e. it references no attributes.
 */
def passThroughFilter(filter: Filter): Boolean = getFilterAttributes(filter).isEmpty
/**
 * Obtains the set of attribute names referenced by the filter.
 */
def getFilterAttributes(filter: Filter) = DataUtilities.attributeNames(filter).toSet
/**
 * Scans the ECQL, query, and sourceSFTspec and determines which Iterators should be configured
 * for an attribute-index scan.
 */
def chooseAttributeIterator(ecqlPredicate: Option[Filter],
                            hints: Hints,
                            sourceSFT: SimpleFeatureType,
                            indexedAttribute: String): IteratorConfig = {
  // if the transforms cover the filtered attributes, we can decode into the transformed feature
  // otherwise, we need to decode into the original feature, apply the filter, and then transform
  if (useIndexOnlyIterator(ecqlPredicate, hints, sourceSFT, Some(indexedAttribute))) {
    IteratorConfig(IndexOnlyIterator, hasTransformOrFilter = false, transformCoversFilter = true)
  } else {
    val hasEcqlOrTransform = useSimpleFeatureFilteringIterator(ecqlPredicate, hints)
    // only compute filter coverage when a filter/transform step will actually run
    val transformsCoverFilter = if (hasEcqlOrTransform) {
      doTransformsCoverFilters(hints, ecqlPredicate.getOrElse(Filter.INCLUDE))
    } else {
      true
    }
    IteratorConfig(RecordJoinIterator, hasEcqlOrTransform, transformsCoverFilter)
  }
}
/**
 * Determines if the given filter and transform can operate on index encoded values.
 */
def canUseIndexValues(sft: SimpleFeatureType,
                      filter: Option[Filter],
                      transform: Option[SimpleFeatureType]): Boolean = {
  // only build the index SFT if the transform check below actually needs it
  lazy val indexSft = IndexValueEncoder.getIndexSft(sft)
  // verify that the transform *does* exist and only contains fields in the index sft,
  // and that the filter *does not* exist or can be fulfilled by the index sft
  transform.exists(_.getAttributeDescriptors.map(_.getLocalName).forall(indexSft.indexOf(_) != -1)) &&
    filter.forall(supportsFilter(indexSft, _))
}
/**
 * Returns true if the filter can be evaluated successfully against the feature type,
 * i.e. every attribute it references exists in the type.
 */
def supportsFilter(sft: SimpleFeatureType, filter: Filter): Boolean =
  DataUtilities.attributeNames(filter).forall(sft.indexOf(_) != -1)
}
|
pylangstudy/201705
|
29/08/4.py
|
<filename>29/08/4.py
# Demonstrates when Python evaluates default parameter values:
# defaults are bound once, at function definition time.
v3 = 333
def Sum(value1, value2=200, value3=v3):
    # value3's default captured 333 above; rebinding v3 later has no effect.
    return value1 + value2 + value3
v3 = 999
print(Sum(10))  # 10 + 200 + 333 = 543, not 10 + 200 + 999
|
ifge-token/Waves
|
lang/shared/src/main/scala/com/wavesplatform/lang/v1/repl/Repl.scala
|
<gh_stars>1-10
package com.wavesplatform.lang.v1.repl
import cats.{Functor, Monoid}
import cats.implicits._
import com.wavesplatform.lang.v1.compiler.CompilerContext
import com.wavesplatform.lang.v1.evaluator.ctx.EvaluationContext
import com.wavesplatform.lang.v1.repl.node.http.NodeConnectionSettings
import com.wavesplatform.lang.v1.traits.Environment
import monix.execution.atomic.Atomic
import scala.concurrent.ExecutionContext.Implicits.{global => g}
import scala.concurrent.Future
/**
 * Interactive evaluator for expressions; declarations from earlier `execute`
 * calls stay visible to later ones via the Atomic state cell below.
 *
 * NOTE(review): the field name `lastСontext` contains a Cyrillic 'С' (U+0421),
 * not a Latin 'C'. Renaming it would change this public case-class field, so
 * it is deliberately kept as-is here - confirm before touching.
 */
case class Repl(
    settings: Option[NodeConnectionSettings] = None,
    lastСontext: (CompilerContext, EvaluationContext[Environment, Future]) =
      (CompilerContext.empty, Monoid[EvaluationContext[Environment, Future]].empty)
) {
  private val environment = buildEnvironment(settings)
  // seed state: the inherited context merged with the initial REPL context
  private val initialState = state(
    (
      lastСontext._1 |+| initialCtx.compilerContext,
      lastСontext._2 |+| initialCtx.evaluationContext(environment)
    ),
    view
  )
  private val currentState = Atomic(initialState)
  private val engine = new ReplEngine[Future]()

  // pairs a state with its precomputed view
  private def state[S, V](s: S, view: S => V): (S, V) = (s, view(s))
  private def view(ctx: (CompilerContext, EvaluationContext[Environment, Future])) = StateView(ctx._1)

  /** Resets the REPL to its initial state. */
  def clear(): Unit = currentState.set(initialState)

  /** Returns a REPL bound to new node settings, keeping the accumulated context. */
  def reconfigure(settings: NodeConnectionSettings): Repl =
    Repl(Some(settings), currentState.get()._1)

  /** Describes a single declaration by name. */
  def info(str: String): String = currentState.get()._2.declMap(str)

  /** Describes the whole accumulated context. */
  def totalInfo: String = currentState.get()._2.totalCtx

  /** Compiles and evaluates `expr`; on success the new context is stored, on error the old one is kept. */
  def execute(expr: String): Future[Either[String, String]] =
    perform(
      currentState,
      view,
      (oldCtx: (CompilerContext, EvaluationContext[Environment, Future])) =>
        engine.eval(expr, oldCtx._1, oldCtx._2).map {
          case Left(e) => (Left(e), oldCtx)
          case Right((r, newCtx)) => (Right(r), newCtx)
        }: Future[(Either[String, String], (CompilerContext, EvaluationContext[Environment, Future]))]
    )

  // applies `transition` to the current state and stores the resulting state + view
  private def perform[F[_] : Functor, S, R, V](
    value: Atomic[(S, V)],
    view: S => V,
    transition: S => F[(R, S)]
  ): F[R] = {
    val (current, _) = value.get()
    transition(current).map { case (result, next) =>
      value.set(state(next, view))
      result
    }
  }
}
|
lstyles/nsgflowlogsbeat
|
vendor/github.com/elastic/beats/vendor/golang.org/x/net/proxy/dial.go
|
<reponame>lstyles/nsgflowlogsbeat<gh_stars>1-10
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package proxy
import (
"context"
"net"
)
// A ContextDialer dials using a context.
type ContextDialer interface {
DialContext(ctx context.Context, network, address string) (net.Conn, error)
}
// Dial works like DialContext on net.Dialer but using a dialer returned by FromEnvironment.
//
// The passed ctx is only used for returning the Conn, not the lifetime of the Conn.
//
// Custom dialers (registered via RegisterDialerType) that do not implement ContextDialer
// can leak a goroutine for as long as it takes the underlying Dialer implementation to timeout.
//
// A Conn returned from a successful Dial after the context has been cancelled will be immediately closed.
func Dial(ctx context.Context, network, address string) (net.Conn, error) {
	dialer := FromEnvironment()
	// Prefer the context-aware path when the dialer supports it.
	if ctxDialer, ok := dialer.(ContextDialer); ok {
		return ctxDialer.DialContext(ctx, network, address)
	}
	// Otherwise adapt the blocking Dial to the context.
	return dialContext(ctx, dialer, network, address)
}
// dialContext adapts a plain Dialer to a context-aware dial by running the
// blocking Dial in a goroutine and racing it against ctx.
//
// WARNING: this can leak a goroutine for as long as the underlying Dialer implementation takes to timeout
// A Conn returned from a successful Dial after the context has been cancelled will be immediately closed.
func dialContext(ctx context.Context, d Dialer, network, address string) (net.Conn, error) {
	var (
		conn net.Conn
		done = make(chan struct{}, 1)
		err  error
	)
	go func() {
		conn, err = d.Dial(network, address)
		close(done)
		// If the caller already gave up, close the late connection so it is not leaked.
		if conn != nil && ctx.Err() != nil {
			conn.Close()
		}
	}()
	select {
	case <-ctx.Done():
		err = ctx.Err()
	case <-done:
	}
	return conn, err
}
|
codingJWilliams/Nightborn
|
api/api_helpers/getGuild.js
|
<filename>api/api_helpers/getGuild.js
// Resolves the "Nightborn Estate" guild from the client's guild collection.
// Uses the predicate form of Collection#find; the (key, value) overload used
// previously is deprecated in discord.js.
module.exports = (client) => client.guilds.find((guild) => guild.name === "Nightborn Estate");
|
pavanpej/indexing
|
secondary/indexer/plasma_enterprise.go
|
// +build !community
package indexer
// Copyright 2014-Present Couchbase, Inc.
//
// Use of this software is governed by the Business Source License included
// in the file licenses/BSL-Couchbase.txt. As of the Change Date specified
// in that file, in accordance with the Business Source License, use of this
// software will be governed by the Apache License, Version 2.0, included in
// the file licenses/APL2.txt.
import (
"fmt"
"github.com/couchbase/indexing/secondary/common"
"github.com/couchbase/plasma"
)
var errStorageCorrupted = fmt.Errorf("Storage corrupted and unrecoverable")
// NewPlasmaSlice creates a plasma-backed index slice for the given partition.
// It is a thin exported wrapper over the enterprise-only implementation.
func NewPlasmaSlice(storage_dir string, log_dir string, path string, sliceId SliceId, idxDefn common.IndexDefn,
	idxInstId common.IndexInstId, partitionId common.PartitionId,
	isPrimary bool, numPartitions int,
	sysconf common.Config, idxStats *IndexStats, indexerStats *IndexerStats, isNew bool) (*plasmaSlice, error) {
	return newPlasmaSlice(storage_dir, log_dir, path, sliceId,
		idxDefn, idxInstId, partitionId, isPrimary, numPartitions,
		sysconf, idxStats, indexerStats, isNew)
}
// DestroyPlasmaSlice removes the slice at the given path; forwards to the
// unexported enterprise implementation.
func DestroyPlasmaSlice(storageDir string, path string) error {
	return destroyPlasmaSlice(storageDir, path)
}
// ListPlasmaSlices forwards to the unexported enterprise implementation;
// presumably returns the on-disk slice paths - see listPlasmaSlices.
func ListPlasmaSlices() ([]string, error) {
	return listPlasmaSlices()
}
// BackupCorruptedPlasmaSlice backs up a corrupted slice using the supplied
// rename/clean callbacks; forwards to the unexported enterprise implementation.
func BackupCorruptedPlasmaSlice(storageDir string, prefix string, rename func(string) (string, error), clean func(string)) error {
	return backupCorruptedPlasmaSlice(storageDir, prefix, rename, clean)
}
// RecoveryDone notifies the plasma storage engine that recovery has completed.
func RecoveryDone() {
	plasma.RecoveryDone()
}
|
lietschie/crater-mobile
|
src/features/settings/containers/Company/index.js
|
<reponame>lietschie/crater-mobile<filename>src/features/settings/containers/Company/index.js<gh_stars>1-10
import React from 'react';
import { connect } from 'react-redux';
import { Company } from '../../components/Company';
import { reduxForm, getFormValues } from 'redux-form';
import { EDIT_COMPANY } from '../../constants';
import * as CompanyAction from '../../actions';
import { validate } from './validation';
import * as AddressAction from '../../../customers/actions';
// Maps redux state to the props consumed by the Company form screen.
const mapStateToProps = (state) => {
    const { settings, global, customers } = state;
    const { editCompanyInfoLoading, getCompanyInfoLoading } = settings.loading;
    const { countries, loading: { countriesLoading } } = customers;

    return {
        formValues: getFormValues(EDIT_COMPANY)(state) || {},
        language: global.language,
        editCompanyLoading: editCompanyInfoLoading,
        getCompanyInfoLoading,
        countries,
        countriesLoading,
    };
};
// Action creators bound to dispatch and injected as props.
const mapDispatchToProps = {
    editCompanyInformation: CompanyAction.editCompanyInformation,
    getCompanyInformation: CompanyAction.getCompanyInformation,
    getCountries: AddressAction.getCountries,
};
// Redux Forms: wrap the Company screen in the EDIT_COMPANY form with validation.
const CompanyReduxForm = reduxForm({
    form: EDIT_COMPANY,
    validate,
})(Company);

// connect: bind state and actions to the form-wrapped component.
const CompanyContainer = connect(
    mapStateToProps,
    mapDispatchToProps,
)(CompanyReduxForm);

// The screen renders its own header, so hide the navigator's default one.
CompanyContainer.navigationOptions = () => ({
    header: null,
});

export default CompanyContainer;
|
jkpubsrc/python-module-jk-console
|
examples/simpletable.py
|
#!/usr/bin/env python3
#
# This example demonstrates the use of SimpleTable with alignments, text transformations and colors.
#
import os
import sys
from jk_console import *

t = SimpleTable()

# Header row: addRow() returns a row object usable as a context manager,
# so several row properties can be set in one block.
with t.addRow("Key", "Value") as r:
    r.color = Console.ForeGround.STD_LIGHTGREEN
    #r.halign = SimpleTable.HALIGN_LEFT
    r.case = SimpleTable.CASE_UPPER
    r.hlineAfterRow = True

# Plain data rows.
t.addRow("Country", "Germany")
t.addRow("State", "Berlin")
t.addRow("Area", "891.7 km2 ")
t.addRow("State", "Berlin")
t.addRow("Elevation", "34 m")
t.addRow("Population", "3,748,148")
# Indexing the returned row yields a single cell, which can be styled directly.
t.addRow("Website", "www.berlin.de")[1].color = Console.ForeGround.STD_LIGHTCYAN

# First column: styled via the context-manager API ...
with t.column(0) as c:
    c.color = Console.ForeGround.STD_WHITE
    c.halign = SimpleTable.HALIGN_RIGHT
    c.vlineAfterColumn = True
# ... and via direct assignment, which overrides the STD_WHITE set just above.
# NOTE(review): presumably intentional, to demonstrate both APIs - confirm.
t.column(0).color = Console.ForeGround.STD_LIGHTBLUE

# Render the raw (unstyled) form, then the styled form with a tab prefix.
print()
print(t.raw())
print()
t.print(prefix = "\t")
print()
|
balinterdi/cardstack
|
packages/tools/addon/helpers/cs-card-meta-caption.js
|
import Helper from '@ember/component/helper';
import { inject as service } from '@ember/service';
export default Helper.extend({
  data: service('cardstack-data'),

  // Page models show the caption as-is; all other models are prefixed
  // with their human-readable id ("<human-id>: <caption>").
  compute([model, caption, isPageModel]) {
    if (isPageModel) {
      return caption;
    }
    const prefix = this.get('data').getCardMeta(model, 'human-id');
    return `${prefix}: ${caption}`;
  }
});
|
SonicFreak94/ds4wizard
|
ds4wizard-device-toggle/main.cpp
|
<reponame>SonicFreak94/ds4wizard
#include <iostream>
#include <string>
#include <devicetoggle.h>
/**
 * Entry point: expects "--toggle-device <instance-id>" and toggles the named
 * device. Exit codes: 0 on success, -1 on missing arguments, -2 on an unknown
 * flag, -3 if toggling the device throws.
 */
int main(int argc, char** argv)
{
	if (argc < 3)
	{
		return -1;
	}

	// Compare via std::string (from <string>, already included); the previous
	// strcmp call relied on <cstring> being pulled in transitively.
	if (std::string(argv[1]) != "--toggle-device")
	{
		return -2;
	}

	const std::string instanceIdA = argv[2];

	// NOTE(review): widening char-by-char is only correct for ASCII instance
	// IDs - confirm IDs can never contain non-ASCII characters.
	const std::wstring instanceIdW(instanceIdA.begin(), instanceIdA.end());

	try
	{
		toggleDevice(instanceIdW);
	}
	catch (const std::exception& ex)
	{
		std::cout << ex.what() << std::endl;
		return -3;
	}

	return 0;
}
|
Quentincestino/brutal
|
sources/libs/bal/ipc/pack.h
|
#pragma once

#include <bal/abi.h>
#include <brutal/ds.h>

/* Growable buffer into which IPC messages are serialized ("packed"). */
typedef struct
{
    void *buf;    /* backing storage */
    size_t len;   /* size of buf in bytes - NOTE(review): presumably the capacity; confirm in pack.c */
    size_t curr;  /* current write offset into buf */
    BrMemObj obj; /* memory object backing buf */
} BalPack;

/* Packs one element pointed to by buf into self. */
typedef void BalPackFn(BalPack *self, void const *buf);

/* Lifecycle: initialize / release a pack buffer. */
void bal_pack_init(BalPack *self);
void bal_pack_deinit(BalPack *self);
/* Ensures the buffer can hold at least cap bytes. */
void bal_pack_ensure(BalPack *self, size_t cap);
/* Appends len raw bytes from buf. */
void bal_pack(BalPack *self, void const *buf, size_t len);
/* Scalar writers, one per wire type. */
void bal_pack_enum(BalPack *self, int const *s);
void bal_pack_size(BalPack *self, size_t const *s);
void bal_pack_s8(BalPack *self, int8_t const *s);
void bal_pack_u8(BalPack *self, uint8_t const *u);
void bal_pack_s16(BalPack *self, int16_t const *s);
void bal_pack_u16(BalPack *self, uint16_t const *u);
void bal_pack_s32(BalPack *self, int32_t const *s);
void bal_pack_u32(BalPack *self, uint32_t const *u);
void bal_pack_s64(BalPack *self, int64_t const *s);
void bal_pack_u64(BalPack *self, uint64_t const *u);
void bal_pack_f32(BalPack *self, float const *f);
void bal_pack_f64(BalPack *self, double const *f);
/* Strings and generic slices. */
void bal_pack_str(BalPack *self, Str const *str);
void bal_pack_slice_impl(BalPack *self, SliceImpl const *v, BalPackFn *el);
/* Convenience wrapper: packs SLICE, using EL to pack each element. */
#define bal_pack_slice(SELF, SLICE, EL) bal_pack_slice_impl(SELF, &slice_impl$(*(SLICE)), (BalPackFn *)EL)
|
ywtnhm/framework1
|
framework-cache/src/main/java/cn/vansky/framework/cache/redis/RedisCallBack.java
|
/*
* Copyright (C) 2015 CK, Inc. All Rights Reserved.
*/
package cn.vansky.framework.cache.redis;
import java.util.List;
/**
 * Created by IntelliJ IDEA.
 * Author: CK
 * Date: 2015/12/1
 *
 * Callback encapsulating one logical operation against redis, together with
 * its result, error state and retry preference.
 */
public interface RedisCallBack<T> {
    /**
     * do real operation to redis server
     *
     * @param client
     *            {@link RedisClient} list
     * @param read
     *            if read action
     * @param key
     *            the key being operated on
     * @param notifier
     *            {@link RedisClientStatusNotifier} instance
     * @return true on success, false on failure
     */
    boolean operation(List<RedisClient> client, boolean read, Object key, RedisClientStatusNotifier notifier);
    /**
     * Gets the operation type.
     *
     * @return the operation type
     */
    String getOptionType();
    /**
     * Gets the operation result.
     *
     * @return the operation result
     */
    T getResult();
    /**
     * Gets the exception raised by the operation; null on success.
     *
     * @return the operation exception
     */
    Exception getException();
    /**
     * get if support retry get on return null value
     *
     * @return true support retry get on return null value
     */
    boolean isNullValueReGet();
    /**
     * set if support retry get on return null value
     *
     * @param nullValueReGet
     *            true support retry get on return null value
     */
    void setNullValueReGet(boolean nullValueReGet);
}
|
JutelSiulad/Simulink2dL
|
Simulink2dL-Src/org.conqat.lib.simulink/external/commons-src/org/conqat/lib/commons/color/ColorUtils.java
|
<gh_stars>0
/*-------------------------------------------------------------------------+
| |
| Copyright 2005-2011 The ConQAT Project |
| |
| Licensed under the Apache License, Version 2.0 (the "License"); |
| you may not use this file except in compliance with the License. |
| You may obtain a copy of the License at |
| |
| http://www.apache.org/licenses/LICENSE-2.0 |
| |
| Unless required by applicable law or agreed to in writing, software |
| distributed under the License is distributed on an "AS IS" BASIS, |
| WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| See the License for the specific language governing permissions and |
| limitations under the License. |
+-------------------------------------------------------------------------*/
package org.conqat.lib.commons.color;
import java.awt.Color;
import java.util.function.Function;
import java.util.regex.Pattern;
import org.conqat.lib.commons.assertion.CCSMAssert;
import org.conqat.lib.commons.enums.EnumUtils;
/**
 * Methods for handling colors.
 */
public class ColorUtils {

	/**
	 * Pattern matching HTML colors of the form #RRGGBB (case-insensitive).
	 * Declared final: it is a shared constant and must never be reassigned.
	 */
	private static final Pattern HTML_COLOR_PATTERN = Pattern.compile("#[0-9a-f]{6}", Pattern.CASE_INSENSITIVE);

	/** Converts a color to a HTML color in the format #RRGGBB. */
	public static String toHtmlString(Color color) {
		// mask off the alpha byte before formatting
		return String.format("#%06X", color.getRGB() & 0xffffff);
	}

	/**
	 * Returns a color from a string. If the string could not be decoded, null
	 * is returned. This method supports the html color format (i.e. #RRGGBB),
	 * ConQAT colors prefixed with "ccsm-", and some predefined names (e.g.
	 * red, green, etc.).
	 */
	public static Color fromString(String s) {
		if (HTML_COLOR_PATTERN.matcher(s).matches()) {
			return Color.decode("0x" + s.substring(1));
		}
		if (s.startsWith("ccsm-")) {
			ECCSMColor color = EnumUtils.valueOfIgnoreCase(ECCSMColor.class, s.substring(5));
			if (color != null) {
				return color.getColor();
			}
		}
		EAWTColors color = EnumUtils.valueOfIgnoreCase(EAWTColors.class, s);
		if (color != null) {
			return color.getColor();
		}
		return null;
	}

	/** List of colors defined in AWT used as a lookup table. */
	private static enum EAWTColors {

		/** Red */
		RED(Color.RED),

		/** Green */
		GREEN(Color.GREEN),

		/** Blue */
		BLUE(Color.BLUE),

		/** Yellow */
		YELLOW(Color.YELLOW),

		/** Orange */
		ORANGE(Color.ORANGE),

		/** White */
		WHITE(Color.WHITE),

		/** Black */
		BLACK(Color.BLACK),

		/** Gray */
		GRAY(Color.GRAY),

		/** Cyan */
		CYAN(Color.CYAN),

		/** Magenta */
		MAGENTA(Color.MAGENTA);

		/** The actual color. */
		private final Color color;

		/** Constructor. */
		private EAWTColors(Color color) {
			this.color = color;
		}

		/** Returns the color for an enum constant. */
		public Color getColor() {
			return color;
		}
	}

	/**
	 * Blend together two colors, using the specified factor (in [0, 1]) to
	 * indicate the weight given to the first color.
	 */
	public static Color blend(double factor, Color color1, Color color2) {
		CCSMAssert.isTrue(factor >= 0 && factor <= 1, "Factor must be between 0 and 1 but is: " + factor);
		int r = getColorComponent(factor, color1, color2, Color::getRed);
		int g = getColorComponent(factor, color1, color2, Color::getGreen);
		int b = getColorComponent(factor, color1, color2, Color::getBlue);
		return new Color(r, g, b);
	}

	/** Linearly interpolates a single color component extracted by the given accessor. */
	protected static int getColorComponent(double factor, Color color1, Color color2,
			Function<Color, Integer> function) {
		return (int) Math.round(factor * function.apply(color1) + (1.0 - factor) * function.apply(color2));
	}
}
|
babyrobot-eu/core-modules
|
sums-of-numbers-game/prepare_sum_objects.py
|
<gh_stars>1-10
import pickle
from random import shuffle

# (start, end) ranges shuffled into a per-child exercise order.
sums = [(5, 105), (205, 305), (405, 1005), (1105, 1205), (1305, 1405)]

# Each dict stores a *copy* of the list: shuffle() mutates `sums` in place,
# so without the copy all three dicts would alias the same list and the
# later shuffles would silently rewrite the order held by `a` and `b`.
shuffle(sums)
a = {'sums': list(sums), 'current_sum': 0}
with open('child_data/child1.pkl', 'wb') as f:
    pickle.dump(obj=a, file=f)
print(a)

shuffle(sums)
b = {'sums': list(sums), 'current_sum': 0}
with open('child_data/child2.pkl', 'wb') as f:
    pickle.dump(obj=b, file=f)
print(b)

shuffle(sums)
c = {'sums': list(sums), 'current_sum': 0}
with open('child_data/child3.pkl', 'wb') as f:
    pickle.dump(obj=c, file=f)
print(c)
|
giosakti/kafka-cookbook
|
cookbooks/prometheus/attributes/kafka_exporter.rb
|
#
# Cookbook:: prometheus
# Attributes:: kafka_exporter
#
# Copyright:: 2018, BaritoLog.

# Kafka Exporter directory layout (rooted under the prometheus install dir)
default["kafka_exporter"]["dir"] = "#{node["prometheus"]["dir"]}/kafka_exporter"
default["kafka_exporter"]["log_dir"] = "#{node["prometheus"]["log_dir"]}"
default["kafka_exporter"]["binary"] = "#{node["kafka_exporter"]["dir"]}/kafka_exporter"

# Kafka Exporter release: version, tarball SHA-256 checksum and download URL
# (the URL interpolates the version attribute set just above)
default["kafka_exporter"]["version"] = "1.2.0"
default["kafka_exporter"]["checksum"] = "478e50e08a3104caaa2e60c7997936705b77ce8d187b83ab060de1c69d32fe13"
default["kafka_exporter"]["binary_url"] = "https://github.com/danielqsj/kafka_exporter/releases/download/v#{node["kafka_exporter"]["version"]}/kafka_exporter-#{node["kafka_exporter"]["version"]}.linux-amd64.tar.gz"

# Kafka Exporter command-line flags
default["kafka_exporter"]["flags"]["log.level"] = "info"
default["kafka_exporter"]["flags"]["kafka.server"] = "localhost:9092"
DennisHeimbigner/ast
|
src/test/java/unidata/ast/test/Enumtests.java
|
<gh_stars>0
package unidata.ast.test;
import unidata.ast.runtime.*;
import static unidata.ast.runtime.ASTRuntime.*;
import java.io.IOException;
public class Enumtests
{
    /**
     * Hand-rolled protobuf-style message with required (renum), optional
     * (oenum) and repeated (penum) fields of an enum type.
     */
    static public class Enumtest extends unidata.ast.runtime.AbstractMessage
    {
        /** Wire enum with explicit integer values 1..6. */
        static public enum Testenum {
            ECON1(1),
            ECON2(2),
            ECON3(3),
            ECON4(4),
            ECON5(5),
            ECON6(6);

            private final int value;

            public int getValue() {return value;}

            Testenum(int value) {this.value = value;}

            /** Maps a wire integer back to the enum constant; null if unknown. */
            static Testenum toEnum(int i) {
                switch (i) {
                case 1: return Testenum.ECON1;
                case 2: return Testenum.ECON2;
                case 3: return Testenum.ECON3;
                case 4: return Testenum.ECON4;
                case 5: return Testenum.ECON5;
                case 6: return Testenum.ECON6;
                default: return null;
                }
            }
        } //enum Testenum

        Testenum renum = null;   // required field (tag 1)
        Testenum oenum = null;   // optional field (tag 2)
        Testenum[] penum = null; // repeated field (tag 3)

        public Enumtest(ASTRuntime rt)
            throws IOException
        {
            super(rt);
        }

        public Enumtest(ASTRuntime rt,
                        Testenum renum,
                        Testenum oenum,
                        Testenum[] penum
                        )
        {
            super(rt);
            this.renum = renum;
            this.oenum = oenum;
            this.penum = penum;
        }

        /** Serializes this message: size prefix, then tagged fields. */
        public void
        write()
            throws IOException
        {
            int size = 0;
            size = getSize();
            write_size(size);
            write_tag(Sort.Ast_enum,1);
            write_primitive(Sort.Ast_enum,renum.getValue());
            if(oenum != null) {
                write_tag(Sort.Ast_enum,2);
                write_primitive(Sort.Ast_enum,oenum.getValue());
            }
            if(penum != null)
                for(int i=0;i<penum.length;i++) {
                    write_tag(Sort.Ast_enum,3);
                    write_primitive(Sort.Ast_enum,penum[i].getValue());
                }
        } /*Enumtest_write*/

        /** Deserializes into this message, skipping unknown fields. */
        public Enumtest
        read()
            throws IOException
        {
            int[] wiretype = new int[1];
            int[] fieldno = new int[1];
            int size = 0;
            int pos = 0;
            {int readsize = read_size();
            mark(readsize);}
            for(;;) {
                if(!read_tag(wiretype,fieldno)) break;
                switch (fieldno[0]) {
                case 1: { // renum
                    // NOTE(review): values are read as Ast_int32 but written as
                    // Ast_enum; presumably both share a wire format - confirm.
                    renum = Testenum.toEnum(read_primitive_int(Sort.Ast_int32));
                } break;
                case 2: { // oenum
                    oenum = Testenum.toEnum(read_primitive_int(Sort.Ast_int32));
                } break;
                case 3: { // penum
                    penum = (Testenum[])repeat_extend((Object)penum,Testenum.class);
                    penum[penum.length-1] = Testenum.toEnum(read_primitive_int(Sort.Ast_int32));
                } break;
                default:
                    skip_field(wiretype[0],fieldno[0]);
                } /*switch*/
            }/*for*/
            unmark();
            return this;
        } /*Enumtest_read*/

        /** Computes the encoded size of this message's fields. */
        public int
        getSize()
            throws IOException
        {
            int totalsize = 0;
            int fieldsize = 0;
            totalsize += getTagSize(Sort.Ast_packed,1);
            totalsize += getSize(Sort.Ast_enum,renum.getValue());
            if(oenum != null) {
                totalsize += getTagSize(Sort.Ast_packed,2);
                totalsize += getSize(Sort.Ast_enum,oenum.getValue());
            }
            // Fix: write() treats penum as optional, but the original loop
            // dereferenced it unconditionally and threw a NullPointerException
            // whenever the repeated field was absent.
            if(penum != null) {
                for(int i=0;i<penum.length;i++) {
                    totalsize += getTagSize(Sort.Ast_packed,3);
                    totalsize += getSize(Sort.Ast_enum,penum[i].getValue());
                }
            }
            return totalsize;
        } /*Enumtest.getSize*/
    }
}
|
ericvrp/PicturePerfect
|
src/all-albums/ome_willem.js
|
module.exports = {
name: '<NAME>',
pictures: [
{
image: {
link: 'http://67.media.tumblr.com/849bb567261b3aa1c463f8f21a28acd7/tumblr_inline_nia6r1DAe21s2n70p.jpg',
width: 500,
height: 281
},
thumbnail: {
link: 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcQHTqJar3n-rWyzxmbCgGfY9BuXP7m51ej6u0uPIq1VgBsXN-i1hqJMAO4',
width: 130,
height: 73
}
}
,
{
image: {
link: 'http://www.joepiedepoepie.nl/uploads/userfiles/images/OW%20persfoto%20arm.png',
width: 179,
height: 250
},
thumbnail: {
link: 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcQI_z40-5FjppP9gs0ijcJmM5iSYE3OBlMj1bY0PJuBd_3UQlAHy8ZKALc',
width: 79,
height: 111
}
}
,
{
image: {
link: 'http://www.mediasmarties.nl/media/uploads/producties/d/def/de%20film%20van%20ome%20willem.jpg',
width: 640,
height: 486
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcQWzNKdVVXL817WifjNVW5mSKJ4aqvjZPQ397sXs2FZTvWBKFeC1tl9Jo0',
width: 137,
height: 104
}
}
,
{
image: {
link: 'https://i.ytimg.com/vi/Dswrim8LMIM/hqdefault.jpg',
width: 480,
height: 360
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcT8qb5zmEGcm0ZrtS5kX4CV6SLYfUFXKz1DR1tRbLG4yLzUwa_r82gZqeKF',
width: 129,
height: 97
}
}
,
{
image: {
link: 'http://gerardjp.com/wp-content/uploads/2008/05/omewillem.jpg',
width: 789,
height: 800
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcTBNx4ro27g9vBzEKRzvx20s5-9Sy1eEYhE7wihdOZ3rJSZULf_7kpPIylx',
width: 141,
height: 143
}
}
,
{
image: {
link: 'http://fast.mediamatic.nl/f/tqgr/image/904/234793-500-500.jpg',
width: 500,
height: 500
},
thumbnail: {
link: 'https://encrypted-tbn3.gstatic.com/images?q=tbn:ANd9GcSnwrRVP5P7mNtODAoeJI_JiFd0oUyiqDcZlULepivNwL-EJ4hVJg48QctJiQ',
width: 130,
height: 130
}
}
,
{
image: {
link: 'https://i.ytimg.com/vi/iCs87Wru6ek/hqdefault.jpg',
width: 480,
height: 360
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcQoVtfAJmjyH7CyZdYCXYnxsg-sQ_kg6qPYwalwj9hGeI0Dh1bPfYJpL_U',
width: 129,
height: 97
}
}
,
{
image: {
link: 'https://pbs.twimg.com/profile_images/1363396536/OmeWillem.jpg',
width: 200,
height: 200
},
thumbnail: {
link: 'https://encrypted-tbn3.gstatic.com/images?q=tbn:ANd9GcTkGpZvTVxuw0fKJdX9oeAdb3nRNE_PAi7eo-nbxInzCNLAGN2BiqLScHk',
width: 104,
height: 104
}
}
,
{
image: {
link: 'http://www.123boekingen.nl/img-items/ome-willem.jpg',
width: 720,
height: 405
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcS6c-3HqOgp5jy5d_Crnz4cT69Gy3hMSZEudN91fAULBylCBVbommj0jfWt',
width: 140,
height: 79
}
}
,
{
image: {
link: 'http://vara.nl/data/image/i/1000/mod_media_image/1863.w1244.r1244-700.422df45.png',
width: 1244,
height: 700
},
thumbnail: {
link: 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcSHZ60CJwPNIhUYWkRM8fS2ztPnzwq-iDbHGglU6V-gLNWhXLFu8gkiHAE',
width: 150,
height: 84
}
}
,
{
image: {
link: 'https://s-media-cache-ak0.pinimg.com/originals/f3/67/2c/f3672c01ef03bba3a4e6f3fae7072ba1.jpg',
width: 768,
height: 432
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcSyTiIiyDdpEkCd0Iz3SQe0H1Ucr1V0NMxCCcKRpO3rtAmOfAftJeaUPYMX',
width: 142,
height: 80
}
}
,
{
image: {
link: 'http://www.mery.nl/edwinrutten/Afbeeldingen/Afbeelding3194.jpg',
width: 696,
height: 696
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcRA99NgppfK0TGq6RiBkNqSiijTPe4NLobJCNAIbGpzse1ws8NZsbJnMsDQNQ',
width: 139,
height: 139
}
}
,
{
image: {
link: 'http://www.alternatiefkostuum.nl/blog/images/ome_willem.jpg',
width: 280,
height: 160
},
thumbnail: {
link: 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcSFec1VNrcUoIzRbnaK1m46r6peHC2gODTQXhhcUw1yddE6d1SaYnsY17o',
width: 114,
height: 65
}
}
,
{
image: {
link: 'http://www.joepiedepoepie.nl/uploads/userfiles/images/Ome%20W%20shirt%20achter.png',
width: 350,
height: 350
},
thumbnail: {
link: 'https://encrypted-tbn3.gstatic.com/images?q=tbn:ANd9GcSWsjymqStnsrLcAbPXosHZJOpWpLg31v0QGRvfBz2HQkG4y6d-Ajao4Q',
width: 120,
height: 120
}
}
,
{
image: {
link: 'http://resolver.kb.nl/resolve?urn=urn:gvn:NAGO02:TIN-J1982-006&size=large',
width: 1500,
height: 2186
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcSI7McOaL4kktfaeRXlbwfwuKN8V1jLIOVYkxnYEdOHPtAWwe-3NB6uxqg',
width: 103,
height: 150
}
}
,
{
image: {
link: 'http://www.joepiedepoepie.nl/uploads/userfiles/images/Omewillem-shirt.jpg',
width: 245,
height: 200
},
thumbnail: {
link: 'https://encrypted-tbn3.gstatic.com/images?q=tbn:ANd9GcQ2JQo3xvOMSJU8nlUMD4ve6uBG2bfPKPxUecZEwkxC-_VupVtEHFqslx4',
width: 110,
height: 90
}
}
,
{
image: {
link: 'http://bin.snmmd.nl/m/2jaw6uuppgiw_wd1280.jpg/ome-willem-1943.jpg',
width: 1280,
height: 720
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcTNGKUOklfrOujrDNZhs7VwfRHvxWanx3ParEayPdYN5YkI6a8kXQ-J_y9h',
width: 150,
height: 84
}
}
,
{
image: {
link: 'http://radio.nl/i/810450/introtext_image/300/300/uit-het-media-archief-de-film-van-ome-willem',
width: 300,
height: 229
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcSv6ji4znQSOq2kGNyFbrMtkbYHouDyTVaeB1hO7f-MaHXQ3KgC_cKmWQ',
width: 116,
height: 89
}
}
,
{
image: {
link: 'http://tvblik.nl/afbeelding/programma-groot/de-film-van-ome-willem.jpg',
width: 200,
height: 150
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcRLfZl6PcUN-dA7chyYjgt3ZYqE_Uf_h2M87wpT0QbwPqXX2pqj2TPOcg',
width: 104,
height: 78
}
}
,
{
image: {
link: 'http://www.omewillem.nl/afbeeldingen/algemeen/flyer.jpg',
width: 212,
height: 300
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcSGYl1KbkAdBJMMgRys4ZYMfwP8y3QyHKJ7BomP1dH1T8lFxcwVK8XlOw',
width: 82,
height: 116
}
}
,
// {
// image: {
// link: 'http://tvenradiodb.nl/ie_fcache/0000002007_De-Film-van-Ome-Willem-19791103---Nieuwe-Kleren.jpg',
// width: 480,
// height: 384
// },
// thumbnail: {
// link: 'https://encrypted-tbn3.gstatic.com/images?q=tbn:ANd9GcTibxL17fcuQeVZQrnVU6Mb7lBC-L-hYNQqBwOJ3dsKym_CE_Br2vs3Z6Z_',
// width: 129,
// height: 103
// }
// }
// ,
{
image: {
link: 'http://images.fok.nl/upload/OmeW.gif',
width: 183,
height: 329
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcRGywwO2fPVTsgK4bRcwlj7c8LdEkiuGaOt-tBePWGBXmPDZMRoYywc6eA',
width: 66,
height: 119
}
}
,
{
image: {
link: 'https://i.ytimg.com/vi/MdbrRyL7oVA/hqdefault.jpg',
width: 480,
height: 360
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcQYiggtk0y9XBBpO3xWNfzUKM5liuwZWjlVf4_Dyb4QhX0RBurLQbVBviXR',
width: 129,
height: 97
}
}
,
{
image: {
link: 'https://s.s-bol.com/imgbase0/imagebase/large/FC/7/9/8/1/1002004000091897.jpg',
width: 550,
height: 677
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcRrfCqKArjBNASPyO3QBbShdpf5yzaCKYvbjs7EkctNyN0_6yZsLXXkJvk',
width: 113,
height: 139
}
}
,
{
image: {
link: 'http://fast.mediamatic.nl/f/tqgr/image/118/234791-500-500.jpg',
width: 500,
height: 500
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcRNXjXwA7PPpCLPWtWQzeoICeG-8uMSPLs43gMpko8oMzOGq2qVeWXrmUw8',
width: 130,
height: 130
}
}
,
{
image: {
link: 'https://s.s-bol.com/imgbase0/imagebase/large/FC/6/6/5/9/1002004000089566.jpg',
width: 432,
height: 615
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcRmZ6Yg3YEIwmRdc0GMUahBLALG-3WHJfKv9UT65Xq31bE5BakYDpcoWKA',
width: 96,
height: 136
}
}
,
{
image: {
link: 'http://resolver.kb.nl/resolve?urn=urn:gvn:NIBG01:89488-kb3-23&role=image&size=medium',
width: 750,
height: 526
},
thumbnail: {
link: 'https://encrypted-tbn3.gstatic.com/images?q=tbn:ANd9GcTQRyfmty4q4uUCBfPanmfNg1gg2olCt-4r0HPSNNtJ5NODW2jQF5wygQA',
width: 141,
height: 99
}
}
,
{
image: {
link: 'https://onsverleden.files.wordpress.com/2010/11/vara-ome-willem.jpg',
width: 620,
height: 466
},
thumbnail: {
link: 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcS3QNTB5RluypYwdRflFhdg7oDCPQCdLwHmdfBoTGrpK_bKfAY4TwF8PX6i',
width: 136,
height: 102
}
}
,
{
image: {
link: 'https://i.ytimg.com/vi/uVDGwdh9H0E/hqdefault.jpg',
width: 480,
height: 360
},
thumbnail: {
link: 'https://encrypted-tbn3.gstatic.com/images?q=tbn:ANd9GcSOhRNJx8jkabSF6c7ioCYLbSrq3Qo9-WEze_QYkldbtsVCbKr0W3iQ57Ef',
width: 129,
height: 97
}
}
,
{
image: {
link: 'http://www.justwebshop.nl/media/catalog/product/cache/1/image/9df78eab33525d08d6e5fb8d27136e95/2/4/2495294.jpg',
width: 1532,
height: 2161
},
thumbnail: {
link: 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcR-hfjK6sjAfmswLPlQLHHHLqaTOI20EpOFE3_Bj_bBKRlK7oD6o_Xmuqqa',
width: 106,
height: 150
}
}
,
{
image: {
link: 'http://www.omewillem.nl/afbeeldingen/wedstrijd/OmeWillem.jpg',
width: 448,
height: 336
},
thumbnail: {
link: 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcTF3JgS9BIhQlZGr0h6T2MzAOMGhCMXBg78vmklI5DNh3u5cfQoAZyCOQ',
width: 127,
height: 95
}
}
,
{
image: {
link: 'https://i.ytimg.com/vi/tTMf5mtyPcA/hqdefault.jpg',
width: 480,
height: 360
},
thumbnail: {
link: 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcQBnQqsemnHwraveqMBZ_VyDVzXyqI5qzHdboy3gjFrX5v49ELt3k3hImsw',
width: 129,
height: 97
}
}
,
{
image: {
link: 'https://ronnydeschepper.files.wordpress.com/2010/12/film_van_ome_willem.jpg',
width: 246,
height: 200
},
thumbnail: {
link: 'https://encrypted-tbn3.gstatic.com/images?q=tbn:ANd9GcTvaKzr9SVnv7huNd4nzyK_mZBd5izcw3RuyEca2v8KrRafXuqQMpLjAw',
width: 110,
height: 89
}
}
,
{
image: {
link: 'http://www.joepiedepoepie.nl/uploads/jopie_products_photos/21.jpg',
width: 350,
height: 350
},
thumbnail: {
link: 'https://encrypted-tbn3.gstatic.com/images?q=tbn:ANd9GcQ6qExDkyOFSIuYzemRFkd-7F9PgzNw3Og05cdZU809RPWxTYP87uxOVdo',
width: 120,
height: 120
}
}
,
{
image: {
link: 'https://pbs.twimg.com/profile_images/1363396536/OmeWillem_400x400.jpg',
width: 400,
height: 400
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcRo3BlJslRW-Y_6r7HjKHKTxRanAv18p_G0CO318VFuB0cIAaiLmyWNtw',
width: 124,
height: 124
}
}
,
// {
// image: {
// link: 'http://tvenradiodb.nl/ie_fcache/0000002008_De-Film-van-Ome-Willem-19740925---Warm-of-Koud-02.jpg',
// width: 480,
// height: 384
// },
// thumbnail: {
// link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcQfm9RN-72QDhRrxRfVTdYOsQnpreB-Aj4MVzpnM95foqgbREZUffmCavf2',
// width: 129,
// height: 103
// }
// }
// ,
{
image: {
link: 'https://www.waarkeekjijvroegernaar.nl/screenshots/albums/screenshots/Film_van_Ome_Willem/omewillem17.jpg',
width: 250,
height: 170
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcQ34mBu6kYN5YJi-JgHuSYaeSRgxloc1Mt62JfsME04rWVy1A6clSfVA1g',
width: 111,
height: 75
}
}
,
{
image: {
link: 'http://fast.mediamatic.nl/f/tqgr/image/215/234586-494-500.jpg',
width: 494,
height: 500
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcQBVG-xSNSuSQ_0VFMf7r8tcjhJMMFN3ozO_kc5OENoiOvctrwC34sn8yhIZQ',
width: 128,
height: 130
}
}
,
{
image: {
link: 'https://s.s-bol.com/imgbase0/imagebase/large/FC/7/6/5/9/1002004000089567.jpg',
width: 433,
height: 616
},
thumbnail: {
link: 'https://encrypted-tbn3.gstatic.com/images?q=tbn:ANd9GcSo52W2vtiZ1nY7IPV7LnfigaBYgzKO6LzcXyXLJc2JnwXBuqtXOYmBszU',
width: 96,
height: 136
}
}
,
{
image: {
link: 'https://i.ytimg.com/vi/QyTYrpqQe1Q/hqdefault.jpg',
width: 480,
height: 360
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcTbNftGTrgK4FjDQ4QytIb4wmVy9sS8dkrcwPi4DWj1iZTpTDyHXVz5iu8',
width: 129,
height: 97
}
}
,
{
image: {
link: 'http://www.joepiedepoepie.nl/uploads/userfiles/images/Ome%20W%20shirt%20voor.png',
width: 350,
height: 350
},
thumbnail: {
link: 'https://encrypted-tbn3.gstatic.com/images?q=tbn:ANd9GcRSwWADwp-lUtxGuG3B4_FDdVQGnuNUJAKTufBB2_BDJNDlPuWXKpPxQ5w',
width: 120,
height: 120
}
}
,
{
image: {
link: 'http://www.seriesvanvroeger.nl/images/screenshots/vihbfb_willem.jpg',
width: 416,
height: 351
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcR4O-q-laiKI_2p_eTRMRPgnYvHB5Ak17lJLEFC2hHDnH02V-Zo_YLZ-Q',
width: 125,
height: 105
}
}
,
{
image: {
link: 'http://www.mery.nl/edwinrutten/Afbeeldingen/Afbeelding3192.jpg',
width: 461,
height: 474
},
thumbnail: {
link: 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcQUNWLDnHttP2HAiTadV54rP0NpYFANmrEsVc_wlSLLdn3NtrjFqQZDTC94',
width: 125,
height: 129
}
}
,
{
image: {
link: 'http://www.boek-entertainment.nl/media/1065_ome_willem_2.jpg',
width: 600,
height: 450
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcTajFxDg608way14IgkMLsVRKv6jhRrUUMgYW8d1lEd2jkDnBULAUKfjZTJ',
width: 135,
height: 101
}
}
,
{
image: {
link: 'http://www.babyenkind.nl/typo3temp/pics/7af335ef2d.jpg',
width: 250,
height: 354
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcSw57gkBH14EmA7oCEkLMpuga7kszxqVyymws8IkzHMjpcrWMRgo0H6hUs',
width: 85,
height: 121
}
}
,
{
image: {
link: 'https://s.s-bol.com/imgbase0/imagebase/large/FC/8/9/8/1/1002004000091898.jpg',
width: 390,
height: 541
},
thumbnail: {
link: 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcSXSMWRZSVy_Z4yW4znDDPiz2EvXwNm6nn133D-SpyfYpF-p6ea_PIqVLEk',
width: 95,
height: 132
}
}
,
{
image: {
link: 'https://i.ytimg.com/vi/-Lg5rNuxerI/hqdefault.jpg',
width: 480,
height: 360
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcTGDcqGpk91NPdYxe6TIBSn132pXlCOIIefSfqjnhJAXuZdnoiN7xRivz-c',
width: 129,
height: 97
}
}
,
{
image: {
link: 'http://humortv.s3-eu-central-1.amazonaws.com/wp-content/uploads/original/tv-20090513-dwdd-jakhalzen_ome_willem_3-780x444.jpg',
width: 780,
height: 444
},
thumbnail: {
link: 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcSoDIvROSsd2Rjtv8OJ91tiZ-GyqjySRL4MNytc33mNoowlNUh4IrPWj-8',
width: 142,
height: 81
}
}
,
{
image: {
link: 'https://i.ytimg.com/vi/eXvWsOzXASU/maxresdefault.jpg',
width: 1920,
height: 1080
},
thumbnail: {
link: 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcS6CF3KTxHUimiy--DpU02dXeUHGPlYJVLwkoAvAmcjLbYtftmSZnBSrkA',
width: 150,
height: 84
}
}
,
{
image: {
link: 'http://fast.mediamatic.nl/f/tqgr/image/635/234093-500-500.jpg',
width: 500,
height: 500
},
thumbnail: {
link: 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcTYDp2kv95utCDLpbKBl2VEB0izESlBVd06Ynxiv1lc7d43d0vEO9f--q_C',
width: 130,
height: 130
}
}
,
{
image: {
link: 'http://www.joepiedepoepie.nl/uploads/userfiles/images/OmeWillemPet.jpg',
width: 204,
height: 200
},
thumbnail: {
link: 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcTcswWUWNKlscMXkMZs7hO4sQi4iF3Yk0NXZaOcgLzbDNHOJMtIFx5-Npc',
width: 105,
height: 103
}
}
,
{
image: {
link: 'http://www.omewillem.nl/afbeeldingen/wedstrijd/Kitty_&_Ome_Willem.jpg',
width: 359,
height: 450
},
thumbnail: {
link: 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcQYkz6sk37xvEFKg9Xlfl35XE_MU5viI_xzUncsEwqBZH9McD4W0O9oyQ',
width: 101,
height: 127
}
}
,
{
image: {
link: 'http://images2.images-speurders.nl/images/15/1507/150738102_1_big.jpg',
width: 519,
height: 692
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcQIB0YjJuDKyoMiRUqC0vecjQQIrtw5UqrUTNP3Ym_85hbU5W-hMGt32Vo',
width: 104,
height: 139
}
}
,
{
image: {
link: 'https://oscargriffioen.files.wordpress.com/2014/03/de-film-van-ome-willem.jpg',
width: 246,
height: 200
},
thumbnail: {
link: 'https://encrypted-tbn3.gstatic.com/images?q=tbn:ANd9GcT17d5LZrv0mDnWroVewYGTrB1sBAiv1xwg_Ol3BQ4ZTs28tEmKKEcDXA',
width: 110,
height: 89
}
}
,
{
image: {
link: 'http://www.omewillem.nl/afbeeldingen/wedstrijd/ome_willem_juni_2005_021.jpg',
width: 325,
height: 450
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcTGxmHpr16LOI5sCdNfOCqQF0bn8jhE3apNYc2w-Tn61G_A-A_ZLfmISpU',
width: 92,
height: 127
}
}
,
{
image: {
link: 'http://fast.mediamatic.nl/f/tqgr/image/812/234589-506-500.jpg',
width: 506,
height: 500
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcR6WT091iS0aM3uF8usLWNaD68PbVR9FTKQQs12lVQX_7XtXOflDGRDqjRdOg',
width: 131,
height: 129
}
}
,
{
image: {
link: 'https://i.ytimg.com/vi/PfY6QmrCrFI/hqdefault.jpg',
width: 480,
height: 360
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcSiozx0xrGVd4DmrsiAXXcqbZmkCHkfTW9IvP9X3gHSi4CppNQfFulSb4Kt',
width: 129,
height: 97
}
}
,
{
image: {
link: 'http://www.trotsemoeders.nl/wp-content/uploads/foto-4.jpg',
width: 2362,
height: 1569
},
thumbnail: {
link: 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcQF1MfamMH02AGCFXJZGDyWKnKRZfvDqwPsSbgjphHPI-YPkoyOwQuG2L22',
width: 150,
height: 100
}
}
,
{
image: {
link: 'https://i.ytimg.com/vi/cOiNOdPWpSw/hqdefault.jpg',
width: 480,
height: 360
},
thumbnail: {
link: 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcT9QJUy0ARPDHy1CVPHfZ9CVn2vEJCM0O7Fj1CIc2HOtR4u5vP9iTiD5DWy',
width: 129,
height: 97
}
}
,
{
image: {
link: 'http://wienkske.borsatoweblog.nl/upload/176_6mmors3t8qp.jpg',
width: 250,
height: 199
},
thumbnail: {
link: 'https://encrypted-tbn3.gstatic.com/images?q=tbn:ANd9GcRbBtLp-hVhO_AMqadHDPkxckKNGS5-0FGeJUiCTfRQHDEocXhqOO01hP4',
width: 111,
height: 88
}
}
,
{
image: {
link: 'https://i.ytimg.com/vi/fNRaJhi15W0/hqdefault.jpg',
width: 480,
height: 360
},
thumbnail: {
link: 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcQiCblByJoimTcCYJJvR1LB_7OF5IrY2lLYYnqbEvFD_DQvTxCGpP0G_-8',
width: 129,
height: 97
}
}
,
{
image: {
link: 'http://cdn.c.photoshelter.com/img-get2/I000034rgbL7mrf8/fit=1000x750/030924-017.jpg',
width: 1000,
height: 651
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcRBuLWzE-fNa3N9qI3RUNVx0saYMfYDtQ-nqZUmikz43QiC_xcY1Zfw1BC6',
width: 149,
height: 97
}
}
,
{
image: {
link: 'https://i.ytimg.com/vi/XYz3QeSaeis/hqdefault.jpg',
width: 480,
height: 360
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcREFmjot1HD8e7sISWE7bQKxUXyTtE3Ux9h7zT68L0lwWNqbfPbuYSTGW5i',
width: 129,
height: 97
}
}
,
{
image: {
link: 'http://www.seniorplaza.nl/images/old/copied/hfd._08_-_de_film_van_ome_willem_(1974).jpg',
width: 150,
height: 243
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcQnVK3VR7T0bA8zhAb91kFtW9VZ9vuAH0-FUzDszen-pALqTa3rXscAjw',
width: 68,
height: 110
}
}
,
{
image: {
link: 'http://0.static.upcoming.nl/static/images/fc0842f1b6_1427830787_In-Nederland-was-de-populariteit-van-Ome-Willem-op-zn-hoogtepunt__list-noup.jpg',
width: 774,
height: 773
},
thumbnail: {
link: 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcSL5F0lIU_RWCpYBl1AsUOJR_vvnn2APjtHNoFaSZbLDHePvbee0gHhyUuavQ',
width: 142,
height: 142
}
}
,
{
image: {
link: 'http://www.totaaltv.nl/content/images/20121022_224930_OmeWillem.jpg',
width: 588,
height: 328
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcT2qgpo95wcPNrtiyeLDFybFOjFPuUjIuzWcfRnfvT0zeBSFP9HSuNOeKSu',
width: 135,
height: 75
}
}
,
{
image: {
link: 'https://i.ytimg.com/vi/nBTAMqNBs-k/hqdefault.jpg',
width: 480,
height: 360
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcR8SMLfuPzJC5KjPeubm6iZgNOwF-O-E4XIRRTpknweD9qsQHedVlb_coft',
width: 129,
height: 97
}
}
,
{
image: {
link: 'http://fast.mediamatic.nl/f/tqgr/image/491/234906-500-500.jpg',
width: 500,
height: 500
},
thumbnail: {
link: 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcRRpC1u_Zaj4Uw1zg8aT3KKy56Ncm8h8KBH7m--rjsC8uGELhDsCNE1uhB-5A',
width: 130,
height: 130
}
}
,
{
image: {
link: 'http://cdn.c.photoshelter.com/img-get2/I0000OA5I41nunBY/fit=1000x750/030924-009.jpg',
width: 1000,
height: 651
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcRUhS4I1YPyJUJPl2LeFlhDx61Xpy0hlgVp-B3MRV0Pbcg97ac7-xwf6lI',
width: 149,
height: 97
}
}
,
{
image: {
link: 'http://www.joepiedepoepie.nl/uploads/jopie_products_photos/19.jpg',
width: 650,
height: 850
},
thumbnail: {
link: 'https://encrypted-tbn3.gstatic.com/images?q=tbn:ANd9GcS67tcU332oeIhI_J2Mtqn3law--LVBNmNDXYper1rxEw8P26CmueNSdH8',
width: 111,
height: 145
}
}
,
{
image: {
link: 'http://bin.snmmd.nl/m/acmwkvcp1tb8_std640.jpg',
width: 640,
height: 480
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcQfh_5wxCg-MiAk3jL48RWVEOTz_tqDRm228CsrDZE4suRdgK7FUgkQuhU',
width: 137,
height: 103
}
}
,
{
image: {
link: 'http://www.joepiedepoepie.nl/uploads/userfiles/images/omewillem-romper-voor.png',
width: 650,
height: 850
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcQRTHsqsnurlnn4pdtlMuEpQ-zkfexBlEz3NstLx-OURPz3W_rEbPBZaFk',
width: 111,
height: 145
}
}
,
{
image: {
link: 'http://2.bp.blogspot.com/-q7uj0SB-zL8/UysCv-f7XUI/AAAAAAAAHEg/3CD3E2FoAQ4/s1600/alig.jpeg',
width: 460,
height: 345
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcTI0qvH13eadZ42eCjm3gv5-pUCAv6sHTQ0Niazy3aLaGfo88vEbdMVJxGs',
width: 128,
height: 96
}
}
,
{
image: {
link: 'http://www.lc.nl/images/v2yaby-B82543473Z.1_20120912150733_000GM59VDPI.2.jpg/ALTERNATES/WIDE_768/B82543473Z.1_20120912150733_000+GM59VDPI.2.jpg',
width: 768,
height: 384
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcT1taEOYIFC6VLLwpV_qwj3ukzUEULyjdNTOmZHKJuvPkyxLkLB8M27idFx',
width: 142,
height: 71
}
}
,
{
image: {
link: 'http://www.omewillem.nl/afbeeldingen/fotos/DavidvDamDesaillyMammam4.7.06.jpg',
width: 520,
height: 348
},
thumbnail: {
link: 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcQbT4jZnUCgMyLafqTk1vRR94hSYqQTpypUmRzg3Ry9Udm6893gxHgQAhVw',
width: 131,
height: 88
}
}
,
{
image: {
link: 'http://www.omewillem.nl/archief/IMAGES/augustA.jpg',
width: 140,
height: 129
},
thumbnail: {
link: 'https://encrypted-tbn3.gstatic.com/images?q=tbn:ANd9GcR56H6LYMxG4ohmhaV4DGYDp_outm_zU3YwzlGo9Jvjbw1dyehqCAXyjw',
width: 93,
height: 86
}
}
,
{
image: {
link: 'http://leidseglibber.punt.nl/_files/2008-11-30/ome-willem-2557.jpg',
width: 300,
height: 302
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcRHm8eJoiN6j-VIjZm7s5Tr8o7yqOWml4AcX_66VY0g02yWIflUR71ENI0',
width: 115,
height: 116
}
}
,
{
image: {
link: 'http://resolver.kb.nl/resolve?urn=urn:gvn:NIBG01:85784-19&role=image&size=medium',
width: 750,
height: 754
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcTiIsORLYrJhe1NsNPaCOULFqHsZ4vbF41UWOFAowJzOMrsReIq3HBnXw1j',
width: 141,
height: 142
}
}
,
{
image: {
link: 'http://www.lyonpartners.nl/wp-content/uploads/2014/03/ome_willem.jpg',
width: 150,
height: 172
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcRIKXCFm6QJQ_Kr_DEVzrOQ8Luyfp8nWnuukdZ3u0h4HvLSoFmJL-N-Dw',
width: 87,
height: 100
}
}
,
{
image: {
link: 'http://photos1.blogger.com/blogger/6631/47/1600/edwin_rutten_ome_wil_81395a.jpg',
width: 453,
height: 242
},
thumbnail: {
link: 'https://encrypted-tbn3.gstatic.com/images?q=tbn:ANd9GcSPC8o9GU41Sg_WCqSpZFI3btPodxfFEIuYx2-m2iZoxKkEMkMP4y4mmg',
width: 127,
height: 68
}
}
,
{
image: {
link: 'http://www.hotspotsmagazine.nl/get.asp?id=64&tabel=aanbod&size=606',
width: 606,
height: 254
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcRTDrhxn0Cbnty2B1wDdE2TmkpXduTm8PWxjhIqh_AR92BdJ3ZDJqkwOJeV',
width: 136,
height: 57
}
}
,
{
image: {
link: 'http://www.omewillem.nl/afbeeldingen/wedstrijd/KopievanOmeWillem01-06-05.jpg',
width: 520,
height: 348
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcR_4xHApZBlSGkuch4hJxhG6K0SANJCNuDpxsVlUu_9uzbDRtRZdqN7yTc',
width: 131,
height: 88
}
}
,
{
image: {
link: 'https://i.ytimg.com/vi/VhiRbZEpG2A/hqdefault.jpg',
width: 480,
height: 360
},
thumbnail: {
link: 'https://encrypted-tbn3.gstatic.com/images?q=tbn:ANd9GcSfLcIBEHfHB-UFIdOGJh1pNDwbjeIYFkruvSox2w1xBA4fEWsDSXlmBjsG',
width: 129,
height: 97
}
}
,
{
image: {
link: 'http://www.omewillem.nl/archief/IMAGES/ergensintheater.jpg',
width: 200,
height: 164
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcRzaUIp7NgFuZePRP7Y4wjDf9RHPVQyvT9Jbu6UBL0341gC1hHueplslKc',
width: 104,
height: 85
}
}
,
{
image: {
link: 'http://www.1limburg.nl/sites/default/files/public/styles/article-detail/public/anp-1000_23406187.jpg?itok=p2r7mehW',
width: 800,
height: 450
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcTZEmIUAyX-Oa38jnzAhTCz_dc1n3SPPViUMqN754Tu-qg-_jKsRYafvJxS',
width: 143,
height: 80
}
}
,
{
image: {
link: 'http://www.seriesvanvroeger.nl/images/screenshots/liblj_willem.jpg',
width: 470,
height: 343
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcQCJNT0BdcCZ5mqAXzaXNe35G_wse7yXY6Y-LI4SrN1dafIBdnZpM2QwXRK',
width: 129,
height: 94
}
}
,
{
image: {
link: 'http://projectkoorvoq.nl/media/ode_aan_willem_wilmink_2013/05-ome_willem.jpg',
width: 600,
height: 400
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcSnfcbYarh0udVk7bMFAgJq6OYtzjKdL5TijRd8mAN3dHTQ1DUv2FRWqk4',
width: 135,
height: 90
}
}
,
{
image: {
link: 'http://resolver.kb.nl/resolve?urn=urn:gvn:NIBG01:74689-6&size=large',
width: 750,
height: 750
},
thumbnail: {
link: 'https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcR1lfj5piIVOeeiHOPQbSePvr1cvVB74aeBg9DK2FKVPdmdv-tyqFMhSOH1',
width: 141,
height: 141
}
}
,
{
image: {
link: 'https://i.ytimg.com/vi/yHnWhWcE3Kg/maxresdefault.jpg',
width: 1920,
height: 1080
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcTyIk8l6aKn77SKBFs8cMzalxYHJO0oKDvyjI70ONmhcg1QYt0WWPapV4yS',
width: 150,
height: 84
}
}
,
{
image: {
link: 'http://resolver.kb.nl/resolve?urn=urn:gvn:NIBG01:79294-kb-5-5a&size=large',
width: 750,
height: 524
},
thumbnail: {
link: 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcTXDtC8eq1Wd9f5HXJGIP1dHjBRkAW2nXOVhnpmHNOARxiwjJUg0sK7PH6e',
width: 141,
height: 99
}
}
,
{
image: {
link: 'http://www.omewillem.nl/archief/IMAGES/ow-in-cam.jpg',
width: 200,
height: 151
},
thumbnail: {
link: 'https://encrypted-tbn3.gstatic.com/images?q=tbn:ANd9GcR3FY7rLV6DyAwYR10Mv1pfT8vADgsOWmihKyJ83Kb85dX98-867JI1UQ',
width: 104,
height: 79
}
}
,
{
image: {
link: 'https://i.ytimg.com/vi/qRH8uz2G77g/hqdefault.jpg',
width: 480,
height: 360
},
thumbnail: {
link: 'https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcRECMSzfYSivITLKmPZuwg4_YZYz17SPKD2NNw7_uHXiZzNI9SXVBb9Wcso',
width: 129,
height: 97
}
}
,
{
image: {
link: 'https://s-media-cache-ak0.pinimg.com/236x/6a/88/b2/6a88b2811a693d9dba384c59c9490f02.jpg',
width: 223,
height: 251
},
thumbnail: {
link: 'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcR7Wcecm_nso-LuNgH-vEjP3uttBLQBccfrj0--wYM8EA_R9v-Qq8vzlbo',
width: 99,
height: 111
}
}
]
}
|
femmebot/shape
|
db/migrate/20190430234650_add_joinable_group_id_to_collections.rb
|
# Adds a `joinable_group_id` bigint column to the collections table,
# referencing the group whose members may join the collection.
class AddJoinableGroupIdToCollections < ActiveRecord::Migration[5.1]
  def change
    add_column :collections, :joinable_group_id, :bigint
  end
end
|
idcf/idcfcloud-cli
|
lib/idcf/cli/version.rb
|
module Idcf
  module Cli
    # Gem version string; frozen to prevent accidental mutation.
    VERSION = '1.1.2'.freeze
  end
end
|
YunLemon/smsn
|
monitron/src/main/java/net/fortytwo/smsn/monitron/listeners/sensors/VibrationLevelSensorListener.java
|
package net.fortytwo.smsn.monitron.listeners.sensors;
import net.fortytwo.smsn.monitron.Context;
import net.fortytwo.smsn.monitron.data.GaussianData;
import net.fortytwo.smsn.monitron.events.MonitronEvent;
import net.fortytwo.smsn.monitron.events.VibrationLevelObservation;
import org.openrdf.model.IRI;
/**
 * Sensor listener that turns Gaussian-summarized vibration-level samples
 * into {@link VibrationLevelObservation} events.
 */
public class VibrationLevelSensorListener extends GaussianSensorListener {

    /**
     * @param context the monitron context shared by event listeners
     * @param sensor  IRI identifying the vibration sensor being observed
     */
    public VibrationLevelSensorListener(final Context context,
                                        final IRI sensor) {
        super(context, sensor);
    }

    // Wraps a single Gaussian data sample in a VibrationLevelObservation event.
    protected MonitronEvent handleSample(final GaussianData data) {
        return new VibrationLevelObservation(context, sensor, data);
    }
}
|
Jerrypiglet/Total3DUnderstanding
|
utils_OR/DatasetCreation/combineImage.py
|
import glob
import os
import os.path as osp

import cv2

# Blend weights: the combined HDR image is 1/3 the base render ("im_*") plus
# 2/3 the supplemental render ("ims_*").
BASE_WEIGHT = 1.0 / 3.0
SUPP_WEIGHT = 2.0 / 3.0

srcs = glob.glob('main*_xml*')
for src in srcs:
    scenes = glob.glob(osp.join(src, 'scene*'))
    for scene in scenes:
        print(scene)
        imsNames = glob.glob(osp.join(scene, 'ims_*.rgbe'))
        if len(imsNames) != 0:
            # Pair each supplemental image with its base image, blend the
            # two, write the result as .hdr, then remove the .rgbe inputs.
            for imsName in imsNames:
                imName = imsName.replace('ims_', 'im_')
                im = cv2.imread(imName, -1)
                ims = cv2.imread(imsName, -1)
                if im is None or ims is None:
                    # cv2.imread returns None on a missing/unreadable file;
                    # skip the pair instead of crashing on arithmetic below.
                    print('Warning: could not read %s or %s; skipping' % (imName, imsName))
                    continue
                imn = BASE_WEIGHT * im + SUPP_WEIGHT * ims
                imnName = imName.replace('.rgbe', '.hdr')
                cv2.imwrite(imnName, imn)
                # os.remove is safer than os.system('rm %s'): it handles
                # spaces in paths and raises on failure instead of silently
                # ignoring a nonzero exit status.
                os.remove(imName)
                os.remove(imsName)
        else:
            # No supplemental images: simply rename the .rgbe files to .hdr
            # (os.replace is the portable, error-checked equivalent of "mv").
            imNames = glob.glob(osp.join(scene, 'im_*.rgbe'))
            for imName in imNames:
                imNewName = imName.replace('.rgbe', '.hdr')
                os.replace(imName, imNewName)
|
BreakerOfThings/o3de
|
Gems/EditorPythonBindings/Code/Source/PythonMarshalTuple.h
|
<reponame>BreakerOfThings/o3de
/*
* Copyright (c) Contributors to the Open 3D Engine Project.
* For complete copyright and license terms please see the LICENSE at the root of this distribution.
*
* SPDX-License-Identifier: Apache-2.0 OR MIT
*
*/
#pragma once
#include <Source/PythonMarshalComponent.h>
#include <AzCore/Serialization/SerializeContext.h>
namespace EditorPythonBindings
{
    //! Type converter that marshals tuple container types between Python
    //! objects and BehaviorContext values.
    class TypeConverterTuple final : public PythonMarshalComponent::TypeConverter
    {
    public:
        TypeConverterTuple(
            [[maybe_unused]] AZ::GenericClassInfo* genericClassInfo,
            const AZ::SerializeContext::ClassData* classData,
            const AZ::TypeId& typeId)
            : m_classData(classData)
            , m_typeId(typeId)
        {
        }

        //! Converts a Python object into a behavior value of the tuple type.
        AZStd::optional<PythonMarshalTypeRequests::BehaviorValueResult> PythonToBehaviorValueParameter(
            PythonMarshalTypeRequests::BehaviorTraits traits, pybind11::object pyObj, AZ::BehaviorArgument& outValue) override;

        //! Converts a behavior value of the tuple type into a Python object.
        AZStd::optional<PythonMarshalTypeRequests::PythonValueResult> BehaviorValueParameterToPython(
            AZ::BehaviorArgument& behaviorValue) override;

        //! Returns whether pyObj can be converted into the tuple behavior value.
        bool CanConvertPythonToBehaviorValue(PythonMarshalTypeRequests::BehaviorTraits traits, pybind11::object pyObj) const override;

    protected:
        const AZ::SerializeContext::ClassData* m_classData = nullptr; //!< Serialize-context class data for the tuple type.
        const AZ::TypeId m_typeId = {};                               //!< Type id of the tuple type being converted.

        // Validation helpers (implemented in the .cpp).
        bool IsValidList(pybind11::object pyObj) const;
        bool IsValidTuple(pybind11::object pyObj) const;
        bool IsCompatibleProxy(pybind11::object pyObj) const;

        // Loads a single Python item into element `index` of the tuple container.
        static bool LoadPythonToTupleElement(
            PyObject* pyItem,
            PythonMarshalTypeRequests::BehaviorTraits traits,
            const AZ::SerializeContext::ClassElement* itemElement,
            AZ::SerializeContext::IDataContainer* tupleContainer,
            size_t index,
            AZ::SerializeContext* serializeContext,
            void* newTuple);
    };
} // namespace EditorPythonBindings
|
LittleBoy18/kubeedge
|
edge/pkg/metamanager/client/pod.go
|
<reponame>LittleBoy18/kubeedge<gh_stars>1000+
package client

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"

	"github.com/kubeedge/beehive/pkg/core/model"
	"github.com/kubeedge/kubeedge/edge/pkg/common/message"
	"github.com/kubeedge/kubeedge/edge/pkg/common/modules"
)

// PodsGetter is an interface to get a pods client scoped to a namespace.
type PodsGetter interface {
	Pods(namespace string) PodsInterface
}

// PodsInterface describes the operations available on pods.
type PodsInterface interface {
	Create(*corev1.Pod) (*corev1.Pod, error)
	Update(*corev1.Pod) error
	Delete(name, options string) error
	Get(name string) (*corev1.Pod, error)
}

// pods implements PodsInterface for a single namespace, dispatching
// operations through the provided SendInterface.
type pods struct {
	namespace string
	send      SendInterface
}

// newPods returns a pods client bound to the given namespace and sender.
func newPods(namespace string, s SendInterface) *pods {
	return &pods{
		send:      s,
		namespace: namespace,
	}
}

// Create is currently a stub; it always returns (nil, nil).
func (c *pods) Create(cm *corev1.Pod) (*corev1.Pod, error) {
	return nil, nil
}

// Update is currently a stub; it always returns nil.
func (c *pods) Update(cm *corev1.Pod) error {
	return nil
}

// Delete builds a pod delete message for the named pod and dispatches it
// through the send interface. It always returns nil: Send has no error
// result here, so delivery failures are not surfaced to the caller.
func (c *pods) Delete(name, options string) error {
	resource := fmt.Sprintf("%s/%s/%s", c.namespace, model.ResourceTypePod, name)
	podDeleteMsg := message.BuildMsg(modules.MetaGroup, "", modules.EdgedModuleName, resource, model.DeleteOperation, options)
	c.send.Send(podDeleteMsg)
	return nil
}

// Get is currently a stub; it always returns (nil, nil).
func (c *pods) Get(name string) (*corev1.Pod, error) {
	return nil, nil
}
|
sim-wangyan/x7
|
demo/src/test/java/x7/AppTest.java
|
package x7;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.util.Assert;
import x7.demo.entity.TestBoo;
import x7.demo.remote.OrderRemote;
import javax.annotation.Resource;
/**
 * Spring Boot integration tests for the demo application: exercises the
 * remote OrderRemote bean and runs the XxxTest scenario suite in sequence.
 */
@SpringBootTest
@RunWith(SpringRunner.class)
public class AppTest {

    @Autowired
    private XxxTest xxxTest;

    // Injected by explicit bean name.
    @Resource(name = "x7.demo.remote.OrderRemote")
    private OrderRemote orderRemote;

    /** Invokes the named OrderRemote bean and prints its verification result. */
    @Test
    public void testMoreInstance(){
        System.out.println(this.orderRemote.verify());
    }

    /**
     * Runs the XxxTest scenarios in sequence; mutating scenarios are left
     * commented out.
     */
    @Test
    public void testAll() throws Exception {
        Assert.isTrue(TestBoo.BOO != TestBoo.HLL, "XXX");
        // xxxTest.createId();
        // xxxTest.testInCondtion();
        xxxTest.listCat();
        xxxTest.testFindToHandle();
        xxxTest.testTemporaryTable();
        xxxTest.inOrder();
        xxxTest.testOrderFindByAlia();
        xxxTest.testOrderFind();
        xxxTest.testNonPaged();
        xxxTest.testOrderFindByAlia();
        xxxTest.testListWithEnum();
        xxxTest.testResultMapSimpleSource();
        xxxTest.testListPlainValue();
        xxxTest.testAlia();
        xxxTest.resultKeyFuntion();
        xxxTest.testSimple();
        xxxTest.testCriteria();
        xxxTest.testOne();
        // xxxTest.testCreate();
        xxxTest.refreshByCondition();
        // xxxTest.testRemove();
        // xxxTest.createBatch();
        // xxxTest.removeRefreshCreate();
        // xxxTest.testRefreshConditionRemote();
        // xxxTest.testCriteriaRemote();
        // xxxTest.testResultMappedRemote();
        // xxxTest.testCreate();
        // xxxTest.testSimple();
    }
}
|
brymatPC/SdFat
|
doc/html/search/defines_3.js
|
// Search index data for documentation pages (generated file — presumably
// emitted by the Doxygen search indexer; do not edit by hand).
var searchData=
[
  ['f_990',['F',['../_sys_call_8h.html#a0e3009529aac180ed5f48296d6670d6b',1,'SysCall.h']]],
  ['fat12_5fsupport_991',['FAT12_SUPPORT',['../_sd_fat_config_8h.html#a28998c5daf4bd038f4f93172698320b1',1,'SdFatConfig.h']]]
];
|
eugeneilyin/mdi-norm
|
es/PhotoAlbum.js
|
<reponame>eugeneilyin/mdi-norm
import { createThemedIcon } from './utils/createThemedIcon';
import { FilledPhotoAlbum } from './FilledPhotoAlbum';
import { OutlinePhotoAlbum } from './OutlinePhotoAlbum';
import { RoundPhotoAlbum } from './RoundPhotoAlbum';
import { SharpPhotoAlbum } from './SharpPhotoAlbum';
import { TwoTonePhotoAlbum } from './TwoTonePhotoAlbum';

/**
 * Themed "PhotoAlbum" icon component: delegates to createThemedIcon, passing
 * the five style variants (filled, outline, round, sharp, two-tone).
 * NOTE(review): this file looks auto-generated (see the __PURE__ annotation);
 * prefer regenerating over editing by hand.
 */
export var PhotoAlbum =
/*#__PURE__*/
function PhotoAlbum(props) {
  return createThemedIcon(props, FilledPhotoAlbum, OutlinePhotoAlbum, RoundPhotoAlbum, SharpPhotoAlbum, TwoTonePhotoAlbum);
};
|
wangwenwang/TY_Order
|
Order/Controller/Main/ChartViewController.h
|
//
//  ChartViewController.h
//  Order
//
//  Created by 凯东源 on 16/10/20.
//  Copyright © 2016 凯东源. All rights reserved.
//

#import <UIKit/UIKit.h>

/// View controller for the chart screen.
@interface ChartViewController : UIViewController

/// Chart information (data items backing the chart).
@property (strong, nonatomic) NSMutableArray *arrM;

@end
|
brisbane/hpc-container-maker
|
recipes/lammps/lammps.py
|
<reponame>brisbane/hpc-container-maker
########
# LAMMPS recipe
#
# User arguments:
# arch: x86_64 or aarch64 (default: x86_64)
# build_image
# gdrcopy (default: 1.3)
# gpu_arch: Pascal60, Volta70, or Turing75 (default: Volta70)
# knem (default: 1.1.3)
# lammps_version (default: patch_19Sep2019)
# mlnx_ofed (default: 4.6-1.0.1.1)
# ompi (default: 4.0.2)
# qemu (default: False)
# runtime_image
# ucx (default: 1.6.1)
########

# NOTE: this is an HPCCM recipe, not a standalone script. USERARG, Stage0,
# Stage1 and the building blocks (baseimage, gnu, cmake, ...) are injected
# into the recipe namespace by the hpccm driver.
from distutils.version import StrictVersion
import hpccm.version

if StrictVersion(hpccm.__version__) < StrictVersion('19.11.0'):
    raise Exception('requires HPCCM version 19.11.0 or later')

# Use appropriate container base images based on the CPU architecture
arch = USERARG.get('arch', 'x86_64')
if arch == 'aarch64':
    # Early Access images - NGC registration required to use
    default_build_image = 'nvcr.io/ea-cuda-sc19/arm-partners/cuda-aarch64:10.2-devel-ubuntu18.04'
    default_runtime_image = 'nvcr.io/ea-cuda-sc19/arm-partners/cuda-aarch64:10.2-base-ubuntu18.04'
elif arch == 'x86_64':
    default_build_image = 'nvidia/cuda:10.1-devel-ubuntu18.04'
    default_runtime_image = 'nvidia/cuda:10.1-base-ubuntu18.04'
else:
    raise Exception('unrecognized architecture: {}'.format(arch))

########
# Build stage (Stage 0)
########

# Base image
Stage0 += baseimage(image=USERARG.get('build_image', default_build_image),
                    _arch=arch, _as='build')

if arch == 'aarch64' and USERARG.get('qemu', False):
    # Install QEMU emulator for aarch64 container image builds on x86 systems
    Stage0 += copy(_from='multiarch/qemu-user-static',
                   src='/usr/bin/qemu-aarch64-static', dest='/usr/bin')

# Base development environment
Stage0 += gnu(version='8')
Stage0 += cmake(eula=True)

# Communication stack: OpenMPI + UCX + KNEM + Mellanox OFED + gdrcopy
# (x86 only)
Stage0 += mlnx_ofed(version=USERARG.get('mlnx_ofed', '4.6-1.0.1.1'))
if hpccm.config.g_cpu_arch == hpccm.config.cpu_arch.X86_64:
    # gdrcopy is the x86-only piece of the stack; KNEM/UCX/OpenMPI are
    # installed for both architectures.
    Stage0 += gdrcopy(ldconfig=True, version=USERARG.get('gdrcopy', '1.3'))
Stage0 += knem(ldconfig=True, version=USERARG.get('knem', '1.1.3'))
Stage0 += ucx(knem='/usr/local/knem', ldconfig=True,
              version=USERARG.get('ucx', '1.6.1'))
mpi = openmpi(ldconfig=True, version=USERARG.get('ompi', '4.0.2'),
              ucx='/usr/local/ucx')
Stage0 += mpi

########
# LAMMPS
########
gpu_arch = USERARG.get('gpu_arch', 'Volta70')
if gpu_arch not in ['Pascal60', 'Volta70', 'Turing75']:
    raise Exception('unrecognized GPU architecture: {}'.format(gpu_arch))
lammps_version = USERARG.get('lammps_version', 'patch_19Sep2019')
# e.g. 'Volta70' -> 'sm70'; used to tag the install prefix.
compute_capability = 'sm' + gpu_arch[-2:]
srcdir = '/var/tmp/lammps-{}'.format(lammps_version)

Stage0 += comment('LAMMPS version {0} for CUDA compute capability {1}'.format(
    lammps_version, compute_capability))

# LAMMPS dependencies
Stage0 += apt_get(ospackages=['bc', 'git', 'libgomp1', 'libhwloc-dev', 'make',
                              'tar', 'wget'])

# LAMMPS build
Stage0 += generic_cmake(
    build_directory='{0}/build-{1}'.format(srcdir, gpu_arch),
    cmake_opts=['-D BUILD_SHARED_LIBS=ON',
                '-D CUDA_USE_STATIC_CUDA_RUNTIME=OFF',
                '-D KOKKOS_ARCH={}'.format(gpu_arch),
                '-D CMAKE_BUILD_TYPE=Release',
                '-D MPI_C_COMPILER={}'.format(mpi.toolchain.CC),
                '-D BUILD_MPI=yes',
                '-D PKG_MPIIO=on',
                '-D BUILD_OMP=yes',
                '-D BUILD_LIB=no',
                '-D CMAKE_CXX_COMPILER={}/lib/kokkos/bin/nvcc_wrapper'.format(srcdir),
                '-D PKG_USER-REAXC=yes',
                '-D PKG_KSPACE=yes',
                '-D PKG_MOLECULE=yes',
                '-D PKG_REPLICA=yes',
                '-D PKG_RIGID=yes',
                '-D PKG_MISC=yes',
                '-D PKG_MANYBODY=yes',
                '-D PKG_ASPHERE=yes',
                '-D PKG_GPU=no',
                '-D PKG_KOKKOS=yes',
                '-D KOKKOS_ENABLE_CUDA=yes',
                '-D KOKKOS_ENABLE_HWLOC=yes'],
    directory='{}/cmake'.format(srcdir),
    # Force CUDA dynamic linking, see
    # https://github.com/openucx/ucx/wiki/NVIDIA-GPU-Support
    preconfigure=['sed -i \'s/^cuda_args=""/cuda_args="--cudart shared"/g\' {}/lib/kokkos/bin/nvcc_wrapper'.format(srcdir)],
    prefix='/usr/local/lammps-{}'.format(compute_capability),
    url='https://github.com/lammps/lammps/archive/{}.tar.gz'.format(lammps_version))

########
# Runtime stage (Stage 1)
########
Stage1 += baseimage(image=USERARG.get('runtime_image', default_runtime_image))

# Build stage runtime support + LAMMPS
Stage1 += Stage0.runtime()

########
# LAMMPS
########
Stage1 += environment(variables={
    'LD_LIBRARY_PATH': '/usr/local/lammps-{}/lib:$LD_LIBRARY_PATH'.format(
        compute_capability),
    'PATH': '/usr/local/lammps-{}/bin:$PATH'.format(compute_capability),
    # Workaround, see https://github.com/openucx/ucx/wiki/NVIDIA-GPU-Support
    'UCX_MEMTYPE_CACHE': 'n'})
|
pantacor/pantahub-base
|
trails/trails-api.go
|
//
// Copyright 2016-2020 Pantacor Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Package trails offer a two party master/slave relationship enabling
// the master to asynchronously deploy configuration changes to its
// slave in a stepwise manner.
//
// ## Trail API Overview
//
// A trail represents a RESTful device state management endpoint optimized for
// high latency, asynchronous configuration management as found in the problem
// space of management of edge compute device world.
//
// XXX: add proper API high level doc here (deleted outdated content)
// handler func inline doc should stay up to date though...
//
// Detailed documentation for the various operations on the API endpoints can be
// at the handler functions below.
//
// TODOs:
// - properly document trails API once finalized
// - abstract access control in a more manageable and less
//   mistake/oversight-prone manner (probably descriptive/configuration style)
// - ensure step and progress time can be effectively read from trail meta info
// probably async/delayed update to ensure scalability (e.g. once every 5
// minute if there has been any step touch we update last progress etc
// - ensure that devices can query steps that need enqueing efficiently
// - ensure that in-sync time and status are timely updated based on step and
// progress
// - find smart way to figure when device is in sync based on reported state
// - consider enforcing sequential processing of steps to have a clean tail?
package trails
import (
"errors"
"log"
"net/http"
"strings"
"time"
"context"
"github.com/ant0ine/go-json-rest/rest"
jwtgo "github.com/dgrijalva/jwt-go"
jwt "github.com/pantacor/go-json-rest-middleware-jwt"
"gitlab.com/pantacor/pantahub-base/devices"
"gitlab.com/pantacor/pantahub-base/objects"
"gitlab.com/pantacor/pantahub-base/utils"
"go.mongodb.org/mongo-driver/bson/primitive"
"go.mongodb.org/mongo-driver/mongo"
"go.mongodb.org/mongo-driver/mongo/options"
"gopkg.in/mgo.v2/bson"
)
// PvrRemote pvr remote specification payload.  Returned to pvr clients so
// they can discover where to read/post state, store objects and announce
// new revisions.
type PvrRemote struct {
	RemoteSpec         string   `json:"pvr-spec"`         // the pvr remote protocol spec available
	JSONGetURL         string   `json:"json-get-url"`     // where to pvr post stuff
	JSONKey            string   `json:"json-key"`         // what key is to use in post json [default: json]
	ObjectsEndpointURL string   `json:"objects-endpoint"` // where to store/retrieve objects
	PostURL            string   `json:"post-url"`         // where to post/announce new revisions
	PostFields         []string `json:"post-fields"`      // what fields require input
	PostFieldsOpt      []string `json:"post-fields-opt"`  // what optional fields are available [default: <empty>]
}
// App trails rest application.  Wires together the JWT middleware, the
// go-json-rest API instance and the shared mongo client.
type App struct {
	jwtMiddleware *jwt.JWTMiddleware // authenticates USER/SESSION/DEVICE tokens
	API           *rest.Api          // REST API serving the trails routes
	mongoClient   *mongo.Client      // shared connection to pantahub's mongo database
}
// Trail define the structure of a trail -- the per-device timeline of
// configuration steps owned by a user and consumed by a device.
type Trail struct {
	ID     primitive.ObjectID `json:"id" bson:"_id"`
	Owner  string             `json:"owner"`
	Device string             `json:"device"`
	//  Admins   []string `json:"admins"` // XXX: maybe this is best way to do delegating device access....
	LastInSync  time.Time `json:"last-insync" bson:"last-insync"`   // last time device state matched the trail tip
	LastTouched time.Time `json:"last-touched" bson:"last-touched"` // last activity on this trail
	// FactoryState presumably holds the initial (revision 0) state -- TODO confirm
	FactoryState map[string]interface{} `json:"factory-state" bson:"factory-state"`
	UsedObjects  []string               `bson:"used_objects" json:"used_objects"`
}
// Step wanted can be added by the device owner or delegate.
// steps that were not reported can be deleted still. other steps
// cannot be deleted until the device gets deleted as well.
type Step struct {
	ID                  string                 `json:"id" bson:"_id"` // XXX: make type; formatted as "<trailid>-<rev>" (see handlePutStepsObject lookup)
	Owner               string                 `json:"owner"`
	Device              string                 `json:"device"`
	Committer           string                 `json:"committer"`
	TrailID             primitive.ObjectID     `json:"trail-id" bson:"trail-id"` //parent id
	Rev                 int                    `json:"rev"`
	CommitMsg           string                 `json:"commit-msg" bson:"commit-msg"`
	State               map[string]interface{} `json:"state"` // json blurb
	StateSha            string                 `json:"state-sha" bson:"statesha"`
	StepProgress        StepProgress           `json:"progress" bson:"progress"`
	StepTime            time.Time              `json:"step-time" bson:"step-time"`         // when the step was posted
	ProgressTime        time.Time              `json:"progress-time" bson:"progress-time"` // when progress was last reported
	Meta                map[string]interface{} `json:"meta"` // json blurb
	UsedObjects         []string               `bson:"used_objects" json:"used_objects"`
	IsPublic            bool                   `json:"-" bson:"ispublic"` // not serialized to clients
	MarkPublicProcessed bool                   `json:"mark_public_processed" bson:"mark_public_processed"`
	Garbage             bool                   `json:"garbage" bson:"garbage"` // true once scheduled for garbage collection
	TimeCreated         time.Time              `json:"time-created" bson:"timecreated"`
	TimeModified        time.Time              `json:"time-modified" bson:"timemodified"`
}
// StepProgress progression of a step as reported back by the device.
type StepProgress struct {
	Progress   int              `json:"progress"`                   // progress number. steps or 1-100
	Downloads  DownloadProgress `json:"downloads" bson:"downloads"` // progress number. steps or 1-100
	StatusMsg  string           `json:"status-msg" bson:"statusmsg"` // message of progress status
	Data       interface{}      `json:"data,omitempty" bson:"data"`  // data field that can hold things the device wants to remember
	Status     string           `json:"status"`                      // status code
	Log        string           `json:"log"`                         // log if available
}
// DownloadProgress holds info about total and individual download progress.
type DownloadProgress struct {
	Total   ObjectProgress   `json:"total" bson:"total"`     // aggregate across all objects
	Objects []ObjectProgress `json:"objects" bson:"objects"` // per-object breakdown
}
// ObjectProgress holds info object download progress.
// Times and sizes are raw integers; units (seconds vs. millis, bytes) are
// not established by this file -- TODO confirm with reporting clients.
type ObjectProgress struct {
	ObjectName      string `json:"object_name,omitempty" bson:"object_name,omitempty"`
	ObjectID        string `json:"object_id,omitempty" bson:"object_id,omitempty"`
	TotalSize       int64  `json:"total_size" bson:"total_size"`
	StartTime       int64  `json:"start_time" bson:"start_time"`
	CurrentTime     int64  `json:"current_time" bson:"currentb_time"` // NOTE(review): bson tag "currentb_time" looks like a typo for "current_time"; fixing it would break existing documents -- confirm before changing
	TotalDownloaded int64  `json:"total_downloaded" bson:"total_downloaded"`
}
// TrailSummary details about a trail -- a denormalized snapshot of one
// device's trail status (latest revision, progress, fleet metadata).
type TrailSummary struct {
	DeviceID         string    `json:"deviceid" bson:"deviceid"`
	Device           string    `json:"device" bson:"device"`
	DeviceNick       string    `json:"device-nick" bson:"device_nick"`
	Rev              int       `json:"revision" bson:"revision"`                   // latest posted revision
	ProgressRev      int       `json:"progress-revision" bson:"progress_revision"` // revision the device last reported on
	Progress         int       `json:"progress" bson:"progress"` // progress number. steps or 1-100
	IsPublic         bool      `json:"public" bson:"public"`
	StateSha         string    `json:"state-sha" bson:"state_sha256"`
	StatusMsg        string    `json:"status-msg" bson:"status_msg"` // message of progress status
	Status           string    `json:"status" bson:"status"`         // status code
	Timestamp        time.Time `json:"timestamp" bson:"timestamp"`   // greater of last seen and last modified
	StepTime         time.Time `json:"step-time" bson:"step_time"`
	ProgressTime     time.Time `json:"progress-time" bson:"progress_time"`
	TrailTouchedTime time.Time `json:"trail-touched-time" bson:"trail_touched_time"`
	RealIP           string    `json:"real-ip" bson:"real_ip"`
	FleetGroup       string    `json:"fleet-group" bson:"fleet_group"`
	FleetModel       string    `json:"fleet-model" bson:"fleet_model"`
	FleetLocation    string    `json:"fleet-location" bson:"fleet_location"`
	FleetRev         string    `json:"fleet-rev" bson:"fleet_rev"`
	Owner            string    `json:"-" bson:"owner"` // never serialized to clients
}
// handleAuth echoes the authenticated caller's JWT claims back as JSON.
// Useful for clients to inspect their own token payload.
func handleAuth(w rest.ResponseWriter, r *rest.Request) {
	claims := r.Env["JWT_PAYLOAD"]
	w.WriteJson(claims)
}
// XXX: no product without fixing this to only parse ids that belong to this
// service instance
//
// prnGetID extracts the id portion of a PRN: everything after the first '/'.
// When no '/' is present, strings.Index yields -1 and the slice starts at 0,
// so the whole input is returned unchanged.
func prnGetID(prn string) string {
	return prn[strings.Index(prn, "/")+1:]
}
// getLatestStePrev returns the highest non-garbage step revision recorded
// for the given trail, or -1 together with an error when the lookup fails
// (including mongo's "no documents" error when the trail has no steps yet).
func (a *App) getLatestStePrev(trailID primitive.ObjectID) (int, error) {
	collSteps := a.mongoClient.Database(utils.MongoDb).Collection("pantahub_steps")
	if collSteps == nil {
		return -1, errors.New("bad database connectivity")
	}

	// BUGFIX: the original decoded into the address of a *Step (i.e. a
	// **Step) and then dead-checked the always-non-nil pointer. Decode into
	// a value struct instead; FindOne reports "not found" through err.
	step := Step{}

	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	findOneOptions := options.FindOne()
	// Sort descending by revision so the first match is the latest step.
	findOneOptions.SetSort(bson.M{"rev": -1})

	err := collSteps.FindOne(ctx, bson.M{
		"trail-id": trailID,
		"garbage":  bson.M{"$ne": true},
	}, findOneOptions).Decode(&step)
	if err != nil {
		return -1, err
	}
	return step.Rev, nil
}
// handlePutStepsObject updates the metadata of a single object referenced by
// a trail step (PUT .../steps/:rev/objects/:obj).
//
// Access rules: DEVICE tokens may only touch steps of their own device;
// USER/SESSION tokens may only touch steps they own.  The object id must be
// a valid sha256 hex string and the object must already exist and belong to
// the step owner.  The payload may not rewrite identity fields (ID, Owner,
// StorageID).
func (a *App) handlePutStepsObject(w rest.ResponseWriter, r *rest.Request) {
	owner, ok := r.Env["JWT_PAYLOAD"].(jwtgo.MapClaims)["prn"]
	if !ok {
		// XXX: find right error
		utils.RestErrorWrapper(w, "You need to be logged in", http.StatusForbidden)
		return
	}

	authType, ok := r.Env["JWT_PAYLOAD"].(jwtgo.MapClaims)["type"]

	coll := a.mongoClient.Database(utils.MongoDb).Collection("pantahub_steps")
	if coll == nil {
		utils.RestErrorWrapper(w, "Error with Database connectivity", http.StatusInternalServerError)
		return
	}

	step := Step{}

	trailID := r.PathParam("id")
	rev := r.PathParam("rev")
	putID := r.PathParam("obj")

	if authType != "DEVICE" && authType != "USER" && authType != "SESSION" {
		utils.RestErrorWrapper(w, "Unknown AuthType", http.StatusBadRequest)
		return
	}

	// Step ids are "<trailid>-<rev>".
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	err := coll.FindOne(ctx, bson.M{
		"_id":     trailID + "-" + rev,
		"garbage": bson.M{"$ne": true},
	}).Decode(&step)
	if err != nil {
		utils.RestErrorWrapper(w, "Not Accessible Resource Id", http.StatusForbidden)
		return
	}

	if authType == "DEVICE" && step.Device != owner {
		utils.RestErrorWrapper(w, "No access for device", http.StatusForbidden)
		return
	} else if (authType == "USER" || authType == "SESSION") && step.Owner != owner {
		utils.RestErrorWrapper(w, "No access for user/session", http.StatusForbidden)
		return
	}

	newObject := objects.Object{}

	collection := a.mongoClient.Database(utils.MongoDb).Collection("pantahub_objects")
	if collection == nil {
		utils.RestErrorWrapper(w, "Error with Database connectivity", http.StatusInternalServerError)
		return
	}

	sha, err := utils.DecodeSha256HexString(putID)
	if err != nil {
		utils.RestErrorWrapper(w, "Put Trails Steps Object id must be a valid sha256", http.StatusBadRequest)
		return
	}

	storageID := objects.MakeStorageID(step.Owner, sha)

	ctx, cancel = context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	err = collection.FindOne(ctx, bson.M{
		"_id":     storageID,
		"garbage": bson.M{"$ne": true},
	}).Decode(&newObject)
	if err != nil {
		utils.RestErrorWrapper(w, "Not Accessible Resource Id", http.StatusForbidden)
		return
	}

	if newObject.Owner != step.Owner {
		utils.RestErrorWrapper(w, "Not Accessible Resource Id", http.StatusForbidden)
		return
	}

	// Remember identity fields so the payload cannot rewrite them.
	nID := newObject.ID
	nOwner := newObject.Owner
	nStorageID := newObject.StorageID

	// BUGFIX: the decode error was silently ignored before; reject malformed
	// payloads instead of comparing against a half-decoded object.
	err = r.DecodeJsonPayload(&newObject)
	if err != nil {
		utils.RestErrorWrapper(w, "Error decoding json payload", http.StatusBadRequest)
		return
	}

	if newObject.ID != nID {
		utils.RestErrorWrapper(w, "Illegal Call Parameter Id", http.StatusConflict)
		return
	}
	if newObject.Owner != nOwner {
		utils.RestErrorWrapper(w, "Illegal Call Parameter Owner", http.StatusConflict)
		return
	}
	if newObject.StorageID != nStorageID {
		utils.RestErrorWrapper(w, "Illegal Call Parameter StorageId", http.StatusConflict)
		return
	}

	objects.SyncObjectSizes(&newObject)

	result, err := objects.CalcUsageAfterPut(newObject.Owner, a.mongoClient, newObject.ID, newObject.SizeInt)
	if err != nil {
		log.Println("Error to calc diskquota: " + err.Error())
		utils.RestErrorWrapper(w, "Error posting object", http.StatusInternalServerError)
		return
	}

	quota, err := objects.GetDiskQuota(newObject.Owner)
	if err != nil {
		log.Println("Error get diskquota setting: " + err.Error())
		utils.RestErrorWrapper(w, "Error to calc quota", http.StatusInternalServerError)
		return
	}

	if result.Total > quota {
		// BUGFIX: err is provably nil here (checked above), so the original
		// err.Error() call would panic; and the branch fell through without
		// returning, so the update below still ran after reporting failure.
		utils.RestErrorWrapperUser(
			w,
			"Quota exceeded",
			"Quota exceeded; delete some objects or request a quota bump from <EMAIL>",
			http.StatusPreconditionFailed)
		return
	}

	ctx, cancel = context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	updateOptions := options.Update()
	updateOptions.SetUpsert(true)
	updateResult, err := collection.UpdateOne(
		ctx,
		bson.M{"_id": storageID},
		bson.M{"$set": newObject},
		updateOptions,
	)
	// BUGFIX: err must be inspected before updateResult, which is nil when
	// UpdateOne fails -- the original dereferenced it unconditionally.
	if err != nil {
		w.WriteHeader(http.StatusConflict)
		w.Header().Add("X-PH-Error", "Error inserting object into database "+err.Error())
	} else if updateResult.MatchedCount == 0 && updateResult.UpsertedCount == 0 {
		w.WriteHeader(http.StatusConflict)
		w.Header().Add("X-PH-Error", "Error inserting object into database ")
	}

	issuerURL := utils.GetAPIEndpoint("/trails")
	newObjectWithAccess := objects.MakeObjAccessible(issuerURL, newObject.Owner, newObject, storageID)
	w.WriteJson(newObjectWithAccess)
}
// ProcessObjectsInState collects the object list referenced by a state
// document and then un-marks any of those objects that were flagged as
// garbage:
//  1. resolve the object list from the state field;
//  2. restore (un-garbage) every object in that list.
func ProcessObjectsInState(
	owner string,
	state map[string]interface{},
	autoLink bool,
	a *App,
) (
	objects []string,
	err error,
) {
	objects, err = GetStateObjects(owner, state, autoLink, a)
	if err == nil {
		err = RestoreObjects(objects, a)
	}
	return objects, err
}
// GetStateObjects returns the storage ids of all objects referenced by a
// state document.  It validates the "#spec" marker, resolves every non-JSON
// entry as an object sha (optionally auto-linking), saves the resolved
// object and de-duplicates the resulting list.
func GetStateObjects(
	owner string,
	state map[string]interface{},
	autoLink bool,
	a *App,
) (
	[]string,
	error,
) {
	objectList := []string{}
	objMap := map[string]bool{}
	if len(state) == 0 {
		return objectList, nil
	}
	spec, ok := state["#spec"]
	if !ok {
		return nil, errors.New("state_object: Invalid state:#spec is missing")
	}
	specValue, ok := spec.(string)
	if !ok {
		return nil, errors.New("state_object: Invalid state:Value of #spec should be string")
	}
	if specValue != "pantavisor-multi-platform@1" && specValue != "pantavisor-service-embed@1" &&
		specValue != "pantavisor-service-system@1" {
		return nil, errors.New("state_object: Invalid state:Value of #spec should not be " + specValue)
	}
	objectsApp := objects.Build(a.mongoClient)
	for key, v := range state {
		// Inline JSON payloads and the spec marker are not object references.
		if strings.HasSuffix(key, ".json") ||
			key == "#spec" {
			continue
		}
		sha, found := v.(string)
		if !found {
			return nil, errors.New("state_object: Object is not a string[sha:" + sha + "]")
		}
		object, err := objectsApp.ResolveObjectWithLinks(owner, sha, autoLink)
		if err != nil {
			return nil, err
		}
		// Save object
		err = objectsApp.SaveObject(object, false)
		if err != nil {
			return nil, errors.New("Error saving object:" + err.Error())
		}
		if _, ok := objMap[object.StorageID]; !ok {
			// BUGFIX: the seen-map was never populated, so the intended
			// de-duplication never happened and objectList could contain
			// the same storage id multiple times.
			objMap[object.StorageID] = true
			objectList = append(objectList, object.StorageID)
		}
	}
	return objectList, nil
}
// RestoreObjects walks the given storage ids and clears the garbage flag on
// every object that is currently marked as garbage.
func RestoreObjects(
	objectList []string,
	a *App,
) error {
	for _, storageSha := range objectList {
		isGarbage, err := IsObjectGarbage(storageSha, a)
		if err != nil {
			return errors.New("Error checking garbage object:" + err.Error() + "[sha:" + storageSha + "]")
		}
		if !isGarbage {
			continue
		}
		if err := UnMarkObjectAsGarbage(storageSha, a); err != nil {
			return errors.New("Error unmarking object as garbage:" + err.Error() + "[sha:" + storageSha + "]")
		}
	}
	return nil
}
// IsObjectGarbage reports whether the object with the given storage id is
// currently marked as garbage in the objects collection.
func IsObjectGarbage(ObjectID string, a *App) (
	bool,
	error,
) {
	collection := a.mongoClient.Database(utils.MongoDb).Collection("pantahub_objects")
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	filter := bson.M{
		"_id":     ObjectID,
		"garbage": true,
	}
	objectCount, err := collection.CountDocuments(ctx, filter)
	if err != nil {
		return false, errors.New("Error Finding Object:" + err.Error())
	}
	return objectCount == 1, nil
}
// UnMarkObjectAsGarbage clears the garbage flag on the object with the given
// storage id.  Returns an error when the update fails or no document matched.
func UnMarkObjectAsGarbage(ObjectID string, a *App) error {
	collection := a.mongoClient.Database(utils.MongoDb).Collection("pantahub_objects")
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	updateResult, err := collection.UpdateOne(
		ctx,
		bson.M{
			"_id": ObjectID,
		},
		bson.M{"$set": bson.M{
			"garbage": false,
		}},
	)
	// BUGFIX: err must be checked before updateResult, which is nil when
	// UpdateOne fails -- the original dereferenced it first and would panic
	// on any database error.
	if err != nil {
		return errors.New("unmark_object_as_garbage:Error updating object:" + err.Error())
	}
	if updateResult.MatchedCount == 0 {
		return errors.New("unmark_object_as_garbage:Error updating object: not found")
	}
	return nil
}
// IsDevicePublic checks if a device is public or not by looking up the
// device document via the devices sub-app.
func (a *App) IsDevicePublic(ID primitive.ObjectID) (bool, error) {
	device := devices.Device{}
	if err := devices.Build(a.mongoClient).FindDeviceByID(ID, &device); err != nil {
		return false, err
	}
	return device.IsPublic, nil
}
|
hbzhang/web
|
bower_components/ui/angular-dropdowns/node_modules/gulp-jshint/node_modules/lodash/collection/sortByAll.js
|
var baseFlatten = require('../internal/baseFlatten'),
baseSortByOrder = require('../internal/baseSortByOrder'),
isIterateeCall = require('../internal/isIterateeCall'),
restParam = require('../function/restParam');
/**
 * This method is like `_.sortBy` except that it can sort by multiple iteratees
 * or property names.
 *
 * If a property name is provided for an iteratee the created `_.property`
 * style callback returns the property value of the given element.
 *
 * If an object is provided for an iteratee the created `_.matches` style
 * callback returns `true` for elements that have the properties of the given
 * object, else `false`.
 *
 * @static
 * @memberOf _
 * @category Collection
 * @param {Array|Object|string} collection The collection to iterate over.
 * @param {...(Function|Function[]|Object|Object[]|string|string[])} iteratees
 *  The iteratees to sort by, specified as individual values or arrays of values.
 * @returns {Array} Returns the new sorted array.
 * @example
 *
 * var users = [
 *   { 'users': 'fred',   'age': 48 },
 *   { 'users': 'barney', 'age': 36 },
 *   { 'users': 'fred',   'age': 42 },
 *   { 'users': 'barney', 'age': 34 }
 * ];
 *
 * _.map(_.sortByAll(users, ['users', 'age']), _.values);
 * // => [['barney', 34], ['barney', 36], ['fred', 42], ['fred', 48]]
 *
 * _.map(_.sortByAll(users, 'users', function(chr) {
 *   return Math.floor(chr.age / 10);
 * }), _.values);
 * // => [['barney', 36], ['barney', 34], ['fred', 48], ['fred', 42]]
 */
var sortByAll = restParam(function(collection, props) {
  if (collection == null) {
    return [];
  }
  // When invoked as an iteratee itself (e.g. by `_.map`), only the first
  // rest argument is a real iteratee; drop the index/collection extras.
  var guard = props[2];
  if (guard && isIterateeCall(props[0], props[1], guard)) {
    props.length = 1;
  }
  var flattened = baseFlatten(props);
  return baseSortByOrder(collection, flattened, []);
});

module.exports = sortByAll;
|
qbrick-cms/qbrick
|
spec/models/video_brick_spec.rb
|
require 'spec_helper'

# Model specs for Qbrick::VideoBrick: validation of video sources and the
# leaf-node behaviour (a video brick cannot contain child bricks).
describe Qbrick::VideoBrick, type: :model do
  let(:video_brick) { described_class.new }

  describe '#valid' do
    before { video_brick.valid? }

    context 'without any video source' do
      it 'has en error' do
        expect(video_brick.errors[:any_source].count).to eq(1)
      end
    end
  end

  describe '#bricks' do
    it 'can not have childs' do
      expect(video_brick).not_to respond_to(:bricks)
    end
  end

  describe '#user_can_add_childs?' do
    it 'returns false' do
      expect(video_brick.user_can_add_childs?).to be_falsey
    end
  end
end
|
jnvshubham7/cpp-programming
|
450Question(DayWise)/106.cpp
|
<filename>450Question(DayWise)/106.cpp
// LeetCode 4: median of two sorted arrays.
class Solution {
public:
    // Returns the median of the two sorted input arrays by copying both into
    // one buffer and sorting it.  O((m+n) log(m+n)) time, O(m+n) space.
    //
    // BUGFIXES vs. the original:
    //  * the parameter list was missing the template argument (`vector&`)
    //    and the closing parenthesis, so the code did not compile;
    //  * nums1 was iterated with nums2's length `n` instead of its own `m`,
    //    which reads out of bounds when m > n and drops elements when m < n.
    double findMedianSortedArrays(vector<int>& nums1, vector<int>& nums2) {
        int m = nums1.size();
        int n = nums2.size();
        vector<double> res;
        res.reserve(m + n);
        for (int i = 0; i < m; i++) {
            res.push_back(nums1[i]);
        }
        for (int i = 0; i < n; i++) {
            res.push_back(nums2[i]);
        }
        sort(res.begin(), res.end());
        int k = res.size();
        // Even count: average the two middle values; odd count: middle value.
        if (k % 2 == 0) {
            return (res[k / 2] + res[k / 2 - 1]) / 2;
        }
        return res[k / 2];
    }
};
|
weisenzcharles/weinode
|
Java/spring-cloud-demo/spring-cloud-consumer/src/main/java/org/charles/springcloud/consumer/controller/DeptController.java
|
<gh_stars>0
package org.charles.springcloud.consumer.controller;
import org.charles.springcloud.api.entity.Dept;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cloud.client.ServiceInstance;
import org.springframework.cloud.client.discovery.DiscoveryClient;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.client.RestTemplate;
import java.util.List;
/**
 * REST consumer for the SPRING-CLOUD-PROVIDER service.
 *
 * {@code /list} does manual instance selection through the DiscoveryClient
 * (simple round-robin); the {@code /consumer/dept/*} endpoints delegate to a
 * RestTemplate that resolves the logical service name.
 */
@RestController
public class DeptController {

    @Autowired
    private RestTemplate restTemplate;

    /** Logical service name resolved by the (load-balanced) RestTemplate. */
    private static final String BASE_REST_URL = "http://SPRING-CLOUD-PROVIDER";

    @Autowired
    DiscoveryClient discoveryClient;

    /**
     * Round-robin counter. Not synchronized: concurrent requests may pick the
     * same instance, which is harmless for simple load distribution.
     */
    int count = 0;

    @GetMapping("/list")
    public List<Dept> hello(String name) {
        List<ServiceInstance> list = discoveryClient.getInstances("SPRING-CLOUD-PROVIDER");
        // BUGFIX: guard against an empty registry -- the original divided by
        // list.size() and threw ArithmeticException when no instance was up.
        if (list == null || list.isEmpty()) {
            throw new IllegalStateException("No instances available for SPRING-CLOUD-PROVIDER");
        }
        // BUGFIX: floorMod keeps the index non-negative even after the counter
        // overflows Integer.MAX_VALUE (plain % would go negative and throw
        // IndexOutOfBoundsException).
        ServiceInstance instance = list.get(Math.floorMod(count, list.size()));
        count++;
        String host = instance.getHost();
        int port = instance.getPort();
        List<Dept> deptList = restTemplate.getForObject("http://" + host + ":" + port + "/dept/list", List.class);
        return deptList;
    }

    @RequestMapping("/consumer/dept/add")
    public boolean add(Dept dept) {
        return restTemplate.postForObject(BASE_REST_URL + "/dept/add", dept, Boolean.class);
    }

    @RequestMapping("/consumer/dept/{id}")
    public Dept get(@PathVariable("id") Integer id) {
        return restTemplate.getForObject(BASE_REST_URL + "/dept/" + id, Dept.class);
    }

    @RequestMapping("/consumer/dept/list")
    public List<Dept> list() {
        return restTemplate.getForObject(BASE_REST_URL + "/dept/list", List.class);
    }
}
|
VickyKoblinski/verdigris
|
build/theme-definition-loader/src/themeDefinitionHandler.js
|
<reponame>VickyKoblinski/verdigris<gh_stars>0
const getPropType = require('react-docgen/dist/utils/getPropType');
const getPropertyName = require('react-docgen/dist/utils/getPropertyName');
const isReactModuleName = require('react-docgen/dist/utils/isReactModuleName');
const isStatelessComponent = require('react-docgen/dist/utils/isStatelessComponent');
const getMemberValuePath = require('react-docgen/dist/utils/getMemberValuePath');
const printValue = require('react-docgen/dist/utils/printValue');
const recast = require('recast');
const resolveFunctionDefinitionToReturnValue = require('react-docgen/dist/utils/resolveFunctionDefinitionToReturnValue');
const resolveToModule = require('react-docgen/dist/utils/resolveToModule');
const resolveToValue = require('react-docgen/dist/utils/resolveToValue');
const setPropDescription = require('react-docgen/dist/utils/setPropDescription');
const {
types: { namedTypes: types },
} = recast;
/**
 * Determines whether `path` resolves to the React PropTypes module (either
 * React itself or the standalone `ReactPropTypes`).
 * @param {NodePath} path
 * @returns {boolean}
 */
function isPropTypesExpression(path) {
  const moduleName = resolveToModule.default(path);
  return Boolean(
    moduleName &&
      (isReactModuleName.default(moduleName) || moduleName === 'ReactPropTypes'),
  );
}
// Walks the properties of a propTypes-style ObjectExpression and fills in the
// `type` field on each prop descriptor.  Spread elements are resolved and,
// when they resolve to a plain object literal, recursed into.
function amendPropTypes(getDescriptor, path) {
  if (!types.ObjectExpression.check(path.node)) {
    return;
  }
  path.get('properties').each(propertyPath => {
    switch (propertyPath.node.type) {
      case types.Property.name: {
        // Ordinary `name: <value>` entry.
        const propDescriptor = getDescriptor(
          getPropertyName.default(propertyPath),
        );
        const valuePath = propertyPath.get('value');
        // If the value comes from React/ReactPropTypes, parse it into a
        // structured type; otherwise record the raw source as a custom type.
        const type = isPropTypesExpression(valuePath)
          ? getPropType.default(valuePath)
          : { name: 'custom', raw: printValue.default(valuePath) };
        if (type) {
          propDescriptor.type = type;
        }
        break;
      }
      case types.SpreadElement.name: {
        const resolvedValuePath = resolveToValue.default(
          propertyPath.get('argument'),
        );
        switch (resolvedValuePath.node.type) {
          case types.ObjectExpression.name: // normal object literal
            amendPropTypes(getDescriptor, resolvedValuePath);
            break;
          default:
            // Spread of anything else (identifier, call, ...) is ignored.
            break;
        }
        break;
      }
      default:
        break;
    }
  });
}
/**
 * Walks an ObjectExpression and records each property's doc comment on the
 * documentation object, following spread elements into their resolved values.
 */
function resolveDocumentation(documentation, path) {
  if (!types.ObjectExpression.check(path.node)) {
    return;
  }
  path.get('properties').each(propertyPath => {
    const { node } = propertyPath;
    if (types.Property.check(node)) {
      setPropDescription.default(documentation, propertyPath);
      return;
    }
    if (types.SpreadElement.check(node)) {
      resolveDocumentation(
        documentation,
        resolveToValue.default(propertyPath.get('argument')),
      );
    }
  });
}
/**
 * Extracts the default value of a property path.
 * Literals are taken verbatim from source; everything else is resolved to a
 * value and pretty-printed (import declarations fall back to the identifier
 * name).
 * @returns {{value: string, computed: boolean}|null}
 */
function getDefaultValue(path) {
  const { node } = path;
  let defaultValue;

  if (types.Literal.check(node)) {
    defaultValue = node.raw;
  } else {
    const valuePath = types.AssignmentPattern.check(node)
      ? resolveToValue.default(path.get('right'))
      : resolveToValue.default(path);
    defaultValue = types.ImportDeclaration.check(valuePath.node)
      ? node.name
      : printValue.default(valuePath);
  }

  if (typeof defaultValue === 'undefined') {
    return null;
  }
  return {
    value: defaultValue,
    // `computed` mirrors whether the default is an expression rather than a
    // literal in the original source.
    computed:
      types.CallExpression.check(node) ||
      types.MemberExpression.check(node) ||
      types.Identifier.check(node),
  };
}
/** Returns the first parameter (the props object) of a stateless component. */
function getStatelessPropsPath(componentDefinition) {
  const resolved = resolveToValue.default(componentDefinition);
  return resolved.get('params', 0);
}
/**
 * Locates the component's `defaultThemeValues` member and resolves it to the
 * ObjectExpression holding the defaults.  When the member is a function, the
 * function's return value is used if it resolves to an object literal.
 * @returns {NodePath|null}
 */
function getDefaultPropsPath(componentDefinition) {
  const memberPath = getMemberValuePath.default(
    componentDefinition,
    'defaultThemeValues',
  );
  if (!memberPath) {
    return null;
  }

  let defaultPropsPath = resolveToValue.default(memberPath);
  if (!defaultPropsPath) {
    return null;
  }

  if (types.FunctionExpression.check(defaultPropsPath.node)) {
    // Find the value that is returned from the function and process it if it
    // is an object literal.
    const returnValue = resolveFunctionDefinitionToReturnValue.default(
      defaultPropsPath,
    );
    if (returnValue && types.ObjectExpression.check(returnValue.node)) {
      defaultPropsPath = returnValue;
    }
  }
  return defaultPropsPath;
}
/**
 * Records the default value of every plain property on the documentation
 * object.  For stateless components only destructured defaults
 * (AssignmentPattern nodes) are considered.
 */
function getDefaultValuesFromProps(properties, documentation, isStateless) {
  properties
    .filter(propertyPath => types.Property.check(propertyPath.node))
    // Don't evaluate property if component is functional and the node is not
    // an AssignmentPattern.
    .filter(
      propertyPath =>
        !isStateless ||
        types.AssignmentPattern.check(propertyPath.get('value').node),
    )
    .forEach(propertyPath => {
      const name = getPropertyName.default(propertyPath);
      const descriptor = documentation.getPropDescriptor(name);
      const valuePath = isStateless
        ? propertyPath.get('value', 'right')
        : propertyPath.get('value');
      const defaultValue = getDefaultValue(valuePath);
      if (defaultValue) {
        descriptor.defaultValue = defaultValue;
      }
    });
}
// Builds a react-docgen handler that documents the member named `propName`
// (here: `themeDefinition`) the same way the stock propTypes handler would:
// types, descriptions and default values are merged into the documentation
// object.
const getPropTypeHandler = propName => {
  return (documentation, path) => {
    let propTypesPath = getMemberValuePath.default(path, propName);
    if (!propTypesPath) {
      return;
    }
    propTypesPath = resolveToValue.default(propTypesPath);
    if (!propTypesPath) {
      return;
    }
    // Bind so amendPropTypes can look up descriptors without the receiver.
    const getDescriptor = documentation.getPropDescriptor;
    amendPropTypes(getDescriptor.bind(documentation), propTypesPath);
    resolveDocumentation(documentation, propTypesPath);

    let statelessProps = null;
    const defaultPropsPath = getDefaultPropsPath(path);
    if (isStatelessComponent.default(path)) {
      statelessProps = getStatelessPropsPath(path);
    }

    // Do both statelessProps and defaultProps if both are available so defaultProps can override
    if (statelessProps && types.ObjectPattern.check(statelessProps.node)) {
      getDefaultValuesFromProps(
        statelessProps.get('properties'),
        documentation,
        true,
      );
    }
    if (
      defaultPropsPath &&
      types.ObjectExpression.check(defaultPropsPath.node)
    ) {
      getDefaultValuesFromProps(
        defaultPropsPath.get('properties'),
        documentation,
        false,
      );
    }
  };
};

module.exports = getPropTypeHandler('themeDefinition');
|
egraba/vbox_openbsd
|
VirtualBox-5.0.0/src/VBox/Devices/Storage/DrvHostDVD.cpp
|
<reponame>egraba/vbox_openbsd<gh_stars>1-10
/* $Id: DrvHostDVD.cpp $ */
/** @file
* DrvHostDVD - Host DVD block driver.
*/
/*
* Copyright (C) 2006-2015 Oracle Corporation
*
* This file is part of VirtualBox Open Source Edition (OSE), as
* available from http://www.virtualbox.org. This file is free software;
* you can redistribute it and/or modify it under the terms of the GNU
* General Public License (GPL) as published by the Free Software
* Foundation, in version 2 as it comes in the "COPYING" file of the
* VirtualBox OSE distribution. VirtualBox OSE is distributed in the
* hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
*/
/*******************************************************************************
* Header Files *
*******************************************************************************/
#define LOG_GROUP LOG_GROUP_DRV_HOST_DVD
#define __STDC_LIMIT_MACROS
#define __STDC_CONSTANT_MACROS
#ifdef RT_OS_DARWIN
# include <mach/mach.h>
# include <Carbon/Carbon.h>
# include <IOKit/IOKitLib.h>
# include <IOKit/IOCFPlugIn.h>
# include <IOKit/scsi/SCSITaskLib.h>
# include <IOKit/scsi/SCSICommandOperationCodes.h>
# include <IOKit/storage/IOStorageDeviceCharacteristics.h>
# include <mach/mach_error.h>
# define USE_MEDIA_POLLING
#elif defined RT_OS_LINUX
# include <sys/ioctl.h>
# include <linux/version.h>
/* All the following crap is apparently not necessary anymore since Linux
* version 2.6.29. */
# if LINUX_VERSION_CODE < KERNEL_VERSION(2, 6, 29)
/* This is a hack to work around conflicts between these linux kernel headers
* and the GLIBC tcpip headers. They have different declarations of the 4
* standard byte order functions. */
# define _LINUX_BYTEORDER_GENERIC_H
/* This is another hack for not bothering with C++ unfriendly byteswap macros. */
/* Those macros that are needed are defined in the header below. */
# include "swab.h"
# endif
# include <linux/cdrom.h>
# include <sys/fcntl.h>
# include <errno.h>
# include <limits.h>
# include <iprt/mem.h>
# define USE_MEDIA_POLLING
#elif defined(RT_OS_SOLARIS)
# include <stropts.h>
# include <fcntl.h>
# include <errno.h>
# include <pwd.h>
# include <unistd.h>
# include <syslog.h>
# ifdef VBOX_WITH_SUID_WRAPPER
# include <auth_attr.h>
# endif
# include <sys/dkio.h>
# include <sys/sockio.h>
# include <sys/scsi/scsi.h>
# define USE_MEDIA_POLLING
#elif defined(RT_OS_WINDOWS)
# pragma warning(disable : 4163)
# define _interlockedbittestandset they_messed_it_up_in_winnt_h_this_time_sigh__interlockedbittestandset
# define _interlockedbittestandreset they_messed_it_up_in_winnt_h_this_time_sigh__interlockedbittestandreset
# define _interlockedbittestandset64 they_messed_it_up_in_winnt_h_this_time_sigh__interlockedbittestandset64
# define _interlockedbittestandreset64 they_messed_it_up_in_winnt_h_this_time_sigh__interlockedbittestandreset64
# include <Windows.h>
# include <winioctl.h>
# include <ntddscsi.h>
# pragma warning(default : 4163)
# undef _interlockedbittestandset
# undef _interlockedbittestandreset
# undef _interlockedbittestandset64
# undef _interlockedbittestandreset64
# undef USE_MEDIA_POLLING
#elif defined(RT_OS_FREEBSD)
# include <sys/cdefs.h>
# include <sys/param.h>
# include <stdio.h>
# include <cam/cam.h>
# include <cam/cam_ccb.h>
# define USE_MEDIA_POLLING
#else
# error "Unsupported Platform."
#endif
#include <iprt/asm.h>
#include <VBox/vmm/pdmdrv.h>
#include <iprt/asm.h>
#include <iprt/assert.h>
#include <iprt/file.h>
#include <iprt/string.h>
#include <iprt/thread.h>
#include <iprt/critsect.h>
#include <VBox/scsi.h>
#include "VBoxDD.h"
#include "DrvHostBase.h"
/*******************************************************************************
* Internal Functions *
*******************************************************************************/
static DECLCALLBACK(int) drvHostDvdDoLock(PDRVHOSTBASE pThis, bool fLock);
#ifdef VBOX_WITH_SUID_WRAPPER
static int solarisCheckUserAuth();
static int solarisEnterRootMode(uid_t *pEffUserID);
static int solarisExitRootMode(uid_t *pEffUserID);
#endif
/** @copydoc PDMIMOUNT::pfnUnmount
 *
 * Unlocks the drive if needed, optionally ejects the disc via the
 * platform-specific mechanism, and marks the media as not present.
 * Refuses with VERR_PDM_MEDIA_LOCKED when the drive is locked and
 * fForce is false.  Serialized through pThis->CritSect.
 */
static DECLCALLBACK(int) drvHostDvdUnmount(PPDMIMOUNT pInterface, bool fForce, bool fEject)
{
    PDRVHOSTBASE pThis = PDMIMOUNT_2_DRVHOSTBASE(pInterface);
    RTCritSectEnter(&pThis->CritSect);

    /*
     * Validate state.
     */
    int rc = VINF_SUCCESS;
    if (!pThis->fLocked || fForce)
    {
        /* Unlock drive if necessary. */
        if (pThis->fLocked)
            drvHostDvdDoLock(pThis, false);

        if (fEject)
        {
            /*
             * Eject the disc.
             */
#if defined(RT_OS_DARWIN) || defined(RT_OS_FREEBSD)
            /* SCSI START STOP UNIT with LoEj+Stop bits set (0x02): eject and stop. */
            uint8_t abCmd[16] =
            {
                SCSI_START_STOP_UNIT, 0, 0, 0, 2 /*eject+stop*/, 0,
                0,0,0,0,0,0,0,0,0,0
            };
            rc = DRVHostBaseScsiCmd(pThis, abCmd, 6, PDMBLOCKTXDIR_NONE, NULL, NULL, NULL, 0, 0);

#elif defined(RT_OS_LINUX)
            rc = ioctl(RTFileToNative(pThis->hFileDevice), CDROMEJECT, 0);
            if (rc < 0)
            {
                /* Map errno to VBox status codes. */
                if (errno == EBUSY)
                    rc = VERR_PDM_MEDIA_LOCKED;
                else if (errno == ENOSYS)
                    rc = VERR_NOT_SUPPORTED;
                else
                    rc = RTErrConvertFromErrno(errno);
            }

#elif defined(RT_OS_SOLARIS)
            /* Solaris ejects through the raw device node. */
            rc = ioctl(RTFileToNative(pThis->hFileRawDevice), DKIOCEJECT, 0);
            if (rc < 0)
            {
                if (errno == EBUSY)
                    rc = VERR_PDM_MEDIA_LOCKED;
                else if (errno == ENOSYS || errno == ENOTSUP)
                    rc = VERR_NOT_SUPPORTED;
                else if (errno == ENODEV)
                    rc = VERR_PDM_MEDIA_NOT_MOUNTED;
                else
                    rc = RTErrConvertFromErrno(errno);
            }

#elif defined(RT_OS_WINDOWS)
            RTFILE hFileDevice = pThis->hFileDevice;
            if (hFileDevice == NIL_RTFILE) /* obsolete crap */
                rc = RTFileOpen(&hFileDevice, pThis->pszDeviceOpen, RTFILE_O_READ | RTFILE_O_OPEN | RTFILE_O_DENY_NONE);
            if (RT_SUCCESS(rc))
            {
                /* do ioctl */
                DWORD cbReturned;
                if (DeviceIoControl((HANDLE)RTFileToNative(hFileDevice), IOCTL_STORAGE_EJECT_MEDIA,
                                    NULL, 0,
                                    NULL, 0, &cbReturned,
                                    NULL))
                    rc = VINF_SUCCESS;
                else
                    rc = RTErrConvertFromWin32(GetLastError());

                /* clean up handle */
                if (hFileDevice != pThis->hFileDevice)
                    RTFileClose(hFileDevice);
            }
            else
                /* NOTE(review): format/argument mismatch -- 'rc' (an int) is
                 * passed for '%s'; if this assertion fires it dereferences a
                 * bogus pointer.  Likely intended: the device name for '%s'
                 * plus "rc=%Rrc".  TODO confirm and fix. */
                AssertMsgFailed(("Failed to open '%s' for ejecting this tray.\n", rc));

#else
            AssertMsgFailed(("Eject is not implemented!\n"));
            rc = VINF_SUCCESS;
#endif
        }

        /*
         * Media is no longer present.
         */
        DRVHostBaseMediaNotPresent(pThis);  /** @todo This isn't thread safe! */
    }
    else
    {
        Log(("drvHostDvdUnmount: Locked\n"));
        rc = VERR_PDM_MEDIA_LOCKED;
    }

    RTCritSectLeave(&pThis->CritSect);
    LogFlow(("drvHostDvdUnmount: returns %Rrc\n", rc));
    return rc;
}
/**
* Locks or unlocks the drive.
*
* @returns VBox status code.
* @param pThis The instance data.
* @param fLock True if the request is to lock the drive, false if to unlock.
*/
static DECLCALLBACK(int) drvHostDvdDoLock(PDRVHOSTBASE pThis, bool fLock)
{
#if defined(RT_OS_DARWIN) || defined(RT_OS_FREEBSD)
    /* SCSI PREVENT/ALLOW MEDIUM REMOVAL: byte 4 carries the prevent flag. */
    uint8_t abCmd[16] =
    {
        SCSI_PREVENT_ALLOW_MEDIUM_REMOVAL, 0, 0, 0, fLock, 0,
        0,0,0,0,0,0,0,0,0,0
    };
    int rc = DRVHostBaseScsiCmd(pThis, abCmd, 6, PDMBLOCKTXDIR_NONE, NULL, NULL, NULL, 0, 0);
#elif defined(RT_OS_LINUX)
    /* CDROM_LOCKDOOR toggles the tray lock; map the common errnos to IPRT codes. */
    int rc = ioctl(RTFileToNative(pThis->hFileDevice), CDROM_LOCKDOOR, (int)fLock);
    if (rc < 0)
    {
        if (errno == EBUSY)
            rc = VERR_ACCESS_DENIED;
        else if (errno == EDRIVE_CANT_DO_THIS)
            rc = VERR_NOT_SUPPORTED;
        else
            rc = RTErrConvertFromErrno(errno);
    }
#elif defined(RT_OS_SOLARIS)
    /* Solaris uses the raw device node for DKIOCLOCK/DKIOCUNLOCK. */
    int rc = ioctl(RTFileToNative(pThis->hFileRawDevice), fLock ? DKIOCLOCK : DKIOCUNLOCK, 0);
    if (rc < 0)
    {
        if (errno == EBUSY)
            rc = VERR_ACCESS_DENIED;
        else if (errno == ENOTSUP || errno == ENOSYS)
            rc = VERR_NOT_SUPPORTED;
        else
            rc = RTErrConvertFromErrno(errno);
    }
#elif defined(RT_OS_WINDOWS)
    PREVENT_MEDIA_REMOVAL PreventMediaRemoval = {fLock};
    DWORD cbReturned;
    int rc;
    if (DeviceIoControl((HANDLE)RTFileToNative(pThis->hFileDevice), IOCTL_STORAGE_MEDIA_REMOVAL,
                        &PreventMediaRemoval, sizeof(PreventMediaRemoval),
                        NULL, 0, &cbReturned,
                        NULL))
        rc = VINF_SUCCESS;
    else
        /** @todo figure out the return codes for already locked. */
        rc = RTErrConvertFromWin32(GetLastError());
#else
    AssertMsgFailed(("Lock/Unlock is not implemented!\n"));
    int rc = VINF_SUCCESS;
#endif
    LogFlow(("drvHostDvdDoLock(, fLock=%RTbool): returns %Rrc\n", fLock, rc));
    return rc;
}
#ifdef RT_OS_LINUX
/**
* Get the media size.
*
* @returns VBox status code.
* @param pThis The instance data.
* @param pcb Where to store the size.
*/
static int drvHostDvdGetMediaSize(PDRVHOSTBASE pThis, uint64_t *pcb)
{
    /*
     * Query the media size.
     */
    /* Clear the media-changed-since-last-call-thingy just to be on the safe side. */
    ioctl(RTFileToNative(pThis->hFileDevice), CDROM_MEDIA_CHANGED, CDSL_CURRENT);
    /* Seeking to the end of the block device yields the media size in bytes. */
    return RTFileSeek(pThis->hFileDevice, 0, RTFILE_SEEK_END, pcb);
}
#endif /* RT_OS_LINUX */
#ifdef USE_MEDIA_POLLING
/**
* Do media change polling.
*/
DECLCALLBACK(int) drvHostDvdPoll(PDRVHOSTBASE pThis)
{
    /*
     * Poll for media change.
     */
#if defined(RT_OS_DARWIN) || defined(RT_OS_FREEBSD)
#ifdef RT_OS_DARWIN
    AssertReturn(pThis->ppScsiTaskDI, VERR_INTERNAL_ERROR);
#endif
    /*
     * Issue a TEST UNIT READY request.
     */
    /* Presence and change are derived from the command outcome: success means
     * media present; a UNIT ATTENTION sense with one of the ASC/ASCQ pairs
     * below is treated as "media changed". */
    bool fMediaChanged = false;
    bool fMediaPresent = false;
    uint8_t abCmd[16] = { SCSI_TEST_UNIT_READY, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 };
    uint8_t abSense[32];
    int rc2 = DRVHostBaseScsiCmd(pThis, abCmd, 6, PDMBLOCKTXDIR_NONE, NULL, NULL, abSense, sizeof(abSense), 0);
    if (RT_SUCCESS(rc2))
        fMediaPresent = true;
    else if (   rc2 == VERR_UNRESOLVED_ERROR
             && abSense[2] == 6 /* unit attention */
             && (   (abSense[12] == 0x29 && abSense[13] < 5 /* reset */)
                 || (abSense[12] == 0x2a && abSense[13] == 0 /* parameters changed */)                        //???
                 || (abSense[12] == 0x3f && abSense[13] == 0 /* target operating conditions have changed */)  //???
                 || (abSense[12] == 0x3f && abSense[13] == 2 /* changed operating definition */)              //???
                 || (abSense[12] == 0x3f && abSense[13] == 3 /* inquiry parameters changed */)
                 || (abSense[12] == 0x3f && abSense[13] == 5 /* device identifier changed */)
                )
            )
    {
        fMediaPresent = false;
        fMediaChanged = true;
        /** @todo check this media change stuff on Darwin. */
    }
#elif defined(RT_OS_LINUX)
    bool fMediaPresent = ioctl(RTFileToNative(pThis->hFileDevice), CDROM_DRIVE_STATUS, CDSL_CURRENT) == CDS_DISC_OK;
#elif defined(RT_OS_SOLARIS)
    bool fMediaPresent = false;
    bool fMediaChanged = false;
    /* Need to pass the previous state and DKIO_NONE for the first time. */
    /* NOTE(review): this static is shared by all driver instances in the
     * process; with more than one host DVD drive the per-drive state would
     * interfere -- confirm whether multiple instances are possible here. */
    static dkio_state s_DeviceState = DKIO_NONE;
    dkio_state PreviousState = s_DeviceState;
    int rc2 = ioctl(RTFileToNative(pThis->hFileRawDevice), DKIOCSTATE, &s_DeviceState);
    if (rc2 == 0)
    {
        fMediaPresent = (s_DeviceState == DKIO_INSERTED);
        if (PreviousState != s_DeviceState)
            fMediaChanged = true;
    }
#else
# error "Unsupported platform."
#endif
    /* Publish the new state under the instance lock. */
    RTCritSectEnter(&pThis->CritSect);
    int rc = VINF_SUCCESS;
    if (pThis->fMediaPresent != fMediaPresent)
    {
        LogFlow(("drvHostDvdPoll: %d -> %d\n", pThis->fMediaPresent, fMediaPresent));
        pThis->fMediaPresent = false;
        if (fMediaPresent)
            rc = DRVHostBaseMediaPresent(pThis);
        else
            DRVHostBaseMediaNotPresent(pThis);
    }
    else if (fMediaPresent)
    {
        /*
         * Poll for media change.
         */
#if defined(RT_OS_DARWIN) || defined(RT_OS_SOLARIS) || defined(RT_OS_FREEBSD)
        /* taken care of above. */
#elif defined(RT_OS_LINUX)
        bool fMediaChanged = ioctl(RTFileToNative(pThis->hFileDevice), CDROM_MEDIA_CHANGED, CDSL_CURRENT) == 1;
#else
# error "Unsupported platform."
#endif
        if (fMediaChanged)
        {
            /* Simulate a remove + insert cycle so upper layers re-read the medium. */
            LogFlow(("drvHostDVDMediaThread: Media changed!\n"));
            DRVHostBaseMediaNotPresent(pThis);
            rc = DRVHostBaseMediaPresent(pThis);
        }
    }
    RTCritSectLeave(&pThis->CritSect);
    return rc;
}
#endif /* USE_MEDIA_POLLING */
/** @copydoc PDMIBLOCK::pfnSendCmd */
/*
 * Forwards a guest ATAPI/SCSI packet command to the host drive (passthrough).
 * Each platform branch marshals the 12-byte CDB, the data buffer and the sense
 * buffer into its native pass-through ioctl and maps errors back to IPRT codes.
 */
static int drvHostDvdSendCmd(PPDMIBLOCK pInterface, const uint8_t *pbCmd,
                             PDMBLOCKTXDIR enmTxDir, void *pvBuf, uint32_t *pcbBuf,
                             uint8_t *pabSense, size_t cbSense, uint32_t cTimeoutMillies)
{
    PDRVHOSTBASE pThis = PDMIBLOCK_2_DRVHOSTBASE(pInterface);
    int rc;
    LogFlow(("%s: cmd[0]=%#04x txdir=%d pcbBuf=%d timeout=%d\n", __FUNCTION__, pbCmd[0], enmTxDir, *pcbBuf, cTimeoutMillies));
#if defined(RT_OS_DARWIN) || defined(RT_OS_FREEBSD)
    /*
     * Pass the request on to the internal scsi command interface.
     * The command seems to be 12 bytes long, the docs a bit copy&pasty on the command length point...
     */
    if (enmTxDir == PDMBLOCKTXDIR_FROM_DEVICE)
        memset(pvBuf, '\0', *pcbBuf); /* we got read size, but zero it anyway. */
    rc = DRVHostBaseScsiCmd(pThis, pbCmd, 12, PDMBLOCKTXDIR_FROM_DEVICE, pvBuf, pcbBuf, pabSense, cbSense, cTimeoutMillies);
    if (rc == VERR_UNRESOLVED_ERROR)
        /* sense information set */
        rc = VERR_DEV_IO_ERROR;
#elif defined(RT_OS_LINUX)
    int direction;
    struct cdrom_generic_command cgc;
    switch (enmTxDir)
    {
        case PDMBLOCKTXDIR_NONE:
            Assert(*pcbBuf == 0);
            direction = CGC_DATA_NONE;
            break;
        case PDMBLOCKTXDIR_FROM_DEVICE:
            Assert(*pcbBuf != 0);
            Assert(*pcbBuf <= SCSI_MAX_BUFFER_SIZE);
            /* Make sure that the buffer is clear for commands reading
             * data. The actually received data may be shorter than what
             * we expect, and due to the unreliable feedback about how much
             * data the ioctl actually transferred, it's impossible to
             * prevent that. Returning previous buffer contents may cause
             * security problems inside the guest OS, if users can issue
             * commands to the CDROM device. */
            memset(pThis->pbDoubleBuffer, '\0', *pcbBuf);
            direction = CGC_DATA_READ;
            break;
        case PDMBLOCKTXDIR_TO_DEVICE:
            Assert(*pcbBuf != 0);
            Assert(*pcbBuf <= SCSI_MAX_BUFFER_SIZE);
            memcpy(pThis->pbDoubleBuffer, pvBuf, *pcbBuf);
            direction = CGC_DATA_WRITE;
            break;
        default:
            AssertMsgFailed(("enmTxDir invalid!\n"));
            direction = CGC_DATA_NONE;
    }
    /* All transfers go through pbDoubleBuffer (bounce buffer), copied back below. */
    memset(&cgc, '\0', sizeof(cgc));
    memcpy(cgc.cmd, pbCmd, CDROM_PACKET_SIZE);
    cgc.buffer = (unsigned char *)pThis->pbDoubleBuffer;
    cgc.buflen = *pcbBuf;
    cgc.stat = 0;
    Assert(cbSense >= sizeof(struct request_sense));
    cgc.sense = (struct request_sense *)pabSense;
    cgc.data_direction = direction;
    cgc.quiet = false;
    cgc.timeout = cTimeoutMillies;
    rc = ioctl(RTFileToNative(pThis->hFileDevice), CDROM_SEND_PACKET, &cgc);
    if (rc < 0)
    {
        if (errno == EBUSY)
            rc = VERR_PDM_MEDIA_LOCKED;
        else if (errno == ENOSYS)
            rc = VERR_NOT_SUPPORTED;
        else
        {
            rc = RTErrConvertFromErrno(errno);
            /* Synthesize a sense key so the guest sees a meaningful error. */
            if (rc == VERR_ACCESS_DENIED && cgc.sense->sense_key == SCSI_SENSE_NONE)
                cgc.sense->sense_key = SCSI_SENSE_ILLEGAL_REQUEST;
            Log2(("%s: error status %d, rc=%Rrc\n", __FUNCTION__, cgc.stat, rc));
        }
    }
    switch (enmTxDir)
    {
        case PDMBLOCKTXDIR_FROM_DEVICE:
            memcpy(pvBuf, pThis->pbDoubleBuffer, *pcbBuf);
            break;
        default:
            ;
    }
    Log2(("%s: after ioctl: cgc.buflen=%d txlen=%d\n", __FUNCTION__, cgc.buflen, *pcbBuf));
    /* The value of cgc.buflen does not reliably reflect the actual amount
     * of data transferred (for packet commands with little data transfer
     * it's 0). So just assume that everything worked ok. */
#elif defined(RT_OS_SOLARIS)
    struct uscsi_cmd usc;
    union scsi_cdb scdb;
    memset(&usc, 0, sizeof(struct uscsi_cmd));
    memset(&scdb, 0, sizeof(scdb));
    switch (enmTxDir)
    {
        case PDMBLOCKTXDIR_NONE:
            Assert(*pcbBuf == 0);
            usc.uscsi_flags = USCSI_READ;
            /* nothing to do */
            break;
        case PDMBLOCKTXDIR_FROM_DEVICE:
            Assert(*pcbBuf != 0);
            /* Make sure that the buffer is clear for commands reading
             * data. The actually received data may be shorter than what
             * we expect, and due to the unreliable feedback about how much
             * data the ioctl actually transferred, it's impossible to
             * prevent that. Returning previous buffer contents may cause
             * security problems inside the guest OS, if users can issue
             * commands to the CDROM device. */
            memset(pvBuf, '\0', *pcbBuf);
            usc.uscsi_flags = USCSI_READ;
            break;
        case PDMBLOCKTXDIR_TO_DEVICE:
            Assert(*pcbBuf != 0);
            usc.uscsi_flags = USCSI_WRITE;
            break;
        default:
            AssertMsgFailedReturn(("%d\n", enmTxDir), VERR_INTERNAL_ERROR);
    }
    usc.uscsi_flags |= USCSI_RQENABLE;
    usc.uscsi_rqbuf = (char *)pabSense;
    usc.uscsi_rqlen = cbSense;
    usc.uscsi_cdb = (caddr_t)&scdb;
    usc.uscsi_cdblen = 12;
    memcpy (usc.uscsi_cdb, pbCmd, usc.uscsi_cdblen);
    usc.uscsi_bufaddr = (caddr_t)pvBuf;
    usc.uscsi_buflen = *pcbBuf;
    usc.uscsi_timeout = (cTimeoutMillies + 999) / 1000;
    /* We need root privileges for user-SCSI under Solaris. */
#ifdef VBOX_WITH_SUID_WRAPPER
    uid_t effUserID = geteuid();
    solarisEnterRootMode(&effUserID); /** @todo check return code when this really works. */
#endif
    rc = ioctl(RTFileToNative(pThis->hFileRawDevice), USCSICMD, &usc);
#ifdef VBOX_WITH_SUID_WRAPPER
    solarisExitRootMode(&effUserID);
#endif
    if (rc < 0)
    {
        if (errno == EPERM)
            return VERR_PERMISSION_DENIED;
        /* NOTE(review): when the ioctl fails with uscsi_status == 0, rc stays
         * the raw negative ioctl return value rather than an IPRT code --
         * confirm whether that is intentional. */
        if (usc.uscsi_status)
        {
            rc = RTErrConvertFromErrno(errno);
            Log2(("%s: error status. rc=%Rrc\n", __FUNCTION__, rc));
        }
    }
    Log2(("%s: after ioctl: residual buflen=%d original buflen=%d\n", __FUNCTION__, usc.uscsi_resid, usc.uscsi_buflen));
#elif defined(RT_OS_WINDOWS)
    int direction;
    /* Request block: pass-through header followed by an inline sense buffer. */
    struct _REQ
    {
        SCSI_PASS_THROUGH_DIRECT spt;
        uint8_t aSense[64];
    } Req;
    DWORD cbReturned = 0;
    switch (enmTxDir)
    {
        case PDMBLOCKTXDIR_NONE:
            direction = SCSI_IOCTL_DATA_UNSPECIFIED;
            break;
        case PDMBLOCKTXDIR_FROM_DEVICE:
            Assert(*pcbBuf != 0);
            /* Make sure that the buffer is clear for commands reading
             * data. The actually received data may be shorter than what
             * we expect, and due to the unreliable feedback about how much
             * data the ioctl actually transferred, it's impossible to
             * prevent that. Returning previous buffer contents may cause
             * security problems inside the guest OS, if users can issue
             * commands to the CDROM device. */
            memset(pvBuf, '\0', *pcbBuf);
            direction = SCSI_IOCTL_DATA_IN;
            break;
        case PDMBLOCKTXDIR_TO_DEVICE:
            direction = SCSI_IOCTL_DATA_OUT;
            break;
        default:
            AssertMsgFailed(("enmTxDir invalid!\n"));
            direction = SCSI_IOCTL_DATA_UNSPECIFIED;
    }
    memset(&Req, '\0', sizeof(Req));
    Req.spt.Length = sizeof(Req.spt);
    Req.spt.CdbLength = 12;
    memcpy(Req.spt.Cdb, pbCmd, Req.spt.CdbLength);
    Req.spt.DataBuffer = pvBuf;
    Req.spt.DataTransferLength = *pcbBuf;
    Req.spt.DataIn = direction;
    Req.spt.TimeOutValue = (cTimeoutMillies + 999) / 1000; /* Convert to seconds */
    Assert(cbSense <= sizeof(Req.aSense));
    Req.spt.SenseInfoLength = (UCHAR)RT_MIN(sizeof(Req.aSense), cbSense);
    Req.spt.SenseInfoOffset = RT_OFFSETOF(struct _REQ, aSense);
    if (DeviceIoControl((HANDLE)RTFileToNative(pThis->hFileDevice), IOCTL_SCSI_PASS_THROUGH_DIRECT,
                        &Req, sizeof(Req), &Req, sizeof(Req), &cbReturned, NULL))
    {
        if (cbReturned > RT_OFFSETOF(struct _REQ, aSense))
            memcpy(pabSense, Req.aSense, cbSense);
        else
            memset(pabSense, '\0', cbSense);
        /* Windows shares the property of not properly reflecting the actually
         * transferred data size. See above. Assume that everything worked ok.
         * Except if there are sense information. */
        rc = (pabSense[2] & 0x0f) == SCSI_SENSE_NONE
                 ? VINF_SUCCESS
                 : VERR_DEV_IO_ERROR;
    }
    else
        rc = RTErrConvertFromWin32(GetLastError());
    Log2(("%s: scsistatus=%d bytes returned=%d tlength=%d\n", __FUNCTION__, Req.spt.ScsiStatus, cbReturned, Req.spt.DataTransferLength));
#else
# error "Unsupported platform."
#endif
    /* Extra debug logging for GET EVENT STATUS NOTIFICATION responses. */
    if (pbCmd[0] == SCSI_GET_EVENT_STATUS_NOTIFICATION)
    {
        uint8_t *pbBuf = (uint8_t*)pvBuf;
        Log2(("Event Status Notification class=%#02x supported classes=%#02x\n", pbBuf[2], pbBuf[3]));
        if (RT_BE2H_U16(*(uint16_t*)pbBuf) >= 6)
            Log2((" event %#02x %#02x %#02x %#02x\n", pbBuf[4], pbBuf[5], pbBuf[6], pbBuf[7]));
    }
    LogFlow(("%s: rc=%Rrc\n", __FUNCTION__, rc));
    return rc;
}
#ifdef VBOX_WITH_SUID_WRAPPER
/* These functions would have to go into a separate solaris binary with
* the setuid permission set, which would run the user-SCSI ioctl and
* return the value. BUT... this might be prohibitively slow.
*/
# ifdef RT_OS_SOLARIS
/**
* Checks if the current user is authorized using Solaris' role-based access control.
* Made as a separate function with so that it need not be invoked each time we need
* to gain root access.
*
* @returns VBox error code.
*/
static int solarisCheckUserAuth()
{
    /* Solaris role-based access control (RBAC): the invoking (real) user
     * needs the "solaris.device.cdrw" authorization. */
    struct passwd *pPass = getpwuid(getuid());
    if (pPass == NULL)
        return VERR_PERMISSION_DENIED;
    if (chkauthattr("solaris.device.cdrw", pPass->pw_name) == 0)
        return VERR_PERMISSION_DENIED;
    return VINF_SUCCESS;
}
/**
* Setuid wrapper to gain root access.
*
* @returns VBox error code.
* @param pEffUserID Pointer to effective user ID.
*/
static int solarisEnterRootMode(uid_t *pEffUserID)
{
    /* Already running as root -- nothing to raise. */
    if (*pEffUserID == 0)
        return VINF_SUCCESS;

    /* Raise the effective UID via the setuid bit. */
    if (seteuid(0) != 0)
        return VERR_PERMISSION_DENIED;
    *pEffUserID = 0;
    return VINF_SUCCESS;
}
/**
* Setuid wrapper to relinquish root access.
*
* @returns VBox error code.
* @param pEffUserID Pointer to effective user ID.
*/
static int solarisExitRootMode(uid_t *pEffUserID)
{
    /* Not elevated -- nothing to drop. */
    if (*pEffUserID != 0)
        return VINF_SUCCESS;

    /* Drop the effective UID back to the real user. */
    uid_t realID = getuid();
    if (seteuid(realID) != 0)
        return VERR_PERMISSION_DENIED;
    *pEffUserID = realID;
    return VINF_SUCCESS;
}
# endif /* RT_OS_SOLARIS */
#endif /* VBOX_WITH_SUID_WRAPPER */
/* -=-=-=-=- driver interface -=-=-=-=- */
/** @copydoc FNPDMDRVDESTRUCT */
DECLCALLBACK(void) drvHostDvdDestruct(PPDMDRVINS pDrvIns)
{
#ifdef RT_OS_LINUX
    /* Release the bounce buffer used by the CDROM_SEND_PACKET passthrough path. */
    PDRVHOSTBASE pThis = PDMINS_2_DATA(pDrvIns, PDRVHOSTBASE);
    uint8_t *pbBuf = pThis->pbDoubleBuffer;
    if (pbBuf != NULL)
    {
        pThis->pbDoubleBuffer = NULL;
        RTMemFree(pbBuf);
    }
#endif
    /* Hand the rest of the teardown to the common base driver. */
    return DRVHostBaseDestruct(pDrvIns);
}
/**
* Construct a host dvd drive driver instance.
*
* @copydoc FNPDMDRVCONSTRUCT
*/
static DECLCALLBACK(int) drvHostDvdConstruct(PPDMDRVINS pDrvIns, PCFGMNODE pCfg, uint32_t fFlags)
{
    PDRVHOSTBASE pThis = PDMINS_2_DATA(pDrvIns, PDRVHOSTBASE);
    LogFlow(("drvHostDvdConstruct: iInstance=%d\n", pDrvIns->iInstance));
    /*
     * Init instance data.
     */
    int rc = DRVHostBaseInitData(pDrvIns, pCfg, PDMBLOCKTYPE_DVD);
    if (RT_SUCCESS(rc))
    {
        /*
         * Validate configuration.
         */
        if (CFGMR3AreValuesValid(pCfg, "Path\0Interval\0Locked\0BIOSVisible\0AttachFailError\0Passthrough\0"))
        {
            /*
             * Override stuff.
             */
#ifdef RT_OS_LINUX
            /* Bounce buffer for the CDROM_SEND_PACKET passthrough path. */
            pThis->pbDoubleBuffer = (uint8_t *)RTMemAlloc(SCSI_MAX_BUFFER_SIZE);
            if (!pThis->pbDoubleBuffer)
                return VERR_NO_MEMORY; /* NOTE(review): early return skips the cleanup at the bottom -- confirm intended. */
#endif
            /* Fix: initialize to false.  Previously fPassthrough was left
             * uninitialized; when the "Passthrough" key is absent (or the
             * query fails) the USE_MEDIA_POLLING check below read an
             * indeterminate value.  Absent key now means: no passthrough,
             * polling enabled. */
            bool fPassthrough = false;
            rc = CFGMR3QueryBool(pCfg, "Passthrough", &fPassthrough);
            if (RT_SUCCESS(rc) && fPassthrough)
            {
                pThis->IBlock.pfnSendCmd = drvHostDvdSendCmd;
                /* Passthrough requires opening the device in R/W mode. */
                pThis->fReadOnlyConfig = false;
#ifdef VBOX_WITH_SUID_WRAPPER  /* Solaris setuid for Passthrough mode. */
                rc = solarisCheckUserAuth();
                if (RT_FAILURE(rc))
                {
                    Log(("DVD: solarisCheckUserAuth failed. Permission denied!\n"));
                    return rc;
                }
#endif /* VBOX_WITH_SUID_WRAPPER */
            }
            pThis->IMount.pfnUnmount = drvHostDvdUnmount;
            pThis->pfnDoLock = drvHostDvdDoLock;
#ifdef USE_MEDIA_POLLING
            /* Passthrough mode does its own change detection; don't poll. */
            if (!fPassthrough)
                pThis->pfnPoll = drvHostDvdPoll;
            else
                pThis->pfnPoll = NULL;
#endif
#ifdef RT_OS_LINUX
            pThis->pfnGetMediaSize = drvHostDvdGetMediaSize;
#endif
            /*
             * 2nd init part.
             */
            rc = DRVHostBaseInitFinish(pThis);
        }
        else
        {
            pThis->fAttachFailError = true;
            rc = VERR_PDM_DRVINS_UNKNOWN_CFG_VALUES;
        }
    }
    if (RT_FAILURE(rc))
    {
        if (!pThis->fAttachFailError)
        {
            /* Suppressing the attach failure error must not affect the normal
             * DRVHostBaseDestruct, so reset this flag below before leaving. */
            pThis->fKeepInstance = true;
            rc = VINF_SUCCESS;
        }
        DRVHostBaseDestruct(pDrvIns);
        pThis->fKeepInstance = false;
    }
    LogFlow(("drvHostDvdConstruct: returns %Rrc\n", rc));
    return rc;
}
/**
* Block driver registration record.
*/
const PDMDRVREG g_DrvHostDVD =
{
    /* u32Version */
    PDM_DRVREG_VERSION,
    /* szName */
    "HostDVD",
    /* szRCMod: no raw-mode context module */
    "",
    /* szR0Mod: no ring-0 module */
    "",
    /* pszDescription */
    "Host DVD Block Driver.",
    /* fFlags */
    PDM_DRVREG_FLAGS_HOST_BITS_DEFAULT,
    /* fClass. */
    PDM_DRVREG_CLASS_BLOCK,
    /* cMaxInstances: unlimited */
    ~0U,
    /* cbInstance */
    sizeof(DRVHOSTBASE),
    /* pfnConstruct */
    drvHostDvdConstruct,
    /* pfnDestruct */
    drvHostDvdDestruct,
    /* pfnRelocate */
    NULL,
    /* pfnIOCtl */
    NULL,
    /* pfnPowerOn */
    NULL,
    /* pfnReset */
    NULL,
    /* pfnSuspend */
    NULL,
    /* pfnResume */
    NULL,
    /* pfnAttach */
    NULL,
    /* pfnDetach */
    NULL,
    /* pfnPowerOff */
    NULL,
    /* pfnSoftReset */
    NULL,
    /* u32EndVersion */
    PDM_DRVREG_VERSION
};
|
menify/sandbox
|
aql/benchmark/lib_49/class_5.cpp
|
#include "class_5.h"
#include "class_0.h"
#include "class_5.h"
#include "class_4.h"
#include "class_9.h"
#include "class_7.h"
#include <lib_34/class_4.h>
#include <lib_10/class_2.h>
#include <lib_44/class_9.h>
#include <lib_24/class_7.h>
#include <lib_13/class_3.h>
// Default constructor: intentionally empty (generated benchmark stub).
class_5::class_5() {}
// Destructor: nothing to release.
class_5::~class_5() {}
|
iovar/experiments
|
javascript/hangman_bb/js/services.js
|
/**
 * wordsService — fetches the word list (mwords.json) once and hands out
 * random words for the hangman game.
 */
app.service("wordsService", function($http) {
    var _words = {
        // Populated asynchronously by the $http request below; null until loaded.
        words: null,
        // Returns a random word, or '' when the list has not loaded yet.
        getWord: function() {
            var word = '',
                index = 0;
            if(_words.words !== null && angular.isArray(_words.words)) {
                index = Math.floor((Math.random() * _words.words.length));
                word = _words.words[index];
            }
            return word;
        },
        // True once the word list has been fetched.
        ready: function() {
            // BUGFIX: was `return words !== null;`, referencing an undefined
            // global (ReferenceError when called). Check the service's own
            // word list instead.
            return _words.words !== null;
        }
    };
    $http.get("mwords.json").success( function(data) {
        //send broadcast so the main controller
        //can close the loading dialog
        _words.words = data;
    });
    return _words;
});
// gameService — hangman game state machine. Mutates its own state and notifies
// controllers via $rootScope broadcasts ("letterMiss", "letterHit", "endGame").
app.service("gameService", function($rootScope,wordsService) {
    var _game = {
        state: 0, //0 not-in-game, 1 playing, 2 finished
        word: '',          // the word to guess (original casing)
        revealedWord: '',  // same length as word; unguessed positions are '_'
        lettersFound: [],  // lowercase letters guessed correctly so far
        lettersMissed: [], // lowercase letters guessed wrongly so far
        // see if letter is in word
        // trigger game end, if word
        // is found or 7 mistakes are made
        //
        // returns, -1 on new error, 1 on new hit, 0 otherwise
        // (returns undefined when not in state 1 — falsy, like 0, for callers)
        check: function(letter) {
            if(_game.state !== 1) {
                return;
            }
            // Matching is case-insensitive; dedup against previous guesses.
            var word=_game.word.toLowerCase(),
                _letter=letter.toLowerCase();
            //new mistake
            if(word.indexOf(_letter) === -1 &&
               _game.lettersMissed.indexOf(_letter) === -1) {
                _game.lettersMissed.push(_letter);
                $rootScope.$broadcast("letterMiss", {
                    number: _game.lettersMissed.length
                });
                // Seventh miss ends the game as a loss.
                if(_game.lettersMissed.length >=7){
                    _game.state = 2;
                    $rootScope.$broadcast("endGame", {
                        word: _game.word,
                        won: 'lost'
                    });
                }
                return -1;
            }
            //new correct
            else if(word.indexOf(_letter) >=0 &&
                    _game.lettersFound.indexOf(_letter) === -1) {
                _game.revealLetter(_letter);
                _game.lettersFound.push(_letter);
                $rootScope.$broadcast("letterHit", {
                    revealedWord: _game.revealedWord
                });
                // Fully revealed word ends the game as a win.
                if(_game.revealedWord === _game.word) {
                    _game.state = 2;
                    $rootScope.$broadcast("endGame", {
                        word: _game.word,
                        won: 'won'
                    });
                }
                return 1;
            }
            // Repeated guess (already found or already missed): no-op.
            return 0;
        },
        // Uncovers every position matching `letter` (expects a lowercase letter),
        // preserving the word's original casing in revealedWord.
        revealLetter: function(letter) {
            var word = _game.word,
                lc_word = word.toLowerCase(),
                revealedWord = _game.revealedWord,
                newRevealed = "";
            for(var i=0;i<word.length;i++) {
                if(lc_word[i] === letter) {
                    newRevealed += word[i];
                }
                else {
                    newRevealed += revealedWord[i];
                }
            }
            _game.revealedWord = newRevealed;
        },
        // Resets state, picks a fresh word and masks it with underscores.
        start: function() {
            _game.clear();
            _game.state = 1;
            _game.word = wordsService.getWord();
            for(var i=0;i<_game.word.length;i++) {
                _game.revealedWord+="_";
            }
        },
        // Back to not-in-game; note _game.word is intentionally left untouched.
        clear: function() {
            _game.state = 0;
            _game.revealedWord = '';
            _game.lettersFound = [];
            _game.lettersMissed = [];
        }
    };
    return _game;
});
|
blocklang/blocklang.com
|
server/src/main/java/com/blocklang/core/runner/common/CliCommand.java
|
<reponame>blocklang/blocklang.com<gh_stars>10-100
package com.blocklang.core.runner.common;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.file.Path;
import org.apache.commons.lang3.SystemUtils;
import org.springframework.util.Assert;
/**
 * Thin wrapper around {@link ProcessBuilder} for running an external CLI tool
 * and streaming its combined stdout/stderr to a {@link CliLogger}.
 */
public class CliCommand {

    /** Sink for the child process' output lines and for execution errors. */
    private final CliLogger logger;

    public CliCommand(CliLogger logger) {
        this.logger = logger;
    }

    /**
     * Runs {@code commands} in {@code workingDirectory} and blocks until the
     * process terminates, logging each output line as it arrives.
     *
     * @param workingDirectory directory the child process is started in
     * @param commands         executable followed by its arguments; must not be empty
     * @return {@code true} if the process exited with code 0, {@code false} on a
     *         non-zero exit code, an I/O failure, or interruption
     */
    public boolean run(Path workingDirectory, String... commands) {
        Assert.isTrue(commands.length > 0, "至少要包含一个命令");
        // Windows/Linux compatibility: npm-style launchers need the ".cmd"
        // suffix on Windows. Work on a copy so the caller's array is not mutated.
        String[] cmds = commands.clone();
        cmds[0] = toPlatformCommand(cmds[0]);
        ProcessBuilder processBuilder = new ProcessBuilder(cmds).directory(workingDirectory.toFile());
        try {
            // Merge stderr into stdout so a single reader captures everything.
            processBuilder.redirectErrorStream(true);
            Process process = processBuilder.start();
            try (BufferedReader outReader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
                outReader.lines().forEach(logger::log);
            }
            // waitFor() returns immediately when the process already exited.
            return process.waitFor() == 0;
        } catch (IOException e) {
            logger.error(e);
        } catch (InterruptedException e) {
            // Fix: restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
            logger.error(e);
        }
        return false;
    }

    /** Appends ".cmd" on Windows (renamed from getCommandName per the FIXME). */
    private String toPlatformCommand(String command) {
        return SystemUtils.IS_OS_WINDOWS ? command + ".cmd" : command;
    }
}
|
xraycat123/spaCy
|
spacy/tests/lang/ro/test_lemmatizer.py
|
# coding: utf-8
from __future__ import unicode_literals
import pytest
@pytest.mark.parametrize('string,lemma', [('câini', 'câine'),
                                          ('expedițiilor', 'expediție'),
                                          ('pensete', 'pensetă'),
                                          ('erau', 'fi')])
def test_lemmatizer_lookup_assigns(ro_tokenizer, string, lemma):
    """Tokenizing a single Romanian word should assign the expected lemma
    to its first token (``ro_tokenizer`` is a pytest fixture supplied by
    the test suite's conftest)."""
    tokens = ro_tokenizer(string)
    assert tokens[0].lemma_ == lemma
|
KaungHtetLin/PADC-MM-News
|
app/src/main/java/net/kaunghtetlin/sfc/receivers/InternetStateReceiver.java
|
package net.kaunghtetlin.sfc.receivers;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.widget.Toast;
/**
* Created by <NAME> on 11/25/2017.
*/
public class InternetStateReceiver extends BroadcastReceiver {
    // Receives connectivity-change broadcasts. All user-visible handling is
    // currently commented out below, so this receiver is effectively a no-op
    // stub; the ConnectivityManager lookup is kept for when it is re-enabled.
    @Override
    public void onReceive(Context context, Intent intent) {
        ConnectivityManager connectivityManager = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
        /***
         * need to put permission ACCESS_NETWORK_STATE
         */
       /* NetworkInfo networkInfo=connectivityManager.getActiveNetworkInfo();
        if(networkInfo==null){
            Toast.makeText(context,"Your device is no longer connected to internet",Toast.LENGTH_SHORT).show();
        }
        else if(networkInfo.isConnected()){
            Toast.makeText(context, "Your device is connected to internet", Toast.LENGTH_SHORT).show();
        }*/
    }
}
|
dreamsxin/ultimatepp
|
uppdev/plugin/cairo/lib/cairo-xlib-display.c
|
/* Cairo - a vector graphics library with display and print output
*
* Copyright © 2007 <NAME>
*
* This library is free software; you can redistribute it and/or
* modify it either under the terms of the GNU Lesser General Public
* License version 2.1 as published by the Free Software Foundation
* (the "LGPL") or, at your option, under the terms of the Mozilla
* Public License Version 1.1 (the "MPL"). If you do not alter this
* notice, a recipient may use your version of this file under either
* the MPL or the LGPL.
*
* You should have received a copy of the LGPL along with this library
* in the file COPYING-LGPL-2.1; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
* You should have received a copy of the MPL along with this library
* in the file COPYING-MPL-1.1
*
* The contents of this file are subject to the Mozilla Public License
* Version 1.1 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTY
* OF ANY KIND, either express or implied. See the LGPL or the MPL for
* the specific language governing rights and limitations.
*
* The Original Code is the cairo graphics library.
*
* The Initial Developer of the Original Code is <NAME>.
*
* Contributor(s):
* <NAME> <<EMAIL>>, Mozilla Corporation
*/
#include "cairoint.h"
#include "cairo-xlib-private.h"
#include "cairo-xlib-xrender-private.h"
#include <fontconfig/fontconfig.h>
#include <X11/Xlibint.h> /* For XESetCloseDisplay */
typedef int (*cairo_xlib_error_func_t) (Display *display,
XErrorEvent *event);
struct _cairo_xlib_job {
cairo_xlib_job_t *next;
enum {
RESOURCE,
WORK
} type;
union {
struct {
cairo_xlib_notify_resource_func notify;
XID xid;
} resource;
struct {
cairo_xlib_notify_func notify;
void *data;
void (*destroy) (void *);
} work;
} func;
};
static cairo_xlib_display_t *_cairo_xlib_display_list;
static void
_cairo_xlib_remove_close_display_hook_internal (cairo_xlib_display_t *display,
cairo_xlib_hook_t *hook);
/* Runs all registered close-display hooks and marks the display closed.
 * The display mutex is dropped around each hook invocation so hooks may
 * call back into the hook-removal API without deadlocking. */
static void
_cairo_xlib_call_close_display_hooks (cairo_xlib_display_t *display)
{
    cairo_xlib_screen_info_t *screen;
    cairo_xlib_hook_t *hook;
    /* call all registered shutdown routines */
    CAIRO_MUTEX_LOCK (display->mutex);
    for (screen = display->screens; screen != NULL; screen = screen->next)
	_cairo_xlib_screen_info_close_display (screen);
    while (TRUE) {
	hook = display->close_display_hooks;
	if (hook == NULL)
	    break;
	/* Detach the hook before unlocking so it cannot be run twice. */
	_cairo_xlib_remove_close_display_hook_internal (display, hook);
	CAIRO_MUTEX_UNLOCK (display->mutex);
	hook->func (display, hook);
	CAIRO_MUTEX_LOCK (display->mutex);
    }
    display->closed = TRUE;
    CAIRO_MUTEX_UNLOCK (display->mutex);
}
/* Detaches the whole screen list under the display mutex, then destroys
 * the entries outside the lock. */
static void
_cairo_xlib_display_discard_screens (cairo_xlib_display_t *display)
{
    cairo_xlib_screen_info_t *list;

    CAIRO_MUTEX_LOCK (display->mutex);
    list = display->screens;
    display->screens = NULL;
    CAIRO_MUTEX_UNLOCK (display->mutex);

    while (list != NULL) {
	cairo_xlib_screen_info_t *next = list->next;
	_cairo_xlib_screen_info_destroy (list);
	list = next;
    }
}
/* Takes an additional reference on the display; the caller must already
 * hold one (asserted). Returns the same display for convenience. */
cairo_xlib_display_t *
_cairo_xlib_display_reference (cairo_xlib_display_t *display)
{
    assert (CAIRO_REFERENCE_COUNT_HAS_REFERENCE (&display->ref_count));
    _cairo_reference_count_inc (&display->ref_count);
    return display;
}
/* Drops one reference; on the last reference, frees all still-queued work
 * jobs (running only their destroy callbacks, not the notifies) and
 * releases the display structure itself. */
void
_cairo_xlib_display_destroy (cairo_xlib_display_t *display)
{
    assert (CAIRO_REFERENCE_COUNT_HAS_REFERENCE (&display->ref_count));
    if (! _cairo_reference_count_dec_and_test (&display->ref_count))
	return;
    /* destroy all outstanding notifies */
    while (display->workqueue != NULL) {
	cairo_xlib_job_t *job = display->workqueue;
	display->workqueue = job->next;
	if (job->type == WORK && job->func.work.destroy != NULL)
	    job->func.work.destroy (job->func.work.data);
	/* RESOURCE jobs are simply dropped: the display is going away. */
	_cairo_freelist_free (&display->wq_freelist, job);
    }
    _cairo_freelist_fini (&display->wq_freelist);
    CAIRO_MUTEX_FINI (display->mutex);
    free (display);
}
/* Temporary Xlib error handler that swallows all errors; installed while
 * flushing pending work during display close-down. */
static int
_noop_error_handler (Display     *display,
		     XErrorEvent *event)
{
    return False;		/* return value is ignored */
}
/* XESetCloseDisplay callback: runs cairo's per-display shutdown when the
 * application calls XCloseDisplay().  Flushes pending work (with XErrors
 * suppressed), unhooks the display from the global list and drops the
 * reference taken on its behalf in _cairo_xlib_display_get(). */
static int
_cairo_xlib_close_display (Display *dpy, XExtCodes *codes)
{
    cairo_xlib_display_t *display, **prev, *next;
    cairo_xlib_error_func_t old_handler;
    CAIRO_MUTEX_LOCK (_cairo_xlib_display_mutex);
    for (display = _cairo_xlib_display_list; display; display = display->next)
	if (display->display == dpy)
	    break;
    CAIRO_MUTEX_UNLOCK (_cairo_xlib_display_mutex);
    if (display == NULL)
	return 0;
    /* protect the notifies from triggering XErrors */
    XSync (dpy, False);
    old_handler = XSetErrorHandler (_noop_error_handler);
    _cairo_xlib_display_notify (display);
    _cairo_xlib_call_close_display_hooks (display);
    _cairo_xlib_display_discard_screens (display);
    /* catch any that arrived before marking the display as closed */
    _cairo_xlib_display_notify (display);
    XSync (dpy, False);
    XSetErrorHandler (old_handler);
    /*
     * Unhook from the global list
     */
    CAIRO_MUTEX_LOCK (_cairo_xlib_display_mutex);
    prev = &_cairo_xlib_display_list;
    for (display = _cairo_xlib_display_list; display; display = next) {
	next = display->next;
	if (display->display == dpy) {
	    *prev = next;
	    break;
	} else
	    prev = &display->next;
    }
    CAIRO_MUTEX_UNLOCK (_cairo_xlib_display_mutex);
    assert (display != NULL);
    _cairo_xlib_display_destroy (display);
    /* Return value in accordance with requirements of
     * XESetCloseDisplay */
    return 0;
}
/* Returns the (referenced) cairo display wrapper for @dpy, creating and
 * registering it on first use.  On creation this also installs the
 * close-display hook and probes the server for the buggy-repeat
 * workaround.  Returns NULL on allocation failure. */
cairo_xlib_display_t *
_cairo_xlib_display_get (Display *dpy)
{
    cairo_xlib_display_t *display;
    cairo_xlib_display_t **prev;
    XExtCodes *codes;
    int major_unused, minor_unused;
    /* There is an apparent deadlock between this mutex and the
     * mutex for the display, but it's actually safe. For the
     * app to call XCloseDisplay() while any other thread is
     * inside this function would be an error in the logic
     * app, and the CloseDisplay hook is the only other place we
     * acquire this mutex.
     */
    CAIRO_MUTEX_LOCK (_cairo_xlib_display_mutex);
    for (prev = &_cairo_xlib_display_list; (display = *prev); prev = &(*prev)->next)
    {
	if (display->display == dpy) {
	    /*
	     * MRU the list
	     */
	    if (prev != &_cairo_xlib_display_list) {
		*prev = display->next;
		display->next = _cairo_xlib_display_list;
		_cairo_xlib_display_list = display;
	    }
	    break;
	}
    }
    if (display != NULL) {
	display = _cairo_xlib_display_reference (display);
	goto UNLOCK;
    }
    display = malloc (sizeof (cairo_xlib_display_t));
    if (display == NULL) {
	_cairo_error_throw (CAIRO_STATUS_NO_MEMORY);
	goto UNLOCK;
    }
    /* Xlib calls out to the extension close_display hooks in LIFO
     * order. So we have to ensure that all extensions that we depend
     * on in our close_display hook are properly initialized before we
     * add our hook. For now, that means Render, so we call into its
     * QueryVersion function to ensure it gets initialized.
     */
    XRenderQueryVersion (dpy, &major_unused, &minor_unused);
    codes = XAddExtension (dpy);
    if (codes == NULL) {
	_cairo_error_throw (CAIRO_STATUS_NO_MEMORY);
	free (display);
	display = NULL;
	goto UNLOCK;
    }
    XESetCloseDisplay (dpy, codes->extension, _cairo_xlib_close_display);
    _cairo_freelist_init (&display->wq_freelist, sizeof (cairo_xlib_job_t));
    CAIRO_REFERENCE_COUNT_INIT (&display->ref_count, 2); /* add one for the CloseDisplay */
    CAIRO_MUTEX_INIT (display->mutex);
    display->display = dpy;
    display->screens = NULL;
    display->workqueue = NULL;
    display->close_display_hooks = NULL;
    display->closed = FALSE;
    memset (display->cached_xrender_formats, 0,
	    sizeof (display->cached_xrender_formats));
    display->buggy_repeat = FALSE;
    /* This buggy_repeat condition is very complicated because there
     * are multiple X server code bases (with multiple versioning
     * schemes within a code base), and multiple bugs.
     *
     * The X servers:
     *
     *    1. The Vendor=="XFree86" code base with release numbers such
     *    as 4.7.0 (VendorRelease==40700000).
     *
     *    2. The Vendor=="X.Org" code base (a descendant of the
     *    XFree86 code base). It originally had things like
     *    VendorRelease==60700000 for release 6.7.0 but then changed
     *    its versioning scheme so that, for example,
     *    VendorRelease==10400000 for the 1.4.0 X server within the
     *    X.Org 7.3 release.
     *
     * The bugs:
     *
     *    1. The original bug that led to the buggy_repeat
     *    workaround. This was a bug that Owen Taylor investigated,
     *    understood well, and characterized against various X
     *    servers. Confirmed X servers with this bug include:
     *
     *		"XFree86" <= 40500000
     *		"X.Org" <= 60802000 (only with old numbering >= 60700000)
     *
     *    2. A separate bug resulting in a crash of the X server when
     *    using cairo's extend-reflect test case, (which, surprisingly
     *    enough was not passing RepeatReflect to the X server, but
     *    instead using RepeatNormal in a workaround). Nobody to date
     *    has understood the bug well, but it appears to be gone as of
     *    the X.Org 1.4.0 server. This bug is coincidentally avoided
     *    by using the same buggy_repeat workaround. Confirmed X
     *    servers with this bug include:
     *
     *		"X.org" == 60900000 (old versioning scheme)
     *		"X.org"  < 10400000 (new numbering scheme)
     *
     *    For the old-versioning-scheme X servers we don't know
     *    exactly when the second bug started, but since bug 1 is
     *    present through 6.8.2 and bug 2 is present in 6.9.0 it seems
     *    safest to just blacklist all old-versioning-scheme X servers,
     *    (just using VendorRelease < 70000000), as buggy_repeat=TRUE.
     */
    if (strstr (ServerVendor (dpy), "X.Org") != NULL) {
	if (VendorRelease (dpy) >= 60700000 && VendorRelease (dpy) < 70000000)
	    display->buggy_repeat = TRUE;
	if (VendorRelease (dpy) < 10400000)
	    display->buggy_repeat = TRUE;
    } else if (strstr (ServerVendor (dpy), "XFree86") != NULL) {
	if (VendorRelease (dpy) <= 40500000)
	    display->buggy_repeat = TRUE;
    }
    display->next = _cairo_xlib_display_list;
    _cairo_xlib_display_list = display;
  UNLOCK:
    CAIRO_MUTEX_UNLOCK (_cairo_xlib_display_mutex);
    return display;
}
/* Registers @hook to be run when the display is closed, pushing it onto
 * the head of the doubly-linked hook list under the display mutex. */
void
_cairo_xlib_add_close_display_hook (cairo_xlib_display_t *display,
				    cairo_xlib_hook_t *hook)
{
    CAIRO_MUTEX_LOCK (display->mutex);

    hook->prev = NULL;
    hook->next = display->close_display_hooks;
    if (display->close_display_hooks != NULL)
	display->close_display_hooks->prev = hook;
    display->close_display_hooks = hook;

    CAIRO_MUTEX_UNLOCK (display->mutex);
}
/* display->mutex must be held */
static void
_cairo_xlib_remove_close_display_hook_internal (cairo_xlib_display_t *display,
						cairo_xlib_hook_t *hook)
{
    /* Unlink @hook from the doubly-linked list of close-display hooks.
     * If the hook is neither the list head nor has a predecessor it is
     * not currently on the list, and the unlink is a harmless no-op --
     * removal is intentionally idempotent. */
    if (display->close_display_hooks == hook)
	display->close_display_hooks = hook->next;
    else if (hook->prev != NULL)
	hook->prev->next = hook->next;
    if (hook->next != NULL)
	hook->next->prev = hook->prev;
    /* Clear the link fields so the hook can be safely re-added later. */
    hook->prev = NULL;
    hook->next = NULL;
}
/* Thread-safe wrapper: takes display->mutex and unregisters @hook
 * (a no-op if the hook is not currently registered). */
void
_cairo_xlib_remove_close_display_hook (cairo_xlib_display_t *display,
				       cairo_xlib_hook_t *hook)
{
    CAIRO_MUTEX_LOCK (display->mutex);
    _cairo_xlib_remove_close_display_hook_internal (display, hook);
    CAIRO_MUTEX_UNLOCK (display->mutex);
}
/* Queue a deferred RESOURCE job: @notify will later be called with @xid
 * (e.g. to free a server-side resource) when the work queue is drained
 * by _cairo_xlib_display_notify().
 *
 * Returns CAIRO_STATUS_SUCCESS on success, or CAIRO_STATUS_NO_MEMORY if
 * the display has already been closed or the job node cannot be
 * allocated from the freelist. */
cairo_status_t
_cairo_xlib_display_queue_resource (cairo_xlib_display_t *display,
	                            cairo_xlib_notify_resource_func notify,
				    XID xid)
{
    cairo_xlib_job_t *job = NULL;
    cairo_status_t status = CAIRO_STATUS_NO_MEMORY;

    CAIRO_MUTEX_LOCK (display->mutex);
    if (! display->closed)
	job = _cairo_freelist_alloc (&display->wq_freelist);
    if (job != NULL) {
	job->type = RESOURCE;
	job->func.resource.xid = xid;
	job->func.resource.notify = notify;
	/* Push onto the head; _cairo_xlib_display_notify reverses to FIFO. */
	job->next = display->workqueue;
	display->workqueue = job;
	status = CAIRO_STATUS_SUCCESS;
    }
    CAIRO_MUTEX_UNLOCK (display->mutex);

    return status;
}
/* Queue a deferred WORK job: @notify will later be called with @data when
 * the work queue is drained, after which @destroy (if non-NULL) is called
 * to release @data.
 *
 * Returns CAIRO_STATUS_SUCCESS on success, or CAIRO_STATUS_NO_MEMORY if
 * the display has already been closed or the job node cannot be
 * allocated from the freelist. */
cairo_status_t
_cairo_xlib_display_queue_work (cairo_xlib_display_t *display,
	                        cairo_xlib_notify_func notify,
				void *data,
				void (*destroy) (void *))
{
    cairo_xlib_job_t *job = NULL;
    cairo_status_t status = CAIRO_STATUS_NO_MEMORY;

    CAIRO_MUTEX_LOCK (display->mutex);
    if (! display->closed)
	job = _cairo_freelist_alloc (&display->wq_freelist);
    if (job != NULL) {
	job->type = WORK;
	job->func.work.data = data;
	job->func.work.notify = notify;
	job->func.work.destroy = destroy;
	/* Push onto the head; _cairo_xlib_display_notify reverses to FIFO. */
	job->next = display->workqueue;
	display->workqueue = job;
	status = CAIRO_STATUS_SUCCESS;
    }
    CAIRO_MUTEX_UNLOCK (display->mutex);

    return status;
}
/* Drain the display's deferred work queue, invoking each queued WORK or
 * RESOURCE callback in FIFO order.  Callbacks run with display->mutex
 * dropped, so they may queue more work; the outer loop keeps draining
 * until the queue stays empty. */
void
_cairo_xlib_display_notify (cairo_xlib_display_t *display)
{
    cairo_xlib_job_t *jobs, *job, *freelist;
    Display *dpy = display->display;
    CAIRO_MUTEX_LOCK (display->mutex);
    jobs = display->workqueue;
    while (jobs != NULL) {
	/* Detach the whole queue, then release the lock so callbacks can
	 * queue further work without deadlocking. */
	display->workqueue = NULL;
	CAIRO_MUTEX_UNLOCK (display->mutex);
	/* reverse the list to obtain FIFO order */
	job = NULL;
	do {
	    cairo_xlib_job_t *next = jobs->next;
	    jobs->next = job;
	    job = jobs;
	    jobs = next;
	} while (jobs != NULL);
	/* 'job' is now the FIFO head; remember it so the nodes can be
	 * returned to the freelist after dispatch. */
	freelist = jobs = job;
	do {
	    job = jobs;
	    jobs = job->next;
	    switch (job->type){
	    case WORK:
		job->func.work.notify (dpy, job->func.work.data);
		if (job->func.work.destroy != NULL)
		    job->func.work.destroy (job->func.work.data);
		break;
	    case RESOURCE:
		job->func.resource.notify (dpy, job->func.resource.xid);
		break;
	    }
	} while (jobs != NULL);
	/* Re-acquire the lock to recycle the job nodes and pick up any
	 * work the callbacks queued while we were dispatching. */
	CAIRO_MUTEX_LOCK (display->mutex);
	do {
	    job = freelist;
	    freelist = job->next;
	    _cairo_freelist_free (&display->wq_freelist, job);
	} while (freelist != NULL);
	jobs = display->workqueue;
    }
    CAIRO_MUTEX_UNLOCK (display->mutex);
}
/* Return the XRenderPictFormat matching @format, caching the lookup per
 * display so XRenderFindStandardFormat is only queried once per format. */
XRenderPictFormat *
_cairo_xlib_display_get_xrender_format (cairo_xlib_display_t *display,
					cairo_format_t format)
{
    XRenderPictFormat *xrender_format;
    CAIRO_MUTEX_LOCK (display->mutex);
    xrender_format = display->cached_xrender_formats[format];
    if (xrender_format == NULL) {
	int pict_format;
	switch (format) {
	case CAIRO_FORMAT_A1:
	    pict_format = PictStandardA1; break;
	case CAIRO_FORMAT_A8:
	    pict_format = PictStandardA8; break;
	case CAIRO_FORMAT_RGB24:
	    pict_format = PictStandardRGB24; break;
	default:
	    ASSERT_NOT_REACHED;
	/* Deliberate fallthrough: an unexpected value asserts in debug
	 * builds and is treated as ARGB32 in release builds. */
	case CAIRO_FORMAT_ARGB32:
	    pict_format = PictStandardARGB32; break;
	}
	xrender_format = XRenderFindStandardFormat (display->display,
						    pict_format);
	display->cached_xrender_formats[format] = xrender_format;
    }
    CAIRO_MUTEX_UNLOCK (display->mutex);
    return xrender_format;
}
|
lechium/tvOS145Headers
|
System/Library/PrivateFrameworks/SlideshowKit.framework/Frameworks/OpusFoundation.framework/OFMediaMetadataCache.h
|
/*
* This header is generated by classdump-dyld 1.5
* on Wednesday, April 28, 2021 at 9:08:21 PM Mountain Standard Time
* Operating System: Version 14.5 (Build 18L204)
* Image Source: /System/Library/PrivateFrameworks/SlideshowKit.framework/Frameworks/OpusFoundation.framework/OpusFoundation
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>. Updated by <NAME>.
*/
@class NSString, NSManagedObjectModel, NSManagedObjectContext, NSPersistentStoreCoordinator, NSPredicate;

/* Core Data backed cache of media metadata rooted at an on-disk store.
 * NOTE(review): method names suggest the cache saves itself when the app
 * backgrounds or terminates (_didEnterBackgroundNotification /
 * _willTerminateNotification) -- confirm against the implementation. */
@interface OFMediaMetadataCache : NSObject {
	NSString* _diskCacheFilepath;                            // path of the persistent store file
	NSManagedObjectModel* _managedObjectModel;
	NSManagedObjectContext* _managedObjectContext;
	NSManagedObjectContext* _parentManagedObjectContext;     // parent of _managedObjectContext
	NSPersistentStoreCoordinator* _persistentStoreCoordinator;
	NSPredicate* _predicateEntryIdentifierTemplate;          // predicate template for identifier lookups
}
@property (nonatomic,retain) NSManagedObjectModel * managedObjectModel;              //@synthesize managedObjectModel=_managedObjectModel - In the implementation block
@property (nonatomic,retain) NSManagedObjectContext * managedObjectContext;          //@synthesize managedObjectContext=_managedObjectContext - In the implementation block
@property (nonatomic,retain) NSPersistentStoreCoordinator * persistentStoreCoordinator; //@synthesize persistentStoreCoordinator=_persistentStoreCoordinator - In the implementation block
-(id)init;
-(void)dealloc;
-(NSPersistentStoreCoordinator *)persistentStoreCoordinator;
-(NSManagedObjectModel *)managedObjectModel;
-(NSManagedObjectContext *)managedObjectContext;
-(void)setPersistentStoreCoordinator:(NSPersistentStoreCoordinator *)arg1 ;
-(void)setManagedObjectContext:(NSManagedObjectContext *)arg1 ;
-(BOOL)save;                                   // public save entry point
-(void)setManagedObjectModel:(NSManagedObjectModel *)arg1 ;
-(BOOL)_save;                                  // internal save primitive
-(void)_didEnterBackgroundNotification;
-(void)_willTerminateNotification;
-(void)invalidateMemoryCaches;
-(void)invalidateDiskCaches;
-(id)initWithDiskCacheFilepath:(id)arg1 ;      // designated initializer backed by a disk store path
-(id)managedObjectForIdentifier:(id)arg1 ;     // look up a cached entry by identifier
@end
|
Palem1988/nuls
|
protocol-module/protocol/src/main/java/io/nuls/protocol/model/tx/LogicData.java
|
/*
* MIT License
*
* Copyright (c) 2017-2019 nuls.io
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package io.nuls.protocol.model.tx;
import io.nuls.kernel.exception.NulsException;
import io.nuls.kernel.model.TransactionLogicData;
import io.nuls.kernel.utils.NulsByteBuffer;
import io.nuls.kernel.utils.NulsOutputStreamBuffer;
import io.nuls.kernel.utils.SerializeUtils;
import java.io.IOException;
import java.util.Set;
/**
 * Opaque transaction logic payload: wraps a raw byte array and
 * (de)serializes it with a length prefix.
 *
 * @author: <NAME>
 * @date: 2018/7/24
 */
public class LogicData extends TransactionLogicData {

    /** Raw payload bytes; may be null. */
    private byte[] bytes;

    public LogicData() {
    }

    public LogicData(byte[] bytes) {
        this.bytes = bytes;
    }

    public byte[] getBytes() {
        return bytes;
    }

    public void setBytes(byte[] bytes) {
        this.bytes = bytes;
    }

    /** Serialized size: length prefix plus payload bytes. */
    @Override
    public int size() {
        return SerializeUtils.sizeOfBytes(bytes);
    }

    @Override
    protected void serializeToStream(NulsOutputStreamBuffer stream) throws IOException {
        stream.writeBytesWithLength(bytes);
    }

    @Override
    public void parse(NulsByteBuffer byteBuffer) throws NulsException {
        bytes = byteBuffer.readByLengthByte();
    }

    /** This payload references no addresses. */
    @Override
    public Set<byte[]> getAddresses() {
        return null;
    }
}
|
enfoTek/tomato.linksys.e2000.nvram-mod
|
tools-src/gnu/glibc/sysdeps/mach/hurd/init-posix.c
|
/* We don't need the unix/bsd version.  The Mach/Hurd port can reuse the
   generic POSIX initialization unchanged, so simply pull it in. */
#include <sysdeps/generic/init-posix.c>
|
svvsdIC/Poseidon-raspberryPi
|
src/lib/CockpitMessaging.js
|
(function () {
  var EventEmitter2 = require('eventemitter2').EventEmitter2;

  /**
   * Messaging hub that mirrors every event emitted on this emitter out to
   * all connected socket.io clients, and re-attaches locally registered
   * listeners to every new socket connection.
   *
   * @param {Object} io   socket.io server to bridge to
   * @param {Object} opts currently unused; kept for API compatibility
   */
  var CockpitMessaging = function (io, opts) {
    var listeners = [];
    var sockets = [];
    var ignoreEvents = [
      'newListener',
      'removeListener'
    ];

    // Copy the arguments object into a real array, dropping the leading
    // event-type entry that newer eventemitter2 releases prepend.
    // NOTE(review): `args[0] == [args[1]]` coerces the one-element array to
    // a string before comparing; preserved verbatim from the original
    // upgrade workaround -- confirm against the installed eventemitter2.
    function collectArgs(rawArgs) {
      var args = new Array(rawArgs.length);
      for (var i = 0; i < args.length; ++i) {
        // i is always a valid index in the arguments object
        args[i] = rawArgs[i];
      }
      if (args[0] == [args[1]]) {
        args.shift(); // After an upgrade of eventemitter, it appears the arguments now include the event type.
      }
      return args;
    }

    // Volatile variant: deliveries may be dropped for clients that are
    // not ready to receive.
    this.volatile = {
      emit: function () {
        if (ignoreEvents.includes(arguments[0])) {
          return;
        }
        var args = collectArgs(arguments);
        sockets.forEach(function (socket) {
          socket.volatile.emit.apply(socket, args);
        });
      }
    };

    // Forward every locally emitted event to all connected sockets.
    this.onAny(function () {
      if (ignoreEvents.includes(arguments[0])) {
        return;
      }
      var args = collectArgs(arguments);
      sockets.forEach(function (socket) {
        socket.emit.apply(socket, args);
      });
    });

    // Record listeners so they can be attached to sockets that connect later.
    this.on('newListener', function (aType, aListener) {
      if (aType !== 'newListener') {
        listeners.push({
          type: aType,
          fn: aListener
        });
      }
    });

    io.on('connection', function (socket) {
      sockets.push(socket);
      listeners.forEach(function (listener) {
        socket.on(listener.type, listener.fn);
      });
      socket.on('disconnect', function () {
        // BUGFIX: the original used `delete sockets[i]`, which leaves a hole
        // and grows the array without bound across reconnects; splice
        // actually removes the entry. Guard against indexOf returning -1.
        var i = sockets.indexOf(socket);
        if (i !== -1) {
          sockets.splice(i, 1);
        }
      });
    });
  };

  CockpitMessaging.prototype = new EventEmitter2({
    wildcard: true,
    newListener: true
  });
  CockpitMessaging.prototype.constructor = CockpitMessaging;

  module.exports = CockpitMessaging;
}());
|
oraziorillo/adrenalina
|
common/src/main/java/common/dto_model/KillShotDTO.java
|
<filename>common/src/main/java/common/dto_model/KillShotDTO.java<gh_stars>0
package common.dto_model;
import common.enums.PcColourEnum;
/**
 * Data-transfer object describing a single kill-shot slot on the
 * killshot track: the colour of the player who scored it, and whether
 * the slot was skulled or the kill was an overkill.
 */
public class KillShotDTO implements DTO {
    // Colour of the player credited with this kill shot.
    private PcColourEnum colour;
    // True when the slot was skulled.
    private boolean skulled;
    // True when the kill was an overkill.
    private boolean overkilled;
    public PcColourEnum getColour() {
        return colour;
    }
    public void setColour(PcColourEnum colour) {
        this.colour = colour;
    }
    public boolean isSkulled() {
        return skulled;
    }
    public void setSkulled(boolean skulled) {
        this.skulled = skulled;
    }
    public boolean isOverkilled() {
        return overkilled;
    }
    public void setOverkilled(boolean overkilled) {
        this.overkilled = overkilled;
    }
}
|
riandakarizal/ITeung
|
module/cek_revisi_sidang.py
|
from module import kelas
from lib import wa, reply, message, numbers
import os, config, pandas
def auth(data):
    """Return True when the sender (data[0]) is a known student or lecturer."""
    sender = data[0]
    is_student = kelas.getNpmandNameMahasiswa(sender) is not None
    is_lecturer = kelas.getKodeDosen(sender) != ''
    return is_student or is_lecturer
def replymsg(driver, data):
    """Build the reply text for a 'check thesis-defense revisions' request.

    data layout (by convention of the dispatcher): data[0] is the sender's
    WhatsApp number, data[3] the message body.  Lecturers may either pass a
    7-digit npm to inspect one student or get all revisions they authored;
    students get their own revisions.  All user-facing strings are
    intentionally in Indonesian and must not be altered.
    """
    # Send the "please wait" message for this module before doing DB work.
    wmsg = reply.getWaitingMessage(os.path.basename(__file__).split('.')[0])
    wa.typeAndSendMessage(driver, wmsg)
    msg = data[3].lower()
    num = numbers.normalize(data[0])
    # Hard-coded academic term identifier (year 2019, semester 2).
    tahun_id = '20192'
    if kelas.getKodeDosen(num):
        # Sender is a lecturer.
        kodeDosen = kelas.getKodeDosen(num)
        # Extract the first 7-digit token from the message as a student npm,
        # if present.  NOTE(review): the bare except also hides unrelated
        # errors, not only the empty-list IndexError.
        try:
            npm = [npm for npm in msg.split(' ') if npm.isdigit() and len(npm) == 7][0]
        except:
            npm = None
        if npm:
            # Lecturer asked about a specific student.
            try:
                if checkMhs(npm):
                    if checkRevisi(npm, tahun_id):
                        msgreply = "Ini revisian dari Anda cuy..."+checkRevisi(npm, tahun_id)
                    else:
                        msgreply = "Emg udh masukin revisi???"
                else:
                    msgreply = "Salah mahasiswa ato npm mungkin..."
            except Exception as e:
                msgreply = f"Error {str(e)}"
        else:
            # No npm given: list every revision this lecturer authored.
            try:
                if checkRevisiPenguji(tahun_id, kodeDosen):
                    msgreply = "Ini revisian dari anda cuy..."+checkRevisiPenguji(tahun_id, kodeDosen)
                else:
                    msgreply = "Emg udh masukin revisi???"
            except Exception as e:
                msgreply = f"Error {str(e)}"
    elif kelas.getNpmandNameMahasiswa(num):
        # Sender is a student: report their own revisions.
        npm, nama=kelas.getNpmandNameMahasiswa(num)
        try:
            if checkMhs(npm):
                if checkRevisi(npm, tahun_id):
                    msgreply = "Selamat revisian cuy..., semangat <3<3"+checkRevisi(npm, tahun_id)
                else:
                    msgreply = "Kamu emg udh sidang? jgn ngadi-ngadi deh..., mungkin aja blm dibikin revisinya sama penguji bersangkutan..., semangat <3<3"
            else:
                msgreply = "Kamu emg ikutan sidang? jgn ngadi-ngadi deh..."
        except Exception as e:
            msgreply = f"Error {str(e)}"
    else:
        # Unknown sender.
        msgreply = f"Hayoo siapa kamu"
    return msgreply
def checkMhs(npm):
    """Return the npm if it exists in bimbingan_data, otherwise False.

    NOTE(review): npm is interpolated straight into the SQL text; if the
    driver supports parameterized queries, switch to them (chat-sourced
    input means potential SQL injection).
    """
    db = kelas.dbConnect()
    query = f'select npm from bimbingan_data where npm="{npm}"'
    with db:
        cursor = db.cursor()
        cursor.execute(query)
        record = cursor.fetchone()
        return record[0] if record is not None else False
def getListMhs(kodeDosen, tahun_id):
    """Return the distinct npm values examined by lecturer `kodeDosen`
    in term `tahun_id` (empty list when there are none)."""
    db = kelas.dbConnect()
    query = (
        f'select distinct(npm) from revisi_data '
        f'where penguji="{kodeDosen}" and tahun_id="{tahun_id}"'
    )
    with db:
        cursor = db.cursor()
        cursor.execute(query)
        rows = cursor.fetchall()
        # fetchall() returns a (possibly empty) sequence; keep the guard
        # from the original just in case.
        return [row[0] for row in rows] if rows is not None else []
def getListPenguji(tahun_id, npm):
    """Return the distinct examiner codes that wrote revisions for student
    `npm` in term `tahun_id` (empty list when there are none)."""
    db = kelas.dbConnect()
    query = (
        f'select distinct(penguji) from revisi_data '
        f'where npm="{npm}" and tahun_id="{tahun_id}"'
    )
    with db:
        cursor = db.cursor()
        cursor.execute(query)
        rows = cursor.fetchall()
        # fetchall() returns a (possibly empty) sequence; keep the guard
        # from the original just in case.
        return [row[0] for row in rows] if rows is not None else []
def checkRevisiPenguji(tahun_id, kodeDosen):
    """Collect every revision note authored by lecturer `kodeDosen` in term
    `tahun_id`, grouped per student and per examiner.

    Returns the formatted message string, or False when the lecturer
    examined no students.  NOTE(review): the inner `return False` aborts
    the whole scan (discarding any `msg` accumulated so far) as soon as a
    single (npm, penguji) pair has no rows -- confirm this is intended and
    not a bug.  Also note an empty `msg` string is falsy to callers.
    """
    db=kelas.dbConnect()
    msg = ""
    listMhs = getListMhs(kodeDosen, tahun_id)
    # print(listMhs)
    if listMhs:
        for npm in listMhs:
            listPenguji = getListPenguji(tahun_id, npm)
            # print(listPenguji)
            if listPenguji:
                for penguji in listPenguji:
                    # NOTE(review): f-string SQL -- prefer parameterized queries.
                    sql=f'select revisi, id from revisi_data where npm="{npm}" and tahun_id="{tahun_id}" and penguji="{penguji}"'
                    with db:
                        cur=db.cursor()
                        cur.execute(sql)
                        rows=cur.fetchall()
                    if rows:
                        msg += f"\n\n*Revisi untuk {npm} dari {penguji}*"
                        for i, row in enumerate(rows):
                            msg += f"\n{(i+1)}. {row[0]} ({row[1]})"
                    else:
                        return False
        return msg
    else:
        return False
def checkRevisi(npm, tahun_id):
    """Collect all revision notes for student `npm` in term `tahun_id`,
    grouped per examiner.

    Returns the formatted message string (empty -- and therefore falsy --
    when no examiner left notes), or False when the initial examiner query
    fails.  NOTE(review): cursor.fetchall() never returns None, so the
    `status = False` branch is effectively dead code.
    """
    db=kelas.dbConnect()
    msg = ""
    listPenguji = list()
    status =True
    # NOTE(review): f-string SQL -- prefer parameterized queries
    # (npm originates from chat input).
    sql=f'select distinct penguji from revisi_data where npm="{npm}" and tahun_id="{tahun_id}"'
    with db:
        cur=db.cursor()
        cur.execute(sql)
        rows=cur.fetchall()
        if rows is not None:
            for row in rows:
                listPenguji.append(row[0])
        else:
            status = False
    if(status):
        # Fetch this student's revisions one examiner at a time.
        for penguji in listPenguji:
            sql=f'select revisi, id from revisi_data where npm="{npm}" and penguji="{penguji}" and tahun_id="{tahun_id}"'
            with db:
                cur=db.cursor()
                cur.execute(sql)
                rows=cur.fetchall()
            if rows:
                msg += f"\n\n*Revisi untuk {npm} dari {penguji}*"
                for i, row in enumerate(rows):
                    msg += f"\n{(i+1)}. {row[0]} ({row[1]})"
        return msg
    else:
        return False
|
ThierryCurtil/trimou
|
core/src/main/java/org/trimou/engine/segment/ValueSegment.java
|
/*
* Copyright 2013 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.trimou.engine.segment;
import java.io.IOException;
import java.util.List;
import org.trimou.annotations.Internal;
import org.trimou.engine.MustacheTagType;
import org.trimou.engine.context.ExecutionContext;
import org.trimou.engine.context.ValueWrapper;
import org.trimou.engine.convert.ValueConverter;
import org.trimou.engine.parser.Template;
import org.trimou.engine.text.TextSupport;
import org.trimou.exception.MustacheException;
import org.trimou.exception.MustacheProblem;
import org.trimou.lambda.Lambda;
import org.trimou.util.Strings;
/**
 * Value segment (aka variable tag).
 *
 * Renders an interpolation such as a variable or unescape-variable tag.
 * When handlebars support is enabled and the tag text resolves to a helper
 * invocation, rendering is delegated to a {@link HelperExecutionHandler};
 * otherwise the value is resolved from the execution context, converted,
 * and written (HTML-escaped unless this is an unescape tag).
 *
 * @author <NAME>
 */
@Internal
public class ValueSegment extends AbstractSegment
        implements HelperAwareSegment {
    // True for unescape tags: output is written without HTML escaping.
    private final boolean unescape;
    // Escaping support from the engine configuration; null when a helper handles this tag.
    private final TextSupport textSupport;
    // Non-null iff handlebars support is enabled and the tag text resolved to a helper call.
    private final HelperExecutionHandler helperHandler;
    // Resolves the tag name against the execution context; null when a helper handles this tag.
    private final ValueProvider provider;
    // Registered value converters, or null when none are configured (or a helper handles this tag).
    private final List<ValueConverter> converters;
    /**
     * Creates a value segment.
     *
     * @param text the tag content (variable name or helper invocation)
     * @param origin the location of the tag within the template
     * @param unescape whether the value must be written without HTML escaping
     */
    public ValueSegment(String text, Origin origin, boolean unescape) {
        super(text, origin);
        this.unescape = unescape;
        this.helperHandler = isHandlebarsSupportEnabled()
                ? HelperExecutionHandler.from(text, getEngine(), this) : null;
        if (helperHandler == null) {
            // Plain variable tag: prepare context lookup and conversion.
            this.textSupport = getEngineConfiguration().getTextSupport();
            this.provider = new ValueProvider(text, getEngineConfiguration());
            if (getEngineConfiguration().getValueConverters().isEmpty()) {
                this.converters = null;
            } else {
                this.converters = getEngineConfiguration().getValueConverters();
            }
        } else {
            // Helper tag: the handler does all the work.
            this.textSupport = null;
            this.provider = null;
            this.converters = null;
        }
    }
    public SegmentType getType() {
        return SegmentType.VALUE;
    }
    public boolean isUnescape() {
        return unescape;
    }
    /**
     * Renders this tag: delegates to the helper handler when present,
     * otherwise resolves the value (consulting the missing-value handler
     * for nulls) and appends it.  The resolved ValueWrapper is always
     * released, even if processing throws.
     */
    public Appendable execute(Appendable appendable, ExecutionContext context) {
        if (helperHandler != null) {
            return helperHandler.execute(appendable, context);
        } else {
            ValueWrapper value = provider.get(context);
            try {
                if (value.isNull()) {
                    Object replacement = getEngineConfiguration()
                            .getMissingValueHandler().handle(getTagInfo());
                    if (replacement != null) {
                        processValue(appendable, context, replacement);
                    }
                } else {
                    processValue(appendable, context, value.get());
                }
            } finally {
                value.release();
            }
            return appendable;
        }
    }
    @Override
    public Appendable fn(Appendable appendable, ExecutionContext context) {
        // No-op
        return appendable;
    }
    @Override
    protected String getSegmentName() {
        return getText();
    }
    @Override
    protected MustacheTagType getTagType() {
        // Because of one segment is used for both a variable and an unescape
        // variable
        return unescape ? MustacheTagType.UNESCAPE_VARIABLE
                : MustacheTagType.VARIABLE;
    }
    // Lambdas are invoked (and possibly interpolated); everything else is
    // converted to a String and written out.
    private void processValue(Appendable appendable, ExecutionContext context,
            Object value) {
        if (value instanceof Lambda) {
            processLambda(appendable, context, value);
        } else {
            writeValue(appendable, convertValue(value));
        }
    }
    // First converter returning non-null wins; fall back to toString().
    private String convertValue(Object value) {
        if (converters != null) {
            for (ValueConverter converter : converters) {
                String result = converter.convert(value);
                if (result != null) {
                    return result;
                }
            }
        }
        return value.toString();
    }
    // Appends the text, HTML-escaping unless this is an unescape tag.
    private void writeValue(Appendable appendable, String text) {
        if (unescape) {
            append(appendable, text);
        } else {
            try {
                textSupport.appendEscapedHtml(text, appendable);
            } catch (IOException e) {
                throw new MustacheException(MustacheProblem.RENDER_IO_ERROR, e);
            }
        }
    }
    // Invokes the lambda; a null return is routed through the missing-value
    // handler, a non-empty return is either re-compiled and interpolated as
    // a one-off template or written verbatim.
    private void processLambda(Appendable appendable, ExecutionContext context,
            Object value) {
        Lambda lambda = (Lambda) value;
        String returnValue = lambda.invoke(null);
        if (returnValue == null) {
            Object replacement = getEngineConfiguration()
                    .getMissingValueHandler().handle(getTagInfo());
            if (replacement != null) {
                processValue(appendable, context, replacement);
                return;
            }
        } else if (!returnValue.equals(Strings.EMPTY)) {
            if (lambda.isReturnValueInterpolated()) {
                // Parse and interpolate the return value
                StringBuilder interpolated = new StringBuilder();
                Template temp = (Template) getEngine().compileMustache(
                        Lambdas.constructLambdaOneoffTemplateName(this),
                        returnValue);
                temp.getRootSegment().execute(interpolated, context);
                writeValue(appendable, interpolated.toString());
            } else {
                writeValue(appendable, returnValue);
            }
        }
    }
}
|
amirlib/gca
|
Flow-Graph/Residual-Graph/index.js
|
<gh_stars>0
const FlowGraph = require("../");
const ForwardFlowEdge = require("../../Edges/ForwardFlowEdge");
const BackwardFlowEdge = require("../../Edges/BackwardFlowEdge");
const LinkedList = require("../../LinkedList");
/**
 * Residual-graph implementation used when computing max flow with
 * Edmonds-Karp: every flow edge is paired with a backward (residual) edge.
 * @class ResidualGraph
 * @extends {FlowGraph}
 */
class ResidualGraph extends FlowGraph {
  /**
   * Builds a residual graph by copying the nodes and edges of `graph`.
   * @param {FlowGraph} graph Source flow graph.
   */
  constructor(graph) {
    super();
    this.edgesList = new LinkedList();
    this.backwardEdgesList = new LinkedList();
    this.cloneMatrix(graph);
    this.initEdges(graph);
  }

  /**
   * Unmarks the edge when its capacity is exhausted, marks it otherwise.
   * @param {FlowEdge} edge The edge to (un)mark.
   */
  ChooseToMarkOrUnmarkEdge(edge) {
    if (edge.isCapacityZero()) {
      this.unmarkEdge(edge.from, edge.to);
      return;
    }
    this.markEdge(edge.from, edge.to);
  }

  /**
   * Copies the adjacency matrix and node-id list from the flow graph.
   * @param {FlowGraph} graph
   */
  cloneMatrix(graph) {
    graph.matrix.forEach((row, index) => {
      this.matrix[index] = [...row];
    });
    this.nodesID = [...graph.nodesID];
  }

  /**
   * Creates a forward/backward edge pair for every edge of the flow graph.
   * When the reverse edge already exists in the source graph its data is
   * reused for the backward edge.
   * @param {FlowGraph} graph
   */
  initEdges(graph) {
    for (let node = graph.edgesList.head; node != null; node = node.next) {
      const data = node.data;
      if (this.backwardEdgesList.has(data)) {
        continue;
      }
      const forward = new ForwardFlowEdge(data.from, data.to, data.capacity, data.flow);
      let backward;
      if (graph.hasEdge(data.to, data.from)) {
        const opposite = graph.getEdge(data.to, data.from);
        backward = new BackwardFlowEdge(opposite.from, opposite.to, opposite.capacity, opposite.flow);
      } else {
        backward = new BackwardFlowEdge(data.to, data.from, data.capacity, data.flow);
      }
      forward.backwardEdge = backward;
      backward.forwardEdge = forward;
      this.backwardEdgesList.enqueue(backward);
      this.edgesList.enqueue(forward);
    }
  }

  /**
   * A ResidualGraph cannot be cloned.
   * @returns {null} Always null.
   */
  clone() {
    return null;
  }
}
module.exports = ResidualGraph;
|
Gohla/pluto
|
test/build/pluto/test/EmptyBuildOutput.java
|
<reponame>Gohla/pluto<filename>test/build/pluto/test/EmptyBuildOutput.java
package build.pluto.test;
import build.pluto.output.Output;
/**
 * Singleton {@link Output} used by builders that produce no meaningful
 * output value.
 */
public final class EmptyBuildOutput implements Output {
    /**
     * Fixed serialization id so persisted build state stays compatible.
     */
    private static final long serialVersionUID = -931710373384110638L;
    /** The single shared instance. */
    public static final EmptyBuildOutput instance = new EmptyBuildOutput();
    // Private constructor enforces the singleton.
    private EmptyBuildOutput(){}
}
|
plotor/vertx-zero
|
vertx-gaia/vertx-up/src/main/java/io/vertx/up/atom/flux/IpcData.java
|
package io.vertx.up.atom.flux;
import io.vertx.core.buffer.Buffer;
import io.vertx.core.json.JsonObject;
import io.vertx.up.eon.em.IpcType;
import java.io.Serializable;
/**
 * Serializable envelope for inter-process communication: identifies the
 * target community (type, name, host, port, address), carries the payload
 * buffer and any additional configuration.
 */
public class IpcData implements Serializable {
    /**
     * Default community type
     */
    private IpcType type = IpcType.UNITY;
    /**
     * Community Name
     */
    private String name;
    /**
     * Community Port
     */
    private Integer port;
    /**
     * Community Host
     */
    private String host;
    /**
     * Community data
     */
    private Buffer data;
    /**
     * Community address
     */
    private String address;
    /**
     * Additional Config
     */
    private JsonObject config = new JsonObject();
    public String getName() {
        return this.name;
    }
    public void setName(final String name) {
        this.name = name;
    }
    public String getAddress() {
        return this.address;
    }
    public void setAddress(final String address) {
        this.address = address;
    }
    public IpcType getType() {
        return this.type;
    }
    public void setType(final IpcType type) {
        this.type = type;
    }
    public Integer getPort() {
        return this.port;
    }
    public void setPort(final Integer port) {
        this.port = port;
    }
    public String getHost() {
        return this.host;
    }
    public void setHost(final String host) {
        this.host = host;
    }
    public Buffer getData() {
        return this.data;
    }
    public void setData(final Buffer data) {
        this.data = data;
    }
    public JsonObject getConfig() {
        return this.config;
    }
    public void setConfig(final JsonObject config) {
        this.config = config;
    }
    @Override
    public String toString() {
        // FIX: include the previously omitted `name` field and quote
        // `address` consistently with `host` (both are strings).
        return "IpcData{" +
                "type=" + this.type +
                ", name='" + this.name + '\'' +
                ", port=" + this.port +
                ", host='" + this.host + '\'' +
                ", data=" + this.data +
                ", address='" + this.address + '\'' +
                ", config=" + this.config +
                '}';
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.