code
stringlengths 3
1.01M
| repo_name
stringlengths 5
116
| path
stringlengths 3
311
| language
stringclasses 30
values | license
stringclasses 15
values | size
int64 3
1.01M
|
|---|---|---|---|---|---|
# ************************************************************
# Sequel Pro SQL dump
# Version 4541
#
# http://www.sequelpro.com/
# https://github.com/sequelpro/sequelpro
#
# Host: 127.0.0.1 (MySQL 5.7.12)
# Database: recipes2
# Generation Time: 2017-07-09 21:50:04 +0000
# ************************************************************
# Save the session's charset/collation/SQL-mode settings and disable FK
# checks so tables can be (re)created in any order; everything is restored
# at the end of the dump.
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
# Dump of table category
# ------------------------------------------------------------
# NOTE(review): created_on/updated_on both default to CURRENT_TIMESTAMP but
# updated_on has no ON UPDATE clause — presumably the application layer
# maintains it; confirm before relying on it.
DROP TABLE IF EXISTS `category`;
CREATE TABLE `category` (
`id` bigint(20) NOT NULL AUTO_INCREMENT,
`created_on` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
`updated_on` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
`version` int(11) NOT NULL,
`asset_name` varchar(255) DEFAULT NULL,
`caption` varchar(255) DEFAULT NULL,
`title` varchar(50) NOT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `UK_lnmf77qvjnr2lmyxrrydom9hd` (`title`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
# Dump of table cookbook
# ------------------------------------------------------------
DROP TABLE IF EXISTS `cookbook`;
CREATE TABLE `cookbook` (
`id` bigint(20) NOT NULL AUTO_INCREMENT,
`created_on` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
`updated_on` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
`version` int(11) NOT NULL,
`cookbook_name` varchar(100) NOT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `UK_lvko2c1oic63jrntop8s378jy` (`cookbook_name`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
# Dump of table history_item
# ------------------------------------------------------------
# NOTE(review): recipe_id carries no FOREIGN KEY constraint (unlike
# recipe.category_id/cookbook_id below) — history rows can outlive their
# recipe; confirm whether that is intentional.
DROP TABLE IF EXISTS `history_item`;
CREATE TABLE `history_item` (
`id` bigint(20) NOT NULL AUTO_INCREMENT,
`created_on` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
`updated_on` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
`version` int(11) NOT NULL,
`recipe_id` bigint(20) NOT NULL,
`when_viewed` datetime NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
# Dump of table log_record
# ------------------------------------------------------------
DROP TABLE IF EXISTS `log_record`;
CREATE TABLE `log_record` (
`id` bigint(20) NOT NULL AUTO_INCREMENT,
`level` varchar(20) DEFAULT NULL,
`log_time` datetime DEFAULT NULL,
`logger_name` varchar(50) DEFAULT NULL,
`message` varchar(4000) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
# Dump of table recipe
# ------------------------------------------------------------
DROP TABLE IF EXISTS `recipe`;
CREATE TABLE `recipe` (
`id` bigint(20) NOT NULL AUTO_INCREMENT,
`created_on` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
`updated_on` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
`version` int(11) NOT NULL,
`asset_name` varchar(255) DEFAULT NULL,
`favorite` bit(1) NOT NULL,
`notes` varchar(2000) DEFAULT NULL,
`page_nrs` varchar(50) DEFAULT NULL,
`recipe_name` varchar(100) NOT NULL,
`recipe_url` varchar(500) DEFAULT NULL,
`category_id` bigint(20) NOT NULL,
`cookbook_id` bigint(20) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `FKrufhnv33hpfxstx9x108553kj` (`category_id`),
KEY `FKsn5npoilvjktpcg2tosxg3yg1` (`cookbook_id`),
CONSTRAINT `FKrufhnv33hpfxstx9x108553kj` FOREIGN KEY (`category_id`) REFERENCES `category` (`id`),
CONSTRAINT `FKsn5npoilvjktpcg2tosxg3yg1` FOREIGN KEY (`cookbook_id`) REFERENCES `cookbook` (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
# Dump of table recipe_tag
# ------------------------------------------------------------
# Join table recipe <-> tagName (Hibernate-generated key names).
# NOTE(review): the UNIQUE key on tags_tag means any given tag can be
# attached to at most ONE recipe — verify this is the intended model.
DROP TABLE IF EXISTS `recipe_tag`;
CREATE TABLE `recipe_tag` (
`recipe_id` bigint(20) NOT NULL,
`tags_tag` varchar(50) NOT NULL,
PRIMARY KEY (`recipe_id`,`tags_tag`),
UNIQUE KEY `UK_7x0dbcddoq5dybefqwdcimoqc` (`tags_tag`),
CONSTRAINT `FKq4myiba0qqys8dffxt7dgi6rw` FOREIGN KEY (`tags_tag`) REFERENCES `tagName` (`tagName`),
CONSTRAINT `FKshildcupwo2vlv8sjyxjlpi8l` FOREIGN KEY (`recipe_id`) REFERENCES `recipe` (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
# Dump of table tagName
# ------------------------------------------------------------
DROP TABLE IF EXISTS `tagName`;
CREATE TABLE `tagName` (
`tagName` varchar(50) NOT NULL,
PRIMARY KEY (`tagName`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
# Restore the settings saved at the top of the dump.
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
|
ppolasek/Recipes2
|
recipe_server/src/main/resources/ddl/recipes2_2017-07-09.sql
|
SQL
|
bsd-3-clause
| 4,946
|
<?php
declare(strict_types=1);
namespace UliCMS\Helpers;
use Helper;
use Westsworld\TimeAgo;
use DateTime;
class NumberFormatHelper extends Helper
{
    const SQL_DATE_WITH_SECONDS = "Y-m-d H:i:s";
    const SQL_DATE_WITHOUT_SECONDS = "Y-m-d H:i";

    /**
     * Formats a raw byte count as a human readable size string.
     * Based on a snippet from PHP Share: http://www.phpshare.org
     *
     * @param float $bytes raw byte count
     * @return string e.g. "1.50 GB", "3 Bytes", "0 Bytes"
     */
    public static function formatSizeUnits(float $bytes): string
    {
        // Thresholds are powers of 1024 (GiB / MiB / KiB).
        if ($bytes >= 1073741824) {
            return number_format($bytes / 1073741824, 2) . ' GB';
        }
        if ($bytes >= 1048576) {
            return number_format($bytes / 1048576, 2) . ' MB';
        }
        if ($bytes >= 1024) {
            return number_format($bytes / 1024, 2) . ' KB';
        }
        if ($bytes > 1) {
            return $bytes . ' Bytes';
        }
        if ($bytes == 1) {
            return $bytes . ' Byte';
        }
        // Anything below one byte (including 0) is reported as "0 Bytes".
        return '0 Bytes';
    }

    /**
     * Converts an integer timestamp to an SQL-style date string,
     * e.g. for use with an HTML5 datetime-local input.
     *
     * @param int|null $timestamp unix timestamp; null means "now"
     * @param string $format date() format string
     * @return string formatted date
     */
    public static function timestampToSqlDate(
        ?int $timestamp = null,
        string $format = self::SQL_DATE_WITHOUT_SECONDS
    ): string {
        return date($format, $timestamp ?? time());
    }

    /**
     * Formats the given timestamp as a human readable "time ago" phrase,
     * used for the "Online since" display.
     *
     * @param int $time unix timestamp
     * @return string localized relative-time phrase
     */
    public static function formatTime(int $time): string
    {
        $when = new DateTime();
        $when->setTimestamp($time);

        // Pick the TimeAgo translation matching the system language;
        // fall back to German when no matching translation class exists.
        $translationClass = "\\Westsworld\\TimeAgo\\Translations\\" .
            ucfirst(getSystemLanguage());
        $translation = class_exists($translationClass)
            ? new $translationClass()
            : new \Westsworld\TimeAgo\Translations\De();

        return (new TimeAgo($translation))->inWords($when);
    }
}
|
derUli/ulicms
|
ulicms/classes/helpers/NumberFormatHelper.php
|
PHP
|
bsd-3-clause
| 1,873
|
//
// $Id$
package org.ductilej.tests;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Points out an edge case with finals that we can't help.
*/
public class InitOrderTest
{
    public static abstract class A {
        // Field initializers run as part of A's constructor, i.e. BEFORE any
        // subclass constructor body executes — so this virtual call reaches
        // B.foo() while B's final field has not yet been assigned.
        public int fooVal = foo();
        protected abstract int foo ();
    }

    public static class B extends A {
        public final int bar;
        public B () {
            // Runs only after A's field initializer has already called foo().
            bar = 5;
        }
        protected int foo () {
            // Reads |bar| before the constructor assigns it; in plain Java
            // this observes the default value of the final field.
            return bar;
        }
    }

    @Test public void testInitOrder() {
        B b = new B();
        // assertEquals(0, b.fooVal); // will be 'null' in detyped code
        assertEquals(5, b.bar);
    }
}
|
scaladyno/ductilej
|
src/test/java/org/ductilej/tests/InitOrderTest.java
|
Java
|
bsd-3-clause
| 681
|
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "content/common/gpu/client/gpu_video_encode_accelerator_host.h"
#include "base/logging.h"
#include "base/message_loop/message_loop_proxy.h"
#include "content/common/gpu/client/gpu_channel_host.h"
#include "content/common/gpu/gpu_messages.h"
#include "content/common/gpu/media/gpu_video_accelerator_util.h"
#include "media/base/video_frame.h"
namespace content {
#define NOTIFY_ERROR(error) \
PostNotifyError(error); \
DLOG(ERROR)
GpuVideoEncodeAcceleratorHost::GpuVideoEncodeAcceleratorHost(
    GpuChannelHost* channel,
    CommandBufferProxyImpl* impl)
    : channel_(channel),
      encoder_route_id_(MSG_ROUTING_NONE),
      client_(NULL),
      impl_(impl),
      next_frame_id_(0),
      weak_this_factory_(this) {
  DCHECK(channel_);
  DCHECK(impl_);
  // Observe |impl_| deletion so we can clear our pointer before it goes away
  // (see OnWillDeleteImpl()).
  impl_->AddDeletionObserver(this);
}
GpuVideoEncodeAcceleratorHost::~GpuVideoEncodeAcceleratorHost() {
  DCHECK(CalledOnValidThread());
  // Only unregister the IPC route if Initialize() set one up and the channel
  // is still alive (OnChannelError() may have nulled |channel_|).
  if (channel_ && encoder_route_id_ != MSG_ROUTING_NONE)
    channel_->RemoveRoute(encoder_route_id_);
  if (impl_)
    impl_->RemoveDeletionObserver(this);
}
// Dispatches incoming encoder IPC messages to the On*() handlers below.
// Returns true if the message was handled.
bool GpuVideoEncodeAcceleratorHost::OnMessageReceived(
    const IPC::Message& message) {
  bool handled = true;
  IPC_BEGIN_MESSAGE_MAP(GpuVideoEncodeAcceleratorHost, message)
    IPC_MESSAGE_HANDLER(AcceleratedVideoEncoderHostMsg_RequireBitstreamBuffers,
                        OnRequireBitstreamBuffers)
    IPC_MESSAGE_HANDLER(AcceleratedVideoEncoderHostMsg_NotifyInputDone,
                        OnNotifyInputDone)
    IPC_MESSAGE_HANDLER(AcceleratedVideoEncoderHostMsg_BitstreamBufferReady,
                        OnBitstreamBufferReady)
    IPC_MESSAGE_HANDLER(AcceleratedVideoEncoderHostMsg_NotifyError,
                        OnNotifyError)
    IPC_MESSAGE_UNHANDLED(handled = false)
  IPC_END_MESSAGE_MAP()
  DCHECK(handled);
  // See OnNotifyError for why |this| mustn't be used after OnNotifyError might
  // have been called above.
  return handled;
}
// Called when the GPU channel dies: tear down the route, drop the channel,
// and surface a platform-failure error to the client.
void GpuVideoEncodeAcceleratorHost::OnChannelError() {
  DCHECK(CalledOnValidThread());
  if (channel_) {
    if (encoder_route_id_ != MSG_ROUTING_NONE)
      channel_->RemoveRoute(encoder_route_id_);
    channel_ = NULL;
  }
  NOTIFY_ERROR(kPlatformFailureError) << "OnChannelError()";
}
// Returns the encode profiles advertised by the GPU process, or an empty
// list if the channel has already been lost.
media::VideoEncodeAccelerator::SupportedProfiles
GpuVideoEncodeAcceleratorHost::GetSupportedProfiles() {
  DCHECK(CalledOnValidThread());
  if (!channel_)
    return media::VideoEncodeAccelerator::SupportedProfiles();
  return GpuVideoAcceleratorUtil::ConvertGpuToMediaEncodeProfiles(
      channel_->gpu_info().video_encode_accelerator_supported_profiles);
}
// Creates the encoder in the GPU process via a synchronous IPC and registers
// an IPC route for its replies. Returns false on failure.
bool GpuVideoEncodeAcceleratorHost::Initialize(
    media::VideoFrame::Format input_format,
    const gfx::Size& input_visible_size,
    media::VideoCodecProfile output_profile,
    uint32 initial_bitrate,
    Client* client) {
  DCHECK(CalledOnValidThread());
  client_ = client;
  if (!impl_) {
    DLOG(ERROR) << "impl_ destroyed";
    return false;
  }
  // NOTE(review): |channel_| is dereferenced without a null check here,
  // unlike most other methods — presumably callers never Initialize() after
  // channel loss; confirm.
  int32 route_id = channel_->GenerateRouteID();
  channel_->AddRoute(route_id, weak_this_factory_.GetWeakPtr());
  // Synchronous send: |succeeded| is filled in by the GPU process.
  bool succeeded = false;
  Send(new GpuCommandBufferMsg_CreateVideoEncoder(impl_->GetRouteID(),
                                                  input_format,
                                                  input_visible_size,
                                                  output_profile,
                                                  initial_bitrate,
                                                  route_id,
                                                  &succeeded));
  if (!succeeded) {
    DLOG(ERROR) << "Send(GpuCommandBufferMsg_CreateVideoEncoder()) failed";
    channel_->RemoveRoute(route_id);
    return false;
  }
  encoder_route_id_ = route_id;
  return true;
}
// Ships one shared-memory-backed frame to the GPU-side encoder. The frame is
// retained in |frame_map_| until OnNotifyInputDone() releases it.
void GpuVideoEncodeAcceleratorHost::Encode(
    const scoped_refptr<media::VideoFrame>& frame,
    bool force_keyframe) {
  DCHECK(CalledOnValidThread());
  if (!channel_)
    return;
  if (!base::SharedMemory::IsHandleValid(frame->shared_memory_handle())) {
    NOTIFY_ERROR(kPlatformFailureError)
        << "Encode(): cannot encode frame not backed by shared memory";
    return;
  }
  // Duplicate the handle so the GPU process can map the frame's memory.
  base::SharedMemoryHandle handle =
      channel_->ShareToGpuProcess(frame->shared_memory_handle());
  if (!base::SharedMemory::IsHandleValid(handle)) {
    NOTIFY_ERROR(kPlatformFailureError)
        << "Encode(): failed to duplicate buffer handle for GPU process";
    return;
  }
  // We assume that planar frame data passed here is packed and contiguous.
  const size_t plane_count = media::VideoFrame::NumPlanes(frame->format());
  size_t frame_size = 0;
  for (size_t i = 0; i < plane_count; ++i) {
    // Cast DCHECK parameters to void* to avoid printing uint8* as a string.
    DCHECK_EQ(reinterpret_cast<void*>(frame->data(i)),
              reinterpret_cast<void*>((frame->data(0) + frame_size)))
        << "plane=" << i;
    frame_size += frame->stride(i) * frame->rows(i);
  }
  Send(new AcceleratedVideoEncoderMsg_Encode(
      encoder_route_id_, next_frame_id_, handle, frame->shared_memory_offset(),
      frame_size, force_keyframe));
  frame_map_[next_frame_id_] = frame;
  // Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
  next_frame_id_ = (next_frame_id_ + 1) & 0x3FFFFFFF;
}
// Hands an output bitstream buffer to the GPU-side encoder, duplicating its
// shared-memory handle for the GPU process.
void GpuVideoEncodeAcceleratorHost::UseOutputBitstreamBuffer(
    const media::BitstreamBuffer& buffer) {
  DCHECK(CalledOnValidThread());
  if (!channel_)
    return;
  base::SharedMemoryHandle handle =
      channel_->ShareToGpuProcess(buffer.handle());
  if (!base::SharedMemory::IsHandleValid(handle)) {
    NOTIFY_ERROR(kPlatformFailureError)
        << "UseOutputBitstreamBuffer(): failed to duplicate buffer handle "
           "for GPU process: buffer.id()=" << buffer.id();
    return;
  }
  Send(new AcceleratedVideoEncoderMsg_UseOutputBitstreamBuffer(
      encoder_route_id_, buffer.id(), handle, buffer.size()));
}
// Forwards a bitrate/framerate change request to the GPU-side encoder.
void GpuVideoEncodeAcceleratorHost::RequestEncodingParametersChange(
    uint32 bitrate,
    uint32 framerate) {
  DCHECK(CalledOnValidThread());
  if (!channel_)
    return;
  Send(new AcceleratedVideoEncoderMsg_RequestEncodingParametersChange(
      encoder_route_id_, bitrate, framerate));
}
// Tears down the GPU-side encoder and self-deletes; |this| is invalid after
// this call.
void GpuVideoEncodeAcceleratorHost::Destroy() {
  DCHECK(CalledOnValidThread());
  if (channel_)
    Send(new AcceleratedVideoEncoderMsg_Destroy(encoder_route_id_));
  client_ = NULL;
  delete this;
}
void GpuVideoEncodeAcceleratorHost::OnWillDeleteImpl() {
  DCHECK(CalledOnValidThread());
  impl_ = NULL;
  // The CommandBufferProxyImpl is going away; error out this VEA.
  OnChannelError();
}
void GpuVideoEncodeAcceleratorHost::PostNotifyError(Error error) {
  DCHECK(CalledOnValidThread());
  DVLOG(2) << "PostNotifyError(): error=" << error;
  // Post the error notification back to this thread, to avoid re-entrancy.
  // The weak pointer means the task is dropped if |this| is destroyed first.
  base::MessageLoopProxy::current()->PostTask(
      FROM_HERE,
      base::Bind(&GpuVideoEncodeAcceleratorHost::OnNotifyError,
                 weak_this_factory_.GetWeakPtr(),
                 error));
}
// Sends |message| over the GPU channel, reporting a platform failure if the
// send fails. Takes ownership of |message| (consumed by channel_->Send).
// NOTE(review): |channel_| is dereferenced without a null check — presumably
// all callers verify the channel first; confirm.
void GpuVideoEncodeAcceleratorHost::Send(IPC::Message* message) {
  DCHECK(CalledOnValidThread());
  // Capture the type first: |message| is no longer ours after Send().
  uint32 message_type = message->type();
  if (!channel_->Send(message)) {
    NOTIFY_ERROR(kPlatformFailureError) << "Send(" << message_type
                                        << ") failed";
  }
}
// IPC reply: the GPU encoder announces how many input frames and how large
// an output buffer it needs; forwarded to the client.
void GpuVideoEncodeAcceleratorHost::OnRequireBitstreamBuffers(
    uint32 input_count,
    const gfx::Size& input_coded_size,
    uint32 output_buffer_size) {
  DCHECK(CalledOnValidThread());
  DVLOG(2) << "OnRequireBitstreamBuffers(): input_count=" << input_count
           << ", input_coded_size=" << input_coded_size.ToString()
           << ", output_buffer_size=" << output_buffer_size;
  if (client_) {
    client_->RequireBitstreamBuffers(
        input_count, input_coded_size, output_buffer_size);
  }
}
// IPC reply: the GPU encoder is done with input frame |frame_id|; release
// our reference so the frame can be reused.
void GpuVideoEncodeAcceleratorHost::OnNotifyInputDone(int32 frame_id) {
  DCHECK(CalledOnValidThread());
  DVLOG(3) << "OnNotifyInputDone(): frame_id=" << frame_id;
  // Fun-fact: std::hash_map is not spec'd to be re-entrant; since freeing a
  // frame can trigger a further encode to be kicked off and thus an .insert()
  // back into the map, we separate the frame's dtor running from the .erase()
  // running by holding on to the frame temporarily. This isn't "just
  // theoretical" - Android's std::hash_map crashes if we don't do this.
  scoped_refptr<media::VideoFrame> frame = frame_map_[frame_id];
  if (!frame_map_.erase(frame_id)) {
    DLOG(ERROR) << "OnNotifyInputDone(): "
                   "invalid frame_id=" << frame_id;
    // See OnNotifyError for why this needs to be the last thing in this
    // function.
    OnNotifyError(kPlatformFailureError);
    return;
  }
  frame = NULL;  // Not necessary but nice to be explicit; see fun-fact above.
}
// IPC reply: an output buffer now holds |payload_size| bytes of encoded
// bitstream; forwarded to the client.
void GpuVideoEncodeAcceleratorHost::OnBitstreamBufferReady(
    int32 bitstream_buffer_id,
    uint32 payload_size,
    bool key_frame) {
  DCHECK(CalledOnValidThread());
  DVLOG(3) << "OnBitstreamBufferReady(): "
              "bitstream_buffer_id=" << bitstream_buffer_id
           << ", payload_size=" << payload_size
           << ", key_frame=" << key_frame;
  if (client_)
    client_->BitstreamBufferReady(bitstream_buffer_id, payload_size, key_frame);
}
// Delivers a fatal error to the client exactly once, then drops the client.
void GpuVideoEncodeAcceleratorHost::OnNotifyError(Error error) {
  DCHECK(CalledOnValidThread());
  DVLOG(2) << "OnNotifyError(): error=" << error;
  if (!client_)
    return;
  // Cancel any queued PostNotifyError() tasks so the client hears about the
  // error only once.
  weak_this_factory_.InvalidateWeakPtrs();
  // Client::NotifyError() may Destroy() |this|, so calling it needs to be the
  // last thing done on this stack!
  media::VideoEncodeAccelerator::Client* client = NULL;
  std::swap(client_, client);
  client->NotifyError(error);
}
} // namespace content
|
ltilve/chromium
|
content/common/gpu/client/gpu_video_encode_accelerator_host.cc
|
C++
|
bsd-3-clause
| 9,937
|
<?php
/**
 * Weegbo bootstrap file.
 *
 * @author Dmitry Avseyenko <polsad@gmail.com>
 * @package system
 * @copyright Copyright © 2008-2011 Inspirativ
 * @license http://weegbo.com/license/
 * @since 0.8
 */
/*
 * Define basic constant
 *
 * PATH_ROOT - path to application directory
 * PATH_BASE - path to framework base files
 */
define('START_TIME', microtime(true));
define('PATH_ROOT', dirname(__FILE__).'/');
define('PATH_BASE', PATH_ROOT.'system/base/');
// Some servers (notably IIS) set $_SERVER['HTTPS'] to the string 'off' for
// plain HTTP requests, so the value must be checked, not just its presence.
define('PATH_SCHEME', (isset($_SERVER['HTTPS']) && $_SERVER['HTTPS'] !== 'off') ? 'https' : 'http');
define('ENVIRONMENT', 'development');
/**
 * Run application. PATH_ROOT.'system/' - path to application folder
 */
require_once(PATH_BASE.'base.class.php');
Base::createWebApplication(PATH_ROOT.'system/');
|
Inspirativ/eGift-Card-platform
|
index.php
|
PHP
|
bsd-3-clause
| 770
|
/**
*
*/
package gov.nih.nci.cagrid.portal.util;
import org.springframework.context.support.ClassPathXmlApplicationContext;
/**
* @author <a href="mailto:joshua.phillips@semanticbits.com">Joshua Phillips</a>
*
*/
public class RunMetaChange {
    /**
     * Boots the Spring application contexts for the database and the
     * aggregator metachange configuration. Presumably the metachange work
     * itself runs as a side effect of bean initialization, since nothing is
     * invoked on the context afterwards — verify against the context XML.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) throws Exception {
        new ClassPathXmlApplicationContext(new String[] {
                "classpath:applicationContext-db.xml",
                "classpath:applicationContext-aggr-metachange.xml" });
    }
}
|
NCIP/cagrid
|
cagrid/Software/portal/cagrid-portal/aggr/src/java/gov/nih/nci/cagrid/portal/util/RunMetaChange.java
|
Java
|
bsd-3-clause
| 499
|
- infos = Information about the passwd plugin is in keys below
- infos/author = Thomas Waser <thomas.waser@libelektra.org>
- infos/licence = BSD
- infos/needs =
- infos/provides = storage/passwd
- infos/recommends =
- infos/placements = getstorage setstorage
- infos/status = maintained reviewed conformant compatible coverage specific unittest tested nodep libc configurable experimental limited
- infos/metadata =
- infos/description = storage plugin for passwd files
## Introduction
This plugin parses `passwd` files, e.g. `/etc/passwd`.
## Implementation Details
If present, the non-POSIX-compliant `fgetpwent` function will be used to read the file supplied by the resolver; otherwise `getpwent` will be used. For writing, `putpwent` will be used if present; if not, a simple implementation writes directly to the config file.
## Configuration
If the config key `index` is set to `name`, passwd entries will be sorted by name; if it is not set, or set to `uid`, passwd entries will be sorted by uid.
## Fields
- `gecos` contains the full name of the account
- `gid` contains the account's primary group id
- `home` contains the path to the account's home directory
- `shell` contains the account's default shell
- `uid` contains the account's uid
- `name` contains the account name
## Usage
kdb mount /etc/passwd system/passwd passwd index=name
kdb export system/passwd/root
gecos = root
gid = 0
home = /root
passwd = x
shell = /bin/zsh
uid = 0
|
e1528532/libelektra
|
src/plugins/passwd/README.md
|
Markdown
|
bsd-3-clause
| 1,481
|
#include <chrono>
#include <ctime>
#include <fstream>
#include <iostream>
#include <utility>
// TCLAP
#include "tclap/CmdLine.h"
#include "cereal/archives/json.hpp"
#include "rb-filesystem.hpp"
#include <cstdio>
#include <sdsl/bit_vectors.hpp>
#include <cstdlib>
#include <boost/dynamic_bitset.hpp>
#include <libgen.h>
#include <sparsepp/spp.h>
using spp::sparse_hash_map;
// Custom Headers
//#include "uint128_t.hpp"
//#include "debug.h"
#include "kmer.hpp"
// using namespace std;
// using namespace sdsl;
#include "rb-pack-color.hpp"
#include "rb-vec.hpp"
#include "xxhash.h"
#include <bitset>
#include <cstdlib>
#include <sys/timeb.h>
#include <memory>
// Returns a millisecond timestamp suitable for interval measurement via
// getMilliSpan(). The value is milliseconds-of-second plus the low 20 bits
// of the epoch seconds scaled to ms, so it wraps every 0x100000 seconds and
// always fits in a non-negative int (< 0x100000 * 1000).
//
// Implemented with <chrono> instead of ftime(), which POSIX.1-2008 marks
// obsolescent; the produced value is identical.
int getMilliCount() {
  using namespace std::chrono;
  const long long total_ms =
      duration_cast<milliseconds>(system_clock::now().time_since_epoch())
          .count();
  const int msec = static_cast<int>(total_ms % 1000);
  const long long secs = total_ms / 1000;
  return msec + static_cast<int>(secs & 0xfffff) * 1000;
}
// Elapsed milliseconds since |nTimeStart| (a value previously returned by
// getMilliCount()), compensating for the 0x100000-second wraparound of the
// underlying counter.
int getMilliSpan(int nTimeStart) {
  const int elapsed = getMilliCount() - nTimeStart;
  return (elapsed < 0) ? elapsed + 0x100000 * 1000 : elapsed;
}
std::string file_extension = ".<extension>";
// Fills |params| from the command line using TCLAP. Positional arguments:
// input file, number of colors, result directory, and an optional pass mode
// ("1pass"/"2pass"). TCLAP reports/handles malformed input itself.
void parse_arguments(int argc, char** argv, parameters_t& params) {
  TCLAP::CmdLine cmd("Rainbowfish pack-color", ' ', "0.1.0");
  TCLAP::UnlabeledValueArg<std::string> input_filename_arg(
      "input",
      "Input file. Currently only supports KMC2's binary format (for k<=64).",
      true, "", "input_file", cmd);
  TCLAP::UnlabeledValueArg<std::string> num_colors_arg(
      "num_colors", "Number of colors", true, "", "num colors", cmd);
  TCLAP::UnlabeledValueArg<std::string> res_dir_arg(
      "dir", "Result directory; this will be created if it doesn't exist", true,
      "", "res_dir", cmd);
  TCLAP::UnlabeledValueArg<std::string> pass_arg("pass", "1pass or 2pass",
                                                 false, "", "pass", cmd);
  cmd.parse(argc, argv);
  params.input_filename = input_filename_arg.getValue();
  params.num_colors = atoi(num_colors_arg.getValue().c_str());
  params.res_dir = res_dir_arg.getValue();
  params.pass = pass_arg.getValue();
}
// Reads one raw color bitvector record from |colorfile| directly into
// |value| (binary, no framing).
inline void deserialize_color_bv(std::ifstream& colorfile, color_bv& value) {
  colorfile.read((char*)&value, sizeof(color_bv));
}
// Writes an info.json file into |res_dir| describing the packed structure
// (type names, counts, and label encoding) via cereal's JSON archive.
// The archive is scoped so it flushes before the file is closed.
// Always returns true.
bool serialize_info(uint64_t num_colors, uint64_t num_edges, uint64_t num_eqCls,
                    std::string label_type, std::string select_type,
                    std::string eqtable_type, std::string res_dir,
                    bool isLblDynamic) {
  std::string jsonFileName = res_dir + "/info.json";
  std::ofstream jsonFile(jsonFileName);
  {
    cereal::JSONOutputArchive archive(jsonFile);
    archive(cereal::make_nvp("label_type", label_type));
    archive(cereal::make_nvp("select_type", select_type));
    archive(cereal::make_nvp("eqtable_type", eqtable_type));
    archive(cereal::make_nvp("num_colors", num_colors));
    archive(cereal::make_nvp("num_edges", num_edges));
    archive(cereal::make_nvp("num_eqCls", num_eqCls));
    archive(cereal::make_nvp("is_label_dynamic", isLblDynamic));
    archive(cereal::make_nvp("label_fixed_length", LOG2(num_eqCls) + 1));
  }
  jsonFile.close();
  return true;
}
// Packs per-edge color-class labels (T1), label-boundary rank marks (T2) and
// the equivalence-class table (T3) into bitvectors, then serializes them.
template <class T1, class T2, class T3> class ColorPacker {
 public:
  T1 lblvec;  // concatenated (variable- or fixed-width) labels
  T2 rnkvec;  // label start positions; only used when labels are dynamic
  T3 eqTvec;  // eq-class table: num_classes x num_colors bits

 public:
  ColorPacker(uint64_t eqBitSize, uint64_t lblBitSize, bool isLblDynamic) {
    lblvec = T1(lblBitSize);
    if (isLblDynamic) {
      // One extra bit for the sentinel mark after the last label.
      rnkvec = T2(lblBitSize + 1);
    }
    eqTvec = T3(eqBitSize);
  }

  // Appends label |num| at bit |pos| using a variable-width encoding:
  // width = LOG2(num + 2) bits, storing num - (2^width - 2). Returns the bit
  // position just past the written label.
  size_t insertColorLabel(uint64_t num, uint64_t pos) {
    // most significant bit of number goes down to the end of the bitset
    uint8_t nbits = static_cast<uint8_t>(LOG2(num + 2));
    uint64_t lbl = num - ((1 << nbits) - 2);
    lblvec.setInt(pos, lbl, nbits);
    return pos + nbits;
    /*uint8_t nbits = static_cast<uint8_t>(num==0?1:ceil(log2(num+1)));
    lblvec.setInt(pos, num, nbits);
    return pos + nbits;
    */
  }

  // Appends label |num| at bit |pos| using |fixedLength| bits; returns the
  // next free bit position.
  size_t insertFixedLengthColorLabel(uint64_t num, uint64_t pos,
                                     uint64_t fixedLength) {
    lblvec.setInt(pos, num, fixedLength);
    return pos + fixedLength;
  }

  // Serializes all bitvectors into |dir| (rank vector only when dynamic).
  // Always returns true.
  bool storeAll(std::string dir, uint64_t bitvecSize, uint64_t eqClsSize,
                bool isLblDynamic) {
    eqTvec.serialize(dir + "/eqTable", eqClsSize);
    lblvec.serialize(dir + "/lbl", bitvecSize);
    if (isLblDynamic) {
      rnkvec.serialize(dir + "/rnk", bitvecSize + 1);
    }
    return true;
    // return eqTvec.serialize(dir + "/eqTable", eqClsSize) &&
    // lblvec.serialize(dir + "/lbl", bitvecSize) && rnkvec.serialize(dir +
    // "/rnk", bitvecSize+1);
  }
};
// Entry point: reads a binary stream of per-edge color bitvectors, collapses
// them into equivalence classes, packs labels/ranks/eq-table into bitvectors,
// and writes the result plus an info.json into the result directory.
// "2pass" (default) counts classes first and sorts them by frequency for
// shorter labels; "1pass" assigns labels on first sight in a single scan.
int main(int argc, char* argv[]) {
  int startTime = getMilliCount();
  bool sort = true;
  // NOTE(review): |compress| is never read in this function.
  bool compress = true;
  bool dynamicLengthLbl = true;
  // std::cerr << "pack-color compiled with supported colors=" << NUM_COLS <<
  // std::endl;
  // std::cerr <<"Starting" << std::endl;
  parameters_t params;
  parse_arguments(argc, argv, params);
  if (params.pass == "1pass")
    sort = false;  // Anything else means apply sorting!! HeHe!!
  if (!rainbowfish::fs::FileExists(params.res_dir.c_str())) {
    rainbowfish::fs::MakeDir(params.res_dir.c_str());
  }
  const char* file_name = params.input_filename.c_str();
  // std::cerr << "file name: " << file_name << std::endl;
  const char* res_dir = params.res_dir.c_str();  //"bitvectors";
  // Open File
  std::ifstream colorfile(file_name, std::ios::in | std::ios::binary);
  // Edge count is derived from the file size: one color_bv record per edge.
  colorfile.seekg(0, colorfile.end);
  size_t end = colorfile.tellg();
  // std::cerr << "file size: " << end << std::endl;
  // std::cerr << "sizeof(color_bv): " << sizeof(color_bv) << std::endl;
  size_t num_color = params.num_colors;
  size_t num_edges = end / sizeof(color_bv);
  std::cerr << "Num Edges: " << num_edges << "\n";
  std::cerr << "Num Colors: " << num_color << "\n";
  int checkPointTime = getMilliCount();
  // NOTE(review): |allocationTime| is only referenced by commented-out code.
  int allocationTime = getMilliCount();
  class color_bv_hasher {
   public:
    size_t operator()(const color_bv& cbv) const {
      return XXH64(reinterpret_cast<void*>(const_cast<color_bv*>(&cbv)), sizeof(color_bv), 0);
    }
  };
  using CPType = ColorPacker<RBVec, RBVecCompressed, RBVecCompressed>;
  // FIRST ROUND going over all edges
  // Read file and fill out equivalence classes
  // std::cerr << "edges: " << num_edges << " colors: " << num_color << " Total:
  // " << num_edges * num_color << std::endl;
  // ColorPacker<RBVecCompressed, RBVecCompressed, RBVecCompressed> * cp;
  // ColorPacker<RBVec, RBVec, RBVec> * cp;
  std::unique_ptr<CPType> cp{nullptr};
  std::vector<std::pair<color_bv, uint64_t>> eqClsVec;
  uint64_t curPos = 0;
  if (sort) {
    // --- 2-pass mode ---
    // Pass 1: count occurrences of each distinct color vector.
    sparse_hash_map<color_bv, uint64_t> eqCls;
    colorfile.seekg(0, colorfile.beg);
    for (size_t i = 0; i < num_edges; i++) {
      if (i % 100000000 == 0) {
        std::cerr << getMilliSpan(checkPointTime) << " ms : " << i << " out of "
                  << num_edges << std::endl;
        checkPointTime = getMilliCount();
      }
      color_bv value;
      deserialize_color_bv(colorfile, value);
      auto eqIt = eqCls.find(value);
      if ( eqIt == eqCls.end()) {
        eqCls[value] = 1;
      } else {
        eqIt->second += 1;
      }
    }
    // std::cerr << getMilliSpan(allocationTime) << " ms : Succinct builder
    // object allocated" << std::endl;
    // checkPointTime = getMilliCount();
    // Put data in hashmap to vector for further probable sorting!!
    eqClsVec.reserve(eqCls.size());
    for (const auto& c : eqCls) {
      eqClsVec.push_back(c);
    }
    // sort the hashmap
    // Most frequent classes first, so they receive the shortest labels.
    auto cmp = [](std::pair<color_bv, uint64_t> const& a,
                  std::pair<color_bv, uint64_t> const& b) {
      return a.second > b.second;
    };
    std::sort(eqClsVec.begin(), eqClsVec.end(), cmp);
    // replacing labels instead of k-mer counts as the hash map values
    int lbl = 0;
    size_t totalBits = 0;
    uint64_t total_edges = 0;
    for (const auto& c : eqClsVec) {
      // std::cout <<lbl<< " , "<< c.second<<"\n ";
      total_edges += c.second;
      // for (uint64_t k=0;k<num_color;k++) if (c.first[k] == true)
      // std::cout<<k<<" ";
      // std::cout<<"\n";
      // totalBits += (lbl==0?c.second:ceil(log2(lbl+1))*c.second);
      totalBits += (LOG2(lbl + 2) * c.second);
      eqCls[c.first] = lbl++;
    }
    // std::cerr << getMilliSpan(checkPointTime) << " ms : (Sorting eq vector
    // and ) assigning a label to each eq class." << std::endl;
    // std::cerr << " Total edge vs total edge: "<<total_edges<<" vs "<<
    // num_edges<<"\n";
    size_t vecBits = totalBits;
    totalBits *= 2;
    // Choose between two approaches of dynamic or static label length
    size_t fixedLength = LOG2(eqCls.size()) + 1;
    size_t labelBitsetLength = num_edges * fixedLength;
    if (labelBitsetLength < totalBits)
      dynamicLengthLbl = false;
    if (dynamicLengthLbl) {
      std::cerr << "Going with Dynamic Length Label approach ....... \n";
      totalBits += num_color * eqCls.size();
      std::cerr << "total bits: " << totalBits << " or "
                << totalBits / (8 * pow(1024, 2)) << " MB\n";
      cp.reset(new CPType(eqCls.size() * num_color, vecBits, dynamicLengthLbl));
      // SECOND ROUND going over all edges
      // checkPointTime = getMilliCount();
      int packStartTime = getMilliCount();
      // create label & rank vectors
      colorfile.seekg(0, colorfile.beg);
      for (size_t i = 0; i < num_edges; i++) {
        if (i % 100000000 == 0) {
          std::cerr << getMilliSpan(checkPointTime) << " ms : " << i
                    << " out of " << num_edges << std::endl;
          checkPointTime = getMilliCount();
        }
        color_bv value;
        deserialize_color_bv(colorfile, value);
        // Mark the start of each variable-width label in the rank vector.
        (cp->rnkvec).set(curPos);
        auto eqIt = eqCls.find(value);
        curPos = cp->insertColorLabel(eqIt->second, curPos);
      }
      // Sentinel mark just past the final label.
      (cp->rnkvec).set(curPos);
    } else {
      std::cerr << "Going with Fixed Length Labels ....... \n";
      totalBits = labelBitsetLength + (num_color * eqCls.size());
      std::cerr << "total bits: " << totalBits << " or "
                << totalBits / (8 * pow(1024, 2)) << " MB\n";
      // cp = new ColorPacker<RBVecCompressed, RBVecCompressed,
      //RBVecCompressed>(eqCls.size()*num_color, vecBits);
      // cp = new ColorPacker<RBVec, RBVec,
      //RBVec>(eqCls.size()*num_color, vecBits);
      cp.reset(new CPType(eqCls.size() * num_color, labelBitsetLength,
                          dynamicLengthLbl));
      // cp = new ColorPacker<RBVec, RBVecCompressed,
      // RBVecCompressed>(eqCls.size()*num_color, labelBitsetLength,
      // dynamicLengthLbl);
      // SECOND ROUND going over all edges
      // checkPointTime = getMilliCount();
      int packStartTime = getMilliCount();
      // create label & rank vectors
      colorfile.seekg(0, colorfile.beg);
      for (size_t i = 0; i < num_edges; i++) {
        if (i % 100000000 == 0) {
          std::cerr << getMilliSpan(checkPointTime) << " ms : " << i
                    << " out of " << num_edges << std::endl;
          checkPointTime = getMilliCount();
        }
        color_bv value;
        deserialize_color_bv(colorfile, value);
        curPos =
            cp->insertFixedLengthColorLabel(eqCls[value], curPos, fixedLength);
      }
    }
    // std::cerr << "\n" << getMilliSpan(packStartTime) << " ms : Packing label
    // & rank into bitvector." << std::endl;
  } else {
    // --- 1-pass mode: assign labels in order of first appearance while
    // packing, sized pessimistically since counts are unknown up front. ---
    sparse_hash_map<color_bv, std::pair<uint64_t, uint64_t>> eqCls;
    int packStartTime = getMilliCount();
    // create label & rank vectors
    colorfile.seekg(0, colorfile.beg);
    // cp = new ColorPacker<RBVecCompressed, RBVecCompressed,
    //RBVecCompressed>(num_color*num_color, 2*num_edges*num_color);
    // cp = new ColorPacker<RBVec, RBVec, RBVec>(num_color*num_color,
    //2*num_edges*num_color);
    // cp = new ColorPacker<RBVec, RBVecCompressed,
    // RBVecCompressed>(num_color*num_color, 2*num_edges*num_color,
    // dynamicLengthLbl);
    cp.reset(new CPType(num_color * num_color, 2 * num_edges * num_color,
                        dynamicLengthLbl));
    for (size_t i = 0; i < num_edges; i++) {
      if (i % 100000000 == 0) {
        std::cerr << getMilliSpan(checkPointTime) << " ms : " << i << " out of "
                  << num_edges << std::endl;
        checkPointTime = getMilliCount();
      }
      color_bv value;
      deserialize_color_bv(colorfile, value);
      if (eqCls.find(value) == eqCls.end()) {
        eqCls[value] = std::make_pair(eqClsVec.size(), 1);
        eqClsVec.push_back(std::make_pair(value, eqCls[value].first));
      } else
        eqCls[value].second += 1;
      (cp->rnkvec).set(curPos);
      curPos = cp->insertColorLabel(eqCls[value].first, curPos);
    }
    (cp->rnkvec).set(curPos);
    // std::cerr << "\n" << getMilliSpan(packStartTime) << " ms : Packing label
    // & rank into bitvector." << std::endl;
    uint64_t eqCntr = 0;
    /*for (const auto& c : eqClsVec) {
    std::cout<<eqCntr++<<" , "<<(eqCls[c.first]).second<<"\n";
    }*/
  }
  // pack eqTable in bitvector
  // One row of num_color bits per equivalence class, in label order.
  // checkPointTime = getMilliCount();
  uint64_t i = 0;
  for (const auto& c : eqClsVec) {
    for (size_t j = 0; j < num_color; ++j) {
      if (c.first[j])
        (cp->eqTvec).set(i);
      i++;
    }
  }
  // std::cerr << getMilliSpan(checkPointTime) << " ms : Packing eq. table into
  // bitvector." << std::endl;
  // checkPointTime = getMilliCount();
  cp->storeAll(res_dir, curPos, eqClsVec.size() * num_color, dynamicLengthLbl);
  // std::cerr << getMilliSpan(checkPointTime) << " ms : Storing all three
  // bitvectors." << std::endl << std::endl;
  serialize_info(num_color, num_edges, eqClsVec.size(), "uncompressed",
                 "compressed", "compressed", res_dir, dynamicLengthLbl);
  std::cerr << getMilliSpan(startTime) / 1000.0 << " s : Total Time."
            << std::endl;
}
|
COMBINE-lab/rainbowfish
|
rb-pack-color.cpp
|
C++
|
bsd-3-clause
| 13,749
|
using System;
using Shouldly.Tests.TestHelpers;
namespace Shouldly.Tests.ShouldNotBe.WithTolerance
{
// Exercises ShouldNotBe with a TimeSpan tolerance: the assertion must throw
// when the two values fall within the tolerance and pass when they do not.
public class TimeSpanScenario : ShouldlyShouldTestScenario
{
    protected override void ShouldThrowAWobbly()
    {
        // 1h vs 2.1h differ by 1.1h, which lies inside the 1.5h tolerance,
        // so ShouldNotBe is expected to throw here.
        var timeSpan = TimeSpan.FromHours(1);
        timeSpan.ShouldNotBe(timeSpan.Add(TimeSpan.FromHours(1.1d)), TimeSpan.FromHours(1.5d));
    }
    protected override string ChuckedAWobblyErrorMessage
    {
        get { return "timeSpan should not be within 01:30:00 of 02:06:00 but was 01:00:00"; }
    }
    protected override void ShouldPass()
    {
        // Same 1.1h difference, but the tolerance is only 1h, so the values
        // are not "within tolerance" and ShouldNotBe succeeds.
        var timeSpan = TimeSpan.FromHours(1);
        timeSpan.ShouldNotBe(timeSpan.Add(TimeSpan.FromHours(1.1d)), TimeSpan.FromHours(1));
    }
}
}
|
MitjaBezensek/shouldly
|
src/Shouldly.Tests/ShouldNotBe/WithTolerance/TimeSpanScenario.cs
|
C#
|
bsd-3-clause
| 794
|
/* Per-call BLAS geometry and scalars, handed to the iterator via lp->opt_ptr.
 * The name is template-expanded so each generated type is unique. */
#define args_t <%=name%>_args_t
typedef struct {
    enum CBLAS_ORDER order;     /* row- vs column-major layout */
    enum CBLAS_UPLO uplo;       /* which triangle of c is referenced */
    enum CBLAS_TRANSPOSE trans; /* whether a/b are transposed */
    dtype alpha, beta;          /* scaling factors for the update */
    blasint n, k;               /* c is n-by-n; a and b are n-by-k */
} args_t;
/* Function pointer to the CBLAS routine, resolved at load time (see CHECK_FUNC). */
#define func_p <%=func_name%>_p
static <%=func_name%>_t func_p = 0;
/*
 * ndloop iterator body: pulls the a, b, c operand pointers and their
 * byte strides out of the loop state, converts strides to element
 * counts, and dispatches to the dynamically-resolved CBLAS routine.
 */
static void
<%=c_iter%>(na_loop_t *const lp)
{
    dtype *a, *b, *c;
    blasint lda, ldb, ldc;
    args_t *g;

    a = (dtype*)NDL_PTR(lp,0);
    b = (dtype*)NDL_PTR(lp,1);
    c = (dtype*)NDL_PTR(lp,2);
    g = (args_t*)(lp->opt_ptr);
    /* NDL_STEP is in bytes; leading dimensions are in elements. */
    lda = NDL_STEP(lp,0) / sizeof(dtype);
    ldb = NDL_STEP(lp,1) / sizeof(dtype);
    ldc = NDL_STEP(lp,2) / sizeof(dtype);
    (*func_p)(g->order, g->uplo, g->trans, g->n, g->k,
              DP(g->alpha), a, lda, b, ldb, DP(g->beta), c, ldc);
}
/*<%
 params = [
   mat("a","n-by-k"),
   mat("b","n-by-k"),
   # FIX: was `:inpace` -- the marker for an in-place (OVERWRITE) operand is
   # spelled :inplace, matching ain[2]'s OVERWRITE flag in the entry point.
   mat("c","n-by-n, optional",:inplace),
   opt("alpha"),
   opt("beta"),
   opt("uplo"),
   opt("trans"),
   opt("order")
 ].select{|x| x}.join("\n  ")
%>
  @overload <%=name%>( a, b, [c, alpha:1, beta:0, uplo:'U', trans:'N', order:'R'] )
  <%=params%>
  @return [<%=class_name%>] returns c.
<%=description%>
*/
/*
 * Ruby-facing entry point for <%=name%> (symmetric rank-2k update).
 * Parses (a, b[, c]) plus keyword options, normalizes the BLAS geometry
 * for the requested order/trans, and runs the iterator over the operands.
 * Returns a freshly allocated result when c was omitted, otherwise the
 * in-place-updated c.
 */
static VALUE
<%=c_func(-1)%>(int argc, VALUE const argv[], VALUE UNUSED(mod))
{
    VALUE ans;
    VALUE a, b, c=Qnil, alpha, beta;
    narray_t *na1, *na2, *na3;
    blasint na, ka, kb, nb, nc, tmp;
    size_t shape[2];
    ndfunc_arg_in_t ain[4] = {{cT,2},{cT,2},{OVERWRITE,2},{sym_init,0}};
    ndfunc_arg_out_t aout[1] = {{cT,2,shape}};
    ndfunc_t ndf = {<%=c_iter%>, NO_LOOP, 3, 0, ain, aout};
    args_t g;
    VALUE kw_hash = Qnil;
    ID kw_table[5] = {id_alpha,id_beta,id_order,id_uplo,id_trans};
    VALUE opts[5] = {Qundef,Qundef,Qundef,Qundef,Qundef};

    CHECK_FUNC(func_p,"<%=func_name%>");
    /* "21:" = two required args (a, b), one optional (c), keyword hash. */
    rb_scan_args(argc, argv, "21:", &a, &b, &c, &kw_hash);
    rb_get_kwargs(kw_hash, kw_table, 0, 5, opts);
    alpha = option_value(opts[0],Qnil);
    g.alpha = RTEST(alpha) ? m_num_to_data(alpha) : m_one;
    beta = option_value(opts[1],Qnil);
    g.beta = RTEST(beta) ? m_num_to_data(beta) : m_zero;
    g.order = option_order(opts[2]);
    g.uplo = option_uplo(opts[3]);
    g.trans = option_trans(opts[4]);

    GetNArray(a,na1);
    GetNArray(b,na2);
    CHECK_DIM_GE(na1,2);
    CHECK_DIM_GE(na2,2);
    na = ROW_SIZE(na1); // n
    ka = COL_SIZE(na1); // k (lda)
    SWAP_IFCOLTR(g.order, g.trans, na, ka, tmp);
    nb = ROW_SIZE(na2); // n
    kb = COL_SIZE(na2); // k (ldb)
    SWAP_IFCOLTR(g.order, g.trans, kb, nb, tmp);
    CHECK_INT_EQ("na",na,"nb",nb);
    CHECK_INT_EQ("ka",ka,"kb",kb);
    g.n = nb;
    g.k = kb;
    SWAP_IFROW(g.order, na, nb, tmp);

    if (c == Qnil) { // c is not given: switch to allocating an output array.
        ndf.nout = 1;
        ain[2] = ain[3];
        c = INT2FIX(0);
        shape[0] = nb;
        shape[1] = na;
    } else {
        COPY_OR_CAST_TO(c,cT);
        GetNArray(c,na3);
        CHECK_DIM_GE(na3,2);
        nc = ROW_SIZE(na3); // n
        if (nc < nb) {
            rb_raise(nary_eShapeError,"nc=%d must be >= nb=%d",nc,nb);
        }
        //CHECK_LEADING_GE("ldc",g.ldc,"n",na);
    }
    ans = na_ndloop3(&ndf, &g, 3, a, b, c);

    /* FIX: was `if (ndf.nout = 1)` -- an assignment, not a comparison --
     * so this always returned `ans`, even when the caller supplied c for
     * an in-place update. */
    if (ndf.nout == 1) { // c is not given.
        return ans;
    } else {
        return c;
    }
}
#undef func_p
#undef args_t
|
ruby-numo/linalg
|
ext/numo/linalg/blas/tmpl/syr2k.c
|
C
|
bsd-3-clause
| 3,260
|
using System;
#pragma warning disable 1591
// ReSharper disable UnusedMember.Global
// ReSharper disable UnusedParameter.Local
// ReSharper disable MemberCanBePrivate.Global
// ReSharper disable UnusedAutoPropertyAccessor.Global
// ReSharper disable IntroduceOptionalParameters.Global
// ReSharper disable MemberCanBeProtected.Global
// ReSharper disable InconsistentNaming
namespace FileBiggy.Properties
{
/// <summary>
/// Indicates that the value of the marked element could be <c>null</c> sometimes,
/// so the check for <c>null</c> is necessary before its usage
/// </summary>
/// <example><code>
/// [CanBeNull] public object Test() { return null; }
/// public void UseTest() {
/// var p = Test();
/// var s = p.ToString(); // Warning: Possible 'System.NullReferenceException'
/// }
/// </code></example>
[AttributeUsage(
AttributeTargets.Method | AttributeTargets.Parameter |
AttributeTargets.Property | AttributeTargets.Delegate |
AttributeTargets.Field, AllowMultiple = false, Inherited = true)]
public sealed class CanBeNullAttribute : Attribute
{
}
/// <summary>
/// Indicates that the value of the marked element could never be <c>null</c>
/// </summary>
/// <example><code>
/// [NotNull] public object Foo() {
/// return null; // Warning: Possible 'null' assignment
/// }
/// </code></example>
[AttributeUsage(
AttributeTargets.Method | AttributeTargets.Parameter |
AttributeTargets.Property | AttributeTargets.Delegate |
AttributeTargets.Field, AllowMultiple = false, Inherited = true)]
public sealed class NotNullAttribute : Attribute
{
}
/// <summary>
/// Indicates that the marked method builds string by format pattern and (optional) arguments.
/// Parameter, which contains format string, should be given in constructor. The format string
/// should be in <see cref="string.Format(IFormatProvider,string,object[])"/>-like form
/// </summary>
/// <example><code>
/// [StringFormatMethod("message")]
/// public void ShowError(string message, params object[] args) { /* do something */ }
/// public void Foo() {
/// ShowError("Failed: {0}"); // Warning: Non-existing argument in format string
/// }
/// </code></example>
[AttributeUsage(
AttributeTargets.Constructor | AttributeTargets.Method,
AllowMultiple = false, Inherited = true)]
public sealed class StringFormatMethodAttribute : Attribute
{
/// <param name="formatParameterName">
/// Specifies which parameter of an annotated method should be treated as format-string
/// </param>
public StringFormatMethodAttribute(string formatParameterName)
{
FormatParameterName = formatParameterName;
}
public string FormatParameterName { get; private set; }
}
/// <summary>
/// Indicates that the function argument should be string literal and match one
/// of the parameters of the caller function. For example, ReSharper annotates
/// the parameter of <see cref="System.ArgumentNullException"/>
/// </summary>
/// <example><code>
/// public void Foo(string param) {
/// if (param == null)
/// throw new ArgumentNullException("par"); // Warning: Cannot resolve symbol
/// }
/// </code></example>
[AttributeUsage(AttributeTargets.Parameter, AllowMultiple = false, Inherited = true)]
public sealed class InvokerParameterNameAttribute : Attribute
{
}
/// <summary>
/// Indicates that the method is contained in a type that implements
/// <see cref="System.ComponentModel.INotifyPropertyChanged"/> interface
/// and this method is used to notify that some property value changed
/// </summary>
/// <remarks>
/// The method should be non-static and conform to one of the supported signatures:
/// <list>
/// <item><c>NotifyChanged(string)</c></item>
/// <item><c>NotifyChanged(params string[])</c></item>
/// <item><c>NotifyChanged{T}(Expression{Func{T}})</c></item>
/// <item><c>NotifyChanged{T,U}(Expression{Func{T,U}})</c></item>
/// <item><c>SetProperty{T}(ref T, T, string)</c></item>
/// </list>
/// </remarks>
/// <example><code>
/// public class Foo : INotifyPropertyChanged {
/// public event PropertyChangedEventHandler PropertyChanged;
/// [NotifyPropertyChangedInvocator]
/// protected virtual void NotifyChanged(string propertyName) { ... }
///
/// private string _name;
/// public string Name {
/// get { return _name; }
/// set { _name = value; NotifyChanged("LastName"); /* Warning */ }
/// }
/// }
/// </code>
/// Examples of generated notifications:
/// <list>
/// <item><c>NotifyChanged("Property")</c></item>
/// <item><c>NotifyChanged(() => Property)</c></item>
/// <item><c>NotifyChanged((VM x) => x.Property)</c></item>
/// <item><c>SetProperty(ref myField, value, "Property")</c></item>
/// </list>
/// </example>
[AttributeUsage(AttributeTargets.Method, AllowMultiple = false, Inherited = true)]
public sealed class NotifyPropertyChangedInvocatorAttribute : Attribute
{
public NotifyPropertyChangedInvocatorAttribute()
{
}
public NotifyPropertyChangedInvocatorAttribute(string parameterName)
{
ParameterName = parameterName;
}
public string ParameterName { get; private set; }
}
/// <summary>
/// Describes dependency between method input and output
/// </summary>
/// <syntax>
/// <p>Function Definition Table syntax:</p>
/// <list>
/// <item>FDT ::= FDTRow [;FDTRow]*</item>
/// <item>FDTRow ::= Input => Output | Output <= Input</item>
/// <item>Input ::= ParameterName: Value [, Input]*</item>
/// <item>Output ::= [ParameterName: Value]* {halt|stop|void|nothing|Value}</item>
/// <item>Value ::= true | false | null | notnull | canbenull</item>
/// </list>
    /// If a method has a single input parameter, its name can be omitted.<br/>
    /// Using <c>halt</c> (or <c>void</c>/<c>nothing</c>, which is the same)
    /// for method output means that the method doesn't return normally.<br/>
/// <c>canbenull</c> annotation is only applicable for output parameters.<br/>
/// You can use multiple <c>[ContractAnnotation]</c> for each FDT row,
/// or use single attribute with rows separated by semicolon.<br/>
/// </syntax>
/// <examples><list>
/// <item><code>
/// [ContractAnnotation("=> halt")]
/// public void TerminationMethod()
/// </code></item>
/// <item><code>
/// [ContractAnnotation("halt <= condition: false")]
/// public void Assert(bool condition, string text) // regular assertion method
/// </code></item>
/// <item><code>
/// [ContractAnnotation("s:null => true")]
/// public bool IsNullOrEmpty(string s) // string.IsNullOrEmpty()
/// </code></item>
/// <item><code>
/// // A method that returns null if the parameter is null, and not null if the parameter is not null
/// [ContractAnnotation("null => null; notnull => notnull")]
/// public object Transform(object data)
/// </code></item>
/// <item><code>
/// [ContractAnnotation("s:null=>false; =>true,result:notnull; =>false, result:null")]
/// public bool TryParse(string s, out Person result)
/// </code></item>
/// </list></examples>
[AttributeUsage(AttributeTargets.Method, AllowMultiple = true, Inherited = true)]
public sealed class ContractAnnotationAttribute : Attribute
{
public ContractAnnotationAttribute([NotNull] string contract)
: this(contract, false)
{
}
public ContractAnnotationAttribute([NotNull] string contract, bool forceFullStates)
{
Contract = contract;
ForceFullStates = forceFullStates;
}
public string Contract { get; private set; }
public bool ForceFullStates { get; private set; }
}
/// <summary>
/// Indicates that marked element should be localized or not
/// </summary>
/// <example><code>
/// [LocalizationRequiredAttribute(true)]
/// public class Foo {
/// private string str = "my string"; // Warning: Localizable string
/// }
/// </code></example>
[AttributeUsage(AttributeTargets.All, AllowMultiple = false, Inherited = true)]
public sealed class LocalizationRequiredAttribute : Attribute
{
public LocalizationRequiredAttribute() : this(true)
{
}
public LocalizationRequiredAttribute(bool required)
{
Required = required;
}
public bool Required { get; private set; }
}
/// <summary>
/// Indicates that the value of the marked type (or its derivatives)
/// cannot be compared using '==' or '!=' operators and <c>Equals()</c>
/// should be used instead. However, using '==' or '!=' for comparison
/// with <c>null</c> is always permitted.
/// </summary>
/// <example><code>
/// [CannotApplyEqualityOperator]
/// class NoEquality { }
/// class UsesNoEquality {
/// public void Test() {
/// var ca1 = new NoEquality();
/// var ca2 = new NoEquality();
/// if (ca1 != null) { // OK
/// bool condition = ca1 == ca2; // Warning
/// }
/// }
/// }
/// </code></example>
[AttributeUsage(
AttributeTargets.Interface | AttributeTargets.Class |
AttributeTargets.Struct, AllowMultiple = false, Inherited = true)]
public sealed class CannotApplyEqualityOperatorAttribute : Attribute
{
}
/// <summary>
/// When applied to a target attribute, specifies a requirement for any type marked
/// with the target attribute to implement or inherit specific type or types.
/// </summary>
/// <example><code>
/// [BaseTypeRequired(typeof(IComponent)] // Specify requirement
/// public class ComponentAttribute : Attribute { }
/// [Component] // ComponentAttribute requires implementing IComponent interface
/// public class MyComponent : IComponent { }
/// </code></example>
[AttributeUsage(AttributeTargets.Class, AllowMultiple = true, Inherited = true)]
[BaseTypeRequired(typeof (Attribute))]
public sealed class BaseTypeRequiredAttribute : Attribute
{
public BaseTypeRequiredAttribute([NotNull] Type baseType)
{
BaseType = baseType;
}
[NotNull]
public Type BaseType { get; private set; }
}
/// <summary>
/// Indicates that the marked symbol is used implicitly
/// (e.g. via reflection, in external library), so this symbol
/// will not be marked as unused (as well as by other usage inspections)
/// </summary>
[AttributeUsage(AttributeTargets.All, AllowMultiple = false, Inherited = true)]
public sealed class UsedImplicitlyAttribute : Attribute
{
public UsedImplicitlyAttribute()
: this(ImplicitUseKindFlags.Default, ImplicitUseTargetFlags.Default)
{
}
public UsedImplicitlyAttribute(ImplicitUseKindFlags useKindFlags)
: this(useKindFlags, ImplicitUseTargetFlags.Default)
{
}
public UsedImplicitlyAttribute(ImplicitUseTargetFlags targetFlags)
: this(ImplicitUseKindFlags.Default, targetFlags)
{
}
public UsedImplicitlyAttribute(
ImplicitUseKindFlags useKindFlags, ImplicitUseTargetFlags targetFlags)
{
UseKindFlags = useKindFlags;
TargetFlags = targetFlags;
}
public ImplicitUseKindFlags UseKindFlags { get; private set; }
public ImplicitUseTargetFlags TargetFlags { get; private set; }
}
/// <summary>
/// Should be used on attributes and causes ReSharper
/// to not mark symbols marked with such attributes as unused
/// (as well as by other usage inspections)
/// </summary>
[AttributeUsage(AttributeTargets.Class, AllowMultiple = false, Inherited = true)]
public sealed class MeansImplicitUseAttribute : Attribute
{
public MeansImplicitUseAttribute()
: this(ImplicitUseKindFlags.Default, ImplicitUseTargetFlags.Default)
{
}
public MeansImplicitUseAttribute(ImplicitUseKindFlags useKindFlags)
: this(useKindFlags, ImplicitUseTargetFlags.Default)
{
}
public MeansImplicitUseAttribute(ImplicitUseTargetFlags targetFlags)
: this(ImplicitUseKindFlags.Default, targetFlags)
{
}
public MeansImplicitUseAttribute(
ImplicitUseKindFlags useKindFlags, ImplicitUseTargetFlags targetFlags)
{
UseKindFlags = useKindFlags;
TargetFlags = targetFlags;
}
[UsedImplicitly]
public ImplicitUseKindFlags UseKindFlags { get; private set; }
[UsedImplicitly]
public ImplicitUseTargetFlags TargetFlags { get; private set; }
}
[Flags]
public enum ImplicitUseKindFlags
{
Default = Access | Assign | InstantiatedWithFixedConstructorSignature,
/// <summary>Only entity marked with attribute considered used</summary>
Access = 1,
/// <summary>Indicates implicit assignment to a member</summary>
Assign = 2,
/// <summary>
/// Indicates implicit instantiation of a type with fixed constructor signature.
/// That means any unused constructor parameters won't be reported as such.
/// </summary>
InstantiatedWithFixedConstructorSignature = 4,
/// <summary>Indicates implicit instantiation of a type</summary>
InstantiatedNoFixedConstructorSignature = 8,
}
/// <summary>
/// Specify what is considered used implicitly
/// when marked with <see cref="MeansImplicitUseAttribute"/>
/// or <see cref="UsedImplicitlyAttribute"/>
/// </summary>
[Flags]
public enum ImplicitUseTargetFlags
{
Default = Itself,
Itself = 1,
/// <summary>Members of entity marked with attribute are considered used</summary>
Members = 2,
/// <summary>Entity marked with attribute and all its members considered used</summary>
WithMembers = Itself | Members
}
/// <summary>
/// This attribute is intended to mark publicly available API
/// which should not be removed and so is treated as used
/// </summary>
[MeansImplicitUse]
public sealed class PublicAPIAttribute : Attribute
{
public PublicAPIAttribute()
{
}
public PublicAPIAttribute([NotNull] string comment)
{
Comment = comment;
}
[NotNull]
public string Comment { get; private set; }
}
/// <summary>
/// Tells code analysis engine if the parameter is completely handled
/// when the invoked method is on stack. If the parameter is a delegate,
/// indicates that delegate is executed while the method is executed.
/// If the parameter is an enumerable, indicates that it is enumerated
/// while the method is executed
/// </summary>
[AttributeUsage(AttributeTargets.Parameter, Inherited = true)]
public sealed class InstantHandleAttribute : Attribute
{
}
/// <summary>
/// Indicates that a method does not make any observable state changes.
/// The same as <c>System.Diagnostics.Contracts.PureAttribute</c>
/// </summary>
/// <example><code>
/// [Pure] private int Multiply(int x, int y) { return x * y; }
/// public void Foo() {
/// const int a = 2, b = 2;
    /// Multiply(a, b); // Warning: Return value of pure method is not used
/// }
/// </code></example>
[AttributeUsage(AttributeTargets.Method, Inherited = true)]
public sealed class PureAttribute : Attribute
{
}
/// <summary>
/// Indicates that a parameter is a path to a file or a folder
/// within a web project. Path can be relative or absolute,
/// starting from web root (~)
/// </summary>
[AttributeUsage(AttributeTargets.Parameter)]
public class PathReferenceAttribute : Attribute
{
public PathReferenceAttribute()
{
}
public PathReferenceAttribute([PathReference] string basePath)
{
BasePath = basePath;
}
[NotNull]
public string BasePath { get; private set; }
}
// ASP.NET MVC attributes
[AttributeUsage(AttributeTargets.Assembly, AllowMultiple = true)]
public sealed class AspMvcAreaMasterLocationFormatAttribute : Attribute
{
public AspMvcAreaMasterLocationFormatAttribute(string format)
{
}
}
[AttributeUsage(AttributeTargets.Assembly, AllowMultiple = true)]
public sealed class AspMvcAreaPartialViewLocationFormatAttribute : Attribute
{
public AspMvcAreaPartialViewLocationFormatAttribute(string format)
{
}
}
[AttributeUsage(AttributeTargets.Assembly, AllowMultiple = true)]
public sealed class AspMvcAreaViewLocationFormatAttribute : Attribute
{
public AspMvcAreaViewLocationFormatAttribute(string format)
{
}
}
[AttributeUsage(AttributeTargets.Assembly, AllowMultiple = true)]
public sealed class AspMvcMasterLocationFormatAttribute : Attribute
{
public AspMvcMasterLocationFormatAttribute(string format)
{
}
}
[AttributeUsage(AttributeTargets.Assembly, AllowMultiple = true)]
public sealed class AspMvcPartialViewLocationFormatAttribute : Attribute
{
public AspMvcPartialViewLocationFormatAttribute(string format)
{
}
}
[AttributeUsage(AttributeTargets.Assembly, AllowMultiple = true)]
public sealed class AspMvcViewLocationFormatAttribute : Attribute
{
public AspMvcViewLocationFormatAttribute(string format)
{
}
}
/// <summary>
/// ASP.NET MVC attribute. If applied to a parameter, indicates that the parameter
/// is an MVC action. If applied to a method, the MVC action name is calculated
/// implicitly from the context. Use this attribute for custom wrappers similar to
/// <c>System.Web.Mvc.Html.ChildActionExtensions.RenderAction(HtmlHelper, String)</c>
/// </summary>
[AttributeUsage(AttributeTargets.Parameter | AttributeTargets.Method)]
public sealed class AspMvcActionAttribute : Attribute
{
public AspMvcActionAttribute()
{
}
public AspMvcActionAttribute([NotNull] string anonymousProperty)
{
AnonymousProperty = anonymousProperty;
}
[NotNull]
public string AnonymousProperty { get; private set; }
}
/// <summary>
/// ASP.NET MVC attribute. Indicates that a parameter is an MVC area.
/// Use this attribute for custom wrappers similar to
/// <c>System.Web.Mvc.Html.ChildActionExtensions.RenderAction(HtmlHelper, String)</c>
/// </summary>
[AttributeUsage(AttributeTargets.Parameter)]
public sealed class AspMvcAreaAttribute : PathReferenceAttribute
{
public AspMvcAreaAttribute()
{
}
public AspMvcAreaAttribute([NotNull] string anonymousProperty)
{
AnonymousProperty = anonymousProperty;
}
[NotNull]
public string AnonymousProperty { get; private set; }
}
/// <summary>
/// ASP.NET MVC attribute. If applied to a parameter, indicates that
/// the parameter is an MVC controller. If applied to a method,
/// the MVC controller name is calculated implicitly from the context.
/// Use this attribute for custom wrappers similar to
/// <c>System.Web.Mvc.Html.ChildActionExtensions.RenderAction(HtmlHelper, String, String)</c>
/// </summary>
[AttributeUsage(AttributeTargets.Parameter | AttributeTargets.Method)]
public sealed class AspMvcControllerAttribute : Attribute
{
public AspMvcControllerAttribute()
{
}
public AspMvcControllerAttribute([NotNull] string anonymousProperty)
{
AnonymousProperty = anonymousProperty;
}
[NotNull]
public string AnonymousProperty { get; private set; }
}
/// <summary>
/// ASP.NET MVC attribute. Indicates that a parameter is an MVC Master.
/// Use this attribute for custom wrappers similar to
/// <c>System.Web.Mvc.Controller.View(String, String)</c>
/// </summary>
[AttributeUsage(AttributeTargets.Parameter)]
public sealed class AspMvcMasterAttribute : Attribute
{
}
/// <summary>
/// ASP.NET MVC attribute. Indicates that a parameter is an MVC model type.
/// Use this attribute for custom wrappers similar to
/// <c>System.Web.Mvc.Controller.View(String, Object)</c>
/// </summary>
[AttributeUsage(AttributeTargets.Parameter)]
public sealed class AspMvcModelTypeAttribute : Attribute
{
}
/// <summary>
/// ASP.NET MVC attribute. If applied to a parameter, indicates that
/// the parameter is an MVC partial view. If applied to a method,
/// the MVC partial view name is calculated implicitly from the context.
/// Use this attribute for custom wrappers similar to
/// <c>System.Web.Mvc.Html.RenderPartialExtensions.RenderPartial(HtmlHelper, String)</c>
/// </summary>
[AttributeUsage(AttributeTargets.Parameter | AttributeTargets.Method)]
public sealed class AspMvcPartialViewAttribute : PathReferenceAttribute
{
}
/// <summary>
/// ASP.NET MVC attribute. Allows disabling all inspections
/// for MVC views within a class or a method.
/// </summary>
[AttributeUsage(AttributeTargets.Class | AttributeTargets.Method)]
public sealed class AspMvcSupressViewErrorAttribute : Attribute
{
}
/// <summary>
/// ASP.NET MVC attribute. Indicates that a parameter is an MVC display template.
/// Use this attribute for custom wrappers similar to
/// <c>System.Web.Mvc.Html.DisplayExtensions.DisplayForModel(HtmlHelper, String)</c>
/// </summary>
[AttributeUsage(AttributeTargets.Parameter)]
public sealed class AspMvcDisplayTemplateAttribute : Attribute
{
}
/// <summary>
/// ASP.NET MVC attribute. Indicates that a parameter is an MVC editor template.
/// Use this attribute for custom wrappers similar to
/// <c>System.Web.Mvc.Html.EditorExtensions.EditorForModel(HtmlHelper, String)</c>
/// </summary>
[AttributeUsage(AttributeTargets.Parameter)]
public sealed class AspMvcEditorTemplateAttribute : Attribute
{
}
/// <summary>
/// ASP.NET MVC attribute. Indicates that a parameter is an MVC template.
/// Use this attribute for custom wrappers similar to
/// <c>System.ComponentModel.DataAnnotations.UIHintAttribute(System.String)</c>
/// </summary>
[AttributeUsage(AttributeTargets.Parameter)]
public sealed class AspMvcTemplateAttribute : Attribute
{
}
/// <summary>
/// ASP.NET MVC attribute. If applied to a parameter, indicates that the parameter
/// is an MVC view. If applied to a method, the MVC view name is calculated implicitly
/// from the context. Use this attribute for custom wrappers similar to
/// <c>System.Web.Mvc.Controller.View(Object)</c>
/// </summary>
[AttributeUsage(AttributeTargets.Parameter | AttributeTargets.Method)]
public sealed class AspMvcViewAttribute : PathReferenceAttribute
{
}
/// <summary>
/// ASP.NET MVC attribute. When applied to a parameter of an attribute,
/// indicates that this parameter is an MVC action name
/// </summary>
/// <example><code>
/// [ActionName("Foo")]
/// public ActionResult Login(string returnUrl) {
/// ViewBag.ReturnUrl = Url.Action("Foo"); // OK
/// return RedirectToAction("Bar"); // Error: Cannot resolve action
/// }
/// </code></example>
[AttributeUsage(AttributeTargets.Parameter | AttributeTargets.Property)]
public sealed class AspMvcActionSelectorAttribute : Attribute
{
}
[AttributeUsage(
AttributeTargets.Parameter | AttributeTargets.Property |
AttributeTargets.Field, Inherited = true)]
public sealed class HtmlElementAttributesAttribute : Attribute
{
public HtmlElementAttributesAttribute()
{
}
public HtmlElementAttributesAttribute([NotNull] string name)
{
Name = name;
}
[NotNull]
public string Name { get; private set; }
}
[AttributeUsage(
AttributeTargets.Parameter | AttributeTargets.Field |
AttributeTargets.Property, Inherited = true)]
public sealed class HtmlAttributeValueAttribute : Attribute
{
public HtmlAttributeValueAttribute([NotNull] string name)
{
Name = name;
}
[NotNull]
public string Name { get; private set; }
}
// Razor attributes
/// <summary>
/// Razor attribute. Indicates that a parameter or a method is a Razor section.
/// Use this attribute for custom wrappers similar to
/// <c>System.Web.WebPages.WebPageBase.RenderSection(String)</c>
/// </summary>
[AttributeUsage(AttributeTargets.Parameter | AttributeTargets.Method, Inherited = true)]
public sealed class RazorSectionAttribute : Attribute
{
}
}
|
flumbee/filebiggy
|
src/FileBiggy/Properties/Annotations.cs
|
C#
|
bsd-3-clause
| 25,736
|
/*
import * as React from 'react'
import SelectOtherDevice from '.'
import {action, storiesOf} from '../../stories/storybook'
import * as Constants from '../../constants/provision'
import * as Types from '../../constants/types/provision'
const rd = {
cTime: 0,
encryptKey: '',
lastUsedTime: 0,
mTime: 0,
status: 0,
verifyKey: '',
}
const props = {
devices: [
Constants.rpcDeviceToDevice({
...rd,
deviceID: '1',
deviceNumberOfType: 1,
name: 'iphone',
type: 'mobile',
}),
Constants.rpcDeviceToDevice({
...rd,
deviceID: '2',
deviceNumberOfType: 2,
name: 'Home Computer',
type: 'desktop',
}),
Constants.rpcDeviceToDevice({
...rd,
deviceID: '3',
deviceNumberOfType: 3,
name: 'Android Nexus 5x',
type: 'mobile',
}),
Constants.rpcDeviceToDevice({
...rd,
deviceID: '4',
deviceNumberOfType: 4,
name: 'Tuba Contest',
type: 'backup',
}),
],
onBack: action('onBack'),
onResetAccount: action('onResetAccount'),
onSelect: action('onSelect'),
}
const tonsOfDevices: Array<Types.Device> = []
for (var i = 0; i < 100; ++i) {
let type: string
switch (i % 3) {
case 0:
type = 'desktop'
break
case 1:
type = 'mobile'
break
default:
type = 'backup'
break
}
tonsOfDevices.push(
Constants.rpcDeviceToDevice({
...rd,
deviceID: String(i + 1),
deviceNumberOfType: i,
name: 'name: ' + String(i),
type,
})
)
}
const load = () => {
storiesOf('Provision/SelectOtherDevice', module)
.add('Normal', () => <SelectOtherDevice {...props} />)
.add('Tons', () => <SelectOtherDevice {...props} devices={tonsOfDevices} />)
}
export default load
*/
export default {}
|
keybase/client
|
shared/provision/select-other-device/index.stories.tsx
|
TypeScript
|
bsd-3-clause
| 1,814
|
#include <cstdio>
#include <hare/base/current_thread.h>
#include <hare/base/singleton.h>
#include <hare/base/thread.h>
#include <hare/base/noncopyable.h>
// Singleton payload for the test: logs the constructing/destructing thread id
// and its own address so the output shows which thread created the (single)
// instance and when it is torn down.
class Test : public hare::NonCopyable {
public:
  Test() {
    printf("tid=%d, constructing %p\n", hare::CurrentThread::tid(), this);
  }
  ~Test() {
    printf("tid=%d, destructing %p %s\n", hare::CurrentThread::tid(), this,
           name_.c_str());
  }
  // Name accessor/mutator; main() and the worker thread both mutate this to
  // demonstrate that they share one instance.
  const hare::String &name() const { return name_; }
  void setName(const hare::String &n) { name_ = n; }

private:
  hare::String name_;
};
// Variant used to exercise the "never destroy" path.  The declared-but-
// undefined no_destroy() member presumably acts as a marker that
// hare::Singleton detects (e.g. via SFINAE) to skip registering the atexit
// destructor -- TODO confirm against hare/base/singleton.h.  main() expects
// this instance to be reported as leaked by valgrind.
class TestNoDestroy : public hare::NonCopyable {
public:
  void no_destroy();
  TestNoDestroy() {
    printf("tid=%d, constructing TestNoDestroy %p\n",
           hare::CurrentThread::tid(), this);
  }
  ~TestNoDestroy() {
    // Should never run if the no-destroy marker is honored.
    printf("tid=%d, destructing TestNoDestroy %p\n", hare::CurrentThread::tid(),
           this);
  }
};
// Worker-thread body: prints the singleton's address and current name as seen
// from the second thread, then renames it so main() can observe (after join)
// that both threads operated on the same instance.
void threadFunc() {
  printf("tid=%d, %p name=%s\n", hare::CurrentThread::tid(),
         &hare::Singleton<Test>::instance(),
         hare::Singleton<Test>::instance().name().c_str());
  hare::Singleton<Test>::instance().setName("only one, changed");
}
// Drives the singleton test: touch the instance from the main thread, mutate
// it from a second thread, then verify the mutation is visible here (same
// address, updated name).  Finally instantiate the never-destroyed variant,
// whose memory is intentionally not reclaimed.
int main() {
  hare::Singleton<Test>::instance().setName("only one");
  hare::Thread t1(threadFunc);
  t1.start();
  t1.join();
  // After join: prints the name set by threadFunc ("only one, changed").
  printf("tid=%d, %p name=%s\n", hare::CurrentThread::tid(),
         &hare::Singleton<Test>::instance(),
         hare::Singleton<Test>::instance().name().c_str());
  // Deliberate leak: see TestNoDestroy's no_destroy() marker.
  hare::Singleton<TestNoDestroy>::instance();
  printf("with valgrind, you should see %zd-byte memory leak.\n",
         sizeof(TestNoDestroy));
}
|
fallenwood/libhare
|
tests/base/singleton_test.cc
|
C++
|
bsd-3-clause
| 1,644
|
import emission.analysis.modelling.tour_model.data_preprocessing as preprocess
# to determine if the user is valid:
# valid user should have >= 10 trips for further analysis and the proportion of filter_trips is >=50%
def valid_user(filter_trips,trips):
    """Return True when the user has enough usable data for analysis.

    A user is valid when at least 10 trips survived filtering AND the
    filtered trips make up at least half of all trips.  `and` short-circuits,
    so an empty `trips` list never reaches the division (same as the
    original flag-based version).
    """
    return len(filter_trips) >= 10 and len(filter_trips) / len(trips) >= 0.5
# - user_ls: a list of strings representing short user names, such as [user1, user2, user3...]
# - valid_user_ls: a subset of `user_ls` for valid users, so also string representation of user names
# - all_users: a collection of all user ids, in terms of user id objects
def get_user_ls(all_users,radius):
    """Build short display names for all users and collect the valid ones.

    :param all_users: collection of user id objects
    :param radius: radius passed through to preprocess.filter_data
    :return: (user_ls, valid_user_ls) -- short names ("user1", "user2", ...)
        for every user in order, and the subset of those names whose data
        passes valid_user().
    """
    user_ls = []
    valid_user_ls = []
    for i, user in enumerate(all_users):
        curr_user = 'user' + str(i + 1)
        # Every user gets a short name unconditionally (the original appended
        # it in both branches of an if/else and ended with a dead `continue`).
        user_ls.append(curr_user)
        trips = preprocess.read_data(user)
        filter_trips = preprocess.filter_data(trips, radius)
        if valid_user(filter_trips, trips):
            valid_user_ls.append(curr_user)
    return user_ls, valid_user_ls
|
e-mission/e-mission-server
|
emission/analysis/modelling/tour_model/get_users.py
|
Python
|
bsd-3-clause
| 1,172
|
/**
* @module Audio
* @namespace Audio
*/
var TW = TW || {};
define(['./Sound', '../Utils/Polyfills'], function(Sound) {
TW.Audio = TW.Audio || {};
/**
 * Channel class is a utility for managing multiple sounds with the same source.
 *
 * By default, a sound object can't be played twice simultaneously.
 * The correct way to do this is to use the Channel class.
 *
 * A Channel is a group of many Sound objects using the same source (so the same music).
*
* @class Channel
* @constructor
* @param {String|String[]} src The source(s) of channel.
* If many values are passed, the first compatible are used.
* @param {Number} max The number of sound allocated in this channel.
* @param {Number} id The identifier of the channel.
*/
function Channel(src, max, id) {
	/**
	 * Array of Sound.
	 *
	 * @property {Sound[]} _sounds
	 * @default []
	 */
	this._sounds = [];

	/**
	 * Callback function when all sound is ready to play in this channel.
	 *
	 * @property {Function} allSoundsReady
	 * @default null
	 */
	this.allSoundsReady = null;

	/**
	 * Source sound for this channel.
	 * Can contain many values (first compatible are used).
	 *
	 * @property {String|String[]} _src
	 * @private
	 */
	this._src = src;

	/**
	 * Channel id.
	 *
	 * @property {Number} id
	 * @default id
	 * @readonly
	 */
	this.id = id;

	// Preallocate the pool of `max` Sound instances for this source.
	this.add(max);
}
/**
* Add max sound instance with sources in channel.
*
* @method add
* @param {Number} max The number of sound allocated in this channel.
*/
Channel.prototype.add = function(max) {
	// Grow the pool until it holds `max` Sound instances for this source.
	// (Appending never shrinks the pool; a smaller `max` is a no-op.)
	for (var count = this._sounds.length; count < max; ++count) {
		this._sounds.push(new Sound(this._src));
	}
};
/**
* Load all sound.
*
* @method load
*/
Channel.prototype.load = function() {
	// Fires the user callback (if any) with this channel as argument.
	var handleAllSoundsReady = function() {
		if (this.allSoundsReady !== null) {
			this.allSoundsReady(this);
		}
	}.bind(this);

	for (var i = 0; i < this._sounds.length; ++i) {
		var sound = this._sounds[i];
		if (i === 0) {
			// NOTE(review): the ready callback is attached only to the first
			// sound, so `allSoundsReady` fires when sound #0 is ready rather
			// than when all are.  Presumably loading one shared source makes
			// the first one representative -- confirm against Sound's caching.
			sound.onReady = handleAllSoundsReady;
		}
		sound.load(0, 0, 1);
	}
};
/**
* Get a playable sound.
*
* @method getPlayableSound
* @return {Object} A playable sound.
*/
Channel.prototype.getPlayableSound = function() {
	// Prefer the first pooled sound that is not currently playing.
	var candidate = null;
	for (var idx = 0; candidate === null && idx < this._sounds.length; ++idx) {
		if (this._sounds[idx].playState !== TW.Audio.AUDIO_PLAYED) {
			candidate = this._sounds[idx];
		}
	}
	if (candidate !== null) {
		return candidate;
	}
	// Every sound is busy: recycle the first one by stopping it.
	var recycled = this._sounds[0];
	recycled.stop();
	return recycled;
};
/**
* Applies the command to all sounds.
*
* @method _tellAllSounds
* @param {String} command commands availables:
*
* - `"pause"`
* - `"resume"`
* - `"setVolume"`
* - `"mute"`
* - `"stop"`
*
* @param {*} [value] argument
* @private
*/
Channel.prototype._tellAllSounds = function(command, value) {
for (var i = this._sounds.length - 1; i >= 0; --i) {
var sound = this._sounds[i];
switch (command) {
case "pause":
sound.pause();
break;
case "resume":
sound.resume();
break;
case "setVolume":
sound.setVolume(value);
break;
case "mute":
sound.mute(value);
break;
case "stop":
sound.stop();
break;
default:
}
}
};
/**
* Mute or Unmute all sound in this channel.
*
* @method setMute
* @param {Boolean} isMuted True for mute or false for unmute.
*/
Channel.prototype.setMute = function(isMuted) {
this._tellAllSounds("mute", isMuted);
};
/**
* Pause all sound in this channel.
*
* @method pause
*/
Channel.prototype.pause = function() {
this._tellAllSounds("pause", null);
};
/**
* Resume all sound in this channel.
*
* @method resume
*/
Channel.prototype.resume = function() {
this._tellAllSounds("resume", null);
};
/**
* Stop all sound in this channel.
*
* @method stop
*/
Channel.prototype.stop = function() {
this._tellAllSounds("stop", null);
};
/**
* Set a volume for all sound in this channel.
*
* @method setMasterVolume
* @param {Number} value The value of volume needed. min: 0.0 -> max: 1.0
*/
Channel.prototype.setMasterVolume = function(value) {
this._tellAllSounds("setVolume", value);
};
TW.Audio.Channel = Channel;
return Channel;
});
|
TumbleweedJS/TumbleweedJS
|
modules/Audio/Channel.js
|
JavaScript
|
bsd-3-clause
| 4,213
|
package edu.mit.simile.babel;
import java.io.OutputStream;
import java.io.Writer;
import java.util.Locale;
import java.util.Properties;
import org.openrdf.sail.Sail;
public interface BabelWriter {
    /** Human-readable name of this writer, localized for the given locale. */
    public String getLabel(Locale locale);

    /** Longer human-readable description of this writer, localized for the given locale. */
    public String getDescription(Locale locale);

    /** The semantic type this writer emits. */
    public SemanticType getSemanticType();

    /** The serialization format this writer emits. */
    public SerializationFormat getSerializationFormat();

    /**
     * Presumably indicates whether this writer produces character output
     * (use the {@link Writer} overload) rather than byte output (use the
     * {@link OutputStream} overload) — confirm against implementations.
     */
    public boolean takesWriter();

    /** Serialize the contents of the given Sail as character data. */
    public void write(Writer writer, Sail sail, Properties properties, Locale locale) throws Exception;

    /** Serialize the contents of the given Sail as raw bytes. */
    public void write(OutputStream outputStream, Sail sail, Properties properties, Locale locale) throws Exception;
}
|
zepheira/babel
|
interfaces/src/main/java/edu/mit/simile/babel/BabelWriter.java
|
Java
|
bsd-3-clause
| 654
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.epic.canvascontrollibrary;
import java.util.ArrayList;
import java.util.List;
/**
 * Data holder mirroring the property list of the CanvasControlLibrary
 * line/area graph control.
 *
 * All values are kept as Strings — presumably because they are marshalled
 * to/from the JavaScript control verbatim; confirm against the serializer.
 *
 * @author Gilgamesh
 */
public class CCLLineAreaGraphProps
{
    // Placement of the graph on the canvas/window.
    public String CanvasID;
    public String WindowID;
    public String X;
    public String Y;
    public String Width;
    public String Height;

    // Series data points (opaque to this holder).
    public List<Object> Data;

    // Axis ranges and tick counts.
    public String XMaxValue;
    public String YMaxValue;
    public String NumMarksX;
    public String NumMarksY;

    // Title and label styling.
    public String Title;
    public String TitleTextColor;
    public String TitleTextHeight;
    public String TitleTextFontString;
    public String AxisLabelsColor;
    public String AxisLabelsHeight;
    public String AxisLabelsFontString;

    public String H;
    public String MarginLeft;
    public String AlreadyUnregisteredAnimation;
    public String IsLabledOnXAxis;
    public Object Tag;
    public String TabStopIndex;

    // Package-private: starts with an empty data series.
    CCLLineAreaGraphProps()
    {
        Data = new ArrayList<Object>();
    }
}
|
akshaysrin/CanvasControlLibrary
|
NetBeansProjects/WebApplication1/src/java/com/epic/canvascontrollibrary/CCLLineAreaGraphProps.java
|
Java
|
bsd-3-clause
| 1,196
|
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TEST_BWE_H_
#define WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TEST_BWE_H_

#include <sstream>

#include "webrtc/modules/remote_bitrate_estimator/test/packet.h"
#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"

namespace webrtc {
namespace testing {
namespace bwe {

// Bitrate bounds (kbps) shared by the bandwidth-estimator test framework.
const int kMinBitrateKbps = 150;
const int kMaxBitrateKbps = 2000;

// Sender-side half of a bandwidth estimator under test: consumes feedback
// packets and is notified of every packet sent.
class BweSender : public Module {
 public:
  BweSender() {}
  virtual ~BweSender() {}

  // How often (in ms) this estimator expects feedback to arrive.
  virtual int GetFeedbackIntervalMs() const = 0;
  virtual void GiveFeedback(const FeedbackPacket& feedback) = 0;
  virtual void OnPacketsSent(const Packets& packets) = 0;

 private:
  DISALLOW_COPY_AND_ASSIGN(BweSender);
};

// Receiver-side half: observes incoming media packets and periodically
// produces a feedback packet (default implementation produces none).
class BweReceiver {
 public:
  explicit BweReceiver(int flow_id) : flow_id_(flow_id) {}
  virtual ~BweReceiver() {}

  virtual void ReceivePacket(int64_t arrival_time_ms,
                             const MediaPacket& media_packet) {}
  // Returns a caller-owned feedback packet, or NULL when none is due.
  virtual FeedbackPacket* GetFeedback(int64_t now_ms) { return NULL; }

 protected:
  int flow_id_;
};

// Selects which estimator implementation the factories below construct.
enum BandwidthEstimatorType {
  kNullEstimator,
  kNadaEstimator,
  kRembEstimator,
  kFullSendSideEstimator
};

int64_t GetAbsSendTimeInMs(uint32_t abs_send_time);

// Factory functions; callers own the returned objects.
BweSender* CreateBweSender(BandwidthEstimatorType estimator,
                           int kbps,
                           BitrateObserver* observer,
                           Clock* clock);
BweReceiver* CreateBweReceiver(BandwidthEstimatorType type,
                               int flow_id,
                               bool plot);
}  // namespace bwe
}  // namespace testing
}  // namespace webrtc

#endif  // WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TEST_BWE_H_
|
svn2github/webrtc-Revision-8758
|
modules/remote_bitrate_estimator/test/bwe.h
|
C
|
bsd-3-clause
| 2,110
|
module Module1.Task10 where

-- | Fibonacci number for any integer index.
--
--   Extended to negative indices via the identity
--   F(-n) = (-1)^(n+1) * F(n).
--   Positive indices >= 2 are computed iteratively (tail recursion),
--   so the function is linear rather than exponential.
fibonacci :: Integer -> Integer
fibonacci n
    | n == 0 = 0
    | n == 1 = 1
    | n < 0 = -(-1) ^ (-n) * fibonacci (-n)
    -- 'otherwise' (rather than 'n > 0') makes the guards provably
    -- exhaustive to the compiler.
    | otherwise = fibonacciIter 0 1 (n - 2)

-- | Tail-recursive helper: carries the last two Fibonacci numbers and a
--   countdown of remaining steps; the answer is their sum at step 0.
fibonacciIter :: Integer -> Integer -> Integer -> Integer
fibonacciIter acc1 acc2 0 = acc1 + acc2
fibonacciIter acc1 acc2 n =
    fibonacciIter (acc2) (acc1 + acc2) (n - 1)
|
dstarcev/stepic-haskell
|
src/Module1/Task10.hs
|
Haskell
|
bsd-3-clause
| 309
|
from django.shortcuts import render_to_response, get_object_or_404
from django.http import Http404
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from django.views.generic.dates import YearArchiveView, MonthArchiveView,\
DateDetailView
from .models import Article, Section
class ArticleListView(ListView):
    """Paginated list of published articles.

    NOTE(review): Django's generic class-based views read ``template_name``,
    not ``template`` -- these ``template`` attributes are presumably ignored
    and the default ``<app>/<model>_list.html`` templates are used instead;
    confirm before relying on them.
    """
    template = "news/article_list.html"
    paginate_by = 5

    def get_queryset(self):
        # Only articles the custom manager considers published.
        return Article.objects.published()

    def get_context_data(self, **kwargs):
        # Expose every section for sidebar/navigation rendering.
        context = super(ArticleListView, self).get_context_data(**kwargs)
        context['section_list'] = Section.objects.all()
        return context


class ArticleDateDetailView(DateDetailView):
    """Detail view of a published article addressed by its publication date."""
    date_field = "published"
    template = "news/article_detail.html"

    def get_queryset(self):
        return Article.objects.published()

    def get_context_data(self, **kwargs):
        # import ipdb; ipdb.set_trace()
        context = super(ArticleDateDetailView, self).get_context_data(**kwargs)
        context['section_list'] = Section.objects.all()
        return context


class ArticleDetailView(DetailView):
    """Detail view of a single published article."""
    queryset = Article.objects.published()
    template = "news/post_detail.html"

    def get_context_data(self, **kwargs):
        context = super(ArticleDetailView, self).get_context_data(**kwargs)
        context['section_list'] = Section.objects.all()
        return context


class SectionListView(ListView):
    """List of all sections."""
    queryset = Section.objects.all()
    template = "news/section_list.html"


class SectionDetailView(DetailView):
    """Detail view of a single section."""
    queryset = Section.objects.all()
    template = "news/section_detail.html"


class ArticleYearArchiveView(YearArchiveView):
    """Yearly archive of published articles."""
    queryset = Article.objects.published()
    date_field = "published"
    make_object_list = True
    template = "news/post_archive_year.html"
class ArticleMonthArchiveView(MonthArchiveView):
    """Monthly archive of published articles.

    Uses ``Article.objects.published()`` (not ``.all()``) so that
    unpublished articles do not leak into the month archive; this matches
    every other article view in this module.
    """
    queryset = Article.objects.published()
    date_field = "published"
    make_object_list = True
    template = "news/post_archive_month.html"
|
ilendl2/chrisdev-cookiecutter
|
{{cookiecutter.repo_name}}/{{cookiecutter.project_name}}/news/views.py
|
Python
|
bsd-3-clause
| 2,060
|
<?php
/*
* To change this license header choose License Headers in Project Properties.
* To change this template file choose Tools | Templates
* and open the template in the editor.
*/
/**
* Description of Customer
*
* @author Pedro
*/
namespace Sale\Model\Entity;
class Customer
{
    // Column-backed public properties; hydrated via exchangeArray().
    public $customer_id;
    public $firstname;
    public $lastname;
    public $email;
    public $telephone;
    public $birthday;
    public $document_identity;
    public $gender;
    public $password;
    public $salt;
    public $cart;
    public $wishlist;
    public $newsletter;
    public $address_default;
    public $ip;
    public $status;
    public $approved;
    public $token;
    public $email_confirmed;
    public $register_complete;
    public $date_modified;
    public $date_added;

    /**
     * @param array $data Optional column => value map used to hydrate the entity.
     */
    public function __construct(Array $data = array())
    {
        $this->exchangeArray($data);
    }

    /**
     * Hydrate the entity: each known property is taken from $data when the
     * key is present, and reset to null otherwise.
     *
     * @param array $data
     */
    public function exchangeArray($data)
    {
        foreach (array_keys($this->getArrayCopy()) as $property) {
            $this->{$property} = isset($data[$property]) ? $data[$property] : null;
        }
    }

    /**
     * @return array property => value snapshot of this entity.
     */
    public function getArrayCopy()
    {
        return get_object_vars($this);
    }
}
|
Gimalca/piderapido
|
module/Sale/src/Sale/Model/Entity/Customer.php
|
PHP
|
bsd-3-clause
| 1,343
|
/*********************************************************************
* Software License Agreement (BSD License)
*
* Copyright (c) 2013, Willow Garage, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* * Neither the name of Willow Garage nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*********************************************************************/
/* Author: Acorn Pooley, Ioan Sucan */
#pragma once
#include <moveit/collision_detection/collision_env.h>
#include <moveit/macros/class_forward.h>
namespace collision_detection
{
MOVEIT_CLASS_FORWARD(CollisionDetectorAllocator);  // Defines CollisionDetectorAllocatorPtr, ConstPtr, WeakPtr... etc

/** \brief An allocator for a compatible CollisionWorld/CollisionRobot pair. */
class CollisionDetectorAllocator
{
public:
  virtual ~CollisionDetectorAllocator()
  {
  }

  /** A unique name identifying the CollisionWorld/CollisionRobot pairing. */
  virtual const std::string& getName() const = 0;

  /** Create a new CollisionEnv for checking collisions with the supplied world. */
  virtual CollisionEnvPtr allocateEnv(const WorldPtr& world,
                                      const moveit::core::RobotModelConstPtr& robot_model) const = 0;

  /** Create a new CollisionEnv by copying an existing CollisionEnv of the same type.
   * The world must be either the same world as used by \orig or a copy of that world which has not yet been modified.
   */
  virtual CollisionEnvPtr allocateEnv(const CollisionEnvConstPtr& orig, const WorldPtr& world) const = 0;

  /** Create a new CollisionEnv given a robot_model with a new empty world. */
  virtual CollisionEnvPtr allocateEnv(const moveit::core::RobotModelConstPtr& robot_model) const = 0;
};

/** \brief Template class to make it easy to create an allocator for a specific CollisionWorld/CollisionRobot pair.
 *
 * CRTP-style helper: a concrete detector derives an allocator type from
 * this template and inherits all three allocateEnv overloads plus a
 * static create() factory.
 */
template <class CollisionEnvType, class CollisionDetectorAllocatorType>
class CollisionDetectorAllocatorTemplate : public CollisionDetectorAllocator
{
public:
  CollisionEnvPtr allocateEnv(const WorldPtr& world, const moveit::core::RobotModelConstPtr& robot_model) const override
  {
    return CollisionEnvPtr(new CollisionEnvType(robot_model, world));
  }

  CollisionEnvPtr allocateEnv(const CollisionEnvConstPtr& orig, const WorldPtr& world) const override
  {
    // dynamic_cast enforces that 'orig' really is the concrete env type;
    // a mismatch throws std::bad_cast rather than silently mis-copying.
    return CollisionEnvPtr(new CollisionEnvType(dynamic_cast<const CollisionEnvType&>(*orig), world));
  }

  CollisionEnvPtr allocateEnv(const moveit::core::RobotModelConstPtr& robot_model) const override
  {
    return CollisionEnvPtr(new CollisionEnvType(robot_model));
  }

  /** Create an allocator for collision detectors. */
  static CollisionDetectorAllocatorPtr create()
  {
    return CollisionDetectorAllocatorPtr(new CollisionDetectorAllocatorType());
  }
};
}  // namespace collision_detection
|
ros-planning/moveit
|
moveit_core/collision_detection/include/moveit/collision_detection/collision_detector_allocator.h
|
C
|
bsd-3-clause
| 4,249
|
<link rel="stylesheet" type="text/css" href="/plugins/bootstrap-datepicker/css/datepicker.css"/>
<style>
.table th a{ color:#000;text-decoration:none;}
.table td a{text-decoration:none;}
.input-small,.input-large {width: 250px !important;}
</style>
<div class="row">
<div class="col-md-12">
<div class="portlet">
<div class="portlet-title">
<div class="caption cap-head">
<i class="icon icon-suitcase"></i>
<a>代理商支持 <i class="icon-angle-right"></i></a>
<a href="#">合规通知列表</a>
</div>
<div class="tools">
<a href="<?php echo $this->createUrl('/issue/createComplianceIssue'); ?>"><i class="icon-plus"></i> 添加</a>
<a href="javascript:location.reload();"><i class="icon-refresh"></i>刷新</a>
</div>
</div>
<div class="portlet-body form">
<form class="form-inline" role="form" method="get">
<div class="form-body">
<div class="form-group">
<input type="text" class="form-control input-small" name="issue_id" placeholder="通知编号" value="<?php echo $issue_id;?>">
</div>
<div class="form-group">
<div class="input-group input-large date-picker input-daterange">
<input style="cursor: pointer;" readonly id="start_date" type="text" class="form-control" name="start_date" value="<?php echo $start_date ; ?>">
<span class="input-group-addon">至</span>
<input style="cursor: pointer;" readonly id="end_date" type="text" class="form-control" name="end_date" value="<?php echo $end_date ; ?>">
</div>
</div>
</div>
<div class="form-body">
<div class="form-group">
<input type="text" class="form-control input-small" name="name" placeholder="通知标题" value="<?php echo $name;?>">
</div>
<div class="form-group">
<input type="text" class="form-control input-small" name="agent_name" placeholder="请输入代理商名称" value="<?php echo $agent_name;?>">
</div>
<button class="btn blue" type="submit"><i class="icon-search"></i> 查询</button>
</div>
</form>
</div>
<div class="portlet-body">
<ul class="nav nav-tabs">
<li class="<?php if($s==''){echo 'active';}?>">
<a href="<?php echo $this->createUrl('/issue/complianceIssue');?>">全部通知</a>
</li>
<li class="<?php if($s==Issue::ISSUE_STATE_COMMITTED){echo 'active';}?>">
<a href="<?php echo $this->createUrl('/issue/complianceIssue',array('s'=>Issue::ISSUE_STATE_COMMITTED));?>">待处理通知</a>
</li>
<li class="<?php if($s==Issue::ISSUE_STATE_PROCESSING){echo 'active';}?>">
<a href="<?php echo $this->createUrl('/issue/complianceIssue',array('s'=>Issue::ISSUE_STATE_PROCESSING));?>" >处理中通知</a>
</li>
<li class="<?php if($s==Issue::ISSUE_STATE_COMPLETED){echo 'active';}?>">
<a href="<?php echo $this->createUrl('/issue/complianceIssue',array('s'=>Issue::ISSUE_STATE_COMPLETED));?>" >已完成通知</a>
</li>
<!--<li class="--><?php //if($s==Issue::ISSUE_STATE_CANCEL){echo 'active';}?><!--">-->
<!-- <a href="--><?php //echo $this->createUrl('/issue/complianceIssue',array('s'=>Issue::ISSUE_STATE_CANCEL));?><!--" >已取消通知</a>-->
<!--</li>-->
</ul>
<table class="table table-striped table-hover table-bordered table-advance">
<thead>
<tr>
<th>编号</th>
<th>通知标题</th>
<th>状态</th>
<th>创建人</th>
<th>受理公司</th>
<th>创建时间</th>
<th>操作</th>
</tr>
</thead>
<tbody>
<?php if($model) { ?>
<?php foreach($model as $list) { ?>
<tr>
<td><?php echo $list['id'];?></td>
<td>
<a href="/issue/complianceView?id=<?php echo $list['guid']; ?>">
<?php echo $list['name'];?>
</a>
</td>
<td><?php
$status = $list['current_state'];
if($list['current_state']=='已提交'){
$status = '待处理';
$class="label-warning";
}else if($list['current_state']=="处理中"){
$class="label-primary";
}else{
$class="label-success";
}?>
<span class="label label-sm <?php echo $class;?>">
<?php echo '已关闭'==$status?'已取消':$status;?>
</span>
<?php if($list['priority']=='紧急') : ?>
<span class="label label-sm label-danger"><?php echo $list['priority']?></span>
<?php endif; ?></td>
<td><?php echo Service::get_user_name($list['creator']);?></td>
<td><?php echo Service::get_company_name_by_id($list['assignee_company_id']);?></td>
<td><?php echo $list['created'];?></td>
<td>
<a class="delete" href="/issue/complianceView?id=<?php echo $list['guid']; ?>">
查看
</a>
</td>
</tr>
<?php } ?>
<?php } ?>
</tbody>
</table>
<div class="pull-right">
<?php
$this->widget('CLinkPager',array(
'htmlOptions' => array('class' => 'pagination'),
'header'=>'',
'firstPageLabel' => '首页',
'lastPageLabel' => '末页',
'prevPageLabel' => '上一页',
'nextPageLabel' => '下一页',
'pages' => $pages,
'maxButtonCount'=>13
));
?>
</div>
</div>
</div>
</div>
</div>
<script type="text/javascript" src="/plugins/bootstrap-datepicker/js/bootstrap-datepicker.js"></script>
<script src="/js/form-components.js"></script>
<script>
// Initialise the bootstrap date-range pickers, capped at today's date.
jQuery(document).ready(function() {
    var today = GetDateStr(0);
    if (jQuery().datepicker) {
        $('.date-picker').datepicker({
            autoclose: true,
            isRTL: App.isRTL(),
            format: "yyyy-mm-dd",
            endDate: today  // disallow selecting future dates
        });
        $('body').removeClass("modal-open"); // fix bug when inline picker is used in modal
    }
});
// Return the date AddDayCount days from today, formatted "yyyy-m-d".
// NOTE(review): month and day are not zero-padded while the datepicker
// format is "yyyy-mm-dd" -- presumably tolerated by the plugin; verify.
function GetDateStr(AddDayCount) {
    var dd = new Date();
    dd.setDate(dd.getDate()+AddDayCount); // shift to AddDayCount days from now
    var y = dd.getFullYear();
    var m = dd.getMonth()+1; // getMonth() is 0-based, so add 1
    var d = dd.getDate();
    return y+"-"+m+"-"+d;
}
</script>
|
duanduan2288/vr
|
views/issue/compliance.php
|
PHP
|
bsd-3-clause
| 8,628
|
<?php
namespace common\models;
use yii\db\ActiveRecord;
use yii\db\Expression;
use yii\data\ActiveDataProvider;
use Yii;
/**
* This is the model class for table "reports".
*
* @property integer $id
* @property integer $revenue
* @property integer $expense_on_goods
* @property integer $other_expenses
* @property integer $salary
* @property integer $day_type
* @property string $date
* @property string $create_date
*/
class Reports extends \yii\db\ActiveRecord
{
    // r=reports/performance fields: virtual attributes used only by the
    // performance report action, not columns of the `reports` table.
    public $month;
    public $totalRevenue;
    public $averageSalaryPerDay;
    public $numberOfWorkedDays;
    public $totalSalary;

    /**
     * @inheritdoc
     */
    public static function tableName()
    {
        return 'reports';
    }

    /**
     * Maintain created_at / updated_at automatically, using the database's
     * NOW() so timestamps come from the DB clock, not PHP's.
     */
    public function behaviors()
    {
        return [
            'timestamp' => [
                'class' => 'yii\behaviors\TimestampBehavior',
                'attributes' => [
                    ActiveRecord::EVENT_BEFORE_INSERT => ['created_at', 'updated_at'],
                    ActiveRecord::EVENT_BEFORE_UPDATE => ['updated_at'],
                ],
                'value' => new Expression('NOW()'),
            ],
        ];
    }

    /**
     * @inheritdoc
     */
    public function rules()
    {
        return [
            [['revenue', 'day_type', 'date', 'user_id'], 'required'],
            [['revenue', 'expense_on_goods', 'other_expenses', 'salary', 'day_type'], 'integer'],
            [['date', 'created_at', 'updated_at', ], 'safe'],
            [['month','totalRevenue','averageSalaryPerDay','numberOfWorkedDays','totalSalary'],'safe']
        ];
    }

    /**
     * @inheritdoc
     */
    public function attributeLabels()
    {
        return [
            'id' => 'ID',
            'user_id' => 'User ID',
            'user' => Yii::t('app', 'Reported by'),
            'revenue' => 'Revenue',
            'expense_on_goods' => 'Expenses on goods',
            'other_expenses' => 'Other expenses',
            'salary' => 'Salary',
            'day_type' => 'Day type',
            'date' => 'Date',
            'created_at' => 'First Reported',
            'updated_at' => 'Last Updated',
        ];
    }

    /**
     * Relation to the reporting user.
     */
    public function getUser()
    {
        return $this->hasOne(User::className(), ['id' => 'user_id']);
    }

    /**
     * Keep the running shop/cash-register State ledger in sync with this
     * report's revenue: a new report appends a State row; an update rewrites
     * the latest State row from its predecessor.
     *
     * NOTE(review): "latest" is determined by max(id) with no locking, so
     * concurrent saves could interleave -- presumably acceptable for a
     * single-operator shop; confirm before reusing elsewhere.
     */
    public function beforeSave($insert) {
        if (parent::beforeSave($insert)) {
            $state_max_id = \backend\models\State::find()->max('id');
            $state_model_with_max_id = \backend\models\State::findOne($state_max_id);
            if ($this->isNewRecord) {
                // Append a new State row derived from the latest one.
                $old_shop_state = $state_model_with_max_id->shop_state;
                $new_state = new \backend\models\State;
                $new_state->shop_state = $old_shop_state - $this->revenue;
                $new_state->cash_register_start = $state_model_with_max_id->cash_register_end;
                $new_state->cash_register_end = $state_model_with_max_id->cash_register_end + $this->revenue;
                $new_state->output = $this->revenue;
                $new_state->input = 0;
                $new_state->save();
            } else {
                // Recompute the latest State row from the one before it,
                // using this report's (possibly changed) revenue.
                $updating_model = $state_model_with_max_id;
                $previous_id = \backend\models\State::find()->select('max(id)')->andWhere(['<', 'id', $state_max_id]);
                $previous_model = \backend\models\State::findOne($previous_id);
                $updating_model->shop_state = $previous_model->shop_state - $this->revenue;
                $updating_model->cash_register_start = $previous_model->cash_register_end;
                $updating_model->cash_register_end = $previous_model->cash_register_end + $this->revenue;
                $updating_model->output = $this->revenue;
                $updating_model->input = 0;
                $updating_model->save();
            }
            return true;
        } else {
            return false;
        }
    }

    /**
     * Default user_id to the currently logged-in user when not set.
     */
    public function beforeValidate()
    {
        if ($this->user_id == null)
        {
            $user_id = Yii::$app->user->identity->id;
            $this->user_id = $user_id;
        }
        return parent::beforeValidate();
    }
}
|
ara-martirossyan/market
|
common/models/Reports.php
|
PHP
|
bsd-3-clause
| 4,280
|
# Copyright (c) 2015, Mitchell Cooper
#
# F::Operation: AST node for infix operations.  adopt() normalizes unary
# signs into binary ops against a synthetic zero; compile() groups children
# by operator precedence into nested array refs; *_fmt() emit Perl code.
package F::Operation;

use warnings;
use strict;
use 5.010;

use parent qw(F::NodeExpression);
use Scalar::Util 'blessed';

sub left_side { shift->first_child }

# handle signs.
sub adopt {
    my ($op, $maybe) = (shift, @_);
    my $before = $op->last_child;

    # first element is an operator.
    # don't worry about anything except signs (+/-)
    # because the constructor will not allow that to happen.
    if (!$before && is_op($maybe)) {
        # treat leading sign as a binary op against a synthetic zero.
        $before = F::new('Number', value => 0, zero => 1);
        $op->SUPER::adopt($before);
        $maybe->{token} = 'OP_S'.uc($maybe->op_type); # super sub
    }

    # two operators in a row.
    elsif (is_op($before) && is_op($maybe)) {

        # it could be a negation.
        if (is_op($maybe, 'sub')) {

            # two negatives = positive.
            #
            # note how this calls ->adopt again, rather than SUPER::adopt.
            # this is because the below check is_op($maybe, 'add')
            # may ignore it altogether.
            #
            my $super = is_op($before, 'ssub');
            if (is_op($before, 'sub') || $super) {
                $op->abandon($before);
                return $op->adopt(F::new('Operator', token =>
                    $super ? 'OP_SADD' : 'OP_ADD' # super add
                ));
            }

            # otherwise it's just a normal negation.
            $op->SUPER::adopt(F::new('Number', value => 0, zero => 1));
            $maybe->{token} = 'OP_S'.uc($maybe->op_type); # super sub
            return $op->SUPER::adopt(@_);
        }

        # it's just a positive; do nothing.
        if (is_op($maybe, 'add')) {
            return;
        }

        # otherwise, not allowed.
        return $maybe->unexpected();
    }

    return $op->SUPER::adopt(@_);
}

sub compile {
    my $op = shift;
    return @{ $op->{compiled} } if $op->{compiled};
    my @children = $op->children;

    # for each operator, while there are instances of that operator.
    # the qw list below is the precedence order, highest first.
    foreach my $op_type (qw/
        range pow
        ssub sadd
        mod mul div
        sub add
        less less_e
        gr8r gr8r_e
        equal_i nequal_i
        equal nequal
        sim nsim
        band xor bor
        and or
    /) {
    while (grep { is_op($_, $op_type) } @children) {
        my ($i, $left, $right) = -1;

        # for each operator of this type...
        foreach my $child (@children) { $i++;
            is_op($child, $op_type) or next;

            # replace the left side, operator, and right side with
            # an array reference to represent the operation.
            my $a = [ $op_type ];
            ($left, undef, $right) = splice @children, $i - 1, 3, $a;

            # FIXME: do something proper if there is no $right
            die 'no right side' if !$right;

            # if the last is the same type of operation, combine.
            if (ref $left eq 'ARRAY' && $left->[0] eq $op_type) {
                push @$a, @$left[1..$#$left], $right;
            }
            else {
                push @$a, $left, $right;
            }

            last;
        }
    }}

    $op->{compiled} = \@children;
    return @children;
}

# map parser op names to runtime operation names.
my %map = (
    'sub'      => '_sub',
    'ssub'     => '_sub',
    'sadd'     => 'add',
    'or'       => 'any_true',
    'and'      => 'all_true',
    'equal_i'  => 'refs_equal',
    'nequal_i' => 'refs_nequal'
);

# these are wrapped with sub because they are evaluated only as needed
my %wrap_sub = map { $_ => 1 } qw(
    any_true
    all_true
);

sub op_fmt {
    my ($op, $op_name, @items) = (shift, @{ +shift });
    $op_name = $map{$op_name} || $op_name;
    my $main = $op->main;
    $main->{required_operations}{$op_name}++;
    # consider return when wrapped with sub{}?
    return operation => {
        operation => $op_name,
        pos       => $op->{create_pos},
        items     => join ', ', map {
            my $fmt = ref $_ eq 'ARRAY' ?
                F::get_perl_fmt($op->op_fmt($_)) :
                $_->perl_fmt_do;
            $wrap_sub{$op_name} ? "sub { $fmt }" : $fmt
        } @items
    };
}

sub perl_fmt {
    my $op = shift;
    return $op->op_fmt($op->compile);
}

# true if $op is an Operator node (of type $type, when $type is given).
sub is_op {
    my ($op, $type) = @_;
    blessed $op or return;
    $op->type eq 'Operator' or return;
    $op->op_type eq $type or return if length $type;
    return 1;
}

1
|
cooper/ferret
|
lib/F/Operation.pm
|
Perl
|
bsd-3-clause
| 4,498
|
--[er]test update(nvarchar) for list partition(have NULL value) with incorrect values out of range
-- Partition p0 accepts only N'aaa', NULL, N'bbb', N'ddd' for the key column;
-- the UPDATE below moves rows to N'ccc' (not in any partition) and is
-- therefore expected to fail, leaving the SELECT output unchanged.
create table list_test(id int not null ,
test_char char(50),
test_varchar varchar(2000),
test_bit bit(16),
test_varbit bit varying(20),
test_nchar nchar(50),
test_nvarchar nchar varying(2000),
test_string string,
test_datetime timestamp,
primary key (id, test_nvarchar))
PARTITION BY LIST (test_nvarchar) (
PARTITION p0 VALUES IN (N'aaa',NULL,N'bbb',N'ddd')
);
insert into list_test values(1,'aaa','aaa',B'1',B'1011',N'aaa',N'aaa','aaaaaaaaaa','2006-03-01 09:00:00');
insert into list_test values(2,'bbb','bbb',B'10',B'1100',N'bbb',N'bbb','bbbbbbbbbb','2006-03-01 09:00:00');
insert into list_test values(3,'ddd','ddd',B'100',B'1110',N'ddd',N'ddd','dddddddddd','2006-03-01 09:00:00');
insert into list_test values(4,'aaa','aaa',B'100',B'1110',N'aaa',N'aaa','aaaaaaaaaa','2006-03-01 09:00:00');
insert into list_test values(5,'bbb','bbb',B'100',B'1110',N'bbb',N'bbb','bbbbbbbbbb','2006-03-01 09:00:00');
insert into list_test values(6,'ddd','ddd',B'100',B'1110',N'ddd',N'ddd','dddddddddd','2006-03-01 09:00:00');
insert into list_test values(7,NULL,NULL,NULL,NULL,NULL,NULL,NULL,'2006-03-01 09:00:00');
update list_test set test_nvarchar = N'ccc' where test_nvarchar =N'aaa';
select * from list_test order by id;
drop table list_test;
|
CUBRID/cubrid-testcases
|
sql/_01_object/_09_partition/_004_manipulation/cases/1069.sql
|
SQL
|
bsd-3-clause
| 1,422
|
"""
Module to create topo and qinit data files for this example.
"""
from clawpack.geoclaw import topotools
from pylab import *
def maketopo_hilo():
    """Convert the Hilo x/y/z text grids into a topo_type-2 file.

    The raw grid is nearly -- but not exactly -- uniform, so x and y are
    replaced with truly uniform linspace coordinates before writing
    ``hilo_flattened.tt2``.
    """
    x = loadtxt('x.txt')
    y = loadtxt('y.txt')
    z = loadtxt('z.txt')

    # modify x and y so that cell size is truly uniform:
    dx = 1. / (3.*3600.)  # 1/3"
    xx = linspace(x[0], x[-1], len(x))
    yy = linspace(y[-1], y[0], len(y))
    # z rows flipped to match the reversed y ordering above -- presumably
    # the raw file stores rows north-to-south; confirm against the data.
    zz = flipud(z)

    topo = topotools.Topography()
    topo.x = xx
    topo.y = yy
    topo.Z = zz
    topo.write('hilo_flattened.tt2',topo_type=2)
def maketopo_flat():
    """
    Output topography file for the entire domain.

    Writes ``flat.tt2`` on a 201 x 301 grid over the full longitude/latitude
    extent, sampling the piecewise-constant ``topo_flat`` function.
    """
    nxpoints = 201
    nypoints = 301
    xlower = 204.812
    xupper = 205.012
    ylower = 19.7
    yupper = 20.0
    outfile= "flat.tt2"
    topotools.topo2writer(outfile,topo_flat,xlower,xupper,ylower,yupper,nxpoints,nypoints)
def topo_flat(x,y):
    """
    Piecewise-constant topography: 30 m where x < 204.91213 and
    -30 m elsewhere.  ``y`` is accepted to satisfy the topo-function
    interface but does not affect the result.
    """
    step_longitude = 204.91213
    return where(x < step_longitude, 30., -30.)
def plot_topo_big():
    """Plot the full-domain topo with the zoom region outlined in white."""
    figure(figsize=(8,12))
    topo1 = topotools.Topography()
    topo1.read('flat.tt2',2)
    contourf(topo1.x,topo1.y,topo1.Z,linspace(-30,20,51), extend='both')
    topo2 = topotools.Topography()
    topo2.read('hilo_flattened.tt2',2)
    contourf(topo2.x,topo2.y,topo2.Z,linspace(-30,20,51), extend='both')
    # White rectangle marking the Hilo close-up region.
    x1 = 204.90028
    x2 = 204.96509
    y1 = 19.71
    y2 = 19.95
    plot([x1,x2,x2,x1,x1],[y1,y1,y2,y2,y1],'w')
    axis('scaled')
    colorbar()
def plot_topo():
    """Plot a close-up of Hilo harbor with shoreline and gauge locations."""
    figure(figsize=(12,8))
    topo1 = topotools.Topography()
    topo1.read('flat.tt2',2)
    contourf(topo1.x,topo1.y,topo1.Z,linspace(-30,20,51), extend='both')
    topo2 = topotools.Topography()
    topo2.read('hilo_flattened.tt2',2)
    contourf(topo2.x,topo2.y,topo2.Z,linspace(-30,20,51), extend='both')
    colorbar()
    x1 = 204.9
    x2 = 204.955
    y1 = 19.715
    y2 = 19.755
    axis([x1,x2,y1,y2])
    # Correct the aspect ratio for longitude narrowing at this latitude.
    gca().set_aspect(1./cos(y1*pi/180.))
    ticklabel_format(format='plain',useOffset=False)
    # Zero contour = shoreline.
    contour(topo2.x,topo2.y,topo2.Z,[0.],colors='k')
    plot([204.9447],[19.7308], 'ko')   # from BM description
    plot([204.9437],[19.7307], 'ro')   # closer to pier

    # from <http://tidesandcurrents.noaa.gov/stationhome.html?id=1617760>
    # location is listed as: 19 degrees 43.8' N, 155 degrees, 3.3' W
    xg = 360 - (155 + 3.3/60.)
    yg = 19 + 43.8/60.
    plot([xg],[yg], 'bo')

    #gauges.append([1125, 204.91802, 19.74517, 0., 1.e9]) #Hilo
    #gauges.append([1126, 204.93003, 19.74167, 0., 1.e9]) #Hilo
    #gauges.append([3333, 204.93, 19.7576, 0., 1.e9])
if __name__=='__main__':
    # Build both topo files when run as a script.
    maketopo_hilo()
    maketopo_flat()
|
rjleveque/tsunami_benchmarks
|
nthmp_currents_2015/problem2/maketopo.py
|
Python
|
bsd-3-clause
| 2,646
|
<?php
namespace Vivo\CMS\Api\Helper;
use Vivo\CMS\Model\Folder;
use Vivo\Transliterator\TransliteratorInterface;
/**
* Document helper for document comparison of two documents.
*/
class DocumentCompare
{
    /**
     * Transliterator for unicode string comparison
     * @var TransliteratorInterface
     */
    protected $transliteratorMbStringCompare;

    /**
     * Constructor
     * @param \Vivo\Transliterator\TransliteratorInterface $transliteratorMbStringCompare
     */
    public function __construct(TransliteratorInterface $transliteratorMbStringCompare)
    {
        $this->transliteratorMbStringCompare = $transliteratorMbStringCompare;
    }

    /**
     * Returns document/folder.
     * Accepts either a Folder or an array with the Folder under key 'doc'.
     * @param array|Folder $document
     * @return Folder
     */
    protected function getDocument($document)
    {
        return is_array($document) ? $document['doc'] : $document;
    }

    /**
     * Return properties of document pair
     * @param array|Folder $doc1
     * @param array|Folder $doc2
     * @param string $propertyName
     * @return array Two-element array: [property of doc1, property of doc2]
     */
    protected function getPropertiesToCompare($doc1, $doc2, $propertyName)
    {
        return array(
            $this->getPropertyByName($this->getDocument($doc1), $propertyName),
            $this->getPropertyByName($this->getDocument($doc2), $propertyName),
        );
    }

    /**
     * Parses criteria string into array.
     * 'property_name' and 'sort_direction' properties are extracted.
     * Format: "property" or "property:asc" / "property:desc"
     * (anything other than 'desc' is treated as ascending).
     * @param string $criteriaString
     * @return array
     */
    protected function parseCriteriaString($criteriaString)
    {
        $criteria = array();
        if(strpos($criteriaString, ":") !== false) {
            $criteria['property_name'] = substr($criteriaString, 0, strpos($criteriaString,':'));
            $criteria['sort_direction'] = substr($criteriaString, strpos($criteriaString,':')+1);
        } else {
            $criteria['property_name'] = $criteriaString;
            $criteria['sort_direction'] = 'asc';
        }
        $criteria['sort_direction'] = $criteria['sort_direction'] == 'desc' ? SORT_DESC : SORT_ASC;
        return $criteria;
    }

    /**
     * Returns document property (generic getter)
     * @param Folder $document
     * @param string $property
     * @return mixed Null when the document has no matching getter.
     */
    protected function getPropertyByName(Folder $document, $property)
    {
        $getter = sprintf('get%s', ucfirst($property));
        return method_exists($document, $getter) ? $document->$getter() : null;
    }

    /**
     * Compares two documents based on given criteria
     * @param array|Folder $doc1
     * @param array|Folder $doc2
     * @param string $criteriaString
     * @return int Standart comparison output
     * @see http://cz2.php.net/manual/en/function.strcmp.php
     */
    public function compare($doc1, $doc2, $criteriaString)
    {
        $criteria = $this->parseCriteriaString($criteriaString);

        // NOTE(review): 'random' makes the comparator non-deterministic and
        // inconsistent across calls -- presumably intended for shuffling;
        // confirm the sort implementation used tolerates this.
        if($criteria['property_name'] === 'random') {
            return rand(-1, 1);
        }

        $comparisonResult = 0;
        list($doc1Prop, $doc2Prop) = $this->getPropertiesToCompare($doc1, $doc2, $criteria['property_name']);

        //comparison functions
        if(($doc1Prop instanceof \DateTime) && ($doc2Prop instanceof \DateTime)) {
            $comparisonResult = $doc1Prop->getTimestamp() - $doc2Prop->getTimestamp();
        } else {
            // Transliterate before strcmp so multi-byte strings compare sanely.
            $comparisonResult = strcmp(
                $this->transliteratorMbStringCompare->transliterate($doc1Prop),
                $this->transliteratorMbStringCompare->transliterate($doc2Prop)
            );
        }
        return ($criteria['sort_direction'] == SORT_ASC) ? $comparisonResult : -$comparisonResult;
    }
}
|
miroslavhajek/vivoportal
|
src/Vivo/CMS/Api/Helper/DocumentCompare.php
|
PHP
|
bsd-3-clause
| 3,725
|
package converter
import (
"go/token"
"reflect"
"strings"
"testing"
)
// TestIdentifierAt exercises identifierAt, which returns the [start, end)
// byte offsets of the identifier that touches byte index idx in src, or
// (-1, -1) when idx is out of range or does not touch an identifier.
// The multibyte cases use byte offsets (each rune of こんにちは is 3 bytes).
func TestIdentifierAt(t *testing.T) {
	type args struct {
		src string
		idx int
	}
	tests := []struct {
		name      string
		args      args
		wantStart int
		wantEnd   int
	}{
		{
			name:      "basic",
			args:      args{"abc", 0},
			wantStart: 0,
			wantEnd:   3,
		}, {
			// NOTE(review): duplicate subtest name "basic" -- consider renaming.
			name:      "basic",
			args:      args{"_a", 0},
			wantStart: 0,
			wantEnd:   2,
		}, {
			args:      args{"abc", 1},
			wantStart: 0,
			wantEnd:   3,
		}, {
			// idx == len(src): the cursor just after an identifier still hits it.
			args:      args{"abc", 3},
			wantStart: 0,
			wantEnd:   3,
		}, {
			args:      args{"abc", 10},
			wantStart: -1,
			wantEnd:   -1,
		}, {
			args:      args{"abc", -1},
			wantStart: -1,
			wantEnd:   -1,
		}, {
			// A bare number is not an identifier.
			args:      args{"1034", 2},
			wantStart: -1,
			wantEnd:   -1,
		}, {
			args:      args{"a034", 2},
			wantStart: 0,
			wantEnd:   4,
		}, {
			args:      args{"a+b", 2},
			wantStart: 2,
			wantEnd:   3,
		}, {
			args:      args{"a+b", 1},
			wantStart: 0,
			wantEnd:   1,
		}, {
			name:      "multibytes",
			args:      args{"こんにちは", 6},
			wantStart: 0,
			wantEnd:   15,
		}, {
			// idx falls inside a rune (not on a boundary) -> no identifier.
			name:      "multibytes_invalidpos",
			args:      args{"こんにちは", 5},
			wantStart: -1,
			wantEnd:   -1,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			gotStart, gotEnd := identifierAt(tt.args.src, tt.args.idx)
			if gotStart != tt.wantStart {
				t.Errorf("identifierAt() gotStart = %v, want %v", gotStart, tt.wantStart)
			}
			if gotEnd != tt.wantEnd {
				t.Errorf("identifierAt() gotEnd = %v, want %v", gotEnd, tt.wantEnd)
			}
		})
	}
}
// Test_findLastDot exercises findLastDot, which, for a cursor at byte index
// idx in src, locates the last '.' of a selector expression and the
// [start, end) range of the identifier following it. All three results are
// -1 when the cursor is not inside a selector's trailing identifier.
func Test_findLastDot(t *testing.T) {
	type args struct {
		src string
		idx int
	}
	tests := []struct {
		name        string
		args        args
		wantDot     int
		wantIDStart int
		wantIDEnd   int
	}{
		{
			name:        "basic",
			args:        args{"ab.cd", 3},
			wantDot:     2,
			wantIDStart: 3,
			wantIDEnd:   5,
		}, {
			// Cursor at end of string still belongs to the identifier.
			name:        "eos",
			args:        args{"ab.cd", 5},
			wantDot:     2,
			wantIDStart: 3,
			wantIDEnd:   5,
		}, {
			// Cursor on the dot itself is not inside the identifier.
			name:        "dot",
			args:        args{"ab.cd", 2},
			wantDot:     -1,
			wantIDStart: -1,
			wantIDEnd:   -1,
		}, {
			// Whitespace between the dot and the identifier is tolerated.
			name:        "space",
			args:        args{"ab. cd", 6},
			wantDot:     2,
			wantIDStart: 5,
			wantIDEnd:   7,
		}, {
			name:        "newline",
			args:        args{"ab.\ncd", 5},
			wantDot:     2,
			wantIDStart: 4,
			wantIDEnd:   6,
		}, {
			// The identifier under the cursor is not preceded by a dot.
			name:        "not_dot",
			args:        args{"a.b/cd", 4},
			wantDot:     -1,
			wantIDStart: -1,
			wantIDEnd:   -1,
		}, {
			name:        "empty_src",
			args:        args{"", 0},
			wantDot:     -1,
			wantIDStart: -1,
			wantIDEnd:   -1,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			gotDot, gotIDStart, gotIDEnd := findLastDot(tt.args.src, tt.args.idx)
			if gotDot != tt.wantDot {
				t.Errorf("findLastDot() gotDot = %v, want %v", gotDot, tt.wantDot)
			}
			if gotIDStart != tt.wantIDStart {
				t.Errorf("findLastDot() gotIDStart = %v, want %v", gotIDStart, tt.wantIDStart)
			}
			if gotIDEnd != tt.wantIDEnd {
				t.Errorf("findLastDot() gotIDEnd = %v, want %v", gotIDEnd, tt.wantIDEnd)
			}
		})
	}
}
// Test_isPosInFuncBody checks isPosInFuncBody, which reports whether a token
// position lies inside a function (or function literal) body. The cursor
// location is marked with "[cur]" in each source snippet.
func Test_isPosInFuncBody(t *testing.T) {
	tests := []struct {
		name string
		src  string
		want bool
	}{
		{"before", `func sum(a, b int) int[cur] { return a + b }`, false},
		{"brace_open", `func sum(a, b int) int [cur]{ return a + b }`, false},
		{"first", `func sum(a, b int) int {[cur] return a + b }`, true},
		{"last", `func sum(a, b int) int { return a + b[cur] }`, true},
		{"brace_close", `func sum(a, b int) int { return a + b [cur]}`, true},
		{"after", `func sum(a, b int) int { return a + b }[cur]`, false},
		{"funclit", `f := func (a, b int) int { [cur]return a + b }`, true},
	}
	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			// Locate the cursor marker; token.Pos is 1-based, so index+1.
			cur := strings.Index(tc.src, "[cur]")
			if cur < 0 {
				t.Error("[cur] not found in src")
				return
			}
			pos := token.Pos(cur + 1)
			src := strings.Replace(tc.src, "[cur]", "", -1)
			_, blk, err := parseLesserGoString(src)
			if err != nil {
				t.Errorf("Failed to parse: %v", err)
				return
			}
			if got := isPosInFuncBody(blk, pos); got != tc.want {
				t.Errorf("isPosInFuncBody() = %v, want %v", got, tc.want)
			}
		})
	}
}
// TestComplete exercises Complete (code completion) end to end. Each test
// source marks the cursor with "[cur]", and "[selector_example]" expands to
// selectorSpecExample below (the selector example from the Go language
// spec). When ignoreWant is set, the exact result list is not compared;
// wantInclude/wantExclude entries are still checked for (non-)membership.
func TestComplete(t *testing.T) {
	const selectorSpecExample = `
type T0 struct {
x int
}
func (*T0) M0()
type T1 struct {
y int
}
func (T1) M1()
type T2 struct {
z int
T1
*T0
}
func (*T2) M2()
type Q *T2
var t T2 // with t.T0 != nil
var p *T2 // with p != nil and (*p).T0 != nil
var q Q = p
`
	tests := []struct {
		name        string
		src         string
		want        []string
		ignoreWant  bool
		wantInclude []string
		wantExclude []string
	}{
		{
			name: "go_keyword",
			src: `
import (
"bytes"
)
go bytes.sp[cur]`,
			want: []string{"Split", "SplitAfter", "SplitAfterN", "SplitN"},
		}, {
			name: "go_keyword_in_func",
			src: `
import (
"bytes"
)
func f() {
go bytes.sp[cur]`,
			want: []string{"Split", "SplitAfter", "SplitAfterN", "SplitN"},
		}, {
			name: "go_with_defer_keyword",
			src: `
import (
"bytes"
)
func f(){
}
defer f()
go bytes.sp[cur]`,
			want: []string{"Split", "SplitAfter", "SplitAfterN", "SplitN"},
		}, {
			name: "defer_before_go_keyword",
			src: `
func foo(){
}
func bar(){
}
defer fo[cur]
go bar()`,
			want: []string{"foo"},
		}, {
			name: "defer_between_2_go_keywords",
			src: `
func foo(){
}
func bar(){
}
go bar()
defer fo[cur]
go bar()`,
			want: []string{"foo"},
		}, {
			name: "non_go_defer_function_call_with_go_keyword",
			src: `
func foo(){
}
func bar(){
}
fo[cur]
go bar()`,
			want: []string{"foo"},
		}, {
			name: "package",
			src: `
import (
"bytes"
)
var buf bytes.sp[cur]`,
			want: []string{"Split", "SplitAfter", "SplitAfterN", "SplitN"},
		}, {
			name: "package_in_func",
			src: `
import (
"bytes"
)
func f() {
var buf bytes.sp[cur]`,
			want: []string{"Split", "SplitAfter", "SplitAfterN", "SplitN"},
		}, {
			// Matching is case-insensitive on the typed prefix.
			name: "package_upper",
			src: `
import (
"bytes"
)
var buf bytes.SP[cur]`,
			want: []string{"Split", "SplitAfter", "SplitAfterN", "SplitN"},
		}, {
			name: "value",
			src: `
import (
"bytes"
)
var buf bytes.Buffer
buf.un[cur]`,
			want: []string{"UnreadByte", "UnreadRune"},
		}, {
			name: "value_in_func",
			src: `
import (
"bytes"
)
func f() {
var buf bytes.Buffer
buf.un[cur]`,
			want: []string{"UnreadByte", "UnreadRune"},
		}, {
			name: "pointer",
			src: `
import (
"bytes"
)
var buf *bytes.Buffer
buf.un[cur]`,
			want: []string{"UnreadByte", "UnreadRune"},
		}, {
			name: "selector_example1",
			src: `
[selector_example]
t.[cur]`,
			want: []string{"M0", "M1", "M2", "T0", "T1", "x", "y", "z"},
		}, {
			name: "selector_example2",
			src: `
[selector_example]
p.[cur]`,
			want: []string{"M0", "M1", "M2", "T0", "T1", "x", "y", "z"},
		}, {
			name: "selector_example3",
			src: `
[selector_example]
q.[cur]`,
			want: []string{"T0", "T1", "x", "y", "z"},
		}, {
			// ".(" is parsed as TypeAssertExpr.
			name: "dot_paren",
			src: `
[selector_example]
q.[cur](`,
			want: []string{"T0", "T1", "x", "y", "z"},
		}, {
			name: "before_type_assert",
			src: `
[selector_example]
var x interface{}
x.(T0).[cur]`,
			want: []string{"M0", "x"},
		}, {
			name: "before_type_switch",
			src: `
[selector_example]
type I0 interface {
M0()
}
var i I0
switch i.[cur](type) {
default:
}`,
			want: []string{"M0"},
		}, {
			name: "lgo_context",
			src: `
_ctx.val[cur]`,
			want: []string{"Value"},
		}, {
			name: "lgo_context_infunc",
			src: `
func f() {
_ctx.val[cur]
}`,
			want: []string{"Value"},
		}, {
			// Only identifiers declared before the cursor are suggested.
			name: "id_simple",
			src: `
abc := 100
xyz := "hello"
[cur]
zzz := 1.23
`,
			ignoreWant:  true,
			wantInclude: []string{"abc", "xyz"},
			wantExclude: []string{"zzz"},
		}, {
			name: "id_upper",
			src: `
abc := 100
xyz := "hello"
XY[cur]
zzz := 1.23
`,
			want: []string{"xyz"},
		}, {
			name: "id_camel_case",
			src: `
func testFunc(){}
testf[cur]
`,
			want: []string{"testFunc"},
		}, {
			name: "id_partial",
			src: `
abc := 100
xyz := "hello"
xy[cur]
`,
			want: []string{"xyz"},
		}, {
			name: "id_in_func",
			src: `
func fn() {
abc := 100
xyz := "hello"
[cur]
zzz := 1.23
}`,
			ignoreWant:  true,
			wantInclude: []string{"abc", "xyz", "int64"},
			wantExclude: []string{"zzz"},
		}, {
			name: "id_partial_in_func",
			src: `
func fn() {
abc := 100
xyz := "hello"
xy[cur]
}`,
			want: []string{"xyz"},
		}, {
			// Results are expected in case-insensitive sorted order.
			name: "sort",
			src: `
type data struct {
abc int
DEF int
xyz int
}
var d data
d.[cur]
`,
			want: []string{"abc", "DEF", "xyz"},
		}, {
			// https://github.com/yunabe/lgo/issues/18
			name:        "bug18",
			src:         `var [cur]`,
			ignoreWant:  true,
			wantInclude: []string{"int64"},
		}, {
			name: "bug17",
			src: `
import "bytes"
var buf bytes.Buffer
buf.[cur]
y := 10`,
			ignoreWant: true,
			// TODO: Fix issue #17.
			// wantInclude: []string{"Bytes", "Grow", "Len"},
		}, {
			// Similar to bug17, but Complete works in this case.
			name: "bug17ok",
			src: `
import "bytes"
var buf bytes.Buffer
buf.un[cur]
y := 10`,
			want: []string{"UnreadByte", "UnreadRune"},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			src := tt.src
			src = strings.Replace(src, "[selector_example]", selectorSpecExample, -1)
			// token.Pos is 1-based; Index returning -1 makes pos == 0 (NoPos).
			pos := token.Pos(strings.Index(src, "[cur]") + 1)
			if pos <= 0 {
				t.Error("[cur] not found")
				return
			}
			got, _, _ := Complete(strings.Replace(src, "[cur]", "", -1), pos, &Config{})
			if !tt.ignoreWant && !reflect.DeepEqual(got, tt.want) {
				t.Errorf("Expected %#v but got %#v", tt.want, got)
			}
			if len(tt.wantInclude) == 0 && len(tt.wantExclude) == 0 {
				return
			}
			// Membership checks for the ignoreWant / partial-expectation cases.
			m := make(map[string]bool)
			for _, c := range got {
				m[c] = true
			}
			for _, c := range tt.wantInclude {
				if !m[c] {
					t.Errorf("%q is not suggested; Got %#v", c, got)
				}
			}
			for _, c := range tt.wantExclude {
				if m[c] {
					t.Errorf("%q is suggested unexpectedly", c)
				}
			}
		})
	}
}
// TestCompleteKeywords verifies completion works for identifiers that have
// Go keyword prefixes (e.g. "forxyz", "goabc"). "[kwd]" in each test code is
// replaced with every keyword in turn and "[cur]" marks the cursor.
func TestCompleteKeywords(t *testing.T) {
	// Checks autocomplete works even if identifiers have keyword prefixes.
	// https://golang.org/ref/spec#Keywords
	kwds := []string{
		"break", "default", "func", "interface", "select",
		"case", "defer", "go", "map", "struct",
		"chan", "else", "goto", "package", "switch",
		"const", "fallthrough", "if", "range", "type",
		"continue", "for", "import", "return", "var",
	}
	tests := []struct {
		name string
		code string
		want []string
	}{
		{
			name: "id",
			code: `
var [kwd]xyz, [kwd]abc int
[kwd][cur]`,
			want: []string{"[kwd]abc", "[kwd]xyz"},
		}, {
			// Cursor in the middle of an expression, not at end of input.
			name: "idspace",
			code: `
var [kwd]def, [kwd]ghi int
[kwd][cur] + 10`,
			want: []string{"[kwd]def", "[kwd]ghi"},
		}, {
			// Keyword-prefixed field names after a selector dot.
			name: "dot",
			code: `
type data struct {
[kwd]123 int
[kwd]456 string
}
var d data
d.[kwd][cur]`,
			want: []string{"[kwd]123", "[kwd]456"},
		},
	}
	for _, kwd := range kwds {
		for _, src := range tests {
			t.Run(kwd+"_"+src.name, func(t *testing.T) {
				code := strings.Replace(src.code, "[kwd]", kwd, -1)
				pos := token.Pos(strings.Index(code, "[cur]") + 1)
				if pos <= 0 {
					t.Fatal("[cur] not found")
					return
				}
				got, _, _ := Complete(strings.Replace(code, "[cur]", "", -1), pos, &Config{})
				// Expand the [kwd] placeholders in the expectations too.
				var want []string
				for _, w := range src.want {
					want = append(want, strings.Replace(w, "[kwd]", kwd, -1))
				}
				if !reflect.DeepEqual(got, want) {
					t.Errorf("got %v; want %v", got, want)
				}
			})
		}
	}
}
|
yunabe/lgo
|
converter/complete_test.go
|
GO
|
bsd-3-clause
| 11,983
|
## split_multimol2
*split_multimol2(mol2_path)*
Splits a multi-mol2 file into individual Mol2 file contents.
**Parameters**
- `mol2_path` : str
Path to the multi-mol2 file. Parses gzip files if the filepath
ends on .gz.
**Returns**
A generator object for lists for every extracted mol2-file. Lists contain
the molecule ID and the mol2 file contents.
e.g., ['ID1234', ['@<TRIPOS>MOLECULE\n', '...']]. Note that bytestrings
are returned (for reasons of efficiency) if the Mol2 content is read
from a gzip (.gz) file.
|
rasbt/biopandas
|
docs/api_modules/biopandas.mol2/split_multimol2.md
|
Markdown
|
bsd-3-clause
| 546
|
<?php
namespace Meerkat\Slot;
use Meerkat\Slot\Slot;
use Meerkat\Core\Theme;
use \Kohana as Kohana;
use \Profiler as Profiler;
class Slot_PathTemplate extends Slot {

    protected $_lifetime = 1;

    /**
     * Resolves the filesystem path of the template identified by $this->_id.
     * The active user theme directory is searched first, then the default
     * theme directory ('!').
     *
     * @return string Path of the template file as returned by Kohana::find_file()
     * @throws \Exception When the template is not found in any theme
     */
    function load() {
        $token = Profiler::start(__CLASS__, $this->_id);
        $themes_dirs = array();
        if ($theme = Theme::instance()->get()) {
            // Reuse the value fetched above instead of calling get() twice.
            $themes_dirs['user'] = $theme;
        }
        $themes_dirs['default'] = '!';
        $path = false;
        foreach ($themes_dirs as $themes_dir) {
            $path = Kohana::find_file('tpl' . '/' . $themes_dir, $this->_id, 'html');
            if ($path) {
                // Bug fix: stop at the first theme that provides the template.
                // Previously the loop continued, so a hit in the user theme was
                // overwritten by the result of the default-theme lookup.
                break;
            }
        }
        if (!$path) {
            // Release the profiler token before bailing out (was leaked before).
            Profiler::stop($token);
            throw new \Exception('Template ' . $this->_id . ' not found');
        }
        Profiler::stop($token);
        return $path;
    }
}
|
aberdnikov/meerkat-twig
|
classes/Meerkat/Slot/Slot/PathTemplate.php
|
PHP
|
bsd-3-clause
| 895
|
// This file has been generated by Py++.
#include "boost/python.hpp"
#include "wrap_osganimation.h"
#include "wrap_referenced.h"
#include "rigtransform.pypp.hpp"
namespace bp = boost::python;
// Py++-generated Boost.Python wrapper for osgAnimation::RigTransform.
// It lets Python subclasses override operator() (exposed as "__call__") and
// setThreadSafeRefUnref; each virtual checks for a Python override first and
// falls back to the C++ base implementation otherwise. Do not edit by hand.
struct RigTransform_wrapper : osgAnimation::RigTransform, bp::wrapper< osgAnimation::RigTransform > {

    RigTransform_wrapper( )
    : osgAnimation::RigTransform( )
      , bp::wrapper< osgAnimation::RigTransform >(){
        // null constructor
    }

    // Virtual dispatch: prefer a Python-side "__call__" override, else the
    // C++ base class implementation.
    virtual void operator()( ::osgAnimation::RigGeometry & arg0 ) {
        if( bp::override func___call__ = this->get_override( "__call__" ) )
            func___call__( boost::ref(arg0) );
        else{
            this->osgAnimation::RigTransform::operator()( boost::ref(arg0) );
        }
    }

    // Non-virtual entry point bound as the "default" implementation so
    // Python overrides can still call the base behavior explicitly.
    void default___call__( ::osgAnimation::RigGeometry & arg0 ) {
        osgAnimation::RigTransform::operator()( boost::ref(arg0) );
    }

    // Same override-or-fallback pattern for osg::Referenced's thread-safety
    // toggle.
    virtual void setThreadSafeRefUnref( bool threadSafe ) {
        if( bp::override func_setThreadSafeRefUnref = this->get_override( "setThreadSafeRefUnref" ) )
            func_setThreadSafeRefUnref( threadSafe );
        else{
            this->osg::Referenced::setThreadSafeRefUnref( threadSafe );
        }
    }

    void default_setThreadSafeRefUnref( bool threadSafe ) {
        osg::Referenced::setThreadSafeRefUnref( threadSafe );
    }
};
// Registers the RigTransform Python class: held by osg::ref_ptr, derives from
// osg.Referenced, non-copyable, with "__call__" dispatching through the
// wrapper (virtual override) and default___call__ as the base implementation.
void register_RigTransform_class(){
    bp::class_< RigTransform_wrapper, bp::bases< ::osg::Referenced >, osg::ref_ptr< RigTransform_wrapper >, boost::noncopyable >( "RigTransform", bp::init< >() )
        .def(
            "__call__"
            , (void ( ::osgAnimation::RigTransform::* )( ::osgAnimation::RigGeometry & ))(&::osgAnimation::RigTransform::operator())
            , (void ( RigTransform_wrapper::* )( ::osgAnimation::RigGeometry & ))(&RigTransform_wrapper::default___call__)
            , ( bp::arg("arg0") ) );
}
|
JaneliaSciComp/osgpyplusplus
|
src/modules/osgAnimation/generated_code/RigTransform.pypp.cpp
|
C++
|
bsd-3-clause
| 1,893
|
@ECHO OFF
REM Command file for Sphinx documentation
REM Usage: make.bat <target>  -- run "make.bat help" for the list of targets.
REM SPHINXBUILD can be overridden via the environment; defaults to sphinx-build.
if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set BUILDDIR=_build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
set I18NSPHINXOPTS=%SPHINXOPTS% .
if NOT "%PAPER%" == "" (
	set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
	set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
)
if "%1" == "" goto help
if "%1" == "help" (
	:help
	echo.Please use `make ^<target^>` where ^<target^> is one of
	echo.  html       to make standalone HTML files
	echo.  dirhtml    to make HTML files named index.html in directories
	echo.  singlehtml to make a single large HTML file
	echo.  pickle     to make pickle files
	echo.  json       to make JSON files
	echo.  htmlhelp   to make HTML files and a HTML help project
	echo.  qthelp     to make HTML files and a qthelp project
	echo.  devhelp    to make HTML files and a Devhelp project
	echo.  epub       to make an epub
	echo.  epub3      to make an epub3
	echo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter
	echo.  text       to make text files
	echo.  man        to make manual pages
	echo.  texinfo    to make Texinfo files
	echo.  gettext    to make PO message catalogs
	echo.  changes    to make an overview over all changed/added/deprecated items
	echo.  xml        to make Docutils-native XML files
	echo.  pseudoxml  to make pseudoxml-XML files for display purposes
	echo.  linkcheck  to check all external links for integrity
	echo.  doctest    to run all doctests embedded in the documentation if enabled
	echo.  coverage   to run coverage check of the documentation if enabled
	echo.  dummy      to check syntax errors of document sources
	goto end
)
REM "clean" removes everything under BUILDDIR (doctrees and all builder output).
if "%1" == "clean" (
	for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
	del /q /s %BUILDDIR%\*
	goto end
)
REM Check if sphinx-build is available and fallback to Python version if any
%SPHINXBUILD% 1>NUL 2>NUL
if errorlevel 9009 goto sphinx_python
goto sphinx_ok
:sphinx_python
set SPHINXBUILD=python -m sphinx.__init__
%SPHINXBUILD% 2> nul
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.http://sphinx-doc.org/
	exit /b 1
)
:sphinx_ok
REM Builder targets: each invokes sphinx-build with the matching -b builder
REM and reports where the output landed.
if "%1" == "html" (
	%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/html.
	goto end
)
if "%1" == "dirhtml" (
	%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
	goto end
)
if "%1" == "singlehtml" (
	%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
	goto end
)
if "%1" == "pickle" (
	%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the pickle files.
	goto end
)
if "%1" == "json" (
	%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the JSON files.
	goto end
)
if "%1" == "htmlhelp" (
	%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run HTML Help Workshop with the ^
.hhp project file in %BUILDDIR%/htmlhelp.
	goto end
)
if "%1" == "qthelp" (
	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
	echo.^> qcollectiongenerator %BUILDDIR%\qthelp\pyradiomics.qhcp
	echo.To view the help file:
	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\pyradiomics.ghc
	goto end
)
if "%1" == "devhelp" (
	%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished.
	goto end
)
if "%1" == "epub" (
	%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The epub file is in %BUILDDIR%/epub.
	goto end
)
if "%1" == "epub3" (
	%SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The epub3 file is in %BUILDDIR%/epub3.
	goto end
)
if "%1" == "latex" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
	goto end
)
REM latexpdf/latexpdfja additionally run the generated LaTeX Makefile.
if "%1" == "latexpdf" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	cd %BUILDDIR%/latex
	make all-pdf
	cd %~dp0
	echo.
	echo.Build finished; the PDF files are in %BUILDDIR%/latex.
	goto end
)
if "%1" == "latexpdfja" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	cd %BUILDDIR%/latex
	make all-pdf-ja
	cd %~dp0
	echo.
	echo.Build finished; the PDF files are in %BUILDDIR%/latex.
	goto end
)
if "%1" == "text" (
	%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The text files are in %BUILDDIR%/text.
	goto end
)
if "%1" == "man" (
	%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The manual pages are in %BUILDDIR%/man.
	goto end
)
if "%1" == "texinfo" (
	%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
	goto end
)
if "%1" == "gettext" (
	%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
	goto end
)
if "%1" == "changes" (
	%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
	if errorlevel 1 exit /b 1
	echo.
	echo.The overview file is in %BUILDDIR%/changes.
	goto end
)
if "%1" == "linkcheck" (
	%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
	if errorlevel 1 exit /b 1
	echo.
	echo.Link check complete; look for any errors in the above output ^
or in %BUILDDIR%/linkcheck/output.txt.
	goto end
)
if "%1" == "doctest" (
	%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
	if errorlevel 1 exit /b 1
	echo.
	echo.Testing of doctests in the sources finished, look at the ^
results in %BUILDDIR%/doctest/output.txt.
	goto end
)
if "%1" == "coverage" (
	%SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
	if errorlevel 1 exit /b 1
	echo.
	echo.Testing of coverage in the sources finished, look at the ^
results in %BUILDDIR%/coverage/python.txt.
	goto end
)
if "%1" == "xml" (
	%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The XML files are in %BUILDDIR%/xml.
	goto end
)
if "%1" == "pseudoxml" (
	%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
	goto end
)
if "%1" == "dummy" (
	%SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. Dummy builder generates no files.
	goto end
)
:end
|
Radiomics/pyradiomics
|
docs/make.bat
|
Batchfile
|
bsd-3-clause
| 7,742
|
# Presumably auto-generated (the path encodes the parameters) -- exercises
# pyaf on one artificial dataset: 128 daily points, seed 0, Lag1Trend trend,
# cycle length 7, 'Difference' transform, sigma 0 (no noise), 20 exogenous
# variables, AR order 12.
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art


art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "Lag1Trend", cycle_length = 7, transform = "Difference", sigma = 0.0, exog_count = 20, ar_order = 12);
|
antoinecarme/pyaf
|
tests/artificial/transf_Difference/trend_Lag1Trend/cycle_7/ar_12/test_artificial_128_Difference_Lag1Trend_7_12_20.py
|
Python
|
bsd-3-clause
| 266
|
@REM Launcher shim: run the Ruby script that shares this file's directory and
@REM base name (%~dpn0 = drive + path + name, no extension), forwarding all
@REM command-line arguments.
@ruby "%~dpn0" %*
|
raphaelr/convolution
|
bin/convolution.bat
|
Batchfile
|
bsd-3-clause
| 18
|
# Build configuration for the 'ea' audio server.
CC = gcc
EXEC = ea
# NOTE(review): -w suppresses all warnings; consider -Wall -Wextra instead.
CFLAGS = -g -w -O2 -DDEBUG -D__USE_GNU -D_GNU_SOURCE
CCFLAGS = -lid3tag -lmad -lz -lpthread $(CFLAGS)
OBJS = command.o list.o socket.o ea.o util.o search.o playlist.o

# 'all' and 'clean' are commands, not files.
.PHONY: all clean

all: $(EXEC)

$(EXEC): $(OBJS)
	$(CC) $(CCFLAGS) -o $(EXEC) $(OBJS)

clean:
	rm -fr $(OBJS) $(EXEC) *.~*.~ gmon.out

# Per-object header dependencies (compilation uses make's implicit %.o: %.c rule).
command.o: command.c command.h util.h ea.h
ea.o: ea.c ea.h util.h ea.h
list.o: list.c list.h util.h ea.h
socket.o: socket.c list.c socket.h list.h util.h ea.h
util.o: util.c util.h ea.h
search.o: search.c util.c search.h util.h ea.h
# Bug fix: playlist.o previously listed only headers, so editing playlist.c
# never triggered a rebuild of playlist.o.
playlist.o: playlist.c playlist.h ea.h
|
rpj/epic-audio
|
Makefile
|
Makefile
|
bsd-3-clause
| 573
|
<div align="center">
<img src="images/logo.png" width="40%" alt="WARP17, The Stateful Traffic Generator">
</div>
_WARP17, The Stateful Traffic Generator for L1-L7_ is a lightweight solution
for generating high volumes of session based traffic with very high setup
rates. WARP17 currently focuses on L5-L7 application traffic (e.g., _HTTP_)
running on top of TCP as this kind of traffic requires a complete TCP
implementation.
Nevertheless, WARP17 also supports application traffic running on top of UDP.
Developing network components or services usually requires expensive
proprietary solutions for validating the implemented functionalities and
scalability or performance requirements.
WARP17 is a platform agnostic tool based on [DPDK](http://dpdk.org/) which:
* allows extremely fast generation and sustaining of stateful sessions
* offers configurable TCP/UDP infrastructure which can be used for generating
high connection setup and data rates for application traffic
* is [Linux](https://kernel.org/) based so all the openly available tools can
be integrated by the users of WARP17.
The WARP17 TCP/IP implementation runs completely in user-space thus avoiding
the additional latency in the kernel stack. From a hardware perspective,
WARP17 will be able to run on all the platforms that are supported by DPDK.
# Performance benchmarks
## Reference platform HW configuration
The configuration of the server on which the WARP17 benchmarks were run is:
* [Super X10DRX](http://www.supermicro.com/products/motherboard/Xeon/C600/X10DRX.cfm)
dual socket motherboard
* Two [Intel® Xeon® Processor E5-2660 v3](http://ark.intel.com/products/81706/Intel-Xeon-Processor-E5-2660-v3-25M-Cache-2_60-GHz)
* 128GB RAM, using 16x 8G DDR4 2133Mhz to fill all the memory slots
* 2 40G [Intel® Ethernet Converged Network Adapter XL710-QDA1](http://ark.intel.com/products/83966/Intel-Ethernet-Converged-Network-Adapter-XL710-QDA1)
__NOTE__: In order for the best performance to be achieved when running only
one instance of WARP17, both NICs have to be installed on the same PCI bus. In
our case the two XL710 40G adapters were both installed on __socket 1__.
For all tests we used the following WARP17 configuration (detailed descriptions
of the command line arguments can be found in the [WARP17 command-line arguments](#warp17-command-line-arguments) section):
* The 40G adapters were connected back to back
* 22 lcores (hardware threads): `-c 0xFFC00FFC03`
- 20 lcores (10-19, 30-39) were reserved for generating/receiving
traffic
- 2 lcores are used for management of the test
* 32GB RAM memory allocated from hugepages: `-m 32768`
Three types of session setup benchmarks were run, __while emulating both
the servers and the clients__ when using 10 lcores for each ethernet port:
* TCP sessions with raw (random) application payload
* TCP sessions with __real__ HTTP payload
* UDP traffic with raw (random) payload
For each type of traffic 3 charts are presented with results collected
when running the test with different request (top scale of each chart)
and response (bottom scale of each chart) message sizes. These charts
show the client port:
* Session setup rate
* Packets per second transmitted and received
* Ethernet link utilization (percentage of 40G)
It is interesting to see that when emulating real HTTP traffic on top of
4 million TCP sessions, WARP17 can easily exhaust the 40Gbps throughput of
the link.
NOTE: the script used to obtain the benchmark results is available in the
codebase at `examples/python/test_2_perf_benchmark.py`. The script spawns WARP17
for each of the test configurations we were interested in.
## TCP setup and data rates for RAW application traffic
__NOTE__: In the case when we only want to test the TCP control implementation
(i.e., the TCP 3-way handshake and TCP CLOSE sequence), WARP17 achieved the
maximum setup rate of 3.4M clients/s and 3.4M servers/s, __so a total of
6.8M TCP sessions are handled every second__.
The tests set up 4 million TCP sessions (i.e., 4 million TCP clients and 4
million TCP servers) on which clients continuously send fixed size requests
(with random payload) and wait for fixed size responses from the servers.
The tests stop after all the clients sent at least one request.
* TCP raw traffic setup rate (clients and servers) varies between
__1.8M sess/s__ when sending small requests and responses and
__1.4M sess/s__ when using bigger messages:

* TCP raw traffic packets per second :

* TCP raw traffic link utilization reaches line rate (40Gbps) as we increase
the size of the requests and responses:

## TCP setup and data rates for HTTP application traffic
The tests set up 4 million TCP sessions (i.e., 4 million TCP clients and 4
million TCP servers) on which the clients continuously send _HTTP GET_
requests and wait for the _HTTP_ responses from the servers.
The tests stop after all the clients sent at least one request.
* HTTP traffic setup rate (clients and servers) varies between __1.8M sess/s__
when sending small requests and responses and __1.3M sess/s__ when using
bigger messages.

* HTTP traffic packets per second:

* HTTP traffic link utilization reaches line rate (40Gbps) as we increase the
size of the requests and responses:

## UDP setup and data rates for RAW application traffic
The tests continuously send UDP fixed size requests (with random
payload) from 4 million clients and wait for fixed size responses from the servers.
The tests stop after 4 million clients sent at least one request.
* UDP raw traffic packets per second varies between __22.5M pkts/s__ when
sending small requests and __9.5M pkts/s__ when sending bigger packets:

* UDP raw traffic link utilization:

# Installing and configuring
## Prerequisites
Any 64 bit Linux distribution will do, however we have been testing this with
Ubuntu Server 14.04 LTS. In addition we have made an OVF virtual machine image
available, details can be found in the respective [documentation](ovf/README.md).
### Install build essential, python and ncurses
```
sudo apt-get install build-essential python ncurses-dev
```
### Install DPDK 16.11
* Download [DPDK 16.11](http://dpdk.org/browse/dpdk/refs/)
```
tar xf dpdk-16.11.tar.xz
cd dpdk-16.11
```
* Install DPDK:
```
make install T=x86_64-native-linuxapp-gcc
```
* Load the `igb_uio` DPDK module, either as shown below or by running the
`$RTE_SDK/tools/dpdk-setup.sh` script and selecting option
`[16] Insert IGB UIO module`:
```
sudo modprobe uio
sudo insmod x86_64-native-linuxapp-gcc/kmod/igb_uio.ko
```
* Enable at least 32 1G hugepages and configure them (see section 2.3.2.1 from
the [DPDK Guide](http://dpdk.org/doc/guides/linux_gsg/sys_reqs.html)):
- add the following line to `/etc/default/grub`:
```
GRUB_CMDLINE_LINUX="default_hugepagesz=1G hugepagesz=1G hugepages=32"
```
- update grub:
```
sudo update-grub
```
- reboot the machine
```
sudo reboot
```
* Mount hugepages (see section 2.3.2.2 from the
[DPDK Guide](http://dpdk.org/doc/guides/linux_gsg/sys_reqs.html)):
- add the mountpoint:
```
sudo mkdir /mnt/huge_1GB
```
- make the mountpoint permanent by adding to `/etc/fstab`:
```
nodev /mnt/huge_1GB hugetlbfs pagesize=1GB 0 0
```
* Export the path to the DPDK SDK (where DPDK was installed) into the variable
RTE_SDK. For example:
```
export RTE_SDK=/home/<user>/src/dpdk-16.11
```
* Export the target of the DPDK SDK into the variable RTE_TARGET. For example:
```
export RTE_TARGET=x86_64-native-linuxapp-gcc
```
### Install Google Protocol Buffers
* First install the protobuf compilers and python libraries.
```
sudo apt-get install protobuf-compiler libprotobuf-dev python-protobuf
```
* If using Ubuntu Server 14.04 LTS then just install:
```
sudo apt-get install libprotobuf-c0 libprotobuf-c0-dev libprotobuf8 libprotoc8 protobuf-c-compiler
```
* Otherwise (Ubuntu version >= 15.10):
* Install [libprotobuf-c](http://packages.ubuntu.com/trusty/amd64/libprotobuf-c0/download),
[libprotobuf-c-dev](http://packages.ubuntu.com/trusty/amd64/libprotobuf-c0-dev/download)
from Ubuntu 14.04LTS:
```
sudo dpkg -i libprotobuf-c0_0.15-1build1_amd64.deb
sudo dpkg -i libprotobuf-c0-dev_0.15-1build1_amd64.deb
```
* Install [libprotobuf8](http://packages.ubuntu.com/trusty/amd64/libprotobuf8/download)
from Ubuntu 14.04LTS:
sudo dpkg -i libprotobuf8_2.5.0-9ubuntu1_amd64.deb
* Install [libprotoc8](http://packages.ubuntu.com/trusty/amd64/libprotoc8/download)
from Ubuntu 14.04LTS:
sudo dpkg -i libprotoc8_2.5.0-9ubuntu1_amd64.deb
* Install [protobuf-c-compiler](http://packages.ubuntu.com/trusty/amd64/protobuf-c-compiler/download)
from Ubuntu 14.04LTS:
sudo dpkg -i protobuf-c-compiler_0.15-1build1_amd64.deb
## Get WARP17
Get the `warp17-<ver>.tgz` archive or clone the desired release.
## Compile WARP17
```
tar xfz warp17-<ver>.tgz
cd warp17
make
```
## Configure Python virtualenv
```
sudo apt-get install python-pip
sudo pip install virtualenv
virtualenv warp17-venv
source warp17-venv/bin/activate
pip install -r python/requirements.txt
```
Once installed, whenever python tests need to run the virtual environment must
be activated:
```
source warp17-venv/bin/activate
```
To exit the virtual environment and return to the default python interpreter
and libraries:
```
deactivate
```
## Configure DPDK ports
Use the `$RTE_SDK/tools/dpdk-setup.sh` script (as described in the
[DPDK Guide](http://dpdk.org/doc/guides/linux_gsg/quick_start.html)). Select
which ports to be controlled by the IGB UIO module: option `[22] Bind
Ethernet/Crypto device to IGB UIO module`.
# How to run
From the top directory of WARP17:
```
./build/warp17 <dpdk-command-line-args> -- <warp17-command-line-args>
```
## Running as non-root
After compiling WARP17 change the owner of the binary to `root` (in order to
allow access to `/proc/self/pagemap`):
```
sudo chown root build/warp17
```
Set the `suid` bit on the binary in order to allow the user to keep
permissions:
```
sudo chmod u+s build/warp17
```
## Command-line arguments
### DPDK command-line arguments
* `-c <hex_mask>`: bitmask specifying which physical cores the application
will use. Each bit corresponds to a physical core (0-`<max_cores>`).
* `-n <chan_no>` : number of memory channels to be used.
* `-m <mem_in_MB>`: total memory available to the application (in MB).
Please check section 3.1 of the
[DPDK App Guide](http://dpdk.org/doc/guides/testpmd_app_ug/run_app.html) for
more info about DPDK command-line arguments.
__NOTE: For now WARP17 supports at most 64 cores.__
### WARP17 command-line arguments
* `--qmap <port>.<hex_mask>`: bitmask specifying which physical cores will
control the physical port `<port>`.
* `--qmap-default max-c`: maximize the number of independent cores handling
each physical port.
* `--qmap-default max-q`: maximize the number of transmit queues per physical
port.
* `--tcb-pool-sz`: configure the size of the TCP control block pool (one TCB is
used per TCP connection endpoint). The size of the pool will be given by the
argument of this option multiplied by 1024. By default 10M TCBs are
allocated.
* `--ucb-pool-sz`: configure the size of the UDP control block pool (one UCB is
used per UDP connection endpoint). The size of the pool will be given by the
argument of this option multiplied by 1024. By default 10M UCBs are
allocated.
* `--mbuf-pool-sz`: configure the size of the packet pool. The size of the
pool will be given by the argument of this option multiplied by 1024. By
default 768K packets are allocated.
* `--mbuf-hdr-pool-sz`: configure the size of the packet headers pool. The
size of the pool will be given by the argument of this option multiplied by
1024. By default 512K packet headers are allocated.
* `--ring-if-pairs`: configure the number of _in-memory-ring-based_ interfaces.
__NOTE: please check section
[Using In-Memory-Ring-Based Interfaces](#using-in-memory-ring-based-interfaces)
for more information.__
* `--kni-ifs`: configure the number of _kni_ interfaces.
__NOTE: please check section
[Using Kernel Network Interface (KNI) Interfaces](#using-kernel-network-interface-kni-interfaces) for more information.__
* `--cmd-file=<file>`: CLI command file to be executed when the application
starts
__NOTE: Options qmap, qmap-default max-c/max-q, cannot be combined. Only one can
be passed at a given time.__
__NOTE: Users are encouraged to use the "qmap-default max-q" option whenever
ethernet ports are on the same socket as the PKT cores as this usually gives
the best performance!__
__NOTE: The lowest two cores will be dedicated to CLI and management processing,
and can not be assigned to a physical port for packet processing using the
`--qmap` option!__
### Example (on a x86 server with 32G RAM for WARP17 and 4 memory channels):
* Determine the number of physical cores:
```
$ lscpu | grep "CPU(s)"
CPU(s): 12
```
Decide how many cores WARP17 should use. In this example we consider WARP17
uses 8 cores:
- cores 6, 7, 8, 9, 10, 11 for packet processing
- cores 0, 1 for CLI and management
Based on that we determine the bitmask corresponding to the ids of the cores
we would like to use. The bit index in the bit mask corresponds to the core
id:
```
Bitmask: 0 0 0 0 1 1 1 1 1 1 0 0 0 0 1 1 => 0xFC3
Bit idx: 15 14 13 12 11 10 9 8 7 6 5 4 3 2 1 0
```
This corresponds to the `-c` command line argument.
* Determine the number of memory channels:
```
$ dmidecode | grep Channel
Bank Locator: P0_Node0_Channel0_Dimm0
Bank Locator: P0_Node0_Channel1_Dimm0
Bank Locator: P0_Node0_Channel2_Dimm0
Bank Locator: P0_Node0_Channel3_Dimm0 <<<< the system has 4 channels (0-3)
```
The `-n` command line argument should be usually set to the max number of
channels available in the system.
WARP17 should be using 32G of memory in this example so the `-m` command
line argument should be set to 32768.
In order for WARP17 to use the default core to port mapping while
maximizing the number of transmit queues the `--qmap-default` command line
argument should be set to `max-q`.
* _Optional_: the startup commands file can be specified through the `--cmd-file`
command line argument.
For our example this translates into the following command:
```
./build/warp17 -c FC3 -n 4 -m 32768 -- --qmap-default max-q --tcb-pool-sz 32768 --cmd-file cfg.txt
```
which will start WARP17 with:
* 8 cores to be used by the application (`-c FC3`):
- 2 cores will be used by CLI and MGMT
- 6 cores for processing packets
* 4 mem channels (`-n 4`)
* 32G of available memory (`-m 32768`)
* all 6 PKT cores will process all physical ports (`--qmap-default max-q`)
* allocates 32 million TCBs (`--tcb-pool-sz 32768`): for the configs in the
examples sections we need 20M TCBs, i.e., 10M clients and 10M servers.
* will execute the CLI commands in file cfg.txt after starting WARP17
### Using In-Memory-Ring-Based Interfaces
WARP17 can also be run when no physical interface is available. This is
especially useful when developing new features as it removes the requirement
of a specific hardware configuration. It also allows users to quickly try out
WARP17 on their own laptop/VM.
_In-Memory-Ring-Based Interfaces_ (let's just call them _ring interfaces_)
are always created in pairs. The two interfaces in a pair act as if they
would be physical interfaces connected back to back.
By default the support for ring interfaces is disabled. However the user can
easily enable it by compiling WARP17 with the following command:
```
make all-ring-if
```
Using the `--ring-if-pairs <number>` command line argument the user can
specify the number of ring interface pairs that WARP17 will create. Updating
the previous command line example we end up with:
```
./build/warp17 -c FC3 -n 4 -m 32768 -- --qmap-default max-q --tcb-pool-sz 32768 --ring-if-pairs 1 --cmd-file cfg.txt
```
This will start WARP17 and add a pair of ring interfaces connected back to
back.
The user can also use custom queue mappings for ring interfaces. The ring
interface pairs are always created after physical interfaces. This means that
their IDs will be allocated in order after physical IDs. For example:
```
./build/warp17 -c FC3 -n 4 -m 32768 -w 0000:82:00.0 -- --ring-if-pairs 1
```
This will start WARP17 with three interfaces (one physical and two ring
interfaces). The physical interface (`0000:82:00.0`) will have ID 0 while
the two ring interfaces will have IDs 1 and 2.
__NOTE: There's a restriction in place when using ring interfaces: the user
must make sure that the same number of TX/RX queues is created through qmaps
for both ring interfaces in a pair. Otherwise the command line will be
rejected.__
### Using Kernel Network Interface (KNI) Interfaces
WARP17 can also be run with a virtual interface into the Linux kernel. This
is especially useful when developing a new protocol and you want to test it
against a known working server or client. See the HTTP example below.
By default the support for KNI interfaces is disabled. However the user can
easily enable it by compiling WARP17 with the following command:
```
make all-kni-if
```
Using the `--kni-ifs <number>` command line argument the user can specify
the number of KNI interfaces that WARP17 will create. Updating
the previous command line example we end up with:
```
./build/warp17 -c FC3 -n 4 -m 32768 -- --qmap-default max-q --tcb-pool-sz 32768 --kni-ifs 2 --cmd-file cfg.txt
```
The user can also use custom queue mappings for KNI interfaces, however they
can only be assigned to a single core. The KNI interfaces are always created
after the physical and ring interfaces. This means that
their IDs will be allocated in order after physical IDs. For example:
```
./build/warp17 -c FC3 -n 4 -m 32768 -w 0000:82:00.0 -- --ring-if-pairs 1 --kni-ifs 2
```
This will start WARP17 with five interfaces (one physical, two ring
interfaces and two KNI interfaces). The physical interface (`0000:82:00.0`)
will have ID 0, the two ring interfaces will have IDs 1 and 2, and the two
KNI interfaces will have IDs 3 and 4.
For the example above the two Kernel interfaces will be named `warp3` and `warp4`,
so the naming convention is `warp<eth_port>`.
The following example will show how to use the KNI interface to get some HTTP
data from the built-in HTTP server through Linux. We assume no physical ports
are configured, if you have them make sure you increase all the referenced
ports:
* Load the `rte_kni` DPDK module, either as shown below or by running the
`$RTE_SDK/tools/dpdk-setup.sh` script and selecting option
`[18] Insert KNI module`:
```
sudo insmod $RTE_SDK/x86_64-native-linuxapp-gcc/kmod/rte_kni.ko
```
* Start WARP17:
```
./build/warp17 -c FC3 -n 4 -m 32768 -- --kni-ifs 1
```
* Configure the Linux kernel interface:
```
sudo ip link set warp0 up
sudo ip addr add 172.16.1.1/24 dev warp0
```
* Configure WARP17 as follows:
```
add tests l3_intf port 0 ip 172.16.1.2 mask 255.255.255.0
add tests l3_gw port 0 gw 172.16.1.1
add tests server tcp port 0 test-case-id 0 src 172.16.1.2 172.16.1.2 sport 80 80
set tests server http port 0 test-case-id 0 200-OK resp-size 2000
start tests port 0
```
* Now do a HTTP request using wget:
```
[WARP17:~]$ wget 172.16.1.2
--2016-10-25 11:40:43-- http://172.16.1.2/
Connecting to 172.16.1.2:80... connected.
HTTP request sent, awaiting response... 200 OK
Length: 2000 (2.0K)
Saving to: ‘index.html’
index.html 100%[================================================================>] 1.95K --.-KB/s in 0s
2016-10-25 11:40:43 (478 MB/s) - ‘index.html’ saved [2000/2000]
```
# CLI
## Test configuration commands
__NOTE: Only IPv4 is supported for now!__
* __Configure Ethernet Port MTU__:
```
set tests mtu port <eth_port> <mtu-value>
```
* __Add L3 interfaces__: configure an IP interface with the specified `ip`
address and `mask`. Currently only 10 IP interfaces are supported per port.
```
add tests l3_intf port <eth_port> ip <ip> mask <mask>
```
* __Add L3 default gateway__: configure 'gw' as the default gateway for
`eth_port`.
```
add tests l3_gw port <eth_port> gw <gw_ip>
```
* __Configure server test cases__: configure a server test case with ID
`test-case-id` on `eth_port`. The underlying L4 traffic can be TCP or UDP.
`ip_range` and `port_range` define the `<ip:port>` sockets on which the
servers will be listening. By default, the application (L5-L7) traffic will
be RAW traffic.
```
add tests server tcp|udp port <eth_port> test-case-id <tcid>
src <ip_range> sport <port_range>
```
* __Configure client test cases (per port)__: configure a client test case with
ID `test-case-id` on `eth_port`. The underlying L4 traffic can be TCP or UDP.
The source IP/l4-port and destination IP/l4-port ranges define the
`<src_ip, src_port:dst_ip, dst_port>` TCP/UDP connections that will be
established. By default, the application (L5-L7) traffic will be RAW traffic.
```
add tests client tcp|udp port <eth_port> test-case-id <tcid>
src <ip-range> sport <l4-ports>
dest <ip-range> dport <l4-ports>
```
* __Configure test profile timeouts__: each test has a specific timeout profile
which is defined by the initial delay after which client connections are
initiated, how long a connection should live and how long a connection should
stay down (after closing) before the client reconnects.
- __initial_delay__: amount of time (in seconds) the clients defined in the
test should wait before initiating a connection. `infinite` is allowed but
doesn't really make sense for the initial delay as it would cause the
clients to never initiate a connection.
```
set tests timeouts port <eth_port> test-case-id <tcid> init <timeout>|infinite
```
- __conn_uptime__: amount of time (in seconds) the clients defined in the
test should keep the connection up (and send application traffic) before
initiating a close. `infinite` allows the clients to stay up forever.
```
set tests timeouts port <eth_port> test-case-id <tcid> uptime <timeout>|infinite
```
- __conn_downtime__: amount of time (in seconds) the clients defined in the
test should keep the connection down after a close before initiating a
reconnect. `infinite` allows the clients to stay down forever.
```
set tests timeouts port <eth_port> test-case-id <tcid> downtime <timeout>|infinite
```
* __Configure test profile rates__: each test has a specific rate limiting
profile which is defined by the connection open, close and send rate.
- __setup rate__: number of connections that the clients in the test are
allowed to initiate __per second__. `infinite` removes any rate limiting
for initiating sessions (i.e., WARP17 will try to do it as fast as possible).
```
set tests rate port <eth_port> test-case-id <tcid> open <rate>|infinite
```
- __close rate__: number of connections that the clients in the test are
allowed to close __per second__. `infinite` removes any rate limiting for
closing sessions (i.e., WARP17 will try to do it as fast as possible).
```
set tests rate port <eth_port> test-case-id <tcid> close <rate>|infinite
```
- __data rate__: number of connections that the clients in the test are
allowed to send traffic on __per second__. `infinite` removes any rate
limiting for sending traffic (i.e., WARP17 will try to do it as fast as
possible).
```
set tests rate port <eth_port> test-case-id <tcid> send <rate>|infinite
```
* __Configure test criteria__: different criteria can be configured for each
test case. The criteria will be used for declaring a test as _PASSED_ or
_FAILED_.
- __run-time__: declare the test case with ID `tcid` as _PASSED_ after
`value` seconds.
```
set tests criteria port <eth_port> test-case-id <tcid> run-time <count>
```
- __servers-up__: declare the test case with ID `tcid` as _PASSED_ when
`count` servers are UP (listening for incoming connections).
```
set tests criteria port <eth_port> test-case-id <tcid> servers-up <count>
```
- __clients-up__: declare the test case with ID `tcid` as _PASSED_ when
`count` clients are UP (ready to initiate a connection).
```
set tests criteria port <eth_port> test-case-id <tcid> clients-up <count>
```
- __clients-established__: declare the test case with ID `tcid` as _PASSED_
when `count` clients have established a connection.
```
set tests criteria port <eth_port> test-case-id <tcid> clients-estab <count>
```
- __data-MB__: declare the test case with ID `tcid` as _PASSED_ when
`count` MB of data have been sent. __NOTE: NOT supported yet!__
```
set tests criteria port <eth_port> test-case-id <tcid> data-MB <count>
```
* __Configure tests as _asynchronous_:__ if multiple test cases are defined on
the same `eth_port`, by default, they will be executed in sequence (when a
test case ends the next one is started). To change the behaviour the user can
mark a test case as _async_ forcing the test engine to advance to the next
configured test case without waiting for the current one to finish.
```
set tests async port <eth_port> test-case-id <tcid>
```
* __Delete test cases__: delete a configured test case with ID `tcid` from port
`eth_port`.
__NOTE: if a test case is already running it has to be stopped
before it can be deleted!__
```
del tests port <eth_port> test-case-id <tcid>
```
* __Start tests__: start all the test cases configured on `eth_port`. Test
cases will be started in sequence (after the previous test case ended) except
for the ones that are marked as _async_.
```
start tests port <eth_port>
```
* __Stop tests__: stop all the test cases currently running on `eth_port`.
```
stop tests port <eth_port>
```
* __Customize TCP stack settings__: customize the behavior of the TCP stack
running on test case with ID `tcid` on port `eth_port`. The following
settings are customizable:
- `win-size`: the size of the TCP send window.
```
set tests tcp-options port <eth_port> test-case-id <tcid> win-size <size>
```
- `syn-retry`: number of times to retry sending `SYN` packets before
aborting the connection.
```
set tests tcp-options port <eth_port> test-case-id <tcid> syn-retry <cnt>
```
- `syn-ack-retry`: number of times to retry sending `SYN + ACK` packets
before aborting the connection.
```
set tests tcp-options port <eth_port> test-case-id <tcid> syn-ack-retry <cnt>
```
- `data-retry`: number of times to retry sending data packets before
aborting the connection.
```
set tests tcp-options port <eth_port> test-case-id <tcid> data-retry <cnt>
```
- `retry`: number of times to retry sending other control packets before
aborting the connection.
```
set tests tcp-options port <eth_port> test-case-id <tcid> retry <cnt>
```
- `rto`: retransmission timeout (in ms) to be used before retransmitting
a packet.
```
set tests tcp-options port <eth_port> test-case-id <tcid> rto <rto_ms>
```
- `fin-to`: `FIN` timeout (in ms) in order to avoid staying in state
`FIN-WAIT-II` forever.
```
set tests tcp-options port <eth_port> test-case-id <tcid> fin-to <fin_to_ms>
```
- `twait-to`: `TIME-WAIT` timeout (in ms) to wait before cleaning up the
connection.
```
set tests tcp-options port <eth_port> test-case-id <tcid> twait-to <twait_to_ms>
```
- `orphan-to`: `ORPHAN` timeout (in ms) in order to avoid staying in state
`FIN-WAIT-I` forever.
```
set tests tcp-options port <eth_port> test-case-id <tcid> orphan-to <orphan_to_us>
```
- `twait_skip`: boolean to decide if state `TIME-WAIT` should be skipped or
not.
```
set tests tcp-options port <eth_port> test-case-id <tcid> twait-skip <true|false>
```
## Application configuration and statistics commands
Currently only _RAW TCP_ (L5-L7 payload is random) and a sub-set of _HTTP 1.1_
(_GET_/_HEAD_ and _200 OK_/_404 NOT FOUND_) traffic is supported.
Before configuring the application behavior the user must have previously
defined the client or server test cases.
* __HTTP 1.1 application traffic__: the _HTTP 1.1_ application allows the user
to simulate different types of HTTP requests (for clients) and responses
(for servers):
- __HTTP 1.1 client configuration__: _GET_/_HEAD_ requests are supported. A
`req-size` must also be specified (0 is also valid) in order to define
the size of the body of the HTTP request.
```
set tests client http port <eth_port> test-case-id <tcid> GET|HEAD <host-name> <obj-name> req-size <req-size>
```
- __HTTP 1.1 server configuration__: _200 OK_/_404 NOT FOUND_ responses are
supported. A `resp-size` must also be specified (0 is also valid) in order
to define the size of the body of the HTTP response.
```
set tests server http port <eth_port> test-case-id <tcid> 200-OK|404-NOT-FOUND resp-size <resp-size>
```
- __HTTP 1.1 global stats__: display (detailed) statistics for the ethernet ports
currently in use (e.g., allocation errors/parse errors). If detailed
stats are requested then the information is displayed per port + lcore.
```
show http statistics [details]
```
* __RAW application traffic__: the RAW application emulates _request_ and
_response_ traffic. The client sends a request packet of a fixed configured
size and waits for a fixed size response packet from the server. The user
should configure the _request_/_response_ size for both client and server
test cases.
__NOTE: the user has to make sure that the _request_/_response_
sizes match between clients and servers!__
```
set tests client raw port <eth_port> test-case-id <tcid> data-req-plen <len> data-resp-plen <len>
```
```
set tests server raw port <eth_port> test-case-id <tcid> data-req-plen <len> data-resp-plen <len>
```
## Displaying test information
* __Current test configuration__: the current test configuration (including per
port L3 interfaces and default gateway) will be displayed for a given ethernet
port.
```
show tests config port <eth_port>
```
* __Current test state__: the current test state (including per test case
quick statistics) will be displayed for a given ethernet port.
```
show tests state port <eth_port>
```
* __Detailed test statistics__: the detailed test statistics will be displayed
for a given ethernet port and test-case.
```
show tests stats port <eth_port> test-case-id <tcid>
```
## Statistics and operational information
Different types of statistics can be dumped from the CLI. Currently all these
stats are not directly linked to any test case ID but they are aggregate per
ethernet port.
* __Port information and statistics__
- __Port information__: display general port information.
```
show port info
```
- __Port-core mappings__: display the mappings between ethernet port RX/TX
queues and lcore IDs. The socket IDs of the ports and lcores are also
displayed.
__NOTE: Having lcores handling ports that have their PCI bus on a different
socket than the lcore will affect performance!__
```
show port map
```
- __Port link information__: display the current link status of the ethernet
ports.
```
show port link
```
- __Port statistics__: display (detailed) statistics for the ethernet ports
currently in use (e.g., _received/transmitted packets/bytes_). If detailed
stats are requested then the information is displayed per port + lcore.
```
show port statistics [details]
```
* __Ethernet statistics__: display (detailed) statistics regarding the Ethernet
layer processing (e.g., _ethernet type, errors_). If detailed stats are
requested then the information is displayed per port + lcore.
```
show ethernet statistics [details]
```
* __ARP information and statistics__
- __ARP tables__: display the ARP tables for each ethernet port currently in
use. For now L3 interfaces are defined per ethernet port and not per test
case ID. This enforces a unique ARP table per port.
__NOTE: The current ARP implementation is limited in the sense that whenever tests
are started on a port, gratuitous ARPs are sent for all the L3 interfaces that were
defined on that port and an ARP request is sent for the default gateway.
All ARP requests and replies are properly processed but there is no timeout
mechanism in place for aging entries!__
```
show arp entries
```
- __ARP statistics__: display (detailed) statistics regarding ARP
processing (e.g., _request/response count, errors_). If detailed stats
are requested then the information is displayed per port + lcore.
```
show arp statistics [details]
```
* __Route statistics__: display (detailed) statistics for the routing module
(e.g., _interface/gateway creation/deletion count, errors_). The current
routing implementation is minimal and only handles L3 interface
creation/deletion and default gateways.
```
show route statistics [details]
```
* __IPv4 statistics__: display (detailed) statistics regarding IPv4 processing
(e.g., _received packet/bytes counts, per L4 protocol counters, errors_). If
detailed stats are requested then the information is displayed per
port + lcore.
```
show ipv4 statistics [details]
```
* __TCP statistics__: display (detailed) statistics regarding TCP processing
(e.g., _received packets/bytes counts, sent control/data packets/bytes counts,
allocation counters, errors_). If detailed stats are requested then the
information is displayed per port + lcore.
```
show tcp statistics [details]
```
* __TCP state machine statistics__: display (detailed) statistics regarding
TCP state machine processing (e.g., _per TCP state counters, retransmission
counters, missing sequence counters_). If detailed stats are requested then
the information is displayed per port + lcore.
```
show tsm statistics [details]
```
* __UDP statistics__: display (detailed) statistics regarding UDP processing
(e.g., _received packets/bytes counts, sent packets/bytes counts, allocation
counters, errors_). If detailed stats are requested then the information
is displayed per port + lcore.
```
show udp statistics [details]
```
* __Timer statistics__: there are currently three types of supported timers:
_fast_ retransmission timers, _slow_ timers (e.g., TIME-WAIT) and _test_
timers. _Test_ timers are used by the test engine and the others are used
by the TCP/UDP stack implementations. The command displays (detailed)
statistics regarding these types of timers. If detailed stats are requested
then the information is displayed per port + lcore.
```
show timer statistics [details]
```
## Infrastructure statistics
* __Message queues statistics__: all communication between lcores (PKT or CLI)
is done by means of message passing. Each lcore has two message queues (a
local and a global queue storing messages based on the originator of the
message). The command displays (detailed) statistics regarding the message
queues (e.g., _messages sent/received/allocated, errors_). If detailed stats
are requested then the information is displayed per port + lcore.
```
show msg statistics [details]
```
* __Memory statistics__: most of the memory used during the tests is allocated
from different mempools (mbufs, TCP/UDP control blocks). The command displays
(detailed) statistics regarding the usage of the memory pools. If detailed
stats are requested then the information is displayed per port + lcore.
```
show memory statistics [details]
```
# UI
`show tests ui` displays an UI which allows monitoring the test execution.
The UI is split in 4 main areas:
* test status area
* detailed test statistics area: Open/Closed/Send statistics and Application
statistics are displayed for the currently selected test case.
* detailed test configuration area: display the complete configuration of the
currently selected test case. The user can navigate between test cases
by pressing `n` for moving to the next test case and `b` for moving to the
previous test case. Switching between the configuration view and statistics
view can be done using the `c` and `s` keys.
* statistics area: for each of the ethernet ports various statistics will be
displayed for all levels of the TCP/IP stack.
# Example run
Some example configuration files can be found in the `examples/` directory. The
configuration files can either be passed as a command-line argument, `--cmd-file=<file>`, when running
WARP17 or executed directly in the CLI.
* __examples/test\_1\_raw\_tcp\_connection.cfg__: single TCP client-server
connection on a back to back setup using _RAW_ application data (requests of
size 100 and responses of size 200 bytes). The client connects immediately
when the test starts and sends requests continuously (and waits for responses)
until the `uptime` expires (5 seconds), closes the connection and reconnects
after the `downtime` expires (15 seconds).
* __examples/test\_2\_raw\_udp\_connection.cfg__: single UDP client-server
connection on a back to back setup using _RAW_ application data (requests of
size 100 and responses of size 200 bytes). The client connects with a delay
of 10 seconds (`init`) then sends requests continuously (and waits for
responses) until the `uptime` expires (5 seconds), closes the connection and
reconnects `downtime` expires (15 seconds).
* __examples/test\_3\_http\_multiple.cfg__: two client test cases each with a
single HTTP client. The first client test case sends _GET_ requests while
the second one sends _HEAD_ requests. The first test case is marked as
_async_ which will determine WARP17 to start both of them in parallel. The
HTTP server test case is configured to reply with _200 OK_.
* __examples/test\_4\_http\_10M_sessions.cfg__: single test case per port
configuring 10M HTTP sessions. The test case on port 0 will establish
connections from `10.0.0.1:[10000, 60000)` to `10.0.0.253:[6000, 6200)`.
On each of those connections HTTP _GET_ requests will be sent continuously
until the `uptime` of 30 seconds expires. Then the connections are closed.
After another 15 seconds of `downtime` the clients reconnect and start over.
* __examples/test\_5\_raw\_10M\_sessions.cfg__: single test case per port
configuring 10M RAW sessions. The test case on port 0 will establish
connections from `10.0.0.1:[10000, 60000)` to `10.0.0.253:[6000, 6200)`.
On each of those connections RAW requests of size 1K will be sent
continuously. `uptime` is configured as `infinite` so the clients will
stay UP forever. If the connection goes down (e.g., TCP session fails)
then the client will reconnect after a `downtime` of 10 seconds.
The RAW servers reply with responses of size 4K. The clients are also rate
limited to 1M sessions/s `open` and 900K sess/s `send` rate.
* __examples/test\_6\_http\_40M\_sessions.cfg__: single test case per port
configuring __40M HTTP sessions__. The test case on port 0 will establish
connections from `[10.0.0.1, 10.0.0.4]:[10000, 60000)` to
`10.0.0.253:[6000, 6200)`. On each of those connections HTTP _GET_
requests will be sent continuously.
* __examples/test\_7\_routing\_raw\_8M\_sesssions.cfg__: example config to
be used when having (multiple) routers in between the client and server
ports.
# Python scripting API
WARP17 offers an RPC-based API which allows users to write scripts and automate
the tests that WARP17 would run. WARP17 listens to incoming RPC connections on TCP
port `42424`.
The RPC object definitions can be found in the `api/*.proto` files. The main
RPC interface is defined in `api/warp17-service.proto`. All `*.proto` files are
compiled into Python classes when building WARP17. The generated code is saved
in the `api/generated/py` directory (one `.py` file for each `.proto`
definition file).
A short example about how to use the Python API can be found in
`examples/python/test_1_http_4M.py`. The example sets up 4M _HTTP_ clients
and servers, polls for statistics and stops the tests after a while.
# Perl scripting API
WARP17 can also be scripted through Perl by using the `Inline::Python` module.
A short example about how to use Perl to script WARP17 can be found in
`examples/perl/test_1_http_4M.pl`. Requirements for running the Perl scripts:
```
sudo apt-get install python2.7-dev
sudo apt-get install cpanminus
sudo cpanm Inline::Python
```
# Contributing a new L7 Application implementation
WARP17 currently supports _RAW TCP_ and _HTTP 1.1_ application traffic. Even
though we are currently working on adding support for more application
implementations, external contributions are welcome.
As a future development WARP17 will offer a socket API in order to allow
applications to be easily plugged in. Until then any new application must
be directly added to the WARP17 code. As an example, a good starting point
is the _HTTP 1.1_ implementation itself.
In general, an application called `foo` should implement the following:
* `warp17-app-foo.proto` definition file in `api/`: should contain
the application configuration definitions (for clients and servers) and
preferably application specific statistics definitions.
- `warp17-app-foo.proto` should be included in both
`warp17-client.proto` and `warp17-server.proto` and the application
definitions should be added to the `TcpUdpClient` and `TcpUdpServer`
structures:
```
message TcpUdpClient {
[...]
/* Add different App configs below as optionals. */
optional RawClient ac_raw = 8 [(warp17_union_anon) = true];
optional HttpClient ac_http = 9 [(warp17_union_anon) = true];
optional FooClient ac_foo = 10 [(warp17_union_anon) = true];
}
[...]
message TcpUdpServer {
[...]
/* Add different App configs below as optionals. */
optional RawServer as_raw = 2 [(warp17_union_anon) = true];
optional HttpServer as_http = 3 [(warp17_union_anon) = true];
optional FooServer as_foo = 4 [(warp17_union_anon) = true];
}
```
- `warp17-app-foo.proto` should also be included in
`warp17-test-case.proto` and the application specific statistics should
be added to the `TestCaseAppStats` definition:
```
message TestCaseAppStats {
[...]
optional RawStats tcas_raw = 1 [(warp17_union_anon) = true];
optional HttpStats tcas_http = 2 [(warp17_union_anon) = true];
optional FooStats tcas_foo = 3 [(warp17_union_anon) = true];
}
```
- a new entry for the application type should be added to the `AppProto`
enum in `warp17-common.proto`:
```
enum AppProto {
RAW = 0;
HTTP = 1;
FOO = 2;
APP_PROTO_MAX = 3;
}
```
- the new protocol buffer file (`warp17-app-foo.proto`) should also
be added to `api/Makefile.api`:
```
SRCS-y += tpg_test_app.c
SRCS-y += tpg_test_http_1_1_app.c
SRCS-y += tpg_test_raw_app.c
SRCS-y += tpg_test_foo_app.c
```
- include `warp17-app-foo.proto` in `tcp_generator.h`:
```
#include "warp17-app-raw.proto.xlate.h"
#include "warp17-app-http.proto.xlate.h"
#include "warp17-app-foo.proto.xlate.h"
```
* RPC WARP17 to protobuf translation code:
- a new case entry in `tpg_xlate_tpg_union_AppClient` where the client
translation function should be called:
```
case APP_PROTO__HTTP:
out->ac_http = rte_zmalloc("TPG_RPC_GEN", sizeof(*out->ac_http), 0);
if (!out->ac_http)
return -ENOMEM;
tpg_xlate_tpg_HttpClient(&in->ac_http, out->ac_http);
break;
case APP_PROTO__FOO:
out->ac_foo = rte_zmalloc("TPG_RPC_GEN", sizeof(*out->ac_foo), 0);
if (!out->ac_foo)
return -ENOMEM;
tpg_xlate_tpg_FooClient(&in->ac_foo, out->ac_foo);
break;
```
- a new case entry in `tpg_xlate_tpg_union_AppServer` where the server
translation function should be called:
```
case APP_PROTO__HTTP:
out->as_http = rte_zmalloc("TPG_RPC_GEN", sizeof(*out->as_http), 0);
if (!out->as_http)
return -ENOMEM;
tpg_xlate_tpg_HttpServer(&in->as_http, out->as_http);
break;
case APP_PROTO__FOO:
out->as_foo = rte_zmalloc("TPG_RPC_GEN", sizeof(*out->as_foo), 0);
if (!out->as_foo)
return -ENOMEM;
tpg_xlate_tpg_FooServer(&in->as_foo, out->as_foo);
break;
```
- a new case entry in `tpg_xlate_tpg_TestStatusResult` when translating
application statistics:
```
case APP_PROTO__HTTP:
out->tsr_app_stats->tcas_http = rte_zmalloc("TPG_RPC_GEN",
sizeof(*out->tsr_app_stats->tcas_http),
0);
if (!out->tsr_app_stats->tcas_http)
return -ENOMEM;
err = tpg_xlate_tpg_HttpStats(&in->tsr_app_stats.tcas_http,
out->tsr_app_stats->tcas_http);
if (err)
return err;
break;
case APP_PROTO__FOO:
out->tsr_app_stats->tcas_foo = rte_zmalloc("TPG_RPC_GEN",
sizeof(*out->tsr_app_stats->tcas_foo),
0);
if (!out->tsr_app_stats->tcas_foo)
return -ENOMEM;
err = tpg_xlate_tpg_FooStats(&in->tsr_app_stats.tcas_foo,
out->tsr_app_stats->tcas_foo);
if (err)
return err;
break;
```
* `appl/tpg_test_app.h` interface implementation:
- application `foo` should be added to the `app_data_t` definition in
`inc/appl/tpg_test_app.h`. Type `foo_app_t` should be defined in the
application headers and should represent a state storage for the
`foo` application. The state is part of the L4 control block structures
(TCB/UCB).
```
typedef struct app_data_s {
tpg_app_proto_t ad_type;
union {
raw_app_t ad_raw;
http_app_t ad_http;
foo_app_t ad_foo;
generic_app_t ad_generic;
};
} app_data_t;
```
- `foo` must also provide callback functions corresponding to the callback
types defined in `inc/appl/tpg_test_app.h`. The callbacks should be
added to the callback arrays in `src/appl/tpg_test_app.c`. These
functions will be called by the test engine whenever application
intervention is required:
- `app_default_cfg_cb_t`: should initialize the `foo` application
config to default values
- `app_validate_cfg_cb_t`: should validate the config corresponding to
the `foo` application
- `app_print_cfg_cb_t`: should display the part of the configuration
corresponding to the `foo` application by using the supplied printer
- `app_delete_cfg_cb_t`: will be called whenever a test case is deleted
so `foo` can cleanup anything it initialized for that test case.
- `app_init_cb_t`: will be called whenever a session is initialized
and should initialize the application state.
- `app_tc_start_stop_cb_t`: `foo` should define two callbacks (for
test case start and stop). The application should initialize and
cleanup any data that is required during the test case (e.g.,
predefined static data headers)
- `app_conn_up_cb_t`: will be called whenever a session has been
established
- `app_conn_down_cb_t`: will be called whenever a session closed (
either because the underlying connection went down or because the
application itself decided to close the connection)
- `app_deliver_cb_t`: will be called whenever there was data received
for the application to process. The received data is passed as an
mbuf chain. The callback should return the total number of bytes
that were consumed. For example, in the case of TCP these bytes will
be ACK-ed and removed from the receive window.
- `app_send_cb_t`: will be called whenever the test engine can send
data on the current connection. The application can decide at any
time that it would like to start sending or stop sending data by
notifying the test engine through the
`TEST_NOTIF_APP_CLIENT/SERVER_SEND_STOP/START` notifications.
The `app_send_cb_t` callback should return an `mbuf` chain pointing
to the data it would like to send.
In general, freeing the `mbuf` upon sending is the job of the
TCP/IP stack so the application must make sure that it doesn't
continue using the mbuf after passing it to the test engine.
__NOTE: However, for some applications, in order to avoid building
packets every time, the implementation might prefer to reuse data
templates (e.g., HTTP requests can be easily prebuilt when the test
case is started). In such a situation the application can mark the
mbufs as _STATIC_ through the `DATA_SET_STATIC` call which will
inform the test engine that it shouldn't free the data itself. The
application must ensure in such a case that the data itself is never
freed during the execution of the test case!__
- `app_data_sent_cb_t`: will be called to notify the application that
(part of) the data was sent. It might happen that not all the data
could be sent in one shot
- `app_stats_add_cb_t`: should aggregate application specific
statistics
- `app_stats_print_cb_t`: should print application specific statistics
using the supplied printer
- the `foo` application can request the test engine to perform
operations by sending the following notifications:
- `TEST_NOTIF_APP_CLIENT/SERVER_SEND_START`: notifies the test engine
that the application would like to send data (when possible) on the
current connection
- `TEST_NOTIF_APP_CLIENT/SERVER_SEND_STOP`: notifies the test engine
that the application has finished sending data (for now) on the
current connection
- `TEST_NOTIF_APP_CLIENT/SERVER_CLOSE`: notifies the test engine that
the application would like to close the connection
* CLI: the `foo` application can define its own CLI commands using the DPDK
cmdline infrastructure. These can be added to a local `cli_ctx` which can
be registered with the main CLI through a call to `cli_add_main_ctx`.
* module initialization: the `foo` application must implement two
module init functions:
- `foo_init`: should initialize global data to be used by the application
(e.g., CLI, statistics storage). `foo_init` should be called directly
from the `main` WARP17 function where all modules are initialized.
- `foo_lcore_init`: should initialize per core global data to be used by the
application (e.g., per core pointers to the statistics corresponding to
the current core). `foo_lcore_init` should be called from
`pkt_receive_loop` where all modules are initialized.
* example config: ideally, applications should also provide some
configuration examples which could go to the `examples/` directory.
* .dot file: applications will most likely be implemented as state machines.
A `.dot` file describing the state machine should be added to the
`dot/` directory
* tests: any new application shouldn't break any existing tests and __must__
have its own tests:
- a configuration and functionality test file in `ut/test_foo.py` which
should try to extensively cover all the code introduced by the
application
- one or more scaling test entries (method) in `ut/test_perf.py`
(class TestPerf) which should define the desired performance/scalability
values.
* commit messages: please make sure that commit messages follow the
`.git-commit.template` provided in the repository. In order to enforce this
template locally you can execute the following command:
```
git config commit.template ./.git-commit.template
```
# Release notes
For a summary of the currently supported functionalities please check the
RELEASE_NOTES file.
# Roadmap for future releases
* Additional L7 application implementations (e.g., _FTP_, _TFTP_, _SIP_).
* _VLAN_ Support.
* Socket API.
* Fault injection at various levels in the L2-L7 stack.
# Contact
Feel free to also check out the [WARP17 google group](https://groups.google.com/forum/#!forum/warp17).
For a list of maintainers and contributors please check the MAINTAINERS and
CONTRIBUTORS files.
# License
WARP17 is released under BSD 3-Clause license.
The license file can be found [here](./LICENSE).
|
jlijian3/warp17
|
README.md
|
Markdown
|
bsd-3-clause
| 53,339
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Xunit;
using Biggy;
using Biggy.Postgres;
namespace Tests.Postgres
{
    [Trait("Database Column Mapping", "")]
    public class PGList_Column_Mapping
    {
        // WHAT DOES THIS DO? Runs basic CRUD against a table with non-platform-conformant column names (underscores, spaces, case mismatch, etc)
        // Connection string entry name for the test database.
        public string _connectionStringName = "chinookPG";
        // List under test, backed by the deliberately mangled "wtf" table.
        PGList<Client> _clients;
        public PGList_Column_Mapping() {
            // Set up a table with mangled column names:
            this.SetUpWTFTable();
            _clients = new PGList<Client>(_connectionStringName, "wtf");
        }
        /// <summary>Sanity check: the fixture's table was actually created.</summary>
        [Fact(DisplayName = "Test Table Exists")]
        public void Test_Table_Exists() {
            bool exists = this.TableExists("wtf");
            Assert.True(exists);
        }
        /// <summary>A freshly created (empty) table loads as an empty list.</summary>
        [Fact(DisplayName = "Loads Empty List From Table")]
        public void Loads_FromTable_With_Mangled_Column_Names() {
            Assert.True(_clients.Count == 0);
        }
        /// <summary>Insert round-trip: the new row is readable after a full reload.</summary>
        [Fact(DisplayName = "Adds a New Record")]
        public void Adds_New_Record() {
            int initialCount = _clients.Count;
            var newMonkey = new Client() {
                LastName = "Jimbo",
                FirstName = "Jones",
                Email = "jatten@example.com"
            };
            _clients.Add(newMonkey);
            // Add() is expected to populate the serial PK back onto the object.
            int newID = newMonkey.ClientId;
            // Reload from scratch to be sure:
            _clients = new PGList<Client>(_connectionStringName, "wtf");
            var found = _clients.FirstOrDefault(c => c.ClientId == newID);
            Assert.True(found.ClientId == newID && _clients.Count > initialCount);
        }
        /// <summary>Update round-trip against the mangled column names.</summary>
        [Fact(DisplayName = "Updates a Record")]
        public void Updates_Record() {
            var newMonkey = new Client() {
                LastName = "Jones",
                FirstName = "Davey",
                Email = "jatten@example.com"
            };
            _clients.Add(newMonkey);
            int currentCount = _clients.Count;
            int newID = newMonkey.ClientId;
            // Reload from scratch to be sure:
            _clients = new PGList<Client>(_connectionStringName, "wtf");
            var found = _clients.FirstOrDefault(c => c.ClientId == newID);
            found.FirstName = "Mick";
            _clients.Update(found);
            // Updating must not add or remove rows.
            Assert.True(found.ClientId == newID && _clients.Count == currentCount);
        }
        // Number of rows used by the bulk-insert test below.
        int _qtyInserted = 100;
        /// <summary>AddRange inserts exactly _qtyInserted rows in one batch.</summary>
        [Fact(DisplayName = "Bulk Inserts Records")]
        public void Bulk_Inserts_Records() {
            int initialCount = _clients.Count();
            var rangeToAdd = new List<Client>();
            for(int i = 0; i < _qtyInserted; i++) {
                var newCustomer = new Client() {
                    FirstName = string.Format("John{0}", i.ToString()),
                    LastName = "Atten",
                    Email = "jatten@example.com"
                };
                rangeToAdd.Add(newCustomer);
            }
            int qtyAdded = _clients.AddRange(rangeToAdd);
            _clients.Reload();
            Assert.True(_clients.Count == initialCount + _qtyInserted);
        }
        /// <summary>Remove deletes the row from the backing table.</summary>
        [Fact(DisplayName = "Deletes a record")]
        public void Deletes_Record() {
            var newCustomer = new Client() {
                FirstName = "John",
                LastName = "Atten",
                Email = "jatten@example.com"
            };
            _clients.Add(newCustomer);
            int idToFind = newCustomer.ClientId;
            _clients.Reload();
            var found = _clients.FirstOrDefault(c => c.ClientId == idToFind);
            // After insert, no new record should be added:
            int initialCount = _clients.Count();
            _clients.Remove(found);
            _clients.Reload();
            Assert.True(_clients.Count < initialCount);
        }
        /// <summary>RemoveSet deletes every row matched by the LINQ criteria.</summary>
        [Fact(DisplayName = "Deletes a range of records by Criteria")]
        public void Deletes_Range_Of_Records() {
            var newClient = new Client() {
                FirstName = "John",
                LastName = "Atten",
                Email = "jatten@example.com"
            };
            _clients.Add(newClient);
            _clients.Reload();
            int initialCount = _clients.Count;
            var removeThese = _clients.Where(c => c.Email.Contains("jatten@"));
            _clients.RemoveSet(removeThese);
            Assert.True(_clients.Count < initialCount);
        }
        // HELPER METHODS:
        // Drop-and-recreate the "wtf" table so each test run starts clean.
        void SetUpWTFTable() {
            bool exists = this.TableExists("wtf");
            if (exists) {
                this.DropTable("wtf");
            }
            this.CreateWTFTable();
        }
        // NOTE(review): table name is interpolated into SQL; fine for this
        // fixture's hard-coded names, but not safe for untrusted input.
        void DropTable(string tableName) {
            string sql = string.Format("DROP TABLE {0}", tableName);
            var Model = new PGTable<dynamic>(_connectionStringName);
            Model.Execute(sql);
        }
        // Checks information_schema for a public-schema table of this name.
        bool TableExists(string tableName) {
            bool exists = false;
            string select = ""
                + "SELECT * FROM information_schema.tables "
                + "WHERE table_schema = 'public' "
                + "AND table_name = '{0}'";
            string sql = string.Format(select, tableName);
            var Model = new PGTable<dynamic>(_connectionStringName);
            var query = Model.Query<dynamic>(sql);
            if (query.Count() > 0) {
                exists = true;
            }
            return exists;
        }
        // Creates the test table with deliberately mangled column names:
        // mixed case ("CLient_Id"), a space ("Last Name"), snake_case
        // ("first_name") -- exactly what the column mapper must cope with.
        void CreateWTFTable() {
            string sql = ""
              + "CREATE TABLE wtf "
              + "(\"CLient_Id\" serial NOT NULL, "
              + "\"Last Name\" Text NOT NULL, "
              + "\"first_name\" Text NOT NULL, "
              + "\"Email\" Text NOT NULL, "
              + "CONSTRAINT wtf_pkey PRIMARY KEY (\"CLient_Id\"))";
            var Model = new PGTable<dynamic>(_connectionStringName);
            Model.Execute(sql);
        }
    }
}
|
upta/biggy
|
Tests/Postgres/PGList_Column_Mapping.cs
|
C#
|
bsd-3-clause
| 5,314
|
<?php namespace Milkyway\SS\Assets\Extensions;
/**
* Milkyway Multimedia
* Controller.php
*
* @package milkyway-multimedia/ss-mwm-assets
* @author Mellisa Hankins <mell@milkywaymultimedia.com.au>
*/
use Milkyway\SS\Assets\Requirements;
use Extension;
class Controller extends Extension
{
    /**
     * Disable cache busted file extensions for some classes (usually @LeftAndMain)
     */
    function onBeforeInit()
    {
        foreach (Requirements::$disable_cache_busted_file_extensions_for as $excludedClass) {
            if (!is_a($this->owner, $excludedClass)) {
                continue;
            }
            Requirements::$use_cache_busted_file_extensions = false;
        }
    }

    /**
     * @inheritdoc
     */
    public function onAfterInit()
    {
        $this->blockDefaults();
        $this->additionalLeftAndMainRequirements();
    }

    /**
     * Block files using the @Injector, unless the owner is exempt
     */
    protected function blockDefaults()
    {
        $isExempt = false;

        foreach (Requirements::$disable_blocked_files_for as $exemptClass) {
            if (is_a($this->owner, $exemptClass)) {
                $isExempt = true;
                break;
            }
        }

        if ($isExempt) {
            return;
        }

        singleton('require')->blockDefault();
    }

    /**
     * Block some items from ajax
     */
    protected function additionalLeftAndMainRequirements()
    {
        if ($this->owner instanceof \KickAssets) {
            return;
        }

        singleton('require')->blockAjax('htmlEditorConfig');
        singleton('require')->blockAjax('googlesuggestfield-script');
    }
}
|
milkyway-multimedia/ss-mwm-assets
|
code/Extensions/Controller.php
|
PHP
|
bsd-3-clause
| 1,479
|
import random
import time
import sys
import Csound
import subprocess
import base64
import hashlib
import matrixmusic
csd = None
oscillator = None
buzzer = None
voice = None
truevoice = None
song_publisher = None
def add_motif(instrument, req):
    # Append every note of ``req.score`` to the global CSD score for the
    # given instrument, advancing the clock by ``req.internote_delay``
    # between entries.  "P" entries are pauses: they are skipped but still
    # consume time.  Mutates the module-global ``csd``.
    global csd
    time = req.motif_start_time
    for note in req.score:
        if note != "P":
            csd.score(instrument.note(time,
                                      req.note_duration,
                                      note,
                                      req.motif_amplitude))
        # NOTE(review): clock advance reconstructed at loop level so pauses
        # take time -- confirm against the original indentation.
        time += req.internote_delay
def handle_create_song(req):
    """Render ``req`` into audio: Csound score -> WAV -> tagged Ogg -> playback.

    Side effects: rewrites the module-global ``csd``, writes temp.csd /
    temp.wav / <song_name>.ogg in the working directory, and shells out to
    the external tools csound, oggenc, vorbiscomment and ogg123.
    """
    global csd, oscillator, buzzer, voice
    global song_publisher
    # All intermediate files share the 'temp' basename.
    s = 'temp'
    csd = Csound.CSD('%s.csd' % s)
    csd.orchestra(oscillator, buzzer, voice)
    # Route each motif to the instrument it names (unknown names are dropped).
    for motif in req.motifs:
        if motif.instrument == 'oscil':
            add_motif(oscillator, motif)
        elif motif.instrument == 'buzzer':
            add_motif(buzzer, motif)
        elif motif.instrument == 'voice':
            add_motif(voice, motif)
    csd.output()
    # Render the .csd to a WAV with csound (-d: suppress displays).
    args = ['csound', '-d', '%s.csd' % s]
    subprocess.call(args)
    # Keep the .csd source text so it can be embedded in the Ogg tags below.
    f = open('%s.csd' % s)
    csd_string = f.read()
    f.close()
    song_name = '%s.ogg' % req.song_name
    # Encode the rendered WAV to Ogg Vorbis.
    args = ['oggenc', '-o', song_name, '%s.wav' % s]
    subprocess.call(args)
    # Tag the Ogg; the full Csound source rides along in a CSOUND comment.
    args = ['vorbiscomment', '-a', song_name,
            '-t', "ARTIST=%s" % req.artist,
            '-t', "TITLE=%s" % req.song_name,
            '-t', "ALBUM=%s" % req.album,
            '-t', "GENRE=%s" % 'Electronica',
            '-t', "CSOUND=%s" % csd_string]
    subprocess.call(args)
    # Play the result (blocks until playback finishes).
    args = ['ogg123', song_name]
    subprocess.call(args)
class Motif(object):
    """Plain data holder describing one repeated melodic phrase."""

    def __init__(self, motif_start_time, motif_repeat, motif_amplitude,
                 score, note_duration, internote_delay, instrument):
        # Timing of the phrase.
        self.motif_start_time = motif_start_time
        self.note_duration = note_duration
        self.internote_delay = internote_delay
        # Playback parameters.
        self.instrument = instrument
        self.motif_amplitude = motif_amplitude
        self.motif_repeat = motif_repeat
        # The notes themselves ("P" entries are pauses).
        self.score = score
class Request(object):
    """Song-creation request: metadata plus the motifs to render."""

    def __init__(self, song_name, artist, album, motifs):
        # Tag metadata for the resulting Ogg file.
        self.artist = artist
        self.album = album
        self.song_name = song_name
        # Sequence of Motif objects making up the piece.
        self.motifs = motifs
def heads():
    """Fair coin flip: True with probability 0.5."""
    return random.random() < 0.5
def biasedFlip(p):
    """Bernoulli trial: return True with probability ``p``."""
    roll = random.random()
    return roll < p
def selectInstrument():
    """Pick one of the two synth instruments with equal probability."""
    return 'oscil' if heads() else 'buzzer'
def selectInterval():
    """Fixed (note_duration, internote_delay) pair, in seconds."""
    note_duration = 0.15
    internote_delay = 0.05
    return note_duration, internote_delay
def triggerCreate(song_name, artist, album, motifs):
    """Wrap the arguments in a Request and hand it to the song renderer."""
    req = Request(song_name, artist, album, motifs)
    handle_create_song(req)
def random_note():
    """Return a random note name like "C#4": base letter + optional sharp + octave.

    E and B never get a sharp.  Octaves range over "2".."5".
    """
    bases = ["A", "B", "C", "D", "E", "F", "G"]
    unsharpable = ["E", "B"]  # notes with no sharp variant
    # Materialize as a list: on Python 3, map() returns an iterator, which
    # random.choice() cannot index.  (Identical behavior on Python 2.)
    octaves = [str(o) for o in range(2, 6)]
    base = random.choice(bases)
    # Only offer "#" when the base note can take one.
    # (Dead locals from the original -- an immediately overwritten
    # ``mods`` list and an unused ``unflatable`` list -- were removed.)
    mods = [""]
    if base not in unsharpable:
        mods.append("#")
    mod = random.choice(mods)
    octave = random.choice(octaves)
    return base + mod + octave
def random_motif(start_time):
    """Build a Motif starting at ``start_time`` with a randomly generated score.

    The note pool is a fixed set of pitches; ``matrixmusic`` builds a
    pair-based score from it which is then repeated 5 times.
    """
    #notes = " ".join([random_note() for i in range(10)])
    #notes = "A3 B3 D4 E4 F#4 A4 B4 D5 E5 F#5 A5 B5 D6 E6 F#6 P".split(" ")
    notes = "C3 C#3 E3 F3 G3 G#3 B4 C4 C#4 E4 F4 G4 G#4".split(" ")
    # 15 entries drawn by matrixmusic from the pool, repeated 5 times.
    score = matrixmusic.create_pair_score(notes, 15) * 5
    print("Random score: " + str(score))
    # Candidate (instrument, note_duration, internote_delay) triples; only
    # the 'voice' variants are currently active.
    opts = [("voice", 1.0, 1.5),
            #("oscil", 1.0, 1.5),
            ("voice", 3.0, 1.5)]
            #("oscil", 3.0, 1.5)]
    opt = random.choice(opts)
    # repeat=12, amplitude=0.05 -- presumably tuned by ear; see Motif.
    return Motif(start_time, 12, 0.05, score, opt[1], opt[2], opt[0])
if __name__ == "__main__":
    # Usage: generator_matrix.py <artist> <album name>
    if len(sys.argv) < 3:
        # Parenthesized single-argument print works on both Python 2 and 3.
        print("Usage: %s <artist> <album name>" % sys.argv[0])
        exit()
    else:
        artist = sys.argv[1]
        album = sys.argv[2]
    # (The original had a ``global`` statement here; at module scope it is a
    # no-op, so it was removed.)
    oscillator = Csound.oscil()
    buzzer = Csound.buzz()
    voice = Csound.fmvoice()
    #voice = Csound.voice()
    # Generate songs indefinitely (xrange: this script targets Python 2 --
    # TODO confirm before running under Python 3).
    for i in xrange(1, 16384):
        song_title = "song_%d" % i
        #motifs = [ Motif(0.0, 12, 0.32, "A3 B3 D4 E4 F#4 A4 B4 D5 E5 F#5 A5 B5 D6 E6 F#6", 0.15, 0.05, selectInstrument()) ]
        # Use ``k`` so the comprehension does not shadow the song counter ``i``
        # (Python 2 leaks comprehension variables into the enclosing scope).
        motifs = [random_motif(k * 0.8) for k in range(3)]
        # if biasedFlip(0.8):
        #    motifs.append(Motif(3.0, 10, 0.32, "A3 B3 D4 E4 F#4 A4 B4 D5 E5 F#5 A5 B5 D6 E6 F#6", a, b, selectInstrument()))
        # if biasedFlip(0.9):
        #    motifs.append(Motif(6.0, 4, 0.10, "A2 B2 D3 D3 F#3 A3 B3 D4 E4 F#4 A4 B4 D5 E5 F#5", 0.3, 0.1, selectInstrument()))
        triggerCreate(song_title, artist, album, motifs)
        print("Created song %s" % song_title)
        # Pause between songs so playback/encoding can settle.
        time.sleep(10)
|
andrewtron3000/jampy
|
generator_matrix.py
|
Python
|
bsd-3-clause
| 4,610
|
<?php
namespace app\controllers;
use app\models\UserModel;
use Yii;
use app\components\AdminController;
use yii\web\Response;
use yii\widgets\ActiveForm;
class UserManagmentController extends AdminController
{
    /**
     * Lists all users.
     *
     * @return string rendered index view
     */
    public function actionIndex()
    {
        $dataProvider = UserModel::search();

        return $this->render('index', [
            'dataProvider' => $dataProvider
        ]);
    }

    /**
     * Creates a new user (signup scenario).
     *
     * @return mixed validation JSON (ajax), a redirect, or the rendered form
     */
    public function actionCreate()
    {
        // Match the imported alias's exact case: PHP resolves class names
        // case-insensitively, but PSR-4 autoloaders may not on
        // case-sensitive filesystems.
        $model = new UserModel;
        $model->scenario = UserModel::SCENARIO_SIGNUP;

        return $this->handleForm(
            $model,
            Yii::t('app', 'User has been created'),
            Yii::t('app', 'User has not been created')
        );
    }

    /**
     * Lets the logged-in user change their password.
     *
     * @return mixed validation JSON (ajax), a redirect, or the rendered form
     */
    public function actionChangePassword()
    {
        $model = UserModel::findIdentity(Yii::$app->user->id);
        $model->scenario = UserModel::SCENARIO_CHANGE_PASSWORD;

        return $this->handleForm(
            $model,
            Yii::t('app', 'Password has been updated'),
            Yii::t('app', 'Password has not been updated')
        );
    }

    /**
     * Lets the logged-in user edit their profile.
     *
     * @return mixed validation JSON (ajax), a redirect, or the rendered form
     */
    public function actionProfile()
    {
        $model = UserModel::findIdentity(Yii::$app->user->id);
        $model->scenario = UserModel::SCENARIO_PROFILE;

        return $this->handleForm(
            $model,
            Yii::t('app', 'User has been updated'),
            Yii::t('app', 'User has not been updated')
        );
    }

    /**
     * Shared load/validate/save workflow used by all the form actions.
     *
     * Ajax requests get JSON validation results; otherwise the model is
     * saved, a flash message is set and the user is redirected to index.
     * When no form data was posted, the form view is rendered.
     *
     * @param UserModel $model          model (with scenario already set)
     * @param string    $successMessage flash message on successful save
     * @param string    $errorMessage   flash message on failed save
     * @return mixed
     */
    private function handleForm($model, $successMessage, $errorMessage)
    {
        if ($model->load(Yii::$app->request->post())) {
            if (Yii::$app->request->isAjax) {
                Yii::$app->response->format = Response::FORMAT_JSON;
                return ActiveForm::validate($model);
            }
            if ($model->save()) {
                $this->setFlash('success', $successMessage);
            } else {
                $this->setFlash('error', $errorMessage);
            }
            return $this->redirect(['index']);
        }

        return $this->render('form', ['model' => $model]);
    }
}
|
Per1phery/wholetthedogout
|
controllers/UserManagmentController.php
|
PHP
|
bsd-3-clause
| 2,925
|
import logging
from pylons import request, response, session, tmpl_context as c, url
from pylons.controllers.util import abort, redirect
from pylons.templating import render_mako_def
from kai.lib.base import BaseController, render
from kai.lib.helpers import textilize
from kai.lib.serialization import render_feed
from kai.model import Comment
log = logging.getLogger(__name__)
class CommentsController(BaseController):
    """Endpoints for document comments plus a recent-comments feed."""

    def preview(self):
        """Render a live textile preview of the posted comment body."""
        data = request.POST['content']
        return textilize(data)

    def create(self, doc_id):
        """Attach a comment by the logged-in user to document ``doc_id``.

        401 when not logged in; 404 when the target document is missing.
        """
        if not c.user:
            abort(401)
        # Ensure the doc exists
        doc = self.db.get(doc_id)
        if not doc:
            abort(404)
        comment = Comment(doc_id=doc_id, displayname=c.user.displayname,
                          email=c.user.email, human_id=c.user.id,
                          content=request.POST['content'])
        comment.store(self.db)
        return ''

    def delete(self, id):
        """Delete comment ``id``. Admin-only; 404 unless it is a Comment doc."""
        if not c.user or not c.user.in_group('admin'):
            abort(401)
        # Ensure doc exists
        doc = self.db.get(id)
        if not doc:
            abort(404)
        # Make sure its a comment
        if not doc['type'] == 'Comment':
            abort(404)
        self.db.delete(doc)
        return ''

    def index(self, format='html'):
        """Serve the 20 most recent comments as an atom/rss feed.

        An HTML listing is deliberately not offered (404).
        """
        if format == 'html':
            abort(404)
        elif format in ['atom', 'rss']:
            # Pull comments and grab the docs with them for their info.
            # (Two dead locals from the original -- an unused ``displayname``
            # and an unused ``id`` -- were removed.)
            comments = list(Comment.by_anytime(c.db, descending=True, limit=20))
            commentdata = []
            for comment_doc in comments:
                comment = {}
                comment['created'] = comment_doc.created
                doc = c.db.get(comment_doc.doc_id)
                if doc['type'] == 'Traceback':
                    comment['title'] = '%s: %s' % (doc['exception_type'], doc['exception_value'])
                else:
                    comment['title'] = doc.get('title', '-- No title --')
                comment['type'] = doc['type']
                comment['link'] = render_mako_def(
                    '/widgets.mako', 'comment_link', title=comment['title'],
                    comment_id=comment_doc.id, doc=doc, type=doc['type'],
                    urlonly=True).strip()
                comment['doc_id'] = comment_doc.doc_id
                comment['description'] = textilize(comment_doc.content)
                commentdata.append(comment)
            # NOTE(review): the atom content type is sent even when
            # format == 'rss'; confirm whether 'application/rss+xml' was
            # intended for that case before changing it.
            response.content_type = 'application/atom+xml'
            return render_feed(
                title="PylonsHQ Comment Feed", link=url.current(qualified=True),
                description="Recent PylonsHQ comments", objects=commentdata,
                pub_date='created')
|
Pylons/kai
|
kai/controllers/comments.py
|
Python
|
bsd-3-clause
| 2,956
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" /><meta name="generator" content="Docutils 0.17.1: http://docutils.sourceforge.net/" />
<meta name="viewport" content="width=device-width,initial-scale=1">
<meta http-equiv="x-ua-compatible" content="ie=edge">
<meta name="lang:clipboard.copy" content="Copy to clipboard">
<meta name="lang:clipboard.copied" content="Copied to clipboard">
<meta name="lang:search.language" content="en">
<meta name="lang:search.pipeline.stopwords" content="True">
<meta name="lang:search.pipeline.trimmer" content="True">
<meta name="lang:search.result.none" content="No matching documents">
<meta name="lang:search.result.one" content="1 matching document">
<meta name="lang:search.result.other" content="# matching documents">
<meta name="lang:search.tokenizer" content="[\s\-]+">
<link href="https://fonts.gstatic.com/" rel="preconnect" crossorigin>
<link href="https://fonts.googleapis.com/css?family=Roboto+Mono:400,500,700|Roboto:300,400,400i,700&display=fallback" rel="stylesheet">
<style>
body,
input {
font-family: "Roboto", "Helvetica Neue", Helvetica, Arial, sans-serif
}
code,
kbd,
pre {
font-family: "Roboto Mono", "Courier New", Courier, monospace
}
</style>
<link rel="stylesheet" href="../_static/stylesheets/application.css"/>
<link rel="stylesheet" href="../_static/stylesheets/application-palette.css"/>
<link rel="stylesheet" href="../_static/stylesheets/application-fixes.css"/>
<link rel="stylesheet" href="../_static/fonts/material-icons.css"/>
<meta name="theme-color" content="#3f51b5">
<script src="../_static/javascripts/modernizr.js"></script>
<title>statsmodels.regression.quantile_regression.QuantRegResults.HC3_se — statsmodels</title>
<link rel="icon" type="image/png" sizes="32x32" href="../_static/icons/favicon-32x32.png">
<link rel="icon" type="image/png" sizes="16x16" href="../_static/icons/favicon-16x16.png">
<link rel="manifest" href="../_static/icons/site.webmanifest">
<link rel="mask-icon" href="../_static/icons/safari-pinned-tab.svg" color="#919191">
<meta name="msapplication-TileColor" content="#2b5797">
<meta name="msapplication-config" content="../_static/icons/browserconfig.xml">
<link rel="stylesheet" href="../_static/stylesheets/examples.css">
<link rel="stylesheet" href="../_static/stylesheets/deprecation.css">
<link rel="stylesheet" type="text/css" href="../_static/pygments.css" />
<link rel="stylesheet" type="text/css" href="../_static/material.css" />
<link rel="stylesheet" type="text/css" href="../_static/graphviz.css" />
<link rel="stylesheet" type="text/css" href="../_static/plot_directive.css" />
<script data-url_root="../" id="documentation_options" src="../_static/documentation_options.js"></script>
<script src="../_static/jquery.js"></script>
<script src="../_static/underscore.js"></script>
<script src="../_static/doctools.js"></script>
<script crossorigin="anonymous" integrity="sha256-Ae2Vz/4ePdIu6ZyI/5ZGsYnb+m0JlOmKPjt6XZ9JJkA=" src="https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.4/require.min.js"></script>
<link rel="shortcut icon" href="../_static/favicon.ico"/>
<link rel="author" title="About these documents" href="../about.html" />
<link rel="index" title="Index" href="../genindex.html" />
<link rel="search" title="Search" href="../search.html" />
<link rel="next" title="statsmodels.regression.quantile_regression.QuantRegResults.aic" href="statsmodels.regression.quantile_regression.QuantRegResults.aic.html" />
<link rel="prev" title="statsmodels.regression.quantile_regression.QuantRegResults.HC2_se" href="statsmodels.regression.quantile_regression.QuantRegResults.HC2_se.html" />
</head>
<body dir=ltr
data-md-color-primary=indigo data-md-color-accent=blue>
<svg class="md-svg">
<defs data-children-count="0">
<svg xmlns="http://www.w3.org/2000/svg" width="416" height="448" viewBox="0 0 416 448" id="__github"><path fill="currentColor" d="M160 304q0 10-3.125 20.5t-10.75 19T128 352t-18.125-8.5-10.75-19T96 304t3.125-20.5 10.75-19T128 256t18.125 8.5 10.75 19T160 304zm160 0q0 10-3.125 20.5t-10.75 19T288 352t-18.125-8.5-10.75-19T256 304t3.125-20.5 10.75-19T288 256t18.125 8.5 10.75 19T320 304zm40 0q0-30-17.25-51T296 232q-10.25 0-48.75 5.25Q229.5 240 208 240t-39.25-2.75Q130.75 232 120 232q-29.5 0-46.75 21T56 304q0 22 8 38.375t20.25 25.75 30.5 15 35 7.375 37.25 1.75h42q20.5 0 37.25-1.75t35-7.375 30.5-15 20.25-25.75T360 304zm56-44q0 51.75-15.25 82.75-9.5 19.25-26.375 33.25t-35.25 21.5-42.5 11.875-42.875 5.5T212 416q-19.5 0-35.5-.75t-36.875-3.125-38.125-7.5-34.25-12.875T37 371.5t-21.5-28.75Q0 312 0 260q0-59.25 34-99-6.75-20.5-6.75-42.5 0-29 12.75-54.5 27 0 47.5 9.875t47.25 30.875Q171.5 96 212 96q37 0 70 8 26.25-20.5 46.75-30.25T376 64q12.75 25.5 12.75 54.5 0 21.75-6.75 42 34 40 34 99.5z"/></svg>
</defs>
</svg>
<input class="md-toggle" data-md-toggle="drawer" type="checkbox" id="__drawer">
<input class="md-toggle" data-md-toggle="search" type="checkbox" id="__search">
<label class="md-overlay" data-md-component="overlay" for="__drawer"></label>
<a href="#generated/statsmodels.regression.quantile_regression.QuantRegResults.HC3_se" tabindex="1" class="md-skip"> Skip to content </a>
<header class="md-header" data-md-component="header">
<nav class="md-header-nav md-grid">
<div class="md-flex navheader">
<div class="md-flex__cell md-flex__cell--shrink">
<a href="../index.html" title="statsmodels"
class="md-header-nav__button md-logo">
<img src="../_static/statsmodels-logo-v2-bw.svg" height="26"
alt="statsmodels logo">
</a>
</div>
<div class="md-flex__cell md-flex__cell--shrink">
<label class="md-icon md-icon--menu md-header-nav__button" for="__drawer"></label>
</div>
<div class="md-flex__cell md-flex__cell--stretch">
<div class="md-flex__ellipsis md-header-nav__title" data-md-component="title">
<span class="md-header-nav__topic">statsmodels v0.14.0.dev0 (+325)</span>
<span class="md-header-nav__topic"> statsmodels.regression.quantile_regression.QuantRegResults.HC3_se </span>
</div>
</div>
<div class="md-flex__cell md-flex__cell--shrink">
<label class="md-icon md-icon--search md-header-nav__button" for="__search"></label>
<div class="md-search" data-md-component="search" role="dialog">
<label class="md-search__overlay" for="__search"></label>
<div class="md-search__inner" role="search">
<form class="md-search__form" action="../search.html" method="get" name="search">
<input type="text" class="md-search__input" name="q" placeholder="Search"
autocapitalize="off" autocomplete="off" spellcheck="false"
data-md-component="query" data-md-state="active">
<label class="md-icon md-search__icon" for="__search"></label>
<button type="reset" class="md-icon md-search__icon" data-md-component="reset" tabindex="-1">

</button>
</form>
<div class="md-search__output">
<div class="md-search__scrollwrap" data-md-scrollfix>
<div class="md-search-result" data-md-component="result">
<div class="md-search-result__meta">
Type to start searching
</div>
<ol class="md-search-result__list"></ol>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="md-flex__cell md-flex__cell--shrink">
<div class="md-header-nav__source">
<a href="https://github.com/statsmodels/statsmodels" title="Go to repository" class="md-source" data-md-source="github">
<div class="md-source__icon">
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 24 24" width="28" height="28">
<use xlink:href="#__github" width="24" height="24"></use>
</svg>
</div>
<div class="md-source__repository">
statsmodels
</div>
</a>
</div>
</div>
<script src="../_static/javascripts/version_dropdown.js"></script>
<script>
var json_loc = "../../versions-v2.json",
target_loc = "../../",
text = "Versions";
$( document ).ready( add_version_dropdown(json_loc, target_loc, text));
</script>
</div>
</nav>
</header>
<div class="md-container">
<nav class="md-tabs" data-md-component="tabs">
<div class="md-tabs__inner md-grid">
<ul class="md-tabs__list">
<li class="md-tabs__item"><a href="../user-guide.html" class="md-tabs__link">User Guide</a></li>
<li class="md-tabs__item"><a href="../regression.html" class="md-tabs__link">Linear Regression</a></li>
<li class="md-tabs__item"><a href="statsmodels.regression.quantile_regression.QuantRegResults.html" class="md-tabs__link">statsmodels.regression.quantile_regression.QuantRegResults</a></li>
</ul>
</div>
</nav>
<main class="md-main">
<div class="md-main__inner md-grid" data-md-component="container">
<div class="md-sidebar md-sidebar--primary" data-md-component="navigation">
<div class="md-sidebar__scrollwrap">
<div class="md-sidebar__inner">
<nav class="md-nav md-nav--primary" data-md-level="0">
<label class="md-nav__title md-nav__title--site" for="__drawer">
<a href="../index.html" title="statsmodels" class="md-nav__button md-logo">
<img src="../_static/statsmodels-logo-v2-bw.svg" alt=" logo" width="48" height="48">
</a>
<a href="../index.html"
title="statsmodels">statsmodels v0.14.0.dev0 (+325)</a>
</label>
<div class="md-nav__source">
<a href="https://github.com/statsmodels/statsmodels" title="Go to repository" class="md-source" data-md-source="github">
<div class="md-source__icon">
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 24 24" width="28" height="28">
<use xlink:href="#__github" width="24" height="24"></use>
</svg>
</div>
<div class="md-source__repository">
statsmodels
</div>
</a>
</div>
<ul class="md-nav__list">
<li class="md-nav__item">
<a href="../install.html" class="md-nav__link">Installing statsmodels</a>
</li>
<li class="md-nav__item">
<a href="../gettingstarted.html" class="md-nav__link">Getting started</a>
</li>
<li class="md-nav__item">
<a href="../user-guide.html" class="md-nav__link">User Guide</a>
<ul class="md-nav__list">
<li class="md-nav__item">
<a href="../user-guide.html#background" class="md-nav__link">Background</a>
</li>
<li class="md-nav__item">
<a href="../user-guide.html#regression-and-linear-models" class="md-nav__link">Regression and Linear Models</a>
<ul class="md-nav__list">
<li class="md-nav__item">
<a href="../regression.html" class="md-nav__link">Linear Regression</a>
</li>
<li class="md-nav__item">
<a href="../glm.html" class="md-nav__link">Generalized Linear Models</a>
</li>
<li class="md-nav__item">
<a href="../gee.html" class="md-nav__link">Generalized Estimating Equations</a>
</li>
<li class="md-nav__item">
<a href="../gam.html" class="md-nav__link">Generalized Additive Models (GAM)</a>
</li>
<li class="md-nav__item">
<a href="../rlm.html" class="md-nav__link">Robust Linear Models</a>
</li>
<li class="md-nav__item">
<a href="../mixed_linear.html" class="md-nav__link">Linear Mixed Effects Models</a>
</li>
<li class="md-nav__item">
<a href="../discretemod.html" class="md-nav__link">Regression with Discrete Dependent Variable</a>
</li>
<li class="md-nav__item">
<a href="../mixed_glm.html" class="md-nav__link">Generalized Linear Mixed Effects Models</a>
</li>
<li class="md-nav__item">
<a href="../anova.html" class="md-nav__link">ANOVA</a>
</li>
<li class="md-nav__item">
<a href="../other_models.html" class="md-nav__link">Other Models <code class="xref py py-mod docutils literal notranslate"><span class="pre">othermod</span></code></a>
</li></ul>
</li>
<li class="md-nav__item">
<a href="../user-guide.html#time-series-analysis" class="md-nav__link">Time Series Analysis</a>
</li>
<li class="md-nav__item">
<a href="../user-guide.html#other-models" class="md-nav__link">Other Models</a>
</li>
<li class="md-nav__item">
<a href="../user-guide.html#statistics-and-tools" class="md-nav__link">Statistics and Tools</a>
</li>
<li class="md-nav__item">
<a href="../user-guide.html#data-sets" class="md-nav__link">Data Sets</a>
</li>
<li class="md-nav__item">
<a href="../user-guide.html#sandbox" class="md-nav__link">Sandbox</a>
</li></ul>
</li>
<li class="md-nav__item">
<a href="../examples/index.html" class="md-nav__link">Examples</a>
</li>
<li class="md-nav__item">
<a href="../api.html" class="md-nav__link">API Reference</a>
</li>
<li class="md-nav__item">
<a href="../about.html" class="md-nav__link">About statsmodels</a>
</li>
<li class="md-nav__item">
<a href="../dev/index.html" class="md-nav__link">Developer Page</a>
</li>
<li class="md-nav__item">
<a href="../release/index.html" class="md-nav__link">Release Notes</a>
</li>
</ul>
</nav>
</div>
</div>
</div>
<div class="md-sidebar md-sidebar--secondary" data-md-component="toc">
<div class="md-sidebar__scrollwrap">
<div class="md-sidebar__inner">
<nav class="md-nav md-nav--secondary">
<ul class="md-nav__list" data-md-scrollfix="">
<li class="md-nav__item"><a class="md-nav__extra_link" href="../_sources/generated/statsmodels.regression.quantile_regression.QuantRegResults.HC3_se.rst.txt">Show Source</a> </li>
<li id="searchbox" class="md-nav__item"></li>
</ul>
</nav>
</div>
</div>
</div>
<div class="md-content">
<article class="md-content__inner md-typeset" role="main">
<section id="statsmodels-regression-quantile-regression-quantregresults-hc3-se">
<h1 id="generated-statsmodels-regression-quantile-regression-quantregresults-hc3-se--page-root">statsmodels.regression.quantile_regression.QuantRegResults.HC3_se<a class="headerlink" href="#generated-statsmodels-regression-quantile-regression-quantregresults-hc3-se--page-root" title="Permalink to this headline">¶</a></h1>
<dl class="py attribute">
<dt class="sig sig-object py" id="statsmodels.regression.quantile_regression.QuantRegResults.HC3_se">
<span class="sig-prename descclassname"><span class="pre">QuantRegResults.</span></span><span class="sig-name descname"><span class="pre">HC3_se</span></span><a class="headerlink" href="#statsmodels.regression.quantile_regression.QuantRegResults.HC3_se" title="Permalink to this definition">¶</a></dt>
<dd></dd></dl>
</section>
</article>
</div>
</div>
</main>
</div>
<footer class="md-footer">
<div class="md-footer-nav">
<nav class="md-footer-nav__inner md-grid">
<a href="statsmodels.regression.quantile_regression.QuantRegResults.HC2_se.html" title="statsmodels.regression.quantile_regression.QuantRegResults.HC2_se"
class="md-flex md-footer-nav__link md-footer-nav__link--prev"
rel="prev">
<div class="md-flex__cell md-flex__cell--shrink">
<i class="md-icon md-icon--arrow-back md-footer-nav__button"></i>
</div>
<div class="md-flex__cell md-flex__cell--stretch md-footer-nav__title">
<span class="md-flex__ellipsis">
<span
class="md-footer-nav__direction"> Previous </span> statsmodels.regression.quantile_regression.QuantRegResults.HC2_se </span>
</div>
</a>
<a href="statsmodels.regression.quantile_regression.QuantRegResults.aic.html" title="statsmodels.regression.quantile_regression.QuantRegResults.aic"
class="md-flex md-footer-nav__link md-footer-nav__link--next"
rel="next">
<div class="md-flex__cell md-flex__cell--stretch md-footer-nav__title"><span
class="md-flex__ellipsis"> <span
class="md-footer-nav__direction"> Next </span> statsmodels.regression.quantile_regression.QuantRegResults.aic </span>
</div>
<div class="md-flex__cell md-flex__cell--shrink"><i
class="md-icon md-icon--arrow-forward md-footer-nav__button"></i>
</div>
</a>
</nav>
</div>
<div class="md-footer-meta md-typeset">
<div class="md-footer-meta__inner md-grid">
<div class="md-footer-copyright">
<div class="md-footer-copyright__highlight">
© Copyright 2009-2019, Josef Perktold, Skipper Seabold, Jonathan Taylor, statsmodels-developers.
</div>
Last updated on
Feb 23, 2022.
<br/>
Created using
<a href="http://www.sphinx-doc.org/">Sphinx</a> 4.4.0.
and
<a href="https://github.com/bashtage/sphinx-material/">Material for
Sphinx</a>
</div>
</div>
</div>
</footer>
<script src="../_static/javascripts/application.js"></script>
<script>app.initialize({version: "1.0.4", url: {base: ".."}})</script>
</body>
</html>
|
statsmodels/statsmodels.github.io
|
devel/generated/statsmodels.regression.quantile_regression.QuantRegResults.HC3_se.html
|
HTML
|
bsd-3-clause
| 18,597
|
<?php
namespace lukisongroup\widget\models;
use Yii;
use yii\base\Model;
use yii\data\ActiveDataProvider;
use lukisongroup\widget\models\DailyJobModul;
/**
 * DailyJobModulSearch represents the model behind the search form about `lukisongroup\widget\models\DailyJobModul`.
 *
 * Typical (Gii-generated) usage: a controller passes the request query
 * parameters to search() and feeds the returned data provider to a GridView.
 */
class DailyJobModulSearch extends DailyJobModul
{
    /**
     * Validation rules for the search form: ID and STATUS must be integers,
     * every other searchable attribute is accepted as-is ("safe").
     *
     * @inheritdoc
     */
    public function rules()
    {
        return [
            [['ID', 'STATUS'], 'integer'],
            [['MODUL_NM', 'MODUL_DEST', 'MODUL_POLICY', 'USER_ID', 'CREATE_BY', 'CREATE_AT', 'UPDATE_BY', 'UPDATE_AT'], 'safe'],
        ];
    }
    /**
     * @inheritdoc
     */
    public function scenarios()
    {
        // bypass scenarios() implementation in the parent class
        return Model::scenarios();
    }
    /**
     * Creates data provider instance with search query applied
     *
     * @param array $params request parameters (e.g. $_GET) to load into the model
     *
     * @return ActiveDataProvider provider over DailyJobModul records matching the filters
     */
    public function search($params)
    {
        $query = DailyJobModul::find();
        // add conditions that should always apply here
        $dataProvider = new ActiveDataProvider([
            'query' => $query,
        ]);
        $this->load($params);
        if (!$this->validate()) {
            // uncomment the following line if you do not want to return any records when validation fails
            // $query->where('0=1');
            return $dataProvider;
        }
        // grid filtering conditions: exact match for numeric/timestamp columns.
        // andFilterWhere() ignores attributes that are empty/null.
        $query->andFilterWhere([
            'ID' => $this->ID,
            'STATUS' => $this->STATUS,
            'CREATE_AT' => $this->CREATE_AT,
            'UPDATE_AT' => $this->UPDATE_AT,
        ]);
        // substring (LIKE) match for the text columns
        $query->andFilterWhere(['like', 'MODUL_NM', $this->MODUL_NM])
            ->andFilterWhere(['like', 'MODUL_DEST', $this->MODUL_DEST])
            ->andFilterWhere(['like', 'MODUL_POLICY', $this->MODUL_POLICY])
            ->andFilterWhere(['like', 'USER_ID', $this->USER_ID])
            ->andFilterWhere(['like', 'CREATE_BY', $this->CREATE_BY])
            ->andFilterWhere(['like', 'UPDATE_BY', $this->UPDATE_BY]);
        return $dataProvider;
    }
}
|
adem-team/advanced
|
lukisongroup/widget/models/DailyJobModulSearch.php
|
PHP
|
bsd-3-clause
| 2,136
|
class Gadgets(object):
    """
    A Gadgets object providing managing of various gadgets for display on
    analytics dashboard.

    Gadgets are registered with the Gadgets using the register() method.
    """
    def __init__(self):
        # Maps gadget id -> gadget object.
        self._registry = {}

    def get_gadget(self, id):
        """Return the gadget registered under ``id``; raises KeyError if absent."""
        return self._registry[id]

    def get_gadgets(self):
        """Return all registered gadget objects."""
        return self._registry.values()

    def register(self, gadget):
        """
        Registers a gadget object under its ``id`` attribute.

        Registering a gadget whose id is already present silently replaces
        the previous gadget.  (The old docstring claimed an
        ``AlreadyRegistered`` exception was raised here, but no such check
        was ever performed -- the documentation was corrected to match the
        actual behavior rather than introducing a breaking change.)
        """
        self._registry[gadget.id] = gadget

# Module-level singleton used as the global gadget registry.
gadgets = Gadgets()
|
praekelt/django-analytics
|
analytics/sites.py
|
Python
|
bsd-3-clause
| 657
|
//////////////////////////////////////////////////////////////////////////////
//
// (C) Copyright Ion Gaztanaga 2005-2009. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
// See http://www.boost.org/libs/interprocess for documentation.
//
//////////////////////////////////////////////////////////////////////////////
#ifndef BOOST_INTERPROCESS_EXCEPTIONS_HPP
#define BOOST_INTERPROCESS_EXCEPTIONS_HPP
#if (defined _MSC_VER) && (_MSC_VER >= 1200)
# pragma once
#endif
#include <boost/interprocess/detail/config_begin.hpp>
#include <boost/interprocess/detail/workaround.hpp>
#include <boost/interprocess/errors.hpp>
#include <stdexcept>
#include <new>
//!\file
//!Describes exceptions thrown by interprocess classes
namespace boost {
namespace interprocess {
//!This class is the base class of all exceptions
//!thrown by boost::interprocess
class interprocess_exception : public std::exception
{
   public:
   //!Constructs from a library error code. The message assignment can
   //!itself throw (std::bad_alloc); it is swallowed so that constructing
   //!the exception object never throws.
   interprocess_exception(error_code_t ec = other_error )
      :  m_err(ec)
   {
      try   {  m_str = "boost::interprocess_exception::library_error";  }
      catch (...) {}
   }

   //!Constructs from a native (OS) error code; the human-readable system
   //!message for that code is looked up into m_str.
   interprocess_exception(native_error_t sys_err_code)
      :  m_err(sys_err_code)
   {
      try   {  fill_system_message(m_err.get_native_error(), m_str);  }
      catch (...) {}
   }

   //!Constructs from a full error_info. Only fills the message when a
   //!native error code is present; otherwise m_str stays empty (the
   //!library-error fallback below is intentionally commented out).
   interprocess_exception(const error_info &err_info)
      :  m_err(err_info)
   {
      try{
         if(m_err.get_native_error() != 0){
            fill_system_message(m_err.get_native_error(), m_str);
         }/*
         else{
            m_str = "boost::interprocess_exception::library_error";
         }*/
      }
      catch(...){}
   }

   virtual ~interprocess_exception() throw(){}

   //!Returns the stored message (may be empty, see the error_info ctor).
   virtual const char * what() const throw()
   {  return m_str.c_str(); }

   //!Returns the underlying OS error code (0 if none).
   native_error_t get_native_error()const { return m_err.get_native_error(); }

   // Note: a value of other_error implies a library (rather than system) error
   error_code_t   get_error_code()  const { return m_err.get_error_code(); }

   /// @cond
   private:
   error_info        m_err;
   std::string       m_str;
   /// @endcond
};
//!This is the exception thrown by shared interprocess_mutex family when a deadlock situation
//!is detected or when using a interprocess_condition the interprocess_mutex is not locked
class lock_exception : public interprocess_exception
{
   public:
   //!Tags the base exception with the lock_error error code.
   lock_exception()
      :  interprocess_exception(lock_error)
   {}

   //!Fixed message; does not depend on the base-class m_str.
   virtual const char* what() const throw()
   {  return "boost::interprocess::lock_exception"; }
};
//!This is the exception thrown by named interprocess_semaphore when a deadlock situation
//!is detected or when an error is detected in the post/wait operation
/*
class sem_exception : public interprocess_exception
{
public:
sem_exception()
: interprocess_exception(lock_error)
{}
virtual const char* what() const throw()
{ return "boost::interprocess::sem_exception"; }
};
*/
//!This is the exception thrown by synchronization objects when there is
//!an error in a wait() function
/*
class wait_exception : public interprocess_exception
{
public:
virtual const char* what() const throw()
{ return "boost::interprocess::wait_exception"; }
};
*/
//!This exception is thrown when a named object is created
//!in "open_only" mode and the resource was not already created
/*
class not_previously_created : public interprocess_exception
{
public:
virtual const char* what() const throw()
{ return "boost::interprocess::not_previously_created"; }
};
*/
//!This exception is thrown when a memory request can't be
//!fulfilled.
//!Note: this is boost::interprocess::bad_alloc, a distinct type from
//!std::bad_alloc (it derives from interprocess_exception instead).
class bad_alloc : public interprocess_exception
{
 public:
   virtual const char* what() const throw()
      {  return "boost::interprocess::bad_alloc";  }
};
} // namespace interprocess {
} // namespace boost
#include <boost/interprocess/detail/config_end.hpp>
#endif // BOOST_INTERPROCESS_EXCEPTIONS_HPP
|
benkaraban/anima-games-engine
|
LibsExternes/Includes/boost/interprocess/exceptions.hpp
|
C++
|
bsd-3-clause
| 4,160
|
<?php
/**
* Interface for JSON-RPC sub processors.
*
* @package stubbles
* @subpackage service_jsonrpc_subprocessors
* @version $Id: stubJsonRpcSubProcessor.php 2222 2009-06-09 21:55:06Z mikey $
*/
stubClassLoader::load('net::stubbles::ioc::stubInjector',
'net::stubbles::ipo::request::stubRequest',
'net::stubbles::ipo::response::stubResponse',
'net::stubbles::ipo::session::stubSession',
'net::stubbles::lang::stubProperties'
);
/**
 * Interface for JSON-RPC sub processors.
 *
 * A sub processor performs one sub task of the JSON-RPC request handling
 * cycle; implementations receive the full request/session/response context
 * plus the DI container and the JSON-RPC configuration.
 *
 * @package     stubbles
 * @subpackage  service_jsonrpc_subprocessors
 */
interface stubJsonRpcSubProcessor extends stubObject
{
    /**
     * does the processing of the subtask
     *
     * @param  stubRequest     $request   current request
     * @param  stubSession     $session   current session
     * @param  stubResponse    $response  current response, to be populated by the implementation
     * @param  stubInjector    $injector  injector instance (DI container)
     * @param  stubProperties  $config    json-rpc config
     */
    public function process(stubRequest $request, stubSession $session, stubResponse $response, stubInjector $injector, stubProperties $config);
}
?>
|
stubbles/stubbles-1.x
|
src/main/php/net/stubbles/service/jsonrpc/subprocessors/stubJsonRpcSubProcessor.php
|
PHP
|
bsd-3-clause
| 1,220
|
{% extends "admin/layout.html" %}
{% block title %}{{ _("Cache Options") }}{% endblock %}
{% block contents %}
<h1>{{ _("Cache Options") }}</h1>
{% call form() %}
<p>{% trans %}
Zine comes with a cache system that allows you to survive a sudden
storm of visitors. The caching system has different backends you can
choose from and also different settings that you should set depending on
the type of server you have and the number of visitors.
{% endtrans %}</p>
<p>{% trans %}
Keep in mind that if caching is enabled templates will be cached too and
as a result of that, they are not reloaded. If you are trying to develop
plugins it might be a good idea to disable all caching.
{% endtrans %}</p>
<script type="text/javascript">
$(function() {
$('select').change(function() {
var activeItem = $(this).val();
$('div.optionbox').each(function() {
$(this).is('#' + activeItem + '-options')
? $(this).show() : $(this).hide();
});
}).change();
});
</script>
<h2>{{ _("Cache System") }}</h2>
<p>{{ _("Currently Zine supports three caching systems:") }}</p>
<ul>
<li>{% trans %}<strong>Simple Cache</strong>: The simple cache is a very
basic memory cache inside the server process. This cache works only
if you have a persistent environment. Roughly speaking this cache
works better the higher the number of threads in a process and the
lower the number or processes. If you have the chance to use a
memcached you should not use the simple cache.{% endtrans %}</li>
<li>{% trans %}<strong>memcached</strong>: This cache system uses one
or multiple remote memcached servers for storing the cache
information. It requires at least one running memcached daemon. This
is useful for high traffic sites.{% endtrans %}</li>
<li>{% trans %}<strong>Filesystem</strong>: This cache system stores the
cache information on the filesystem. If IO is a problem for you,
    you should not use this cache. However, in most cases the
    filesystem should be fast enough.{% endtrans %}</li>
</ul>
<p>{% trans %}Per default no cache system is active.{% endtrans %}</p>
<p>{{ form.cache_system() }}</p>
<div class="optionbox" id="memcached-options">
<h2>{{ _("Memcached Options") }}</h2>
<p>{% trans %}
In order to use the memcached system you have to provide the address
of at least one machine running memcached. For further details have
a look at <a href="http://www.danga.com/memcached/">the memcached
webpage</a>. Separate multiple addresses with commas. This also
requires either the <a href="http://gijsbert.org/cmemcache/"
>cmemcache</a> or <a href="ftp://ftp.tummy.com/pub/python-memcached/"
>python-memcached</a> module.
{% endtrans %}</p>
<p>{% trans %}
If you for example have a memcached running on localhost (127.0.0.1)
port 11211 you would have to enter <code>127.0.0.1:11211</code> into
the following text field.
{% endtrans %}</p>
<p>{{ form.memcached_servers(size=60) }}</p>
</div>
<div class="optionbox" id="filesystem-options">
<h2>{{ _("Filesystem Options") }}</h2>
<p>{% trans %}
When using the filesystem cache you can control where Zine puts
all the cache data. This path is relative to your Zine instance
folder and will be created automatically by Zine. Keep in mind
that if you change that path and old cache data is left in the old
folder, Zine won't clean up the data left for you.
{% endtrans %}</p>
<p>{% trans %}
One thing you have to keep in mind is that the cache path <em>must</em>
be a directory that only Zine uses for file management. The
Zine code cleans up there from time to time and it could delete
files that are otherwise not tracked by Zine. This is especially
the case if you try to cache into <code>/tmp</code> or a similar
folder that all kinds of applications use for storing temporary files.
{% endtrans %}</p>
<p>{{ form.filesystem_cache_path(size=40) }}</p>
</div>
<h2>{{ _("General Cache Settings") }}</h2>
<p>{% trans %}
The following configuration values are cache system-independent. You
can use them to fine-tune the caching. The timeout is the number of
seconds an item is cached per default. Some items have different cache
timeouts though. If “eager caching” is enabled the cache system will
cache a lot more but it will have visible side-effects. For example
new blog posts won't appear on the index or in the feed for up to the
default timeout.
{% endtrans %}</p>
<dl>
{{ form.cache_timeout.as_dd() }}
{{ form.enable_eager_caching.as_dd() }}
</dl>
<div class="actions">
<input type="submit" value="{{ _('Save') }}">
<input type="submit" name="clear_cache" value="{{ _('Clear Cache') }}">
</div>
{% endcall %}
{% endblock %}
|
mitsuhiko/zine
|
zine/templates/admin/cache.html
|
HTML
|
bsd-3-clause
| 5,230
|
#!/usr/bin/env python
#
# Written by Chema Garcia (aka sch3m4)
# Contact: chema@safetybits.net || http://safetybits.net || @sch3m4
#
import serial.tools.list_ports
from SerialCrypt import Devices
def locateDevice(devid):
	'''
	Returns the serial port path of the arduino if found, or None if it isn't connected
	'''
	# Each comports() entry is indexable as (device path, description, hwid);
	# match on the hardware-id prefix and return the first hit.
	for port in serial.tools.list_ports.comports():
		if port[2].startswith(devid):
			return port[0]
	return None
def main():
	# Report the serial port path (or None) for each known device class.
	# Python 2 print statements; device ids come from SerialCrypt.Devices.
	print "HSM Device: %s" % locateDevice ( Devices.DEVICE_CRYPT_ID )
	print "uToken Device: %s" % locateDevice ( Devices.DEVICE_UTOKEN_ID )
	print "Debug Device: %s" % locateDevice ( Devices.DEVICE_DEBUG_ID )

if __name__ == "__main__":
	main()
|
sch3m4/SerialCrypt
|
apps/locate.py
|
Python
|
bsd-3-clause
| 724
|
dojo.provide("dojox.charting.plot2d.Bars");
dojo.require("dojox.charting.plot2d.common");
dojo.require("dojox.charting.plot2d.Base");
dojo.require("dojox.lang.utils");
dojo.require("dojox.lang.functional");
dojo.require("dojox.lang.functional.reversed");
(function(){
	// Short aliases for the dojox helper packages used below.
	var df = dojox.lang.functional, du = dojox.lang.utils,
		dc = dojox.charting.plot2d.common,
		purgeGroup = df.lambda("item.purgeGroup()");

	dojo.declare("dojox.charting.plot2d.Bars", dojox.charting.plot2d.Base, {
		defaultParams: {
			hAxis: "x",		// use a horizontal axis named "x"
			vAxis: "y",		// use a vertical axis named "y"
			gap: 0,			// gap between columns in pixels
			shadows: null	// draw shadows
		},
		optionalParams: {},	// no optional parameters

		// Merge caller kwArgs over the defaults and record the axis names.
		constructor: function(chart, kwArgs){
			this.opt = dojo.clone(this.defaultParams);
			du.updateWithObject(this.opt, kwArgs);
			this.series = [];
			this.hAxis = this.opt.hAxis;
			this.vAxis = this.opt.vAxis;
		},

		// Compute axis ranges. Bars run horizontally, so the h/v statistics
		// gathered from the series are swapped (and padded by half a slot)
		// before being handed to the base-class _calc.
		calculateAxes: function(dim){
			var stats = dc.collectSimpleStats(this.series), t;
			stats.hmin -= 0.5;
			stats.hmax += 0.5;
			t = stats.hmin, stats.hmin = stats.vmin, stats.vmin = t;
			t = stats.hmax, stats.hmax = stats.vmax, stats.vmax = t;
			this._calc(dim, stats);
			return this;
		},

		// Render one rectangle per datum for every dirty series.
		render: function(dim, offsets){
			if(this.dirty){
				// full redraw: purge and recreate all per-series groups
				dojo.forEach(this.series, purgeGroup);
				this.cleanGroup();
				var s = this.group;
				df.forEachRev(this.series, function(item){ item.cleanGroup(s); });
			}
			// suppress the gap when bars would be too thin to show one
			// (note: f is declared but unused in the original code)
			var t = this.chart.theme, color, stroke, fill, f,
				gap = this.opt.gap < this._vScaler.scale / 3 ? this.opt.gap : 0;
			for(var i = this.series.length - 1; i >= 0; --i){
				var run = this.series[i];
				if(!this.dirty && !run.dirty){ continue; }
				run.cleanGroup();
				var s = run.group;
				if(!run.fill || !run.stroke){
					// need autogenerated color
					color = run.dyn.color = new dojo.Color(t.next("color"));
				}
				stroke = run.stroke ? run.stroke : dc.augmentStroke(t.series.stroke, color);
				fill = run.fill ? run.fill : dc.augmentFill(t.series.fill, color);
				// bars grow from the baseline (0, clipped to the axis lower bound)
				var baseline = Math.max(0, this._hScaler.bounds.lower),
					xoff = offsets.l + this._hScaler.scale * (baseline - this._hScaler.bounds.lower),
					yoff = dim.height - offsets.b - this._vScaler.scale * (1.5 - this._vScaler.bounds.lower) + gap;
				for(var j = 0; j < run.data.length; ++j){
					var v = run.data[j],
						width = this._hScaler.scale * (v - baseline),
						height = this._vScaler.scale - 2 * gap,
						w = Math.abs(width);
					// skip sub-pixel bars entirely
					if(w >= 1 && height >= 1){
						var shape = s.createRect({
							x: xoff + (width < 0 ? width : 0),
							y: yoff - this._vScaler.scale * j,
							width: w, height: height
						}).setFill(fill).setStroke(stroke);
						run.dyn.fill = shape.getFill();
						run.dyn.stroke = shape.getStroke();
					}
				}
				run.dirty = false;
			}
			this.dirty = false;
			return this;
		}
	});
})();
|
lortnus/zf1
|
externals/dojo/dojox/charting/plot2d/Bars.js
|
JavaScript
|
bsd-3-clause
| 2,907
|
#!/bin/bash
# Rebuild all collections of stop words.
#
# Usage: ./rebuild-collections.sh [data-directory/]
# For each file in the data directory, strip the extension, take the part of
# the name after the first '-' as the language code, and feed the file to
# ./bin/merge-stop-words.
#
# Fix: all expansions are now quoted so paths containing spaces or glob
# characters are not word-split or expanded. (NOTE: iterating `ls` output
# still splits on whitespace inside individual file names; the data files
# are assumed not to contain spaces.)
path=${1-mots_vides/datas/}

for lang in $(ls "$path")
do
    file=$(echo "$lang" | cut -f1 -d.)    # drop the file extension
    lang=$(echo "$file" | cut -f2 -d-)    # second '-'-separated field = language code
    ./bin/merge-stop-words "$lang" "$path$file.txt"
done
|
Fantomas42/mots-vides
|
rebuild-collections.sh
|
Shell
|
bsd-3-clause
| 230
|
// ============================================================================
#ifndef OSTAP_GSL_UTILS_H
#define OSTAP_GSL_UTILS_H 1
// ============================================================================
// Include files
// ============================================================================
// STD&STL
// ============================================================================
#include <ostream>
// ============================================================================
// GSL
// ============================================================================
#include "gsl/gsl_vector.h"
#include "gsl/gsl_matrix.h"
// =============================================================================
/** @file Ostap/GSL_utils.h
* utilities for GSL
*/
// =============================================================================
namespace Ostap
{
  // ==========================================================================
  namespace Utils
  {
    // ========================================================================
    /** print GSL-vector to the stream
     *  @param v the vector
     *  @param s the stream
     *  @return the stream
     *  @author Vanya BELYAEV Ivan.Belyaev@itep.ru
     *  @date 2012-05-28
     */
    std::ostream& toStream
    ( const gsl_vector&  v ,
      std::ostream&      s ) ;
    // ========================================================================
    /** print GSL-matrix to the stream
     *  @param m the matrix
     *  @param s the stream
     *  @return the stream
     *  @author Vanya BELYAEV Ivan.Belyaev@itep.ru
     *  @date 2012-05-28
     */
    std::ostream& toStream
    ( const gsl_matrix&  m ,
      std::ostream&      s ) ;
    // ========================================================================
  } //                                        end of namespace Ostap::Utils
  // ==========================================================================
} //                                                end of namespace Ostap
// ============================================================================
/// print operator: stream a gsl_vector via Ostap::Utils::toStream
inline std::ostream& operator<<( std::ostream& s , const gsl_vector& v )
{ return Ostap::Utils::toStream ( v , s ) ; }
// ============================================================================
/// print operator: stream a gsl_matrix via Ostap::Utils::toStream
inline std::ostream& operator<<( std::ostream& s , const gsl_matrix& m )
{ return Ostap::Utils::toStream ( m , s ) ; }
// ============================================================================
// The END
// ============================================================================
#endif // OSTAP_GSL_UTILS_H
// ============================================================================
|
OstapHEP/ostap
|
source/include/Ostap/GSL_utils.h
|
C
|
bsd-3-clause
| 2,853
|
// Package compression defines a response compressing Handler.
// It compresses the body of the http response sent back to a client.
package compression
import (
"compress/gzip"
"io"
"net/http"
"strings"
"sync"
"github.com/atdiar/xhttp"
)
// Gzipper defines the structure of the response compressing Handler.
type Gzipper struct {
	pool *sync.Pool      // useful here to recycle gzip buffers
	skip map[string]bool // upper-case HTTP methods for which compression is disabled
	next xhttp.Handler   // next handler in the chain, invoked by ServeHTTP
}
// NewHandler returns a response compressing Handler.
// Every standard HTTP method starts with compression enabled; use Skip to
// turn it off per method.
func NewHandler() Gzipper {
	return Gzipper{
		skip: map[string]bool{
			"GET":     false,
			"POST":    false,
			"PUT":     false,
			"PATCH":   false,
			"DELETE":  false,
			"HEAD":    false,
			"OPTIONS": false,
		},
		pool: &sync.Pool{New: func() interface{} { return gzip.NewWriter(nil) }},
	}
}
// Skip is used to disable gzip compression for a given http method.
// The method name is case-insensitive; passing an unknown method panics.
func (g Gzipper) Skip(method string) Gzipper {
	m := strings.ToUpper(method)
	if _, ok := g.skip[m]; !ok {
		panic(method + " is not a valid method")
	}
	// Fix: store under the canonical upper-case key. The original wrote
	// g.skip[method] with the caller's raw casing, so Skip("get") passed the
	// validity check but set skip["get"], which ServeHTTP (which looks up
	// strings.ToUpper(req.Method)) never reads — i.e. it silently did nothing.
	g.skip[m] = true
	return g
}
// This is a type of wrapper around a http.ResponseWriter which buffers data
// before compressing the whole and writing.
type compressingWriter struct {
	io.WriteCloser      // the pooled *gzip.Writer doing the compression
	http.ResponseWriter // the wrapped response writer receiving compressed bytes
	p *sync.Pool        // pool the gzip.Writer is returned to on Close
}
// newcompressingWriter borrows a gzip.Writer from the pool, points it at w,
// and wraps both in a compressingWriter.
func newcompressingWriter(w http.ResponseWriter, p *sync.Pool) compressingWriter {
	zw := p.Get().(*gzip.Writer)
	zw.Reset(w)
	return compressingWriter{zw, w, p}
}
// Write is using the gzip writer Write method.
// On the first write with no Content-Type set, it sniffs the type from the
// uncompressed payload and drops Content-Length (the compressed size differs).
func (cw compressingWriter) Write(b []byte) (int, error) {
	if cw.ResponseWriter.Header().Get("Content-Type") == "" {
		cw.ResponseWriter.Header().Set("Content-Type", http.DetectContentType(b))
		// NOTE(review): Content-Length is only deleted on this sniffing path;
		// if the caller set Content-Type themselves a stale length survives — confirm intended.
		cw.ResponseWriter.Header().Del("Content-Length")
	}
	return cw.WriteCloser.Write(b)
}
// Close terminates the gzip stream, writing any buffered compressed data to
// the underlying ResponseWriter. Then it releases the gzip.Writer, putting
// it back into the Pool.
func (cw compressingWriter) Close() error {
	z := cw.WriteCloser.(*gzip.Writer)
	// Fix: use Close instead of Flush. Flush only emits a deflate sync point;
	// the gzip trailer (CRC-32 + uncompressed length) is written by Close, so
	// the original produced a truncated/invalid gzip stream for clients that
	// validate it. The writer remains reusable: newcompressingWriter calls
	// Reset on it after taking it from the pool.
	err := z.Close()
	cw.p.Put(z)
	return err
}
// Wrappee exposes the underlying http.ResponseWriter being wrapped.
func (cw compressingWriter) Wrappee() http.ResponseWriter { return cw.ResponseWriter }
// ServeHTTP handles a http.Request by gzipping the http response body and
// setting the right http Headers.
// Compression is bypassed when the request method was Skip()ed or when the
// client does not advertise gzip in Accept-Encoding.
func (g Gzipper) ServeHTTP(w http.ResponseWriter, req *http.Request) {
	if mustSkip, exist := g.skip[strings.ToUpper(req.Method)]; exist && mustSkip {
		if g.next != nil {
			g.next.ServeHTTP(w, req)
		}
		return
	}
	// Vary tells caches the response depends on the request's Accept-Encoding.
	w.Header().Add("Vary", "Accept-Encoding")
	if !strings.Contains(req.Header.Get("Accept-Encoding"), "gzip") {
		if g.next != nil {
			g.next.ServeHTTP(w, req)
		}
		return
	}
	// Fix: only borrow a gzip.Writer from the pool once compression is
	// certain. The original created the compressingWriter before the
	// Accept-Encoding check, so every non-gzip request leased a pooled
	// writer (and reset it against w) that was never closed or returned.
	wc := newcompressingWriter(w, g.pool)
	wc.Header().Set("Content-Encoding", "gzip")
	// All the conditions are present : we shall compress the data before
	// writing it out.
	if g.next != nil {
		g.next.ServeHTTP(wc, req)
	}
	// Close finalizes the gzip stream and returns the writer to the pool.
	if err := wc.Close(); err != nil {
		panic(err)
	}
}
// Link registers a next request Handler to be called by ServeHTTP method.
// It returns the result of the linking.
// Note the value receiver: the modified copy is returned, so callers must
// use the return value.
func (g Gzipper) Link(h xhttp.Handler) xhttp.HandlerLinker {
	g.next = h
	return g
}
|
atdiar/xhttp
|
handlers/compression/gzip.go
|
GO
|
bsd-3-clause
| 3,197
|
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "xwalk/runtime/browser/devtools/xwalk_devtools_delegate.h"
#include <string>
#include "base/base64.h"
#include "base/memory/ref_counted_memory.h"
#include "base/strings/utf_string_conversions.h"
#include "base/thread_task_runner_handle.h"
#include "content/public/browser/devtools_agent_host.h"
#include "content/public/browser/devtools_http_handler.h"
#include "content/public/browser/devtools_target.h"
#include "content/public/browser/favicon_status.h"
#include "content/public/browser/navigation_entry.h"
#include "content/public/browser/render_view_host.h"
#include "content/public/browser/render_widget_host_view.h"
#include "content/public/browser/web_contents.h"
#include "content/public/common/url_constants.h"
#include "grit/xwalk_resources.h"
#include "net/socket/tcp_listen_socket.h"
#include "ui/base/resource/resource_bundle.h"
#include "ui/snapshot/snapshot.h"
using content::DevToolsAgentHost;
using content::RenderViewHost;
using content::RenderWidgetHostView;
using content::WebContents;
namespace {
const char kTargetTypePage[] = "page";
const char kTargetTypeServiceWorker[] = "service_worker";
const char kTargetTypeOther[] = "other";
// A content::DevToolsTarget implementation backed by a DevToolsAgentHost;
// mostly delegates to the agent host and caches favicon/activity data
// captured at construction time.
class Target : public content::DevToolsTarget {
 public:
  explicit Target(scoped_refptr<content::DevToolsAgentHost> agent_host);

  std::string GetId() const override { return agent_host_->GetId(); }
  // Maps the agent-host type onto the devtools target-type strings declared
  // above (page / service_worker / other).
  std::string GetType() const override {
    switch (agent_host_->GetType()) {
      case content::DevToolsAgentHost::TYPE_WEB_CONTENTS:
        return kTargetTypePage;
      case content::DevToolsAgentHost::TYPE_SERVICE_WORKER:
        return kTargetTypeServiceWorker;
      default:
        break;
    }
    return kTargetTypeOther;
  }
  std::string GetTitle() const override {
    return agent_host_->GetTitle();
  }
  // No description/parent is provided for xwalk targets.
  std::string GetDescription() const override { return std::string(); }
  GURL GetURL() const override { return agent_host_->GetURL(); }
  GURL GetFaviconURL() const override { return favicon_url_; }
  base::TimeTicks GetLastActivityTime() const override {
    return last_activity_time_;
  }
  std::string GetParentId() const override { return std::string(); }
  bool IsAttached() const override {
    return agent_host_->IsAttached();
  }
  scoped_refptr<DevToolsAgentHost> GetAgentHost() const override {
    return agent_host_;
  }
  bool Activate() const override;
  bool Close() const override;

 private:
  // Fallback favicon: encodes the runtime's app icon as a data: URL.
  GURL GetFaviconDataURL(WebContents* web_contents) const;

  scoped_refptr<DevToolsAgentHost> agent_host_;
  std::string id_;     // NOTE(review): never assigned in the visible code
  std::string title_;  // NOTE(review): never assigned in the visible code
  GURL favicon_url_;
  base::TimeTicks last_activity_time_;
};
// Snapshots the favicon URL and last-activity time from the agent host's
// WebContents (when one exists) at construction time. Both values are
// captured once here, not read live by the accessors.
Target::Target(scoped_refptr<content::DevToolsAgentHost> agent_host)
    : agent_host_(agent_host) {
  if (content::WebContents* web_contents = agent_host_->GetWebContents()) {
    content::NavigationController& controller = web_contents->GetController();
    content::NavigationEntry* entry = controller.GetActiveEntry();
    // Prefer the favicon recorded on the active navigation entry.
    if (entry != nullptr && entry->GetURL().is_valid())
      favicon_url_ = entry->GetFavicon().url;
    // Fall back to the app icon encoded as a data: URL.
    if (favicon_url_.is_empty())
      favicon_url_ = GetFaviconDataURL(web_contents);
    last_activity_time_ = web_contents->GetLastActiveTime();
  }
}
// Converts the runtime's app icon to a "data:image/png;base64," URL, or
// returns an empty GURL when there is no runtime delegate or no icon.
GURL Target::GetFaviconDataURL(WebContents* web_contents) const {
  // Convert icon image to "data:" url.
#if defined(OS_ANDROID)
  // TODO(YangangHan): Add a new base parent class of WebContents
  // for both Tizen and Android, so we can remove the current macro
  // in the future.
  return GURL();
#else
  // Previously this code followed an unconditional `return` inside the
  // OS_ANDROID branch and was still compiled (unreachably) on Android;
  // the #else makes the platform split explicit.
  xwalk::Runtime* runtime =
      static_cast<xwalk::Runtime*>(web_contents->GetDelegate());
  if (!runtime || runtime->app_icon().IsEmpty())
    return GURL();
  scoped_refptr<base::RefCountedMemory> icon_bytes =
      runtime->app_icon().Copy1xPNGBytes();
  std::string str_url;
  str_url.append(reinterpret_cast<const char*>(icon_bytes->front()),
                 icon_bytes->size());
  // In-place encode: Base64Encode may take the same string as in and out.
  base::Base64Encode(str_url, &str_url);
  str_url.insert(0, "data:image/png;base64,");
  return GURL(str_url);
#endif
}
// Brings the target's contents to the foreground via the agent host.
bool Target::Activate() const {
return agent_host_->Activate();
}
// Closes the target's contents via the agent host.
bool Target::Close() const {
return agent_host_->Close();
}
} // namespace
namespace xwalk {
namespace {
// Creates a Runtime in |browser_context| and starts loading |url|.
// The observer is registered before the load begins. On non-Android
// platforms the runtime also gets the default UI delegate and is shown
// immediately. Ownership of the returned Runtime stays with the caller's
// observer machinery (see OnRuntimeClosed below).
Runtime* CreateWithDefaultWindow(
XWalkBrowserContext* browser_context, const GURL& url,
Runtime::Observer* observer) {
Runtime* runtime = Runtime::Create(browser_context);
runtime->set_observer(observer);
runtime->LoadURL(url);
#if !defined(OS_ANDROID)
runtime->set_ui_delegate(DefaultRuntimeUIDelegate::Create(runtime));
runtime->Show();
#endif
return runtime;
}
} // namespace
XWalkDevToolsHttpHandlerDelegate::XWalkDevToolsHttpHandlerDelegate() {
}
XWalkDevToolsHttpHandlerDelegate::~XWalkDevToolsHttpHandlerDelegate() {
}
// Serves the discovery page bundled into the resource pak as the DevTools
// landing page.
std::string XWalkDevToolsHttpHandlerDelegate::GetDiscoveryPageHTML() {
return ResourceBundle::GetSharedInstance().GetRawDataResource(
IDR_DEVTOOLS_FRONTEND_PAGE_HTML).as_string();
}
// Caches the PNG-encoded thumbnail captured for |url| so that
// GetPageThumbnailData() can serve it on a later request. Invoked as the
// completion callback of ui::GrabViewSnapshotAsync().
void XWalkDevToolsDelegate::ProcessAndSaveThumbnail(
    const GURL& url,
    scoped_refptr<base::RefCountedBytes> png) {
  if (!png.get())
    return;
  const std::vector<unsigned char>& png_data = png->data();
  // Guard against an empty buffer: the previous code took &png_data[0],
  // which is undefined behavior on an empty vector.
  if (png_data.empty())
    return;
  thumbnail_map_[url] = std::string(png_data.begin(), png_data.end());
}
// The DevTools frontend resources are compiled into the binary, so the
// handler must not look for an external frontend.
bool XWalkDevToolsHttpHandlerDelegate::BundlesFrontendResources() {
return true;
}
// No on-disk debug frontend is provided.
base::FilePath XWalkDevToolsHttpHandlerDelegate::GetDebugFrontendDir() {
return base::FilePath();
}
// Port tethering is not supported; an empty scoped_ptr disables it.
scoped_ptr<net::ServerSocket>
XWalkDevToolsHttpHandlerDelegate::CreateSocketForTethering(
std::string* name) {
return scoped_ptr<net::ServerSocket>();
}
// |context| is borrowed, not owned; weak_factory_ must be the last-initialized
// member so its WeakPtrs are invalidated first on destruction.
XWalkDevToolsDelegate::XWalkDevToolsDelegate(XWalkBrowserContext* context)
: browser_context_(context),
weak_factory_(this) {
}
XWalkDevToolsDelegate::~XWalkDevToolsDelegate() {
}
// No custom protocol commands are handled; returning NULL defers to the
// default DevTools handling.
base::DictionaryValue* XWalkDevToolsDelegate::HandleCommand(
content::DevToolsAgentHost* agent_host,
base::DictionaryValue* command_dict) {
return NULL;
}
std::string XWalkDevToolsDelegate::GetPageThumbnailData(const GURL& url) {
if (thumbnail_map_.find(url) != thumbnail_map_.end())
return thumbnail_map_[url];
// TODO(YangangHan): Support real time thumbnail.
content::DevToolsAgentHost::List agents =
content::DevToolsAgentHost::GetOrCreateAll();
for (auto& it : agents) {
WebContents* web_contents = it.get()->GetWebContents();
if (web_contents && web_contents->GetURL() == url) {
RenderWidgetHostView* render_widget_host_view =
web_contents->GetRenderWidgetHostView();
if (!render_widget_host_view)
continue;
gfx::Rect snapshot_bounds(
render_widget_host_view->GetViewBounds().size());
ui::GrabViewSnapshotAsync(
render_widget_host_view->GetNativeView(),
snapshot_bounds,
base::ThreadTaskRunnerHandle::Get(),
base::Bind(&XWalkDevToolsDelegate::ProcessAndSaveThumbnail,
weak_factory_.GetWeakPtr(),
url));
break;
}
}
return std::string();
}
// Opens a new Runtime window navigated to |url| and wraps the agent host of
// its WebContents in a DevToolsTarget for the caller.
scoped_ptr<content::DevToolsTarget>
XWalkDevToolsDelegate::CreateNewTarget(const GURL& url) {
Runtime* runtime = CreateWithDefaultWindow(
browser_context_, url, this);
return scoped_ptr<content::DevToolsTarget>(
new Target(DevToolsAgentHost::GetOrCreateFor(runtime->web_contents())));
}
// Wraps every live DevTools agent host in a Target and hands the resulting
// list to |callback|. The callback takes ownership of the Target pointers.
void XWalkDevToolsDelegate::EnumerateTargets(TargetCallback callback) {
  content::DevToolsAgentHost::List agent_hosts =
      content::DevToolsAgentHost::GetOrCreateAll();
  TargetList targets;
  for (const auto& agent_host : agent_hosts)
    targets.push_back(new Target(agent_host));
  callback.Run(targets);
}
// Runtime::Observer implementation: newly added runtimes are observed by this
// delegate, get the default UI delegate, and are shown immediately.
void XWalkDevToolsDelegate::OnNewRuntimeAdded(Runtime* runtime) {
runtime->set_observer(this);
runtime->set_ui_delegate(DefaultRuntimeUIDelegate::Create(runtime));
runtime->Show();
}
// Runtime::Observer implementation: deletes the runtime when it closes.
void XWalkDevToolsDelegate::OnRuntimeClosed(Runtime* runtime) {
delete runtime;
}
} // namespace xwalk
|
mrunalk/crosswalk
|
runtime/browser/devtools/xwalk_devtools_delegate.cc
|
C++
|
bsd-3-clause
| 8,155
|
//=============================================================================================================
/**
* @file surfaceset.h
* @author Lorenz Esch <lesch@mgh.harvard.edu>;
* Matti Hamalainen <msh@nmr.mgh.harvard.edu>;
* Christoph Dinh <chdinh@nmr.mgh.harvard.edu>
* @since 0.1.0
* @date March, 2013
*
* @section LICENSE
*
* Copyright (C) 2013, Lorenz Esch, Matti Hamalainen, Christoph Dinh. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that
* the following conditions are met:
* * Redistributions of source code must retain the above copyright notice, this list of conditions and the
* following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
* the following disclaimer in the documentation and/or other materials provided with the distribution.
* * Neither the name of MNE-CPP authors nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
*
* @brief SurfaceSet class declaration
*
*/
#ifndef SURFACESET_H
#define SURFACESET_H
//=============================================================================================================
// INCLUDES
//=============================================================================================================
#include "fs_global.h"
#include "surface.h"
//=============================================================================================================
// QT INCLUDES
//=============================================================================================================
#include <QSharedPointer>
#include <QMap>
//=============================================================================================================
// DEFINE NAMESPACE FSLIB
//=============================================================================================================
namespace FSLIB
{
//=============================================================================================================
// FORWARD DECLARATIONS
//=============================================================================================================
//=============================================================================================================
/**
* The set of surfaces holds right and left hemisphere surfaces.
*
* @brief A hemisphere set of surfaces
*/
class FSSHARED_EXPORT SurfaceSet
{
public:
typedef QSharedPointer<SurfaceSet> SPtr; /**< Shared pointer type for SurfaceSet class. */
typedef QSharedPointer<const SurfaceSet> ConstSPtr; /**< Const shared pointer type for SurfaceSet class. */
//=========================================================================================================
/**
* Default constructor
*/
SurfaceSet();
//=========================================================================================================
/**
* Constructs the surface set by reading it from the given files.
*
* @param[in] subject_id Name of subject.
* @param[in] hemi Which hemisphere to load {0 -> lh, 1 -> rh, 2 -> both}.
* @param[in] surf Name of the surface to load (eg. inflated, orig ...).
* @param[in] subjects_dir Subjects directory.
*/
explicit SurfaceSet(const QString &subject_id, qint32 hemi, const QString &surf, const QString &subjects_dir);
//=========================================================================================================
/**
* Constructs the surface set by reading it from the given files.
*
* @param[in] path path to surface directory.
* @param[in] hemi Which hemisphere to load {0 -> lh, 1 -> rh, 2 -> both}.
* @param[in] surf Name of the surface to load (eg. inflated, orig ...).
*/
explicit SurfaceSet(const QString &path, qint32 hemi, const QString &surf);
//=========================================================================================================
/**
* Constructs a surface set by assembling given surfaces
*
* @param[in] p_LHSurface Left hemisphere surface.
* @param[in] p_RHSurface Right hemisphere surface.
*/
explicit SurfaceSet(const Surface& p_LHSurface, const Surface& p_RHSurface);
//=========================================================================================================
/**
* Constructs a surface set by reading from surface files
*
* @param[in] p_sLHFileName Left hemisphere surface file.
* @param[in] p_sRHFileName Right hemisphere surface file.
*/
explicit SurfaceSet(const QString& p_sLHFileName, const QString& p_sRHFileName);
//=========================================================================================================
/**
* Destroys the SurfaceSet class.
*/
~SurfaceSet();
//=========================================================================================================
/**
* Clears the SurfaceSet (removes all stored surfaces).
*/
void clear();
//=========================================================================================================
/**
* Returns The surface set map
*
* @return the surface set map.
*/
inline QMap<qint32, Surface>& data();
//=========================================================================================================
/**
* True if SurfaceSet is empty.
*
* @return true if SurfaceSet is empty.
*/
inline bool isEmpty() const;
//=========================================================================================================
/**
* Insert a surface
*
* @param[in] p_Surface Surface to insert.
*/
void insert(const Surface& p_Surface);
//=========================================================================================================
/**
* Reads different surface files and assembles them to a SurfaceSet
*
* @param[in] p_sLHFileName Left hemisphere surface file.
* @param[in] p_sRHFileName Right hemisphere surface file.
* @param[out] p_SurfaceSet The read surface set.
*
* @return true if successful, false otherwise.
*/
static bool read(const QString& p_sLHFileName, const QString& p_sRHFileName, SurfaceSet &p_SurfaceSet);
//=========================================================================================================
/**
* The kind of Surfaces which are held by the SurfaceSet (eg. inflated, orig ...)
*
* @return the loaded surfaces (eg. inflated, orig ...).
*/
inline QString surf() const;
//=========================================================================================================
/**
* Subscript operator [] to access surface by index
*
* @param[in] idx the hemisphere index (0 or 1).
*
* @return Surface related to the parameter index.
*/
const Surface& operator[] (qint32 idx) const;
//=========================================================================================================
/**
* Subscript operator [] to access surface by index
*
* @param[in] idx the hemisphere index (0 or 1).
*
* @return Surface related to the parameter index.
*/
Surface& operator[] (qint32 idx);
//=========================================================================================================
/**
* Subscript operator [] to access surface by identifier
*
* @param[in] idt the hemisphere identifier ("lh" or "rh").
*
* @return Surface related to the parameter identifier.
*/
const Surface& operator[] (QString idt) const;
//=========================================================================================================
/**
* Subscript operator [] to access surface by identifier
*
* @param[in] idt the hemisphere identifier ("lh" or "rh").
*
* @return Surface related to the parameter identifier.
*/
Surface& operator[] (QString idt);
//=========================================================================================================
/**
* Returns number of loaded hemispheres
*
* @return number of loaded hemispheres.
*/
inline qint32 size() const;
private:
//=========================================================================================================
/**
* Calculates the offset between two Surfaces and sets the offset to each surface accordingly
*/
void calcOffset();
QMap<qint32, Surface> m_qMapSurfs; /**< Hemisphere surfaces (lh = 0; rh = 1). */
};
//=============================================================================================================
// INLINE DEFINITIONS
//=============================================================================================================
// Grants direct (mutable) access to the underlying hemisphere map.
inline QMap<qint32, Surface>& SurfaceSet::data()
{
return m_qMapSurfs;
}
//=============================================================================================================
// True when no hemisphere surface has been loaded or inserted.
inline bool SurfaceSet::isEmpty() const
{
return m_qMapSurfs.isEmpty();
}
//=============================================================================================================
inline QString SurfaceSet::surf() const
{
    // Every stored surface shares the same kind, so the first map entry is
    // representative; an empty set yields an empty string.
    if(m_qMapSurfs.isEmpty()) {
        return QString("");
    }
    return m_qMapSurfs.begin().value().surf();
}
//=============================================================================================================
// Number of loaded hemisphere surfaces (0, 1, or 2).
inline qint32 SurfaceSet::size() const
{
return m_qMapSurfs.size();
}
} // NAMESPACE
#endif // SURFACESET_H
|
mne-tools/mne-cpp
|
libraries/fs/surfaceset.h
|
C
|
bsd-3-clause
| 10,874
|
# Zend Expressive CoderConf
[](https://secure.travis-ci.org/zendframework/zend-expressive-skeleton)
This repository was created for my presentation at the coderconf.org conference.
html authorization:
username: myuser
password: mypass
|
ooghry/Zend-Expressive-CoderConf
|
README.md
|
Markdown
|
bsd-3-clause
| 332
|
Angular Slides Editor
=================
|
aslubsky/angular-slides-editor
|
README.md
|
Markdown
|
bsd-3-clause
| 39
|
<?php
/* Yii error view: renders the error name and "<code>: <message>" inside a
   Bootstrap alert. All dynamic values are HTML-encoded before output. */
/* @var $this yii\web\View */
/* @var $name string */
/* @var $message string */
/* @var $exception Exception */
/* @var $code int|string NOTE(review): $code is used below but is not among
   the variables Yii's default ErrorAction documents — confirm the rendering
   action actually provides it, otherwise this view raises an
   undefined-variable notice. */
use yii\helpers\Html;
?>
<div class="site-error">
<h1><?= Html::encode($name) ?></h1>
<div class="alert alert-danger">
<?= nl2br(Html::encode($code.': '.$message)) ?>
</div>
<p>
The above error occurred while the Web server was processing your request.
</p>
<p>
Please contact us if you think this is a server error. Thank you.
</p>
</div>
|
Zingeon/yii2_categories_tree
|
views/categories/error.php
|
PHP
|
bsd-3-clause
| 518
|
# Makefile for streamcluster
TARGET=streamcluster
OBJS=streamcluster.o
# -DENABLE_THREADS selects the pthread build path.
# NOTE(review): -m32 -static forces a 32-bit, statically linked binary —
# confirm the toolchain on the build host supports this.
CXXFLAGS := $(CXXFLAGS) -DENABLE_THREADS -pthread -m32 -static
# parsec_barrier.o is only needed by the threaded build (ENABLE_THREADS above).
OBJS += parsec_barrier.o
# Link step: all objects into the final benchmark binary.
all: $(OBJS)
	$(CXX) $(CXXFLAGS) $(LDFLAGS) $(OBJS) $(LIBS) -o $(TARGET)
# Generic compile rule for C++ sources.
%.o : %.cpp
	$(CXX) $(CXXFLAGS) -c $<
clean:
	rm -f *.o $(TARGET)
|
Multi2Sim/m2s-bench-parsec-3.0-src
|
streamcluster/Makefile
|
Makefile
|
bsd-3-clause
| 303
|
// Copyright 2021 The Cobalt Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef STARBOARD_STUB_FONT_H_
#define STARBOARD_STUB_FONT_H_
namespace starboard {
namespace stub {
// Returns a pointer to the stub implementation of the Starboard font API.
// NOTE(review): the concrete table type behind the void* is defined in the
// corresponding .cc file — confirm there before casting.
const void* GetFontApi();
} // namespace stub
} // namespace starboard
|
youtube/cobalt
|
starboard/stub/font.h
|
C
|
bsd-3-clause
| 830
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>TrackerPeerManager class - hetimatorrent.tracker library - Dart API</title>
<!-- required because all the links are pseudo-absolute -->
<base href="..">
<link href='https://fonts.googleapis.com/css?family=Source+Code+Pro|Roboto:500,400italic,300,400' rel='stylesheet' type='text/css'>
<link rel="stylesheet" href="static-assets/prettify.css">
<link rel="stylesheet" href="static-assets/css/bootstrap.min.css">
<link rel="stylesheet" href="static-assets/styles.css">
<meta name="description" content="API docs for the TrackerPeerManager class from the hetimatorrent.tracker library, for the Dart programming language.">
<link rel="icon" href="static-assets/favicon.png">
<!-- Do not remove placeholder -->
<!-- Header Placeholder -->
</head>
<body>
<div id="overlay-under-drawer"></div>
<header class="container-fluid" id="title">
<nav class="navbar navbar-fixed-top">
<div class="container">
<button id="sidenav-left-toggle" type="button"> </button>
<ol class="breadcrumbs gt-separated hidden-xs">
<li><a href="index.html">hetimatorrent</a></li>
<li><a href="hetimatorrent.tracker/hetimatorrent.tracker-library.html">hetimatorrent.tracker</a></li>
<li class="self-crumb">TrackerPeerManager</li>
</ol>
<div class="self-name">TrackerPeerManager</div>
</div>
</nav>
<div class="container masthead">
<ol class="breadcrumbs gt-separated visible-xs">
<li><a href="index.html">hetimatorrent</a></li>
<li><a href="hetimatorrent.tracker/hetimatorrent.tracker-library.html">hetimatorrent.tracker</a></li>
<li class="self-crumb">TrackerPeerManager</li>
</ol>
<div class="title-description">
<h1 class="title">
<div class="kind">class</div> TrackerPeerManager
</h1>
<!-- p class="subtitle">
</p -->
</div>
<ul class="subnav">
<li><a href="hetimatorrent.tracker/TrackerPeerManager-class.html#instance-properties">Properties</a></li>
<li><a href="hetimatorrent.tracker/TrackerPeerManager-class.html#constructors">Constructors</a></li>
<li><a href="hetimatorrent.tracker/TrackerPeerManager-class.html#instance-methods">Methods</a></li>
</ul>
</div>
</header>
<div class="container body">
<div class="col-xs-6 col-sm-3 col-md-3 sidebar sidebar-offcanvas-left">
<h5><a href="index.html">hetimatorrent</a></h5>
<h5><a href="hetimatorrent.tracker/hetimatorrent.tracker-library.html">hetimatorrent.tracker</a></h5>
<ol>
<li class="section-title"><a href="hetimatorrent.tracker/hetimatorrent.tracker-library.html#classes">Classes</a></li>
<li><a href="hetimatorrent.tracker/StartResult-class.html">StartResult</a></li>
<li><a href="hetimatorrent.tracker/StopResult-class.html">StopResult</a></li>
<li><a href="hetimatorrent.tracker/TrackerClient-class.html">TrackerClient</a></li>
<li><a href="hetimatorrent.tracker/TrackerPeerInfo-class.html">TrackerPeerInfo</a></li>
<li><a href="hetimatorrent.tracker/TrackerPeerManager-class.html">TrackerPeerManager</a></li>
<li><a href="hetimatorrent.tracker/TrackerRequest-class.html">TrackerRequest</a></li>
<li><a href="hetimatorrent.tracker/TrackerRequestResult-class.html">TrackerRequestResult</a></li>
<li><a href="hetimatorrent.tracker/TrackerResponse-class.html">TrackerResponse</a></li>
<li><a href="hetimatorrent.tracker/TrackerServer-class.html">TrackerServer</a></li>
<li><a href="hetimatorrent.tracker/TrackerUrl-class.html">TrackerUrl</a></li>
</ol>
</div>
<div class="col-xs-12 col-sm-9 col-md-6 main-content">
<section class="desc markdown">
<p class="no-docs">Not documented.</p>
</section>
<section class="summary" id="instance-properties">
<h2>Properties</h2>
<dl class="properties">
<dt id="interval" class="property">
<span class="top-level-variable-type">int</span>
<a href="hetimatorrent.tracker/TrackerPeerManager/interval.html">interval</a>
</dt>
<dd>
<div class="readable-writable">
read / write
</div>
</dd>
<dt id="managedInfoHash" class="property">
<span class="top-level-variable-type">List<int></span>
<a href="hetimatorrent.tracker/TrackerPeerManager/managedInfoHash.html">managedInfoHash</a>
</dt>
<dd>
<div class="readable-writable">
read-only
</div>
</dd>
<dt id="managedPeerAddress" class="property">
<span class="top-level-variable-type"><a href="hetimatorrent/ShuffleLinkedList-class.html">ShuffleLinkedList</a><<a href="hetimatorrent.torrent.trackerpeerinfo/TrackerPeerInfo-class.html">TrackerPeerInfo</a>></span>
<a href="hetimatorrent.tracker/TrackerPeerManager/managedPeerAddress.html">managedPeerAddress</a>
</dt>
<dd>
<div class="readable-writable">
read / write
</div>
</dd>
<dt id="max" class="property">
<span class="top-level-variable-type">int</span>
<a href="hetimatorrent.tracker/TrackerPeerManager/max.html">max</a>
</dt>
<dd>
<div class="readable-writable">
read / write
</div>
</dd>
<dt id="numOfPeer" class="property">
<span class="top-level-variable-type">int</span>
<a href="hetimatorrent.tracker/TrackerPeerManager/numOfPeer.html">numOfPeer</a>
</dt>
<dd>
<div class="readable-writable">
read-only
</div>
</dd>
<dt id="torrentFile" class="property">
<span class="top-level-variable-type"><a href="hetimatorrent/TorrentFile-class.html">TorrentFile</a></span>
<a href="hetimatorrent.tracker/TrackerPeerManager/torrentFile.html">torrentFile</a>
</dt>
<dd>
<div class="readable-writable">
read-only
</div>
</dd>
</dl>
</section>
<section class="summary" id="constructors">
<h2>Constructors</h2>
<dl class="constructor-summary-list">
<dt id="TrackerPeerManager" class="callable">
<span class="name"><a href="hetimatorrent.tracker/TrackerPeerManager/TrackerPeerManager.html">TrackerPeerManager</a></span><span class="signature">(<span class="parameter" id="-param-infoHash"><span class="type-annotation">List<int></span> <span class="parameter-name">infoHash</span></span>, [<span class="parameter" id="-param-file"><span class="type-annotation"><a href="hetimatorrent/TorrentFile-class.html">TorrentFile</a></span> <span class="parameter-name">file</span> = <span class="default-value">null</span></span>])</span>
</dt>
<dd>
</dd>
</dl>
</section>
<section class="summary" id="instance-methods">
<h2>Methods</h2>
<dl class="callables">
<dt id="createResponse" class="callable">
<a href="hetimatorrent.tracker/TrackerPeerManager/createResponse.html"><span class="name ">createResponse</span></a><span class="signature">(<wbr>)
→
<span class="returntype"><a href="hetimatorrent.tracker/TrackerResponse-class.html">TrackerResponse</a></span>
</span>
</dt>
<dd>
</dd>
<dt id="isManagedInfoHash" class="callable">
<a href="hetimatorrent.tracker/TrackerPeerManager/isManagedInfoHash.html"><span class="name ">isManagedInfoHash</span></a><span class="signature">(<wbr><span class="parameter" id="isManagedInfoHash-param-infoHash"><span class="type-annotation">List<int></span> <span class="parameter-name">infoHash</span></span>)
→
<span class="returntype">bool</span>
</span>
</dt>
<dd>
</dd>
<dt id="update" class="callable">
<a href="hetimatorrent.tracker/TrackerPeerManager/update.html"><span class="name ">update</span></a><span class="signature">(<wbr><span class="parameter" id="update-param-request"><span class="type-annotation"><a href="hetimatorrent/TrackerRequest-class.html">TrackerRequest</a></span> <span class="parameter-name">request</span></span>)
→
<span class="returntype">void</span>
</span>
</dt>
<dd>
</dd>
</dl>
</section>
</div> <!-- /.main-content -->
<div class="col-xs-6 col-sm-6 col-md-3 sidebar sidebar-offcanvas-right">
<h5>TrackerPeerManager</h5>
<ol>
<li class="section-title"><a href="hetimatorrent.tracker/TrackerPeerManager-class.html#instance-properties">Properties</a></li>
<li><a href="hetimatorrent.tracker/TrackerPeerManager/interval.html">interval</a>
</li>
<li><a href="hetimatorrent.tracker/TrackerPeerManager/managedInfoHash.html">managedInfoHash</a>
</li>
<li><a href="hetimatorrent.tracker/TrackerPeerManager/managedPeerAddress.html">managedPeerAddress</a>
</li>
<li><a href="hetimatorrent.tracker/TrackerPeerManager/max.html">max</a>
</li>
<li><a href="hetimatorrent.tracker/TrackerPeerManager/numOfPeer.html">numOfPeer</a>
</li>
<li><a href="hetimatorrent.tracker/TrackerPeerManager/torrentFile.html">torrentFile</a>
</li>
<li class="section-title"><a href="hetimatorrent.tracker/TrackerPeerManager-class.html#constructors">Constructors</a></li>
<li><a href="hetimatorrent.tracker/TrackerPeerManager/TrackerPeerManager.html">TrackerPeerManager</a></li>
<li class="section-title"><a href="hetimatorrent.tracker/TrackerPeerManager-class.html#methods">Methods</a></li>
<li><a href="hetimatorrent.tracker/TrackerPeerManager/createResponse.html">createResponse</a>
</li>
<li><a href="hetimatorrent.tracker/TrackerPeerManager/isManagedInfoHash.html">isManagedInfoHash</a>
</li>
<li><a href="hetimatorrent.tracker/TrackerPeerManager/update.html">update</a>
</li>
</ol>
</div><!--/.sidebar-offcanvas-->
</div> <!-- container -->
<footer>
<div class="container-fluid">
<div class="container">
<p class="text-center">
<span class="no-break">
hetimatorrent 0.0.1 api docs
</span>
•
<span class="copyright no-break">
<a href="https://www.dartlang.org">
<img src="static-assets/favicon.png" alt="Dart" title="Dart"width="16" height="16">
</a>
</span>
•
<span class="copyright no-break">
<a href="http://creativecommons.org/licenses/by-sa/4.0/">cc license</a>
</span>
</p>
</div>
</div>
</footer>
<script src="static-assets/prettify.js"></script>
<script src="static-assets/script.js"></script>
<!-- Do not remove placeholder -->
<!-- Footer Placeholder -->
</body>
</html>
|
kyorohiro/dart_hetimatorrent
|
doc/api/hetimatorrent.tracker/TrackerPeerManager-class.html
|
HTML
|
bsd-3-clause
| 11,909
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html>
<head>
<title>DIF_SHOWAMPERSAND</title>
<meta http-equiv="Content-Type" Content="text/html; charset=Windows-1251">
<link rel="stylesheet" type="text/css" href="../../../styles/styles.css">
<script language="javascript" src='../../links.js' type="text/javascript"></script>
</head>
<body>
<h1>DIF_SHOWAMPERSAND</h1>
<div class=navbar>
<a href="../../index.html">main</a> |
<a href="../index.html">Dialog API</a> |
<a href="index.html">Dialog item flags</a><br>
</div>
<div class=shortdescr>
<p>The <dfn>DIF_SHOWAMPERSAND</dfn> flag forces the Dialog Manager to show ampersands (<code>&</code>) in text
items and frames, not using it for the hot key definition.
</div>
<h3>Controls</h3>
<div class=descr>
<p>The <dfn>DIF_SHOWAMPERSAND</dfn> flag is applicable to the following dialog item:<br>
<table width="42%" class="cont">
<tr class="cont"><th class="cont" width="40%">Control</th><th class="cont" width="60%">Description</th></tr>
<tr class="cont"><td class="cont" width="40%"><a href="../controls/di_button.html">DI_BUTTON</a></td>
<td class="cont" width="60%">Push button.</td>
</tr>
<tr class="cont"><td class="cont" width="40%"><a href="../controls/di_checkbox.html">DI_CHECKBOX</a></td>
<td class="cont" width="60%">Check box. </td>
</tr>
<tr class="cont"><td class="cont" width="40%"><a href="../controls/di_radiobutton.html">DI_RADIOBUTTON</a></td>
<td class="cont" width="60%">Radio button. </td>
</tr>
<tr class="cont"><td class="cont" width="40%"><a href="../controls/di_text.html">DI_TEXT</a></td>
<td class="cont" width="60%">Text string.</td>
</tr>
<tr class="cont"><td class="cont" width="40%"><a href="../controls/di_vtext.html">DI_VTEXT</a></td>
<td class="cont" width="60%">Vertical text string.</td></tr>
<tr class="cont"><td class="cont" width="40%"><a href="../controls/di_doublebox.html">DI_DOUBLEBOX</a></td>
<td class="cont" width="60%">Double frame.</td>
</tr>
<tr class="cont"><td class="cont" width="40%"><a href="../controls/di_singlebox.html">DI_SINGLEBOX</a></td>
<td class="cont" width="60%">Single frame. </td>
</tr>
</table>
</div>
<h3>Remarks</h3>
<div class=descr>
</div>
<div class=see>See also:</div><div class=seecont>
</div>
</body>
</html>
|
data-man/FarAS
|
enc/enc_eng/meta/dialogapi/flags/dif_showampersand.html
|
HTML
|
bsd-3-clause
| 2,264
|
/*
*
* Copyright (C) 2000-2012, OFFIS e.V.
* All rights reserved. See COPYRIGHT file for details.
*
* This software and supporting documentation were developed by
*
* OFFIS e.V.
* R&D Division Health
* Escherweg 2
* D-26121 Oldenburg, Germany
*
*
* Module: dcmsr
*
* Author: Joerg Riesmeier
*
* Purpose:
* classes: DSRContainerTreeNode
*
*/
#ifndef DSRCONTN_H
#define DSRCONTN_H
#include "dcmtk/config/osconfig.h" /* make sure OS specific configuration is included first */
#include "dcmtk/dcmsr/dsrdoctr.h"
/*---------------------*
* class declaration *
*---------------------*/
/** Class for content item CONTAINER.
 *  A CONTAINER groups its child content items; the Continuity of Content flag
 *  records whether the children form one continuous text flow or separate items.
 */
class DCMTK_DCMSR_EXPORT DSRContainerTreeNode
  : public DSRDocumentTreeNode
{
  public:

    /** constructor
     ** @param  relationshipType     type of relationship to the parent tree node.
     *                               Should not be RT_invalid or RT_isRoot.
     *  @param  continuityOfContent  Continuity of content flag (default: separate).
     *                               Should be different from COC_invalid.
     */
    DSRContainerTreeNode(const E_RelationshipType relationshipType,
                         const E_ContinuityOfContent continuityOfContent = COC_Separate);

    /** destructor
     */
    virtual ~DSRContainerTreeNode();

    /** clear all member variables.
     *  Please note that the content item might become invalid afterwards.
     */
    virtual void clear();

    /** check whether the content item is valid.
     *  The content item is valid if the base class is valid, the continuity of content
     *  flag is valid, and the concept name is valid or the content item is not the root item.
     ** @return OFTrue if tree node is valid, OFFalse otherwise
     */
    virtual OFBool isValid() const;

    /** check whether the content is short.
     *  A container content item is defined to be never short (return always OFFalse).
     ** @param  flags  flag used to customize the output (see DSRTypes::HF_xxx)
     ** @return OFTrue if the content is short, OFFalse otherwise
     */
    virtual OFBool isShort(const size_t flags) const;

    /** print content item.
     *  A typical output looks like this: CONTAINER:(,,"Diagnosis")=SEPARATE for the root node
     *  and contains CONTAINER:=CONTINUOUS for a "normal" content item.
     ** @param  stream  output stream to which the content item should be printed
     *  @param  flags   flag used to customize the output (see DSRTypes::PF_xxx)
     ** @return status, EC_Normal if successful, an error code otherwise
     */
    virtual OFCondition print(STD_NAMESPACE ostream &stream,
                              const size_t flags) const;

    /** write content item in XML format
     ** @param  stream  output stream to which the XML document is written
     *  @param  flags   flag used to customize the output (see DSRTypes::XF_xxx)
     ** @return status, EC_Normal if successful, an error code otherwise
     */
    virtual OFCondition writeXML(STD_NAMESPACE ostream &stream,
                                 const size_t flags) const;

    /** render content item in HTML/XHTML format.
     *  After rendering the current content item all child nodes (if any) are also rendered (see
     *  renderHTMLChildNodes() for details). This method overwrites the one specified in base
     *  class DSRDocumentTreeNode since the rendering of the child nodes depends on the value
     *  of the flag 'ContinuityOfContent'.
     ** @param  docStream     output stream to which the main HTML/XHTML document is written
     *  @param  annexStream   output stream to which the HTML/XHTML document annex is written
     *  @param  nestingLevel  current nesting level. Used to render section headings.
     *  @param  annexNumber   reference to the variable where the current annex number is stored.
     *                        Value is increased automatically by 1 after a new entry has been added.
     *  @param  flags         flag used to customize the output (see DSRTypes::HF_xxx)
     ** @return status, EC_Normal if successful, an error code otherwise
     */
    virtual OFCondition renderHTML(STD_NAMESPACE ostream &docStream,
                                   STD_NAMESPACE ostream &annexStream,
                                   const size_t nestingLevel,
                                   size_t &annexNumber,
                                   const size_t flags) const;

    /** get continuity of content flag.
     *  This flag specifies whether or not its contained content items (child nodes) are
     *  logically linked in a continuous textual flow, or are separate items.
     ** @return continuity of content flag if successful, COC_invalid otherwise
     */
    inline E_ContinuityOfContent getContinuityOfContent() const
    {
        return ContinuityOfContent;
    }

    /** set continuity of content flag.
     *  This flag specifies whether or not its contained content items (child nodes) are
     *  logically linked in a continuous textual flow, or are separate items.
     ** @param  continuityOfContent  value to be set (should be different from COC_invalid)
     *  @param  check                dummy parameter (currently not used)
     ** @return status, EC_Normal if successful, an error code otherwise
     */
    OFCondition setContinuityOfContent(const E_ContinuityOfContent continuityOfContent,
                                       const OFBool check = OFTrue);

  protected:

    /** read content item (value) from dataset
     ** @param  dataset  DICOM dataset from which the content item should be read
     ** @return status, EC_Normal if successful, an error code otherwise
     */
    virtual OFCondition readContentItem(DcmItem &dataset);

    /** write content item (value) to dataset
     ** @param  dataset  DICOM dataset to which the content item should be written
     ** @return status, EC_Normal if successful, an error code otherwise
     */
    virtual OFCondition writeContentItem(DcmItem &dataset) const;

    /** read content item specific XML data
     ** @param  doc     document containing the XML file content
     *  @param  cursor  cursor pointing to the starting node
     ** @return status, EC_Normal if successful, an error code otherwise
     */
    virtual OFCondition readXMLContentItem(const DSRXMLDocument &doc,
                                           DSRXMLCursor cursor);

    /** render content item (value) in HTML/XHTML format
     ** @param  docStream     output stream to which the main HTML/XHTML document is written
     *  @param  annexStream   output stream to which the HTML/XHTML document annex is written
     *  @param  nestingLevel  current nesting level. Used to render section headings.
     *  @param  annexNumber   reference to the variable where the current annex number is stored.
     *                        Value is increased automatically by 1 after a new entry has been added.
     *  @param  flags         flag used to customize the output (see DSRTypes::HF_xxx)
     ** @return status, EC_Normal if successful, an error code otherwise
     */
    virtual OFCondition renderHTMLContentItem(STD_NAMESPACE ostream &docStream,
                                              STD_NAMESPACE ostream &annexStream,
                                              const size_t nestingLevel,
                                              size_t &annexNumber,
                                              const size_t flags) const;

  private:

    /// continuity of content flag (associated DICOM VR=CS, mandatory)
    E_ContinuityOfContent ContinuityOfContent;

    // --- declaration of default/copy constructor and assignment operator

    DSRContainerTreeNode();
    DSRContainerTreeNode(const DSRContainerTreeNode &);
    DSRContainerTreeNode &operator=(const DSRContainerTreeNode &);
};
#endif
|
NCIP/annotation-and-image-markup
|
AIMToolkit_v4.1.0_rv44/source/dcmtk-3.6.1_20121102/dcmsr/include/dcmtk/dcmsr/dsrcontn.h
|
C
|
bsd-3-clause
| 7,875
|
#!/bin/bash
#SBATCH --account=nstaff
#SBATCH --constraint=haswell
#SBATCH --image=docker:rcthomas/nersc-python-bench:0.3.2
#SBATCH --job-name=pynamic-cori-haswell-shifter-150
#SBATCH --mail-type=FAIL
#SBATCH --mail-user=rcthomas@lbl.gov
#SBATCH --nodes=150
#SBATCH --ntasks-per-node=32
#SBATCH --output=logs/pynamic-cori-haswell-shifter-150-%j.out
#SBATCH --partition=regular
#SBATCH --qos=premium
#SBATCH --time=10

# Configuration.
commit=false

# Initialize benchmark result.
# (quoted to avoid word-splitting / empty-variable test errors)
if [ "$commit" = true ]; then
    shifter python /usr/local/bin/report-benchmark.py initialize
fi

# Run benchmark.
export OMP_NUM_THREADS=1
unset PYTHONSTARTUP

pynamic_dir=/opt/pynamic-master/pynamic-pyMPI-2.6a1
output=tmp/latest-$SLURM_JOB_NAME.txt
# tee fails if the output directory is missing; create it up front.
mkdir -p "$(dirname "$output")"
srun -c 2 shifter "$pynamic_dir/pynamic-pyMPI" "$pynamic_dir/pynamic_driver.py" "$(date +%s)" | tee "$output"

# Extract result.
startup_time=$( grep '^Pynamic: startup time' "$output" | awk '{ print $(NF-1) }' )
import_time=$( grep '^Pynamic: module import time' "$output" | awk '{ print $(NF-1) }' )
visit_time=$( grep '^Pynamic: module visit time' "$output" | awk '{ print $(NF-1) }' )
total_time=$( echo "$startup_time + $import_time + $visit_time" | bc )
echo total_time "$total_time" s

# Finalize benchmark result.
if [ "$commit" = true ]; then
    shifter python /usr/local/bin/report-benchmark.py finalize "$total_time"
fi
|
NERSC/nersc-python-bench
|
scripts/pynamic-cori-haswell-shifter-150.sh
|
Shell
|
bsd-3-clause
| 1,344
|
/*
* Copyright (c) 1999-2012 Mark D. Hill and David A. Wood
* Copyright (c) 2013 Advanced Micro Devices, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met: redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer;
* redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution;
* neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef __MEM_RUBY_STRUCTURES_CACHEMEMORY_HH__
#define __MEM_RUBY_STRUCTURES_CACHEMEMORY_HH__
#include <string>
#include <vector>
#include "base/hashmap.hh"
#include "base/statistics.hh"
#include "mem/protocol/CacheRequestType.hh"
#include "mem/protocol/CacheResourceType.hh"
#include "mem/protocol/RubyRequest.hh"
#include "mem/ruby/common/DataBlock.hh"
#include "mem/ruby/slicc_interface/AbstractCacheEntry.hh"
#include "mem/ruby/slicc_interface/RubySlicc_ComponentMapping.hh"
#include "mem/ruby/structures/AbstractReplacementPolicy.hh"
#include "mem/ruby/structures/BankedArray.hh"
#include "mem/ruby/system/CacheRecorder.hh"
#include "params/RubyCache.hh"
#include "sim/sim_object.hh"
// Set-associative cache model used by Ruby: stores AbstractCacheEntry
// objects, models banked tag/data arrays and collects access statistics.
class CacheMemory : public SimObject
{
  public:
    typedef RubyCacheParams Params;
    CacheMemory(const Params *p);
    ~CacheMemory();

    void init();

    // Public Methods
    // perform a cache access and see if we hit or not. Return true on a hit.
    bool tryCacheAccess(const Address& address, RubyRequestType type,
                        DataBlock*& data_ptr);

    // similar to above, but doesn't require full access check
    bool testCacheAccess(const Address& address, RubyRequestType type,
                         DataBlock*& data_ptr);

    // tests to see if an address is present in the cache
    bool isTagPresent(const Address& address) const;

    // Returns true if there is:
    //   a) a tag match on this address or there is
    //   b) an unused line in the same cache "way"
    bool cacheAvail(const Address& address) const;

    // find an unused entry and sets the tag appropriate for the address
    AbstractCacheEntry* allocate(const Address& address,
                                 AbstractCacheEntry* new_entry, bool touch);
    // convenience overload: always touches (updates replacement state)
    AbstractCacheEntry* allocate(const Address& address,
                                 AbstractCacheEntry* new_entry)
    {
        return allocate(address, new_entry, true);
    }
    void allocateVoid(const Address& address, AbstractCacheEntry* new_entry)
    {
        allocate(address, new_entry, true);
    }

    // Explicitly free up this address
    void deallocate(const Address& address);

    // Returns with the physical address of the conflicting cache line
    Address cacheProbe(const Address& address) const;

    // looks an address up in the cache
    AbstractCacheEntry* lookup(const Address& address);
    const AbstractCacheEntry* lookup(const Address& address) const;

    Cycles getLatency() const { return m_latency; }
    Cycles getTagLatency() const { return tagArray.getLatency(); }
    Cycles getDataLatency() const { return dataArray.getLatency(); }

    bool isBlockInvalid(int64 cache_set, int64 loc);
    bool isBlockNotBusy(int64 cache_set, int64 loc);

    // Hook for checkpointing the contents of the cache
    void recordCacheContents(int cntrl, CacheRecorder* tr) const;

    // Set this address to most recently used
    void setMRU(const Address& address);

    // lock/unlock a line for the given context id
    void setLocked (const Address& addr, int context);
    void clearLocked (const Address& addr);
    bool isLocked (const Address& addr, int context);

    // Print cache contents
    void print(std::ostream& out) const;
    void printData(std::ostream& out) const;

    void regStats();
    bool checkResourceAvailable(CacheResourceType res, Address addr);
    void recordRequestType(CacheRequestType requestType, Address addr);

  public:
    // Statistics (registered in regStats())
    Stats::Scalar m_demand_hits;
    Stats::Scalar m_demand_misses;
    Stats::Formula m_demand_accesses;

    Stats::Scalar m_sw_prefetches;
    Stats::Scalar m_hw_prefetches;
    Stats::Formula m_prefetches;

    Stats::Vector m_accessModeType;

    Stats::Scalar numDataArrayReads;
    Stats::Scalar numDataArrayWrites;
    Stats::Scalar numTagArrayReads;
    Stats::Scalar numTagArrayWrites;

    Stats::Scalar numTagArrayStalls;
    Stats::Scalar numDataArrayStalls;

    int getCacheSize() const { return m_cache_size; }
    int getNumBlocks() const { return m_cache_num_sets * m_cache_assoc; }
    Address getAddressAtIdx(int idx) const;

  private:
    // convert a Address to its location in the cache
    int64 addressToCacheSet(const Address& address) const;

    // Given a cache tag: returns the index of the tag in a set.
    // returns -1 if the tag is not found.
    int findTagInSet(int64 line, const Address& tag) const;
    int findTagInSetIgnorePermissions(int64 cacheSet,
                                      const Address& tag) const;

    // Private copy constructor and assignment operator
    CacheMemory(const CacheMemory& obj);
    CacheMemory& operator=(const CacheMemory& obj);

  private:
    Cycles m_latency;

    // Data Members (m_prefix)
    bool m_is_instruction_only_cache;

    // The first index is the # of cache lines.
    // The second index is the amount of associativity.
    m5::hash_map<Address, int> m_tag_index;
    std::vector<std::vector<AbstractCacheEntry*> > m_cache;

    AbstractReplacementPolicy *m_replacementPolicy_ptr;

    BankedArray dataArray;
    BankedArray tagArray;

    int m_cache_size;
    int m_cache_num_sets;
    int m_cache_num_set_bits;
    int m_cache_assoc;
    int m_start_index_bit;
    bool m_resource_stalls;
};
// Stream-insertion operator for dumping a CacheMemory (declaration only).
std::ostream& operator<<(std::ostream& out, const CacheMemory& obj);
#endif // __MEM_RUBY_STRUCTURES_CACHEMEMORY_HH__
|
alianmohammad/pd-gem5-latest
|
src/mem/ruby/structures/CacheMemory.hh
|
C++
|
bsd-3-clause
| 6,941
|
// Copyright (c) 2021, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
// Test fixture: struct declarations exercising __attribute__ packing and
// #pragma pack handling in the header parser. The layouts are intentional;
// do not reorder or re-pack these declarations.
struct NormalStruct1
{
  char a;
};

/// Should not be packed.
struct StructWithAttr
{
  int *a;
  int *b;
} __attribute__((annotate("Attr is not __packed__")));

/// Should be packed with 1.
struct PackedAttr{
  int a;
} __attribute__((__packed__));

/// Should be packed with 8.
struct PackedAttrAlign8{
  int a;
} __attribute__((__packed__, aligned(8)));

#pragma pack(push, 2)
/// Should be packed with 2.
struct Pack2WithPragma{
  int a;
};

#pragma pack(4)
/// Should be packed with 4.
struct Pack4WithPragma{
  long long a;
};
#pragma pack(pop)

struct NormalStruct2
{
  char a;
};
|
dart-lang/ffigen
|
test/header_parser_tests/packed_structs.h
|
C
|
bsd-3-clause
| 819
|
/*******************************************************************************
* Copyright SemanticBits, Northwestern University and Akaza Research
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/caaers/LICENSE.txt for details.
******************************************************************************/
package gov.nih.nci.cabig.caaers.dao.query;
import org.apache.commons.lang.StringUtils;
/**
 * Builds HQL queries over {@code SiteResearchStaff} (joined with its research
 * staff and organization) with optional name/organization/identifier filters.
 *
 * @author Ion C. Olaru
 */
public class SiteResearchStaffQuery extends AbstractQuery {

    /** Base query; the filter methods below append WHERE clauses to it. */
    private static final String queryString = "SELECT distinct srs from SiteResearchStaff srs " ;

    // Named-parameter keys. Declared final: they are constants and were
    // previously reassignable mutable statics.
    private static final String FIRST_NAME = "firstName";
    private static final String LAST_NAME = "lastName";
    private static final String ORGANIZATION = "organization";
    private static final String NCI_CODE = "nciIdentifier";
    private static final String USER_NAME = "userName";

    public SiteResearchStaffQuery() {
        super(queryString);
        leftJoinFetch("srs.researchStaff rs");
        leftJoinFetch("srs.organization org");
        orderBy("srs.id");
    }

    /** Case-insensitive substring match on the staff member's first name. */
    public void filterByFirstName(final String firstName) {
        String searchString = "%" + firstName.toLowerCase() + "%";
        andWhere("lower(rs.firstName) LIKE :" + FIRST_NAME);
        setParameter(FIRST_NAME, searchString);
    }

    /**
     * Splits {@code name} on whitespace and requires every token to match the
     * first, last or middle name (case-insensitive substring match).
     */
    public void filterByName(final String name) {
        String[] searchFields = StringUtils.split(name);
        int i = 0;
        if (searchFields != null) {
            for (String searchField : searchFields) {
                String searchString = "%" + StringUtils.lowerCase(searchField) + "%";
                // Unique parameter names per token so the clauses don't collide.
                String firstNameKey = "FIRST_NAME_" + i;
                String lastNameKey = "LAST_NAME_" + i;
                String middleNameKey = "MIDDLE_NAME_" + i;
                andWhere(String.format("(lower(rs.firstName) LIKE :%s OR lower(rs.lastName) LIKE :%s OR lower(rs.middleName) LIKE :%s)",
                        firstNameKey, lastNameKey, middleNameKey));
                setParameter(firstNameKey, searchString);
                setParameter(lastNameKey, searchString);
                setParameter(middleNameKey, searchString);
                i++;
            }
        }
    }

    /** Case-insensitive substring match on the staff member's last name. */
    public void filterByLastName(final String lastName) {
        String searchString = "%" + lastName.toLowerCase() + "%";
        andWhere("lower(rs.lastName) LIKE :" + LAST_NAME);
        setParameter(LAST_NAME, searchString);
    }

    /**
     * Exact match on the organization id.
     * Note: throws {@code NumberFormatException} if {@code organization} is
     * not a numeric id string (pre-existing behavior, kept for callers).
     */
    public void filterByOrganization(final String organization) {
        String searchString = organization.trim();
        andWhere("srs.organization.id =:" + ORGANIZATION);
        setParameter(ORGANIZATION, Integer.parseInt(searchString));
    }

    /** Case-insensitive substring match on the NCI identifier. */
    public void filterByNciIdentifier(final String value) {
        String searchString = "%" + value.toLowerCase() + "%";
        andWhere("lower(rs.nciIdentifier) LIKE :" + NCI_CODE);
        setParameter(NCI_CODE, searchString);
    }

    /** Case-insensitive substring match on the associated caAERS login name. */
    public void filterByUserName(final String value) {
        join("srs.researchStaff.caaersUser cu");
        String searchString = "%" + value.toLowerCase() + "%";
        andWhere("lower(cu.loginName) LIKE :" + USER_NAME);
        setParameter(USER_NAME, searchString);
    }

    /** Restrict to research staff that have no associated caAERS user. */
    public void excludeUsers(){
        andWhere("rs.caaersUser is null");
    }
}
|
NCIP/caaers
|
caAERS/software/core/src/main/java/gov/nih/nci/cabig/caaers/dao/query/SiteResearchStaffQuery.java
|
Java
|
bsd-3-clause
| 3,425
|
# -*- coding: utf-8 -*-
"""
Display number of scratchpad windows and urgency hints.
Configuration parameters:
cache_timeout: refresh interval for i3-msg or swaymsg (default 5)
format: display format for this module
(default "\u232b [\?color=scratchpad {scratchpad}]")
thresholds: specify color thresholds to use
(default [(0, "darkgray"), (1, "violet")])
Format placeholders:
{scratchpad} number of scratchpads
{urgent} number of urgent scratchpads
Color thresholds:
xxx: print a color based on the value of `xxx` placeholder
Optional:
i3ipc: an improved python library to control i3wm and sway
Examples:
```
# hide zero scratchpad
scratchpad {
format = '[\?not_zero \u232b [\?color=scratchpad {scratchpad}]]'
}
# hide non-urgent scratchpad
scratchpad {
format = '[\?not_zero \u232b {urgent}]'
}
# bring up scratchpads on clicks
scratchpad {
on_click 1 = 'scratchpad show'
}
# add more colors
scratchpad {
thresholds = [
(0, "darkgray"), (1, "violet"), (2, "deepskyblue"), (3, "lime"),
(4, "yellow"), (5, "orange"), (6, "red"), (7, "tomato"),
]
}
```
@author shadowprince (counter), cornerman (async)
@license Eclipse Public License (counter), BSD (async)
SAMPLE OUTPUT
[{'full_text': '\u232b '}, {'full_text': u'0', 'color': '#a9a9a9'}]
violet
[{'full_text': '\u232b '}, {'full_text': u'5', 'color': '#ee82ee'}]
urgent
[{'full_text': '\u232b URGENT 1', 'urgent': True}]
"""
# Error message template raised when an unknown/unsupported ipc backend is named.
STRING_ERROR = "invalid ipc `{}`"
class Ipc:
    """Base class for ipc backends: stores the owning module, then runs setup."""

    def __init__(self, parent):
        # Keep a handle on the owning Py3status module, then let the concrete
        # backend configure itself against it.
        self.parent = parent
        self.setup(parent)
class I3ipc(Ipc):
    """
    i3ipc - an improved python library to control i3wm and sway

    Event-driven backend: a daemon thread subscribes to window events and
    pushes bar updates, so no polling cache timeout is needed.
    """

    def setup(self, parent):
        from threading import Thread

        # Updates are event-driven, so disable the polling refresh entirely.
        self.parent.cache_timeout = self.parent.py3.CACHE_FOREVER
        self.scratchpad_data = {"scratchpad": 0, "urgent": 0}
        t = Thread(target=self.start)
        t.daemon = True
        t.start()

    def start(self):
        from i3ipc import Connection

        i3 = Connection()
        self.update(i3)
        # Re-count whenever a window moves (to/from scratchpad) or turns urgent.
        for event in ["window::move", "window::urgent"]:
            i3.on(event, self.update)
        i3.main()  # blocks; runs inside the daemon thread

    def update(self, i3, event=None):
        leaves = i3.get_tree().scratchpad().leaves()
        temporary = {
            "ipc": self.parent.ipc,
            "scratchpad": len(leaves),
            "urgent": sum(window.urgent for window in leaves),
        }
        # Only wake the bar when the counters actually changed.
        if self.scratchpad_data != temporary:
            self.scratchpad_data = temporary
            self.parent.py3.update()

    def get_scratchpad_data(self):
        return self.scratchpad_data
class Msg(Ipc):
    """
    i3-msg - send messages to i3 window manager
    swaymsg - send messages to sway window manager

    Polling backend: shells out to the window manager's msg binary and walks
    the resulting layout tree.
    """

    def setup(self, parent):
        from json import loads

        self.json_loads = loads
        # The normalized name "i3msg" maps back to the real binary "i3-msg";
        # "swaymsg" is already the binary name.
        binary = "i3-msg" if parent.ipc == "i3msg" else parent.ipc
        self.tree_command = [binary, "-t", "get_tree"]

    def get_scratchpad_data(self):
        layout = self.json_loads(self.parent.py3.command_output(self.tree_command))
        windows = self.find_scratchpad(layout).get("floating_nodes", [])
        return {
            "ipc": self.parent.ipc,
            "scratchpad": len(windows),
            "urgent": sum(window["urgent"] for window in windows),
        }

    def find_scratchpad(self, tree):
        # Depth-first search for the hidden "__i3_scratch" workspace node.
        if tree.get("name") == "__i3_scratch":
            return tree
        for child in tree.get("nodes", []):
            found = self.find_scratchpad(child)
            if found:
                return found
        return {}
class Py3status:
    """
    """

    # available configuration parameters
    cache_timeout = 5
    format = "\u232b [\?color=scratchpad {scratchpad}]"
    thresholds = [(0, "darkgray"), (1, "violet")]

    def post_config_hook(self):
        # ipc: specify i3ipc, i3-msg, or swaymsg, otherwise auto
        self.ipc = getattr(self, "ipc", "")
        if self.ipc in ["", "i3ipc"]:
            try:
                from i3ipc import Connection  # noqa f401

                self.ipc = "i3ipc"
            except Exception:
                # i3ipc was explicitly requested but is not importable:
                # re-raise the ImportError instead of falling back silently.
                if self.ipc:
                    raise  # module not found
        # fall back to the wm's msg binary, normalized ("i3-msg" -> "i3msg")
        self.ipc = (self.ipc or self.py3.get_wm_msg()).replace("-", "")
        # instantiate the matching backend, or fail loudly on unknown names
        if self.ipc in ["i3ipc"]:
            self.backend = I3ipc(self)
        elif self.ipc in ["i3msg", "swaymsg"]:
            self.backend = Msg(self)
        else:
            raise Exception(STRING_ERROR.format(self.ipc))
        self.thresholds_init = self.py3.get_color_names_list(self.format)

    def scratchpad(self):
        scratchpad_data = self.backend.get_scratchpad_data()
        # apply color thresholds for every placeholder used in the format
        for x in self.thresholds_init:
            if x in scratchpad_data:
                self.py3.threshold_get_color(scratchpad_data[x], x)
        response = {
            "cached_until": self.py3.time_in(self.cache_timeout),
            "full_text": self.py3.safe_format(self.format, scratchpad_data),
        }
        # propagate urgency to the bar
        if scratchpad_data["urgent"]:
            response["urgent"] = True
        return response
if __name__ == "__main__":
    """
    Run module in test mode.
    """
    from py3status.module_test import module_test

    # Show which ipc backend was auto-detected alongside the scratchpad count.
    config = {"format": "\[{ipc}\] [\?color=scratchpad {scratchpad}]"}
    module_test(Py3status, config=config)
|
Andrwe/py3status
|
py3status/modules/scratchpad.py
|
Python
|
bsd-3-clause
| 5,375
|
/*
* Copyright (c) 2017, 2018, Salesforce.com, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
* following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this list of conditions and the following
* disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the distribution.
*
* * Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
* USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.salesforce.storm.spout.dynamic;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
/**
 * Identifier for a virtual spout, backed by a single non-empty string.
 */
public final class DefaultVirtualSpoutIdentifier implements VirtualSpoutIdentifier {

    /**
     * Backing string value of this identifier.
     */
    private final String id;

    /**
     * Create a new virtual spout identifier from a string.
     * @param id String of the id
     */
    public DefaultVirtualSpoutIdentifier(final String id) {
        Preconditions.checkArgument(!Strings.isNullOrEmpty(id), "You must provide something in order to create an identifier!");
        this.id = id;
    }

    /**
     * Get the string form of the identifier.
     * @return A string of the identifier
     */
    @Override
    public String toString() {
        return id;
    }

    /**
     * Equality is based solely on the backing identifier string.
     * (The class is final, so an instanceof check is equivalent to comparing
     * runtime classes.)
     * @param other The other identifier
     * @return Whether or not the two identifiers are equal
     */
    @Override
    public boolean equals(final Object other) {
        if (other == this) {
            return true;
        }
        if (!(other instanceof DefaultVirtualSpoutIdentifier)) {
            return false;
        }
        final DefaultVirtualSpoutIdentifier that = (DefaultVirtualSpoutIdentifier) other;
        if (id == null) {
            return that.id == null;
        }
        return id.equals(that.id);
    }

    /**
     * Make a hash code for this object.
     * @return Hash code
     */
    @Override
    public int hashCode() {
        return id.hashCode();
    }
}
|
salesforce/storm-dynamic-spout
|
src/main/java/com/salesforce/storm/spout/dynamic/DefaultVirtualSpoutIdentifier.java
|
Java
|
bsd-3-clause
| 3,090
|
---
layout: icon
title: America Football
categories: sports
icon-24: america-football-24.png
icon-18: america-football-18.png
icon-12: america-football-12.png
tags:
- football
- sports
---
|
keum/maki
|
_posts/poi/0500-01-08-america-football.md
|
Markdown
|
bsd-3-clause
| 193
|
# Copyright (c) 2015, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
import stix
from stix.data_marking import MarkingStructure
import stix.bindings.extensions.marking.tlp as tlp_binding
@stix.register_extension
class TLPMarkingStructure(MarkingStructure):
    # Binding module/class used for XML (de)serialization.
    _binding = tlp_binding
    _binding_class = tlp_binding.TLPMarkingStructureType
    _namespace = 'http://data-marking.mitre.org/extensions/MarkingStructure#TLP-1'
    _XSI_TYPE = "tlpMarking:TLPMarkingStructureType"

    def __init__(self, color=None):
        """Create a TLP marking with the given color string (not validated here)."""
        super(TLPMarkingStructure, self).__init__()
        self.color = color

    def to_obj(self, return_obj=None, ns_info=None):
        """Serialize to a binding object, creating one if not supplied."""
        # NOTE(review): super().to_obj() is invoked here and then
        # MarkingStructure.to_obj() is called again below — this looks like a
        # duplicated base-class call; confirm against MarkingStructure's MRO.
        super(TLPMarkingStructure, self).to_obj(return_obj=return_obj, ns_info=ns_info)
        if not return_obj:
            return_obj = self._binding_class()
        MarkingStructure.to_obj(self, return_obj=return_obj, ns_info=ns_info)
        return_obj.color = self.color
        return return_obj

    def to_dict(self):
        """Serialize to a dict; 'color' is omitted when falsy."""
        d = MarkingStructure.to_dict(self)
        if self.color:
            d['color'] = self.color
        return d

    @classmethod
    def from_obj(cls, obj, return_obj=None):
        """Deserialize from a binding object; returns None for falsy input."""
        if not obj:
            return None
        if not return_obj:
            return_obj = cls()
        MarkingStructure.from_obj(obj, return_obj=return_obj)
        return_obj.color = obj.color
        return return_obj

    @classmethod
    def from_dict(cls, d, return_obj=None):
        """Deserialize from a dict; returns None for falsy input."""
        if not d:
            return None
        if not return_obj:
            return_obj = cls()
        MarkingStructure.from_dict(d, return_obj)
        return_obj.color = d.get('color')
        return return_obj
|
chriskiehl/python-stix
|
stix/extensions/marking/tlp.py
|
Python
|
bsd-3-clause
| 1,713
|
package main
import (
	"api/handlers"
	"fmt"
	"log"
	"net/http"
	"runtime"
	"time"
)
// init pins GOMAXPROCS to the CPU count.
// NOTE(review): this is the runtime default since Go 1.5, so the call is
// likely redundant on modern toolchains.
func init() {
	runtime.GOMAXPROCS(runtime.NumCPU())
}
// main wires the HTTP routes and starts the API server on port 8080.
func main() {
	fmt.Println("Server is start at ", time.Now().String(), " , on port 8080")
	http.HandleFunc("/useage", handlers.Useage)
	http.HandleFunc("/v1/", handlers.API_V1)
	// ListenAndServe only returns on failure; the original silently discarded
	// the error, so bind failures went unreported.
	if err := http.ListenAndServe(":8080", nil); err != nil {
		log.Fatal(err)
	}
}
|
zhangbaitong/programming-language-tutorials
|
go/src/api/main.go
|
GO
|
bsd-3-clause
| 349
|
const logger = require('../../lib/logger')()
module.exports = function (req, res, next) {
const loginToken = req.loginToken || ''
const userId = req.userId || ''
const contentType = req.headers['content-type'] || ''
logger.debug({
method: req.method,
url: req.url,
content_type: contentType,
login_token: loginToken,
user_id: userId
})
next()
}
|
codeforamerica/streetmix
|
lib/request_handlers/request_log.js
|
JavaScript
|
bsd-3-clause
| 380
|
#!/usr/bin/env python
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import json
import logging
import os
import pipes
import posixpath
import random
import re
import shlex
import sys
import devil_chromium
from devil import devil_env
from devil.android import apk_helper
from devil.android import device_errors
from devil.android import device_utils
from devil.android import flag_changer
from devil.android.sdk import adb_wrapper
from devil.android.sdk import intent
from devil.android.sdk import version_codes
from devil.utils import run_tests_helper
with devil_env.SysPath(os.path.join(os.path.dirname(__file__), '..', '..',
'third_party', 'colorama', 'src')):
import colorama
from incremental_install import installer
from pylib import constants
from pylib.symbols import deobfuscator
def _Colorize(color, text):
  """Wraps |text| in the named colorama foreground color plus a reset."""
  # |color| as a string to avoid pylint's no-member warning :(.
  # pylint: disable=no-member
  prefix = getattr(colorama.Fore, color)
  return prefix + text + colorama.Fore.RESET
def _InstallApk(devices, apk, install_dict):
  """Installs |apk| on every device in |devices|, in parallel.

  Uses the incremental installer when |install_dict| is provided, otherwise a
  regular device install.
  """
  def install(device):
    if install_dict:
      installer.Install(device, install_dict, apk=apk)
    else:
      device.Install(apk)
  logging.info('Installing %sincremental apk.', '' if install_dict else 'non-')
  device_utils.DeviceUtils.parallel(devices).pMap(install)
def _UninstallApk(devices, install_dict, package_name):
  """Uninstalls |package_name| from every device in |devices|, in parallel.

  Uses the incremental installer's uninstall path when |install_dict| is set.
  """
  def uninstall(device):
    if install_dict:
      installer.Uninstall(device, package_name)
    else:
      device.Uninstall(package_name)
  device_utils.DeviceUtils.parallel(devices).pMap(uninstall)
def _LaunchUrl(devices, input_args, device_args_file, url, apk):
  """Launches |apk| on each device, optionally at |url| with command-line flags.

  Replaces the device command-line flags with |input_args| first; then either
  simulates an app-icon tap (no url) or fires a VIEW intent at |url|.
  Raises if flags are given without a flags file, or a url is given but the
  apk declares no view activity.
  """
  if input_args and device_args_file is None:
    raise Exception('This apk does not support any flags.')
  if url:
    view_activity = apk.GetViewActivityName()
    if not view_activity:
      raise Exception('APK does not support launching with URLs.')
  def launch(device):
    # The flags are first updated with input args.
    changer = flag_changer.FlagChanger(device, device_args_file)
    flags = []
    if input_args:
      flags = shlex.split(input_args)
    changer.ReplaceFlags(flags)
    # Then launch the apk.
    if url is None:
      # Simulate app icon click if no url is present.
      cmd = ['monkey', '-p', apk.GetPackageName(), '-c',
             'android.intent.category.LAUNCHER', '1']
      device.RunShellCommand(cmd, check_return=True)
    else:
      launch_intent = intent.Intent(action='android.intent.action.VIEW',
                                    activity=view_activity, data=url,
                                    package=apk.GetPackageName())
      device.StartActivity(launch_intent)
  device_utils.DeviceUtils.parallel(devices).pMap(launch)
def _ChangeFlags(devices, input_args, device_args_file):
  """Shows the current device flags (|input_args| is None) or replaces them."""
  if input_args is None:
    _DisplayArgs(devices, device_args_file)
  else:
    flags = shlex.split(input_args)
    def update(device):
      flag_changer.FlagChanger(device, device_args_file).ReplaceFlags(flags)
    device_utils.DeviceUtils.parallel(devices).pMap(update)
def _TargetCpuToTargetArch(target_cpu):
if target_cpu == 'x64':
return 'x86_64'
if target_cpu == 'mipsel':
return 'mips'
return target_cpu
def _RunGdb(device, package_name, output_directory, target_cpu, extra_args,
            verbose):
  """Execs the adb_gdb helper script for |package_name| on |device|.

  Does not return: the current process is replaced via os.execv().
  """
  gdb_script_path = os.path.dirname(__file__) + '/adb_gdb'
  cmd = [
      gdb_script_path,
      '--package-name=%s' % package_name,
      '--output-directory=%s' % output_directory,
      '--adb=%s' % adb_wrapper.AdbWrapper.GetAdbPath(),
      '--device=%s' % device.serial,
      # Use one lib dir per device so that changing between devices does not
      # require refetching the device libs.
      '--pull-libs-dir=/tmp/adb-gdb-libs-%s' % device.serial,
  ]
  # Enable verbose output of adb_gdb if it's set for this script.
  if verbose:
    cmd.append('--verbose')
  if target_cpu:
    cmd.append('--target-arch=%s' % _TargetCpuToTargetArch(target_cpu))
  cmd.extend(extra_args)
  logging.warning('Running: %s', ' '.join(pipes.quote(x) for x in cmd))
  print _Colorize('YELLOW', 'All subsequent output is from adb_gdb script.')
  os.execv(gdb_script_path, cmd)
def _PrintPerDeviceOutput(devices, results, single_line=False):
  """Generator yielding each result after printing a per-device banner.

  Writes a colored "<device> (<build>):" header to stdout before yielding the
  corresponding entry of |results| so output is visually grouped by device.
  """
  for d, result in zip(devices, results):
    if not single_line and d is not devices[0]:
      sys.stdout.write('\n')
    sys.stdout.write(
          _Colorize('YELLOW', '%s (%s):' % (d, d.build_description)))
    sys.stdout.write(' ' if single_line else '\n')
    yield result
def _RunMemUsage(devices, package_name):
  """Prints per-process memory usage for |package_name| on each device."""
  def mem_usage_helper(d):
    # Collect (process name, pid, usage dict) for every pid of the package.
    ret = []
    proc_map = d.GetPids(package_name)
    for name, pids in proc_map.iteritems():
      for pid in pids:
        ret.append((name, pid, d.GetMemoryUsageForPid(pid)))
    return ret
  parallel_devices = device_utils.DeviceUtils.parallel(devices)
  all_results = parallel_devices.pMap(mem_usage_helper).pGet(None)
  for result in _PrintPerDeviceOutput(devices, all_results):
    if not result:
      print 'No processes found.'
    else:
      for name, pid, usage in sorted(result):
        print '%s(%s):' % (name, pid)
        for k, v in sorted(usage.iteritems()):
          print ' %s=%d' % (k, v)
        print
def _DuHelper(device, path_spec, run_as=None):
  """Runs "du -s -k |path_spec|" on |device| and returns parsed result.

  Args:
    device: A DeviceUtils instance.
    path_spec: The list of paths to run du on. May contain shell expansions
        (will not be escaped).
    run_as: Package name to run as, or None to run as shell user. If not None
        and app is not android:debuggable (run-as fails), then command will be
        run as root.

  Returns:
    A dict of path->size in kb containing all paths in |path_spec| that exist
    on device. Paths that do not exist are silently ignored.
    NOTE(review): on unparsable du output this logs an error and implicitly
    returns None — callers should be prepared for that.
  """
  # Example output for: du -s -k /data/data/org.chromium.chrome/{*,.*}
  # 144 /data/data/org.chromium.chrome/cache
  # 8 /data/data/org.chromium.chrome/files
  # <snip>
  # du: .*: No such file or directory
  # The -d flag works differently across android version, so use -s instead.
  cmd_str = 'du -s -k ' + path_spec
  lines = device.RunShellCommand(cmd_str, run_as=run_as, shell=True,
                                 check_return=False)
  output = '\n'.join(lines)
  # run-as: Package 'com.android.chrome' is not debuggable
  if output.startswith('run-as:'):
    # Fall back to root when run-as is refused.
    # check_return=False needed for when some paths in path_spec do not exist.
    lines = device.RunShellCommand(cmd_str, as_root=True, shell=True,
                                   check_return=False)
  ret = {}
  try:
    for line in lines:
      # du: .*: No such file or directory
      if line.startswith('du:'):
        continue
      size, subpath = line.split(None, 1)
      ret[subpath] = int(size)
    return ret
  except ValueError:
    logging.error('Failed to parse du output:\n%s', output)
def _RunDiskUsage(devices, package_name, verbose):
  """Prints per-category on-device disk usage for |package_name|.

  Reported categories: apk, app data (code cache split out), native libs,
  and odex/dex artifacts (plus the active compilation_filter).
  """
  # Measuring dex size is a bit complicated:
  # https://source.android.com/devices/tech/dalvik/jit-compiler
  #
  # For KitKat and below:
  # dumpsys package contains:
  # dataDir=/data/data/org.chromium.chrome
  # codePath=/data/app/org.chromium.chrome-1.apk
  # resourcePath=/data/app/org.chromium.chrome-1.apk
  # nativeLibraryPath=/data/app-lib/org.chromium.chrome-1
  # To measure odex:
  # ls -l /data/dalvik-cache/data@app@org.chromium.chrome-1.apk@classes.dex
  #
  # For Android L and M (and maybe for N+ system apps):
  # dumpsys package contains:
  # codePath=/data/app/org.chromium.chrome-1
  # resourcePath=/data/app/org.chromium.chrome-1
  # legacyNativeLibraryDir=/data/app/org.chromium.chrome-1/lib
  # To measure odex:
  # # Option 1:
  # /data/dalvik-cache/arm/data@app@org.chromium.chrome-1@base.apk@classes.dex
  # /data/dalvik-cache/arm/data@app@org.chromium.chrome-1@base.apk@classes.vdex
  # ls -l /data/dalvik-cache/profiles/org.chromium.chrome
  # (these profiles all appear to be 0 bytes)
  # # Option 2:
  # ls -l /data/app/org.chromium.chrome-1/oat/arm/base.odex
  #
  # For Android N+:
  # dumpsys package contains:
  # dataDir=/data/user/0/org.chromium.chrome
  # codePath=/data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==
  # resourcePath=/data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==
  # legacyNativeLibraryDir=/data/app/org.chromium.chrome-GUID/lib
  # Instruction Set: arm
  # path: /data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==/base.apk
  # status: /data/.../oat/arm/base.odex[status=kOatUpToDate, compilation_f
  # ilter=quicken]
  # Instruction Set: arm64
  # path: /data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==/base.apk
  # status: /data/.../oat/arm64/base.odex[status=..., compilation_filter=q
  # uicken]
  # To measure odex:
  # ls -l /data/app/.../oat/arm/base.odex
  # ls -l /data/app/.../oat/arm/base.vdex (optional)
  # To measure the correct odex size:
  # cmd package compile -m speed org.chromium.chrome # For webview
  # cmd package compile -m speed-profile org.chromium.chrome # For others
  def disk_usage_helper(d):
    """Returns the 6-tuple of results for one device, or None if not installed."""
    package_output = '\n'.join(d.RunShellCommand(
        ['dumpsys', 'package', package_name], check_return=True))
    # Prints a message but does not return error when apk is not installed.
    if 'Unable to find package:' in package_output:
      return None
    # Ignore system apks.
    idx = package_output.find('Hidden system packages:')
    if idx != -1:
      package_output = package_output[:idx]
    try:
      data_dir = re.search(r'dataDir=(.*)', package_output).group(1)
      code_path = re.search(r'codePath=(.*)', package_output).group(1)
      lib_path = re.search(r'(?:legacyN|n)ativeLibrary(?:Dir|Path)=(.*)',
                           package_output).group(1)
    except AttributeError:
      raise Exception('Error parsing dumpsys output: ' + package_output)
    compilation_filters = set()
    # Match "compilation_filter=value", where a line break can occur at any spot
    # (refer to examples above).
    # Joining the characters of the literal with r'\s*' yields a regex that
    # tolerates whitespace between every character of "compilation_filter=".
    awful_wrapping = r'\s*'.join('compilation_filter=')
    for m in re.finditer(awful_wrapping + r'([\s\S]+?)[\],]', package_output):
      compilation_filters.add(re.sub(r'\s+', '', m.group(1)))
    compilation_filter = ','.join(sorted(compilation_filters))
    data_dir_sizes = _DuHelper(d, '%s/{*,.*}' % data_dir, run_as=package_name)
    # Measure code_cache separately since it can be large.
    code_cache_sizes = {}
    code_cache_dir = next(
        (k for k in data_dir_sizes if k.endswith('/code_cache')), None)
    if code_cache_dir:
      data_dir_sizes.pop(code_cache_dir)
      code_cache_sizes = _DuHelper(d, '%s/{*,.*}' % code_cache_dir,
                                   run_as=package_name)
    apk_path_spec = code_path
    if not apk_path_spec.endswith('.apk'):
      apk_path_spec += '/*.apk'
    apk_sizes = _DuHelper(d, apk_path_spec)
    if lib_path.endswith('/lib'):
      # Shows architecture subdirectory.
      lib_sizes = _DuHelper(d, '%s/{*,.*}' % lib_path)
    else:
      lib_sizes = _DuHelper(d, lib_path)
    # Look at all possible locations for odex files.
    odex_paths = []
    for apk_path in apk_sizes:
      # e.g. /data/app/pkg-1/base.apk -> data@app@pkg-1@base.apk
      mangled_apk_path = apk_path[1:].replace('/', '@')
      apk_basename = posixpath.basename(apk_path)[:-4]
      for ext in ('dex', 'odex', 'vdex', 'art'):
        # Easier to check all architectures than to determine active ones.
        for arch in ('arm', 'arm64', 'x86', 'x86_64', 'mips', 'mips64'):
          odex_paths.append(
              '%s/oat/%s/%s.%s' % (code_path, arch, apk_basename, ext))
          # No app could possibly have more than 6 dex files.
          for suffix in ('', '2', '3', '4', '5'):
            odex_paths.append('/data/dalvik-cache/%s/%s@classes%s.%s' % (
                arch, mangled_apk_path, suffix, ext))
            # This path does not have |arch|, so don't repeat it for every arch.
            if arch == 'arm':
              odex_paths.append('/data/dalvik-cache/%s@classes%s.dex' % (
                  mangled_apk_path, suffix))
    odex_sizes = _DuHelper(d, ' '.join(pipes.quote(p) for p in odex_paths))
    return (data_dir_sizes, code_cache_sizes, apk_sizes, lib_sizes, odex_sizes,
            compilation_filter)
  def print_sizes(desc, sizes):
    """Prints the kb total for |sizes|, plus each path when verbose."""
    print '%s: %dkb' % (desc, sum(sizes.itervalues()))
    if verbose:
      for path, size in sorted(sizes.iteritems()):
        print ' %s: %skb' % (path, size)
  parallel_devices = device_utils.DeviceUtils.parallel(devices)
  all_results = parallel_devices.pMap(disk_usage_helper).pGet(None)
  for result in _PrintPerDeviceOutput(devices, all_results):
    if not result:
      print 'APK is not installed.'
      continue
    (data_dir_sizes, code_cache_sizes, apk_sizes, lib_sizes, odex_sizes,
     compilation_filter) = result
    # result[:-1] skips the trailing compilation_filter string.
    total = sum(sum(sizes.itervalues()) for sizes in result[:-1])
    print_sizes('Apk', apk_sizes)
    print_sizes('App Data (non-code cache)', data_dir_sizes)
    print_sizes('App Data (code cache)', code_cache_sizes)
    print_sizes('Native Libs', lib_sizes)
    # A filter without "speed" means odex size is not representative of a
    # fully-compiled install; suggest compile-dex below.
    show_warning = compilation_filter and 'speed' not in compilation_filter
    compilation_filter = compilation_filter or 'n/a'
    print_sizes('odex (compilation_filter=%s)' % compilation_filter, odex_sizes)
    if show_warning:
      logging.warning('For a more realistic odex size, run:')
      logging.warning(' %s compile-dex [speed|speed-profile]', sys.argv[0])
    print 'Total: %skb (%.1fmb)' % (total, total / 1024.0)
def _RunLogcat(device, package_name, verbose, mapping_path):
  """Streams logcat, filtered to processes belonging to |package_name|.

  Args:
    device: A DeviceUtils instance.
    package_name: Package whose processes' lines are shown.
    verbose: When true, prints every line unfiltered.
    mapping_path: Optional ProGuard mapping file; enables deobfuscation.
  """
  if mapping_path:
    try:
      deobfuscate = deobfuscator.Deobfuscator(mapping_path)
    except OSError:
      sys.stderr.write('Error executing "bin/java_deobfuscate". '
                       'Did you forget to build it?\n')
      sys.exit(1)
  def get_my_pids():
    """Returns all pids (as ints) currently owned by |package_name|."""
    my_pids = []
    for pids in device.GetPids(package_name).values():
      my_pids.extend(pids)
    return [int(pid) for pid in my_pids]
  def process_line(line, fast=False):
    """Writes |line| to stdout if it belongs to one of our pids.

    When |fast| is true (before our nonce has been seen in the stream),
    unknown pids are assumed to not be ours, avoiding a GetPids() round-trip
    per line while replaying the backlog.
    """
    if verbose:
      if fast:
        return
    else:
      if not line or line.startswith('------'):
        return
      # threadtime format: date time pid tid priority tag: message
      tokens = line.split(None, 4)
      pid = int(tokens[2])
      priority = tokens[4]
      if pid in my_pids or (not fast and priority == 'F'):
        pass # write
      elif pid in not_my_pids:
        return
      elif fast:
        # Skip checking whether our package spawned new processes.
        not_my_pids.add(pid)
        return
      else:
        # Check and add the pid if it is a new one from our package.
        my_pids.update(get_my_pids())
        if pid not in my_pids:
          not_my_pids.add(pid)
          return
    if mapping_path:
      line = '\n'.join(deobfuscate.TransformLines([line.rstrip()])) + '\n'
    sys.stdout.write(line)
  try:
    my_pids = set(get_my_pids())
    not_my_pids = set()
    # Log a unique marker; once it shows up in the stream we know we are past
    # the backlog and switch out of the "fast" (no re-check) mode.
    nonce = 'apk_wrappers.py nonce={}'.format(random.random())
    device.RunShellCommand(['log', nonce])
    fast = True
    for line in device.adb.Logcat(logcat_format='threadtime'):
      try:
        process_line(line, fast)
      except:
        sys.stderr.write('Failed to process line: ' + line)
        raise
      if fast and nonce in line:
        fast = False
  except KeyboardInterrupt:
    pass # Don't show stack trace upon Ctrl-C
  finally:
    if mapping_path:
      deobfuscate.Close()
def _RunPs(devices, package_name):
parallel_devices = device_utils.DeviceUtils.parallel(devices)
all_pids = parallel_devices.GetPids(package_name).pGet(None)
for proc_map in _PrintPerDeviceOutput(devices, all_pids):
if not proc_map:
print 'No processes found.'
else:
for name, pids in sorted(proc_map.items()):
print name, ','.join(pids)
def _RunShell(devices, package_name, cmd):
  """Runs |cmd| as the package's uid, or opens an interactive run-as shell.

  With |cmd| set, runs it via run-as on every device and prints the output.
  With no |cmd|, execs "adb shell" on the first device (never returns).
  """
  if cmd:
    parallel_devices = device_utils.DeviceUtils.parallel(devices)
    outputs = parallel_devices.RunShellCommand(
        cmd, run_as=package_name).pGet(None)
    for output in _PrintPerDeviceOutput(devices, outputs):
      for line in output:
        print line
  else:
    adb_path = adb_wrapper.AdbWrapper.GetAdbPath()
    cmd = [adb_path, '-s', devices[0].serial, 'shell']
    # Pre-N devices do not support -t flag.
    if devices[0].build_version_sdk >= version_codes.NOUGAT:
      cmd += ['-t', 'run-as', package_name]
    else:
      # Cannot force a tty + run-as here, so tell the user how to do it.
      print 'Upon entering the shell, run:'
      print 'run-as', package_name
      print
    # Replaces the current process; nothing after this line runs.
    os.execv(adb_path, cmd)
def _RunCompileDex(devices, package_name, compilation_filter):
cmd = ['cmd', 'package', 'compile', '-f', '-m', compilation_filter,
package_name]
parallel_devices = device_utils.DeviceUtils.parallel(devices)
outputs = parallel_devices.RunShellCommand(cmd).pGet(None)
for output in _PrintPerDeviceOutput(devices, outputs):
for line in output:
print line
def _GenerateAvailableDevicesMessage(devices):
  """Returns a human-readable listing of the attached devices."""
  device_group = device_utils.DeviceUtils.parallel(devices)
  descriptions = device_group.pMap(lambda d: d.build_description).pGet(None)
  parts = ['Available devices:\n']
  for device, description in zip(devices, descriptions):
    parts.append(' %s (%s)\n' % (device, description))
  return ''.join(parts)
# TODO(agrieve):add "--all" in the MultipleDevicesError message and use it here.
def _GenerateMissingAllFlagMessage(devices):
  """Returns the error text shown when several devices are attached but
  neither --all nor --device was passed."""
  msg = ('More than one device available. Use --all to select all devices, '
         'or use --device to select a device by serial.\n\n')
  return msg + _GenerateAvailableDevicesMessage(devices)
def _DisplayArgs(devices, device_args_file):
  """Prints the command-line flags currently stored on each device."""
  def flags_helper(d):
    # FlagChanger reads the flags file from /data/local/tmp on the device.
    changer = flag_changer.FlagChanger(d, device_args_file)
    return changer.GetCurrentFlags()
  parallel_devices = device_utils.DeviceUtils.parallel(devices)
  outputs = parallel_devices.pMap(flags_helper).pGet(None)
  print 'Existing flags per-device (via /data/local/tmp/%s):' % device_args_file
  for flags in _PrintPerDeviceOutput(devices, outputs, single_line=True):
    # Quote each flag so the printed line can be copy-pasted into a shell.
    quoted_flags = ' '.join(pipes.quote(f) for f in flags)
    print quoted_flags or 'No flags set.'
def _DeviceCachePath(device, output_directory):
file_name = 'device_cache_%s.json' % device.serial
return os.path.join(output_directory, file_name)
def _LoadDeviceCaches(devices, output_directory):
  """Seeds each device's in-memory cache from a previously saved file."""
  if not output_directory:
    return
  for device in devices:
    cache_path = _DeviceCachePath(device, output_directory)
    if not os.path.exists(cache_path):
      logging.debug('No cache present for device: %s', device)
      continue
    logging.debug('Using device cache: %s', cache_path)
    with open(cache_path) as f:
      device.LoadCacheData(f.read())
    # Delete the cached file so that any exceptions cause it to be cleared.
    os.unlink(cache_path)
def _SaveDeviceCaches(devices, output_directory):
  """Writes each device's cache data to disk for reuse by later invocations."""
  if not output_directory:
    return
  for device in devices:
    cache_path = _DeviceCachePath(device, output_directory)
    with open(cache_path, 'w') as f:
      f.write(device.DumpCacheData())
      logging.info('Wrote device cache: %s', cache_path)
class _Command(object):
  """Base class for all apk_operations sub-commands.

  Subclasses set the class attributes below to declare which arguments they
  need; RegisterArgs()/ProcessArgs() then build the argparse sub-parser and
  resolve devices/apk/package-name accordingly.
  """
  # Sub-command name as it appears on the command line.
  name = None
  # One-line help text for the sub-parser.
  description = None
  # Whether a package name must be resolvable (via --package-name or the apk).
  needs_package_name = False
  needs_output_directory = False
  needs_apk_path = False
  supports_incremental = False
  accepts_command_line_flags = False
  accepts_args = False
  accepts_url = False
  # When true, operating on every attached device needs no --all flag.
  all_devices_by_default = False
  # When true, the command replaces this process (os.execv) and supports
  # only a single device.
  calls_exec = False
  def __init__(self, from_wrapper_script):
    self._parser = None
    self._from_wrapper_script = from_wrapper_script
    self.args = None
    self.apk_helper = None
    self.install_dict = None
    self.devices = None
    # Do not support incremental install outside the context of wrapper scripts.
    if not from_wrapper_script:
      self.supports_incremental = False
  def _RegisterExtraArgs(self, subp):
    # Hook for subclasses to add command-specific arguments.
    pass
  def RegisterArgs(self, parser):
    """Creates this command's sub-parser and registers its arguments."""
    subp = parser.add_parser(self.name, help=self.description)
    self._parser = subp
    subp.set_defaults(command=self)
    subp.add_argument('--all',
                      action='store_true',
                      default=self.all_devices_by_default,
                      help='Operate on all connected devices.',)
    subp.add_argument('-d',
                      '--device',
                      action='append',
                      default=[],
                      dest='devices',
                      help='Target device for script to work on. Enter '
                           'multiple times for multiple devices.')
    subp.add_argument('-v',
                      '--verbose',
                      action='count',
                      default=0,
                      dest='verbose_count',
                      help='Verbose level (multiple times for more)')
    group = subp.add_argument_group('%s arguments' % self.name)
    if self.needs_package_name:
      # Always gleaned from apk when using wrapper scripts.
      group.add_argument('--package-name',
          help=argparse.SUPPRESS if self._from_wrapper_script else (
              "App's package name."))
    if self.needs_apk_path or self.needs_package_name:
      # Adding this argument to the subparser would override the set_defaults()
      # value set by on the parent parser (even if None).
      if not self._from_wrapper_script:
        group.add_argument('--apk-path',
                           required=self.needs_apk_path,
                           help='Path to .apk')
    if self.supports_incremental:
      group.add_argument('--incremental',
                          action='store_true',
                          default=False,
                          help='Always install an incremental apk.')
      group.add_argument('--non-incremental',
                          action='store_true',
                          default=False,
                          help='Always install a non-incremental apk.')
    # accepts_command_line_flags and accepts_args are mutually exclusive.
    # argparse will throw if they are both set.
    if self.accepts_command_line_flags:
      group.add_argument('--args', help='Command-line flags.')
    if self.accepts_args:
      group.add_argument('--args', help='Extra arguments.')
    if self.accepts_url:
      group.add_argument('url', nargs='?', help='A URL to launch with.')
    if not self._from_wrapper_script and self.accepts_command_line_flags:
      # Provided by wrapper scripts.
      group.add_argument(
          '--command-line-flags-file-name',
          help='Name of the command-line flags file')
    self._RegisterExtraArgs(group)
  def ProcessArgs(self, args):
    """Validates args, selects devices, and resolves the apk helper.

    Saves device caches on error (and before exec-style commands) so cached
    device state survives this invocation.
    """
    devices = device_utils.DeviceUtils.HealthyDevices(
        device_arg=args.devices,
        enable_device_files_cache=bool(args.output_directory),
        default_retries=0)
    self.args = args
    self.devices = devices
    # TODO(agrieve): Device cache should not depend on output directory.
    # Maybe put int /tmp?
    _LoadDeviceCaches(devices, args.output_directory)
    # Ensure these keys always exist. They are set by wrapper scripts, but not
    # always added when not using wrapper scripts.
    args.__dict__.setdefault('apk_path', None)
    args.__dict__.setdefault('incremental_json', None)
    try:
      if len(devices) > 1:
        if self.calls_exec:
          self._parser.error(device_errors.MultipleDevicesError(devices))
        if not args.all and not args.devices:
          self._parser.error(_GenerateMissingAllFlagMessage(devices))
      if self.supports_incremental:
        if args.incremental and args.non_incremental:
          self._parser.error('Must use only one of --incremental and '
                             '--non-incremental')
        elif args.non_incremental:
          if not args.apk_path:
            self._parser.error('Apk has not been built.')
          args.incremental_json = None
        elif args.incremental:
          if not args.incremental_json:
            self._parser.error('Incremental apk has not been built.')
          args.apk_path = None
        if args.apk_path and args.incremental_json:
          self._parser.error('Both incremental and non-incremental apks exist. '
                             'Select using --incremental or --non-incremental')
      if self.needs_apk_path or args.apk_path or args.incremental_json:
        if args.incremental_json:
          with open(args.incremental_json) as f:
            install_dict = json.load(f)
          apk_path = os.path.join(args.output_directory,
                                  install_dict['apk_path'])
          if os.path.exists(apk_path):
            self.install_dict = install_dict
            self.apk_helper = apk_helper.ToHelper(
                os.path.join(args.output_directory,
                             self.install_dict['apk_path']))
        if not self.apk_helper and args.apk_path:
          self.apk_helper = apk_helper.ToHelper(args.apk_path)
        if not self.apk_helper:
          self._parser.error(
              'Neither incremental nor non-incremental apk is built.')
      if self.needs_package_name and not args.package_name:
        if self.apk_helper:
          args.package_name = self.apk_helper.GetPackageName()
        elif self._from_wrapper_script:
          self._parser.error(
              'Neither incremental nor non-incremental apk is built.')
        else:
          self._parser.error('One of --package-name or --apk-path is required.')
      # Save cache now if command will not get a chance to afterwards.
      if self.calls_exec:
        _SaveDeviceCaches(devices, args.output_directory)
    except:
      _SaveDeviceCaches(devices, args.output_directory)
      raise
class _DevicesCommand(_Command):
  """Prints the serial and description of every healthy attached device."""
  name = 'devices'
  description = 'Describe attached devices.'
  all_devices_by_default = True
  def Run(self):
    print _GenerateAvailableDevicesMessage(self.devices)
class _InstallCommand(_Command):
  """Installs the (possibly incremental) APK onto the selected devices."""
  name = 'install'
  description = 'Installs the APK to one or more devices.'
  needs_apk_path = True
  supports_incremental = True
  def Run(self):
    _InstallApk(self.devices, self.apk_helper, self.install_dict)
class _UninstallCommand(_Command):
  """Uninstalls the package from the selected devices."""
  name = 'uninstall'
  # Help-text fix: the apk is removed *from* devices, not "to" them.
  description = 'Removes the APK from one or more devices.'
  needs_package_name = True
  def Run(self):
    _UninstallApk(self.devices, self.install_dict, self.args.package_name)
class _LaunchCommand(_Command):
  """Writes command-line flags, then fires a launch intent for the apk."""
  name = 'launch'
  description = ('Sends a launch intent for the APK after first writing the '
                 'command-line flags file.')
  # TODO(agrieve): Launch could be changed to require only package name by
  # parsing "dumpsys package" for launch & view activities.
  needs_apk_path = True
  accepts_command_line_flags = True
  accepts_url = True
  all_devices_by_default = True
  def Run(self):
    _LaunchUrl(self.devices, self.args.args, self.args.command_line_flags_file,
               self.args.url, self.apk_helper)
class _RunCommand(_Command):
  """Convenience command: install followed by launch."""
  name = 'run'
  description = 'Install and then launch.'
  needs_apk_path = True
  supports_incremental = True
  needs_package_name = True
  accepts_command_line_flags = True
  accepts_url = True
  def Run(self):
    # logging.warning is used so the status shows at the default log level.
    logging.warning('Installing...')
    _InstallApk(self.devices, self.apk_helper, self.install_dict)
    logging.warning('Sending launch intent...')
    _LaunchUrl(self.devices, self.args.args, self.args.command_line_flags_file,
               self.args.url, self.apk_helper)
class _StopCommand(_Command):
  """Force-stops the app on every selected device."""
  name = 'stop'
  description = 'Force-stops the app.'
  needs_package_name = True
  all_devices_by_default = True
  def Run(self):
    device_utils.DeviceUtils.parallel(self.devices).ForceStop(
        self.args.package_name)
class _ClearDataCommand(_Command):
  """Clears the app's stored data on every selected device."""
  name = 'clear-data'
  # Bug fix: this attribute was misspelled "descriptions", which left the
  # inherited .description as None, so the sub-parser had no help text.
  description = 'Clears all app data.'
  needs_package_name = True
  all_devices_by_default = True
  def Run(self):
    device_utils.DeviceUtils.parallel(self.devices).ClearApplicationState(
        self.args.package_name)
class _ArgvCommand(_Command):
  """Shows (and with --args, rewrites) the on-device command-line flags file."""
  name = 'argv'
  description = 'Display and optionally update command-line flags file.'
  needs_package_name = True
  accepts_command_line_flags = True
  all_devices_by_default = True
  def Run(self):
    _ChangeFlags(self.devices, self.args.args,
                 self.args.command_line_flags_file)
class _GdbCommand(_Command):
  """Attaches gdb to the running app via //build/android/adb_gdb."""
  name = 'gdb'
  description = 'Runs //build/android/adb_gdb with apk-specific args.'
  needs_package_name = True
  needs_output_directory = True
  accepts_args = True
  # adb_gdb replaces this process, so only a single device is supported.
  calls_exec = True
  def Run(self):
    extra_args = shlex.split(self.args.args or '')
    _RunGdb(self.devices[0], self.args.package_name, self.args.output_directory,
            self.args.target_cpu, extra_args, bool(self.args.verbose_count))
class _LogcatCommand(_Command):
  """Streams logcat filtered to the app's processes, with deobfuscation."""
  name = 'logcat'
  description = 'Runs "adb logcat" filtering to just the current APK processes'
  needs_package_name = True
  calls_exec = True
  def Run(self):
    mapping = self.args.proguard_mapping_path
    if self.args.no_deobfuscate:
      mapping = None
    _RunLogcat(self.devices[0], self.args.package_name,
               bool(self.args.verbose_count), mapping)
  def _RegisterExtraArgs(self, group):
    if self._from_wrapper_script:
      group.add_argument('--no-deobfuscate', action='store_true',
          help='Disables ProGuard deobfuscation of logcat.')
    else:
      # Outside wrapper scripts the mapping path must be given explicitly.
      group.set_defaults(no_deobfuscate=False)
      group.add_argument('--proguard-mapping-path',
          help='Path to ProGuard map (enables deobfuscation)')
class _PsCommand(_Command):
  """Lists the PIDs of the app's currently running processes."""
  name = 'ps'
  description = 'Show PIDs of any APK processes currently running.'
  needs_package_name = True
  all_devices_by_default = True
  def Run(self):
    _RunPs(self.devices, self.args.package_name)
class _DiskUsageCommand(_Command):
  """Reports per-category on-device storage used by the app."""
  name = 'disk-usage'
  description = 'Show how much device storage is being consumed by the app.'
  needs_package_name = True
  all_devices_by_default = True
  def Run(self):
    _RunDiskUsage(self.devices, self.args.package_name,
                  bool(self.args.verbose_count))
class _MemUsageCommand(_Command):
  """Reports memory usage of the app's running processes."""
  name = 'mem-usage'
  description = 'Show memory usage of currently running APK processes.'
  needs_package_name = True
  all_devices_by_default = True
  def Run(self):
    _RunMemUsage(self.devices, self.args.package_name)
class _ShellCommand(_Command):
  """Runs an adb shell command (or interactive shell) as the app's uid."""
  name = 'shell'
  description = ('Same as "adb shell <command>", but runs as the apk\'s uid '
                 '(via run-as). Useful for inspecting the app\'s data '
                 'directory.')
  needs_package_name = True
  @property
  def calls_exec(self):
    # An interactive shell (no cmd) replaces this process via os.execv.
    return not self.args.cmd
  def _RegisterExtraArgs(self, group):
    group.add_argument(
        'cmd', nargs=argparse.REMAINDER, help='Command to run.')
  def Run(self):
    _RunShell(self.devices, self.args.package_name, self.args.cmd)
class _CompileDexCommand(_Command):
  """Forces odex compilation with a chosen compilation filter (Android N+)."""
  name = 'compile-dex'
  description = ('Applicable only for Android N+. Forces .odex files to be '
                 'compiled with the given compilation filter. To see existing '
                 'filter, use "disk-usage" command.')
  needs_package_name = True
  all_devices_by_default = True
  def _RegisterExtraArgs(self, group):
    group.add_argument(
        'compilation_filter',
        choices=['verify', 'quicken', 'space-profile', 'space',
                 'speed-profile', 'speed'],
        help='For WebView/Monochrome, use "speed". For other apks, use '
             '"speed-profile".')
  def Run(self):
    _RunCompileDex(self.devices, self.args.package_name,
                   self.args.compilation_filter)
# All sub-command classes, in the order they appear in --help output.
_COMMANDS = [
    _DevicesCommand,
    _InstallCommand,
    _UninstallCommand,
    _LaunchCommand,
    _RunCommand,
    _StopCommand,
    _ClearDataCommand,
    _ArgvCommand,
    _GdbCommand,
    _LogcatCommand,
    _PsCommand,
    _DiskUsageCommand,
    _MemUsageCommand,
    _ShellCommand,
    _CompileDexCommand,
]
def _ParseArgs(parser, from_wrapper_script):
  """Registers every applicable command's sub-parser and parses sys.argv."""
  subparsers = parser.add_subparsers()
  for command_class in _COMMANDS:
    command = command_class(from_wrapper_script)
    if from_wrapper_script or not command.needs_output_directory:
      command.RegisterArgs(subparsers)
  # Show extended help when no command is passed.
  argv = sys.argv[1:] or ['--help']
  return parser.parse_args(argv)
def _RunInternal(parser, output_directory=None):
  """Shared driver for both main() and wrapper-script Run() entry points."""
  colorama.init()
  parser.set_defaults(output_directory=output_directory)
  # Only wrapper scripts supply an output directory.
  from_wrapper_script = bool(output_directory)
  args = _ParseArgs(parser, from_wrapper_script)
  run_tests_helper.SetLogLevel(args.verbose_count)
  args.command.ProcessArgs(args)
  args.command.Run()
  # Incremental install depends on the cache being cleared when uninstalling.
  if args.command.name != 'uninstall':
    _SaveDeviceCaches(args.command.devices, output_directory)
# TODO(agrieve): Remove =None from target_cpu on or after October 2017.
# It exists only so that stale wrapper scripts continue to work.
def Run(output_directory, apk_path, incremental_json, command_line_flags_file,
        target_cpu, proguard_mapping_path):
  """Entry point for generated wrapper scripts.

  Paths that no longer exist are nulled out so argument validation can report
  "not built" instead of failing on a stale path.
  """
  constants.SetOutputDirectory(output_directory)
  devil_chromium.Initialize(output_directory=output_directory)
  parser = argparse.ArgumentParser()
  exists_or_none = lambda p: p if p and os.path.exists(p) else None
  parser.set_defaults(
      command_line_flags_file=command_line_flags_file,
      target_cpu=target_cpu,
      apk_path=exists_or_none(apk_path),
      incremental_json=exists_or_none(incremental_json),
      proguard_mapping_path=proguard_mapping_path)
  _RunInternal(parser, output_directory=output_directory)
def main():
  """Entry point when invoked directly (outside generated wrapper scripts)."""
  devil_chromium.Initialize()
  _RunInternal(argparse.ArgumentParser(), output_directory=None)
if __name__ == '__main__':
  main()
|
chrisdickinson/nojs
|
build/android/apk_operations.py
|
Python
|
bsd-3-clause
| 34,076
|
/*===================================================================
The Medical Imaging Interaction Toolkit (MITK)
Copyright (c) German Cancer Research Center,
Division of Medical and Biological Informatics.
All rights reserved.
This software is distributed WITHOUT ANY WARRANTY; without
even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE.
See LICENSE.txt or http://www.mitk.org for details.
===================================================================*/
#ifndef __itkImportMitkImageContainer_h
#define __itkImportMitkImageContainer_h
#include <itkImportImageContainer.h>
#include <mitkImageDataItem.h>
#include <mitkImageAccessorBase.h>
namespace itk
{
/** \class ImportMitkImageContainer
* Defines an itk::Image front-end to an mitk::Image. This container
* conforms to the ImageContainerInterface. This is a full-fleged Object,
* so there is modification time, debug, and reference count information.
*
* Template parameters for ImportMitkImageContainer:
*
* TElementIdentifier =
* An INTEGRAL type for use in indexing the imported buffer.
*
* TElement =
* The element type stored in the container.
*/
template <typename TElementIdentifier, typename TElement>
class ImportMitkImageContainer: public ImportImageContainer<TElementIdentifier, TElement>
{
public:
  /** Standard class typedefs. */
  typedef ImportMitkImageContainer Self;
  // NOTE(review): Superclass is declared as Object although the class derives
  // from ImportImageContainer<...> — confirm this typedef is intentional.
  typedef Object Superclass;
  typedef SmartPointer<Self> Pointer;
  typedef SmartPointer<const Self> ConstPointer;
  /** Save the template parameters. */
  typedef TElementIdentifier ElementIdentifier;
  typedef TElement Element;
  /** Method for creation through the object factory. */
  itkFactorylessNewMacro(Self)
  itkCloneMacro(Self)
  /** Standard part of every itk Object. */
  itkTypeMacro(ImportMitkImageContainer, ImportImageContainer);
  ///** Get the pointer from which the image data is imported. */
  //TElement *GetImportPointer() {return m_ImportPointer;};
  /** \brief Set the mitk::ImageAccessorBase whose buffer backs this
   * container.
   * \param imageAccess accessor providing the pixel buffer (stored, not
   *        owned by this container)
   * \param noBytes size of the buffer in bytes
   */
  //void SetImageDataItem(mitk::ImageDataItem* imageDataItem);
  void SetImageAccessor(mitk::ImageAccessorBase* imageAccess, size_t noBytes);
protected:
  ImportMitkImageContainer();
  virtual ~ImportMitkImageContainer();
  /** PrintSelf routine. Normally this is a protected internal method. It is
   * made public here so that Image can call this method. Users should not
   * call this method but should call Print() instead. */
  void PrintSelf(std::ostream& os, Indent indent) const;
private:
  // Pre-C++11 idiom for disabling copy construction and assignment.
  ImportMitkImageContainer(const Self&); //purposely not implemented
  void operator=(const Self&); //purposely not implemented
  //mitk::ImageDataItem::Pointer m_ImageDataItem;
  // Accessor supplying the imported pixel buffer; set via SetImageAccessor.
  mitk::ImageAccessorBase* m_imageAccess;
};
} // end namespace itk
// Define instantiation macro for this template.
#define ITK_TEMPLATE_ImportMitkImageContainer(_, EXPORT, x, y) namespace itk { \
_(2(class EXPORT ImportMitkImageContainer< ITK_TEMPLATE_2 x >)) \
namespace Templates { typedef ImportMitkImageContainer< ITK_TEMPLATE_2 x > ImportMitkImageContainer##y; } \
}
//#if ITK_TEMPLATE_EXPLICIT
//# include "Templates/itkImportMitkImageContainer+-.h"
//#endif
#if ITK_TEMPLATE_TXX
# include "itkImportMitkImageContainer.txx"
#endif
#endif
|
danielknorr/MITK
|
Core/Code/Algorithms/itkImportMitkImageContainer.h
|
C
|
bsd-3-clause
| 3,325
|
<?php
namespace ZfcUserTest\Authentication\Adapter;
use ZfcUserTest\Authentication\Adapter\TestAsset\AbstractAdapterExtension;
class AbstractAdapterTest extends \PHPUnit_Framework_TestCase
{
    /**
     * The object to be tested.
     *
     * @var AbstractAdapterExtension
     */
    protected $adapter;
    /**
     * Creates a fresh adapter extension before each test.
     */
    public function setUp()
    {
        $this->adapter = new AbstractAdapterExtension();
    }
    /**
     * @covers \ZfcUser\Authentication\Adapter\AbstractAdapter::getStorage
     */
    public function testGetStorageWithoutStorageSet()
    {
        $this->assertInstanceOf('Zend\Authentication\Storage\Session', $this->adapter->getStorage());
    }
    /**
     * Verifies that an explicitly set storage instance is returned unchanged.
     *
     * @covers \ZfcUser\Authentication\Adapter\AbstractAdapter::getStorage
     * @covers \ZfcUser\Authentication\Adapter\AbstractAdapter::setStorage
     */
    public function testSetGetStorage()
    {
        $storage = new \Zend\Authentication\Storage\Session('ZfcUser');
        $storage->write('zfcUser');
        $this->adapter->setStorage($storage);
        $this->assertInstanceOf('Zend\Authentication\Storage\Session', $this->adapter->getStorage());
        $this->assertSame('zfcUser', $this->adapter->getStorage()->read());
    }
    /**
     * @covers \ZfcUser\Authentication\Adapter\AbstractAdapter::isSatisfied
     */
    public function testIsSatisfied()
    {
        $this->assertFalse($this->adapter->isSatisfied());
    }
    /**
     * Verifies setSatisfied() toggles the flag and returns $this (fluent).
     */
    public function testSetSatisfied()
    {
        $result = $this->adapter->setSatisfied();
        $this->assertInstanceOf('ZfcUser\Authentication\Adapter\AbstractAdapter', $result);
        $this->assertTrue($this->adapter->isSatisfied());
        $result = $this->adapter->setSatisfied(false);
        $this->assertInstanceOf('ZfcUser\Authentication\Adapter\AbstractAdapter', $result);
        $this->assertFalse($this->adapter->isSatisfied());
    }
}
|
ZF-Commons/ZfcUser
|
tests/ZfcUserTest/Authentication/Adapter/AbstractAdapterTest.php
|
PHP
|
bsd-3-clause
| 1,883
|
//
// FLEXDetectViewsTableViewController.h
// UICatalog
//
// Created by viczxwang on 15/12/20.
// Copyright © 2015年 f. All rights reserved.
//
#import <UIKit/UIKit.h>
/// Table view controller listing detected views; presumably part of the
/// leak-detection tooling (lives under LeakDetect/) — confirm with callers.
/// Declares no API beyond UITableViewController.
@interface FLEXDetectViewsTableViewController : UITableViewController
@end
|
wzxing55/wzxing55-FLEX
|
Classes/LeakDetect/FLEXDetectViewsTableViewController.h
|
C
|
bsd-3-clause
| 253
|
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_UI_WEBUI_SETTINGS_SHARED_SETTINGS_LOCALIZED_STRINGS_PROVIDER_H_
#define CHROME_BROWSER_UI_WEBUI_SETTINGS_SHARED_SETTINGS_LOCALIZED_STRINGS_PROVIDER_H_
namespace content {
class WebUIDataSource;
} // namespace content
namespace settings {
// Each helper below adds localized strings for one shared settings element
// to |html_source|; implementations live in the corresponding .cc file.
// Adds strings used by the <settings-captions> element.
void AddCaptionSubpageStrings(content::WebUIDataSource* html_source);
// Adds strings used by the <settings-personalization-options> element.
void AddPersonalizationOptionsStrings(content::WebUIDataSource* html_source);
// Adds strings used by the <settings-sync-controls> element.
void AddSyncControlsStrings(content::WebUIDataSource* html_source);
// Adds strings used by the <settings-sync-account-control> element.
void AddSyncAccountControlStrings(content::WebUIDataSource* html_source);
#if defined(OS_CHROMEOS)
// Adds strings used by the <settings-password-prompt-dialog> element.
void AddPasswordPromptDialogStrings(content::WebUIDataSource* html_source);
#endif
// Adds strings used by the <settings-sync-page> element.
void AddSyncPageStrings(content::WebUIDataSource* html_source);
} // namespace settings
#endif // CHROME_BROWSER_UI_WEBUI_SETTINGS_SHARED_SETTINGS_LOCALIZED_STRINGS_PROVIDER_H_
|
endlessm/chromium-browser
|
chrome/browser/ui/webui/settings/shared_settings_localized_strings_provider.h
|
C
|
bsd-3-clause
| 1,404
|
import numpy as np
from numpy.testing import (assert_equal, assert_array_almost_equal,
assert_raises)
from skimage.transform._geometric import _stackcopy
from skimage.transform._geometric import GeometricTransform
from skimage.transform import (estimate_transform, matrix_transform,
SimilarityTransform, AffineTransform,
ProjectiveTransform, PolynomialTransform,
PiecewiseAffineTransform)
# Eight 2-D source landmark coordinates shared by the estimation tests below.
SRC = np.array([
    [-12.3705, -10.5075],
    [-10.7865, 15.4305],
    [8.6985, 10.8675],
    [11.4975, -9.5715],
    [7.8435, 7.4835],
    [-5.3325, 6.5025],
    [6.7905, -6.3765],
    [-6.1695, -0.8235],
])
# Destination coordinates corresponding row-for-row to SRC.
DST = np.array([
    [0, 0],
    [0, 5800],
    [4900, 5800],
    [4900, 0],
    [4479, 4580],
    [1176, 3660],
    [3754, 790],
    [1024, 1931],
])
def test_stackcopy():
    """_stackcopy must broadcast a 2-D array into every layer of a 3-D stack."""
    layers = 4
    stack = np.empty((3, 3, layers))
    template = np.eye(3, 3)
    _stackcopy(stack, template)
    for layer in range(layers):
        assert_array_almost_equal(stack[..., layer], template)
def test_estimate_transform():
    """estimate_transform accepts every known name and rejects unknown ones."""
    for ttype in ('similarity', 'affine', 'projective', 'polynomial'):
        estimate_transform(ttype, SRC[:2, :], DST[:2, :])
    assert_raises(ValueError, estimate_transform, 'foobar',
                  SRC[:2, :], DST[:2, :])
def test_matrix_transform():
    """matrix_transform agrees with applying the transform object directly."""
    tform = AffineTransform(scale=(0.1, 0.5), rotation=2)
    expected = tform(SRC)
    assert_equal(expected, matrix_transform(SRC, tform._matrix))
def test_similarity_estimation():
    """Similarity estimation: exact, over-determined, and estimate() paths."""
    # exact solution
    tform = estimate_transform('similarity', SRC[:2, :], DST[:2, :])
    assert_array_almost_equal(tform(SRC[:2, :]), DST[:2, :])
    # A similarity matrix's upper-left 2x2 block has the form
    # [[a, -b], [b, a]]; check both symmetry constraints.
    assert_equal(tform._matrix[0, 0], tform._matrix[1, 1])
    assert_equal(tform._matrix[0, 1], - tform._matrix[1, 0])
    # over-determined
    tform2 = estimate_transform('similarity', SRC, DST)
    assert_array_almost_equal(tform2.inverse(tform2(SRC)), SRC)
    assert_equal(tform2._matrix[0, 0], tform2._matrix[1, 1])
    assert_equal(tform2._matrix[0, 1], - tform2._matrix[1, 0])
    # via estimate method
    tform3 = SimilarityTransform()
    tform3.estimate(SRC, DST)
    assert_array_almost_equal(tform3._matrix, tform2._matrix)
def test_similarity_init():
    """SimilarityTransform must round-trip its implicit parameters.

    Exercises both a generic rotation and the rotation == 0 special case
    (which takes a different code path for the scale computation).  For
    each parameter set the transform is also re-built from the resulting
    matrix; the original test omitted that matrix round-trip for the
    rotation == 0 case.
    """
    def check(scale, rotation, translation):
        # init with implicit parameters
        tform = SimilarityTransform(scale=scale, rotation=rotation,
                                    translation=translation)
        assert_array_almost_equal(tform.scale, scale)
        assert_array_almost_equal(tform.rotation, rotation)
        assert_array_almost_equal(tform.translation, translation)
        # init with the equivalent transformation matrix
        tform2 = SimilarityTransform(tform._matrix)
        assert_array_almost_equal(tform2.scale, scale)
        assert_array_almost_equal(tform2.rotation, rotation)
        assert_array_almost_equal(tform2.translation, translation)

    check(scale=0.1, rotation=1, translation=(1, 1))
    # test special case for scale if rotation=0
    check(scale=0.1, rotation=0, translation=(1, 1))
def test_affine_estimation():
    """Affine estimation: exact, over-determined, and estimate()."""
    # exact solution from three point pairs
    exact = estimate_transform('affine', SRC[:3, :], DST[:3, :])
    assert_array_almost_equal(exact(SRC[:3, :]), DST[:3, :])
    # over-determined fit: inverse must undo the forward mapping
    fitted = estimate_transform('affine', SRC, DST)
    assert_array_almost_equal(fitted.inverse(fitted(SRC)), SRC)
    # the estimate() method must agree with estimate_transform()
    via_method = AffineTransform()
    via_method.estimate(SRC, DST)
    assert_array_almost_equal(via_method._matrix, fitted._matrix)
def test_affine_init():
    """AffineTransform must round-trip scale, rotation, shear, translation."""
    scale, rotation, shear, translation = (0.1, 0.13), 1, 0.1, (1, 1)
    # built from implicit parameters
    implicit = AffineTransform(scale=scale, rotation=rotation, shear=shear,
                               translation=translation)
    assert_array_almost_equal(implicit.scale, scale)
    assert_array_almost_equal(implicit.rotation, rotation)
    assert_array_almost_equal(implicit.shear, shear)
    assert_array_almost_equal(implicit.translation, translation)
    # rebuilt from the equivalent transformation matrix
    from_matrix = AffineTransform(implicit._matrix)
    assert_array_almost_equal(from_matrix.scale, scale)
    assert_array_almost_equal(from_matrix.rotation, rotation)
    assert_array_almost_equal(from_matrix.shear, shear)
    assert_array_almost_equal(from_matrix.translation, translation)
def test_piecewise_affine():
    """Piecewise-affine estimation is exact on its control points."""
    tf = PiecewiseAffineTransform()
    tf.estimate(SRC, DST)
    # each local affine transform interpolates its control points exactly
    assert_array_almost_equal(tf(SRC), DST)
    assert_array_almost_equal(tf.inverse(DST), SRC)
def test_projective_estimation():
    """Projective estimation: exact, over-determined, and estimate()."""
    # exact solution from four point pairs
    exact = estimate_transform('projective', SRC[:4, :], DST[:4, :])
    assert_array_almost_equal(exact(SRC[:4, :]), DST[:4, :])
    # over-determined fit: inverse must undo the forward mapping
    fitted = estimate_transform('projective', SRC, DST)
    assert_array_almost_equal(fitted.inverse(fitted(SRC)), SRC)
    # the estimate() method must agree with estimate_transform()
    via_method = ProjectiveTransform()
    via_method.estimate(SRC, DST)
    assert_array_almost_equal(via_method._matrix, fitted._matrix)
def test_projective_init():
    """Re-initializing from a matrix preserves that matrix."""
    estimated = estimate_transform('projective', SRC, DST)
    rebuilt = ProjectiveTransform(estimated._matrix)
    assert_array_almost_equal(rebuilt._matrix, estimated._matrix)
def test_polynomial_estimation():
    """Polynomial estimation via estimate_transform() and estimate()."""
    # over-determined fit of a 10th-order polynomial
    fitted = estimate_transform('polynomial', SRC, DST, order=10)
    assert_array_almost_equal(fitted(SRC), DST, 6)
    # the estimate() method must agree with estimate_transform()
    via_method = PolynomialTransform()
    via_method.estimate(SRC, DST, order=10)
    assert_array_almost_equal(via_method._params, fitted._params)
def test_polynomial_init():
    """Re-initializing from parameters preserves those parameters."""
    estimated = estimate_transform('polynomial', SRC, DST, order=10)
    rebuilt = PolynomialTransform(estimated._params)
    assert_array_almost_equal(rebuilt._params, estimated._params)
def test_polynomial_default_order():
    """Omitting ``order`` must be equivalent to ``order=2``."""
    default = estimate_transform('polynomial', SRC, DST)
    explicit = estimate_transform('polynomial', SRC, DST, order=2)
    assert_array_almost_equal(explicit._params, default._params)
def test_polynomial_inverse():
    """Polynomial transforms have no analytic inverse."""
    tform = PolynomialTransform()
    assert_raises(Exception, tform.inverse, 0)
def test_union():
    """Composing transforms with ``+`` must multiply their matrices."""
    # similarity + similarity: scales multiply, rotations add
    first = SimilarityTransform(scale=0.1, rotation=0.3)
    second = SimilarityTransform(scale=0.1, rotation=0.9)
    expected = SimilarityTransform(scale=0.1 ** 2, rotation=0.3 + 0.9)
    assert_array_almost_equal((first + second)._matrix, expected._matrix)
    # affine + similarity: same matrix, but the result degrades to a
    # generic ProjectiveTransform
    first = AffineTransform(scale=(0.1, 0.1), rotation=0.3)
    second = SimilarityTransform(scale=0.1, rotation=0.9)
    expected = SimilarityTransform(scale=0.1 ** 2, rotation=0.3 + 0.9)
    combined = first + second
    assert_array_almost_equal(combined._matrix, expected._matrix)
    assert combined.__class__ == ProjectiveTransform
def test_geometric_tform():
    """The abstract base class must refuse call, inverse, and addition."""
    base = GeometricTransform()
    for operation in (base, base.inverse, base.__add__):
        assert_raises(NotImplementedError, operation, 0)
def test_invalid_input():
    """Malformed matrices and conflicting arguments must raise ValueError."""
    bad_matrix = np.zeros((2, 3))
    # a 2x3 matrix is never a valid homogeneous transform
    for cls in (ProjectiveTransform, AffineTransform, SimilarityTransform):
        assert_raises(ValueError, cls, bad_matrix)
    # passing both a matrix and implicit parameters is ambiguous
    for cls in (AffineTransform, SimilarityTransform):
        assert_raises(ValueError, cls, matrix=bad_matrix, scale=1)
    # polynomial parameters must have the expected shape
    assert_raises(ValueError, PolynomialTransform, np.zeros((3, 3)))
if __name__ == "__main__":
    # Allow running this test module directly via numpy's test runner.
    from numpy.testing import run_module_suite
    run_module_suite()
|
almarklein/scikit-image
|
skimage/transform/tests/test_geometric.py
|
Python
|
bsd-3-clause
| 7,870
|
package gov.nasa.jpl.mbee.mdk.mms.actions;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.nomagic.magicdraw.annotation.Annotation;
import com.nomagic.magicdraw.annotation.AnnotationAction;
import com.nomagic.magicdraw.core.Application;
import com.nomagic.magicdraw.core.Project;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.Element;
import gov.nasa.jpl.mbee.mdk.api.incubating.convert.Converters;
import gov.nasa.jpl.mbee.mdk.http.ServerException;
import gov.nasa.jpl.mbee.mdk.mms.MMSUtils;
import gov.nasa.jpl.mbee.mdk.util.TaskRunner;
import gov.nasa.jpl.mbee.mdk.util.Utils;
import gov.nasa.jpl.mbee.mdk.validation.IRuleViolationAction;
import gov.nasa.jpl.mbee.mdk.validation.RuleViolationAction;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.client.utils.URIBuilder;
import java.awt.event.ActionEvent;
import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.security.GeneralSecurityException;
import java.util.Collection;
import java.util.Map;
/**
 * Rule-violation / annotation action ("Commit image") that uploads an image
 * artifact for a model element to the MMS server via an HTTP request.
 */
public class ExportImage extends RuleViolationAction implements AnnotationAction, IRuleViolationAction {
    private static final long serialVersionUID = 1L;

    // Element whose image is posted when the action is triggered directly.
    private Element element;
    // Image metadata keyed by element id; entries are expected to carry
    // textual "cs", "extension" and "abspath" fields -- see postImage().
    private Map<String, ObjectNode> images;

    public ExportImage(Element e, Map<String, ObjectNode> images) {
        super("ExportImage", "Commit image", null, null);
        this.element = e;
        this.images = images;
    }

    @Override
    public boolean canExecute(Collection<Annotation> arg0) {
        // Applicable to any annotation selection.
        return true;
    }

    /**
     * Builds and queues the image create/update request for one element.
     *
     * @param project current MagicDraw project, used to resolve the MMS URI
     * @param key     element id of the image; also the key into {@code is}
     * @param is      image metadata map; each entry may provide "cs"
     *                (checksum), "extension" and "abspath" text fields
     * @return true if the request was queued; false if the image data or
     *         request URI could not be resolved, or the request failed to build
     */
    public static boolean postImage(Project project, String key, Map<String, ObjectNode> is) {
        if (is == null || is.get(key) == null) {
            Utils.guilog("[ERROR] Image data with id " + key + " not found.");
            return false;
        }
        URIBuilder requestUri = MMSUtils.getServiceProjectsRefsElementsUri(project);
        if (requestUri == null) {
            return false;
        }
        // Percent-encode dots in the id by hand before appending it to the path.
        // NOTE(review): URIBuilder#setPath may itself encode the '%' -- confirm
        // the server receives "%2E" as intended.
        String id = key.replace(".", "%2E");
        requestUri.setPath(requestUri.getPath() + "/" + id);
        JsonNode value;
        // Checksum query parameter (empty string when absent or non-textual).
        String cs = "";
        if ((value = is.get(key).get("cs")) != null && value.isTextual()) {
            cs = value.asText();
        }
        requestUri.setParameter("cs", cs);
        // File-extension query parameter (empty string when absent).
        String extension = "";
        if ((value = is.get(key).get("extension")) != null && value.isTextual()) {
            extension = value.asText();
        }
        requestUri.setParameter("extension", extension);
        // Local path of the file holding the image bytes to upload.
        String filename = "";
        if ((value = is.get(key).get("abspath")) != null && value.isTextual()) {
            filename = value.asText();
        }
        File file = new File(filename);
        try {
            HttpRequestBase request = MMSUtils.buildImageRequest(requestUri, file);
            // Sent asynchronously behind a progress dialog; failures inside the
            // task are only printed (see TODO) and do not affect the boolean
            // returned from this method.
            TaskRunner.runWithProgressStatus(progressStatus -> {
                try {
                    MMSUtils.sendMMSRequest(project, request, progressStatus);
                } catch (IOException | ServerException | URISyntaxException | GeneralSecurityException e) {
                    // TODO Implement error handling that was previously not possible due to OutputQueue implementation
                    e.printStackTrace();
                }
            }, "Image Create/Update", true, TaskRunner.ThreadExecutionStrategy.SINGLE);
        } catch (IOException | URISyntaxException e) {
            Application.getInstance().getGUILog().log("[ERROR] Unable to commit image " + filename + ". Reason: " + e.getMessage());
            e.printStackTrace();
            return false;
        }
        return true;
    }

    @Override
    public void execute(Collection<Annotation> annos) {
        // Commit one image per annotated element.
        for (Annotation anno : annos) {
            Element e = (Element) anno.getTarget();
            String key = Converters.getElementToIdConverter().apply(e);
            postImage(Project.getProject(e), key, images);
        }
        // NOTE(review): this is logged even when individual postImage calls fail.
        Utils.guilog("[INFO] Requests are added to queue.");
    }

    @Override
    public void actionPerformed(ActionEvent e) {
        String key = Converters.getElementToIdConverter().apply(element);
        if (postImage(Project.getProject(element), key, images)) {
            Utils.guilog("[INFO] Request is added to queue.");
        }
    }
}
|
Open-MBEE/MDK
|
src/main/java/gov/nasa/jpl/mbee/mdk/mms/actions/ExportImage.java
|
Java
|
bsd-3-clause
| 4,365
|
/* Copyright (c) 2007-2008 The Florida State University
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met: redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer;
* redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution;
* neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Authors: Stephen Hines
*/
#ifndef __ARCH_ARM_INSTS_STATICINST_HH__
#define __ARCH_ARM_INSTS_STATICINST_HH__
#include "base/trace.hh"
#include "cpu/static_inst.hh"
namespace ArmISA
{
/**
 * Common base class for ARM static instructions.  Collects the shift /
 * carry / overflow helpers shared by data-processing instructions,
 * disassembly helpers, and the masking logic used when instructions
 * write the CPSR/SPSR.
 */
class ArmStaticInst : public StaticInst
{
  protected:
    // Apply a shift to 'base' with an immediate shift amount; 'type'
    // selects the shift kind and 'cfval' is the current carry flag.
    int32_t shift_rm_imm(uint32_t base, uint32_t shamt,
                         uint32_t type, uint32_t cfval) const;
    // Same, with the shift amount taken from a register.
    int32_t shift_rm_rs(uint32_t base, uint32_t shamt,
                        uint32_t type, uint32_t cfval) const;

    // Carry-out produced by the corresponding shift forms above.
    bool shift_carry_imm(uint32_t base, uint32_t shamt,
                         uint32_t type, uint32_t cfval) const;
    bool shift_carry_rs(uint32_t base, uint32_t shamt,
                        uint32_t type, uint32_t cfval) const;

    // Carry/overflow predicates for add and subtract results.
    bool arm_add_carry(int32_t result, int32_t lhs, int32_t rhs) const;
    bool arm_sub_carry(int32_t result, int32_t lhs, int32_t rhs) const;
    bool arm_add_overflow(int32_t result, int32_t lhs, int32_t rhs) const;
    bool arm_sub_overflow(int32_t result, int32_t lhs, int32_t rhs) const;

    // Constructor
    ArmStaticInst(const char *mnem, MachInst _machInst, OpClass __opClass)
        : StaticInst(mnem, _machInst, __opClass)
    {
    }

    /// Print a register name for disassembly given the unique
    /// dependence tag number (FP or int).
    void printReg(std::ostream &os, int reg) const;
    /// Print the mnemonic, optionally followed by a suffix and/or the
    /// condition-code predicate.
    void printMnemonic(std::ostream &os,
                       const std::string &suffix = "",
                       bool withPred = true) const;
    /// Print a symbolic name for a memory address, if one is available.
    void printMemSymbol(std::ostream &os, const SymbolTable *symtab,
                        const std::string &prefix, const Addr addr,
                        const std::string &suffix) const;
    /// Print the shifter operand of a data-processing instruction.
    void printShiftOperand(std::ostream &os) const;
    /// Disassemble a data-processing instruction body.
    void printDataInst(std::ostream &os, bool withImm) const;

    std::string generateDisassembly(Addr pc, const SymbolTable *symtab) const;

    /// Merge 'val' into 'cpsr' as a CPSR-writing instruction would.
    /// 'byteMask' enables writes per 8-bit field; unprivileged (user
    /// mode) code cannot touch the masked control bits, and some bits
    /// are only writable when 'affectState' is set.  Returns the new
    /// CPSR value.
    static uint32_t
    cpsrWriteByInstr(CPSR cpsr, uint32_t val,
                     uint8_t byteMask, bool affectState)
    {
        bool privileged = (cpsr.mode != MODE_USER);

        uint32_t bitMask = 0;

        if (bits(byteMask, 3)) {
            // Top byte: bits 31:27 always; 26:24 only when the
            // instruction may change execution state.
            unsigned lowIdx = affectState ? 24 : 27;
            bitMask = bitMask | mask(31, lowIdx);
        }
        if (bits(byteMask, 2)) {
            // Bits 19:16 (the ARM GE flags -- TODO confirm field naming).
            bitMask = bitMask | mask(19, 16);
        }
        if (bits(byteMask, 1)) {
            // Bits 15:10 require affectState; bit 8 requires privilege;
            // bit 9 is always writable when this byte is enabled.
            unsigned highIdx = affectState ? 15 : 9;
            unsigned lowIdx = privileged ? 8 : 9;
            bitMask = bitMask | mask(highIdx, lowIdx);
        }
        if (bits(byteMask, 0)) {
            if (privileged) {
                // Interrupt-mask bits 7:6 plus the low five bits
                // (mask(5) sets bits 4:0 -- presumably the mode field).
                bitMask = bitMask | mask(7, 6);
                bitMask = bitMask | mask(5);
            }
            if (affectState)
                bitMask = bitMask | (1 << 5);
        }

        // Keep unwritable bits from the old CPSR; take the rest from 'val'.
        return ((uint32_t)cpsr & ~bitMask) | (val & bitMask);
    }

    /// SPSR writes use the same byte-enable masking, but no privilege
    /// or execution-state restrictions apply.
    static uint32_t
    spsrWriteByInstr(uint32_t spsr, uint32_t val,
                     uint8_t byteMask, bool affectState)
    {
        uint32_t bitMask = 0;

        if (bits(byteMask, 3))
            bitMask = bitMask | mask(31, 24);
        if (bits(byteMask, 2))
            bitMask = bitMask | mask(19, 16);
        if (bits(byteMask, 1))
            bitMask = bitMask | mask(15, 8);
        if (bits(byteMask, 0))
            bitMask = bitMask | mask(7, 0);

        return ((spsr & ~bitMask) | (val & bitMask));
    }
};
}
#endif //__ARCH_ARM_INSTS_STATICINST_HH__
|
liangwang/m5
|
src/arch/arm/insts/static_inst.hh
|
C++
|
bsd-3-clause
| 4,968
|
<HTML>
<CENTER><A HREF = "http://mapreduce.sandia.gov">MapReduce-MPI WWW Site</A> - <A HREF = "Manual.html">MapReduce-MPI Documentation</A>
</CENTER>
<HR>
<H3>Getting Started
</H3>
<P>Once you have
<A HREF = "http://www.sandia.gov/~sjplimp/download.html">downloaded</A> the
MapReduce MPI (MR-MPI) library, you should have the tarball
mapreduce.tar.gz on your machine. Unpack it with the following
commands:
</P>
<PRE>gunzip mapreduce.tar.gz
tar xvf mapreduce.tar
</PRE>
<P>which should create a mapreduce directory containing the following:
</P>
<UL><LI>README
<LI>LICENSE
<LI>doc
<LI>examples
<LI>mpistubs
<LI>oink
<LI>oinkdoc
<LI>python
<LI>src
<LI>user
</UL>
<P>The doc directory contains this documentation. The oink and oinkdoc
directories contain the <A HREF = "../oinkdoc/Manual.html">OINK scripting
interface</A> to the MR-MPI library and its
separate documentation. The examples directory contains a few simple
MapReduce programs which call the MR-MPI library. These are
documented by a README file in that directory and are discussed below.
The mpistubs directory contains a dummy MPI library which can be used
to build a MapReduce program on a serial machine. The python
directory contains the Python wrapper files needed to call the MR-MPI
library from Python. The src directory contains the files that
comprise the MR-MPI library. The user directory contains
user-contributed MapReduce programs. See the README in that directory
for further details.
</P>
<H5><B>Static library:</B>
</H5>
<P>To build a static library for use by a C++ or C program (*.a file on
Linux), go to the src directory and type
</P>
<PRE>make
</PRE>
<P>You will see a list of machine names, each of which has their own
Makefile.machine file in the src/MAKE directory. You can choose
one of these and attempt to build the MR-MPI library by typing
</P>
<PRE>make machine
</PRE>
<P>If you are successful, this will produce the file "libmrmpi_machine.a"
which can be linked by other programs. If not, you will need to
create a src/MAKE/Makefile.machine file compatible with your platform,
using one of the existing files as a template.
</P>
<P>The only settings in a Makefile.machine file that need to be specified
are those for the compiler and the MPI library on your machine. If
MPI is not already installed, you can install one of several free
versions that work on essentially all platforms. MPICH and OpenMPI
are the most common.
</P>
<P>Within Makefile.machine you can either specify via -I and -L switches
where the MPI include and library files are found, or you can use a
compiler wrapper provided with MPI, like mpiCC or mpic++, which will
know where those files are.
</P>
<P>You can also build the MR-MPI library without MPI, using the dummy MPI
library provided in the mpistubs directory. In this case you can only
run the library on a single processor. To do this, first build the
dummy MPI library, by typing "make" from within the mpistubs
directory. Again, you may need to edit mpistubs/Makefile for your
machine. Then from the src directory, type "make serial" which uses
the src/MAKE/Makefile.serial file.
</P>
<P>Both a C++ and <A HREF = "Interface_c.html">C interface</A> are part of the MR-MPI
library, so it should be usable from any hi-level language.
</P>
<H5><B>Shared library:</B>
</H5>
<P>You can also build the MR-MPI library as a dynamic shared library
(*.so file instead of *.a on Linux). This is required if
you want to use the library from Python. To do this, type
</P>
<PRE>make -f Makefile.shlib machine
</PRE>
<P>This will create the file libmrmpi_machine.so, as well as a soft link
libmrmpi.so, which is what the Python wrapper will load by default.
Note that if you are building multiple machine versions of the shared
library, the soft link is always set to the most recently built
version.
</P>
<H5><B>Additional requirement for using a shared library:</B>
</H5>
<P>The operating system finds shared libraries to load at run-time using
the environment variable LD_LIBRARY_PATH. So you may wish to copy the
file src/libmrmpi.so or src/libmrmpi_g++.so (for example) to a place
the system can find it by default, such as /usr/local/lib, or you may
wish to add the MR-MPI src directory to LD_LIBRARY_PATH, so that the
current version of the shared library is always available to programs
that use it.
</P>
<P>For the csh or tcsh shells, you would add something like this to your
~/.cshrc file:
</P>
<PRE>setenv LD_LIBRARY_PATH $<I>LD_LIBRARY_PATH</I>:/home/sjplimp/mrmpi/src
</PRE>
<HR>
<P>The MapReduce programs in the examples directory can be built by
typing
</P>
<PRE>make -f Makefile.machine
</PRE>
<P>from within the examples directory, where Makefile.machine is one of
the Makefiles in the examples directory. Again, you may need to
modify one of the existing ones to create a new one for your machine.
Some of the example programs are provided as a C++ program, a C
program, as a Python script, or as an OINK input script.
Once you have built OINK, the latter can be run as, for example,
</P>
<PRE>oink_linux &lt; in.rmat
</PRE>
<P>When you run one of the example MapReduce programs or your own, if you
get an immediate error about the MRMPI_BIGINT data type, you will need
to edit the file src/mrtype.h and re-compile the library. Mrtype.h
and the error check ensure that your MPI will perform operations on
8-byte unsigned integers as required by the MR-MPI library. For the
MPI on most machines, this is satisfied by the MPI data type
MPI_UNSIGNED_LONG_LONG. But some machines do not support the "long
long" data type, and you may need a different setting for your machine
and installed MPI, such as MPI_UNSIGNED_LONG.
</P>
</HTML>
|
ravikanthreddy89/MR-MPI
|
doc/Start.html
|
HTML
|
bsd-3-clause
| 5,701
|
/*
# This file is part of the Astrometry.net suite.
# Licensed under a 3-clause BSD style license - see LICENSE
*/
#include <stdio.h>
#include <stdlib.h>
#include "codekd.h"
#include "kdtree_fits_io.h"
#include "starutil.h"
#include "errors.h"
// Zero-initialize a fresh codetree_t; returns NULL (with a message on
// stderr) if the allocation fails.
static codetree_t* codetree_alloc() {
    codetree_t* tree = calloc(1, sizeof(codetree_t));
    if (tree == NULL)
        fprintf(stderr, "Failed to allocate a code kdtree struct.\n");
    return tree;
}
// Append the kdtree (and its FITS header) to an already-open file.
int codetree_append_to(codetree_t* s, FILE* fid) {
    return kdtree_fits_append_tree_to(s->tree, s->header, fid);
}
// Number of codes (data points) stored in the tree.
int codetree_N(codetree_t* s) {
    return s->tree->ndata;
}
// Number of kdtree nodes.
int codetree_nodes(codetree_t* s) {
    return s->tree->nnodes;
}
// Dimensionality of each code.
int codetree_D(codetree_t* s) {
    return s->tree->ndim;
}
// FITS header associated with the tree.
qfits_header* codetree_header(codetree_t* s) {
    return s->header;
}
// Map a kdtree data index through the tree's permutation, if any.
int codetree_get_permuted(codetree_t* s, int index) {
    if (s->tree->perm) return s->tree->perm[index];
    else return index;
}
// Open a code kdtree either from an already-open anqfits_t (fits != NULL)
// or from a filename.  Returns a newly-allocated tree, or NULL on failure.
static codetree_t* my_open(const char* fn, anqfits_t* fits) {
    codetree_t* s;
    kdtree_fits_t* io;
    char* treename = CODETREE_NAME;
    s = codetree_alloc();
    if (!s)
        return s;
    if (fits) {
        io = kdtree_fits_open_fits(fits);
        // use the FITS object's filename in error messages below
        fn = fits->filename;
    } else
        io = kdtree_fits_open(fn);
    if (!io) {
        ERROR("Failed to open FITS file \"%s\"", fn);
        goto bailout;
    }
    // Fall back to the default (unnamed) tree when the canonical
    // CODETREE_NAME extension is absent.
    if (!kdtree_fits_contains_tree(io, treename))
        treename = NULL;
    s->tree = kdtree_fits_read_tree(io, treename, &s->header);
    if (!s->tree) {
        ERROR("Failed to read code kdtree from file %s\n", fn);
        // NOTE(review): 'io' is not closed on this error path -- possible
        // descriptor leak; confirm whether kdtree_fits_read_tree closes it.
        goto bailout;
    }
    // kdtree_fits_t is a typedef of fitsbin_t
    fitsbin_close_fd(io);
    return s;
 bailout:
    free(s);
    return NULL;
}
// Public wrappers around my_open(): open from an anqfits_t object ...
codetree_t* codetree_open_fits(anqfits_t* fits) {
    return my_open(NULL, fits);
}
// ... or from a filename.
codetree_t* codetree_open(const char* fn) {
    return my_open(fn, NULL);
}
// Release everything owned by the tree struct.  Safe to call on NULL;
// always returns 0.
int codetree_close(codetree_t* s) {
    if (!s) return 0;
    if (s->inverse_perm)
        free(s->inverse_perm);
    if (s->header)
        qfits_header_destroy(s->header);
    if (s->tree)
        kdtree_fits_close(s->tree);
    free(s);
    return 0;
}
// Internal shorthand for the number of codes in the tree.
static int Ndata(codetree_t* s) {
    return s->tree->ndata;
}
// Build s->inverse_perm, mapping original code ids to kdtree data
// indices.  Leaves s->inverse_perm NULL if the allocation fails.
void codetree_compute_inverse_perm(codetree_t* s) {
    // compute inverse permutation vector.
    s->inverse_perm = malloc(Ndata(s) * sizeof(int));
    if (!s->inverse_perm) {
        fprintf(stderr, "Failed to allocate code kdtree inverse permutation vector.\n");
        return;
    }
    kdtree_inverse_permutation(s->tree, s->inverse_perm);
}
// Copy the D-dimensional code with original id 'codeid' into 'code'.
// Returns 0 on success, -1 on failure (invalid id, or the inverse
// permutation could not be allocated).
int codetree_get(codetree_t* s, unsigned int codeid, double* code) {
    // Lazily build the inverse permutation if the tree is permuted.
    if (s->tree->perm && !s->inverse_perm) {
        codetree_compute_inverse_perm(s);
        if (!s->inverse_perm)
            return -1;
    }
    // Fix: Ndata() returns (signed) int but was both compared against an
    // unsigned value and printed with "%u" (undefined-format argument);
    // cast explicitly to make the promotion intentional and the printf
    // arguments match their specifiers.
    if (codeid >= (unsigned int)Ndata(s)) {
        fprintf(stderr, "Invalid code ID: %u >= %u.\n",
                codeid, (unsigned int)Ndata(s));
        return -1;
    }
    if (s->inverse_perm)
        kdtree_copy_data_double(s->tree, s->inverse_perm[codeid], 1, code);
    else
        kdtree_copy_data_double(s->tree, codeid, 1, code);
    return 0;
}
// Allocate a fresh, empty code kdtree struct with a default FITS header.
// Returns NULL if either allocation fails.
codetree_t* codetree_new() {
    codetree_t* s = codetree_alloc();
    // Fix: the original dereferenced 's' without checking for allocation
    // failure, even though codetree_alloc() can return NULL.
    if (!s)
        return NULL;
    s->header = qfits_header_default();
    if (!s->header) {
        fprintf(stderr, "Failed to create a qfits header for code kdtree.\n");
        free(s);
        return NULL;
    }
    qfits_header_add(s->header, "AN_FILE", AN_FILETYPE_CODETREE, "This file is a code kdtree.", NULL);
    return s;
}
// Write the tree (plus header) to a new FITS file.
int codetree_write_to_file(codetree_t* s, const char* fn) {
    return kdtree_fits_write(s->tree, fn, s->header);
}
// Same, but with byte-swapped (endian-flipped) data.
int codetree_write_to_file_flipped(codetree_t* s, const char* fn) {
    return kdtree_fits_write_flipped(s->tree, fn, s->header);
}
|
olebole/astrometry.net
|
util/codekd.c
|
C
|
bsd-3-clause
| 3,477
|
<?php
use yii\helpers\Html;
use yii\widgets\ActiveForm;
use yii\helpers\ArrayHelper;
/* @var $this yii\web\View */
/* @var $model app\models\Admin */
/* @var $form yii\widgets\ActiveForm */
?>
<div class="admin-form">
    <?php /* Admin create/update form: username, password (+ confirmation),
            email, permission-group checkboxes and admin type. */ ?>
    <?php $form = ActiveForm::begin(); ?>
    <?= $form->field($model, 'username')->textInput(['style'=>'width:300px']) ?>
    <?php /* Password inputs are rendered by hand (not via $form->field) so
            they can stay empty on update; for existing records a hint is
            shown saying the password only needs filling in when changed. */ ?>
    <div class="form-group field-admin-password has-success">
        <label for="admin-password" class="control-label">密码</label> <?php if (!$model->isNewRecord) {echo '不需要修改请不用填写';}?>
        <input type="password" style="width:300px" value="" name="Admin[password]" class="form-control" id="admin-password">
        <div class="help-block"></div>
    </div>
    <div class="form-group field-admin-password_repeat has-success">
        <label for="admin-password_repeat" class="control-label">重复密码</label>
        <input type="password" style="width:300px" name="Admin[password_repeat]" class="form-control" id="admin-password_repeat">
        <div class="help-block"></div>
    </div>
    <?= $form->field($model, 'email')->textInput(['style'=>'width:300px']) ?>
    <?php /* Permission groups: one checkbox per menu title.  A box is
            pre-checked when its key occurs in the stored group_id string;
            the hidden input guarantees Admin[group_id] is submitted even
            when nothing is checked. */ ?>
    <div class="form-group field-admin-group_id">
        <label for="admin-group_id" class="control-label">权限组</label>
        <input type="hidden" value="" name="Admin[group_id]"><div id="admin-group_id">
        <?php foreach (\app\models\Admin::$menuTitle as $key => $val) :?>
            <label><input type="checkbox" <?php if (strstr($model->group_id, (string)$key)) { echo 'checked=""';}?> value="<?= $key?>" name="Admin[group_id][]"> <?= $val?></label>
        <?php endforeach;?>
        </div>
        <div class="help-block"></div>
    </div>
    <?= $form->field($model, 'type')->dropDownList(\app\models\Admin::$typeList,['style'=>'width:200px']) ?>
    <div class="form-group">
        <?= Html::submitButton($model->isNewRecord ? '新建' : '更新', ['class' => $model->isNewRecord ? 'btn btn-success' : 'btn btn-primary']) ?>
    </div>
    <?php ActiveForm::end(); ?>
</div>
|
hanxiao84322/coach_system
|
views/admin/admin/_form.php
|
PHP
|
bsd-3-clause
| 2,086
|
/*
# This file is part of the Astrometry.net suite.
# Licensed under a 3-clause BSD style license - see LICENSE
*/
#ifndef PLOTXY_H
#define PLOTXY_H
#include "astrometry/plotstuff.h"
struct plotxy_args {
char* fn;
int ext;
char* xcol;
char* ycol;
double xoff, yoff;
int firstobj;
int nobjs;
double scale;
// coordinates added with xy_val <x> <y>
dl* xyvals;
// if WCS is set, x,y are treated as FITS pixel coords;
// that is, this are pushed through the WCS unmodified, then the resulting
// RA,Dec is pushed through the plot WCS, producing FITS coords, from which
// 1,1 is subtracted to yield 0-indexed image coords.
anwcs_t* wcs;
};
typedef struct plotxy_args plotxy_t;
plotxy_t* plot_xy_get(plot_args_t* pargs);
// Called prior to cairo surface initialization.
void* plot_xy_init(plot_args_t* args);
// Set the plot size based on IMAGEW,IMAGEH in the xylist header.
int plot_xy_setsize(plot_args_t* args, plotxy_t* xyargs);
// Clears the list of points.
void plot_xy_clear_list(plotxy_t* args);
void plot_xy_set_xcol(plotxy_t* args, const char* col);
void plot_xy_set_ycol(plotxy_t* args, const char* col);
void plot_xy_set_filename(plotxy_t* args, const char* fn);
int plot_xy_set_wcs_filename(plotxy_t* args, const char* fn, int ext);
int plot_xy_set_offsets(plotxy_t* args, double xo, double yo);
int plot_xy_command(const char* command, const char* cmdargs,
plot_args_t* args, void* baton);
int plot_xy_plot(const char* command, cairo_t* cairo,
plot_args_t* plotargs, void* baton);
void plot_xy_free(plot_args_t* args, void* baton);
void plot_xy_vals(plotxy_t* args, double x, double y);
DECLARE_PLOTTER(xy);
#endif
|
olebole/astrometry.net
|
include/astrometry/plotxy.h
|
C
|
bsd-3-clause
| 1,666
|
-- Enable SQL Server full-text search on the Entry table when the server
-- has the full-text feature installed; otherwise skip quietly.
declare @hasFullText int
select @hasFullText = convert(int, SERVERPROPERTY('IsFullTextInstalled'))
if (@hasFullText = 1)
begin
    begin try
        -- Create the catalog and index Entry's searchable columns
        -- (0x0409 = LCID for English - United States).
        exec sp_fulltext_catalog 'FTCatalog', 'create'
        exec sp_fulltext_table '$schema$.[Entry]', 'create', 'FTCatalog', 'PK_Entry_Id'
        exec sp_fulltext_column '$schema$.[Entry]', 'Name', 'add', 0x0409
        exec sp_fulltext_column '$schema$.[Entry]', 'Title', 'add', 0x0409
        exec sp_fulltext_column '$schema$.[Entry]', 'Summary', 'add', 0x0409
        exec sp_fulltext_column '$schema$.[Entry]', 'MetaDescription', 'add', 0x0409
        exec sp_fulltext_column '$schema$.[Entry]', 'MetaKeywords', 'add', 0x0409
        -- Activate the index, run a full population, then keep it updated
        -- in the background as rows change.
        exec sp_fulltext_table '$schema$.[Entry]', 'activate'
        exec sp_fulltext_catalog 'FTCatalog', 'start_full'
        exec sp_fulltext_table '$schema$.[Entry]', 'start_change_tracking'
        exec sp_fulltext_table '$schema$.[Entry]', 'start_background_updateindex'
    end try
    begin catch
        --Full text not installed
        PRINT 'Full text catalog not installed'
    end catch
end
|
funnelweblog/FunnelWeb
|
src/FunnelWeb/DatabaseDeployer/Scripts/Script0008.sql
|
SQL
|
bsd-3-clause
| 984
|
#!/bin/bash
# Build ROS Kinetic (ros_comm plus extra packages) from source and install
# it to /opt/ros/kinetic.
#On Debian 8, armhf architecture
# see: http://wiki.ros.org/kinetic/Installation/Source

# Installing bootstrap dependencies
sudo pip install -U rosdep rosinstall_generator wstool rosinstall
sudo pip install --upgrade setuptools

# Initializing rosdep
sudo rm -rf /etc/ros/rosdep/sources.list.d/20-default.list
sudo rosdep init
rosdep update

# Building the catkin Packages
mkdir -p $HOME/build
cd $HOME/build
mkdir ros_catkin_ws_kinetic
cd ros_catkin_ws_kinetic
# Generate the base ros_comm workspace, then merge in the extra packages.
rosinstall_generator ros_comm --rosdistro kinetic --deps --wet-only --tar > kinetic-ros_comm-wet.rosinstall
wstool init src kinetic-ros_comm-wet.rosinstall
rosinstall_generator orocos_kdl python_orocos_kdl tf2_geometry_msgs diagnostics image_common image_transport_plugins common_msgs rosconsole_bridge --rosdistro kinetic --deps --wet-only --tar > add-pkgs.rosinstall
wstool merge -t src add-pkgs.rosinstall
wstool update -t src
# Install system dependencies for everything now present in src/.
rosdep install --from-paths src --ignore-src --rosdistro kinetic -y

# NOTE: since we compile on a system with only 512MB of RAM, configure gcc garbage collector to be aggressive and save temporary files to disk.
# see: http://jkroon.blogs.uls.co.za/it/scriptingprogramming/preventing-gcc-from-trashing-the-system
# https://gcc.gnu.org/onlinedocs/gcc/Option-Summary.html
CATKIN_COMPILE_FLAGS="\"-O3 -save-temps --param ggc-min-expand=10 --param ggc-min-heapsize=4096\""
./src/catkin/bin/catkin_make_isolated --install -DCMAKE_BUILD_TYPE=Release -DCMAKE_C_FLAGS=$CATKIN_COMPILE_FLAGS -DCMAKE_CXX_FLAGS=$CATKIN_COMPILE_FLAGS --install-space /opt/ros/kinetic

# Make the new installation available to future login shells.
echo "source /opt/ros/kinetic/setup.bash" >> $HOME/.profile
|
sbrodeur/ros-icreate-bbb
|
scripts/install_ros.sh
|
Shell
|
bsd-3-clause
| 1,640
|
package net.nablux.dockergen
import org.scalatest.{BeforeAndAfter, Matchers, FlatSpec}
/** Unit tests for the DockerImage DSL: each builder method must emit the
  * corresponding line in the generated Dockerfile text. */
class DockerImageSpec
  extends FlatSpec
  with Matchers
  with BeforeAndAfter {

  // Smallest possible concrete image: only the mandatory FROM parts.
  class MinimalImage extends DockerImage {
    override def image: String = "test.img"
    override def tag: String = "0.1"
  }

  // Fresh instance before every test so recorded commands don't accumulate.
  var desc: DockerImage = null
  before {
    desc = new MinimalImage()
  }

  "An empty description" should "create a minimal Dockerfile" in {
    desc.toDockerString shouldBe "FROM test.img:0.1\n"
  }

  "MAINTAINER()" should "set the MAINTAINER" in {
    desc.MAINTAINER("John Doe", "doe@example.net")
    desc.toDockerString should
      include("\nMAINTAINER John Doe <doe@example.net>\n")
  }

  "ENV()" should "add an environment variable" in {
    desc.ENV("LANG", "de_DE.UTF-8")
    desc.toDockerString should
      include("\nENV LANG de_DE.UTF-8\n")
  }

  "RUN()" should "add a RUN command" in {
    desc.RUN("echo hello")
    desc.toDockerString should
      include("\nRUN echo hello\n")
  }

  "CMD()" should "add a CMD command" in {
    desc.CMD("/bin/bash")
    desc.toDockerString should
      include("\nCMD /bin/bash\n")
  }

  "##()" should "add a comment" in {
    desc.##("Helpful comment")
    desc.toDockerString should
      include("\n# Helpful comment\n")
  }
}
|
tgpfeiffer/dockergen
|
src/test/scala/net/nablux/dockergen/DockerImageSpec.scala
|
Scala
|
bsd-3-clause
| 1,292
|
# Copyright (c) 2009-2014, Curiost.com
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met: 1) Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer. 2) Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution. 3) Neither the name of the curiost.com nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
# NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
# THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Author:: Yegor Bugayenko (mailto:yegor@tpc2.com)
# Copyright:: Copyright (c) 2009-2014, Curiost.com
# License:: Free to use and change if the author is mentioned
require_relative '../job'
require 'net/http'
require 'json'
# Job channel backed by the Adzuna REST API: downloads the configured
# endpoint and converts every result entry into a Job value object.
class ChAdzuna
  # args - hash with an 'endpoint' URL; raises when the endpoint is empty.
  def initialize(args)
    @endpoint = args['endpoint']
    raise 'endpoint is empty' if @endpoint.empty?
  end

  # Fetch the endpoint and map its 'results' array to Job objects
  # (url, company display name, title).
  def fetch
    body = Net::HTTP.get(URI(@endpoint))
    JSON.parse(body)['results'].map do |entry|
      Job.new(
        entry['redirect_url'],
        entry['company']['display_name'],
        entry['title']
      )
    end
  end
end
|
yegor256/jobspotting
|
lib/channels/ch_adzuna.rb
|
Ruby
|
bsd-3-clause
| 2,107
|
/* $NetBSD: irq.c,v 1.2 2002/03/24 23:37:42 bjh21 Exp $ */
/*-
* Copyright (c) 2000, 2001 Ben Harris
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
* irq.c - IOC IRQ handler.
*/
#include <sys/param.h>
__RCSID("$NetBSD: irq.c,v 1.2 2002/03/24 23:37:42 bjh21 Exp $");
#include <sys/device.h>
#include <sys/kernel.h> /* for cold */
#include <sys/malloc.h>
#include <sys/queue.h>
#include <sys/syslog.h>
#include <sys/systm.h>
#include <uvm/uvm_extern.h>
#include <machine/bus.h>
#include <machine/frame.h>
#include <machine/intr.h>
#include <machine/irq.h>
#include <machine/machdep.h>
#include <arch/acorn26/iobus/iocreg.h>
#include <arch/acorn26/iobus/iocvar.h>
#include "opt_ddb.h"
#include "opt_flashything.h"
#include "fiq.h"
#include "ioeb.h"
#include "unixbp.h"
#ifdef DDB
#include <ddb/db_output.h>
#endif
#if NFIQ > 0
#include <machine/fiq.h>
#endif
#if NIOEB > 0
#include <arch/acorn26/ioc/ioebvar.h>
#endif
#if NUNIXBP > 0
#include <arch/acorn26/podulebus/unixbpvar.h>
#endif
#define NIRQ 20
extern char *irqnames[];
int current_intr_depth = 0;
#if NFIQ > 0
void (*fiq_downgrade_handler)(void);
int fiq_want_downgrade;
#endif
/*
* Interrupt masks are held in 32-bit integers. At present, the
* bottom eight bits are interrupt register A on the IOC, and the next
* eight are interrupt register B. After that, on systems with Unix
* backplanes, there are four bits from there. FIQs should
* be represented eventually.
*/
/*
 * Per-IPL interrupt enable masks: irqmask[s] is the set of IRQ status
 * bits that stay enabled while the system runs at spl level s.
 * Rebuilt by irq_genmasks() whenever handlers change.
 */
static u_int32_t irqmask[NIPL];
/*
 * All registered handlers, kept sorted by descending ipl (see
 * irq_establish) so the dispatcher can stop scanning early.
 */
LIST_HEAD(irq_handler_head, irq_handler) irq_list_head =
    LIST_HEAD_INITIALIZER(irq_list_head);
/* One registered interrupt handler. */
struct irq_handler {
	LIST_ENTRY(irq_handler) link;	/* list linkage, priority order */
	int (*func) __P((void *));	/* handler function */
	void *arg;			/* argument; NULL -> pass trap frame */
	u_int32_t mask;			/* status bits that must all be set */
	int irqnum;			/* IRQ number */
	int ipl;			/* priority level handler runs at */
	int enabled;			/* non-zero while active */
	struct evcnt *ev;		/* event counter bumped on dispatch */
};
/* spl level currently in force; IPL_HIGH until irq_init() runs. */
volatile static int current_spl = IPL_HIGH;
__inline int hardsplx(int);
/*
 * irq_init: one-time interrupt-system setup.  Computes the initial
 * per-IPL masks, then initialises the soft-interrupt machinery.
 */
void
irq_init(void)
{
	irq_genmasks();
	softintr_init();
}
/*
* This is optimised for speed rather than generality. It expects to
* be called an awful lot.
*
* On entry, IRQs are disabled on the CPU.
*/
/*
 * irq_handler: main IOC IRQ dispatcher, entered from the trap code
 * with CPU IRQs disabled.  Reads the pending-interrupt status, runs
 * the highest-priority enabled handler that matches it, then runs any
 * pending soft interrupts before restoring the interrupted spl.
 */
void
irq_handler(struct irqframe *irqf)
{
	int s, status, result, stray;
	struct irq_handler *h;

	current_intr_depth++;
	KASSERT(the_ioc != NULL);
	/* Get the current interrupt state */
	status = ioc_irq_status_full();
#if NUNIXBP > 0
	/* Fold backplane status into the high bits (matches irqmask). */
	status |= unixbp_irq_status_full() << IRQ_UNIXBP_BASE;
#endif
	/* We're already in splhigh, but make sure the kernel knows that. */
	s = splhigh();	/* s = spl that was in force when the IRQ arrived */
#if 0
	printf("*");
#endif
	uvmexp.intrs++;
	stray = 1;	/* assume stray until some handler claims it */
#if NFIQ > 0
	/* Check for downgraded FIQs. */
	if (fiq_want_downgrade) {
		KASSERT(fiq_downgrade_handler != NULL);
		fiq_want_downgrade = 0;
		(fiq_downgrade_handler)();
		goto handled;
	}
#endif
	/*
	 * Find the highest-priority requested interrupt.  The list is
	 * sorted by descending ipl (see irq_establish), so scanning can
	 * stop once handlers are at or below the interrupted level s.
	 */
	for (h = irq_list_head.lh_first;
	     h != NULL && h->ipl > s;
	     h = h->link.le_next)
		if (h->enabled && ((status & h->mask) == h->mask)) {
			/* Drop to the handler's own priority level. */
			splx(h->ipl);
#if 0
			printf("IRQ %d...", h->irqnum);
#endif
			/* Acknowledge clearable sources before dispatch. */
			if (h->mask & IOC_IRQ_CLEARABLE_MASK)
				ioc_irq_clear(h->mask);
#if NIOEB > 0
			else if ((h->mask & IOEB_IRQ_CLEARABLE_MASK) &&
			    the_ioeb != NULL)
				ioeb_irq_clear(h->mask);
#endif
			/* NULL arg means the handler wants the trap frame. */
			if (h->arg == NULL)
				result = (h->func)(irqf);
			else
				result = (h->func)(h->arg);
			if (result == IRQ_HANDLED) {
				stray = 0;
				h->ev->ev_count++;
				break; /* XXX handle others? */
			}
			if (result == IRQ_MAYBE_HANDLED)
				stray = 0;
		}
	if (__predict_false(stray)) {
		log(LOG_WARNING, "Stray IRQ, status = 0x%x, spl = %d, "
		    "mask = 0x%x\n", status, s, irqmask[s]);
#ifdef DDB
		Debugger();
#endif
	}
#if NFIQ > 0
handled:
#endif /* NFIQ > 0 */
#if 0
	printf(" handled\n");
#endif
	dosoftints(s); /* May lower spl to s + 1, but no lower. */
	hardsplx(s);
	current_intr_depth--;
}
/*
 * irq_establish: register a handler for IRQ irqnum at priority ipl.
 * func is invoked with arg (or with the interrupt frame when arg is
 * NULL); ev is the event counter bumped on successful dispatch.
 * Handlers are kept sorted by descending ipl so irq_handler() can
 * stop scanning early.  Returns the new handler, initially enabled.
 */
struct irq_handler *
irq_establish(int irqnum, int ipl, int (*func)(void *), void *arg,
    struct evcnt *ev)
{
	struct irq_handler *h, *new;

#ifdef DIAGNOSTIC
	if (irqnum >= NIRQ)
		panic("irq_register: bad irq: %d", irqnum);
#endif
	MALLOC(new, struct irq_handler *, sizeof(struct irq_handler),
	    M_DEVBUF, M_WAITOK);
	bzero(new, sizeof(*new));
	new->irqnum = irqnum;
	new->mask = 1 << irqnum;
#if NUNIXBP > 0
	/* Backplane interrupts also need the IOC's podule IRQ bit set. */
	if (irqnum >= IRQ_UNIXBP_BASE)
		new->mask |= 1 << IRQ_PIRQ;
#endif
	new->ipl = ipl;
	new->func = func;
	new->arg = arg;
	new->enabled = 1;
	new->ev = ev;
	/* Insert in descending-ipl order. */
	if (irq_list_head.lh_first == NULL ||
	    irq_list_head.lh_first->ipl <= ipl)
		/* XXX This shouldn't need to be a special case */
		LIST_INSERT_HEAD(&irq_list_head, new, link);
	else {
		for (h = irq_list_head.lh_first;
		     h->link.le_next != NULL && h->link.le_next->ipl > ipl;
		     h = h->link.le_next);
		LIST_INSERT_AFTER(h, new, link);
	}
	/*
	 * Clear any latched interrupt so a stale event does not fire the
	 * moment the handler is enabled.  BUG FIX: the IOEB branch below
	 * previously used h->mask, but h is uninitialised when the
	 * LIST_INSERT_HEAD path above was taken; the newly-registered
	 * handler's mask is what must be cleared.
	 */
	if (new->mask & IOC_IRQ_CLEARABLE_MASK)
		ioc_irq_clear(new->mask);
#if NIOEB > 0
	else if ((new->mask & IOEB_IRQ_CLEARABLE_MASK) && the_ioeb != NULL)
		ioeb_irq_clear(new->mask);
#endif
	irq_genmasks();
	return new;
}
/*
 * irq_string: format a human-readable name for a handler's interrupt
 * ("IRQ n", or "IRQ 13.n" for Unix-backplane expansion interrupts).
 * Returns a pointer to a static buffer, so the result is only valid
 * until the next call and must not be freed.
 *
 * Uses sizeof(buffer) rather than a hard-coded 9, and drops the
 * redundant manual termination: snprintf always NUL-terminates its
 * output within the given size.
 */
char const *
irq_string(struct irq_handler *h)
{
	static char irq_string_store[10];

#if NUNIXBP > 0
	if (h->irqnum >= IRQ_UNIXBP_BASE)
		snprintf(irq_string_store, sizeof(irq_string_store),
		    "IRQ 13.%d", h->irqnum - IRQ_UNIXBP_BASE);
	else
#endif
		snprintf(irq_string_store, sizeof(irq_string_store),
		    "IRQ %d", h->irqnum);
	return irq_string_store;
}
/*
 * irq_disable: deactivate a handler and rebuild the per-IPL masks.
 * Done at splhigh so the dispatcher never sees a half-updated state.
 */
void
irq_disable(struct irq_handler *h)
{
	int s;

	s = splhigh();
	h->enabled = 0;
	irq_genmasks();
	splx(s);
}
/*
 * irq_enable: re-activate a handler and rebuild the per-IPL masks.
 */
void
irq_enable(struct irq_handler *h)
{
	int s;

	s = splhigh();
	h->enabled = 1;
	irq_genmasks();
	splx(s);
}
/*
 * irq_genmasks: recompute irqmask[] from the current handler list.
 * For each enabled handler its IRQ bits are enabled at every level
 * below its ipl.  If the same bits were already enabled by an earlier
 * (higher-ipl, since the list is sorted descending) handler, the
 * interrupt line is shared: it is then kept masked at the lower
 * handler's ipl and above, i.e. the lowest ipl wins for shared lines.
 */
void irq_genmasks()
{
	struct irq_handler *h;
	int s, i;

	/* Paranoia? */
	s = splhigh();
	/* Disable anything we don't understand */
	for (i = 0; i < NIPL; i++)
		irqmask[i] = 0;
	/* Enable interrupts in levels lower than their own */
	for (h = irq_list_head.lh_first; h != NULL; h = h->link.le_next)
		if (h->enabled) {
			if ((h->mask & irqmask[IPL_NONE]) == h->mask)
				/* Shared interrupt -- use lowest priority. */
				for (i = h->ipl; i < NIPL; i++)
					irqmask[i] &= ~h->mask;
			else
				for (i = 0; i < h->ipl; i++)
					irqmask[i] |= h->mask;
		}
	splx(s);
}
#ifdef FLASHYTHING
#include <machine/memcreg.h>
#include <arch/acorn26/vidc/vidcreg.h>
static const int iplcolours[] = {
VIDC_PALETTE_ENTRY( 0, 0, 0, 0), /* Black: IPL_NONE */
VIDC_PALETTE_ENTRY( 0, 0, 15, 0), /* Blue: IPL_SOFTCLOCK */
VIDC_PALETTE_ENTRY( 6, 4, 2, 0), /* Brown: IPL_SOFTNET */
VIDC_PALETTE_ENTRY(15, 0, 0, 0), /* Red: IPL_BIO */
VIDC_PALETTE_ENTRY(15, 9, 1, 0), /* Orange: IPL_NET */
VIDC_PALETTE_ENTRY(15, 15, 0, 0), /* Yellow: IPL_TTY */
VIDC_PALETTE_ENTRY( 0, 15, 0, 0), /* Green: IPL_IMP */
VIDC_PALETTE_ENTRY( 5, 8, 14, 0), /* Light Blue: IPL_AUDIO */
VIDC_PALETTE_ENTRY( 15, 0, 15, 0), /* Magenta: IPL_SERIAL */
VIDC_PALETTE_ENTRY( 0, 15, 15, 0), /* Cyan: IPL_CLOCK */
VIDC_PALETTE_ENTRY( 8, 8, 8, 0), /* Grey: IPL_STATCLOCK */
VIDC_PALETTE_ENTRY( 8, 8, 8, 0), /* Grey: IPL_SCHED */
VIDC_PALETTE_ENTRY(15, 15, 15, 0), /* White: IPL_HIGH */
};
#endif
int schedbreak = 0;
#include <machine/db_machdep.h>
#include <ddb/db_interface.h>
/*
 * hardsplx: unconditionally set the spl to s (below IPL_HIGH) and push
 * the matching mask to the interrupt hardware.  Returns the previous
 * level.  Runs with CPU IRQs off while current_spl and the hardware
 * masks are updated so the two never disagree.
 */
__inline int
hardsplx(int s)
{
	int was;
	u_int32_t mask;

	KASSERT(s < IPL_HIGH);
	int_off();
#ifdef FLASHYTHING
	/* Debug aid: border colour shows the current ipl. */
	VIDC_WRITE(VIDC_PALETTE_BCOL | iplcolours[s]);
#endif
	was = current_spl;
	mask = irqmask[s];
#if NFIQ > 0
	/* Keep IRQ 1 unmasked so a pending FIQ downgrade gets noticed. */
	if (fiq_want_downgrade)
		mask |= IOC_IRQ_1;
#endif
	/* Don't try this till we've found the IOC */
	if (the_ioc != NULL)
		ioc_irq_setmask(mask);
#if NUNIXBP > 0
	/* Backplane mask lives in the bits above IRQ_UNIXBP_BASE. */
	unixbp_irq_setmask(mask >> IRQ_UNIXBP_BASE);
#endif
	current_spl = s;
	int_on();
	return was;
}
/*
 * splhigh: raise to the highest priority level and return the previous
 * one.  Since interrupts are dispatched only when ipl > current_spl,
 * simply recording IPL_HIGH suffices; the hardware mask is rewritten
 * here only under DEBUG as a tripwire.
 */
int
splhigh(void)
{
	int was;

	int_off();
#ifdef FLASHYTHING
	VIDC_WRITE(VIDC_PALETTE_BCOL | iplcolours[IPL_HIGH]);
#endif
	was = current_spl;
	current_spl = IPL_HIGH;
#ifdef DEBUG
	/* Make sure that anything that turns off the I flag gets spotted. */
	if (the_ioc != NULL)
		ioc_irq_setmask(0xffff);
#endif
	return was;
}
/*
 * raisespl: raise the spl to s if (and only if) s is above the level
 * currently in force; never lowers.  Returns the previous level so the
 * caller can restore it later with splx()/lowerspl().
 */
int
raisespl(int s)
{

	return s > current_spl ? hardsplx(s) : current_spl;
}
/*
 * lowerspl: drop the spl to s; a no-op when s is not strictly lower
 * than the current level.  Pending soft interrupts that the lower
 * level unmasks are run before the hardware mask is rewritten.
 */
void
lowerspl(int s)
{

	if (s >= current_spl)
		return;
	dosoftints(s);
	hardsplx(s);
}
#ifdef DDB
/*
 * irq_stat: DDB helper.  Prints every registered handler (ipl, IRQ
 * number, mask, dispatch count) followed by the per-IPL masks,
 * collapsing consecutive identical mask values.
 */
void
irq_stat(void (*pr)(const char *, ...))
{
	struct irq_handler *h;
	int i;
	u_int32_t last;

	for (h = irq_list_head.lh_first; h != NULL; h = h->link.le_next)
		(*pr)("%12s: ipl %2d, IRQ %2d, mask 0x%05x, count %llu\n",
		    h->ev->ev_group, h->ipl, h->irqnum,
		    h->mask, h->ev->ev_count);
	(*pr)("\n");
	/* -1 can never equal a real mask, so the first level prints. */
	last = -1;
	for (i = 0; i < NIPL; i++)
		if (irqmask[i] != last) {
			(*pr)("ipl %2d: mask 0x%05x\n", i, irqmask[i]);
			last = irqmask[i];
		}
}
#endif
|
MarginC/kame
|
netbsd/sys/arch/acorn26/acorn26/irq.c
|
C
|
bsd-3-clause
| 9,890
|
using System;
using MongoDB.Bson;
using MongoDB.Driver;
using NUnit.Framework;
namespace DataAccess.Tests.Repository
{
/// <summary>
/// Tests for the string-to-query-document conversion extension:
/// null/blank input yields null, JSON text is parsed into a document.
/// </summary>
[TestFixture]
public class ExtensionsFixture
{
    [Test]
    public void ToQueryDocReturnsNullForNullString()
    {
        // A null source string cannot produce a query document.
        string source = null;
        Assert.That(source.ToQueryDocument(), Is.Null);
    }

    [Test]
    public void ToQueryDocReturnsNullForEmptyString()
    {
        Assert.That(string.Empty.ToQueryDocument(), Is.Null);
    }

    [Test]
    public void ToQueryDocReturnsNullForWhitespace()
    {
        Assert.That(" ".ToQueryDocument(), Is.Null);
    }

    [Test]
    public void ToQueryDocParsesCorrectly()
    {
        // Trailing whitespace and unquoted keys must both be tolerated.
        const string json = "{name: \"bob\", occupation: \"developer\"} ";
        var document = json.ToQueryDocument();
        Assert.AreEqual("bob", document["name"].AsString);
        Assert.AreEqual("developer", document["occupation"].AsString);
    }
}
}
|
TellagoDevLabs/Hermes
|
src/DataAccess.MongoDB.Tests/Repository/ExtensionsTest.cs
|
C#
|
bsd-3-clause
| 1,003
|
#include "zglew.h"
#include "z3dgpuinfo.h"
#include <QStringList>
#include <QProcess>
#include "QsLog.h"
// Singleton accessor: the single Z3DGpuInfo instance is constructed on
// first use (running GPU detection once) and shared by all callers.
Z3DGpuInfo& Z3DGpuInfo::getInstance()
{
  static Z3DGpuInfo gpuInfo;
  return gpuInfo;
}

// Constructor: start pessimistic (unsupported); detectGpuInfo() flips
// m_isSupported once the GL context passes all checks.
// NOTE(review): assumes a current OpenGL context exists at first
// getInstance() call — glGetString returns NULL otherwise.
Z3DGpuInfo::Z3DGpuInfo()
  : m_isSupported(false)
{
  detectGpuInfo();
}
// GLSL major version parsed by detectGpuInfo(); logs an error and
// returns -1 when the GPU was rejected as unsupported.
int Z3DGpuInfo::getGlslMajorVersion() const
{
  if (!isSupported()) {
    LERROR() << "Current GPU card not supported. This function call should not happen.";
    return -1;
  }
  return m_glslMajorVersion;
}

// GLSL minor version; -1 (with an error log) when unsupported.
int Z3DGpuInfo::getGlslMinorVersion() const
{
  if (!isSupported()) {
    LERROR() << "Current GPU card not supported. This function call should not happen.";
    return -1;
  }
  return m_glslMinorVersion;
}

// GLSL release version; -1 (with an error log) when unsupported.
int Z3DGpuInfo::getGlslReleaseVersion() const
{
  if (!isSupported()) {
    LERROR() << "Current GPU card not supported. This function call should not happen.";
    return -1;
  }
  return m_glslReleaseVersion;
}
// Vendor classified from the GL_VENDOR string in detectGpuInfo().
Z3DGpuInfo::GpuVendor Z3DGpuInfo::getGpuVendor() const
{
  return m_gpuVendor;
}

// Case-insensitive substring test against the GL_EXTENSIONS string.
// Note: plain substring matching can also match a longer extension
// name that contains the queried one as a prefix.
bool Z3DGpuInfo::isExtensionSupported(QString extension) const
{
  return m_glExtensionsString.contains(extension, Qt::CaseInsensitive);
}

// Raw GL_VERSION string as reported by the driver.
QString Z3DGpuInfo::getGlVersionString() const
{
  return m_glVersionString;
}

// Raw GL_VENDOR string.
QString Z3DGpuInfo::getGlVendorString() const
{
  return m_glVendorString;
}

// Raw GL_RENDERER string.
QString Z3DGpuInfo::getGlRendererString() const
{
  return m_glRendererString;
}

// Raw GL_SHADING_LANGUAGE_VERSION string ("" if the driver returned
// NULL — see detectGpuInfo()).
QString Z3DGpuInfo::getGlShadingLanguageVersionString() const
{
  return m_glslVersionString;
}

// Raw space-separated GL_EXTENSIONS string.
QString Z3DGpuInfo::getGlExtensionsString() const
{
  return m_glExtensionsString;
}
// Currently-available video memory as reported by vendor-specific GL
// extensions (in KB per those extension specs — callers divide by 1024
// for MB), or -1 when no supported query exists for this vendor.
int Z3DGpuInfo::getAvailableTextureMemory() const
{
  int availableTexMem = -1;

  if (m_gpuVendor == GPU_VENDOR_NVIDIA &&
      isExtensionSupported("GL_NVX_gpu_memory_info")) {
    int freeMem;
    glGetIntegerv(GL_GPU_MEMORY_INFO_CURRENT_AVAILABLE_VIDMEM_NVX, &freeMem);
    availableTexMem = freeMem;
  } else if (m_gpuVendor == GPU_VENDOR_ATI &&
             isExtensionSupported("GL_ATI_meminfo")) {
    // GL_ATI_meminfo fills four values; index 1 is used here.
    int memInfo[4];
    glGetIntegerv(GL_TEXTURE_FREE_MEMORY_ATI, memInfo);
    availableTexMem = memInfo[1];
  }
  return availableTexMem;
}
// Total (dedicated) video memory reported by vendor-specific GL
// extensions (in KB per those extension specs), or -1 when no
// supported query exists for this vendor.
int Z3DGpuInfo::getTotalTextureMemory() const
{
  int totalTexMem = -1;

  if (m_gpuVendor == GPU_VENDOR_NVIDIA &&
      isExtensionSupported("GL_NVX_gpu_memory_info")) {
    int dedicatedMem;
    glGetIntegerv(GL_GPU_MEMORY_INFO_DEDICATED_VIDMEM_NVX, &dedicatedMem);
    totalTexMem = dedicatedMem;
  } else if (m_gpuVendor == GPU_VENDOR_ATI &&
             isExtensionSupported("GL_ATI_meminfo")) {
    // GL_ATI_meminfo fills four values; index 0 is used here.
    int memInfo[4];
    glGetIntegerv(GL_TEXTURE_FREE_MEMORY_ATI, memInfo);
    totalTexMem = memInfo[0];
  }
  return totalTexMem;
}
// FBOs: core since GL 3.0, otherwise via the EXT extension.
bool Z3DGpuInfo::isFrameBufferObjectSupported() const
{
  return GLEW_VERSION_3_0 || isExtensionSupported("GL_EXT_framebuffer_object");
}

// NPOT textures: core since GL 2.0, otherwise via the ARB extension.
bool Z3DGpuInfo::isNonPowerOfTwoTextureSupported() const
{
  return GLEW_VERSION_2_0 || isExtensionSupported("GL_ARB_texture_non_power_of_two");
}

// Geometry shaders: core since GL 3.2, or via ARB/EXT extensions.
bool Z3DGpuInfo::isGeometryShaderSupported() const
{
  return GLEW_VERSION_3_2 ||
      isExtensionSupported("GL_ARB_geometry_shader4") ||
      isExtensionSupported("GL_EXT_geometry_shader4");
}

// Anisotropic filtering is extension-only (not core in any GL version
// this code targets).
bool Z3DGpuInfo::isTextureFilterAnisotropicSupported() const
{
  return isExtensionSupported("GL_EXT_texture_filter_anisotropic");
}

// Rectangle textures: core since GL 3.1, or via the ARB extension.
bool Z3DGpuInfo::isTextureRectangleSupported() const
{
  return GLEW_VERSION_3_1 || isExtensionSupported("GL_ARB_texture_rectangle");
}

// ARB imaging subset (blend equation extensions etc.).
bool Z3DGpuInfo::isImagingSupported() const
{
  return isExtensionSupported("GL_ARB_imaging");
}

// Float color buffers: core since GL 3.0, or via the ARB extension.
bool Z3DGpuInfo::isColorBufferFloatSupported() const
{
  return GLEW_VERSION_3_0 || isExtensionSupported("GL_ARB_color_buffer_float");
}

// Float depth buffers: core since GL 3.0, or via the ARB extension.
bool Z3DGpuInfo::isDepthBufferFloatSupported() const
{
  return GLEW_VERSION_3_0 || isExtensionSupported("GL_ARB_depth_buffer_float");
}

// Float textures: core since GL 3.0, or via the ARB extension.
bool Z3DGpuInfo::isTextureFloatSupported() const
{
  return GLEW_VERSION_3_0 || isExtensionSupported("GL_ARB_texture_float");
}

// R/RG texture formats: core since GL 3.0, or via the ARB extension.
bool Z3DGpuInfo::isTextureRGSupported() const
{
  return GLEW_VERSION_3_0 || isExtensionSupported("GL_ARB_texture_rg");
}

// Vertex array objects: core since GL 3.0, or via ARB/APPLE extensions.
bool Z3DGpuInfo::isVAOSupported() const
{
  return GLEW_VERSION_3_0 || isExtensionSupported("GL_ARB_vertex_array_object") ||
      isExtensionSupported("GL_APPLE_vertex_array_object");
}
// Builds a human-readable capability report, one item per list entry,
// for display to the user.  Unsupported GPUs yield only the reason and
// a disabled-3D notice.  Mirrors logGpuInfo(), which writes the same
// data to the log instead.
QStringList Z3DGpuInfo::getGpuInfo() const
{
  QStringList info;
  if (!isSupported()) {
    info << QString("Current GPU card is not supported. Reason: %1").
        arg(m_notSupportedReason);
    info << "3D functions will be disabled.";
    return info;
  }
#ifdef __APPLE__
  // On macOS prepend the system profiler's display report verbatim.
  QProcess dispInfo;
  dispInfo.start("system_profiler", QStringList() << "SPDisplaysDataType");
  if (dispInfo.waitForFinished(-1))
    info << dispInfo.readAllStandardOutput();
  else
    info << dispInfo.readAllStandardError();
#endif
  info << QString("OpenGL Vendor: %1").arg(m_glVendorString);
  info << QString("OpenGL Renderer: %1").arg(m_glRendererString);
  info << QString("OpenGL Version: %1").arg(m_glVersionString);
  info << QString("OpenGL SL Version: %1").arg(m_glslVersionString);
  //LINFO() << "OpenGL Extensions:" << m_glExtensionsString;
  info << QString("Max Texture Size: %1").arg(m_maxTexureSize);
  info << QString("Max 3D Texture Size: %1").arg(m_max3DTextureSize);
  info << QString("Max Color Attachments: %1").arg(m_maxColorAttachments);
  info << QString("Max Draw Buffer: %1").arg(m_maxDrawBuffer);
  // Geometry-shader limits only when queried (see detectGpuInfo()).
  if(isGeometryShaderSupported() && m_maxGeometryOutputVertices > 0) {
    info << QString("Max GS Output Vertices: %1").
        arg(m_maxGeometryOutputVertices);
  }
  info << QString("Max VS Texture Image Units: %1").
      arg(m_maxVertexTextureImageUnits);
  if (isGeometryShaderSupported() && m_maxGeometryTextureImageUnits > 0) {
    info << QString("Max GS Texture Image Units: %1").arg(m_maxGeometryTextureImageUnits);
  }
  info << QString("Max FS Texture Image Units: %1").arg(m_maxTextureImageUnits);
  info << QString("VS+GS+FS Texture Image Units: %1").arg(m_maxCombinedTextureImageUnits);
  info << QString("Max Texture Coordinates: %1").arg(m_maxTextureCoords);
  // Memory values come back in KB from the vendor queries; show MB.
  if(getTotalTextureMemory() != -1) {
    info << QString("Total Graphics Memory Size: %1 MB").
        arg(getTotalTextureMemory()/1024);
  }
  if(getAvailableTextureMemory() != -1) {
    info << QString("Available Graphics Memory Size: %1 MB").
        arg(getAvailableTextureMemory()/1024);
  }
  info << QString("Smooth Point Size Range: (%1, %2)").
      arg(m_minSmoothPointSize).arg(m_maxSmoothPointSize);
  info << QString("Smooth Point Size Granularity: %1").
      arg(m_smoothPointSizeGranularity);
  info << QString("Aliased Point Size Range: (%1, %2)").
      arg(m_minAliasedPointSize).arg(m_maxAliasedPointSize);
  info << QString("Smooth Line Width Range: (%1, %2)").
      arg(m_minSmoothLineWidth).arg(m_maxSmoothLineWidth);
  info << QString("Smooth Line Width Granularity: %1").
      arg(m_smoothLineWidthGranularity);
  info << QString("Aliased Line Width Range: (%1, %2)").
      arg(m_minAliasedLineWidth).arg(m_maxAliasedLineWidth);
  return info;
}
// Writes the same capability report as getGpuInfo() to the log.  Keep
// the two functions in sync when adding new fields.
void Z3DGpuInfo::logGpuInfo() const
{
  if (!isSupported()) {
    LINFO() << "Current GPU card is not supported. Reason: " << m_notSupportedReason;
    LWARN() << "3D functions will be disabled.";
    return;
  }
#ifdef __APPLE__
  // On macOS log the system profiler's display report verbatim.
  QProcess dispInfo;
  dispInfo.start("system_profiler", QStringList() << "SPDisplaysDataType");
  if (dispInfo.waitForFinished(-1))
    LINFO() << dispInfo.readAllStandardOutput();
  else
    LINFO() << dispInfo.readAllStandardError();
#endif
  LINFO() << "OpenGL Vendor:" << m_glVendorString;
  LINFO() << "OpenGL Renderer:" << m_glRendererString;
  LINFO() << "OpenGL Version:" << m_glVersionString;
  LINFO() << "OpenGL SL Version:" << m_glslVersionString;
  //LINFO() << "OpenGL Extensions:" << m_glExtensionsString;
  LINFO() << "Max Texture Size: " << m_maxTexureSize;
  LINFO() << "Max 3D Texture Size: " << m_max3DTextureSize;
  LINFO() << "Max Color Attachments: " << m_maxColorAttachments;
  LINFO() << "Max Draw Buffer: " << m_maxDrawBuffer;
  if(isGeometryShaderSupported() && m_maxGeometryOutputVertices > 0) {
    LINFO() << "Max GS Output Vertices: " << m_maxGeometryOutputVertices;
  }
  LINFO() << "Max VS Texture Image Units: " << m_maxVertexTextureImageUnits;
  if (isGeometryShaderSupported() && m_maxGeometryTextureImageUnits > 0) {
    LINFO() << "Max GS Texture Image Units: " << m_maxGeometryTextureImageUnits;
  }
  LINFO() << "Max FS Texture Image Units: " << m_maxTextureImageUnits;
  LINFO() << "VS+GS+FS Texture Image Units: " << m_maxCombinedTextureImageUnits;
  LINFO() << "Max Texture Coordinates: " << m_maxTextureCoords;
  // Memory values come back in KB from the vendor queries; log MB.
  if(getTotalTextureMemory() != -1) {
    LINFO() << "Total Graphics Memory Size: " << getTotalTextureMemory()/1024 << "MB";
  }
  if(getAvailableTextureMemory() != -1) {
    LINFO() << "Available Graphics Memory Size:" << getAvailableTextureMemory()/1024 << "MB";
  }
  LINFO() << "Smooth Point Size Range: " << "(" << m_minSmoothPointSize << "," << m_maxSmoothPointSize << ")";
  LINFO() << "Smooth Point Size Granularity: " << m_smoothPointSizeGranularity;
  LINFO() << "Aliased Point Size Range: " << "(" << m_minAliasedPointSize << "," << m_maxAliasedPointSize << ")";
  LINFO() << "Smooth Line Width Range: " << "(" << m_minSmoothLineWidth << "," << m_maxSmoothLineWidth << ")";
  LINFO() << "Smooth Line Width Granularity: " << m_smoothLineWidthGranularity;
  LINFO() << "Aliased Line Width Range: " << "(" << m_minAliasedLineWidth << "," << m_maxAliasedLineWidth << ")";
  LINFO() << "";
}
// Weighted-average transparency: requires the listed texture/FBO
// features plus at least 2 color attachments.  Compiled out (always
// false) unless _ENABLE_WAVG_ is defined.
bool Z3DGpuInfo::isWeightedAverageSupported() const
{
#ifdef _ENABLE_WAVG_
  return Z3DGpuInfoInstance.isTextureRGSupported() && Z3DGpuInfoInstance.isTextureRectangleSupported() &&
      Z3DGpuInfoInstance.isTextureFloatSupported() && Z3DGpuInfoInstance.isImagingSupported() &&
      Z3DGpuInfoInstance.isColorBufferFloatSupported() &&
      Z3DGpuInfoInstance.getMaxColorAttachments() >= 2;
#else
  return false;
#endif
}

// Dual depth peeling: same feature set but needs 8 color attachments.
// Compiled out (always false) unless _ENABLE_DDP_ is defined.
bool Z3DGpuInfo::isDualDepthPeelingSupported() const
{
#ifdef _ENABLE_DDP_
  return Z3DGpuInfoInstance.isTextureRGSupported() && Z3DGpuInfoInstance.isTextureRectangleSupported() &&
      Z3DGpuInfoInstance.isTextureFloatSupported() && Z3DGpuInfoInstance.isImagingSupported() &&
      Z3DGpuInfoInstance.isColorBufferFloatSupported() &&
      Z3DGpuInfoInstance.getMaxColorAttachments() >= 8;
#else
  return false;
#endif
}

// Per-pixel linked-list transparency requires GL 4.2 (image load/store
// and atomic counters are core there).
bool Z3DGpuInfo::isLinkedListSupported() const
{
  return GLEW_VERSION_4_2;
}
// Queries the current GL context once (called from the constructor):
// reads the driver strings, rejects configurations that cannot run the
// 3D code (pre-2.1 GL, missing FBO/NPOT support, known-broken NPOT
// hardware), then caches version numbers, vendor, and resource limits.
// Requires a current OpenGL context and an initialised GLEW.
void Z3DGpuInfo::detectGpuInfo()
{
  m_glVersionString = QString(reinterpret_cast<const char*>(glGetString(GL_VERSION)));
  m_glVendorString = QString(reinterpret_cast<const char*>(glGetString(GL_VENDOR)));
  m_glRendererString = QString(reinterpret_cast<const char*>(glGetString(GL_RENDERER)));
  m_glExtensionsString = QString(reinterpret_cast<const char*>(glGetString(GL_EXTENSIONS)));
  if (GLEW_VERSION_2_1) {
    if (!isFrameBufferObjectSupported()) {
      m_isSupported = false;
      m_notSupportedReason = "Frame Buffer Object (FBO) is not supported by current openGL context.";
      return;
    } else if (!isNonPowerOfTwoTextureSupported()) { // not necessary, NPOT texture is supported since opengl 2.0
      m_isSupported = false;
      m_notSupportedReason = "Non power of two texture is not supported by current openGL context.";
      return;
    } else if (getGpuVendor() == GPU_VENDOR_ATI && isNonPowerOfTwoTextureSupported() &&
               (m_glRendererString.contains("RADEON X", Qt::CaseInsensitive)||
                m_glRendererString.contains("RADEON 9", Qt::CaseInsensitive))) { //from http://www.opengl.org/wiki/NPOT_Texture
      // Blacklist: ATI R300/R400 advertise NPOT but cannot really do it.
      m_isSupported = false;
      m_notSupportedReason = "The R300 and R400-based cards (Radeon 9500+ and X500+) are incapable of generic NPOT usage. You can use NPOTs, \
but only if the texture has no mipmaps.";
      return;
    } else if (getGpuVendor() == GPU_VENDOR_NVIDIA && isNonPowerOfTwoTextureSupported() &&
               m_glRendererString.contains("GeForce FX", Qt::CaseInsensitive)) { //from http://www.opengl.org/wiki/NPOT_Texture
      // Blacklist: NV30 falls back to software rendering for NPOT.
      m_isSupported = false;
      m_notSupportedReason = "NV30-based cards (GeForce FX of any kind) are incapable of NPOTs at all, despite implementing OpenGL 2.0 \
(which requires NPOT). It will do software rendering if you try to use it. ";
      return;
    } else {
      m_isSupported = true;
    }
    // Prevent segfault
    const char* glslVS = reinterpret_cast<const char*>(glGetString(GL_SHADING_LANGUAGE_VERSION));
    if (glslVS)
      m_glslVersionString = QString(glslVS);
    else
      m_glslVersionString = "";
    if (!parseVersionString(m_glVersionString, m_glMajorVersion, m_glMinorVersion, m_glReleaseVersion)) {
      LERROR() << "Malformed OpenGL version string:" << m_glVersionString;
    }
    // GPU Vendor
    if (m_glVendorString.contains("NVIDIA", Qt::CaseInsensitive))
      m_gpuVendor = GPU_VENDOR_NVIDIA;
    else if (m_glVendorString.contains("ATI", Qt::CaseInsensitive))
      m_gpuVendor = GPU_VENDOR_ATI;
    else if (m_glVendorString.contains("INTEL", Qt::CaseInsensitive))
      m_gpuVendor = GPU_VENDOR_INTEL;
    else {
      m_gpuVendor = GPU_VENDOR_UNKNOWN;
    }
    // Shaders
    if (!parseVersionString(m_glslVersionString, m_glslMajorVersion, m_glslMinorVersion,
                            m_glslReleaseVersion)) {
      LERROR() << "Malformed GLSL version string:" << m_glslVersionString;
    }
    // -1 marks geometry-shader limits as "not queried".
    m_maxGeometryOutputVertices = -1;
    if(isGeometryShaderSupported())
      glGetIntegerv(GL_MAX_GEOMETRY_OUTPUT_VERTICES_EXT, &m_maxGeometryOutputVertices);
    // Texturing
    glGetIntegerv(GL_MAX_TEXTURE_SIZE, &m_maxTexureSize);
    glGetIntegerv(GL_MAX_3D_TEXTURE_SIZE, &m_max3DTextureSize);
    // http://www.opengl.org/wiki/Textures_-_more
    glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, &m_maxTextureImageUnits);
    glGetIntegerv(GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS, &m_maxVertexTextureImageUnits);
    m_maxGeometryTextureImageUnits = -1;
    if (isGeometryShaderSupported())
      glGetIntegerv(GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS, &m_maxGeometryTextureImageUnits);
    glGetIntegerv(GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS, &m_maxCombinedTextureImageUnits);
    glGetIntegerv(GL_MAX_TEXTURE_COORDS, &m_maxTextureCoords);
    if (isTextureFilterAnisotropicSupported())
      glGetFloatv(GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT, &m_maxTextureAnisotropy);
    else
      m_maxTextureAnisotropy = 1.0;
    glGetIntegerv(GL_MAX_COLOR_ATTACHMENTS_EXT, &m_maxColorAttachments);
    glGetIntegerv(GL_MAX_DRAW_BUFFERS, &m_maxDrawBuffer);
    // Point
    GLfloat range[2];
    glGetFloatv(GL_SMOOTH_POINT_SIZE_RANGE, range);
    glGetFloatv(GL_SMOOTH_POINT_SIZE_GRANULARITY, &m_smoothPointSizeGranularity);
    m_minSmoothPointSize = range[0];
    m_maxSmoothPointSize = range[1];
    glGetFloatv(GL_ALIASED_POINT_SIZE_RANGE, range);
    m_minAliasedPointSize = range[0];
    m_maxAliasedPointSize = range[1];
    // Line
    glGetFloatv(GL_SMOOTH_LINE_WIDTH_RANGE, range);
    glGetFloatv(GL_SMOOTH_LINE_WIDTH_GRANULARITY, &m_smoothLineWidthGranularity);
    m_minSmoothLineWidth = range[0];
    m_maxSmoothLineWidth = range[1];
    glGetFloatv(GL_ALIASED_LINE_WIDTH_RANGE, range);
    m_minAliasedLineWidth = range[0];
    m_maxAliasedLineWidth = range[1];
  } else {
    m_isSupported = false;
    m_notSupportedReason = "Minimum OpenGL version required is 2.1, while current openGL version is: \"" + m_glVersionString + "\"";
  }
}
// format "2.1[.1] otherstring"
bool Z3DGpuInfo::parseVersionString(const QString &versionString, int &major, int &minor, int &release)
{
major = -1;
minor = -1;
release = -1;
if (versionString.isEmpty())
return false;
QString str = versionString.mid(0, versionString.indexOf(" "));
QStringList list = str.split(".");
if (list.size() < 2 || list.size() > 3)
return false;
bool ok;
major = list[0].toInt(&ok);
if (!ok) {
major = -1;
return false;
}
minor = list[1].toInt(&ok);
if (!ok) {
major = -1;
minor = -1;
return false;
}
if (list.size() > 2) {
release = list[2].toInt(&ok);
if (!ok) {
major = -1;
minor = -1;
release = -1;
return false;
}
} else
release = 0;
return true;
}
|
stephenplaza/NeuTu
|
neurolabi/gui/z3dgpuinfo.cpp
|
C++
|
bsd-3-clause
| 16,476
|
/*
* Carrot2 project.
*
* Copyright (C) 2002-2010, Dawid Weiss, Stanisław Osiński.
* All rights reserved.
*
* Refer to the full license file "carrot2.LICENSE"
* in the root folder of the repository checkout or at:
* http://www.carrot2.org/carrot2.LICENSE
*/
package org.carrot2.util.attribute;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import java.util.Map;
import org.carrot2.util.attribute.metadata.AttributeMetadata;
import org.carrot2.util.attribute.metadata.BindableMetadata;
import org.carrot2.util.attribute.metadata.CommonMetadata;
import org.carrot2.util.attribute.test.metadata.AttributeDescriptions;
import org.carrot2.util.attribute.test.metadata.AttributeGroups;
import org.carrot2.util.attribute.test.metadata.AttributeLabels;
import org.carrot2.util.attribute.test.metadata.AttributeLevels;
import org.carrot2.util.attribute.test.metadata.AttributeTitles;
import org.carrot2.util.attribute.test.metadata.NoJavadoc;
import org.carrot2.util.attribute.test.metadata.TestBindable;
import org.junit.Test;
public class BindableMetadataBuilderTest
{
@Test
public void testEmptyJavadoc()
{
    // A field with no javadoc must still get a metadata entry,
    // just with null label, title, and description.
    final Class<?> clazz = NoJavadoc.class;
    final String fieldName = "noJavadoc";
    assertNotNull(getAttributeMetadata(clazz, fieldName));
    assertNull(getLabel(clazz, fieldName));
    assertNull(getTitle(clazz, fieldName));
    assertNull(getDescription(clazz, fieldName));
}
@Test
public void testSingleWordLabel()
{
checkLabel(AttributeLabels.class, "singleWordLabel", "word");
}
@Test
public void testMultiWordLabel()
{
checkLabel(AttributeLabels.class, "multiWordLabel", "multi word label");
}
@Test
public void testMultiSentenceLabel()
{
checkLabel(AttributeLabels.class, "multiSentenceLabel",
"First label sentence. Second label sentence.");
}
@Test
public void testLabelWithComment()
{
checkLabel(AttributeLabels.class, "labelWithComment", "word");
}
@Test
public void testNoTitle()
{
checkTitle(AttributeTitles.class, "noTitle", null);
}
@Test
public void testEmptyTitle()
{
checkTitle(AttributeTitles.class, "emptyTitle", null);
}
@Test
public void testTitleWithPeriod()
{
checkTitle(AttributeTitles.class, "titleWithPeriod", "Title with period");
}
@Test
public void testLabelNotDefined()
{
checkLabelOrTitle(AttributeTitles.class, "titleWithPeriod", "Title with period");
}
@Test
public void testLabelDefined()
{
checkLabelOrTitle(AttributeTitles.class, "titleWithLabel", "label");
}
@Test
public void testTitleWithoutPeriod()
{
checkTitle(AttributeTitles.class, "titleWithoutPeriod", "Title without period");
}
@Test
public void testTitleWithDescription()
{
checkTitle(AttributeTitles.class, "titleWithDescription",
"Title with description");
}
@Test
public void testTitleWithLabel()
{
checkTitle(AttributeTitles.class, "titleWithLabel", "Title with label");
}
@Test
public void testTitleWithExtraSpace()
{
// Note that this scenario is not supported
checkTitle(AttributeTitles.class, "titleWithExtraSpace", "Title with extra space");
}
@Test
public void testTitleWithExclamationMark()
{
// Note that this scenario is not supported
checkTitle(AttributeTitles.class, "titleWithExclamationMark",
"Title with exclamation mark! and something more");
checkDescription(AttributeTitles.class, "titleWithExclamationMark",
"Description.");
}
@Test
public void testTitleWithExtraPeriods()
{
// Note that this scenario is not supported
checkTitle(AttributeTitles.class, "titleWithExtraPeriods",
"Title with extra periods (e.g. www.carrot2.org)");
checkDescription(AttributeTitles.class, "titleWithExtraPeriods", "Description.");
}
@Test
public void testTitleWithLink()
{
// Note that this scenario is not supported
checkTitle(AttributeTitles.class, "titleWithLink",
"Title with link to <code>AttributeTitles.titleWithLink</code>");
checkDescription(AttributeTitles.class, "titleWithLink", "Description.");
}
@Test
public void testDescriptionWithLinks()
{
// Note that this scenario is not supported
checkTitle(AttributeTitles.class, "descriptionWithLinks", "Title");
System.out.println(getDescription(AttributeTitles.class, "descriptionWithLinks"));
checkDescription(AttributeTitles.class, "descriptionWithLinks",
"Description with <code>" +
AttributeTitles.class.getName() +
".titleAtTheBottom</code> and <code>String</code> links.");
}
@Test
public void testTitleAtTheBottomNotSupported()
{
// Note that this scenario is not supported
checkTitle(AttributeTitles.class, "titleAtTheBottom", null);
}
@Test
public void testNoDescriptionNoTitle()
{
checkDescription(AttributeDescriptions.class, "noDescriptionNoTitle", null);
}
@Test
public void testNoDescription()
{
checkDescription(AttributeDescriptions.class, "noDescription", null);
}
@Test
public void testSingleSentenceDescription()
{
checkDescription(AttributeDescriptions.class, "singleSentenceDescription",
"Single sentence description.");
}
@Test
public void testTwoSentenceDescription()
{
checkDescription(AttributeDescriptions.class, "twoSentenceDescription",
"Description sentence 1. Description sentence 2.");
}
@Test
public void testDescriptionWithExtraSpace()
{
checkDescription(AttributeDescriptions.class, "descriptionWithExtraSpace",
"Description with extra space.");
}
// --- Named attribute tests ---
// For named attributes, label/title/description come from metadata looked up
// by the attribute's key; Javadoc on the field overrides individual parts.
// (Exact lookup source is defined by NamedAttributes -- outside this file.)

@Test
public void testNamedAttributeNoJavadoc()
{
    // No field Javadoc: all three parts come from the named metadata.
    final Class<NamedAttributes> clazz = NamedAttributes.class;
    final String fieldName = "noJavadoc";
    checkLabel(clazz, fieldName, "label");
    checkTitle(clazz, fieldName, "Title");
    checkDescription(clazz, fieldName, "Description.");
}

@Test
public void testNamedAttributeLabelOverride()
{
    // Field Javadoc overrides only the label; title/description unchanged.
    final Class<NamedAttributes> clazz = NamedAttributes.class;
    final String fieldName = "labelOverride";
    checkLabel(clazz, fieldName, "overridden");
    checkTitle(clazz, fieldName, "Title");
    checkDescription(clazz, fieldName, "Description.");
}

@Test
public void testNamedAttributeTitleOverride()
{
    // Only the title is overridden by the field's Javadoc.
    final Class<NamedAttributes> clazz = NamedAttributes.class;
    final String fieldName = "titleOverride";
    checkLabel(clazz, fieldName, "label");
    checkTitle(clazz, fieldName, "Title overridden");
    checkDescription(clazz, fieldName, "Description.");
}

@Test
public void testNamedAttributeTitleDescriptionOverride()
{
    // Both title and description are overridden; label kept from metadata.
    final Class<NamedAttributes> clazz = NamedAttributes.class;
    final String fieldName = "titleDescriptionOverride";
    checkLabel(clazz, fieldName, "label");
    checkTitle(clazz, fieldName, "Title overridden");
    checkDescription(clazz, fieldName, "Description overridden.");
}

@Test
public void testNamedAttributeNoDotInKey()
{
    // A key without a dot yields no metadata at all (all parts null).
    final Class<NamedAttributes> clazz = NamedAttributes.class;
    final String fieldName = "noDotInKey";
    checkLabel(clazz, fieldName, null);
    checkTitle(clazz, fieldName, null);
    checkDescription(clazz, fieldName, null);
}
@Test
public void testBindableMetadata()
{
    // Checks the top-level metadata of the bindable class itself
    // (as opposed to the per-attribute metadata checked elsewhere).
    final BindableMetadata metadata = BindableDescriptorBuilder.buildDescriptor(
        new TestBindable()).metadata;
    assertNotNull(metadata);
    assertEquals("Some test bindable", metadata.getTitle());
    assertEquals("Description.", metadata.getDescription());
    assertEquals("Test Bindable", metadata.getLabel());
}
// --- Attribute level tests ---
// Each declared level constant must map to its AttributeLevel enum value;
// an unrecognized or missing level yields null.

@Test
public void testBasicLevel()
{
    checkLevel(AttributeLevels.class, "basicLevel", AttributeLevel.BASIC);
}

@Test
public void testMediumLevel()
{
    checkLevel(AttributeLevels.class, "mediumLevel", AttributeLevel.MEDIUM);
}

@Test
public void testAdvancedLevel()
{
    checkLevel(AttributeLevels.class, "advancedLevel", AttributeLevel.ADVANCED);
}

@Test
public void testUnknownLevel()
{
    // A level string that matches no enum constant resolves to null.
    assertNull(getLevel(AttributeLevels.class, "unknownLevel"));
}

@Test
public void testNoLevel()
{
    assertNull(getLevel(AttributeLevels.class, "noLevel"));
}
// --- Attribute group tests ---

@Test
public void testOneWordGroup()
{
    checkGroup(AttributeGroups.class, "oneWordGroup", "Group");
}

@Test
public void testMultiWordGroup()
{
    // Multi-word group names are preserved verbatim.
    checkGroup(AttributeGroups.class, "multiWordGroup", "Multi word group");
}

@Test
public void testNoGroup()
{
    assertNull(getGroup(AttributeGroups.class, "noGroup"));
}
/**
 * Asserts that attribute metadata exists for <code>fieldName</code> in
 * <code>clazz</code> and that its label equals <code>expectedLabel</code>
 * (<code>null</code> meaning "no label").
 */
protected void checkLabel(final Class<?> clazz, final String fieldName,
    final String expectedLabel)
{
    assertNotNull(getAttributeMetadata(clazz, fieldName));
    assertEquals(expectedLabel, getLabel(clazz, fieldName));
}

/**
 * Asserts that attribute metadata exists and that the attribute's group
 * equals <code>expectedGroup</code> (<code>null</code> meaning no group).
 */
protected void checkGroup(final Class<?> clazz, final String fieldName,
    final String expectedGroup)
{
    assertNotNull(getAttributeMetadata(clazz, fieldName));
    assertEquals(expectedGroup, getGroup(clazz, fieldName));
}

/**
 * Asserts that attribute metadata exists and that the attribute's level
 * equals <code>expectedLabel</code> (<code>null</code> meaning no level).
 */
protected void checkLevel(final Class<?> clazz, final String fieldName,
    final AttributeLevel expectedLabel)
{
    assertNotNull(getAttributeMetadata(clazz, fieldName));
    assertEquals(expectedLabel, getLevel(clazz, fieldName));
}

/**
 * Asserts that attribute metadata exists and that the label-or-title
 * fallback value equals <code>expectedLabel</code>.
 */
protected void checkLabelOrTitle(final Class<?> clazz, final String fieldName,
    final String expectedLabel)
{
    assertNotNull(getAttributeMetadata(clazz, fieldName));
    assertEquals(expectedLabel, getLabelOrTitle(clazz, fieldName));
}

/**
 * Asserts that attribute metadata exists and that the attribute's title
 * equals <code>expectedTitle</code> (<code>null</code> meaning no title).
 */
protected void checkTitle(final Class<?> clazz, final String fieldName,
    final String expectedTitle)
{
    assertNotNull(getAttributeMetadata(clazz, fieldName));
    assertEquals(expectedTitle, getTitle(clazz, fieldName));
}

/**
 * Asserts that attribute metadata exists and that the attribute's
 * description equals <code>expectedDescription</code>.
 */
protected void checkDescription(final Class<?> clazz, final String fieldName,
    final String expectedDescription)
{
    assertNotNull(getAttributeMetadata(clazz, fieldName));
    assertEquals(expectedDescription, getDescription(clazz, fieldName));
}
/**
 * Returns the label of the given attribute, or <code>null</code> if absent.
 */
protected String getLabel(Class<?> componentClass, String fieldName)
{
    final CommonMetadata fieldAttributeMetadata = getAttributeMetadata(
        componentClass, fieldName);
    return fieldAttributeMetadata.getLabel();
}

/**
 * Returns the group of the given attribute, or <code>null</code> if absent.
 * Group is declared on {@link AttributeMetadata} only, hence the downcast.
 */
protected String getGroup(Class<?> componentClass, String fieldName)
{
    final CommonMetadata fieldAttributeMetadata = getAttributeMetadata(
        componentClass, fieldName);
    return ((AttributeMetadata) fieldAttributeMetadata).getGroup();
}

/**
 * Returns the level of the given attribute, or <code>null</code> if absent.
 * Level is declared on {@link AttributeMetadata} only, hence the downcast.
 */
protected AttributeLevel getLevel(Class<?> componentClass, String fieldName)
{
    final CommonMetadata fieldAttributeMetadata = getAttributeMetadata(
        componentClass, fieldName);
    return ((AttributeMetadata) fieldAttributeMetadata).getLevel();
}

/**
 * Returns the label-or-title fallback value for the given attribute.
 */
protected String getLabelOrTitle(Class<?> componentClass, String fieldName)
{
    final CommonMetadata fieldAttributeMetadata = getAttributeMetadata(
        componentClass, fieldName);
    return fieldAttributeMetadata.getLabelOrTitle();
}

/**
 * Returns the title of the given attribute, or <code>null</code> if absent.
 */
protected String getTitle(Class<?> componentClass, String fieldName)
{
    final CommonMetadata fieldAttributeMetadata = getAttributeMetadata(
        componentClass, fieldName);
    return fieldAttributeMetadata.getTitle();
}

/**
 * Returns the description of the given attribute, or <code>null</code>
 * if absent.
 */
protected String getDescription(Class<?> componentClass, String fieldName)
{
    final CommonMetadata fieldAttributeMetadata = getAttributeMetadata(
        componentClass, fieldName);
    return fieldAttributeMetadata.getDescription();
}
/**
 * Builds the bindable descriptor for a freshly instantiated
 * <code>componentClass</code> and returns the metadata of the attribute
 * backed by the field named <code>fieldName</code>. Returns
 * <code>null</code> when the class exposes no attribute metadata or the
 * field has none.
 */
private CommonMetadata getAttributeMetadata(Class<?> componentClass, String fieldName)
{
    try
    {
        // Requires a public no-arg constructor on componentClass.
        final Map<String, AttributeMetadata> componentAttributeMetadata =
            BindableDescriptorBuilder.buildDescriptor(
                componentClass.newInstance()).metadata.getAttributeMetadata();
        if (componentAttributeMetadata == null)
        {
            return null;
        }
        final CommonMetadata fieldAttributeMetadata =
            componentAttributeMetadata.get(fieldName);
        return fieldAttributeMetadata;
    }
    catch (Exception e)
    {
        // Reflective instantiation failures are test bugs; fail loudly.
        throw new RuntimeException(e);
    }
}
}
|
arnaudsj/carrot2
|
core/carrot2-util-attribute/src-test/org/carrot2/util/attribute/BindableMetadataBuilderTest.java
|
Java
|
bsd-3-clause
| 13,538
|
# Install pip inside the conda environment, then fetch Lisa's pip
# requirement list from GitHub.
conda install --yes pip
pip install wget
python -m wget https://raw.githubusercontent.com/mjirik/lisa/master/requirements_pip.txt -o requirements_pip.txt
#python -m wget https://raw.githubusercontent.com/mjirik/lisa/master/requirements_conda.txt -o requirements_conda.txt
#python -m wget https://raw.githubusercontent.com/mjirik/lisa/master/requirements_conda_root.txt -o requirements_conda_root.txt
#conda install --yes --file requirements_conda_root.txt
#conda install --yes -c SimpleITK -c menpo -c mjirik --file requirements_conda.txt
# 1. Install the lisa package itself from the project's conda channels.
conda install --yes -c SimpleITK -c menpo -c mjirik -c conda-forge lisa
# mahotas on luispedro is only for linux
# conda install --yes -c SimpleITK -c luispedro --file requirements_conda.txt
# 2. easy_install requirements simpleITK
easy_install -U --user mahotas
# 3. pip install our packages pyseg_base and dicom2fem
# --no-deps: dependencies were already satisfied by conda above.
pip install -U --no-deps -r requirements_pip.txt --user
# linux specific
pip install scikit-fmm nomkl
mkdir projects
# 4. install - it is now installed with pip
cd projects
## mkdir gco_python
## cd gco_python
# git clone https://github.com/mjirik/gco_python.git
#cd gco_python
## echo `pwd`
# make
# python setup.py install --user
# cd ..
## cd ..
# 5. skelet3d - optional for Histology Analyser
# sudo -u $USER cd ~/projects
# mkdir ~/projects/skelet3d
# mkdir /projects/skelet3d
git clone https://github.com/mjirik/skelet3d.git
cd skelet3d
mkdir build
cd build
# Out-of-source CMake build of the native skelet3d library
# (requires cmake and a C/C++ toolchain on the PATH).
cmake ..
make
|
mjirik/lisa
|
install_nosudo.sh
|
Shell
|
bsd-3-clause
| 1,451
|
from django.http import HttpResponse, Http404
from django.core.exceptions import ObjectDoesNotExist
from django.shortcuts import render, get_object_or_404
from django.core.urlresolvers import reverse
from django.utils.xmlutils import SimplerXMLGenerator
from models import Place, Region
from models import Locality
from models import GlobalRegion
from utils.utils import do_paging, split_list
from django.db.models import Count
from django.contrib.contenttypes.models import ContentType
import json
def place_detail(request, place_id):
    """
    Look up a ``Place`` by primary key and render its public objects,
    paginated, together with the matching ``Region`` (or ``None``).

    Raises Http404 when no ``Place`` with ``place_id`` exists.
    """
    place = get_object_or_404(Place, pk=place_id)
    try:
        region = Region.objects.get(name=place.region)
    except Region.DoesNotExist:
        # A place without a matching region is a normal case; the template
        # handles region=None. Previously a bare "except:" also hid database
        # errors and programming mistakes -- only the missing-row case is
        # expected here.
        region = None
    place_objects = place.museumobject_set.filter(public=True)
    objects = do_paging(request, place_objects)
    return render(request, "location/place_detail.html",
                  {'place': place, 'objects': objects,
                   'region': region})
def place_json(request, encoding='utf-8', mimetype='text/plain'):
    # Flat JSON dump of every geocoded locality with its object count.
    # NOTE(review): the `encoding` and `mimetype` parameters are unused --
    # the response goes out with Django's default content type; confirm
    # whether URLconf callers rely on passing them.
    places = Locality.objects.exclude(
        latitude=None).annotate(Count('museumobject')).values(
        'id', 'name', 'latitude', 'longitude',
        'museumobject__count')
    return HttpResponse(json.dumps(list(places), indent=2))
def place_kml(request, encoding='utf-8', mimetype='text/plain'):
    """
    Write out all the known places to KML.

    Streams one <Placemark> per geocoded Locality (name with object count,
    a link back to the locality page, and its coordinates) directly into
    the HttpResponse via an XML generator.
    """
    # mimetype = "application/vnd.google-earth.kml+xml"
    # mimetype = "text/html"
    places = Locality.objects.exclude(
        latitude=None).annotate(Count('museumobject'))
    response = HttpResponse(mimetype=mimetype)
    # The generator writes straight into the response body.
    handler = SimplerXMLGenerator(response, encoding)
    handler.startDocument()
    handler.startElement(u"kml",
        {u"xmlns": u"http://www.opengis.net/kml/2.2"})
    handler.startElement(u"Document", {})
    for place in places:
        place_url = request.build_absolute_uri(place.get_absolute_url())
        handler.startElement(u"Placemark", {})
        handler.addQuickElement(u"name",
            "%s (%s)" % (place.name, place.museumobject__count))
        # Description holds raw HTML; KML clients render it as a balloon.
        handler.addQuickElement(u"description",
            '<a href="%s">%s</a>' % (place_url, place.__unicode__()))
        handler.startElement(u"Point", {})
        handler.addQuickElement(u"coordinates", place.get_kml_coordinates())
        handler.endElement(u"Point")
        handler.endElement(u"Placemark")
    handler.endElement(u"Document")
    handler.endElement(u"kml")
    return response
def place_duplicates(request):
    '''
    Used for finding duplicate places, by Geoname ID.

    Groups Place rows by gn_id and keeps only the ids that occur more
    than once (the empty order_by() clears default ordering so the
    GROUP BY is solely on gn_id).
    '''
    places = Place.objects.values(
        'gn_id').order_by().annotate(
        count=Count('gn_id')).filter(count__gt=1)
    return render(request, "location/place_dups_list.html",
                  {'places': places})
def place_geoname(request, geoname_id):
    """List every Place that shares the given GeoNames identifier."""
    matching_places = Place.objects.filter(gn_id=geoname_id)
    context = {'places': matching_places}
    return render(request, "location/place_geoname.html", context)
def tree_view(request):
    """Render the tree page listing every global region."""
    context = {'global_regions': GlobalRegion.objects.all()}
    return render(request, "location/tree_view.html", context)
def find_location(model_type, id):
    """Resolve a location object by its content-type model name and pk.

    ``model_type`` is a lowercase model name within the ``location`` app
    (e.g. ``'globalregion'``); raises ``DoesNotExist`` if no row matches.
    """
    content_type = ContentType.objects.get(
        app_label='location', model=model_type)
    return content_type.get_object_for_this_type(id=id)
def view_places(request):
    # Map landing page: the seven top-level regions in fixed display order,
    # plus the absolute URL of the KML feed for the embedded map.
    grs = GlobalRegion.objects.exclude(icon_path="").prefetch_related('children')
    d = dict((g.name, g) for g in grs)
    # NOTE(review): raises KeyError if any of these region names is missing
    # from the database or has an empty icon_path -- confirm this is a
    # deliberate "fail fast on bad fixtures" choice.
    grs = [d['Australia'], d['Pacific'], d['Asia'], d['Europe'], d['Americas'], d['Africa'],
           d['Middle East']]
    kml_url = request.build_absolute_uri(reverse('place_kml'))
    return render(request, 'location/map.html',
                  {'global_regions': grs,
                   'kml_url': kml_url})
def view_geoloc(request, loctype, id, columns=3):
    """
    Render the page for one geolocation (any location model resolved by
    content type). Public objects are ordered so that records with the
    most public images come first; child locations are shown split into
    ``columns`` columns.
    """
    try:
        geolocation = find_location(loctype, id)
    except ObjectDoesNotExist:
        raise Http404
    # The extra() subquery counts public images per object; raw SQL is
    # used because the count feeds the ORDER BY below.
    items = geolocation.museumobject_set.select_related().filter(public=True
        ).prefetch_related('category', 'country', 'global_region'
        ).extra(
            select={'public_images_count': 'select count(*) from mediaman_artefactrepresentation a WHERE a.artefact_id = cat_museumobject.id AND a.public'}
        ).order_by('-public_images_count', 'registration_number')
    # Not every location model has children (e.g. leaf localities).
    children = []
    if hasattr(geolocation, 'children'):
        children = geolocation.children.all()
    objects = do_paging(request, items)
    return render(request, 'location/geolocation.html',
                  {'geolocation': geolocation,
                   'objects': objects,
                   'num_children': len(children),
                   'children': split_list(children, parts=columns)})
|
uq-eresearch/uqam
|
location/views.py
|
Python
|
bsd-3-clause
| 4,790
|
/*-
* Copyright (c) 1998 Michael Smith <msmith@freebsd.org>
* Copyright (c) 2006 Marcel Moolenaar
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#include <sys/cdefs.h>
__FBSDID("$FreeBSD: releng/9.3/sys/boot/i386/efi/bootinfo.c 206376 2010-04-07 18:16:05Z rpaulo $");
#include <stand.h>
#include <string.h>
#include <sys/param.h>
#include <sys/reboot.h>
#include <sys/linker.h>
#include <efi.h>
#include <efilib.h>
#include "bootstrap.h"
#include "libi386.h"
#include <machine/bootinfo.h>
/*
* Return a 'boothowto' value corresponding to the kernel arguments in
* (kargs) and any relevant environment variables.
*/
/* Loader environment variables and the RB_* boothowto bits they set. */
static struct
{
	const char *ev;		/* environment variable name */
	int mask;		/* corresponding RB_* flag */
} howto_names[] = {
	{ "boot_askname",	RB_ASKNAME},
	{ "boot_cdrom",		RB_CDROM},
	{ "boot_ddb",		RB_KDB},
	{ "boot_dfltroot",	RB_DFLTROOT},
	{ "boot_gdb",		RB_GDB},
	{ "boot_multicons",	RB_MULTIPLE},
	{ "boot_mute",		RB_MUTE},
	{ "boot_pause",		RB_PAUSE},
	{ "boot_serial",	RB_SERIAL},
	{ "boot_single",	RB_SINGLE},
	{ "boot_verbose",	RB_VERBOSE},
	{ NULL,	0}
};

/*
 * Command-line switch characters; each maps by index to the RB_* mask at
 * the same position in howto_masks[] (keep the two arrays in sync).
 */
static const char howto_switches[] = "aCdrgDmphsv";
static int howto_masks[] = {
	RB_ASKNAME, RB_CDROM, RB_KDB, RB_DFLTROOT, RB_GDB, RB_MULTIPLE,
	RB_MUTE, RB_PAUSE, RB_SERIAL, RB_SINGLE, RB_VERBOSE
};
/*
 * Build a 'boothowto' flag word: first fold in any boot_* environment
 * variables that are set, then parse "-xyz" switch groups from the kernel
 * argument string (kargs may be NULL).
 */
int
bi_getboothowto(char *kargs)
{
	const char *match;
	char *cp;
	int howto, i;

	howto = 0;

	/* Environment variables take effect regardless of kargs. */
	for (i = 0; howto_names[i].ev != NULL; i++) {
		if (getenv(howto_names[i].ev) != NULL)
			howto |= howto_names[i].mask;
	}

	if (kargs == NULL)
		return (howto);

	/* Walk every "-..." group; stop a group at the first unknown char. */
	for (cp = strchr(kargs, '-'); cp != NULL; cp = strchr(cp, '-')) {
		while (*(++cp) != '\0') {
			match = strchr(howto_switches, *cp);
			if (match == NULL)
				break;
			howto |= howto_masks[match - howto_switches];
		}
	}
	return (howto);
}
/*
* Copy the environment into the load area starting at (addr).
* Each variable is formatted as <name>=<value>, with a single nul
* separating each variable, and a double nul terminating the environment.
*/
vm_offset_t
bi_copyenv(vm_offset_t start)
{
	struct env_var *ep;
	vm_offset_t addr, last;
	size_t len;

	addr = last = start;

	/* Traverse the environment, emitting "name=value\0" for each entry. */
	for (ep = environ; ep != NULL; ep = ep->ev_next) {
		len = strlen(ep->ev_name);
		if (i386_copyin(ep->ev_name, addr, len) != len)
			break;
		addr += len;
		if (i386_copyin("=", addr, 1) != 1)
			break;
		addr++;
		/* Value may legitimately be absent ("name=" only). */
		if (ep->ev_value != NULL) {
			len = strlen(ep->ev_value);
			if (i386_copyin(ep->ev_value, addr, len) != len)
				break;
			addr += len;
		}
		if (i386_copyin("", addr, 1) != 1)
			break;
		/* 'last' only advances past fully-copied variables, so a
		 * partial copy is discarded by the terminator below. */
		last = ++addr;
	}

	/* Double NUL terminates the environment block; on failure, report
	 * the whole copy as empty by returning the start address. */
	if (i386_copyin("", last++, 1) != 1)
		last = start;
	return(last);
}
/*
* Copy module-related data into the load area, where it can be
* used as a directory for loaded modules.
*
* Module data is presented in a self-describing format. Each datum
* is preceded by a 32-bit identifier and a 32-bit size field.
*
* Currently, the following data are saved:
*
* MOD_NAME (variable) module name (string)
* MOD_TYPE (variable) module type (string)
* MOD_ARGS (variable) module parameters (string)
* MOD_ADDR sizeof(vm_offset_t) module load address
* MOD_SIZE sizeof(size_t) module size
* MOD_METADATA (variable) type-specific metadata
*/
/*
 * NOTE: all of these macros advance their address argument (a) as a side
 * effect; each datum is 64-bit aligned in the load area.
 */

/* Copy a 32-bit value to (a) and advance (a) past it. */
#define COPY32(v, a) {				\
    u_int32_t	x = (v);			\
    i386_copyin(&x, a, sizeof(x));		\
    a += sizeof(x);				\
}

/* Emit a string datum: tag, length (including NUL), then the bytes. */
#define MOD_STR(t, a, s) {			\
    COPY32(t, a);				\
    COPY32(strlen(s) + 1, a);			\
    i386_copyin(s, a, strlen(s) + 1);		\
    a += roundup(strlen(s) + 1, sizeof(u_int64_t));\
}

#define MOD_NAME(a, s)	MOD_STR(MODINFO_NAME, a, s)
#define MOD_TYPE(a, s)	MOD_STR(MODINFO_TYPE, a, s)
#define MOD_ARGS(a, s)	MOD_STR(MODINFO_ARGS, a, s)

/* Emit a fixed-size datum: tag, sizeof(s), then the raw bytes of s. */
#define MOD_VAR(t, a, s) {			\
    COPY32(t, a);				\
    COPY32(sizeof(s), a);			\
    i386_copyin(&s, a, sizeof(s));		\
    a += roundup(sizeof(s), sizeof(u_int64_t));	\
}

#define MOD_ADDR(a, s)	MOD_VAR(MODINFO_ADDR, a, s)
#define MOD_SIZE(a, s)	MOD_VAR(MODINFO_SIZE, a, s)

/* Emit one metadata record; the metadata type is OR'ed into the tag. */
#define MOD_METADATA(a, mm) {			\
    COPY32(MODINFO_METADATA | mm->md_type, a);	\
    COPY32(mm->md_size, a);			\
    i386_copyin(mm->md_data, a, mm->md_size);	\
    a += roundup(mm->md_size, sizeof(u_int64_t));\
}

/* Terminator record: MODINFO_END with zero length. */
#define MOD_END(a) {				\
    COPY32(MODINFO_END, a);			\
    COPY32(0, a);				\
}
vm_offset_t
bi_copymodules(vm_offset_t addr)
{
	struct preloaded_file	*fp;
	struct file_metadata	*md;

	/* Start with the first module on the list, should be the kernel. */
	for (fp = file_findfile(NULL, NULL); fp != NULL; fp = fp->f_next) {
		/* The name field must come first. */
		MOD_NAME(addr, fp->f_name);
		MOD_TYPE(addr, fp->f_type);
		if (fp->f_args)
			MOD_ARGS(addr, fp->f_args);
		MOD_ADDR(addr, fp->f_addr);
		MOD_SIZE(addr, fp->f_size);
		/* Skip metadata marked as not-to-be-copied. */
		for (md = fp->f_metadata; md != NULL; md = md->md_next) {
			if (!(md->md_type & MODINFOMD_NOCOPY))
				MOD_METADATA(addr, md);
		}
	}
	/* Terminate the module directory and return the next free address. */
	MOD_END(addr);
	return(addr);
}
/*
* Load the information expected by the kernel.
*
* - The kernel environment is copied into kernel space.
* - Module metadata are formatted and placed in kernel space.
*/
int
bi_load(struct preloaded_file *fp, uint64_t *bi_addr)
{
	struct bootinfo bi;
	struct preloaded_file *xp;
	struct file_metadata *md;
	struct devdesc *rootdev;
	char *rootdevname;
	vm_offset_t addr, ssym, esym;

	bzero(&bi, sizeof(struct bootinfo));
	bi.bi_version = 1;
	/* NOTE(review): boothowto is not passed to the kernel here -- the
	 * line below is commented out; confirm whether this loader variant
	 * conveys the flags some other way. */
//	bi.bi_boothowto = bi_getboothowto(fp->f_args);

	/*
	 * Allow the environment variable 'rootdev' to override the supplied
	 * device. This should perhaps go to MI code and/or have $rootdev
	 * tested/set by MI code before launching the kernel.
	 */
	rootdevname = getenv("rootdev");
	i386_getdev((void**)&rootdev, rootdevname, NULL);
	if (rootdev != NULL) {
		/* Try reading /etc/fstab to select the root device. */
		getrootmount(i386_fmtdev(rootdev));
		free(rootdev);
	}

	/* md_data holds the symbol-table address value in place; it is read
	 * by reinterpreting the field, not by dereferencing a pointer. */
	md = file_findmetadata(fp, MODINFOMD_SSYM);
	ssym = (md != NULL) ? *((vm_offset_t *)&(md->md_data)) : 0;
	md = file_findmetadata(fp, MODINFOMD_ESYM);
	esym = (md != NULL) ? *((vm_offset_t *)&(md->md_data)) : 0;
	/* Only advertise symbols when both ends of the range are known. */
	if (ssym != 0 && esym != 0) {
		bi.bi_symtab = ssym;
		bi.bi_esymtab = esym;
	}

	/* Find the last module in the chain. */
	addr = 0;
	for (xp = file_findfile(NULL, NULL); xp != NULL; xp = xp->f_next) {
		if (addr < (xp->f_addr + xp->f_size))
			addr = xp->f_addr + xp->f_size;
	}

	/* Round up to a 16-byte boundary. */
	addr = (addr + 15) & ~15;

	/* Copy module list and metadata. */
	bi.bi_modulep = addr;
	addr = bi_copymodules(addr);
	/* No forward progress means nothing was copied; clear the pointer. */
	if (addr <= bi.bi_modulep) {
		addr = bi.bi_modulep;
		bi.bi_modulep = 0;
	}
	addr = (addr + 15) & ~15;

	/* Copy our environment. */
	bi.bi_envp = addr;
	addr = bi_copyenv(addr);
	if (addr <= bi.bi_envp) {
		addr = bi.bi_envp;
		bi.bi_envp = 0;
	}

	/* Kernel-visible end of loaded data, rounded up to a page. */
	addr = (addr + PAGE_MASK) & ~PAGE_MASK;
	bi.bi_kernend = addr;

	return (ldr_bootinfo(&bi, bi_addr));
}
|
dcui/FreeBSD-9.3_kernel
|
sys/boot/i386/efi/bootinfo.c
|
C
|
bsd-3-clause
| 8,106
|
package q2;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.Socket;
/**
 * Runnable that serves one connected client: every line received is echoed
 * back upper-cased until the client sends "Bye." or closes the connection.
 * The client socket is always closed when the task ends.
 */
public class EchoTask implements Runnable {

    /** Socket of the client this task is responsible for. */
    private final Socket _client;

    public EchoTask(Socket client) {
        this._client = client;
    }

    @Override
    public void run() {
        System.out.println("Connection successful.");
        System.out.println("Waiting for input from client...");
        // Try with resources to automatically manage the streams' resources
        try (PrintWriter out = new PrintWriter(this._client.getOutputStream(), true);
                BufferedReader in = new BufferedReader(new InputStreamReader(this._client.getInputStream()));) {
            // Null from readLine() means the client closed the connection.
            for (String received = in.readLine(); received != null; received = in.readLine()) {
                System.out.printf("Server: '%s'\n", received);
                // "Bye." terminates the session without being echoed back.
                if (received.equals("Bye.")) {
                    break;
                }
                out.println(received.toUpperCase());
            }
        } catch (IOException e) {
            System.err.println("Error reading from streams.");
            System.exit(2);
        } finally {
            try {
                this._client.close();
            } catch (Exception e) {
                System.err.println("Error closing socket.");
                System.exit(3);
            }
        }
    }
}
|
authchir/log735-lab1
|
q2/EchoTask.java
|
Java
|
bsd-3-clause
| 1,245
|
# Deface override: injects the shared wishlist partial right after the
# add-to-cart form on the Spree product show page.
Deface::Override.new(
  virtual_path: "spree/products/show",
  name: "add_submit_wishlist_link",
  insert_after: "[data-hook='cart_form']",
  partial: "spree/shared/wishlist"
)
|
arunror/spree_arun_wishlist
|
app/overrides/add_wishlist_after_product_properties.rb
|
Ruby
|
bsd-3-clause
| 188
|
<?php
/**
* SiteQ
*
* Copyright (c) 2011-2012, Hans-Peter Buniat <hpbuniat@googlemail.com>.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* * Neither the name of Hans-Peter Buniat nor the names of his
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* @package SiteQ
* @author Hans-Peter Buniat <hpbuniat@googlemail.com>
* @copyright 2011-2012 Hans-Peter Buniat <hpbuniat@googlemail.com>
* @license http://www.opensource.org/licenses/bsd-license.php BSD License
*/
/**
* Output-Helper
*
* @author Hans-Peter Buniat <hpbuniat@googlemail.com>
* @copyright 2011-2012 Hans-Peter Buniat <hpbuniat@googlemail.com>
* @license http://www.opensource.org/licenses/bsd-license.php BSD License
* @version Release: @package_version@
* @link https://github.com/hpbuniat/SiteQ
*/
class SiteQ_TextUI_Output {

    /**
     * Print an Error
     *
     * @param string $message Message text; printed with an "Error: " prefix
     * @param boolean $exit Terminate the process with an error code when true
     *
     * @return void
     */
    static public function error($message, $exit = true) {
        print_r('Error: ' . $message . PHP_EOL);
        if ($exit === true) {
            // NOTE(review): this references Mergy_TextUI_Command, which looks
            // like a copy-paste from the mergy project; presumably it should
            // be SiteQ_TextUI_Command::ERROR_EXIT -- confirm which class
            // actually defines the constant in this codebase.
            exit(Mergy_TextUI_Command::ERROR_EXIT);
        }
    }

    /**
     * Print an Info
     *
     * @param string $message Message text; a newline is appended
     *
     * @return void
     */
    static public function info($message) {
        print_r($message . PHP_EOL);
    }

    /**
     * Write to ouput
     *
     * @param string $message Raw text; no newline is appended
     *
     * @return void
     */
    static public function write($message) {
        print_r($message);
    }
}
|
hpbuniat/SiteQ
|
SiteQ/TextUI/Output.php
|
PHP
|
bsd-3-clause
| 2,895
|
/*
* Copyright (c) 2012 ARM Limited
* All rights reserved.
*
* The license below extends only to copyright in the software and shall
* not be construed as granting a license to any other intellectual
* property including but not limited to intellectual property relating
* to a hardware implementation of the functionality of the software
* licensed hereunder. You may use the software subject to the license
* terms below provided that you ensure that this notice is replicated
* unmodified and in its entirety in all distributions of the software,
* modified or unmodified, in source code or in binary form.
*
* Copyright (c) 2002-2005 The Regents of The University of Michigan
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met: redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer;
* redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution;
* neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Authors: Steve Reinhardt
*/
#ifndef __CPU_SIMPLE_ATOMIC_HH__
#define __CPU_SIMPLE_ATOMIC_HH__
#include "cpu/simple/base.hh"
#include "params/AtomicSimpleCPU.hh"
/**
 * Simple CPU model that uses atomic-mode memory accesses: instructions
 * are executed from the event-driven tick() loop, with memory latency
 * returned immediately rather than modelled with timing requests.
 */
class AtomicSimpleCPU : public BaseSimpleCPU
{
  public:

    AtomicSimpleCPU(AtomicSimpleCPUParams *params);
    virtual ~AtomicSimpleCPU();

    virtual void init();

  private:

    /** Event whose process() runs one iteration of the simulation loop. */
    struct TickEvent : public Event
    {
        AtomicSimpleCPU *cpu;   // CPU advanced when this event fires

        TickEvent(AtomicSimpleCPU *c);
        void process();
        const char *description() const;
    };

    TickEvent tickEvent;

    // Width of the CPU -- presumably instructions per tick; confirm in
    // the .cc / params definition.
    const int width;
    // True while the CPU is inside an LLSC region (see isDrained()).
    bool locked;
    // Whether to account simulated stall latency for data / inst accesses.
    const bool simulate_data_stalls;
    const bool simulate_inst_stalls;

    /**
     * Drain manager to use when signaling drain completion
     *
     * This pointer is non-NULL when draining and NULL otherwise.
     */
    DrainManager *drain_manager;

    // main simulation loop (one cycle)
    void tick();

    /**
     * Check if a system is in a drained state.
     *
     * We need to drain if:
     * <ul>
     * <li>We are in the middle of a microcode sequence as some CPUs
     *     (e.g., HW accelerated CPUs) can't be started in the middle
     *     of a gem5 microcode sequence.
     *
     * <li>The CPU is in a LLSC region. This shouldn't normally happen
     *     as these are executed atomically within a single tick()
     *     call. The only way this can happen at the moment is if
     *     there is an event in the PC event queue that affects the
     *     CPU state while it is in an LLSC region.
     *
     * <li>Stay at PC is true.
     * </ul>
     */
    bool isDrained() {
        return microPC() == 0 &&
            !locked &&
            !stayAtPC;
    }

    /**
     * Try to complete a drain request.
     *
     * @returns true if the CPU is drained, false otherwise.
     */
    bool tryCompleteDrain();

    /**
     * An AtomicCPUPort overrides the default behaviour of the
     * recvAtomic and ignores the packet instead of panicking.
     */
    class AtomicCPUPort : public CpuPort
    {
      public:
        AtomicCPUPort(const std::string &_name, BaseCPU* _cpu)
            : CpuPort(_name, _cpu)
        { }

      protected:
        virtual Tick recvAtomicSnoop(PacketPtr pkt)
        {
            // Snooping a coherence request, just return
            return 0;
        }
    };

    AtomicCPUPort icachePort;   // instruction fetch port
    AtomicCPUPort dcachePort;   // data access port

    // Presumably enables direct physical-memory access, bypassing the
    // ports -- TODO confirm against atomic.cc.
    bool fastmem;

    // Request objects reused across accesses (fetch, loads, stores).
    Request ifetch_req;
    Request data_read_req;
    Request data_write_req;

    bool dcache_access;     // last data access went to the cache port
    Tick dcache_latency;    // latency reported for that access

  protected:

    /** Return a reference to the data port. */
    virtual CpuPort &getDataPort() { return dcachePort; }

    /** Return a reference to the instruction port. */
    virtual CpuPort &getInstPort() { return icachePort; }

  public:

    unsigned int drain(DrainManager *drain_manager);
    void drainResume();

    void switchOut();
    void takeOverFrom(BaseCPU *oldCPU);

    void verifyMemoryMode() const;

    virtual void activateContext(ThreadID thread_num, Cycles delay);
    virtual void suspendContext(ThreadID thread_num);

    Fault readMem(Addr addr, uint8_t *data, unsigned size, unsigned flags);

    Fault writeMem(uint8_t *data, unsigned size,
                   Addr addr, unsigned flags, uint64_t *res);

    /**
     * Print state of address in memory system via PrintReq (for
     * debugging).
     */
    void printAddr(Addr a);
};
#endif // __CPU_SIMPLE_ATOMIC_HH__
|
Dexhub/MTX
|
src/cpu/simple/atomic.hh
|
C++
|
bsd-3-clause
| 5,661
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.