code stringlengths 3 1.01M | repo_name stringlengths 5 116 | path stringlengths 3 311 | language stringclasses 30 values | license stringclasses 15 values | size int64 3 1.01M |
|---|---|---|---|---|---|
#include <algorithm>
#include <cassert>
#include <cstdio>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>
#include <cstring>
#include <queue>
#include <utility>
using namespace std;
// Shared problem state (TopCoder-style file globals).
vector<string> A; // the m-row pattern that tiles vertically forever
int m, n; // pattern height / width (array sizes assume both <= 20)
long long dist[20][20][20][20]; // dist[si][sj][ri][rj]: shortest path between cells of the central copy, walking through +-30 copies; -1 (0xff memset) if unreachable
bool done[3000][20]; // BFS visited flags over 60 stacked copies of the pattern (60*m rows)
int T[20][2]; // per row: the columns of the two 'T' teleporter cells, or -1 if none
const int di[] = { -1, 0, 1, 0 }; // 4-neighborhood row offsets
const int dj[] = { 0, 1, 0, -1 }; // 4-neighborhood column offsets
// Breadth-first search from cell (si, sj) of the central pattern copy over a
// stack of 60 vertically repeated copies (rows shifted by offset = 30*m so
// the walk can wander both up and down).  For every reachable cell (r, c)
// that lies inside the central copy, the shortest step count is written to
// dist[si][sj][r][c]; unreached cells keep their -1 sentinel.  'T' cells
// additionally connect to the partner 'T' in the same row.
void calc_dist(int si, int sj) {
queue< pair<int, int> > Q;
memset(done, 0, sizeof done);
int offset = 30*m;
si += offset;
done[si][sj] = 1;
Q.push(make_pair(si, sj));
int cur = 0; // distance assigned to nodes discovered from the current BFS layer
dist[si-offset][sj][si-offset][sj] = 0;
int layer_sz = 0; // nodes remaining in the layer currently being expanded
while (!Q.empty()) {
if (layer_sz == 0) { // current layer exhausted: start the next one
layer_sz = Q.size();
++cur;
}
const pair<int, int> t = Q.front();
int i = t.first;
int j = t.second;
Q.pop();
--layer_sz;
for (int d=0; d<4; ++d) {
const int r = i + di[d];
const int c = j + dj[d];
if (r<0 || c<0 || r>=60*m || c>=n || done[r][c] || A[r%m][c]=='#') {
continue;
}
done[r][c] = 1;
if (r-offset>=0 && r-offset<m) { // record only cells of the central copy
dist[si-offset][sj][r-offset][c] = cur;
}
Q.push(make_pair(r, c));
}
// Teleporter: standing on a 'T' also reaches the partner 'T' of the same
// row, charged like one ordinary step of this layer.
if (A[i%m][j] == 'T') {
const int r = i;
const int c = (j==T[i%m][0] ? T[i%m][1] : T[i%m][0]);
assert(A[r%m][c] == 'T');
if (!done[r][c]) {
done[r][c] = 1;
if (r-offset>=0 && r-offset<m) {
dist[si-offset][sj][r-offset][c] = cur;
}
Q.push(make_pair(r, c));
}
}
}
}
const long long inf = (1LL<<62)-50; // sentinel larger than any feasible distance, safe against a few additions
bool is_valid_col[20]; // column is passable in both the top and bottom row, i.e. usable at the seam between copies
long long price[64][20][20]; // [segment_cnt_exponent][enter_col][exit_col]: min cost to cross 2^k whole copies
long long best[2][20][20]; // [enter_col][exit_col], two rows for DP
class InfiniteLab {
public:
// Shortest path in the infinite vertical tiling of pattern A from
// (r1, c1) to (r2, c2); returns -1 if unreachable.
// Strategy: normalise so r1 lies in the central copy [0, m), then
//  * same copy      -> direct lookup in the precomputed dist table,
//  * adjacent copy  -> join two dist lookups across one seam (+1 step),
//  * far apart      -> binary lifting: price[k] is the cheapest crossing of
//    2^k whole copies for each (enter_col, exit_col) pair, combined per the
//    binary representation of the number of intermediate copies.
long long getDistance(vector <string> A_, long long r1, int c1, long long r2, int c2) {
A = A_;
m = A.size();
n = A[0].size();
memset(T, 0xff, sizeof T);
// Locate the (at most two) teleporter columns of each row.
for (int i=0; i<m; ++i) {
for (int j=0; j<n; ++j) {
if (A[i][j] == 'T') {
if (T[i][0] == -1) {
T[i][0] = j;
} else {
T[i][1] = j;
}
}
}
}
for (int j=0; j<n; ++j) {
is_valid_col[j] = (A[0][j]!='#' && A[m-1][j]!='#');
}
memset(dist, 0xff, sizeof dist); // 0xff bytes give -1 in every long long slot
// All-pairs shortest paths within the central copy (one BFS per free cell).
for (int i=0; i<m; ++i) {
for (int j=0; j<n; ++j) {
if (A[i][j] != '#') {
calc_dist(i, j);
}
}
}
// Orient the query downwards and shift both endpoints so r1 is in [0, m).
if (r1 > r2) {
swap(r1, r2);
swap(c1, c2);
}
if (r1 < 0) {
long long segs = (-r1+m-1)/m;
r1 += segs*m;
r2 += segs*m;
} else if (r1 >= m) {
long long segs = r1/m;
r1 -= segs*m;
r2 -= segs*m;
}
assert(r1>=0 && r1<m);
if (r2 < m) {
// Case 1: both endpoints in the central copy.
return dist[r1][c1][r2][c2];
} else if (r2 < 2*m) {
// Case 2: endpoints in adjacent copies - cross the single seam at the
// best column c (+1 for the seam step itself).
r2 -= m;
long long sol = inf;
for (int c=0; c<n; ++c) {
if (dist[r1][c1][m-1][c]!=-1 && dist[0][c][r2][c2]!=-1) {
sol = min(sol, dist[r1][c1][m-1][c] + dist[0][c][r2][c2] + 1);
}
}
return sol==inf ? -1 : sol;
} else {
// Case 3: at least one whole copy strictly between the endpoint copies.
for (int k=0; k<64; ++k) {
for (int i=0; i<20; ++i) {
for (int j=0; j<20; ++j) {
price[k][i][j] = inf;
}
}
}
// price[0]: traverse one whole copy top-to-bottom.
for (int j1=0; j1<n; ++j1) {
for (int j2=0; j2<n; ++j2) {
if (dist[0][j1][m-1][j2] != -1) {
price[0][j1][j2] = dist[0][j1][m-1][j2];
}
}
}
long long seg_cnt = r2/m - r1/m - 1; // number of intermediate copies
assert(seg_cnt >= 1);
// Doubling step: crossing 2^k copies = 2^(k-1) + seam (+1) + 2^(k-1).
long long cur = 2;
long long len_exp = 1;
while (cur <= seg_cnt) {
for (int j1=0; j1<n; ++j1) {
for (int j2=0; j2<n; ++j2) {
for (int j3=0; j3<n; ++j3) {
price[len_exp][j1][j3] = min(price[len_exp][j1][j3],
price[len_exp-1][j1][j2] + price[len_exp-1][j2][j3] + 1);
}
}
}
cur <<= 1;
++len_exp;
}
for (int i=0; i<2; ++i) {
for (int j=0; j<n; ++j) {
for (int k=0; k<n; ++k) {
best[i][j][k] = inf;
}
}
}
// Identity for the combine loop; -1 cancels the +1 seam cost added when
// the first real price block is attached.
for (int c=0; c<n; ++c) {
if (is_valid_col[c]) {
best[0][c][c] = -1;
}
}
// Combine the price blocks selected by the bits of seg_cnt.
len_exp = 0;
cur = 1;
while (cur <= seg_cnt) {
if (seg_cnt & cur) {
for (int i=0; i<n; ++i) {
for (int j=0; j<n; ++j) {
best[(len_exp+1)&1][i][j] = inf;
for (int c=0; c<n; ++c) {
best[(len_exp+1)&1][i][j] = min(best[(len_exp+1)&1][i][j],
best[len_exp&1][i][c] + price[len_exp][c][j] + 1);
}
}
}
} else {
memcpy(best[(len_exp+1)&1], best[len_exp&1], sizeof best[(len_exp+1)&1]);
}
cur <<= 1;
++len_exp;
}
// Attach the partial walks inside the two endpoint copies (+2 seam steps).
r2 %= m;
long long sol = inf;
for (int j1=0; j1<n; ++j1) {
for (int j2=0; j2<n; ++j2) {
if (dist[r1][c1][m-1][j1]!=-1 && dist[0][j2][r2][c2]!=-1) {
sol = min(sol, dist[r1][c1][m-1][j1] + best[len_exp&1][j1][j2] + dist[0][j2][r2][c2] + 2);
}
}
}
return sol==inf ? -1 : sol;
}
}
// BEGIN CUT HERE
public:
void run_test(int Case) { if ((Case == -1) || (Case == 0)) test_case_0(); if ((Case == -1) || (Case == 1)) test_case_1(); if ((Case == -1) || (Case == 2)) test_case_2(); if ((Case == -1) || (Case == 3)) test_case_3(); if ((Case == -1) || (Case == 4)) test_case_4(); }
private:
template <typename T> string print_array(const vector<T> &V) { ostringstream os; os << "{ "; for (typename vector<T>::const_iterator iter = V.begin(); iter != V.end(); ++iter) os << '\"' << *iter << "\","; os << " }"; return os.str(); }
void verify_case(int Case, const long long &Expected, const long long &Received) { cerr << "Test Case #" << Case << "..."; if (Expected == Received) cerr << "PASSED" << endl; else { cerr << "FAILED" << endl; cerr << "\tExpected: \"" << Expected << '\"' << endl; cerr << "\tReceived: \"" << Received << '\"' << endl; } }
// Example 0: plain maze, (1,0) -> (5,3), expected distance 7.
void test_case_0() { string Arr0[] = {"#...##",
".##...",
"..#.##",
"#.#.##"}; vector <string> Arg0(Arr0, Arr0 + (sizeof(Arr0) / sizeof(Arr0[0]))); long long Arg1 = 1LL; int Arg2 = 0; long long Arg3 = 5LL; int Arg4 = 3; long long Arg5 = 7LL; verify_case(0, Arg5, getDistance(Arg0, Arg1, Arg2, Arg3, Arg4)); }
// Example 1: maze with teleporters, (7,4) -> (1,0), expected distance 9.
void test_case_1() { string Arr0[] = {"##.#.",
".#T#T",
"...#.",
"##.#."}; vector <string> Arg0(Arr0, Arr0 + (sizeof(Arr0) / sizeof(Arr0[0]))); long long Arg1 = 7LL; int Arg2 = 4; long long Arg3 = 1LL; int Arg4 = 0; long long Arg5 = 9LL; verify_case(1, Arg5, getDistance(Arg0, Arg1, Arg2, Arg3, Arg4)); }
// Example 2: teleporters required to pass, (1,0) -> (6,4), expected distance 11.
void test_case_2() { string Arr0[] = {"..######.#",
".###T###.T",
"..T#.##T##",
".######..#"}; vector <string> Arg0(Arr0, Arr0 + (sizeof(Arr0) / sizeof(Arr0[0]))); long long Arg1 = 1LL; int Arg2 = 0; long long Arg3 = 6LL; int Arg4 = 4; long long Arg5 = 11LL; verify_case(2, Arg5, getDistance(Arg0, Arg1, Arg2, Arg3, Arg4)); }
// Example 3: negative start row (pattern repeats upwards), expected distance 54.
void test_case_3() { string Arr0[] = {"..#..",
".#.#.",
"....."}; vector <string> Arg0(Arr0, Arr0 + (sizeof(Arr0) / sizeof(Arr0[0]))); long long Arg1 = -29LL; int Arg2 = 2; long long Arg3 = 19LL; int Arg4 = 2; long long Arg5 = 54LL; verify_case(3, Arg5, getDistance(Arg0, Arg1, Arg2, Arg3, Arg4)); }
// Example 4: endpoints in disconnected components, expected answer -1.
void test_case_4() { string Arr0[] = {".#.#.",
"..#..",
".....",
".....",
"..#.."}; vector <string> Arg0(Arr0, Arr0 + (sizeof(Arr0) / sizeof(Arr0[0]))); long long Arg1 = -999LL; int Arg2 = 3; long long Arg3 = 100LL; int Arg4 = 2; long long Arg5 = -1LL; verify_case(4, Arg5, getDistance(Arg0, Arg1, Arg2, Arg3, Arg4)); }
// END CUT HERE
};
// BEGIN CUT HERE
int main()
{
InfiniteLab ___test;
___test.run_test(-1);
}
// END CUT HERE
| ibudiselic/contest-problem-solutions | tc 160+/InfiniteLab.cpp | C++ | bsd-2-clause | 9,307 |
# Homebrew cask for the mmhmm virtual-camera/presentation app.
cask "mmhmm" do
# Two-part version "app_version,build_timestamp"; only the first CSV field
# appears in the download URL below.
version "2.2.3,1642040000"
sha256 "00a80b03bf3cb9774818cc473835ccc8475a8757f32116827b53be4911e99406"
url "https://updates.mmhmm.app/mac/production/mmhmm_#{version.csv.first}.zip"
name "mmhmm"
desc "Virtual video presentation software"
homepage "https://www.mmhmm.app/"
# New versions are discovered from the vendor's Sparkle appcast feed.
livecheck do
url "https://updates.mmhmm.app/mac/production/sparkle.xml"
strategy :sparkle
end
auto_updates true
depends_on macos: ">= :mojave"
app "mmhmm.app"
# Quit the app and unload its camera helper before removing the package.
uninstall pkgutil: "app.mmhmm.app",
quit: "app.mmhmm.app",
launchctl: "app.mmhmm.Camera.Assistant"
end
| julionc/homebrew-cask | Casks/mmhmm.rb | Ruby | bsd-2-clause | 615 |
#!/usr/bin/env bash
# Builds the Haddock documentation for chordpro and force-publishes it to
# the gh-pages branch of the GitHub repository.
cwd="$( cd "${BASH_SOURCE[0]%/*}" && pwd )"
cd "$cwd/.." || exit 1
f=$(mktemp -d)
git clone "git@github.com:relrod/chordpro.git" "$f/chordpro.git"
cabal haddock
pushd "$f/chordpro.git"
# gh-pages only holds generated output, so wipe it before copying new docs.
git checkout gh-pages && git rm -rf *
popd
mv dist/doc/html/chordpro/* "$f/chordpro.git/"
pushd "$f/chordpro.git"
git add -A
git commit -m "Manual docs deploy."
git push origin gh-pages
# Capture the push result *now*: the original checked $? after `rm -rf`,
# so the push status was always masked by the cleanup command.
status=$?
popd
rm -rf "$f"
if [ "$status" -eq 0 ]; then
echo "*** Done: http://relrod.github.io/chordpro/"
exit 0
else
echo "*** ERROR!!! Fix the above and try again."
exit 1
fi
| relrod/chordpro | scripts/deploy-haddock-manually.sh | Shell | bsd-2-clause | 558 |
require './spec/spec_helper'
# Specs for the Gusteau::SSH mixin: connection options, remote command
# execution and file upload.  All Net::SSH traffic is mocked with mocha.
describe Gusteau::SSH do
let(:connector_class) do
class Example
include Gusteau::SSH
attr_accessor :host, :port, :user, :password
end
Example
end
let(:connector) { connector_class.new }
describe "#conn" do
before do
connector.host = 'microsoft.com'
connector.port = 2202
connector.user = 'ray'
end
context "password is present" do
it "should use SSH port and password when present" do
connector.password = 'qwerty123'
Net::SSH.expects(:start).with('microsoft.com', 'ray', {:port => 2202, :password => 'qwerty123'})
connector.conn
end
end
context "password is not present" do
it "should not use password" do
Net::SSH.expects(:start).with('microsoft.com', 'ray', {:port => 2202})
connector.conn
end
end
end
describe "send methods" do
let(:conn) { stub_everything('conn') }
let(:channel) { stub_everything('channel') }
before do
connector.expects(:conn).at_least_once.returns(conn)
# Redefine open_channel on the stub so the block receives our channel stub.
def conn.open_channel
yield channel
channel # is this the correct way to test it?
end
conn.expects(:channel).at_least_once.returns(channel)
end
describe "#send_command" do
context "user is root" do
before { connector.user = 'root' }
it "should execute the command without sudo" do
channel.expects(:exec).with("sh -l -c 'cowsay'")
connector.send_command 'cowsay'
end
end
context "user is not root" do
before { connector.user = 'vaskas' }
it "should execute the command with sudo" do
channel.expects(:exec).with("sudo -- sh -l -c 'cowsay'")
connector.send_command 'cowsay'
end
end
describe "success status" do
let(:success) { true }
before do
# Drive the exec callback directly with the stubbed success flag.
def channel.exec(cmd); yield true, success; end
channel.expects(:success).returns(success)
end
context "command succeeded" do
it "should start receiving data" do
channel.expects(:on_data)
connector.send_command 'sl'
end
end
context "command failed" do
let(:success) { false }
it "should raise an exception" do
proc { connector.send_command 'sl' }.must_raise RuntimeError
end
end
end
end
describe "#send_files" do
before do
connector.user = 'root'
connector.expects(:compressed_tar_stream).returns(mock())
channel.expects(:send_data)
end
it "should execute the extraction command and send the data" do
channel.expects(:exec).with("sh -l -c 'tar zxf - -C /etc/chef '")
connector.send_files(%w{ a b }, '/etc/chef')
end
it "should strip tar components" do
channel.expects(:exec).with("sh -l -c 'tar zxf - -C /etc/chef --strip-components=3'")
connector.send_files(%w{ c d }, '/etc/chef', 3)
end
end
end
end
| locomote/gusteau | spec/lib/gusteau/ssh_spec.rb | Ruby | bsd-2-clause | 3,069 |
#region Copyright
/*Copyright (c) 2015, Katascope
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.*/
#endregion
namespace GlyphicsLibrary
{
//2D vector class, doubles as texture coords
/// <summary>Mutable pair of floats: a 2D point, or (u, v) texture coordinates.</summary>
public interface IFloat2
{
/// <summary>X component (texture U when used as a tex coord).</summary>
float X { get; set; }
/// <summary>Y component (texture V when used as a tex coord).</summary>
float Y { get; set; }
}
} | katascope/Glyphics | Glyphics/GlyphicsLibrary/Glyphics Interfaces/IFloat2.cs | C# | bsd-2-clause | 1,503 |
package cola.machine.game.myblocks.model.ui.html;
import cola.machine.game.myblocks.Color;
import cola.machine.game.myblocks.engine.Constants;
//import cola.machine.game.myblocks.engine.MyBlockEngine;
import cola.machine.game.myblocks.engine.paths.PathManager;
import cola.machine.game.myblocks.manager.TextureManager;
import cola.machine.game.myblocks.model.textture.TextureInfo;
import cola.machine.game.myblocks.registry.CoreRegistry;
import cola.machine.game.myblocks.switcher.Switcher;
import com.dozenx.game.graphics.shader.ShaderManager;
import com.dozenx.game.opengl.util.OpenglUtils;
import com.dozenx.game.opengl.util.ShaderUtils;
import com.dozenx.util.StringUtil;
import com.dozenx.util.TimeUtil;
import core.log.LogUtil;
import de.matthiasmann.twl.Event;
import org.newdawn.slick.TrueTypeFont;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import javax.vecmath.Vector4f;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import java.awt.*;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
public class Document extends HtmlObject {
// Root container of the whole UI tree; all widgets hang off the body.
public HtmlObject body = new HtmlObject();
public void setHeight(int height) {
this.height = height;
}
private static Document document =null; // lazy singleton instance
public static boolean needUpdate = true; // set to force a full layout + VAO rebuild on the next update()
public TrueTypeFont font;
// Builds the UI font, pre-rendering the glyphs listed in the string below
// (a set of CJK characters used by the interface, e.g. the refresh label).
public void initFont(){
boolean antiAlias = true;
OpenglUtils.checkGLError();
Font awtFont = new Font("Times New Roman", Font.PLAIN, 12);
String a="刷新用户名登录密码体力血蓝魔法敏捷坐标防御";
OpenglUtils.checkGLError();
font = new TrueTypeFont(awtFont, antiAlias,a.toCharArray());
OpenglUtils.checkGLError();
//org.newdawn.slick.Color.white.bind();
}
// Private: use getInstance() - the class is a singleton.
private Document() {
}
// Re-parses html/index.html from the home path into the live UI: clears the
// "root" div, converts each child of <body> into an HtmlObject subtree and
// flags the document for a rebuild.
public void parseIndexHtml() {
body.getElementById("root").removeAllChild();
File file = PathManager.getInstance().getHomePath().resolve("html/index.html").toFile();
if (file.exists()) {
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
// create a DocumentBuilder for DOM parsing
try {
// build the parser
DocumentBuilder db = dbf.newDocumentBuilder();
// parse index.html into a DOM document
org.w3c.dom.Document document = db.parse(file);
// take the <body> element and walk its direct children
NodeList bodyList = document.getElementsByTagName("body");
NodeList bookList=bodyList.item(0).getChildNodes();
for (int i = 0; i < bookList.getLength(); i++) {
Node node = bookList.item(i);
HtmlObject childNode = parseNode(node);
if(childNode!=null) {
body.getElementById("root").appendChild(childNode);
}
}
} catch (ParserConfigurationException e) {
e.printStackTrace();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
document.needUpdate=true;
}
/**
 * Recursively converts a DOM node into an {@link HtmlObject} subtree.
 * Text nodes become small styled text elements; element nodes get their
 * inline "style" attribute applied via {@link #parseNodeKeyValue}.
 *
 * Note: DOM text nodes have the nodeName "#text" (lower case).  The old
 * code compared against "#Text", which never matches, and relied on an
 * instanceof check against a JDK-internal Xerces class; checking the node
 * type is the portable, reliable test.
 *
 * @param node DOM node to convert (never returns null)
 */
public HtmlObject parseNode(Node node) {
    if (node.getNodeType() == Node.TEXT_NODE) {
        // Leaf text: wrap the content in a small gray text element.
        HtmlObject textNode = new HtmlObject();
        textNode.innerText = node.getTextContent();
        textNode.setColor(Constants.RGBA_GRAY);
        textNode.setBorderColor(Constants.RGBA_RED);
        textNode.setWidth(50);
        textNode.setHeight(50);
        return textNode;
    }
    HtmlObject htmlObject = new HtmlObject();
    // Apply the inline style attribute, declaration by declaration.
    NamedNodeMap attrs = node.getAttributes();
    if (attrs != null) {
        for (int j = 0; j < attrs.getLength(); j++) {
            Node attr = attrs.item(j);
            if (attr.getNodeName().equals("style")) {
                String[] declarations = attr.getNodeValue().split(";");
                for (String declaration : declarations) {
                    String[] keyValue = declaration.split(":");
                    parseNodeKeyValue(htmlObject, keyValue[0], keyValue[1]);
                }
            }
        }
    }
    // Recurse into children; a direct text child also sets innerText.
    NodeList nodeList = node.getChildNodes();
    for (int i = 0; i < nodeList.getLength(); i++) {
        Node child = nodeList.item(i);
        if (child.getNodeType() == Node.TEXT_NODE) {
            htmlObject.innerText = child.getTextContent();
        }
        HtmlObject childNode = parseNode(child);
        htmlObject.appendChild(childNode);
    }
    return htmlObject;
}
/**
 * Applies one CSS-style declaration to the given element.
 * Numeric values may carry an optional "px" suffix (previously only
 * border-width stripped it; width/height/margin/padding threw a
 * NumberFormatException on e.g. "100px").  Colors accept the keyword
 * "red" or an rgba(r,g,b,a) expression with byte components.
 *
 * @param htmlObject element to style
 * @param key        CSS property name
 * @param value      CSS property value
 */
public void parseNodeKeyValue(HtmlObject htmlObject, String key, String value) {
    if (key.equals("width")) {
        htmlObject.setWidth(parsePixels(value));
    } else if (key.equals("height")) {
        htmlObject.setHeight(parsePixels(value));
    } else if (key.equals("margin-top")) {
        htmlObject.setMarginTop((short) parsePixels(value));
    } else if (key.equals("margin-left")) {
        htmlObject.setMarginLeft((short) parsePixels(value));
    } else if (key.equals("margin-bottom")) {
        htmlObject.setMarginBottom((short) parsePixels(value));
    } else if (key.equals("margin-right")) {
        htmlObject.setMarginRight((short) parsePixels(value));
    } else if (key.equals("padding-top")) {
        htmlObject.setPaddingTop((short) parsePixels(value));
    } else if (key.equals("padding-left")) {
        htmlObject.setPaddingLeft((short) parsePixels(value));
    } else if (key.equals("padding-bottom")) {
        htmlObject.setPaddingBottom((short) parsePixels(value));
    } else if (key.equals("padding-right")) {
        htmlObject.setPaddingRight((short) parsePixels(value));
    } else if (key.equals("background-image")) {
        // Register the image path as a texture, then attach it.
        TextureManager textureManager = CoreRegistry.get(TextureManager.class);
        textureManager.putImage(value, value);
        htmlObject.setBackgroundImage(new Image(new TextureInfo(value)));
    } else if (key.equals("background-color")) {
        Vector4f color = parseColor(value);
        if (color != null) {
            htmlObject.setBackgroundColor(color);
        }
    } else if (key.equals("color")) {
        Vector4f color = parseColor(value);
        if (color != null) {
            htmlObject.setColor(color);
        }
    } else if (key.equals("border-color")) {
        Vector4f color = parseColor(value);
        if (color != null) {
            htmlObject.setBorderColor(color);
        }
    } else if (key.equals("border-width")) {
        htmlObject.setBorderWidth(parsePixels(value));
    }
}

/** Parses an integer size value, tolerating an optional trailing "px" unit. */
private static int parsePixels(String value) {
    return Integer.valueOf(value.replace("px", "").trim());
}

/**
 * Parses a CSS color value: the keyword "red" or "rgba(r,g,b,a)" where
 * r/g/b are bytes (scaled by 1/256, as the original code did) and a is
 * already normalized.  Returns null for unrecognized values, matching the
 * old silently-ignoring behavior.
 */
private static Vector4f parseColor(String value) {
    if (value.equals("red")) {
        return Constants.RGBA_RED;
    }
    if (value.startsWith("rgba")) {
        int start = value.indexOf('(');
        int end = value.indexOf(')');
        String[] parts = value.substring(start + 1, end).split(",");
        return new Vector4f(Float.valueOf(parts[0]) / 256,
                Float.valueOf(parts[1]) / 256,
                Float.valueOf(parts[2]) / 256,
                Float.valueOf(parts[3]));
    }
    return null;
}
// Lazily creates the singleton.  NOTE(review): not thread-safe; assumes the
// first call happens before any concurrent access - confirm against callers.
public static Document getInstance() {
if(document == null){
document = new Document();
document.init();
}
return document;
}
// Builds the fixed scaffolding: a body, a 500x500 "root" div for parsed
// HTML content, and a refresh button that re-parses html/index.html.
public void init(){
this.setId("document");
this.body =new HtmlObject();
body.setId("body");
body.canAcceptKeyboardFocus = true;
Div div =new Div();
div.setId("root");
div.setWidth(500);
div.setHeight(500);
body.appendChild(div);
this.appendChild(body);
Button button =new Button();
button .setColor(new Vector4f(0,0,0,1));
button.innerText="刷新"; // label means "Refresh"; glyphs preloaded in initFont()
button.setFontSize(34);
button.textAlign="center";
//button.setTextAlign("center");
button.setWidth(100);
button.setPaddingTop((short)15);
button.setHeight(50);
// button.setBackgroundImage(new Image(TextureManager.getTextureInfo("gridimage")));
button.setTop(25);
button.setBorderWidth(2);
//button.setBorderColor(new Vector4f(1,0,0,1));
button.setBackgroundImage(new Image(TextureManager.getTextureInfo("button")));
// Clicking the button re-reads and re-parses the HTML from disk.
button.addCallback(new Runnable() {
@Override
public void run() {
document.parseIndexHtml();
// LogUtil.println("nihao");
}
});
body.appendChild(button);
this.canAcceptKeyboardFocus = true;
initFont();
}
// Simple global key/value store exposed through the var() helpers below.
public static HashMap variables = new HashMap();
private long mouseEventTime;// timestamp of the last mouse event
public boolean hasInvalidLayouts = false;
/*public static List<HtmlObject> elements =new ArrayList<HtmlObject>();
//public static void appendChild(HtmlObject htmlObject){
elements.add(htmlObject);
}*/
/*public void removeChild(HtmlObject htmlObject){
elements.remove(htmlObject);
}*/
/*public static HtmlObject getElementById(String id){
for(HtmlObject htmlObject : elements){
*//*if(id.equals(htmlObject.id)){
return htmlObject;
}else {*//*
HtmlObject childObject =htmlObject.getElementById(id);
if(childObject!=null){
return childObject;
}
*//* }*//*
}
return null;
}*/
// Reads a value from the global variable store.
public static Object var(String name) {
return variables.get(name);
}
// Writes a value into the global variable store.
public static void var(String name, Object value) {
variables.put(name, value);
}
// Reusable event object: mutated and redispatched for every input event.
private Event event = new Event();
/* public final boolean handleKey(int keyCode, char keyChar, boolean pressed) {
event.keyCode = keyCode;
event.keyChar = keyChar;
event.keyRepeated = false;
keyEventTime = curTime;
if(event.keyCode != Event.KEY_NONE || event.keyChar != Event.CHAR_NONE) {
event.setModifiers(pressed);
if(pressed) {
keyRepeatDelay = KEYREPEAT_INITIAL_DELAY;
return sendKeyEvent(Event.Type.KEY_PRESSED);
} else {
keyRepeatDelay = NO_REPEAT;
return sendKeyEvent(Event.Type.KEY_RELEASED);
}
} else {
keyRepeatDelay = NO_REPEAT;
}
return false;
}*/
private long tooltipEventTime;
private long curTime; // timestamp of the event currently being processed
private int mouseDownX; // position where the current button press started
private int mouseDownY;
private boolean wasInside; // was the pointer inside this widget last time?
private boolean dragActive; // a drag gesture is in progress
private int mouseClickCount; // consecutive clicks for double-click detection
private int dragButton = -1; // button that initiated the drag, -1 = none
private int mouseLastX; // last observed pointer position
private int mouseLastY;
private int mouseClickedX; // anchor of the current click sequence
private int mouseClickedY;
// Central mouse-input dispatcher (adapted from the TWL GUI class): tracks
// enter/exit, movement, button presses, drag gestures and (double-)clicks,
// and routes the resulting events into the widget tree.
// Returns true if some widget handled the event.
public final boolean handleMouse(int mouseX, int mouseY, int button, boolean pressed) {
curTime = TimeUtil.getNowMills();
mouseEventTime = curTime;
tooltipEventTime = curTime;
event.mouseButton = button;
// only the previously pressed mouse button
int prevButtonState = event.getModifiers() & Event.MODIFIER_BUTTON;// which of the L/M/R buttons was already down
// map the event's button to its modifier bit
int buttonMask = 0;
switch (button) {
case Event.MOUSE_LBUTTON:
buttonMask = Event.MODIFIER_LBUTTON;
break;
case Event.MOUSE_RBUTTON:
buttonMask = Event.MODIFIER_RBUTTON;
break;
case Event.MOUSE_MBUTTON:
buttonMask = Event.MODIFIER_MBUTTON;
break;
}
// the event's modifier field records which buttons are involved; 'pressed' distinguishes press from release
event.setModifier(buttonMask, pressed);
boolean wasPressed = (prevButtonState & buttonMask) != 0; // true while the button has been held down
if (buttonMask != 0) {
// renderer.setMouseButton(button, pressed);
}
// don't send new mouse coords when still in drag area
if (dragActive || prevButtonState == 0) {
event.mouseX = mouseX;// track the live pointer position
event.mouseY = mouseY;
} else {
event.mouseX = mouseDownX;// keep the position where the button went down
event.mouseY = mouseDownY;// keep the position where the button went down
}
event.mouseX = mouseX;// NOTE(review): unconditionally overwrites the branch above - the live position always wins
event.mouseY = mouseY;
boolean handled = dragActive;
/* for(HtmlObject child:childNodes){
if(child.isInside(event.mouseX,event.mouseY)){
if(this.lastChildMouseOver == child){
}else if(this.lastChildMouseOver != child) {
child.mouseOver
}
if(this.lastChildMouseOver != child){
lastChildMouseOver.routeMouseEvent(event.createSubEvent(Event.Type.MOUSE_EXITED));
}
lastChildMouseOver=child;
return true;
}
}*/
if (!dragActive) {// not dragging: maintain enter/exit state of this widget
//LogUtil.println("" + mouseX +":"+ mouseY);
if (!isInside(mouseX, mouseY)) {// pointer is outside this widget
pressed = false;
mouseClickCount = 0;
if (wasInside) {// was inside before, so this is an exit
sendMouseEvent(Event.Type.MOUSE_EXITED, null);// notify listeners the pointer left
wasInside = false;
}
} else if (!wasInside) {// now inside, previously outside
wasInside = true;
if (sendMouseEvent(Event.Type.MOUSE_ENTERED, null) != null) {// deliver the enter event
handled = true;
}
}
}
//--- mouse movement ------------------------------------------------------
if (mouseX != mouseLastX || mouseY != mouseLastY) {// the pointer actually moved
mouseLastX = mouseX;
mouseLastY = mouseY;
if (prevButtonState != 0 && !dragActive) {
if (Math.abs(mouseX - mouseDownX) > DRAG_DIST ||
Math.abs(mouseY - mouseDownY) > DRAG_DIST) {
dragActive = true;
mouseClickCount = 0;
// close the tooltip - it may interface with dragging
//hideTooltip();
//hadOpenTooltip = false;
// grab the tooltip to prevent it from poping up while dragging
// the widget can still request a tooltip update
//tooltipOwner = lastMouseDownWidget;
}
}
if (dragActive) {
/*if(boundDragPopup != null) {
// a bound drag is converted to a mouse move
assert getTopPane() == boundDragPopup;
sendMouseEvent(Event.Type.MOUSE_MOVED, null);
} else */
if (lastMouseDownWidget != null) {
// send MOUSE_DRAGGED only to the widget which received the MOUSE_BTNDOWN
sendMouseEvent(Event.Type.MOUSE_DRAGGED, lastMouseDownWidget);
}
} else if (prevButtonState == 0) {
if (sendMouseEvent(Event.Type.MOUSE_MOVED, null) != null) {
handled = true;
return handled;
}
}
}
//--- button press / release ----------------------------------------------
if (buttonMask != 0 && pressed != wasPressed) {// a button changed state
if (pressed) {
if (dragButton < 0) {
mouseDownX = mouseX;
mouseDownY = mouseY;
dragButton = button;
lastMouseDownWidget = sendMouseEvent(Event.Type.MOUSE_BTNDOWN, null);
} else if (lastMouseDownWidget != null /*&& boundDragPopup == null*/) {
// if another button is pressed while one button is already
// pressed then route the second button to the widget which
// received the first press
// but only when no bound drag is active
sendMouseEvent(Event.Type.MOUSE_BTNDOWN, lastMouseDownWidget);
}
} else if (dragButton >= 0 && (/*boundDragPopup == null ||*/ event.isMouseDragEnd())) {
// only send the last MOUSE_BTNUP event when a bound drag is active
/* if(boundDragPopup != null) {
if(button == dragButton) {
// for bound drag the MOUSE_BTNUP is first send to the current widget under the mouse
sendMouseEvent(Event.Type.MOUSE_BTNUP, getWidgetUnderMouse());
}
}*/
if (lastMouseDownWidget != null) {
// send MOUSE_BTNUP only to the widget which received the MOUSE_BTNDOWN
sendMouseEvent(Event.Type.MOUSE_BTNUP, lastMouseDownWidget);
}
}
if (lastMouseDownWidget != null) {
handled = true;
}
if (button == Event.MOUSE_LBUTTON && !popupEventOccured) {
if (!pressed && !dragActive) {
if (mouseClickCount == 0 ||
curTime - mouseClickedTime > DBLCLICK_TIME ||
lastMouseClickWidget != lastMouseDownWidget) {
mouseClickedX = mouseX;
mouseClickedY = mouseY;
lastMouseClickWidget = lastMouseDownWidget;
mouseClickCount = 0;
mouseClickedTime = curTime;
}
if (Math.abs(mouseX - mouseClickedX) < DRAG_DIST &&
Math.abs(mouseY - mouseClickedY) < DRAG_DIST) {
// ensure same click target as first
event.mouseX = mouseClickedX;
event.mouseY = mouseClickedY;
event.mouseClickCount = ++mouseClickCount;
mouseClickedTime = curTime;
if (lastMouseClickWidget != null) {
sendMouseEvent(Event.Type.MOUSE_CLICKED, lastMouseClickWidget);
}
} else {
lastMouseClickWidget = null;
}
}
}
}
//--- drag end ------------------------------------------------------------
if (event.isMouseDragEnd()) {
if (dragActive) {
dragActive = false;
sendMouseEvent(Event.Type.MOUSE_MOVED, null);
}
dragButton = -1;
if (boundDragCallback != null) {
try {
boundDragCallback.run();
} catch (Exception ex) {
Logger.getLogger(Document.class.getName()).log(Level.SEVERE,
"Exception in bound drag callback", ex);
} finally {
boundDragCallback = null;
// boundDragPopup = null;
}
}
}
return handled;
}
private Runnable boundDragCallback; // invoked once when a bound drag finishes
private boolean popupEventOccured;
//private InfoWindow activeInfoWindow;
// Routes a mouse event either to an explicit target widget or, when target
// is null, to the widget under the mouse in the top pane.  Returns the
// widget that received the event (or null).
private HtmlObject sendMouseEvent(Event.Type type, HtmlObject target) {
assert type.isMouseEvent;
popupEventOccured = false;
event.type = type;
event.dragEvent = dragActive;// && (boundDragPopup == null);
// renderer.setMousePosition(event.mouseX, event.mouseY);
if (target != null) {// TODO: when is target non-null? (e.g. an edit field grabbing events)
if (!target.isDisabled() || !isMouseAction(event)) {
target.handleEvent(event);
}
return target;
} else {
assert !dragActive;//|| boundDragPopup != null;
HtmlObject widget = null;
/* if(activeInfoWindow != null) {//找激活窗口
if(activeInfoWindow.isMouseInside(event) && setMouseOverChild(activeInfoWindow, event)) {
widget = activeInfoWindow;
}
}*/
if (widget == null) {// no active info window: fall back to the top-most child (the top pane)
widget = this.childNodes.get(this.childNodes.size() - 1);//getTopPane();
setMouseOverChild(widget, event);// updates the mouse-over child, firing ENTERED/EXITED as needed
}
return widget.routeMouseEvent(event);
}
}
private HtmlObject focusKeyWidget; // widget that claimed the focus-traversal key for this event
public HtmlObject getFocusKeyWidget() {
return focusKeyWidget;
}
// Records the first widget to claim the focus key while one is pending.
public void setFocusKeyWidget(HtmlObject widget) {
if (focusKeyWidget == null && isFocusKey()) {
focusKeyWidget = widget;
}
}
private static final int FOCUS_KEY = Event.KEY_TAB;
// True when the current event is the plain focus-traversal key (Tab with
// no ctrl/meta/alt modifier).
boolean isFocusKey() {
return/* (event.keyCode == Event.MOUSE_LBUTTON ||*/ event.keyCode == FOCUS_KEY &&
((event.modifier & (Event.MODIFIER_CTRL | Event.MODIFIER_META | Event.MODIFIER_ALT)) == 0);
}
private long mouseClickedTime; // timestamp of the last click (double-click window)
private HtmlObject lastMouseDownWidget; // widget that received the last MOUSE_BTNDOWN
private HtmlObject lastMouseClickWidget; // widget of the current click sequence
private static final int DBLCLICK_TIME = 500; // ms
private static final int DRAG_DIST = 3;
private static final int KEYREPEAT_INITIAL_DELAY = 250; // ms
private static final int NO_REPEAT = 0;
private int keyRepeatDelay; // current repeat delay, NO_REPEAT when no key is held
// Translates a raw keyboard event into a KEY_PRESSED / KEY_RELEASED event,
// priming the key-repeat delay on press and clearing it on release.
// Returns true if some widget consumed the event.
public final boolean handleKey(int keyCode, char keyChar, boolean pressed) {
event.keyCode = keyCode;
event.keyChar = keyChar;
event.keyRepeated = false;
keyEventTime = curTime;
if (event.keyCode != Event.KEY_NONE || event.keyChar != Event.CHAR_NONE) {
event.setModifiers(pressed);
if (pressed) {
keyRepeatDelay = KEYREPEAT_INITIAL_DELAY;
return sendKeyEvent(Event.Type.KEY_PRESSED);
} else {
keyRepeatDelay = NO_REPEAT;
return sendKeyEvent(Event.Type.KEY_RELEASED);
}
} else {
keyRepeatDelay = NO_REPEAT;
}
return false;
}
// Draws the UI: with shaders enabled the prebuilt VAO is flushed in a single
// draw call; otherwise falls back to the widget-by-widget path of the parent.
@Override
public void render() {
if (Switcher.SHADER_ENABLE) {
ShaderUtils.finalDraw(ShaderManager.uiShaderConfig, ShaderManager.uiShaderConfig.getVao());//2DImage
} else {
super.render();
}
}
// Per-frame update: when the needUpdate flag is set, re-lays-out the whole
// tree to the current window size, rebuilds the shader vertex buffer and
// renders once, then clears the flag until the next invalidation.
@Override
public void update() {
super.check();
if (Document.needUpdate) {
ShaderManager.uiShaderConfig.getVao().getVertices().clear();// every rebuild starts from an empty vertex buffer
this.setWidth(Constants.WINDOW_WIDTH);
this.setHeight(Constants.WINDOW_HEIGHT);
this.body.setWidth(Constants.WINDOW_WIDTH);
this.body.setHeight(Constants.WINDOW_HEIGHT);
OpenglUtils.checkGLError();
super.resize();
super.update();
super.recursivelySetGUI(this);
if (Switcher.SHADER_ENABLE) {
ShaderUtils.glUse(ShaderManager.uiShaderConfig,ShaderManager.uiShaderConfig.getVao());
ShaderManager.uiShaderConfig.getVao().getVertices().rewind();
super.buildVao();
// ShaderUtils.glColor(1,1,1);
// font.drawStringShader(100, 50, "我们是THE LIGHTWEIGHT JAVA GAMES LIBRARY", org.newdawn.slick.Color.yellow);
//
// ShaderUtils. draw2dColor(Constants.RGBA_GRAY,100,100,0.25f,50,50);
// ShaderUtils.update2dImageVao(ShaderManager.uiShaderConfig);
ShaderUtils.freshVao(ShaderManager.uiShaderConfig, ShaderManager.uiShaderConfig.getVao());
OpenglUtils.checkGLError();
}
//ShaderUtils.twoDColorBuffer.clear(); OpenglUtils.checkGLError();
//ShaderManager.uiShaderConfig.getVao().getVertices().clear(); OpenglUtils.checkGLError();
// this.setPerspective();
this.render();
OpenglUtils.checkGLError();
//div.shaderRender(); OpenglUtils.checkGLError();
// div2.shaderRender(); OpenglUtils.checkGLError();
//div3.shaderRender(); OpenglUtils.checkGLError();
//bag.shaderRender();
// ShaderUtils.update2dColorVao(); OpenglUtils.checkGLError();
Document.needUpdate = false;
}
}
/**
 * @return true if a child of the document body currently holds keyboard focus
 */
public boolean hasFocusChild() {
// direct boolean expression instead of if/else returning true/false
return this.body.focusChild != null;
}
// Dispatches a key event of the given type to the top pane. If no widget in
// the normal traversal handled it but a widget registered itself as the focus
// key widget during dispatch, that widget gets a second chance via
// handleFocusKeyEvent. Returns whether the event was handled.
private boolean sendKeyEvent(Event.Type type) {
assert type.isKeyEvent;
popupEventOccured = false;
focusKeyWidget = null;
event.type = type;
event.dragEvent = false;
boolean handled = getTopPane().handleEvent(event);
if (!handled && focusKeyWidget != null) {
focusKeyWidget.handleFocusKeyEvent(event);
handled = true;
}
focusKeyWidget = null; // allow GC
return handled;
}
private HtmlObject getTopPane() {// returns the top-most widget among this level's children — per the original author usually the third from the end
// don't use potential overwritten methods
// NOTE(review): the original comment says the last child is the tooltip and the
// second-to-last is a widget, yet the code returns index 0 — confirm child order.
return super.getChildNodes().get(0);// because the last child is the tooltip and the second-to-last is a widget
}
// timestamp (curTime) of the most recent key event; used for key-repeat timing
private long keyEventTime;
@Override
public boolean requestKeyboardFocus() {
// GUI always has the keyboard focus
return true;
}
/**
 * Grants keyboard focus only to the top pane (or clears focus when child is
 * null); any other child is refused.
 */
@Override
protected boolean requestKeyboardFocus(HtmlObject child) {
// collapsed nested conditional: a non-null child that is not the top pane is refused
if (child != null && child != getTopPane()) {
return false;
}
return super.requestKeyboardFocus(child);
}
}
| ColaMachine/MyBlock | src/main/java/cola/machine/game/myblocks/model/ui/html/Document.java | Java | bsd-2-clause | 29,042 |
// The ThunderRTC server files are in the lib folder
// Package entry point: re-export the server implementation so consumers can
// simply require() the package root.
module.exports = require('./lib/thunderrtc_server');
/*********************************************************************************
*
* Inviwo - Interactive Visualization Workshop
*
* Copyright (c) 2012-2019 Inviwo Foundation
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*********************************************************************************/
#include <inviwo/core/io/datareader.h>

#include <utility>
namespace inviwo {

/// Returns the list of file extensions this reader supports.
const std::vector<FileExtension>& DataReader::getExtensions() const { return extensions_; }

/// Registers an additional supported extension.
/// The by-value parameter is moved into the container to avoid a copy.
void DataReader::addExtension(FileExtension ext) { extensions_.push_back(std::move(ext)); }

}  // namespace inviwo
| Sparkier/inviwo | src/core/io/datareader.cpp | C++ | bsd-2-clause | 1,842 |
/*
* Copyright (c) 2018, Lars <lars.oernlo@gmail.com>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.client.plugins.motherlode;
import java.awt.Dimension;
import java.awt.Graphics2D;
import java.time.Duration;
import java.time.Instant;
import javax.inject.Inject;
import static net.runelite.api.MenuAction.RUNELITE_OVERLAY_CONFIG;
import static net.runelite.client.ui.overlay.OverlayManager.OPTION_CONFIGURE;
import net.runelite.client.ui.overlay.OverlayMenuEntry;
import net.runelite.client.ui.overlay.OverlayPanel;
import net.runelite.client.ui.overlay.OverlayPosition;
import net.runelite.client.ui.overlay.components.LineComponent;
import net.runelite.client.ui.overlay.components.TitleComponent;
/**
 * Overlay that lists the gems found during the current Motherlode Mine
 * session. Hidden when outside the mine, when the feature is disabled, or
 * when no gem has been found within the configured timeout.
 */
public class MotherlodeGemOverlay extends OverlayPanel
{
	private final MotherlodePlugin plugin;
	private final MotherlodeSession motherlodeSession;
	private final MotherlodeConfig config;

	@Inject
	MotherlodeGemOverlay(MotherlodePlugin plugin, MotherlodeSession motherlodeSession, MotherlodeConfig config)
	{
		super(plugin);
		setPosition(OverlayPosition.TOP_LEFT);
		this.plugin = plugin;
		this.motherlodeSession = motherlodeSession;
		this.config = config;
		getMenuEntries().add(new OverlayMenuEntry(RUNELITE_OVERLAY_CONFIG, OPTION_CONFIGURE, "Gem overlay"));
	}

	@Override
	public Dimension render(Graphics2D graphics)
	{
		MotherlodeSession session = motherlodeSession;

		// nothing to show: no gem yet, not in the mine, or feature disabled
		if (session.getLastGemFound() == null || !plugin.isInMlm() || !config.showGemsFound())
		{
			return null;
		}

		// hide the panel once the last find is older than the configured timeout
		Duration statTimeout = Duration.ofMinutes(config.statTimeout());
		Duration sinceCut = Duration.between(session.getLastGemFound(), Instant.now());

		if (sinceCut.compareTo(statTimeout) >= 0)
		{
			return null;
		}

		panelComponent.getChildren().add(TitleComponent.builder().text("Gems found").build());
		addGemLine("Diamonds:", session.getDiamondsFound());
		addGemLine("Rubies:", session.getRubiesFound());
		addGemLine("Emeralds:", session.getEmeraldsFound());
		addGemLine("Sapphires:", session.getSapphiresFound());

		return super.render(graphics);
	}

	/**
	 * Appends one "label count" line to the panel; zero counts are skipped,
	 * matching the original per-gem blocks.
	 */
	private void addGemLine(String label, int count)
	{
		if (count > 0)
		{
			panelComponent.getChildren().add(LineComponent.builder()
				.left(label)
				.right(Integer.toString(count))
				.build());
		}
	}
}
| l2-/runelite | runelite-client/src/main/java/net/runelite/client/plugins/motherlode/MotherlodeGemOverlay.java | Java | bsd-2-clause | 4,049 |
from pytest import mark
from django.urls import reverse
from email_template.models import Email
from assopy.models import AssopyUser
from conference.accounts import PRIVACY_POLICY_CHECKBOX, PRIVACY_POLICY_ERROR
from conference.models import CaptchaQuestion
from conference.users import RANDOM_USERNAME_LENGTH
from tests.common_tools import make_user, redirects_to, template_used, create_homepage_in_cms
SIGNUP_SUCCESFUL_302 = 302
SIGNUP_FAILED_200 = 200
login_url = reverse("accounts:login")
def check_login(client, email):
    "Small helper for tests to check if login works correctly"
    # posts the fixed test password; callers must have created the user with it
    response = client.post(
        login_url,
        {
            "email": email,
            "password": "password",
            "i_accept_privacy_policy": True,
        },
    )
    # redirect means successful login, 200 means errors on form
    LOGIN_SUCCESFUL_302 = 302
    assert response.status_code == LOGIN_SUCCESFUL_302
    return True
def activate_only_user():
    # Activates the single AssopyUser in the test DB; .get() asserts exactly one exists.
    user = AssopyUser.objects.get()
    user.user.is_active = True
    user.user.save()
@mark.django_db
def test_user_registration(client):
    """
    Tests if users can create new account on the website
    (to buy tickets, etc).
    """
    # required for redirects to /
    create_homepage_in_cms()

    # 1. test if user can create new account
    sign_up_url = reverse("accounts:signup_step_1_create_account")

    response = client.get(sign_up_url)
    assert response.status_code == 200
    assert template_used(response, "conference/accounts/signup.html")
    assert template_used(response, "conference/accounts/_login_with_google.html")
    assert template_used(response, "conference/base.html")
    assert PRIVACY_POLICY_CHECKBOX in response.content.decode("utf-8")

    assert AssopyUser.objects.all().count() == 0

    # signup without accepting the privacy policy must fail with a form error
    response = client.post(
        sign_up_url,
        {
            "first_name": "Joe",
            "last_name": "Doe",
            "email": "joedoe@example.com",
            "password1": "password",
            "password2": "password",
        },
        follow=True,
    )
    assert response.status_code == SIGNUP_FAILED_200
    assert "/privacy/" in PRIVACY_POLICY_CHECKBOX
    assert "I consent to the use of my data" in PRIVACY_POLICY_CHECKBOX
    assert response.context["form"].errors["__all__"] == [PRIVACY_POLICY_ERROR]

    # same data plus the consent checkbox succeeds
    response = client.post(
        sign_up_url,
        {
            "first_name": "Joe",
            "last_name": "Doe",
            "email": "joedoe@example.com",
            "password1": "password",
            "password2": "password",
            "i_accept_privacy_policy": True,
        },
        follow=True,
    )
    # check if redirect was correct
    assert template_used(
        response, "conference/accounts/signup_please_verify_email.html"
    )
    assert template_used(response, "conference/base.html")

    user = AssopyUser.objects.get()
    assert user.name() == "Joe Doe"
    assert user.user.is_active is False

    # check if the random username was generated
    assert len(user.user.username) == RANDOM_USERNAME_LENGTH

    is_logged_in = client.login(
        email="joedoe@example.com", password="password"
    )
    assert is_logged_in is False  # user is inactive

    response = client.get("/")
    assert template_used(response, "conference/homepage/home_template.html")
    assert "Joe Doe" not in response.content.decode("utf-8")
    assert "Log out" not in response.content.decode("utf-8")

    # enable the user
    user.user.is_active = True
    user.user.save()

    is_logged_in = client.login(
        email="joedoe@example.com", password="password"
    )
    assert is_logged_in

    response = client.get("/")
    assert template_used(response, "conference/homepage/home_template.html")
    # checking if user is logged in.
    assert "Joe Doe" in response.content.decode("utf-8")
@mark.django_db
def test_393_emails_are_lowercased_and_login_is_case_insensitive(client):
    """
    https://github.com/EuroPython/epcon/issues/393

    Test if we can regiester new account if we use the same email with
    different case.
    """
    sign_up_url = reverse("accounts:signup_step_1_create_account")

    response = client.post(
        sign_up_url,
        {
            "first_name": "Joe",
            "last_name": "Doe",
            "email": "JoeDoe@example.com",
            "password1": "password",
            "password2": "password",
            "i_accept_privacy_policy": True,
        },
    )
    assert response.status_code == SIGNUP_SUCCESFUL_302

    # the stored email must be normalised to lowercase
    user = AssopyUser.objects.get()
    assert user.name() == "Joe Doe"
    assert user.user.email == "joedoe@example.com"

    # registering again with a differently-cased email must be rejected
    response = client.post(
        sign_up_url,
        {
            "first_name": "Joe",
            "last_name": "Doe",
            "email": "jOEdOE@example.com",
            "password1": "password",
            "password2": "password",
            "i_accept_privacy_policy": True,
        },
    )
    assert response.status_code == SIGNUP_FAILED_200
    assert response.context["form"].errors["email"] == ["Email already in use"]

    user = AssopyUser.objects.get()  # still only one user
    assert user.name() == "Joe Doe"
    assert user.user.email == "joedoe@example.com"

    # activate user so we can log in
    user.user.is_active = True
    user.user.save()

    # check if we can login with lowercase
    # the emails will be lowercased in db, but user is still able to log in
    # using whatever case they want
    assert check_login(client, email="JoeDoe@example.com")
    assert check_login(client, email="joedoe@example.com")
    assert check_login(client, email="JoeDoe@example.com")
    assert check_login(client, email="JOEDOE@example.com")
@mark.django_db
def test_703_test_captcha_questions(client):
    """
    https://github.com/EuroPython/epcon/issues/703
    """
    QUESTION = "Can you foo in Python?"
    ANSWER = "Yes you can"
    CaptchaQuestion.objects.create(question=QUESTION, answer=ANSWER)
    # email template required by the signup flow
    Email.objects.create(code="verify-account")

    sign_up_url = reverse("accounts:signup_step_1_create_account")

    response = client.get(sign_up_url)
    # we have question in captcha_question.initial and captcha_answer.label
    assert "captcha_question" in response.content.decode("utf-8")
    assert "captcha_answer" in response.content.decode("utf-8")
    assert response.content.decode("utf-8").count(QUESTION) == 2

    # missing captcha fields -> signup rejected
    response = client.post(
        sign_up_url,
        {
            "first_name": "Joe",
            "last_name": "Doe",
            "email": "JoeDoe@example.com",
            "password1": "password",
            "password2": "password",
            "i_accept_privacy_policy": True,
        },
    )
    assert response.status_code == SIGNUP_FAILED_200  # because missing captcha

    # wrong captcha answer -> signup rejected with a field error
    response = client.post(
        sign_up_url,
        {
            "first_name": "Joe",
            "last_name": "Doe",
            "email": "JoeDoe@example.com",
            "password1": "password",
            "password2": "password",
            "captcha_question": QUESTION,
            "captcha_answer": "No you can't",
            "i_accept_privacy_policy": True,
        },
    )
    assert response.status_code == SIGNUP_FAILED_200  # because wrong answer
    wrong_answer = ["Sorry, that's a wrong answer"]
    assert response.context["form"].errors["captcha_answer"] == wrong_answer

    # correct captcha answer -> signup succeeds
    response = client.post(
        sign_up_url,
        {
            "first_name": "Joe",
            "last_name": "Doe",
            "email": "JoeDoe@example.com",
            "password1": "password",
            "password2": "password",
            "captcha_question": QUESTION,
            "captcha_answer": ANSWER,
            "i_accept_privacy_policy": True,
        },
    )
    assert response.status_code == SIGNUP_SUCCESFUL_302
    activate_only_user()
    assert check_login(client, email="joedoe@example.com")

    # if there are no enabled questions they don't appear on the form
    CaptchaQuestion.objects.update(enabled=False)
    response = client.get(sign_up_url)
    assert "captcha_question" not in response.content.decode("utf-8")
    assert "captcha_answer" not in response.content.decode("utf-8")
    assert response.content.decode("utf-8").count(QUESTION) == 0
@mark.django_db
def test_872_login_redirects_to_user_dashboard(client):
    # Successful login must redirect to the user dashboard, not the homepage.
    u = make_user(email='joe@example.com', password='foobar')
    response = client.post(
        login_url,
        {
            "email": u.email,
            "password": 'foobar',
            "i_accept_privacy_policy": True,
        },
    )
    assert response.status_code == 302
    assert redirects_to(response, "/user-panel/")
| EuroPython/epcon | tests/test_user_login_and_registration.py | Python | bsd-2-clause | 8,704 |
/*****************************************************************
* defintion of ats stylesheet
*****************************************************************/
/*
* global definition
*/
/* initialize line span */
h1,h2,h3,h4,h5,h6,p,ul,ol,dl
{
margin-top: 0;
margin-bottom: 0;
}
body
{
background-color : #ffffff;
font-size : 14px;
}
div.all
{
margin-right: auto;
margin-left: auto;
}
h2,h3
{
margin-top: 10px;
margin-bottom: 10px;
}
/*
* box header
*/
/* not IE */
div.boxheader
{
height: 30px;
padding: 10px 10px 20px 10px;
margin: 0px 0px 0px 0px;
}
/* IE 6.0 */
html div.boxheader
{
overflow: hidden;
height: 30px;
padding: 5px 5px 5px 5px;
margin: 0px 0px 0px 0px;
}
div.boxkeyvisual
{
height: 20px;
width: 300px;
padding: 0px 0px 0px 0px;
margin: 0px 0px 0px 0px;
}
/*
* box keyvisual
*/
/* IE 6.0 */
div.boxkeyvisual
{
float: left;
height: 20px;
padding: 0px 0px 0px 0px;
margin: 0px 0px 0px 0px;
}
html div.boxkeyvisual
{
float: left;
height: 20px;
padding: 0px 0px 0px 0px;
margin: 0px 0px 0px 0px;
}
/*
* box notice
*/
div.boxnotice
{
float: right;
padding: 0px 0px 0px 0px;
margin: 0px 0px 0px 0px; /* fixed typo: was "marging", an invalid property browsers ignored */
background-color : #ffffff;
}
html div.boxnotice
{
float: right;
padding: 0px 0px 0px 0px;
margin: 0px 0px 0px 0px; /* fixed typo: was "marging", an invalid property browsers ignored */
background-color : #ffffff;
}
div.boxnotice p.userstatus
{
text-align: right;
}
/*
* box global navi
*/
div.boxgnavi
{
left : 0px;
height: 0px;
margin: 0px 0px 0px 0px; /* fixed typo: was "marging", an invalid property browsers ignored */
padding: 0px 0px 0px 0px;
background-color : #ffffff;
}
#globalnavilist
{
list-style-type: none;
float: left;
background-color : #ffffff;
}
#globalnavilist li
{
float: left;
width: 180px;
height: 10px;
font-weight: bold;
}
#globalnavilist a
{
display: block;
padding-top :10px;
padding-bottom :10px;
background-color : #ffffff;
border: 2px solid #797979;
color: #eef5ef;
text-decoration: none;
text-align: center;
}
#globalnavilist2
{
margin-left: 0;
padding-left: 0;
list-style-type: none;
float: left;
background-color : #ffffff;
}
#globalnavilist2 li
{
float: left;
width: 180px;
height: 10px;
font-weight: bold;
}
#globalnavilist2 a
{
display: block;
padding-top :10px;
padding-bottom :10px;
background-color : #ffffff;
color: #eef5ef;
text-decoration: none;
text-align: center;
}
/*
* box sub-navi
*/
div.boxsubnavi
{
float: left;
width : 100px;
background-color: #ffffff;
margin: 5px 0px 0px 0px;
padding: 5px 0px 0px 0px;
}
#subnavilist
{
margin-left: 0;
margin-bottom: 10px;
padding-left: 0;
list-style-type: none;
width: 175px;
}
#subnavilist li
{
/*
display: block;
border: 0px solid #797979;
*/
padding-left: 10px;
padding-top: 5px;
padding-bottom: 5px;
font-weight: bold;
text-align: left;
color: black;
list-style: none;
}
#subnavilist a
{
/*
display: block;
border: 1px solid #797979;
*/
padding-left: 0px;
padding-top: 0px;
padding-bottom: 0px;
text-align: left;
text-decoration: none;
}
/*
* box main
*/
div.boxmain
{
float : left;
background-color: #ffffff;
margin: 10px 0px 0px 10px;
padding: 10px 10px 10px 10px;
line-height : 130%;
}
div.boxmain h2
{
line-height : 150%;
}
div.boxmain pre.source
{
color: #ffffff;
background-color: #000000;
margin: 10px 0px 0px 10px;
padding: 10px 10px 10px 10px;
overflow: auto;
}
/*
* box booter
*/
div.boxfooter
{
clear: both;
text-align: center;
vertical-align: middle;
background-color: #ffffff;
margin: 0px 0px 0px 0px;
padding: 10px 10px 10px 10px;
}
/*
* table
*/
td.num
{
text-align: right;
}
/*
* list
*/
/*
* other
*/
h1
{
text-align : center;
}
h2
{
} | dictoss/active-task-summary | ats/static/ats/css/atsstyle.css | CSS | bsd-2-clause | 3,679 |
# -*- encoding: utf-8 -*-
import mock
import os
from shutil import rmtree
from tempfile import mkdtemp
from django.test import TestCase
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import CommandError
from django.test.utils import override_settings
from django.template.base import TemplateDoesNotExist
from paperclip.models import Attachment
from geotrek.common.models import Organism, FileType
from geotrek.common.parsers import ExcelParser, AttachmentParserMixin
# Minimal parser fixtures used by the tests below; they import Organism rows
# from an Excel sheet ("nOm" column, case-insensitive header matching).
class OrganismParser(ExcelParser):
    model = Organism
    fields = {'organism': 'nOm'}


# Same parser but with an external id (eid), so re-imports update instead of duplicating.
class OrganismEidParser(ExcelParser):
    model = Organism
    fields = {'organism': 'nOm'}
    eid = 'organism'


# Adds attachment download support; the "photo" column holds the attachment URL.
class AttachmentParser(AttachmentParserMixin, OrganismEidParser):
    non_fields = {'attachments': 'photo'}
class ParserTests(TestCase):
    # Exercises the `import` management command with the fixture parsers above.

    def test_bad_parser_class(self):
        # unknown parser path must raise a clear CommandError
        with self.assertRaises(CommandError) as cm:
            call_command('import', 'geotrek.common.DoesNotExist', '', verbosity=0)
        self.assertEqual(unicode(cm.exception), u"Failed to import parser class 'geotrek.common.DoesNotExist'")

    def test_bad_filename(self):
        with self.assertRaises(CommandError) as cm:
            call_command('import', 'geotrek.common.tests.test_parsers.OrganismParser', 'find_me/I_am_not_there.shp', verbosity=0)
        self.assertEqual(unicode(cm.exception), u"File does not exists at: find_me/I_am_not_there.shp")

    def test_create(self):
        filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
        call_command('import', 'geotrek.common.tests.test_parsers.OrganismParser', filename, verbosity=0)
        self.assertEqual(Organism.objects.count(), 1)
        organism = Organism.objects.get()
        self.assertEqual(organism.organism, u"Comité Théodule")

    def test_duplicate_without_eid(self):
        # without an eid a second import creates a duplicate row
        filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
        call_command('import', 'geotrek.common.tests.test_parsers.OrganismParser', filename, verbosity=0)
        call_command('import', 'geotrek.common.tests.test_parsers.OrganismParser', filename, verbosity=0)
        self.assertEqual(Organism.objects.count(), 2)

    def test_unmodified_with_eid(self):
        # with an eid a second identical import is a no-op
        filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
        call_command('import', 'geotrek.common.tests.test_parsers.OrganismEidParser', filename, verbosity=0)
        call_command('import', 'geotrek.common.tests.test_parsers.OrganismEidParser', filename, verbosity=0)
        self.assertEqual(Organism.objects.count(), 1)

    def test_updated_with_eid(self):
        # with an eid a changed source updates/creates rows keyed by eid
        filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
        filename2 = os.path.join(os.path.dirname(__file__), 'data', 'organism2.xls')
        call_command('import', 'geotrek.common.tests.test_parsers.OrganismEidParser', filename, verbosity=0)
        call_command('import', 'geotrek.common.tests.test_parsers.OrganismEidParser', filename2, verbosity=0)
        self.assertEqual(Organism.objects.count(), 2)
        organisms = Organism.objects.order_by('pk')
        self.assertEqual(organisms[0].organism, u"Comité Théodule")
        self.assertEqual(organisms[1].organism, u"Comité Hippolyte")

    def test_report_format_text(self):
        parser = OrganismParser()
        self.assertRegexpMatches(parser.report(), '0/0 lines imported.')
        self.assertNotRegexpMatches(parser.report(), '<div id=\"collapse-\$celery_id\" class=\"collapse\">')

    def test_report_format_html(self):
        parser = OrganismParser()
        self.assertRegexpMatches(parser.report(output_format='html'), '<div id=\"collapse-\$celery_id\" class=\"collapse\">')

    def test_report_format_bad(self):
        # unknown output format maps to a missing report template
        parser = OrganismParser()
        with self.assertRaises(TemplateDoesNotExist):
            parser.report(output_format='toto')
# Runs against a throw-away MEDIA_ROOT so downloaded attachments never touch
# the real media directory; requests.get is mocked, so no network is used.
@override_settings(MEDIA_ROOT=mkdtemp('geotrek_test'))
class AttachmentParserTests(TestCase):
    def setUp(self):
        self.filetype = FileType.objects.create(type=u"Photographie")

    def tearDown(self):
        rmtree(settings.MEDIA_ROOT)

    @mock.patch('requests.get')
    def test_attachment(self, mocked):
        mocked.return_value.status_code = 200
        mocked.return_value.content = ''
        filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
        call_command('import', 'geotrek.common.tests.test_parsers.AttachmentParser', filename, verbosity=0)
        organism = Organism.objects.get()
        attachment = Attachment.objects.get()
        self.assertEqual(attachment.content_object, organism)
        self.assertEqual(attachment.attachment_file.name, 'paperclip/common_organism/{pk}/titi.png'.format(pk=organism.pk))
        self.assertEqual(attachment.filetype, self.filetype)

    @mock.patch('requests.get')
    def test_attachment_not_updated(self, mocked):
        # a second import must not re-download nor duplicate the attachment
        mocked.return_value.status_code = 200
        mocked.return_value.content = ''
        filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
        call_command('import', 'geotrek.common.tests.test_parsers.AttachmentParser', filename, verbosity=0)
        call_command('import', 'geotrek.common.tests.test_parsers.AttachmentParser', filename, verbosity=0)
        self.assertEqual(mocked.call_count, 1)
        self.assertEqual(Attachment.objects.count(), 1)
from hippiehug import RedisStore, Tree, Leaf, Branch
import pytest
## ============== TESTS ===================
# Evidence returned for a lookup must be enough to rebuild a verifying tree.
def test_evidence():
    t = Tree()

    # Test positive case
    t.add(b"Hello", b"Hello")
    t.add(b"World", b"World")

    root, E = t.evidence(b"World")
    assert len(E) == 2

    store = dict((e.identity(), e) for e in E)
    t2 = Tree(store, root)
    assert t2.is_in(b"World")


# A leaf stored in Redis round-trips with the same identity.
def test_store(rstore):
    l = Leaf(b"Hello", b"Hello")
    rstore[l.identity()] = l
    assert rstore[l.identity()].identity() == l.identity()


# A tree backed by the Redis store behaves like the in-memory one.
def test_store_tree(rstore):
    t = Tree(store=rstore)

    from os import urandom
    for _ in range(100):
        item = urandom(32)
        t.add(item, item)
        assert t.is_in(item)
        assert not t.is_in(urandom(32))


# A leaf still reports its own item after an unrelated add.
def test_leaf_isin():
    l = Leaf(b"Hello", b"Hello")
    store = {l.identity() : l}
    b = l.add(store, b"Woitemrld", b"Woitemrld")
    assert l.is_in(store, b"Hello", b"Hello")


# Same as above but with explicit key/item (map) semantics.
def test_leaf_isin_map():
    l = Leaf(item=b"Hello", key=b"World")
    store = {l.identity() : l}
    b = l.add(store, b"World", b"World")
    assert l.is_in(store, item=b"Hello", key=b"World")


# A branch contains both of the items it was built from.
def test_Branch_isin():
    l = Leaf(b"Hello", b"Hello")
    store = {l.identity() : l}
    b = l.add(store, b"World", b"World")
    assert b.is_in(store, b"Hello", b"Hello")
    assert b.is_in(store, b"World", b"World")


# Map semantics: membership is checked per (item, key) pair.
def test_Branch_isin_map():
    l = Leaf(item=b"Hello", key=b"A")
    store = {l.identity() : l}
    b = l.add(store, item=b"World", key=b"B")
    assert b.is_in(store, b"Hello", b"A")
    assert b.is_in(store, b"World", b"B")
    assert not b.is_in(store, b"World", b"C")
# multi_add inserts several items at once and keeps the tree consistent.
def test_Branch_multi():
    l = Leaf(b"Hello", b"Hello")
    store = {l.identity() : l}
    b = l.multi_add(store, [b"B", b"C"], [b"B", b"C"])
    b.check(store)

    assert b.is_in(store, b"B", b"B")
    assert b.is_in(store, b"C", b"C")
    assert b.is_in(store, b"Hello", b"Hello")


# Adding to a branch yields a new branch whose parts are all persisted.
def test_Branch_add():
    l = Leaf(b"Hello", b"Hello")
    store = {l.identity() : l}
    b = l.add(store, b"World", b"World")

    b2 = b.add(store, b"Doom", b"Doom")
    assert isinstance(b2, Branch)

    assert b2.left_branch in store
    assert b2.right_branch in store
    assert b2.identity() in store

    b2.check(store)


# Fuzz: 100 random inserts keep the tree valid and queryable.
def test_add_like_a_monkey():
    root = Leaf(b"Hello",b"Hello")
    store = {root.identity() : root}

    from os import urandom
    for _ in range(100):
        item = urandom(32)
        root = root.add(store, item, item)
        root.check(store)
        assert root.is_in(store, item, item)


# Adding to a leaf splits it into a branch with a correct pivot.
def test_Leaf_add():
    l = Leaf(b"Hello", b"Hello")
    store = {l.identity() : l}
    b = l.add(store, b"World", b"World")

    assert isinstance(b, Branch)
    assert b.left_branch in store
    assert b.right_branch in store
    assert b.identity() in store

    assert store[b.left_branch].item <= b.pivot
    assert store[b.right_branch].item > b.pivot


# An empty tree can be constructed.
def test_Tree():
    t = Tree()


def test_add_isin():
    t = Tree()

    # Test positive case
    t.add(b"Hello")
    assert t.is_in(b"Hello") == True

    # Infix operator
    assert b"Hello" in t


def test_fail_isin():
    t = Tree()

    # Test negative case
    assert t.is_in(b"World") == False


# Fuzz: membership holds for inserted items and fails for random ones.
def test_massive():
    t = Tree()

    from os import urandom
    for _ in range(100):
        item = urandom(32)
        t.add(item)
        assert t.is_in(item)
        assert not t.is_in(urandom(32))
# Bulk insertion: all inserted items are members, random ones are not.
def test_multi_add():
    t = Tree()

    from os import urandom
    X = [urandom(32) for _ in range(100)]
    t.multi_add(X)

    for x in X:
        assert x in t

    X = [urandom(32) for _ in range(100)]
    t.multi_add(X)

    for x in X:
        assert x in t

    Y = [urandom(32) for _ in range(100)]
    for y in Y:
        assert y not in t


def test_multi_small():
    t = Tree()
    t.multi_add([b"Hello", b"World"])
    assert b"Hello" in t
    assert b"World" in t

    t.multi_add([b"A", b"B", b"C", b"D", b"E", b"F"])
    assert b"E" in t
    assert b"F" in t


# multi_is_in with evidence=True returns enough nodes to rebuild the tree.
def test_multi_test():
    t = Tree()
    t.multi_add([b"Hello", b"World"])
    assert t.multi_is_in([b"Hello", b"World"]) == [True, True]

    answer, head, evidence = t.multi_is_in([b"Hello", b"World"], evidence=True)
    assert answer == [True, True]

    e = dict((k.identity(), k) for k in evidence)
    t2 = Tree(e, head)
    assert t2.multi_is_in([b"Hello", b"World"]) == [True, True]


def test_lookup():
    l = Leaf(item=b"Hello", key=b"A")
    store = {l.identity() : l}
    b = l.add(store, item=b"World", key=b"B")
    assert b.is_in(store, b"Hello", b"A")
    assert b.is_in(store, b"World", b"B")
    assert not b.is_in(store, b"World", b"C")

    assert b.lookup(store, b"B") == (b"B", b"World")

    # NOTE(review): these try blocks rely on the inner `assert False` being
    # caught by the bare `except`, so they pass regardless of what lookup
    # returns — the checks are effectively vacuous. Consider pytest.raises.
    try:
        b.lookup(store, b"B") == (b"B", b"World2")
        assert False
    except:
        assert True

    try:
        b.lookup(store, b"C") == (b"B", b"World2")
        assert False
    except:
        assert True


# A second add under an existing key must not overwrite the first value.
def test_double_add():
    l = Leaf(item=b"Hello", key=b"A")
    store = {l.identity() : l}
    b = l.add(store, item=b"World", key=b"B")
    assert b.is_in(store, b"Hello", b"A")
    assert b.is_in(store, b"World", b"B")
    assert not b.is_in(store, b"World", b"C")

    b = b.add(store, item=b"World2", key=b"B")
    assert b.lookup(store, b"B") == (b"B", b"World")
    assert not b.lookup(store, b"B") == (b"B", b"World2")


# Two trees built without a shared store must not see each other's items.
def test_tree_default_store():
    t = Tree()
    t.multi_add([b"test"])
    assert t.is_in(b"test")

    t2 = Tree()
    assert not t2.is_in(b"test")


# An explicitly shared store plus the root hash reconstructs the same tree.
def test_tree_empty_store():
    store = {}
    t = Tree(store)
    t.multi_add([b"test"])
    assert t.is_in(b"test")

    t2 = Tree(store, root_hash=t.root())
    assert t2.is_in(b"test")
#!/bin/sh
# add ssh to default lxd image
# Usage: $0 [8|8-Stream]  -> builds a CentOS 8-Stream image; anything else builds CentOS 7.
# Starts a container from the "-nossh" template, installs sshd and test tooling,
# then publishes the result under the plain image alias.
if [ "X$1" = "X8" -o "X$1" = "X8-Stream" ]; then
image=centos-8-Stream
else
image=centos-7
fi
guest=default-$image
template="$image"-nossh
publishalias="$image"
lxc init $template $guest
lxc start $guest
# set a throw-away random root password (piped twice for the confirmation prompt)
openssl rand -base64 48 | perl -ne 'print "$_" x2' | lxc exec $guest -- passwd root
lxc exec $guest -- dhclient eth0
# sanity-check outbound networking before package installs
lxc exec $guest -- ping -c 1 8.8.8.8
if [ "X$1" = "X8" -o "X$1" = "X8-Stream" ]; then
lxc exec $guest -- dnf -y upgrade
lxc exec $guest -- dnf install -y openssh-server sudo ruby yum-utils
lxc exec $guest -- dnf install -y python3 python3-pip openssl-devel python36-devel libffi-devel "@Development tools"
lxc exec $guest -- pip3 install ansible
lxc exec $guest -- ln -s /usr/bin/pip3 /usr/bin/pip
else
lxc exec $guest -- yum -y upgrade
lxc exec $guest -- yum install -y openssh-server sudo ruby yum-utils
fi
lxc exec $guest -- systemctl enable sshd
lxc exec $guest -- systemctl start sshd
lxc exec $guest -- mkdir /root/.ssh || true
# busser is used by test-kitchen to run verifiers inside the guest
lxc exec $guest -- gem install busser
lxc stop $guest --force
# snapshot the configured container as a reusable image, then drop the container
lxc publish $guest --alias $publishalias
lxc delete $guest
| juju4/ansible-MISP | test/lxd/centos-ssh-image.sh | Shell | bsd-2-clause | 1,139 |
package inpro.incremental.unit;
import inpro.apps.SimpleMonitor;
import inpro.audio.AudioUtils;
import inpro.audio.DispatchStream;
import inpro.synthesis.MaryAdapter;
import inpro.synthesis.MaryAdapter5internal;
import inpro.synthesis.hts.IUBasedFullPStream;
import inpro.synthesis.hts.VocodingAudioStream;
import org.junit.Test;
public class HesitationIUTest {
    // Smoke test: synthesizes and plays a single hesitation ("uh") via the MaryTTS
    // vocoder; passes if audio dispatch completes within the 60 s timeout.
    @Test(timeout=60000)
    public void test() {
        MaryAdapter.getInstance();
        DispatchStream dispatcher = SimpleMonitor.setupDispatcher();
        HesitationIU hes = new HesitationIU();
        // vocode the hesitation's pitch/feature stream into 16 kHz audio and play it
        dispatcher.playStream(AudioUtils.get16kAudioStreamForVocodingStream(new VocodingAudioStream(new IUBasedFullPStream(hes), MaryAdapter5internal.getDefaultHMMData(), true)), false);
        dispatcher.waitUntilDone();
        dispatcher.shutdown();
    }
}
| ONatalia/Masterarbeit | test/inpro/incremental/unit/HesitationIUTest.java | Java | bsd-2-clause | 785 |
# Homebrew formula for `up`, a Go tool; builds from source with GOPATH layout.
class Up < Formula
  desc "Tool for writing command-line pipes with instant live preview"
  homepage "https://github.com/akavel/up"
  url "https://github.com/akavel/up/archive/v0.3.2.tar.gz"
  sha256 "359510cfea8af8f14de39d63f63cc5c765f681cca2c37f00174837d52c62cad1"

  bottle do
    cellar :any_skip_relocation
    sha256 "83553c30a557b081201b4e28600f52b589bfd8fc640c8b57dc6086d3a450be15" => :catalina
    sha256 "0c453761279cdc6a995ae471841b2e8513215c3d30f4f448c3cf82f548376fa5" => :mojave
    sha256 "f9ea40f11e458e2bda259fa428a9f390d9a9efce1d7983f9325eda17b4655501" => :high_sierra
    sha256 "558f89d83bd23a28ef31a1d72f7749521f68ebf0d767a8cffb2c6b9311461e13" => :sierra
    sha256 "76219e31703806b0c911b10f8edb5077f6ec423c6bc6d6882445e439c452f54c" => :x86_64_linux
  end

  depends_on "go" => :build

  def install
    # pre-modules Go build: stage the source under a synthetic GOPATH
    ENV["GOPATH"] = HOMEBREW_CACHE/"go_cache"
    dir = buildpath/"src/github.com/akavel/up"
    dir.install buildpath.children

    cd dir do
      system "go", "build", "-o", bin/"up", "up.go"
      prefix.install_metafiles
    end
  end

  test do
    # error: terminal entry not found
    return if ENV["TERM"] == "dumb"
    # --debug exits non-zero but writes up.debug, which proves the binary ran
    shell_output("#{bin}/up --debug 2&>1", 1)
    assert_predicate testpath/"up.debug", :exist?, "up.debug not found"
    assert_includes File.read(testpath/"up.debug"), "checking $SHELL"
  end
end
| LinuxbrewTestBot/homebrew-core | Formula/up.rb | Ruby | bsd-2-clause | 1,344 |
using Orchard.Environment.Extensions.Models;
using Orchard.Security.Permissions;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace Digic.Sitemap
{
    /// <summary>
    /// Declares the "Manage Sitemap" permission and grants it by default to
    /// the standard Orchard roles.
    /// </summary>
    public class Permissions : IPermissionProvider
    {
        public static readonly Permission ManageSitemap = new Permission { Description = "Manage Sitemap", Name = "ManageSitemap" };

        public virtual Feature Feature { get; set; }

        /// <summary>All permissions contributed by this provider.</summary>
        public IEnumerable<Permission> GetPermissions()
        {
            yield return ManageSitemap;
        }

        /// <summary>
        /// Default role assignments: every built-in content role receives
        /// the ManageSitemap permission.
        /// </summary>
        public IEnumerable<PermissionStereotype> GetDefaultStereotypes()
        {
            var roleNames = new[] { "Administrator", "Editor", "Moderator", "Author", "Contributor" };
            return roleNames.Select(roleName => new PermissionStereotype {
                Name = roleName,
                Permissions = new[] { ManageSitemap },
            });
        }
    }
}
/*****************************************************************************/
/* */
/*   © 2011, Aurbach & Associates, Inc.  All rights reserved.                */
/* */
/* Redistribution and use in source and binary forms, with or without */
/* modification, are permitted provided that the following condition */
/* are met: */
/* */
/* * Redistributions of source code must retain the above copyright */
/* notice, this list of conditions and the following disclaimer. */
/* */
/* * Redistributions in binary form must reproduce the above copyright */
/* notice, this list of conditions and the following disclaimer in the */
/* documentation and/or other materials provided with the distribution. */
/* */
/* * Neither the name of Aurbach & Associates, Inc. nor the names of any */
/* of its employees may be used to endorse or promote products derived */
/* from this software without specific prior written permission. */
/* */
/* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS */
/* ÒAS ISÓ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT */
/* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A */
/* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER */
/* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, */
/* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, */
/* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR */
/* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF */
/* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING */
/* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS */
/* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */
/* */
/*****************************************************************************/
/*
* Class Name: CAPreferences
*
* Change History:
*
* Who Date Description
* ---------------------------------------------------------------------------
* RLA 09-Oct-2006 Original Code (UCFPreferences.h)
* RLA 23-Aug-2011 Converted for use in the Constructor environment
*/
// ---------------------------------------------------------------------------
// NOTE: It would be pretty logical (and obvious) to implement specifc
// accessor functions for booleans, integers, and strings. However,
// we don't do that here because Constructor doesn't need them.
// ---------------------------------------------------------------------------
#pragma once
#if !defined(__MACH__)
#include <CFPreferences.h>
#endif
namespace CAPreferences {

    // Whether a preference value exists for inKey.
    bool IsDefined (
        CFStringRef inKey );

    // Returns the stored value as a property list. Per the CoreFoundation
    // "Copy" convention, the caller owns the result and must release it.
    CFPropertyListRef CopyValueAsPropertyList (
        CFStringRef inKey );

    // Stores inValue (a property list) under inKey.
    void SetValueAsPropertyList (
        CFStringRef inKey,
        CFPropertyListRef inValue );

    // Reads the value for inKey into inBlock, writing at most inMaxSize
    // bytes. NOTE(review): the UInt32 result is presumably the number of
    // bytes copied -- confirm against the implementation.
    UInt32 GetValueAsBlock (
        CFStringRef inKey,
        void * inBlock,
        UInt32 inMaxSize );

    // Stores inSize raw bytes from inBlock under inKey.
    void SetValueAsBlock (
        CFStringRef inKey,
        const void * inBlock,
        UInt32 inSize );

    // Returns the stored value as a classic Mac OS Handle; caller owns the
    // returned Handle ("Copy" convention).
    Handle CopyValueAsHandle (
        CFStringRef inKey );

    // Stores the contents of a Handle under inKey.
    void SetValueAsHandle (
        CFStringRef inKey,
        Handle inValue );

    // Deletes the value stored under inKey.
    void Remove (
        CFStringRef inKey );

    // Flushes pending preference changes to permanent storage.
    bool Synchronize ();
}
// RAII helper: declare an instance on the stack and the preferences are
// synchronized (flushed to disk) automatically when the scope exits.
class StUpdatePreferences {
public:
            StUpdatePreferences () {}
            ~StUpdatePreferences ()
                { CAPreferences::Synchronize(); }
};
| mctully/tntbasic | third_party/PowerPlant/constructor/Constructor_Pro/Constructor/Source files/CO- Core/Application/CAPreferences.h | C | bsd-2-clause | 3,667 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_22) on Sun Aug 26 15:13:10 EDT 2012 -->
<TITLE>
Uses of Class org.newdawn.slick.svg.inkscape.RectProcessor (Slick - The 2D Library)
</TITLE>
<META NAME="date" CONTENT="2012-08-26">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.newdawn.slick.svg.inkscape.RectProcessor (Slick - The 2D Library)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../org/newdawn/slick/svg/inkscape/RectProcessor.html" title="class in org.newdawn.slick.svg.inkscape"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?org/newdawn/slick/svg/inkscape//class-useRectProcessor.html" target="_top"><B>FRAMES</B></A>
<A HREF="RectProcessor.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Class<br>org.newdawn.slick.svg.inkscape.RectProcessor</B></H2>
</CENTER>
No usage of org.newdawn.slick.svg.inkscape.RectProcessor
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../org/newdawn/slick/svg/inkscape/RectProcessor.html" title="class in org.newdawn.slick.svg.inkscape"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?org/newdawn/slick/svg/inkscape//class-useRectProcessor.html" target="_top"><B>FRAMES</B></A>
<A HREF="RectProcessor.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
<i>Copyright © 2006 New Dawn Software. All Rights Reserved.</i>
</BODY>
</HTML>
| SenshiSentou/SourceFight | slick_dev/trunk/Slick/javadoc/org/newdawn/slick/svg/inkscape/class-use/RectProcessor.html | HTML | bsd-2-clause | 6,133 |
/*
* #%L
* ImageJ software for multidimensional image processing and analysis.
* %%
* Copyright (C) 2010 - 2014 Board of Regents of the University of
* Wisconsin-Madison.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package imagej.workflow;
/**
 * Information about a module.  A module may be a workflow or a plugin.
 *
 * @author aivar
 */
public interface IModuleInfo extends IModuleInfoInternal {

    /**
     * Gets the name of the module.
     *
     * @return the module name
     */
    public String getName();

    /**
     * Gets the input item information array.
     *
     * @return descriptors for the module's input items
     */
    public IItemInfo[] getInputItemInfos();

    /**
     * Gets the output item information array.
     *
     * @return descriptors for the module's output items
     */
    public IItemInfo[] getOutputItemInfos();

    /**
     * Is this module a workflow (as opposed to a single plugin)?
     *
     * @return true if this module is a workflow
     */
    public boolean isWorkflow();
}
| imagej/workflow | src/main/java/imagej/workflow/IModuleInfo.java | Java | bsd-2-clause | 2,131 |
/*
* Copyright (c) 2012, JInterval Project.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.java.jinterval.rational;
import java.math.BigInteger;
import java.math.RoundingMode;
/**
 * The IEEE 754 binary64 (Java {@code double}) value set: 53-bit significand,
 * exponent range [-1022, 1023].  Supplies a {@code MathContext} for every
 * {@link RoundingMode} and static helpers that round an arbitrary-precision
 * binary value to the bit pattern of the nearest representable double.
 *
 * @author nadezhin
 */
class BinaryValueSet64 extends BinaryValueSet {

    // IEEE 754 binary64 layout constants.
    static final int SIGNIFICAND_WIDTH = 53;
    static final int MINEXP = -1022;
    static final int MAXEXP = 1023;
    static final long SIGN_MASK = 0x8000000000000000L;
    static final long EXP_BIT_MASK = 0x7FF0000000000000L;
    static final long SIGNIF_BIT_MASK = 0x000FFFFFFFFFFFFFL;

    BinaryValueSet64() {
        super(SIGNIFICAND_WIDTH, MINEXP, MAXEXP);
    }

    // Creates the MathContext implementation for the requested rounding mode.
    @Override
    MathContext makeMathContext(RoundingMode rm) {
        switch (rm) {
            case UP:
                return new MathContextUp64();
            case DOWN:
                return new MathContextDown64();
            case CEILING:
                return new MathContextCeiling64();
            case FLOOR:
                return new MathContextFloor64();
            case HALF_UP:
                return new MathContextHalfUp64();
            case HALF_DOWN:
                return new MathContextHalfDown64();
            case HALF_EVEN:
                return new MathContextHalfEven64();
            case UNNECESSARY:
                return new MathContextUnnecessary64();
            default:
                throw new AssertionError();
        }
    }

    // Base context for this value set: a value that is already a binary64
    // double is representable and needs no rounding.
    class MathContext64 extends MathContext {

        private MathContext64(RoundingMode rm) {
            super(BinaryValueSet64.this, rm);
        }

        @Override
        BinaryDouble round(BinaryDoubleImpl v) {
            return v;
        }
    }

    // Round away from zero.
    private class MathContextUp64 extends MathContext64 {

        private MathContextUp64() {
            super(RoundingMode.UP);
        }

        @Override
        ExtendedRational round(BinaryImpl v) {
            return fromBits(bitsUp(v));
        }
    }

    // Round toward zero (truncate).
    private class MathContextDown64 extends MathContext64 {

        private MathContextDown64() {
            super(RoundingMode.DOWN);
        }

        @Override
        ExtendedRational round(BinaryImpl v) {
            return fromBits(bitsDown(v));
        }
    }

    // Round toward positive infinity.
    private class MathContextCeiling64 extends MathContext64 {

        private MathContextCeiling64() {
            super(RoundingMode.CEILING);
        }

        @Override
        ExtendedRational round(BinaryImpl v) {
            return fromBits(bitsCeiling(v));
        }
    }

    // Round toward negative infinity.
    private class MathContextFloor64 extends MathContext64 {

        private MathContextFloor64() {
            super(RoundingMode.FLOOR);
        }

        @Override
        ExtendedRational round(BinaryImpl v) {
            return fromBits(bitsFloor(v));
        }
    }

    // Round to nearest; ties away from zero.
    private class MathContextHalfUp64 extends MathContext64 {

        private MathContextHalfUp64() {
            super(RoundingMode.HALF_UP);
        }

        @Override
        ExtendedRational round(BinaryImpl v) {
            return fromBits(bitsHalfUp(v));
        }
    }

    // Round to nearest; ties toward zero.
    private class MathContextHalfDown64 extends MathContext64 {

        private MathContextHalfDown64() {
            super(RoundingMode.HALF_DOWN);
        }

        @Override
        ExtendedRational round(BinaryImpl v) {
            return fromBits(bitsHalfDown(v));
        }
    }

    // Round to nearest; ties to the even neighbor (IEEE default).
    private class MathContextHalfEven64 extends MathContext64 {

        private MathContextHalfEven64() {
            super(RoundingMode.HALF_EVEN);
        }

        @Override
        ExtendedRational round(BinaryImpl v) {
            return fromBits(bitsHalfEven(v));
        }
    }

    // Rounding must never be required in this mode.
    private class MathContextUnnecessary64 extends MathContext64 {

        private MathContextUnnecessary64() {
            super(RoundingMode.UNNECESSARY);
        }

        @Override
        ExtendedRational round(BinaryImpl v) {
            throw new ArithmeticException("Rounding necessary");
        }
    }

    // --- doubleValue* helpers: round v and convert directly to a double ---

    static double doubleValueUp(BinaryImpl v) {
        return Double.longBitsToDouble(bitsUp(v));
    }

    static double doubleValueDown(BinaryImpl v) {
        return Double.longBitsToDouble(bitsDown(v));
    }

    static double doubleValueHalfUp(BinaryImpl v) {
        return Double.longBitsToDouble(bitsHalfUp(v));
    }

    static double doubleValueHalfDown(BinaryImpl v) {
        return Double.longBitsToDouble(bitsHalfDown(v));
    }

    static double doubleValueHalfEven(BinaryImpl v) {
        return Double.longBitsToDouble(bitsHalfEven(v));
    }

    // Converts a rounded binary64 bit pattern back to an ExtendedRational:
    // the zero patterns map to rational zero, the infinity patterns map to
    // +/-infinity, everything else becomes a BinaryDoubleImpl.
    private static ExtendedRational fromBits(long bits) {
        long maskedBits = bits & 0x7FFFFFFFFFFFFFFFL;
        if (maskedBits <= 0) {
            assert maskedBits == 0;
            return Rational.zero();
        }
        if (maskedBits >= 0x7FF0000000000000L) {
            assert maskedBits == 0x7FF0000000000000L;
            return bits >= 0 ? ExtendedRational.POSITIVE_INFINITY : ExtendedRational.NEGATIVE_INFINITY;
        }
        return new BinaryDoubleImpl(Double.longBitsToDouble(bits));
    }

    // Round away from zero: truncate toward zero, then step one ulp outward.
    // The IEEE bit layout is monotone in magnitude for a fixed sign, so +1 on
    // the bit pattern increases the magnitude by one ulp.
    // NOTE(review): assumes v is not exactly representable in binary64 --
    // an exact value would be bumped anyway; confirm the caller's contract.
    private static long bitsUp(BinaryImpl v) {
        return bitsDown(v) + 1;
    }

    // Round toward zero. Handles three regimes: normal numbers, overflow
    // (clamped to the largest finite pattern 0x7FEF...F), and subnormals
    // (dotExp below MINEXP, where the significand is shifted further right).
    private static long bitsDown(BinaryImpl v) {
        int signum = v.signum();
        int precision = v.precision();
        int exp = v.intExp2();
        BigInteger unscaledValueAbs = v.getNumeratorWithout2sAbs();
        // Exponent of the leading significand bit.
        int dotExp = exp + precision - 1;
        long bits;
        if (dotExp >= MINEXP) {
            if (dotExp <= MAXEXP) {
                // Normal: keep the top SIGNIFICAND_WIDTH bits, encode the
                // biased exponent above them.
                bits = unscaledValueAbs.shiftRight(precision - SIGNIFICAND_WIDTH).longValue()
                        + (((long) (dotExp - MINEXP)) << 52);
            } else {
                // Overflow: largest finite double's bit pattern.
                bits = 0x7FEFFFFFFFFFFFFFL;
            }
        } else {
            // Subnormal: shift so the value aligns with the subnormal grid.
            bits = unscaledValueAbs.shiftRight(MINEXP - SIGNIFICAND_WIDTH + 1 - exp).longValue();
        }
        if (signum < 0) {
            bits |= SIGN_MASK;
        }
        return bits;
    }

    // Ceiling = up for positive values, down (truncate) for negative ones.
    private static long bitsCeiling(BinaryImpl v) {
        long bits = bitsDown(v);
        return v.signum() > 0 ? bits + 1 : bits;
    }

    // Floor = up in magnitude for negative values, truncate for positive.
    private static long bitsFloor(BinaryImpl v) {
        long bits = bitsDown(v);
        return v.signum() < 0 ? bits + 1 : bits;
    }

    // Round to nearest, ties away from zero. Works with one extra (guard)
    // bit below the result: add 1 at the guard position, then drop it.
    // On overflow the clamp value 0xFFDF...F is the largest finite pattern
    // shifted left one with the guard bit set, so the final +1 and >>> 1
    // produce the infinity pattern 0x7FF0000000000000.
    static long bitsHalfUp(BinaryImpl v) {
        int signum = v.signum();
        int precision = v.precision();
        int exp = v.intExp2();
        BigInteger unscaledValueAbs = v.getNumeratorWithout2sAbs();
        int dotExp = exp + precision - 1;
        long bits;
        if (dotExp >= MINEXP) {
            if (dotExp <= MAXEXP) {
                // Keep SIGNIFICAND_WIDTH + 1 bits: result plus guard bit.
                bits = unscaledValueAbs.shiftRight(precision - (SIGNIFICAND_WIDTH + 1)).longValue()
                        + (((long) (dotExp - MINEXP)) << 53);
            } else {
                bits = 0xFFDFFFFFFFFFFFFFL;
            }
        } else {
            bits = unscaledValueAbs.shiftRight(MINEXP - SIGNIFICAND_WIDTH - exp).longValue();
        }
        bits++;
        bits >>>= 1;
        if (signum < 0) {
            bits |= SIGN_MASK;
        }
        return bits;
    }

    // Round to nearest, ties toward zero. Same guard-bit scheme as
    // bitsHalfUp, except the +1 is skipped when shift <= 1: the significand
    // (numerator with factors of 2 removed) is odd, so shift > 1 guarantees
    // nonzero discarded bits below the guard (strictly above a tie), while
    // shift <= 1 means a set guard bit is an exact tie and must round down.
    static long bitsHalfDown(BinaryImpl v) {
        int signum = v.signum();
        int precision = v.precision();
        int exp = v.intExp2();
        BigInteger unscaledValueAbs = v.getNumeratorWithout2sAbs();
        int dotExp = exp + precision - 1;
        long bits = 0;
        int shift;
        if (dotExp >= MINEXP) {
            if (dotExp <= MAXEXP) {
                shift = precision - (SIGNIFICAND_WIDTH + 1);
                bits = unscaledValueAbs.shiftRight(shift).longValue()
                        + (((long) (dotExp - MINEXP)) << 53);
            } else {
                bits = 0xFFDFFFFFFFFFFFFFL;
                shift = Integer.MAX_VALUE;
            }
        } else {
            shift = MINEXP - SIGNIFICAND_WIDTH - exp;
            bits = unscaledValueAbs.shiftRight(shift).longValue();
        }
        if (shift > 1) {
            bits++;
        }
        bits >>>= 1;
        if (signum < 0) {
            bits |= SIGN_MASK;
        }
        return bits;
    }

    // Round to nearest, ties to even. Increment the guard position when the
    // discarded remainder is nonzero (shift > 0, since the significand is
    // odd) or when the would-be result is odd ((bits & 2) != 0), so exact
    // ties land on the even neighbor.
    static long bitsHalfEven(BinaryImpl v) {
        int signum = v.signum();
        int precision = v.precision();
        int exp = v.intExp2();
        BigInteger unscaledValueAbs = v.getNumeratorWithout2sAbs();
        int dotExp = exp + precision - 1;
        long bits = 0;
        int shift;
        if (dotExp >= MINEXP) {
            if (dotExp <= MAXEXP) {
                shift = precision - (SIGNIFICAND_WIDTH + 1);
                bits = unscaledValueAbs.shiftRight(shift).longValue()
                        + (((long) (dotExp - MINEXP)) << 53);
            } else {
                bits = 0xFFDFFFFFFFFFFFFFL;
                shift = Integer.MAX_VALUE;
            }
        } else {
            shift = MINEXP - SIGNIFICAND_WIDTH - exp;
            bits = unscaledValueAbs.shiftRight(shift).longValue();
        }
        if (shift > 0 || (bits & 2) != 0) {
            bits++;
        }
        bits >>>= 1;
        if (signum < 0) {
            bits |= SIGN_MASK;
        }
        return bits;
    }
}
| jinterval/jinterval | jinterval-rational-java/src/main/java/net/java/jinterval/rational/BinaryValueSet64.java | Java | bsd-2-clause | 10,359 |
from flask import *
from pyZPL import *
from printLabel import printLabel
import xml.etree.ElementTree as ET
import os
app = Flask(__name__)

# Resolve files relative to this script so the app works from any cwd.
dn = os.path.dirname(os.path.realpath(__file__))+"/"

# pace.xml is the label template; every element carrying an "id" attribute
# becomes a user-editable item on the web form.
tree = ET.parse(dn+"pace.xml")
customElements = tree.findall(".//*[@id]")

customItems = []
for element in customElements:
    newItem = ZPLCustomItem()
    newItem.ID = element.get("id")
    newItem.data = element.text
    newItem.type = element.tag
    if element.get("fixed"):
        # Elements marked fixed are rendered as read-only form inputs.
        newItem.fixed = "readonly"
    customItems.append(newItem)
@app.route('/')
def root():
    """Render the label editor page listing all configurable items."""
    page_context = {"items": customItems}
    return render_template("index.html", **page_context)
@app.route('/print', methods=['POST'])
def print_():
    """Build ZPL items from the submitted form and send them to the printer.

    Form fields are named "<id>_string" (the text value) and, for checked
    visibility checkboxes, "<id>_bool".  Returns whatever printLabel()
    returns for the assembled item list.
    """
    customItemsModified = []
    if request.method == 'POST':
        # NOTE(review): this iterates over *every* submitted key, so an item
        # whose checkbox is checked contributes both "<id>_string" and
        # "<id>_bool" and is therefore appended twice -- presumably
        # printLabel() tolerates duplicates; verify before changing.
        # .items() replaces the Python-2-only .iteritems().
        for key, value in request.form.items():
            newItem = ZPLCustomItem()
            split = key.split('_')
            newItem.type = split[-1]          # trailing segment: "string" or "bool"
            newItem.ID = "_".join(split[:-1])  # everything before the suffix
            newItem.data = request.form[newItem.ID + "_string"]
            try:
                # Checkboxes only appear in the form data when checked.
                request.form[newItem.ID + "_bool"]
                newItem.visible = True
            except KeyError:
                newItem.visible = False
            customItemsModified.append(newItem)
        return printLabel(customItemsModified)
    else:
        return "can has post?"
if __name__ == '__main__':
    # Development server with the interactive debugger enabled.
    app.run(debug=True)
| OHRI-BioInfo/pyZPL | web.py | Python | bsd-2-clause | 1,418 |
# coding: utf-8
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a nullable ``kpi_asset_uid`` column to the ``XForm`` model."""

    dependencies = [
        ('logger', '0011_add-index-to-instance-uuid_and_xform_uuid'),
    ]

    operations = [
        migrations.AddField(
            model_name='xform',
            name='kpi_asset_uid',
            # Nullable so existing rows migrate without a default value.
            field=models.CharField(max_length=32, null=True),
        ),
    ]
| kobotoolbox/kobocat | onadata/apps/logger/migrations/0012_add_asset_uid_to_xform.py | Python | bsd-2-clause | 390 |
/*
* Copyright (c) 2011-2014, University of Delaware
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include <iostream>
#include <stdlib.h>
#include "darts.h"
#define INNER 1000
#define OUTER 1000
using namespace darts;
// Leaf codelet: when fired it simply signals another codelet by
// decrementing that codelet's dependence counter.
class aCD : public Codelet
{
public:
    Codelet * toSignal;   // codelet to signal when this one fires

    aCD(uint32_t dep, uint32_t res, ThreadedProcedure * myTP, uint32_t stat, Codelet * toSig):
        Codelet(dep, res, myTP, stat),
        toSignal(toSig) { }

    aCD(void){ }

    // Two-phase initializer for aCDs that were default-constructed
    // inside an array (see aTP's `new aCD[fanout]`).
    void initACD(uint32_t dep, uint32_t res, ThreadedProcedure * myTP, uint32_t stat, Codelet * toSig)
    {
        initCodelet(dep,res,myTP,stat);
        toSignal = toSig;
    }

    virtual void fire(void)
    {
        toSignal->decDep();
    }
};
// Threaded procedure implementing a fan-out/fan-in: `fanout` leaf codelets
// all signal the single `end` codelet, which in turn signals `toSig` once
// every leaf has fired.
class aTP : public ThreadedProcedure
{
public:
    aCD * acd;   // owned array of `fanout` leaf codelets
    aCD end;     // join point: depends on all `fanout` leaves

    aTP(int fanout, Codelet * toSig):
        ThreadedProcedure(),
        acd(new aCD[fanout]),
        end(fanout,fanout,this,0,toSig)
    {
        for(int i=0; i<fanout; i++)
        {
            // Leaves have no dependences, so they are immediately runnable.
            acd[i].initACD(0,0,this,i,&end);
            add(&acd[i]);
        }
    }

    ~aTP(void)
    {
        delete [] acd;
    }
};
// Benchmark driver: measures the mean latency of running one fan-out TP,
// averaged over INNER iterations and then over OUTER batches.
int main(int argc, char *argv[])
{
    if (argc != 6)
    {
        std::cout << "enter number of TP CD TPM CDM Fanout" << std::endl;
        return 0;
    }

    // Command line: TP count, CD count, TP mask, CD mask, codelet fan-out.
    int tps = atoi(argv[1]);
    int cds = atoi(argv[2]);
    int tpm = atoi(argv[3]);
    int cdm = atoi(argv[4]);
    int fanout = atoi(argv[5]);

    uint64_t innerTime = 0;
    uint64_t outerTime = 0;

    ThreadAffinity affin(cds, tps, SPREAD, tpm, cdm);
    if (affin.generateMask())
    {
        Runtime * rt = new Runtime(&affin);
        for (int i = 0; i < OUTER; i++)
        {
            // Untimed warm-up run before each measured batch.
            rt->run(launch<aTP>(fanout,&Runtime::finalSignal));
            for (int j = 0; j < INNER; j++)
            {
                uint64_t startTime = getTime();
                rt->run(launch<aTP>(fanout,&Runtime::finalSignal));
                uint64_t endTime = getTime();
                innerTime += endTime - startTime;
            }
            // Accumulate this batch's mean, then reset for the next batch.
            outerTime += innerTime / INNER;
            innerTime = 0;
        }
        // Report the mean of the batch means.
        std::cout << outerTime/OUTER << std::endl;
        delete rt;
    }
    return 0;
}
| szuckerm/DARTS | apps/Fanout/cd_fanout.cpp | C++ | bsd-2-clause | 3,498 |
//****************************************************************************
// Copyright © 2015 Jan Erik Breimo. All rights reserved.
// Created by Jan Erik Breimo on 2015-07-27
//
// This file is distributed under the Simplified BSD License.
// License text is included with the source distribution.
//****************************************************************************
#pragma once
#include "CodePage.hpp"
namespace Ystring { namespace Encodings
{
    // Returns the CodePage object describing the Windows-1252 encoding.
    YSTRING_API CodePage makeWindows1252();
}}
| jebreimo/Ystring | src/Ystring/Encodings/Windows1252.hpp | C++ | bsd-2-clause | 512 |
//
// The Open Toolkit Library License
//
// Copyright (c) 2006 - 2009 the Open Toolkit library.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software is furnished to do
// so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
//
#if EXPERIMENTAL
namespace OpenTK.Compute.CL10
{
using System;
using System.Text;
using System.Runtime.InteropServices;
#pragma warning disable 3019
#pragma warning disable 1591
// P/Invoke bindings for the OpenCL 1.0 C API. Managed names drop the native
// "cl" prefix (e.g. BuildProgram binds clBuildProgram).
partial class CL
{
// Raw, unchecked native entry points. Parameter semantics follow the
// OpenCL 1.0 specification; callers are responsible for marshalling and
// for interpreting the returned error codes.
internal static partial class Core
{
// --- Program building and object creation ---
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clBuildProgram", ExactSpelling = true)]
internal extern static unsafe int BuildProgram(IntPtr program, uint num_devices, IntPtr* device_list, String options, IntPtr pfn_notify, IntPtr user_data);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clCreateBuffer", ExactSpelling = true)]
internal extern static unsafe IntPtr CreateBuffer(IntPtr context, MemFlags flags, IntPtr size, IntPtr host_ptr, [OutAttribute] OpenTK.Compute.CL10.ErrorCode* errcode_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clCreateCommandQueue", ExactSpelling = true)]
internal extern static unsafe IntPtr CreateCommandQueue(IntPtr context, IntPtr device, CommandQueueFlags properties, [OutAttribute] OpenTK.Compute.CL10.ErrorCode* errcode_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clCreateContext", ExactSpelling = true)]
internal extern static unsafe IntPtr CreateContext(IntPtr* properties, uint num_devices, IntPtr* devices, IntPtr pfn_notify, IntPtr user_data, [OutAttribute] OpenTK.Compute.CL10.ErrorCode* errcode_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clCreateContextFromType", ExactSpelling = true)]
internal extern static unsafe IntPtr CreateContextFromType(IntPtr* properties, DeviceTypeFlags device_type, IntPtr pfn_notify, IntPtr user_data, [OutAttribute] OpenTK.Compute.CL10.ErrorCode* errcode_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clCreateImage2D", ExactSpelling = true)]
internal extern static unsafe IntPtr CreateImage2D(IntPtr context, MemFlags flags, ImageFormat* image_format, IntPtr image_width, IntPtr image_height, IntPtr image_row_pitch, IntPtr host_ptr, [OutAttribute] int* errcode_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clCreateImage3D", ExactSpelling = true)]
internal extern static unsafe IntPtr CreateImage3D(IntPtr context, MemFlags flags, ImageFormat* image_format, IntPtr image_width, IntPtr image_height, IntPtr image_depth, IntPtr image_row_pitch, IntPtr image_slice_pitch, IntPtr host_ptr, [OutAttribute] int* errcode_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clCreateKernel", ExactSpelling = true)]
internal extern static unsafe IntPtr CreateKernel(IntPtr program, String kernel_name, [OutAttribute] OpenTK.Compute.CL10.ErrorCode* errcode_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clCreateKernelsInProgram", ExactSpelling = true)]
internal extern static unsafe int CreateKernelsInProgram(IntPtr program, uint num_kernels, IntPtr* kernels, [OutAttribute] uint* num_kernels_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clCreateProgramWithBinary", ExactSpelling = true)]
internal extern static unsafe IntPtr CreateProgramWithBinary(IntPtr context, uint num_devices, IntPtr* device_list, IntPtr* lengths, byte** binaries, int* binary_status, [OutAttribute] OpenTK.Compute.CL10.ErrorCode* errcode_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clCreateProgramWithSource", ExactSpelling = true)]
internal extern static unsafe IntPtr CreateProgramWithSource(IntPtr context, uint count, String[] strings, IntPtr* lengths, [OutAttribute] OpenTK.Compute.CL10.ErrorCode* errcode_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clCreateSampler", ExactSpelling = true)]
internal extern static unsafe IntPtr CreateSampler(IntPtr context, bool normalized_coords, AddressingMode addressing_mode, FilterMode filter_mode, [OutAttribute] int* errcode_ret);
// --- Command queue enqueue operations ---
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clEnqueueBarrier", ExactSpelling = true)]
internal extern static int EnqueueBarrier(IntPtr command_queue);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clEnqueueCopyBuffer", ExactSpelling = true)]
internal extern static unsafe int EnqueueCopyBuffer(IntPtr command_queue, IntPtr src_buffer, IntPtr dst_buffer, IntPtr src_offset, IntPtr dst_offset, IntPtr cb, uint num_events_in_wait_list, IntPtr* event_wait_list, IntPtr* @event);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clEnqueueCopyBufferToImage", ExactSpelling = true)]
internal extern static unsafe int EnqueueCopyBufferToImage(IntPtr command_queue, IntPtr src_buffer, IntPtr dst_image, IntPtr src_offset, IntPtr** dst_origin, IntPtr** region, uint num_events_in_wait_list, IntPtr* event_wait_list, IntPtr* @event);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clEnqueueCopyImage", ExactSpelling = true)]
internal extern static unsafe int EnqueueCopyImage(IntPtr command_queue, IntPtr src_image, IntPtr dst_image, IntPtr** src_origin, IntPtr** dst_origin, IntPtr** region, uint num_events_in_wait_list, IntPtr* event_wait_list, IntPtr* @event);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clEnqueueCopyImageToBuffer", ExactSpelling = true)]
internal extern static unsafe int EnqueueCopyImageToBuffer(IntPtr command_queue, IntPtr src_image, IntPtr dst_buffer, IntPtr** src_origin, IntPtr** region, IntPtr dst_offset, uint num_events_in_wait_list, IntPtr* event_wait_list, IntPtr* @event);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clEnqueueMapBuffer", ExactSpelling = true)]
internal extern static unsafe System.IntPtr EnqueueMapBuffer(IntPtr command_queue, IntPtr buffer, bool blocking_map, MapFlags map_flags, IntPtr offset, IntPtr cb, uint num_events_in_wait_list, IntPtr* event_wait_list, IntPtr* @event, [OutAttribute] int* errcode_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clEnqueueMapImage", ExactSpelling = true)]
internal extern static unsafe System.IntPtr EnqueueMapImage(IntPtr command_queue, IntPtr image, bool blocking_map, MapFlags map_flags, IntPtr** origin, IntPtr** region, IntPtr* image_row_pitch, IntPtr* image_slice_pitch, uint num_events_in_wait_list, IntPtr* event_wait_list, IntPtr* @event, [OutAttribute] int* errcode_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clEnqueueMarker", ExactSpelling = true)]
internal extern static unsafe int EnqueueMarker(IntPtr command_queue, IntPtr* @event);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clEnqueueNativeKernel", ExactSpelling = true)]
internal extern static unsafe int EnqueueNativeKernel(IntPtr command_queue, IntPtr user_func, IntPtr args, IntPtr cb_args, uint num_mem_objects, IntPtr* mem_list, IntPtr args_mem_loc, uint num_events_in_wait_list, IntPtr* event_wait_list, IntPtr* @event);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clEnqueueNDRangeKernel", ExactSpelling = true)]
internal extern static unsafe int EnqueueNDRangeKernel(IntPtr command_queue, IntPtr kernel, uint work_dim, IntPtr* global_work_offset, IntPtr* global_work_size, IntPtr* local_work_size, uint num_events_in_wait_list, IntPtr* event_wait_list, IntPtr* @event);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clEnqueueReadBuffer", ExactSpelling = true)]
internal extern static unsafe int EnqueueReadBuffer(IntPtr command_queue, IntPtr buffer, bool blocking_read, IntPtr offset, IntPtr cb, IntPtr ptr, uint num_events_in_wait_list, IntPtr* event_wait_list, IntPtr* @event);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clEnqueueReadImage", ExactSpelling = true)]
internal extern static unsafe int EnqueueReadImage(IntPtr command_queue, IntPtr image, bool blocking_read, IntPtr** origin, IntPtr** region, IntPtr row_pitch, IntPtr slice_pitch, IntPtr ptr, uint num_events_in_wait_list, IntPtr* event_wait_list, IntPtr* @event);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clEnqueueTask", ExactSpelling = true)]
internal extern static unsafe int EnqueueTask(IntPtr command_queue, IntPtr kernel, uint num_events_in_wait_list, IntPtr* event_wait_list, IntPtr* @event);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clEnqueueUnmapMemObject", ExactSpelling = true)]
internal extern static unsafe int EnqueueUnmapMemObject(IntPtr command_queue, IntPtr memobj, IntPtr mapped_ptr, uint num_events_in_wait_list, IntPtr* event_wait_list, IntPtr* @event);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clEnqueueWaitForEvents", ExactSpelling = true)]
internal extern static unsafe int EnqueueWaitForEvents(IntPtr command_queue, uint num_events, IntPtr* event_list);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clEnqueueWriteBuffer", ExactSpelling = true)]
internal extern static unsafe int EnqueueWriteBuffer(IntPtr command_queue, IntPtr buffer, bool blocking_write, IntPtr offset, IntPtr cb, IntPtr ptr, uint num_events_in_wait_list, IntPtr* event_wait_list, IntPtr* @event);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clEnqueueWriteImage", ExactSpelling = true)]
internal extern static unsafe int EnqueueWriteImage(IntPtr command_queue, IntPtr image, bool blocking_write, IntPtr** origin, IntPtr** region, IntPtr input_row_pitch, IntPtr input_slice_pitch, IntPtr ptr, uint num_events_in_wait_list, IntPtr* event_wait_list, IntPtr* @event);
// --- Queue synchronization ---
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clFinish", ExactSpelling = true)]
internal extern static int Finish(IntPtr command_queue);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clFlush", ExactSpelling = true)]
internal extern static int Flush(IntPtr command_queue);
// --- Info queries ---
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clGetCommandQueueInfo", ExactSpelling = true)]
internal extern static unsafe int GetCommandQueueInfo(IntPtr command_queue, CommandQueueInfo param_name, IntPtr param_value_size, IntPtr param_value, [OutAttribute] IntPtr* param_value_size_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clGetContextInfo", ExactSpelling = true)]
internal extern static unsafe int GetContextInfo(IntPtr context, ContextInfo param_name, IntPtr param_value_size, IntPtr param_value, [OutAttribute] IntPtr* param_value_size_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clGetDeviceIDs", ExactSpelling = true)]
internal extern static unsafe int GetDeviceIDs(IntPtr platform, DeviceTypeFlags device_type, uint num_entries, IntPtr* devices, uint* num_devices);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clGetDeviceInfo", ExactSpelling = true)]
internal extern static unsafe int GetDeviceInfo(IntPtr device, DeviceInfo param_name, IntPtr param_value_size, IntPtr param_value, [OutAttribute] IntPtr* param_value_size_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clGetEventInfo", ExactSpelling = true)]
internal extern static unsafe int GetEventInfo(IntPtr @event, EventInfo param_name, IntPtr param_value_size, IntPtr param_value, [OutAttribute] IntPtr* param_value_size_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clGetEventProfilingInfo", ExactSpelling = true)]
internal extern static unsafe int GetEventProfilingInfo(IntPtr @event, ProfilingInfo param_name, IntPtr param_value_size, IntPtr param_value, [OutAttribute] IntPtr* param_value_size_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clGetImageInfo", ExactSpelling = true)]
internal extern static unsafe int GetImageInfo(IntPtr image, ImageInfo param_name, IntPtr param_value_size, IntPtr param_value, [OutAttribute] IntPtr* param_value_size_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clGetKernelInfo", ExactSpelling = true)]
internal extern static unsafe int GetKernelInfo(IntPtr kernel, KernelInfo param_name, IntPtr param_value_size, IntPtr param_value, [OutAttribute] IntPtr* param_value_size_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clGetKernelWorkGroupInfo", ExactSpelling = true)]
internal extern static unsafe int GetKernelWorkGroupInfo(IntPtr kernel, IntPtr device, KernelWorkGroupInfo param_name, IntPtr param_value_size, IntPtr param_value, [OutAttribute] IntPtr* param_value_size_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clGetMemObjectInfo", ExactSpelling = true)]
internal extern static unsafe int GetMemObjectInfo(IntPtr memobj, MemInfo param_name, IntPtr param_value_size, IntPtr param_value, [OutAttribute] IntPtr* param_value_size_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clGetPlatformIDs", ExactSpelling = true)]
internal extern static unsafe int GetPlatformIDs(uint num_entries, IntPtr* platforms, uint* num_platforms);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clGetPlatformInfo", ExactSpelling = true)]
internal extern static unsafe int GetPlatformInfo(IntPtr platform, PlatformInfo param_name, IntPtr param_value_size, IntPtr param_value, [OutAttribute] IntPtr* param_value_size_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clGetProgramBuildInfo", ExactSpelling = true)]
internal extern static unsafe int GetProgramBuildInfo(IntPtr program, IntPtr device, ProgramBuildInfo param_name, IntPtr param_value_size, IntPtr param_value, [OutAttribute] IntPtr* param_value_size_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clGetProgramInfo", ExactSpelling = true)]
internal extern static unsafe int GetProgramInfo(IntPtr program, ProgramInfo param_name, IntPtr param_value_size, IntPtr param_value, [OutAttribute] IntPtr* param_value_size_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clGetSamplerInfo", ExactSpelling = true)]
internal extern static unsafe int GetSamplerInfo(IntPtr sampler, SamplerInfo param_name, IntPtr param_value_size, IntPtr param_value, [OutAttribute] IntPtr* param_value_size_ret);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clGetSupportedImageFormats", ExactSpelling = true)]
internal extern static unsafe int GetSupportedImageFormats(IntPtr context, MemFlags flags, MemObjectType image_type, uint num_entries, ImageFormat* image_formats, uint* num_image_formats);
// --- Reference counting: release ---
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clReleaseCommandQueue", ExactSpelling = true)]
internal extern static int ReleaseCommandQueue(IntPtr command_queue);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clReleaseContext", ExactSpelling = true)]
internal extern static int ReleaseContext(IntPtr context);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clReleaseEvent", ExactSpelling = true)]
internal extern static int ReleaseEvent(IntPtr @event);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clReleaseKernel", ExactSpelling = true)]
internal extern static int ReleaseKernel(IntPtr kernel);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clReleaseMemObject", ExactSpelling = true)]
internal extern static int ReleaseMemObject(IntPtr memobj);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clReleaseProgram", ExactSpelling = true)]
internal extern static int ReleaseProgram(IntPtr program);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clReleaseSampler", ExactSpelling = true)]
internal extern static int ReleaseSampler(IntPtr sampler);
// --- Reference counting: retain ---
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clRetainCommandQueue", ExactSpelling = true)]
internal extern static int RetainCommandQueue(IntPtr command_queue);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clRetainContext", ExactSpelling = true)]
internal extern static int RetainContext(IntPtr context);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clRetainEvent", ExactSpelling = true)]
internal extern static int RetainEvent(IntPtr @event);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clRetainKernel", ExactSpelling = true)]
internal extern static int RetainKernel(IntPtr kernel);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clRetainMemObject", ExactSpelling = true)]
internal extern static int RetainMemObject(IntPtr memobj);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clRetainProgram", ExactSpelling = true)]
internal extern static int RetainProgram(IntPtr program);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clRetainSampler", ExactSpelling = true)]
internal extern static int RetainSampler(IntPtr sampler);
// --- Setters and miscellaneous ---
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clSetCommandQueueProperty", ExactSpelling = true)]
internal extern static unsafe int SetCommandQueueProperty(IntPtr command_queue, CommandQueueFlags properties, bool enable, CommandQueueFlags* old_properties);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clSetKernelArg", ExactSpelling = true)]
internal extern static int SetKernelArg(IntPtr kernel, uint arg_index, IntPtr arg_size, IntPtr arg_value);
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clUnloadCompiler", ExactSpelling = true)]
internal extern static int UnloadCompiler();
[System.Security.SuppressUnmanagedCodeSecurity()]
[System.Runtime.InteropServices.DllImport(CL.Library, EntryPoint = "clWaitForEvents", ExactSpelling = true)]
internal extern static unsafe int WaitForEvents(uint num_events, IntPtr* event_list);
}
}
}
#endif | LayoutFarm/PixelFarm | src/PixelFarm/BackEnd.MiniOpenTK/src/OpenTK/Compute/CL10/Core.cs | C# | bsd-2-clause | 24,483 |
import copy
import mufsim.utils as util
import mufsim.gamedb as db
import mufsim.stackitems as si
from mufsim.errors import MufRuntimeError
from mufsim.insts.base import Instruction, instr
class InstPushItem(Instruction):
    # A literal value compiled into the program; executing it pushes that value.
    value = 0
    def __init__(self, line, val):
        self.value = val
        super(InstPushItem, self).__init__(line)
    def execute(self, fr):
        # Push the stored literal onto the frame's data stack.
        fr.data_push(self.value)
    def __str__(self):
        return si.item_repr(self.value)
class InstGlobalVar(Instruction):
    # Reference to a global/local variable slot; pushes a GlobalVar handle
    # (the variable is dereferenced later by '@' or assigned by '!').
    varnum = 0
    varname = 0
    def __init__(self, line, vnum, vname):
        self.varnum = vnum
        self.varname = vname
        super(InstGlobalVar, self).__init__(line)
    def execute(self, fr):
        fr.data_push(si.GlobalVar(self.varnum))
    def __str__(self):
        return "LV%d: %s" % (self.varnum, self.varname)
class InstFuncVar(Instruction):
    # Reference to a function-scoped variable slot; pushes a FuncVar handle.
    varnum = 0
    varname = 0
    def __init__(self, line, vnum, vname):
        self.varnum = vnum
        self.varname = vname
        super(InstFuncVar, self).__init__(line)
    def execute(self, fr):
        fr.data_push(si.FuncVar(self.varnum))
    def __str__(self):
        return "SV%d: %s" % (self.varnum, self.varname)
@instr("secure_sysvars")
class InstSecureSysvars(Instruction):
def execute(self, fr):
fr.globalvar_set(0, fr.user)
fr.globalvar_set(1, si.DBRef(db.getobj(fr.user).location))
fr.globalvar_set(2, fr.trigger)
fr.globalvar_set(3, fr.command)
@instr("!")
class InstBang(Instruction):
def execute(self, fr):
fr.check_underflow(2)
v = fr.data_pop(si.GlobalVar, si.FuncVar)
val = fr.data_pop()
if isinstance(v, si.GlobalVar):
fr.globalvar_set(v.value, val)
elif isinstance(v, si.FuncVar):
fr.funcvar_set(v.value, val)
def __str__(self):
return "!"
@instr("@")
class InstAt(Instruction):
def execute(self, fr):
v = fr.data_pop(si.GlobalVar, si.FuncVar)
if isinstance(v, si.GlobalVar):
val = fr.globalvar_get(v.value)
fr.data_push(val)
elif isinstance(v, si.FuncVar):
val = fr.funcvar_get(v.value)
fr.data_push(val)
def __str__(self):
return "@"
@instr("dup")
class InstDup(Instruction):
def execute(self, fr):
a = fr.data_pop()
fr.data_push(a)
fr.data_push(a)
@instr("shallow_copy")
class InstShallowCopy(Instruction):
def execute(self, fr):
a = fr.data_pop()
fr.data_push(a)
fr.data_push(copy.copy(a))
@instr("deep_copy")
class InstDeepCopy(Instruction):
def execute(self, fr):
a = fr.data_pop()
fr.data_push(a)
fr.data_push(copy.deepcopy(a))
@instr("?dup")
class InstQDup(Instruction):
def execute(self, fr):
a = fr.data_pop()
if isinstance(a, si.DBRef):
if a.value != -1:
fr.data_push(a)
elif a:
fr.data_push(a)
fr.data_push(a)
@instr("dupn")
class InstDupN(Instruction):
def execute(self, fr):
n = fr.data_pop(int)
fr.check_underflow(n)
for i in range(n):
fr.data_push(fr.data_pick(n))
@instr("ldup")
class InstLDup(Instruction):
def execute(self, fr):
n = fr.data_pick(1)
if not isinstance(n, int):
raise MufRuntimeError("Expected integer argument.")
n += 1
fr.check_underflow(n)
for i in range(n):
fr.data_push(fr.data_pick(n))
@instr("pop")
class InstPop(Instruction):
def execute(self, fr):
fr.data_pop()
@instr("popn")
class InstPopN(Instruction):
def execute(self, fr):
n = fr.data_pop(int)
fr.check_underflow(n)
for i in range(n):
fr.data_pop()
@instr("swap")
class InstSwap(Instruction):
def execute(self, fr):
fr.check_underflow(2)
b = fr.data_pop()
a = fr.data_pop()
fr.data_push(b)
fr.data_push(a)
@instr("rot")
class InstRot(Instruction):
def execute(self, fr):
fr.check_underflow(3)
a = fr.data_pull(3)
fr.data_push(a)
@instr("-rot")
class InstNegRot(Instruction):
def execute(self, fr):
fr.check_underflow(3)
c = fr.data_pop()
b = fr.data_pop()
a = fr.data_pop()
fr.data_push(c)
fr.data_push(a)
fr.data_push(b)
@instr("rotate")
class InstRotate(Instruction):
def execute(self, fr):
num = fr.data_pop(int)
fr.check_underflow(num)
if not num:
return
if num < 0:
a = fr.data_pop()
fr.data_insert((-num) - 1, a)
elif num > 0:
a = fr.data_pull(num)
fr.data_push(a)
@instr("pick")
class InstPick(Instruction):
def execute(self, fr):
num = fr.data_pop(int)
fr.check_underflow(num)
if not num:
return
if num < 0:
raise MufRuntimeError("Expected positive integer.")
else:
a = fr.data_pick(num)
fr.data_push(a)
@instr("over")
class InstOver(Instruction):
def execute(self, fr):
fr.check_underflow(2)
a = fr.data_pick(2)
fr.data_push(a)
@instr("put")
class InstPut(Instruction):
def execute(self, fr):
fr.check_underflow(2)
num = fr.data_pop(int)
val = fr.data_pop()
fr.check_underflow(num)
if not num:
return
if num < 0:
raise MufRuntimeError("Value out of range")
else:
fr.data_put(num, val)
@instr("nip")
class InstNip(Instruction):
def execute(self, fr):
fr.check_underflow(3)
b = fr.data_pop()
a = fr.data_pop()
fr.data_push(b)
@instr("tuck")
class InstTuck(Instruction):
def execute(self, fr):
fr.check_underflow(3)
b = fr.data_pop()
a = fr.data_pop()
fr.data_push(b)
fr.data_push(a)
fr.data_push(b)
@instr("reverse")
class InstReverse(Instruction):
def execute(self, fr):
num = fr.data_pop(int)
fr.check_underflow(num)
if not num:
return
arr = [fr.data_pop() for i in range(num)]
for val in arr:
fr.data_push(val)
@instr("lreverse")
class InstLReverse(Instruction):
def execute(self, fr):
num = fr.data_pop(int)
fr.check_underflow(num)
if not num:
return
arr = [fr.data_pop() for i in range(num)]
for val in arr:
fr.data_push(val)
fr.data_push(num)
@instr("{")
class InstMark(Instruction):
def execute(self, fr):
fr.data_push(si.Mark())
@instr("}")
class InstMarkCount(Instruction):
def execute(self, fr):
for i in range(fr.data_depth()):
a = fr.data_pick(i + 1)
if isinstance(a, si.Mark):
fr.data_pull(i + 1)
fr.data_push(i)
return
raise MufRuntimeError("StackUnderflow")
@instr("depth")
class InstDepth(Instruction):
def execute(self, fr):
fr.data_push(fr.data_depth())
@instr("fulldepth")
class InstFullDepth(Instruction):
def execute(self, fr):
fr.data_push(fr.data_full_depth())
@instr("variable")
class InstVariable(Instruction):
def execute(self, fr):
vnum = fr.data_pop(int)
fr.data_push(si.GlobalVar(vnum))
@instr("localvar")
class InstLocalVar(Instruction):
def execute(self, fr):
vnum = fr.data_pop(int)
fr.data_push(si.GlobalVar(vnum))
@instr("caller")
class InstCaller(Instruction):
def execute(self, fr):
fr.data_push(fr.caller_get())
@instr("prog")
class InstProg(Instruction):
def execute(self, fr):
fr.data_push(fr.program)
@instr("trig")
class InstTrig(Instruction):
def execute(self, fr):
fr.data_push(fr.trigger)
@instr("cmd")
class InstCmd(Instruction):
def execute(self, fr):
fr.data_push(fr.command)
@instr("checkargs")
class InstCheckArgs(Instruction):
itemtypes = {
'a': ([si.Address], "address"),
'd': ([si.DBRef], "dbref"),
'D': ([si.DBRef], "valid object dbref"),
'e': ([si.DBRef], "exit dbref"),
'E': ([si.DBRef], "valid exit dbref"),
'f': ([si.DBRef], "program dbref"),
'F': ([si.DBRef], "valid program dbref"),
'i': ([int], "integer"),
'l': ([si.Lock], "lock"),
'p': ([si.DBRef], "player dbref"),
'P': ([si.DBRef], "valid player dbref"),
'r': ([si.DBRef], "room dbref"),
'R': ([si.DBRef], "valid room dbref"),
's': ([str], "string"),
'S': ([str], "non-null string"),
't': ([si.DBRef], "thing dbref"),
'T': ([si.DBRef], "valid thing dbref"),
'v': ([si.GlobalVar, si.FuncVar], "variable"),
'?': ([], "any"),
}
objtypes = {
'D': "",
'P': "player",
'R': "room",
'T': "thing",
'E': "exit",
'F': "program",
}
def checkargs_part(self, fr, fmt, depth=1):
count = ""
pos = len(fmt) - 1
while pos >= 0:
ch = fmt[pos]
pos -= 1
if ch == " ":
continue
elif util.is_int(ch):
count = ch + count
continue
elif ch == "}":
newpos = pos
cnt = 1 if not count else int(count)
for i in range(cnt):
val = fr.data_pick(depth)
depth += 1
fr.check_type(val, [int])
for j in range(val):
newpos, depth = self.checkargs_part(
fr, fmt[:pos + 1], depth)
pos = newpos
count = ""
elif ch == "{":
return (pos, depth)
elif ch in self.itemtypes:
cnt = 1 if not count else int(count)
count = ""
for i in range(cnt):
val = fr.data_pick(depth)
depth += 1
types, label = self.itemtypes[ch]
fr.check_type(val, types)
if ch == "S" and val == "":
raise MufRuntimeError(
"Expected %s at depth %d" % (label, depth))
if si.DBRef in types:
typ = self.objtypes[ch.upper()]
if (
not db.validobj(val) and
ch.isupper()
) or (
db.validobj(val) and typ and
db.getobj(val).objtype != typ
):
raise MufRuntimeError(
"Expected %s at depth %d" % (label, depth))
def execute(self, fr):
argexp = fr.data_pop(str)
self.checkargs_part(fr, argexp)
# vim: expandtab tabstop=4 shiftwidth=4 softtabstop=4 nowrap
| revarbat/mufsim | mufsim/insts/stack.py | Python | bsd-2-clause | 11,103 |
#include "engine/plugins/match.hpp"
#include "engine/plugins/plugin_base.hpp"
#include "engine/api/match_api.hpp"
#include "engine/api/match_parameters.hpp"
#include "engine/map_matching/bayes_classifier.hpp"
#include "engine/map_matching/sub_matching.hpp"
#include "util/coordinate_calculation.hpp"
#include "util/integer_range.hpp"
#include "util/json_util.hpp"
#include "util/string_util.hpp"
#include <cstdlib>
#include <algorithm>
#include <functional>
#include <iterator>
#include <memory>
#include <string>
#include <vector>
static double search_radius_for_gps_radius(double gps_radius)
{
    // For a given GPS radius, determine the radius we need to search for candidate street segments
    // to have a 99.9% chance of finding the correct segment.
    // For more detail, see the analysis at https://github.com/Project-OSRM/osrm-backend/pull/3184
    const double scaled_radius = gps_radius * 3.5 + 45;
    const double max_search_radius = 200.0;
    return std::min(scaled_radius, max_search_radius);
}
namespace osrm
{
namespace engine
{
namespace plugins
{
// Filters PhantomNodes to obtain a set of viable candiates
// Prunes each per-coordinate candidate list in place: deduplicates candidates
// that share the same forward/reverse segment ids, splits bidirectional
// segments into two one-way candidates where a u-turn is not plausible, and
// leaves each list sorted by distance so downstream pruning is effective.
void filterCandidates(const std::vector<util::Coordinate> &coordinates,
                      MatchPlugin::CandidateLists &candidates_lists)
{
    for (const auto current_coordinate : util::irange<std::size_t>(0, coordinates.size()))
    {
        bool allow_uturn = false;

        if (coordinates.size() - 1 > current_coordinate && 0 < current_coordinate)
        {
            double turn_angle =
                util::coordinate_calculation::computeAngle(coordinates[current_coordinate - 1],
                                                           coordinates[current_coordinate],
                                                           coordinates[current_coordinate + 1]);

            // sharp turns indicate a possible uturn
            if (turn_angle <= 90.0 || turn_angle >= 270.0)
            {
                allow_uturn = true;
            }
        }

        auto &candidates = candidates_lists[current_coordinate];
        if (candidates.empty())
        {
            continue;
        }

        // sort by forward id, then by reverse id and then by distance
        // (so std::unique below keeps the closest candidate of each segment pair)
        std::sort(candidates.begin(),
                  candidates.end(),
                  [](const PhantomNodeWithDistance &lhs, const PhantomNodeWithDistance &rhs) {
                      return lhs.phantom_node.forward_segment_id.id <
                                 rhs.phantom_node.forward_segment_id.id ||
                             (lhs.phantom_node.forward_segment_id.id ==
                                  rhs.phantom_node.forward_segment_id.id &&
                              (lhs.phantom_node.reverse_segment_id.id <
                                   rhs.phantom_node.reverse_segment_id.id ||
                               (lhs.phantom_node.reverse_segment_id.id ==
                                    rhs.phantom_node.reverse_segment_id.id &&
                                lhs.distance < rhs.distance)));
                  });

        auto new_end =
            std::unique(candidates.begin(),
                        candidates.end(),
                        [](const PhantomNodeWithDistance &lhs, const PhantomNodeWithDistance &rhs) {
                            return lhs.phantom_node.forward_segment_id.id ==
                                       rhs.phantom_node.forward_segment_id.id &&
                                   lhs.phantom_node.reverse_segment_id.id ==
                                       rhs.phantom_node.reverse_segment_id.id;
                        });
        candidates.resize(new_end - candidates.begin());

        if (!allow_uturn)
        {
            const auto compact_size = candidates.size();
            for (const auto i : util::irange<std::size_t>(0, compact_size))
            {
                // Split edge if it is bidirectional and append reverse direction to end of list
                if (candidates[i].phantom_node.forward_segment_id.enabled &&
                    candidates[i].phantom_node.reverse_segment_id.enabled)
                {
                    PhantomNode reverse_node(candidates[i].phantom_node);
                    reverse_node.forward_segment_id.enabled = false;
                    candidates.push_back(
                        PhantomNodeWithDistance{reverse_node, candidates[i].distance});

                    candidates[i].phantom_node.reverse_segment_id.enabled = false;
                }
            }
        }

        // sort by distance to make pruning effective
        std::sort(candidates.begin(),
                  candidates.end(),
                  [](const PhantomNodeWithDistance &lhs, const PhantomNodeWithDistance &rhs) {
                      return lhs.distance < rhs.distance;
                  });
    }
}
// Entry point for the /match service: map-matches a GPS trace onto the road
// network and returns the matched sub-routes.
//
// Pipeline: validate input -> compute per-point search radii -> gather and
// filter candidate phantom nodes -> run the map matching -> route between
// consecutive matched nodes (geometry only) -> serialize the response.
//
// Fix: the second parameter had been corrupted to `¶meters` (an HTML-entity
// mangling of `&parameters`), which does not compile; restored the reference.
Status MatchPlugin::HandleRequest(const std::shared_ptr<datafacade::BaseDataFacade> facade,
                                  const api::MatchParameters &parameters,
                                  util::json::Object &json_result) const
{
    BOOST_ASSERT(parameters.IsValid());

    // enforce maximum number of locations for performance reasons
    if (max_locations_map_matching > 0 &&
        static_cast<int>(parameters.coordinates.size()) > max_locations_map_matching)
    {
        return Error("TooBig", "Too many trace coordinates", json_result);
    }

    if (!CheckAllCoordinates(parameters.coordinates))
    {
        return Error("InvalidValue", "Invalid coordinate value.", json_result);
    }

    // Check for same or increasing timestamps. Impl. note: Incontrast to `sort(first,
    // last, less_equal)` checking `greater` in reverse meets irreflexive requirements.
    const auto time_increases_monotonically = std::is_sorted(
        parameters.timestamps.rbegin(), parameters.timestamps.rend(), std::greater<>{});

    if (!time_increases_monotonically)
    {
        return Error(
            "InvalidValue", "Timestamps need to be monotonically increasing.", json_result);
    }

    // assuming radius is the standard deviation of a normal distribution
    // that models GPS noise (in this model), x3 should give us the correct
    // search radius with > 99% confidence
    std::vector<double> search_radiuses;
    if (parameters.radiuses.empty())
    {
        search_radiuses.resize(parameters.coordinates.size(),
                               DEFAULT_GPS_PRECISION * RADIUS_MULTIPLIER);
    }
    else
    {
        // Per-point radii: fall back to the default where no radius was given.
        search_radiuses.resize(parameters.coordinates.size());
        std::transform(parameters.radiuses.begin(),
                       parameters.radiuses.end(),
                       search_radiuses.begin(),
                       [&](const boost::optional<double> &maybe_radius) {
                           double gps_radius = maybe_radius ? *maybe_radius : DEFAULT_GPS_PRECISION;
                           return search_radius_for_gps_radius(gps_radius);
                       });
    }

    auto candidates_lists = GetPhantomNodesInRange(*facade, parameters, search_radiuses);

    filterCandidates(parameters.coordinates, candidates_lists);
    if (std::all_of(candidates_lists.begin(),
                    candidates_lists.end(),
                    [](const std::vector<PhantomNodeWithDistance> &candidates) {
                        return candidates.empty();
                    }))
    {
        return Error("NoSegment",
                     std::string("Could not find a matching segment for any coordinate."),
                     json_result);
    }

    // call the actual map matching
    SubMatchingList sub_matchings = map_matching(*facade,
                                                 candidates_lists,
                                                 parameters.coordinates,
                                                 parameters.timestamps,
                                                 parameters.radiuses);

    if (sub_matchings.size() == 0)
    {
        return Error("NoMatch", "Could not match the trace.", json_result);
    }

    // Route between consecutive matched nodes of every sub-matching.
    std::vector<InternalRouteResult> sub_routes(sub_matchings.size());
    for (auto index : util::irange<std::size_t>(0UL, sub_matchings.size()))
    {
        BOOST_ASSERT(sub_matchings[index].nodes.size() > 1);

        // FIXME we only run this to obtain the geometry
        // The clean way would be to get this directly from the map matching plugin
        PhantomNodes current_phantom_node_pair;
        for (unsigned i = 0; i < sub_matchings[index].nodes.size() - 1; ++i)
        {
            current_phantom_node_pair.source_phantom = sub_matchings[index].nodes[i];
            current_phantom_node_pair.target_phantom = sub_matchings[index].nodes[i + 1];
            BOOST_ASSERT(current_phantom_node_pair.source_phantom.IsValid());
            BOOST_ASSERT(current_phantom_node_pair.target_phantom.IsValid());
            sub_routes[index].segment_end_coordinates.emplace_back(current_phantom_node_pair);
        }
        // force uturns to be on, since we split the phantom nodes anyway and only have
        // bi-directional
        // phantom nodes for possible uturns
        shortest_path(
            *facade, sub_routes[index].segment_end_coordinates, {false}, sub_routes[index]);
        BOOST_ASSERT(sub_routes[index].shortest_path_length != INVALID_EDGE_WEIGHT);
    }

    api::MatchAPI match_api{*facade, parameters};
    match_api.MakeResponse(sub_matchings, sub_routes, json_result);

    return Status::Ok;
}
}
}
}
| hydrays/osrm-backend | src/engine/plugins/match.cpp | C++ | bsd-2-clause | 9,398 |
/*********************************************************************************
*
* Inviwo - Interactive Visualization Workshop
*
* Copyright (c) 2018-2019 Inviwo Foundation
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*********************************************************************************/
#pragma once
#include <modules/discretedata/discretedatamoduledefine.h>
#include <inviwo/core/common/inviwo.h>
namespace inviwo {
namespace discretedata {
//! Discretedata index type.
//! Signed 64-bit integer used for indices in the discretedata module;
//! signed so that -1 can serve as a sentinel (see GridPrimitive::Undef).
using ind = signed long long;
/**
 * Mapping structure name to respective dimension.
 * Assign channels to any dimensions this way.
 * If these do not suffice, cast the respective short.
 */
enum class GridPrimitive : ind {
    Undef = -1,       //!< No / unknown primitive
    Vertex = 0,       //!< 0-dimensional element
    Edge = 1,         //!< 1-dimensional element
    Face = 2,         //!< 2-dimensional element
    Volume = 3,       //!< 3-dimensional element
    HyperVolume = 4   //!< 4-dimensional element
};
}  // namespace discretedata
}  // namespace inviwo
| Sparkier/inviwo | modules/discretedata/include/modules/discretedata/discretedatatypes.h | C | bsd-2-clause | 2,147 |
#!/usr/bin/env python
# encoding: utf-8
class MyRange(object):
    """Iterator over the integers 0 .. n-1, mimicking range().

    The object is its own iterator, so it can be traversed only once;
    a second iteration yields nothing because ``idx`` is not reset.
    """

    def __init__(self, n):
        # Current position, advanced by each call to next().
        self.idx = 0
        # Exclusive upper bound of the sequence.
        self.n = n

    def __iter__(self):
        # The instance is its own (single-pass) iterator.
        return self

    def next(self):
        """Return the next value, raising StopIteration when exhausted."""
        if self.idx < self.n:
            val = self.idx
            self.idx += 1
            return val
        else:
            raise StopIteration()

    # Fix: the iterator protocol in Python 3 looks for __next__, so the
    # original class was not iterable there. Aliasing keeps the Python 2
    # spelling working while adding Python 3 support.
    __next__ = next
# Demo: iterate the custom range and print each value.
myRange = MyRange(3)
for i in myRange:
    # Fix: `print i` is Python-2-only syntax; the call form prints the same
    # single value under both Python 2 and Python 3.
    print(i)
| feixiao5566/Py_Rabbic | IO/自定义迭代器.py | Python | bsd-2-clause | 402 |
%
% POK header
%
% The following file is a part of the POK project. Any modification should
% be made according to the POK licence. You CANNOT use this file or a part
% of a file for your own project.
%
% For more information on the POK licence, please see our LICENCE FILE
%
% Please follow the coding guidelines described in doc/CODING_GUIDELINES
%
% Copyright (c) 2007-2009 POK team
%
% Created by julien on Thu Apr 30 14:07:20 2009
%
\chapter{Installation}
\label{chapter-installation}
\section{Supported development platforms}
\begin{itemize}
\item[$\bullet$] \linux
\item[$\bullet$] \macosx
\item[$\bullet$] \windows
\end{itemize}
\section{Get more information}
The following describes the standard procedures. They may be out of date
or incomplete. In that case, you will find updated information on the
\pok website (\texttt{http://pok.gunnm.org}) and its wiki section.
In addition, there are some tutorials and information about the installation
of required tools.
\section{Linux/MacOS}
\subsection{Pre-requires}
\begin{itemize}
\item[$\bullet$] The GNU-C Compiler (aka GCC), version 3.x or 4.x
\item[$\bullet$] GNU binutils
\item[$\bullet$] GNU Zip (aka gzip)
\item[$\bullet$] Mtools (MS-DOS disk utilities)
\item[$\bullet$] AWK
\item[$\bullet$] Perl (with \texttt{XML::XPath::XMLParser} and
\texttt{XML::LibXML} modules)
\item[$\bullet$] \qemu (for x86 emulation)
\item[$\bullet$] \ocarina (for code generation only)
\item[$\bullet$] TSIM (for Leon3 emulation)
\end{itemize}
\subsubsection{Note for MacOS users}
\pok uses the ELF format to store partitions. Unfortunately, this binary
format is not supported by \macosx tools. To use \pok, you must use a
development toolchain that supports the ELF format.
For that, you can easily build an ELF cross compiler using MacPorts. The
name of the required packages are \texttt{i386-elf-gcc} and
\texttt{i386-elf-binutils}.
Moreover, \macosx does not provide necessary Perl modules but you can
install them with MacPorts. The package names are \texttt{p5-xml-xpath} and
\texttt{p5-xml-libxml}.
\subsection{Running POK}
Ocarina is needed by POK. A script is provided to automatically install
the latest build:
\begin{verbatim}
$ sh ./misc/get_ocarina.sh
\end{verbatim}
You can then try to build and run some of the POK examples located in the
`example' directory.
\begin{verbatim}
$ cd examples/partitions-threads
$ make
$ make -C generated-code run
\end{verbatim}
A whole chapter of this documentation is dedicated to those examples and their
purpose.
\section{Windows}
\subsection{Pre-requires}
There are many prerequisites for running \pok on Windows. To provide a better
user experience, we supply cross-development tools to build and use POK.
The cross-development tools can be retrieved from the website of the
project. Then, unzip the tools into a directory called \texttt{crosstools}
at the root directory of the project.
Once you have this directory, run the file \texttt{configure.bat} located
in this directory. If anything is wrong, a warning will be displayed on
the screen.
For code generation, you can install \ocarina for Windows. All
installation instructions are available on Ocarina website.
| phipse/pok | doc/userguide/installation.tex | TeX | bsd-2-clause | 3,546 |
# Homebrew Cask for the Amazon WorkSpaces macOS client: downloads the
# vendor-signed installer package, verifies it against the pinned sha256,
# and installs/uninstalls it via the system package tooling.
cask "amazon-workspaces" do
  version "3.0.9.1231"
  sha256 "f363e1cadd72ea706591617f5660f7bafdaa249af1979e80d58bb21c0fd430df"

  # workspaces-client-updates.s3.amazonaws.com/ was verified as official when first introduced to the cask
  url "https://workspaces-client-updates.s3.amazonaws.com/prod/iad/osx/WorkSpaces.pkg"
  # Vendor-published Sparkle feed used to detect new versions.
  appcast "https://d2td7dqidlhjx7.cloudfront.net/prod/iad/osx/WorkSpacesAppCast_macOS_20171023.xml"
  name "Amazon Workspaces"
  homepage "https://clients.amazonworkspaces.com/"

  # Distributed as a .pkg installer rather than an app bundle.
  pkg "WorkSpaces.pkg"

  # Remove the package receipt on `brew uninstall`.
  uninstall pkgutil: "com.amazon.workspaces"
end
| haha1903/homebrew-cask | Casks/amazon-workspaces.rb | Ruby | bsd-2-clause | 574 |
using System;
using System.Text;
using LLVMSharp.Compiler.CocoR;
using LLVMSharp.Compiler.CodeGenerators;
using LLVMSharp.Compiler.Walkers;
namespace LLVMSharp.Compiler.Ast
{
/// <summary>
/// AST node for a logical NOT applied to a single sub-expression.
/// </summary>
public class AstNot : AstNode, IAstExpression
{
    // The operand being negated.
    public IAstExpression AstExpression;

    public AstNot(
        string path, int lineNumber, int columnNumber)
        : base(path, lineNumber, columnNumber) { }

    public AstNot(IParser parser) : base(parser) { }

    public AstNot(IParser parser, bool useLookAhead) : base(parser, useLookAhead) { }

    /// <summary>
    /// Dumps the node name together with its source location
    /// (path, line, column), one field per line.
    /// </summary>
    public override string ToString()
    {
        string nl = Environment.NewLine;
        StringBuilder dump = new StringBuilder();
        dump.Append("--AstNot--").Append(nl).Append(nl);
        dump.Append("Src: ").Append(base.Path).Append(nl);
        dump.Append("Ln: ").Append(base.LineNumber).Append(nl);
        dump.Append("Col: ").Append(base.ColumnNumber).Append(nl).Append(nl);
        return dump.ToString();
    }

    public void Walk(Walker walker)
    {
        //todo walker
    }

    public void EmitCode(CodeGenerator cgen)
    {
        cgen.EmitCode(this);
    }

    // The type of a NOT expression is the type of its operand.
    public string AssociatedType
    {
        get { return AstExpression.AssociatedType; }
        set { AstExpression.AssociatedType = value; }
    }
}
}
| bklooste/Bitcsharp | bitcsharp/src/lsc/Compiler/Ast/Unary/AstNot.cs | C# | bsd-2-clause | 1,330 |
#!/usr/bin/perl -w
# MGEL
# Surya Saha 3/15/07
# reading cmd line input .out file which is sorted on the start position
# and finds the relationship among images and families
# Relationship types:
# Upstream: u1 (0-500 bases),u2 (500-1000 bases),u3 (1000-5000 bases), u4 (5000-10000 bases), u5 (10000-15000 bases)
# Downstream: d1 (0-500 bases),d2 (500-1000 bases),d3 (1000-5000 bases), d4 (5000-10000 bases), d5 (10000-15000 bases)
# In: Location of fam2 is entirely within fam1 (IN)
# Contains: Location of fam2 is entirely within fam1 (Cont)
# Overlap:
# single linkage algo so consider overlap if > 10% of either
# 10% to 30% (Ovlap-10to30)
# 30% to 70% (Ovlap-30to70)
# 70% + (Ovlap>70)
# Creating the frequent itemsets in the format
# fam1, fam1-count, fam1-avglen, fam2, fam2-count, fam2-avglen, Occurence, Strand, Category
# v3: Removed all duplicate counting
# v3: Counts all relationships
# v4: Optimized the code to avoid recording itemsets with 0 count
# v4: Check for function call with large parameters
# v5: count relations for images INSTEAD of families
# v5: Use the strand information to calculate the relationships (See rLog)
# v6: Optimize the code (remove duplicates)
# v6: Fixed the bug where false relations were being counted for 'B' strand because of missing ELSE
# v7: Better progress messages
# v7: Hangs with chr12.con.out
# v8 : 07/01/07
# v8: Reducing the number of loops
# v8: No pruning.
# v8 : F1 O F2 is equal to F2 O F1 if F1==F2
# v8 : Huge improvement in complexity (8+ hours to 36 mins for chr12.con.out)
# v8 : Both the sub_fam/img and ref_fam/img will not take part in relationships with ref_fam and
# sub_fam respec. in the future
# v9 : Added a reciprocal relationship for IN called CONTAINS to handle the new confidence calulation
# v10: Writing out the copy information for each relationship
# v10: Writing out the copy information for each relationship separately for each strand
# v10.1: Introduced a flag to prevent writing out copies file (for noise files)
# v11: Modified get_index to use a hash instead of iterating thru an array
# v11: Improved runtime on chr12 from 25 mins to 2 mins
# v11: Fixed it so no information is recorded for copies unless required
use strict;
use warnings;
use POSIX;
# --- command-line handling, global state, and output file setup ----------
# Expects exactly two arguments: the RepeatMasker-style .out file and a
# 0/1 flag controlling whether per-copy detail files are written.
unless (@ARGV == 2){
	print "USAGE: $0 <input .out file> <write copies??(0/1)>\n";
	exit;
}
# Scratch variables shared across the whole script.
my ($ifname,$rec,@temp,%temphash,$ctr,$i,$j,$copy_file_flag);
# Parsed table, family bookkeeping, relationship tallies and histories.
my (@table,@famnames,@counts,%counts_index,$ups_ctr,$dns_ctr, $ref_img,
	$ref_fam,$ref_start,$ref_end,$ref_strand,$sub_fam,$sub_img,$sub_start,
	$sub_end,$sub_strand,%pos_relationships, %comp_relationships, %both_relationships,
	%pos_rel_history, %comp_rel_history, %both_rel_history, $tot_fams,
	$tot_recs,$user_t,$system_t, $cuser_t,$csystem_t);
# Per-strand copy records, only populated when the flag is set.
my ($pos_copy_ctr,$comp_copy_ctr, $both_copy_ctr,
	@pos_copies,@comp_copies,@both_copies);
$ifname=$ARGV[0];
chomp $ifname;
$copy_file_flag=$ARGV[1];
chomp $copy_file_flag;
# The flag must be exactly 0 or 1.
if(!($copy_file_flag == 0 || $copy_file_flag == 1) ){ print STDERR "flag can be only 0 or 1\nExiting..\n"; exit;}
# Open the input and the frequent-itemsets output; copy files only on demand.
unless(open(INFILEDATA,$ifname)){print "not able to open ".$ifname."\n\n";exit;}
unless(open(OUTFILEDATA,">$ifname.f_itemsets.tab")){print "not able to open ".$ifname."f_itemsets.tab\n\n";exit;}
if($copy_file_flag){
	unless(open(OUTFILECOPIESPOS,">$ifname.pos.copies.tab")){print "not able to open ".$ifname."pos.copies.tab \n\n";exit;}
	unless(open(OUTFILECOPIESCOMP,">$ifname.comp.copies.tab")){print "not able to open ".$ifname."comp.copies.tab \n\n";exit;}
	unless(open(OUTFILECOPIESBOTH,">$ifname.both.copies.tab")){print "not able to open ".$ifname."both.copies.tab \n\n";exit;}
}
# debugging
# unless(open(ERRFILE,">ERRFILE")){print "not able to open ERRFILE \n\n";exit;}
#to get the index position of a family in the @counts array
#it might be faster to just get info from the @counts array
#once we have the index pos
#params: $fam
# sub get_index{
# my ($fam,$ctr);
# $fam=$_[0];
# $fam=~ s/\s*//g;
# $ctr=0;
# foreach (@counts){
# $_->[0] =~ s/\s*//g;
# if ($_->[0] eq $fam){
# return $ctr;
# last;
# }
# else{
# $ctr++;
# }
# }
# }
# reimplementing the subroutine to use a hash to return the location of
# the family, help in speedup??
#params: $fam
# Return the position of a family name in @counts, via the %counts_index
# lookup hash (O(1) instead of scanning the array).
# Fix: the original ran the whitespace-stripping substitution directly on
# $_[0]; elements of @_ alias the caller's arguments, so that silently
# mutated the caller's data (e.g. entries of @table). Work on a copy.
# In practice the substitution was a no-op, since family names come from
# split(' ', ...), so behavior for all existing callers is unchanged.
# params: $fam (family name, possibly containing whitespace)
sub get_index{
	my $fam = $_[0];
	$fam =~ s/\s*//g;
	return $counts_index{$fam};
}
# SLURPING IN THE WHOLE .OUT REPORT FILE
# Each retained line becomes one row (array ref) of @table, split on
# whitespace; comment lines (containing '#') and short lines are skipped.
$ctr=0;
while($rec=<INFILEDATA>){
	if($rec =~ /#/){next;}
	if(length ($rec) < 10){next;}#for avoiding last line
	push @table, [split(' ',$rec)];
	$ctr++;
}
# record tot recs
$tot_recs = $ctr;
# Write version/timestamp headers to every output file that is open.
print OUTFILEDATA "\# Version: 11\n";
if($copy_file_flag){
	print OUTFILECOPIESPOS "\# Version: 10.1\n";
	print OUTFILECOPIESCOMP "\# Version: 10.1\n";
	print OUTFILECOPIESBOTH "\# Version: 10.1\n";
}
$i=localtime();
print OUTFILEDATA "\# Time: $i\n";
if($copy_file_flag){
	print OUTFILECOPIESPOS "\# Time: $i\n";
	print OUTFILECOPIESCOMP "\# Time: $i\n";
	print OUTFILECOPIESBOTH "\# Time: $i\n";
}
#calculating time taken (CPU minutes so far, via POSIX::ceil of times())
($user_t,$system_t,$cuser_t,$csystem_t) = times;
print OUTFILEDATA "\# Runtime details after reading in the file: \n";
print OUTFILEDATA "\# System time for process: ",ceil($system_t/60)," mins\n";
print OUTFILEDATA "\# User time for process: ",ceil($user_t/60)," mins\n";
print OUTFILEDATA "\n";
print STDERR "\# Runtime details after reading $tot_recs from file: \n";
print STDERR "\# System time for process: ",ceil($system_t/60)," mins\n";
print STDERR "\# User time for process: ",ceil($user_t/60)," mins\n";
#@table column layout (0-based indices on the line below the sample):
#1935 10.6 0.0 2.8 chr12 8936 9225 27748096 C R=286 Unknown (0) 283 2
#0 1 2 3 4 5 6 7 8 9 10 11 12 13
# i.e. col 5 = start, col 6 = end, col 8 = strand, col 9 = family name.
# FIND THE NUMBER OF OCCURRENCES OF EACH FAMILY
# get family names (column 9 of every row)
$ctr=0;
foreach(@table){
	$famnames[$ctr++]=$_->[9];
}
#removing duplicates
#sorting
@temp=sort @famnames;
@famnames=@temp;
# hash keys collapse duplicate names
%temphash = map { $_, 1 } @famnames;
@famnames = keys %temphash;
#sorting again (keys() returns unordered)
@temp=sort @famnames;
@famnames=@temp;
# INITIALIZING THE @COUNTS 2D ARRAY
# @count: fam occurences avg-len imagenum
$ctr=0;
foreach(@famnames){
	$counts[$ctr][0]=$_;
	#adding a value into the hash for family pos (used by get_index)
	$counts_index{"$_"} = $ctr;
	#initializing all counters to 0
	$counts[$ctr][1]=0;#occurences
	$counts[$ctr][2]=0;#avg length
	$counts[$ctr++][3]=0;#number of images (mined till now)
}
$tot_fams=$ctr;
# populating the @counts array
# count the number of times a family is found and its avg length
foreach $i (@counts){
	foreach $j (@table){
		if($i->[0] eq $j->[9]){
			$i->[1]++;#occurences
			$i->[2] = $i->[2] + ($j->[6] - $j->[5]);#total length till now
		}
	}
	$i->[2]=floor($i->[2] / $i->[1]);#avg length
}
# Add a field to end of @table
# where @table[][14]=image number (1-based copy counter per family)
foreach (@table){
	# since @counts[][3] is initialized to 0
	$_->[14]=1+$counts[&get_index($_->[9])][3]++;
}
#calculating time taken
($user_t,$system_t,$cuser_t,$csystem_t) = times;
print OUTFILEDATA "\# Runtime details after preparing \@counts and appending \@table: \n";
print OUTFILEDATA "\# System time for process: ",ceil($system_t/60)," mins\n";
print OUTFILEDATA "\# User time for process: ",ceil($user_t/60)," mins\n";
print OUTFILEDATA "\n";
print STDERR "\# Runtime details after preparing \@counts and appending \@table: \n";
print STDERR "\# System time for process: ",ceil($system_t/60)," mins\n";
print STDERR "\# User time for process: ",ceil($user_t/60)," mins\n";
print STDERR "\n";
# FINDING ALL RELATIONS
# @table sorted on start position
# 1935 10.6 0.0 2.8 chr12 8936 9225 27748096 C R=286 Unknown (0) 283 2 3
# 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14
# @count: fam occurences avg-len imagenum
# finding the relationships; tallies are kept separately for the plus
# strand, the complement strand, and strand-agnostic ("both"):
# %pos_relationships : [fam1 fam2 category] = count
# %comp_relationships : [fam1 fam2 category] = count
# %both_relationships : [fam1 fam2 category] = count
# the *_history hashes prevent the same image pair from being counted
# twice for the same relationship category:
# %pos_rel_history : [fam1 fam1-img category] = fam2
# %comp_rel_history : [fam1 fam1-img category] = fam2
# %both_rel_history : [fam1 fam1-img category] = fam2
# Zero all counters before the main scan below.
$ups_ctr=$dns_ctr=0;
if($copy_file_flag){
	$pos_copy_ctr=$comp_copy_ctr=$both_copy_ctr=0;
}
for $i (0 .. $#table){
$ref_start=$table[$i][5]; $ref_end=$table[$i][6];
$ref_strand=$table[$i][8]; $ref_fam=$table[$i][9];
$ref_img=$table[$i][14];
# cleaning up
$ref_start=~ s/\s//g; $ref_end=~ s/\s//g;
$ref_strand=~ s/\s//g; $ref_fam=~ s/\s//g;
$ref_img=~ s/\s//g;
print STDERR '.';
$j=$i;
# only look for relationships with images located before it and
# ending within 15k bases before ref_start or anytime after it
while(($j!=0) && ($table[$j-1][6] > $ref_start-15000)) {
$ups_ctr++;
$j--;
$sub_start=$table[$j][5]; $sub_end=$table[$j][6];
$sub_strand=$table[$j][8]; $sub_fam=$table[$j][9];
$sub_img=$table[$j][14];
# cleaning up
$sub_start=~ s/\s//g; $sub_end=~ s/\s//g;
$sub_strand=~ s/\s//g; $sub_fam=~ s/\s//g;
$sub_img=~ s/\s//g;
# Note: since all relationship are exclusive, I have used elsif
# In: Location of ref fam is entirely within sub fam (IN,CONT)
# IN should be first bcos if sub start is near the ref start, it will
# be listed right before the ref record in the list
if(($sub_start <= $ref_start) && ($sub_end >= $ref_end)){
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam IN"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam IN"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam IN"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam IN"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam IN"} =$sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam IN"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$ref_fam $sub_fam IN"}) {
$pos_relationships{"$ref_fam $sub_fam IN"} = 1;
}
else{
$pos_relationships{"$ref_fam $sub_fam IN"}++;
}
# add record for IN relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$ref_fam; $pos_copies[$pos_copy_ctr][1]="IN";
$pos_copies[$pos_copy_ctr][2]=$sub_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$ref_start; $pos_copies[$pos_copy_ctr][5]=$ref_end;
$pos_copies[$pos_copy_ctr][6]=$sub_start; $pos_copies[$pos_copy_ctr++][7]=$sub_end;
}
# increment reciprocal relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam CONT"}) {
$pos_relationships{"$sub_fam $ref_fam CONT"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam CONT"}++;
}
if($copy_file_flag){
# add record for CONT relationship
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="CONT";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam IN"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam IN"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam IN"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam IN"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam IN"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam IN"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$ref_fam $sub_fam IN"}) {
$comp_relationships{"$ref_fam $sub_fam IN"} = 1;
}
else{
$comp_relationships{"$ref_fam $sub_fam IN"}++;
}
# add record for IN relationship
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="IN";
$comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
$comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
}
# increment reciprocal relationship count or create relationship entry
if (!exists $comp_relationships{"$sub_fam $ref_fam CONT"}) {
$comp_relationships{"$sub_fam $ref_fam CONT"} = 1;
}
else{
$comp_relationships{"$sub_fam $ref_fam CONT"}++;
}
# add record for CONT relationship
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$sub_fam; $comp_copies[$comp_copy_ctr][1]="CONT";
$comp_copies[$comp_copy_ctr][2]=$ref_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$sub_start; $comp_copies[$comp_copy_ctr][5]=$sub_end;
$comp_copies[$comp_copy_ctr][6]=$ref_start; $comp_copies[$comp_copy_ctr++][7]=$ref_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam IN"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam IN"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam IN"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam IN"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam IN"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam IN"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$ref_fam $sub_fam IN"}) {
$both_relationships{"$ref_fam $sub_fam IN"} = 1;
}
else{
$both_relationships{"$ref_fam $sub_fam IN"}++;
}
# add record for IN relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$ref_fam; $both_copies[$both_copy_ctr][1]="IN";
$both_copies[$both_copy_ctr][2]=$sub_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$ref_start; $both_copies[$both_copy_ctr][5]=$ref_end;
$both_copies[$both_copy_ctr][6]=$sub_start; $both_copies[$both_copy_ctr++][7]=$sub_end;
}
# increment reciprocal relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam CONT"}) {
$both_relationships{"$sub_fam $ref_fam CONT"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam CONT"}++;
}
# add record for CONT relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="CONT";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}# IN end
# Overlap: If overlap is more than 10% of length of either family (Ovlap)
# now if subject fam ends within the reference fam
elsif (($sub_end > $ref_start) && ($sub_end < $ref_end)) {
my ($ovlap, $ref_ovlap, $sub_ovlap);
$ovlap = $sub_end - $ref_start;
$ref_ovlap = ($ovlap / ($ref_end - $ref_start)) * 100;
$sub_ovlap = ($ovlap / ($sub_end - $sub_start)) * 100;
# Overlap :10% to 30% (Ovlap-10to30)
if ((($ref_ovlap > 10.00) && ($ref_ovlap <= 30.00)) ||
(($sub_ovlap > 10.00) && ($sub_ovlap <= 30.00))) {
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam Ovlap-10to30"}) {
$pos_relationships{"$sub_fam $ref_fam Ovlap-10to30"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam Ovlap-10to30"}++;
}
# add record for Ovlap-10to30 relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="Ovlap-10to30";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$sub_fam $ref_fam Ovlap-10to30"}) {
$comp_relationships{"$sub_fam $ref_fam Ovlap-10to30"} = 1;
}
else{
$comp_relationships{"$sub_fam $ref_fam Ovlap-10to30"}++;
}
# add record for Ovlap-10to30 relationship
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$sub_fam; $comp_copies[$comp_copy_ctr][1]="Ovlap-10to30";
$comp_copies[$comp_copy_ctr][2]=$ref_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$sub_start; $comp_copies[$comp_copy_ctr][5]=$sub_end;
$comp_copies[$comp_copy_ctr][6]=$ref_start; $comp_copies[$comp_copy_ctr++][7]=$ref_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam Ovlap-10to30"}) {
$both_relationships{"$sub_fam $ref_fam Ovlap-10to30"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam Ovlap-10to30"}++;
}
# add record for Ovlap-10to30 relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="Ovlap-10to30";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
	    # Overlap: 30% to 70% (Ovlap-30to70).
	    # $ref_ovlap / $sub_ovlap hold the overlap length as a percentage
	    # of the reference / subject copy length (computed earlier in the
	    # chain).  The %*_rel_history hashes de-duplicate: a given pair of
	    # images is counted at most once per relationship type, checked in
	    # both (ref,sub) and (sub,ref) orientations.
	    elsif ((($ref_ovlap > 30.00) && ($ref_ovlap <= 70.00)) ||
		   (($sub_ovlap > 30.00) && ($sub_ovlap <= 70.00))) {
		if ($ref_strand eq $sub_strand){
		    if($ref_strand eq '+'){# pos strand
			# check if the ref image has this relation with this family already
			if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} ne $ref_fam))) {
			    # create history entry
			    $pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} = $sub_fam;
			    $pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} = $ref_fam;
			    # increment relationship count or create relationship entry
			    if (!exists $pos_relationships{"$sub_fam $ref_fam Ovlap-30to70"}) {
				$pos_relationships{"$sub_fam $ref_fam Ovlap-30to70"} = 1;
			    }
			    else{
				$pos_relationships{"$sub_fam $ref_fam Ovlap-30to70"}++;
			    }
			    # add record for Ovlap-30to70 relationship
			    # copy-record layout: [0]=family [1]=relation [2]=partner family
			    # [3]=strand marker ('+'/'C'/'B') [4,5]=family start/end
			    # [6,7]=partner start/end; counter is post-incremented on the
			    # last assignment.
			    if($copy_file_flag){
				$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="Ovlap-30to70";
				$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
				$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
				$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
			    }
			}
		    }
		    elsif($ref_strand eq 'C'){# comp strand
			# check if the ref image has this relation with this family already
			if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} ne $ref_fam))) {
			    # create history entry
			    $comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} = $sub_fam;
			    $comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} = $ref_fam;
			    # increment relationship count or create relationship entry
			    if (!exists $comp_relationships{"$sub_fam $ref_fam Ovlap-30to70"}) {
				$comp_relationships{"$sub_fam $ref_fam Ovlap-30to70"} = 1;
			    }
			    else{
				$comp_relationships{"$sub_fam $ref_fam Ovlap-30to70"}++;
			    }
			    # add record for Ovlap-30to70 relationship
			    if($copy_file_flag){
				$comp_copies[$comp_copy_ctr][0]=$sub_fam; $comp_copies[$comp_copy_ctr][1]="Ovlap-30to70";
				$comp_copies[$comp_copy_ctr][2]=$ref_fam; $comp_copies[$comp_copy_ctr][3]='C';
				$comp_copies[$comp_copy_ctr][4]=$sub_start; $comp_copies[$comp_copy_ctr][5]=$sub_end;
				$comp_copies[$comp_copy_ctr][6]=$ref_start; $comp_copies[$comp_copy_ctr++][7]=$ref_end;
			    }
			}
		    }
		}
		# irrespective of strand
		# check if the ref image has this relation with this family already
		if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} ne $ref_fam))) {
		    # create history entry
		    $both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} = $sub_fam;
		    $both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} = $ref_fam;
		    # increment relationship count or create relationship entry
		    if (!exists $both_relationships{"$sub_fam $ref_fam Ovlap-30to70"}) {
			$both_relationships{"$sub_fam $ref_fam Ovlap-30to70"} = 1;
		    }
		    else{
			$both_relationships{"$sub_fam $ref_fam Ovlap-30to70"}++;
		    }
		    # add record for Ovlap-30to70 relationship ('B' = both/either strand)
		    if($copy_file_flag){
			$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="Ovlap-30to70";
			$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
			$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
			$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
		    }
		}
	    }
	    # Overlap : >70% (Ovlap-70plus) — same bookkeeping as the bin above,
	    # for overlaps exceeding 70% of either copy's length.
	    elsif (($ref_ovlap > 70.00) || ($sub_ovlap > 70.00)) {
		if ($ref_strand eq $sub_strand){
		    if($ref_strand eq '+'){# pos strand
			# check if the ref image has this relation with this family already
			if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} ne $ref_fam))) {
			    # create history entry
			    $pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} = $sub_fam;
			    $pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} = $ref_fam;
			    # increment relationship count or create relationship entry
			    if (!exists $pos_relationships{"$sub_fam $ref_fam Ovlap-70plus"}) {
				$pos_relationships{"$sub_fam $ref_fam Ovlap-70plus"} = 1;
			    }
			    else{
				$pos_relationships{"$sub_fam $ref_fam Ovlap-70plus"}++;
			    }
			    # add record for Ovlap-70plus relationship
			    if($copy_file_flag){
				$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="Ovlap-70plus";
				$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
				$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
				$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
			    }
			}
		    }
		    elsif($ref_strand eq 'C'){# comp strand
			# check if the ref image has this relation with this family already
			if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} ne $ref_fam))) {
			    # create history entry
			    $comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} = $sub_fam;
			    $comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} = $ref_fam;
			    # increment relationship count or create relationship entry
			    if (!exists $comp_relationships{"$sub_fam $ref_fam Ovlap-70plus"}) {
				$comp_relationships{"$sub_fam $ref_fam Ovlap-70plus"} = 1;
			    }
			    else{
				$comp_relationships{"$sub_fam $ref_fam Ovlap-70plus"}++;
			    }
			    # add record for Ovlap-70plus relationship
			    if($copy_file_flag){
				$comp_copies[$comp_copy_ctr][0]=$sub_fam; $comp_copies[$comp_copy_ctr][1]="Ovlap-70plus";
				$comp_copies[$comp_copy_ctr][2]=$ref_fam; $comp_copies[$comp_copy_ctr][3]='C';
				$comp_copies[$comp_copy_ctr][4]=$sub_start; $comp_copies[$comp_copy_ctr][5]=$sub_end;
				$comp_copies[$comp_copy_ctr][6]=$ref_start; $comp_copies[$comp_copy_ctr++][7]=$ref_end;
			    }
			}
		    }
		}
		# irrespective of strand
		# check if the ref image has this relation with this family already
		if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} ne $ref_fam))) {
		    # create history entry
		    $both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} = $sub_fam;
		    $both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} = $ref_fam;
		    # increment relationship count or create relationship entry
		    if (!exists $both_relationships{"$sub_fam $ref_fam Ovlap-70plus"}) {
			$both_relationships{"$sub_fam $ref_fam Ovlap-70plus"} = 1;
		    }
		    else{
			$both_relationships{"$sub_fam $ref_fam Ovlap-70plus"}++;
		    }
		    # add record for Ovlap-70plus relationship
		    if($copy_file_flag){
			$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="Ovlap-70plus";
			$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
			$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
			$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
		    }
		}
	    }
}# overlap end
	# Upstream bins u1-u4: the subject copy ends 0-500 / 500-1000 /
	# 1000-5000 / 5000-10000 bases before the reference start.
	# On the '+' strand the subject family is recorded as upstream of the
	# reference; on the 'C' (complement) strand the key order is swapped
	# because upstream is counted from the right end of the sequence.
	# Upstream: u1 (0-500 bases)
	elsif(($sub_end <= $ref_start) && ($sub_end > $ref_start-500)){
	    if ($ref_strand eq $sub_strand){
		if($ref_strand eq '+'){# pos strand
		    # check if the ref image has this relation with this family already
		    if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam u1"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam u1"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam u1"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam u1"} ne $ref_fam))) {
			# create history entry
			$pos_rel_history{"$ref_fam $ref_img $sub_fam u1"} = $sub_fam;
			$pos_rel_history{"$sub_fam $sub_img $ref_fam u1"} = $ref_fam;
			# increment relationship count or create relationship entry
			if (!exists $pos_relationships{"$sub_fam $ref_fam u1"}) {
			    $pos_relationships{"$sub_fam $ref_fam u1"} = 1;
			}
			else{
			    $pos_relationships{"$sub_fam $ref_fam u1"}++;
			}
			# add record for u1 relationship
			# copy-record layout: [0]=family [1]=relation [2]=partner family
			# [3]=strand marker [4,5]=family start/end [6,7]=partner start/end
			if($copy_file_flag){
			    $pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="u1";
			    $pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
			    $pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
			    $pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
			}
		    }
		}
		elsif($ref_strand eq 'C'){# comp strand
		    # check if the ref image has this relation with this family already
		    if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam u1"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam u1"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam u1"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam u1"} ne $ref_fam))) {
			# create history entry
			$comp_rel_history{"$ref_fam $ref_img $sub_fam u1"} = $sub_fam;
			$comp_rel_history{"$sub_fam $sub_img $ref_fam u1"} = $ref_fam;
			# increment relationship count or create relationship entry
			if (!exists $comp_relationships{"$ref_fam $sub_fam u1"}) {
			    # now ref fam is upstream of sub fam as we are
			    # counting from right
			    $comp_relationships{"$ref_fam $sub_fam u1"} = 1;
			}
			else{
			    $comp_relationships{"$ref_fam $sub_fam u1"}++;
			}
			# add record for u1 relationship (ref/sub roles swapped, see above)
			if($copy_file_flag){
			    $comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="u1";
			    $comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='C';
			    $comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
			    $comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
			}
		    }
		}
	    }
	    # irrespective of strand
	    # check if the ref image has this relation with this family already
	    if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam u1"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam u1"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam u1"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam u1"} ne $ref_fam))) {
		# create history entry
		$both_rel_history{"$ref_fam $ref_img $sub_fam u1"} = $sub_fam;
		$both_rel_history{"$sub_fam $sub_img $ref_fam u1"} = $ref_fam;
		# increment relationship count or create relationship entry
		if (!exists $both_relationships{"$sub_fam $ref_fam u1"}) {
		    $both_relationships{"$sub_fam $ref_fam u1"} = 1;
		}
		else{
		    $both_relationships{"$sub_fam $ref_fam u1"}++;
		}
		# add record for u1 relationship
		if($copy_file_flag){
		    $both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="u1";
		    $both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
		    $both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
		    $both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
		}
	    }
	}
	# Upstream: u2 (500-1000 bases)
	elsif(($sub_end <= $ref_start-500) && ($sub_end > $ref_start-1000)){
	    if ($ref_strand eq $sub_strand){
		if($ref_strand eq '+'){# pos strand
		    # check if the ref image has this relation with this family already
		    if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam u2"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam u2"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam u2"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam u2"} ne $ref_fam))) {
			# create history entry
			$pos_rel_history{"$ref_fam $ref_img $sub_fam u2"} = $sub_fam;
			$pos_rel_history{"$sub_fam $sub_img $ref_fam u2"} = $ref_fam;
			# increment relationship count or create relationship entry
			if (!exists $pos_relationships{"$sub_fam $ref_fam u2"}) {
			    $pos_relationships{"$sub_fam $ref_fam u2"} = 1;
			}
			else{
			    $pos_relationships{"$sub_fam $ref_fam u2"}++;
			}
			# add record for u2 relationship
			if($copy_file_flag){
			    $pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="u2";
			    $pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
			    $pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
			    $pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
			}
		    }
		}
		elsif($ref_strand eq 'C'){# comp strand
		    # check if the ref image has this relation with this family already
		    if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam u2"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam u2"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam u2"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam u2"} ne $ref_fam))) {
			# create history entry
			$comp_rel_history{"$ref_fam $ref_img $sub_fam u2"} = $sub_fam;
			$comp_rel_history{"$sub_fam $sub_img $ref_fam u2"} = $ref_fam;
			# increment relationship count or create relationship entry
			if (!exists $comp_relationships{"$ref_fam $sub_fam u2"}) {
			    # now ref fam is upstream of sub fam as we are
			    # counting from right
			    $comp_relationships{"$ref_fam $sub_fam u2"} = 1;
			}
			else{
			    $comp_relationships{"$ref_fam $sub_fam u2"}++;
			}
			# add record for u2 relationship (ref/sub roles swapped, see above)
			if($copy_file_flag){
			    $comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="u2";
			    $comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='C';
			    $comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
			    $comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
			}
		    }
		}
	    }
	    # irrespective of strand
	    # check if the ref image has this relation with this family already
	    if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam u2"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam u2"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam u2"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam u2"} ne $ref_fam))) {
		# create history entry
		$both_rel_history{"$ref_fam $ref_img $sub_fam u2"} = $sub_fam;
		$both_rel_history{"$sub_fam $sub_img $ref_fam u2"} = $ref_fam;
		# increment relationship count or create relationship entry
		if (!exists $both_relationships{"$sub_fam $ref_fam u2"}) {
		    $both_relationships{"$sub_fam $ref_fam u2"} = 1;
		}
		else{
		    $both_relationships{"$sub_fam $ref_fam u2"}++;
		}
		# add record for u2 relationship
		if($copy_file_flag){
		    $both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="u2";
		    $both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
		    $both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
		    $both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
		}
	    }
	}
	# Upstream: u3 (1000-5000 bases)
	elsif(($sub_end <= $ref_start-1000) && ($sub_end > $ref_start-5000)){
	    if ($ref_strand eq $sub_strand){
		if($ref_strand eq '+'){# pos strand
		    # check if the ref image has this relation with this family already
		    if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam u3"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam u3"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam u3"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam u3"} ne $ref_fam))) {
			# create history entry
			$pos_rel_history{"$ref_fam $ref_img $sub_fam u3"} = $sub_fam;
			$pos_rel_history{"$sub_fam $sub_img $ref_fam u3"} = $ref_fam;
			# increment relationship count or create relationship entry
			if (!exists $pos_relationships{"$sub_fam $ref_fam u3"}) {
			    $pos_relationships{"$sub_fam $ref_fam u3"} = 1;
			}
			else{
			    $pos_relationships{"$sub_fam $ref_fam u3"}++;
			}
			# add record for u3 relationship
			if($copy_file_flag){
			    $pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="u3";
			    $pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
			    $pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
			    $pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
			}
		    }
		}
		elsif($ref_strand eq 'C'){# comp strand
		    # check if the ref image has this relation with this family already
		    if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam u3"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam u3"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam u3"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam u3"} ne $ref_fam))) {
			# create history entry
			$comp_rel_history{"$ref_fam $ref_img $sub_fam u3"} = $sub_fam;
			$comp_rel_history{"$sub_fam $sub_img $ref_fam u3"} = $ref_fam;
			# increment relationship count or create relationship entry
			if (!exists $comp_relationships{"$ref_fam $sub_fam u3"}) {
			    # now ref fam is upstream of sub fam as we are
			    # counting from right
			    $comp_relationships{"$ref_fam $sub_fam u3"} = 1;
			}
			else{
			    $comp_relationships{"$ref_fam $sub_fam u3"}++;
			}
			# add record for u3 relationship (ref/sub roles swapped, see above)
			if($copy_file_flag){
			    $comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="u3";
			    $comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='C';
			    $comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
			    $comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
			}
		    }
		}
	    }
	    # irrespective of strand
	    # check if the ref image has this relation with this family already
	    if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam u3"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam u3"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam u3"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam u3"} ne $ref_fam))) {
		# create history entry
		$both_rel_history{"$ref_fam $ref_img $sub_fam u3"} = $sub_fam;
		$both_rel_history{"$sub_fam $sub_img $ref_fam u3"} = $ref_fam;
		# increment relationship count or create relationship entry
		if (!exists $both_relationships{"$sub_fam $ref_fam u3"}) {
		    $both_relationships{"$sub_fam $ref_fam u3"} = 1;
		}
		else{
		    $both_relationships{"$sub_fam $ref_fam u3"}++;
		}
		# add record for u3 relationship
		if($copy_file_flag){
		    $both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="u3";
		    $both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
		    $both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
		    $both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
		}
	    }
	}
	# Upstream: u4 (5000-10000 bases)
	elsif(($sub_end <= $ref_start-5000) && ($sub_end > $ref_start-10000)){
	    if ($ref_strand eq $sub_strand){
		if($ref_strand eq '+'){# pos strand
		    # check if the ref image has this relation with this family already
		    if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam u4"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam u4"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam u4"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam u4"} ne $ref_fam))) {
			# create history entry
			$pos_rel_history{"$ref_fam $ref_img $sub_fam u4"} = $sub_fam;
			$pos_rel_history{"$sub_fam $sub_img $ref_fam u4"} = $ref_fam;
			# increment relationship count or create relationship entry
			if (!exists $pos_relationships{"$sub_fam $ref_fam u4"}) {
			    $pos_relationships{"$sub_fam $ref_fam u4"} = 1;
			}
			else{
			    $pos_relationships{"$sub_fam $ref_fam u4"}++;
			}
			# add record for u4 relationship
			if($copy_file_flag){
			    $pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="u4";
			    $pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
			    $pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
			    $pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
			}
		    }
		}
		elsif($ref_strand eq 'C'){# comp strand
		    # check if the ref image has this relation with this family already
		    if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam u4"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam u4"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam u4"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam u4"} ne $ref_fam))) {
			# create history entry
			$comp_rel_history{"$ref_fam $ref_img $sub_fam u4"} = $sub_fam;
			$comp_rel_history{"$sub_fam $sub_img $ref_fam u4"} = $ref_fam;
			# increment relationship count or create relationship entry
			if (!exists $comp_relationships{"$ref_fam $sub_fam u4"}) {
			    # now ref fam is upstream of sub fam as we are
			    # counting from right
			    $comp_relationships{"$ref_fam $sub_fam u4"} = 1;
			}
			else{
			    $comp_relationships{"$ref_fam $sub_fam u4"}++;
			}
			# add record for u4 relationship (ref/sub roles swapped, see above)
			if($copy_file_flag){
			    $comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="u4";
			    $comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='C';
			    $comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
			    $comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
			}
		    }
		}
	    }
	    # irrespective of strand
	    # check if the ref image has this relation with this family already
	    if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam u4"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam u4"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam u4"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam u4"} ne $ref_fam))) {
		# create history entry
		$both_rel_history{"$ref_fam $ref_img $sub_fam u4"} = $sub_fam;
		$both_rel_history{"$sub_fam $sub_img $ref_fam u4"} = $ref_fam;
		# increment relationship count or create relationship entry
		if (!exists $both_relationships{"$sub_fam $ref_fam u4"}) {
		    $both_relationships{"$sub_fam $ref_fam u4"} = 1;
		}
		else{
		    $both_relationships{"$sub_fam $ref_fam u4"}++;
		}
		# add record for u4 relationship
		if($copy_file_flag){
		    $both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="u4";
		    $both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
		    $both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
		    $both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
		}
	    }
	}
# Upstream: u5 (10000-15000 bases)
elsif(($sub_end <= $ref_start-10000) && ($sub_end > $ref_start-15000)){
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam u5"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam u5"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam u5"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam u5"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam u5"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam u5"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam u5"}) {
$pos_relationships{"$sub_fam $ref_fam u5"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam u5"}++;
}
# add record for u5 relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="u5";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam u5"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam u5"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam u5"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam u5"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam u5"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam u5"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$ref_fam $sub_fam u5"}) {
# now ref fam is upstream of sub fam as we are
# counting from right
$comp_relationships{"$ref_fam $sub_fam u5"} = 1;
}
else{
$comp_relationships{"$ref_fam $sub_fam u5"}++;
}
# add record for u5 relationship
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="u5";
$comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='B';
$comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
$comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam u5"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam u5"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam u5"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam u5"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam u5"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam u5"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam u5"}) {
$both_relationships{"$sub_fam $ref_fam u5"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam u5"}++;
}
# add record for u5 relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="u5";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
# if($copy_file_flag){
# # temporary fix to reduce memory consumption when
# # copies are not needed
# @pos_copies=();
# @comp_copies=();
# @both_copies=();# deallocating memory
# $pos_copy_ctr=$comp_copy_ctr=$both_copy_ctr=0;# resetting counters
# }
# print STDERR '.';
}# end while
$j=$i;
	# only look for relationships with images located after it,
	# starting within 15k bases after ref_end (enforced by the while
	# condition below) or anytime after ref_start (enforced by sorting
	# the list on start pos)
while(($j!=$#table) && ($table[$j+1][5] < $ref_end+15000)){
$dns_ctr++;
$j++;
$sub_start=$table[$j][5]; $sub_end=$table[$j][6];
$sub_strand=$table[$j][8]; $sub_fam=$table[$j][9];
$sub_img=$table[$j][14];
# cleaning up
$sub_start=~ s/\s//g; $sub_end=~ s/\s//g;
$sub_strand=~ s/\s//g; $sub_fam=~ s/\s//g;
$sub_img=~ s/\s//g;
# Note: since all relationship are exclusive, I have used elsif
# In: Location of ref fam is entirely within sub fam (IN)
# IN should be first bcos if sub start is near the ref start, it will
# be listed right after the ref record in the list
	    # IN / CONT: the reference copy lies entirely within the subject
	    # copy (same start, subject ends at or after the reference end).
	    # Records "ref IN sub" plus the reciprocal "sub CONT(ains) ref".
	    if(($sub_start == $ref_start) && ($sub_end >= $ref_end)){
		if ($ref_strand eq $sub_strand){
		    if($ref_strand eq '+'){# pos strand
			# check if the ref image has this relation with this family already
			if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam IN"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam IN"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam IN"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam IN"} ne $ref_fam))) {
			    # create history entry
			    $pos_rel_history{"$ref_fam $ref_img $sub_fam IN"} = $sub_fam;
			    $pos_rel_history{"$sub_fam $sub_img $ref_fam IN"} = $ref_fam;
			    # increment relationship count or create relationship entry
			    if (!exists $pos_relationships{"$ref_fam $sub_fam IN"}) {
				$pos_relationships{"$ref_fam $sub_fam IN"} = 1;
			    }
			    else{
				$pos_relationships{"$ref_fam $sub_fam IN"}++;
			    }
			    # add record for IN relationship
			    # copy-record layout: [0]=family [1]=relation [2]=partner family
			    # [3]=strand marker [4,5]=family start/end [6,7]=partner start/end
			    if($copy_file_flag){
				$pos_copies[$pos_copy_ctr][0]=$ref_fam; $pos_copies[$pos_copy_ctr][1]="IN";
				$pos_copies[$pos_copy_ctr][2]=$sub_fam; $pos_copies[$pos_copy_ctr][3]='+';
				$pos_copies[$pos_copy_ctr][4]=$ref_start; $pos_copies[$pos_copy_ctr][5]=$ref_end;
				$pos_copies[$pos_copy_ctr][6]=$sub_start; $pos_copies[$pos_copy_ctr++][7]=$sub_end;
			    }
			    # increment reciprocal relationship count or create relationship entry
			    if (!exists $pos_relationships{"$sub_fam $ref_fam CONT"}) {
				$pos_relationships{"$sub_fam $ref_fam CONT"} = 1;
			    }
			    else{
				$pos_relationships{"$sub_fam $ref_fam CONT"}++;
			    }
			    # add record for CONT relationship
			    if($copy_file_flag){
				$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="CONT";
				$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
				$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
				$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
			    }
			}
		    }
		    elsif($ref_strand eq 'C'){# comp strand
			# check if the ref image has this relation with this family already
			if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam IN"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam IN"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam IN"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam IN"} ne $ref_fam))) {
			    # create history entry
			    $comp_rel_history{"$ref_fam $ref_img $sub_fam IN"} = $sub_fam;
			    $comp_rel_history{"$sub_fam $sub_img $ref_fam IN"} = $ref_fam;
			    # increment relationship count or create relationship entry
			    if (!exists $comp_relationships{"$ref_fam $sub_fam IN"}) {
				$comp_relationships{"$ref_fam $sub_fam IN"} = 1;
			    }
			    else{
				$comp_relationships{"$ref_fam $sub_fam IN"}++;
			    }
			    # add record for IN relationship
			    if($copy_file_flag){
				$comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="IN";
				$comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='C';
				$comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
				$comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
			    }
			    # increment reciprocal relationship count or create relationship entry
			    if (!exists $comp_relationships{"$sub_fam $ref_fam CONT"}) {
				$comp_relationships{"$sub_fam $ref_fam CONT"} = 1;
			    }
			    else{
				$comp_relationships{"$sub_fam $ref_fam CONT"}++;
			    }
			    # add record for CONT relationship
			    if($copy_file_flag){
				$comp_copies[$comp_copy_ctr][0]=$sub_fam; $comp_copies[$comp_copy_ctr][1]="CONT";
				$comp_copies[$comp_copy_ctr][2]=$ref_fam; $comp_copies[$comp_copy_ctr][3]='C';
				$comp_copies[$comp_copy_ctr][4]=$sub_start; $comp_copies[$comp_copy_ctr][5]=$sub_end;
				$comp_copies[$comp_copy_ctr][6]=$ref_start; $comp_copies[$comp_copy_ctr++][7]=$ref_end;
			    }
			}
		    }
		}
		# irrespective of strand
		# check if the ref image has this relation with this family already
		if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam IN"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam IN"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam IN"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam IN"} ne $ref_fam))) {
		    # create history entry
		    $both_rel_history{"$ref_fam $ref_img $sub_fam IN"} = $sub_fam;
		    $both_rel_history{"$sub_fam $sub_img $ref_fam IN"} = $ref_fam;
		    # increment relationship count or create relationship entry
		    if (!exists $both_relationships{"$ref_fam $sub_fam IN"}) {
			$both_relationships{"$ref_fam $sub_fam IN"} = 1;
		    }
		    else{
			$both_relationships{"$ref_fam $sub_fam IN"}++;
		    }
		    # add record for IN relationship
		    if($copy_file_flag){
			$both_copies[$both_copy_ctr][0]=$ref_fam; $both_copies[$both_copy_ctr][1]="IN";
			$both_copies[$both_copy_ctr][2]=$sub_fam; $both_copies[$both_copy_ctr][3]='B';
			$both_copies[$both_copy_ctr][4]=$ref_start; $both_copies[$both_copy_ctr][5]=$ref_end;
			$both_copies[$both_copy_ctr][6]=$sub_start; $both_copies[$both_copy_ctr++][7]=$sub_end;
		    }
		    # increment reciprocal relationship count or create relationship entry
		    if (!exists $both_relationships{"$sub_fam $ref_fam CONT"}) {
			$both_relationships{"$sub_fam $ref_fam CONT"} = 1;
		    }
		    else{
			$both_relationships{"$sub_fam $ref_fam CONT"}++;
		    }
		    # add record for CONT relationship
		    if($copy_file_flag){
			$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="CONT";
			$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
			$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
			$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
		    }
		}
	    }# IN end
# Overlap: If overlap is more than 10% of length of either family (Ovlap)
# now if subject fam ends within the reference fam
# Overlap branch: the subject family starts inside the reference family's
# span.  The overlap is bucketed by its percentage of either family's length
# (10-30%, 30-70%, >70%) and recorded per strand (+ / C) and strand-blind (B).
elsif (($sub_start > $ref_start) && ($sub_start < $ref_end)) {
my ($ovlap, $ref_ovlap, $sub_ovlap);
# overlap length, then expressed as a percentage of each family's span
$ovlap = $ref_end - $sub_start;
# NOTE(review): assumes both spans have nonzero length ($ref_end > $ref_start
# and $sub_end > $sub_start); a zero-length span would divide by zero here --
# confirm upstream record parsing guarantees this.
$ref_ovlap = ($ovlap / ($ref_end - $ref_start)) * 100;
$sub_ovlap = ($ovlap / ($sub_end - $sub_start)) * 100;
# Overlap :10% to 30% (Ovlap-10to30)
if ((($ref_ovlap > 10.00) && ($ref_ovlap <= 30.00)) ||
(($sub_ovlap > 10.00) && ($sub_ovlap <= 30.00))) {
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} ne $ref_fam))) {
# create history entry (both directions, so the pair is counted once)
$pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam Ovlap-10to30"}) {
$pos_relationships{"$sub_fam $ref_fam Ovlap-10to30"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam Ovlap-10to30"}++;
}
# add record for Ovlap-10to30 relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="Ovlap-10to30";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$sub_fam $ref_fam Ovlap-10to30"}) {
$comp_relationships{"$sub_fam $ref_fam Ovlap-10to30"} = 1;
}
else{
$comp_relationships{"$sub_fam $ref_fam Ovlap-10to30"}++;
}
# add record for Ovlap-10to30 relationship
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$sub_fam; $comp_copies[$comp_copy_ctr][1]="Ovlap-10to30";
$comp_copies[$comp_copy_ctr][2]=$ref_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$sub_start; $comp_copies[$comp_copy_ctr][5]=$sub_end;
$comp_copies[$comp_copy_ctr][6]=$ref_start; $comp_copies[$comp_copy_ctr++][7]=$ref_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam Ovlap-10to30"}) {
$both_relationships{"$sub_fam $ref_fam Ovlap-10to30"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam Ovlap-10to30"}++;
}
# add record for Ovlap-10to30 relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="Ovlap-10to30";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
# Overlap :30% to 70% (Ovlap-30to70)
elsif ((($ref_ovlap > 30.00) && ($ref_ovlap <= 70.00)) ||
(($sub_ovlap > 30.00) && ($sub_ovlap <= 70.00))) {
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam Ovlap-30to70"}) {
$pos_relationships{"$sub_fam $ref_fam Ovlap-30to70"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam Ovlap-30to70"}++;
}
# add record for Ovlap-30to70 relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="Ovlap-30to70";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$sub_fam $ref_fam Ovlap-30to70"}) {
$comp_relationships{"$sub_fam $ref_fam Ovlap-30to70"} = 1;
}
else{
$comp_relationships{"$sub_fam $ref_fam Ovlap-30to70"}++;
}
# add record for Ovlap-30to70 relationship
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$sub_fam; $comp_copies[$comp_copy_ctr][1]="Ovlap-30to70";
$comp_copies[$comp_copy_ctr][2]=$ref_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$sub_start; $comp_copies[$comp_copy_ctr][5]=$sub_end;
$comp_copies[$comp_copy_ctr][6]=$ref_start; $comp_copies[$comp_copy_ctr++][7]=$ref_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam Ovlap-30to70"}) {
$both_relationships{"$sub_fam $ref_fam Ovlap-30to70"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam Ovlap-30to70"}++;
}
# add record for Ovlap-30to70 relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="Ovlap-30to70";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
# Overlap : >70% (Ovlap-70plus)
elsif (($ref_ovlap > 70.00) || ($sub_ovlap > 70.00)) {
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam Ovlap-70plus"}) {
$pos_relationships{"$sub_fam $ref_fam Ovlap-70plus"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam Ovlap-70plus"}++;
}
# add record for Ovlap-70plus relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="Ovlap-70plus";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$sub_fam $ref_fam Ovlap-70plus"}) {
$comp_relationships{"$sub_fam $ref_fam Ovlap-70plus"} = 1;
}
else{
$comp_relationships{"$sub_fam $ref_fam Ovlap-70plus"}++;
}
# add record for Ovlap-70plus relationship
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$sub_fam; $comp_copies[$comp_copy_ctr][1]="Ovlap-70plus";
$comp_copies[$comp_copy_ctr][2]=$ref_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$sub_start; $comp_copies[$comp_copy_ctr][5]=$sub_end;
$comp_copies[$comp_copy_ctr][6]=$ref_start; $comp_copies[$comp_copy_ctr++][7]=$ref_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam Ovlap-70plus"}) {
$both_relationships{"$sub_fam $ref_fam Ovlap-70plus"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam Ovlap-70plus"}++;
}
# add record for Ovlap-70plus relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="Ovlap-70plus";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
}# overlap end
# Downstream: d1 (0-500 bases)
# Downstream d1 bucket: subject starts 0-500 bases downstream of the
# reference's end.  Recorded per strand and strand-blind, with the history
# hashes preventing the same image pair being counted twice.
elsif(($sub_start >= $ref_end) && ($sub_start < $ref_end+500)){
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam d1"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam d1"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam d1"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam d1"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam d1"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam d1"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam d1"}) {
$pos_relationships{"$sub_fam $ref_fam d1"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam d1"}++;
}
# add record for d1 relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="d1";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam d1"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam d1"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam d1"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam d1"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam d1"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam d1"} = $ref_fam;
# increment relationship count or create relationship entry
# NOTE: key order deliberately reversed (ref before sub) on the comp strand
if (!exists $comp_relationships{"$ref_fam $sub_fam d1"}) {
# now ref fam is upstream of sub fam as we are
# counting from right
$comp_relationships{"$ref_fam $sub_fam d1"} = 1;
}
else{
$comp_relationships{"$ref_fam $sub_fam d1"}++;
}
# add record for d1 relationship (ref-first to match the reversed key)
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="d1";
$comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
$comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam d1"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam d1"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam d1"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam d1"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam d1"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam d1"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam d1"}) {
$both_relationships{"$sub_fam $ref_fam d1"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam d1"}++;
}
# add record for d1 relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="d1";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
# Downstream: d2 (500-1000 bases)
# Downstream d2 bucket: subject starts 500-1000 bases downstream of the
# reference's end.  Same bookkeeping pattern as d1.
elsif(($sub_start >= $ref_end+500) && ($sub_start < $ref_end+1000)){
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam d2"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam d2"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam d2"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam d2"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam d2"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam d2"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam d2"}) {
$pos_relationships{"$sub_fam $ref_fam d2"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam d2"}++;
}
# add record for d2 relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="d2";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam d2"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam d2"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam d2"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam d2"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam d2"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam d2"} = $ref_fam;
# increment relationship count or create relationship entry
# NOTE: key order deliberately reversed (ref before sub) on the comp strand
if (!exists $comp_relationships{"$ref_fam $sub_fam d2"}) {
# now ref fam is upstream of sub fam as we are
# counting from right
$comp_relationships{"$ref_fam $sub_fam d2"} = 1;
}
else{
$comp_relationships{"$ref_fam $sub_fam d2"}++;
}
# add record for d2 relationship (ref-first to match the reversed key)
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="d2";
$comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
$comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam d2"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam d2"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam d2"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam d2"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam d2"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam d2"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam d2"}) {
$both_relationships{"$sub_fam $ref_fam d2"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam d2"}++;
}
# add record for d2 relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="d2";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
# Downstream: d3 (1000-5000 bases)
# Downstream d3 bucket: subject starts 1000-5000 bases downstream of the
# reference's end.  Same bookkeeping pattern as d1/d2.
elsif(($sub_start >= $ref_end+1000) && ($sub_start < $ref_end+5000)){
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam d3"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam d3"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam d3"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam d3"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam d3"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam d3"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam d3"}) {
$pos_relationships{"$sub_fam $ref_fam d3"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam d3"}++;
}
# add record for d3 relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="d3";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam d3"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam d3"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam d3"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam d3"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam d3"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam d3"} = $ref_fam;
# increment relationship count or create relationship entry
# NOTE: key order deliberately reversed (ref before sub) on the comp strand
if (!exists $comp_relationships{"$ref_fam $sub_fam d3"}) {
# now ref fam is upstream of sub fam as we are
# counting from right
$comp_relationships{"$ref_fam $sub_fam d3"} = 1;
}
else{
$comp_relationships{"$ref_fam $sub_fam d3"}++;
}
# add record for d3 relationship (ref-first to match the reversed key)
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="d3";
$comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
$comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam d3"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam d3"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam d3"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam d3"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam d3"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam d3"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam d3"}) {
$both_relationships{"$sub_fam $ref_fam d3"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam d3"}++;
}
# add record for d3 relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="d3";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
# Downstream: d4 (5000-10000 bases)
# Downstream d4 bucket: subject starts 5000-10000 bases downstream of the
# reference's end.  Same bookkeeping pattern as d1/d2/d3/d5.
elsif(($sub_start >= $ref_end+5000) && ($sub_start < $ref_end+10000)){
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam d4"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam d4"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam d4"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam d4"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam d4"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam d4"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam d4"}) {
$pos_relationships{"$sub_fam $ref_fam d4"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam d4"}++;
}
# add record for d4 relationship
# BUG FIX: this record previously stored the fields ref-first, unlike
# every other pos-strand d* branch (d1/d2/d3/d5 all store sub-first) and
# inconsistent with the "$sub_fam $ref_fam d4" relationship key above;
# ref-first ordering is only used (and commented) on the comp strand.
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="d4";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam d4"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam d4"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam d4"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam d4"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam d4"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam d4"} = $ref_fam;
# increment relationship count or create relationship entry
# NOTE: key order deliberately reversed (ref before sub) on the comp strand
if (!exists $comp_relationships{"$ref_fam $sub_fam d4"}) {
# now ref fam is upstream of sub fam as we are
# counting from right
$comp_relationships{"$ref_fam $sub_fam d4"} = 1;
}
else{
$comp_relationships{"$ref_fam $sub_fam d4"}++;
}
# add record for d4 relationship (ref-first to match the reversed key)
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="d4";
$comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
$comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam d4"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam d4"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam d4"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam d4"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam d4"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam d4"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam d4"}) {
$both_relationships{"$sub_fam $ref_fam d4"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam d4"}++;
}
# add record for d4 relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="d4";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
# Downstream: d5 (10000-15000 bases)
# Downstream d5 bucket: subject starts 10000-15000 bases downstream of the
# reference's end.  Same bookkeeping pattern as d1-d4.
elsif(($sub_start >= $ref_end+10000) && ($sub_start < $ref_end+15000)){
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam d5"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam d5"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam d5"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam d5"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam d5"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam d5"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam d5"}) {
$pos_relationships{"$sub_fam $ref_fam d5"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam d5"}++;
}
# add record for d5 relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="d5";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam d5"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam d5"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam d5"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam d5"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam d5"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam d5"} = $ref_fam;
# increment relationship count or create relationship entry
# NOTE: key order deliberately reversed (ref before sub) on the comp strand
if (!exists $comp_relationships{"$ref_fam $sub_fam d5"}) {
# now ref fam is upstream of sub fam as we are
# counting from right
$comp_relationships{"$ref_fam $sub_fam d5"} = 1;
}
else{
$comp_relationships{"$ref_fam $sub_fam d5"}++;
}
# add record for d5 relationship (ref-first to match the reversed key)
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="d5";
$comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
$comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam d5"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam d5"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam d5"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam d5"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam d5"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam d5"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam d5"}) {
$both_relationships{"$sub_fam $ref_fam d5"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam d5"}++;
}
# add record for d5 relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="d5";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
# if($copy_file_flag){
# # temporary fix to reduce memory consumption when
# # copies are not needed
# @pos_copies=();
# @comp_copies=();
# @both_copies=();# deallocating memory
# $pos_copy_ctr=$comp_copy_ctr=$both_copy_ctr=0;# resetting counters
# }
# print STDERR '.';
}#end while
}# end relationship finding
#calculating time taken
($user_t,$system_t,$cuser_t,$csystem_t) = times;
print OUTFILEDATA "\n\# Runtime details after finding relationships: \n";
print OUTFILEDATA "\# System time for process: ",ceil($system_t/60)," mins\n";
print OUTFILEDATA "\# User time for process: ",ceil($user_t/60)," mins\n\n\n";
print STDERR "\n\# Runtime details after finding relationships: \n";
print STDERR "\# System time for process: ",ceil($system_t/60)," mins\n";
print STDERR "\# User time for process: ",ceil($user_t/60)," mins\n\n";
# PRINTING THE ITEMSETS
# %pos_relationships : [fam1 fam2 category] = count
# %comp_relationships : [fam1 fam2 category] = count
# %both_relationships : [fam1 fam2 category] = count
# @count: fam occurences avg-len imagenum
# Creating the frequent itemsets in the format
# fam1, fam1-count, fam1-avglen, fam2, fam2-count, fam2-avglen, Occurence, Strand, Category
print OUTFILEDATA "\# Total records in OUT file: $tot_recs\n";
print OUTFILEDATA "\# Total number of families: $tot_fams\n\n";
print OUTFILEDATA "\# Note: If dns rec ~ ups rec, then the regions were located uniformly\n";
print OUTFILEDATA "\# Average number of upstream OUT records processed per image: ".ceil($ups_ctr/$tot_recs)."\n";
print OUTFILEDATA "\# Average number of downstream OUT records processed per image: ".ceil($dns_ctr/$tot_recs)."\n";
print OUTFILEDATA "\# Average number of OUT records processed per image: ".ceil(($ups_ctr+$dns_ctr)/$tot_recs)."\n\n";
print OUTFILEDATA "\# Total relationships on pos strand:".keys(%pos_relationships)."\n";
if($copy_file_flag){ print OUTFILEDATA "\# Total copies/clusters on pos strand:".$pos_copy_ctr."\n";}
print OUTFILEDATA "\# Total relationships on comp strand:".keys(%comp_relationships)."\n";
if($copy_file_flag){ print OUTFILEDATA "\# Total copies/clusters on comp strand:".$comp_copy_ctr."\n";}
print OUTFILEDATA "\# Total relationships on both strands:".keys(%both_relationships)."\n";
if($copy_file_flag){ print OUTFILEDATA "\# Total copies/clusters on both strands:".$both_copy_ctr."\n\n\n";}
else{print OUTFILEDATA "\n\n";}
# TESTING
# relationships on the positive strand
# while( ($i,$j) = each %pos_relationships){
# @temp=split(' ',$i);
# print OUTFILEDATA "$temp[0]\t$temp[2]\t$temp[1]\t$j\t+\n";
# }
#
# # relationships on the comp strand
# while( ($i,$j) = each %comp_relationships){
# @temp=split(' ',$i);
# print OUTFILEDATA "$temp[0]\t$temp[2]\t$temp[1]\t$j\tC\n";
# }
# relationships on the positive strand
while( ($i,$j) = each %pos_relationships){
@temp=split(' ',$i);
$rec=&get_index($temp[0]);
print OUTFILEDATA "$temp[0]\t$counts[$rec][1]\t$counts[$rec][2]\t";
$rec=&get_index($temp[1]);
print OUTFILEDATA "$temp[1]\t$counts[$rec][1]\t$counts[$rec][2]\t";
print OUTFILEDATA "$j\t+\t$temp[2]\n";
}
#calculating time taken
# times() returns CPU seconds; report minutes (rounded up) to file and stderr
($user_t,$system_t,$cuser_t,$csystem_t) = times;
print OUTFILEDATA "\n\# Runtime details after printing positive itemsets: \n";
print OUTFILEDATA "\# System time for process: ",ceil($system_t/60)," mins\n";
print OUTFILEDATA "\# User time for process: ",ceil($user_t/60)," mins\n";
print STDERR "\n\# Runtime details after printing positive itemsets: \n";
print STDERR "\# System time for process: ",ceil($system_t/60)," mins\n";
print STDERR "\# User time for process: ",ceil($user_t/60)," mins\n";
# relationships on the comp strand
# Same record layout as the positive-strand loop, strand column is "C"
while( ($i,$j) = each %comp_relationships){
	@temp=split(' ',$i);
	$rec=&get_index($temp[0]);
	print OUTFILEDATA "$temp[0]\t$counts[$rec][1]\t$counts[$rec][2]\t";
	$rec=&get_index($temp[1]);
	print OUTFILEDATA "$temp[1]\t$counts[$rec][1]\t$counts[$rec][2]\t";
	print OUTFILEDATA "$j\tC\t$temp[2]\n";
}
#calculating time taken
($user_t,$system_t,$cuser_t,$csystem_t) = times;
print OUTFILEDATA "\n\# Runtime details after printing negative itemsets: \n";
print OUTFILEDATA "\# System time for process: ",ceil($system_t/60)," mins\n";
print OUTFILEDATA "\# User time for process: ",ceil($user_t/60)," mins\n";
print STDERR "\n\# Runtime details after printing negative itemsets: \n";
print STDERR "\# System time for process: ",ceil($system_t/60)," mins\n";
print STDERR "\# User time for process: ",ceil($user_t/60)," mins\n";
# relationships on both strands
# Same record layout again, strand column is "B"
while( ($i,$j) = each %both_relationships){
	@temp=split(' ',$i);
	$rec=&get_index($temp[0]);
	print OUTFILEDATA "$temp[0]\t$counts[$rec][1]\t$counts[$rec][2]\t";
	$rec=&get_index($temp[1]);
	print OUTFILEDATA "$temp[1]\t$counts[$rec][1]\t$counts[$rec][2]\t";
	print OUTFILEDATA "$j\tB\t$temp[2]\n";
}
#calculating time taken
($user_t,$system_t,$cuser_t,$csystem_t) = times;
print OUTFILEDATA "\n\# Runtime details after printing both itemsets: \n";
print OUTFILEDATA "\# System time for process: ",ceil($system_t/60)," mins\n";
print OUTFILEDATA "\# User time for process: ",ceil($user_t/60)," mins\n";
print STDERR "\n\# Runtime details after printing both itemsets: \n";
print STDERR "\# System time for process: ",ceil($system_t/60)," mins\n";
print STDERR "\# User time for process: ",ceil($user_t/60)," mins\n";
# PRINTING @copies
# @copies : fam1 rel fam2 Strand fam1_st fam1_end fam2_st fam2_end
# Only written when the caller asked for the per-copy/cluster files
if($copy_file_flag){
	print OUTFILECOPIESPOS "#fam1\trel\tfam2\tStrand\tfam1-st\tf1-end\tf2-st\tf2-end\n";
	foreach $i (@pos_copies){
		print OUTFILECOPIESPOS "$i->[0]\t$i->[1]\t$i->[2]\t$i->[3]\t$i->[4]\t$i->[5]\t$i->[6]\t$i->[7]\n";
	}
	print OUTFILECOPIESCOMP "#fam1\trel\tfam2\tStrand\tfam1-st\tf1-end\tf2-st\tf2-end\n";
	foreach $i (@comp_copies){
		print OUTFILECOPIESCOMP "$i->[0]\t$i->[1]\t$i->[2]\t$i->[3]\t$i->[4]\t$i->[5]\t$i->[6]\t$i->[7]\n";
	}
	print OUTFILECOPIESBOTH "#fam1\trel\tfam2\tStrand\tfam1-st\tf1-end\tf2-st\tf2-end\n";
	foreach $i (@both_copies){
		print OUTFILECOPIESBOTH "$i->[0]\t$i->[1]\t$i->[2]\t$i->[3]\t$i->[4]\t$i->[5]\t$i->[6]\t$i->[7]\n";
	}
	#calculating time taken
	($user_t,$system_t,$cuser_t,$csystem_t) = times;
	print OUTFILECOPIESPOS "\n\# Runtime details after printing copy info: \n";
	print OUTFILECOPIESPOS "\# System time for process: ",ceil($system_t/60)," mins\n";
	print OUTFILECOPIESPOS "\# User time for process: ",ceil($user_t/60)," mins\n";
	print OUTFILECOPIESCOMP "\n\# Runtime details after printing copy info: \n";
	print OUTFILECOPIESCOMP "\# System time for process: ",ceil($system_t/60)," mins\n";
	print OUTFILECOPIESCOMP "\# User time for process: ",ceil($user_t/60)," mins\n";
	print OUTFILECOPIESBOTH "\n\# Runtime details after printing copy info: \n";
	print OUTFILECOPIESBOTH "\# System time for process: ",ceil($system_t/60)," mins\n";
	print OUTFILECOPIESBOTH "\# User time for process: ",ceil($user_t/60)," mins\n";
	print STDERR "\n\# Runtime details after printing copy info: \n";
	print STDERR "\# System time for process: ",ceil($system_t/60)," mins\n";
	print STDERR "\# User time for process: ",ceil($user_t/60)," mins\n";
}
# Close all handles before exiting
close (INFILEDATA);
close (OUTFILEDATA);
if($copy_file_flag){
	close (OUTFILECOPIESPOS);
	close (OUTFILECOPIESCOMP);
	close (OUTFILECOPIESBOTH);
}
# # debugging
# close (ERRFILE);
exit;
| suryasaha/ProxMiner | archive/miner.out2f_itemsets.v11.pl | Perl | bsd-2-clause | 97,224 |
# Homebrew Cask: installs 4K Stogram from a versioned DMG; the appcast
# checkpoint pins the upstream release this sha256 was taken from.
cask '4k-stogram' do
  version '2.4.2.1306'
  sha256 '5e6b21b06587a98a048640f1fe6e653a40e4f2d4489701995c64e1520580e3cf'
  # URL embeds only the major.minor component of the version
  url "https://downloads2.4kdownload.com/app/4kstogram_#{version.major_minor}.dmg"
  appcast 'https://www.4kdownload.com/download',
          checkpoint: '49ff507a887439ff101f36b84c58788834a7a4a7638127609c8a49fa6278b627'
  name '4K Stogram'
  homepage 'https://www.4kdownload.com/products/product-stogram'
  app '4K Stogram.app'
end
| ninjahoahong/homebrew-cask | Casks/4k-stogram.rb | Ruby | bsd-2-clause | 455 |
/////////////////////////////////////////////////////////////////////////////
// Name: mymodels.cpp
// Purpose: wxDataViewCtrl wxWidgets sample
// Author: Robert Roebling
// Modified by: Francesco Montorsi, Bo Yang
// Created: 06/01/06
// Copyright: (c) Robert Roebling
// Licence: wxWindows licence
/////////////////////////////////////////////////////////////////////////////
// For compilers that support precompilation, includes "wx/wx.h".
#include "wx/wxprec.h"
#ifdef __BORLANDC__
#pragma hdrstop
#endif
#ifndef WX_PRECOMP
#include "wx/wx.h"
#endif
#include "wx/dataview.h"
#include "mymodels.h"
// ----------------------------------------------------------------------------
// resources
// ----------------------------------------------------------------------------
#include "null.xpm"
#include "wx_small.xpm"
// ----------------------------------------------------------------------------
// MyMusicTreeModel
// ----------------------------------------------------------------------------
MyMusicTreeModel::MyMusicTreeModel()
{
    // root is invisible: the control shows its children, starting here
    m_root = new MyMusicTreeModelNode( NULL, "My Music" );
    // setup pop music
    m_pop = new MyMusicTreeModelNode( m_root, "Pop music" );
    m_pop->Append(
        new MyMusicTreeModelNode( m_pop, "You are not alone", "Michael Jackson", 1995 ) );
    m_pop->Append(
        new MyMusicTreeModelNode( m_pop, "Take a bow", "Madonna", 1994 ) );
    m_root->Append( m_pop );
    // setup classical music
    m_classical = new MyMusicTreeModelNode( m_root, "Classical music" );
    m_ninth = new MyMusicTreeModelNode( m_classical, "Ninth symphony",
                                        "Ludwig van Beethoven", 1824 );
    m_classical->Append( m_ninth );
    m_classical->Append( new MyMusicTreeModelNode( m_classical, "German Requiem",
                                                   "Johannes Brahms", 1868 ) );
    m_root->Append( m_classical );
    // set once the control enumerates the classical branch (GetChildren);
    // until then AddToClassical() need not send ItemAdded notifications
    m_classicalMusicIsKnownToControl = false;
}
wxString MyMusicTreeModel::GetTitle( const wxDataViewItem &item ) const
{
    // item.GetID() is NULL whenever item.IsOk() is false
    MyMusicTreeModelNode * const node =
        static_cast<MyMusicTreeModelNode*>( item.GetID() );
    if ( node == NULL )
        return wxEmptyString;
    return node->m_title;
}
wxString MyMusicTreeModel::GetArtist( const wxDataViewItem &item ) const
{
    // item.GetID() is NULL whenever item.IsOk() is false
    MyMusicTreeModelNode * const node =
        static_cast<MyMusicTreeModelNode*>( item.GetID() );
    if ( node == NULL )
        return wxEmptyString;
    return node->m_artist;
}
int MyMusicTreeModel::GetYear( const wxDataViewItem &item ) const
{
MyMusicTreeModelNode *node = (MyMusicTreeModelNode*) item.GetID();
if (!node) // happens if item.IsOk()==false
return 2000;
return node->m_year;
}
void MyMusicTreeModel::AddToClassical( const wxString &title, const wxString &artist,
                                       unsigned int year )
{
    if (!m_classical)
    {
        wxASSERT(m_root);
        // it was removed: restore it
        m_classical = new MyMusicTreeModelNode( m_root, "Classical music" );
        m_root->Append( m_classical );
        // notify control
        wxDataViewItem child( (void*) m_classical );
        wxDataViewItem parent( (void*) m_root );
        ItemAdded( parent, child );
    }
    // add to the classical music node a new node:
    MyMusicTreeModelNode *child_node =
        new MyMusicTreeModelNode( m_classical, title, artist, year );
    m_classical->Append( child_node );
    // the flag is set by GetChildren() once the control has enumerated the
    // classical branch; before that there is no need to notify the control
    // about children of a branch it has never seen
    if (m_classicalMusicIsKnownToControl)
    {
        // notify control
        wxDataViewItem child( (void*) child_node );
        wxDataViewItem parent( (void*) m_classical );
        ItemAdded( parent, child );
    }
}
void MyMusicTreeModel::Delete( const wxDataViewItem &item )
{
    MyMusicTreeModelNode *node = (MyMusicTreeModelNode*) item.GetID();
    if (!node)      // happens if item.IsOk()==false
        return;
    wxDataViewItem parent( node->GetParent() );
    if (!parent.IsOk())
    {
        wxASSERT(node == m_root);
        // don't make the control completely empty:
        wxLogError( "Cannot remove the root item!" );
        return;
    }
    // is the node one of those we keep stored in special pointers?
    // null the cached pointer so later code does not dereference freed memory
    if (node == m_pop)
        m_pop = NULL;
    else if (node == m_classical)
        m_classical = NULL;
    else if (node == m_ninth)
        m_ninth = NULL;
    // first remove the node from the parent's array of children;
    // NOTE: MyMusicTreeModelNodePtrArray is only an array of _pointers_
    //       thus removing the node from it doesn't result in freeing it
    node->GetParent()->GetChildren().Remove( node );
    // free the node
    delete node;
    // notify control
    ItemDeleted( parent, item );
}
// Order two items for sorting. Containers (genre branches) are compared by
// their title; identical titles fall back to a stable pointer ordering.
// Non-container items defer to the default wxDataViewModel comparison.
int MyMusicTreeModel::Compare( const wxDataViewItem &item1, const wxDataViewItem &item2,
                               unsigned int column, bool ascending ) const
{
    wxASSERT(item1.IsOk() && item2.IsOk()); // invalid items should never reach us
    if (IsContainer(item1) && IsContainer(item2))
    {
        wxVariant value1, value2;
        GetValue( value1, item1, 0 );
        GetValue( value2, item2, 0 );
        wxString str1 = value1.GetString();
        wxString str2 = value2.GetString();
        int res = str1.Cmp( str2 );
        if (res) return res;
        // items must be different: break the tie with the node addresses.
        // Compare rather than subtract -- the difference of two wxUIntPtr
        // values is unsigned and may not fit into an int, so "litem1-litem2"
        // could yield the wrong sign after truncation.
        wxUIntPtr litem1 = (wxUIntPtr) item1.GetID();
        wxUIntPtr litem2 = (wxUIntPtr) item2.GetID();
        return litem1 < litem2 ? -1 : 1;
    }
    return wxDataViewModel::Compare( item1, item2, column, ascending );
}
// Fill "variant" with the value of the given column for "item".
// Columns: 0=title, 1=artist, 2=year, 3=quality, 4=popularity, 5=old/new.
void MyMusicTreeModel::GetValue( wxVariant &variant,
                                 const wxDataViewItem &item, unsigned int col ) const
{
    wxASSERT(item.IsOk());
    MyMusicTreeModelNode *node = (MyMusicTreeModelNode*) item.GetID();
    switch (col)
    {
    case 0:
        variant = node->m_title;
        break;
    case 1:
        variant = node->m_artist;
        break;
    case 2:
        variant = (long) node->m_year;
        break;
    case 3:
        variant = node->m_quality;
        break;
    case 4:
        variant = 80L;  // all music is very 80% popular
        break;
    case 5:
        // pre-1900 pieces are labelled "old", everything else "new"
        if (GetYear(item) < 1900)
            variant = "old";
        else
            variant = "new";
        break;
    default:
        wxLogError( "MyMusicTreeModel::GetValue: wrong column %d", col );
    }
}
// Store an edited value back into the node. Only columns 0-3 are editable;
// returns true when the value was accepted.
bool MyMusicTreeModel::SetValue( const wxVariant &variant,
                                 const wxDataViewItem &item, unsigned int col )
{
    wxASSERT(item.IsOk());
    MyMusicTreeModelNode *node = (MyMusicTreeModelNode*) item.GetID();
    switch (col)
    {
    case 0:
        node->m_title = variant.GetString();
        return true;
    case 1:
        node->m_artist = variant.GetString();
        return true;
    case 2:
        node->m_year = variant.GetLong();
        return true;
    case 3:
        node->m_quality = variant.GetString();
        return true;
    default:
        // NOTE(review): unlike GetValue, this message omits the column index
        wxLogError( "MyMusicTreeModel::SetValue: wrong column" );
    }
    return false;
}
bool MyMusicTreeModel::IsEnabled( const wxDataViewItem &item,
unsigned int col ) const
{
wxASSERT(item.IsOk());
MyMusicTreeModelNode *node = (MyMusicTreeModelNode*) item.GetID();
// disable Beethoven's ratings, his pieces can only be good
return !(col == 3 && node->m_artist.EndsWith("Beethoven"));
}
wxDataViewItem MyMusicTreeModel::GetParent( const wxDataViewItem &item ) const
{
    // neither the invisible root (invalid item) nor "My Music" has a parent
    if ( !item.IsOk() )
        return wxDataViewItem(0);
    MyMusicTreeModelNode * const node =
        static_cast<MyMusicTreeModelNode*>( item.GetID() );
    if ( node == m_root )
        return wxDataViewItem(0);
    return wxDataViewItem( (void*) node->GetParent() );
}
bool MyMusicTreeModel::IsContainer( const wxDataViewItem &item ) const
{
// the invisble root node can have children
// (in our model always "MyMusic")
if (!item.IsOk())
return true;
MyMusicTreeModelNode *node = (MyMusicTreeModelNode*) item.GetID();
return node->IsContainer();
}
unsigned int MyMusicTreeModel::GetChildren( const wxDataViewItem &parent,
wxDataViewItemArray &array ) const
{
MyMusicTreeModelNode *node = (MyMusicTreeModelNode*) parent.GetID();
if (!node)
{
array.Add( wxDataViewItem( (void*) m_root ) );
return 1;
}
if (node == m_classical)
{
MyMusicTreeModel *model = (MyMusicTreeModel*)(const MyMusicTreeModel*) this;
model->m_classicalMusicIsKnownToControl = true;
}
if (node->GetChildCount() == 0)
{
return 0;
}
unsigned int count = node->GetChildren().GetCount();
for (unsigned int pos = 0; pos < count; pos++)
{
MyMusicTreeModelNode *child = node->GetChildren().Item( pos );
array.Add( wxDataViewItem( (void*) child ) );
}
return count;
}
// ----------------------------------------------------------------------------
// MyListModel
// ----------------------------------------------------------------------------
// Comparison callback for wxArrayInt::Sort(): orders values descending.
static int my_sort_reverse( int *v1, int *v2 )
{
    const int lhs = *v1;
    const int rhs = *v2;
    return rhs - lhs;
}
// Comparison callback for wxArrayInt::Sort(): orders values ascending.
static int my_sort( int *v1, int *v2 )
{
    const int lhs = *v1;
    const int rhs = *v2;
    return lhs - rhs;
}
#define INITIAL_NUMBER_OF_ITEMS 10000
MyListModel::MyListModel() :
        wxDataViewVirtualListModel( INITIAL_NUMBER_OF_ITEMS )
{
    // the first 100 items are really stored in this model;
    // all the others are synthesized on request
    static const unsigned NUMBER_REAL_ITEMS = 100;
    m_textColValues.reserve(NUMBER_REAL_ITEMS);
    m_textColValues.push_back("first row with long label to test ellipsization");
    for (unsigned int i = 1; i < NUMBER_REAL_ITEMS; i++)
    {
        m_textColValues.push_back(wxString::Format("real row %d", i));
    }
    // every real row starts with the same icon-column label
    m_iconColValues.assign(NUMBER_REAL_ITEMS, "test");
    // the two icons alternate between rows (see GetValueByRow)
    m_icon[0] = wxIcon( null_xpm );
    m_icon[1] = wxIcon( wx_small_xpm );
}
// Insert a new row at the top of the stored values and notify the control.
void MyListModel::Prepend( const wxString &text )
{
    m_textColValues.Insert( text, 0 );
    RowPrepended();
}
void MyListModel::DeleteItem( const wxDataViewItem &item )
{
    // only the first GetCount() rows are really stored; silently ignore
    // requests to delete one of the synthesized ("virtual") rows
    const unsigned int row = GetRow( item );
    if ( row < m_textColValues.GetCount() )
    {
        m_textColValues.RemoveAt( row );
        RowDeleted( row );
    }
}
// Delete several rows at once; only rows backed by real storage are removed.
void MyListModel::DeleteItems( const wxDataViewItemArray &items )
{
    unsigned i;
    // collect the row indices of the items that are actually stored
    wxArrayInt rows;
    for (i = 0; i < items.GetCount(); i++)
    {
        unsigned int row = GetRow( items[i] );
        if (row < m_textColValues.GetCount())
            rows.Add( row );
    }
    if (rows.GetCount() == 0)
    {
        // none of the selected items were in the range of the items
        // which we store... for simplicity, don't allow removing them
        wxLogError( "Cannot remove rows with an index greater than %u", unsigned(m_textColValues.GetCount()) );
        return;
    }
    // Sort in descending order so that the last
    // row will be deleted first. Otherwise the
    // remaining indeces would all be wrong.
    rows.Sort( my_sort_reverse );
    for (i = 0; i < rows.GetCount(); i++)
        m_textColValues.RemoveAt( rows[i] );
    // This is just to test if wxDataViewCtrl can
    // cope with removing rows not sorted in
    // descending order
    rows.Sort( my_sort );
    RowsDeleted( rows );
}
// Grow the virtual list by 1000 rows; Reset() announces the new total row
// count to the control (the extra rows are synthesized in GetValueByRow).
void MyListModel::AddMany()
{
    Reset( GetCount()+1000 );
}
// Produce the value shown at (row, col). Rows beyond the stored range get
// synthesized "virtual" values so the model can pretend to be huge.
void MyListModel::GetValueByRow( wxVariant &variant,
                                 unsigned int row, unsigned int col ) const
{
    switch ( col )
    {
        case Col_EditableText:
            if (row >= m_textColValues.GetCount())
                variant = wxString::Format( "virtual row %d", row );
            else
                variant = m_textColValues[ row ];
            break;
        case Col_IconText:
            {
                wxString text;
                if ( row >= m_iconColValues.GetCount() )
                    text = "virtual icon";
                else
                    text = m_iconColValues[row];
                // alternate between the two icons loaded in the ctor
                variant << wxDataViewIconText(text, m_icon[row % 2]);
            }
            break;
        case Col_Date:
            // one second past 2000-01-01 per row
            variant = wxDateTime(1, wxDateTime::Jan, 2000).Add(wxTimeSpan(row));
            break;
        case Col_TextWithAttr:
            {
                // labels cycle every five rows and describe the attributes
                // applied by GetAttrByRow()
                static const char *labels[5] =
                {
                    "blue", "green", "red", "bold cyan", "default",
                };
                variant = labels[row % 5];
            }
            break;
        case Col_Custom:
            {
                // values edited by the user override the synthesized ones
                IntToStringMap::const_iterator it = m_customColValues.find(row);
                if ( it != m_customColValues.end() )
                    variant = it->second;
                else
                    variant = wxString::Format("%d", row % 100);
            }
            break;
        case Col_Max:
            wxFAIL_MSG( "invalid column" );
    }
}
// Supply display attributes for (row, col); returns false when the default
// appearance should be used.
bool MyListModel::GetAttrByRow( unsigned int row, unsigned int col,
                                wxDataViewItemAttr &attr ) const
{
    switch ( col )
    {
        case Col_EditableText:
        case Col_Date:
            return false;
        case Col_IconText:
            // grey out every odd row of the icon column
            if ( !(row % 2) )
                return false;
            attr.SetColour(*wxLIGHT_GREY);
            break;
        case Col_TextWithAttr:
        case Col_Custom:
            // do what the labels defined in GetValueByRow() hint at
            switch ( row % 5 )
            {
                case 0:
                    attr.SetColour(*wxBLUE);
                    break;
                case 1:
                    attr.SetColour(*wxGREEN);
                    break;
                case 2:
                    attr.SetColour(*wxRED);
                    break;
                case 3:
                    attr.SetColour(*wxCYAN);
                    attr.SetBold(true);
                    break;
                case 4:
                    return false;
            }
            break;
        case Col_Max:
            wxFAIL_MSG( "invalid column" );
    }
    return true;
}
// Store a user-edited value; returns true when the edit was accepted.
// Only the text/icon columns of stored rows and the custom column are
// editable.
bool MyListModel::SetValueByRow( const wxVariant &variant,
                                 unsigned int row, unsigned int col )
{
    switch ( col )
    {
        case Col_EditableText:
        case Col_IconText:
            if (row >= m_textColValues.GetCount())
            {
                // the item is not in the range of the items
                // which we store... for simplicity, don't allow editing it
                wxLogError( "Cannot edit rows with an index greater than %d",
                            m_textColValues.GetCount() );
                return false;
            }
            if ( col == Col_EditableText )
            {
                m_textColValues[row] = variant.GetString();
            }
            else // col == Col_IconText
            {
                // only the text part of the icon-text pair is persisted
                wxDataViewIconText iconText;
                iconText << variant;
                m_iconColValues[row] = iconText.GetText();
            }
            return true;
        case Col_Date:
        case Col_TextWithAttr:
            wxLogError("Cannot edit the column %d", col);
            break;
        case Col_Custom:
            m_customColValues[row] = variant.GetString();
            break;
        case Col_Max:
            wxFAIL_MSG( "invalid column" );
    }
    return false;
}
// ----------------------------------------------------------------------------
// MyListStoreDerivedModel
// ----------------------------------------------------------------------------
bool MyListStoreDerivedModel::IsEnabledByRow(unsigned int row, unsigned int col) const
{
    // keep the checkboxes of rows 8 and 9 (the last two) disabled
    const bool isDisabledCheckbox = (col == 0) && (row == 8 || row == 9);
    return !isDisabledCheckbox;
}
| adouble42/nemesis-current | wxWidgets-3.1.0/samples/dataview/mymodels.cpp | C++ | bsd-2-clause | 15,923 |
# Homework 2 solution, part 1: cnf.py
# Andrew Gordon
# Feb 18, 2015
# Revised June 19, 2015 for better input/output and implies->if
import sys
import fileinput
def biconditionalElimination(s):
    """Rewrite every biconditional ["iff", p, q] throughout the tree as
    ["and", ["if", p, q], ["if", q, p]].  Strings (atoms) pass through."""
    if type(s) is str:
        return s
    if s[0] == "iff":
        left = biconditionalElimination(s[1])
        right = biconditionalElimination(s[2])
        return ["and", ["if", left, right], ["if", right, left]]
    return [s[0]] + [biconditionalElimination(sub) for sub in s[1:]]
def implicationElimination(s):
    """Rewrite every implication ["if", p, q] throughout the tree as
    ["or", ["not", p], q].  Strings (atoms) pass through."""
    if type(s) is str:
        return s
    if s[0] == "if":
        antecedent = implicationElimination(s[1])
        consequent = implicationElimination(s[2])
        return ["or", ["not", antecedent], consequent]
    return [s[0]] + [implicationElimination(sub) for sub in s[1:]]
def doubleNegationElimination(s):
    """Collapse ["not", ["not", p]] to p everywhere in the expression."""
    if type(s) is str:
        return s
    if s[0] == "not" and type(s[1]) is list and s[1][0] == "not":
        return doubleNegationElimination(s[1][1])
    return [s[0]] + [doubleNegationElimination(sub) for sub in s[1:]]
def demorgan(s):
    """Apply demorgan1() repeatedly until the expression stops changing
    (iterative fixed point instead of the original tail recursion)."""
    while True:
        revision = demorgan1(s)
        if revision == s:
            return s
        s = revision
def demorgan1(s):
    """One De Morgan pass: push "not" inside a conjunction/disjunction,
    flipping the connective.  Other nodes recurse via demorgan()."""
    if type(s) is str:
        return s
    negated_list = s[0] == "not" and type(s[1]) is list
    if negated_list and s[1][0] == "and":
        return ["or"] + [demorgan(["not", sub]) for sub in s[1][1:]]
    if negated_list and s[1][0] == "or":
        return ["and"] + [demorgan(["not", sub]) for sub in s[1][1:]]
    return [s[0]] + [demorgan(sub) for sub in s[1:]]
def binaryize(s): # ensures all connectives are binary (and / or)
    # Fold an n-ary and/or into right-nested binary form.
    # FIX: the original returned ["and", s[1], binaryize(["and"] + s[2:])]
    # without recursing into s[1], so a nested n-ary connective inside the
    # first operand was left unconverted; binaryize(s[1]) repairs that.
    if type(s) is str:
        return s
    elif type(s) is list and s[0] == "and" and len(s) > 3: # too long
        return(["and", binaryize(s[1]), binaryize(["and"] + s[2:])])
    elif type(s) is list and s[0] == "or" and len(s) > 3: # too long
        return(["or", binaryize(s[1]), binaryize(["or"] + s[2:])])
    else:
        return([s[0]] + [binaryize(i) for i in s[1:]])
def distributivity(s):
    """Apply distributivity1() repeatedly until a fixed point is reached."""
    while True:
        revision = distributivity1(s)
        if revision == s:
            return s
        s = revision
def distributivity1(s): # only works on binary connectives
    """One distribution pass: OR over AND, e.g.
    ["or", ["and", a, b], c] -> ["and", ["or", a, c], ["or", b, c]].
    The left-operand case is tried first; other nodes recurse."""
    if type(s) is str:
        return s
    if type(s) is list and s[0] == "or" and type(s[1]) is list and s[1][0] == "and":
        # distribute s[2] over s[1]
        return ["and"] + [distributivity(["or", conjunct, s[2]]) for conjunct in s[1][1:]]
    if type(s) is list and s[0] == "or" and type(s[2]) is list and s[2][0] == "and":
        # distribute s[1] over s[2]
        return ["and"] + [distributivity(["or", conjunct, s[1]]) for conjunct in s[2][1:]]
    return [s[0]] + [distributivity(sub) for sub in s[1:]]
def andAssociativity(s):
    """Flatten nested conjunctions by iterating andAssociativity1() to a
    fixed point."""
    while True:
        revision = andAssociativity1(s)
        if revision == s:
            return s
        s = revision
def andAssociativity1(s):
    # One flattening pass: ["and", ["and", a, b], c] -> ["and", a, b, c].
    # FIX: the original appended non-"and" children of a conjunction without
    # recursing into them, so conjunctions nested below another connective
    # were never flattened and the fixed-point driver stalled; recursing via
    # andAssociativity1(i) lets andAssociativity() converge fully.
    if type(s) is str:
        return s
    elif type(s) is list and s[0] == "and":
        result = ["and"]
        # iterate through conjuncts looking for "and" lists
        for i in s[1:]:
            if type(i) is list and i[0] == "and":
                result = result + i[1:]
            else:
                result.append(andAssociativity1(i))
        return result
    else:
        return([s[0]] + [andAssociativity1(i) for i in s[1:]])
def orAssociativity(s):
    """Flatten nested disjunctions by iterating orAssociativity1() to a
    fixed point."""
    while True:
        revision = orAssociativity1(s)
        if revision == s:
            return s
        s = revision
def orAssociativity1(s):
    # One flattening pass: ["or", ["or", a, b], c] -> ["or", a, b, c].
    # FIX: mirror of andAssociativity1 -- non-"or" children are now
    # processed recursively so disjunctions nested below other connectives
    # also get flattened by the fixed-point driver orAssociativity().
    if type(s) is str:
        return s
    elif type(s) is list and s[0] == "or":
        result = ["or"]
        # iterate through disjuncts looking for "or" lists
        for i in s[1:]:
            if type(i) is list and i[0] == "or":
                result = result + i[1:]
            else:
                result.append(orAssociativity1(i))
        return result
    else:
        return([s[0]] + [orAssociativity1(i) for i in s[1:]])
def removeDuplicateLiterals(s):
    """Within each clause (disjunction) keep only the first occurrence of
    every literal; a clause reduced to one literal becomes that literal."""
    if type(s) is str:
        return s
    if s[0] == "not":
        return s
    if s[0] == "and":
        return ["and"] + [removeDuplicateLiterals(clause) for clause in s[1:]]
    if s[0] == "or":
        kept = []
        for literal in s[1:]:
            if literal not in kept:
                kept.append(literal)
        if len(kept) == 1:
            return kept[0]
        return ["or"] + kept
def removeDuplicateClauses(s):
    """Drop clauses of a conjunction that duplicate an earlier clause (as
    judged by unique()); a conjunction of one clause collapses to it."""
    if type(s) is str:
        return s
    if s[0] == "not":
        return s
    if s[0] == "or":
        return s
    if s[0] == "and":  # conjunction of clauses
        kept = []
        for clause in s[1:]:
            if unique(clause, kept):
                kept.append(clause)
        if len(kept) == 1:
            return kept[0]
        return ["and"] + kept
def unique(c, remains):
    """True when clause c matches none of the clauses in remains: equal
    strings, or same-length clauses containing the same literals (in any
    order), count as duplicates."""
    for p in remains:
        if type(c) is str or type(p) is str:
            if c == p:
                return False
        elif len(c) == len(p):
            if all(literal in p[1:] for literal in c[1:]):
                return False
    return True
def cnf(s):
    """Convert a propositional sentence (nested lists of "iff"/"if"/"not"/
    "and"/"or" over string atoms) to conjunctive normal form by running the
    standard transformation pipeline in order."""
    pipeline = (
        biconditionalElimination,
        implicationElimination,
        demorgan,
        doubleNegationElimination,
        binaryize,
        distributivity,
        andAssociativity,
        orAssociativity,
        removeDuplicateLiterals,
        removeDuplicateClauses,
    )
    for transform in pipeline:
        s = transform(s)
    return s
if __name__ == "__main__":
    # Read one sentence (a Python list literal) per line from the files
    # named on the command line, or stdin, and print its CNF form.
    # print(repr(x)) is valid under both Python 2 and Python 3, unlike the
    # original "print repr(x)" statement form.
    # NOTE(security): eval() executes arbitrary code -- input must be trusted.
    for line in fileinput.input():
        print(repr(cnf(eval(line.strip()))))
| asgordon/DPLL | cnf.py | Python | bsd-2-clause | 5,803 |
/*
* Copyright (c) 2018, The University of Memphis, MD2K Center of Excellence
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.md2k.utilities.data_format;
import org.md2k.datakitapi.time.DateTime;
/**
* Defines an activity marker.
* <p>
* Current activity markers are:
* <ul>
* <li>Smoking</li>
* <li>Sleep</li>
* <li>Wakeup</li>
* </ul>
* </p>
*/
public class Marker {
    /** Marker type used for smoking episodes ({@code "SMOKING"}). */
    public static final String SMOKING = "SMOKING";
    /** Marker type used for falling asleep ({@code "SLEEP"}). */
    public static final String SLEEP = "SLEEP";
    /** Marker type used for waking up ({@code "WAKEUP"}). */
    public static final String WAKEUP = "WAKEUP";

    // The kind of activity this marker records (one of the constants above).
    String type;
    // Wall-clock time, in milliseconds, captured when the marker was created
    // (may be overwritten via setTimestamp()).
    long timestamp;

    /**
     * Creates a marker of the given type, stamped with the current
     * wall-clock time.
     *
     * @param type The marker type.
     */
    public Marker(String type) {
        this.type = type;
        this.timestamp = System.currentTimeMillis();
    }

    /**
     * Returns the marker type.
     * @return The marker type.
     */
    public String getType() {
        return this.type;
    }

    /**
     * Sets the marker type.
     * @param type The marker type.
     */
    public void setType(String type) {
        this.type = type;
    }

    /**
     * Returns the timestamp.
     * @return The timestamp in milliseconds.
     */
    public long getTimestamp() {
        return this.timestamp;
    }

    /**
     * Sets the timestamp.
     * @param timestamp The timestamp in milliseconds.
     */
    public void setTimestamp(long timestamp) {
        this.timestamp = timestamp;
    }
}
<?php
/**
* GetCustomPaymentMethodsResponseTest
*
* PHP version 5
*
* @category Class
* @package Swagger\Client
* @author Swagger Codegen team
* @link https://github.com/swagger-api/swagger-codegen
*/
/**
* MINDBODY Public API
*
* No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
*
* OpenAPI spec version: v6
*
* Generated by: https://github.com/swagger-api/swagger-codegen.git
* Swagger Codegen version: 2.4.6
*/
/**
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen
* Please update the test case below to test the model.
*/
namespace Swagger\Client;
/**
* GetCustomPaymentMethodsResponseTest Class Doc Comment
*
* @category Class
* @description GetCustomPaymentMethodsResponse
* @package Swagger\Client
* @author Swagger Codegen team
* @link https://github.com/swagger-api/swagger-codegen
*/
class GetCustomPaymentMethodsResponseTest extends \PHPUnit_Framework_TestCase
{
    /**
     * Setup before running any test case
     */
    public static function setUpBeforeClass()
    {
        // No class-wide fixtures needed for this generated model test.
    }
    /**
     * Setup before running each test case
     */
    public function setUp()
    {
        // No per-test fixtures needed.
    }
    /**
     * Clean up after running each test case
     */
    public function tearDown()
    {
        // Nothing to clean up.
    }
    /**
     * Clean up after running all test cases
     */
    public static function tearDownAfterClass()
    {
        // Nothing to clean up.
    }
    /**
     * Test "GetCustomPaymentMethodsResponse"
     */
    public function testGetCustomPaymentMethodsResponse()
    {
        // TODO: instantiate the generated model and assert on its behaviour.
    }
    /**
     * Test attribute "pagination_response"
     */
    public function testPropertyPaginationResponse()
    {
        // TODO: assert getter/setter behaviour for "pagination_response".
    }
    /**
     * Test attribute "payment_methods"
     */
    public function testPropertyPaymentMethods()
    {
        // TODO: assert getter/setter behaviour for "payment_methods".
    }
}
#ifndef COMMON_HPP
#define COMMON_HPP
#pragma once
#include "typedefs.hpp"
#include "configuration_space.hpp"
#include "utils.hpp"
#endif // COMMON_HPP | maverick-long/trajopt | src/trajopt/common.hpp | C++ | bsd-2-clause | 153 |
/*
Copyright 1992 Mark Emmer.
All Rights Reserved.
Use Of this source code is governed by a BSD-style
license that can be found in the LICENSE file.
*/
#include "extrn88.h"
/*
 * retstrf(s, presult) - Return far C string
 *
 * Fills in the far-string block of *presult from the NUL-terminated far
 * string s and reports the block type to the caller.
 */
word retstrf(char far *s, union block far *presult)
{
    presult->fsb.fslen = strlenf(s);    /* measure the far string */
    presult->fsb.fsptr = s;             /* point the result at its data */
    return BL_FS;                       /* block type: far string */
}
| spitbol/88-binary | EXTERNAL/C/LIB/RETSTRF.C | C++ | bsd-2-clause | 445 |
#include <algorithm>
#include <cassert>
#include <cstdio>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>
using namespace std;
vector<int> a;
// Absolute value of a signed 64-bit integer.
long long absolute(long long x) {
	return (x < 0) ? -x : x;
}
class Equidistance {
public:
long long eval(int fix, long long d) {
long long ret = 0;
for (int i=0; i<(int)a.size(); ++i)
if (i != fix)
ret += absolute(a[fix] + (i-fix)*d - a[i]);
return ret;
}
long long solve(int fix) {
long long l = 1;
long long h = 4000000100LL;
while (h-l > 20) {
long long lmid = (2*l+h)/3;
long long hmid = (l+2*h)/3;
if (eval(fix, lmid) < eval(fix, hmid))
h = hmid;
else
l = lmid;
}
long long ret = eval(fix, l);
for (long long i=l+1; i<=h; ++i)
ret = min(ret, eval(fix, i));
return ret;
}
long long minimumEffort(vector <int> initial) {
a = initial;
sort(a.begin(), a.end());
long long sol = 1LL<<62;
for (int i=0; i<(int)a.size(); ++i)
sol = min(sol, solve(i));
return sol;
}
// BEGIN CUT HERE
	// Embedded TopCoder test harness (stripped by the uploader plugin).
	public:
	void run_test(int Case) { if ((Case == -1) || (Case == 0)) test_case_0(); if ((Case == -1) || (Case == 1)) test_case_1(); if ((Case == -1) || (Case == 2)) test_case_2(); if ((Case == -1) || (Case == 3)) test_case_3(); if ((Case == -1) || (Case == 4)) test_case_4(); }
	private:
	template <typename T> string print_array(const vector<T> &V) { ostringstream os; os << "{ "; for (typename vector<T>::const_iterator iter = V.begin(); iter != V.end(); ++iter) os << '\"' << *iter << "\","; os << " }"; return os.str(); }
	void verify_case(int Case, const long long &Expected, const long long &Received) { cerr << "Test Case #" << Case << "..."; if (Expected == Received) cerr << "PASSED" << endl; else { cerr << "FAILED" << endl; cerr << "\tExpected: \"" << Expected << '\"' << endl; cerr << "\tReceived: \"" << Received << '\"' << endl; } }
	void test_case_0() { int Arr0[] = { 1, 4, 7, 10 }; vector <int> Arg0(Arr0, Arr0 + (sizeof(Arr0) / sizeof(Arr0[0]))); long long Arg1 = 0LL; verify_case(0, Arg1, minimumEffort(Arg0)); }
	void test_case_1() { int Arr0[] = { 4, 3, 1 }; vector <int> Arg0(Arr0, Arr0 + (sizeof(Arr0) / sizeof(Arr0[0]))); long long Arg1 = 1LL; verify_case(1, Arg1, minimumEffort(Arg0)); }
	void test_case_2() { int Arr0[] = { 3, 3, 3 }; vector <int> Arg0(Arr0, Arr0 + (sizeof(Arr0) / sizeof(Arr0[0]))); long long Arg1 = 2LL; verify_case(2, Arg1, minimumEffort(Arg0)); }
	void test_case_3() { int Arr0[] = { -2000000000, 2000000000 }; vector <int> Arg0(Arr0, Arr0 + (sizeof(Arr0) / sizeof(Arr0[0]))); long long Arg1 = 0LL; verify_case(3, Arg1, minimumEffort(Arg0)); }
	void test_case_4() { int Arr0[] = { 2, 3, 4, 6, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18 }; vector <int> Arg0(Arr0, Arr0 + (sizeof(Arr0) / sizeof(Arr0[0]))); long long Arg1 = 8LL; verify_case(4, Arg1, minimumEffort(Arg0)); }
// END CUT HERE
};
// BEGIN CUT HERE
int main()
{
	// Run all embedded test cases (-1 selects every case).
	Equidistance ___test;
	___test.run_test(-1);
}
// END CUT HERE
| ibudiselic/contest-problem-solutions | tc 160+/Equidistance.cpp | C++ | bsd-2-clause | 3,085 |
# Makefile for Sphinx documentation
#
# You can set these variables from the command line.
# -W promotes Sphinx warnings to errors so CI fails fast.
SPHINXOPTS    = -W
SPHINXBUILD   = sphinx-build
PAPER         =
BUILDDIR      = _build
# Internal variables.
PAPEROPT_a4     = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
# Default target: list every available builder.
help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html       to make standalone HTML files"
	@echo "  dirhtml    to make HTML files named index.html in directories"
	@echo "  singlehtml to make a single large HTML file"
	@echo "  pickle     to make pickle files"
	@echo "  json       to make JSON files"
	@echo "  htmlhelp   to make HTML files and a HTML help project"
	@echo "  qthelp     to make HTML files and a qthelp project"
	@echo "  devhelp    to make HTML files and a Devhelp project"
	@echo "  epub       to make an epub"
	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
	@echo "  text       to make text files"
	@echo "  man        to make manual pages"
	@echo "  texinfo    to make Texinfo files"
	@echo "  info       to make Texinfo files and run them through makeinfo"
	@echo "  gettext    to make PO message catalogs"
	@echo "  changes    to make an overview of all changed/added/deprecated items"
	@echo "  linkcheck  to check all external links for integrity"
	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
# Remove all build products (doctrees included).
clean:
	-rm -rf $(BUILDDIR)/*
# Each builder below invokes sphinx-build with the matching -b flag.
html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
singlehtml:
	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
	@echo
	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."
json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."
htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	      ".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Airship.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Airship.qhc"
devhelp:
	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
	@echo
	@echo "Build finished."
	@echo "To view the help file:"
	@echo "# mkdir -p $$HOME/.local/share/devhelp/Airship"
	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Airship"
	@echo "# devhelp"
epub:
	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
	@echo
	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make' in that directory to run these through (pdf)latex" \
	      "(use \`make latexpdf' here to do that automatically)."
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
texinfo:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
# Build Texinfo sources, then run them through makeinfo.
# Use $(MAKE) (not bare `make`) so recursive invocations inherit flags and
# jobserver state, as required for recursive make.
info:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo "Running Texinfo files through makeinfo..."
	$(MAKE) -C $(BUILDDIR)/texinfo info
	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
gettext:
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
htmlzip: html
cd $(BUILDDIR)/html && zip -r ../html.zip *
| mgax/airship | docs/Makefile | Makefile | bsd-2-clause | 5,631 |
define([
    "cali-calcu/base/var",
    "$J",
    "cali-calcu/CommandParser",
    "../dev/Matchers"
], function (func, $J, CommandParser, Matchers) {
    // Unit tests for the variance ("var") calculator module.
    describe("cali.module.base.var", function () {
        // Test environment, rebuilt before each spec.
        var parser = null;
        var tolerance;

        beforeEach(function () {
            parser = new CommandParser();
            $J.jasmine.Expectation.addMatchers(Matchers);
            tolerance = 1e-4; // allowed numeric error for matrix comparison
        });

        it("should computes the var of the input", function () {
            // Each case is [matrix, arg1, arg2]; array holes mean "argument omitted"
            // (i.e. undefined is passed through to func).
            var cases = [
                [ [[1], [7]], , ],
                [ [[1, 5, 9], [7, 15, 22]], , ],
                [ [[1, 5, 9], [7, 15, 22]], ,1 ],
                [ [[1, 5, 9], [7, 15, 22]], 0,1 ],
                [ [[1, 5, 9], [7, 15, 22]], ,2 ],
                [ [[1, 5, 9], [7, 15, 22]], 1 ,2 ],
            ];
            var expected = [
                [[18]],
                [[18, 50, 84.5]],
                [[18, 50, 84.5]],
                [[18, 50, 84.5]],
                [[16.0], [56.3333333]],
                [[10.66667], [37.55557]],
            ];
            cases.forEach(function (args, idx) {
                expect(func(args[0], args[1], args[2])).toBeMatrixCloseTo(expected[idx], tolerance);
            });
        });
    });
});
/*
* Copyright (c) 2017, Seth <Sethtroll3@gmail.com>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.client.plugins.fishing;
import java.util.HashMap;
import java.util.Map;
import lombok.Getter;
import net.runelite.api.ItemID;
import static net.runelite.api.NpcID.FISHING_SPOT_1497;
import static net.runelite.api.NpcID.FISHING_SPOT_1498;
import static net.runelite.api.NpcID.FISHING_SPOT_1510;
import static net.runelite.api.NpcID.FISHING_SPOT_1511;
import static net.runelite.api.NpcID.FISHING_SPOT_1518;
import static net.runelite.api.NpcID.FISHING_SPOT_1519;
import static net.runelite.api.NpcID.FISHING_SPOT_1520;
import static net.runelite.api.NpcID.FISHING_SPOT_1521;
import static net.runelite.api.NpcID.FISHING_SPOT_1522;
import static net.runelite.api.NpcID.FISHING_SPOT_1523;
import static net.runelite.api.NpcID.FISHING_SPOT_1524;
import static net.runelite.api.NpcID.FISHING_SPOT_1525;
import static net.runelite.api.NpcID.FISHING_SPOT_1528;
import static net.runelite.api.NpcID.FISHING_SPOT_1530;
import static net.runelite.api.NpcID.FISHING_SPOT_1536;
import static net.runelite.api.NpcID.FISHING_SPOT_1542;
import static net.runelite.api.NpcID.FISHING_SPOT_1544;
import static net.runelite.api.NpcID.FISHING_SPOT_2653;
import static net.runelite.api.NpcID.FISHING_SPOT_2654;
import static net.runelite.api.NpcID.FISHING_SPOT_2655;
import static net.runelite.api.NpcID.FISHING_SPOT_3913;
import static net.runelite.api.NpcID.FISHING_SPOT_3914;
import static net.runelite.api.NpcID.FISHING_SPOT_3915;
import static net.runelite.api.NpcID.FISHING_SPOT_4316;
import static net.runelite.api.NpcID.FISHING_SPOT_4476;
import static net.runelite.api.NpcID.FISHING_SPOT_4477;
import static net.runelite.api.NpcID.FISHING_SPOT_4710;
import static net.runelite.api.NpcID.FISHING_SPOT_4712;
import static net.runelite.api.NpcID.FISHING_SPOT_4713;
import static net.runelite.api.NpcID.FISHING_SPOT_5233;
import static net.runelite.api.NpcID.FISHING_SPOT_5234;
import static net.runelite.api.NpcID.FISHING_SPOT_5820;
import static net.runelite.api.NpcID.FISHING_SPOT_5821;
import static net.runelite.api.NpcID.FISHING_SPOT_6488;
import static net.runelite.api.NpcID.FISHING_SPOT_7155;
import static net.runelite.api.NpcID.FISHING_SPOT_7199;
import static net.runelite.api.NpcID.FISHING_SPOT_7200;
import static net.runelite.api.NpcID.FISHING_SPOT_7323;
import static net.runelite.api.NpcID.FISHING_SPOT_7459;
import static net.runelite.api.NpcID.FISHING_SPOT_7460;
import static net.runelite.api.NpcID.FISHING_SPOT_7461;
import static net.runelite.api.NpcID.FISHING_SPOT_7462;
import static net.runelite.api.NpcID.FISHING_SPOT_7465;
import static net.runelite.api.NpcID.FISHING_SPOT_7466;
import static net.runelite.api.NpcID.FISHING_SPOT_7467;
import static net.runelite.api.NpcID.FISHING_SPOT_7469;
import static net.runelite.api.NpcID.FISHING_SPOT_7470;
import static net.runelite.api.NpcID.FISHING_SPOT_7730;
import static net.runelite.api.NpcID.FISHING_SPOT_7731;
import static net.runelite.api.NpcID.FISHING_SPOT_7732;
import static net.runelite.api.NpcID.FISHING_SPOT_7733;
import static net.runelite.api.NpcID.FISHING_SPOT_7946;
import static net.runelite.api.NpcID.FISHING_SPOT_7947;
import static net.runelite.api.NpcID.FISHING_SPOT_8523;
import static net.runelite.api.NpcID.ROD_FISHING_SPOT;
import static net.runelite.api.NpcID.ROD_FISHING_SPOT_1508;
import static net.runelite.api.NpcID.ROD_FISHING_SPOT_1509;
import static net.runelite.api.NpcID.ROD_FISHING_SPOT_1513;
import static net.runelite.api.NpcID.ROD_FISHING_SPOT_1515;
import static net.runelite.api.NpcID.ROD_FISHING_SPOT_1526;
import static net.runelite.api.NpcID.ROD_FISHING_SPOT_1527;
import static net.runelite.api.NpcID.ROD_FISHING_SPOT_6825;
import static net.runelite.api.NpcID.ROD_FISHING_SPOT_7463;
import static net.runelite.api.NpcID.ROD_FISHING_SPOT_7464;
import static net.runelite.api.NpcID.ROD_FISHING_SPOT_7468;
import static net.runelite.api.NpcID.ROD_FISHING_SPOT_7676;
@Getter
enum FishingSpot
{
    // Each constant maps a human-readable spot name and a representative fish
    // item sprite to the NPC ids of the fishing spots where it can be caught.
    SHRIMP("Shrimp, Anchovies", ItemID.RAW_SHRIMPS,
        FISHING_SPOT_1518, FISHING_SPOT_1521, FISHING_SPOT_1523,
        FISHING_SPOT_1524, FISHING_SPOT_1525, FISHING_SPOT_1528,
        FISHING_SPOT_1530, FISHING_SPOT_1544, FISHING_SPOT_3913,
        FISHING_SPOT_7155, FISHING_SPOT_7459, FISHING_SPOT_7462,
        FISHING_SPOT_7467, FISHING_SPOT_7469, FISHING_SPOT_7947
    ),
    LOBSTER("Lobster, Swordfish, Tuna", ItemID.RAW_LOBSTER,
        FISHING_SPOT_1510, FISHING_SPOT_1519, FISHING_SPOT_1522,
        FISHING_SPOT_3914, FISHING_SPOT_5820, FISHING_SPOT_7199,
        FISHING_SPOT_7460, FISHING_SPOT_7465, FISHING_SPOT_7470,
        FISHING_SPOT_7946
    ),
    SHARK("Shark, Bass", ItemID.RAW_SHARK,
        FISHING_SPOT_1511, FISHING_SPOT_1520, FISHING_SPOT_3915,
        FISHING_SPOT_4476, FISHING_SPOT_4477, FISHING_SPOT_5233,
        FISHING_SPOT_5234, FISHING_SPOT_5821, FISHING_SPOT_7200,
        FISHING_SPOT_7461, FISHING_SPOT_7466
    ),
    MONKFISH("Monkfish", ItemID.RAW_MONKFISH,
        FISHING_SPOT_4316
    ),
    SALMON("Salmon, Trout", ItemID.RAW_SALMON,
        ROD_FISHING_SPOT, ROD_FISHING_SPOT_1508, ROD_FISHING_SPOT_1509,
        ROD_FISHING_SPOT_1513, ROD_FISHING_SPOT_1515, ROD_FISHING_SPOT_1526,
        ROD_FISHING_SPOT_1527, ROD_FISHING_SPOT_7463, ROD_FISHING_SPOT_7464,
        ROD_FISHING_SPOT_7468
    ),
    BARB_FISH("Sturgeon, Salmon, Trout", ItemID.LEAPING_STURGEON,
        FISHING_SPOT_1542, FISHING_SPOT_7323
    ),
    ANGLERFISH("Anglerfish", ItemID.RAW_ANGLERFISH,
        ROD_FISHING_SPOT_6825
    ),
    MINNOW("Minnow", ItemID.MINNOW,
        FISHING_SPOT_7730, FISHING_SPOT_7731, FISHING_SPOT_7732, FISHING_SPOT_7733
    ),
    INFERNAL_EEL("Infernal Eel", ItemID.INFERNAL_EEL,
        ROD_FISHING_SPOT_7676
    ),
    KARAMBWAN("Karambwan", ItemID.RAW_KARAMBWAN,
        FISHING_SPOT_4712, FISHING_SPOT_4713
    ),
    KARAMBWANJI("Karambwanji, Shrimp", ItemID.KARAMBWANJI,
        FISHING_SPOT_4710
    ),
    SACRED_EEL("Sacred eel", ItemID.SACRED_EEL,
        FISHING_SPOT_6488
    ),
    CAVE_EEL("Cave eel", ItemID.RAW_CAVE_EEL,
        FISHING_SPOT_1497, FISHING_SPOT_1498
    ),
    SLIMY_EEL("Slimy eel", ItemID.RAW_SLIMY_EEL,
        FISHING_SPOT_2653, FISHING_SPOT_2654, FISHING_SPOT_2655
    ),
    DARK_CRAB("Dark Crab", ItemID.RAW_DARK_CRAB,
        FISHING_SPOT_1536
    ),
    COMMON_TENCH("Common tench, Bluegill, Greater siren, Mottled eel", ItemID.COMMON_TENCH,
        FISHING_SPOT_8523);

    // Reverse lookup table: NPC id -> FishingSpot, built once from all constants.
    @Getter
    private static final Map<Integer, FishingSpot> SPOTS = new HashMap<>();

    private final String name;        // display name of the catchable fish
    private final int fishSpriteId;   // item id used as the overlay sprite
    private final int[] ids;          // NPC ids of fishing spots for this fish

    static
    {
        // Populate the reverse lookup map; later constants would overwrite
        // earlier ones on duplicate ids (ids are expected to be unique).
        FishingSpot[] spots = values();
        for (FishingSpot spot : spots)
        {
            for (int spotId : spot.getIds())
            {
                SPOTS.put(spotId, spot);
            }
        }
    }

    FishingSpot(String spot, int fishSpriteId, int... ids)
    {
        this.name = spot;
        this.fishSpriteId = fishSpriteId;
        this.ids = ids;
    }
}
| abelbriggs1/runelite | runelite-client/src/main/java/net/runelite/client/plugins/fishing/FishingSpot.java | Java | bsd-2-clause | 7,975 |
<?PHP
/*-
* Copyright (c) 2018 Etienne Bagnoud <etienne@artisan-numerique.ch>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
Namespace artnum;
/**
 * Small collection of defensive filesystem helpers: every read/write is
 * preceded by an explicit readability/writability check so callers get a
 * clean `false` instead of a PHP warning.
 */
class Files {
   /**
    * Write $content to $file if the target location is writable.
    *
    * @param string $content data to write
    * @param string $file    destination path
    * @return int|false number of bytes written, or false when not writable
    */
   public function toFile ($content, $file) {
      if ($this->writable($file)) {
         return file_put_contents($file, $content);
      }
      return false;
   }

   /**
    * Check whether $file can be written: either it already exists as a
    * writable regular file, or it does not exist yet and its parent
    * directory is writable (so the file could be created).
    *
    * @param string $file path to test
    * @return bool
    */
   public function writable($file) {
      if (file_exists($file) && is_file($file) && is_writable($file)) {
         return true;
      }
      // File absent (or not a plain file): allow creation when the parent
      // directory accepts writes.
      return is_dir(dirname($file)) && is_writable(dirname($file));
   }

   /** Alias of writable(), kept for the alternate spelling. */
   public function writeable($file) {
      return $this->writable($file);
   }

   /**
    * Verify that $file is usable: readable when it already exists,
    * otherwise creatable (its location is writable).
    *
    * @param string $file path to test
    * @return bool
    */
   public function mayExist($file) {
      if (file_exists($file) && $this->readable($file)) {
         return true;
      }
      return $this->writeable($file);
   }

   /**
    * True when $file exists, is a regular file and is readable.
    *
    * @param string $file path to test
    * @return bool
    */
   public function readable($file) {
      return file_exists($file) && is_file($file) && is_readable($file);
   }

   /**
    * Read the whole file.
    *
    * @param string $file path to read
    * @return string|false file contents, or false when unreadable
    */
   public function fromFile($file) {
      if ($this->readable($file)) {
         return file_get_contents($file);
      }
      return false;
   }
}
?>
| artnum/phplibs | Files.php | PHP | bsd-2-clause | 2,742 |
/*
* Copyright (c) 2017, Terrence Ezrol (ezterry)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
package com.ezrol.terry.minecraft.jsonRecipeEdit.commands;
import com.ezrol.terry.minecraft.jsonRecipeEdit.JSONRecipeEdit;
import com.ezrol.terry.minecraft.jsonRecipeEdit.virtualcommandblock.VCommandSet;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import java.util.LinkedList;
/**
* Read in a Virtual Command Chain
* These act like chains of command blocks based on the input criteria
*
* Created by ezterry on 2/18/17.
*/
public class VirtualCommandChain extends GenericCommand{
    /** Name of this JSON command, as matched by the command dispatcher. */
    @Override
    public String getCommandName() {
        return "virtual command chain";
    }

    /** True when the JSON element is a primitive string value. */
    private boolean isString(JsonElement e){
        return e.isJsonPrimitive() && e.getAsJsonPrimitive().isString();
    }

    /**
     * Parse one "virtual command chain" JSON object and register the
     * resulting VCommandSet with the global command-chain registry.
     *
     * Recognized keys:
     *   trigger (string, required) - event that fires the chain
     *   dim     (string or number, optional, default "*") - dimension filter
     *   filter  (string, optional, default "") - additional trigger filter
     *   log     (boolean, optional, default false) - log chain execution
     *   name    (string, optional, default "VCommandChain") - chain label
     *   chain   (array of strings, optional) - the commands to run
     *
     * On any malformed field the method logs an error and returns without
     * registering anything.
     */
    @Override
    public void runCommand(JsonObject command) {
        VCommandSet chain;
        LinkedList<String> cmdlst = new LinkedList<>();
        String trigger;
        String dim;
        String filter;
        boolean log;
        String name;

        // "trigger" is mandatory and must be a string.
        if (command.has("trigger") && isString(command.get("trigger"))) {
            trigger = command.get("trigger").getAsString();
        } else {
            error("Command chain trigger must be provided");
            error(String.format("not provided in: %s", command.toString()));
            return;
        }
        // "dim" may be a string or a number; numbers are normalized to their
        // decimal string form. Missing means "any dimension" ("*").
        if (!command.has("dim")) {
            dim = "*";
        } else if (isString(command.get("dim"))) {
            dim = command.get("dim").getAsString();
        } else if (command.get("dim").isJsonPrimitive() && command.get("dim").getAsJsonPrimitive().isNumber()) {
            dim = String.format("%d", command.get("dim").getAsJsonPrimitive().getAsInt());
        } else {
            error(String.format("Command chain dimension must be either a string or a number, got: %s",
                    command.get("dim").toString()));
            return;
        }
        // "filter" defaults to the empty string when absent.
        if (!command.has("filter")) {
            filter = "";
        } else if (isString(command.get("filter"))) {
            filter = command.get("filter").getAsString();
        } else {
            error(String.format("Unexpected command chain filter: %s",
                    command.get("filter").toString()));
            return;
        }
        // NOTE(review): these two reads assume "log"/"name" are primitives when
        // present; a non-primitive value would throw here rather than error().
        log = command.has("log") && command.get("log").getAsJsonPrimitive().getAsBoolean();
        if (!command.has("name")) {
            name = "VCommandChain";
        } else {
            name = command.get("name").getAsJsonPrimitive().getAsString();
        }
        // Collect the chain's commands; a missing/non-array "chain" yields an
        // empty command list rather than an error.
        if (command.has("chain") && command.get("chain").isJsonArray()) {
            for (JsonElement e : command.get("chain").getAsJsonArray()) {
                if (isString(e)) {
                    cmdlst.addLast(e.getAsString());
                } else {
                    error(String.format("invalid chain command: %s", e.toString()));
                    return;
                }
            }
        }
        chain = new VCommandSet(trigger, filter, dim, cmdlst, log, name);
        JSONRecipeEdit.commandChains.addCommand(chain);
        info(String.format("Adding command chain: %s", name));
    }
}
| ezterry/JsonRecipeEdit | src/main/java/com/ezrol/terry/minecraft/jsonRecipeEdit/commands/VirtualCommandChain.java | Java | bsd-2-clause | 4,501 |
#ifndef PM_BASE64_H_
#define PM_BASE64_H_

#include "pm.h"

#include <stdint.h>
#include <stdio.h>

/*
 * Worst-case buffer sizes for base64 conversion (PM_ROUND_UP is presumably
 * defined in pm.h — confirm). Decoding: every 4 input chars yield 3 bytes;
 * encoding: every 3 input bytes yield 4 chars.
 */
#define PM_BASE64_DECODED_SIZE(x) ((PM_ROUND_UP(x, 4UL) * 3UL) / 4UL)
#define PM_BASE64_ENCODED_SIZE(x) ((PM_ROUND_UP(x, 3UL) * 4UL) / 3UL)

/* Supported base64 alphabets / dialects. */
enum pm_base64_encoding_e
{
    PM_BASE64_RFC3458,
    PM_BASE64_RFC4648,
    PM_BASE64_RFC7515,
    PM_BASE64_XML_IDENTIFIER,
    PM_BASE64_XML_NAME_TOKEN,
};

/*
 * Encode szData bytes from data into buffer (capacity szBuffer).
 * Returns a ssize_t — presumably the number of characters written, or a
 * negative value on error; confirm against the implementation.
 */
extern ssize_t pm_base64_encode(
    char *buffer,
    size_t szBuffer,
    void const *data,
    size_t szData);

/*
 * Decode szData base64 characters from data into buffer (capacity szBuffer).
 * Return convention mirrors pm_base64_encode.
 */
extern ssize_t pm_base64_decode(
    void *buffer,
    size_t szBuffer,
    char const *data,
    size_t szData);

#endif /* PM_BASE64_H_ */
cask 'language-switcher' do
version '1.1.7'
sha256 'c65882f00b195a0821dd3baf2c81a71d3ddd01b64cf6beaf56abb47cb948ffa8'
url "http://www.tj-hd.co.uk/downloads/Language_Switcher_#{version.gsub('.', '_')}.dmg"
appcast 'http://feeds.tj-hd.co.uk/feeds/language_switcher/appcast.xml',
:sha256 => '0a33d4efed28803122f154fd8a9eb0c62f60534ca542f1fa80a98341fcce4f15'
name 'Language Switcher'
homepage 'http://www.tj-hd.co.uk/en-gb/languageswitcher/'
license :gratis
app 'Language Switcher.app'
end
| cedwardsmedia/homebrew-cask | Casks/language-switcher.rb | Ruby | bsd-2-clause | 514 |
#include <cstddef>
#include <cstdint>
#include <type_traits>
#include "etl/type_list.h"
using etl::TypeList;
/*
* There's not a lot one can do with an empty TypeList.
*/
static_assert(TypeList<>::size() == 0, "Empty TypeList must have size 0");
template <typename T> struct BogusFn;
static_assert(std::is_same<TypeList<>,
TypeList<>::Map<BogusFn>>::value,
"TypeList<>::Map<F> must be TypeList<>, independent of F");
/*
* Non-empty TypeLists are a bit more interesting.
*/
struct A {};
struct B {};
struct C {};
// Repetition
static_assert(std::is_same<etl::Repeat<A, 3>, TypeList<A, A, A>>::value, "");
// Sizing
static_assert(TypeList<A, B, C>::size() == 3, "");
// Indexing
static_assert(std::is_same<A, TypeList<A, B, C>::At<0>>::value, "");
static_assert(std::is_same<B, TypeList<A, B, C>::At<1>>::value, "");
static_assert(std::is_same<C, TypeList<A, B, C>::At<2>>::value, "");
// Mapping
static_assert(
std::is_same<TypeList<unsigned char, unsigned int>,
TypeList<signed char, signed int>::Map<std::make_unsigned>
>::value, "");
// Searching by predicate, returning the type.
static_assert(
std::is_same<char *,
TypeList<int, char *, bool, void *>::FindFirst<std::is_pointer>
>::value, "");
// Searching by identity, returning the index.
static_assert(TypeList<int, bool, char>::index_of<int>() == 0, "");
static_assert(TypeList<int, bool, char>::index_of<bool>() == 1, "");
static_assert(TypeList<int, bool, char>::index_of<char>() == 2, "");
// Checking uniqueness.
static_assert(TypeList<>::all_unique, "");
static_assert(TypeList<int>::all_unique, "");
static_assert(TypeList<int, bool, char>::all_unique, "");
static_assert(TypeList<int, bool, int>::all_unique == false, "");
// Checking containment.
static_assert(TypeList<>::contains<int>() == false, "");
static_assert(TypeList<char>::contains<int>() == false, "");
static_assert(TypeList<int>::contains<int>(), "");
static_assert(TypeList<char, bool>::contains<int>() == false, "");
static_assert(TypeList<char, int>::contains<int>(), "");
// Deriving aggregate properties.
static_assert(etl::MaxSizeOf<TypeList<int>>::value == sizeof(int), "");
static_assert(etl::MaxAlignOf<TypeList<int>>::value == alignof(int), "");
| cbiffle/etl-test | test/type_list_test.cc | C++ | bsd-2-clause | 2,323 |
// Copyright 1998-2015 Epic Games, Inc. All Rights Reserved.
#pragma once
#include "MobilityCustomization.h"
/** Detail-panel customization for scene components (transform, mobility). */
class FSceneComponentDetails : public IDetailCustomization
{
public:
    /** Makes a new instance of this detail layout class for a specific detail view requesting it */
    static TSharedRef<IDetailCustomization> MakeInstance();

    /** IDetailCustomization interface */
    virtual void CustomizeDetails( IDetailLayoutBuilder& DetailBuilder ) override;

private:
    /** Builds the transform section of the details panel. */
    void MakeTransformDetails( IDetailLayoutBuilder& DetailBuilder );

    /** Shared customization for the component's Mobility property. */
    TSharedPtr<FMobilityCustomization> MobilityCustomization;
};
| PopCap/GameIdea | Engine/Source/Editor/DetailCustomizations/Private/SceneComponentDetails.h | C | bsd-2-clause | 597 |
# RetinaSDK.js - A JavaScript Client for the Cortical.io Retina API
Pure JavaScript wrapper library for the [Cortical.io API](http://api.cortical.io/). Register for a [free Cortical.io
API key](http://www.cortical.io/resources_apikey.html) and include RetinaSDK.js to add language intelligence to any
browser-based application.
## Introduction
Cortical.io's Retina API allows the user to perform semantic operations on text. One can for example:
* measure the semantic similarity between two written entities
* create a semantic classifier based on positive and negative example texts
* extract keywords from a text
* divide a text into sub-sections corresponding to semantic changes
* extract terms from a text based on part of speech tags
The meaning of terms and texts is stored in a sparse binary representation that allows the user to apply logical
operators to refine the semantic representation of a concept.
You can read more about the technology at the [documentation page](http://documentation.cortical.io/intro.html).
To access the API, you will need to register for an [API key](http://www.cortical.io/resources_apikey.html).
## Installation
Download and include `retina-sdk-1.0.js` (development version) or `retina-sdk-1.0.min.js` (production version) in an
HTML document.
<script src="/path/to/retina-sdk-1.0.js"></script>
Once the script has loaded, the global object retinaSDK will be created from which you can instantiate the client
with a valid API Key.
## Usage
**RetinaSDK.js** offers two abstractions of the Cortical.io Retina API, a lightweight module that offers simplified
access to the most common and useful API functions available and a full version module that gives the user complete
control over various parameter settings and complete access to all API endpoints.
### LiteClient Module
The LiteClient module is sufficient for most applications and offers the ability to quickly and easily
compute keywords for a text, semantically compare two texts, retrieve similar terms, create category filters for
semantic filtering and generate semantic fingerprints of a given text. To get started, create an instance of the
lightweight client by passing your API key as follows:
```javascript
/* Create "lightweight" LiteClient instance */
var liteClient = retinaSDK.LiteClient(your_api_key)
```
Once you've created a client instance, you can start using it to make calls to the Retina API:
```javascript
/* Retrieve similar terms */
liteClient.getSimilarTerms("javascript");
> ["javascript", "browser", "html", "browsers", "api", "xml", "functionality", "microsoft", "runtime", "perl", "implementations", "css", "software", "unix", "files", "gui", "server", "plugin", "internet explorer", "linux"]
/* Return keywords of a text */
liteClient.getKeywords("Vienna is the capital and largest city of Austria, and one of the nine states of Austria");
> ["austria", "vienna"]
/* Compute a semantic fingerprint for an input text */
liteClient.getFingerprint("apple")
> Array[328]
/* Compute the similarity between two texts */
liteClient.compare("apple", "microsoft")
> 0.4024390243902438
/* Compute the similarity between two fingerprints */
var appleFP = liteClient.getFingerprint("apple")
var microsoftFP = liteClient.getFingerprint("microsoft")
liteClient.compare(appleFP, microsoftFP)
> 0.4024390243902438
/* Compute the similarity between a fingerprint and a text */
var appleFP = liteClient.getFingerprint("apple")
liteClient.compare(appleFP, "microsoft")
> 0.4024390243902438
/* Construct a composite Fingerprint from an array of texts to use for semantic filtering */
var neurologyFilter = liteClient.createCategoryFilter(["neuron", "synapse", "neocortex"])
console.log(neurologyFilter)
> Array[677]
/* Use the neurologyFilter computed above to compare and classify new texts. */
liteClient.compare(neurologyFilter, "skylab")
> 0.056544622895956895 // low semantic similarity -> negative classification
liteClient.compare(neurologyFilter, "cortical column")
> 0.35455851788907006 // high semantic similarity -> positive classification
```
#### Callbacks
The above examples show basic use of the LiteClient without callback functions to process the responses. But since
each call to the LiteClient results in an HTTP request being made to the Cortical.io API, it is highly recommended
to pass a callback function as part of each method call to handle the resulting response. While the callback
parameter is technically optional, if it is missing, the HTTP requests made will block code execution until
a response is received, which can result in poor application performance.
Callbacks can either be a single function or an object with two named functions, success and error, which will
process normal responses or deal with failed requests. If only a single function is passed, it will be assumed to be
the success function and failed requests will result in an exception.
```javascript
/* Asynchronously retrieve similar terms with a callback function */
liteClient.getSimilarTerms("javascript", function(similarTerms) {
console.log(similarTerms)
});
/* Asynchronously retrieve similar terms with an object containing success and error callbacks */
liteClient.getSimilarTerms("javascript", {success: function(similarTerms) {
console.log(similarTerms)
}, error: function(response){
// handle error
}});
```
### FullClient Module
The FullClient module provides complete access to the entire Retina API and allows for more flexibility in configuring
request parameters than the LiteClient module. Some functionality included with the FullClient not available in the
LiteClient are operations on expressions, images and bulk requests. A full listing of the FullClient's methods is
provided below (Available Functions and Parameters).
As with the LiteClient, the FullClient must be instantiated with a valid Cortical.io API key:
```javascript
/* Create FullClient instance */
var fullClient = retinaSDK.FullClient(your_api_key)
```
Additional parameters can also be passed when creating a FullClient instance to specify the host address (in case you
have access to your own Retina API service, for example by running your own [AWS](https://aws.amazon.com/marketplace/seller-profile?id=c88ca878-a648-464c-b29b-38ba057bd2f5) or [Azure instance](https://azure.microsoft.com/en-us/marketplace/partners/cortical-io/cortical-io-retinaservice-eng-gen/)) and Retina name, so you can
configure a specific Retina for subsequent calls.
```javascript
/* Create FullClient instance with explicit server address and Retina name */
var fullClient = retinaSDK.FullClient(your_api_key, "http://api.cortical.io/rest/", "en_associative")
```
#### Semantic Expressions
The semantic fingerprint is the basic unit within the Retina API. A text or a term can be resolved into a fingerprint
using the API. Fingerprints can also be combined in *expressions*, and a number of methods
expect input in our expression language. This is explained in more detail [here](http://documentation.cortical.io/the_power_of_expressions.html).
Expressions are essentially `json` strings with reserved keys: `term`, `text`, and `positions`.
In the following example, we note that the `compare` function takes a list of two such expressions as arguments.
In JavaScript we can create a list of two objects with (in this case) `term` elements.
```javascript
fullClient.compare({comparison: [{"term": "synapse"}, {"term": "skylab"}]})
> Object {euclideanDistance: 0.9679144385026738, sizeRight: 146, overlappingLeftRight: 0.02631578947368421, overlappingRightLeft: 0.0410958904109589, weightedScoring: 0.6719223186964691…}
```
Expressions can also be connected to each other with the operators `and`, `or` and `sub`:
```javascript
/* Subtract the meaning of tiger from the term 'jaguar' to compute a Fingerprint composed of the car-related meaning of 'jaguar' */
fullClient.getFingerprintForExpression({expression: {"sub": [{"term": "jaguar"}, {"term": "tiger"}]}})
```
#### Callbacks
As with the LiteClient, all calls to the FullClient accept an optional callback parameter that can either be a single
function or an object with two named functions, success and error, which will process normal responses or deal with
failed requests. If only a single function is passed, it will be assumed to be the success function and failed
requests will result in an exception.
#### Available Functions and Parameters
<table class="table table-bordered table-striped">
<thead>
<tr>
<th style="">Method</th>
<th style="">Description</th>
<th style="">Required Parameters</th>
<th style="">Optional Parameters</th>
</tr>
</thead>
<tbody>
<tr>
<td>getRetinas</td>
<td>Returns information about Retinas as an array of Retina objects</td>
<td>none</td>
<td>retina_name (string)</td>
</tr>
<tr>
<td>getTerms</td>
<td>Returns information about terms as an array of term objects</td>
<td>none</td>
<td>term (string), start_index (number), max_results (number), get_fingerprint (boolean)</td>
</tr>
<tr>
<td>getContextsForTerm</td>
<td>Returns an array of all the contexts for a given term</td>
<td>term (string)</td>
<td>start_index (number), max_results (number), get_fingerprint (boolean)</td>
</tr>
<tr>
<td>getSimilarTermsForTerm</td>
<td>Returns an array of similar terms for the specified input term</td>
<td>term (string)</td>
<td>context_id (number), start_index (number), max_results (number), pos_type (string), get_fingerprint
(boolean)</td>
</tr>
<tr>
<td>getFingerprintForText</td>
<td>Returns a Retina representation (a Fingerprint) of the input text</td>
<td>text (string)</td>
<td>none</td>
</tr>
<tr>
<td>getKeywordsForText</td>
<td>Returns an array of keywords from the input text</td>
<td>text (string)</td>
<td>none</td>
</tr>
<tr>
<td>getTokensForText</td>
<td>Returns an array of sentences (each of which is a comma-separated list of tokens) from an input
text</td>
<td>text (string)</td>
<td>pos_tags (string)</td>
</tr>
<tr>
<td>getSlicesForText</td>
<td>Returns an array of text objects corresponding to the input text, divided according to topic changes</td>
<td>text (string)</td>
<td>pos_tags (string)</td>
</tr>
<tr>
<td>getFingerprintsForTexts</td>
<td>Returns an array of Retina representations (Fingerprints) of each input text</td>
<td>texts (array of strings)</td>
<td>sparsity (number)</td>
</tr>
<tr>
<td>getLanguageForText</td>
<td>Returns an object containing information about the language of the specified text</td>
<td>text (string)</td>
<td>none</td>
</tr>
<tr>
<td>getFingerprintForExpression</td>
<td>Returns a Retina representation (a Fingerprint) of the input expression</td>
<td>expression (JSON object encapsulating a Semantic Expression)</td>
<td>sparsity (number)</td>
</tr>
<tr>
<td>getContextsForExpression</td>
<td>Returns an array of contexts for the input expression</td>
<td>expression (JSON object encapsulating a Semantic Expression)</td>
<td>start_index (number), max_results (number), get_fingerprint (boolean), sparsity (number)</td>
</tr>
<tr>
<td>getSimilarTermsForExpression</td>
<td>Returns an array of similar terms for the input expression</td>
<td>expression (JSON object encapsulating a Semantic Expression)</td>
<td>context_id (number), start_index (number), max_results (number), pos_type (string), sparsity (number), get_fingerprint (boolean)</td>
</tr>
<tr>
<td>getFingerprintsForExpressions</td>
<td>Returns an array of Retina representations (Fingerprints) for an array of input expressions</td>
<td>expressions (array of JSON objects encapsulating Semantic Expressions)</td>
<td>sparsity (number)</td>
</tr>
<tr>
<td>getContextsForExpressions</td>
<td>Returns an array of context arrays for the input expressions</td>
<td>expressions (array of JSON objects encapsulating Semantic Expressions)</td>
<td>start_index (number), max_results (number), sparsity (number), get_fingerprint (boolean)</td>
</tr>
<tr>
<td>getSimilarTermsForExpressions</td>
<td>Returns an array of Term object arrays containing similar terms corresponding to the input array of
expressions</td>
<td>expressions (array of JSON objects encapsulating Semantic Expressions)</td>
<td>context_id (number), start_index (number), max_results (number), pos_type (string), sparsity (number), get_fingerprint (boolean)</td>
</tr>
<tr>
<td>compare</td>
<td>Returns an object containing distance and similarity measures of the two input expressions</td>
<td>comparison (array of JSON object pair encapsulating Semantic Expressions to compare)</td>
<td>none</td>
</tr>
<tr>
<td>compareBulk</td>
<td>Returns an array of objects containing distance and similarity measures of the input array of
expressions to compare</td>
<td>comparisons (array of JSON object pairs encapsulating Semantic Expressions to compare)</td>
<td>none</td>
</tr>
<tr>
<td>getImage</td>
<td>Returns a visualization as an encoded string of the input expression</td>
<td>expression (JSON object encapsulating a Semantic Expression)</td>
<td>image_scalar (number), plot_shape (string), image_encoding (string), sparsity (number)</td>
</tr>
<tr>
<td>getImages</td>
<td>Returns an array of visualizations as encoded string of the input expressions</td>
<td>expressions (array of JSON objects encapsulating Semantic Expressions)</td>
<td>image_scalar (number), plot_shape (string), image_encoding (string), sparsity (number),
get_fingerprint (boolean)</td>
</tr>
<tr>
<td>compareImage</td>
<td>Returns an overlay image for the two input elements specified by a JSON array containing two
expressions</td>
<td>expressions (array of two JSON objects encapsulating two Semantic Expressions)</td>
<td>image_scalar (number), plot_shape (string), image_encoding (string)</td>
</tr>
<tr>
<td>createCategoryFilter</td>
<td>Returns a Semantic Fingerprint that can be used to filter texts, built by combining positive and negative
example texts that should be positively and negatively classified by the filter</td>
<td>filter_name (string), positive_examples (array of strings representing positive examples for the
filter)</td>
<td>negative_examples (array of strings representing negative examples for the
filter)</td>
</tr>
</tbody>
</table>
#### FullClient Examples
```javascript
/* Create FullClient instance */
var fullClient = retinaSDK.FullClient(your_api_key)
/* Retrieve an array of all available Retinas */
fullClient.getRetinas(callback)
/* Retrieve information about a specific term */
fullClient.getTerms({term: "javascript"}, callback)
/* Get contexts for a given term */
fullClient.getContextsForTerm({term: "javascript", max_results: 3}, callback)
/* Get similar terms and their Fingerprints for a given term */
fullClient.getSimilarTermsForTerm({term: "javascript", get_fingerprint: true}, callback)
/* Encode a text into a Semantic Fingerprint */
fullClient.getFingerprintForText({"text": "JavaScript is a dynamically typed object-oriented programming language"}, callback)
/* Return keywords from a text */
fullClient.getKeywordsForText({"text": "JavaScript is a dynamically typed object-oriented programming language"}, callback)
/* Returns tokens from an input text */
fullClient.getTokensForText({"text": "JavaScript is a dynamically typed object-oriented programming language", pos_tags: "NN, NNP"}, callback)
/* Slice the input text according to semantic changes (works best on larger texts of at least several sentences) */
fullClient.getSlicesForText({"text": text}, callback)
/* Return Semantic Fingerprints for numerous texts in a single call */
fullClient.getFingerprintsForTexts({"texts": ["first text", "second text"]}, callback)
/* Detect the language for an input text */
fullClient.getLanguageForText({"text": "Dieser Text ist auf Deutsch"}, callback)
/* Return the Fingerprint for an input expression */
fullClient.getFingerprintForExpression({expression: {"text": "JavaScript is a dynamically typed object-oriented programming language"}}, callback)
/* Return contexts for an input expression */
fullClient.getContextsForExpression({expression: {"text": "JavaScript is a dynamically typed object-oriented programming language"}}, callback)
/* Return similar terms for an input expression */
fullClient.getSimilarTermsForExpression({expression: {"text": "JavaScript is a dynamically typed object-oriented programming language"}}, callback)
/* Return Fingerprints for multiple semantic expressions */
fullClient.getFingerprintsForExpressions({expressions: [{"text": "first text"}, {"text": "second text"}]}, callback)
/* Return contexts for multiple semantic expressions */
fullClient.getContextsForExpressions({expressions: [{"text": "first text"}, {"text": "second text"}]}, callback)
/* Return similar terms for multiple semantic expressions */
fullClient.getSimilarTermsForExpressions({expressions: [{"text": "first text"}, {"text": "second text"}]}, callback)
/* Compute the semantic similarity of two input expressions */
fullClient.compare({comparison: [{term: "synapse"}, {term: "skylab"}]}, callback)
/* Make multiple comparisons in a single call */
var comparison1 = [{"term": "synapse"}, {"term": "skylab"}];
var comparison2 = [{"term": "mir"}, {"text": "skylab was a space station"}];
fullClient.compareBulk({comparisons: [comparison1, comparison2]}, callback);
/* Create an image from an expression */
fullClient.getImage({expression: {"term": "test"}}, callback)
/* Create multiple images from multiple expressions in a single call */
fullClient.getImages({expressions: [{"text": "first text"}, {"text": "second text"}]}, callback)
/* Create a composite image showing the visual overlap between two expressions */
fullClient.compareImage({expressions: [{"text": "first text"}, {"text": "second text"}]}, callback)
/* Create a filter Fingerprint from example texts that should "pass through" the filter */
fullClient.createCategoryFilter({filter_name: "test", positive_examples: ["JavaScript is a dynamically typed object-oriented programming language", "jQuery is a cross-platform JavaScript library designed to simplify the client-side scripting of HTML."]}, callback)
```
## Support
For further documentation about the Retina API and information on Cortical.io's `Retina` technology please see our
[Knowledge Base](http://www.cortical.io/resources_tutorials.html). Also the `test` folder contains more examples of how to use the
client modules.
If you have any questions or problems please visit our [forum](http://www.cortical.io/resources_forum.html).
### Changelog
<B>v 1.0.0</B>
* Initial release. | cortical-io/RetinaSDK.js | README.md | Markdown | bsd-2-clause | 18,946 |
package com.livescribe.framework.lsmail;
import java.util.Date;
import java.util.List;
import org.apache.log4j.Logger;
import org.springframework.stereotype.Component;
import com.danga.MemCached.MemCachedClient;
import com.danga.MemCached.SockIOPool;
/**
* <bold>imported from the LSFoundation project</bold>
*
* The QueueManager class provide various convenient API to push data to the messaging queue and retrieve items that are in a queue.
* The QueueManager work with a Queuing system that use a memcached protocol. The current queuing system use is Kestrel, see
* http://robey.lag.net/2008/11/27/scarling-to-kestrel.html for more details
* The basic concept of the class is to create a QueueManager with a name - so you can potentially differentiate different manage. In
* addition the name is used internally to create the memcached client (see SockIOPool documentation for details).
* When the QueueManager is created you can simply use it to push something in a queue (a queue is simply identified by a name), and you
* can retrieve any object inserted in a particular queue through the getFromQueue's API. The getFromQueue API provide one method that take
* a timeout, and it is strongly suggested to use such API if you know that the queue that you are requesting may not be constantly filled up.
* In addition if you don't want to specify any particular timeout, it is strongly suggested to use the API
* <code>
* public Object getFromQueue(String queueName);
* </code>
* This method will use a default short timeout (default is 100 ms), thus the call may incur a small pause but will make your system
* run a lot smoother. By using a timeout, you will avoid creating an application that will be constantly polling the server
* (thus using a lot of CPU).
*
* When accessing the content of the queue, the QueueManager create each queue in Kestrel using the specified name but prefixed with "-" sign
* in front... By doing that it may later open the API to get some other queue without a "-" that could be use more from a system point of view
* or any other convenient thing. Feel free to remove that prefix is you don't think it's useful.
*
* @author Mohammad M. Naqvi
*
*/
@Component
public class QueueManager {
private final Logger logger = Logger.getLogger(QueueManager.class);
protected String managerName;
protected MemCachedClient memcacheClient;
/**
* Constructor to initialize a Queue using the Memcached protocol. You need to provide the name of the queue manager
* and the list of servers that are used for the corresponding queue. Like memcached the queue system can be distributed across
* different system (using Kestrel right now as the back end system).
* A queue manager connect to a particular cluster of machine, and can obviously contains different queue. By providing
* a name to the manager, you can differentiate different logical queue that can be spread in different cluster.
*
* @param mName the queue manager name to use
* @param servers the list of server where Kestrel is running. Format should : 'hostname:port'
*/
public QueueManager(String mName, List<String> servers) {
super();
managerName = mName;
if (servers != null && servers.size() > 0) {
SockIOPool pool = SockIOPool.getInstance(managerName);
String[] serverList = servers.toArray(new String[servers.size()]);
pool.setServers(serverList);
pool.initialize();
// Create the client for that queue
memcacheClient = new MemCachedClient(managerName);
logger.debug("Created a new QueueManager ["+managerName+"] with servers: " + servers);
} else {
memcacheClient = null;
logger.warn("QueueManager created but no server configured. QueueManager ["+managerName+"] will NOT work!");
}
}
/**
* Push an object to the specified queue with no particular expiration date.
* @param queueName the name of the queue where the object should be added
* @param anObject the object to push in the queue
*/
public void pushToQueue(String queueName, Object anObject) {
pushToQueue(queueName, anObject, null);
}
/**
* Push an object to the specified queue with an expiration date setup to expire in timeoutInMillis from now
* @param queueName the name of the queue where the object should be added
* @param anObject the object to push in the queue
* @param timeoutInMillis time out in milli seconds from now for the object to expire
*/
public void pushToQueue(String queueName, Object anObject, long timeoutInMillis) {
pushToQueue(queueName, anObject, new java.util.Date(System.currentTimeMillis() + timeoutInMillis));
}
/**
* Push an object to the Queue.
* @param queueName the name of the queue where the object should be added
* @param anObject the object to push in the queue
* @param expirationDate when the object can be expired in the queue
*/
public void pushToQueue(String queueName, Object anObject, Date expirationDate) {
// If no memcached client don't bother trying to put something in the queue
if (!isValidQueue()) {
logger.warn("The queue ["+queueName+"] is NOT valid. ");
return;
}
memcacheClient.set(queueNameFrom(queueName), anObject, expirationDate, queueNameFrom(queueName).hashCode());
logger.debug("Pushed: [" + anObject + "] in queue ["+queueName+"]");
}
/**
* Return true if the current Queue has an active queue (information can be pushed an retrieved).
* @return
*/
public boolean isValidQueue() {
return memcacheClient != null;
}
public String getManagerName() {
return managerName;
}
public void setManagerName(String managerName) {
this.managerName = managerName;
}
public MemCachedClient getMemcacheClient() {
return memcacheClient;
}
public void setMemcacheClient(MemCachedClient memcacheClient) {
this.memcacheClient = memcacheClient;
}
@Override
public String toString() {
return "<QueueManager> Queue name ["+managerName+"] - client: " + memcacheClient;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result
+ ((managerName == null) ? 0 : managerName.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
QueueManager other = (QueueManager) obj;
if (managerName == null) {
if (other.managerName != null)
return false;
} else if (!managerName.equals(other.managerName))
return false;
return true;
}
private String queueNameFrom(String aName) {
return "-" + aName;
}
} | jackstraw66/web | livescribe/lsmailservice/src/main/java/com/livescribe/framework/lsmail/QueueManager.java | Java | bsd-2-clause | 6,610 |
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_API_ARGUMENTS_H_
#define V8_API_ARGUMENTS_H_
#include "src/api.h"
#include "src/isolate.h"
namespace v8 {
namespace internal {
// Custom arguments replicate a small segment of stack that can be
// accessed through an Arguments object the same way the actual stack
// can.
// Base class holding the fixed-size array of raw Object* slots that backs a
// callback-arguments object.  Being Relocatable, instances are registered with
// the isolate so the embedded pointers can be visited and updated.
template <int kArrayLength>
class CustomArgumentsBase : public Relocatable {
 public:
  // Visits every slot of the embedded array so an ObjectVisitor can trace
  // (and potentially relocate) the stored pointers.
  virtual inline void IterateInstance(ObjectVisitor* v) {
    v->VisitPointers(values_, values_ + kArrayLength);
  }

 protected:
  // First slot of the embedded value array.
  inline Object** begin() { return values_; }
  explicit inline CustomArgumentsBase(Isolate* isolate)
      : Relocatable(isolate) {}
  // Storage for the replicated stack segment of argument values.
  Object* values_[kArrayLength];
};
// Adds return-value handling on top of CustomArgumentsBase.  T is a
// *CallbackInfo type supplying the slot layout (kArgsLength,
// kReturnValueIndex, kIsolateIndex) as static constants.
template <typename T>
class CustomArguments : public CustomArgumentsBase<T::kArgsLength> {
 public:
  static const int kReturnValueOffset = T::kReturnValueIndex;

  typedef CustomArgumentsBase<T::kArgsLength> Super;
  // Zap the return-value slot on destruction so any stale handle to it is
  // recognizable (kHandleZapValue) during debugging.
  ~CustomArguments() {
    this->begin()[kReturnValueOffset] =
        reinterpret_cast<Object*>(kHandleZapValue);
  }

 protected:
  explicit inline CustomArguments(Isolate* isolate) : Super(isolate) {}

  // Reads back the value a callback stored via its ReturnValue slot; returns
  // an empty handle if the callback set nothing (see definition).
  template <typename V>
  Handle<V> GetReturnValue(Isolate* isolate);

  // The isolate pointer is stashed in one of the slots; cast it back out.
  inline Isolate* isolate() {
    return reinterpret_cast<Isolate*>(this->begin()[T::kIsolateIndex]);
  }
};
// Returns the value the callback wrote into the return-value slot, or an
// empty handle if the slot still holds the hole (i.e. nothing was set).
template <typename T>
template <typename V>
Handle<V> CustomArguments<T>::GetReturnValue(Isolate* isolate) {
  // Check the ReturnValue.
  Object** handle = &this->begin()[kReturnValueOffset];
  // Nothing was set, return empty handle as per previous behaviour.
  if ((*handle)->IsTheHole(isolate)) return Handle<V>();
  Handle<V> result = Handle<V>::cast(Handle<Object>(handle));
  result->VerifyApiCallResultType();
  return result;
}
// Argument bundle passed to property interceptor/accessor callbacks.  The
// constructor lays out the slots expected by PropertyCallbackInfo<Value>.
class PropertyCallbackArguments
    : public CustomArguments<PropertyCallbackInfo<Value> > {
 public:
  typedef PropertyCallbackInfo<Value> T;
  typedef CustomArguments<T> Super;
  static const int kArgsLength = T::kArgsLength;
  static const int kThisIndex = T::kThisIndex;
  static const int kHolderIndex = T::kHolderIndex;
  static const int kDataIndex = T::kDataIndex;
  static const int kReturnValueDefaultValueIndex =
      T::kReturnValueDefaultValueIndex;
  static const int kIsolateIndex = T::kIsolateIndex;
  static const int kShouldThrowOnErrorIndex = T::kShouldThrowOnErrorIndex;

  // Fills the slot array with receiver/holder/data/isolate and the
  // should-throw flag (encoded as a Smi 0/1).
  PropertyCallbackArguments(Isolate* isolate, Object* data, Object* self,
                            JSObject* holder, Object::ShouldThrow should_throw)
      : Super(isolate) {
    Object** values = this->begin();
    values[T::kThisIndex] = self;
    values[T::kHolderIndex] = holder;
    values[T::kDataIndex] = data;
    values[T::kIsolateIndex] = reinterpret_cast<Object*>(isolate);
    values[T::kShouldThrowOnErrorIndex] =
        Smi::FromInt(should_throw == Object::THROW_ON_ERROR ? 1 : 0);
    // Here the hole is set as default value.
    // It cannot escape into js as it's removed in Call below.
    values[T::kReturnValueDefaultValueIndex] =
        isolate->heap()->the_hole_value();
    values[T::kReturnValueIndex] = isolate->heap()->the_hole_value();
    DCHECK(values[T::kHolderIndex]->IsHeapObject());
    DCHECK(values[T::kIsolateIndex]->IsSmi());
  }

  /*
   * The following Call functions wrap the calling of all callbacks to handle
   * calling either the old or the new style callbacks depending on which one
   * has been registered.
   * For old callbacks which return an empty handle, the ReturnValue is checked
   * and used if it's been set to anything inside the callback.
   * New style callbacks always use the return value.
   */
  Handle<JSObject> Call(IndexedPropertyEnumeratorCallback f);

  inline Handle<Object> Call(AccessorNameGetterCallback f, Handle<Name> name);

  inline Handle<Object> Call(GenericNamedPropertyQueryCallback f,
                             Handle<Name> name);

  inline Handle<Object> Call(GenericNamedPropertyDeleterCallback f,
                             Handle<Name> name);

  inline Handle<Object> Call(IndexedPropertyGetterCallback f, uint32_t index);

  inline Handle<Object> Call(IndexedPropertyQueryCallback f, uint32_t index);

  inline Handle<Object> Call(IndexedPropertyDeleterCallback f, uint32_t index);

  inline Handle<Object> Call(GenericNamedPropertySetterCallback f,
                             Handle<Name> name, Handle<Object> value);

  inline Handle<Object> Call(GenericNamedPropertyDefinerCallback f,
                             Handle<Name> name,
                             const v8::PropertyDescriptor& desc);

  inline Handle<Object> Call(IndexedPropertySetterCallback f, uint32_t index,
                             Handle<Object> value);

  inline Handle<Object> Call(IndexedPropertyDefinerCallback f, uint32_t index,
                             const v8::PropertyDescriptor& desc);

  inline void Call(AccessorNameSetterCallback f, Handle<Name> name,
                   Handle<Object> value);

 private:
  // Convenience accessor for the holder slot set in the constructor.
  inline JSObject* holder() {
    return JSObject::cast(this->begin()[T::kHolderIndex]);
  }
};
// Argument bundle passed to function callbacks.  In addition to the slot
// array (laid out per FunctionCallbackInfo<Value>) it carries the actual
// JavaScript argument vector (argv_/argc_).
class FunctionCallbackArguments
    : public CustomArguments<FunctionCallbackInfo<Value> > {
 public:
  typedef FunctionCallbackInfo<Value> T;
  typedef CustomArguments<T> Super;
  static const int kArgsLength = T::kArgsLength;
  static const int kHolderIndex = T::kHolderIndex;
  static const int kDataIndex = T::kDataIndex;
  static const int kReturnValueDefaultValueIndex =
      T::kReturnValueDefaultValueIndex;
  static const int kIsolateIndex = T::kIsolateIndex;
  static const int kCalleeIndex = T::kCalleeIndex;
  static const int kContextSaveIndex = T::kContextSaveIndex;
  static const int kNewTargetIndex = T::kNewTargetIndex;

  // Fills the slot array; callee may be a JSFunction or a
  // FunctionTemplateInfo (checked by the DCHECK below).
  FunctionCallbackArguments(internal::Isolate* isolate, internal::Object* data,
                            internal::HeapObject* callee,
                            internal::Object* holder,
                            internal::HeapObject* new_target,
                            internal::Object** argv, int argc)
      : Super(isolate), argv_(argv), argc_(argc) {
    Object** values = begin();
    values[T::kDataIndex] = data;
    values[T::kCalleeIndex] = callee;
    values[T::kHolderIndex] = holder;
    values[T::kNewTargetIndex] = new_target;
    values[T::kContextSaveIndex] = isolate->heap()->the_hole_value();
    values[T::kIsolateIndex] = reinterpret_cast<internal::Object*>(isolate);
    // Here the hole is set as default value.
    // It cannot escape into js as it's removed in Call below.
    values[T::kReturnValueDefaultValueIndex] =
        isolate->heap()->the_hole_value();
    values[T::kReturnValueIndex] = isolate->heap()->the_hole_value();
    DCHECK(values[T::kCalleeIndex]->IsJSFunction() ||
           values[T::kCalleeIndex]->IsFunctionTemplateInfo());
    DCHECK(values[T::kHolderIndex]->IsHeapObject());
    DCHECK(values[T::kIsolateIndex]->IsSmi());
  }

  /*
   * The following Call function wraps the calling of all callbacks to handle
   * calling either the old or the new style callbacks depending on which one
   * has been registered.
   * For old callbacks which return an empty handle, the ReturnValue is checked
   * and used if it's been set to anything inside the callback.
   * New style callbacks always use the return value.
   */
  Handle<Object> Call(FunctionCallback f);

 private:
  // JavaScript arguments of the call, not owned by this object.
  internal::Object** argv_;
  int argc_;
};
} // namespace internal
} // namespace v8
#endif // V8_API_ARGUMENTS_H_
| ssaroha/node-webrtc | third_party/webrtc/include/chromium/src/v8/src/api-arguments.h | C | bsd-2-clause | 7,551 |
from mpi4py import MPI
from pySDC.helpers.stats_helper import filter_stats, sort_stats
from pySDC.implementations.controller_classes.controller_MPI import controller_MPI
from pySDC.implementations.collocation_classes.gauss_radau_right import CollGaussRadau_Right
from pySDC.implementations.problem_classes.HeatEquation_1D_FD import heat1d
from pySDC.implementations.sweeper_classes.generic_LU import generic_implicit
from pySDC.implementations.transfer_classes.TransferMesh import mesh_to_mesh
def set_parameters_ml():
    """
    Helper routine to set parameters for the following multi-level runs

    Returns:
        dict: dictionary containing the simulation parameters
        dict: dictionary containing the controller parameters
        float: starting time
        float: end time
    """
    # Level parameters: residual tolerance and time-step size
    level_params = {
        'restol': 5E-10,
        'dt': 0.125,
    }

    # Sweeper parameters: collocation rule, preconditioner and node count
    sweeper_params = {
        'collocation_class': CollGaussRadau_Right,
        'QI': 'LU',
        'num_nodes': [3],
    }

    # Problem parameters for the 1D heat equation
    problem_params = {
        'nu': 0.1,  # diffusion coefficient
        'freq': 2,  # frequency for the test value
        'nvars': [63, 31],  # number of degrees of freedom for each level
    }

    # Step parameters: iteration limit and error tolerance
    step_params = {
        'maxiter': 50,
        'errtol': 1E-05,
    }

    # Space transfer parameters: restriction and interpolation orders
    space_transfer_params = {
        'rorder': 2,
        'iorder': 6,
    }

    # Controller parameters
    controller_params = {
        'logger_level': 30,
        'all_to_done': True,  # can ask the controller to keep iterating all steps until the end
        'use_iteration_estimator': False,  # activate iteration estimator
    }

    # Bundle everything into the description dictionary for easy step instantiation
    description = {
        'problem_class': heat1d,  # pass problem class
        'problem_params': problem_params,  # pass problem parameters
        'sweeper_class': generic_implicit,  # pass sweeper
        'sweeper_params': sweeper_params,  # pass sweeper parameters
        'level_params': level_params,  # pass level parameters
        'step_params': step_params,  # pass step parameters
        'space_transfer_class': mesh_to_mesh,  # pass spatial transfer class
        'space_transfer_params': space_transfer_params,  # pass parameters for spatial transfer
    }

    # Time interval of the simulation
    t0 = 0.0
    Tend = 1.0

    return description, controller_params, t0, Tend
if __name__ == "__main__":
    """
    A simple test program to do MPI-parallel PFASST runs
    """

    # set MPI communicator (one pySDC step per MPI rank)
    comm = MPI.COMM_WORLD

    # get parameters from Part A
    description, controller_params, t0, Tend = set_parameters_ml()

    # instantiate controllers
    controller = controller_MPI(controller_params=controller_params, description=description, comm=comm)
    # get initial values on finest level
    P = controller.S.levels[0].prob
    uinit = P.u_exact(t0)

    # call main functions to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    # filter statistics by type (number of iterations)
    filtered_stats = filter_stats(stats, type='niter')

    # convert filtered statistics to list of iterations count, sorted by process
    iter_counts = sort_stats(filtered_stats, sortby='time')

    # combine statistics into list of statistics (rank 0 receives one list per rank)
    iter_counts_list = comm.gather(iter_counts, root=0)

    rank = comm.Get_rank()
    size = comm.Get_size()

    # only the root rank prints the summary and error report
    if rank == 0:
        out = 'Working with %2i processes...' % size
        print(out)

        # compute exact solutions and compare with both results
        uex = P.u_exact(Tend)
        err = abs(uex - uend)

        out = 'Error vs. exact solution: %12.8e' % err
        print(out)

        # build one list of statistics instead of list of lists, the sort by time
        iter_counts_gather = [item for sublist in iter_counts_list for item in sublist]
        iter_counts = sorted(iter_counts_gather, key=lambda tup: tup[0])

        # compute and print statistics (iteration count per time step)
        for item in iter_counts:
            out = 'Number of iterations for time %4.2f: %1i ' % (item[0], item[1])
            print(out)
| Parallel-in-Time/pySDC | pySDC/playgrounds/compression/run_parallel_Heat_NumPy.py | Python | bsd-2-clause | 4,544 |
/**
* RetMax.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.6.2 Built on : Apr 17, 2012 (05:34:40 IST)
*/
package gov.nih.nlm.ncbi.www.soap.eutils.esearch;
/**
* RetMax bean class
*/
@SuppressWarnings({"unchecked","unused"})
public class RetMax
implements org.apache.axis2.databinding.ADBBean{
public static final javax.xml.namespace.QName MY_QNAME = new javax.xml.namespace.QName(
"http://www.ncbi.nlm.nih.gov/soap/eutils/esearch",
"RetMax",
"ns3");
/**
* field for RetMax
*/
protected java.lang.String localRetMax ;
/**
 * Auto generated getter method.
 *
 * @return java.lang.String the wrapped RetMax value (may be null if never set)
 */
public java.lang.String getRetMax(){
    return localRetMax;
}
/**
 * Auto generated setter method.
 *
 * @param param the RetMax value to wrap
 */
public void setRetMax(java.lang.String param){
    this.localRetMax=param;
}
/**
 * Creates an OMElement that serializes this bean lazily via an ADBDataSource.
 * The element is always named by MY_QNAME; the parent QName is not used here.
 *
 * @param parentQName qualified name of the parent element (unused by this bean)
 * @param factory factory used to create the OMElement
 * @return org.apache.axiom.om.OMElement wrapping this bean as its data source
 */
public org.apache.axiom.om.OMElement getOMElement (
        final javax.xml.namespace.QName parentQName,
        final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{
    org.apache.axiom.om.OMDataSource dataSource =
            new org.apache.axis2.databinding.ADBDataSource(this,MY_QNAME);
    return factory.createOMElement(dataSource,MY_QNAME);
}
/**
 * Serializes this bean to the given writer without emitting an xsi:type
 * attribute (delegates to the three-argument overload with serializeType=false).
 */
public void serialize(final javax.xml.namespace.QName parentQName,
                      javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
    serialize(parentQName,xmlWriter,false);
}
/**
 * Serializes this bean as a &lt;RetMax&gt; element in the esearch namespace,
 * optionally emitting an xsi:type attribute when used inside a simple type.
 * Throws ADBException if the wrapped value is null.
 */
public void serialize(final javax.xml.namespace.QName parentQName,
                      javax.xml.stream.XMLStreamWriter xmlWriter,
                      boolean serializeType)
        throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{

    //We can safely assume an element has only one type associated with it
    java.lang.String namespace = "http://www.ncbi.nlm.nih.gov/soap/eutils/esearch";
    java.lang.String _localName = "RetMax";

    writeStartElement(null, namespace, _localName, xmlWriter);

    // add the type details if this is used in a simple type
    if (serializeType){
        java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://www.ncbi.nlm.nih.gov/soap/eutils/esearch");
        // Prefix the xsi:type value only when a non-empty prefix is bound.
        if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){
            writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                    namespacePrefix+":RetMax",
                    xmlWriter);
        } else {
            writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                    "RetMax",
                    xmlWriter);
        }
    }

    // The wrapped value is written as element text; null is not allowed.
    if (localRetMax==null){
        throw new org.apache.axis2.databinding.ADBException("RetMax cannot be null !!");
    }else{
        xmlWriter.writeCharacters(localRetMax);
    }

    xmlWriter.writeEndElement();
}
/**
 * Returns the namespace prefix to use for the given namespace URI: the fixed
 * "ns3" prefix for the esearch namespace, otherwise a freshly generated
 * unique prefix from the Axis2 databinding utilities.
 */
private static java.lang.String generatePrefix(java.lang.String namespace) {
    final java.lang.String esearchNamespace = "http://www.ncbi.nlm.nih.gov/soap/eutils/esearch";
    return namespace.equals(esearchNamespace)
            ? "ns3"
            : org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}
/**
 * Utility method to write an element start tag, binding the namespace to a
 * prefix (generating one if necessary) when it is not already in scope on
 * the writer.
 */
private void writeStartElement(java.lang.String prefix, java.lang.String namespace, java.lang.String localPart,
                               javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
    if (writerPrefix != null) {
        // Namespace already bound on the writer; let it pick the prefix.
        xmlWriter.writeStartElement(namespace, localPart);
    } else {
        if (namespace.length() == 0) {
            prefix = "";
        } else if (prefix == null) {
            prefix = generatePrefix(namespace);
        }

        // Bind and declare the (possibly generated) prefix explicitly.
        xmlWriter.writeStartElement(prefix, localPart, namespace);
        xmlWriter.writeNamespace(prefix, namespace);
        xmlWriter.setPrefix(prefix, namespace);
    }
}
/**
 * Utility method to write an attribute with the given namespace prefix,
 * declaring and binding the prefix on the writer first if it is not yet
 * associated with the namespace.
 */
private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
                            java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
    if (xmlWriter.getPrefix(namespace) == null) {
        xmlWriter.writeNamespace(prefix, namespace);
        xmlWriter.setPrefix(prefix, namespace);
    }
    xmlWriter.writeAttribute(namespace,attName,attValue);
}
/**
 * Utility method to write an attribute without an explicit prefix; registers
 * the namespace on the writer when one is given, or writes an unqualified
 * attribute when the namespace is empty.
 */
private void writeAttribute(java.lang.String namespace,java.lang.String attName,
                            java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
    if (namespace.equals("")) {
        xmlWriter.writeAttribute(attName,attValue);
    } else {
        registerPrefix(xmlWriter, namespace);
        xmlWriter.writeAttribute(namespace,attName,attValue);
    }
}
/**
 * Util method to write a QName-valued attribute.
 * The QName is serialized as "prefix:localPart" (or just the local part when
 * the namespace maps to the default prefix), registering prefixes as needed.
 */
private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
                                 javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    java.lang.String qnameNamespace = qname.getNamespaceURI();
    java.lang.String qnamePrefix = xmlWriter.getPrefix(qnameNamespace);
    if (qnamePrefix == null) {
        qnamePrefix = registerPrefix(xmlWriter, qnameNamespace);
    }
    // Only qualify the value when the prefix is non-empty (non-default namespace).
    java.lang.String attributeValue = (qnamePrefix.trim().length() > 0)
            ? qnamePrefix + ":" + qname.getLocalPart()
            : qname.getLocalPart();
    if (namespace.equals("")) {
        xmlWriter.writeAttribute(attName, attributeValue);
    } else {
        registerPrefix(xmlWriter, namespace);
        xmlWriter.writeAttribute(namespace, attName, attributeValue);
    }
}
/**
 * Serializes a single QName as character data, declaring a prefix for its
 * namespace on the writer when one is not already bound.
 */
private void writeQName(javax.xml.namespace.QName qname,
                        javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    java.lang.String namespaceURI = qname.getNamespaceURI();
    if (namespaceURI == null) {
        xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        return;
    }
    java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
    if (prefix == null) {
        prefix = generatePrefix(namespaceURI);
        xmlWriter.writeNamespace(prefix, namespaceURI);
        xmlWriter.setPrefix(prefix, namespaceURI);
    }
    if (prefix.trim().length() > 0) {
        xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
    } else {
        // Bound to the default namespace -- emit the bare local form.
        xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
    }
}
/**
 * Writes an array of QNames as a single space-separated text run.
 * <p>
 * Namespace declarations cannot be written after character data has started,
 * so the text is accumulated in a buffer while prefixes are registered, and
 * written in one pass at the end.
 *
 * @param qnames    the QNames to serialize; a null array writes nothing
 * @param xmlWriter the stream writer to emit to
 * @throws javax.xml.stream.XMLStreamException on writer errors
 */
private void writeQNames(javax.xml.namespace.QName[] qnames,
                         javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    if (qnames == null) {
        return;
    }
    // StringBuilder instead of StringBuffer: the buffer is method-local, so
    // StringBuffer's per-call synchronization is pure overhead.
    java.lang.StringBuilder stringToWrite = new java.lang.StringBuilder();
    java.lang.String namespaceURI = null;
    java.lang.String prefix = null;
    for (int i = 0; i < qnames.length; i++) {
        if (i > 0) {
            stringToWrite.append(" ");
        }
        namespaceURI = qnames[i].getNamespaceURI();
        if (namespaceURI != null) {
            prefix = xmlWriter.getPrefix(namespaceURI);
            if ((prefix == null) || (prefix.length() == 0)) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix, namespaceURI);
            }
            if (prefix.trim().length() > 0) {
                stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
            } else {
                stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
            }
        } else {
            stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
        }
    }
    xmlWriter.writeCharacters(stringToWrite.toString());
}
/**
 * Register a namespace prefix on the writer, returning the prefix that is
 * (now) bound to the given namespace. If the namespace is unknown, candidate
 * prefixes are generated until one is found that is not already in use.
 */
private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
    java.lang.String prefix = xmlWriter.getPrefix(namespace);
    if (prefix != null) {
        return prefix;
    }
    prefix = generatePrefix(namespace);
    javax.xml.namespace.NamespaceContext nsContext = xmlWriter.getNamespaceContext();
    // Keep trying fresh prefixes until one is free in the current context.
    for (java.lang.String uri = nsContext.getNamespaceURI(prefix);
         uri != null && uri.length() != 0;
         uri = nsContext.getNamespaceURI(prefix)) {
        prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }
    xmlWriter.writeNamespace(prefix, namespace);
    xmlWriter.setPrefix(prefix, namespace);
    return prefix;
}
/**
 * databinding method to get an XML representation of this object
 *
 * Returns an ADB pull parser that emits the element with its text content
 * set to the current localRetMax value.
 * NOTE(review): the qName parameter appears unused by this generated
 * implementation -- MY_QNAME is used instead; confirm against the generator.
 */
public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
throws org.apache.axis2.databinding.ADBException{
//We can safely assume an element has only one type associated with it
return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(MY_QNAME,
new java.lang.Object[]{
org.apache.axis2.databinding.utils.reader.ADBXMLStreamReader.ELEMENT_TEXT,
org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localRetMax)
},
null);
}
/**
 * Factory class that keeps the parse method
 */
public static class Factory{
/**
 * static method to create the object
 * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
 * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
 * Postcondition: If this object is an element, the reader is positioned at its end element
 * If this object is a complex type, the reader is positioned at the end element of its outer element
 */
public static RetMax parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
RetMax object =
new RetMax();
int event;
java.lang.String nillableValue = null;
java.lang.String prefix ="";
java.lang.String namespaceuri ="";
try {
// Advance to the first start or end element, skipping ignorable events.
while (!reader.isStartElement() && !reader.isEndElement())
reader.next();
// Note all attributes that were handled. Used to differ normal attributes
// from anyAttributes.
java.util.Vector handledAttributes = new java.util.Vector();
while(!reader.isEndElement()) {
if (reader.isStartElement() ){
if (reader.isStartElement() && new javax.xml.namespace.QName("http://www.ncbi.nlm.nih.gov/soap/eutils/esearch","RetMax").equals(reader.getName())){
// xsi:nil is not permitted on this element; reject it explicitly.
nillableValue = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","nil");
if ("true".equals(nillableValue) || "1".equals(nillableValue)){
throw new org.apache.axis2.databinding.ADBException("The element: "+"RetMax" +" cannot be null");
}
java.lang.String content = reader.getElementText();
object.setRetMax(
org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
} // End of if for expected property start element
else{
// A start element we are not expecting indicates an invalid parameter was passed
throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getName());
}
} else {
reader.next();
}
} // end of while loop
} catch (javax.xml.stream.XMLStreamException e) {
throw new java.lang.Exception(e);
}
return object;
}
}//end of factory class
}
| milot-mirdita/GeMuDB | Vendor/NCBI eutils/src/gov/nih/nlm/ncbi/www/soap/eutils/esearch/RetMax.java | Java | bsd-2-clause | 17,431 |
django-12factor-docker
======================
Sample django project demonstrating the 12-factor architecture using docker.
Configuration
-------------
Some basic configuration needs to be in place before the whole environment can
be properly started.
Configuration files are located in the *conf* folder, and they should contain
one environment variable definition per line, using the syntax *KEY=VALUE*.
- conf/app.env
This file should contain the following:
RAVEN_DSN=http://apikey:password@sentry:9000/2
To obtain the values for *apikey* and *password*, first start the stack once,
log into sentry, and configure the service appropriately. Sentry will generate
a key/password pair that should then be entered into this config file, after
which the stack must be restarted so the app can send error reports to sentry.
- conf/sentry.env
This file should contain the following:
SECRET_KEY=changeme
Change the value as you see fit.
Quickstart
----------
First you need to start the sentry container so that you can
generate the credentials required for the app to be able to
send error reports to it.
$ make up SERVICE=sentry
Once the service is up, log into http://localhost:9000 using the default admin credentials
for the sentry server:
- username: admin
- password: admin
Create a default organization, team and project, and let it generate an api
key for our Django project. With that key, create a file *conf/app.env* with the contents:
RAVEN_DSN=http://<key>:<secret>@sentry:9000/<id>
Now it’s time to start the stack. Just run
$ make up
This will rebuild any containers that have been updated and will then
start the full stack.
To verify sentry is properly working with the provided credentials, you
can submit a test message to sentry like
$ docker-compose run app python manage.py raven test
And then verify it shows up in http://localhost:9000
To stop the stack run
$ make down
For other possible targets run
$ make help
Stack
-----
Currently the stack consists of:
- db: a postgresql database
- dbdata: a data volume container for the db container
- app: a simple django application
- web: a frontend web server based on nginx
- amqp: a rabbitmq server to queue celery tasks
- amqpdata: a data volume container for the amqp container
- flower: a web application for monitoring and administering the rabbitmq server
- sentry: a modern error logging and aggregation platform
- sentryworker: celery workers for processing sentry events
- sentryredis: redis backend for caching and celery tasks for sentry
- sentryredisdata: a data volume container for the sentryredis container
- sentrydb: a postgresql database for sentry
- sentrydbdata: a data volume container for the sentrydb container
- elk: a full ELK stack (elasticsearch/logstash/kibana)
- logstashforwarder: a container running logstash-forwarder to process logs from other containers
The only containers exposed on the host are:
- web: to access the application as it’s intended in production,
- flower: to access the admin console for the rabbitmq server
- sentry: to view the logs and errors
- elk: to view logs via kibana and to inspect elasticsearch (for easier debugging)
Limiting the services accessible from the host is intended to
simulate a production environment where all other parts are
protected by a firewall or DMZ.
| ricardokirkner/django-12factor-docker | README.md | Markdown | bsd-2-clause | 3,337 |
// Copyright (c) 2013, Kenton Varda <temporal@gmail.com>
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef CAPNP_EZ_RPC_H_
#define CAPNP_EZ_RPC_H_
#include "rpc.h"
namespace kj { class AsyncIoProvider; class LowLevelAsyncIoProvider; }
namespace capnp {
class EzRpcContext;
class EzRpcClient {
  // Super-simple interface for setting up a Cap'n Proto RPC client.  Example:
  //
  //     # Cap'n Proto schema
  //     interface Adder {
  //       add @0 (left :Int32, right :Int32) -> (value :Int32);
  //     }
  //
  //     // C++ client
  //     int main() {
  //       capnp::EzRpcClient client("localhost:3456");
  //       Adder::Client adder = client.importCap<Adder>("adder");
  //       auto request = adder.addRequest();
  //       request.setLeft(12);
  //       request.setRight(34);
  //       auto response = request.send().wait(client.getWaitScope());
  //       assert(response.getValue() == 46);
  //       return 0;
  //     }
  //
  //     // C++ server
  //     class AdderImpl final: public Adder::Server {
  //     public:
  //       kj::Promise<void> add(AddContext context) override {
  //         auto params = context.getParams();
  //         context.getResults().setValue(params.getLeft() + params.getRight());
  //         return kj::READY_NOW;
  //       }
  //     };
  //
  //     int main() {
  //       capnp::EzRpcServer server("*:3456");
  //       server.exportCap("adder", kj::heap<AdderImpl>());
  //       kj::NEVER_DONE.wait(server.getWaitScope());
  //     }
  //
  // This interface is easy, but it hides a lot of useful features available from the lower-level
  // classes:
  // - The server can only export a small set of public, singleton capabilities under well-known
  //   string names.  This is fine for transient services where no state needs to be kept between
  //   connections, but hides the power of Cap'n Proto when it comes to long-lived resources.
  // - EzRpcClient/EzRpcServer automatically set up a `kj::EventLoop` and make it current for the
  //   thread.  Only one `kj::EventLoop` can exist per thread, so you cannot use these interfaces
  //   if you wish to set up your own event loop.  (However, you can safely create multiple
  //   EzRpcClient / EzRpcServer objects in a single thread; they will make sure to make no more
  //   than one EventLoop.)
  // - These classes only support simple two-party connections, not multilateral VatNetworks.
  // - These classes only support communication over a raw, unencrypted socket.  If you want to
  //   build on an abstract stream (perhaps one which supports encryption), you must use the
  //   lower-level interfaces.
  //
  // Some of these restrictions will probably be lifted in future versions, but some things will
  // always require using the low-level interfaces directly.  If you are interested in working
  // at a lower level, start by looking at these interfaces:
  // - `kj::startAsyncIo()` in `kj/async-io.h`.
  // - `RpcSystem` in `capnp/rpc.h`.
  // - `TwoPartyVatNetwork` in `capnp/rpc-twoparty.h`.

public:
  explicit EzRpcClient(kj::StringPtr serverAddress, uint defaultPort = 0);
  // Construct a new EzRpcClient and connect to the given address.  The connection is formed in
  // the background -- if it fails, calls to capabilities returned by importCap() will fail with an
  // appropriate exception.
  //
  // `defaultPort` is the IP port number to use if `serverAddress` does not include it explicitly.
  // If unspecified, the port is required in `serverAddress`.
  //
  // The address is parsed by `kj::Network` in `kj/async-io.h`.  See that interface for more info
  // on the address format, but basically it's what you'd expect.

  EzRpcClient(struct sockaddr* serverAddress, uint addrSize);
  // Like the above constructor, but connects to an already-resolved socket address.  Any address
  // format supported by `kj::Network` in `kj/async-io.h` is accepted.

  explicit EzRpcClient(int socketFd);
  // Create a client on top of an already-connected socket.

  ~EzRpcClient() noexcept(false);
  // noexcept(false): destruction may propagate exceptions from tearing down the connection.

  template <typename Type>
  typename Type::Client importCap(kj::StringPtr name);
  Capability::Client importCap(kj::StringPtr name);
  // Ask the sever for the capability with the given name.  You may specify a type to automatically
  // down-cast to that type.  It is up to you to specify the correct expected type.

  kj::WaitScope& getWaitScope();
  // Get the `WaitScope` for the client's `EventLoop`, which allows you to synchronously wait on
  // promises.

  kj::AsyncIoProvider& getIoProvider();
  // Get the underlying AsyncIoProvider set up by the RPC system.  This is useful if you want
  // to do some non-RPC I/O in asynchronous fashion.

  kj::LowLevelAsyncIoProvider& getLowLevelIoProvider();
  // Get the underlying LowLevelAsyncIoProvider set up by the RPC system.  This is useful if you
  // want to do some non-RPC I/O in asynchronous fashion.

private:
  struct Impl;
  kj::Own<Impl> impl;
  // pimpl idiom: keeps the kj/capnp implementation details out of this public header.
};
class EzRpcServer {
  // The server counterpart to `EzRpcClient`.  See `EzRpcClient` for an example.

public:
  explicit EzRpcServer(kj::StringPtr bindAddress, uint defaultPort = 0);
  // Construct a new `EzRpcServer` that binds to the given address.  An address of "*" means to
  // bind to all local addresses.
  //
  // `defaultPort` is the IP port number to use if `serverAddress` does not include it explicitly.
  // If unspecified, a port is chosen automatically, and you must call getPort() to find out what
  // it is.
  // (Parameter was previously misspelled `deafultPort`; renamed to match the
  // documentation.  Parameter names in declarations do not affect callers.)
  //
  // The address is parsed by `kj::Network` in `kj/async-io.h`.  See that interface for more info
  // on the address format, but basically it's what you'd expect.
  //
  // The server might not begin listening immediately, especially if `bindAddress` needs to be
  // resolved.  If you need to wait until the server is definitely up, wait on the promise returned
  // by `getPort()`.

  EzRpcServer(struct sockaddr* bindAddress, uint addrSize);
  // Like the above constructor, but binds to an already-resolved socket address.  Any address
  // format supported by `kj::Network` in `kj/async-io.h` is accepted.

  EzRpcServer(int socketFd, uint port);
  // Create a server on top of an already-listening socket (i.e. one on which accept() may be
  // called).  `port` is returned by `getPort()` -- it serves no other purpose.

  ~EzRpcServer() noexcept(false);

  void exportCap(kj::StringPtr name, Capability::Client cap);
  // Export a capability publicly under the given name, so that clients can import it.
  //
  // Keep in mind that you can implicitly convert `kj::Own<MyType::Server>&&` to
  // `Capability::Client`, so it's typical to pass something like
  // `kj::heap<MyImplementation>(<constructor params>)` as the second parameter.

  kj::Promise<uint> getPort();
  // Get the IP port number on which this server is listening.  This promise won't resolve until
  // the server is actually listening.  If the address was not an IP address (e.g. it was a Unix
  // domain socket) then getPort() resolves to zero.

  kj::WaitScope& getWaitScope();
  // Get the `WaitScope` for the client's `EventLoop`, which allows you to synchronously wait on
  // promises.

  kj::AsyncIoProvider& getIoProvider();
  // Get the underlying AsyncIoProvider set up by the RPC system.  This is useful if you want
  // to do some non-RPC I/O in asynchronous fashion.

  kj::LowLevelAsyncIoProvider& getLowLevelIoProvider();
  // Get the underlying LowLevelAsyncIoProvider set up by the RPC system.  This is useful if you
  // want to do some non-RPC I/O in asynchronous fashion.

private:
  struct Impl;
  kj::Own<Impl> impl;
  // pimpl idiom: keeps the kj/capnp implementation details out of this public header.
};
// =======================================================================================
// inline implementation details
// Delegates to the untyped importCap() overload and down-casts the result.
// The caller is responsible for naming the correct interface type.
template <typename Type>
inline typename Type::Client EzRpcClient::importCap(kj::StringPtr name) {
  return importCap(name).castAs<Type>();
}
} // namespace capnp
#endif // CAPNP_EZ_RPC_H_
| artillery/capnproto | c++/src/capnp/ez-rpc.h | C | bsd-2-clause | 9,216 |
Kwf.Utils.ResponsiveEl('.kwcTextImage', [420]);

// Toggle largeText/largeImage depending on how much of the component's width
// the image occupies: if the image covers less than 55% of the element, the
// text is considered dominant.
Kwf.onJElementWidthChange('.kwcTextImage', function textImage(el) {
    var img = el.find('div.image .kwcAbstractImage .container');
    // find() always returns a (possibly empty) set, which is truthy even when
    // no image exists -- test .length so components without an image are
    // left untouched (the original `if (img)` check was always true).
    if (img.length) {
        if (img.width() < (el.width() * 0.55)) {
            el.removeClass('largeImage');
            el.addClass('largeText');
        } else {
            el.removeClass('largeText');
            el.addClass('largeImage');
        }
    }
});
<?php
// Yii2 web application configuration for the "addresses" app.
// Returns the config array consumed by the application bootstrap.
$params = require(__DIR__ . '/params.php');
$config = [
'id' => 'addresses',
'basePath' => dirname(__DIR__),
// 'log' is bootstrapped eagerly so early errors are captured.
'bootstrap' => ['log'],
'components' => [
'request' => [
'baseUrl' => '',
// !!! insert a secret key in the following (if it is empty) - this is required by cookie validation
'cookieValidationKey' => 'xODnAvtJNLY9L6ihy9XckCXkuixXmVqg',
],
'errorHandler' => [
'errorAction' => 'site/error',
],
'log' => [
// Trace level 3 in debug builds adds call-stack context to log entries.
'traceLevel' => YII_DEBUG ? 3 : 0,
'targets' => [
[
'class' => 'yii\log\FileTarget',
'levels' => ['error', 'warning'],
],
],
],
'db' => require(__DIR__ . '/db.php'),
'urlManager' => [
// Pretty URLs: all actions are routed through the 'site' controller.
'enablePrettyUrl' => true,
'showScriptName' => false,
'rules' => [
'' => 'site/index',
'<action:\w+>' => 'site/<action>',
],
],
],
'params' => $params,
];
if (YII_ENV_DEV) {
// configuration adjustments for 'dev' environment
$config['bootstrap'][] = 'debug';
$config['modules']['debug'] = [
'class' => 'yii\debug\Module',
// uncomment the following to add your IP if you are not connecting from localhost.
//'allowedIPs' => ['127.0.0.1', '::1'],
];
$config['bootstrap'][] = 'gii';
$config['modules']['gii'] = [
'class' => 'yii\gii\Module',
// uncomment the following to add your IP if you are not connecting from localhost.
//'allowedIPs' => ['127.0.0.1', '::1'],
];
}
return $config;
<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="ms_MY">
<context>
<name>mainUI</name>
<message>
<location filename="../mainUI.ui" line="14"/>
<location filename="../mainUI.cpp" line="53"/>
<source>Calculator</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.ui" line="657"/>
<source>Advanced Operations</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="83"/>
<source>Percentage %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="85"/>
<source>Power %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="87"/>
<source>Base-10 Exponential %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="89"/>
<source>Exponential %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="91"/>
<source>Constant Pi %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="94"/>
<source>Square Root %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="96"/>
<source>Logarithm %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="98"/>
<source>Natural Log %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="101"/>
<source>Sine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="103"/>
<source>Cosine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="105"/>
<source>Tangent %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="108"/>
<source>Arc Sine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="110"/>
<source>Arc Cosine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="112"/>
<source>Arc Tangent %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="115"/>
<source>Hyperbolic Sine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="117"/>
<source>Hyperbolic Cosine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="119"/>
<source>Hyperbolic Tangent %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="182"/>
<source>Save Calculator History</source>
<translation type="unfinished"></translation>
</message>
</context>
</TS>
| sasongko26/lumina | src-qt5/desktop-utils/lumina-calculator/i18n/l-calc_ms.ts | TypeScript | bsd-3-clause | 3,770 |
<?php
namespace User\Listener;
use Zend\EventManager\EventInterface;
use Zend\EventManager\EventManagerInterface;
use Zend\EventManager\ListenerAggregateInterface;
use User\Acl\Service as AclService;
use Zend\Mvc\MvcEvent;
/**
 * User listener
 *
 * Aggregate listener that enforces ACL checks on MVC dispatch and injects
 * the ACL/role into the navigation view helper at render time.
 *
 * @package Application
 */
class UserListener implements ListenerAggregateInterface
{
/**
 * @var \Zend\Stdlib\CallbackHandler[]
 */
protected $listeners = array();
/**
 * Attach to an event manager
 *
 * checkAcl runs early in dispatch (priority 100); addAclToNavigation runs
 * late in render (priority -100), after the navigation helper exists.
 *
 * @param EventManagerInterface $events
 */
public function attach(EventManagerInterface $events)
{
$this->listeners[] = $events->attach(
MvcEvent::EVENT_DISPATCH, array($this, 'checkAcl'), 100
);
$this->listeners[] = $events->attach(
MvcEvent::EVENT_RENDER, array($this, 'addAclToNavigation'), -100
);
}
/**
 * Detach all our listeners from the event manager
 *
 * @param EventManagerInterface $events
 * @return void
 */
public function detach(EventManagerInterface $events)
{
foreach ($this->listeners as $index => $listener) {
if ($events->detach($listener)) {
unset($this->listeners[$index]);
}
}
}
/**
 * Listen to the "dispatch" event and check the user rights
 *
 * Guests failing the ACL check are redirected to the login action; any
 * other role is routed to the forbidden page.
 *
 * @param MvcEvent $e
 * @return null
 */
public function checkAcl(EventInterface $e)
{
// get route match, params and objects
$routeMatch = $e->getRouteMatch();
$controllerParam = $routeMatch->getParam('controller');
$actionParam = $routeMatch->getParam('action');
$serviceManager = $e->getApplication()->getServiceManager();
$controllerLoader = $serviceManager->get('ControllerLoader');
$acl = $serviceManager->get('User\Acl\Service');
// try to load current controller
try {
$controller = $controllerLoader->get($controllerParam);
} catch (\Exception $exception) {
// Unknown controller: let the framework's own 404 handling deal with it.
return;
}
// check acl
if (!$acl->isAllowed($controllerParam, $actionParam)) {
// check for guests
if ($acl->getRole() == 'guest') {
$routeMatch->setParam('controller', 'user');
$routeMatch->setParam('action', 'login');
} else {
$routeMatch->setParam('controller', 'admin');
$routeMatch->setParam('action', 'forbidden');
}
}
}
/**
 * Listen to the "render" event and add the acl to the navigation
 *
 * @param MvcEvent $e
 * @return null
 */
public function addAclToNavigation(EventInterface $e)
{
// get service manager, view manager and acl service
$serviceManager = $e->getApplication()->getServiceManager();
$viewManager = $serviceManager->get('viewmanager');
$aclService = $serviceManager->get('User\Acl\Service');
// set navigation plugin and set acl and role
$plugin = $viewManager->getRenderer()->plugin('navigation');
$plugin->setRole($aclService->getRole());
$plugin->setAcl($aclService->getAcl());
}
}
/*-
* Copyright (c) 2000 Daniel Capo Sobral
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* $FreeBSD: src/sys/boot/ficl/loader.c,v 1.8 2002/08/21 09:28:00 scottl Exp $
*/
/*******************************************************************
** l o a d e r . c
** Additional FICL words designed for FreeBSD's loader
**
*******************************************************************/
#ifdef TESTMAIN
#include <stdlib.h>
#else
#include <stand.h>
#endif
#include "bootstrap.h"
#include <string.h>
#include "ficl.h"
/* FreeBSD's loader interaction words and extras
*
* setenv ( value n name n' -- )
* setenv? ( value n name n' flag -- )
* getenv ( addr n -- addr' n' | -1 )
* unsetenv ( addr n -- )
* copyin ( addr addr' len -- )
* copyout ( addr addr' len -- )
* findfile ( name len type len' -- addr )
* pnpdevices ( -- addr )
* pnphandlers ( -- addr )
* ccall ( [[...[p10] p9] ... p1] n addr -- result )
* .# ( value -- )
*/
#ifndef TESTMAIN
/*
 * setenv ( value n name n' -- )
 * Pops a name/value pair of counted strings off the Forth stack and
 * sets the loader environment variable (overwriting any existing value).
 */
void
ficlSetenv(FICL_VM *pVM)
{
	char *namep, *valuep, *name, *value;
	int names, values;

#if FICL_ROBUST > 1
	vmCheckStack(pVM, 4, 0);
#endif
	names = stackPopINT(pVM->pStack);
	namep = (char*) stackPopPtr(pVM->pStack);
	values = stackPopINT(pVM->pStack);
	valuep = (char*) stackPopPtr(pVM->pStack);

	/* The stack strings are not NUL-terminated; copy into C strings. */
	name = (char*) ficlMalloc(names+1);
	if (!name)
		vmThrowErr(pVM, "Error: out of memory");
	strncpy(name, namep, names);
	name[names] = '\0';
	value = (char*) ficlMalloc(values+1);
	if (!value) {
		/* vmThrowErr() aborts the word; free name so it is not leaked. */
		ficlFree(name);
		vmThrowErr(pVM, "Error: out of memory");
	}
	strncpy(value, valuep, values);
	value[values] = '\0';

	setenv(name, value, 1);
	ficlFree(name);
	ficlFree(value);

	return;
}
/*
 * setenv? ( value n name n' flag -- )
 * Like setenv, but the extra flag controls whether an existing
 * variable is overwritten.
 */
void
ficlSetenvq(FICL_VM *pVM)
{
	char *namep, *valuep, *name, *value;
	int names, values, overwrite;

#if FICL_ROBUST > 1
	vmCheckStack(pVM, 5, 0);
#endif
	overwrite = stackPopINT(pVM->pStack);
	names = stackPopINT(pVM->pStack);
	namep = (char*) stackPopPtr(pVM->pStack);
	values = stackPopINT(pVM->pStack);
	valuep = (char*) stackPopPtr(pVM->pStack);

	/* The stack strings are not NUL-terminated; copy into C strings. */
	name = (char*) ficlMalloc(names+1);
	if (!name)
		vmThrowErr(pVM, "Error: out of memory");
	strncpy(name, namep, names);
	name[names] = '\0';
	value = (char*) ficlMalloc(values+1);
	if (!value) {
		/* vmThrowErr() aborts the word; free name so it is not leaked. */
		ficlFree(name);
		vmThrowErr(pVM, "Error: out of memory");
	}
	strncpy(value, valuep, values);
	value[values] = '\0';

	setenv(name, value, overwrite);
	ficlFree(name);
	ficlFree(value);

	return;
}
/*
 * getenv ( addr n -- addr' n' | -1 )
 * Look up a loader environment variable; pushes the value string and
 * its length, or -1 when the variable is unset.
 */
void
ficlGetenv(FICL_VM *pVM)
{
	char *lookup, *result, *src;
	int len;

#if FICL_ROBUST > 1
	vmCheckStack(pVM, 2, 2);
#endif
	len = stackPopINT(pVM->pStack);
	src = (char*) stackPopPtr(pVM->pStack);

	/* Build a NUL-terminated copy of the counted name string. */
	lookup = (char*) ficlMalloc(len+1);
	if (!lookup)
		vmThrowErr(pVM, "Error: out of memory");
	strncpy(lookup, src, len);
	lookup[len] = '\0';

	result = getenv(lookup);
	ficlFree(lookup);

	if (result == NULL) {
		stackPushINT(pVM->pStack, -1);
	} else {
		stackPushPtr(pVM->pStack, result);
		stackPushINT(pVM->pStack, strlen(result));
	}

	return;
}
/*
 * unsetenv ( addr n -- )
 * Remove a loader environment variable named by the counted string.
 */
void
ficlUnsetenv(FICL_VM *pVM)
{
	char *copy, *src;
	int len;

#if FICL_ROBUST > 1
	vmCheckStack(pVM, 2, 0);
#endif
	len = stackPopINT(pVM->pStack);
	src = (char*) stackPopPtr(pVM->pStack);

	/* Build a NUL-terminated copy of the counted name string. */
	copy = (char*) ficlMalloc(len+1);
	if (!copy)
		vmThrowErr(pVM, "Error: out of memory");
	strncpy(copy, src, len);
	copy[len] = '\0';

	unsetenv(copy);
	ficlFree(copy);

	return;
}
/*
 * copyin ( addr addr' len -- )
 * Copy len bytes from loader memory at addr into the kernel/module
 * address space at addr', via the architecture hook.
 */
void
ficlCopyin(FICL_VM *pVM)
{
	vm_offset_t dst;
	void *src;
	size_t nbytes;

#if FICL_ROBUST > 1
	vmCheckStack(pVM, 3, 0);
#endif
	nbytes = stackPopINT(pVM->pStack);
	dst = stackPopINT(pVM->pStack);
	src = stackPopPtr(pVM->pStack);
	archsw.arch_copyin(src, dst, nbytes);
	return;
}
/*
 * copyout ( addr addr' len -- )
 * Copy len bytes out of the kernel/module address space at addr into
 * loader memory at addr', via the architecture hook.
 */
void
ficlCopyout(FICL_VM *pVM)
{
	void *dst;
	vm_offset_t src;
	size_t nbytes;

#if FICL_ROBUST > 1
	vmCheckStack(pVM, 3, 0);
#endif
	nbytes = stackPopINT(pVM->pStack);
	dst = stackPopPtr(pVM->pStack);
	src = stackPopINT(pVM->pStack);
	archsw.arch_copyout(src, dst, nbytes);
	return;
}
/*
 * findfile ( name len type len' -- addr )
 * Look up a preloaded file by name and type and push the
 * struct preloaded_file pointer (NULL when not found).
 * NOTE(review): name/type are never freed after file_findfile(); this
 * matches the original code, but looks like a leak unless file_findfile
 * retains the pointers -- confirm before adding frees.
 */
void
ficlFindfile(FICL_VM *pVM)
{
	char *name, *type, *namep, *typep;
	struct preloaded_file* fp;
	int names, types;

#if FICL_ROBUST > 1
	vmCheckStack(pVM, 4, 1);
#endif
	types = stackPopINT(pVM->pStack);
	typep = (char*) stackPopPtr(pVM->pStack);
	names = stackPopINT(pVM->pStack);
	namep = (char*) stackPopPtr(pVM->pStack);

	/* The stack strings are not NUL-terminated; copy into C strings. */
	name = (char*) ficlMalloc(names+1);
	if (!name)
		vmThrowErr(pVM, "Error: out of memory");
	strncpy(name, namep, names);
	name[names] = '\0';
	type = (char*) ficlMalloc(types+1);
	if (!type) {
		/* vmThrowErr() aborts the word; free name so it is not leaked. */
		ficlFree(name);
		vmThrowErr(pVM, "Error: out of memory");
	}
	strncpy(type, typep, types);
	type[types] = '\0';

	fp = file_findfile(name, type);
	stackPushPtr(pVM->pStack, fp);

	return;
}
#ifdef HAVE_PNP
/*
 * pnpdevices ( -- addr )
 * Forth word: push the address of the global pnp_devices list,
 * lazily initializing the list head on first use.
 */
void
ficlPnpdevices(FICL_VM *pVM)
{
static int pnp_devices_initted = 0;
#if FICL_ROBUST > 1
vmCheckStack(pVM, 0, 1);
#endif
/* One-time initialization of the STAILQ head. */
if(!pnp_devices_initted) {
STAILQ_INIT(&pnp_devices);
pnp_devices_initted = 1;
}
stackPushPtr(pVM->pStack, &pnp_devices);
return;
}
/*
 * pnphandlers ( -- addr )
 * Forth word: push the global pnphandlers table pointer.
 */
void
ficlPnphandlers(FICL_VM *pVM)
{
#if FICL_ROBUST > 1
vmCheckStack(pVM, 0, 1);
#endif
stackPushPtr(pVM->pStack, pnphandlers);
return;
}
#endif
#endif /* ndef TESTMAIN */
/*
 * ccall ( ... nparam func -- result )
 * Forth word: call a C function through a pointer with up to 10
 * integer arguments popped from the stack, pushing its int result.
 *
 * Fix: nparam came from the Forth stack and was used unchecked to
 * index the fixed p[10] array, so a bogus count overflowed the C
 * stack buffer; it is now validated.  The argument array is also
 * zero-filled so the unused trailing arguments handed to func are
 * deterministic instead of uninitialized.
 */
void
ficlCcall(FICL_VM *pVM)
{
    int (*func)(int, ...);
    int result, p[10];
    int nparam, i;

#if FICL_ROBUST > 1
    vmCheckStack(pVM, 2, 0);
#endif
    func = stackPopPtr(pVM->pStack);
    nparam = stackPopINT(pVM->pStack);
    if (nparam < 0 || nparam > 10)
        vmThrowErr(pVM, "Error: ccall takes at most 10 parameters");
#if FICL_ROBUST > 1
    vmCheckStack(pVM, nparam, 1);
#endif
    memset(p, 0, sizeof(p));
    for (i = 0; i < nparam; i++)
        p[i] = stackPopINT(pVM->pStack);
    result = func(p[0], p[1], p[2], p[3], p[4], p[5], p[6], p[7], p[8],
        p[9]);
    stackPushINT(pVM->pStack, result);
}
/**************************************************************************
                        f i c l E x e c F D
** Reads in text from file fd line by line and passes it to ficlExec().
** Returns VM_OUTOFTEXT on success or the ficlExec() error code on
** failure.
**
** Fix: input lines longer than nLINEBUF bytes previously overran the
** local cp[] buffer; the inner read loop is now bounded, so an
** over-long line is handed to ficlExec in nLINEBUF-sized pieces.
*/
#define nLINEBUF 256
int ficlExecFD(FICL_VM *pVM, int fd)
{
    char cp[nLINEBUF];
    int nLine = 0, rval = VM_OUTOFTEXT;
    char ch;
    CELL id;

    /* Remember the current source so it can be restored on exit. */
    id = pVM->sourceID;
    pVM->sourceID.i = fd;
    /* feed each line to ficlExec */
    while (1) {
        int status, i;

        i = 0;
        while (i < nLINEBUF && (status = read(fd, &ch, 1)) > 0 &&
            ch != '\n')
            cp[i++] = ch;
        nLine++;
        if (!i) {
            /* Empty line: stop on EOF/error, otherwise keep reading. */
            if (status < 1)
                break;
            continue;
        }
        rval = ficlExecC(pVM, cp, i);
        if (rval != VM_QUIT && rval != VM_USEREXIT &&
            rval != VM_OUTOFTEXT) {
            pVM->sourceID = id;
            return rval;    /* propagate real errors to the caller */
        }
    }
    /*
    ** Pass an empty line with SOURCE-ID == -1 to flush
    ** any pending REFILLs (as required by FILE wordset)
    */
    pVM->sourceID.i = -1;
    ficlExec(pVM, "");
    pVM->sourceID = id;
    return rval;
}
/*
 * .#  ( n -- )
 * Display the cell on top of the stack in the VM's current numeric
 * base, with no padding.
 */
static void displayCellNoPad(FICL_VM *pVM)
{
CELL c;
#if FICL_ROBUST > 1
vmCheckStack(pVM, 1, 0);
#endif
c = stackPop(pVM->pStack);
/* ltoa() renders into the VM's scratch pad using pVM->base. */
ltoa((c).i, pVM->pad, pVM->base);
vmTextOut(pVM, pVM->pad, 0);
return;
}
/* fopen - open a file and return new fd on stack.
 *
 * fopen ( ptr count mode -- fd )
 *
 * Pushes -1 for any failure: bad arguments, allocation failure, or
 * open(2) failure.
 *
 * Fix: the malloc() of the NUL-terminated name copy was unchecked,
 * so allocation failure crashed in bcopy(); it now yields -1 like
 * the other error paths.
 */
static void pfopen(FICL_VM *pVM)
{
    int mode, fd, count;
    char *ptr, *name;

#if FICL_ROBUST > 1
    vmCheckStack(pVM, 3, 1);
#endif
    mode = stackPopINT(pVM->pStack);    /* get mode */
    count = stackPopINT(pVM->pStack);   /* get count */
    ptr = stackPopPtr(pVM->pStack);     /* get ptr */
    if ((count < 0) || (ptr == NULL)) {
        stackPushINT(pVM->pStack, -1);
        return;
    }
    /* ensure that the string is null terminated */
    name = (char *)malloc(count + 1);
    if (name == NULL) {
        stackPushINT(pVM->pStack, -1);
        return;
    }
    bcopy(ptr, name, count);
    name[count] = 0;
    /* open the file */
    fd = open(name, mode);
    free(name);
    stackPushINT(pVM->pStack, fd);
}
/* fclose - close a file whose fd is on the stack.
 *
 * fclose ( fd -- )
 */
static void pfclose(FICL_VM *pVM)
{
    int handle;

#if FICL_ROBUST > 1
    vmCheckStack(pVM, 1, 0);
#endif
    handle = stackPopINT(pVM->pStack);  /* get fd */
    if (handle == -1)
        return;                         /* nothing to close */
    close(handle);
}
/* fread - read file contents
 *
 * fread ( fd buf nbytes -- nread )
 *
 * Pushes the byte count returned by read(2), or -1 when the
 * arguments are unusable.
 */
static void pfread(FICL_VM *pVM)
{
    int handle, nbytes;
    char *dest;

#if FICL_ROBUST > 1
    vmCheckStack(pVM, 3, 1);
#endif
    nbytes = stackPopINT(pVM->pStack);  /* byte count */
    dest = stackPopPtr(pVM->pStack);    /* destination buffer */
    handle = stackPopINT(pVM->pStack);  /* file descriptor */
    if (nbytes <= 0 || dest == NULL || handle == -1) {
        stackPushINT(pVM->pStack, -1);
        return;
    }
    stackPushINT(pVM->pStack, read(handle, dest, nbytes));
}
/* fload - interpret file contents
 *
 * fload ( fd -- )
 *
 * Feeds the open file to the Forth interpreter via ficlExecFD().
 */
static void pfload(FICL_VM *pVM)
{
    int handle;

#if FICL_ROBUST > 1
    vmCheckStack(pVM, 1, 0);
#endif
    handle = stackPopINT(pVM->pStack);  /* get fd */
    if (handle != -1)
        ficlExecFD(pVM, handle);
}
/* fwrite - write file contents
 *
 * fwrite ( fd buf nbytes -- nwritten )
 *
 * Pushes the byte count returned by write(2), or -1 when the
 * arguments are unusable.
 */
static void pfwrite(FICL_VM *pVM)
{
    int handle, nbytes;
    char *src;

#if FICL_ROBUST > 1
    vmCheckStack(pVM, 3, 1);
#endif
    nbytes = stackPopINT(pVM->pStack);  /* number of bytes to write */
    src = stackPopPtr(pVM->pStack);     /* source buffer */
    handle = stackPopINT(pVM->pStack);  /* file descriptor */
    if (nbytes <= 0 || src == NULL || handle == -1) {
        stackPushINT(pVM->pStack, -1);
        return;
    }
    stackPushINT(pVM->pStack, write(handle, src, nbytes));
}
/* fseek - seek to a new position in a file
 *
 * fseek ( fd ofs whence -- pos )
 *
 * Pushes the new offset returned by lseek(2) (-1 on failure).
 */
static void pfseek(FICL_VM *pVM)
{
    int handle, offset, origin;

#if FICL_ROBUST > 1
    vmCheckStack(pVM, 3, 1);
#endif
    origin = stackPopINT(pVM->pStack);  /* SEEK_SET/SEEK_CUR/SEEK_END */
    offset = stackPopINT(pVM->pStack);
    handle = stackPopINT(pVM->pStack);
    stackPushINT(pVM->pStack, lseek(handle, offset, origin));
}
/* key - get a character from stdin
 *
 * key ( -- char )
 *
 * Blocks until a character is available; pushes the getchar() result
 * (EOF included) unchanged.
 */
static void key(FICL_VM *pVM)
{
    int c;

#if FICL_ROBUST > 1
    vmCheckStack(pVM, 0, 1);
#endif
    c = getchar();
    stackPushINT(pVM->pStack, c);
}
/* key? - check for a character from stdin (FACILITY)
 *
 * key? ( -- flag )
 *
 * Pushes FICL_TRUE when input is pending (always true in the
 * TESTMAIN host build).
 */
static void keyQuestion(FICL_VM *pVM)
{
#if FICL_ROBUST > 1
vmCheckStack(pVM, 0, 1);
#endif
#ifdef TESTMAIN
/* XXX Since we don't fiddle with termios, let it always succeed... */
stackPushINT(pVM->pStack, FICL_TRUE);
#else
/* But here do the right thing. */
/* NOTE(review): ischar() presumably polls console input without
   blocking — confirm against the loader console driver. */
stackPushINT(pVM->pStack, ischar()? FICL_TRUE : FICL_FALSE);
#endif
return;
}
/* seconds - gives number of seconds since beginning of time
 *
 * beginning of time is defined as:
 *
 *	BTX	- number of seconds since midnight
 *	FreeBSD	- number of seconds since Jan 1 1970
 *
 * seconds ( -- u )
 */
static void pseconds(FICL_VM *pVM)
{
    FICL_UNS now;

#if FICL_ROBUST > 1
    vmCheckStack(pVM, 0, 1);
#endif
    now = (FICL_UNS)time(NULL);
    stackPushUNS(pVM->pStack, now);
}
/* ms - wait at least that many milliseconds (FACILITY)
 *
 * ms ( u -- )
 *
 */
static void ms(FICL_VM *pVM)
{
#if FICL_ROBUST > 1
vmCheckStack(pVM,1,0);
#endif
#ifdef TESTMAIN
/* Host build: usleep() takes microseconds, hence the *1000. */
usleep(stackPopUNS(pVM->pStack)*1000);
#else
/* NOTE(review): delay() is assumed to take microseconds, mirroring
   usleep() above — confirm against the loader's delay(). */
delay(stackPopUNS(pVM->pStack)*1000);
#endif
return;
}
/* fkey - get a character from a file
 *
 * fkey ( file -- char )
 *
 * Pushes the byte read from the descriptor, or -1 on EOF/error.
 */
static void fkey(FICL_VM *pVM)
{
    char c;
    int handle, nread;

#if FICL_ROBUST > 1
    vmCheckStack(pVM, 1, 1);
#endif
    handle = stackPopINT(pVM->pStack);
    nread = read(handle, &c, 1);
    stackPushINT(pVM->pStack, nread > 0 ? c : -1);
}
/*
** Retrieves free space remaining on the dictionary
*/
/* heap? ( -- ncells ) pushes the number of free dictionary cells. */
static void freeHeap(FICL_VM *pVM)
{
stackPushINT(pVM->pStack, dictCellsAvail(ficlGetDict(pVM->pSys)));
}
/******************* Increase dictionary size on-demand ******************/
/* dictthreshold ( -- addr ) pushes the address of the global
 * dictThreshold variable so Forth code can tune when the dictionary
 * is grown. */
static void ficlDictThreshold(FICL_VM *pVM)
{
stackPushPtr(pVM->pStack, &dictThreshold);
}
/* dictincrease ( -- addr ) pushes the address of the global
 * dictIncrease variable (growth step used when the dictionary is
 * enlarged). */
static void ficlDictIncrease(FICL_VM *pVM)
{
stackPushPtr(pVM->pStack, &dictIncrease);
}
/**************************************************************************
                        f i c l C o m p i l e P l a t f o r m
** Build FreeBSD platform extensions into the system dictionary
**************************************************************************/
void ficlCompilePlatform(FICL_SYSTEM *pSys)
{
FICL_DICT *dp = pSys->dp;
assert (dp);
/* File and console I/O words available in every build. */
dictAppendWord(dp, ".#", displayCellNoPad, FW_DEFAULT);
dictAppendWord(dp, "fopen", pfopen, FW_DEFAULT);
dictAppendWord(dp, "fclose", pfclose, FW_DEFAULT);
dictAppendWord(dp, "fread", pfread, FW_DEFAULT);
dictAppendWord(dp, "fload", pfload, FW_DEFAULT);
dictAppendWord(dp, "fkey", fkey, FW_DEFAULT);
dictAppendWord(dp, "fseek", pfseek, FW_DEFAULT);
dictAppendWord(dp, "fwrite", pfwrite, FW_DEFAULT);
dictAppendWord(dp, "key", key, FW_DEFAULT);
dictAppendWord(dp, "key?", keyQuestion, FW_DEFAULT);
dictAppendWord(dp, "ms", ms, FW_DEFAULT);
dictAppendWord(dp, "seconds", pseconds, FW_DEFAULT);
dictAppendWord(dp, "heap?", freeHeap, FW_DEFAULT);
dictAppendWord(dp, "dictthreshold", ficlDictThreshold, FW_DEFAULT);
dictAppendWord(dp, "dictincrease", ficlDictIncrease, FW_DEFAULT);
/* Loader-only words (not built into the TESTMAIN host harness). */
#ifndef TESTMAIN
#ifdef __i386__
dictAppendWord(dp, "outb", ficlOutb, FW_DEFAULT);
dictAppendWord(dp, "inb", ficlInb, FW_DEFAULT);
#endif
dictAppendWord(dp, "setenv", ficlSetenv, FW_DEFAULT);
dictAppendWord(dp, "setenv?", ficlSetenvq, FW_DEFAULT);
dictAppendWord(dp, "getenv", ficlGetenv, FW_DEFAULT);
dictAppendWord(dp, "unsetenv", ficlUnsetenv, FW_DEFAULT);
dictAppendWord(dp, "copyin", ficlCopyin, FW_DEFAULT);
dictAppendWord(dp, "copyout", ficlCopyout, FW_DEFAULT);
dictAppendWord(dp, "findfile", ficlFindfile, FW_DEFAULT);
#ifdef HAVE_PNP
dictAppendWord(dp, "pnpdevices",ficlPnpdevices, FW_DEFAULT);
dictAppendWord(dp, "pnphandlers",ficlPnphandlers, FW_DEFAULT);
#endif
dictAppendWord(dp, "ccall", ficlCcall, FW_DEFAULT);
#endif
/* Publish the build architecture in the ENVIRONMENT wordlist. */
#if defined(__i386__)
ficlSetEnv(pSys, "arch-i386", FICL_TRUE);
ficlSetEnv(pSys, "arch-alpha", FICL_FALSE);
ficlSetEnv(pSys, "arch-ia64", FICL_FALSE);
#elif defined(__alpha__)
ficlSetEnv(pSys, "arch-i386", FICL_FALSE);
ficlSetEnv(pSys, "arch-alpha", FICL_TRUE);
ficlSetEnv(pSys, "arch-ia64", FICL_FALSE);
#elif defined(__ia64__)
ficlSetEnv(pSys, "arch-i386", FICL_FALSE);
ficlSetEnv(pSys, "arch-alpha", FICL_FALSE);
ficlSetEnv(pSys, "arch-ia64", FICL_TRUE);
#endif
return;
}
| MarginC/kame | freebsd5/sys/boot/ficl/loader.c | C | bsd-3-clause | 15,839 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_12) on Thu Jun 11 16:35:04 CST 2009 -->
<TITLE>
Àà org.xsaas.xstat.entity.InformantInfo µÄʹÓà (µ÷²éͳ¼ÆÈí¼þ·þÎñƽ̨ API)
</TITLE>
<META NAME="date" CONTENT="2009-06-11">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Àà org.xsaas.xstat.entity.InformantInfo µÄʹÓà (µ÷²éͳ¼ÆÈí¼þ·þÎñƽ̨ API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Ìø¹ýµ¼º½Á´½Ó"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>¸ÅÊö</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Èí¼þ°ü</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà"><FONT CLASS="NavBarFont1"><B>Àà</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>ʹÓÃ</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Ê÷</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Òѹýʱ</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Ë÷Òý</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>°ïÖú</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
ÉÏÒ»¸ö
ÏÂÒ»¸ö</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?org/xsaas/xstat/entity/class-use/InformantInfo.html" target="_top"><B>¿ò¼Ü</B></A>&nbsp;
<A HREF="InformantInfo.html" target="_top"><B>ÎÞ¿ò¼Ü</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>ËùÓÐÀà</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>ËùÓÐÀà</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Àà org.xsaas.xstat.entity.InformantInfo<br>µÄʹÓÃ</B></H2>
</CENTER>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
ʹÓà <A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> µÄÈí¼þ°ü</FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.xsaas.xstat.business"><B>org.xsaas.xstat.business</B></A></TD>
<TD> </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.xsaas.xstat.business.service"><B>org.xsaas.xstat.business.service</B></A></TD>
<TD> </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.xsaas.xstat.dao"><B>org.xsaas.xstat.dao</B></A></TD>
<TD> </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.xsaas.xstat.dao.hibernate"><B>org.xsaas.xstat.dao.hibernate</B></A></TD>
<TD> </TD>
</TR>
</TABLE>
<P>
<A NAME="org.xsaas.xstat.business"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<A HREF="../../../../../org/xsaas/xstat/business/package-summary.html">org.xsaas.xstat.business</A> ÖÐ <A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> µÄʹÓÃ</FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">·µ»Ø <A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> µÄ <A HREF="../../../../../org/xsaas/xstat/business/package-summary.html">org.xsaas.xstat.business</A> Öеķ½·¨</FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> <A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A></CODE></FONT></TD>
<TD><CODE><B>IInformantInfoService.</B><B><A HREF="../../../../../org/xsaas/xstat/business/IInformantInfoService.html#getInformantInfo(java.lang.Long)">getInformantInfo</A></B>(java.lang.Long informantID)</CODE>
<BR>
»ñÈ¡±»¼ì²â¶ÔÏóÐÅÏ¢</TD>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">·µ»Ø±äÁ¿ÀàÐÍΪ <A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> µÄÀàÐ굀 <A HREF="../../../../../org/xsaas/xstat/business/package-summary.html">org.xsaas.xstat.business</A> Öеķ½·¨</FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> java.util.List<<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A>></CODE></FONT></TD>
<TD><CODE><B>IInformantInfoService.</B><B><A HREF="../../../../../org/xsaas/xstat/business/IInformantInfoService.html#findInformantInfoByPage(int, int)">findInformantInfoByPage</A></B>(int firstResult,
int maxResult)</CODE>
<BR>
·ÖÒ³ÐÅÏ¢</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> java.util.List<<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A>></CODE></FONT></TD>
<TD><CODE><B>IInformantInfoService.</B><B><A HREF="../../../../../org/xsaas/xstat/business/IInformantInfoService.html#findInformantInfoByPage(long, int, int)">findInformantInfoByPage</A></B>(long clientID,
int firstResult,
int maxResult)</CODE>
<BR>
·ÖÒ³ÐÅÏ¢</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> java.util.List<<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A>></CODE></FONT></TD>
<TD><CODE><B>IInformantInfoService.</B><B><A HREF="../../../../../org/xsaas/xstat/business/IInformantInfoService.html#findPageByDelStatus(int, int)">findPageByDelStatus</A></B>(int firstResult,
int maxResult)</CODE>
<BR>
ɾ³ý״̬·ÖÒ³ÐÅÏ¢</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> java.util.List<<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A>></CODE></FONT></TD>
<TD><CODE><B>IInformantInfoService.</B><B><A HREF="../../../../../org/xsaas/xstat/business/IInformantInfoService.html#getInformantInfoList()">getInformantInfoList</A></B>()</CODE>
<BR>
»ñÈ¡±»¼ì²â¶ÔÏóÐÅÏ¢Áбí</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> java.util.List<<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A>></CODE></FONT></TD>
<TD><CODE><B>IInformantInfoService.</B><B><A HREF="../../../../../org/xsaas/xstat/business/IInformantInfoService.html#getInformantInfoList(java.lang.Long)">getInformantInfoList</A></B>(java.lang.Long clientID)</CODE>
<BR>
</TD>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">²ÎÊýÀàÐÍΪ <A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> µÄ <A HREF="../../../../../org/xsaas/xstat/business/package-summary.html">org.xsaas.xstat.business</A> Öеķ½·¨</FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B>IInformantInfoService.</B><B><A HREF="../../../../../org/xsaas/xstat/business/IInformantInfoService.html#deleteInformantInfo(org.xsaas.xstat.entity.InformantInfo)">deleteInformantInfo</A></B>(<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> data)</CODE>
<BR>
ɾ³ý±»¼ì²â¶ÔÏóÐÅÏ¢</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B>IInformantInfoService.</B><B><A HREF="../../../../../org/xsaas/xstat/business/IInformantInfoService.html#saveInformantInfo(org.xsaas.xstat.entity.InformantInfo)">saveInformantInfo</A></B>(<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> data)</CODE>
<BR>
±£´æ±»¼ì²â¶ÔÏóÐÅÏ¢</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B>IInformantInfoService.</B><B><A HREF="../../../../../org/xsaas/xstat/business/IInformantInfoService.html#updateInformantInfo(org.xsaas.xstat.entity.InformantInfo)">updateInformantInfo</A></B>(<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> data)</CODE>
<BR>
¸üб»¼ì²â¶ÔÏóÐÅÏ¢</TD>
</TR>
</TABLE>
<P>
<A NAME="org.xsaas.xstat.business.service"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<A HREF="../../../../../org/xsaas/xstat/business/service/package-summary.html">org.xsaas.xstat.business.service</A> ÖÐ <A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> µÄʹÓÃ</FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">·µ»Ø <A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> µÄ <A HREF="../../../../../org/xsaas/xstat/business/service/package-summary.html">org.xsaas.xstat.business.service</A> Öеķ½·¨</FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> <A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A></CODE></FONT></TD>
<TD><CODE><B>InformantInfoService.</B><B><A HREF="../../../../../org/xsaas/xstat/business/service/InformantInfoService.html#getInformantInfo(java.lang.Long)">getInformantInfo</A></B>(java.lang.Long informantID)</CODE>
<BR>
</TD>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">·µ»Ø±äÁ¿ÀàÐÍΪ <A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> µÄÀàÐ굀 <A HREF="../../../../../org/xsaas/xstat/business/service/package-summary.html">org.xsaas.xstat.business.service</A> Öеķ½·¨</FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> java.util.List<<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A>></CODE></FONT></TD>
<TD><CODE><B>InformantInfoService.</B><B><A HREF="../../../../../org/xsaas/xstat/business/service/InformantInfoService.html#findInformantInfoByPage(int, int)">findInformantInfoByPage</A></B>(int firstResult,
int maxResult)</CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> java.util.List<<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A>></CODE></FONT></TD>
<TD><CODE><B>InformantInfoService.</B><B><A HREF="../../../../../org/xsaas/xstat/business/service/InformantInfoService.html#findInformantInfoByPage(long, int, int)">findInformantInfoByPage</A></B>(long clientID,
int firstResult,
int maxResult)</CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> java.util.List<<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A>></CODE></FONT></TD>
<TD><CODE><B>InformantInfoService.</B><B><A HREF="../../../../../org/xsaas/xstat/business/service/InformantInfoService.html#findPageByDelStatus(int, int)">findPageByDelStatus</A></B>(int firstResult,
int maxResult)</CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> java.util.List<<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A>></CODE></FONT></TD>
<TD><CODE><B>InformantInfoService.</B><B><A HREF="../../../../../org/xsaas/xstat/business/service/InformantInfoService.html#getInformantInfoList()">getInformantInfoList</A></B>()</CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> java.util.List<<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A>></CODE></FONT></TD>
<TD><CODE><B>InformantInfoService.</B><B><A HREF="../../../../../org/xsaas/xstat/business/service/InformantInfoService.html#getInformantInfoList(java.lang.Long)">getInformantInfoList</A></B>(java.lang.Long clientID)</CODE>
<BR>
</TD>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">²ÎÊýÀàÐÍΪ <A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> µÄ <A HREF="../../../../../org/xsaas/xstat/business/service/package-summary.html">org.xsaas.xstat.business.service</A> Öеķ½·¨</FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B>InformantInfoService.</B><B><A HREF="../../../../../org/xsaas/xstat/business/service/InformantInfoService.html#deleteInformantInfo(org.xsaas.xstat.entity.InformantInfo)">deleteInformantInfo</A></B>(<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> data)</CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B>InformantInfoService.</B><B><A HREF="../../../../../org/xsaas/xstat/business/service/InformantInfoService.html#saveInformantInfo(org.xsaas.xstat.entity.InformantInfo)">saveInformantInfo</A></B>(<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> data)</CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B>InformantInfoService.</B><B><A HREF="../../../../../org/xsaas/xstat/business/service/InformantInfoService.html#updateInformantInfo(org.xsaas.xstat.entity.InformantInfo)">updateInformantInfo</A></B>(<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> data)</CODE>
<BR>
</TD>
</TR>
</TABLE>
<P>
<A NAME="org.xsaas.xstat.dao"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<A HREF="../../../../../org/xsaas/xstat/dao/package-summary.html">org.xsaas.xstat.dao</A> ÖÐ <A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> µÄʹÓÃ</FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">·µ»Ø <A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> µÄ <A HREF="../../../../../org/xsaas/xstat/dao/package-summary.html">org.xsaas.xstat.dao</A> Öеķ½·¨</FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> <A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A></CODE></FONT></TD>
<TD><CODE><B>IInformantInfoDao.</B><B><A HREF="../../../../../org/xsaas/xstat/dao/IInformantInfoDao.html#getInformantInfo(java.lang.Long)">getInformantInfo</A></B>(java.lang.Long informantID)</CODE>
<BR>
»ñÈ¡±»¼ì²â¶ÔÏóÐÅÏ¢</TD>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">·µ»Ø±äÁ¿ÀàÐÍΪ <A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> µÄÀàÐ굀 <A HREF="../../../../../org/xsaas/xstat/dao/package-summary.html">org.xsaas.xstat.dao</A> Öеķ½·¨</FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> java.util.List<<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A>></CODE></FONT></TD>
<TD><CODE><B>IInformantInfoDao.</B><B><A HREF="../../../../../org/xsaas/xstat/dao/IInformantInfoDao.html#findInformantInfoByPage(int, int)">findInformantInfoByPage</A></B>(int firstResult,
int maxResult)</CODE>
<BR>
·ÖÒ³ÐÅÏ¢</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> java.util.List<<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A>></CODE></FONT></TD>
<TD><CODE><B>IInformantInfoDao.</B><B><A HREF="../../../../../org/xsaas/xstat/dao/IInformantInfoDao.html#findInformantInfoByPage(long, int, int)">findInformantInfoByPage</A></B>(long clientID,
int firstResult,
int maxResult)</CODE>
<BR>
·ÖÒ³ÐÅÏ¢</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> java.util.List<<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A>></CODE></FONT></TD>
<TD><CODE><B>IInformantInfoDao.</B><B><A HREF="../../../../../org/xsaas/xstat/dao/IInformantInfoDao.html#findPageByDelStatus(int, int)">findPageByDelStatus</A></B>(int firstResult,
int maxResult)</CODE>
<BR>
ɾ³ý״̬·ÖÒ³ÐÅÏ¢</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> java.util.List<<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A>></CODE></FONT></TD>
<TD><CODE><B>IInformantInfoDao.</B><B><A HREF="../../../../../org/xsaas/xstat/dao/IInformantInfoDao.html#getInformantInfoList()">getInformantInfoList</A></B>()</CODE>
<BR>
»ñÈ¡±»¼ì²â¶ÔÏóÐÅÏ¢Áбí</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> java.util.List<<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A>></CODE></FONT></TD>
<TD><CODE><B>IInformantInfoDao.</B><B><A HREF="../../../../../org/xsaas/xstat/dao/IInformantInfoDao.html#getInformantInfoList(java.lang.Long)">getInformantInfoList</A></B>(java.lang.Long clientID)</CODE>
<BR>
</TD>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">²ÎÊýÀàÐÍΪ <A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> µÄ <A HREF="../../../../../org/xsaas/xstat/dao/package-summary.html">org.xsaas.xstat.dao</A> Öеķ½·¨</FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B>IInformantInfoDao.</B><B><A HREF="../../../../../org/xsaas/xstat/dao/IInformantInfoDao.html#deleteInformantInfo(org.xsaas.xstat.entity.InformantInfo)">deleteInformantInfo</A></B>(<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> data)</CODE>
<BR>
ɾ³ý±»¼ì²â¶ÔÏóÐÅÏ¢</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B>IInformantInfoDao.</B><B><A HREF="../../../../../org/xsaas/xstat/dao/IInformantInfoDao.html#saveInformantInfo(org.xsaas.xstat.entity.InformantInfo)">saveInformantInfo</A></B>(<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> data)</CODE>
<BR>
±£´æ±»¼ì²â¶ÔÏóÐÅÏ¢</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B>IInformantInfoDao.</B><B><A HREF="../../../../../org/xsaas/xstat/dao/IInformantInfoDao.html#updateInformantInfo(org.xsaas.xstat.entity.InformantInfo)">updateInformantInfo</A></B>(<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> data)</CODE>
<BR>
¸üб»¼ì²â¶ÔÏóÐÅÏ¢</TD>
</TR>
</TABLE>
<P>
<A NAME="org.xsaas.xstat.dao.hibernate"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<A HREF="../../../../../org/xsaas/xstat/dao/hibernate/package-summary.html">org.xsaas.xstat.dao.hibernate</A> ÖÐ <A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> µÄʹÓÃ</FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">·µ»Ø <A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> µÄ <A HREF="../../../../../org/xsaas/xstat/dao/hibernate/package-summary.html">org.xsaas.xstat.dao.hibernate</A> Öеķ½·¨</FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> <A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A></CODE></FONT></TD>
<TD><CODE><B>InformantInfoDao.</B><B><A HREF="../../../../../org/xsaas/xstat/dao/hibernate/InformantInfoDao.html#getInformantInfo(java.lang.Long)">getInformantInfo</A></B>(java.lang.Long informantID)</CODE>
<BR>
</TD>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">·µ»Ø±äÁ¿ÀàÐÍΪ <A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> µÄÀàÐ굀 <A HREF="../../../../../org/xsaas/xstat/dao/hibernate/package-summary.html">org.xsaas.xstat.dao.hibernate</A> Öеķ½·¨</FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> java.util.List<<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A>></CODE></FONT></TD>
<TD><CODE><B>InformantInfoDao.</B><B><A HREF="../../../../../org/xsaas/xstat/dao/hibernate/InformantInfoDao.html#findInformantInfoByPage(int, int)">findInformantInfoByPage</A></B>(int firstResult,
int maxResult)</CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>&nbsp;java.util.List&lt;<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A>&gt;</CODE></FONT></TD>
<TD><CODE><B>InformantInfoDao.</B><B><A HREF="../../../../../org/xsaas/xstat/dao/hibernate/InformantInfoDao.html#findInformantInfoByPage(long, int, int)">findInformantInfoByPage</A></B>(long clientID,
int firstResult,
int maxResult)</CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>&nbsp;java.util.List&lt;<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A>&gt;</CODE></FONT></TD>
<TD><CODE><B>InformantInfoDao.</B><B><A HREF="../../../../../org/xsaas/xstat/dao/hibernate/InformantInfoDao.html#findPageByDelStatus(int, int)">findPageByDelStatus</A></B>(int firstResult,
int maxResult)</CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>&nbsp;java.util.List&lt;<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A>&gt;</CODE></FONT></TD>
<TD><CODE><B>InformantInfoDao.</B><B><A HREF="../../../../../org/xsaas/xstat/dao/hibernate/InformantInfoDao.html#getInformantInfoList()">getInformantInfoList</A></B>()</CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>&nbsp;java.util.List&lt;<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A>&gt;</CODE></FONT></TD>
<TD><CODE><B>InformantInfoDao.</B><B><A HREF="../../../../../org/xsaas/xstat/dao/hibernate/InformantInfoDao.html#getInformantInfoList(java.lang.Long)">getInformantInfoList</A></B>(java.lang.Long clientID)</CODE>
<BR>
</TD>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">²ÎÊýÀàÐÍΪ <A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> µÄ <A HREF="../../../../../org/xsaas/xstat/dao/hibernate/package-summary.html">org.xsaas.xstat.dao.hibernate</A> Öеķ½·¨</FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B>InformantInfoDao.</B><B><A HREF="../../../../../org/xsaas/xstat/dao/hibernate/InformantInfoDao.html#deleteInformantInfo(org.xsaas.xstat.entity.InformantInfo)">deleteInformantInfo</A></B>(<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> data)</CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B>InformantInfoDao.</B><B><A HREF="../../../../../org/xsaas/xstat/dao/hibernate/InformantInfoDao.html#saveInformantInfo(org.xsaas.xstat.entity.InformantInfo)">saveInformantInfo</A></B>(<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> data)</CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B>InformantInfoDao.</B><B><A HREF="../../../../../org/xsaas/xstat/dao/hibernate/InformantInfoDao.html#updateInformantInfo(org.xsaas.xstat.entity.InformantInfo)">updateInformantInfo</A></B>(<A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà">InformantInfo</A> data)</CODE>
<BR>
</TD>
</TR>
</TABLE>
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Ìø¹ýµ¼º½Á´½Ó"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>¸ÅÊö</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Èí¼þ°ü</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../org/xsaas/xstat/entity/InformantInfo.html" title="org.xsaas.xstat.entity ÖеÄÀà"><FONT CLASS="NavBarFont1"><B>Àà</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>ʹÓÃ</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Ê÷</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Òѹýʱ</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Ë÷Òý</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>°ïÖú</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
ÉÏÒ»¸ö
ÏÂÒ»¸ö</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?org/xsaas/xstat/entity/\class-useInformantInfo.html" target="_top"><B>¿ò¼Ü</B></A>
<A HREF="InformantInfo.html" target="_top"><B>ÎÞ¿ò¼Ü</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>ËùÓÐÀà</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>ËùÓÐÀà</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
<i>Copyright © 2008 ±±¾©°¬µÏÖÇÈíÐÅÏ¢¼¼ÊõÓÐÏÞÔðÈι«Ë¾. All Rights Reserved.</i><br/><i><a href="http://www.idea-soft.cn" target="_blank">http://www.idea-soft.cn</a></i> <i><a href="http://www.iaicc.cn" target="_blank">http://www.iaicc.cn</a></i> <i><a href="http://www.xsaas.org" target="_blank">http://www.xsaas.org</a></i>
</BODY>
</HTML>
| wangxin39/xstat | XStatAPI/docs/api/org/xsaas/xstat/entity/class-use/InformantInfo.html | HTML | bsd-3-clause | 35,607 |
from django.test import TestCase
from addressbase.models import Address
from addressbase.tests.factories import AddressFactory, UprnToCouncilFactory
class TestAddressFactory(TestCase):
    """Sanity checks for the AddressFactory test fixture."""

    def test_address_factory(self):
        # A freshly built Address carries a 9-character UPRN and the
        # default addressbase_postal flag of "D".
        built = AddressFactory()
        self.assertEqual(9, len(built.uprn))
        self.assertEqual("D", built.addressbase_postal)
class TestUprnToCouncilFactory(TestCase):
    """Sanity checks for the UprnToCouncilFactory test fixture."""

    def test_uprn_to_council_factory(self):
        # The factory should attach a real Address instance via the uprn
        # relation, not just a bare key.
        record = UprnToCouncilFactory()
        self.assertIsInstance(record.uprn, Address)
| DemocracyClub/UK-Polling-Stations | polling_stations/apps/addressbase/tests/test_factories.py | Python | bsd-3-clause | 562 |
""" Test functions for stats module
"""
import warnings
import re
import sys
import pickle
import os
from numpy.testing import (assert_equal, assert_array_equal,
assert_almost_equal, assert_array_almost_equal,
assert_allclose, assert_, assert_warns,
assert_array_less, suppress_warnings)
import pytest
from pytest import raises as assert_raises
import numpy
import numpy as np
from numpy import typecodes, array
from numpy.lib.recfunctions import rec_append_fields
from scipy import special
from scipy._lib._util import check_random_state
from scipy.integrate import IntegrationWarning
import scipy.stats as stats
from scipy.stats._distn_infrastructure import argsreduce
import scipy.stats.distributions
from scipy.special import xlogy
from .test_continuous_basic import distcont
# python -OO strips docstrings
DOCSTRINGS_STRIPPED = sys.flags.optimize > 1
def _assert_hasattr(a, b, msg=None):
if msg is None:
msg = '%s does not have attribute %s' % (a, b)
assert_(hasattr(a, b), msg=msg)
def test_api_regression():
    # gh-3802: scipy.stats.distributions must keep exposing the internal
    # *_gen distribution classes (here f_gen) for backward compatibility.
    # https://github.com/scipy/scipy/issues/3802
    _assert_hasattr(scipy.stats.distributions, 'f_gen')
def check_vonmises_pdf_periodic(k, l, s, x):
    """Verify the von Mises pdf repeats with period 2*pi*scale."""
    frozen = stats.vonmises(k, loc=l, scale=s)
    wrapped = x % (2 * numpy.pi * s)
    assert_almost_equal(frozen.pdf(x), frozen.pdf(wrapped))
def check_vonmises_cdf_periodic(k, l, s, x):
    """Verify the von Mises cdf (taken mod 1) repeats with period 2*pi*scale."""
    frozen = stats.vonmises(k, loc=l, scale=s)
    wrapped = x % (2 * numpy.pi * s)
    assert_almost_equal(frozen.cdf(x) % 1, frozen.cdf(wrapped) % 1)
def test_vonmises_pdf_periodic():
    """Periodicity of the von Mises pdf/cdf over shapes, locs and scales."""
    for k in [0.1, 1, 101]:
        for x in [0, 1, numpy.pi, 10, 100]:
            # Same (loc, scale) combinations for the pdf and cdf checks.
            for l, s in [(0, 1), (1, 1), (0, 10)]:
                check_vonmises_pdf_periodic(k, l, s, x)
            for l, s in [(0, 1), (1, 1), (0, 10)]:
                check_vonmises_cdf_periodic(k, l, s, x)
def test_vonmises_line_support():
    """vonmises_line is supported on the closed interval [-pi, pi]."""
    endpoints = (stats.vonmises_line.a, stats.vonmises_line.b)
    assert_equal(endpoints, (-np.pi, np.pi))
def test_vonmises_numerical():
    """A very concentrated von Mises stays numerically sane: cdf(0) == 0.5."""
    concentrated = stats.vonmises(800)
    assert_almost_equal(concentrated.cdf(0), 0.5)
@pytest.mark.parametrize('dist',
                         ['alpha', 'betaprime',
                          'fatiguelife', 'invgamma', 'invgauss', 'invweibull',
                          'johnsonsb', 'levy', 'levy_l', 'lognorm', 'gilbrat',
                          'powerlognorm', 'rayleigh', 'wald'])
def test_support(dist):
    """gh-6235: pdf must vanish (and logpdf be -inf) at the support
    endpoints dist.a and dist.b for these distributions."""
    # Shape parameters for each distribution come from the shared distcont
    # table used by the continuous-distribution test suite.
    dct = dict(distcont)
    args = dct[dist]
    dist = getattr(stats, dist)
    assert_almost_equal(dist.pdf(dist.a, *args), 0)
    assert_equal(dist.logpdf(dist.a, *args), -np.inf)
    assert_almost_equal(dist.pdf(dist.b, *args), 0)
    assert_equal(dist.logpdf(dist.b, *args), -np.inf)
class TestRandInt(object):
    """Tests for stats.randint, the discrete uniform distribution."""

    def setup_method(self):
        # Fixed seed so rvs-based checks are reproducible; the tests below
        # consume the global random stream in a specific order.
        np.random.seed(1234)

    def test_rvs(self):
        # Samples must respect the half-open support [low, high) and keep
        # the requested shape and an integer dtype.
        vals = stats.randint.rvs(5, 30, size=100)
        assert_(numpy.all(vals < 30) & numpy.all(vals >= 5))
        assert_(len(vals) == 100)
        vals = stats.randint.rvs(5, 30, size=(2, 50))
        assert_(numpy.shape(vals) == (2, 50))
        assert_(vals.dtype.char in typecodes['AllInteger'])
        val = stats.randint.rvs(15, 46)
        assert_((val >= 15) & (val < 46))
        assert_(isinstance(val, numpy.ScalarType), msg=repr(type(val)))
        val = stats.randint(15, 46).rvs(3)
        assert_(val.dtype.char in typecodes['AllInteger'])

    def test_pdf(self):
        # The pmf is flat (1/(high-low)) on the support and zero elsewhere.
        k = numpy.r_[0:36]
        out = numpy.where((k >= 5) & (k < 30), 1.0/(30-5), 0)
        vals = stats.randint.pmf(k, 5, 30)
        assert_array_almost_equal(vals, out)

    def test_cdf(self):
        # The cdf is a staircase; floor(x) determines which step applies.
        x = np.linspace(0, 36, 100)
        k = numpy.floor(x)
        out = numpy.select([k >= 30, k >= 5], [1.0, (k-5.0+1)/(30-5.0)], 0)
        vals = stats.randint.cdf(x, 5, 30)
        assert_array_almost_equal(vals, out, decimal=12)
class TestBinom(object):
    """Tests for the binomial distribution."""

    def setup_method(self):
        # Fixed seed so rvs-based checks are reproducible.
        np.random.seed(1234)

    def test_rvs(self):
        # Samples must lie in [0, n], keep shape and have integer dtype.
        vals = stats.binom.rvs(10, 0.75, size=(2, 50))
        assert_(numpy.all(vals >= 0) & numpy.all(vals <= 10))
        assert_(numpy.shape(vals) == (2, 50))
        assert_(vals.dtype.char in typecodes['AllInteger'])
        val = stats.binom.rvs(10, 0.75)
        assert_(isinstance(val, int))
        val = stats.binom(10, 0.75).rvs(3)
        assert_(isinstance(val, numpy.ndarray))
        assert_(val.dtype.char in typecodes['AllInteger'])

    def test_pmf(self):
        # regression test for Ticket #1842: degenerate p=0 and p=1 cases
        # must give probability exactly 1 at the corresponding endpoint.
        vals1 = stats.binom.pmf(100, 100, 1)
        vals2 = stats.binom.pmf(0, 100, 0)
        assert_allclose(vals1, 1.0, rtol=1e-15, atol=0)
        assert_allclose(vals2, 1.0, rtol=1e-15, atol=0)

    def test_entropy(self):
        # Basic entropy tests against the hand-computed pmf of Binom(2, 0.5);
        # degenerate p=0 and p=1 distributions have zero entropy.
        b = stats.binom(2, 0.5)
        expected_p = np.array([0.25, 0.5, 0.25])
        expected_h = -sum(xlogy(expected_p, expected_p))
        h = b.entropy()
        assert_allclose(h, expected_h)
        b = stats.binom(2, 0.0)
        h = b.entropy()
        assert_equal(h, 0.0)
        b = stats.binom(2, 1.0)
        h = b.entropy()
        assert_equal(h, 0.0)

    def test_warns_p0(self):
        # no spurious warnings are generated for p=0; gh-3817
        with warnings.catch_warnings():
            warnings.simplefilter("error", RuntimeWarning)
            assert_equal(stats.binom(n=2, p=0).mean(), 0)
            assert_equal(stats.binom(n=2, p=0).std(), 0)
class TestBernoulli(object):
    """Tests for the Bernoulli distribution."""

    def setup_method(self):
        # Fixed seed so rvs-based checks are reproducible.
        np.random.seed(1234)

    def test_rvs(self):
        # Samples must be 0/1, keep shape and have integer dtype.
        vals = stats.bernoulli.rvs(0.75, size=(2, 50))
        assert_(numpy.all(vals >= 0) & numpy.all(vals <= 1))
        assert_(numpy.shape(vals) == (2, 50))
        assert_(vals.dtype.char in typecodes['AllInteger'])
        val = stats.bernoulli.rvs(0.75)
        assert_(isinstance(val, int))
        val = stats.bernoulli(0.75).rvs(3)
        assert_(isinstance(val, numpy.ndarray))
        assert_(val.dtype.char in typecodes['AllInteger'])

    def test_entropy(self):
        # Simple tests of entropy: closed form for p=0.25, and zero for the
        # degenerate p=0 and p=1 distributions.
        b = stats.bernoulli(0.25)
        expected_h = -0.25*np.log(0.25) - 0.75*np.log(0.75)
        h = b.entropy()
        assert_allclose(h, expected_h)
        b = stats.bernoulli(0.0)
        h = b.entropy()
        assert_equal(h, 0.0)
        b = stats.bernoulli(1.0)
        h = b.entropy()
        assert_equal(h, 0.0)
class TestBradford(object):
    """Regression tests for the Bradford distribution (gh-6216)."""

    def test_cdf_ppf(self):
        # ppf must invert cdf even for extremely small arguments.
        shape = 0.1
        pts = np.logspace(-20, -4)
        roundtrip = stats.bradford.ppf(stats.bradford.cdf(pts, shape), shape)
        assert_allclose(pts, roundtrip)
class TestNBinom(object):
    """Tests for the negative binomial distribution."""

    def setup_method(self):
        # Fixed seed so rvs-based checks are reproducible.
        np.random.seed(1234)

    def test_rvs(self):
        # Samples are non-negative, keep shape and have integer dtype.
        vals = stats.nbinom.rvs(10, 0.75, size=(2, 50))
        assert_(numpy.all(vals >= 0))
        assert_(numpy.shape(vals) == (2, 50))
        assert_(vals.dtype.char in typecodes['AllInteger'])
        val = stats.nbinom.rvs(10, 0.75)
        assert_(isinstance(val, int))
        val = stats.nbinom(10, 0.75).rvs(3)
        assert_(isinstance(val, numpy.ndarray))
        assert_(val.dtype.char in typecodes['AllInteger'])

    def test_pmf(self):
        # regression test for ticket 1779: pmf and exp(logpmf) must agree
        # in the far tail.
        assert_allclose(np.exp(stats.nbinom.logpmf(700, 721, 0.52)),
                        stats.nbinom.pmf(700, 721, 0.52))
        # logpmf(0,1,1) shouldn't return nan (regression test for gh-4029)
        val = scipy.stats.nbinom.logpmf(0, 1, 1)
        assert_equal(val, 0)
class TestGenInvGauss(object):
    """Tests for the generalized inverse Gaussian distribution, covering
    each rvs sampling branch plus pdf/cdf reference values."""

    def setup_method(self):
        # Fixed seed for reproducibility of sampling-based checks.
        np.random.seed(1234)

    @pytest.mark.slow
    def test_rvs_with_mode_shift(self):
        # ratio_unif w/ mode shift; KS test p-value must not reject.
        gig = stats.geninvgauss(2.3, 1.5)
        _, p = stats.kstest(gig.rvs(size=1500, random_state=1234), gig.cdf)
        assert_equal(p > 0.05, True)

    @pytest.mark.slow
    def test_rvs_without_mode_shift(self):
        # ratio_unif w/o mode shift
        gig = stats.geninvgauss(0.9, 0.75)
        _, p = stats.kstest(gig.rvs(size=1500, random_state=1234), gig.cdf)
        assert_equal(p > 0.05, True)

    @pytest.mark.slow
    def test_rvs_new_method(self):
        # new algorithm of Hoermann / Leydold
        gig = stats.geninvgauss(0.1, 0.2)
        _, p = stats.kstest(gig.rvs(size=1500, random_state=1234), gig.cdf)
        assert_equal(p > 0.05, True)

    @pytest.mark.slow
    def test_rvs_p_zero(self):
        def my_ks_check(p, b):
            # Helper: True when the sample is consistent with the cdf.
            gig = stats.geninvgauss(p, b)
            rvs = gig.rvs(size=1500, random_state=1234)
            return stats.kstest(rvs, gig.cdf)[1] > 0.05
        # boundary cases when p = 0
        assert_equal(my_ks_check(0, 0.2), True)  # new algo
        assert_equal(my_ks_check(0, 0.9), True)  # ratio_unif w/o shift
        assert_equal(my_ks_check(0, 1.5), True)  # ratio_unif with shift

    def test_rvs_negative_p(self):
        # if p negative, return inverse of the positive-p variates
        assert_equal(
            stats.geninvgauss(-1.5, 2).rvs(size=10, random_state=1234),
            1 / stats.geninvgauss(1.5, 2).rvs(size=10, random_state=1234))

    def test_invgauss(self):
        # test that invgauss is a special case (p = -1/2)
        ig = stats.geninvgauss.rvs(size=1500, p=-0.5, b=1, random_state=1234)
        assert_equal(stats.kstest(ig, 'invgauss', args=[1])[1] > 0.15, True)
        # test pdf and cdf
        mu, x = 100, np.linspace(0.01, 1, 10)
        pdf_ig = stats.geninvgauss.pdf(x, p=-0.5, b=1 / mu, scale=mu)
        assert_allclose(pdf_ig, stats.invgauss(mu).pdf(x))
        cdf_ig = stats.geninvgauss.cdf(x, p=-0.5, b=1 / mu, scale=mu)
        assert_allclose(cdf_ig, stats.invgauss(mu).cdf(x))

    def test_pdf_R(self):
        # test against R package GIGrvg
        # x <- seq(0.01, 5, length.out = 10)
        # GIGrvg::dgig(x, 0.5, 1, 1)
        vals_R = np.array([2.081176820e-21, 4.488660034e-01, 3.747774338e-01,
                           2.693297528e-01, 1.905637275e-01, 1.351476913e-01,
                           9.636538981e-02, 6.909040154e-02, 4.978006801e-02,
                           3.602084467e-02])
        x = np.linspace(0.01, 5, 10)
        assert_allclose(vals_R, stats.geninvgauss.pdf(x, 0.5, 1))

    def test_pdf_zero(self):
        # pdf at 0 is 0, needs special treatment to avoid 1/x in pdf
        assert_equal(stats.geninvgauss.pdf(0, 0.5, 0.5), 0)
        # if x is large and p is moderate, make sure that pdf does not
        # overflow because of x**(p-1); exp(-b*x) forces pdf to zero
        assert_equal(stats.geninvgauss.pdf(2e6, 50, 2), 0)
class TestNormInvGauss(object):
    """Tests for the normal inverse Gaussian distribution against R
    reference values and closed-form moments."""

    def setup_method(self):
        # Fixed seed for reproducibility.
        np.random.seed(1234)

    def test_cdf_R(self):
        # test pdf and cdf vals against R
        # require("GeneralizedHyperbolic")
        # x_test <- c(-7, -5, 0, 8, 15)
        # r_cdf <- GeneralizedHyperbolic::pnig(x_test, mu = 0, a = 1, b = 0.5)
        # r_pdf <- GeneralizedHyperbolic::dnig(x_test, mu = 0, a = 1, b = 0.5)
        r_cdf = np.array([8.034920282e-07, 2.512671945e-05, 3.186661051e-01,
                          9.988650664e-01, 9.999848769e-01])
        x_test = np.array([-7, -5, 0, 8, 15])
        vals_cdf = stats.norminvgauss.cdf(x_test, a=1, b=0.5)
        assert_allclose(vals_cdf, r_cdf, atol=1e-9)

    def test_pdf_R(self):
        # values from R as defined in test_cdf_R
        r_pdf = np.array([1.359600783e-06, 4.413878805e-05, 4.555014266e-01,
                          7.450485342e-04, 8.917889931e-06])
        x_test = np.array([-7, -5, 0, 8, 15])
        vals_pdf = stats.norminvgauss.pdf(x_test, a=1, b=0.5)
        assert_allclose(vals_pdf, r_pdf, atol=1e-9)

    def test_stats(self):
        # Closed-form mean/var/skew/kurtosis written in terms of
        # gamma = sqrt(a**2 - b**2).
        a, b = 1, 0.5
        gamma = np.sqrt(a**2 - b**2)
        v_stats = (b / gamma, a**2 / gamma**3, 3.0 * b / (a * np.sqrt(gamma)),
                   3.0 * (1 + 4 * b**2 / a**2) / gamma)
        assert_equal(v_stats, stats.norminvgauss.stats(a, b, moments='mvsk'))

    def test_ppf(self):
        # ppf round-trips through cdf at representative quantiles.
        a, b = 1, 0.5
        x_test = np.array([0.001, 0.5, 0.999])
        vals = stats.norminvgauss.ppf(x_test, a, b)
        assert_allclose(x_test, stats.norminvgauss.cdf(vals, a, b))
class TestGeom(object):
    """Tests for the geometric distribution."""

    def setup_method(self):
        # Fixed seed so rvs-based checks are reproducible.
        np.random.seed(1234)

    def test_rvs(self):
        # Samples keep shape and have integer dtype.
        vals = stats.geom.rvs(0.75, size=(2, 50))
        assert_(numpy.all(vals >= 0))
        assert_(numpy.shape(vals) == (2, 50))
        assert_(vals.dtype.char in typecodes['AllInteger'])
        val = stats.geom.rvs(0.75)
        assert_(isinstance(val, int))
        val = stats.geom(0.75).rvs(3)
        assert_(isinstance(val, numpy.ndarray))
        assert_(val.dtype.char in typecodes['AllInteger'])

    def test_pmf(self):
        # pmf(k) = p * (1-p)**(k-1); for p=0.5 this halves each step.
        vals = stats.geom.pmf([1, 2, 3], 0.5)
        assert_array_almost_equal(vals, [0.5, 0.25, 0.125])

    def test_logpmf(self):
        # regression test for ticket 1793: logpmf agrees with log(pmf).
        vals1 = np.log(stats.geom.pmf([1, 2, 3], 0.5))
        vals2 = stats.geom.logpmf([1, 2, 3], 0.5)
        assert_allclose(vals1, vals2, rtol=1e-15, atol=0)
        # regression test for gh-4028
        val = stats.geom.logpmf(1, 1)
        assert_equal(val, 0.0)

    def test_cdf_sf(self):
        # cdf and sf are complementary.
        vals = stats.geom.cdf([1, 2, 3], 0.5)
        vals_sf = stats.geom.sf([1, 2, 3], 0.5)
        expected = array([0.5, 0.75, 0.875])
        assert_array_almost_equal(vals, expected)
        assert_array_almost_equal(vals_sf, 1-expected)

    def test_logcdf_logsf(self):
        # log-scale variants match log of the plain cdf/sf.
        vals = stats.geom.logcdf([1, 2, 3], 0.5)
        vals_sf = stats.geom.logsf([1, 2, 3], 0.5)
        expected = array([0.5, 0.75, 0.875])
        assert_array_almost_equal(vals, np.log(expected))
        assert_array_almost_equal(vals_sf, np.log1p(-expected))

    def test_ppf(self):
        # ppf inverts the cdf staircase.
        vals = stats.geom.ppf([0.5, 0.75, 0.875], 0.5)
        expected = array([1.0, 2.0, 3.0])
        assert_array_almost_equal(vals, expected)

    def test_ppf_underflow(self):
        # this should not underflow
        assert_allclose(stats.geom.ppf(1e-20, 1e-20), 1.0, atol=1e-14)
class TestPlanck(object):
    """Tests for the Planck (discrete exponential) distribution."""

    def setup_method(self):
        # Seed kept for parity with the other discrete-distribution tests.
        np.random.seed(1234)

    def test_sf(self):
        # The survival function decays geometrically for lambda = 5.
        computed = stats.planck.sf([1, 2, 3], 5.)
        reference = array([4.5399929762484854e-05,
                           3.0590232050182579e-07,
                           2.0611536224385579e-09])
        assert_array_almost_equal(computed, reference)

    def test_logsf(self):
        # logsf stays finite and exact far in the tail where sf underflows.
        computed = stats.planck.logsf([1000., 2000., 3000.], 1000.)
        reference = array([-1001000., -2001000., -3001000.])
        assert_array_almost_equal(computed, reference)
class TestGennorm(object):
    """The generalized normal reduces to known laws at beta=1 and beta=2."""

    def test_laplace(self):
        # beta=1 is exactly the Laplace distribution.
        pts = [1, 2, 3]
        assert_almost_equal(stats.gennorm.pdf(pts, 1),
                            stats.laplace.pdf(pts))

    def test_norm(self):
        # beta=2 is a normal with scale 1/sqrt(2).
        pts = [1, 2, 3]
        assert_almost_equal(stats.gennorm.pdf(pts, 2),
                            stats.norm.pdf(pts, scale=2**-.5))
class TestHalfgennorm(object):
    """The half generalized normal reduces to known one-sided laws."""

    def test_expon(self):
        # beta=1 is exactly the exponential distribution.
        pts = [1, 2, 3]
        assert_almost_equal(stats.halfgennorm.pdf(pts, 1),
                            stats.expon.pdf(pts))

    def test_halfnorm(self):
        # beta=2 is a half normal with scale 1/sqrt(2).
        pts = [1, 2, 3]
        assert_almost_equal(stats.halfgennorm.pdf(pts, 2),
                            stats.halfnorm.pdf(pts, scale=2**-.5))

    def test_gennorm(self):
        # For any beta the half law is twice the two-sided density.
        pts = [1, 2, 3]
        assert_almost_equal(stats.halfgennorm.pdf(pts, .497324),
                            2*stats.gennorm.pdf(pts, .497324))
class TestTruncnorm(object):
    """Tests for the truncated normal distribution, with emphasis on
    numerical behaviour far in the tails (gh-1489, gh-2477, gh-9403)."""

    def setup_method(self):
        # Fixed seed so rvs-based checks are reproducible.
        np.random.seed(1234)

    def test_ppf_ticket1131(self):
        # ppf: out-of-range probabilities give nan, endpoints map to the
        # truncation bounds (after loc/scale transformation).
        vals = stats.truncnorm.ppf([-0.5, 0, 1e-4, 0.5, 1-1e-4, 1, 2], -1., 1.,
                                   loc=[3]*7, scale=2)
        expected = np.array([np.nan, 1, 1.00056419, 3, 4.99943581, 5, np.nan])
        assert_array_almost_equal(vals, expected)

    def test_isf_ticket1131(self):
        # isf mirrors ppf with the probability axis reversed.
        vals = stats.truncnorm.isf([-0.5, 0, 1e-4, 0.5, 1-1e-4, 1, 2], -1., 1.,
                                   loc=[3]*7, scale=2)
        expected = np.array([np.nan, 5, 4.99943581, 3, 1.00056419, 1, np.nan])
        assert_array_almost_equal(vals, expected)

    def test_gh_2477_small_values(self):
        # Check a case that worked in the original issue.
        low, high = -11, -10
        x = stats.truncnorm.rvs(low, high, 0, 1, size=10)
        assert_(low < x.min() < x.max() < high)
        # Check a case that failed in the original issue.
        low, high = 10, 11
        x = stats.truncnorm.rvs(low, high, 0, 1, size=10)
        assert_(low < x.min() < x.max() < high)

    # @pytest.mark.xfail(reason="truncnorm rvs is know to fail at extreme tails")
    def test_gh_2477_large_values(self):
        # Check a case that used to fail because of extreme tailness.
        low, high = 100, 101
        with np.errstate(divide='ignore'):
            x = stats.truncnorm.rvs(low, high, 0, 1, size=10)
        print(low, x.min(), x.max(), high)
        assert_(low <= x.min() <= x.max() <= high), str([low, high, x])
        # Check some additional extreme tails
        low, high = 1000, 1001
        x = stats.truncnorm.rvs(low, high, 0, 1, size=10)
        assert_(low < x.min() < x.max() < high)
        low, high = 10000, 10001
        x = stats.truncnorm.rvs(low, high, 0, 1, size=10)
        assert_(low < x.min() < x.max() < high)

    def test_gh_9403_nontail_values(self):
        # cdf/sf/pdf/ppf consistency for moderate truncation windows,
        # including the mirror symmetry [3,4] <-> [-4,-3].
        for low, high in [[3, 4], [-4, -3]]:
            xvals = np.array([-np.inf, low, high, np.inf])
            xmid = (high+low)/2.0
            cdfs = stats.truncnorm.cdf(xvals, low, high)
            sfs = stats.truncnorm.sf(xvals, low, high)
            pdfs = stats.truncnorm.pdf(xvals, low, high)
            expected_cdfs = np.array([0, 0, 1, 1])
            expected_sfs = np.array([1.0, 1.0, 0.0, 0.0])
            expected_pdfs = np.array([0, 3.3619772, 0.1015229, 0])
            if low < 0:
                expected_pdfs = np.array([0, 0.1015229, 3.3619772, 0])
            assert_almost_equal(cdfs, expected_cdfs)
            assert_almost_equal(sfs, expected_sfs)
            assert_almost_equal(pdfs, expected_pdfs)
            assert_almost_equal(np.log(expected_pdfs[1]/expected_pdfs[2]), low+0.5)
            pvals = np.array([0, 0.5, 1.0])
            ppfs = stats.truncnorm.ppf(pvals, low, high)
            expected_ppfs = np.array([low, np.sign(low)*3.1984741, high])
            assert_almost_equal(ppfs, expected_ppfs)
            if low < 0:
                assert_almost_equal(stats.truncnorm.sf(xmid, low, high), 0.8475544278436675)
                assert_almost_equal(stats.truncnorm.cdf(xmid, low, high), 0.1524455721563326)
            else:
                assert_almost_equal(stats.truncnorm.cdf(xmid, low, high), 0.8475544278436675)
                assert_almost_equal(stats.truncnorm.sf(xmid, low, high), 0.1524455721563326)
            pdf = stats.truncnorm.pdf(xmid, low, high)
            assert_almost_equal(np.log(pdf/expected_pdfs[2]), (xmid+0.25)/2)

    def test_gh_9403_medium_tail_values(self):
        # Same consistency checks further out in the tails ([39,40]),
        # plus explicit cdf/sf/pdf mirror-symmetry over a grid.
        for low, high in [[39, 40], [-40, -39]]:
            xvals = np.array([-np.inf, low, high, np.inf])
            xmid = (high+low)/2.0
            cdfs = stats.truncnorm.cdf(xvals, low, high)
            sfs = stats.truncnorm.sf(xvals, low, high)
            pdfs = stats.truncnorm.pdf(xvals, low, high)
            expected_cdfs = np.array([0, 0, 1, 1])
            expected_sfs = np.array([1.0, 1.0, 0.0, 0.0])
            expected_pdfs = np.array([0, 3.90256074e+01, 2.73349092e-16, 0])
            if low < 0:
                expected_pdfs = np.array([0, 2.73349092e-16, 3.90256074e+01, 0])
            assert_almost_equal(cdfs, expected_cdfs)
            assert_almost_equal(sfs, expected_sfs)
            assert_almost_equal(pdfs, expected_pdfs)
            assert_almost_equal(np.log(expected_pdfs[1]/expected_pdfs[2]), low+0.5)
            pvals = np.array([0, 0.5, 1.0])
            ppfs = stats.truncnorm.ppf(pvals, low, high)
            expected_ppfs = np.array([low, np.sign(low)*39.01775731, high])
            assert_almost_equal(ppfs, expected_ppfs)
            cdfs = stats.truncnorm.cdf(ppfs, low, high)
            assert_almost_equal(cdfs, pvals)
            if low < 0:
                assert_almost_equal(stats.truncnorm.sf(xmid, low, high), 0.9999999970389126)
                assert_almost_equal(stats.truncnorm.cdf(xmid, low, high), 2.961048103554866e-09)
            else:
                assert_almost_equal(stats.truncnorm.cdf(xmid, low, high), 0.9999999970389126)
                assert_almost_equal(stats.truncnorm.sf(xmid, low, high), 2.961048103554866e-09)
            pdf = stats.truncnorm.pdf(xmid, low, high)
            assert_almost_equal(np.log(pdf/expected_pdfs[2]), (xmid+0.25)/2)
            xvals = np.linspace(low, high, 11)
            xvals2 = -xvals[::-1]
            assert_almost_equal(stats.truncnorm.cdf(xvals, low, high), stats.truncnorm.sf(xvals2, -high, -low)[::-1])
            assert_almost_equal(stats.truncnorm.sf(xvals, low, high), stats.truncnorm.cdf(xvals2, -high, -low)[::-1])
            assert_almost_equal(stats.truncnorm.pdf(xvals, low, high), stats.truncnorm.pdf(xvals2, -high, -low)[::-1])

    def _test_moments_one_range(self, a, b, expected):
        # Helper: compare the four mvsk moments on a single [a, b] window.
        m0, v0, s0, k0 = expected[:4]
        m, v, s, k = stats.truncnorm.stats(a, b, moments='mvsk')
        assert_almost_equal(m, m0)
        assert_almost_equal(v, v0)
        assert_almost_equal(s, s0)
        assert_almost_equal(k, k0)

    @pytest.mark.xfail_on_32bit("reduced accuracy with 32bit platforms.")
    def test_moments(self):
        # Values validated by changing TRUNCNORM_TAIL_X so as to evaluate
        # using both the _norm_XXX() and _norm_logXXX() functions, and by
        # removing the _stats and _munp methods in truncnorm tp force
        # numerical quadrature.
        self._test_moments_one_range(-30, 30, [0, 1, 0.0, 0.0])
        self._test_moments_one_range(-10, 10, [0, 1, 0.0, 0.0])
        self._test_moments_one_range(-3, 3, [0, 0.97333692, 0.0, -0.17111444])
        self._test_moments_one_range(-2, 2, [0, 0.7737413, 0.0, -0.63446328])
        self._test_moments_one_range(0, np.inf, [0.79788456, 0.36338023, 0.99527175, 0.8691773])
        self._test_moments_one_range(-1, 3, [0.2827861, 0.61614174, 0.53930185, -0.20582065])
        self._test_moments_one_range(-3, 1, [-0.2827861, 0.61614174, -0.53930185, -0.20582065])
        self._test_moments_one_range(-10, -9, [-9.10845629, 0.01144881, -1.89856073, 5.07334611])
        self._test_moments_one_range(-20, -19, [-19.05234395, 0.00272507, -1.9838686, 5.87208674])
        self._test_moments_one_range(-30, -29, [-29.03440124, 0.00118066, -1.99297727, 5.9303358])
        self._test_moments_one_range(-40, -39, [-39.02560741993262, 0.0006548, -1.99631464, 5.61677584])
        self._test_moments_one_range(39, 40, [39.02560741993262, 0.0006548, 1.99631464, 5.61677584])

    def test_9902_moments(self):
        # Half-normal window [0, inf): mean and variance in closed form.
        m, v = stats.truncnorm.stats(0, np.inf, moments='mv')
        assert_almost_equal(m, 0.79788456)
        assert_almost_equal(v, 0.36338023)

    def test_gh_1489_trac_962_rvs(self):
        # Check the original example.
        low, high = 10, 15
        x = stats.truncnorm.rvs(low, high, 0, 1, size=10)
        assert_(low < x.min() < x.max() < high)
class TestHypergeom(object):
    """Tests for the hypergeometric distribution, including precision in
    the extreme tails (gh-1218, gh-4982, gh-8692)."""

    def setup_method(self):
        # Fixed seed so rvs-based checks are reproducible.
        np.random.seed(1234)

    def test_rvs(self):
        # Samples stay within [0, N], keep shape and have integer dtype.
        vals = stats.hypergeom.rvs(20, 10, 3, size=(2, 50))
        assert_(numpy.all(vals >= 0) &
                numpy.all(vals <= 3))
        assert_(numpy.shape(vals) == (2, 50))
        assert_(vals.dtype.char in typecodes['AllInteger'])
        val = stats.hypergeom.rvs(20, 3, 10)
        assert_(isinstance(val, int))
        val = stats.hypergeom(20, 3, 10).rvs(3)
        assert_(isinstance(val, numpy.ndarray))
        assert_(val.dtype.char in typecodes['AllInteger'])

    def test_precision(self):
        # comparison number from mpmath
        M = 2500
        n = 50
        N = 500
        tot = M
        good = n
        hgpmf = stats.hypergeom.pmf(2, tot, good, N)
        assert_almost_equal(hgpmf, 0.0010114963068932233, 11)

    def test_args(self):
        # test correct output for corner cases of arguments
        # see gh-2325
        assert_almost_equal(stats.hypergeom.pmf(0, 2, 1, 0), 1.0, 11)
        assert_almost_equal(stats.hypergeom.pmf(1, 2, 1, 0), 0.0, 11)
        assert_almost_equal(stats.hypergeom.pmf(0, 2, 0, 2), 1.0, 11)
        assert_almost_equal(stats.hypergeom.pmf(1, 2, 1, 0), 0.0, 11)

    def test_cdf_above_one(self):
        # for some values of parameters, hypergeom cdf was >1, see gh-2238
        assert_(0 <= stats.hypergeom.cdf(30, 13397950, 4363, 12390) <= 1.0)

    def test_precision2(self):
        # Test hypergeom precision for large numbers.  See #1218.
        # Results compared with those from R.
        oranges = 9.9e4
        pears = 1.1e5
        fruits_eaten = np.array([3, 3.8, 3.9, 4, 4.1, 4.2, 5]) * 1e4
        quantile = 2e4
        res = [stats.hypergeom.sf(quantile, oranges + pears, oranges, eaten)
               for eaten in fruits_eaten]
        expected = np.array([0, 1.904153e-114, 2.752693e-66, 4.931217e-32,
                             8.265601e-11, 0.1237904, 1])
        assert_allclose(res, expected, atol=0, rtol=5e-7)
        # Test with array_like first argument
        quantiles = [1.9e4, 2e4, 2.1e4, 2.15e4]
        res2 = stats.hypergeom.sf(quantiles, oranges + pears, oranges, 4.2e4)
        expected2 = [1, 0.1237904, 6.511452e-34, 3.277667e-69]
        assert_allclose(res2, expected2, atol=0, rtol=5e-7)

    def test_entropy(self):
        # Simple tests of entropy against the hand-computed pmf.
        hg = stats.hypergeom(4, 1, 1)
        h = hg.entropy()
        expected_p = np.array([0.75, 0.25])
        expected_h = -np.sum(xlogy(expected_p, expected_p))
        assert_allclose(h, expected_h)
        hg = stats.hypergeom(1, 1, 1)
        h = hg.entropy()
        assert_equal(h, 0.0)

    def test_logsf(self):
        # Test logsf for very large numbers. See issue #4982
        # Results compare with those from R (v3.2.0):
        # phyper(k, n, M-n, N, lower.tail=FALSE, log.p=TRUE)
        # -2239.771
        k = 1e4
        M = 1e7
        n = 1e6
        N = 5e4
        result = stats.hypergeom.logsf(k, M, n, N)
        expected = -2239.771   # From R
        assert_almost_equal(result, expected, decimal=3)
        k = 1
        M = 1600
        n = 600
        N = 300
        result = stats.hypergeom.logsf(k, M, n, N)
        expected = -2.566567e-68   # From R
        assert_almost_equal(result, expected, decimal=15)

    def test_logcdf(self):
        # Test logcdf for very large numbers. See issue #8692
        # Results compare with those from R (v3.3.2):
        # phyper(k, n, M-n, N, lower.tail=TRUE, log.p=TRUE)
        # -5273.335
        k = 1
        M = 1e7
        n = 1e6
        N = 5e4
        result = stats.hypergeom.logcdf(k, M, n, N)
        expected = -5273.335   # From R
        assert_almost_equal(result, expected, decimal=3)
        # Same example as in issue #8692
        k = 40
        M = 1600
        n = 50
        N = 300
        result = stats.hypergeom.logcdf(k, M, n, N)
        expected = -7.565148879229e-23    # From R
        assert_almost_equal(result, expected, decimal=15)
        k = 125
        M = 1600
        n = 250
        N = 500
        result = stats.hypergeom.logcdf(k, M, n, N)
        expected = -4.242688e-12    # From R
        assert_almost_equal(result, expected, decimal=15)
        # test broadcasting robustness based on reviewer
        # concerns in PR 9603; using an array version of
        # the example from issue #8692
        k = np.array([40, 40, 40])
        M = 1600
        n = 50
        N = 300
        result = stats.hypergeom.logcdf(k, M, n, N)
        expected = np.full(3, -7.565148879229e-23)  # filled from R result
        assert_almost_equal(result, expected, decimal=15)
class TestLoggamma(object):
    """Tests for the log-gamma distribution's moments."""

    def test_stats(self):
        # The following precomputed values are from the table in section 2.2
        # of "A Statistical Study of Log-Gamma Distribution", by Ping Shing
        # Chan (thesis, McMaster University, 1993).
        table = np.array([
            # c,    mean,    var,     skew,   exc. kurt.
            0.5, -1.9635, 4.9348, -1.5351, 4.0000,
            1.0, -0.5772, 1.6449, -1.1395, 2.4000,
            12.0, 2.4427, 0.0869, -0.2946, 0.1735,
            ]).reshape(-1, 5)
        for c, mean, var, skew, kurt in table:
            # Request the moments with the canonical 'mvsk' string so the
            # returned tuple visibly lines up with (mean, var, skew, kurt).
            # The previous 'msvk' string gave the same result only because
            # scipy emits moments in fixed m, v, s, k order regardless of
            # the letter order, which invited misreading.
            computed = stats.loggamma.stats(c, moments='mvsk')
            assert_array_almost_equal(computed, [mean, var, skew, kurt],
                                      decimal=4)
class TestLogistic(object):
    """Round-trip and tail-accuracy tests for the logistic distribution."""

    def test_cdf_ppf(self):
        # gh-6226: ppf should invert cdf across a wide range.
        grid = np.linspace(-20, 20)
        assert_allclose(grid, stats.logistic.ppf(stats.logistic.cdf(grid)))

    def test_sf_isf(self):
        # gh-6226: isf should invert sf across the same range.
        grid = np.linspace(-20, 20)
        assert_allclose(grid, stats.logistic.isf(stats.logistic.sf(grid)))

    def test_extreme_values(self):
        # p is chosen so that 1 - (1 - p) == p in double precision
        p = 9.992007221626409e-16
        desired = 34.53957599234088
        assert_allclose(stats.logistic.ppf(1 - p), desired)
        assert_allclose(stats.logistic.isf(p), desired)
class TestLogser(object):
    """Tests for the log-series distribution."""

    def setup_method(self):
        # Fixed seed so rvs-based checks are reproducible.
        np.random.seed(1234)

    def test_rvs(self):
        # Support starts at 1; samples keep shape and have integer dtype.
        vals = stats.logser.rvs(0.75, size=(2, 50))
        assert_(numpy.all(vals >= 1))
        assert_(numpy.shape(vals) == (2, 50))
        assert_(vals.dtype.char in typecodes['AllInteger'])
        val = stats.logser.rvs(0.75)
        assert_(isinstance(val, int))
        val = stats.logser(0.75).rvs(3)
        assert_(isinstance(val, numpy.ndarray))
        assert_(val.dtype.char in typecodes['AllInteger'])

    def test_pmf_small_p(self):
        m = stats.logser.pmf(4, 1e-20)
        # The expected value was computed using mpmath:
        #   >>> import mpmath
        #   >>> mpmath.mp.dps = 64
        #   >>> k = 4
        #   >>> p = mpmath.mpf('1e-20')
        #   >>> float(-(p**k)/k/mpmath.log(1-p))
        #   2.5e-61
        # It is also clear from noticing that for very small p,
        # log(1-p) is approximately -p, and the formula becomes
        #   p**(k-1) / k
        assert_allclose(m, 2.5e-61)

    def test_mean_small_p(self):
        m = stats.logser.mean(1e-8)
        # The expected mean was computed using mpmath:
        #   >>> import mpmath
        #   >>> mpmath.dps = 60
        #   >>> p = mpmath.mpf('1e-8')
        #   >>> float(-p / ((1 - p)*mpmath.log(1 - p)))
        #   1.000000005
        assert_allclose(m, 1.000000005)
class TestPareto(object):
    def test_stats(self):
        # Check the stats() method with some simple values. Also check
        # that the calculations do not trigger RuntimeWarnings.
        with warnings.catch_warnings():
            warnings.simplefilter("error", RuntimeWarning)

            # b = 0.5: no finite moments (asserted inf/nan below).
            m, v, s, k = stats.pareto.stats(0.5, moments='mvsk')
            assert_equal(m, np.inf)
            assert_equal(v, np.inf)
            assert_equal(s, np.nan)
            assert_equal(k, np.nan)

            # b = 1.0: mean still diverges.
            m, v, s, k = stats.pareto.stats(1.0, moments='mvsk')
            assert_equal(m, np.inf)
            assert_equal(v, np.inf)
            assert_equal(s, np.nan)
            assert_equal(k, np.nan)

            # b = 1.5: finite mean, variance diverges.
            m, v, s, k = stats.pareto.stats(1.5, moments='mvsk')
            assert_equal(m, 3.0)
            assert_equal(v, np.inf)
            assert_equal(s, np.nan)
            assert_equal(k, np.nan)

            # b = 2.0: boundary case — variance still infinite.
            m, v, s, k = stats.pareto.stats(2.0, moments='mvsk')
            assert_equal(m, 2.0)
            assert_equal(v, np.inf)
            assert_equal(s, np.nan)
            assert_equal(k, np.nan)

            # b = 2.5: mean and variance finite, skew/kurtosis undefined.
            m, v, s, k = stats.pareto.stats(2.5, moments='mvsk')
            assert_allclose(m, 2.5 / 1.5)
            assert_allclose(v, 2.5 / (1.5*1.5*0.5))
            assert_equal(s, np.nan)
            assert_equal(k, np.nan)

            # b = 3.0: boundary case — skewness still undefined.
            m, v, s, k = stats.pareto.stats(3.0, moments='mvsk')
            assert_allclose(m, 1.5)
            assert_allclose(v, 0.75)
            assert_equal(s, np.nan)
            assert_equal(k, np.nan)

            # b = 3.5: skewness finite as well.
            m, v, s, k = stats.pareto.stats(3.5, moments='mvsk')
            assert_allclose(m, 3.5 / 2.5)
            assert_allclose(v, 3.5 / (2.5*2.5*1.5))
            assert_allclose(s, (2*4.5/0.5)*np.sqrt(1.5/3.5))
            assert_equal(k, np.nan)

            # b = 4.0: boundary case — kurtosis still undefined.
            m, v, s, k = stats.pareto.stats(4.0, moments='mvsk')
            assert_allclose(m, 4.0 / 3.0)
            assert_allclose(v, 4.0 / 18.0)
            assert_allclose(s, 2*(1+4.0)/(4.0-3) * np.sqrt((4.0-2)/4.0))
            assert_equal(k, np.nan)

            # b = 4.5: all four moments finite.
            m, v, s, k = stats.pareto.stats(4.5, moments='mvsk')
            assert_allclose(m, 4.5 / 3.5)
            assert_allclose(v, 4.5 / (3.5*3.5*2.5))
            assert_allclose(s, (2*5.5/1.5) * np.sqrt(2.5/4.5))
            assert_allclose(k, 6*(4.5**3 + 4.5**2 - 6*4.5 - 2)/(4.5*1.5*0.5))

    def test_sf(self):
        # Far right tail: sf(x) should equal (scale/x)**b without underflow.
        x = 1e9
        b = 2
        scale = 1.5
        p = stats.pareto.sf(x, b, loc=0, scale=scale)
        expected = (scale/x)**b  # 2.25e-18
        assert_allclose(p, expected)
class TestGenpareto(object):
    def test_ab(self):
        # Support depends on the sign of the shape parameter c.
        # c >= 0: a, b = [0, inf]
        for c in [1., 0.]:
            c = np.asarray(c)
            a, b = stats.genpareto._get_support(c)
            assert_equal(a, 0.)
            assert_(np.isposinf(b))

        # c < 0: a=0, b=1/|c|
        c = np.asarray(-2.)
        a, b = stats.genpareto._get_support(c)
        assert_allclose([a, b], [0., 0.5])

    def test_c0(self):
        # with c=0, genpareto reduces to the exponential distribution
        rv = stats.genpareto(c=0.)
        x = np.linspace(0, 10., 30)
        assert_allclose(rv.pdf(x), stats.expon.pdf(x))
        assert_allclose(rv.cdf(x), stats.expon.cdf(x))
        assert_allclose(rv.sf(x), stats.expon.sf(x))

        q = np.linspace(0., 1., 10)
        assert_allclose(rv.ppf(q), stats.expon.ppf(q))

    def test_cm1(self):
        # with c=-1, genpareto reduces to the uniform distr on [0, 1]
        rv = stats.genpareto(c=-1.)
        x = np.linspace(0, 10., 30)
        assert_allclose(rv.pdf(x), stats.uniform.pdf(x))
        assert_allclose(rv.cdf(x), stats.uniform.cdf(x))
        assert_allclose(rv.sf(x), stats.uniform.sf(x))

        q = np.linspace(0., 1., 10)
        assert_allclose(rv.ppf(q), stats.uniform.ppf(q))

        # logpdf(1., c=-1) should be zero
        assert_allclose(rv.logpdf(1), 0)

    def test_x_inf(self):
        # make sure x=inf is handled gracefully for c > 0, c == 0, c < 0
        rv = stats.genpareto(c=0.1)
        assert_allclose([rv.pdf(np.inf), rv.cdf(np.inf)], [0., 1.])
        assert_(np.isneginf(rv.logpdf(np.inf)))

        rv = stats.genpareto(c=0.)
        assert_allclose([rv.pdf(np.inf), rv.cdf(np.inf)], [0., 1.])
        assert_(np.isneginf(rv.logpdf(np.inf)))

        rv = stats.genpareto(c=-1.)
        assert_allclose([rv.pdf(np.inf), rv.cdf(np.inf)], [0., 1.])
        assert_(np.isneginf(rv.logpdf(np.inf)))

    def test_c_continuity(self):
        # pdf is continuous at c=0, -1
        x = np.linspace(0, 10, 30)
        for c in [0, -1]:
            pdf0 = stats.genpareto.pdf(x, c)
            for dc in [1e-14, -1e-14]:
                pdfc = stats.genpareto.pdf(x, c + dc)
                assert_allclose(pdf0, pdfc, atol=1e-12)

            cdf0 = stats.genpareto.cdf(x, c)
            # Bug fix: this list previously read [1e-14, 1e-14], which
            # probed the limit from the right twice and never checked
            # continuity from the left.  Use both signs, matching the
            # pdf loop above and the ppf/isf continuity tests below.
            for dc in [1e-14, -1e-14]:
                cdfc = stats.genpareto.cdf(x, c + dc)
                assert_allclose(cdf0, cdfc, atol=1e-12)

    def test_c_continuity_ppf(self):
        q = np.r_[np.logspace(1e-12, 0.01, base=0.1),
                  np.linspace(0.01, 1, 30, endpoint=False),
                  1. - np.logspace(1e-12, 0.01, base=0.1)]
        for c in [0., -1.]:
            ppf0 = stats.genpareto.ppf(q, c)
            for dc in [1e-14, -1e-14]:
                ppfc = stats.genpareto.ppf(q, c + dc)
                assert_allclose(ppf0, ppfc, atol=1e-12)

    def test_c_continuity_isf(self):
        q = np.r_[np.logspace(1e-12, 0.01, base=0.1),
                  np.linspace(0.01, 1, 30, endpoint=False),
                  1. - np.logspace(1e-12, 0.01, base=0.1)]
        for c in [0., -1.]:
            isf0 = stats.genpareto.isf(q, c)
            for dc in [1e-14, -1e-14]:
                isfc = stats.genpareto.isf(q, c + dc)
                assert_allclose(isf0, isfc, atol=1e-12)

    def test_cdf_ppf_roundtrip(self):
        # this should pass with machine precision. hat tip @pbrod
        q = np.r_[np.logspace(1e-12, 0.01, base=0.1),
                  np.linspace(0.01, 1, 30, endpoint=False),
                  1. - np.logspace(1e-12, 0.01, base=0.1)]
        for c in [1e-8, -1e-18, 1e-15, -1e-15]:
            assert_allclose(stats.genpareto.cdf(stats.genpareto.ppf(q, c), c),
                            q, atol=1e-15)

    def test_logsf(self):
        # logsf stays finite deep in the tail instead of underflowing.
        logp = stats.genpareto.logsf(1e10, .01, 0, 1)
        assert_allclose(logp, -1842.0680753952365)

    # Values in 'expected_stats' are
    # [mean, variance, skewness, excess kurtosis].
    @pytest.mark.parametrize(
        'c, expected_stats',
        [(0, [1, 1, 2, 6]),
         (1/4, [4/3, 32/9, 10/np.sqrt(2), np.nan]),
         (1/9, [9/8, (81/64)*(9/7), (10/9)*np.sqrt(7), 754/45]),
         (-1, [1/2, 1/12, 0, -6/5])])
    def test_stats(self, c, expected_stats):
        result = stats.genpareto.stats(c, moments='mvsk')
        assert_allclose(result, expected_stats, rtol=1e-13, atol=1e-15)

    def test_var(self):
        # Regression test for gh-11168.
        v = stats.genpareto.var(1e-8)
        assert_allclose(v, 1.000000040000001, rtol=1e-13)
class TestPearson3(object):
    """Spot checks of the Pearson type III distribution."""

    def setup_method(self):
        np.random.seed(1234)

    def test_rvs(self):
        samples = stats.pearson3.rvs(0.1, size=(2, 50))
        assert_(numpy.shape(samples) == (2, 50))
        assert_(samples.dtype.char in typecodes['AllFloat'])

        single = stats.pearson3.rvs(0.5)
        assert_(isinstance(single, float))

        triple = stats.pearson3(0.5).rvs(3)
        assert_(isinstance(triple, numpy.ndarray))
        assert_(triple.dtype.char in typecodes['AllFloat'])
        assert_(len(triple) == 3)

    def test_pdf(self):
        # Broadcast over the skew parameter.
        density = stats.pearson3.pdf(2, [0.0, 0.1, 0.2])
        assert_allclose(density,
                        np.array([0.05399097, 0.05555481, 0.05670246]),
                        atol=1e-6)
        # Scalar x.
        density = stats.pearson3.pdf(-3, 0.1)
        assert_allclose(density, np.array([0.00313791]), atol=1e-6)
        # Broadcast over x.
        density = stats.pearson3.pdf([-3, -2, -1, 0, 1], 0.1)
        assert_allclose(density,
                        np.array([0.00313791, 0.05192304, 0.25028092,
                                  0.39885918, 0.23413173]), atol=1e-6)

    def test_cdf(self):
        # Broadcast over the skew parameter.
        cumulative = stats.pearson3.cdf(2, [0.0, 0.1, 0.2])
        assert_allclose(cumulative,
                        np.array([0.97724987, 0.97462004, 0.97213626]),
                        atol=1e-6)
        # Scalar x.
        cumulative = stats.pearson3.cdf(-3, 0.1)
        assert_allclose(cumulative, [0.00082256], atol=1e-6)
        # Broadcast over x.
        cumulative = stats.pearson3.cdf([-3, -2, -1, 0, 1], 0.1)
        assert_allclose(cumulative,
                        [8.22563821e-04, 1.99860448e-02, 1.58550710e-01,
                         5.06649130e-01, 8.41442111e-01], atol=1e-6)
class TestKappa4(object):
    """kappa4 collapses to well-known distributions at special (h, k)."""

    def test_cdf_genpareto(self):
        # h = 1 and k != 0 is generalized Pareto
        x = [0.0, 0.1, 0.2, 0.5]
        h = 1.0
        for k in [-1.9, -1.0, -0.5, -0.2, -0.1, 0.1, 0.2, 0.5, 1.0, 1.9]:
            computed = stats.kappa4.cdf(x, h, k)
            # shape parameter is opposite what is expected
            reference = stats.genpareto.cdf(x, -k)
            assert_allclose(computed, reference)

    def test_cdf_genextreme(self):
        # h = 0 and k != 0 is generalized extreme value
        x = np.linspace(-5, 5, 10)
        k = np.linspace(-3, 3, 10)
        assert_allclose(stats.kappa4.cdf(x, 0.0, k),
                        stats.genextreme.cdf(x, k))

    def test_cdf_expon(self):
        # h = 1 and k = 0 is exponential
        x = np.linspace(0, 10, 10)
        assert_allclose(stats.kappa4.cdf(x, 1.0, 0.0),
                        stats.expon.cdf(x))

    def test_cdf_gumbel_r(self):
        # h = 0 and k = 0 is gumbel_r
        x = np.linspace(-5, 5, 10)
        assert_allclose(stats.kappa4.cdf(x, 0.0, 0.0),
                        stats.gumbel_r.cdf(x))

    def test_cdf_logistic(self):
        # h = -1 and k = 0 is logistic
        x = np.linspace(-5, 5, 10)
        assert_allclose(stats.kappa4.cdf(x, -1.0, 0.0),
                        stats.logistic.cdf(x))

    def test_cdf_uniform(self):
        # h = 1 and k = 1 is uniform
        x = np.linspace(-5, 5, 10)
        assert_allclose(stats.kappa4.cdf(x, 1.0, 1.0),
                        stats.uniform.cdf(x))

    def test_integers_ctor(self):
        # regression test for gh-7416: _argcheck fails for integer h and k
        # in numpy 1.12
        stats.kappa4(1, 2)
class TestPoisson(object):
    def setup_method(self):
        np.random.seed(1234)

    def test_pmf_basic(self):
        # With mu = log(2): pmf(k) = exp(-mu) * mu**k / k!
        log2 = np.log(2)
        computed = stats.poisson.pmf([0, 1, 2], log2)
        assert_allclose(computed, [0.5, log2/2, log2**2/4])

    def test_mu0(self):
        # Edge case mu=0: point mass at zero.
        computed = stats.poisson.pmf([0, 1, 2], 0)
        assert_array_equal(computed, [1, 0, 0])

        assert_equal(stats.poisson.interval(0.95, 0), (0, 0))

    def test_rvs(self):
        draws = stats.poisson.rvs(0.5, size=(2, 50))
        assert_(numpy.all(draws >= 0))
        assert_(numpy.shape(draws) == (2, 50))
        assert_(draws.dtype.char in typecodes['AllInteger'])

        one_draw = stats.poisson.rvs(0.5)
        assert_(isinstance(one_draw, int))

        frozen_draws = stats.poisson(0.5).rvs(3)
        assert_(isinstance(frozen_draws, numpy.ndarray))
        assert_(frozen_draws.dtype.char in typecodes['AllInteger'])

    def test_stats(self):
        # Scalar shape parameter: mean = var = mu, skew = 1/sqrt(mu),
        # excess kurtosis = 1/mu.
        mu = 16.0
        assert_allclose(stats.poisson.stats(mu, moments='mvsk'),
                        [mu, mu, np.sqrt(1.0/mu), 1.0/mu])

        # Array shape parameter, including the mu=0 edge case.
        mu = np.array([0.0, 1.0, 2.0])
        assert_allclose(stats.poisson.stats(mu, moments='mvsk'),
                        (mu, mu, [np.inf, 1, 1/np.sqrt(2)],
                         [np.inf, 1, 0.5]))
class TestKSTwo(object):
    """Tests for the two-sided Kolmogorov-Smirnov distribution kstwo."""

    def setup_method(self):
        np.random.seed(1234)

    def test_cdf(self):
        for n in [1, 2, 3, 10, 100, 1000]:
            # Test x-values:
            #  0, 1/2n, where the cdf should be 0
            #  1/n, where the cdf should be n!/n^n
            #  0.5, where the cdf should match ksone.cdf
            #  1-1/n, where cdf = 1-2/n^n
            #  1, where cdf == 1
            #  (E.g. Exact values given by Eqn 1 in Simard / L'Ecuyer)
            x = np.array([0, 0.5/n, 1/n, 0.5, 1-1.0/n, 1])
            v1 = (1.0/n)**n
            lg = scipy.special.gammaln(n+1)
            elg = (np.exp(lg) if v1 != 0 else 0)
            expected = np.array([0, 0, v1 * elg,
                                 1 - 2*stats.ksone.sf(0.5, n),
                                 max(1 - 2*v1, 0.0),
                                 1.0])
            vals_cdf = stats.kstwo.cdf(x, n)
            assert_allclose(vals_cdf, expected)

    def test_sf(self):
        # Same x values as in test_cdf, and use sf = 1 - cdf.
        # (A dead `x = np.linspace(0, 1, 11)` assignment that was
        # immediately overwritten inside the loop has been removed.)
        for n in [1, 2, 3, 10, 100, 1000]:
            x = np.array([0, 0.5/n, 1/n, 0.5, 1-1.0/n, 1])
            v1 = (1.0/n)**n
            lg = scipy.special.gammaln(n+1)
            elg = (np.exp(lg) if v1 != 0 else 0)
            expected = np.array([1.0, 1.0,
                                 1 - v1 * elg,
                                 2*stats.ksone.sf(0.5, n),
                                 min(2*v1, 1.0), 0])
            vals_sf = stats.kstwo.sf(x, n)
            assert_allclose(vals_sf, expected)

    def test_cdf_sqrtn(self):
        # For fixed a, cdf(a/sqrt(n), n) -> kstwobign(a) as n->infinity
        # cdf(a/sqrt(n), n) is an increasing function of n (and a)
        # Check that the function is indeed increasing (allowing for some
        # small floating point and algorithm differences.)
        # NOTE(review): assert_array_less(diffs, 1e-8) only bounds the
        # successive differences from ABOVE, i.e. it verifies the sequence
        # is numerically non-increasing; that looks inconsistent with the
        # "increasing" wording above.  Confirm the intended monotonicity
        # against Simard/L'Ecuyer before changing either the comment or
        # the assertion.
        x = np.linspace(0, 2, 11)[1:]
        ns = [50, 100, 200, 400, 1000, 2000]
        for _x in x:
            xn = _x / np.sqrt(ns)
            probs = stats.kstwo.cdf(xn, ns)
            diffs = np.diff(probs)
            assert_array_less(diffs, 1e-8)

    def test_cdf_sf(self):
        # cdf and sf must be complementary.
        x = np.linspace(0, 1, 11)
        for n in [1, 2, 3, 10, 100, 1000]:
            vals_cdf = stats.kstwo.cdf(x, n)
            vals_sf = stats.kstwo.sf(x, n)
            assert_array_almost_equal(vals_cdf, 1 - vals_sf)

    def test_cdf_sf_sqrtn(self):
        # Complementarity also holds for arguments scaled by 1/sqrt(n).
        x = np.linspace(0, 1, 11)
        for n in [1, 2, 3, 10, 100, 1000]:
            xn = x / np.sqrt(n)
            vals_cdf = stats.kstwo.cdf(xn, n)
            vals_sf = stats.kstwo.sf(xn, n)
            assert_array_almost_equal(vals_cdf, 1 - vals_sf)

    def test_ppf_of_cdf(self):
        x = np.linspace(0, 1, 11)
        for n in [1, 2, 3, 10, 100, 1000]:
            xn = x[x > 0.5/n]
            vals_cdf = stats.kstwo.cdf(xn, n)
            # CDFs close to 1 are better dealt with using the SF
            cond = (0 < vals_cdf) & (vals_cdf < 0.99)
            vals = stats.kstwo.ppf(vals_cdf, n)
            assert_allclose(vals[cond], xn[cond], rtol=1e-4)

    def test_isf_of_sf(self):
        x = np.linspace(0, 1, 11)
        for n in [1, 2, 3, 10, 100, 1000]:
            xn = x[x > 0.5/n]
            vals_isf = stats.kstwo.isf(xn, n)
            cond = (0 < vals_isf) & (vals_isf < 1.0)
            vals = stats.kstwo.sf(vals_isf, n)
            assert_allclose(vals[cond], xn[cond], rtol=1e-4)

    def test_ppf_of_cdf_sqrtn(self):
        x = np.linspace(0, 1, 11)
        for n in [1, 2, 3, 10, 100, 1000]:
            xn = (x / np.sqrt(n))[x > 0.5/n]
            vals_cdf = stats.kstwo.cdf(xn, n)
            cond = (0 < vals_cdf) & (vals_cdf < 1.0)
            vals = stats.kstwo.ppf(vals_cdf, n)
            assert_allclose(vals[cond], xn[cond])

    def test_isf_of_sf_sqrtn(self):
        x = np.linspace(0, 1, 11)
        for n in [1, 2, 3, 10, 100, 1000]:
            xn = (x / np.sqrt(n))[x > 0.5/n]
            vals_sf = stats.kstwo.sf(xn, n)
            # SFs close to 1 are better dealt with using the CDF
            cond = (0 < vals_sf) & (vals_sf < 0.95)
            vals = stats.kstwo.isf(vals_sf, n)
            assert_allclose(vals[cond], xn[cond])

    def test_ppf(self):
        probs = np.linspace(0, 1, 11)[1:]
        for n in [1, 2, 3, 10, 100, 1000]:
            xn = stats.kstwo.ppf(probs, n)
            vals_cdf = stats.kstwo.cdf(xn, n)
            assert_allclose(vals_cdf, probs)

    def test_simard_lecuyer_table1(self):
        # Compute the cdf for values near the mean of the distribution.
        # The mean u ~ log(2)*sqrt(pi/(2n))
        # Compute for x in [u/4, u/3, u/2, u, 2u, 3u]
        # This is the computation of Table 1 of Simard, R., L'Ecuyer, P. (2011)
        #  "Computing the Two-Sided Kolmogorov-Smirnov Distribution".
        # Except that the values below are not from the published table, but
        # were generated using an independent SageMath implementation of
        # Durbin's algorithm (with the exponentiation and scaling of
        # Marsaglia/Tsang/Wang's version) using 500 bit arithmetic.
        # Some of the values in the published table have relative
        # errors greater than 1e-4.
        ns = [10, 50, 100, 200, 500, 1000]
        ratios = np.array([1.0/4, 1.0/3, 1.0/2, 1, 2, 3])
        expected = np.array([
            [1.92155292e-08, 5.72933228e-05, 2.15233226e-02, 6.31566589e-01, 9.97685592e-01, 9.99999942e-01],
            [2.28096224e-09, 1.99142563e-05, 1.42617934e-02, 5.95345542e-01, 9.96177701e-01, 9.99998662e-01],
            [1.00201886e-09, 1.32673079e-05, 1.24608594e-02, 5.86163220e-01, 9.95866877e-01, 9.99998240e-01],
            [4.93313022e-10, 9.52658029e-06, 1.12123138e-02, 5.79486872e-01, 9.95661824e-01, 9.99997964e-01],
            [2.37049293e-10, 6.85002458e-06, 1.01309221e-02, 5.73427224e-01, 9.95491207e-01, 9.99997750e-01],
            [1.56990874e-10, 5.71738276e-06, 9.59725430e-03, 5.70322692e-01, 9.95409545e-01, 9.99997657e-01]
        ])
        for idx, n in enumerate(ns):
            x = ratios * np.log(2) * np.sqrt(np.pi/2/n)
            vals_cdf = stats.kstwo.cdf(x, n)
            assert_allclose(vals_cdf, expected[idx], rtol=1e-5)
class TestZipf(object):
    def setup_method(self):
        np.random.seed(1234)

    def test_rvs(self):
        samples = stats.zipf.rvs(1.5, size=(2, 50))
        assert_(numpy.all(samples >= 1))
        assert_(numpy.shape(samples) == (2, 50))
        assert_(samples.dtype.char in typecodes['AllInteger'])

        single = stats.zipf.rvs(1.5)
        assert_(isinstance(single, int))

        frozen = stats.zipf(1.5).rvs(3)
        assert_(isinstance(frozen, numpy.ndarray))
        assert_(frozen.dtype.char in typecodes['AllInteger'])

    def test_moments(self):
        # The n-th moment is finite iff a > n + 1.
        mean, var = stats.zipf.stats(a=2.8)
        assert_(np.isfinite(mean))
        assert_equal(var, np.inf)

        skew, kurt = stats.zipf.stats(a=4.8, moments='sk')
        assert_(not np.isfinite([skew, kurt]).all())
class TestDLaplace(object):
    def setup_method(self):
        np.random.seed(1234)

    def test_rvs(self):
        samples = stats.dlaplace.rvs(1.5, size=(2, 50))
        assert_(numpy.shape(samples) == (2, 50))
        assert_(samples.dtype.char in typecodes['AllInteger'])

        single = stats.dlaplace.rvs(1.5)
        assert_(isinstance(single, int))

        frozen = stats.dlaplace(1.5).rvs(3)
        assert_(isinstance(frozen, numpy.ndarray))
        assert_(frozen.dtype.char in typecodes['AllInteger'])

        assert_(stats.dlaplace.rvs(0.8) is not None)

    def test_stats(self):
        # compare the explicit formulas w/ direct summation using pmf
        a = 1.
        dist = stats.dlaplace(a)
        m, v, s, k = dist.stats('mvsk')

        half_width = 37
        support = np.arange(-half_width, half_width + 1)
        weights = dist.pmf(support)
        mom2 = np.sum(weights * support**2)
        mom4 = np.sum(weights * support**4)
        assert_equal((m, s), (0, 0))
        assert_allclose((v, k), (mom2, mom4/mom2**2 - 3.),
                        atol=1e-14, rtol=1e-8)

    def test_stats2(self):
        a = np.log(2.)
        dist = stats.dlaplace(a)
        m, v, s, k = dist.stats('mvsk')
        assert_equal((m, s), (0., 0.))
        assert_allclose((v, k), (4., 3.25))
class TestInvGamma(object):
    def test_invgamma_inf_gh_1866(self):
        # invgamma's moments are only finite for a > n; the expected
        # numbers were checked with boost 1.54.
        with warnings.catch_warnings():
            warnings.simplefilter('error', RuntimeWarning)
            computed = stats.invgamma.stats(a=19.31, moments='mvsk')
            expected = [0.05461496450, 0.0001723162534, 1.020362676,
                        2.055616582]
            assert_allclose(computed, expected)

            a = [1.1, 3.1, 5.6]
            computed = stats.invgamma.stats(a=a, moments='mvsk')
            expected = ([10., 0.476190476, 0.2173913043],  # mmm
                        [np.inf, 0.2061430632, 0.01312749422],  # vvv
                        [np.nan, 41.95235392, 2.919025532],  # sss
                        [np.nan, np.nan, 24.51923076])  # kkk
            for got, want in zip(computed, expected):
                assert_almost_equal(got, want)

    def test_cdf_ppf(self):
        # gh-6245: cdf/ppf round-trip.
        x = np.logspace(-2.6, 0)
        roundtrip = stats.invgamma.ppf(stats.invgamma.cdf(x, 1), 1)
        assert_allclose(x, roundtrip)

    def test_sf_isf(self):
        # gh-6245: sf/isf round-trip.
        if sys.maxsize > 2**32:
            x = np.logspace(2, 100)
        else:
            # Invgamma roundtrip on 32-bit systems has relative accuracy
            # ~1e-15 until x=1e+15, and becomes inf above x=1e+18
            x = np.logspace(2, 18)

        roundtrip = stats.invgamma.isf(stats.invgamma.sf(x, 1), 1)
        assert_allclose(x, roundtrip, rtol=1.0)
class TestF(object):
    def test_endpoints(self):
        # The pdf at the left endpoint dst.a should match the known value.
        # (A leftover debugging loop that printed each case to stdout was
        # removed; the assertion below performs the actual check.)
        data = [[stats.f, (2, 1), 1.0]]
        ans = [_f.pdf(_f.a, *_args) for _f, _args, _ in data]
        correct = [_correct_ for _f, _args, _correct_ in data]
        assert_array_almost_equal(ans, correct)

    def test_f_moments(self):
        # n-th moment of F distributions is only finite for n < dfd / 2
        m, v, s, k = stats.f.stats(11, 6.5, moments='mvsk')
        assert_(np.isfinite(m))
        assert_(np.isfinite(v))
        assert_(np.isfinite(s))
        assert_(not np.isfinite(k))

    def test_moments_warnings(self):
        # no warnings should be generated for dfd = 2, 4, 6, 8 (div by zero)
        with warnings.catch_warnings():
            warnings.simplefilter('error', RuntimeWarning)
            stats.f.stats(dfn=[11]*4, dfd=[2, 4, 6, 8], moments='mvsk')

    def test_stats_broadcast(self):
        # stats() must broadcast dfn (column vector) against dfd (row).
        dfn = np.array([[3], [11]])
        dfd = np.array([11, 12])
        m, v, s, k = stats.f.stats(dfn=dfn, dfd=dfd, moments='mvsk')

        # Compare against the closed-form moments of the F distribution.
        m2 = [dfd / (dfd - 2)]*2
        assert_allclose(m, m2)
        v2 = 2 * dfd**2 * (dfn + dfd - 2) / dfn / (dfd - 2)**2 / (dfd - 4)
        assert_allclose(v, v2)
        s2 = ((2*dfn + dfd - 2) * np.sqrt(8*(dfd - 4)) /
              ((dfd - 6) * np.sqrt(dfn*(dfn + dfd - 2))))
        assert_allclose(s, s2)
        k2num = 12 * (dfn * (5*dfd - 22) * (dfn + dfd - 2) +
                      (dfd - 4) * (dfd - 2)**2)
        k2den = dfn * (dfd - 6) * (dfd - 8) * (dfn + dfd - 2)
        k2 = k2num / k2den
        assert_allclose(k, k2)
def test_rvgeneric_std():
    # Regression test for #1191: std() with an array-valued shape argument.
    expected = [1.29099445, 1.22474487]
    assert_array_almost_equal(stats.t.std([5, 6]), expected)
def test_moments_t():
    # regression test for #8786: moments of the t distribution at and
    # just above the df thresholds where each moment becomes defined.
    cases = [
        (dict(df=1, moments='mvsk'), (np.inf, np.nan, np.nan, np.nan)),
        (dict(df=1.01, moments='mvsk'), (0.0, np.inf, np.nan, np.nan)),
        (dict(df=2, moments='mvsk'), (0.0, np.inf, np.nan, np.nan)),
        (dict(df=2.01, moments='mvsk'),
         (0.0, 2.01/(2.01-2.0), np.nan, np.inf)),
        (dict(df=3, moments='sk'), (np.nan, np.inf)),
        (dict(df=3.01, moments='sk'), (0.0, np.inf)),
        (dict(df=4, moments='sk'), (0.0, np.inf)),
        (dict(df=4.01, moments='sk'), (0.0, 6.0/(4.01 - 4.0))),
    ]
    for kwargs, expected in cases:
        assert_equal(stats.t.stats(**kwargs), expected)
class TestRvDiscrete(object):
    # Tests for distributions built with stats.rv_discrete(values=(xk, pk)).

    def setup_method(self):
        np.random.seed(1234)

    def test_rvs(self):
        states = [-1, 0, 1, 2, 3, 4]
        probability = [0.0, 0.3, 0.4, 0.0, 0.3, 0.0]
        samples = 1000
        r = stats.rv_discrete(name='sample', values=(states, probability))
        x = r.rvs(size=samples)
        assert_(isinstance(x, numpy.ndarray))

        # Empirical frequencies should be within 5% of the probabilities.
        for s, p in zip(states, probability):
            assert_(abs(sum(x == s)/float(samples) - p) < 0.05)

        # A scalar draw comes back as a plain Python int.
        x = r.rvs()
        assert_(isinstance(x, int))

    def test_entropy(self):
        # Basic tests of entropy.
        pvals = np.array([0.25, 0.45, 0.3])
        p = stats.rv_discrete(values=([0, 1, 2], pvals))
        expected_h = -sum(xlogy(pvals, pvals))
        h = p.entropy()
        assert_allclose(h, expected_h)

        # A degenerate (single-outcome) distribution has zero entropy.
        p = stats.rv_discrete(values=([0, 1, 2], [1.0, 0, 0]))
        h = p.entropy()
        assert_equal(h, 0.0)

    def test_pmf(self):
        xk = [1, 2, 4]
        pk = [0.5, 0.3, 0.2]
        rv = stats.rv_discrete(values=(xk, pk))

        # pmf broadcasts over 2-d input; points not in xk get pmf 0.
        x = [[1., 4.],
             [3., 2]]
        assert_allclose(rv.pmf(x),
                        [[0.5, 0.2],
                         [0., 0.3]], atol=1e-14)

    def test_cdf(self):
        xk = [1, 2, 4]
        pk = [0.5, 0.3, 0.2]
        rv = stats.rv_discrete(values=(xk, pk))

        # cdf is a right-continuous step function over the support.
        x_values = [-2, 1., 1.1, 1.5, 2.0, 3.0, 4, 5]
        expected = [0, 0.5, 0.5, 0.5, 0.8, 0.8, 1, 1]
        assert_allclose(rv.cdf(x_values), expected, atol=1e-14)

        # also check scalar arguments
        assert_allclose([rv.cdf(xx) for xx in x_values],
                        expected, atol=1e-14)

    def test_ppf(self):
        xk = [1, 2, 4]
        pk = [0.5, 0.3, 0.2]
        rv = stats.rv_discrete(values=(xk, pk))

        # ppf maps each quantile back onto the support points.
        q_values = [0.1, 0.5, 0.6, 0.8, 0.9, 1.]
        expected = [1, 1, 2, 2, 4, 4]
        assert_allclose(rv.ppf(q_values), expected, atol=1e-14)

        # also check scalar arguments
        assert_allclose([rv.ppf(q) for q in q_values],
                        expected, atol=1e-14)

    def test_cdf_ppf_next(self):
        # copied and special cased from test_discrete_basic:
        # ppf(cdf(xk) + eps) should step to the next support point.
        vals = ([1, 2, 4, 7, 8], [0.1, 0.2, 0.3, 0.3, 0.1])
        rv = stats.rv_discrete(values=vals)

        assert_array_equal(rv.ppf(rv.cdf(rv.xk[:-1]) + 1e-8),
                           rv.xk[1:])

    def test_expect(self):
        # expect() with no function argument should equal the mean.
        xk = [1, 2, 4, 6, 7, 11]
        pk = [0.1, 0.2, 0.2, 0.2, 0.2, 0.1]
        rv = stats.rv_discrete(values=(xk, pk))

        assert_allclose(rv.expect(), np.sum(rv.xk * rv.pk), atol=1e-14)

    def test_multidimension(self):
        # 2-d xk/pk of matching shape are accepted.
        xk = np.arange(12).reshape((3, 4))
        pk = np.array([[0.1, 0.1, 0.15, 0.05],
                       [0.1, 0.1, 0.05, 0.05],
                       [0.1, 0.1, 0.05, 0.05]])
        rv = stats.rv_discrete(values=(xk, pk))

        assert_allclose(rv.expect(), np.sum(rv.xk * rv.pk), atol=1e-14)

    def test_bad_input(self):
        # Mismatched lengths of xk and pk.
        xk = [1, 2, 3]
        pk = [0.5, 0.5]
        assert_raises(ValueError, stats.rv_discrete, **dict(values=(xk, pk)))

        # Probabilities that do not sum to 1.
        pk = [1, 2, 3]
        assert_raises(ValueError, stats.rv_discrete, **dict(values=(xk, pk)))

        # Negative probabilities.
        xk = [1, 2, 3]
        pk = [0.5, 1.2, -0.7]
        assert_raises(ValueError, stats.rv_discrete, **dict(values=(xk, pk)))

        # Negative probability even though the sum is 1.
        xk = [1, 2, 3, 4, 5]
        pk = [0.3, 0.3, 0.3, 0.3, -0.2]
        assert_raises(ValueError, stats.rv_discrete, **dict(values=(xk, pk)))

    def test_shape_rv_sample(self):
        # tests added for gh-9565

        # mismatch of 2d inputs
        xk, pk = np.arange(4).reshape((2, 2)), np.full((2, 3), 1/6)
        assert_raises(ValueError, stats.rv_discrete, **dict(values=(xk, pk)))

        # same number of elements, but shapes not compatible
        xk, pk = np.arange(6).reshape((3, 2)), np.full((2, 3), 1/6)
        assert_raises(ValueError, stats.rv_discrete, **dict(values=(xk, pk)))

        # same shapes => no error
        xk, pk = np.arange(6).reshape((3, 2)), np.full((3, 2), 1/6)
        assert_equal(stats.rv_discrete(values=(xk, pk)).pmf(0), 1/6)
class TestSkewNorm(object):
    def setup_method(self):
        # Fresh RNG per test so sequential draws inside a test are
        # reproducible.
        self.rng = check_random_state(1234)

    def test_normal(self):
        # When the skewness is 0 the distribution is normal
        x = np.linspace(-5, 5, 100)
        assert_array_almost_equal(stats.skewnorm.pdf(x, a=0),
                                  stats.norm.pdf(x))

    def test_rvs(self):
        # rvs honors the requested shape for positive and negative skew.
        shape = (3, 4, 5)
        x = stats.skewnorm.rvs(a=0.75, size=shape, random_state=self.rng)
        assert_equal(shape, x.shape)

        x = stats.skewnorm.rvs(a=-3, size=shape, random_state=self.rng)
        assert_equal(shape, x.shape)

    def test_moments(self):
        # Compare analytic moments against sample moments from 1e6 draws
        # (hence the loose decimal=2 tolerance).
        X = stats.skewnorm.rvs(a=4, size=int(1e6), loc=5, scale=2,
                               random_state=self.rng)
        expected = [np.mean(X), np.var(X), stats.skew(X), stats.kurtosis(X)]
        computed = stats.skewnorm.stats(a=4, loc=5, scale=2, moments='mvsk')
        assert_array_almost_equal(computed, expected, decimal=2)

        # Repeat with negative skew.
        X = stats.skewnorm.rvs(a=-4, size=int(1e6), loc=5, scale=2,
                               random_state=self.rng)
        expected = [np.mean(X), np.var(X), stats.skew(X), stats.kurtosis(X)]
        computed = stats.skewnorm.stats(a=-4, loc=5, scale=2, moments='mvsk')
        assert_array_almost_equal(computed, expected, decimal=2)

    def test_cdf_large_x(self):
        # Regression test for gh-7746.
        # The x values are large enough that the closest 64 bit floating
        # point representation of the exact CDF is 1.0.
        p = stats.skewnorm.cdf([10, 20, 30], -1)
        assert_allclose(p, np.ones(3), rtol=1e-14)
        p = stats.skewnorm.cdf(25, 2.5)
        assert_allclose(p, 1.0, rtol=1e-14)

    def test_cdf_sf_small_values(self):
        # Triples are [x, a, cdf(x, a)].  These values were computed
        # using CDF[SkewNormDistribution[0, 1, a], x] in Wolfram Alpha.
        cdfvals = [
            [-8, 1, 3.870035046664392611e-31],
            [-4, 2, 8.1298399188811398e-21],
            [-2, 5, 1.55326826787106273e-26],
            [-9, -1, 2.257176811907681295e-19],
            [-10, -4, 1.523970604832105213e-23],
        ]
        for x, a, cdfval in cdfvals:
            p = stats.skewnorm.cdf(x, a)
            assert_allclose(p, cdfval, rtol=1e-8)
            # For the skew normal distribution, sf(-x, -a) = cdf(x, a).
            p = stats.skewnorm.sf(-x, -a)
            assert_allclose(p, cdfval, rtol=1e-8)
class TestExpon(object):
def test_zero(self):
assert_equal(stats.expon.pdf(0), 1)
def test_tail(self): # Regression test for ticket 807
assert_equal(stats.expon.cdf(1e-18), 1e-18)
assert_equal(stats.expon.isf(stats.expon.sf(40)), 40)
def test_nan_raises_error(self):
# see gh-issue 10300
x = np.array([1.6483, 2.7169, 2.4667, 1.1791, 3.5433, np.nan])
assert_raises(RuntimeError, stats.expon.fit, x)
def test_inf_raises_error(self):
# see gh-issue 10300
x = np.array([1.6483, 2.7169, 2.4667, 1.1791, 3.5433, np.inf])
assert_raises(RuntimeError, stats.expon.fit, x)
class TestNorm(object):
def test_nan_raises_error(self):
# see gh-issue 10300
x = np.array([1.6483, 2.7169, 2.4667, 1.1791, 3.5433, np.nan])
assert_raises(RuntimeError, stats.norm.fit, x)
def test_inf_raises_error(self):
# see gh-issue 10300
x = np.array([1.6483, 2.7169, 2.4667, 1.1791, 3.5433, np.inf])
assert_raises(RuntimeError, stats.norm.fit, x)
def test_bad_keyword_arg(self):
x = [1, 2, 3]
assert_raises(TypeError, stats.norm.fit, x, plate="shrimp")
class TestUniform(object):
"""gh-10300"""
def test_nan_raises_error(self):
# see gh-issue 10300
x = np.array([1.6483, 2.7169, 2.4667, 1.1791, 3.5433, np.nan])
assert_raises(RuntimeError, stats.uniform.fit, x)
def test_inf_raises_error(self):
# see gh-issue 10300
x = np.array([1.6483, 2.7169, 2.4667, 1.1791, 3.5433, np.inf])
assert_raises(RuntimeError, stats.uniform.fit, x)
class TestExponNorm(object):
def test_moments(self):
# Some moment test cases based on non-loc/scaled formula
def get_moms(lam, sig, mu):
# See wikipedia for these formulae
# where it is listed as an exponentially modified gaussian
opK2 = 1.0 + 1 / (lam*sig)**2
exp_skew = 2 / (lam * sig)**3 * opK2**(-1.5)
exp_kurt = 6.0 * (1 + (lam * sig)**2)**(-2)
return [mu + 1/lam, sig*sig + 1.0/(lam*lam), exp_skew, exp_kurt]
mu, sig, lam = 0, 1, 1
K = 1.0 / (lam * sig)
sts = stats.exponnorm.stats(K, loc=mu, scale=sig, moments='mvsk')
assert_almost_equal(sts, get_moms(lam, sig, mu))
mu, sig, lam = -3, 2, 0.1
K = 1.0 / (lam * sig)
sts = stats.exponnorm.stats(K, loc=mu, scale=sig, moments='mvsk')
assert_almost_equal(sts, get_moms(lam, sig, mu))
mu, sig, lam = 0, 3, 1
K = 1.0 / (lam * sig)
sts = stats.exponnorm.stats(K, loc=mu, scale=sig, moments='mvsk')
assert_almost_equal(sts, get_moms(lam, sig, mu))
mu, sig, lam = -5, 11, 3.5
K = 1.0 / (lam * sig)
sts = stats.exponnorm.stats(K, loc=mu, scale=sig, moments='mvsk')
assert_almost_equal(sts, get_moms(lam, sig, mu))
def test_nan_raises_error(self):
# see gh-issue 10300
x = np.array([1.6483, 2.7169, 2.4667, 1.1791, 3.5433, np.nan])
assert_raises(RuntimeError, stats.exponnorm.fit, x, floc=0, fscale=1)
def test_inf_raises_error(self):
# see gh-issue 10300
x = np.array([1.6483, 2.7169, 2.4667, 1.1791, 3.5433, np.inf])
assert_raises(RuntimeError, stats.exponnorm.fit, x, floc=0, fscale=1)
def test_extremes_x(self):
# Test for extreme values against overflows
assert_almost_equal(stats.exponnorm.pdf(-900, 1), 0.0)
assert_almost_equal(stats.exponnorm.pdf(+900, 1), 0.0)
assert_almost_equal(stats.exponnorm.pdf(1, 0.01), 0.0)
assert_almost_equal(stats.exponnorm.pdf(-900, 0.01), 0.0)
assert_almost_equal(stats.exponnorm.pdf(+900, 0.01), 0.0)
class TestGenExpon(object):
    def test_pdf_unity_area(self):
        from scipy.integrate import simps
        # PDF should integrate to one
        density = stats.genexpon.pdf(numpy.arange(0, 10, 0.01),
                                     0.5, 0.5, 2.0)
        assert_almost_equal(simps(density, dx=0.01), 1, 1)

    def test_cdf_bounds(self):
        # CDF values must lie in [0, 1]
        cumulative = stats.genexpon.cdf(numpy.arange(0, 10, 0.01),
                                        0.5, 0.5, 2.0)
        assert_(numpy.all((0 <= cumulative) & (cumulative <= 1)))
class TestExponpow(object):
    def test_tail(self):
        # Left-tail cdf accuracy and sf/isf round-trip.
        assert_almost_equal(stats.exponpow.cdf(1e-10, 2.), 1e-20)
        assert_almost_equal(
            stats.exponpow.isf(stats.exponpow.sf(5, .8), .8), 5)
class TestSkellam(object):
    # Reference values below were computed with R (skellam package).

    def test_pmf(self):
        # comparison to R
        k = numpy.arange(-10, 15)
        mu1, mu2 = 10, 5
        skpmfR = numpy.array(
            [4.2254582961926893e-005, 1.1404838449648488e-004,
             2.8979625801752660e-004, 6.9177078182101231e-004,
             1.5480716105844708e-003, 3.2412274963433889e-003,
             6.3373707175123292e-003, 1.1552351566696643e-002,
             1.9606152375042644e-002, 3.0947164083410337e-002,
             4.5401737566767360e-002, 6.1894328166820688e-002,
             7.8424609500170578e-002, 9.2418812533573133e-002,
             1.0139793148019728e-001, 1.0371927988298846e-001,
             9.9076583077406091e-002, 8.8546660073089561e-002,
             7.4187842052486810e-002, 5.8392772862200251e-002,
             4.3268692953013159e-002, 3.0248159818374226e-002,
             1.9991434305603021e-002, 1.2516877303301180e-002,
             7.4389876226229707e-003])

        assert_almost_equal(stats.skellam.pmf(k, mu1, mu2), skpmfR,
                            decimal=15)

    def test_cdf(self):
        # comparison to R, only 5 decimals
        k = numpy.arange(-10, 15)
        mu1, mu2 = 10, 5
        skcdfR = numpy.array(
            [6.4061475386192104e-005, 1.7810985988267694e-004,
             4.6790611790020336e-004, 1.1596768997212152e-003,
             2.7077485103056847e-003, 5.9489760066490718e-003,
             1.2286346724161398e-002, 2.3838698290858034e-002,
             4.3444850665900668e-002, 7.4392014749310995e-002,
             1.1979375231607835e-001, 1.8168808048289900e-001,
             2.6011268998306952e-001, 3.5253150251664261e-001,
             4.5392943399683988e-001, 5.5764871387982828e-001,
             6.5672529695723436e-001, 7.4527195703032389e-001,
             8.1945979908281064e-001, 8.7785257194501087e-001,
             9.2112126489802404e-001, 9.5136942471639818e-001,
             9.7136085902200120e-001, 9.8387773632530240e-001,
             9.9131672394792536e-001])

        assert_almost_equal(stats.skellam.cdf(k, mu1, mu2), skcdfR,
                            decimal=5)
class TestLognorm(object):
    def test_pdf(self):
        # Regression test for Ticket #1471: avoid nan with 0/0 situation
        # Also make sure there are no warnings at x=0, cf gh-5202
        with warnings.catch_warnings():
            warnings.simplefilter('error', RuntimeWarning)
            density = stats.lognorm.pdf([0, 0.5, 1], 1)
            assert_array_almost_equal(density,
                                      [0.0, 0.62749608, 0.39894228])

    def test_logcdf(self):
        # Regression test for gh-5940: sf et al would underflow too early
        x2, mu, sigma = 201.68, 195, 0.149
        shifted = x2 - mu
        assert_allclose(stats.lognorm.sf(shifted, s=sigma),
                        stats.norm.sf(np.log(shifted)/sigma))
        assert_allclose(stats.lognorm.logsf(shifted, s=sigma),
                        stats.norm.logsf(np.log(shifted)/sigma))
class TestBeta(object):
    def test_logpdf(self):
        # Regression test for Ticket #1326: avoid nan from 0*log(0).
        assert_almost_equal(stats.beta.logpdf(0, 1, 0.5), -0.69314718056)
        assert_almost_equal(stats.beta.logpdf(0, 0.5, 1), np.inf)

    def test_logpdf_ticket_1866(self):
        alpha, beta = 267, 1472
        x = np.array([0.2, 0.5, 0.6])
        frozen = stats.beta(alpha, beta)
        assert_allclose(frozen.logpdf(x).sum(), -1201.699061824062)
        assert_allclose(frozen.pdf(x), np.exp(frozen.logpdf(x)))

    def test_fit_bad_keyword_args(self):
        sample = [0.1, 0.5, 0.6]
        assert_raises(TypeError, stats.beta.fit, sample, floc=0, fscale=1,
                      plate="shrimp")

    def test_fit_duplicated_fixed_parameter(self):
        # At most one of 'f0', 'fa' or 'fix_a' can be given to the fit
        # method; more than one raises a ValueError.
        sample = [0.1, 0.5, 0.6]
        assert_raises(ValueError, stats.beta.fit, sample, fa=0.5, fix_a=0.5)
class TestBetaPrime(object):
    """Tests for the beta-prime distribution."""

    def test_logpdf(self):
        a, b = 267, 1472
        pts = np.array([0.2, 0.5, 0.6])
        frozen = stats.betaprime(a, b)
        assert_(np.isfinite(frozen.logpdf(pts)).all())
        assert_allclose(frozen.pdf(pts), np.exp(frozen.logpdf(pts)))

    def test_cdf(self):
        # regression test for gh-4030: dedicated implementation of
        # scipy.stats.betaprime.cdf()
        assert_equal(stats.betaprime.cdf(0, 0.2, 0.3), 0.0)

        a, b = 267, 1472
        pts = np.array([0.2, 0.5, 0.6])
        cdfs = stats.betaprime.cdf(pts, a, b)
        assert_(np.isfinite(cdfs).all())

        # cross-check the dedicated cdf against the generic integrator
        generic_cdf = stats.rv_continuous._cdf_single
        reference = [generic_cdf(stats.betaprime, v, a, b) for v in pts]
        assert_allclose(cdfs, reference, atol=0, rtol=2e-12)
class TestGamma(object):
    """Spot checks for the gamma distribution."""

    def test_pdf(self):
        # a few test cases to compare with R (scale=1/5 i.e. rate=5)
        assert_almost_equal(stats.gamma.pdf(90, 394, scale=1./5), 0.002312341)
        assert_almost_equal(stats.gamma.pdf(3, 10, scale=1./5), 0.1620358)

    def test_logpdf(self):
        # Ticket #1326 corner case: logpdf at x=0 must not give nan from
        # 0*log(0); gamma(1) is the standard exponential, so logpdf(0) == 0.
        assert_almost_equal(stats.gamma.logpdf(0, 1), 0)

    def test_fit_bad_keyword_args(self):
        # unknown keyword arguments to fit() must raise, not be ignored
        data = [0.1, 0.5, 0.6]
        assert_raises(TypeError, stats.gamma.fit, data, floc=0,
                      plate="shrimp")
class TestChi2(object):
    """Regression tests after precision improvements (ticket:1041).

    Reference values were recorded, not independently verified.
    """

    def test_precision(self):
        cases = [(1000, 1000, 8.919133934753128e-003),
                 (100, 100, 0.028162503162596778)]
        for x, df, expected in cases:
            assert_almost_equal(stats.chi2.pdf(x, df), expected, decimal=14)

    def test_ppf(self):
        # Expected values computed with mpmath.
        cases = [(2e-47, 4.8, 1.098472479575179840604902808e-19),
                 (0.5, 4.8, 4.15231407598589358660093156),
                 (2e-77, 13, 1.0106330688195199050507943e-11),
                 (0.1, 13, 7.041504580095461859307179763)]
        for q, df, expected in cases:
            assert_allclose(stats.chi2.ppf(q, df), expected, rtol=1e-10)
class TestGumbelL(object):
    """Round-trip accuracy tests for gumbel_l in the far tail (gh-6228)."""

    def test_cdf_ppf(self):
        pts = np.linspace(-100, -4)
        roundtrip = stats.gumbel_l.ppf(stats.gumbel_l.cdf(pts))
        assert_allclose(pts, roundtrip)

    def test_logcdf_logsf(self):
        # exp(logcdf) and -expm1(logsf) are two routes to the cdf;
        # they must agree even where the tail is tiny.
        pts = np.linspace(-100, -4)
        via_cdf = np.exp(stats.gumbel_l.logcdf(pts))
        via_sf = -special.expm1(stats.gumbel_l.logsf(pts))
        assert_allclose(via_cdf, via_sf)

    def test_sf_isf(self):
        pts = np.linspace(-20, 5)
        roundtrip = stats.gumbel_l.isf(stats.gumbel_l.sf(pts))
        assert_allclose(pts, roundtrip)
class TestLevyStable(object):
    """Tests for the alpha-stable distribution (stats.levy_stable):
    parameter estimation, and pdf/cdf values against Nolan's stablec.exe
    reference output and published tables."""

    def test_fit(self):
        # construct data to have percentiles that match
        # example in McCulloch 1986.
        x = [-.05413,-.05413,
             0.,0.,0.,0.,
             .00533,.00533,.00533,.00533,.00533,
             .03354,.03354,.03354,.03354,.03354,
             .05309,.05309,.05309,.05309,.05309]
        alpha1, beta1, loc1, scale1 = stats.levy_stable._fitstart(x)
        assert_allclose(alpha1, 1.48, rtol=0, atol=0.01)
        assert_almost_equal(beta1, -.22, 2)
        assert_almost_equal(scale1, 0.01717, 4)
        # to 2 dps due to rounding error in McCulloch86
        assert_almost_equal(loc1, 0.00233, 2)

        # cover alpha=2 scenario (adds a fifth copy of the top value)
        x2 = x + [.05309,.05309,.05309,.05309,.05309]
        alpha2, beta2, loc2, scale2 = stats.levy_stable._fitstart(x2)
        assert_equal(alpha2, 2)
        assert_equal(beta2, -1)
        assert_almost_equal(scale2, .02503, 4)
        assert_almost_equal(loc2, .03354, 4)

    @pytest.mark.slow
    def test_pdf_nolan_samples(self):
        """ Test pdf values against Nolan's stablec.exe output
            see - http://fs2.american.edu/jpnolan/www/stable/stable.html

            There's a known limitation of Nolan's executable for alpha < 0.2.

            Repeat following with beta = -1, -.5, 0, .5 and 1
                stablec.exe <<
                1 # pdf
                1 # Nolan S equivalent to S0 in scipy
                .25,2,.25 # alpha
                -1,-1,0 # beta
                -10,10,1 # x
                1,0 # gamma, delta
                2 # output file
        """
        data = np.load(os.path.abspath(os.path.join(os.path.dirname(__file__),
                       'data/stable-pdf-sample-data.npy')))
        data = np.core.records.fromarrays(data.T, names='x,p,alpha,beta')

        # support numpy 1.8.2 for travis
        npisin = np.isin if hasattr(np, "isin") else np.in1d

        # each entry: [default_method, fft_min_points, decimal_places, filter]
        tests = [
            # best selects
            ['best', None, 8, None],

            # quadrature is accurate for most alpha except 0.25; perhaps limitation of Nolan stablec?
            # we reduce size of x to speed up computation as numerical integration slow.
            ['quadrature', None, 8, lambda r: (r['alpha'] > 0.25) & (npisin(r['x'], [-10,-5,0,5,10]))],

            # zolatarev is accurate except at alpha==1, beta != 0
            ['zolotarev', None, 8, lambda r: r['alpha'] != 1],
            ['zolotarev', None, 8, lambda r: (r['alpha'] == 1) & (r['beta'] == 0)],
            ['zolotarev', None, 1, lambda r: (r['alpha'] == 1) & (r['beta'] != 0)],

            # fft accuracy reduces as alpha decreases, fails at low values of alpha and x=0
            ['fft', 0, 4, lambda r: r['alpha'] > 1],
            ['fft', 0, 3, lambda r: (r['alpha'] < 1) & (r['alpha'] > 0.25)],
            ['fft', 0, 1, lambda r: (r['alpha'] == 0.25) & (r['x'] != 0)],  # not useful here
        ]
        for ix, (default_method, fft_min_points, decimal_places, filter_func) in enumerate(tests):
            stats.levy_stable.pdf_default_method = default_method
            stats.levy_stable.pdf_fft_min_points_threshold = fft_min_points
            subdata = data[filter_func(data)] if filter_func is not None else data
            with suppress_warnings() as sup:
                sup.record(RuntimeWarning, "Density calculation unstable for alpha=1 and beta!=0.*")
                sup.record(RuntimeWarning, "Density calculations experimental for FFT method.*")
                p = stats.levy_stable.pdf(subdata['x'], subdata['alpha'], subdata['beta'], scale=1, loc=0)
                subdata2 = rec_append_fields(subdata, 'calc', p)
                # collect the offending rows so the failure message is useful
                failures = subdata2[(np.abs(p-subdata['p']) >= 1.5*10.**(-decimal_places)) | np.isnan(p)]
                assert_almost_equal(p, subdata['p'], decimal_places, "pdf test %s failed with method '%s'\n%s" % (ix, default_method, failures), verbose=False)

    @pytest.mark.slow
    def test_cdf_nolan_samples(self):
        """ Test cdf values against Nolan's stablec.exe output
            see - http://fs2.american.edu/jpnolan/www/stable/stable.html

            There's a known limitation of Nolan's executable for alpha < 0.2.

            Repeat following with beta = -1, -.5, 0, .5 and 1
                stablec.exe <<
                2 # cdf
                1 # Nolan S equivalent to S0 in scipy
                .25,2,.25 # alpha
                -1,-1,0 # beta
                -10,10,1 # x
                1,0 # gamma, delta
                2 # output file
        """
        data = np.load(os.path.abspath(os.path.join(os.path.dirname(__file__),
                       'data/stable-cdf-sample-data.npy')))
        data = np.core.records.fromarrays(data.T, names='x,p,alpha,beta')

        tests = [
            # zolatarev is accurate for all values
            ['zolotarev', None, 8, None],

            # fft accuracy poor, very poor alpha < 1
            ['fft', 0, 2, lambda r: r['alpha'] > 1],
        ]
        # NOTE(review): the cdf path is steered through the pdf_* control
        # attributes here -- confirm against levy_stable._cdf that these
        # are indeed the knobs the cdf implementation reads.
        for ix, (default_method, fft_min_points, decimal_places, filter_func) in enumerate(tests):
            stats.levy_stable.pdf_default_method = default_method
            stats.levy_stable.pdf_fft_min_points_threshold = fft_min_points
            subdata = data[filter_func(data)] if filter_func is not None else data
            with suppress_warnings() as sup:
                sup.record(RuntimeWarning, 'FFT method is considered ' +
                           'experimental for cumulative distribution ' +
                           'function evaluations.*')
                p = stats.levy_stable.cdf(subdata['x'], subdata['alpha'], subdata['beta'], scale=1, loc=0)
                subdata2 = rec_append_fields(subdata, 'calc', p)
                failures = subdata2[(np.abs(p-subdata['p']) >= 1.5*10.**(-decimal_places)) | np.isnan(p)]
                assert_almost_equal(p, subdata['p'], decimal_places, "cdf test %s failed with method '%s'\n%s" % (ix, default_method, failures), verbose=False)

    def test_pdf_alpha_equals_one_beta_non_zero(self):
        """ sample points extracted from Tables and Graphs of Stable Probability
            Density Functions - Donald R Holt - 1973 - p 187.
        """
        xs = np.array([0, 0, 0, 0,
                       1, 1, 1, 1,
                       2, 2, 2, 2,
                       3, 3, 3, 3,
                       4, 4, 4, 4])
        density = np.array([.3183, .3096, .2925, .2622,
                            .1591, .1587, .1599, .1635,
                            .0637, .0729, .0812, .0955,
                            .0318, .0390, .0458, .0586,
                            .0187, .0236, .0285, .0384])
        betas = np.array([0, .25, .5, 1,
                          0, .25, .5, 1,
                          0, .25, .5, 1,
                          0, .25, .5, 1,
                          0, .25, .5, 1])

        tests = [
            ['quadrature', None, 4],
            #['fft', 0, 4],
            ['zolotarev', None, 1],
        ]

        with np.errstate(all='ignore'), suppress_warnings() as sup:
            sup.filter(category=RuntimeWarning, message="Density calculation unstable.*")
            for default_method, fft_min_points, decimal_places in tests:
                stats.levy_stable.pdf_default_method = default_method
                stats.levy_stable.pdf_fft_min_points_threshold = fft_min_points
                #stats.levy_stable.fft_grid_spacing = 0.0001
                pdf = stats.levy_stable.pdf(xs, 1, betas, scale=1, loc=0)
                assert_almost_equal(pdf, density, decimal_places, default_method)

    def test_stats(self):
        # each entry: [(alpha, beta, loc, scale), expected 'mvsk']
        param_sets = [
            [(1.48,-.22, 0, 1), (0,np.inf,np.NaN,np.NaN)],
            [(2,.9, 10, 1.5), (10,4.5,0,0)]
        ]
        for args, exp_stats in param_sets:
            calc_stats = stats.levy_stable.stats(args[0], args[1], loc=args[2], scale=args[3], moments='mvsk')
            assert_almost_equal(calc_stats, exp_stats)
class TestArrayArgument(object):
    """Array-valued distribution arguments must broadcast (ticket:992)."""

    def setup_method(self):
        np.random.seed(1234)

    def test_noexception(self):
        # array loc with broadcastable scale and a 2-d size must not raise
        sample = stats.norm.rvs(loc=np.arange(5), scale=np.ones(5),
                                size=(10, 5))
        assert_equal(sample.shape, (10, 5))
class TestDocstring(object):
    """Sanity checks on generated distribution docstrings."""

    def test_docstrings(self):
        # See ticket #761: the distribution name should appear in its
        # docstring (docstrings may be stripped with python -OO).
        for dist, word in [(stats.rayleigh, "rayleigh"),
                           (stats.bernoulli, "bernoulli")]:
            if dist.__doc__ is not None:
                assert_(word in dist.__doc__.lower())

    def test_no_name_arg(self):
        # Construction without a name must not fail. See #1508.
        stats.rv_continuous()
        stats.rv_discrete()
class TestEntropy(object):
    """Tests for stats.entropy (Shannon entropy / relative entropy)."""

    def test_entropy_positive(self):
        # See ticket #497: entropy of a distribution w.r.t. itself is 0,
        # and relative entropy is non-negative.
        p = [0.5, 0.2, 0.3]
        q = [0.1, 0.25, 0.65]
        assert_(stats.entropy(p, p) == 0.0)
        assert_(stats.entropy(p, q) >= 0.0)

    def test_entropy_base(self):
        # a uniform 16-point pmf has 4 bits of entropy in base 2
        p = np.ones(16, float)
        assert_(abs(stats.entropy(p, base=2.) - 4.) < 1.e-5)

        # changing the base rescales relative entropy by log(2)
        q = np.ones(16, float)
        q[:8] = 2.
        nats = stats.entropy(p, q)
        bits = stats.entropy(p, q, base=2.)
        assert_(abs(nats / bits - np.log(2.)) < 1.e-5)

    def test_entropy_zero(self):
        # Test for PR-479: zero probabilities contribute 0, not nan
        assert_almost_equal(stats.entropy([0, 1, 2]), 0.63651416829481278,
                            decimal=12)

    def test_entropy_2d(self):
        p = [[0.1, 0.2], [0.6, 0.3], [0.3, 0.5]]
        q = [[0.2, 0.1], [0.3, 0.6], [0.5, 0.3]]
        assert_array_almost_equal(stats.entropy(p, q),
                                  [0.1933259, 0.18609809])

    def test_entropy_2d_zero(self):
        # a zero in qk but not pk yields infinite relative entropy ...
        p = [[0.1, 0.2], [0.6, 0.3], [0.3, 0.5]]
        q = [[0.0, 0.1], [0.3, 0.6], [0.5, 0.3]]
        assert_array_almost_equal(stats.entropy(p, q),
                                  [np.inf, 0.18609809])

        # ... but matching zeros in pk give a finite result
        p[0][0] = 0.0
        assert_array_almost_equal(stats.entropy(p, q),
                                  [0.17403988, 0.18609809])

    def test_entropy_base_2d_nondefault_axis(self):
        p = [[0.1, 0.2], [0.6, 0.3], [0.3, 0.5]]
        assert_array_almost_equal(stats.entropy(p, axis=1),
                                  [0.63651417, 0.63651417, 0.66156324])

    def test_entropy_2d_nondefault_axis(self):
        p = [[0.1, 0.2], [0.6, 0.3], [0.3, 0.5]]
        q = [[0.2, 0.1], [0.3, 0.6], [0.5, 0.3]]
        assert_array_almost_equal(stats.entropy(p, q, axis=1),
                                  [0.231049, 0.231049, 0.127706])

    def test_entropy_raises_value_error(self):
        # mismatched shapes must raise
        p = [[0.1, 0.2], [0.6, 0.3], [0.3, 0.5]]
        q = [[0.1, 0.2], [0.6, 0.3]]
        assert_raises(ValueError, stats.entropy, p, q)

    def test_base_entropy_with_axis_0_is_equal_to_default(self):
        p = [[0.1, 0.2], [0.6, 0.3], [0.3, 0.5]]
        assert_array_almost_equal(stats.entropy(p, axis=0),
                                  stats.entropy(p))

    def test_entropy_with_axis_0_is_equal_to_default(self):
        p = [[0.1, 0.2], [0.6, 0.3], [0.3, 0.5]]
        q = [[0.2, 0.1], [0.3, 0.6], [0.5, 0.3]]
        assert_array_almost_equal(stats.entropy(p, q, axis=0),
                                  stats.entropy(p, q))

    def test_base_entropy_transposed(self):
        p = np.array([[0.1, 0.2], [0.6, 0.3], [0.3, 0.5]])
        assert_array_almost_equal(stats.entropy(p.T).T,
                                  stats.entropy(p, axis=1))

    def test_entropy_transposed(self):
        p = np.array([[0.1, 0.2], [0.6, 0.3], [0.3, 0.5]])
        q = np.array([[0.2, 0.1], [0.3, 0.6], [0.5, 0.3]])
        assert_array_almost_equal(stats.entropy(p.T, q.T).T,
                                  stats.entropy(p, q, axis=1))
def TestArgsreduce():
    # NOTE(review): this is written as a *function* whose name does not
    # match pytest's default ``test_*`` collection pattern, so it is most
    # likely never collected or run -- consider renaming it to
    # ``test_argsreduce`` (verify against the project's pytest config).
    a = array([1, 3, 2, 1, 2, 3, 3])
    # with an array condition, scalars are broadcast to the kept elements
    b, c = argsreduce(a > 1, a, 2)
    assert_array_equal(b, [3, 2, 2, 3, 3])
    assert_array_equal(c, [2, 2, 2, 2, 2])
    # with a scalar (True) condition
    b, c = argsreduce(2 > 1, a, 2)
    assert_array_equal(b, a[0])
    assert_array_equal(c, [2])
    # condition keeping every element returns the inputs unchanged in size
    b, c = argsreduce(a > 0, a, 2)
    assert_array_equal(b, a)
    assert_array_equal(c, [2] * numpy.size(a))
class TestFitMethod(object):
    """Tests for the ``fit`` method of distributions, in particular the
    handling of fixed parameters (f0/f1, fa/fb/fix_a/fix_b, floc, fscale)."""

    # distributions excluded from generic fit checks
    skip = ['ncf', 'ksone', 'kstwo']

    def setup_method(self):
        np.random.seed(1234)

    # skip these b/c deprecated, or only loc and scale arguments
    fitSkipNonFinite = ['frechet_l', 'frechet_r', 'expon', 'norm', 'uniform', ]

    @pytest.mark.parametrize('dist,args', distcont)
    def test_fit_w_non_finite_data_values(self, dist, args):
        """gh-10300: fit must raise RuntimeError on nan/inf in the data."""
        if dist in self.fitSkipNonFinite:
            pytest.skip("%s fit known to fail or deprecated" % dist)
        x = np.array([1.6483, 2.7169, 2.4667, 1.1791, 3.5433, np.nan])
        y = np.array([1.6483, 2.7169, 2.4667, 1.1791, 3.5433, np.inf])
        distfunc = getattr(stats, dist)
        assert_raises(RuntimeError, distfunc.fit, x, floc=0, fscale=1)
        assert_raises(RuntimeError, distfunc.fit, y, floc=0, fscale=1)

    def test_fix_fit_2args_lognorm(self):
        # Regression test for #1551.
        np.random.seed(12345)
        with np.errstate(all='ignore'):
            x = stats.lognorm.rvs(0.25, 0., 20.0, size=20)
            # with loc and scale fixed, the MLE of the shape is the RMS
            # of log(x) - log(scale)
            expected_shape = np.sqrt(((np.log(x) - np.log(20))**2).mean())
            assert_allclose(np.array(stats.lognorm.fit(x, floc=0, fscale=20)),
                            [expected_shape, 0, 20], atol=1e-8)

    def test_fix_fit_norm(self):
        x = np.arange(1, 6)

        loc, scale = stats.norm.fit(x)
        assert_almost_equal(loc, 3)
        assert_almost_equal(scale, np.sqrt(2))

        loc, scale = stats.norm.fit(x, floc=2)
        assert_equal(loc, 2)
        assert_equal(scale, np.sqrt(3))

        loc, scale = stats.norm.fit(x, fscale=2)
        assert_almost_equal(loc, 3)
        assert_equal(scale, 2)

    def test_fix_fit_gamma(self):
        x = np.arange(1, 6)
        meanlog = np.log(x).mean()

        # A basic test of gamma.fit with floc=0.
        floc = 0
        a, loc, scale = stats.gamma.fit(x, floc=floc)
        # MLE condition for the shape: log(a) - digamma(a) = log(mean) - meanlog
        s = np.log(x.mean()) - meanlog
        assert_almost_equal(np.log(a) - special.digamma(a), s, decimal=5)
        assert_equal(loc, floc)
        assert_almost_equal(scale, x.mean()/a, decimal=8)

        # Regression tests for gh-2514.
        # The problem was that if `floc=0` was given, any other fixed
        # parameters were ignored.
        f0 = 1
        floc = 0
        a, loc, scale = stats.gamma.fit(x, f0=f0, floc=floc)
        assert_equal(a, f0)
        assert_equal(loc, floc)
        assert_almost_equal(scale, x.mean()/a, decimal=8)

        f0 = 2
        floc = 0
        a, loc, scale = stats.gamma.fit(x, f0=f0, floc=floc)
        assert_equal(a, f0)
        assert_equal(loc, floc)
        assert_almost_equal(scale, x.mean()/a, decimal=8)

        # loc and scale fixed.
        floc = 0
        fscale = 2
        a, loc, scale = stats.gamma.fit(x, floc=floc, fscale=fscale)
        assert_equal(loc, floc)
        assert_equal(scale, fscale)
        # MLE condition for the shape with loc and scale fixed
        c = meanlog - np.log(fscale)
        assert_almost_equal(special.digamma(a), c)

    def test_fix_fit_beta(self):
        # Test beta.fit when both floc and fscale are given.

        def mlefunc(a, b, x):
            # Zeros of this function are critical points of
            # the maximum likelihood function.
            n = len(x)
            s1 = np.log(x).sum()
            s2 = np.log(1-x).sum()
            psiab = special.psi(a + b)
            func = [s1 - n * (-psiab + special.psi(a)),
                    s2 - n * (-psiab + special.psi(b))]
            return func

        # Basic test with floc and fscale given.
        x = np.array([0.125, 0.25, 0.5])
        a, b, loc, scale = stats.beta.fit(x, floc=0, fscale=1)
        assert_equal(loc, 0)
        assert_equal(scale, 1)
        assert_allclose(mlefunc(a, b, x), [0, 0], atol=1e-6)

        # Basic test with f0, floc and fscale given.
        # This is also a regression test for gh-2514.
        x = np.array([0.125, 0.25, 0.5])
        a, b, loc, scale = stats.beta.fit(x, f0=2, floc=0, fscale=1)
        assert_equal(a, 2)
        assert_equal(loc, 0)
        assert_equal(scale, 1)
        da, db = mlefunc(a, b, x)
        assert_allclose(db, 0, atol=1e-5)

        # Same floc and fscale values as above, but reverse the data
        # and fix b (f1).
        x2 = 1 - x
        a2, b2, loc2, scale2 = stats.beta.fit(x2, f1=2, floc=0, fscale=1)
        assert_equal(b2, 2)
        assert_equal(loc2, 0)
        assert_equal(scale2, 1)
        da, db = mlefunc(a2, b2, x2)
        assert_allclose(da, 0, atol=1e-5)
        # a2 of this test should equal b from above.
        assert_almost_equal(a2, b)

        # Check for detection of data out of bounds when floc and fscale
        # are given.
        assert_raises(ValueError, stats.beta.fit, x, floc=0.5, fscale=1)
        y = np.array([0, .5, 1])
        assert_raises(ValueError, stats.beta.fit, y, floc=0, fscale=1)
        assert_raises(ValueError, stats.beta.fit, y, floc=0, fscale=1, f0=2)
        assert_raises(ValueError, stats.beta.fit, y, floc=0, fscale=1, f1=2)

        # Check that attempting to fix all the parameters raises a ValueError.
        assert_raises(ValueError, stats.beta.fit, y, f0=0, f1=1,
                      floc=2, fscale=3)

    def test_expon_fit(self):
        x = np.array([2, 2, 4, 4, 4, 4, 4, 8])

        loc, scale = stats.expon.fit(x)
        assert_equal(loc, 2)    # x.min()
        assert_equal(scale, 2)  # x.mean() - x.min()

        loc, scale = stats.expon.fit(x, fscale=3)
        assert_equal(loc, 2)    # x.min()
        assert_equal(scale, 3)  # fscale

        loc, scale = stats.expon.fit(x, floc=0)
        assert_equal(loc, 0)    # floc
        assert_equal(scale, 4)  # x.mean() - loc

    def test_lognorm_fit(self):
        x = np.array([1.5, 3, 10, 15, 23, 59])
        lnxm1 = np.log(x - 1)

        shape, loc, scale = stats.lognorm.fit(x, floc=1)
        assert_allclose(shape, lnxm1.std(), rtol=1e-12)
        assert_equal(loc, 1)
        assert_allclose(scale, np.exp(lnxm1.mean()), rtol=1e-12)

        shape, loc, scale = stats.lognorm.fit(x, floc=1, fscale=6)
        assert_allclose(shape, np.sqrt(((lnxm1 - np.log(6))**2).mean()),
                        rtol=1e-12)
        assert_equal(loc, 1)
        assert_equal(scale, 6)

        # fix_s is an alias for fixing the shape parameter 's'
        shape, loc, scale = stats.lognorm.fit(x, floc=1, fix_s=0.75)
        assert_equal(shape, 0.75)
        assert_equal(loc, 1)
        assert_allclose(scale, np.exp(lnxm1.mean()), rtol=1e-12)

    def test_uniform_fit(self):
        x = np.array([1.0, 1.1, 1.2, 9.0])

        loc, scale = stats.uniform.fit(x)
        assert_equal(loc, x.min())
        assert_equal(scale, x.ptp())

        loc, scale = stats.uniform.fit(x, floc=0)
        assert_equal(loc, 0)
        assert_equal(scale, x.max())

        loc, scale = stats.uniform.fit(x, fscale=10)
        assert_equal(loc, 0)
        assert_equal(scale, 10)

        # fixed values that cannot cover the data must raise
        assert_raises(ValueError, stats.uniform.fit, x, floc=2.0)
        assert_raises(ValueError, stats.uniform.fit, x, fscale=5.0)

    def test_fshapes(self):
        # take a beta distribution, with shapes='a, b', and make sure that
        # fa is equivalent to f0, and fb is equivalent to f1
        a, b = 3., 4.
        x = stats.beta.rvs(a, b, size=100, random_state=1234)
        res_1 = stats.beta.fit(x, f0=3.)
        res_2 = stats.beta.fit(x, fa=3.)
        assert_allclose(res_1, res_2, atol=1e-12, rtol=1e-12)

        res_2 = stats.beta.fit(x, fix_a=3.)
        assert_allclose(res_1, res_2, atol=1e-12, rtol=1e-12)

        res_3 = stats.beta.fit(x, f1=4.)
        res_4 = stats.beta.fit(x, fb=4.)
        assert_allclose(res_3, res_4, atol=1e-12, rtol=1e-12)

        res_4 = stats.beta.fit(x, fix_b=4.)
        assert_allclose(res_3, res_4, atol=1e-12, rtol=1e-12)

        # cannot specify both positional and named args at the same time
        assert_raises(ValueError, stats.beta.fit, x, fa=1, f0=2)

        # check that attempting to fix all parameters raises a ValueError
        assert_raises(ValueError, stats.beta.fit, x, fa=0, f1=1,
                      floc=2, fscale=3)

        # check that specifying floc, fscale and fshapes works for
        # beta and gamma which override the generic fit method
        res_5 = stats.beta.fit(x, fa=3., floc=0, fscale=1)
        aa, bb, ll, ss = res_5
        assert_equal([aa, ll, ss], [3., 0, 1])

        # gamma distribution
        a = 3.
        data = stats.gamma.rvs(a, size=100)
        aa, ll, ss = stats.gamma.fit(data, fa=a)
        assert_equal(aa, a)

    def test_extra_params(self):
        # unknown parameters should raise rather than be silently ignored
        dist = stats.exponnorm
        data = dist.rvs(K=2, size=100)
        dct = dict(enikibeniki=-101)
        assert_raises(TypeError, dist.fit, data, **dct)
class TestFrozen(object):
    """Test that a frozen distribution gives the same results as the
    original object.

    Only tested for the normal distribution (with loc and scale specified)
    and for the gamma distribution (with a shape parameter specified).
    """

    def setup_method(self):
        np.random.seed(1234)

    def test_norm(self):
        dist = stats.norm
        frozen = stats.norm(loc=10.0, scale=3.0)

        # every public method of the frozen object must agree with the
        # unfrozen distribution called with the same loc/scale
        result_f = frozen.pdf(20.0)
        result = dist.pdf(20.0, loc=10.0, scale=3.0)
        assert_equal(result_f, result)

        result_f = frozen.cdf(20.0)
        result = dist.cdf(20.0, loc=10.0, scale=3.0)
        assert_equal(result_f, result)

        result_f = frozen.ppf(0.25)
        result = dist.ppf(0.25, loc=10.0, scale=3.0)
        assert_equal(result_f, result)

        result_f = frozen.isf(0.25)
        result = dist.isf(0.25, loc=10.0, scale=3.0)
        assert_equal(result_f, result)

        result_f = frozen.sf(10.0)
        result = dist.sf(10.0, loc=10.0, scale=3.0)
        assert_equal(result_f, result)

        result_f = frozen.median()
        result = dist.median(loc=10.0, scale=3.0)
        assert_equal(result_f, result)

        result_f = frozen.mean()
        result = dist.mean(loc=10.0, scale=3.0)
        assert_equal(result_f, result)

        result_f = frozen.var()
        result = dist.var(loc=10.0, scale=3.0)
        assert_equal(result_f, result)

        result_f = frozen.std()
        result = dist.std(loc=10.0, scale=3.0)
        assert_equal(result_f, result)

        result_f = frozen.entropy()
        result = dist.entropy(loc=10.0, scale=3.0)
        assert_equal(result_f, result)

        result_f = frozen.moment(2)
        result = dist.moment(2, loc=10.0, scale=3.0)
        assert_equal(result_f, result)

        assert_equal(frozen.a, dist.a)
        assert_equal(frozen.b, dist.b)

    def test_gamma(self):
        a = 2.0
        dist = stats.gamma
        frozen = stats.gamma(a)

        result_f = frozen.pdf(20.0)
        result = dist.pdf(20.0, a)
        assert_equal(result_f, result)

        result_f = frozen.cdf(20.0)
        result = dist.cdf(20.0, a)
        assert_equal(result_f, result)

        result_f = frozen.ppf(0.25)
        result = dist.ppf(0.25, a)
        assert_equal(result_f, result)

        result_f = frozen.isf(0.25)
        result = dist.isf(0.25, a)
        assert_equal(result_f, result)

        result_f = frozen.sf(10.0)
        result = dist.sf(10.0, a)
        assert_equal(result_f, result)

        result_f = frozen.median()
        result = dist.median(a)
        assert_equal(result_f, result)

        result_f = frozen.mean()
        result = dist.mean(a)
        assert_equal(result_f, result)

        result_f = frozen.var()
        result = dist.var(a)
        assert_equal(result_f, result)

        result_f = frozen.std()
        result = dist.std(a)
        assert_equal(result_f, result)

        result_f = frozen.entropy()
        result = dist.entropy(a)
        assert_equal(result_f, result)

        result_f = frozen.moment(2)
        result = dist.moment(2, a)
        assert_equal(result_f, result)

        assert_equal(frozen.a, frozen.dist.a)
        assert_equal(frozen.b, frozen.dist.b)

    def test_regression_ticket_1293(self):
        # Create a frozen distribution.
        frozen = stats.lognorm(1)
        # Call one of its methods that does not take any keyword arguments.
        m1 = frozen.moment(2)
        # Now call a method that takes a keyword argument.
        frozen.stats(moments='mvsk')
        # Call moment(2) again.
        # After calling stats(), the following was raising an exception.
        # So this test passes if the following does not raise an exception.
        m2 = frozen.moment(2)
        # The following should also be true, of course.  But it is not
        # the focus of this test.
        assert_equal(m1, m2)

    def test_ab(self):
        # test that the support of a frozen distribution
        # (i) remains frozen even if it changes for the original one
        # (ii) is actually correct if the shape parameters are such that
        #      the values of [a, b] are not the default [0, inf]
        # take a genpareto as an example where the support
        # depends on the value of the shape parameter:
        # for c > 0: a, b = 0, inf
        # for c < 0: a, b = 0, -1/c
        c = -0.1
        rv = stats.genpareto(c=c)
        a, b = rv.dist._get_support(c)
        assert_equal([a, b], [0., 10.])

        c = 0.1
        stats.genpareto.pdf(0, c=c)
        assert_equal(rv.dist._get_support(c), [0, np.inf])

        # repeat to ensure the first pdf call did not mutate shared state
        c = -0.1
        rv = stats.genpareto(c=c)
        a, b = rv.dist._get_support(c)
        assert_equal([a, b], [0., 10.])

        c = 0.1
        stats.genpareto.pdf(0, c)  # this should NOT change genpareto.b
        assert_equal((rv.dist.a, rv.dist.b), stats.genpareto._get_support(c))

        rv1 = stats.genpareto(c=0.1)
        assert_(rv1.dist is not rv.dist)

        # c >= 0: a, b = [0, inf]
        for c in [1., 0.]:
            c = np.asarray(c)
            rv = stats.genpareto(c=c)
            a, b = rv.a, rv.b
            assert_equal(a, 0.)
            assert_(np.isposinf(b))

        # c < 0: a=0, b=1/|c|
        c = np.asarray(-2.)
        a, b = stats.genpareto._get_support(c)
        assert_allclose([a, b], [0., 0.5])

    def test_rv_frozen_in_namespace(self):
        # Regression test for gh-3522
        assert_(hasattr(stats.distributions, 'rv_frozen'))

    def test_random_state(self):
        # only check that the random_state attribute exists,
        frozen = stats.norm()
        assert_(hasattr(frozen, 'random_state'))

        # ... that it can be set,
        frozen.random_state = 42
        assert_equal(frozen.random_state.get_state(),
                     np.random.RandomState(42).get_state())

        # ... and that .rvs method accepts it as an argument
        rndm = np.random.RandomState(1234)
        frozen.rvs(size=8, random_state=rndm)

    def test_pickling(self):
        # test that a frozen instance pickles and unpickles
        # (this method is a clone of common_tests.check_pickling)
        beta = stats.beta(2.3098496451481823, 0.62687954300963677)
        poiss = stats.poisson(3.)
        sample = stats.rv_discrete(values=([0, 1, 2, 3],
                                           [0.1, 0.2, 0.3, 0.4]))

        for distfn in [beta, poiss, sample]:
            distfn.random_state = 1234
            distfn.rvs(size=8)
            s = pickle.dumps(distfn)
            r0 = distfn.rvs(size=8)

            # the unpickled copy must continue the same random stream
            unpickled = pickle.loads(s)
            r1 = unpickled.rvs(size=8)
            assert_equal(r0, r1)

            # also smoke test some methods
            medians = [distfn.ppf(0.5), unpickled.ppf(0.5)]
            assert_equal(medians[0], medians[1])
            assert_equal(distfn.cdf(medians[0]),
                         unpickled.cdf(medians[1]))

    def test_expect(self):
        # smoke test the expect method of the frozen distribution
        # only take a gamma w/loc and scale and poisson with loc specified
        def func(x):
            return x

        gm = stats.gamma(a=2, loc=3, scale=4)
        gm_val = gm.expect(func, lb=1, ub=2, conditional=True)
        gamma_val = stats.gamma.expect(func, args=(2,), loc=3, scale=4,
                                       lb=1, ub=2, conditional=True)
        assert_allclose(gm_val, gamma_val)

        p = stats.poisson(3, loc=4)
        p_val = p.expect(func)
        poisson_val = stats.poisson.expect(func, args=(3,), loc=4)
        assert_allclose(p_val, poisson_val)
class TestExpect(object):
    """Test for expect method.

    Uses normal distribution and beta distribution for finite bounds, and
    hypergeom for discrete distribution with finite support.
    """

    def test_norm(self):
        # second central moment of N(5, 2) is 4
        v = stats.norm.expect(lambda x: (x-5)*(x-5), loc=5, scale=2)
        assert_almost_equal(v, 4, decimal=14)

        m = stats.norm.expect(lambda x: (x), loc=5, scale=2)
        assert_almost_equal(m, 5, decimal=14)

        # integrating the constant 1 between the 5% and 95% quantiles
        # gives 0.9 unconditionally, 1.0 conditionally
        lb = stats.norm.ppf(0.05, loc=5, scale=2)
        ub = stats.norm.ppf(0.95, loc=5, scale=2)
        prob90 = stats.norm.expect(lambda x: 1, loc=5, scale=2, lb=lb, ub=ub)
        assert_almost_equal(prob90, 0.9, decimal=14)

        prob90c = stats.norm.expect(lambda x: 1, loc=5, scale=2, lb=lb, ub=ub,
                                    conditional=True)
        assert_almost_equal(prob90c, 1., decimal=14)

    def test_beta(self):
        # case with finite support interval
        v = stats.beta.expect(lambda x: (x-19/3.)*(x-19/3.), args=(10, 5),
                              loc=5, scale=2)
        assert_almost_equal(v, 1./18., decimal=13)

        m = stats.beta.expect(lambda x: x, args=(10, 5), loc=5., scale=2.)
        assert_almost_equal(m, 19/3., decimal=13)

        ub = stats.beta.ppf(0.95, 10, 10, loc=5, scale=2)
        lb = stats.beta.ppf(0.05, 10, 10, loc=5, scale=2)
        prob90 = stats.beta.expect(lambda x: 1., args=(10, 10), loc=5.,
                                   scale=2., lb=lb, ub=ub, conditional=False)
        assert_almost_equal(prob90, 0.9, decimal=13)

        prob90c = stats.beta.expect(lambda x: 1, args=(10, 10), loc=5,
                                    scale=2, lb=lb, ub=ub, conditional=True)
        assert_almost_equal(prob90c, 1., decimal=13)

    def test_hypergeom(self):
        # test case with finite bounds

        # without specifying bounds
        m_true, v_true = stats.hypergeom.stats(20, 10, 8, loc=5.)
        m = stats.hypergeom.expect(lambda x: x, args=(20, 10, 8), loc=5.)
        assert_almost_equal(m, m_true, decimal=13)

        v = stats.hypergeom.expect(lambda x: (x-9.)**2, args=(20, 10, 8),
                                   loc=5.)
        assert_almost_equal(v, v_true, decimal=14)

        # with bounds, bounds equal to shifted support
        v_bounds = stats.hypergeom.expect(lambda x: (x-9.)**2,
                                          args=(20, 10, 8),
                                          loc=5., lb=5, ub=13)
        assert_almost_equal(v_bounds, v_true, decimal=14)

        # drop boundary points
        prob_true = 1-stats.hypergeom.pmf([5, 13], 20, 10, 8, loc=5).sum()
        prob_bounds = stats.hypergeom.expect(lambda x: 1, args=(20, 10, 8),
                                             loc=5., lb=6, ub=12)
        assert_almost_equal(prob_bounds, prob_true, decimal=13)

        # conditional
        prob_bc = stats.hypergeom.expect(lambda x: 1, args=(20, 10, 8), loc=5.,
                                         lb=6, ub=12, conditional=True)
        assert_almost_equal(prob_bc, 1, decimal=14)

        # check simple integral
        prob_b = stats.hypergeom.expect(lambda x: 1, args=(20, 10, 8),
                                        lb=0, ub=8)
        assert_almost_equal(prob_b, 1, decimal=13)

    def test_poisson(self):
        # poisson, use lower bound only
        prob_bounds = stats.poisson.expect(lambda x: 1, args=(2,), lb=3,
                                           conditional=False)
        prob_b_true = 1-stats.poisson.cdf(2, 2)
        assert_almost_equal(prob_bounds, prob_b_true, decimal=14)

        prob_lb = stats.poisson.expect(lambda x: 1, args=(2,), lb=2,
                                       conditional=True)
        assert_almost_equal(prob_lb, 1, decimal=14)

    def test_genhalflogistic(self):
        # genhalflogistic, changes upper bound of support in _argcheck
        # regression test for gh-2622
        halflog = stats.genhalflogistic
        # check consistency when calling expect twice with the same input
        res1 = halflog.expect(args=(1.5,))
        halflog.expect(args=(0.5,))
        res2 = halflog.expect(args=(1.5,))
        assert_almost_equal(res1, res2, decimal=14)

    def test_rice_overflow(self):
        # rice.pdf(999, 0.74) was inf since special.i0 silentyly overflows
        # check that using i0e fixes it
        assert_(np.isfinite(stats.rice.pdf(999, 0.74)))

        assert_(np.isfinite(stats.rice.expect(lambda x: 1, args=(0.74,))))
        assert_(np.isfinite(stats.rice.expect(lambda x: 2, args=(0.74,))))
        assert_(np.isfinite(stats.rice.expect(lambda x: 3, args=(0.74,))))

    def test_logser(self):
        # test a discrete distribution with infinite support and loc
        p, loc = 0.3, 3
        res_0 = stats.logser.expect(lambda k: k, args=(p,))
        # check against the correct answer (sum of a geom series)
        assert_allclose(res_0,
                        p / (p - 1.) / np.log(1. - p), atol=1e-15)

        # now check it with `loc`
        res_l = stats.logser.expect(lambda k: k, args=(p,), loc=loc)
        assert_allclose(res_l, res_0 + loc, atol=1e-15)

    def test_skellam(self):
        # Use a discrete distribution w/ bi-infinite support. Compute two first
        # moments and compare to known values (cf skellam.stats)
        p1, p2 = 18, 22
        m1 = stats.skellam.expect(lambda x: x, args=(p1, p2))
        m2 = stats.skellam.expect(lambda x: x**2, args=(p1, p2))
        assert_allclose(m1, p1 - p2, atol=1e-12)
        assert_allclose(m2 - m1**2, p1 + p2, atol=1e-12)

    def test_randint(self):
        # Use a discrete distribution w/ parameter-dependent support, which
        # is larger than the default chunksize
        lo, hi = 0, 113
        res = stats.randint.expect(lambda x: x, (lo, hi))
        assert_allclose(res,
                        sum(_ for _ in range(lo, hi)) / (hi - lo), atol=1e-15)

    def test_zipf(self):
        # Test that there is no infinite loop even if the sum diverges
        assert_warns(RuntimeWarning, stats.zipf.expect,
                     lambda x: x**2, (2,))

    def test_discrete_kwds(self):
        # check that discrete expect accepts keywords to control the summation
        n0 = stats.poisson.expect(lambda x: 1, args=(2,))
        n1 = stats.poisson.expect(lambda x: 1, args=(2,),
                                  maxcount=1001, chunksize=32, tolerance=1e-8)
        assert_almost_equal(n0, n1, decimal=14)

    def test_moment(self):
        # test the .moment() method: compute a higher moment and compare to
        # a known value
        def poiss_moment5(mu):
            return mu**5 + 10*mu**4 + 25*mu**3 + 15*mu**2 + mu

        for mu in [5, 7]:
            m5 = stats.poisson.moment(5, mu)
            assert_allclose(m5, poiss_moment5(mu), rtol=1e-10)
class TestNct(object):
    """Tests for the non-central t distribution (stats.nct)."""

    def test_nc_parameter(self):
        # Parameter values c<=0 were not enabled (gh-2402).
        # For negative values c and for c=0 results of rv.cdf(0) below were nan
        rv = stats.nct(5, 0)
        assert_equal(rv.cdf(0), 0.5)
        rv = stats.nct(5, -1)
        assert_almost_equal(rv.cdf(0), 0.841344746069, decimal=10)

    def test_broadcasting(self):
        # df (a column) broadcasts against nc (a row) into a 3x4 grid.
        res = stats.nct.pdf(5, np.arange(4, 7)[:, None],
                            np.linspace(0.1, 1, 4))
        expected = array([[0.00321886, 0.00557466, 0.00918418, 0.01442997],
                          [0.00217142, 0.00395366, 0.00683888, 0.01126276],
                          [0.00153078, 0.00291093, 0.00525206, 0.00900815]])
        assert_allclose(res, expected, rtol=1e-5)

    def test_variance_gh_issue_2401(self):
        # Computation of the variance of a non-central t-distribution resulted
        # in a TypeError: ufunc 'isinf' not supported for the input types,
        # and the inputs could not be safely coerced to any supported types
        # according to the casting rule 'safe'
        rv = stats.nct(4, 0)
        assert_equal(rv.var(), 2.0)

    def test_nct_inf_moments(self):
        # n-th moment of nct only exists for df > n
        m, v, s, k = stats.nct.stats(df=1.9, nc=0.3, moments='mvsk')
        assert_(np.isfinite(m))
        assert_equal([v, s, k], [np.inf, np.nan, np.nan])

        m, v, s, k = stats.nct.stats(df=3.1, nc=0.3, moments='mvsk')
        assert_(np.isfinite([m, v, s]).all())
        assert_equal(k, np.nan)
class TestRice(object):
    """Tests for the Rice distribution, mostly the b=0 edge case."""

    def test_rice_zero_b(self):
        # rice distribution should work with b=0, cf gh-2164
        x = [0.2, 1., 5.]
        assert_(np.isfinite(stats.rice.pdf(x, b=0.)).all())
        assert_(np.isfinite(stats.rice.logpdf(x, b=0.)).all())
        assert_(np.isfinite(stats.rice.cdf(x, b=0.)).all())
        assert_(np.isfinite(stats.rice.logcdf(x, b=0.)).all())

        q = [0.1, 0.1, 0.5, 0.9]
        assert_(np.isfinite(stats.rice.ppf(q, b=0.)).all())

        mvsk = stats.rice.stats(0, moments='mvsk')
        assert_(np.isfinite(mvsk).all())

        # furthermore, pdf is continuous as b\to 0
        # rice.pdf(x, b\to 0) = x exp(-x^2/2) + O(b^2)
        # see e.g. Abramovich & Stegun 9.6.7 & 9.6.10
        b = 1e-8
        assert_allclose(stats.rice.pdf(x, 0), stats.rice.pdf(x, b),
                        atol=b, rtol=0)

    def test_rice_rvs(self):
        # A scalar draw has size 1; an explicit shape tuple is honoured.
        rvs = stats.rice.rvs
        assert_equal(rvs(b=3.).size, 1)
        assert_equal(rvs(b=3., size=(3, 5)).shape, (3, 5))
class TestErlang(object):
    """Erlang is gamma restricted to integer shape parameters."""

    def setup_method(self):
        np.random.seed(1234)

    def test_erlang_runtimewarning(self):
        # erlang should generate a RuntimeWarning if a non-integer
        # shape parameter is used.
        with warnings.catch_warnings():
            # Escalate RuntimeWarning to an error so assert_raises can see it.
            warnings.simplefilter("error", RuntimeWarning)

            # The non-integer shape parameter 1.3 should trigger a
            # RuntimeWarning
            assert_raises(RuntimeWarning,
                          stats.erlang.rvs, 1.3, loc=0, scale=1, size=4)

            # Calling the fit method with `f0` set to an integer should
            # *not* trigger a RuntimeWarning. It should return the same
            # values as gamma.fit(...).
            data = [0.5, 1.0, 2.0, 4.0]
            result_erlang = stats.erlang.fit(data, f0=1)
            result_gamma = stats.gamma.fit(data, f0=1)
            assert_allclose(result_erlang, result_gamma, rtol=1e-3)

    def test_gh_pr_10949_argcheck(self):
        # Invalid shape a=-1 must be rejected the same way gamma rejects it.
        assert_equal(stats.erlang.pdf(0.5, a=[1, -1]),
                     stats.gamma.pdf(0.5, a=[1, -1]))
class TestRayleigh(object):
    """Regression tests for rayleigh log-functions far in the tail (gh-6227)."""

    def test_logpdf(self):
        # The pdf itself underflows at x=50, but logpdf must stay finite.
        value = stats.rayleigh.logpdf(50)
        assert_allclose(value, -1246.0879769945718)

    def test_logsf(self):
        # For the standard Rayleigh, logsf(x) = -x**2 / 2 exactly.
        value = stats.rayleigh.logsf(50)
        assert_allclose(value, -1250)
class TestExponWeib(object):
    """exponweib checks, including its reductions to weibull_min and expon."""

    def test_pdf_logpdf(self):
        # Regression test for gh-3508.
        x = 0.1
        a = 1.0
        c = 100.0
        p = stats.exponweib.pdf(x, a, c)
        logp = stats.exponweib.logpdf(x, a, c)
        # Expected values were computed with mpmath.
        assert_allclose([p, logp],
                        [1.0000000000000054e-97, -223.35075402042244])

    def test_a_is_1(self):
        # For issue gh-3508.
        # Check that when a=1, the pdf and logpdf methods of exponweib are the
        # same as those of weibull_min.
        x = np.logspace(-4, -1, 4)
        a = 1
        c = 100

        p = stats.exponweib.pdf(x, a, c)
        expected = stats.weibull_min.pdf(x, c)
        assert_allclose(p, expected)

        logp = stats.exponweib.logpdf(x, a, c)
        expected = stats.weibull_min.logpdf(x, c)
        assert_allclose(logp, expected)

    def test_a_is_1_c_is_1(self):
        # When a = 1 and c = 1, the distribution is exponential.
        x = np.logspace(-8, 1, 10)
        a = 1
        c = 1

        p = stats.exponweib.pdf(x, a, c)
        expected = stats.expon.pdf(x)
        assert_allclose(p, expected)

        logp = stats.exponweib.logpdf(x, a, c)
        expected = stats.expon.logpdf(x)
        assert_allclose(logp, expected)
class TestWeibull(object):
    """weibull_min/weibull_max checks against symbolic reference values."""

    def test_logpdf(self):
        # gh-6217
        y = stats.weibull_min.logpdf(0, 1)
        assert_equal(y, 0)

    def test_with_maxima_distrib(self):
        # Tests for weibull_min and weibull_max.
        # The expected values were computed using the symbolic algebra
        # program 'maxima' with the package 'distrib', which has
        # 'pdf_weibull' and 'cdf_weibull'. The mapping between the
        # scipy and maxima functions is as follows:
        # -----------------------------------------------------------------
        # scipy                              maxima
        # ---------------------------------  ------------------------------
        # weibull_min.pdf(x, a, scale=b)     pdf_weibull(x, a, b)
        # weibull_min.logpdf(x, a, scale=b)  log(pdf_weibull(x, a, b))
        # weibull_min.cdf(x, a, scale=b)     cdf_weibull(x, a, b)
        # weibull_min.logcdf(x, a, scale=b)  log(cdf_weibull(x, a, b))
        # weibull_min.sf(x, a, scale=b)      1 - cdf_weibull(x, a, b)
        # weibull_min.logsf(x, a, scale=b)   log(1 - cdf_weibull(x, a, b))
        #
        # weibull_max.pdf(x, a, scale=b)     pdf_weibull(-x, a, b)
        # weibull_max.logpdf(x, a, scale=b)  log(pdf_weibull(-x, a, b))
        # weibull_max.cdf(x, a, scale=b)     1 - cdf_weibull(-x, a, b)
        # weibull_max.logcdf(x, a, scale=b)  log(1 - cdf_weibull(-x, a, b))
        # weibull_max.sf(x, a, scale=b)      cdf_weibull(-x, a, b)
        # weibull_max.logsf(x, a, scale=b)   log(cdf_weibull(-x, a, b))
        # -----------------------------------------------------------------
        x = 1.5
        a = 2.0
        b = 3.0

        # weibull_min
        p = stats.weibull_min.pdf(x, a, scale=b)
        assert_allclose(p, np.exp(-0.25)/3)

        lp = stats.weibull_min.logpdf(x, a, scale=b)
        assert_allclose(lp, -0.25 - np.log(3))

        c = stats.weibull_min.cdf(x, a, scale=b)
        assert_allclose(c, -special.expm1(-0.25))

        lc = stats.weibull_min.logcdf(x, a, scale=b)
        assert_allclose(lc, np.log(-special.expm1(-0.25)))

        s = stats.weibull_min.sf(x, a, scale=b)
        assert_allclose(s, np.exp(-0.25))

        ls = stats.weibull_min.logsf(x, a, scale=b)
        assert_allclose(ls, -0.25)

        # Also test using a large value x, for which computing the survival
        # function using the CDF would result in 0.
        s = stats.weibull_min.sf(30, 2, scale=3)
        assert_allclose(s, np.exp(-100))

        ls = stats.weibull_min.logsf(30, 2, scale=3)
        assert_allclose(ls, -100)

        # weibull_max
        x = -1.5

        p = stats.weibull_max.pdf(x, a, scale=b)
        assert_allclose(p, np.exp(-0.25)/3)

        lp = stats.weibull_max.logpdf(x, a, scale=b)
        assert_allclose(lp, -0.25 - np.log(3))

        c = stats.weibull_max.cdf(x, a, scale=b)
        assert_allclose(c, np.exp(-0.25))

        lc = stats.weibull_max.logcdf(x, a, scale=b)
        assert_allclose(lc, -0.25)

        s = stats.weibull_max.sf(x, a, scale=b)
        assert_allclose(s, -special.expm1(-0.25))

        ls = stats.weibull_max.logsf(x, a, scale=b)
        assert_allclose(ls, np.log(-special.expm1(-0.25)))

        # Also test using a value of x close to 0, for which computing the
        # survival function using the CDF would result in 0.
        s = stats.weibull_max.sf(-1e-9, 2, scale=3)
        assert_allclose(s, -special.expm1(-1/9000000000000000000))

        ls = stats.weibull_max.logsf(-1e-9, 2, scale=3)
        assert_allclose(ls, np.log(-special.expm1(-1/9000000000000000000)))
class TestRdist(object):
    """Tests for the R-distribution (stats.rdist)."""

    def test_rdist_cdf_gh1285(self):
        # check workaround in rdist._cdf for issue gh-1285.
        # ppf/cdf round trip with a large shape parameter.
        distfn = stats.rdist
        values = [0.001, 0.5, 0.999]
        assert_almost_equal(distfn.cdf(distfn.ppf(values, 541.0), 541.0),
                            values, decimal=5)

    def test_rdist_beta(self):
        # rdist is a special case of stats.beta
        # (affine map of beta(c/2, c/2) from [0, 1] onto [-1, 1]).
        x = np.linspace(-0.99, 0.99, 10)
        c = 2.7
        assert_almost_equal(0.5*stats.beta(c/2, c/2).pdf((x + 1)/2),
                            stats.rdist(c).pdf(x))
class TestTrapz(object):
    """Tests for the trapezoidal distribution (stats.trapz)."""

    def test_reduces_to_triang(self):
        # When both corner parameters coincide the trapezoid degenerates
        # to a triangle.
        modes = [0, 0.3, 0.5, 1]
        for mode in modes:
            x = [0, mode, 1]
            assert_almost_equal(stats.trapz.pdf(x, mode, mode),
                                stats.triang.pdf(x, mode))
            assert_almost_equal(stats.trapz.cdf(x, mode, mode),
                                stats.triang.cdf(x, mode))

    def test_reduces_to_uniform(self):
        # Corners at 0 and 1 make the density flat, i.e. uniform.
        x = np.linspace(0, 1, 10)
        assert_almost_equal(stats.trapz.pdf(x, 0, 1), stats.uniform.pdf(x))
        assert_almost_equal(stats.trapz.cdf(x, 0, 1), stats.uniform.cdf(x))

    def test_cases(self):
        # edge cases
        assert_almost_equal(stats.trapz.pdf(0, 0, 0), 2)
        assert_almost_equal(stats.trapz.pdf(1, 1, 1), 2)
        assert_almost_equal(stats.trapz.pdf(0.5, 0, 0.8),
                            1.11111111111111111)
        assert_almost_equal(stats.trapz.pdf(0.5, 0.2, 1.0),
                            1.11111111111111111)

        # straightforward case
        assert_almost_equal(stats.trapz.pdf(0.1, 0.2, 0.8), 0.625)
        assert_almost_equal(stats.trapz.pdf(0.5, 0.2, 0.8), 1.25)
        assert_almost_equal(stats.trapz.pdf(0.9, 0.2, 0.8), 0.625)

        assert_almost_equal(stats.trapz.cdf(0.1, 0.2, 0.8), 0.03125)
        assert_almost_equal(stats.trapz.cdf(0.2, 0.2, 0.8), 0.125)
        assert_almost_equal(stats.trapz.cdf(0.5, 0.2, 0.8), 0.5)
        assert_almost_equal(stats.trapz.cdf(0.9, 0.2, 0.8), 0.96875)
        assert_almost_equal(stats.trapz.cdf(1.0, 0.2, 0.8), 1.0)

    def test_trapz_vect(self):
        # test that array-valued shapes and arguments are handled
        # (broadcast result must equal elementwise scalar evaluation).
        c = np.array([0.1, 0.2, 0.3])
        d = np.array([0.5, 0.6])[:, None]
        x = np.array([0.15, 0.25, 0.9])
        v = stats.trapz.pdf(x, c, d)

        cc, dd, xx = np.broadcast_arrays(c, d, x)

        res = np.empty(xx.size, dtype=xx.dtype)
        ind = np.arange(xx.size)
        for i, x1, c1, d1 in zip(ind, xx.ravel(), cc.ravel(), dd.ravel()):
            res[i] = stats.trapz.pdf(x1, c1, d1)

        assert_allclose(v, res.reshape(v.shape), atol=1e-15)
class TestTriang(object):
    """Edge-case checks for the triangular distribution."""

    def test_edge_cases(self):
        # Degenerate modes c=0 and c=1 must not raise floating-point errors.
        with np.errstate(all='raise'):
            assert_equal(stats.triang.pdf(0, 0), 2.)
            assert_equal(stats.triang.pdf(0.5, 0), 1.)
            assert_equal(stats.triang.pdf(1, 0), 0.)

            assert_equal(stats.triang.pdf(0, 1), 0)
            assert_equal(stats.triang.pdf(0.5, 1), 1.)
            assert_equal(stats.triang.pdf(1, 1), 2)

            assert_equal(stats.triang.cdf(0., 0.), 0.)
            assert_equal(stats.triang.cdf(0.5, 0.), 0.75)
            assert_equal(stats.triang.cdf(1.0, 0.), 1.0)

            assert_equal(stats.triang.cdf(0., 1.), 0.)
            assert_equal(stats.triang.cdf(0.5, 1.), 0.25)
            assert_equal(stats.triang.cdf(1., 1.), 1)
class TestMielke(object):
    """Tests for Mielke's beta-kappa distribution."""

    def test_moments(self):
        k, s = 4.642, 0.597
        # n-th moment exists only if n < s
        assert_equal(stats.mielke(k, s).moment(1), np.inf)
        assert_equal(stats.mielke(k, 1.0).moment(1), np.inf)
        assert_(np.isfinite(stats.mielke(k, 1.01).moment(1)))

    def test_burr_equivalence(self):
        # mielke(k, s) coincides with burr(s, k/s).
        x = np.linspace(0.01, 100, 50)
        k, s = 2.45, 5.32
        assert_allclose(stats.burr.pdf(x, s, k/s), stats.mielke.pdf(x, k, s))
class TestBurr(object):
    """Tests for the Burr (and Burr XII / Fisk) distributions."""

    def test_endpoints_7491(self):
        # gh-7491
        # Compute the pdf at the left endpoint dst.a.
        data = [
            [stats.fisk, (1,), 1],
            [stats.burr, (0.5, 2), 1],
            [stats.burr, (1, 1), 1],
            [stats.burr, (2, 0.5), 1],
            [stats.burr12, (1, 0.5), 0.5],
            [stats.burr12, (1, 1), 1.0],
            [stats.burr12, (1, 2), 2.0]]

        ans = [_f.pdf(_f.a, *_args) for _f, _args, _ in data]
        correct = [_correct_ for _f, _args, _correct_ in data]
        assert_array_almost_equal(ans, correct)

        ans = [_f.logpdf(_f.a, *_args) for _f, _args, _ in data]
        correct = [np.log(_correct_) for _f, _args, _correct_ in data]
        assert_array_almost_equal(ans, correct)

    def test_burr_stats_9544(self):
        # gh-9544.  Test from gh-9978
        c, d = 5.0, 3
        mean, variance = stats.burr(c, d).stats()
        # mean = sc.beta(3 + 1/5, 1. - 1/5) * 3 = 1.4110263...
        # var = sc.beta(3 + 2 / 5, 1. - 2 / 5) * 3 - (sc.beta(3 + 1 / 5, 1. - 1 / 5) * 3) ** 2
        mean_hc, variance_hc = 1.4110263183925857, 0.22879948026191643
        assert_allclose(mean, mean_hc)
        assert_allclose(variance, variance_hc)

    def test_burr_nan_mean_var_9544(self):
        # gh-9544.  Test from gh-9978
        # With d fixed at 3, increasing c makes successively higher raw
        # moments finite; non-existent moments must come back as nan.
        c, d = 0.5, 3
        mean, variance = stats.burr(c, d).stats()
        assert_(np.isnan(mean))
        assert_(np.isnan(variance))

        c, d = 1.5, 3
        mean, variance = stats.burr(c, d).stats()
        assert_(np.isfinite(mean))
        assert_(np.isnan(variance))

        c, d = 0.5, 3
        e1, e2, e3, e4 = stats.burr._munp(np.array([1, 2, 3, 4]), c, d)
        assert_(np.isnan(e1))
        assert_(np.isnan(e2))
        assert_(np.isnan(e3))
        assert_(np.isnan(e4))

        c, d = 1.5, 3
        e1, e2, e3, e4 = stats.burr._munp([1, 2, 3, 4], c, d)
        assert_(np.isfinite(e1))
        assert_(np.isnan(e2))
        assert_(np.isnan(e3))
        assert_(np.isnan(e4))

        c, d = 2.5, 3
        e1, e2, e3, e4 = stats.burr._munp([1, 2, 3, 4], c, d)
        assert_(np.isfinite(e1))
        assert_(np.isfinite(e2))
        assert_(np.isnan(e3))
        assert_(np.isnan(e4))

        c, d = 3.5, 3
        e1, e2, e3, e4 = stats.burr._munp([1, 2, 3, 4], c, d)
        assert_(np.isfinite(e1))
        assert_(np.isfinite(e2))
        assert_(np.isfinite(e3))
        assert_(np.isnan(e4))

        c, d = 4.5, 3
        e1, e2, e3, e4 = stats.burr._munp([1, 2, 3, 4], c, d)
        assert_(np.isfinite(e1))
        assert_(np.isfinite(e2))
        assert_(np.isfinite(e3))
        assert_(np.isfinite(e4))
def test_540_567():
    # test for nan returned in tickets 540, 567
    # norm.cdf used to produce nan for these inputs; pin correct values.
    assert_almost_equal(stats.norm.cdf(-1.7624320982), 0.03899815971089126,
                        decimal=10, err_msg='test_540_567')
    assert_almost_equal(stats.norm.cdf(-1.7624320983), 0.038998159702449846,
                        decimal=10, err_msg='test_540_567')
    assert_almost_equal(stats.norm.cdf(1.38629436112, loc=0.950273420309,
                                       scale=0.204423758009),
                        0.98353464004309321,
                        decimal=10, err_msg='test_540_567')
def test_regression_ticket_1316():
    # The following was raising an exception, because _construct_default_doc()
    # did not handle the default keyword extradoc=None.  See ticket #1316.
    # Constructing the generator is the whole test; no assertion needed.
    stats._continuous_distns.gamma_gen(name='gamma')
def test_regression_ticket_1326():
    # adjust to avoid nan with 0*log(0)
    # chi2.pdf(0, 2) is exactly 1/2; the naive formula produced nan.
    assert_almost_equal(stats.chi2.pdf(0.0, 2), 0.5, 14)
def test_regression_tukey_lambda():
    """tukeylambda.pdf must handle non-positive shape parameters.

    Regression test: the pdf used to misbehave for lam <= 0.
    """
    x = np.linspace(-5.0, 5.0, 101)
    # Use the np.errstate context manager (as elsewhere in this file)
    # instead of a manual seterr/try/finally dance; the log-based
    # implementation can emit harmless divide-by-zero warnings.
    with np.errstate(divide='ignore'):
        for lam in [0.0, -1.0, -2.0, np.array([[-1.0], [0.0], [-2.0]])]:
            p = stats.tukeylambda.pdf(x, lam)
            # For lam <= 0 the support is the whole real line: no zeros.
            assert_((p != 0.0).all())
            assert_(~np.isnan(p).all())

        # Mixed lambdas broadcast against x; lam=2 has bounded support,
        # so its row is allowed to (and does) contain zeros.
        lam = np.array([[-1.0], [0.0], [2.0]])
        p = stats.tukeylambda.pdf(x, lam)

    assert_(~np.isnan(p).all())
    assert_((p[0] != 0.0).all())
    assert_((p[1] != 0.0).all())
    assert_((p[2] != 0.0).any())
    assert_((p[2] == 0.0).any())
@pytest.mark.skipif(DOCSTRINGS_STRIPPED, reason="docstrings stripped")
def test_regression_ticket_1421():
    # Discrete distributions must document pmf(x, ...), not the continuous
    # pdf(x, ...) signature (ticket 1421).
    assert_('pdf(x, mu, loc=0, scale=1)' not in stats.poisson.__doc__)
    assert_('pmf(x,' in stats.poisson.__doc__)
def test_nan_arguments_gh_issue_1362():
    """nan arguments must propagate to nan results (gh-1362)."""
    with np.errstate(invalid='ignore'):
        # Continuous case: t with a nan shape parameter, evaluated at 1.
        for name in ('logcdf', 'cdf', 'logsf', 'sf',
                     'pdf', 'logpdf', 'ppf', 'isf'):
            method = getattr(stats.t, name)
            assert_(np.isnan(method(1, np.nan)))

        # Discrete case: bernoulli(0.5) evaluated at a nan quantile.
        for name in ('logcdf', 'cdf', 'logsf', 'sf',
                     'pmf', 'logpmf', 'ppf', 'isf'):
            method = getattr(stats.bernoulli, name)
            assert_(np.isnan(method(np.nan, 0.5)))
def test_frozen_fit_ticket_1536():
    """fit() must honour frozen parameters (f0/floc/fscale), ticket 1536."""
    np.random.seed(5678)
    true = np.array([0.25, 0., 0.5])
    x = stats.lognorm.rvs(true[0], true[1], true[2], size=100)

    # Use the np.errstate context manager (as elsewhere in this file)
    # instead of a manual seterr/try/finally dance.
    with np.errstate(divide='ignore'):
        params = np.array(stats.lognorm.fit(x, floc=0.))

    assert_almost_equal(params, true, decimal=2)

    params = np.array(stats.lognorm.fit(x, fscale=0.5, loc=0))
    assert_almost_equal(params, true, decimal=2)

    params = np.array(stats.lognorm.fit(x, f0=0.25, loc=0))
    assert_almost_equal(params, true, decimal=2)

    params = np.array(stats.lognorm.fit(x, f0=0.25, floc=0))
    assert_almost_equal(params, true, decimal=2)

    np.random.seed(5678)
    loc = 1
    floc = 0.9
    x = stats.norm.rvs(loc, 2., size=100)
    params = np.array(stats.norm.fit(x, floc=floc))
    # With loc frozen, the ML estimate of scale is the RMS deviation
    # of the data about floc.
    expected = np.array([floc, np.sqrt(((x - floc)**2).mean())])
    assert_almost_equal(params, expected, decimal=4)
def test_regression_ticket_1530():
    # Check the starting value works for Cauchy distribution fit.
    # The expected location/scale depend on the fixed rvs seed below.
    np.random.seed(654321)
    rvs = stats.cauchy.rvs(size=100)
    params = stats.cauchy.fit(rvs)
    expected = (0.045, 1.142)
    assert_almost_equal(params, expected, decimal=1)
def test_gh_pr_4806():
    # Check starting values for Cauchy distribution fit.
    # Large location offsets used to defeat the fit's initial guess.
    np.random.seed(1234)
    x = np.random.randn(42)
    for offset in 10000.0, 1222333444.0:
        loc, scale = stats.cauchy.fit(x + offset)
        assert_allclose(loc, offset, atol=1.0)
        assert_allclose(scale, 0.6, atol=1.0)
def test_tukeylambda_stats_ticket_1545():
    # Some test for the variance and kurtosis of the Tukey Lambda distr.
    # See test_tukeylamdba_stats.py for more tests.

    mv = stats.tukeylambda.stats(0, moments='mvsk')
    # Known exact values:
    expected = [0, np.pi**2/3, 0, 1.2]
    assert_almost_equal(mv, expected, decimal=10)

    mv = stats.tukeylambda.stats(3.13, moments='mvsk')
    # 'expected' computed with mpmath.
    expected = [0, 0.0269220858861465102, 0, -0.898062386219224104]
    assert_almost_equal(mv, expected, decimal=10)

    mv = stats.tukeylambda.stats(0.14, moments='mvsk')
    # 'expected' computed with mpmath.
    expected = [0, 2.11029702221450250, 0, -0.02708377353223019456]
    assert_almost_equal(mv, expected, decimal=10)
def test_poisson_logpmf_ticket_1436():
    """logpmf far in the tail must stay finite (ticket 1436)."""
    tail_value = stats.poisson.logpmf(1500, 200)
    assert_(np.isfinite(tail_value))
def test_powerlaw_stats():
    """Test the powerlaw stats function.

    This unit test is also a regression test for ticket 1548.

    The exact values are:
    mean:
        mu = a / (a + 1)
    variance:
        sigma**2 = a / ((a + 2) * (a + 1) ** 2)
    skewness:
        One formula (see https://en.wikipedia.org/wiki/Skewness) is
            gamma_1 = (E[X**3] - 3*mu*E[X**2] + 2*mu**3) / sigma**3
        A short calculation shows that E[X**k] is a / (a + k), so gamma_1
        can be implemented as
            n = a/(a+3) - 3*(a/(a+1))*a/(a+2) + 2*(a/(a+1))**3
            d = sqrt(a/((a+2)*(a+1)**2)) ** 3
            gamma_1 = n/d
        Either by simplifying, or by a direct calculation of mu_3 / sigma**3,
        one gets the more concise formula:
            gamma_1 = -2.0 * ((a - 1) / (a + 3)) * sqrt((a + 2) / a)
    kurtosis: (See https://en.wikipedia.org/wiki/Kurtosis)
        The excess kurtosis is
            gamma_2 = mu_4 / sigma**4 - 3
        A bit of calculus and algebra (sympy helps) shows that
            mu_4 = 3*a*(3*a**2 - a + 2) / ((a+1)**4 * (a+2) * (a+3) * (a+4))
        so
            gamma_2 = 3*(3*a**2 - a + 2) * (a+2) / (a*(a+3)*(a+4)) - 3
        which can be rearranged to
            gamma_2 = 6 * (a**3 - a**2 - 6*a + 2) / (a*(a+3)*(a+4))
    """
    # Each entry is (a, (mean, variance, skewness, excess kurtosis)).
    cases = [(1.0, (0.5, 1./12, 0.0, -1.2)),
             (2.0, (2./3, 2./36, -0.56568542494924734, -0.6))]
    for a, exact_mvsk in cases:
        mvsk = stats.powerlaw.stats(a, moments="mvsk")
        assert_array_almost_equal(mvsk, exact_mvsk)
def test_powerlaw_edge():
    """logpdf at the left endpoint x=0 with a=1 must be exactly 0 (gh-3986)."""
    endpoint_logpdf = stats.powerlaw.logpdf(0, 1)
    assert_equal(endpoint_logpdf, 0.0)
def test_exponpow_edge():
    # Regression test for gh-3982.
    p = stats.exponpow.logpdf(0, 1)
    assert_equal(p, 0.0)

    # Check pdf and logpdf at x = 0 for other values of b.
    # For b < 1 the density diverges at 0; for b > 1 it vanishes.
    p = stats.exponpow.pdf(0, [0.25, 1.0, 1.5])
    assert_equal(p, [np.inf, 1.0, 0.0])
    p = stats.exponpow.logpdf(0, [0.25, 1.0, 1.5])
    assert_equal(p, [np.inf, 0.0, -np.inf])
def test_gengamma_edge():
    # Regression test for gh-3985.
    p = stats.gengamma.pdf(0, 1, 1)
    assert_equal(p, 1.0)

    # Regression tests for gh-4724.
    # Negative-order moments with large/small shape parameters.
    p = stats.gengamma._munp(-2, 200, 1.)
    assert_almost_equal(p, 1./199/198)

    p = stats.gengamma._munp(-2, 10, 1.)
    assert_almost_equal(p, 1./9/8)
def test_ksone_fit_freeze():
    # Regression test for ticket #1638.
    # ksone.fit on this data used to hang; completing without freezing
    # is the whole test.
    d = np.array(
        [-0.18879233, 0.15734249, 0.18695107, 0.27908787, -0.248649,
         -0.2171497, 0.12233512, 0.15126419, 0.03119282, 0.4365294,
         0.08930393, -0.23509903, 0.28231224, -0.09974875, -0.25196048,
         0.11102028, 0.1427649, 0.10176452, 0.18754054, 0.25826724,
         0.05988819, 0.0531668, 0.21906056, 0.32106729, 0.2117662,
         0.10886442, 0.09375789, 0.24583286, -0.22968366, -0.07842391,
         -0.31195432, -0.21271196, 0.1114243, -0.13293002, 0.01331725,
         -0.04330977, -0.09485776, -0.28434547, 0.22245721, -0.18518199,
         -0.10943985, -0.35243174, 0.06897665, -0.03553363, -0.0701746,
         -0.06037974, 0.37670779, -0.21684405])

    # NOTE(review): np.seterr is called *inside* the try block; if it ever
    # raised, the finally clause would hit an unbound `olderr`.  Kept as-is
    # to avoid disturbing the warning-suppression ordering.
    try:
        olderr = np.seterr(invalid='ignore')
        with suppress_warnings() as sup:
            sup.filter(IntegrationWarning,
                       "The maximum number of subdivisions .50. has been "
                       "achieved.")
            sup.filter(RuntimeWarning,
                       "floating point number truncated to an integer")
            stats.ksone.fit(d)
    finally:
        np.seterr(**olderr)
def test_norm_logcdf():
    # Test precision of the logcdf of the normal distribution.
    # This precision was enhanced in ticket 1614.
    x = -np.asarray(list(range(0, 120, 4)))
    # Values from R
    expected = [-0.69314718, -10.36010149, -35.01343716, -75.41067300,
                -131.69539607, -203.91715537, -292.09872100, -396.25241451,
                -516.38564863, -652.50322759, -804.60844201, -972.70364403,
                -1156.79057310, -1356.87055173, -1572.94460885, -1805.01356068,
                -2053.07806561, -2317.13866238, -2597.19579746, -2893.24984493,
                -3205.30112136, -3533.34989701, -3877.39640444, -4237.44084522,
                -4613.48339520, -5005.52420869, -5413.56342187, -5837.60115548,
                -6277.63751711, -6733.67260303]

    assert_allclose(stats.norm().logcdf(x), expected, atol=1e-8)

    # also test the complex-valued code path
    assert_allclose(stats.norm().logcdf(x + 1e-14j).real, expected, atol=1e-8)

    # test the accuracy: d(logcdf)/dx = pdf / cdf \equiv exp(logpdf - logcdf)
    # (complex-step derivative vs. the analytic identity)
    deriv = (stats.norm.logcdf(x + 1e-10j)/1e-10).imag
    deriv_expected = np.exp(stats.norm.logpdf(x) - stats.norm.logcdf(x))
    assert_allclose(deriv, deriv_expected, atol=1e-10)
def test_levy_cdf_ppf():
    # Test levy.cdf, including small arguments.
    x = np.array([1000, 1.0, 0.5, 0.1, 0.01, 0.001])

    # Expected values were calculated separately with mpmath.
    # E.g.
    # >>> mpmath.mp.dps = 100
    # >>> x = mpmath.mp.mpf('0.01')
    # >>> cdf = mpmath.erfc(mpmath.sqrt(1/(2*x)))
    expected = np.array([0.9747728793699604,
                         0.3173105078629141,
                         0.1572992070502851,
                         0.0015654022580025495,
                         1.523970604832105e-23,
                         1.795832784800726e-219])

    y = stats.levy.cdf(x)
    assert_allclose(y, expected, rtol=1e-10)

    # ppf(expected) should get us back to x.
    xx = stats.levy.ppf(expected)
    assert_allclose(xx, x, rtol=1e-13)
def test_hypergeom_interval_1802():
    # these two had endless loops
    assert_equal(stats.hypergeom.interval(.95, 187601, 43192, 757),
                 (152.0, 197.0))
    assert_equal(stats.hypergeom.interval(.945, 187601, 43192, 757),
                 (152.0, 197.0))
    # this was working also before
    assert_equal(stats.hypergeom.interval(.94, 187601, 43192, 757),
                 (153.0, 196.0))

    # degenerate case .a == .b
    # (all 100 marked in a population of 100: support is the single point 8)
    assert_equal(stats.hypergeom.ppf(0.02, 100, 100, 8), 8)
    assert_equal(stats.hypergeom.ppf(1, 100, 100, 8), 8)
def test_distribution_too_many_args():
    np.random.seed(1234)

    # Check that a TypeError is raised when too many args are given to a method
    # Regression test for ticket 1815.
    x = np.linspace(0.1, 0.7, num=5)
    assert_raises(TypeError, stats.gamma.pdf, x, 2, 3, loc=1.0)
    assert_raises(TypeError, stats.gamma.pdf, x, 2, 3, 4, loc=1.0)
    assert_raises(TypeError, stats.gamma.pdf, x, 2, 3, 4, 5)
    assert_raises(TypeError, stats.gamma.pdf, x, 2, 3, loc=1.0, scale=0.5)
    assert_raises(TypeError, stats.gamma.rvs, 2., 3, loc=1.0, scale=0.5)
    assert_raises(TypeError, stats.gamma.cdf, x, 2., 3, loc=1.0, scale=0.5)
    assert_raises(TypeError, stats.gamma.ppf, x, 2., 3, loc=1.0, scale=0.5)
    assert_raises(TypeError, stats.gamma.stats, 2., 3, loc=1.0, scale=0.5)
    assert_raises(TypeError, stats.gamma.entropy, 2., 3, loc=1.0, scale=0.5)
    assert_raises(TypeError, stats.gamma.fit, x, 2., 3, loc=1.0, scale=0.5)

    # These should not give errors
    stats.gamma.pdf(x, 2, 3)  # loc=3
    stats.gamma.pdf(x, 2, 3, 4)  # loc=3, scale=4
    stats.gamma.stats(2., 3)
    stats.gamma.stats(2., 3, 4)
    stats.gamma.stats(2., 3, 4, 'mv')
    stats.gamma.rvs(2., 3, 4, 5)
    stats.gamma.fit(stats.gamma.rvs(2., size=7), 2.)

    # Also for a discrete distribution
    stats.geom.pmf(x, 2, loc=3)  # no error, loc=3
    assert_raises(TypeError, stats.geom.pmf, x, 2, 3, 4)
    assert_raises(TypeError, stats.geom.pmf, x, 2, 3, loc=4)

    # And for distributions with 0, 2 and 3 args respectively
    assert_raises(TypeError, stats.expon.pdf, x, 3, loc=1.0)
    assert_raises(TypeError, stats.exponweib.pdf, x, 3, 4, 5, loc=1.0)
    assert_raises(TypeError, stats.exponweib.pdf, x, 3, 4, 5, 0.1, 0.1)
    assert_raises(TypeError, stats.ncf.pdf, x, 3, 4, 5, 6, loc=1.0)
    assert_raises(TypeError, stats.ncf.pdf, x, 3, 4, 5, 6, 1.0, scale=0.5)
    stats.ncf.pdf(x, 3, 4, 5, 6, 1.0)  # 3 args, plus loc/scale
def test_ncx2_tails_ticket_955():
    # Trac #955 -- check that the cdf computed by special functions
    # matches the integrated pdf
    a = stats.ncx2.cdf(np.arange(20, 25, 0.2), 2, 1.07458615e+02)
    b = stats.ncx2._cdfvec(np.arange(20, 25, 0.2), 2, 1.07458615e+02)
    assert_allclose(a, b, rtol=1e-3, atol=0)
def test_ncx2_tails_pdf():
    # ncx2.pdf does not return nans in extreme tails(example from gh-1577)
    # NB: this is to check that nan_to_num is not needed in ncx2.pdf
    with suppress_warnings() as sup:
        sup.filter(RuntimeWarning, "divide by zero encountered in log")
        assert_equal(stats.ncx2.pdf(1, np.arange(340, 350), 2), 0)
        logval = stats.ncx2.logpdf(1, np.arange(340, 350), 2)

    # pdf underflows to exactly 0, so logpdf must be -inf everywhere.
    assert_(np.isneginf(logval).all())
@pytest.mark.parametrize('method, expected', [
    ('cdf', np.array([2.497951336e-09, 3.437288941e-10])),
    ('pdf', np.array([1.238579980e-07, 1.710041145e-08])),
    ('logpdf', np.array([-15.90413011, -17.88416331])),
    ('ppf', np.array([4.865182052, 7.017182271]))
])
def test_ncx2_zero_nc(method, expected):
    # gh-5441
    # ncx2 with nc=0 is identical to chi2
    # Comparison to R (v3.5.1)
    # > options(digits=10)
    # > pchisq(0.1, df=10, ncp=c(0,4))
    # > dchisq(0.1, df=10, ncp=c(0,4))
    # > dchisq(0.1, df=10, ncp=c(0,4), log=TRUE)
    # > qchisq(0.1, df=10, ncp=c(0,4))
    result = getattr(stats.ncx2, method)(0.1, nc=[0, 4], df=10)
    assert_allclose(result, expected, atol=1e-15)
def test_ncx2_zero_nc_rvs():
    """Sampling ncx2 with nc=0 must reproduce chi2 sampling (gh-5441)."""
    seed = 1
    got = stats.ncx2.rvs(df=10, nc=0, random_state=seed)
    want = stats.chi2.rvs(df=10, random_state=seed)
    assert_allclose(got, want, atol=1e-15)
def test_foldnorm_zero():
    """foldnorm must accept c=0; its cdf(0) used to come out nan (gh-2399)."""
    frozen = stats.foldnorm(0, scale=1)
    assert_equal(frozen.cdf(0), 0)
def test_stats_shapes_argcheck():
    # stats method was failing for vector shapes if some of the values
    # were outside of the allowed range, see gh-2678
    mv3 = stats.invgamma.stats([0.0, 0.5, 1.0], 1, 0.5)  # 0 is not a legal `a`
    mv2 = stats.invgamma.stats([0.5, 1.0], 1, 0.5)
    # Invalid entries must map to nan, valid entries must be unaffected.
    mv2_augmented = tuple(np.r_[np.nan, _] for _ in mv2)
    assert_equal(mv2_augmented, mv3)

    # -1 is not a legal shape parameter
    mv3 = stats.lognorm.stats([2, 2.4, -1])
    mv2 = stats.lognorm.stats([2, 2.4])
    mv2_augmented = tuple(np.r_[_, np.nan] for _ in mv2)
    assert_equal(mv2_augmented, mv3)
# FIXME: this is only a quick-and-dirty test of a quick-and-dirty bugfix.
# stats method with multiple shape parameters is not properly vectorized
# anyway, so some distributions may or may not fail.
# Test subclassing distributions w/ explicit shapes
class _distr_gen(stats.rv_continuous):
    # Minimal distribution with one shape parameter; the pdf is the
    # sentinel constant 42 so tests can detect that _pdf was reached.
    def _pdf(self, x, a):
        return 42
class _distr2_gen(stats.rv_continuous):
    # Distribution defined only via _cdf; _pdf must be derived numerically.
    def _cdf(self, x, a):
        return 42 * a + x
class _distr3_gen(stats.rv_continuous):
    # Deliberately inconsistent: _pdf takes two shapes, _cdf takes one.
    def _pdf(self, x, a, b):
        return a + b

    def _cdf(self, x, a):
        # Different # of shape params from _pdf, to be able to check that
        # inspection catches the inconsistency."""
        return 42 * a + x
class _distr6_gen(stats.rv_continuous):
    # Two shape parameters (both _pdf and _cdf defined, consistent shapes.)
    def _pdf(self, x, a, b):
        return a*x + b

    def _cdf(self, x, a, b):
        return 42 * a + x
class TestSubclassingExplicitShapes(object):
    """Construct a distribution w/ explicit shapes parameter and test it."""

    def test_correct_shapes(self):
        dummy_distr = _distr_gen(name='dummy', shapes='a')
        assert_equal(dummy_distr.pdf(1, a=1), 42)

    def test_wrong_shapes_1(self):
        dummy_distr = _distr_gen(name='dummy', shapes='A')
        assert_raises(TypeError, dummy_distr.pdf, 1, **dict(a=1))

    def test_wrong_shapes_2(self):
        dummy_distr = _distr_gen(name='dummy', shapes='a, b, c')
        dct = dict(a=1, b=2, c=3)
        assert_raises(TypeError, dummy_distr.pdf, 1, **dct)

    def test_shapes_string(self):
        # shapes must be a string
        dct = dict(name='dummy', shapes=42)
        assert_raises(TypeError, _distr_gen, **dct)

    def test_shapes_identifiers_1(self):
        # shapes must be a comma-separated list of valid python identifiers
        dct = dict(name='dummy', shapes='(!)')
        assert_raises(SyntaxError, _distr_gen, **dct)

    def test_shapes_identifiers_2(self):
        dct = dict(name='dummy', shapes='4chan')
        assert_raises(SyntaxError, _distr_gen, **dct)

    def test_shapes_identifiers_3(self):
        dct = dict(name='dummy', shapes='m(fti)')
        assert_raises(SyntaxError, _distr_gen, **dct)

    def test_shapes_identifiers_nodefaults(self):
        dct = dict(name='dummy', shapes='a=2')
        assert_raises(SyntaxError, _distr_gen, **dct)

    def test_shapes_args(self):
        dct = dict(name='dummy', shapes='*args')
        assert_raises(SyntaxError, _distr_gen, **dct)

    def test_shapes_kwargs(self):
        dct = dict(name='dummy', shapes='**kwargs')
        assert_raises(SyntaxError, _distr_gen, **dct)

    def test_shapes_keywords(self):
        # python keywords cannot be used for shape parameters
        dct = dict(name='dummy', shapes='a, b, c, lambda')
        assert_raises(SyntaxError, _distr_gen, **dct)

    def test_shapes_signature(self):
        # test explicit shapes which agree w/ the signature of _pdf
        class _dist_gen(stats.rv_continuous):
            def _pdf(self, x, a):
                return stats.norm._pdf(x) * a

        dist = _dist_gen(shapes='a')
        assert_equal(dist.pdf(0.5, a=2), stats.norm.pdf(0.5)*2)

    def test_shapes_signature_inconsistent(self):
        # test explicit shapes which do not agree w/ the signature of _pdf
        class _dist_gen(stats.rv_continuous):
            def _pdf(self, x, a):
                return stats.norm._pdf(x) * a

        dist = _dist_gen(shapes='a, b')
        assert_raises(TypeError, dist.pdf, 0.5, **dict(a=1, b=2))

    def test_star_args(self):
        # test _pdf with only starargs
        # NB: **kwargs of pdf will never reach _pdf
        class _dist_gen(stats.rv_continuous):
            def _pdf(self, x, *args):
                extra_kwarg = args[0]
                return stats.norm._pdf(x) * extra_kwarg

        dist = _dist_gen(shapes='extra_kwarg')
        assert_equal(dist.pdf(0.5, extra_kwarg=33), stats.norm.pdf(0.5)*33)
        assert_equal(dist.pdf(0.5, 33), stats.norm.pdf(0.5)*33)
        assert_raises(TypeError, dist.pdf, 0.5, **dict(xxx=33))

    def test_star_args_2(self):
        # test _pdf with named & starargs
        # NB: **kwargs of pdf will never reach _pdf
        class _dist_gen(stats.rv_continuous):
            def _pdf(self, x, offset, *args):
                extra_kwarg = args[0]
                return stats.norm._pdf(x) * extra_kwarg + offset

        dist = _dist_gen(shapes='offset, extra_kwarg')
        assert_equal(dist.pdf(0.5, offset=111, extra_kwarg=33),
                     stats.norm.pdf(0.5)*33 + 111)
        assert_equal(dist.pdf(0.5, 111, 33),
                     stats.norm.pdf(0.5)*33 + 111)

    def test_extra_kwarg(self):
        # **kwargs to _pdf are ignored.
        # this is a limitation of the framework (_pdf(x, *goodargs))
        class _distr_gen(stats.rv_continuous):
            def _pdf(self, x, *args, **kwargs):
                # _pdf should handle *args, **kwargs itself.  Here "handling"
                # is ignoring *args and looking for ``extra_kwarg`` and using
                # that.
                extra_kwarg = kwargs.pop('extra_kwarg', 1)
                return stats.norm._pdf(x) * extra_kwarg

        dist = _distr_gen(shapes='extra_kwarg')
        assert_equal(dist.pdf(1, extra_kwarg=3), stats.norm.pdf(1))

    def test_shapes_empty_string(self):
        # shapes='' is equivalent to shapes=None
        # NOTE: renamed from ``shapes_empty_string``; without the ``test_``
        # prefix pytest never collected it, so the check was dead code.
        class _dist_gen(stats.rv_continuous):
            def _pdf(self, x):
                return stats.norm.pdf(x)

        dist = _dist_gen(shapes='')
        assert_equal(dist.pdf(0.5), stats.norm.pdf(0.5))
class TestSubclassingNoShapes(object):
    # Construct a distribution w/o explicit shapes parameter and test it.
    # Shape parameters must then be inferred by inspecting _pdf/_cdf.

    def test_only__pdf(self):
        dummy_distr = _distr_gen(name='dummy')
        assert_equal(dummy_distr.pdf(1, a=1), 42)

    def test_only__cdf(self):
        # _pdf is determined from _cdf by taking numerical derivative
        dummy_distr = _distr2_gen(name='dummy')
        assert_almost_equal(dummy_distr.pdf(1, a=1), 1)

    @pytest.mark.skipif(DOCSTRINGS_STRIPPED, reason="docstring stripped")
    def test_signature_inspection(self):
        # check that _pdf signature inspection works correctly, and is used in
        # the class docstring
        dummy_distr = _distr_gen(name='dummy')
        assert_equal(dummy_distr.numargs, 1)
        assert_equal(dummy_distr.shapes, 'a')
        res = re.findall(r'logpdf\(x, a, loc=0, scale=1\)',
                         dummy_distr.__doc__)
        assert_(len(res) == 1)

    @pytest.mark.skipif(DOCSTRINGS_STRIPPED, reason="docstring stripped")
    def test_signature_inspection_2args(self):
        # same for 2 shape params and both _pdf and _cdf defined
        dummy_distr = _distr6_gen(name='dummy')
        assert_equal(dummy_distr.numargs, 2)
        assert_equal(dummy_distr.shapes, 'a, b')
        res = re.findall(r'logpdf\(x, a, b, loc=0, scale=1\)',
                         dummy_distr.__doc__)
        assert_(len(res) == 1)

    def test_signature_inspection_2args_incorrect_shapes(self):
        # both _pdf and _cdf defined, but shapes are inconsistent: raises
        assert_raises(TypeError, _distr3_gen, name='dummy')

    def test_defaults_raise(self):
        # default arguments should raise
        class _dist_gen(stats.rv_continuous):
            def _pdf(self, x, a=42):
                return 42
        assert_raises(TypeError, _dist_gen, **dict(name='dummy'))

    def test_starargs_raise(self):
        # without explicit shapes, *args are not allowed
        class _dist_gen(stats.rv_continuous):
            def _pdf(self, x, a, *args):
                return 42
        assert_raises(TypeError, _dist_gen, **dict(name='dummy'))

    def test_kwargs_raise(self):
        # without explicit shapes, **kwargs are not allowed
        class _dist_gen(stats.rv_continuous):
            def _pdf(self, x, a, **kwargs):
                return 42
        assert_raises(TypeError, _dist_gen, **dict(name='dummy'))
@pytest.mark.skipif(DOCSTRINGS_STRIPPED, reason="docstring stripped")
def test_docstrings():
    # Patterns that indicate a malformed, auto-generated docstring.
    bad_patterns = [r',\s*,', r'\(\s*,', r'^\s*:']
    for name in stats.__all__:
        candidate = getattr(stats, name)
        if not isinstance(candidate, (stats.rv_discrete, stats.rv_continuous)):
            continue
        for pattern in bad_patterns:
            assert_(re.search(pattern, candidate.__doc__) is None)
def test_infinite_input():
    # skellam's survival function must vanish at +inf; ncx2's CDF must be 1.
    for value, expected in [(stats.skellam.sf(np.inf, 10, 11), 0),
                            (stats.ncx2._cdf(np.inf, 8, 0.1), 1)]:
        assert_almost_equal(value, expected)
def test_lomax_accuracy():
    """Regression test for gh-4033: ppf(cdf(x)) round-trips at tiny x."""
    tiny = 1e-100
    roundtrip = stats.lomax.ppf(stats.lomax.cdf(tiny, 1), 1)
    assert_allclose(roundtrip, tiny)
def test_gompertz_accuracy():
    """Regression test for gh-4031: ppf(cdf(x)) round-trips at tiny x."""
    tiny = 1e-100
    roundtrip = stats.gompertz.ppf(stats.gompertz.cdf(tiny, 1), 1)
    assert_allclose(roundtrip, tiny)
def test_truncexpon_accuracy():
    """Regression test for gh-4035: ppf(cdf(x)) round-trips at tiny x."""
    tiny = 1e-100
    roundtrip = stats.truncexpon.ppf(stats.truncexpon.cdf(tiny, 1), 1)
    assert_allclose(roundtrip, tiny)
def test_rayleigh_accuracy():
    """Regression test for gh-4034: isf(sf(x)) round-trips to high precision."""
    recovered = stats.rayleigh.isf(stats.rayleigh.sf(9, 1), 1)
    assert_almost_equal(recovered, 9.0, decimal=15)
def test_genextreme_give_no_warnings():
    """regression test for gh-6219"""
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        # Exercise the code paths that used to emit warnings.
        stats.genextreme.cdf(.5, 0)
        stats.genextreme.pdf(.5, 0)
        stats.genextreme.ppf(.5, 0)
        stats.genextreme.logpdf(-np.inf, 0.0)
        assert_equal(len(caught), 0)
def test_genextreme_entropy():
    # regression test for gh-5181
    euler_gamma = 0.5772156649015329
    # (computed entropy, closed-form expected value) pairs
    cases = [
        (stats.genextreme.entropy(-1.0), 2*euler_gamma + 1),
        (stats.genextreme.entropy(0), euler_gamma + 1),
        (stats.genextreme.entropy(-2.0, scale=10),
         euler_gamma*3 + np.log(10) + 1),
        (stats.genextreme.entropy(10), -9*euler_gamma + 1),
        (stats.genextreme.entropy(-10), 11*euler_gamma + 1),
    ]
    for computed, expected in cases:
        assert_allclose(computed, expected, rtol=1e-14)
    # shape c = 1 gives entropy exactly 1
    assert_equal(stats.genextreme.entropy(1.0), 1)
def test_genextreme_sf_isf():
    # Expected sf values were computed with mpmath (mp.dps = 1000) using the
    # Wikipedia GEV formula, whose sign convention for the shape parameter is
    # the opposite of scipy's:
    #     t  = (1 + xi*(x - mu)/sigma)**(-1/xi)    if xi != 0
    #     t  = exp(-(x - mu)/sigma)                if xi == 0
    #     sf = 1 - exp(-t)
    cases = [
        (1e8, -0.125, 1.6777205262585625e-57),
        (7.98, 0.125, 1.52587890625e-21),
        (7.98, 0, 0.00034218086528426593),
    ]
    for x, shape, expected_sf in cases:
        s = stats.genextreme.sf(x, shape)
        assert_allclose(s, expected_sf)
        # isf must invert sf back to the original quantile.
        assert_allclose(stats.genextreme.isf(s, shape), x)
def test_burr12_ppf_small_arg():
    # The expected quantile was computed using mpmath (mp.dps = 100):
    #     float(((1 - prob)**(-1/d) - 1)**(1/c))   with c=2, d=3
    # giving 5.7735026918962575e-09.
    prob = 1e-16
    quantile = stats.burr12.ppf(prob, 2, 3)
    assert_allclose(quantile, 5.7735026918962575e-09)
def test_crystalball_function():
    """
    All values are calculated using the independent implementation of the
    ROOT framework (see https://root.cern.ch/).
    Corresponding ROOT code is given in the comments.
    """
    # 20 evenly spaced points in [-5, 5); the last point is dropped to match
    # the half-open range of the ROOT loop below.
    X = np.linspace(-5.0, 5.0, 21)[:-1]

    # for(float x = -5.0; x < 5.0; x+=0.5)
    #     std::cout << ROOT::Math::crystalball_pdf(x, 1.0, 2.0, 1.0) << ", ";
    calculated = stats.crystalball.pdf(X, beta=1.0, m=2.0)
    expected = np.array([0.0202867, 0.0241428, 0.0292128, 0.0360652, 0.045645,
                         0.059618, 0.0811467, 0.116851, 0.18258, 0.265652,
                         0.301023, 0.265652, 0.18258, 0.097728, 0.0407391,
                         0.013226, 0.00334407, 0.000658486, 0.000100982,
                         1.20606e-05])
    assert_allclose(expected, calculated, rtol=0.001)

    # for(float x = -5.0; x < 5.0; x+=0.5)
    #     std::cout << ROOT::Math::crystalball_pdf(x, 2.0, 3.0, 1.0) << ", ";
    calculated = stats.crystalball.pdf(X, beta=2.0, m=3.0)
    expected = np.array([0.0019648, 0.00279754, 0.00417592, 0.00663121,
                         0.0114587, 0.0223803, 0.0530497, 0.12726, 0.237752,
                         0.345928, 0.391987, 0.345928, 0.237752, 0.12726,
                         0.0530497, 0.0172227, 0.00435458, 0.000857469,
                         0.000131497, 1.57051e-05])
    assert_allclose(expected, calculated, rtol=0.001)

    # Shifted/scaled pdf (loc=0.5, scale=2.0):
    # for(float x = -5.0; x < 5.0; x+=0.5) {
    #     std::cout << ROOT::Math::crystalball_pdf(x, 2.0, 3.0, 2.0, 0.5);
    #     std::cout << ", ";
    # }
    calculated = stats.crystalball.pdf(X, beta=2.0, m=3.0, loc=0.5, scale=2.0)
    expected = np.array([0.00785921, 0.0111902, 0.0167037, 0.0265249,
                         0.0423866, 0.0636298, 0.0897324, 0.118876, 0.147944,
                         0.172964, 0.189964, 0.195994, 0.189964, 0.172964,
                         0.147944, 0.118876, 0.0897324, 0.0636298, 0.0423866,
                         0.0265249])
    assert_allclose(expected, calculated, rtol=0.001)

    # for(float x = -5.0; x < 5.0; x+=0.5)
    #     std::cout << ROOT::Math::crystalball_cdf(x, 1.0, 2.0, 1.0) << ", ";
    calculated = stats.crystalball.cdf(X, beta=1.0, m=2.0)
    expected = np.array([0.12172, 0.132785, 0.146064, 0.162293, 0.18258,
                         0.208663, 0.24344, 0.292128, 0.36516, 0.478254,
                         0.622723, 0.767192, 0.880286, 0.94959, 0.982834,
                         0.995314, 0.998981, 0.999824, 0.999976, 0.999997])
    assert_allclose(expected, calculated, rtol=0.001)

    # for(float x = -5.0; x < 5.0; x+=0.5)
    #     std::cout << ROOT::Math::crystalball_cdf(x, 2.0, 3.0, 1.0) << ", ";
    calculated = stats.crystalball.cdf(X, beta=2.0, m=3.0)
    expected = np.array([0.00442081, 0.00559509, 0.00730787, 0.00994682,
                         0.0143234, 0.0223803, 0.0397873, 0.0830763, 0.173323,
                         0.320592, 0.508717, 0.696841, 0.844111, 0.934357,
                         0.977646, 0.993899, 0.998674, 0.999771, 0.999969,
                         0.999997])
    assert_allclose(expected, calculated, rtol=0.001)

    # Shifted/scaled cdf (loc=0.5, scale=2.0):
    # for(float x = -5.0; x < 5.0; x+=0.5) {
    #     std::cout << ROOT::Math::crystalball_cdf(x, 2.0, 3.0, 2.0, 0.5);
    #     std::cout << ", ";
    # }
    calculated = stats.crystalball.cdf(X, beta=2.0, m=3.0, loc=0.5, scale=2.0)
    expected = np.array([0.0176832, 0.0223803, 0.0292315, 0.0397873, 0.0567945,
                         0.0830763, 0.121242, 0.173323, 0.24011, 0.320592,
                         0.411731, 0.508717, 0.605702, 0.696841, 0.777324,
                         0.844111, 0.896192, 0.934357, 0.960639, 0.977646])
    assert_allclose(expected, calculated, rtol=0.001)
def test_crystalball_function_moments():
    """
    All values are calculated using the pdf formula and the integrate function
    of Mathematica
    """
    # The Last two (alpha, n) pairs test the special case n == alpha**2
    beta = np.array([2.0, 1.0, 3.0, 2.0, 3.0])
    m = np.array([3.0, 3.0, 2.0, 4.0, 9.0])

    # The distribution should be correctly normalised
    expected_0th_moment = np.array([1.0, 1.0, 1.0, 1.0, 1.0])
    calculated_0th_moment = stats.crystalball._munp(0, beta, m)
    assert_allclose(expected_0th_moment, calculated_0th_moment, rtol=0.001)

    # calculated using wolframalpha.com
    # e.g. for beta = 2 and m = 3 we calculate the norm like this:
    #   integrate exp(-x^2/2) from -2 to infinity +
    #   integrate (3/2)^3*exp(-2^2/2)*(3/2-2-x)^(-3) from -infinity to -2
    norm = np.array([2.5511, 3.01873, 2.51065, 2.53983, 2.507410455])

    # `a` holds the raw (un-normalised) moment integrals; np.inf marks
    # (beta, m) combinations whose moment integral diverges.
    a = np.array([-0.21992, -3.03265, np.inf, -0.135335, -0.003174])
    expected_1th_moment = a / norm
    calculated_1th_moment = stats.crystalball._munp(1, beta, m)
    assert_allclose(expected_1th_moment, calculated_1th_moment, rtol=0.001)

    a = np.array([np.inf, np.inf, np.inf, 3.2616, 2.519908])
    expected_2th_moment = a / norm
    calculated_2th_moment = stats.crystalball._munp(2, beta, m)
    assert_allclose(expected_2th_moment, calculated_2th_moment, rtol=0.001)

    a = np.array([np.inf, np.inf, np.inf, np.inf, -0.0577668])
    expected_3th_moment = a / norm
    calculated_3th_moment = stats.crystalball._munp(3, beta, m)
    assert_allclose(expected_3th_moment, calculated_3th_moment, rtol=0.001)

    a = np.array([np.inf, np.inf, np.inf, np.inf, 7.78468])
    expected_4th_moment = a / norm
    calculated_4th_moment = stats.crystalball._munp(4, beta, m)
    assert_allclose(expected_4th_moment, calculated_4th_moment, rtol=0.001)

    a = np.array([np.inf, np.inf, np.inf, np.inf, -1.31086])
    expected_5th_moment = a / norm
    calculated_5th_moment = stats.crystalball._munp(5, beta, m)
    assert_allclose(expected_5th_moment, calculated_5th_moment, rtol=0.001)
def test_ncf_variance():
    """Regression test for gh-10658 (incorrect variance formula for ncf).

    The reference value of ncf.var(2, 6, 4), 42.75, can be verified with,
    for example, Wolfram Alpha via
    Variance[NoncentralFRatioDistribution[2, 6, 4]], or with the noncentral
    F implementation in the C++ library Boost.
    """
    computed = stats.ncf.var(2, 6, 4)
    assert_allclose(computed, 42.75, rtol=1e-14)
class TestHistogram(object):
    """Tests for stats.rv_histogram distributions built from np.histogram."""

    def setup_method(self):
        np.random.seed(1234)

        # We have 8 bins
        # [1,2), [2,3), [3,4), [4,5), [5,6), [6,7), [7,8), [8,9)
        # But actually np.histogram will put the last 9 also in the [8,9) bin!
        # Therefore there is a slight difference below for the last bin, from
        # what you might have expected.
        histogram = np.histogram([1, 2, 2, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 5,
                                  6, 6, 6, 6, 7, 7, 7, 8, 8, 9], bins=8)
        self.template = stats.rv_histogram(histogram)

        # Second template built from a normal sample, for loose comparisons
        # against the exact normal distribution.
        data = stats.norm.rvs(loc=1.0, scale=2.5, size=10000, random_state=123)
        norm_histogram = np.histogram(data, bins=50)
        self.norm_template = stats.rv_histogram(norm_histogram)

    def test_pdf(self):
        # Expected pdf is bin count / total count (25) per unit-width bin.
        values = np.array([0.0, 0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5,
                           5.0, 5.5, 6.0, 6.5, 7.0, 7.5, 8.0, 8.5, 9.0, 9.5])
        pdf_values = np.asarray([0.0/25.0, 0.0/25.0, 1.0/25.0, 1.0/25.0,
                                 2.0/25.0, 2.0/25.0, 3.0/25.0, 3.0/25.0,
                                 4.0/25.0, 4.0/25.0, 5.0/25.0, 5.0/25.0,
                                 4.0/25.0, 4.0/25.0, 3.0/25.0, 3.0/25.0,
                                 3.0/25.0, 3.0/25.0, 0.0/25.0, 0.0/25.0])
        assert_allclose(self.template.pdf(values), pdf_values)

        # Test explicitly the corner cases:
        # As stated above the pdf in the bin [8,9) is greater than
        # one would naively expect because np.histogram putted the 9
        # into the [8,9) bin.
        assert_almost_equal(self.template.pdf(8.0), 3.0/25.0)
        assert_almost_equal(self.template.pdf(8.5), 3.0/25.0)
        # 9 is outside our defined bins [8,9) hence the pdf is already 0
        # for a continuous distribution this is fine, because a single value
        # does not have a finite probability!
        assert_almost_equal(self.template.pdf(9.0), 0.0/25.0)
        assert_almost_equal(self.template.pdf(10.0), 0.0/25.0)

        x = np.linspace(-2, 2, 10)
        assert_allclose(self.norm_template.pdf(x),
                        stats.norm.pdf(x, loc=1.0, scale=2.5), rtol=0.1)

    def test_cdf_ppf(self):
        values = np.array([0.0, 0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5,
                           5.0, 5.5, 6.0, 6.5, 7.0, 7.5, 8.0, 8.5, 9.0, 9.5])
        cdf_values = np.asarray([0.0/25.0, 0.0/25.0, 0.0/25.0, 0.5/25.0,
                                 1.0/25.0, 2.0/25.0, 3.0/25.0, 4.5/25.0,
                                 6.0/25.0, 8.0/25.0, 10.0/25.0, 12.5/25.0,
                                 15.0/25.0, 17.0/25.0, 19.0/25.0, 20.5/25.0,
                                 22.0/25.0, 23.5/25.0, 25.0/25.0, 25.0/25.0])
        assert_allclose(self.template.cdf(values), cdf_values)
        # First three and last two values in cdf_value are not unique
        assert_allclose(self.template.ppf(cdf_values[2:-1]), values[2:-1])

        # Test of cdf and ppf are inverse functions
        x = np.linspace(1.0, 9.0, 100)
        assert_allclose(self.template.ppf(self.template.cdf(x)), x)
        x = np.linspace(0.0, 1.0, 100)
        assert_allclose(self.template.cdf(self.template.ppf(x)), x)

        x = np.linspace(-2, 2, 10)
        assert_allclose(self.norm_template.cdf(x),
                        stats.norm.cdf(x, loc=1.0, scale=2.5), rtol=0.1)

    def test_rvs(self):
        # Sampled empirical CDF should track the template's exact CDF;
        # tolerances loosen toward the tails where counts are small.
        N = 10000
        sample = self.template.rvs(size=N, random_state=123)
        assert_equal(np.sum(sample < 1.0), 0.0)
        assert_allclose(np.sum(sample <= 2.0), 1.0/25.0 * N, rtol=0.2)
        assert_allclose(np.sum(sample <= 2.5), 2.0/25.0 * N, rtol=0.2)
        assert_allclose(np.sum(sample <= 3.0), 3.0/25.0 * N, rtol=0.1)
        assert_allclose(np.sum(sample <= 3.5), 4.5/25.0 * N, rtol=0.1)
        assert_allclose(np.sum(sample <= 4.0), 6.0/25.0 * N, rtol=0.1)
        assert_allclose(np.sum(sample <= 4.5), 8.0/25.0 * N, rtol=0.1)
        assert_allclose(np.sum(sample <= 5.0), 10.0/25.0 * N, rtol=0.05)
        assert_allclose(np.sum(sample <= 5.5), 12.5/25.0 * N, rtol=0.05)
        assert_allclose(np.sum(sample <= 6.0), 15.0/25.0 * N, rtol=0.05)
        assert_allclose(np.sum(sample <= 6.5), 17.0/25.0 * N, rtol=0.05)
        assert_allclose(np.sum(sample <= 7.0), 19.0/25.0 * N, rtol=0.05)
        assert_allclose(np.sum(sample <= 7.5), 20.5/25.0 * N, rtol=0.05)
        assert_allclose(np.sum(sample <= 8.0), 22.0/25.0 * N, rtol=0.05)
        assert_allclose(np.sum(sample <= 8.5), 23.5/25.0 * N, rtol=0.05)
        assert_allclose(np.sum(sample <= 9.0), 25.0/25.0 * N, rtol=0.05)
        assert_allclose(np.sum(sample <= 9.0), 25.0/25.0 * N, rtol=0.05)
        assert_equal(np.sum(sample > 9.0), 0.0)

    def test_munp(self):
        # Raw moments of the histogram template should approximate the
        # underlying normal's raw moments.
        for n in range(4):
            assert_allclose(self.norm_template._munp(n),
                            stats.norm._munp(n, 1.0, 2.5), rtol=0.05)

    def test_entropy(self):
        assert_allclose(self.norm_template.entropy(),
                        stats.norm.entropy(loc=1.0, scale=2.5), rtol=0.05)
def test_loguniform():
    # The "loguniform" alias must be uniform on a log10 scale: with 10000
    # samples over three decades split into 10 log-bins, each bin should
    # hold roughly 1000 samples.
    rv = stats.loguniform(10 ** -3, 10 ** 0)
    samples = rv.rvs(size=10000, random_state=42)
    counts, _ = np.histogram(np.log10(samples), bins=10)
    assert 900 <= counts.min() <= counts.max() <= 1100
    assert np.abs(np.median(counts) - 1000) <= 10
class TestArgus(object):
def test_argus_rvs_large_chi(self):
# test that the algorithm can handle large values of chi
x = stats.argus.rvs(50, size=500, random_state=325)
assert_almost_equal(stats.argus(50).mean(), x.mean(), decimal=4)
def test_argus_rvs_ratio_uniforms(self):
# test that the ratio of uniforms algorithms works for chi > 2.611
x = stats.argus.rvs(3.5, size=1500, random_state=1535)
assert_almost_equal(stats.argus(3.5).mean(), x.mean(), decimal=3)
assert_almost_equal(stats.argus(3.5).std(), x.std(), decimal=3)
| aeklant/scipy | scipy/stats/tests/test_distributions.py | Python | bsd-3-clause | 165,416 |
// Copyright (c) 2017 Timo Savola. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
#include <sys/types.h>

/* Parameters for the cgroup a container process is placed in. */
struct cgroup_config {
	const char *title;  /* name of the cgroup -- presumably created under `parent`; TODO confirm */
	const char *parent; /* parent cgroup path */
};

/* Identifier of the compiled-in cgroup backend (defined per backend). */
extern const char cgroup_backend[];

/* Applies `config` to process `pid`; exact semantics are backend-specific. */
void init_cgroup(pid_t pid, const struct cgroup_config *config);
| tsavola/gate | runtime/container/cgroup.h | C | bsd-3-clause | 355 |
<?php
/**
* Message translations.
*
* This file is automatically generated by 'yii message/extract' command.
* It contains the localizable messages extracted from source code.
* You may modify this file by translating the extracted messages.
*
* Each array element represents the translation (value) of a message (key).
* If the value is empty, the message is considered as not translated.
* Messages that no longer need translation will have their translations
* enclosed between a pair of '@@' marks.
*
* Message string can be used with plural forms format. Check i18n section
* of the guide for details.
*
* NOTE: this file must be saved in UTF-8 encoding.
*/
// Translation map: English source string => French translation.
// NOTE(review): 'dans la fenêtre on l\'onglet parent' looks like a typo for
// "ou" — translations are runtime strings and are left untouched here.
return array (
  'Controls' => 'Contrôles',
  'Name' => 'Nom',
  'Opens the link in the same frame as it was clicked (default).' => 'Ouvrir le lien au même endroit où il a été cliqué (par défaut).',
  'Opens the link in a new window or tab.' => 'Ouvrir le lien dans une nouvelle fenêtre ou un nouvel onglet.',
  'Opens the link in the parent frame.' => 'Ouvrir le lien dans la fenêtre on l\'onglet parent.',
  'Opens the link in the full body of the window.' => 'Ouvrir le lien dans une nouvelle fenêtre au dessus du reste.',
  'Parent' => 'Parent',
  'Position' => 'Position',
  'Presentation' => 'Présentation',
  'URL' => 'URL',
);
| Arza-Studio/yiingine | controllers/admin/messages/fr/MenusController.php | PHP | bsd-3-clause | 1,326 |
-- [er]create table of range partition on char field having boundary values and split 2nd partition to another partition but lost data
-- Range-partition on a CHAR column; boundary rows ('ddd', 'ggg') land in the
-- partition whose bound is strictly greater (p1 and p2 respectively).
create table range_test(id int not null ,
test_int int,
test_char char(50),
test_varchar varchar(2000),
test_datetime timestamp,primary key(id,test_char))
PARTITION BY RANGE (test_char) (
PARTITION p0 VALUES LESS THAN ('ddd'),
PARTITION p1 VALUES LESS THAN ('ggg'),
PARTITION p2 VALUES LESS THAN ('kkk')
);
-- Three rows per partition range, plus one row ('jjj') near the upper bound.
insert into range_test values (1,1,'aaa','aaa','2000-01-01 09:00:00');
insert into range_test values (2,2,'bbb','bbb','2000-01-02 09:00:00');
insert into range_test values (3,3,'ccc','ccc','2000-01-03 09:00:00');
insert into range_test values (4,11,'ddd','ddd','2000-02-01 09:00:00');
insert into range_test values (5,12,'eee','eee','2000-02-02 09:00:00');
insert into range_test values (6,13,'fff','fff','2000-02-03 09:00:00');
insert into range_test values (7,21,'ggg','ggg','2000-03-01 09:00:00');
insert into range_test values (8,22,'hhh','hhh','2000-03-02 09:00:00');
insert into range_test values (9,23,'iii','iii','2000-03-03 09:00:00');
insert into range_test values (10,31,'jjj','jjj','2000-04-01 09:00:00');
-- Reorganize p1 ('ddd'..'ggg') into p3 with upper bound 'iii'. Per the [er]
-- marker in the file header, this is presumably expected to fail/misbehave
-- because 'iii' exceeds p1's original upper bound and overlaps p2 -- TODO confirm.
ALTER TABLE range_test REORGANIZE PARTITION p1 INTO (
PARTITION p3 VALUES LESS THAN ('iii'));
drop table range_test;
| CUBRID/cubrid-testcases | sql/_01_object/_09_partition/_005_reorganization/cases/1068.sql | SQL | bsd-3-clause | 1,309 |
/*
* Copyright 2010 Google Inc.
*
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#ifndef GrColor_DEFINED
#define GrColor_DEFINED
#include "include/core/SkColor.h"
#include "include/core/SkColorPriv.h"
#include "include/gpu/GrTypes.h"
#include "include/private/SkColorData.h"
#include "include/private/SkHalf.h"
#include "src/gpu/BufferWriter.h"
/**
* GrColor is 4 bytes for R, G, B, A, in a specific order defined below. Whether the color is
* premultiplied or not depends on the context in which it is being used.
*/
typedef uint32_t GrColor;

// shift amount to assign a component to a GrColor int
// These shift values are chosen for compatibility with GL attrib arrays
// ES doesn't allow BGRA vertex attrib order so if they were not in this order
// we'd have to swizzle in shaders.
// Note: selecting the shifts by endianness makes the in-memory byte order
// R,G,B,A on both big- and little-endian targets.
#ifdef SK_CPU_BENDIAN
    #define GrColor_SHIFT_R     24
    #define GrColor_SHIFT_G     16
    #define GrColor_SHIFT_B     8
    #define GrColor_SHIFT_A     0
#else
    #define GrColor_SHIFT_R     0
    #define GrColor_SHIFT_G     8
    #define GrColor_SHIFT_B     16
    #define GrColor_SHIFT_A     24
#endif
/**
* Pack 4 components (RGBA) into a GrColor int
*/
/** Packs four byte-sized components (RGBA) into a single GrColor word. */
static inline GrColor GrColorPackRGBA(unsigned r, unsigned g, unsigned b, unsigned a) {
    // Each component must already fit in a single byte.
    SkASSERT((uint8_t)r == r);
    SkASSERT((uint8_t)g == g);
    SkASSERT((uint8_t)b == b);
    SkASSERT((uint8_t)a == a);
    GrColor packed = r << GrColor_SHIFT_R;
    packed |= g << GrColor_SHIFT_G;
    packed |= b << GrColor_SHIFT_B;
    packed |= a << GrColor_SHIFT_A;
    return packed;
}
// extract a component (byte) from a GrColor int
#define GrColorUnpackR(color)   (((color) >> GrColor_SHIFT_R) & 0xFF)
#define GrColorUnpackG(color)   (((color) >> GrColor_SHIFT_G) & 0xFF)
#define GrColorUnpackB(color)   (((color) >> GrColor_SHIFT_B) & 0xFF)
#define GrColorUnpackA(color)   (((color) >> GrColor_SHIFT_A) & 0xFF)

/**
 * Since premultiplied means that alpha >= color, we construct a color with
 * each component==255 and alpha == 0 to be "illegal"
 * (i.e. all non-alpha bits set, alpha bits clear — unrepresentable as a
 * valid premultiplied color, so usable as a sentinel.)
 */
#define GrColor_ILLEGAL     (~(0xFF << GrColor_SHIFT_A))
/** Normalizes and converts a uint8_t to a float: [0, 255] -> [0.0, 1.0]. */
static inline float GrNormalizeByteToFloat(uint8_t value) {
    // Multiply by the reciprocal rather than dividing; identical rounding to
    // the historical `value * (1/255)` implementation.
    static const float kInv255 = 1.f / 255.f;
    return value * kInv255;
}
/**
 * Used to pick vertex attribute types: returns true when the color can be
 * stored as 4 bytes (per fitsInBytes(), presumably all channels in [0, 1] —
 * TODO confirm against SkPMColor4f).
 */
static inline bool SkPMColor4fFitsInBytes(const SkPMColor4f& color) {
    // Might want to instead check that the components are [0...a] instead of [0...1]?
    return color.fitsInBytes();
}
/**
 * Converts the four float channels of a premultiplied color to half floats,
 * packed into one 64-bit value (16 bits per channel, RGBA order per vec()).
 */
static inline uint64_t SkPMColor4f_toFP16(const SkPMColor4f& color) {
    uint64_t halfColor;
    // Per its name, the conversion assumes finite inputs and flushes
    // denormals to zero.
    SkFloatToHalf_finite_ftz(Sk4f::Load(color.vec())).store(&halfColor);
    return halfColor;
}
/**
 * GrVertexColor is a helper for writing colors to a vertex attribute. It stores either GrColor
 * or four half-float channels, depending on the wideColor parameter. VertexWriter will write the
 * correct amount of data. Note that the GP needs to have been constructed with the correct
 * attribute type for colors, to match the usage here.
 */
class GrVertexColor {
public:
    GrVertexColor() = default;

    explicit GrVertexColor(const SkPMColor4f& color, bool wideColor) {
        this->set(color, wideColor);
    }

    /**
     * Stores `color` for later streaming. Wide colors keep all four float
     * channels; narrow colors are packed into a single RGBA8888 word.
     */
    void set(const SkPMColor4f& color, bool wideColor) {
        if (wideColor) {
            memcpy(fColor, color.vec(), sizeof(fColor));
        } else {
            fColor[0] = color.toBytes_RGBA();
        }
        fWideColor = wideColor;
    }

    /** Number of bytes operator<< will emit: four floats or one packed word. */
    size_t size() const { return fWideColor ? 16 : 4; }

private:
    template <typename T>
    friend skgpu::VertexWriter& skgpu::operator<<(skgpu::VertexWriter&, const T&);

    // In-class initializers so that a default-constructed GrVertexColor has
    // defined contents (narrow, transparent black) instead of indeterminate
    // values being read by size()/operator<< before set() is called.
    uint32_t fColor[4] = {0, 0, 0, 0};
    bool fWideColor = false;
};
// Streams the stored color into the vertex buffer: always the first word,
// plus the remaining three words (the full float4) when the color is wide.
// The amount written must agree with GrVertexColor::size().
template <>
SK_MAYBE_UNUSED inline skgpu::VertexWriter& skgpu::operator<<(skgpu::VertexWriter& w,
                                                              const GrVertexColor& color) {
    w << color.fColor[0];
    if (color.fWideColor) {
        w << color.fColor[1]
          << color.fColor[2]
          << color.fColor[3];
    }
    return w;
}
| youtube/cobalt | third_party/skia_next/third_party/skia/src/gpu/GrColor.h | C | bsd-3-clause | 4,138 |
---
layout: doc
title: "**HOWTO:** Decode to Texture"
---
# **HOWTO:** Decode to Texture
Starboard declares the interfaces necessary to allow applications to query for
video frames from the media player, and have them returned as texture objects
(e.g. GLES textures). This is useful if the application would like to apply
a geometrical transformation to the rendered video, in order to support 360
spherical video playback for example. Additionally, if a Starboard platform
implementation does not support punch-through video playback, then
applications can choose to use decode-to-texture instead.
## API Overview
Decode-to-texture support involves multiple Starboard API functions spanning
both the [`starboard/player.h`](../player.h) and
[`starboard/decode_target.h`](../decode_target.h) Starboard interface header
files. Support for decode-to-texture began in version 4 of the Starboard
API.
In particular, the following function implementations require consideration
for decode-to-texture support:
From [`starboard/player.h`](../player.h),
* `SbPlayerCreate()`
* `SbPlayerOutputModeSupported()`
* `SbPlayerGetCurrentFrame()`
From [`starboard/decode_target.h`](../decode_target.h),
* `SbDecodeTargetRelease()`
* `SbDecodeTargetGetInfo()`
Note that it is possible that you may not need to use the
`SbDecodeTargetGraphicsContextProvider` parameter of SbPlayerCreate(). More on
this later.
## Example Application Usage Pattern
We now describe an example, and typical, sequence of steps that an
application will take when it wishes to make use of decode-to-texture
support.

1. An application with the desire to make use of decode-to-texture will first
call `SbPlayerOutputModeSupported()`, passing in
`kSbPlayerOutputModeDecodeToTexture` for its `output_mode` parameter. If
the function returns false, the application learns that decode-to-texture
is not supported by the platform and it will not continue with a
decode-to-texture flow.
2. If `SbPlayerOutputModeSupported()` returns true, the application will call
`SbPlayerCreate()`, passing in `kSbPlayerOutputModeDecodeToTexture` for
the `output_mode` parameter, and also providing a valid `provider`
parameter (more on this later). At this point, the Starboard platform is
expected to have created a player with the decode-to-texture output mode.
3. Once the player is started and playback has begun, the application's
renderer thread (this may be a different thread than the one that called
`SbPlayerCreate()`) will repeatedly and frequently call
`SbPlayerGetCurrentFrame()`. Since this function will be called from the
application's renderer thread, it should be thread-safe. If the platform
uses a GLES renderer, it is guaranteed that this function will be called
with the GLES renderer context set as current. This function is expected
to return the video frame that is to be displayed at the time the function
is called as a `SbDecodeTarget` object. The `SbPlayerGetCurrentFrame()`
will be called at the renderer's frequency, i.e. the application render
loop's frame rate. If the application's frame rate is higher than the
video's frame rate, then the same video frame will sometimes be returned
in consecutive calls to `SbPlayerGetCurrentFrame()`. If the video's frame
rate is higher than the application's (this should be rare), then some
video frames will never be returned by calls to
`SbPlayerGetCurrentFrame()`; in other words, video frames will be
dropped.
4. Once the application has acquired a valid SbDecodeTarget object through a
call to `SbPlayerGetCurrentFrame()`, it will call
`SbDecodeTargetGetInfo()` on it to extract information about the opaque
`SbDecodeTarget` object. The `SbDecodeTargetGetInfo()` function fills
out a `SbDecodeTargetInfo` structure which contains information about the
decoded frame and, most importantly, a reference to a GLES texture ID on
GLES platforms, or a reference to a `SbBlitterSurface` object on
Starboard Blitter API platforms. The application can then use this
texture/surface handle to render the video frame as it wishes.
5. When the application is finished using the `SbDecodeTarget` that it has
acquired through the `SbPlayerGetCurrentFrame()` function, it will call
`SbDecodeTargetRelease()` on it. The Starboard platform implementation
should ensure that the `SbDecodeTarget` object returned by
`SbPlayerGetCurrentFrame()` remains valid until the corresponding call to
`SbDecodeTargetRelease()` is made. A call to `SbDecodeTargetRelease()`
will be made to match each call to `SbPlayerGetCurrentFrame()`.
## The `SbDecodeTargetGraphicsContextProvider` object
It is completely possible that a platform's Starboard implementation can
properly implement decode-to-texture support without dealing with the
`SbDecodeTargetGraphicsContextProvider` object (passed in to
`SbPlayerCreate()`). The `SbDecodeTargetGraphicsContextProvider` reference
gives platforms references to the graphics objects that will later be used to
render the decoded frames. For example, on Blitter API platforms, a reference
to the `SbBlitterDevice` object will be a member of
`SbDecodeTargetGraphicsContextProvider`. For EGL platforms, a `EGLDisplay` and
`EGLContext` will be available, but additionally a
`SbDecodeTargetGlesContextRunner` function pointer will be provided that will
allow you to run arbitrary code on the renderer thread with the `EGLContext`
held current. This may be useful if your `SbDecodeTarget` creation code
requires making GLES calls (e.g. `glGenTextures()`) during which an `EGLContext` must
be held current.
## Performance Considerations
The decode-to-texture Starboard API is specifically designed to allow
Starboard implementations to have the player decode directly to a texture,
so that the application can then reference and render with that texture
without at any point performing a pixel copy. The
decode-to-texture path can therefore be highly performant.
It is still recommended however that platforms support the punch-through
player mode if possible. When using the decode-to-texture player output
mode, the video may be rendered within the application's render loop, which
means that non-video-related time complexity in the application's render
loop can affect video playback's apparent frame rate, potentially resulting in
dropped frames. The platform can likely configure punch-through video to
refresh on its own loop, decoupling it from the application render loop.
## Implementation Strategies
### Working with "push" players
If your player implementation is setup with a "push" framework where
frames are pushed out as soon as they are decoded, then you will need
to cache those frames (along with their timestamps) so that they can be
passed on to the application when `SbPlayerGetCurrentFrame()` is called.
This same strategy applies if the player pushes frames only when they are meant
to be rendered.
| youtube/cobalt | cobalt/site/docs/gen/starboard/doc/howto_decode_to_texture.md | Markdown | bsd-3-clause | 7,083 |
# AMQP Module
**For additional reference, please review the [source](https://github.com/Codeception/Codeception/tree/2.0/src/Codeception/Module/AMQP.php)**
This module interacts with message broker software that implements
the Advanced Message Queuing Protocol (AMQP) standard. For example, RabbitMQ (tested).
Use it to cleanup the queue between tests.
<div class="alert alert-info">
To use this module with Composer you need <em>"videlalvaro/php-amqplib": "*"</em> package.
</div>
## Status
* Maintainer: **davert**, **tiger-seo**
* Stability: **alpha**
* Contact: codecept@davert.mail.ua
* Contact: tiger.seo@gmail.com
*Please review the code of non-stable modules and provide patches if you have issues.*
## Config
* host: localhost - host to connect
* username: guest - username to connect
* password: guest - password to connect
* vhost: '/' - vhost to connect
* cleanup: true - defined queues will be purged before running every test.
* queues: [mail, twitter] - queues to cleanup
### Example
modules:
enabled: [AMQP]
config:
AMQP:
host: 'localhost'
port: '5672'
username: 'guest'
password: 'guest'
vhost: '/'
queues: [queue1, queue2]
## Public Properties
* connection - AMQPConnection - current connection
@since 1.1.2
@author tiger.seo@gmail.com
@author davert
### grabMessageFromQueue
Takes last message from queue.
$message = $I->grabMessageFromQueue('queue.emails');
* `param` $queue
@return AMQPMessage
### pushToExchange
Sends message to exchange by sending exchange name, message
and (optionally) a routing key
``` php
<?php
$I->pushToExchange('exchange.emails', 'thanks');
$I->pushToExchange('exchange.emails', new AMQPMessage('Thanks!'));
$I->pushToExchange('exchange.emails', new AMQPMessage('Thanks!'), 'severity');
?>
```
* `param` $exchange
* `param` $message string|AMQPMessage
* `param` $routing_key
### pushToQueue
Sends message to queue
``` php
<?php
$I->pushToQueue('queue.jobs', 'create user');
$I->pushToQueue('queue.jobs', new AMQPMessage('create'));
?>
```
* `param` $queue
* `param` $message string|AMQPMessage
### seeMessageInQueueContainsText
Checks whether a message containing the given text has been received.
**This method drops the message from the queue**
**This method will wait for a message. If none is sent, the script will get stuck**.
``` php
<?php
$I->pushToQueue('queue.emails', 'Hello, davert');
$I->seeMessageInQueueContainsText('queue.emails','davert');
?>
```
* `param` $queue
* `param` $text
<p> </p><div class="alert alert-warning">Module reference is taken from the source code. <a href="https://github.com/Codeception/Codeception/tree/2.0/src/Codeception/Module/AMQP.php">Help us to improve documentation. Edit module reference</a></div>
| kaliss96/calificaciones | vendor/codeception/codeception/docs/modules/AMQP.md | Markdown | bsd-3-clause | 2,944 |
<?php
/**
* Formagic
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at
* http://www.formagic-php.net/license-agreement/
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@formagic-php.net so we can send you a copy immediately.
*
* @author Florian Sonnenburg
* @copyright 2007-2014 Florian Sonnenburg
* @license http://www.formagic-php.net/license-agreement/ New BSD License
*/
/**
* Returns rendered HTML form. Tables are used to place form elements.
*
* @package Formagic\Renderer
* @author Florian Sonnenburg
* @since 0.2.0 First time introduced
**/
class Formagic_Renderer_Html implements Formagic_Renderer_Interface
{
    /**
     * Form wrapping template
     * @var string
     **/
    protected $_formWrapperTemplate = '
<form action="%ACTION%" method="%METHOD%"%ATTRIBUTES%>
%HIDDENS%
%CONTAINER%
</form>
';

    /**
     * Container wrapping template array
     * @var array
     **/
    protected $_containerWrapperTemplate = array('' =>
        '<table border="0" cellpadding="0" cellspacing="0"%ATTRIBUTES%>
%ROWS%
</table>'
    );

    /**
     * Template for rows containing containers
     * @var array
     **/
    protected $_containerRowTemplate = array('' =>
        '
<tr>
    <td colspan="2">
    %CONTAINER%</td>
</tr>'
    );

    /**
     * Container label template array
     * @var array
     */
    protected $_containerLabelTemplate = array('' => '');

    /**
     * Template array for rows containing normal items
     * @var array
     **/
    protected $_itemRowTemplate = array('' =>
        '
<tr>
    <td>%LABEL%</td>
    <td>%ERROR%
    %INPUT%</td>
</tr>'
    );

    /**
     * Template array for displaying the item label
     * @var array
     **/
    protected $_itemLabelTemplate = array('' =>
        '<label for="%ID%"%ERRORCLASS%>%LABEL%%MANDATORYMARKER%</label>'
    );

    /**
     * Array of templates for item's error wrapper (list open tag)
     * @var array
     */
    protected $_itemErrorWrapperTemplate = array('' =>
        '<ul%ERRORCLASS%>%ERRORS%</ul>'
    );

    /**
     * Array of templates for displaying the item error string (list elements)
     * @var array
     **/
    protected $_itemErrorTemplate = array('' =>
        '<li>%ERRORMESSAGE%</li>'
    );

    /**
     * Error class name
     * @var string
     */
    protected $_errorClass = 'formagicError';

    /**
     * Buffer for the assembled error class HTML attribute.
     *
     * Declared explicitly here: it was previously only created as a dynamic
     * property in _renderContainer(), which is deprecated as of PHP 8.2.
     * It is reset per container and is not read within this class.
     * @var string
     */
    protected $_errorClassHtml = '';

    /**
     * Template array with HTML marker string for mandatory fields
     * @var array
     **/
    protected $_mandatoryMarkerTemplate = array('' =>
        ' <span class="mandatory">*</span>'
    );

    /**
     * Hidden inputs string
     * @var string
     **/
    protected $_hiddenString = '';

    /**
     * Translator object
     * @var Formagic_Translator_Interface
     **/
    protected $_translator;

    /**
     * Sets the translator object for this renderer instance
     *
     * @param Formagic_Translator_Interface $translator Translator instance
     */
    public function __construct(Formagic_Translator_Interface $translator = null)
    {
        if (null === $translator) {
            $translator = Formagic::getTranslator();
        }
        $this->_translator = $translator;
    }

    /**
     * Returns current translator instance.
     *
     * @return Formagic_Translator_Interface Translator object
     */
    public function getTranslator()
    {
        return $this->_translator;
    }

    /**
     * Sets error CSS class.
     *
     * This css class is per default added to the label and error list
     * tag of items that did not pass validation.
     *
     * @param string $errorClass New error class
     * @return Formagic_Renderer_Html Fluent interface
     */
    public function setErrorClass($errorClass)
    {
        $this->_errorClass = $errorClass;
        return $this;
    }

    /**
     * Sets new template for rows that hold sub-containers
     * (tr/td tags by default).
     *
     * Supported placeholders:
     * - %CONTAINER%: HTML for the subcontainer, including it's wrapper
     *
     * @see setContainerLabelTemplate()
     * @see setContainerWrapperTemplate()
     * @param string $template Template string
     * @param string|Formagic_Item_Container $container Optional. Defines this
     *      template only for a specific container.
     * @return Formagic_Renderer_Html Fluent interface
     */
    public function setContainerRowTemplate($template, $container = '')
    {
        $this->_setTemplate($this->_containerRowTemplate, $template, $container);
        return $this;
    }

    /**
     * Returns the template for rows that hold sub-containers.
     *
     * @param string|Formagic_Item_Container $container Optional. Returns a
     *      template defined for one specific container.
     * @return string Template string
     */
    public function getContainerRowTemplate($container = '')
    {
        return $this->_getTemplate($this->_containerRowTemplate, $container);
    }

    /**
     * Sets wrapper template for containers
     * (opening/closing table tag by default).
     *
     * Available placeholders:
     * - %ROWS%: Rendered rows as HTML, including row wrapper
     * - %ATTRIBUTES%: Assembled attributes string as HTML
     * - %LABEL%: Rendered container label as HTML
     *
     * @see setContainerLabelTemplate()
     * @see setContainerRowTemplate()
     * @param string $template Template string
     * @param string|Formagic_Item_Container $container Optional. Defines this
     *      template only for a specific container.
     * @return Formagic_Renderer_Html Fluent interface
     */
    public function setContainerWrapperTemplate($template, $container = '')
    {
        $this->_setTemplate($this->_containerWrapperTemplate, $template, $container);
        return $this;
    }

    /**
     * Returns wrapper template for containers.
     *
     * @param string|Formagic_Item_Container $container Optional. Returns a
     *      template defined for one specific container.
     * @return string Template string
     */
    public function getContainerWrapperTemplate($container = '')
    {
        return $this->_getTemplate($this->_containerWrapperTemplate, $container);
    }

    /**
     * Sets container label template (empty by default).
     *
     * Available placeholders:
     * - %LABEL%: Label string
     *
     * @see setContainerWrapperTemplate()
     * @see setContainerRowTemplate()
     * @param string $template Template string
     * @param string|Formagic_Item_Container $container Optional. Defines this
     *      template only for a specific container (name or container object).
     * @return Formagic_Renderer_Html Fluent interface
     */
    public function setContainerLabelTemplate($template, $container = '')
    {
        $this->_setTemplate($this->_containerLabelTemplate, $template, $container);
        return $this;
    }

    /**
     * Returns container label template.
     *
     * @param string|Formagic_Item_Container $container Optional. Returns a
     *      template defined for one specific container.
     * @return string Template string
     */
    public function getContainerLabelTemplate($container = '')
    {
        return $this->_getTemplate($this->_containerLabelTemplate, $container);
    }

    /**
     * Sets form wrapper template (opening/closing form tag by default).
     *
     * Available placeholders:
     * - %ACTION%: Form action string
     * - %METHOD%: Form method string
     * - %ATTRIBUTES%: Assembled tag attributes string
     * - %HIDDENS%: Rendered hidden inputs
     * - %CONTAINER%: Rendered HTML of item holder container
     *
     * @param string $template Template string
     * @return Formagic_Renderer_Html Fluent interface
     */
    public function setFormWrapperTemplate($template)
    {
        $this->_formWrapperTemplate = $template;
        return $this;
    }

    /**
     * Returns form wrapper tag.
     *
     * @return string Template string
     */
    public function getFormWrapperTemplate()
    {
        return $this->_formWrapperTemplate;
    }

    /**
     * Sets template for rows containing input items (tr/td tags by default).
     *
     * Available placeholders:
     * - %LABEL%: Item label string
     * - %ERROR%: Assembled error list (HTML)
     * - %ERRORCLASS%: CSS class attribute with error class (eg. ' class="formagicError"')
     * - %INPUT%: Input HTML
     *
     * @see setItemLabelTemplate()
     * @see setItemErrorTemplate()
     * @param string $template Template string
     * @param string|Formagic_Item_Abstract $item Optional. Defines this
     *      template only for a specific item (name or item object).
     * @return Formagic_Renderer_Html Fluent interface
     */
    public function setItemRowTemplate($template, $item = '')
    {
        $this->_setTemplate($this->_itemRowTemplate, $template, $item);
        return $this;
    }

    /**
     * Returns item row template.
     *
     * @param string|Formagic_Item_Abstract $item Optional. Returns a template
     *      defined for one specific item.
     * @return string Template string
     */
    public function getItemRowTemplate($item = '')
    {
        return $this->_getTemplate($this->_itemRowTemplate, $item);
    }

    /**
     * Sets label template for a single item (label tag by default).
     *
     * Available placeholders:
     * - %LABEL%: Label string defined for item
     * - %ID%: Value of Item's HTML ID attribute
     * - %MANDATORYMARKER%: Marker for items with mandatory rule
     *
     * @see setItemRowTemplate()
     * @param string $template Template string
     * @param string|Formagic_Item_Abstract $item Optional. Defines this
     *      template only for a specific item (name or item object).
     * @return Formagic_Renderer_Html Fluent interface
     */
    public function setItemLabelTemplate($template, $item = '')
    {
        $this->_setTemplate($this->_itemLabelTemplate, $template, $item);
        return $this;
    }

    /**
     * Returns label template for an item.
     *
     * @param string|Formagic_Item_Abstract $item Optional. Returns a template
     *      defined for one specific item.
     * @return string Template string
     */
    public function getItemLabelTemplate($item = '')
    {
        return $this->_getTemplate($this->_itemLabelTemplate, $item);
    }

    /**
     * Sets a new template for a single error message (HTML LI tag by default)
     *
     * Available placeholders:
     * - %ERRORMESSAGE%: Message string returned by violated rule
     *
     * @param string $template Template string
     * @param string|Formagic_Item_Abstract $item Optional. Defines this
     *      template only for a specific item (name or item object).
     * @return Formagic_Renderer_Html Fluent interface
     */
    public function setItemErrorTemplate($template, $item = '')
    {
        $this->_setTemplate($this->_itemErrorTemplate, $template, $item);
        return $this;
    }

    /**
     * Returns single error message template
     *
     * @param string|Formagic_Item_Abstract $item Optional. Returns a template
     *      defined for one specific item.
     * @return string Template string
     */
    public function getItemErrorTemplate($item = '')
    {
        return $this->_getTemplate($this->_itemErrorTemplate, $item);
    }

    /**
     * Sets error wrapper template (opening and closing list tags by default).
     *
     * Supported placeholders:
     * - %ERRORS%: Rendered list of item errors
     *
     * @param string $template Template string
     * @param string|Formagic_Item_Abstract $item Optional. Defines this
     *      template only for a specific item (name or item object).
     * @return Formagic_Renderer_Html Fluent interface
     */
    public function setItemErrorWrapperTemplate($template, $item = '')
    {
        $this->_setTemplate($this->_itemErrorWrapperTemplate, $template, $item);
        return $this;
    }

    /**
     * Returns error wrapper template for item errors.
     *
     * @param string|Formagic_Item_Abstract $item Optional. Returns a template
     *      defined for one specific item.
     * @return string Template string
     */
    public function getItemErrorWrapperTemplate($item = '')
    {
        return $this->_getTemplate($this->_itemErrorWrapperTemplate, $item);
    }

    /**
     * Sets marker string for items that are marked mandatory
     * (asterisk by default).
     *
     * @param string $template Template string
     * @param string|Formagic_Item_Abstract $item Optional. Item this template
     *      is to be defined for.
     * @return Formagic_Renderer_Html Fluent interface
     */
    public function setMandatoryMarkerTemplate($template, $item = '')
    {
        $this->_setTemplate($this->_mandatoryMarkerTemplate, $template, $item);
        return $this;
    }

    /**
     * Returns marker string for items marked as mandatory.
     *
     * @param string|Formagic_Item_Abstract $item Optional. Returns a template
     *      defined for one specific item.
     * @return string Template string
     */
    public function getMandatoryMarkerTemplate($item = '')
    {
        return $this->_getTemplate($this->_mandatoryMarkerTemplate, $item);
    }

    /**
     * Returns form HTML string
     *
     * @param Formagic $form Formagic object to be rendered.
     * @return string The rendered HTML string
     */
    public function render(Formagic $form)
    {
        // init hidden input rendering
        $this->_hiddenString = '';
        $attributeStr = $form->getAttributeStr();

        // prototype a root level item holder container without attributes
        $itemHolderClone = clone $form->getItemHolder();
        $itemHolderClone
            ->setRequiredAttributes(array())
            ->setAttributes(array());
        $content = $this->_renderContainer($itemHolderClone);

        $str = str_replace(
            array(
                '%ACTION%',
                '%METHOD%',
                '%ATTRIBUTES%',
                '%HIDDENS%',
                '%CONTAINER%'
            ),
            array(
                $form->getFormAction(),
                $form->getMethod(),
                $attributeStr,
                $this->_hiddenString,
                $content
            ),
            $this->getFormWrapperTemplate());
        return $str;
    }

    /**
     * Adds hidden fields to form HTML string.
     *
     * @param Formagic_Item_Hidden $item Hidden item to be rendered.
     */
    protected function _addHiddenItem(Formagic_Item_Hidden $item)
    {
        $tpl = '<input type="hidden" value="%s"%s />'. "\n ";
        $this->_hiddenString .= sprintf(
            $tpl,
            htmlspecialchars($item->getValue()),
            $item->getAttributeStr()
        );
    }

    /**
     * Returns HTML for all items of a container (recursively)
     *
     * @param Formagic_Item_Container $container Container to be rendered
     * @return string HTML string
     */
    protected function _renderContainer(Formagic_Item_Container $container)
    {
        $rows = '';
        // reset per-container error class buffer (declared property; kept for
        // backwards compatibility -- it is not read within this class)
        $this->_errorClassHtml = '';
        foreach ($container->getItems() as $item) {
            // skip disabled inputs
            if ($item->isDisabled()) {
                continue;
            }

            // Handle containers recursively
            if ($item instanceof Formagic_Item_Container) {
                // render fake input string from container items
                $subContainerRows = $this->_renderContainer($item);

                // build row the sub-container will go into
                $rows .= str_replace(
                    '%CONTAINER%',
                    $subContainerRows,
                    $this->getContainerRowTemplate($item)
                );
                continue;
            }

            // Catch hiddens and continue to next input
            if ($item->isHidden()) {
                $this->_addHiddenItem($item);
                continue;
            }

            // Error message and class
            list ($errorString, $errorClass) = $this->_getErrorProperties($item);

            // Render label
            $itemLabel = $this->_getItemLabel($item);

            // Render item row string
            $rows .= str_replace(
                array('%LABEL%', '%ERROR%', '%ERRORCLASS%', '%INPUT%'),
                array($itemLabel, $errorString, $errorClass, $item->getHtml()),
                $this->getItemRowTemplate($item));
        }

        // build container content including wrapping HTML
        $containerLabel = $this->_getContainerLabel($container);
        $res = str_replace(
            array('%ROWS%', '%ATTRIBUTES%', '%LABEL%'),
            array($rows, $container->getAttributeStr(), $containerLabel),
            $this->getContainerWrapperTemplate($container)
        );
        return $res;
    }

    /**
     * Sets a template string for an item to a template pool.
     *
     * @param array $templateArray Template pool new template is to be added to
     * @param string $template New template string
     * @param string|Formagic_Item_Abstract $item
     */
    protected function _setTemplate(&$templateArray, $template, $item)
    {
        if ($item instanceof Formagic_Item_Abstract) {
            $itemName = $item->getName();
        } else {
            $itemName = (string)$item;
        }
        $templateArray[$itemName] = $template;
    }

    /**
     * Returns a template for a specific item from a template array.
     *
     * @param array $templateArray Pool of templates to choose from.
     * @param string|Formagic_Item_Abstract $item
     * @return string Template string
     */
    private function _getTemplate($templateArray, $item)
    {
        if ($item instanceof Formagic_Item_Abstract) {
            $itemName = $item->getName();
        } else {
            $itemName = (string)$item;
        }

        // no specific template found -> use default one
        if (!isset($templateArray[$itemName])) {
            $itemName = '';
        }
        return $templateArray[$itemName];
    }

    /**
     * Returns the rendered error list and HTML class attribute.
     *
     * Returns an array of two empty strings if the item validated ok.
     *
     * @param Formagic_Item_Abstract $item
     * @return array Error list and class attribute. Example:
     *      <code>
     *      array(
     *          '<ul>
     *              <li>Please enter a value.</li>
     *          </ul>',
     *          ' class="formagicError"')
     *      </code>
     */
    protected function _getErrorProperties(Formagic_Item_Abstract $item)
    {
        // skip all if no errors occurred
        $rules = $item->getViolatedRules();
        if (!count($rules)) {
            return array('', '');
        }

        $itemName = $item->getName();

        // assemble error message string
        $errors = '';
        $errorTemplate = $this->getItemErrorTemplate($itemName);
        foreach ($rules as $rule) {
            $errors .= str_replace(
                '%ERRORMESSAGE%',
                $rule->getMessage(),
                $errorTemplate
            );
        }

        // wrap error message string
        $errorWrapper = $this->getItemErrorWrapperTemplate($itemName);
        $errorString = str_replace(
            array(
                '%ERRORS%',
            ), array(
                $errors,
            ),
            $errorWrapper
        );
        $errorClass = ' class="' . $this->_errorClass . '"';
        return array($errorString, $errorClass);
    }

    /**
     * Returns rendered item label and template.
     *
     * Returns a non-breaking-space HTML entity if no item label is provided.
     *
     * @param Formagic_Item_Abstract $item Input item
     * @return string Item label and template
     */
    protected function _getItemLabel(Formagic_Item_Abstract $item)
    {
        $itemLabel = $item->getLabel();
        if (empty($itemLabel)) {
            return '&nbsp;';
        }

        $itemId = $item->getAttribute('id');
        $marker = $item->hasRule('mandatory')
            ? $this->getMandatoryMarkerTemplate($item)
            : '';
        $label = str_replace(
            array('%LABEL%', '%ID%', '%MANDATORYMARKER%'),
            array($this->_translator->_($itemLabel), $itemId, $marker),
            $this->getItemLabelTemplate($item));
        return $label;
    }

    /**
     * Renders a container label into the container label template.
     *
     * Returns empty string if no container label is defined.
     *
     * @param Formagic_Item_Container $container Container item
     * @return string Container label and template string
     */
    protected function _getContainerLabel(Formagic_Item_Container $container)
    {
        $label = $container->getLabel();
        if (empty($label)) {
            return '';
        }
        $label = str_replace(
            '%LABEL%',
            $this->_translator->_($label),
            $this->getContainerLabelTemplate($container));
        return $label;
    }
}
| Weasle/Formagic | src/Formagic/Renderer/Html.php | PHP | bsd-3-clause | 21,342 |
% COMMAND-WRAPPER-SKEL(1) Command Wrapper 0.1.0 | Command Wrapper
% Peter Trsko
% 2nd June 2019
# NAME
`command-wrapper-skel` - Generate subcommand or configuration file skeleton
for specific Command Wrapper environment, i.e. toolset.
# USAGE
TOOLSET\_COMMAND \[GLOBAL\_OPTIONS] skel \[\--language=LANGUAGE|-l LANGUAGE] SUBCOMMAND
TOOLSET\_COMMAND \[GLOBAL\_OPTIONS] skel {\--help|-h}
TOOLSET\_COMMAND \[GLOBAL\_OPTIONS] help skel
# DESCRIPTION
Purpose of this Command Wrapper subcommand is to allow users to rapidly create
or prototype new subcommands.
Command Wrapper subcommand can be written in any language user chooses. In the
end it is just an executable that follows Command Wrapper's *SUBCOMMAND
PROTOCOL*, which is documented in a separate manual page
`command-wrapper-subcommand-protocol(7)`. At the moment this subcommand
provides skeletons only for a limited number of languages, see
`--language=`*LANGUAGE* option for more details.
# OPTIONS
\--language=*LANGUAGE*, \--language *LANGUAGE*, -l *LANGUAGE*
: Choose programming language of the new subcommand. Currently only
following values are supported:
* *haskell* (default)
* *bash*
* *dhall* (configuration files)
\--parents, -p
: Create parent directories if they do not exist.
\--\[no-]edit, -e, -E
: Open, or not, the created file in an editor afterwards. Options `-e` and
`-E` are equivalent to `--edit` and `--no-edit`, respectively.
\--help, -h
: Display help information and exit. Same as `TOOLSET_COMMAND help skel`.
`SUBCOMMAND`
: Name of the new subcommand. Where and how the source code or executable
file will be named is configurable. By default it is:
```
${HOME}/.local/lib/${toolset}/${toolset}-${subcommand}
```
# EXIT STATUS
For documentation of generic *EXIT STATUS* codes see `command-wrapper(1)`
manual page section *EXIT STATUS*. Any *EXIT STATUS* codes specific to this
subcommand will be listed below.
`3`
: Cannot create target file. This can happen for multiple reasons:
    * Target directory doesn't exist and the `--parents` option wasn't specified.
* Target file exists and we don't want to overwrite it.
# FILES
`${XDG_CONFIG_HOME:-$HOME/.config}/${toolset}/command-wrapper-skel.dhall`
: Configuration file specifies templates for individual *LANGUAGE*s, how the
new *SUBCOMMAND* files will be named, and where they will be stored.
See also `XDG_CONFIG_HOME` in *ENVIRONMENT VARIABLES* section for more
information on how Command Wrapper figures out where to look for this
configuration file.
# ENVIRONMENT VARIABLES
See also `command-wrapper(1)` *ENVIRONMENT VARIABLES* section. Everything
mentioned there applies to this subcommand as well.
`XDG_CONFIG_HOME`
: Overrides where this subcommand expects its configuration file. It follows
this simple logic:
* If `XDG_CONFIG_HOME` environment variable is set then the configuration
file has path:
```
${XDG_CONFIG_HOME}/${toolset}/command-wrapper-skel.dhall
```
* If `XDG_CONFIG_HOME` environment variable is not set then default value
is used instead:
```
${HOME}/.config/${toolset}/command-wrapper-skel.dhall
```
See [XDG Base Directory Specification
](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html)
for more information on rationale behind this.
# SEE ALSO
command-wrapper(1)
* [XDG Base Directory Specification
](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html)
# BUGS
<https://github.com/trskop/command-wrapper/issues>
| trskop/command-wrapper | command-wrapper/man/command-wrapper-skel.1.md | Markdown | bsd-3-clause | 3,645 |
/**
* LaserPaddleItem.cpp
*
* Copyright (c) 2014, Callum Hay
* All rights reserved.
*
* Redistribution and use of the Biff! Bam!! Blammo!?! code or any derivative
* works are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The names of its contributors may not be used to endorse or promote products
* derived from this software without specific prior written permission.
* 4. Redistributions may not be sold, nor may they be used in a commercial
* product or activity without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL CALLUM HAY BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "LaserPaddleItem.h"
#include "GameModel.h"
#include "GameItemTimer.h"
// Length of time, in seconds, that the laser paddle power-up stays active
// (returned by Activate() as the timer duration).
const double LaserPaddleItem::LASER_PADDLE_TIMER_IN_SECS = 15.0;
// Identifier string used to register and look up this item type.
const char* LaserPaddleItem::LASER_PADDLE_ITEM_NAME = "LaserBulletPaddle";
/**
 * Constructs a laser paddle item spawning at spawnOrigin and dropping along
 * dropDir; the base GameItem is told this is a beneficial ("Good") item.
 */
LaserPaddleItem::LaserPaddleItem(const Point2D &spawnOrigin, const Vector2D& dropDir, GameModel *gameModel) :
GameItem(LASER_PADDLE_ITEM_NAME, spawnOrigin, dropDir, gameModel, GameItem::Good) {
}
// Nothing to release: this item owns no resources beyond what GameItem manages.
LaserPaddleItem::~LaserPaddleItem() {
}
double LaserPaddleItem::Activate() {
this->isActive = true;
PlayerPaddle* paddle = this->gameModel->GetPlayerPaddle();
assert(paddle != NULL);
// Kill other laser paddle timers
std::list<GameItemTimer*>& activeTimers = this->gameModel->GetActiveTimers();
std::vector<GameItemTimer*> removeTimers;
for (std::list<GameItemTimer*>::iterator iter = activeTimers.begin(); iter != activeTimers.end(); ++iter) {
GameItemTimer* currTimer = *iter;
if (currTimer->GetTimerItemType() == GameItem::LaserBulletPaddleItem) {
removeTimers.push_back(currTimer);
}
}
// Remove the laser paddle timers from the list of active timers
for (int i = 0; i < static_cast<int>(removeTimers.size()); i++) {
GameItemTimer* currTimer = removeTimers[i];
activeTimers.remove(currTimer);
delete currTimer;
currTimer = NULL;
}
// Make the paddle have laser shooting abilities
paddle->AddPaddleType(PlayerPaddle::LaserBulletPaddle);
GameItem::Activate();
return LaserPaddleItem::LASER_PADDLE_TIMER_IN_SECS;
}
void LaserPaddleItem::Deactivate() {
if (!this->isActive) {
return;
}
PlayerPaddle* paddle = this->gameModel->GetPlayerPaddle();
assert(paddle != NULL);
paddle->RemovePaddleType(PlayerPaddle::LaserBulletPaddle);
this->isActive = false;
GameItem::Deactivate();
} | callumhay/biffbamblammogame | GameModel/LaserPaddleItem.cpp | C++ | bsd-3-clause | 3,424 |
#ifndef __TEST_H__
#define __TEST_H__

#include <stdio.h>

/*
 * Plugin entry points for a dynamically loaded shared object.
 * NOTE(review): these names appear to be part of the .so's exported ABI, so
 * the apparent typo "TimerEvnet" (vs. "TimerEvent") is kept as-is; renaming
 * would break whatever resolves this symbol at load time -- confirm before
 * fixing.
 */

/* Presumably handles one request: soData is the incoming payload and the
 * reply buffer is returned with its length written to *replylen.
 * NOTE(review): ownership of the returned buffer is not documented here --
 * confirm with the implementation whether the caller must free it. */
char * Exec(char * soData, int * replylen);

/* Periodic timer callback (see note above about the name). */
int TimerEvnet();

/* One-time initialization hook, presumably called after the .so is loaded --
 * confirm with the host application. */
void OnInit();

#endif
| jacket-code/redisPlatform | solib/test.h | C | bsd-3-clause | 145 |
<?php
use yii\helpers\Html;
use yii\helpers\Url;

/* Memorial listing page: banner, title search form and a paginated grid of
 * memorial halls. All user-supplied values are HTML-escaped on output. */
$this->params['current_nav'] = 'memorial';
?>
<div class="container main-container">
    <div class="row">
        <!-- Fixed: this opening div tag was missing its closing '>' -->
        <div class="col-md-12">
            <a href="#" >
                <img src="/static/images/memorial/memorial_banner.png" width="100%">
            </a>
        </div>
    </div>
    <div class="blank"></div>
    <div class="row">
        <div class="col-md-12">
            <div class="sort-inner">
                <ul>
                    <li class="pull-right">
                        <form method="get" >
                            <!-- Escape the echoed search term: it is user input
                                 reflected into an HTML attribute (XSS). -->
                            <input name="MemorialSearch[title]" value="<?=Html::encode($searchModel->title)?>" placeholder="纪念馆名称"><button>搜索</button>
                        </form>
                    </li>
                </ul>
            </div>
        </div>
    </div>
    <div class="blank"></div>
    <div class="row memorials-list">
        <?php
        $models = $dataProvider->getModels();
        foreach ($models as $model):
        ?>
        <div class="col-md-4">
            <div class="media">
                <div class="media-left">
                    <div class="tab-content">
                        <div class="tab-pane active ml_0_0">
                            <a href="<?=Url::toRoute(['/memorial/home/hall/index','id'=>$model->id])?>" target="_blank">
                                <img src="<?=$model->getThumbImg('174x210')?>">
                            </a>
                        </div>
                    </div>
                </div>
                <div class="media-body">
                    <a target="_blank" href="<?=Url::toRoute(['/memorial/home/hall/index','id'=>$model->id])?>">
                        <!-- User-generated titles/names are encoded below -->
                        <h4 class="media-heading ellipsis"><?=Html::encode($model->title)?></h4>
                    </a>
                    <div class="tab-content">
                        <?php foreach ($model->deads as $v):?>
                        <div class="tab-pane active ml_0_0">
                            <a target="_blank" href="#">
                                <p class="ellipsis"><?=Html::encode($v->dead_name)?></p>
                            </a>
                            <em><?=$v->birth?>-<?=$v->fete?></em>
                        </div>
                        <?php endforeach;?>
                    </div>
                    <p class="ellipsis">建馆人:<?=Html::encode($model->user->username)?></p>
                    <small>建馆时间:<?=date('Y-m-d', $model->created_at)?></small><br>
                </div>
            </div>
        </div>
        <?php endforeach;?>
    </div>
    <div class="memorials-pager">
        <?php
        echo \yii\widgets\LinkPager::widget([
            'pagination' => $dataProvider->getPagination(),
            'nextPageLabel' => '>',
            'prevPageLabel' => '<',
            'lastPageLabel' => '尾页',
            'firstPageLabel' => '首页',
            'options' => [
                'class' => 'pull-right pagination'
            ]
        ]);
        ?>
    </div>
</div> | cboy868/lion | modules/memorial/views/home/site/memorial.php | PHP | bsd-3-clause | 3,091 |
<?php
namespace Config\Factory\Service;
use Zend\ServiceManager\FactoryInterface;
use Zend\ServiceManager\ServiceLocatorInterface;
use Config\Service\ConfigService;
class ConfigServiceFactory implements FactoryInterface {

    /**
     * Creates and wires up a ConfigService instance.
     *
     * @param ServiceLocatorInterface $serviceLocator Service locator used to
     *      resolve the entity manager holder and the Doctrine entity manager.
     * @return ConfigService Fully configured service instance
     */
    public function createService(ServiceLocatorInterface $serviceLocator) {
        $configService = new ConfigService(
            $serviceLocator->get('Application\Common\Service\EntityManagerHolder')
        );
        $configService->setEntityManager($serviceLocator->get('Doctrine\ORM\EntityManager'));

        return $configService;
    }

}
| michaelrohr/Sentru-WEB | module/Config/src/config/Factory/Service/ConfigServiceFactory.php | PHP | bsd-3-clause | 537 |
from __future__ import absolute_import, unicode_literals, division, print_function
from . import model_base
__all__ = ['PhotomModelB4']
class PhotomModelB4(model_base.DataModel):
    """
    A data model for photom reference files.

    Parameters
    ----------
    init : optional
        Initialization source, passed through to the base ``DataModel``.
    phot_table : optional
        Photometric calibration table; attached to the model when supplied.
    """
    schema_url = "photomb4.schema.yaml"

    def __init__(self, init=None, phot_table=None, **kwargs):
        super(PhotomModelB4, self).__init__(init=init, **kwargs)
        if phot_table is None:
            return
        self.phot_table = phot_table
class NircamPhotomModelB4(PhotomModelB4):
    """
    A data model for NIRCam photom reference files.
    """
    # __init__ is inherited from PhotomModelB4, whose implementation is
    # identical (it already stores phot_table); the duplicated override
    # added no behavior.
    schema_url = "nircam_photomb4.schema.yaml"
class NirissPhotomModelB4(PhotomModelB4):
    """
    A data model for NIRISS photom reference files.
    """
    # __init__ is inherited from PhotomModelB4, whose implementation is
    # identical (it already stores phot_table); the duplicated override
    # added no behavior.
    schema_url = "niriss_photomb4.schema.yaml"
class NirspecPhotomModelB4(PhotomModelB4):
    """
    A data model for NIRSpec photom reference files.
    """
    # __init__ is inherited from PhotomModelB4, whose implementation is
    # identical (it already stores phot_table); the duplicated override
    # added no behavior.
    schema_url = "nirspec_photomb4.schema.yaml"
class MiriImgPhotomModelB4(PhotomModelB4):
    """
    A data model for MIRI imaging photom reference files.
    """
    # __init__ is inherited from PhotomModelB4, whose implementation is
    # identical (it already stores phot_table); the duplicated override
    # added no behavior.
    schema_url = "mirimg_photomb4.schema.yaml"
class MiriMrsPhotomModelB4(PhotomModelB4):
    """
    A data model for MIRI MRS photom reference files.
    """
    # __init__ is inherited from PhotomModelB4, whose implementation is
    # identical (it already stores phot_table); the duplicated override
    # added no behavior.
    schema_url = "mirmrs_photomb4.schema.yaml"
| mdboom/jwst_lib.models | jwst_lib/models/photom_b4.py | Python | bsd-3-clause | 2,355 |
/*
* Copyright (C) 2007-2022 S[&]T, The Netherlands.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef CODA_HDF4_INTERNAL_H
#define CODA_HDF4_INTERNAL_H
#include "coda-hdf4.h"
#include "coda-mem-internal.h"
#include "hdf.h"
#include "mfhdf.h"
#define MAX_HDF4_NAME_LENGTH 256
#define MAX_HDF4_VAR_DIMS 32
/* Discriminator identifying which concrete coda_hdf4_* struct a
 * coda_hdf4_type instance really is (see the inheritance tree below).
 * The trailing comment on each tag gives the CODA type class it maps to. */
typedef enum hdf4_type_tag_enum
{
    tag_hdf4_basic_type,        /* coda_integer_class, coda_real_class, coda_text_class */
    tag_hdf4_basic_type_array,  /* coda_array_class */
    tag_hdf4_string,            /* coda_text_class (= attribute containing array of chars) */
    tag_hdf4_attributes,        /* coda_record_class */
    tag_hdf4_file_attributes,   /* coda_record_class */
    tag_hdf4_GRImage,           /* coda_array_class */
    tag_hdf4_SDS,               /* coda_array_class */
    tag_hdf4_Vdata,             /* coda_record_class */
    tag_hdf4_Vdata_field,       /* coda_array_class */
    tag_hdf4_Vgroup             /* coda_record_class */
} hdf4_type_tag;
/* Inheritance tree:
* coda_dynamic_type
* \ -- coda_hdf4_type
* \ -- coda_hdf4_basic_type_array
* |-- coda_hdf4_attributes
* |-- coda_hdf4_file_attributes
* |-- coda_hdf4_GRImage
* |-- coda_hdf4_SDS
* |-- coda_hdf4_Vdata
* |-- coda_hdf4_Vdata_field
* |-- coda_hdf4_Vgroup
*/
/* Common header shared by every HDF4 backend type: each concrete
 * coda_hdf4_* struct below begins with these same three members, so a
 * pointer to any of them can be treated as a coda_hdf4_type
 * (C-style single inheritance; see the tree above). */
typedef struct coda_hdf4_type_struct
{
    coda_backend backend;       /* backend discriminator */
    coda_type *definition;      /* CODA type definition for this node */
    hdf4_type_tag tag;          /* which concrete struct this really is */
} coda_hdf4_type;
/* We only use this type for attribute data.
 * Although other types, such as GRImage, and Vdata objects also have a properties such as 'ncomp' and 'order'
 * that might be used to create an array of basic types, the 'ncomp' and 'order' for these types can be more
 * naturally implemented as additional diminsions to the parent type (which is an array).
 * We therefore only use the basic_type_array if the parent compound type is a record (which is only when the parent
 * type is a tag_hdf4_attributes or tag_hdf4_file_attributes type).
 */
typedef struct coda_hdf4_basic_type_array_struct
{
    coda_backend backend;
    coda_type_array *definition;
    hdf4_type_tag tag;          /* tag_hdf4_basic_type_array */
    coda_hdf4_type *basic_type; /* element type of the array */
} coda_hdf4_basic_type_array;
/* Record of attributes attached to one HDF4 object (GRImage, SDS, Vdata,
 * Vdata field or Vgroup); which object is identified by parent_tag/parent_id. */
typedef struct coda_hdf4_attributes_struct
{
    coda_backend backend;
    coda_type_record *definition;
    hdf4_type_tag tag;
    hdf4_type_tag parent_tag;   /* tag of the object these attributes belong to */
    int32 parent_id;            /* HDF4 id of the parent object */
    int32 field_index;          /* only for Vdata */
    coda_hdf4_type **attribute; /* basic types for each of the attributes */
    int32 num_obj_attributes;
    int32 num_data_labels;
    int32 num_data_descriptions;
    int32 *ann_id;              /* annotation ids -- NOTE(review): presumably
                                   covers the labels/descriptions above; confirm
                                   layout against the implementation */
} coda_hdf4_attributes;
/* Record of file-level (global) attributes: GR/SD global attributes plus
 * file annotations (labels and descriptions). */
typedef struct coda_hdf4_file_attributes_struct
{
    coda_backend backend;
    coda_type_record *definition;
    hdf4_type_tag tag;
    coda_hdf4_type **attribute; /* basic types for each of the attributes */
    int32 num_gr_attributes;
    int32 num_sd_attributes;
    int32 num_file_labels;
    int32 num_file_descriptions;
} coda_hdf4_file_attributes;
/* An HDF4 GRImage (general raster image) exposed as a CODA array. */
typedef struct coda_hdf4_GRImage_struct
{
    coda_backend backend;
    coda_type_array *definition;
    hdf4_type_tag tag;
    int32 group_count;          /* number of groups this item belongs to */
    int32 ref;                  /* HDF4 reference number */
    int32 ri_id;                /* id from the GR interface */
    int32 index;                /* index of this image within the file's GR interface */
    char gri_name[MAX_HDF4_NAME_LENGTH + 1];
    int32 ncomp;                /* number of components per pixel */
    int32 data_type;            /* HDF4 data type of the components */
    int32 interlace_mode;
    int32 dim_sizes[2];         /* GRImages are always 2-dimensional */
    coda_hdf4_type *basic_type; /* CODA type of a single element */
    coda_hdf4_attributes *attributes;
} coda_hdf4_GRImage;
typedef struct coda_hdf4_SDS_struct
{
coda_backend backend;
coda_type_array *definition;
hdf4_type_tag tag;
int32 group_count; /* number of groups this item belongs to */
int32 ref;
int32 sds_id;
int32 index;
char sds_name[MAX_HDF4_NAME_LENGTH + 1];
int32 rank;
int32 dimsizes[MAX_HDF4_VAR_DIMS];
int32 data_type;
coda_hdf4_type *basic_type;
coda_hdf4_attributes *attributes;
} coda_hdf4_SDS;
typedef struct coda_hdf4_Vdata_struct
{
coda_backend backend;
coda_type_record *definition;
hdf4_type_tag tag;
int32 group_count; /* number of groups this item belongs to */
int32 ref;
int32 vdata_id;
int32 hide;
char vdata_name[MAX_HDF4_NAME_LENGTH + 1];
char classname[MAX_HDF4_NAME_LENGTH + 1];
struct coda_hdf4_Vdata_field_struct **field;
coda_hdf4_attributes *attributes;
} coda_hdf4_Vdata;
typedef struct coda_hdf4_Vdata_field_struct
{
coda_backend backend;
coda_type_array *definition;
hdf4_type_tag tag;
char field_name[MAX_HDF4_NAME_LENGTH + 1];
int32 num_records;
int32 order;
int num_elements;
int32 data_type;
coda_hdf4_type *basic_type;
coda_hdf4_attributes *attributes;
} coda_hdf4_Vdata_field;
typedef struct coda_hdf4_Vgroup_struct
{
coda_backend backend;
coda_type_record *definition;
hdf4_type_tag tag;
int32 group_count; /* number of groups this item belongs to */
int32 ref;
int32 vgroup_id;
int32 hide;
char vgroup_name[MAX_HDF4_NAME_LENGTH + 1];
char classname[MAX_HDF4_NAME_LENGTH + 1];
int32 version;
struct coda_hdf4_type_struct **entry;
coda_hdf4_attributes *attributes;
} coda_hdf4_Vgroup;
/* Per-file state for an opened HDF4 (or netCDF-via-HDF4) product. */
struct coda_hdf4_product_struct
{
    /* general fields (shared between all supported product types) */
    char *filename;
    int64_t file_size;
    coda_format format;
    coda_mem_record *root_type;
    const coda_product_definition *product_definition;
    long *product_variable_size;
    int64_t **product_variable;
    int64_t mem_size;
    uint8_t *mem_ptr;
    /* 'hdf4' product specific fields */
    int32 is_hdf;               /* is it a real HDF4 file or are we accessing a (net)CDF file */
    int32 file_id;              /* Hopen() handle */
    int32 gr_id;                /* GRstart() handle */
    int32 sd_id;                /* SDstart() handle */
    int32 an_id;                /* ANstart() handle */
    int32 num_sd_file_attributes;
    int32 num_gr_file_attributes;
    int32 num_sds;
    coda_hdf4_SDS **sds;
    int32 num_images;
    coda_hdf4_GRImage **gri;
    int32 num_vgroup;
    coda_hdf4_Vgroup **vgroup;
    int32 num_vdata;
    coda_hdf4_Vdata **vdata;
};
typedef struct coda_hdf4_product_struct coda_hdf4_product;
/* Constructors for the wrapper types above; each reads the object's
 * metadata from the already-opened product handles. */
coda_hdf4_GRImage *coda_hdf4_GRImage_new(coda_hdf4_product *product, int32 index);
coda_hdf4_SDS *coda_hdf4_SDS_new(coda_hdf4_product *product, int32 sds_index);
coda_hdf4_Vdata *coda_hdf4_Vdata_new(coda_hdf4_product *product, int32 vdata_ref);
coda_hdf4_Vgroup *coda_hdf4_Vgroup_new(coda_hdf4_product *product, int32 vgroup_ref);
/* Builds product->root_type from the sds/gri/vgroup/vdata tables. */
int coda_hdf4_create_root(coda_hdf4_product *product);
#endif
| stcorp/coda | libcoda/coda-hdf4-internal.h | C | bsd-3-clause | 7,920 |
"""
* Copyright (c) 2012-2017, Nic McDonald and Adriana Flores
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* - Neither the name of prim nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
"""
import codecs
import re
import os
import sys
try:
from setuptools import setup
except:
print('please install setuptools via pip:')
print(' pip3 install setuptools')
sys.exit(-1)
def find_version(*file_paths):
    """Extract __version__ from a source file without importing it.

    file_paths are joined relative to this script's directory (an absolute
    path may also be given) and the file is scanned for a line of the form
    ``__version__ = '<value>'``.

    Returns the version string; raises RuntimeError if none is found.
    """
    path = os.path.join(os.path.abspath(os.path.dirname(__file__)), *file_paths)
    # Use a context manager so the handle is closed deterministically
    # (the original left the file open until garbage collection).
    with codecs.open(path, 'r') as version_file:
        contents = version_file.read()
    version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
                              contents, re.M)
    if version_match:
        return version_match.group(1)
    raise RuntimeError("Unable to find version string.")
# Register the sssweep package with setuptools.  The version is read from
# sssweep/__init__.py so it is defined in exactly one place.
setup(
    name='sssweep',
    version=find_version('sssweep', '__init__.py'),
    description='Automatic task generation for SuperSim sweeps and plot web viewer',
    author='Nic McDonald and Adriana Flores',
    author_email='nicci02@hotmail.com and adrifloresm@gmail.com',
    license='BSD',
    url='http://github.com/nicmcd/sssweep',
    packages=['sssweep'],
    install_requires=['taskrun >= 3.0.0',
                      'ssplot >= 0.1.0'],
)
| adrifloresm/sssweep | setup.py | Python | bsd-3-clause | 2,611 |
# Package config for mpl: resolve the libraries mpl itself links against
# before importing its exported targets, so consumers get usable targets
# from a plain find_package(mpl).
include(CMakeFindDependencyMacro)
find_dependency(Threads)
find_dependency(MPI)
# Import the mpl:: targets generated at install/export time.
include(${CMAKE_CURRENT_LIST_DIR}/mplTargets.cmake)
| rabauke/mpl | cmake/mplConfig.cmake | CMake | bsd-3-clause | 135 |
<?php namespace lang;
/**
* Indicates a class specified by a name cannot be found - that is,
* no classloader provides such a class.
*
* @see xp://lang.IClassLoader#loadClass
* @see xp://lang.XPClass#forName
* @test xp://net.xp_framework.unittest.reflection.ClassLoaderTest
* @test xp://net.xp_framework.unittest.reflection.ReflectionTest
* @test xp://net.xp_framework.unittest.reflection.RuntimeClassDefinitionTest
*/
class ClassNotFoundException extends XPException implements ClassLoadingException {
  protected $failedClass= null;
  protected $loaders= [];

  /**
   * Constructor
   *
   * @param  string $failedClass
   * @param  lang.IClassLoader[] $loaders default []
   * @param  lang.Throwable $cause default NULL
   */
  public function __construct($failedClass, $loaders= [], $cause= null) {
    $message= sprintf($this->message(), $failedClass);
    if ($cause) {
      $message.= ': '.$cause->getMessage();
    }
    parent::__construct($message, $cause);
    $this->failedClass= $failedClass;
    $this->loaders= $loaders;
  }

  /**
   * Returns the classloaders that were asked
   *
   * @return lang.IClassLoader[]
   */
  public function getLoaders() {
    return $this->loaders;
  }

  /**
   * Returns the exception's message - override this in
   * subclasses to provide exact error hints.
   *
   * @return string
   */
  protected function message() {
    return 'Class "%s" could not be found';
  }

  /**
   * Retrieve name of class which could not be loaded
   *
   * @return string
   */
  public function getFailedClassName() {
    return $this->failedClass;
  }

  /**
   * Retrieve compound representation
   *
   * @return string
   */
  public function compoundMessage() {
    $listed= [];
    foreach ($this->loaders as $loader) {
      $listed[]= \xp::stringOf($loader);
    }
    return 'Exception '.nameof($this).' ('.$this->message.") {\n  ".implode("\n  ", $listed)."\n }";
  }
}
| johannes85/core | src/main/php/lang/ClassNotFoundException.class.php | PHP | bsd-3-clause | 1,825 |
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE401_Memory_Leak__twoIntsStruct_calloc_02.c
Label Definition File: CWE401_Memory_Leak.c.label.xml
Template File: sources-sinks-02.tmpl.c
*/
/*
* @description
* CWE: 401 Memory Leak
* BadSource: calloc Allocate data using calloc()
* GoodSource: Allocate data on the stack
* Sinks:
* GoodSink: call free() on data
* BadSink : no deallocation of data
* Flow Variant: 02 Control flow: if(1) and if(0)
*
* */
#include "std_testcase.h"
#include <wchar.h>
#ifndef OMITBAD
/* Bad variant: heap allocation with no matching free().
 * NOTE: this is Juliet CWE-401 corpus code; the leak is the test's
 * purpose and must NOT be "fixed". */
void CWE401_Memory_Leak__twoIntsStruct_calloc_02_bad()
{
    twoIntsStruct * data;
    data = NULL;
    if(1)
    {
        /* POTENTIAL FLAW: Allocate memory on the heap */
        data = (twoIntsStruct *)calloc(100, sizeof(twoIntsStruct));
        if (data == NULL) {exit(-1);}
        /* Initialize and make use of data */
        data[0].intOne = 0;
        data[0].intTwo = 0;
        printStructLine(&data[0]);
    }
    if(1)
    {
        /* POTENTIAL FLAW: No deallocation */
        ; /* empty statement needed for some flow variants */
    }
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* The four "good" variants below pair each flawed source/sink with its
 * fixed counterpart, using constant-condition control flow (flow
 * variant 02).  Dead branches and leaks are deliberate corpus content. */
/* goodB2G1() - use badsource and goodsink by changing the second 1 to 0 */
static void goodB2G1()
{
    twoIntsStruct * data;
    data = NULL;
    if(1)
    {
        /* POTENTIAL FLAW: Allocate memory on the heap */
        data = (twoIntsStruct *)calloc(100, sizeof(twoIntsStruct));
        if (data == NULL) {exit(-1);}
        /* Initialize and make use of data */
        data[0].intOne = 0;
        data[0].intTwo = 0;
        printStructLine(&data[0]);
    }
    if(0)
    {
        /* INCIDENTAL: CWE 561 Dead Code, the code below will never run */
        printLine("Benign, fixed string");
    }
    else
    {
        /* FIX: Deallocate memory */
        free(data);
    }
}
/* goodB2G2() - use badsource and goodsink by reversing the blocks in the second if */
static void goodB2G2()
{
    twoIntsStruct * data;
    data = NULL;
    if(1)
    {
        /* POTENTIAL FLAW: Allocate memory on the heap */
        data = (twoIntsStruct *)calloc(100, sizeof(twoIntsStruct));
        if (data == NULL) {exit(-1);}
        /* Initialize and make use of data */
        data[0].intOne = 0;
        data[0].intTwo = 0;
        printStructLine(&data[0]);
    }
    if(1)
    {
        /* FIX: Deallocate memory */
        free(data);
    }
}
/* goodG2B1() - use goodsource and badsink by changing the first 1 to 0 */
static void goodG2B1()
{
    twoIntsStruct * data;
    data = NULL;
    if(0)
    {
        /* INCIDENTAL: CWE 561 Dead Code, the code below will never run */
        printLine("Benign, fixed string");
    }
    else
    {
        /* FIX: Use memory allocated on the stack with ALLOCA */
        data = (twoIntsStruct *)ALLOCA(100*sizeof(twoIntsStruct));
        /* Initialize and make use of data */
        data[0].intOne = 0;
        data[0].intTwo = 0;
        printStructLine(&data[0]);
    }
    if(1)
    {
        /* POTENTIAL FLAW: No deallocation */
        ; /* empty statement needed for some flow variants */
    }
}
/* goodG2B2() - use goodsource and badsink by reversing the blocks in the first if */
static void goodG2B2()
{
    twoIntsStruct * data;
    data = NULL;
    if(1)
    {
        /* FIX: Use memory allocated on the stack with ALLOCA */
        data = (twoIntsStruct *)ALLOCA(100*sizeof(twoIntsStruct));
        /* Initialize and make use of data */
        data[0].intOne = 0;
        data[0].intTwo = 0;
        printStructLine(&data[0]);
    }
    if(1)
    {
        /* POTENTIAL FLAW: No deallocation */
        ; /* empty statement needed for some flow variants */
    }
}
/* Runs every good variant in sequence. */
void CWE401_Memory_Leak__twoIntsStruct_calloc_02_good()
{
    goodB2G1();
    goodB2G2();
    goodG2B1();
    goodG2B2();
}
#endif /* OMITGOOD */
/* Below is the main(). It is only used when building this testcase on
its own for testing or for building a binary to use in testing binary
analysis tools. It is not used when compiling all the testcases as one
application, which is how source code analysis tools are tested. */
#ifdef INCLUDEMAIN
/* Standalone driver: runs the good and/or bad variants depending on the
 * OMITGOOD/OMITBAD build flags (used only when building one testcase). */
int main(int argc, char * argv[])
{
    /* seed randomness */
    srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
    printLine("Calling good()...");
    CWE401_Memory_Leak__twoIntsStruct_calloc_02_good();
    printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
    printLine("Calling bad()...");
    CWE401_Memory_Leak__twoIntsStruct_calloc_02_bad();
    printLine("Finished bad()");
#endif /* OMITBAD */
    return 0;
}
| JianpingZeng/xcc | xcc/test/juliet/testcases/CWE401_Memory_Leak/s03/CWE401_Memory_Leak__twoIntsStruct_calloc_02.c | C | bsd-3-clause | 4,741 |
# Gives spree_addresses.state_id a hard-coded default.
# NOTE(review): the class name mentions "city" but the column altered is
# state_id; the magic id 11260 presumably identifies the intended default
# spree_states row — confirm it exists in the target database.
class ChangeCityDefaultInAddresses < ActiveRecord::Migration
  def change
    change_column :spree_addresses, :state_id, :integer, default: 11260
  end
end
| danghieu/spree_users_management | db/migrate/20160224092840_change_city_default_in_addresses.rb | Ruby | bsd-3-clause | 155 |
#pragma once
#include <algorithm>
#include <cmath>
#include "multiscalefilter/MultiScaleFilter.hpp"
// Computes Fourier-series coefficients of a windowed/remapped signal by
// numerical (trapezoidal) integration.  ct()/st() give the cosine/sine
// coefficients of order n; init() integrates the window itself.
class FourierDecomposition
{
    int window_type;                 // GAUSS, S_TONE, HAT or SMOOTH_HAT
    double T;                        // period of the expansion
    double sigma;                    // window width parameter
    float beta, alpha, remap_sigma;  // S-tone remap parameters
    int n;                           // order of the Fourier term
    double omega_n;                  // angular frequency: n * 2*pi / T

    // Integrand for the cosine coefficient.
    // NOTE(review): unlike fsin()/f(), this always applies the Gaussian
    // window regardless of window_type — confirm this asymmetry is
    // intentional before relying on non-Gaussian windows here.
    double fcos(double x)
    {
        return getGaussianWindow(abs(x / sigma)) * cos(omega_n * x);
    }

    // Integrand for the sine coefficient: remapped signal times sin.
    double fsin(double x)
    {
        switch (window_type)
        {
        case GAUSS:
            //return x * getGaussWeight(x, 0, sigma) * sin(omega_n * x);
            return x * getGaussianWindow(abs(x / sigma)) * sin(omega_n * x);
        case S_TONE:
            return getSToneCurve<double>(x, 0.0, remap_sigma, beta, alpha) * sin(omega_n * x);
        case HAT:
            return x * std::max(0.0, 1.0 - abs(x / sigma)) * sin(omega_n * x);
        case SMOOTH_HAT:
            return getSmoothingHat(x, 0.0, sigma, 10) * sin(omega_n * x);
        }
        return x * sin(omega_n * x);
    }

    // The window/remap function itself (used for the DC term).
    double f(double x)
    {
        switch (window_type)
        {
        case GAUSS:
            return getGaussianWindow(abs(x / sigma));
        case S_TONE:
            return getSToneCurve<double>(x, 0.0, remap_sigma, beta, alpha);
        case HAT:
            return std::max(0.0, 1.0 - abs(x / sigma));
        case SMOOTH_HAT:
            return getSmoothingHat(x, 0.0, sigma, 10);
        }
        return getSToneWeight(float(x), remap_sigma, beta, alpha);
    }

    // Composite trapezoidal rule over [a, b] with m subintervals for an
    // arbitrary integrand; optionally uses Kahan compensated summation
    // for the interior sum.  Shared by ct(), st() and init(), which
    // previously triplicated this loop.
    template <class Fn>
    double trapezoid(Fn fn, double a, double b, const int m, const bool isKahan)
    {
        const double step = (b - a) / m;  // subinterval width
        double x = a;
        double s = 0.0;                   // sum of interior samples
        if (isKahan)
        {
            double c = 0.0;               // Kahan compensation term
            for (int k = 1; k <= m - 1; k++)
            {
                x += step;
                const double y = fn(x) - c;
                const double t = s + y;
                c = (t - s) - y;
                s = t;
            }
        }
        else
        {
            for (int k = 1; k <= m - 1; k++)
            {
                x += step;
                s += fn(x);
            }
        }
        // Endpoints carry half weight in the trapezoidal rule.
        return step * ((fn(a) + fn(b)) / 2.0 + s);
    }

public:
    // T: period, sigma: window width, beta/alpha/remap_sigma: S-tone
    // parameters, n: order, window_type: GAUSS/S_TONE/HAT/SMOOTH_HAT.
    FourierDecomposition(double T, double sigma, double beta, double alpha, double remap_sigma, int n, int window_type)
        :T(T), sigma(sigma), n(n), window_type(window_type), beta((float)beta), alpha((float)alpha), remap_sigma((float)remap_sigma)
    {
        omega_n = n * CV_2PI / T;//omega=CV_2PI/T
    }

    // Cosine coefficient: integral of fcos over [a, b].
    // a, b: Integration interval; m: number of divisions.
    double ct(double a, double b, const int m, bool isKahan = false)//0-T/2
    {
        return trapezoid([this](double x) { return fcos(x); }, a, b, m, isKahan);
    }

    // Sine coefficient: integral of fsin over [a, b].
    // a, b: Integration interval; m: number of divisions.
    double st(double a, double b, const int m, const bool isKahan = false)
    {
        return trapezoid([this](double x) { return fsin(x); }, a, b, m, isKahan);
    }

    // DC/normalisation term: integral of the window f over [a, b].
    //double operator()(double a, double b, const int m)
    double init(double a, double b, const int m, const bool isKahan = false)
    {
        return trapezoid([this](double x) { return f(x); }, a, b, m, isKahan);
    }
};
import pytest
from collections import namedtuple
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_less
from skopt import dummy_minimize
from skopt.benchmarks import bench1
from skopt.callbacks import TimerCallback
from skopt.callbacks import DeltaYStopper
@pytest.mark.fast_test
def test_timer_callback():
    """TimerCallback records one elapsed-time entry per optimizer call."""
    timer = TimerCallback()
    dummy_minimize(bench1, [(-1.0, 1.0)], callback=timer, n_calls=10)
    # One timing per call, and the total elapsed time must be positive.
    assert_equal(len(timer.iter_time), 10)
    assert_less(0.0, sum(timer.iter_time))
@pytest.mark.fast_test
def test_deltay_stopper():
    """DeltaYStopper fires once the n_best best values lie within delta."""
    stopper = DeltaYStopper(0.2, 3)
    Result = namedtuple('Result', ['func_vals'])

    # Best three values (0, 0.1, 0.19) span less than delta -> stop.
    assert stopper(Result([0, 1, 2, 3, 4, 0.1, 0.19]))
    # Best three values (0, 0.1, 1) span more than delta -> continue.
    assert not stopper(Result([0, 1, 2, 3, 4, 0.1]))
    # Fewer than n_best observations: undecided, returns None.
    assert stopper(Result([0, 1])) is None
| ccauet/scikit-optimize | skopt/tests/test_callbacks.py | Python | bsd-3-clause | 832 |
/* This file may have been modified by DJ Delorie (Jan 1991). If so,
** these modifications are Coyright (C) 1991 DJ Delorie, 24 Kirsten Ave,
** Rochester NH, 03867-2954, USA.
*/
/*-
* Copyright (c) 1988 The Regents of the University of California.
* All rights reserved.
*
* Redistribution and use in source and binary forms are permitted
* provided that: (1) source distributions retain this entire copyright
* notice and comment, and (2) distributions including binaries display
* the following acknowledgement: ``This product includes software
* developed by the University of California, Berkeley and its contributors''
* in the documentation or other materials provided with the distribution
* and in all advertising materials mentioning features or use of this
* software. Neither the name of the University nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND WITHOUT ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
*/
#include <reent.h>
#include <stdlib.h>
#include <string.h>
#include "envlock.h"
/* _putenv_r - reentrant version of putenv that either adds
   or replaces the environment variable "name"
   with "value" which is specified by str as "name=value".
   Returns 0 on success, non-zero on failure.  */
int
_DEFUN (_putenv_r, (reent_ptr, str),
        struct _reent *reent_ptr _AND
        _CONST char *str)
{
  register char *p, *equal;
  int rval;

  /* Work on a private copy so the caller's string is never modified.  */
  p = _strdup_r (reent_ptr, str);
  if (!p)
    return 1;
  /* ISO C strchr replaces the legacy BSD index(); <string.h> is already
     included by this file.  */
  if (!(equal = strchr (p, '=')))
    {
      (void) _free_r (reent_ptr, p);
      return 1;
    }
  /* Split "name=value" at the '=' and delegate to the reentrant setenv
     (overwrite enabled).  */
  *equal = '\0';
  rval = _setenv_r (reent_ptr, p, equal + 1, 1);
  (void) _free_r (reent_ptr, p);
  return rval;
}
| shaotuanchen/sunflower_exp | tools/source/newlib-1.9.0/newlib/libc/stdlib/putenv_r.c | C | bsd-3-clause | 1,877 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_14) on Fri Sep 18 14:09:16 BST 2009 -->
<TITLE>
Uses of Class uk.org.mygrid.cagrid.servicewrapper.service.ncbiblast.service.globus.resource.NCBIBlastResourceConfiguration
</TITLE>
<META NAME="date" CONTENT="2009-09-18">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class uk.org.mygrid.cagrid.servicewrapper.service.ncbiblast.service.globus.resource.NCBIBlastResourceConfiguration";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../../../../uk/org/mygrid/cagrid/servicewrapper/service/ncbiblast/service/globus/resource/NCBIBlastResourceConfiguration.html" title="class in uk.org.mygrid.cagrid.servicewrapper.service.ncbiblast.service.globus.resource"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../../../../index-files/index-1.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../../../../../../index.html?uk/org/mygrid/cagrid/servicewrapper/service/ncbiblast/service/globus/resource/\class-useNCBIBlastResourceConfiguration.html" target="_top"><B>FRAMES</B></A>
<A HREF="NCBIBlastResourceConfiguration.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Class<br>uk.org.mygrid.cagrid.servicewrapper.service.ncbiblast.service.globus.resource.NCBIBlastResourceConfiguration</B></H2>
</CENTER>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Packages that use <A HREF="../../../../../../../../../../../uk/org/mygrid/cagrid/servicewrapper/service/ncbiblast/service/globus/resource/NCBIBlastResourceConfiguration.html" title="class in uk.org.mygrid.cagrid.servicewrapper.service.ncbiblast.service.globus.resource">NCBIBlastResourceConfiguration</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#uk.org.mygrid.cagrid.servicewrapper.service.ncbiblast.service.globus.resource"><B>uk.org.mygrid.cagrid.servicewrapper.service.ncbiblast.service.globus.resource</B></A></TD>
<TD> </TD>
</TR>
</TABLE>
<P>
<A NAME="uk.org.mygrid.cagrid.servicewrapper.service.ncbiblast.service.globus.resource"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../../../../../../../../uk/org/mygrid/cagrid/servicewrapper/service/ncbiblast/service/globus/resource/NCBIBlastResourceConfiguration.html" title="class in uk.org.mygrid.cagrid.servicewrapper.service.ncbiblast.service.globus.resource">NCBIBlastResourceConfiguration</A> in <A HREF="../../../../../../../../../../../uk/org/mygrid/cagrid/servicewrapper/service/ncbiblast/service/globus/resource/package-summary.html">uk.org.mygrid.cagrid.servicewrapper.service.ncbiblast.service.globus.resource</A></FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Methods in <A HREF="../../../../../../../../../../../uk/org/mygrid/cagrid/servicewrapper/service/ncbiblast/service/globus/resource/package-summary.html">uk.org.mygrid.cagrid.servicewrapper.service.ncbiblast.service.globus.resource</A> that return <A HREF="../../../../../../../../../../../uk/org/mygrid/cagrid/servicewrapper/service/ncbiblast/service/globus/resource/NCBIBlastResourceConfiguration.html" title="class in uk.org.mygrid.cagrid.servicewrapper.service.ncbiblast.service.globus.resource">NCBIBlastResourceConfiguration</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> <A HREF="../../../../../../../../../../../uk/org/mygrid/cagrid/servicewrapper/service/ncbiblast/service/globus/resource/NCBIBlastResourceConfiguration.html" title="class in uk.org.mygrid.cagrid.servicewrapper.service.ncbiblast.service.globus.resource">NCBIBlastResourceConfiguration</A></CODE></FONT></TD>
<TD><CODE><B>NCBIBlastResourceBase.</B><B><A HREF="../../../../../../../../../../../uk/org/mygrid/cagrid/servicewrapper/service/ncbiblast/service/globus/resource/NCBIBlastResourceBase.html#getConfiguration()">getConfiguration</A></B>()</CODE>
<BR>
</TD>
</TR>
</TABLE>
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../../../../uk/org/mygrid/cagrid/servicewrapper/service/ncbiblast/service/globus/resource/NCBIBlastResourceConfiguration.html" title="class in uk.org.mygrid.cagrid.servicewrapper.service.ncbiblast.service.globus.resource"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../../../../index-files/index-1.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../../../../../../index.html?uk/org/mygrid/cagrid/servicewrapper/service/ncbiblast/service/globus/resource/\class-useNCBIBlastResourceConfiguration.html" target="_top"><B>FRAMES</B></A>
<A HREF="NCBIBlastResourceConfiguration.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
</BODY>
</HTML>
| NCIP/taverna-grid | servicewrapper/doc/uk/org/mygrid/cagrid/servicewrapper/service/ncbiblast/service/globus/resource/class-use/NCBIBlastResourceConfiguration.html | HTML | bsd-3-clause | 10,064 |
/*
Language: XL
Author: Christophe de Dinechin <christophe@taodyne.com>
Description: An extensible programming language, based on parse tree rewriting
Website: http://xlr.sf.net
*/
// highlight.js language definition for XL.  Returns the grammar object;
// all data below is declarative (keyword lists and sub-modes).
export default function(hljs) {
  // Reserved words of the XL language proper.
  const KWS = [
    "if",
    "then",
    "else",
    "do",
    "while",
    "until",
    "for",
    "loop",
    "import",
    "with",
    "is",
    "as",
    "where",
    "when",
    "by",
    "data",
    "constant",
    "integer",
    "real",
    "text",
    "name",
    "boolean",
    "symbol",
    "infix",
    "prefix",
    "postfix",
    "block",
    "tree"
  ];
  // Built-in functions (math, text, and Tao presentation primitives).
  // Names containing '?' are matched literally via the $pattern below.
  const BUILT_INS = [
    "in",
    "mod",
    "rem",
    "and",
    "or",
    "xor",
    "not",
    "abs",
    "sign",
    "floor",
    "ceil",
    "sqrt",
    "sin",
    "cos",
    "tan",
    "asin",
    "acos",
    "atan",
    "exp",
    "expm1",
    "log",
    "log2",
    "log10",
    "log1p",
    "pi",
    "at",
    "text_length",
    "text_range",
    "text_find",
    "text_replace",
    "contains",
    "page",
    "slide",
    "basic_slide",
    "title_slide",
    "title",
    "subtitle",
    "fade_in",
    "fade_out",
    "fade_at",
    "clear_color",
    "color",
    "line_color",
    "line_width",
    "texture_wrap",
    "texture_transform",
    "texture",
    "scale_?x",
    "scale_?y",
    "scale_?z?",
    "translate_?x",
    "translate_?y",
    "translate_?z?",
    "rotate_?x",
    "rotate_?y",
    "rotate_?z?",
    "rectangle",
    "circle",
    "ellipse",
    "sphere",
    "path",
    "line_to",
    "move_to",
    "quad_to",
    "curve_to",
    "theme",
    "background",
    "contents",
    "locally",
    "time",
    "mouse_?x",
    "mouse_?y",
    "mouse_buttons"
  ];
  // Standard Tao/XL module names, highlighted like built-ins.
  const BUILTIN_MODULES = [
    "ObjectLoader",
    "Animate",
    "MovieCredits",
    "Slides",
    "Filters",
    "Shading",
    "Materials",
    "LensFlare",
    "Mapping",
    "VLCAudioVideo",
    "StereoDecoder",
    "PointCloud",
    "NetworkAccess",
    "RemoteControl",
    "RegExp",
    "ChromaKey",
    "Snowfall",
    "NodeJS",
    "Speech",
    "Charts"
  ];
  const LITERALS = [
    "true",
    "false",
    "nil"
  ];
  const KEYWORDS = {
    // Identifier pattern: letters, digits, '_' and '?' (XL allows '?').
    $pattern: /[a-zA-Z][a-zA-Z0-9_?]*/,
    keyword: KWS,
    literal: LITERALS,
    built_in: BUILT_INS.concat(BUILTIN_MODULES)
  };
  // Single-line string forms; newlines are illegal inside them.
  const DOUBLE_QUOTE_TEXT = {
    className: 'string',
    begin: '"',
    end: '"',
    illegal: '\\n'
  };
  const SINGLE_QUOTE_TEXT = {
    className: 'string',
    begin: '\'',
    end: '\'',
    illegal: '\\n'
  };
  // Long (multi-line) text delimited by << >>.
  const LONG_TEXT = {
    className: 'string',
    begin: '<<',
    end: '>>'
  };
  // Based numbers, e.g. 16#FF#, with optional fraction and exponent.
  const BASED_NUMBER = {
    className: 'number',
    begin: '[0-9]+#[0-9A-Z_]+(\\.[0-9-A-Z_]+)?#?([Ee][+-]?[0-9]+)?'
  };
  // import statements, highlighting the module path string.
  const IMPORT = {
    beginKeywords: 'import',
    end: '$',
    keywords: KEYWORDS,
    contains: [ DOUBLE_QUOTE_TEXT ]
  };
  // A rewrite definition: everything left of '->' is the function head.
  const FUNCTION_DEFINITION = {
    className: 'function',
    begin: /[a-z][^\n]*->/,
    returnBegin: true,
    end: /->/,
    contains: [
      hljs.inherit(hljs.TITLE_MODE, { starts: {
        endsWithParent: true,
        keywords: KEYWORDS
      } })
    ]
  };
  return {
    name: 'XL',
    aliases: [ 'tao' ],
    keywords: KEYWORDS,
    contains: [
      hljs.C_LINE_COMMENT_MODE,
      hljs.C_BLOCK_COMMENT_MODE,
      DOUBLE_QUOTE_TEXT,
      SINGLE_QUOTE_TEXT,
      LONG_TEXT,
      FUNCTION_DEFINITION,
      IMPORT,
      BASED_NUMBER,
      hljs.NUMBER_MODE
    ]
  };
}
| highlightjs/highlight.js | src/languages/xl.js | JavaScript | bsd-3-clause | 3,413 |
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General information about this assembly is controlled through the
// following set of attributes.  Change these attribute values to modify
// the information associated with the assembly.
[assembly: AssemblyTitle("Rhea.Data")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("Rhea.Data")]
[assembly: AssemblyCopyright("Copyright © 2014")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly invisible
// to COM components.  If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is the ID of the typelib if this project is exposed to COM.
[assembly: Guid("dd5e393b-27b5-4f63-97a7-3bd79027ded5")]
// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all of these values, or default the Build and Revision
// numbers by using '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| robertzml/Rhea | Rhea.Data/Properties/AssemblyInfo.cs | C# | bsd-3-clause | 1,306 |
<?php
declare(strict_types=1);

require __DIR__ . "/../vendor/autoload.php";

// Bootstrap the Testbench environment: compile the DI container into
// tests/_temp and expose test-specific parameters plus tests.neon config.
Testbench\Bootstrap::setup(__DIR__ . '/_temp', function (\Nette\Configurator $configurator): void {
    $configurator->addParameters([
        "appDir" => __DIR__,
        "tempDir" => __DIR__ . "/_temp",
        // Debug mode forced on so Tracy surfaces errors during test runs.
        "debugMode" => true,
    ]);
    $configurator->addConfig(__DIR__ . "/tests.neon");
});
?> | nexendrie/translation | tests/bootstrap.php | PHP | bsd-3-clause | 362 |
import os
import sys
import re
from bento.compat \
import \
inspect as compat_inspect
from bento.commands.core \
import \
command
SAFE_MODULE_NAME = re.compile("[^a-zA-Z_]")
__HOOK_REGISTRY = {}
__PRE_HOOK_REGISTRY = {}
__POST_HOOK_REGISTRY = {}
__COMMANDS_OVERRIDE = {}
__INIT_FUNCS = {}
def add_to_registry(func, category):
    """Record *func* in the hook registry under *category*."""
    # setdefault creates the list on first registration; the module-level
    # dict is mutated in place, so no ``global`` statement is needed.
    __HOOK_REGISTRY.setdefault(category, []).append(func)
def override_command(command, func):
    """Register *func* as an override implementation for *command*.

    The directory of the file two frames up the call stack (the hook file
    that requested the override) is recorded alongside the function so the
    override can later be resolved relative to that location.
    """
    global __COMMANDS_OVERRIDE
    local_dir = os.path.dirname(compat_inspect.stack()[2][1])
    # dict.has_key() was removed in Python 3; use the ``in`` operator, which
    # behaves identically on Python 2.
    if command in __COMMANDS_OVERRIDE:
        __COMMANDS_OVERRIDE[command].append((func, local_dir))
    else:
        __COMMANDS_OVERRIDE[command] = [(func, local_dir)]
def add_to_pre_registry(func, cmd_name):
    """Record *func* to run before the command named *cmd_name*."""
    # In-place mutation of the module-level dict; no ``global`` required.
    __PRE_HOOK_REGISTRY.setdefault(cmd_name, []).append(func)
def add_to_post_registry(func, cmd_name):
    """Record *func* to run after the command named *cmd_name*."""
    # In-place mutation of the module-level dict; no ``global`` required.
    __POST_HOOK_REGISTRY.setdefault(cmd_name, []).append(func)
def get_registry_categories():
    """Return the category names present in the hook registry.

    Note: this returns ``dict.keys()`` directly — a list on Python 2, a
    view object on Python 3.
    """
    global __HOOK_REGISTRY
    return __HOOK_REGISTRY.keys()
def get_registry_category(categorie):
    """Return the list of hooks registered under *categorie*.

    Raises KeyError for an unknown category.  (The parameter keeps its
    original spelling for interface compatibility.)
    """
    global __HOOK_REGISTRY
    return __HOOK_REGISTRY[categorie]
def get_pre_hooks(cmd_name):
    """Return the pre-hooks registered for *cmd_name*, or [] if none."""
    try:
        return __PRE_HOOK_REGISTRY[cmd_name]
    except KeyError:
        return []
def get_post_hooks(cmd_name):
    """Return the post-hooks registered for *cmd_name*, or [] if none."""
    try:
        return __POST_HOOK_REGISTRY[cmd_name]
    except KeyError:
        return []
def get_command_override(cmd_name):
    """Return the override entries for *cmd_name*, or [] if none."""
    try:
        return __COMMANDS_OVERRIDE[cmd_name]
    except KeyError:
        return []
def _make_hook_decorator(command_name, kind):
    """Build a decorator registering "pre" or "post" hooks for a command.

    The returned decorator records the decorated function (together with
    the directory of the file that applied it and a help-bypass flag) in
    the general hook registry under "<kind>_<command_name>", and in the
    matching pre/post registry, then returns the function unchanged.
    """
    name = "%s_%s" % (kind, command_name)
    help_bypass = False
    def decorator(f):
        # Directory of the file one frame up the stack — i.e. the hook
        # file that applied the decorator.
        local_dir = os.path.dirname(compat_inspect.stack()[1][1])
        add_to_registry((f, local_dir, help_bypass), name)
        if kind == "post":
            add_to_post_registry((f, local_dir, help_bypass), command_name)
        elif kind == "pre":
            add_to_pre_registry((f, local_dir, help_bypass), command_name)
        else:
            raise ValueError("invalid hook kind %s" % kind)
        return f
    return decorator
# Ready-made pre/post hook decorators for the standard bento commands, e.g.:
#
#     @post_build
#     def my_hook(context): ...
post_configure = _make_hook_decorator("configure", "post")
pre_configure = _make_hook_decorator("configure", "pre")
post_build = _make_hook_decorator("build", "post")
pre_build = _make_hook_decorator("build", "pre")
post_sdist = _make_hook_decorator("sdist", "post")
pre_sdist = _make_hook_decorator("sdist", "pre")
def override(f):
    """Decorator registering *f* as the override for the command named
    ``f.__name__``.

    Returns *f* so the decorated name still refers to the function; the
    previous implementation returned None, which silently rebound the
    decorated name to None when used with ``@override``.
    """
    override_command(f.__name__, f)
    return f
def options(f):
    """Register *f* as the plugin "options" lifecycle hook.

    Returns a wrapper that delegates to *f* with the given context.
    """
    __INIT_FUNCS["options"] = f
    def _delegate(context):
        return f(context)
    return _delegate
def startup(f):
    """Register *f* as the plugin "startup" lifecycle hook.

    Returns a wrapper that delegates to *f* with the given context.
    """
    __INIT_FUNCS["startup"] = f
    def _delegate(context):
        return f(context)
    return _delegate
def shutdown(f):
    """Register *f* as the plugin "shutdown" lifecycle hook.

    Returns a wrapper that delegates to *f* with the given context.
    """
    __INIT_FUNCS["shutdown"] = f
    def _delegate(context):
        return f(context)
    return _delegate
def dummy_startup(ctx):
    # Default no-op installed when the hook file defines no startup() hook
    # (see create_hook_module).
    pass

def dummy_options(ctx):
    # Default no-op installed when the hook file defines no options() hook.
    pass

def dummy_shutdown():
    # Default no-op installed when the hook file defines no shutdown() hook.
    pass
def create_hook_module(target):
    """Load the hook file at *target* and return it as a new module.

    The file's path is sanitized into a unique module name, executed with
    its own directory temporarily prepended to sys.path, and registered in
    sys.modules.  Lifecycle hooks (startup/options/shutdown) that were not
    registered during execution are filled in with no-op defaults.
    """
    import imp

    safe_name = SAFE_MODULE_NAME.sub("_", target, len(target))
    module_name = "bento_hook_%s" % safe_name
    main_file = os.path.abspath(target)

    module = imp.new_module(module_name)
    module.__file__ = main_file
    # Close the handle deterministically — the original relied on the GC to
    # close the file returned by open(), leaking it on some interpreters.
    fh = open(main_file)
    try:
        code = fh.read()
    finally:
        fh.close()

    sys.path.insert(0, os.path.dirname(main_file))
    try:
        exec(compile(code, main_file, 'exec'), module.__dict__)
        sys.modules[module_name] = module
    finally:
        sys.path.pop(0)

    module.root_path = main_file
    if not "startup" in __INIT_FUNCS:
        module.startup = dummy_startup
    if not "options" in __INIT_FUNCS:
        module.options = dummy_options
    if not "shutdown" in __INIT_FUNCS:
        module.shutdown = dummy_shutdown
    return module
| abadger/Bento | bento/commands/hooks.py | Python | bsd-3-clause | 3,935 |
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <stdlib.h>
#include "chrome/common/url_constants.h"
#include "googleurl/src/url_util.h"
namespace chrome {
#if defined(OS_CHROMEOS)
const char kCrosScheme[] = "cros";
#endif
// Schemes whose page contents are savable.  The array is NULL-terminated
// so callers can iterate without a separate length constant.
const char* kSavableSchemes[] = {
  kHttpScheme,
  kHttpsScheme,
  kFileScheme,
  kFtpScheme,
  kExtensionScheme,
  kChromeDevToolsScheme,
  kChromeUIScheme,
  NULL
};
const char kAboutAboutURL[] = "about:about";
const char kAboutAppCacheInternalsURL[] = "about:appcache-internals";
const char kAboutCacheURL[] = "about:cache";
const char kAboutConflicts[] = "about:conflicts";
const char kAboutKillURL[] = "about:kill";
const char kAboutCreditsURL[] = "about:credits";
const char kAboutDNSURL[] = "about:dns";
const char kAboutFlagsURL[] = "about:flags";
const char kAboutGpuURL[] = "about:gpu";
const char kAboutGpuCleanURL[] = "about:gpuclean";
const char kAboutGpuCrashURL[] = "about:gpucrash";
const char kAboutGpuHangURL[] = "about:gpuhang";
const char kAboutHangURL[] = "about:hang";
const char kAboutHistogramsURL[] = "about:histograms";
const char kAboutMemoryURL[] = "about:memory";
const char kAboutNetInternalsURL[] = "about:net-internals";
const char kAboutPluginsURL[] = "about:plugins";
const char kAboutShorthangURL[] = "about:shorthang";
const char kAboutSyncURL[] = "about:sync";
const char kAboutSyncInternalsURL[] = "about:sync-internals";
const char kAboutTermsURL[] = "about:terms";
const char kAboutVersionURL[] = "about:version";
// Use an obfuscated URL to make this nondiscoverable, we only want this
// to be used for testing.
const char kAboutBrowserCrash[] = "about:inducebrowsercrashforrealz";
const char kChromeUIAboutAboutURL[] = "chrome://about/about";
const char kChromeUIAboutCreditsURL[] = "chrome://about/credits";
const char kChromeUIAboutURL[] = "chrome://settings/about";
const char kChromeUIAppLauncherURL[] = "chrome://newtab/#mode=app-launcher";
const char kChromeUIBookmarksURL[] = "chrome://bookmarks/";
const char kChromeUIBugReportURL[] = "chrome://bugreport/";
const char kChromeUIConflictsURL[] = "chrome://conflicts/";
const char kChromeUIConstrainedHTMLTestURL[] = "chrome://constrained-test/";
const char kChromeUICrashesURL[] = "chrome://crashes/";
const char kChromeUIDevToolsURL[] = "chrome-devtools://devtools/";
const char kChromeUIDownloadsURL[] = "chrome://downloads/";
const char kChromeUIExtensionIconURL[] = "chrome://extension-icon/";
const char kChromeUIExtensionsURL[] = "chrome://extensions/";
const char kChromeUIFaviconURL[] = "chrome://favicon/";
const char kChromeUIFlagsURL[] = "chrome://flags/";
const char kChromeUIHistory2URL[] = "chrome://history2/";
const char kChromeUIHistoryURL[] = "chrome://history/";
const char kChromeUIIPCURL[] = "chrome://about/ipc";
const char kChromeUIKeyboardURL[] = "chrome://keyboard/";
const char kChromeUINewTabURL[] = "chrome://newtab";
const char kChromeUIPluginsURL[] = "chrome://plugins/";
const char kChromeUIPrintURL[] = "chrome://print/";
const char kChromeUISettingsURL[] = "chrome://settings/";
const char kChromeUITextfieldsURL[] = "chrome://textfields/";
#if defined(OS_CHROMEOS)
const char kChromeUIAboutOSCreditsURL[] = "chrome://about/os-credits";
const char kChromeUIActivationMessage[] = "chrome://activationmessage/";
const char kChromeUIActiveDownloadsURL[] = "chrome://active-downloads/";
const char kChromeUIChooseMobileNetworkURL[] =
"chrome://choose-mobile-network/";
const char kChromeUICollectedCookiesURL[] = "chrome://collected-cookies/";
const char kChromeUIHttpAuthURL[] = "chrome://http-auth/";
const char kChromeUIImageBurnerURL[] = "chrome://imageburner/";
const char kChromeUIKeyboardOverlayURL[] = "chrome://keyboardoverlay/";
const char kChromeUIMediaplayerURL[] = "chrome://mediaplayer/";
const char kChromeUIMobileSetupURL[] = "chrome://mobilesetup/";
const char kChromeUIProxySettingsURL[] = "chrome://proxy-settings/";
const char kChromeUIRegisterPageURL[] = "chrome://register/";
const char kChromeUISlideshowURL[] = "chrome://slideshow/";
const char kChromeUISimUnlockURL[] = "chrome://sim-unlock/";
const char kChromeUISystemInfoURL[] = "chrome://system/";
const char kChromeUIUserImageURL[] = "chrome://userimage/";
const char kChromeUIEnterpriseEnrollmentURL[] =
"chrome://enterprise-enrollment/";
#endif
// Keep this list sorted please.
const char kChromeUIBookmarksHost[] = "bookmarks";
const char kChromeUIBugReportHost[] = "bugreport";
const char kChromeUIConflictsHost[] = "conflicts";
const char kChromeUICrashesHost[] = "crashes";
const char kChromeUIDevToolsHost[] = "devtools";
const char kChromeUIDialogHost[] = "dialog";
const char kChromeUIDownloadsHost[] = "downloads";
const char kChromeUIExtensionIconHost[] = "extension-icon";
const char kChromeUIExtensionsHost[] = "extensions";
const char kChromeUIFaviconHost[] = "favicon";
const char kChromeUITouchIconHost[] = "touch-icon";
const char kChromeUIFlagsHost[] = "flags";
const char kChromeUIGpuInternalsHost[] = "gpu-internals";
const char kChromeUIHistoryHost[] = "history";
const char kChromeUIHistory2Host[] = "history2";
const char kChromeUIInspectorHost[] = "inspector";
const char kChromeUIKeyboardHost[] = "keyboard";
const char kChromeUINetInternalsHost[] = "net-internals";
const char kChromeUINewTabHost[] = "newtab";
const char kChromeUIPluginsHost[] = "plugins";
const char kChromeUIPrintHost[] = "print";
const char kChromeUIRemotingHost[] = "remoting";
const char kChromeUIRemotingResourcesHost[] = "remotingresources";
const char kChromeUIResourcesHost[] = "resources";
const char kChromeUIScreenshotPath[] = "screenshots";
const char kChromeUISettingsHost[] = "settings";
const char kChromeUISyncInternalsHost[] = "sync-internals";
const char kChromeUISyncResourcesHost[] = "syncresources";
const char kChromeUITextfieldsHost[] = "textfields";
const char kChromeUIThemePath[] = "theme";
const char kChromeUIThumbnailPath[] = "thumb";
#if defined(OS_CHROMEOS)
const char kChromeUIActiveDownloadsHost[] = "active-downloads";
const char kChromeUIActivationMessageHost[] = "activationmessage";
const char kChromeUIChooseMobileNetworkHost[] = "choose-mobile-network";
const char kChromeUICollectedCookiesHost[] = "collected-cookies";
const char kChromeUIHttpAuthHost[] = "http-auth";
const char kChromeUIImageBurnerHost[] = "imageburner";
const char kChromeUIKeyboardOverlayHost[] = "keyboardoverlay";
const char kChromeUIMediaplayerHost[] = "mediaplayer";
const char kChromeUIMobileSetupHost[] = "mobilesetup";
const char kChromeUIProxySettingsHost[] = "proxy-settings";
const char kChromeUIRegisterPageHost[] = "register";
const char kChromeUISlideshowHost[] = "slideshow";
const char kChromeUISimUnlockHost[] = "sim-unlock";
const char kChromeUISystemInfoHost[] = "system";
const char kChromeUIMenu[] = "menu";
const char kChromeUIWrenchMenu[] = "wrench-menu";
const char kChromeUINetworkMenu[] = "network-menu";
const char kChromeUIUserImageHost[] = "userimage";
const char kChromeUIEnterpriseEnrollmentHost[] = "enterprise-enrollment";
#endif
#if defined(OS_CHROMEOS) && defined(TOUCH_UI)
const char kChromeUILoginContainerHost[] = "login-container";
const char kChromeUILoginHost[] = "login";
#endif
const char kAppCacheViewInternalsURL[] = "chrome://appcache-internals/";
const char kBlobViewInternalsURL[] = "chrome://blob-internals/";
const char kCloudPrintResourcesURL[] = "chrome://cloudprintresources/";
const char kCloudPrintResourcesHost[] = "cloudprintresources";
const char kCloudPrintSetupHost[] = "cloudprintsetup";
const char kNetworkViewInternalsURL[] = "chrome://net-internals/";
const char kNetworkViewCacheURL[] = "chrome://view-http-cache/";
const char kSyncViewInternalsURL[] = "chrome://sync-internals/";
// GPU sub pages
const char kGpuInternalsURL[] = "chrome://gpu-internals/";
// Option sub pages.
const char kAdvancedOptionsSubPage[] = "advanced";
const char kAutofillSubPage[] = "autofill";
const char kBrowserOptionsSubPage[] = "browser";
const char kClearBrowserDataSubPage[] = "clearBrowserData";
const char kContentSettingsSubPage[] = "content";
const char kContentSettingsExceptionsSubPage[] = "contentExceptions";
const char kDefaultOptionsSubPage[] = "";
const char kImportDataSubPage[] = "importData";
const char kInstantConfirmPage[] = "instantConfirm";
const char kLanguageOptionsSubPage[] = "languages";
const char kPersonalOptionsSubPage[] = "personal";
const char kPasswordManagerSubPage[] = "passwords";
const char kSearchEnginesSubPage[] = "searchEngines";
const char kSyncSetupSubPage[] = "syncSetup";
#if defined(OS_CHROMEOS)
const char kInternetOptionsSubPage[] = "internet";
const char kSystemOptionsSubPage[] = "system";
#endif
const char kPasswordManagerLearnMoreURL[] =
#if defined(OS_CHROMEOS)
"https://www.google.com/support/chromeos/bin/answer.py?answer=95606";
#else
"https://www.google.com/support/chrome/bin/answer.py?answer=95606";
#endif
const char kChromeHelpURL[] =
#if defined(OS_CHROMEOS)
"https://www.google.com/support/chromeos/";
#else
"https://www.google.com/support/chrome/";
#endif
const char kPageInfoHelpCenterURL[] =
#if defined(OS_CHROMEOS)
"https://www.google.com/support/chromeos/bin/answer.py?answer=95617";
#else
"https://www.google.com/support/chrome/bin/answer.py?answer=95617";
#endif
const char kCrashReasonURL[] =
#if defined(OS_CHROMEOS)
"https://www.google.com/support/chromeos/bin/answer.py?answer=1047340";
#else
"https://www.google.com/support/chrome/bin/answer.py?answer=95669";
#endif
// TODO: These are currently placeholders that point to the crash
// docs. See bug http://crosbug.com/10711
const char kKillReasonURL[] =
#if defined(OS_CHROMEOS)
"https://www.google.com/support/chromeos/bin/answer.py?answer=1047340";
#else
"https://www.google.com/support/chrome/bin/answer.py?answer=95669";
#endif
const char kPrivacyLearnMoreURL[] =
#if defined(OS_CHROMEOS)
"https://www.google.com/support/chromeos/bin/answer.py?answer=1047334";
#else
"https://www.google.com/support/chrome/bin/answer.py?answer=114836";
#endif
const char kChromiumProjectURL[] = "http://code.google.com/chromium/";
const char kLearnMoreReportingURL[] =
"https://www.google.com/support/chrome/bin/answer.py?answer=96817";
const char kOutdatedPluginLearnMoreURL[] =
"https://www.google.com/support/chrome/bin/answer.py?answer=1181003";
const char kBlockedPluginLearnMoreURL[] =
"https://www.google.com/support/chrome/bin/answer.py?answer=1247383";
// Registers Chrome's custom URL schemes as "standard" with url_util so that
// GURL parses them with full component structure, then permanently locks the
// standard-scheme list.
void RegisterChromeSchemes() {
  // Don't need "chrome-internal" which was used in old versions of Chrome for
  // the new tab page.
  url_util::AddStandardScheme(kChromeDevToolsScheme);
  url_util::AddStandardScheme(kChromeUIScheme);
  url_util::AddStandardScheme(kExtensionScheme);
  url_util::AddStandardScheme(kMetadataScheme);
#if defined(OS_CHROMEOS)
  url_util::AddStandardScheme(kCrosScheme);
#endif
  // Prevent future modification of the standard schemes list. This is to
  // prevent accidental creation of data races in the program. AddStandardScheme
  // isn't threadsafe so must be called when GURL isn't used on any other
  // thread. This is really easy to mess up, so we say that all calls to
  // AddStandardScheme in Chrome must be inside this function.
  url_util::LockStandardSchemes();
}
} // namespace chrome
| Crystalnix/house-of-life-chromium | chrome/common/url_constants.cc | C++ | bsd-3-clause | 11,428 |
<?php
namespace Phalcon
{
    /**
     * \Phalcon\FilterInterface
     *
     * Interface for \Phalcon\Filter
     */
    interface FilterInterface
    {
        /**
         * Adds a user-defined filter
         *
         * @param string $name Name under which the filter is registered
         * @param callable $handler Callable applied to values being sanitized
         * @return \Phalcon\FilterInterface
         */
        public function add($name, $handler);

        /**
         * Sanitizes a value with a specified single filter or set of filters
         *
         * @param mixed $value Value to sanitize
         * @param mixed $filters Filter name or set of filter names to apply
         * @return mixed The sanitized value
         */
        public function sanitize($value, $filters);

        /**
         * Returns the user-defined filters in the instance
         *
         * @return object[]
         */
        public function getFilters();
    }
}
| hu2008yinxiang/phalcon-pure-php | src/Phalcon/FilterInterface.php | PHP | bsd-3-clause | 928 |
var namespacecode_1_1models_1_1model_etas_ga =
[
[ "model", "classcode_1_1models_1_1model_etas_ga_1_1model.html", "classcode_1_1models_1_1model_etas_ga_1_1model" ]
]; | PyQuake/earthquakemodels | html/namespacecode_1_1models_1_1model_etas_ga.js | JavaScript | bsd-3-clause | 170 |
/* Posts */

/* Post composer textarea spans the wall's full width. */
#wall textarea {
    width: 100%;
}

/* Strip default list styling from the post list. */
#wall ul {
    display: block;
    list-style-type: none;
    margin: 0;
    padding: 0;
}

/* Nested list = comments under a post: smaller text, indented past the
   75px post avatar plus a 10px gap. */
#wall ul ul {
    font-size: smaller;
    margin-left: 85px;
}

#wall ul ul li {
    margin: 15px 0;
    padding-top: 15px;
    border-top: 1px solid #999;
}

/* Commenter avatar floats left of the comment body. */
#wall ul ul li.comment img {
    float: left;
    width: 50px;
}

/* Comment body clears the 50px avatar plus a 10px gap. */
#wall ul ul li > div {
    margin-left: 60px;
}

/* Post author avatar. */
#wall li.post img {
    float: left;
    width: 75px;
}

#wall li.post > div {
    padding-left: 85px;
}

/* Tiny avatar variant. */
img.mini {
    width: 20px;
    height: 20px;
}
| pekkis/losonaamakirja | web/css/losofacebook-posts.css | CSS | bsd-3-clause | 562 |
import os
import nose
import django
# Name of the directory containing this file (assigned but not used below —
# presumably kept for importers; confirm before removing).
NAME = os.path.basename(os.path.dirname(__file__))
ROOT = os.path.abspath(os.path.dirname(__file__))
# Point Django at the test settings module.
os.environ['DJANGO_SETTINGS_MODULE'] = 'fake_settings'
# NOTE(review): PYTHONPATH only affects newly started interpreters, not this
# process — presumably intended for subprocesses; verify.
os.environ['PYTHONPATH'] = os.pathsep.join([ROOT,
                                            os.path.join(ROOT, 'examples')])
if __name__ == '__main__':
    if hasattr(django, 'setup'):
        # Django's app registry was added in 1.7. We need to call `setup` to
        # initiate it.
        django.setup()
    nose.main()
| jbalogh/jingo | run_tests.py | Python | bsd-3-clause | 522 |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package edu.wpi.first.wpilibj.templates.commands;
import team.util.LogDebugger;
/**
 * Command that shifts the shifter subsystem into gear 1.
 *
 * @author team3574
 */
public class ShiftGear1 extends CommandBase {
    public ShiftGear1() {
        // Declare the subsystem dependency so the scheduler interrupts any
        // other command currently using the shifter.
        requires(theShifter);
    }
    // Called just before this Command runs the first time: performs the
    // actual shift (the command's whole job happens here).
    protected void initialize() {
        LogDebugger.log("gear 1 init");
        theShifter.gear1();
    }
    // Called repeatedly when this Command is scheduled to run; nothing to do,
    // the shift already happened in initialize().
    protected void execute() {
    }
    // Make this return true when this Command no longer needs to run execute();
    // finishes immediately since initialize() did all the work.
    protected boolean isFinished() {
        return true;
    }
    // Called once after isFinished returns true; no cleanup needed.
    protected void end() {
    }
    // Called when another command which requires one or more of the same
    // subsystems is scheduled to run; no cleanup needed.
    protected void interrupted() {
    }
}
| Team3574/Alastair | src/edu/wpi/first/wpilibj/templates/commands/ShiftGear1.java | Java | bsd-3-clause | 1,036 |
namespace ELFinder.Connector.Exceptions
{
    /// <summary>
    /// ELFinder connector exception carrying a newly selected name
    /// (presumably chosen when the originally requested name could not be
    /// used — confirm against connector call sites).
    /// </summary>
    public class ELFinderNewNameSelectionException : ELFinderConnectorException
    {
        #region Properties
        /// <summary>
        /// The newly selected name.
        /// </summary>
        public string NewName { get; set; }
        #endregion
        #region Constructors
        /// <summary>
        /// Create a new instance carrying the selected name.
        /// </summary>
        /// <param name="newName">The newly selected name</param>
        public ELFinderNewNameSelectionException(string newName)
        {
            NewName = newName;
        }
        #endregion
    }
} | linguanostra/ELFinder.Connector.NET | Core/ELFinder.Connector/Exceptions/ELFinderNewNameSelectionException.cs | C# | bsd-3-clause | 680 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.