repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
foreverForCode/openresty | app/njs/controller/flagship.js |
// Parse the current URL's query parameters (jems is the site-wide helper lib).
var ParHref = jems.parsURL();

$(function () {
    // Image carousel for the flagship banner.
    jeSlide({
        mainCell: "#flagshipslider",
        navCell: ".hd ul",
        conCell: ".bd ul",
        effect: "leftLoop",
        duration: 4,
        // switchCell: ".datapic",
        sLoad: "data-pic",
        isTouch: true,
        showNav: true, // automatic pagination dots
        // Auto-play only makes sense with more than one slide
        // (redundant `? true : false` removed; comparison is already boolean).
        autoPlay: $('#flagshipslider .bd li').length > 1
    });
    jems.fixMenu();
    // WeChat share setup (the argument is the user-facing share title).
    jems.wxShare("洋葱旗舰馆");
    // Lazy image loading plugin.
    $('.lazy').lazyload({ placeAttr: "dataimg", fewPiece: 0 });
    // "Back to top" button plugin.
    $(window).goStick({ btnCell: "#gotop", posBottom: 50 });
});
digithun/jamplay-nap | scripts/seeds/seed-generator/book.js | <filename>scripts/seeds/seed-generator/book.js
const casual = require('casual')
const _ = require('lodash')
const { writeSeed, loadSeedId, cover, preview, genArray } = require('../helpers')
const sequential = require('promise-sequential')
module.exports = async function generate () {
const authorIds = loadSeedId('user')
const tagIds = loadSeedId('tag')
const authorId = authorIds[casual.integer(0, authorIds.length - 1)]
const dumpBook = {
_id: '58a6e85138cbdaba481a7b59',
title: casual.title,
episodeIds: [],
thumbnailImage: await preview(),
coverImage: await cover(),
authorId,
commentIds: [],
tagIds: genArray(tagIds, 5),
category: await casual.book_category,
synopsis: casual.sentences(20),
viewCount: casual.positive_int(10000),
createdAt: casual.date
}
const books = await sequential(_.range(100).map(() => async () => {
const authorId = authorIds[casual.integer(0, authorIds.length - 1)]
return {
_id: casual.objectId,
title: casual.title,
episodeIds: [],
thumbnailImage: await preview(),
coverImage: await cover(),
authorId,
commentIds: [],
tagIds: genArray(tagIds, 5),
category: await casual.book_category,
synopsis: casual.sentences(20),
viewCount: casual.positive_int(10000),
createdAt: casual.date
}
// .then(book => {
// return genBookFollower(users, book).then(() => book);
// });
}))
return writeSeed('book', [].concat([dumpBook], books))
}
|
01shadowalker01/code-challenges | leetcode/numbers/number-complement.js | /**
* Problem link:
* https://leetcode.com/problems/number-complement/
*
* @param {number} num
* @return {number}
*/
var findComplement = function (num) {
  // Complement of the single bit "0" is "1" (matches the previous
  // string-based behavior for num === 0).
  if (num === 0) return 1;
  // Mask of ones covering num's significant bits: 5 (101b) -> 7 (111b).
  // Math.clz32 counts leading zero bits of the 32-bit representation,
  // replacing the previous string round-trip through toBinary/toDecimal.
  const mask = 2 ** (32 - Math.clz32(num)) - 1;
  // XOR with the mask flips exactly the significant bits;
  // >>> 0 keeps the result unsigned when bit 31 of num is set.
  return (num ^ mask) >>> 0;
};
// Unsigned 32-bit binary string for a number, e.g. 5 -> "101".
function toBinary(decimal) {
  const unsigned = decimal >>> 0; // force unsigned 32-bit interpretation
  return unsigned.toString(2);
}
// Parse a binary digit string back into a number, e.g. "101" -> 5.
function toDecimal(binary) {
  return Number.parseInt(binary, 2);
}
module.exports = { findComplement };
|
daneryl/uwazi | app/api/evidences_vault/vault.js | <filename>app/api/evidences_vault/vault.js
import 'isomorphic-fetch';
import { URLSearchParams } from 'url';
import request from 'shared/JSONRequest';
import { fileFromReadStream } from 'api/files/filesystem';
import evidencePackage from './evidencePackage';
import vaultEvidencesModel from './vaultEvidencesModel';
// Base URL of the public evidence-vault service (note the trailing slash).
const vaultUrl = 'https://public.evidence-vault.org/';
// Evidence statuses (reported as strings) that are ready to be processed.
const statusToProcess = ['201', '418'];

const vault = {
  /**
   * Fetch the evidence list for a token, keeping only evidences that are
   * not yet tracked locally and whose status is processable.
   * @param {string} token vault API token
   * @returns {Promise<Array>} new processable evidences
   */
  async newEvidences(token) {
    const body = new URLSearchParams();
    body.append('token', token);
    const evidencesTracked = (await vaultEvidencesModel.get()).map(e => e.request);
    return request
      .post(`${vaultUrl}list.php`, body, {
        'Content-Type': 'application/x-www-form-urlencoded',
      })
      .then(res =>
        res.json
          .filter(e => !evidencesTracked.includes(e.request))
          .filter(e => statusToProcess.includes(e.status))
      );
  },

  /**
   * Download an evidence zip package and hand the stored file to
   * evidencePackage for processing.
   * @param {Object} evidence vault descriptor ({ request, filename, ... })
   * @returns {Promise} result of evidencePackage
   */
  async downloadPackage(evidence) {
    // vaultUrl already ends with '/', so no extra slash is added here
    // (previously produced a '...org//download/...' double slash).
    const fileName = await fileFromReadStream(
      `${evidence.request}.zip`,
      (await fetch(`${vaultUrl}download/${evidence.filename}`)).body
    );
    return evidencePackage(fileName, evidence);
  },
};

export default vault;
|
FusionBox2/MAF2 | Core/mafNodeFactory.h | <filename>Core/mafNodeFactory.h
/*=========================================================================
Program: MAF2
Module: mafNodeFactory
Authors: <NAME>
Copyright (c) B3C
All rights reserved. See Copyright.txt or
http://www.scsitaly.com/Copyright.htm for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notice for more information.
=========================================================================*/
#ifndef __mafNodeFactory_h
#define __mafNodeFactory_h
//----------------------------------------------------------------------------
// includes :
//----------------------------------------------------------------------------
#include "mafObjectFactory.h"
#include "mafPics.h"
/** To be used internally for plugging default nodes --- calls a member
function directly. Registers the node type with the factory and, if the
picture factory has been initialized, also registers the node's icon. */
#define mafPlugNodeMacro(node_type,descr) \
RegisterNewNode(node_type::GetStaticTypeName(), descr, node_type::NewObject); \
if (mafPictureFactory::GetPicsInitialized()) \
mafPictureFactory::GetPictureFactory()->AddVmePic(node_type::GetStaticTypeName(),node_type::GetIcon());
//----------------------------------------------------------------------------
// forward declarations :
//----------------------------------------------------------------------------
class mafNode;
/** Object factory for Nodes.
To make a new VME available in the MAF it must be plugged inside a factory, in particular
this factory must be of type mafNodeFactory to be able to retrieve the list of nodes plugged
in the factory. Also when using mafPlugNode<node_type> the node icon is plugged inside the
the MAF picture factory. */
class MAF_EXPORT mafNodeFactory : public mafObjectFactory
{
public:
  mafTypeMacro(mafNodeFactory,mafObjectFactory);

  /** Return the source version string of the MAF library this was built from. */
  virtual const char* GetMAFSourceVersion() const;
  /** Return a human-readable description of this factory. */
  virtual const char* GetDescription() const;

  /** Initialize the factory, creating and registering a new instance. */
  static int Initialize();

  /** Return the instance pointer of the factory; returns NULL if not initialized yet. */
  static mafNodeFactory *GetInstance();// {if (!m_Instance) Initialize(); return m_Instance;}

  /** Create an instance of a node given its type name. */
  static mafNode *CreateNodeInstance(const char *type_name);

  /** This function can be used by application code to register new
  objects with the mflCoreFactory. */
  void RegisterNewNode(const char* node_name, const char* description, mafCreateObjectFunction createFunction);

  /** Return the list of names for nodes plugged into this factory. */
  static std::vector<std::string> &GetNodeNames();// {return m_NodeNames;}

protected:
  // Singleton: construction/destruction reserved for Initialize().
  mafNodeFactory();
  ~mafNodeFactory() { }

  // True once Initialize() has registered the factory instance.
  static bool m_Initialized;
  // static mafNodeFactory *m_Instance;
  // static std::vector<std::string> m_NodeNames;

private:
  mafNodeFactory(const mafNodeFactory&); // Not implemented.
  void operator=(const mafNodeFactory&); // Not implemented.
};
/** Plug a node in the main MAF Node factory. Instantiate one of these at
namespace scope to register node type T when the program starts. */
template <class T>
class mafPlugNode
{
public:
  /** Registers T with the node factory; see definition below. */
  mafPlugNode(const char *description);
};

//------------------------------------------------------------------------------
/** Plug a new Node class into the Node factory. */
template <class T>
mafPlugNode<T>::mafPlugNode(const char *description)
//------------------------------------------------------------------------------
{
  mafNodeFactory *factory=mafNodeFactory::GetInstance();
  if (factory)
  {
    factory->RegisterNewNode(T::GetStaticTypeName(), description, T::NewObject);
    // here plug node's icon inside picture factory
    if (mafPictureFactory::GetPicsInitialized())
      mafPictureFactory::GetPictureFactory()->AddVmePic(T::GetStaticTypeName(),T::GetIcon());
  }
}
/** Plug an attribute class into the Node factory. Instantiate one of these
at namespace scope to register attribute type T at program start-up. */
template <class T>
class mafPlugAttribute
{
public:
  /** Registers T with the node factory (no-op if the factory is not
  initialized yet). Stray macro-style line continuations removed — this is
  ordinary class code, not a preprocessor macro. */
  mafPlugAttribute(const char *description)
  {
    mafNodeFactory *factory = mafNodeFactory::GetInstance();
    if (factory)
      factory->RegisterNewObject(T::GetStaticTypeName(), description, T::NewObject);
  }
};
#endif
|
sepehr-laal/nofx | nofx/nofx_ofTextureData/nofx_ofTextureData.cc | #include "nofx_ofTextureData.h"
#include "..\nofx\nofx_types.h"
namespace nofx
{
namespace ClassWrappers
{
using node::ObjectWrap;
Persistent<Function> OfTextureDataWrap::constructor;
//--------------------------------------------------------------
// JS constructor binding for `ofTextureData`.
// Supported forms:
//   new ofTextureData()      -> wraps a freshly allocated ofTextureData
//   new ofTextureData(null)  -> wraps nullptr (placeholder wrapper)
//   new ofTextureData(other) -> wraps other's underlying pointer
// NOTE(review): the last branch shares the SAME ofTextureData* with the
// source wrapper despite the "copy constructor" comment — confirm whether a
// deep copy was intended.
NAN_METHOD(OfTextureDataWrap::New)
{
  NanScope();
  if (args.IsConstructCall()) {
    OfTextureDataWrap* obj;
    if (args[0]->IsNull())
    {
      obj = new OfTextureDataWrap(nullptr);
    }
    else if (args.Length() == 0)
    {
      obj = new OfTextureDataWrap(new ofTextureData());
    }
    else
    {
      // "copy" constructor — aliases the wrapped pointer (see NOTE above).
      obj = new OfTextureDataWrap(ObjectWrap::Unwrap<OfTextureDataWrap>(args[0]->ToObject())->GetWrapped());
    }
    obj->Wrap(args.This());
    NanReturnValue(args.This());
  }
  else
  {
    // Invoked as plain function `MyObject(...)`, turn into construct call.
    std::vector<v8::Handle<v8::Value>> lArgvVec;
    for (int i = 0; i < args.Length(); ++i) { lArgvVec.push_back(args[i]); }
    NanReturnValue(NanNew<v8::Function>(constructor)->NewInstance(lArgvVec.size(), (lArgvVec.size() == 0) ? nullptr : &lArgvVec[0]));
  }
}
//--------------------------------------------------------------
// Build the v8 function template for `ofTextureData`: one internal field for
// the native pointer, an accessor pair per public ofTextureData member, and
// a read-only NOFX_TYPE tag. Exports the constructor as "ofTextureData".
void OfTextureDataWrap::Initialize(v8::Handle<Object> exports)
{
  auto tpl = NanNew<v8::FunctionTemplate>(New);
  tpl->SetClassName(NanNew("ofTextureData"));
  auto inst = tpl->InstanceTemplate();
  // One internal field to hold the wrapped native object.
  inst->SetInternalFieldCount(1);
  inst->SetAccessor(NanNew("bAllocated"), OfTextureDataWrap::GetBAllocated, OfTextureDataWrap::SetBAllocated);
  inst->SetAccessor(NanNew("bFlipTexture"), OfTextureDataWrap::GetBFlipTexture, OfTextureDataWrap::SetBFlipTexture);
  inst->SetAccessor(NanNew("compressionType"), OfTextureDataWrap::GetCompressionType, OfTextureDataWrap::SetCompressionType);
  inst->SetAccessor(NanNew("glTypeInternal"), OfTextureDataWrap::GetGlTypeInternal, OfTextureDataWrap::SetGlTypeInternal);
  inst->SetAccessor(NanNew("height"), OfTextureDataWrap::GetHeight, OfTextureDataWrap::SetHeight);
  inst->SetAccessor(NanNew("tex_h"), OfTextureDataWrap::GetTex_h, OfTextureDataWrap::SetTex_h);
  inst->SetAccessor(NanNew("tex_t"), OfTextureDataWrap::GetTex_t, OfTextureDataWrap::SetTex_t);
  inst->SetAccessor(NanNew("tex_u"), OfTextureDataWrap::GetTex_u, OfTextureDataWrap::SetTex_u);
  inst->SetAccessor(NanNew("tex_w"), OfTextureDataWrap::GetTex_w, OfTextureDataWrap::SetTex_w);
  inst->SetAccessor(NanNew("textureID"), OfTextureDataWrap::GetTextureID, OfTextureDataWrap::SetTextureID);
  inst->SetAccessor(NanNew("textureTarget"), OfTextureDataWrap::GetTextureTarget, OfTextureDataWrap::SetTextureTarget);
  inst->SetAccessor(NanNew("width"), OfTextureDataWrap::GetWidth, OfTextureDataWrap::SetWidth);
  // Type tag used elsewhere in nofx to identify wrapped objects.
  NanSetPrototypeTemplate(tpl, NanNew("NOFX_TYPE"), NanNew(NOFX_TYPES::OFTEXTUREDATA), v8::ReadOnly);
  NanAssignPersistent(constructor, tpl->GetFunction());
  exports->Set(NanNew<String>("ofTextureData"), tpl->GetFunction());
}
//----------------------------------------------------
// Property getters: each unwraps the native ofTextureData* behind `this`
// and returns the corresponding field to JS.
//----------------------------------------------------
NAN_GETTER(OfTextureDataWrap::GetBAllocated)
{
  const auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  NanReturnValue(self->bAllocated);
}
//----------------------------------------------------
NAN_GETTER(OfTextureDataWrap::GetBFlipTexture)
{
  const auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  NanReturnValue(self->bFlipTexture);
}
//----------------------------------------------------
NAN_GETTER(OfTextureDataWrap::GetCompressionType)
{
  const auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  NanReturnValue(self->compressionType);
}
//----------------------------------------------------
NAN_GETTER(OfTextureDataWrap::GetGlTypeInternal)
{
  const auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  NanReturnValue(self->glTypeInternal);
}
//----------------------------------------------------
NAN_GETTER(OfTextureDataWrap::GetHeight)
{
  const auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  NanReturnValue(self->height);
}
//----------------------------------------------------
NAN_GETTER(OfTextureDataWrap::GetTex_h)
{
  const auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  NanReturnValue(self->tex_h);
}
//----------------------------------------------------
NAN_GETTER(OfTextureDataWrap::GetTex_t)
{
  const auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  NanReturnValue(self->tex_t);
}
//----------------------------------------------------
NAN_GETTER(OfTextureDataWrap::GetTex_u)
{
  const auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  NanReturnValue(self->tex_u);
}
//----------------------------------------------------
NAN_GETTER(OfTextureDataWrap::GetTex_w)
{
  const auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  NanReturnValue(self->tex_w);
}
//----------------------------------------------------
NAN_GETTER(OfTextureDataWrap::GetTextureID)
{
  const auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  NanReturnValue(self->textureID);
}
//----------------------------------------------------
NAN_GETTER(OfTextureDataWrap::GetTextureTarget)
{
  const auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  NanReturnValue(self->textureTarget);
}
//----------------------------------------------------
NAN_GETTER(OfTextureDataWrap::GetWidth)
{
  const auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  NanReturnValue(self->width);
}
//----------------------------------------------------
// Property setters: each unwraps the native ofTextureData* and copies the
// incoming JS value into the field, coercing to the field's C++ type.
//----------------------------------------------------
NAN_SETTER(OfTextureDataWrap::SetBAllocated)
{
  auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  self->bAllocated = value->BooleanValue();
}
//----------------------------------------------------
NAN_SETTER(OfTextureDataWrap::SetBFlipTexture)
{
  auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  self->bFlipTexture = value->BooleanValue();
}
//----------------------------------------------------
NAN_SETTER(OfTextureDataWrap::SetCompressionType)
{
  auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  // Int32 from JS is reinterpreted as the ofTexCompression enum.
  self->compressionType = static_cast<ofTexCompression>(value->Int32Value());
}
//----------------------------------------------------
NAN_SETTER(OfTextureDataWrap::SetGlTypeInternal)
{
  auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  self->glTypeInternal = value->Int32Value();
}
//----------------------------------------------------
NAN_SETTER(OfTextureDataWrap::SetHeight)
{
  auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  self->height = value->NumberValue();
}
//----------------------------------------------------
NAN_SETTER(OfTextureDataWrap::SetTex_h)
{
  auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  self->tex_h = value->NumberValue();
}
//----------------------------------------------------
NAN_SETTER(OfTextureDataWrap::SetTex_t)
{
  auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  self->tex_t = value->NumberValue();
}
//----------------------------------------------------
NAN_SETTER(OfTextureDataWrap::SetTex_u)
{
  auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  self->tex_u = value->NumberValue();
}
//----------------------------------------------------
NAN_SETTER(OfTextureDataWrap::SetTex_w)
{
  auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  self->tex_w = value->NumberValue();
}
//----------------------------------------------------
NAN_SETTER(OfTextureDataWrap::SetTextureID)
{
  auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  self->textureID = value->Uint32Value();
}
//----------------------------------------------------
NAN_SETTER(OfTextureDataWrap::SetTextureTarget)
{
  auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  self->textureTarget = value->Int32Value();
}
//----------------------------------------------------
NAN_SETTER(OfTextureDataWrap::SetWidth)
{
  auto self = ObjectWrap::Unwrap<OfTextureDataWrap>(args.This())->GetWrapped();
  self->width = value->NumberValue();
}
//----------------------------------------------------
} //!namespace ClassWrappers
} //!namespace nofx |
sathishms77/test | common/utils/dockerhelpers.py | #
# Copyright 2017 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Some docker related convenience functions
"""
from datetime import datetime
from concurrent.futures import ThreadPoolExecutor
import os
import socket
from structlog import get_logger
from docker import Client, errors
docker_socket = os.environ.get('DOCKER_SOCK', 'unix://tmp/docker.sock')
log = get_logger()
def get_my_containers_name():
    """
    Return the docker container name in which this process is running.

    To look up the container name, we use the container ID extracted from
    the $HOSTNAME environment variable (which is set by docker conventions).

    :return: String with the docker container name.
    :raises: re-raises any docker client error after logging it (the
        original docstring claimed None was returned; the code re-raises).
    """
    my_container_id = os.environ.get('HOSTNAME', None)
    try:
        docker_cli = Client(base_url=docker_socket)
        info = docker_cli.inspect_container(my_container_id)
    except Exception as e:  # 'as' form is valid on Python 2.6+ and Python 3
        log.exception('failed', my_container_id=my_container_id, e=e)
        raise
    # Docker prefixes container names with '/': strip it for the caller.
    name = info['Name'].lstrip('/')
    return name
def get_all_running_containers():
    """Return the list of currently running docker containers.

    :raises: re-raises any docker client error after logging it.
    """
    try:
        docker_cli = Client(base_url=docker_socket)
        containers = docker_cli.containers()
    except Exception as e:  # 'as' form is valid on Python 2.6+ and Python 3
        log.exception('failed', e=e)
        raise
    return containers
def inspect_container(id):
    """Return the docker inspect data for the container with the given id.

    Note: the parameter name ``id`` shadows the builtin; it is kept
    unchanged for API compatibility with existing callers.

    :raises: re-raises any docker client error after logging it.
    """
    try:
        docker_cli = Client(base_url=docker_socket)
        info = docker_cli.inspect_container(id)
    except Exception as e:  # 'as' form is valid on Python 2.6+ and Python 3
        log.exception('failed-inspect-container', id=id, e=e)
        raise
    return info
|
adantra/mfem | tests/unit/fem/test_calccurlshape.cpp | // Copyright (c) 2010-2022, Lawrence Livermore National Security, LLC. Produced
// at the Lawrence Livermore National Laboratory. All Rights reserved. See files
// LICENSE and NOTICE for details. LLNL-CODE-806117.
//
// This file is part of the MFEM library. For more information and source code
// availability visit https://mfem.org.
//
// MFEM is free software; you can redistribute it and/or modify it under the
// terms of the BSD-3 license. We welcome feedback and contributions, see file
// CONTRIBUTING.md for details.
#include "mfem.hpp"
#include "catch.hpp"
#include <iostream>
#include <cmath>
using namespace mfem;
/**
* Utility function to generate IntegerationPoints, based on param ip
* that are outside the unit interval. Results are placed in output
* parameter arr.
*
* Note: this is defined in test_calcshape.cpp
*/
void GetRelatedIntegrationPoints(const IntegrationPoint& ip, int dim,
Array<IntegrationPoint>& arr);
/**
* Utility function to setup IsoparametricTransformations for reference
* elements of various types.
*
* Note: this is defined in test_calcvshape.cpp
*/
void GetReferenceTransformation(const Element::Type ElemType,
IsoparametricTransformation & T);
/**
* Linear test function whose curl is equal to 1 in 2D and (1,1,1) in 3D.
*/
// Linear vector field whose curl is identically 1 in 2D and (1,1,1) in 3D.
void test_curl_func(const Vector &x, Vector &v)
{
   const int sdim = x.Size();
   v.SetSize(sdim);
   if (sdim == 3)
   {
      v[0] = 4.0 * x[1] + 3.0 * x[2];
      v[1] = 5.0 * x[0] + x[2];
      v[2] = 2.0 * (x[0] + x[1]);
   }
   else
   {
      v[0] = 4.0 * x[1];
      v[1] = 5.0 * x[0];
   }
}
/**
* Tests fe->CalcCurlShape() over a grid of IntegrationPoints
* of resolution res. Also tests at integration points
* that are outside the element.
*/
// Projects test_curl_func onto fe's dofs, then verifies that at every
// integration point (including points perturbed outside the element) the
// curl reconstructed from CalcCurlShape equals the known constant curl
// (1 in 2D, (1,1,1) in 3D).
void TestCalcCurlShape(FiniteElement* fe, ElementTransformation * T, int res)
{
   int dof = fe->GetDof();
   int dim = fe->GetDim();
   // curl has 1 component in 2D, 3 components in 3D: 2*dim - 3.
   int cdim = 2 * dim - 3;
   Vector dofs(dof);
   Vector v(cdim);
   DenseMatrix weights( dof, cdim );

   VectorFunctionCoefficient vCoef(dim, test_curl_func);

   // Interpolate the test field into this element's degrees of freedom.
   fe->Project(vCoef, *T, dofs);

   // Get a uniform grid of integration points
   RefinedGeometry* ref = GlobGeometryRefiner.Refine( fe->GetGeomType(), res);
   const IntegrationRule& intRule = ref->RefPts;

   int npoints = intRule.GetNPoints();
   for (int i=0; i < npoints; ++i)
   {
      // Get the current integration point from intRule
      IntegrationPoint pt = intRule.IntPoint(i);

      // Get several variants of this integration point
      // some of which are inside the element and some are outside
      Array<IntegrationPoint> ipArr;
      GetRelatedIntegrationPoints( pt, dim, ipArr );

      // For each such integration point check that the weights
      // from CalcCurlShape() reproduce the constant curl exactly
      for (int j=0; j < ipArr.Size(); ++j)
      {
         IntegrationPoint& ip = ipArr[j];
         fe->CalcCurlShape(ip, weights);
         // v = weights^T * dofs is the curl of the projected field at ip.
         weights.MultTranspose(dofs, v);
         REQUIRE( v[0] == Approx(1.) );
         if (dim == 3)
         {
            REQUIRE( v[1] == Approx(1.) );
            REQUIRE( v[2] == Approx(1.) );
         }
      }
   }
}
TEST_CASE("CalcCurlShape for several ND FiniteElement instances",
"[ND_TriangleElement]"
"[ND_QuadrilateralElement]"
"[ND_TetrahedronElement]"
"[ND_WedgeElement]"
"[ND_HexahedronElement]")
{
int maxOrder = 5;
int resolution = 10;
SECTION("ND_TriangleElement")
{
IsoparametricTransformation T;
GetReferenceTransformation(Element::TRIANGLE, T);
for (int order =1; order <= maxOrder; ++order)
{
std::cout << "Testing ND_TriangleElement::CalcCurlShape() "
<< "for order " << order << std::endl;
ND_TriangleElement fe(order);
TestCalcCurlShape(&fe, &T, resolution);
}
}
SECTION("ND_QuadrilateralElement")
{
IsoparametricTransformation T;
GetReferenceTransformation(Element::QUADRILATERAL, T);
for (int order =1; order <= maxOrder; ++order)
{
std::cout << "Testing ND_QuadrilateralElement::CalcCurlShape() "
<< "for order " << order << std::endl;
ND_QuadrilateralElement fe(order);
TestCalcCurlShape(&fe, &T, resolution);
}
}
SECTION("ND_TetrahedronElement")
{
IsoparametricTransformation T;
GetReferenceTransformation(Element::TETRAHEDRON, T);
for (int order =1; order <= maxOrder; ++order)
{
std::cout << "Testing ND_TetrahedronElement::CalcCurlShape() "
<< "for order " << order << std::endl;
ND_TetrahedronElement fe(order);
TestCalcCurlShape(&fe, &T, resolution);
}
}
SECTION("ND_WedgeElement")
{
IsoparametricTransformation T;
GetReferenceTransformation(Element::WEDGE, T);
for (int order =1; order <= maxOrder; ++order)
{
std::cout << "Testing ND_WedgeElement::CalcCurlShape() "
<< "for order " << order << std::endl;
ND_WedgeElement fe(order);
TestCalcCurlShape(&fe, &T, resolution);
}
}
SECTION("ND_HexahedronElement")
{
IsoparametricTransformation T;
GetReferenceTransformation(Element::HEXAHEDRON, T);
for (int order =1; order <= maxOrder; ++order)
{
std::cout << "Testing ND_HexahedronElement::CalcCurlShape() "
<< "for order " << order << std::endl;
ND_HexahedronElement fe(order);
TestCalcCurlShape(&fe, &T, resolution);
}
}
}
|
luiscarlosgph/nas | static/rompr/ui/playlist.js | <filename>static/rompr/ui/playlist.js
function Playlist() {
var self = this;
// Flat list of the Album/Stream group objects currently displayed.
var tracklist = [];
var currentalbum = -1;
this.currentTrack = null;
// Playlist position of the last track, or -1 when the playlist is empty.
var finaltrack = -1;
// Remembers which album/stream groups the user has collapsed.
// NOTE(review): used as a string-keyed map even though it is an Array.
this.rolledup = [];
// Number of playlist update requests currently in flight (see comment below).
var updatecounter = 0;
var do_delayed_update = false;
// -1 allows auto-scroll to the current track; 1 suppresses it (see newXSPF).
var scrollto = -1;
// Consecutive failed updates; we give up after 5 (see updateFailure).
var updateErrorFlag = 0;
// Current smart-playlist mode string (e.g. stars/tag based), or null.
var mode = null;
// Placeholder track used to blank the "now playing" display when empty.
this.emptytrack = {
    album: "",
    albumartist: "",
    backendid: "",
    compilation: "",
    creator: "",
    dir: "",
    duration: "",
    expires: "",
    image: "",
    key: "",
    location: "",
    musicbrainz: {
        albumartistid: "",
        albumid: "",
        artistid: "",
        trackid: ""
    },
    origimage: "",
    playlistpos: "",
    spotify: {
        album: ""
    },
    station: "",
    stationurl: "",
    stream: "",
    title: "",
    tracknumber: "",
    type: "",
    date: ""
};
/*
We keep count of how many ongoing requests we have sent to Apache
If this ever exceeds 1, all responses we receive will be ignored until
the count reaches zero. We then do one more afterwards, because we can't be
sure the responses have come back in the right order.
*/
// Request a fresh copy of the playlist from the player backend.
// Increments the in-flight counter so stale responses can be ignored
// by newXSPF, and clears any pending cover-art callbacks.
this.repopulate = function() {
    debug.log("PLAYLIST","Repopulating....");
    updatecounter++;
    player.controller.getPlaylist();
    coverscraper.clearCallbacks();
}
/**
 * Called when a playlist update request fails. Notifies the user, then
 * retries once no more updates are pending — but after 5 consecutive
 * failures it gives up with an alert, since something is clearly wrong.
 */
this.updateFailure = function() {
    debug.error("PLAYLIST","Got notified that an update FAILED");
    infobar.notify(infobar.ERROR, language.gettext("label_playlisterror"));
    updatecounter -= 1;
    updateErrorFlag += 1;
    if (updateErrorFlag > 5) {
        // Too many consecutive failures: stop retrying and tell the user.
        alert(language.gettext("label_playlisterror"));
        return;
    }
    if (updatecounter == 0) {
        debug.log("PLAYLIST","Update failed and no more are expected. Doing another");
        self.repopulate();
    }
}
/**
 * Handle a playlist (XSPF) update from the backend: group tracks into
 * Album/Stream items, rebuild the playlist pane, and update the totals.
 * Updates are ignored while more responses are still in flight.
 * @param {Array} list  array of track objects from the backend
 */
this.newXSPF = function(list) {
    var item;
    var count = 0;
    var current_album = "";
    var current_artist = "";
    var current_type = "";
    updateErrorFlag = 0;
    // This is a mechanism to prevent multiple repeated updates of the playlist in the case
    // where, for example, the user is clicking rapidly on the delete button for lots of tracks
    // and the playlist is slow to update from mpd
    updatecounter--;
    if (updatecounter > 0) {
        debug.log("PLAYLIST","Received playlist update but ",updatecounter," more are coming - ignoring");
        do_delayed_update = true;
        return 0;
    }
    if (do_delayed_update) {
        // Once all the repeated updates have been received from mpd, ignore them all
        // (because we can't be sure which order they will have come back in),
        // do one more of our own, and use that one
        do_delayed_update = false;
        debug.log("PLAYLIST","Doing delayed playlist update");
        self.repopulate();
        return 0;
    }
    debug.log("PLAYLIST","Got Playlist from Apache",list);
    finaltrack = -1;
    currentalbum = -1;
    tracklist = [];
    var totaltime = 0;
    // (unused 'unixtimestamp' local removed)
    for (var i in list) {
        // 'var' added: this previously leaked 'track' as an implicit global.
        var track = list[i];
        track.duration = parseFloat(track.duration);
        totaltime += track.duration;
        var sortartist = (track.albumartist == "") ? track.creator : track.albumartist;
        // Start a new group whenever the artist, album or source type
        // changes (compilations are grouped by album only).
        if ((track.compilation != "yes" && sortartist.toLowerCase() != current_artist.toLowerCase()) ||
            track.album.toLowerCase() != current_album.toLowerCase() ||
            track.type != current_type)
        {
            current_type = track.type;
            current_artist = sortartist;
            current_album = track.album;
            switch (track.type) {
                case "local":
                    if (track.compilation == "yes") {
                        var hidden = (self.rolledup["Various Artists"+track.album]) ? true : false;
                        item = new Album("Various Artists", track.album, count, hidden);
                    } else {
                        var hidden = (self.rolledup[sortartist+track.album]) ? true : false;
                        item = new Album(sortartist, track.album, count, hidden);
                    }
                    tracklist[count] = item;
                    count++;
                    // NOTE(review): 'current_station' is never declared in
                    // this file's visible scope — presumably a global; confirm.
                    current_station = "";
                    break;
                case "stream":
                    // Streams are hidden by default - hence we use the opposite logic for the flag
                    var hidden = (self.rolledup["StReAm"+track.album]) ? false : true;
                    item = new Stream(count, track.album, hidden);
                    tracklist[count] = item;
                    count++;
                    current_station = "";
                    break;
                default:
                    item = new Album(sortartist, track.album, count);
                    tracklist[count] = item;
                    count++;
                    current_station = "";
                    break;
            }
        }
        item.newtrack(track);
        finaltrack = parseInt(track.playlistpos, 10); // explicit radix
    }
    // After all that, which will have taken a finite time - which could be a long time on
    // a slow device or with a large playlist, let's check that no more updates are pending
    // before we put all this stuff into the window. (More might have come in while we were organising this one)
    // This might all seem like a faff, but you do not want stuff you've just removed
    // suddenly re-appearing in front of your eyes and then vanishing again. It looks crap.
    if (updatecounter > 0) {
        debug.log("PLAYLIST","Aborting update because counter is non-zero");
        return;
    }
    $("#sortable").empty();
    if (finaltrack > -1) {
        $("#pltracks").html((finaltrack+1).toString() +' '+language.gettext("label_tracks"));
        $("#pltime").html(language.gettext("label_duration")+' : '+formatTimeString(totaltime));
        $("#plmode").html(modehtml(mode));
    } else {
        $("#pltracks").html("");
        $("#pltime").html("");
        $("#plmode").html("");
    }
    for (var i in tracklist) {
        $("#sortable").append(tracklist[i].getHTML());
    }
    makeFictionalCharacter();
    self.setHeight();
    self.findCurrentTrack();
    if (finaltrack == -1) {
        // Playlist is empty
        debug.log("PLAYLIST","Playlist is empty");
        nowplaying.newTrack(self.emptytrack);
        infobar.setProgress(0,-1,-1);
    }
    player.controller.postLoadActions();
    // scrollto is now used only to prevent findCurrentTrack from scrolling to the current track.
    // This stops it from being really annoying where it scrolls to the current track
    // when you're in the middle of deleting some stuff lower down.
    // Scrolling when we have the custom scrollbars isn't necessary when we repopulate as the 'scrollbars'
    // basically just stay where they were.
    // Note that we currently set scrollto to 1 when we drag stuff onto or within the playlist. This prevents
    // the auto-scroll from moving the playlist around by itself, which is very confusing for the user.
    // We don't set it when stuff is added by double-click. This means auto-scroll will keep the playlist
    // on or around the current track when we do this. This seems to make the most sense.
    scrollto = -1;
}
// Invisible empty div tacked on the end of the playlist is where we add
// our 'Incoming' animation while tracks are being added.
function makeFictionalCharacter() {
    $("#sortable").append('<div id="waiter" class="containerbox"></div>');
}
/**
 * Build the small status HTML shown for the current smart-playlist mode.
 * @param {?string} m  mode string (stars-based or "tag+name,name,...")
 * @returns {string} HTML fragment, or "" when no mode is active
 */
function modehtml(m) {
    if (!m) {
        return "";
    }
    var result = "";
    if (m.match(/stars/)) {
        // Star rating mode: show the stars image plus a "cancel" icon.
        result = '<img src="newimages/'+m+'.png" height="14px" />';
        result += '<img class="clickicon" height="14px" style="margin-left:8px" src="newimages/edit-delete.png" onclick="playlist.endSmartMode()" />';
    } else if (m.match(/tag/)) {
        // Tag mode: strip the "tag+" prefix and pretty up the first comma.
        var label = m.replace(/tag\+/,'').replace(/,/, ', ');
        result = '<img src="newimages/tag.png" height="14px" style="margin-right:4px;vertical-align:middle" />' + label;
        result += '<img class="clickicon" height="14px" style="margin-left:8px;vertical-align:middle" src="newimages/edit-delete.png" onclick="playlist.endSmartMode()" />';
    }
    return result;
}
// Leave smart-playlist (stars/tag) mode and reload the normal playlist.
this.endSmartMode = function() {
    mode = null;
    self.repopulate();
}
/**
 * Resize the playlist scroll area to fill the vertical space left below
 * the header and (when visible) the playlist button bar, then refresh the
 * custom scrollbar on desktop.
 */
this.setHeight = function() {
    var available = $("#bottompage").height() - $("#horse").height();
    if ($("#playlistbuttons").is(":visible")) {
        available -= $("#playlistbuttons").height();
        if (mobile != "no") {
            // Small fudge factor for the mobile layout.
            available -= 2;
        }
    }
    $("#pscroller").css("height", available.toString()+"px");
    if (mobile == "no") {
        $('#pscroller').mCustomScrollbar("update");
    }
}
/**
 * Load a named saved playlist: clear the pane, show the waiting
 * animation, close/switch the playlist chooser UI, and ask the backend
 * to load the playlist.
 * @param {string} name  saved playlist name
 */
this.load = function(name) {
    $("#sortable").empty();
    makeFictionalCharacter();
    playlist.waiting();
    if (mobile == "no") {
        $("#lpscr").slideToggle('fast');
    } else {
        sourcecontrol('playlistm');
    }
    debug.log("PLAYLIST","Loading Playlist",name);
    // Loading a saved playlist always leaves smart mode.
    mode = null;
    player.controller.loadPlaylist(name);
}
/**
 * Load a "smart" playlist (star-rating or tag based) via userRatings.php.
 * If the requested mode is already active, just repopulates it.
 * @param {string} name  smart mode name, e.g. "3stars" or "tag"
 */
this.loadSmart = function(name) {
    var action = "getplaylist";
    playlist.waiting();
    if (name == "tag") {
        // Tag mode carries the chosen tag(s) from the UI field.
        name += "+" + $("#cynthia").val();
    }
    debug.log("PLAYLIST","Loading Playlist",name);
    if (mode && mode == name) {
        // We're already running this playlist
        action = "repopulate";
    } else {
        if (mobile == "no") {
            $("#lpscr").slideToggle('fast');
        } else {
            sourcecontrol('playlistm');
        }
    }
    mode = name;
    $.ajax({
        type: "POST",
        dataType: "json",
        data: { action: action, playlist: name },
        url: "userRatings.php",
        success: function(data) {
            if (data.length > 0) {
                debug.log("SMARTPLAYLIST","Got tracks",data);
                player.controller.addTracks(data, null, null);
            } else {
                mode = null;
                playlist.repopulate();
            }
        },
        // BUGFIX: $.ajax has no 'fail' setting — the option is 'error'.
        // The previous 'fail:' key was silently ignored, so failures were
        // never reported to the user.
        error: function() {
            infobar.notify(infobar.ERROR,"Failed to create Playlist");
            playlist.repopulate();
        }
    });
}
// Empty the whole playlist (and drop any active smart mode).
this.clear = function() {
    mode = null;
    player.controller.clearPlaylist();
}
// Handler for dropping items onto an empty playlist area: jQuery UI gives
// us the helper instead of an item here, so substitute it and reuse the
// normal drag-stop logic.
this.draggedToEmpty = function(event, ui) {
    debug.log("PLAYLIST","Something was dropped on the empty playlist area",event,ui);
    ui.item = ui.helper;
    playlist.waiting();
    playlist.dragstopped(event,ui);
}
// Called when a jQuery UI drag finishes: work out the playlist position
// the element(s) were dropped at, then either add new tracks (drag from
// the browse panes) or move existing ones (drag within the playlist).
this.dragstopped = function(event, ui) {
    debug.log("PLAYLIST","Drag Stopped",event,ui);
    event.stopImmediatePropagation();
    // Resolve the target position by walking forward to the next track or
    // group header, climbing out of group wrappers as necessary; falling
    // through to the end of the playlist when nothing follows.
    var moveto = (function getMoveTo(i) {
        debug.log("Drag Stopped",i.next());
        if (i.next().hasClass('track')) {
            return parseInt(i.next().attr("name"));
        }
        if (i.next().hasClass('item')) {
            return tracklist[parseInt(i.next().attr("name"))].getFirst();
        }
        if (i.parent().hasClass('trackgroup')) {
            return getMoveTo(i.parent());
        }
        return (parseInt(finaltrack))+1;
    })($(ui.item));
    if (ui.item.hasClass("draggable")) {
        // Something dragged from the albums list
        var tracks = new Array();
        $.each($('.selected').filter(removeOpenItems), function (index, element) {
            var uri = $(element).attr("name");
            if (uri) {
                if ($(element).hasClass('clickalbum')) {
                    tracks.push({ type: "item",
                                  name: uri});
                } else if ($(element).hasClass('clickcue')) {
                    tracks.push({ type: "cue",
                                  name: decodeURIComponent(uri)});
                } else {
                    var options = { type: "uri",
                                    name: decodeURIComponent(uri)};
                    $(element).find('input').each( function() {
                        switch ($(this).val()) {
                            case "needsfiltering":
                                // Spotify artists need a server-side lookup by exact name
                                options.findexact = {artist: $(element).children('.saname').text()};
                                options.filterdomain = ['spotify:'];
                                debug.log("PLAYLIST", "Adding Spotify artist",$(element).children('.saname').text());
                                break;
                        }
                    });
                    tracks.push(options);
                }
            }
        });
        scrollto = 1;
        player.controller.addTracks(tracks, null, moveto);
        $('.selected').removeClass('selected');
        $("#dragger").remove();
    } else {
        // Something dragged within the playlist
        var elementmoved = ui.item.hasClass('track') ? 'track' : 'item';
        switch (elementmoved) {
            case "track":
                var firstitem = parseInt(ui.item.attr("name"));
                var numitems = 1;
                break;
            case "item":
                // A whole group: move all of its tracks as one span
                var firstitem = tracklist[parseInt(ui.item.attr("name"))].getFirst();
                var numitems = tracklist[parseInt(ui.item.attr("name"))].getSize();
                break;
        }
        // If we move DOWN we have to calculate what the position will be AFTER the items have been moved.
        // It's understandable, but slightly counter-intuitive
        if (firstitem < moveto) {
            moveto = moveto - numitems;
            if (moveto < 0) { moveto = 0; }
        }
        scrollto = 1;
        player.controller.move(firstitem, numitems, moveto);
    }
}
// jQuery .filter() callback used when dragging selections into the
// playlist: keep elements that represent an addable entry in their own
// right, drop group headers whose child tracks are selected individually.
function removeOpenItems(index) {
    var element = $(this);
    // Plain tracks and cue sheets are always kept.
    if (element.hasClass('clicktrack') || element.hasClass('clickcue')) {
        return true;
    }
    // Artist/album headers are kept only while their dropdown has not been
    // populated ('notfilled'), or when they stand for a single file -
    // otherwise the individual tracks exist in the DOM and are selected
    // (possibly only partially, even when the header itself is selected).
    var dropdown = $("#" + element.attr('name'));
    return dropdown.hasClass('notfilled') || element.hasClass('onefile');
}
// Remove a single track (by backend id) from both the DOM and the player.
this.delete = function(id) {
    scrollto = 1;
    $('.track[romprid="'+id.toString()+'"]').remove();
    player.controller.removeId([parseInt(id)]);
}
// Show the "incoming" placeholder while tracks are being added.
this.waiting = function() {
    $("#waiter").empty();
    doSomethingUseful('waiter', language.gettext("label_incoming"));
}
// This is used for adding stream playlists ONLY
this.newInternetRadioStation = function(list) {
    scrollto = (finaltrack)+1;
    var tracks = [];
    // The list arrives as XSPF-style XML; pull out each track's <location>
    $(list).find("track").each( function() {
        tracks.push({ type: "uri",
                      name: $(this).find("location").text()}
        );
    });
    if (tracks.length > 0) {
        player.controller.addTracks(tracks, playlist.playFromEnd(), null);
    }
}
// Collapse/expand the group with the given index.
this.hideItem = function(i) {
    tracklist[i].rollUp();
}
// Position newly-added tracks should start playing from: the end of the
// playlist when the player is stopped, or -1 to disable auto-play.
this.playFromEnd = function() {
    if (player.status.state == "stop") {
        debug.log("PLAYLIST","Playfromend",finaltrack+1);
        return finaltrack+1;
    } else {
        debug.log("PLAYLIST","Disabling auto-play");
        return -1;
    }
}
// Index of the last track in the playlist.
this.getfinaltrack = function() {
    return finaltrack;
}
// Persist a track playlist on the server.
this.saveTrackPlaylist = function(xml) {
    $.post("newplaylist.php", { type: "track", xml: xml});
}
// Locate the currently-playing track (player.status.songid) among the
// groups, highlight it, and scroll it into view. Returns the track object
// or null when it is not in the playlist.
this.findCurrentTrack = function() {
    debug.log("PLAYLIST","Looking For Current Track",player.status.songid);
    self.currentTrack = null;
    // Clear any previous highlight first
    $(".playlistcurrentitem").removeClass('playlistcurrentitem').addClass('playlistitem');
    $(".playlistcurrenttitle").removeClass('playlistcurrenttitle').addClass('playlisttitle');
    for(var i in tracklist) {
        self.currentTrack = tracklist[i].findcurrent(player.status.songid);
        if (self.currentTrack) {
            currentalbum = i;
            scrollToCurrentTrack();
            // In smart-playlist mode, reaching the final track refills the list
            if (mode && self.currentTrack.playlistpos == finaltrack) {
                self.loadSmart(mode);
            }
            break;
        }
    }
    return self.currentTrack;
}
// Centre the playing track in the scroller. Desktop goes through the
// mCustomScrollbar plugin; mobile uses a plain animated scroll. Skipped
// when the preference is off, the row is not rendered yet, or a
// programmatic scroll is pending (scrollto !== -1).
function scrollToCurrentTrack() {
    if (prefs.scrolltocurrent &&
        $('.track[romprid="'+player.status.songid+'"]').offset() &&
        scrollto === -1) {
        debug.log("PLAYLIST","Scrolling to",player.status.songid);
        if (mobile == "no") {
            $('#pscroller').mCustomScrollbar(
                "scrollTo",
                $('div.track[romprid="'+player.status.songid+'"]').offset().top - $('#sortable').offset().top - $('#pscroller').height()/2,
                { scrollInertia: 0 }
            );
        } else {
            $('#pscroller').animate({
                scrollTop: $('div.track[romprid="'+player.status.songid+'"]').offset().top - $('#sortable').offset().top - $('#pscroller').height()/2
            }, 500);
        }
    }
}
// Player stopped: reset the progress display.
this.stopped = function() {
    infobar.setProgress(0,-1,-1);
}
// Hook run on every track change - used to mark podcasts as listened.
this.trackchanged = function() {
    if (self.currentTrack && self.currentTrack.type == "podcast") {
        debug.log("PLAYLIST", "Seeing if we need to mark a podcast as listened");
        podcasts.checkMarkPodcastAsListened(self.currentTrack.location);
    }
}
// Toggle "stop after current track". While armed, the button pulses for
// roughly the remaining duration of the track as a visual reminder.
this.stopafter = function() {
    if (self.currentTrack.type == "stream") {
        // Streams have no end, so stop-after makes no sense for them
        infobar.notify(infobar.ERROR, language.gettext("label_notforradio"));
    } else if (player.status.state == "play") {
        if (player.status.single == 0) {
            player.controller.stopafter();
            var timeleft = self.currentTrack.duration - infobar.progress();
            // Nearly over (or bogus duration): pulse for a nominal 5 minutes
            if (timeleft < 4) { timeleft = 300 };
            var repeats = Math.round(timeleft / 4);
            $("#stopafterbutton").effect('pulsate', {times: repeats}, 4000);
        } else {
            // Already armed: cancel and stop the pulsing
            player.controller.cancelSingle();
            $("#stopafterbutton").stop(true, true);
        }
    }
}
// Skip to the previous track via the current group's handler.
this.previous = function() {
    if (currentalbum >= 0) {
        tracklist[currentalbum].previoustrackcommand();
    }
}
// Skip to the next track via the current group's handler.
this.next = function() {
    if (currentalbum >= 0) {
        tracklist[currentalbum].nexttrackcommand();
    }
}
// Delete an entire album/stream group from the playlist.
this.deleteGroup = function(index) {
    scrollto = 1;
    tracklist[index].deleteSelf();
}
// Add a single track element (from the browse panes) to the playlist.
this.addtrack = function(element) {
    self.waiting();
    var n = decodeURIComponent(element.attr("name"));
    var options = [{ type: "uri",
                     name: n,
    }];
    $.each(element.children('input'), function() {
        switch ($(this).val()) {
            case "needsfiltering":
                // Spotify artists are resolved server-side by exact name
                options[0].findexact = {artist: element.children('.saname').text()};
                options[0].filterdomain = ['spotify:'];
                debug.log("PLAYLIST", "Adding Spotify artist",element.children('.saname').text());
                break;
        }
    });
    player.controller.addTracks(options,
        playlist.playFromEnd(),
        null);
}
// Add a cue sheet to the playlist.
this.addcue = function(element) {
    self.waiting();
    var n = decodeURIComponent(element.attr("name"));
    var options = [{ type: "cue",
                     name: n,
    }];
    player.controller.addTracks(options,
        playlist.playFromEnd(),
        null);
}
// Add a whole album to the playlist.
this.addalbum = function(element) {
    self.waiting();
    player.controller.addTracks([{ type: "item",
                                   name: element.attr("name")}],
        playlist.playFromEnd(), null);
}
// Save the radio station in group `index` as a favourite; includes the
// currently-playing URI when there is one, then refreshes the radio list.
this.addFavourite = function(index) {
    debug.log("PLAYLIST","Adding Fave Station, index",index, tracklist[index].album);
    var data = { station: tracklist[index].getFnackle() };
    if (self.currentTrack) {
        data.uri = self.currentTrack.location;
    }
    $.post("addfave.php", data)
        .done( function() {
            if (!prefs.hide_radiolist) {
                $("#yourradiolist").load("yourradio.php");
            }
        });
}
// Read one property of the currently playing track.
this.getCurrent = function(thing) {
    return self.currentTrack[thing];
}
// One album's worth of consecutive playlist entries, rendered as a
// collapsible group: a header (cover art, artist, album) followed by
// one row per track.
function Album(artist, album, index, rolledup) {
    var self = this;
    var tracks = [];
    this.artist = artist;
    this.album = album;
    this.index = index;
    // Append a track object to this group.
    this.newtrack = function (track) {
        tracks.push(track);
    }
    // Build the full HTML for the group (header plus all track rows).
    this.getHTML = function() {
        var html = self.header();
        for (var trackpointer in tracks) {
            // Show the track artist when it differs from the album artist
            // (compilations, guest artists).
            var showartist = false;
            if (tracks[trackpointer].compilation == "yes" ||
                (tracks[trackpointer].albumartist != "" && tracks[trackpointer].albumartist != tracks[trackpointer].creator)) {
                showartist = true;
            }
            html = html + '<div name="'+tracks[trackpointer].playlistpos+'" romprid="'+tracks[trackpointer].backendid+'" class="track clickable clickplaylist sortable containerbox playlistitem menuitem">';
            var l = tracks[trackpointer].location;
            if (l.substring(0,11) == "soundcloud:") {
                // SoundCloud tracks show their artwork instead of a number
                html = html + '<div class="smallcover fixed"><img class="smallcover" src="'+tracks[trackpointer].image+'" /></div>';
            } else if (tracks[trackpointer].type == "podcast") {
                html = html + '<div class="tracknumbr fixed">';
                html = html + '<img src="newimages/Apple_Podcast_logo.png" height="16px" />';
                html = html + '</div>';
            } else{
                html = html + '<div class="tracknumbr fixed"';
                // Widen the number column once we hit three digits
                if (tracks.length > 99 ||
                    tracks[trackpointer].tracknumber > 99) {
                    html = html + ' style="width:3em"';
                }
                html = html + '>'+format_tracknum(tracks[trackpointer].tracknumber)+'</div>';
            }
            // Source badge for streaming backends
            if (l.substring(0, 7) == "spotify") {
                html = html + '<div class="playlisticon fixed"><img height="12px" src="newimages/spotify-logo.png" /></div>';
            } else if (l.substring(0, 6) == "gmusic") {
                html = html + '<div class="playlisticon fixed"><img height="12px" src="newimages/play-logo.png" /></div>';
            }
            if (showartist) {
                html = html + '<div class="containerbox vertical expand">';
                html = html + '<div class="line">'+tracks[trackpointer].title+'</div>';
                html = html + '<div class="line playlistrow2">'+tracks[trackpointer].creator+'</div>';
                html = html + '</div>';
            } else {
                html = html + '<div class="expand line">'+tracks[trackpointer].title+'</div>';
            }
            html = html + '<div class="tiny fixed">'+formatTimeString(tracks[trackpointer].duration)+'</div>';
            html = html + '<div class="playlisticonr fixed clickable clickicon clickremovetrack" romprid="'+tracks[trackpointer].backendid+'"><img src="newimages/edit-delete.png" /></div>';
            html = html + '</div>';
        }
        // Close the rollup div we added in the header
        html = html + '</div>'
        return html;
    }
    // Build the group header: cover image, artist/album lines, a delete
    // icon, and the opening tag of the (possibly hidden) rollup container.
    this.header = function() {
        var html = "";
        html = html + '<div name="'+self.index+'" romprid="'+tracks[0].backendid+'" class="item clickable clickplaylist sortable containerbox menuitem playlisttitle">';
        var l = tracks[0].location;
        if (l.substring(0,11) == "soundcloud:") {
            html = html + '<div class="smallcover fixed clickable clickicon clickrollup" romprname="'+self.index+'"><img class="smallcover" src="newimages/soundcloud-logo.png"/></div>';
        } else {
            if (tracks[0].image && tracks[0].image != "") {
                // An image was supplied - either a local one or supplied by the backend
                html = html + '<div class="smallcover fixed clickable clickicon clickrollup" romprname="'+self.index+'"><img class="smallcover fixed" name="'+tracks[0].key+'" src="'+tracks[0].image+'"/></div>';
            } else {
                // This is so we can get albumart when we're playing spotify
                // Once mopidy starts supplying us with images, we can dump this code
                // Note - this is reuired for when we load a spotify playlist because the albums won't be
                // present in the window anywhere else
                var i = findImageInWindow(tracks[0].key);
                if (i !== false) {
                    debug.log("PLAYLIST","Playlist using image already in window");
                    this.updateImages(i);
                    html = html + '<div class="smallcover fixed clickable clickicon clickrollup" romprname="'+self.index+'"><img class="smallcover fixed" name="'+tracks[0].key+'" src="'+i+'"/></div>';
                } else {
                    // No image anywhere yet: render a placeholder and ask the
                    // cover scraper to fetch one asynchronously.
                    html = html + '<div class="smallcover fixed clickable clickicon clickrollup" romprname="'+self.index
                        + '"><img class="smallcover updateable notexist fixed clickable clickicon clickrollup" romprname="'+self.index
                        +'" name="'+tracks[0].key+'" src=""/></div>';
                    coverscraper.setCallback(this.updateImages, tracks[0].key);
                    coverscraper.GetNewAlbumArt(tracks[0].key);
                }
            }
        }
        html = html + '<div class="containerbox vertical expand">';
        html = html + '<div class="line">'+self.artist+'</div>';
        html = html + '<div class="line">'+self.album+'</div>';
        html = html + '</div>';
        html = html + '<div class="playlisticonr fixed clickable clickicon clickremovealbum" name="'+self.index+'"><img src="newimages/edit-delete.png" /></div>';
        html = html + '</div>';
        html = html + '<div class="trackgroup';
        if (rolledup) {
            html = html + ' invisible';
        }
        html = html + '" name="'+self.index+'">';
        return html;
    }
    // Name used when saving this group as a favourite station.
    this.getFnackle = function() {
        return tracks[0].album;
    }
    // Toggle the group open/closed and remember the state per artist+album.
    this.rollUp = function() {
        $('.trackgroup[name="'+self.index+'"]').slideToggle('slow');
        rolledup = !rolledup;
        if (rolledup) {
            playlist.rolledup[this.artist+this.album] = true;
        } else {
            playlist.rolledup[this.artist+this.album] = undefined;
        }
    }
    // Cover-art arrived (or was found): push it onto every track and the infobar.
    this.updateImages = function(src) {
        for (var trackpointer in tracks) {
            tracks[trackpointer].image = src;
            tracks[trackpointer].origimage = src.replace(/_original/, '_asdownloaded');
        }
        infobar.albumImage.setSecondarySource( {key: tracks[0].key, image: src, origimage: src.replace(/_original/, '_asdownloaded')});
    }
    // Playlist position of the group's first track.
    this.getFirst = function() {
        return parseInt(tracks[0].playlistpos);
    }
    this.getSize = function() {
        return tracks.length;
    }
    // Is the given backend id the last track of this group?
    this.isLast = function(id) {
        if (id == tracks[tracks.length - 1].backendid) {
            return true;
        } else {
            return false;
        }
    }
    // Highlight and return the track with the given backend id, or null.
    this.findcurrent = function(which) {
        var result = null;
        for(var i in tracks) {
            if (tracks[i].backendid == which) {
                $('.item[name="'+self.index+'"]').removeClass('playlisttitle').addClass('playlistcurrenttitle');
                $('.track[romprid="'+which+'"]').removeClass('playlistitem').addClass('playlistcurrentitem');
                result = tracks[i];
                break;
            }
        }
        return result;
    }
    // Remove this group from the DOM and tell the player to drop its tracks.
    this.deleteSelf = function() {
        var todelete = [];
        $('.item[name="'+self.index+'"]').next().remove();
        $('.item[name="'+self.index+'"]').remove();
        for(var i in tracks) {
            todelete.push(tracks[i].backendid);
        }
        player.controller.removeId(todelete)
    }
    this.previoustrackcommand = function() {
        player.controller.previous();
    }
    this.nexttrackcommand = function() {
        player.controller.next();
    }
    // Extract the leading digits of a track number ("3/12" -> "3"); empty
    // string when there is no numeric prefix.
    function format_tracknum(tracknum) {
        var r = /^(\d+)/;
        var result = r.exec(tracknum) || "";
        return result[1] || "";
    }
}
// A radio-stream playlist group: a header with the station details plus a
// (normally hidden) row per stream URL. Mirrors the interface of Album so
// the playlist can treat both uniformly.
function Stream(index, album, rolledup) {
    var self = this;
    var tracks = [];
    this.index = index;
    // (removed the redundant `var rolledup = rolledup;` - the parameter is
    // already the closure variable the methods below read and write)
    this.album = album;
    // Append a stream entry to this group.
    this.newtrack = function (track) {
        tracks.push(track);
    }
    // Build the full HTML for the group (header plus one row per stream).
    this.getHTML = function() {
        var html = self.header();
        for (var trackpointer in tracks) {
            html = html + '<div name="'+tracks[trackpointer].playlistpos+'" romprid="'+tracks[trackpointer].backendid+'" class="booger clickable clickplaylist containerbox playlistitem menuitem">';
            html = html + '<div class="playlisticon fixed"><img height="12px" src="newimages/broadcast.png" /></div>';
            html = html + '<div class="containerbox vertical expand">';
            html = html + '<div class="playlistrow2 line">'+tracks[trackpointer].stream+'</div>';
            html = html + '<div class="tiny line">'+tracks[trackpointer].location+'</div>';
            html = html + '</div>';
            html = html + '</div>';
        }
        // Close the rollup div we added in the header
        html = html + '</div>';
        return html;
    }
    // Build the group header: station image, name, and the favourite/delete
    // icons, then open the (possibly hidden) rollup container.
    this.header = function() {
        var html = "";
        html = html + '<div name="'+self.index+'" romprid="'+tracks[0].backendid+'" class="item clickable clickplaylist sortable containerbox menuitem playlisttitle">';
        var image = (tracks[0].image) ? tracks[0].image : "newimages/broadcast.png";
        // BUGFIX: the original emitted a stray extra quote after the name
        // attribute (name="KEY"" src=...), producing malformed HTML.
        html = html + '<div class="smallcover fixed clickable clickicon clickrollup" romprname="'+self.index+'"><img class="smallcover" name="'+tracks[0].key+'" src="'+image+'"/></div>';
        html = html + '<div class="containerbox vertical expand">';
        html = html + '<div class="line">'+tracks[0].creator+'</div>';
        html = html + '<div class="line">'+tracks[0].album+'</div>';
        html = html + '</div>';
        html = html + '<div class="containerbox vertical fixed">';
        html = html + '<div class="playlisticonr clickable clickicon clickaddfave" name="'+self.index+'"><img height="12px" width="12px" src="newimages/broadcast-12.png"></div>';
        html = html + '<div class="playlisticonr clickable clickicon clickremovealbum" name="'+self.index+'"><img src="newimages/edit-delete.png"></div>';
        html = html + '</div>';
        html = html + '</div>';
        html = html + '<div class="trackgroup';
        if (rolledup) {
            html = html + ' invisible';
        }
        html = html + '" name="'+self.index+'">';
        return html;
    }
    // Name used when saving this station as a favourite.
    this.getFnackle = function() {
        return tracks[0].album;
    }
    // Toggle the group open/closed and remember the state.
    this.rollUp = function() {
        $('.trackgroup[name="'+self.index+'"]').slideToggle('slow');
        rolledup = !rolledup;
        // Logic is backwards for streams, because they're hidden by default
        if (rolledup) {
            playlist.rolledup["StReAm"+this.album] = undefined;
        } else {
            playlist.rolledup["StReAm"+this.album] = true;
        }
    }
    // Playlist position of the group's first entry.
    this.getFirst = function() {
        return parseInt(tracks[0].playlistpos);
    }
    this.getSize = function() {
        return tracks.length;
    }
    // Is the given backend id the last entry of this group?
    this.isLast = function(id) {
        if (id == tracks[tracks.length - 1].backendid) {
            return true;
        } else {
            return false;
        }
    }
    // Highlight and return the entry with the given backend id, or null.
    this.findcurrent = function(which) {
        var result = null;
        for(var i in tracks) {
            if (tracks[i].backendid == which) {
                $('.item[name="'+self.index+'"]').removeClass('playlisttitle').addClass('playlistcurrenttitle');
                $('.booger[romprid="'+which+'"]').removeClass('playlistitem').addClass('playlistcurrentitem');
                result = tracks[i];
                break;
            }
        }
        return result;
    }
    // Remove this group from the DOM and tell the player to drop its entries.
    this.deleteSelf = function() {
        var todelete = [];
        for(var i in tracks) {
            $('.booger[name="'+tracks[i].playlistpos+'"]').remove();
            todelete.push(tracks[i].backendid);
        }
        $('.item[name="'+self.index+'"]').remove();
        player.controller.removeId(todelete)
    }
    // Streams navigate by playlist position rather than backend prev/next.
    this.previoustrackcommand = function() {
        player.controller.playByPosition(parseInt(tracks[0].playlistpos)-1);
    }
    this.nexttrackcommand = function() {
        player.controller.playByPosition(parseInt(tracks[(tracks.length)-1].playlistpos)+1);
    }
}
}
|
lcxw/umail | umail-api/src/main/java/org/edu/mail/api/domain/Contact.java | <reponame>lcxw/umail
package org.edu.mail.api.domain;
/**
 * Bean describing a single address-book contact.
 *
 * <p>All string setters normalise their input: {@code null} stays
 * {@code null}, anything else has surrounding whitespace trimmed.
 */
public class Contact {
    private String name;
    private String email;
    private String movephone;
    private String company;
    private String workphone;
    private String remark;
    // Grouped with the other fields; previously declared between accessors.
    private String userId;

    /** @return id of the user who owns this contact */
    public String getUserId() {
        return userId;
    }

    // NOTE(review): unlike the other setters this one does not trim its
    // argument - confirm that is intentional before "fixing" it.
    public void setUserId(String userId) {
        this.userId = userId;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name == null ? null : name.trim();
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email == null ? null : email.trim();
    }

    public String getMovephone() {
        return movephone;
    }

    public void setMovephone(String movephone) {
        this.movephone = movephone == null ? null : movephone.trim();
    }

    public String getCompany() {
        return company;
    }

    public void setCompany(String company) {
        this.company = company == null ? null : company.trim();
    }

    public String getWorkphone() {
        return workphone;
    }

    public void setWorkphone(String workphone) {
        this.workphone = workphone == null ? null : workphone.trim();
    }

    public String getRemark() {
        return remark;
    }

    public void setRemark(String remark) {
        this.remark = remark == null ? null : remark.trim();
    }
}
auntaru/artemis-demo | artemis-demo-java-se/graph-neo4j/src/main/java/org/jnosql/artemis/demo/se/TravelApp.java | /*
* Copyright (c) 2017 <NAME> and others
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Apache License v2.0 is available at http://www.opensource.org/licenses/apache2.0.php.
*
* You may elect to redistribute this code under either of these licenses.
*
* Contributors:
*
* <NAME>
*/
package org.jnosql.artemis.demo.se;
import org.eclipse.jnosql.artemis.graph.GraphTemplate;
import javax.enterprise.inject.se.SeContainer;
import javax.enterprise.inject.se.SeContainerInitializer;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import static java.util.stream.Collectors.counting;
import static java.util.stream.Collectors.groupingBy;
import static java.util.stream.Collectors.toList;
/**
 * Demo application: builds a small travel graph (travelers, cities and
 * "travels"/"knows" edges) and runs a handful of traversal aggregations,
 * printing each result.
 */
public final class TravelApp {

    private TravelApp() {
    }

    private static final String GOAL = "type";
    private static final String FUN = "fun";
    private static final String TRAVELS = "travels";
    private static final String WORK = "Work";

    public static void main(String[] args) {

        try (SeContainer container = SeContainerInitializer.newInstance().initialize()) {
            GraphTemplate graph = container.select(GraphTemplate.class).get();

            Traveler stark = graph.insert(Traveler.of("Stark"));
            Traveler roges = graph.insert(Traveler.of("Rogers"));
            Traveler romanoff = graph.insert(Traveler.of("Romanoff"));
            Traveler banners = graph.insert(Traveler.of("Banners"));

            City sanFrancisco = graph.insert(City.of("San Francisco"));
            City moscow = graph.insert(City.of("Moscow"));
            City newYork = graph.insert(City.of("New York"));
            City saoPaulo = graph.insert(City.of("São Paulo"));
            City casaBlanca = graph.insert(City.of("Casa Blanca"));

            // "travels" edges carry a GOAL property: fun or work
            graph.edge(stark, TRAVELS, sanFrancisco).add(GOAL, FUN);
            graph.edge(stark, TRAVELS, moscow).add(GOAL, FUN);
            graph.edge(stark, TRAVELS, newYork).add(GOAL, FUN);
            graph.edge(stark, TRAVELS, saoPaulo).add(GOAL, FUN);
            graph.edge(stark, TRAVELS, casaBlanca).add(GOAL, FUN);

            graph.edge(roges, TRAVELS, newYork).add(GOAL, WORK);

            graph.edge(banners, TRAVELS, casaBlanca).add(GOAL, WORK);
            graph.edge(banners, TRAVELS, saoPaulo).add(GOAL, WORK);

            graph.edge(romanoff, TRAVELS, moscow).add(GOAL, WORK);
            graph.edge(romanoff, TRAVELS, newYork).add(GOAL, WORK);
            graph.edge(romanoff, TRAVELS, saoPaulo).add(GOAL, WORK);
            graph.edge(romanoff, TRAVELS, casaBlanca).add(GOAL, FUN);

            graph.edge(stark, "knows", romanoff);
            graph.edge(stark, "knows", roges);
            graph.edge(roges, "knows", romanoff);

            // City name -> number of incoming fun trips
            Map<String, Long> mostFunCity = graph.getTraversalVertex()
                    .inE(TRAVELS)
                    .has(GOAL, FUN).inV()
                    .<City>getResult()
                    .map(City::getName)
                    .collect((groupingBy(Function.identity(), counting())));

            // City name -> number of incoming work trips
            Map<String, Long> mostBusiness = graph.getTraversalVertex()
                    .inE(TRAVELS)
                    .has(GOAL, WORK).inV()
                    .<City>getResult()
                    .map(City::getName)
                    .collect((groupingBy(Function.identity(), counting())));

            // City name -> total trips (any goal)
            Map<String, Long> mostTravelCity = graph.getTraversalVertex()
                    .out(TRAVELS)
                    .<City>getResult()
                    .map(City::getName)
                    .collect((groupingBy(Function.identity(), counting())));

            // Traveler name -> number of fun trips taken
            Map<String, Long> personTravelFun = graph.getTraversalVertex()
                    .inE(TRAVELS)
                    .has(GOAL, FUN).outV()
                    .<Traveler>getResult()
                    .map(Traveler::getName)
                    .collect((groupingBy(Function.identity(), counting())));

            // Traveler name -> number of work trips taken
            Map<String, Long> personTravelWork = graph.getTraversalVertex()
                    .inE(TRAVELS)
                    .has(GOAL, WORK).outV()
                    .<Traveler>getResult()
                    .map(Traveler::getName)
                    .collect((groupingBy(Function.identity(), counting())));

            // Traveler name -> total trips taken
            Map<String, Long> personTravel = graph.getTraversalVertex()
                    .in(TRAVELS)
                    .<Traveler>getResult()
                    .map(Traveler::getName)
                    .collect((groupingBy(Function.identity(), counting())));

            // Names of everyone who travelled to Casa Blanca
            List<String> friendsCasaBlanca = graph.getTraversalVertex()
                    .hasLabel("City")
                    .has("name", "Casa Blanca")
                    .in(TRAVELS).<Traveler>getResult().map(Traveler::getName).collect(toList());

            System.out.println("The city most fun: "+ mostFunCity);
            System.out.println("The city most business: "+ mostBusiness);
            System.out.println("The city with more travel: "+ mostTravelCity);
            System.out.println("The person who traveled fun: "+ personTravelFun);
            System.out.println("The person who traveled business: "+ personTravelWork);
            System.out.println("The person who traveled: "+ personTravel);
            // BUGFIX: this line previously printed the City object
            // (casaBlanca) while the computed friendsCasaBlanca list was
            // never used at all.
            System.out.println("Friends because went to Casa Blanca: " + friendsCasaBlanca);
        }
    }
}
|
jiefzz/EJoker | ejoker-core/src/main/java/pro/jiefzz/ejoker/infrastructure/impl/AbstractMessageHandler.java | package pro.jiefzz.ejoker.infrastructure.impl;
import java.util.concurrent.Future;
import pro.jiefzz.ejoker.common.system.task.AsyncTaskResult;
import pro.jiefzz.ejoker.messaging.IMessage;
import pro.jiefzz.ejoker.messaging.IMessageHandler;
/**
 * Convenience base class for message handlers: provides a default
 * {@link #handleAsync(IMessage)} that subclasses are expected to override.
 */
public abstract class AbstractMessageHandler implements IMessageHandler {

    @Override
    public Future<AsyncTaskResult<Void>> handleAsync(IMessage message) {
        // NOTE(review): `assert` is a no-op unless the JVM runs with -ea,
        // in which case this silently returns null to callers. Consider
        // throwing UnsupportedOperationException instead - confirm intent.
        assert false;
        return null;
    }

}
|
waterlili/laravel_flower | resources/assets/js/NGAc.js | <reponame>waterlili/laravel_flower
// AngularJS attribute directive wiring an autocomplete widget to a
// server-side lookup. ng-model receives the selected item, ng-opt is the
// options/state object, ng-link names the lookup endpoint.
app.directive('useAc', function ($http) {
    function link(s, elm, attrs) {
        // Make sure the bound model and options objects exist
        if (!s.n) {
            s.n = {};
        }
        if (!s.o) {
            s.o = {}
        }
        // Query callback: POST the search text (plus any extra payload the
        // host scope supplies via o.getData) and resolve with the results.
        s.o.querySearch = function ($search) {
            var _export = {textSearch: $search};
            if (s.o.getData) {
                _export.extra = s.o.getData();
            }
            return $http.post(home(s.link), _export).then(function (response) {
                return response.data;
            });
        };
        // Mirror the widget's selection back onto the ng-model binding
        s.$watch('o.selectedItem', function (n) {
            s.n = n;
        });
    }

    return {
        restrict: 'A',
        link: link,
        // Isolate scope: two-way bindings to the host's attributes
        scope: {
            n: '=ngModel',
            o: '=ngOpt',
            link: '=ngLink'
        }
    }
});
amaankhan02/SelfDrivingCar | RaspberryPi/Autonomous/SoloRPi3_Method/Commands.py | <filename>RaspberryPi/Autonomous/SoloRPi3_Method/Commands.py
from enum import Enum;
class Commands(Enum):
    """One-hot encoded driving commands exchanged with the car.

    Each value is a 4-element one-hot list: [left, right, forward, back].
    (List values are kept deliberately - external callers compare against
    lists, so switching to tuples would break equality checks.)
    """

    NO_CMD = [0, 0, 0, 0]
    LEFT = [1, 0, 0, 0]     # left forward
    RIGHT = [0, 1, 0, 0]    # right forward
    FORWARD = [0, 0, 1, 0]  # only forward straight
    BACK = [0, 0, 0, 1]

    @staticmethod
    def parseCommand(cmd):
        """Return the command name ("LEFT", "FORWARD", ...) for a member
        or a raw one-hot list, or None when nothing matches.

        Replaces the previous hand-written if/elif chain with a single
        loop over the members; the returned strings are identical.
        """
        for member in Commands:
            if cmd == member or cmd == member.value:
                return member.name
        return None
pwn1/csplib | internal/scripts/support/add_preview_link_to_pr.py | <reponame>pwn1/csplib
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Add build url to pull request
from github3 import login
import os
import sys

# Token for the CSPLib robot account - assumed to be set in the CI
# environment (TODO confirm it is always present; login(None) would fail).
token = os.getenv('CSPLIB_ROBOT_TOKEN')
gh = login(token=token)
if not gh:
    print("error logging in")
    sys.exit(3)

# Travis exposes the PR number as TRAVIS_PULL_REQUEST for PR builds.
issue_num = os.getenv('TRAVIS_PULL_REQUEST')
place = "PR-{}".format(issue_num)
text = "Build preview located at http://csplib.github.io/csplib-PR-builds"

pr = gh.issue('csplib', 'csplib', issue_num)
# Comment at most once per PR: look for an existing comment containing the
# preview link text.
already_added = any(i for i in pr.iter_comments() if text in i.to_json()['body'])

if not already_added:
    pr.create_comment("%s/%s/\n This will be automatically updated if more commits are added. If travis-ci is busy it may take ~10 minutes for the build to appear"
                      % (text, place))
    print("Added url to pull request")
else:
    print("already added url to pull request")
tienph91/Aspose.Slides-for-Java | Examples/src/main/java/com/aspose/slides/examples/tables/TableFromScratch.java | package com.aspose.slides.examples.tables;
import com.aspose.slides.*;
import com.aspose.slides.examples.RunExamples;
/**
 * Aspose.Slides example: locate the table on the first slide of an existing
 * presentation, change one cell's text, and save the result.
 */
public class TableFromScratch
{
    public static void main(String[] args)
    {
        //ExStart:TableFromScratch
        // The path to the documents directory.
        String dataDir = RunExamples.getDataDir_Tables();

        // Instantiate Presentation class that represents a PPTX file
        Presentation presentation = new Presentation(dataDir + "UpdateExistingTable.pptx");
        try
        {
            // Access the first slide
            ISlide sld = presentation.getSlides().get_Item(0);

            // Initialize null TableEx
            ITable table = null;

            // Iterate through the shapes and set a reference to the table found
            for (IShape shape : sld.getShapes())
                if (shape instanceof ITable)
                    table = (ITable) shape;

            // Guard against slides with no table at all - previously this
            // fell through to a NullPointerException below.
            if (table == null)
                throw new IllegalStateException("No table found on the first slide");

            // Set the text of the second cell of the first row
            // (row index 0, cell index 1 - the old comment had this backwards)
            table.getRows().get_Item(0).get_Item(1).getTextFrame().setText("New");

            // Write the PPTX to Disk
            presentation.save(dataDir + "UpdateTable_out.pptx", SaveFormat.Pptx);
        }
        finally
        {
            if (presentation != null) presentation.dispose();
        }
        //ExEnd:TableFromScratch
    }
}
|
PatMyron/goformation | cloudformation/quicksight/aws-quicksight-datasource_prestoparameters.go | // Code generated by "go generate". Please don't change this file directly.
package quicksight
import (
"github.com/awslabs/goformation/v6/cloudformation/policies"
)
// DataSource_PrestoParameters AWS CloudFormation Resource (AWS::QuickSight::DataSource.PrestoParameters)
// See: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-quicksight-datasource-prestoparameters.html
// DataSource_PrestoParameters AWS CloudFormation Resource (AWS::QuickSight::DataSource.PrestoParameters)
// See: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-quicksight-datasource-prestoparameters.html
//
// NOTE: this file is generated (see the header) - change the generator,
// not this struct, if edits are needed.
type DataSource_PrestoParameters struct {

	// Catalog AWS CloudFormation Property
	// Required: true
	// See: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-quicksight-datasource-prestoparameters.html#cfn-quicksight-datasource-prestoparameters-catalog
	Catalog string `json:"Catalog"`

	// Host AWS CloudFormation Property
	// Required: true
	// See: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-quicksight-datasource-prestoparameters.html#cfn-quicksight-datasource-prestoparameters-host
	Host string `json:"Host"`

	// Port AWS CloudFormation Property
	// Required: true
	// See: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-quicksight-datasource-prestoparameters.html#cfn-quicksight-datasource-prestoparameters-port
	Port float64 `json:"Port"`

	// AWSCloudFormationDeletionPolicy represents a CloudFormation DeletionPolicy
	AWSCloudFormationDeletionPolicy policies.DeletionPolicy `json:"-"`

	// AWSCloudFormationUpdateReplacePolicy represents a CloudFormation UpdateReplacePolicy
	AWSCloudFormationUpdateReplacePolicy policies.UpdateReplacePolicy `json:"-"`

	// AWSCloudFormationDependsOn stores the logical ID of the resources to be created before this resource
	AWSCloudFormationDependsOn []string `json:"-"`

	// AWSCloudFormationMetadata stores structured data associated with this resource
	AWSCloudFormationMetadata map[string]interface{} `json:"-"`

	// AWSCloudFormationCondition stores the logical ID of the condition that must be satisfied for this resource to be created
	AWSCloudFormationCondition string `json:"-"`
}
// AWSCloudFormationType returns the AWS CloudFormation resource type
// AWSCloudFormationType returns the AWS CloudFormation resource type
// (the fixed "Resource.Property" identifier for this generated struct).
func (r *DataSource_PrestoParameters) AWSCloudFormationType() string {
	return "AWS::QuickSight::DataSource.PrestoParameters"
}
|
anshika581/competitive-programming-1 | src/contest/usaco/USACO_2015_Cow_Hopscotch_Silver.java | <reponame>anshika581/competitive-programming-1
package contest.usaco;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.util.StringTokenizer;
// USACO 2015 "Cow Hopscotch" (Silver): count paths from cell (1,1) to
// (r,c) on an r x c coloured grid, where each hop goes to a cell strictly
// below AND strictly to the right, and consecutive cells must differ in
// colour. Answer is computed modulo 1e9+7 via a 3D prefix-sum DP.
public class USACO_2015_Cow_Hopscotch_Silver {

    static final int MOD = 1000000007;

    static BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
    static PrintWriter ps = new PrintWriter(new BufferedWriter(new OutputStreamWriter(System.out)));
    static StringTokenizer st;

    public static void main(String[] args) throws IOException {
        int r = readInt();
        int c = readInt();
        int k = readInt();
        // prefix[i][j][x] = 2D prefix sum over cells (1..i, 1..j) of the
        // number of valid paths ending at each cell of colour x.
        int[][][] prefix = new int[r + 1][c + 1][k];
        int[][] g = new int[r + 1][c + 1];
        for (int i = 1; i <= r; i++)
            for (int j = 1; j <= c; j++)
                g[i][j] = readInt() - 1;  // colours stored 0-based
        prefix[1][1][g[1][1]] = 1;  // one path: start at (1,1)
        for (int i = 1; i <= r; i++) {
            for (int j = 1; j <= c; j++) {
                if (i == 1 && j == 1)
                    continue;
                int sum = 0;
                int id = g[i][j];
                for (int x = 0; x < k; x++) {
                    // Standard inclusion-exclusion prefix-sum update; values
                    // may go negative mod-wise, normalised at the very end.
                    prefix[i][j][x] = (((prefix[i][j][x] + prefix[i - 1][j][x]) % MOD + prefix[i][j - 1][x]) % MOD - prefix[i - 1][j - 1][x]) % MOD;
                    if (x == id)
                        continue;
                    // Paths arriving from strictly up-left cells of a
                    // different colour can extend to (i,j).
                    sum = (sum + prefix[i - 1][j - 1][x]) % MOD;
                }
                prefix[i][j][id] = (prefix[i][j][id] + sum) % MOD;
            }
        }
        // Extract dp[r][c] from the prefix sums and normalise to [0, MOD).
        System.out.println(((((prefix[r][c][g[r][c]] - prefix[r - 1][c][g[r][c]]) % MOD - prefix[r][c - 1][g[r][c]]) % MOD + prefix[r - 1][c - 1][g[r][c]]) % MOD + MOD) % MOD);
    }

    // --- whitespace-tolerant tokenised input helpers ---
    static String next() throws IOException {
        while (st == null || !st.hasMoreTokens())
            st = new StringTokenizer(br.readLine().trim());
        return st.nextToken();
    }

    static long readLong() throws IOException {
        return Long.parseLong(next());
    }

    static int readInt() throws IOException {
        return Integer.parseInt(next());
    }

    static double readDouble() throws IOException {
        return Double.parseDouble(next());
    }

    static char readCharacter() throws IOException {
        return next().charAt(0);
    }

    static String readLine() throws IOException {
        return br.readLine().trim();
    }
}
|
fossabot/passbook | passbook/providers/oauth/settings.py | """passbook OAuth_Provider"""
# Allow cross-origin requests from any origin, since OAuth clients may be
# served from arbitrary third-party domains (django-cors-headers).
CORS_ORIGIN_ALLOW_ALL = True
# Approval prompt behaviour for authorization requests; "auto" is the
# django-oauth-toolkit setting value -- presumably skips re-approval for
# already-authorized clients (confirm against toolkit docs).
REQUEST_APPROVAL_PROMPT = "auto"
# Django apps this provider relies on: django-oauth-toolkit and
# django-cors-headers.
INSTALLED_APPS = [
    "oauth2_provider",
    "corsheaders",
]
# Middleware additions: authenticate requests that carry OAuth2 bearer
# tokens, and answer CORS preflight requests.
MIDDLEWARE = [
    "oauth2_provider.middleware.OAuth2TokenMiddleware",
    "corsheaders.middleware.CorsMiddleware",
]
# Let Django's auth machinery resolve users from OAuth2 tokens.
AUTHENTICATION_BACKENDS = [
    "oauth2_provider.backends.OAuth2Backend",
]
# Use passbook's own provider model instead of the toolkit's default
# Application model.
OAUTH2_PROVIDER_APPLICATION_MODEL = "passbook_providers_oauth.OAuth2Provider"
OAUTH2_PROVIDER = {
    # this is the list of available scopes
    "SCOPES": {
        "openid": "Access OpenID Userinfo",
        "openid:userinfo": "Access OpenID Userinfo",
        # 'write': 'Write scope',
        # 'groups': 'Access to your groups',
        "user:email": "GitHub Compatibility: User E-Mail",
        "read:org": "GitHub Compatibility: User Groups",
    }
}
|
jianmin/dmlc-hc-taxonomy | ui/bower_components/ml-utils/src/maps.js | <filename>ui/bower_components/ml-utils/src/maps.js
"use strict";
module.exports = function(app) {
app
.provider('mlMaps', function() {
// configurable options
var apiKey, clientId, v, sensor, cb = 'mlMapsLoaded', libraries = [], scriptUrl = 'https://maps.googleapis.com/maps/api/js';
var defaultOptions = {
center: [37.774546, -122.433523],
zoom: 8,
controlType: 'HORIZONTAL_BAR', // like google maps
zoomControl: 'LARGE',
disbleDefaultUI: true
};
// exposed config fuctions for .config() block
this.setApiKey = function(key) {
apiKey = key;
};
this.setClientId = function(id) {
clientId = id;
};
this.useSensor = function(flag) {
sensor = flag;
};
this.setScriptUrl = function(url) {
scriptUrl = url;
};
this.useLibraries = function(libs) {
libraries = libs;
};
this.addLibrary = function(lib) {
libraries.push(lib);
}
// since google maps is not loaded yet, any lat/lng must be arrays instead of google map objects
this.setDefaultOptions = function(options) {
defaultOptions = options;
};
// sets the default value for center, may still be overridden with createMap
this.defaultCenter = function(lat,lng) {
defaultOptions.center = [lat,lng];
};
this.defaultZoom = function(z) {
defaultOptions.zoom = z;
};
this.useVersion = function(vnum) {
v = vnum;
};
var createScriptUrl = function() {
var url = scriptUrl + '?callback=mlMapsLoaded';
if (apiKey) {
url += '&key=' + apiKey;
}
if (v) {
url += '&v=' + v;
}
url += '&sensor=' + (sensor ? 'true':'false');
if (clientId) {
url += '&clientId='+clientId;
}
if (libraries.length) {
url += '&libraries='+libraries.join(',');
}
return url;
};
this.$get = ['scriptLoader', '$q', function(scriptLoader,$q) {
var loader = $q.defer(), load = function() { return loader.promise; }, maps = {}, service = {};
// google maps does its own magic, so we need to listen for their callback not just wait for the script to be loaded
window.mlMapsLoaded = function() {
service.loaded = true;
loader.resolve();
}
//inject the googlemaps api if necessary
if (window.google && window.google.maps) {
load.resolve();
} else {
console.log('injecting the googlemaps api');
scriptLoader.loadScript(createScriptUrl());
}
// primarily used internally but exposed just in case someone wants to use it
service.resolveMapOrMapName = function(mapOrMapName) {
if (typeof mapOrMapName === 'string') {
var mapObj = maps[mapOrMapName];
if (mapObj) {
return mapObj.map;
}
} else {
return mapOrMapName; // it's a map (well we assume it is if it's not a string)
}
}
// callers need to make sure this is called only after a map has been loaded
service.addMarker = function(position,mapOrMapName) {
var map = service.resolveMapOrMapName(mapOrMapName);
var thisLatLng = new google.maps.LatLng(position[0],position[1]);
return new google.maps.Marker({
position: thisLatLng,
map: map
})
};
// create a new map
service.createMap = function(name,ele,options) {
name = name || 'default';
console.log('creating map '+name);
var mapOptions = {};
angular.extend(mapOptions,defaultOptions,options||{});
//console.log(mapOptions);
// we need to make sure the script has been loaded
return load().then(function() {
console.log('maps initialized');
// now that it's loaded we can convert things to google maps API objects
mapOptions.center = new google.maps.LatLng(mapOptions.center[0],mapOptions.center[1]);
if(mapOptions.controlType) {
mapOptions.mapTypeControl = true;
mapOptions.mapTypeControlOptions = {
style: google.maps.MapTypeControlStyle[mapOptions.controlType],
position: google.maps.ControlPosition.BOTTOM_CENTER
}
}
if (mapOptions.panControl) {
} else {
mapOptions.panControl = false;
}
if (mapOptions.zoomControl) {
mapOptions.zoomControlOptions = {
style: google.maps.ZoomControlStyle[mapOptions.zoomControl],
position: google.maps.ControlPosition.RIGHT_BOTTOM
}
mapOptions.zoomControl = true;
}
// create an object that will house other things like layers, markers, etc
maps[name] = {
name: name,
map: new google.maps.Map(ele,mapOptions)
};
console.log(maps[name].map)
maps[name].map._objMapName = name; // store our name on the map itself so we don't lose the connection
return maps[name];
});
};
service.showTraffic = function(mapName) { // potentially: support mapOrMapName here, for now just mapName
var mapObj = maps[mapName];
if (mapObj) {
if (!mapObj.trafficLayer) {
mapObj.trafficLayer = new google.maps.TrafficLayer();
}
mapObj.trafficLayer.setMap(mapObj.map);
}
}
service.hideTraffic = function(mapName) { // potentially: support mapOrMapName here, for now just mapName
var mapObj = maps[mapName];
if (mapObj && mapObj.trafficLayer) {
mapObj.trafficLayer.setMap();
}
}
service.showTransit = function(mapName) { // potentially: support mapOrMapName here, for now just mapName
var mapObj = maps[mapName];
if (mapObj) {
if (!mapObj.transitLayer) {
mapObj.transitLayer = new google.maps.TransitLayer();
}
mapObj.transitLayer.setMap(mapObj.map);
}
}
service.hideTransit = function(mapName) { // potentially: support mapOrMapName here, for now just mapName
var mapObj = maps[mapName];
if (mapObj && mapObj.trafficLayer) {
mapObj.transitLayer.setMap();
}
}
service.addHeatmap = function(mapName,heatMapData) {
console.log('adding heat map');
var mapObj = maps[mapName];
if (mapObj) {
if (!mapObj.heatmaps) {
mapObj.heatmaps = [];
}
var heatmap = new google.maps.visualization.HeatmapLayer({
data: heatMapData
});
heatmap.setMap(mapObj.map);
mapObj.heatmaps.push(heatmap);
}
};
service.hideHeatmaps = function(mapName) {
var mapObj = maps[mapName];
if (mapObj && mapObj.heatmaps) {
var hm = mapObj.heatmaps.pop();
while(hm) {
hm.setMap();
hm = mapObj.heatmaps.pop();
}
}
}
service.showHeatmaps = function(mapName) {
var mapObj = maps[mapName];
if (mapObj && mapObj.heatmaps) {
var hm = mapObj.heatmaps.pop();
while(hm) {
hm.setMap(mapObj.map);
hm = mapObj.heatmaps.pop();
}
}
}
service.loadKML = function(mapName,url) { // KML or GeoRSS URL - supports multiple
var mapObj = maps[mapName];
if (mapObj) {
if (!mapObj.kmlayers) {
mapObj.kmlayers = [];
}
console.log('adding kml to '+mapName+': '+url);
var kmlayer = new google.maps.KmlLayer({url: url});
kmlayer.setMap(mapObj.map);
mapObj.kmlayers.push(kmlayer);
return kmlayer;
}
}
service.loadGeoJson = function(mapName,url) { // GeoJSON - only allows a single data layer
var mapObj = maps[mapName];
if (mapObj && mapObj.map) {
mapObj.map.data.loadGeoJson(url);
}
}
return service;
}];
})
// insert a map into any DOM element <div ml-map="mapName"></div>
.directive('mlMap', ['mlMaps', function(mlMaps) {
return {
restrict: 'A',
scope: {
mlMap: '@', // directive also contains the map name
controls: '@', // [optional] valid: 'off', 'false', 'default', any other values equate to true
zoom: '@', // [optional] set a numeric zoom level
center: '=', // [optional] the lat,lng value center of the map, expected: an array
markerAt: '=' // [optional] the lat,lng coords of a default marker to add, expected: an array
},
link: function(scope,element,attributes) {
var opts = { };
if (scope.center || scope.markerAt) {
opts.center = scope.center || scope.markerAt;
}
if (scope.zoom && !isNaN(scope.zoom)) {
opts.zoom = parseInt(scope.zoom);
}
// construct the options object
if (scope.controls === 'false' || scope.controls === 'off') {
opts.controlType = false;
opts.mapTypeControl = false;
opts.streetViewControl = false;
opts.zoomControl = false;
} else if (scope.controls === 'default') {
opts.controlType =false;
}
var mapName = scope.mlMap || 'default'
// use the name or default if empty attribute
var map = mlMaps.createMap(mapName,element[0], opts);
if (scope.markerAt && scope.markerAt.length === 2) {
// add marker after the google script has loaded
map.then(function(map) {
mlMaps.addMarker(scope.markerAt,map.map);
});
}
}
}
}]);
}; |
isabella232/aistreams | third_party/gst-plugins-base/gst/playback/gstparsebin.c | <reponame>isabella232/aistreams<filename>third_party/gst-plugins-base/gst/playback/gstparsebin.c<gh_stars>1-10
/* GStreamer
* Copyright (C) <2006> <NAME> <<EMAIL>>
* Copyright (C) <2009> <NAME> <<EMAIL>>
* Copyright (C) <2011> Hewlett-Packard Development Company, L.P.
* Author: <NAME> <<EMAIL>>, Collabora Ltd.
* Copyright (C) <2013> Collabora Ltd.
* Author: <NAME> <<EMAIL>>
* Copyright (C) <2015-2016> Centricular Ltd
* @author: <NAME> <<EMAIL>>
* @author: <NAME> <<EMAIL>>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
/**
* SECTION:element-parsebin
* @title: parsebin
*
* #GstBin that auto-magically constructs a parsing pipeline
* using available parsers and demuxers via auto-plugging.
*
* parsebin unpacks the contents of the input stream to the
* level of parsed elementary streams, but unlike decodebin
* it doesn't connect decoder elements. The output pads
* produce packetised encoded data with timestamps where possible,
* or send missing-element messages where not.
*
* <emphasis>parsebin is still experimental API and a technology preview.
* Its behaviour and exposed API is subject to change.</emphasis>
*/
/* Implementation notes:
*
* The following section describes how ParseBin works internally.
*
* The first part of ParseBin is its typefind element, which tries
* to determine the media type of the input stream. If the type is found
* autoplugging starts.
*
* ParseBin internally organizes the elements it autoplugged into
* GstParseChains and GstParseGroups. A parse chain is a single chain
* of parsing, this
* means that if ParseBin ever autoplugs an element with two+ srcpads
* (e.g. a demuxer) this will end the chain and everything following this
* demuxer will be put into parse groups below the chain. Otherwise,
* if an element has a single srcpad that outputs raw data the parse chain
* is ended too and a GstParsePad is stored and blocked.
*
* A parse group combines a number of chains that are created by a
* demuxer element.
*
* This continues until the top-level parse chain is complete. A parse
* chain is complete if it either ends with a blocked elementary stream,
* if autoplugging stopped because no suitable plugins could be found
* or if the active group is complete. A parse group on the other hand
* is complete if all child chains are complete.
*
* If this happens at some point, all end pads of all active groups are exposed.
* For this ParseBin adds the end pads, and then unblocks them. Now playback starts.
*
* If one of the chains that end on a endpad receives EOS ParseBin checks
* if all chains and groups are drained. In that case everything goes into EOS.
* If there is a chain where the active group is drained but there exist next
* groups, the active group is hidden (endpads are removed) and the next group
* is exposed. This means that in some cases more pads may be created even
* after the initial no-more-pads signal. This happens for example with
* so-called "chained oggs", most commonly found among ogg/vorbis internet
* radio streams.
*
* Note 1: If we're talking about blocked endpads this really means that the
* *target* pads of the endpads are blocked. Pads that are exposed to the outside
* should never ever be blocked!
*
* Note 2: If a group is complete and the parent's chain demuxer adds new pads
* but never signaled no-more-pads this additional pads will be ignored!
*
*/
/* FIXME 0.11: suppress warnings for deprecated API such as GValueArray
* with newer GLib versions (>= 2.31.0) */
#define GLIB_DISABLE_DEPRECATION_WARNINGS
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst-i18n-plugin.h>
#include <string.h>
#include <gst/gst.h>
#include <gst/pbutils/pbutils.h>
#include "gstplay-enum.h"
#include "gstplayback.h"
#include "gstplaybackutils.h"
#include "gstrawcaps.h"
/* generic templates */
static GstStaticPadTemplate parse_bin_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS_ANY);
static GstStaticPadTemplate parse_bin_src_template =
GST_STATIC_PAD_TEMPLATE ("src_%u",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
GST_STATIC_CAPS_ANY);
GST_DEBUG_CATEGORY_STATIC (gst_parse_bin_debug);
#define GST_CAT_DEFAULT gst_parse_bin_debug
typedef struct _GstPendingPad GstPendingPad;
typedef struct _GstParseElement GstParseElement;
typedef struct _GstParseChain GstParseChain;
typedef struct _GstParseGroup GstParseGroup;
typedef struct _GstParsePad GstParsePad;
typedef GstGhostPadClass GstParsePadClass;
typedef struct _GstParseBin GstParseBin;
typedef struct _GstParseBinClass GstParseBinClass;
#define GST_TYPE_PARSE_BIN (gst_parse_bin_get_type())
#define GST_PARSE_BIN_CAST(obj) ((GstParseBin*)(obj))
#define GST_PARSE_BIN(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_PARSE_BIN,GstParseBin))
#define GST_PARSE_BIN_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_PARSE_BIN,GstParseBinClass))
#define GST_IS_parse_bin(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_PARSE_BIN))
#define GST_IS_parse_bin_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_PARSE_BIN))
/**
* GstParseBin:
*
* The opaque #GstParseBin data structure
*/
struct _GstParseBin
{
GstBin bin; /* we extend GstBin */
/* properties */
gchar *encoding; /* encoding of subtitles */
guint64 connection_speed;
GstElement *typefind; /* this holds the typefind object */
GMutex expose_lock; /* Protects exposal and removal of groups */
GstParseChain *parse_chain; /* Top level parse chain */
guint nbpads; /* unique identifier for source pads */
GMutex factories_lock;
guint32 factories_cookie; /* Cookie from last time when factories was updated */
GList *factories; /* factories we can use for selecting elements */
GMutex subtitle_lock; /* Protects changes to subtitles and encoding */
GList *subtitles; /* List of elements with subtitle-encoding,
* protected by above mutex! */
gboolean have_type; /* if we received the have_type signal */
guint have_type_id; /* signal id for have-type from typefind */
GMutex dyn_lock; /* lock protecting pad blocking */
gboolean shutdown; /* if we are shutting down */
GList *blocked_pads; /* pads that have set to block */
gboolean expose_allstreams; /* Whether to expose unknow type streams or not */
GList *filtered; /* elements for which error messages are filtered */
GList *filtered_errors; /* filtered error messages */
GMutex cleanup_lock; /* Mutex used to protect the cleanup thread */
GThread *cleanup_thread; /* thread used to free chains asynchronously.
* We store it to make sure we end up joining it
* before stopping the element.
* Protected by the object lock */
};
struct _GstParseBinClass
{
GstBinClass parent_class;
/* signal fired when we found a pad that we cannot parse */
void (*unknown_type) (GstElement * element, GstPad * pad, GstCaps * caps);
/* signal fired to know if we continue trying to parse the given caps */
gboolean (*autoplug_continue) (GstElement * element, GstPad * pad,
GstCaps * caps);
/* signal fired to get a list of factories to try to autoplug */
GValueArray *(*autoplug_factories) (GstElement * element, GstPad * pad,
GstCaps * caps);
/* signal fired to sort the factories */
GValueArray *(*autoplug_sort) (GstElement * element, GstPad * pad,
GstCaps * caps, GValueArray * factories);
/* signal fired to select from the proposed list of factories */
GstAutoplugSelectResult (*autoplug_select) (GstElement * element,
GstPad * pad, GstCaps * caps, GstElementFactory * factory);
/* signal fired when a autoplugged element that is not linked downstream
* or exposed wants to query something */
gboolean (*autoplug_query) (GstElement * element, GstPad * pad,
GstQuery * query);
/* fired when the last group is drained */
void (*drained) (GstElement * element);
};
/* signals */
enum
{
SIGNAL_UNKNOWN_TYPE,
SIGNAL_AUTOPLUG_CONTINUE,
SIGNAL_AUTOPLUG_FACTORIES,
SIGNAL_AUTOPLUG_SELECT,
SIGNAL_AUTOPLUG_SORT,
SIGNAL_AUTOPLUG_QUERY,
SIGNAL_DRAINED,
LAST_SIGNAL
};
#define DEFAULT_SUBTITLE_ENCODING NULL
/* by default we use the automatic values above */
#define DEFAULT_EXPOSE_ALL_STREAMS TRUE
#define DEFAULT_CONNECTION_SPEED 0
/* Properties */
enum
{
PROP_0,
PROP_SUBTITLE_ENCODING,
PROP_SINK_CAPS,
PROP_EXPOSE_ALL_STREAMS,
PROP_CONNECTION_SPEED
};
static GstBinClass *parent_class;
static guint gst_parse_bin_signals[LAST_SIGNAL] = { 0 };
static void type_found (GstElement * typefind, guint probability,
GstCaps * caps, GstParseBin * parse_bin);
static gboolean gst_parse_bin_autoplug_continue (GstElement * element,
GstPad * pad, GstCaps * caps);
static GValueArray *gst_parse_bin_autoplug_factories (GstElement *
element, GstPad * pad, GstCaps * caps);
static GValueArray *gst_parse_bin_autoplug_sort (GstElement * element,
GstPad * pad, GstCaps * caps, GValueArray * factories);
static GstAutoplugSelectResult gst_parse_bin_autoplug_select (GstElement *
element, GstPad * pad, GstCaps * caps, GstElementFactory * factory);
static gboolean gst_parse_bin_autoplug_query (GstElement * element,
GstPad * pad, GstQuery * query);
static void gst_parse_bin_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_parse_bin_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void caps_notify_cb (GstPad * pad, GParamSpec * unused,
GstParseChain * chain);
static GstStateChangeReturn gst_parse_bin_change_state (GstElement * element,
GstStateChange transition);
static void gst_parse_bin_handle_message (GstBin * bin, GstMessage * message);
static void gst_parse_pad_update_caps (GstParsePad * parsepad, GstCaps * caps);
static void gst_parse_pad_update_tags (GstParsePad * parsepad,
GstTagList * tags);
static GstEvent *gst_parse_pad_stream_start_event (GstParsePad * parsepad,
GstEvent * event);
static void gst_parse_pad_update_stream_collection (GstParsePad * parsepad,
GstStreamCollection * collection);
static GstCaps *get_pad_caps (GstPad * pad);
static GstStreamType guess_stream_type_from_caps (GstCaps * caps);
#define EXPOSE_LOCK(parsebin) G_STMT_START { \
GST_LOG_OBJECT (parsebin, \
"expose locking from thread %p", \
g_thread_self ()); \
g_mutex_lock (&GST_PARSE_BIN_CAST(parsebin)->expose_lock); \
GST_LOG_OBJECT (parsebin, \
"expose locked from thread %p", \
g_thread_self ()); \
} G_STMT_END
#define EXPOSE_UNLOCK(parsebin) G_STMT_START { \
GST_LOG_OBJECT (parsebin, \
"expose unlocking from thread %p", \
g_thread_self ()); \
g_mutex_unlock (&GST_PARSE_BIN_CAST(parsebin)->expose_lock); \
} G_STMT_END
#define DYN_LOCK(parsebin) G_STMT_START { \
GST_LOG_OBJECT (parsebin, \
"dynlocking from thread %p", \
g_thread_self ()); \
g_mutex_lock (&GST_PARSE_BIN_CAST(parsebin)->dyn_lock); \
GST_LOG_OBJECT (parsebin, \
"dynlocked from thread %p", \
g_thread_self ()); \
} G_STMT_END
#define DYN_UNLOCK(parsebin) G_STMT_START { \
GST_LOG_OBJECT (parsebin, \
"dynunlocking from thread %p", \
g_thread_self ()); \
g_mutex_unlock (&GST_PARSE_BIN_CAST(parsebin)->dyn_lock); \
} G_STMT_END
#define SUBTITLE_LOCK(parsebin) G_STMT_START { \
GST_LOG_OBJECT (parsebin, \
"subtitle locking from thread %p", \
g_thread_self ()); \
g_mutex_lock (&GST_PARSE_BIN_CAST(parsebin)->subtitle_lock); \
GST_LOG_OBJECT (parsebin, \
"subtitle lock from thread %p", \
g_thread_self ()); \
} G_STMT_END
#define SUBTITLE_UNLOCK(parsebin) G_STMT_START { \
GST_LOG_OBJECT (parsebin, \
"subtitle unlocking from thread %p", \
g_thread_self ()); \
g_mutex_unlock (&GST_PARSE_BIN_CAST(parsebin)->subtitle_lock); \
} G_STMT_END
/* GstPendingPad
 *
 * A chain source pad whose caps are not fixed yet (see
 * GstParseChain.pending_pads). The stored ids presumably allow the pad
 * probe and caps-notify handler to be removed later -- confirm in the
 * teardown code outside this chunk.
 */
struct _GstPendingPad
{
  GstPad *pad;                  /* the pad still waiting for fixed caps */
  GstParseChain *chain;         /* chain this pending pad belongs to */
  gulong event_probe_id;        /* pad probe id (likely pad_event_cb) */
  gulong notify_caps_id;        /* "notify::caps" handler id (likely caps_notify_cb) */
};
/* GstParseElement
 *
 * One element autoplugged into a chain, plus the signal-handler ids
 * recorded for it. NOTE(review): the handlers are presumably
 * disconnected on teardown; that code is outside this chunk.
 */
struct _GstParseElement
{
  GstElement *element;          /* the autoplugged element itself */
  GstElement *capsfilter;       /* Optional capsfilter for Parser/Convert */
  gulong pad_added_id;          /* "pad-added" signal handler id */
  gulong pad_removed_id;        /* "pad-removed" signal handler id */
  gulong no_more_pads_id;       /* "no-more-pads" signal handler id */
};
/* GstParseGroup
*
* Streams belonging to the same group/chain of a media file
*
* When changing something here lock the parent chain!
*/
struct _GstParseGroup
{
GstParseBin *parsebin;
GstParseChain *parent;
gboolean no_more_pads; /* TRUE if the demuxer signaled no-more-pads */
gboolean drained; /* TRUE if the all children are drained */
GList *children; /* List of GstParseChains in this group */
};
struct _GstParseChain
{
GstParseGroup *parent;
GstParseBin *parsebin;
GMutex lock; /* Protects this chain and its groups */
GstPad *pad; /* srcpad that caused creation of this chain */
GstCaps *start_caps; /* The initial caps of this chain */
gboolean drained; /* TRUE if the all children are drained */
gboolean demuxer; /* TRUE if elements->data is a demuxer */
gboolean parsed; /* TRUE if any elements are a parser */
GList *elements; /* All elements in this group, first
is the latest and most downstream element */
/* Note: there are only groups if the last element of this chain
* is a demuxer, otherwise the chain will end with an endpad.
* The other way around this means, that endpad only exists if this
* chain doesn't end with a demuxer! */
GstParseGroup *active_group; /* Currently active group */
GList *next_groups; /* head is newest group, tail is next group.
a new group will be created only if the head
group had no-more-pads. If it's only exposed
all new pads will be ignored! */
GList *pending_pads; /* Pads that have no fixed caps yet */
GstParsePad *current_pad; /* Current ending pad of the chain that can't
* be exposed yet but would be the same as endpad
* once it can be exposed */
GstParsePad *endpad; /* Pad of this chain that could be exposed */
gboolean deadend; /* This chain is incomplete and can't be completed,
e.g. no suitable parser could be found
e.g. stream got EOS without buffers
*/
gchar *deadend_details;
GstCaps *endcaps; /* Caps that were used when linking to the endpad
or that resulted in the deadend
*/
/* FIXME: This should be done directly via a thread! */
GList *old_groups; /* Groups that should be freed later */
};
static void gst_parse_chain_free (GstParseChain * chain);
static GstParseChain *gst_parse_chain_new (GstParseBin * parsebin,
GstParseGroup * group, GstPad * pad, GstCaps * start_caps);
static void gst_parse_group_hide (GstParseGroup * group);
static void gst_parse_group_free (GstParseGroup * group);
static GstParseGroup *gst_parse_group_new (GstParseBin * parsebin,
GstParseChain * chain);
static gboolean gst_parse_chain_is_complete (GstParseChain * chain);
static gboolean gst_parse_chain_expose (GstParseChain * chain,
GList ** endpads, gboolean * missing_plugin,
GString * missing_plugin_details, gboolean * last_group,
gboolean * uncollected_streams);
static void build_fallback_collection (GstParseChain * chain,
GstStreamCollection * collection);
static gboolean gst_parse_chain_is_drained (GstParseChain * chain);
static gboolean gst_parse_group_is_complete (GstParseGroup * group);
static gboolean gst_parse_group_is_drained (GstParseGroup * group);
static gboolean gst_parse_bin_expose (GstParseBin * parsebin);
#define CHAIN_MUTEX_LOCK(chain) G_STMT_START { \
GST_LOG_OBJECT (chain->parsebin, \
"locking chain %p from thread %p", \
chain, g_thread_self ()); \
g_mutex_lock (&chain->lock); \
GST_LOG_OBJECT (chain->parsebin, \
"locked chain %p from thread %p", \
chain, g_thread_self ()); \
} G_STMT_END
#define CHAIN_MUTEX_UNLOCK(chain) G_STMT_START { \
GST_LOG_OBJECT (chain->parsebin, \
"unlocking chain %p from thread %p", \
chain, g_thread_self ()); \
g_mutex_unlock (&chain->lock); \
} G_STMT_END
/* GstParsePad
*
* GstPad private used for source pads of chains
*/
struct _GstParsePad
{
  GstGhostPad parent;           /* we extend GstGhostPad */
  GstParseBin *parsebin;        /* the owning ParseBin */
  GstParseChain *chain;         /* the chain this pad terminates */
  gboolean blocked;             /* the *target* pad is blocked */
  gboolean exposed;             /* the pad is exposed */
  gboolean drained;             /* an EOS has been seen on the pad */
  gulong block_id;              /* probe id used for (un)blocking the target
                                 * pad -- presumably removed in
                                 * gst_parse_pad_unblock; confirm there */
  gboolean in_a_fallback_collection;    /* NOTE(review): appears to mark pads
                                 * published via build_fallback_collection --
                                 * confirm against that function */
  GstStreamCollection *active_collection;       /* collection last set on this pad
                                 * (see gst_parse_pad_update_stream_collection) */
  GstStream *active_stream;     /* stream last associated with this pad --
                                 * TODO confirm where it is assigned */
};
GType gst_parse_pad_get_type (void);
G_DEFINE_TYPE (GstParsePad, gst_parse_pad, GST_TYPE_GHOST_PAD);
#define GST_TYPE_PARSE_PAD (gst_parse_pad_get_type ())
#define GST_PARSE_PAD(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_PARSE_PAD,GstParsePad))
static GstParsePad *gst_parse_pad_new (GstParseBin * parsebin,
GstParseChain * chain);
static void gst_parse_pad_activate (GstParsePad * parsepad,
GstParseChain * chain);
static void gst_parse_pad_unblock (GstParsePad * parsepad);
static void gst_parse_pad_set_blocked (GstParsePad * parsepad,
gboolean blocked);
static gboolean gst_parse_pad_query (GstPad * pad, GstObject * parent,
GstQuery * query);
static GstPadProbeReturn
gst_parse_pad_event (GstPad * pad, GstPadProbeInfo * info, gpointer user_data);
static void gst_pending_pad_free (GstPendingPad * ppad);
static GstPadProbeReturn pad_event_cb (GstPad * pad, GstPadProbeInfo * info,
gpointer data);
/********************************
* Standard GObject boilerplate *
********************************/
static void gst_parse_bin_class_init (GstParseBinClass * klass);
static void gst_parse_bin_init (GstParseBin * parse_bin);
static void gst_parse_bin_dispose (GObject * object);
static void gst_parse_bin_finalize (GObject * object);
static GType
gst_parse_bin_get_type (void)
{
static GType gst_parse_bin_type = 0;
if (!gst_parse_bin_type) {
static const GTypeInfo gst_parse_bin_info = {
sizeof (GstParseBinClass),
NULL,
NULL,
(GClassInitFunc) gst_parse_bin_class_init,
NULL,
NULL,
sizeof (GstParseBin),
0,
(GInstanceInitFunc) gst_parse_bin_init,
NULL
};
gst_parse_bin_type =
g_type_register_static (GST_TYPE_BIN, "GstParseBin",
&gst_parse_bin_info, 0);
}
return gst_parse_bin_type;
}
/* Signal accumulator: stores the handler's boolean result in the
 * accumulator and aborts the emission as soon as a handler returns
 * FALSE. */
static gboolean
_gst_boolean_accumulator (GSignalInvocationHint * ihint,
    GValue * return_accu, const GValue * handler_return, gpointer dummy)
{
  const gboolean result = g_value_get_boolean (handler_return);

  if ((ihint->run_type & G_SIGNAL_RUN_CLEANUP) == 0)
    g_value_set_boolean (return_accu, result);

  /* a FALSE return value stops the emission */
  return result;
}
/* Signal accumulator: ORs each handler's boolean result into the
 * accumulated value; the emission always continues to the next
 * handler. */
static gboolean
_gst_boolean_or_accumulator (GSignalInvocationHint * ihint,
    GValue * return_accu, const GValue * handler_return, gpointer dummy)
{
  const gboolean current = g_value_get_boolean (handler_return);
  const gboolean accumulated = g_value_get_boolean (return_accu);

  if ((ihint->run_type & G_SIGNAL_RUN_CLEANUP) == 0)
    g_value_set_boolean (return_accu, current || accumulated);

  /* never stop the emission */
  return TRUE;
}
/* Signal accumulator: keeps only the first handler's boxed result and
 * then stops the emission. */
static gboolean
_gst_array_accumulator (GSignalInvocationHint * ihint,
    GValue * return_accu, const GValue * handler_return, gpointer dummy)
{
  gpointer boxed = g_value_get_boxed (handler_return);

  if ((ihint->run_type & G_SIGNAL_RUN_CLEANUP) == 0)
    g_value_set_boxed (return_accu, boxed);

  /* FALSE: no further handlers are invoked */
  return FALSE;
}
/* Signal accumulator for autoplug-select: stores the handler's decision
 * and keeps the emission going only while handlers answer TRY. This
 * makes it possible to register separate autoplug-select handlers that
 * implement different TRY/EXPOSE/SKIP strategies. */
static gboolean
_gst_select_accumulator (GSignalInvocationHint * ihint,
    GValue * return_accu, const GValue * handler_return, gpointer dummy)
{
  GstAutoplugSelectResult decision = g_value_get_enum (handler_return);

  if ((ihint->run_type & G_SIGNAL_RUN_CLEANUP) == 0)
    g_value_set_enum (return_accu, decision);

  /* call the next handler in the chain (if any) only on TRY */
  return decision == GST_AUTOPLUG_SELECT_TRY;
}
/* Signal accumulator: stores each handler's boxed result and stops the
 * emission as soon as a handler produced a non-NULL value. */
static gboolean
_gst_array_hasvalue_accumulator (GSignalInvocationHint * ihint,
    GValue * return_accu, const GValue * handler_return, gpointer dummy)
{
  gpointer boxed = g_value_get_boxed (handler_return);

  if ((ihint->run_type & G_SIGNAL_RUN_CLEANUP) == 0)
    g_value_set_boxed (return_accu, boxed);

  /* keep emitting only while handlers return NULL */
  return boxed == NULL;
}
/* GObject class initializer: wires up vmethods, registers the autoplug
 * signal set with their accumulators, installs the properties and the
 * pad templates, and sets the default autoplug vfunc implementations. */
static void
gst_parse_bin_class_init (GstParseBinClass * klass)
{
  GObjectClass *gobject_klass;
  GstElementClass *gstelement_klass;
  GstBinClass *gstbin_klass;
  gobject_klass = (GObjectClass *) klass;
  gstelement_klass = (GstElementClass *) klass;
  gstbin_klass = (GstBinClass *) klass;
  parent_class = g_type_class_peek_parent (klass);
  /* GObject lifecycle and property vmethods */
  gobject_klass->dispose = gst_parse_bin_dispose;
  gobject_klass->finalize = gst_parse_bin_finalize;
  gobject_klass->set_property = gst_parse_bin_set_property;
  gobject_klass->get_property = gst_parse_bin_get_property;
  /**
   * GstParseBin::unknown-type:
   * @bin: The ParseBin.
   * @pad: The new pad containing caps that cannot be resolved to a 'final'
   * stream type.
   * @caps: The #GstCaps of the pad that cannot be resolved.
   *
   * This signal is emitted when a pad for which there is no further possible
   * parsing is added to the ParseBin.
   */
  gst_parse_bin_signals[SIGNAL_UNKNOWN_TYPE] =
      g_signal_new ("unknown-type", G_TYPE_FROM_CLASS (klass),
      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstParseBinClass, unknown_type),
      NULL, NULL, g_cclosure_marshal_generic, G_TYPE_NONE, 2,
      GST_TYPE_PAD, GST_TYPE_CAPS);
  /**
   * GstParseBin::autoplug-continue:
   * @bin: The ParseBin.
   * @pad: The #GstPad.
   * @caps: The #GstCaps found.
   *
   * This signal is emitted whenever ParseBin finds a new stream. It is
   * emitted before looking for any elements that can handle that stream.
   *
   * > Invocation of signal handlers stops after the first signal handler
   * > returns %FALSE. Signal handlers are invoked in the order they were
   * > connected in.
   *
   * Returns: %TRUE if you wish ParseBin to look for elements that can
   * handle the given @caps. If %FALSE, those caps will be considered as
   * final and the pad will be exposed as such (see 'pad-added' signal of
   * #GstElement).
   */
  gst_parse_bin_signals[SIGNAL_AUTOPLUG_CONTINUE] =
      g_signal_new ("autoplug-continue", G_TYPE_FROM_CLASS (klass),
      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstParseBinClass, autoplug_continue),
      _gst_boolean_accumulator, NULL, g_cclosure_marshal_generic,
      G_TYPE_BOOLEAN, 2, GST_TYPE_PAD, GST_TYPE_CAPS);
  /**
   * GstParseBin::autoplug-factories:
   * @bin: The ParseBin.
   * @pad: The #GstPad.
   * @caps: The #GstCaps found.
   *
   * This function is emitted when an array of possible factories for @caps on
   * @pad is needed. ParseBin will by default return an array with all
   * compatible factories, sorted by rank.
   *
   * If this function returns NULL, @pad will be exposed as a final caps.
   *
   * If this function returns an empty array, the pad will be considered as
   * having an unhandled type media type.
   *
   * > Only the signal handler that is connected first will ever by invoked.
   * > Don't connect signal handlers with the #G_CONNECT_AFTER flag to this
   * > signal, they will never be invoked!
   *
   * Returns: a #GValueArray* with a list of factories to try. The factories are
   * by default tried in the returned order or based on the index returned by
   * "autoplug-select".
   */
  gst_parse_bin_signals[SIGNAL_AUTOPLUG_FACTORIES] =
      g_signal_new ("autoplug-factories", G_TYPE_FROM_CLASS (klass),
      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstParseBinClass,
          autoplug_factories), _gst_array_accumulator, NULL,
      g_cclosure_marshal_generic, G_TYPE_VALUE_ARRAY, 2,
      GST_TYPE_PAD, GST_TYPE_CAPS);
  /**
   * GstParseBin::autoplug-sort:
   * @bin: The ParseBin.
   * @pad: The #GstPad.
   * @caps: The #GstCaps.
   * @factories: A #GValueArray of possible #GstElementFactory to use.
   *
   * Once ParseBin has found the possible #GstElementFactory objects to try
   * for @caps on @pad, this signal is emitted. The purpose of the signal is for
   * the application to perform additional sorting or filtering on the element
   * factory array.
   *
   * The callee should copy and modify @factories or return %NULL if the
   * order should not change.
   *
   * > Invocation of signal handlers stops after one signal handler has
   * > returned something else than %NULL. Signal handlers are invoked in
   * > the order they were connected in.
   * > Don't connect signal handlers with the #G_CONNECT_AFTER flag to this
   * > signal, they will never be invoked!
   *
   * Returns: A new sorted array of #GstElementFactory objects.
   */
  gst_parse_bin_signals[SIGNAL_AUTOPLUG_SORT] =
      g_signal_new ("autoplug-sort", G_TYPE_FROM_CLASS (klass),
      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstParseBinClass, autoplug_sort),
      _gst_array_hasvalue_accumulator, NULL,
      g_cclosure_marshal_generic, G_TYPE_VALUE_ARRAY, 3, GST_TYPE_PAD,
      GST_TYPE_CAPS, G_TYPE_VALUE_ARRAY | G_SIGNAL_TYPE_STATIC_SCOPE);
  /**
   * GstParseBin::autoplug-select:
   * @bin: The ParseBin.
   * @pad: The #GstPad.
   * @caps: The #GstCaps.
   * @factory: A #GstElementFactory to use.
   *
   * This signal is emitted once ParseBin has found all the possible
   * #GstElementFactory that can be used to handle the given @caps. For each of
   * those factories, this signal is emitted.
   *
   * The signal handler should return a #GST_TYPE_AUTOPLUG_SELECT_RESULT enum
   * value indicating what ParseBin should do next.
   *
   * A value of #GST_AUTOPLUG_SELECT_TRY will try to autoplug an element from
   * @factory.
   *
   * A value of #GST_AUTOPLUG_SELECT_EXPOSE will expose @pad without plugging
   * any element to it.
   *
   * A value of #GST_AUTOPLUG_SELECT_SKIP will skip @factory and move to the
   * next factory.
   *
   * > The signal handler will not be invoked if any of the previously
   * > registered signal handlers (if any) return a value other than
   * > GST_AUTOPLUG_SELECT_TRY. Which also means that if you return
   * > GST_AUTOPLUG_SELECT_TRY from one signal handler, handlers that get
   * > registered next (again, if any) can override that decision.
   *
   * Returns: a #GST_TYPE_AUTOPLUG_SELECT_RESULT that indicates the required
   * operation. the default handler will always return
   * #GST_AUTOPLUG_SELECT_TRY.
   */
  gst_parse_bin_signals[SIGNAL_AUTOPLUG_SELECT] =
      g_signal_new ("autoplug-select", G_TYPE_FROM_CLASS (klass),
      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstParseBinClass, autoplug_select),
      _gst_select_accumulator, NULL,
      g_cclosure_marshal_generic,
      GST_TYPE_AUTOPLUG_SELECT_RESULT, 3, GST_TYPE_PAD, GST_TYPE_CAPS,
      GST_TYPE_ELEMENT_FACTORY);
  /**
   * GstParseBin::autoplug-query:
   * @bin: The ParseBin.
   * @child: The child element doing the query
   * @pad: The #GstPad.
   * @element: The #GstElement.
   * @query: The #GstQuery.
   *
   * This signal is emitted whenever an autoplugged element that is
   * not linked downstream yet and not exposed does a query. It can
   * be used to tell the element about the downstream supported caps
   * for example.
   *
   * Returns: %TRUE if the query was handled, %FALSE otherwise.
   */
  gst_parse_bin_signals[SIGNAL_AUTOPLUG_QUERY] =
      g_signal_new ("autoplug-query", G_TYPE_FROM_CLASS (klass),
      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstParseBinClass, autoplug_query),
      _gst_boolean_or_accumulator, NULL, g_cclosure_marshal_generic,
      G_TYPE_BOOLEAN, 3, GST_TYPE_PAD, GST_TYPE_ELEMENT,
      GST_TYPE_QUERY | G_SIGNAL_TYPE_STATIC_SCOPE);
  /**
   * GstParseBin::drained
   * @bin: The ParseBin
   *
   * This signal is emitted once ParseBin has finished parsing all the data.
   */
  /* NOTE(review): the trailing G_TYPE_NONE is superfluous here since the
   * signal declares 0 parameters; harmless but could be dropped. */
  gst_parse_bin_signals[SIGNAL_DRAINED] =
      g_signal_new ("drained", G_TYPE_FROM_CLASS (klass),
      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstParseBinClass, drained),
      NULL, NULL, g_cclosure_marshal_generic, G_TYPE_NONE, 0, G_TYPE_NONE);
  /* Properties */
  g_object_class_install_property (gobject_klass, PROP_SUBTITLE_ENCODING,
      g_param_spec_string ("subtitle-encoding", "subtitle encoding",
          "Encoding to assume if input subtitles are not in UTF-8 encoding. "
          "If not set, the GST_SUBTITLE_ENCODING environment variable will "
          "be checked for an encoding to use. If that is not set either, "
          "ISO-8859-15 will be assumed.", NULL,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_klass, PROP_SINK_CAPS,
      g_param_spec_boxed ("sink-caps", "Sink Caps",
          "The caps of the input data. (NULL = use typefind element)",
          GST_TYPE_CAPS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  /**
   * GstParseBin::expose-all-streams
   *
   * Expose streams of unknown type.
   *
   * If set to %FALSE, then only the streams that can be parsed to the final
   * caps (see 'caps' property) will have a pad exposed. Streams that do not
   * match those caps but could have been parsed will not have parser plugged
   * in internally and will not have a pad exposed.
   */
  g_object_class_install_property (gobject_klass, PROP_EXPOSE_ALL_STREAMS,
      g_param_spec_boolean ("expose-all-streams", "Expose All Streams",
          "Expose all streams, including those of unknown type or that don't match the 'caps' property",
          DEFAULT_EXPOSE_ALL_STREAMS,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  /**
   * GstParseBin2::connection-speed
   *
   * Network connection speed in kbps (0 = unknownw)
   */
  g_object_class_install_property (gobject_klass, PROP_CONNECTION_SPEED,
      g_param_spec_uint64 ("connection-speed", "Connection Speed",
          "Network connection speed in kbps (0 = unknown)",
          0, G_MAXUINT64 / 1000, DEFAULT_CONNECTION_SPEED,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  /* Default implementations for the autoplug signals' class handlers */
  klass->autoplug_continue =
      GST_DEBUG_FUNCPTR (gst_parse_bin_autoplug_continue);
  klass->autoplug_factories =
      GST_DEBUG_FUNCPTR (gst_parse_bin_autoplug_factories);
  klass->autoplug_sort = GST_DEBUG_FUNCPTR (gst_parse_bin_autoplug_sort);
  klass->autoplug_select = GST_DEBUG_FUNCPTR (gst_parse_bin_autoplug_select);
  klass->autoplug_query = GST_DEBUG_FUNCPTR (gst_parse_bin_autoplug_query);
  /* Pad templates and element metadata */
  gst_element_class_add_pad_template (gstelement_klass,
      gst_static_pad_template_get (&parse_bin_sink_template));
  gst_element_class_add_pad_template (gstelement_klass,
      gst_static_pad_template_get (&parse_bin_src_template));
  gst_element_class_set_static_metadata (gstelement_klass,
      "Parse Bin", "Generic/Bin/Parser",
      "Parse and de-multiplex to elementary stream",
      "<NAME> <<EMAIL>>, "
      "<NAME> <<EMAIL>>");
  gstelement_klass->change_state =
      GST_DEBUG_FUNCPTR (gst_parse_bin_change_state);
  gstbin_klass->handle_message =
      GST_DEBUG_FUNCPTR (gst_parse_bin_handle_message);
  /* keep the pad type alive for the lifetime of the class */
  g_type_class_ref (GST_TYPE_PARSE_PAD);
}
/* Rebuild the cached list of decodable element factories whenever the
 * registry changed since the cache was built (or it was never built).
 * Caller must hold parsebin->factories_lock. */
static void
gst_parse_bin_update_factories_list (GstParseBin * parsebin)
{
  guint registry_cookie;

  registry_cookie = gst_registry_get_feature_list_cookie (gst_registry_get ());
  /* cache still valid? nothing to do */
  if (parsebin->factories != NULL
      && parsebin->factories_cookie == registry_cookie)
    return;

  if (parsebin->factories != NULL)
    gst_plugin_feature_list_free (parsebin->factories);

  parsebin->factories =
      gst_element_factory_list_get_elements
      (GST_ELEMENT_FACTORY_TYPE_DECODABLE, GST_RANK_MARGINAL);
  parsebin->factories =
      g_list_sort (parsebin->factories,
      gst_playback_utils_compare_factories_func);
  parsebin->factories_cookie = registry_cookie;
}
/* Instance initializer: creates the internal typefind element, ghosts
 * its sink pad as the bin's sink, and initializes all locks and
 * defaults. */
static void
gst_parse_bin_init (GstParseBin * parse_bin)
{
  /* first filter out the interesting element factories */
  g_mutex_init (&parse_bin->factories_lock);

  /* we create the typefind element only once */
  parse_bin->typefind = gst_element_factory_make ("typefind", "typefind");
  if (!parse_bin->typefind) {
    g_warning ("can't find typefind element, ParseBin will not work");
  } else {
    /* add the typefind element */
    if (!gst_bin_add (GST_BIN (parse_bin), parse_bin->typefind)) {
      g_warning ("Could not add typefind element, ParseBin will not work");
      gst_object_unref (parse_bin->typefind);
      parse_bin->typefind = NULL;
    }
  }

  /* BUGFIX: only set up the ghost pad when typefind was successfully
   * created AND added. Previously this ran even after a failed
   * gst_bin_add() had set typefind to NULL, so
   * gst_element_get_static_pad (NULL, "sink") was called. */
  if (parse_bin->typefind) {
    GstPad *pad;
    GstPad *gpad;
    GstPadTemplate *pad_tmpl;

    /* get the sinkpad */
    pad = gst_element_get_static_pad (parse_bin->typefind, "sink");
    /* get the pad template */
    pad_tmpl = gst_static_pad_template_get (&parse_bin_sink_template);
    /* ghost the sink pad to ourself */
    gpad = gst_ghost_pad_new_from_template ("sink", pad, pad_tmpl);
    gst_pad_set_active (gpad, TRUE);
    gst_element_add_pad (GST_ELEMENT (parse_bin), gpad);
    gst_object_unref (pad_tmpl);
    gst_object_unref (pad);
  }

  g_mutex_init (&parse_bin->expose_lock);
  parse_bin->parse_chain = NULL;
  g_mutex_init (&parse_bin->dyn_lock);
  parse_bin->shutdown = FALSE;
  parse_bin->blocked_pads = NULL;
  g_mutex_init (&parse_bin->subtitle_lock);
  parse_bin->encoding = g_strdup (DEFAULT_SUBTITLE_ENCODING);
  parse_bin->expose_allstreams = DEFAULT_EXPOSE_ALL_STREAMS;
  parse_bin->connection_speed = DEFAULT_CONNECTION_SPEED;
  g_mutex_init (&parse_bin->cleanup_lock);
  parse_bin->cleanup_thread = NULL;
  GST_OBJECT_FLAG_SET (parse_bin, GST_BIN_FLAG_STREAMS_AWARE);
}
/* GObject dispose: drop everything that can be released early. Every
 * pointer is nulled after freeing so a repeated dispose is harmless. */
static void
gst_parse_bin_dispose (GObject * object)
{
  GstParseBin *parse_bin = GST_PARSE_BIN (object);

  if (parse_bin->factories != NULL) {
    gst_plugin_feature_list_free (parse_bin->factories);
    parse_bin->factories = NULL;
  }

  if (parse_bin->parse_chain != NULL) {
    gst_parse_chain_free (parse_bin->parse_chain);
    parse_bin->parse_chain = NULL;
  }

  g_free (parse_bin->encoding);
  parse_bin->encoding = NULL;

  /* NOTE(review): only the list cells are freed here; presumably the
   * subtitle elements are owned by the bin — confirm. */
  g_list_free (parse_bin->subtitles);
  parse_bin->subtitles = NULL;

  G_OBJECT_CLASS (parent_class)->dispose (object);
}
/* GObject finalize: tear down all mutexes created in _init(), then
 * chain up to the parent class. */
static void
gst_parse_bin_finalize (GObject * object)
{
  GstParseBin *parse_bin = GST_PARSE_BIN (object);

  g_mutex_clear (&parse_bin->expose_lock);
  g_mutex_clear (&parse_bin->dyn_lock);
  g_mutex_clear (&parse_bin->subtitle_lock);
  g_mutex_clear (&parse_bin->factories_lock);
  g_mutex_clear (&parse_bin->cleanup_lock);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
/* Force the internal typefind element to assume @caps for the input
 * instead of detecting them (NULL reverts to detection). */
static void
gst_parse_bin_set_sink_caps (GstParseBin * parsebin, GstCaps * caps)
{
  GST_DEBUG_OBJECT (parsebin, "Setting new caps: %" GST_PTR_FORMAT, caps);

  g_object_set (parsebin->typefind, "force-caps", caps, NULL);
}
/* Return the caps currently forced on the typefind element; the caller
 * owns the returned reference. */
static GstCaps *
gst_parse_bin_get_sink_caps (GstParseBin * parsebin)
{
  GstCaps *forced_caps;

  GST_DEBUG_OBJECT (parsebin, "Getting currently set caps");

  g_object_get (parsebin->typefind, "force-caps", &forced_caps, NULL);

  return forced_caps;
}
/* Store @encoding as the assumed subtitle encoding and propagate it to
 * every subtitle element plugged so far. Takes the subtitle lock. */
static void
gst_parse_bin_set_subs_encoding (GstParseBin * parsebin, const gchar * encoding)
{
  GList *iter;

  GST_DEBUG_OBJECT (parsebin, "Setting new encoding: %s",
      GST_STR_NULL (encoding));

  SUBTITLE_LOCK (parsebin);
  /* remember the new encoding ... */
  g_free (parsebin->encoding);
  parsebin->encoding = g_strdup (encoding);
  /* ... and push it onto all known subtitle elements */
  for (iter = parsebin->subtitles; iter != NULL; iter = iter->next) {
    g_object_set (G_OBJECT (iter->data), "subtitle-encoding",
        parsebin->encoding, NULL);
  }
  SUBTITLE_UNLOCK (parsebin);
}
/* Return a copy of the configured subtitle encoding. The copy is taken
 * under the subtitle lock so a concurrent setter cannot free the string
 * underneath us; the caller owns the returned string. */
static gchar *
gst_parse_bin_get_subs_encoding (GstParseBin * parsebin)
{
  gchar *result;

  GST_DEBUG_OBJECT (parsebin, "Getting currently set encoding");

  SUBTITLE_LOCK (parsebin);
  result = g_strdup (parsebin->encoding);
  SUBTITLE_UNLOCK (parsebin);

  return result;
}
/* GObject property setter. */
static void
gst_parse_bin_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstParseBin *parsebin = GST_PARSE_BIN (object);

  switch (prop_id) {
    case PROP_SUBTITLE_ENCODING:
      gst_parse_bin_set_subs_encoding (parsebin, g_value_get_string (value));
      break;
    case PROP_SINK_CAPS:
      gst_parse_bin_set_sink_caps (parsebin, g_value_get_boxed (value));
      break;
    case PROP_EXPOSE_ALL_STREAMS:
      parsebin->expose_allstreams = g_value_get_boolean (value);
      break;
    case PROP_CONNECTION_SPEED:
      /* property is expressed in kbps, stored internally in bps */
      GST_OBJECT_LOCK (parsebin);
      parsebin->connection_speed = g_value_get_uint64 (value) * 1000;
      GST_OBJECT_UNLOCK (parsebin);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject property getter. */
static void
gst_parse_bin_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstParseBin *parsebin = GST_PARSE_BIN (object);

  switch (prop_id) {
    case PROP_SUBTITLE_ENCODING:
      /* the getter returns a fresh copy; the GValue takes ownership */
      g_value_take_string (value, gst_parse_bin_get_subs_encoding (parsebin));
      break;
    case PROP_SINK_CAPS:
      g_value_take_boxed (value, gst_parse_bin_get_sink_caps (parsebin));
      break;
    case PROP_EXPOSE_ALL_STREAMS:
      g_value_set_boolean (value, parsebin->expose_allstreams);
      break;
    case PROP_CONNECTION_SPEED:
      /* stored internally in bps, exposed in kbps */
      GST_OBJECT_LOCK (parsebin);
      g_value_set_uint64 (value, parsebin->connection_speed / 1000);
      GST_OBJECT_UNLOCK (parsebin);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/*****
 * Default autoplug signal handlers
 *****/

/* Default 'autoplug-continue' handler: keep autoplugging (TRUE) unless
 * @caps intersect with the configured raw/final caps, in which case the
 * pad should be exposed as-is (FALSE). */
static gboolean
gst_parse_bin_autoplug_continue (GstElement * element, GstPad * pad,
    GstCaps * caps)
{
  static GstStaticCaps raw_caps = GST_STATIC_CAPS (DEFAULT_RAW_CAPS);

  GST_DEBUG_OBJECT (element, "caps %" GST_PTR_FORMAT, caps);

  if (!gst_caps_can_intersect (caps, gst_static_caps_get (&raw_caps))) {
    GST_DEBUG_OBJECT (element, "autoplug-continue returns TRUE");
    return TRUE;
  }

  /* If it matches our target caps, expose it */
  GST_DEBUG_OBJECT (element, "autoplug-continue returns FALSE");
  return FALSE;
}
/* Default 'autoplug-factories' handler: return every cached factory
 * compatible with @caps, boxed into the GValueArray the signal expects.
 * The caller owns the returned array. */
static GValueArray *
gst_parse_bin_autoplug_factories (GstElement * element, GstPad * pad,
    GstCaps * caps)
{
  GstParseBin *parsebin = GST_PARSE_BIN_CAST (element);
  GValueArray *result;
  GList *compatible, *walk;

  GST_DEBUG_OBJECT (element, "finding factories");

  /* return all compatible factories for caps */
  g_mutex_lock (&parsebin->factories_lock);
  gst_parse_bin_update_factories_list (parsebin);
  compatible =
      gst_element_factory_list_filter (parsebin->factories, caps, GST_PAD_SINK,
      gst_caps_is_fixed (caps));
  g_mutex_unlock (&parsebin->factories_lock);

  result = g_value_array_new (g_list_length (compatible));
  for (walk = compatible; walk != NULL; walk = walk->next) {
    GValue entry = { 0, };

    g_value_init (&entry, G_TYPE_OBJECT);
    g_value_set_object (&entry, GST_ELEMENT_FACTORY_CAST (walk->data));
    g_value_array_append (result, &entry);
    g_value_unset (&entry);
  }
  gst_plugin_feature_list_free (compatible);

  GST_DEBUG_OBJECT (element, "autoplug-factories returns %p", result);

  return result;
}
/* Default 'autoplug-sort' handler: returning NULL means "keep the
 * factory order unchanged". */
static GValueArray *
gst_parse_bin_autoplug_sort (GstElement * element, GstPad * pad,
    GstCaps * caps, GValueArray * factories)
{
  return NULL;
}
/* Default 'autoplug-select' handler: always attempt to plug the
 * offered factory. */
static GstAutoplugSelectResult
gst_parse_bin_autoplug_select (GstElement * element, GstPad * pad,
    GstCaps * caps, GstElementFactory * factory)
{
  /* Try factory. */
  return GST_AUTOPLUG_SELECT_TRY;
}
/* Default 'autoplug-query' handler: leave the query unanswered. */
static gboolean
gst_parse_bin_autoplug_query (GstElement * element, GstPad * pad,
    GstQuery * query)
{
  /* No query handled here */
  return FALSE;
}
/********
* Discovery methods
*****/
static gboolean is_demuxer_element (GstElement * srcelement);
static gboolean connect_pad (GstParseBin * parsebin, GstElement * src,
GstParsePad * parsepad, GstPad * pad, GstCaps * caps,
GValueArray * factories, GstParseChain * chain, gchar ** deadend_details);
static GList *connect_element (GstParseBin * parsebin, GstParseElement * pelem,
GstParseChain * chain);
static void expose_pad (GstParseBin * parsebin, GstElement * src,
GstParsePad * parsepad, GstPad * pad, GstCaps * caps,
GstParseChain * chain);
static void pad_added_cb (GstElement * element, GstPad * pad,
GstParseChain * chain);
static void pad_removed_cb (GstElement * element, GstPad * pad,
GstParseChain * chain);
static void no_more_pads_cb (GstElement * element, GstParseChain * chain);
static GstParseGroup *gst_parse_chain_get_current_group (GstParseChain * chain);
/* gst_pad_sticky_events_foreach() callback: drop every stored sticky
 * event so the pad starts from a clean slate. */
static gboolean
clear_sticky_events (GstPad * pad, GstEvent ** event, gpointer user_data)
{
  GST_DEBUG_OBJECT (pad, "clearing sticky event %" GST_PTR_FORMAT, *event);

  gst_event_unref (*event);
  *event = NULL;

  /* keep iterating over the remaining sticky events */
  return TRUE;
}
/* gst_pad_sticky_events_foreach() callback: mirror one sticky event
 * from the target pad onto the ghost GstParsePad (@user_data), updating
 * the parse pad's bookkeeping for caps, stream-start and
 * stream-collection events along the way. */
static gboolean
copy_sticky_events (GstPad * pad, GstEvent ** eventptr, gpointer user_data)
{
  GstParsePad *parsepad = GST_PARSE_PAD (user_data);
  GstEvent *copy = gst_event_ref (*eventptr);

  switch (GST_EVENT_TYPE (copy)) {
    case GST_EVENT_CAPS:{
      GstCaps *caps = NULL;

      gst_event_parse_caps (copy, &caps);
      gst_parse_pad_update_caps (parsepad, caps);
      break;
    }
    case GST_EVENT_STREAM_START:{
      /* the parse pad may replace the stream-start event */
      copy = gst_parse_pad_stream_start_event (parsepad, copy);
      break;
    }
    case GST_EVENT_STREAM_COLLECTION:{
      GstStreamCollection *collection = NULL;

      gst_event_parse_stream_collection (copy, &collection);
      gst_parse_pad_update_stream_collection (parsepad, collection);
      break;
    }
    default:
      break;
  }

  GST_DEBUG_OBJECT (parsepad, "store sticky event %" GST_PTR_FORMAT, copy);
  gst_pad_store_sticky_event (GST_PAD_CAST (parsepad), copy);
  gst_event_unref (copy);

  return TRUE;
}
/* Point the ghost @parsepad at @target (NULL unlinks it). The pad's
 * stored sticky events are cleared first; when a new non-NULL target is
 * set, the target's sticky events are copied over. No-op when the
 * target does not change. */
static void
parse_pad_set_target (GstParsePad * parsepad, GstPad * target)
{
  GstPad *old_target = gst_ghost_pad_get_target (GST_GHOST_PAD_CAST (parsepad));

  /* drop the ref from get_target right away; only the pointer value is
   * needed for the comparison below */
  if (old_target)
    gst_object_unref (old_target);

  if (old_target == target)
    return;

  gst_pad_sticky_events_foreach (GST_PAD_CAST (parsepad),
      clear_sticky_events, NULL);
  gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (parsepad), target);

  if (target == NULL) {
    GST_LOG_OBJECT (parsepad->parsebin, "Setting pad %" GST_PTR_FORMAT
        " target to NULL", parsepad);
    return;
  }

  GST_LOG_OBJECT (parsepad->parsebin, "Setting pad %" GST_PTR_FORMAT
      " target to %" GST_PTR_FORMAT, parsepad, target);
  gst_pad_sticky_events_foreach (target, copy_sticky_events, parsepad);
}
/* called when a new pad is discovered. It will perform some basic actions
 * before trying to link something to it.
 *
 * - Check the caps, don't do anything when there are no caps or when they have
 * no good type.
 * - signal AUTOPLUG_CONTINUE to check if we need to continue autoplugging this
 * pad.
 * - if the caps are non-fixed, setup a handler to continue autoplugging when
 * the caps become fixed (connect to notify::caps).
 * - get list of factories to autoplug.
 * - continue autoplugging to one of the factories.
 */
static void
analyze_new_pad (GstParseBin * parsebin, GstElement * src, GstPad * pad,
    GstCaps * caps, GstParseChain * chain)
{
  gboolean apcontinue = TRUE;
  GValueArray *factories = NULL, *result = NULL;
  GstParsePad *parsepad;
  GstElementFactory *factory;
  const gchar *classification;
  gboolean is_parser_converter = FALSE;
  gboolean res;
  gchar *deadend_details = NULL;
  GST_DEBUG_OBJECT (parsebin, "Pad %s:%s caps:%" GST_PTR_FORMAT,
      GST_DEBUG_PAD_NAME (pad), caps);
  /* New pads are only expected from the last element (or its capsfilter)
   * of the chain; anything else indicates broken sequencing. */
  if (chain->elements
      && src != ((GstParseElement *) chain->elements->data)->element
      && src != ((GstParseElement *) chain->elements->data)->capsfilter) {
    GST_ERROR_OBJECT (parsebin,
        "New pad from not the last element in this chain");
    return;
  }
  if (chain->demuxer) {
    GstParseGroup *group;
    GstParseChain *oldchain = chain;
    GstParseElement *demux = (chain->elements ? chain->elements->data : NULL);
    if (chain->current_pad)
      gst_object_unref (chain->current_pad);
    chain->current_pad = NULL;
    /* we are adding a new pad for a demuxer (see is_demuxer_element(),
     * start a new chain for it */
    CHAIN_MUTEX_LOCK (oldchain);
    group = gst_parse_chain_get_current_group (chain);
    if (group && !g_list_find (group->children, chain)) {
      chain = gst_parse_chain_new (parsebin, group, pad, caps);
      group->children = g_list_prepend (group->children, chain);
    }
    CHAIN_MUTEX_UNLOCK (oldchain);
    if (!group) {
      GST_WARNING_OBJECT (parsebin, "No current group");
      return;
    }
    /* If this is not a dynamic pad demuxer, we're no-more-pads
     * already before anything else happens
     */
    if (demux == NULL || !demux->no_more_pads_id)
      group->no_more_pads = TRUE;
  }
  /* From here on we own a reference to the caps as
   * we might create new caps below and would need
   * to unref them later */
  if (caps)
    gst_caps_ref (caps);
  if ((caps == NULL) || gst_caps_is_empty (caps))
    goto unknown_type;
  if (gst_caps_is_any (caps))
    goto any_caps;
  /* Lazily create the GstParsePad that will (eventually) be exposed for
   * this stream and point it at the discovered pad. */
  if (!chain->current_pad)
    chain->current_pad = gst_parse_pad_new (parsebin, chain);
  parsepad = gst_object_ref (chain->current_pad);
  gst_pad_set_active (GST_PAD_CAST (parsepad), TRUE);
  parse_pad_set_target (parsepad, pad);
  /* 1. Emit 'autoplug-continue' the result will tell us if this pads needs
   * further autoplugging. Only do this for fixed caps, for unfixed caps
   * we will later come here again from the notify::caps handler. The
   * problem with unfixed caps is that, we can't reliably tell if the output
   * is e.g. accepted by a sink because only parts of the possible final
   * caps might be accepted by the sink. */
  if (gst_caps_is_fixed (caps))
    g_signal_emit (G_OBJECT (parsebin),
        gst_parse_bin_signals[SIGNAL_AUTOPLUG_CONTINUE], 0, parsepad, caps,
        &apcontinue);
  else
    apcontinue = TRUE;
  /* 1.a if autoplug-continue is FALSE or caps is a raw format, goto pad_is_final */
  if (!apcontinue)
    goto expose_pad;
  /* 1.b For Parser/Converter that can output different stream formats
   * we insert a capsfilter with the sorted caps of all possible next
   * elements and continue with the capsfilter srcpad */
  factory = gst_element_get_factory (src);
  classification =
      gst_element_factory_get_metadata (factory, GST_ELEMENT_METADATA_KLASS);
  is_parser_converter = (strstr (classification, "Parser")
      && strstr (classification, "Converter"));
  /* FIXME: We just need to be sure that the next element is not a parser */
  /* 1.c when the caps are not fixed yet, we can't be sure what element to
   * connect. We delay autoplugging until the caps are fixed */
  if (!is_parser_converter && !gst_caps_is_fixed (caps)) {
    goto non_fixed;
  } else if (!is_parser_converter) {
    /* swap our caps ref for the pad's current caps; bail into the delay
     * path if the pad has no negotiated caps yet */
    gst_caps_unref (caps);
    caps = gst_pad_get_current_caps (pad);
    if (!caps) {
      GST_DEBUG_OBJECT (parsebin,
          "No final caps set yet, delaying autoplugging");
      gst_object_unref (parsepad);
      goto setup_caps_delay;
    }
  }
  /* 1.d else get the factories and if there's no compatible factory goto
   * unknown_type */
  g_signal_emit (G_OBJECT (parsebin),
      gst_parse_bin_signals[SIGNAL_AUTOPLUG_FACTORIES], 0, parsepad, caps,
      &factories);
  /* NULL means that we can expose the pad */
  if (factories == NULL)
    goto expose_pad;
  /* if the array is empty, we have a type for which we have no parser */
  if (factories->n_values == 0) {
    /* if not we have a unhandled type with no compatible factories */
    g_value_array_free (factories);
    gst_object_unref (parsepad);
    goto unknown_type;
  }
  /* 1.e sort some more. */
  g_signal_emit (G_OBJECT (parsebin),
      gst_parse_bin_signals[SIGNAL_AUTOPLUG_SORT], 0, parsepad, caps, factories,
      &result);
  if (result) {
    g_value_array_free (factories);
    factories = result;
  }
  /* 1.g now get the factory template caps and insert the capsfilter if this
   * is a parser/converter
   */
  if (is_parser_converter) {
    GstCaps *filter_caps;
    gint i;
    GstPad *p;
    GstParseElement *pelem;
    g_assert (chain->elements != NULL);
    pelem = (GstParseElement *) chain->elements->data;
    /* Union of the ALWAYS sink template caps (intersected with our caps)
     * of every candidate factory, excluding src's own factory and other
     * parsers. */
    filter_caps = gst_caps_new_empty ();
    for (i = 0; i < factories->n_values; i++) {
      GstElementFactory *factory =
          g_value_get_object (g_value_array_get_nth (factories, i));
      GstCaps *tcaps, *intersection;
      const GList *tmps;
      GST_DEBUG ("Trying factory %s",
          gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)));
      if (gst_element_get_factory (src) == factory ||
          gst_element_factory_list_is_type (factory,
              GST_ELEMENT_FACTORY_TYPE_PARSER)) {
        GST_DEBUG ("Skipping factory");
        continue;
      }
      for (tmps = gst_element_factory_get_static_pad_templates (factory); tmps;
          tmps = tmps->next) {
        GstStaticPadTemplate *st = (GstStaticPadTemplate *) tmps->data;
        if (st->direction != GST_PAD_SINK || st->presence != GST_PAD_ALWAYS)
          continue;
        tcaps = gst_static_pad_template_get_caps (st);
        intersection =
            gst_caps_intersect_full (tcaps, caps, GST_CAPS_INTERSECT_FIRST);
        filter_caps = gst_caps_merge (filter_caps, intersection);
        gst_caps_unref (tcaps);
      }
    }
    /* Append the parser caps to prevent any not-negotiated errors */
    filter_caps = gst_caps_merge (filter_caps, gst_caps_ref (caps));
    pelem->capsfilter = gst_element_factory_make ("capsfilter", NULL);
    g_object_set (G_OBJECT (pelem->capsfilter), "caps", filter_caps, NULL);
    gst_caps_unref (filter_caps);
    gst_element_set_state (pelem->capsfilter, GST_STATE_PAUSED);
    gst_bin_add (GST_BIN_CAST (parsebin), gst_object_ref (pelem->capsfilter));
    /* splice the capsfilter in between pad and the ghost pad, then
     * continue autoplugging from the capsfilter's src pad */
    parse_pad_set_target (parsepad, NULL);
    p = gst_element_get_static_pad (pelem->capsfilter, "sink");
    gst_pad_link_full (pad, p, GST_PAD_LINK_CHECK_NOTHING);
    gst_object_unref (p);
    p = gst_element_get_static_pad (pelem->capsfilter, "src");
    parse_pad_set_target (parsepad, p);
    pad = p;
    gst_caps_unref (caps);
    caps = gst_pad_get_current_caps (pad);
    if (!caps) {
      GST_DEBUG_OBJECT (parsebin,
          "No final caps set yet, delaying autoplugging");
      gst_object_unref (parsepad);
      g_value_array_free (factories);
      goto setup_caps_delay;
    }
  }
  /* 1.h else continue autoplugging something from the list. */
  GST_LOG_OBJECT (pad, "Let's continue discovery on this pad");
  res =
      connect_pad (parsebin, src, parsepad, pad, caps, factories, chain,
      &deadend_details);
  /* Need to unref the capsfilter srcpad here if
   * we inserted a capsfilter */
  if (is_parser_converter)
    gst_object_unref (pad);
  gst_object_unref (parsepad);
  g_value_array_free (factories);
  if (!res)
    goto unknown_type;
  gst_caps_unref (caps);
  return;
  /* Exit paths below; each is responsible for releasing the refs it
   * still holds (caps, parsepad, pad where applicable). */
expose_pad:
  {
    GST_LOG_OBJECT (parsebin, "Pad is final. autoplug-continue:%d", apcontinue);
    expose_pad (parsebin, src, parsepad, pad, caps, chain);
    gst_object_unref (parsepad);
    gst_caps_unref (caps);
    return;
  }
unknown_type:
  {
    /* Mark the chain as a dead end, post a missing-plugin message, fire
     * the 'unknown-type' signal and try to expose whatever we have. */
    GST_LOG_OBJECT (pad, "Unknown type, posting message and firing signal");
    chain->deadend_details = deadend_details;
    chain->deadend = TRUE;
    chain->drained = TRUE;
    /* NOTE(review): the caps ref is handed over to chain->endcaps here,
     * not unreffed — presumably released in gst_parse_chain_free(). */
    chain->endcaps = caps;
    gst_object_replace ((GstObject **) & chain->current_pad, NULL);
    gst_element_post_message (GST_ELEMENT_CAST (parsebin),
        gst_missing_decoder_message_new (GST_ELEMENT_CAST (parsebin), caps));
    g_signal_emit (G_OBJECT (parsebin),
        gst_parse_bin_signals[SIGNAL_UNKNOWN_TYPE], 0, pad, caps);
    /* Try to expose anything */
    EXPOSE_LOCK (parsebin);
    if (parsebin->parse_chain) {
      if (gst_parse_chain_is_complete (parsebin->parse_chain)) {
        gst_parse_bin_expose (parsebin);
      }
    }
    EXPOSE_UNLOCK (parsebin);
    if (src == parsebin->typefind) {
      if (!caps || gst_caps_is_empty (caps)) {
        GST_ELEMENT_ERROR (parsebin, STREAM, TYPE_NOT_FOUND,
            (_("Could not determine type of stream")), (NULL));
      }
    }
    return;
  }
#if 1
non_fixed:
  {
    GST_DEBUG_OBJECT (pad, "pad has non-fixed caps delay autoplugging");
    gst_object_unref (parsepad);
    goto setup_caps_delay;
  }
#endif
any_caps:
  {
    GST_DEBUG_OBJECT (pad, "pad has ANY caps, delaying auto-plugging");
    goto setup_caps_delay;
  }
setup_caps_delay:
  {
    GstPendingPad *ppad;
    /* connect to caps notification */
    CHAIN_MUTEX_LOCK (chain);
    GST_LOG_OBJECT (parsebin, "Chain %p has now %d dynamic pads", chain,
        g_list_length (chain->pending_pads));
    ppad = g_slice_new0 (GstPendingPad);
    ppad->pad = gst_object_ref (pad);
    ppad->chain = chain;
    ppad->event_probe_id =
        gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
        pad_event_cb, ppad, NULL);
    chain->pending_pads = g_list_prepend (chain->pending_pads, ppad);
    ppad->notify_caps_id = g_signal_connect (pad, "notify::caps",
        G_CALLBACK (caps_notify_cb), chain);
    CHAIN_MUTEX_UNLOCK (chain);
    /* If we're here because we have a Parser/Converter
     * we have to unref the pad */
    if (is_parser_converter)
      gst_object_unref (pad);
    if (caps)
      gst_caps_unref (caps);
    return;
  }
}
/* Register @element so error messages it posts are intercepted by the
 * bin's message handler instead of being forwarded. Pairs with
 * remove_error_filter(). */
static void
add_error_filter (GstParseBin * parsebin, GstElement * element)
{
  GST_OBJECT_LOCK (parsebin);
  parsebin->filtered = g_list_prepend (parsebin->filtered, element);
  GST_OBJECT_UNLOCK (parsebin);
}
/* Unregister @element from error filtering. If @error is non-NULL it
 * receives the earliest captured error message for @element (or NULL
 * when none was captured); all captured messages for the element are
 * released. */
static void
remove_error_filter (GstParseBin * parsebin, GstElement * element,
    GstMessage ** error)
{
  GList *node;

  if (error)
    *error = NULL;

  GST_OBJECT_LOCK (parsebin);
  parsebin->filtered = g_list_remove (parsebin->filtered, element);

  node = parsebin->filtered_errors;
  while (node) {
    GstMessage *queued = node->data;

    if (GST_MESSAGE_SRC (queued) != GST_OBJECT_CAST (element)) {
      node = node->next;
      continue;
    }

    /* Get the last error of this element, i.e. the earliest */
    if (error)
      gst_message_replace (error, queued);
    gst_message_unref (queued);
    /* delete the link and rescan from the (possibly new) list head */
    node = parsebin->filtered_errors =
        g_list_delete_link (parsebin->filtered_errors, node);
  }
  GST_OBJECT_UNLOCK (parsebin);
}
/* State shared across send_sticky_event() invocations: the peer pad the
 * events are pushed to, and whether every push so far succeeded. */
typedef struct
{
  gboolean ret;
  GstPad *peer;
} SendStickyEventsData;

/* gst_pad_sticky_events_foreach() callback: push one sticky event to
 * the peer; iteration stops after the first failed push. */
static gboolean
send_sticky_event (GstPad * pad, GstEvent ** event, gpointer user_data)
{
  SendStickyEventsData *data = user_data;

  if (!gst_pad_send_event (data->peer, gst_event_ref (*event)))
    data->ret = FALSE;

  return data->ret;
}
/* Push all sticky events stored on @pad to its peer.
 *
 * Returns: TRUE when every event was accepted, FALSE when any push
 * failed or when @pad has no peer.
 */
static gboolean
send_sticky_events (GstParseBin * parsebin, GstPad * pad)
{
  SendStickyEventsData data;

  data.ret = TRUE;
  data.peer = gst_pad_get_peer (pad);
  /* BUGFIX: guard against an unlinked pad. Previously a NULL peer was
   * passed straight into gst_pad_send_event() and gst_object_unref(),
   * triggering assertion criticals. */
  if (data.peer == NULL) {
    GST_DEBUG_OBJECT (parsebin, "pad %" GST_PTR_FORMAT " has no peer", pad);
    return FALSE;
  }

  gst_pad_sticky_events_foreach (pad, send_sticky_event, &data);

  gst_object_unref (data.peer);

  return data.ret;
}
/* Render a GST_MESSAGE_ERROR into a newly allocated human-readable
 * string of the form "<generic>\n<error message>[\n<debug>]".
 * The caller frees the result with g_free(). */
static gchar *
error_message_to_string (GstMessage * msg)
{
  GError *err;
  gchar *debug, *generic, *full_message;

  gst_message_parse_error (msg, &err, &debug);
  generic = gst_error_get_message (err->domain, err->code);

  if (debug)
    full_message = g_strdup_printf ("%s\n%s\n%s", generic, err->message, debug);
  else
    full_message = g_strdup_printf ("%s\n%s", generic, err->message);

  g_free (generic);
  g_free (debug);
  g_clear_error (&err);

  return full_message;
}
/* We consider elements as "simple demuxer" when they are a demuxer
 * with one and only one ALWAYS source pad.
 */
static gboolean
is_simple_demuxer_factory (GstElementFactory * factory)
{
  const gchar *klass;
  const GList *walk;
  gint always_src_count = 0;

  klass =
      gst_element_factory_get_metadata (factory, GST_ELEMENT_METADATA_KLASS);
  if (strstr (klass, "Demuxer") == NULL)
    return FALSE;

  /* Count ALWAYS source templates; any non-ALWAYS source template
   * disqualifies the factory (-1 acts as the "disqualified" marker). */
  for (walk = gst_element_factory_get_static_pad_templates (factory);
      walk; walk = walk->next) {
    GstStaticPadTemplate *tmpl = walk->data;

    if (tmpl->direction != GST_PAD_SRC)
      continue;

    if (tmpl->presence != GST_PAD_ALWAYS)
      always_src_count = -1;
    else if (always_src_count >= 0)
      always_src_count++;
  }

  return always_src_count == 1;
}
/* connect_pad:
 *
 * Try to connect the given pad to an element created from one of the
 * factories, and recursively continue autoplugging from there.
 *
 * Note that parsepad is ghosting pad, and so pad is linked; be sure to unset
 * parsepad's target before trying to link pad.
 *
 * @factories: candidate factories, consumed front-to-back; each iteration of
 *   the loop below removes and tries one factory.
 * @deadend_details: (out): receives a newly-allocated description of why
 *   every candidate failed, or NULL when a candidate succeeded (or nothing
 *   was recorded).
 *
 * Returns TRUE if an element was properly created and linked
 */
static gboolean
connect_pad (GstParseBin * parsebin, GstElement * src, GstParsePad * parsepad,
    GstPad * pad, GstCaps * caps, GValueArray * factories,
    GstParseChain * chain, gchar ** deadend_details)
{
  gboolean res = FALSE;
  GString *error_details = NULL;

  g_return_val_if_fail (factories != NULL, FALSE);
  g_return_val_if_fail (factories->n_values > 0, FALSE);

  GST_DEBUG_OBJECT (parsebin,
      "pad %s:%s , chain:%p, %d factories, caps %" GST_PTR_FORMAT,
      GST_DEBUG_PAD_NAME (pad), chain, factories->n_values, caps);

  /* accumulates one rejection reason per failed candidate; ownership is
   * transferred to *deadend_details at "beach" */
  error_details = g_string_new ("");

  /* 2. Try to create an element and link to it */
  while (factories->n_values > 0) {
    GstAutoplugSelectResult ret;
    GstElementFactory *factory;
    GstParseElement *pelem;
    GstElement *element;
    GstPad *sinkpad;
    GParamSpec *pspec;
    gboolean subtitle;
    GList *to_connect = NULL;
    gboolean is_parser_converter = FALSE, is_simple_demuxer = FALSE;

    /* Set parsepad target to pad again, it might've been unset
     * below but we came back here because something failed
     */
    parse_pad_set_target (parsepad, pad);

    /* take first factory */
    factory = g_value_get_object (g_value_array_get_nth (factories, 0));
    /* Remove selected factory from the list. */
    g_value_array_remove (factories, 0);

    GST_LOG_OBJECT (src, "trying factory %" GST_PTR_FORMAT, factory);

    /* Check if the caps are really supported by the factory. The
     * factory list is non-empty-subset filtered while caps
     * are only accepted by a pad if they are a subset of the
     * pad caps.
     *
     * FIXME: Only do this for fixed caps here. Non-fixed caps
     * can happen if a Parser/Converter was autoplugged before
     * this. We then assume that it will be able to convert to
     * everything that the parser would want.
     *
     * A subset check will fail here because the parser caps
     * will be generic and while the parser will only
     * support a subset of the parser caps.
     */
    if (gst_caps_is_fixed (caps)) {
      const GList *templs;
      gboolean skip = FALSE;

      templs = gst_element_factory_get_static_pad_templates (factory);
      while (templs) {
        GstStaticPadTemplate *templ = (GstStaticPadTemplate *) templs->data;

        if (templ->direction == GST_PAD_SINK) {
          GstCaps *templcaps = gst_static_caps_get (&templ->static_caps);

          if (!gst_caps_is_subset (caps, templcaps)) {
            GST_DEBUG_OBJECT (src,
                "caps %" GST_PTR_FORMAT " not subset of %" GST_PTR_FORMAT, caps,
                templcaps);
            gst_caps_unref (templcaps);
            skip = TRUE;
            break;
          }
          gst_caps_unref (templcaps);
        }
        templs = g_list_next (templs);
      }
      if (skip)
        continue;
    }

    /* If the factory is for a parser we first check if the factory
     * was already used for the current chain. If it was used already
     * we would otherwise create an infinite loop here because the
     * parser apparently accepts its own output as input.
     * This is only done for parsers because it's perfectly valid
     * to have other element classes after each other because a
     * parser is the only one that does not change the data. A
     * valid example for this would be multiple id3demux in a row.
     */
    is_parser_converter = strstr (gst_element_factory_get_metadata (factory,
            GST_ELEMENT_METADATA_KLASS), "Parser") != NULL;
    is_simple_demuxer = is_simple_demuxer_factory (factory);

    if (is_parser_converter) {
      gboolean skip = FALSE;
      GList *l;

      CHAIN_MUTEX_LOCK (chain);
      for (l = chain->elements; l; l = l->next) {
        GstParseElement *pelem = (GstParseElement *) l->data;
        GstElement *otherelement = pelem->element;

        if (gst_element_get_factory (otherelement) == factory) {
          skip = TRUE;
          break;
        }
      }
      /* also check the head element of the grandparent chain to catch
       * parser loops that cross a group boundary */
      if (!skip && chain->parent && chain->parent->parent) {
        GstParseChain *parent_chain = chain->parent->parent;
        GstParseElement *pelem =
            parent_chain->elements ? parent_chain->elements->data : NULL;

        if (pelem && gst_element_get_factory (pelem->element) == factory)
          skip = TRUE;
      }
      CHAIN_MUTEX_UNLOCK (chain);
      if (skip) {
        GST_DEBUG_OBJECT (parsebin,
            "Skipping factory '%s' because it was already used in this chain",
            gst_plugin_feature_get_name (GST_PLUGIN_FEATURE_CAST (factory)));
        continue;
      }
    }

    /* Expose pads if the next factory is a decoder */
    if (gst_element_factory_list_is_type (factory,
            GST_ELEMENT_FACTORY_TYPE_DECODER)) {
      ret = GST_AUTOPLUG_SELECT_EXPOSE;
    } else {
      /* emit autoplug-select to see what we should do with it. */
      g_signal_emit (G_OBJECT (parsebin),
          gst_parse_bin_signals[SIGNAL_AUTOPLUG_SELECT],
          0, parsepad, caps, factory, &ret);
    }

    switch (ret) {
      case GST_AUTOPLUG_SELECT_TRY:
        GST_DEBUG_OBJECT (parsebin, "autoplug select requested try");
        break;
      case GST_AUTOPLUG_SELECT_EXPOSE:
        GST_DEBUG_OBJECT (parsebin, "autoplug select requested expose");
        /* expose the pad, we don't have the source element */
        expose_pad (parsebin, src, parsepad, pad, caps, chain);
        res = TRUE;
        goto beach;
      case GST_AUTOPLUG_SELECT_SKIP:
        GST_DEBUG_OBJECT (parsebin, "autoplug select requested skip");
        continue;
      default:
        GST_WARNING_OBJECT (parsebin, "autoplug select returned unhandled %d",
            ret);
        break;
    }

    /* 2.0. Unlink pad */
    parse_pad_set_target (parsepad, NULL);

    /* 2.1. Try to create an element */
    if ((element = gst_element_factory_create (factory, NULL)) == NULL) {
      GST_WARNING_OBJECT (parsebin, "Could not create an element from %s",
          gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)));
      g_string_append_printf (error_details,
          "Could not create an element from %s\n",
          gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)));
      continue;
    }

    /* Filter errors, this will prevent the element from causing the pipeline
     * to error while we test it using READY state. */
    add_error_filter (parsebin, element);

    /* We don't yet want the bin to control the element's state */
    gst_element_set_locked_state (element, TRUE);

    /* ... add it ... */
    if (!(gst_bin_add (GST_BIN_CAST (parsebin), element))) {
      GST_WARNING_OBJECT (parsebin, "Couldn't add %s to the bin",
          GST_ELEMENT_NAME (element));
      remove_error_filter (parsebin, element, NULL);
      g_string_append_printf (error_details, "Couldn't add %s to the bin\n",
          GST_ELEMENT_NAME (element));
      gst_object_unref (element);
      continue;
    }

    /* Find its sink pad. */
    sinkpad = NULL;
    GST_OBJECT_LOCK (element);
    if (element->sinkpads != NULL)
      sinkpad = gst_object_ref (element->sinkpads->data);
    GST_OBJECT_UNLOCK (element);

    if (sinkpad == NULL) {
      GST_WARNING_OBJECT (parsebin, "Element %s doesn't have a sink pad",
          GST_ELEMENT_NAME (element));
      remove_error_filter (parsebin, element, NULL);
      g_string_append_printf (error_details,
          "Element %s doesn't have a sink pad", GST_ELEMENT_NAME (element));
      gst_bin_remove (GST_BIN (parsebin), element);
      continue;
    }

    /* ... and try to link */
    if ((gst_pad_link_full (pad, sinkpad,
                GST_PAD_LINK_CHECK_NOTHING)) != GST_PAD_LINK_OK) {
      GST_WARNING_OBJECT (parsebin, "Link failed on pad %s:%s",
          GST_DEBUG_PAD_NAME (sinkpad));
      remove_error_filter (parsebin, element, NULL);
      g_string_append_printf (error_details, "Link failed on pad %s:%s",
          GST_DEBUG_PAD_NAME (sinkpad));
      gst_object_unref (sinkpad);
      gst_bin_remove (GST_BIN (parsebin), element);
      continue;
    }

    /* ... activate it ... */
    if ((gst_element_set_state (element,
                GST_STATE_READY)) == GST_STATE_CHANGE_FAILURE) {
      GstMessage *error_msg;

      GST_WARNING_OBJECT (parsebin, "Couldn't set %s to READY",
          GST_ELEMENT_NAME (element));
      remove_error_filter (parsebin, element, &error_msg);

      if (error_msg) {
        gchar *error_string = error_message_to_string (error_msg);
        g_string_append_printf (error_details, "Couldn't set %s to READY:\n%s",
            GST_ELEMENT_NAME (element), error_string);
        gst_message_unref (error_msg);
        g_free (error_string);
      } else {
        g_string_append_printf (error_details, "Couldn't set %s to READY",
            GST_ELEMENT_NAME (element));
      }
      gst_object_unref (sinkpad);
      gst_bin_remove (GST_BIN (parsebin), element);
      continue;
    }

    /* check if we still accept the caps on the pad after setting
     * the element to READY */
    if (!gst_pad_query_accept_caps (sinkpad, caps)) {
      GstMessage *error_msg;

      GST_WARNING_OBJECT (parsebin, "Element %s does not accept caps",
          GST_ELEMENT_NAME (element));
      remove_error_filter (parsebin, element, &error_msg);
      if (error_msg) {
        gchar *error_string = error_message_to_string (error_msg);
        g_string_append_printf (error_details,
            "Element %s does not accept caps:\n%s", GST_ELEMENT_NAME (element),
            error_string);
        gst_message_unref (error_msg);
        g_free (error_string);
      } else {
        g_string_append_printf (error_details,
            "Element %s does not accept caps", GST_ELEMENT_NAME (element));
      }
      gst_element_set_state (element, GST_STATE_NULL);
      gst_object_unref (sinkpad);
      gst_bin_remove (GST_BIN (parsebin), element);
      continue;
    }

    gst_object_unref (sinkpad);
    GST_LOG_OBJECT (parsebin, "linked on pad %s:%s", GST_DEBUG_PAD_NAME (pad));

    CHAIN_MUTEX_LOCK (chain);
    pelem = g_slice_new0 (GstParseElement);
    pelem->element = gst_object_ref (element);
    pelem->capsfilter = NULL;
    chain->elements = g_list_prepend (chain->elements, pelem);
    chain->demuxer = is_demuxer_element (element);
    /* If we plugging a parser, mark the chain as parsed */
    chain->parsed |= is_parser_converter;
    CHAIN_MUTEX_UNLOCK (chain);

    /* Set connection-speed property if needed */
    if (chain->demuxer) {
      GParamSpec *pspec;

      if ((pspec = g_object_class_find_property (G_OBJECT_GET_CLASS (element),
                  "connection-speed"))) {
        guint64 speed = parsebin->connection_speed / 1000;
        gboolean wrong_type = FALSE;

        /* clamp the value to whatever numeric type the property declares */
        if (G_PARAM_SPEC_TYPE (pspec) == G_TYPE_PARAM_UINT) {
          GParamSpecUInt *pspecuint = G_PARAM_SPEC_UINT (pspec);

          speed = CLAMP (speed, pspecuint->minimum, pspecuint->maximum);
        } else if (G_PARAM_SPEC_TYPE (pspec) == G_TYPE_PARAM_INT) {
          GParamSpecInt *pspecint = G_PARAM_SPEC_INT (pspec);

          speed = CLAMP (speed, pspecint->minimum, pspecint->maximum);
        } else if (G_PARAM_SPEC_TYPE (pspec) == G_TYPE_PARAM_UINT64) {
          GParamSpecUInt64 *pspecuint = G_PARAM_SPEC_UINT64 (pspec);

          speed = CLAMP (speed, pspecuint->minimum, pspecuint->maximum);
        } else if (G_PARAM_SPEC_TYPE (pspec) == G_TYPE_PARAM_INT64) {
          GParamSpecInt64 *pspecint = G_PARAM_SPEC_INT64 (pspec);

          speed = CLAMP (speed, pspecint->minimum, pspecint->maximum);
        } else {
          GST_WARNING_OBJECT (parsebin,
              "The connection speed property %" G_GUINT64_FORMAT " of type %s"
              " is not usefull not setting it", speed,
              g_type_name (G_PARAM_SPEC_TYPE (pspec)));
          wrong_type = TRUE;
        }

        if (!wrong_type) {
          GST_DEBUG_OBJECT (parsebin,
              "setting connection-speed=%" G_GUINT64_FORMAT
              " to demuxer element", speed);
          g_object_set (element, "connection-speed", speed, NULL);
        }
      }
    }

    /* try to configure the subtitle encoding property when we can */
    pspec = g_object_class_find_property (G_OBJECT_GET_CLASS (element),
        "subtitle-encoding");
    if (pspec && G_PARAM_SPEC_VALUE_TYPE (pspec) == G_TYPE_STRING) {
      SUBTITLE_LOCK (parsebin);
      GST_DEBUG_OBJECT (parsebin,
          "setting subtitle-encoding=%s to element", parsebin->encoding);
      g_object_set (G_OBJECT (element), "subtitle-encoding", parsebin->encoding,
          NULL);
      SUBTITLE_UNLOCK (parsebin);
      subtitle = TRUE;
    } else {
      subtitle = FALSE;
    }

    /* link this element further */
    to_connect = connect_element (parsebin, pelem, chain);

    /* For parsers/converters and simple demuxers, analyze the already
     * available source pads right away, before bringing the element up */
    if ((is_simple_demuxer || is_parser_converter) && to_connect) {
      GList *l;
      for (l = to_connect; l; l = g_list_next (l)) {
        GstPad *opad = GST_PAD_CAST (l->data);
        GstCaps *ocaps;

        ocaps = get_pad_caps (opad);
        analyze_new_pad (parsebin, pelem->element, opad, ocaps, chain);
        if (ocaps)
          gst_caps_unref (ocaps);
        gst_object_unref (opad);
      }
      g_list_free (to_connect);
      to_connect = NULL;
    }

    /* Bring the element to the state of the parent */

    /* First lock element's sinkpad stream lock so no data reaches
     * the possible new element added when caps are sent by element
     * while we're still sending sticky events */
    /* NOTE(review): sinkpad was unreffed above; presumably the element,
     * which is in the bin, still keeps the pad alive here — confirm */
    GST_PAD_STREAM_LOCK (sinkpad);

    if ((gst_element_set_state (element,
                GST_STATE_PAUSED)) == GST_STATE_CHANGE_FAILURE) {
      GstParseElement *dtmp = NULL;
      GstElement *tmp = NULL;
      GstMessage *error_msg;

      GST_PAD_STREAM_UNLOCK (sinkpad);

      GST_WARNING_OBJECT (parsebin, "Couldn't set %s to PAUSED",
          GST_ELEMENT_NAME (element));

      g_list_free_full (to_connect, (GDestroyNotify) gst_object_unref);
      to_connect = NULL;

      remove_error_filter (parsebin, element, &error_msg);

      if (error_msg) {
        gchar *error_string = error_message_to_string (error_msg);
        g_string_append_printf (error_details, "Couldn't set %s to PAUSED:\n%s",
            GST_ELEMENT_NAME (element), error_string);
        gst_message_unref (error_msg);
        g_free (error_string);
      } else {
        g_string_append_printf (error_details, "Couldn't set %s to PAUSED",
            GST_ELEMENT_NAME (element));
      }

      /* Remove all elements in this chain that were just added. No
       * other thread could've added elements in the meantime */
      CHAIN_MUTEX_LOCK (chain);
      do {
        GList *l;

        dtmp = chain->elements->data;
        tmp = dtmp->element;

        /* Disconnect any signal handlers that might be connected
         * in connect_element() or analyze_pad() */
        if (dtmp->pad_added_id)
          g_signal_handler_disconnect (tmp, dtmp->pad_added_id);
        if (dtmp->pad_removed_id)
          g_signal_handler_disconnect (tmp, dtmp->pad_removed_id);
        if (dtmp->no_more_pads_id)
          g_signal_handler_disconnect (tmp, dtmp->no_more_pads_id);

        /* drop any pending pads that belong to this element */
        for (l = chain->pending_pads; l;) {
          GstPendingPad *pp = l->data;
          GList *n;

          if (GST_PAD_PARENT (pp->pad) != tmp) {
            l = l->next;
            continue;
          }

          gst_pending_pad_free (pp);

          /* Remove element from the list, update list head and go to the
           * next element in the list */
          n = l->next;
          chain->pending_pads = g_list_delete_link (chain->pending_pads, l);
          l = n;
        }

        if (dtmp->capsfilter) {
          gst_bin_remove (GST_BIN (parsebin), dtmp->capsfilter);
          gst_element_set_state (dtmp->capsfilter, GST_STATE_NULL);
          gst_object_unref (dtmp->capsfilter);
        }

        gst_bin_remove (GST_BIN (parsebin), tmp);
        gst_element_set_state (tmp, GST_STATE_NULL);

        gst_object_unref (tmp);
        g_slice_free (GstParseElement, dtmp);

        chain->elements = g_list_delete_link (chain->elements, chain->elements);
      } while (tmp != element);
      CHAIN_MUTEX_UNLOCK (chain);

      continue;
    } else {
      send_sticky_events (parsebin, pad);
      /* Everything went well, the spice must flow now */
      GST_PAD_STREAM_UNLOCK (sinkpad);
    }

    /* Remove error filter now, from now on we can't gracefully
     * handle errors of the element anymore */
    remove_error_filter (parsebin, element, NULL);

    /* Now let the bin handle the state */
    gst_element_set_locked_state (element, FALSE);

    if (subtitle) {
      SUBTITLE_LOCK (parsebin);
      /* we added the element now, add it to the list of subtitle-encoding
       * elements when we can set the property */
      parsebin->subtitles = g_list_prepend (parsebin->subtitles, element);
      SUBTITLE_UNLOCK (parsebin);
    }

    /* analyze any remaining already-existing source pads */
    if (to_connect) {
      GList *l;
      for (l = to_connect; l; l = g_list_next (l)) {
        GstPad *opad = GST_PAD_CAST (l->data);
        GstCaps *ocaps;

        ocaps = get_pad_caps (opad);
        analyze_new_pad (parsebin, pelem->element, opad, ocaps, chain);
        if (ocaps)
          gst_caps_unref (ocaps);
        gst_object_unref (opad);
      }
      g_list_free (to_connect);
      to_connect = NULL;
    }

    res = TRUE;
    break;
  }

beach:
  /* hand the accumulated details to the caller; g_string_free() with TRUE
   * (empty details or success) discards the buffer and yields NULL */
  if (error_details)
    *deadend_details = g_string_free (error_details, (error_details->len == 0
            || res));
  else
    *deadend_details = NULL;

  return res;
}
/* Returns the caps to use for autoplugging @pad: the currently-set caps
 * when available (fixed, exactly what will be produced), otherwise the
 * result of a caps query, which may be non-fixed — analyze_new_pad()
 * then installs a notify::caps handler to continue later.
 * Caller owns the returned reference. */
static GstCaps *
get_pad_caps (GstPad * pad)
{
  GstCaps *caps = gst_pad_get_current_caps (pad);

  if (caps == NULL)
    caps = gst_pad_query_caps (pad, NULL);

  return caps;
}
/* connect_element:
 *
 * Scans the source pad templates of @pelem's element and returns a list of
 * (owned refs to) source pads that already exist and can be connected to.
 * When SOMETIMES templates exist whose pads are not yet created, connects
 * to pad-added, pad-removed and no-more-pads so autoplugging continues
 * when they appear. REQUEST templates are ignored.
 */
static GList *
connect_element (GstParseBin * parsebin, GstParseElement * pelem,
    GstParseChain * chain)
{
  GstElement *element = pelem->element;
  GList *pads;
  gboolean dynamic = FALSE;
  GList *to_connect = NULL;

  GST_DEBUG_OBJECT (parsebin,
      "Attempting to connect element %s [chain:%p] further",
      GST_ELEMENT_NAME (element), chain);

  /* 1. Loop over pad templates, grabbing existing pads along the way */
  for (pads = GST_ELEMENT_GET_CLASS (element)->padtemplates; pads;
      pads = g_list_next (pads)) {
    GstPadTemplate *templ = GST_PAD_TEMPLATE (pads->data);
    const gchar *templ_name;

    /* we are only interested in source pads */
    if (GST_PAD_TEMPLATE_DIRECTION (templ) != GST_PAD_SRC)
      continue;

    templ_name = GST_PAD_TEMPLATE_NAME_TEMPLATE (templ);
    GST_DEBUG_OBJECT (parsebin, "got a source pad template %s", templ_name);

    /* figure out what kind of pad this is */
    switch (GST_PAD_TEMPLATE_PRESENCE (templ)) {
      case GST_PAD_ALWAYS:
      {
        /* get the pad that we need to autoplug */
        GstPad *pad = gst_element_get_static_pad (element, templ_name);

        if (pad) {
          GST_DEBUG_OBJECT (parsebin, "got the pad for always template %s",
              templ_name);
          /* here is the pad, we need to autoplug it */
          to_connect = g_list_prepend (to_connect, pad);
        } else {
          /* strange, pad is marked as always but it's not
           * there. Fix the element */
          GST_WARNING_OBJECT (parsebin,
              "could not get the pad for always template %s", templ_name);
        }
        break;
      }
      case GST_PAD_SOMETIMES:
      {
        /* try to get the pad to see if it is already created or
         * not */
        GstPad *pad = gst_element_get_static_pad (element, templ_name);

        if (pad) {
          GST_DEBUG_OBJECT (parsebin, "got the pad for sometimes template %s",
              templ_name);
          /* the pad is created, we need to autoplug it */
          to_connect = g_list_prepend (to_connect, pad);
        } else {
          GST_DEBUG_OBJECT (parsebin,
              "did not get the sometimes pad of template %s", templ_name);
          /* we have an element that will create dynamic pads */
          dynamic = TRUE;
        }
        break;
      }
      case GST_PAD_REQUEST:
        /* ignore request pads */
        GST_DEBUG_OBJECT (parsebin, "ignoring request padtemplate %s",
            templ_name);
        break;
    }
  }

  /* 2. if there are more potential pads, connect to relevant signals */
  if (dynamic) {
    GST_LOG_OBJECT (parsebin, "Adding signals to element %s in chain %p",
        GST_ELEMENT_NAME (element), chain);
    /* handler ids are stored on the GstParseElement so they can be
     * disconnected again on teardown */
    pelem->pad_added_id = g_signal_connect (element, "pad-added",
        G_CALLBACK (pad_added_cb), chain);
    pelem->pad_removed_id = g_signal_connect (element, "pad-removed",
        G_CALLBACK (pad_removed_cb), chain);
    pelem->no_more_pads_id = g_signal_connect (element, "no-more-pads",
        G_CALLBACK (no_more_pads_cb), chain);
  }

  /* 3. return all pads that can be connected to already */
  return to_connect;
}
/* expose_pad:
 *
 * Expose the given pad on the chain as a parsed pad: activate the ghost
 * pad, make it the chain's endpad and remember @caps (if any) as the
 * chain's final caps.
 */
static void
expose_pad (GstParseBin * parsebin, GstElement * src, GstParsePad * parsepad,
    GstPad * pad, GstCaps * caps, GstParseChain * chain)
{
  GST_DEBUG_OBJECT (parsebin, "pad %s:%s, chain:%p",
      GST_DEBUG_PAD_NAME (pad), chain);

  gst_parse_pad_activate (parsepad, chain);
  chain->endpad = gst_object_ref (parsepad);
  chain->endcaps = caps ? gst_caps_ref (caps) : NULL;
}
/* type_found:
 *
 * "have-type" callback of the internal typefind element. Rejects plain
 * text files, creates the top-level parse chain and starts autoplugging
 * from typefind's source pad. Only the first type is honoured (see FIXME).
 */
static void
type_found (GstElement * typefind, guint probability,
    GstCaps * caps, GstParseBin * parse_bin)
{
  GstPad *pad, *sink_pad;

  GST_DEBUG_OBJECT (parse_bin, "typefind found caps %" GST_PTR_FORMAT, caps);

  /* If the typefinder (but not something else) finds text/plain - i.e. that's
   * the top-level type of the file - then error out.
   */
  if (gst_structure_has_name (gst_caps_get_structure (caps, 0), "text/plain")) {
    GST_ELEMENT_ERROR (parse_bin, STREAM, WRONG_TYPE,
        (_("This appears to be a text file")),
        ("ParseBin cannot parse plain text files"));
    goto exit;
  }

  /* FIXME: we can only deal with one type, we don't yet support dynamically changing
   * caps from the typefind element */
  if (parse_bin->have_type || parse_bin->parse_chain)
    goto exit;

  parse_bin->have_type = TRUE;

  pad = gst_element_get_static_pad (typefind, "src");
  sink_pad = gst_element_get_static_pad (typefind, "sink");

  /* need some lock here to prevent race with shutdown state change
   * which might yank away e.g. parse_chain while building stuff here.
   * In typical cases, STREAM_LOCK is held and handles that, it need not
   * be held (if called from a proxied setcaps), so grab it anyway */
  GST_PAD_STREAM_LOCK (sink_pad);
  parse_bin->parse_chain = gst_parse_chain_new (parse_bin, NULL, pad, caps);
  analyze_new_pad (parse_bin, typefind, pad, caps, parse_bin->parse_chain);
  GST_PAD_STREAM_UNLOCK (sink_pad);

  gst_object_unref (sink_pad);
  gst_object_unref (pad);

exit:
  return;
}
/* Event probe installed on pending (not-yet-exposed) pads. An EOS here
 * means the stream ended before the pad could be plugged further, so the
 * chain is marked as a drained dead end and we try to expose the group. */
static GstPadProbeReturn
pad_event_cb (GstPad * pad, GstPadProbeInfo * info, gpointer data)
{
  GstEvent *event = GST_PAD_PROBE_INFO_EVENT (info);
  GstPendingPad *ppad = (GstPendingPad *) data;
  GstParseChain *chain;
  GstParseBin *parsebin;

  /* Assert each pointer BEFORE dereferencing it; previously the asserts
   * ran only after chain/parsebin had already been loaded through the
   * pointers they were meant to guard, so they could never trigger. */
  g_assert (ppad);
  chain = ppad->chain;
  g_assert (chain);
  parsebin = chain->parsebin;
  g_assert (parsebin);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_EOS:
      GST_DEBUG_OBJECT (pad, "Received EOS on a non final pad, this stream "
          "ended too early");
      chain->deadend = TRUE;
      chain->drained = TRUE;
      gst_object_replace ((GstObject **) & chain->current_pad, NULL);
      /* we don't set the endcaps because NULL endcaps means early EOS */
      EXPOSE_LOCK (parsebin);
      if (parsebin->parse_chain)
        if (gst_parse_chain_is_complete (parsebin->parse_chain))
          gst_parse_bin_expose (parsebin);
      EXPOSE_UNLOCK (parsebin);
      break;
    default:
      break;
  }
  return GST_PAD_PROBE_OK;
}
/* "pad-added" handler: autoplug the new pad into @chain and, if that
 * completed the whole parse chain, expose the group. */
static void
pad_added_cb (GstElement * element, GstPad * pad, GstParseChain * chain)
{
  GstParseBin *parsebin = chain->parsebin;
  GstCaps *caps;

  GST_DEBUG_OBJECT (pad, "pad added, chain:%p", chain);

  caps = get_pad_caps (pad);
  analyze_new_pad (parsebin, element, pad, caps, chain);
  if (caps)
    gst_caps_unref (caps);

  EXPOSE_LOCK (parsebin);
  if (!parsebin->parse_chain) {
    GST_DEBUG_OBJECT (parsebin, "No parse chain, new pad ignored");
  } else if (gst_parse_chain_is_complete (parsebin->parse_chain)) {
    GST_LOG_OBJECT (parsebin,
        "That was the last dynamic object, now attempting to expose the group");
    if (!gst_parse_bin_expose (parsebin))
      GST_WARNING_OBJECT (parsebin, "Couldn't expose group");
  }
  EXPOSE_UNLOCK (parsebin);
}
/* "pad-removed" handler: only drop the matching pending-pad bookkeeping.
 * The active group itself is removed when its multiqueue is drained. */
static void
pad_removed_cb (GstElement * element, GstPad * pad, GstParseChain * chain)
{
  GList *walk;

  GST_LOG_OBJECT (pad, "pad removed, chain:%p", chain);

  CHAIN_MUTEX_LOCK (chain);
  for (walk = chain->pending_pads; walk != NULL; walk = walk->next) {
    GstPendingPad *ppad = walk->data;

    if (ppad->pad != pad)
      continue;

    gst_pending_pad_free (ppad);
    chain->pending_pads = g_list_delete_link (chain->pending_pads, walk);
    break;
  }
  CHAIN_MUTEX_UNLOCK (chain);
}
/* "no-more-pads" handler: marks the chain's current (or first still-open
 * queued) group as complete, then tries to expose. Ignored when the signal
 * comes from an element that is no longer the chain's head, or from a
 * non-demuxer. */
static void
no_more_pads_cb (GstElement * element, GstParseChain * chain)
{
  GstParseGroup *group = NULL;

  GST_LOG_OBJECT (element, "got no more pads");

  CHAIN_MUTEX_LOCK (chain);
  if (!chain->elements
      || ((GstParseElement *) chain->elements->data)->element != element) {
    /* chain->elements is prepended-to, so its head is the newest element */
    GST_LOG_OBJECT (chain->parsebin, "no-more-pads from old chain element '%s'",
        GST_OBJECT_NAME (element));
    CHAIN_MUTEX_UNLOCK (chain);
    return;
  } else if (!chain->demuxer) {
    GST_LOG_OBJECT (chain->parsebin,
        "no-more-pads from a non-demuxer element '%s'",
        GST_OBJECT_NAME (element));
    CHAIN_MUTEX_UNLOCK (chain);
    return;
  }

  /* when we received no_more_pads, we can complete the pads of the chain */
  if (!chain->next_groups && chain->active_group) {
    group = chain->active_group;
  } else if (chain->next_groups) {
    GList *iter;
    /* pick the first queued group that has not yet seen no-more-pads */
    for (iter = chain->next_groups; iter; iter = g_list_next (iter)) {
      group = iter->data;
      if (!group->no_more_pads)
        break;
    }
  }
  if (!group) {
    GST_ERROR_OBJECT (chain->parsebin, "can't find group for element");
    CHAIN_MUTEX_UNLOCK (chain);
    return;
  }

  GST_DEBUG_OBJECT (element, "Setting group %p to complete", group);

  group->no_more_pads = TRUE;
  /* drop the chain lock before taking the expose lock */
  CHAIN_MUTEX_UNLOCK (chain);

  EXPOSE_LOCK (chain->parsebin);
  if (chain->parsebin->parse_chain) {
    if (gst_parse_chain_is_complete (chain->parsebin->parse_chain)) {
      gst_parse_bin_expose (chain->parsebin);
    }
  }
  EXPOSE_UNLOCK (chain->parsebin);
}
/* "notify::caps" handler on a pending pad: drop the pending-pad entry
 * (if autoplugging still needs the pad, analyze_new_pad() — invoked via
 * pad_added_cb() below — reconnects everything) and retry autoplugging. */
static void
caps_notify_cb (GstPad * pad, GParamSpec * unused, GstParseChain * chain)
{
  GstElement *element;
  GList *walk;

  GST_LOG_OBJECT (pad, "Notified caps for pad %s:%s", GST_DEBUG_PAD_NAME (pad));

  element = GST_ELEMENT_CAST (gst_pad_get_parent (pad));

  CHAIN_MUTEX_LOCK (chain);
  for (walk = chain->pending_pads; walk != NULL; walk = walk->next) {
    GstPendingPad *ppad = walk->data;

    if (ppad->pad != pad)
      continue;

    gst_pending_pad_free (ppad);
    chain->pending_pads = g_list_delete_link (chain->pending_pads, walk);
    break;
  }
  CHAIN_MUTEX_UNLOCK (chain);

  pad_added_cb (element, pad, chain);
  gst_object_unref (element);
}
/* Decide whether an element is a demuxer based on the
 * klass and number/type of src pad templates it has.
 * An element counts as a demuxer when its klass contains "Demux" and it
 * might produce two or more source pads. */
static gboolean
is_demuxer_element (GstElement * srcelement)
{
  GstElementFactory *srcfactory;
  GstElementClass *elemclass;
  GList *walk;
  const gchar *klass;
  gint potential_src_pads = 0;

  srcfactory = gst_element_get_factory (srcelement);

  /* Elements created without a factory, or factories lacking klass
   * metadata, cannot be classified — treat them as non-demuxers rather
   * than crashing in strstr() on a NULL string */
  if (srcfactory == NULL)
    return FALSE;

  klass =
      gst_element_factory_get_metadata (srcfactory, GST_ELEMENT_METADATA_KLASS);
  if (klass == NULL)
    return FALSE;

  /* Can't be a demuxer unless it has Demux in the klass name */
  if (!strstr (klass, "Demux"))
    return FALSE;

  /* Walk the src pad templates and count how many the element
   * might produce */
  elemclass = GST_ELEMENT_GET_CLASS (srcelement);

  walk = gst_element_class_get_pad_template_list (elemclass);
  while (walk != NULL) {
    GstPadTemplate *templ;

    templ = (GstPadTemplate *) walk->data;
    if (GST_PAD_TEMPLATE_DIRECTION (templ) == GST_PAD_SRC) {
      switch (GST_PAD_TEMPLATE_PRESENCE (templ)) {
        case GST_PAD_ALWAYS:
        case GST_PAD_SOMETIMES:
          if (strstr (GST_PAD_TEMPLATE_NAME_TEMPLATE (templ), "%"))
            potential_src_pads += 2;    /* Might make multiple pads */
          else
            potential_src_pads += 1;
          break;
        case GST_PAD_REQUEST:
          potential_src_pads += 2;
          break;
      }
    }
    walk = g_list_next (walk);
  }

  if (potential_src_pads < 2)
    return FALSE;

  return TRUE;
}
/* gst_parse_chain_get_current_group:
 *
 * Returns the group new child chains should be attached to: the active
 * group while it has not yet seen no-more-pads, otherwise the first
 * still-open queued group. When every existing group is already complete
 * a fresh one is created and appended to next_groups.
 *
 * Not MT-safe: Call with parent chain lock!
 */
static GstParseGroup *
gst_parse_chain_get_current_group (GstParseChain * chain)
{
  GstParseGroup *group = NULL;

  if (chain->active_group == NULL) {
    chain->active_group = group = gst_parse_group_new (chain->parsebin, chain);
  } else if (!chain->active_group->no_more_pads) {
    group = chain->active_group;
  } else {
    GList *walk;

    for (walk = chain->next_groups; walk; walk = g_list_next (walk)) {
      GstParseGroup *candidate = walk->data;

      if (!candidate->no_more_pads) {
        group = candidate;
        break;
      }
    }
  }

  if (group == NULL) {
    group = gst_parse_group_new (chain->parsebin, chain);
    chain->next_groups = g_list_append (chain->next_groups, group);
  }

  return group;
}
static void gst_parse_group_free_internal (GstParseGroup * group,
gboolean hide);
/* gst_parse_chain_free_internal:
 *
 * Tears down @chain. With @hide TRUE only endpads are removed and signal
 * handlers disconnected (elements stay alive, to be freed later); with
 * @hide FALSE everything is unreffed and the chain struct itself is freed.
 * Elements needing a state change to NULL are collected in set_to_null and
 * processed after the chain lock is released, since state changes must not
 * happen under that lock. */
static void
gst_parse_chain_free_internal (GstParseChain * chain, gboolean hide)
{
  GList *l, *set_to_null = NULL;

  CHAIN_MUTEX_LOCK (chain);

  GST_DEBUG_OBJECT (chain->parsebin, "%s chain %p",
      (hide ? "Hiding" : "Freeing"), chain);

  /* recurse into the active group and any queued follow-up groups */
  if (chain->active_group) {
    gst_parse_group_free_internal (chain->active_group, hide);
    if (!hide)
      chain->active_group = NULL;
  }

  for (l = chain->next_groups; l; l = l->next) {
    gst_parse_group_free_internal ((GstParseGroup *) l->data, hide);
    if (!hide)
      l->data = NULL;
  }
  if (!hide) {
    g_list_free (chain->next_groups);
    chain->next_groups = NULL;
  }

  /* previously hidden groups are only fully freed on a real free */
  if (!hide) {
    for (l = chain->old_groups; l; l = l->next) {
      GstParseGroup *group = l->data;

      gst_parse_group_free (group);
    }
    g_list_free (chain->old_groups);
    chain->old_groups = NULL;
  }

  gst_object_replace ((GstObject **) & chain->current_pad, NULL);

  for (l = chain->pending_pads; l; l = l->next) {
    GstPendingPad *ppad = l->data;
    gst_pending_pad_free (ppad);
    l->data = NULL;
  }
  g_list_free (chain->pending_pads);
  chain->pending_pads = NULL;

  for (l = chain->elements; l; l = l->next) {
    GstParseElement *pelem = l->data;
    GstElement *element = pelem->element;

    /* disconnect the autoplug signal handlers installed by
     * connect_element() */
    if (pelem->pad_added_id)
      g_signal_handler_disconnect (element, pelem->pad_added_id);
    pelem->pad_added_id = 0;
    if (pelem->pad_removed_id)
      g_signal_handler_disconnect (element, pelem->pad_removed_id);
    pelem->pad_removed_id = 0;
    if (pelem->no_more_pads_id)
      g_signal_handler_disconnect (element, pelem->no_more_pads_id);
    pelem->no_more_pads_id = 0;

    if (pelem->capsfilter) {
      if (GST_OBJECT_PARENT (pelem->capsfilter) ==
          GST_OBJECT_CAST (chain->parsebin))
        gst_bin_remove (GST_BIN_CAST (chain->parsebin), pelem->capsfilter);
      if (!hide) {
        /* defer the NULL state change until after the lock is dropped */
        set_to_null =
            g_list_append (set_to_null, gst_object_ref (pelem->capsfilter));
      }
    }

    if (GST_OBJECT_PARENT (element) == GST_OBJECT_CAST (chain->parsebin))
      gst_bin_remove (GST_BIN_CAST (chain->parsebin), element);
    if (!hide) {
      set_to_null = g_list_append (set_to_null, gst_object_ref (element));
    }

    SUBTITLE_LOCK (chain->parsebin);
    /* remove possible subtitle element */
    chain->parsebin->subtitles =
        g_list_remove (chain->parsebin->subtitles, element);
    SUBTITLE_UNLOCK (chain->parsebin);

    if (!hide) {
      if (pelem->capsfilter) {
        gst_object_unref (pelem->capsfilter);
        pelem->capsfilter = NULL;
      }

      gst_object_unref (element);
      l->data = NULL;

      g_slice_free (GstParseElement, pelem);
    }
  }
  if (!hide) {
    g_list_free (chain->elements);
    chain->elements = NULL;
  }

  if (chain->endpad) {
    if (chain->endpad->exposed) {
      GstPad *endpad = GST_PAD_CAST (chain->endpad);

      GST_DEBUG_OBJECT (chain->parsebin, "Removing pad %s:%s",
          GST_DEBUG_PAD_NAME (endpad));
      /* push EOS downstream before yanking the pad away */
      gst_pad_push_event (endpad, gst_event_new_eos ());
      gst_element_remove_pad (GST_ELEMENT_CAST (chain->parsebin), endpad);
    }

    parse_pad_set_target (chain->endpad, NULL);
    chain->endpad->exposed = FALSE;
    if (!hide) {
      gst_object_unref (chain->endpad);
      chain->endpad = NULL;
    }
  }

  if (!hide && chain->current_pad) {
    gst_object_unref (chain->current_pad);
    chain->current_pad = NULL;
  }

  if (chain->pad) {
    gst_object_unref (chain->pad);
    chain->pad = NULL;
  }

  if (chain->start_caps) {
    gst_caps_unref (chain->start_caps);
    chain->start_caps = NULL;
  }

  if (chain->endcaps) {
    gst_caps_unref (chain->endcaps);
    chain->endcaps = NULL;
  }
  g_free (chain->deadend_details);
  chain->deadend_details = NULL;

  GST_DEBUG_OBJECT (chain->parsebin, "%s chain %p", (hide ? "Hidden" : "Freed"),
      chain);
  CHAIN_MUTEX_UNLOCK (chain);

  /* now, outside the chain lock, shut the collected elements down */
  while (set_to_null) {
    GstElement *element = set_to_null->data;

    set_to_null = g_list_delete_link (set_to_null, set_to_null);
    gst_element_set_state (element, GST_STATE_NULL);
    gst_object_unref (element);
  }

  if (!hide) {
    g_mutex_clear (&chain->lock);
    g_slice_free (GstParseChain, chain);
  }
}
/* gst_parse_chain_free:
 *
 * Completely frees and removes the chain and all
 * child groups from ParseBin (hide=FALSE variant of
 * gst_parse_chain_free_internal()).
 *
 * MT-safe, don't hold the chain lock or any child chain's lock
 * when calling this!
 */
static void
gst_parse_chain_free (GstParseChain * chain)
{
  gst_parse_chain_free_internal (chain, FALSE);
}
/* gst_parse_chain_new:
 *
 * Allocates and initializes a parse chain hanging off @pad, optionally
 * remembering @start_caps.
 *
 * It's up to the caller to add it to the list of child chains of
 * a group!
 */
static GstParseChain *
gst_parse_chain_new (GstParseBin * parsebin, GstParseGroup * parent,
    GstPad * pad, GstCaps * start_caps)
{
  GstParseChain *chain;

  chain = g_slice_new0 (GstParseChain);

  GST_DEBUG_OBJECT (parsebin, "Creating new chain %p with parent group %p",
      chain, parent);

  chain->parsebin = parsebin;
  chain->parent = parent;
  chain->pad = gst_object_ref (pad);
  chain->start_caps = start_caps ? gst_caps_ref (start_caps) : NULL;
  g_mutex_init (&chain->lock);

  return chain;
}
/****
 * GstParseGroup functions
 ****/

/* Frees (@hide == FALSE) or merely hides (@hide == TRUE) @group and
 * all of its child chains. When hiding, the children list and the
 * group allocation itself are kept alive for later reuse/freeing. */
static void
gst_parse_group_free_internal (GstParseGroup * group, gboolean hide)
{
  GList *walk;

  GST_DEBUG_OBJECT (group->parsebin, "%s group %p",
      (hide ? "Hiding" : "Freeing"), group);

  /* Dispose of (or hide) every child chain first */
  for (walk = group->children; walk != NULL; walk = walk->next) {
    GstParseChain *child = (GstParseChain *) walk->data;

    gst_parse_chain_free_internal (child, hide);
    if (!hide)
      walk->data = NULL;
  }

  if (!hide) {
    g_list_free (group->children);
    group->children = NULL;
  }

  GST_DEBUG_OBJECT (group->parsebin, "%s group %p", (hide ? "Hid" : "Freed"),
      group);

  if (!hide)
    g_slice_free (GstParseGroup, group);
}
/* gst_parse_group_free:
 *
 * Completely frees and removes the parse group and all
 * it's children.
 *
 * Never call this from any streaming thread!
 *
 * Not MT-safe, call with parent's chain lock!
 */
static void
gst_parse_group_free (GstParseGroup * group)
{
  /* hide=FALSE: children and the group allocation are fully freed */
  gst_parse_group_free_internal (group, FALSE);
}
/* gst_parse_group_hide:
 *
 * Hide the parse group only, this means that
 * all child endpads are removed from ParseBin
 * and all signals are unconnected.
 *
 * No element is set to NULL state and completely
 * unrefed here.
 *
 * Can be called from streaming threads.
 *
 * Not MT-safe, call with parent's chain lock!
 */
static void
gst_parse_group_hide (GstParseGroup * group)
{
  /* hide=TRUE: keep elements and allocations alive for a later
   * gst_parse_group_free() from a non-streaming thread */
  gst_parse_group_free_internal (group, TRUE);
}
/* gst_parse_chain_free_hidden_groups:
 *
 * Frees any parse groups that were hidden previously.
 * This allows keeping memory use from ballooning when
 * switching chains repeatedly.
 *
 * A new throwaway thread will be created to free the
 * groups, so any delay does not block the setup of a
 * new group.
 *
 * Not MT-safe, call with parent's chain lock!
 */
static void
gst_parse_chain_free_hidden_groups (GList * old_groups)
{
  GList *cur;

  /* Fully free every previously hidden group, then the list itself */
  for (cur = old_groups; cur != NULL; cur = cur->next)
    gst_parse_group_free ((GstParseGroup *) cur->data);

  g_list_free (old_groups);
}
/* Spawns a short-lived thread that frees @chain's old (hidden) groups,
 * so the potentially slow teardown does not block setup of a new group.
 * Joins any previous cleanup thread first. On thread-creation failure
 * the groups are put back on the chain so they are not leaked. */
static void
gst_parse_chain_start_free_hidden_groups_thread (GstParseChain * chain)
{
  GThread *thread;
  GError *error = NULL;
  GList *old_groups;
  GstParseBin *parsebin = chain->parsebin;

  old_groups = chain->old_groups;
  if (!old_groups)
    return;

  /* If we already have a thread running, wait for it to finish */
  g_mutex_lock (&parsebin->cleanup_lock);
  if (parsebin->cleanup_thread) {
    g_thread_join (parsebin->cleanup_thread);
    parsebin->cleanup_thread = NULL;
  }

  /* Ownership of the list moves to the new thread */
  chain->old_groups = NULL;
  thread = g_thread_try_new ("free-hidden-groups",
      (GThreadFunc) gst_parse_chain_free_hidden_groups, old_groups, &error);
  if (!thread || error) {
    GST_ERROR ("Failed to start free-hidden-groups thread: %s",
        error ? error->message : "unknown reason");
    g_clear_error (&error);
    /* Give the list back to the chain; it will be retried/freed later */
    chain->old_groups = old_groups;
    g_mutex_unlock (&parsebin->cleanup_lock);
    return;
  }

  parsebin->cleanup_thread = thread;
  g_mutex_unlock (&parsebin->cleanup_lock);

  GST_DEBUG_OBJECT (chain->parsebin, "Started free-hidden-groups thread");
}
/* gst_parse_group_new:
 * @parsebin: Parent ParseBin
 * @parent: Parent chain or %NULL
 *
 * Creates a new GstParseGroup. It is up to the caller to add it to the list
 * of groups.
 */
static GstParseGroup *
gst_parse_group_new (GstParseBin * parsebin, GstParseChain * parent)
{
  GstParseGroup *new_group;

  new_group = g_slice_new0 (GstParseGroup);

  GST_DEBUG_OBJECT (parsebin, "Creating new group %p with parent chain %p",
      new_group, parent);

  new_group->parsebin = parsebin;
  new_group->parent = parent;

  return new_group;
}
/* gst_parse_group_is_complete:
 *
 * Checks if the group is complete, this means that
 * a) no-more-pads happened
 * b) all child chains are complete
 *
 * Not MT-safe, always call with ParseBin expose lock
 */
static gboolean
gst_parse_group_is_complete (GstParseGroup * group)
{
  GList *walk;
  gboolean complete = TRUE;

  /* Without no-more-pads the group may still grow new chains */
  if (!group->no_more_pads) {
    complete = FALSE;
    goto done;
  }

  for (walk = group->children; walk != NULL; walk = walk->next) {
    GstParseChain *child = walk->data;

    /* Any blocked chain requires we complete this group
     * since everything is synchronous, we can't proceed otherwise */
    if (child->endpad && child->endpad->blocked)
      goto done;

    if (!gst_parse_chain_is_complete (child)) {
      complete = FALSE;
      goto done;
    }
  }

done:
  GST_DEBUG_OBJECT (group->parsebin, "Group %p is complete: %d", group,
      complete);

  return complete;
}
/* gst_parse_chain_is_complete:
 *
 * Returns TRUE if the chain is complete, this means either
 * a) This chain is a dead end, i.e. we have no suitable plugins
 * b) This chain ends in an endpad and this is blocked or exposed
 * c) The chain has gotten far enough to have plugged 1 parser at least.
 *
 * Not MT-safe, always call with ParseBin expose lock
 */
static gboolean
gst_parse_chain_is_complete (GstParseChain * chain)
{
  gboolean complete = FALSE;

  CHAIN_MUTEX_LOCK (chain);

  /* Nothing is considered complete while shutting down */
  if (chain->parsebin->shutdown)
    goto done;

  if (chain->deadend) {
    complete = TRUE;
    goto done;
  }

  if (chain->endpad && (chain->endpad->blocked || chain->endpad->exposed)) {
    complete = TRUE;
    goto done;
  }

  /* A demuxer chain is complete once its active group is */
  if (chain->demuxer && chain->active_group
      && gst_parse_group_is_complete (chain->active_group)) {
    complete = TRUE;
    goto done;
  }

  if (chain->parsed) {
    complete = TRUE;
    goto done;
  }

done:
  CHAIN_MUTEX_UNLOCK (chain);
  GST_DEBUG_OBJECT (chain->parsebin, "Chain %p is complete: %d", chain,
      complete);
  return complete;
}
/* Recursively hides the first old group of @chain and of every chain in
 * its active group, and kicks off the background thread that frees them. */
static void
chain_remove_old_groups (GstParseChain * chain)
{
  GList *iter;

  /* First recurse into the children of the active group */
  if (chain->active_group != NULL) {
    for (iter = chain->active_group->children; iter != NULL; iter = iter->next)
      chain_remove_old_groups ((GstParseChain *) iter->data);
  }

  if (chain->old_groups != NULL) {
    gst_parse_group_hide (chain->old_groups->data);
    gst_parse_chain_start_free_hidden_groups_thread (chain);
  }
}
static gboolean
drain_and_switch_chains (GstParseChain * chain, GstParsePad * drainpad,
    gboolean * last_group, gboolean * drained, gboolean * switched);
/* drain_and_switch_chains/groups:
 *
 * CALL WITH CHAIN LOCK (or group parent) TAKEN !
 *
 * Goes down the chains/groups until it finds the chain
 * to which the drainpad belongs.
 *
 * It marks that pad/chain as drained and then will figure
 * out which group to switch to or not.
 *
 * last_chain will be set to TRUE if the group to which the
 * pad belongs is the last one.
 *
 * drained will be set to TRUE if the chain/group is drained.
 *
 * Returns: TRUE if the chain contained the target pad */
static gboolean
drain_and_switch_group (GstParseGroup * group, GstParsePad * drainpad,
    gboolean * last_group, gboolean * drained, gboolean * switched)
{
  gboolean handled = FALSE;
  GList *tmp;

  GST_DEBUG ("Checking group %p (target pad %s:%s)",
      group, GST_DEBUG_PAD_NAME (drainpad));

  /* Definitely can't be in drained groups */
  if (G_UNLIKELY (group->drained)) {
    goto beach;
  }

  /* Figure out if all our chains are drained with the
   * new information */
  group->drained = TRUE;
  for (tmp = group->children; tmp; tmp = tmp->next) {
    GstParseChain *chain = (GstParseChain *) tmp->data;
    gboolean subdrained = FALSE;

    /* handled accumulates: the target pad is in at most one chain */
    handled |=
        drain_and_switch_chains (chain, drainpad, last_group, &subdrained,
        switched);
    /* One non-drained chain is enough to keep the group alive */
    if (!subdrained)
      group->drained = FALSE;
  }

beach:
  GST_DEBUG ("group %p (last_group:%d, drained:%d, switched:%d, handled:%d)",
      group, *last_group, group->drained, *switched, handled);
  *drained = group->drained;
  return handled;
}
/* Recursive counterpart of drain_and_switch_group() for a single chain.
 * Marks @drainpad as drained when found, propagates the drained state
 * upwards, and switches to the next pending group when the active group
 * is fully drained. See the comment above drain_and_switch_group(). */
static gboolean
drain_and_switch_chains (GstParseChain * chain, GstParsePad * drainpad,
    gboolean * last_group, gboolean * drained, gboolean * switched)
{
  gboolean handled = FALSE;
  GstParseBin *parsebin = chain->parsebin;

  GST_DEBUG ("Checking chain %p %s:%s (target pad %s:%s)",
      chain, GST_DEBUG_PAD_NAME (chain->pad), GST_DEBUG_PAD_NAME (drainpad));

  CHAIN_MUTEX_LOCK (chain);

  /* Definitely can't be in drained chains */
  if (G_UNLIKELY (chain->drained)) {
    goto beach;
  }

  if (chain->endpad) {
    /* Check if we're reached the target endchain */
    if (drainpad != NULL && chain == drainpad->chain) {
      GST_DEBUG ("Found the target chain");
      drainpad->drained = TRUE;
      handled = TRUE;
    }

    /* Leaf chain: drained status comes from the exposed pad */
    chain->drained = chain->endpad->drained;
    goto beach;
  }

  /* We known there are groups to switch to */
  if (chain->next_groups)
    *last_group = FALSE;

  /* Check the active group */
  if (chain->active_group) {
    gboolean subdrained = FALSE;
    handled = drain_and_switch_group (chain->active_group, drainpad,
        last_group, &subdrained, switched);

    /* The group is drained, see if we can switch to another */
    if ((handled || drainpad == NULL) && subdrained && !*switched) {
      if (chain->next_groups) {
        /* Switch to next group, the actual removal of the current group will
         * be done when the next one is activated */
        GST_DEBUG_OBJECT (parsebin, "Moving current group %p to old groups",
            chain->active_group);
        chain->old_groups =
            g_list_prepend (chain->old_groups, chain->active_group);
        GST_DEBUG_OBJECT (parsebin, "Switching to next group %p",
            chain->next_groups->data);
        chain->active_group = chain->next_groups->data;
        chain->next_groups =
            g_list_delete_link (chain->next_groups, chain->next_groups);
        /* At most one switch per drain pass */
        *switched = TRUE;
        chain->drained = FALSE;
      } else {
        GST_DEBUG ("Group %p was the last in chain %p", chain->active_group,
            chain);
        chain->drained = TRUE;
        /* We're drained ! */
      }
    } else {
      if (subdrained && !chain->next_groups)
        *drained = TRUE;
    }
  }

beach:
  CHAIN_MUTEX_UNLOCK (chain);

  GST_DEBUG
      ("Chain %p (%s:%s handled:%d, last_group:%d, drained:%d, switched:%d, deadend:%d)",
      chain, GST_DEBUG_PAD_NAME (chain->pad), handled, *last_group,
      chain->drained, *switched, chain->deadend);

  *drained = chain->drained;

  return handled;
}
/* check if the group is drained, meaning all pads have seen an EOS
 * event.
 *
 * Called when @pad receives EOS. Drains/switches chains, re-exposes
 * when a group switch happened, and emits the "drained" signal when
 * everything is drained.
 *
 * Returns: TRUE if the EOS should be forwarded downstream (i.e. this
 * was the last group), FALSE if it should be dropped. */
static gboolean
gst_parse_pad_handle_eos (GstParsePad * pad)
{
  gboolean last_group = TRUE;
  gboolean switched = FALSE;
  gboolean drained = FALSE;
  GstParseChain *chain = pad->chain;
  GstParseBin *parsebin = chain->parsebin;

  GST_LOG_OBJECT (parsebin, "pad %p", pad);
  EXPOSE_LOCK (parsebin);
  if (parsebin->parse_chain) {
    drain_and_switch_chains (parsebin->parse_chain, pad, &last_group, &drained,
        &switched);

    GST_LOG_OBJECT (parsebin, "drained:%d switched:%d", drained, switched);
    if (switched) {
      /* If we resulted in a group switch, expose what's needed */
      if (gst_parse_chain_is_complete (parsebin->parse_chain))
        gst_parse_bin_expose (parsebin);
    }

    if (drained) {
      GST_DEBUG_OBJECT (parsebin, "We are fully drained, emitting signal");
      g_signal_emit (parsebin, gst_parse_bin_signals[SIGNAL_DRAINED], 0, NULL);
    }
  }
  EXPOSE_UNLOCK (parsebin);

  return last_group;
}
/* gst_parse_group_is_drained:
 *
 * Check is this group is drained and cache this result.
 * The group is drained if all child chains are drained.
 *
 * Not MT-safe, call with group->parent's lock */
static gboolean
gst_parse_group_is_drained (GstParseGroup * group)
{
  GList *walk;
  gboolean drained = TRUE;

  /* Use the cached result if available */
  if (group->drained)
    goto out;

  /* Stop at the first non-drained child */
  for (walk = group->children; walk != NULL && drained; walk = walk->next) {
    GstParseChain *child = walk->data;

    CHAIN_MUTEX_LOCK (child);
    if (!gst_parse_chain_is_drained (child))
      drained = FALSE;
    CHAIN_MUTEX_UNLOCK (child);
  }

  /* Only cache a positive result; a negative one may change later */
  if (drained)
    group->drained = TRUE;

out:
  GST_DEBUG_OBJECT (group->parsebin, "Group %p is drained: %d", group, drained);
  return drained;
}
/* gst_parse_chain_is_drained:
 *
 * Check is the chain is drained, which means that
 * either
 *
 * a) it's endpad is drained
 * b) there are no pending pads, the active group is drained
 * and there are no next groups
 *
 * Not MT-safe, call with chain lock
 */
static gboolean
gst_parse_chain_is_drained (GstParseChain * chain)
{
  gboolean drained;

  if (chain->endpad) {
    /* a) terminated chain: the exposed pad decides */
    drained = chain->endpad->drained;
  } else if (chain->pending_pads) {
    /* pads are still waiting to be connected, clearly not drained */
    drained = FALSE;
  } else {
    /* b) drained active group and nothing left to switch to */
    drained = (chain->active_group != NULL
        && gst_parse_group_is_drained (chain->active_group)
        && chain->next_groups == NULL);
  }

  GST_DEBUG_OBJECT (chain->parsebin, "Chain %p is drained: %d", chain, drained);
  return drained;
}
/* sort_end_pads:
* GCompareFunc to use with lists of GstPad.
* Sorts pads by mime type.
* First video (raw, then non-raw), then audio (raw, then non-raw),
* then others.
*
* Return: negative if a<b, 0 if a==b, positive if a>b
*/
static gint
sort_end_pads (GstParsePad * da, GstParsePad * db)
{
gint va, vb;
GstCaps *capsa, *capsb;
GstStructure *sa, *sb;
const gchar *namea, *nameb;
gchar *ida, *idb;
gint ret;
capsa = get_pad_caps (GST_PAD_CAST (da));
capsb = get_pad_caps (GST_PAD_CAST (db));
sa = gst_caps_get_structure ((const GstCaps *) capsa, 0);
sb = gst_caps_get_structure ((const GstCaps *) capsb, 0);
namea = gst_structure_get_name (sa);
nameb = gst_structure_get_name (sb);
if (g_strrstr (namea, "video/x-raw"))
va = 0;
else if (g_strrstr (namea, "video/"))
va = 1;
else if (g_strrstr (namea, "image/"))
va = 2;
else if (g_strrstr (namea, "audio/x-raw"))
va = 3;
else if (g_strrstr (namea, "audio/"))
va = 4;
else
va = 5;
if (g_strrstr (nameb, "video/x-raw"))
vb = 0;
else if (g_strrstr (nameb, "video/"))
vb = 1;
else if (g_strrstr (nameb, "image/"))
vb = 2;
else if (g_strrstr (nameb, "audio/x-raw"))
vb = 3;
else if (g_strrstr (nameb, "audio/"))
vb = 4;
else
vb = 5;
gst_caps_unref (capsa);
gst_caps_unref (capsb);
if (va != vb)
return va - vb;
/* if otherwise the same, sort by stream-id */
ida = gst_pad_get_stream_id (GST_PAD_CAST (da));
idb = gst_pad_get_stream_id (GST_PAD_CAST (db));
ret = (ida) ? ((idb) ? strcmp (ida, idb) : -1) : 1;
g_free (ida);
g_free (idb);
return ret;
}
/* GstPadStickyEventsForeachFunction: logs each sticky event on @pad.
 * Always returns TRUE so iteration continues over all events. */
static gboolean
debug_sticky_event (GstPad * pad, GstEvent ** event, gpointer user_data)
{
  GST_DEBUG_OBJECT (pad, "sticky event %s (%p)", GST_EVENT_TYPE_NAME (*event),
      *event);
  return TRUE;
}
/* Must only be called if the toplevel chain is complete and blocked! */
/* Not MT-safe, call with ParseBin expose lock! */
/* Collects all endpads of the active chains/groups, errors out or warns
 * when there is nothing to expose, posts a fallback stream collection if
 * needed, then adds/activates the ghost pads and finally unblocks them.
 * Returns TRUE when everything was exposed successfully. */
static gboolean
gst_parse_bin_expose (GstParseBin * parsebin)
{
  GList *tmp, *endpads;
  gboolean missing_plugin;
  GString *missing_plugin_details;
  gboolean already_exposed;
  gboolean last_group;
  gboolean uncollected_streams;
  GstStreamCollection *fallback_collection = NULL;

retry:
  /* Re-initialized on every retry (after a group switch) */
  endpads = NULL;
  missing_plugin = FALSE;
  already_exposed = TRUE;
  last_group = TRUE;

  missing_plugin_details = g_string_new ("");

  GST_DEBUG_OBJECT (parsebin, "Exposing currently active chains/groups");

  /* Don't expose if we're currently shutting down */
  DYN_LOCK (parsebin);
  if (G_UNLIKELY (parsebin->shutdown)) {
    GST_WARNING_OBJECT (parsebin,
        "Currently, shutting down, aborting exposing");
    DYN_UNLOCK (parsebin);
    return FALSE;
  }
  DYN_UNLOCK (parsebin);

  /* Get the pads that we're going to expose and mark things as exposed */
  uncollected_streams = FALSE;
  CHAIN_MUTEX_LOCK (parsebin->parse_chain);
  if (!gst_parse_chain_expose (parsebin->parse_chain, &endpads, &missing_plugin,
          missing_plugin_details, &last_group, &uncollected_streams)) {
    g_list_free_full (endpads, (GDestroyNotify) gst_object_unref);
    g_string_free (missing_plugin_details, TRUE);
    GST_ERROR_OBJECT (parsebin, "Broken chain/group tree");
    CHAIN_MUTEX_UNLOCK (parsebin->parse_chain);
    return FALSE;
  }
  CHAIN_MUTEX_UNLOCK (parsebin->parse_chain);
  /* No endpads: either a missing plugin, or streams ended without buffers.
   * Error, warn, or retry after switching to the next group. */
  if (endpads == NULL) {
    if (missing_plugin) {
      if (missing_plugin_details->len > 0) {
        gchar *details = g_string_free (missing_plugin_details, FALSE);
        GST_ELEMENT_ERROR (parsebin, CORE, MISSING_PLUGIN, (NULL),
            ("no suitable plugins found:\n%s", details));
        g_free (details);
      } else {
        g_string_free (missing_plugin_details, TRUE);
        GST_ELEMENT_ERROR (parsebin, CORE, MISSING_PLUGIN, (NULL),
            ("no suitable plugins found"));
      }
    } else {
      /* in this case, the stream ended without buffers,
       * just post a warning */
      g_string_free (missing_plugin_details, TRUE);
      GST_WARNING_OBJECT (parsebin, "All streams finished without buffers. "
          "Last group: %d", last_group);
      if (last_group) {
        GST_ELEMENT_ERROR (parsebin, STREAM, FAILED, (NULL),
            ("all streams without buffers"));
      } else {
        gboolean switched = FALSE;
        gboolean drained = FALSE;

        drain_and_switch_chains (parsebin->parse_chain, NULL, &last_group,
            &drained, &switched);
        GST_ELEMENT_WARNING (parsebin, STREAM, FAILED, (NULL),
            ("all streams without buffers"));
        if (switched) {
          /* A group switch happened; retry exposing the new group */
          if (gst_parse_chain_is_complete (parsebin->parse_chain))
            goto retry;
          else
            return FALSE;
        }
      }
    }

    return FALSE;
  }

  /* Streams that never saw a collection event get a synthesized one */
  if (uncollected_streams) {
    /* FIXME: Collect and use a stream id from the top chain as
     * upstream ID? */
    fallback_collection = gst_stream_collection_new (NULL);

    build_fallback_collection (parsebin->parse_chain, fallback_collection);

    gst_element_post_message (GST_ELEMENT (parsebin),
        gst_message_new_stream_collection (GST_OBJECT (parsebin),
            fallback_collection));
  }

  g_string_free (missing_plugin_details, TRUE);

  /* Check if this was called when everything was exposed already,
   * and see if we need to post a new fallback collection */
  for (tmp = endpads; tmp && already_exposed; tmp = tmp->next) {
    GstParsePad *parsepad = tmp->data;

    already_exposed &= parsepad->exposed;
  }
  if (already_exposed) {
    GST_DEBUG_OBJECT (parsebin, "Everything was exposed already!");
    if (fallback_collection)
      gst_object_unref (fallback_collection);
    g_list_free_full (endpads, (GDestroyNotify) gst_object_unref);
    return TRUE;
  }

  /* Set all already exposed pads to blocked */
  for (tmp = endpads; tmp; tmp = tmp->next) {
    GstParsePad *parsepad = tmp->data;

    if (parsepad->exposed) {
      GST_DEBUG_OBJECT (parsepad, "blocking exposed pad");
      gst_parse_pad_set_blocked (parsepad, TRUE);
    }
  }

  /* re-order pads : video, then audio, then others */
  endpads = g_list_sort (endpads, (GCompareFunc) sort_end_pads);

  /* Don't expose if we're currently shutting down */
  DYN_LOCK (parsebin);
  if (G_UNLIKELY (parsebin->shutdown)) {
    GST_WARNING_OBJECT (parsebin,
        "Currently, shutting down, aborting exposing");
    DYN_UNLOCK (parsebin);
    return FALSE;
  }

  /* Expose pads */
  for (tmp = endpads; tmp; tmp = tmp->next) {
    GstParsePad *parsepad = (GstParsePad *) tmp->data;
    gchar *padname;

    //if (!parsepad->blocked)
    //continue;

    /* 1. rewrite name */
    padname = g_strdup_printf ("src_%u", parsebin->nbpads);
    parsebin->nbpads++;
    GST_DEBUG_OBJECT (parsebin, "About to expose parsepad %s as %s",
        GST_OBJECT_NAME (parsepad), padname);
    gst_object_set_name (GST_OBJECT (parsepad), padname);
    g_free (padname);

    gst_pad_sticky_events_foreach (GST_PAD_CAST (parsepad), debug_sticky_event,
        parsepad);

    /* 2. activate and add */
    if (!parsepad->exposed) {
      parsepad->exposed = TRUE;
      if (!gst_element_add_pad (GST_ELEMENT (parsebin),
              GST_PAD_CAST (parsepad))) {
        /* not really fatal, we can try to add the other pads */
        g_warning ("error adding pad to ParseBin");
        parsepad->exposed = FALSE;
        continue;
      }
#if 0
      /* HACK: Send an empty gap event to push sticky events */
      gst_pad_push_event (GST_PAD (parsepad),
          gst_event_new_gap (0, GST_CLOCK_TIME_NONE));
#endif
    }

    GST_INFO_OBJECT (parsepad, "added new parsed pad");
  }
  DYN_UNLOCK (parsebin);

  /* Unblock internal pads. The application should have connected stuff now
   * so that streaming can continue. */
  for (tmp = endpads; tmp; tmp = tmp->next) {
    GstParsePad *parsepad = (GstParsePad *) tmp->data;

    if (parsepad->exposed) {
      GST_DEBUG_OBJECT (parsepad, "unblocking");
      gst_parse_pad_unblock (parsepad);
      GST_DEBUG_OBJECT (parsepad, "unblocked");
    }

    /* Send stream-collection events for any pads that don't have them,
     * and post a stream-collection onto the bus */
    if (parsepad->active_collection == NULL && fallback_collection) {
      gst_pad_push_event (GST_PAD (parsepad),
          gst_event_new_stream_collection (fallback_collection));
    }
    gst_object_unref (parsepad);
  }
  g_list_free (endpads);

  if (fallback_collection)
    gst_object_unref (fallback_collection);

  /* Remove old groups */
  chain_remove_old_groups (parsebin->parse_chain);

  GST_DEBUG_OBJECT (parsebin, "Exposed everything");
  return TRUE;
}
/* gst_parse_chain_expose:
 *
 * Check if the chain can be exposed and add all endpads
 * to the endpads list.
 *
 * Recurses into the children of the active group. Also reports
 * missing-plugin details for dead-end chains and flags streams that
 * never received a stream collection (via @uncollected_streams).
 *
 * Returns: TRUE if the chain (or any child) contributed an endpad or
 * is a legitimate dead end; FALSE on a broken chain/group tree.
 *
 * Not MT-safe, call with ParseBin expose lock! *
 */
static gboolean
gst_parse_chain_expose (GstParseChain * chain, GList ** endpads,
    gboolean * missing_plugin, GString * missing_plugin_details,
    gboolean * last_group, gboolean * uncollected_streams)
{
  GstParseGroup *group;
  GList *l;
  gboolean ret = FALSE;

  if (chain->deadend) {
    if (chain->endcaps) {
      /* Report what we couldn't handle, preferring any stored details */
      if (chain->deadend_details) {
        g_string_append (missing_plugin_details, chain->deadend_details);
        g_string_append_c (missing_plugin_details, '\n');
      } else {
        gchar *desc = gst_pb_utils_get_codec_description (chain->endcaps);
        gchar *caps_str = gst_caps_to_string (chain->endcaps);
        g_string_append_printf (missing_plugin_details,
            "Missing parser: %s (%s)\n", desc, caps_str);
        g_free (caps_str);
        g_free (desc);
      }
      *missing_plugin = TRUE;
    }
    return TRUE;
  }

  if (chain->endpad == NULL && chain->parsed && chain->pending_pads) {
    /* The chain has a pending pad from a parser, let's just
     * expose that now as the endpad */
    GList *cur = chain->pending_pads;
    GstPendingPad *ppad = (GstPendingPad *) (cur->data);
    GstPad *endpad = gst_object_ref (ppad->pad);
    GstElement *elem =
        GST_ELEMENT (gst_object_get_parent (GST_OBJECT (endpad)));

    chain->pending_pads = g_list_remove (chain->pending_pads, ppad);

    gst_pending_pad_free (ppad);

    GST_DEBUG_OBJECT (chain->parsebin,
        "Exposing pad %" GST_PTR_FORMAT " with incomplete caps "
        "because it's parsed", endpad);

    expose_pad (chain->parsebin, elem, chain->current_pad, endpad, NULL, chain);
    gst_object_unref (endpad);
    gst_object_unref (elem);
  }

  if (chain->endpad) {
    GstParsePad *p = chain->endpad;

    /* Streams with no collection get a fallback one later */
    if (p->active_stream && p->active_collection == NULL
        && !p->in_a_fallback_collection)
      *uncollected_streams = TRUE;

    /* The endpads list takes a reference on the pad */
    *endpads = g_list_prepend (*endpads, gst_object_ref (p));
    return TRUE;
  }

  if (chain->next_groups)
    *last_group = FALSE;

  group = chain->active_group;
  if (!group) {
    GstParsePad *p = chain->current_pad;

    if (p->active_stream && p->active_collection == NULL
        && !p->in_a_fallback_collection)
      *uncollected_streams = TRUE;

    return FALSE;
  }

  /* Recurse into each child chain of the active group */
  for (l = group->children; l; l = l->next) {
    GstParseChain *childchain = l->data;

    CHAIN_MUTEX_LOCK (childchain);
    ret |= gst_parse_chain_expose (childchain, endpads, missing_plugin,
        missing_plugin_details, last_group, uncollected_streams);
    CHAIN_MUTEX_UNLOCK (childchain);
  }

  return ret;
}
/* Recursively walks @chain and adds every active stream that has no
 * collection yet into @collection, guessing an unknown stream type from
 * the pad caps where possible. Used to synthesize a collection when
 * upstream never sent GST_EVENT_STREAM_COLLECTION. */
static void
build_fallback_collection (GstParseChain * chain,
    GstStreamCollection * collection)
{
  GstParseGroup *group = chain->active_group;
  GList *l;

  /* If it's an end pad, or a not-finished chain that's
   * not a group, put it in the collection */
  if (chain->endpad || (chain->current_pad && group == NULL)) {
    GstParsePad *p = chain->current_pad;

    if (p->active_stream != NULL && p->active_collection == NULL) {
      GST_DEBUG_OBJECT (p, "Adding stream to fallback collection");
      if (G_UNLIKELY (gst_stream_get_stream_type (p->active_stream) ==
              GST_STREAM_TYPE_UNKNOWN)) {
        GstCaps *caps;
        caps = get_pad_caps (GST_PAD_CAST (p));

        if (caps) {
          /* Best-effort type guess from the caps media type prefix */
          GstStreamType type = guess_stream_type_from_caps (caps);
          if (type != GST_STREAM_TYPE_UNKNOWN) {
            gst_stream_set_stream_type (p->active_stream, type);
            gst_stream_set_caps (p->active_stream, caps);
          }
          gst_caps_unref (caps);
        }
      }

      gst_stream_collection_add_stream (collection,
          gst_object_ref (p->active_stream));
      p->in_a_fallback_collection = TRUE;
    }
    return;
  }

  if (!group)
    return;

  /* we used g_list_prepend when adding children, so iterate from last
   * to first to maintain the original order they were added in */
  for (l = g_list_last (group->children); l != NULL; l = l->prev) {
    GstParseChain *childchain = l->data;

    build_fallback_collection (childchain, collection);
  }
}
/*************************
 * GstParsePad functions
 *************************/

static void gst_parse_pad_dispose (GObject * object);

/* GObject class init: only the dispose vmethod needs overriding to
 * release the pad's target, stream and collection references. */
static void
gst_parse_pad_class_init (GstParsePadClass * klass)
{
  GObjectClass *gobject_klass;

  gobject_klass = (GObjectClass *) klass;

  gobject_klass->dispose = gst_parse_pad_dispose;
}
/* Instance init: clear all state flags and sink the floating ref so the
 * pad is owned by ParseBin rather than by whoever first refs it. */
static void
gst_parse_pad_init (GstParsePad * pad)
{
  pad->chain = NULL;
  pad->blocked = FALSE;
  pad->exposed = FALSE;
  pad->drained = FALSE;
  gst_object_ref_sink (pad);
}
/* GObject dispose: drop the ghost pad target and release the references
 * held on the active stream collection and stream, then chain up. */
static void
gst_parse_pad_dispose (GObject * object)
{
  GstParsePad *parsepad = (GstParsePad *) (object);

  parse_pad_set_target (parsepad, NULL);

  gst_object_replace ((GstObject **) & parsepad->active_collection, NULL);
  gst_object_replace ((GstObject **) & parsepad->active_stream, NULL);

  G_OBJECT_CLASS (gst_parse_pad_parent_class)->dispose (object);
}
/* Blocking probe installed on the ghost pad's target. Lets non-serialized
 * events/queries pass, manually forwards sticky (non-EOS) events, and on
 * the first blocking item marks the pad blocked and tries to expose the
 * bin. user_data holds a reference to the GstParsePad. */
static GstPadProbeReturn
source_pad_blocked_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
  GstParsePad *parsepad = user_data;
  GstParseChain *chain;
  GstParseBin *parsebin;
  GstPadProbeReturn ret = GST_PAD_PROBE_OK;

  if (GST_PAD_PROBE_INFO_TYPE (info) & GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM) {
    GstEvent *event = GST_PAD_PROBE_INFO_EVENT (info);

    GST_LOG_OBJECT (pad, "Seeing event '%s'", GST_EVENT_TYPE_NAME (event));

    if (!GST_EVENT_IS_SERIALIZED (event)) {
      /* do not block on sticky or out of band events otherwise the allocation query
         from demuxer might block the loop thread */
      GST_LOG_OBJECT (pad, "Letting OOB event through");
      return GST_PAD_PROBE_PASS;
    }

    if (GST_EVENT_IS_STICKY (event) && GST_EVENT_TYPE (event) != GST_EVENT_EOS) {
      GstPad *peer;

      /* manually push sticky events to ghost pad to avoid exposing pads
       * that don't have the sticky events. Handle EOS separately as we
       * want to block the pad on it if we didn't get any buffers before
       * EOS and expose the pad then. */
      peer = gst_pad_get_peer (pad);
      gst_pad_send_event (peer, event);
      gst_object_unref (peer);
      GST_LOG_OBJECT (pad, "Manually pushed sticky event through");
      ret = GST_PAD_PROBE_HANDLED;
      goto done;
    }
  } else if (GST_PAD_PROBE_INFO_TYPE (info) &
      GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM) {
    GstQuery *query = GST_PAD_PROBE_INFO_QUERY (info);

    if (!GST_QUERY_IS_SERIALIZED (query)) {
      /* do not block on non-serialized queries */
      GST_LOG_OBJECT (pad, "Letting non-serialized query through");
      return GST_PAD_PROBE_PASS;
    }
    if (!gst_pad_has_current_caps (pad)) {
      /* do not block on allocation queries before we have caps,
       * this would deadlock because we are doing no autoplugging
       * without caps.
       * TODO: Try to do autoplugging based on the query caps
       */
      GST_LOG_OBJECT (pad, "Letting serialized query before caps through");
      return GST_PAD_PROBE_PASS;
    }
  }
  chain = parsepad->chain;
  parsebin = chain->parsebin;

  /* Anything else blocks: mark the pad and attempt to expose the bin */
  GST_LOG_OBJECT (parsepad, "blocked: parsepad->chain:%p", chain);

  parsepad->blocked = TRUE;
  EXPOSE_LOCK (parsebin);
  if (parsebin->parse_chain) {
    if (!gst_parse_bin_expose (parsebin))
      GST_WARNING_OBJECT (parsebin, "Couldn't expose group");
  }
  EXPOSE_UNLOCK (parsebin);

done:
  return ret;
}
/* FIXME: We can probably do some cleverer things, and maybe move this into
 * pbutils. Ideas:
 *    if there are tags look if it's got an AUDIO_CODEC VIDEO_CODEC CONTAINER_FORMAT tag
 *    Look at the factory klass designation of parsers in the chain
 *    Consider demuxer pad names as well, sometimes they give the type away
 */

/* Derives a coarse GstStreamType from the first structure name of @caps,
 * matching on well-known media-type prefixes only. */
static GstStreamType
guess_stream_type_from_caps (GstCaps * caps)
{
  static const struct
  {
    const gchar *prefix;
    GstStreamType type;
  } prefix_map[] = {
    {"video/", GST_STREAM_TYPE_VIDEO},
    {"image/", GST_STREAM_TYPE_VIDEO},
    {"audio/", GST_STREAM_TYPE_AUDIO},
    {"text/", GST_STREAM_TYPE_TEXT},
    {"subpicture/", GST_STREAM_TYPE_TEXT},
    {"closedcaption/", GST_STREAM_TYPE_TEXT},
  };
  GstStructure *s;
  const gchar *name;
  guint i;

  if (gst_caps_get_size (caps) < 1)
    return GST_STREAM_TYPE_UNKNOWN;

  s = gst_caps_get_structure (caps, 0);
  name = gst_structure_get_name (s);

  for (i = 0; i < G_N_ELEMENTS (prefix_map); i++) {
    if (g_str_has_prefix (name, prefix_map[i].prefix))
      return prefix_map[i].type;
  }

  return GST_STREAM_TYPE_UNKNOWN;
}
/* Stores @caps on the pad's active stream (fixed caps only) and, when the
 * stream type is still unknown, tries to deduce one from the caps. */
static void
gst_parse_pad_update_caps (GstParsePad * parsepad, GstCaps * caps)
{
  if (caps == NULL || parsepad->active_stream == NULL)
    return;

  GST_DEBUG_OBJECT (parsepad, "Storing caps %" GST_PTR_FORMAT
      " on stream %" GST_PTR_FORMAT, caps, parsepad->active_stream);

  if (gst_caps_is_fixed (caps))
    gst_stream_set_caps (parsepad->active_stream, caps);

  /* intuit a type */
  if (gst_stream_get_stream_type (parsepad->active_stream) ==
      GST_STREAM_TYPE_UNKNOWN) {
    GstStreamType guessed = guess_stream_type_from_caps (caps);

    if (guessed != GST_STREAM_TYPE_UNKNOWN)
      gst_stream_set_stream_type (parsepad->active_stream, guessed);
  }
}
/* Stores stream-scoped @tags on the pad's active stream. Tags with any
 * other scope (or no active stream) are ignored here. */
static void
gst_parse_pad_update_tags (GstParsePad * parsepad, GstTagList * tags)
{
  if (tags == NULL || parsepad->active_stream == NULL)
    return;
  if (gst_tag_list_get_scope (tags) != GST_TAG_SCOPE_STREAM)
    return;

  GST_DEBUG_OBJECT (parsepad,
      "Storing new tags %" GST_PTR_FORMAT " on stream %" GST_PTR_FORMAT, tags,
      parsepad->active_stream);
  gst_stream_set_tags (parsepad->active_stream, tags);
}
/* Processes a STREAM_START event on @parsepad. Resets the collection
 * state on a new stream-id, and when the event carries no GstStream,
 * creates one (with caps from the pad, its peer or the chain start caps)
 * and attaches it to a writable copy of the event.
 *
 * Returns: the (possibly replaced) event to forward.
 *
 * Fix: the log line used to dereference @stream AFTER gst_object_unref()
 * had dropped our reference; log first, then unref. */
static GstEvent *
gst_parse_pad_stream_start_event (GstParsePad * parsepad, GstEvent * event)
{
  GstStream *stream = NULL;
  const gchar *stream_id = NULL;
  gboolean repeat_event = FALSE;

  gst_event_parse_stream_start (event, &stream_id);

  if (parsepad->active_stream != NULL &&
      g_str_equal (parsepad->active_stream->stream_id, stream_id))
    repeat_event = TRUE;
  else {
    /* A new stream requires a new collection event, or else
     * we'll place it in a fallback collection later */
    gst_object_replace ((GstObject **) & parsepad->active_collection, NULL);
    parsepad->in_a_fallback_collection = FALSE;
  }

  gst_event_parse_stream (event, &stream);
  if (stream == NULL) {
    GstCaps *caps = gst_pad_get_current_caps (GST_PAD_CAST (parsepad));

    if (caps == NULL) {
      /* Try and get caps from the parsepad peer */
      GstPad *peer = gst_ghost_pad_get_target (GST_GHOST_PAD (parsepad));
      caps = gst_pad_get_current_caps (peer);
      gst_object_unref (peer);
    }
    if (caps == NULL && parsepad->chain && parsepad->chain->start_caps) {
      /* Still no caps, use the chain start caps */
      caps = gst_caps_ref (parsepad->chain->start_caps);
    }

    GST_DEBUG_OBJECT (parsepad,
        "Saw stream_start with no GstStream. Adding one. Caps %"
        GST_PTR_FORMAT, caps);

    if (repeat_event) {
      stream = gst_object_ref (parsepad->active_stream);
    } else {
      stream =
          gst_stream_new (stream_id, NULL, GST_STREAM_TYPE_UNKNOWN,
          GST_STREAM_FLAG_NONE);
      gst_object_replace ((GstObject **) & parsepad->active_stream,
          (GstObject *) stream);
    }
    if (caps) {
      gst_parse_pad_update_caps (parsepad, caps);
      gst_caps_unref (caps);
    }

    event = gst_event_make_writable (event);
    gst_event_set_stream (event, stream);
  }

  /* Log before dropping our reference — @stream must not be used after
   * the unref, even though the event usually still holds a ref */
  GST_LOG_OBJECT (parsepad, "Saw stream %s (GstStream %p)",
      stream->stream_id, stream);
  gst_object_unref (stream);

  return event;
}
/* Records @collection as the pad's active stream collection (replacing
 * any previous one) and clears the fallback-collection flag, since a
 * real collection has now been seen for this pad. */
static void
gst_parse_pad_update_stream_collection (GstParsePad * parsepad,
    GstStreamCollection * collection)
{
  GST_LOG_OBJECT (parsepad, "Got new stream collection %p", collection);
  gst_object_replace ((GstObject **) & parsepad->active_collection,
      (GstObject *) collection);
  parsepad->in_a_fallback_collection = FALSE;
}
/* Event probe on the exposed GstParsePad: tracks caps/tags/stream-start/
 * collection state on the pad, and decides whether EOS is forwarded or
 * dropped (when another group is pending). Returns GST_PAD_PROBE_OK to
 * forward the event, GST_PAD_PROBE_DROP to swallow it. */
static GstPadProbeReturn
gst_parse_pad_event (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
  GstEvent *event = GST_PAD_PROBE_INFO_EVENT (info);
  GstObject *parent = gst_pad_get_parent (pad);
  GstParsePad *parsepad = GST_PARSE_PAD (parent);
  gboolean forwardit = TRUE;

  GST_LOG_OBJECT (pad, "%s parsepad:%p", GST_EVENT_TYPE_NAME (event), parsepad);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS:{
      GstCaps *caps = NULL;
      gst_event_parse_caps (event, &caps);
      gst_parse_pad_update_caps (parsepad, caps);
      break;
    }
    case GST_EVENT_TAG:{
      GstTagList *tags;
      gst_event_parse_tag (event, &tags);
      gst_parse_pad_update_tags (parsepad, tags);
      break;
    }
    case GST_EVENT_STREAM_START:{
      /* The event may be replaced (a GstStream gets attached), so write
       * the possibly-new event back into the probe info */
      GST_PAD_PROBE_INFO_DATA (info) =
          gst_parse_pad_stream_start_event (parsepad, event);
      break;
    }
    case GST_EVENT_STREAM_COLLECTION:{
      GstStreamCollection *collection = NULL;
      gst_event_parse_stream_collection (event, &collection);
      gst_parse_pad_update_stream_collection (parsepad, collection);
      gst_element_post_message (GST_ELEMENT (parsepad->parsebin),
          gst_message_new_stream_collection (GST_OBJECT (parsepad->parsebin),
              collection));
      break;
    }
    case GST_EVENT_EOS:{
      GST_DEBUG_OBJECT (pad, "we received EOS");

      /* Check if all pads are drained.
       * * If there is no next group, we will let the EOS go through.
       * * If there is a next group but the current group isn't completely
       *   drained, we will drop the EOS event.
       * * If there is a next group to expose and this was the last non-drained
       *   pad for that group, we will remove the ghostpad of the current group
       *   first, which unlinks the peer and so drops the EOS. */
      forwardit = gst_parse_pad_handle_eos (parsepad);
      /* fall through to default (no further handling needed) */
    }
    default:
      break;
  }
  gst_object_unref (parent);
  if (forwardit)
    return GST_PAD_PROBE_OK;
  else
    return GST_PAD_PROBE_DROP;
}
/* (Un)blocks a GstParsePad by installing (removing) a blocking probe on the
 * target of the ghostpad. Blocked pads are collected in
 * parsebin->blocked_pads (with an extra ref each) so unblock_pads() can
 * release them all on shutdown. Takes the DYN lock itself. */
static void
gst_parse_pad_set_blocked (GstParsePad * parsepad, gboolean blocked)
{
  GstParseBin *parsebin = parsepad->parsebin;
  GstPad *opad;

  DYN_LOCK (parsebin);

  GST_DEBUG_OBJECT (parsepad, "blocking pad: %d", blocked);

  opad = gst_ghost_pad_get_target (GST_GHOST_PAD_CAST (parsepad));
  if (!opad)
    goto out;

  /* do not block if shutting down.
   * we do not consider/expect it blocked further below, but use other trick */
  if (!blocked || !parsebin->shutdown) {
    if (blocked) {
      if (parsepad->block_id == 0)
        parsepad->block_id =
            gst_pad_add_probe (opad,
            GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM |
            GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM, source_pad_blocked_cb,
            gst_object_ref (parsepad), (GDestroyNotify) gst_object_unref);
    } else {
      if (parsepad->block_id != 0) {
        gst_pad_remove_probe (opad, parsepad->block_id);
        parsepad->block_id = 0;
      }
      parsepad->blocked = FALSE;
    }
  }

  if (blocked) {
    if (parsebin->shutdown) {
      /* deactivate to force flushing state to prevent NOT_LINKED errors */
      gst_pad_set_active (GST_PAD_CAST (parsepad), FALSE);
      /* note that deactivating the target pad would have no effect here,
       * since elements are typically connected first (and pads exposed),
       * and only then brought to PAUSED state (so pads activated) */
    } else {
      /* remember the pad (with a ref) so it can be unblocked at shutdown */
      gst_object_ref (parsepad);
      parsebin->blocked_pads =
          g_list_prepend (parsebin->blocked_pads, parsepad);
    }
  } else {
    GList *l;

    if ((l = g_list_find (parsebin->blocked_pads, parsepad))) {
      /* drop the list's reference taken when the pad was blocked */
      gst_object_unref (parsepad);
      parsebin->blocked_pads = g_list_delete_link (parsebin->blocked_pads, l);
    }
  }
  gst_object_unref (opad);

out:
  DYN_UNLOCK (parsebin);
}
/* Activates @parsepad for @chain and immediately blocks it, so that no data
 * flows downstream until gst_parse_pad_unblock() is called (typically once
 * the pads are exposed). */
static void
gst_parse_pad_activate (GstParsePad * parsepad, GstParseChain * chain)
{
  g_return_if_fail (chain != NULL);

  parsepad->chain = chain;
  gst_pad_set_active (GST_PAD_CAST (parsepad), TRUE);
  gst_parse_pad_set_blocked (parsepad, TRUE);
}
/* Releases the blocking probe installed by gst_parse_pad_activate(),
 * allowing data to flow downstream. */
static void
gst_parse_pad_unblock (GstParsePad * parsepad)
{
  gst_parse_pad_set_blocked (parsepad, FALSE);
}
/* Query function of the internal pad of a GstParsePad. While the pad is not
 * yet exposed (and the chain is alive), the application first gets a chance
 * to answer via the autoplug-query signal; if nothing handled the query,
 * default query handling is used. */
static gboolean
gst_parse_pad_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
  GstParsePad *parsepad = GST_PARSE_PAD (parent);
  gboolean ret = FALSE;

  CHAIN_MUTEX_LOCK (parsepad->chain);
  if (!parsepad->exposed && !parsepad->parsebin->shutdown
      && !parsepad->chain->deadend && parsepad->chain->elements) {
    /* First element of the chain is passed along with the signal. */
    GstParseElement *pelem = parsepad->chain->elements->data;

    ret = FALSE;
    GST_DEBUG_OBJECT (parsepad->parsebin,
        "calling autoplug-query for %s (element %s): %" GST_PTR_FORMAT,
        GST_PAD_NAME (parsepad), GST_ELEMENT_NAME (pelem->element), query);
    g_signal_emit (G_OBJECT (parsepad->parsebin),
        gst_parse_bin_signals[SIGNAL_AUTOPLUG_QUERY], 0, parsepad,
        pelem->element, query, &ret);

    if (ret)
      GST_DEBUG_OBJECT (parsepad->parsebin,
          "autoplug-query returned %d: %" GST_PTR_FORMAT, ret, query);
    else
      GST_DEBUG_OBJECT (parsepad->parsebin, "autoplug-query returned %d", ret);
  }
  CHAIN_MUTEX_UNLOCK (parsepad->chain);

  /* If exposed or nothing handled the query use the default handler */
  if (!ret)
    ret = gst_pad_query_default (pad, parent, query);

  return ret;
}
/*gst_parse_pad_new:
 *
 * Creates a new GstParsePad for the given pad.
 *
 * The parsepad is a src ghostpad built from the parsebin source pad
 * template. A query function and a downstream event probe are installed on
 * its internal proxy pad so queries and events can be intercepted before
 * reaching downstream (see gst_parse_pad_query() / gst_parse_pad_event()).
 */
static GstParsePad *
gst_parse_pad_new (GstParseBin * parsebin, GstParseChain * chain)
{
  GstParsePad *parsepad;
  GstProxyPad *ppad;
  GstPadTemplate *pad_tmpl;

  GST_DEBUG_OBJECT (parsebin, "making new parsepad");

  pad_tmpl = gst_static_pad_template_get (&parse_bin_src_template);
  parsepad =
      g_object_new (GST_TYPE_PARSE_PAD, "direction", GST_PAD_SRC,
      "template", pad_tmpl, NULL);
  gst_ghost_pad_construct (GST_GHOST_PAD_CAST (parsepad));
  parsepad->chain = chain;
  parsepad->parsebin = parsebin;
  gst_object_unref (pad_tmpl);

  ppad = gst_proxy_pad_get_internal (GST_PROXY_PAD (parsepad));
  gst_pad_set_query_function (GST_PAD_CAST (ppad), gst_parse_pad_query);

  /* Add downstream event probe */
  GST_LOG_OBJECT (parsepad, "Adding event probe on internal pad %"
      GST_PTR_FORMAT, ppad);
  gst_pad_add_probe (GST_PAD_CAST (ppad),
      GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, gst_parse_pad_event, parsepad, NULL);
  gst_object_unref (ppad);

  return parsepad;
}
/* Frees a GstPendingPad: removes any still-attached event probe and
 * notify::caps handler from the pad, then drops the pad reference and the
 * structure itself. */
static void
gst_pending_pad_free (GstPendingPad * ppad)
{
  g_assert (ppad);
  g_assert (ppad->pad);

  if (ppad->event_probe_id != 0)
    gst_pad_remove_probe (ppad->pad, ppad->event_probe_id);
  if (ppad->notify_caps_id)
    g_signal_handler_disconnect (ppad->pad, ppad->notify_caps_id);
  gst_object_unref (ppad->pad);
  g_slice_free (GstPendingPad, ppad);
}
/*****
 * Element add/remove
 *****/

/* Removes the blocking probe from every pad in parsebin->blocked_pads,
 * deactivates the pads (making them flushing to prevent NOT_LINKED errors)
 * and clears the list, dropping the list's references.
 *
 * call with dyn_lock held */
static void
unblock_pads (GstParseBin * parsebin)
{
  GList *tmp;

  GST_LOG_OBJECT (parsebin, "unblocking pads");

  for (tmp = parsebin->blocked_pads; tmp; tmp = tmp->next) {
    GstParsePad *parsepad = (GstParsePad *) tmp->data;
    GstPad *opad;

    opad = gst_ghost_pad_get_target (GST_GHOST_PAD_CAST (parsepad));
    if (!opad)
      continue;

    GST_DEBUG_OBJECT (parsepad, "unblocking");
    if (parsepad->block_id != 0) {
      gst_pad_remove_probe (opad, parsepad->block_id);
      parsepad->block_id = 0;
    }
    parsepad->blocked = FALSE;
    /* make flushing, prevent NOT_LINKED */
    gst_pad_set_active (GST_PAD_CAST (parsepad), FALSE);
    /* drop the reference taken when the pad was added to blocked_pads */
    gst_object_unref (parsepad);
    gst_object_unref (opad);
    GST_DEBUG_OBJECT (parsepad, "unblocked");
  }

  /* clear, no more blocked pads */
  g_list_free (parsebin->blocked_pads);
  parsebin->blocked_pads = NULL;
}
/* GstElement::change_state implementation.
 *
 * Going up: verifies the internal typefind element exists
 * (NULL->READY), clears the shutdown flag and connects to typefind's
 * "have-type" signal (READY->PAUSED).
 * Going down: disconnects "have-type", sets the shutdown flag and unblocks
 * all pads (PAUSED->READY), frees the parse chain after the parent's state
 * change, and joins the async cleanup thread (READY->NULL). */
static GstStateChangeReturn
gst_parse_bin_change_state (GstElement * element, GstStateChange transition)
{
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
  GstParseBin *parsebin = GST_PARSE_BIN (element);
  GstParseChain *chain_to_free = NULL;

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      if (parsebin->typefind == NULL)
        goto missing_typefind;
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      /* Make sure we've cleared all existing chains */
      EXPOSE_LOCK (parsebin);
      if (parsebin->parse_chain) {
        gst_parse_chain_free (parsebin->parse_chain);
        parsebin->parse_chain = NULL;
      }
      EXPOSE_UNLOCK (parsebin);
      DYN_LOCK (parsebin);
      GST_LOG_OBJECT (parsebin, "clearing shutdown flag");
      parsebin->shutdown = FALSE;
      DYN_UNLOCK (parsebin);
      parsebin->have_type = FALSE;

      /* connect a signal to find out when the typefind element found
       * a type */
      parsebin->have_type_id =
          g_signal_connect (parsebin->typefind, "have-type",
          G_CALLBACK (type_found), parsebin);
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      if (parsebin->have_type_id)
        g_signal_handler_disconnect (parsebin->typefind,
            parsebin->have_type_id);
      parsebin->have_type_id = 0;

      DYN_LOCK (parsebin);
      GST_LOG_OBJECT (parsebin, "setting shutdown flag");
      parsebin->shutdown = TRUE;
      unblock_pads (parsebin);
      DYN_UNLOCK (parsebin);
      /* fall through */
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  if (G_UNLIKELY (ret == GST_STATE_CHANGE_FAILURE))
    goto activate_failed;

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      /* Detach the chain under the EXPOSE lock; the final free happens
       * after the lock is released. */
      EXPOSE_LOCK (parsebin);
      if (parsebin->parse_chain) {
        chain_to_free = parsebin->parse_chain;
        gst_parse_chain_free_internal (parsebin->parse_chain, TRUE);
        parsebin->parse_chain = NULL;
      }
      EXPOSE_UNLOCK (parsebin);
      if (chain_to_free)
        gst_parse_chain_free (chain_to_free);
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
      /* make sure the cleanup thread (if any) is done before going to NULL */
      g_mutex_lock (&parsebin->cleanup_lock);
      if (parsebin->cleanup_thread) {
        g_thread_join (parsebin->cleanup_thread);
        parsebin->cleanup_thread = NULL;
      }
      g_mutex_unlock (&parsebin->cleanup_lock);
      /* fall through */
    default:
      break;
  }

  return ret;

/* ERRORS */
missing_typefind:
  {
    gst_element_post_message (element,
        gst_missing_element_message_new (element, "typefind"));
    GST_ELEMENT_ERROR (parsebin, CORE, MISSING_PLUGIN, (NULL),
        ("no typefind!"));
    return GST_STATE_CHANGE_FAILURE;
  }
activate_failed:
  {
    GST_DEBUG_OBJECT (element,
        "element failed to change states -- activation problem?");
    return GST_STATE_CHANGE_FAILURE;
  }
}
/* GstBin::handle_message implementation. ERROR messages are dropped while
 * shutting down, and errors from elements on the `filtered` list are stored
 * in `filtered_errors` instead of being forwarded. Everything else goes to
 * the parent class. */
static void
gst_parse_bin_handle_message (GstBin * bin, GstMessage * msg)
{
  GstParseBin *parsebin = GST_PARSE_BIN (bin);
  gboolean drop = FALSE;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ERROR:{
      /* Don't pass errors when shutting down. Sometimes,
       * elements can generate spurious errors because we set the
       * output pads to flushing, and they can't detect that if they
       * send an event at exactly the wrong moment */
      DYN_LOCK (parsebin);
      drop = parsebin->shutdown;
      DYN_UNLOCK (parsebin);

      if (!drop) {
        GST_OBJECT_LOCK (parsebin);
        drop =
            (g_list_find (parsebin->filtered, GST_MESSAGE_SRC (msg)) != NULL);
        if (drop)
          /* stash the error (with a ref) instead of posting it */
          parsebin->filtered_errors =
              g_list_prepend (parsebin->filtered_errors, gst_message_ref (msg));
        GST_OBJECT_UNLOCK (parsebin);
      }
      break;
    }
    default:
      break;
  }

  if (drop)
    gst_message_unref (msg);
  else
    GST_BIN_CLASS (parent_class)->handle_message (bin, msg);
}
/* Plugin entry point: initialises the parsebin debug category and registers
 * the "parsebin" element (rank NONE, i.e. never autoplugged). */
gboolean
gst_parse_bin_plugin_init (GstPlugin * plugin)
{
  GST_DEBUG_CATEGORY_INIT (gst_parse_bin_debug, "parsebin", 0, "parser bin");

  return gst_element_register (plugin, "parsebin", GST_RANK_NONE,
      GST_TYPE_PARSE_BIN);
}
|
cuba-platform/cuba-thesis | modules/core/src/com/haulmont/cuba/security/app/LoginWorker.java | /*
* Copyright (c) 2008-2013 Haulmont. All rights reserved.
* Use is subject to license terms, see http://www.cuba-platform.com/license for details.
*/
package com.haulmont.cuba.security.app;
import com.haulmont.cuba.security.entity.User;
import com.haulmont.cuba.security.global.LoginException;
import com.haulmont.cuba.security.global.UserSession;
import javax.annotation.Nullable;
import java.util.Locale;
import java.util.Map;
import java.util.UUID;
/**
* Interface to {@link com.haulmont.cuba.security.app.LoginWorkerBean}
*
* @author krivopustov
* @version $Id$
*/
public interface LoginWorker {

    /** Bean name of the worker implementation. */
    String NAME = "cuba_LoginWorker";

    /**
     * @see LoginService#login(String, String, java.util.Locale)
     */
    UserSession login(String login, String password, Locale locale) throws LoginException;

    /**
     * @see LoginService#login(String, String, java.util.Locale, java.util.Map)
     */
    UserSession login(String login, String password, Locale locale, Map<String, Object> params) throws LoginException;

    /**
     * @see LoginService#loginTrusted(String, String, java.util.Locale)
     */
    UserSession loginTrusted(String login, String password, Locale locale) throws LoginException;

    /**
     * @see LoginService#loginTrusted(String, String, java.util.Locale, java.util.Map)
     */
    UserSession loginTrusted(String login, String password, Locale locale, Map<String, Object> params)
            throws LoginException;

    /**
     * @see LoginService#loginByRememberMe(String, String, java.util.Locale)
     */
    UserSession loginByRememberMe(String login, String rememberMeToken, Locale locale) throws LoginException;

    /**
     * @see LoginService#loginByRememberMe(String, String, java.util.Locale, java.util.Map)
     */
    UserSession loginByRememberMe(String login, String rememberMeToken, Locale locale, Map<String, Object> params)
            throws LoginException;

    /**
     * @see LoginService#logout()
     */
    void logout();

    /**
     * @see LoginService#substituteUser(User)
     */
    UserSession substituteUser(User substitutedUser);

    /**
     * Returns an active user session by its id, or {@code null} if not found.
     *
     * @see LoginService#getSession(UUID)
     */
    @Nullable
    UserSession getSession(UUID sessionId);

    /**
     * Log in from a middleware component. This method should not be exposed to any client tier.
     *
     * @param login login of a system user
     * @return system user session that is not replicated in cluster
     * @throws LoginException in case of unsuccessful log in
     */
    UserSession loginSystem(String login) throws LoginException;

    /**
     * @see com.haulmont.cuba.security.app.LoginService#checkRememberMe(String, String)
     */
    boolean checkRememberMe(String login, String rememberMeToken);
}
alexpan0610/Elastos.ELA.SPV.Cpp | Test/PayloadRecordTest.cpp | // Copyright (c) 2012-2018 The Elastos Open Source Project
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#define CATCH_CONFIG_MAIN
#include "catch.hpp"
#include "TestHelper.h"
#include <SDK/Plugin/Transaction/Payload/Record.h>
#include <SDK/Common/Log.h>
using namespace Elastos::ElaWallet;
// Round-trip tests for the Record transaction payload: binary
// (de)serialization and JSON conversion must both preserve the record type
// and record data. Version argument is 0 on both ends in each round trip.
TEST_CASE("Record test", "Record") {
	Log::registerMultiLogger();

	SECTION("Serialize and deserialize") {
		// Random type string (20 chars) and random data (50 bytes).
		Record p1(getRandString(20), getRandBytes(50)), p2;

		ByteStream stream;
		p1.Serialize(stream, 0);
		REQUIRE(p2.Deserialize(stream, 0));
		REQUIRE(p1.GetRecordType() == p2.GetRecordType());
		REQUIRE((p1.GetRecordData() == p2.GetRecordData()));
	}

	SECTION("to json and from json") {
		Record p1(getRandString(20), getRandBytes(50)), p2;

		nlohmann::json p1Json = p1.ToJson(0);
		p2.FromJson(p1Json, 0);
		REQUIRE(p1.GetRecordType() == p2.GetRecordType());
		REQUIRE(p1.GetRecordData() == p2.GetRecordData());
	}
}
|
internaru/Pinetree_P | lsp_shiloh/common/devices/pip/csc/include/csc_api.h | /******************************************************************************
* Copyright (c) 2009 - 2010 Marvell International, Ltd. All Rights Reserved
*
* Marvell Confidential
******************************************************************************/
#ifndef __CSC_FUNC_H_INCLUDED
#define __CSC_FUNC_H_INCLUDED

#include <stdint.h>
#include "asic.h"
#include "logger.h"
#include "csc_cfg.h"

/* Configure the CSC (color space conversion) hardware block from @csc_cfg. */
void csc_setup(CSC_DAVINCI_REGS_t *csc_reg, csc_cfg_t *csc_cfg);

/* Print the current CSC register contents (debugging aid). */
void csc_print(CSC_DAVINCI_REGS_t *csc_reg);

/* Load the 9-entry color conversion coefficient matrix. */
void csc_loadcoeff(CSC_DAVINCI_REGS_t *csc_reg, uint32_t coeff[9]);

/* Load the input lookup table plus three overflow entries
 * (presumably one per color channel -- TODO confirm against hardware spec). */
void csc_loadinlut(CSC_DAVINCI_REGS_t *csc_reg, uint32_t *ptr, uint32_t overflow [3]);

/* Load the output lookup table plus three overflow entries. */
void csc_loadoutlut(CSC_DAVINCI_REGS_t *csc_reg, uint32_t *ptr, uint32_t overflow [3]);

#endif
|
2276089666/DataStructuresAndAlgorithms | algorithms/manacher/Manacher.java | <filename>algorithms/manacher/Manacher.java<gh_stars>1-10
package manacher;
/**
* @Author ws
* @Date 2021/5/6 15:42
* @Version 1.0
*/
/**
 * Length of the longest palindromic substring, computed with Manacher's
 * algorithm in O(N) time.
 */
public class Manacher {

    /**
     * Returns the length of the longest palindromic substring of {@code str}.
     * Returns 0 for {@code null} or empty input.
     *
     * The total number of character comparisons is bounded by the rightmost
     * reach of any palindrome, which grows monotonically up to N, so the
     * whole scan is O(N).
     */
    public static int getMaxLength(String str) {
        if (str == null || str.length() == 0) {
            return 0;
        }
        // Interleave '#' separators so even- and odd-length palindromes are
        // handled uniformly on the expanded array.
        char[] expanded = manacherString(str);
        int[] radius = new int[expanded.length]; // palindrome radius per center
        int center = -1;     // center of the palindrome reaching furthest right
        int rightBound = -1; // first index NOT covered by that palindrome
        int best = Integer.MIN_VALUE;
        for (int i = 0; i < expanded.length; i++) {
            // Seed the radius. Inside the known palindrome we can reuse the
            // mirror position's answer, capped by the distance to the right
            // boundary; outside it we start from the trivial radius of 1.
            radius[i] = i >= rightBound ? 1 : Math.min(radius[2 * center - i], rightBound - i);
            // Expand around i while the mirrored characters keep matching.
            while (i + radius[i] < expanded.length && i - radius[i] > -1
                    && expanded[i + radius[i]] == expanded[i - radius[i]]) {
                radius[i]++;
            }
            // Remember the palindrome that reaches furthest to the right.
            if (i + radius[i] > rightBound) {
                rightBound = i + radius[i];
                center = i;
            }
            best = Math.max(best, radius[i]);
        }
        // A radius of r in the '#'-expanded array corresponds to a palindrome
        // of length r - 1 in the original string.
        return best - 1;
    }

    /** Builds the separator-interleaved form, e.g. "ab" -> "#a#b#". */
    private static char[] manacherString(String str) {
        char[] source = str.toCharArray();
        char[] expanded = new char[str.length() * 2 + 1];
        for (int i = 0; i < expanded.length; i++) {
            expanded[i] = (i % 2 == 0) ? '#' : source[i / 2];
        }
        return expanded;
    }

    public static void main(String[] args) {
        String input = "22133125";
        int longest = getMaxLength(input);
        System.out.println(longest);
    }
}
|
bpedersen2/python-gr | gr/runtime_helper.py | <gh_stars>10-100
# coding: utf-8
"""
"""
import ctypes
import os
import sys
def required_runtime_version():
    """Return the minimum required GR runtime version as a string."""
    # TODO: load runtime version from file
    minimum_version = '0.58.0'
    return minimum_version
def version_string_to_tuple(version_string):
    """Convert a version string (or UTF-8 bytes) into a tuple of ints.

    Accepts an optional leading ``'v'``, ignores any ``-suffix`` (as found in
    ``git describe`` output) and treats a ``.postN`` segment like a regular
    numeric component.
    """
    text = version_string
    if not isinstance(text, str):
        text = text.decode('utf-8')
    if text[:1] == 'v':
        text = text[1:]
    text = text.split('-', 1)[0]
    text = text.replace('.post', '.')
    return tuple(map(int, text.split('.')))
def load_runtime(search_dirs=(), silent=False, lib_name='libGR'):
    """Locate and load a GR runtime shared library via ctypes.

    Searches ``search_dirs``, the ``GRLIB`` environment variable, the package
    directory and a set of well-known install locations for ``lib_name`` with
    a platform-appropriate extension.  For ``libGR`` the reported runtime
    version is checked against :func:`required_runtime_version`.

    Returns the loaded :class:`ctypes.CDLL`, or ``None`` if no library was
    found (printing an installation hint to stderr unless ``silent``).
    Raises ``OSError`` if a candidate exists but cannot be loaded and
    ``silent`` is false.
    """
    if sys.platform == "win32":
        library_extensions = (".dll",)
        library_directory = "bin"
    elif sys.platform == "darwin":
        # macOS: prefer .dylib but also accept .so builds
        library_extensions = (".dylib", ".so")
        library_directory = "lib"
    else:
        library_extensions = (".so",)
        library_directory = "lib"
    search_directories = list(search_dirs)
    search_directories.extend([
        os.environ.get('GRLIB'),
        os.path.realpath(os.path.join(os.path.dirname(__file__), library_directory)),
        os.path.realpath(os.path.join(os.path.dirname(__file__), '..', 'build', 'lib', 'gr')),
    ])
    if sys.platform != "win32":
        search_directories.extend(
            [
                os.path.join(os.path.expanduser('~'), 'gr', 'lib'),
                '/usr/local/gr/lib',
                '/usr/gr/lib',
            ]
        )
    search_path = os.environ.get('PATH', '')
    for directory in search_directories:
        # os.environ.get('GRLIB') may have contributed None
        if directory is None:
            continue
        if not os.path.isdir(directory):
            continue
        directory = os.path.abspath(directory)
        for library_extension in library_extensions:
            library_filename = os.path.join(directory, lib_name + library_extension)
            if os.path.isfile(library_filename):
                if sys.platform == "win32":
                    # make dependent DLLs in the same directory resolvable
                    os.environ["PATH"] = search_path + ";" + directory
                try:
                    library = ctypes.CDLL(library_filename)
                except OSError:
                    # library exists but could not be loaded (e.g. due to missing dependencies)
                    if silent:
                        return None
                    else:
                        raise
                if lib_name == 'libGR':
                    library.gr_version.argtypes = []
                    library.gr_version.restype = ctypes.c_char_p
                    library_version_string = library.gr_version()
                    library_version = version_string_to_tuple(library_version_string)
                    required_version = version_string_to_tuple(required_runtime_version())
                    # compatible = same major version and at least the required version
                    version_compatible = library_version[0] == required_version[0] and library_version >= required_version
                    if version_compatible:
                        return library
                # TODO: other libraries, such as libGRM, require some form of
                # validation as well, but currently no mechanism for this has
                # been implemented
                # NOTE(review): this return is also reached for a libGR whose
                # version check failed above -- verify that this is intended.
                return library
    if not silent:
        sys.stderr.write("""GR runtime not found.
Please visit https://gr-framework.org and install at least the following version of the GR runtime:
    {}
Also, please ensure that you have all required dependencies:
  Debian/Ubuntu: apt install libxt6 libxrender1 libgl1-mesa-glx libqt5widgets5
  CentOS 7: yum install libXt libXrender libXext mesa-libGL qt5-qtbase-gui
  Fedora 28: dnf install -y libXt libXrender libXext mesa-libGL qt5-qtbase-gui
  openSUSE 42.3 / 15: zypper install -y libXt6 libXrender1 libXext6 Mesa-libGL1 libQt5Widgets5
  FreeBSD: pkg install libXt libXrender libXext mesa-libs qt5
""".format(required_runtime_version()))
    return None
def register_gksterm():
    """Register the bundled GKSTerm.app with macOS Launch Services.

    No-op on platforms other than macOS.
    """
    if sys.platform == 'darwin':
        # register GKSTerm.app on macOS
        app = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'GKSTerm.app'))
        os.system('/System/Library/Frameworks/CoreServices.framework/Frameworks/LaunchServices.framework/Support/lsregister -f {}'.format(app))
|
maxima-us/crypto-dom | src/crypto_dom/binance/__write_symbols.py | <reponame>maxima-us/crypto-dom<filename>src/crypto_dom/binance/__write_symbols.py<gh_stars>0
import asyncio
import json
import httpx
from crypto_dom.binance.market_data.exchange_info import URL
async def _write_binance_symbols(folder=""):
    """Fetch Binance exchange info and dump the symbol list to disk.

    Writes two files whose names are prefixed by ``folder``:

    * ``<folder>_data_symbols.json`` -- the raw ``symbols`` payload
    * ``<folder>_definitions_symbols.py`` -- an auto-generated ``SYMBOL``
      ``Literal`` type listing every symbol name

    ``folder`` defaults to ``""`` (current directory); previously it was a
    required argument, which made the ``__main__`` block below crash with a
    ``TypeError`` since it called this coroutine without arguments.
    """
    async with httpx.AsyncClient() as client:
        r = await client.get(URL)
        rjson = r.json()
        symbols_data = rjson["symbols"]
        symbols_list = [k["symbol"] for k in symbols_data]

    with open(f"{folder}_data_symbols.json", "w") as file:
        json.dump(symbols_data, file)

    with open(f"{folder}_definitions_symbols.py", "w") as file:
        file.write("# This file is auto-generated\n\n")
        file.write("from typing_extensions import Literal\n\n")
        file.write("SYMBOL = Literal[\n")
        file.writelines(map(lambda x: f"{4*' '}'{str(x)}',\n", symbols_list))
        file.write("]")
if __name__ == "__main__":
    # The coroutine takes a filename-prefix argument; pass an explicit empty
    # prefix so the generated files land in the current directory.
    # (The previous call passed no argument and raised a TypeError.)
    asyncio.run(_write_binance_symbols(""))
zytx/pyblog | joplin/apps.py | from django.apps import AppConfig
class JoplinConfig(AppConfig):
    """Django application configuration for the ``joplin`` app."""
    # Dotted Python path of the application this configuration applies to.
    name = 'joplin'
genewoo/ruby-china | app/models/authorization.rb | <reponame>genewoo/ruby-china<gh_stars>0
# External (OAuth) account credential, embedded inside a User document.
class Authorization
  include Mongoid::Document
  include Mongoid::Timestamps
  include Mongoid::BaseModel

  # Provider name (e.g. an omniauth strategy key) and the user's id there.
  field :provider
  field :uid, type: String

  embedded_in :user, inverse_of: :authorizations

  validates :uid, :provider, presence: true
  # A given provider account may only be linked once.
  validates :uid, uniqueness: { scope: :provider }
end
|
yeeeeeeti/3D_feature_extract | src/asiAlgo/features/asiAlgo_ExtractFeaturesResult.cpp | //-----------------------------------------------------------------------------
// Created on: 04 February 2019
//-----------------------------------------------------------------------------
// Copyright (c) 2019-present, <NAME>
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of the copyright holder(s) nor the
// names of all contributors may be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//-----------------------------------------------------------------------------
// Own include
#include <asiAlgo_ExtractFeaturesResult.h>
//-----------------------------------------------------------------------------
//! Collects the indices of all faces referenced by any extracted feature by
//! uniting the face sets of every feature of every type into `faceIds`.
void
asiAlgo_ExtractFeaturesResult::GetFaceIndices(TColStd_PackedMapOfInteger& faceIds) const
{
  // Union over all feature types and, per type, over all feature ids.
  for ( t_data::Iterator dit(m_data); dit.More(); dit.Next() )
    for ( t_features::Iterator fit( dit.Value() ); fit.More(); fit.Next() )
      faceIds.Unite( fit.Value() );
}
//-----------------------------------------------------------------------------
//! Find-or-create accessor: returns a modifiable face-index map for the
//! feature with id `featId` of the given `type`, creating empty map entries
//! on demand for both the type and the feature id.
TColStd_PackedMapOfInteger&
asiAlgo_ExtractFeaturesResult::FindFeature(const int type,
                                           const int featId)
{
  // Check if the type of interest is already settled. If not, do it now
  t_features* featuresPtr = m_data.ChangeSeek(type);
  if ( featuresPtr == NULL )
    featuresPtr = m_data.Bound( type, t_features() );

  TColStd_PackedMapOfInteger* mapPtr = (*featuresPtr).ChangeSeek(featId);

  // Check if the feature id of interest is already settled. If not, do it now
  if ( mapPtr == NULL )
    mapPtr = (*featuresPtr).Bound( featId, TColStd_PackedMapOfInteger() );
  //
  return (*mapPtr);
}
//-----------------------------------------------------------------------------
//! Returns true if at least one feature of the given `type` is stored.
bool asiAlgo_ExtractFeaturesResult::ContainsFeaturesOfType(const int type) const
{
  return m_data.IsBound(type);
}
//-----------------------------------------------------------------------------
//! Returns true if a feature with id `featId` of the given `type` exists.
bool asiAlgo_ExtractFeaturesResult::ContainsFeature(const int type,
                                                    const int featId) const
{
  const t_features* featuresPtr = m_data.Seek(type);
  if ( featuresPtr == NULL )
    return false;

  return (*featuresPtr).IsBound(featId);
}
mcopik/perf-taint | benchmarks/milc/milc_qcd-7.8.1/libraries/include/inline_C_grow4wvecs.h | /***************** grow4wvecs.c (in su3.a) ****************************
* *
* If sum=0, *
* Grow and add four wilson_vectors *
* If sum=1, *
* Grow and sum four wilson_vectors to another wilson_vector *
* void grow_add_four_wvecs( wilson_vector *a, half_wilson_vector *b1, *
* half_wilson_vector *b2, half_wilson_vector *b3, *
* half_wilson_vector *b4, int sign, int sum ) *
* A <- B1 + B2 + B3 + B4 or *
* A <- A + B1 + B2 + B3 + B4 *
* B1 is expanded using gamma_x, B2 using gamma_y, etc. *
*/
/* grow and sum four wilson_vectors
 *
 *   sum == 0:  A <- B1 + B2 + B3 + B4
 *   sum == 1:  A <- A + B1 + B2 + B3 + B4
 *
 * Hand-inlined equivalent of wp_grow()/wp_grow_add() (see the header
 * comment above): each half_wilson_vector is expanded to a full
 * wilson_vector using gamma_x (b1), gamma_y (b2), gamma_z (b3) and
 * gamma_t (b4); `sign` selects the direction (PLUS -> *UP, else *DOWN).
 * Declares a local loop index `_i`; all arguments are evaluated more than
 * once, so avoid side effects in them. */
#define _inline_C_grow_add_four_wvecs( a, b1, b2, b3, b4, sign, sum ) {\
  int _i;\
  if((sum)==0)\
    {\
      /* wp_grow( b1,a,XUP,sign); */\
\
      /* case XUP: */\
      if((sign)==PLUS)\
        {\
          for(_i=0;_i<3;_i++){\
            (a)->d[0].c[_i] = (b1)->h[0].c[_i];\
            (a)->d[1].c[_i] = (b1)->h[1].c[_i];\
            TIMESMINUSI( (b1)->h[0].c[_i], (a)->d[3].c[_i]);\
            TIMESMINUSI( (b1)->h[1].c[_i], (a)->d[2].c[_i]);\
          }\
        }\
      else\
        {\
          /* case XDOWN: */\
          for(_i=0;_i<3;_i++){\
            (a)->d[0].c[_i] = (b1)->h[0].c[_i];\
            (a)->d[1].c[_i] = (b1)->h[1].c[_i];\
            TIMESPLUSI( (b1)->h[0].c[_i], (a)->d[3].c[_i]);\
            TIMESPLUSI( (b1)->h[1].c[_i], (a)->d[2].c[_i]);\
          }\
        }\
    }\
  else\
    {\
      /*wp_grow_add( b1,a,XUP,sign); */\
\
      /* case XUP: */\
      if((sign)==PLUS)\
        {\
          for(_i=0;_i<3;_i++){\
            CSUM( (a)->d[0].c[_i], (b1)->h[0].c[_i]);\
            CSUM( (a)->d[1].c[_i], (b1)->h[1].c[_i]);\
            CSUM_TMI( (a)->d[2].c[_i], (b1)->h[1].c[_i] );\
            CSUM_TMI( (a)->d[3].c[_i], (b1)->h[0].c[_i] );\
          }\
        }\
      else\
        {\
          /* case XDOWN: */\
          for(_i=0;_i<3;_i++){\
            CSUM( (a)->d[0].c[_i], (b1)->h[0].c[_i]);\
            CSUM( (a)->d[1].c[_i], (b1)->h[1].c[_i]);\
            CSUM_TPI( (a)->d[2].c[_i], (b1)->h[1].c[_i] );\
            CSUM_TPI( (a)->d[3].c[_i], (b1)->h[0].c[_i] );\
          }\
        }\
    }\
\
  /* wp_grow_add( b2,a,YUP,sign); */\
\
  if((sign)==PLUS)\
    {\
      /* case YUP: */\
      for(_i=0;_i<3;_i++){\
        CSUM( (a)->d[0].c[_i], (b2)->h[0].c[_i]);\
        CSUM( (a)->d[1].c[_i], (b2)->h[1].c[_i]);\
        CSUM( (a)->d[2].c[_i], (b2)->h[1].c[_i]);\
        CSUB( (a)->d[3].c[_i], (b2)->h[0].c[_i], (a)->d[3].c[_i] );\
      }\
    }\
  else\
    {\
      /* case YDOWN: */\
      for(_i=0;_i<3;_i++){\
        CSUM( (a)->d[0].c[_i], (b2)->h[0].c[_i]);\
        CSUM( (a)->d[1].c[_i], (b2)->h[1].c[_i]);\
        CSUB( (a)->d[2].c[_i], (b2)->h[1].c[_i], (a)->d[2].c[_i] );\
        CSUM( (a)->d[3].c[_i], (b2)->h[0].c[_i]);\
      }\
    }\
\
  /* wp_grow_add( b3,a,ZUP,sign); */\
\
  if((sign)==PLUS)\
    {\
      /* case ZUP: */\
      for(_i=0;_i<3;_i++){\
        CSUM( (a)->d[0].c[_i], (b3)->h[0].c[_i]);\
        CSUM( (a)->d[1].c[_i], (b3)->h[1].c[_i]);\
        CSUM_TMI( (a)->d[2].c[_i], (b3)->h[0].c[_i] );\
        CSUM_TPI( (a)->d[3].c[_i], (b3)->h[1].c[_i] );\
      }\
    }\
  else\
    {\
      /* case ZDOWN:*/\
      for(_i=0;_i<3;_i++){\
        CSUM( (a)->d[0].c[_i], (b3)->h[0].c[_i]);\
        CSUM( (a)->d[1].c[_i], (b3)->h[1].c[_i]);\
        CSUM_TPI( (a)->d[2].c[_i], (b3)->h[0].c[_i] );\
        CSUM_TMI( (a)->d[3].c[_i], (b3)->h[1].c[_i] );\
      }\
    }\
\
  /* wp_grow_add( b4,a,TUP,sign); */\
\
  if((sign)==PLUS)\
    {\
      /* case TUP: */\
      for(_i=0;_i<3;_i++){\
        CSUM( (a)->d[0].c[_i], (b4)->h[0].c[_i]);\
        CSUM( (a)->d[1].c[_i], (b4)->h[1].c[_i]);\
        CSUM( (a)->d[2].c[_i], (b4)->h[0].c[_i]);\
        CSUM( (a)->d[3].c[_i], (b4)->h[1].c[_i]);\
      }\
    }\
  else\
    {\
      /* case TDOWN: */\
      for(_i=0;_i<3;_i++){\
        CSUM( (a)->d[0].c[_i], (b4)->h[0].c[_i]);\
        CSUM( (a)->d[1].c[_i], (b4)->h[1].c[_i]);\
        CSUB( (a)->d[2].c[_i], (b4)->h[0].c[_i], (a)->d[2].c[_i] );\
        CSUB( (a)->d[3].c[_i], (b4)->h[1].c[_i], (a)->d[3].c[_i] );\
      }\
    }\
}
|
1c7/tactview | tactview-ui/src/main/java/com/helospark/tactview/ui/javafx/uicomponents/propertyvalue/ValueListPropertyValueSetterChainItem.java | package com.helospark.tactview.ui.javafx.uicomponents.propertyvalue;
import java.util.LinkedHashMap;
import java.util.Map;
import com.helospark.lightdi.annotation.Component;
import com.helospark.tactview.core.timeline.effect.EffectParametersRepository;
import com.helospark.tactview.core.timeline.effect.interpolation.ValueProviderDescriptor;
import com.helospark.tactview.core.timeline.effect.interpolation.provider.ValueListElement;
import com.helospark.tactview.core.timeline.effect.interpolation.provider.ValueListProvider;
import com.helospark.tactview.core.timeline.message.KeyframeAddedRequest;
import com.helospark.tactview.ui.javafx.UiCommandInterpreterService;
import com.helospark.tactview.ui.javafx.UiTimelineManager;
import com.helospark.tactview.ui.javafx.commands.impl.AddKeyframeForPropertyCommand;
import com.helospark.tactview.ui.javafx.uicomponents.propertyvalue.contextmenu.ContextMenuAppender;
import javafx.scene.control.ComboBox;
import javafx.scene.control.RadioButton;
import javafx.scene.control.Toggle;
import javafx.scene.control.ToggleGroup;
import javafx.scene.input.MouseButton;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.HBox;
/**
 * Builds the property editor UI for {@link ValueListProvider} descriptors.
 * Providers with at most {@value #COMBOBOX_THRESHOLD} options are rendered
 * as radio buttons, larger ones as a combo box. Selecting a value adds a
 * keyframe for the property at the current timeline position.
 *
 * Fix: removed a leftover debug {@code System.out.println} that printed the
 * currently selected id to stdout on every combo box creation.
 */
@Component
public class ValueListPropertyValueSetterChainItem extends TypeBasedPropertyValueSetterChainItem<ValueListProvider> {
    /** Above this many options a combo box is used instead of radio buttons. */
    private static final int COMBOBOX_THRESHOLD = 3;
    private UiCommandInterpreterService commandInterpreter;
    private EffectParametersRepository effectParametersRepository;
    private UiTimelineManager timelineManager;
    private ContextMenuAppender contextMenuAppender;

    public ValueListPropertyValueSetterChainItem(EffectParametersRepository effectParametersRepository,
            UiCommandInterpreterService commandInterpreter, UiTimelineManager timelineManager, ContextMenuAppender contextMenuAppender) {
        super(ValueListProvider.class);
        this.commandInterpreter = commandInterpreter;
        this.effectParametersRepository = effectParametersRepository;
        this.timelineManager = timelineManager;
        this.contextMenuAppender = contextMenuAppender;
    }

    @Override
    protected EffectLine handle(ValueListProvider valueProvider, ValueProviderDescriptor descriptor) {
        // Pin the raw type to its element type once, instead of casting everywhere.
        ValueListProvider<ValueListElement> typeFixedValueProvider = valueProvider;
        if (typeFixedValueProvider.getElements().size() > COMBOBOX_THRESHOLD) {
            return createCombobox(typeFixedValueProvider, descriptor);
        } else {
            return createRadioButtons(typeFixedValueProvider, descriptor);
        }
    }

    /** One radio button per option; selection changes add a keyframe. */
    private PrimitiveEffectLine createRadioButtons(ValueListProvider<ValueListElement> typeFixedValueProvider, ValueProviderDescriptor descriptor) {
        HBox box = new HBox();
        ToggleGroup group = new ToggleGroup();

        String currentlySelectedId = currentlySelectedId(typeFixedValueProvider.getId());

        Map<String, ValueListElement> elements = typeFixedValueProvider.getElements();
        Map<String, Toggle> toggleMap = new LinkedHashMap<>();
        for (var element : elements.entrySet()) {
            RadioButton radioButton = new RadioButton(element.getValue().getText());
            radioButton.setToggleGroup(group);
            radioButton.setUserData(element.getValue());
            if (element.getKey().equals(currentlySelectedId)) {
                radioButton.setSelected(true);
            }
            toggleMap.put(element.getKey(), radioButton);
            box.getChildren().add(radioButton);
        }

        PrimitiveEffectLine result = PrimitiveEffectLine.builder()
                .withCurrentValueProvider(() -> group.getSelectedToggle().getUserData())
                .withDescriptorId(typeFixedValueProvider.getId())
                .withUpdateFunction(position -> {
                    // Reflect the value stored at the given timeline position.
                    String id = effectParametersRepository.getValueAt(typeFixedValueProvider.getId(), position);
                    if (id != null) {
                        Toggle toggle = toggleMap.get(id);
                        group.selectToggle(toggle);
                    }
                })
                .withVisibleNode(box)
                .withDescriptor(descriptor)
                .withCommandInterpreter(commandInterpreter)
                .withEffectParametersRepository(effectParametersRepository)
                .withUpdateFromValue(value -> {
                    group.selectToggle(toggleMap.get(((ValueListElement) value).getId()));
                })
                .build();

        group.selectedToggleProperty().addListener((a, oldValue, newValue) -> {
            KeyframeAddedRequest keyframeRequest = KeyframeAddedRequest.builder()
                    .withDescriptorId(typeFixedValueProvider.getId())
                    .withGlobalTimelinePosition(timelineManager.getCurrentPosition())
                    .withValue(newValue.getUserData())
                    .withRevertable(true)
                    .build();

            commandInterpreter.sendWithResult(new AddKeyframeForPropertyCommand(effectParametersRepository, keyframeRequest));
        });

        contextMenuAppender.addContextMenu(result, typeFixedValueProvider, descriptor, box);

        return result;
    }

    /** Combo box editor for providers with many options. */
    private PrimitiveEffectLine createCombobox(ValueListProvider<ValueListElement> typeFixedValueProvider, ValueProviderDescriptor descriptor) {
        Map<String, ValueListElement> elements = typeFixedValueProvider.getElements();
        ComboBox<ComboBoxElement> comboBox = new ComboBox<>();

        String currentlySelectedId = currentlySelectedId(typeFixedValueProvider.getId());

        Map<String, ComboBoxElement> comboBoxElements = new LinkedHashMap<>();
        elements.values()
                .stream()
                .forEach(a -> {
                    var entry = new ComboBoxElement(a.getId(), a.getText());
                    comboBox.getItems().add(entry);
                    comboBoxElements.put(a.getId(), entry);
                });
        comboBox.getSelectionModel().select(comboBoxElements.get(currentlySelectedId));
        comboBox.setMaxWidth(200);

        PrimitiveEffectLine result = PrimitiveEffectLine.builder()
                .withCurrentValueProvider(() -> comboBox.getValue())
                .withDescriptorId(typeFixedValueProvider.getId())
                .withUpdateFunction(position -> {
                    String value = effectParametersRepository.getValueAt(typeFixedValueProvider.getId(), position);
                    if (value != null) {
                        comboBox.getSelectionModel().select(comboBoxElements.get(value));
                    }
                })
                .withDisabledUpdater(disabled -> comboBox.setDisable(disabled))
                .withVisibleNode(comboBox)
                .withDescriptor(descriptor)
                .withCommandInterpreter(commandInterpreter)
                .withEffectParametersRepository(effectParametersRepository)
                .withUpdateFromValue(value -> {
                    comboBox.getSelectionModel().select(comboBoxElements.get(((ValueListElement) value).getId()));
                })
                .build();

        comboBox.setOnAction(e -> {
            KeyframeAddedRequest keyframeRequest = KeyframeAddedRequest.builder()
                    .withDescriptorId(typeFixedValueProvider.getId())
                    .withGlobalTimelinePosition(timelineManager.getCurrentPosition())
                    .withValue(elements.get(comboBox.getSelectionModel().getSelectedItem().getId()))
                    .withRevertable(true)
                    .build();

            commandInterpreter.sendWithResult(new AddKeyframeForPropertyCommand(effectParametersRepository, keyframeRequest));
        });

        contextMenuAppender.addContextMenu(result, typeFixedValueProvider, descriptor, comboBox);

        // Do not trigger combobox dropdown to allow context-menu to be viewed
        comboBox.addEventFilter(MouseEvent.MOUSE_RELEASED, e -> {
            if (e.getButton().equals(MouseButton.SECONDARY)) {
                e.consume();
            }
        });

        return result;
    }

    /** Returns the id stored for the descriptor at the current playhead position. */
    public String currentlySelectedId(String id) {
        return effectParametersRepository.getValueAt(id, timelineManager.getCurrentPosition());
    }
}
|
KjetilBerg/Verse | src/main/java/com/kbindiedev/verse/gfx/UniformLayout.java | package com.kbindiedev.verse.gfx;
import org.joml.Matrix4f;
import java.util.HashMap;
/** Describes the uniforms of materials and shaders */
/** Describes the uniforms of materials and shaders */
public class UniformLayout {

    // Maps each uniform name to the value type it accepts.
    private HashMap<String, UniformValueType> map;

    public UniformLayout() {
        map = new HashMap<>();
    }

    /** @return the backing name-to-type map (mutable, shared). */
    public HashMap<String, UniformValueType> getUniformTypeMap() {
        return map;
    }

    /** Registers a uniform field and the value type it accepts. */
    public void addUniformEntry(String key, UniformValueType valueType) {
        map.put(key, valueType);
    }

    /**
     * Checks that {@code value} is acceptable for the uniform named {@code key}.
     * Unknown keys are rejected.
     */
    public boolean isValid(String key, Object value) {
        UniformValueType entry = map.get(key);
        return entry != null && entry.validate(value);
    }

    /**
     * Defines value types for uniform fields. Special cases may come up.
     * In the case of TEXTURE_ARRAY, upon using material, textures must be bound/readied accordingly
     * if any given texture is not loaded, an error may be thrown
     */
    public enum UniformValueType {
        MATRIX4f(Matrix4f.class),
        TEXTURE_ARRAY(Texture[].class);

        // Runtime class a candidate value must be an instance of.
        private Class<?> accepted;

        UniformValueType(Class<?> accepted) {
            this.accepted = accepted;
        }

        public boolean validate(Object toValidate) {
            return accepted.isInstance(toValidate);
        }
    }

    /** Pre-built layouts shared across the engine. */
    public static class Predefined {

        private static final UniformLayout MVP_LAYOUT; //TODO temp: unused
        public static final UniformLayout SPRITEBATCH;

        static {
            MVP_LAYOUT = new UniformLayout();
            MVP_LAYOUT.addUniformEntry("uModel", UniformValueType.MATRIX4f);
            MVP_LAYOUT.addUniformEntry("uView", UniformValueType.MATRIX4f);
            MVP_LAYOUT.addUniformEntry("uProjection", UniformValueType.MATRIX4f);

            SPRITEBATCH = new UniformLayout();
            SPRITEBATCH.addUniformEntry("uView", UniformValueType.MATRIX4f);
            SPRITEBATCH.addUniformEntry("uProjection", UniformValueType.MATRIX4f);
            SPRITEBATCH.addUniformEntry("uTexArray", UniformValueType.TEXTURE_ARRAY);
        }
    }
}
|
bantalon/pytezos | pytezos/michelson/format.py | import json
from datetime import datetime
from pprint import pprint
line_size = 100
def format_timestamp(timestamp: int) -> str:
    """ Format unix timestamp.

    :param timestamp: Unix timestamp (seconds)
    :returns: ISO 8601 string with a 'Z' suffix, e.g. '2020-09-13T12:26:40Z'
    """
    # datetime.utcfromtimestamp is deprecated (Python 3.12); build an aware UTC
    # datetime instead. The formatted output is byte-identical to the old code.
    dt = datetime.fromtimestamp(timestamp, tz=timezone.utc)
    return dt.strftime('%Y-%m-%dT%H:%M:%SZ')
class MichelsonFormatterError(ValueError):
    """Raised when a Micheline expression cannot be rendered as Michelson source."""
def is_framed(node):
    """Tell whether a prim node needs parentheses when nested inside another node.

    Composite types/values always get framed; atomic core types only when they
    carry annotations; anything else (instructions etc.) never does.
    """
    prim = node['prim']
    composite = {'Pair', 'Left', 'Right', 'Some', 'pair', 'or', 'option', 'map', 'big_map', 'list', 'set',
                 'contract', 'lambda', 'ticket', 'sapling_state', 'sapling_transaction'}
    if prim in composite:
        return True
    atomic = {'key', 'unit', 'signature', 'operation', 'int', 'nat', 'string', 'bytes', 'mutez', 'bool',
              'key_hash', 'timestamp', 'address', 'bls12_381_g1', 'bls12_381_g2', 'bls12_381_fr',
              'chain_id', 'never'}
    if prim in atomic:
        return 'annots' in node
    return False
def is_complex(node):
    """Tell whether an instruction's arguments should get their own lines (LAMBDA and IF*)."""
    prim = node['prim']
    return prim == 'LAMBDA' or prim.startswith('IF')
def is_inline(node):
    """Tell whether the instruction's arguments are kept on one line (PUSH only)."""
    prim = node['prim']
    return prim == 'PUSH'
def is_script(node):
    """Tell whether a Micheline sequence is a full contract script,
    i.e. every element is a parameter/storage/code section."""
    sections = ('parameter', 'storage', 'code')
    return all(isinstance(item, dict) and item.get('prim') in sections for item in node)
def format_node(node, indent='', inline=False, is_root=False, wrapped=False):
    """Recursively render a single Micheline node as Michelson source.

    :param node: a list (sequence), a prim-application dict, or a core literal
        dict ({'int': ...}, {'string': ...}, {'bytes': ...})
    :param indent: whitespace prefix inherited from the enclosing node
    :param inline: when True, never emit line breaks
    :param is_root: True only for the top-level call (affects script handling
        and parenthesization)
    :param wrapped: True when the parent already provides grouping, so
        parentheses can be omitted
    :returns: Michelson source fragment (a string) for this node
    """
    if isinstance(node, list):
        # A top-level [parameter, storage, code] script is rendered without the
        # surrounding { } and without extra indentation.
        is_script_root = is_root and is_script(node)
        seq_indent = indent if is_script_root else indent + ' ' * 2
        items = list(map(lambda x: format_node(x, seq_indent, inline, wrapped=True), node))
        if items:
            # +4 accounts for the '{ ' / ' }' frame when estimating line width.
            length = len(indent) + sum(map(len, items)) + 4
            space = '' if is_script_root else ' '
            if inline or length < line_size:
                # Everything fits: join items on a single line.
                seq = f'{space}; '.join(items)
            else:
                # Too wide: one item per line, aligned to seq_indent.
                seq = f'{space};\n{seq_indent}'.join(items)
            return seq if is_script_root else f'{{ {seq} }}'
        else:
            return '{}'
    elif isinstance(node, dict):
        if node.get('prim'):
            # Prim application: "PRIM annot1 annot2 arg1 arg2 ...".
            expr = ' '.join([node['prim']] + node.get('annots', []))
            args = node.get('args', [])
            if is_complex(node):
                # LAMBDA / IF*: each argument may go on its own indented line.
                arg_indent = indent + ' ' * 2
                items = list(map(lambda x: format_node(x, arg_indent, inline), args))
                length = len(indent) + len(expr) + sum(map(len, items)) + len(items) + 1
                if inline or length < line_size:
                    expr = f'{expr} {" ".join(items)}'
                else:
                    expr = f'\n{arg_indent}'.join([expr] + items)
            elif len(args) == 1:
                # Single argument: align continuation under the argument start.
                arg_indent = indent + ' ' * (len(expr) + 1)
                expr = f'{expr} {format_node(args[0], arg_indent, inline)}'
            elif len(args) > 1:
                # Multiple arguments: wrap per-argument when the line grows too long.
                arg_indent = indent + ' ' * 2
                alt_indent = indent + ' ' * (len(expr) + 2)
                for arg in args:
                    item = format_node(arg, arg_indent, inline)
                    length = len(indent) + len(expr) + len(item) + 1
                    if inline or is_inline(node) or length < line_size:
                        arg_indent = alt_indent
                        expr = f'{expr} {item}'
                    else:
                        expr = f'{expr}\n{arg_indent}{item}'
            if is_framed(node) and not is_root and not wrapped:
                return f'({expr})'
            else:
                return expr
        else:
            # Core literal: exactly one non-annotation key identifies the type.
            core_type, value = next((k, v) for k, v in node.items() if k[0] != '_' and k != 'annots')
            if core_type == 'int':
                return value
            elif core_type == 'bytes':
                return f'0x{value}'
            elif core_type == 'string':
                return json.dumps(value)
            else:
                assert False, f'unexpected core node {node}'
    else:
        assert False, f'unexpected node {node}'
def micheline_to_michelson(data, inline=False, wrap=False) -> str:
    """ Converts micheline expression into formatted Michelson source.

    :param data: Micheline expression
    :param inline: produce single line, used for tezos-client arguments (False by default)
    :param wrap: ensure expression is wrapped in brackets
    :raises MichelsonFormatterError: when the expression is malformed
    """
    try:
        res = format_node(data, inline=inline, is_root=True)
        needs_parens = wrap and any(res.startswith(prefix) for prefix in ('Left', 'Right', 'Some', 'Pair'))
        return f'({res})' if needs_parens else res
    except (KeyError, IndexError, TypeError) as e:
        # Dump the offending expression to aid debugging, then re-raise typed.
        pprint(data, compact=True)
        raise MichelsonFormatterError(e.args)
|
rfedorenkov/JavaRushTasks | 1.JavaSyntax/src/com/javarush/task/pro/task06/task0606/Solution.java | package com.javarush.task.pro.task06.task0606;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Universal soldier
 * It would be nice to have one method for solving a variety of tasks. You have the chance to write one.
 * Create 9 more universalMethod() methods. In total there must be 10 of them.
 * Come up with the parameters they should accept yourself.
 *
 *
 * Requirements:
 * 1. The program must contain 10 methods named universalMethod.
 */
public class Solution {
    public static void main(String[] args) {
        // Intentionally empty: the exercise only requires the overloads to exist.
    }

    // No-argument overload.
    public static void universalMethod(){

    }

    // Single String argument.
    public static void universalMethod(String s){

    }

    // String plus one int.
    public static void universalMethod(String s, int i){

    }

    // String plus a pair of ints.
    public static void universalMethod(String s, int x, int y){

    }

    // Two ints.
    public static void universalMethod(int x, int y){

    }

    // Three ints.
    public static void universalMethod(int x, int y, int z){

    }

    // Single int.
    public static void universalMethod(int i){

    }

    // Generic List argument.
    public static <T> void universalMethod(List<T> list){

    }

    // Generic Set argument.
    public static <T> void universalMethod(Set<T> set){

    }

    // Generic Map argument.
    public static <K, V> void universalMethod(Map<K, V> map){

    }
}
|
fergy/aplit_linux-5 | drivers/video/fbdev/via/share.h | /*
* Copyright 1998-2008 VIA Technologies, Inc. All Rights Reserved.
* Copyright 2001-2008 S3 Graphics, Inc. All Rights Reserved.
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation;
* either version 2, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTIES OR REPRESENTATIONS; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR
* A PARTICULAR PURPOSE.See the GNU General Public License
* for more details.
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc.,
* 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
#ifndef __SHARE_H__
#define __SHARE_H__
#include "via_modesetting.h"
/* Define Bit Field */
/* Single-bit masks used when programming register bit fields. */
#define BIT0 0x01
#define BIT1 0x02
#define BIT2 0x04
#define BIT3 0x08
#define BIT4 0x10
#define BIT5 0x20
#define BIT6 0x40
#define BIT7 0x80
/* Video Memory Size */
#define VIDEO_MEMORY_SIZE_16M 0x1000000 /* 16 MiB in bytes */
/*
 * Lengths of the VPIT structure arrays.
 */
#define StdCR 0x19
#define StdSR 0x04
#define StdGR 0x09
#define StdAR 0x14
#define PatchCR 11
/* Display path */
#define IGA1 1
#define IGA2 2
/* Define Color Depth */
/* Values are bytes per pixel. */
#define MODE_8BPP 1
#define MODE_16BPP 2
#define MODE_32BPP 4
/* Graphics controller register indices. */
#define GR20 0x20
#define GR21 0x21
#define GR22 0x22
/* Sequencer Registers */
/* Index equals register number; note SR1B/SR1A keep their original order. */
#define SR01 0x01
#define SR10 0x10
#define SR12 0x12
#define SR15 0x15
#define SR16 0x16
#define SR17 0x17
#define SR18 0x18
#define SR1B 0x1B
#define SR1A 0x1A
#define SR1C 0x1C
#define SR1D 0x1D
#define SR1E 0x1E
#define SR1F 0x1F
#define SR20 0x20
#define SR21 0x21
#define SR22 0x22
#define SR2A 0x2A
#define SR2D 0x2D
#define SR2E 0x2E
#define SR30 0x30
#define SR39 0x39
#define SR3D 0x3D
#define SR3E 0x3E
#define SR3F 0x3F
#define SR40 0x40
#define SR43 0x43
#define SR44 0x44
#define SR45 0x45
#define SR46 0x46
#define SR47 0x47
#define SR48 0x48
#define SR49 0x49
#define SR4A 0x4A
#define SR4B 0x4B
#define SR4C 0x4C
#define SR52 0x52
#define SR57 0x57
#define SR58 0x58
#define SR59 0x59
#define SR5D 0x5D
#define SR5E 0x5E
#define SR65 0x65
/* CRT Controller Registers */
#define CR00 0x00
#define CR01 0x01
#define CR02 0x02
#define CR03 0x03
#define CR04 0x04
#define CR05 0x05
#define CR06 0x06
#define CR07 0x07
#define CR08 0x08
#define CR09 0x09
#define CR0A 0x0A
#define CR0B 0x0B
#define CR0C 0x0C
#define CR0D 0x0D
#define CR0E 0x0E
#define CR0F 0x0F
#define CR10 0x10
#define CR11 0x11
#define CR12 0x12
#define CR13 0x13
#define CR14 0x14
#define CR15 0x15
#define CR16 0x16
#define CR17 0x17
#define CR18 0x18
/* Extend CRT Controller Registers */
#define CR30 0x30
#define CR31 0x31
#define CR32 0x32
#define CR33 0x33
#define CR34 0x34
#define CR35 0x35
#define CR36 0x36
#define CR37 0x37
#define CR38 0x38
#define CR39 0x39
#define CR3A 0x3A
#define CR3B 0x3B
#define CR3C 0x3C
#define CR3D 0x3D
#define CR3E 0x3E
#define CR3F 0x3F
#define CR40 0x40
#define CR41 0x41
#define CR42 0x42
#define CR43 0x43
#define CR44 0x44
#define CR45 0x45
#define CR46 0x46
#define CR47 0x47
#define CR48 0x48
#define CR49 0x49
#define CR4A 0x4A
#define CR4B 0x4B
#define CR4C 0x4C
#define CR4D 0x4D
#define CR4E 0x4E
#define CR4F 0x4F
#define CR50 0x50
#define CR51 0x51
#define CR52 0x52
#define CR53 0x53
#define CR54 0x54
#define CR55 0x55
#define CR56 0x56
#define CR57 0x57
#define CR58 0x58
#define CR59 0x59
#define CR5A 0x5A
#define CR5B 0x5B
#define CR5C 0x5C
#define CR5D 0x5D
#define CR5E 0x5E
#define CR5F 0x5F
#define CR60 0x60
#define CR61 0x61
#define CR62 0x62
#define CR63 0x63
#define CR64 0x64
#define CR65 0x65
#define CR66 0x66
#define CR67 0x67
#define CR68 0x68
#define CR69 0x69
#define CR6A 0x6A
#define CR6B 0x6B
#define CR6C 0x6C
#define CR6D 0x6D
#define CR6E 0x6E
#define CR6F 0x6F
#define CR70 0x70
#define CR71 0x71
#define CR72 0x72
#define CR73 0x73
#define CR74 0x74
#define CR75 0x75
#define CR76 0x76
#define CR77 0x77
#define CR78 0x78
#define CR79 0x79
#define CR7A 0x7A
#define CR7B 0x7B
#define CR7C 0x7C
#define CR7D 0x7D
#define CR7E 0x7E
#define CR7F 0x7F
#define CR80 0x80
#define CR81 0x81
#define CR82 0x82
#define CR83 0x83
#define CR84 0x84
#define CR85 0x85
#define CR86 0x86
#define CR87 0x87
#define CR88 0x88
#define CR89 0x89
#define CR8A 0x8A
#define CR8B 0x8B
#define CR8C 0x8C
#define CR8D 0x8D
#define CR8E 0x8E
#define CR8F 0x8F
#define CR90 0x90
#define CR91 0x91
#define CR92 0x92
#define CR93 0x93
#define CR94 0x94
#define CR95 0x95
#define CR96 0x96
#define CR97 0x97
#define CR98 0x98
#define CR99 0x99
#define CR9A 0x9A
#define CR9B 0x9B
#define CR9C 0x9C
#define CR9D 0x9D
#define CR9E 0x9E
#define CR9F 0x9F
#define CRA0 0xA0
#define CRA1 0xA1
#define CRA2 0xA2
#define CRA3 0xA3
#define CRD2 0xD2
#define CRD3 0xD3
#define CRD4 0xD4
/* LUT Table*/
/* VGA palette (DAC) I/O port addresses. */
#define LUT_DATA 0x3C9 /* DACDATA */
#define LUT_INDEX_READ 0x3C7 /* DACRX */
#define LUT_INDEX_WRITE 0x3C8 /* DACWX */
#define DACMASK 0x3C6
/* Definition Device */
#define DEVICE_CRT 0x01
#define DEVICE_DVI 0x03
#define DEVICE_LCD 0x04
/* Device output interface */
#define INTERFACE_NONE 0x00
#define INTERFACE_ANALOG_RGB 0x01
#define INTERFACE_DVP0 0x02
#define INTERFACE_DVP1 0x03
#define INTERFACE_DFP_HIGH 0x04
#define INTERFACE_DFP_LOW 0x05
#define INTERFACE_DFP 0x06
#define INTERFACE_LVDS0 0x07
#define INTERFACE_LVDS1 0x08
#define INTERFACE_LVDS0LVDS1 0x09
#define INTERFACE_TMDS 0x0A
/* Board hardware layout variants. */
#define HW_LAYOUT_LCD_ONLY 0x01
#define HW_LAYOUT_DVI_ONLY 0x02
#define HW_LAYOUT_LCD_DVI 0x03
#define HW_LAYOUT_LCD1_LCD2 0x04
#define HW_LAYOUT_LCD_EXTERNAL_LCD2 0x10
/* Definition CRTC Timing Index */
#define H_TOTAL_INDEX 0
#define H_ADDR_INDEX 1
#define H_BLANK_START_INDEX 2
#define H_BLANK_END_INDEX 3
#define H_SYNC_START_INDEX 4
#define H_SYNC_END_INDEX 5
#define V_TOTAL_INDEX 6
#define V_ADDR_INDEX 7
#define V_BLANK_START_INDEX 8
#define V_BLANK_END_INDEX 9
#define V_SYNC_START_INDEX 10
#define V_SYNC_END_INDEX 11
#define H_TOTAL_SHADOW_INDEX 12
#define H_BLANK_END_SHADOW_INDEX 13
#define V_TOTAL_SHADOW_INDEX 14
#define V_ADDR_SHADOW_INDEX 15
/* Note: "SATRT" misspelling of "START" is preserved; external code uses these names. */
#define V_BLANK_SATRT_SHADOW_INDEX 16
#define V_BLANK_END_SHADOW_INDEX 17
#define V_SYNC_SATRT_SHADOW_INDEX 18
#define V_SYNC_END_SHADOW_INDEX 19
/* LCD display method
 */
#define LCD_EXPANDSION 0x00
#define LCD_CENTERING 0x01
/* LCD mode
 */
#define LCD_OPENLDI 0x00
#define LCD_SPWG 0x01
/* One CRT mode line: refresh rate, sync polarities and full CRTC timing. */
struct crt_mode_table {
	int refresh_rate;
	int h_sync_polarity;
	int v_sync_polarity;
	struct via_display_timing crtc;
};
/* A single indexed register write: port, index, and masked value. */
struct io_reg {
	int port;
	u8 index;
	u8 mask;
	u8 value;
};
#endif /* __SHARE_H__ */
|
infiniticg/equella | Platform/Infrastructure/com.tle.jpfclasspath/src/com/tle/jpfclasspath/parser/ManifestInfoHandler.java | <gh_stars>1-10
/*****************************************************************************
* Java Plug-in Framework (JPF) Copyright (C) 2004-2007 <NAME> This
* library is free software; you can redistribute it and/or modify it under the
* terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version. This library is distributed in the hope that it will be
* useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
* General Public License for more details. You should have received a copy of
* the GNU Lesser General Public License along with this library; if not, write
* to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
* 02111-1307 USA
*****************************************************************************/
package com.tle.jpfclasspath.parser;
import org.xml.sax.Attributes;
import org.xml.sax.EntityResolver;
import org.xml.sax.SAXException;
/**
* @version $Id$
*/
/**
 * SAX content handler that extracts the identity attributes from the root
 * element of a JPF manifest (either {@code plugin} or {@code plugin-fragment}).
 * All other elements and character data are ignored.
 *
 * @version $Id$
 */
final class ManifestInfoHandler extends BaseHandler
{
    /** Parsed result; stays null until a root element is seen. */
    private ModelManifestInfo manifest = null;

    ManifestInfoHandler(final EntityResolver anEntityResolver)
    {
        super(anEntityResolver);
    }

    /**
     * @see org.xml.sax.ContentHandler#startElement(java.lang.String,
     *      java.lang.String, java.lang.String, org.xml.sax.Attributes)
     */
    @Override
    public void startElement(final String uri, final String localName, final String qName,
        final Attributes attributes) throws SAXException
    {
        if( log.isDebugEnabled() )
        {
            log.debug("startElement - [" + uri + "]/[" //$NON-NLS-1$ //$NON-NLS-2$
                + localName + "]/[" + qName + "]"); //$NON-NLS-1$ //$NON-NLS-2$
        }
        String name = qName;
        if( "plugin".equals(name) ) { //$NON-NLS-1$
            beginManifest(name, attributes);
        }
        else if( "plugin-fragment".equals(name) ) { //$NON-NLS-1$
            beginManifest(name, attributes);
            // Fragment-only attributes describing the host plug-in.
            manifest.setPluginId(attributes.getValue("plugin-id")); //$NON-NLS-1$
            if( attributes.getValue("plugin-version") != null ) { //$NON-NLS-1$
                manifest.setPluginVersion(attributes.getValue("plugin-version")); //$NON-NLS-1$
            }
            if( attributes.getValue("match") != null ) { //$NON-NLS-1$
                manifest.setMatchingRule(MatchingRule.fromCode(attributes.getValue("match"))); //$NON-NLS-1$
            }
            else
            {
                manifest.setMatchingRule(MatchingRule.COMPATIBLE);
            }
        }
        else
        {
            // ignore all other elements
        }
    }

    /**
     * Creates the manifest and populates the attributes common to both root
     * element kinds (id/version/vendor), failing if a root element was
     * already processed. Extracted from the two duplicated branches above.
     */
    private void beginManifest(final String name, final Attributes attributes) throws SAXException
    {
        if( manifest != null )
        {
            throw new SAXException("unexpected [" + name //$NON-NLS-1$
                + "] element (manifest already defined)"); //$NON-NLS-1$
        }
        manifest = new ModelManifestInfo();
        manifest.setId(attributes.getValue("id")); //$NON-NLS-1$
        manifest.setVersion(attributes.getValue("version")); //$NON-NLS-1$
        manifest.setVendor(attributes.getValue("vendor")); //$NON-NLS-1$
    }

    /**
     * @see org.xml.sax.ContentHandler#endElement(java.lang.String,
     *      java.lang.String, java.lang.String)
     */
    @Override
    public void endElement(final String uri, final String localName, final String qName)
    {
        if( log.isDebugEnabled() )
        {
            log.debug("endElement - [" + uri + "]/[" + localName //$NON-NLS-1$ //$NON-NLS-2$
                + "]/[" + qName + "]"); //$NON-NLS-1$ //$NON-NLS-2$
        }
        // no-op
    }

    /**
     * @see org.xml.sax.ContentHandler#characters(char[], int, int)
     */
    @Override
    public void characters(final char[] ch, final int start, final int length)
    {
        // ignore all characters
    }

    /** @return the parsed manifest, or null if no root element was encountered */
    ModelManifestInfo getResult()
    {
        return manifest;
    }
}
|
skkuse-adv/2019Fall_team2 | analysis/reverse-engineering/decompile-fitts-with-gradle-20191031-2200/src/main/java/org/jetbrains/anko/AlertDialogBuilder.java | package org.jetbrains.anko;
import android.app.AlertDialog;
import android.app.AlertDialog.Builder;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.Resources;
import android.database.Cursor;
import android.graphics.drawable.Drawable;
import android.view.KeyEvent;
import android.view.View;
import android.view.ViewManager;
import android.widget.ListAdapter;
import com.kakao.auth.StringSet;
import io.fabric.sdk.android.services.settings.SettingsJsonConstants;
import java.util.List;
import kotlin.TypeCastException;
import kotlin.Unit;
import kotlin.jvm.functions.Function0;
import kotlin.jvm.functions.Function1;
import kotlin.jvm.functions.Function2;
import kotlin.jvm.internal.Intrinsics;
import org.jetbrains.anko.internals.AnkoInternals;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
public final class AlertDialogBuilder {
private Builder builder;
@NotNull
private final Context ctx;
@Nullable
private AlertDialog dialog;
public AlertDialogBuilder(@NotNull Context context) {
Intrinsics.checkParameterIsNotNull(context, "ctx");
this.ctx = context;
this.builder = new Builder(this.ctx);
}
@NotNull
public final Context getCtx() {
return this.ctx;
}
private final void setDialog(AlertDialog alertDialog) {
this.dialog = alertDialog;
}
@Nullable
public final AlertDialog getDialog() {
return this.dialog;
}
public AlertDialogBuilder(@NotNull AnkoContext<?> ankoContext) {
Intrinsics.checkParameterIsNotNull(ankoContext, "ankoContext");
this(ankoContext.getCtx());
}
public final void dismiss() {
AlertDialog alertDialog = this.dialog;
if (alertDialog != null) {
alertDialog.dismiss();
}
}
private final void checkBuilder() {
if (this.builder == null) {
throw new IllegalStateException("show() was already called for this AlertDialogBuilder");
}
}
@NotNull
public final AlertDialogBuilder show() {
checkBuilder();
Builder builder2 = this.builder;
if (builder2 == null) {
Intrinsics.throwNpe();
}
this.dialog = builder2.create();
this.builder = null;
AlertDialog alertDialog = this.dialog;
if (alertDialog == null) {
Intrinsics.throwNpe();
}
alertDialog.show();
return this;
}
public final void title(@NotNull CharSequence charSequence) {
Intrinsics.checkParameterIsNotNull(charSequence, "title");
checkBuilder();
Builder builder2 = this.builder;
if (builder2 == null) {
Intrinsics.throwNpe();
}
builder2.setTitle(charSequence);
}
public final void title(int i) {
checkBuilder();
Builder builder2 = this.builder;
if (builder2 == null) {
Intrinsics.throwNpe();
}
builder2.setTitle(i);
}
public final void message(@NotNull CharSequence charSequence) {
Intrinsics.checkParameterIsNotNull(charSequence, SettingsJsonConstants.PROMPT_MESSAGE_KEY);
checkBuilder();
Builder builder2 = this.builder;
if (builder2 == null) {
Intrinsics.throwNpe();
}
builder2.setMessage(charSequence);
}
public final void message(int i) {
checkBuilder();
Builder builder2 = this.builder;
if (builder2 == null) {
Intrinsics.throwNpe();
}
builder2.setMessage(i);
}
public final void icon(int i) {
checkBuilder();
Builder builder2 = this.builder;
if (builder2 == null) {
Intrinsics.throwNpe();
}
builder2.setIcon(i);
}
public final void icon(@NotNull Drawable drawable) {
Intrinsics.checkParameterIsNotNull(drawable, SettingsJsonConstants.APP_ICON_KEY);
checkBuilder();
Builder builder2 = this.builder;
if (builder2 == null) {
Intrinsics.throwNpe();
}
builder2.setIcon(drawable);
}
public final void customTitle(@NotNull View view) {
Intrinsics.checkParameterIsNotNull(view, "view");
checkBuilder();
Builder builder2 = this.builder;
if (builder2 == null) {
Intrinsics.throwNpe();
}
builder2.setCustomTitle(view);
}
public final void customTitle(@NotNull Function1<? super ViewManager, Unit> function1) {
Intrinsics.checkParameterIsNotNull(function1, "dsl");
checkBuilder();
Context context = this.ctx;
AnkoInternals ankoInternals = AnkoInternals.INSTANCE;
AnkoContextImpl ankoContextImpl = new AnkoContextImpl(context, context, false);
function1.invoke(ankoContextImpl);
View view = ankoContextImpl.getView();
Builder builder2 = this.builder;
if (builder2 == null) {
Intrinsics.throwNpe();
}
builder2.setCustomTitle(view);
}
public final void customView(@NotNull View view) {
Intrinsics.checkParameterIsNotNull(view, "view");
checkBuilder();
Builder builder2 = this.builder;
if (builder2 == null) {
Intrinsics.throwNpe();
}
builder2.setView(view);
}
public final void customView(@NotNull Function1<? super ViewManager, Unit> function1) {
Intrinsics.checkParameterIsNotNull(function1, "dsl");
checkBuilder();
Context context = this.ctx;
AnkoInternals ankoInternals = AnkoInternals.INSTANCE;
AnkoContextImpl ankoContextImpl = new AnkoContextImpl(context, context, false);
function1.invoke(ankoContextImpl);
View view = ankoContextImpl.getView();
Builder builder2 = this.builder;
if (builder2 == null) {
Intrinsics.throwNpe();
}
builder2.setView(view);
}
public static /* synthetic */ void cancellable$default(AlertDialogBuilder alertDialogBuilder, boolean z, int i, Object obj) {
if ((i & 1) != 0) {
z = true;
}
alertDialogBuilder.cancellable(z);
}
public final void cancellable(boolean z) {
checkBuilder();
Builder builder2 = this.builder;
if (builder2 == null) {
Intrinsics.throwNpe();
}
builder2.setCancelable(z);
}
public final void onCancel(@NotNull Function0<Unit> function0) {
Intrinsics.checkParameterIsNotNull(function0, StringSet.PARAM_CALLBACK);
checkBuilder();
Builder builder2 = this.builder;
if (builder2 == null) {
Intrinsics.throwNpe();
}
builder2.setOnCancelListener(new AlertDialogBuilder$onCancel$1(function0));
}
public final void onKey(@NotNull Function2<? super Integer, ? super KeyEvent, Boolean> function2) {
Intrinsics.checkParameterIsNotNull(function2, StringSet.PARAM_CALLBACK);
checkBuilder();
Builder builder2 = this.builder;
if (builder2 == null) {
Intrinsics.throwNpe();
}
builder2.setOnKeyListener(new AlertDialogBuilder$onKey$1(function2));
}
public static /* synthetic */ void neutralButton$default(AlertDialogBuilder alertDialogBuilder, int i, Function1 function1, int i2, Object obj) {
if ((i2 & 1) != 0) {
i = 17039370;
}
if ((i2 & 2) != 0) {
function1 = AlertDialogBuilder$neutralButton$1.INSTANCE;
}
alertDialogBuilder.neutralButton(i, function1);
}
public final void neutralButton(int i, @NotNull Function1<? super DialogInterface, Unit> function1) {
Intrinsics.checkParameterIsNotNull(function1, StringSet.PARAM_CALLBACK);
String string = this.ctx.getString(i);
Intrinsics.checkExpressionValueIsNotNull(string, "ctx.getString(neutralText)");
neutralButton((CharSequence) string, function1);
}
public static /* synthetic */ void neutralButton$default(AlertDialogBuilder alertDialogBuilder, CharSequence charSequence, Function1 function1, int i, Object obj) {
if ((i & 2) != 0) {
function1 = AlertDialogBuilder$neutralButton$2.INSTANCE;
}
alertDialogBuilder.neutralButton(charSequence, function1);
}
public final void neutralButton(@NotNull CharSequence charSequence, @NotNull Function1<? super DialogInterface, Unit> function1) {
Intrinsics.checkParameterIsNotNull(charSequence, "neutralText");
Intrinsics.checkParameterIsNotNull(function1, StringSet.PARAM_CALLBACK);
checkBuilder();
Builder builder2 = this.builder;
if (builder2 == null) {
Intrinsics.throwNpe();
}
builder2.setNeutralButton(charSequence, new AlertDialogBuilder$neutralButton$3(function1));
}
public final void positiveButton(int i, @NotNull Function1<? super DialogInterface, Unit> function1) {
Intrinsics.checkParameterIsNotNull(function1, StringSet.PARAM_CALLBACK);
String string = this.ctx.getString(i);
Intrinsics.checkExpressionValueIsNotNull(string, "ctx.getString(positiveText)");
positiveButton((CharSequence) string, function1);
}
public final void okButton(@NotNull Function1<? super DialogInterface, Unit> function1) {
Intrinsics.checkParameterIsNotNull(function1, StringSet.PARAM_CALLBACK);
String string = this.ctx.getString(17039370);
Intrinsics.checkExpressionValueIsNotNull(string, "ctx.getString(R.string.ok)");
positiveButton((CharSequence) string, function1);
}
public final void yesButton(@NotNull Function1<? super DialogInterface, Unit> function1) {
Intrinsics.checkParameterIsNotNull(function1, StringSet.PARAM_CALLBACK);
String string = this.ctx.getString(17039379);
Intrinsics.checkExpressionValueIsNotNull(string, "ctx.getString(R.string.yes)");
positiveButton((CharSequence) string, function1);
}
public final void positiveButton(@NotNull CharSequence charSequence, @NotNull Function1<? super DialogInterface, Unit> function1) {
Intrinsics.checkParameterIsNotNull(charSequence, "positiveText");
Intrinsics.checkParameterIsNotNull(function1, StringSet.PARAM_CALLBACK);
checkBuilder();
Builder builder2 = this.builder;
if (builder2 == null) {
Intrinsics.throwNpe();
}
builder2.setPositiveButton(charSequence, new AlertDialogBuilder$positiveButton$1(function1));
}
public static /* synthetic */ void negativeButton$default(AlertDialogBuilder alertDialogBuilder, int i, Function1 function1, int i2, Object obj) {
if ((i2 & 2) != 0) {
function1 = AlertDialogBuilder$negativeButton$1.INSTANCE;
}
alertDialogBuilder.negativeButton(i, function1);
}
public final void negativeButton(int i, @NotNull Function1<? super DialogInterface, Unit> function1) {
Intrinsics.checkParameterIsNotNull(function1, StringSet.PARAM_CALLBACK);
String string = this.ctx.getString(i);
Intrinsics.checkExpressionValueIsNotNull(string, "ctx.getString(negativeText)");
negativeButton((CharSequence) string, function1);
}
public static /* synthetic */ void cancelButton$default(AlertDialogBuilder alertDialogBuilder, Function1 function1, int i, Object obj) {
if ((i & 1) != 0) {
function1 = AlertDialogBuilder$cancelButton$1.INSTANCE;
}
alertDialogBuilder.cancelButton(function1);
}
public final void cancelButton(@NotNull Function1<? super DialogInterface, Unit> function1) {
Intrinsics.checkParameterIsNotNull(function1, StringSet.PARAM_CALLBACK);
String string = this.ctx.getString(17039360);
Intrinsics.checkExpressionValueIsNotNull(string, "ctx.getString(R.string.cancel)");
negativeButton((CharSequence) string, function1);
}
public static /* synthetic */ void noButton$default(AlertDialogBuilder alertDialogBuilder, Function1 function1, int i, Object obj) {
if ((i & 1) != 0) {
function1 = AlertDialogBuilder$noButton$1.INSTANCE;
}
alertDialogBuilder.noButton(function1);
}
public final void noButton(@NotNull Function1<? super DialogInterface, Unit> function1) {
Intrinsics.checkParameterIsNotNull(function1, StringSet.PARAM_CALLBACK);
String string = this.ctx.getString(17039369);
Intrinsics.checkExpressionValueIsNotNull(string, "ctx.getString(R.string.no)");
negativeButton((CharSequence) string, function1);
}
public static /* synthetic */ void negativeButton$default(AlertDialogBuilder alertDialogBuilder, CharSequence charSequence, Function1 function1, int i, Object obj) {
if ((i & 2) != 0) {
function1 = AlertDialogBuilder$negativeButton$2.INSTANCE;
}
alertDialogBuilder.negativeButton(charSequence, function1);
}
public final void negativeButton(@NotNull CharSequence charSequence, @NotNull Function1<? super DialogInterface, Unit> function1) {
Intrinsics.checkParameterIsNotNull(charSequence, "negativeText");
Intrinsics.checkParameterIsNotNull(function1, StringSet.PARAM_CALLBACK);
checkBuilder();
Builder builder2 = this.builder;
if (builder2 == null) {
Intrinsics.throwNpe();
}
builder2.setNegativeButton(charSequence, new AlertDialogBuilder$negativeButton$3(function1));
}
public final void items(int i, @NotNull Function1<? super Integer, Unit> function1) {
Intrinsics.checkParameterIsNotNull(function1, StringSet.PARAM_CALLBACK);
Resources resources = this.ctx.getResources();
if (resources == null) {
Intrinsics.throwNpe();
}
CharSequence[] textArray = resources.getTextArray(i);
Intrinsics.checkExpressionValueIsNotNull(textArray, "ctx.resources!!.getTextArray(itemsId)");
items(textArray, function1);
}
public final void items(@NotNull CharSequence[] charSequenceArr, @NotNull Function1<? super Integer, Unit> function1) {
Intrinsics.checkParameterIsNotNull(charSequenceArr, "items");
Intrinsics.checkParameterIsNotNull(function1, StringSet.PARAM_CALLBACK);
checkBuilder();
Builder builder2 = this.builder;
if (builder2 == null) {
Intrinsics.throwNpe();
}
builder2.setItems(charSequenceArr, new AlertDialogBuilder$items$1(function1));
}
public final void adapter(@NotNull ListAdapter listAdapter, @NotNull Function1<? super Integer, Unit> function1) {
Intrinsics.checkParameterIsNotNull(listAdapter, "adapter");
Intrinsics.checkParameterIsNotNull(function1, StringSet.PARAM_CALLBACK);
checkBuilder();
Builder builder2 = this.builder;
if (builder2 == null) {
Intrinsics.throwNpe();
}
builder2.setAdapter(listAdapter, new AlertDialogBuilder$adapter$1(function1));
}
/**
 * Backs the dialog's list with a {@link Cursor}; {@code str} names the
 * column whose text is displayed, and the callback receives the clicked row
 * position.
 */
public final void adapter(@NotNull Cursor cursor, @NotNull String str, @NotNull Function1<? super Integer, Unit> function1) {
    Intrinsics.checkParameterIsNotNull(cursor, "cursor");
    Intrinsics.checkParameterIsNotNull(str, "labelColumn");
    Intrinsics.checkParameterIsNotNull(function1, StringSet.PARAM_CALLBACK);
    checkBuilder();
    Builder builder2 = this.builder;
    if (builder2 == null) {
        Intrinsics.throwNpe();
    }
    builder2.setCursor(cursor, new AlertDialogBuilder$adapter$2(function1), str);
}
/**
 * Convenience overload accepting a {@code List<CharSequence>}; converts it
 * to an array and delegates to {@link #items(CharSequence[], Function1)}.
 * The null check on {@code toArray}'s result is decompiler boilerplate for
 * Kotlin's checked cast — it can never actually be null.
 */
public final void items(@NotNull List<? extends CharSequence> list, @NotNull Function1<? super Integer, Unit> function1) {
    Intrinsics.checkParameterIsNotNull(list, "items");
    Intrinsics.checkParameterIsNotNull(function1, StringSet.PARAM_CALLBACK);
    Object[] array = list.toArray(new CharSequence[0]);
    if (array != null) {
        items((CharSequence[]) array, function1);
        return;
    }
    throw new TypeCastException("null cannot be cast to non-null type kotlin.Array<T>");
}
}
|
gregstewart/hearthstone-tracker | test.setup.js | <filename>test.setup.js
// Global test bootstrap: wires up chai + sinon and exposes them as globals
// so individual spec files do not need to import them.
import chai from 'chai';
import sinonChai from 'sinon-chai';
import sinon from 'sinon';
// Imported for side effects only: installs Promise/generator runtime
// shims. Keep this import before any code that relies on them.
import 'babel-polyfill';

// Register the sinon-chai assertion plugin before any spec runs.
chai.use(sinonChai);

global.expect = chai.expect;
global.sinon = sinon;
|
wparam-fox/androidkb | java/src/com/wparam/kb/inputmethod/latin/LastComposedWord.java | /*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.wparam.kb.inputmethod.latin;
import android.text.TextUtils;
/**
* This class encapsulates data about a word previously composed, but that has been
* committed already. This is used for resuming suggestion, and cancel auto-correction.
*/
/**
 * Immutable record of the word most recently committed to the editor.
 * It is kept around so that suggestions can be resumed on the word, and so
 * an auto-correction can be cancelled and the typed word restored.
 */
public final class LastComposedWord {
    // How the word ended up being committed:
    // - USER_TYPED_WORD: committed exactly as typed, with no IME help
    //   (e.g. rotation, or auto-correction disabled).
    public static final int COMMIT_TYPE_USER_TYPED_WORD = 0;
    // - MANUAL_PICK: the user tapped an entry on the suggestion strip.
    public static final int COMMIT_TYPE_MANUAL_PICK = 1;
    // - DECIDED_WORD: the IME committed its best candidate, which may or may
    //   not equal the typed word.
    public static final int COMMIT_TYPE_DECIDED_WORD = 2;
    // - CANCEL_AUTO_CORRECT: the old word was re-committed after the user
    //   cancelled an auto-correction.
    public static final int COMMIT_TYPE_CANCEL_AUTO_CORRECT = 3;

    public static final String NOT_A_SEPARATOR = "";

    public final int[] mPrimaryKeyCodes;
    public final String mTypedWord;
    public final String mCommittedWord;
    public final String mSeparatorString;
    public final String mPrevWord;
    public final int mCapitalizedMode;
    public final InputPointers mInputPointers =
            new InputPointers(Constants.DICTIONARY_MAX_WORD_LENGTH);

    // Cleared by deactivate(); once false the commit can no longer be reverted.
    private boolean mActive;

    /** Sentinel used when there is no last composed word to remember. */
    public static final LastComposedWord NOT_A_COMPOSED_WORD =
            new LastComposedWord(null, null, "", "", NOT_A_SEPARATOR, null,
                    WordComposer.CAPS_MODE_OFF);

    /**
     * Builds a record of a freshly committed word. The passed arrays/objects
     * are captured as-is and must not be mutated by the caller afterwards.
     */
    public LastComposedWord(final int[] primaryKeyCodes, final InputPointers inputPointers,
            final String typedWord, final String committedWord, final String separatorString,
            final String prevWord, final int capitalizedMode) {
        mActive = true;
        mPrimaryKeyCodes = primaryKeyCodes;
        mTypedWord = typedWord;
        mCommittedWord = committedWord;
        mSeparatorString = separatorString;
        mPrevWord = prevWord;
        mCapitalizedMode = capitalizedMode;
        if (inputPointers != null) {
            mInputPointers.copy(inputPointers);
        }
    }

    /** Marks this commit as no longer revertible. */
    public void deactivate() {
        mActive = false;
    }

    /**
     * @return whether the commit can still be undone: the record must be
     * active, something must actually have been committed, and the committed
     * text must differ from what was typed.
     */
    public boolean canRevertCommit() {
        if (!mActive || TextUtils.isEmpty(mCommittedWord)) {
            return false;
        }
        return !committedWordEqualsTypedWord();
    }

    // True when the IME committed exactly what the user typed.
    private boolean committedWordEqualsTypedWord() {
        return TextUtils.equals(mTypedWord, mCommittedWord);
    }
}
|
ferocious-space/eveapi | esi/industry/get_industry_systems_responses.go | <filename>esi/industry/get_industry_systems_responses.go
// Code generated by go-swagger; DO NOT EDIT.
package industry
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"context"
"encoding/json"
"fmt"
"io"
"strconv"
"github.com/go-openapi/errors"
"github.com/go-openapi/runtime"
"github.com/go-openapi/strfmt"
"github.com/go-openapi/swag"
"github.com/go-openapi/validate"
"github.com/ferocious-space/eveapi/models"
)
// GetIndustrySystemsReader is a Reader for the GetIndustrySystems structure.
type GetIndustrySystemsReader struct {
	// formats is the strfmt registry used to deserialize response payloads.
	formats strfmt.Registry
}

// ReadResponse reads a server response into the received o.
//
// Only 200 yields a (result, nil) pair; every other recognized status code
// (including 304 Not Modified) is returned as an error value.
func (o *GetIndustrySystemsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
	switch response.Code() {
	case 200:
		result := NewGetIndustrySystemsOK()
		if err := result.readResponse(response, consumer, o.formats); err != nil {
			return nil, err
		}
		return result, nil
	case 304:
		result := NewGetIndustrySystemsNotModified()
		if err := result.readResponse(response, consumer, o.formats); err != nil {
			return nil, err
		}
		return nil, result
	case 400:
		result := NewGetIndustrySystemsBadRequest()
		if err := result.readResponse(response, consumer, o.formats); err != nil {
			return nil, err
		}
		return nil, result
	case 420:
		result := NewGetIndustrySystemsEnhanceYourCalm()
		if err := result.readResponse(response, consumer, o.formats); err != nil {
			return nil, err
		}
		return nil, result
	case 500:
		result := NewGetIndustrySystemsInternalServerError()
		if err := result.readResponse(response, consumer, o.formats); err != nil {
			return nil, err
		}
		return nil, result
	case 503:
		result := NewGetIndustrySystemsServiceUnavailable()
		if err := result.readResponse(response, consumer, o.formats); err != nil {
			return nil, err
		}
		return nil, result
	case 504:
		result := NewGetIndustrySystemsGatewayTimeout()
		if err := result.readResponse(response, consumer, o.formats); err != nil {
			return nil, err
		}
		return nil, result
	default:
		return nil, runtime.NewAPIError("response status code does not match any response statuses defined for this endpoint in the swagger spec", response, response.Code())
	}
}
// NewGetIndustrySystemsOK creates a GetIndustrySystemsOK with default headers values
func NewGetIndustrySystemsOK() *GetIndustrySystemsOK {
	return &GetIndustrySystemsOK{}
}

/* GetIndustrySystemsOK describes a response with status code 200, with default header values.

A list of cost indices
*/
type GetIndustrySystemsOK struct {

	/* The caching mechanism used
	 */
	CacheControl string

	/* RFC7232 compliant entity tag
	 */
	ETag string

	/* RFC7231 formatted datetime string
	 */
	Expires string

	/* RFC7231 formatted datetime string
	 */
	LastModified string

	Payload []*GetIndustrySystemsOKBodyItems0
}

func (o *GetIndustrySystemsOK) Error() string {
	return fmt.Sprintf("[GET /v1/industry/systems/][%d] getIndustrySystemsOK %+v", 200, o.Payload)
}

func (o *GetIndustrySystemsOK) GetPayload() []*GetIndustrySystemsOKBodyItems0 {
	return o.Payload
}

// readResponse copies the caching headers and decodes the body; an io.EOF
// from an empty body is tolerated.
func (o *GetIndustrySystemsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {

	// hydrates response header Cache-Control
	hdrCacheControl := response.GetHeader("Cache-Control")

	if hdrCacheControl != "" {
		o.CacheControl = hdrCacheControl
	}

	// hydrates response header ETag
	hdrETag := response.GetHeader("ETag")

	if hdrETag != "" {
		o.ETag = hdrETag
	}

	// hydrates response header Expires
	hdrExpires := response.GetHeader("Expires")

	if hdrExpires != "" {
		o.Expires = hdrExpires
	}

	// hydrates response header Last-Modified
	hdrLastModified := response.GetHeader("Last-Modified")

	if hdrLastModified != "" {
		o.LastModified = hdrLastModified
	}

	// response payload
	if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF {
		return err
	}

	return nil
}
// NewGetIndustrySystemsNotModified creates a GetIndustrySystemsNotModified with default headers values
func NewGetIndustrySystemsNotModified() *GetIndustrySystemsNotModified {
	return &GetIndustrySystemsNotModified{}
}

/* GetIndustrySystemsNotModified describes a response with status code 304, with default header values.

Not modified
*/
type GetIndustrySystemsNotModified struct {

	/* The caching mechanism used
	 */
	CacheControl string

	/* RFC7232 compliant entity tag
	 */
	ETag string

	/* RFC7231 formatted datetime string
	 */
	Expires string

	/* RFC7231 formatted datetime string
	 */
	LastModified string
}

func (o *GetIndustrySystemsNotModified) Error() string {
	return fmt.Sprintf("[GET /v1/industry/systems/][%d] getIndustrySystemsNotModified ", 304)
}

// readResponse copies caching headers only; a 304 carries no body.
func (o *GetIndustrySystemsNotModified) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {

	// hydrates response header Cache-Control
	hdrCacheControl := response.GetHeader("Cache-Control")

	if hdrCacheControl != "" {
		o.CacheControl = hdrCacheControl
	}

	// hydrates response header ETag
	hdrETag := response.GetHeader("ETag")

	if hdrETag != "" {
		o.ETag = hdrETag
	}

	// hydrates response header Expires
	hdrExpires := response.GetHeader("Expires")

	if hdrExpires != "" {
		o.Expires = hdrExpires
	}

	// hydrates response header Last-Modified
	hdrLastModified := response.GetHeader("Last-Modified")

	if hdrLastModified != "" {
		o.LastModified = hdrLastModified
	}

	return nil
}
// NewGetIndustrySystemsBadRequest creates a GetIndustrySystemsBadRequest with default headers values
func NewGetIndustrySystemsBadRequest() *GetIndustrySystemsBadRequest {
	return &GetIndustrySystemsBadRequest{}
}

/* GetIndustrySystemsBadRequest describes a response with status code 400, with default header values.

Bad request
*/
type GetIndustrySystemsBadRequest struct {
	Payload *models.BadRequest
}

func (o *GetIndustrySystemsBadRequest) Error() string {
	return fmt.Sprintf("[GET /v1/industry/systems/][%d] getIndustrySystemsBadRequest %+v", 400, o.Payload)
}

func (o *GetIndustrySystemsBadRequest) GetPayload() *models.BadRequest {
	return o.Payload
}

// readResponse decodes the error body into Payload; io.EOF (empty body) is tolerated.
func (o *GetIndustrySystemsBadRequest) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {

	o.Payload = new(models.BadRequest)

	// response payload
	if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
		return err
	}

	return nil
}
// NewGetIndustrySystemsEnhanceYourCalm creates a GetIndustrySystemsEnhanceYourCalm with default headers values
func NewGetIndustrySystemsEnhanceYourCalm() *GetIndustrySystemsEnhanceYourCalm {
	return &GetIndustrySystemsEnhanceYourCalm{}
}

/* GetIndustrySystemsEnhanceYourCalm describes a response with status code 420, with default header values.

Error limited (the ESI API's rate-limit status code)
*/
type GetIndustrySystemsEnhanceYourCalm struct {
	Payload *models.ErrorLimited
}

func (o *GetIndustrySystemsEnhanceYourCalm) Error() string {
	return fmt.Sprintf("[GET /v1/industry/systems/][%d] getIndustrySystemsEnhanceYourCalm %+v", 420, o.Payload)
}

func (o *GetIndustrySystemsEnhanceYourCalm) GetPayload() *models.ErrorLimited {
	return o.Payload
}

// readResponse decodes the error body into Payload; io.EOF (empty body) is tolerated.
func (o *GetIndustrySystemsEnhanceYourCalm) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {

	o.Payload = new(models.ErrorLimited)

	// response payload
	if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
		return err
	}

	return nil
}
// NewGetIndustrySystemsInternalServerError creates a GetIndustrySystemsInternalServerError with default headers values
func NewGetIndustrySystemsInternalServerError() *GetIndustrySystemsInternalServerError {
	return &GetIndustrySystemsInternalServerError{}
}

/* GetIndustrySystemsInternalServerError describes a response with status code 500, with default header values.

Internal server error
*/
type GetIndustrySystemsInternalServerError struct {
	Payload *models.InternalServerError
}

func (o *GetIndustrySystemsInternalServerError) Error() string {
	return fmt.Sprintf("[GET /v1/industry/systems/][%d] getIndustrySystemsInternalServerError %+v", 500, o.Payload)
}

func (o *GetIndustrySystemsInternalServerError) GetPayload() *models.InternalServerError {
	return o.Payload
}

// readResponse decodes the error body into Payload; io.EOF (empty body) is tolerated.
func (o *GetIndustrySystemsInternalServerError) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {

	o.Payload = new(models.InternalServerError)

	// response payload
	if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
		return err
	}

	return nil
}
// NewGetIndustrySystemsServiceUnavailable creates a GetIndustrySystemsServiceUnavailable with default headers values
func NewGetIndustrySystemsServiceUnavailable() *GetIndustrySystemsServiceUnavailable {
	return &GetIndustrySystemsServiceUnavailable{}
}

/* GetIndustrySystemsServiceUnavailable describes a response with status code 503, with default header values.

Service unavailable
*/
type GetIndustrySystemsServiceUnavailable struct {
	Payload *models.ServiceUnavailable
}

func (o *GetIndustrySystemsServiceUnavailable) Error() string {
	return fmt.Sprintf("[GET /v1/industry/systems/][%d] getIndustrySystemsServiceUnavailable %+v", 503, o.Payload)
}

func (o *GetIndustrySystemsServiceUnavailable) GetPayload() *models.ServiceUnavailable {
	return o.Payload
}

// readResponse decodes the error body into Payload; io.EOF (empty body) is tolerated.
func (o *GetIndustrySystemsServiceUnavailable) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {

	o.Payload = new(models.ServiceUnavailable)

	// response payload
	if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
		return err
	}

	return nil
}
// NewGetIndustrySystemsGatewayTimeout creates a GetIndustrySystemsGatewayTimeout with default headers values
func NewGetIndustrySystemsGatewayTimeout() *GetIndustrySystemsGatewayTimeout {
	return &GetIndustrySystemsGatewayTimeout{}
}

/* GetIndustrySystemsGatewayTimeout describes a response with status code 504, with default header values.

Gateway timeout
*/
type GetIndustrySystemsGatewayTimeout struct {
	Payload *models.GatewayTimeout
}

func (o *GetIndustrySystemsGatewayTimeout) Error() string {
	return fmt.Sprintf("[GET /v1/industry/systems/][%d] getIndustrySystemsGatewayTimeout %+v", 504, o.Payload)
}

func (o *GetIndustrySystemsGatewayTimeout) GetPayload() *models.GatewayTimeout {
	return o.Payload
}

// readResponse decodes the error body into Payload; io.EOF (empty body) is tolerated.
func (o *GetIndustrySystemsGatewayTimeout) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {

	o.Payload = new(models.GatewayTimeout)

	// response payload
	if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
		return err
	}

	return nil
}
/*GetIndustrySystemsOKBodyItems0 get_industry_systems_200_ok
//
// 200 ok object
swagger:model GetIndustrySystemsOKBodyItems0
*/
type GetIndustrySystemsOKBodyItems0 struct {

	// get_industry_systems_cost_indices
	//
	// cost_indices array
	// Required: true
	// Max Items: 10
	CostIndices []*GetIndustrySystemsOKBodyItems0CostIndicesItems0 `json:"cost_indices"`

	// get_industry_systems_solar_system_id
	//
	// solar_system_id integer
	// Required: true
	SolarSystemID *int32 `json:"solar_system_id"`
}

// Validate validates this get industry systems o k body items0
func (o *GetIndustrySystemsOKBodyItems0) Validate(formats strfmt.Registry) error {
	var res []error

	if err := o.validateCostIndices(formats); err != nil {
		res = append(res, err)
	}

	if err := o.validateSolarSystemID(formats); err != nil {
		res = append(res, err)
	}

	if len(res) > 0 {
		return errors.CompositeValidationError(res...)
	}
	return nil
}

// validateCostIndices enforces presence, the max-items-10 constraint, and
// element-wise validation of each cost index entry.
func (o *GetIndustrySystemsOKBodyItems0) validateCostIndices(formats strfmt.Registry) error {

	if err := validate.Required("cost_indices", "body", o.CostIndices); err != nil {
		return err
	}

	iCostIndicesSize := int64(len(o.CostIndices))

	if err := validate.MaxItems("cost_indices", "body", iCostIndicesSize, 10); err != nil {
		return err
	}

	for i := 0; i < len(o.CostIndices); i++ {
		if swag.IsZero(o.CostIndices[i]) { // not required
			continue
		}

		if o.CostIndices[i] != nil {
			if err := o.CostIndices[i].Validate(formats); err != nil {
				if ve, ok := err.(*errors.Validation); ok {
					return ve.ValidateName("cost_indices" + "." + strconv.Itoa(i))
				} else if ce, ok := err.(*errors.CompositeError); ok {
					return ce.ValidateName("cost_indices" + "." + strconv.Itoa(i))
				}
				return err
			}
		}

	}

	return nil
}

func (o *GetIndustrySystemsOKBodyItems0) validateSolarSystemID(formats strfmt.Registry) error {

	if err := validate.Required("solar_system_id", "body", o.SolarSystemID); err != nil {
		return err
	}

	return nil
}

// ContextValidate validate this get industry systems o k body items0 based on the context it is used
func (o *GetIndustrySystemsOKBodyItems0) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
	var res []error

	if err := o.contextValidateCostIndices(ctx, formats); err != nil {
		res = append(res, err)
	}

	if len(res) > 0 {
		return errors.CompositeValidationError(res...)
	}
	return nil
}

func (o *GetIndustrySystemsOKBodyItems0) contextValidateCostIndices(ctx context.Context, formats strfmt.Registry) error {

	for i := 0; i < len(o.CostIndices); i++ {

		if o.CostIndices[i] != nil {
			if err := o.CostIndices[i].ContextValidate(ctx, formats); err != nil {
				if ve, ok := err.(*errors.Validation); ok {
					return ve.ValidateName("cost_indices" + "." + strconv.Itoa(i))
				} else if ce, ok := err.(*errors.CompositeError); ok {
					return ce.ValidateName("cost_indices" + "." + strconv.Itoa(i))
				}
				return err
			}
		}

	}

	return nil
}

// MarshalBinary interface implementation
func (o *GetIndustrySystemsOKBodyItems0) MarshalBinary() ([]byte, error) {
	if o == nil {
		return nil, nil
	}
	return swag.WriteJSON(o)
}

// UnmarshalBinary interface implementation
func (o *GetIndustrySystemsOKBodyItems0) UnmarshalBinary(b []byte) error {
	var res GetIndustrySystemsOKBodyItems0
	if err := swag.ReadJSON(b, &res); err != nil {
		return err
	}
	*o = res
	return nil
}
/*GetIndustrySystemsOKBodyItems0CostIndicesItems0 get_industry_systems_cost_indice
//
// cost_indice object
swagger:model GetIndustrySystemsOKBodyItems0CostIndicesItems0
*/
type GetIndustrySystemsOKBodyItems0CostIndicesItems0 struct {

	// get_industry_systems_activity
	//
	// activity string
	// Required: true
	// Enum: [copying duplicating invention manufacturing none reaction researching_material_efficiency researching_technology researching_time_efficiency reverse_engineering]
	Activity *string `json:"activity"`

	// get_industry_systems_cost_index
	//
	// cost_index number
	// Required: true
	CostIndex *float32 `json:"cost_index"`
}

// Validate validates this get industry systems o k body items0 cost indices items0
func (o *GetIndustrySystemsOKBodyItems0CostIndicesItems0) Validate(formats strfmt.Registry) error {
	var res []error

	if err := o.validateActivity(formats); err != nil {
		res = append(res, err)
	}

	if err := o.validateCostIndex(formats); err != nil {
		res = append(res, err)
	}

	if len(res) > 0 {
		return errors.CompositeValidationError(res...)
	}
	return nil
}

// Allowed values for the `activity` enum, populated once at package init
// from the JSON literal embedded by the swagger generator.
var getIndustrySystemsOKBodyItems0CostIndicesItems0TypeActivityPropEnum []interface{}

func init() {
	var res []string
	if err := json.Unmarshal([]byte(`["copying","duplicating","invention","manufacturing","none","reaction","researching_material_efficiency","researching_technology","researching_time_efficiency","reverse_engineering"]`), &res); err != nil {
		panic(err)
	}
	for _, v := range res {
		getIndustrySystemsOKBodyItems0CostIndicesItems0TypeActivityPropEnum = append(getIndustrySystemsOKBodyItems0CostIndicesItems0TypeActivityPropEnum, v)
	}
}

const (

	// GetIndustrySystemsOKBodyItems0CostIndicesItems0ActivityCopying captures enum value "copying"
	GetIndustrySystemsOKBodyItems0CostIndicesItems0ActivityCopying string = "copying"

	// GetIndustrySystemsOKBodyItems0CostIndicesItems0ActivityDuplicating captures enum value "duplicating"
	GetIndustrySystemsOKBodyItems0CostIndicesItems0ActivityDuplicating string = "duplicating"

	// GetIndustrySystemsOKBodyItems0CostIndicesItems0ActivityInvention captures enum value "invention"
	GetIndustrySystemsOKBodyItems0CostIndicesItems0ActivityInvention string = "invention"

	// GetIndustrySystemsOKBodyItems0CostIndicesItems0ActivityManufacturing captures enum value "manufacturing"
	GetIndustrySystemsOKBodyItems0CostIndicesItems0ActivityManufacturing string = "manufacturing"

	// GetIndustrySystemsOKBodyItems0CostIndicesItems0ActivityNone captures enum value "none"
	GetIndustrySystemsOKBodyItems0CostIndicesItems0ActivityNone string = "none"

	// GetIndustrySystemsOKBodyItems0CostIndicesItems0ActivityReaction captures enum value "reaction"
	GetIndustrySystemsOKBodyItems0CostIndicesItems0ActivityReaction string = "reaction"

	// GetIndustrySystemsOKBodyItems0CostIndicesItems0ActivityResearchingMaterialEfficiency captures enum value "researching_material_efficiency"
	GetIndustrySystemsOKBodyItems0CostIndicesItems0ActivityResearchingMaterialEfficiency string = "researching_material_efficiency"

	// GetIndustrySystemsOKBodyItems0CostIndicesItems0ActivityResearchingTechnology captures enum value "researching_technology"
	GetIndustrySystemsOKBodyItems0CostIndicesItems0ActivityResearchingTechnology string = "researching_technology"

	// GetIndustrySystemsOKBodyItems0CostIndicesItems0ActivityResearchingTimeEfficiency captures enum value "researching_time_efficiency"
	GetIndustrySystemsOKBodyItems0CostIndicesItems0ActivityResearchingTimeEfficiency string = "researching_time_efficiency"

	// GetIndustrySystemsOKBodyItems0CostIndicesItems0ActivityReverseEngineering captures enum value "reverse_engineering"
	GetIndustrySystemsOKBodyItems0CostIndicesItems0ActivityReverseEngineering string = "reverse_engineering"
)

// prop value enum
func (o *GetIndustrySystemsOKBodyItems0CostIndicesItems0) validateActivityEnum(path, location string, value string) error {
	if err := validate.EnumCase(path, location, value, getIndustrySystemsOKBodyItems0CostIndicesItems0TypeActivityPropEnum, true); err != nil {
		return err
	}
	return nil
}

func (o *GetIndustrySystemsOKBodyItems0CostIndicesItems0) validateActivity(formats strfmt.Registry) error {

	if err := validate.Required("activity", "body", o.Activity); err != nil {
		return err
	}

	// value enum
	if err := o.validateActivityEnum("activity", "body", *o.Activity); err != nil {
		return err
	}

	return nil
}

func (o *GetIndustrySystemsOKBodyItems0CostIndicesItems0) validateCostIndex(formats strfmt.Registry) error {

	if err := validate.Required("cost_index", "body", o.CostIndex); err != nil {
		return err
	}

	return nil
}

// ContextValidate validates this get industry systems o k body items0 cost indices items0 based on context it is used
func (o *GetIndustrySystemsOKBodyItems0CostIndicesItems0) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
	return nil
}

// MarshalBinary interface implementation
func (o *GetIndustrySystemsOKBodyItems0CostIndicesItems0) MarshalBinary() ([]byte, error) {
	if o == nil {
		return nil, nil
	}
	return swag.WriteJSON(o)
}

// UnmarshalBinary interface implementation
func (o *GetIndustrySystemsOKBodyItems0CostIndicesItems0) UnmarshalBinary(b []byte) error {
	var res GetIndustrySystemsOKBodyItems0CostIndicesItems0
	if err := swag.ReadJSON(b, &res); err != nil {
		return err
	}
	*o = res
	return nil
}
|
isabella232/typeseam | tests/test_base.py | # -*- coding: utf-8 -*-
import os
from flask.ext.testing import TestCase as FlaskTestCase
from typeseam.app import (
create_app as _create_app,
db
)
from tests.utils import get_value_for_name
class BaseTestCase(FlaskTestCase):
    """Base test case that boots the typeseam app.

    Configures the app with the test settings and gives every test a
    fresh database, dropping it again afterwards.
    """

    def create_app(self):
        """Build the application under test (required by Flask-Testing)."""
        os.environ['CONFIG'] = 'typeseam.settings.TestConfig'
        app = _create_app()
        app.testing = True
        return app

    def get_input_value(self, name, response):
        """Return the value of the named <input> element in an HTML response."""
        body = response.data.decode('utf-8')
        return get_value_for_name(name, body)

    def setUp(self):
        """Create all database tables before each test."""
        super(BaseTestCase, self).setUp()
        db.create_all()

    def tearDown(self):
        """Drop all tables and release pooled connections after each test."""
        db.session.remove()
        db.drop_all()
        # Dispose the engine so connections don't leak across test cases.
        db.get_engine(self.app).dispose()
|
Mrchanghao/coach-borad | app/components/LoadingIndicator/index.js | import React, { useState } from 'react';
import PropTypes from 'prop-types';
import { BeatLoader } from 'react-spinners';
import Wrapper from './Wrapper';
const CircularLoadingIndicator = ({ size, inverted }) => {
// eslint-disable-next-line no-unused-vars
const [loading, setLoading] = useState(false);
return (
<Wrapper>
<BeatLoader
size={size}
color={inverted ? '#ffffff' : '#293b5a'}
loading={loading}
/>
</Wrapper>
);
};
// Runtime prop validation: `size` is required; `inverted` is optional and
// switches the dots to white for dark backgrounds.
CircularLoadingIndicator.propTypes = {
  size: PropTypes.number.isRequired,
  inverted: PropTypes.bool,
};

export default CircularLoadingIndicator;
|
bamsalem/ovirt-web-ui | src/components/VmDetails/cards/DisksCard/index.js | <filename>src/components/VmDetails/cards/DisksCard/index.js
import React from 'react'
import PropTypes from 'prop-types'
import { connect } from 'react-redux'
import { createDiskForVm, editDiskOnVm, removeDisk } from '_/actions'
import { withMsg } from '_/intl'
import { maskForElementId, suggestDiskName, sortDisksForDisplay } from '_/components/utils'
import { Icon } from 'patternfly-react'
import { Grid, Row, Col } from '_/components/Grid'
import BaseCard from '../../BaseCard'
import DiskImageEditor from './DiskImageEditor'
import DiskListItem from './DiskListItem'
import itemStyle from '../../itemListStyle.css'
import baseStyle from '../../style.css'
import style from './style.css'
import { localeCompare } from '_/helpers'
// Return the storage domains the user could put a new disk on: *data*
// domains that are active in the VM's data center and that the current
// user is permitted to use.
function filterStorageDomains (vm, clusters, storageDomains) {
  const clusterId = vm.getIn(['cluster', 'id'])
  const dataCenterId = clusters && clusters.getIn([clusterId, 'dataCenterId'])

  const isEligible = sd =>
    sd.get('type') === 'data' &&
    sd.getIn(['statusPerDataCenter', dataCenterId]) === 'active' &&
    sd.get('canUserUseDomain')

  return storageDomains.filter(isEligible).toList()
}
// Propose a name for a brand new disk, unique among the VM's current disks.
function suggestDiskName_ (vm) {
  const takenNames = vm.get('disks', []).map(disk => disk.get('name'))
  return suggestDiskName(vm.get('name'), takenNames)
}
/*
 * Suggest a storage domain to use for new disks based on what storage domains
 * are used by the disks already attached to the VM. Falls back to the first
 * eligible domain when the VM has no disks yet.
 *
 * Bug fixes: the disk collection is an Immutable.js List, which exposes
 * `size` (not `length`) and is not index-accessible with `[0]`. The previous
 * `vm.get('disks').length === 0` was always false (undefined === 0), so the
 * no-disk branch was unreachable and '' was returned instead of a real
 * suggestion; `filtered[0]` on an Immutable List was always undefined.
 */
function suggestStorageDomain ({ vm, clusters, storageDomains, locale }) {
  const filtered = filterStorageDomains(vm, clusters, storageDomains).map(sd => sd.get('id'))

  const disks = vm.get('disks')
  if (disks && disks.size === 0) {
    // No disks yet: suggest the first eligible domain (undefined when none).
    return filtered.first()
  }

  // Otherwise suggest the eligible domain used by the most attached disks,
  // breaking ties by locale-aware domain-name order.
  let mostCommon = { id: '', count: 0 }
  vm.get('disks')
    .map(disk => disk.get('storageDomainId'))
    .filter(sdId => filtered.includes(sdId))
    .sort((a, b) => localeCompare(storageDomains.get(a).get('name'), storageDomains.get(b).get('name'), locale))
    .reduce((acc, sdId) => acc.set(sdId, (acc.get(sdId) || 0) + 1), new Map())
    .forEach((count, sdId) => {
      if (count > mostCommon.count) {
        mostCommon = { id: sdId, count }
      }
    })

  return mostCommon.id
}
/**
 * List of disks attached a VM, a few bits of info about each, and allow an edit
 * mode to created, edit and delete VM disks.
 */
class DisksCard extends React.Component {
  constructor (props) {
    super(props)

    const { vm, clusters, storageDomains, locale } = this.props
    // Derived data cached in state; kept in sync with props by
    // componentDidUpdate() below.
    this.state = {
      suggestedDiskName: suggestDiskName_(vm),
      suggestedStorageDomain: suggestStorageDomain({ vm, clusters, storageDomains, locale }),
      filteredStorageDomainList: filterStorageDomains(vm, clusters, storageDomains),
    }

    this.onCreateConfirm = this.onCreateConfirm.bind(this)
    this.onEditConfirm = this.onEditConfirm.bind(this)
    this.onDeleteConfirm = this.onDeleteConfirm.bind(this)
  }

  componentDidUpdate (prevProps, prevState) {
    const { vm, clusters, storageDomains, locale } = this.props
    const changes = {}

    // Suggestions only depend on the VM itself.
    if (prevProps.vm !== vm) {
      changes.suggestedDiskName = suggestDiskName_(vm)
      changes.suggestedStorageDomain = suggestStorageDomain({ vm, clusters, storageDomains, locale })
    }
    // The eligible domain list depends on both the VM and the domain map.
    if (prevProps.vm !== vm || prevProps.storageDomains !== storageDomains) {
      changes.filteredStorageDomainList = filterStorageDomains(vm, clusters, storageDomains)
    }

    if (Object.values(changes).length > 0) {
      this.setState(changes) // eslint-disable-line react/no-did-update-set-state
    }
  }

  // The three handlers below dispatch the redux actions wired in by
  // connect() at the bottom of this file.
  onCreateConfirm (vmId, disk) {
    this.props.addDisk({ vmId, disk })
  }

  onEditConfirm (vmId, disk) {
    this.props.editDisk({ vmId, disk })
  }

  onDeleteConfirm (vmId, diskId) {
    this.props.deleteDisk({ diskId, vmId })
  }

  render () {
    const { vm, onEditChange, msg, locale } = this.props
    const { suggestedDiskName, suggestedStorageDomain, filteredStorageDomainList } = this.state

    const idPrefix = 'vmdetail-disks'
    // Card is editable only for non-pool VMs whose storage the user may edit.
    const canEditTheCard =
      vm.get('canUserEditVmStorage') &&
      vm.getIn(['pool', 'id']) === undefined
    const canCreateDisks = filteredStorageDomainList.size > 0
    // Disks can only be deleted while the VM is shut down.
    const canDeleteDisks = vm.get('status') === 'down'

    const diskList = sortDisksForDisplay(vm.get('disks'), locale) // ImmutableJS List()

    return (
      <BaseCard
        idPrefix={idPrefix}
        icon={{ type: 'pf', name: 'storage-domain' }}
        title={msg.disks()}
        editTooltip={msg.edit()}
        itemCount={diskList.size}
        className={baseStyle['cell-card']}
        editable={canEditTheCard}
        onStartEdit={() => { onEditChange(true) }}
        onCancel={() => { onEditChange(false) }}
        onSave={() => { onEditChange(false) }}
      >
        {({ isEditing }) =>
          <Grid className={style['disks-container']}>
            { isEditing && canCreateDisks &&
              <Row key={`${vm.get('id')}-disk-add`}>
                <Col>
                  <DiskImageEditor
                    idPrefix={`${idPrefix}-new-disk`}
                    vm={vm}
                    suggestedName={suggestedDiskName}
                    suggestedStorageDomain={suggestedStorageDomain}
                    storageDomainList={filteredStorageDomainList}
                    onSave={this.onCreateConfirm}
                    trigger={({ onClick }) => (
                      <div className={itemStyle['create-block']}>
                        <a href='#' id={`${idPrefix}-new-disk-action`} onClick={onClick}>
                          <Icon className={itemStyle['create-icon']} type='fa' name='plus' />
                          <span className={itemStyle['create-text']} >{msg.diskActionCreateNew()}</span>
                        </a>
                      </div>
                    )}
                  />
                </Col>
              </Row>
            }

            { diskList.size === 0 &&
              <Row>
                <Col>
                  <div className={itemStyle['no-items']} id={`${idPrefix}-no-disks`}>{msg.noDisks()}</div>
                </Col>
              </Row>
            }

            { diskList.size > 0 && diskList.map(disk =>
              <Row key={disk.get('id')}>
                <Col style={{ display: 'block' }}>
                  <DiskListItem
                    idPrefix={`${idPrefix}-${maskForElementId(disk.get('name'))}`}
                    vm={vm}
                    disk={disk}
                    storageDomainList={filteredStorageDomainList}
                    isEditing={isEditing}
                    canDeleteDisks={canDeleteDisks}
                    onEdit={this.onEditConfirm}
                    onDelete={this.onDeleteConfirm}
                  />
                </Col>
              </Row>
            )}
          </Grid>
        }
      </BaseCard>
    )
  }
}
DisksCard.propTypes = {
  vm: PropTypes.object.isRequired,
  onEditChange: PropTypes.func.isRequired,

  storageDomains: PropTypes.object.isRequired, // Map<id, storageDomain>
  clusters: PropTypes.object.isRequired,

  // Redux action dispatchers injected by connect() below.
  addDisk: PropTypes.func.isRequired,
  editDisk: PropTypes.func.isRequired,
  deleteDisk: PropTypes.func.isRequired,

  msg: PropTypes.object.isRequired,
  locale: PropTypes.string.isRequired,
}

// Wire the card to the redux store (storage domains + clusters) and map the
// disk CRUD actions onto props.
export default connect(
  (state) => ({
    storageDomains: state.storageDomains,
    clusters: state.clusters,
  }),
  (dispatch) => ({
    addDisk: ({ vmId, disk }) => dispatch(createDiskForVm({ vmId, disk })),
    editDisk: ({ vmId, disk }) => dispatch(editDiskOnVm({ vmId, disk })),
    deleteDisk: ({ vmId, diskId }) => dispatch(removeDisk({ diskId, vmToRefreshId: vmId })),
  })
)(withMsg(DisksCard))
philou/concurrency-kata | src/main/java/net/bourgau/philippe/concurrency/kata/common/ThreadPoolImplementation.java | package net.bourgau.philippe.concurrency.kata.common;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
/**
 * Base class for chat-room implementations backed by an
 * {@link ExecutorService}; subclasses supply the pool and the room.
 */
public abstract class ThreadPoolImplementation implements Implementation {

    // Created lazily by startNewChatRoom(); shared by the whole implementation.
    private ExecutorService threadPool;

    @Override
    public ChatRoom startNewChatRoom() {
        // Order matters: subclasses may call threadPool() inside newChatRoom().
        threadPool = newThreadPool();
        return newChatRoom();
    }

    /** Factory for the executor backing this implementation. */
    protected abstract ExecutorService newThreadPool();

    /** Factory for the chat-room implementation under test. */
    protected abstract ChatRoom newChatRoom();

    /** Accessor for subclasses; valid only after startNewChatRoom(). */
    protected ExecutorService threadPool() {
        return threadPool;
    }

    @Override
    public Client newClient(String name, ChatRoom chatRoom, Output out) {
        return new InProcessClient(name, chatRoom, out);
    }

    @Override
    public void awaitOrShutdown(int count, TimeUnit timeUnit) throws InterruptedException {
        awaitOrShutdown(threadPool, count, timeUnit);
    }

    /**
     * Two-phase graceful shutdown (the pattern recommended by the
     * ExecutorService javadoc): stop accepting new tasks, wait up to
     * {@code count timeUnit} for running tasks to finish, then force-cancel
     * and give the pool 500 ms to die. If we are interrupted while waiting,
     * force-cancel, restore the interrupt flag and propagate.
     */
    protected void awaitOrShutdown(ExecutorService threadPool, int count, TimeUnit timeUnit) throws InterruptedException {
        threadPool.shutdown();
        try {
            if (!threadPool.awaitTermination(count, timeUnit)) {
                threadPool.shutdownNow();
                if (!threadPool.awaitTermination(500, TimeUnit.MILLISECONDS)) {
                    throw new RuntimeException("The thread pool could not force stop all its tasks");
                }
            }
        } catch (InterruptedException ie) {
            threadPool.shutdownNow();
            Thread.currentThread().interrupt();
            throw ie;
        }
    }
}
|
liufu1986007/tis | tis-common-dao/src/main/java/com/qlangtech/tis/trigger/biz/dal/pojo/TaskExecLog.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.qlangtech.tis.trigger.biz.dal.pojo;
import java.io.Serializable;
import java.util.Date;
/**
 * Plain data bean for one task execution log entry (MyBatis-generated style:
 * one field per table column, trivial getters/setters).
 * String setters trim their input and map null to null.
 */
public class TaskExecLog implements Serializable {

    // Primary key of the log entry.
    private Long execLogId;

    // Id of the task this log entry belongs to.
    private Long taskId;

    private String domain;

    // IP address the log entry originated from.
    private String fromIp;

    // Record creation / last-modification timestamps.
    private Date gmtCreate;

    private Date gmtModified;

    private String infoType;

    private String logContent;

    private static final long serialVersionUID = 1L;

    public Long getExecLogId() {
        return execLogId;
    }

    public void setExecLogId(Long execLogId) {
        this.execLogId = execLogId;
    }

    public Long getTaskId() {
        return taskId;
    }

    public void setTaskId(Long taskId) {
        this.taskId = taskId;
    }

    public String getDomain() {
        return domain;
    }

    public void setDomain(String domain) {
        this.domain = domain == null ? null : domain.trim();
    }

    public String getFromIp() {
        return fromIp;
    }

    public void setFromIp(String fromIp) {
        this.fromIp = fromIp == null ? null : fromIp.trim();
    }

    public Date getGmtCreate() {
        return gmtCreate;
    }

    public void setGmtCreate(Date gmtCreate) {
        this.gmtCreate = gmtCreate;
    }

    public Date getGmtModified() {
        return gmtModified;
    }

    public void setGmtModified(Date gmtModified) {
        this.gmtModified = gmtModified;
    }

    public String getInfoType() {
        return infoType;
    }

    public void setInfoType(String infoType) {
        this.infoType = infoType == null ? null : infoType.trim();
    }

    public String getLogContent() {
        return logContent;
    }

    public void setLogContent(String logContent) {
        this.logContent = logContent == null ? null : logContent.trim();
    }
}
|
ael-noblegas/pychron | pychron/dvc/defaults.py | # ===============================================================================
# Copyright 2015 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
# ============= standard library imports ========================
# ============= local library imports ==========================
# Default interference-correction factors, one "name,value,error" triple per
# line.  Presumably (ratio name, nominal value, uncertainty) for a TRIGA
# reactor irradiation — TODO confirm against the DVC consumer of this string.
TRIGA = '''K4039,1.0,0.01
K3839,1.0,0.1
K3739,1.0,0.1
Ca3937,1.0,0.1
Ca3837,1.0,0.1
Ca3637,1.0,0.1
Cl3638,1.0,0.1
Ca_K,1.0,0.1
Cl_K,1.0,0.1
'''
# Default 24-spoke sample-holder geometry.  First line is "<count>,<size>"
# (56 positions; 0.0175 is presumably the hole radius — confirm), followed by
# one "x,y" position per line: an outer ring of 24 plus four inner rings of 8.
HOLDER_24_SPOKES = '''56,0.0175
0.0,0.405
0.1048,0.3912
0.2025,0.3507
0.2864,0.2864
0.3507,0.2025
0.3912,0.1048
0.405,0.000
0.3912,-0.1048
0.3507,-0.2025
0.2864,-0.2864
0.2025,-0.3507
0.1048,-0.3912
0.0,-0.405
-0.1048,-0.3912
-0.2025,-0.3507
-0.2864,-0.2864
-0.3507,-0.2025
-0.3912,-0.1048
-0.405,0.0
-0.3912,0.1048
-0.3507,0.2025
-0.2864,0.2864
-0.2025,0.3507
-0.1048,0.3912
0.0000,0.3275
0.2316,0.2316
0.3275,0.0000
0.2316,-0.2316
0.0000,-0.3275
-0.2316,-0.2316
-0.3275,0.0000
-0.2316,0.2316
0.0000,0.265
0.1874,0.1874
0.265,0.0000
0.1874,-0.1874
0.0000,-0.265
-0.1874,-0.1874
-0.265,0.0000
-0.1874,0.1874
0.0000,0.2025
0.1432,0.1432
0.2025,0.0000
0.1432,-0.1432
0.0000,-0.2025
-0.1432,-0.1432
-0.2025,0.0000
-0.1432,0.1432
0.0000,0.1400
0.099,0.0990
0.1400,0.0000
0.0990,-0.0990
0.0000,-0.1400
-0.0990,-0.0990
-0.1400,0.0000
-0.0990,0.0990
'''
# Minimal 4-position laser tray geometries, same "count,size" + "x,y" format
# as above (a unit diamond of four positions).
LASER221 = '''4,0.1
0,1
1,0
0,-1
-1,0
'''
LASER65 = '''4,0.1
0,1
1,0
0,-1
-1,0
'''
# ============= EOF =============================================
|
eBay/block-aggregator | src/Serializable/SerializableDataTypesNumber.h | <gh_stars>10-100
/************************************************************************
Modifications Copyright 2021, eBay, Inc.
Original Copyright:
See URL: https://github.com/ClickHouse/ClickHouse
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
**************************************************************************/
#pragma once
#include <type_traits>
#include <Core/Field.h>
#include <Serializable/SerializableDataTypeNumberBase.h>
namespace nuclm {

// Concrete serializable numeric type.  Equality is exact runtime-type
// identity (UInt8 != Int8 etc.), and every numeric type may be wrapped in
// Nullable.
template <typename T> class SerializableDataTypeNumber final : public SerializableDataTypeNumberBase<T> {
    bool equals(const ISerializableDataType& rhs) const override { return typeid(rhs) == typeid(*this); }
    bool canBeInsideNullable() const override { return true; }
};

// Convenience aliases for all supported numeric widths.
using SerializableDataTypeUInt8 = SerializableDataTypeNumber<DB::UInt8>;
using SerializableDataTypeUInt16 = SerializableDataTypeNumber<DB::UInt16>;
using SerializableDataTypeUInt32 = SerializableDataTypeNumber<DB::UInt32>;
using SerializableDataTypeUInt64 = SerializableDataTypeNumber<DB::UInt64>;
using SerializableDataTypeInt8 = SerializableDataTypeNumber<DB::Int8>;
using SerializableDataTypeInt16 = SerializableDataTypeNumber<DB::Int16>;
using SerializableDataTypeInt32 = SerializableDataTypeNumber<DB::Int32>;
using SerializableDataTypeInt64 = SerializableDataTypeNumber<DB::Int64>;
using SerializableDataTypeFloat32 = SerializableDataTypeNumber<DB::Float32>;
using SerializableDataTypeFloat64 = SerializableDataTypeNumber<DB::Float64>;

// Compile-time trait: true only for the SerializableDataTypeNumber
// instantiations listed below, false for any other type.
template <typename DataType> constexpr bool IsDataTypeNumber = false;
template <> inline constexpr bool IsDataTypeNumber<SerializableDataTypeNumber<DB::UInt8>> = true;
template <> inline constexpr bool IsDataTypeNumber<SerializableDataTypeNumber<DB::UInt16>> = true;
template <> inline constexpr bool IsDataTypeNumber<SerializableDataTypeNumber<DB::UInt32>> = true;
template <> inline constexpr bool IsDataTypeNumber<SerializableDataTypeNumber<DB::UInt64>> = true;
template <> inline constexpr bool IsDataTypeNumber<SerializableDataTypeNumber<DB::Int8>> = true;
template <> inline constexpr bool IsDataTypeNumber<SerializableDataTypeNumber<DB::Int16>> = true;
template <> inline constexpr bool IsDataTypeNumber<SerializableDataTypeNumber<DB::Int32>> = true;
template <> inline constexpr bool IsDataTypeNumber<SerializableDataTypeNumber<DB::Int64>> = true;
template <> inline constexpr bool IsDataTypeNumber<SerializableDataTypeNumber<DB::Float32>> = true;
template <> inline constexpr bool IsDataTypeNumber<SerializableDataTypeNumber<DB::Float64>> = true;

} // namespace nuclm
|
akash1233/OnBot_Demo | scripts/jira/scripts-slack/edit_issue.js | <filename>scripts/jira/scripts-slack/edit_issue.js
/*******************************************************************************
*Copyright 2018 Cognizant Technology Solutions
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
//Load dependency
var request = require("request");
//Function to add comment to particular issue with parameters url,username,password,comment,issue_key
/**
 * Post a comment on a JIRA issue via the REST API v2
 * (POST {jira_repourl}/rest/api/2/issue/{issue_key}/comment).
 *
 * @param {string} jira_repourl - base URL of the JIRA instance
 * @param {string} username - JIRA user for basic auth
 * @param {string} password - JIRA password for basic auth
 * @param {string} comment_jira - comment body to post
 * @param {string} issue_key - key of the issue to comment on (e.g. "PROJ-1")
 * @param {function} callback_jira_edit - callback (err, message, data)
 */
var edit_call = function (jira_repourl, username, password, comment_jira, issue_key, callback_jira_edit) {
    var url = jira_repourl + "/rest/api/2/issue/" + issue_key + "/comment";
    var options = {
        auth: {
            'user': username,
            'pass': password
        },
        method: 'POST',
        url: url,
        headers: {
            'Content-Type': 'application/json'
        },
        body: {
            body: comment_jira
        },
        json: true
    };
    request(options, function (error, response, body) {
        if (error) {
            callback_jira_edit("Something went wrong", "Something went wrong", null);
        } else if (response.statusCode < 200 || response.statusCode >= 300) {
            // Bug fix: a non-2xx response (bad credentials, unknown issue,
            // permission denied, ...) used to be reported as success.
            callback_jira_edit("Something went wrong", "Something went wrong", null);
        } else {
            callback_jira_edit(null, "Comment Posted Successfully", null);
        }
    });
}
// Public API: expose edit_call under the `edit_issue` name.
module.exports = {
    edit_issue: edit_call // MAIN FUNCTION
}
|
Nakasar/bot-architecture-poc | brain/dashboard/public/js/main.js | <gh_stars>1-10
// All dashboard requests are made relative to this base URL.
const base_url = "/"

/**
 * Log out user from dashboard, clear session.
 * Clears both the localStorage copy and the cookie copy of the user token,
 * then reloads the page so the server renders the logged-out view.
 * NOTE(review): localStorage.setItem stores the string "null" rather than
 * removing the key — presumably consumers treat that value as "no token";
 * confirm.
 */
function logout() {
  console.log("[INFO] Logging out user. Clearing session.");
  localStorage.setItem("user_token", null);
  Cookies.remove('user_token');
  location.reload();
};
ybadmus/ASW | src/containers/_PrivateRoute/index.js | import React, {Fragment} from 'react';
import {Route} from 'react-router-dom';
import TopBar from "../../components/TopBar";
import LogoArea from "../../components/LogoArea";
import MainMenu from "../../components/MainMenu";
import FooterArea from "../../components/FooterArea";
const PrivateRoute = (props) => {
const {component: Component, ...rest} = props;
return (
<div className={props.parentClass}>
<Fragment>
<TopBar className="white_bg"/>
<div className="border_black"/>
<MainMenu/>
<LogoArea className="white_bg"/>
</Fragment>
<Route {...rest} render={props => (
<Component {...props} />
)} />
<FooterArea className="primay_bg"/>
</div>
)
};
export default PrivateRoute;
|
eisgroup/kraken-rules | kraken-model-dsl/src/main/java/kraken/model/dsl/KrakenDSLModelParser.java | /*
* Copyright 2018 EIS Ltd and/or one of its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kraken.model.dsl;
import java.net.URI;
import java.util.List;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.atn.PredictionMode;
import kraken.annotations.API;
import kraken.model.dsl.error.DSLErrorStrategy;
import kraken.model.dsl.model.DSLModel;
import kraken.model.dsl.visitor.DSLModelVisitor;
import kraken.model.resource.Resource;
import kraken.utils.ResourceUtils;
/**
* Parses Kraken Model DSL input to {@link Resource} instances
*
* @author mulevicius
*/
@API
public class KrakenDSLModelParser {

    // Static utility class; not instantiable.
    private KrakenDSLModelParser() {
    }

    /**
     * Parses Kraken Model DSL text into a resource model.
     *
     * @param dsl string to parse
     * @param uri identifies the resource that the DSL came from
     * @return a resource model that represents DSL contents
     * @throws DSLParsingException if DSL cannot be parsed
     */
    public static Resource parseResource(String dsl, URI uri) {
        KrakenDSL parser = forDSL(dsl);
        DSLModelVisitor visitor = new DSLModelVisitor();
        DSLModel dslModel = visitor.visit(parser.kraken());
        return KrakenDSLModelConverter.toResource(dslModel, uri);
    }

    /**
     * Convenience overload with a random resource URI.
     * Can be used only for testing.
     *
     * @param dsl string to parse
     * @return a resource model that represents DSL contents
     */
    public static Resource parseResource(String dsl) {
        return parseResource(dsl, ResourceUtils.randomResourceUri());
    }

    // Builds a fully configured ANTLR parser: custom error listeners on both
    // lexer and parser, LL prediction mode, and a custom error strategy.
    private static KrakenDSL forDSL(String dsl) {
        Common lexer = lexerForExpression(dsl);
        lexer.removeErrorListeners();
        lexer.addErrorListener(DSLErrorListener.getInstance());
        TokenStream tokenStream = new CommonTokenStream(lexer);
        KrakenDSL parser = new KrakenDSL(tokenStream);
        parser.setErrorHandler(new DSLErrorStrategy(List.of()));
        parser.getInterpreter().setPredictionMode(PredictionMode.LL);
        parser.removeErrorListeners();
        parser.addErrorListener(DSLErrorListener.getInstance());
        return parser;
    }

    private static Common lexerForExpression(String expression) {
        CharStream stream = CharStreams.fromString(expression);
        return new CommonLexer(stream);
    }

    static class CommonLexer extends Common {

        public CommonLexer(CharStream input) {
            super(input);
        }

        /**
         * Overrides popMode() of the abstract lexer to gracefully handle
         * syntactically incorrect expressions that have more closing curly
         * braces than opening ones.  Without this change, ANTLR4 gets stuck
         * in an infinite loop and crashes with a StackOverflowError when
         * parsing such an expression.
         *
         * @return the mode popped from the stack, or DEFAULT_MODE when the
         *         mode stack is already empty
         */
        @Override
        public int popMode() {
            return _modeStack.isEmpty()
                    ? DEFAULT_MODE
                    : super.popMode();
        }
    }
}
|
PressLabs/zinc | zinc/route53/policy.py | import copy
from collections import OrderedDict
import zinc.route53
from zinc.utils import memoized_property
from .record import Record, RECORD_PREFIX
class Policy:
    """Reconciles a routing policy's desired DNS record tree against the
    records that actually exist in the Route53 zone."""

    def __init__(self, zone, policy):
        # `zone` wraps the AWS hosted zone; `policy` is the DB-backed policy.
        assert isinstance(zone, zinc.route53.Zone)
        self.zone = zone
        self.db_policy = policy

    @property
    def name(self):
        return self.db_policy.name

    @property
    def id(self):
        return self.db_policy.id

    @property
    def routing(self):
        # Routing strategy; 'latency' and 'weighted' are handled below.
        return self.db_policy.routing

    @memoized_property
    def aws_records(self):
        """What we have in AWS (only the zone records belonging to this policy)."""
        return dict([
            (r_id, record) for (r_id, record) in self.zone.records().items()
            if record.is_member_of(self)
        ])

    @memoized_property
    def desired_records(self):
        """The records we should have (the desired state of the world)."""
        return OrderedDict([(record.id, record) for record in self._build_tree()])

    def _build_weighted_tree(self, policy_members, region_suffixed=True):
        """Build one weighted A/AAAA record per policy member.

        When ``region_suffixed`` is true the record names carry the member's
        region (used as leaves of the latency tree); otherwise all members
        share a single record name (flat weighted routing).
        """
        records = []
        for policy_member in policy_members:
            # A ':' in the address means IPv6 -> AAAA record.
            record_type = 'A'
            if ':' in policy_member.ip.ip:
                record_type = 'AAAA'
            # Only attach a health check when the IP actually has one.
            health_check_kwa = {}
            if policy_member.ip.healthcheck_id:
                health_check_kwa['health_check_id'] = str(policy_member.ip.healthcheck_id)
            record = Record(
                ttl=self.db_policy.ttl,
                type=record_type,
                values=[policy_member.ip.ip],
                set_identifier='{}-{}'.format(str(policy_member.id), policy_member.region),
                weight=policy_member.weight,
                zone=self.zone,
                **health_check_kwa,
            )
            # TODO: maybe we should have a specialized subclass for PolicyRecords
            # and this logic should be moved there
            if region_suffixed:
                record.name = '{}_{}_{}'.format(RECORD_PREFIX, self.name, policy_member.region)
            else:
                record.name = '{}_{}'.format(RECORD_PREFIX, self.name)
            records.append(record)
        return records

    def _build_lbr_tree(self, policy_members, regions):
        """Build the latency-based routed tree: weighted leaves per region plus
        one latency alias record per region pointing at that region's leaves."""
        records = self._build_weighted_tree(policy_members)
        for region in regions:
            record = Record(
                name='{}_{}'.format(RECORD_PREFIX, self.name),
                type='A',
                alias_target={
                    'HostedZoneId': self.zone.id,
                    'DNSName': '{}_{}_{}.{}'.format(
                        RECORD_PREFIX, self.name, region, self.zone.root),
                    'EvaluateTargetHealth': True  # len(regions) > 1
                },
                region=region,
                set_identifier=region,
                zone=self.zone,
            )
            # Only emit the alias if the region actually has IPv4 members.
            if self._has_ipv4_records_in_region(policy_members, region):
                records.append(record)
            # create a similar AAAA record if there exists IPv6 ips in this region.
            if self._has_ipv6_records_in_region(policy_members, region):
                record = copy.copy(record)
                record.type = 'AAAA'
                records.append(record)
        return records

    def _build_tree(self):
        """Build the full desired record list for this policy's routing mode."""
        policy_members = self.db_policy.members.exclude(enabled=False).exclude(ip__enabled=False)
        # ensure we always build region subtrees in alphabetical order; makes tests simpler
        regions = sorted(set([pm.region for pm in policy_members]))
        if len(regions) == 0:
            raise Exception(
                "Policy can't be applied for zone '{}'; "
                "There is no member in the '{}' policy.".format(
                    self.zone, self
                )
            )
        if self.routing == 'latency':
            # Multiple regions routed by latency.
            records = self._build_lbr_tree(policy_members, regions=regions)
        elif self.routing == 'weighted':
            # Flat weighted routing with a single shared record name.
            records = self._build_weighted_tree(
                policy_members, region_suffixed=False)
        else:
            raise AssertionError('invalid routing {} for policy {}'.format(
                self.routing, self.db_policy))
        return records

    def reconcile(self):
        """Bring AWS in line with the desired state: delete obsolete records
        first, then upsert the ones that are missing or differ."""
        aws_record_ids = self.aws_records.keys()
        desired_record_ids = self.desired_records.keys()
        to_delete = []
        for obsolete_rec_id in aws_record_ids - desired_record_ids:
            record = self.aws_records[obsolete_rec_id]
            record.deleted = True
            to_delete.append(record)
        self.zone.process_records(to_delete)
        to_upsert = []
        for rec_id, desired_record in self.desired_records.items():
            existing_record = self.aws_records.get(rec_id)
            if existing_record is None:
                to_upsert.append(desired_record)
            else:
                # if desired is a subset of existing, nothing to do
                if not desired_record.to_aws().items() <= existing_record.to_aws().items():
                    to_upsert.append(desired_record)
        self.zone.process_records(to_upsert)

    def remove(self):
        """Mark every AWS record belonging to this policy for deletion."""
        records = list(self.aws_records.values())
        for record in records:
            record.deleted = True
        self.zone.process_records(records)

    def _has_ipv6_records_in_region(self, policy_members, region):
        # True when any member in `region` (or anywhere, if region is falsy)
        # has an IPv6 address.
        has_ipv6 = False
        for pm in policy_members:
            if region and pm.region != region:
                continue
            if ':' in pm.ip.ip:
                has_ipv6 = True
        return has_ipv6

    def _has_ipv4_records_in_region(self, policy_members, region):
        # True when any member in `region` (or anywhere, if region is falsy)
        # has an IPv4 address.
        has_ipv4 = False
        for pm in policy_members:
            if region and pm.region != region:
                continue
            if '.' in pm.ip.ip:
                has_ipv4 = True
        return has_ipv4
|
cookesttttt/ert | src/api/basicSetting/Postmaintenance.js | <reponame>cookesttttt/ert<filename>src/api/basicSetting/Postmaintenance.js<gh_stars>0
import axios from '@/libs/api.request'
/**
 * Delete a position under a department.
 * @param {string|number} projectId
 * @param {string|number} deptId
 * @returns {Promise} axios request promise
 */
export const deleteJobsData = (projectId, deptId) => {
  return axios.request({
    url: `auth/org/role/${projectId}/${deptId}`,
    method: 'delete'
  })
}
// // Get job responsibilities (previous endpoint, kept for reference)
// export const getJobsObligationData = deptType => {
//   return axios.request({
//     url: 'auth/org/role/workcontent/' + deptType,
//     method: 'get'
//   })
// }
/** Get the job responsibilities of a department. */
export const getJobsObligationData = (projectId, deptId) => {
  return axios.request({
    url: `auth/org/dept/workcontents/${projectId}/${deptId}`,
    method: 'get'
  })
}
/** Get the role rows of a department. */
export const getTableData = (projectId, deptId) => {
  return axios.request({
    url: `auth/org/roles/dept/${projectId}/${deptId}`,
    method: 'get'
  })
}
/** Position classification API: list available role types. */
export const jobsType = () => {
  return axios.request({
    url: 'auth/org/role/types',
    method: 'get'
  })
}
/** Create a new position in a project. */
export const newJobsData = (projectId, data) => {
  return axios.request({
    url: `auth/org/role/${projectId}`,
    method: 'post',
    data: data
  })
}
/** Edit an existing position. */
export const getEditJobsData = (projectId, id, data) => {
  return axios.request({
    url: `auth/org/role/${projectId}/${id}`,
    method: 'put',
    data: data
  })
}
/** Get the project department organization tree. */
export const getAlltreeData = projectId => {
  return axios.request({
    url: `auth/org/depts/level4root/${projectId}`,
    method: 'get'
  })
}
|
anandibhat/cloud-native-applications | grpc-hr-example/grpc-hr-service/src/main/java/com/jobinesh/example/grpc/hr/service/GrpcExceptionHandler.java | package com.jobinesh.example.grpc.hr.service;
import io.grpc.*;
/**
 * gRPC server interceptor that converts runtime exceptions thrown by service
 * handlers into closed calls with an explicit {@link Status} instead of
 * letting them escape unmapped.
 */
public class GrpcExceptionHandler implements ServerInterceptor {

    @Override
    public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall(ServerCall<ReqT, RespT> serverCall, Metadata metadata,
                                                                 ServerCallHandler<ReqT, RespT> serverCallHandler) {
        ServerCall.Listener<ReqT> listener = serverCallHandler.startCall(serverCall, metadata);
        return new ExceptionHandlingServerCallListener<>(listener, serverCall, metadata);
    }

    /**
     * Wraps the real listener and translates RuntimeExceptions raised in the
     * onHalfClose/onReady callbacks into a closed call with an error status.
     */
    private class ExceptionHandlingServerCallListener<ReqT, RespT>
            extends ForwardingServerCallListener.SimpleForwardingServerCallListener<ReqT> {

        private ServerCall<ReqT, RespT> serverCall;
        private Metadata metadata;

        ExceptionHandlingServerCallListener(ServerCall.Listener<ReqT> listener, ServerCall<ReqT, RespT> serverCall,
                                            Metadata metadata) {
            super(listener);
            this.serverCall = serverCall;
            this.metadata = metadata;
        }

        @Override
        public void onHalfClose() {
            try {
                super.onHalfClose();
            } catch (RuntimeException ex) {
                // Close the call with a mapped status, then rethrow so gRPC's
                // own bookkeeping still sees the failure.
                handleException(ex, serverCall, metadata);
                throw ex;
            }
        }

        @Override
        public void onReady() {
            try {
                super.onReady();
            } catch (RuntimeException ex) {
                handleException(ex, serverCall, metadata);
                throw ex;
            }
        }

        // IllegalArgumentException maps to INVALID_ARGUMENT (client error);
        // any other runtime exception is reported as UNKNOWN.
        private void handleException(RuntimeException exception, ServerCall<ReqT, RespT> serverCall, Metadata metadata) {
            if (exception instanceof IllegalArgumentException) {
                serverCall.close(Status.INVALID_ARGUMENT.withDescription(exception.getMessage()), metadata);
            } else {
                serverCall.close(Status.UNKNOWN, metadata);
            }
        }
    }
}
|
KaizerHind/FreeExcersice_Javascript | University_Java/zPrueba1/src/zprueba1/Teclado.java | <reponame>KaizerHind/FreeExcersice_Javascript
package zprueba1;
import java.util.Scanner;
/**
 * Small helper around {@link Scanner} that prompts on standard output and
 * reads validated input from standard input.
 */
public class Teclado {

    // Shared scanner over System.in, created once in the constructor.
    // (Bug fix: the previous field initializer created a second, immediately
    // discarded Scanner wrapping System.in.)
    Scanner sc;

    public Teclado() {
        sc = new Scanner(System.in);
    }

    /**
     * Prompts with the given title until the user enters a non-empty line
     * consisting only of ASCII letters (A-Z, a-z), then returns it.
     *
     * @param titulo prompt printed before each read attempt
     * @return the first non-empty, letters-only line entered
     */
    public String getString(String titulo) {
        String s1 = "";
        while (s1.equals("")) {
            System.out.println(titulo);
            s1 = sc.nextLine();
            // Reject lines with any non-letter character and re-prompt.
            if (!s1.matches("^[A-Za-z]*$")) {
                s1 = "";
            }
        }
        return s1;
    }
}
|
Testiduk/gitlabhq | spec/lib/sidebars/groups/menus/kubernetes_menu_spec.rb | <reponame>Testiduk/gitlabhq<gh_stars>1000+
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Sidebars::Groups::Menus::KubernetesMenu do
  # A private group with a single owner; the group is built (not persisted)
  # where possible to keep the spec fast.
  let_it_be(:owner) { create(:user) }
  let_it_be(:group) do
    build(:group, :private).tap do |g|
      g.add_owner(owner)
    end
  end

  let(:user) { owner }
  let(:context) { Sidebars::Groups::Context.new(current_user: user, container: group) }
  let(:menu) { described_class.new(context) }

  describe '#render?' do
    context 'when user can read clusters' do
      it 'returns true' do
        expect(menu.render?).to eq true
      end
    end

    # An anonymous user cannot read clusters, so the menu must be hidden.
    context 'when user cannot read clusters rules' do
      let(:user) { nil }

      it 'returns false' do
        expect(menu.render?).to eq false
      end
    end
  end
end
|
mark-online/server | src/servertest/datatable/MockSOActiveSkillTable.cpp | <filename>src/servertest/datatable/MockSOActiveSkillTable.cpp
#include "ServerTestPCH.h"
#include <gideon/servertest/datatable/MockSOActiveSkillTable.h>
//#include <gideon/servertest/datatable/DataCodes.h>
#include <gideon/cs/datatable/SkillTemplate.h>
#include <gideon/cs/datatable/template/so_active_skill_table.hxx>
namespace gideon { namespace servertest {
const datatable::SOActiveSkillTemplate*
MockSOActiveSkillTable::getSOSkill(SkillCode code) const
{
    // Look up the registered template for this skill code;
    // unknown codes yield nullptr.
    const auto found = skillMap_.find(code);
    return (found == skillMap_.end()) ? nullptr : &found->second;
}
}} // namespace gideon { namespace servertest {
|
jiaguobing/FastCAE | VTK/vtk_7.1.1_x64_Debug/include/vtk-7.1/vtkCommonColorModule.h |
/* Export/visibility macro header for the vtkCommonColor module (MSVC build).
 * Symbols are __declspec(dllexport) while the module itself is being built
 * (vtkCommonColor_EXPORTS defined) and __declspec(dllimport) when consumed. */
#ifndef VTKCOMMONCOLOR_EXPORT_H
#define VTKCOMMONCOLOR_EXPORT_H

#ifdef VTKCOMMONCOLOR_STATIC_DEFINE
/* Static library: no export decorations are needed. */
#  define VTKCOMMONCOLOR_EXPORT
#  define VTKCOMMONCOLOR_NO_EXPORT
#else
#  ifndef VTKCOMMONCOLOR_EXPORT
#    ifdef vtkCommonColor_EXPORTS
        /* We are building this library */
#      define VTKCOMMONCOLOR_EXPORT __declspec(dllexport)
#    else
        /* We are using this library */
#      define VTKCOMMONCOLOR_EXPORT __declspec(dllimport)
#    endif
#  endif

#  ifndef VTKCOMMONCOLOR_NO_EXPORT
#    define VTKCOMMONCOLOR_NO_EXPORT
#  endif
#endif

/* Deprecation markers, optionally combined with the export decoration. */
#ifndef VTKCOMMONCOLOR_DEPRECATED
#  define VTKCOMMONCOLOR_DEPRECATED __declspec(deprecated)
#  define VTKCOMMONCOLOR_DEPRECATED_EXPORT VTKCOMMONCOLOR_EXPORT __declspec(deprecated)
#  define VTKCOMMONCOLOR_DEPRECATED_NO_EXPORT VTKCOMMONCOLOR_NO_EXPORT __declspec(deprecated)
#endif

/* Generated switch: when non-zero, the NO_DEPRECATED macro is defined. */
#define DEFINE_NO_DEPRECATED 0
#if DEFINE_NO_DEPRECATED
#  define VTKCOMMONCOLOR_NO_DEPRECATED
#endif

#endif
|
pr0d1r2/shelly | lib/shelly/cli/config.rb | <filename>lib/shelly/cli/config.rb
require "shelly/cli/command"
module Shelly
module CLI
# Thor command class managing a cloud's configuration files:
# list/show/create/edit/delete, each talking to the Shelly Cloud API.
class Config < Command
  include Thor::Actions
  include Helpers

  before_hook :logged_in?, :only => [:list, :show, :create, :new, :edit, :update, :delete]
  class_option :cloud, :type => :string, :aliases => "-c", :desc => "Specify cloud"

  desc "list", "List configuration files"
  def list
    app = multiple_clouds(options[:cloud], "list")
    configs = app.configs
    unless configs.empty?
      say "Configuration files for #{app}", :green
      user_configs = app.user_configs
      unless user_configs.empty?
        say "Custom configuration files:"
        print_configs(user_configs)
      else
        say "You have no custom configuration files."
      end
      shelly_configs = app.shelly_generated_configs
      unless shelly_configs.empty?
        say "Following files are created by Shelly Cloud:"
        print_configs(shelly_configs)
      end
    else
      # NOTE(review): `cloud` is not defined locally in this method —
      # presumably a helper provided by Command; other messages in this
      # class interpolate `app`. Confirm.
      say "Cloud #{cloud} has no configuration files"
    end
  end

  desc "show PATH", "View configuration file"
  def show(path)
    app = multiple_clouds(options[:cloud], "show #{path}")
    config = app.config(path)
    say "Content of #{config["path"]}:", :green
    say config["content"]
  rescue Client::NotFoundException => e
    raise unless e.resource == :config
    say_error "Config '#{path}' not found", :with_exit => false
    say_error "You can list available config files with `shelly config list --cloud #{app}`"
  end

  map "new" => :create
  desc "create PATH", "Create configuration file"
  def create(path)
    output = open_editor(path)
    app = multiple_clouds(options[:cloud], "create #{path}")
    app.create_config(path, output)
    say "File '#{path}' created.", :green
    say "To make changes to running application redeploy it using:"
    say "`shelly redeploy --cloud #{app}`"
  rescue Client::ValidationException => e
    e.each_error { |error| say_error error, :with_exit => false }
    exit 1
  end

  map "update" => :edit
  desc "edit PATH", "Edit configuration file"
  def edit(path = nil)
    say_error "No configuration file specified" unless path
    app = multiple_clouds(options[:cloud], "edit #{path}")
    config = app.config(path)
    content = open_editor(config["path"], config["content"])
    app.update_config(path, content)
    say "File '#{config["path"]}' updated.", :green
    say "To make changes to running application redeploy it using:"
    say "`shelly redeploy --cloud #{app}`"
  rescue Client::NotFoundException => e
    raise unless e.resource == :config
    say_error "Config '#{path}' not found", :with_exit => false
    say_error "You can list available config files with `shelly config list --cloud #{app}`"
  rescue Client::ValidationException => e
    e.each_error { |error| say_error error, :with_exit => false }
    exit 1
  end

  desc "delete PATH", "Delete configuration file"
  def delete(path = nil)
    say_error "No configuration file specified" unless path
    app = multiple_clouds(options[:cloud], "delete #{path}")
    # Bug fix: the confirmation used to print the literal text 'path'
    # instead of the name of the file about to be deleted.
    answer = yes?("Are you sure you want to delete '#{path}' (yes/no): ")
    if answer
      app.delete_config(path)
      say "File '#{path}' deleted.", :green
      say "To make changes to running application redeploy it using:"
      say "`shelly redeploy --cloud #{app}`"
    else
      say "File not deleted"
    end
  rescue Client::NotFoundException => e
    raise unless e.resource == :config
    say_error "Config '#{path}' not found", :with_exit => false
    say_error "You can list available config files with `shelly config list --cloud #{app}`"
  end

  no_tasks do
    # Renders the given configs as a two-column bullet list.
    def print_configs(configs)
      print_table(configs.map { |config|
        [" * ", config["path"]] })
    end

    # Writes `output` to a temp file, opens $EDITOR on it and returns the
    # edited contents. The temp file keeps the original extension so the
    # editor can pick the right syntax mode.
    def open_editor(path, output = "")
      filename = "shelly-edit-"
      0.upto(20) { filename += rand(9).to_s }
      filename << File.extname(path)
      filename = File.join(Dir.tmpdir, filename)
      tf = File.open(filename, "w")
      tf.sync = true
      tf.puts output
      tf.close
      no_editor unless system("#{ENV['EDITOR']} #{tf.path}")
      tf = File.open(filename, "r")
      output = tf.gets(nil)
      tf.close
      File.unlink(filename)
      output
    end

    def no_editor
      say_error "Please set EDITOR environment variable"
    end
  end
end
end
end
|
school-engagements/pikater | lib/jade-4.3.2/src/jade/tools/sniffer/AbstractPopup.java | <reponame>school-engagements/pikater
/*****************************************************************
JADE - Java Agent DEvelopment Framework is a framework to develop multi-agent systems in compliance with the FIPA specifications.
Copyright (C) 2000 CSELT S.p.A.
GNU Lesser General Public License
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation,
version 2.1 of the License.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the
Free Software Foundation, Inc., 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.
*****************************************************************/
package jade.tools.sniffer;
import javax.swing.AbstractAction;
import javax.swing.Action;
/**
Javadoc documentation for the file
@author <NAME>, <NAME> - Universita` di Parma
<Br>
<a href="mailto:<EMAIL>"> <NAME>(e-mail) </a>
@version $Date: 2002-12-13 14:34:24 +0100 (ven, 13 dic 2002) $ $Revision: 3529 $
*/
/**
* AbstractPopup is the superclass of the actions
* performed by PopupMenu on the Canvas.
*
* This class is abstract because it does not define the
* ActionPerformed(ActionEvent evt) method. In every subClass of
* AbstractPopup this method performs a specific action.
*
* Subclasses of AbstractPopup are:
* @see jade.tools.sniffer.PopSniffAgent
* @see jade.tools.sniffer.PopNoSniffAgent
* @see jade.tools.sniffer.PopShowAgent
* @see jade.tools.sniffer.ViewMessage
*/
abstract public class AbstractPopup extends AbstractAction {

    /**
     * Stores the given name as the action's display name (Action.NAME).
     * Subclasses provide the actual actionPerformed(ActionEvent) behavior.
     */
    public AbstractPopup(String actionName) {
        putValue(Action.NAME,actionName);
    }
}
SkankHunter24/BaseSpringMVC | src/base/BaseServiceSupport.java | package base;
import java.sql.Connection;
import javax.annotation.Resource;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.mvel2.util.ThisLiteral;
import org.mybatis.spring.SqlSessionTemplate;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Repository;
/**
 * Base support class wired with the MyBatis session objects; gives
 * subclasses access to mapper proxies and Log4j loggers.
 */
@Repository("BaseDaoSupport")
@Scope("prototype")
public class BaseServiceSupport {

    @Resource(name="sqlSessionFactory")
    private SqlSessionFactory sqlSessionFactory;

    @Resource(name="sqlSessionTemplate")
    private SqlSessionTemplate sqlSessionTemplate;

    public SqlSessionFactory getSqlSessionFactory() {
        return sqlSessionFactory;
    }

    public SqlSessionTemplate getSqlSessionTemplate() {
        return sqlSessionTemplate;
    }

    /** Obtains a MyBatis mapper proxy of the given interface type. */
    public <T> T getMapper(Class<T> mapper) {
        return this.getSqlSessionTemplate().getMapper(mapper);
    }

    /**
     * Returns a Log4j logger for the class with the given name.
     * logger.warn(String)  - warning-level log
     * logger.info(String)  - info-level log
     * logger.error(String) - error-level log
     *
     * @param className name of the class emitting the log
     * @return a Log4j logger instance
     * @throws Exception
     */
    public Logger getLogger(String className) throws Exception
    {
        return LogManager.getLogger(className);
    }
}
|
anyanfei/hotgo | hotgo-server/app/form/input/admin_dept_input.go | <filename>hotgo-server/app/form/input/admin_dept_input.go
package input
import "github.com/bufanyun/hotgo/app/model/entity"
// AdminDeptNameUniqueInp: check whether a department name is unique.
type AdminDeptNameUniqueInp struct {
	Name string
	Id   int64
}

type AdminDeptNameUniqueModel struct {
	IsUnique bool
}

// AdminDeptMaxSortInp: query the maximum sort value.
type AdminDeptMaxSortInp struct {
	Id int64
}

type AdminDeptMaxSortModel struct {
	Sort int
}

// AdminDeptEditInp: create or update a department record.
// NOTE(review): the original comment said "dictionary data", apparently
// copied from the dict module - confirm.
type AdminDeptEditInp struct {
	entity.AdminDept
}

type AdminDeptEditModel struct{}

// AdminDeptDeleteInp: delete a department (original comment said
// "delete dictionary type", likely copy-pasted).
type AdminDeptDeleteInp struct {
	Id interface{}
}

type AdminDeptDeleteModel struct{}

// AdminDeptViewInp: fetch details of a single department.
type AdminDeptViewInp struct {
	Id int64
}

type AdminDeptViewModel struct {
	entity.AdminDept
}

// AdminDeptListInp: fetch the department list.
type AdminDeptListInp struct {
	Name string
}

// AdminDeptTreeDept: department node in a tree structure.
type AdminDeptTreeDept struct {
	entity.AdminDept
	Children []*AdminDeptTreeDept `json:"children"`
}

type AdminDeptListModel AdminDeptTreeDept

// AdminDeptListTreeInp: fetch the department list as a tree.
type AdminDeptListTreeInp struct {
	Name string
}

// AdminDeptListTreeDept: lightweight tree node for UI consumption.
type AdminDeptListTreeDept struct {
	Id       int64                    `json:"id" `
	Key      int64                    `json:"key" `
	Pid      int64                    `json:"pid" `
	Label    string                   `json:"label"`
	Title    string                   `json:"title"`
	Name     string                   `json:"name"`
	Type     string                   `json:"type"`
	Children []*AdminDeptListTreeDept `json:"children"`
}

type AdminDeptListTreeModel AdminDeptListTreeDept
|
kooksee/nebulas-p2p | testing/broadcast/main.go | <reponame>kooksee/nebulas-p2p<filename>testing/broadcast/main.go<gh_stars>0
package main
import (
"flag"
"fmt"
"hash/fnv"
"math"
"math/rand"
"reflect"
"sort"
"time"
peer "github.com/libp2p/go-libp2p-peer"
"nebulas-p2p"
)
// nodeIdx is an int slice sortable ascending via sort.Interface.
type nodeIdx []int

// Len returns the size of nodeIdx
func (idx nodeIdx) Len() int {
	return len(idx)
}

// Swap swaps the ith with jth
func (idx nodeIdx) Swap(i, j int) {
	idx[i], idx[j] = idx[j], idx[i]
}

// Less reports whether element i sorts before element j.
// (Uses <=, which sort tolerates; a strict < is the usual convention.)
func (idx nodeIdx) Less(i, j int) bool {
	return idx[i] <= idx[j]
}
// Command-line parameters controlling the simulated gossip network.
var (
	NodeCount     = flag.Int("node_count", 1000, "node count in network, default is 1000")
	NeighborCount = flag.Int64("neighbor_count", 50, "neighbor count in route table, default is 50")
	MaxTTL        = flag.Int64("max_ttl", 3, "max ttl, default is 3")
	LoopTimes     = flag.Int("loop_times", 20, "number of loop times, default is 20")
)

// Node the simulation of the node
type Node struct {
	id       int    // index into the global node slice
	name     string // libp2p peer ID string, hashed for XOR distance
	neighbor []int  // ids of routing-table neighbours
	bingo    bool   // true once the broadcast message reached this node
	ttl      int    // hop count at which the message arrived
}
// main runs the broadcast simulation LoopTimes times and prints the
// average fraction of nodes reached per run.
func main() {
	flag.Parse()
	total := 0
	fmt.Printf("Usage: [-node_count] [-neighbor_count] [-max_ttl] [-loop_times]\n")
	for i := 0; i < *LoopTimes; i++ {
		count := gotask()
		total += count
	}
	fmt.Println("The average rate of coverage:", float32(total)/(float32(*LoopTimes*(*NodeCount))))
}
// gotask builds a fresh network, broadcasts from one random node and
// returns how many nodes received the message.
func gotask() int {
	nodeCount := int(*NodeCount)
	var nodes []*Node
	nodes = initRouteTable(nodeCount, nodes)
	// Pick a random origin node and mark it as reached.
	random := rand.Intn(nodeCount)
	node := nodes[random]
	node.bingo = true
	broadcast(node, nodes)
	// Count how many nodes the message reached.
	count := 0
	for _, v := range nodes {
		if v.bingo == true {
			count++
		}
	}
	fmt.Println("rate of coverage:", float32(count)/float32(nodeCount))
	return count
}
// initRouteTable creates nodeCount nodes, joins each new node to the seed
// node, then runs 10 gossip rounds in which every node syncs routes with
// sqrt(len(neighbor)) random neighbours to densify the routing tables.
func initRouteTable(nodeCount int, nodes []*Node) []*Node {
	// Node 0 acts as the bootstrap/seed node.
	seed := newNode(0)
	nodes = append(nodes, seed)
	for i := 1; i < nodeCount; i++ {
		node := newNode(i)
		nodes = append(nodes, node)
		syncRoute(node, seed, nodes)
	}
	for k := 0; k < 10; k++ {
		for i := 0; i < nodeCount; i++ {
			node := nodes[i]
			// NOTE(review): reseeding the global RNG on every iteration is
			// unnecessary; seeding once at startup would suffice.
			rand.Seed(time.Now().UnixNano())
			randomList := rand.Perm(len(node.neighbor) - 1)
			for i := 0; i < int(math.Sqrt(float64(len(node.neighbor)))); i++ {
				id := node.neighbor[randomList[i]]
				tar := nodes[id]
				syncRoute(node, tar, nodes)
			}
		}
	}
	return nodes
}
// newNode builds a node with the given id and a freshly generated peer
// name.  `net` is presumably the package imported as "nebulas-p2p" -
// confirm the import alias.
func newNode(id int) *Node {
	networkKey, _ := net.GenerateEd25519Key()
	name, _ := peer.IDFromPublicKey(networkKey.GetPublic())
	node := &Node{
		id:       id,
		name:     name.Pretty(),
		neighbor: []int{},
		bingo:    false,
		ttl:      0,
	}
	return node
}
// broadcast recursively forwards the message from node to all of its
// neighbours, marking them reached, until the hop budget MaxTTL is spent.
func broadcast(node *Node, nodes []*Node) {
	maxTTL := int(*MaxTTL)
	if node.ttl <= maxTTL {
		for _, v := range node.neighbor {
			n := nodes[v]
			if n.id != node.id {
				n.bingo = true
				// Recurse only while both sender and receiver are within TTL.
				if node.ttl <= maxTTL && n.ttl <= maxTTL {
					n.ttl = node.ttl + 1
					broadcast(n, nodes)
				}
			}
		}
	}
	return
}
// syncRoute merges routing information between node and target.
//
// If target's table is below capacity the two tables are shared fully in
// both directions; otherwise node adopts only the half of target's
// neighbours that are nearest to it by XOR distance of hashed peer names.
func syncRoute(node *Node, target *Node, nodes []*Node) {
	neighborCount := int(*NeighborCount)
	if len(target.neighbor) < neighborCount {
		// BUG FIX: the original loops used `for id := range target.neighbor`,
		// which iterates slice *indices* (0..len-1), not the stored
		// neighbour ids, so low-numbered nodes were added instead of the
		// actual neighbours.
		for _, id := range target.neighbor {
			node.neighbor = addNewNode(node.neighbor, id, neighborCount)
		}
		node.neighbor = addNewNode(node.neighbor, target.id, neighborCount)
		// Also make node known to each of target's neighbours.
		for _, id := range target.neighbor {
			n := nodes[id]
			n.neighbor = addNewNode(n.neighbor, node.id, neighborCount)
		}
		target.neighbor = addNewNode(target.neighbor, node.id, neighborCount)
		return
	}
	// Target's table is full: adopt only the closest half of its neighbours.
	ret := getNearestNode(node, target.neighbor, nodes)
	for _, retID := range ret {
		node.neighbor = addNewNode(node.neighbor, int(retID), neighborCount)
		tempnode := nodes[int(retID)]
		tempnode.neighbor = addNewNode(tempnode.neighbor, node.id, neighborCount)
	}
	target.neighbor = addNewNode(target.neighbor, node.id, neighborCount)
}
// getNearestNode returns (roughly) the half of `ids` whose hashed names
// are closest to node's hashed name by XOR distance (Kademlia-style).
func getNearestNode(node *Node, ids []int, nodes []*Node) nodeIdx {
	var ret nodeIdx
	var tmp nodeIdx
	var tmpMap = make(map[int]int)
	for _, id := range ids {
		tempnode := nodes[id]
		// XOR distance between the 32-bit hashes of the peer names.
		nodeNameInt := int(hash(node.name))
		tempnodeNameInt := int(hash(tempnode.name))
		distance := nodeNameInt ^ tempnodeNameInt
		tmp = append(tmp, distance)
		sort.Sort(tmp)
		tmpMap[distance] = id
		// Keep only the closest half, evicting the current farthest.
		// NOTE(review): distinct ids with an equal distance share one map
		// key, so duplicate distances can silently drop entries - confirm.
		if len(tmp) > len(ids)/2 {
			delete(tmpMap, tmp[len(tmp)-1])
			tmp = tmp[:len(tmp)-1]
		}
	}
	for _, v := range tmpMap {
		ret = append(ret, v)
	}
	return ret
}
// hash returns the 32-bit FNV-1a digest of str.
func hash(str string) uint32 {
	digest := fnv.New32a()
	digest.Write([]byte(str))
	return digest.Sum32()
}
// addNewNode appends id to ids unless it is already present, first
// evicting random existing entries when the table is at its size limit.
func addNewNode(ids []int, id int, limit int) []int {
	if len(ids) >= limit {
		// Shuffle, then drop count+1 entries from the front to make room.
		count := len(ids) - limit
		ids = shuffle(ids)
		ids = ids[count+1:]
	}
	if !inArray(id, ids) {
		ids = append(ids, id)
	}
	return ids
}
// inArray reports whether obj equals some element of array, which may be
// a slice or an array of any element type; any other input yields false.
func inArray(obj interface{}, array interface{}) bool {
	kind := reflect.TypeOf(array).Kind()
	if kind != reflect.Array && kind != reflect.Slice {
		return false
	}
	values := reflect.ValueOf(array)
	for i := 0; i < values.Len(); i++ {
		if values.Index(i).Interface() == obj {
			return true
		}
	}
	return false
}
// shuffle returns a new slice containing a random permutation of vals;
// the input slice is left untouched.
func shuffle(vals []int) []int {
	r := rand.New(rand.NewSource(time.Now().Unix()))
	ret := make([]int, len(vals))
	perm := r.Perm(len(vals))
	for i, randIndex := range perm {
		ret[i] = vals[randIndex]
	}
	return ret
}
// generateRandomNumber returns `count` distinct random ints in [start, end),
// or nil when the range is invalid or too small to hold `count` values.
func generateRandomNumber(start int, end int, count int) []int {
	if end < start || (end-start) < count {
		return nil
	}
	nums := make([]int, 0, count)
	// Track already-drawn values in a set so each membership check is O(1)
	// instead of rescanning the result slice (previously O(count^2) total).
	seen := make(map[int]bool, count)
	r := rand.New(rand.NewSource(time.Now().UnixNano()))
	for len(nums) < count {
		num := r.Intn(end-start) + start
		if !seen[num] {
			seen[num] = true
			nums = append(nums, num)
		}
	}
	return nums
}
|
lpi/codeforces-go | misc/nowcoder/6357/b/b.go | <reponame>lpi/codeforces-go
package main
// github.com/EndlessCheng/codeforces-go
// solve returns the minimum number of moves needed to turn S into T, both
// 4-character lowercase strings.  One move fixes an index i and adds 2, 3
// and 5 respectively to the three characters other than position i,
// wrapping within 'a'..'z'.  Implemented as a level-by-level BFS over
// [4]byte states, where d is the current depth (move count).
func solve(S, T string) int {
	delta := [3]byte{2, 3, 5}
	var s, t [4]byte
	for i := range S {
		s[i] = S[i]
	}
	for i := range T {
		t[i] = T[i]
	}
	vis := map[[4]byte]bool{s: true}
	q := [][4]byte{s}
	// NOTE(review): if t is unreachable the queue empties and this loop
	// never terminates - the problem presumably guarantees a solution.
	for d := 0; ; d++ {
		qq := q
		q = nil
		for _, s := range qq {
			if s == t {
				return d
			}
			// Try all 4 choices of the fixed position i.
			for i := 0; i < 4; i++ {
				c := 0
				ss := s
				for j := 0; j < 4; j++ {
					if j == i {
						continue
					}
					// Apply the next delta with wraparound past 'z'.
					ss[j] += delta[c]
					if ss[j] > 'z' {
						ss[j] -= 26
					}
					c++
				}
				if !vis[ss] {
					vis[ss] = true
					q = append(q, ss)
				}
			}
		}
	}
}
|
ANR2ME/pspdecompiler | hash.c | <reponame>ANR2ME/pspdecompiler
/**
* Author: <NAME> (<EMAIL>)
*/
#include <stddef.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>
#include "hash.h"
#include "alloc.h"
#include "utils.h"
#include "types.h"
/* Map a hash value to a bucket index; table sizes are powers of two. */
#define INDEX_FOR(hash, size) ((hash) & ((size) - 1))

/* Singly-linked chain node holding one key/value pair and its cached hash. */
struct _entry {
  void *key, *value;
  unsigned int hash;
  struct _entry *next;
};

typedef struct _entry *entry;

/* Chained hash table; tables and entries come from a shared fixed pool. */
struct _hashtable {
  struct _hashpool *const pool;   /* owning pool (set once at allocation) */
  unsigned int tablelength;       /* number of buckets (power of two) */
  unsigned int entrycount;        /* number of stored entries */
  unsigned int loadlimit;         /* grow when entrycount reaches this */
  struct _entry **table;          /* bucket array */
  hashfn hashfn;                  /* key hash function */
  hashequalsfn eqfn;              /* key equality predicate */
};

/* Fixed-size allocators shared by hash tables and their entries. */
struct _hashpool {
  fixedpool tablepool;
  fixedpool entrypool;
};
/* Create a pool sized for `numtables` tables and `numentries` entries. */
hashpool hashpool_create (size_t numtables, size_t numentries)
{
  hashpool pool = (hashpool) xmalloc (sizeof (struct _hashpool));
  pool->tablepool = fixedpool_create (sizeof (struct _hashtable), numtables, 0);
  pool->entrypool = fixedpool_create (sizeof (struct _entry), numentries, 0);
  return pool;
}
/* Pool-destruction callback: release a table's bucket array. */
static
void destroy_hashtable (void *ptr, void *arg)
{
  hashtable ht = ptr;
  if (ht->table) {
    free (ht->table);
    ht->table = NULL;
  }
}

/* Destroy the pool along with every table/entry still allocated from it. */
void hashpool_destroy (hashpool pool)
{
  fixedpool_destroy (pool->tablepool, &destroy_hashtable, NULL);
  fixedpool_destroy (pool->entrypool, NULL, NULL);
  free (pool);
}
/* Allocate one chain entry from the pool. */
static
entry alloc_entry (hashpool pool)
{
  return fixedpool_alloc (pool->entrypool);
}

/* Return a chain entry to the pool. */
static
void free_entry (hashpool pool, entry e)
{
  fixedpool_free (pool->entrypool, e);
}
/* Allocate a table with `size` buckets (must be a power of two for
   INDEX_FOR to index correctly) using the given hash/equality functions. */
hashtable hashtable_alloc (hashpool pool, unsigned int size, hashfn hashfn, hashequalsfn eqfn)
{
  hashtable ht;
  hashpool *ptr;
  ht = fixedpool_alloc (pool->tablepool);
  ht->table = (entry *) xmalloc (sizeof (entry) * size);
  memset (ht->table, 0, size * sizeof (entry));
  /* Cast away const to initialise the owning-pool field exactly once. */
  ptr = (hashpool *) &ht->pool;
  *ptr = pool;
  ht->tablelength = size;
  ht->entrycount = 0;
  ht->hashfn = hashfn;
  ht->eqfn = eqfn;
  /* Grow at 50% load factor. */
  ht->loadlimit = size >> 1;
  return ht;
}
/* Free all entries (invoking destroyfn on each, if given) and recycle the
   table; the old bucket array is donated to the entry pool as raw memory.
   NOTE(review): each entry is returned to the pool before e->next is read
   by the loop increment; this is safe only if fixedpool_free leaves the
   next field intact - confirm against the fixedpool implementation. */
void hashtable_free (hashtable ht, hashtraversefn destroyfn, void *arg)
{
  entry e;
  unsigned int i;
  for (i = 0; i < ht->tablelength; i++) {
    for (e = ht->table[i]; e; e = e->next) {
      if (destroyfn)
        destroyfn (e->key, e->value, e->hash, arg);
      fixedpool_free (ht->pool->entrypool, e);
    }
  }
  fixedpool_grow (ht->pool->entrypool, ht->table, ht->tablelength);
  ht->table = NULL;
  ht->tablelength = 0;
  ht->entrycount = 0;
  fixedpool_free (ht->pool->tablepool, ht);
}
/* Double the bucket count and relink every entry into its new bucket.
   Hashes are cached on the entries, so keys are not rehashed. */
static
void hashtable_grow (hashtable ht)
{
  entry *newtable;
  entry e, ne;
  unsigned int newsize, i, index;
  newsize = ht->tablelength << 1;
  newtable = (entry *) xmalloc (sizeof (entry) * newsize);
  memset (newtable, 0, newsize * sizeof (entry));
  for (i = 0; i < ht->tablelength; i++) {
    for (e = ht->table[i]; e; e = ne) {
      ne = e->next;
      index = INDEX_FOR (e->hash, newsize);
      e->next = newtable[index];
      newtable[index] = e;
    }
  }
  /* Donate the old bucket array to the entry pool instead of freeing it. */
  fixedpool_grow (ht->pool->entrypool, ht->table, ht->tablelength);
  ht->table = newtable;
  ht->tablelength = newsize;
  ht->loadlimit = newsize >> 1;
}
/* Number of key/value pairs currently stored. */
unsigned int hashtable_count (hashtable ht)
{
  return ht->entrycount;
}

/* Insert key/value, hashing the key with the table's hash function.
   Duplicates are not detected; a newer entry shadows older ones because
   chains are searched front-to-back and insertion prepends. */
void hashtable_insert (hashtable ht, void *key, void *value)
{
  hashtable_inserthash (ht, key, value, ht->hashfn (key));
}
/* Insert key/value under a caller-supplied hash value. */
void hashtable_inserthash (hashtable ht, void *key, void *value, unsigned int hash)
{
  unsigned int index;
  entry e;
  /* Grow first so the load factor stays below 50%. */
  if (ht->entrycount >= ht->loadlimit) {
    hashtable_grow (ht);
  }
  e = alloc_entry (ht->pool);
  e->hash = hash;
  index = INDEX_FOR (e->hash, ht->tablelength);
  e->key = key;
  e->value = value;
  /* Prepend to the bucket chain. */
  e->next = ht->table[index];
  ht->entrycount++;
  ht->table[index] = e;
}
/* Locate the first entry matching key/hash.  When `remove` is non-zero the
   entry is also unlinked and returned to the pool; the returned pointer
   then refers to pooled memory and must only be read immediately. */
static
entry find_entry (hashtable ht, void *key, unsigned int hash, int remove)
{
  entry e;
  entry *prev;
  unsigned int index;
  index = INDEX_FOR (hash, ht->tablelength);
  for (prev = &(ht->table[index]); (e = *prev) ; prev = &e->next) {
    /* Cheap checks first: cached hash, then pointer identity, then eqfn. */
    if (hash != e->hash) continue;
    if (key != e->key)
      if (!ht->eqfn (key, e->key, hash))
        continue;
    if (remove) {
      *prev = e->next;
      ht->entrycount--;
      free_entry (ht->pool, e);
    }
    return e;
  }
  return NULL;
}
/* Look up `key`; returns the stored value or NULL.  When key_found is
   non-NULL it receives the stored key pointer. */
void *hashtable_search (hashtable ht, void *key, void **key_found)
{
  return hashtable_searchhash (ht, key, key_found, ht->hashfn (key));
}

/* As hashtable_search, with a caller-supplied hash. */
void *hashtable_searchhash (hashtable ht, void *key, void **key_found, unsigned int hash)
{
  entry e;
  e = find_entry (ht, key, hash, 0);
  if (e) {
    if (key_found)
      *key_found = e->key;
    return e->value;
  }
  return NULL;
}

/* TRUE if `key` is present; unlike search, this distinguishes a stored
   NULL value from an absent key. */
int hashtable_haskey (hashtable ht, void *key, void **key_found)
{
  return hashtable_haskeyhash (ht, key, key_found, ht->hashfn (key));
}

/* As hashtable_haskey, with a caller-supplied hash. */
int hashtable_haskeyhash (hashtable ht, void *key, void **key_found, unsigned int hash)
{
  entry e = find_entry (ht, key, hash, 0);
  if (e) {
    if (key_found)
      *key_found = e->key;
    return TRUE;
  }
  return FALSE;
}

/* Remove `key` and return its value (NULL if absent). */
void *hashtable_remove (hashtable ht, void *key, void **key_found)
{
  return hashtable_removehash (ht, key, key_found, ht->hashfn (key));
}

/* As hashtable_remove, with a caller-supplied hash. */
void *hashtable_removehash (hashtable ht, void *key, void **key_found, unsigned int hash)
{
  entry e = find_entry (ht, key, hash, 1);
  if (e) {
    /* e was unlinked and pooled by find_entry; only its fields are read. */
    if (key_found)
      *key_found = e->key;
    return e->value;
  }
  return NULL;
}
/* Call traversefn(key, value, hash, arg) on every stored entry.  Do not
   insert or remove entries from within the callback: growth or unlinking
   would relink the chains being walked. */
void hashtable_traverse (hashtable ht, hashtraversefn traversefn, void *arg)
{
  entry e;
  unsigned int i;
  for (i = 0; i < ht->tablelength; i++) {
    for (e = ht->table[i]; e; e = e->next) {
      traversefn (e->key, e->value, e->hash, arg);
    }
  }
}
/* Key-equality helper for NUL-terminated string keys (hashequalsfn). */
int hashtable_string_compare (void *key1, void *key2, unsigned int hash)
{
  const char *a = key1;
  const char *b = key2;
  return strcmp (a, b) == 0;
}
/* Key-equality helper comparing keys by pointer identity (hashequalsfn). */
int hashtable_pointer_compare (void *key1, void *key2, unsigned int hash)
{
  if (key1 == key2)
    return 1;
  return 0;
}
/* Bob Jenkins' one-at-a-time hash over an arbitrary byte buffer. */
unsigned int hashtable_hash_bytes (unsigned char *key, size_t len)
{
  unsigned int hash = 0;
  size_t i;
  for (i = 0; i < len; i++) {
    hash += key[i];
    hash += (hash << 10);
    hash ^= (hash >> 6);
  }
  /* Final avalanche mixing. */
  hash += (hash << 3);
  hash ^= (hash >> 11);
  hash += (hash << 15);
  return hash;
}

/* One-at-a-time hash of a NUL-terminated string (suitable as a hashfn). */
unsigned int hashtable_hash_string (void *key)
{
  unsigned int hash = 0;
  unsigned char *bytes = (unsigned char *) key;
  while (*bytes) {
    hash += *bytes++;
    hash += (hash << 10);
    hash ^= (hash >> 6);
  }
  hash += (hash << 3);
  hash ^= (hash >> 11);
  hash += (hash << 15);
  return hash;
}
|
fridgeresearch/kitchen | backend/python/db/db_handler.py | """
The MIT License (MIT)
Copyright (c) 2016 <NAME> (Stanford University)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
"""
TODO: License info
Database handler.
"""
import sys, os, warnings, MySQLdb as mdb
class DatabaseHandler:
def __init__(self, db, user, passwd, host, warnings=True, create=False):
"""Initialize."""
if not warnings:
warnings.filterwarnings('ignore', category = mdb.Warning)
if create:
create_con = mdb.connect(user=user, passwd=passwd, host=host)
create_cursor = create_con.cursor()
create_cursor.execute('CREATE DATABASE %s' % db)
create_cursor.close()
self.db_name = db
self.con = mdb.connect(db=db, user=user, passwd=passwd, host=host)
self.cursor = self.con.cursor(mdb.cursors.DictCursor)
self._initTableCols()
def _initTableCols(self):
"""Initialize internal table cols data structure."""
cursor = self.con.cursor()
self.table_cols = {}
cursor.execute("SHOW TABLES")
for (table,) in cursor:
cursor.execute("SELECT * FROM "+table)
self.table_cols[table] = [v[0] for v in cursor.description]
cursor.close()
def printDb(self):
for (table, cols) in sorted(self.table_cols.items()):
rows = self.get([table])
#if table!="GroceryList": continue
print table
rows = [[r["%s.%s"%(table,c)] for c in cols] for r in rows]
rows_t = [[] for var in range(len(cols))]
for v in rows + [cols]:
for (i, x) in enumerate(v):
rows_t[i].append(x)
widths = [max(map(len, map(str, v)))+2 for v in rows_t]
max_len = 100
print '| '.join([str(v).ljust(min(widths[i], max_len)) for (i, v) in enumerate(cols)])
for row in rows:
print '| '.join([str(v)[:max_len].ljust(widths[i]) for (i, v) in enumerate(row)])
print
def checkAuth(self, user, passwd):
constraints = ["name LIKE '%s'"%user, "pwd LIKE password('%s')"%passwd]
r = self.get(["Kitchen"], constraints=constraints)
if not r:
return None
else:
return r[0]
def dropDatabase(self):
self.cursor.execute('DROP DATABASE IF EXISTS %s' % self.db_name)
self.table_cols = {}
def createDatabase(self):
self.cursor.execute('CREATE DATABASE %s' % self.db_name)
self.cursor.execute('USE %s' % self.db_name)
self._initTableCols()
def tableExists(self, table):
qry = '''SELECT * FROM information_schema.tables
WHERE table_name="%s"
AND table_schema="%s"'''
self.cursor.execute(qry % (table, self.db_name))
return self.cursor.fetchone() != None
def dropTable(self, table):
self.cursor.execute("DROP TABLE IF EXISTS %s" % table)
self._initTableCols()
def createTable(self, table, cols, col_props):
cs = ', '.join(["%s %s"%v for v in zip(cols, col_props)])
self.cursor.execute("CREATE TABLE %s (%s)" % (table, cs))
self._initTableCols()
def insert(self, table, obj):
self.insertMany(table, [obj])
def insertMany(self, table, objs):
if not objs:
return
else:
# Convert any passwords.
for obj in objs:
for (k, v) in obj.items():
if isinstance(v, basestring) and not v.find("password("):
self.cursor.execute("SELECT %s"%v)
obj[k] = self.cursor.fetchone()[v]
# Now insert.
row_cols = [tuple(sorted(v.keys())) for v in objs]
if len(set(row_cols)) != 1:
raise Exception("Column names must match for insertion.")
cols = row_cols[0]
cols_str = ', '.join(cols)
vals = ', '.join(["%s"]*len(cols))
qry = 'INSERT INTO %s (%s) VALUES (%s)' % (table, cols_str, vals)
to_insert = [tuple([v[w] for w in cols]) for v in objs]
self.cursor.executemany(qry, to_insert)
def update(self, table, obj, constraints=[]):
insert_obj = {}
for (k, v) in obj.items():
if v==None:
insert_obj[k] = "NULL"
elif isinstance(v, basestring) and v.find("password("):
insert_obj[k] = "'%s'"%v
else:
insert_obj[k] = str(v)
vals = ', '.join(['%s=%s'%(k, v) for (k, v) in insert_obj.items()])
where_clause = " AND ".join(constraints) if constraints else ""
qry = 'UPDATE %s SET %s WHERE %s' % (table, vals, where_clause)
self.cursor.execute(qry)
def delete(self, table, constraints=[]):
where_clause = " AND ".join(constraints) if constraints else ""
qry = 'DELETE FROM %s ' % table
if constraints:
qry = qry + ('WHERE %s' % where_clause)
self.cursor.execute(qry)
def get(self, tables, table_aliases=[], cols=[], constraints=[], order_bys=[]):
"""Gets specified column tables from db given WHERE and ORDER BY clauses.
Args:
tables: Table(s) to get from.
table_aliases: Aliases for tables or [] if table names
cols: Column(s) specified (or [] if want all columns returned).
Note that should be of the form Table.column.
constraints: Constraints for where clause.
order_bys: Columns to order by.
Returns:
Query result dictionary."""
# Construct FROM clause
if not table_aliases: table_aliases = tables
if len(set(table_aliases)) != len(table_aliases):
raise Exception("Must have unique table names.")
from_clause = 'FROM %s' % ', '.join(["%s as %s"%v for v in zip(tables, table_aliases)])
# Construct SELECT clause
if not cols:
cols = ["%s.%s"%(ta,c) for (t,ta) in zip(tables, table_aliases) \
for c in self.table_cols[t]]
col_aliases = [v.lower().replace(".", "_") for v in cols]
select_clause = 'SELECT %s' % ', '.join(["%s as %s"%v for v in zip(cols, col_aliases)])
# Construct WHERE clause
where_clause = ("WHERE %s" % (" AND ".join(constraints))) if constraints else ""
# Construct ORDER BY clause
order_by_clause = ("ORDER BY %s" % (", ".join(order_bys))) if order_bys else ""
# Execute query and return dictionary result.
qry = '%s %s %s %s' % (select_clause, from_clause, where_clause, order_by_clause)
self.cursor.execute(qry)
return [dict([(c,v[a]) for (c, a) in zip(cols, col_aliases)]) \
for v in self.cursor.fetchall()]
def getEvents(self, kitchen_id=None, segment_id=None,
start_time=None, end_time=None, cols=[], constraints=[]):
"""Get events
Args:
kitchen_id: Kitchen ID.
segment_id: Segment ID.
start_time: Time t where only return events with time >= t.
end_time: Time t where only return events with time <= t.
Returns:
Event list."""
constraints = [v for v in constraints]
if kitchen_id != None:
constraints.append('kitchen_id=%d'%kitchen_id)
if segment_id != None:
constraints.append('segment_id=%d'%segment_id)
if start_time != None:
constraints.append('time>="%s"'%start_time)
if end_time != None:
constraints.append('time<="%s"'%end_time)
return self.get(["Event"], cols=cols, constraints=constraints,
order_bys=["time"])
def getItemReads(self, kitchen_id=None, present_time=None,
omit_removed=False, omit_remaining=False):
tables = ["Item", "ItemRead", "Food", "Event", "Event"]
table_aliases = tables[:-2] + ["ArrivalEvent", "RemovalEvent"]
constraints = ["Item.food_id=Food.id",
"ItemRead.item_id=Item.id",
"ItemRead.arrival_event_id=ArrivalEvent.id",
"ItemRead.removal_event_id=RemovalEvent.id"]
if kitchen_id != None:
constraints.append('Item.kitchen_id=%d'%kitchen_id)
if present_time:
constraints.append("ArrivalEvent.time<='%s'"%present_time)
constraints.append("RemovalEvent.time>='%s'"%present_time) # TODO(andrej, jake): discuss
irs = []
# Removed ItemReads.
if not omit_removed:
irs += self.get(tables, table_aliases=table_aliases,
constraints=constraints, order_bys=["ArrivalEvent.time"])
# Remaining ItemReads
if not omit_remaining:
constraints = [v for v in constraints if "Removal" not in v] + \
["ItemRead.removal_event_id IS NULL"]
irs += self.get(tables[:-1], table_aliases=table_aliases[:-1],
constraints=constraints, order_bys=["ArrivalEvent.time"])
# FoodTags
food_tag_hash = self.getFoodTagHash()
for (i, ir) in enumerate(irs):
name = ir["Food.name"]
irs[i]["FoodTags"] = food_tag_hash[name] if name in food_tag_hash else []
# Beacons
beacons = dict([(v["Beacon.id"], v) for v in self.get(["Beacon"])])
for (i, ir) in enumerate(irs):
bid = ir["Item.beacon_id"]
if bid in beacons:
for (k, v) in beacons[bid].items():
irs[i][k] = v
return irs
def getFoodTagHash(self):
tags = self.get(["Food", "FoodTag", "FoodTagAssoc"],
constraints=["FoodTagAssoc.food_id=Food.id", "FoodTagAssoc.foodtag_id=FoodTag.id"])
food_tag_hash = {}
for t in tags:
if t["Food.name"] not in food_tag_hash:
food_tag_hash[t["Food.name"]] = []
food_tag_hash[t["Food.name"]].append(t)
return food_tag_hash
def getPresentItemReads(self, kitchen_id=None):
return self.getItemReads(kitchen_id=kitchen_id, omit_removed=True)
def lastRowId(self):
return self.cursor.lastrowid
def commit(self):
self.con.commit()
|
mkinsner/llvm | libc/test/src/ctype/isalpha_test.cpp | //===-- Unittests for isalpha----------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "src/ctype/isalpha.h"
#include "utils/UnitTest/Test.h"
TEST(LlvmLibcIsAlpha, DefaultLocale) {
  // Loops through all characters, verifying that letters return a
  // non-zero integer and everything else returns zero.
  // NOTE(review): the loop covers 0-254 only; character 255 is never
  // checked - confirm whether that exclusion is intentional.
  for (int ch = 0; ch < 255; ++ch) {
    if (('a' <= ch && ch <= 'z') || ('A' <= ch && ch <= 'Z'))
      EXPECT_NE(__llvm_libc::isalpha(ch), 0);
    else
      EXPECT_EQ(__llvm_libc::isalpha(ch), 0);
  }
}
|
navikt/data-catalog-policies | data-catalog-policies-app/src/main/java/no/nav/data/catalog/policies/app/policy/domain/PolicyResponse.java | package no.nav.data.catalog.policies.app.policy.domain;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import no.nav.data.catalog.policies.app.codelist.domain.CodeResponse;
import no.nav.data.catalog.policies.app.dataset.domain.DatasetResponse;
import java.time.LocalDate;
/**
 * API response model for a data-catalog policy, serialized with the field
 * order given in {@code @JsonPropertyOrder}.  Getters, setters, builder
 * and constructors are generated by Lombok.
 */
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonPropertyOrder({"policyId", "legalBasisDescription", "fom", "tom", "active", "purpose", "dataset"})
public class PolicyResponse {

    private Long policyId;
    // Free-text description of the legal basis for the policy.
    private String legalBasisDescription;
    // Validity period; fom/tom presumably mean valid-from / valid-to
    // (Norwegian fra-og-med / til-og-med) - confirm.
    private LocalDate fom;
    private LocalDate tom;
    private boolean active;
    private CodeResponse purpose;
    private DatasetResponse dataset;
}
|
iEiffel/Portfolium | Portfolium/PFStudyVC.h | <filename>Portfolium/PFStudyVC.h<gh_stars>0
//
// PFStudyVC.h
// Portfolium
//
// Created by <NAME> on 7/30/14.
// Copyright (c) 2014 Portfolium. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "PFIndexedListVC.h"
@class PFOBEducationVC;
// Indexed list view-controller with search-bar support.
@interface PFStudyVC : PFIndexedListVC<UISearchBarDelegate>

// Factory creating a new instance wired to the given education delegate.
+ (PFStudyVC *)_new:(PFOBEducationVC *)delegate;

@end
|
legioner9/Node_Way_source_2 | Education/Gulp/Gulp_project_gulp_pug_1/CreateDirFree_v_1_gulp_pug_1.js | <reponame>legioner9/Node_Way_source_2
// Builds the gulp+pug project skeleton described by `arr_creator` using the
// createTechDir_1_v_2 helper, then exits.
const Ex = require ( 'st_ex1' ); // NOTE(review): unused here - confirm whether this import has needed side effects.
const createTechDir_1_v_2 = require ( 'st_ini' ).createTechDir_1_v_2;
// createTechDir_1_v_2.mode = 'debug';

// Directory/file tree specification: a string entry is a file, an array's
// first element is a directory whose remaining elements are its children.
// Entries like [ 'build', 0 ] appear to denote empty directories - confirm
// against the st_ini documentation.
const arr_creator =
[ 'gulpfile.js',
  [ 'build', 0 ],
  [ 'src',
    [ 'img', 0 ],
    [ 'js',
      'mane.js', ],
    [ 'local_modules',
      [ 'jquery',
        [ 'dist',
          'jquery.min.js' ] ] ],
    [ 'pages',
      'about.pug',
      'index.pug',
      [ 'common',
        'footer.pug',
        'header.pug',
        'layout.pug',
      ],
      [ 'includes',
        'about.pug',
        'index.pug',
        [ 'about',
          'company-info.pug',
        ],
        [ 'index',
          'banner.pug',
        ],
      ],
    ],
    [ 'stiles',
      'style.scss',
      [ 'common',
        'global.scss' ],
      [ 'mixins',
        'media.scss' ],
      [ 'utils',
        'fonts.scss',
        'variables.scss', ], ], ],
  [ 'gulp',
    'config.js',
    [ 'tasks',
      'clean.js' ] ],
  [ 'includes',
    'index.pug',
    'head.pug',
    'foot.pug', ],
];

// Create the tree rooted at this script's directory, report, and exit.
createTechDir_1_v_2 ( __dirname, arr_creator );
console.log ( 'arr_creator is : ', arr_creator, '\nprn::process.exit ( 0 )' );
process.exit ( 0 );
|
jamesmartinpp/mocca | mocca-client/src/test/java/com/paypal/mocca/client/sample/AsyncSampleClient.java | package com.paypal.mocca.client.sample;
import com.paypal.mocca.client.MoccaClient;
import com.paypal.mocca.client.annotation.Query;
import com.paypal.mocca.client.annotation.RequestHeader;
import java.util.concurrent.CompletableFuture;
// Sample Mocca GraphQL client whose query completes asynchronously; the
// class-level header is attached to every request.
@RequestHeader("classheader: classvalue")
public interface AsyncSampleClient extends MoccaClient {

    // Runs the "getOneSample" GraphQL query; `variables` carries the raw
    // query variables.  The result is delivered via the returned future.
    @Query
    CompletableFuture<SampleResponseDTO> getOneSample(String variables);
}
|
KevDev0247/FinanceLord | app/src/main/java/protect/Finia/networth/editing/utils/AssetsFragmentChildViewClickListener.java | package protect.Finia.networth.editing.utils;
import android.util.Log;
import android.view.View;
import android.widget.ExpandableListView;
import java.util.List;
import protect.Finia.datastructure.AssetsNodeContainer;
import protect.Finia.datastructure.AssetsTypeTreeProcessor;
/**
 * A Listener that detects clicks on the child items of an expandable list
 * of asset types and logs which child was selected.
 * (Original javadoc said "parent of the items"; this class implements
 * OnChildClickListener, i.e. it reacts to child clicks.)
 *
 * @author Owner <NAME>
 * created on 2020/03/25
 */
public class AssetsFragmentChildViewClickListener implements ExpandableListView.OnChildClickListener {

    private AssetsTypeTreeProcessor typeProcessor;   // resolves sub-groups of an assets type
    private List<AssetsNodeContainer> sectionDataSet; // top-level (group) rows
    private int level;                                // tree depth of the groups in sectionDataSet

    public AssetsFragmentChildViewClickListener(List<AssetsNodeContainer> sectionDataSet, AssetsTypeTreeProcessor typeProcessor, int level) {
        this.sectionDataSet = sectionDataSet;
        this.typeProcessor = typeProcessor;
        this.level = level;
    }

    // Logs the clicked child's type name and database id; returning true
    // marks the click as handled.
    @Override
    public boolean onChildClick(ExpandableListView expandableListView, View view, int i, int i1, long l) {
        AssetsNodeContainer sectionItem = sectionDataSet.get(i);
        List<AssetsNodeContainer> childSection = typeProcessor.getSubGroup(sectionItem.assetsTypeName, level + 1);
        Log.d("Edit_AFragment", "child Clicked: " + childSection.get(i1).assetsTypeName + ", id in DB: " + childSection.get(i1).assetsTypeId);
        return true;
    }
}
|
ScalablyTyped/SlinkyTyped | w/winrt-uwp/src/main/scala/typingsSlinky/winrtUwp/global/Windows/Media/MediaPlaybackStatus.scala | <filename>w/winrt-uwp/src/main/scala/typingsSlinky/winrtUwp/global/Windows/Media/MediaPlaybackStatus.scala<gh_stars>10-100
package typingsSlinky.winrtUwp.global.Windows.Media
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
/** Defines values for the status of the media playback. */
// Auto-generated ScalablyTyped facade for the WinRT enum; numeric values
// mirror the platform enumeration (closed=0 ... paused=4).
@JSGlobal("Windows.Media.MediaPlaybackStatus")
@js.native
object MediaPlaybackStatus extends StObject {

  /** Looks up the enum member for a raw numeric value, if defined. */
  @JSBracketAccess
  def apply(value: Double): js.UndefOr[typingsSlinky.winrtUwp.Windows.Media.MediaPlaybackStatus with Double] = js.native

  /* 1 */ val changing: typingsSlinky.winrtUwp.Windows.Media.MediaPlaybackStatus.changing with Double = js.native

  /* 0 */ val closed: typingsSlinky.winrtUwp.Windows.Media.MediaPlaybackStatus.closed with Double = js.native

  /* 4 */ val paused: typingsSlinky.winrtUwp.Windows.Media.MediaPlaybackStatus.paused with Double = js.native

  /* 3 */ val playing: typingsSlinky.winrtUwp.Windows.Media.MediaPlaybackStatus.playing with Double = js.native

  /* 2 */ val stopped: typingsSlinky.winrtUwp.Windows.Media.MediaPlaybackStatus.stopped with Double = js.native
}
|
artkoshelev/voyager | pkg/orchestration/updater/objectupdater_typed.go | package k8s
import (
smith_v1 "github.com/atlassian/smith/pkg/apis/smith/v1"
smithClientset "github.com/atlassian/smith/pkg/client/clientset_generated/clientset"
"github.com/atlassian/voyager/pkg/k8s/updater"
meta_v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/client-go/tools/cache"
)
// BundleUpdater builds an ObjectUpdater for Smith Bundle objects, adapting
// the typed clientset methods to the generic runtime.Object-based
// ClientAdapter interface.
func BundleUpdater(existingObjectsIndexer cache.Indexer, specCheck updater.SpecCheck, client smithClientset.Interface) updater.ObjectUpdater {
	return updater.ObjectUpdater{
		SpecCheck:              specCheck,
		ExistingObjectsIndexer: existingObjectsIndexer,
		Client: updater.ClientAdapter{
			// Create a Bundle in namespace ns.
			CreateMethod: func(ns string, obj runtime.Object) (runtime.Object, error) {
				result, createErr := client.SmithV1().Bundles(ns).Create(obj.(*smith_v1.Bundle))
				return runtime.Object(result), createErr
			},
			// Update an existing Bundle in namespace ns.
			UpdateMethod: func(ns string, obj runtime.Object) (runtime.Object, error) {
				result, updateErr := client.SmithV1().Bundles(ns).Update(obj.(*smith_v1.Bundle))
				return runtime.Object(result), updateErr
			},
			// Delete the named Bundle from namespace ns.
			DeleteMethod: func(ns string, name string, options *meta_v1.DeleteOptions) error {
				deleteErr := client.SmithV1().Bundles(ns).Delete(name, options)
				return deleteErr
			},
		},
	}
}
|
ayan-biswas/cbag | include/cbag/schematic/shape_t.h | <filename>include/cbag/schematic/shape_t.h
#ifndef CBAG_SCHEMATIC_SHAPE_T_H
#define CBAG_SCHEMATIC_SHAPE_T_H

#include <variant>

namespace cbag {
namespace sch {

// Forward declarations of every schematic shape type.
struct rectangle;
struct polygon;
struct arc;
struct donut;
struct ellipse;
struct line;
struct path;
struct text_t;
struct eval_text;

// Closed discriminated union over all schematic shape types.
using shape_t =
    std::variant<rectangle, polygon, arc, donut, ellipse, line, path, text_t, eval_text>;

} // namespace sch
} // namespace cbag

#endif
|
nathan-hekman/vpc-java-sdk | modules/vpc/src/test/java/com/ibm/cloud/is/vpc/v1/model/NetworkACLRulePrototypeNetworkACLContextNetworkACLRuleProtocolTCPUDPTest.java | /*
* (C) Copyright IBM Corp. 2021.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.ibm.cloud.is.vpc.v1.model;
import com.ibm.cloud.is.vpc.v1.model.NetworkACLRulePrototypeNetworkACLContextNetworkACLRuleProtocolTCPUDP;
import com.ibm.cloud.is.vpc.v1.utils.TestUtilities;
import com.ibm.cloud.sdk.core.service.model.FileWithMetadata;
import java.io.InputStream;
import java.util.HashMap;
import java.util.List;
import org.testng.annotations.Test;
import static org.testng.Assert.*;
/**
* Unit test class for the NetworkACLRulePrototypeNetworkACLContextNetworkACLRuleProtocolTCPUDP model.
*/
public class NetworkACLRulePrototypeNetworkACLContextNetworkACLRuleProtocolTCPUDPTest {
  // Shared mock fixtures required by the TestUtilities serialization helpers.
  final HashMap<String, InputStream> mockStreamMap = TestUtilities.createMockStreamMap();
  final List<FileWithMetadata> mockListFileWithMetadata = TestUtilities.creatMockListFileWithMetadata();

  // Builds the model through its builder, verifies every accessor, then
  // round-trips the model through JSON and re-verifies the deserialized copy.
  @Test
  public void testNetworkACLRulePrototypeNetworkACLContextNetworkACLRuleProtocolTCPUDP() throws Throwable {
    NetworkACLRulePrototypeNetworkACLContextNetworkACLRuleProtocolTCPUDP networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModel = new NetworkACLRulePrototypeNetworkACLContextNetworkACLRuleProtocolTCPUDP.Builder()
      .action("allow")
      .destination("192.168.3.2/32")
      .direction("inbound")
      .name("my-rule-2")
      .source("192.168.3.2/32")
      .destinationPortMax(Long.valueOf("22"))
      .destinationPortMin(Long.valueOf("22"))
      .protocol("udp")
      .sourcePortMax(Long.valueOf("65535"))
      .sourcePortMin(Long.valueOf("49152"))
      .build();
    // Each builder value must be echoed back unchanged by its accessor.
    assertEquals(networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModel.action(), "allow");
    assertEquals(networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModel.destination(), "192.168.3.2/32");
    assertEquals(networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModel.direction(), "inbound");
    assertEquals(networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModel.name(), "my-rule-2");
    assertEquals(networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModel.source(), "192.168.3.2/32");
    assertEquals(networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModel.destinationPortMax(), Long.valueOf("22"));
    assertEquals(networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModel.destinationPortMin(), Long.valueOf("22"));
    assertEquals(networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModel.protocol(), "udp");
    assertEquals(networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModel.sourcePortMax(), Long.valueOf("65535"));
    assertEquals(networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModel.sourcePortMin(), Long.valueOf("49152"));

    // JSON round-trip: serialize and deserialize, then re-check all fields
    // survive intact on the new instance.
    String json = TestUtilities.serialize(networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModel);

    NetworkACLRulePrototypeNetworkACLContextNetworkACLRuleProtocolTCPUDP networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModelNew = TestUtilities.deserialize(json, NetworkACLRulePrototypeNetworkACLContextNetworkACLRuleProtocolTCPUDP.class);
    assertTrue(networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModelNew instanceof NetworkACLRulePrototypeNetworkACLContextNetworkACLRuleProtocolTCPUDP);
    assertEquals(networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModelNew.action(), "allow");
    assertEquals(networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModelNew.destination(), "192.168.3.2/32");
    assertEquals(networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModelNew.direction(), "inbound");
    assertEquals(networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModelNew.name(), "my-rule-2");
    assertEquals(networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModelNew.source(), "192.168.3.2/32");
    assertEquals(networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModelNew.destinationPortMax(), Long.valueOf("22"));
    assertEquals(networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModelNew.destinationPortMin(), Long.valueOf("22"));
    assertEquals(networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModelNew.protocol(), "udp");
    assertEquals(networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModelNew.sourcePortMax(), Long.valueOf("65535"));
    assertEquals(networkAclRulePrototypeNetworkAclContextNetworkAclRuleProtocolTcpudpModelNew.sourcePortMin(), Long.valueOf("49152"));
  }

  // The builder must reject construction when required fields are absent.
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testNetworkACLRulePrototypeNetworkACLContextNetworkACLRuleProtocolTCPUDPError() throws Throwable {
    new NetworkACLRulePrototypeNetworkACLContextNetworkACLRuleProtocolTCPUDP.Builder().build();
  }
}
yong076/webviewtesting | Libraries/StyleSheet/StyleSheet.js | /**
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule StyleSheet
* @flow
*/
'use strict';
var PixelRatio = require('PixelRatio');
var StyleSheetRegistry = require('StyleSheetRegistry');
var StyleSheetValidation = require('StyleSheetValidation');
var flatten = require('flattenStyle');
// Round to the nearest physical pixel so a hairline renders crisply.
var hairlineWidth = PixelRatio.roundToNearestPixel(0.4);
if (hairlineWidth === 0) {
  // On low-density screens the rounding above can collapse to 0; fall back
  // to the width of one physical pixel so the line remains visible.
  hairlineWidth = 1 / PixelRatio.get();
}
/**
* A StyleSheet is an abstraction similar to CSS StyleSheets
*
* Create a new StyleSheet:
*
* ```
* var styles = StyleSheet.create({
* container: {
* borderRadius: 4,
* borderWidth: 0.5,
* borderColor: '#d6d7da',
* },
* title: {
* fontSize: 19,
* fontWeight: 'bold',
* },
* activeTitle: {
* color: 'red',
* },
* });
* ```
*
* Use a StyleSheet:
*
* ```
* <View style={styles.container}>
* <Text style={[styles.title, this.props.isActive && styles.activeTitle]} />
* </View>
* ```
*
* Code quality:
*
* - By moving styles away from the render function, you're making the code
* easier to understand.
* - Naming the styles is a good way to add meaning to the low level components
* in the render function.
*
* Performance:
*
* - Making a stylesheet from a style object makes it possible to refer to it
* by ID instead of creating a new style object every time.
* - It also allows to send the style only once through the bridge. All
* subsequent uses are going to refer an id (not implemented yet).
*/
module.exports = {
/**
* This is defined as the width of a thin line on the platform. It can be
* used as the thickness of a border or division between two elements.
* Example:
* ```
* {
* borderBottomColor: '#bbb',
* borderBottomWidth: StyleSheet.hairlineWidth
* }
* ```
*
* This constant will always be a round number of pixels (so a line defined
* by it look crisp) and will try to match the standard width of a thin line
* on the underlying platform. However, you should not rely on it being a
* constant size, because on different platforms and screen densities its
* value may be calculated differently.
*/
hairlineWidth,
flatten,
/**
* Creates a StyleSheet style reference from the given object.
*/
create(obj: {[key: string]: any}): {[key: string]: number} {
var result = {};
for (var key in obj) {
StyleSheetValidation.validateStyle(key, obj);
result[key] = StyleSheetRegistry.registerStyle(obj[key]);
}
return result;
}
};
|
google-ar/chromium | chrome/android/javatests/src/org/chromium/chrome/browser/signin/OAuth2TokenServiceTest.java | <filename>chrome/android/javatests/src/org/chromium/chrome/browser/signin/OAuth2TokenServiceTest.java
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.signin;
import android.accounts.Account;
import android.support.test.filters.SmallTest;
import android.test.InstrumentationTestCase;
import org.chromium.base.test.util.AdvancedMockContext;
import org.chromium.base.test.util.DisabledTest;
import org.chromium.base.test.util.Feature;
import org.chromium.components.signin.AccountManagerHelper;
import org.chromium.components.signin.test.util.AccountHolder;
import org.chromium.components.signin.test.util.MockAccountManager;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
/** Tests for OAuth2TokenService. */
public class OAuth2TokenServiceTest extends InstrumentationTestCase {
    private AdvancedMockContext mContext;
    private MockAccountManager mAccountManager;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        // Mock out the account manager on the device.
        mContext = new AdvancedMockContext(getInstrumentation().getTargetContext());
        mAccountManager = new MockAccountManager(mContext, getInstrumentation().getContext());
        AccountManagerHelper.overrideAccountManagerHelperForTests(mContext, mAccountManager);
    }

    /*
     * @SmallTest
     * @Feature({"Sync"})
     */
    @DisabledTest(message = "crbug.com/533417")
    public void testGetAccountsNoAccountsRegistered() {
        // With no accounts on the device the service must report none.
        String[] accounts = OAuth2TokenService.getAccounts(mContext);
        assertEquals("There should be no accounts registered", 0, accounts.length);
    }

    /*@SmallTest
    @Feature({"Sync"})*/
    @DisabledTest(message = "crbug.com/527852")
    public void testGetAccountsOneAccountRegistered() {
        // A system account is visible via getSystemAccountNames(), but not
        // via getAccounts() until it has been signed in / validated.
        Account account1 = AccountManagerHelper.createAccountFromName("<EMAIL>");
        AccountHolder accountHolder1 = AccountHolder.create().account(account1).build();
        mAccountManager.addAccountHolderExplicitly(accountHolder1);

        String[] sysAccounts = OAuth2TokenService.getSystemAccountNames(mContext);
        assertEquals("There should be one registered account", 1, sysAccounts.length);
        assertEquals("The account should be " + account1, account1.name, sysAccounts[0]);

        String[] accounts = OAuth2TokenService.getAccounts(mContext);
        assertEquals("There should be zero registered account", 0, accounts.length);
    }

    /*@SmallTest
    @Feature({"Sync"})*/
    @DisabledTest(message = "crbug.com/527852")
    public void testGetAccountsTwoAccountsRegistered() {
        // Same as above, with two system accounts registered.
        Account account1 = AccountManagerHelper.createAccountFromName("<EMAIL>");
        AccountHolder accountHolder1 = AccountHolder.create().account(account1).build();
        mAccountManager.addAccountHolderExplicitly(accountHolder1);
        Account account2 = AccountManagerHelper.createAccountFromName("<EMAIL>");
        AccountHolder accountHolder2 = AccountHolder.create().account(account2).build();
        mAccountManager.addAccountHolderExplicitly(accountHolder2);

        String[] sysAccounts = OAuth2TokenService.getSystemAccountNames(mContext);
        assertEquals("There should be one registered account", 2, sysAccounts.length);
        assertTrue("The list should contain " + account1,
                Arrays.asList(sysAccounts).contains(account1.name));
        assertTrue("The list should contain " + account2,
                Arrays.asList(sysAccounts).contains(account2.name));

        String[] accounts = OAuth2TokenService.getAccounts(mContext);
        assertEquals("There should be zero registered account", 0, accounts.length);
    }

    @DisabledTest(message = "crbug.com/568620")
    @SmallTest
    @Feature({"Sync"})
    public void testGetOAuth2AccessTokenWithTimeoutOnSuccess() {
        String authToken = "some<PASSWORD>";
        // Auth token should be successfully received.
        runTestOfGetOAuth2AccessTokenWithTimeout(authToken);
    }

    /*@SmallTest
    @Feature({"Sync"})*/
    @DisabledTest(message = "crbug.com/527852")
    public void testGetOAuth2AccessTokenWithTimeoutOnError() {
        String authToken = null;
        // Should not crash when auth token is null.
        runTestOfGetOAuth2AccessTokenWithTimeout(authToken);
    }

    // Shared driver: registers an account that already granted the OAuth2
    // scope with the given token, then asserts the service returns exactly
    // that token within the timeout.
    private void runTestOfGetOAuth2AccessTokenWithTimeout(String expectedToken) {
        String scope = "http://example.com/scope";
        Account account = AccountManagerHelper.createAccountFromName("<EMAIL>");
        String oauth2Scope = "oauth2:" + scope;

        // Add an account with given auth token for the given scope, already accepted auth popup.
        AccountHolder accountHolder =
                AccountHolder.create()
                        .account(account)
                        .hasBeenAccepted(oauth2Scope, true)
                        .authToken(oauth2Scope, expectedToken).build();
        mAccountManager.addAccountHolderExplicitly(accountHolder);

        String accessToken = OAuth2TokenService.getOAuth2AccessTokenWithTimeout(
                mContext, account, scope, 5, TimeUnit.SECONDS);
        assertEquals(expectedToken, accessToken);
    }
}
|
comic/comic-django | app/grandchallenge/hanging_protocols/forms.py | from crispy_forms.helper import FormHelper
from crispy_forms.layout import ButtonHolder, Div, Layout, Submit
from django import forms
from grandchallenge.components.models import ComponentInterface
from grandchallenge.core.forms import SaveFormInitMixin
from grandchallenge.core.widgets import JSONEditorWidget
from grandchallenge.hanging_protocols.models import (
HANGING_PROTOCOL_SCHEMA,
VIEW_CONTENT_SCHEMA,
HangingProtocol,
)
class HangingProtocolForm(SaveFormInitMixin, forms.ModelForm):
    """Create/update form for :class:`HangingProtocol`.

    Renders the protocol definition in a JSON editor (with a live preview
    container) and validates that the viewport layout is internally
    consistent.
    """

    class Meta:
        model = HangingProtocol
        fields = ("title", "description", "json")
        widgets = {"json": JSONEditorWidget(schema=HANGING_PROTOCOL_SCHEMA)}
        help_texts = {
            "json": (
                "To display a single image in full size, define the "
                "protocol as follows: "
                '[{"viewport_name": "main", "x": 0,"y": 0,"w": 1,"h": 1,'
                '"fullsizable": true,"draggable": false,"selectable": true,'
                '"order": 0}]'
            )
        }

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.helper = FormHelper(self)
        self.helper.layout = Layout(
            Div(
                "title",
                "description",
                "json",
                # Placeholder the page's JS fills with a layout preview.
                Div(
                    id="hpVisualization",
                    css_class="container-fluid m-1 mb-3 position-relative",
                    style="height: 250px",
                ),
            ),
            ButtonHolder(Submit("save", "Save")),
        )

    def clean_json(self):
        """Validate the list of viewport definitions.

        Checks that viewport names are unique, that the x/y/w/h geometry
        keys are either present on every viewport or absent from all of
        them, and that every ``parent_id`` points at an existing viewport
        on a draggable child.
        """
        value = self.cleaned_data["json"]
        viewports = [x["viewport_name"] for x in value]

        if len(set(viewports)) != len(viewports):
            self.add_error(
                error="Each viewport can only be used once.", field="json"
            )

        dims = ["x", "y", "w", "h"]
        # If any viewport defines any geometry key, then every viewport
        # must define all of them. (The original also had an else-branch
        # for the "no geometry keys anywhere" case, but it could never
        # report an error and has been removed as dead code.)
        if any(d in viewport for viewport in value for d in dims):
            for viewport in value:
                missing_dims = [d for d in dims if d not in viewport]
                if missing_dims:
                    self.add_error(
                        error=f"Either none or all viewports must have x, y, w, and h keys. Viewport {viewport['viewport_name']} missing {', '.join(missing_dims)}.",
                        field="json",
                    )

        for viewport in [v for v in value if "parent_id" in v]:
            if viewport["parent_id"] not in viewports:
                self.add_error(
                    error=f"Viewport {viewport['viewport_name']} has a parent_id that does not exist.",
                    field="json",
                )
            # A child viewport is positioned relative to its parent by
            # dragging, so it must be draggable.
            if "draggable" not in viewport or not viewport["draggable"]:
                self.add_error(
                    error=f"Viewport {viewport['viewport_name']} has a parent_id but is not draggable.",
                    field="json",
                )

        return value
class ViewContentMixin:
    """Form mixin that validates the ``view_content`` JSON field.

    Ensures the mapping's keys match the viewports of the selected
    hanging protocol and that every referenced interface slug exists.
    """

    def clean_view_content(self):
        mapping = self.cleaned_data["view_content"] or {}
        hanging_protocol = self.cleaned_data["hanging_protocol"]
        if mapping and hanging_protocol:
            # Every viewport of the protocol must be mapped, and no extra
            # viewports may appear.
            if set(mapping.keys()) != {
                x["viewport_name"] for x in hanging_protocol.json
            }:
                self.add_error(
                    error=(
                        "Image ports in view_content do not match "
                        "those in the selected hanging protocol."
                    ),
                    field="view_content",
                )

        slugs = {slug for viewport in mapping.values() for slug in viewport}
        if slugs:
            # One query for all slugs instead of one EXISTS query per slug.
            known = set(
                ComponentInterface.objects.filter(
                    slug__in=slugs
                ).values_list("slug", flat=True)
            )
            # Sorted for a deterministic error message (sets are unordered).
            unknown = sorted(slugs - known)
            if unknown:
                self.add_error(
                    error=f"Unknown slugs in view_content: {', '.join(unknown)}",
                    field="view_content",
                )

        return mapping

    class Meta:
        widgets = {
            "view_content": JSONEditorWidget(schema=VIEW_CONTENT_SCHEMA),
        }
        help_texts = {
            "view_content": (
                "Indicate which Component Interfaces need to be displayed in "
                'which image port. E.g. {"main": ["interface1"]}. The first '
                "item in the list of interfaces will be the main image in "
                "the image port. The first overlay type interface thereafter "
                "will be rendered as an overlay. For now, any other items "
                "will be ignored by the viewer."
            )
        }
sebaslherrera/holbertonschool-machine_learning | math/0x00-linear_algebra/12-bracin_the_elements.py | <gh_stars>0
#!/usr/bin/env python3
"""Bracing The Elements module"""
def np_elementwise(mat1, mat2):
    """Return the element-wise sum, difference, product and quotient of
    two numpy arrays as the 4-tuple ``(add, sub, mul, div)``."""
    total = mat1 + mat2
    difference = mat1 - mat2
    product = mat1 * mat2
    quotient = mat1 / mat2
    return (total, difference, product, quotient)
|
channingy/zuihou-admin-cloud | zuihou-common/zuihou-core/src/main/java/com/github/zuihou/exception/CommonException.java | <gh_stars>1-10
package com.github.zuihou.exception;
/**
* 非业务异常
* 用于在处理非业务逻辑时,进行抛出的异常。
*
* @author zuihou
* @version 1.0
* @see Exception
*/
public class CommonException extends BaseCheckedException {

    /**
     * Creates an exception with the given code and literal message.
     *
     * @param code    numeric error code
     * @param message error message
     */
    public CommonException(int code, String message) {
        super(code, message);
    }

    /**
     * Creates an exception whose message is built with
     * {@link String#format(String, Object...)}.
     *
     * @param code   numeric error code
     * @param format {@code String.format} pattern
     * @param args   pattern arguments
     */
    public CommonException(int code, String format, Object... args) {
        super(code, String.format(format, args));
        // NOTE(review): these assignments appear redundant with super()
        // above (and format the message a second time) — confirm whether
        // BaseCheckedException already stores code/message before removing.
        this.code = code;
        this.message = String.format(format, args);
    }

    /**
     * Convenience factory returning a new exception with the given code
     * and formatted message.
     *
     * @param code   numeric error code
     * @param format {@code String.format} pattern
     * @param args   pattern arguments
     * @return a new {@link CommonException}
     */
    public CommonException wrap(int code, String format, Object... args) {
        return new CommonException(code, format, args);
    }

    @Override
    public String toString() {
        // Fixed: previously reported the wrong class name ("BizException").
        return "CommonException [message=" + message + ", code=" + code + "]";
    }
}
|
hamdouni/iut | src/coo/ctrl/exo2/Moustache.java | <gh_stars>0
package coo.ctrl.exo2;
/**
 * Decorator ("déguisement") that adds a moustache to an {@link Acteur}:
 * it raises the wrapped actor's salary by a flat 50 and appends a note
 * to its textual description.
 */
public class Moustache extends ActeurDeguise {

    public Moustache(Acteur unActeur) {
        super(unActeur);
    }

    /** Salary of the decorated actor plus a fixed 50 for the moustache. */
    public int salaire() {
        return this.getActeur().salaire() + 50;
    }

    /** Description of the decorated actor followed by "avec une moustache ". */
    public String toString() {
        return super.toString() + "avec une moustache ";
    }
}
|
youngmonkeys/ezydata | ezydata-redis/src/test/java/com/tvd12/ezydata/redis/test/setting/EzyRedisSettingsBuilderTest.java | <reponame>youngmonkeys/ezydata
package com.tvd12.ezydata.redis.test.setting;
import org.testng.annotations.Test;
import com.tvd12.ezydata.redis.setting.EzyRedisChannelSetting;
import com.tvd12.ezydata.redis.setting.EzyRedisChannelSettingBuilder;
import com.tvd12.ezydata.redis.setting.EzyRedisMapSettingBuilder;
import com.tvd12.ezydata.redis.setting.EzyRedisSettings;
import com.tvd12.ezydata.redis.setting.EzyRedisSettingsBuilder;
import com.tvd12.properties.file.reader.BaseFileReader;
public class EzyRedisSettingsBuilderTest {

    // Exercises the fluent settings builder: properties loading, explicit
    // map/channel registration, and the nested sub-builder round trips
    // (the same sub-builder keys are requested twice on purpose to check
    // that repeated lookups return to the parent cleanly).
    @Test
    public void testAll() {
        // give
        String atomicLongMapName = "atomicLongMapNameTest";
        EzyRedisSettings redisSettings = new EzyRedisSettingsBuilder()
                .atomicLongMapName(atomicLongMapName)
                .properties(new BaseFileReader().read("application_test.yaml"))
                .addMapSetting("ezydata_key_value2", new EzyRedisMapSettingBuilder()
                        .keyType(String.class)
                        .valueType(Integer.class)
                        .build()
                )
                .addChannelSetting("ezydata_channel1", new EzyRedisChannelSettingBuilder()
                        .messageType(String.class)
                        .subThreadPoolSize(2)
                        .build())
                .mapSettingBuilder("ezydata_key_value_x2")
                .parent()
                .mapSettingBuilder("ezydata_key_value_x2")
                .parent()
                .channelSettingBuilder("ezydata_channel_x2")
                .parent()
                .channelSettingBuilder("ezydata_channel_x2")
                .parent()
                .build();
        // when
        // then
        assert redisSettings.getAtomicLongMapName().equals(atomicLongMapName);
        // NOTE(review): "getChannelSeting" spelling comes from the library's
        // public API — verify against EzyRedisSettings before renaming.
        EzyRedisChannelSetting channelSeting = redisSettings.getChannelSeting("ezydata_channel1");
        assert channelSeting.getSubThreadPoolSize() == 2;
        assert channelSeting.getMessageType() == String.class;
    }
}
|
raxxor45/ValveSecurity | src_main/dx9sdk/Utilities/Source/Maya/DXMNodeStdMtlAdapter.h | #pragma once
#ifndef DXMNODESTDMTLADAPTER_H
#define DXMNODESTDMTLADAPTER_H
class DXMNodeStdMtlAdapter : public DXMNodeAdapter
{
public:
enum SHADERSYNCFLAGS
{
SHADERSYNC_ALL= 1,
};
DWORD SyncFlags;
DXMNodeStdMtlAdapter();
virtual ~DXMNodeStdMtlAdapter();
virtual const CStringA& Signature(){ return MySignature; }
virtual const CStringA& OwnerSignature(){ return DXMGraphStdMtlAdapter::MySignature; }
static CStringA MySignature;
virtual void Initialize(DXMNode* node);
virtual void Destroy();
virtual void SetCallbackState(bool DoCallbacks);
virtual bool Synchronize();
//LPDXCCRESOURCE Resource;
//LPDXCCSHADERPROPERTIES Shader;
//LPDXCCPROPERTYBAG Properties;//propertybag from Shader->GetProperties;
D3DXMATERIAL material;
char texFile[MAX_PATH+1];
void CreateShaderProps();
void DestroyShaderProps();
virtual void OnAttributeChanged( MNodeMessage::AttributeMessage msg, MPlug& plug, MPlug& otherPlug);
static void DispatchAttributeChanged( MNodeMessage::AttributeMessage msg, MPlug& plug, MPlug& otherPlug, void* clientData );
protected:
MCallbackId AttributeChangedCID;
};
#endif |
comments-ink/django-comments-ink | django_comments_ink/tests/test_serializers.py | from __future__ import unicode_literals
import json
from datetime import datetime
from unittest.mock import Mock, patch
import django_comments
import pytest
import pytz
from django.contrib.auth.models import AnonymousUser, User
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.test import TestCase
from django.urls import reverse
from django_comments.moderation import CommentModerator
from django_comments.signals import comment_will_be_posted
from django_comments_ink.api.serializers import (
FlagSerializer,
ReadCommentSerializer,
WriteCommentReactionSerializer,
WriteCommentSerializer,
)
from django_comments_ink.conf import settings
from django_comments_ink.models import InkComment
from django_comments_ink.moderation import moderator
from django_comments_ink.signals import should_request_be_authorized
from django_comments_ink.tests.models import (
Article,
authorize_api_post_comment,
comment_will_be_rejected,
)
from django_comments_ink.tests.utils import post_comment
from rest_framework.test import APIClient
class FakeRequest:
    """Bare-bones stand-in for a DRF request object.

    The serializers under test only read ``request.user`` and
    ``request.auth``; ``auth`` is always ``None`` here because no token
    authentication takes place in these tests.
    """

    def __init__(self, user):
        self.user = user
        self.auth = None
class WriteCommentSerializerAsVisitorTestCase(TestCase):
    """POSTing comments through the API as an anonymous visitor.

    Posting must be rejected until a receiver connected to the
    ``should_request_be_authorized`` signal authorizes the request.
    """

    def setUp(self):
        # Patch the mailer so no confirmation email is actually sent; the
        # call count doubles as an assertion target.
        self.patcher = patch("django_comments_ink.views.utils.send_mail")
        self.mock_mailer = self.patcher.start()
        self.article = Article.objects.create(
            title="October", slug="october", body="What I did on October..."
        )
        self.form = django_comments.get_form()(self.article)
        # Remove the following fields on purpose, as we don't know them and
        # therefore we don't send them when using the web API (unless when)
        # using the JavaScript plugin, but that is not the case being tested
        # here.
        for field_name in ["security_hash", "timestamp"]:
            self.form.initial.pop(field_name)

    def tearDown(self):
        self.patcher.stop()

    def test_post_comment_before_connecting_signal(self):
        # Without an authorizing signal receiver the API answers 403 and
        # sends no mail.
        data = {
            "name": "<NAME>",
            "email": "<EMAIL>",
            "followup": True,
            "reply_to": 0,
            "comment": "This post comment request should fail",
        }
        data.update(self.form.initial)
        response = post_comment(data)
        self.assertEqual(response.status_code, 403)
        self.assertEqual(
            response.rendered_content,
            b'{"detail":"You do not have permission to perform this action."}',
        )
        self.assertTrue(self.mock_mailer.call_count == 0)

    def test_post_comment_after_connecting_signal(self):
        # Once a receiver authorizes the (token-carrying) request, the API
        # accepts the comment and sends one confirmation email (204).
        should_request_be_authorized.connect(authorize_api_post_comment)
        data = {
            "name": "<NAME>",
            "email": "<EMAIL>",
            "followup": True,
            "reply_to": 0,
            "comment": "This post comment request should fail",
        }
        data.update(self.form.initial)
        client = APIClient()
        token = "Token 0<PASSWORD>42468<PASSWORD>bb<PASSWORD>"
        client.credentials(HTTP_AUTHORIZATION=token)
        self.assertTrue(self.mock_mailer.call_count == 0)
        response = client.post(reverse("comments-ink-api-create"), data)
        self.assertEqual(response.status_code, 204)  # Confirmation req sent.
        self.assertTrue(self.mock_mailer.call_count == 1)
        should_request_be_authorized.disconnect(authorize_api_post_comment)
class WriteCommentSerializerTestCase(TestCase):
    """WriteCommentSerializer behaviour for an authenticated user.

    The ``should_request_be_authorized`` receiver is connected for the
    whole test case, so requests are authorized and the tests can focus
    on serializer validation and the moderation/rejection paths.
    """

    def setUp(self):
        # Patch the mailer so no email leaves the test; count calls instead.
        self.patcher = patch("django_comments_ink.views.utils.send_mail")
        self.mock_mailer = self.patcher.start()
        self.user = User.objects.create_user(
            "joe", "<EMAIL>", "<PASSWORD>", first_name="Joe", last_name="Bloggs"
        )
        self.article = Article.objects.create(
            title="October", slug="october", body="What I did on October..."
        )
        self.form = django_comments.get_form()(self.article)
        # Remove the following fields on purpose, as we don't know them and
        # therefore we don't send them when using the web API (unless when)
        # using the JavaScript plugin, but that is not the case being tested
        # here.
        for field_name in ["security_hash", "timestamp"]:
            self.form.initial.pop(field_name)
        should_request_be_authorized.connect(authorize_api_post_comment)

    def tearDown(self):
        self.patcher.stop()
        should_request_be_authorized.disconnect(authorize_api_post_comment)

    def test_post_comment_as_registered_user_after_connecting_signal(self):
        # A logged-in user posts directly: 201 created, no confirmation mail.
        data = {
            "name": "",
            "email": "",
            "followup": True,
            "reply_to": 0,
            "comment": "This post comment request should fail",
        }
        data.update(self.form.initial)
        client = APIClient()
        client.login(username="joe", password="<PASSWORD>")
        self.assertTrue(self.mock_mailer.call_count == 0)
        response = client.post(reverse("comments-ink-api-create"), data)
        self.assertEqual(response.status_code, 201)  # Comment created.
        self.assertTrue(self.mock_mailer.call_count == 0)

    def test_post_comment_can_be_rejected(self):
        # A comment_will_be_posted receiver that rejects yields 403.
        comment_will_be_posted.connect(comment_will_be_rejected)
        data = {
            "name": "<NAME>",
            "email": "<EMAIL>",
            "followup": True,
            "reply_to": 0,
            "comment": "This post comment request should fail",
        }
        data.update(self.form.initial)
        client = APIClient()
        client.login(username="joe", password="<PASSWORD>")
        self.assertTrue(self.mock_mailer.call_count == 0)
        response = client.post(reverse("comments-ink-api-create"), data)
        self.assertEqual(response.status_code, 403)  # Comment created.
        self.assertTrue(self.mock_mailer.call_count == 0)
        comment_will_be_posted.disconnect(comment_will_be_rejected)

    def test_post_comment_can_be_put_in_moderation(self):  # code 202.
        # Set article's date back to 1990. And set up moderation for old
        # objects.
        self.article.publish = datetime(1990, 1, 1)
        self.article.save()

        # Create the moderator class, and register it so that comments
        # posted to Article are moderated using the class
        # PostCommentModerator.
        class PostCommentModerator(CommentModerator):
            email_notification = False
            auto_moderate_field = "publish"
            moderate_after = 365

        moderator.register(Article, PostCommentModerator)

        data = {
            "name": "<NAME>",
            "email": "<EMAIL>",
            "followup": True,
            "reply_to": 0,
            "comment": "This post comment request should fail",
        }
        data.update(self.form.initial)
        client = APIClient()
        client.login(username="joe", password="<PASSWORD>")
        self.assertTrue(self.mock_mailer.call_count == 0)
        response = client.post(reverse("comments-ink-api-create"), data)
        self.assertEqual(response.status_code, 202)  # Comment created.
        self.assertTrue(self.mock_mailer.call_count == 0)

    def test_validate_name_without_value(self):
        # When the authenticated user has no get_full_name, the serializer
        # falls back to the username for the "name" field.
        data = {
            "name": "",
            "email": "<EMAIL>",
            "followup": True,
            "reply_to": 0,
            "comment": "This is a comment",
        }
        data.update(self.form.initial)
        mock_user = Mock(spec=self.user, autospec=True, return_value=self.user)
        mock_user.configure_mock(
            **{"get_username.return_value": self.user.username}
        )
        delattr(mock_user, "get_full_name")
        req = FakeRequest(mock_user)
        ser = WriteCommentSerializer(data=data, context={"request": req})
        self.assertTrue(ser.is_valid())
        self.assertEqual(ser.data["name"], self.user.username)

    def test_validate_reply_to_does_not_exist(self):
        # reply_to referencing a non-existent comment is invalid.
        data = {
            "name": "<NAME>",
            "email": "<EMAIL>",
            "followup": True,
            "reply_to": 1,
            "comment": "This is a comment",
        }
        data.update(self.form.initial)
        req = FakeRequest(AnonymousUser())
        ser = WriteCommentSerializer(data=data, context={"request": req})
        self.assertFalse(ser.is_valid())
        expected_errors = '{"reply_to": ["reply_to comment does not exist"]}'
        self.assertEqual(json.dumps(ser.errors), expected_errors)

    def test_content_type_or_object_pk_cant_be_None(self):
        data = {
            "name": "<NAME>",
            "email": "<EMAIL>",
            "followup": True,
            "reply_to": 0,
            "comment": "This post comment request should fail",
        }
        # Make content_type None.
        data.update(self.form.initial)
        data["content_type"] = None
        req = FakeRequest(AnonymousUser())
        ser = WriteCommentSerializer(data=data, context={"request": req})
        self.assertFalse(ser.is_valid())
        expected_errors = '{"content_type": ["This field may not be null."]}'
        self.assertEqual(json.dumps(ser.errors), expected_errors)

        # Make object_pk None.
        data.update(self.form.initial)
        data["object_pk"] = None
        ser = WriteCommentSerializer(data=data, context={"request": None})
        self.assertFalse(ser.is_valid())
        expected_errors = '{"object_pk": ["This field may not be null."]}'
        self.assertEqual(json.dumps(ser.errors), expected_errors)

    def test_validate_forging_content_type_raises_LookupError(self):
        # A content_type app label that does not exist is reported as a
        # non-field error.
        data = {
            "name": "<NAME>",
            "email": "<EMAIL>",
            "followup": True,
            "reply_to": 0,
            "comment": "This post comment request should fail",
        }
        data.update(self.form.initial)
        data["content_type"] = "doesnot.exist"
        req = FakeRequest(AnonymousUser())
        ser = WriteCommentSerializer(data=data, context={"request": req})
        self.assertFalse(ser.is_valid())
        expected_errors = (
            '{"non_field_errors": ["Invalid content_type '
            "value: 'doesnot.exist'\"]}"
        )
        self.assertEqual(json.dumps(ser.errors), expected_errors)

    def test_validate_forging_content_type_raises_model_DoesNotExist(self):
        # A valid content_type with a PK that matches no object fails too.
        data = {
            "name": "<NAME>",
            "email": "<EMAIL>",
            "followup": True,
            "reply_to": 0,
            "comment": "This post comment request should fail",
        }
        data.update(self.form.initial)
        data["content_type"] = "auth.user"
        data["object_pk"] = "2"
        req = FakeRequest(AnonymousUser())
        ser = WriteCommentSerializer(data=data, context={"request": req})
        self.assertFalse(ser.is_valid())
        expected_errors = (
            '{"non_field_errors": ["No object matching '
            "content-type 'auth.user' and object PK '2'.\"]}"
        )
        self.assertEqual(json.dumps(ser.errors), expected_errors)

    def test_validate_forging_object_pk_raises_ValueError(self):
        # A non-numeric object_pk raises ValueError internally, surfaced as
        # a non-field error.
        data = {
            "name": "<NAME>",
            "email": "<EMAIL>",
            "followup": True,
            "reply_to": 0,
            "comment": "This post comment request should fail",
        }
        data.update(self.form.initial)
        data["object_pk"] = "tal"
        req = FakeRequest(AnonymousUser())
        ser = WriteCommentSerializer(data=data, context={"request": req})
        self.assertFalse(ser.is_valid())
        expected_errors = (
            '{"non_field_errors": ["Attempting to get '
            "content-type 'tests.article' and object "
            "PK 'tal' raised ValueError\"]}"
        )
        self.assertEqual(json.dumps(ser.errors), expected_errors)
def get_fake_avatar(comment):
    """Deterministic avatar URL used to stub out avatar lookup in tests."""
    return "/fake/avatar/{}".format(comment.user.username)


# Dotted path of the function above, as expected by the setting override.
funcpath = "django_comments_ink.tests.test_serializers.get_fake_avatar"
class ReadCommentsGetUserAvatarTestCase(TestCase):
    # Test ReadCommentSerializer method get_user_avatar.
    # Change setting comments_ink_API_GET_USER_AVATAR so that it uses a
    # deterministic function: get_fake_avatar (here defined). Then send a
    # couple of comments and verify that the function is called.
    #
    # NOTE(review): this class currently defines only fixtures (setUp) and
    # no test methods — confirm whether the described assertions were
    # removed or live elsewhere.
    def setUp(self):
        joe = User.objects.create_user(
            "joe",
            "<EMAIL>",
            "joepwd",
            first_name="Joe",
            last_name="Bloggs",
        )
        alice = User.objects.create_user(
            "alice",
            "<EMAIL>",
            "<PASSWORD>pwd",
            first_name="Alice",
            last_name="Bloggs",
        )
        self.article = Article.objects.create(
            title="September", slug="september", body="During September..."
        )
        self.article_ct = ContentType.objects.get(
            app_label="tests", model="article"
        )
        self.site = Site.objects.get(pk=1)
        # Testing comment from Bob.
        InkComment.objects.create(
            content_type=self.article_ct,
            object_pk=self.article.id,
            content_object=self.article,
            site=self.site,
            comment="testing comment from Bob",
            user=joe,
            submit_date=datetime.now(),
        )
        # Testing comment from Alice.
        InkComment.objects.create(
            content_type=self.article_ct,
            object_pk=self.article.id,
            content_object=self.article,
            site=self.site,
            comment="testing comment from Alice",
            user=alice,
            submit_date=datetime.now(),
        )
class RenderSubmitDateTestCase(TestCase):
    """How ReadCommentSerializer renders submit_date with/without USE_TZ."""

    def setUp(self):
        self.article = Article.objects.create(
            title="October", slug="october", body="What I did on October..."
        )

    def create_comment(self, submit_date_is_aware=True):
        # Build one comment whose submit_date is either timezone-aware
        # (UTC) or naive, matching the USE_TZ setting under test.
        site = Site.objects.get(pk=1)
        ctype = ContentType.objects.get(app_label="tests", model="article")
        if submit_date_is_aware:
            utc = pytz.timezone("UTC")
            submit_date = datetime(2021, 1, 10, 10, 15, tzinfo=utc)
        else:
            submit_date = datetime(2021, 1, 10, 10, 15)
        self.cm = InkComment.objects.create(
            content_type=ctype,
            object_pk=self.article.id,
            content_object=self.article,
            site=site,
            name="<NAME>",
            email="<EMAIL>",
            comment="Just a comment",
            submit_date=submit_date,
        )

    @patch.multiple("django.conf.settings", USE_TZ=False)
    @patch.multiple("django_comments_ink.conf.settings", USE_TZ=False)
    def test_submit_date_when_use_tz_is_false(self):
        # Naive datetime is rendered as-is.
        self.create_comment(submit_date_is_aware=False)
        qs = InkComment.objects.all()
        ser = ReadCommentSerializer(qs, context={"request": None}, many=True)
        self.assertEqual(
            ser.data[0]["submit_date"], "Jan. 10, 2021, 10:15 a.m."
        )

    @patch.multiple("django.conf.settings", USE_TZ=True)
    @patch.multiple("django_comments_ink.conf.settings", USE_TZ=True)
    def test_submit_date_when_use_tz_is_true(self):
        # Aware UTC datetime is converted to the active timezone before
        # rendering (10:15 UTC -> 11:15 local here).
        self.create_comment(submit_date_is_aware=True)
        qs = InkComment.objects.all()
        ser = ReadCommentSerializer(qs, context={"request": None}, many=True)
        self.assertEqual(
            ser.data[0]["submit_date"], "Jan. 10, 2021, 11:15 a.m."
        )
# ---------------------------------------------------------------------
# Tests for FlagSerializer. Using pytest instead of unittest.
@pytest.mark.django_db
def test_flag_serializer_is_valid(an_articles_comment):
    # "report" is one of the supported flag values, so validation succeeds.
    payload = {"comment": an_articles_comment.pk, "flag": "report"}
    assert FlagSerializer(data=payload).is_valid()
@pytest.mark.django_db
def test_flag_serializer_is_not_valid(an_articles_comment):
    # An unsupported flag value must fail validation.
    payload = {"comment": an_articles_comment.pk, "flag": "non-supported-flag"}
    assert not FlagSerializer(data=payload).is_valid()
@pytest.mark.django_db
def test_ReadReactionsField(a_comments_reaction, an_user):
    # Serializing a comment with one reaction must expose the reaction
    # code, label, icon and the single reacting author.
    serializer = ReadCommentSerializer(
        a_comments_reaction.comment, context={"request": None}
    )
    reactions = serializer.data["reactions"]
    assert len(reactions) == 1
    entry = reactions[0]
    assert entry.get("reaction") == "+"
    assert entry.get("label") == "+1"
    assert entry.get("icon") == "#128077"
    authors = entry.get("authors")
    assert authors is not None and len(authors) == 1
    first_author = authors[0]
    assert first_author["id"] == an_user.id
    assert first_author["author"] == settings.COMMENTS_INK_API_USER_REPR(an_user)
@pytest.mark.django_db
def test_ReadCommentSerializer_get_flags(a_comments_flag):
    # A flagged comment serializes its flag with the flagging user's
    # id and representation.
    serializer = ReadCommentSerializer(
        a_comments_flag.comment, context={"request": None}
    )
    flags = serializer.data["flags"]
    assert len(flags) == 1
    entry = flags[0]
    expected_repr = settings.COMMENTS_INK_API_USER_REPR(a_comments_flag.user)
    assert entry["flag"] == "removal"
    assert entry["id"] == a_comments_flag.user.id
    assert entry["user"] == expected_repr
@pytest.mark.django_db
def test_WriteCommentReactionSerializer(an_articles_comment):
    # An unknown reaction code is rejected with an 'invalid_choice'
    # error on the reaction field...
    serializer = WriteCommentReactionSerializer(
        data={"reaction": "?", "comment": an_articles_comment}
    )
    assert not serializer.is_valid()
    assert "reaction" in serializer.errors
    assert serializer.errors["reaction"][0].code == "invalid_choice"
    # ...while a registered reaction code validates cleanly.
    serializer = WriteCommentReactionSerializer(
        data={"reaction": "+", "comment": an_articles_comment}
    )
    assert serializer.is_valid()
|
bzdanny/BaiZe | app/system/service/systemService/systemServiceImpl/sysDictTypeImpl.go | <reponame>bzdanny/BaiZe<gh_stars>1-10
package systemServiceImpl
import (
"baize/app/common/redis"
"baize/app/constant/constants"
"baize/app/utils/exceLize"
"baize/app/system/dao/systemDao"
"baize/app/system/dao/systemDao/systemDaoImpl"
"baize/app/system/models/systemModels"
"baize/app/utils/snowflake"
)
// dictTypeServiceImpl is the package-level singleton instance of the
// dictionary-type service.
var dictTypeServiceImpl *dictTypeService

// init wires the singleton to the DAO implementation.
func init() {
	dictTypeServiceImpl = &dictTypeService{dictTypeDao: systemDaoImpl.GetSysDictTypeDao()}
}

// dictTypeService implements the dictionary-type business logic by
// delegating persistence to an IDictTypeDao.
type dictTypeService struct {
	dictTypeDao systemDao.IDictTypeDao
}

// GetDictTypeService returns the shared service instance.
func GetDictTypeService() *dictTypeService {
	return dictTypeServiceImpl
}
// SelectDictTypeList returns the dictionary types matching the query,
// together with the total row count for pagination.
func (s *dictTypeService) SelectDictTypeList(dictType *systemModels.SysDictTypeDQL) (list []*systemModels.SysDictTypeVo, count *int64) {
	return s.dictTypeDao.SelectDictTypeList(dictType)
}

// ExportDictType renders the matching dictionary types as spreadsheet bytes.
func (s *dictTypeService) ExportDictType(dictType *systemModels.SysDictTypeDQL) (data []byte) {
	list, _ := s.dictTypeDao.SelectDictTypeList(dictType)
	return exceLize.SetRows(systemModels.SysDictTypeListToRows(list))
}

// SelectDictTypeById fetches one dictionary type by primary key.
func (s *dictTypeService) SelectDictTypeById(dictId int64) (dictType *systemModels.SysDictTypeVo) {
	return s.dictTypeDao.SelectDictTypeById(dictId)
}

// SelectDictTypeByIds fetches the type keys for the given primary keys.
func (s *dictTypeService) SelectDictTypeByIds(dictId []int64) (dictTypes []string) {
	return s.dictTypeDao.SelectDictTypeByIds(dictId)
}

// InsertDictType assigns a snowflake id and persists a new dictionary type.
func (s *dictTypeService) InsertDictType(dictType *systemModels.SysDictTypeDML) {
	dictType.DictId = snowflake.GenID()
	s.dictTypeDao.InsertDictType(dictType)
}

// UpdateDictType persists changes to an existing dictionary type.
func (s *dictTypeService) UpdateDictType(dictType *systemModels.SysDictTypeDML) {
	s.dictTypeDao.UpdateDictType(dictType)
}

// DeleteDictTypeByIds removes the dictionary types with the given ids.
func (s *dictTypeService) DeleteDictTypeByIds(dictIds []int64) {
	s.dictTypeDao.DeleteDictTypeByIds(dictIds)
}

// CheckDictTypeUnique reports whether dictType's key collides with a
// record other than itself: true means a different row already uses the
// same type key.
func (s *dictTypeService) CheckDictTypeUnique(dictType *systemModels.SysDictTypeDML) bool {
	existingId := s.dictTypeDao.CheckDictTypeUnique(dictType.DictType)
	return existingId != 0 && existingId != dictType.DictId
}

// DictTypeClearCache drops every cached dictionary entry from redis.
func (s *dictTypeService) DictTypeClearCache() {
	redis.Delete(constants.SysDictKey + "*")
}

// SelectDictTypeAll returns every dictionary type.
func (s *dictTypeService) SelectDictTypeAll() (list []*systemModels.SysDictTypeVo) {
	return s.dictTypeDao.SelectDictTypeAll()
}
|
putmantime/dipper | dipper/sources/GWASCatalog.py | import logging
import csv
import re
from dipper.sources.Source import Source
from dipper.models.Dataset import Dataset
from dipper import config
from dipper.utils.CurieUtil import CurieUtil
from dipper.utils.DipperUtil import DipperUtil
from dipper.models.Model import Model
from dipper import curie_map
from dipper.models.Genotype import Genotype
from dipper.models.assoc.G2PAssoc import G2PAssoc
from dipper.models.Reference import Reference
from dipper.models.GenomicFeature import Feature, makeChromID
from dipper.graph.RDFGraph import RDFGraph
logger = logging.getLogger(__name__)
class GWASCatalog(Source):
    """
    The NHGRI-EBI Catalog of published genome-wide association studies.

    We link the variants recorded here to the curated EFO-classes using a
    "contributes_to" linkage because the only thing we know is that the SNPs
    are associated with the trait/disease,
    but we don't know if it is actually causative.
    Description of the GWAS catalog is here:
    http://www.ebi.ac.uk/gwas/docs/fileheaders#_file_headers_for_catalog_version_1_0_1

    GWAS also publishes Owl files described here
    http://www.ebi.ac.uk/gwas/docs/ontology

    Status:  IN PROGRESS

    """
    # Curies for generic annotation concepts used by this source.
    terms = {
        'cell_line_repository': 'CLO:0000008',
        'race': 'SIO:001015',
        'ethnic_group': 'EFO:0001799',
        'age': 'EFO:0000246',
        'sampling_time': 'EFO:0000689',
        'collection': 'ERO:0002190'
    }
    # Location of the latest catalog release and its association file name.
    GWASFTP = 'ftp://ftp.ebi.ac.uk/pub/databases/gwas/releases/latest'
    GWASFILE = 'gwas-catalog-associations_ontology-annotated.tsv'
    # Files fetched by fetch(): the catalog TSV plus the EFO and SO
    # ontologies used during parsing to classify traits and variant types.
    files = {
        'catalog': {
            'file': GWASFILE,
            'url': GWASFTP + '/' + GWASFILE},
        'efo': {
            'file': 'efo.owl',
            'url': 'http://www.ebi.ac.uk/efo/efo.owl'},
        'so': {
            'file': 'so.owl',
            'url': 'http://purl.obolibrary.org/obo/so.owl'}
    }
def __init__(self, graph_type, are_bnodes_skolemized):
super().__init__(graph_type, are_bnodes_skolemized, 'gwascatalog')
if graph_type != 'rdf_graph':
raise ValueError("UDP requires a rdf_graph")
self.dataset = Dataset(
'gwascatalog', 'GWAS Catalog', 'http://www.ebi.ac.uk/gwas/',
'The NHGRI-EBI Catalog of published genome-wide association studies',
'http://creativecommons.org/licenses/by/3.0/', None)
# 'http://www.ebi.ac.uk/gwas/docs/about' # TODO add this
if 'test_ids' not in config.get_config() or \
'gene' not in config.get_config()['test_ids']:
logger.warning("not configured with gene test ids.")
else:
self.test_ids = config.get_config()['test_ids']
# build a dictionary of genomic location to identifiers,
# to try to get the equivalences
self.id_location_map = dict()
return
    def fetch(self, is_dl_forced=False):
        """
        Download the catalog TSV and the EFO/SO ontologies (see self.files).

        :param is_dl_forced: force re-download even if files exist locally
        :return:

        """
        self.get_files(is_dl_forced)
        return
    def parse(self, limit=None):
        """Parse the downloaded catalog into the graph.

        :param limit: if not None, only the first `limit` rows are parsed
            (ignored in test mode)
        """
        if limit is not None:
            logger.info("Only parsing first %s rows of each file", limit)
        logger.info("Parsing files...")

        # testOnly (set by the harness) switches output to the test graph
        if self.testOnly:
            self.testMode = True

        self.process_catalog(limit)
        logger.info("Finished parsing.")
        return
def process_catalog(self, limit=None):
"""
:param limit:
:return:
"""
raw = '/'.join((self.rawdir, self.files['catalog']['file']))
logger.info("Processing Data from %s", raw)
efo_ontology = RDFGraph()
logger.info("Loading EFO ontology in separate rdf graph")
efo_ontology.parse(self.files['efo']['url'], format='xml')
efo_ontology.bind_all_namespaces()
logger.info("Finished loading EFO ontology")
so_ontology = RDFGraph()
logger.info("Loading SO ontology in separate rdf graph")
so_ontology.parse(self.files['so']['url'], format='xml')
so_ontology.bind_all_namespaces()
logger.info("Finished loading SO ontology")
line_counter = 0
with open(raw, 'r', encoding="iso-8859-1") as csvfile:
filereader = csv.reader(csvfile, delimiter='\t')
header = next(filereader, None) # the header row
header_len = len(header)
logger.info('header length:\t %i', header_len)
for row in filereader:
if not row:
pass
else:
line_counter += 1
if header_len != len(row):
logger.error(
'BadRow: %i has %i columns', line_counter, row)
pass
(date_added_to_catalog, pubmed_num, first_author,
pub_date, journal, link, study_name, disease_or_trait,
initial_sample_description, replicate_sample_description,
region, chrom_num, chrom_pos, reported_gene_nums,
mapped_gene, upstream_gene_num, downstream_gene_num,
snp_gene_nums, upstream_gene_distance,
downstream_gene_distance, strongest_snp_risk_allele, snps,
merged, snp_id_current, context, intergenic_flag,
risk_allele_frequency, pvalue, pvalue_mlog, pvalue_text,
or_or_beta, confidence_interval_95,
platform_with_snps_passing_qc, cnv_flag, mapped_trait,
mapped_trait_uri, study_accession) = row
intersect = list(
set([str(i) for i in self.test_ids['gene']]) &
set(re.split(r',', snp_gene_nums)))
# skip if no matches found in test set
if self.testMode and len(intersect) == 0:
continue
# 06-May-2015 25917933 <NAME> 20-Nov-2014 <NAME> http://europepmc.org/abstract/MED/25917933
# A genome-wide association study of suicide severity scores in bipolar disorder.
# Suicide in bipolar disorder
# 959 European ancestry individuals NA
# 10p11.22 10 32704340 C10orf68, CCDC7, ITGB1 CCDC7
# rs7079041-A rs7079041 0 7079041 intron 0 2E-6 5.698970
variant_curie, variant_type = \
self._get_curie_and_type_from_id(
strongest_snp_risk_allele)
if strongest_snp_risk_allele.strip() == '':
logger.debug(
"No strongest SNP risk allele for %s:\n%s",
pubmed_num, str(row))
# still consider adding in the EFO terms
# for what the study measured?
continue
if variant_type == 'snp':
self._add_snp_to_graph(
variant_curie, strongest_snp_risk_allele,
chrom_num, chrom_pos,
context, risk_allele_frequency)
self._add_deprecated_snp(variant_curie, snp_id_current,
merged, chrom_num, chrom_pos)
self._add_snp_gene_relation(
variant_curie, snp_gene_nums, upstream_gene_num,
downstream_gene_num)
elif variant_type == 'haplotype':
self._process_haplotype(
variant_curie, strongest_snp_risk_allele,
chrom_num, chrom_pos, context,
risk_allele_frequency, mapped_gene, so_ontology)
elif variant_type is None:
logger.warning(
"There's a snp id i can't manage: %s",
strongest_snp_risk_allele)
continue
description = self._make_description(
disease_or_trait, initial_sample_description,
replicate_sample_description,
platform_with_snps_passing_qc, pvalue)
self._add_variant_trait_association(
variant_curie, mapped_trait_uri, efo_ontology,
pubmed_num, description)
if not self.testMode and\
(limit is not None and line_counter > limit):
break
# TODO loop through the location hash,
# and make all snps at that location equivalent
for l in self.id_location_map:
snp_ids = self.id_location_map[l]
if len(snp_ids) > 1:
logger.info("%s has >1 snp id: %s", l, str(snp_ids))
return
    def _process_haplotype(
            self, hap_id, hap_label, chrom_num, chrom_pos, context,
            risk_allele_frequency, mapped_gene, so_ontology):
        """Model a multi-snp haplotype and each of its member snps.

        The haplotype label is a ';'-separated list of snp labels; the
        chrom/pos/context columns are parallel ';'-separated lists.
        """
        tax_id = 'NCBITaxon:9606'

        if self.testMode:
            g = self.testgraph
        else:
            g = self.graph
        geno = Genotype(g)
        model = Model(g)
        # add the feature to the graph
        hap_description = None
        if risk_allele_frequency != '' and \
                risk_allele_frequency != 'NR':
            hap_description = \
                str(risk_allele_frequency) + \
                ' [risk allele frequency]'
        model.addIndividualToGraph(hap_id, hap_label.strip(),
                                   Feature.types['haplotype'], hap_description)
        geno.addTaxon(tax_id, hap_id)
        # split the parallel ';'-separated columns
        snp_labels = re.split(r';\s?', hap_label)
        chrom_nums = re.split(r';\s?', chrom_num)
        chrom_positions = re.split(r';\s?', chrom_pos)
        context_list = re.split(r';\s?', context)
        mapped_genes = re.split(r';\s?', mapped_gene)
        snp_curies = list()

        # link each member snp to the haplotype via has_variant_part
        for index, snp in enumerate(snp_labels):
            snp_curie, snp_type = self._get_curie_and_type_from_id(snp)
            if snp_type is None:
                # make blank node
                snp_curie = self.make_id(snp, "_")
            g.addTriple(hap_id, geno.object_properties['has_variant_part'],
                        snp_curie)
            snp_curies.append(snp_curie)

        # courtesy http://stackoverflow.com/a/16720915
        # bail out if the parallel columns disagree in length
        length = len(snp_labels)
        if not all(len(lst) == length
                   for lst in [chrom_nums, chrom_positions, context_list]):
            logger.warn(
                "Unexpected data field for haplotype {} \n "
                "will not add snp details".format(hap_label))
            return

        variant_in_gene_count = 0
        for index, snp_curie in enumerate(snp_curies):
            self._add_snp_to_graph(
                snp_curie, snp_labels[index], chrom_nums[index],
                chrom_positions[index], context_list[index])

            if len(mapped_genes) == len(snp_labels):
                so_class = self._map_variant_type(context_list[index])
                if so_class is None:
                    raise ValueError("Unknown SO class {} in haplotype {}"
                                     .format(context_list[index], hap_label))
                # is this context a kind of gene_variant (SO:0001564)?
                so_query = """
                    SELECT ?variant_label
                    WHERE {{
                        {0} rdfs:subClassOf+ SO:0001564 ;
                            rdfs:label ?variant_label .
                    }}
                """.format(so_class)

                query_result = so_ontology.query(so_query)

                if len(list(query_result)) > 0:
                    gene_id = DipperUtil.get_ncbi_id_from_symbol(
                        mapped_genes[index])
                    if gene_id is not None:
                        geno.addAffectedLocus(snp_curie, gene_id)
                        geno.addAffectedLocus(hap_id, gene_id)
                        variant_in_gene_count += 1

                # intergenic contexts: relate the snp to its neighbour gene
                if context_list[index] == 'upstream_gene_variant':
                    gene_id = DipperUtil.get_ncbi_id_from_symbol(
                        mapped_genes[index])
                    if gene_id is not None:
                        g.addTriple(
                            snp_curie,
                            Feature.object_properties[
                                'upstream_of_sequence_of'],
                            gene_id)
                elif context_list[index] == 'downstream_gene_variant':
                    gene_id = DipperUtil.get_ncbi_id_from_symbol(
                        mapped_genes[index])
                    if gene_id is not None:
                        g.addTriple(
                            snp_curie,
                            Feature.object_properties[
                                'downstream_of_sequence_of'],
                            gene_id)
            else:
                logger.warn("More mapped genes than snps, "
                            "cannot disambiguate for {}".format(hap_label))

        # Seperate in case we want to apply a different relation
        # If not this is redundant with triples added above
        if len(mapped_genes) == variant_in_gene_count \
                and len(set(mapped_genes)) == 1:
            gene_id = DipperUtil.get_ncbi_id_from_symbol(mapped_genes[0])
            geno.addAffectedLocus(hap_id, gene_id)

        return
    def _add_snp_to_graph(
            self, snp_id, snp_label, chrom_num, chrom_pos, context,
            risk_allele_frequency=None):
        """Add one snp as a Feature with location, taxon and context types.

        Also records the snp id under its genomic location in
        self.id_location_map for later equivalence checking.
        """
        # constants
        tax_id = 'NCBITaxon:9606'
        genome_version = 'GRCh38'

        if self.testMode:
            g = self.testgraph
        else:
            g = self.graph
        model = Model(g)

        if chrom_num != '' and chrom_pos != '':
            location = self._make_location_curie(chrom_num, chrom_pos)
            if location not in self.id_location_map:
                self.id_location_map[location] = set()
        else:
            location = None

        # a trailing '-X' on the id names the risk allele base
        alteration = re.search(r'-(.*)$', snp_id)
        if alteration is not None \
                and re.match(r'[ATGC]', alteration.group(1)):
            # add variation to snp
            pass  # TODO

        if location is not None:
            self.id_location_map[location].add(snp_id)

        # create the chromosome
        chrom_id = makeChromID(chrom_num, genome_version, 'CHR')

        # add the feature to the graph
        snp_description = None
        if risk_allele_frequency is not None\
                and risk_allele_frequency != ''\
                and risk_allele_frequency != 'NR':
            snp_description = \
                str(risk_allele_frequency) + \
                ' [risk allele frequency]'

        f = Feature(
            g, snp_id, snp_label.strip(),
            Feature.types['SNP'], snp_description)
        if chrom_num != '' and chrom_pos != '':
            # a snp is a point: start == end
            f.addFeatureStartLocation(chrom_pos, chrom_id)
            f.addFeatureEndLocation(chrom_pos, chrom_id)
        f.addFeatureToGraph()
        f.addTaxonToFeature(tax_id)
        # TODO consider adding allele frequency as property;
        # but would need background info to do that

        # also want to add other descriptive info about
        # the variant from the context
        for c in re.split(r';', context):
            cid = self._map_variant_type(c.strip())
            if cid is not None:
                model.addType(snp_id, cid)

        return
def _add_deprecated_snp(self, snp_id, snp_id_current, merged,
chrom_num, chrom_pos):
if self.testMode:
g = self.testgraph
else:
g = self.graph
model = Model(g)
location = self._make_location_curie(chrom_num, chrom_pos)
# add deprecation information
if merged == '1' and str(snp_id_current.strip()) != '':
# get the current rs_id
current_rs_id = 'dbSNP:'
if not re.match(r'rs', snp_id_current):
current_rs_id += 'rs'
current_rs_id += str(snp_id_current)
if location is not None:
if location not in self.id_location_map:
self.id_location_map[location] = set(current_rs_id)
else:
self.id_location_map[location].add(current_rs_id)
model.addDeprecatedIndividual(snp_id, current_rs_id)
# TODO check on this
# should we add the annotations to the current
# or orig?
model.makeLeader(current_rs_id)
else:
model.makeLeader(snp_id)
def _add_snp_gene_relation(self, snp_id, snp_gene_nums,
upstream_gene_num, downstream_gene_num):
if self.testMode:
g = self.testgraph
else:
g = self.graph
geno = Genotype(g)
# add the feature as a sequence alteration
# affecting various genes
# note that intronic variations don't necessarily list
# the genes such as for rs10448080 FIXME
if snp_gene_nums != '':
for s in re.split(r',', snp_gene_nums):
s = s.strip()
# still have to test for this,
# because sometimes there's a leading comma
if s != '':
gene_id = 'NCBIGene:' + s
geno.addAffectedLocus(snp_id, gene_id)
# add the up and downstream genes if they are available
if upstream_gene_num != '':
downstream_gene_id = 'NCBIGene:' + downstream_gene_num
g.addTriple(
snp_id,
Feature.object_properties[
r'upstream_of_sequence_of'],
downstream_gene_id)
if downstream_gene_num != '':
upstream_gene_id = 'NCBIGene:' + upstream_gene_num
g.addTriple(
snp_id,
Feature.object_properties[
'downstream_of_sequence_of'],
upstream_gene_id)
    def _add_variant_trait_association(self, variant_id, mapped_trait_uri,
                                       efo_ontology, pubmed_id,
                                       description=None):
        """Associate the variant with each mapped EFO trait.

        EFO terms that are diseases (under EFO:0000408) are re-parented to
        DOID:4, phenotypes (under EFO:0000651) to UPHENO:0001001; the
        association itself uses the 'contributes_to' relation with the
        pubmed reference as source.
        """
        if self.testMode:
            g = self.testgraph
        else:
            g = self.graph
        model = Model(g)
        # make associations to the EFO terms; there can be >1
        if mapped_trait_uri.strip() != '':
            for trait in re.split(r',', mapped_trait_uri):
                trait = trait.strip()

                cu = CurieUtil(curie_map.get())
                trait_id = cu.get_curie(trait)

                # is the trait a disease (descendant of EFO:0000408)?
                dis_query = """
                    SELECT ?trait
                    WHERE {{
                        {0} rdfs:subClassOf+ EFO:0000408 .
                        {0} rdfs:label ?trait .
                    }}
                """.format(trait_id)

                query_result = efo_ontology.query(dis_query)
                if len(list(query_result)) > 0:
                    if re.match(r'^EFO', trait_id):
                        model.addClassToGraph(trait_id, list(
                            query_result)[0][0], 'DOID:4')

                # or a phenotype (descendant of EFO:0000651)?
                phenotype_query = """
                    SELECT ?trait
                    WHERE {{
                        {0} rdfs:subClassOf+ EFO:0000651 .
                        {0} rdfs:label ?trait .
                    }}
                """.format(trait_id)

                query_result = efo_ontology.query(phenotype_query)
                if len(list(query_result)) > 0:
                    if re.match(r'^EFO', trait_id):
                        model.addClassToGraph(
                            trait_id,
                            list(query_result)[0][0],
                            'UPHENO:0001001')

                pubmed_curie = 'PMID:' + pubmed_id

                ref = Reference(
                    g, pubmed_curie, Reference.ref_types['journal_article'])
                ref.addRefToGraph()

                assoc = G2PAssoc(
                    g, self.name, variant_id, trait_id,
                    model.object_properties['contributes_to'])
                assoc.add_source(pubmed_curie)
                # combinatorial evidence
                # used in automatic assertion
                eco_id = 'ECO:0000213'
                assoc.add_evidence(eco_id)

                if description is not None:
                    assoc.set_description(description)
                # FIXME score should get added to provenance/study
                # assoc.set_score(pvalue)
                assoc.add_association_to_graph()
@staticmethod
def _map_variant_type(sample_type):
ctype = None
type_map = {
'stop_gained': 'SO:0001587', # stop-gain variant
'intron_variant': 'SO:0001627', # intron variant
'3_prime_UTR_variant': 'SO:0001624', # 3'utr variant
'5_prime_UTR_variant': 'SO:0001623', # 5'UTR variant
'synonymous_variant': 'SO:0001819', # synonymous variant
'frameshift_variant': 'SO:0001589', # frameshift
'intergenic_variant': 'SO:0001628', # intergenic_variant
'non_coding_transcript_exon_variant': 'SO:0001619', # noncoding transcript variant
'splice_acceptor_variant': 'SO:0001574', # splice acceptor variant
'splice_donor_variant': 'SO:0001575', # splice donor variant
'missense_variant': 'SO:0001583', # missense variant
'downstream_gene_variant': 'SO:0001634', # 500B_downstream_variant
'upstream_gene_variant': 'SO:0001636', # 2KB_upstream_variant
'coding_sequence_variant': 'SO:0001580', # coding_sequence_variant
'non_coding_exon_variant ': 'SO:0001792',
'regulatory_region_variant': 'SO:0001566',
'splice_region_variant': 'SO:0001630',
'stop_lost': 'SO:0001578',
'TF_binding_site_variant': 'SO:0001782'
}
if sample_type.strip() in type_map:
ctype = type_map.get(sample_type)
elif sample_type.strip() != '':
logger.error("Variant type not mapped: %s", sample_type)
return ctype
@staticmethod
def _make_location_curie(chrom_num, chrom_pos):
return 'chr' + str(chrom_num) + ':' + str(chrom_pos)
@staticmethod
def _make_description(disease_or_trait, initial_sample_description,
replicate_sample_description,
platform_with_snps_passing_qc, pvalue):
description = 'A study of ' + disease_or_trait + \
' in ' + initial_sample_description
if replicate_sample_description != '':
description = \
' '.join(
(description, 'with',
replicate_sample_description))
if platform_with_snps_passing_qc != '':
description = ' '.join(
(description, 'on platform',
platform_with_snps_passing_qc))
description = ' '.join((description, '(p=' + pvalue + ')'))
return description
@staticmethod
def _get_curie_and_type_from_id(variant_id):
"""
Given a variant id, our best guess at its curie
and type (snp, haplotype, etc)
None will be used for both curie and type
for IDs that we can't process
:param variant_id:
:return:
"""
curie = None
variant_type = None
# remove space before hyphens
variant_id = re.sub(r' -', '-', variant_id)
if re.search(r' x ', variant_id) \
or re.search(r',', variant_id):
# TODO deal with rs1234 x rs234... (haplotypes?)
logger.warning(
"Cannot parse variant groups of this format: %s",
variant_id)
elif re.search(r';', variant_id):
curie = ':haplotype_' + Source.hash_id(variant_id)
variant_type = "haplotype"
elif re.match(r'rs', variant_id):
curie = 'dbSNP:' + variant_id.strip()
curie = re.sub(r'-.*$', '', curie).strip()
variant_type = "snp"
# remove the alteration
elif re.match(r'kgp', variant_id):
# http://www.1000genomes.org/faq/what-are-kgp-identifiers
curie = ':kgp-' + variant_id.strip()
variant_type = "snp"
elif re.match(r'chr', variant_id):
# like: chr10:106180121-G
#
variant_id = re.sub(r'-?', '-N', variant_id)
variant_id = re.sub(r' ', '', variant_id)
curie = ':gwas-' + re.sub(
r':', '-', variant_id.strip())
variant_type = "snp"
elif variant_id.strip() == '':
pass
else:
logger.warning(
"There's a snp id i can't manage: %s",
variant_id)
return curie, variant_type
def getTestSuite(self):
import unittest
from tests.test_gwascatalog import GWASCatalogTestCase
test_suite = \
unittest.TestLoader().loadTestsFromTestCase(GWASCatalogTestCase)
return test_suite
|
Uniandes-isis2603/s3_watchdogs | s3_watchdogs-web/src/main/webapp/src/modules/faqs/faqs.mod.js | (function (ng) {
var mod = ng.module("faqsModule", ['ui.router']);
mod.config(['$stateProvider', '$urlRouterProvider', function ($stateProvider, $urlRouterProvider) {
var basePath = 'src/modules/faqs/';
$urlRouterProvider.otherwise("/faqsList");
$stateProvider.state('faqsList', {
url: '/faqs/list',
views: {
'navView': {
templateUrl: basePath + 'faqs.nav.html'
},
'completeView': {
templateUrl: basePath + 'faqs.list.html'
}
}
});
}]);
})(window.angular);
|
linyinggaoseng/XiaoWei | Server/common/response.js | var reason = require('./reason');
exports.genHttpResp = function(code, data, err){
var result = {
code: code, //[0 - 成功],[ >= 1 - 失败, 也是错误代码, 1~10保留,11:email已存在、12:电话号码已存在]
msg: reason.getReason(code) // 失败时,存储失败原因,对用户友好的原因说明
};
if(data != null){
result.data = data; //响应数据
}
if(err){
result.err = err.message? err.message : err; //实际的错误信息,这种信息只用于调试定位问题,显示给用户是不友好的
}
return result;
}; |
omtanke/react-material-icons | src/EscalatorWarningSharp.js | function SvgEscalatorWarningSharp(props) {
return (
<svg
xmlns='http://www.w3.org/2000/svg'
height='1em'
viewBox='0 0 24 24'
width='1em'
className='svg-icon'
{...props}>
<path fill='none' d='M0 0h24v24H0z' />
<path d='M6.5 2c1.1 0 2 .9 2 2s-.9 2-2 2-2-.9-2-2 .9-2 2-2zm9 7.5c0 .83.67 1.5 1.5 1.5s1.5-.67 1.5-1.5S17.83 8 17 8s-1.5.67-1.5 1.5zm-1.29 3.36l-.92 1.32L9.72 8c-.35-.62-1.01-1-1.73-1H3v8h1.5v7h5V11.61L12.03 16h2.2l.77-1.1V22h4v-5h1v-5h-4.15c-.66 0-1.27.32-1.64.86z' />
</svg>
);
}
export default SvgEscalatorWarningSharp;
|
scp-studios/scp-game-framework | include/scp/core-pch.hpp | #ifndef EF9E1FC6_81B7_4747_B5DB_13E7A798B2AE
#define EF9E1FC6_81B7_4747_B5DB_13E7A798B2AE
#include <cstdint>
#include <string_view>
#include <iostream>
#include <memory>
#endif /* EF9E1FC6_81B7_4747_B5DB_13E7A798B2AE */
|
alancnet/artifactory | web/rest-ui/src/main/java/org/artifactory/ui/rest/service/admin/security/auth/logout/LogoutService.java | <reponame>alancnet/artifactory
package org.artifactory.ui.rest.service.admin.security.auth.logout;
import org.artifactory.api.context.ContextHelper;
import org.artifactory.rest.common.service.ArtifactoryRestRequest;
import org.artifactory.rest.common.service.RestResponse;
import org.artifactory.rest.common.service.RestService;
import org.artifactory.security.AuthenticationHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.Scope;
import org.springframework.security.core.Authentication;
import org.springframework.security.web.authentication.logout.LogoutHandler;
import org.springframework.stereotype.Component;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.Map;
/**
* @author <NAME>
*/
@Component
@Scope(BeanDefinition.SCOPE_PROTOTYPE)
public class LogoutService implements RestService {
    private static final Logger log = LoggerFactory.getLogger(LogoutService.class);

    /**
     * Logs the current session out of every registered security provider.
     * Failures are deliberately demoted to debug logging so logout stays
     * best-effort and never surfaces an error to the caller.
     */
    @Override
    public void execute(ArtifactoryRestRequest request, RestResponse response) {
        try {
            Map<String, LogoutHandler> handlers =
                    ContextHelper.get().beansForType(LogoutHandler.class);
            tryToLogoutFromProviders(request, response, handlers);
        } catch (Exception e) {
            log.debug("failed to perform session logout", e);
        }
    }

    /**
     * Invokes every registered {@link LogoutHandler} with the current
     * servlet request/response and authentication.
     *
     * @param artifactoryRequest  encapsulates data related to the request
     * @param artifactoryResponse encapsulates data required for the response
     * @param logoutHandlers      map of logout handlers to invoke
     */
    private void tryToLogoutFromProviders(ArtifactoryRestRequest artifactoryRequest,
            RestResponse artifactoryResponse, Map<String, LogoutHandler> logoutHandlers) {
        HttpServletRequest httpRequest = artifactoryRequest.getServletRequest();
        HttpServletResponse httpResponse = artifactoryResponse.getServletResponse();
        Authentication authentication = AuthenticationHelper.getAuthentication();
        for (LogoutHandler handler : logoutHandlers.values()) {
            handler.logout(httpRequest, httpResponse, authentication);
        }
    }
}
|
killf/leetcode_cpp | src/0606.cpp | #include <iostream>
#include <string>
#include <vector>
#include <cmath>
#include <hash_map>
#include <map>
#include <sstream>
#include <type_traits>
using namespace std;
// Binary tree node for LeetCode 606 ("Construct String from Binary Tree").
struct TreeNode {
    int val;
    TreeNode *left;
    TreeNode *right;
    TreeNode(int x) : val(x), left(NULL), right(NULL) {}
};

class Solution {
public:
    // Serialize a binary tree into its preorder-with-parentheses string,
    // omitting only the empty parentheses pairs that are not needed to
    // reconstruct the tree: an empty right child is dropped, while an
    // empty left child renders as "()" whenever a right child exists.
    std::string tree2str(TreeNode *t) {
        if (t == nullptr) {
            return "";
        }
        std::string out = std::to_string(t->val);
        if (t->right != nullptr) {
            // left must be rendered (possibly as "()") to keep structure
            out += "(" + tree2str(t->left) + ")";
            out += "(" + tree2str(t->right) + ")";
        } else if (t->left != nullptr) {
            out += "(" + tree2str(t->left) + ")";
        }
        return out;
    }
};
shadialameddin/numerical_tools_and_friends | cpp/0_check/johnstonc_programmingtoday_sourcecode/chapter11_inheritance/ch11_counter/ch.cpp | //Program 11-1 Counter Sample program with a counter object
//File: Ch11Counter.cpp
#include "Ch11Counter.h"
#include <iostream.h>
int main()
{
Counter HowMany; // constructor sets HowMany count = 0
cout << "\n Sample program with class Counter \n";
HowMany.PrintCount();
cout << "\n Increment HowMany twice: ";
++HowMany;
++HowMany;
HowMany.PrintCount();
cout << "\n Now set the count back to zero. ";
HowMany.SetCount(0); // set count value back to zero
HowMany.PrintCount();
cout << "\n\n All finished counting! \n";
return 0;
}
|
AIPHES/ecml-pkdd-2019-J3R-explainable-recommender | refs/librec/core/src/main/java/net/librec/recommender/nn/rating/AutoRecRecommender.java | <reponame>AIPHES/ecml-pkdd-2019-J3R-explainable-recommender
/**
* Copyright (C) 2016 LibRec
* <p>
* This file is part of LibRec.
* LibRec is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* <p>
* LibRec is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* <p>
* You should have received a copy of the GNU General Public License
* along with LibRec. If not, see <http://www.gnu.org/licenses/>.
*/
package net.librec.recommender.nn.rating;
import net.librec.annotation.ModelData;
import net.librec.common.LibrecException;
import net.librec.math.structure.MatrixEntry;
import net.librec.recommender.MatrixRecommender;
import org.deeplearning4j.nn.api.OptimizationAlgorithm;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.Updater;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
/**
* Suvash et al., <strong>AutoRec: Autoencoders Meet Collaborative Filtering</strong>, WWW Companion 2015.
*
* @author <NAME>
*/
@ModelData({"isRating", "autorec", "autoRecModel", "trainSet"})
public class AutoRecRecommender extends MatrixRecommender {
    /**
     * the dimension of input units
     */
    private int inputDim;

    /**
     * the dimension of hidden units
     */
    private int hiddenDim;

    /**
     * the learning rate of the optimization algorithm
     */
    private double learningRate;

    /**
     * the momentum of the optimization algorithm
     */
    private double momentum;

    /**
     * the regularization coefficient of the weights in the neural network
     */
    private double lambdaReg;

    /**
     * the number of iterations
     */
    private int numIterations;

    /**
     * the activation function of the hidden layer in the neural network
     */
    private String hiddenActivation;

    /**
     * the activation function of the output layer in the neural network
     */
    private String outputActivation;

    /**
     * the autorec model
     */
    private MultiLayerNetwork autoRecModel;

    /**
     * the data structure that stores the training data
     */
    private INDArray trainSet;

    /**
     * the data structure that indicates which element in the user-item is non-zero
     */
    private INDArray trainSetMask;

    /**
     * Reads the hyper-parameters from the configuration and densifies the
     * sparse rating matrix into trainSet plus a 0/1 observation mask.
     * Note the dense layout is {numItems, numUsers}: each row is one item's
     * rating vector over all users (item-based AutoRec), hence
     * inputDim == numUsers.
     */
    @Override
    protected void setup() throws LibrecException {
        super.setup();
        inputDim = numUsers;
        hiddenDim = conf.getInt("rec.hidden.dimension");
        learningRate = conf.getDouble("rec.iterator.learnrate");
        lambdaReg = conf.getDouble("rec.weight.regularization");
        numIterations = conf.getInt("rec.iterator.maximum");
        hiddenActivation = conf.get("rec.hidden.activation");
        outputActivation = conf.get("rec.output.activation");

        // transform the sparse matrix to INDArray
        int[] matrixShape = {numItems, numUsers};
        trainSet = Nd4j.zeros(matrixShape);
        trainSetMask = Nd4j.zeros(matrixShape);
        for (MatrixEntry me: trainMatrix) {
            trainSet.put(me.column(), me.row(), me.get());
            trainSetMask.put(me.column(), me.row(), 1);
        }
    }

    /**
     * Builds a one-hidden-layer autoencoder (input -> hidden -> input) with
     * L2 weight regularization and a masked loss (AutoRecLossFunction) so
     * that only observed ratings contribute to the error, then trains it
     * for numIterations epochs with early stopping via isConverged.
     */
    @Override
    protected void trainModel() throws LibrecException {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .iterations(1)
                .updater(Updater.NESTEROVS)
                .learningRate(learningRate)
                .weightInit(WeightInit.XAVIER_UNIFORM)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                .regularization(true)
                .l2(lambdaReg)
                .list()
                .layer(0, new DenseLayer.Builder().nIn(inputDim).nOut(hiddenDim)
                        //.activation(Activation.SIGMOID)
                        .activation(Activation.fromString(hiddenActivation))
                        .biasInit(0.1)
                        .build())
                .layer(1, new OutputLayer.Builder(new AutoRecLossFunction()).nIn(hiddenDim).nOut(inputDim)
                        //.activation(Activation.IDENTITY)
                        .activation(Activation.fromString(outputActivation))
                        .biasInit(0.1)
                        .build())
                .pretrain(false).backprop(true)
                .build();

        autoRecModel = new MultiLayerNetwork(conf);
        autoRecModel.init();

        for (int iter = 1; iter <= numIterations; iter++) {
            loss = 0.0d;
            // the loss function reads the mask through this static field,
            // so it must be set before each fit
            AutoRecLossFunction.trainMask = trainSetMask;
            autoRecModel.fit(trainSet, trainSet);
            loss = autoRecModel.score();
            if (isConverged(iter) && earlyStop) {
                break;
            }
            lastLoss = loss;
        }
    }

    /**
     * Feeds the item's observed rating row through the autoencoder and
     * returns the reconstructed rating for the given user.
     */
    @Override
    protected double predict(int userIdx, int itemIdx) throws LibrecException {
        INDArray predictedRatingVector = autoRecModel.output(trainSet.getRow(itemIdx));
        return predictedRatingVector.getDouble(userIdx);
    }
}
|
switcherSalca/asambleaWeb | src/routes.js | import Assembly from './views/assembly/assembly'
import Dashboard from './views/dashboard/Dashboard'
import ListRegistered from './views/registered/ListRegistered'
import Selection from './views/questions/Selection/Selection'
import Resolution from './views/questions/Resolution/resolution'
import Visualization from './views/questions/Visualization/Visualization'
import Report from './views/questions/Reports/Report'
import ListDiaryPoint from './views/listDiaryPoint/listDiaryPoint'
import Admin from './views/admin/Admin'
import Configuration from './views/configuration/Configuration'
import UploadContract from './views/configuration/UploadContract'
import UploadMemorial from './views/configuration/UploadMemorial'
import VideoList from './views/help/VideoList'
import NewVideo from './views/help/NewVideo'
import Reports from './views/reports/Index'
// https://github.com/ReactTraining/react-router/tree/master/packages/react-router-config
// Route table consumed by a react-router-config style renderer.
// Each entry: path (URL pattern), name (display/breadcrumb label), component (view).
const routes = [
  { path: '/assembly', name: 'Asamblea', component: Assembly },
  { path: '/dashboard', name: 'Dashboard', component: Dashboard },
  { path: '/registered', name: 'Registrados', component: ListRegistered },
  { path: '/question/selection', name: 'SeleccionPreguntas', component: Selection },
  { path: '/question/resolution', name: 'Resolucion de preguntas', component: Resolution },
  { path: '/question/visualization', name: 'Vista de preguntas respondidas', component: Visualization },
  // Fixed copy-paste label: previously duplicated the previous entry's name.
  { path: '/question/reports', name: 'Reporte de preguntas', component: Report },
  { path: '/point_diary', name: 'Asamblea puntos', component: ListDiaryPoint },
  { path: '/admin', name: 'Administradores', component: Admin },
  { path: '/configuration', name: 'Configuracion', component: Configuration },
  { path: '/contract', name: 'Contrato', component: UploadContract },
  { path: '/memorial', name: 'Memoria de labores', component: UploadMemorial },
  // exact:true so '/help' does not also match '/help/newvideo'.
  { path: '/help', exact: true, name: 'Ayuda', component: VideoList },
  { path: '/help/newvideo', name: 'Nuevo Video', component: NewVideo },
  // Fixed copy-paste label: was 'Nuevo Video', copied from the entry above.
  { path: '/reports', name: 'Reportes', component: Reports },
]

export default routes
mhuisi/lean4 | stage0/stdlib/Init/Data/List/Instances.c | // Lean compiler output
// Module: Init.Data.List.Instances
// Imports: Init.Data.List.Basic Init.Control.Alternative Init.Control.Monad
#include "runtime/lean.h"
#if defined(__clang__)
#pragma clang diagnostic ignored "-Wunused-parameter"
#pragma clang diagnostic ignored "-Wunused-label"
#elif defined(__GNUC__) && !defined(__CLANG__)
#pragma GCC diagnostic ignored "-Wunused-parameter"
#pragma GCC diagnostic ignored "-Wunused-label"
#pragma GCC diagnostic ignored "-Wunused-but-set-variable"
#endif
#ifdef __cplusplus
extern "C" {
#endif
lean_object* l_List_Monad___closed__5;
lean_object* l_List_Monad___lambda__4(lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_List_map___main___at_List_Monad___spec__3(lean_object*, lean_object*);
lean_object* l_List_Monad___closed__7;
lean_object* l_List_Alternative___lambda__1(lean_object*);
lean_object* l_List_map___main___rarg(lean_object*, lean_object*);
lean_object* l_List_Alternative___closed__2;
lean_object* l_List_map___main___at_List_Monad___spec__4(lean_object*, lean_object*);
lean_object* l_List_Monad___closed__3;
lean_object* l_List_Monad;
lean_object* l_List_pure(lean_object*);
lean_object* l_List_map___main___at_List_Monad___spec__1___rarg(lean_object*, lean_object*);
lean_object* l_List_Monad___closed__10;
lean_object* l_List_append(lean_object*);
lean_object* l_List_map___main___at_List_Monad___spec__5___rarg(lean_object*, lean_object*);
lean_object* l_List_Alternative___closed__3;
lean_object* l_List_Monad___closed__4;
lean_object* l_List_map___main___at_List_Monad___spec__2___rarg(lean_object*, lean_object*);
lean_object* l_List_Monad___closed__9;
lean_object* l_List_map___main___at_List_Monad___spec__2(lean_object*, lean_object*);
lean_object* l_List_Monad___closed__1;
lean_object* l_List_map(lean_object*, lean_object*);
lean_object* l_List_map___main___at_List_Monad___spec__3___rarg(lean_object*, lean_object*);
lean_object* l_List_Monad___closed__8;
lean_object* l_List_join___main___rarg(lean_object*);
lean_object* l_List_map___main___at_List_Monad___spec__4___rarg(lean_object*, lean_object*);
lean_object* l_List_Alternative;
lean_object* l_List_Monad___lambda__3(lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_List_Monad___lambda__2(lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_List_map___main___at_List_Monad___spec__1(lean_object*, lean_object*);
lean_object* l_List_bind(lean_object*, lean_object*);
lean_object* l_List_Monad___lambda__1(lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_List_Monad___closed__6;
lean_object* l_List_Monad___closed__2;
lean_object* l_List_map___main___at_List_Monad___spec__5(lean_object*, lean_object*);
lean_object* l_List_Alternative___closed__1;
lean_object* l_List_map___main___at_List_Monad___spec__1___rarg(lean_object* x_1, lean_object* x_2) {
_start:
{
if (lean_obj_tag(x_2) == 0)
{
lean_object* x_3;
lean_dec(x_1);
x_3 = lean_box(0);
return x_3;
}
else
{
uint8_t x_4;
x_4 = !lean_is_exclusive(x_2);
if (x_4 == 0)
{
lean_object* x_5; lean_object* x_6; lean_object* x_7;
x_5 = lean_ctor_get(x_2, 1);
x_6 = lean_ctor_get(x_2, 0);
lean_dec(x_6);
lean_inc(x_1);
x_7 = l_List_map___main___at_List_Monad___spec__1___rarg(x_1, x_5);
lean_ctor_set(x_2, 1, x_7);
lean_ctor_set(x_2, 0, x_1);
return x_2;
}
else
{
lean_object* x_8; lean_object* x_9; lean_object* x_10;
x_8 = lean_ctor_get(x_2, 1);
lean_inc(x_8);
lean_dec(x_2);
lean_inc(x_1);
x_9 = l_List_map___main___at_List_Monad___spec__1___rarg(x_1, x_8);
x_10 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_10, 0, x_1);
lean_ctor_set(x_10, 1, x_9);
return x_10;
}
}
}
}
lean_object* l_List_map___main___at_List_Monad___spec__1(lean_object* x_1, lean_object* x_2) {
_start:
{
lean_object* x_3;
x_3 = lean_alloc_closure((void*)(l_List_map___main___at_List_Monad___spec__1___rarg), 2, 0);
return x_3;
}
}
lean_object* l_List_map___main___at_List_Monad___spec__2___rarg(lean_object* x_1, lean_object* x_2) {
_start:
{
if (lean_obj_tag(x_2) == 0)
{
lean_object* x_3;
lean_dec(x_1);
x_3 = lean_box(0);
return x_3;
}
else
{
uint8_t x_4;
x_4 = !lean_is_exclusive(x_2);
if (x_4 == 0)
{
lean_object* x_5; lean_object* x_6; lean_object* x_7; lean_object* x_8;
x_5 = lean_ctor_get(x_2, 0);
x_6 = lean_ctor_get(x_2, 1);
lean_inc(x_1);
x_7 = l_List_map___main___rarg(x_5, x_1);
x_8 = l_List_map___main___at_List_Monad___spec__2___rarg(x_1, x_6);
lean_ctor_set(x_2, 1, x_8);
lean_ctor_set(x_2, 0, x_7);
return x_2;
}
else
{
lean_object* x_9; lean_object* x_10; lean_object* x_11; lean_object* x_12; lean_object* x_13;
x_9 = lean_ctor_get(x_2, 0);
x_10 = lean_ctor_get(x_2, 1);
lean_inc(x_10);
lean_inc(x_9);
lean_dec(x_2);
lean_inc(x_1);
x_11 = l_List_map___main___rarg(x_9, x_1);
x_12 = l_List_map___main___at_List_Monad___spec__2___rarg(x_1, x_10);
x_13 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_13, 0, x_11);
lean_ctor_set(x_13, 1, x_12);
return x_13;
}
}
}
}
lean_object* l_List_map___main___at_List_Monad___spec__2(lean_object* x_1, lean_object* x_2) {
_start:
{
lean_object* x_3;
x_3 = lean_alloc_closure((void*)(l_List_map___main___at_List_Monad___spec__2___rarg), 2, 0);
return x_3;
}
}
lean_object* l_List_map___main___at_List_Monad___spec__3___rarg(lean_object* x_1, lean_object* x_2) {
_start:
{
if (lean_obj_tag(x_2) == 0)
{
lean_object* x_3;
lean_dec(x_1);
x_3 = lean_box(0);
return x_3;
}
else
{
uint8_t x_4;
x_4 = !lean_is_exclusive(x_2);
if (x_4 == 0)
{
lean_object* x_5; lean_object* x_6; lean_object* x_7; lean_object* x_8; lean_object* x_9;
x_5 = lean_ctor_get(x_2, 1);
x_6 = lean_ctor_get(x_2, 0);
lean_dec(x_6);
x_7 = lean_box(0);
lean_inc(x_1);
lean_ctor_set(x_2, 1, x_7);
lean_ctor_set(x_2, 0, x_1);
x_8 = l_List_map___main___at_List_Monad___spec__3___rarg(x_1, x_5);
x_9 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_9, 0, x_2);
lean_ctor_set(x_9, 1, x_8);
return x_9;
}
else
{
lean_object* x_10; lean_object* x_11; lean_object* x_12; lean_object* x_13; lean_object* x_14;
x_10 = lean_ctor_get(x_2, 1);
lean_inc(x_10);
lean_dec(x_2);
x_11 = lean_box(0);
lean_inc(x_1);
x_12 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_12, 0, x_1);
lean_ctor_set(x_12, 1, x_11);
x_13 = l_List_map___main___at_List_Monad___spec__3___rarg(x_1, x_10);
x_14 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_14, 0, x_12);
lean_ctor_set(x_14, 1, x_13);
return x_14;
}
}
}
}
lean_object* l_List_map___main___at_List_Monad___spec__3(lean_object* x_1, lean_object* x_2) {
_start:
{
lean_object* x_3;
x_3 = lean_alloc_closure((void*)(l_List_map___main___at_List_Monad___spec__3___rarg), 2, 0);
return x_3;
}
}
lean_object* l_List_map___main___at_List_Monad___spec__4___rarg(lean_object* x_1, lean_object* x_2) {
_start:
{
if (lean_obj_tag(x_2) == 0)
{
lean_object* x_3;
lean_dec(x_1);
x_3 = lean_box(0);
return x_3;
}
else
{
uint8_t x_4;
x_4 = !lean_is_exclusive(x_2);
if (x_4 == 0)
{
lean_object* x_5; lean_object* x_6; lean_object* x_7; lean_object* x_8; lean_object* x_9;
x_5 = lean_ctor_get(x_2, 0);
x_6 = lean_ctor_get(x_2, 1);
lean_inc(x_1);
x_7 = l_List_map___main___at_List_Monad___spec__3___rarg(x_5, x_1);
x_8 = l_List_join___main___rarg(x_7);
x_9 = l_List_map___main___at_List_Monad___spec__4___rarg(x_1, x_6);
lean_ctor_set(x_2, 1, x_9);
lean_ctor_set(x_2, 0, x_8);
return x_2;
}
else
{
lean_object* x_10; lean_object* x_11; lean_object* x_12; lean_object* x_13; lean_object* x_14; lean_object* x_15;
x_10 = lean_ctor_get(x_2, 0);
x_11 = lean_ctor_get(x_2, 1);
lean_inc(x_11);
lean_inc(x_10);
lean_dec(x_2);
lean_inc(x_1);
x_12 = l_List_map___main___at_List_Monad___spec__3___rarg(x_10, x_1);
x_13 = l_List_join___main___rarg(x_12);
x_14 = l_List_map___main___at_List_Monad___spec__4___rarg(x_1, x_11);
x_15 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_15, 0, x_13);
lean_ctor_set(x_15, 1, x_14);
return x_15;
}
}
}
}
lean_object* l_List_map___main___at_List_Monad___spec__4(lean_object* x_1, lean_object* x_2) {
_start:
{
lean_object* x_3;
x_3 = lean_alloc_closure((void*)(l_List_map___main___at_List_Monad___spec__4___rarg), 2, 0);
return x_3;
}
}
lean_object* l_List_map___main___at_List_Monad___spec__5___rarg(lean_object* x_1, lean_object* x_2) {
_start:
{
if (lean_obj_tag(x_2) == 0)
{
lean_object* x_3;
lean_dec(x_1);
x_3 = lean_box(0);
return x_3;
}
else
{
uint8_t x_4;
x_4 = !lean_is_exclusive(x_2);
if (x_4 == 0)
{
lean_object* x_5; lean_object* x_6; lean_object* x_7;
x_5 = lean_ctor_get(x_2, 1);
x_6 = lean_ctor_get(x_2, 0);
lean_dec(x_6);
lean_inc(x_1);
x_7 = l_List_map___main___at_List_Monad___spec__5___rarg(x_1, x_5);
lean_ctor_set(x_2, 1, x_7);
lean_ctor_set(x_2, 0, x_1);
return x_2;
}
else
{
lean_object* x_8; lean_object* x_9; lean_object* x_10;
x_8 = lean_ctor_get(x_2, 1);
lean_inc(x_8);
lean_dec(x_2);
lean_inc(x_1);
x_9 = l_List_map___main___at_List_Monad___spec__5___rarg(x_1, x_8);
x_10 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_10, 0, x_1);
lean_ctor_set(x_10, 1, x_9);
return x_10;
}
}
}
}
lean_object* l_List_map___main___at_List_Monad___spec__5(lean_object* x_1, lean_object* x_2) {
_start:
{
lean_object* x_3;
x_3 = lean_alloc_closure((void*)(l_List_map___main___at_List_Monad___spec__5___rarg), 2, 0);
return x_3;
}
}
lean_object* l_List_Monad___lambda__1(lean_object* x_1, lean_object* x_2, lean_object* x_3, lean_object* x_4) {
_start:
{
lean_object* x_5;
x_5 = l_List_map___main___at_List_Monad___spec__1___rarg(x_3, x_4);
return x_5;
}
}
lean_object* l_List_Monad___lambda__2(lean_object* x_1, lean_object* x_2, lean_object* x_3, lean_object* x_4) {
_start:
{
lean_object* x_5; lean_object* x_6;
x_5 = l_List_map___main___at_List_Monad___spec__2___rarg(x_4, x_3);
x_6 = l_List_join___main___rarg(x_5);
return x_6;
}
}
lean_object* l_List_Monad___lambda__3(lean_object* x_1, lean_object* x_2, lean_object* x_3, lean_object* x_4) {
_start:
{
lean_object* x_5; lean_object* x_6;
x_5 = l_List_map___main___at_List_Monad___spec__4___rarg(x_4, x_3);
x_6 = l_List_join___main___rarg(x_5);
return x_6;
}
}
lean_object* l_List_Monad___lambda__4(lean_object* x_1, lean_object* x_2, lean_object* x_3, lean_object* x_4) {
_start:
{
lean_object* x_5; lean_object* x_6;
x_5 = l_List_map___main___at_List_Monad___spec__5___rarg(x_4, x_3);
x_6 = l_List_join___main___rarg(x_5);
return x_6;
}
}
lean_object* _init_l_List_Monad___closed__1() {
_start:
{
lean_object* x_1;
x_1 = lean_alloc_closure((void*)(l_List_map), 2, 0);
return x_1;
}
}
lean_object* _init_l_List_Monad___closed__2() {
_start:
{
lean_object* x_1;
x_1 = lean_alloc_closure((void*)(l_List_Monad___lambda__1), 4, 0);
return x_1;
}
}
lean_object* _init_l_List_Monad___closed__3() {
_start:
{
lean_object* x_1; lean_object* x_2; lean_object* x_3;
x_1 = l_List_Monad___closed__1;
x_2 = l_List_Monad___closed__2;
x_3 = lean_alloc_ctor(0, 2, 0);
lean_ctor_set(x_3, 0, x_1);
lean_ctor_set(x_3, 1, x_2);
return x_3;
}
}
lean_object* _init_l_List_Monad___closed__4() {
_start:
{
lean_object* x_1;
x_1 = lean_alloc_closure((void*)(l_List_pure), 1, 0);
return x_1;
}
}
lean_object* _init_l_List_Monad___closed__5() {
_start:
{
lean_object* x_1;
x_1 = lean_alloc_closure((void*)(l_List_Monad___lambda__2), 4, 0);
return x_1;
}
}
lean_object* _init_l_List_Monad___closed__6() {
_start:
{
lean_object* x_1;
x_1 = lean_alloc_closure((void*)(l_List_Monad___lambda__3), 4, 0);
return x_1;
}
}
lean_object* _init_l_List_Monad___closed__7() {
_start:
{
lean_object* x_1;
x_1 = lean_alloc_closure((void*)(l_List_Monad___lambda__4), 4, 0);
return x_1;
}
}
lean_object* _init_l_List_Monad___closed__8() {
_start:
{
lean_object* x_1; lean_object* x_2; lean_object* x_3; lean_object* x_4; lean_object* x_5; lean_object* x_6;
x_1 = l_List_Monad___closed__3;
x_2 = l_List_Monad___closed__4;
x_3 = l_List_Monad___closed__5;
x_4 = l_List_Monad___closed__6;
x_5 = l_List_Monad___closed__7;
x_6 = lean_alloc_ctor(0, 5, 0);
lean_ctor_set(x_6, 0, x_1);
lean_ctor_set(x_6, 1, x_2);
lean_ctor_set(x_6, 2, x_3);
lean_ctor_set(x_6, 3, x_4);
lean_ctor_set(x_6, 4, x_5);
return x_6;
}
}
lean_object* _init_l_List_Monad___closed__9() {
_start:
{
lean_object* x_1;
x_1 = lean_alloc_closure((void*)(l_List_bind), 2, 0);
return x_1;
}
}
lean_object* _init_l_List_Monad___closed__10() {
_start:
{
lean_object* x_1; lean_object* x_2; lean_object* x_3;
x_1 = l_List_Monad___closed__8;
x_2 = l_List_Monad___closed__9;
x_3 = lean_alloc_ctor(0, 2, 0);
lean_ctor_set(x_3, 0, x_1);
lean_ctor_set(x_3, 1, x_2);
return x_3;
}
}
lean_object* _init_l_List_Monad() {
_start:
{
lean_object* x_1;
x_1 = l_List_Monad___closed__10;
return x_1;
}
}
lean_object* l_List_Alternative___lambda__1(lean_object* x_1) {
_start:
{
lean_object* x_2;
x_2 = lean_box(0);
return x_2;
}
}
lean_object* _init_l_List_Alternative___closed__1() {
_start:
{
lean_object* x_1;
x_1 = lean_alloc_closure((void*)(l_List_Alternative___lambda__1), 1, 0);
return x_1;
}
}
lean_object* _init_l_List_Alternative___closed__2() {
_start:
{
lean_object* x_1;
x_1 = lean_alloc_closure((void*)(l_List_append), 1, 0);
return x_1;
}
}
lean_object* _init_l_List_Alternative___closed__3() {
_start:
{
lean_object* x_1; lean_object* x_2; lean_object* x_3; lean_object* x_4; lean_object* x_5;
x_1 = l_List_Monad;
x_2 = lean_ctor_get(x_1, 0);
lean_inc(x_2);
x_3 = l_List_Alternative___closed__1;
x_4 = l_List_Alternative___closed__2;
x_5 = lean_alloc_ctor(0, 3, 0);
lean_ctor_set(x_5, 0, x_2);
lean_ctor_set(x_5, 1, x_3);
lean_ctor_set(x_5, 2, x_4);
return x_5;
}
}
lean_object* _init_l_List_Alternative() {
_start:
{
lean_object* x_1;
x_1 = l_List_Alternative___closed__3;
return x_1;
}
}
lean_object* initialize_Init_Data_List_Basic(lean_object*);
lean_object* initialize_Init_Control_Alternative(lean_object*);
lean_object* initialize_Init_Control_Monad(lean_object*);
static bool _G_initialized = false;
lean_object* initialize_Init_Data_List_Instances(lean_object* w) {
lean_object * res;
if (_G_initialized) return lean_mk_io_result(lean_box(0));
_G_initialized = true;
res = initialize_Init_Data_List_Basic(lean_io_mk_world());
if (lean_io_result_is_error(res)) return res;
lean_dec_ref(res);
res = initialize_Init_Control_Alternative(lean_io_mk_world());
if (lean_io_result_is_error(res)) return res;
lean_dec_ref(res);
res = initialize_Init_Control_Monad(lean_io_mk_world());
if (lean_io_result_is_error(res)) return res;
lean_dec_ref(res);
l_List_Monad___closed__1 = _init_l_List_Monad___closed__1();
lean_mark_persistent(l_List_Monad___closed__1);
l_List_Monad___closed__2 = _init_l_List_Monad___closed__2();
lean_mark_persistent(l_List_Monad___closed__2);
l_List_Monad___closed__3 = _init_l_List_Monad___closed__3();
lean_mark_persistent(l_List_Monad___closed__3);
l_List_Monad___closed__4 = _init_l_List_Monad___closed__4();
lean_mark_persistent(l_List_Monad___closed__4);
l_List_Monad___closed__5 = _init_l_List_Monad___closed__5();
lean_mark_persistent(l_List_Monad___closed__5);
l_List_Monad___closed__6 = _init_l_List_Monad___closed__6();
lean_mark_persistent(l_List_Monad___closed__6);
l_List_Monad___closed__7 = _init_l_List_Monad___closed__7();
lean_mark_persistent(l_List_Monad___closed__7);
l_List_Monad___closed__8 = _init_l_List_Monad___closed__8();
lean_mark_persistent(l_List_Monad___closed__8);
l_List_Monad___closed__9 = _init_l_List_Monad___closed__9();
lean_mark_persistent(l_List_Monad___closed__9);
l_List_Monad___closed__10 = _init_l_List_Monad___closed__10();
lean_mark_persistent(l_List_Monad___closed__10);
l_List_Monad = _init_l_List_Monad();
lean_mark_persistent(l_List_Monad);
l_List_Alternative___closed__1 = _init_l_List_Alternative___closed__1();
lean_mark_persistent(l_List_Alternative___closed__1);
l_List_Alternative___closed__2 = _init_l_List_Alternative___closed__2();
lean_mark_persistent(l_List_Alternative___closed__2);
l_List_Alternative___closed__3 = _init_l_List_Alternative___closed__3();
lean_mark_persistent(l_List_Alternative___closed__3);
l_List_Alternative = _init_l_List_Alternative();
lean_mark_persistent(l_List_Alternative);
return lean_mk_io_result(lean_box(0));
}
#ifdef __cplusplus
}
#endif
|
SLiGe/mirai-robot | src/main/java/cn/zjiali/robot/factory/MessageEventHandlerFactory.java | <reponame>SLiGe/mirai-robot
package cn.zjiali.robot.factory;
import cn.zjiali.robot.handler.MessageEventHandler;
import cn.zjiali.robot.util.CommonLogger;
import java.lang.reflect.InvocationTargetException;
/**
 * Singleton factory that reflectively instantiates and caches
 * {@link MessageEventHandler} implementations per plugin name, backed by the
 * bean registry in {@link AbstractBeanFactory}.
 *
 * @author zJiaLi
 * @since 2020-10-30 09:24
 */
public class MessageEventHandlerFactory extends AbstractBeanFactory {

    private static final MessageEventHandlerFactory MESSAGE_EVENT_HANDLER_FACTORY = new MessageEventHandlerFactory();

    private final CommonLogger commonLogger = new CommonLogger(MessageEventHandlerFactory.class.getName());

    /**
     * @return the process-wide singleton instance of this factory
     */
    public static MessageEventHandlerFactory getInstance() {
        return MESSAGE_EVENT_HANDLER_FACTORY;
    }

    /**
     * Instantiates {@code handler} (a fully-qualified class name with a public
     * no-arg constructor), registers it under {@code pluginName} and returns it.
     *
     * @param pluginName key the handler is cached under
     * @param handler    fully-qualified class name of a {@link MessageEventHandler}
     * @return the newly created handler, or {@code null} if reflection fails
     *         (the failure is printed; callers must tolerate a null result)
     */
    public MessageEventHandler put(String pluginName, String handler) {
        try {
            commonLogger.info("加载消息处理器 === {}", handler);
            Class<?> handlerClass = Class.forName(handler);
            MessageEventHandler instance = (MessageEventHandler) handlerClass.getConstructor().newInstance();
            putBean(pluginName, instance);
            return instance;
        } catch (ReflectiveOperationException e) {
            // Common supertype of all five reflective failures previously
            // listed individually (ClassNotFound/NoSuchMethod/IllegalAccess/
            // Instantiation/InvocationTarget).
            e.printStackTrace();
        }
        return null;
    }

    /**
     * @param pluginName key the handler was registered under
     * @return the cached handler for {@code pluginName}, as resolved by the
     *         bean registry
     */
    public MessageEventHandler get(String pluginName) {
        return getBean(pluginName, MessageEventHandler.class);
    }

    /**
     * Prefix used to namespace this factory's entries in the shared bean registry.
     */
    @Override
    public String beanPrefix() {
        return "MessageEventHandler-";
    }
}
|
Robbbert/messui | src/emu/ui/cmddata.h | <reponame>Robbbert/messui
// license:BSD-3-Clause
// copyright-holders:<NAME>
/*********************************************************************
ui/cmddata.h
*********************************************************************/
#ifndef MAME_EMU_UI_CMDDATA_H
#define MAME_EMU_UI_CMDDATA_H
#pragma once
// command.dat symbols assigned to Unicode PUA U+E000
#define COMMAND_UNICODE (0xe000)
#define MAX_GLYPH_FONT (150)
// Define Game Command Font Converting Conditions
#define COMMAND_DEFAULT_TEXT '_'
// Define Expanded Game Command ShortCut
#define COMMAND_EXPAND_TEXT '^'
// Define Simple Game Command ShortCut
#define COMMAND_CONVERT_TEXT '@'
// Maps a single shortcut character (the character following
// COMMAND_DEFAULT_TEXT or COMMAND_EXPAND_TEXT in command.dat text) to a glyph
// code in the command-symbol font (presumably an offset into the
// COMMAND_UNICODE PUA range — confirm against the renderer).
struct fix_command_t
{
	char glyph_char;     // shortcut character as it appears in command.dat
	unsigned glyph_code; // glyph code the character expands to
};

// Maps a multi-character mnemonic (following COMMAND_CONVERT_TEXT) to a glyph
// code. NOTE(review): glyph_code is 'int' here but 'unsigned' in
// fix_command_t — presumably interchangeable; confirm before unifying types.
struct fix_strings_t
{
	std::string_view glyph_str; // mnemonic, e.g. "hcb", "start"
	int glyph_code;             // glyph code the mnemonic expands to
};
static fix_command_t const default_text[] =
{
// Alphabetic Buttons (NeoGeo): A~D,H,Z
{ 'A', 1 }, // BTN_A
{ 'B', 2 }, // BTN_B
{ 'C', 3 }, // BTN_C
{ 'D', 4 }, // BTN_D
{ 'H', 8 }, // BTN_H
{ 'Z', 26 }, // BTN_Z
// Numerical Buttons (Capcom): 1~10
{ 'a', 27 }, // BTN_1
{ 'b', 28 }, // BTN_2
{ 'c', 29 }, // BTN_3
{ 'd', 30 }, // BTN_4
{ 'e', 31 }, // BTN_5
{ 'f', 32 }, // BTN_6
{ 'g', 33 }, // BTN_7
{ 'h', 34 }, // BTN_8
{ 'i', 35 }, // BTN_9
{ 'j', 36 }, // BTN_10
// Directions of Arrow, Joystick Ball
{ '+', 39 }, // BTN_+
{ '.', 40 }, // DIR_...
{ '1', 41 }, // DIR_1
{ '2', 42 }, // DIR_2
{ '3', 43 }, // DIR_3
{ '4', 44 }, // DIR_4
{ '5', 45 }, // Joystick Ball
{ '6', 46 }, // DIR_6
{ '7', 47 }, // DIR_7
{ '8', 48 }, // DIR_8
{ '9', 49 }, // DIR_9
{ 'N', 50 }, // DIR_N
// Special Buttons
{ 'S', 51 }, // BTN_START
{ 'P', 53 }, // BTN_PUNCH
{ 'K', 54 }, // BTN_KICK
{ 'G', 55 }, // BTN_GUARD
// Composition of Arrow Directions
{ '!', 90 }, // Arrow
{ 'k', 100 }, // Half Circle Back
{ 'l', 101 }, // Half Circle Front Up
{ 'm', 102 }, // Half Circle Front
{ 'n', 103 }, // Half Circle Back Up
{ 'o', 104 }, // 1/4 Cir For 2 Down
{ 'p', 105 }, // 1/4 Cir Down 2 Back
{ 'q', 106 }, // 1/4 Cir Back 2 Up
{ 'r', 107 }, // 1/4 Cir Up 2 For
{ 's', 108 }, // 1/4 Cir Back 2 Down
{ 't', 109 }, // 1/4 Cir Down 2 For
{ 'u', 110 }, // 1/4 Cir For 2 Up
{ 'v', 111 }, // 1/4 Cir Up 2 Back
{ 'w', 112 }, // Full Clock Forward
{ 'x', 113 }, // Full Clock Back
{ 'y', 114 }, // Full Count Forward
{ 'z', 115 }, // Full Count Back
{ 'L', 116 }, // 2x Forward
{ 'M', 117 }, // 2x Back
{ 'Q', 118 }, // Dragon Screw Forward
{ 'R', 119 }, // Dragon Screw Back
// Big letter Text
{ '^', 121 }, // AIR
{ '?', 122 }, // DIR
{ 'X', 124 }, // TAP
// Condition of Positions
{ '|', 125 }, // Jump
{ 'O', 126 }, // Hold
{ '-', 127 }, // Air
{ '=', 128 }, // Squatting
{ '~', 131 }, // Charge
// Special Character Text
{ '`', 135 }, // Small Dot
{ '@', 136 }, // Double Ball
{ ')', 137 }, // Single Ball
{ '(', 138 }, // Solid Ball
{ '*', 139 }, // Star
{ '&', 140 }, // Solid star
{ '%', 141 }, // Triangle
{ '$', 142 }, // Solid Triangle
{ '#', 143 }, // Double Square
{ ']', 144 }, // Single Square
{ '[', 145 }, // Solid Square
{ '{', 146 }, // Down Triangle
{ '}', 147 }, // Solid Down Triangle
{ '<', 148 }, // Diamond
{ '>', 149 }, // Solid Diamond
{ 0, 0 } // end of array
};
static fix_command_t const expand_text[] =
{
// Alphabetic Buttons (NeoGeo): S (Slash Button)
{ 's', 19 }, // BTN_S
// Special Buttons
{ 'S', 52 }, // BTN_SELECT
// Multiple Punches & Kicks
{ 'E', 57 }, // Light Punch
{ 'F', 58 }, // Middle Punch
{ 'G', 59 }, // Strong Punch
{ 'H', 60 }, // Light Kick
{ 'I', 61 }, // Middle Kick
{ 'J', 62 }, // Strong Kick
{ 'T', 63 }, // 3 Kick
{ 'U', 64 }, // 3 Punch
{ 'V', 65 }, // 2 Kick
{ 'W', 66 }, // 2 Pick
// Composition of Arrow Directions
{ '!', 91 }, // Continue Arrow
// Charge of Arrow Directions
{ '1', 92 }, // Charge DIR_1
{ '2', 93 }, // Charge DIR_2
{ '3', 94 }, // Charge DIR_3
{ '4', 95 }, // Charge DIR_4
{ '6', 96 }, // Charge DIR_6
{ '7', 97 }, // Charge DIR_7
{ '8', 98 }, // Charge DIR_8
{ '9', 99 }, // Charge DIR_9
// Big letter Text
{ 'M', 123 }, // MAX
// Condition of Positions
{ '-', 129 }, // Close
{ '=', 130 }, // Away
{ '*', 132 }, // Serious Tap
{ '?', 133 }, // Any Button
{ 0, 0 } // end of array
};
static const fix_strings_t convert_text[] =
{
// Alphabetic Buttons: A~Z
{ "A-button", 1 }, // BTN_A
{ "B-button", 2 }, // BTN_B
{ "C-button", 3 }, // BTN_C
{ "D-button", 4 }, // BTN_D
{ "E-button", 5 }, // BTN_E
{ "F-button", 6 }, // BTN_F
{ "G-button", 7 }, // BTN_G
{ "H-button", 8 }, // BTN_H
{ "I-button", 9 }, // BTN_I
{ "J-button", 10 }, // BTN_J
{ "K-button", 11 }, // BTN_K
{ "L-button", 12 }, // BTN_L
{ "M-button", 13 }, // BTN_M
{ "N-button", 14 }, // BTN_N
{ "O-button", 15 }, // BTN_O
{ "P-button", 16 }, // BTN_P
{ "Q-button", 17 }, // BTN_Q
{ "R-button", 18 }, // BTN_R
{ "S-button", 19 }, // BTN_S
{ "T-button", 20 }, // BTN_T
{ "U-button", 21 }, // BTN_U
{ "V-button", 22 }, // BTN_V
{ "W-button", 23 }, // BTN_W
{ "X-button", 24 }, // BTN_X
{ "Y-button", 25 }, // BTN_Y
{ "Z-button", 26 }, // BTN_Z
// Special Moves and Buttons
{ "decrease", 37 }, // BTN_DEC
{ "increase", 38 }, // BTN_INC
{ "BALL", 45 }, // Joystick Ball
{ "start", 51 }, // BTN_START
{ "select", 52 }, // BTN_SELECT
{ "punch", 53 }, // BTN_PUNCH
{ "kick", 54 }, // BTN_KICK
{ "guard", 55 }, // BTN_GUARD
{ "L-punch", 57 }, // Light Punch
{ "M-punch", 58 }, // Middle Punch
{ "S-punch", 59 }, // Strong Punch
{ "L-kick", 60 }, // Light Kick
{ "M-kick", 61 }, // Middle Kick
{ "S-kick", 62 }, // Strong Kick
{ "3-kick", 63 }, // 3 Kick
{ "3-punch", 64 }, // 3 Punch
{ "2-kick", 65 }, // 2 Kick
{ "2-punch", 66 }, // 2 Pick
// Custom Buttons and Cursor Buttons
{ "custom1", 67 }, // CUSTOM_1
{ "custom2", 68 }, // CUSTOM_2
{ "custom3", 69 }, // CUSTOM_3
{ "custom4", 70 }, // CUSTOM_4
{ "custom5", 71 }, // CUSTOM_5
{ "custom6", 72 }, // CUSTOM_6
{ "custom7", 73 }, // CUSTOM_7
{ "custom8", 74 }, // CUSTOM_8
{ "up", 75 }, // (Cursor Up)
{ "down", 76 }, // (Cursor Down)
{ "left", 77 }, // (Cursor Left)
{ "right", 78 }, // (Cursor Right)
// Player Lever
{ "lever", 79 }, // Non Player Lever
{ "nplayer", 80 }, // Gray Color Lever
{ "1player", 81 }, // 1 Player Lever
{ "2player", 82 }, // 2 Player Lever
{ "3player", 83 }, // 3 Player Lever
{ "4player", 84 }, // 4 Player Lever
{ "5player", 85 }, // 5 Player Lever
{ "6player", 86 }, // 6 Player Lever
{ "7player", 87 }, // 7 Player Lever
{ "8player", 88 }, // 8 Player Lever
// Composition of Arrow Directions
{ "-->", 90 }, // Arrow
{ "==>", 91 }, // Continue Arrow
{ "hcb", 100 }, // Half Circle Back
{ "huf", 101 }, // Half Circle Front Up
{ "hcf", 102 }, // Half Circle Front
{ "hub", 103 }, // Half Circle Back Up
{ "qfd", 104 }, // 1/4 Cir For 2 Down
{ "qdb", 105 }, // 1/4 Cir Down 2 Back
{ "qbu", 106 }, // 1/4 Cir Back 2 Up
{ "quf", 107 }, // 1/4 Cir Up 2 For
{ "qbd", 108 }, // 1/4 Cir Back 2 Down
{ "qdf", 109 }, // 1/4 Cir Down 2 For
{ "qfu", 110 }, // 1/4 Cir For 2 Up
{ "qub", 111 }, // 1/4 Cir Up 2 Back
{ "fdf", 112 }, // Full Clock Forward
{ "fub", 113 }, // Full Clock Back
{ "fuf", 114 }, // Full Count Forward
{ "fdb", 115 }, // Full Count Back
{ "xff", 116 }, // 2x Forward
{ "xbb", 117 }, // 2x Back
{ "dsf", 118 }, // Dragon Screw Forward
{ "dsb", 119 }, // Dragon Screw Back
// Big letter Text
{ "AIR", 121 }, // AIR
{ "DIR", 122 }, // DIR
{ "MAX", 123 }, // MAX
{ "TAP", 124 }, // TAP
// Condition of Positions
{ "jump", 125 }, // Jump
{ "hold", 126 }, // Hold
{ "air", 127 }, // Air
{ "sit", 128 }, // Squatting
{ "close", 129 }, // Close
{ "away", 130 }, // Away
{ "charge", 131 }, // Charge
{ "tap", 132 }, // Serious Tap
{ "button", 133 }, // Any Button
{ "", 0 } // end of array
};
#endif // MAME_EMU_UI_CMDDATA_H
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.