repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
rvedam/es-operating-system | os/net/src/streamOutput.cpp | /*
* Copyright 2008 Google Inc.
* Copyright 2006, 2007 Nintendo Co., Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <algorithm>
#include "stream.h"
// Computes the number of bytes of TCP options that will accompany the next
// outgoing segment, rounded up to a 32-bit boundary (options are padded).
// SYN segments advertise MSS (plus SACK-permitted when TCP_SACK is built in);
// established-side ACKs carry SACK blocks when out-of-order data is queued
// (asb[0].data non-zero).
int StreamReceiver::
countOptionSize(u16 flag)
{
    int optlen = 0;
    if (flag & TCPHdr::SYN)
    {
        optlen += sizeof(TCPOptMss);
#ifdef TCP_SACK
        optlen += sizeof(TCPOptSackPermitted);
#endif // TCP_SACK
    }
#ifdef TCP_SACK
    else if (sack && !(flag & TCPHdr::RST) && (flag & TCPHdr::ACK) && asb[0].data)
    {
        Ring::Vec* block;
        optlen += 2;    // kind and length octets of the SACK option header
        // Scan backwards for the last in-use entry so that only the occupied
        // prefix of asb[] is counted. asb[0].data was checked above, so the
        // loop always finds at least one entry.
        for (block = &asb[TCPHdr::ASB_MAX - 1]; asb <= block; --block)
        {
            if (block->data)
            {
                break;
            }
        }
        // Each SACK block carries two 32-bit sequence numbers (left/right edge).
        optlen += (block + 1 - asb) * (2 * sizeof(s32));
    }
#endif // TCP_SACK
    return (optlen + 3) & ~3;   // pad up to a 4-byte boundary
}
// Writes the TCP options sized by countOptionSize() into the buffer at opt,
// constructing each option record in place with placement new.
// Returns the number of bytes written (padded with EOL to a 4-byte boundary).
int StreamReceiver::
fillOptions(u8* opt, u16 flag)
{
    u8* ptr = opt;
    if (flag & TCPHdr::SYN)
    {
        new(ptr) TCPOptMss(mss);
        ptr += sizeof(TCPOptMss);
#ifdef TCP_SACK
        new(ptr) TCPOptSackPermitted();
        ptr += sizeof(TCPOptSackPermitted);
#endif
    }
#ifdef TCP_SACK
    else if (sack && !(flag & TCPHdr::RST) && (flag & TCPHdr::ACK) && asb[0].data)
    {
        // Pad with NOPs so the 2-byte SACK kind/length header ends on a
        // 4-byte boundary, 32-bit aligning the sequence-number pairs.
        while (((ptr - opt) & 3) != 2)
        {
            new(ptr) TCPOptNop;
            ptr += sizeof(TCPOptNop);
        }
        TCPOptSack* optSack = new(ptr) TCPOptSack(recvRing.getHead(), recvRing.getSize(), asb, recvNext - recvRing.getUsed());
        ptr += optSack->len;
    }
#endif // TCP_SACK
    // Fill the remainder with EOL octets up to the 4-byte boundary.
    while ((ptr - opt) & 3)
    {
        new(ptr) TCPOptEol;
        ptr += sizeof(TCPOptEol);
    }
    return ptr - opt;
}
// For SYN_RCVD and SYN_SENT: report how much is sendable while the
// three-way handshake is still in progress, setting SYN in flag when the
// initial SYN itself still has to go out.
s32 StreamReceiver::
getSendableWithSyn(u16& flag)
{
    if (sendNext == iss)
    {
        // The SYN consumes one sequence number; queued data rides along.
        flag = TCPHdr::SYN;
        return sendRing.getUsed() + 1;
    }
    // Do not send a data-only segment until the connection is established.
    ASSERT(iss == sendUna);
    flag = 0;
    return 0;
}
// Returns how many bytes of queued data are currently eligible to be sent,
// after handling the zero-window (persist) case and after redirecting
// sendNext for fast retransmit or SACK-based loss recovery
// (RFC 3782 / RFC 3517).
s32 StreamReceiver::
getSendable()
{
    // In persist state, exit the fast recovery
    if (sendWin == 0 && 0 < sendRing.getUsed())
    {
        if (!rxmitTimer.isEnabled())
        {
            onxt = sendNext = sendUna; // To send a probe.
        }
        // Cancel RTT estimators. [Karn's algorithm]
        rttTiming = 0;
        // RFC 3782: reset the fast-recovery bookkeeping.
        sendRecover = sendMax;
        dupAcks = 0;
        hole = 0;
        sendHoles = 0;
        sendFack = sendUna;
        rxmitData = 0;
        sendAwin = 0;
        fastRxmit = false;
    }
    // Duplicate-ACK threshold reached: we are in loss recovery.
    if (RXMIT_THRESH <= dupAcks)
    {
#ifdef TCP_SACK
        if (sack)
        {
            //
            // (SACK-2) Override sendNext if any SACK-generated retransmissions.
            //
            hole = getSackHole();
            if (hole)
            {
                sendNext = hole->rxmit;
            }
        }
        else
#endif // TCP_SACK
        {
            if (fastRxmit)
            {
                sendNext = sendUna; // Retransmit the lost segment
            }
        }
    }
    s32 sendable = sendRing.getUsed() - (sendNext - sendUna);
#ifdef TCP_SACK
    // (SACK-2) Adjust sendable so we retransmit only up to the hole's end.
    if (hole)
    {
        ASSERT(hole->rxmit == sendNext);
        sendable = std::min(sendable, hole->end - hole->rxmit);
    }
#endif // TCP_SACK
    return sendable;
}
// For FIN_WAIT1, CLOSING, and LAST_ACK: like getSendable(), but adds the
// FIN flag (and its sequence number) when all remaining data can go out.
// FIX: flag was left unassigned on the SACK-hole path, so callers that pass
// an uninitialized u16 could forward garbage flags to send(). Initialize it.
s32 StreamReceiver::
getSendableWithFin(u16& flag)
{
    flag = 0;
    s32 sendable = getSendable();
    if (sendable < 0)
    {
        return 0; // FIN has been sent and the retransmission timer has not expired yet.
    }
    if (!hole)
    {
        flag = TCPHdr::FIN;
        return sendable + 1;    // one sequence number for the FIN itself
    }
    return sendable;    // still retransmitting a SACK hole; hold the FIN back
}
// Detects silly window syndrome (SWS) and decides whether a segment should
// go out now. len is the payload length we could send; mss is the maximum
// segment size net of options. Returns true to transmit, false to defer
// (delayed ACK / override timer handled by the caller).
bool StreamReceiver::
canSend(s32 len, s32 mss, u16 flag)
{
    //
    // Receiver silly window avoidance algorithm
    //
    // Advertise a window update once the advertised window lags real free
    // space by min(buffer/2, 2*MSS), or by a full MSS while the advertised
    // window is still sub-MSS.
    long reduction = recvRing.getUnused() - recvWin;
    if (std::min(recvRing.getSize() / 2, 2L * mss) <= reduction ||
        (mss <= reduction && recvWin < mss))    // parenthesized: && binds tighter than ||
    {
        recvWin = recvRing.getUnused();
        return true; // Send a window update
    }
    if (ackNow || (flag & (TCPHdr::RST | TCPHdr::FIN | TCPHdr::SYN)))
    {
        return true; // Do not delay
    }
    //
    // The sender's SWS avoidance algorithm
    //
    if (len <= 0)
    {
        return false;
    }
    if (mss <= len)
    {
        return true; // A maximum-sized segment can be sent.
    }
    // Nagle's algorithm: a small segment may go out only when nothing is
    // outstanding (simplified from a redundant `? true : false` ternary).
    bool acked = !nagle || sendNext == sendUna;
    if (acked)
    {
        if (flag & TCPHdr::PSH) // PSH is set if we have no more data to send.
        {
            return true; // The data is pushed and all queued data can be sent now.
        }
        if (sendMaxWin / 2 <= len)
        {
            return true; // At least a fraction of the maximum window can be sent.
        }
    }
    if (sendNext < sendMax)
    {
        return true; // The override timeout occurs (retransmission)
    }
    if (fastRxmit)
    {
        return true; // Bypass sender silly window avoidance for fast retransmit
    }
    return false;
}
// Builds and emits one TCP segment: computes the usable window, applies the
// SACK (RFC 3517) and limited-transmit (RFC 3042) adjustments, performs SWS
// avoidance, copies payload from sendRing, then prepends the TCP header and
// options. Returns true when a segment was produced, false when sending was
// suppressed (closed window, SWS avoidance, nothing to do).
bool StreamReceiver::
send(InetMessenger* m, s32 sendable, u16 flag)
{
    // If the window size is zero and the persist timer has been expired,
    // send a window probe.
    int win = std::min(sendWin, cWin);
    // NOTE(review): `sack` is referenced here outside #ifdef TCP_SACK --
    // confirm the member exists when TCP_SACK is not defined.
    if (RXMIT_THRESH <= dupAcks && sack)
    {
#ifdef TCP_SACK
        //
        // (SACK-1) During SACK loss recovery period, cWin is checked later.
        //
        win = sendWin;
#endif // TCP_SACK
    }
#ifdef TCP_LIMITED_TRANSMIT
    if (LIMITED_THRESH == dupAcks)
    {
        // the sender can only send two segments beyond the congestion window
        // (cwnd). [RFC 3042]
        win = sendWin;
    }
#endif // TCP_LIMITED_TRANSMIT
    if (win == 0 && !rxmitTimer.isEnabled())
    {
        // Zero window and no retransmission pending: enter persist state and
        // force a one-byte window probe out.
        persist = true;
        if (!(flag & TCPHdr::SYN))
        {
            ackNow = true; // To override Persist timer SWS
        }
        win = 1;
    }
    // Calculate useable size
    s32 useable = sendUna + win - sendNext;
#ifdef TCP_SACK
    if (sack)
    {
        //
        // (SACK-3) If cwnd - pipe >= 1 SMSS the sender SHOULD transmit one or more
        // segments [RFC 3517]
        //
        if (RXMIT_THRESH <= dupAcks && !fastRxmit)
        {
            if (cWin - sendAwin < mss)
            {
                useable = 0;
            }
        }
    }
#endif // TCP_SACK
#ifdef TCP_LIMITED_TRANSMIT
    if (LIMITED_THRESH == dupAcks)
    {
        // The amount of outstanding data would remain less than or equal
        // to the congestion window plus 2 segments. [RFC 3042]
        useable = std::min(useable, std::max(0, cWin + 2 * mss - sendAwin));
    }
#endif // TCP_LIMITED_TRANSMIT
    // Calculate send length
    int optlen = countOptionSize(flag);
    s32 len = std::min(mss - optlen, std::min(sendable, useable));
    if (0 < len && len == sendable && !hole &&
        (flag & (TCPHdr::SYN | TCPHdr::RST | TCPHdr::FIN)) == 0)
    {
        flag |= TCPHdr::PSH; // No more data to send:
    }
    if ((flag & TCPHdr::FIN) && len < sendable)
    {
        flag &= ~TCPHdr::FIN; // More data to send:
    }
    if (len <= 0 && flag == 0 && !ackNow)
    {
        return false;   // Nothing to transmit and no ACK due.
    }
    // Perform Silly Window Syndrome avoidance
    if (!canSend(len, mss - optlen, flag))
    {
        // Wait for override timer time-out
        if (sendNext < onxt)
        {
            sendNext = onxt;
        }
        if (0 < len)
        {
            // Set persist/SWS override timer. Note SWS avoidance does not
            // apply for packet retransmission.
            startRxmitTimer();
        }
        if (recvAcked < recvNext)
        {
            // Delayed ACK
            // 4.2 Generating Acknowledgments [RFC 2581]
            // 4.2.3.2 When to Send an ACK Segment [RFC 1122]
            startAckTimer();
        }
        return false;
    }
    // Make data portion
    if (0 < len)
    {
        long count = len;
        // SYN and FIN each consume one sequence number but carry no payload.
        if (flag & TCPHdr::SYN)
        {
            --count;
        }
        if (flag & TCPHdr::FIN)
        {
            --count;
        }
        if (0 < count)
        {
            m->movePosition(-count);
            sendRing.peek(m->fix(count), count, sendNext - sendUna);
        }
    }
    // Make TCP header
    m->movePosition(-(sizeof(TCPHdr) + optlen));
    TCPHdr* tcphdr = static_cast<TCPHdr*>(m->fix(sizeof(TCPHdr) + optlen));
    if (state != &stateSynReceived)
    {
        recvAcked = recvNext;
    }
    else
    {
        recvAcked = irs + 1; // Just for SYN
    }
    tcphdr->ack = htonl(recvAcked);
    tcphdr->src = htons(m->getLocalPort());
    tcphdr->dst = htons(m->getRemotePort());
    ASSERT(tcphdr->src != 0);
    ASSERT(tcphdr->dst != 0);
    // Pure ACK segments (len == 0) reuse sendMax as the sequence number.
    TCPSeq seq = (0 < len) ? sendNext : sendMax;
    tcphdr->seq = htonl(seq);
    if (m->getFlag() == es::Socket::MsgOob)
    {
        sendUp = seq + len;     // Urgent pointer covers the out-of-band data.
    }
    if (seq < sendUp)
    {
        // Set urgent offset
        flag |= TCPHdr::URG;
        s32 offset = sendUp - seq;
        u16 urg = (u16) ((65495 < offset) ? 65535 : offset);
#ifdef TCP_STD_URG
        --urg;
#endif // TCP_STD_URG
        tcphdr->urg = htons(urg);
    }
    else
    {
        tcphdr->urg = 0;
        sendUp = sendUna;
    }
    if (state != &stateSynSent)
    {
        flag |= TCPHdr::ACK;
    }
    ackNow = false;
    stopAckTimer();
    ASSERT((flag & 0x0fc0) == 0); // RFC 793 only
    tcphdr->flag = htons(flag);
    tcphdr->win = htons(recvWin);
    tcphdr->sum = 0;
    tcphdr->setHdrSize(sizeof(TCPHdr) + optlen);
    // Make TCP option(s)
    fillOptions(reinterpret_cast<u8*>(tcphdr) + sizeof(TCPHdr), flag);
    sendNext += len;
    if (sendMax < sendNext) // Not a retransmission?
    {
        // In case sending a window probe, sendNext is out of window and
        // must not be reflected to sendMax.
        if (sendNext <= TCPSeq(sendUna + sendWin))
        {
            // ++interface->tcpStat.outSegs;
            sendMax = sendNext;
        }
        else
        {
            // ++interface->tcpStat.retransSegs;
        }
        // If round trip timer isn't running, start it
        // NOTE(review): the comment above says "isn't running" but the test
        // is `rttTiming != 0`, which restarts a timer that IS running.
        // Confirm whether this should be `rttTiming == 0`.
        if (rttTiming != 0)
        {
            rttTiming = DateTime::getNow();
            rttSeq = seq;
        }
    }
    else if (0 < len)
    {
        // ++interface->tcpStat.retransSegs;
    }
    // Start retransmission timer
    if (sendNext != sendUna) // Not for an ACK-only segment
    {
        startRxmitTimer();
    }
#ifdef TCP_SACK
    if (hole)
    {
        //
        // (SACK-5) Update scoreboard rxmit pointer and rxmitData size.
        //
        hole->rxmit += len;
        rxmitData += len;
    }
#endif // TCP_SACK
    //
    // (SACK-6) Update aWin
    //
    // Update sendAwin to reflect the new data that was sent.
    sendAwin = (sendMax - sendFack) + rxmitData;
    //
    // (SACK-7) Turn off fastRxmit
    //
    fastRxmit = false;
#ifdef TCP_SACK
    //
    // (SACK-8) Restore sendNext
    //
    if (sendNext < onxt)
    {
        sendNext = onxt;
    }
#endif
    lastSend = DateTime::getNow(); // For re-starting the idle connection.
    return true;
}
// SYN_SENT: transmit the initial SYN for an active open.
bool StreamReceiver::
StateSynSent::output(InetMessenger* m, StreamReceiver* s)
{
    u16 synFlag;
    s32 sendable = s->getSendableWithSyn(synFlag);
    return s->send(m, sendable, synFlag);
}
// SYN_RCVD: transmit the SYN (with ACK added by send()) for a passive open.
bool StreamReceiver::
StateSynReceived::output(InetMessenger* m, StreamReceiver* s)
{
    u16 synFlag;
    s32 sendable = s->getSendableWithSyn(synFlag);
    return s->send(m, sendable, synFlag);
}
// ESTABLISHED: ordinary data transfer; send() supplies the ACK flag itself.
bool StreamReceiver::
StateEstablished::output(InetMessenger* m, StreamReceiver* s)
{
    return s->send(m, s->getSendable(), 0);
}
// FIN_WAIT1: drain remaining data and append FIN when possible.
bool StreamReceiver::
StateFinWait1::output(InetMessenger* m, StreamReceiver* s)
{
    u16 finFlag;
    s32 sendable = s->getSendableWithFin(finFlag);
    return s->send(m, sendable, finFlag);
}
// FIN_WAIT2: our FIN is ACKed; only ACKs/window updates remain to send.
bool StreamReceiver::
StateFinWait2::output(InetMessenger* m, StreamReceiver* s)
{
    return s->send(m, s->getSendable(), 0);
}
// CLOSE_WAIT: the peer closed; we may still send queued data.
bool StreamReceiver::
StateCloseWait::output(InetMessenger* m, StreamReceiver* s)
{
    return s->send(m, s->getSendable(), 0);
}
// LAST_ACK: flush remaining data with the (possibly retransmitted) FIN.
bool StreamReceiver::
StateLastAck::output(InetMessenger* m, StreamReceiver* s)
{
    u16 finFlag;
    s32 sendable = s->getSendableWithFin(finFlag);
    return s->send(m, sendable, finFlag);
}
// CLOSING: simultaneous close; keep retransmitting our FIN as needed.
bool StreamReceiver::
StateClosing::output(InetMessenger* m, StreamReceiver* s)
{
    u16 finFlag;
    s32 sendable = s->getSendableWithFin(finFlag);
    return s->send(m, sendable, finFlag);
}
// TIME_WAIT: re-ACK a retransmitted FIN from the peer.
bool StreamReceiver::
StateTimeWait::output(InetMessenger* m, StreamReceiver* s)
{
    u16 finFlag;
    s32 sendable = s->getSendableWithFin(finFlag);
    return s->send(m, sendable, finFlag);
}
|
StOriJimmy/chordatlas | src/org/twak/footprints/Footprints.java | package org.twak.footprints;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.geom.Line2D;
import java.awt.image.BufferedImage;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.util.Arrays;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.imageio.ImageIO;
import javax.xml.parsers.ParserConfigurationException;
import org.geotools.feature.FeatureCollection;
import org.geotools.feature.collection.AbstractFeatureVisitor;
import org.geotools.geometry.jts.JTS;
import org.geotools.gml.GMLFilterDocument;
import org.geotools.gml.GMLFilterGeometry;
import org.geotools.gml.GMLHandlerJTS;
import org.geotools.referencing.CRS;
import org.geotools.util.NullProgressListener;
import org.geotools.xml.handlers.xsi.ComplexContentHandler;
import org.opengis.feature.Feature;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.opengis.referencing.operation.MathTransform;
import org.twak.tweed.TweedSettings;
import org.twak.utils.Pair;
import org.twak.utils.collections.ConsecutivePairs;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.Point;
/**
 * One-off tool: loads building footprints from a GML file, reprojects each
 * footprint centroid from the configured CRS to WGS84, fetches a Google
 * Static Maps satellite tile around it, draws the footprint outline plus
 * height annotations on the tile, and writes the result to disk.
 */
public class Footprints {

    public static void main(String args[]) throws Exception {
        // Earlier experiments kept for reference:
        // StringReader reader = new StringReader( );
        // InputSource input = new InputSource( new FileInputStream("/home/twak/Desktop/wgs84.gml") );
        // InputSource input = new InputSource( new FileInputStream("/home/twak/Downloads/Download_around_ucl_562795 (1)/mastermap-topo_1451134/mastermap-topo_1451134_0.gml") );
        InputSource input = new InputSource( new FileInputStream("/home/twak/data/Download_around_ucl_562795/buildings.gml") );
        Callback result = parse( input );
        // Matcher m = name.matcher( "<ogr:buildings fid=\"osgb1000001787210138\"> )");
        // if (m.matches()) {
        // System.out.println("matches " + m.group(1));
        // }
        // test();
    }

    /**
     * Parses a GML stream, delivering every geometry (and the enclosing
     * "buildings" feature id) to a fresh {@link Callback}.
     *
     * @param input the GML document to read
     * @return the callback after parsing completes
     */
    public static Callback parse(InputSource input) throws IOException, SAXException {
        // parse xml
        XMLReader reader = XMLReaderFactory.createXMLReader();
        Callback callback = new Callback();
        // Chain: raw SAX events -> GML document filter -> geometry filter -> callback.
        GMLFilterGeometry geometryCallback = new GMLFilterGeometry( callback );
        GMLFilterDocument gmlCallback = new GMLFilterDocument( geometryCallback );
        reader.setContentHandler( gmlCallback );
        // FIX: removed unused local `ComplexContentHandler ch;`.
        reader.parse(input);
        return callback;
    }

    /**
     * This class is called when the SAX parser has finished
     * parsing a Filter.
     */
    static class Callback extends DefaultHandler implements GMLHandlerJTS {

        CoordinateReferenceSystem sourceCRS, targetCRS;
        MathTransform transform;
        int count = 0;
        // fid of the <buildings> element currently being parsed.
        String featureName = "none";

        private final static Pattern name = Pattern.compile(".*buildings\\ fid=\\\"([^\\\"]*)\\\".*", Pattern.DOTALL);

        @Override
        public void startElement(String uri, String localName, String qName, Attributes attributes)
                throws SAXException {
            // Remember the feature id so geometry() can label its output.
            if (localName.equals("buildings") )
                featureName = attributes.getValue("fid");
            super.startElement(uri, localName, qName, attributes);
        }

        @Override
        public void endElement(String uri, String localName, String qName) throws SAXException {
            if (localName.equals("buildings") ) {
                featureName = null;
            }
            super.endElement(uri, localName, qName);
        }

        public Callback() {
            try {
                // Source CRS comes from settings (e.g. EPSG:27700); target is WGS84.
                sourceCRS = CRS.decode( TweedSettings.settings.gmlCoordSystem );
                targetCRS = CRS.decode("EPSG:4326"); // lat long
                transform = CRS.findMathTransform(sourceCRS, targetCRS, true);
            }
            catch (Throwable th) {
                th.printStackTrace();
            }
        }

        @Override
        public void geometry(Geometry arg0) {
            // Skip tiny footprints (< 80 square units in the source CRS).
            if (arg0 == null || arg0.getArea() < 80)
                return;
            // The very first geometry (count == 0) is skipped.
            if (count != 0) {
                System.out.println(arg0.getGeometryType());
                try {
                    Point cen = arg0.getCentroid();
                    Coordinate latLong = new Coordinate();
                    JTS.transform(cen.getCoordinate(), latLong, transform);
                    System.out.println(" ************************* " + count + " " + featureName);
                    System.out.println("in EPSG:27700 " + cen);
                    System.out.println("lat long " + latLong);
                    // SECURITY: hardcoded Google Maps API key committed to source -- revoke it and load from configuration.
                    URL url = new URL("https://maps.googleapis.com/maps/api/staticmap?center="+latLong.x+","+latLong.y+"&zoom=20&size=640x640&maptype=satellite&format=png32&key=AIzaSyDYAQH5nMlF0vEfdIg0seTiGUIcRbLNeI4");
                    URLConnection connection = url.openConnection();
                    InputStream is = connection.getInputStream();
                    //
                    BufferedImage image = ImageIO.read(is);// new BufferedImage( 640,640, BufferedImage.TYPE_3BYTE_BGR );
                    // BufferedImage image = new BufferedImage( 640,640, BufferedImage.TYPE_3BYTE_BGR );
                    Graphics2D g2 = (Graphics2D) image.getGraphics();
                    g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
                    // Hand-tuned scale from source-CRS units to tile pixels at zoom 20.
                    double scale = 11f;
                    int imageCenX = image.getWidth () >> 1,
                        imageCenY = image.getHeight() >> 1;
                    g2.setColor(Color.red);
                    g2.setStroke(new BasicStroke(2f));
                    // Draw the footprint outline centred on the tile; y is flipped
                    // because image coordinates grow downward.
                    for (Pair<Coordinate, Coordinate> pair : new ConsecutivePairs<Coordinate>( Arrays.asList( arg0.getCoordinates() ), true)) {
                        double
                            x1 = pair.first().x - cen.getCoordinate().x,
                            y1 = pair.first().y - cen.getCoordinate().y,
                            x2 = pair.second().x - cen.getCoordinate().x,
                            y2 = pair.second().y - cen.getCoordinate().y;
                        x1 *= scale; x2 *= scale; y1 *= scale; y2 *= scale;
                        g2.draw( new Line2D.Double(x1 + imageCenX, - y1 + imageCenY, x2 + imageCenX, - y2 +imageCenY) );
                    }
                    g2.drawString( HeightsToRedis.getHeight(featureName) +"m below roof", 5, 15 );
                    g2.drawString( HeightsToRedis.getRoof(featureName) +"m including roof", 5, 30 );
                    g2.drawString( latLong.x + ", " + latLong.y + " location ", 5, 45 );
                    g2.dispose();
                    ImageIO.write(image, "png", new FileOutputStream ( String.format( "/home/twak/data/footprints/center%04d.png", count )) );
                    is.close();
                    if (count > 1000)
                        System.exit(0);
                } catch (Throwable e) {
                    e.printStackTrace();
                    System.exit(0);
                }
            }
            count++;
            featureName = "?";
        }
    }

    /**
     * Alternative GML2 parse path using the geotools parser; prints each
     * feature in the collection.
     */
    public static void test() throws IOException, SAXException, ParserConfigurationException {
        //create the parser with the gml 2.0 configuration
        org.geotools.xml.Configuration configuration = new org.geotools.gml2.GMLConfiguration();
        org.geotools.xml.Parser parser = new org.geotools.xml.Parser( configuration );
        InputStream xml = new FileInputStream("/home/twak/data/around_ucl_buildings.gml");
        //parse
        FeatureCollection fc = (FeatureCollection) parser.parse( xml );
        fc.accepts( new AbstractFeatureVisitor(){
            public void visit( Feature feature ) {
                System.out.println(feature);
                // SimpleFeature f = (Feature) i.next();
                //
                // Point point = (Point) f.getDefaultGeometry();
                // String name = (String) f.getAttribute( "name" );
            }
        }, new NullProgressListener() );
    }
}
|
AlhonGelios/AO | org/apache/xmlbeans/impl/jam/JSourcePosition.java | package org.apache.xmlbeans.impl.jam;
import java.net.URI;
/**
 * Describes a position (line, column, source URI) within a source file.
 */
public interface JSourcePosition {

    /** Returns the column number of this position. */
    int getColumn();

    /** Returns the line number of this position. */
    int getLine();

    /** Returns the URI of the source file containing this position. */
    URI getSourceURI();
}
|
direkshan-digital/ofbiz-framework | framework/minilang/src/main/java/org/apache/ofbiz/minilang/method/entityops/EntityData.java | <filename>framework/minilang/src/main/java/org/apache/ofbiz/minilang/method/entityops/EntityData.java
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package org.apache.ofbiz.minilang.method.entityops;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.LinkedList;
import java.util.List;
import org.apache.ofbiz.base.location.FlexibleLocation;
import org.apache.ofbiz.base.util.Debug;
import org.apache.ofbiz.base.util.collections.FlexibleMapAccessor;
import org.apache.ofbiz.base.util.string.FlexibleStringExpander;
import org.apache.ofbiz.entity.Delegator;
import org.apache.ofbiz.entity.util.EntityDataAssert;
import org.apache.ofbiz.entity.util.EntitySaxReader;
import org.apache.ofbiz.minilang.MiniLangException;
import org.apache.ofbiz.minilang.MiniLangValidate;
import org.apache.ofbiz.minilang.SimpleMethod;
import org.apache.ofbiz.minilang.method.MethodContext;
import org.w3c.dom.Element;
/**
* Implements the <entity-data> element.
*
* @see <a href="https://cwiki.apache.org/confluence/display/OFBIZ/Mini+Language+-+minilang+-+simple-method+-+Reference">Mini-language Reference</a>
*/
public final class EntityData extends EntityOperation {
private static final String MODULE = EntityData.class.getName();
private final FlexibleMapAccessor<List<Object>> errorListFma;
private final FlexibleStringExpander locationFse;
private final String mode;
private final int timeout;
public EntityData(Element element, SimpleMethod simpleMethod) throws MiniLangException {
super(element, simpleMethod);
if (MiniLangValidate.validationOn()) {
MiniLangValidate.attributeNames(simpleMethod, element, "location", "timeout", "delegator-name", "error-list-name", "mode");
MiniLangValidate.requiredAttributes(simpleMethod, element, "location");
MiniLangValidate.expressionAttributes(simpleMethod, element, "delegator-name");
MiniLangValidate.constantAttributes(simpleMethod, element, "timeout", "mode");
MiniLangValidate.noChildElements(simpleMethod, element);
}
locationFse = FlexibleStringExpander.getInstance(element.getAttribute("location"));
mode = MiniLangValidate.checkAttribute(element.getAttribute("mode"), "load");
String timeoutAttribute = element.getAttribute("timeout");
if (!"load".equals(mode) && !timeoutAttribute.isEmpty()) {
MiniLangValidate.handleError("timeout attribute is valid only when mode=\"load\".", simpleMethod, element);
}
int timeout = -1;
if (!timeoutAttribute.isEmpty()) {
try {
timeout = Integer.parseInt(timeoutAttribute);
} catch (NumberFormatException e) {
MiniLangValidate.handleError("Exception thrown while parsing timeout attribute: " + e.getMessage(), simpleMethod, element);
}
}
this.timeout = timeout;
errorListFma = FlexibleMapAccessor.getInstance(MiniLangValidate.checkAttribute(element.getAttribute("error-list-name"), "error_list"));
}
@Override
public boolean exec(MethodContext methodContext) throws MiniLangException {
List<Object> messages = errorListFma.get(methodContext.getEnvMap());
if (messages == null) {
messages = new LinkedList<>();
errorListFma.put(methodContext.getEnvMap(), messages);
}
String location = this.locationFse.expandString(methodContext.getEnvMap());
Delegator delegator = getDelegator(methodContext);
URL dataUrl = null;
try {
dataUrl = FlexibleLocation.resolveLocation(location, methodContext.getLoader());
} catch (MalformedURLException e) {
messages.add("Could not find Entity Data document in resource: " + location + "; error was: " + e.toString());
}
if (dataUrl == null) {
messages.add("Could not find Entity Data document in resource: " + location);
}
if ("assert".equals(mode)) {
try {
EntityDataAssert.assertData(dataUrl, delegator, messages);
} catch (Exception e) {
String xmlError = "Error checking/asserting XML Resource \"" + dataUrl.toExternalForm() + "\"; Error was: " + e.getMessage();
messages.add(xmlError);
Debug.logWarning(e, xmlError, MODULE);
}
} else {
try {
EntitySaxReader reader = null;
if (timeout > 0) {
reader = new EntitySaxReader(delegator, timeout);
} else {
reader = new EntitySaxReader(delegator);
}
reader.parse(dataUrl);
} catch (Exception e) {
String xmlError = "Error loading XML Resource \"" + dataUrl.toExternalForm() + "\"; Error was: " + e.getMessage();
messages.add(xmlError);
Debug.logWarning(e, xmlError, MODULE);
}
}
return true;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("<entity-data ");
sb.append("location=\"").append(this.locationFse).append("\" ");
sb.append("mode=\"").append(this.mode).append("\" ");
sb.append("timeout=\"").append(this.timeout).append("\" ");
sb.append("error-list-name=\"").append(this.errorListFma).append("\" ");
sb.append("/>");
return sb.toString();
}
/**
* A factory for the <entity-data> element.
*/
public static final class EntityDataFactory implements Factory<EntityData> {
@Override
public EntityData createMethodOperation(Element element, SimpleMethod simpleMethod) throws MiniLangException {
return new EntityData(element, simpleMethod);
}
@Override
public String getName() {
return "entity-data";
}
}
}
|
weex/federation | federation/hostmeta/generators.py | import json
import os
import warnings
from base64 import b64encode
from string import Template
from typing import Dict
from jsonschema import validate
from jsonschema.exceptions import ValidationError
from xrd import XRD, Link, Element
def generate_host_meta(template=None, *args, **kwargs):
    """Generate a host-meta XRD document.

    Template specific key-value pairs need to be passed as ``kwargs``, see classes.

    :arg template: Ready template to fill with args, for example "diaspora" (optional)
    :returns: Rendered XRD document (str)
    """
    builder = DiasporaHostMeta if template == "diaspora" else BaseHostMeta
    return builder(*args, **kwargs).render()
def generate_legacy_webfinger(template=None, *args, **kwargs):
    """Generate a legacy webfinger XRD document.

    Template specific key-value pairs need to be passed as ``kwargs``, see classes.

    :arg template: Ready template to fill with args, for example "diaspora" (optional)
    :returns: Rendered XRD document (str)
    """
    builder = DiasporaWebFinger if template == "diaspora" else BaseLegacyWebFinger
    return builder(*args, **kwargs).render()
def generate_nodeinfo2_document(**kwargs):
    """
    Generate a NodeInfo2 document.

    Pass in a dictionary as per NodeInfo2 1.0 schema:
    https://github.com/jaywink/nodeinfo2/blob/master/schemas/1.0/schema.json

    Minimum required schema:

        {server:
            baseUrl
            name
            software
            version
        }
        openRegistrations

    Protocols default will match what this library supports, ie "diaspora" currently.

    :return: dict
    :raises: KeyError on missing required items
    """
    server = kwargs['server']
    organization = kwargs.get('organization', {})
    services = kwargs.get('service', {})
    usage = kwargs.get('usage', {})
    users = usage.get('users', {})
    return {
        "version": "1.0",
        "server": {
            "baseUrl": server['baseUrl'],
            "name": server['name'],
            "software": server['software'],
            "version": server['version'],
        },
        "organization": {
            "name": organization.get('name'),
            "contact": organization.get('contact'),
            "account": organization.get('account'),
        },
        "protocols": kwargs.get('protocols', ["diaspora"]),
        "relay": kwargs.get('relay', ''),
        "services": {
            "inbound": services.get('inbound', []),
            "outbound": services.get('outbound', []),
        },
        "openRegistrations": kwargs['openRegistrations'],
        "usage": {
            "users": {
                "total": users.get('total'),
                "activeHalfyear": users.get('activeHalfyear'),
                "activeMonth": users.get('activeMonth'),
                "activeWeek": users.get('activeWeek'),
            },
            "localPosts": usage.get('localPosts'),
            "localComments": usage.get('localComments'),
        }
    }
def generate_hcard(template=None, **kwargs):
    """Generate a hCard document.

    Template specific key-value pairs need to be passed as ``kwargs``, see classes.

    :arg template: Ready template to fill with args, for example "diaspora" (optional)
    :returns: HTML document (str)
    """
    if template != "diaspora":
        raise NotImplementedError()
    return DiasporaHCard(**kwargs).render()
class BaseHostMeta:
    """Bare host-meta wrapper around an empty XRD document.

    Subclasses populate ``self.xrd`` with links/elements before rendering.
    """
    def __init__(self, *args, **kwargs):
        # Start from an empty XRD document.
        self.xrd = XRD()

    def render(self):
        # Pretty-printed XML, UTF-8 encoded.
        return self.xrd.to_xml().toprettyxml(indent=" ", encoding="UTF-8")
class DiasporaHostMeta(BaseHostMeta):
    """Diaspora flavoured host-meta document.

    Required keyword args:
    * webfinger_host (str)
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Advertise the legacy webfinger endpoint via an lrdd link.
        self.xrd.links.append(Link(
            rel='lrdd',
            type_='application/xrd+xml',
            template='%s/webfinger?q={uri}' % kwargs["webfinger_host"],
        ))
class BaseLegacyWebFinger(BaseHostMeta):
    """Legacy XRD WebFinger document.

    See: https://code.google.com/p/webfinger/wiki/WebFingerProtocol
    """
    def __init__(self, address, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # The subject is always an acct: URI for the queried address.
        self.xrd.elements.append(Element("Subject", "acct:%s" % address))
class DiasporaWebFinger(BaseLegacyWebFinger):
    """Diaspora version of legacy WebFinger.

    Required keyword args:
    * handle (str) - eg user@domain.tld
    * host (str) - eg https://domain.tld
    * guid (str) - guid of user
    * public_key (str) - public key
    """
    def __init__(self, handle, host, guid, public_key, *args, **kwargs):
        super().__init__(handle, *args, **kwargs)
        # Alias pointing at the user's profile on the host.
        self.xrd.elements.append(Element("Alias", "%s/people/%s" % (
            host, guid
        )))
        username = handle.split("@")[0]
        # hCard location, used by remote pods to fetch profile details.
        self.xrd.links.append(Link(
            rel="http://microformats.org/profile/hcard",
            type_="text/html",
            href="%s/hcard/users/%s" %(
                host, guid
            )
        ))
        # Pod ("seed") this account originates from.
        self.xrd.links.append(Link(
            rel="http://joindiaspora.com/seed_location",
            type_="text/html",
            href=host
        ))
        self.xrd.links.append(Link(
            rel="http://joindiaspora.com/guid",
            type_="text/html",
            href=guid
        ))
        # Human-viewable profile page.
        self.xrd.links.append(Link(
            rel="http://webfinger.net/rel/profile-page",
            type_="text/html",
            href="%s/u/%s" % (
                host, username
            )
        ))
        # Public Atom feed of the user's posts.
        self.xrd.links.append(Link(
            rel="http://schemas.google.com/g/2010#updates-from",
            type_="application/atom+xml",
            href="%s/public/%s.atom" % (
                host, username
            )
        ))
        # Base64 the key
        # See https://wiki.diasporafoundation.org/Federation_Protocol_Overview#Diaspora_Public_Key
        try:
            base64_key = b64encode(bytes(public_key, encoding="UTF-8")).decode("ascii")
        except TypeError:
            # Python 2: str is already bytes, so encode it directly.
            base64_key = b64encode(public_key).decode("ascii")
        self.xrd.links.append(Link(
            rel="diaspora-public-key",
            type_="RSA",
            href=base64_key
        ))
class DiasporaHCard:
    """Diaspora hCard document.

    Must receive the `required` attributes as keyword arguments to init.
    Rendering substitutes them into ``templates/hcard_diaspora.html``.
    """
    # Exactly these keys must be supplied to __init__, each as a non-None str.
    required = [
        "hostname", "fullname", "firstname", "lastname", "photo300", "photo100", "photo50", "searchable", "guid", "public_key", "username",
    ]

    def __init__(self, **kwargs):
        self.kwargs = kwargs
        # Read the template eagerly so render() does no file I/O.
        template_path = os.path.join(os.path.dirname(__file__), "templates", "hcard_diaspora.html")
        with open(template_path) as f:
            self.template = Template(f.read())

    def render(self):
        # Check that exactly the required keys were supplied.
        # NOTE(review): validation uses `assert`/`list.remove`, so it is
        # skipped under `python -O` and raises ValueError for unexpected keys.
        required = self.required[:]
        for key, value in self.kwargs.items():
            required.remove(key)
            assert value is not None
            assert isinstance(value, str)
        assert len(required) == 0
        return self.template.substitute(self.kwargs)
class SocialRelayWellKnown:
    """A `.well-known/social-relay` document in JSON.

    For apps wanting to announce their preferences towards relay applications.
    See WIP spec: https://wiki.diasporafoundation.org/Relay_servers_for_public_posts
    Schema see `schemas/social-relay-well-known.json`

    :arg subscribe: bool
    :arg tags: tuple, optional
    :arg scope: Should be either "all" or "tags", default is "all" if not given
    """
    def __init__(self, subscribe, tags=(), scope="all", *args, **kwargs):
        self.doc = {
            "subscribe": subscribe,
            "scope": scope,
            "tags": list(tags),
        }

    def render(self):
        # Validates on every render; jsonschema raises ValidationError when
        # the document does not match the bundled schema.
        self.validate_doc()
        return json.dumps(self.doc)

    def validate_doc(self):
        # The schema ships with the package, next to this module.
        schema_path = os.path.join(os.path.dirname(__file__), "schemas", "social-relay-well-known.json")
        with open(schema_path) as f:
            schema = json.load(f)
        validate(self.doc, schema)
class NodeInfo:
    """Generate a NodeInfo document.

    See spec: http://nodeinfo.diaspora.software

    NodeInfo is unnecessarely restrictive in field values. We wont be supporting such strictness, though
    we will raise a warning unless validation is skipped with `skip_validate=True`.

    For strictness, `raise_on_validate=True` will cause a `ValidationError` to be raised.

    See schema document `federation/hostmeta/schemas/nodeinfo-1.0.json` for how to instantiate this class.
    """
    def __init__(self, software, protocols, services, open_registrations, usage, metadata, skip_validate=False,
                 raise_on_validate=False):
        self.skip_validate = skip_validate
        self.raise_on_validate = raise_on_validate
        self.doc = {
            "version": "1.0",
            "software": software,
            "protocols": protocols,
            "services": services,
            "openRegistrations": open_registrations,
            "usage": usage,
            "metadata": metadata,
        }

    def render(self):
        """Serialize the document to JSON, validating first unless skipped."""
        if not self.skip_validate:
            self.validate_doc()
        return json.dumps(self.doc)

    def validate_doc(self):
        """Check the document against the bundled NodeInfo 1.0 schema."""
        schema_path = os.path.join(os.path.dirname(__file__), "schemas", "nodeinfo-1.0.json")
        with open(schema_path) as f:
            schema = json.load(f)
        try:
            validate(self.doc, schema)
        except ValidationError:
            if self.raise_on_validate:
                raise
            warnings.warn("NodeInfo document generated does not validate against NodeInfo 1.0 specification.")
# The default NodeInfo document path
NODEINFO_DOCUMENT_PATH = "/nodeinfo/1.0"


def get_nodeinfo_well_known_document(url, document_path=None):
    """Generate a NodeInfo .well-known document.

    See spec: http://nodeinfo.diaspora.software

    :arg url: The full base url with protocol, ie https://example.com
    :arg document_path: Custom NodeInfo document path if supplied (optional)
    :returns: dict
    """
    path = document_path or NODEINFO_DOCUMENT_PATH
    link = {
        "rel": "http://nodeinfo.diaspora.software/ns/schema/1.0",
        "href": f"{url}{path}",
    }
    return {"links": [link]}
class MatrixClientWellKnown:
    """
    Matrix Client well-known as per
    https://matrix.org/docs/spec/client_server/r0.6.1#server-discovery
    """
    def __init__(self, homeserver_base_url: str, identity_server_base_url: str = None, other_keys: Dict = None):
        self.homeserver_base_url = homeserver_base_url
        self.identity_server_base_url = identity_server_base_url
        self.other_keys = other_keys

    def render(self):
        """Return the client discovery document as a dict."""
        document = {
            "m.homeserver": {
                "base_url": self.homeserver_base_url,
            },
        }
        # The identity server entry is optional.
        if self.identity_server_base_url:
            document["m.identity_server"] = {
                "base_url": self.identity_server_base_url,
            }
        # Arbitrary extra top-level keys may be merged in.
        if self.other_keys:
            document.update(self.other_keys)
        return document
class MatrixServerWellKnown:
    """
    Matrix Server well-known as per
    https://matrix.org/docs/spec/server_server/r0.1.4#server-discovery
    """
    def __init__(self, homeserver_domain_with_port: str):
        self.homeserver_domain_with_port = homeserver_domain_with_port

    def render(self):
        """Return the server delegation document as a dict."""
        return {"m.server": self.homeserver_domain_with_port}
class RFC7033Webfinger:
    """
    RFC 7033 webfinger - see https://tools.ietf.org/html/rfc7033

    A Django view is also available, see the child ``django`` module for view
    and url configuration.

    :param id: Profile ActivityPub ID in URL format
    :param handle: Profile Diaspora handle
    :param guid: Profile Diaspora guid
    :param base_url: The base URL of the server (protocol://domain.tld)
    :param profile_path: Profile path for the user (for example `/profile/johndoe/`)
    :param hcard_path: (Optional) hCard path, defaults to ``/hcard/users/``.
    :param atom_path: (Optional) atom feed path
    :returns: dict
    """
    def __init__(
        self, id: str, handle: str, guid: str, base_url: str, profile_path: str, hcard_path: str="/hcard/users/",
        atom_path: str=None, search_path: str=None,
    ):
        self.id = id
        self.handle = handle
        self.guid = guid
        self.base_url = base_url
        self.hcard_path = hcard_path
        self.profile_path = profile_path
        self.atom_path = atom_path
        self.search_path = search_path

    def render(self):
        """Assemble and return the webfinger document as a dict."""
        # Mandatory link relations, in the order legacy consumers expect.
        links = [
            {
                "rel": "http://microformats.org/profile/hcard",
                "type": "text/html",
                "href": f"{self.base_url}{self.hcard_path}{self.guid}",
            },
            {
                "rel": "http://joindiaspora.com/seed_location",
                "type": "text/html",
                "href": self.base_url,
            },
            {
                "rel": "http://webfinger.net/rel/profile-page",
                "type": "text/html",
                "href": f"{self.base_url}{self.profile_path}",
            },
            {
                "rel": "salmon",
                "href": f"{self.base_url}/receive/users/{self.guid}",
            },
            {
                "rel": "self",
                "href": self.id,
                "type": "application/activity+json",
            },
        ]
        # Optional atom feed advertisement.
        if self.atom_path:
            links.append({
                "rel": "http://schemas.google.com/g/2010#updates-from",
                "type": "application/atom+xml",
                "href": f"{self.base_url}{self.atom_path}",
            })
        # Optional remote-follow (OStatus subscribe) template.
        if self.search_path:
            links.append({
                "rel": "http://ostatus.org/schema/1.0/subscribe",
                "template": f"{self.base_url}{self.search_path}{{uri}}",
            })
        return {
            "subject": f"acct:{self.handle}",
            "aliases": [
                f"{self.base_url}{self.profile_path}",
                self.id,
            ],
            "links": links,
        }
|
phalodi/spark-semantic | src/main/java/com.ibm.research.quetzal.core/src/com/ibm/research/rdf/store/sparql11/planner/GraphRestrictionPattern.java | <gh_stars>0
/******************************************************************************
* Copyright (c) 2015 IBM Corporation.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*****************************************************************************/
package com.ibm.research.rdf.store.sparql11.planner;
import java.util.Collection;
import java.util.Collections;
import java.util.Set;
import com.ibm.research.rdf.store.sparql11.model.BinaryUnion;
import com.ibm.research.rdf.store.sparql11.model.BlankNodeVariable;
import com.ibm.research.rdf.store.sparql11.model.Expression;
import com.ibm.research.rdf.store.sparql11.model.IRI;
import com.ibm.research.rdf.store.sparql11.model.Pattern;
import com.ibm.research.rdf.store.sparql11.model.Variable;
import com.ibm.research.rdf.store.sparql11.planner.Planner.Key;
import com.ibm.wala.util.collections.HashSetFactory;
public class GraphRestrictionPattern extends Pattern implements Key {
//private BinaryUnion<Variable, IRI> graphRestriction;
public GraphRestrictionPattern(BinaryUnion<Variable, IRI> graphRestriction) {
super( EPatternSetType.GRAPH);
this.graphRestriction = graphRestriction;
if (graphRestriction == null) {
throw new NullPointerException();
}
}
@Override
public Set<Variable> gatherVariables() {
if (graphRestriction.isFirstType()) {
return Collections.singleton(graphRestriction.getFirst());
} else {
return Collections.emptySet();
}
}
@Override
public boolean isMandatory() {
return true;
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("GraphRestrictionPatterny [graphRestriction=");
builder.append(graphRestriction);
builder.append("]");
return builder.toString();
}
@Override
public void setGraphRestriction(BinaryUnion<Variable, IRI> graphRestriction) {
if (!this.graphRestriction.equals(graphRestriction)) {
throw new RuntimeException("Graph restriction cannot be modified!");
}
}
@Override
public void addFilter(Expression e) {
throw new UnsupportedOperationException();
}
@Override
public void addOptional(Pattern optional) {
throw new UnsupportedOperationException();
}
@Override
public Collection<? extends Variable> getVariables() {
return gatherVariables();
}
@Override
public void reverse() {
if (graphRestriction.isSecondType()) {
graphRestriction.getSecond().reverse();
}
}
@Override
public Set<BlankNodeVariable> gatherBlankNodes() {
return Collections.emptySet();
}
@Override
public Set<Variable> gatherOptionalVariablesWithMultipleBindings() {
return Collections.emptySet();
}
@Override
public Set<Variable> gatherVariablesWithOptional() {
return gatherVariables();
}
@Override
public Set<Variable> gatherIRIBoundVariables() {
// no filter allowed
return Collections.emptySet();
}
@Override
public Set<Variable> gatherVariablesInTransitiveClosure() {
// no filter allowed
return Collections.emptySet();
}
@Override
public void replaceFilterBindings() {
// no filter allowed
}
@Override
public int getNumberTriples() {
int nrTriples=0;
return nrTriples;
}
@Override
public Set<Pattern> gatherSubPatterns(boolean includeOptionals) {
Set<Pattern> ret = HashSetFactory.make();
ret.add(this);
return ret;
}
@Override
public Set<Pattern> gatherSubPatternsExcluding(Pattern except,
boolean includeOptionals) {
if (! except.equals(this)) {
Set<Pattern> ret = HashSetFactory.make();
ret.add(this);
return ret;
} else {
return Collections.emptySet();
}
}
@Override
public Set<Pattern> getSubPatterns(boolean includeOptionals) {
return HashSetFactory.make();
}
@Override
public boolean isEmpty() {
return true;
}
//Pattern methods
}
|
masud-technope/ACER-Replication-Package-ASE2017 | corpus/class/eclipse.jdt.core/5402.java | package PackageReference;
public class F {

    // Returns a value of the fully package-qualified type p3.p2.p.X; always
    // null here — presumably a compiler-corpus fixture exercising package
    // reference resolution (TODO confirm against the test harness).
    p3.p2.p.X foo() {
        return null;
    }
}
|
devefx/validator-web | validator-web/src/main/java/org/devefx/validator/script/handler/JavaScriptHandler.java | /*
* Copyright 2016-2017, <NAME> (<EMAIL>).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.devefx.validator.script.handler;
import java.io.IOException;
import java.util.Locale;
import org.devefx.validator.beans.factory.annotation.Inject;
import org.devefx.validator.beans.factory.annotation.Value;
import org.devefx.validator.script.Compressor;
/**
 * Base class for handlers that serve generated JavaScript. Adds optional
 * script compression on top of {@link CachingHandler}'s caching; compression
 * is skipped in debug mode or when no {@link Compressor} is injected.
 */
public abstract class JavaScriptHandler extends CachingHandler {

    /** Optional script compressor; may remain null. */
    protected Compressor compressor;
    /** When true, scripts are served uncompressed for easier debugging. */
    protected boolean debug;
    /** Suffix appended to view names when building a URL; never null. */
    protected String suffix = "";

    public JavaScriptHandler() {
        setContentType("application/javascript;charset=UTF-8");
    }

    @Inject(required=false)
    public void setCompressor(Compressor compressor) {
        this.compressor = compressor;
    }

    @Value("${debug}")
    public void setDebug(boolean debug) {
        this.debug = debug;
    }

    /**
     * Set the suffix that gets appended to view names when building a URL.
     */
    public void setSuffix(String suffix) {
        this.suffix = (suffix != null ? suffix : "");
    }

    /**
     * Return the suffix that gets appended to view names when building a URL.
     */
    protected String getSuffix() {
        return this.suffix;
    }

    @Override
    public String generateCachableContent(String contextPath,
            String servletPath, String pathInfo, Locale locale) throws IOException {
        final String script = generateJavaScript(contextPath, servletPath, pathInfo, locale);
        // Serve as-is when debugging, when no compressor is wired in, or when
        // the subclass produced nothing.
        if (debug || compressor == null || script == null) {
            return script;
        }
        try {
            return compressor.compressJavaScript(script);
        } catch (Exception ex) {
            // Compression failure is non-fatal: log and fall back to the raw script.
            if (log.isWarnEnabled()) {
                log.warn("Compression system (" + compressor.getClass().getSimpleName() +") failed to compress script", ex);
            }
            return script;
        }
    }

    /** Produce the (uncompressed) script body for the given request path. */
    protected abstract String generateJavaScript(String contextPath, String servletPath, String pathInfo, Locale locale) throws IOException;
}
|
aroundble/onap | catalog-be/src/main/java/org/openecomp/sdc/be/components/impl/GroupTypeBusinessLogic.java | <reponame>aroundble/onap
package org.openecomp.sdc.be.components.impl;
import org.openecomp.sdc.be.components.impl.exceptions.ComponentException;
import org.openecomp.sdc.be.components.validation.UserValidations;
import org.openecomp.sdc.be.config.ConfigurationManager;
import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
import org.openecomp.sdc.be.model.GroupTypeDefinition;
import org.openecomp.sdc.be.model.User;
import org.openecomp.sdc.be.model.operations.impl.GroupTypeOperation;
import org.openecomp.sdc.exception.ResponseFormat;
import org.springframework.stereotype.Component;
import java.util.List;
import java.util.Set;
/**
 * Business logic for reading group types. Validates the requesting user,
 * filters out group types excluded for the given component type (per
 * configuration), and always commits the graph transaction when done.
 */
@Component
public class GroupTypeBusinessLogic {

    private final GroupTypeOperation groupTypeOperation;
    private final TitanDao titanDao;
    private final UserValidations userValidations;

    public GroupTypeBusinessLogic(GroupTypeOperation groupTypeOperation, TitanDao titanDao, UserValidations userValidations) {
        this.groupTypeOperation = groupTypeOperation;
        this.titanDao = titanDao;
        this.userValidations = userValidations;
    }

    /**
     * Return all group types visible to the given component type.
     *
     * @param userId                requesting user; must exist or a
     *                              ComponentException is thrown via onUserError
     * @param internalComponentType key into the configured exclusion mapping;
     *                              unknown keys yield a null exclusion set
     */
    public List<GroupTypeDefinition> getAllGroupTypes(String userId, String internalComponentType) {
        try {
            // validateUserExists returns an Either; .left().on(...) routes the
            // error case into onUserError, which throws.
            userValidations.validateUserExists(userId, "get group types", true)
                    .left()
                    .on(this::onUserError);
            Set<String> excludeGroupTypes = ConfigurationManager.getConfigurationManager().getConfiguration().getExcludedGroupTypesMapping().get(internalComponentType);
            return groupTypeOperation.getAllGroupTypes(excludeGroupTypes);
        } finally {
            // Commit unconditionally so the read transaction is released even
            // when validation throws.
            titanDao.commit();
        }
    }

    // Error branch of the Either above: convert the response format into an
    // unchecked ComponentException. Never actually returns a User.
    private User onUserError(ResponseFormat responseFormat) {
        throw new ComponentException(responseFormat);
    }
}
|
sguazt/dcsxx-commons | inc/dcs/meta/bool.hpp | /**
* \file dcs/meta/bool.hpp
*
* \brief Boolean integral constant wrapper.
*
* \author <NAME> (<EMAIL>)
*
* <hr/>
*
* Copyright 2009 <NAME> (<EMAIL>)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef DCS_META_BOOL_HPP
#define DCS_META_BOOL_HPP


#include <dcs/detail/config/boost.hpp>


#if DCS_DETAIL_CONFIG_BOOST_CHECK_VERSION(103000) // 1.30

// Boost >= 1.30: simply re-export Boost.MPL's boolean wrappers.
#include <boost/mpl/bool.hpp>

namespace dcs { namespace meta {

using ::boost::mpl::bool_;
using ::boost::mpl::true_;
using ::boost::mpl::false_;

}} // Namespace dcs::meta

#else // DCS_DETAIL_CONFIG_BOOST_CHECK_VERSION

// Older/absent Boost: self-contained implementation mirroring
// boost::mpl::bool_'s interface (value, tag, type, value_type).
#include <dcs/meta/integral_c_tag.hpp>

namespace dcs { namespace meta {

/// Integral constant wrapper for a compile-time boolean value.
template <bool Cond>
struct bool_
{
	static const bool value = Cond;
	//enum { value = Cond }; // More portable version
	typedef integral_c_tag tag;
	typedef bool_ type;
	typedef bool value_type;
	// Implicit conversion so bool_<C> can appear directly in boolean context.
	operator bool() const { return value; }
};

typedef bool_<true> true_;
typedef bool_<false> false_;

}} // Namespace dcs::meta

#endif // DCS_DETAIL_CONFIG_BOOST_CHECK_VERSION


#endif // DCS_META_BOOL_HPP
|
java-catering/Back-End | web/src/main/java/app/config/Bootstrap.java | <filename>web/src/main/java/app/config/Bootstrap.java
package app.config;
import app.models.*;
import app.services.ProductService;
import app.services.PurchaseService;
import app.services.Purchase_ProductService;
import app.services.UserService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.context.annotation.Configuration;
/**
 * Seeds the database with mock products, users and purchases at application
 * startup (runs after the Spring context is ready).
 */
@Configuration
public class Bootstrap implements CommandLineRunner
{
    @Autowired
    private UserService userService;

    @Autowired
    private ProductService productService;

    @Autowired
    PurchaseService purchaseService;

    @Autowired
    Purchase_ProductService purchase_productService;

    // we will add all our mock users and purchases here.
    @Override
    public void run(String... args) throws Exception
    {
        // Two demo products.
        Product cheeseburger = new Product();
        cheeseburger.setDescription("A Cheese burger");
        cheeseburger.setIs_available(true);
        cheeseburger.setTitle("Cheese burger");
        cheeseburger.setUnit_price(119.99D);
        productService.save(cheeseburger);

        Product pizza = new Product();
        pizza.setUnit_price(19.99D);
        pizza.setTitle("Pizza");
        pizza.setDescription("A pizza");
        pizza.setIs_available(true);
        productService.save(pizza);

        // NOTE(review): builder() is presumably a static (Lombok-style)
        // factory; calling it through "new User()" allocates a throwaway
        // instance. Confirm and switch to User.builder() if so.
        User eric = new User().builder()
                .first_name("Eric")
                .last_name("Gomez")
                .email("<EMAIL>")
                .password("<PASSWORD>")
                .build();
        userService.saveAdmin(eric);

        User stan = new User().builder()
                .first_name("Stan")
                .last_name("Smith")
                .email("<EMAIL>")
                .password("<PASSWORD>")
                .build();
        userService.save(stan);

        // First purchase: one cheeseburger for Stan. The purchase is saved
        // before its line item so the foreign key target exists.
        Purchase stans_purchase = new Purchase();
        stans_purchase.setUser(stan);
        Purchase_Product productOne = new Purchase_Product();
        productOne.setProduct(cheeseburger);
        productOne.setQuantity(1);
        productOne.setPurchase(stans_purchase);
        purchaseService.save(stans_purchase);
        purchase_productService.save(productOne);

        // Second purchase: two pizzas for Stan.
        Purchase stans_second_purchase = new Purchase();
        stans_second_purchase.setUser(stan);
        Purchase_Product productTwo = new Purchase_Product();
        productTwo.setPurchase(stans_second_purchase);
        productTwo.setProduct(pizza);
        productTwo.setQuantity(2);
        purchaseService.save(stans_second_purchase);
        purchase_productService.save(productTwo);
    }
}
|
chlorm-forks/gyp | test/relative/foo/b/b.gyp | {
'targets': [
{
'target_name': 'b',
'type': 'static_library',
'sources': ['b.cc'],
},
],
}
|
Tarnak-public/BoofAndroidDemo | app/src/main/java/org/boofcv/android/recognition/FiducialCalibrationActivity.java | <filename>app/src/main/java/org/boofcv/android/recognition/FiducialCalibrationActivity.java
package org.boofcv.android.recognition;
import android.os.Bundle;
import android.widget.SeekBar;
import android.widget.ToggleButton;
import boofcv.abst.fiducial.FiducialDetector;
import boofcv.abst.fiducial.calib.CalibrationPatterns;
import boofcv.factory.fiducial.FactoryFiducial;
import boofcv.struct.image.GrayU8;
/**
* Detects calibration target fiducials
*/
public class FiducialCalibrationActivity extends FiducialSquareActivity {

    // Calibration-target configuration shared across activity restarts.
    public static ConfigAllCalibration cc = new ConfigAllCalibration();

    // "Robust" toggle; only meaningful for chessboard targets.
    ToggleButton toggle;

    public FiducialCalibrationActivity() {
        super(FiducialCalibrationHelpActivity.class);
    }

    /**
     * Shows the target-selection dialog first; detection stays disabled until
     * the user has picked a calibration pattern.
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        // don't start processing fiducials until the user has selected the specifics
        detectFiducial = false;
        super.onCreate(savedInstanceState);

        SelectCalibrationFiducial dialog = new SelectCalibrationFiducial(cc);
        dialog.show(this, ()->{
            detectFiducial=true;//needs to be before createNewProcessor
            // only enable if the user selected a chessboard
            toggle.setEnabled(cc.targetType==CalibrationPatterns.CHESSBOARD);
            // Create the detector and start processing images!
            createNewProcessor();
        });
    }

    /**
     * Wires up the robust/fast toggle; the seek bar is unused by this
     * activity and stays disabled while robust mode is on.
     */
    @Override
    protected void configureControls(ToggleButton toggle, SeekBar seek) {
        this.toggle = toggle;
        // disable seek bar some nothing uses it
        seek.setEnabled(false);

        // We want robust to be configurable for chessboard
        toggle.setChecked(false); // default to the fast option for slower devices
        robust = toggle.isChecked();
        toggle.setOnCheckedChangeListener((buttonView, isChecked) -> {
            synchronized (lock) {
                robust = isChecked;
                if (robust) {
                    seek.setEnabled(false);
                } else {
                    seek.setEnabled(true);
                }
                // Rebuild the detector so the new setting takes effect.
                createNewProcessor();
            }
        });
    }

    /**
     * Instantiate the fiducial detector matching the selected target type.
     * Only chessboard honors the robust/fast distinction.
     */
    @Override
    protected FiducialDetector<GrayU8> createDetector() {
        if( cc.targetType == CalibrationPatterns.CHESSBOARD ) {
            if( robust ) {
                return FactoryFiducial.calibChessboardX(null, cc.chessboard, GrayU8.class);
            } else {
                return FactoryFiducial.calibChessboardB(null, cc.chessboard, GrayU8.class);
            }
        } else if( cc.targetType == CalibrationPatterns.SQUARE_GRID ) {
            return FactoryFiducial.calibSquareGrid(null,cc.squareGrid, GrayU8.class);
        } else if( cc.targetType == CalibrationPatterns.CIRCLE_HEXAGONAL ) {
            return FactoryFiducial.calibCircleHexagonalGrid(null,cc.hexagonal, GrayU8.class);
        } else if( cc.targetType == CalibrationPatterns.CIRCLE_GRID ) {
            return FactoryFiducial.calibCircleRegularGrid(null,cc.circleGrid, GrayU8.class);
        } else {
            throw new RuntimeException("Unknown");
        }
    }
}
|
parkjh80/studio | function/python/brightics/function/recommendation/test/als_test.py | <reponame>parkjh80/studio<filename>function/python/brightics/function/recommendation/test/als_test.py
"""
Copyright 2019 Samsung SDS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from brightics.function.recommendation.als import als_train
from brightics.function.recommendation.als import als_predict
from brightics.function.recommendation.als import als_recommend
from brightics.common.datasets import load_iris
import unittest
import pandas as pd
import numpy as np
import HtmlTestRunner
import os
class ALS(unittest.TestCase):
    """Regression tests for the ALS train/predict/recommend functions.

    Expected values are pinned numeric snapshots produced with fixed numpy
    seeds; any change to the underlying implementation that alters results
    will fail these comparisons.
    """

    def setUp(self):
        print("*** ALS UnitTest Start ***")
        # Build a reproducible random user/item/rating table: seeds are fixed
        # per column so the fixture is deterministic across runs.
        data = dict()
        np.random.seed(3) ; data['user'] = np.random.randint(10, size=100)
        np.random.seed(10) ; data['item'] = np.random.randint(10, size=100)
        np.random.seed(5) ; data['rating'] = np.random.randint(5, size=100)
        self.testdata = data

    def tearDown(self):
        print("*** ALS UnitTest End ***")

    def test(self):
        # Train with a fixed seed and compare the first few latent user
        # factors against the recorded snapshot (10 decimal places).
        result = als_train(self.testdata, user_col='user', item_col='item', rating_col='rating', implicit=True, seed=5)['model']
        np.testing.assert_array_almost_equal(result['user_factors']['features'][0], [0.7827272522525689, 0.5719906568592842, 0.04662757615567294, 0.08435863658705169, 0.13961930483088836, 0.8158731311149139, -0.2989090206125877, 0.18833942895847422, 0.22372763072542956, 0.24804294365451002], 10)
        np.testing.assert_array_almost_equal(result['user_factors']['features'][1], [0.6231809722140008, 0.18638666829515813, 0.6494260943804142, -0.27646543445251864, -0.3845309876582424, 0.6397336295652383, 0.01564816564638764, -0.03867166495432923, 0.7250761840795689, 0.10994264590483944], 10)
        np.testing.assert_array_almost_equal(result['user_factors']['features'][2], [0.39027551931624577, -0.15305372642373116, 0.3697306467265087, -0.3061721146289602, 0.25116083826202235, -0.010580822026468721, 0.6106377866190532, 0.3929926281979517, 0.533076138698723, 0.2740249383131869], 10)
        np.testing.assert_array_almost_equal(result['user_factors']['features'][3], [0.36880430655625285, 0.33040135139120513, 0.4784710806190769, 0.6539707617726397, -0.07757454178495538, 0.28011644723180507, -0.15975519195760648, 0.10971640101984394, -0.3098423506341671, -0.07909486951433393], 10)
        np.testing.assert_array_almost_equal(result['user_factors']['features'][4], [-0.08200541559064711, 0.37848581949552046, -0.28233808896252915, 0.04577983873739822, 0.4285143599922477, 0.6179166850800334, 0.17008419995671525, 0.5559306112245265, 0.30794429302324133, -0.39045653631429367], 10)
        # Predictions for the training rows (4 decimal places — looser because
        # float accumulation order may vary slightly).
        predict = als_predict(table=pd.DataFrame(self.testdata), model=result)['out_table']
        np.testing.assert_array_almost_equal(predict.prediction[:5], [0.9956982152505974, -0.045045469162887486, 0.9844770330982069, 0.9967581851809032, 0.9956982152505974], 4)
        # Top-1 recommendation scores without filtering already-rated items.
        recommend_result = als_recommend(self.testdata, user_col='user', item_col='item', rating_col='rating', filter=False, seed=5)['out_table']
        np.testing.assert_array_almost_equal(recommend_result['rating_top1'][:5], [16.976264923981944, 3.9679093140523096, 3.9873630479966105, 3.0033018849257873, 7.9666502915457755], 10)
if __name__ == '__main__':
    # When run directly, write an HTML test report into the repository-level
    # "reports" directory (seven levels above this file).
    filepath = os.path.dirname(os.path.abspath(__file__))
    reportFoler = filepath + "/../../../../../../../reports"  # NOTE(review): "Foler" typo kept — renaming would be cosmetic only
    unittest.main(testRunner=HtmlTestRunner.HTMLTestRunner(combine_reports=True, output=reportFoler))
|
dsw9742/angularjs-datalayer-ensighten-demo | src/main/java/com/douglaswhitehead/controller/data/OrderDataController.java | <reponame>dsw9742/angularjs-datalayer-ensighten-demo<gh_stars>0
package com.douglaswhitehead.controller.data;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.mobile.device.Device;
import com.douglaswhitehead.model.OrderForm;
/**
 * Controller contract for order data operations. Implementations return a
 * map of model attributes for the view layer; the {@link Device} parameter
 * presumably drives mobile/desktop variations — confirm in the implementation.
 */
public interface OrderDataController {

    /** Handle the checkout step for the submitted order form. */
    public Map<String, Object> checkout(OrderForm orderForm, HttpServletRequest request, Device device, HttpServletResponse response);

    /** Handle the order-completion step for the submitted order form. */
    public Map<String, Object> complete(OrderForm orderForm, HttpServletRequest request, Device device, HttpServletResponse response);
}
sim9108/SDKS | LibOpenAL/demo/lec1/lec1.cpp | // lec1.cpp : Defines the entry point for the console application.
//
#include "stdafx.h"
#include <cstdlib>
#include <cstdio>
#include <iostream>
#include <cmath>
#include <AL/al.h>
#include <AL/alc.h>
#include <Al/alext.h>
#include <AL/alut.h>
static void
playFile(const char *fileName){
ALenum error;
/* Create an AL buffer from the given sound file. */
ALuint buffer = alutCreateBufferFromFile(fileName);
error = alutGetError();
if (buffer == AL_NONE)
{
std::cerr<< "Error loading file:"<< alutGetErrorString(error)<<std::endl;
alutExit();
std::exit(EXIT_FAILURE);
}
/* Generate a single source, attach the buffer to it and start playing. */
ALuint source;
alGenSources(1, &source);
alSourcei(source, AL_BUFFER, buffer);
auto _pan = -10000.0f;
float pan = (float)_pan / 10000.0f;
float pan2 = (float)sqrt(1 - pan*pan);
float sourcePosAL[] = { pan,pan2, 0.0f };
alSourcefv(source, AL_POSITION, sourcePosAL);
alSourcePlay(source);
/* Normally nothing should go wrong above, but one never knows... */
error = alGetError();
if (error != ALUT_ERROR_NO_ERROR)
{
std::cerr<<alGetString(error)<<std::endl;
alutExit();
std::exit(EXIT_FAILURE);
}
/* Check every 0.1 seconds if the sound is still playing. */
ALint status;
do
{
alutSleep(0.1f);
alGetSourcei(source, AL_SOURCE_STATE, &status);
} while (status == AL_PLAYING);
}
int
main(int argc, char **argv)
{
    /* Initialise ALUT and eat any ALUT-specific commandline flags. */
    if (!alutInit(&argc, argv))
    {
        ALenum error = alutGetError();
        std::cerr << alutGetErrorString(error) << std::endl;
        std::exit(EXIT_FAILURE);
    }

    /* BUG FIX: the original dereferenced argv[1] without checking argc,
     * crashing when no sound file was supplied. */
    if (argc < 2)
    {
        std::cerr << "usage: " << argv[0] << " <soundfile>" << std::endl;
        alutExit();
        std::exit(EXIT_FAILURE);
    }

    playFile(argv[1]);

    if (!alutExit())
    {
        ALenum error = alutGetError();
        std::cerr << alutGetErrorString(error) << std::endl;
        std::exit(EXIT_FAILURE);
    }
    return EXIT_SUCCESS;
}
mfwarren/FreeCoding | 2015/04/fc_2015_04_15.py | <reponame>mfwarren/FreeCoding<filename>2015/04/fc_2015_04_15.py<gh_stars>0
#!/usr/bin/env python3
# imports go here
import time
import sys
import random
#
# Free Coding session for 2015-04-15
# Written by <NAME>
#
MESSAGE = "too busy coding tonight to write much"


def type_message(message):
    """Echo *message* to stdout one character at a time.

    Each character is followed by a short random pause (50-150 ms) so the
    text appears to be typed by hand; a trailing newline is printed at the end.
    """
    for char in message:
        print(char, end="", flush=True)
        time.sleep(0.05 + random.random() / 10)
    print("")


if __name__ == '__main__':
    type_message(MESSAGE)
|
shenlanAZ/SOAImpl | src/main/java/com/darkblue/rpcimpl/protocol/ResultWrap.java | <gh_stars>1-10
package com.darkblue.rpcimpl.protocol;
import java.io.Serializable;
import java.util.Map;
/**
* 远程结果回传协议Wrap
*/
/**
 * Serializable wrapper that carries a remote invocation {@link Result} back
 * to the caller, plus an optional attachment map for out-of-band key/value
 * data. (Original class comment, translated from Chinese: "remote result
 * return protocol wrap".)
 */
public class ResultWrap implements Serializable {

    // FIX: the class is Serializable but had no explicit serialVersionUID;
    // pinning it keeps old and new peers wire-compatible across recompiles.
    private static final long serialVersionUID = 1L;

    /** The wrapped invocation result. */
    private Result result;

    /** Optional extra data transported alongside the result.
     *  NOTE: field/accessor name "attchment" (sic) is kept — callers depend on it. */
    private Map<Object, Object> attchment;

    public ResultWrap() {
    }

    public ResultWrap(Result result) {
        this.result = result;
    }

    public Result getResult() {
        return result;
    }

    public void setResult(Result result) {
        this.result = result;
    }

    public Map<Object, Object> getAttchment() {
        return attchment;
    }

    public void setAttchment(Map<Object, Object> attchment) {
        this.attchment = attchment;
    }
}
|
Rathe001/Rathe001.github.io-neurokore | src/core/ui/actions.js | <filename>src/core/ui/actions.js<gh_stars>1-10
const actions = {
  // Action type constants.
  SET_SCALE: 'UI_SET_SCALE',

  // Action creators.
  setScale(data) {
    return {
      type: actions.SET_SCALE,
      payload: data,
    };
  },
};

export default actions;
|
kevinguo1989/dingo | dingo-exec/src/main/java/io/dingodb/exec/channel/SendEndpoint.java | /*
* Copyright 2021 DataCanvas
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.dingodb.exec.channel;
import io.dingodb.exec.Services;
import io.dingodb.net.Channel;
import io.dingodb.net.Message;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class SendEndpoint {

    // Remote peer address.
    private final String host;
    private final int port;

    // Tag identifying this endpoint's message stream; also the key under
    // which EndpointManager tracks its control status.
    @Getter
    private final String tag;

    // Lazily opened in init(); all sends go through this channel.
    private Channel channel;

    public SendEndpoint(String host, int port, String tag) {
        this.host = host;
        this.port = port;
        this.tag = tag;
    }

    /**
     * Register with the EndpointManager and open the channel to the peer.
     * Opening the channel may block.
     */
    public void init() {
        EndpointManager.INSTANCE.registerSendEndpoint(this);
        // This may block.
        channel = Services.openNewChannel(host, port);
        if (log.isInfoEnabled()) {
            log.info("(tag = {}) Opened channel to {}:{}.", tag, host, port);
        }
    }

    // Wake a waiter blocked in checkStatus(). NOTE(review): notify() wakes a
    // single waiter; fine if at most one thread ever waits per endpoint —
    // confirm that assumption before adding more senders.
    synchronized void wakeUp() {
        notify();
    }

    // Block until EndpointManager reports READY for this tag. Interrupts are
    // logged and the wait is retried (the interrupt flag is not restored).
    synchronized void checkStatus() {
        ControlStatus status = EndpointManager.INSTANCE.getStatus(tag);
        if (status != ControlStatus.READY) {
            while (true) {
                try {
                    wait();
                } catch (InterruptedException e) {
                    log.warn("Catch (tag = {}) Interrupted while waiting for channel to be ready.", tag);
                    continue;
                }
                status = EndpointManager.INSTANCE.getStatus(tag);
                if (status == ControlStatus.READY) {
                    log.info("ReCheck Status of Instance:{}:{} (tag = {}) Status = {}.",
                        host, port, tag, status);
                    break;
                }
            }
        }
    }

    /**
     * Send the given payload on this endpoint's tag, blocking first until the
     * endpoint is READY.
     */
    public void send(byte[] content) {
        checkStatus();
        Message msg = Message.builder()
            .tag(tag)
            .content(content)
            .build();
        channel.send(msg);
    }

    /** Close the underlying channel. */
    public void close() throws Exception {
        channel.close();
    }
}
|
haikusw/jaqalpaq | jaqalpaq/parser/extract_let.py | # Copyright 2020 National Technology & Engineering Solutions of Sandia, LLC (NTESS).
# Under the terms of Contract DE-NA0003525 with NTESS, the U.S. Government retains
# certain rights in this software.
"""Extract the constant mappings used by let statements."""
from .tree import TreeRewriteVisitor
from jaqalpaq import JaqalError
def extract_let(tree, use_float=False):
    """Return a dictionary mapping labels in let statements to parser tree fragments to be substituted.

    use_float -- If set to True, the resulting dictionary will map Identifier's to float's.

    Return a dictionary mapping Identifier's to parse tree fragments, unless use_float is True.
    """
    # Walk the tree once with a collecting visitor and hand back its mapping.
    extractor = ExtractLetVisitor(use_float)
    extractor.visit(tree)
    return extractor.let_mapping
class ExtractLetVisitor(TreeRewriteVisitor):
    """Tree visitor that collects let-statement bindings into ``let_mapping``."""

    def __init__(self, use_float):
        super().__init__()
        # When true, store extracted numeric values instead of raw tree fragments.
        self.use_float = bool(use_float)
        # Maps extracted identifier -> number or parse-tree fragment.
        self.let_mapping = {}

    def visit_let_statement(self, identifier, number):
        # extract_signed_number / extract_identifier come from the base
        # TreeRewriteVisitor (not visible here) — presumably they unwrap the
        # parse-tree tokens into plain Python values.
        if self.use_float:
            number = self.extract_signed_number(number)
        ext_identifier = self.extract_identifier(identifier)
        # A let-constant may be bound only once per program.
        if ext_identifier in self.let_mapping:
            raise JaqalError(f"Redefinition of let-constant {ext_identifier}")
        self.let_mapping[ext_identifier] = number
|
Zitara/BRLCAD | src/proc-db/naca/splprocs.c | <reponame>Zitara/BRLCAD<filename>src/proc-db/naca/splprocs.c
/* S P L P R O C S . C
* BRL-CAD
*
* Copyright (c) 2014-2016 United States Government as represented by
* the U.S. Army Research Laboratory.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* 3. The name of the author may not be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/** @file splprocs.c
*
* Brief description
*
*/
#include "common.h"
#include <stdlib.h>
#include "bu/defines.h"
#include "vmath.h"
#include "bn.h"
#include "naca.h"
static fastf_t Zeroin(fastf_t ax, fastf_t bx, fastf_t (*f)(fastf_t x), fastf_t tol);
/* Globals for EvaluateCubic (g prefix for global) */
fastf_t ga, gfa, gfpa;
fastf_t gb, gfb, gfpb;
/**
* Evaluate a cubic polynomial defined by the function and the first
* derivative at two points.
*
* @param[in] u The point to evaluate the function at.
*/
static fastf_t
EvaluateCubic(fastf_t u)
{
fastf_t d = (gfb - gfa) / (gb - ga);
fastf_t t = (u - ga) / (gb - ga);
fastf_t p = 1.0 - t;
return p * gfa + t * gfb - p * t * (gb - ga) * (p * (d - gfpa) - t * (d - gfpb));
}
void
EvaluateCubicAndDerivs(fastf_t a, fastf_t fa, fastf_t fpa,
fastf_t b, fastf_t fb, fastf_t fpb,
fastf_t u,
fastf_t *f, fastf_t *fp, fastf_t *fpp, fastf_t *fppp)
{
/* The "magic" matrix */
const mat_t magic = {2.0, -2.0, 1.0, 1.0,
-3.0, 3.0, -2.0, -1.0,
0.0, 0.0, 1.0, 0.0,
1.0, 0.0, 0.0, 0.0};
hvect_t coef, rhs;
fastf_t h, t;
HSET(rhs, fa, fb, fpa * (b - a), fpb * (b - a));
bn_matXvec(coef, magic, rhs);
/* CAUTION - these are not the coefficients of the cubic in the
* original coordinates. This is the cubic on [0,1] from the
* mapping t=(x-a)/(b-a). That is why the h terms appear in the
* derivatives
*/
h = 1.0/(b - a);
t = (u - a) * h;
if (f) { *f = coef[3] + t * (coef[2] + t * (coef[1] + t * coef[0])); }
if (fp) { *fp = h * (coef[2] + t * (2.0 * coef[1] + t * 3.0 * coef[0])); }
if (fpp) { *fpp = h * h * (2.0 * coef[1] + t * 6.0 * coef[0]); }
if (fppp) { *fppp = h * h * h * 6.0 * coef[0]; }
}
void
FMMspline(struct fortran_array *x, struct fortran_array *y, struct fortran_array *yp)
{
int i, n;
fastf_t deriv1, deriv2;
struct fortran_array *dx, *dy, *delta, *dd, *alpha, *beta, *sigma, *tmp;
/* y and yp must be at least this size */
n = SIZE(x);
if (n < 2) {
INDEX(yp, 1) = 0.0;
return;
}
ALLOCATE(dx, n - 1);
ALLOCATE(dy, n - 1);
ALLOCATE(delta, n - 1);
VSUB2N(F2C(dx), F2CI(dx, 2), F2C(x), n - 1);
VSUB2N(F2C(dy), F2CI(dy, 2), F2C(y), n - 1);
for (i = 1; i <= n; i++) {
INDEX(delta, i) = INDEX(dy, i) / INDEX(dx, i);
}
if (n == 2) {
INDEX(yp, 1) = INDEX(delta, 1);
INDEX(yp, 2) = INDEX(delta, 2);
DEALLOCATE(dx);
DEALLOCATE(dy);
DEALLOCATE(delta);
return;
}
ALLOCATE(dd, n - 2);
VSUB2N(F2C(dd), F2CI(delta, 2), F2C(delta), n - 2);
if (n == 3) {
deriv2 = INDEX(dd, 1) / (INDEX(x, 3) - INDEX(x, 1));
deriv1 = INDEX(delta, 1) - deriv2 * INDEX(dx, 1);
INDEX(yp, 1) = deriv1;
INDEX(yp, 2) = deriv1 + deriv2 * INDEX(dx, 1);
INDEX(yp, 3) = deriv1 + deriv2 * (INDEX(x, 3) - INDEX(x, 1));
DEALLOCATE(dx);
DEALLOCATE(dy);
DEALLOCATE(delta);
DEALLOCATE(dd);
}
/* This gets rid of the trivial cases n = 1, 2, 3. Assume from here on n > 3 */
ALLOCATE(alpha, n);
ALLOCATE(beta, n);
ALLOCATE(sigma, n);
ALLOCATE(tmp, n);
INDEX(alpha, 1) = -INDEX(dx, 1);
VADD2N(F2CI(alpha, 2), F2CI(dx, 1), F2CI(dx, 2), n - 2);
VSCALEN(F2CI(alpha, 2), F2CI(alpha, 2), 2.0, n - 2);
/* Serial loop, fwd elimination */
for (i = 2; i <= n - 1; i ++) {
INDEX(alpha, i) = INDEX(alpha, i) - INDEX(dx, i - 1) * INDEX(dx, i - 1) / INDEX(alpha, i - 1);
}
INDEX(alpha, n) = -INDEX(dx, n - 1) - INDEX(dx, n - 1) * INDEX(dx, n - 1) / INDEX(alpha, n - 1);
INDEX(beta, 1) = INDEX(dd, 2) / (INDEX(x, 4) - INDEX(x, 2)) - INDEX(dd, 1) / (INDEX(x, 3) - INDEX(x, 1));
INDEX(beta, 1) = INDEX(beta, 1) * INDEX(dx, 1) * INDEX(dx, 1) / (INDEX(x, 4) - INDEX(x, 1));
SLICE(tmp, beta, 2, n-1);
COPY(tmp, dd, 1, n - 2);
INDEX(beta, n) = INDEX(dd, n - 2) / (INDEX(x, n) - INDEX(x, n - 2)) - INDEX(dd, n - 3) / (INDEX(x, n - 1 ) - INDEX(x, n - 3));
INDEX(beta, n) = -INDEX(beta, n) * INDEX(dx, n - 1) * INDEX(dx, n - 1) / (INDEX(x, n) - INDEX(x, n - 3));
/* Serial loop, fwd elimination */
for (i = 2; i <= n; i++) {
INDEX(beta, i) = INDEX(beta, i) - INDEX(dx, i - 1) * INDEX(beta, i - 1) / INDEX(alpha, i - 1);
}
INDEX(sigma, n) = INDEX(beta, n) / INDEX(alpha, n);
/* Reverse order serial loop, back substitution */
for (i = n - 1; i <= 1; i--) {
INDEX(sigma, i) = (INDEX(beta, i) - INDEX(dx, i) * INDEX(sigma, i + 1)) / INDEX(alpha, i);
}
for (i = 1; i <= n - 1; i++) {
INDEX(yp, i) = INDEX(delta, i) - INDEX(dx, i) * (INDEX(sigma, i) + INDEX(sigma, i) + INDEX(sigma, i + 1));
}
INDEX(yp, n) = INDEX(yp, n - 1) + INDEX(dx, n - 1) * 3.0 * (INDEX(sigma, n) + INDEX(sigma, n - 1));
DEALLOCATE(dx);
DEALLOCATE(dy);
DEALLOCATE(delta);
DEALLOCATE(alpha);
DEALLOCATE(beta);
DEALLOCATE(sigma);
DEALLOCATE(dd);
return;
}
/**
* Compute the value of the interpolating polynomial thru x- and
* y-arrays at the x-value of u, using Lagrange's equation.
*
* @param[in] x Tables of coordinates
* @param[in] y Tables of coordinates
* @param[in] u value of x-coordinate for interpolation
*/
static double
InterpolatePolynomial(struct fortran_array *x, struct fortran_array *y, fastf_t u)
{
int n = SIZE(x);
struct fortran_array *du;
fastf_t sum = 0.0;
int i, j;
ALLOCATE(du, n);
VSETALLN(F2C(du), u, n);
VSUB2N(F2C(du), F2C(du), F2C(x), n);
for (j = 1; j <= n; j++) {
fastf_t fact = 1.0;
for (i = 1; i <= n; i++) {
if (i != j) {
fact = fact * INDEX(du, i) / (INDEX(x, j) - INDEX(x, i));
}
}
sum = sum + INDEX(y, j) * fact;
}
return sum;
}
/**
* Search a sorted (increasing) array to find the interval bounding a
* given number. If n is the size of the array a
* return 0 if number x is less than a(1)
* return n if x > a(n)
* return i if a(i) <= x < a(i+1)
* If x is exactly equal to a(n), return n
*
* @param[in] xtab Input array
* @param[in] x Input number
*/
static int
Lookup(struct fortran_array *xtab, fastf_t x)
{
int i;
int j, k, n;
n = SIZE(xtab);
if (n <= 0) {
return -1;
}
if (x < INDEX(xtab, 1)) {
return 0;
}
if (x > INDEX(xtab, n)) {
return n;
}
i = 1;
j = SIZE(xtab);
while (1) {
if (j <= i + 1) { break; }
k = (i + j)/2; /* Integer division */
if (x < INDEX(xtab, k)) {
j = k;
} else {
i = k;
}
}
return i;
}
void
PClookup(struct fortran_array *x, struct fortran_array *y, struct fortran_array *yp,
fastf_t u,
fastf_t *f, fastf_t *fp, fastf_t *fpp, fastf_t *fppp)
{
fastf_t ud, a, fa, fpa, b, fb, fpb;
int k;
k = Lookup(x, u);
V_MIN(k, SIZE(x)-1);
V_MAX(k, 1);
a = INDEX(x, k);
fa = INDEX(y, k);
fpa = INDEX(yp, k);
b = INDEX(x, k + 1);
fb = INDEX(y, k + 1);
fpb = INDEX(yp, k + 1);
ud = u;
EvaluateCubicAndDerivs(a, fa, fpa, b, fb, fpb, ud, f, fp, fpp, fppp);
}
void
SplineZero(struct fortran_array *x, struct fortran_array *f, struct fortran_array *fp,
fastf_t fbar, fastf_t tol,
fastf_t *xbar, int *errCode)
{
int k, n;
struct fortran_array *fLocal;
n = SIZE(x);
/* Look for an exact match. Could happen... */
for (k = 1; k <= n; k++) {
if (fabs(INDEX(f, k) - fbar) < tol) {
*xbar = INDEX(x, k);
*errCode = 0;
return;
}
}
ALLOCATE(fLocal, n);
VSETALLN(F2C(fLocal), fbar, n);
VSUB2N(F2C(fLocal), F2C(f), F2C(fLocal), n);
/* Look for a zero of fLocal */
for (k = 2; k <= n; k++) {
if (INDEX(fLocal, k - 1) * INDEX(fLocal, k) < 0.0) break;
}
if (k == n + 1) { /* No crossing could be found */
*errCode = 1;
DEALLOCATE(fLocal);
return;
}
*errCode = 0;
/* Set the global variables for EvaluateCubic */
ga = INDEX(x, k - 1);
gfa = INDEX(fLocal, k - 1);
gfpa = INDEX(fp, k - 1);
gb = INDEX(x, k);
gfb = INDEX(fLocal, k);
gfpb = INDEX(fp, k);
DEALLOCATE(fLocal);
*xbar = Zeroin(ga, gb, EvaluateCubic, tol);
}
fastf_t
TableLookup(struct fortran_array *x, struct fortran_array *y, int order, fastf_t u)
{
int j, m;
/* Used for the parameters to InterpolatePolynomial */
struct fortran_array *xp, *yp;
m = order + 1;
V_MIN(m, SIZE(x)); /* number of points used for interpolating poly */
j = Lookup(x, u);
j = j - (m / 2 - 1);
V_MIN(j, 1 + SIZE(x) - m); /* j + m - 1 must not exceed SIZE(x) */
V_MIN(j, 1); /* j must be positive */
/* Use points j through j + m - 1 for interpolation (m points) */
SLICE(xp, x, j, j + m - 1);
SLICE(yp, y, j, j + m - 1);
return InterpolatePolynomial(xp, yp, u);
}
/**
 * Compute a zero of f in the interval (ax, bx).
 *
 * ax and bx are the left and right endpoints of the interval; tol is
 * the desired interval of uncertainty.
 *
 * @param[in] ax left endpoint of interval
 * @param[in] bx right endpoint of interval
 * @param[in] tol desired interval of uncertainty
 */
static fastf_t
Zeroin(fastf_t ax, fastf_t bx, fastf_t (*f)(fastf_t x), fastf_t tol)
{
const int MAX_ITER = 500;
int k;
fastf_t a, b, c, eps, fa, fb, fc, tol1, xm, p, r, s, tmp;
fastf_t d = 0.0;
fastf_t e = 0.0;
fastf_t q = 0.0;
eps = SMALL_FASTF;
a = ax; /* initialization */
b = bx;
fa = f(a);
fb = f(b); /* should test that fa and fb have opposite signs */
c = b;
fc = fb;
e = b - a;
for (k = 1; k <= MAX_ITER; k++) { /* begin iteration */
if (((fb > 0.0) && (fc > 0.0)) || ((fb < 0.0 && fc < 0.0))) {
c = a;
fc = fa;
d = b - a;
e = d;
}
if (fabs(fc) < fabs(fb)) {
a = b;
b = c;
c = a;
fa = fb;
fb = fc;
fc = fa;
}
tol1 = 2.0 * eps * fabs(b) + 0.5 * tol; /* convergence test */
xm = 0.5 * (c - b);
if ((fabs(xm) <= tol1) || (NEAR_ZERO(fb, tol))) {
return b;
}
/* Is bisection necessary */
if ((fabs(e) <= tol1) && (fabs(fa) > fabs(fb))) {
s = fb / fa; /* is quadratic interpolation possible? */
if (NEAR_EQUAL(a, c, tol)) {
s = fb / fa; /* Use linear interpolation */
p = 2.0 * xm * s;
q = 1.0 - s;
} else {
q = fa / fc; /* Use inverse quadratic interpolation */
r = fb / fc;
s = fb / fa;
p = s * (2.0 * xm * q * (q - r) - (b - a) * (r - 1.0));
q = (q - 1.0) * (r - 1.0) * (s - 1.0);
}
if (p > 0.0) { q = -q; } /* adjust signs */
p = fabs(p);
tmp = 3.0 * xm * q - fabs(tol1 * q);
V_MIN(tmp, fabs(e * q));
if (p + p < tmp) {
e = d; /* Use interpolation */
d = p / q;
} else {
d = xm; /* Use bisection */
e = d;
}
} else {
d = xm; /* Use bisection */
e = d;
}
a = b;
fa = fb;
if (fabs(d) > tol1) {
b = b + d;
} else {
b = b + SIGN(tol1, xm);
}
fb = f(b);
}
return b; /* but this is a bad return. Max iterations exceeded */
}
/*
* Local Variables:
* tab-width: 8
* mode: C
* indent-tabs-mode: t
* c-file-style: "stroustrup"
* End:
* ex: shiftwidth=4 tabstop=8
*/
|
amlodzianowski/python-terrascript | terrascript/dme/__init__.py | <reponame>amlodzianowski/python-terrascript
# terrascript/dme/__init__.py
import terrascript
class dme(terrascript.Provider):
pass
|
hpi-dhc/alps | frontend/src/actions/app.js | import {
APP_SET_TITLE
} from '../constants/ActionTypes'
export const setAppTitle = (title) => ({
type: APP_SET_TITLE,
title
})
|
Batterii/appengine-mapreduce | java/src/main/java/com/google/appengine/tools/mapreduce/MapJob.java | <reponame>Batterii/appengine-mapreduce
// Copyright 2014 Google Inc. All Rights Reserved.
package com.google.appengine.tools.mapreduce;
import com.google.appengine.tools.mapreduce.impl.BaseContext;
import com.google.appengine.tools.mapreduce.impl.CountersImpl;
import com.google.appengine.tools.mapreduce.impl.MapOnlyShardTask;
import com.google.appengine.tools.mapreduce.impl.WorkerController;
import com.google.appengine.tools.mapreduce.impl.WorkerShardTask;
import com.google.appengine.tools.mapreduce.impl.pipeline.ExamineStatusAndReturnResult;
import com.google.appengine.tools.mapreduce.impl.pipeline.ResultAndStatus;
import com.google.appengine.tools.mapreduce.impl.pipeline.ShardedJob;
import com.google.appengine.tools.mapreduce.impl.shardedjob.ShardedJobServiceFactory;
import com.google.appengine.tools.mapreduce.impl.shardedjob.ShardedJobSettings;
import com.google.appengine.tools.pipeline.FutureValue;
import com.google.appengine.tools.pipeline.Job0;
import com.google.appengine.tools.pipeline.JobSetting;
import com.google.appengine.tools.pipeline.PipelineService;
import com.google.appengine.tools.pipeline.PipelineServiceFactory;
import com.google.appengine.tools.pipeline.PromisedValue;
import com.google.appengine.tools.pipeline.Value;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.CancellationException;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * A Pipeline job that runs a map job.
 *
 * @param <I> type of input values
 * @param <O> type of output values
 * @param <R> type of final result
 */
public class MapJob<I, O, R> extends Job0<MapReduceResult<R>> {
private static final long serialVersionUID = 723635736794527552L;
private static final Logger log = Logger.getLogger(MapJob.class.getName());
private final MapSpecification<I, O, R> specification;
private final MapSettings settings;
public MapJob(MapSpecification<I, O, R> specification, MapSettings settings) {
this.specification = specification;
this.settings = settings;
}
/**
* Starts a {@link MapJob} with the given parameters in a new Pipeline.
* Returns the pipeline id.
*/
public static <I, O, R> String start(MapSpecification<I, O, R> specification,
MapSettings settings) {
if (settings.getWorkerQueueName() == null) {
settings = new MapSettings.Builder(settings).setWorkerQueueName("default").build();
}
PipelineService pipelineService = PipelineServiceFactory.newPipelineService();
return pipelineService.startNewPipeline(
new MapJob<>(specification, settings), settings.toJobSettings());
}
@Override
public Value<MapReduceResult<R>> run() {
MapSettings settings = this.settings;
if (settings.getWorkerQueueName() == null) {
String queue = getOnQueue();
if (queue == null) {
log.warning("workerQueueName is null and current queue is not available in the pipeline"
+ " job, using 'default'");
queue = "default";
}
settings = new MapReduceSettings.Builder().setWorkerQueueName(queue).build();
}
String jobId = getJobKey().getName();
Context context = new BaseContext(jobId);
Input<I> input = specification.getInput();
input.setContext(context);
List<? extends InputReader<I>> readers;
try {
readers = input.createReaders();
} catch (IOException e) {
throw new RuntimeException(e);
}
Output<O, R> output = specification.getOutput();
output.setContext(context);
List<? extends OutputWriter<O>> writers = output.createWriters(readers.size());
Preconditions.checkState(readers.size() == writers.size(), "%s: %s readers, %s writers",
jobId, readers.size(), writers.size());
ImmutableList.Builder<WorkerShardTask<I, O, MapOnlyMapperContext<O>>> mapTasks =
ImmutableList.builder();
for (int i = 0; i < readers.size(); i++) {
mapTasks.add(new MapOnlyShardTask<>(jobId, i, readers.size(), readers.get(i),
specification.getMapper(), writers.get(i), settings.getMillisPerSlice()));
}
ShardedJobSettings shardedJobSettings = settings.toShardedJobSettings(jobId, getPipelineKey());
PromisedValue<ResultAndStatus<R>> resultAndStatus = newPromise();
WorkerController<I, O, R, MapOnlyMapperContext<O>> workerController = new WorkerController<>(
jobId, new CountersImpl(), output, resultAndStatus.getHandle());
ShardedJob<?> shardedJob =
new ShardedJob<>(jobId, mapTasks.build(), workerController, shardedJobSettings);
FutureValue<Void> shardedJobResult = futureCall(shardedJob, settings.toJobSettings());
JobSetting[] jobSetting = settings.toJobSettings(waitFor(shardedJobResult),
statusConsoleUrl(shardedJobSettings.getMapReduceStatusUrl()), maxAttempts(1));
return futureCall(new ExamineStatusAndReturnResult<R>(jobId), resultAndStatus, jobSetting);
}
/**
* @param ex The cancellation exception
*/
public Value<MapReduceResult<R>> handleException(CancellationException ex) {
String mrJobId = getJobKey().getName();
ShardedJobServiceFactory.getShardedJobService().abortJob(mrJobId);
return null;
}
public Value<MapReduceResult<R>> handleException(Throwable t) throws Throwable {
log.log(Level.SEVERE, "MapJob failed because of: ", t);
throw t;
}
@Override
public String getJobDisplayName() {
return Optional.fromNullable(specification.getJobName()).or(super.getJobDisplayName());
}
}
|
bren-kam/wav-react | src/services/UserService.js | import config from '../config/ApiConfig';
import { getAsync, postAsync } from '../helpers/RequestHelper';
import authStorage from "../storage/AuthStorage";
const UserService = {
loadUser
};
function loadUser(userId) {
return getAsync({
url: `${config.apiHost}/api/v1/getUser`,
headers: getHeaders()
});
}
function getHeaders() {
return { 'x-key': authStorage.getLoggedUser().username };
}
export default UserService |
izeki/Autoware | ros/src/computing/planning/decision/libs/libstate/include/state_flags.hpp | #ifndef __STATE_FLAGS_HPP__
#define __STATE_FLAGS_HPP__
namespace state_machine
{
enum StateFlags
{
NULL_STATE = 1ULL << 0,
START_STATE = 1ULL << 1,
INITIAL_STATE = 1ULL << 2,
INITIAL_LOCATEVEHICLE_STATE = 1ULL << 3,
DRIVE_STATE = 1ULL << 4,
MISSION_COMPLETE_STATE = 1ULL << 5,
EMERGENCY_STATE = 1ULL << 6,
DRIVE_SUB_STATE_START = 1ULL << 10,
DRIVE_ACC_ACCELERATION_STATE = 1ULL << 11,
DRIVE_ACC_DECELERATION_STATE = 1ULL << 12,
DRIVE_ACC_KEEP_STATE = 1ULL << 13,
DRIVE_ACC_STOP_STATE = 1ULL << 14,
DRIVE_ACC_STOPLINE_STATE = 1ULL << 15,
DRIVE_ACC_CRAWL_STATE = 1ULL << 16,
DRIVE_STR_STRAIGHT_STATE = 1ULL << 21,
DRIVE_STR_LEFT_STATE = 1ULL << 22,
DRIVE_STR_RIGHT_STATE = 1ULL << 23,
DRIVE_BEHAVIOR_LANECHANGE_LEFT_STATE = 1ULL << 31,
DRIVE_BEHAVIOR_LANECHANGE_RIGHT_STATE = 1ULL << 32,
DRIVE_BEHAVIOR_OBSTACLE_AVOIDANCE_STATE = 1ULL << 33,
DRIVE_BEHAVIOR_TRAFFICLIGHT_RED_STATE = 1ULL << 34,
DRIVE_BEHAVIOR_TRAFFICLIGHT_GREEN_STATE = 1ULL << 35,
DRIVE_BEHAVIOR_STOPLINE_PLAN_STATE = 1ULL << 36,
DRIVE_BEHAVIOR_ACCEPT_LANECHANGE_STATE = 1ULL << 37,
STATE_SUB_END = 1ULL << 40,
STATE_END = 1ULL << 41,
};
enum StateKinds
{
MAIN_STATE = 1,
ACC_STATE = 2,
STR_STATE = 3,
BEHAVIOR_STATE = 4,
PERCEPTION_STATE = 5,
OTHER_STATE = 6,
UNKNOWN_STATE = 7,
};
enum TrafficLightColors
{
E_RED = 0,
E_YELLOW = 0,
E_GREEN = 1,
E_COLOR_ERROR = 2
};
}
#endif
|
JustinACoder/H22-GR3-UnrealAI | Plugins/UnrealEnginePython/Binaries/Win64/Lib/site-packages/tensorflow/contrib/distribute/python/prefetching_ops_v2.py | <reponame>JustinACoder/H22-GR3-UnrealAI<gh_stars>1-10
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Extension of prefetching_ops to support more than one device."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import warnings
from tensorflow.python.data.experimental.ops import prefetching_ops
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.ops import iterator_ops
from tensorflow.python.data.util import nest as data_nest
from tensorflow.python.data.util import sparse
from tensorflow.python.eager import context
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import function
from tensorflow.python.framework import ops
from tensorflow.python.ops import gen_experimental_dataset_ops as ged_ops
from tensorflow.python.util import nest
# pylint: disable=protected-access
class _PrefetchToDeviceIterator(object):
"""A replacement for `tf.data.Iterator` that prefetches to another device.
Args:
input_dataset: The input dataset.
one_shot: If true, we make a one shot iterator that's already initialized.
devices: Devices on which to prefetch.
buffer_size: Size of the prefetching buffer.
shared_name: (Optional.) If non-empty, the returned iterator will be shared
under the given name across multiple sessions that share the same devices
(e.g. when using a remote server). Only used if one_shot is False.
Returns:
An Iterator type object.
"""
def __init__(self,
input_dataset,
one_shot,
devices,
buffer_size,
shared_name=None):
self._input_dataset = input_dataset
self._get_next_call_count = 0
self._one_shot = one_shot
if shared_name is None:
shared_name = ""
self._devices = devices
if self._one_shot:
self._input_iterator = input_dataset.make_one_shot_iterator()
else:
self._input_iterator = iterator_ops.Iterator.from_structure(
self._input_dataset.output_types, self._input_dataset.output_shapes,
shared_name, self._input_dataset.output_classes)
input_iterator_handle = self._input_iterator.string_handle()
@function.Defun(dtypes.string)
def _prefetch_fn(handle):
"""Prefetches one element from `input_iterator`."""
remote_iterator = iterator_ops.Iterator.from_string_handle(
handle, self._input_iterator.output_types,
self._input_iterator.output_shapes,
self._input_iterator.output_classes)
ret = remote_iterator.get_next()
return nest.flatten(sparse.serialize_sparse_tensors(ret))
target_device = ged_ops.experimental_iterator_get_device(
self._input_iterator._iterator_resource)
self._buffering_resources = []
for device in nest.flatten(self._devices):
with ops.device(device):
buffer_resource_handle = prefetching_ops.function_buffering_resource(
f=_prefetch_fn,
output_types=data_nest.flatten(
sparse.as_dense_types(self._input_dataset.output_types,
self._input_dataset.output_classes)),
target_device=target_device,
string_arg=input_iterator_handle,
buffer_size=buffer_size,
shared_name=shared_name)
self._buffering_resources.append(buffer_resource_handle)
if not self._one_shot:
reset_ops = []
for buffer_resource in self._buffering_resources:
reset_ops.append(
ged_ops.experimental_function_buffering_resource_reset(
buffer_resource))
with ops.control_dependencies(reset_ops):
self._initializer = self._input_iterator.make_initializer(
self._input_dataset)
def get_next(self, name=None):
"""See `tf.data.Iterator.get_next`."""
self._get_next_call_count += 1
if self._get_next_call_count > iterator_ops.GET_NEXT_CALL_WARNING_THRESHOLD:
warnings.warn(iterator_ops.GET_NEXT_CALL_WARNING_MESSAGE)
flat_result = []
# TODO(priyag): This will fail if the input size (typically number of
# batches) is not divisible by number of devices.
# How do we handle that more gracefully / let the user know?
for buffer_resource in self._buffering_resources:
flat_ret = ged_ops.experimental_function_buffering_resource_get_next(
buffer_resource,
output_types=data_nest.flatten(
sparse.as_dense_types(self.output_types, self.output_classes)),
name=name)
ret = sparse.deserialize_sparse_tensors(
data_nest.pack_sequence_as(self.output_types, flat_ret),
self.output_types, self.output_shapes, self.output_classes)
for tensor, shape in zip(
data_nest.flatten(ret), data_nest.flatten(self.output_shapes)):
if isinstance(tensor, ops.Tensor):
tensor.set_shape(shape)
flat_result.append(ret)
return nest.pack_sequence_as(self._devices, flat_result)
@property
def initializer(self):
if self._one_shot:
raise NotImplementedError("Can't initialize a one_shot_iterator")
return self._initializer
@property
def output_classes(self):
return self._input_dataset.output_classes
@property
def output_shapes(self):
return self._input_dataset.output_shapes
@property
def output_types(self):
return self._input_dataset.output_types
# pylint: enable=protected-access
class _PrefetchToDeviceDataset(dataset_ops.UnaryDataset):
"""A `Dataset` whose iterator prefetches elements to other device(s)."""
def __init__(self, input_dataset, devices, buffer_size):
super(_PrefetchToDeviceDataset, self).__init__(input_dataset)
self._input_dataset = input_dataset
self._devices = devices
self._buffer_size = buffer_size if buffer_size is not None else 1
def make_one_shot_iterator(self):
return _PrefetchToDeviceIterator(
self._input_dataset,
one_shot=True,
devices=self._devices,
buffer_size=self._buffer_size)
def make_initializable_iterator(self, shared_name=None):
if context.executing_eagerly():
raise RuntimeError(
"make_initializable_iterator is not supported when eager "
"execution is enabled.")
return _PrefetchToDeviceIterator(
self._input_dataset,
one_shot=False,
devices=self._devices,
buffer_size=self._buffer_size,
shared_name=shared_name)
def _as_variant_tensor(self):
# TODO(mrry): Raise this error earlier (e.g. when one of the Dataset
# transformation methods is called.
# TODO(mrry): Investigate support for chaining further transformations after
# the prefetch, including GPU support.
raise NotImplementedError("`prefetch_to_devices()` must be the last "
"transformation in a dataset pipeline.")
# TODO(priyag): Fix the output types, shapes and classes to match the result
# of get_next (which has the additional nesting layer of devices now).
@property
def output_types(self):
return self._input_dataset.output_types
@property
def output_shapes(self):
return self._input_dataset.output_shapes
@property
def output_classes(self):
return self._input_dataset.output_classes
def prefetch_to_devices(devices, buffer_size=None):
"""A transformation that prefetches dataset values to the given `devices`.
NOTE: Although the transformation creates a `tf.data.Dataset`, the
transformation must be the final `Dataset` in the input pipeline.
Args:
devices: A nested structure of devices on which to prefetch the data. It can
be a single device name, or a tuple or list of device names.
buffer_size: (Optional.) The number of elements to buffer on each device.
Defaults to an automatically chosen value.
Returns:
A `Dataset` transformation function, which can be passed to
`tf.data.Dataset.apply`.
"""
def _apply_fn(dataset):
return _PrefetchToDeviceDataset(dataset, devices, buffer_size)
return _apply_fn
|
twilightgod/twilight-poj-solution | 3044/5317826_AC_16MS_216K.cpp | <reponame>twilightgod/twilight-poj-solution<filename>3044/5317826_AC_16MS_216K.cpp
/*******************************************************************************
* Online Judge : POJ
* Problem Title : City Skyline
* Problem URL : http://acm.pku.edu.cn/JudgeOnline/problem?id=3044
* ID : 3044
* Date : 6/21/2009
* Time : 19:38:14
* Computer Name : EVERLASTING-PC
* Wizard Version : 20090522P
*******************************************************************************/
#include<iostream>
#include<stack>
using namespace std;
int n,w,x,y,ans;
stack<int> s;
int main()
{
#ifndef ONLINE_JUDGE
freopen("in_3044.txt","r",stdin);
#endif
s.push(0);
ans=0;
scanf("%d%d",&n,&w);
while(n--)
{
scanf("%d%d",&x,&y);
while(s.top()>y)
{
ans++;
s.pop();
}
if(s.top()!=y)
{
s.push(y);
}
}
ans+=s.size()-1;
printf("%d\n",ans);
return 0;
}
|
ehdoq/java101 | AtmSwitchCase.java | <reponame>ehdoq/java101
import java.util.Scanner;
public class AtmSwitchCase
{
public static void main(String[] args)
{
String kullanici_adi, sifre;
Scanner giris = new Scanner(System.in);
int kalan_hak = 3;
int hesaptaki_para = 1500;
int islem;
while(kalan_hak > 0)
{
System.out.print("Kullanıcı Adınız :");
kullanici_adi = giris.nextLine();
System.out.print("Parolanız : ");
sifre = giris.nextLine();
if (kullanici_adi.equals("mustafa") && sifre.equals("abc123"))
{
System.out.println("Merhaba, Kodluyoruz Bankasına Hoşgeldiniz!");
do
{
System.out.println("1-Para yatırma\n"
+ "2-Para Çekme\n"
+ "3-Bakiye Sorgula\n"
+ "4-Çıkış Yap");
System.out.print("Lütfen yapmak istediğiniz işlemi seçiniz : ");
islem = giris.nextInt();
switch(islem)
{
case 1:
System.out.print("Yatırmak isteğiniz para miktarını yazın : ");
int yatirilan_para_miktari = giris.nextInt();
hesaptaki_para += yatirilan_para_miktari;
break;
case 2:
System.out.print("Çekmek istediğiniz para miktarı yazın : ");
int cekilen_para_miktari = giris.nextInt();
if (cekilen_para_miktari > hesaptaki_para)
{
System.out.println("Bakiye yetersiz.");
}
else
{
hesaptaki_para -= cekilen_para_miktari;
}
break;
case 3:
System.out.println("Hesap bakiyeniz : " + hesaptaki_para);
break;
}
}
while(islem != 4);
{
System.out.println("Tekrar görüşmek üzere.");
break;
}
}
else
{
kalan_hak--;
System.out.println("Hatalı kullanıcı adı veya şifre. Tekrar deneyiniz.");
if (kalan_hak == 0)
{
System.out.println("Hesabınız bloke olmuştur lütfen banka ile iletişime geçiniz.");
}
else
{
System.out.println("Kalan Hakkınız : " + kalan_hak);
}
}
}
}
}
|
LiShanwenGit/Qdriver | src/arch/arm/mach-stm32f10x/drivers/stm32f10x-gpio.c | <filename>src/arch/arm/mach-stm32f10x/drivers/stm32f10x-gpio.c
#include "gpio-core.h"
#include "stm32f10x.h"
// #define GPIOA_BASE (APB2PERIPH_BASE + 0x0800)
// #define GPIOB_BASE (APB2PERIPH_BASE + 0x0C00)
// #define GPIOC_BASE (APB2PERIPH_BASE + 0x1000)
// #define GPIOD_BASE (APB2PERIPH_BASE + 0x1400)
// #define GPIOE_BASE (APB2PERIPH_BASE + 0x1800)
// #define GPIOF_BASE (APB2PERIPH_BASE + 0x1C00)
// #define GPIOG_BASE (APB2PERIPH_BASE + 0x2000)
static int8_t stm32f10x_gpio_get(gpio_desp_t *gpio_desp)
{
RCC->APB2ENR |= (1<<(GPIO_NUM2PORT(gpio_desp->pin_num)+2)); //enable the gpio clock
*(uint64_t*)(GPIOA_BASE + (GPIO_NUM2PORT(gpio_desp->pin_num))*0x400) &= ~(0xff<<(GPIO_NUM2PIN(gpio_desp->pin_num)*4));
*(uint64_t*)(GPIOA_BASE + (GPIO_NUM2PORT(gpio_desp->pin_num))*0x400) |= ((0x03)<<(GPIO_NUM2PIN(gpio_desp->pin_num)*4));
gpio_desp->ctl->ops.set_mode(gpio_desp,gpio_desp->mode);
}
static void stm32f10x_gpio_set_mode(gpio_desp_t *gpio_desp, gpio_mode_t mode)
{
gpio_desp->mode = mode;
*(uint64_t*)(GPIOA_BASE + (GPIO_NUM2PORT(gpio_desp->pin_num))*0x400) &= ~(0xff<<(GPIO_NUM2PIN(gpio_desp->pin_num)*4));
switch(mode)
{
case GPIO_INPUT:break;
case GPIO_OUT_PP:
*(uint64_t*)(GPIOA_BASE + (GPIO_NUM2PORT(gpio_desp->pin_num))*0x400) |= ((0x03)<<(GPIO_NUM2PIN(gpio_desp->pin_num)*4));
break;
case GPIO_OUT_OD:
*(uint64_t*)(GPIOA_BASE + (GPIO_NUM2PORT(gpio_desp->pin_num))*0x400) |= ((0x07)<<(GPIO_NUM2PIN(gpio_desp->pin_num)*4));
break;
case GPIO_OUT_FLEX_PP:
*(uint64_t*)(GPIOA_BASE + (GPIO_NUM2PORT(gpio_desp->pin_num))*0x400) |= ((0xB)<<(GPIO_NUM2PIN(gpio_desp->pin_num)*4));
break;
case GPIO_OUT_FLEX_OD:
*(uint64_t*)(GPIOA_BASE + (GPIO_NUM2PORT(gpio_desp->pin_num))*0x400) |= ((0xF)<<(GPIO_NUM2PIN(gpio_desp->pin_num)*4));
break;
case GPIO_OUT_PP_UP:
*(uint64_t*)(GPIOA_BASE + (GPIO_NUM2PORT(gpio_desp->pin_num))*0x400) |= ((0x03)<<(GPIO_NUM2PIN(gpio_desp->pin_num)*4));
*(uint32_t*)(GPIOA_BASE + 0x0C + (GPIO_NUM2PORT(gpio_desp->pin_num))*0x400) |= (1<<GPIO_NUM2PIN(gpio_desp->pin_num));
break;
case GPIO_OUT_PP_DOWN:
*(uint64_t*)(GPIOA_BASE + (GPIO_NUM2PORT(gpio_desp->pin_num))*0x400) |= ((0x03)<<(GPIO_NUM2PIN(gpio_desp->pin_num)*4));
*(uint32_t*)(GPIOA_BASE + 0x0C + (GPIO_NUM2PORT(gpio_desp->pin_num))*0x400) &= ~(1<<GPIO_NUM2PIN(gpio_desp->pin_num));
break;
default:break;
}
if(gpio_desp->flag)
{
*(uint32_t*)(GPIOA_BASE + 0x0C + (GPIO_NUM2PORT(gpio_desp->pin_num))*0x400) |= (1<<GPIO_NUM2PIN(gpio_desp->pin_num));
}
else
{
*(uint32_t*)(GPIOA_BASE + 0x0C + (GPIO_NUM2PORT(gpio_desp->pin_num))*0x400) &= ~(1<<GPIO_NUM2PIN(gpio_desp->pin_num));
}
}
static void stm32f10x_gpio_set_value(gpio_desp_t *gpio_desp, uint8_t value)
{
if(value)
{
*((volatile uint32_t*)(GPIOA_BASE + 0x10 + (GPIO_NUM2PORT(gpio_desp->pin_num))*0x400)) |= (1<<GPIO_NUM2PIN(gpio_desp->pin_num));
}
else
{
*((volatile uint32_t*)(GPIOA_BASE + 0x14 + (GPIO_NUM2PORT(gpio_desp->pin_num))*0x400)) |= (1<<GPIO_NUM2PIN(gpio_desp->pin_num));
}
}
static void stm32f10x_gpio_set_bit(gpio_desp_t *gpio_desp)
{
*((volatile uint32_t*)(GPIOA_BASE + 0x10 + (GPIO_NUM2PORT(gpio_desp->pin_num))*0x400)) |= (1<<GPIO_NUM2PIN(gpio_desp->pin_num));
}
static void stm32f10x_gpio_reset_bit(gpio_desp_t *gpio_desp)
{
*((volatile uint32_t*)(GPIOA_BASE + 0x14 + (GPIO_NUM2PORT(gpio_desp->pin_num))*0x400)) |= (1<<GPIO_NUM2PIN(gpio_desp->pin_num));
}
static uint32_t stm32f10x_gpio_get_value(gpio_desp_t *gpio_desp)
{
return (*(uint32_t*)(GPIOA_BASE + 0x08 + (GPIO_NUM2PORT(gpio_desp->pin_num))*0x400))&(1<<GPIO_NUM2PIN(gpio_desp->pin_num));
}
static void stm32f10x_gpio_put(gpio_desp_t *gpio_desp)
{
}
gpio_controller_t stm32f10x_gpio_ctrl =
{
.ops = {
.get = stm32f10x_gpio_get,
.get_value = stm32f10x_gpio_get_value,
.put = stm32f10x_gpio_put,
.set_mode = stm32f10x_gpio_set_mode,
.set_value = stm32f10x_gpio_set_value,
.set_bit = stm32f10x_gpio_set_bit,
.reset_bit = stm32f10x_gpio_reset_bit,
},
};
MACH_GPIO_ADD(stm32f10x_gpio_ctrl);
|
Testtaccount/TamaAndroid | Tama_app/src/main/java/com/tama/chat/ui/adapters/search/FindARetailersAdapter.java | package com.tama.chat.ui.adapters.search;
import android.support.v7.widget.CardView;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Filter;
import android.widget.Filterable;
import android.widget.TextView;
import com.tama.chat.R;
import com.tama.chat.tamaAccount.entry.findARetailerPojos.RetailerResult;
import com.tama.chat.ui.adapters.search.FindARetailersAdapter.RetailersHolder;
import java.util.ArrayList;
import java.util.List;
/**
 * RecyclerView adapter that shows retailer search results and supports
 * text filtering by shop name.
 *
 * Bug fix: once a filter has run, {@code publishResults} rebinds
 * {@code mRetailerResults} to the filtered list, so the old
 * {@code setList} updated only the filtered list and left
 * {@code filterList} (the filter's source) stale — subsequent filtering
 * then operated on outdated data.  {@code setList} now keeps both lists
 * in sync.  Also removed a leftover debug log in onBindViewHolder and a
 * redundant empty-check in getItemCount.
 */
public class FindARetailersAdapter extends RecyclerView.Adapter<RetailersHolder> implements
    Filterable {

    /** Items currently displayed (may be a filtered subset). */
    private ArrayList<RetailerResult> mRetailerResults;
    /** Unfiltered backing list; the filter's source of truth. */
    private ArrayList<RetailerResult> filterList;
    private OnItemClickListener itemClickListener;
    private CustomFilter filter;

    public FindARetailersAdapter(ArrayList<RetailerResult> mRetailerResults,
        OnItemClickListener itemClickListener) {
        this.itemClickListener = itemClickListener;
        this.mRetailerResults = mRetailerResults;
        // Initially both references point at the same list; they diverge
        // the first time a non-empty filter publishes results.
        this.filterList = mRetailerResults;
    }

    /** @return true when no results are currently displayed. */
    public boolean isEmpty() {
        return mRetailerResults.isEmpty();
    }

    @Override
    public RetailersHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        // Inflate the per-row layout for a retailer result.
        LayoutInflater inflater = LayoutInflater.from(parent.getContext());
        View view = inflater.inflate(R.layout.item_search_retailer, parent, false);
        return new RetailersHolder(view, itemClickListener);
    }

    @Override
    public void onBindViewHolder(RetailersHolder holder, int position) {
        holder.bindData(mRetailerResults.get(position));
    }

    @Override
    public void onAttachedToRecyclerView(RecyclerView recyclerView) {
        super.onAttachedToRecyclerView(recyclerView);
    }

    @Override
    public int getItemCount() {
        // size() already returns 0 for an empty list; no branch needed.
        return mRetailerResults.size();
    }

    /** Lazily create and return the shop-name filter. */
    @Override
    public Filter getFilter() {
        if (filter == null) {
            filter = new CustomFilter(filterList, this);
        }
        return filter;
    }

    /**
     * Replace the adapter contents with {@code list}.
     * Updates the displayed list and, when it has diverged from the
     * filter source, the filter source as well (same object identity is
     * preserved so the existing CustomFilter keeps seeing updates).
     */
    public void setList(List<RetailerResult> list) {
        if (list != null) {
            mRetailerResults.clear();
            mRetailerResults.addAll(list);
            if (filterList != mRetailerResults) {
                filterList.clear();
                filterList.addAll(list);
            }
        }
        notifyDataSetChanged();
    }

    /**
     * Filter implementation matching shop names case-insensitively
     * against the typed constraint.
     */
    public class CustomFilter extends Filter {

        FindARetailersAdapter adapter;
        ArrayList<RetailerResult> filterList;

        public CustomFilter(ArrayList<RetailerResult> filterList, FindARetailersAdapter adapter) {
            this.adapter = adapter;
            this.filterList = filterList;
        }

        @Override
        protected FilterResults performFiltering(CharSequence constraint) {
            FilterResults results = new FilterResults();
            if (constraint != null && constraint.length() > 0) {
                // Case-insensitive substring match on the shop name.
                constraint = constraint.toString().toUpperCase();
                ArrayList<RetailerResult> filterSpinnerItems = new ArrayList<>();
                for (int i = 0; i < filterList.size(); i++) {
                    if (filterList.get(i).getShopName().toUpperCase().contains(constraint)) {
                        filterSpinnerItems.add(filterList.get(i));
                    }
                }
                results.count = filterSpinnerItems.size();
                results.values = filterSpinnerItems;
            } else {
                // Empty constraint: show the full, unfiltered list.
                results.count = filterList.size();
                results.values = filterList;
            }
            return results;
        }

        @Override
        @SuppressWarnings("unchecked")
        protected void publishResults(CharSequence constraint, FilterResults results) {
            adapter.mRetailerResults = (ArrayList<RetailerResult>) results.values;
            adapter.notifyDataSetChanged();
        }
    }

    /**
     * View holder for one retailer row; forwards row clicks to the
     * registered {@link OnItemClickListener}.
     */
    public class RetailersHolder extends RecyclerView.ViewHolder implements View.OnClickListener {

        private CardView rootCv;
        private TextView shopNameTv;
        private TextView shopAddressTv;
        public RetailerResult mRetailerResult;
        private OnItemClickListener itemClickListener;

        public RetailersHolder(View itemView, OnItemClickListener itemClickListener) {
            super(itemView);
            this.itemClickListener = itemClickListener;
            findViews(itemView);
        }

        void findViews(View view) {
            rootCv = (CardView) view.findViewById(R.id.item_search_retailer_root);
            rootCv.setOnClickListener(this);
            shopNameTv = (TextView) view.findViewById(R.id.tv_shop_name_label);
            shopAddressTv = (TextView) view.findViewById(R.id.tv_shop_address_label);
        }

        /** Bind one result's name and address into the row views. */
        public void bindData(RetailerResult retailerResult) {
            this.mRetailerResult = retailerResult;
            shopNameTv.setText(retailerResult.getShopName());
            shopAddressTv.setText(retailerResult.getShopAddress());
        }

        private void notifyItemClicked() {
            if (itemClickListener != null) {
                itemClickListener.onItemClick(mRetailerResult);
            }
        }

        @Override
        public void onClick(View v) {
            switch (v.getId()) {
                case R.id.item_search_retailer_root:
                    notifyItemClicked();
                    break;
            }
        }
    }

    /** Row-click callback. */
    public interface OnItemClickListener {

        void onItemClick(RetailerResult retailerResult);
    }
}
|
attenuation/srs | trunk/3rdparty/srs-bench/vendor/github.com/pion/turn/v2/internal/server/errors.go | <filename>trunk/3rdparty/srs-bench/vendor/github.com/pion/turn/v2/internal/server/errors.go
package server
import "errors"
// Sentinel errors returned by the TURN server's STUN/TURN request
// handlers.  Declared once so call sites can compare by identity
// (== / errors.Is) instead of matching on message text.
var (
	errFailedToGenerateNonce                  = errors.New("failed to generate nonce")
	errFailedToSendError                      = errors.New("failed to send error message")
	errDuplicatedNonce                        = errors.New("duplicated Nonce generated, discarding request")
	errNoSuchUser                             = errors.New("no such user exists")
	errUnexpectedClass                        = errors.New("unexpected class")
	errUnexpectedMethod                       = errors.New("unexpected method")
	errFailedToHandle                         = errors.New("failed to handle")
	errUnhandledSTUNPacket                    = errors.New("unhandled STUN packet")
	errUnableToHandleChannelData              = errors.New("unable to handle ChannelData")
	errFailedToCreateSTUNPacket               = errors.New("failed to create stun message from packet")
	errFailedToCreateChannelData              = errors.New("failed to create channel data from packet")
	errRelayAlreadyAllocatedForFiveTuple      = errors.New("relay already allocated for 5-TUPLE")
	errRequestedTransportMustBeUDP            = errors.New("RequestedTransport must be UDP")
	errNoDontFragmentSupport                  = errors.New("no support for DONT-FRAGMENT")
	errRequestWithReservationTokenAndEvenPort = errors.New("Request must not contain RESERVATION-TOKEN and EVEN-PORT")
	errNoAllocationFound                      = errors.New("no allocation found")
	errNoPermission                           = errors.New("unable to handle send-indication, no permission added")
	errShortWrite                             = errors.New("packet write smaller than packet")
	errNoSuchChannelBind                      = errors.New("no such channel bind")
	errFailedWriteSocket                      = errors.New("failed writing to socket")
)
|
viewdy/phantomjs | src/qt/qtwebkit/Source/WebCore/html/HTMLFormControlElement.h | /*
* Copyright (C) 1999 <NAME> (<EMAIL>)
* (C) 1999 <NAME> (<EMAIL>)
* (C) 2000 <NAME> <EMAIL>)
* Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010 Apple Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this library; see the file COPYING.LIB. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifndef HTMLFormControlElement_h
#define HTMLFormControlElement_h
#include "FormAssociatedElement.h"
#include "LabelableElement.h"
namespace WebCore {
class FormDataList;
class HTMLFieldSetElement;
class HTMLFormElement;
class HTMLLegendElement;
class ValidationMessage;
class ValidityState;
// HTMLFormControlElement is the default implementation of FormAssociatedElement,
// and form-associated element implementations should use HTMLFormControlElement
// unless there is a special reason.
class HTMLFormControlElement : public LabelableElement, public FormAssociatedElement {
public:
    virtual ~HTMLFormControlElement();

    HTMLFormElement* form() const { return FormAssociatedElement::form(); }

    // Reflected form* content attributes: per-control overrides of the
    // owner form's submission parameters.
    String formEnctype() const;
    void setFormEnctype(const String&);
    String formMethod() const;
    void setFormMethod(const String&);
    bool formNoValidate() const;

    // Invalidates the cached ancestor-disabled state (m_ancestorDisabledState)
    // when a disabling ancestor (e.g. a fieldset) changes.
    void ancestorDisabledStateWasChanged();

    virtual void reset() { }

    // Tracks whether the renderer currently reflects the control's value.
    virtual bool formControlValueMatchesRenderer() const { return m_valueMatchesRenderer; }
    virtual void setFormControlValueMatchesRenderer(bool b) { m_valueMatchesRenderer = b; }

    bool wasChangedSinceLastFormControlChangeEvent() const { return m_wasChangedSinceLastFormControlChangeEvent; }
    void setChangedSinceLastFormControlChangeEvent(bool);

    virtual void dispatchFormControlChangeEvent();
    void dispatchChangeEvent();
    void dispatchFormControlInputEvent();

    virtual bool isDisabledFormControl() const OVERRIDE;

    virtual bool isFocusable() const OVERRIDE;
    virtual bool isEnumeratable() const { return false; }

    bool isRequired() const;

    const AtomicString& type() const { return formControlType(); }

    virtual const AtomicString& formControlType() const = 0;

    virtual bool canTriggerImplicitSubmission() const { return false; }

    // Override in derived classes to get the encoded name=value pair for submitting.
    // Return true for a successful control (see HTML4-17.13.2).
    virtual bool appendFormData(FormDataList&, bool) { return false; }

    virtual bool isSuccessfulSubmitButton() const { return false; }
    virtual bool isActivatedSubmit() const { return false; }
    virtual void setActivatedSubmit(bool) { }

    // Constraint validation API.
    virtual bool willValidate() const;
    void updateVisibleValidationMessage();
    void hideVisibleValidationMessage();
    bool checkValidity(Vector<RefPtr<FormAssociatedElement> >* unhandledInvalidControls = 0);
    // This must be called when a validation constraint or control value is changed.
    void setNeedsValidityCheck();
    virtual void setCustomValidity(const String&) OVERRIDE;

    bool isReadOnly() const { return m_isReadOnly; }
    bool isDisabledOrReadOnly() const { return isDisabledFormControl() || m_isReadOnly; }

    bool hasAutofocused() { return m_hasAutofocused; }
    void setAutofocused() { m_hasAutofocused = true; }

    static HTMLFormControlElement* enclosingFormControlElement(Node*);

    using Node::ref;
    using Node::deref;

protected:
    HTMLFormControlElement(const QualifiedName& tagName, Document*, HTMLFormElement*);

    virtual void parseAttribute(const QualifiedName&, const AtomicString&) OVERRIDE;
    virtual void requiredAttributeChanged();
    virtual void disabledAttributeChanged();
    virtual void attach(const AttachContext& = AttachContext()) OVERRIDE;
    virtual InsertionNotificationRequest insertedInto(ContainerNode*) OVERRIDE;
    virtual void removedFrom(ContainerNode*) OVERRIDE;
    virtual void didMoveToNewDocument(Document* oldDocument) OVERRIDE;

    virtual bool supportsFocus() const OVERRIDE;
    virtual bool isKeyboardFocusable(KeyboardEvent*) const OVERRIDE;
    virtual bool isMouseFocusable() const OVERRIDE;

    virtual void didRecalcStyle(StyleChange) OVERRIDE;

    virtual void dispatchBlurEvent(PassRefPtr<Element> newFocusedElement) OVERRIDE;

    // This must be called any time the result of willValidate() has changed.
    void setNeedsWillValidateCheck();
    virtual bool recalcWillValidate() const;

    bool validationMessageShadowTreeContains(Node*) const;

private:
    virtual void refFormAssociatedElement() { ref(); }
    virtual void derefFormAssociatedElement() { deref(); }

    virtual bool isFormControlElement() const { return true; }
    virtual bool alwaysCreateUserAgentShadowRoot() const OVERRIDE { return true; }

    virtual short tabIndex() const OVERRIDE FINAL;

    virtual HTMLFormElement* virtualForm() const;
    virtual bool isDefaultButtonForForm() const;
    virtual bool isValidFormControlElement();
    // Recomputes and caches m_ancestorDisabledState (lazy, hence mutable).
    void updateAncestorDisabledState() const;

    OwnPtr<ValidationMessage> m_validationMessage;
    bool m_disabled : 1;
    bool m_isReadOnly : 1;
    bool m_isRequired : 1;
    bool m_valueMatchesRenderer : 1;

    // Lazily-computed caches; Unknown means "not computed yet".
    enum AncestorDisabledState { AncestorDisabledStateUnknown, AncestorDisabledStateEnabled, AncestorDisabledStateDisabled };
    mutable AncestorDisabledState m_ancestorDisabledState;
    enum DataListAncestorState { Unknown, InsideDataList, NotInsideDataList };
    mutable enum DataListAncestorState m_dataListAncestorState;

    // The initial value of m_willValidate depends on the derived class. We can't
    // initialize it with a virtual function in the constructor. m_willValidate
    // is not deterministic as long as m_willValidateInitialized is false.
    mutable bool m_willValidateInitialized: 1;
    mutable bool m_willValidate : 1;

    // Cache of validity()->valid().
    // But "candidate for constraint validation" doesn't affect m_isValid.
    bool m_isValid : 1;

    bool m_wasChangedSinceLastFormControlChangeEvent : 1;

    bool m_hasAutofocused : 1;
};
// True only for element nodes that identify themselves as form controls.
inline bool isHTMLFormControlElement(const Node* node)
{
    if (!node->isElementNode())
        return false;
    return toElement(node)->isFormControlElement();
}
// Checked downcast: asserts (in debug builds) that the node is either null
// or actually a form control before the static_cast.
inline HTMLFormControlElement* toHTMLFormControlElement(Node* node)
{
    ASSERT_WITH_SECURITY_IMPLICATION(!node || isHTMLFormControlElement(node));
    return static_cast<HTMLFormControlElement*>(node);
}
// This will catch anyone doing an unnecessary cast.
void toHTMLFormControlElement(const HTMLFormControlElement*);
} // namespace
#endif
|
Mindhome/field_service | mindhome_alpha/erpnext/hr/doctype/attendance/attendance.py | <gh_stars>1-10
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import getdate, nowdate
from frappe import _
from frappe.model.document import Document
from frappe.utils import cstr, get_datetime, formatdate
class Attendance(Document):
	"""One attendance record per employee per day."""

	def validate(self):
		"""Validate status, date, duplicates and leave consistency before save."""
		from erpnext.controllers.status_updater import validate_status
		validate_status(self.status, ["Present", "Absent", "On Leave", "Half Day", "Work From Home"])
		self.validate_attendance_date()
		self.validate_duplicate_record()
		self.check_leave_record()

	def validate_attendance_date(self):
		"""Reject future dates (leaves excepted) and dates before joining."""
		date_of_joining = frappe.db.get_value("Employee", self.employee, "date_of_joining")

		# leaves can be marked for future dates
		if self.status != 'On Leave' and not self.leave_application and getdate(self.attendance_date) > getdate(nowdate()):
			frappe.throw(_("Attendance can not be marked for future dates"))
		elif date_of_joining and getdate(self.attendance_date) < getdate(date_of_joining):
			frappe.throw(_("Attendance date can not be less than employee's joining date"))

	def validate_duplicate_record(self):
		"""Throw if another non-cancelled Attendance exists for this employee/date."""
		res = frappe.db.sql("""
			select name from `tabAttendance`
			where employee = %s
			and attendance_date = %s
			and name != %s
			and docstatus != 2
		""", (self.employee, getdate(self.attendance_date), self.name))
		if res:
			frappe.throw(_("Attendance for employee {0} is already marked").format(self.employee))

	def check_leave_record(self):
		"""Sync status/leave_type with approved leave applications for this date.

		NOTE(review): when multiple approved leaves overlap the date, the
		last row returned wins — confirm that is intended.
		"""
		leave_record = frappe.db.sql("""
			select leave_type, half_day, half_day_date
			from `tabLeave Application`
			where employee = %s
			and %s between from_date and to_date
			and status = 'Approved'
			and docstatus = 1
		""", (self.employee, self.attendance_date), as_dict=True)
		if leave_record:
			for d in leave_record:
				self.leave_type = d.leave_type
				if d.half_day_date == getdate(self.attendance_date):
					self.status = 'Half Day'
					frappe.msgprint(_("Employee {0} on Half day on {1}")
						.format(self.employee, formatdate(self.attendance_date)))
				else:
					self.status = 'On Leave'
					frappe.msgprint(_("Employee {0} is on Leave on {1}")
						.format(self.employee, formatdate(self.attendance_date)))

		if self.status in ("On Leave", "Half Day"):
			# Leave-like status without a matching leave application: warn only.
			if not leave_record:
				frappe.msgprint(_("No leave record found for employee {0} on {1}")
					.format(self.employee, formatdate(self.attendance_date)), alert=1)
		elif self.leave_type:
			# Status is Present/Absent/etc.: clear stale leave linkage.
			self.leave_type = None
			self.leave_application = None

	def validate_employee(self):
		"""Throw unless the employee exists and is Active.

		NOTE(review): not invoked from validate() above — confirm callers.
		"""
		emp = frappe.db.sql("select name from `tabEmployee` where name = %s and status = 'Active'",
			self.employee)
		if not emp:
			frappe.throw(_("Employee {0} is not active or does not exist").format(self.employee))
@frappe.whitelist()
def get_events(start, end, filters=None):
	"""Calendar feed: attendance events between start and end for the
	Employee linked to the current session user.

	Returns an empty list when the logged-in user has no Employee record.
	"""
	events = []

	employee = frappe.db.get_value("Employee", {"user_id": frappe.session.user})

	if not employee:
		return events

	from frappe.desk.reportview import get_filters_cond
	conditions = get_filters_cond("Attendance", filters, [])
	add_attendance(events, start, end, conditions=conditions)
	return events
def add_attendance(events, start, end, conditions=None):
	"""Append non-cancelled attendance records between start and end to events.

	Args:
		events: list to append event dicts to (deduplicated).
		start, end: date range bounds.
		conditions: optional extra SQL appended to the WHERE clause.

	Bug fix: ``docstatus`` is now included in the SELECT — the event dict
	reads ``d.docstatus``, which previously was never fetched and therefore
	always None.
	"""
	query = """select name, attendance_date, status, docstatus
		from `tabAttendance` where
		attendance_date between %(from_date)s and %(to_date)s
		and docstatus < 2"""
	if conditions:
		query += conditions

	for d in frappe.db.sql(query, {"from_date":start, "to_date":end}, as_dict=True):
		e = {
			"name": d.name,
			"doctype": "Attendance",
			"start": d.attendance_date,
			"end": d.attendance_date,
			"title": cstr(d.status),
			"docstatus": d.docstatus
		}
		if e not in events:
			events.append(e)
def mark_attendance(employee, attendance_date, status, shift=None, leave_type=None, ignore_validate=False):
	"""Create and submit an Attendance record unless one already exists.

	Returns the new Attendance name, or None when a non-cancelled record
	already exists for this employee/date.
	"""
	if not frappe.db.exists('Attendance', {'employee':employee, 'attendance_date':attendance_date, 'docstatus':('!=', '2')}):
		company = frappe.db.get_value('Employee', employee, 'company')
		attendance = frappe.get_doc({
			'doctype': 'Attendance',
			'employee': employee,
			'attendance_date': attendance_date,
			'status': status,
			'company': company,
			'shift': shift,
			'leave_type': leave_type
		})
		# Callers (e.g. auto-attendance) may bypass validate() entirely.
		attendance.flags.ignore_validate = ignore_validate
		attendance.insert()
		attendance.submit()
		return attendance.name
@frappe.whitelist()
def mark_bulk_attendance(data):
	"""Create and submit one Attendance document per unmarked day.

	Args:
		data: dict (or its JSON string form) with keys ``employee``,
			``status`` and ``unmarked_days`` (list of date strings).

	Cleanup: removed an unused ``from pprint import pprint`` import.
	"""
	import json
	if isinstance(data, frappe.string_types):
		data = json.loads(data)
	data = frappe._dict(data)
	company = frappe.get_value('Employee', data.employee, 'company')
	for date in data.unmarked_days:
		doc_dict = {
			'doctype': 'Attendance',
			'employee': data.employee,
			'attendance_date': get_datetime(date),
			'status': data.status,
			'company': company,
		}
		attendance = frappe.get_doc(doc_dict).insert()
		attendance.submit()
def get_month_map():
	"""Return a frappe._dict mapping English month names to numbers 1-12."""
	month_names = (
		"January", "February", "March", "April", "May", "June",
		"July", "August", "September", "October", "November", "December",
	)
	return frappe._dict({name: number for number, name in enumerate(month_names, start=1)})
@frappe.whitelist()
def get_unmarked_days(employee, month):
	"""Return date strings in the given month (current year) with no
	attendance record for the employee, up to but excluding today.

	NOTE(review): assumes the CURRENT year (today.year) and stops iterating
	once today's day/month is reached — months after the current month
	therefore return all days; confirm this matches the caller's intent.
	"""
	import calendar
	month_map = get_month_map()
	today = get_datetime()

	# All dates of the requested month in the current year ('YYYY-M-D').
	dates_of_month = ['{}-{}-{}'.format(today.year, month_map[month], r) for r in range(1, calendar.monthrange(today.year, month_map[month])[1] + 1)]

	length = len(dates_of_month)
	month_start, month_end = dates_of_month[0], dates_of_month[length-1]

	# Existing (non-cancelled) attendance records in that window.
	records = frappe.get_all("Attendance", fields = ['attendance_date', 'employee'] , filters = [
		["attendance_date", ">=", month_start],
		["attendance_date", "<=", month_end],
		["employee", "=", employee],
		["docstatus", "!=", 2]
	])

	marked_days = [get_datetime(record.attendance_date) for record in records]
	unmarked_days = []

	for date in dates_of_month:
		date_time = get_datetime(date)
		# Stop at today: future days of the current month stay unmarked.
		if today.day == date_time.day and today.month == date_time.month:
			break
		if date_time not in marked_days:
			unmarked_days.append(date)

	return unmarked_days
|
ingjrs01/adventofcode | 2021/day16/generator.py | <reponame>ingjrs01/adventofcode
from packet import Packet
class Generator():
    """Factory for Packet objects."""

    def createPacket(self, t):
        """Return a new, empty Packet.

        NOTE(review): parameter ``t`` (presumably a packet type) is
        currently unused — confirm whether the Packet should be
        initialised from it.
        """
        packet = Packet()
        return packet
karthik-git-user/SAP-Stage | core-customize/hybris/bin/modules/integration-apis/odata2services/src/de/hybris/platform/odata2services/odata/monitoring/InboundMonitoringException.java | /*
* [y] hybris Platform
*
* Copyright (c) 2018 SAP SE or an SAP affiliate company.
* All rights reserved.
*
* This software is the confidential and proprietary information of SAP
* ("Confidential Information"). You shall not disclose such Confidential
* Information and shall use it only in accordance with the terms of the
* license agreement you entered into with SAP.
*/
package de.hybris.platform.odata2services.odata.monitoring;
/**
* An InboundMonitoringException represents an exception that occurs during monitoring processing
*/
public class InboundMonitoringException extends RuntimeException
{
	// RuntimeException is Serializable; pin an explicit version id so the
	// serialized form stays stable across compiler/JVM changes.
	private static final long serialVersionUID = 1L;

	/**
	 * Wraps the underlying failure that occurred during monitoring processing.
	 *
	 * @param cause the originating exception, retrievable via {@link #getCause()}
	 */
	public InboundMonitoringException(final Throwable cause)
	{
		super(cause);
	}
}
|
tanujadasari/Awesome_Python_Scripts | ImageProcessingScripts/Map Coloring/map_coloring.py | <gh_stars>1-10
# MAP COLORING
# imported necessary library
import tkinter
from tkinter import *
import tkinter as tk
import tkinter.messagebox as mbox
from tkinter import ttk
from tkinter import filedialog
from PIL import ImageTk, Image
import cv2
import os
import numpy as np
from cv2 import *
import random
# Splash ("start") window shown before the main editor window.
frame = Tk()
frame.title('Map Coloring')
frame.geometry('1300x750')
# frame.configure(bg = "white")
# Front image displayed on the splash window.
path = "Images/front.jpg"
# Creates a Tkinter-compatible photo image, which can be used everywhere Tkinter expects an image object.
img1 = ImageTk.PhotoImage(Image.open(path))
# The Label widget is a standard Tkinter widget used to display a text or image on the screen.
panel = tk.Label(frame, image = img1)
panel.place(x = 150, y = 110)
# Title label.
start1 = Label(frame, text='MAP COLORING', font=("Arial", 55,"underline"),fg="magenta")
start1.place(x=350,y=10)
def start_fun():
    """Close the splash window so the main editor window can open."""
    frame.destroy()
# START button: dismisses the splash screen (comment previously said "exit").
prevB = Button(frame, text='START', command=start_fun, font=("Arial", 25), bg = "light green", fg = "blue", borderwidth=3, relief="raised")
prevB.place(x = 200, y = 640)
# Confirm-before-exit handler for the splash window.
def exit_win():
    """Destroy the splash window after user confirmation."""
    if mbox.askokcancel("Exit", "Do you want to exit?"):
        frame.destroy()
# EXIT button on the splash window.
prevB = Button(frame, text='EXIT', command=exit_win, font=("Arial", 25), bg = "red", fg = "blue", borderwidth=3, relief="raised")
prevB.place(x = 930, y = 640)
# Show the exit dialog when the title-bar close button is used too.
frame.protocol("WM_DELETE_WINDOW", exit_win)
frame.mainloop()
# Main editor window (created after the splash's mainloop returns).
window = Tk()
window.geometry("1300x750")
window.title("Map Coloring")
# Module-level state shared by the GUI callbacks below.
# NOTE(review): `global` at module scope is a no-op, and `emig` looks like
# a typo for `eimg` (the variable actually used elsewhere) — confirm.
global count, emig
# global bright, con
# global frp, tname # list of paths
frp = []
tname = []
con = 1
bright = 0
panelB = None
panelA = None
# Directory portion of a '/'-separated path (keeps the trailing slash).
def getpath(path):
    """Return everything before the last path component of *path*.

    A path with no '/' yields ''.  (A path ending in '/' also yields '',
    mirroring the original slice-by-negative-length behaviour.)
    """
    tail = path.split('/')[-1]
    return path[:-len(tail)]
# Final component of a '/'-separated path.
def getfoldername(path):
    """Return the last path component of *path* (the whole string if it
    contains no '/')."""
    return path.rpartition('/')[2]
# Basename of a path without its extension(s).
def getfilename(path):
    """Return the last path component of *path*, truncated at the first
    '.' — e.g. 'dir/name.tar.gz' -> 'name'."""
    base = path.rpartition('/')[2]
    return base.partition('.')[0]
# Show the OS file-open dialog and remember the chosen path.
def openfilename():
    """Ask the user to pick a file; store and return its path.

    Also assigns the module-level ``filename`` as a side effect.
    Returns '' (falsy) when the dialog is cancelled.
    """
    global filename
    filename = filedialog.askopenfilename(title='"pen')
    return filename
# Load the user-selected image into both preview panels.
def open_img():
    """Select an image and display it in the original/colored panels.

    Side effects: sets globals ``x`` (path), ``eimg`` (PIL image),
    ``location``/``filename`` (split path), and creates or updates the
    two Label panels.
    """
    global x, panelA, panelB
    global count, eimg, location, filename
    count = 0
    x = openfilename()
    img = Image.open(x)
    eimg = img
    img = ImageTk.PhotoImage(img)
    temp = x
    location = getpath(temp)
    filename = getfilename(temp)
    # Pre-fill the name entry with the chosen file's base name.
    name_entry.delete(0,END)
    name_entry.insert(0,str(getfilename(x)))
    if panelA is None or panelB is None:
        # First selection: create both panels.
        panelA = Label(image=img)
        panelA.image = img
        panelA.pack(side="left", padx=50, pady=10)
        panelB = Label(image=img)
        panelB.image = img
        panelB.pack(side="right", padx=50, pady=10)
    else:
        # Subsequent selections: update images in place.
        panelA.configure(image=img)
        panelB.configure(image=img)
        panelA.image = img
        panelB.image = img
# Segment the selected map image and fill its regions with colors.
def en_fun():
    """Color the map at global path ``x`` and show it in ``panelB``.

    Steps: thicken faint 1-pixel boundaries, extract contours, fill each
    second-level region with a rotating palette, write intermediate files
    ('segmented_image.jpg', 'colored_map.jpg') and display the result.

    Bug fix: the original used ``x`` as the inner loop variable while also
    declaring ``global x`` — every run overwrote the selected image path
    with an integer, breaking reset()/save and any later coloring run.
    The loop indices are now local (``row``/``col``).
    """
    global x
    THRESH = 240
    orig = cv2.imread(x)
    img = cv2.cvtColor(orig, cv2.COLOR_BGR2GRAY)

    # Make the faint 1-pixel boundary bolder
    rows, cols = img.shape
    new_img = np.full_like(img, 255)  # pure white image
    for row in range(rows):
        if not (row % 10):
            print('Row = %d (%.2f%%)' % (row, 100. * row / rows))
        for col in range(cols):
            # Count dark 4-neighbours; a pixel joins the boundary if it is
            # dark itself or has at least two dark neighbours.
            score = 1 if row > 0 and img.item(row - 1, col) < THRESH else 0
            score += 1 if col > 0 and img.item(row, col - 1) < THRESH else 0
            score += 1 if row < rows - 1 and img.item(row + 1, col) < THRESH else 0
            score += 1 if col < cols - 1 and img.item(row, col + 1) < THRESH else 0
            if img.item(row, col) < THRESH or score >= 2:
                new_img[row, col] = 0  # black pixels show boundary
    cv2.imwrite('segmented_image.jpg', new_img)

    # Find all contours on the map
    contours, hierarchy = cv2.findContours(new_img, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)
    print("Number of contours detected = %d" % len(contours))

    # Fill second level regions on the map
    coln = 0
    colors = [
        [127, 0, 255],
        [255, 0, 127],
        [255, 127, 0],
        [127, 255, 0],
        [0, 127, 255],
        [0, 255, 127],
    ]
    hierarchy = hierarchy[0]
    for i in range(len(contours)):
        area = cv2.contourArea(contours[i])
        if hierarchy[i][3] == 1:
            print(i, area)
            coln = (coln + 1) % len(colors)
            cv2.drawContours(orig, contours, i, colors[coln], -1)
    cv2.imwrite("colored_map.jpg", orig)
    imge = Image.open('colored_map.jpg')
    imge = ImageTk.PhotoImage(imge)
    panelB.configure(image=imge)
    panelB.image = imge
    mbox.showinfo("Color Status", "Map Colored successfully.")
# Restore the edited panel to the originally selected image.
def reset():
    """Reload the original image from disk and show it in ``panelB``.

    Reads the path from global ``x``; BGR->RGB via the [::-1] channel
    reversal so PIL displays correct colors.
    """
    global x
    # print(x)
    image = cv2.imread(x)[:, :, ::-1]
    global count, eimg
    count = 6
    global o6
    o6 = image
    image = Image.fromarray(o6)
    eimg = image
    image = ImageTk.PhotoImage(image)
    panelB.configure(image=image)
    panelB.image = image
    mbox.showinfo("Success", "Image reset to original format!")
# Save the colored map via a Save As dialog.
def save_img():
    """Ask for a destination path and save the current image (``eimg``).

    Bug fix: the original used filedialog.asksaveasfile(mode='w'), which
    returns a TEXT-mode file handle; writing binary JPEG data through it
    corrupts the output.  It also clobbered the global ``filename``
    (a path string) with that file object.  We now request only a path
    and let PIL open the file in binary mode itself.
    """
    global location, filename, eimg
    save_path = filedialog.asksaveasfilename(defaultextension=".jpg")
    if not save_path:
        return
    eimg.save(save_path)
    mbox.showinfo("Success", "Colored Map Saved Successfully!")
# Title label of the editor window.
start1 = tk.Label(text = "MAP COLORING", font=("Arial", 50, "underline"), fg="magenta") # same way bg
start1.place(x = 370, y = 10)
# Caption for the left (original) panel.
start1 = tk.Label(text = "Original\nMap", font=("Arial", 40), fg="magenta") # same way bg
start1.place(x = 100, y = 270)
# Caption for the right (colored) panel.
start1 = tk.Label(text = "Colored\nMap", font=("Arial", 40), fg="magenta") # same way bg
start1.place(x = 920, y = 270)
# SAVE button.
saveb = Button(window, text="SAVE",command=save_img,font=("Arial", 25), bg = "orange", fg = "blue", borderwidth=3, relief="raised")
saveb.place(x =80 , y =20 )
# SELECT button: choose the input image.
chooseb = Button(window, text="SELECT",command=open_img,font=("Arial", 25), bg = "orange", fg = "blue", borderwidth=3, relief="raised")
chooseb.place(x =200 , y =650 )
# Name entry box (pre-filled with the selected file's base name).
name_entry = Entry(window, font=("Arial", 30), fg='brown', bg="light yellow", borderwidth=3, width=13)
name_entry.place(x=480, y=655)
# COLOR button: runs en_fun (comment previously said "Encrypt").
enb = Button(window, text="COLOR",command=en_fun,font=("Arial", 25), bg = "light green", fg = "blue", borderwidth=3, relief="raised")
enb.place(x =900 , y =650 )
# # reset button created
# resetb = Button(window, text="RESET",command=reset,font=("Arial", 25), bg = "yellow", fg = "blue", borderwidth=3, relief="raised")
# resetb.place(x =500 , y =650 )
# Confirm-before-exit handler for the editor window.
def exit_win():
    """Destroy the main editor window after user confirmation."""
    if mbox.askokcancel("Exit", "Do you want to exit?"):
        window.destroy()
# EXIT button; also wired to the title-bar close button below.
exitb = Button(window, text="EXIT",command=exit_win,font=("Arial", 25), bg = "red", fg = "blue", borderwidth=3, relief="raised")
exitb.place(x =1100 , y =20 )
window.protocol("WM_DELETE_WINDOW", exit_win)
window.mainloop()
cuplv/thresher | apps/pldi13/OpenSudoku/src/cz/romario/opensudoku/gui/importing/ExtrasImportTask.java | package cz.romario.opensudoku.gui.importing;
import cz.romario.opensudoku.db.SudokuInvalidFormatException;
/**
* Handles import of puzzles via intent's extras.
*
* @author romario
*
*/
public class ExtrasImportTask extends AbstractImportTask {

	/** Target folder name (create or append target). */
	private final String mFolderName;
	/** Newline-separated puzzle strings passed via intent extras. */
	private final String mGames;
	/** When true, append games to an existing folder instead of creating one. */
	private final boolean mAppendToFolder;

	public ExtrasImportTask(String folderName, String games, boolean appendToFolder) {
		// Fields are assigned exactly once; declared final for immutability.
		mFolderName = folderName;
		mGames = games;
		mAppendToFolder = appendToFolder;
	}

	@Override
	protected void processImport() throws SudokuInvalidFormatException {
		// Choose folder semantics, then import each newline-separated game.
		if (mAppendToFolder) {
			appendToFolder(mFolderName);
		} else {
			importFolder(mFolderName);
		}

		for (String game : mGames.split("\n")) {
			importGame(game);
		}
	}
}
|
ataraxy-zhi/leetcode_questions_with_java | src/com/solution/onequestionperday/may2021/poweroffour342/PowerOfFour.java | <reponame>ataraxy-zhi/leetcode_questions_with_java
package com.solution.onequestionperday.may2021.poweroffour342;
/**
* @ClassName PowerOfFour
* @Description 342. Power of Four
* Given an integer n, return true if it is a power of four. Otherwise, return false.
* <p>
* An integer n is a power of four, if there exists an integer x such that n == 4x.
* <p>
* <p>
* <p>
* Example 1:
* <p>
* Input: n = 16
* Output: true
* Example 2:
* <p>
* Input: n = 5
* Output: false
* Example 3:
* <p>
* Input: n = 1
* Output: true
* <p>
* <p>
* Constraints:
* <p>
* -231 <= n <= 231 - 1
* <p>
* <p>
* Follow up: Could you solve it without loops/recursion?
* <p>
* <p>
* 342. 4的幂
* 给定一个整数,写一个函数来判断它是否是 4 的幂次方。如果是,返回 true ;否则,返回 false 。
* <p>
* 整数 n 是 4 的幂次方需满足:存在整数 x 使得 n == 4x
* <p>
*
* <p>
* 示例 1:
* <p>
* 输入:n = 16
* 输出:true
* 示例 2:
* <p>
* 输入:n = 5
* 输出:false
* 示例 3:
* <p>
* 输入:n = 1
* 输出:true
*
* <p>
* 提示:
* <p>
* -231 <= n <= 231 - 1
*
* <p>
* 进阶:
* <p>
* 你能不使用循环或者递归来完成本题吗?
* <p>
* 来源:力扣(LeetCode)
* 链接:https://leetcode-cn.com/problems/power-of-four
* 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
* @Author yi
* @Date 2021/5/31 22:54
*/
public class PowerOfFour {

    /**
     * Returns true iff {@code n} is a power of four, i.e. n == 4^x for
     * some integer x >= 0 (so 1, 4, 16, 64, ...).
     *
     * Loop/recursion-free: a power of four must be positive, have exactly
     * one set bit (power of two), and that bit must sit at an even
     * position — i.e. inside the mask 0x55555555.
     *
     * @param n value to test (may be negative or zero; both return false)
     * @return true when n is a power of four
     */
    public boolean isPowerOfFour(int n) {
        return n > 0 && (n & (n - 1)) == 0 && (n & 0x55555555) != 0;
    }
}
|
addstone/unrealengine3 | Development/Src/Engine/Src/AFileLog.cpp | /*=============================================================================
AFileLog.cpp: Unreal Tournament 2003 mod author logging
Copyright 1997-2002 Epic Games, Inc. All Rights Reserved.
Revision history:
* Created by <NAME>
=============================================================================*/
#include "EnginePrivate.h"
/*-----------------------------------------------------------------------------
Stat Log Implementation.
-----------------------------------------------------------------------------*/
IMPLEMENT_CLASS(AFileLog);
// Opens (creates) the user log file, sanitizing the requested name.
// No-op if a log archive is already open.
void AFileLog::OpenLog(FString &fileName,FString &extension)
{
	if (LogAr == NULL)
	{
		// Strip all pathing characters from the name
		for (INT i = 0; i < fileName.Len(); i++)
		{
			if ( (*fileName)[i]=='\\' || (*fileName)[i]=='.')
			{
				// NOTE(review): casts away the FString's const character
				// buffer to patch characters in place — relies on the
				// buffer not being shared; confirm against FString semantics.
				((TCHAR*)(*fileName))[i] = '_';
			}
		}
		// append the extension
		fileName += extension;
		// save the new log file name (rooted under the game log directory)
		fileName = FString::Printf(TEXT("%s%s"),*appGameLogDir(),*fileName);
		LogFileName = fileName;
		debugf(TEXT("Opening user log %s"),*LogFileName);
		// and create the actual archive
		LogAr = GFileManager->CreateFileWriter(*LogFileName, FILEWRITE_EvenIfReadOnly);
	}
}
// UnrealScript binding for OpenLog(fileName, extension=".txt").
void AFileLog::execOpenLog( FFrame& Stack, RESULT_DECL )
{
	P_GET_STR(fileName);
	P_GET_STR_OPTX(extension,TEXT(".txt"));
	P_FINISH;
	OpenLog(fileName,extension);
}
// UnrealScript binding: closes and releases the log archive, if open.
void AFileLog::execCloseLog( FFrame& Stack, RESULT_DECL )
{
	P_FINISH;
	// if the archive exists
	if (LogAr != NULL)
	{
		// delete it (FArchive's destructor flushes/closes the file)
		delete (FArchive*)LogAr;
	}
	LogAr = NULL;
}
// Appends a line to the user log, converting the string to ANSI.
// Note: mutates the caller's string by appending "\r\n" (pre-existing
// behavior, kept for compatibility).
void AFileLog::Logf(FString &logString)
{
	if (LogAr != NULL)
	{
		// append eol characters
		logString += TEXT("\r\n");
		check(logString.Len() < 1024 && "Can only log strings <1024 length");
		// Clamp the length in release builds too: check() compiles out
		// there, and converting more than 1023 characters would overflow
		// the fixed stack buffer below.
		INT len = logString.Len();
		if (len > 1023)
		{
			len = 1023;
		}
		// and convert to ansi
		ANSICHAR ansiStr[1024];
		INT idx;
		for (idx = 0; idx < len; idx++)
		{
			ansiStr[idx] = ToAnsi((*logString)[idx]);
		}
		// null terminate
		ansiStr[idx] = '\0';
		// and serialize to the archive
		((FArchive*)LogAr)->Serialize(ansiStr, idx);
		((FArchive*)LogAr)->Flush();
	}
}
// UnrealScript binding for Logf(logString).
void AFileLog::execLogf( FFrame& Stack, RESULT_DECL )
{
	P_GET_STR(logString);
	P_FINISH;
	Logf(logString);
}
|
GavinRay97/teiid | connectors/jdbc/translator-jdbc/src/test/java/org/teiid/translator/jdbc/exasol/TestExasolTranslator.java | <filename>connectors/jdbc/translator-jdbc/src/test/java/org/teiid/translator/jdbc/exasol/TestExasolTranslator.java
/*
* Copyright Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags and
* the COPYRIGHT.txt file distributed with this work.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.teiid.translator.jdbc.exasol;
import org.junit.BeforeClass;
import org.junit.Test;
import org.teiid.translator.TranslatorException;
import org.teiid.translator.jdbc.TranslationHelper;
/**
 * Unit tests for the EXASOL JDBC translator's rewriting of Teiid
 * {@code convert()} calls into EXASOL {@code cast(...)} syntax.
 */
public class TestExasolTranslator {

    private static ExasolExecutionFactory translator;

    @BeforeClass
    public static void setupOnce() throws Exception {
        translator = new ExasolExecutionFactory();
        translator.start();
    }

    /**
     * Translates {@code input} against the BQT test VDB and asserts the
     * generated SQL equals {@code expectedOutput}.
     */
    public void helpTestVisitor(String input, String expectedOutput) throws TranslatorException {
        TranslationHelper.helpTestVisitor(TranslationHelper.BQT_VDB, input, expectedOutput, translator);
    }

    @Test
    public void testConvertBooleanToDouble() throws TranslatorException {
        String input = "SELECT convert(BooleanValue, double) FROM BQT1.SmallA";
        String output = "SELECT cast(SmallA.BooleanValue as double precision) FROM SmallA";
        helpTestVisitor(input, output);
    }

    // Renamed from testConvertBooleanToString: the statement converts to
    // *byte*, not string, and EXASOL renders byte as decimal(3).
    @Test
    public void testConvertBooleanToByte() throws TranslatorException {
        String input = "SELECT convert(BooleanValue, byte) FROM BQT1.SmallA";
        String output = "SELECT cast(SmallA.BooleanValue as decimal(3)) FROM SmallA";
        helpTestVisitor(input, output);
    }
}
|
TotallyNotChase/c-iterplus | include/itplus_enumerate.h | /**
* @file
* @brief Macros for implementing the `enumerate` abstraction using the `IterEnumr` struct.
*
* https://doc.rust-lang.org/std/iter/trait.Iterator.html#method.enumerate
* An IterEnumr struct is a struct that stores an iterable, and its iteration index. Its `next` function implementation
* that returns a `Pair` of 2 elements, first being the index, and second being an element from the source iterable.
*/
#ifndef LIB_ITPLUS_ENUMR_H
#define LIB_ITPLUS_ENUMR_H

#include "itplus_iterator.h"
#include "itplus_macro_utils.h"
#include "itplus_maybe.h"
#include "itplus_pair.h"

#include <stddef.h>

/**
 * @def IterEnumr(T)
 * @brief Convenience macro to get the type of the IterEnumr struct with given element types.
 *
 * # Example
 *
 * @code
 * DefineIterEnumr(int);
 * IterEnumr(int) i; // Declares a variable of type IterEnumr(int)
 * @endcode
 *
 * @param T The type of value the `Iterable` wrapped in this `IterEnumr` will yield. Must be the same type name
 * passed to #DefineIterEnumr(T).
 *
 * @note If `T` is a pointer, it needs to be typedef-ed into a type that does not contain the `*`. Only
 * alphanumerics.
 */
#define IterEnumr(T) ITPL_CONCAT(IterEnumr_, T)

/**
 * @def DefineIterEnumr(T)
 * @brief Define an IterEnumr struct that works with on `Iterable(T)`s.
 *
 * # Example
 *
 * @code
 * DefineIterEnumr(int); // Defines an IterEnumr(int) struct
 * @endcode
 *
 * @param T The type of value the `Iterable` wrapped in this `IterEnumr` will yield.
 *
 * @note If `T` is a pointer, it needs to be typedef-ed into a type that does not contain the `*`. Only
 * alphanumerics.
 * @note An #Iterator(T) for the given `T` **must** also exist.
 */
#define DefineIterEnumr(T)                                                                                             \
    typedef struct                                                                                                     \
    {                                                                                                                  \
        size_t i;                                                                                                      \
        Iterable(T) src;                                                                                               \
    } IterEnumr(T)

/**
 * @def define_iterenumr_func(T, Name)
 * @brief Define a function to turn an #IterEnumr(T) into an #Iterable(T) where `T = Pair(size_t, T)`.
 *
 * Define the `next` function implementation for the #IterEnumr(T) struct, and use it to implement the Iterator
 * typeclass, for given `T`.
 *
 * The defined function takes in a value of type `IterEnumr(T)*` and wraps it in an `Iterable(Pair(size_t, T))`.
 *
 * # Example
 *
 * @code
 * DefineIterEnumr(int);
 *
 * // Implement `Iterator` for `IterEnumr(int)`
 * // The defined function has the signature- `Iterable(Pair(size_t, int)) wrap_intenumr(IterEnumr(int)* x)`
 * define_iterenumr_func(int, wrap_intenumr)
 * @endcode
 *
 * Usage of the defined function-
 *
 * @code
 * // Enumerate `it` (of type `Iterable(int)`) to create a new iterable
 * Iterable(Pair(size_t, int)) enumerated = wrap_intenumr(&(IterEnumr(int)){ .src = it });
 * @endcode
 *
 * @param T The type of value the first `Iterable` wrapped in this `IterEnumr` will yield.
 * @param Name Name to define the function as.
 *
 * @note If `T` is a pointer, it needs to be typedef-ed into a type that does not contain the `*`. Only
 * alphanumerics.
 * @note An #IterEnumr(T) for the given `T` **must** exist.
 * @note An #Iterator(T), with `T = Pair(size_t, T)`, for the given `T` **must** also exist.
 * @note This should not be delimited by a semicolon.
 */
#define define_iterenumr_func(T, Name)                                                                                 \
    static Maybe(Pair(size_t, T)) ITPL_CONCAT(IterEnumr(T), _nxt)(IterEnumr(T) * self)                                 \
    {                                                                                                                  \
        /* Pull the next element from the source; pair it with the running index on success */                         \
        Iterable(T) const srcit = self->src;                                                                           \
        Maybe(T) const res = srcit.tc->next(srcit.self);                                                               \
        return is_just(res) ? Just(PairOf(self->i++, from_just_(res), size_t, T), Pair(size_t, T))                     \
                            : Nothing(Pair(size_t, T));                                                                \
    }                                                                                                                  \
    impl_iterator(IterEnumr(T)*, Pair(size_t, T), Name, ITPL_CONCAT(IterEnumr(T), _nxt))

#endif /* !LIB_ITPLUS_ENUMR_H */
|
halv00rsen/Scorez4.0 | client/pages/admin/templates/new_scoring_type/new_scoring_type.js |
// Handles submission of the "new scoring type" admin form: validates the
// point range locally, then persists via the create_group_scoring method and
// resets the form on success.
Template.new_scoring_type_admin.events({
  "submit form": function(event, template) {
    event.preventDefault();
    const form = event.target;
    const scoring = {
      name: form.scoring_name.value,
      description: form.description.value,
      min_point: Number(form.min_value.value),
      max_point: Number(form.max_value.value),
      multiple_scorez: form.multiple_scorez.checked,
      plus_minus: form.plus_minus_scorez.checked
    };
    // Reject an inverted range before hitting the server.
    if (scoring.min_point > scoring.max_point) {
      Show_message("Max point have to be larger than min point.");
      return;
    }
    Meteor.call("create_group_scoring", scoring, function(error, result) {
      if (error) {
        Show_message(error.reason);
        return;
      }
      Show_message("The scoring was saved!");
      // Clear every field so the form is ready for the next entry.
      form.scoring_name.value = "";
      form.description.value = "";
      form.min_value.value = "";
      form.max_value.value = "";
      form.multiple_scorez.checked = false;
      form.plus_minus_scorez.checked = false;
    });
  }
});
|
crypticspawn/BashSupport | test/com/ansorgit/plugins/bash/lang/psi/impl/word/BashWordImplTest.java | <filename>test/com/ansorgit/plugins/bash/lang/psi/impl/word/BashWordImplTest.java
package com.ansorgit.plugins.bash.lang.psi.impl.word;
import com.ansorgit.plugins.bash.LightBashCodeInsightFixtureTestCase;
import com.ansorgit.plugins.bash.file.BashFileType;
import com.ansorgit.plugins.bash.lang.psi.api.BashCharSequence;
import com.ansorgit.plugins.bash.lang.psi.api.BashString;
import com.ansorgit.plugins.bash.lang.psi.api.word.BashWord;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiFile;
import com.intellij.psi.util.PsiTreeUtil;
import org.jetbrains.annotations.NotNull;
import org.junit.Assert;
/**
 * Tests for the PSI implementation of Bash words: wrapping state, static-ness,
 * content text ranges, and re-wrapping of replacement strings.
 */
public class BashWordImplTest extends LightBashCodeInsightFixtureTestCase {

    public void testEmpty() throws Exception {
        // An empty single-quoted string: wrapped, static, zero-length content range.
        BashCharSequence string = configureWord("''");
        Assert.assertTrue(string.isWrapped());
        Assert.assertTrue(string.isStatic());
        Assert.assertEquals(TextRange.create(1, 1), string.getTextContentRange());
        Assert.assertEquals("'abcdefghijk'", string.createEquallyWrappedString("abcdefghijk"));
    }

    public void testWrapped() throws Exception {
        BashCharSequence string = configureWord("'abc def'");
        Assert.assertTrue(string.isWrapped());
        Assert.assertTrue(string.isStatic());
        Assert.assertEquals(TextRange.create(1, 8), string.getTextContentRange());
        Assert.assertEquals("'abcdefghijk'", string.createEquallyWrappedString("abcdefghijk"));
    }

    public void testWrappedDollar() throws Exception {
        // $'...' (ANSI-C quoting): content range starts after the two-char prefix.
        BashCharSequence string = configureWord("$'abc def'");
        Assert.assertTrue(string.isWrapped());
        Assert.assertTrue(string.isStatic());
        Assert.assertEquals(TextRange.create(2, 9), string.getTextContentRange());
        Assert.assertEquals("$'abcdefghijk'", string.createEquallyWrappedString("abcdefghijk"));
    }

    public void testDynamic() throws Exception {
        // NOTE(review): the name says "dynamic" but the assertion expects
        // isStatic() == true -- $VAR inside $'...' is not expanded by Bash,
        // so static is plausible; confirm the intended semantics.
        BashCharSequence string = configureWord("$'$VAR abc def'");
        Assert.assertTrue(string.isStatic());
        Assert.assertEquals(TextRange.create(2, 14), string.getTextContentRange());
        Assert.assertEquals("$'abcdefghijk'", string.createEquallyWrappedString("abcdefghijk"));
    }

    public void testDynamic2() throws Exception {
        BashCharSequence string = configureWord("$'abcd $VAR'");
        Assert.assertTrue(string.isStatic());
        Assert.assertEquals(TextRange.create(2, 11), string.getTextContentRange());
        Assert.assertEquals("$'abcdefghijk'", string.createEquallyWrappedString("abcdefghijk"));
    }

    public void testUnwrapped() throws Exception {
        // A bare word: static, unwrapped, and not offered for wrapping.
        BashWord string = configureWord("abcde");
        Assert.assertTrue(string.isStatic());
        Assert.assertFalse(string.isWrapped());
        Assert.assertFalse("A simple sring should not be wrappable (no unnecessary warning about wrapping a single word)", string.isWrappable());
        Assert.assertEquals(TextRange.create(0, 5), string.getTextContentRange());
        Assert.assertEquals("abcdefghijk", string.createEquallyWrappedString("abcdefghijk"));
    }

    public void testUnwrappedDynamic() throws Exception {
        // A word containing a parameter expansion is dynamic and unwrappable.
        BashWord string = configureWord("abcde${ABC}abc");
        Assert.assertFalse(string.isStatic());
        Assert.assertFalse(string.isWrapped());
        Assert.assertFalse(string.isWrappable());
        Assert.assertEquals(TextRange.create(0, 14), string.getTextContentRange());
        Assert.assertEquals("abcdefghijk", string.createEquallyWrappedString("abcdefghijk"));
    }

    /** Parses {@code text} as a Bash file and returns its first BashWord PSI element. */
    @NotNull
    private BashWord configureWord(String text) {
        PsiFile file = myFixture.configureByText(BashFileType.BASH_FILE_TYPE, text);
        BashWord string = PsiTreeUtil.findChildOfType(file, BashWord.class);
        Assert.assertNotNull(string);
        return string;
    }
}
maxmarkus/console | service-catalog-ui/src/components/ServiceClassList/styled.js | <filename>service-catalog-ui/src/components/ServiceClassList/styled.js<gh_stars>0
import styled from 'styled-components';
import { media } from '@kyma-project/react-components';
// Styled-components for the service class list view. CSS bodies are left
// exactly as-is; only descriptive comments are added.

// Outer flex row for the whole list; collapses to a column on phones when
// `phoneRows` is set.
export const ServiceClassListWrapper = styled.div`
  box-sizing: border-box;
  width: 100%;
  align-items: flex-start;
  padding: 0;
  display: flex;
  flex-flow: row nowrap;
  ${props => (props.phoneRows ? media.phone`flex-flow: column nowrap;` : '')};
`;

// Wrapping flex container holding the individual service class cards.
export const CardsWrapper = styled.div`
  box-sizing: border-box;
  display: flex;
  flex-flow: row wrap;
  width: 100%;
  padding: 0 20px 30px 20px;
`;

// Muted description text shown above the list.
export const ServiceClassDescription = styled.div`
  color: #74777a;
  font-size: 16px;
  text-align: left;
  padding: 30px 30px 0;
`;

// Horizontal grid of status badges (all items on one grid row).
export const StatusesList = styled.ul`
  margin: 0 0 0 10px;
  align-items: center;
  display: grid;
  grid-gap: 3px;
`;

// Single status badge cell; tightens padding of the fundamental-ui badge.
export const StatusWrapper = styled.li`
  grid-row: 1;

  &:first-child {
    margin-left: 0;
  }

  .fd-badge {
    padding: 1px 3px;
  }
`;

// Centered placeholder shown when no service classes match.
export const EmptyList = styled.div`
  width: 100%;
  font-family: '72';
  text-align: center;
  font-size: 20px;
  color: #32363a;
  margin: 50px 0;
`;
victorsoares96/checklist-frontend | src/app/pages/Checklists/Available/index.js | import React, { useEffect, useState } from 'react';
import {
Box,
CssBaseline,
Breadcrumbs,
Typography,
} from '@material-ui/core';
import useChecklist from '../../../utils/useChecklist';
import useAuth from '../../../utils/useAuth';
import ChecklistsList from '../../../shared/components/ChecklistsList';
import LinearLoading from '../../../shared/components/LinearLoading';
const AvailableChecklists = () => {
const { user } = useAuth();
const { listChecklists, loading: loadingChecklist } = useChecklist();
const [availableChecklists, setAvailableChecklists] = useState([]);
useEffect(() => {
async function loadAvailableChecklists() {
let filteredChecklists = [];
const checklists = await listChecklists();
if(user.type === 'admin' || user.type === 'diretor') {
const filter = checklists.filter(checklist => {
return checklist.ativo === true;
});
setAvailableChecklists(filter);
}
else {
checklists.map((checklist) => {
checklist.ativo && (
checklist.permissions.write.map(cargoID => {
if(cargoID === user.funcao) {
filteredChecklists.push(checklist);
}
})
)
});
setAvailableChecklists(filteredChecklists);
}
}
loadAvailableChecklists();
}, [listChecklists, user.funcao, user.type]);
if(loadingChecklist) return <LinearLoading />
return (
<Box>
<CssBaseline />
<Breadcrumbs aria-label="breadcrumb">
<Typography color='inherit' style={{fontWeight: 600}}>Checklist's Disponíveis</Typography>
</Breadcrumbs>
<ChecklistsList data={availableChecklists} storageType='online' />
</Box>
);
}
export default AvailableChecklists; |
dilawar/pypet | pypet/tests/unittests/brian2tests/module_test.py | __author__ = 'robert'
import sys
import unittest
try:
import brian2
import pypet.brian2
from pypet.brian2 import *
except ImportError as exc:
#print('Import Error: %s' % str(exc))
brian2 = None
from pypet.tests.testutils.ioutils import get_root_logger, parse_args, run_suite
import inspect
@unittest.skipIf(brian2 is None, 'Can only be run with brian!')
class TestAllBrian2Import(unittest.TestCase):
    """Smoke tests that ``pypet.brian2``'s public API is importable and complete."""

    # Tags consumed by pypet's suite runner to select/filter tests.
    tags = 'unittest', 'brian2', 'import'

    def test_import_star(self):
        # Every name advertised in __all__ must resolve after
        # `from pypet.brian2 import *` (done at module import time above).
        for class_name in pypet.brian2.__all__:
            logstr = 'Evaluauting %s: %s' % (class_name, repr(eval(class_name)))
            get_root_logger().info(logstr)

    def test_if_all_is_complete(self):
        # Conversely, every class or function defined in the module must be
        # exported through __all__.
        for item in pypet.brian2.__dict__.values():
            if inspect.isclass(item) or inspect.isfunction(item):
                self.assertTrue(item.__name__ in pypet.brian2.__all__)
if __name__ == '__main__':
    # Allow running this test module directly through pypet's suite runner.
    opt_args = parse_args()
    run_suite(**opt_args)
kodeah/p3Server | src/main/java/utils/scripts/ScriptBuilder.java | <filename>src/main/java/utils/scripts/ScriptBuilder.java<gh_stars>0
package utils.scripts;
import java.util.LinkedList;
/**
 * Mutable builder that accumulates script lines (a command plus its
 * parameters) and produces a {@link Script}.
 */
public class ScriptBuilder {

    // Ordered lines; each element is a command followed by its parameters.
    private final LinkedList< String[] > lines = new LinkedList<>();

    /** Appends one line and returns this builder for chaining. */
    public ScriptBuilder appendLine( String... commandAndParameters ) {
        lines.addLast( commandAndParameters );
        return this;
    }

    /**
     * Builds a {@link Script} from the accumulated lines.
     * NOTE(review): the builder's live list is handed to Script -- confirm
     * Script copies it, otherwise later appendLine calls mutate the Script.
     */
    public Script toScript() {
        return new Script( lines );
    }
}
|
KentAlojado/codescholar-beta | src/pages/tutorial-javascript/lesson-9.js | import React from 'react'
import "font-awesome/css/font-awesome.min.css";
import Layout from '../../components/layout';
import HTMLImgA from "../../images/jscript-9-1.jpg";
import "font-awesome/css/font-awesome.min.css";
const TutorialJScriptPage9 = () => (
<Layout>
<div className="wrapping tutorial">
<div className="third-wrap">
<div className="one-third">
<div className="tutorial-navigation">
<h4 className="media-heading course-title">Basics</h4>
<ol className="lesson-nav">
<li><a href="/tutorial-javascript/lesson-1">Introduction to JavaScript</a></li>
<li><a href="/tutorial-javascript/lesson-2">Let’s Learn JavaScript</a></li>
<li><a href="/tutorial-javascript/lesson-3">JavaScript Placement</a></li>
<li><a href="/tutorial-javascript/lesson-4">Syntax</a></li>
<li><a href="/tutorial-javascript/lesson-5">Variables</a></li>
<li><a href="/tutorial-javascript/lesson-6">Arrays</a></li>
<li><a href="/tutorial-javascript/lesson-7">Operatorsm</a></li>
<li><a href="/tutorial-javascript/lesson-8">If Statements</a></li>
<li className="current-link">While Loop</li>
<li><a href="/tutorial-javascript/lesson-10">For Loop</a></li>
<li><a href="/tutorial-javascript/lesson-11">Functions</a></li>
<li><a href="/tutorial-javascript/lesson-12">Events</a></li>
<li><a href="/tutorial-javascript/lesson-13">DOM</a></li>
</ol>
</div>
</div>
<div className="two-thirds">
<h4>While Loop</h4>
<p>There will be situations where you need to execute a set of code over and over again in programming. It is not practical to write same lines of code again and again. In such situations, you would need to write loop statements to reduce the number of lines.</p>
<p>Loops checks a condition. If it returns true, a code block will run. Then the condition is chacked again and if it still returns true, the code will run again. It repeats until the loop condition returns false.</p>
<p>The most basic form of loops is While Loop. While loop will continue to run as long as condition is true. While loops are generally useful if you do not the exact number of times the loop should run. Here's an example:</p>
<p><img src={HTMLImgA} alt="tutorial"/></p>
<p>This while loop writes out multiplication table of 5. Each time the loop is run, each step is written to the page. This loop will continue to run for as long as the condition in the parentheses is true (counter is less than or equal to 10). The second statement inside while loop 'counter++;' increments the counter variable by one.</p>
<p>
<em>In this example, the condition specifies that the code should run ten times. A more typical use of a while loop would be when you do not know how many times you want the code to run. It should continue to run as long as a condition is met.</em></p>
<p className="bg-cs lft">
<a href="/tutorial-javascript/lesson-10" >NEXT LESSON</a>
</p>
</div>
</div>
</div>
</Layout>
)
export default TutorialJScriptPage9
|
jsing3r/simplib | lib/facter/cpuinfo.rb | <reponame>jsing3r/simplib
# Returns the contents of /proc/cpuinfo as a hash
# The numeric entries in this should be changed to their proper data types in
# the future
# Custom fact exposing /proc/cpuinfo as a hash keyed by "processorN".
# Numeric entries are left as strings; the 'flags' line is split into an
# array. Requires Facter >= 2 (structured facts) and a readable /proc/cpuinfo.
Facter.add('cpuinfo') do
  confine { Gem::Version.new(Facter.version) >= Gem::Version.new('2') }
  confine { File.exist?('/proc/cpuinfo') }

  setcode do
    retval = {}
    begin
      # Processors are separated by blank lines; each line is "key : value".
      File.read('/proc/cpuinfo').split(/^\s*$/).each do |section|
        # The original mapped with a dead `x = ...` assignment; the map's
        # return value is all that matters.
        procinfo = section.split("\n").map { |line| line.split(':').map(&:strip) }
        entry_hash = {}
        procinfo.each do |entry|
          next if !entry || entry.empty?
          key = entry.first.gsub(/\s+/, '_')
          value = entry.last
          # CPU flags are a whitespace-separated list; expose them as an array.
          value = value.split(/\s+/) if key == 'flags'
          entry_hash[key] = value
        end
        # Use the per-section processor id as the top-level key.
        proc_id = entry_hash.delete('processor')
        retval[%(processor#{proc_id})] = entry_hash
      end
    rescue => details
      Facter.warn("Could not gather data from /proc/cpuinfo: #{details.message}")
    end
    retval
  end
end
|
Claiyre/todo-app-with-nej | webapp/js/nej/src/util/ajax/loader/loader.js | /*
* ------------------------------------------
* 资源加载器实现文件
* @version 1.0
* @author genify(<EMAIL>)
* ------------------------------------------
*/
var f = function(){
    // variable declaration
    var _  = NEJ.P,
        _f = NEJ.F,
        _g = _('nej.g'),
        _v = _('nej.v'),
        _u = _('nej.u'),
        _t = _('nej.ut'),
        _p = _('nej.ut.j'),
        _proLoader,
        _timeout = 60000; // default timeout
    if (!!_p._$$Loader) return;
    /**
     * Resource loader base class (subclassed for scripts/styles/html).
     * De-duplicates concurrent loads of the same URL via a constructor-level
     * shared cache and fans the result out to every waiting instance.
     * @class {nej.ut.j._$$Loader}
     * @extends {nej.ut._$$Event}
     * @param {Object} optional config:
     * @config {String} version version string appended to URLs (cache busting)
     * @config {Number} timeout timeout in ms; 0 disables the timeout watchdog
     *
     * @event {onerror}   load failed; receives an error object
     * @event {onloaded}  load succeeded
     * @event {onloading} load started
     */
    _p._$$Loader = NEJ.C();
    _proLoader = _p._$$Loader._$extend(_t._$$Event);
    /**
     * Instance init: bind queue callbacks and lazily create the
     * constructor-level shared cache.
     * @protected
     * @return {Void}
     */
    _proLoader.__init = function(){
        this.__supInit();
        this.__qopt = {onerror:this.__onQueueError._$bind(this),
                       onloaded:this.__onQueueLoaded._$bind(this)};
        if (!this.constructor.__cache)
            // url : {request:script,timer:2,bind:[instance1,instance2 ... ]}
            // key : {error:0,loaded:0,total:0,bind:[instance1,instance2 ... ]}
            this.constructor.__cache = {loaded:{}};
    };
    /**
     * Instance reset: pick up version/timeout and mirror them into the
     * options used for queued child loaders.
     * @protected
     * @param {Object} optional config
     * @return {Void}
     */
    _proLoader.__reset = function(_options){
        this.__supReset(_options);
        this.__version = _options.version;
        this.__timeout = _options.timeout;
        this.__qopt.version = this.__version;
        this.__qopt.timeout = this.__timeout;
    };
    /**
     * Remove an entry from the shared cache.
     * @protected
     * @param {String} cache key (URL or queue key)
     * @return {Void}
     */
    _proLoader.__delLoadData = function(_key){
        delete this.constructor.__cache[_key];
    };
    /**
     * Read an entry from the shared cache.
     * @protected
     * @param {String} cache key
     * @return {Object} cached load info
     */
    _proLoader.__getLoadData = function(_key){
        return this.constructor.__cache[_key];
    };
    /**
     * Write an entry into the shared cache.
     * @protected
     * @param {String} cache key
     * @param {Object} load info
     * @return {Void}
     */
    _proLoader.__setLoadData = function(_key,_data){
        this.constructor.__cache[_key] = _data;
    };
    /**
     * Create the DOM node used to fetch the resource; abstract, implemented
     * by concrete subclasses (script/link element).
     * @protected
     * @return {Script|Link} node
     */
    _proLoader.__getRequest = _f;
    /**
     * Detach event handlers from the request node.
     * @protected
     * @param {Script|Link} node
     * @return {Void}
     */
    _proLoader.__clearRequest = function(_request){
        _v._$clearEvent(_request);
    };
    /**
     * Kick off the request; default implementation sets src and appends the
     * node to document.head (subclasses may override).
     * @protected
     * @param {Script|Link} _request node
     * @return {Void}
     */
    _proLoader.__doRequest = function(_request){
        _request.src = this.__url;
        document.head.appendChild(_request);
    };
    /**
     * Cleanup after a load finished (either way): stop the watchdog timer,
     * detach handlers, drop the cache entry and mark the URL as loaded.
     * @protected
     * @return {Void}
     */
    _proLoader.__doClear = function(){
        var _cache = this.__getLoadData(this.__url);
        if (!_cache) return;
        window.clearTimeout(_cache.timer);
        this.__clearRequest(_cache.request);
        delete _cache.bind;
        delete _cache.timer;
        delete _cache.request;
        this.__delLoadData(this.__url);
        this.__getLoadData('loaded')[this.__url] = !0;
    };
    /**
     * Dispatch an event to every instance waiting on this URL, then recycle
     * each instance. Callback exceptions are swallowed so one bad listener
     * cannot starve the others.
     * @protected
     * @param {String} event name
     * @return {Void}
     */
    _proLoader.__doCallback = function(_name){
        var _cache = this.__getLoadData(this.__url);
        if (!_cache) return;
        var _list = _cache.bind;
        this.__doClear();
        if (!!_list&&_list.length>0){
            var _instance;
            while(_list.length){
                _instance = _list.shift();
                try{
                    _instance._$dispatchEvent(_name,arguments[1]);
                }catch(ex){
                    // ignore
                }
                _instance._$recycle();
            }
        }
    };
    /**
     * Load-failure handler (also used by the timeout watchdog).
     * @protected
     * @param {Object} error info
     * @return {Void}
     */
    _proLoader.__onError = function(_error){
        this.__doCallback('onerror',_error);
    };
    /**
     * Load-success handler.
     * @protected
     * @return {Void}
     */
    _proLoader.__onLoaded = function(){
        this.__doCallback('onloaded');
    };
    /**
     * Load one resource of a queue with a child loader instance.
     * @protected
     * @param {String} resource URL
     * @return {Void}
     */
    _proLoader.__doLoadQueue = function(_url){
        this.constructor._$allocate(this.__qopt)._$load(_url);
    };
    /**
     * Track queue progress; once every queued resource reported back,
     * dispatch 'onerror' if any failed, otherwise 'onloaded'.
     * @protected
     * @return {Void}
     */
    _proLoader.__onQueueCheck = function(_error){
        var _cache = this.__getLoadData(this.__key);
        if (!_cache) return;
        if (!!_error)
            _cache.error++;
        _cache.loaded ++;
        if (_cache.loaded<_cache.total) return;
        this.__delLoadData(this.__key);
        this._$dispatchEvent(_cache.error>0?'onerror':'onloaded');
    };
    /**
     * Queued child loader failed.
     * @protected
     * @param {Object} error info
     * @return {Void}
     */
    _proLoader.__onQueueError = function(_error){
        this.__onQueueCheck(!0);
    };
    /**
     * Queued child loader succeeded.
     * @protected
     * @return {Void}
     */
    _proLoader.__onQueueLoaded = function(){
        this.__onQueueCheck();
    };
    /**
     * Load a single resource.<br/>
     * Example:
     * [code]
     *   var _p = NEJ.P('nej.ut.j');
     *   // load the given html with a 10s timeout
     *   var _loader = _p._$$HtmlLoader._$allocate({timeout:10000,
     *       onloaded:function(){
     *           // resource loaded successfully
     *       }
     *   });
     *   // absolute path, or a path relative to the current page
     *   _loader._$load('../../../html/util/formTest.html');
     *   // load the given script with a 20s timeout
     *   var _loader = _p._$$ScriptLoader._$allocate({timeout:20000,
     *       onloaded:function(){
     *           // resource loaded successfully
     *       }
     *   });
     *   // absolute path, or a path relative to the current page
     *   _loader._$load('../../../javascript/log.js');
     *   // load the given style with a 30s timeout
     *   var _loader = _p._$$StyleLoader._$allocate({timeout:30000,
     *       onloaded:function(){
     *           // resource loaded successfully
     *       }
     *       // absolute path, or a path relative to the current page
     *       _loader._$load('../../../base/qunit.css');
     *   });
     * [/code]
     * @method {_$load}
     * @param {String} resource URL
     * @return {nej.ut.j._$$Loader}
     */
    _proLoader._$load = function(_url){
        _url = _u._$absolute(_url);
        if (!_url){
            this._$dispatchEvent('onerror',{
                code:_g._$CODE_NOTASGN,
                message:'请指定要载入的资源地址!'
            });
            return this;
        };
        this.__url = _url;
        if (!!this.__version)
            this.__url += (this.__url.indexOf('?')<0?'?':'&')+this.__version;
        // already loaded before: fire immediately and recycle
        if (this.__getLoadData('loaded')[this.__url]){
            try{
                this._$dispatchEvent('onloaded');
            }catch(ex){
                // ignore
            }
            this._$recycle();
            return this;
        }
        // a load for this URL is already in flight: just join its wait list
        var _cache = this.__getLoadData(this.__url),_request;
        if (!!_cache){
            _cache.bind.unshift(this);
            _cache.timer = window.clearTimeout(_cache.timer);
        }else{
            _request = this.__getRequest();
            _cache = {request:_request,bind:[this]};
            this.__setLoadData(this.__url,_cache);
            _v._$addEvent(_request,'load',this.__onLoaded._$bind(this));
            _v._$addEvent(_request,'error',this.__onError._$bind(this,{
                code:_g._$CODE_ERRSERV,
                message:'无法加载指定资源文件['+this.__url+']!'
            }));
        }
        // (re)arm the timeout watchdog unless explicitly disabled with 0
        if (this.__timeout!=0)
            _cache.timer = window.setTimeout(
                this.__onError._$bind(this,{
                    code:_g._$CODE_TIMEOUT,
                    message:'指定资源文件['+this.__url+']载入超时!'
                }),this.__timeout||_timeout);
        if (!!_request)
            this.__doRequest(_request);
        this._$dispatchEvent('onloading');
        return this;
    };
    /**
     * Load a queue of resources.<br/>
     * Example:
     * [code]
     *   var _p = NEJ.P('nej.ut.j');
     *   var _loader = _p._$$HtmlLoader._$allocate({
     *       onloaded:function(){
     *           // queue loaded successfully
     *       }
     *   });
     *   // list of absolute paths, or paths relative to the current page
     *   var _list = ['../../../html/util/formTest.html','../../../html/util/cacheTest.html']
     *   _loader._$queue(_list);
     * [/code]
     * @method {_$queue}
     * @param {Array} resource URL list
     * @return {nej.ut.j._$$Loader}
     */
    _proLoader._$queue = function(_list){
        if (!_list||!_list.length){
            this._$dispatchEvent('onerror',{
                code:_g._$CODE_NOTASGN,
                message:'请指定要载入的资源队列!'
            });
            return this;
        }
        this.__key = _u._$randNumberString();
        var _cache = {error:0,loaded:0,total:_list.length};
        this.__setLoadData(this.__key,_cache);
        for(var i=0,l=_list.length;i<l;i++){
            // skip falsy entries, shrinking the expected total accordingly
            if (!_list[i]){
                _cache.total--;
                continue;
            }
            this.__doLoadQueue(_list[i]);
        }
        this._$dispatchEvent('onloading');
        return this;
    };
};
define('{lib}util/ajax/loader/loader.js',
      ['{lib}base/constant.js'
      ,'{lib}base/event.js'
      ,'{lib}util/event.js'],f);
BessPtck/MINDS-i-Drone | src/input/APM/MPU6000.h | #include "input/InertialManager.h"
#include "input/Sensor.h"
#include "input/SPIcontroller.h"
#include "input/AxisTranslator.h"
#include "util/byteConv.h"
#include "util/LTATune.h"
#include <SPI.h>
#include "MPUregs.h"
//MPU6000 Accelerometer and Gyroscope on SPI
namespace{
    //this holds raw mpu data (accl x,y,z; temp; gyro x,y,z);
    // The byte overlay mirrors the device's 14-register burst-read layout;
    // readSensors() stores high byte then low byte, so the int16 view
    // assumes a little-endian host -- TODO confirm on target MCU.
    union rawData{
        struct { uint8_t bytes[14]; };
        struct { int16_t accl[3], temp, gyro[3]; };
    };
}
// SPI driver for the InvenSense MPU-6000 accelerometer + gyroscope, exposed
// through the InertialVec sensor interface.
class MPU6000 : public InertialVec {
protected:
    static const uint8_t APM26_CS_PIN = 53;          // default chip-select pin (APM 2.6 board)
    static const uint16_t CAL_SAMPLE_SIZE = 200; //for gyro calibration
    static const float SAMPLE_RATE;//sample at 200Hz
    static const float dPlsb;//+- 2000 dps per least sig bit, in ms
    static const float GYRO_CONVERSION_FACT;         // raw counts -> rad/s (dPlsb * PI/180)
    SPIcontroller spiControl;                        // owns chip select + SPI settings
    LTATune LTA;                                     // accelerometer offset/scale calibration
    bool writeTo(uint8_t addr, uint8_t msg);
    bool writeTo(uint8_t addr, uint8_t len, uint8_t* msg);
    bool readFrom(uint8_t addr, uint8_t len, uint8_t* data);
    rawData readSensors(); //optimized for just sensor data
public:
    //clock speed 8E6 instead of default(4E6) makes readSensors about 50% faster
    MPU6000()
        : spiControl(APM26_CS_PIN, SPISettings(8E6, MSBFIRST, SPI_MODE0)) {}
    MPU6000(uint8_t chip_select)
        : spiControl(chip_select , SPISettings(8E6, MSBFIRST, SPI_MODE0)) {}
    void begin();
    void end();
    Sensor::Status status();
    void calibrate();
    void update(InertialManager& man, Translator axis);
    //end of sensor interface
    void getSensors(int16_t (&accl)[3], int16_t (&gyro)[3]);
    void tuneAccl(LTATune t);
    float acclX();
    float acclY();
    float acclZ();
    float gyroX();
    float gyroY();
    float gyroZ();
    // pitch()/roll() are declared but not defined in this header --
    // presumably implemented elsewhere; TODO confirm before use.
    float pitch();
    float roll();
};
// Static member definitions.
const float MPU6000::SAMPLE_RATE = 200; //sample at 200Hz
// NOTE(review): value is 2*(2/65535); the "in ms" wording of the header
// comment looks stale -- confirm intended units.
const float MPU6000::dPlsb = 2.f*(2.f/65535.f);
const float MPU6000::GYRO_CONVERSION_FACT = 2.f*(2.f/65535.f) *PI/180.l;
// Burst-read all 14 sensor registers (accl x,y,z; temp; gyro x,y,z) in a
// single chip-select window starting at REG_DATA_START.
rawData
MPU6000::readSensors(){
    //Note: its faster to read and ignore temp than make two transfers
    //Note: this is unrolled for efficiency
    // Each 16-bit value arrives MSB first; bytes are stored high-then-low so
    // the int16 overlay reads correctly on a little-endian host.
    rawData data;
    spiControl.capture();
    SPI.transfer(REG_DATA_START | 0x80); //last bit set to specify a read
    //the order in memory is accl x,y,z; temp; gyro x,y,z
    data.bytes[1]  = SPI.transfer(0);    data.bytes[0]  = SPI.transfer(0);
    data.bytes[3]  = SPI.transfer(0);    data.bytes[2]  = SPI.transfer(0);
    data.bytes[5]  = SPI.transfer(0);    data.bytes[4]  = SPI.transfer(0);
    data.bytes[7]  = SPI.transfer(0);    data.bytes[6]  = SPI.transfer(0);
    data.bytes[9]  = SPI.transfer(0);    data.bytes[8]  = SPI.transfer(0);
    data.bytes[11] = SPI.transfer(0);    data.bytes[10] = SPI.transfer(0);
    data.bytes[13] = SPI.transfer(0);    data.bytes[12] = SPI.transfer(0);
    spiControl.release();
    return data;
}
// Read `len` bytes starting at device register `addr` into `data`.
// Returns the SPIcontroller release status.
bool
MPU6000::readFrom(uint8_t addr, uint8_t len, uint8_t* data){
    spiControl.capture();
    SPI.transfer(addr | 0x80); //last bit set to specify a read
    for(int i=0; i<len; i++) data[i] = SPI.transfer(0);
    return spiControl.release();
}
// Write `len` bytes from `msg` starting at device register `addr`.
bool
MPU6000::writeTo(uint8_t addr, uint8_t len, uint8_t* msg){
    spiControl.capture();
    SPI.transfer(addr & ~0x80); //clear last bit to specify a write
    for(int i=0; i<len; i++) SPI.transfer(msg[i]);
    return spiControl.release();
}
// Convenience single-byte register write.
bool
MPU6000::writeTo(uint8_t addr, uint8_t msg){
    return writeTo(addr, 1, &msg);
}
// ---- public functions below -------------------------------------------------
// Install the accelerometer calibration (offsets/scales) applied in update().
void
MPU6000::tuneAccl(LTATune t){
    LTA = t;
}
// Reset and configure the chip: clock source, SPI-only mode, sample rate,
// digital low-pass filter, and full-scale ranges.
void
MPU6000::begin(){
    // Turn off barometer SPI line
    // Without this, running the MPU without instancing a MS5611 will fail
    // Only applies to APM2.* hardware though
    pinMode(40, OUTPUT);
    digitalWrite(40, HIGH);

    writeTo(REG_PWR_MGMT_1 , BIT_H_RESET); //chip reset
    delay(100); // let the reset settle before reconfiguring
    writeTo(REG_PWR_MGMT_1 , MPU_CLK_SEL_PLLGYROZ); //set GyroZ clock
    writeTo(REG_USER_CTRL  , BIT_I2C_DIS); //Disable I2C as recommended on datasheet
    writeTo(REG_SMPLRT_DIV , ((1000/SAMPLE_RATE)-1) ); // Set Sample rate; 1khz/(value+1) = (rate)Hz
    writeTo(REG_CONFIG     , BITS_DLPF_CFG_188HZ); //set low pass filter to 188hz
    writeTo(REG_GYRO_CONFIG, BITS_FS_2000DPS); //Gyro full scale +-2000 deg/s (old comment wrongly said 1000)
    writeTo(REG_ACCEL_CONFIG, 0x08); //Accel scale 4g
}
// No teardown required; present to satisfy the sensor interface.
void
MPU6000::end(){
}
// Probe WHO_AM_I to check the device is present and responding.
Sensor::Status
MPU6000::status(){
    /*#MPUFAIL No response or incorrect WHO_AM_I response from the MPU */
    //poll WHO_AM_I for the correct value to see if its an MPU is present
    uint8_t buf[1];
    readFrom(REG_WHOAMI, 1, buf);
    if(buf[0] == WHOIIS) return Sensor::OK;
    return Sensor::BAD("MPUFAIL");
}
// NOTE(review): gyro bias calibration is not implemented yet (empty body).
void
MPU6000::calibrate(){
    //calibrate gyro
}
// Pull one sample, apply the accelerometer calibration and the gyro unit
// conversion (raw counts -> rad/s), then publish both vectors through the
// axis translator into the InertialManager.
void
MPU6000::update(InertialManager& man, Translator axis){
    rawData data = readSensors();

    float accl[3];
    LTA.calibrate<int16_t>(data.accl, accl);

    float gyro[3];
    for(int i=0; i<3; i++){
        gyro[i] = (((float)data.gyro[i])*GYRO_CONVERSION_FACT);
    }

    man.gyro = axis(gyro);
    man.accl = axis(accl);
}
// Copy out raw, uncalibrated accel/gyro counts straight from the device.
void
MPU6000::getSensors(int16_t (&accl)[3], int16_t (&gyro)[3]){
    rawData data = readSensors();
    for (int i = 0; i < 3; i++){
        accl[i] = data.accl[i];
        gyro[i] = data.gyro[i];
    }
}
// Single-axis accessors. Note: each call performs a full 14-byte burst read
// of the sensor, so prefer getSensors()/update() when several axes are
// needed from the same sample. Accel values pass through the LTA
// calibration; gyro values are converted to rad/s.
float
MPU6000::acclX(){
    rawData data = readSensors();
    return LTA.apply((float)data.accl[0],0);
}
float
MPU6000::acclY(){
    rawData data = readSensors();
    return LTA.apply((float)data.accl[1],1);
}
float
MPU6000::acclZ(){
    rawData data = readSensors();
    return LTA.apply((float)data.accl[2],2);
}
float
MPU6000::gyroX(){
    rawData data = readSensors();
    return (((float)data.gyro[0])*GYRO_CONVERSION_FACT);
}
float
MPU6000::gyroY(){
    rawData data = readSensors();
    return (((float)data.gyro[1])*GYRO_CONVERSION_FACT);
}
float
MPU6000::gyroZ(){
    rawData data = readSensors();
    return (((float)data.gyro[2])*GYRO_CONVERSION_FACT);
}
|
Wzzzx/chromium-crosswalk | blimp/common/compositor/blimp_task_graph_runner.h | // Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef BLIMP_COMMON_COMPOSITOR_BLIMP_TASK_GRAPH_RUNNER_H_
#define BLIMP_COMMON_COMPOSITOR_BLIMP_TASK_GRAPH_RUNNER_H_

#include "base/macros.h"
#include "base/threading/simple_thread.h"
#include "blimp/common/blimp_common_export.h"
#include "cc/raster/single_thread_task_graph_runner.h"

namespace blimp {

// TaskGraphRunner that runs on a single thread. See
// cc::SingleThreadTaskGraphRunner for details. Used by client's compositor and
// engine's dummy UI context factory. Thin declaration-only wrapper; the
// ctor/dtor are defined in the corresponding .cc file.
class BLIMP_COMMON_EXPORT BlimpTaskGraphRunner
    : public cc::SingleThreadTaskGraphRunner {
 public:
  BlimpTaskGraphRunner();
  ~BlimpTaskGraphRunner() override;

 private:
  DISALLOW_COPY_AND_ASSIGN(BlimpTaskGraphRunner);
};

}  // namespace blimp

#endif  // BLIMP_COMMON_COMPOSITOR_BLIMP_TASK_GRAPH_RUNNER_H_
|
gajubadge11/hackerrank-3 | python/quicksort2.py | def print_arr(arr):
print(" ".join(map(str, arr)))
def quicksort(arr, pivot_i=0):
    """Sort ``arr`` with quicksort, printing each merged sub-result.

    Bug fix: the previous version emitted exactly one copy of the pivot
    value, so any duplicates of the pivot were silently dropped (e.g.
    [3, 1, 3] -> [1, 3]). All elements equal to the pivot are now kept.

    :param arr: list of comparable values (not mutated)
    :param pivot_i: index of the pivot element within ``arr``
    :return: a new sorted list
    """
    if len(arr) <= 1:
        return arr
    pivot = arr[pivot_i]
    left, right = divide(arr, pivot)
    # Keep every occurrence of the pivot value, not just one.
    middle = [x for x in arr if x == pivot]
    sorted_arr = quicksort(left) + middle + quicksort(right)
    # Emit the intermediate array (inlined so this block is self-contained;
    # equivalent to print_arr(sorted_arr)).
    print(" ".join(map(str, sorted_arr)))
    return sorted_arr


def divide(arr, pivot):
    """Partition ``arr`` into (values < pivot, values > pivot).

    Elements equal to ``pivot`` belong to neither list; the caller is
    responsible for re-inserting them between the two halves.
    """
    left = []
    right = []
    for i in arr:
        if i < pivot:
            left.append(i)
        elif i > pivot:
            right.append(i)
    return left, right
# Read problem input: the element count (only consumed, not otherwise used)
# followed by the space-separated array, then sort it.
n = int(input().strip())
arr = list(map(int, input().strip().split(" ")))
quicksort(arr)
|
flexiooss/codingmatters-value-objects | cdm-value-objects-generation/src/test/java/org/codingmatters/value/objects/generation/ToStringTest.java | package org.codingmatters.value.objects.generation;
import org.codingmatters.tests.compile.CompiledCode;
import org.codingmatters.tests.compile.FileHelper;
import org.codingmatters.tests.compile.helpers.ClassLoaderHelper;
import org.codingmatters.tests.compile.helpers.helpers.ClassHelper;
import org.codingmatters.tests.compile.helpers.helpers.ObjectHelper;
import org.codingmatters.value.objects.spec.PropertyCardinality;
import org.codingmatters.value.objects.spec.Spec;
import org.codingmatters.value.objects.spec.TypeKind;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import static org.codingmatters.tests.reflect.ReflectMatchers.aPackagePrivate;
import static org.codingmatters.tests.reflect.ReflectMatchers.aPublic;
import static org.codingmatters.value.objects.spec.PropertySpec.property;
import static org.codingmatters.value.objects.spec.PropertyTypeSpec.type;
import static org.codingmatters.value.objects.spec.Spec.spec;
import static org.codingmatters.value.objects.spec.ValueSpec.valueSpec;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
/**
* Created by nelt on 9/27/16.
*/
// Verifies that generated value-object implementations render a useful
// toString(): plain properties, enums, enum lists, byte[] contents, nested
// values, null properties and the no-property case.
public class ToStringTest {
    @Rule
    public TemporaryFolder dir = new TemporaryFolder();
    @Rule
    public FileHelper fileHelper = new FileHelper();

    // Spec with three value objects: "val" (assorted property kinds),
    // "noPropertyVal" (empty) and "complexVal" (nests a "val").
    private final Spec spec = spec()
            .addValue(valueSpec().name("val")
                    .addProperty(property().name("prop1").type(type().typeRef(String.class.getName()).typeKind(TypeKind.JAVA_TYPE)))
                    .addProperty(property().name("prop2").type(type().typeRef(String.class.getName()).typeKind(TypeKind.JAVA_TYPE)))
                    .addProperty(property().name("prop3").type(type().typeKind(TypeKind.ENUM).enumValues("A", "B", "C")))
                    .addProperty(property().name("prop4").type(type().typeKind(TypeKind.ENUM).enumValues("A", "B", "C")
                            .cardinality(PropertyCardinality.LIST)))
                    .addProperty(property().name("binary").type(type().typeRef(byte[].class.getName()).typeKind(TypeKind.JAVA_TYPE)))
            )
            .addValue(valueSpec().name("noPropertyVal"))
            .addValue(valueSpec().name("complexVal")
                    .addProperty(property().name("prop").type(type().typeRef("val").typeKind(TypeKind.IN_SPEC_VALUE_OBJECT)))
            )
            .build();

    private ClassLoaderHelper classes;

    // Generates the value objects from the spec and compiles them for each test.
    @Before
    public void setUp() throws Exception {
        new SpecCodeGenerator(this.spec, "org.generated", dir.getRoot()).generate();
        this.classes = CompiledCode.builder().source(this.dir.getRoot()).compile().classLoader();
    }

    // toString() must be a public, parameterless, String-returning method on
    // the package-private implementation class.
    @Test
    public void signature() throws Exception {
        assertThat(classes.get("org.generated.ValImpl").get(),
                is(aPackagePrivate().class_()
                        .with(aPublic().method()
                                .named("toString")
                                .withoutParameters()
                                .returning(String.class)
                        )
                )
        );
    }

    // All property kinds set: strings, enum, enum list, byte[] (rendered as
    // the numeric byte values of "binary").
    @Test
    public void simple() throws Exception {
        this.fileHelper.printFile(this.dir.getRoot(), "ValImpl.java");
        ClassHelper enum3 = classes.get("org.generated.Val$Prop3");
        ClassHelper enum4 = classes.get("org.generated.Val$Prop4");
        ObjectHelper prop4Value = enum4.array().newArray(
                enum4.call("valueOf", String.class).with("A").get(),
                enum4.call("valueOf", String.class).with("B").get()
        );
        Object aValue = classes.get("org.generated.Val").call("builder")
                .call("prop1", String.class).with("v1")
                .call("prop2", String.class).with("v2")
                .call("prop3", enum3.get()).with(enum3.call("valueOf", String.class).with("A").get())
                .call("prop4", enum4.array().get()).with(prop4Value.get())
                .call("binary", byte[].class).with("binary".getBytes())
                .call("build").get();
        assertThat(aValue.toString(), is("Val{prop1=v1, prop2=v2, prop3=A, prop4=[A, B], binary=[98, 105, 110, 97, 114, 121]}"));
    }

    // Unset properties must render literally as "null".
    @Test
    public void simpleWithNulls() throws Exception {
        Object aValue = classes.get("org.generated.Val").call("builder")
                .call("prop1", String.class).with("v1")
                .call("build").get();
        assertThat(aValue.toString(), is("Val{prop1=v1, prop2=null, prop3=null, prop4=null, binary=null}"));
    }

    // Nested value objects delegate to the inner object's toString().
    @Test
    public void complex() throws Exception {
        Object builded = classes.get("org.generated.Val").call("builder")
                .call("prop1", String.class).with("v1")
                .call("prop2", String.class).with("v2")
                .call("build").get();
        Object complexValue = classes.get("org.generated.ComplexVal").call("builder")
                .call("prop", classes.get("org.generated.Val").get()).with(builded)
                .call("build").get()
                ;
        assertThat(complexValue.toString(), is("ComplexVal{prop=Val{prop1=v1, prop2=v2, prop3=null, prop4=null, binary=null}}"));
    }

    // A value object with no properties still prints its name and braces.
    @Test
    public void noPropertyValue() throws Exception {
        Object aValue = classes.get("org.generated.NoPropertyVal").call("builder").call("build").get();
        assertThat(aValue.toString(), is("NoPropertyVal{}"));
    }
}
|
sezero/windows-sdk-headers | Include/10.0.19041.0/cppwinrt/winrt/impl/Windows.AI.MachineLearning.1.h | <filename>Include/10.0.19041.0/cppwinrt/winrt/impl/Windows.AI.MachineLearning.1.h<gh_stars>1-10
// C++/WinRT v2.0.190620.2
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
#ifndef WINRT_Windows_AI_MachineLearning_1_H
#define WINRT_Windows_AI_MachineLearning_1_H
#include "winrt/impl/Windows.AI.MachineLearning.0.h"
namespace winrt::Windows::AI::MachineLearning
{
struct __declspec(empty_bases) IImageFeatureDescriptor :
Windows::Foundation::IInspectable,
impl::consume_t<IImageFeatureDescriptor>
{
IImageFeatureDescriptor(std::nullptr_t = nullptr) noexcept {}
IImageFeatureDescriptor(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) IImageFeatureValue :
Windows::Foundation::IInspectable,
impl::consume_t<IImageFeatureValue>
{
IImageFeatureValue(std::nullptr_t = nullptr) noexcept {}
IImageFeatureValue(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) IImageFeatureValueStatics :
Windows::Foundation::IInspectable,
impl::consume_t<IImageFeatureValueStatics>
{
IImageFeatureValueStatics(std::nullptr_t = nullptr) noexcept {}
IImageFeatureValueStatics(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ILearningModel :
Windows::Foundation::IInspectable,
impl::consume_t<ILearningModel>
{
ILearningModel(std::nullptr_t = nullptr) noexcept {}
ILearningModel(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ILearningModelBinding :
Windows::Foundation::IInspectable,
impl::consume_t<ILearningModelBinding>
{
ILearningModelBinding(std::nullptr_t = nullptr) noexcept {}
ILearningModelBinding(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ILearningModelBindingFactory :
Windows::Foundation::IInspectable,
impl::consume_t<ILearningModelBindingFactory>
{
ILearningModelBindingFactory(std::nullptr_t = nullptr) noexcept {}
ILearningModelBindingFactory(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ILearningModelDevice :
Windows::Foundation::IInspectable,
impl::consume_t<ILearningModelDevice>
{
ILearningModelDevice(std::nullptr_t = nullptr) noexcept {}
ILearningModelDevice(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ILearningModelDeviceFactory :
Windows::Foundation::IInspectable,
impl::consume_t<ILearningModelDeviceFactory>
{
ILearningModelDeviceFactory(std::nullptr_t = nullptr) noexcept {}
ILearningModelDeviceFactory(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ILearningModelDeviceStatics :
Windows::Foundation::IInspectable,
impl::consume_t<ILearningModelDeviceStatics>
{
ILearningModelDeviceStatics(std::nullptr_t = nullptr) noexcept {}
ILearningModelDeviceStatics(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ILearningModelEvaluationResult :
Windows::Foundation::IInspectable,
impl::consume_t<ILearningModelEvaluationResult>
{
ILearningModelEvaluationResult(std::nullptr_t = nullptr) noexcept {}
ILearningModelEvaluationResult(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ILearningModelFeatureDescriptor :
Windows::Foundation::IInspectable,
impl::consume_t<ILearningModelFeatureDescriptor>
{
ILearningModelFeatureDescriptor(std::nullptr_t = nullptr) noexcept {}
ILearningModelFeatureDescriptor(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ILearningModelFeatureValue :
Windows::Foundation::IInspectable,
impl::consume_t<ILearningModelFeatureValue>
{
ILearningModelFeatureValue(std::nullptr_t = nullptr) noexcept {}
ILearningModelFeatureValue(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ILearningModelOperatorProvider :
Windows::Foundation::IInspectable,
impl::consume_t<ILearningModelOperatorProvider>
{
ILearningModelOperatorProvider(std::nullptr_t = nullptr) noexcept {}
ILearningModelOperatorProvider(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ILearningModelSession :
Windows::Foundation::IInspectable,
impl::consume_t<ILearningModelSession>
{
ILearningModelSession(std::nullptr_t = nullptr) noexcept {}
ILearningModelSession(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ILearningModelSessionFactory :
Windows::Foundation::IInspectable,
impl::consume_t<ILearningModelSessionFactory>
{
ILearningModelSessionFactory(std::nullptr_t = nullptr) noexcept {}
ILearningModelSessionFactory(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ILearningModelSessionFactory2 :
Windows::Foundation::IInspectable,
impl::consume_t<ILearningModelSessionFactory2>
{
ILearningModelSessionFactory2(std::nullptr_t = nullptr) noexcept {}
ILearningModelSessionFactory2(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ILearningModelSessionOptions :
Windows::Foundation::IInspectable,
impl::consume_t<ILearningModelSessionOptions>
{
ILearningModelSessionOptions(std::nullptr_t = nullptr) noexcept {}
ILearningModelSessionOptions(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ILearningModelSessionOptions2 :
Windows::Foundation::IInspectable,
impl::consume_t<ILearningModelSessionOptions2>
{
ILearningModelSessionOptions2(std::nullptr_t = nullptr) noexcept {}
ILearningModelSessionOptions2(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ILearningModelStatics :
Windows::Foundation::IInspectable,
impl::consume_t<ILearningModelStatics>
{
ILearningModelStatics(std::nullptr_t = nullptr) noexcept {}
ILearningModelStatics(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) IMapFeatureDescriptor :
Windows::Foundation::IInspectable,
impl::consume_t<IMapFeatureDescriptor>
{
IMapFeatureDescriptor(std::nullptr_t = nullptr) noexcept {}
IMapFeatureDescriptor(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ISequenceFeatureDescriptor :
Windows::Foundation::IInspectable,
impl::consume_t<ISequenceFeatureDescriptor>
{
ISequenceFeatureDescriptor(std::nullptr_t = nullptr) noexcept {}
ISequenceFeatureDescriptor(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensor :
Windows::Foundation::IInspectable,
impl::consume_t<ITensor>,
impl::require<Windows::AI::MachineLearning::ITensor, Windows::AI::MachineLearning::ILearningModelFeatureValue>
{
ITensor(std::nullptr_t = nullptr) noexcept {}
ITensor(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorBoolean :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorBoolean>
{
ITensorBoolean(std::nullptr_t = nullptr) noexcept {}
ITensorBoolean(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorBooleanStatics :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorBooleanStatics>
{
ITensorBooleanStatics(std::nullptr_t = nullptr) noexcept {}
ITensorBooleanStatics(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorBooleanStatics2 :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorBooleanStatics2>
{
ITensorBooleanStatics2(std::nullptr_t = nullptr) noexcept {}
ITensorBooleanStatics2(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorDouble :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorDouble>
{
ITensorDouble(std::nullptr_t = nullptr) noexcept {}
ITensorDouble(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorDoubleStatics :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorDoubleStatics>
{
ITensorDoubleStatics(std::nullptr_t = nullptr) noexcept {}
ITensorDoubleStatics(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorDoubleStatics2 :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorDoubleStatics2>
{
ITensorDoubleStatics2(std::nullptr_t = nullptr) noexcept {}
ITensorDoubleStatics2(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorFeatureDescriptor :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorFeatureDescriptor>
{
ITensorFeatureDescriptor(std::nullptr_t = nullptr) noexcept {}
ITensorFeatureDescriptor(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorFloat :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorFloat>
{
ITensorFloat(std::nullptr_t = nullptr) noexcept {}
ITensorFloat(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorFloat16Bit :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorFloat16Bit>
{
ITensorFloat16Bit(std::nullptr_t = nullptr) noexcept {}
ITensorFloat16Bit(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorFloat16BitStatics :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorFloat16BitStatics>
{
ITensorFloat16BitStatics(std::nullptr_t = nullptr) noexcept {}
ITensorFloat16BitStatics(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorFloat16BitStatics2 :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorFloat16BitStatics2>
{
ITensorFloat16BitStatics2(std::nullptr_t = nullptr) noexcept {}
ITensorFloat16BitStatics2(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorFloatStatics :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorFloatStatics>
{
ITensorFloatStatics(std::nullptr_t = nullptr) noexcept {}
ITensorFloatStatics(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorFloatStatics2 :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorFloatStatics2>
{
ITensorFloatStatics2(std::nullptr_t = nullptr) noexcept {}
ITensorFloatStatics2(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorInt16Bit :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorInt16Bit>
{
ITensorInt16Bit(std::nullptr_t = nullptr) noexcept {}
ITensorInt16Bit(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorInt16BitStatics :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorInt16BitStatics>
{
ITensorInt16BitStatics(std::nullptr_t = nullptr) noexcept {}
ITensorInt16BitStatics(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorInt16BitStatics2 :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorInt16BitStatics2>
{
ITensorInt16BitStatics2(std::nullptr_t = nullptr) noexcept {}
ITensorInt16BitStatics2(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorInt32Bit :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorInt32Bit>
{
ITensorInt32Bit(std::nullptr_t = nullptr) noexcept {}
ITensorInt32Bit(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorInt32BitStatics :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorInt32BitStatics>
{
ITensorInt32BitStatics(std::nullptr_t = nullptr) noexcept {}
ITensorInt32BitStatics(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorInt32BitStatics2 :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorInt32BitStatics2>
{
ITensorInt32BitStatics2(std::nullptr_t = nullptr) noexcept {}
ITensorInt32BitStatics2(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorInt64Bit :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorInt64Bit>
{
ITensorInt64Bit(std::nullptr_t = nullptr) noexcept {}
ITensorInt64Bit(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorInt64BitStatics :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorInt64BitStatics>
{
ITensorInt64BitStatics(std::nullptr_t = nullptr) noexcept {}
ITensorInt64BitStatics(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorInt64BitStatics2 :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorInt64BitStatics2>
{
ITensorInt64BitStatics2(std::nullptr_t = nullptr) noexcept {}
ITensorInt64BitStatics2(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorInt8Bit :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorInt8Bit>
{
ITensorInt8Bit(std::nullptr_t = nullptr) noexcept {}
ITensorInt8Bit(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorInt8BitStatics :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorInt8BitStatics>
{
ITensorInt8BitStatics(std::nullptr_t = nullptr) noexcept {}
ITensorInt8BitStatics(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorInt8BitStatics2 :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorInt8BitStatics2>
{
ITensorInt8BitStatics2(std::nullptr_t = nullptr) noexcept {}
ITensorInt8BitStatics2(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorString :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorString>
{
ITensorString(std::nullptr_t = nullptr) noexcept {}
ITensorString(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorStringStatics :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorStringStatics>
{
ITensorStringStatics(std::nullptr_t = nullptr) noexcept {}
ITensorStringStatics(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorStringStatics2 :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorStringStatics2>
{
ITensorStringStatics2(std::nullptr_t = nullptr) noexcept {}
ITensorStringStatics2(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorUInt16Bit :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorUInt16Bit>
{
ITensorUInt16Bit(std::nullptr_t = nullptr) noexcept {}
ITensorUInt16Bit(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorUInt16BitStatics :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorUInt16BitStatics>
{
ITensorUInt16BitStatics(std::nullptr_t = nullptr) noexcept {}
ITensorUInt16BitStatics(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorUInt16BitStatics2 :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorUInt16BitStatics2>
{
ITensorUInt16BitStatics2(std::nullptr_t = nullptr) noexcept {}
ITensorUInt16BitStatics2(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorUInt32Bit :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorUInt32Bit>
{
ITensorUInt32Bit(std::nullptr_t = nullptr) noexcept {}
ITensorUInt32Bit(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorUInt32BitStatics :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorUInt32BitStatics>
{
ITensorUInt32BitStatics(std::nullptr_t = nullptr) noexcept {}
ITensorUInt32BitStatics(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorUInt32BitStatics2 :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorUInt32BitStatics2>
{
ITensorUInt32BitStatics2(std::nullptr_t = nullptr) noexcept {}
ITensorUInt32BitStatics2(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorUInt64Bit :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorUInt64Bit>
{
ITensorUInt64Bit(std::nullptr_t = nullptr) noexcept {}
ITensorUInt64Bit(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorUInt64BitStatics :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorUInt64BitStatics>
{
ITensorUInt64BitStatics(std::nullptr_t = nullptr) noexcept {}
ITensorUInt64BitStatics(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorUInt64BitStatics2 :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorUInt64BitStatics2>
{
ITensorUInt64BitStatics2(std::nullptr_t = nullptr) noexcept {}
ITensorUInt64BitStatics2(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorUInt8Bit :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorUInt8Bit>
{
ITensorUInt8Bit(std::nullptr_t = nullptr) noexcept {}
ITensorUInt8Bit(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorUInt8BitStatics :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorUInt8BitStatics>
{
ITensorUInt8BitStatics(std::nullptr_t = nullptr) noexcept {}
ITensorUInt8BitStatics(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
struct __declspec(empty_bases) ITensorUInt8BitStatics2 :
Windows::Foundation::IInspectable,
impl::consume_t<ITensorUInt8BitStatics2>
{
ITensorUInt8BitStatics2(std::nullptr_t = nullptr) noexcept {}
ITensorUInt8BitStatics2(void* ptr, take_ownership_from_abi_t) noexcept : Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
};
}
#endif
|
1050669722/LeetCode-Answers | Python/problem0728.py | <reponame>1050669722/LeetCode-Answers
# -*- coding: utf-8 -*-
"""
Created on Mon Jul 15 09:22:52 2019
@author: ASUS
"""
class Solution:
    def selfDividingNumbers(self, left: int, right: int) -> list:
        """Return all self-dividing numbers in the closed range [left, right].

        A self-dividing number is divisible by every one of its decimal
        digits and contains no digit 0. Inputs are positive integers
        (LeetCode 728 guarantees 1 <= left <= right).
        """
        return [k for k in range(left, right + 1)
                if self.isSelfDividingNumber(k) == 1]

    def isSelfDividingNumber(self, num):
        """Return 1 if num is self-dividing, else 0.

        Digits are taken from the decimal representation; any 0 digit (or a
        digit that does not divide num) disqualifies it. Fix: the original
        digit-extraction loop never ran for num == 0 and wrongly reported it
        as self-dividing; 0 now correctly returns 0.
        """
        for ch in str(num):
            d = int(ch)
            if d == 0 or num % d != 0:
                return 0
        return 1
# Quick manual check against the LeetCode example range.
solver = Solution()
left, right = 1, 22
print(solver.selfDividingNumbers(left, right))
bionlplab/radtext | src/radtext/cmd/neg.py | """
Usage:
neg [options] -i FILE -o FILE
Options:
--regex_patterns FILE [default: resources/patterns/regex_patterns.yml]
--ngrex_patterns FILE [default: resources/patterns/ngrex_patterns.yml]
--overwrite
--sort_anns
-o FILE
-i FILE
"""
import bioc
from radtext.core import BioCProcessor, BioCPipeline
"""
--regex_negation FILE [default: resources/patterns/regex_negation.yml]
--regex_uncertainty_pre_neg FILE [default: resources/patterns/regex_uncertainty_pre_negation.yml]
--regex_uncertainty_post_neg FILE [default: resources/patterns/regex_uncertainty_post_negation.yml]
--regex_double_neg FILE [default: resources/patterns/regex_double_negation.yml]
--ngrex_negation FILE [default: resources/patterns/ngrex_negation.yml]
--ngrex_uncertainty_pre_neg FILE [default: resources/patterns/ngrex_uncertainty_pre_negation.yml]
--ngrex_uncertainty_post_neg FILE [default: resources/patterns/ngrex_uncertainty_post_negation.yml]
--ngrex_double_neg FILE [default: resources/patterns/ngrex_double_negation.yml]
"""
import docopt
import tqdm
from radtext.cmd.cmd_utils import process_options, process_file
from radtext.models.neg.match_ngrex import NegGrexPatterns
from radtext.models.neg import NegRegexPatterns
from radtext.models.neg import NegCleanUp
from radtext.models.neg.neg import BioCNeg
def main():
    """CLI entry point: run regex + ngrex negation detection over a BioC file."""
    args = docopt.docopt(__doc__)
    process_options(args)

    regex_detector = NegRegexPatterns()
    regex_detector.load_yml2(args['--regex_patterns'])

    ngrex_detector = NegGrexPatterns()
    ngrex_detector.load_yml2(args['--ngrex_patterns'])

    negation = BioCNeg(regex_actor=regex_detector, ngrex_actor=ngrex_detector)
    cleanup = NegCleanUp(args['--sort_anns'])

    pipeline = BioCPipeline()
    pipeline.processors = [negation, cleanup]

    process_file(args['-i'], args['-o'], pipeline, bioc.PASSAGE)
if __name__ == '__main__':
main()
|
wxclaude/jjzdpt | src/main/java/io/renren/modules/df/service/impl/DfZhzhServiceImpl.java | <reponame>wxclaude/jjzdpt<filename>src/main/java/io/renren/modules/df/service/impl/DfZhzhServiceImpl.java
package io.renren.modules.df.service.impl;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import io.renren.modules.df.dao.DfSjfnDao;
import io.renren.modules.df.dao.DfZhzhDao;
import io.renren.modules.df.entity.DfSjfn;
import io.renren.modules.df.entity.DfZhzh;
import io.renren.modules.df.service.DfSjfnService;
import io.renren.modules.df.service.DfZhzhService;
import org.springframework.stereotype.Service;
/**
 * Service implementation for {@link DfZhzh} records. All CRUD behavior is
 * inherited from MyBatis-Plus {@link ServiceImpl}; no custom logic yet.
 */
@Service
public class DfZhzhServiceImpl extends ServiceImpl<DfZhzhDao, DfZhzh> implements DfZhzhService {
}
|
marstau/shinsango | src/util/sound/gme/Kss_Emu.cpp | <filename>src/util/sound/gme/Kss_Emu.cpp
// Game_Music_Emu 0.5.5. http://www.slack.net/~ant/
#include "Kss_Emu.h"
#include "blargg_endian.h"
#include <string.h>
/* Copyright (C) 2006 <NAME>. This module is free software; you
can redistribute it and/or modify it under the terms of the GNU Lesser
General Public License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version. This
module is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
details. You should have received a copy of the GNU Lesser General Public
License along with this module; if not, write to the Free Software Foundation,
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */
#include "blargg_source.h"
long const clock_rate = 3579545;
int const osc_count = Ay_Apu::osc_count + Scc_Apu::osc_count;
// Sets up voice metadata and unmapped-memory defaults. The SMS PSG (sn) is
// created lazily in load_() only when the file header requests it.
Kss_Emu::Kss_Emu()
{
    sn = 0;
    set_type( gme_kss_type );
    set_silence_lookahead( 6 );

    // 3 AY squares followed by 5 SCC waves (osc_count total)
    static const char* const names [osc_count] = {
        "Square 1", "Square 2", "Square 3",
        "Wave 1", "Wave 2", "Wave 3", "Wave 4", "Wave 5"
    };
    set_voice_names( names );

    static int const types [osc_count] = {
        wave_type | 0, wave_type | 1, wave_type | 2,
        wave_type | 3, wave_type | 4, wave_type | 5, wave_type | 6, wave_type | 7
    };
    set_voice_types( types );

    // Reads from unmapped addresses yield 0xFF
    memset( unmapped_read, 0xFF, sizeof unmapped_read );
}
Kss_Emu::~Kss_Emu() { unload(); }

// Frees the optional SMS APU and delegates the rest to the base class.
// Safe to call repeatedly (deleting null sn is a no-op).
void Kss_Emu::unload()
{
    delete sn;
    sn = 0;
    Classic_Emu::unload();
}
// Track info
// Fills in the track's system name from the header's device flags:
// bit 0x02 selects Sega hardware, bit 0x04 further selects Game Gear.
static void copy_kss_fields( Kss_Emu::header_t const& h, track_info_t* out )
{
    const char* system_name;
    if ( !(h.device_flags & 0x02) )
        system_name = "MSX";
    else if ( h.device_flags & 0x04 )
        system_name = "Game Gear";
    else
        system_name = "Sega Master System";
    Gme_File::copy_field_( out->system, system_name );
}
// Reports track metadata; only the system name is derived from the header.
blargg_err_t Kss_Emu::track_info_( track_info_t* out, int ) const
{
    copy_kss_fields( header_, out );
    return 0;
}
// Accepts only the two known KSS signatures ("KSCC" old, "KSSX" extended).
static blargg_err_t check_kss_header( void const* header )
{
    if ( !memcmp( header, "KSCC", 4 ) || !memcmp( header, "KSSX", 4 ) )
        return 0;
    return gme_wrong_file_type;
}
// Lightweight info-only loader: reads just the KSS header so track metadata
// can be reported without constructing the full emulator.
struct Kss_File : Gme_Info_
{
    Kss_Emu::header_t header_;

    Kss_File() { set_type( gme_kss_type ); }

    blargg_err_t load_( Data_Reader& in )
    {
        blargg_err_t err = in.read( &header_, Kss_Emu::header_size );
        if ( err )
            // A truncated header means this isn't a KSS file at all
            return (err == in.eof_error ? gme_wrong_file_type : err);
        return check_kss_header( &header_ );
    }

    blargg_err_t track_info_( track_info_t* out, int ) const
    {
        copy_kss_fields( header_, out );
        return 0;
    }
};
// Factory hooks and the exported file-type descriptor for the KSS format.
static Music_Emu* new_kss_emu () { return BLARGG_NEW Kss_Emu ; }
static Music_Emu* new_kss_file() { return BLARGG_NEW Kss_File; }

// Descriptor fields: system label, track-count limit, full/info factories,
// file extension, trailing flags (semantics per gme_type_t_ declaration).
static gme_type_t_ const gme_kss_type_ = { "MSX", 256, &new_kss_emu, &new_kss_file, "KSS", 0x03 };
gme_type_t const gme_kss_type = &gme_kss_type_;
// Setup
// Pushes the current gain to every APU; tracks that touched the SCC get an
// extra 1.5x boost. Multiplication order matches the prior implementation
// so the resulting volume is bit-identical.
void Kss_Emu::update_gain()
{
    double vol = gain() * 1.4;
    if ( scc_accessed )
        vol *= 1.5;

    ay.volume( vol );
    scc.volume( vol );
    if ( sn )
        sn->volume( vol );
}
// Loads a KSS file: validates the header, sanitizes variant-specific fields,
// and configures which sound chips the track needs.
blargg_err_t Kss_Emu::load_( Data_Reader& in )
{
    memset( &header_, 0, sizeof header_ );
    // Sanity-check the struct layouts against the expected on-disk sizes.
    assert( offsetof (header_t,device_flags) == header_size - 1 );
    assert( offsetof (ext_header_t,msx_audio_vol) == ext_header_size - 1 );
    RETURN_ERR( rom.load( in, header_size, STATIC_CAST(header_t*,&header_), 0 ) );
    RETURN_ERR( check_kss_header( header_.tag ) );
    if ( header_.tag [3] == 'C' )
    {
        // Older "KSCC" variant: no extended header; clear fields it doesn't define.
        if ( header_.extra_header )
        {
            header_.extra_header = 0;
            set_warning( "Unknown data in header" );
        }
        if ( header_.device_flags & ~0x0F )
        {
            header_.device_flags &= 0x0F;
            set_warning( "Unknown data in header" );
        }
    }
    else
    {
        // "KSSX" variant: copy the extended header (which follows the base
        // header in the ROM data) over the trailing part of header_.
        // NOTE(review): assumes ext_header_t overlays the tail of header_ —
        // confirm against the header_t declaration.
        ext_header_t& ext = header_;
        memcpy( &ext, rom.begin(), min( (int) ext_header_size, (int) header_.extra_header ) );
        if ( header_.extra_header > 0x10 )
            set_warning( "Unknown data in header" );
    }
    if ( header_.device_flags & 0x09 )
        set_warning( "FM sound not supported" );
    // SCC register window is normally at 0x9800/0xB800 (masked against 0xC000
    // in kss_cpu_write); Game Gear files (flag 0x04) have no SCC.
    scc_enabled = 0xC000;
    if ( header_.device_flags & 0x04 )
        scc_enabled = 0;
    // Flag 0x02 requests the SMS PSG; allocate it lazily, once.
    if ( header_.device_flags & 0x02 && !sn )
        CHECK_ALLOC( sn = BLARGG_NEW( Sms_Apu ) );
    set_voice_count( osc_count );
    return setup_buffer( ::clock_rate );
}
// Propagates the treble equalization settings to each sound chip.
void Kss_Emu::update_eq( blip_eq_t const& eq )
{
    ay.treble_eq( eq );
    scc.treble_eq( eq );
    if ( sn )
        sn->treble_eq( eq );
}
// Routes voice i to the given output buffer(s). Voices [0, ay.osc_count) are
// AY channels; the rest are SCC channels.
void Kss_Emu::set_voice( int i, Blip_Buffer* center, Blip_Buffer* left, Blip_Buffer* right )
{
    int i2 = i - ay.osc_count;
    if ( i2 >= 0 )
        scc.osc_output( i2, center );
    else
        ay.osc_output( i, center );
    // NOTE(review): when the SMS PSG is present, its low-numbered oscillators
    // share indexes with the AY/SCC voices above — presumably only one chip is
    // audible per file, but confirm against the device_flags handling in load_.
    if ( sn && i < sn->osc_count )
        sn->osc_output( i, center, left, right );
}
// Emulation
void Kss_Emu::set_tempo_( double t )
{
blip_time_t period =
(header_.device_flags & 0x40 ? ::clock_rate / 50 : ::clock_rate / 60);
play_period = blip_time_t (period / t);
}
// Prepares CPU, RAM, and sound chips for playback of the given track, then
// points the Z80 at the file's init routine.
blargg_err_t Kss_Emu::start_track_( int track )
{
    RETURN_ERR( Classic_Emu::start_track_( track ) );
    // Fill low RAM with 0xC9 (Z80 RET) so stray calls return harmlessly;
    // clear the rest.
    memset( ram, 0xC9, 0x4000 );
    memset( ram + 0x4000, 0, sizeof ram - 0x4000 );
    // copy driver code to lo RAM
    static byte const bios [] = {
        0xD3, 0xA0, 0xF5, 0x7B, 0xD3, 0xA1, 0xF1, 0xC9, // $0001: WRTPSG
        0xD3, 0xA0, 0xDB, 0xA2, 0xC9                     // $0009: RDPSG
    };
    static byte const vectors [] = {
        0xC3, 0x01, 0x00,   // $0093: WRTPSG vector
        0xC3, 0x09, 0x00,   // $0096: RDPSG vector
    };
    memcpy( ram + 0x01, bios, sizeof bios );
    memcpy( ram + 0x93, vectors, sizeof vectors );
    // copy non-banked data into RAM, clamped to both the file size and the
    // remaining address space
    unsigned load_addr = get_le16( header_.load_addr );
    long orig_load_size = get_le16( header_.load_size );
    long load_size = min( orig_load_size, rom.file_size() );
    load_size = min( load_size, long (mem_size - load_addr) );
    if ( load_size != orig_load_size )
        set_warning( "Excessive data size" );
    memcpy( ram + load_addr, rom.begin() + header_.extra_header, load_size );
    rom.set_addr( -load_size - header_.extra_header );
    // check available bank data; the header may claim more banks than the
    // file actually contains
    blargg_long const bank_size = this->bank_size();
    int max_banks = (rom.file_size() - load_size + bank_size - 1) / bank_size;
    bank_count = header_.bank_mode & 0x7F;
    if ( bank_count > max_banks )
    {
        bank_count = max_banks;
        set_warning( "Bank data missing" );
    }
    //debug_printf( "load_size : $%X\n", load_size );
    //debug_printf( "bank_size : $%X\n", bank_size );
    //debug_printf( "bank_count: %d (%d claimed)\n", bank_count, header_.bank_mode & 0x7F );
    ram [idle_addr] = 0xFF;
    cpu::reset( unmapped_write, unmapped_read );
    cpu::map_mem( 0, mem_size, ram, ram );
    ay.reset();
    scc.reset();
    if ( sn )
        sn->reset();
    // Push idle_addr as the return address so the init routine lands on the
    // idle marker when it returns (detected in run_clocks).
    r.sp = 0xF380;
    ram [--r.sp] = idle_addr >> 8;
    ram [--r.sp] = idle_addr & 0xFF;
    r.b.a = track;  // track number is passed in register A
    r.pc = get_le16( header_.init_addr );
    next_play = play_period;
    scc_accessed = false;
    gain_updated = false;
    update_gain();
    ay_latch = 0;
    return 0;
}
// Maps ROM bank 'physical' into the logical bank slot. Slot 0 maps at 0x8000;
// with 8K banks, slot 1 maps at 0xA000.
void Kss_Emu::set_bank( int logical, int physical )
{
    unsigned const bank_size = this->bank_size();
    unsigned addr = 0x8000;
    if ( logical && bank_size == 8 * 1024 )
        addr = 0xA000;
    physical -= header_.first_bank;
    if ( (unsigned) physical >= (unsigned) bank_count )
    {
        // Out-of-range bank: map plain RAM there instead.
        byte* data = ram + addr;
        cpu::map_mem( addr, bank_size, data, data );
    }
    else
    {
        // Map ROM pages read-only (writes go to unmapped_write).
        long phys = physical * (blargg_long) bank_size;
        for ( unsigned offset = 0; offset < bank_size; offset += page_size )
            cpu::map_mem( addr + offset, page_size,
                    unmapped_write, rom.at_addr( phys + offset ) );
    }
}
// Handles memory-mapped writes: bank-switch registers and SCC registers.
void Kss_Emu::cpu_write( unsigned addr, int data )
{
    data &= 0xFF;
    switch ( addr )
    {
    case 0x9000:
        set_bank( 0, data );
        return;

    case 0xB000:
        set_bank( 1, data );
        return;
    }
    // Fold the two SCC windows (0x9800 and 0xB800 regions share bit pattern
    // under the 0xDFFF mask) into a register offset.
    int scc_addr = (addr & 0xDFFF) ^ 0x9800;
    if ( scc_addr < scc.reg_count )
    {
        scc_accessed = true; // triggers the SCC gain boost in update_gain()
        scc.write( time(), scc_addr, data );
        return;
    }
    debug_printf( "LD ($%04X),$%02X\n", addr, data );
}
// CPU write hook: always performs the RAM write, then forwards addresses in
// the SCC/banking window (selected by scc_enabled mask) to cpu_write().
void kss_cpu_write( Kss_Cpu* cpu, unsigned addr, int data )
{
    *cpu->write( addr ) = data;
    if ( (addr & STATIC_CAST(Kss_Emu&,*cpu).scc_enabled) == 0x8000 )
        STATIC_CAST(Kss_Emu&,*cpu).cpu_write( addr, data );
}
// I/O port write hook: dispatches AY PSG, SMS PSG, and bank-switch ports.
void kss_cpu_out( Kss_Cpu* cpu, cpu_time_t time, unsigned addr, int data )
{
    data &= 0xFF;
    Kss_Emu& emu = STATIC_CAST(Kss_Emu&,*cpu);
    switch ( addr & 0xFF )
    {
    case 0xA0: // AY register select
        emu.ay_latch = data & 0x0F;
        return;

    case 0xA1: // AY register data (written to the latched register)
        GME_APU_HOOK( &emu, emu.ay_latch, data );
        emu.ay.write( time, emu.ay_latch, data );
        return;

    case 0x06: // Game Gear stereo control (only when GG flag set)
        if ( emu.sn && (emu.header_.device_flags & 0x04) )
        {
            emu.sn->write_ggstereo( time, data );
            return;
        }
        break;

    case 0x7E: // SMS PSG data ports
    case 0x7F:
        if ( emu.sn )
        {
            GME_APU_HOOK( &emu, 16, data );
            emu.sn->write_data( time, data );
            return;
        }
        break;

    case 0xFE: // bank switch
        emu.set_bank( 0, data );
        return;

#ifndef NDEBUG
    case 0xF1: // FM data
        if ( data )
            break; // trap non-zero data
    case 0xF0: // FM addr
    case 0xA8: // PPI
        return;
#endif
    }
    // Unhandled port: log it in debug builds.
    debug_printf( "OUT $%04X,$%02X\n", addr, data );
}
// I/O port read hook: no ports are emulated; logs the access and returns 0.
int kss_cpu_in( Kss_Cpu*, cpu_time_t, unsigned addr )
{
    //Kss_Emu& emu = STATIC_CAST(Kss_Emu&,*cpu);
    //switch ( addr & 0xFF )
    //{
    //}
    debug_printf( "IN $%04X\n", addr );
    return 0;
}
// Emulation
// Runs the Z80 for 'duration' clocks, periodically calling the track's play
// routine, then flushes each sound chip's output for the elapsed frame.
blargg_err_t Kss_Emu::run_clocks( blip_time_t& duration, int )
{
    while ( time() < duration )
    {
        blip_time_t end = min( duration, next_play );
        cpu::run( min( duration, next_play ) );
        // If the CPU has returned to the idle marker, fast-forward its clock.
        if ( r.pc == idle_addr )
            set_time( end );

        if ( time() >= next_play )
        {
            next_play += play_period;

            if ( r.pc == idle_addr )
            {
                // Deferred gain update: applied on the first play call after
                // start, once we know whether the SCC was touched during init.
                if ( !gain_updated )
                {
                    gain_updated = true;
                    if ( scc_accessed )
                        update_gain();
                }
                // Push idle_addr as return address and call the play routine.
                ram [--r.sp] = idle_addr >> 8;
                ram [--r.sp] = idle_addr & 0xFF;
                r.pc = get_le16( header_.play_addr );
                GME_FRAME_HOOK( this );
            }
        }
    }
    // Rebase all timers to the start of the next frame.
    duration = time();
    next_play -= duration;
    check( next_play >= 0 );
    adjust_time( -duration );
    ay.end_frame( duration );
    scc.end_frame( duration );
    if ( sn )
        sn->end_frame( duration );
    return 0;
}
|
Ujjawalgupta42/Hacktoberfest2021-DSA | 04. Arrays/Merge Sorted Array.cpp | /*
You are given two integer arrays nums1 and nums2, sorted in non-decreasing order, and two integers m and n, representing the number of elements in nums1 and nums2 respectively.
Merge nums1 and nums2 into a single array sorted in non-decreasing order.
The final sorted array should not be returned by the function, but instead be stored inside the array nums1. To accommodate this, nums1 has a length of m + n, where the first m elements denote the elements that should be merged, and the last n elements are set to 0 and should be ignored. nums2 has a length of n.
Example 1:
Input: nums1 = [1,2,3,0,0,0], m = 3, nums2 = [2,5,6], n = 3
Output: [1,2,2,3,5,6]
Explanation: The arrays we are merging are [1,2,3] and [2,5,6].
The result of the merge is [1,2,2,3,5,6] with the underlined elements coming from nums1.
Example 2:
Input: nums1 = [1], m = 1, nums2 = [], n = 0
Output: [1]
Explanation: The arrays we are merging are [1] and [].
The result of the merge is [1].
Example 3:
Input: nums1 = [0], m = 0, nums2 = [1], n = 1
Output: [1]
Explanation: The arrays we are merging are [] and [1].
The result of the merge is [1].
Note that because m = 0, there are no elements in nums1. The 0 is only there to ensure the merge result can fit in nums1.
*/
#include<bits/stdc++.h>
using namespace std;
// Merges sorted nums2 (n elements) into sorted nums1 (first m elements valid,
// capacity m + n) in place, keeping non-decreasing order. O(m + n) time,
// O(1) extra space.
//
// Works backwards from the largest elements so no element of nums1 is
// overwritten before it has been consumed.
void merge(std::vector<int>& nums1, int m, std::vector<int>& nums2, int n) {
    int i = m - 1;      // last valid element of nums1
    int j = n - 1;      // last element of nums2
    int k = m + n - 1;  // next write position at the back of nums1
    while (i >= 0 && j >= 0)
    {
        if (nums1[i] > nums2[j])
            nums1[k--] = nums1[i--];
        else
            nums1[k--] = nums2[j--];
    }
    // Only leftovers from nums2 need copying; any remaining nums1 elements
    // are already in their final positions (the original also copied nums1
    // onto itself here, which was redundant).
    while (j >= 0)
        nums1[k--] = nums2[j--];
}
// Interactive driver: reads both arrays from stdin, merges them in place into
// nums1 (allocated with the required n + m capacity), and prints the result.
int main(){
    int n,m;
    cout<<"Enter Length of two arrays: ";
    cin>>n>>m;
    // nums1 gets n + m slots so it can hold the merged result.
    vector<int>nums1(n+m),nums2(m);
    cout<<"Enter values in first array";
    for(int i=0;i<n;i++){
        cin>>nums1[i];
    }
    cout<<"Enter values in second array";
    for(int i=0;i<m;i++){
        cin>>nums2[i];
    }
    merge(nums1,n,nums2,m);
    cout<<"The Merged Sorted arrays are: ";
    for(int i=0;i<n+m;i++){
        cout<<nums1[i]<<" ";
    }
    return 0;
}
|
shilad/wikibrain | wikibrain-parser/src/main/java/org/wikibrain/parser/wiki/InterLanguageLinkVisitor.java | <reponame>shilad/wikibrain<filename>wikibrain-parser/src/main/java/org/wikibrain/parser/wiki/InterLanguageLinkVisitor.java
package org.wikibrain.parser.wiki;
import org.wikibrain.core.WikiBrainException;
import org.wikibrain.core.dao.*;
import org.wikibrain.core.lang.Language;
import org.wikibrain.core.lang.LanguageInfo;
import org.wikibrain.core.lang.LanguageSet;
import org.wikibrain.core.model.InterLanguageLink;
import org.wikibrain.core.model.LocalLink;
import org.wikibrain.core.model.RawPage;
import org.wikibrain.core.model.Title;
import java.util.concurrent.atomic.AtomicInteger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*/
/**
 * Parser visitor that records inter-language links (ILLs) encountered while
 * parsing wiki pages. Each ILL whose source and destination pages can both be
 * resolved to local page ids (and whose destination language is in the
 * configured language set) is saved through the {@link InterLanguageLinkDao}.
 */
public class InterLanguageLinkVisitor extends ParserVisitor {
    private static final Logger LOG = LoggerFactory.getLogger(InterLanguageLinkVisitor.class);
    private final LocalPageDao pageDao;
    private final MetaInfoDao metaDao;
    private final InterLanguageLinkDao illDao;
    // Only links pointing into these languages are retained.
    private final LanguageSet destLangs;
    // Counters are atomic because visitors may be driven from multiple threads.
    private AtomicInteger encountered = new AtomicInteger();
    private AtomicInteger retained = new AtomicInteger();
    public InterLanguageLinkVisitor(InterLanguageLinkDao illDao, LocalPageDao pageDao, MetaInfoDao metaDao) {
        this(illDao, pageDao, metaDao, LanguageSet.ALL);
    }
    public InterLanguageLinkVisitor(InterLanguageLinkDao illDao, LocalPageDao pageDao, MetaInfoDao metaDao, LanguageSet destLangs) {
        this.illDao = illDao;
        this.pageDao = pageDao;
        this.metaDao = metaDao;
        this.destLangs = destLangs;
    }
    /**
     * Handles one parsed inter-language link: resolves source and destination
     * page ids and persists the link when both resolve and the destination
     * language is accepted. DAO failures are counted and rethrown.
     */
    @Override
    public void ill(ParsedIll ill) throws WikiBrainException {
        int c = encountered.getAndIncrement();
        if(c % 10000==0) LOG.info("Encountered ill #" + c + ", retained " + retained.get());
        Language srcLang = null;
        try {
            srcLang = ill.location.getXml().getLanguage();
            int srcId = pageDao.getIdByTitle(ill.location.getXml().getTitle());
            Language destLang = ill.title.getLanguage();
            int destId = pageDao.getIdByTitle(ill.title);
            // Ids <= 0 mean the title could not be resolved; skip such links.
            if (srcId > 0 && destId > 0 && destLangs.containsLanguage(destLang)) {
                illDao.save(new InterLanguageLink(srcLang, srcId, destLang, destId));
                retained.incrementAndGet();
            }
            metaDao.incrementRecords(InterLanguageLinkDao.class, srcLang);
        } catch (DaoException e) {
            metaDao.incrementErrorsQuietly(InterLanguageLinkDao.class, srcLang);
            throw new WikiBrainException(e);
        }
    }
    @Override
    public void parseError(RawPage rp, Exception e) {
        // Parse errors are tallied but do not abort the overall run.
        metaDao.incrementErrorsQuietly(LocalLink.class, rp.getLanguage());
    }
}
|
saeedya/docker-ansible | venv/lib/python3.8/site-packages/ansible_test/_internal/provider/source/__init__.py | """Common code for source providers."""
from __future__ import annotations
import abc
import typing as t
from .. import (
PathProvider,
)
class SourceProvider(PathProvider):
    """Base class for source providers.

    Subclasses enumerate the content files available under a root path.
    """
    @abc.abstractmethod
    def get_paths(self, path):  # type: (str) -> t.List[str]
        """Return the list of available content paths under the given path."""
Surya-98/Snapcuit | NGSpice/ngspice-30/src/xspice/evt/evtnode_copy.c | /*============================================================================
FILE EVTnode_copy.c
MEMBER OF process XSPICE
Copyright 1991
Georgia Tech Research Corporation
Atlanta, Georgia 30332
All Rights Reserved
PROJECT A-8503
AUTHORS
9/12/91 <NAME>
MODIFICATIONS
<date> <person name> <nature of modifications>
SUMMARY
This file contains function EVTnode_copy which copies the state
of a node structure.
INTERFACES
void EVTnode_copy(CKTcircuit *ckt, int node_index, Evt_Node_t *from,
Evt_Node_t **to)
REFERENCED FILES
None.
NON-STANDARD FEATURES
None.
============================================================================*/
#include "ngspice/ngspice.h"
#include "ngspice/cktdefs.h"
//#include "util.h"
#include "ngspice/mif.h"
#include "ngspice/evt.h"
#include "ngspice/evtudn.h"
#include "ngspice/mifproto.h"
#include "ngspice/evtproto.h"
#include "ngspice/cm.h"
/*
EVTnode_copy
This function copies the state of a node structure.
If the destination is NULL, it is allocated before the copy. This is the
case when EVTiter copies a node during a transient analysis to
save the state of an element of rhsold into the node data structure
lists.
If the destination is non-NULL, only the internal elements of the node
structure are copied. This is the case when EVTbackup restores that state
of nodes that existed at a certain timestep back into rhs and rhsold.
*/
void EVTnode_copy(
    CKTcircuit *ckt,        /* The circuit structure */
    int        node_index,  /* The node to copy */
    Evt_Node_t *from,       /* Location to copy from */
    Evt_Node_t **to)        /* Location to copy to */
{

    int     i;
    int     udn_index;      /* user-defined-node type of this node */
    int     num_outputs;    /* outputs driving the node */
    Mif_Boolean_t  invert;  /* whether an inverted value is also kept */

    Evt_Node_Data_t  *node_data;
    Evt_Node_Info_t  **node_table;

    Evt_Node_t       *here;
/*    Digital_t *dummy;*/
/*  char buff[128];*/

    /* Get data for fast access */
    node_data = ckt->evt->data.node;
    node_table = ckt->evt->info.node_table;
    udn_index = node_table[node_index]->udn_index;
    num_outputs = node_table[node_index]->num_outputs;
    invert = node_table[node_index]->invert;

    /* If destination is not allocated, allocate it */
    /* otherwise we just copy into the node struct */
    here = *to;
    if(here == NULL)
    {
        /* Use allocated structure on free list if available */
        /* Otherwise, allocate a new one */
        here = node_data->free[node_index];
        if(here)
        {
            /* Recycle: unlink the node from the free list */
            *to = here;
            node_data->free[node_index] = here->next;
            here->next = NULL;
        }
        else
        {
            here = TMALLOC(Evt_Node_t, 1);
            *to = here;
            /* Allocate/initialize the data in the new node struct,
               delegating value construction to the node's UDN type */
            if(num_outputs > 1)
            {
                here->output_value = TMALLOC(void *, num_outputs);
                for(i = 0; i < num_outputs; i++)
                {
                    g_evt_udn_info[udn_index]->create
                            ( &(here->output_value[i]) );
                }
            }
            here->node_value = NULL;
            g_evt_udn_info[udn_index]->create ( &(here->node_value) );
            if(invert)
                g_evt_udn_info[udn_index]->create ( &(here->inverted_value) );
        }
    }

    /* Copy the node data (per-output values only exist when the node is
       driven by more than one output) */
    here->op = from->op;
    here->step = from->step;
    if(num_outputs > 1)
    {
        for(i = 0; i < num_outputs; i++)
        {
            g_evt_udn_info[udn_index]->copy (from->output_value[i],
                    here->output_value[i]);
        }
    }
    g_evt_udn_info[udn_index]->copy (from->node_value, here->node_value);
    if(invert)
    {
        g_evt_udn_info[udn_index]->copy (from->inverted_value,
                here->inverted_value);
    }
}
|
jihoonson/tajo-2 | tajo-storage/tajo-storage-hbase/src/main/java/org/apache/tajo/storage/hbase/HBaseScanner.java | <reponame>jihoonson/tajo-2
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tajo.storage.hbase;
import com.google.common.base.Preconditions;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.filter.InclusiveStopFilter;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
import org.apache.tajo.catalog.Column;
import org.apache.tajo.catalog.Schema;
import org.apache.tajo.catalog.TableMeta;
import org.apache.tajo.catalog.statistics.ColumnStats;
import org.apache.tajo.catalog.statistics.TableStats;
import org.apache.tajo.conf.TajoConf;
import org.apache.tajo.datum.Datum;
import org.apache.tajo.datum.NullDatum;
import org.apache.tajo.datum.TextDatum;
import org.apache.tajo.exception.*;
import org.apache.tajo.plan.expr.EvalNode;
import org.apache.tajo.storage.*;
import org.apache.tajo.storage.fragment.Fragment;
import org.apache.tajo.util.BytesUtils;
import java.io.IOException;
import java.util.Collection;
import java.util.NavigableMap;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Scanner that reads Tajo tuples from an HBase table fragment, mapping HBase
 * row keys, column families, and cells onto the Tajo schema via
 * {@link ColumnMapping}.
 *
 * Fix: in {@code init()}, a {@link TajoException} from building the column
 * mapping was wrapped in a {@code TajoInternalError} that was constructed but
 * never thrown, silently swallowing the failure and leaving
 * {@code columnMapping} null; it is now thrown.
 */
public class HBaseScanner implements Scanner {
  private static final Log LOG = LogFactory.getLog(HBaseScanner.class);
  private static final int DEFAULT_FETCH_SIZE = 1000;
  // Cap on elements rendered when serializing multi-cell values to JSON-ish text.
  private static final int MAX_LIST_SIZE = 100;

  protected boolean inited = false;
  private TajoConf conf;
  private Schema schema;
  private TableMeta meta;
  private HBaseFragment fragment;
  private Scan scan;
  private HTableInterface htable;
  private Configuration hbaseConf;
  private Column[] targets;
  private TableStats tableStats;
  private ResultScanner scanner;
  private AtomicBoolean finished = new AtomicBoolean(false);
  private float progress = 0.0f;
  private int scanFetchSize;
  private Result[] scanResults;
  private int scanResultIndex = -1;
  private Column[] schemaColumns;

  private ColumnMapping columnMapping;
  private int[] targetIndexes;

  private int numRows = 0;

  // Per-schema-column mapping info, precomputed in init() for fast access.
  private byte[][][] mappingColumnFamilies;
  private boolean[] isRowKeyMappings;
  private boolean[] isBinaryColumns;
  private boolean[] isColumnKeys;
  private boolean[] isColumnValues;

  private int[] rowKeyFieldIndexes;
  private char rowKeyDelimiter;
  private Tuple outTuple;

  public HBaseScanner (Configuration conf, Schema schema, TableMeta meta, Fragment fragment) throws IOException {
    Preconditions.checkNotNull(conf);
    Preconditions.checkNotNull(schema);
    Preconditions.checkNotNull(meta);
    Preconditions.checkNotNull(fragment);
    Preconditions.checkArgument(conf instanceof TajoConf);

    this.conf = (TajoConf) conf;
    this.schema = schema;
    this.meta = meta;
    this.fragment = (HBaseFragment)fragment;
    this.tableStats = new TableStats();
  }

  /**
   * Prepares the scanner: resolves target columns, builds the column mapping,
   * and opens the underlying HBase scan.
   */
  @Override
  public void init() throws IOException {
    inited = true;
    schemaColumns = schema.toArray();
    if (fragment != null) {
      tableStats.setNumBytes(0);
      tableStats.setNumBlocks(1);
    }
    for (Column eachColumn : schema.getRootColumns()) {
      ColumnStats columnStats = new ColumnStats(eachColumn);
      tableStats.addColumnStat(columnStats);
    }

    scanFetchSize = Integer.parseInt(
        meta.getOption(HBaseStorageConstants.META_FETCH_ROWNUM_KEY, "" + DEFAULT_FETCH_SIZE));
    if (targets == null) {
      targets = schema.toArray();
    }
    outTuple = new VTuple(targets.length);

    try {
      columnMapping = new ColumnMapping(schema, meta.getOptions());
    } catch (TajoException e) {
      // BUGFIX: the error was previously constructed but not thrown.
      throw new TajoInternalError(e);
    }

    targetIndexes = new int[targets.length];
    int index = 0;
    for (Column eachTargetColumn: targets) {
      targetIndexes[index++] = schema.getColumnId(eachTargetColumn.getQualifiedName());
    }

    mappingColumnFamilies = columnMapping.getMappingColumns();
    isRowKeyMappings = columnMapping.getIsRowKeyMappings();
    isBinaryColumns = columnMapping.getIsBinaryColumns();
    isColumnKeys = columnMapping.getIsColumnKeys();
    isColumnValues = columnMapping.getIsColumnValues();

    rowKeyDelimiter = columnMapping.getRowKeyDelimiter();
    rowKeyFieldIndexes = columnMapping.getRowKeyFieldIndexes();

    HBaseTablespace space = (HBaseTablespace) TablespaceManager.get(fragment.getUri()).get();
    hbaseConf = space.getHbaseConf();
    initScanner();
  }

  /**
   * Builds the HBase {@link Scan} restricted to the fragment's row range and
   * the target column families/qualifiers, then opens a {@link ResultScanner}.
   */
  private void initScanner() throws IOException {
    scan = new Scan();
    scan.setBatch(scanFetchSize);
    scan.setCacheBlocks(false);
    scan.setCaching(scanFetchSize);

    FilterList filters = null;
    if (targetIndexes == null || targetIndexes.length == 0) {
      // No projected columns: fetch only keys to minimize transferred data.
      filters = new FilterList(FilterList.Operator.MUST_PASS_ALL);
      filters.addFilter(new FirstKeyOnlyFilter());
      filters.addFilter(new KeyOnlyFilter());
    } else {
      boolean[] isRowKeyMappings = columnMapping.getIsRowKeyMappings();
      for (int eachIndex : targetIndexes) {
        if (isRowKeyMappings[eachIndex]) {
          continue;  // row key comes with every Result; nothing to project
        }
        byte[][] mappingColumn = columnMapping.getMappingColumns()[eachIndex];
        if (mappingColumn[1] == null) {
          scan.addFamily(mappingColumn[0]);
        } else {
          scan.addColumn(mappingColumn[0], mappingColumn[1]);
        }
      }
    }

    scan.setStartRow(fragment.getStartRow());
    if (fragment.isLast() && fragment.getStopRow() != null &&
        fragment.getStopRow().length > 0) {
      // last and stopRow is not empty: include the stop row itself
      if (filters == null) {
        filters = new FilterList();
      }
      filters.addFilter(new InclusiveStopFilter(fragment.getStopRow()));
    } else {
      scan.setStopRow(fragment.getStopRow());
    }

    if (filters != null) {
      scan.setFilter(filters);
    }

    if (htable == null) {
      HConnection hconn = ((HBaseTablespace) TablespaceManager.get(fragment.getUri()).get()).getConnection();
      htable = hconn.getTable(fragment.getHbaseTableName());
    }
    scanner = htable.getScanner(scan);
  }

  /**
   * Returns the next tuple, or null when the fragment is exhausted. Results
   * are fetched in batches of {@code scanFetchSize}.
   */
  @Override
  public Tuple next() throws IOException {
    if (finished.get()) {
      return null;
    }

    if (scanResults == null || scanResultIndex >= scanResults.length) {
      scanResults = scanner.next(scanFetchSize);
      if (scanResults == null || scanResults.length == 0) {
        finished.set(true);
        progress = 1.0f;
        return null;
      }
      scanResultIndex = 0;
    }

    Result result = scanResults[scanResultIndex++];

    for (int i = 0; i < targetIndexes.length; i++) {
      outTuple.put(i, getDatum(result, targetIndexes[i]));
    }
    numRows++;
    return outTuple;
  }

  /**
   * Converts one mapped field of an HBase {@link Result} into a Datum,
   * handling row-key mappings, key/value-of-family mappings, whole-family
   * mappings (rendered as JSON-ish text), and plain cell mappings.
   */
  private Datum getDatum(Result result, int fieldId) throws IOException {
    byte[] value = null;
    if (isRowKeyMappings[fieldId]) {
      value = result.getRow();
      if (!isBinaryColumns[fieldId] && rowKeyFieldIndexes[fieldId] >= 0) {
        // Composite row key: extract the mapped delimiter-separated field.
        int rowKeyFieldIndex = rowKeyFieldIndexes[fieldId];

        byte[][] rowKeyFields = BytesUtils.splitPreserveAllTokens(
            value, rowKeyDelimiter, columnMapping.getNumColumns());

        if (rowKeyFields.length < rowKeyFieldIndex) {
          return NullDatum.get();
        } else {
          value = rowKeyFields[rowKeyFieldIndex];
        }
      }
    } else {
      if (isColumnKeys[fieldId]) {
        // Field maps to the qualifier(s) of a family; multiple keys are
        // rendered as a bracketed list.
        NavigableMap<byte[], byte[]> cfMap = result.getFamilyMap(mappingColumnFamilies[fieldId][0]);
        if (cfMap != null) {
          Set<byte[]> keySet = cfMap.keySet();
          if (keySet.size() == 1) {
            try {
              return HBaseTextSerializerDeserializer.deserialize(schemaColumns[fieldId], keySet.iterator().next());
            } catch (Exception e) {
              LOG.error(e.getMessage(), e);
              throw new RuntimeException(e.getMessage(), e);
            }
          } else {
            StringBuilder sb = new StringBuilder();
            sb.append("[");
            int count = 0;
            for (byte[] eachKey : keySet) {
              if (count > 0) {
                sb.append(", ");
              }
              Datum datum = HBaseTextSerializerDeserializer.deserialize(schemaColumns[fieldId], eachKey);
              sb.append("\"").append(datum.asChars()).append("\"");
              count++;
              if (count > MAX_LIST_SIZE) {
                break;
              }
            }
            sb.append("]");
            return new TextDatum(sb.toString());
          }
        }
      } else if (isColumnValues[fieldId]) {
        // Field maps to the cell value(s) of a family; multiple values are
        // rendered as a bracketed list.
        NavigableMap<byte[], byte[]> cfMap = result.getFamilyMap(mappingColumnFamilies[fieldId][0]);
        if (cfMap != null) {
          Collection<byte[]> valueList = cfMap.values();
          if (valueList.size() == 1) {
            try {
              return HBaseTextSerializerDeserializer.deserialize(schemaColumns[fieldId], valueList.iterator().next());
            } catch (Exception e) {
              LOG.error(e.getMessage(), e);
              throw new RuntimeException(e.getMessage(), e);
            }
          } else {
            StringBuilder sb = new StringBuilder();
            sb.append("[");
            int count = 0;
            for (byte[] eachValue : valueList) {
              if (count > 0) {
                sb.append(", ");
              }
              Datum datum = HBaseTextSerializerDeserializer.deserialize(schemaColumns[fieldId], eachValue);
              sb.append("\"").append(datum.asChars()).append("\"");
              count++;
              if (count > MAX_LIST_SIZE) {
                break;
              }
            }
            sb.append("]");
            return new TextDatum(sb.toString());
          }
        }
      } else {
        if (mappingColumnFamilies[fieldId][1] == null) {
          // Family mapped without a qualifier ("cf1:"): render all cells.
          NavigableMap<byte[], byte[]> cfMap = result.getFamilyMap(mappingColumnFamilies[fieldId][0]);
          if (cfMap != null && !cfMap.isEmpty()) {
            int count = 0;
            String delim = "";

            if (cfMap.size() == 0) {
              return NullDatum.get();
            } else if (cfMap.size() == 1) {
              // If a column family is mapped without column name like "cf1:" and the number of cells is one,
              // return value is flat format not json format.
              NavigableMap.Entry<byte[], byte[]> entry = cfMap.entrySet().iterator().next();
              byte[] entryKey = entry.getKey();
              byte[] entryValue = entry.getValue();
              if (entryKey == null || entryKey.length == 0) {
                try {
                  if (isBinaryColumns[fieldId]) {
                    return HBaseBinarySerializerDeserializer.deserialize(schemaColumns[fieldId], entryValue);
                  } else {
                    return HBaseTextSerializerDeserializer.deserialize(schemaColumns[fieldId], entryValue);
                  }
                } catch (Exception e) {
                  LOG.error(e.getMessage(), e);
                  throw new RuntimeException(e.getMessage(), e);
                }
              }
            }
            StringBuilder sb = new StringBuilder();
            sb.append("{");
            for (NavigableMap.Entry<byte[], byte[]> entry : cfMap.entrySet()) {
              byte[] entryKey = entry.getKey();
              byte[] entryValue = entry.getValue();

              String keyText = new String(entryKey);
              String valueText = null;
              if (entryValue != null) {
                try {
                  if (isBinaryColumns[fieldId]) {
                    valueText = HBaseBinarySerializerDeserializer.deserialize(schemaColumns[fieldId], entryValue).asChars();
                  } else {
                    valueText = HBaseTextSerializerDeserializer.deserialize(schemaColumns[fieldId], entryValue).asChars();
                  }
                } catch (Exception e) {
                  LOG.error(e.getMessage(), e);
                  throw new RuntimeException(e.getMessage(), e);
                }
              }
              sb.append(delim).append("\"").append(keyText).append("\":\"").append(valueText).append("\"");
              delim = ", ";
              count++;
              if (count > MAX_LIST_SIZE) {
                break;
              }
            } //end of for
            sb.append("}");
            return new TextDatum(sb.toString());
          } else {
            value = null;
          }
        } else {
          // Plain "family:qualifier" mapping: read the single cell.
          value = result.getValue(mappingColumnFamilies[fieldId][0], mappingColumnFamilies[fieldId][1]);
        }
      }
    }

    if (value == null) {
      return NullDatum.get();
    } else {
      try {
        if (isBinaryColumns[fieldId]) {
          return HBaseBinarySerializerDeserializer.deserialize(schemaColumns[fieldId], value);
        } else {
          return HBaseTextSerializerDeserializer.deserialize(schemaColumns[fieldId], value);
        }
      } catch (Exception e) {
        LOG.error(e.getMessage(), e);
        throw new RuntimeException(e.getMessage(), e);
      }
    }
  }

  /** Restarts the scan from the beginning of the fragment. */
  @Override
  public void reset() throws IOException {
    progress = 0.0f;
    scanResultIndex = -1;
    scanResults = null;
    finished.set(false);
    tableStats = new TableStats();

    if (scanner != null) {
      scanner.close();
      scanner = null;
    }

    initScanner();
  }

  /** Closes the result scanner and the table handle; safe to call twice. */
  @Override
  public void close() throws IOException {
    progress = 1.0f;
    finished.set(true);

    if (scanner != null) {
      try {
        scanner.close();
        scanner = null;
      } catch (Exception e) {
        LOG.warn("Error while closing hbase scanner: " + e.getMessage(), e);
      }
    }
    if (htable != null) {
      htable.close();
      htable = null;
    }
  }

  @Override
  public boolean isProjectable() {
    return true;
  }

  /** Must be called before init(); column projection is baked into the Scan. */
  @Override
  public void setTarget(Column[] targets) {
    if (inited) {
      throw new IllegalStateException("Should be called before init()");
    }
    this.targets = targets;
  }

  @Override
  public boolean isSelectable() {
    return false;
  }

  @Override
  public void setFilter(EvalNode filter) {
    throw new TajoRuntimeException(new UnsupportedException());
  }

  @Override
  public boolean isSplittable() {
    return true;
  }

  @Override
  public float getProgress() {
    return progress;
  }

  @Override
  public TableStats getInputStats() {
    tableStats.setNumRows(numRows);
    return tableStats;
  }

  @Override
  public Schema getSchema() {
    return schema;
  }
}
|
cgoettert/sistema-base | kopp-framework/src/main/java/br/com/kopp/framework/controller/KoppController.java | <gh_stars>0
package br.com.kopp.framework.controller;
import br.com.kopp.framework.datatables.ResponseData;
import br.com.kopp.framework.exception.KoppException;
import br.com.kopp.framework.message.FeedBuilder;
import br.com.kopp.framework.message.KoppFeedBuilder;
import br.com.kopp.framework.message.MessageBundle;
import br.com.kopp.framework.message.MessageType;
import br.com.kopp.framework.message.code.MessageCode;
import javax.inject.Inject;
import javax.ws.rs.core.Response;
/**
*
* @author cgoettert
*/
/**
 * Base class for REST controllers. Accumulates data payloads and localized
 * feedback messages (success/info/warning/error) in a {@link FeedBuilder},
 * and assembles them into a JAX-RS {@link Response} via {@link #build()}.
 */
public abstract class KoppController {

    private final FeedBuilder feedBuilder = new KoppFeedBuilder();

    // Resolves message codes to localized text; injected by the container.
    private MessageBundle message;

    public KoppController() {
    }

    @Inject
    public KoppController(MessageBundle message) {
        this.message = message;
    }

    /** Adds an unnamed data object to the response payload. */
    protected void addData(Object obj) {
        feedBuilder.add(obj);
    }

    /** Adds a data object under the given key. */
    protected void addData(String key, Object obj) {
        feedBuilder.add(key, obj);
    }

    /** Adds DataTables-style response data. */
    protected void addData(ResponseData responseData) {
        feedBuilder.add(responseData);
    }

    protected void addMessage(MessageCode code) {
        feedBuilder.add(message.getText(MessageType.SUCCESS, code));
    }

    protected void addMessage(MessageCode code, Object... params) {
        feedBuilder.add(message.getText(MessageType.SUCCESS, code, params));
    }

    protected void addInfo(MessageCode code) {
        feedBuilder.add(message.getText(MessageType.INFO, code));
    }

    protected void addInfo(MessageCode code, Object... params) {
        feedBuilder.add(message.getText(MessageType.INFO, code, params));
    }

    protected void addWarning(MessageCode code) {
        feedBuilder.add(message.getText(MessageType.WARNING, code));
    }

    protected void addWarning(MessageCode code, Object... params) {
        feedBuilder.add(message.getText(MessageType.WARNING, code, params));
    }

    protected void addError(MessageCode code) {
        feedBuilder.add(message.getText(MessageType.ERROR, code));
    }

    protected void addError(MessageCode code, Object... params) {
        feedBuilder.add(message.getText(MessageType.ERROR, code, params));
    }

    /** Adds the error message derived from a business exception. */
    protected void addError(KoppException exception) {
        feedBuilder.add(message.getText(MessageType.ERROR, exception));
    }

    /** Builds the final HTTP response from the accumulated data and messages. */
    protected Response build() {
        return feedBuilder.build();
    }
}
|
codethereforam/rent-X | rentx-common/src/main/java/priv/thinkam/rentx/common/exception/BusinessException.java | <reponame>codethereforam/rent-X
package priv.thinkam.rentx.common.exception;
/**
* 必须要处理的业务异常
*
* @author thinkam
* @date 2019/01/01
*/
/**
 * Base class for business-rule violations that callers are expected to
 * handle explicitly (unchecked so it can cross service layers freely).
 */
public abstract class BusinessException extends RuntimeException {
    public BusinessException(String message) {
        super(message);
    }

    public BusinessException(String message, Throwable cause) {
        super(message, cause);
    }

    public BusinessException(Throwable cause) {
        super(cause);
    }
}
|
Sonu589/Hacktoberfest2021-1 | Python/Tasks/factorial.py | <filename>Python/Tasks/factorial.py
def factorial(n):
    """Return n! (the product 1 * 2 * ... * n) for a non-negative integer n.

    The original file was syntactically invalid (empty function body,
    Python-2 print statement) and attempted factorial(-6), which is
    undefined; negative input now raises ValueError.

    Args:
        n: non-negative integer.

    Returns:
        n! as an int (factorial(0) == 1).

    Raises:
        ValueError: if n is negative.
    """
    if n < 0:
        raise ValueError("factorial() not defined for negative values")
    result = 1
    for factor in range(2, n + 1):
        result *= factor
    return result


if __name__ == "__main__":
    # Demonstrate with a valid argument.
    print(factorial(6))
|
youngbai/leetcode | src/main/java/edu/neu/leetcode/day20_Bit_Manipulation/LC260_Single_Number_III.java | <gh_stars>0
package edu.neu.leetcode.day20_Bit_Manipulation;
public class LC260_Single_Number_III {
    /*
        Approach: bit manipulation.
        - XOR-ing the whole array cancels every duplicated value, leaving
          ans1 ^ ans2.
        - Any set bit of that result distinguishes ans1 from ans2; the lowest
          one is isolated with x & (-x), the two's-complement trick
          (equivalently x & (~x + 1), since -x == ~x + 1).
        - Partitioning the input by that bit places ans1 and ans2 in different
          groups while every duplicated value lands with its twin, so XOR-ing
          each group independently yields one answer apiece.

        Time: O(N)
        Space: O(1)
     */
    class Solution1_BitManipulation {
        public int[] singleNumber(int[] nums) {
            // XOR of everything == ans1 ^ ans2.
            int xorOfAll = 0;
            for (int value : nums) {
                xorOfAll ^= value;
            }
            // Lowest set bit separates the two answers.
            int separator = xorOfAll & (-xorOfAll);
            int first = 0;
            int second = 0;
            for (int value : nums) {
                if ((separator & value) == 0) {
                    first ^= value;
                } else {
                    second ^= value;
                }
            }
            return new int[]{first, second};
        }
    }
}
|
iasmaro/GitLabAnalyzer | frontend/src/Components/MergeRequestTab/MergeRequestTab.js | <filename>frontend/src/Components/MergeRequestTab/MergeRequestTab.js
import React, { useEffect, useState } from 'react';
import Button from 'react-bootstrap/Button';
import updateMRScore from 'Utils/updateMRScore';
import updateMrCommitScore from 'Utils/updateMrCommitScore';
import MergeRequestList from 'Components/MergeRequestList/MergeRequestList';
import CommitsList from 'Components/CommitsList/CommitsList';
import CodeDifferenceList from 'Components/CodeDifferenceList/CodeDifferenceList';
import './MergeRequestTab.css';
const MergeRequestTab = (props) => {
const { configInfo, mergerequests: MRs, updateCommitsTotal, updateMRsTotal, student, reportName, diffs, setDiffs, activeCommits, setActiveCommits } = props || {};
const [mergerequests, setMergeRequests] = useState();
const [expand, setExpand] = useState(false);
const [diffsTitle, setDiffsTitle] = useState();
const [selected, setSelected] = useState();
const [selectedMR, setSelectedMR] = useState();
const [selectedCommit, setSelectedCommit] = useState();
useEffect(()=> {
setMergeRequests(MRs);
}, [MRs]);
const setCommit = (commitList) => {
setActiveCommits(commitList);
}
const setCodeDiffs = (diffsList, mergeRequestLink) => {
for (let i = 0; mergerequests.length; i++) {
if (mergerequests[i].mergeRequestLink === mergeRequestLink) {
setSelectedMR(i);
break;
}
}
setDiffs(diffsList);
setSelected('MR');
}
const setCommitCodeDiffs = (diffsList, commitLink) => {
for (let i = 0; activeCommits.length; i++) {
if (activeCommits[i].commitLink === commitLink) {
setSelectedCommit(i);
break;
}
}
setDiffs(diffsList);
setSelected('commit');
}
const handleExpand = () => {
setExpand(!expand);
}
const changeMRScore = (scoreChange, diffIndex) => {
if (selected === 'commit') {
const newCommits = activeCommits.slice();
const oldScore = parseFloat(newCommits[selectedCommit].commitScore);
const newScore = Math.round(10*(oldScore + scoreChange)) / 10;
newCommits[selectedCommit].commitScore = newScore;
let newDiffScore = 0;
if (newCommits[selectedCommit]?.commitDiffs[diffIndex]?.scoreDTO) {
const originalScore = newCommits[selectedCommit].commitDiffs[diffIndex].scoreDTO.score;
const modifiedScore = newCommits[selectedCommit].commitDiffs[diffIndex].scoreDTO.modifiedScore;
const oldScore = modifiedScore !== -1 ? modifiedScore : originalScore;
newDiffScore = oldScore + scoreChange;
newCommits[selectedCommit].commitDiffs[diffIndex].scoreDTO.modifiedScore = newDiffScore;
}
setActiveCommits(newCommits);
updateCommitsTotal(scoreChange);
const newMRs = mergerequests.slice();
newMRs[selectedMR].commitDTOList = newCommits;
newMRs[selectedMR].sumOfCommitScore += scoreChange;
setMergeRequests(newMRs);
updateMrCommitScore(selectedMR, selectedCommit, diffIndex, student, newDiffScore, reportName);
} else {
const newMRs = mergerequests.slice();
const oldScore = parseFloat(newMRs[selectedMR].mrscore);
const newScore = Math.round(10*(oldScore + scoreChange)) / 10;
newMRs[selectedMR].mrscore = newScore;
let newDiffScore = 0;
if (newMRs[selectedMR].mergeRequestDiffs[diffIndex]?.scoreDTO) {
const originalScore = newMRs[selectedMR].mergeRequestDiffs[diffIndex].scoreDTO.score;
const modifiedScore = newMRs[selectedMR].mergeRequestDiffs[diffIndex].scoreDTO.modifiedScore;
const oldScore = modifiedScore !== -1 ? modifiedScore : originalScore;
newDiffScore = oldScore + scoreChange;
newMRs[selectedMR].mergeRequestDiffs[diffIndex].scoreDTO.modifiedScore = newDiffScore;
}
updateMRsTotal(scoreChange);
setMergeRequests(newMRs);
updateMRScore(selectedMR, diffIndex, student, newDiffScore, reportName);
}
}
return (
<div className="merge-request-tab">
{!expand && <div className="mrs-left">
<div className="mrs-top">
<MergeRequestList {...props} mergerequests={mergerequests} setCommit={setCommit} setCodeDiffs={setCodeDiffs} setDiffsTitle={setDiffsTitle} />
</div>
<div className="mrs-bottom">
{activeCommits && <CommitsList {...props} commits={activeCommits} setCodeDiffs={setCommitCodeDiffs} setDiffsTitle={setDiffsTitle}/>}
</div>
</div>}
{diffs && <div className="mrs-right">
<Button variant="dark" className="expand-button" onClick={handleExpand}>{expand ? '>' : '<'}</Button>
<div className={`mr-code-diffs ${expand ? 'expanded' : ''}`}>
<CodeDifferenceList diffs={diffs} diffsTitle={diffsTitle} changeMRScore={changeMRScore} configInfo={configInfo}/>
</div>
</div>}
</div>
);
}
export default MergeRequestTab; |
lealceldeiro/org.wcdevs.blog.core | rest/src/test/java/org/wcdevs/blog/core/rest/auth/JwtKeycloakConverterTest.java | <filename>rest/src/test/java/org/wcdevs/blog/core/rest/auth/JwtKeycloakConverterTest.java
package org.wcdevs.blog.core.rest.auth;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.wcdevs.blog.core.rest.TestsUtil.aString;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.junit.jupiter.api.Test;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.oauth2.jwt.Jwt;
import org.wcdevs.blog.core.rest.TestsUtil;
// Unit tests for JwtKeycloakConverter: Keycloak realm roles should map to
// Spring granted authorities, and the preferred_username claim should be
// exposed as the principal username.
class JwtKeycloakConverterTest {
  @Test
  void providerAuthorities() {
    // given: a JWT whose realm_access claim carries two random role names
    JwtKeycloakConverter converter = new JwtKeycloakConverter();
    var realmStub = Map.of(JwtKeycloakConverter.ROLES, List.of(aString(), aString()));
    Map<String, Object> claimsStub = Map.of(aString(), aString(),
                                            JwtKeycloakConverter.REALM_ACCESS, realmStub);
    // each role is expected back as a SimpleGrantedAuthority with the Role prefix
    var expected = realmStub.get(JwtKeycloakConverter.ROLES)
                            .stream()
                            .map(roleMock -> Role.PREFIX + roleMock)
                            .map(SimpleGrantedAuthority::new)
                            .collect(Collectors.toSet());
    var jwtMock = mock(Jwt.class);
    when(jwtMock.getClaims()).thenReturn(claimsStub);
    // when
    var actual = converter.providerAuthorities(jwtMock);
    // then
    assertEquals(expected, actual);
  }
  @Test
  void customClaimsWithActualValue() {
    // The claim is "<username>@<domain>"; the converter is expected to keep
    // only the part before the '@' as the principal username.
    var username = TestsUtil.aString();
    var domain = TestsUtil.aString();
    var claim = username + "@" + domain;
    var jwtMock = mock(Jwt.class);
    when(jwtMock.getClaim(JwtKeycloakConverter.PREFERRED_USERNAME)).thenReturn(claim);
    var actual = new JwtKeycloakConverter().customClaims(jwtMock);
    assertEquals(Map.of(JwtConverter.PRINCIPAL_USERNAME, username), actual);
  }
  @Test
  void customClaimsWithAnonymousValue() {
    // A missing preferred_username claim falls back to the ANONYMOUS marker.
    var jwtMock = mock(Jwt.class);
    when(jwtMock.getClaim(JwtKeycloakConverter.PREFERRED_USERNAME)).thenReturn(null);
    var actual = new JwtKeycloakConverter().customClaims(jwtMock);
    assertEquals(Map.of(JwtConverter.PRINCIPAL_USERNAME, JwtKeycloakConverter.ANONYMOUS), actual);
  }
}
|
yuwenhuisama/Iris-Language | IrisLangLibrary/src/IrisComponents/IrisStatements/IrisInterfaceFunctionStatement.cpp | #include "IrisComponents/IrisStatements/IrisInterfaceFunctionStatement.h"
#include "IrisUnil/IrisIdentifier.h"
#include "IrisCompiler.h"
#include "IrisInstructorMaker.h"
#include "IrisFatalErrorHandler.h"
#include "IrisValidator/IrisStatementValidateVisitor.h"
#include <list>
using namespace std;
// Emits code for an interface-function declaration: resolves the function
// name and each parameter name to identifier-table indices, then produces a
// single def_infs instruction. Returns false if parameter traversal fails.
bool IrisInterfaceFunctionStatement::Generate()
{
    IrisCompiler* pCompiler = IrisCompiler::CurrentCompiler();
    IrisInstructorMaker* pMaker = IrisInstructorMaker::CurrentInstructor();
    // Record the source line so diagnostics can point back to this statement.
    pCompiler->SetLineNumber(m_nLineNumber);
    unsigned int nNameIndex = pCompiler->GetIdentifierIndex(m_pFunctionName->GetIdentifierString(), pCompiler->GetCurrentFileIndex());
    list<IR_DWORD> lsParameters;
    if (m_pParameters) {
        // Collect an identifier index for every declared parameter, in order.
        if(!m_pParameters->Ergodic(
            [&](IrisIdentifier*& pIdentifier) -> bool{
                lsParameters.push_back(pCompiler->GetIdentifierIndex(pIdentifier->GetIdentifierString(), pCompiler->GetCurrentFileIndex()));
                return true;
            }
        ))
            return false;
    }
    // -1 signals "no variadic parameter" to def_infs.
    IR_DWORD dwVariablePrameterIndex = -1;
    if (m_pVariableParameter) {
        dwVariablePrameterIndex = pCompiler->GetIdentifierIndex(m_pVariableParameter->GetIdentifierString(), pCompiler->GetCurrentFileIndex());
    }
    pMaker->def_infs(nNameIndex, lsParameters, dwVariablePrameterIndex);
    return true;
}
// Takes ownership of the name, parameter-list, and variadic-parameter
// identifier objects; all are deleted in the destructor.
IrisInterfaceFunctionStatement::IrisInterfaceFunctionStatement(IrisIdentifier* pFunctionName, IrisList<IrisIdentifier*>* pParameters, IrisIdentifier* pVariableParameters) : m_pFunctionName(pFunctionName), m_pParameters(pParameters), m_pVariableParameter(pVariableParameters)
{
}
// Releases everything the constructor took ownership of: the function name,
// each parameter identifier (plus the list container itself), and the
// variadic-parameter identifier.
IrisInterfaceFunctionStatement::~IrisInterfaceFunctionStatement()
{
    if (m_pFunctionName)
        delete m_pFunctionName;
    if (m_pParameters) {
        // Delete every element before clearing and freeing the container.
        m_pParameters->Ergodic([](IrisIdentifier*& x) -> bool { delete x; x = nullptr; return true; });
        m_pParameters->Clear();
        delete m_pParameters;
    }
    if (m_pVariableParameter)
        delete m_pVariableParameter;
}
// Semantic validation for an interface-function declaration:
//  - it must appear directly inside an interface body,
//  - every parameter must be a local-variable identifier,
//  - the function name must be a local-variable or constance identifier,
//  - the variadic parameter (if any) must be a local-variable identifier.
// Returns false (after reporting a fatal error) on the first violation.
bool IrisInterfaceFunctionStatement::Validate()
{
    auto pCompiler = IrisCompiler::CurrentCompiler();
    if (pCompiler->GetTopUpperType() != IrisCompiler::UpperType::InterfaceBlock) {
        // Message spacing fixed: the original concatenation produced
        // "interface function ofNAMEmust be ...".
        IrisFatalErrorHandler::CurrentFatalHandler()->ShowFatalErrorMessage(IrisFatalErrorHandler::FatalErrorType::IdenfierTypeIrregular, m_nLineNumber, pCompiler->GetCurrentFileIndex(), "interface function of " + m_pFunctionName->GetIdentifierString() + " must be defined in Interface body.");
        return false;
    }
    if (m_pParameters) {
        // Every declared parameter must be a plain local-variable name.
        if (!m_pParameters->Ergodic(
            [&](IrisIdentifier*& pIdentifier) -> bool {
                if (pIdentifier->GetType() != IrisIdentifierType::LocalVariable) {
                    IrisFatalErrorHandler::CurrentFatalHandler()->ShowFatalErrorMessage(IrisFatalErrorHandler::FatalErrorType::IdenfierTypeIrregular, m_nLineNumber, pCompiler->GetCurrentFileIndex(), "Identifier of " + pIdentifier->GetIdentifierString() + " must be a LOCAL VARIABLE name.");
                    return false;
                }
                return true;
            }
        ))
            return false;
    }
    // BUG FIX: the original used `||` here, which is true for EVERY identifier
    // (no identifier can equal two different types at once), so every
    // interface function name was rejected as invalid. A valid method name is
    // either a local-variable-style or a constance identifier, hence `&&`.
    if (m_pFunctionName->GetType() != IrisIdentifierType::LocalVariable
        && m_pFunctionName->GetType() != IrisIdentifierType::Constance) {
        IrisFatalErrorHandler::CurrentFatalHandler()->ShowFatalErrorMessage(IrisFatalErrorHandler::FatalErrorType::IdenfierTypeIrregular, m_nLineNumber, pCompiler->GetCurrentFileIndex(), "Identifier of " + m_pFunctionName->GetIdentifierString() + " is NOT a valid method name.");
        return false;
    }
    if (m_pVariableParameter && m_pVariableParameter->GetType() != IrisIdentifierType::LocalVariable) {
        IrisFatalErrorHandler::CurrentFatalHandler()->ShowFatalErrorMessage(IrisFatalErrorHandler::FatalErrorType::IdenfierTypeIrregular, m_nLineNumber, pCompiler->GetCurrentFileIndex(), "Identifier of " + m_pVariableParameter->GetIdentifierString() + " must be a LOCAL VARIABLE name.");
        return false;
    }
    return true;
}
|
rocketraman/ritzy | src/core/RichText.js | <gh_stars>100-1000
import _ from 'lodash'
import invariant from 'react/lib/invariant'
import Spec from 'swarm/lib/Spec'
import Syncable from 'swarm/lib/Syncable'
import { pushSet, setIntersection } from './utils'
/**
 * One character of the CRDT text: its lamport id, the character value, the
 * set of ids deleted at this position, and an immutable attribute bag
 * (bold, color, ...). Attributes are frozen so they can only be swapped
 * wholesale, never mutated in place.
 */
class Char {
  constructor(id, char, deletedIds, attributes) {
    this.id = id
    this.char = char
    // Accept either a ready-made Set (or null) or an array of ids.
    this.deletedIds = Array.isArray(deletedIds) ? new Set(deletedIds) : deletedIds
    this._attributes = attributes ? Object.freeze(attributes) : null
  }
  get attributes() {
    return this._attributes
  }
  set attributes(attrs) {
    this._attributes = attrs ? Object.freeze(attrs) : null
  }
  /** Shallow, mutable copy of the attribute bag (null when there is none). */
  copyOfAttributes() {
    if (!this._attributes) {
      return null
    }
    return Object.assign({}, this._attributes)
  }
  /** Debug rendering: newline and space shown as visible glyphs, id appended. */
  toString() {
    const printable = this.char.replace('\n', '↵').replace(' ', '␣')
    return printable + ' (' + this.id + ')}'
  }
}
const BASE_CHAR = new Char('00000+swarm', '', null, null)
const EOF = -1
/**
* Contains the textual data and corresponding lamport timestamps (ids) for each character. Each character
* has a primary id, but may have secondary ids in a Set representing deleted characters at that position. In
* addition, each character has a list of other "rich" attributes, such as bold, color, and so forth.
*
* Currently the data storage is in regular JS arrays, but perhaps we could use immutable-js:
* - (possible) faster or more consistent insertion performance, splice performance is implementation dependent
* - blazing fast reference equality comparisons
*/
/**
 * Ordered store of Char objects backing the CRDT text. Index 0 is always the
 * BASE_CHAR sentinel. Deleted characters are not kept in the sequence: their
 * ids are folded into the preceding char's deletedIds set, so every array
 * slot holds a live character.
 */
class TextData {
  constructor() {
    BASE_CHAR.deletedIds = new Set()
    this.chars = [BASE_CHAR]
  }
  // Length including the BASE_CHAR sentinel at index 0.
  len() {
    return this.chars.length
  }
  getChar(pos) {
    invariant(pos < this.len(), 'Index ' + pos + ' out of bounds.')
    // TODO Char should be immutable so that it cannot be modified outside of this class, use Immutable.js Record?
    return this.chars[pos]
  }
  // Insert `char` with lamport id `id` before the current occupant of `pos`.
  // Position 0 (BASE_CHAR) can never be displaced.
  insertChar(pos, char, id, attributes) {
    invariant(pos !== 0, 'Cannot insert at position 0.')
    invariant(pos <= this.len(), 'Index ' + pos + ' out of bounds.')
    this.chars.splice(pos, 0, new Char(id, char, null, this._normalizeAttrs(attributes)))
  }
  // Remove the char at `pos`, folding its id — and any ids it had already
  // absorbed from earlier deletions — into the preceding char's deletedIds.
  deleteChar(pos) {
    invariant(pos !== 0, 'Cannot delete position 0.')
    invariant(pos < this.len(), 'Index ' + pos + ' out of bounds.')
    let previousChar = this.chars[pos - 1]
    let deletedChar = this.chars.splice(pos, 1)[0]
    if(!previousChar.deletedIds) {
      previousChar.deletedIds = new Set()
    }
    previousChar.deletedIds.add(deletedChar.id)
    if(deletedChar.deletedIds) {
      pushSet(deletedChar.deletedIds, previousChar.deletedIds)
    }
  }
  // Replace (not merge) the attributes of the char at `pos`.
  setCharAttr(pos, attributes) {
    invariant(pos !== 0, 'Cannot set attributes of position 0.')
    invariant(pos < this.len(), 'Index ' + pos + ' out of bounds.')
    this.chars[pos].attributes = this._normalizeAttrs(_.clone(attributes))
  }
  // True when the char at `pos` is identified by `ids`: a single id string,
  // an array of ids, or an object exposing `iterator`. With includeDeleted
  // (default true), ids folded into this char's deletedIds also match.
  // NOTE(review): `.iterator` is not a property of a native Set — presumably
  // a legacy shim check; verify a native Set argument isn't silently treated
  // as a single id via the else branch.
  matches(pos, ids, includeDeleted) {
    invariant(pos < this.len(), 'Index out of bounds.')
    includeDeleted = includeDeleted !== false
    if(_.isArray(ids) || ids.iterator) {
      if(!ids.iterator) {
        ids = new Set(ids)
      }
      if(ids.has(this.chars[pos].id)) {
        return true
      }
      if(includeDeleted && this.chars[pos].deletedIds) {
        return setIntersection(this.chars[pos].deletedIds, ids).length > 0
      }
    } else {
      if(ids === this.chars[pos].id) {
        return true
      }
      if(includeDeleted && this.chars[pos].deletedIds) {
        return this.chars[pos].deletedIds.has(ids)
      }
    }
    return false
  }
  // Like matches(), but counts how many of `ids` identify the char at `pos`
  // (primary id plus, when includeDeleted, every matching deleted id).
  matchCount(pos, ids, includeDeleted) {
    invariant(pos < this.len(), 'Index out of bounds.')
    includeDeleted = includeDeleted !== false
    let matches = 0
    if(_.isArray(ids) || ids.iterator) {
      if(!ids.iterator) {
        ids = new Set(ids)
      }
      if(ids.has(this.chars[pos].id)) {
        matches += 1
      }
      if(includeDeleted && this.chars[pos].deletedIds) {
        matches += setIntersection(this.chars[pos].deletedIds, ids).length
      }
    } else {
      if(ids === this.chars[pos].id) {
        matches += 1
      }
      if(includeDeleted && this.chars[pos].deletedIds && this.chars[pos].deletedIds.has(ids)) {
        matches += 1
      }
    }
    return matches
  }
  // The visible text (BASE_CHAR contributes its empty string).
  text() {
    return this.chars.map(c => c.char).join('')
  }
  // Drop falsy attribute values; return null when nothing remains so "no
  // attributes" always has a single canonical representation.
  _normalizeAttrs(attrs) {
    if(!attrs) return null
    Object.keys(attrs).filter(a => !attrs[a]).forEach(a => delete attrs[a])
    return _.isEmpty(attrs) ? null : attrs
  }
}
/**
* This is based on the Text.js demo class from the SwarmJS library by @gritzko, with the following primary
* differences:
*
* 1) The `weave` was replaced with an array of Char objects within TextData.
*
* 2) The `weave` contained characters and then backspace characters for deletions. Deletions are now stored in
* per-character buckets so that they don't have to be constantly filtered out of the weave. This is also quite
* amenable to tombstone clearing.
*
* 3) Added the ability to store rich-text and other attributes in the Char objects.
*
* 4) Created an API to get/set changes via "deltas". The delta format is from https://github.com/ottypes/rich-text.
* This provides some limited support to applications that wish to convert CRDT ops to/from operational transform
* ops. This support is not currently used by Ritzy and may be removed in the future.
*
* 5) A bug in concurrent insertion: the `insert` op was modifying the `ins` object by reference, causing the
* incorrect information to be transmitted to peers. The insert op needs to remain the same for proper application
* on other peers.
*
* Note that for non-basic multilingual plane (BMP) characters (rare!) using string.length could be wrong in
* Javascript. See https://mathiasbynens.be/notes/javascript-encoding.
*/
let Text = Syncable.extend('Text', {
// naive uncompressed CT weave implementation based on Swarm Text.js
defaults: {
data: {type: TextData},
_oplog: Object
},
ops: {
insert(spec, ins, src) { // eslint-disable-line no-unused-vars
let vt = spec.token('!'), v = vt.bare
let ts = v.substr(0, 5), seq = v.substr(5) || '00'
let seqi = Spec.base2int(seq)
let genTs
let insertKeys = ins ? Object.keys(ins) : []
let matchedInsKeys = []
for (let i = 0; i < this.data.len() && matchedInsKeys.length < insertKeys.length; i++) {
for(let j = 0; j < insertKeys.length; j++) {
let insKey = insertKeys[j]
if (this.data.matches(i, insKey)) {
matchedInsKeys.push(insKey)
let str = ins[insKey].value
let attrs = ins[insKey].attributes
let insertionIndex = i + 1
// check for concurrent edits
while (insertionIndex < this.data.len() && this.data.getChar(insertionIndex).id > vt.body) {
insertionIndex++
}
for (let k = 0; k < str.length; k++) {
genTs = ts + (seqi ? Spec.int2base(seqi++, 2) : '') + '+' + vt.ext
this.data.insertChar(insertionIndex + k, str.charAt(k), genTs, attrs)
if (!seqi) {
seqi = 1 // FIXME repeat ids, double insert
}
}
i = str.length + insertionIndex - 1
}
}
}
if(matchedInsKeys.length < insertKeys.length) {
console.warn('Insert op does not match any tree content, ignoring. Failed ops=',
_.difference(insertKeys, matchedInsKeys))
}
if (genTs) {
this._host.clock.checkTimestamp(genTs)
}
},
remove(spec, rm, src) { // eslint-disable-line no-unused-vars
//let v = spec.version()
if(!rm) return
let rmKeys = Object.keys(rm)
for (let i = 1; i < this.data.len(); i++) {
if (this.data.matches(i, rmKeys)) {
this.data.deleteChar(i)
i -= 1
}
}
},
/**
* Set attributes for the given chars. Attributes are overwritten, therefore it is client code's
* responsibility to "merge" existing attributes with new ones.
*/
setAttributes(spec, attrs, src) { // eslint-disable-line no-unused-vars
if(!attrs) return
let attrKeys = Object.keys(attrs)
for (let i = 1; i < this.data.len(); i++) {
for(let j = 0; j < attrKeys.length; j++) {
if (this.data.matches(i, attrKeys[j], false)) {
this.data.setCharAttr(i, attrs[attrKeys[j]])
}
}
}
}
},
text() {
return this.data.text()
},
/**
* A delta is based on the operational transform rich text type. See https://github.com/ottypes/rich-text.
* @param delta
*/
applyDelta(delta) {
let rm = null
let ins = null
let pos = 1 // skip \n #00000+swarm
for(let i = 0; i < delta.length; i++) {
let op = delta[i]
if(op.insert) {
invariant(pos > 0, 'Cannot insert at position 0.')
if(!ins) ins = {}
ins[this.data.getChar(pos - 1).id] = {
value: op.insert,
attributes: op.attributes
}
// we don't increment pos here because the insert hasn't actually happened yet
}
if(op.delete) {
invariant(pos > 0, 'Cannot delete position 0.')
if(!rm) rm = {}
let rmcount = op.delete
for (let j = 0; j < rmcount; j++) {
rm[this.data.getChar(pos).id] = true
pos += 1
}
}
if(op.retain) {
pos += op.retain
}
}
if(rm) this.remove(rm)
if(ins) this.insert(ins)
},
/**
* Obtain a delta based on an insert operation. Note that this must be run *after* the insert has already
* occurred on the replica. This can be used to obtain deltas for updating a local editor based on an op received
* from the replica event system.
* @param op
* @returns {Array}
*/
deltaFromInsert(op) {
let delta = []
let foundCount = 0
let opKeys = op ? Object.keys(op) : []
let lastInsert = 0
for (let i = 0; i < this.data.len(); i++) {
for(let j = 0; j < opKeys.length; j++) {
let opKey = opKeys[j]
if (this.data.matches(i, opKey)) {
if (i - lastInsert > 0) delta.push({retain: i - lastInsert})
let str = op[opKey].value
let deltaOp = {insert: str}
let attrs = op[opKey].attributes
if(attrs) {
deltaOp.attributes = attrs
}
delta.push(deltaOp)
lastInsert = i + str.length
foundCount += 1
if (foundCount >= opKeys.length) {
return delta
}
}
}
}
return delta
},
/**
* Obtain a delta based on a remove operation. Note that this must be run *after* the remove has already
* occurred on the replica. This can be used to obtain deltas for updating a local editor based on an op received
* from the replica event system.
* @param op
* @returns {Array}
*/
deltaFromRemove(op) {
let delta = []
let foundCount = 0
let opKeys = Object.keys(op)
let lastRemove = 0
for (let i = 0; i < this.data.len(); i++) {
let matchCount = this.data.matchCount(i, opKeys)
if (matchCount > 0) {
if(i - lastRemove > 0) delta.push({ retain: i - lastRemove })
// since the delete has already occurred we need to use the number of matched ids at the current char
delta.push({ delete: matchCount })
lastRemove = i
foundCount += matchCount
if(foundCount >= opKeys.length) {
return delta
}
}
}
return delta
},
/**
* Insert chars with optional attributes at a given position.
* @param {Char} char The position at which to insert.
* @param {string} value The string value to insert.
* @param {object} [attributes] Attributes to set, or no attributes if not set. The attributes are
* cloned before setting so that they cannot be modified by simply changing the object reference.
* This type of change would not propagate through the replica.
*/
insertCharsAt(char, value, attributes) {
let ins = {}
ins[char.id] = {
value: value,
attributes: attributes
}
this.insert(ins)
},
/**
* Delete the given chars.
* @param {Char|Char[]} chars
*/
rmChars(chars) {
if(!chars) return
let rm = {}
if(_.isArray(chars)) {
for(let i = 0; i < chars.length; i++) {
rm[chars[i].id] = true
}
} else {
rm[chars.id] = true
}
this.remove(rm)
},
/**
* Sets new text. All current text contents are deleted (though the deleted ids remain).
* @param {string} newText
* @param {object} [attributes]
*/
set(newText, attributes) {
this.rmChars(this.getTextRange(BASE_CHAR))
this.insertCharsAt(BASE_CHAR, newText, attributes)
},
/**
* Gets the length of the current replica data, including the BASE_CHAR (the length of the actual data).
* @returns {number}
*/
len() {
return this.data.len()
},
/**
* Gets the char for the given char or id. Can be used to "refresh" the char information which is
* a snapshot with the latest replica information.
* @param {Char|number} charOrId
* @returns {*}
*/
getChar(charOrId) {
return this.getCharRelativeTo(charOrId, 0, 'error')
},
/**
* Gets the char at the given position. Position 0 is always the BASE_CHAR. An Error is thrown
* if the position is out of bounds.
* @param {number} pos
* @returns {*}
*/
getCharAt(pos) {
return this.data.getChar(pos)
},
/**
* Returns the index of a given char or ID. Index 0 is always the BASE_CHAR. If the char is not
* found, returns -1.
* @param {Char|number} charOrId
* @param {boolean} [includeDeleted=true] Whether to include deletec chars in the match.
* @returns number
*/
indexOf(charOrId, includeDeleted) {
invariant(charOrId, 'From char must be defined.')
let id = _.has(charOrId, 'id') ? charOrId.id : charOrId
for (let i = 0; i < this.data.len(); i++) {
if (this.data.matches(i, id, includeDeleted)) return i
}
return -1
},
/**
* Gets a character relative to another character. Relative can be positive or
* negative. If the position becomes out of bound, the position can wrap, limit to
* the end, or error (depending on the last parameter).
* @param {Char|string} charOrId
* @param {number} relative
* @param {string} [wrap='wrap'] The behavior when the index is out of bounds. Must be one
* of 'wrap', 'limit', 'eof', or 'error'. 'eof' returns EOF (-1) if past the end.
* @return {*}
*/
getCharRelativeTo(charOrId, relative, wrap) {
invariant(charOrId, 'Char must be defined.')
if(_.isUndefined(relative)) relative = 0
if(_.isUndefined(wrap)) wrap = 'wrap'
if(charOrId === EOF) {
if(relative > 0 && (wrap === 'limit' || wrap === 'eof')) return EOF
else if (relative > 0 && wrap === 'error') throw new Error('Index out of bounds, past EOF by: ' + relative)
else if(relative > 0 && wrap === 'wrap') {
charOrId = this.data.getChar(this.data.len() - 1)
}
else {
charOrId = this.data.getChar(this.data.len() - 1)
relative += 1
}
}
let id = _.has(charOrId, 'id') ? charOrId.id : charOrId
for (let i = 0; i < this.data.len(); i++) {
if (this.data.matches(i, id)) {
let index = i + relative
if(wrap === 'wrap') {
if(index < 0) index = this.data.len() + index
else if(index >= this.data.len()) index = index - this.data.len()
} else if (wrap === 'limit') {
if(index < 0) index = 0
else if(index >= this.data.len()) index = this.data.len() - 1
} else if (wrap === 'eof') {
if(index < 0) index = 0
else if(index >= this.data.len()) return EOF
} else if (wrap === 'error') {
if(index < 0 || index >= this.data.len()) {
throw new Error('Index out of bounds: ' + index)
}
} else {
throw new Error('Undefined wrap value: ' + wrap)
}
return this.getCharAt(index)
}
}
},
/**
* Gets all the chars from a given ID (exclusive) to a given ID (inclusive). The length of the returned
* range is going to be `pos(toChar) - pos(fromChar)`.
* @param {Char|string} fromCharOrId
* @param {Char|string} [toCharOrId = last] If the to char does not exist, then to char is the last char.
* @returns {Array}
*/
getTextRange(fromCharOrId, toCharOrId) {
invariant(fromCharOrId, 'From char must be defined.')
let fromMatched = false
let chars = []
let fromId = _.has(fromCharOrId, 'id') ? fromCharOrId.id : fromCharOrId
let toId
if(!_.isUndefined(toCharOrId)) {
toId = _.has(toCharOrId, 'id') ? toCharOrId.id : toCharOrId
}
if(fromId === toId) {
return chars
}
for (let i = 0; i < this.data.len(); i++) {
if (!fromMatched && this.data.matches(i, fromId)) {
// the fromId is exclusive
fromMatched = true
if(fromId === toId) {
chars.push(this.getCharAt(i))
return chars
}
} else if(toId && this.data.matches(i, toId)) {
invariant(fromMatched, 'From id must precede To id.')
chars.push(this.getCharAt(i))
return chars
} else if(fromMatched) {
chars.push(this.getCharAt(i))
}
}
return chars
},
/**
* Compares the position of two chars. Follows the contract of Java Comparator
* (http://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html#compare-T-T-) and returns
* a negative integer, zero, or a positive integer as the first argument is positioned before,
* equal to, or positioned after the second.
* @param {Char|string} charOrId1
* @param {Char|string} charOrId2
* @return {number}
*/
compareCharPos(charOrId1, charOrId2) {
invariant(charOrId1, 'First char must be defined.')
invariant(charOrId2, 'Second char must be defined.')
if(charOrId1 === EOF && charOrId2 === EOF) return 0
else if(charOrId1 === EOF) return 1
else if(charOrId2 === EOF) return -1
let char1Id = _.has(charOrId1, 'id') ? charOrId1.id : charOrId1
let char2Id = _.has(charOrId2, 'id') ? charOrId2.id : charOrId2
let seen1 = false
let seen1Index
let seen2 = false
let seen2Index
for (let i = 0; i < this.data.len(); i++) {
if (!seen1 && this.data.matches(i, char1Id)) {
seen1 = true
seen1Index = i
// special case same char
if(char1Id === char2Id) {
return 0
}
}
if (!seen2 && this.data.matches(i, char2Id)) {
seen2 = true
seen2Index = i
}
if (seen1 && seen2) {
if(seen1Index < seen2Index) return -1
else if(seen1Index === seen2Index) return 0
else return 1
}
}
throw new Error('One or both chars were not found.')
}
})
export default Text
export { BASE_CHAR, EOF, Char, TextData }
|
MrEric125/louis-coub | demoProject/demo/src/main/java/louis/coub/netty/nio/channel/ChannelAccept.java | <reponame>MrEric125/louis-coub<filename>demoProject/demo/src/main/java/louis/coub/netty/nio/channel/ChannelAccept.java<gh_stars>0
package louis.coub.netty.nio.channel;
import java.nio.ByteBuffer;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;
import java.net.InetSocketAddress;
/**
* Test nonblocking accept( ) using ServerSocketChannel.
* Start this program, then "telnet localhost 1234" to
* connect to it.
*
* @author <NAME> (<EMAIL>)
*/
public class ChannelAccept {
    public static final String GREETING = "Hello I must be going.\r\n";
    // Demo entry point: polls a non-blocking ServerSocketChannel forever,
    // writing GREETING to each client that connects and then closing it.
    // The listen port defaults to 1234 and can be overridden by argv[0].
    public static void main(String[] argv)
            throws Exception {
        int port = 1234; // default
        if (argv.length > 0) {
            port = Integer.parseInt(argv[0]);
        }
        ByteBuffer buffer = ByteBuffer.wrap(GREETING.getBytes());
        ServerSocketChannel ssc = ServerSocketChannel.open();
        ssc.socket().bind(new InetSocketAddress(port));
        // Non-blocking mode: accept() returns null instead of waiting.
        ssc.configureBlocking(false);
        while (true) {
            System.out.println("Waiting for connections");
            SocketChannel sc = ssc.accept();
            if (sc == null) {
                // no connections, snooze a while
                Thread.sleep(2000);
            } else {
                System.out.println("Incoming connection from: "
                        + sc.socket().getRemoteSocketAddress());
                // Rewind so every client receives the full greeting.
                buffer.rewind();
                sc.write(buffer);
                sc.close();
            }
        }
    }
}
ifwe/digsby | digsby/src/gui/uberwidgets/formattedinput2/fromattedinputevents.py | <filename>digsby/src/gui/uberwidgets/formattedinput2/fromattedinputevents.py
#@PydevCodeAnalysisIgnore
import wx
import wx.lib.newevent
newevt = wx.lib.newevent.NewCommandEvent  # shorthand for wx's command-event factory
# Custom command event (and its binder) fired when the input's text formatting changes.
TextFormatChangedEvent, EVT_TEXT_FORMAT_CHANGED = newevt()
richardcpeterson/hyper63 | packages/adapter-couchdb/index.js | globalThis.fetch = require('@vercel/fetch-retry')(require('node-fetch'))
const createAdapter = require('./adapter')
const { asyncFetch, createHeaders, handleResponse } = require('./async-fetch')
/**
* @param {object} config
* @returns {object}
*/
/**
 * hyper63 data-port adapter plugin for CouchDB.
 *
 * @param {object} config plugin configuration captured at construction time
 * @returns {object} frozen plugin definition ({id, port, load, link})
 */
module.exports = function CouchDataAdapter (config) {
  /**
   * Called by hyper63 when the plugin loads; the returned value becomes the
   * environment handed to link(). Takes no arguments (the original JSDoc
   * claiming an `env` parameter was incorrect).
   * @returns {object} the captured config
   */
  function load() {
    return config
  }
  /**
   * @param {object} env environment from load(); `url` is the CouchDB
   * endpoint, defaulting to a local dev instance
   * @returns {function} factory producing the data-port adapter
   */
  function link(env={url: 'http://localhost:5984'}) {
    /**
     * @returns {object} the adapter implementation built over `env.url`
     */
    return function () {
      // parse url
      const config = new URL(env.url)
      return createAdapter({ config })
    }
  }
  return Object.freeze({
    id: 'couchdb-data-adapter',
    port: 'data',
    load,
    link
  })
}
drmorr0/backuppy | itests/manifest_guarantees_test.py | import argparse
import os
import sqlite3
import sys
import pytest
from backuppy.cli.backup import main as backup
from backuppy.util import sha_to_path
from itests.conftest import _TestFileData
from itests.conftest import BACKUP_DIR
from itests.conftest import DATA_DIRS
from itests.conftest import get_latest_manifest
from itests.conftest import ITEST_CONFIG
from itests.conftest import itest_setup
from itests.conftest import ItestException
from itests.conftest import make_trace_func
test_file_history = dict() # type: ignore
DATA_DIR = DATA_DIRS[0]
BACKUP_ARGS = argparse.Namespace(
log_level='debug',
config=ITEST_CONFIG,
preserve_scratch_dir=True,
dry_run=False,
name='data1_backup',
)
def abort():
    # Trace-hook payload: raising here simulates a crash at the traced line
    # (installed via make_trace_func in the tests below).
    raise ItestException('abort')
def assert_manifest_correct(before):
    # Check the latest backup manifest against the recorded file history.
    # `before` selects the expected state: True = before the fourth file
    # write landed (3 rows), False = after it (4 rows).
    manifest_conn = sqlite3.connect(get_latest_manifest())
    manifest_conn.row_factory = sqlite3.Row
    manifest_cursor = manifest_conn.cursor()
    manifest_cursor.execute('select * from manifest')
    rows = manifest_cursor.fetchall()
    assert len(rows) == (3 if before else 4)
    for row in rows:
        # The manifest stores absolute paths; strip everything before
        # DATA_DIR so the path keys into test_file_history.
        start_pos = row['abs_file_name'].find(DATA_DIR)
        filename = row['abs_file_name'][start_pos:]
        assert row['sha'] in set([tfd.sha for tfd in test_file_history[filename]])
@pytest.fixture(autouse=True, scope='module')
def setup_manifest():
with itest_setup(
test_file_history,
_TestFileData('foo', 'asdf'),
_TestFileData('bar', 'hjkl'),
_TestFileData('baz/buz', 'qwerty'),
):
backup(BACKUP_ARGS)
yield
def test_m1_crash_before_save():
sys.settrace(make_trace_func('test_m1_crash_before_save', abort))
with itest_setup(
test_file_history,
_TestFileData('foo', 'asdfhjkl'),
), pytest.raises(Exception):
backup(BACKUP_ARGS)
assert_manifest_correct(before=True)
file_data_path = os.path.join(DATA_DIR, 'foo')
file_backup_path = os.path.join(
BACKUP_DIR,
sha_to_path(test_file_history[file_data_path][1].sha),
)
assert os.path.exists(file_backup_path)
def test_m1_crash_after_save():
sys.settrace(make_trace_func('test_m1_crash_after_save', abort))
with itest_setup(test_file_history), pytest.raises(Exception):
backup(BACKUP_ARGS)
assert_manifest_correct(before=False)
def test_m2_crash_before_file_save():
sys.settrace(make_trace_func('test_m2_crash_before_file_save', abort))
with itest_setup(
test_file_history,
_TestFileData('another_file', '1234'),
):
backup(BACKUP_ARGS)
manifest_conn = sqlite3.connect(get_latest_manifest())
manifest_conn.row_factory = sqlite3.Row
manifest_cursor = manifest_conn.cursor()
manifest_cursor.execute('select * from manifest where abs_file_name like "%another_file"')
rows = manifest_cursor.fetchall()
assert not rows
def test_m2_crash_after_file_save():
sys.settrace(make_trace_func('test_m2_crash_after_file_save', abort))
with itest_setup(test_file_history):
backup(BACKUP_ARGS)
manifest_conn = sqlite3.connect(get_latest_manifest())
manifest_conn.row_factory = sqlite3.Row
manifest_cursor = manifest_conn.cursor()
manifest_cursor.execute('select * from manifest where abs_file_name like "%another_file"')
rows = manifest_cursor.fetchall()
assert len(rows) == 1
assert rows[0]['sha'] == test_file_history[os.path.join(DATA_DIR, 'another_file')][0].sha
|
mib1185/core | homeassistant/components/juicenet/number.py | <gh_stars>1000+
"""Support for controlling juicenet/juicepoint/juicebox based EVSE numbers."""
from __future__ import annotations
from dataclasses import dataclass
from pyjuicenet import Api, Charger
from homeassistant.components.number import NumberEntity, NumberEntityDescription
from homeassistant.components.number.const import DEFAULT_MAX_VALUE
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import DOMAIN, JUICENET_API, JUICENET_COORDINATOR
from .entity import JuiceNetDevice
@dataclass
class JuiceNetNumberEntityDescriptionMixin:
    """Mixin for required keys."""

    # Name of the pyjuicenet Charger coroutine used to write a new value
    # (awaited by JuiceNetNumber.async_set_native_value).
    setter_key: str
@dataclass
class JuiceNetNumberEntityDescription(
    NumberEntityDescription, JuiceNetNumberEntityDescriptionMixin
):
    """An entity description for a JuiceNetNumber."""

    # Optional Charger attribute name providing a per-device maximum; when
    # set, it takes precedence over the static native_max_value.
    native_max_value_key: str | None = None
# Descriptions of the number entities created for every JuiceNet device.
NUMBER_TYPES: tuple[JuiceNetNumberEntityDescription, ...] = (
    JuiceNetNumberEntityDescription(
        name="Amperage Limit",
        key="current_charging_amperage_limit",
        native_min_value=6,
        # Device-specific maximum, read from Charger.max_charging_amperage.
        native_max_value_key="max_charging_amperage",
        native_step=1,
        setter_key="set_charging_amperage_limit",
    ),
)
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the JuiceNet Numbers."""
    juicenet_data = hass.data[DOMAIN][config_entry.entry_id]
    api: Api = juicenet_data[JUICENET_API]
    coordinator = juicenet_data[JUICENET_COORDINATOR]

    # One entity per (device, description) pair, device-major order.
    entities = []
    for device in api.devices:
        for description in NUMBER_TYPES:
            entities.append(JuiceNetNumber(device, description, coordinator))
    async_add_entities(entities)
class JuiceNetNumber(JuiceNetDevice, NumberEntity):
    """Implementation of a JuiceNet number."""

    entity_description: JuiceNetNumberEntityDescription

    def __init__(
        self,
        device: Charger,
        description: JuiceNetNumberEntityDescription,
        coordinator: DataUpdateCoordinator,
    ) -> None:
        """Initialise the number."""
        super().__init__(device, description.key, coordinator)
        self.entity_description = description
        self._attr_name = f"{self.device.name} {description.name}"

    @property
    def native_value(self) -> float | None:
        """Return the value of the entity."""
        return getattr(self.device, self.entity_description.key, None)

    @property
    def native_max_value(self) -> float:
        """Return the maximum value."""
        description = self.entity_description
        # A device-provided maximum wins over the static one; fall back to
        # the platform default when neither is configured.
        if description.native_max_value_key is not None:
            return getattr(self.device, description.native_max_value_key)
        if description.native_max_value is not None:
            return description.native_max_value
        return DEFAULT_MAX_VALUE

    async def async_set_native_value(self, value: float) -> None:
        """Update the current value."""
        setter = getattr(self.device, self.entity_description.setter_key)
        await setter(value)
|
Tonypz/core-ui | src/form/validators/emailValidator.js | <gh_stars>0
export default config => {
    // Merge caller overrides onto the defaults; note the message is resolved
    // eagerly at factory time unless the caller supplies a function.
    const options = _.extend(
        {
            type: 'email',
            message: Localizer.get('CORE.FORM.VALIDATION.EMAIL')
        },
        config
    );

    return function email(value) {
        options.value = value;
        const message = typeof options.message === 'function' ? options.message(options) : options.message;
        const err = {
            type: options.type,
            message
        };

        // Empty-ish values pass; pair with a "required" validator if needed.
        const isBlank = value === null || value === undefined || value === false || value === '';
        if (isBlank) {
            return;
        }

        const emailPattern = /^[\w\-]{1,}([\w\-\+.]{1,1}[\w\-]{1,}){0,}[@][\w\-]{1,}([.]([\w\-]{1,})){1,3}$/;
        if (!emailPattern.test(value)) {
            return err;
        }
    };
};
|
cheminfo/pdb-explorer | site/visualizer/modules/types/science/chemistry/jsme/lib/jsme/deferredjs/F76C633D69E95E4CB8276B2C4E34C4EA/3.cache.js | <reponame>cheminfo/pdb-explorer
$wnd.jsme.runAsyncCallback3('r(577,574,Bh);_.$c=function(){this.a.ac&&XK(this.a.ac);this.a.ac=new bL(1,this.a)};x(OG)(3);\n//@ sourceURL=3.js\n')
|
crazyskateface/lumberyard | dev/Code/Framework/AzToolsFramework/AzToolsFramework/UI/Logging/LogControl.h | /*
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
* its licensors.
*
* For complete copyright and license terms please see the LICENSE at the root of this
* distribution (the "License"). All use of this software is governed by the License,
* or, if provided, by the license below or the license accompanying this file. Do not
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
*/
#ifndef LOGCONTROL_H
#define LOGCONTROL_H
#pragma once
#include <AzCore/Memory/SystemAllocator.h>
#include "LoggingCommon.h"
#include <QWidget>
#include <QString>
class QIcon;
class QAction;
class QTableView;
class QAbstractItemModel;
namespace AzToolsFramework
{
// to use this control, put one of these controls in your ui. You can inherit from this class
// and override functionality like the GetItemColumn() functions, if you want to swap the order of columns around
// or eliminate columns because your data does not have those columns.
// once you have the view ready, you need a data model class. The data model can be the same as the view in fact. Its any class
// which exposes QAbstractItemModel interface. Most often its another QObject, though.
// Your data model can be derived from any item model, (but I recommend QAbstractTableModel) and should implement at least the following overrides:
// * virtual int rowCount(const QModelIndex&) const;
// * virtual int columnCount(const QModelIndex &) const;
// * virtual QVariant data(const QModelIndex &,int role) const;
// * virtual Qt::ItemFlags flags(const QModelIndex &index) const;
// then call ConnectModelToView() to attach the model to this view.
// for an example see LogPanel_Panel.h and cpp - see AZTracePrintFLogTab - its an implementation of this control
// which listens on the AZ::Debug::TraceMessageBus and forwards the messages to the log.
// for an example of a data model, see LogPanelDataModel in LogPanel_Panel.h and cpp.
namespace LogPanel
{
// Base widget for a table-backed log view. Subclasses supply the data model
// (any QAbstractItemModel) via ConnectModelToView(); column-index virtuals
// below let subclasses remap or drop columns.
class BaseLogView
    : public QWidget
{
    Q_OBJECT;
public:
    AZ_CLASS_ALLOCATOR(BaseLogView, AZ::SystemAllocator, 0);

    BaseLogView(QWidget* pParent);
    virtual ~BaseLogView();

    // you MUST call this, it sets up the model and connects various signals.
    void ConnectModelToView(QAbstractItemModel* ptrModel);

    // whether to expand the row height of the current item to show the full message text.
    void SetCurrentItemExpandsToFit(bool expandsToFit);

    // utility functions to retrieve standard icons without having to load them or prepare them over and over.
    // Backed by the static QIcon pointers below, shared across all instances.
    static QIcon& GetInformationIcon();
    static QIcon& GetWarningIcon();
    static QIcon& GetErrorIcon();
    static QIcon& GetDebugIcon();

protected:
    QTableView* m_ptrLogView; // this is the actual table view. You probably don't need to do anything to this, since its model view based.

    // return -1 for any of these to indicate that your data has no such column.
    // make sure your model has the same semantics!
    virtual int GetIconColumn() { return 0; }
    virtual int GetTimeColumn() { return 1; }
    virtual int GetWindowColumn() { return 2; }
    virtual int GetMessageColumn() { return 3; } // you may not return -1 for this one.

    // override this if you want to provide an implementation that decorates the text in some way.
    // its only used in copy and paste, so this is what formats for the clipboard.
    // the default implementation simply concatenates all columns of text with " - " seperating them.
    virtual QString ConvertRowToText(const QModelIndex& row);

    // utility function. You can call this to determine if you need to continue to keep scrolling to the bottom whenever
    // you alter the data. call scrollToBottom to scroll to the bottom!
    bool IsAtMaxScroll() const;

    // Context menus. you can add whatever else you want to it. You don't need to override this.
    // but you can if you want to delete the actions so that you cant select.
    // just create QActions, and add them to 'this' at any time.
    virtual void CreateContextMenu();

    // Backing code to the context menu
    QAction* actionSelectAll;
    QAction* actionSelectNone;
    QAction* actionCopySelected;
    QAction* actionCopyAll;

public slots:
    // call this base if you override this, please. It makes sure items are the right size.
    virtual void rowsInserted(const QModelIndex& parent, int start, int end);

    // Backing code to the context menu. You can override these to do what ever
    // the default implementation will call ConvertRowToText().
    virtual void SelectAll();
    virtual void SelectNone();
    virtual void CopyAll();
    virtual void CopySelected();

    // Keeps the selected row's height in sync when expand-to-fit is enabled.
    void CurrentItemChanged(const QModelIndex& current, const QModelIndex& previous);

    // connect to this signal if you want to know when someone clicked a link in a URL in a rich text box.
signals:
    void onLinkActivated(const QString& link);

private:
    bool m_currentItemExpandsToFit;

    // used to avoid reloading the icons over and over...
    // Ref-count of live panels; icons are created on first use and shared.
    static int s_panelRefCount;
    static QIcon* s_criticalIcon;
    static QIcon* s_warningIcon;
    static QIcon* s_informationIcon;
    static QIcon* s_debugIcon;
};
} // namespace LogPanel
} // namespace AzToolsFramework
#endif
|
phatblat/macOSPrivateFrameworks | PrivateFrameworks/CoreHandwriting/CHMergedStrokeGroupingResults.h | <reponame>phatblat/macOSPrivateFrameworks
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>.
//
#import <CoreHandwriting/CHStrokeGroupingResult.h>
@class NSDictionary;
// Grouping result that merges the outputs of several grouping strategies.
// Header recovered via class-dump, so only declarations are visible here.
@interface CHMergedStrokeGroupingResults : CHStrokeGroupingResult
{
    NSDictionary *_groupingResultsByStrategyIdentifier;
}

// Per-strategy results -- presumably strategy identifier -> grouping result;
// verify against the initializer's callers.
@property(readonly, retain, nonatomic) NSDictionary *groupingResultsByStrategyIdentifier; // @synthesize groupingResultsByStrategyIdentifier=_groupingResultsByStrategyIdentifier;

- (void)dealloc;
- (id)initWithStrokeGroups:(id)arg1 createdStrokeGroups:(id)arg2 deletedStrokeGroups:(id)arg3 groupingResultsByStrategyIdentifier:(id)arg4;

@end
|
AppGyver/kontena | server/app/mutations/stacks/deploy.rb | module Stacks
class Deploy < Mutations::Command
include Workers
required do
model :current_user, class: User
model :stack, class: Stack
end
def validate
self.stack.grid_services.each do |service|
outcome = GridServices::Deploy.validate(grid_service: service)
unless outcome.success?
add_error(:service, :deploy, outcome.errors.message)
end
end
end
def execute
self.stack.state = :deployed
self.stack.save
# Deploy all services of the stack
self.stack.grid_services.each do |service|
outcome = GridServices::Deploy.run(grid_service: service)
unless outcome.success?
add_error(:service, :deploy, outcome.errors.message)
end
end
end
end
end
|
ant-nihil/routen-qgroundcontrol | libs/qmlglsink/gst-plugins-good/tests/check/elements/udpsrc.c | <reponame>ant-nihil/routen-qgroundcontrol<filename>libs/qmlglsink/gst-plugins-good/tests/check/elements/udpsrc.c
/* GStreamer UDP source unit tests
* Copyright (C) 2011 <NAME> <tim centricular net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include <gst/check/gstcheck.h>
#include <gio/gio.h>
#include <stdlib.h>
/* Catch-all sink pad template used to pull buffers out of udpsrc. */
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS_ANY);
/* Creates a udpsrc element bound to a random free port, attaches an active
 * check sink pad, sets the element PLAYING, then creates a loopback IPv4 UDP
 * socket and the matching destination address for sending test packets.
 * Returns FALSE only when the send socket cannot be created; in that case
 * *sa is left untouched (callers initialise it to NULL). */
static gboolean
udpsrc_setup (GstElement ** udpsrc, GSocket ** socket,
    GstPad ** sinkpad, GSocketAddress ** sa)
{
  GInetAddress *ia;
  int port = 0;
  gchar *s;

  *udpsrc = gst_check_setup_element ("udpsrc");
  fail_unless (*udpsrc != NULL);
  /* port=0 asks the element to pick a free port itself. */
  g_object_set (*udpsrc, "port", 0, NULL);

  *sinkpad = gst_check_setup_sink_pad_by_name (*udpsrc, &sinktemplate, "src");
  fail_unless (*sinkpad != NULL);
  gst_pad_set_active (*sinkpad, TRUE);

  gst_element_set_state (*udpsrc, GST_STATE_PLAYING);

  /* read back the actual port the element bound to */
  g_object_get (*udpsrc, "port", &port, NULL);
  GST_INFO ("udpsrc port = %d", port);

  *socket = g_socket_new (G_SOCKET_FAMILY_IPV4, G_SOCKET_TYPE_DATAGRAM,
      G_SOCKET_PROTOCOL_UDP, NULL);
  if (*socket == NULL) {
    GST_WARNING ("Could not create IPv4 UDP socket for unit test");
    return FALSE;
  }

  ia = g_inet_address_new_loopback (G_SOCKET_FAMILY_IPV4);
  s = g_inet_address_to_string (ia);
  GST_LOG ("inet address %s", s);
  g_free (s);
  *sa = g_inet_socket_address_new (ia, port);
  g_object_unref (ia);

  return TRUE;
}
/* Checks that udpsrc pushes zero-length packets (or at least doesn't choke on
 * them): after sending a 0-byte then a 6-byte datagram, the last received
 * buffer must be the 6-byte payload, and an optional extra buffer must be
 * empty. */
GST_START_TEST (test_udpsrc_empty_packet)
{
  GSocketAddress *sa = NULL;
  GstElement *udpsrc = NULL;
  GSocket *socket = NULL;
  GstPad *sinkpad = NULL;

  if (!udpsrc_setup (&udpsrc, &socket, &sinkpad, &sa))
    goto no_socket;

  if (g_socket_send_to (socket, sa, "HeLL0", 0, NULL, NULL) == 0) {
    GST_INFO ("sent 0 bytes");
    if (g_socket_send_to (socket, sa, "HeLL0", 6, NULL, NULL) == 6) {
      GstMapInfo map;
      GstBuffer *buf;
      guint len = 0;

      GST_INFO ("sent 6 bytes");

      g_mutex_lock (&check_mutex);
      len = g_list_length (buffers);
      while (len < 1) {
        g_cond_wait (&check_cond, &check_mutex);
        len = g_list_length (buffers);
        GST_INFO ("%u buffers", len);
      }
      /* wait a bit more for a second buffer */
      if (len < 2) {
        g_cond_wait_until (&check_cond, &check_mutex,
            g_get_monotonic_time () + G_TIME_SPAN_SECOND / 100);
        len = g_list_length (buffers);
        GST_INFO ("%u buffers", len);
      }
      fail_unless (len == 1 || len == 2);

      /* last buffer should be our HeLL0 string */
      buf = GST_BUFFER (g_list_nth_data (buffers, len - 1));
      gst_buffer_map (buf, &map, GST_MAP_READ);
      fail_unless_equals_int (map.size, 6);
      fail_unless_equals_string ((gchar *) map.data, "HeLL0");
      gst_buffer_unmap (buf, &map);

      /* if there's another buffer, it should be 0 bytes */
      if (len == 2) {
        buf = GST_BUFFER (g_list_nth_data (buffers, 0));
        fail_unless_equals_int (gst_buffer_get_size (buf), 0);
      }
      g_mutex_unlock (&check_mutex);
    } else {
      GST_WARNING ("send_to(6 bytes) failed");
    }
  } else {
    GST_WARNING ("send_to(0 bytes) failed");
  }

no_socket:
  gst_element_set_state (udpsrc, GST_STATE_NULL);
  gst_check_drop_buffers ();
  gst_check_teardown_pad_by_name (udpsrc, "src");
  gst_check_teardown_element (udpsrc);
  /* Fix: when udpsrc_setup() fails, socket and sa are still NULL and
   * g_object_unref(NULL) triggers a GLib critical; g_clear_object() is
   * NULL-safe. */
  g_clear_object (&socket);
  g_clear_object (&sa);
}

GST_END_TEST;
GST_START_TEST (test_udpsrc)
{
GSocketAddress *sa = NULL;
GstElement *udpsrc = NULL;
GSocket *socket = NULL;
GstPad *sinkpad = NULL;
GstBuffer *buf;
GstMemory *mem;
gchar data[48000];
gsize max_size;
int i, len = 0;
gssize sent;
GError *err = NULL;
for (i = 0; i < G_N_ELEMENTS (data); ++i)
data[i] = i & 0xff;
if (!udpsrc_setup (&udpsrc, &socket, &sinkpad, &sa))
goto no_socket;
if ((sent = g_socket_send_to (socket, sa, data, 48000, NULL, &err)) == -1)
goto send_failure;
fail_unless_equals_int (sent, 48000);
if ((sent = g_socket_send_to (socket, sa, data, 21000, NULL, &err)) == -1)
goto send_failure;
fail_unless_equals_int (sent, 21000);
if ((sent = g_socket_send_to (socket, sa, data, 500, NULL, &err)) == -1)
goto send_failure;
fail_unless_equals_int (sent, 500);
if ((sent = g_socket_send_to (socket, sa, data, 1600, NULL, &err)) == -1)
goto send_failure;
fail_unless_equals_int (sent, 1600);
if ((sent = g_socket_send_to (socket, sa, data, 1400, NULL, &err)) == -1)
goto send_failure;
fail_unless_equals_int (sent, 1400);
GST_INFO ("sent some packets");
g_mutex_lock (&check_mutex);
len = g_list_length (buffers);
while (len < 5) {
g_cond_wait (&check_cond, &check_mutex);
len = g_list_length (buffers);
GST_INFO ("%u buffers", len);
}
/* check that large packets are made up of multiple memory chunks and that
* the first one is fairly small */
buf = GST_BUFFER (g_list_nth_data (buffers, 0));
fail_unless_equals_int (gst_buffer_get_size (buf), 48000);
fail_unless_equals_int (gst_buffer_n_memory (buf), 2);
mem = gst_buffer_peek_memory (buf, 0);
gst_memory_get_sizes (mem, NULL, &max_size);
fail_unless (max_size <= 2000);
buf = GST_BUFFER (g_list_nth_data (buffers, 1));
fail_unless_equals_int (gst_buffer_get_size (buf), 21000);
fail_unless_equals_int (gst_buffer_n_memory (buf), 2);
mem = gst_buffer_peek_memory (buf, 0);
gst_memory_get_sizes (mem, NULL, &max_size);
fail_unless (max_size <= 2000);
buf = GST_BUFFER (g_list_nth_data (buffers, 2));
fail_unless_equals_int (gst_buffer_get_size (buf), 500);
fail_unless_equals_int (gst_buffer_n_memory (buf), 1);
mem = gst_buffer_peek_memory (buf, 0);
gst_memory_get_sizes (mem, NULL, &max_size);
fail_unless (max_size <= 2000);
buf = GST_BUFFER (g_list_nth_data (buffers, 3));
fail_unless_equals_int (gst_buffer_get_size (buf), 1600);
fail_unless_equals_int (gst_buffer_n_memory (buf), 2);
mem = gst_buffer_peek_memory (buf, 0);
gst_memory_get_sizes (mem, NULL, &max_size);
fail_unless (max_size <= 2000);
buf = GST_BUFFER (g_list_nth_data (buffers, 4));
fail_unless_equals_int (gst_buffer_get_size (buf), 1400);
fail_unless_equals_int (gst_buffer_n_memory (buf), 1);
mem = gst_buffer_peek_memory (buf, 0);
gst_memory_get_sizes (mem, NULL, &max_size);
fail_unless (max_size <= 2000);
g_list_foreach (buffers, (GFunc) gst_buffer_unref, NULL);
g_list_free (buffers);
buffers = NULL;
g_mutex_unlock (&check_mutex);
no_socket:
send_failure:
if (err) {
GST_WARNING ("Socket send error, skipping test: %s", err->message);
g_clear_error (&err);
}
gst_element_set_state (udpsrc, GST_STATE_NULL);
gst_check_drop_buffers ();
gst_check_teardown_pad_by_name (udpsrc, "src");
gst_check_teardown_element (udpsrc);
g_object_unref (socket);
g_object_unref (sa);
}
GST_END_TEST;
/* Assembles the check test suite for udpsrc. */
static Suite *
udpsrc_suite (void)
{
  Suite *s = suite_create ("udpsrc");
  TCase *tc = tcase_create ("udpsrc");

  suite_add_tcase (s, tc);
  tcase_add_test (tc, test_udpsrc_empty_packet);
  tcase_add_test (tc, test_udpsrc);

  return s;
}
GST_CHECK_MAIN (udpsrc)
|
GhostMachineSoftware/SPFx_GetListItems | node_modules/office-ui-fabric-react/lib/components/TextField/TextField.doc.js | <filename>node_modules/office-ui-fabric-react/lib/components/TextField/TextField.doc.js
import * as React from 'react';
import { TextFieldBasicExample } from './examples/TextField.Basic.Example';
import { TextFieldBorderlessExample } from './examples/TextField.Borderless.Example';
import { TextFieldCustomRenderExample } from './examples/TextField.CustomRender.Example';
import { TextFieldErrorMessageExample } from './examples/TextField.ErrorMessage.Example';
import { TextFieldMultilineExample } from './examples/TextField.Multiline.Example';
import { TextFieldControlledExample } from './examples/TextField.Controlled.Example';
import { TextFieldPrefixAndSuffixExample } from './examples/TextField.PrefixAndSuffix.Example';
import { TextFieldStyledExample } from './examples/TextField.Styled.Example';
import { TextFieldMaskedExample } from './examples/TextField.Masked.Example';
// Raw example sources, loaded as plain text (raw-loader) so the docs site can
// show the code next to each live example.
var TextFieldBasicExampleCode = require('!raw-loader!office-ui-fabric-react/src/components/TextField/examples/TextField.Basic.Example.tsx');
var TextFieldBorderlessExampleCode = require('!raw-loader!office-ui-fabric-react/src/components/TextField/examples/TextField.Borderless.Example.tsx');
var TextFieldCustomRenderExampleCode = require('!raw-loader!office-ui-fabric-react/src/components/TextField/examples/TextField.CustomRender.Example.tsx');
var TextFieldErrorMessageExampleCode = require('!raw-loader!office-ui-fabric-react/src/components/TextField/examples/TextField.ErrorMessage.Example.tsx');
var TextFieldMultilineExampleCode = require('!raw-loader!office-ui-fabric-react/src/components/TextField/examples/TextField.Multiline.Example.tsx');
var TextFieldControlledExampleCode = require('!raw-loader!office-ui-fabric-react/src/components/TextField/examples/TextField.Controlled.Example.tsx');
var TextFieldPrefixAndSuffixExampleCode = require('!raw-loader!office-ui-fabric-react/src/components/TextField/examples/TextField.PrefixAndSuffix.Example.tsx');
var TextFieldStyledExampleCode = require('!raw-loader!office-ui-fabric-react/src/components/TextField/examples/TextField.Styled.Example.tsx');
var TextFieldMaskedExampleCode = require('!raw-loader!office-ui-fabric-react/src/components/TextField/examples/TextField.Masked.Example.tsx');
// Documentation-page definition consumed by the Fabric docs site; each
// `examples` entry pairs an example's source text with its rendered component.
export var TextFieldPageProps = {
    title: 'TextField',
    componentName: 'TextField',
    componentUrl: 'https://github.com/OfficeDev/office-ui-fabric-react/tree/master/packages/office-ui-fabric-react/src/components/TextField',
    examples: [
        {
            title: 'Basic TextFields',
            code: TextFieldBasicExampleCode,
            view: React.createElement(TextFieldBasicExample, null)
        },
        {
            title: 'Controlled TextFields',
            code: TextFieldControlledExampleCode,
            view: React.createElement(TextFieldControlledExample, null)
        },
        {
            title: 'Multiline TextField',
            code: TextFieldMultilineExampleCode,
            view: React.createElement(TextFieldMultilineExample, null)
        },
        {
            title: 'Underlined and borderless TextFields',
            code: TextFieldBorderlessExampleCode,
            view: React.createElement(TextFieldBorderlessExample, null)
        },
        {
            title: 'Customizable Masked TextField',
            code: TextFieldMaskedExampleCode,
            view: React.createElement(TextFieldMaskedExample, null)
        },
        {
            title: 'TextField with prefix and/or suffix',
            code: TextFieldPrefixAndSuffixExampleCode,
            view: React.createElement(TextFieldPrefixAndSuffixExample, null)
        },
        {
            title: 'TextFields with custom rendering',
            code: TextFieldCustomRenderExampleCode,
            view: React.createElement(TextFieldCustomRenderExample, null)
        },
        {
            title: 'TextField error message variations',
            code: TextFieldErrorMessageExampleCode,
            view: React.createElement(TextFieldErrorMessageExample, null)
        },
        {
            title: 'TextField subcomponent styling',
            code: TextFieldStyledExampleCode,
            view: React.createElement(TextFieldStyledExample, null)
        }
    ],
    overview: require('!raw-loader!office-ui-fabric-react/src/components/TextField/docs/TextFieldOverview.md'),
    bestPractices: '',
    dos: require('!raw-loader!office-ui-fabric-react/src/components/TextField/docs/TextFieldDos.md'),
    donts: require('!raw-loader!office-ui-fabric-react/src/components/TextField/docs/TextFieldDonts.md'),
    isHeaderVisible: false,
    isFeedbackVisible: true,
    allowNativeProps: true,
    nativePropsElement: ['input', 'textarea']
};
//# sourceMappingURL=TextField.doc.js.map
hdm/deepmac-tracker | data/js/48/6d/bb/00/00/00.24.js | <filename>data/js/48/6d/bb/00/00/00.24.js
macDetailCallback("486dbb000000/24",[{"d":"2016-05-21","t":"add","a":"Organize san Manisa Turket TR 45030 ","c":"TR","o":"Vestel Elektronik San ve Tic. A.Ş."}]);
|
HoEmpire/slambook2 | 3rdparty/meshlab-master/src/plugins_unsupported/io_epoch/epoch_io.cpp | <gh_stars>1-10
/****************************************************************************
* MeshLab o o *
* An extendible mesh processor o o *
* _ O _ *
* Copyright(C) 2005, 2006 \/)\/ *
* Visual Computing Lab /\/| *
* ISTI - Italian National Research Council | *
* \ *
* All rights reserved. *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License (http://www.gnu.org/licenses/gpl.txt) *
* for more details. *
* *
****************************************************************************/
#include <Qt>
#include <QtGui>
#include <QtXml/QDomDocument>
#include <QtXml/QDomElement>
#include <QtXml/QDomNode>
#include <QMessageBox>
#include <QFileDialog>
#include "epoch_io.h"
#include "epoch_reconstruction.h"
#include <vcg/complex/append.h>
#include <vcg/math/histogram.h>
#include <vcg/complex/algorithms/create/platonic.h>
#include <vcg/complex/algorithms/clustering.h>
#include <vcg/complex/algorithms/hole.h>
#include <wrap/io_trimesh/export_ply.h>
#include <meshlab/alnParser.h>
FILE *logFP=0;
using namespace std;
using namespace vcg;
// Removes unreliable samples near depth discontinuities: the depth map is
// optionally dilated and/or eroded, and samples whose depth differs too much
// from the morphologically filtered depth get their count zeroed (a zero
// count later discards the corresponding vertex in BuildMesh).
void EpochModel::
depthFilter(FloatImage &depthImgf, FloatImage &countImgf, float depthJumpThr,
            bool dilation, int dilationNumPasses, int dilationWinsize,
            bool erosion, int erosionNumPasses, int erosionWinsize)
{
    FloatImage depth;
    FloatImage depth2;
    int w = depthImgf.w;
    int h = depthImgf.h;

    depth=depthImgf;

    if (dilation)
    {
        for (int k = 0; k < dilationNumPasses; k++)
        {
            depth.Dilate(depth2, dilationWinsize / 2);
            depth=depth2;
        }
    }

    if (erosion)
    {
        for (int k = 0; k < erosionNumPasses; k++)
        {
            depth.Erode(depth2, erosionWinsize / 2);
            depth=depth2;
        }
    }

    // Histogram of |original depth[i] - filtered depth[i-1]| over the flat
    // sample vector.
    // NOTE(review): the i/i-1 index mismatch looks suspicious (compare with
    // ComputeDepthJumpThr, which uses consecutive samples of the SAME image);
    // confirm whether depth.v[i] was intended.
    Histogramf HH;
    HH.Clear();
    HH.SetRange(0,depthImgf.MaxVal()-depthImgf.MinVal(),10000);
    for(int i=1; i < static_cast<int>(depthImgf.v.size()); ++i)
        HH.Add(fabs(depthImgf.v[i]-depth.v[i-1]));

    if(logFP) fprintf(logFP,"**** Depth histogram 2 Min %f Max %f Avg %f Percentiles ((10)%f (25)%f (50)%f (75)%f (90)%f)\n",HH.MinV(),HH.MaxV(),HH.Avg(),
                      HH.Percentile(.1),HH.Percentile(.25),HH.Percentile(.5),HH.Percentile(.75),HH.Percentile(.9));

    int deletedCnt=0;

    // NOTE(review): the depthJumpThr parameter is overwritten here and never
    // used afterwards (the 0.6 relative test below is what actually filters);
    // confirm whether the parameter is still meant to have an effect.
    depthJumpThr = static_cast<float>(HH.Percentile(0.8));
    for (int y = 0; y < h; y++)
        for (int x = 0; x < w; x++)
        {
            // Drop samples whose depth shrank by more than 60% relative to the
            // dilated/eroded depth.
            if ((depthImgf.Val(x, y) - depth.Val(x, y)) / depthImgf.Val(x, y) > 0.6)
            {
                countImgf.Val(x, y) = 0.0f;
                ++deletedCnt;
            }
        }

    // Debug dump of the filtered count mask.
    countImgf.convertToQImage().save("tmp_filteredcount.jpg","jpg");

    if(logFP) fprintf(logFP,"**** depthFilter: deleted %i on %i\n",deletedCnt,w*h);
}
// Estimates a depth-discontinuity threshold: builds a histogram of absolute
// differences between consecutive samples of the (flattened) depth map and
// returns the requested percentile of that distribution.
float EpochModel::
ComputeDepthJumpThr(FloatImage &depthImgf, float percentile)
{
    Histogramf hist;
    hist.Clear();
    hist.SetRange(0, depthImgf.MaxVal() - depthImgf.MinVal(), 10000);

    const unsigned int sampleCount = static_cast<unsigned int>(depthImgf.v.size());
    for (unsigned int i = 1; i < sampleCount; ++i)
        hist.Add(fabs(depthImgf.v[i] - depthImgf.v[i - 1]));

    if (logFP)
        fprintf(logFP,
            "**** Depth histogram Min %f Max %f Avg %f Percentiles ((10)%f (25)%f (50)%f (75)%f (90)%f)\n",
            hist.MinV(), hist.MaxV(), hist.Avg(),
            hist.Percentile(.1), hist.Percentile(.25), hist.Percentile(.5),
            hist.Percentile(.75), hist.Percentile(.9));

    return hist.Percentile(percentile);
}
/// Apply the hand drawn mask image: every pixel whose red channel exceeds 128
/// in the mask zeroes the corresponding entry of the count image. Returns
/// false when the mask cannot be loaded or its size differs from CountImg.
bool EpochModel::
CombineHandMadeMaskAndCount(CharImage &CountImg, QString maskName )
{
    qDebug("Trying to read maskname %s",qPrintable(maskName));
    QImage maskImg(maskName);
    if(maskImg.isNull())
        return false;

    const bool sizeMatches =
        (maskImg.width() == CountImg.w) && (maskImg.height() == CountImg.h);
    if(!sizeMatches)
    {
        qDebug("Warning mask and images does not match! %i %i vs %i %i",maskImg.width(),CountImg.w,maskImg.height(),CountImg.h);
        return false;
    }

    for(int row=0; row<maskImg.height(); ++row)
        for(int col=0; col<maskImg.width(); ++col)
        {
            if(qRed(maskImg.pixel(col,row)) > 128)
                CountImg.Val(col,row) = 0;
        }

    return true;
}
// Count-weighted subsampling: each output pixel of the (w/factor, h/factor)
// images is the weighted average of the factor x factor input cell, where a
// sample's weight is its count above (minCount-1); samples at or below that
// floor are ignored. subD receives the averaged depth, subQ the averaged
// quality (count) of the contributing samples.
void EpochModel::
SmartSubSample(int factor, FloatImage &fli, CharImage &chi, FloatImage &subD, FloatImage &subQ, int minCount)
{
    assert(fli.w==chi.w && fli.h==chi.h);
    int w=fli.w/factor;
    int h=fli.h/factor;
    subQ.resize(w,h);
    subD.resize(w,h);

    for(int i=0;i<w;++i)
        for(int j=0;j<h;++j)
        {
            float maxcount=0;   // sum of weights of contributing samples
            int cnt=0;          // number of contributing samples
            float bestVal=0;    // weight-accumulated depth
            for(int ki=0;ki<factor;++ki)
                for(int kj=0;kj<factor;++kj)
                {
                    // Weight: how far the sample's count exceeds the floor.
                    float q= chi.Val(i*factor+ki,j*factor+kj) - minCount+1 ;
                    if(q>0)
                    {
                        maxcount+= q;
                        bestVal +=q*fli.Val(i*factor+ki,j*factor+kj);
                        cnt++;
                    }
                }
            if(cnt>0)
            {
                subD.Val(i,j)=float(bestVal)/maxcount;
                // Undo the floor shift so subQ is back on the count scale.
                subQ.Val(i,j)=minCount-1 + float(maxcount)/cnt ;
            }
            else
            {
                // No sample in the cell was reliable enough.
                subD.Val(i,j)=0;
                subQ.Val(i,j)=0;
            }
        }
}
/*
This filter average apply a laplacian smoothing over a depth map averaging the samples with a weighting scheme that follows the Counting masks.
The result of the laplacian is applied only on sample with low quality.
*/
// Pass 1 computes, for every interior pixel, the count-weighted 3x3 average of
// neighbours whose depth is within depthThr of the centre. Pass 2 blends the
// original depth with that average using featureMask as the blend factor
// (255 = keep original, 0 = fully smoothed).
void EpochModel::
Laplacian2(FloatImage &depthImg, FloatImage &countImg, int minCount, CharImage &featureMask, float depthThr)
{
    FloatImage Sum;
    int w=depthImg.w,h=depthImg.h;
    Sum.resize(w,h);

    for(int y=1;y<h-1;++y)
        for(int x=1;x<w-1;++x)
        {
            float curDepth=depthImg.Val(x,y);
            int cnt=0;
            for(int j=-1;j<=1;++j)
                for(int i=-1;i<=1;++i)
                {
                    // Weight by count above the (minCount-1) floor; skip
                    // neighbours across a depth jump larger than depthThr.
                    int q=countImg.Val(x+i,y+j)-minCount+1;
                    if(q>0 && fabs(depthImg.Val(x+i,y+j)-curDepth) < depthThr) {
                        Sum.Val(x,y)+=q*depthImg.Val(x+i,y+j);
                        cnt+=q;
                    }
                }
            if(cnt>0) {
                Sum.Val(x,y)/=cnt;
            }
            else Sum.Val(x,y)=depthImg.Val(x,y);
        }

    // Blend: high feature-mask values (edges/texture) preserve the original
    // depth; featureless areas take the smoothed value.
    for(int y=1;y<h-1;++y)
        for(int x=1;x<w-1;++x)
        {
            float q=(featureMask.Val(x,y)/255.0);
            depthImg.Val(x,y) = depthImg.Val(x,y)*q + Sum.Val(x,y)*(1-q);
        }
}
// It generate a feature mask that mark the featureless area of the original photo.
// Featureless areas are usually affected by noise and have to be smoothed more
// Pipeline: per-pixel gradient magnitude -> max-pooled subsampling -> 11x11
// box blur -> 7x7 erosion. The result is on the subsampled grid used by
// BuildMesh; high values mark feature-rich areas that should keep their depth.
void EpochModel::
GenerateGradientSmoothingMask(int subsampleFactor, QImage &OriginalTexture, CharImage &mask)
{
    CharImage gray(OriginalTexture);
    CharImage grad;
    grad.resize(gray.w,gray.h);
    int w=gray.w,h=gray.h;
    for(int x=1;x<w-1;++x)
        for(int y=1;y<h-1;++y)
        {
            int dx=abs(int(gray.Val(x,y))-int(gray.Val(x-1,y))) + abs(int(gray.Val(x,y))-int(gray.Val(x+1,y)));
            int dy=abs(int(gray.Val(x,y))-int(gray.Val(x,y-1))) + abs(int(gray.Val(x,y))-int(gray.Val(x,y+1)));
            // NOTE(review): dx is weighted 16x more than dy -- possibly
            // intended as 16*(dx+dy); confirm before relying on isotropy.
            grad.Val(x,y)=min(255,16*dx+dy);
        }

    // create subsampled mask
    // (max-pooling: each output cell takes the strongest gradient inside it)
    int ws=gray.w/subsampleFactor, hs=gray.h/subsampleFactor;
    mask.resize(ws,hs);
    for(int x=0;x<ws;++x)
        for(int y=0;y<hs;++y)
        {
            unsigned char maxGrad=0;
            for(int si=0;si<subsampleFactor;++si)
                for(int sj=0;sj<subsampleFactor;++sj)
                    maxGrad = max(maxGrad, grad.Val(x*subsampleFactor+sj,y*subsampleFactor+si));
            mask.Val(x,y) = maxGrad;
        }

    CharImage mask2;
    mask2.resize(ws, hs);

    // average filter (11 x 11)
    int avg;
    int wsize = 5;
    for (int y = wsize; y < hs-wsize; y++)
        for (int x = wsize; x < ws-wsize; x++)
        {
            avg = 0;
            for (int yy = y - wsize; yy <= y + wsize; yy++)
                for (int xx = x - wsize; xx <= x + wsize; xx++)
                    avg += mask.Val(xx, yy);
            mask2.Val(x, y) = min(255, avg / ((2 * wsize + 1)* (2 * wsize +1)));
        }

    // Debug dumps of the intermediate masks.
    mask.convertToQImage().save("tmp_testmask.jpg","jpg");
    mask2.convertToQImage().save("tmp_testmaskSmooth.jpg","jpg");

    // erosion filter (7 x 7)
    int minimum;
    wsize = 3;
    for (int y = wsize; y < hs-wsize; y++)
        for (int x = wsize; x < ws-wsize; x++)
        {
            minimum = mask2.Val(x, y);
            for (int yy = y - wsize; yy <= y + wsize; yy++)
                for (int xx = x - wsize; xx <= x + wsize; xx++)
                    if (mask2.Val(xx, yy) < minimum)
                        minimum = mask2.Val(xx, yy);
            mask.Val(x, y) = minimum;
        }

    grad.convertToQImage().save("tmp_test.jpg","jpg");
    mask.convertToQImage().save("tmp_testmaskeroded.jpg","jpg");
}
/*
Main processing function;
it takes a depth map, a count map,
- resample them to a (width/subsample,height/subsample) image
- leave only the faces that are within a given orientation range
- that have a count greater than minCount.
- and smooth them with a count/quality aware laplacian filter
*/
// Returns true on completion; each stage's elapsed clock() ticks are written
// to the global logFP when it is open.
bool EpochModel::
BuildMesh(CMeshO &m, int subsampleFactor, int minCount, float minAngleCos, int smoothSteps,
          bool dilation, int dilationPasses, int dilationSize,
          bool erosion, int erosionPasses, int erosionSize,float scalingFactor)
{
    // --- Load depth map, count map and photo texture from the model's files.
    FloatImage depthImgf;
    CharImage countImgc;
    int ttt0=clock();
    depthImgf.Open(depthName.toAscii());
    countImgc.Open(countName.toAscii());

    QImage TextureImg;
    TextureImg.load(textureName);
    int ttt1=clock();
    if(logFP) fprintf(logFP,"**** Buildmesh: Opening files %i\n",ttt1-ttt0);

    CombineHandMadeMaskAndCount(countImgc,maskName); // set count to zero for all masked points

    FloatImage depthSubf;  // the subsampled depth image
    FloatImage countSubf;  // the subsampled quality image (quality == count)

    SmartSubSample(subsampleFactor,depthImgf,countImgc,depthSubf,countSubf,minCount);

    CharImage FeatureMask; // the subsampled image with (quality == features)
    GenerateGradientSmoothingMask(subsampleFactor, TextureImg, FeatureMask);

    depthSubf.convertToQImage().save("tmp_depth.jpg", "jpg");

    int ttt2=clock();
    if(logFP) fprintf(logFP,"**** Buildmesh: SubSample and Gradient %i\n",ttt2-ttt1);

    // --- Feature-aware Laplacian smoothing of the subsampled depth.
    float depthThr = ComputeDepthJumpThr(depthSubf,0.8f);

    for(int ii=0;ii<smoothSteps;++ii)
        Laplacian2(depthSubf,countSubf,minCount,FeatureMask,depthThr);

    int ttt3=clock();
    if(logFP) fprintf(logFP,"**** Buildmesh: Smoothing %i\n",ttt3-ttt2);

    // --- Build a regular grid mesh over the (subsampled) depth samples.
    vcg::tri::Grid<CMeshO>(m,depthSubf.w,depthSubf.h,depthImgf.w,depthImgf.h,&*depthSubf.v.begin());

    int ttt4=clock();
    if(logFP) fprintf(logFP,"**** Buildmesh: trimesh building %i\n",ttt4-ttt3);

    // The depth is filtered and the minimum count mask is update accordingly.
    // To be more specific the border of the depth map are identified by erosion
    // and the relative vertex removed (by setting mincount equal to 0).
    float depthThr2 = ComputeDepthJumpThr(depthSubf,0.95f);
    depthFilter(depthSubf, countSubf, depthThr2,
                dilation, dilationPasses, dilationSize,
                erosion, erosionPasses, erosionSize);

    // Delete vertices whose (filtered) count fell below the threshold.
    int vn = m.vn;
    for(int i=0;i<vn;++i)
        if(countSubf.v[i]<minCount)
        {
            m.vert[i].SetD();
            m.vn--;
        }

    // --- Unproject each surviving vertex through the camera and colour it
    // from the photo.
    cam.Open(cameraName.toAscii());
    CMeshO::VertexIterator vi;
    // NOTE(review): Rinv is computed but never used below -- confirm whether
    // it is dead code.
    Matrix33d Rinv= Inverse(cam.R);

    for(vi=m.vert.begin();vi!=m.vert.end();++vi)if(!(*vi).IsD())
    {
        Point3f in=(*vi).P();
        Point3d out;
        cam.DepthTo3DPoint(in[0], in[1], in[2], out);

        (*vi).P().Import(out);
        QRgb c = TextureImg.pixel(int(in[0]), int(in[1]));
        (*vi).C().SetRGB(qRed(c),qGreen(c),qBlue(c));
        //(*vi).Q()=chi.Val(in[0], in[1]);
        //(*vi).Q()=flisubQ.Val(in[0]/subsample, in[1]/subsample);
        // NOTE(review): the 0/1 quality assigned here is immediately
        // overwritten by the normalized FeatureMask value two lines below.
        if(FeatureMask.Val(int(in[0]/subsampleFactor), int(in[1]/subsampleFactor))<200) (*vi).Q()=0;
        else (*vi).Q()=1;
        (*vi).Q()=float(FeatureMask.Val(in[0]/subsampleFactor, in[1]/subsampleFactor))/255.0;
    }

    int ttt5=clock();
    if(logFP) fprintf(logFP,"**** Buildmesh: Projecting and Coloring %i\n",ttt5-ttt4);

    // --- Remove faces with a deleted vertex or facing away from the camera
    // (angle between face normal and view direction beyond minAngleCos).
    CMeshO::FaceIterator fi;
    Point3f CameraPos=Point3f::Construct(cam.t);
    for(fi=m.face.begin();fi!=m.face.end();++fi)
    {
        if((*fi).V(0)->IsD() ||(*fi).V(1)->IsD() ||(*fi).V(2)->IsD() )
        {
            (*fi).SetD();
            --m.fn;
        }
        else
        {
            Point3f n=vcg::Normal(*fi);
            n.Normalize();
            Point3f dir=CameraPos-vcg::Barycenter(*fi);
            dir.Normalize();
            if(dir.dot(n) < minAngleCos)
            {
                (*fi).SetD();
                --m.fn;
            }
        }
    }

    tri::Clean<CMeshO>::RemoveUnreferencedVertex(m);
    int ttt6=clock();
    if(logFP) fprintf(logFP,"**** Buildmesh: Deleting skewed %i\n",ttt6-ttt5);

    // Matrix44d Rot;
    // Rot.SetRotate(M_PI,Point3d(1,0,0));
    // vcg::tri::UpdatePosition<CMeshO>::Matrix(m, Rot);

    // --- Final uniform scaling of the reconstructed mesh.
    Matrix44f scaleMat;
    scaleMat.SetScale(scalingFactor,scalingFactor,scalingFactor);
    vcg::tri::UpdatePosition<CMeshO>::Matrix(m, scaleMat);

    return true;
}
/*
 * Appends a small camera glyph to mesh `m`: three green vertices anchored at
 * the camera position `cam.t` forming a single unit triangle, so the capture
 * viewpoint is visible when the mesh is rendered.
 */
void EpochModel::AddCameraIcon(CMeshO &m)
{
    // Allocate the three glyph vertices and remember where they start.
    tri::Allocator<CMeshO>::AddVertices(m, 3);
    const size_t base = m.vert.size() - 3;

    m.vert[base + 0].P() = Point3f::Construct(cam.t + Point3d(0, 0, 0));
    m.vert[base + 0].C() = Color4b::Green;
    m.vert[base + 1].P() = Point3f::Construct(cam.t + Point3d(0, 1, 0));
    m.vert[base + 1].C() = Color4b::Green;
    m.vert[base + 2].P() = Point3f::Construct(cam.t + Point3d(1, 0, 0));
    m.vert[base + 2].C() = Color4b::Green;

    // Wire one face to the vertices just added.
    tri::Allocator<CMeshO>::AddFaces(m, 1);
    m.face[m.face.size() - 1].V(0) = &m.vert[base + 0];
    m.face[m.face.size() - 1].V(1) = &m.vert[base + 1];
    m.face[m.face.size() - 1].V(2) = &m.vert[base + 2];
}
/*
 * Initializes this model from one <model> XML element: reads the camera,
 * texture, depth and count file names from the child elements' "filename"
 * attributes, and derives the mask file name from the texture name
 * ("<texture base>.mask.png").  Returns false if the node has no attributes.
 */
bool EpochModel::Init(QDomNode &node)
{
    if (!node.hasAttributes())
        return false;

    QDomNamedNodeMap attr = node.attributes();
    QString indexString = (attr.namedItem("index")).nodeValue();
    qDebug("reading Model with index %i ", indexString.toInt());

    // Each relevant child element carries a single "filename" attribute.
    for (QDomNode n = node.firstChild(); !n.isNull(); n = n.nextSibling())
    {
        const QString tag = n.nodeName();
        const QString file = n.attributes().namedItem("filename").nodeValue();
        if (tag == QString("camera"))  cameraName  = file;
        if (tag == QString("texture")) textureName = file;
        if (tag == QString("depth"))   depthName   = file;
        if (tag == QString("count"))   countName   = file;
    }

    // Mask name: strip the 4-char extension and append the mask suffix.
    QString tmpName = textureName.left(textureName.length() - 4);
    maskName = tmpName.append(".mask.png");
    return true;
}
/*
 * Maps an image file name to its thumbnail name: the last four characters
 * (assumed to be the ".ext" suffix) are replaced with ".thumb.jpg".
 */
QString EpochModel::ThumbName(QString &_imageName)
{
    QString base = _imageName.left(_imageName.length() - 4);
    return base.append(".thumb.jpg");
}
// Creates the (hidden) import dialog up front; it is shown later from open().
EpochIO::EpochIO()
{
    epochDialog = new v3dImportDialog();
    epochDialog->hide();
}
// Releases the dialog allocated in the constructor.
EpochIO::~EpochIO()
{
    delete epochDialog;
}
/*
 * Imports an Epoch/Arc3D "v3d" reconstruction: parses the XML index into an
 * EpochReconstruction, shows a modal dialog to let the user pick range maps
 * and options, builds one mesh per selected range map, and either merges the
 * meshes into `m` (directly or through a clustering grid) or exports them as
 * individual PLY files plus an .aln alignment file.  The do/while loop keeps
 * reopening the dialog as long as "export to PLY" is requested.
 * Returns false on empty file name, user abort, or no selection.
 */
bool EpochIO::open(const QString &formatName, const QString &fileName, MeshModel &m, int& mask,const RichParameterSet & /*par*/, CallBackPos *cb, QWidget *parent)
{
    EpochReconstruction er;
    mask = vcg::tri::io::Mask::IOM_VERTCOLOR | vcg::tri::io::Mask::IOM_VERTQUALITY;
    // just to be sure...
    if (fileName.isEmpty()) return false;
    // initializing progress bar status
    if (cb != NULL) (*cb)(0, "Loading...");
    // this change of dir is needed for subsequent texture/material loading
    QString FileNameDir = fileName.left(fileName.lastIndexOf("/"));
    QDir::setCurrent(FileNameDir);
    QString errorMsgFormat = "Error encountered while loading file %1:\n%2";
    string stdfilename = QFile::encodeName(fileName).constData ();
    //string filename = fileName.toUtf8().data();
    QDomDocument doc;
    if(formatName.toUpper() == tr("V3D") && fileName.endsWith(".v3d"))
    {
        QFile file(fileName);
        if (file.open(QIODevice::ReadOnly) && doc.setContent(&file))
        {
            file.close();
            QDomElement root = doc.documentElement();
            if (root.nodeName() == tr("reconstruction"))
            {
                // <head>/<meta> elements carry reconstruction-level metadata.
                QDomNode nhead = root.firstChildElement("head");
                for(QDomNode n = nhead.firstChildElement("meta"); !n.isNull(); n = n.nextSiblingElement("meta"))
                {
                    if(!n.hasAttributes()) return false;
                    QDomNamedNodeMap attr= n.attributes();
                    if(attr.contains("name")) er.name = (attr.namedItem("name")).nodeValue() ;
                    if(attr.contains("author")) er.author = (attr.namedItem("author")).nodeValue() ;
                    if(attr.contains("created")) er.created = (attr.namedItem("created")).nodeValue() ;
                }
                // Each <model> element describes one range map (camera,
                // texture, depth and count file names).
                for(QDomNode n = root.firstChildElement("model"); !n.isNull(); n = n.nextSiblingElement("model"))
                {
                    EpochModel em;
                    em.Init(n);
                    er.modelList.push_back(em);
                }
            }
        }
    }
    epochDialog->setEpochReconstruction( &er, cb);
    do
    {
        epochDialog->exportToPLY=false;
        //Here we invoke the modal dialog and wait for its termination
        int continueValue = epochDialog->exec();
        // The user has pressed the ok button: now start the real processing:
        if(epochDialog->exportToPLY == true) qDebug("Starting the ply exporting process");
        int t0=clock();
        logFP=fopen("epoch.log","w");
        // Read every dialog option before processing.
        int subSampleVal = epochDialog->subsampleSpinBox->value();
        int minCountVal= epochDialog->minCountSpinBox->value();
        float maxCCDiagVal= epochDialog->maxCCDiagSpinBox->value();
        int mergeResolution=epochDialog->mergeResolutionSpinBox->value();
        int smoothSteps=epochDialog->smoothSpinBox->value();
        bool closeHole = epochDialog->holeCheckBox->isChecked();
        int maxHoleSize = epochDialog->holeSpinBox->value();
        if (continueValue == QDialog::Rejected)
        {
            QMessageBox::warning(parent, "Open V3d format","Aborted");
            return false;
        }
        CMeshO mm;
        QTableWidget *qtw=epochDialog->imageTableWidget;
        float MinAngleCos=cos(vcg::math::ToRad(epochDialog->qualitySpinBox->value()));
        bool clustering=epochDialog->fastMergeCheckBox->isChecked();
        bool removeSmallCC=epochDialog->removeSmallCCCheckBox->isChecked();
        vcg::tri::Clustering<CMeshO, vcg::tri::AverageColorCell<CMeshO> > Grid;
        // Count the selected rows first so progress can be reported correctly.
        int selectedNum=0,selectedCount=0;
        int i;
        for(i=0;i<qtw->rowCount();++i) if(qtw->isItemSelected(qtw->item(i,0))) ++selectedNum;
        if(selectedNum==0)
        {
            QMessageBox::warning(parent, "Open V3d format","No range map selected. Nothing loaded");
            return false;
        }
        bool dilationFlag = epochDialog->dilationCheckBox->isChecked();
        int dilationN = epochDialog->dilationNumPassSpinBox->value();
        int dilationSz = epochDialog->dilationSizeSlider->value() * 2 + 1;
        bool erosionFlag = epochDialog->erosionCheckBox->isChecked();
        int erosionN = epochDialog->erosionNumPassSpinBox->value();
        int erosionSz = epochDialog->erosionSizeSlider->value() * 2 + 1;
        float scalingFactor = epochDialog->scaleLineEdit->text().toFloat();
        std::vector<string> savedMeshVector;
        bool firstTime=true;
        // Build one mesh per selected range map.
        QList<EpochModel>::iterator li;
        for(li=er.modelList.begin(), i=0;li!=er.modelList.end();++li,++i)
        {
            if(qtw->isItemSelected(qtw->item(i,0)))
            {
                ++selectedCount;
                mm.Clear();
                int tt0=clock();
                (*li).BuildMesh(mm,subSampleVal,minCountVal,MinAngleCos,smoothSteps,
                    dilationFlag, dilationN, dilationSz, erosionFlag, erosionN, erosionSz,scalingFactor);
                int tt1=clock();
                if(logFP) fprintf(logFP,"** Mesh %i : Build in %i\n",selectedCount,tt1-tt0);
                if(epochDialog->exportToPLY)
                {
                    // Export mode: save this mesh as "<texture base>.x.ply".
                    QString plyFilename =(*li).textureName.left((*li).textureName.length()-4);
                    plyFilename.append(".x.ply");
                    savedMeshVector.push_back(qPrintable(plyFilename));
                    // NOTE(review): this local `mask` shadows the out-parameter
                    // of the same name declared in the signature.
                    int mask= tri::io::Mask::IOM_VERTCOORD + tri::io::Mask::IOM_VERTCOLOR + tri::io::Mask::IOM_VERTQUALITY;
                    tri::io::ExporterPLY<CMeshO>::Save(mm,qPrintable(plyFilename),mask);
                }
                else
                {
                    if(clustering)
                    {
                        // Lazily initialize the clustering grid on the first mesh.
                        if (firstTime)
                        {
                            //Grid.Init(mm.bbox,100000);
                            vcg::tri::UpdateBounding<CMeshO>::Box(mm);
                            //Grid.Init(mm.bbox,1000.0*pow(10.0,mergeResolution),mm.bbox.Diag()/1000.0f);
                            Grid.Init(mm.bbox,100000.0*pow(10.0,mergeResolution));
                            firstTime=false;
                        }
                        Grid.AddMesh(mm);
                    }
                    else
                        tri::Append<CMeshO,CMeshO>::Mesh(m.cm,mm); // append mesh mr to ml
                }
                int tt2=clock();
                if(logFP) fprintf(logFP,"** Mesh %i : Append in %i\n",selectedCount,tt2-tt1);
            }
            if (cb)(*cb)(selectedCount*90/selectedNum, "Building meshes");
        }
        if (cb != NULL) (*cb)(90, "Final Processing: clustering");
        if(clustering)
        {
            Grid.ExtractPointSet(m.cm);
        }
        if(epochDialog->exportToPLY)
        {
            // Write the alignment file referencing every exported PLY.
            QString ALNfilename = fileName.left(fileName.length()-4).append(".aln");
            ALNParser::SaveALN(qPrintable(ALNfilename), savedMeshVector);
        }
        int t1=clock();
        if(logFP) fprintf(logFP,"Extracted %i meshes in %i\n",selectedCount,t1-t0);
        if (cb != NULL) (*cb)(95, "Final Processing: Removing Small Connected Components");
        if(removeSmallCC)
        {
            vcg::tri::UpdateBounding<CMeshO>::Box(m.cm); // updates bounding box
            m.updateDataMask(MeshModel::MM_FACEFACETOPO | MeshModel::MM_FACEFLAGBORDER | MeshModel::MM_FACEMARK);
            tri::Clean<CMeshO>::RemoveSmallConnectedComponentsDiameter(m.cm,m.cm.bbox.Diag()*maxCCDiagVal/100.0);
        }
        int t2=clock();
        if(logFP) fprintf(logFP,"Topology and removed CC in %i\n",t2-t1);
        vcg::tri::UpdateBounding<CMeshO>::Box(m.cm); // updates bounding box
        if (cb != NULL) (*cb)(97, "Final Processing: Closing Holes");
        if(closeHole)
        {
            m.updateDataMask(MeshModel::MM_FACEFACETOPO | MeshModel::MM_FACEFLAGBORDER | MeshModel::MM_FACEMARK);
            tri::UpdateNormals<CMeshO>::PerVertexNormalizedPerFace(m.cm);
            vcg::tri::Hole<CMeshO>::EarCuttingFill<vcg::tri::MinimumWeightEar< CMeshO> >(m.cm,maxHoleSize,false);
        }
        if (cb != NULL) (*cb)(100, "Done");
        // vcg::tri::UpdateNormals<CMeshO>::PerVertex(m.cm); // updates normals
        m.updateDataMask(MeshModel::MM_VERTCOLOR);
        int t3=clock();
        if(logFP) fprintf(logFP,"---------- Total Processing Time%i\n\n\n",t3-t0);
        if(logFP) fclose(logFP);
        logFP=0;
    } while(epochDialog->exportToPLY);
    return true;
}
// Saving v3d files is not supported: warn the user and report failure.
bool EpochIO::save(const QString &/*formatName*/,const QString &/*fileName*/, MeshModel &/*m*/, const int /*mask*/, const RichParameterSet &, vcg::CallBackPos * /*cb*/, QWidget *parent)
{
    QMessageBox::warning(parent, "Unknown type", "file's extension not supported!!!");
    return false;
}
// Lists the formats this plugin can import: the Epoch/Arc3D "V3D"
// reconstruction index files.
QList<MeshIOInterface::Format> EpochIO::importFormats() const
{
    QList<Format> formatList;
    formatList << Format("Epoch Reconstructed mesh","V3D");
    return formatList;
}
// NOTE: the stray ';' that followed this function definition was removed —
// an extra top-level semicolon is ill-formed before C++11 and a warning after.
// Returns a (currently empty) icon for this model.  The caller takes
// ownership of the returned QIcon and must delete it.
// NOTE(review): the original built the string "<textureName>.xbm" into a
// local variable and never used it; that dead code has been removed —
// confirm no icon file was ever meant to be loaded here.
QIcon *EpochModel::getIcon()
{
    return new QIcon();
}
Q_EXPORT_PLUGIN(EpochIO)
|
Trinsdar/StevesCarts | src/main/java/vswe/stevescarts/containers/ContainerManager.java | <reponame>Trinsdar/StevesCarts
package vswe.stevescarts.containers;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.Slot;
import vswe.stevescarts.blocks.tileentities.TileEntityBase;
import vswe.stevescarts.blocks.tileentities.TileEntityManager;
/**
 * Base container for manager tile entities.  Holds the backing
 * {@link TileEntityManager} and lays out the standard player inventory
 * (3x9 main grid plus the 9-slot hotbar) at a subclass-defined x offset.
 */
public abstract class ContainerManager extends ContainerBase {
    private TileEntityManager manager;
    // Last values synced to the client (see container update packets).
    public short lastHeader;
    public short lastColor;
    public short lastAmount;

    @Override
    public IInventory getMyInventory() {
        return manager;
    }

    @Override
    public TileEntityBase getTileEntity() {
        return manager;
    }

    public ContainerManager(final TileEntityManager manager) {
        this.manager = manager;
    }

    /** Adds the player's main inventory and hotbar slots to this container. */
    protected void addPlayer(final IInventory invPlayer) {
        // Main inventory: 3 rows x 9 columns, inventory indices 9..35.
        for (int row = 0; row < 3; ++row) {
            for (int col = 0; col < 9; ++col) {
                final int slotIndex = col + row * 9 + 9;
                final int x = col * 18 + offsetX();
                final int y = 104 + row * 18 + 36;
                addSlotToContainer(new Slot(invPlayer, slotIndex, x, y));
            }
        }
        // Hotbar: indices 0..8 on a single row below the main grid.
        for (int slot = 0; slot < 9; ++slot) {
            addSlotToContainer(new Slot(invPlayer, slot, slot * 18 + offsetX(), 198));
        }
    }

    /** Horizontal pixel offset of the inventory grid inside the GUI. */
    protected abstract int offsetX();
}
|
fredblain/docQE-corp | systems/2015/english-russian/newstest2015.online-A.0.en-ru/289-argumenti.ru.newstest2015.online-A.0.en-ru.ru | <reponame>fredblain/docQE-corp<gh_stars>0
<NAME> попал в аварию, пытаясь спрятаться от папарацци
<NAME> попал в аварию на мотоцикле, пытаясь спасти себя от папарацци.
Это произошло в Лос-Анджелесе на углу бульвара Сансет.
Британский футболист получил от легко с просто сотрясение мозга и растяжение правой рукой.
Бекхэм покинул тату салон, где папарацци были лагерь ждет его.
При попытке к бегству их погоня на мотоцикле, он потерял управление на углу и упал, пишет TMZ, ссылаясь на источники, близкие к футболист.
В общем Бекхэм получил сотрясение мозга, подвернул правую руку и царапин.
|
fabricio7p/Ikebana-App-Frontend | src/components/pages/Account/components/Password/Password.js | <reponame>fabricio7p/Ikebana-App-Frontend
import React, { useState, useEffect } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { useHistory } from 'react-router-dom';
import api from 'services/api'
import { useForm } from 'react-hook-form';
import Modal from '../../../../aux/Modal';
import ErrorDialog from '../../../../aux/ErrorDialog/ErrorDialog';
import InfoDialog from '../../../../aux/InfoDialog/InfoDialog';
import './styles.scss';
export default function Password() {
const { register, handleSubmit, errors, watch } = useForm();
const authKey = useSelector(state => state.auth.keys)
const userData = useSelector(state => state.account.userData[0])
const [loading, setLoading] = useState(false);
const [error, setError] = useState(false);
const [info, setInfo] = useState(false);
const [data, setData] = useState({})
const [member, setMember] = useState(false);
const history = useHistory();
const onSubmit = async ({ old_pass, new_pass, confirm_pass }) => {
setLoading(true)
try {
const response = await api({
method: 'post',
url: '/user',
headers: {
'Authorization': `Bearer ${authKey[0].key}`,
},
data: {
old_pass,
new_pass,
confirm_pass
}
})
response.data.password && setInfo('Senha alterada com sucesso.')
} catch(e) {
e.message.includes('401') && setError("Senha atual incorreta")
} finally {
setLoading(false)
}
}
useEffect(() => {
setData(userData)
data.isPartner && setMember(true)
}, [])
return(
<>
<div className="password">
<div className="password__header">
<p className="password__header--title">Editar informações pessoais.</p>
</div>
<form onSubmit={handleSubmit(onSubmit)}>
<div className="password__inputs">
<p className="password__inputs--desc">Editar senha</p>
<div className="password__inputs--container acc-field">
<label htmlFor="old_pass"
className="input-label acc-field__title">
Senha Antiga</label>
<input ref={register({required: true, minLength: 6})} name="old_pass"
type="password" className="account-input" id="old_pass"/>
</div>
<div className="password__inputs--container acc-field">
<label htmlFor="new_pass"
className="password-label acc-field__title">
Senha Nova
{errors.new_pass && <div title="Mínimo de 6 caracteres"
className="validation-overlay-acc">
<i className="material-icons">error_outline</i></div> }
</label>
<input type="password" ref={register({required: true, minLength: 6})}
name="new_pass" className="account-input" id="new_pass"/>
</div>
<div className="password__inputs--container acc-field">
<label htmlFor="confirm_pass"
className="input-label acc-field__title">
Confirmar Senha
{errors.confirm_pass && <div title="As senhas não coincidem"
className="validation-overlay-acc">
<i className="material-icons">error_outline</i></div> }
</label>
<input ref={register({required: true, minLength: 6,
validate: value => value === watch('new_pass') }) }
name="confirm_pass" type="password" className="account-input"
id="confirm_pass"/>
</div>
<button type="submit" className="app-btn btn-filled">
{
loading === true ? <div className="sizer">
<div class="lds-ellipsis"><div></div>
<div></div><div></div><div></div></div>
</div>
:
<p>{ member === true ? 'Atualizar' : 'Concluir' }</p>
}
</button>
</div>
</form>
</div>
{
error &&
<Modal>
<ErrorDialog value={error} handler={() =>{
setError(false)
} }/>
</Modal>
}
{
info &&
<Modal>
<InfoDialog value={info} handler={() => {
setInfo(false)
window.location.pathname = '/home'
}}/>
</Modal>
}
</>
);
}
|
Nickinthebox/polyfill-library | polyfills/localStorage/tests.js | <gh_stars>0
/* eslint-env mocha, browser */
/* global proclaim */
// Reset storage before every test and seed one known entry so each
// spec starts from the same state.
beforeEach(function () {
	localStorage.clear();
	localStorage.setItem('hi', 'there');
});
describe('getItem', function () {
	it('should get an item', function () {
		proclaim.equal(localStorage.getItem('hi'), 'there');
	});
});
describe('setItem', function () {
	it('should set an item', function () {
		localStorage.setItem('x', 'y');
		proclaim.equal(localStorage.getItem('x'), 'y');
	});
	// Keys must round-trip even when they contain characters that would
	// need escaping in cookie/name-value based polyfill backends.
	it('should set an item with special characters', function () {
		var key = 'x [{]}\\|;:\'",<.>/?!@#$%^&*()-_=+\n\t'
		localStorage.setItem(key, 'y');
		proclaim.equal(localStorage.getItem(key), 'y');
	});
	it('should set an item with an empty key', function () {
		localStorage.setItem('', 'y');
		proclaim.equal(localStorage.getItem(''), 'y');
	});
});
describe('removeItem', function () {
	// Per spec, getItem returns null for a missing key.
	it('should remove an item', function () {
		localStorage.removeItem('hi');
		proclaim.equal(localStorage.getItem('hi'), null);
	});
});
describe('key', function () {
	// With exactly one stored entry, index 0 must be its key.
	it('should get a key', function () {
		proclaim.equal(localStorage.key(0), 'hi');
	});
});
describe('clear', function () {
	it('should clear all items', function () {
		localStorage.clear();
		proclaim.equal(localStorage.getItem('hi'), null);
	});
});
describe('length', function () {
	it('should get number of items', function () {
		proclaim.equal(localStorage.length, 1);
	});
});
|
cz-it/magnode | src/net/net.h | <reponame>cz-it/magnode<filename>src/net/net.h<gh_stars>1-10
/**
* Author :cz <EMAIL>
* Licence MIT
*/
#ifndef MAGNODE_NET_MN_NET_H_
#define MAGNODE_NET_MN_NET_H_
#include "magnode.h"
#include "os.h"
#if defined MN_APPLE || defined MN_ANDROID
#include <sys/select.h>
#include <sys/socket.h>
#include <sys/un.h>
#include <netdb.h>
#include <unistd.h>
#include <netinet/in.h>
#include <netinet/tcp.h>
#include <arpa/inet.h>
#include <sys/socket.h>
#endif
#if defined MN_WIN
#include <winsock2.h>
#include <WS2tcpip.h>
#include <windows.h>
# if defined(_MSC_VER)
# include <BaseTsd.h>
typedef SSIZE_T ssize_t;
//typedef SIZE_T size_t;
# endif
#endif
#include <stdint.h>
#include <stddef.h>
#ifdef __cplusplus
extern "C" {
#endif
#define MAX_HOST_LEN 256
#define MN_HAUSNUMERO 1000
#ifndef MN__ETIMEOUT
#define MN__ETIMEOUT -(MN_HAUSNUMERO + 1)
#endif
#ifndef MN__EPROTO
#define MN__EPROTO -(MN_HAUSNUMERO + 2)
#endif
#ifndef MN__ESEND
#define MN__ESEND -(MN_HAUSNUMERO + 3)
#endif
#ifndef MN__ERECVFROM
#define MN__ERECVFROM -(MN_HAUSNUMERO + 4)
#endif
#ifndef MN__ENULLARG
#define MN__ENULLARG -(MN_HAUSNUMERO + 5)
#endif
#ifndef MN__EURL
#define MN__EURL -(MN_HAUSNUMERO + 6)
#endif
#ifndef MN__EPOLL
#define MN__EPOLL -(MN_HAUSNUMERO + 7)
#endif
#ifndef MN__ENULL
#define MN__ENULL -(MN_HAUSNUMERO + 8)
#endif
#ifndef MN__ECONN
#define MN__ECONN -(MN_HAUSNUMERO + 9)
#endif
#ifndef MN__EARG
#define MN__EARG -(MN_HAUSNUMERO + 10)
#endif
#ifndef MN__ESENDTO
#define MN__ESENDTO -(MN_HAUSNUMERO + 11)
#endif
#ifndef MN__ERECV
#define MN__ERECV -(MN_HAUSNUMERO + 12)
#endif
#ifndef MN__EHOST
#define MN__EHOST -(MN_HAUSNUMERO + 13)
#endif
#ifndef MN__ECLOSED
#define MN__ECLOSED -(MN_HAUSNUMERO + 14)
#endif
/* Transport protocols supported by the wrapper. */
enum net_proto
{
    NET_TCP,
    NET_UDP,
    NET_UNKNOWN,
};
/* Socket buffer sizes: 1 MiB in each direction. */
enum net_bufsize
{
    NET_RECV_BUF_SIZE = 1*1024*1024,
    NET_SEND_BUF_SIZE = 1*1024*1024,
};
/* A socket handle plus the resolved peer address. */
struct mn_socket
{
    int sfd;                    /* underlying OS socket descriptor */
    enum net_proto proto;       /* transport in use */
    struct sockaddr dest_addr;  /* peer address — presumably kept for datagram
                                   sends; confirm against mn_net_send impl */
    socklen_t addrlen;          /* valid length of dest_addr */
};
/* Parsed address form: protocol + host name/IP + port. */
struct mn_sockaddr
{
    enum net_proto proto;
    char host[MAX_HOST_LEN];    /* NUL-terminated host, at most MAX_HOST_LEN-1 chars */
    uint16_t port;              /* port in host byte order — TODO confirm */
};
/* Close the socket held by *sfd. */
int mn_net_close(struct mn_socket *sfd);
/* Connect to `url` within `timeout`; fills *sfd on success.
   Errors are reported through the MN__E* codes above. */
int mn_net_connect(struct mn_socket *sfd, const char *url, uint64_t timeout);
/* Send *len bytes from buf within `timeout`; *len is in/out — presumably
   updated to the number of bytes actually transferred (confirm in impl). */
int mn_net_send(struct mn_socket *sfd,const void *buf,size_t *len,uint64_t timeout);
/* Receive up to *len bytes into buf within `timeout`; *len is in/out. */
int mn_net_recv(struct mn_socket *sfd,void *buf,size_t *len,uint64_t timeout);
#ifdef __cplusplus
}
#endif
#endif /* defined(MAGNODE_NET_MN_NET_H_) */
|
heftyy/play-framework-oauth | modules/oauth-ws/app/oauth/webservice/scopes/ScopesLoader.java | <filename>modules/oauth-ws/app/oauth/webservice/scopes/ScopesLoader.java
package oauth.webservice.scopes;
/** Loads OAuth scope definitions and their URL patterns from a settings file. */
public interface ScopesLoader {
    /**
     * Loads the settings file containing scopes and URL patterns. The file is
     * looked up in the project's default directory.
     *
     * @param fileName name of the file including its extension
     *                 (for example: settings.xml, scopes.json)
     */
    void load(String fileName);
}
|
muhammadmuhlas/g3nd | gui_label.go | <reponame>muhammadmuhlas/g3nd<gh_stars>0
package main
import (
"github.com/g3n/engine/graphic"
"github.com/g3n/engine/gui"
"github.com/g3n/engine/math32"
"strings"
)
// init registers this demo under the "gui.label" key of the global demo map.
func init() {
	TestMap["gui.label"] = &GuiLabel{}
}
// GuiLabel is a g3nd demo showcasing gui.Label styling options
// (colors, borders, paddings, font sizes, multi-line text).
type GuiLabel struct{}
// Initialize builds the demo scene: an axis helper plus seven labels, each
// exercising a different combination of label styling options.
func (t *GuiLabel) Initialize(ctx *Context) {
	axis := graphic.NewAxisHelper(1)
	ctx.Scene.Add(axis)
	// l1: bare label, defaults only.
	l1 := gui.NewLabel("label1")
	l1.SetPosition(10, 10)
	ctx.Gui.Add(l1)
	// l2: thin black border with small padding.
	l2 := gui.NewLabel("label2")
	l2.SetPosition(60, 10)
	l2.SetBorders(1, 1, 1, 1)
	l2.SetBordersColor(&math32.Black)
	l2.SetPaddings(2, 2, 2, 2)
	ctx.Gui.Add(l2)
	// l3: background color.
	l3 := gui.NewLabel("label3")
	l3.SetPosition(120, 10)
	l3.SetBgColor(&math32.Green)
	l3.SetBorders(1, 1, 1, 1)
	l3.SetPaddings(4, 6, 4, 6)
	ctx.Gui.Add(l3)
	// l4: inverted colors and a larger font.
	l4 := gui.NewLabel("label4")
	l4.SetPosition(200, 10)
	l4.SetBgColor(&math32.Blue)
	l4.SetColor(&math32.White)
	l4.SetBorders(1, 1, 1, 1)
	l4.SetPaddings(4, 20, 4, 20)
	l4.SetFontSize(20)
	ctx.Gui.Add(l4)
	// l5: explicit size larger than the text.
	l5 := gui.NewLabel("label5")
	l5.SetPosition(320, 10)
	l5.SetFontSize(28)
	l5.SetColor(&math32.Red)
	l5.SetBorders(1, 1, 1, 1)
	l5.SetBordersColor(&math32.White)
	l5.SetPaddings(4, 20, 4, 20)
	l5.SetSize(100, 100)
	ctx.Gui.Add(l5)
	// l6: same as l5 but without a background color.
	l6 := gui.NewLabel("label6")
	l6.SetPosition(450, 10)
	l6.SetColor(&math32.Red)
	l6.SetBorders(1, 1, 1, 1)
	l6.SetBordersColor(&math32.White)
	l6.SetPaddings(4, 20, 4, 20)
	l6.SetSize(100, 100)
	l6.SetFontSize(28)
	ctx.Gui.Add(l6)
	// l7: multi-line text with asymmetric borders.
	lines := []string{
		"We are merely picking up pebbles on the beach",
		"while the great ocean of truth",
		"lays completely undiscovered before us.",
	}
	l7 := gui.NewLabel(strings.Join(lines, "\n"))
	l7.SetPosition(10, 120)
	l7.SetBordersColor(&math32.Red)
	l7.SetBgColor(&math32.Green)
	l7.SetColor(&math32.Blue)
	l7.SetBorders(10, 4, 10, 4)
	l7.SetPaddings(4, 20, 4, 20)
	l7.SetFontSize(22)
	ctx.Gui.Add(l7)
}
// Render is a no-op: the labels are static GUI elements.
func (t *GuiLabel) Render(ctx *Context) {
}
|
fourierjoe/ql-wt | micro-services/workflow/service-surety-workflow-apilist/src/main/java/yhao/micro/service/workflow/apilist/enums/flow/ItemExceptResult.java | <filename>micro-services/workflow/service-surety-workflow-apilist/src/main/java/yhao/micro/service/workflow/apilist/enums/flow/ItemExceptResult.java
package yhao.micro.service.workflow.apilist.enums.flow;
/**
* @Description:
* @Created by ql on 2019/1/17 11:00
* @Version: v1.0
*/
public enum ItemExceptResult {
    // Result codes for workflow item exceptions.  Only the code suffix is
    // visible here; the business meaning of each value is defined elsewhere —
    // TODO: document R001..R007 once the mapping is confirmed.
    R001,
    R002,
    R003,
    R004,
    R005,
    R006,
    R007,
    ;
}
|
cragkhit/elasticsearch | references/bcb_chosen_clones/selected#2156248#11#58.java | <filename>references/bcb_chosen_clones/selected#2156248#11#58.java<gh_stars>10-100
// Weighted least-squares polynomial fit of degree `order` to the points
// (x[i], y[i]).  Assembles the normal equations alpha * a = beta, weighting
// each point by 1/sigmaY[i]^2 when sigmaY is provided, and solves them via a
// QR decomposition.  Returns an array of length 2*(order+1): the first half
// holds the fitted coefficients a0..a_order, the second half their standard
// errors from the diagonal of the covariance matrix (inverse of alpha).
// On solver failure the stack trace is printed and an empty array returned.
// NOTE(review): the sigmaX parameter is accepted but never used — errors in
// x are ignored by this implementation; confirm whether that is intended.
public double[] fit(double[] x, double[] y, double[] sigmaX, double[] sigmaY, int order) {
    final double parameters[] = new double[2 * order + 2];
    final int numOfPoints = x.length;
    final int nk = parameters.length / 2; // number of coefficients: order + 1
    double[][] alpha = new double[nk][nk]; // normal-equation matrix
    double[] beta = new double[nk];        // right-hand side vector
    double term = 0;
    double product1 = 1.0f, product2 = 1.0f;
    for (int k = 0; k < nk; k++) {
        for (int j = k; j < nk; j++) {
            // alpha[k][j] = sum_i x_i^k * x_i^j / sigmaY_i^2
            term = 0.0f;
            alpha[k][j] = 0.0f;
            for (int i = 0; i < numOfPoints; i++) {
                product1 = 1.0f;
                if (k > 0) for (int m = 0; m < k; m++) product1 *= x[i];
                product2 = 1.0f;
                if (j > 0) for (int m = 0; m < j; m++) product2 *= x[i];
                term = (product1 * product2);
                if (sigmaY != null && sigmaY[i] != 0.0f) term /= (sigmaY[i] * sigmaY[i]);
                alpha[k][j] += term;
            }
            alpha[j][k] = alpha[k][j]; // matrix is symmetric
        }
        // beta[k] = sum_i y_i * x_i^k / sigmaY_i^2
        for (int i = 0; i < numOfPoints; i++) {
            product1 = 1.0f;
            if (k > 0) for (int m = 0; m < k; m++) product1 *= x[i];
            term = (y[i] * product1);
            if (sigmaY != null && sigmaY[i] != 0.0) term /= (sigmaY[i] * sigmaY[i]);
            beta[k] += term;
        }
    }
    // Solve alpha * a = beta through QR (more stable than a direct inverse).
    Matrix alphaMatrix = new Matrix(alpha);
    QRDecomposition alphaQRDecomposition = new QRDecomposition(alphaMatrix);
    Matrix betaMatrix = new Matrix(beta, nk);
    Matrix parameterMatrix;
    try {
        parameterMatrix = alphaQRDecomposition.solve(betaMatrix);
    } catch (Exception e) {
        e.printStackTrace();
        return new double[0];
    }
    // Covariance of the coefficients is the inverse of the normal matrix;
    // its diagonal gives the variance of each coefficient.
    Matrix covarianceMatrix = alphaMatrix.inverse();
    for (int k = 0; k < nk; k++) {
        parameters[k] = parameterMatrix.get(k, 0);
        parameters[k + nk] = Math.sqrt(covarianceMatrix.get(k, k));
    }
    return parameters;
}
liftting/XmWeiBo | XmWei/app/src/main/java/wm/xmwei/core/lib/support/view/gif/GifIOException.java | <gh_stars>1-10
package wm.xmwei.core.lib.support.view.gif;
import java.io.IOException;
/**
* Exception encapsulating {@link GifError}s.
*
* @author koral--
*/
public class GifIOException extends IOException {
    private static final long serialVersionUID = 13038402904505L;
    /**
     * Reason which caused an exception
     */
    public final GifError reason;

    // The exception message is the formatted human-readable description of
    // the underlying GifError.
    GifIOException(GifError reason) {
        super(reason.getFormattedDescription());
        this.reason = reason;
    }

    @SuppressWarnings("UnusedDeclaration")
    // native code only
    GifIOException(int errorCode) {
        this(GifError.fromCode(errorCode));
    }
}
|
zhuanghm/GoldenDance | app/src/main/java/com/goldendance/client/course/history/CourseHistoryListFragment.java | package com.goldendance.client.course.history;
import android.content.Context;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.TypedValue;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.goldendance.client.R;
import com.goldendance.client.bean.CourseBean;
import com.goldendance.client.bean.CourseHistoryBean;
import com.goldendance.client.bean.CourseListBean;
import com.goldendance.client.bean.DataResultBean;
import com.goldendance.client.course.CourseAdapter;
import com.goldendance.client.http.GDHttpManager;
import com.goldendance.client.http.GDOnResponseHandler;
import com.goldendance.client.model.CourseModel;
import com.goldendance.client.utils.JsonUtils;
import com.google.gson.reflect.TypeToken;
import java.io.IOException;
import java.util.ArrayList;
/**
* A simple {@link Fragment} subclass.
* Activities that contain this fragment must implement the
* {@link CourseHistoryListFragment.OnFragmentInteractionListener} interface
* to handle interaction events.
* Use the {@link CourseHistoryListFragment#newInstance} factory method to
* create an instance of this fragment.
*/
public class CourseHistoryListFragment extends Fragment {
    // TODO: Rename parameter arguments, choose names that match
    // the fragment initialization parameters, e.g. ARG_ITEM_NUMBER
    private static final String ARG_DATE = "param1";
    private static final String ARG_PARAM2 = "param2";
    // NOTE(review): public mutable static shared across instances — verify
    // this is intentional; it is never read within this class.
    public static String storeId = "";
    // TODO: Rename and change types of parameters
    private String date;
    private String mParam2;
    private OnFragmentInteractionListener mListener;
    private View empty_view;       // placeholder shown when the list is empty
    private TextView tvEmpty;      // message inside the placeholder
    private CourseAdapter adapter; // backing adapter of the RecyclerView
    private SwipeRefreshLayout refreshView;
    private LinearLayoutManager manager;

    public CourseHistoryListFragment() {
        // Required empty public constructor
    }

    /**
     * Use this factory method to create a new instance of
     * this fragment using the provided parameters.
     *
     * @param param1 Parameter 1.
     * @param param2 Parameter 2.
     * @return A new instance of fragment CourseListFragment.
     */
    // TODO: Rename and change types and number of parameters
    public static CourseHistoryListFragment newInstance(String param1, String param2) {
        CourseHistoryListFragment fragment = new CourseHistoryListFragment();
        Bundle args = new Bundle();
        args.putString(ARG_DATE, param1);
        args.putString(ARG_PARAM2, param2);
        fragment.setArguments(args);
        return fragment;
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        if (getArguments() != null) {
            date = getArguments().getString(ARG_DATE);
            mParam2 = getArguments().getString(ARG_PARAM2);
        }
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        // Inflate the layout for this fragment
        View view = inflater.inflate(R.layout.fragment_course_list, container, false);
        initView(view);
        onrefresh();
        return view;
    }

    // Resets paging state and loads the first page.
    public void onrefresh() {
        adapter.setLoadText("加载中...");
        hasMoreData = true;
        page = 1;
        initData();
    }

    // Clears the current list before reloading from the first page.
    public void onrefresh2() {
        adapter.setmList(null);
        adapter.notifyDataSetChanged();
        onrefresh();
    }

    private static int ROWS = 20; // page size requested from the server
    private int page = 1;         // next page to load (1-based)

    // Requests one page of course records and appends it to the adapter.
    // A short page (< ROWS items) marks the end of the data set.
    private void initData() {
        refreshView.setRefreshing(true);
        new CourseModel().getCourseRecord(new GDOnResponseHandler() {
            @Override
            public void onEnd() {
                super.onEnd();
                refreshView.setRefreshing(false);
            }
            @Override
            public void onFailed(IOException e) {
                super.onFailed(e);
                // Only show the error placeholder when nothing is displayed yet.
                if (adapter.getItemCount() < 1) {
                    showEmptyView("网络请求超时");
                }
            }
            @Override
            public void onSuccess(int code, String json) {
                super.onSuccess(code, json);
                if (GDHttpManager.CODE200 != code) {
                    showEmptyView("newwork error " + code);
                    return;
                }
                DataResultBean<CourseHistoryBean> base = JsonUtils.fromJson(json, new TypeToken<DataResultBean<CourseHistoryBean>>() {
                });
                if (base == null) {
                    showEmptyView("data parse error");
                    return;
                }
                if (GDHttpManager.CODE200 != base.getCode()) {
                    showEmptyView(base.getMessage());
                    return;
                }
                ArrayList<CourseBean> data = base.getData().getList();
                if (data == null) {
                    showEmptyView("data is null");
                    return;
                }
                // A short page means the server has no further data.
                if (data == null || data.size() < ROWS) {
                    hasMoreData = false;
                    adapter.setHasNoData(true);
                    adapter.setLoadText("没有更多课程了");
                    // Toast.makeText(getActivity(), "没有更多课程了", Toast.LENGTH_SHORT).show();
                }
                page++;
                adapter.addMoreList(data);
                adapter.notifyDataSetChanged();
                // Item count includes the footer, so < 2 means no real rows.
                if (adapter.getItemCount() < 2) {
                    adapter.setLoadText("");
                    showEmptyView("暂无课程信息");
                }
            }
        });
    }

    // Shows the empty-state placeholder with the given message.
    void showEmptyView(String msg) {
        empty_view.setVisibility(View.VISIBLE);
        tvEmpty.setText(msg);
    }

    private boolean hasMoreData = true; // false once the last page was received

    // Wires up the RecyclerView (with endless-scroll loading), the empty-state
    // view and the pull-to-refresh layout.
    private void initView(View view) {
        RecyclerView rvList = (RecyclerView) view.findViewById(R.id.rvList);
        manager = new LinearLayoutManager(getActivity());
        manager.setOrientation(LinearLayoutManager.VERTICAL);
        rvList.setLayoutManager(manager);
        adapter = new CourseAdapter(getActivity());
        rvList.setAdapter(adapter);
        // Endless scroll: load the next page when the list settles at the end.
        rvList.addOnScrollListener(new RecyclerView.OnScrollListener() {
            @Override
            public void onScrollStateChanged(RecyclerView recyclerView, int newState) {
                super.onScrollStateChanged(recyclerView, newState);
                if (RecyclerView.SCROLL_STATE_IDLE == newState && hasMoreData) {
                    if (manager.findLastVisibleItemPosition() >= adapter.getItemCount() - 1) {
                        initData();
                    }
                }
            }
        });
        empty_view = view.findViewById(R.id.empty_view);
        empty_view.setVisibility(View.GONE);
        tvEmpty = (TextView) empty_view.findViewById(R.id.tvEmpty);
        tvEmpty.setText("暂无课程");
        // Pull-to-refresh setup
        refreshView = (SwipeRefreshLayout) view.findViewById(R.id.refreshView);
        // Set the colors of the refresh animation (up to four can be set).
        refreshView.setProgressBackgroundColorSchemeResource(android.R.color.white);
        refreshView.setColorSchemeResources(android.R.color.holo_blue_light,
                android.R.color.holo_red_light, android.R.color.holo_orange_light,
                android.R.color.holo_green_light);
        refreshView.setProgressViewOffset(false, 0, (int) TypedValue
                .applyDimension(TypedValue.COMPLEX_UNIT_DIP, 24, getResources()
                        .getDisplayMetrics()));
        refreshView.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
            @Override
            public void onRefresh() {
                adapter.setmList(null);
                adapter.notifyDataSetChanged();
                onrefresh();
            }
        });
    }

    // TODO: Rename method, update argument and hook method into UI event
    public void onButtonPressed(Uri uri) {
        if (mListener != null) {
            mListener.onFragmentInteraction(uri);
        }
    }

    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        if (context instanceof OnFragmentInteractionListener) {
            mListener = (OnFragmentInteractionListener) context;
        } else {
            throw new RuntimeException(context.toString()
                    + " must implement OnFragmentInteractionListener");
        }
    }

    @Override
    public void onDetach() {
        super.onDetach();
        mListener = null;
    }

    /**
     * This interface must be implemented by activities that contain this
     * fragment to allow an interaction in this fragment to be communicated
     * to the activity and potentially other fragments contained in that
     * activity.
     * <p>
     * See the Android Training lesson <a href=
     * "http://developer.android.com/training/basics/fragments/communicating.html"
     * >Communicating with Other Fragments</a> for more information.
     */
    public interface OnFragmentInteractionListener {
        // TODO: Update argument type and name
        void onFragmentInteraction(Uri uri);
    }
}
|
AlexWayfer/sentry | tests/sentry/metrics/test_datadog.py | <reponame>AlexWayfer/sentry<filename>tests/sentry/metrics/test_datadog.py
from __future__ import absolute_import
from mock import patch
from datadog.util.hostname import get_hostname
from sentry.metrics.datadog import DatadogMetricsBackend
from sentry.testutils import TestCase
class DatadogMetricsBackendTest(TestCase):
    """Tests for DatadogMetricsBackend.

    The underlying ``datadog`` ThreadStats client is patched, so no
    metrics are actually sent; the tests only check the arguments the
    backend forwards (prefixed metric name, sample rate, instance tag
    and host).
    """

    def setUp(self):
        self.backend = DatadogMetricsBackend(prefix='sentrytest.')

    @patch('datadog.threadstats.base.ThreadStats.increment')
    def test_incr(self, mock_incr):
        # incr() forwards to ThreadStats.increment with the prefixed name
        # and encodes the instance as a "instance:<value>" tag.
        self.backend.incr('foo', instance='bar')
        mock_incr.assert_called_once_with(
            'sentrytest.foo',
            1,
            sample_rate=1,
            tags=['instance:bar'],
            host=get_hostname(),
        )

    @patch('datadog.threadstats.base.ThreadStats.timing')
    def test_timing(self, mock_timing):
        # timing() forwards the raw value with the same prefixing/tagging.
        self.backend.timing('foo', 30, instance='bar')
        mock_timing.assert_called_once_with(
            'sentrytest.foo',
            30,
            sample_rate=1,
            tags=['instance:bar'],
            host=get_hostname(),
        )
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.