column       type            range / values
code         stringlengths   3 to 1.01M
repo_name    stringlengths   5 to 116
path         stringlengths   3 to 311
language     stringclasses   30 values
license      stringclasses   15 values
size         int64           3 to 1.01M
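The six fields above describe a code-corpus table: a "code" string column plus "repo_name", "path", "language", "license", and an integer "size" in bytes. As a minimal sketch of how such a table could be inspected, assuming it is published as a Hugging Face dataset (the identifier "user/code-corpus" below is a placeholder, not the actual dataset name):

# Minimal sketch: load the corpus and inspect its schema and one row's metadata.
# "user/code-corpus" is a hypothetical identifier used only for illustration.
from datasets import load_dataset

ds = load_dataset("user/code-corpus", split="train")

# The declared columns: code, repo_name, path, language, license, size.
print(ds.features)

# Look at one row's metadata without printing the full source file.
row = ds[0]
print(row["repo_name"], row["path"], row["language"], row["license"], row["size"])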
# Copyright 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import logging from django.core.urlresolvers import reverse from django.template import defaultfilters from django.utils.translation import ugettext_lazy as _ from django.utils.translation import ungettext_lazy from horizon_lib import tables from openstack_horizon import api from openstack_horizon.dashboards.identity.groups import constants LOG = logging.getLogger(__name__) LOGOUT_URL = 'logout' STATUS_CHOICES = ( ("true", True), ("false", False) ) class CreateGroupLink(tables.LinkAction): name = "create" verbose_name = _("Create Group") url = constants.GROUPS_CREATE_URL classes = ("ajax-modal",) icon = "plus" policy_rules = (("identity", "identity:create_group"),) def allowed(self, request, group): return api.keystone.keystone_can_edit_group() class EditGroupLink(tables.LinkAction): name = "edit" verbose_name = _("Edit Group") url = constants.GROUPS_UPDATE_URL classes = ("ajax-modal",) icon = "pencil" policy_rules = (("identity", "identity:update_group"),) def allowed(self, request, group): return api.keystone.keystone_can_edit_group() class DeleteGroupsAction(tables.DeleteAction): @staticmethod def action_present(count): return ungettext_lazy( u"Delete Group", u"Delete Groups", count ) @staticmethod def action_past(count): return ungettext_lazy( u"Deleted Group", u"Deleted Groups", count ) name = "delete" policy_rules = (("identity", "identity:delete_group"),) def allowed(self, request, datum): return api.keystone.keystone_can_edit_group() def delete(self, request, obj_id): LOG.info('Deleting group "%s".' 
% obj_id) api.keystone.group_delete(request, obj_id) class ManageUsersLink(tables.LinkAction): name = "users" verbose_name = _("Modify Users") url = constants.GROUPS_MANAGE_URL icon = "pencil" policy_rules = (("identity", "identity:get_group"), ("identity", "identity:list_users"),) def allowed(self, request, datum): return api.keystone.keystone_can_edit_group() class GroupFilterAction(tables.FilterAction): def filter(self, table, groups, filter_string): """Naive case-insensitive search.""" q = filter_string.lower() def comp(group): if q in group.name.lower(): return True return False return filter(comp, groups) class GroupsTable(tables.DataTable): name = tables.Column('name', verbose_name=_('Name')) description = tables.Column(lambda obj: getattr(obj, 'description', None), verbose_name=_('Description')) id = tables.Column('id', verbose_name=_('Group ID')) class Meta: name = "groups" verbose_name = _("Groups") row_actions = (ManageUsersLink, EditGroupLink, DeleteGroupsAction) table_actions = (GroupFilterAction, CreateGroupLink, DeleteGroupsAction) class UserFilterAction(tables.FilterAction): def filter(self, table, users, filter_string): """Naive case-insensitive search.""" q = filter_string.lower() return [user for user in users if q in user.name.lower() or q in getattr(user, 'email', '').lower()] class RemoveMembers(tables.DeleteAction): @staticmethod def action_present(count): return ungettext_lazy( u"Remove User", u"Remove Users", count ) @staticmethod def action_past(count): return ungettext_lazy( u"Removed User", u"Removed Users", count ) name = "removeGroupMember" policy_rules = (("identity", "identity:remove_user_from_group"),) def allowed(self, request, user=None): return api.keystone.keystone_can_edit_group() def action(self, request, obj_id): user_obj = self.table.get_object_by_id(obj_id) group_id = self.table.kwargs['group_id'] LOG.info('Removing user %s from group %s.' % (user_obj.id, group_id)) api.keystone.remove_group_user(request, group_id=group_id, user_id=user_obj.id) # TODO(lin-hua-cheng): Fix the bug when removing current user # Keystone revokes the token of the user removed from the group. # If the logon user was removed, redirect the user to logout. 
class AddMembersLink(tables.LinkAction): name = "add_user_link" verbose_name = _("Add...") classes = ("ajax-modal",) icon = "plus" url = constants.GROUPS_ADD_MEMBER_URL policy_rules = (("identity", "identity:list_users"), ("identity", "identity:add_user_to_group"),) def allowed(self, request, user=None): return api.keystone.keystone_can_edit_group() def get_link_url(self, datum=None): return reverse(self.url, kwargs=self.table.kwargs) class UsersTable(tables.DataTable): name = tables.Column('name', verbose_name=_('User Name')) email = tables.Column('email', verbose_name=_('Email'), filters=[defaultfilters.escape, defaultfilters.urlize]) id = tables.Column('id', verbose_name=_('User ID')) enabled = tables.Column('enabled', verbose_name=_('Enabled'), status=True, status_choices=STATUS_CHOICES, empty_value="False") class GroupMembersTable(UsersTable): class Meta: name = "group_members" verbose_name = _("Group Members") table_actions = (UserFilterAction, AddMembersLink, RemoveMembers) class AddMembers(tables.BatchAction): @staticmethod def action_present(count): return ungettext_lazy( u"Add User", u"Add Users", count ) @staticmethod def action_past(count): return ungettext_lazy( u"Added User", u"Added Users", count ) name = "addMember" icon = "plus" requires_input = True success_url = constants.GROUPS_MANAGE_URL policy_rules = (("identity", "identity:add_user_to_group"),) def allowed(self, request, user=None): return api.keystone.keystone_can_edit_group() def action(self, request, obj_id): user_obj = self.table.get_object_by_id(obj_id) group_id = self.table.kwargs['group_id'] LOG.info('Adding user %s to group %s.' % (user_obj.id, group_id)) api.keystone.add_group_user(request, group_id=group_id, user_id=user_obj.id) # TODO(lin-hua-cheng): Fix the bug when adding current user # Keystone revokes the token of the user added to the group. # If the logon user was added, redirect the user to logout. def get_success_url(self, request=None): group_id = self.table.kwargs.get('group_id', None) return reverse(self.success_url, args=[group_id]) class GroupNonMembersTable(UsersTable): class Meta: name = "group_non_members" verbose_name = _("Non-Members") table_actions = (UserFilterAction, AddMembers)
mrunge/openstack_horizon
openstack_horizon/dashboards/identity/groups/tables.py
Python
apache-2.0
8,157
package cn.felord.wepay.ali.sdk.api.response;

import java.util.List;

import cn.felord.wepay.ali.sdk.api.internal.mapping.ApiField;
import cn.felord.wepay.ali.sdk.api.internal.mapping.ApiListField;
import cn.felord.wepay.ali.sdk.api.domain.ListListSmMockModel;
import cn.felord.wepay.ali.sdk.api.AlipayResponse;

/**
 * ALIPAY API: alipay.mobile.public.std.mock.listsmlist.api response.
 *
 * @author auto create
 * @version $Id: $Id
 */
public class AlipayMobilePublicStdMockListsmlistApiResponse extends AlipayResponse {

    private static final long serialVersionUID = 4548186429959737171L;

    /**
     * Nested list of simple objects.
     */
    @ApiListField("list_sm_model_list")
    @ApiField("list_list_sm_mock_model")
    private List<ListListSmMockModel> listSmModelList;

    /**
     * <p>Setter for the field <code>listSmModelList</code>.</p>
     *
     * @param listSmModelList a {@link java.util.List} object.
     */
    public void setListSmModelList(List<ListListSmMockModel> listSmModelList) {
        this.listSmModelList = listSmModelList;
    }

    /**
     * <p>Getter for the field <code>listSmModelList</code>.</p>
     *
     * @return a {@link java.util.List} object.
     */
    public List<ListListSmMockModel> getListSmModelList() {
        return this.listSmModelList;
    }
}
NotFound403/WePay
src/main/java/cn/felord/wepay/ali/sdk/api/response/AlipayMobilePublicStdMockListsmlistApiResponse.java
Java
apache-2.0
1,234
//
//  ========================================================================
//  Copyright (c) 1995-2014 Mort Bay Consulting Pty. Ltd.
//  ------------------------------------------------------------------------
//  All rights reserved. This program and the accompanying materials
//  are made available under the terms of the Eclipse Public License v1.0
//  and Apache License v2.0 which accompanies this distribution.
//
//      The Eclipse Public License is available at
//      http://www.eclipse.org/legal/epl-v10.html
//
//      The Apache License v2.0 is available at
//      http://www.opensource.org/licenses/apache2.0.php
//
//  You may elect to redistribute this code under either of these licenses.
//  ========================================================================
//

package org.eclipse.jetty.websocket.jsr356.server.samples;

import javax.websocket.CloseReason;
import javax.websocket.OnClose;
import javax.websocket.Session;
import javax.websocket.server.ServerEndpoint;

import org.eclipse.jetty.websocket.jsr356.server.TrackingSocket;

@ServerEndpoint(value="/basic")
public class BasicCloseSessionReasonSocket extends TrackingSocket
{
    @OnClose
    public void onClose(Session session, CloseReason reason)
    {
        addEvent("onClose(%s,%s)",session,reason);
        closeLatch.countDown();
    }
}
sdw2330976/Research-jetty-9.2.5
jetty-websocket/javax-websocket-server-impl/src/test/java/org/eclipse/jetty/websocket/jsr356/server/samples/BasicCloseSessionReasonSocket.java
Java
apache-2.0
1,339
/******************************************************************************* * * Copyright (c) 2011, 2012, 2013, 2014, 2015 Olaf Bergmann (TZI) and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * and Eclipse Distribution License v. 1.0 which accompanies this distribution. * * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Olaf Bergmann - initial API and implementation * Hauke Mehrtens - memory optimization, ECC integration * *******************************************************************************/ #ifndef _DTLS_NETQ_H_ #define _DTLS_NETQ_H_ #include "tinydtls.h" #include "global.h" #include "dtls.h" #include "dtls_time.h" /** * \defgroup netq Network Packet Queue * The netq utility functions implement an ordered queue of data packets * to send over the network and can also be used to queue received packets * from the network. * @{ */ #ifndef NETQ_MAXCNT #ifdef DTLS_ECC #define NETQ_MAXCNT 5 /**< maximum number of elements in netq structure */ #elif defined(DTLS_PSK) #define NETQ_MAXCNT 3 /**< maximum number of elements in netq structure */ #endif #endif /** * Datagrams in the netq_t structure have a fixed maximum size of * DTLS_MAX_BUF to simplify memory management on constrained nodes. */ typedef unsigned char netq_packet_t[DTLS_MAX_BUF]; typedef struct netq_t { struct netq_t *next; clock_time_t t; /**< when to send PDU for the next time */ unsigned int timeout; /**< randomized timeout value */ dtls_peer_t *peer; /**< remote address */ uint16_t epoch; uint8_t type; unsigned char retransmit_cnt; /**< retransmission counter, will be removed when zero */ size_t length; /**< actual length of data */ #if !(defined (WITH_CONTIKI)) && !(defined (RIOT_VERSION)) unsigned char data[]; /**< the datagram to send */ #else netq_packet_t data; /**< the datagram to send */ #endif } netq_t; #if !(defined (WITH_CONTIKI)) && !(defined (RIOT_VERSION)) static inline void netq_init(void) { } #else void netq_init(void); #endif /** * Adds a node to the given queue, ordered by their time-stamp t. * This function returns @c 0 on error, or non-zero if @p node has * been added successfully. * * @param queue A pointer to the queue head where @p node will be added. * @param node The new item to add. * @return @c 0 on error, or non-zero if the new item was added. */ int netq_insert_node(netq_t **queue, netq_t *node); /** Destroys specified node and releases any memory that was allocated * for the associated datagram. */ void netq_node_free(netq_t *node); /** Removes all items from given queue and frees the allocated storage */ void netq_delete_all(netq_t **queue); /** Creates a new node suitable for adding to a netq_t queue. */ netq_t *netq_node_new(size_t size); /** * Returns a pointer to the first item in given queue or NULL if * empty. */ netq_t *netq_head(netq_t **queue); netq_t *netq_next(netq_t *p); void netq_remove(netq_t **queue, netq_t *p); /** * Removes the first item in given queue and returns a pointer to the * removed element. If queue is empty when netq_pop_first() is called, * this function returns NULL. */ netq_t *netq_pop_first(netq_t **queue); /**@}*/ #endif /* _DTLS_NETQ_H_ */
krzychb/rtd-test-bed
components/coap/libcoap/ext/tinydtls/netq.h
C
apache-2.0
3,504
package com.bytebeats.lucene.query;

/**
 * ${DESCRIPTION}
 *
 * @author Ricky Fung
 * @create 2016-12-07 18:15
 */
public class MultiFieldQueryDemo extends BaseDemo {

    @Override
    public void start() {

    }
}
FBing/lucene-codelab
src/main/java/com/bytebeats/lucene/query/MultiFieldQueryDemo.java
Java
apache-2.0
218
package io.ebean.ignite.config; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for l2CacheMatch complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="l2CacheMatch"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;all> * &lt;element name="config" type="{http://ebean-orm.github.io/xml/ns/ignite}l2CacheConfig" minOccurs="0"/> * &lt;/all> * &lt;attribute name="matchClasses" use="required" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="typeQuery" type="{http://www.w3.org/2001/XMLSchema}boolean" /> * &lt;attribute name="typeBean" type="{http://www.w3.org/2001/XMLSchema}boolean" /> * &lt;attribute name="typeKey" type="{http://www.w3.org/2001/XMLSchema}boolean" /> * &lt;attribute name="typeManyId" type="{http://www.w3.org/2001/XMLSchema}boolean" /> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "l2CacheMatch", propOrder = { }) public class L2CacheMatch { protected L2CacheConfig config; @XmlAttribute(name = "matchClasses", required = true) protected String matchClasses; @XmlAttribute(name = "typeQuery") protected Boolean typeQuery; @XmlAttribute(name = "typeBean") protected Boolean typeBean; @XmlAttribute(name = "typeKey") protected Boolean typeKey; @XmlAttribute(name = "typeManyId") protected Boolean typeManyId; public String toString() { String val = "classes:" + matchClasses; if (Boolean.TRUE.equals(typeQuery)) { val += " typeQuery:true"; } if (Boolean.TRUE.equals(typeBean)) { val += " typeBean:true"; } if (Boolean.TRUE.equals(typeKey)) { val += " typeKey:true"; } if (Boolean.TRUE.equals(typeManyId)) { val += " typeManyId:true"; } return val; } /** * Gets the value of the config property. * * @return possible object is * {@link L2CacheConfig } */ public L2CacheConfig getConfig() { return config; } /** * Sets the value of the config property. * * @param value allowed object is * {@link L2CacheConfig } */ public void setConfig(L2CacheConfig value) { this.config = value; } /** * Gets the value of the matchClasses property. * * @return possible object is * {@link String } */ public String getMatchClasses() { return matchClasses; } /** * Sets the value of the matchClasses property. * * @param value allowed object is * {@link String } */ public void setMatchClasses(String value) { this.matchClasses = value; } /** * Gets the value of the typeQuery property. * * @return possible object is * {@link Boolean } */ public Boolean isTypeQuery() { return typeQuery; } /** * Sets the value of the typeQuery property. * * @param value allowed object is * {@link Boolean } */ public void setTypeQuery(Boolean value) { this.typeQuery = value; } /** * Gets the value of the typeBean property. * * @return possible object is * {@link Boolean } */ public Boolean isTypeBean() { return typeBean; } /** * Sets the value of the typeBean property. * * @param value allowed object is * {@link Boolean } */ public void setTypeBean(Boolean value) { this.typeBean = value; } /** * Gets the value of the typeKey property. * * @return possible object is * {@link Boolean } */ public Boolean isTypeKey() { return typeKey; } /** * Sets the value of the typeKey property. 
* * @param value allowed object is * {@link Boolean } */ public void setTypeKey(Boolean value) { this.typeKey = value; } /** * Gets the value of the typeManyId property. * * @return possible object is * {@link Boolean } */ public Boolean isTypeManyId() { return typeManyId; } /** * Sets the value of the typeManyId property. * * @param value allowed object is * {@link Boolean } */ public void setTypeManyId(Boolean value) { this.typeManyId = value; } }
ebean-orm/avaje-ebeanorm-ignite
src/main/java/io/ebean/ignite/config/L2CacheMatch.java
Java
apache-2.0
4,505
/* * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals. * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using QuantConnect.Algorithm.Framework.Alphas; using QuantConnect.Data; using QuantConnect.Data.UniverseSelection; using QuantConnect.Securities; using System; using System.Collections.Generic; using System.Linq; using Accord.Statistics; using QuantConnect.Util; using Accord.Math; namespace QuantConnect.Algorithm.Framework.Portfolio { /// <summary> /// Provides an implementation of Black-Litterman portfolio optimization. The model adjusts equilibrium market /// returns by incorporating views from multiple alpha models and therefore to get the optimal risky portfolio /// reflecting those views. If insights of all alpha models have None magnitude or there are linearly dependent /// vectors in link matrix of views, the expected return would be the implied excess equilibrium return. /// The interval of weights in optimization method can be changed based on the long-short algorithm. /// The default model uses the 0.0025 as weight-on-views scalar parameter tau. The optimization method /// maximizes the Sharpe ratio with the weight range from -1 to 1. /// </summary> public class BlackLittermanOptimizationPortfolioConstructionModel : PortfolioConstructionModel { private readonly int _lookback; private readonly int _period; private readonly Resolution _resolution; private readonly double _riskFreeRate; private readonly double _tau; private readonly IPortfolioOptimizer _optimizer; private readonly List<Symbol> _pendingRemoval; private readonly Dictionary<Symbol, ReturnsSymbolData> _symbolDataDict; /// <summary> /// Initialize the model /// </summary> /// <param name="lookback">Historical return lookback period</param> /// <param name="period">The time interval of history price to calculate the weight</param> /// <param name="resolution">The resolution of the history price</param> /// <param name="riskFreeRate">The risk free rate</param> /// <param name="tau">The model parameter indicating the uncertainty of the CAPM prior</param> /// <param name="optimizer">The portfolio optimization algorithm. If no algorithm is explicitly provided then the default will be max Sharpe ratio optimization.</param> public BlackLittermanOptimizationPortfolioConstructionModel( int lookback = 1, int period = 63, Resolution resolution = Resolution.Daily, double riskFreeRate = 0.0, double tau = 0.025, IPortfolioOptimizer optimizer = null ) { _lookback = lookback; _period = period; _resolution = resolution; _riskFreeRate = riskFreeRate; _tau = tau; _optimizer = optimizer ?? 
new MaximumSharpeRatioPortfolioOptimizer(riskFreeRate: riskFreeRate); _pendingRemoval = new List<Symbol>(); _symbolDataDict = new Dictionary<Symbol, ReturnsSymbolData>(); } /// <summary> /// Create portfolio targets from the specified insights /// </summary> /// <param name="algorithm">The algorithm instance</param> /// <param name="insights">The insights to create portoflio targets from</param> /// <returns>An enumerable of portoflio targets to be sent to the execution model</returns> public override IEnumerable<IPortfolioTarget> CreateTargets(QCAlgorithmFramework algorithm, Insight[] insights) { var targets = new List<IPortfolioTarget>(); // remove pending foreach (var symbol in _pendingRemoval) { targets.Add(new PortfolioTarget(symbol, 0)); } _pendingRemoval.Clear(); var symbols = insights.Select(x => x.Symbol).Distinct(); if (symbols.Count() == 0 || insights.All(x => x.Magnitude == 0)) { return targets; } // Get symbols' returns var returns = _symbolDataDict.FormReturnsMatrix(symbols); // Calculate equilibrium returns double[] Π; double[,] Σ; GetEquilibriumReturns(returns, out Π, out Σ); // Calculate implied equilibrium returns double[,] P; double[] Q; if (TryGetViews(insights, out P, out Q)) { // Create the diagonal covariance matrix of error terms from the expressed views var Ω = P.Dot(Σ).DotWithTransposed(P).Multiply(_tau); double[,] matrix = Matrix.Diagonal(P.Dot(Σ).DotWithTransposed(P).Multiply(_tau).Diagonal()); if (Ω.Determinant() != 0) { var invCov = Σ.Multiply(_tau).Inverse(); var PTomega = P.TransposeAndDot(Ω.Inverse()); var A = invCov.Add(PTomega.Dot(P)); var B = invCov.Dot(Π).Add(PTomega.Dot(Q)); Π = A.Inverse().Dot(B); } } // The optimization method processes the data frame var W = _optimizer.Optimize(returns, expectedReturns: Π); // Create portfolio targets from the specified insights if (W.Length > 0) { int sidx = 0; foreach (var symbol in symbols) { var weight = (decimal)W[sidx]; targets.Add(PortfolioTarget.Percent(algorithm, symbol, weight)); sidx++; } } return targets; } /// <summary> /// Event fired each time the we add/remove securities from the data feed /// </summary> /// <param name="algorithm">The algorithm instance that experienced the change in securities</param> /// <param name="changes">The security additions and removals from the algorithm</param> public override void OnSecuritiesChanged(QCAlgorithmFramework algorithm, SecurityChanges changes) { // clean up data for removed securities foreach (var removed in changes.RemovedSecurities) { _pendingRemoval.Add(removed.Symbol); ReturnsSymbolData data; if (_symbolDataDict.TryGetValue(removed.Symbol, out data)) { _symbolDataDict.Remove(removed.Symbol); data.Reset(); } } // initialize data for added securities var addedSymbols = new List<Symbol>(); foreach (var added in changes.AddedSecurities) { if (!_symbolDataDict.ContainsKey(added.Symbol)) { var symbolData = new ReturnsSymbolData(added.Symbol, _lookback, _period); _symbolDataDict[added.Symbol] = symbolData; addedSymbols.Add(added.Symbol); } } if (addedSymbols.Count == 0) return; // warmup our indicators by pushing history through the consolidators algorithm.History(addedSymbols, _lookback * _period, _resolution) .PushThrough(bar => { ReturnsSymbolData symbolData; if (_symbolDataDict.TryGetValue(bar.Symbol, out symbolData)) { symbolData.Update(bar.EndTime, bar.Value); } }); } /// <summary> /// Calculate equilibrium returns and convariance /// </summary> /// <param name="returns">Returns</param> /// <param name="Π">Equilibrium returns</param> /// <param 
name="Σ">Covariance</param> private void GetEquilibriumReturns(double[,] returns, out double[] Π, out double[,] Σ) { // equal weighting scheme double[] W = Vector.Create(returns.GetLength(1), 1.0 / returns.GetLength(1)); // annualized covariance Σ = returns.Covariance().Multiply(252); //annualized return double annualReturn = W.Dot(Elementwise.Add(returns.Mean(0), 1.0).Pow(252.0).Subtract(1.0)); //annualized variance of return double annualVariance = W.Dot(Σ.Dot(W)); // the risk aversion coefficient var riskAversion = (annualReturn - _riskFreeRate) / annualVariance; // the implied excess equilibrium return Vector (N x 1 column vector) Π = Σ.Dot(W).Multiply(riskAversion); } /// <summary> /// Generate views from multiple alpha models /// </summary> /// <param name="insights"></param> /// <param name="P">a matrix that identifies the assets involved in the views (size: K x N)</param> /// <param name="Q">a view vector (size: K x 1)</param> /// <returns></returns> private bool TryGetViews(Insight[] insights, out double[,] P, out double[] Q) { try { var tmpP = insights.GroupBy(insight => insight.SourceModel) .Select(values => { var results = _symbolDataDict.ToDictionary(x => x.Key, v => 0.0); var upInsightsSum = values.Where(i => i.Direction == InsightDirection.Up).Sum(i => (int)i.Direction); var dnInsightsSum = values.Where(i => i.Direction == InsightDirection.Down).Sum(i => (int)i.Direction); foreach (var insight in values) { var direction = (double)insight.Direction; if (direction == 0) continue; var sum = direction > 0 ? upInsightsSum : -dnInsightsSum; results[insight.Symbol] = direction / sum; } return new { View = values.Key, Results = results }; }) .Where(r => !r.Results.Select(v => Math.Abs(v.Value)).Sum().IsNaNOrZero()) .ToDictionary(k => k.View, v => v.Results); var tmpQ = insights.GroupBy(insight => insight.SourceModel) .Select(values => { var q = 0.0; foreach (var insight in values) { q += tmpP[values.Key][insight.Symbol] * (insight.Magnitude ?? 0.0); } return q; }); P = Matrix.Create(tmpP.Select(d => d.Value.Values.ToArray()).ToArray()); Q = tmpQ.ToArray(); } catch { P = null; Q = null; return false; } return true; } } }
AnshulYADAV007/Lean
Algorithm.Framework/Portfolio/BlackLittermanOptimizationPortfolioConstructionModel.cs
C#
apache-2.0
11,806
/* * APDPlat - Application Product Development Platform * Copyright (c) 2013, 杨尚川, yang-shangchuan@qq.com * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.apdplat.superword.system; import org.apdplat.superword.tools.MySQLUtils; import org.apdplat.superword.tools.WordLinker; import org.apdplat.superword.tools.WordLinker.Dictionary; import org.apdplat.superword.tools.WordSources; import java.util.Map; /** * instant tip * Created by ysc on 12/11/15. */ public class InstantTip { private static final int DATA_MAX_LENGTH = 60; private static final int RECORD_COUNT_LIMIT = 10; private static final Map<String, String> WORDS_TO_DEFINITION; static { WORDS_TO_DEFINITION = MySQLUtils.getAllWordDefinitionMap(Dictionary.YOUDAO.name(), WordSources.getAll()); WORDS_TO_DEFINITION.entrySet().stream().forEach(entry->{ String definition = entry.getValue(); definition = definition.replace("<br/>", " "); definition = definition.length() > DATA_MAX_LENGTH ? definition.substring(0, DATA_MAX_LENGTH) + "..." : definition; entry.setValue(definition); }); } public static String getWordsByPrefix(String prefix){ prefix = prefix.toLowerCase().replaceAll("\\s+", ""); StringBuilder html= new StringBuilder(); html.append("<ol>\n"); int i=0; for(String word : WORDS_TO_DEFINITION.keySet()){ if(word.toLowerCase().startsWith(prefix)){ html.append("<li>").append(WordLinker.toLink(word)).append(" ").append(WORDS_TO_DEFINITION.get(word)).append("</li>\n"); if((++i) >= RECORD_COUNT_LIMIT){ break; } } } html.append("</ol>\n"); return html.toString(); } public static void main(String[] args) { System.out.println(getWordsByPrefix("lo")); } }
ysc/superword
src/main/java/org/apdplat/superword/system/InstantTip.java
Java
apache-2.0
2,545
var angular = require('angular');

angular.module('example.photos.controllers', ['example.api.services'])
    .controller('PhotosController', ['$scope', 'postService', 'postPhotoService',
        function ($scope, postService, postPhotoService) {
            $scope.photos = {};
            $scope.posts = postService.query(); // Get all posts

            $scope.posts.$promise.then(function (results) {
                // This runs for each post
                results.forEach(function (post) {
                    // This fetches all photos for each post
                    postPhotoService.query({post_id: post.id}, function (data) {
                        $scope.photos[post.id] = data;
                    });
                });
            });
        }]);
iLordTony/django-angular-example
static/app/photos/controllers.js
JavaScript
apache-2.0
733
<!DOCTYPE html>
<html lang="en" xmlns:th="http://www.thymeleaf.org"
      xmlns:layout="http://www.ultraq.net.nz/web/thymeleaf/layout"
      layout:decorator="layout/master">
<head>
    <meta charset="UTF-8"/>
    <title>Title</title>
</head>
<body>
<div layout:fragment="content">
    <h2>example view</h2>
</div>
</body>
</html>
peiyucn/spring-boot-demo
src/main/resources/templates/view/example.html
HTML
apache-2.0
336
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_60-ea) on Thu Dec 15 09:48:36 EST 2016 --> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>Uses of Class org.wildfly.swarm.mod_cluster.ModclusterProperties (Public javadocs 2016.12.1 API)</title> <meta name="date" content="2016-12-15"> <link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Class org.wildfly.swarm.mod_cluster.ModclusterProperties (Public javadocs 2016.12.1 API)"; } } catch(err) { } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../org/wildfly/swarm/mod_cluster/ModclusterProperties.html" title="class in org.wildfly.swarm.mod_cluster">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../../../../../overview-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> <div class="aboutLanguage">WildFly Swarm API, 2016.12.1</div> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/wildfly/swarm/mod_cluster/class-use/ModclusterProperties.html" target="_top">Frames</a></li> <li><a href="ModclusterProperties.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Class org.wildfly.swarm.mod_cluster.ModclusterProperties" class="title">Uses of Class<br>org.wildfly.swarm.mod_cluster.ModclusterProperties</h2> </div> <div class="classUseContainer">No usage of org.wildfly.swarm.mod_cluster.ModclusterProperties</div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../org/wildfly/swarm/mod_cluster/ModclusterProperties.html" title="class in org.wildfly.swarm.mod_cluster">Class</a></li> <li 
class="navBarCell1Rev">Use</li> <li><a href="../../../../../overview-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> <div class="aboutLanguage">WildFly Swarm API, 2016.12.1</div> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/wildfly/swarm/mod_cluster/class-use/ModclusterProperties.html" target="_top">Frames</a></li> <li><a href="ModclusterProperties.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &#169; 2016 <a href="http://www.jboss.org">JBoss by Red Hat</a>. All rights reserved.</small></p> </body> </html>
wildfly-swarm/wildfly-swarm-javadocs
2016.12.1/apidocs/org/wildfly/swarm/mod_cluster/class-use/ModclusterProperties.html
HTML
apache-2.0
4,931
package main.domain.profession;

import main.domain.Repository;

public interface ProfessionRepository extends Repository<Profession> {

}
lucasvschenatto/eventex
src/main/domain/profession/ProfessionRepository.java
Java
apache-2.0
138
#!/bin/bash

if [ "$1" = "" ]; then
    for n in {1,10}000000; do
        ./so-test $n || exit 1
    done
else
    for n in {1,10,100,200}000000; do
        ./so-test $n || exit 1
    done
fi
ivantishchenko/OpenMP-CUDA
so0/benchmark.sh
Shell
apache-2.0
192
use std::marker::PhantomData; use expression::Expression; use query_builder::{QueryBuilder, BuildQueryResult}; use query_source::{Table, Column}; use types::NativeSqlType; /// Represents that a structure can be used to to insert a new row into the database. /// Implementations can be automatically generated by /// [`#[insertable_into]`](https://github.com/sgrif/diesel/tree/master/diesel_codegen#insertable_intotable_name). /// This is automatically implemented for `&[T]`, `Vec<T>` and `&Vec<T>` for inserting more than /// one record. pub trait Insertable<T: Table> { type Columns: InsertableColumns<T>; type Values: Expression<SqlType=<Self::Columns as InsertableColumns<T>>::SqlType>; fn columns() -> Self::Columns; fn values(self) -> Self::Values; } pub trait InsertableColumns<T: Table> { type SqlType: NativeSqlType; fn names(&self) -> String; } impl<'a, T, U> Insertable<T> for &'a [U] where T: Table, &'a U: Insertable<T>, { type Columns = <&'a U as Insertable<T>>::Columns; type Values = InsertValues<'a, T, U>; fn columns() -> Self::Columns { <&'a U>::columns() } fn values(self) -> Self::Values { InsertValues { values: self, _marker: PhantomData, } } } impl<'a, T, U> Insertable<T> for &'a Vec<U> where T: Table, &'a U: Insertable<T>, { type Columns = <&'a U as Insertable<T>>::Columns; type Values = InsertValues<'a, T, U>; fn columns() -> Self::Columns { <&'a U>::columns() } fn values(self) -> Self::Values { InsertValues { values: &*self, _marker: PhantomData, } } } pub struct InsertValues<'a, T, U: 'a> { values: &'a [U], _marker: PhantomData<T>, } impl<'a, T, U> Expression for InsertValues<'a, T, U> where T: Table, &'a U: Insertable<T>, { type SqlType = <<&'a U as Insertable<T>>::Columns as InsertableColumns<T>>::SqlType; fn to_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult { self.to_insert_sql(out) } fn to_insert_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult { for (i, record) in self.values.into_iter().enumerate() { if i != 0 { out.push_sql(", "); } try!(record.values().to_insert_sql(out)); } Ok(()) } } impl<C: Column<Table=T>, T: Table> InsertableColumns<T> for C { type SqlType = <Self as Expression>::SqlType; fn names(&self) -> String { Self::name().to_string() } }
tohou/diesel
diesel/src/persistable.rs
Rust
apache-2.0
2,563
angular.module("Thesaurus") .service("NodeHandler", ['$http', 'PagingSet', function($http, PagingSet) { var nr_of_displayed_pages = 5; var items_per_page = 25; function mapData(data) { data.items.forEach(function(child) { child.loadingChildren = false; child.currentPage = 1; child.expanded = false; child.children = null; child.pages = null; child.selected = false; }); } function findChild(children, uri) { for (var i = 0; i < children.length; i++) { if (children[i].value.link === uri) { return children[i]; } } return null; } function configureNodePagingSet(node, start, totalResults) { node.pagingSet = new PagingSet(nr_of_displayed_pages); node.currentPage = Math.floor(start / items_per_page) + 1; node.numberOfPages = Math.ceil(totalResults / items_per_page); var pages = node.pagingSet.assemblePagingSet(node.currentPage, node.numberOfPages); if (pages.length > 3) { node.pages = pages; } else { node.pages = null; } } this.expand = function expand(node, itemPath) { console.log('expanding node ' + node.value.link); if (!node.hasChildren) { // ignore return; } if (!node.expanded) { // when the root is selected further nodes will be missing if (itemPath && itemPath.nodes.length) { var childRow = itemPath.nodes[0].row - 1; var onPageNumber = Math.floor(childRow / items_per_page) + 1; node.currentPage = onPageNumber; this.loadChildren(node, itemPath); } else { this.loadChildren(node); node.selected = true; } } }; this.loadChildren = function(node, itemPath) { node.loadingChildren = true; var start = (node.currentPage - 1) * items_per_page; var that = this; $http.get('/item/children', { params: { uri: node.value.link, start: start, count: items_per_page } }) .success(function(data) { console.log('setting loaded children for node ' + node.value.link); mapData(data); node.children = data.items; configureNodePagingSet(node, data.start, data.totalResults); node.loadingChildren = false; node.expanded = true; // itemPath is passed in when we are part of path if (itemPath) { // find the relevant child var children = node.children; var childPathNode = itemPath.nodes.shift(); var childNode = findChild(children, childPathNode.uri); if (itemPath.nodes.length > 0) { that.expand(childNode, itemPath); } else { // leaf if (childNode) { childNode.selected = true; // when showing leaf with further children, expand node if (childNode.hasChildren) { that.expand(childNode); } } } } }); }; this.fold = function(node) { console.log('folding on node ' + node.value.link + ' from ' + node.expanded); if (!node.hasChildren) { // ignore return; } if (node.expanded) { // clean the children node.expanded = false; node.children = null; } else { this.loadChildren(node); } }; this.gotoPage = function(node, link) { if (!link) { return false; } console.log('paging to page ' + link); node.currentPage = link; this.loadChildren(node); return false; }; } ]);
Joppe-A/rce-doorzoeker
RceDoorzoeker/App/thesaurus/NodeHandler.js
JavaScript
apache-2.0
3,344
#include "common.h"
#include "control.h"
#include "error.h"
#include "params.h"

static eeprom_params eeprom;

void params_init()
{
	// Read the parameters in the EEPROM into local memory
	EEPROM_read_block(0, (uint8_t*)&eeprom, sizeof(eeprom));
}

int params_set_gain(uint8_t gain)
{
	uint16_t amppot_set = 0, amppot_get = 0;

	// Get the potentiometer setting for the gain from the EEPROM
	if (gain == PARAM_GAIN_LOW)
		amppot_set = eeprom.gainL_amppot;
	else if (gain == PARAM_GAIN_HIGH)
		amppot_set = eeprom.gainH_amppot;
	else if (gain == PARAM_GAIN_CAL)
		amppot_set = POT_WIPERPOS_MAX;
	else
		halt(ERROR_AMPPOT_SET_FAILED);

	// Set the potentiometer setting
	pot_wiperpos_set(POT_AMP_CS_PIN_gm, amppot_set);
	amppot_get = pot_wiperpos_get(POT_AMP_CS_PIN_gm);
	if (amppot_get != amppot_set)
		halt(ERROR_AMPPOT_SET_FAILED);

	return 0;
}

void params_set_samplerate()
{
	uint16_t tovf = 0, tdiv = 0;
	uint16_t filpot_set = 0, filpot_get = 0;

	if (g_control_mode == CONTROL_MODE_STREAM)
	{
		tovf = eeprom.uart_tovf;
		tdiv = eeprom.uart_tdiv;
		filpot_set = eeprom.uart_filpot;
	}
	else if (g_control_mode == CONTROL_MODE_USB_STORE || g_control_mode == CONTROL_MODE_PORT_STORE)
	{
		tovf = eeprom.sd_tovf;
		tdiv = eeprom.sd_tdiv;
		filpot_set = eeprom.sd_filpot;
	}
	else
		halt(ERROR_SAMPLERATE_SET_WRONG_MODE);

	// set timer prescaler and period
	timer_set(&TCD0, tdiv, tovf);

	// set filter potentiometer
	pot_wiperpos_set(POT_FIL_CS_PIN_gm, filpot_set);
	filpot_get = pot_wiperpos_get(POT_FIL_CS_PIN_gm);
	if (filpot_get != filpot_set)
		halt(ERROR_FILPOT_SET_FAILED);
}

int8_t params_get_port_ovs_bits()
{
	if (eeprom.version < PARAMS_EEPROM_PORT_OVS_BITS_VER)
		return -1;
	return eeprom.port_ovs_bits;
}
aschulm/battor
fw/params.c
C
apache-2.0
1,730
package jvm.applet.ii;/* * Copyright (c) 1996-1999 Bill Venners. All Rights Reserved. * * This Java source file is part of the Interactive Illustrations Web * Site, which is delivered in the applets directory of the CD-ROM * that accompanies the book "Inside the Java 2 Virtual Machine" by Bill * Venners, published by McGraw-Hill, 1999, ISBN: 0-07-135093-4. This * source file is provided for evaluation purposes only, but you can * redistribute it under certain conditions, described in the full * copyright notice below. * * Full Copyright Notice: * * All the web pages and Java applets delivered in the applets * directory of the CD-ROM, consisting of ".html," ".gif," ".class," * and ".java" files, are copyrighted (c) 1996-1999 by Bill * Venners, and all rights are reserved. This material may be copied * and placed on any commercial or non-commercial web server on any * network (including the internet) provided that the following * guidelines are followed: * * a. All the web pages and Java Applets (".html," ".gif," ".class," * and ".java" files), including the source code, that are delivered * in the applets directory of the CD-ROM that * accompanies the book must be published together on the same web * site. * * b. All the web pages and Java Applets (".html," ".gif," ".class," * and ".java" files) must be published "as is" and may not be altered * in any way. * * c. All use and access to this web site must be free, and no fees * can be charged to view these materials, unless express written * permission is obtained from Bill Venners. * * d. The web pages and Java Applets may not be distributed on any * media, other than a web server on a network, and may not accompany * any book or publication. * * BILL VENNERS MAKES NO REPRESENTATIONS OR WARRANTIES ABOUT THE * SUITABILITY OF THE SOFTWARE, EITHER EXPRESS OR IMPLIED, INCLUDING * BUT NOT LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY, * FITNESS FOR PARTICULAR PURPOSE, OR NON-INFRINGEMENT. BILL VENNERS * SHALL NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY A LICENSEE AS A * RESULT OF USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS * DERIVATIVES. */ import java.awt.*; /** * This class provides a Label that allows its background color * to be set. * * @author Bill Venners */ class ColoredLabel extends Panel { private Label theLabel; ColoredLabel(String label, int alignment, Color color) { setLayout(new GridLayout(1, 1)); setBackground(color); theLabel = new Label(label, alignment); add(theLabel); } public void setLabelText(String s) { theLabel.setText(s); } public Insets insets() { return new Insets(0, 0, 0, 0); } }
whyDK37/pinenut
java-core/jdk/src/main/java/jvm/applet/ii/ColoredLabel.java
Java
apache-2.0
2,667
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 12 22:25:38 2017

@author: sitibanc
"""
import math
import numpy as np


def entropy(p1, n1):            # positive, negative
    if p1 == 0 and n1 == 0:
        return 1
    value = 0
    pp = p1 / (p1 + n1)
    pn = n1 / (p1 + n1)
    if pp > 0:
        value -= pp * math.log2(pp)
    if pn > 0:
        value -= pn * math.log2(pn)
    return value


def infoGain(p1, n1, p2, n2):
    total = p1 + n1 + p2 + n2
    s1 = p1 + n1
    s2 = p2 + n2
    return entropy(p1 + p2, n1 + n2) - s1 / total * entropy(p1, n1) - s2 / total * entropy(p2, n2)


def buildDT(feature, target, positive, negative):
    ### node structure (dictionary)
    # node.leaf = 0/1
    # node.selectf = feature index
    # node.threshold = some value (regards feature value)
    # node.child = index of childs (left, right)
    ###
    # root node
    node = dict()
    node['data'] = range(len(target))
    ### tree structure (list)
    tree = []
    tree.append(node)
    ###
    i = 0
    while i < len(tree):
        idx = tree[i]['data']
        # Check whether all labels in this node's data are identical
        if sum(target[idx] == negative) == len(idx):        # all negative
            tree[i]['leaf'] = 1                             # is leaf node
            tree[i]['decision'] = negative
        elif sum(target[idx] == positive) == len(idx):      # all positive
            tree[i]['leaf'] = 1
            tree[i]['decision'] = positive
        # Try to find the best split
        else:
            bestIG = 0
            # Collect the distinct feature values in this node to decide candidate thresholds
            for j in range(feature.shape[1]):               # feature.shape returns a (rows, columns) tuple
                pool = list(set(feature[idx, j]))           # use a set to drop duplicate values
                for k in range(len(pool) - 1):
                    threshold = (pool[k] + pool[k + 1]) / 2
                    G1 = []     # left subtree
                    G2 = []     # right subtree
                    for t in idx:
                        if feature[t, j] <= threshold:
                            G1.append(t)
                        else:
                            G2.append(t)
                    # Calculate infoGain
                    thisIG = infoGain(sum(target[G1] == positive), sum(target[G1] == negative),
                                      sum(target[G2] == positive), sum(target[G2] == negative))
                    # Update bestIG
                    if thisIG > bestIG:
                        bestIG = thisIG
                        bestG1 = G1
                        bestG2 = G2
                        bestThreshold = threshold
                        bestf = j
            if bestIG > 0:
                tree[i]['leaf'] = 0
                tree[i]['selectf'] = bestf
                tree[i]['threshold'] = bestThreshold
                tree[i]['child'] = [len(tree), len(tree) + 1]
                # append the left child first
                node = dict()
                node['data'] = bestG1
                tree.append(node)
                # then the right child
                node = dict()
                node['data'] = bestG2
                tree.append(node)
            # no better split found
            else:
                tree[i]['leaf'] = 1
                # decide by majority vote
                if sum(target[idx] == positive) > sum(target[idx] == negative):
                    tree[i]['decision'] = positive
                else:
                    tree[i]['decision'] = negative
        i += 1
    return tree


data = np.loadtxt('PlayTennis.txt', usecols=range(5), dtype=int)
feature = data[:, 0:4]
target = data[:, 4] - 1


def DT(feature, target):
    node = dict()
    node['data'] = range(len(target))
    Tree = []
    Tree.append(node)
    t = 0
    while(t < len(Tree)):
        idx = Tree[t]['data']
        if(sum(target[idx]) == 0):
            print(idx)
            Tree[t]['leaf'] = 1
            Tree[t]['decision'] = 0
        elif(sum(target[idx]) == len(idx)):
            print(idx)
            Tree[t]['leaf'] = 1
            Tree[t]['decision'] = 1
        else:
            bestIG = 0
            for i in range(feature.shape[1]):
                pool = list(set(feature[idx, i]))
                for j in range(len(pool) - 1):
                    thres = (pool[j] + pool[j + 1]) / 2
                    G1 = []
                    G2 = []
                    for k in idx:
                        if(feature[k, i] <= thres):
                            G1.append(k)
                        else:
                            G2.append(k)
                    thisIG = infoGain(sum(target[G1] == 1), sum(target[G1] == 0),
                                      sum(target[G2] == 1), sum(target[G2] == 0))
                    if(thisIG > bestIG):
                        bestIG = thisIG
                        bestG1 = G1
                        bestG2 = G2
                        bestthres = thres
                        bestf = i
            if(bestIG > 0):
                Tree[t]['leaf'] = 0
                Tree[t]['selectf'] = bestf
                Tree[t]['threshold'] = bestthres
                Tree[t]['child'] = [len(Tree), len(Tree) + 1]
                node = dict()
                node['data'] = bestG1
                Tree.append(node)
                node = dict()
                node['data'] = bestG2
                Tree.append(node)
            else:
                Tree[t]['leaf'] = 1
                if(sum(target[idx] == 1) > sum(target[idx] == 0)):
                    Tree[t]['decision'] = 1
                else:
                    Tree[t]['decision'] = 0
        t += 1
    return Tree


Tree = buildDT(feature, target, 1, 0)
#Tree = DT(feature, target)
for i in range(len(target)):
    test_feature = feature[i, :]
    now = 0
    while(Tree[now]['leaf'] == 0):
        bestf = Tree[now]['selectf']
        thres = Tree[now]['threshold']
        if(test_feature[bestf] <= thres):
            now = Tree[now]['child'][0]
        else:
            now = Tree[now]['child'][1]
    print(target[i], Tree[now]['decision'])
SitiBanc/1061_NCTU_IOMDS
1011/Course Material/temp.py
Python
apache-2.0
5,952
/**
 * BIO approach: connections are created in a one-to-one manner (one per client).
 */
package com.performance.net.bio.demo1;
nero520/performance
java-net/src/main/java/com/performance/net/bio/demo1/package-info.java
Java
apache-2.0
86
using System.Collections.Generic;
using System.Linq;

namespace Adfc.Msbuild
{
    public class ErrorCodes
    {
        /// <summary>Configuration JPath wasn't found in document.</summary>
        public static readonly ErrorCodes Adfc0001 = new ErrorCodes("ADFC0001", ErrorSeverity.Warning);

        /// <summary>File specified in configuration wasn't found.</summary>
        public static readonly ErrorCodes Adfc0002 = new ErrorCodes("ADFC0002", ErrorSeverity.Warning);

        /// <summary>Unable to determine JSON artefact type.</summary>
        public static readonly ErrorCodes Adfc0003 = new ErrorCodes("ADFC0003", ErrorSeverity.Error);

        /// <summary>Unable to parse JSON.</summary>
        public static readonly ErrorCodes Adfc0004 = new ErrorCodes("ADFC0004", ErrorSeverity.Error);

        /// <summary>Failed JSON schema validation.</summary>
        public static readonly ErrorCodes Adfc0005 = new ErrorCodes("ADFC0005", ErrorSeverity.Error);

        /// <summary>Failed implicit JSON schema validation.</summary>
        public static readonly ErrorCodes Adfc0006 = new ErrorCodes("ADFC0006", ErrorSeverity.Error);

        /// <summary>Error during JSON schema validation.</summary>
        public static readonly ErrorCodes Adfc0007 = new ErrorCodes("ADFC0007", ErrorSeverity.Error);

        /// <summary>Artefact name is not unique.</summary>
        public static readonly ErrorCodes Adfc0008 = new ErrorCodes("ADFC0008", ErrorSeverity.Error);

        public static IReadOnlyDictionary<string, ErrorCodes> All =
            new[] { Adfc0001, Adfc0002, Adfc0003, Adfc0004, Adfc0005, Adfc0006, Adfc0007, Adfc0008 }
            .ToDictionary(e => e.Code, e => e);

        public string Code { get; }

        public ErrorSeverity DefaultSeverity { get; }

        public ErrorCodes(string code, ErrorSeverity severity)
        {
            Code = code;
            DefaultSeverity = severity;
        }
    }
}
ADFCommunity/Adfc.Msbuild
src/Adfc.Msbuild/ErrorCodes.cs
C#
apache-2.0
1,945
.PHONY: rel stagedevrel deps test

all: deps compile

compile:
	./rebar compile

deps:
	./rebar get-deps

clean:
	./rebar clean

distclean: clean
	./rebar delete-deps

test:
	./rebar compile eunit

##
## Doc targets
##
docs:
	./rebar doc

APPS = kernel stdlib sasl erts ssl tools os_mon runtime_tools crypto inets \
	xmerl webtool snmp public_key mnesia eunit
COMBO_PLT = $(HOME)/.riak_combo_dialyzer_plt

check_plt: compile
	dialyzer --check_plt --plt $(COMBO_PLT) --apps $(APPS)

build_plt: compile
	dialyzer --build_plt --output_plt $(COMBO_PLT) --apps $(APPS)

dialyzer: compile
	@echo
	@echo Use "'make check_plt'" to check PLT prior to using this target.
	@echo Use "'make build_plt'" to build PLT prior to using this target.
	@echo
	@sleep 1
	dialyzer -Wunmatched_returns --plt $(COMBO_PLT) ebin | \
		fgrep -v -f ./dialyzer.ignore-warnings

cleanplt:
	@echo
	@echo "Are you sure? It takes about 1/2 hour to re-build."
	@echo Deleting $(COMBO_PLT) in 5 seconds.
	@echo
	sleep 5
	rm $(COMBO_PLT)
DeadZen/lager
Makefile
Makefile
apache-2.0
1,009
<div class="bg-light lter b-b wrapper-md"> <h1 class="m-n font-thin h3">Post</h1> </div> <div class="wrapper-md"> <div class="row"> <div class="col-sm-9"> <div class="blog-post"> <div class="panel"> <div> <img src="static/webapp/img/c0.jpg" class="img-full"> </div> <div class="wrapper-lg"> <h2 class="m-t-none"><a href>7 things you need to know about the flat design</a></h2> <div> <p>Lorem ipsum dolor sit amet, consectetur adipiscing elit. Morbi id neque quam. Aliquam sollicitudin venenatis ipsum ac feugiat. Vestibulum ullamcorper sodales nisi nec condimentum. Mauris convallis mauris at pellentesque volutpat. <br><br> Phasellus at ultricies neque, quis malesuada augue. Donec eleifend condimentum nisl eu consectetur. Integer eleifend, nisl venenatis consequat iaculis, lectus arcu malesuada sem, dapibus porta quam lacus eu neque.</p> </div> <div class="line line-lg b-b b-light"></div> <div class="text-muted"> <i class="fa fa-user text-muted"></i> by <a href class="m-r-sm">Admin</a> <i class="fa fa-clock-o text-muted"></i> Feb 20, 2013 <a href class="m-l-sm"><i class="fa fa-comment-o text-muted"></i> 2 comments</a> </div> </div> </div> <div class="panel"> <div class="wrapper-lg"> <h2 class="m-t-none"><a href>Bootstrap 3: What you need to know</a></h2> <div> <p>Lorem ipsum dolor sit amet, consectetur adipiscing elit. Morbi id neque quam. Aliquam sollicitudin venenatis ipsum ac feugiat. Vestibulum ullamcorper sodales nisi nec condimentum. Mauris convallis mauris at pellentesque volutpat. </p> <h3>Html5 and CSS3</h3> <p> Phasellus at ultricies neque, quis malesuada augue. Donec eleifend condimentum nisl eu consectetur. Integer eleifend, nisl venenatis consequat iaculis, lectus arcu malesuada sem, dapibus porta quam lacus eu neque.</p> </div> <div class="line line-lg b-b b-light"></div> <div class="text-muted"> <i class="fa fa-user text-muted"></i> by <a href class="m-r-sm">Admin</a> <i class="fa fa-clock-o text-muted"></i> Feb 15, 2013 <a href class="m-l-sm"><i class="fa fa-comment-o text-muted"></i> 4 comments</a> </div> </div> </div> </div> <div class="text-center m-t-lg m-b-lg"> <ul class="pagination pagination-md"> <li><a href><i class="fa fa-chevron-left"></i></a></li> <li class="active"><a href>1</a></li> <li><a href>2</a></li> <li><a href>3</a></li> <li><a href>4</a></li> <li><a href>5</a></li> <li><a href><i class="fa fa-chevron-right"></i></a></li> </ul> </div> <h4 class="m-t-lg m-b">3 Comments</h4> <div> <div> <a class="pull-left thumb-sm"> <img src="static/webapp/img/a0.jpg" class="img-circle"> </a> <div class="m-l-xxl m-b"> <div> <a href><strong>John smith</strong></a> <label class="label bg-info m-l-xs">Editor</label> <span class="text-muted text-xs block m-t-xs"> 24 minutes ago </span> </div> <div class="m-t-sm">Lorem ipsum dolor sit amet, consectetur adipiscing elit. Morbi id neque quam. Aliquam sollicitudin venenatis ipsum ac feugiat. 
Vestibulum.</div> </div> </div> <div class="m-l-xxl"> <a class="pull-left thumb-sm"> <img src="static/webapp/img/a1.jpg" class="img-circle"> </a> <div class="m-l-xxl m-b"> <div> <a href><strong>John smith</strong></a> <label class="label bg-dark m-l-xs">Admin</label> <span class="text-muted text-xs block m-t-xs"> 26 minutes ago </span> </div> <div class="m-t-sm">Lorem ipsum dolor sit amet, consecteter adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet.</div> </div> </div> <div> <a class="pull-left thumb-sm"> <img src="static/webapp/img/a2.jpg" class="img-circle"> </a> <div class="m-l-xxl m-b"> <div> <a href><strong>John smith</strong></a> <label class="label bg-dark m-l-xs">Admin</label> <span class="text-muted text-xs block m-t-xs"> 26 minutes ago </span> </div> <blockquote class="m-t"> <p>Lorem ipsum dolor sit amet, consectetur adipiscing elit. Integer posuere erat a ante.</p> <small>Someone famous in <cite title="Source Title">Source Title</cite></small> </blockquote> <div class="m-t-sm">Lorem ipsum dolor sit amet, consecteter adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet.</div> </div> </div> </div> <h4 class="m-t-lg m-b">Leave a comment</h4> <form> <div class="form-group pull-in clearfix"> <div class="col-sm-6"> <label>Your name</label> <input type="text" class="form-control" placeholder="Name"> </div> <div class="col-sm-6"> <label >Email</label> <input type="email" class="form-control" placeholder="Enter email"> </div> </div> <div class="form-group"> <label>Comment</label> <textarea class="form-control" rows="5" placeholder="Type your comment"></textarea> </div> <div class="form-group"> <button type="submit" class="btn btn-success">Submit comment</button> </div> </form> </div> <div class="col-sm-3"> <h5 class="font-bold">Categories</h5> <ul class="list-group"> <li class="list-group-item"> <a href> <span class="badge pull-right">15</span> Photograph </a> </li> <li class="list-group-item"> <a href> <span class="badge pull-right">30</span> Life style </a> </li> <li class="list-group-item"> <a href> <span class="badge pull-right">9</span> Food </a> </li> <li class="list-group-item"> <a href> <span class="badge pull-right">4</span> Travel world </a> </li> </ul> <div class="tags m-b-lg l-h-2x"> <a href class="label bg-primary">Bootstrap</a> <a href class="label bg-primary">Application</a> <a href class="label bg-primary">Apple</a> <a href class="label bg-primary">Less</a> <a href class="label bg-primary">Theme</a> <a href class="label bg-primary">Wordpress</a> </div> <h5 class="font-bold">Recent Posts</h5> <div> <div> <a class="pull-left thumb thumb-wrapper m-r"> <img src="static/webapp/img/b0.jpg"> </a> <div class="clear"> <a href class="font-semibold text-ellipsis">Bootstrap 3: What you need to know</a> <div class="text-xs block m-t-xs"><a href>Travel</a> 2 minutes ago</div> </div> </div> <div class="line"></div> <div> <a class="pull-left thumb thumb-wrapper m-r"> <img src="static/webapp/img/b1.jpg"> </a> <div class="clear"> <a href class="font-semibold text-ellipsis">Lorem ipsum dolor sit amet, consectetur adipiscing elit.</a> <div class="text-xs block m-t-xs"><a href>Design</a> 2 hours ago</div> </div> </div> <div class="line"></div> <div> <a class="pull-left thumb thumb-wrapper m-r"> <img src="static/webapp/img/b2.jpg"> </a> <div class="clear"> <a href class="font-semibold text-ellipsis">Sed diam nonummy nibh euismod tincidunt ut laoreet</a> <div class="text-xs block m-t-xs"><a href>MFC</a> 1 week ago</div> </div> </div> </div> </div> </div> </div>
yinziyang/mywork
static/webapp/tpl/page_post.html
HTML
apache-2.0
8,695
/** @file
 * Copyright (c) 2016, ARM Limited or its affiliates. All rights reserved.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
**/

/** This file is common to all test cases and Val layer of the Suite */

#ifndef __SBSA_AVS_COMMON_H__
#define __SBSA_AVS_COMMON_H__

#define TEST_NAME_HELPER(x,y)  c##x##y
#define TEST_NAME(x,y)         TEST_NAME_HELPER(x,y)

#define AVS_PE_TEST_NUM_BASE       0
#define AVS_GIC_TEST_NUM_BASE      20
#define AVS_TIMER_TEST_NUM_BASE    30
#define AVS_WD_TEST_NUM_BASE       40
#define AVS_PCIE_TEST_NUM_BASE     50
#define AVS_WAKEUP_TEST_NUM_BASE   70
#define AVS_PER_TEST_NUM_BASE      80
#define AVS_SMMU_TEST_NUM_BASE     90
#define AVS_SECURE_TEST_NUM_BASE   900

#define STATE_BIT    28
#define STATE_MASK   0xF

/* These are the states a test can be in */
#define TEST_START_VAL     0x1
#define TEST_END_VAL       0x2
#define TEST_PASS_VAL      0x4
#define TEST_FAIL_VAL      0x8
#define TEST_SKIP_VAL      0x9
#define TEST_PENDING_VAL   0xA

#define CPU_NUM_BIT    32
#define CPU_NUM_MASK   0xFFFFFFFF

#define LEVEL_BIT    24
#define LEVEL_MASK   0xF

#define TEST_NUM_BIT    16
#define TEST_NUM_MASK   0xFF

/* TEST start and Stop defines */
#define SBSA_AVS_START(level, test_num)  (((TEST_START_VAL) << STATE_BIT) | ((level) << LEVEL_BIT) | ((test_num) << TEST_NUM_BIT))
#define SBSA_AVS_END(level, test_num)    (((TEST_END_VAL) << STATE_BIT) | ((level) << LEVEL_BIT) | ((test_num) << TEST_NUM_BIT))

/* TEST Result defines */
#define RESULT_PASS(level, test_num, status)  (((TEST_PASS_VAL) << STATE_BIT) | ((level) << LEVEL_BIT) | ((test_num) << TEST_NUM_BIT) | (status))
#define RESULT_FAIL(level, test_num, status)  (((TEST_FAIL_VAL) << STATE_BIT) | ((level) << LEVEL_BIT) | ((test_num) << TEST_NUM_BIT) | (status))
#define RESULT_SKIP(level, test_num, status)  (((TEST_SKIP_VAL) << STATE_BIT) | ((level) << LEVEL_BIT) | ((test_num) << TEST_NUM_BIT) | (status))
#define RESULT_PENDING(level, test_num)       (((TEST_PENDING_VAL) << STATE_BIT) | \
                                               ((level) << LEVEL_BIT) | ((test_num) << TEST_NUM_BIT))

#define IS_TEST_START(value)      (((value >> STATE_BIT) & (STATE_MASK)) == TEST_START_VAL)
#define IS_TEST_END(value)        (((value >> STATE_BIT) & (STATE_MASK)) == TEST_END_VAL)
#define IS_RESULT_PENDING(value)  (((value >> STATE_BIT) & (STATE_MASK)) == TEST_PENDING_VAL)
#define IS_TEST_PASS(value)       (((value >> STATE_BIT) & (STATE_MASK)) == TEST_PASS_VAL)
#define IS_TEST_FAIL(value)       (((value >> STATE_BIT) & (STATE_MASK)) == TEST_FAIL_VAL)
#define IS_TEST_SKIP(value)       (((value >> STATE_BIT) & (STATE_MASK)) == TEST_SKIP_VAL)
#define IS_TEST_FAIL_SKIP(value)  ((IS_TEST_FAIL(value)) || (IS_TEST_SKIP(value)))

uint32_t val_mmio_read(addr_t addr);
void     val_mmio_write(addr_t addr, uint32_t data);

uint32_t val_initialize_test(uint32_t test_num, char8_t *desc, uint32_t num_pe, uint32_t level);
uint32_t val_check_for_error(uint32_t test_num, uint32_t num_pe);
void     val_run_test_payload(uint32_t test_num, uint32_t num_pe, void (*payload)(void), uint64_t test_input);
void     val_data_cache_ops_by_va(addr_t addr, uint32_t type);

#endif
anurag-mishra28/sbsa-acs
val/include/sbsa_avs_common.h
C
apache-2.0
3,638
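The header above packs a test's state, compliance level, test number and status into a single 32-bit word through shift-and-mask macros. The standalone C++ sketch below is illustrative only and is not part of the suite: the constants are copied from the header, while the chosen level, test number and the helper itself are arbitrary. It mirrors RESULT_PASS and IS_TEST_PASS to show how such a word is built and decoded.

#include <cstdint>
#include <cstdio>

// Constants copied from sbsa_avs_common.h above.
static const uint32_t STATE_BIT     = 28;
static const uint32_t STATE_MASK    = 0xF;
static const uint32_t LEVEL_BIT     = 24;
static const uint32_t LEVEL_MASK    = 0xF;
static const uint32_t TEST_NUM_BIT  = 16;
static const uint32_t TEST_NUM_MASK = 0xFF;
static const uint32_t TEST_PASS_VAL = 0x4;

// Same bit layout as RESULT_PASS(level, test_num, status) in the header.
static uint32_t result_pass(uint32_t level, uint32_t test_num, uint32_t status)
{
    return (TEST_PASS_VAL << STATE_BIT) | (level << LEVEL_BIT) |
           (test_num << TEST_NUM_BIT) | status;
}

int main()
{
    // Arbitrary example values: compliance level 3, test number 31, status 0.
    uint32_t word = result_pass(3, 31, 0);

    bool passed       = ((word >> STATE_BIT) & STATE_MASK) == TEST_PASS_VAL; // IS_TEST_PASS
    uint32_t level    = (word >> LEVEL_BIT) & LEVEL_MASK;
    uint32_t test_num = (word >> TEST_NUM_BIT) & TEST_NUM_MASK;

    std::printf("pass=%d level=%u test=%u raw=0x%08x\n",
                passed ? 1 : 0, (unsigned)level, (unsigned)test_num, (unsigned)word);
    return 0;
}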
---
title: werf helm secret file
permalink: reference/cli/werf_helm_secret_file.html
---

{% include /reference/cli/werf_helm_secret_file.md %}
flant/dapp
docs/documentation/pages_en/reference/cli/werf_helm_secret_file.md
Markdown
apache-2.0
144
package cn.dcs.leef.leetcode;

public class CountAndSay {

    public static String countAndSay(int n) {
        String res = "1";
        while (--n > 0) {
            StringBuilder builder = new StringBuilder();
            String str = res;
            char pre = str.charAt(0);
            int count = 1;
            char p = pre;
            for (int i = 1; i < str.length(); i++) {
                p = str.charAt(i);
                if (p == pre) {
                    count++;
                } else {
                    builder.append(count + "" + pre);
                    count = 1;
                    pre = p;
                }
            }
            builder.append(count + "" + p);
            res = builder.toString();
        }
        return res;
    }

    public static void main(String[] args) {
        System.out.println(countAndSay(5));
    }

    public static String countAndSay1(int n) {
        // Read the problem statement carefully: two nested loops, and n is only used as a counter.
        String res = "1";
        while (--n > 0) {
            StringBuilder sb = new StringBuilder();
            char[] prev = res.toCharArray(); // working on a char array is a bit cleaner here
            for (int i = 0; i < prev.length; i++) {
                int count = 1; // initialize current count as 1
                while (i + 1 < prev.length && prev[i] == prev[i + 1]) {
                    count++; // search for same char
                    i++;
                }
                sb.append(count).append(prev[i]);
            }
            res = sb.toString();
        }
        return res;
    }
}
fitzlee/JavaAlgorithmsDataStructure
src/cn/dcs/leef/leetcode/CountAndSay.java
Java
apache-2.0
1,287
package ru.job4j.priorityqueue;

public class Task {
    private String desc;
    private int priority;

    public Task(String desc, int priority) {
        this.desc = desc;
        this.priority = priority;
    }

    public String getDesc() {
        return desc;
    }

    public int getPriority() {
        return priority;
    }
}
ilya-moskovtsev/imoskovtsev
intern/chapter_003_collections_light/src/main/java/ru/job4j/priorityqueue/Task.java
Java
apache-2.0
339
# Cercospora bernardiae F. Stevens SPECIES

#### Status
ACCEPTED

#### According to
Index Fungorum

#### Published in
null

#### Original name
Cercospora bernardiae F. Stevens

### Remarks
null
mdoering/backbone
life/Fungi/Ascomycota/Dothideomycetes/Capnodiales/Mycosphaerellaceae/Cercospora/Cercospora bernardiae/README.md
Markdown
apache-2.0
193
/*
 * Copyright 2017 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kie.workbench.common.stunner.core.client.session.command.impl;

import javax.enterprise.context.Dependent;
import javax.inject.Inject;

import org.kie.workbench.common.stunner.core.client.canvas.util.CanvasFileExport;

@Dependent
public class ExportToPdfSessionCommand extends AbstractExportSessionCommand {

    private final CanvasFileExport canvasFileExport;

    protected ExportToPdfSessionCommand() {
        this(null);
    }

    @Inject
    public ExportToPdfSessionCommand(final CanvasFileExport canvasFileExport) {
        super(true);
        this.canvasFileExport = canvasFileExport;
    }

    @Override
    protected void export(final String fileName) {
        canvasFileExport.exportToPdf(getSession().getCanvasHandler(), fileName);
    }
}
ederign/kie-wb-common
kie-wb-common-stunner/kie-wb-common-stunner-core/kie-wb-common-stunner-commons/kie-wb-common-stunner-client-common/src/main/java/org/kie/workbench/common/stunner/core/client/session/command/impl/ExportToPdfSessionCommand.java
Java
apache-2.0
1,428
/*
 * Copyright 2016 Utkin Dmitry <loentar@gmail.com>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * This file is part of ngrest-db: http://github.com/loentar/ngrest-db
 */

#ifndef NGREST_QUERYIMPL_H
#define NGREST_QUERYIMPL_H

#include <string>

namespace ngrest {

class QueryImpl
{
public:
    virtual ~QueryImpl();

    virtual void reset() = 0;

    inline bool query(const std::string& query)
    {
        prepare(query);
        return next();
    }

    virtual void prepare(const std::string& query) = 0;

    virtual void bindNull(int arg) = 0;
    virtual void bindBool(int arg, bool value) = 0;
    virtual void bindInt(int arg, int value) = 0;
    virtual void bindBigInt(int arg, int64_t value) = 0;
    virtual void bindFloat(int arg, double value) = 0;
    virtual void bindString(int arg, const std::string& value) = 0;

    virtual bool next() = 0;

    virtual bool resultIsNull(int column) = 0;
    virtual bool resultBool(int column) = 0;
    virtual int resultInt(int column) = 0;
    virtual int64_t resultBigInt(int column) = 0;
    virtual double resultFloat(int column) = 0;
    virtual void resultString(int column, std::string& value) = 0;

    virtual int64_t lastInsertId() = 0;
};

} // namespace ngrest

#endif // NGREST_QUERYIMPL_H
loentar/ngrest-db
common/src/QueryImpl.h
C
apache-2.0
1,798
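QueryImpl above is the backend-neutral query interface of ngrest-db: prepare a statement, bind positional arguments, step through rows with next(), and read columns with the result*() accessors, with query() as a prepare-plus-first-next convenience. The fragment below is only a hypothetical caller-side sketch against that interface; the function name, the "users" table, the '?' placeholder syntax and the bind index are illustrative assumptions, and a concrete driver object would have to come from a backend elsewhere in ngrest-db.

#include <cstdint>
#include <string>

#include "QueryImpl.h"  // the interface shown above

// Hypothetical helper: look up a user's name by id through any QueryImpl backend.
std::string findUserName(ngrest::QueryImpl& q, int64_t userId)
{
    q.prepare("SELECT name FROM users WHERE id = ?");  // placeholder syntax is driver-dependent
    q.bindBigInt(1, userId);                           // bind index base (0 or 1) also depends on the driver

    if (!q.next())                                     // no row returned
        return std::string();

    std::string name;
    if (!q.resultIsNull(0))
        q.resultString(0, name);                       // read column 0 of the current row
    return name;
}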
/**
 * Created by Ali on 4/08/2015.
 */
"use strict";

const Hoek = require("hoek"),
    Joi = require("joi"),
    _get = require("lodash/get"),
    _set = require("lodash/set"),
    model_factory = require("../../../model");

const options_schema = Joi
    .object({
        enabled: Joi
            .boolean()
            .default(false)
    })
    .required();

/**
 *
 * @param server
 * @param options
 * @param next
 */
const resource_set_plugin = async (server, options) => {
    return new Promise(async (resolve, reject) => {
        let results = Joi.validate(options, options_schema);

        try {
            Hoek.assert(!results.error, results.error);
        } catch (err) {
            return reject(err);
        }

        let resource_set_settings = results.value;

        server.ext("onPreHandler", async (request, h) => {
            let routeEnabled = _get(request, "route.settings.plugins.resource_set.enabled",
                resource_set_settings.enabled || false);

            request.plugins.valde_resource_set = {};

            if (routeEnabled === true) {
                try {
                    let resource_set = await model_factory.create_model_for_rest_request(request, h);
                    _set(request, "plugins.valde_resource_set", resource_set);
                    return h.continue;
                } catch (err) {
                    _set(request, "plugins.valde_resource_set", {});
                    return h.continue;
                }
            } else {
                return h.continue;
            }
        });

        return resolve();
    });
};

module.exports.plugin = {
    register: resource_set_plugin,
    pkg: require("./package.json")
};
gaaiatinc/valde-hapi
lib/server/plugins/resource_set/index.js
JavaScript
apache-2.0
1,694
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// aux-build:cci_class_4.rs

extern crate cci_class_4;
use cci_class_4::kitties::cat;

pub fn main() {
    let mut nyan = cat(0u, 2, ~"nyan");
    nyan.eat();
    assert!((!nyan.eat()));
    for _ in range(1u, 10u) {
        nyan.speak();
    };
    assert!((nyan.eat()));
}
pythonesque/rust
src/test/run-pass/classes-cross-crate.rs
Rust
apache-2.0
734
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: GenericRefreshProtocol.proto package org.apache.hadoop.ipc.proto; public final class GenericRefreshProtocolProtos { private GenericRefreshProtocolProtos() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface GenericRefreshRequestProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.common.GenericRefreshRequestProto) com.google.protobuf.MessageOrBuilder { /** * <code>optional string identifier = 1;</code> */ boolean hasIdentifier(); /** * <code>optional string identifier = 1;</code> */ java.lang.String getIdentifier(); /** * <code>optional string identifier = 1;</code> */ com.google.protobuf.ByteString getIdentifierBytes(); /** * <code>repeated string args = 2;</code> */ com.google.protobuf.ProtocolStringList getArgsList(); /** * <code>repeated string args = 2;</code> */ int getArgsCount(); /** * <code>repeated string args = 2;</code> */ java.lang.String getArgs(int index); /** * <code>repeated string args = 2;</code> */ com.google.protobuf.ByteString getArgsBytes(int index); } /** * Protobuf type {@code hadoop.common.GenericRefreshRequestProto} * * <pre> ** * Refresh request. * </pre> */ public static final class GenericRefreshRequestProto extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:hadoop.common.GenericRefreshRequestProto) GenericRefreshRequestProtoOrBuilder { // Use GenericRefreshRequestProto.newBuilder() to construct. private GenericRefreshRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private GenericRefreshRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final GenericRefreshRequestProto defaultInstance; public static GenericRefreshRequestProto getDefaultInstance() { return defaultInstance; } public GenericRefreshRequestProto getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GenericRefreshRequestProto( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; identifier_ = bs; break; } case 18: { com.google.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { args_ = new com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000002; } args_.add(bs); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { args_ = 
args_.getUnmodifiableView(); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.internal_static_hadoop_common_GenericRefreshRequestProto_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.internal_static_hadoop_common_GenericRefreshRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto.class, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto.Builder.class); } public static com.google.protobuf.Parser<GenericRefreshRequestProto> PARSER = new com.google.protobuf.AbstractParser<GenericRefreshRequestProto>() { public GenericRefreshRequestProto parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new GenericRefreshRequestProto(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<GenericRefreshRequestProto> getParserForType() { return PARSER; } private int bitField0_; public static final int IDENTIFIER_FIELD_NUMBER = 1; private java.lang.Object identifier_; /** * <code>optional string identifier = 1;</code> */ public boolean hasIdentifier() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string identifier = 1;</code> */ public java.lang.String getIdentifier() { java.lang.Object ref = identifier_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { identifier_ = s; } return s; } } /** * <code>optional string identifier = 1;</code> */ public com.google.protobuf.ByteString getIdentifierBytes() { java.lang.Object ref = identifier_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); identifier_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ARGS_FIELD_NUMBER = 2; private com.google.protobuf.LazyStringList args_; /** * <code>repeated string args = 2;</code> */ public com.google.protobuf.ProtocolStringList getArgsList() { return args_; } /** * <code>repeated string args = 2;</code> */ public int getArgsCount() { return args_.size(); } /** * <code>repeated string args = 2;</code> */ public java.lang.String getArgs(int index) { return args_.get(index); } /** * <code>repeated string args = 2;</code> */ public com.google.protobuf.ByteString getArgsBytes(int index) { return args_.getByteString(index); } private void initFields() { identifier_ = ""; args_ = com.google.protobuf.LazyStringArrayList.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getIdentifierBytes()); } for (int i = 0; i < args_.size(); i++) { 
output.writeBytes(2, args_.getByteString(i)); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getIdentifierBytes()); } { int dataSize = 0; for (int i = 0; i < args_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream .computeBytesSizeNoTag(args_.getByteString(i)); } size += dataSize; size += 1 * getArgsList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto)) { return super.equals(obj); } org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto other = (org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto) obj; boolean result = true; result = result && (hasIdentifier() == other.hasIdentifier()); if (hasIdentifier()) { result = result && getIdentifier() .equals(other.getIdentifier()); } result = result && getArgsList() .equals(other.getArgsList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasIdentifier()) { hash = (37 * hash) + IDENTIFIER_FIELD_NUMBER; hash = (53 * hash) + getIdentifier().hashCode(); } if (getArgsCount() > 0) { hash = (37 * hash) + ARGS_FIELD_NUMBER; hash = (53 * hash) + getArgsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto parseFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.common.GenericRefreshRequestProto} * * <pre> ** * Refresh request. 
* </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hadoop.common.GenericRefreshRequestProto) org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProtoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.internal_static_hadoop_common_GenericRefreshRequestProto_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.internal_static_hadoop_common_GenericRefreshRequestProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto.class, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto.Builder.class); } // Construct using org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); identifier_ = ""; bitField0_ = (bitField0_ & ~0x00000001); args_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.internal_static_hadoop_common_GenericRefreshRequestProto_descriptor; } public org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto getDefaultInstanceForType() { return org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto.getDefaultInstance(); } public org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto build() { org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto buildPartial() { org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto result = new org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.identifier_ = identifier_; if (((bitField0_ & 0x00000002) == 0x00000002)) { args_ = args_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000002); } result.args_ = args_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto) { return mergeFrom((org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto)other); } else { super.mergeFrom(other); return this; } } public Builder 
mergeFrom(org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto other) { if (other == org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto.getDefaultInstance()) return this; if (other.hasIdentifier()) { bitField0_ |= 0x00000001; identifier_ = other.identifier_; onChanged(); } if (!other.args_.isEmpty()) { if (args_.isEmpty()) { args_ = other.args_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureArgsIsMutable(); args_.addAll(other.args_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object identifier_ = ""; /** * <code>optional string identifier = 1;</code> */ public boolean hasIdentifier() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional string identifier = 1;</code> */ public java.lang.String getIdentifier() { java.lang.Object ref = identifier_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { identifier_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>optional string identifier = 1;</code> */ public com.google.protobuf.ByteString getIdentifierBytes() { java.lang.Object ref = identifier_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); identifier_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string identifier = 1;</code> */ public Builder setIdentifier( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; identifier_ = value; onChanged(); return this; } /** * <code>optional string identifier = 1;</code> */ public Builder clearIdentifier() { bitField0_ = (bitField0_ & ~0x00000001); identifier_ = getDefaultInstance().getIdentifier(); onChanged(); return this; } /** * <code>optional string identifier = 1;</code> */ public Builder setIdentifierBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; identifier_ = value; onChanged(); return this; } private com.google.protobuf.LazyStringList args_ = com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureArgsIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { args_ = new com.google.protobuf.LazyStringArrayList(args_); bitField0_ |= 0x00000002; } } /** * <code>repeated string args = 2;</code> */ public com.google.protobuf.ProtocolStringList getArgsList() { return args_.getUnmodifiableView(); } /** * <code>repeated string args = 2;</code> */ public int getArgsCount() { return args_.size(); } /** * <code>repeated string args = 2;</code> */ public java.lang.String 
getArgs(int index) { return args_.get(index); } /** * <code>repeated string args = 2;</code> */ public com.google.protobuf.ByteString getArgsBytes(int index) { return args_.getByteString(index); } /** * <code>repeated string args = 2;</code> */ public Builder setArgs( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureArgsIsMutable(); args_.set(index, value); onChanged(); return this; } /** * <code>repeated string args = 2;</code> */ public Builder addArgs( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureArgsIsMutable(); args_.add(value); onChanged(); return this; } /** * <code>repeated string args = 2;</code> */ public Builder addAllArgs( java.lang.Iterable<java.lang.String> values) { ensureArgsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, args_); onChanged(); return this; } /** * <code>repeated string args = 2;</code> */ public Builder clearArgs() { args_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * <code>repeated string args = 2;</code> */ public Builder addArgsBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureArgsIsMutable(); args_.add(value); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hadoop.common.GenericRefreshRequestProto) } static { defaultInstance = new GenericRefreshRequestProto(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hadoop.common.GenericRefreshRequestProto) } public interface GenericRefreshResponseProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.common.GenericRefreshResponseProto) com.google.protobuf.MessageOrBuilder { /** * <code>optional int32 exitStatus = 1;</code> * * <pre> * unix exit status to return * </pre> */ boolean hasExitStatus(); /** * <code>optional int32 exitStatus = 1;</code> * * <pre> * unix exit status to return * </pre> */ int getExitStatus(); /** * <code>optional string userMessage = 2;</code> * * <pre> * to be displayed to the user * </pre> */ boolean hasUserMessage(); /** * <code>optional string userMessage = 2;</code> * * <pre> * to be displayed to the user * </pre> */ java.lang.String getUserMessage(); /** * <code>optional string userMessage = 2;</code> * * <pre> * to be displayed to the user * </pre> */ com.google.protobuf.ByteString getUserMessageBytes(); /** * <code>optional string senderName = 3;</code> * * <pre> * which handler sent this message * </pre> */ boolean hasSenderName(); /** * <code>optional string senderName = 3;</code> * * <pre> * which handler sent this message * </pre> */ java.lang.String getSenderName(); /** * <code>optional string senderName = 3;</code> * * <pre> * which handler sent this message * </pre> */ com.google.protobuf.ByteString getSenderNameBytes(); } /** * Protobuf type {@code hadoop.common.GenericRefreshResponseProto} * * <pre> ** * A single response from a refresh handler. * </pre> */ public static final class GenericRefreshResponseProto extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:hadoop.common.GenericRefreshResponseProto) GenericRefreshResponseProtoOrBuilder { // Use GenericRefreshResponseProto.newBuilder() to construct. 
private GenericRefreshResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private GenericRefreshResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final GenericRefreshResponseProto defaultInstance; public static GenericRefreshResponseProto getDefaultInstance() { return defaultInstance; } public GenericRefreshResponseProto getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GenericRefreshResponseProto( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; exitStatus_ = input.readInt32(); break; } case 18: { com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; userMessage_ = bs; break; } case 26: { com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000004; senderName_ = bs; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.internal_static_hadoop_common_GenericRefreshResponseProto_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.internal_static_hadoop_common_GenericRefreshResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto.class, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto.Builder.class); } public static com.google.protobuf.Parser<GenericRefreshResponseProto> PARSER = new com.google.protobuf.AbstractParser<GenericRefreshResponseProto>() { public GenericRefreshResponseProto parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new GenericRefreshResponseProto(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<GenericRefreshResponseProto> getParserForType() { return PARSER; } private int bitField0_; public static final int EXITSTATUS_FIELD_NUMBER = 1; private int exitStatus_; /** * <code>optional int32 exitStatus = 1;</code> * * <pre> * unix exit status to return * </pre> */ public boolean hasExitStatus() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional int32 exitStatus = 1;</code> * * <pre> * 
unix exit status to return * </pre> */ public int getExitStatus() { return exitStatus_; } public static final int USERMESSAGE_FIELD_NUMBER = 2; private java.lang.Object userMessage_; /** * <code>optional string userMessage = 2;</code> * * <pre> * to be displayed to the user * </pre> */ public boolean hasUserMessage() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional string userMessage = 2;</code> * * <pre> * to be displayed to the user * </pre> */ public java.lang.String getUserMessage() { java.lang.Object ref = userMessage_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { userMessage_ = s; } return s; } } /** * <code>optional string userMessage = 2;</code> * * <pre> * to be displayed to the user * </pre> */ public com.google.protobuf.ByteString getUserMessageBytes() { java.lang.Object ref = userMessage_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); userMessage_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SENDERNAME_FIELD_NUMBER = 3; private java.lang.Object senderName_; /** * <code>optional string senderName = 3;</code> * * <pre> * which handler sent this message * </pre> */ public boolean hasSenderName() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional string senderName = 3;</code> * * <pre> * which handler sent this message * </pre> */ public java.lang.String getSenderName() { java.lang.Object ref = senderName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { senderName_ = s; } return s; } } /** * <code>optional string senderName = 3;</code> * * <pre> * which handler sent this message * </pre> */ public com.google.protobuf.ByteString getSenderNameBytes() { java.lang.Object ref = senderName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); senderName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private void initFields() { exitStatus_ = 0; userMessage_ = ""; senderName_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt32(1, exitStatus_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, getUserMessageBytes()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, getSenderNameBytes()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(1, exitStatus_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, 
getUserMessageBytes()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, getSenderNameBytes()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto)) { return super.equals(obj); } org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto other = (org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto) obj; boolean result = true; result = result && (hasExitStatus() == other.hasExitStatus()); if (hasExitStatus()) { result = result && (getExitStatus() == other.getExitStatus()); } result = result && (hasUserMessage() == other.hasUserMessage()); if (hasUserMessage()) { result = result && getUserMessage() .equals(other.getUserMessage()); } result = result && (hasSenderName() == other.hasSenderName()); if (hasSenderName()) { result = result && getSenderName() .equals(other.getSenderName()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasExitStatus()) { hash = (37 * hash) + EXITSTATUS_FIELD_NUMBER; hash = (53 * hash) + getExitStatus(); } if (hasUserMessage()) { hash = (37 * hash) + USERMESSAGE_FIELD_NUMBER; hash = (53 * hash) + getUserMessage().hashCode(); } if (hasSenderName()) { hash = (37 * hash) + SENDERNAME_FIELD_NUMBER; hash = (53 * hash) + getSenderName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.common.GenericRefreshResponseProto} * * <pre> ** * A single response from a refresh handler. * </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hadoop.common.GenericRefreshResponseProto) org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProtoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.internal_static_hadoop_common_GenericRefreshResponseProto_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.internal_static_hadoop_common_GenericRefreshResponseProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto.class, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto.Builder.class); } // Construct using org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); exitStatus_ = 0; bitField0_ = (bitField0_ & ~0x00000001); userMessage_ = ""; bitField0_ = (bitField0_ & ~0x00000002); senderName_ = ""; bitField0_ = (bitField0_ & ~0x00000004); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.internal_static_hadoop_common_GenericRefreshResponseProto_descriptor; } public org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto getDefaultInstanceForType() { return org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto.getDefaultInstance(); } public org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto build() { org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto buildPartial() { org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto result = new org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.exitStatus_ = exitStatus_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.userMessage_ = userMessage_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.senderName_ = senderName_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto) { return mergeFrom((org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto other) { if (other == org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto.getDefaultInstance()) return this; if (other.hasExitStatus()) { setExitStatus(other.getExitStatus()); } if (other.hasUserMessage()) { bitField0_ |= 0x00000002; userMessage_ = other.userMessage_; onChanged(); } if (other.hasSenderName()) { bitField0_ |= 0x00000004; senderName_ = other.senderName_; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private int exitStatus_ ; /** * <code>optional int32 exitStatus = 1;</code> * * <pre> * unix exit status to return * </pre> */ public boolean hasExitStatus() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional int32 exitStatus = 1;</code> * * <pre> * unix exit status to return * </pre> */ public int getExitStatus() { return exitStatus_; } /** * <code>optional 
int32 exitStatus = 1;</code> * * <pre> * unix exit status to return * </pre> */ public Builder setExitStatus(int value) { bitField0_ |= 0x00000001; exitStatus_ = value; onChanged(); return this; } /** * <code>optional int32 exitStatus = 1;</code> * * <pre> * unix exit status to return * </pre> */ public Builder clearExitStatus() { bitField0_ = (bitField0_ & ~0x00000001); exitStatus_ = 0; onChanged(); return this; } private java.lang.Object userMessage_ = ""; /** * <code>optional string userMessage = 2;</code> * * <pre> * to be displayed to the user * </pre> */ public boolean hasUserMessage() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional string userMessage = 2;</code> * * <pre> * to be displayed to the user * </pre> */ public java.lang.String getUserMessage() { java.lang.Object ref = userMessage_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { userMessage_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>optional string userMessage = 2;</code> * * <pre> * to be displayed to the user * </pre> */ public com.google.protobuf.ByteString getUserMessageBytes() { java.lang.Object ref = userMessage_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); userMessage_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string userMessage = 2;</code> * * <pre> * to be displayed to the user * </pre> */ public Builder setUserMessage( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; userMessage_ = value; onChanged(); return this; } /** * <code>optional string userMessage = 2;</code> * * <pre> * to be displayed to the user * </pre> */ public Builder clearUserMessage() { bitField0_ = (bitField0_ & ~0x00000002); userMessage_ = getDefaultInstance().getUserMessage(); onChanged(); return this; } /** * <code>optional string userMessage = 2;</code> * * <pre> * to be displayed to the user * </pre> */ public Builder setUserMessageBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; userMessage_ = value; onChanged(); return this; } private java.lang.Object senderName_ = ""; /** * <code>optional string senderName = 3;</code> * * <pre> * which handler sent this message * </pre> */ public boolean hasSenderName() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional string senderName = 3;</code> * * <pre> * which handler sent this message * </pre> */ public java.lang.String getSenderName() { java.lang.Object ref = senderName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { senderName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>optional string senderName = 3;</code> * * <pre> * which handler sent this message * </pre> */ public com.google.protobuf.ByteString getSenderNameBytes() { java.lang.Object ref = senderName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); senderName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string senderName = 3;</code> * * <pre> * which 
handler sent this message * </pre> */ public Builder setSenderName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; senderName_ = value; onChanged(); return this; } /** * <code>optional string senderName = 3;</code> * * <pre> * which handler sent this message * </pre> */ public Builder clearSenderName() { bitField0_ = (bitField0_ & ~0x00000004); senderName_ = getDefaultInstance().getSenderName(); onChanged(); return this; } /** * <code>optional string senderName = 3;</code> * * <pre> * which handler sent this message * </pre> */ public Builder setSenderNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; senderName_ = value; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hadoop.common.GenericRefreshResponseProto) } static { defaultInstance = new GenericRefreshResponseProto(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hadoop.common.GenericRefreshResponseProto) } public interface GenericRefreshResponseCollectionProtoOrBuilder extends // @@protoc_insertion_point(interface_extends:hadoop.common.GenericRefreshResponseCollectionProto) com.google.protobuf.MessageOrBuilder { /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ java.util.List<org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto> getResponsesList(); /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto getResponses(int index); /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ int getResponsesCount(); /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ java.util.List<? extends org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProtoOrBuilder> getResponsesOrBuilderList(); /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProtoOrBuilder getResponsesOrBuilder( int index); } /** * Protobuf type {@code hadoop.common.GenericRefreshResponseCollectionProto} * * <pre> ** * Collection of responses from zero or more handlers. * </pre> */ public static final class GenericRefreshResponseCollectionProto extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:hadoop.common.GenericRefreshResponseCollectionProto) GenericRefreshResponseCollectionProtoOrBuilder { // Use GenericRefreshResponseCollectionProto.newBuilder() to construct. 
private GenericRefreshResponseCollectionProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private GenericRefreshResponseCollectionProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final GenericRefreshResponseCollectionProto defaultInstance; public static GenericRefreshResponseCollectionProto getDefaultInstance() { return defaultInstance; } public GenericRefreshResponseCollectionProto getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GenericRefreshResponseCollectionProto( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { responses_ = new java.util.ArrayList<org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto>(); mutable_bitField0_ |= 0x00000001; } responses_.add(input.readMessage(org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto.PARSER, extensionRegistry)); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { responses_ = java.util.Collections.unmodifiableList(responses_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.internal_static_hadoop_common_GenericRefreshResponseCollectionProto_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.internal_static_hadoop_common_GenericRefreshResponseCollectionProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto.class, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto.Builder.class); } public static com.google.protobuf.Parser<GenericRefreshResponseCollectionProto> PARSER = new com.google.protobuf.AbstractParser<GenericRefreshResponseCollectionProto>() { public GenericRefreshResponseCollectionProto parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new GenericRefreshResponseCollectionProto(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<GenericRefreshResponseCollectionProto> getParserForType() { return PARSER; } public 
static final int RESPONSES_FIELD_NUMBER = 1; private java.util.List<org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto> responses_; /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ public java.util.List<org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto> getResponsesList() { return responses_; } /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ public java.util.List<? extends org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProtoOrBuilder> getResponsesOrBuilderList() { return responses_; } /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ public int getResponsesCount() { return responses_.size(); } /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ public org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto getResponses(int index) { return responses_.get(index); } /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ public org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProtoOrBuilder getResponsesOrBuilder( int index) { return responses_.get(index); } private void initFields() { responses_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); for (int i = 0; i < responses_.size(); i++) { output.writeMessage(1, responses_.get(i)); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; for (int i = 0; i < responses_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, responses_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto)) { return super.equals(obj); } org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto other = (org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto) obj; boolean result = true; result = result && getResponsesList() .equals(other.getResponsesList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (getResponsesCount() > 0) { hash = (37 * hash) + RESPONSES_FIELD_NUMBER; hash = (53 * hash) + getResponsesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static 
org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hadoop.common.GenericRefreshResponseCollectionProto} * * <pre> ** * Collection of responses from zero or more handlers. 
* </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:hadoop.common.GenericRefreshResponseCollectionProto) org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProtoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.internal_static_hadoop_common_GenericRefreshResponseCollectionProto_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.internal_static_hadoop_common_GenericRefreshResponseCollectionProto_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto.class, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto.Builder.class); } // Construct using org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getResponsesFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (responsesBuilder_ == null) { responses_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { responsesBuilder_.clear(); } return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.internal_static_hadoop_common_GenericRefreshResponseCollectionProto_descriptor; } public org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto getDefaultInstanceForType() { return org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto.getDefaultInstance(); } public org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto build() { org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto buildPartial() { org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto result = new org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto(this); int from_bitField0_ = bitField0_; if (responsesBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { responses_ = java.util.Collections.unmodifiableList(responses_); bitField0_ = (bitField0_ & ~0x00000001); } result.responses_ = responses_; } else { result.responses_ = responsesBuilder_.build(); } onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto) { return 
mergeFrom((org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto other) { if (other == org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto.getDefaultInstance()) return this; if (responsesBuilder_ == null) { if (!other.responses_.isEmpty()) { if (responses_.isEmpty()) { responses_ = other.responses_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureResponsesIsMutable(); responses_.addAll(other.responses_); } onChanged(); } } else { if (!other.responses_.isEmpty()) { if (responsesBuilder_.isEmpty()) { responsesBuilder_.dispose(); responsesBuilder_ = null; responses_ = other.responses_; bitField0_ = (bitField0_ & ~0x00000001); responsesBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getResponsesFieldBuilder() : null; } else { responsesBuilder_.addAllMessages(other.responses_); } } } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List<org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto> responses_ = java.util.Collections.emptyList(); private void ensureResponsesIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { responses_ = new java.util.ArrayList<org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto>(responses_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto.Builder, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProtoOrBuilder> responsesBuilder_; /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ public java.util.List<org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto> getResponsesList() { if (responsesBuilder_ == null) { return java.util.Collections.unmodifiableList(responses_); } else { return responsesBuilder_.getMessageList(); } } /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ public int getResponsesCount() { if (responsesBuilder_ == null) { return responses_.size(); } else { return responsesBuilder_.getCount(); } } /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ public org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto getResponses(int index) { if (responsesBuilder_ == null) { return responses_.get(index); } else { return 
responsesBuilder_.getMessage(index); } } /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ public Builder setResponses( int index, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto value) { if (responsesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureResponsesIsMutable(); responses_.set(index, value); onChanged(); } else { responsesBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ public Builder setResponses( int index, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto.Builder builderForValue) { if (responsesBuilder_ == null) { ensureResponsesIsMutable(); responses_.set(index, builderForValue.build()); onChanged(); } else { responsesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ public Builder addResponses(org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto value) { if (responsesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureResponsesIsMutable(); responses_.add(value); onChanged(); } else { responsesBuilder_.addMessage(value); } return this; } /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ public Builder addResponses( int index, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto value) { if (responsesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureResponsesIsMutable(); responses_.add(index, value); onChanged(); } else { responsesBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ public Builder addResponses( org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto.Builder builderForValue) { if (responsesBuilder_ == null) { ensureResponsesIsMutable(); responses_.add(builderForValue.build()); onChanged(); } else { responsesBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ public Builder addResponses( int index, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto.Builder builderForValue) { if (responsesBuilder_ == null) { ensureResponsesIsMutable(); responses_.add(index, builderForValue.build()); onChanged(); } else { responsesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ public Builder addAllResponses( java.lang.Iterable<? 
extends org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto> values) { if (responsesBuilder_ == null) { ensureResponsesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, responses_); onChanged(); } else { responsesBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ public Builder clearResponses() { if (responsesBuilder_ == null) { responses_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { responsesBuilder_.clear(); } return this; } /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ public Builder removeResponses(int index) { if (responsesBuilder_ == null) { ensureResponsesIsMutable(); responses_.remove(index); onChanged(); } else { responsesBuilder_.remove(index); } return this; } /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ public org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto.Builder getResponsesBuilder( int index) { return getResponsesFieldBuilder().getBuilder(index); } /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ public org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProtoOrBuilder getResponsesOrBuilder( int index) { if (responsesBuilder_ == null) { return responses_.get(index); } else { return responsesBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ public java.util.List<? extends org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProtoOrBuilder> getResponsesOrBuilderList() { if (responsesBuilder_ != null) { return responsesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(responses_); } } /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ public org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto.Builder addResponsesBuilder() { return getResponsesFieldBuilder().addBuilder( org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto.getDefaultInstance()); } /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ public org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto.Builder addResponsesBuilder( int index) { return getResponsesFieldBuilder().addBuilder( index, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto.getDefaultInstance()); } /** * <code>repeated .hadoop.common.GenericRefreshResponseProto responses = 1;</code> */ public java.util.List<org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto.Builder> getResponsesBuilderList() { return getResponsesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto.Builder, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProtoOrBuilder> getResponsesFieldBuilder() { if (responsesBuilder_ == null) { responsesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto, 
org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto.Builder, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProtoOrBuilder>( responses_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); responses_ = null; } return responsesBuilder_; } // @@protoc_insertion_point(builder_scope:hadoop.common.GenericRefreshResponseCollectionProto) } static { defaultInstance = new GenericRefreshResponseCollectionProto(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hadoop.common.GenericRefreshResponseCollectionProto) } /** * Protobuf service {@code hadoop.common.GenericRefreshProtocolService} * * <pre> ** * Protocol which is used to refresh a user-specified feature. * </pre> */ public static abstract class GenericRefreshProtocolService implements com.google.protobuf.Service { protected GenericRefreshProtocolService() {} public interface Interface { /** * <code>rpc refresh(.hadoop.common.GenericRefreshRequestProto) returns (.hadoop.common.GenericRefreshResponseCollectionProto);</code> */ public abstract void refresh( com.google.protobuf.RpcController controller, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto request, com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto> done); } public static com.google.protobuf.Service newReflectiveService( final Interface impl) { return new GenericRefreshProtocolService() { @java.lang.Override public void refresh( com.google.protobuf.RpcController controller, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto request, com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto> done) { impl.refresh(controller, request, done); } }; } public static com.google.protobuf.BlockingService newReflectiveBlockingService(final BlockingInterface impl) { return new com.google.protobuf.BlockingService() { public final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptorForType() { return getDescriptor(); } public final com.google.protobuf.Message callBlockingMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, com.google.protobuf.Message request) throws com.google.protobuf.ServiceException { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.callBlockingMethod() given method descriptor for " + "wrong service type."); } switch(method.getIndex()) { case 0: return impl.refresh(controller, (org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto)request); default: throw new java.lang.AssertionError("Can't get here."); } } public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getRequestPrototype() given method " + "descriptor for wrong service type."); } switch(method.getIndex()) { case 0: return org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } } public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new 
java.lang.IllegalArgumentException( "Service.getResponsePrototype() given method " + "descriptor for wrong service type."); } switch(method.getIndex()) { case 0: return org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } } }; } /** * <code>rpc refresh(.hadoop.common.GenericRefreshRequestProto) returns (.hadoop.common.GenericRefreshResponseCollectionProto);</code> */ public abstract void refresh( com.google.protobuf.RpcController controller, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto request, com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto> done); public static final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { return org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.getDescriptor().getServices().get(0); } public final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptorForType() { return getDescriptor(); } public final void callMethod( com.google.protobuf.Descriptors.MethodDescriptor method, com.google.protobuf.RpcController controller, com.google.protobuf.Message request, com.google.protobuf.RpcCallback< com.google.protobuf.Message> done) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.callMethod() given method descriptor for wrong " + "service type."); } switch(method.getIndex()) { case 0: this.refresh(controller, (org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto)request, com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto>specializeCallback( done)); return; default: throw new java.lang.AssertionError("Can't get here."); } } public final com.google.protobuf.Message getRequestPrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getRequestPrototype() given method " + "descriptor for wrong service type."); } switch(method.getIndex()) { case 0: return org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } } public final com.google.protobuf.Message getResponsePrototype( com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getResponsePrototype() given method " + "descriptor for wrong service type."); } switch(method.getIndex()) { case 0: return org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } } public static Stub newStub( com.google.protobuf.RpcChannel channel) { return new Stub(channel); } public static final class Stub extends org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshProtocolService implements Interface { private Stub(com.google.protobuf.RpcChannel channel) { this.channel = channel; } private final com.google.protobuf.RpcChannel channel; public com.google.protobuf.RpcChannel getChannel() { return channel; } public void refresh( com.google.protobuf.RpcController controller, 
org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto request, com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto> done) { channel.callMethod( getDescriptor().getMethods().get(0), controller, request, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto.class, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto.getDefaultInstance())); } } public static BlockingInterface newBlockingStub( com.google.protobuf.BlockingRpcChannel channel) { return new BlockingStub(channel); } public interface BlockingInterface { public org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto refresh( com.google.protobuf.RpcController controller, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto request) throws com.google.protobuf.ServiceException; } private static final class BlockingStub implements BlockingInterface { private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { this.channel = channel; } private final com.google.protobuf.BlockingRpcChannel channel; public org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto refresh( com.google.protobuf.RpcController controller, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto) channel.callBlockingMethod( getDescriptor().getMethods().get(0), controller, request, org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto.getDefaultInstance()); } } // @@protoc_insertion_point(class_scope:hadoop.common.GenericRefreshProtocolService) } private static final com.google.protobuf.Descriptors.Descriptor internal_static_hadoop_common_GenericRefreshRequestProto_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hadoop_common_GenericRefreshRequestProto_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_hadoop_common_GenericRefreshResponseProto_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hadoop_common_GenericRefreshResponseProto_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_hadoop_common_GenericRefreshResponseCollectionProto_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hadoop_common_GenericRefreshResponseCollectionProto_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\034GenericRefreshProtocol.proto\022\rhadoop.c" + "ommon\">\n\032GenericRefreshRequestProto\022\022\n\ni" + "dentifier\030\001 \001(\t\022\014\n\004args\030\002 \003(\t\"Z\n\033Generic" + "RefreshResponseProto\022\022\n\nexitStatus\030\001 \001(\005" + "\022\023\n\013userMessage\030\002 \001(\t\022\022\n\nsenderName\030\003 \001(" + 
"\t\"f\n%GenericRefreshResponseCollectionPro" + "to\022=\n\tresponses\030\001 \003(\0132*.hadoop.common.Ge" + "nericRefreshResponseProto2\213\001\n\035GenericRef" + "reshProtocolService\022j\n\007refresh\022).hadoop." + "common.GenericRefreshRequestProto\0324.hado", "op.common.GenericRefreshResponseCollecti" + "onProtoBA\n\033org.apache.hadoop.ipc.protoB\034" + "GenericRefreshProtocolProtos\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; return null; } }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); internal_static_hadoop_common_GenericRefreshRequestProto_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_hadoop_common_GenericRefreshRequestProto_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hadoop_common_GenericRefreshRequestProto_descriptor, new java.lang.String[] { "Identifier", "Args", }); internal_static_hadoop_common_GenericRefreshResponseProto_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_hadoop_common_GenericRefreshResponseProto_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hadoop_common_GenericRefreshResponseProto_descriptor, new java.lang.String[] { "ExitStatus", "UserMessage", "SenderName", }); internal_static_hadoop_common_GenericRefreshResponseCollectionProto_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_hadoop_common_GenericRefreshResponseCollectionProto_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hadoop_common_GenericRefreshResponseCollectionProto_descriptor, new java.lang.String[] { "Responses", }); } // @@protoc_insertion_point(outer_class_scope) }
bruthe/hadoop-2.6.0r
src/proto/java/org/apache/hadoop/ipc/proto/GenericRefreshProtocolProtos.java
Java
apache-2.0
101,241
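The generated file above exposes the standard protobuf-java builder API for the three messages of GenericRefreshProtocol (request, per-handler response, and response collection). As a rough illustration that is not part of the generated code, a handler reply could be assembled and wrapped like this; the surrounding class and main method are scaffolding invented for the example, while every builder call (setExitStatus, setUserMessage, setSenderName, addResponses, getResponsesCount) comes directly from the code shown above.

import org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto;
import org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto;

public class GenericRefreshExample {
  public static void main(String[] args) {
    // Build a single handler response with the generated builder.
    GenericRefreshResponseProto response = GenericRefreshResponseProto.newBuilder()
        .setExitStatus(0)                      // unix exit status to return
        .setUserMessage("refresh succeeded")   // to be displayed to the user
        .setSenderName("ExampleRefreshHandler") // which handler sent this message
        .build();

    // Wrap zero or more handler responses into the collection message.
    GenericRefreshResponseCollectionProto collection =
        GenericRefreshResponseCollectionProto.newBuilder()
            .addResponses(response)
            .build();

    System.out.println("responses: " + collection.getResponsesCount());
  }
}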
<?php
/**
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * For instructions on how to run the full sample:
 *
 * @see https://github.com/GoogleCloudPlatform/php-docs-samples/tree/master/storage/README.md
 */

namespace Google\Cloud\Samples\Storage;

# [START storage_define_bucket_website_configuration]
use Google\Cloud\Storage\StorageClient;

/**
 * Update the given bucket's website configuration.
 *
 * @param string $bucketName The name of your Cloud Storage bucket.
 * @param string $indexPageObject the name of an object in the bucket to use as
 *     an index page for a static website bucket.
 * @param string $notFoundPageObject the name of an object in the bucket to use
 *     as the 404 Not Found page.
 */
function define_bucket_website_configuration($bucketName, $indexPageObject, $notFoundPageObject)
{
    // $bucketName = 'my-bucket';
    // $indexPageObject = 'index.html';
    // $notFoundPageObject = '404.html';

    $storage = new StorageClient();
    $bucket = $storage->bucket($bucketName);
    $bucket->update([
        'website' => [
            'mainPageSuffix' => $indexPageObject,
            'notFoundPage' => $notFoundPageObject
        ]
    ]);

    printf(
        'Static website bucket %s is set up to use %s as the index page and %s as the 404 page.',
        $bucketName,
        $indexPageObject,
        $notFoundPageObject
    );
}
# [END storage_define_bucket_website_configuration]

// The following 2 lines are only needed to run the samples
require_once __DIR__ . '/../../testing/sample_helpers.php';
\Google\Cloud\Samples\execute_sample(__FILE__, __NAMESPACE__, $argv);
GoogleCloudPlatform/php-docs-samples
storage/src/define_bucket_website_configuration.php
PHP
apache-2.0
2,168
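The same bucket-website update can be expressed with the google-cloud-storage Java client. The sketch below is not part of the sample repository; it assumes the Java client's Bucket.Builder exposes setIndexPage/setNotFoundPage (as recent library versions do) and uses made-up bucket and object names.

import com.google.cloud.storage.Bucket;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;

public class DefineBucketWebsiteConfiguration {
  public static void main(String[] args) {
    // Placeholder values; substitute your own bucket and objects.
    String bucketName = "my-bucket";
    String indexPageObject = "index.html";
    String notFoundPageObject = "404.html";

    Storage storage = StorageOptions.getDefaultInstance().getService();
    Bucket bucket = storage.get(bucketName);

    // Mirrors the PHP sample's $bucket->update(['website' => ...]) call.
    bucket.toBuilder()
        .setIndexPage(indexPageObject)
        .setNotFoundPage(notFoundPageObject)
        .build()
        .update();

    System.out.printf(
        "Static website bucket %s is set up to use %s as the index page and %s as the 404 page.%n",
        bucketName, indexPageObject, notFoundPageObject);
  }
}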
package es.udc.fic.acs.infmsb01.atm.common.model.message.instance.data;

import es.udc.fic.acs.infmsb01.atm.common.model.agentinfo.RecipientInfo;
import es.udc.fic.acs.infmsb01.atm.common.model.message.DataResponseMessage;
import es.udc.fic.acs.infmsb01.atm.common.model.message.code.DataResponseCode;

public final class ResponseAccountsTransfer extends DataResponseMessage {

    private static final long serialVersionUID = 1L;

    private Double sourceAccountBalance;
    private Double destinationAccountBalance;

    public ResponseAccountsTransfer(RecipientInfo from, RecipientInfo to,
            byte channelNumber, short messageNumber, boolean bankOnline,
            DataResponseCode responseCode, Double sourceAccountBalance,
            Double destinationAccountBalance) {
        super(from, to, channelNumber, messageNumber, bankOnline, responseCode);
        this.destinationAccountBalance = destinationAccountBalance;
        this.sourceAccountBalance = sourceAccountBalance;
    }

    public ResponseAccountsTransfer() {
        super();
    }

    public void setSourceAccountBalance(Double sourceAccountBalance) {
        this.sourceAccountBalance = sourceAccountBalance;
    }

    public void setDestinationAccountBalance(Double destinationAccountBalance) {
        this.destinationAccountBalance = destinationAccountBalance;
    }

    public Double getSourceAccountBalance() {
        return sourceAccountBalance;
    }

    public Double getDestinationAccountBalance() {
        return destinationAccountBalance;
    }

    @Override
    public String toString() {
        return "[" + getClass().getSimpleName() + "] " + super.toString()
                + " sab:" + sourceAccountBalance
                + " dab:" + destinationAccountBalance;
    }
}
marcos-sb/distributed-banking-system
common/src/main/java/es/udc/fic/acs/infmsb01/atm/common/model/message/instance/data/ResponseAccountsTransfer.java
Java
apache-2.0
1,636
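For orientation, a minimal sketch of how this message class could be used. It relies only on the no-argument constructor and the accessors defined in the file above; the balance figures and the wrapping example class are invented for illustration.

import es.udc.fic.acs.infmsb01.atm.common.model.message.instance.data.ResponseAccountsTransfer;

public class ResponseAccountsTransferExample {
  public static void main(String[] args) {
    // Build an empty response and fill in the two balances this subclass adds.
    ResponseAccountsTransfer response = new ResponseAccountsTransfer();
    response.setSourceAccountBalance(1250.75);
    response.setDestinationAccountBalance(3400.00);

    System.out.println("source balance:      " + response.getSourceAccountBalance());
    System.out.println("destination balance: " + response.getDestinationAccountBalance());
  }
}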
/*
 * Copyright 2014 Lukas Benda <lbenda at lbenda.cz>.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cz.lbenda.coursing.server.service;

import cz.lbenda.coursing.server.dto.DogImpl;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;

/**
 * Created by Lukas Benda <lbenda @ lbenda.cz> on 6/21/14.
 */
@Repository
public interface DogRepository extends JpaRepository<DogImpl, String> {
}
lbenda/Coursing
server/src/main/java/cz/lbenda/coursing/server/service/DogRepository.java
Java
apache-2.0
975
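Because DogRepository only extends Spring Data's JpaRepository, all CRUD operations are inherited. A minimal, hypothetical service class showing the usual injection-and-use pattern follows; DogService itself is not part of the repository shown above, and only generic JpaRepository calls (save, findAll) are used since DogImpl's fields are not visible here.

import java.util.List;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import cz.lbenda.coursing.server.dto.DogImpl;
import cz.lbenda.coursing.server.service.DogRepository;

@Service
public class DogService {

  @Autowired
  private DogRepository dogRepository;

  /** Persist a dog and return the managed entity (inherited JpaRepository.save). */
  public DogImpl register(DogImpl dog) {
    return dogRepository.save(dog);
  }

  /** List every stored dog (inherited JpaRepository.findAll). */
  public List<DogImpl> allDogs() {
    return dogRepository.findAll();
  }
}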
# Hosta fortunei var. stenantha VARIETY

#### Status
SYNONYM

#### According to
The Catalogue of Life, 3rd January 2011

#### Published in
null

#### Original name
null

### Remarks
null
mdoering/backbone
life/Plantae/Magnoliophyta/Liliopsida/Asparagales/Asparagaceae/Hosta/Hosta sieboldiana/ Syn. Hosta fortunei stenantha/README.md
Markdown
apache-2.0
186
(clear)
(unwatch compilations)
(watch statistics)
(set-strategy depth)
(load clipsfiles/waltz/waltz.clp)
(reset)
(load-facts clipsfiles/waltz/waltz37.fct)
(run)
RWTH-i5-IDSG/jamocha
clipsfiles/waltz/waltz37.bat
Batchfile
apache-2.0
161
/** * Copyright (c) 2014-2017 Evolveum * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.evolveum.midpoint.provisioning.ucf.impl.connid; import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Collection; import java.util.GregorianCalendar; import java.util.HashSet; import java.util.List; import java.util.Set; import javax.xml.namespace.QName; import org.identityconnectors.common.security.GuardedString; import org.identityconnectors.framework.common.objects.Attribute; import org.identityconnectors.framework.common.objects.AttributeBuilder; import org.identityconnectors.framework.common.objects.AttributeDelta; import org.identityconnectors.framework.common.objects.AttributeDeltaBuilder; import org.identityconnectors.framework.common.objects.AttributeValueCompleteness; import org.identityconnectors.framework.common.objects.ConnectorObject; import org.identityconnectors.framework.common.objects.ObjectClass; import org.identityconnectors.framework.common.objects.OperationalAttributes; import org.identityconnectors.framework.common.objects.PredefinedAttributes; import org.identityconnectors.framework.common.objects.Uid; import com.evolveum.midpoint.prism.PrismObject; import com.evolveum.midpoint.prism.PrismObjectDefinition; import com.evolveum.midpoint.prism.PrismPropertyValue; import com.evolveum.midpoint.prism.crypto.EncryptionException; import com.evolveum.midpoint.prism.crypto.Protector; import com.evolveum.midpoint.prism.xml.XmlTypeConverter; import com.evolveum.midpoint.schema.constants.SchemaConstants; import com.evolveum.midpoint.schema.processor.ObjectClassComplexTypeDefinition; import com.evolveum.midpoint.schema.processor.ResourceAttribute; import com.evolveum.midpoint.schema.processor.ResourceAttributeContainer; import com.evolveum.midpoint.schema.processor.ResourceAttributeContainerDefinition; import com.evolveum.midpoint.schema.processor.ResourceAttributeDefinition; import com.evolveum.midpoint.schema.util.ShadowUtil; import com.evolveum.midpoint.util.exception.SchemaException; import com.evolveum.midpoint.util.logging.Trace; import com.evolveum.midpoint.util.logging.TraceManager; import com.evolveum.midpoint.xml.ns._public.common.common_3.ActivationStatusType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ActivationType; import com.evolveum.midpoint.xml.ns._public.common.common_3.LockoutStatusType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType; import com.evolveum.prism.xml.ns._public.types_3.ProtectedStringType; /** * @author semancik * */ public class ConnIdConvertor { private static final Trace LOGGER = TraceManager.getTrace(ConnIdConvertor.class); private String resourceSchemaNamespace; private Protector protector; private ConnIdNameMapper icfNameMapper; public ConnIdConvertor(Protector protector, String resourceSchemaNamespace) { super(); this.protector = protector; this.resourceSchemaNamespace = resourceSchemaNamespace; } public ConnIdNameMapper getIcfNameMapper() { return icfNameMapper; } public void 
setIcfNameMapper(ConnIdNameMapper icfNameMapper) { this.icfNameMapper = icfNameMapper; } /** * Converts ICF ConnectorObject to the midPoint ResourceObject. * <p/> * All the attributes are mapped using the same way as they are mapped in * the schema (which is actually no mapping at all now). * <p/> * If an optional ResourceObjectDefinition was provided, the resulting * ResourceObject is schema-aware (getDefinition() method works). If no * ResourceObjectDefinition was provided, the object is schema-less. TODO: * this still needs to be implemented. * * @param co * ICF ConnectorObject to convert * @param def * ResourceObjectDefinition (from the schema) or null * @param full * if true it describes if the returned resource object should * contain all of the attributes defined in the schema, if false * the returned resource object will contain only attributed with * the non-null values. * @return new mapped ResourceObject instance. * @throws SchemaException */ <T extends ShadowType> PrismObject<T> convertToResourceObject(ConnectorObject co, PrismObjectDefinition<T> objectDefinition, boolean full, boolean caseIgnoreAttributeNames, boolean legacySchema) throws SchemaException { PrismObject<T> shadowPrism = null; if (objectDefinition != null) { shadowPrism = objectDefinition.instantiate(); } else { throw new SchemaException("No definition"); } // LOGGER.trace("Instantiated prism object {} from connector object.", // shadowPrism.debugDump()); T shadow = shadowPrism.asObjectable(); ResourceAttributeContainer attributesContainer = (ResourceAttributeContainer) shadowPrism .findOrCreateContainer(ShadowType.F_ATTRIBUTES); ResourceAttributeContainerDefinition attributesContainerDefinition = attributesContainer.getDefinition(); shadow.setObjectClass(attributesContainerDefinition.getTypeName()); List<ObjectClassComplexTypeDefinition> auxiliaryObjectClassDefinitions = new ArrayList<>(); // too loud // if (LOGGER.isTraceEnabled()) { // LOGGER.trace("Resource attribute container definition {}.", attributesContainerDefinition.debugDump()); // } for (Attribute icfAttr : co.getAttributes()) { if (icfAttr.is(PredefinedAttributes.AUXILIARY_OBJECT_CLASS_NAME)) { List<QName> auxiliaryObjectClasses = shadow.getAuxiliaryObjectClass(); for (Object auxiliaryIcfObjectClass: icfAttr.getValue()) { QName auxiliaryObjectClassQname = icfNameMapper.objectClassToQname(new ObjectClass((String)auxiliaryIcfObjectClass), resourceSchemaNamespace, legacySchema); auxiliaryObjectClasses.add(auxiliaryObjectClassQname); ObjectClassComplexTypeDefinition auxiliaryObjectClassDefinition = icfNameMapper.getResourceSchema().findObjectClassDefinition(auxiliaryObjectClassQname); if (auxiliaryObjectClassDefinition == null) { throw new SchemaException("Resource object "+co+" refers to auxiliary object class "+auxiliaryObjectClassQname+" which is not in the schema"); } auxiliaryObjectClassDefinitions.add(auxiliaryObjectClassDefinition); } break; } } for (Attribute connIdAttr : co.getAttributes()) { if (LOGGER.isTraceEnabled()) { LOGGER.trace("Reading ICF attribute {}: {}", connIdAttr.getName(), connIdAttr.getValue()); } if (connIdAttr.getName().equals(Uid.NAME)) { // UID is handled specially (see above) continue; } if (connIdAttr.is(PredefinedAttributes.AUXILIARY_OBJECT_CLASS_NAME)) { // Already processed continue; } if (connIdAttr.getName().equals(OperationalAttributes.PASSWORD_NAME)) { // password has to go to the credentials section ProtectedStringType password = getSingleValue(connIdAttr, ProtectedStringType.class); if (password == null) { // 
equals() instead of == is needed. The AttributeValueCompleteness enum may be loaded by different classloader if (!AttributeValueCompleteness.INCOMPLETE.equals(connIdAttr.getAttributeValueCompleteness())) { continue; } // There is no password value in the ConnId attribute. But it was indicated that // that attribute is incomplete. Therefore we can assume that there in fact is a value. // We just do not know it. ShadowUtil.setPasswordIncomplete(shadow); LOGGER.trace("Converted password: (incomplete)"); } else { ShadowUtil.setPassword(shadow, password); LOGGER.trace("Converted password: {}", password); } continue; } if (connIdAttr.getName().equals(OperationalAttributes.ENABLE_NAME)) { Boolean enabled = getSingleValue(connIdAttr, Boolean.class); if (enabled == null) { continue; } ActivationType activationType = ShadowUtil.getOrCreateActivation(shadow); ActivationStatusType activationStatusType; if (enabled) { activationStatusType = ActivationStatusType.ENABLED; } else { activationStatusType = ActivationStatusType.DISABLED; } activationType.setAdministrativeStatus(activationStatusType); activationType.setEffectiveStatus(activationStatusType); LOGGER.trace("Converted activation administrativeStatus: {}", activationStatusType); continue; } if (connIdAttr.getName().equals(OperationalAttributes.ENABLE_DATE_NAME)) { Long millis = getSingleValue(connIdAttr, Long.class); if (millis == null) { continue; } ActivationType activationType = ShadowUtil.getOrCreateActivation(shadow); activationType.setValidFrom(XmlTypeConverter.createXMLGregorianCalendar(millis)); continue; } if (connIdAttr.getName().equals(OperationalAttributes.DISABLE_DATE_NAME)) { Long millis = getSingleValue(connIdAttr, Long.class); if (millis == null) { continue; } ActivationType activationType = ShadowUtil.getOrCreateActivation(shadow); activationType.setValidTo(XmlTypeConverter.createXMLGregorianCalendar(millis)); continue; } if (connIdAttr.getName().equals(OperationalAttributes.LOCK_OUT_NAME)) { Boolean lockOut = getSingleValue(connIdAttr, Boolean.class); if (lockOut == null) { continue; } ActivationType activationType = ShadowUtil.getOrCreateActivation(shadow); LockoutStatusType lockoutStatusType; if (lockOut) { lockoutStatusType = LockoutStatusType.LOCKED; } else { lockoutStatusType = LockoutStatusType.NORMAL; } activationType.setLockoutStatus(lockoutStatusType); LOGGER.trace("Converted activation lockoutStatus: {}", lockoutStatusType); continue; } QName qname = icfNameMapper.convertAttributeNameToQName(connIdAttr.getName(), attributesContainerDefinition); ResourceAttributeDefinition attributeDefinition = attributesContainerDefinition.findAttributeDefinition(qname, caseIgnoreAttributeNames); if (attributeDefinition == null) { // Try to locate definition in auxiliary object classes for (ObjectClassComplexTypeDefinition auxiliaryObjectClassDefinition: auxiliaryObjectClassDefinitions) { attributeDefinition = auxiliaryObjectClassDefinition.findAttributeDefinition(qname, caseIgnoreAttributeNames); if (attributeDefinition != null) { break; } } if (attributeDefinition == null) { throw new SchemaException("Unknown attribute " + qname + " in definition of object class " + attributesContainerDefinition.getTypeName() + ". 
Original ConnId name: " + connIdAttr.getName() + " in resource object identified by " + co.getName(), qname); } } if (caseIgnoreAttributeNames) { qname = attributeDefinition.getName(); // normalized version } ResourceAttribute<Object> resourceAttribute = attributeDefinition.instantiate(qname); // if true, we need to convert whole connector object to the // resource object also with the null-values attributes if (full) { if (connIdAttr.getValue() != null) { // Convert the values. While most values do not need // conversions, some // of them may need it (e.g. GuardedString) for (Object connIdValue : connIdAttr.getValue()) { Object value = convertValueFromIcf(connIdValue, qname); resourceAttribute.add(new PrismPropertyValue<>(value)); } } LOGGER.trace("Converted attribute {}", resourceAttribute); attributesContainer.getValue().add(resourceAttribute); // in this case when false, we need only the attributes with the // non-null values. } else { if (connIdAttr.getValue() != null && !connIdAttr.getValue().isEmpty()) { // Convert the values. While most values do not need // conversions, some of them may need it (e.g. GuardedString) boolean empty = true; for (Object connIdValue : connIdAttr.getValue()) { if (connIdValue != null) { Object value = convertValueFromIcf(connIdValue, qname); empty = false; resourceAttribute.add(new PrismPropertyValue<>(value)); } } if (!empty) { LOGGER.trace("Converted attribute {}", resourceAttribute); attributesContainer.getValue().add(resourceAttribute); } } } } // Add Uid if it is not there already. It can be already present, // e.g. if Uid and Name represent the same attribute Uid uid = co.getUid(); ObjectClassComplexTypeDefinition ocDef = attributesContainerDefinition.getComplexTypeDefinition(); ResourceAttributeDefinition<String> uidDefinition = ConnIdUtil.getUidDefinition(ocDef); if (uidDefinition == null) { throw new SchemaException("No definition for ConnId UID attribute found in definition " + ocDef); } if (attributesContainer.getValue().findItem(uidDefinition.getName()) == null) { ResourceAttribute<String> uidRoa = uidDefinition.instantiate(); uidRoa.setValue(new PrismPropertyValue<>(uid.getUidValue())); attributesContainer.getValue().add(uidRoa); } return shadowPrism; } Set<Attribute> convertFromResourceObjectToConnIdAttributes(ResourceAttributeContainer attributesPrism, ObjectClassComplexTypeDefinition ocDef) throws SchemaException { Collection<ResourceAttribute<?>> resourceAttributes = attributesPrism.getAttributes(); return convertFromResourceObjectToConnIdAttributes(resourceAttributes, ocDef); } private Set<Attribute> convertFromResourceObjectToConnIdAttributes(Collection<ResourceAttribute<?>> mpResourceAttributes, ObjectClassComplexTypeDefinition ocDef) throws SchemaException { Set<Attribute> attributes = new HashSet<>(); if (mpResourceAttributes == null) { // returning empty set return attributes; } for (ResourceAttribute<?> attribute : mpResourceAttributes) { attributes.add(convertToConnIdAttribute(attribute, ocDef)); } return attributes; } private Attribute convertToConnIdAttribute(ResourceAttribute<?> mpAttribute, ObjectClassComplexTypeDefinition ocDef) throws SchemaException { QName midPointAttrQName = mpAttribute.getElementName(); if (midPointAttrQName.equals(SchemaConstants.ICFS_UID)) { throw new SchemaException("ICF UID explicitly specified in attributes"); } String connIdAttrName = icfNameMapper.convertAttributeNameToConnId(mpAttribute, ocDef); Set<Object> connIdAttributeValues = new HashSet<>(); for (PrismPropertyValue<?> pval: 
mpAttribute.getValues()) { connIdAttributeValues.add(ConnIdUtil.convertValueToIcf(pval, protector, mpAttribute.getElementName())); } try { return AttributeBuilder.build(connIdAttrName, connIdAttributeValues); } catch (IllegalArgumentException e) { throw new SchemaException(e.getMessage(), e); } } private <T> T getSingleValue(Attribute icfAttr, Class<T> type) throws SchemaException { List<Object> values = icfAttr.getValue(); if (values != null && !values.isEmpty()) { if (values.size() > 1) { throw new SchemaException("Expected single value for " + icfAttr.getName()); } Object val = convertValueFromIcf(values.get(0), null); if (val == null) { return null; } if (type.isAssignableFrom(val.getClass())) { return (T) val; } else { throw new SchemaException("Expected type " + type.getName() + " for " + icfAttr.getName() + " but got " + val.getClass().getName()); } } else { return null; } } private Object convertValueFromIcf(Object icfValue, QName propName) { if (icfValue == null) { return null; } if (icfValue instanceof ZonedDateTime) { return XmlTypeConverter.createXMLGregorianCalendar((ZonedDateTime)icfValue); } if (icfValue instanceof GuardedString) { return fromGuardedString((GuardedString) icfValue); } return icfValue; } private ProtectedStringType fromGuardedString(GuardedString icfValue) { final ProtectedStringType ps = new ProtectedStringType(); icfValue.access(new GuardedString.Accessor() { @Override public void access(char[] passwordChars) { try { ps.setClearValue(new String(passwordChars)); protector.encrypt(ps); } catch (EncryptionException e) { throw new IllegalStateException("Protector failed to encrypt password"); } } }); return ps; } }
arnost-starosta/midpoint
provisioning/ucf-impl-connid/src/main/java/com/evolveum/midpoint/provisioning/ucf/impl/connid/ConnIdConvertor.java
Java
apache-2.0
16,663
/**
 * Copyright 2015, deepsense.io
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.deepsense.commons.auth.usercontext

import scala.concurrent.Future

/**
 * Translates a token to a user context.
 */
trait TokenTranslator {
  def translate(token: String): Future[UserContext]
}

abstract class TokenTranslatorException(message: String) extends Throwable(message)

abstract class InvalidTokenException(message: String, token: String)
  extends TokenTranslatorException(message)

case class CannotGetUserException(token: String)
  extends InvalidTokenException("TokenApi.getUserOfToken returned null for token: " + token, token)

case class CannotGetTokenException(token: String)
  extends InvalidTokenException("TokenApi.get returned null for token: " + token, token)

case class NoTenantSpecifiedException(token: String)
  extends InvalidTokenException("Tenant is null! Token: " + token, token)
deepsense-io/seahorse-workflow-executor
commons/src/main/scala/io/deepsense/commons/auth/usercontext/TokenTranslator.scala
Scala
apache-2.0
1,418
/* * * honggfuzz - architecture dependent code (LINUX/UNWIND) * ----------------------------------------- * * Author: Robert Swiecki <swiecki@google.com> * * Copyright 2010-2015 by Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. * */ #include "../common.h" #include "unwind.h" #include <stdio.h> #include <stdlib.h> #include <string.h> #include <endian.h> #include <libunwind-ptrace.h> #include "../log.h" /* * WARNING: Ensure that _UPT-info structs are not shared between threads * http://www.nongnu.org/libunwind/man/libunwind-ptrace(3).html */ // libunwind error codes used for debugging static const char *UNW_ER[] = { "UNW_ESUCCESS", /* no error */ "UNW_EUNSPEC", /* unspecified (general) error */ "UNW_ENOMEM", /* out of memory */ "UNW_EBADREG", /* bad register number */ "UNW_EREADONLYREG", /* attempt to write read-only register */ "UNW_ESTOPUNWIND", /* stop unwinding */ "UNW_EINVALIDIP", /* invalid IP */ "UNW_EBADFRAME", /* bad frame */ "UNW_EINVAL", /* unsupported operation or bad value */ "UNW_EBADVERSION", /* unwind info has unsupported version */ "UNW_ENOINFO" /* no unwind info found */ }; typedef struct { unsigned long start; unsigned long end; char perms[6]; unsigned long offset; char dev[8]; unsigned long inode; char name[PATH_MAX]; } procMap_t; static procMap_t *arch_parsePidMaps(pid_t pid, size_t * mapsCount) { FILE *f = NULL; char fProcMaps[PATH_MAX] = { 0 }; snprintf(fProcMaps, PATH_MAX, "/proc/%d/maps", pid); if ((f = fopen(fProcMaps, "rb")) == NULL) { PLOG_E("Couldn't open '%s' - R/O mode", fProcMaps); return 0; } defer { fclose(f); }; *mapsCount = 0; procMap_t *mapsList = malloc(sizeof(procMap_t)); if (mapsList == NULL) { PLOG_W("malloc(size='%zu')", sizeof(procMap_t)); return NULL; } while (!feof(f)) { char buf[sizeof(procMap_t) + 1]; if (fgets(buf, sizeof(buf), f) == 0) { break; } mapsList[*mapsCount].name[0] = '\0'; sscanf(buf, "%lx-%lx %5s %lx %7s %ld %s", &mapsList[*mapsCount].start, &mapsList[*mapsCount].end, mapsList[*mapsCount].perms, &mapsList[*mapsCount].offset, mapsList[*mapsCount].dev, &mapsList[*mapsCount].inode, mapsList[*mapsCount].name); *mapsCount += 1; if ((mapsList = realloc(mapsList, (*mapsCount + 1) * sizeof(procMap_t))) == NULL) { PLOG_W("realloc failed (sz=%zu)", (*mapsCount + 1) * sizeof(procMap_t)); free(mapsList); return NULL; } } return mapsList; } static char *arch_searchMaps(unsigned long addr, size_t mapsCnt, procMap_t * mapsList) { for (size_t i = 0; i < mapsCnt; i++) { if (addr >= mapsList[i].start && addr <= mapsList[i].end) { return mapsList[i].name; } /* Benefit from maps being sorted by address */ if (addr < mapsList[i].start) { break; } } return NULL; } #ifndef __ANDROID__ size_t arch_unwindStack(pid_t pid, funcs_t * funcs) { size_t num_frames = 0, mapsCnt = 0; procMap_t *mapsList = arch_parsePidMaps(pid, &mapsCnt); defer { free(mapsList); }; unw_addr_space_t as = unw_create_addr_space(&_UPT_accessors, __BYTE_ORDER); if (!as) { LOG_E("[pid='%d'] unw_create_addr_space failed", pid); return num_frames; } defer { 
unw_destroy_addr_space(as); }; void *ui = _UPT_create(pid); if (ui == NULL) { LOG_E("[pid='%d'] _UPT_create failed", pid); return num_frames; } defer { _UPT_destroy(ui); }; unw_cursor_t c; int ret = unw_init_remote(&c, as, ui); if (ret < 0) { LOG_E("[pid='%d'] unw_init_remote failed (%s)", pid, UNW_ER[-ret]); return num_frames; } for (num_frames = 0; unw_step(&c) > 0 && num_frames < _HF_MAX_FUNCS; num_frames++) { unw_word_t ip; char *mapName = NULL; ret = unw_get_reg(&c, UNW_REG_IP, &ip); if (ret < 0) { LOG_E("[pid='%d'] [%zd] failed to read IP (%s)", pid, num_frames, UNW_ER[-ret]); funcs[num_frames].pc = 0; } else { funcs[num_frames].pc = (void *)ip; } if (mapsCnt > 0 && (mapName = arch_searchMaps(ip, mapsCnt, mapsList)) != NULL) { memcpy(funcs[num_frames].mapName, mapName, sizeof(funcs[num_frames].mapName)); } else { strncpy(funcs[num_frames].mapName, "UNKNOWN", sizeof(funcs[num_frames].mapName)); } } return num_frames; } #else /* !defined(__ANDROID__) */ size_t arch_unwindStack(pid_t pid, funcs_t * funcs) { size_t num_frames = 0, mapsCnt = 0; procMap_t *mapsList = arch_parsePidMaps(pid, &mapsCnt); defer { free(mapsList); } unw_addr_space_t as = unw_create_addr_space(&_UPT_accessors, __BYTE_ORDER); if (!as) { LOG_E("[pid='%d'] unw_create_addr_space failed", pid); return num_frames; } defer { unw_destroy_addr_space(as); }; struct UPT_info *ui = (struct UPT_info *)_UPT_create(pid); if (ui == NULL) { LOG_E("[pid='%d'] _UPT_create failed", pid); return num_frames; } defer { _UPT_destroy(ui); }; unw_cursor_t cursor; int ret = unw_init_remote(&cursor, as, ui); if (ret < 0) { LOG_E("[pid='%d'] unw_init_remote failed (%s)", pid, UNW_ER[-ret]); return num_frames; } do { char *mapName = NULL; unw_word_t pc = 0, offset = 0; char buf[_HF_FUNC_NAME_SZ] = { 0 }; ret = unw_get_reg(&cursor, UNW_REG_IP, &pc); if (ret < 0) { LOG_E("[pid='%d'] [%zd] failed to read IP (%s)", pid, num_frames, UNW_ER[-ret]); // We don't want to try to extract info from an arbitrary IP // TODO: Maybe abort completely (goto out)) goto skip_frame_info; } unw_proc_info_t frameInfo; ret = unw_get_proc_info(&cursor, &frameInfo); if (ret < 0) { LOG_D("[pid='%d'] [%zd] unw_get_proc_info (%s)", pid, num_frames, UNW_ER[-ret]); // Not safe to keep parsing frameInfo goto skip_frame_info; } ret = unw_get_proc_name(&cursor, buf, sizeof(buf), &offset); if (ret < 0) { LOG_D("[pid='%d'] [%zd] unw_get_proc_name() failed (%s)", pid, num_frames, UNW_ER[-ret]); buf[0] = '\0'; } skip_frame_info: // Compared to bfd, line var plays the role of offset from func_name // Reports format is adjusted accordingly to reflect in saved file funcs[num_frames].line = offset; funcs[num_frames].pc = (void *)pc; memcpy(funcs[num_frames].func, buf, sizeof(funcs[num_frames].func)); if (mapsCnt > 0 && (mapName = arch_searchMaps(pc, mapsCnt, mapsList)) != NULL) { memcpy(funcs[num_frames].mapName, mapName, sizeof(funcs[num_frames].mapName)); } else { strncpy(funcs[num_frames].mapName, "UNKNOWN", sizeof(funcs[num_frames].mapName)); } num_frames++; ret = unw_step(&cursor); } while (ret > 0 && num_frames < _HF_MAX_FUNCS); return num_frames; } #endif /* defined(__ANDROID__) */ /* * Nested loop not most efficient approach, although it's assumed that list is * usually target specific and thus small. 
*/ char *arch_btContainsSymbol(size_t symbolsListSz, char **symbolsList, size_t num_frames, funcs_t * funcs) { for (size_t frame = 0; frame < num_frames; frame++) { size_t len = strlen(funcs[frame].func); /* Try only for frames that have symbol name from backtrace */ if (strlen(funcs[frame].func) > 0) { for (size_t i = 0; i < symbolsListSz; i++) { /* Wildcard symbol string special case */ char *wOff = strchr(symbolsList[i], '*'); if (wOff) { /* Length always > 3 as checked at input file parsing step */ len = wOff - symbolsList[i] - 1; } if (strncmp(funcs[frame].func, symbolsList[i], len) == 0) { return funcs[frame].func; } } } } return NULL; }
riusksk/riufuzz
linux/unwind.c
C
apache-2.0
9,122
// Copyright 2017 Intermodalics All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #include <string> #include <jni.h> #include <glog/logging.h> #include <image_transport/publisher_plugin.h> #include <nodelet/loader.h> #include <pluginlib/class_loader.h> #include <ros/ros.h> #include "tango_helper.h" #ifdef __cplusplus extern "C" { #endif JNIEXPORT jboolean JNICALL Java_eu_intermodalics_nodelet_1manager_TangoInitializationHelper_setBinderTangoService( JNIEnv* env, jclass /*class*/, jobject binder) { return tango_helper::SetBinder(env, binder); } JNIEXPORT jboolean JNICALL Java_eu_intermodalics_nodelet_1manager_TangoInitializationHelper_isTangoVersionOk( JNIEnv* env, jclass /*class*/, jobject activity) { return tango_helper::IsTangoVersionOk(env, activity); } JNIEXPORT jint JNICALL Java_eu_intermodalics_nodelet_1manager_TangoNodeletManager_execute( JNIEnv* env, jobject /*obj*/, jstring master_uri_value, jstring host_ip_value, jstring node_name_value, jobjectArray remapping_objects_value) { const char* master_uri = env->GetStringUTFChars(master_uri_value, NULL); const char* host_ip = env->GetStringUTFChars(host_ip_value, NULL); const char* node_name = env->GetStringUTFChars(node_name_value, NULL); int argc = 3; std::string master_uri_string("__master:=" + std::string(master_uri)); std::string host_ip_string("__ip:=" + std::string(host_ip)); const std::string node_name_string(node_name); char* argv[] = {"/", &master_uri_string[0], &host_ip_string[0]}; env->ReleaseStringUTFChars(master_uri_value, master_uri); env->ReleaseStringUTFChars(host_ip_value, host_ip); env->ReleaseStringUTFChars(node_name_value, node_name); std::map<std::string, std::string> remappings; if (remapping_objects_value == NULL || env->GetArrayLength(remapping_objects_value) == 0) { LOG(INFO) << "No remapping to be done."; } else { int remappingStringCount = env->GetArrayLength(remapping_objects_value); for (int i = 0; i < remappingStringCount; ++i) { jstring remap_arg_value = (jstring) (env->GetObjectArrayElement(remapping_objects_value, i)); const char* remap_arg = env->GetStringUTFChars(remap_arg_value, NULL); // Parse remapping argument to extract old and new names. // According to ROS doc, the syntax for remapping arguments is: old_name:=new_name. // See http://wiki.ros.org/Remapping%20Arguments. std::string remap_arg_string = std::string(remap_arg); std::string delimiter = ":="; size_t delimiter_position = remap_arg_string.find(delimiter); if (delimiter_position == std::string::npos) { LOG(ERROR) << "Invalid remapping argument: " << remap_arg << ". 
The correct syntax is old_name:=new_name."; return 1; } std::string remap_old_name = remap_arg_string.substr(0, delimiter_position); remap_arg_string.erase(0, delimiter_position + delimiter.length()); std::string remap_new_name = remap_arg_string; remappings.insert(std::pair<std::string, std::string>(remap_old_name, remap_new_name)); LOG(INFO) << "Remapping " << remap_old_name << " to " << remap_new_name; env->ReleaseStringUTFChars(remap_arg_value, remap_arg); } } ros::init(argc, argv, node_name_string.c_str()); nodelet::Loader loader; std::vector<std::string> nodelet_argv; LOG(INFO) << "Start loading nodelets."; const bool result = loader.load("/tango", "tango_ros_native/TangoRosNodelet", remappings, nodelet_argv); if (!result) { LOG(ERROR) << "Problem loading Tango ROS nodelet!"; return 1; } LOG(INFO) << "Finished loading nodelets."; // Check that all necessary plugins are available. pluginlib::ClassLoader<image_transport::PublisherPlugin> image_transport_pub_loader("image_transport", "image_transport::PublisherPlugin"); if (!image_transport_pub_loader.isClassAvailable("image_transport/raw_pub")) { LOG(ERROR) << "Plugin image_transport/raw_pub is not available."; return 1; } if (!image_transport_pub_loader.isClassAvailable("image_transport/compressed_pub")) { LOG(ERROR) << "Plugin image_transport/compressed_pub is not available."; return 1; } ros::AsyncSpinner spinner(4); spinner.start(); ros::waitForShutdown(); return 0; } JNIEXPORT jint JNICALL Java_eu_intermodalics_nodelet_1manager_TangoNodeletManager_shutdown( JNIEnv* /*env*/, jobject /*obj*/) { return 0; } #ifdef __cplusplus } #endif
Intermodalics/tango_ros
TangoRosStreamer/tango_nodelet_manager/src/main/jni/jni.cc
C++
apache-2.0
5,118
#ifndef ParticlesObjectH
#define ParticlesObjectH

#include "../PS_instance.h"

extern const Fvector zero_vel;

class CParticlesObject : public CPS_Instance
{
    typedef CPS_Instance inherited;

    u32     dwLastTime;
    void    Init            (LPCSTR p_name, IRender_Sector* S, BOOL bAutoRemove);
    void    UpdateSpatial   ();

protected:
    bool    m_bLooped;      // flag: the particle system is looped
    bool    m_bStopping;    // the Stop() function has been called

protected:
    u32     mt_dt;

protected:
    virtual ~CParticlesObject  ();

public:
    CParticlesObject           (LPCSTR p_name, BOOL bAutoRemove);

    virtual bool    shedule_Needed () { return true; }
    virtual float   shedule_Scale  () { return Device.vCameraPosition.distance_to(Position()) / 200.f; }
    virtual void    shedule_Update (u32 dt);
    virtual void    renderable_Render ();

    void            PerformAllTheWork (u32 dt);
    void __stdcall  PerformAllTheWork_mt();

    Fvector&        Position       ();
    void            SetXFORM       (const Fmatrix& m);
    IC Fmatrix&     XFORM          () { return renderable.xform; }
    void            UpdateParent   (const Fmatrix& m, const Fvector& vel);

    void            play_at_pos    (const Fvector& pos, BOOL xform = FALSE);
    virtual void    Play           ();
    void            Stop           (BOOL bDefferedStop = TRUE);
    virtual BOOL    Locked         () { return mt_dt; }

    bool            IsLooped       () { return m_bLooped; }
    bool            IsAutoRemove   ();
    bool            IsPlaying      ();
    void            SetAutoRemove  (bool auto_remove);
    const shared_str Name          ();

public:
    static CParticlesObject* Create(LPCSTR p_name, BOOL bAutoRemove = TRUE)
    {
        return xr_new<CParticlesObject>(p_name, bAutoRemove);
    }
    static void Destroy(CParticlesObject*& p)
    {
        if (p) {
            p->PSI_destroy();
            p = 0;
        }
    }
};

#endif /*ParticlesObjectH*/
OLR-xray/XRay-NEW
XRay/xr_3da/xrGame/ParticlesObject.h
C
apache-2.0
1,692
/* * Dynomite - A thin, distributed replication layer for multi non-distributed storages. * Copyright (C) 2014 Netflix, Inc. */ /* * twemproxy - A fast and lightweight proxy for memcached protocol. * Copyright (C) 2011 Twitter, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "dyn_core.h" #include "dyn_server.h" #include "dyn_dnode_peer.h" struct msg * req_get(struct conn *conn) { struct msg *msg; ASSERT((conn->client && !conn->proxy) || (conn->dnode_client && !conn->dnode_server)); msg = msg_get(conn, true, conn->redis); if (msg == NULL) { conn->err = errno; } return msg; } void req_put(struct msg *msg) { struct msg *pmsg; /* peer message (response) */ ASSERT(msg->request); pmsg = msg->peer; if (pmsg != NULL) { ASSERT(!pmsg->request && pmsg->peer == msg); msg->peer = NULL; pmsg->peer = NULL; rsp_put(pmsg); } msg_tmo_delete(msg); msg_put(msg); } /* * Return true if request is done, false otherwise * * A request is done, if we received response for the given request. * A request vector is done if we received responses for all its * fragments. */ bool req_done(struct conn *conn, struct msg *msg) { struct msg *cmsg, *pmsg; /* current and previous message */ uint64_t id; /* fragment id */ uint32_t nfragment; /* # fragment */ ASSERT((conn->client && !conn->proxy) || (conn->dnode_client && !conn->dnode_server)); if (msg == NULL || !msg->done) { return false; } ASSERT(msg->request); id = msg->frag_id; if (id == 0) { return true; } if (msg->fdone) { /* request has already been marked as done */ return true; } /* check all fragments of the given request vector are done */ for (pmsg = msg, cmsg = TAILQ_PREV(msg, msg_tqh, c_tqe); cmsg != NULL && cmsg->frag_id == id; pmsg = cmsg, cmsg = TAILQ_PREV(cmsg, msg_tqh, c_tqe)) { if (!cmsg->done) { return false; } } for (pmsg = msg, cmsg = TAILQ_NEXT(msg, c_tqe); cmsg != NULL && cmsg->frag_id == id; pmsg = cmsg, cmsg = TAILQ_NEXT(cmsg, c_tqe)) { if (!cmsg->done) { return false; } } if (!pmsg->last_fragment) { return false; } /* * At this point, all the fragments including the last fragment have * been received. * * Mark all fragments of the given request vector to be done to speed up * future req_done calls for any of fragments of this request */ msg->fdone = 1; nfragment = 1; for (pmsg = msg, cmsg = TAILQ_PREV(msg, msg_tqh, c_tqe); cmsg != NULL && cmsg->frag_id == id; pmsg = cmsg, cmsg = TAILQ_PREV(cmsg, msg_tqh, c_tqe)) { cmsg->fdone = 1; nfragment++; } for (pmsg = msg, cmsg = TAILQ_NEXT(msg, c_tqe); cmsg != NULL && cmsg->frag_id == id; pmsg = cmsg, cmsg = TAILQ_NEXT(cmsg, c_tqe)) { cmsg->fdone = 1; nfragment++; } ASSERT(msg->frag_owner->nfrag == nfragment); msg->post_coalesce(msg->frag_owner); log_debug(LOG_DEBUG, "req from c %d with fid %"PRIu64" and %"PRIu32" " "fragments is done", conn->sd, id, nfragment); return true; } /* * Return true if request is in error, false otherwise * * A request is in error, if there was an error in receiving response for the * given request. 
A multiget request is in error if there was an error in * receiving response for any its fragments. */ bool req_error(struct conn *conn, struct msg *msg) { struct msg *cmsg; /* current message */ uint64_t id; uint32_t nfragment; ASSERT(msg->request && req_done(conn, msg)); if (msg->error) { return true; } id = msg->frag_id; if (id == 0) { return false; } if (msg->ferror) { /* request has already been marked to be in error */ return true; } /* check if any of the fragments of the given request are in error */ for (cmsg = TAILQ_PREV(msg, msg_tqh, c_tqe); cmsg != NULL && cmsg->frag_id == id; cmsg = TAILQ_PREV(cmsg, msg_tqh, c_tqe)) { if (cmsg->error) { goto ferror; } } for (cmsg = TAILQ_NEXT(msg, c_tqe); cmsg != NULL && cmsg->frag_id == id; cmsg = TAILQ_NEXT(cmsg, c_tqe)) { if (cmsg->error) { goto ferror; } } return false; ferror: /* * Mark all fragments of the given request to be in error to speed up * future req_error calls for any of fragments of this request */ msg->ferror = 1; nfragment = 1; for (cmsg = TAILQ_PREV(msg, msg_tqh, c_tqe); cmsg != NULL && cmsg->frag_id == id; cmsg = TAILQ_PREV(cmsg, msg_tqh, c_tqe)) { cmsg->ferror = 1; nfragment++; } for (cmsg = TAILQ_NEXT(msg, c_tqe); cmsg != NULL && cmsg->frag_id == id; cmsg = TAILQ_NEXT(cmsg, c_tqe)) { cmsg->ferror = 1; nfragment++; } log_debug(LOG_DEBUG, "req from c %d with fid %"PRIu64" and %"PRIu32" " "fragments is in error", conn->sd, id, nfragment); return true; } void req_server_enqueue_imsgq(struct context *ctx, struct conn *conn, struct msg *msg) { ASSERT(msg->request); ASSERT((!conn->client && !conn->proxy) || (!conn->dnode_client && !conn->dnode_server)); /* * timeout clock starts ticking the instant the message is enqueued into * the server in_q; the clock continues to tick until it either expires * or the message is dequeued from the server out_q * * noreply request are free from timeouts because client is not interested * in the reponse anyway! 
*/ if (!msg->noreply) { msg_tmo_insert(msg, conn); } TAILQ_INSERT_TAIL(&conn->imsg_q, msg, s_tqe); if (!conn->dyn_mode) { stats_server_incr(ctx, conn->owner, in_queue); stats_server_incr_by(ctx, conn->owner, in_queue_bytes, msg->mlen); } else { struct server_pool *pool = (struct server_pool *) array_get(&ctx->pool, 0); stats_pool_incr(ctx, pool, peer_in_queue); stats_pool_incr_by(ctx, pool, peer_in_queue_bytes, msg->mlen); } } void req_server_dequeue_imsgq(struct context *ctx, struct conn *conn, struct msg *msg) { ASSERT(msg->request); ASSERT(!conn->client && !conn->proxy); TAILQ_REMOVE(&conn->imsg_q, msg, s_tqe); stats_server_decr(ctx, conn->owner, in_queue); stats_server_decr_by(ctx, conn->owner, in_queue_bytes, msg->mlen); } void req_client_enqueue_omsgq(struct context *ctx, struct conn *conn, struct msg *msg) { ASSERT(msg->request); ASSERT(conn->client && !conn->proxy); msg->stime_in_microsec = dn_usec_now(); TAILQ_INSERT_TAIL(&conn->omsg_q, msg, c_tqe); } void req_server_enqueue_omsgq(struct context *ctx, struct conn *conn, struct msg *msg) { ASSERT(msg->request); ASSERT(!conn->client && !conn->proxy); TAILQ_INSERT_TAIL(&conn->omsg_q, msg, s_tqe); stats_server_incr(ctx, conn->owner, out_queue); stats_server_incr_by(ctx, conn->owner, out_queue_bytes, msg->mlen); } void req_client_dequeue_omsgq(struct context *ctx, struct conn *conn, struct msg *msg) { ASSERT(msg->request); ASSERT(conn->client && !conn->proxy); uint64_t latency = dn_usec_now() - msg->stime_in_microsec; stats_histo_add_latency(ctx, latency); TAILQ_REMOVE(&conn->omsg_q, msg, c_tqe); } void req_server_dequeue_omsgq(struct context *ctx, struct conn *conn, struct msg *msg) { ASSERT(msg->request); ASSERT(!conn->client && !conn->proxy); msg_tmo_delete(msg); TAILQ_REMOVE(&conn->omsg_q, msg, s_tqe); stats_server_decr(ctx, conn->owner, out_queue); stats_server_decr_by(ctx, conn->owner, out_queue_bytes, msg->mlen); } struct msg * req_recv_next(struct context *ctx, struct conn *conn, bool alloc) { struct msg *msg; ASSERT((conn->client && !conn->proxy) || (conn->dnode_client && !conn->dnode_server)); if (conn->eof) { msg = conn->rmsg; //if (conn->dyn_mode) { // if (conn->non_bytes_recv > MAX_CONN_ALLOWABLE_NON_RECV) { // conn->err = EPIPE; // return NULL; // } // conn->eof = 0; // return msg; //} /* client sent eof before sending the entire request */ if (msg != NULL) { conn->rmsg = NULL; ASSERT(msg->peer == NULL); ASSERT(msg->request && !msg->done); log_error("eof c %d discarding incomplete req %"PRIu64" len " "%"PRIu32"", conn->sd, msg->id, msg->mlen); req_put(msg); } /* * TCP half-close enables the client to terminate its half of the * connection (i.e. the client no longer sends data), but it still * is able to receive data from the proxy. 
The proxy closes its * half (by sending the second FIN) when the client has no * outstanding requests */ if (!conn->active(conn)) { conn->done = 1; log_debug(LOG_INFO, "c %d is done", conn->sd); } return NULL; } msg = conn->rmsg; if (msg != NULL) { ASSERT(msg->request); return msg; } if (!alloc) { return NULL; } msg = req_get(conn); if (msg != NULL) { conn->rmsg = msg; } return msg; } static bool req_filter(struct context *ctx, struct conn *conn, struct msg *msg) { ASSERT(conn->client && !conn->proxy); if (msg_empty(msg)) { ASSERT(conn->rmsg == NULL); log_debug(LOG_VERB, "filter empty req %"PRIu64" from c %d", msg->id, conn->sd); req_put(msg); return true; } /* * Handle "quit\r\n", which is the protocol way of doing a * passive close */ if (msg->quit) { ASSERT(conn->rmsg == NULL); log_debug(LOG_INFO, "filter quit req %"PRIu64" from c %d", msg->id, conn->sd); conn->eof = 1; conn->recv_ready = 0; req_put(msg); return true; } return false; } static void req_forward_error(struct context *ctx, struct conn *conn, struct msg *msg) { rstatus_t status; if (log_loggable(LOG_INFO)) { log_debug(LOG_INFO, "forward req %"PRIu64" len %"PRIu32" type %d from " "c %d failed: %s", msg->id, msg->mlen, msg->type, conn->sd, strerror(errno)); } msg->done = 1; msg->error = 1; msg->err = errno; /* noreply request don't expect any response */ if (msg->noreply) { req_put(msg); return; } if (req_done(conn, TAILQ_FIRST(&conn->omsg_q))) { status = event_add_out(ctx->evb, conn); if (status != DN_OK) { conn->err = errno; } } } static void req_forward_stats(struct context *ctx, struct server *server, struct msg *msg) { ASSERT(msg->request); if (msg->is_read) { stats_server_incr(ctx, server, read_requests); stats_server_incr_by(ctx, server, read_request_bytes, msg->mlen); } else { stats_server_incr(ctx, server, write_requests); stats_server_incr_by(ctx, server, write_request_bytes, msg->mlen); } } void local_req_forward(struct context *ctx, struct conn *c_conn, struct msg *msg, uint8_t *key, uint32_t keylen) { rstatus_t status; struct conn *s_conn; if (log_loggable(LOG_VVERB)) { loga("local_req_forward entering ............"); } ASSERT((c_conn->client || c_conn->dnode_client) && !c_conn->proxy && !c_conn->dnode_server); /* enqueue message (request) into client outq, if response is expected */ if (!msg->noreply) { c_conn->enqueue_outq(ctx, c_conn, msg); } s_conn = server_pool_conn(ctx, c_conn->owner, key, keylen); if (s_conn == NULL) { req_forward_error(ctx, c_conn, msg); return; } ASSERT(!s_conn->client && !s_conn->proxy); if (log_loggable(LOG_DEBUG)) { log_debug(LOG_DEBUG, "forwarding request from client conn '%s' to storage conn '%s'", dn_unresolve_peer_desc(c_conn->sd), dn_unresolve_peer_desc(s_conn->sd)); } if (ctx->dyn_state == NORMAL) { /* enqueue the message (request) into server inq */ if (TAILQ_EMPTY(&s_conn->imsg_q)) { status = event_add_out(ctx->evb, s_conn); if (status != DN_OK) { req_forward_error(ctx, c_conn, msg); s_conn->err = errno; return; } } } else if (ctx->dyn_state == STANDBY) { //no reads/writes from peers/clients log_debug(LOG_INFO, "Node is in STANDBY state. Drop write/read requests"); req_forward_error(ctx, c_conn, msg); return; } else if (ctx->dyn_state == WRITES_ONLY && msg->is_read) { //no reads from peers/clients but allow writes from peers/clients log_debug(LOG_INFO, "Node is in WRITES_ONLY state. Drop read requests"); req_forward_error(ctx, c_conn, msg); return; } else if (ctx->dyn_state == RESUMING) { log_debug(LOG_INFO, "Node is in RESUMING state. 
Still drop read requests and flush out all the queued writes"); if (msg->is_read) { req_forward_error(ctx, c_conn, msg); return; } status = event_add_out(ctx->evb, s_conn); if (status != DN_OK) { req_forward_error(ctx, c_conn, msg); s_conn->err = errno; return; } } s_conn->enqueue_inq(ctx, s_conn, msg); req_forward_stats(ctx, s_conn->owner, msg); if (log_loggable(LOG_VERB)) { log_debug(LOG_VERB, "local forward from c %d to s %d req %"PRIu64" len %"PRIu32 " type %d with key '%.*s'", c_conn->sd, s_conn->sd, msg->id, msg->mlen, msg->type, keylen, key); } } static bool request_send_to_all_racks(struct msg *msg) { msg_type_t t = msg->type; return msg->is_read? 0 : 1; } static void admin_local_req_forward(struct context *ctx, struct conn *c_conn, struct msg *msg, struct rack *rack, uint8_t *key, uint32_t keylen) { struct conn *p_conn; rstatus_t status; ASSERT(c_conn->client || c_conn->dnode_client); p_conn = dnode_peer_pool_conn(ctx, c_conn->owner, rack, key, keylen, msg->msg_type); if (p_conn == NULL) { c_conn->err = EHOSTDOWN; req_forward_error(ctx, c_conn, msg); return; } struct server *peer = p_conn->owner; struct msg *nmsg; if (peer->is_local) { //do nothing nmsg = msg_get_rsp_integer(true); c_conn->enqueue_outq(ctx, c_conn, msg); msg->peer = nmsg; nmsg->peer = msg; msg->done = 1; //msg->pre_coalesce(msg); status = event_add_out(ctx->evb, c_conn); } else { log_debug(LOG_NOTICE, "Need to delete [%.*s] ", keylen, key); local_req_forward(ctx, c_conn, msg, key, keylen); } } void remote_req_forward(struct context *ctx, struct conn *c_conn, struct msg *msg, struct rack *rack, uint8_t *key, uint32_t keylen) { struct conn *p_conn; ASSERT(c_conn->client || c_conn->dnode_client); p_conn = dnode_peer_pool_conn(ctx, c_conn->owner, rack, key, keylen, msg->msg_type); if (p_conn == NULL) { c_conn->err = EHOSTDOWN; req_forward_error(ctx, c_conn, msg); return; } //jeb - check if s_conn is _this_ node, and if so, get conn from server_pool_conn instead struct server *peer = p_conn->owner; if (peer->is_local) { local_req_forward(ctx, c_conn, msg, key, keylen); return; } else { dnode_peer_req_forward(ctx, c_conn, p_conn, msg, rack, key, keylen); } } static void req_forward(struct context *ctx, struct conn *c_conn, struct msg *msg) { struct server_pool *pool = c_conn->owner; uint8_t *key; uint32_t keylen; ASSERT(c_conn->client && !c_conn->proxy); if (msg->is_read) stats_pool_incr(ctx, pool, client_read_requests); else stats_pool_incr(ctx, pool, client_write_requests); key = NULL; keylen = 0; if (!string_empty(&pool->hash_tag)) { struct string *tag = &pool->hash_tag; uint8_t *tag_start, *tag_end; tag_start = dn_strchr(msg->key_start, msg->key_end, tag->data[0]); if (tag_start != NULL) { tag_end = dn_strchr(tag_start + 1, msg->key_end, tag->data[1]); if (tag_end != NULL) { key = tag_start + 1; keylen = (uint32_t)(tag_end - key); } } } if (keylen == 0) { key = msg->key_start; keylen = (uint32_t)(msg->key_end - msg->key_start); } // need to capture the initial mbuf location as once we add in the dynomite headers (as mbufs to the src msg), // that will bork the request sent to secondary racks struct mbuf *orig_mbuf = STAILQ_FIRST(&msg->mhdr); if (ctx->admin_opt == 1) { if (msg->type == MSG_REQ_REDIS_DEL || msg->type == MSG_REQ_MC_DELETE) { struct rack * rack = server_get_rack_by_dc_rack(pool, &pool->rack, &pool->dc); admin_local_req_forward(ctx, c_conn, msg, rack, key, keylen); return; } } if (request_send_to_all_racks(msg)) { uint32_t dc_cnt = array_n(&pool->datacenters); uint32_t dc_index; for(dc_index = 0; 
dc_index < dc_cnt; dc_index++) { struct datacenter *dc = array_get(&pool->datacenters, dc_index); if (dc == NULL) { log_error("Wow, this is very bad, dc is NULL"); return; } if (string_compare(dc->name, &pool->dc) == 0) { //send to all local racks //log_debug(LOG_DEBUG, "dc name '%.*s'", dc->name->len, dc->name->data); uint32_t rack_cnt = array_n(&dc->racks); uint32_t rack_index; for(rack_index = 0; rack_index < rack_cnt; rack_index++) { struct rack *rack = array_get(&dc->racks, rack_index); //log_debug(LOG_DEBUG, "rack name '%.*s'", rack->name->len, rack->name->data); struct msg *rack_msg; if (string_compare(rack->name, &pool->rack) == 0 ) { rack_msg = msg; } else { rack_msg = msg_get(c_conn, msg->request, msg->redis); if (rack_msg == NULL) { log_debug(LOG_VERB, "whelp, looks like yer screwed now, buddy. no inter-rack messages for you!"); continue; } msg_clone(msg, orig_mbuf, rack_msg); rack_msg->swallow = true; } if (log_loggable(LOG_DEBUG)) { log_debug(LOG_DEBUG, "forwarding request to conn '%s' on rack '%.*s'", dn_unresolve_peer_desc(c_conn->sd), rack->name->len, rack->name->data); } remote_req_forward(ctx, c_conn, rack_msg, rack, key, keylen); } } else { uint32_t rack_cnt = array_n(&dc->racks); if (rack_cnt == 0) continue; uint32_t ran_index = rand() % rack_cnt; struct rack *rack = array_get(&dc->racks, ran_index); struct msg *rack_msg = msg_get(c_conn, msg->request, msg->redis); if (rack_msg == NULL) { log_debug(LOG_VERB, "whelp, looks like yer screwed now, buddy. no inter-rack messages for you!"); msg_put(rack_msg); continue; } msg_clone(msg, orig_mbuf, rack_msg); rack_msg->swallow = true; if (log_loggable(LOG_DEBUG)) { log_debug(LOG_DEBUG, "forwarding request to conn '%s' on rack '%.*s'", dn_unresolve_peer_desc(c_conn->sd), rack->name->len, rack->name->data); } remote_req_forward(ctx, c_conn, rack_msg, rack, key, keylen); } } } else { //for read only requests struct rack * rack = server_get_rack_by_dc_rack(pool, &pool->rack, &pool->dc); remote_req_forward(ctx, c_conn, msg, rack, key, keylen); } } void req_recv_done(struct context *ctx, struct conn *conn, struct msg *msg, struct msg *nmsg) { ASSERT(conn->client && !conn->proxy); ASSERT(msg->request); ASSERT(msg->owner == conn); ASSERT(conn->rmsg == msg); ASSERT(nmsg == NULL || nmsg->request); stats_histo_add_payloadsize(ctx, msg->mlen); /* enqueue next message (request), if any */ conn->rmsg = nmsg; if (req_filter(ctx, conn, msg)) { return; } req_forward(ctx, conn, msg); } struct msg * req_send_next(struct context *ctx, struct conn *conn) { rstatus_t status; struct msg *msg, *nmsg; /* current and next message */ ASSERT((!conn->client && !conn->proxy) || (!conn->dnode_client && !conn->dnode_server)); if (conn->connecting) { if (!conn->dyn_mode && !conn->client) { server_connected(ctx, conn); } else if (conn->dyn_mode && !conn->dnode_client) { dnode_peer_connected(ctx, conn); } } nmsg = TAILQ_FIRST(&conn->imsg_q); if (nmsg == NULL) { /* nothing to send as the server inq is empty */ status = event_del_out(ctx->evb, conn); if (status != DN_OK) { conn->err = errno; } return NULL; } msg = conn->smsg; if (msg != NULL) { ASSERT(msg->request && !msg->done); nmsg = TAILQ_NEXT(msg, s_tqe); } conn->smsg = nmsg; if (nmsg == NULL) { return NULL; } ASSERT(nmsg->request && !nmsg->done); if (log_loggable(LOG_VVERB)) { log_debug(LOG_VVERB, "send next req %"PRIu64" len %"PRIu32" type %d on " "s %d", nmsg->id, nmsg->mlen, nmsg->type, conn->sd); } return nmsg; } void req_send_done(struct context *ctx, struct conn *conn, struct msg *msg) { 
ASSERT((!conn->client && !conn->proxy) || (!conn->dnode_client && !conn->dnode_server)); ASSERT(msg != NULL && conn->smsg == NULL); ASSERT(msg->request && !msg->done); //ASSERT(msg->owner == conn); if (log_loggable(LOG_VVERB)) { log_debug(LOG_VVERB, "send done req %"PRIu64" len %"PRIu32" type %d on " "s %d", msg->id, msg->mlen, msg->type, conn->sd); } /* dequeue the message (request) from server inq */ conn->dequeue_inq(ctx, conn, msg); /* * noreply request instructs the server not to send any response. So, * enqueue message (request) in server outq, if response is expected. * Otherwise, free the noreply request */ if (!msg->noreply) { conn->enqueue_outq(ctx, conn, msg); } else { if (!conn->dyn_mode && !conn->client && !conn->proxy) { //still enqueue if it is storage conn conn->enqueue_outq(ctx, conn, msg); } else { req_put(msg); } } }
fengshao0907/dynomite
src/dyn_request.c
C
apache-2.0
22,885
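The fragment bookkeeping described in the comments of dyn_request.c above (req_done and req_error caching fdone/ferror across every fragment of a request vector) can be summarized with a small sketch. It is written in Python rather than the project's C, uses hypothetical stand-in structures, and omits the last_fragment and frag_owner checks the real code performs.

class Frag(object):
    # Hypothetical stand-in for struct msg; only the fields the sketch needs.
    def __init__(self, frag_id, done=False):
        self.frag_id = frag_id   # 0 means "not fragmented"
        self.done = done         # a response has been received for this fragment
        self.fdone = False       # cached "whole request vector is done" flag

def req_done_sketch(queue, msg):
    # A request is done when its response arrived; a fragmented request is done
    # only when every fragment sharing its frag_id is done. The result is then
    # cached on each fragment so later calls short-circuit, mirroring 'fdone'.
    if not msg.done:
        return False
    if msg.frag_id == 0:
        return True
    if msg.fdone:
        return True
    group = [m for m in queue if m.frag_id == msg.frag_id]
    if not all(m.done for m in group):
        return False
    for m in group:
        m.fdone = True
    return True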
package cn.edu.fudan.se.DataExtractor.repository;

import java.util.Date;

public class IssueComment {

    private Integer id;
    private Integer repositoryId;
    private Integer issueId;
    private Integer commentId;
    private Integer authorId;
    private String authorName;
    private String content;
    private Date createdAt;
    private Date updatedAt;

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public Integer getRepositoryId() {
        return repositoryId;
    }

    public void setRepositoryId(Integer repositoryId) {
        this.repositoryId = repositoryId;
    }

    public Integer getIssueId() {
        return issueId;
    }

    public void setIssueId(Integer issueId) {
        this.issueId = issueId;
    }

    public Integer getCommentId() {
        return commentId;
    }

    public void setCommentId(Integer commentId) {
        this.commentId = commentId;
    }

    public Integer getAuthorId() {
        return authorId;
    }

    public void setAuthorId(Integer authorId) {
        this.authorId = authorId;
    }

    public String getAuthorName() {
        return authorName;
    }

    public void setAuthorName(String authorName) {
        this.authorName = authorName;
    }

    public Date getCreatedAt() {
        return createdAt;
    }

    public void setCreatedAt(Date createdAt) {
        this.createdAt = createdAt;
    }

    public Date getUpdatedAt() {
        return updatedAt;
    }

    public void setUpdatedAt(Date updatedAt) {
        this.updatedAt = updatedAt;
    }

    public String getContent() {
        return content;
    }

    public void setContent(String content) {
        this.content = content;
    }
}
SEBigData/SEDataExtractor
DataExtractor/src/main/java/cn/edu/fudan/se/DataExtractor/repository/IssueComment.java
Java
apache-2.0
1,540
var _ = require("lodash"); var Q = require("q"); var path = require("path"); var nunjucks = require("nunjucks"); var escapeStringRegexp = require("escape-string-regexp"); var git = require("./utils/git"); var fs = require("./utils/fs"); var batch = require("./utils/batch"); var pkg = require("../package.json"); var defaultBlocks = require("./blocks"); // Normalize result from a block function normBlockResult(blk) { if (_.isString(blk)) blk = { body: blk }; return blk; } // The loader should handle relative and git url var BookLoader = nunjucks.Loader.extend({ async: true, init: function(book) { this.book = book; }, getSource: function(fileurl, callback) { var that = this; git.resolveFile(fileurl) .then(function(filepath) { // Is local file if (!filepath) filepath = path.resolve(that.book.root, fileurl); else that.book.log.debug.ln("resolve from git", fileurl, "to", filepath) // Read file from absolute path return fs.readFile(filepath) .then(function(source) { return { src: source.toString(), path: filepath } }); }) .nodeify(callback); }, resolve: function(from, to) { return path.resolve(path.dirname(from), to); } }); var TemplateEngine = function(book) { this.book = book; this.log = this.book.log; // Nunjucks env this.env = new nunjucks.Environment( new BookLoader(book), { // Escaping is done after by the markdown parser autoescape: false, // Tags tags: { blockStart: '{%', blockEnd: '%}', variableStart: '{{', variableEnd: '}}', commentStart: '{###', commentEnd: '###}' } } ); // List of tags shortcuts this.shortcuts = []; // Map of blocks bodies (that requires post-processing) this.blockBodies = {}; // Map of added blocks this.blocks = {}; // Bind methods _.bindAll(this); // Add default blocks this.addBlocks(defaultBlocks); }; // Process the result of block in a context TemplateEngine.prototype.processBlock = function(blk) { blk = _.defaults(blk, { parse: false, post: undefined }); blk.id = _.uniqueId("blk"); var toAdd = (!blk.parse) || (blk.post != undefined); // Add to global map if (toAdd) this.blockBodies[blk.id] = blk; //Parsable block, just return it if (blk.parse) { return blk.body; } // Return it as a position marker return "@%@"+blk.id+"@%@"; }; // Replace position markers of blocks by body after processing // This is done to avoid that markdown/asciidoc processer parse the block content TemplateEngine.prototype.replaceBlocks = function(content) { var that = this; return content.replace(/\@\%\@([\s\S]+?)\@\%\@/g, function(match, key) { var blk = that.blockBodies[key]; if (!blk) return match; var body = blk.body; return body; }); }; // Bind a function to a context TemplateEngine.prototype.bindContext = function(func) { var that = this; return function() { var ctx = { ctx: this.ctx, book: that.book, generator: that.book.options.generator }; return func.apply(ctx, arguments); }; }; // Add filter TemplateEngine.prototype.addFilter = function(filterName, func) { try { this.env.getFilter(filterName); this.log.warn.ln("conflict in filters, '"+filterName+"' is already set"); return false; } catch(e) {} this.log.debug.ln("add filter '"+filterName+"'"); this.env.addFilter(filterName, this.bindContext(function() { var ctx = this; var args = Array.prototype.slice.apply(arguments); var callback = _.last(args); Q() .then(function() { return func.apply(ctx, args.slice(0, -1)); }) .nodeify(callback); }), true); return true; }; // Add multiple filters TemplateEngine.prototype.addFilters = function(filters) { _.each(filters, function(filter, name) { this.addFilter(name, filter); }, this); }; // 
Return nunjucks extension name of a block TemplateEngine.prototype.blockExtName = function(name) { return 'Block'+name+'Extension'; }; // Test if a block is defined TemplateEngine.prototype.hasBlock = function(name) { return this.env.hasExtension(this.blockExtName(name)); }; // Remove a block TemplateEngine.prototype.removeBlock = function(name) { if (!this.hasBlock(name)) return; // Remove nunjucks extension this.env.removeExtension(this.blockExtName(name)); // Cleanup shortcuts this.shortcuts = _.reject(this.shortcuts, { block: name }); }; // Add a block TemplateEngine.prototype.addBlock = function(name, block) { var that = this, Ext, extName; if (_.isFunction(block)) block = { process: block }; block = _.defaults(block || {}, { shortcuts: [], end: "end"+name, process: _.identity, blocks: [] }); var extName = this.blockExtName(name); if (this.hasBlock(name) && !defaultBlocks[name]) { this.log.warn.ln("conflict in blocks, '"+name+"' is already defined"); } // Cleanup previous block this.removeBlock(name); this.log.debug.ln("add block '"+name+"'"); this.blocks[name] = block; var Ext = function () { this.tags = [name]; this.parse = function(parser, nodes, lexer) { var body = null; var lastBlockName = null; var lastBlockArgs = null; var allBlocks = block.blocks.concat([block.end]); var subbodies = {}; var tok = parser.nextToken(); var args = parser.parseSignature(null, true); parser.advanceAfterBlockEnd(tok.value); while (1) { // Read body var currentBody = parser.parseUntilBlocks.apply(parser, allBlocks); // Handle body with previous block name and args if (lastBlockName) { subbodies[lastBlockName] = subbodies[lastBlockName] || []; subbodies[lastBlockName].push({ body: currentBody, args: lastBlockArgs }); } else { body = currentBody; } // Read new block lastBlockName = parser.peekToken().value; if (lastBlockName == block.end) { break; } // Parse signature and move to the end of the block lastBlockArgs = parser.parseSignature(null, true); parser.advanceAfterBlockEnd(lastBlockName); } parser.advanceAfterBlockEnd(); var bodies = [body]; _.each(block.blocks, function(blockName) { subbodies[blockName] = subbodies[blockName] || []; if (subbodies[blockName].length == 0) { subbodies[blockName].push({ args: new nodes.NodeList(), body: new nodes.NodeList() }); } bodies.push(subbodies[blockName][0].body); }); return new nodes.CallExtensionAsync(this, 'run', args, bodies); }; this.run = function(context) { var args = Array.prototype.slice.call(arguments, 1); var callback = args.pop(); // Extract blocks var blocks = args .concat([]) .slice(-block.blocks.length); // Eliminate blocks from list if (block.blocks.length > 0) args = args.slice(0, -block.blocks.length); // Extract main body and kwargs var body = args.pop(); var kwargs = _.isObject(_.last(args))? 
args.pop() : {}; // Extract blocks body var _blocks = _.map(block.blocks, function(blockName, i){ return { name: blockName, body: blocks[i]() }; }); Q() .then(function() { return that.applyBlock(name, { body: body(), args: args, kwargs: kwargs, blocks: _blocks }); }) // process the block returned .then(that.processBlock) .nodeify(callback) }; }; // Add the Extension this.env.addExtension(extName, new Ext()); // Add shortcuts if (!_.isArray(block.shortcuts)) block.shortcuts = [block.shortcuts]; _.each(block.shortcuts, function(shortcut) { this.log.debug.ln("add template shortcut from '"+shortcut.start+"' to block '"+name+"' for parsers ", shortcut.parsers); this.shortcuts.push({ block: name, parsers: shortcut.parsers, start: shortcut.start, end: shortcut.end, tag: { start: name, end: block.end } }); }, this); }; // Add multiple blocks TemplateEngine.prototype.addBlocks = function(blocks) { _.each(blocks, function(block, name) { this.addBlock(name, block); }, this); }; // Apply a block to some content // This method result depends on the type of block (async or sync) TemplateEngine.prototype.applyBlock = function(name, blk) { var func, block, func, r; block = this.blocks[name]; if (!block) throw new Error('Block not found "'+name+'"'); if (_.isString(blk)) { blk = { body: blk }; } blk = _.defaults(blk, { args: [], kwargs: {}, blocks: [] }); // Bind and call block processor func = this.bindContext(block.process); r = func.call(context, blk); if (Q.isPromise(r)) return r.then(normBlockResult); else return normBlockResult(r); }; // Apply a shortcut to a string TemplateEngine.prototype._applyShortcut = function(parser, content, shortcut) { if (!_.contains(shortcut.parsers, parser)) return content; var regex = new RegExp( escapeStringRegexp(shortcut.start) + "([\\s\\S]*?[^\\$])" + escapeStringRegexp(shortcut.end), 'g' ); return content.replace(regex, function(all, match) { return "{% "+shortcut.tag.start+" %}"+ match + "{% "+shortcut.tag.end+" %}"; }); }; // Render a string from the book TemplateEngine.prototype.renderString = function(content, context, options) { var context = _.extend({}, context, { // Variables from book.json book: this.book.options.variables, // Complete book.json config: this.book.options, // infos about gitbook gitbook: { version: pkg.version, generator: this.book.options.generator } }); options = _.defaults(options || {}, { path: null, type: null }); if (options.path) options.path = this.book.resolve(options.path); // Replace shortcuts content = _.reduce(this.shortcuts, _.partial(this._applyShortcut.bind(this), options.type), content); return Q.nfcall(this.env.renderString.bind(this.env), content, context, options) .fail(function(err) { if (_.isString(err)) err = new Error(err); err.message = err.message.replace(/^Error: /, ""); throw err; }); }; // Render a file from the book TemplateEngine.prototype.renderFile = function(filename, options) { var that = this, context; return that.book.readFile(filename) .then(function(content) { return that.renderString(content, {}, { path: filename }); }); }; // Render a page from the book TemplateEngine.prototype.renderPage = function(page) { var that = this, context; return that.book.statFile(page.path) .then(function(stat) { context = { // infos about the file file: { path: page.path, mtime: stat.mtime } }; return that.renderString(page.content, context, { path: page.path, type: page.type }); }); }; // Post process content TemplateEngine.prototype.postProcess = function(content) { var that = this; return Q(content) 
.then(that.replaceBlocks) .then(function(_content) { return batch.execEach(that.blockBodies, { max: 20, fn: function(blk, blkId) { return Q() .then(function() { if (!blk.post) return Q(); return blk.post(); }) .then(function() { delete that.blockBodies[blkId]; }); } }) .thenResolve(_content); }); }; module.exports = TemplateEngine;
tzq668766/gitbook
lib/template.js
JavaScript
apache-2.0
13,268
# adapted from zmq_server_example.py in tinyrpc

import time, sys

import zmq

from tinyrpc.protocols.jsonrpc import JSONRPCProtocol
from tinyrpc.transports.zmq import ZmqServerTransport
from tinyrpc.server import RPCServer
from tinyrpc.dispatch import RPCDispatcher


class Server(object):

    def __init__(self, req_callback):
        # print 'initializing Rpc'
        self.ctx = zmq.Context()
        self.dispatcher = RPCDispatcher()
        self.transport = ZmqServerTransport.create(self.ctx, 'tcp://127.0.0.1:8000')
        self.req_callback = req_callback
        self.rpc_server = RPCServer(
            self.transport,
            JSONRPCProtocol(),
            self.dispatcher
        )
        self.dispatcher.public(self.request)  # register this function (replacing the decorator)
        # print 'READYc: '+str(time.clock())
        # sys.exit(0)
        self.rpc_server.serve_forever()

    # def start(self):
    #     self.rpc_server.serve_forever()

    def request(self, req):
        return self.req_callback(req)
dongting/sdnac
sdnac/api/rpc.py
Python
apache-2.0
1,086
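For context, the Server above only exposes a single public request method; a minimal client-side sketch (modelled on tinyrpc's zmq_client_example) might look as follows. The import paths and the RPCClient / ZmqClientTransport API are assumptions about the installed tinyrpc version, and the endpoint simply mirrors the address hard-coded in Server.__init__.

import zmq

from tinyrpc import RPCClient
from tinyrpc.protocols.jsonrpc import JSONRPCProtocol
from tinyrpc.transports.zmq import ZmqClientTransport

ctx = zmq.Context()

# Connect to the address hard-coded in Server.__init__ above.
rpc_client = RPCClient(
    JSONRPCProtocol(),
    ZmqClientTransport.create(ctx, 'tcp://127.0.0.1:8000')
)

proxy = rpc_client.get_proxy()

# Invokes the public 'request' method registered with the dispatcher.
print(proxy.request('hello'))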
package pft.frames.marshallers;

import static org.junit.Assert.assertEquals;

import pft.frames.DataResponse;
import pft.frames.marshallers.DataResponseMarshaller;

import org.junit.Test;

import java.util.Random;

public class DataResponseMarshallerTest {

    @Test
    public void encodeShouldEqualDecode() {
        Random rand = new Random();
        {
            int identifier = rand.nextInt();
            byte[] data = new byte[4096];
            rand.nextBytes(data);
            DataResponse response = new DataResponse(identifier, 0, 4096, data);
            DataResponseMarshaller marshaller = new DataResponseMarshaller();
            assertEquals(response, marshaller.decode(identifier, marshaller.encode(response)));
        }
        {
            int identifier = rand.nextInt();
            byte[] data = new byte[127];
            rand.nextBytes(data);
            DataResponse response = new DataResponse(identifier, 809123, 127, data);
            DataResponseMarshaller marshaller = new DataResponseMarshaller();
            assertEquals(response, marshaller.decode(identifier, marshaller.encode(response)));
        }
    }
}
bhatanku1/ProtocolDesign
src/test/java/pft/frames/marshallers/DataResponseMarshallerTest.java
Java
apache-2.0
1,057
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dubbo.config.spring.beans.factory.config; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.config.BeanPostProcessor; import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; import org.springframework.beans.factory.support.DefaultListableBeanFactory; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.ImportResource; import org.springframework.context.support.GenericApplicationContext; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit.jupiter.SpringExtension; import java.util.List; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; @ExtendWith(SpringExtension.class) @ContextConfiguration(classes = DubboConfigEarlyInitializationTest.class) @ImportResource(locations = "classpath:/META-INF/spring/dubbo-config-early-initialization.xml") @DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_EACH_TEST_METHOD) public class DubboConfigEarlyInitializationTest { @Autowired private ApplicationContext applicationContext; @Test public void testDubboConfigEarlyInitializationPostProcessor() { assertTrue(applicationContext instanceof GenericApplicationContext); ConfigurableListableBeanFactory clBeanFactory = ((GenericApplicationContext) applicationContext).getBeanFactory(); assertTrue(clBeanFactory instanceof DefaultListableBeanFactory); DefaultListableBeanFactory beanFactory = (DefaultListableBeanFactory) clBeanFactory; List<BeanPostProcessor> beanPostProcessorList = beanFactory.getBeanPostProcessors(); assertEquals(beanFactory.getBeanPostProcessorCount(), beanPostProcessorList.size()); boolean containsDubboConfigEarlyInitializationPostProcessor = false; for (BeanPostProcessor beanPostProcessor : beanPostProcessorList) { if (beanPostProcessor instanceof DubboConfigEarlyRegistrationPostProcessor.DubboConfigEarlyInitializationPostProcessor) { containsDubboConfigEarlyInitializationPostProcessor = true; break; } } assertTrue(containsDubboConfigEarlyInitializationPostProcessor); } }
wuwen5/dubbo
dubbo-config/dubbo-config-spring/src/test/java/org/apache/dubbo/config/spring/beans/factory/config/DubboConfigEarlyInitializationTest.java
Java
apache-2.0
3,347
package org.ovirt.mobile.movirt.rest.dto.v4;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

import org.ovirt.mobile.movirt.util.IdHelper;

@JsonIgnoreProperties(ignoreUnknown = true)
public class Cluster extends org.ovirt.mobile.movirt.rest.dto.Cluster {

    public DataCenter data_center;

    @Override
    public org.ovirt.mobile.movirt.model.Cluster toEntity(String accountId) {
        org.ovirt.mobile.movirt.model.Cluster cluster = super.toEntity(accountId);
        cluster.setDataCenterId(IdHelper.combinedIdSafe(accountId, data_center));
        return cluster;
    }
}
matobet/moVirt
moVirt/src/main/java/org/ovirt/mobile/movirt/rest/dto/v4/Cluster.java
Java
apache-2.0
598
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

public class City
{
    //Attributes
    public int id;
    public string name;
    public bool capital;

    public City(int id, string name, bool isCapital)
    {
        this.id = id;
        this.name = name;
        capital = this.isCapital(isCapital);
    }

    public City()
    {
        id = -1;
    }

    //Getters
    public int getID()
    {
        return id;
    }

    public string getName()
    {
        return name;
    }

    //Setters
    public void setID(int id)
    {
        this.id = id;
    }

    public void setName(string name)
    {
        this.name = name;
    }

    //Issers xD
    public bool isCapital(bool yesno)
    {
        return yesno;
    }
}
Kuraikari/Modern-Times
Modern Time (J)RPG/Assets/Scripts/Economy System/Countries/Cities/City.cs
C#
apache-2.0
736
/* * Copyright 2016 OPEN TONE Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package jp.co.opentone.bsol.linkbinder.view.servlet; import static org.junit.Assert.*; import java.io.File; import java.io.FileNotFoundException; import java.io.OutputStream; import java.io.PrintWriter; import java.io.UnsupportedEncodingException; import java.io.Writer; /** * @author opentone */ public class MockPrintWriter extends PrintWriter { private static int RET_TESTCASE; /** * @param out */ public MockPrintWriter(Writer out) { super(out); } /** * @param out */ public MockPrintWriter(OutputStream out) { super(out); } /** * @param fileName * @throws FileNotFoundException */ public MockPrintWriter(String fileName) throws FileNotFoundException { super(fileName); MockPrintWriter.RET_TESTCASE = Integer.valueOf(fileName); } /** * @param file * @throws FileNotFoundException */ public MockPrintWriter(File file) throws FileNotFoundException { super(file); } /** * @param out * @param autoFlush */ public MockPrintWriter(Writer out, boolean autoFlush) { super(out, autoFlush); } /** * @param out * @param autoFlush */ public MockPrintWriter(OutputStream out, boolean autoFlush) { super(out, autoFlush); } /** * @param fileName * @param csn * @throws FileNotFoundException * @throws UnsupportedEncodingException */ public MockPrintWriter(String fileName, String csn) throws FileNotFoundException, UnsupportedEncodingException { super(fileName, csn); } /** * @param file * @param csn * @throws FileNotFoundException * @throws UnsupportedEncodingException */ public MockPrintWriter(File file, String csn) throws FileNotFoundException, UnsupportedEncodingException { super(file, csn); } /** * */ @Override public void println(String statement) { switch (RET_TESTCASE) { case 1: assertEquals("paramerter error : maked time", statement); break; case 2: assertEquals("paramerter error : hash code", statement); break; case 3: assertEquals("paramerter error : no value", statement); break; case 4: assertEquals("paramerter error : no value", statement); break; case 5: assertEquals("paramerter error : no value", statement); break; case 6: assertEquals("paramerter error : no value", statement); break; } } /** * */ @Override public void println(int i) { switch (RET_TESTCASE) { case 7: assertEquals(i,0); break; case 8: assertEquals(i,0); break; } } }
otsecbsol/linkbinder
linkbinder-web/src/test/java/jp/co/opentone/bsol/linkbinder/view/servlet/MockPrintWriter.java
Java
apache-2.0
3,524
# Copyright (c) 2014 Alcatel-Lucent Enterprise # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from nw.providers.Provider import Provider import subprocess import re from logging import getLogger # /!\ Warning: this Provider uses the ping system command and has been designed for Linux (Debian Wheezy). # List of data the Ping Provider can return (set in Provider's config field 'requested_data'). # If the Provider is configured with another requested_data, an exception is raised. # If no requested_data is configured for Ping Provider, status is used by default. _data_available = [ 'status', # returns the status code (integer) of ping command execution: 0 = success, other = error occurred 'ping_response', # returns the whole std output of ping command (string) 'pkt_transmitted', # returns the number of packets transmitted (integer) (extracted from stdout of ping command using a regex) 'pkt_received', # returns the number of packets received (integer) (extracted from stdout of ping command using a regex) 'pkt_loss', # returns the number of packets loss (integer) (extracted from stdout of ping command using a regex) 'ping_avg', # returns the average ping time (in ms) (float) (extracted from stdout of ping command using a regex) 'ping_min', # returns the min ping time (in ms) (float) (extracted from stdout of ping command using a regex) 'ping_max' # returns the max ping time (in ms) (float) (extracted from stdout of ping command using a regex) ] class Ping(Provider): # Overload _mandatory_parameters and _optional_parameters to list the parameters required by HttpRequest provider _mandatory_parameters = [ 'ping_addr' # IP address or hostname of the machine to ping ] _optional_parameters = [ 'requested_data', # (string) Requested data (default is 'status' which returns the status code of ping command execution). See _data_available for available options. 'count', # (integer) -c option of ping: Stop after sending (and receiving) count ECHO_RESPONSE packets. If not defined, default value is 1. 'timeout' # (integer) -W option of ping: Time to wait for a response, in seconds. The option affects only timeout in absense of any responses, otherwise ping waits for two RTTs. 
] def __init__(self, options): Provider.__init__(self, options) # Build ping command self.ping_cmd = "ping" # Add -c option if not self._config.get('count'): getLogger(__name__).info('Option "count" is not provided to provider Ping, use default value (1)') self.count = 1 else: self.count = self._config.get('count') self.ping_cmd += " -c " + str(self.count) # Add -W option if requested if self._config.get('timeout'): self.ping_cmd += " -W " + str(self._config.get('timeout')) # Add ping address self.ping_cmd += " " + self._config.get('ping_addr') # Load requested data (default is 'status') self.requested_data = self._config.get('requested_data') or "status" def process(self): if (self.requested_data == "status"): return self._getPingStatus() else: # TODO: better management of ping errors try: ping_data = self._performPing() except: return None # Ping error # Return the requested data if (self.requested_data == "ping_response"): return ping_data.ping_response if (self.requested_data == "pkt_transmitted"): return ping_data.pkt_transmitted if (self.requested_data == "pkt_received"): return ping_data.pkt_received elif (self.requested_data == "pkt_loss"): return ping_data.pkt_loss if (self.requested_data == "ping_avg"): return ping_data.ping_avg if (self.requested_data == "ping_min"): return ping_data.ping_min if (self.requested_data == "ping_max"): return ping_data.ping_max # Simply execute ping command to retrieve the command's returned code def _getPingStatus(self): getLogger(__name__).debug('Call ping command with the following options: ' + self.ping_cmd) returncode = subprocess.call(self.ping_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) getLogger(__name__).debug('Ping command returned status code: ' + str(returncode)) return returncode # Execute ping command and returned a PingData object in case of success def _performPing(self): getLogger(__name__).debug('Call ping command with the following options: ' + self.ping_cmd) (output, error) = subprocess.Popen(self.ping_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True).communicate() if output: getLogger(__name__).debug('Ping command returned: ' + output) return PingData(output) else: getLogger(__name__).debug('Ping error: ' + error) raise Exception(error) # This function is called by __init__ of the abstract Provider class, it verify during the object initialization if the Provider' configuration is valid. def _isConfigValid(self): Provider._isConfigValid(self) # If requested_data is provided, check if it is managed by Ping provider if self._config.get('requested_data') and not (self._config.get('requested_data') in _data_available): getLogger(__name__).error('Parameter requested_data "' + self._config.get('requested_data') + '" provided to provider Ping is not allowed. Allowed conditions are: ' + str(_data_available)) return False return True class PingData: """ Class extracting ping statistics data using regexps on ping command response. /!\ Warning: regexp used to extract information applies on string returned by ping command on Linux (tested on Debian Wheezy). 
Extracted data are: - ping_response = the whole output of ping command - pkt_transmitted = number of packets transmitted (integer) - pkt_received = number of packets received (integer) - pkt_loss = packet loss rate in percentage (float) - ping_min = ping minimum response time in milliseconds (float) - ping_avg = ping average response time in milliseconds (float) - ping_max = ping maximum response time in milliseconds (float) - ping_stdev = standard deviation of ping response time in milliseconds (float) """ def __init__(self, ping_response): if not ping_response: raise Exception("Can't create PingData object without ping response data") self.ping_response = ping_response # Extract packets data from statistics section of Ping response result = re.search('(?P<pkt_transmitted>\d)\spackets\stransmitted,\s(?P<pkt_received>\d)?\s?\w*\sreceived,\s(?P<pkt_loss>[\d]*?\.?[\d]*)\%\spacket\sloss', self.ping_response) self.pkt_transmitted = int(result.group('pkt_transmitted')) self.pkt_received = int(result.group('pkt_received')) self.pkt_loss = float(result.group('pkt_loss')) # Extract time stats from statistics section of Ping response result = re.search('min\/avg\/max\/\w*\s=\s(?P<ping_min>[\d]*\.[\d]*)\/(?P<ping_avg>[\d]*\.[\d]*)\/(?P<ping_max>[\d]*\.[\d]*)\/(?P<ping_stddev>[\d]*\.[\d]*)', self.ping_response) self.ping_min = float(result.group('ping_min')) self.ping_avg = float(result.group('ping_avg')) self.ping_max = float(result.group('ping_max')) self.ping_stddev = float(result.group('ping_stddev'))
OpenTouch/night-watch
src/nw/providers/Ping.py
Python
apache-2.0
8,878
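A minimal usage sketch for the Ping provider record above. The option keys come straight from the provider's `_mandatory_parameters` and `_optional_parameters` lists, but the exact way the options dictionary is wired into `self._config` by the `Provider` base class, and the `nw.providers.Ping` module path, are assumptions inferred from the file path:

```python
# Hypothetical driver for the Ping provider above; option keys are taken from
# _mandatory_parameters / _optional_parameters, the rest is assumed.
from nw.providers.Ping import Ping  # assumed module path, mirroring src/nw/providers/Ping.py

options = {
    'ping_addr': '127.0.0.1',      # mandatory: IP address or hostname to ping
    'requested_data': 'ping_avg',  # one of the entries in _data_available
    'count': 3,                    # -c option of ping
    'timeout': 2,                  # -W option of ping
}

provider = Ping(options)
result = provider.process()        # average RTT in ms (float), or None on ping error
print(result)
```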
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc --> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>Uses of Class com.google.zxing.datamatrix.detector.Detector (ZXing 3.3.2 API)</title> <link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Class com.google.zxing.datamatrix.detector.Detector (ZXing 3.3.2 API)"; } } catch(err) { } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../../com/google/zxing/datamatrix/detector/Detector.html" title="class in com.google.zxing.datamatrix.detector">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../index-all.html">Index</a></li> <li><a href="../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../../index.html?com/google/zxing/datamatrix/detector/class-use/Detector.html" target="_top">Frames</a></li> <li><a href="Detector.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Class com.google.zxing.datamatrix.detector.Detector" class="title">Uses of Class<br>com.google.zxing.datamatrix.detector.Detector</h2> </div> <div class="classUseContainer">No usage of com.google.zxing.datamatrix.detector.Detector</div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../../com/google/zxing/datamatrix/detector/Detector.html" title="class in com.google.zxing.datamatrix.detector">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../../deprecated-list.html">Deprecated</a></li> <li><a 
href="../../../../../../index-all.html">Index</a></li> <li><a href="../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../../index.html?com/google/zxing/datamatrix/detector/class-use/Detector.html" target="_top">Frames</a></li> <li><a href="Detector.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &#169; 2007&#x2013;2018. All rights reserved.</small></p> </body> </html>
l-dobrev/zxing
docs/apidocs/com/google/zxing/datamatrix/detector/class-use/Detector.html
HTML
apache-2.0
4,620
/* Copyright 2017 Google Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.api.codegen.transformer.py; import com.google.api.codegen.ReleaseLevel; import com.google.api.codegen.ServiceMessages; import com.google.api.codegen.config.FieldConfig; import com.google.api.codegen.config.GapicInterfaceConfig; import com.google.api.codegen.config.GapicMethodConfig; import com.google.api.codegen.config.InterfaceConfig; import com.google.api.codegen.config.SingleResourceNameConfig; import com.google.api.codegen.config.VisibilityConfig; import com.google.api.codegen.metacode.InitFieldConfig; import com.google.api.codegen.transformer.GapicInterfaceContext; import com.google.api.codegen.transformer.ModelTypeFormatterImpl; import com.google.api.codegen.transformer.ModelTypeTable; import com.google.api.codegen.transformer.SurfaceNamer; import com.google.api.codegen.transformer.Synchronicity; import com.google.api.codegen.util.Name; import com.google.api.codegen.util.SymbolTable; import com.google.api.codegen.util.TypeName; import com.google.api.codegen.util.VersionMatcher; import com.google.api.codegen.util.py.PythonCommentReformatter; import com.google.api.codegen.util.py.PythonDocstringUtil; import com.google.api.codegen.util.py.PythonNameFormatter; import com.google.api.codegen.util.py.PythonTypeTable; import com.google.api.tools.framework.model.EnumType; import com.google.api.tools.framework.model.Interface; import com.google.api.tools.framework.model.MessageType; import com.google.api.tools.framework.model.Method; import com.google.api.tools.framework.model.ProtoFile; import com.google.api.tools.framework.model.TypeRef; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import java.io.File; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.regex.Matcher; /** The SurfaceNamer for Python. 
*/ public class PythonSurfaceNamer extends SurfaceNamer { public PythonSurfaceNamer(String packageName) { super( new PythonNameFormatter(), new ModelTypeFormatterImpl(new PythonModelTypeNameConverter(packageName)), new PythonTypeTable(packageName), new PythonCommentReformatter(), packageName, packageName); } @Override public SurfaceNamer cloneWithPackageName(String packageName) { return new PythonSurfaceNamer(packageName); } @Override public String getServicePhraseName(Interface apiInterface) { return apiInterface.getParent().getFullName() + " " + apiInterface.getSimpleName() + " API"; } @Override public String getApiWrapperClassConstructorName(Interface apiInterface) { return getApiWrapperClassName(apiInterface.getSimpleName()); } @Override public String getApiWrapperModuleName() { String namespace = getVersionedDirectoryNamespace(); return namespace.substring(namespace.lastIndexOf('.') + 1); } @Override public String getTopLevelNamespace() { String namespace = getVersionedDirectoryNamespace(); if (namespace.lastIndexOf('.') > -1) { return namespace.substring(0, namespace.lastIndexOf('.')); } return ""; } @Override public String getVersionedDirectoryNamespace() { String namespace = getPackageName(); return namespace.substring(0, namespace.lastIndexOf('.')); } @Override public String getGapicPackageName(String configPackageName) { List<String> parts = Arrays.asList(configPackageName.split("-")); if (VersionMatcher.isVersion(parts.get(parts.size() - 1))) { return Joiner.on("-").join(parts.subList(0, parts.size() - 1)); } return configPackageName; } @Override public String getFormattedVariableName(Name identifier) { return localVarName(identifier); } @Override public String getRequestVariableName(Method method) { return method.getRequestStreaming() ? 
"requests" : "request"; } @Override public String getApiWrapperClassName(InterfaceConfig interfaceConfig) { return getApiWrapperClassName(getInterfaceName(interfaceConfig)); } private String getApiWrapperClassName(String interfaceName) { return publicClassName(Name.upperCamelKeepUpperAcronyms(interfaceName, "Client")); } @Override public String getFullyQualifiedApiWrapperClassName(GapicInterfaceConfig interfaceConfig) { return Joiner.on(".") .join(getVersionedDirectoryNamespace(), getApiWrapperClassName(interfaceConfig)); } @Override public String getMessageTypeName(ModelTypeTable typeTable, MessageType message) { return publicClassName(Name.upperCamel(message.getSimpleName())); } @Override public String getEnumTypeName(ModelTypeTable typeTable, EnumType enumType) { return publicClassName(Name.upperCamel(enumType.getSimpleName())); } @Override public String getRequestTypeName(ModelTypeTable typeTable, TypeRef type) { return typeTable.getAndSaveNicknameFor(type); } @Override public String getLongRunningOperationTypeName(ModelTypeTable typeTable, TypeRef type) { return typeTable.getAndSaveNicknameFor(type); } @Override public String getParamTypeName(ModelTypeTable typeTable, TypeRef type) { if (type.isMap()) { TypeName mapTypeName = new TypeName("dict"); TypeName keyTypeName = new TypeName(getParamTypeNameForElementType(type.getMapKeyField().getType())); TypeName valueTypeName = new TypeName(getParamTypeNameForElementType(type.getMapValueField().getType())); return new TypeName( mapTypeName.getFullName(), mapTypeName.getNickname(), "%s[%i -> %i]", keyTypeName, valueTypeName) .getFullName(); } if (type.isRepeated()) { TypeName listTypeName = new TypeName("list"); TypeName elementTypeName = new TypeName(getParamTypeNameForElementType(type)); return new TypeName( listTypeName.getFullName(), listTypeName.getNickname(), "%s[%i]", elementTypeName) .getFullName(); } return getParamTypeNameForElementType(type); } @Override public String getAndSavePagedResponseTypeName( Method method, ModelTypeTable typeTable, FieldConfig resourcesFieldConfig) { return typeTable.getAndSaveNicknameFor(method.getOutputType()); } private String getParamTypeNameForElementType(TypeRef type) { String typeName = getModelTypeFormatter().getFullNameForElementType(type); if (type.isMessage() || type.isEnum()) { typeName = PythonDocstringUtil.napoleonType(typeName, getVersionedDirectoryNamespace()); } if (type.isMessage()) { return "Union[dict, " + typeName + "]"; } if (type.isEnum()) { return typeName; } return typeName; } private String getResponseTypeNameForElementType(TypeRef type) { if (type.isMessage()) { String typeName = getModelTypeFormatter().getFullNameForElementType(type); return PythonDocstringUtil.napoleonType(typeName, getVersionedDirectoryNamespace()); } return getParamTypeNameForElementType(type); } @Override public String getPathTemplateName( Interface service, SingleResourceNameConfig resourceNameConfig) { return "_" + inittedConstantName(Name.from(resourceNameConfig.getEntityName(), "path", "template")); } @Override public String getFormatFunctionName( Interface apiInterface, SingleResourceNameConfig resourceNameConfig) { return staticFunctionName(Name.from(resourceNameConfig.getEntityName(), "path")); } @Override public String getParseFunctionName(String var, SingleResourceNameConfig resourceNameConfig) { return staticFunctionName( Name.from("match", var, "from", resourceNameConfig.getEntityName(), "name")); } @Override public String getGrpcClientTypeName(Interface apiInterface) { String fullName = 
getModelTypeFormatter().getFullNameFor(apiInterface) + "Stub"; return getTypeNameConverter().getTypeName(fullName).getNickname(); } @Override public String getClientConfigPath(Interface apiInterface) { return String.format("%s.%s", getPackageName(), getClientConfigName(apiInterface)); } @Override public String getClientConfigName(Interface apiInterface) { return classFileNameBase(Name.upperCamel(apiInterface.getSimpleName()).join("client_config")); } @Override public List<String> getThrowsDocLines(GapicMethodConfig methodConfig) { ImmutableList.Builder<String> lines = ImmutableList.builder(); lines.add(":exc:`google.gax.errors.GaxError` if the RPC is aborted."); if (hasParams(methodConfig)) { lines.add(":exc:`ValueError` if the parameters are invalid."); } return lines.build(); } private boolean hasParams(GapicMethodConfig methodConfig) { if (!Iterables.isEmpty(methodConfig.getRequiredFieldConfigs())) { return true; } int optionalParamCount = Iterables.size(methodConfig.getOptionalFieldConfigs()); // Must have at least one parameter that is not the page token parameter. return optionalParamCount > (methodConfig.getPageStreaming() == null ? 0 : 1); } @Override public List<String> getReturnDocLines( GapicInterfaceContext context, GapicMethodConfig methodConfig, Synchronicity synchronicity) { TypeRef outputType = methodConfig.getMethod().getOutputType(); if (ServiceMessages.s_isEmptyType(outputType)) { return ImmutableList.<String>of(); } String returnTypeName = methodConfig.isLongRunningOperation() ? "google.gax._OperationFuture" : getModelTypeFormatter().getFullNameFor(outputType); String classInfo = PythonDocstringUtil.napoleonType(returnTypeName, getVersionedDirectoryNamespace()); if (methodConfig.getMethod().getResponseStreaming()) { return ImmutableList.of("Iterable[" + classInfo + "]."); } if (methodConfig.isPageStreaming()) { TypeRef resourceType = methodConfig.getPageStreaming().getResourcesField().getType(); return ImmutableList.of( "A :class:`~google.gax.PageIterator` instance. 
By default, this", "is an iterable of " + annotateWithClass(getResponseTypeNameForElementType(resourceType)) + " instances.", "This object can also be configured to iterate over the pages", "of the response through the `options` parameter."); } return ImmutableList.of(String.format("A %s instance.", annotateWithClass(classInfo))); } private String annotateWithClass(String maybeClassWrappedType) { if (maybeClassWrappedType.startsWith(":class:")) { return maybeClassWrappedType; } return String.format(":class:`%s`", maybeClassWrappedType); } @Override public String getSourceFilePath(String path, String publicClassName) { return path + File.separator + classFileNameBase(Name.upperCamel(publicClassName)) + ".py"; } @Override public String getLroApiMethodName(Method method, VisibilityConfig visibility) { return getApiMethodName(method, visibility); } @Override public String getGrpcStubCallString(Interface apiInterface, Method method) { return getGrpcMethodName(method); } @Override public String getFieldGetFunctionName(TypeRef type, Name identifier) { return publicFieldName(identifier); } @Override public String getProtoFileName(ProtoFile file) { String protoFilename = file.getSimpleName(); return protoFilename.substring(0, protoFilename.lastIndexOf('.')) + ".py"; } @Override public String getUnitTestClassName(GapicInterfaceConfig interfaceConfig) { return publicClassName( Name.upperCamelKeepUpperAcronyms("Test", getInterfaceName(interfaceConfig), "Client")); } @Override public String getSmokeTestClassName(GapicInterfaceConfig interfaceConfig) { return publicClassName( Name.upperCamelKeepUpperAcronyms("Test", "System", getInterfaceName(interfaceConfig))); } @Override public String getTestPackageName() { return getPackageName(); } @Override public String getTestCaseName(SymbolTable symbolTable, Method method) { Name testCaseName = symbolTable.getNewSymbol(Name.upperCamel("Test", method.getSimpleName())); return publicMethodName(testCaseName); } @Override public String getExceptionTestCaseName(SymbolTable symbolTable, Method method) { Name testCaseName = symbolTable.getNewSymbol(Name.upperCamel("Test", method.getSimpleName(), "Exception")); return publicMethodName(testCaseName); } @Override public String injectRandomStringGeneratorCode(String randomString) { Matcher m = InitFieldConfig.RANDOM_TOKEN_PATTERN.matcher(randomString); StringBuffer sb = new StringBuffer(); List<String> stringParts = new ArrayList<>(); while (m.find()) { m.appendReplacement(sb, "{" + stringParts.size() + "}"); stringParts.add("time.time()"); } m.appendTail(sb); if (!stringParts.isEmpty()) { sb.append(".format(").append(Joiner.on(", ").join(stringParts)).append(")"); } return sb.toString(); } @Override public String quoted(String text) { return "'" + text + "'"; } /** * Somewhat misleadingly named; in the Python case, this converts the ReleaseLevel to a Trove * classifier, rather than an annotation. */ @Override public String getReleaseAnnotation(ReleaseLevel releaseLevel) { switch (releaseLevel) { case UNSET_RELEASE_LEVEL: // fallthrough case ALPHA: return "3 - Alpha"; case BETA: return "4 - Beta"; case GA: return "5 - Production/Stable"; case DEPRECATED: return "7 - Inactive"; default: throw new IllegalStateException("Invalid development status"); } } }
shinfan/toolkit
src/main/java/com/google/api/codegen/transformer/py/PythonSurfaceNamer.java
Java
apache-2.0
14,379
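The `getReleaseAnnotation` switch at the end of `PythonSurfaceNamer` maps a release level to a Python "Development Status" trove classifier. The same mapping restated as a small Python lookup, purely for illustration; the key names mirror the Java `ReleaseLevel` constants in the source, and treating `UNSET_RELEASE_LEVEL` like `ALPHA` follows the fallthrough in the switch:

```python
# Illustrative re-statement of PythonSurfaceNamer.getReleaseAnnotation as a dict lookup.
TROVE_CLASSIFIERS = {
    "UNSET_RELEASE_LEVEL": "3 - Alpha",   # fallthrough to ALPHA in the Java switch
    "ALPHA": "3 - Alpha",
    "BETA": "4 - Beta",
    "GA": "5 - Production/Stable",
    "DEPRECATED": "7 - Inactive",
}

def release_annotation(release_level: str) -> str:
    try:
        return TROVE_CLASSIFIERS[release_level]
    except KeyError:
        raise ValueError("Invalid development status") from None

print(release_annotation("BETA"))  # 4 - Beta
```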
/**
 * Autogenerated by Thrift for src/module.thrift
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated @nocommit
 */
#pragma once

#include "thrift/compiler/test/fixtures/fatal/gen-cpp2/module_fatal_types.h"

namespace test_cpp2 {
namespace cpp_reflection {

}} // test_cpp2::cpp_reflection
facebook/fbthrift
thrift/compiler/test/fixtures/fatal/gen-cpp2/module_fatal_constant.h
C
apache-2.0
327
// Copyright 2013 Matthew Baird // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package core import ( "encoding/json" "fmt" "github.com/splicers/elastigo/api" ) // MoreLikeThis allows the caller to get documents that are “like” a specified document. // http://www.elasticsearch.org/guide/reference/api/more-like-this.html func MoreLikeThis(index string, _type string, id string, args map[string]interface{}, query MoreLikeThisQuery) (api.BaseResponse, error) { var url string var retval api.BaseResponse url = fmt.Sprintf("/%s/%s/%s/_mlt", index, _type, id) body, err := api.DoCommand("GET", url, args, query) if err != nil { return retval, err } if err == nil { // marshall into json jsonErr := json.Unmarshal(body, &retval) if jsonErr != nil { return retval, jsonErr } } return retval, err } type MoreLikeThisQuery struct { MoreLikeThis MLT `json:"more_like_this"` } type MLT struct { Fields []string `json:"fields"` LikeText string `json:"like_text"` PercentTermsToMatch float32 `json:"percent_terms_to_match"` MinTermFrequency int `json:"min_term_freq"` MaxQueryTerms int `json:"max_query_terms"` StopWords []string `json:"stop_words"` MinDocFrequency int `json:"min_doc_freq"` MaxDocFrequency int `json:"max_doc_freq"` MinWordLength int `json:"min_word_len"` MaxWordLength int `json:"max_word_len"` BoostTerms int `json:"boost_terms"` Boost float32 `json:"boost"` Analyzer string `json:"analyzer"` }
PuerkitoBio/elastigo
core/moreLikeThis.go
GO
apache-2.0
2,093
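The `MoreLikeThisQuery`/`MLT` structs above serialize to the JSON body sent to the legacy Elasticsearch `_mlt` endpoint referenced in the comment. A rough Python sketch of building the same request, using only fields defined by the Go struct tags; the host, index, type, id and field values are placeholders, and the actual HTTP call is left commented out because it needs a running Elasticsearch node:

```python
import json
import urllib.request

# Query body mirroring the Go MoreLikeThisQuery / MLT structs (JSON tags).
query = {
    "more_like_this": {
        "fields": ["title", "body"],       # placeholder field names
        "like_text": "some example text",  # placeholder text
        "min_term_freq": 1,
        "max_query_terms": 25,
    }
}

# Legacy endpoint layout used by the Go helper: /{index}/{type}/{id}/_mlt
url = "http://localhost:9200/myindex/mytype/1/_mlt"  # placeholder host/index/type/id

req = urllib.request.Request(
    url,
    data=json.dumps(query).encode("utf-8"),
    headers={"Content-Type": "application/json"},
    method="GET",
)
# response = urllib.request.urlopen(req)  # requires a reachable Elasticsearch node
# print(json.loads(response.read()))
print(json.dumps(query, indent=2))
```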
## How to extract csv from pdf and fill database in sql server with pdf extractor sdk in VB.NET using ByteScout Premium Suite ### Continuous learning is a crucial part of computer science and this tutorial shows how to extract csv from pdf and fill database in sql server with pdf extractor sdk in VB.NET The sample source code below will teach you how to extract csv from pdf and fill database in sql server with pdf extractor sdk in VB.NET. ByteScout Premium Suite: the set that includes 12 SDK products from ByteScout including tools and components for PDF, barcodes, spreadsheets, screen video recording. It can extract csv from pdf and fill database in sql server with pdf extractor sdk in VB.NET. These VB.NET code samples for VB.NET guide developers to speed up coding of the application when using ByteScout Premium Suite. This VB.NET sample code is all you need for your app. Just copy and paste the code, add references (if needs to) and you are all set! Enjoy writing a code with ready-to-use sample codes in VB.NET. ByteScout Premium Suite free trial version is available on our website. VB.NET and other programming languages are supported. ## REQUEST FREE TECH SUPPORT [Click here to get in touch](https://bytescout.zendesk.com/hc/en-us/requests/new?subject=ByteScout%20Premium%20Suite%20Question) or just send email to [support@bytescout.com](mailto:support@bytescout.com?subject=ByteScout%20Premium%20Suite%20Question) ## ON-PREMISE OFFLINE SDK [Get Your 60 Day Free Trial](https://bytescout.com/download/web-installer?utm_source=github-readme) [Explore SDK Docs](https://bytescout.com/documentation/index.html?utm_source=github-readme) [Sign Up For Online Training](https://academy.bytescout.com/) ## ON-DEMAND REST WEB API [Get your API key](https://pdf.co/documentation/api?utm_source=github-readme) [Explore Web API Documentation](https://pdf.co/documentation/api?utm_source=github-readme) [Explore Web API Samples](https://github.com/bytescout/ByteScout-SDK-SourceCode/tree/master/PDF.co%20Web%20API) ## VIDEO REVIEW [https://www.youtube.com/watch?v=NEwNs2b9YN8](https://www.youtube.com/watch?v=NEwNs2b9YN8) <!-- code block begin --> ##### ****ExtractCsvAndFillDatabase.vbproj:** ``` <?xml version="1.0" encoding="utf-8"?> <Project ToolsVersion="15.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> <Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" /> <PropertyGroup> <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration> <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform> <ProjectGuid>{1A9B710D-F645-41B2-8F9F-50A78416F161}</ProjectGuid> <OutputType>Exe</OutputType> <StartupObject>ExtractCsvAndFillDatabase.Program</StartupObject> <RootNamespace>ExtractCsvAndFillDatabase</RootNamespace> <AssemblyName>ExtractCsvAndFillDatabase</AssemblyName> <FileAlignment>512</FileAlignment> <MyType>Console</MyType> <TargetFrameworkVersion>v2.0</TargetFrameworkVersion> </PropertyGroup> <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' "> <PlatformTarget>AnyCPU</PlatformTarget> <DebugSymbols>true</DebugSymbols> <DebugType>full</DebugType> <DefineDebug>true</DefineDebug> <DefineTrace>true</DefineTrace> <OutputPath>bin\Debug\</OutputPath> <NoWarn>42016,41999,42017,42018,42019,42032,42036,42020,42021,42022</NoWarn> </PropertyGroup> <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' "> 
<PlatformTarget>AnyCPU</PlatformTarget> <DebugType>pdbonly</DebugType> <DefineDebug>false</DefineDebug> <DefineTrace>true</DefineTrace> <Optimize>true</Optimize> <OutputPath>bin\Release\</OutputPath> <NoWarn>42016,41999,42017,42018,42019,42032,42036,42020,42021,42022</NoWarn> </PropertyGroup> <PropertyGroup> <OptionExplicit>On</OptionExplicit> </PropertyGroup> <PropertyGroup> <OptionCompare>Binary</OptionCompare> </PropertyGroup> <PropertyGroup> <OptionStrict>Off</OptionStrict> </PropertyGroup> <PropertyGroup> <OptionInfer>On</OptionInfer> </PropertyGroup> <ItemGroup> <Reference Include="Bytescout.PDFExtractor, Version=9.1.0.3170, Culture=neutral, PublicKeyToken=f7dd1bd9d40a50eb, processorArchitecture=MSIL"> <SpecificVersion>False</SpecificVersion> <HintPath>c:\Program Files\Bytescout PDF Extractor SDK\net2.00\Bytescout.PDFExtractor.dll</HintPath> </Reference> <Reference Include="System" /> <Reference Include="System.Data" /> <Reference Include="System.Deployment" /> <Reference Include="System.Drawing" /> </ItemGroup> <ItemGroup> <Import Include="Microsoft.VisualBasic" /> <Import Include="System" /> <Import Include="System.Collections" /> <Import Include="System.Collections.Generic" /> <Import Include="System.Data" /> <Import Include="System.Diagnostics" /> </ItemGroup> <ItemGroup> <Compile Include="Program.vb" /> </ItemGroup> <ItemGroup> <None Include="sample.pdf"> <CopyToOutputDirectory>Always</CopyToOutputDirectory> </None> </ItemGroup> <Import Project="$(MSBuildToolsPath)\Microsoft.VisualBasic.targets" /> </Project> ``` <!-- code block end --> <!-- code block begin --> ##### ****Program.vb:** ``` Imports System.Data.SqlClient Imports Bytescout.PDFExtractor Module Program Sub Main() Try ' Step-1: Get Datatable Dim oDataTable = GetDataTableFromDocument("sample.pdf") ' PLEASE NOTE Please Replace With your connection String, You need to have "PersonData" table into your database. 
' You can find that table from Scripts.sql file Dim connectionString As String = "Data Source=DESKTOP-92VMCQG\SQLEXPRESS;Initial Catalog=SampleDatabase;Persist Security Info=True;User ID=sa;Password=Hiren@009" ' Step-2: Insert into database InsertIntoSqlServerDatabase(oDataTable, connectionString) ' Step-3: Fetch from database and display results DisplayDatabaseResults(connectionString) Catch ex As Exception Console.WriteLine(ex.Message) End Try Console.WriteLine("Press enter key to exit...") Console.ReadLine() End Sub ''' <summary> ''' Inserts into Sql Server database ''' </summary> Private Sub InsertIntoSqlServerDatabase(ByVal oDataTable As DataTable, ByVal connectionString As String) Using con As SqlConnection = New SqlConnection(connectionString) ' Open connection con.Open() ' Sql query to insert data Dim cmdInsert As String = "Insert into PersonData (id, first_name, last_name, email, gender, ip_address) values (@id, @first_name, @last_name, @email, @gender, @ip_address)" For Each itmRow As DataRow In oDataTable.Rows ' Prepare sql command Dim cmd As SqlCommand = New SqlCommand(cmdInsert, con) cmd.CommandType = CommandType.Text cmd.Parameters.Add(New SqlParameter("@id", Convert.ToString(itmRow("id")))) cmd.Parameters.Add(New SqlParameter("@first_name", Convert.ToString(itmRow("first_name")))) cmd.Parameters.Add(New SqlParameter("@last_name", Convert.ToString(itmRow("last_name")))) cmd.Parameters.Add(New SqlParameter("@email", Convert.ToString(itmRow("email")))) cmd.Parameters.Add(New SqlParameter("@gender", Convert.ToString(itmRow("gender")))) cmd.Parameters.Add(New SqlParameter("@ip_address", Convert.ToString(itmRow("ip_address")))) ' Execute sql command cmd.ExecuteNonQuery() Next ' Close connection con.Close() End Using End Sub ' Displays inserted database results Private Sub DisplayDatabaseResults(ByVal connectionString As String) ' Person data holder Dim personDataTable As DataTable = New DataTable() Using con As SqlConnection = New SqlConnection(connectionString) ' Sql query to fetch data Dim cmdInsert As String = "SELECT id, first_name, last_name, email, gender, ip_address FROM PersonData;" ' Prepare sql command Dim cmd As SqlCommand = New SqlCommand(cmdInsert, con) cmd.CommandType = CommandType.Text ' Prepare DataAdapter Dim dataAdapter As SqlDataAdapter = New SqlDataAdapter(cmd) ' Fill person dataTable dataAdapter.Fill(personDataTable) End Using ' Display all person data if any If personDataTable IsNot Nothing AndAlso personDataTable.Rows.Count > 0 Then ' Print all columns For Each column As DataColumn In personDataTable.Columns Console.Write("{0} | ", column.ColumnName) Next Console.WriteLine() ' Print all data For Each dataRow As DataRow In personDataTable.Rows For Each column As DataColumn In personDataTable.Columns Console.Write("{0} | ", dataRow(column.ColumnName)) Next Console.WriteLine() Next Else Console.WriteLine("No data retrieved..") End If End Sub ''' <summary> ''' Get DataTable from Document ''' </summary> Private Function GetDataTableFromDocument(ByVal fileName As String) As DataTable Dim oDataTable As DataTable = Nothing ' Initialise table detector Using tableDetector As TableDetector = New TableDetector("demo", "demo") Using CSVExtractor As CSVExtractor = New CSVExtractor("demo", "demo") ' Set table detection mode to "bordered tables" - best for tables with closed solid borders. tableDetector.ColumnDetectionMode = ColumnDetectionMode.BorderedTables ' We should define what kind of tables we should detect. ' So we set min required number of columns to 2 ... 
tableDetector.DetectionMinNumberOfColumns = 2 ' ... and we set min required number of rows to 2 tableDetector.DetectionMinNumberOfRows = 2 ' Load PDF document tableDetector.LoadDocumentFromFile(fileName) CSVExtractor.LoadDocumentFromFile(fileName) ' Get page count Dim pageCount As Integer = tableDetector.GetPageCount() If tableDetector.FindTable(0) Then ' Set extraction area for CSV extractor to rectangle received from the table detector CSVExtractor.SetExtractionArea(tableDetector.FoundTableLocation) ' Generate CSV data Dim allCsvData = CSVExtractor.GetCSV() ' Generate Datatable oDataTable = GetDataTableFromCSV(allCsvData) End If End Using End Using Return oDataTable End Function ''' <summary> ''' Get Datatable from CSV ''' </summary> Private Function GetDataTableFromCSV(ByVal allCsvData As String) As DataTable Dim oRetDataTable = New DataTable() oRetDataTable.Columns.Add("id") oRetDataTable.Columns.Add("first_name") oRetDataTable.Columns.Add("last_name") oRetDataTable.Columns.Add("email") oRetDataTable.Columns.Add("gender") oRetDataTable.Columns.Add("ip_address") Dim rows = allCsvData.Split(vbLf) For iRow As Integer = 1 To rows.Length - 1 ' Get all column data Dim columns = rows(iRow).Split(","c) If columns.Length >= 5 Then ' Prepare new row Dim oRow = oRetDataTable.NewRow() oRow("id") = columns(0) oRow("first_name") = columns(1) oRow("last_name") = columns(2) oRow("email") = columns(3) oRow("gender") = columns(4) oRow("ip_address") = columns(5) ' Add row back to datatable oRetDataTable.Rows.Add(oRow) End If Next ' Return DataTable Return oRetDataTable End Function End Module ``` <!-- code block end -->
bytescout/ByteScout-SDK-SourceCode
Premium Suite/VB.NET/Extract csv from pdf and fill database in sql server with pdf extractor sdk/README.md
Markdown
apache-2.0
12,549
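The VB.NET sample above follows a simple pipeline: extract a CSV block from the PDF, split it into rows, then insert each row with a parameterized SQL command. Here is a compact sketch of the same row-handling and parameterized-insert steps in Python, using an in-memory SQLite database instead of SQL Server so it stays self-contained; the `PersonData` columns mirror the ones in the sample, and the CSV text is a stand-in for what `CSVExtractor.GetCSV()` would return:

```python
import csv
import io
import sqlite3

# Stand-in for the CSV text produced by the PDF extractor in the VB.NET sample.
csv_text = """id,first_name,last_name,email,gender,ip_address
1,Ada,Lovelace,ada@example.com,Female,10.0.0.1
2,Alan,Turing,alan@example.com,Male,10.0.0.2
"""

conn = sqlite3.connect(":memory:")  # SQL Server in the original; SQLite keeps this runnable
conn.execute(
    "CREATE TABLE PersonData (id TEXT, first_name TEXT, last_name TEXT, "
    "email TEXT, gender TEXT, ip_address TEXT)"
)

# Parameterized insert, analogous to the SqlCommand with @-parameters above.
rows = list(csv.DictReader(io.StringIO(csv_text)))
conn.executemany(
    "INSERT INTO PersonData (id, first_name, last_name, email, gender, ip_address) "
    "VALUES (:id, :first_name, :last_name, :email, :gender, :ip_address)",
    rows,
)

for row in conn.execute("SELECT * FROM PersonData"):
    print(" | ".join(row))
```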
package db import ( "github.com/couchbase/go-couchbase" "github.com/couchbase/sync_gateway/base" ) // A goroutine that watches the tapListener for documents that don't have // sync metadata, and calls assimilate() on them. func (c *DatabaseContext) watchDocChanges() { if c.tapListener.DocChannel == nil { return } base.LogTo("Shadow", "Watching doc changes...") for event := range c.tapListener.DocChannel { doc, err := unmarshalDocument(string(event.Key), event.Value) if err == nil { if doc.hasValidSyncData(c.writeSequences()) { if c.Shadower != nil { c.Shadower.PushRevision(doc) } } else { if c.autoImport { go c.assimilate(doc.ID) } } } } } // Adds sync metadata to a Couchbase document func (c *DatabaseContext) assimilate(docid string) { base.LogTo("CRUD", "Importing new doc %q", docid) db := Database{DatabaseContext: c, user: nil} _, err := db.updateDoc(docid, true, func(doc *document) (Body, error) { if doc.hasValidSyncData(c.writeSequences()) { return nil, couchbase.UpdateCancel // someone beat me to it } if err := db.initializeSyncData(doc); err != nil { return nil, err } return doc.body, nil }) if err != nil && err != couchbase.UpdateCancel { base.Warn("Failed to import new doc %q: %v", docid, err) } }
tleyden/sync_gateway
db/assimilator.go
GO
apache-2.0
1,304
# Lagunaria lilacina (Lindl.) Sweet SPECIES

#### Status
ACCEPTED

#### According to
International Plant Names Index

#### Published in
null

#### Original name
Hibiscus lilacinus Lindl.

### Remarks
null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Malvales/Malvaceae/Lagunaria/Lagunaria lilacina/README.md
Markdown
apache-2.0
204
/****************************************************************************** * Copyright 2019 The Apollo Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *****************************************************************************/ /** * @file **/ #include <string> #include <vector> #include "modules/planning/scenarios/park/valet_parking/stage_approaching_parking_spot.h" #include "modules/planning/scenarios/park/valet_parking/stage_parking.h" #include "modules/planning/scenarios/park/valet_parking/valet_parking_scenario.h" namespace apollo { namespace planning { namespace scenario { namespace valet_parking { using apollo::common::Status; using apollo::common::VehicleState; using apollo::common::math::Box2d; using apollo::common::math::Vec2d; using apollo::hdmap::HDMapUtil; using apollo::hdmap::LaneSegment; using apollo::hdmap::ParkingSpaceInfoConstPtr; using apollo::hdmap::Path; using apollo::hdmap::PathOverlap; apollo::common::util::Factory< ScenarioConfig::StageType, Stage, Stage* (*)(const ScenarioConfig::StageConfig& stage_config)> ValetParkingScenario::s_stage_factory_; void ValetParkingScenario::Init() { if (init_) { return; } Scenario::Init(); if (!GetScenarioConfig()) { AERROR << "fail to get scenario specific config"; return; } hdmap_ = hdmap::HDMapUtil::BaseMapPtr(); CHECK_NOTNULL(hdmap_); } void ValetParkingScenario::RegisterStages() { if (s_stage_factory_.Empty()) { s_stage_factory_.Clear(); } s_stage_factory_.Register( ScenarioConfig::VALET_PARKING_APPROACHING_PARKING_SPOT, [](const ScenarioConfig::StageConfig& config) -> Stage* { return new StageApproachingParkingSpot(config); }); s_stage_factory_.Register( ScenarioConfig::VALET_PARKING_PARKING, [](const ScenarioConfig::StageConfig& config) -> Stage* { return new StageParking(config); }); } std::unique_ptr<Stage> ValetParkingScenario::CreateStage( const ScenarioConfig::StageConfig& stage_config) { if (s_stage_factory_.Empty()) { RegisterStages(); } auto ptr = s_stage_factory_.CreateObjectOrNull(stage_config.stage_type(), stage_config); if (ptr) { ptr->SetContext(&context_); } return ptr; } bool ValetParkingScenario::GetScenarioConfig() { if (!config_.has_valet_parking_config()) { AERROR << "miss scenario specific config"; return false; } context_.scenario_config.CopyFrom(config_.valet_parking_config()); return true; } bool ValetParkingScenario::IsTransferable(const Frame& frame, const double parking_start_range) { // TODO(all) Implement avaliable parking spot detection by preception results std::string target_parking_spot_id; if (frame.local_view().routing->routing_request().has_parking_space() && frame.local_view().routing->routing_request().parking_space().has_id()) { target_parking_spot_id = frame.local_view().routing->routing_request().parking_space().id().id(); } else { ADEBUG << "No parking space id from routing"; return false; } if (target_parking_spot_id.empty()) { return false; } const auto& nearby_path = frame.reference_line_info().front().reference_line().map_path(); PathOverlap 
parking_space_overlap; const auto& vehicle_state = frame.vehicle_state(); if (!SearchTargetParkingSpotOnPath(nearby_path, target_parking_spot_id, &parking_space_overlap)) { ADEBUG << "No such parking spot found after searching all path forward " "possible" << target_parking_spot_id; return false; } if (!CheckDistanceToParkingSpot(vehicle_state, nearby_path, parking_start_range, parking_space_overlap)) { ADEBUG << "target parking spot found, but too far, distance larger than " "pre-defined distance" << target_parking_spot_id; return false; } return true; } bool ValetParkingScenario::SearchTargetParkingSpotOnPath( const Path& nearby_path, const std::string& target_parking_id, PathOverlap* parking_space_overlap) { const auto& parking_space_overlaps = nearby_path.parking_space_overlaps(); for (const auto& parking_overlap : parking_space_overlaps) { if (parking_overlap.object_id == target_parking_id) { *parking_space_overlap = parking_overlap; return true; } } return false; } bool ValetParkingScenario::CheckDistanceToParkingSpot( const VehicleState& vehicle_state, const Path& nearby_path, const double parking_start_range, const PathOverlap& parking_space_overlap) { // TODO(Jinyun) parking overlap s are wrong on map, not usable // double parking_space_center_s = // (parking_space_overlap.start_s + parking_space_overlap.end_s) / 2.0; const hdmap::HDMap* hdmap = hdmap::HDMapUtil::BaseMapPtr(); hdmap::Id id; id.set_id(parking_space_overlap.object_id); ParkingSpaceInfoConstPtr target_parking_spot_ptr = hdmap->GetParkingSpaceById(id); Vec2d left_bottom_point = target_parking_spot_ptr->polygon().points().at(0); Vec2d right_bottom_point = target_parking_spot_ptr->polygon().points().at(1); double left_bottom_point_s = 0.0; double left_bottom_point_l = 0.0; double right_bottom_point_s = 0.0; double right_bottom_point_l = 0.0; nearby_path.GetNearestPoint(left_bottom_point, &left_bottom_point_s, &left_bottom_point_l); nearby_path.GetNearestPoint(right_bottom_point, &right_bottom_point_s, &right_bottom_point_l); double parking_space_center_s = (left_bottom_point_s + right_bottom_point_s) / 2.0; double vehicle_point_s = 0.0; double vehicle_point_l = 0.0; Vec2d vehicle_vec(vehicle_state.x(), vehicle_state.y()); nearby_path.GetNearestPoint(vehicle_vec, &vehicle_point_s, &vehicle_point_l); if (std::abs(parking_space_center_s - vehicle_point_s) < parking_start_range) { return true; } else { return false; } } } // namespace valet_parking } // namespace scenario } // namespace planning } // namespace apollo
ycool/apollo
modules/planning/scenarios/park/valet_parking/valet_parking_scenario.cc
C++
apache-2.0
6,725
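`CheckDistanceToParkingSpot` above reduces to a small piece of arithmetic once the two bottom corners of the parking space and the vehicle position have been projected onto the reference path: average the two corner s-values and compare the gap to the vehicle's s-value against `parking_start_range`. A hedged Python restatement of just that arithmetic; the path projection (`GetNearestPoint`) is out of scope here and the numeric values are placeholders:

```python
def near_parking_spot(left_bottom_s: float,
                      right_bottom_s: float,
                      vehicle_s: float,
                      parking_start_range: float) -> bool:
    """Mirror of the final distance test in CheckDistanceToParkingSpot."""
    parking_space_center_s = (left_bottom_s + right_bottom_s) / 2.0
    return abs(parking_space_center_s - vehicle_s) < parking_start_range

# Placeholder s-coordinates along the reference path (metres).
print(near_parking_spot(48.0, 52.0, vehicle_s=45.0, parking_start_range=10.0))  # True
```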
var models = require('../models/models.js');

// Only the quiz owner or an administrator may act on a comment.
exports.ownershipRequired = function (req, res, next) {
    models.Quiz.find({
        where: { id: Number(req.comment.QuizId) }
    }).then(function (quiz) {
        if (quiz) {
            var objQuizOwner = quiz.UserId;
            var logUser = req.session.user.id;
            var isAdmin = req.session.user.isAdmin;
            console.log(objQuizOwner, logUser, isAdmin);
            if (isAdmin || objQuizOwner === logUser) {
                next();
            } else {
                res.redirect('/');
            }
        } else {
            next(new Error('No existe quizId=' + req.comment.QuizId));
        }
    }).catch(function (error) { next(error); });
};

exports.load = function (req, res, next, commentId) {
    models.Comment.find({
        where: { id: Number(commentId) }
    }).then(function (comment) {
        if (comment) {
            req.comment = comment;
            next();
        } else {
            next(new Error('No existe commentId=' + commentId));
        }
    });
};

exports.new = function (req, res) {
    res.render('comments/new.ejs', { quizId: req.params.quizId, errors: [] });
};

exports.create = function (req, res, next) {
    var comment = models.Comment.build({
        texto: req.body.comment.texto,
        QuizId: req.params.quizId
    });
    comment.validate().then(
        function (err) {
            if (err) {
                res.render('comments/new.ejs', { comment: comment, errors: err.errors });
            } else {
                comment.save().then(
                    function () {
                        res.redirect('/quizes/' + req.params.quizId);
                    });
            }
        }
    ).catch(function (error) { next(error); });
};

exports.publish = function (req, res, next) {
    req.comment.publicado = true;
    req.comment.save({ fields: ["publicado"] }).then(function () {
        res.redirect('/quizes/' + req.params.quizId);
    }).catch(function (error) { next(error); });
};
sirasistant/Core-quiz
controllers/comment_controller.js
JavaScript
apache-2.0
1,652
package com.example.coolweather.gson;

/**
 * Created by Administrator on 2017/9/1.
 */
public class AQI {

    public AQICity city;

    public class AQICity {

        /** Air quality index reading. */
        public String aqi;

        /** PM2.5 reading. */
        public String pm25;
    }
}
1312662151/coolweather
app/src/main/java/com/example/coolweather/gson/AQI.java
Java
apache-2.0
222
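The `AQI`/`AQICity` classes above are GSON bindings, which implies a JSON object carrying a `city` object with `aqi` and `pm25` string fields. A small Python sketch of reading that shape; the surrounding key and the sample values are inferred from the field structure, not taken from the real weather API:

```python
import json

# Payload shape inferred from the AQI / AQICity fields; keys and values are assumptions.
payload = '{"aqi": {"city": {"aqi": "44", "pm25": "13"}}}'

data = json.loads(payload)
city = data["aqi"]["city"]
print("AQI:", city["aqi"], "PM2.5:", city["pm25"])
```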
/** * Copyright (C) 2014 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.strata.collect.id; import java.util.function.Function; import com.google.common.reflect.TypeToken; /** * A resolver that can provide the target when resolving links. * <p> * A {@link Link} provides loose coupling between different parts of the object model. * When the target of a link is needed, it is resolved by passing in a link resolver. * <p> * Link resolution will typically be implemented to access an underlying data store. * If the link specifies an identifier that is not resolvable, or the declared target * type is incorrect, an exception is thrown. */ public interface LinkResolver { /** * Obtains a link resolver that is unable to resolve any links. * <p> * This is a special implementation of {@code LinkResolver} that will be used when * it is assumed that all targets have already been resolved. * Any attempt to resolve a link will throw {@code LinkResolutionException} * * @return the link resolver */ public static LinkResolver none() { // cannot use a lambda for LinkResolver return new LinkResolver() { @Override public <T extends IdentifiableBean> T resolve(StandardId identifier, TypeToken<T> targetType) { throw new LinkResolutionException("Unable to resolve link to: " + identifier + ", using LinkResolver.none()"); } }; } //------------------------------------------------------------------------- /** * Resolves the supplied link, returning the realized target of the link. * <p> * The implementation of this interface may perform any thread-safe action to obtain * the link target. Typically this will involve accessing an underlying data store. * If the link cannot be resolved then a {@code LinkResolutionException} will be thrown. * <p> * The type is expressed as a standard {@link Class} object. * * @param <T> the type of the target of the link * @param identifier the identifier to be resolved * @param targetType the target type of the link * @return the resolved target of the link * @throws LinkResolutionException if the link cannot be resolved */ public default <T extends IdentifiableBean> T resolve(StandardId identifier, Class<T> targetType) { return resolve(identifier, TypeToken.of(targetType)); } /** * Resolves the supplied link, returning the realized target of the link. * <p> * The implementation of this interface may perform any thread-safe action to obtain * the link target. Typically this will involve accessing an underlying data store. * If the link cannot be resolved then a {@code LinkResolutionException} will be thrown. * <p> * The type is expressed as a {@link TypeToken}, which allows types like * {@code Trade<Swap>} to be expressed: * <p> * <pre> * new TypeToken&lt;Trade&lt;Swap&gt;&gt;() {}; * </pre> * * @param <T> the type of the target of the link * @param identifier the identifier to be resolved * @param targetType the target type of the link * @return the resolved target of the link * @throws LinkResolutionException if the link cannot be resolved */ public abstract <T extends IdentifiableBean> T resolve(StandardId identifier, TypeToken<T> targetType); //------------------------------------------------------------------------- /** * Resolves all the links within the specified bean. * <p> * This takes the specified bean and resolves any links if the object implements {@link Resolvable}. * If the target is not resolvable, or the target is already resolved, * then the specified input bean will be returned. 
* <p> * This method is primarily useful where the type of the input object is not known to be resolvable. * For example, this might occur when processing a {@code List<Object>}. * * @param bean the target bean * @return the resolved bean */ @SuppressWarnings("unchecked") public default <B> B resolveLinksIn(B bean) { return (bean instanceof Resolvable ? ((Resolvable<B>) bean).resolveLinks(this) : bean); } /** * Resolves all the links within one property of a bean. * <p> * This takes the specified bean and replaces the target object. * The target must be a property of the bean and the update function must be able to replace the target. * The update must return a new bean, leaving the original unaltered. * <p> * If the target is not resolvable, is null, or is already resolved, * then the specified input bean will be returned. * <p> * For example, this method might be used as follows: * <pre> * resolver.resolveLinksIn(bean, bean.getFoo(), resolved -> bean.toBuilder().foo(resolved).build()); * </pre> * <p> * This method is typically invoked from implementations of {@link Resolvable#resolveLinks(LinkResolver)}. * In that case, the above example would use {@code this} instead of {@code bean}. * * @param bean the target bean * @param target the target object within the bean, may be null * @param updateFn the update function * @return the updated bean */ public default <B, T> B resolveLinksIn(B bean, T target, Function<T, B> updateFn) { if (target instanceof Resolvable) { @SuppressWarnings("unchecked") Resolvable<T> resolvableTarget = (Resolvable<T>) target; T resolved = resolvableTarget.resolveLinks(this); if (resolved != target) { return updateFn.apply(resolved); } } return bean; } }
nssales/Strata
modules/collect/src/main/java/com/opengamma/strata/collect/id/LinkResolver.java
Java
apache-2.0
5,642
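The Javadoc above describes a small pattern: a link holds only an identifier, and a `LinkResolver` turns that identifier plus an expected target type into the real object, throwing if resolution fails. A loose Python transliteration of that pattern, for illustration only; the class and method names below do not exist in Strata and the in-memory store is an assumption:

```python
class LinkResolutionException(Exception):
    """Raised when an identifier cannot be resolved to a target of the expected type."""


class DictLinkResolver:
    """Toy resolver backed by an in-memory mapping from identifier to object."""

    def __init__(self, store):
        self._store = store

    def resolve(self, identifier, target_type):
        target = self._store.get(identifier)
        if target is None or not isinstance(target, target_type):
            raise LinkResolutionException(
                f"Unable to resolve link to: {identifier} as {target_type.__name__}")
        return target


class NoneLinkResolver:
    """Analogue of LinkResolver.none(): every resolution attempt fails."""

    def resolve(self, identifier, target_type):
        raise LinkResolutionException(
            f"Unable to resolve link to: {identifier}, using NoneLinkResolver")


resolver = DictLinkResolver({"OG~1": "some trade object"})
print(resolver.resolve("OG~1", str))
```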
/* Copyright 2018 Google Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ #ifndef __DEBUG_MARKER_H__ #define __DEBUG_MARKER_H__ #include <glm/glm.hpp> #include "VkexLoader.h" #define SCOPE1(cmd, name) vkex::internal::DebugMarkerHelper temp_##__LINE__(cmd, name); #define SCOPE2(cmd, name, color) vkex::internal::DebugMarkerHelper temp_##__LINE__(cmd, name, color); #define BEGIN1(cmd, name) vkex::internal::DebugMarkerBegin(cmd, name); #define BEGIN2(cmd, name, color) vkex::internal::DebugMarkerBegin(cmd, name, color); #define GET_MACRO(_1, _2, _3, NAME, ...) NAME #if defined(VKEX_WIN32) // No op these for Windows #define DEBUG_MARKER_SCOPE #define DEBUG_MARKER_BEGIN #define DEBUG_MARKER_END #else // Use as follows: DEBUG_MARKER_SCOPE(name) or DEBUG_MARKER_SCOPE(name, color). // Note: Make sure scope exists within vkBeginCommandBuffer/vkEndCommandBuffer! #define DEBUG_MARKER_SCOPE(...) GET_MACRO(__VA_ARGS__, SCOPE2, SCOPE1)(__VA_ARGS__) // Use as follows: DEBUG_MARKER_BEGIN(name) or DEBUG_MARKER_BEGIN(name, color). #define DEBUG_MARKER_BEGIN(...) GET_MACRO(__VA_ARGS__, BEGIN2, BEGIN1)(__VA_ARGS__) #define DEBUG_MARKER_END(cmd) vkex::internal::DebugMarkerEnd(cmd); #endif namespace vkex { // ================================================================================================= // Internal functions/classes. Use above DEBUG_MARKER_* macros! // ================================================================================================= namespace internal { inline void DebugMarkerBegin(VkCommandBuffer cmd_buffer, const char *marker_name, const glm::vec4 &color = glm::vec4(1,1,1,1)) { VkDebugMarkerMarkerInfoEXT markerInfo = {}; markerInfo.sType = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT; memcpy(markerInfo.color, &color[0], sizeof(float) * 4); markerInfo.pMarkerName = marker_name; if (vkex::CmdDebugMarkerBeginEXT != nullptr) vkex::CmdDebugMarkerBeginEXT(cmd_buffer, &markerInfo); } inline void DebugMarkerEnd(VkCommandBuffer cmd_buffer) { if (vkex::CmdDebugMarkerEndEXT != nullptr) vkex::CmdDebugMarkerEndEXT(cmd_buffer); } // ================================================================================================= // DebugMarkerHelper // ================================================================================================= class DebugMarkerHelper { public: DebugMarkerHelper(VkCommandBuffer cmd_buffer, const char *marker_name, const glm::vec4 &color = glm::vec4(1,1,1,1)) { m_cmd_buffer = cmd_buffer; DebugMarkerBegin(cmd_buffer, marker_name, color); } ~DebugMarkerHelper() { DebugMarkerEnd(m_cmd_buffer); } private: VkCommandBuffer m_cmd_buffer; }; } // namespace internal } // namespace vkex #endif // __DEBUG_MARKER_H__
googlestadia/PorQue4K
src/vkex/DebugMarker.h
C
apache-2.0
3,293
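`DebugMarkerHelper` above relies on the C++ constructor/destructor pair so that every `CmdDebugMarkerBeginEXT` is matched by an end call when the scope closes. The same scoped begin/end idea expressed as a Python context manager, purely to illustrate the pattern; it does not call into Vulkan and the callback names are placeholders:

```python
from contextlib import contextmanager

@contextmanager
def debug_marker_scope(name, begin, end):
    """Pair a begin/end callback around a block, the way DebugMarkerHelper pairs
    CmdDebugMarkerBeginEXT with CmdDebugMarkerEndEXT."""
    begin(name)
    try:
        yield
    finally:
        end(name)

# Stand-in callbacks; a real integration would wrap the Vulkan entry points.
with debug_marker_scope("DrawOpaque",
                        begin=lambda n: print(f"begin marker: {n}"),
                        end=lambda n: print(f"end marker: {n}")):
    print("... recorded commands ...")
```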
<div class="container-fluid"> <div class="row"> <div class="col-sm-6"> <h4>Conduit One</h4> <uxd-conduit-search></uxd-conduit-search> </div> <div class="col-sm-6"> <h4>Conduit Two</h4> <uxd-conduit-search></uxd-conduit-search> </div> </div> </div>
UXAspects/UXAspects
docs/app/pages/components/components-sections/conduits/conduit/example/component-zone/component-zone.component.html
HTML
apache-2.0
335
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.securitytoken.model; import java.io.Serializable; import javax.annotation.Generated; /** * <p> * Identifiers for the federated user that is associated with the credentials. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/sts-2011-06-15/FederatedUser" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class FederatedUser implements Serializable, Cloneable { /** * <p> * The string that identifies the federated user associated with the credentials, similar to the unique ID of an IAM * user. * </p> */ private String federatedUserId; /** * <p> * The ARN that specifies the federated user that is associated with the credentials. For more information about * ARNs and how to use them in policies, see <a * href="https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html">IAM Identifiers</a> in the * <i>IAM User Guide</i>. * </p> */ private String arn; /** * Default constructor for FederatedUser object. Callers should use the setter or fluent setter (with...) methods to * initialize the object after creating it. */ public FederatedUser() { } /** * Constructs a new FederatedUser object. Callers should use the setter or fluent setter (with...) methods to * initialize any additional object members. * * @param federatedUserId * The string that identifies the federated user associated with the credentials, similar to the unique ID of * an IAM user. * @param arn * The ARN that specifies the federated user that is associated with the credentials. For more information * about ARNs and how to use them in policies, see <a * href="https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html">IAM Identifiers</a> in * the <i>IAM User Guide</i>. */ public FederatedUser(String federatedUserId, String arn) { setFederatedUserId(federatedUserId); setArn(arn); } /** * <p> * The string that identifies the federated user associated with the credentials, similar to the unique ID of an IAM * user. * </p> * * @param federatedUserId * The string that identifies the federated user associated with the credentials, similar to the unique ID of * an IAM user. */ public void setFederatedUserId(String federatedUserId) { this.federatedUserId = federatedUserId; } /** * <p> * The string that identifies the federated user associated with the credentials, similar to the unique ID of an IAM * user. * </p> * * @return The string that identifies the federated user associated with the credentials, similar to the unique ID * of an IAM user. */ public String getFederatedUserId() { return this.federatedUserId; } /** * <p> * The string that identifies the federated user associated with the credentials, similar to the unique ID of an IAM * user. * </p> * * @param federatedUserId * The string that identifies the federated user associated with the credentials, similar to the unique ID of * an IAM user. 
* @return Returns a reference to this object so that method calls can be chained together. */ public FederatedUser withFederatedUserId(String federatedUserId) { setFederatedUserId(federatedUserId); return this; } /** * <p> * The ARN that specifies the federated user that is associated with the credentials. For more information about * ARNs and how to use them in policies, see <a * href="https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html">IAM Identifiers</a> in the * <i>IAM User Guide</i>. * </p> * * @param arn * The ARN that specifies the federated user that is associated with the credentials. For more information * about ARNs and how to use them in policies, see <a * href="https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html">IAM Identifiers</a> in * the <i>IAM User Guide</i>. */ public void setArn(String arn) { this.arn = arn; } /** * <p> * The ARN that specifies the federated user that is associated with the credentials. For more information about * ARNs and how to use them in policies, see <a * href="https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html">IAM Identifiers</a> in the * <i>IAM User Guide</i>. * </p> * * @return The ARN that specifies the federated user that is associated with the credentials. For more information * about ARNs and how to use them in policies, see <a * href="https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html">IAM Identifiers</a> in * the <i>IAM User Guide</i>. */ public String getArn() { return this.arn; } /** * <p> * The ARN that specifies the federated user that is associated with the credentials. For more information about * ARNs and how to use them in policies, see <a * href="https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html">IAM Identifiers</a> in the * <i>IAM User Guide</i>. * </p> * * @param arn * The ARN that specifies the federated user that is associated with the credentials. For more information * about ARNs and how to use them in policies, see <a * href="https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html">IAM Identifiers</a> in * the <i>IAM User Guide</i>. * @return Returns a reference to this object so that method calls can be chained together. */ public FederatedUser withArn(String arn) { setArn(arn); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getFederatedUserId() != null) sb.append("FederatedUserId: ").append(getFederatedUserId()).append(","); if (getArn() != null) sb.append("Arn: ").append(getArn()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof FederatedUser == false) return false; FederatedUser other = (FederatedUser) obj; if (other.getFederatedUserId() == null ^ this.getFederatedUserId() == null) return false; if (other.getFederatedUserId() != null && other.getFederatedUserId().equals(this.getFederatedUserId()) == false) return false; if (other.getArn() == null ^ this.getArn() == null) return false; if (other.getArn() != null && other.getArn().equals(this.getArn()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getFederatedUserId() == null) ? 0 : getFederatedUserId().hashCode()); hashCode = prime * hashCode + ((getArn() == null) ? 0 : getArn().hashCode()); return hashCode; } @Override public FederatedUser clone() { try { return (FederatedUser) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
aws/aws-sdk-java
aws-java-sdk-sts/src/main/java/com/amazonaws/services/securitytoken/model/FederatedUser.java
Java
apache-2.0
8,812
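As a reading aid (not part of the SDK sources), here is a minimal hedged sketch of using the generated model above; the account id and ARN values are invented placeholders, and only methods shown in the record (the fluent "with" setters, toString, equals, clone) are used:

import com.amazonaws.services.securitytoken.model.FederatedUser;

public class FederatedUserExample {
    public static void main(String[] args) {
        // Build the model with the fluent setters defined above; the identifier
        // and ARN below are illustrative placeholders, not real resources.
        FederatedUser user = new FederatedUser()
                .withFederatedUserId("123456789012:Bob")
                .withArn("arn:aws:sts::123456789012:federated-user/Bob");

        // toString() prints both members, e.g. {FederatedUserId: ..., Arn: ...}
        System.out.println(user);

        // equals() compares field values, so a clone is equal to the original.
        System.out.println(user.equals(user.clone())); // prints true
    }
}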
package pro.taskana.workbasket.internal.builder;

import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import javax.security.auth.Subject;

import pro.taskana.common.api.exceptions.InvalidArgumentException;
import pro.taskana.common.api.exceptions.NotAuthorizedException;
import pro.taskana.common.api.security.UserPrincipal;
import pro.taskana.workbasket.api.WorkbasketPermission;
import pro.taskana.workbasket.api.WorkbasketService;
import pro.taskana.workbasket.api.exceptions.WorkbasketAccessItemAlreadyExistException;
import pro.taskana.workbasket.api.exceptions.WorkbasketNotFoundException;
import pro.taskana.workbasket.api.models.WorkbasketAccessItem;
import pro.taskana.workbasket.internal.models.WorkbasketAccessItemImpl;

public class WorkbasketAccessItemBuilder {

  WorkbasketAccessItemImpl testWorkbasketAccessItem = new WorkbasketAccessItemImpl();

  private WorkbasketAccessItemBuilder() {}

  public static WorkbasketAccessItemBuilder newWorkbasketAccessItem() {
    return new WorkbasketAccessItemBuilder();
  }

  public WorkbasketAccessItemBuilder workbasketId(String workbasketId) {
    testWorkbasketAccessItem.setWorkbasketId(workbasketId);
    return this;
  }

  public WorkbasketAccessItemBuilder accessId(String accessId) {
    testWorkbasketAccessItem.setAccessId(accessId);
    return this;
  }

  public WorkbasketAccessItemBuilder accessName(String accessName) {
    testWorkbasketAccessItem.setAccessName(accessName);
    return this;
  }

  public WorkbasketAccessItemBuilder permission(WorkbasketPermission permission) {
    return permission(permission, true);
  }

  public WorkbasketAccessItemBuilder permission(WorkbasketPermission permission, boolean value) {
    testWorkbasketAccessItem.setPermission(permission, value);
    return this;
  }

  public WorkbasketAccessItem buildAndStore(WorkbasketService workbasketService)
      throws InvalidArgumentException, WorkbasketAccessItemAlreadyExistException,
          WorkbasketNotFoundException, NotAuthorizedException {
    return workbasketService.createWorkbasketAccessItem(testWorkbasketAccessItem);
  }

  public WorkbasketAccessItem buildAndStore(WorkbasketService workbasketService, String userId)
      throws PrivilegedActionException {
    Subject subject = new Subject();
    subject.getPrincipals().add(new UserPrincipal(userId));
    PrivilegedExceptionAction<WorkbasketAccessItem> performBuildAndStore =
        () -> buildAndStore(workbasketService);
    return Subject.doAs(subject, performBuildAndStore);
  }
}
Taskana/taskana
lib/taskana-core/src/main/java/pro/taskana/workbasket/internal/builder/WorkbasketAccessItemBuilder.java
Java
apache-2.0
2,562
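A hedged usage sketch for the fluent builder above: the WorkbasketService instance, workbasket id, access id, and user id are assumptions for illustration, and the permission constants are taken from the WorkbasketPermission enum referenced by the builder.

import pro.taskana.workbasket.api.WorkbasketPermission;
import pro.taskana.workbasket.api.WorkbasketService;
import pro.taskana.workbasket.api.models.WorkbasketAccessItem;

import static pro.taskana.workbasket.internal.builder.WorkbasketAccessItemBuilder.newWorkbasketAccessItem;

class WorkbasketAccessItemBuilderUsage {

  // workbasketService is assumed to be obtained from the TASKANA engine elsewhere.
  static WorkbasketAccessItem grantReadAccess(WorkbasketService workbasketService) throws Exception {
    return newWorkbasketAccessItem()
        .workbasketId("WBI:000000000000000000000000000000000001") // illustrative id
        .accessId("group-a")                                      // illustrative access id
        .permission(WorkbasketPermission.READ)
        .permission(WorkbasketPermission.OPEN, true)
        // buildAndStore(service, userId) runs the create call inside a Subject
        // carrying that user principal, as shown in the builder above.
        .buildAndStore(workbasketService, "businessadmin");       // illustrative user id
  }
}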
/**
 * ExperimentProteinInferRun.java
 * @author Vagisha Sharma
 * Aug 5, 2009
 * @version 1.0
 */
package org.yeastrc.experiment;

import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.yeastrc.ms.domain.protinfer.ProteinInferenceProgram;
import org.yeastrc.ms.domain.protinfer.proteinProphet.ProteinProphetRun;

/**
 *
 */
public class ExperimentProteinProphetRun {

    private final ProteinProphetRun run;
    private int uniqPeptideSequenceCount;
    private int uniqIonCount;
    private int numParsimoniousProteins;
    private int numParsimoniousProteinGroups;
    private int numParsimoniousProteinProphetGroups;
    private boolean isBookmarked = false;

    private static final Pattern tppVersionPattern = Pattern.compile("TPP\\s+(v\\d+\\.\\d+)");

    public boolean getIsBookmarked() {
        return isBookmarked;
    }

    public void setBookmarked(boolean isBookmarked) {
        this.isBookmarked = isBookmarked;
    }

    public int getUniqIonCount() {
        return uniqIonCount;
    }

    public void setUniqIonCount(int uniqueIonCount) {
        this.uniqIonCount = uniqueIonCount;
    }

    public int getNumParsimoniousProteinProphetGroups() {
        return numParsimoniousProteinProphetGroups;
    }

    public void setNumParsimoniousProteinProphetGroups(int numProteinProphetGroups) {
        this.numParsimoniousProteinProphetGroups = numProteinProphetGroups;
    }

    public ExperimentProteinProphetRun(ProteinProphetRun run) {
        this.run = run;
    }

    public ProteinProphetRun getProteinProphetRun() {
        return run;
    }

    public int getUniqPeptideSequenceCount() {
        return uniqPeptideSequenceCount;
    }

    public void setUniqPeptideSequenceCount(int uniqPeptideSequenceCount) {
        this.uniqPeptideSequenceCount = uniqPeptideSequenceCount;
    }

    public int getNumParsimoniousProteins() {
        return numParsimoniousProteins;
    }

    public void setNumParsimoniousProteins(int numParsimoniousProteins) {
        this.numParsimoniousProteins = numParsimoniousProteins;
    }

    public int getNumParsimoniousProteinGroups() {
        return numParsimoniousProteinGroups;
    }

    public void setNumParsimoniousProteinGroups(int numParsimoniousProteinGroups) {
        this.numParsimoniousProteinGroups = numParsimoniousProteinGroups;
    }

    public String getProgramVersionShort() {
        String version = run.getProgramVersion();
        if (run.getProgram() == ProteinInferenceProgram.PROTEIN_PROPHET) {
            Matcher m = tppVersionPattern.matcher(version);
            if (m.find()) {
                version = m.group(1);
            }
        }
        return version;
    }
}
yeastrc/msdapl
MSDaPl_Web_App/src/org/yeastrc/experiment/ExperimentProteinProphetRun.java
Java
apache-2.0
2,620
/**
 * @license
 * Copyright 2018 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

// Style preference for leading underscores.
// tslint:disable:strip-private-property-underscore

import {customElement} from 'lit/decorators.js';

import {TabScrollerBase} from './mwc-tab-scroller-base';
import {styles} from './mwc-tab-scroller.css';

declare global {
  interface HTMLElementTagNameMap {
    'mwc-tab-scroller': TabScroller;
  }
}

@customElement('mwc-tab-scroller')
export class TabScroller extends TabScrollerBase {
  static override styles = [styles];
}
material-components/material-web
packages/tab-scroller/mwc-tab-scroller.ts
TypeScript
apache-2.0
564
import lean
import lang.expr as expr


# =========================================================
# Declaration Views

class DeclView(lean.declaration):
    def __init__(self, decl):
        self.decl = decl

    def destruct(self):
        # type: DeclView -> (lean.name, ?, ?, lean.expr, lean.expr)
        return (self.decl.get_name(),
                self.decl.get_univ_params(),
                self.decl.get_num_univ_params(),
                self.decl.get_type(),
                self.decl.get_value())

    def mentions(self, d_thm):
        v = self.decl.get_value()
        return expr.gather_theorem(d_thm, v)


# =========================================================
# Environment Views

class EnvView(lean.environment):
    def __init__(self, env):
        # type: lean.environment -> None
        self.env = env

    def get_decls(self, f=None):
        # type: (lean.declaration -> bool) -> [lean.declaration]
        decls = []
        self.env.for_each_declaration(lambda decl: decls.append(decl))
        if f:
            decls = filter(lambda decl: f(decl), decls)
        return decls

    def get_theorems(self):
        # type: (lean.declaration -> bool) -> [lean.declaration]
        return self.get_decls(lambda decl: decl.is_theorem())

    def thm_dict_of_decls(self, decls):
        # type: [lean.declaration] -> dict<lean.name, lean.expr>
        d_thm = {}
        for decl in decls:
            if decl.is_theorem():
                n, up, nup, t, v = DeclView(decl).destruct()
                d_thm[n] = v
        return d_thm
dselsam/lean-python-bindings
lean/lang/env.py
Python
apache-2.0
1,567
# WARNING: DO NOT EDIT. AUTO-GENERATED CODE (editorconfig-checker.rb.tpl)
class EditorconfigChecker < Formula
  version "2.0.3"
  bottle :unneeded

  if OS.mac?
    if Hardware::CPU.is_64_bit?
      url "https://github.com/editorconfig-checker/editorconfig-checker/releases/download/2.0.3/ec-darwin-amd64.tar.gz"
      sha256 "c6d646f8057eccde7ad85225fc5dd54a2e7124929a1b61d2f053c343102ed91e"
    else
      url "https://github.com/editorconfig-checker/editorconfig-checker/releases/download/2.0.3/ec-darwin-386.tar.gz"
      sha256 "b58367263b45740d50733b4b4533c10c7434ddba5794609eb4c007dd1cb4bcfd"
    end
  elsif OS.linux?
    if Hardware::CPU.intel?
      if Hardware::CPU.is_64_bit?
        url "https://github.com/editorconfig-checker/editorconfig-checker/releases/download/2.0.3/ec-linux-amd64.tar.gz"
        sha256 "8c61c1bfc82a219f87a700bc04f868bf04a7e9b8854cddae6a781405b3f2e7d5"
      else
        url "https://github.com/editorconfig-checker/editorconfig-checker/releases/download/2.0.3/ec-linux-386.tar.gz"
        sha256 "ade995b1c828564f1a3c9694ac15a52cc4c8e57d7a0a559b423cf63b2ea90602"
      end
    elsif Hardware::CPU.arm?
      if Hardware::CPU.is_64_bit?
        url "https://github.com/editorconfig-checker/editorconfig-checker/releases/download/2.0.3/ec-linux-arm64.tar.gz"
        sha256 "8010008c2109d8c168bb76248cc4e69aaa8843325fe199f9db4ac098f8f40e6b"
      else
        url "https://github.com/editorconfig-checker/editorconfig-checker/releases/download/2.0.3/ec-linux-arm.tar.gz"
        sha256 "281943af43a4455140ac4352abf2a423e5faad38ffcf5439f7b5105e4a4f7ce7"
      end
    end
  end

  def install
    if OS.mac?
      if Hardware::CPU.is_64_bit?
        bin.install "ec-darwin-amd64" => "editorconfig-checker"
      else
        bin.install "ec-darwin-386" => "editorconfig-checker"
      end
    elsif OS.linux?
      if Hardware::CPU.intel?
        if Hardware::CPU.is_64_bit?
          bin.install "ec-linux-amd64" => "editorconfig-checker"
        else
          bin.install "ec-linux-386" => "editorconfig-checker"
        end
      elsif Hardware::CPU.arm?
        if Hardware::CPU.is_64_bit?
          bin.install "ec-linux-arm64" => "editorconfig-checker"
        else
          bin.install "ec-linux-arm" => "editorconfig-checker"
        end
      end
    end
  end

  test do
    system "#{bin}/editorconfig-checker -v"
  end
end
tobiipro/support-firecloud
priv/editorconfig-checker.rb
Ruby
apache-2.0
2,375
/** * @license Copyright 2019 Google Inc. All Rights Reserved. * Licensed under the Apache License, Version 2.0 (the 'License'); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ /* eslint-disable max-len */ 'use strict'; const i18n = require('../../lighthouse-core/lib/i18n/i18n.js'); const wordpressIcon = `data:image/svg+xml,%3Csvg viewBox='0 0 122.5 122.5' xmlns='http://www.w3.org/2000/svg'%3E%3Cg fill='%232f3439'%3E%3Cpath d='M8.7 61.3c0 20.8 12.1 38.7 29.6 47.3l-25-68.7c-3 6.5-4.6 13.7-4.6 21.4zM96.7 58.6c0-6.5-2.3-11-4.3-14.5-2.7-4.3-5.2-8-5.2-12.3 0-4.8 3.7-9.3 8.9-9.3h.7a52.4 52.4 0 0 0-79.4 9.9h3.3c5.5 0 14-.6 14-.6 2.9-.2 3.2 4 .4 4.3 0 0-2.9.4-6 .5l19.1 57L59.7 59l-8.2-22.5c-2.8-.1-5.5-.5-5.5-.5-2.8-.1-2.5-4.5.3-4.3 0 0 8.7.7 13.9.7 5.5 0 14-.7 14-.7 2.8-.2 3.2 4 .3 4.3 0 0-2.8.4-6 .5l19 56.5 5.2-17.5c2.3-7.3 4-12.5 4-17z'/%3E%3Cpath d='M62.2 65.9l-15.8 45.8a52.6 52.6 0 0 0 32.3-.9l-.4-.7zM107.4 36a49.6 49.6 0 0 1-3.6 24.2l-16.1 46.5A52.5 52.5 0 0 0 107.4 36z'/%3E%3Cpath d='M61.3 0a61.3 61.3 0 1 0 .1 122.7A61.3 61.3 0 0 0 61.3 0zm0 119.7a58.5 58.5 0 1 1 .1-117 58.5 58.5 0 0 1-.1 117z'/%3E%3C/g%3E%3C/svg%3E`; const UIStrings = { /** Additional description of a Lighthouse audit that tells the user how they can improve performance by removing unused CSS, in the context of the Wordpress CMS platform. This is displayed after a user expands the section to see more. No character length limits. Links in (parenthesis) become link texts to additional documentation. */ unused_css_rules: 'Consider reducing, or switching, the number of [WordPress plugins](https://wordpress.org/plugins/) loading unused CSS in your page. To identify plugins that are adding extraneous CSS, try running [code coverage](https://developers.google.com/web/updates/2017/04/devtools-release-notes#coverage) in Chrome DevTools. You can identify the theme/plugin responsible from the URL of the stylesheet. Look out for plugins that have many stylesheets in the list which have a lot of red in code coverage. A plugin should only enqueue a stylesheet if it is actually used on the page.', /** Additional description of a Lighthouse audit that tells the user how they can improve image loading by using webp in the context of the Wordpress CMS platform. This is displayed after a user expands the section to see more. No character length limits. Links in (parenthesis) become link texts to additional documentation. */ uses_webp_images: 'Consider using a [plugin](https://wordpress.org/plugins/search/convert+webp/) or service that will automatically convert your uploaded images to the optimal formats.', /** Additional description of a Lighthouse audit that tells the user how they can improve performance by lazy loading images that are initially offscreen in the context of the Wordpress CMS platform. This is displayed after a user expands the section to see more. No character length limits. Links in (parenthesis) become link texts to additional documentation. 
*/ offscreen_images: 'Install a [lazy-load WordPress plugin](https://wordpress.org/plugins/search/lazy+load/) that provides the ability to defer any offscreen images, or switch to a theme that provides that functionality. Also consider using [the AMP plugin](https://wordpress.org/plugins/amp/).', /** Additional description of a Lighthouse audit that tells the user how they can improve site loading performance by reducing the total bytes delivered by their page in the context of the Wordpress CMS platform. This is displayed after a user expands the section to see more. No character length limits. Links in (parenthesis) become link texts to additional documentation. */ total_byte_weight: 'Consider showing excerpts in your post lists (e.g. via the more tag), reducing the number of posts shown on a given page, breaking your long posts into multiple pages, or using a plugin to lazy-load comments.', /** Additional description of a Lighthouse audit that tells the user how they can improve performance by reducing the amount of render blocking resources present on their page, in the context of the Wordpress CMS platform. This is displayed after a user expands the section to see more. No character length limits. Links in (parenthesis) become link texts to additional documentation. */ render_blocking_resources: 'There are a number of WordPress plugins that can help you [inline critical assets](https://wordpress.org/plugins/search/critical+css/) or [defer less important resources](https://wordpress.org/plugins/search/defer+css+javascript/). Beware that optimizations provided by these plugins may break features of your theme or plugins, so you will likely need to make code changes.', /** Additional description of a Lighthouse audit that tells the user how they can improve performance by minifying their CSS files in the context of the Wordpress CMS platform. This is displayed after a user expands the section to see more. No character length limits. Links in (parenthesis) become link texts to additional documentation. */ unminified_css: 'A number of [WordPress plugins](https://wordpress.org/plugins/search/minify+css/) can speed up your site by concatenating, minifying, and compressing your styles. You may also want to use a build process to do this minification up-front if possible.', /** Additional description of a Lighthouse audit that tells the user how they can improve performance by minifying their Javascript files in the context of the Wordpress CMS platform. This is displayed after a user expands the section to see more. No character length limits. Links in (parenthesis) become link texts to additional documentation. */ unminified_javascript: 'A number of [WordPress plugins](https://wordpress.org/plugins/search/minify+javascript/) can speed up your site by concatenating, minifying, and compressing your scripts. You may also want to use a build process to do this minification up front if possible.', /** Additional description of a Lighthouse audit that tells the user how they can improve performance by encoding animated images as video, in the context of the Wordpress CMS platform. This is displayed after a user expands the section to see more. No character length limits. Links in (parenthesis) become link texts to additional documentation. 
*/ efficient_animated_content: 'Consider uploading your GIF to a service which will make it available to embed as an HTML5 video.', /** Additional description of a Lighthouse audit that tells the user how they can improve performance by removing unused Javascript files in the context of the Wordpress CMS platform. This is displayed after a user expands the section to see more. No character length limits. Links in (parenthesis) become link texts to additional documentation. */ unused_javascript: 'Consider reducing, or switching, the number of [WordPress plugins](https://wordpress.org/plugins/) loading unused JavaScript in your page. To identify plugins that are adding extraneous JS, try running [code coverage](https://developers.google.com/web/updates/2017/04/devtools-release-notes#coverage) in Chrome DevTools. You can identify the theme/plugin responsible from the URL of the script. Look out for plugins that have many scripts in the list which have a lot of red in code coverage. A plugin should only enqueue a script if it is actually used on the page.', /** Additional description of a Lighthouse audit that tells the user how they can improve their site by enabling long caching in the context of the Wordpress CMS platform. This is displayed after a user expands the section to see more. No character length limits. Links in (parenthesis) become link texts to additional documentation. */ uses_long_cache_ttl: 'Read about [Browser Caching in WordPress](https://wordpress.org/support/article/optimization/#browser-caching).', /** Additional description of a Lighthouse audit that tells the user how they can improve site performance by optimizing images, in the context of the Wordpress CMS platform. This is displayed after a user expands the section to see more. No character length limits. Links in (parenthesis) become link texts to additional documentation. */ uses_optimized_images: 'Consider using an [image optimization WordPress plugin](https://wordpress.org/plugins/search/optimize+images/) that compresses your images while retaining quality.', /** Additional description of a Lighthouse audit that tells the user how they can improve performance via enabling text compression in the context of the Wordpress CMS platform. This is displayed after a user expands the section to see more. No character length limits. Links in (parenthesis) become link texts to additional documentation. */ uses_text_compression: 'You can enable text compression in your web server configuration.', /** Additional description of a Lighthouse audit that tells the user how they can improve performance by using responsive images in the context of the Wordpress CMS platform. This is displayed after a user expands the section to see more. No character length limits. Links in (parenthesis) become link texts to additional documentation. */ uses_responsive_images: 'Upload images directly through the [media library](https://wordpress.org/support/article/media-library-screen/) to ensure that the required image sizes are available, and then insert them from the media library or use the image widget to ensure the optimal image sizes are used (including those for the responsive breakpoints). Avoid using `Full Size` images unless the dimensions are adequate for their usage. [Learn More](https://wordpress.org/support/article/inserting-images-into-posts-and-pages/).', /** Additional description of a Lighthouse audit that tells the user how they can improve the time to first byte speed metric, in the context of the Wordpress CMS platform. 
This is displayed after a user expands the section to see more. No character length limits. Links in (parenthesis) become link texts to additional documentation. */ time_to_first_byte: 'Themes, plugins, and server specifications all contribute to server response time. Consider finding a more optimized theme, carefully selecting an optimization plugin, and/or upgrading your server.', }; const str_ = i18n.createMessageInstanceIdFn(__filename, UIStrings); module.exports = { id: 'wordpress', iconDataURL: wordpressIcon, title: 'WordPress', descriptions: { 'unused-css-rules': str_(UIStrings.unused_css_rules), 'uses-webp-images': str_(UIStrings.uses_webp_images), 'offscreen-images': str_(UIStrings.offscreen_images), 'total-byte-weight': str_(UIStrings.total_byte_weight), 'render-blocking-resources': str_(UIStrings.render_blocking_resources), 'unminified-css': str_(UIStrings.unminified_css), 'unminified-javascript': str_(UIStrings.unminified_javascript), 'efficient-animated-content': str_(UIStrings.efficient_animated_content), 'unused-javascript': str_(UIStrings.unused_javascript), 'uses-long-cache-ttl': str_(UIStrings.uses_long_cache_ttl), 'uses-optimized-images': str_(UIStrings.uses_optimized_images), 'uses-text-compression': str_(UIStrings.uses_text_compression), 'uses-responsive-images': str_(UIStrings.uses_responsive_images), 'time-to-first-byte': str_(UIStrings.time_to_first_byte), }, }; module.exports.UIStrings = UIStrings;
wardpeet/lighthouse
stack-packs/packs/wordpress.js
JavaScript
apache-2.0
11,796
import logging import os import sys import time import json import jsonschema import pprint import pytest import requests from ray._private.test_utils import ( format_web_url, wait_for_condition, wait_until_server_available, ) from ray.dashboard import dashboard from ray.dashboard.tests.conftest import * # noqa from ray.job_submission import JobSubmissionClient logger = logging.getLogger(__name__) def _get_snapshot(address: str): response = requests.get(f"{address}/api/snapshot") response.raise_for_status() data = response.json() schema_path = os.path.join( os.path.dirname(dashboard.__file__), "modules/snapshot/snapshot_schema.json" ) pprint.pprint(data) jsonschema.validate(instance=data, schema=json.load(open(schema_path))) return data def test_successful_job_status( ray_start_with_dashboard, disable_aiohttp_cache, enable_test_module ): address = ray_start_with_dashboard.address_info["webui_url"] assert wait_until_server_available(address) address = format_web_url(address) job_sleep_time_s = 5 entrypoint_cmd = ( 'python -c"' "import ray;" "ray.init();" "import time;" f"time.sleep({job_sleep_time_s});" '"' ) client = JobSubmissionClient(address) start_time_s = int(time.time()) runtime_env = {"env_vars": {"RAY_TEST_123": "123"}} metadata = {"ray_test_456": "456"} job_id = client.submit_job( entrypoint=entrypoint_cmd, metadata=metadata, runtime_env=runtime_env ) def wait_for_job_to_succeed(): data = _get_snapshot(address) legacy_job_succeeded = False job_succeeded = False # Test legacy job snapshot (one driver per job). for job_entry in data["data"]["snapshot"]["jobs"].values(): if job_entry["status"] is not None: assert job_entry["config"]["metadata"]["jobSubmissionId"] == job_id assert job_entry["status"] in {"PENDING", "RUNNING", "SUCCEEDED"} assert job_entry["statusMessage"] is not None legacy_job_succeeded = job_entry["status"] == "SUCCEEDED" # Test new jobs snapshot (0 to N drivers per job). for job_submission_id, entry in data["data"]["snapshot"][ "jobSubmission" ].items(): if entry["status"] is not None: assert entry["status"] in {"PENDING", "RUNNING", "SUCCEEDED"} assert entry["message"] is not None # TODO(architkulkarni): Disable automatic camelcase. assert entry["runtimeEnv"] == {"envVars": {"RAYTest123": "123"}} assert entry["metadata"] == {"rayTest456": "456"} assert entry["errorType"] is None assert abs(entry["startTime"] - start_time_s) <= 2 if entry["status"] == "SUCCEEDED": job_succeeded = True assert entry["endTime"] >= entry["startTime"] + job_sleep_time_s return legacy_job_succeeded and job_succeeded wait_for_condition(wait_for_job_to_succeed, timeout=30) def test_failed_job_status( ray_start_with_dashboard, disable_aiohttp_cache, enable_test_module ): address = ray_start_with_dashboard.address_info["webui_url"] assert wait_until_server_available(address) address = format_web_url(address) job_sleep_time_s = 5 entrypoint_cmd = ( 'python -c"' "import ray;" "ray.init();" "import time;" f"time.sleep({job_sleep_time_s});" "import sys;" "sys.exit(1);" '"' ) start_time_s = int(time.time()) client = JobSubmissionClient(address) runtime_env = {"env_vars": {"RAY_TEST_456": "456"}} metadata = {"ray_test_789": "789"} job_id = client.submit_job( entrypoint=entrypoint_cmd, metadata=metadata, runtime_env=runtime_env ) def wait_for_job_to_fail(): data = _get_snapshot(address) legacy_job_failed = False job_failed = False # Test legacy job snapshot (one driver per job). 
for job_entry in data["data"]["snapshot"]["jobs"].values(): if job_entry["status"] is not None: assert job_entry["config"]["metadata"]["jobSubmissionId"] == job_id assert job_entry["status"] in {"PENDING", "RUNNING", "FAILED"} assert job_entry["statusMessage"] is not None legacy_job_failed = job_entry["status"] == "FAILED" # Test new jobs snapshot (0 to N drivers per job). for job_submission_id, entry in data["data"]["snapshot"][ "jobSubmission" ].items(): if entry["status"] is not None: assert entry["status"] in {"PENDING", "RUNNING", "FAILED"} assert entry["message"] is not None # TODO(architkulkarni): Disable automatic camelcase. assert entry["runtimeEnv"] == {"envVars": {"RAYTest456": "456"}} assert entry["metadata"] == {"rayTest789": "789"} assert entry["errorType"] is None assert abs(entry["startTime"] - start_time_s) <= 2 if entry["status"] == "FAILED": job_failed = True assert entry["endTime"] >= entry["startTime"] + job_sleep_time_s return legacy_job_failed and job_failed wait_for_condition(wait_for_job_to_fail, timeout=25) if __name__ == "__main__": sys.exit(pytest.main(["-v", __file__]))
ray-project/ray
dashboard/modules/snapshot/tests/test_job_submission.py
Python
apache-2.0
5,558
/*
 * Copyright (c) 2016-2016 Bas van den Boom 'Z3r0byte'
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package net.ilexiconn.magister.container;

import com.google.gson.annotations.SerializedName;

import net.ilexiconn.magister.container.sub.Link;

import java.io.Serializable;

public class MessageFolder implements Serializable {
    @SerializedName("Naam")
    public String title;

    @SerializedName("OngelezenBerichten")
    public int unreadMessages;

    @SerializedName("Id")
    public int id;

    @SerializedName("ParentId")
    public int parentId;

    // TODO: Seems always to be NULL
    // @SerializedName("BerichtenUri")
    // public int id;

    @SerializedName("Links")
    public Link[] links;
}
Z3r0byte/Magistify
app/src/main/java/net/ilexiconn/magister/container/MessageFolder.java
Java
apache-2.0
1,232
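Since the container above is only a Gson mapping target, a small hedged sketch of deserializing it may help; the JSON snippet is invented for illustration and follows the @SerializedName keys in the class:

import com.google.gson.Gson;
import net.ilexiconn.magister.container.MessageFolder;

class MessageFolderExample {
    public static void main(String[] args) {
        // Field names follow the Dutch API keys declared via @SerializedName above.
        String json = "{\"Naam\":\"Postvak IN\",\"OngelezenBerichten\":3,\"Id\":1,\"ParentId\":0,\"Links\":[]}";
        MessageFolder folder = new Gson().fromJson(json, MessageFolder.class);
        System.out.println(folder.title + " has " + folder.unreadMessages + " unread messages");
    }
}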
/*******************************************************************************
 *
 * Copyright (C) 2015-2021 the BBoxDB project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *******************************************************************************/
package org.bboxdb.network.client.response;

import java.nio.ByteBuffer;

import org.bboxdb.network.client.BBoxDBConnection;
import org.bboxdb.network.client.future.network.NetworkOperationFuture;
import org.bboxdb.network.packages.PackageEncodeException;

public interface ServerResponseHandler {

    /**
     * Handle a server result
     *
     * @param bBoxDBConnection
     * @param encodedPackage
     * @param future
     * @return Remove the result future or not
     * @throws PackageEncodeException
     * @throws InterruptedException
     */
    public boolean handleServerResult(final BBoxDBConnection bBoxDBConnection,
            final ByteBuffer encodedPackage, final NetworkOperationFuture future)
            throws PackageEncodeException, InterruptedException;
}
jnidzwetzki/scalephant
bboxdb-server/src/main/java/org/bboxdb/network/client/response/ServerResponseHandler.java
Java
apache-2.0
1,515
#!/usr/bin/perl -w # # Copyright 2017, Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # This example adds a portfolio bidding strategy and uses it to # construct a campaign. use strict; use lib "../../../lib"; use utf8; use Google::Ads::AdWords::Client; use Google::Ads::AdWords::Logging; use Google::Ads::AdWords::v201809::SharedBiddingStrategy; use Google::Ads::AdWords::v201809::TargetSpendBiddingScheme; use Google::Ads::AdWords::v201809::Money; use Google::Ads::AdWords::v201809::BiddingStrategyOperation; use Google::Ads::AdWords::v201809::BudgetOperation; use Google::Ads::AdWords::v201809::NetworkSetting; use Google::Ads::AdWords::v201809::CampaignOperation; use Cwd qw(abs_path); use Data::Uniqid qw(uniqid); # Replace with valid values of your account. my $budget_id = 0; # Example main subroutine. sub use_portfolio_bidding_strategy { my $client = shift; my $budget_id = shift; my $biddingStrategy = create_bidding_strategy($client); if (!$biddingStrategy) { return 0; } if (!$budget_id) { my $budget = create_shared_budget($client); if (!$budget) { return 0; } $budget_id = $budget->get_budgetId(); } create_campaign_with_bidding_strategy($client, $biddingStrategy->get_id(), $budget_id); return 1; } # Creates the bidding strategy object. sub create_bidding_strategy { my $client = shift; my @operations = (); # Create a portfolio bidding strategy. my $bidding_strategy = Google::Ads::AdWords::v201809::SharedBiddingStrategy->new({ name => "Maximize Clicks " . uniqid(), type => "TARGET_SPEND", # Create the bidding scheme. biddingScheme => Google::Ads::AdWords::v201809::TargetSpendBiddingScheme->new({ # Optionally set additional bidding scheme parameters. bidCeiling => Google::Ads::AdWords::v201809::Money->new( {microAmount => 2000000,} ), spendTarget => Google::Ads::AdWords::v201809::Money->new( {microAmount => 20000000,})})}); # Create operation. my $operation = Google::Ads::AdWords::v201809::BiddingStrategyOperation->new({ operator => "ADD", operand => $bidding_strategy }); push @operations, $operation; my $result = $client->BiddingStrategyService()->mutate({operations => \@operations}); if ($result->get_value()) { my $strategy = $result->get_value()->[0]; printf "Portfolio bidding strategy with name \"%s\" and ID %d of type %s " . "was created.\n", $strategy->get_name(), $strategy->get_id(), $strategy->get_biddingScheme()->get_BiddingScheme__Type(); return $strategy; } else { print "No portfolio bidding strategies were added.\n"; return 0; } } # Creates an explicit budget to be used only to create the campaign. sub create_shared_budget { my $client = shift; my @operations = (); # Create a shared budget operation. my $operation = Google::Ads::AdWords::v201809::BudgetOperation->new({ operator => 'ADD', operand => Google::Ads::AdWords::v201809::Budget->new({ amount => Google::Ads::AdWords::v201809::Money->new( {microAmount => 50000000} ), deliveryMethod => 'STANDARD', isExplicitlyShared => 0 })}); push @operations, $operation; # Make the mutate request. 
my $result = $client->BudgetService()->mutate({operations => \@operations}); if ($result->get_value()) { return $result->get_value()->[0]; } else { print "No budgets were added.\n"; return 0; } } # Create a campaign with a portfolio bidding strategy. sub create_campaign_with_bidding_strategy { my $client = shift; my $bidding_strategy_id = shift; my $budget_id = shift; my @operations = (); # Create campaign. my $campaign = Google::Ads::AdWords::v201809::Campaign->new({ name => 'Interplanetary Cruise #' . uniqid(), budget => Google::Ads::AdWords::v201809::Budget->new({budgetId => $budget_id}), # Set bidding strategy (required). biddingStrategyConfiguration => Google::Ads::AdWords::v201809::BiddingStrategyConfiguration->new( {biddingStrategyId => $bidding_strategy_id} ), # Set advertising channel type (required). advertisingChannelType => 'SEARCH', # Network targeting (recommended). networkSetting => Google::Ads::AdWords::v201809::NetworkSetting->new({ targetGoogleSearch => 1, targetSearchNetwork => 1, targetContentNetwork => 1 }), # Recommendation: Set the campaign to PAUSED when creating it to stop # the ads from immediately serving. Set to ENABLED once you've added # targeting and the ads are ready to serve. status => "PAUSED" }); # Create operation. my $operation = Google::Ads::AdWords::v201809::CampaignOperation->new({ operator => 'ADD', operand => $campaign }); push @operations, $operation; my $result = $client->CampaignService()->mutate({operations => \@operations}); if ($result->get_value()) { my $new_campaign = $result->get_value()->[0]; printf "Campaign with name \"%s\", ID %d and bidding strategy ID %d was " . "created.\n", $new_campaign->get_name(), $new_campaign->get_id(), $new_campaign->get_biddingStrategyConfiguration() ->get_biddingStrategyId(); return $new_campaign; } else { print "No campaigns were added.\n"; return 0; } } # Don't run the example if the file is being included. if (abs_path($0) ne abs_path(__FILE__)) { return 1; } # Log SOAP XML request, response and API errors. Google::Ads::AdWords::Logging::enable_all_logging(); # Get AdWords Client, credentials will be read from ~/adwords.properties. my $client = Google::Ads::AdWords::Client->new({version => "v201809"}); # By default examples are set to die on any server returned fault. $client->set_die_on_faults(1); # Call the example use_portfolio_bidding_strategy($client, $budget_id);
googleads/googleads-perl-lib
examples/v201809/advanced_operations/use_portfolio_bidding_strategy.pl
Perl
apache-2.0
6,548
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package ly.stealth.mesos.exhibitor

import org.junit.Assert._
import org.junit.Test
import play.api.libs.json.{Writes, Reads}

import scala.util.{Failure, Try}

class ClusterTest extends MesosTestCase {
  @Test
  def expandIds() {
    val cluster = Cluster()
    (0 until 5).foreach(i => cluster.addServer(ExhibitorServer("" + i)))

    Try(cluster.expandIds("")) match {
      case Failure(t) if t.isInstanceOf[IllegalArgumentException] =>
      case other => fail(other.toString)
    }

    assertEquals(List("0"), cluster.expandIds("0"))
    assertEquals(List("0", "2", "4"), cluster.expandIds("0,2,4"))
    assertEquals(List("1", "2", "3"), cluster.expandIds("1..3"))
    assertEquals(List("0", "1", "3", "4"), cluster.expandIds("0..1,3..4"))
    assertEquals(List("0", "1", "2", "3", "4"), cluster.expandIds("*"))

    // duplicates
    assertEquals(List("0", "1", "2", "3", "4"), cluster.expandIds("0..3,2..4"))

    // sorting
    assertEquals(List("2", "3", "4"), cluster.expandIds("4,3,2"))
  }

  @Test
  def loadSave() {
    val cluster = Cluster()
    cluster.frameworkId = Some("some id")
    cluster.save()

    val loaded = Cluster()
    loaded.load()
    assertEquals(cluster.frameworkId, loaded.frameworkId)
  }
}
CiscoCloud/exhibitor-mesos-framework
src/main/test/ly/stealth/mesos/exhibitor/ClusterTest.scala
Scala
apache-2.0
2,039
/* * Copyright © 2013-2019 camunda services GmbH and various authors (info@camunda.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.engine.impl; import java.util.ArrayList; import java.util.Collection; import java.util.List; import org.camunda.bpm.engine.ProcessEngineException; import org.camunda.bpm.engine.impl.bpmn.parser.BpmnParse; import org.camunda.bpm.engine.impl.context.Context; import org.camunda.bpm.engine.impl.db.CompositePermissionCheck; import org.camunda.bpm.engine.impl.db.PermissionCheck; import org.camunda.bpm.engine.impl.event.EventType; import org.camunda.bpm.engine.impl.interceptor.CommandContext; import org.camunda.bpm.engine.impl.interceptor.CommandExecutor; import org.camunda.bpm.engine.impl.persistence.entity.ProcessDefinitionEntity; import org.camunda.bpm.engine.impl.persistence.entity.SuspensionState; import org.camunda.bpm.engine.impl.util.CompareUtil; import org.camunda.bpm.engine.repository.ProcessDefinition; import org.camunda.bpm.engine.repository.ProcessDefinitionQuery; import org.camunda.bpm.model.bpmn.BpmnModelInstance; import org.camunda.bpm.model.bpmn.instance.Documentation; import org.camunda.bpm.model.xml.instance.ModelElementInstance; import static org.camunda.bpm.engine.impl.util.EnsureUtil.ensureNotNull; import static org.camunda.bpm.engine.impl.util.EnsureUtil.ensurePositive; /** * @author Tom Baeyens * @author Joram Barrez * @author Daniel Meyer * @author Saeid Mirzaei */ public class ProcessDefinitionQueryImpl extends AbstractQuery<ProcessDefinitionQuery, ProcessDefinition> implements ProcessDefinitionQuery { private static final long serialVersionUID = 1L; protected String id; protected String[] ids; protected String category; protected String categoryLike; protected String name; protected String nameLike; protected String deploymentId; protected String key; protected String[] keys; protected String keyLike; protected String resourceName; protected String resourceNameLike; protected Integer version; protected boolean latest = false; protected SuspensionState suspensionState; protected String authorizationUserId; protected String procDefId; protected String incidentType; protected String incidentId; protected String incidentMessage; protected String incidentMessageLike; protected String eventSubscriptionName; protected String eventSubscriptionType; protected boolean isTenantIdSet = false; protected String[] tenantIds; protected boolean includeDefinitionsWithoutTenantId = false; protected String versionTag; protected String versionTagLike; protected boolean isStartableInTasklist = false; protected boolean isNotStartableInTasklist = false; protected boolean startablePermissionCheck = false; // for internal use protected List<PermissionCheck> processDefinitionCreatePermissionChecks = new ArrayList<PermissionCheck>(); public ProcessDefinitionQueryImpl() { } public ProcessDefinitionQueryImpl(CommandExecutor commandExecutor) { super(commandExecutor); } public ProcessDefinitionQueryImpl processDefinitionId(String processDefinitionId) { 
this.id = processDefinitionId; return this; } public ProcessDefinitionQueryImpl processDefinitionIdIn(String... ids) { this.ids = ids; return this; } public ProcessDefinitionQueryImpl processDefinitionCategory(String category) { ensureNotNull("category", category); this.category = category; return this; } public ProcessDefinitionQueryImpl processDefinitionCategoryLike(String categoryLike) { ensureNotNull("categoryLike", categoryLike); this.categoryLike = categoryLike; return this; } public ProcessDefinitionQueryImpl processDefinitionName(String name) { ensureNotNull("name", name); this.name = name; return this; } public ProcessDefinitionQueryImpl processDefinitionNameLike(String nameLike) { ensureNotNull("nameLike", nameLike); this.nameLike = nameLike; return this; } public ProcessDefinitionQueryImpl deploymentId(String deploymentId) { ensureNotNull("deploymentId", deploymentId); this.deploymentId = deploymentId; return this; } public ProcessDefinitionQueryImpl processDefinitionKey(String key) { ensureNotNull("key", key); this.key = key; return this; } public ProcessDefinitionQueryImpl processDefinitionKeysIn(String... keys) { ensureNotNull("keys", (Object[]) keys); this.keys = keys; return this; } public ProcessDefinitionQueryImpl processDefinitionKeyLike(String keyLike) { ensureNotNull("keyLike", keyLike); this.keyLike = keyLike; return this; } public ProcessDefinitionQueryImpl processDefinitionResourceName(String resourceName) { ensureNotNull("resourceName", resourceName); this.resourceName = resourceName; return this; } public ProcessDefinitionQueryImpl processDefinitionResourceNameLike(String resourceNameLike) { ensureNotNull("resourceNameLike", resourceNameLike); this.resourceNameLike = resourceNameLike; return this; } public ProcessDefinitionQueryImpl processDefinitionVersion(Integer version) { ensureNotNull("version", version); ensurePositive("version", version.longValue()); this.version = version; return this; } public ProcessDefinitionQueryImpl latestVersion() { this.latest = true; return this; } public ProcessDefinitionQuery active() { this.suspensionState = SuspensionState.ACTIVE; return this; } public ProcessDefinitionQuery suspended() { this.suspensionState = SuspensionState.SUSPENDED; return this; } public ProcessDefinitionQuery messageEventSubscription(String messageName) { return eventSubscription(EventType.MESSAGE, messageName); } public ProcessDefinitionQuery messageEventSubscriptionName(String messageName) { return eventSubscription(EventType.MESSAGE, messageName); } public ProcessDefinitionQuery processDefinitionStarter(String procDefId) { this.procDefId = procDefId; return this; } public ProcessDefinitionQuery eventSubscription(EventType eventType, String eventName) { ensureNotNull("event type", eventType); ensureNotNull("event name", eventName); this.eventSubscriptionType = eventType.name(); this.eventSubscriptionName = eventName; return this; } public ProcessDefinitionQuery incidentType(String incidentType) { ensureNotNull("incident type", incidentType); this.incidentType = incidentType; return this; } public ProcessDefinitionQuery incidentId(String incidentId) { ensureNotNull("incident id", incidentId); this.incidentId = incidentId; return this; } public ProcessDefinitionQuery incidentMessage(String incidentMessage) { ensureNotNull("incident message", incidentMessage); this.incidentMessage = incidentMessage; return this; } public ProcessDefinitionQuery incidentMessageLike(String incidentMessageLike) { ensureNotNull("incident messageLike", incidentMessageLike); 
this.incidentMessageLike = incidentMessageLike; return this; } @Override protected boolean hasExcludingConditions() { return super.hasExcludingConditions() || CompareUtil.elementIsNotContainedInArray(id, ids); } public ProcessDefinitionQueryImpl tenantIdIn(String... tenantIds) { ensureNotNull("tenantIds", (Object[]) tenantIds); this.tenantIds = tenantIds; isTenantIdSet = true; return this; } public ProcessDefinitionQuery withoutTenantId() { isTenantIdSet = true; this.tenantIds = null; return this; } public ProcessDefinitionQuery includeProcessDefinitionsWithoutTenantId() { this.includeDefinitionsWithoutTenantId = true; return this; } public ProcessDefinitionQuery versionTag(String versionTag) { ensureNotNull("versionTag", versionTag); this.versionTag = versionTag; return this; } public ProcessDefinitionQuery versionTagLike(String versionTagLike) { ensureNotNull("versionTagLike", versionTagLike); this.versionTagLike = versionTagLike; return this; } public ProcessDefinitionQuery startableInTasklist() { this.isStartableInTasklist = true; return this; } public ProcessDefinitionQuery notStartableInTasklist() { this.isNotStartableInTasklist = true; return this; } public ProcessDefinitionQuery startablePermissionCheck() { this.startablePermissionCheck = true; return this; } //sorting //////////////////////////////////////////// public ProcessDefinitionQuery orderByDeploymentId() { return orderBy(ProcessDefinitionQueryProperty.DEPLOYMENT_ID); } public ProcessDefinitionQuery orderByProcessDefinitionKey() { return orderBy(ProcessDefinitionQueryProperty.PROCESS_DEFINITION_KEY); } public ProcessDefinitionQuery orderByProcessDefinitionCategory() { return orderBy(ProcessDefinitionQueryProperty.PROCESS_DEFINITION_CATEGORY); } public ProcessDefinitionQuery orderByProcessDefinitionId() { return orderBy(ProcessDefinitionQueryProperty.PROCESS_DEFINITION_ID); } public ProcessDefinitionQuery orderByProcessDefinitionVersion() { return orderBy(ProcessDefinitionQueryProperty.PROCESS_DEFINITION_VERSION); } public ProcessDefinitionQuery orderByProcessDefinitionName() { return orderBy(ProcessDefinitionQueryProperty.PROCESS_DEFINITION_NAME); } public ProcessDefinitionQuery orderByTenantId() { return orderBy(ProcessDefinitionQueryProperty.TENANT_ID); } public ProcessDefinitionQuery orderByVersionTag() { return orderBy(ProcessDefinitionQueryProperty.VERSION_TAG); } //results //////////////////////////////////////////// @Override public long executeCount(CommandContext commandContext) { checkQueryOk(); return commandContext .getProcessDefinitionManager() .findProcessDefinitionCountByQueryCriteria(this); } @Override public List<ProcessDefinition> executeList(CommandContext commandContext, Page page) { checkQueryOk(); List<ProcessDefinition> list = commandContext .getProcessDefinitionManager() .findProcessDefinitionsByQueryCriteria(this, page); boolean shouldQueryAddBpmnModelInstancesToCache = commandContext.getProcessEngineConfiguration().getEnableFetchProcessDefinitionDescription(); if(shouldQueryAddBpmnModelInstancesToCache) { addProcessDefinitionToCacheAndRetrieveDocumentation(list); } return list; } protected void addProcessDefinitionToCacheAndRetrieveDocumentation(List<ProcessDefinition> list) { for (ProcessDefinition processDefinition : list) { BpmnModelInstance bpmnModelInstance = Context.getProcessEngineConfiguration() .getDeploymentCache() .findBpmnModelInstanceForProcessDefinition((ProcessDefinitionEntity) processDefinition); ModelElementInstance processElement = 
bpmnModelInstance.getModelElementById(processDefinition.getKey()); if (processElement != null) { Collection<Documentation> documentations = processElement.getChildElementsByType(Documentation.class); List<String> docStrings = new ArrayList<String>(); for (Documentation documentation : documentations) { docStrings.add(documentation.getTextContent()); } ProcessDefinitionEntity processDefinitionEntity = (ProcessDefinitionEntity) processDefinition; processDefinitionEntity.setProperty(BpmnParse.PROPERTYNAME_DOCUMENTATION, BpmnParse.parseDocumentation(docStrings)); } } } @Override public void checkQueryOk() { super.checkQueryOk(); if (latest && ( (id != null) || (version != null) || (deploymentId != null) ) ){ throw new ProcessEngineException("Calling latest() can only be used in combination with key(String) and keyLike(String) or name(String) and nameLike(String)"); } } //getters //////////////////////////////////////////// public String getDeploymentId() { return deploymentId; } public String getId() { return id; } public String[] getIds() { return ids; } public String getName() { return name; } public String getNameLike() { return nameLike; } public String getKey() { return key; } public String getKeyLike() { return keyLike; } public Integer getVersion() { return version; } public boolean isLatest() { return latest; } public String getCategory() { return category; } public String getCategoryLike() { return categoryLike; } public String getResourceName() { return resourceName; } public String getResourceNameLike() { return resourceNameLike; } public SuspensionState getSuspensionState() { return suspensionState; } public void setSuspensionState(SuspensionState suspensionState) { this.suspensionState = suspensionState; } public String getIncidentId() { return incidentId; } public String getIncidentType() { return incidentType; } public String getIncidentMessage() { return incidentMessage; } public String getIncidentMessageLike() { return incidentMessageLike; } public String getVersionTag() { return versionTag; } public boolean isStartableInTasklist() { return isStartableInTasklist; } public boolean isNotStartableInTasklist() { return isNotStartableInTasklist; } public boolean isStartablePermissionCheck() { return startablePermissionCheck; } public void setProcessDefinitionCreatePermissionChecks(List<PermissionCheck> processDefinitionCreatePermissionChecks) { this.processDefinitionCreatePermissionChecks = processDefinitionCreatePermissionChecks; } public List<PermissionCheck> getProcessDefinitionCreatePermissionChecks() { return processDefinitionCreatePermissionChecks; } public void addProcessDefinitionCreatePermissionCheck(CompositePermissionCheck processDefinitionCreatePermissionCheck) { processDefinitionCreatePermissionChecks.addAll(processDefinitionCreatePermissionCheck.getAllPermissionChecks()); } public ProcessDefinitionQueryImpl startableByUser(String userId) { ensureNotNull("userId", userId); this.authorizationUserId = userId; return this; } }
xasx/camunda-bpm-platform
engine/src/main/java/org/camunda/bpm/engine/impl/ProcessDefinitionQueryImpl.java
Java
apache-2.0
14,791
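For orientation, the query implementation above is normally reached through the public RepositoryService API rather than instantiated directly; a minimal sketch under that assumption (the process key "invoice" is an invented example) could look like this:

import java.util.List;
import org.camunda.bpm.engine.ProcessEngine;
import org.camunda.bpm.engine.repository.ProcessDefinition;

class ProcessDefinitionQueryUsage {

  static List<ProcessDefinition> latestActiveInvoiceDefinitions(ProcessEngine engine) {
    // Each fluent call sets one field of ProcessDefinitionQueryImpl; executeList()
    // is only invoked when list() is called on the query.
    return engine.getRepositoryService()
        .createProcessDefinitionQuery()
        .processDefinitionKey("invoice")   // assumed example key
        .latestVersion()
        .active()
        .orderByProcessDefinitionVersion().desc()
        .list();
  }
}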
package org.globus.gsi.provider.simple;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import java.security.KeyStore.LoadStoreParameter;
import java.util.Collections;
import java.util.concurrent.ConcurrentHashMap;
import java.util.Map;
import java.security.cert.X509Certificate;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.security.Key;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.UnrecoverableKeyException;
import java.security.cert.Certificate;
import java.security.cert.CertificateException;
import java.util.Date;
import java.util.Enumeration;
import java.security.KeyStoreSpi;

/**
 * @deprecated
 */
public class SimpleMemoryKeyStore extends KeyStoreSpi {

    private Log logger = LogFactory.getLog(SimpleMemoryKeyStore.class);

    private Map<String, X509Certificate> certMap;

    @Override
    public void engineLoad(LoadStoreParameter params)
        throws IOException, NoSuchAlgorithmException, CertificateException {
        logger.debug("creating cert store.");
        if (params == null) {
            throw new IllegalArgumentException("parameter null");
        } else if (!(params instanceof SimpleMemoryKeyStoreLoadStoreParameter)) {
            throw new IllegalArgumentException("Wrong parameter type");
        }
        X509Certificate[] certs = ((SimpleMemoryKeyStoreLoadStoreParameter) params).getCerts();
        this.certMap = new ConcurrentHashMap<String,X509Certificate>();
        if (certs != null) {
            for (X509Certificate cert : certs) {
                if (cert != null) {
                    logger.debug("adding cert " + cert.getSubjectX500Principal().getName());
                    certMap.put(cert.getSubjectX500Principal().getName(), cert);
                }
            }
        }
    }

    @Override
    public Enumeration<String> engineAliases() {
        return Collections.enumeration(this.certMap.keySet());
    }

    @Override
    public boolean engineContainsAlias(String alias) {
        return this.certMap.containsKey(alias);
    }

    @Override
    public void engineDeleteEntry(String alias) throws KeyStoreException {
        this.certMap.remove(alias);
    }

    @Override
    public Certificate engineGetCertificate(String alias) {
        return this.certMap.get(alias);
    }

    @Override
    public boolean engineIsCertificateEntry(String alias) {
        return engineContainsAlias(alias);
    }

    @Override
    public boolean engineIsKeyEntry(String alias) {
        return false;
    }

    @Override
    public void engineSetCertificateEntry(String alias, Certificate cert) throws KeyStoreException {
        if (cert == null) {
            return;
        }
        if (cert instanceof X509Certificate) {
            this.certMap.put(alias, (X509Certificate) cert);
        } else {
            throw new IllegalArgumentException("Certificate should be X509Cert");
        }
    }

    @Override
    public int engineSize() {
        return this.certMap.size();
    }

    @Override
    public void engineStore(OutputStream stream, char[] password)
        throws IOException, NoSuchAlgorithmException, CertificateException {
        throw new UnsupportedOperationException();
    }

    @Override
    public void engineSetKeyEntry(String alias, byte[] key, Certificate[] chain) throws KeyStoreException {
        throw new UnsupportedOperationException();
    }

    @Override
    public void engineSetKeyEntry(String alias, Key key, char[] password, Certificate[] chain)
        throws KeyStoreException {
        throw new UnsupportedOperationException();
    }

    @Override
    public void engineLoad(InputStream stream, char[] password)
        throws IOException, NoSuchAlgorithmException, CertificateException {
        throw new UnsupportedOperationException();
    }

    @Override
    public String engineGetCertificateAlias(Certificate cert) {
        throw new UnsupportedOperationException();
    }

    @Override
    public Certificate[] engineGetCertificateChain(String alias) {
        throw new UnsupportedOperationException();
    }

    @Override
    public Date engineGetCreationDate(String alias) {
        throw new UnsupportedOperationException();
    }

    @Override
    public Key engineGetKey(String alias, char[] password)
        throws NoSuchAlgorithmException, UnrecoverableKeyException {
        throw new UnsupportedOperationException();
    }
}
jrevillard/JGlobus
ssl-proxies/src/main/java/org/globus/gsi/provider/simple/SimpleMemoryKeyStore.java
Java
apache-2.0
4,512
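A minimal usage sketch for the keystore above, exercising the SPI methods directly rather than going through java.security.KeyStore. It assumes SimpleMemoryKeyStoreLoadStoreParameter is public and implements KeyStore.LoadStoreParameter; its construction is not shown in this file, so it is passed in, and only methods defined on SimpleMemoryKeyStore itself are called.

import org.globus.gsi.provider.simple.SimpleMemoryKeyStore;
import org.globus.gsi.provider.simple.SimpleMemoryKeyStoreLoadStoreParameter;

import java.security.cert.X509Certificate;
import java.util.Enumeration;

public class SimpleMemoryKeyStoreSketch {

    // Lists every certificate held by the in-memory keystore, keyed by subject DN.
    static void dumpAliases(SimpleMemoryKeyStore store,
                            SimpleMemoryKeyStoreLoadStoreParameter params) throws Exception {
        // engineLoad indexes each certificate under its subject X.500 principal name.
        store.engineLoad(params);

        Enumeration<String> aliases = store.engineAliases();
        while (aliases.hasMoreElements()) {
            String alias = aliases.nextElement();
            X509Certificate cert = (X509Certificate) store.engineGetCertificate(alias);
            System.out.println(alias + " -> " + cert.getSubjectX500Principal());
        }
    }
}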
<?php
/**
 * HiPay Fullservice SDK Magento 1
 *
 * 2018 HiPay
 *
 * NOTICE OF LICENSE
 *
 * @author HiPay <support.tpp@hipay.com>
 * @copyright 2018 HiPay
 * @license https://github.com/hipay/hipay-fullservice-sdk-magento1/blob/master/LICENSE.md
 */

/**
 *
 *
 * @author HiPay <support.tpp@hipay.com>
 * @copyright Copyright (c) 2018 - HiPay
 * @license https://github.com/hipay/hipay-fullservice-sdk-magento1/blob/master/LICENSE.md
 * @link https://github.com/hipay/hipay-fullservice-sdk-magento1
 */
class Allopass_Hipay_YandexController extends Allopass_Hipay_Controller_Payment
{
    protected function _getMethodInstance()
    {
        return Mage::getSingleton('hipay/method_yandex');
    }
}
hipay/hipay-fullservice-sdk-magento1
src/app/code/community/Allopass/Hipay/controllers/YandexController.php
PHP
apache-2.0
721
# Phoma xylostei Cooke & Harkn. SPECIES

#### Status
ACCEPTED

#### According to
Index Fungorum

#### Published in
Grevillea 9(no. 51): 82 (1881)

#### Original name
Phoma xylostei Cooke & Harkn.

### Remarks
null
mdoering/backbone
life/Fungi/Ascomycota/Dothideomycetes/Pleosporales/Phoma/Phoma xylostei/README.md
Markdown
apache-2.0
213
package com.marcohc.architecture.aca.presentation.bus.presentation.mvp;

import com.hannesdorfmann.mosby.mvp.MvpPresenter;
import com.marcohc.architecture.aca.presentation.bus.common.BusProvider;
import com.marcohc.architecture.aca.presentation.mvp.BaseMvpView;
import com.marcohc.architecture.aca.presentation.mvp.BaseMvpActivity;

/**
 * Base bus activity which automatically registers with the event bus.
 * <p>
 * Override it for specific common methods in activities.
 *
 * @param <V> the BaseMvpView type the superclass is implementing
 * @param <P> the type of MvpPresenter which will handle the logic of the class
 * @author Marco Hernaiz
 * @since 08/08/16
 */
public abstract class BaseBusMvpActivity<V extends BaseMvpView, P extends MvpPresenter<V>>
        extends BaseMvpActivity<V, P>
        implements BaseMvpView {

    @Override
    public void onStart() {
        super.onStart();
        BusProvider.register(presenter);
    }

    @Override
    public void onStop() {
        BusProvider.unregister(presenter);
        super.onStop();
    }
}
marcohc/android-clean-architecture
bus/src/main/java/com/marcohc/architecture/aca/presentation/bus/presentation/mvp/BaseBusMvpActivity.java
Java
apache-2.0
1,048
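A self-contained sketch of the lifecycle pattern BaseBusMvpActivity encodes: subscribe the presenter when the screen starts and unsubscribe it before it stops. The Bus and Screen types below are placeholders, not classes from this library; they only illustrate the symmetric register/unregister calls.

public class BusLifecycleSketch {

    interface Bus {
        void register(Object subscriber);
        void unregister(Object subscriber);
    }

    static class Screen {
        private final Bus bus;
        private final Object presenter;

        Screen(Bus bus, Object presenter) {
            this.bus = bus;
            this.presenter = presenter;
        }

        void onStart() {
            // Mirror of BaseBusMvpActivity.onStart(): subscribe once the screen is visible.
            bus.register(presenter);
        }

        void onStop() {
            // Mirror of BaseBusMvpActivity.onStop(): unsubscribe before the screen goes away,
            // so no events are delivered to a stopped presenter.
            bus.unregister(presenter);
        }
    }
}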
# AUTOGENERATED FILE FROM balenalib/npe-x500-m3-alpine:edge-build # remove several traces of python RUN apk del python* # http://bugs.python.org/issue19846 # > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK. ENV LANG C.UTF-8 # key 63C7CC90: public key "Simon McVittie <smcv@pseudorandom.co.uk>" imported # key 3372DCFA: public key "Donald Stufft (dstufft) <donald@stufft.io>" imported RUN gpg --keyserver keyring.debian.org --recv-keys 4DE8FF2A63C7CC90 \ && gpg --keyserver keyserver.ubuntu.com --recv-key 6E3CBCE93372DCFA \ && gpg --keyserver keyserver.ubuntu.com --recv-keys 0x52a43a1e4b77b059 # point Python at a system-provided certificate database. Otherwise, we might hit CERTIFICATE_VERIFY_FAILED. # https://www.python.org/dev/peps/pep-0476/#trust-database ENV SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt ENV PYTHON_VERSION 3.6.12 # if this is called "PIP_VERSION", pip explodes with "ValueError: invalid truth value '<VERSION>'" ENV PYTHON_PIP_VERSION 21.0.1 ENV SETUPTOOLS_VERSION 56.0.0 RUN set -x \ && curl -SLO "http://resin-packages.s3.amazonaws.com/python/v$PYTHON_VERSION/Python-$PYTHON_VERSION.linux-alpine-armv7hf-openssl1.1.tar.gz" \ && echo "9ea800721595b573ee89cb20f4c28fa0273cf726a509bc7fcd21772bd4adefda Python-$PYTHON_VERSION.linux-alpine-armv7hf-openssl1.1.tar.gz" | sha256sum -c - \ && tar -xzf "Python-$PYTHON_VERSION.linux-alpine-armv7hf-openssl1.1.tar.gz" --strip-components=1 \ && rm -rf "Python-$PYTHON_VERSION.linux-alpine-armv7hf-openssl1.1.tar.gz" \ && if [ ! -e /usr/local/bin/pip3 ]; then : \ && curl -SLO "https://raw.githubusercontent.com/pypa/get-pip/430ba37776ae2ad89f794c7a43b90dc23bac334c/get-pip.py" \ && echo "19dae841a150c86e2a09d475b5eb0602861f2a5b7761ec268049a662dbd2bd0c get-pip.py" | sha256sum -c - \ && python3 get-pip.py \ && rm get-pip.py \ ; fi \ && pip3 install --no-cache-dir --upgrade --force-reinstall pip=="$PYTHON_PIP_VERSION" setuptools=="$SETUPTOOLS_VERSION" \ && find /usr/local \ \( -type d -a -name test -o -name tests \) \ -o \( -type f -a -name '*.pyc' -o -name '*.pyo' \) \ -exec rm -rf '{}' + \ && cd / \ && rm -rf /usr/src/python ~/.cache # install "virtualenv", since the vast majority of users of this image will want it RUN pip3 install --no-cache-dir virtualenv ENV PYTHON_DBUS_VERSION 1.2.8 # install dbus-python dependencies RUN apk add --no-cache \ dbus-dev \ dbus-glib-dev # install dbus-python RUN set -x \ && mkdir -p /usr/src/dbus-python \ && curl -SL "http://dbus.freedesktop.org/releases/dbus-python/dbus-python-$PYTHON_DBUS_VERSION.tar.gz" -o dbus-python.tar.gz \ && curl -SL "http://dbus.freedesktop.org/releases/dbus-python/dbus-python-$PYTHON_DBUS_VERSION.tar.gz.asc" -o dbus-python.tar.gz.asc \ && gpg --verify dbus-python.tar.gz.asc \ && tar -xzC /usr/src/dbus-python --strip-components=1 -f dbus-python.tar.gz \ && rm dbus-python.tar.gz* \ && cd /usr/src/dbus-python \ && PYTHON_VERSION=$(expr match "$PYTHON_VERSION" '\([0-9]*\.[0-9]*\)') ./configure \ && make -j$(nproc) \ && make install -j$(nproc) \ && cd / \ && rm -rf /usr/src/dbus-python # make some useful symlinks that are expected to exist RUN cd /usr/local/bin \ && ln -sf pip3 pip \ && { [ -e easy_install ] || ln -s easy_install-* easy_install; } \ && ln -sf idle3 idle \ && ln -sf pydoc3 pydoc \ && ln -sf python3 python \ && ln -sf python3-config python-config CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. 
Here's the link: https://balena.io/docs"] RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@python.sh" \ && echo "Running test-stack@python" \ && chmod +x test-stack@python.sh \ && bash test-stack@python.sh \ && rm -rf test-stack@python.sh RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Alpine Linux edge \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nPython v3.6.12, Pip v21.0.1, Setuptools v56.0.0 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info RUN echo $'#!/bin/bash\nbalena-info\nbusybox ln -sf /bin/busybox /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \ && chmod +x /bin/sh-shim \ && ln -f /bin/sh /bin/sh.real \ && ln -f /bin/sh-shim /bin/sh
nghiant2710/base-images
balena-base-images/python/npe-x500-m3/alpine/edge/3.6.12/build/Dockerfile
Dockerfile
apache-2.0
4,840
# AUTOGENERATED FILE FROM balenalib/artik10-alpine:3.11-run ENV NODE_VERSION 14.16.0 ENV YARN_VERSION 1.22.4 # Install dependencies RUN apk add --no-cache libgcc libstdc++ libuv \ && apk add --no-cache libssl1.0 || apk add --no-cache libssl1.1 RUN buildDeps='curl' \ && set -x \ && for key in \ 6A010C5166006599AA17F08146C2130DFD2497F5 \ ; do \ gpg --keyserver pgp.mit.edu --recv-keys "$key" || \ gpg --keyserver keyserver.pgp.com --recv-keys "$key" || \ gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; \ done \ && apk add --no-cache $buildDeps \ && curl -SLO "http://resin-packages.s3.amazonaws.com/node/v$NODE_VERSION/node-v$NODE_VERSION-linux-alpine-armv7hf.tar.gz" \ && echo "0ae0d3668eb409e09d006a93e6194e39b28b7576eee29dc779aa9dc9a2f7a88a node-v$NODE_VERSION-linux-alpine-armv7hf.tar.gz" | sha256sum -c - \ && tar -xzf "node-v$NODE_VERSION-linux-alpine-armv7hf.tar.gz" -C /usr/local --strip-components=1 \ && rm "node-v$NODE_VERSION-linux-alpine-armv7hf.tar.gz" \ && curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \ && curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \ && gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \ && mkdir -p /opt/yarn \ && tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \ && ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \ && ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \ && rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \ && npm config set unsafe-perm true -g --unsafe-perm \ && rm -rf /tmp/* CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"] RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@node.sh" \ && echo "Running test-stack@node" \ && chmod +x test-stack@node.sh \ && bash test-stack@node.sh \ && rm -rf test-stack@node.sh RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Alpine Linux 3.11 \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v14.16.0, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info RUN echo $'#!/bin/bash\nbalena-info\nbusybox ln -sf /bin/busybox /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \ && chmod +x /bin/sh-shim \ && ln -f /bin/sh /bin/sh.real \ && ln -f /bin/sh-shim /bin/sh
nghiant2710/base-images
balena-base-images/node/artik10/alpine/3.11/14.16.0/run/Dockerfile
Dockerfile
apache-2.0
3,022
package it.unibz.inf.ontop.answering.reformulation.impl; import com.google.inject.assistedinject.Assisted; import com.google.inject.assistedinject.AssistedInject; import it.unibz.inf.ontop.answering.logging.QueryLogger; import it.unibz.inf.ontop.answering.reformulation.QueryCache; import it.unibz.inf.ontop.answering.reformulation.QueryReformulator; import it.unibz.inf.ontop.answering.reformulation.generation.NativeQueryGenerator; import it.unibz.inf.ontop.answering.reformulation.input.InputQuery; import it.unibz.inf.ontop.answering.reformulation.input.InputQueryFactory; import it.unibz.inf.ontop.answering.reformulation.input.translation.InputQueryTranslator; import it.unibz.inf.ontop.answering.reformulation.rewriting.QueryRewriter; import it.unibz.inf.ontop.answering.reformulation.unfolding.QueryUnfolder; import it.unibz.inf.ontop.exception.OntopReformulationException; import it.unibz.inf.ontop.injection.TranslationFactory; import it.unibz.inf.ontop.iq.IQ; import it.unibz.inf.ontop.iq.exception.EmptyQueryException; import it.unibz.inf.ontop.iq.optimizer.*; import it.unibz.inf.ontop.iq.planner.QueryPlanner; import it.unibz.inf.ontop.spec.OBDASpecification; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * TODO: rename it QueryTranslatorImpl ? * * See ReformulationFactory for creating a new instance. * */ public class QuestQueryProcessor implements QueryReformulator { private static final Logger LOGGER = LoggerFactory.getLogger(QuestQueryProcessor.class); private final QueryRewriter rewriter; private final NativeQueryGenerator datasourceQueryGenerator; private final QueryCache queryCache; private final QueryUnfolder queryUnfolder; private final InputQueryTranslator inputQueryTranslator; private final InputQueryFactory inputQueryFactory; private final GeneralStructuralAndSemanticIQOptimizer generalOptimizer; private final QueryPlanner queryPlanner; private final QueryLogger.Factory queryLoggerFactory; @AssistedInject private QuestQueryProcessor(@Assisted OBDASpecification obdaSpecification, QueryCache queryCache, TranslationFactory translationFactory, QueryRewriter queryRewriter, InputQueryFactory inputQueryFactory, InputQueryTranslator inputQueryTranslator, GeneralStructuralAndSemanticIQOptimizer generalOptimizer, QueryPlanner queryPlanner, QueryLogger.Factory queryLoggerFactory) { this.inputQueryFactory = inputQueryFactory; this.rewriter = queryRewriter; this.generalOptimizer = generalOptimizer; this.queryPlanner = queryPlanner; this.queryLoggerFactory = queryLoggerFactory; this.rewriter.setTBox(obdaSpecification.getSaturatedTBox()); this.queryUnfolder = translationFactory.create(obdaSpecification.getSaturatedMapping()); this.datasourceQueryGenerator = translationFactory.create(obdaSpecification.getDBParameters()); this.inputQueryTranslator = inputQueryTranslator; this.queryCache = queryCache; LOGGER.info("Ontop has completed the setup and it is ready for query answering!"); } @Override public IQ reformulateIntoNativeQuery(InputQuery inputQuery, QueryLogger queryLogger) throws OntopReformulationException { long beginning = System.currentTimeMillis(); IQ cachedQuery = queryCache.get(inputQuery); if (cachedQuery != null) { queryLogger.declareReformulationFinishedAndSerialize(cachedQuery,true); return cachedQuery; } try { LOGGER.debug("SPARQL query:\n{}\n", inputQuery.getInputString()); IQ convertedIQ = inputQuery.translate(inputQueryTranslator); LOGGER.debug("Parsed query converted into IQ (after normalization):\n{}\n", convertedIQ); queryLogger.setSparqlIQ(convertedIQ); try { 
LOGGER.debug("Start the rewriting process..."); IQ rewrittenIQ = rewriter.rewrite(convertedIQ); LOGGER.debug("Rewritten IQ:\n{}\n", rewrittenIQ); LOGGER.debug("Start the unfolding..."); IQ unfoldedIQ = queryUnfolder.optimize(rewrittenIQ); if (unfoldedIQ.getTree().isDeclaredAsEmpty()) { queryLogger.declareReformulationFinishedAndSerialize(unfoldedIQ, false); LOGGER.debug("Reformulation time: {} ms\n", System.currentTimeMillis() - beginning); return unfoldedIQ; } LOGGER.debug("Unfolded query:\n{}\n", unfoldedIQ); IQ optimizedQuery = generalOptimizer.optimize(unfoldedIQ); IQ plannedQuery = queryPlanner.optimize(optimizedQuery); LOGGER.debug("Planned query:\n{}\n", plannedQuery); queryLogger.setPlannedQuery(plannedQuery); IQ executableQuery = generateExecutableQuery(plannedQuery); queryCache.put(inputQuery, executableQuery); queryLogger.declareReformulationFinishedAndSerialize(executableQuery, false); LOGGER.debug("Reformulation time: {} ms\n", System.currentTimeMillis() - beginning); return executableQuery; } catch (OntopReformulationException e) { queryLogger.declareReformulationException(e); throw e; } } /* * Bug: should normally not be reached * TODO: remove it */ catch (Exception e) { LOGGER.warn("Unexpected exception: " + e.getMessage(), e); OntopReformulationException exception = new OntopReformulationException(e); queryLogger.declareReformulationException(exception); throw exception; } } private IQ generateExecutableQuery(IQ iq) { LOGGER.debug("Producing the native query string..."); IQ executableQuery = datasourceQueryGenerator.generateSourceQuery(iq); LOGGER.debug("Resulting native query:\n{}\n", executableQuery); return executableQuery; } /** * Returns the final rewriting of the given query */ @Override public String getRewritingRendering(InputQuery query) throws OntopReformulationException { LOGGER.debug("SPARQL query:\n{}\n", query.getInputString()); IQ convertedIQ = query.translate(inputQueryTranslator); LOGGER.debug("Parsed query converted into IQ:\n{}\n", convertedIQ); try { IQ rewrittenIQ = rewriter.rewrite(convertedIQ); return rewrittenIQ.toString(); } catch (EmptyQueryException e) { e.printStackTrace(); } return "EMPTY REWRITING"; } @Override public InputQueryFactory getInputQueryFactory() { return inputQueryFactory; } @Override public QueryLogger.Factory getQueryLoggerFactory() { return queryLoggerFactory; } }
ontop/ontop
engine/reformulation/core/src/main/java/it/unibz/inf/ontop/answering/reformulation/impl/QuestQueryProcessor.java
Java
apache-2.0
6,425
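A hedged caller-side sketch for the reformulator above, using only methods that appear in this class (reformulateIntoNativeQuery and getRewritingRendering). How InputQuery and QueryLogger instances are obtained is not shown here, so both are taken as parameters rather than constructed.

import it.unibz.inf.ontop.answering.logging.QueryLogger;
import it.unibz.inf.ontop.answering.reformulation.impl.QuestQueryProcessor;
import it.unibz.inf.ontop.answering.reformulation.input.InputQuery;
import it.unibz.inf.ontop.exception.OntopReformulationException;
import it.unibz.inf.ontop.iq.IQ;

public final class ReformulationSketch {

    // Full pipeline: cache lookup, SPARQL-to-IQ translation, rewriting, unfolding,
    // optimization, planning and native query generation.
    static IQ toNativeQuery(QuestQueryProcessor reformulator,
                            InputQuery query,
                            QueryLogger logger) throws OntopReformulationException {
        return reformulator.reformulateIntoNativeQuery(query, logger);
    }

    // Stops after the rewriting step; handy when debugging the TBox-based rewriting.
    static String rewritingOnly(QuestQueryProcessor reformulator, InputQuery query)
            throws OntopReformulationException {
        return reformulator.getRewritingRendering(query);
    }
}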
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kie.workbench.common.services.backend.common;

public interface Supplier<T> {

    T get();
}
scandihealth/kie-wb-common
kie-wb-common-services/kie-wb-common-services-backend/src/main/java/org/kie/workbench/common/services/backend/common/Supplier.java
Java
apache-2.0
729
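A minimal example of the single-method Supplier<T> interface above, deferring a computation until get() is called. Because the interface has exactly one abstract method, a Java 8 lambda could replace the anonymous class.

import org.kie.workbench.common.services.backend.common.Supplier;

public class SupplierSketch {

    public static void main(String[] args) {
        Supplier<String> lazyGreeting = new Supplier<String>() {
            @Override
            public String get() {
                // Computed only when (and if) the caller asks for the value.
                return "Hello from a deferred computation";
            }
        };

        System.out.println(lazyGreeting.get());
    }
}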
php-client
==========

Official PHP Client Library
redstagfulfillment/php-client
README.md
Markdown
apache-2.0
51
/* * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ var ApplicationOptions = { colors: { states: { NORMAL: 'blue', WARNING: 'blue', OFFLINE: 'grey', ALERTED: 'red', UNKNOWN: 'black' // TODO: previous color #19FFFF , change this if black is not user friendly ;) }, application: { header: 'grey' } }, constance:{ CEP_WEB_SOCKET_OUTPUT_ADAPTOR_NAME: 'geo_publisher_websocket_localfusedspacialevent', // CEP_ON_ALERT_WEB_SOCKET_OUTPUT_ADAPTOR_NAME: 'Geo-Publisher-WebSocketLocal-GeoAlertNotifications', CEP_Traffic_STREAM_WEB_SOCKET_OUTPUT_ADAPTOR_NAME: 'DefaultWebsocketOutputAdaptorOnTrafficStream', CEP_WEB_SOCKET_OUTPUT_ADAPTOR_WEBAPP_NAME: 'outputwebsocket', TENANT_INDEX: 't', COLON : ':', PATH_SEPARATOR : '/', SPEED_HISTORY_COUNT: 20, NOTIFY_INFO_TIMEOUT: 1000, NOTIFY_SUCCESS_TIMEOUT: 1000, NOTIFY_WARNING_TIMEOUT: 3000, NOTIFY_DANGER_TIMEOUT: 5000 }, messages:{ app:{ } }, leaflet: { iconUrls: { normalMovingIcon: '/portal/store/carbon.super/fs/gadget/motor-rally/img/markers/object-types/default/moving/alerted.png', alertedMovingIcon: '/portal/store/carbon.super/fs/gadget/motor-rally/img/markers/moving/arrow_alerted.png', offlineMovingIcon: '/portal/store/carbon.super/fs/gadget/motor-rally/img/markers/moving/arrow_offline.png', warningMovingIcon: '/portal/store/carbon.super/fs/gadget/motor-rally/img/markers/moving/arrow_warning.png', defaultMovingIcon: '/portal/store/carbon.super/fs/gadget/motor-rally/img/markers/moving/arrow_normal.png', normalNonMovingIcon: '/portal/store/carbon.super/fs/gadget/motor-rally/img/markers/non_moving/dot_normal.png', alertedNonMovingIcon: '/portal/store/carbon.super/fs/gadget/motor-rally/img/markers/non_moving/dot_alerted.png', offlineNonMovingIcon: '/portal/store/carbon.super/fs/gadget/motor-rally/img/markers/non_moving/dot_offline.png', warningNonMovingIcon: '/portal/store/carbon.super/fs/gadget/motor-rally/img/markers/non_moving/dot_warning.png', defaultNonMovingIcon: '/portal/store/carbon.super/fs/gadget/motor-rally/img/markers/non_moving/dot_normal.png', normalPlaceIcon: '/portal/store/carbon.super/fs/gadget/motor-rally/img/markers/places/marker-icon.png', alertedPlaceIcon: '/portal/store/carbon.super/fs/gadget/motor-rally/img/markers/places/redMarker.png', offlinePlaceIcon: '/portal/store/carbon.super/fs/gadget/motor-rally/img/markers/places/ashMarker.png', warningPlaceIcon: '/portal/store/carbon.super/fs/gadget/motor-rally/img/markers/places/pinkMarker.png', defaultPlaceIcon: '/portal/store/carbon.super/fs/gadget/motor-rally/img/markers/places/marker-icon.png', defaultIcon: '/portal/store/carbon.super/fs/gadget/motor-rally/img/markers/moving/default_icons/marker-icon.png', resizeIcon: '/portal/store/carbon.super/fs/gadget/motor-rally/img/markers/resize.png', stopIcon: '/portal/store/carbon.super/fs/gadget/motor-rally/img/markers/stopIcon.png' } } };
GPrathap/analytics-iots
product/samples/motorRallyAnalytics/feature/org.wso2.carbon.analytics.iots.motorrallyalytics.feature/src/main/resources/carbonapps/motorrally/gadgets/gadget-motor-rally/motor-rally/js/application_options.js
JavaScript
apache-2.0
3,938
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_40) on Wed Apr 13 18:09:43 UTC 2016 --> <title>Uses of Class org.apache.cassandra.utils.concurrent.Ref (apache-cassandra API)</title> <meta name="date" content="2016-04-13"> <link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Class org.apache.cassandra.utils.concurrent.Ref (apache-cassandra API)"; } } catch(err) { } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../../../../../../overview-tree.html">Tree</a></li> <li><a href="../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../index-all.html">Index</a></li> <li><a href="../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../../index.html?org/apache/cassandra/utils/concurrent/class-use/Ref.html" target="_top">Frames</a></li> <li><a href="Ref.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Class org.apache.cassandra.utils.concurrent.Ref" class="title">Uses of Class<br>org.apache.cassandra.utils.concurrent.Ref</h2> </div> <div class="classUseContainer"> <ul class="blockList"> <li class="blockList"> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation"> <caption><span>Packages that use <a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Package</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a href="#org.apache.cassandra.io.sstable.format">org.apache.cassandra.io.sstable.format</a></td> <td class="colLast">&nbsp;</td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="#org.apache.cassandra.streaming">org.apache.cassandra.streaming</a></td> <td class="colLast">&nbsp;</td> </tr> <tr 
class="altColor"> <td class="colFirst"><a href="#org.apache.cassandra.streaming.messages">org.apache.cassandra.streaming.messages</a></td> <td class="colLast">&nbsp;</td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="#org.apache.cassandra.utils.concurrent">org.apache.cassandra.utils.concurrent</a></td> <td class="colLast">&nbsp;</td> </tr> </tbody> </table> </li> <li class="blockList"> <ul class="blockList"> <li class="blockList"><a name="org.apache.cassandra.io.sstable.format"> <!-- --> </a> <h3>Uses of <a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a> in <a href="../../../../../../org/apache/cassandra/io/sstable/format/package-summary.html">org.apache.cassandra.io.sstable.format</a></h3> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation"> <caption><span>Methods in <a href="../../../../../../org/apache/cassandra/io/sstable/format/package-summary.html">org.apache.cassandra.io.sstable.format</a> that return <a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Method and Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><code><a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a>&lt;<a href="../../../../../../org/apache/cassandra/io/sstable/format/SSTableReader.html" title="class in org.apache.cassandra.io.sstable.format">SSTableReader</a>&gt;</code></td> <td class="colLast"><span class="typeNameLabel">SSTableReader.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/cassandra/io/sstable/format/SSTableReader.html#ref--">ref</a></span>()</code>&nbsp;</td> </tr> <tr class="rowColor"> <td class="colFirst"><code><a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a>&lt;<a href="../../../../../../org/apache/cassandra/io/sstable/format/SSTableReader.html" title="class in org.apache.cassandra.io.sstable.format">SSTableReader</a>&gt;</code></td> <td class="colLast"><span class="typeNameLabel">SSTableReader.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/cassandra/io/sstable/format/SSTableReader.html#selfRef--">selfRef</a></span>()</code>&nbsp;</td> </tr> <tr class="altColor"> <td class="colFirst"><code><a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a>&lt;<a href="../../../../../../org/apache/cassandra/io/sstable/format/SSTableReader.html" title="class in org.apache.cassandra.io.sstable.format">SSTableReader</a>&gt;</code></td> <td class="colLast"><span class="typeNameLabel">SSTableReader.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/cassandra/io/sstable/format/SSTableReader.html#tryRef--">tryRef</a></span>()</code>&nbsp;</td> </tr> </tbody> </table> </li> <li class="blockList"><a name="org.apache.cassandra.streaming"> <!-- --> </a> <h3>Uses of <a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a> in <a 
href="../../../../../../org/apache/cassandra/streaming/package-summary.html">org.apache.cassandra.streaming</a></h3> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing fields, and an explanation"> <caption><span>Fields in <a href="../../../../../../org/apache/cassandra/streaming/package-summary.html">org.apache.cassandra.streaming</a> declared as <a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Field and Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><code><a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a>&lt;<a href="../../../../../../org/apache/cassandra/io/sstable/format/SSTableReader.html" title="class in org.apache.cassandra.io.sstable.format">SSTableReader</a>&gt;</code></td> <td class="colLast"><span class="typeNameLabel">StreamSession.SSTableStreamingSections.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/cassandra/streaming/StreamSession.SSTableStreamingSections.html#ref">ref</a></span></code>&nbsp;</td> </tr> </tbody> </table> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation"> <caption><span>Methods in <a href="../../../../../../org/apache/cassandra/streaming/package-summary.html">org.apache.cassandra.streaming</a> with parameters of type <a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Method and Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><span class="typeNameLabel">StreamTransferTask.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/cassandra/streaming/StreamTransferTask.html#addTransferFile-org.apache.cassandra.utils.concurrent.Ref-long-java.util.List-long-">addTransferFile</a></span>(<a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a>&lt;<a href="../../../../../../org/apache/cassandra/io/sstable/format/SSTableReader.html" title="class in org.apache.cassandra.io.sstable.format">SSTableReader</a>&gt;&nbsp;ref, long&nbsp;estimatedKeys, java.util.List&lt;<a href="../../../../../../org/apache/cassandra/utils/Pair.html" title="class in org.apache.cassandra.utils">Pair</a>&lt;java.lang.Long,java.lang.Long&gt;&gt;&nbsp;sections, long&nbsp;repairedAt)</code>&nbsp;</td> </tr> </tbody> </table> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing constructors, and an explanation"> <caption><span>Constructors in <a href="../../../../../../org/apache/cassandra/streaming/package-summary.html">org.apache.cassandra.streaming</a> with parameters of type <a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colOne" scope="col">Constructor and Description</th> </tr> <tbody> <tr class="altColor"> 
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/cassandra/streaming/StreamSession.SSTableStreamingSections.html#SSTableStreamingSections-org.apache.cassandra.utils.concurrent.Ref-java.util.List-long-long-">SSTableStreamingSections</a></span>(<a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a>&lt;<a href="../../../../../../org/apache/cassandra/io/sstable/format/SSTableReader.html" title="class in org.apache.cassandra.io.sstable.format">SSTableReader</a>&gt;&nbsp;ref, java.util.List&lt;<a href="../../../../../../org/apache/cassandra/utils/Pair.html" title="class in org.apache.cassandra.utils">Pair</a>&lt;java.lang.Long,java.lang.Long&gt;&gt;&nbsp;sections, long&nbsp;estimatedKeys, long&nbsp;repairedAt)</code>&nbsp;</td> </tr> </tbody> </table> </li> <li class="blockList"><a name="org.apache.cassandra.streaming.messages"> <!-- --> </a> <h3>Uses of <a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a> in <a href="../../../../../../org/apache/cassandra/streaming/messages/package-summary.html">org.apache.cassandra.streaming.messages</a></h3> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing constructors, and an explanation"> <caption><span>Constructors in <a href="../../../../../../org/apache/cassandra/streaming/messages/package-summary.html">org.apache.cassandra.streaming.messages</a> with parameters of type <a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colOne" scope="col">Constructor and Description</th> </tr> <tbody> <tr class="altColor"> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/cassandra/streaming/messages/OutgoingFileMessage.html#OutgoingFileMessage-org.apache.cassandra.utils.concurrent.Ref-int-long-java.util.List-long-boolean-">OutgoingFileMessage</a></span>(<a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a>&lt;<a href="../../../../../../org/apache/cassandra/io/sstable/format/SSTableReader.html" title="class in org.apache.cassandra.io.sstable.format">SSTableReader</a>&gt;&nbsp;ref, int&nbsp;sequenceNumber, long&nbsp;estimatedKeys, java.util.List&lt;<a href="../../../../../../org/apache/cassandra/utils/Pair.html" title="class in org.apache.cassandra.utils">Pair</a>&lt;java.lang.Long,java.lang.Long&gt;&gt;&nbsp;sections, long&nbsp;repairedAt, boolean&nbsp;keepSSTableLevel)</code>&nbsp;</td> </tr> </tbody> </table> </li> <li class="blockList"><a name="org.apache.cassandra.utils.concurrent"> <!-- --> </a> <h3>Uses of <a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a> in <a href="../../../../../../org/apache/cassandra/utils/concurrent/package-summary.html">org.apache.cassandra.utils.concurrent</a></h3> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation"> <caption><span>Methods in <a href="../../../../../../org/apache/cassandra/utils/concurrent/package-summary.html">org.apache.cassandra.utils.concurrent</a> that return <a 
href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Method and Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><code><a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a>&lt;<a href="../../../../../../org/apache/cassandra/utils/concurrent/Refs.html" title="type parameter in Refs">T</a>&gt;</code></td> <td class="colLast"><span class="typeNameLabel">Refs.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/cassandra/utils/concurrent/Refs.html#get-T-">get</a></span>(<a href="../../../../../../org/apache/cassandra/utils/concurrent/Refs.html" title="type parameter in Refs">T</a>&nbsp;referenced)</code>&nbsp;</td> </tr> <tr class="rowColor"> <td class="colFirst"><code><a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a>&lt;<a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="type parameter in Ref">T</a>&gt;</code></td> <td class="colLast"><span class="typeNameLabel">Ref.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html#ref--">ref</a></span>()</code>&nbsp;</td> </tr> <tr class="altColor"> <td class="colFirst"><code><a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a>&lt;<a href="../../../../../../org/apache/cassandra/utils/concurrent/RefCounted.html" title="type parameter in RefCounted">T</a>&gt;</code></td> <td class="colLast"><span class="typeNameLabel">RefCounted.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/cassandra/utils/concurrent/RefCounted.html#ref--">ref</a></span>()</code>&nbsp;</td> </tr> <tr class="rowColor"> <td class="colFirst"><code><a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a>&lt;<a href="../../../../../../org/apache/cassandra/utils/concurrent/SelfRefCounted.html" title="type parameter in SelfRefCounted">T</a>&gt;</code></td> <td class="colLast"><span class="typeNameLabel">SelfRefCounted.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/cassandra/utils/concurrent/SelfRefCounted.html#selfRef--">selfRef</a></span>()</code>&nbsp;</td> </tr> <tr class="altColor"> <td class="colFirst"><code><a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a>&lt;<a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="type parameter in Ref">T</a>&gt;</code></td> <td class="colLast"><span class="typeNameLabel">Ref.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html#tryRef--">tryRef</a></span>()</code>&nbsp;</td> </tr> <tr class="rowColor"> <td class="colFirst"><code><a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a>&lt;<a href="../../../../../../org/apache/cassandra/utils/concurrent/RefCounted.html" title="type parameter in RefCounted">T</a>&gt;</code></td> <td 
class="colLast"><span class="typeNameLabel">RefCounted.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/cassandra/utils/concurrent/RefCounted.html#tryRef--">tryRef</a></span>()</code>&nbsp;</td> </tr> </tbody> </table> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation"> <caption><span>Methods in <a href="../../../../../../org/apache/cassandra/utils/concurrent/package-summary.html">org.apache.cassandra.utils.concurrent</a> that return types with arguments of type <a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Method and Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><code>static &lt;T extends <a href="../../../../../../org/apache/cassandra/utils/concurrent/SelfRefCounted.html" title="interface in org.apache.cassandra.utils.concurrent">SelfRefCounted</a>&lt;T&gt;&gt;<br>java.lang.Iterable&lt;<a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a>&lt;T&gt;&gt;</code></td> <td class="colLast"><span class="typeNameLabel">Refs.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/cassandra/utils/concurrent/Refs.html#selfRefs-java.lang.Iterable-">selfRefs</a></span>(java.lang.Iterable&lt;T&gt;&nbsp;refs)</code>&nbsp;</td> </tr> </tbody> </table> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation"> <caption><span>Methods in <a href="../../../../../../org/apache/cassandra/utils/concurrent/package-summary.html">org.apache.cassandra.utils.concurrent</a> with parameters of type <a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Method and Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><span class="typeNameLabel">Ref.IdentityCollection.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.IdentityCollection.html#add-org.apache.cassandra.utils.concurrent.Ref-">add</a></span>(<a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a>&lt;?&gt;&nbsp;ref)</code>&nbsp;</td> </tr> </tbody> </table> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation"> <caption><span>Method parameters in <a href="../../../../../../org/apache/cassandra/utils/concurrent/package-summary.html">org.apache.cassandra.utils.concurrent</a> with type arguments of type <a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Method and Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><code>static void</code></td> <td class="colLast"><span 
class="typeNameLabel">Refs.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/cassandra/utils/concurrent/Refs.html#release-java.lang.Iterable-">release</a></span>(java.lang.Iterable&lt;? extends <a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a>&lt;?&gt;&gt;&nbsp;refs)</code>&nbsp;</td> </tr> <tr class="rowColor"> <td class="colFirst"><code>static java.lang.Throwable</code></td> <td class="colLast"><span class="typeNameLabel">Refs.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/cassandra/utils/concurrent/Refs.html#release-java.lang.Iterable-java.lang.Throwable-">release</a></span>(java.lang.Iterable&lt;? extends <a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a>&lt;?&gt;&gt;&nbsp;refs, java.lang.Throwable&nbsp;accumulate)</code>&nbsp;</td> </tr> </tbody> </table> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing constructors, and an explanation"> <caption><span>Constructor parameters in <a href="../../../../../../org/apache/cassandra/utils/concurrent/package-summary.html">org.apache.cassandra.utils.concurrent</a> with type arguments of type <a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colOne" scope="col">Constructor and Description</th> </tr> <tbody> <tr class="altColor"> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/cassandra/utils/concurrent/Refs.html#Refs-java.util.Map-">Refs</a></span>(java.util.Map&lt;<a href="../../../../../../org/apache/cassandra/utils/concurrent/Refs.html" title="type parameter in Refs">T</a>,<a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Ref</a>&lt;<a href="../../../../../../org/apache/cassandra/utils/concurrent/Refs.html" title="type parameter in Refs">T</a>&gt;&gt;&nbsp;references)</code>&nbsp;</td> </tr> </tbody> </table> </li> </ul> </li> </ul> </div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../../org/apache/cassandra/utils/concurrent/Ref.html" title="class in org.apache.cassandra.utils.concurrent">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../../../../../../overview-tree.html">Tree</a></li> <li><a href="../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../index-all.html">Index</a></li> <li><a href="../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../../index.html?org/apache/cassandra/utils/concurrent/class-use/Ref.html" target="_top">Frames</a></li> <li><a href="Ref.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a 
href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &copy; 2016 The Apache Software Foundation</small></p> </body> </html>
elisska/cloudera-cassandra
DATASTAX_CASSANDRA-3.5.0/javadoc/org/apache/cassandra/utils/concurrent/class-use/Ref.html
HTML
apache-2.0
25,557
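A hedged sketch of the acquire/use/release discipline behind the Ref methods indexed on this page, using only signatures the page lists (SSTableReader.tryRef() and Refs.release(Iterable)). That tryRef() signals failure by returning null is an assumption here, as is the rule that the reader is only safe to use while the ref is held.

import java.util.Collections;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.apache.cassandra.utils.concurrent.Ref;
import org.apache.cassandra.utils.concurrent.Refs;

public final class RefUsageSketch {

    static boolean withRef(SSTableReader reader) {
        Ref<SSTableReader> ref = reader.tryRef();
        if (ref == null) {
            // Assumed behaviour: the reader was already released, so no ref could be taken.
            return false;
        }
        try {
            // ... safe to use `reader` here while the ref is held ...
            return true;
        } finally {
            // Refs.release(Iterable<? extends Ref<?>>) is listed on this page.
            Refs.release(Collections.singleton(ref));
        }
    }
}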
/*
 * Copyright 2015 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.resolver;

import io.netty.util.concurrent.EventExecutor;
import io.netty.util.concurrent.Future;

import java.net.InetAddress;
import java.net.InetSocketAddress;

/**
 * A skeletal {@link NameResolver} implementation that resolves {@link InetAddress}.
 */
public abstract class InetNameResolver extends SimpleNameResolver<InetAddress> {

    private volatile AddressResolver<InetSocketAddress> addressResolver;

    /**
     * @param executor the {@link EventExecutor} which is used to notify the listeners of the {@link Future} returned
     *                 by {@link #resolve(String)}
     */
    protected InetNameResolver(EventExecutor executor) {
        super(executor);
    }

    /**
     * Returns an {@link AddressResolver} that will use this name resolver underneath.
     * It's cached internally, so the same instance is always returned.
     */
    public AddressResolver<InetSocketAddress> asAddressResolver() {
        AddressResolver<InetSocketAddress> result = addressResolver;
        if (result == null) {
            synchronized (this) {
                result = addressResolver;
                if (result == null) {
                    addressResolver = result = new InetSocketAddressResolver(executor(), this);
                }
            }
        }
        return result;
    }
}
fenik17/netty
resolver/src/main/java/io/netty/resolver/InetNameResolver.java
Java
apache-2.0
1,961
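A self-contained sketch of the lazy, thread-safe initialisation used by asAddressResolver() above: one volatile read on the fast path, then a double check under a lock. Resource is a placeholder standing in for InetSocketAddressResolver.

public class LazyInitSketch {

    static final class Resource {
    }

    private volatile Resource resource;

    public Resource get() {
        Resource result = resource;        // single volatile read on the fast path
        if (result == null) {
            synchronized (this) {
                result = resource;         // re-check under the lock
                if (result == null) {
                    resource = result = new Resource();
                }
            }
        }
        return result;                     // every caller sees the same instance
    }
}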
/* * Copyright 2016 Kejun Xia * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.shipdream.lib.android.mvc.view.viewpager; import android.content.Intent; import com.shipdream.lib.android.mvc.Forwarder; import com.shipdream.lib.android.mvc.FragmentController; import com.shipdream.lib.android.mvc.MvcFragment; import com.shipdream.lib.android.mvc.NavigationManager; import com.shipdream.lib.android.mvc.TestActivity; import com.shipdream.lib.android.mvc.view.viewpager.controller.FirstFragmentController; import com.shipdream.lib.android.mvc.view.viewpager.controller.HomeController; import com.shipdream.lib.android.mvc.view.viewpager.controller.SecondFragmentController; import javax.inject.Inject; public class ViewPagerTestActivity extends TestActivity { @Override protected Class<? extends MvcFragment> mapFragmentRouting(Class<? extends FragmentController> controllerClass) { if (controllerClass == FirstFragmentController.class) { return ViewPagerHomeFragment.class; } else if (controllerClass == SecondFragmentController.class) { return SecondFragment.class; } return ViewPagerHomeFragment.class; } @Override protected Class<? extends DelegateFragment> getDelegateFragmentClass() { return HomeFragment.class; } public static class HomeFragment extends DelegateFragment<HomeController> { @Inject private NavigationManager navigationManager; @Override protected void onStartUp() { navigationManager.navigate(this).to(FirstFragmentController.class, new Forwarder().clearAll()); } @Override protected Class<HomeController> getControllerClass() { return HomeController.class; } @Override public void update() { } } Intent launchAnotherActivity() { Intent intent = new Intent(this, ViewPagerTestActivityTop.class); return intent; } }
kejunxia/AndroidMvc
library/android-mvc-test/src/main/java/com/shipdream/lib/android/mvc/view/viewpager/ViewPagerTestActivity.java
Java
apache-2.0
2,503
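A self-contained sketch of the controller-to-fragment routing idea in mapFragmentRouting(...) above, expressed as a lookup map with a default instead of an if/else chain. Controller, Screen and their subclasses are placeholders, not classes from the MVC library.

import java.util.HashMap;
import java.util.Map;

public class RoutingSketch {

    static class Controller { }
    static class FirstController extends Controller { }
    static class SecondController extends Controller { }

    static class Screen { }
    static class HomeScreen extends Screen { }
    static class SecondScreen extends Screen { }

    private static final Map<Class<? extends Controller>, Class<? extends Screen>> ROUTES =
            new HashMap<>();
    static {
        ROUTES.put(FirstController.class, HomeScreen.class);
        ROUTES.put(SecondController.class, SecondScreen.class);
    }

    static Class<? extends Screen> route(Class<? extends Controller> controller) {
        // Fall back to the home screen, as the original mapping does.
        return ROUTES.getOrDefault(controller, HomeScreen.class);
    }
}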
package ch10innerclasses;

/**
 * Creating inner classes.
 *
 * <pre>
 * Output:
 * Tasmania
 * </pre>
 */
public class D01_Parcel1 {

    class Contents {
        private int i = 11;

        public int value() {
            return i;
        }
    }

    class Destination {
        private String label;

        Destination(String whereTo) {
            label = whereTo;
        }

        String readLabel() {
            return label;
        }
    }

    // Using inner classes looks just like
    // using any other class, within D01_Parcel1:
    public void ship(String dest) {
        Contents c = new Contents();
        Destination d = new Destination(dest);
        System.out.println(d.readLabel());
    }

    public static void main(String[] args) {
        D01_Parcel1 p = new D01_Parcel1();
        p.ship("Tasmania");
    }
}
deguo/tij4
src/main/java/ch10innerclasses/D01_Parcel1.java
Java
apache-2.0
709
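A follow-up sketch to D01_Parcel1: the same inner classes can also be instantiated from outside the outer class, but only through an existing outer instance, using the outer.new syntax. It is placed in the same package so the package-private members stay accessible; the "Borneo" label is just an illustrative value.

package ch10innerclasses;

public class D01_Parcel1OutsideSketch {

    public static void main(String[] args) {
        D01_Parcel1 p = new D01_Parcel1();
        // An inner-class object always carries a reference to its enclosing instance,
        // so it must be created through one: p.new Inner().
        D01_Parcel1.Contents c = p.new Contents();
        D01_Parcel1.Destination d = p.new Destination("Borneo");
        System.out.println(c.value() + " " + d.readLabel());
    }
}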
"use strict"; var utils = require("./utils.js"); var isThenable = utils.isThenable; var invoke = utils.invoke; var fnCall = utils.fnCall; var delayCall = utils.delayCall; var deprecateWarning = {}; function promiseResolve(promise, resolve, reject, resolution) { if (promise === resolution) { reject(new TypeError("A promise cannot resolve to itself.")); } try { if (isThenable(resolution)) { resolution.then(function (value) { promiseResolve(promise, resolve, reject, value); }, function (reason) { reject(reason); }); } else { // value resolve(resolution); } } catch (err) { reject(err); } } module.exports = function (Promise) { if (Promise._SAP_Enhanced) { return; } Promise._SAP_Enhanced = true; // ---------------------------------------- // Promise.prototype enhancement // ---------------------------------------- function promiseInvoke(argObject) { return new Promise(function (resolve, reject) { invoke(argObject, function (err, result) { var k, n = arguments.length; if (err) { reject(err); return; } if (n > 2) { result = [ result ]; for (k = 2; k < n; ++k) { result.push(arguments[k]); } } resolve(result); }); }); } /** * Returns a promise which will be settled based on the outcome of the onSettled callback. onSettled will be called when the initial promise is settled (i.e. either fulfilled or rejected). * @param onSettled Function called when the promise is settled. If the promise is rejected, onSettled is called with the rejection reason as single argument. * If the promise is fulfilled, onSettled is called with null as first arguments and the promise fulfillment value as second argument. * @returns {Promise} */ Promise.prototype.always = function (onSettled) { return this.then(function (result) { return (typeof onSettled === "function" ? onSettled(null, result) : result); }, function (reason) { return (typeof onSettled === "function" ? onSettled(reason) : reason); }); }; /** * Performs a callback-based API call upon promise completion * @param {object} [thisArg] Optional this parameter * @param {function|string} fn Function or method name to call * @param {...*} [arg] Optional parameters to be passed AFTER promise result which is always the first parameter (final callback parameter must NOT be passed) * @returns {Promise} */ Promise.prototype.thenInvoke = function (thisArg) { var k, arg = arguments, len = arg.length, argStart = (typeof thisArg === "function" ? 1 : 2); for (k = len; k > argStart; --k) { arg[k] = arg[k - 1]; } arg[argStart] = undefined; ++arg.length; return this.then(function (result) { arg[argStart] = result; return promiseInvoke(arg); }); }; /** * Performs a regular function call upon promise completion * @param {object} [thisArg] Optional this parameter * @param {function|string} fn Function or method name to call * @param {...*} [arg] Optional parameters to be passed AFTER promise result which is always the first parameter * @returns {Promise} */ Promise.prototype.thenCall = function (thisArg) { var k, arg = arguments, len = arg.length, argStart = (typeof thisArg === "function" ? 
1 : 2); for (k = len; k > argStart; --k) { arg[k] = arg[k - 1]; } arg[argStart] = undefined; ++arg.length; return this.then(function (result) { arg[argStart] = result; return fnCall(arg); }); }; /** * @deprecated */ Promise.prototype.setTimeout = function () { if (!deprecateWarning["Promise.prototype.setTimeout"]) { console.warn("Promise.prototype.setTimeout function is deprecated, use Promise.prototype.timeout instead"); deprecateWarning["Promise.prototype.setTimeout"] = true; } return this.timeout.apply(this, Array.prototype.slice.call(arguments)); }; /** * @deprecated */ Promise.prototype.clearTimeout = function () { if (!deprecateWarning["Promise.prototype.clearTimeout"]) { console.warn("Promise.prototype.clearTimeout function is deprecated"); deprecateWarning["Promise.prototype.clearTimeout"] = true; } return this; }; /** * Returns a promise which will be resolved to an alternative resolution if the initial promise has not been settled at timeout expiration. * If the initial promise is settled before timeout, the setTimeout promise will be settled accordingly. * @param delay timeout in ms * @param {any} [resolution] This controls how the promise will be settled after timeout. If resolution is an instance of Error, the promise will be rejected with resolution. * If resolution is a function, the promise will be resolved with the return value of the function call. If the function throws an exception, the promise will be rejected accordingly. * Otherwise, the promise will be resolved with resolution (applying the standard PromiseResolve logic). * If no resolution argument is passed, the promise will be rejected with a default Error. * @returns {Promise} */ Promise.prototype.timeout = function (delay, resolution) { var onTimeout, timeoutId, defer = Promise.defer(); delay = ~~delay; // Typecast to integer value if (arguments.length === 1) { resolution = new Error("Promise timeout expired."); } if (typeof resolution === "function") { onTimeout = function () { try { promiseResolve(defer.promise, defer.resolve, defer.reject, resolution()); } catch (err) { defer.reject(err); } }; } else if (resolution instanceof Error) { onTimeout = function () { defer.reject(resolution); }; } else { onTimeout = function () { try { promiseResolve(defer.promise, defer.resolve, defer.reject, resolution); } catch (err) { defer.reject(err); } }; } timeoutId = setTimeout(onTimeout, delay); this.then(function (result) { if (timeoutId) { clearTimeout(timeoutId); timeoutId = undefined; defer.resolve(result); } }, function (reason) { if (timeoutId) { clearTimeout(timeoutId); timeoutId = undefined; defer.reject(reason); } }); return defer.promise; }; /*eslint-disable no-empty */ /** * Returns a promise which will be settled according to the initial promise when the return value of the finalizer is settled. * @param {function} finalizer Callback function which will be called when the initial promise is settled. * If finalizer returns a promise, the finally promise will be settled once the finalizer promise is settled (but with the outcome of the initial promise). 
* @returns {Promise} */ Promise.prototype.finally = function (finalizer) { var promise; if ((typeof finalizer === "function")) { promise = this.then(function (value) { var finState, restore; try { finState = finalizer(); } catch (e) { } if (finState) { restore = function () { return value; }; return Promise.resolve(finState).then(restore, restore); } return value; }, function (reason) { var finState, restore; try { finState = finalizer(); } catch (e) { } if (finState) { restore = function () { throw reason; }; return Promise.resolve(finState).then(restore, restore); } throw reason; }); } else { promise = this; } return promise; }; /*eslint-enable no-empty */ /** * Returns a promise that will be settled with the initial promise outcome, after invoking an optional callback. * @param {function} [done] Optional callback * @returns {Promise} */ Promise.prototype.callback = function (done) { if (typeof done === "function") { return this.then(function (result) { done(null, result); return result; }, function (err) { done(err); throw err; }); } else { return this; } }; /** * Return * @param delay * @returns {Promise} */ Promise.prototype.delay = function (delay) { delay = delay || 0; return this.then(function (result) { return new Promise(function (resolve) { delayCall(resolve, result, delay); }); }, function (err) { return new Promise(function (resolve, reject) { delayCall(reject, err, delay); }); }); }; // ---------------------------------------- // Promise enhancement // ---------------------------------------- /** * @deprecated */ Promise.cast = function (value) { if (!deprecateWarning["Promise.cast"]) { console.warn("Promise.cast function is deprecated, use Promise.resolve instead"); deprecateWarning["Promise.cast"] = true; } return Promise.resolve(value); }; /** * * @param promises * @returns {Promise} */ Promise.waitAll = function (promises) { var executor; if (!Array.isArray(promises)) { throw new TypeError("Promise.waitAll requires an array argument."); } executor = function (resolve, reject) { var i, n, results, errors, remaining, getResolver, resolveAt, getRejecter, promise, failCount, success; results = []; failCount = 0; success = 0; errors = []; remaining = promises.length; if (remaining === 0) { resolve([]); return; } resolveAt = function (index, value, error) { var err; if (error) { ++failCount; } else { ++success; } results[index] = value; errors[index] = error; --remaining; if (remaining === 0) { if (failCount) { if (success) { err = new Error("Some operations failed"); } else { err = new Error("All operations failed"); } err.detail = { errors: errors, failCount: failCount, results: results }; reject(err); } else { resolve(results); } } }; getResolver = function (index) { return function (value) { resolveAt(index, value); }; }; getRejecter = function (index) { return function (reason) { reason = reason || new Error("Operation failed"); resolveAt(index, undefined, reason); }; }; n = remaining; for (i = 0; i < n; ++i) { promise = promises[i]; try { if (isThenable(promise)) { Promise.resolve(promise).then(getResolver(i), getRejecter(i)); } else { resolveAt(i, promise); } } catch (e) { // isThenable may throw as it accesses promise.then property getRejecter(i)(e); } } }; return new Promise(executor); }; // Expose a defer API for people used to Q /** * * @returns {{}} */ Promise.defer = function () { var deferred = {}; deferred.promise = new Promise(function (resolve, reject) { deferred.resolve = resolve; deferred.reject = reject; }); return deferred; }; /** * Transforms a 
callback-based API call into a Promise-based one * @param {object} [thisArg] Optional this parameter * @param {function|string} fn Function or method name to call * @param {...*} [arg] Optional function parameters: final callback parameter must NOT be passed * @returns {Promise} */ Promise.invoke = function () { return promiseInvoke(arguments); }; /** * Wraps the result of a synchronous function into a Promise. * @param {object} [thisArg] Optional this parameter * @param {function|string} fn Function or method name to call * @param {...*} [arg] Optional function parameters * @returns {Promise} Promise resolved to the function return value or rejected with the error thrown by the function */ Promise.fnCall = function () { var args = arguments; return new Promise(function (resolve) { resolve(fnCall(args)); }); }; /** * @deprecated */ Promise.objectInvoke = function () { if (!deprecateWarning["Promise.objectInvoke"]) { console.warn("Promise.objectInvoke function is deprecated, use Promise.invoke instead"); deprecateWarning["Promise.objectInvoke"] = true; } return Promise.invoke.apply(undefined, Array.prototype.slice.call(arguments)); }; /** * Returns a Promise which will be resolved with resolution after delay milliseconds * @param {*} [resolution] Resolved value, default to undefined * @param {number} delay Delay in milliseconds before promise is fulfilled * @returns {Promise} */ Promise.delay = function (resolution, delay) { if (arguments.length <= 1) { delay = resolution; resolution = undefined; } return Promise.resolve(resolution).delay(delay); }; };
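/*
 * Usage sketch (illustrative only, not executed as part of this module).
 * Assumes a native or polyfilled Promise passed through the exported enhancer;
 * `fs` and the file path below are hypothetical examples.
 *
 *   var enhance = require("./enhance.js");
 *   enhance(Promise);
 *
 *   // Callback-style API call turned into a promise; per the JSDoc above,
 *   // the final callback parameter is appended for you and must not be passed.
 *   Promise.invoke(fs, "readFile", "./config.json")
 *       .timeout(1000)                                   // reject with a default Error after 1s if still pending
 *       .then(function (buf) { return buf.toString(); })
 *       .finally(function () { console.log("done"); });
 *
 *   // Wait for all promises; on failure the rejection carries err.detail.{errors, results, failCount}.
 *   Promise.waitAll([Promise.resolve(1), Promise.delay(2, 50)])
 *       .then(function (results) { console.log(results); }); // [1, 2]
 *
 *   // Q-style deferred.
 *   var deferred = Promise.defer();
 *   setTimeout(deferred.resolve, 10, "value");
 */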
sapbuild/node-sap-promise
lib/enhance.js
JavaScript
apache-2.0
15,539
/* * Copyright 2012-2015 Aerospike, Inc. * * Portions may be licensed to Aerospike, Inc. under one or more contributor * license agreements. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ using System; using System.Collections.Generic; namespace Aerospike.Client { public sealed class ExistsCommand : SyncCommand { private readonly Cluster cluster; private readonly Policy policy; private readonly Key key; private readonly Partition partition; private bool exists; public ExistsCommand(Cluster cluster, Policy policy, Key key) { this.cluster = cluster; this.policy = policy; this.key = key; this.partition = new Partition(key); } protected internal override Policy GetPolicy() { return policy; } protected internal override void WriteBuffer() { SetExists(policy, key); } protected internal override Node GetNode() { return cluster.GetReadNode(partition, policy.replica); } protected internal override void ParseResult(Connection conn) { // Read header. conn.ReadFully(dataBuffer, MSG_TOTAL_HEADER_SIZE); int resultCode = dataBuffer[13]; if (resultCode != 0 && resultCode != ResultCode.KEY_NOT_FOUND_ERROR) { throw new AerospikeException(resultCode); } exists = resultCode == 0; EmptySocket(conn); } public bool Exists() { return exists; } } }
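// Usage sketch (illustrative): this command is normally driven by the client rather
// than used directly. Assuming SyncCommand exposes a parameterless Execute() entry
// point, as in the surrounding client code, an existence check boils down to:
//
//   ExistsCommand command = new ExistsCommand(cluster, policy, key);
//   command.Execute();              // writes the buffer, picks a node, parses the result
//   bool found = command.Exists();  // true when resultCode == 0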
YuvalItzchakov/aerospike-client-csharp
AerospikeClient/Command/ExistsCommand.cs
C#
apache-2.0
1,864
/** * <copyright> * </copyright> * * $Id$ */ package org.wso2.developerstudio.eclipse.gmf.esb; /** * <!-- begin-user-doc --> * A representation of the model object '<em><b>Store Mediator</b></em>'. * <!-- end-user-doc --> * * <p> * The following features are supported: * <ul> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.StoreMediator#getMessageStore <em>Message Store</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.StoreMediator#getOnStoreSequence <em>On Store Sequence</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.StoreMediator#getInputConnector <em>Input Connector</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.StoreMediator#getOutputConnector <em>Output Connector</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.StoreMediator#getAvailableMessageStores <em>Available Message Stores</em>}</li> * </ul> * </p> * * @see org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage#getStoreMediator() * @model * @generated */ public interface StoreMediator extends Mediator { /** * Returns the value of the '<em><b>Message Store</b></em>' attribute. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Message Store</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Message Store</em>' attribute. * @see #setMessageStore(String) * @see org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage#getStoreMediator_MessageStore() * @model * @generated */ String getMessageStore(); /** * Sets the value of the '{@link org.wso2.developerstudio.eclipse.gmf.esb.StoreMediator#getMessageStore <em>Message Store</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Message Store</em>' attribute. * @see #getMessageStore() * @generated */ void setMessageStore(String value); /** * Returns the value of the '<em><b>On Store Sequence</b></em>' containment reference. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>On Store Sequence</em>' reference isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>On Store Sequence</em>' containment reference. * @see #setOnStoreSequence(RegistryKeyProperty) * @see org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage#getStoreMediator_OnStoreSequence() * @model containment="true" * @generated */ RegistryKeyProperty getOnStoreSequence(); /** * Sets the value of the '{@link org.wso2.developerstudio.eclipse.gmf.esb.StoreMediator#getOnStoreSequence <em>On Store Sequence</em>}' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>On Store Sequence</em>' containment reference. * @see #getOnStoreSequence() * @generated */ void setOnStoreSequence(RegistryKeyProperty value); /** * Returns the value of the '<em><b>Input Connector</b></em>' containment reference. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Input Connector</em>' containment reference isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Input Connector</em>' containment reference. 
* @see #setInputConnector(StoreMediatorInputConnector) * @see org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage#getStoreMediator_InputConnector() * @model containment="true" * @generated */ StoreMediatorInputConnector getInputConnector(); /** * Sets the value of the '{@link org.wso2.developerstudio.eclipse.gmf.esb.StoreMediator#getInputConnector <em>Input Connector</em>}' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Input Connector</em>' containment reference. * @see #getInputConnector() * @generated */ void setInputConnector(StoreMediatorInputConnector value); /** * Returns the value of the '<em><b>Output Connector</b></em>' containment reference. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Output Connector</em>' containment reference isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Output Connector</em>' containment reference. * @see #setOutputConnector(StoreMediatorOutputConnector) * @see org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage#getStoreMediator_OutputConnector() * @model containment="true" * @generated */ StoreMediatorOutputConnector getOutputConnector(); /** * Sets the value of the '{@link org.wso2.developerstudio.eclipse.gmf.esb.StoreMediator#getOutputConnector <em>Output Connector</em>}' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Output Connector</em>' containment reference. * @see #getOutputConnector() * @generated */ void setOutputConnector(StoreMediatorOutputConnector value); /** * Returns the value of the '<em><b>Available Message Stores</b></em>' attribute. * The default value is <code>""</code>. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Available Message Stores</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Available Message Stores</em>' attribute. * @see #setAvailableMessageStores(String) * @see org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage#getStoreMediator_AvailableMessageStores() * @model default="" * @generated */ String getAvailableMessageStores(); /** * Sets the value of the '{@link org.wso2.developerstudio.eclipse.gmf.esb.StoreMediator#getAvailableMessageStores <em>Available Message Stores</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Available Message Stores</em>' attribute. * @see #getAvailableMessageStores() * @generated */ void setAvailableMessageStores(String value); } // StoreMediator
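// Usage sketch (illustrative, assuming the generated EsbFactory follows the usual
// EMF create* pattern for this package):
//
//   StoreMediator mediator = EsbFactory.eINSTANCE.createStoreMediator();
//   mediator.setMessageStore("InboundOrderStore");
//   RegistryKeyProperty onStore = EsbFactory.eINSTANCE.createRegistryKeyProperty();
//   mediator.setOnStoreSequence(onStore);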
asankas/developer-studio
esb/org.wso2.developerstudio.eclipse.gmf.esb/src/org/wso2/developerstudio/eclipse/gmf/esb/StoreMediator.java
Java
apache-2.0
6,209
<!DOCTYPE html> <html> <head> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta name="viewport" content="width=device-width, initial-scale=1"> <title>FST - Jed Fasso</title> <meta name="description" content="Keep track of the statistics from Jed Fasso. Average heat score, heat wins, heat wins percentage, epic heats road to the final"> <meta name="author" content=""> <link rel="apple-touch-icon" sizes="57x57" href="/favicon/apple-icon-57x57.png"> <link rel="apple-touch-icon" sizes="60x60" href="/favicon/apple-icon-60x60.png"> <link rel="apple-touch-icon" sizes="72x72" href="/favicon/apple-icon-72x72.png"> <link rel="apple-touch-icon" sizes="76x76" href="/favicon/apple-icon-76x76.png"> <link rel="apple-touch-icon" sizes="114x114" href="/favicon/apple-icon-114x114.png"> <link rel="apple-touch-icon" sizes="120x120" href="/favicon/apple-icon-120x120.png"> <link rel="apple-touch-icon" sizes="144x144" href="/favicon/apple-icon-144x144.png"> <link rel="apple-touch-icon" sizes="152x152" href="/favicon/apple-icon-152x152.png"> <link rel="apple-touch-icon" sizes="180x180" href="/favicon/apple-icon-180x180.png"> <link rel="icon" type="image/png" sizes="192x192" href="/favicon/android-icon-192x192.png"> <link rel="icon" type="image/png" sizes="32x32" href="/favicon/favicon-32x32.png"> <link rel="icon" type="image/png" sizes="96x96" href="/favicon/favicon-96x96.png"> <link rel="icon" type="image/png" sizes="16x16" href="/favicon/favicon-16x16.png"> <link rel="manifest" href="/manifest.json"> <meta name="msapplication-TileColor" content="#ffffff"> <meta name="msapplication-TileImage" content="/ms-icon-144x144.png"> <meta name="theme-color" content="#ffffff"> <meta property="og:title" content="Fantasy Surfing tips"/> <meta property="og:image" content="https://fantasysurfingtips.com/img/just_waves.png"/> <meta property="og:description" content="See how great Jed Fasso is surfing this year"/> <!-- Bootstrap Core CSS - Uses Bootswatch Flatly Theme: https://bootswatch.com/flatly/ --> <link href="https://fantasysurfingtips.com/css/bootstrap.css" rel="stylesheet"> <!-- Custom CSS --> <link href="https://fantasysurfingtips.com/css/freelancer.css" rel="stylesheet"> <link href="https://cdn.datatables.net/plug-ins/1.10.7/integration/bootstrap/3/dataTables.bootstrap.css" rel="stylesheet" /> <!-- Custom Fonts --> <link href="https://fantasysurfingtips.com/font-awesome/css/font-awesome.min.css" rel="stylesheet" type="text/css"> <link href="https://fonts.googleapis.com/css?family=Montserrat:400,700" rel="stylesheet" type="text/css"> <link href="https://fonts.googleapis.com/css?family=Lato:400,700,400italic,700italic" rel="stylesheet" type="text/css"> <link rel="stylesheet" href="//cdnjs.cloudflare.com/ajax/libs/morris.js/0.5.1/morris.css"> <script src="https://code.jquery.com/jquery-2.x-git.min.js"></script> <script src="https://cdnjs.cloudflare.com/ajax/libs/jquery-ujs/1.2.1/rails.min.js"></script> <script src="https://cdnjs.cloudflare.com/ajax/libs/raphael/2.1.0/raphael-min.js"></script> <script src="https://cdnjs.cloudflare.com/ajax/libs/morris.js/0.5.1/morris.min.js"></script> <script src="https://www.w3schools.com/lib/w3data.js"></script> <script async src="//pagead2.googlesyndication.com/pagead/js/adsbygoogle.js"></script> <script> (adsbygoogle = window.adsbygoogle || []).push({ google_ad_client: "ca-pub-2675412311042802", enable_page_level_ads: true }); </script> </head> <body> <div id="fb-root"></div> <script>(function(d, s, id) { var js, fjs = d.getElementsByTagName(s)[0]; if 
(d.getElementById(id)) return; js = d.createElement(s); js.id = id; js.src = "//connect.facebook.net/en_GB/sdk.js#xfbml=1&version=v2.6"; fjs.parentNode.insertBefore(js, fjs); }(document, 'script', 'facebook-jssdk'));</script> <!-- Navigation --> <div w3-include-html="https://fantasysurfingtips.com/layout/header.html"></div> <!-- Header --> <div w3-include-html="https://fantasysurfingtips.com/layout/sponsor.html"></div> <section > <div class="container"> <div class="row"> <div class="col-sm-3 "> <div class="col-sm-2 "> </div> <div class="col-sm-8 "> <!-- <img src="http://fantasysurfingtips.com/img/surfers/jfas.png" class="img-responsive" alt=""> --> <h3 style="text-align:center;">Jed Fasso</h3> <a href="https://twitter.com/share" class="" data-via="fansurfingtips"><i class="fa fa-twitter"></i> Share on Twitter</i></a> <br/> <a class="fb-xfbml-parse-ignore" target="_blank" href="https://www.facebook.com/sharer/sharer.php?u=http%3A%2F%2Ffantasysurfingtips.com%2Fsurfers%2Fjfas&amp;src=sdkpreparse"><i class="fa fa-facebook"></i> Share on Facebook</a> </div> <div class="col-sm-2 "> </div> </div> <div class="col-sm-3 portfolio-item"> </div> <div class="col-sm-3 portfolio-item"> <h6 style="text-align:center;">Avg Heat Score (FST DATA)</h6> <h1 style="text-align:center;">7.3</h1> </div> </div> <hr/> <h4 style="text-align:center;" >Competitions</h4> <h4 style="text-align:center;" >Epic Battles</h4> <div class="row"> <div class="col-sm-6 "> <p style="text-align:center;">Surfed <b>1</b> heats against <a href="http://fantasysurfingtips.com/surfers/mqs/mbow.html"> McKenzie Bowden</a> <br/> <b>won 1</b> and <b>lost 0</b></p> </div> <div class="col-sm-6 "> <p style="text-align:center;">Surfed <b>1</b> heats against <a href="http://fantasysurfingtips.com/surfers/mqs/ddo.html"> David do Carmo</a> <br/> <b>won 0</b> and <b>lost 1</b></p> </div> </div> <hr/> <h4 style="text-align:center;" >Heat Stats (FST data)</h4> <div class="row"> <div class="col-sm-4 portfolio-item"> <h6 style="text-align:center;">Heats</h6> <h2 style="text-align:center;">6</h2> </div> <div class="col-sm-4 portfolio-item"> <h6 style="text-align:center;">Heat wins</h6> <h2 style="text-align:center;">1</h2> </div> <div class="col-sm-4 portfolio-item"> <h6 style="text-align:center;">HEAT WINS PERCENTAGE</h6> <h2 style="text-align:center;">16.67%</h2> </div> </div> <hr/> <h4 style="text-align:center;">Avg Heat Score progression</h4> <div id="avg_chart" style="height: 250px;"></div> <hr/> <h4 style="text-align:center;">Heat stats progression</h4> <div id="heat_chart" style="height: 250px;"></div> <hr/> <style type="text/css"> .heats-all{ z-index: 3; margin-left: 5px; cursor: pointer; } </style> <div class="container"> <div id="disqus_thread"></div> <script> /** * RECOMMENDED CONFIGURATION VARIABLES: EDIT AND UNCOMMENT THE SECTION BELOW TO INSERT DYNAMIC VALUES FROM YOUR PLATFORM OR CMS. 
* LEARN WHY DEFINING THESE VARIABLES IS IMPORTANT: https://disqus.com/admin/universalcode/#configuration-variables*/ var disqus_config = function () { this.page.url = "http://fantasysurfingtips.com/surfers/jfas"; // Replace PAGE_URL with your page's canonical URL variable this.page.identifier = '3078'; // Replace PAGE_IDENTIFIER with your page's unique identifier variable }; (function() { // DON'T EDIT BELOW THIS LINE var d = document, s = d.createElement('script'); s.src = '//fantasysurfingtips.disqus.com/embed.js'; s.setAttribute('data-timestamp', +new Date()); (d.head || d.body).appendChild(s); })(); </script> <noscript>Please enable JavaScript to view the <a href="https://disqus.com/?ref_noscript">comments powered by Disqus.</a></noscript> </div> </section> <script type="text/javascript"> $('.heats-all').click(function(){ $('.heats-all-stat').css('display', 'none') $('#'+$(this).attr('id')+'-stat').css('display', 'block') }); $('.heats-2016').click(function(){ $('.heats-2016-stat').css('display', 'none') $('#'+$(this).attr('id')+'-stat').css('display', 'block') }); $('document').ready(function(){ new Morris.Line({ // ID of the element in which to draw the chart. element: 'avg_chart', // Chart data records -- each entry in this array corresponds to a point on // the chart. data: [], // The name of the data record attribute that contains x-values. xkey: 'year', // A list of names of data record attributes that contain y-values. ykeys: ['avg', 'avg_all'], // Labels for the ykeys -- will be displayed when you hover over the // chart. labels: ['Avg score in year', 'Avg score FST DATA'] }); new Morris.Bar({ // ID of the element in which to draw the chart. element: 'heat_chart', // Chart data records -- each entry in this array corresponds to a point on // the chart. data: [], // The name of the data record attribute that contains x-values. xkey: 'year', // A list of names of data record attributes that contain y-values. ykeys: ['heats', 'wins', 'percs'], // Labels for the ykeys -- will be displayed when you hover over the // chart. labels: ['Heats surfed', 'Heats won', 'Winning percentage'] }); }); </script> <script>!function(d,s,id){var js,fjs=d.getElementsByTagName(s)[0],p=/^http:/.test(d.location)?'http':'https';if(!d.getElementById(id)){js=d.createElement(s);js.id=id;js.src=p+'://platform.twitter.com/widgets.js';fjs.parentNode.insertBefore(js,fjs);}}(document, 'script', 'twitter-wjs');</script> <!-- Footer --> <div w3-include-html="https://fantasysurfingtips.com/layout/footer.html"></div> <script type="text/javascript"> (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){ (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o), m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m) })(window,document,'script','//www.google-analytics.com/analytics.js','ga'); ga('create', 'UA-74337819-1', 'auto'); // Replace with your property ID. 
ga('send', 'pageview'); </script> <script> w3IncludeHTML(); </script> <!-- jQuery --> <script src="https://fantasysurfingtips.com/js/jquery.js"></script> <script src="https://cdn.datatables.net/1.10.7/js/jquery.dataTables.min.js"></script> <!-- Bootstrap Core JavaScript --> <script src="https://fantasysurfingtips.com/js/bootstrap.min.js"></script> <!-- Plugin JavaScript --> <script src="https://cdnjs.cloudflare.com/ajax/libs/jquery-easing/1.3/jquery.easing.min.js"></script> <script src="https://fantasysurfingtips.com/js/classie.js"></script> <script src="https://fantasysurfingtips.com/js/cbpAnimatedHeader.js"></script> <!-- Contact Form JavaScript --> <script src="https://fantasysurfingtips.com/js/jqBootstrapValidation.js"></script> <script src="https://fantasysurfingtips.com/js/contact_me.js"></script> <!-- Custom Theme JavaScript --> <script src="https://fantasysurfingtips.com/js/freelancer.js"></script> <script src="https://cdn.datatables.net/1.10.12/js/jquery.dataTables.min.js"></script> <script src="https://cdn.datatables.net/1.10.12/js/dataTables.bootstrap.min.js"></script> </body> </html>
chicofilho/fst
surfers/mqs/jfas.html
HTML
apache-2.0
11,861
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (version 1.7.0_45) on Mon Mar 03 10:44:34 EST 2014 --> <title>org.hibernate.result.internal (Hibernate JavaDocs)</title> <meta name="date" content="2014-03-03"> <link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style"> </head> <body> <script type="text/javascript"><!-- if (location.href.indexOf('is-external=true') == -1) { parent.document.title="org.hibernate.result.internal (Hibernate JavaDocs)"; } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar_top"> <!-- --> </a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../overview-summary.html">Overview</a></li> <li class="navBarCell1Rev">Package</li> <li>Class</li> <li><a href="package-use.html">Use</a></li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../index-all.html">Index</a></li> <li><a href="../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../org/hibernate/result/package-summary.html">Prev Package</a></li> <li><a href="../../../../org/hibernate/result/spi/package-summary.html">Next Package</a></li> </ul> <ul class="navList"> <li><a href="../../../../index.html?org/hibernate/result/internal/package-summary.html" target="_top">Frames</a></li> <li><a href="package-summary.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h1 title="Package" class="title">Package&nbsp;org.hibernate.result.internal</h1> </div> <div class="contentContainer"> <ul class="blockList"> <li class="blockList"> <table class="packageSummary" border="0" cellpadding="3" cellspacing="0" summary="Class Summary table, listing classes, and an explanation"> <caption><span>Class Summary</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Class</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a href="../../../../org/hibernate/result/internal/OutputsImpl.html" title="class in org.hibernate.result.internal">OutputsImpl</a></td> <td class="colLast">&nbsp;</td> </tr> </tbody> </table> </li> </ul> </div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar_bottom"> <!-- --> </a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../overview-summary.html">Overview</a></li> <li class="navBarCell1Rev">Package</li> <li>Class</li> <li><a href="package-use.html">Use</a></li> <li><a href="package-tree.html">Tree</a></li> <li><a 
href="../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../index-all.html">Index</a></li> <li><a href="../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../org/hibernate/result/package-summary.html">Prev Package</a></li> <li><a href="../../../../org/hibernate/result/spi/package-summary.html">Next Package</a></li> </ul> <ul class="navList"> <li><a href="../../../../index.html?org/hibernate/result/internal/package-summary.html" target="_top">Frames</a></li> <li><a href="package-summary.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &copy; 2001-2014 <a href="http://redhat.com">Red Hat, Inc.</a> All Rights Reserved.</small></p> </body> </html>
serious6/HibernateSimpleProject
javadoc/hibernate_Doc/org/hibernate/result/internal/package-summary.html
HTML
apache-2.0
4,814
// Copyright 2015 The Cockroach Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or // implied. See the License for the specific language governing // permissions and limitations under the License. See the AUTHORS file // for names of contributors. // // Author: Peter Mattis (peter@cockroachlabs.com) package sql import ( "bytes" "fmt" "github.com/cockroachdb/cockroach/sql/parser" ) // ShowColumns of a table. // Privileges: None. // Notes: postgres does not have a SHOW COLUMNS statement. // mysql only returns columns you have privileges on. func (p *planner) ShowColumns(n *parser.ShowColumns) (planNode, error) { desc, err := p.getTableDesc(n.Table) if err != nil { return nil, err } v := &valuesNode{columns: []string{"Field", "Type", "Null"}} for i, col := range desc.Columns { v.rows = append(v.rows, []parser.Datum{ parser.DString(desc.Columns[i].Name), parser.DString(col.Type.SQLString()), parser.DBool(desc.Columns[i].Nullable), }) } return v, nil } // ShowDatabases returns all the databases. // Privileges: None. // Notes: postgres does not have a "show databases" // mysql has a "SHOW DATABASES" permission, but we have no system-level permissions. func (p *planner) ShowDatabases(n *parser.ShowDatabases) (planNode, error) { prefix := MakeNameMetadataKey(RootNamespaceID, "") sr, err := p.txn.Scan(prefix, prefix.PrefixEnd(), 0) if err != nil { return nil, err } v := &valuesNode{columns: []string{"Database"}} for _, row := range sr { name := string(bytes.TrimPrefix(row.Key, prefix)) v.rows = append(v.rows, []parser.Datum{parser.DString(name)}) } return v, nil } // ShowGrants returns grant details for the specified objects and users. // TODO(marc): implement multiple targets, or no targets (meaning full scan). // Privileges: None. // Notes: postgres does not have a SHOW GRANTS statement. // mysql only returns the user's privileges. func (p *planner) ShowGrants(n *parser.ShowGrants) (planNode, error) { if n.Targets == nil { return nil, fmt.Errorf("TODO(marc): implement SHOW GRANT with no targets") } descriptor, err := p.getDescriptorFromTargetList(*n.Targets) if err != nil { return nil, err } objectType := "Database" if n.Targets.Tables != nil { objectType = "Table" } v := &valuesNode{columns: []string{objectType, "User", "Privileges"}} var wantedUsers map[string]struct{} if len(n.Grantees) != 0 { wantedUsers = make(map[string]struct{}) } for _, u := range n.Grantees { wantedUsers[u] = struct{}{} } userPrivileges, err := descriptor.GetPrivileges().Show() if err != nil { return nil, err } for _, userPriv := range userPrivileges { if wantedUsers != nil { if _, ok := wantedUsers[userPriv.User]; !ok { continue } } v.rows = append(v.rows, []parser.Datum{ parser.DString(descriptor.GetName()), parser.DString(userPriv.User), parser.DString(userPriv.Privileges), }) } return v, nil } // ShowIndex returns all the indexes for a table. // Privileges: None. // Notes: postgres does not have a SHOW INDEX statement. // mysql requires some privilege for any column. 
func (p *planner) ShowIndex(n *parser.ShowIndex) (planNode, error) { desc, err := p.getTableDesc(n.Table) if err != nil { return nil, err } v := &valuesNode{columns: []string{"Table", "Name", "Unique", "Seq", "Column"}} name := n.Table.Table() for _, index := range append([]IndexDescriptor{desc.PrimaryIndex}, desc.Indexes...) { for j, col := range index.ColumnNames { v.rows = append(v.rows, []parser.Datum{ parser.DString(name), parser.DString(index.Name), parser.DBool(index.Unique), parser.DInt(j + 1), parser.DString(col), }) } } return v, nil } // ShowTables returns all the tables. // Privileges: None. // Notes: postgres does not have a SHOW TABLES statement. // mysql only returns tables you have privileges on. func (p *planner) ShowTables(n *parser.ShowTables) (planNode, error) { if n.Name == nil { if p.session.Database == "" { return nil, errNoDatabase } n.Name = &parser.QualifiedName{Base: parser.Name(p.session.Database)} } dbDesc, err := p.getDatabaseDesc(n.Name.String()) if err != nil { return nil, err } tableNames, err := p.getTableNames(dbDesc) if err != nil { return nil, err } v := &valuesNode{columns: []string{"Table"}} for _, name := range tableNames { v.rows = append(v.rows, []parser.Datum{parser.DString(name.Table())}) } return v, nil }
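// Usage sketch (illustrative SQL, not part of this file): the planners above back
// the corresponding statements, returning the column sets built in each valuesNode:
//
//   SHOW COLUMNS FROM db.kv      -- rows of (Field, Type, Null)
//   SHOW DATABASES               -- rows of (Database)
//   SHOW GRANTS ON DATABASE db   -- rows of (Database, User, Privileges)
//   SHOW INDEX FROM db.kv        -- rows of (Table, Name, Unique, Seq, Column)
//   SHOW TABLES FROM db          -- rows of (Table)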
jgautheron/cockroach
sql/show.go
Go
apache-2.0
4,901
export const schema = {} export const jsonDict = {} export const columns1 = [ { items: [ { errors: [], key: 'a', reactId: '/a+{"b":{"123":123,"c":"ccc","d":"ddd"}}', sortIndex: 1, value: { b: { 123: 123, c: 'ccc', d: 'ddd' } }, }, { errors: [], key: '123', reactId: '/123+123', sortIndex: 2, value: 123 }, { errors: [], key: 'b', reactId: '/b+"bbb"', sortIndex: 3, value: 'bbb' }, ], parent: { 123: 123, a: { b: { 123: 123, c: 'ccc', d: 'ddd' } }, b: 'bbb' }, }, ] export const columns2 = [ { items: [ { errors: [], key: 'a', reactId: '/a+{"b":{"123":123,"c":"ccc","d":"ddd"}}', sortIndex: 0, value: { b: { 123: 123, c: 'ccc', d: 'ddd' } }, }, { errors: [], key: 'b', reactId: '/b+"bbb"', sortIndex: 0, value: 'bbb' }, { errors: [], key: '123', reactId: '/123+123', sortIndex: 1, value: 123 }, ], parent: { 123: 123, a: { b: { 123: 123, c: 'ccc', d: 'ddd' } }, b: 'bbb' }, }, { items: [ { errors: [], key: 'b', reactId: '/a/b+{"123":123,"c":"ccc","d":"ddd"}', sortIndex: 0, value: { 123: 123, c: 'ccc', d: 'ddd', }, }, ], parent: { b: { 123: 123, c: 'ccc', d: 'ddd', }, }, }, { items: [ { errors: [], key: '123', reactId: '/a/b/123+123', sortIndex: 0, value: 123, }, { errors: [], key: 'd', reactId: '/a/b/d+"ddd"', sortIndex: 2, value: 'ddd', }, { errors: [], key: 'c', reactId: '/a/b/c+"ccc"', sortIndex: 3, value: 'ccc', }, ], parent: { 123: 123, c: 'ccc', d: 'ddd', }, }, ] export const sortOrder1 = { '/a': 1, '/123': 2, '/b': 3 } export const sortOrder2 = { '/123': 1, '/a/b/c': 3, '/a/b/d': 2 } export const object = { a: { b: { c: 'ccc', d: 'ddd', 123: 123 } }, b: 'bbb', 123: 123 }
quiltdata/quilt
catalog/app/components/JsonEditor/mocks/sorted.js
JavaScript
apache-2.0
2,097
// Copyright 2017 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. import {describe, it, beforeEach, afterEach} from 'mocha'; import {expect} from 'chai'; import {google} from '../protos/firestore_v1_proto_api'; import { Firestore, QueryDocumentSnapshot, setLogFunction, Timestamp, } from '../src'; import {GeoPoint} from '../src'; import {DocumentReference} from '../src'; import * as order from '../src/order'; import {QualifiedResourcePath} from '../src/path'; import {createInstance, InvalidApiUsage, verifyInstance} from './util/helpers'; import api = google.firestore.v1; // Change the argument to 'console.log' to enable debug output. setLogFunction(null); describe('Order', () => { let firestore: Firestore; beforeEach(() => { return createInstance().then(firestoreInstance => { firestore = firestoreInstance; }); }); afterEach(() => verifyInstance(firestore)); /** Converts a value into its proto representation. */ function wrap(value: unknown): api.IValue { const val = firestore._serializer!.encodeValue(value); expect(val).to.not.be.null; return val!; } function blob(data: number[]): api.IValue { return wrap(Buffer.from(data)); } function resource(pathString: string): api.IValue { return wrap( new DocumentReference( firestore, QualifiedResourcePath.fromSlashSeparatedString(pathString) ) ); } function geopoint(lat: number, lng: number): api.IValue { return wrap(new GeoPoint(lat, lng)); } function int(n: number): api.IValue { return { integerValue: n, }; } function double(n: number): api.IValue { return { doubleValue: n, }; } it('throws on invalid value', () => { expect(() => { order.compare( {valueType: 'foo'} as InvalidApiUsage, {valueType: 'foo'} as InvalidApiUsage ); }).to.throw('Unexpected value type: foo'); }); it('throws on invalid blob', () => { expect(() => { order.compare( { bytesValue: new Uint8Array([1, 2, 3]), }, { bytesValue: new Uint8Array([1, 2, 3]), } ); }).to.throw('Blobs can only be compared if they are Buffers'); }); it('compares document snapshots by name', () => { const docs = [ new QueryDocumentSnapshot( firestore.doc('col/doc3'), {}, Timestamp.now(), Timestamp.now(), Timestamp.now() ), new QueryDocumentSnapshot( firestore.doc('col/doc2'), {}, Timestamp.now(), Timestamp.now(), Timestamp.now() ), new QueryDocumentSnapshot( firestore.doc('col/doc2'), {}, Timestamp.now(), Timestamp.now(), Timestamp.now() ), new QueryDocumentSnapshot( firestore.doc('col/doc1'), {}, Timestamp.now(), Timestamp.now(), Timestamp.now() ), ]; docs.sort(firestore.collection('col').comparator()); expect(docs.map(doc => doc.id)).to.deep.eq([ 'doc1', 'doc2', 'doc2', 'doc3', ]); }); it('is correct', () => { const groups = [ // null first [wrap(null)], // booleans [wrap(false)], [wrap(true)], // numbers [double(NaN), double(NaN)], [double(-Infinity)], [double(-Number.MAX_VALUE)], [int(Number.MIN_SAFE_INTEGER - 1)], [int(Number.MIN_SAFE_INTEGER)], [double(-1.1)], // Integers and Doubles order the same. [int(-1), double(-1.0)], [double(-Number.MIN_VALUE)], // zeros all compare the same. 
[int(0), double(0.0), double(-0)], [double(Number.MIN_VALUE)], [int(1), double(1.0)], [double(1.1)], [int(2)], [int(10)], [int(Number.MAX_SAFE_INTEGER)], [int(Number.MAX_SAFE_INTEGER + 1)], [double(Infinity)], // timestamps [wrap(new Date(2016, 5, 20, 10, 20))], [wrap(new Date(2016, 10, 21, 15, 32))], // strings [wrap('')], [wrap('\u0000\ud7ff\ue000\uffff')], [wrap('(╯°□°)╯︵ ┻━┻')], [wrap('a')], [wrap('abc def')], // latin small letter e + combining acute accent + latin small letter b [wrap('e\u0301b')], [wrap('æ')], // latin small letter e with acute accent + latin small letter a [wrap('\u00e9a')], // blobs [blob([])], [blob([0])], [blob([0, 1, 2, 3, 4])], [blob([0, 1, 2, 4, 3])], [blob([255])], // resource names [resource('projects/p1/databases/d1/documents/c1/doc1')], [resource('projects/p1/databases/d1/documents/c1/doc2')], [resource('projects/p1/databases/d1/documents/c1/doc2/c2/doc1')], [resource('projects/p1/databases/d1/documents/c1/doc2/c2/doc2')], [resource('projects/p1/databases/d1/documents/c10/doc1')], [resource('projects/p1/databases/d1/documents/c2/doc1')], [resource('projects/p2/databases/d2/documents/c1/doc1')], [resource('projects/p2/databases/d2/documents/c1-/doc1')], [resource('projects/p2/databases/d3/documents/c1-/doc1')], // geo points [geopoint(-90, -180)], [geopoint(-90, 0)], [geopoint(-90, 180)], [geopoint(0, -180)], [geopoint(0, 0)], [geopoint(0, 180)], [geopoint(1, -180)], [geopoint(1, 0)], [geopoint(1, 180)], [geopoint(90, -180)], [geopoint(90, 0)], [geopoint(90, 180)], // arrays [wrap([])], [wrap(['bar'])], [wrap(['foo'])], [wrap(['foo', 1])], [wrap(['foo', 2])], [wrap(['foo', '0'])], // objects [wrap({bar: 0})], [wrap({bar: 0, foo: 1})], [wrap({foo: 1})], [wrap({foo: 2})], [wrap({foo: '0'})], ]; for (let i = 0; i < groups.length; i++) { for (const left of groups[i]) { for (let j = 0; j < groups.length; j++) { for (const right of groups[j]) { let expected = order.primitiveComparator(i, j); expect(order.compare(left, right)).to.equal( expected, 'comparing ' + left + ' (' + JSON.stringify(left) + ') to ' + right + ' (' + JSON.stringify(right) + ') at (' + i + ', ' + j + ')' ); expected = order.primitiveComparator(j, i); expect(order.compare(right, left)).to.equal( expected, 'comparing ' + right + ' (' + JSON.stringify(right) + ') to ' + left + ' (' + JSON.stringify(left) + ') at (' + j + ', ' + i + ')' ); } } } } }); });
googleapis/nodejs-firestore
dev/test/order.ts
TypeScript
apache-2.0
7,565