code
stringlengths
3
1.01M
repo_name
stringlengths
5
116
path
stringlengths
3
311
language
stringclasses
30 values
license
stringclasses
15 values
size
int64
3
1.01M
/* AngularBeans, CDI-AngularJS bridge Copyright (c) 2014, Bessem Hmidi. or third-party contributors as indicated by
 * the @author tags or express copyright attribution statements applied by the authors. This copyrighted material is
 * made available to anyone wishing to use, modify, copy, or redistribute it subject to the terms and conditions of the
 * GNU Lesser General Public License, as published by the Free Software Foundation. This program is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. */
package angularBeans.context;

import java.io.Serializable;
import java.util.Set;

import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.context.RequestScoped;
import javax.enterprise.context.spi.Context;
import javax.enterprise.context.spi.CreationalContext;
import javax.enterprise.inject.Any;
import javax.enterprise.inject.spi.Bean;
import javax.enterprise.inject.spi.BeanManager;
import javax.enterprise.util.AnnotationLiteral;
import javax.inject.Inject;

import angularBeans.util.AngularBeansUtils;
import angularBeans.util.CommonUtils;

/**
 * provide a lookup method to obtain an angularBean reference from an external context to the HTTP Session context
 * (useful with realTime methods calls)
 *
 * @author Bessem Hmidi
 */
@SuppressWarnings("serial")
@ApplicationScoped
public class BeanLocator implements Serializable {

    // CDI bean manager used for programmatic (non-injected) bean resolution.
    @Inject
    private BeanManager beanManager;

    @Inject
    AngularBeansUtils util;

    /**
     * Resolves a contextual reference for the named angularBean within the given
     * NGSession scope.
     *
     * @param beanName  EL/bean name to look up via the BeanManager; falls back to a
     *                  type-based lookup using {@code CommonUtils.beanNamesHolder}
     *                  when the name yields no beans.
     * @param sessionID id of the NGSession whose scope context is activated for
     *                  this lookup (side effect: sets the thread's current context).
     * @return the contextual instance, or the value produced by the scope's
     *         {@code Context.get(...)} (may be null if the context holds no instance).
     */
    public Object lookup(String beanName, String sessionID) {
        // Side effect: bind this thread to the NGSession context before resolving.
        NGSessionScopeContext.setCurrentContext(sessionID);
        Set<Bean<?>> beans = beanManager.getBeans(beanName);
        Class beanClass = CommonUtils.beanNamesHolder.get(beanName);
        if (beans.isEmpty()) {
            // Name lookup failed; retry by type with the @Any qualifier.
            beans = beanManager.getBeans(beanClass, new AnnotationLiteral<Any>() {
                //
            });
        }
        Bean bean = beanManager.resolve(beans);
        Class scopeAnnotationClass = bean.getScope();
        Context context;
        if (scopeAnnotationClass.equals(RequestScoped.class)) {
            context = beanManager.getContext(scopeAnnotationClass);
            // NOTE(review): per the CDI spec BeanManager.getContext throws
            // ContextNotActiveException rather than returning null, so this
            // fallback branch is likely dead code — confirm against the CDI impl.
            if (context == null)
                return bean.create(beanManager.createCreationalContext(bean));
        } else {
            // NOTE(review): this compares the bean's scope *annotation* class with
            // the NGSessionScopeContext *context* class; a scope annotation type
            // (e.g. an @NGSessionScoped) would normally be expected here — verify.
            if (scopeAnnotationClass.equals(NGSessionScopeContext.class)) {
                context = NGSessionScopeContext.getINSTANCE();
            } else {
                context = beanManager.getContext(scopeAnnotationClass);
            }
        }
        CreationalContext creationalContext = beanManager.createCreationalContext(bean);
        Object reference = context.get(bean, creationalContext);
        // if(reference==null && scopeAnnotationClass.equals(RequestScoped.class)){
        // reference= bean.create(beanManager.createCreationalContext(bean));
        // }
        return reference;
    }
}
bessemHmidi/AngularBeans
angular-beans/src/main/java/angularBeans/context/BeanLocator.java
Java
lgpl-3.0
2,829
/*
 * Copyright (C) 2006 Oliver Hunt <oliver@nerget.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB.  If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#ifndef SVGFEDisplacementMapElement_h
#define SVGFEDisplacementMapElement_h

#if ENABLE(SVG) && ENABLE(FILTERS)
#include "FEDisplacementMap.h"
#include "SVGAnimatedEnumeration.h"
#include "SVGAnimatedNumber.h"
#include "SVGFilterPrimitiveStandardAttributes.h"

namespace WebCore {

// Maps the ChannelSelectorType enum to/from its SVG attribute string form
// ("R"/"G"/"B"/"A"); unknown values serialize to the empty string.
template<>
struct SVGPropertyTraits<ChannelSelectorType> {
    static unsigned highestEnumValue() { return CHANNEL_A; }

    static String toString(ChannelSelectorType type)
    {
        switch (type) {
        case CHANNEL_UNKNOWN:
            return emptyString();
        case CHANNEL_R:
            return "R";
        case CHANNEL_G:
            return "G";
        case CHANNEL_B:
            return "B";
        case CHANNEL_A:
            return "A";
        }

        ASSERT_NOT_REACHED();
        return emptyString();
    }

    static ChannelSelectorType fromString(const String& value)
    {
        if (value == "R")
            return CHANNEL_R;
        if (value == "G")
            return CHANNEL_G;
        if (value == "B")
            return CHANNEL_B;
        if (value == "A")
            return CHANNEL_A;
        return CHANNEL_UNKNOWN;
    }
};

// DOM element for the SVG <feDisplacementMap> filter primitive.
class SVGFEDisplacementMapElement : public SVGFilterPrimitiveStandardAttributes {
public:
    static PassRefPtr<SVGFEDisplacementMapElement> create(const QualifiedName&, Document*);

    // Parses an attribute value into a channel selector (CHANNEL_UNKNOWN on failure).
    static ChannelSelectorType stringToChannel(const String&);

private:
    SVGFEDisplacementMapElement(const QualifiedName& tagName, Document*);

    bool isSupportedAttribute(const QualifiedName&);
    virtual void parseAttribute(const QualifiedName&, const AtomicString&) OVERRIDE;
    virtual bool setFilterEffectAttribute(FilterEffect*, const QualifiedName& attrName);
    virtual void svgAttributeChanged(const QualifiedName&);
    virtual PassRefPtr<FilterEffect> build(SVGFilterBuilder*, Filter*);

    // Animated SVG attributes: in, in2, xChannelSelector, yChannelSelector, scale.
    BEGIN_DECLARE_ANIMATED_PROPERTIES(SVGFEDisplacementMapElement)
        DECLARE_ANIMATED_STRING(In1, in1)
        DECLARE_ANIMATED_STRING(In2, in2)
        DECLARE_ANIMATED_ENUMERATION(XChannelSelector, xChannelSelector, ChannelSelectorType)
        DECLARE_ANIMATED_ENUMERATION(YChannelSelector, yChannelSelector, ChannelSelectorType)
        DECLARE_ANIMATED_NUMBER(Scale, scale)
    END_DECLARE_ANIMATED_PROPERTIES
};

} // namespace WebCore

#endif // ENABLE(SVG)
#endif // SVGFEDisplacementMapElement_h
nawawi/wkhtmltopdf
webkit/Source/WebCore/svg/SVGFEDisplacementMapElement.h
C
lgpl-3.0
3,203
package com.github.axet.wget.info.ex;

/**
 * Download error that carries an integer error code describing the failure.
 */
public class DownloadIOCodeError extends DownloadError {
    private static final long serialVersionUID = 7835308901669107488L;

    /** Error code associated with this failure; 0 when none was supplied. */
    int code;

    /** Creates an error without a specific code (code defaults to 0). */
    public DownloadIOCodeError() {
        this(0);
    }

    /**
     * Creates an error for the given code.
     *
     * @param code the code describing the failure
     */
    public DownloadIOCodeError(int code) {
        this.code = code;
    }

    /**
     * @return the code associated with this error
     */
    public int getCode() {
        return code;
    }
}
sngvahmed/wGet_ODM
src/main/java/com/github/axet/wget/info/ex/DownloadIOCodeError.java
Java
lgpl-3.0
359
/**
 * Copyright (c) 2012
 */
package lab.s2jh.core.web.view;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;

/**
 * Result structure for Object-to-JSON serialization of an operation outcome.
 * Null-valued properties are omitted from the serialized output.
 */
@JsonInclude(Include.NON_NULL)
public class OperationResult {

    /** Marks whether/how the operation completed. */
    public enum OPERATION_RESULT_TYPE {
        success, warning, failure, confirm
    }

    /** Operation marker: "success", "warning", "failure" or "confirm". */
    private String type;

    /** Internationalized message body returned in the JSON. */
    private String message;

    /** Optional supplementary payload data. */
    private Object userdata;

    public static OperationResult buildSuccessResult(String message, Object userdata) {
        return new OperationResult(OPERATION_RESULT_TYPE.success, message, userdata);
    }

    public static OperationResult buildSuccessResult(String message) {
        return new OperationResult(OPERATION_RESULT_TYPE.success, message);
    }

    public static OperationResult buildWarningResult(String message, Object userdata) {
        return new OperationResult(OPERATION_RESULT_TYPE.warning, message, userdata);
    }

    /** Message-only overload added for consistency with the success/failure factories. */
    public static OperationResult buildWarningResult(String message) {
        return new OperationResult(OPERATION_RESULT_TYPE.warning, message);
    }

    public static OperationResult buildFailureResult(String message) {
        return new OperationResult(OPERATION_RESULT_TYPE.failure, message);
    }

    public static OperationResult buildFailureResult(String message, Object userdata) {
        return new OperationResult(OPERATION_RESULT_TYPE.failure, message, userdata);
    }

    public static OperationResult buildConfirmResult(String message, Object userdata) {
        return new OperationResult(OPERATION_RESULT_TYPE.confirm, message, userdata);
    }

    /** Message-only overload added for consistency with the success/failure factories. */
    public static OperationResult buildConfirmResult(String message) {
        return new OperationResult(OPERATION_RESULT_TYPE.confirm, message);
    }

    public OperationResult(OPERATION_RESULT_TYPE type, String message) {
        this.type = type.name();
        this.message = message;
    }

    public OperationResult(OPERATION_RESULT_TYPE type, String message, Object userdata) {
        this.type = type.name();
        this.message = message;
        this.userdata = userdata;
    }

    /**
     * @return the type
     */
    public String getType() {
        return type;
    }

    /**
     * @param type
     *            the type to set
     */
    public void setType(String type) {
        this.type = type;
    }

    /**
     * @return the message
     */
    public String getMessage() {
        return message;
    }

    /**
     * @param message
     *            the message to set
     */
    public void setMessage(String message) {
        this.message = message;
    }

    public Object getUserdata() {
        return userdata;
    }

    public void setUserdata(Object userdata) {
        this.userdata = userdata;
    }
}
xautlx/s2jh
core-service/src/main/java/lab/s2jh/core/web/view/OperationResult.java
Java
lgpl-3.0
2,803
/*
	Integrate.c
		integrate over the unit hypercube
		this file is part of Vegas
		last modified 23 May 14 th
*/

/* Persistent iteration state; also serialized to the state file so a run
   can be resumed. cumul[] is a flexible array sized at runtime, followed
   in the same allocation by the NDIM importance-sampling grids. */
typedef struct {
  signature_t signature;
  count niter;
  number nsamples, neval;
  Cumulants cumul[];
} State;

/* Main Vegas driver: adaptive importance-sampling Monte Carlo integration
   over the unit hypercube.
   integral/error/prob receive, per component, the estimate, its error,
   and the chi-square probability. Returns 0 on success, positive when the
   accuracy goal was not reached, -1/-2 for bad dimension/component counts. */
static int Integrate(This *t, real *integral, real *error, real *prob)
{
  bin_t *bins;
  count dim, comp;
  int fail;
  StateDecl;
  /* One allocation holds the State header, NCOMP accumulators and the
     NDIM grids; C points one past the last used accumulator. */
  csize_t statesize = sizeof(State) + NCOMP*sizeof(Cumulants) + NDIM*sizeof(Grid);
  Sized(State, state, statesize);
  Cumulants *c, *C = state->cumul + t->ncomp;
  Grid *state_grid = (Grid *)C;
  Array(Grid, margsum, NCOMP, NDIM);
  Vector(char, out, 128*NCOMP + 256);

  if( VERBOSE > 1 ) {
    sprintf(out, "Vegas input parameters:\n"
      " ndim " COUNT "\n ncomp " COUNT "\n"
      ML_NOT(" nvec " NUMBER "\n")
      " epsrel " REAL "\n epsabs " REAL "\n"
      " flags %d\n seed %d\n"
      " mineval " NUMBER "\n maxeval " NUMBER "\n"
      " nstart " NUMBER "\n nincrease " NUMBER "\n"
      " nbatch " NUMBER "\n gridno %d\n"
      " statefile \"%s\"",
      t->ndim, t->ncomp, ML_NOT(t->nvec,)
      t->epsrel, t->epsabs,
      t->flags, t->seed,
      t->mineval, t->maxeval,
      t->nstart, t->nincrease, t->nbatch, t->gridno,
      t->statefile);
    Print(out);
  }

  if( BadComponent(t) ) return -2;
  if( BadDimension(t) ) return -1;

  FrameAlloc(t, Master);
  ForkCores(t);
  Alloc(bins, t->nbatch*t->ndim);

  /* Abnormal termination (e.g. user abort in the integrand) longjmps here. */
  if( (fail = setjmp(t->abort)) ) goto abort;

  IniRandom(t);

  StateSetup(t);
  /* Resume from a previously written state file when present and valid;
     the RNG is fast-forwarded past the samples already consumed. */
  if( StateReadTest(t) ) {
    StateReadOpen(t, fd) {
      if( read(fd, state, statesize) != statesize ||
          state->signature != StateSignature(t, 1) ) break;
    } StateReadClose(t, fd);
    t->neval = state->neval;
    t->rng.skiprandom(t, t->neval);
  }

  /* Fresh start: zero accumulators and load the initial grid. */
  if( ini ) {
    state->niter = 0;
    state->nsamples = t->nstart;
    FClear(state->cumul);
    GetGrid(t, state_grid);
    t->neval = 0;
  }

  /* main iteration loop */
  for( ; ; ) {
    number nsamples = state->nsamples;
    creal jacobian = 1./nsamples;
    FClear(margsum);

    /* Sample in batches of at most nbatch points. */
    for( ; nsamples > 0; nsamples -= t->nbatch ) {
      cnumber n = IMin(t->nbatch, nsamples);
      /* Frame layout: n weights, then n*ndim sample points, then
         n*ncomp integrand values. */
      real *w = t->frame;
      real *x = w + n;
      real *f = x + n*t->ndim;
      real *lastf = f + n*t->ncomp;
      bin_t *bin = bins;

      /* Map uniform random numbers through the (piecewise-linear) grid,
         recording the bin index per dimension and the sampling weight. */
      while( x < f ) {
        real weight = jacobian;
        t->rng.getrandom(t, x);
        for( dim = 0; dim < t->ndim; ++dim ) {
          creal pos = *x*NBINS;
          ccount ipos = (count)pos;
          creal prev = (ipos == 0) ? 0 : state_grid[dim][ipos - 1];
          creal diff = state_grid[dim][ipos] - prev;
          *x++ = prev + (pos - ipos)*diff;
          *bin++ = ipos;
          weight *= diff*NBINS;
        }
        *w++ = weight;
      }

      DoSample(t, n, w, f, t->frame, state->niter + 1);

      /* Accumulate weighted integrand values into the per-component sums
         and into the per-dimension marginal sums used for grid refinement. */
      bin = bins;
      w = t->frame;

      while( f < lastf ) {
        creal weight = *w++;
        Grid *m = &margsum[0][0];

        for( c = state->cumul; c < C; ++c ) {
          real wfun = weight*(*f++);
          if( wfun ) {
            c->sum += wfun;
            /* note: wfun is squared in place here */
            c->sqsum += wfun *= wfun;
            for( dim = 0; dim < t->ndim; ++dim )
              m[dim][bin[dim]] += wfun;
          }
          m += t->ndim;
        }

        bin += t->ndim;
      }
    }

    fail = 0;

    /* compute the integral and error values */
    for( c = state->cumul; c < C; ++c ) {
      real w = Weight(c->sum, c->sqsum, state->nsamples);
      real sigsq = 1/(c->weightsum += w);
      real avg = sigsq*(c->avgsum += w*c->sum);

      /* LAST mode uses only the current iteration instead of the
         weighted average of all iterations. */
      c->avg = LAST ? (sigsq = 1/w, c->sum) : avg;
      c->err = sqrt(sigsq);
      fail |= (c->err > MaxErr(c->avg));

      /* Chi-square bookkeeping across iterations. */
      if( state->niter == 0 ) c->guess = c->sum;
      else {
        c->chisum += w *= c->sum - c->guess;
        c->chisqsum += w*c->sum;
      }
      c->chisq = c->chisqsum - avg*c->chisum;

      c->sum = c->sqsum = 0;
    }

    if( VERBOSE ) {
      char *oe = out + sprintf(out, "\n"
        "Iteration " COUNT ": " NUMBER " integrand evaluations so far",
        state->niter + 1, t->neval);
      for( c = state->cumul, comp = 0; c < C; ++c )
        oe += sprintf(oe, "\n[" COUNT "] "
          REAL " +- " REAL " \tchisq " REAL " (" COUNT " df)",
          ++comp, c->avg, c->err, c->chisq, state->niter);
      Print(out);
    }

    /* Converged, or out of budget (without pending state write). */
    if( fail == 0 && t->neval >= t->mineval ) break;
    if( t->neval >= t->maxeval && !StateWriteTest(t) ) break;

    /* Refine the grid from the marginal sums; for multiple components a
       1/avg^2-weighted combination of the marginals is used. */
    if( t->ncomp == 1 )
      for( dim = 0; dim < t->ndim; ++dim )
        RefineGrid(t, state_grid[dim], margsum[0][dim]);
    else {
      for( dim = 0; dim < t->ndim; ++dim ) {
        Grid wmargsum;
        Zap(wmargsum);
        for( comp = 0; comp < t->ncomp; ++comp ) {
          real w = state->cumul[comp].avg;
          if( w != 0 ) {
            creal *m = margsum[comp][dim];
            count bin;
            w = 1/Sq(w);
            for( bin = 0; bin < NBINS; ++bin )
              wmargsum[bin] += w*m[bin];
          }
        }
        RefineGrid(t, state_grid[dim], wmargsum);
      }
    }

    ++state->niter;
    state->nsamples += t->nincrease;

    /* Checkpoint the state so an interrupted run can be resumed. */
    if( StateWriteTest(t) ) {
      state->signature = StateSignature(t, 1);
      state->neval = t->neval;
      StateWriteOpen(t, fd) {
        StateWrite(fd, state, statesize);
      } StateWriteClose(t, fd);
      if( t->neval >= t->maxeval ) break;
    }
  }

  /* Export final per-component results. */
  for( comp = 0; comp < t->ncomp; ++comp ) {
    cCumulants *c = &state->cumul[comp];
    integral[comp] = c->avg;
    error[comp] = c->err;
    prob[comp] = ChiSquare(c->chisq, state->niter);
  }

abort:
  PutGrid(t, state_grid);
  free(bins);
  FrameFree(t, Master);
  StateRemove(t);

  return fail;
}
JohannesBuchner/cuba
src/vegas/Integrate.c
C
lgpl-3.0
5,669
# uexQQ QQ插件 (QQ plugin)
sandy1108/uexQQ
README.md
Markdown
lgpl-3.0
17
/* Copyright (c) 2013-2014 Andrey Goryachev <andrey.goryachev@gmail.com> Copyright (c) 2011-2014 Other contributors as noted in the AUTHORS file. This file is part of Cocaine. Cocaine is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. Cocaine is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. */ #ifndef COCAINE_IO_FROZEN_SERIALIZATION_TRAITS_HPP #define COCAINE_IO_FROZEN_SERIALIZATION_TRAITS_HPP #include "cocaine/traits.hpp" #include "cocaine/traits/tuple.hpp" #include "cocaine/rpc/frozen.hpp" namespace cocaine { namespace io { template<class Event> struct type_traits<frozen<Event>> { template<class Stream> static inline void pack(msgpack::packer<Stream>& target, const frozen<Event>& source) { type_traits<typename frozen<Event>::tuple_type>::pack(target, source.tuple); } static inline void unpack(const msgpack::object& source, frozen<Event>& target) { type_traits<typename frozen<Event>::tuple_type>::unpack(source, target.tuple); } }; }} // namespace cocaine::io #endif
cocaine/cocaine-core
include/cocaine/traits/frozen.hpp
C++
lgpl-3.0
1,600
-- BlogEngine.NET schema upgrade, MySQL: 1.5 -> 1.6.

-- Blogroll entries: one row per linked blog, ordered by SortIndex.
CREATE TABLE IF NOT EXISTS `be_BlogRollItems` (
  `BlogRollID` varchar(36) NOT NULL,
  `Title` varchar(255) NOT NULL,
  `Description` longtext DEFAULT NULL,
  `BlogUrl` varchar(255) NOT NULL,
  `FeedUrl` varchar(255) DEFAULT NULL,
  `Xfn` varchar(255) DEFAULT NULL,
  `SortIndex` int(10) NOT NULL,
  PRIMARY KEY (`BlogRollID`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;

-- Referrer statistics: per-day hit counts per referring URL.
CREATE TABLE IF NOT EXISTS `be_Referrers` (
  `ReferrerId` varchar(36) NOT NULL,
  `ReferralDay` datetime NOT NULL,
  `ReferrerUrl` varchar(255) NOT NULL,
  `ReferralCount` int(10) NOT NULL,
  `Url` varchar(255) DEFAULT NULL,
  `IsSpam` tinyint(1) NULL,
  PRIMARY KEY (`ReferrerId`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;

-- New 1.6 columns on existing tables (all nullable, so the upgrade is
-- safe for existing rows).
ALTER TABLE `be_Pages` ADD `Slug` VARCHAR(255) DEFAULT NULL;
ALTER TABLE `be_PostComment` ADD `ModeratedBy` VARCHAR(100) DEFAULT NULL;
ALTER TABLE `be_PostComment` ADD `Avatar` VARCHAR(255) DEFAULT NULL;
royosherove/dotnetmocks2demo
blogengine_7761d88db905/BlogEngine/BlogEngine.NET/setup/MySQL/MySQLUpgradeFrom1.5To1.6.sql
SQL
lgpl-3.0
898
# Copyright Iris contributors
#
# This file is part of Iris and is released under the LGPL license.
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
"""Unit tests for the :class:`iris.coord_systems.VerticalPerspective` class."""

# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests  # isort:skip

import cartopy.crs as ccrs

from iris.coord_systems import GeogCS, VerticalPerspective


class Test(tests.IrisTest):
    def setUp(self):
        # Projection parameters shared by all tests in this class.
        self.latitude_of_projection_origin = 0.0
        self.longitude_of_projection_origin = 0.0
        self.perspective_point_height = 38204820000.0
        self.false_easting = 0.0
        self.false_northing = 0.0
        # Airy 1830 semi-axes define the ellipsoid used on both the iris
        # and the cartopy side of the comparison.
        self.semi_major_axis = 6377563.396
        self.semi_minor_axis = 6356256.909
        self.ellipsoid = GeogCS(self.semi_major_axis, self.semi_minor_axis)
        # ellipse=None so the Globe is defined purely by the semi-axes.
        self.globe = ccrs.Globe(
            semimajor_axis=self.semi_major_axis,
            semiminor_axis=self.semi_minor_axis,
            ellipse=None,
        )
        # Actual and expected coord system can be re-used for
        # VerticalPerspective.test_crs_creation and test_projection_creation.
        self.expected = ccrs.NearsidePerspective(
            central_longitude=self.longitude_of_projection_origin,
            central_latitude=self.latitude_of_projection_origin,
            satellite_height=self.perspective_point_height,
            false_easting=self.false_easting,
            false_northing=self.false_northing,
            globe=self.globe,
        )
        self.vp_cs = VerticalPerspective(
            self.latitude_of_projection_origin,
            self.longitude_of_projection_origin,
            self.perspective_point_height,
            self.false_easting,
            self.false_northing,
            self.ellipsoid,
        )

    def test_crs_creation(self):
        # The iris coord system must convert to the expected cartopy CRS.
        res = self.vp_cs.as_cartopy_crs()
        self.assertEqual(res, self.expected)

    def test_projection_creation(self):
        # as_cartopy_projection() must agree with as_cartopy_crs() here.
        res = self.vp_cs.as_cartopy_projection()
        self.assertEqual(res, self.expected)

    def test_set_optional_args(self):
        # Check that setting the optional (non-ellipse) args works.
        crs = VerticalPerspective(
            0, 0, 1000, false_easting=100, false_northing=-203.7
        )
        # assertEqualAndKind also checks the stored values are floats.
        self.assertEqualAndKind(crs.false_easting, 100.0)
        self.assertEqualAndKind(crs.false_northing, -203.7)

    def _check_crs_defaults(self, crs):
        # Check for property defaults when no kwargs options were set.
        # NOTE: except ellipsoid, which is done elsewhere.
        self.assertEqualAndKind(crs.false_easting, 0.0)
        self.assertEqualAndKind(crs.false_northing, 0.0)

    def test_no_optional_args(self):
        # Check expected defaults with no optional args.
        crs = VerticalPerspective(0, 0, 1000)
        self._check_crs_defaults(crs)

    def test_optional_args_None(self):
        # Check expected defaults with optional args=None.
        crs = VerticalPerspective(
            0, 0, 1000, false_easting=None, false_northing=None
        )
        self._check_crs_defaults(crs)


if __name__ == "__main__":
    tests.main()
SciTools/iris
lib/iris/tests/unit/coord_systems/test_VerticalPerspective.py
Python
lgpl-3.0
3,251
/*
 * This file is part of the GROMACS molecular simulation package.
 *
 * Copyright (c) 1991-2000, University of Groningen, The Netherlands.
 * Copyright (c) 2001-2004, The GROMACS development team.
 * Copyright (c) 2011-2019, by the GROMACS development team, led by
 * Mark Abraham, David van der Spoel, Berk Hess, and Erik Lindahl,
 * and including many others, as listed in the AUTHORS file in the
 * top-level source directory and at http://www.gromacs.org.
 *
 * GROMACS is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation; either version 2.1
 * of the License, or (at your option) any later version.
 *
 * GROMACS is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with GROMACS; if not, see
 * http://www.gnu.org/licenses, or write to the Free Software Foundation,
 * Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA.
 *
 * If you want to redistribute modifications to GROMACS, please
 * consider that scientific software is very special. Version
 * control is crucial - bugs must be traceable. We will be happy to
 * consider code for inclusion in the official distribution, but
 * derived work must not be called official GROMACS. Details are found
 * in the README & COPYING files - if they are missing, get the
 * official version at http://www.gromacs.org.
 *
 * To help us fund GROMACS development, we humbly ask that you cite
 * the research papers on the package. Check out http://www.gromacs.org.
 */
/*! \internal \file
 *
 * \brief This file declares helper functionality for legacy option handling for mdrun
 *
 * \author Berk Hess <hess@kth.se>
 * \author David van der Spoel <david.vanderspoel@icm.uu.se>
 * \author Erik Lindahl <erik@kth.se>
 * \author Mark Abraham <mark.j.abraham@gmail.com>
 *
 * \ingroup module_mdrun
 */
#include "gmxpre.h"

#include "legacymdrunoptions.h"

#include <cstring>

#include "gromacs/math/functions.h"
#include "gromacs/utility/arraysize.h"
#include "gromacs/utility/fatalerror.h"

namespace gmx
{

/*! \brief Return whether the command-line parameter that
 * will trigger a multi-simulation is set */
static bool is_multisim_option_set(int argc, const char* const argv[])
{
    // Plain linear scan of argv; parse_common_args has not run yet.
    for (int i = 0; i < argc; ++i)
    {
        if (strcmp(argv[i], "-multidir") == 0)
        {
            return true;
        }
    }
    return false;
}

/*! \brief Parse mdrun's command line into the legacy option structures.
 *
 * Fills hw_opt, mdrunOptions and domdecOptions from argv, and (in this
 * PLUMED-patched version) also initializes the PLUMED plugin when -plumed
 * was given.
 *
 * \returns 0 when parse_common_args requested an early exit (e.g. -h),
 *          1 otherwise. Calls gmx_fatal on inconsistent GPU/PLUMED options.
 */
int LegacyMdrunOptions::updateFromCommandLine(int argc, char** argv, ArrayRef<const char*> desc)
{
    unsigned long PCA_Flags = PCA_CAN_SET_DEFFNM;
    // With -multidir, the working directory still needs to be
    // changed, so we can't check for the existence of files during
    // parsing. It isn't useful to do any completion based on file
    // system contents, either.
    if (is_multisim_option_set(argc, argv))
    {
        PCA_Flags |= PCA_DISABLE_INPUT_FILE_CHECKING;
    }

    if (!parse_common_args(&argc, argv, PCA_Flags, ssize(filenames), filenames.data(), asize(pa),
                           pa, ssize(desc), desc.data(), 0, nullptr, &oenv))
    {
        return 0;
    }

    // Handle the options that permits the user to either declare
    // which compatible GPUs are availble for use, or to select a GPU
    // task assignment. Either could be in an environment variable (so
    // that there is a way to customize it, when using MPI in
    // heterogeneous contexts).
    {
        // TODO Argument parsing can't handle std::string. We should
        // fix that by changing the parsing, once more of the roles of
        // handling, validating and implementing defaults for user
        // command-line options have been seperated.
        hw_opt.gpuIdsAvailable       = gpuIdsAvailable;
        hw_opt.userGpuTaskAssignment = userGpuTaskAssignment;

        // Environment variables may supply the values, but conflict
        // with the equivalent command-line options.
        const char* env = getenv("GMX_GPU_ID");
        if (env != nullptr)
        {
            if (!hw_opt.gpuIdsAvailable.empty())
            {
                gmx_fatal(FARGS, "GMX_GPU_ID and -gpu_id can not be used at the same time");
            }
            hw_opt.gpuIdsAvailable = env;
        }

        env = getenv("GMX_GPUTASKS");
        if (env != nullptr)
        {
            if (!hw_opt.userGpuTaskAssignment.empty())
            {
                gmx_fatal(FARGS, "GMX_GPUTASKS and -gputasks can not be used at the same time");
            }
            hw_opt.userGpuTaskAssignment = env;
        }

        // The two mechanisms are mutually exclusive, whatever their source.
        if (!hw_opt.gpuIdsAvailable.empty() && !hw_opt.userGpuTaskAssignment.empty())
        {
            gmx_fatal(FARGS, "-gpu_id and -gputasks cannot be used at the same time");
        }
    }

    hw_opt.threadAffinity = static_cast<ThreadAffinity>(nenum(thread_aff_opt_choices));

    // -append tri-state: unset -> Auto, -append -> Appending,
    // -noappend -> NoAppending.
    if (!opt2parg_bSet("-append", asize(pa), pa))
    {
        mdrunOptions.appendingBehavior = AppendingBehavior::Auto;
    }
    else
    {
        if (opt2parg_bool("-append", asize(pa), pa))
        {
            mdrunOptions.appendingBehavior = AppendingBehavior::Appending;
        }
        else
        {
            mdrunOptions.appendingBehavior = AppendingBehavior::NoAppending;
        }
    }

    mdrunOptions.rerun            = opt2bSet("-rerun", ssize(filenames), filenames.data());
    mdrunOptions.ntompOptionIsSet = opt2parg_bSet("-ntomp", asize(pa), pa);

    // Domain-decomposition options; cell counts arrive as reals and are
    // rounded to the nearest integer here.
    domdecOptions.rankOrder    = static_cast<DdRankOrder>(nenum(ddrank_opt_choices));
    domdecOptions.dlbOption    = static_cast<DlbOption>(nenum(dddlb_opt_choices));
    domdecOptions.numCells[XX] = roundToInt(realddxyz[XX]);
    domdecOptions.numCells[YY] = roundToInt(realddxyz[YY]);
    domdecOptions.numCells[ZZ] = roundToInt(realddxyz[ZZ]);

    /* PLUMED */
    plumedswitch = 0;
    if (opt2bSet("-plumed", static_cast<int>(filenames.size()), filenames.data())) plumedswitch = 1;
    if (plumedswitch)
    {
        // Tell PLUMED about precision and (native GROMACS) units, then point
        // it at the -plumed input file.
        int real_precision = sizeof(real);
        real energyUnits = 1.0;
        real lengthUnits = 1.0;
        real timeUnits   = 1.0;

        if (!plumed_installed())
        {
            gmx_fatal(FARGS, "Plumed is not available. Check your PLUMED_KERNEL variable.");
        }
        plumedmain = plumed_create();
        plumed_cmd(plumedmain, "setRealPrecision", &real_precision);
        // this is not necessary for gromacs units:
        plumed_cmd(plumedmain, "setMDEnergyUnits", &energyUnits);
        plumed_cmd(plumedmain, "setMDLengthUnits", &lengthUnits);
        plumed_cmd(plumedmain, "setMDTimeUnits", &timeUnits);
        //
        plumed_cmd(plumedmain, "setPlumedDat", ftp2fn(efDAT, static_cast<int>(filenames.size()), filenames.data()));
        plumedswitch = 1;
    }
    /* PLUMED HREX*/
    // Hamiltonian replica exchange requires -plumed plus an active,
    // non -nex style replica-exchange setup.
    if (getenv("PLUMED_HREX")) plumed_hrex = 1;
    if (plumed_hrex)
    {
        if (!plumedswitch) gmx_fatal(FARGS, "-hrex (or PLUMED_HREX) requires -plumed");
        if (replExParams.exchangeInterval == 0) gmx_fatal(FARGS, "-hrex (or PLUMED_HREX) replica exchange");
        if (replExParams.numExchanges != 0) gmx_fatal(FARGS, "-hrex (or PLUMED_HREX) not compatible with -nex");
    }
    /* END PLUMED HREX */
    /* END PLUMED */

    return 1;
}

LegacyMdrunOptions::~LegacyMdrunOptions()
{
    // Releases the output environment allocated by parse_common_args.
    output_env_done(oenv);
}

} // namespace gmx
plumed/plumed2
patches/gromacs-2020.6.diff/src/gromacs/mdrun/legacymdrunoptions.cpp
C++
lgpl-3.0
7,356
/*
 * #%L
 * Alfresco Repository
 * %%
 * Copyright (C) 2005 - 2016 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.alfresco.repo.tagging;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import org.alfresco.model.ContentModel;
import org.alfresco.repo.cache.SimpleCache;
import org.alfresco.service.cmr.repository.ContentData;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.NodeService;
import org.alfresco.service.cmr.tagging.TagDetails;
import org.alfresco.service.namespace.QName;
import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;

/**
 * This class is an interceptor of the NodeService that converts the content of the tagScopeCache property
 * into a pseudo, multi-value text property (cm:tagScopeSummary)
 * with each value of the spoofed property taking the form "&lt;tag name&gt;=&lt;tag count&gt;".
 * This interceptor can be enabled by calling its
 * static {@link TagScopePropertyMethodInterceptor#setEnabled(Boolean)} method. It is enabled by default. When enabled,
 * a call to getProperties
 * for a node that has a cm:tagScopeCache property will include the calculated cm:tagScopeSummary property. A call to
 * getProperty specifying cm:tagScopeSummary as the property name will return the calculated property value or null
 * if the node has no cm:tagScopeCache property value.
 *
 * @author Brian Remmington
 */
public class TagScopePropertyMethodInterceptor implements MethodInterceptor
{
    // Per-thread on/off switch; defaults to enabled.
    private static ThreadLocal<Boolean> enabled = new ThreadLocal<Boolean>()
    {
        @Override
        protected Boolean initialValue()
        {
            return Boolean.TRUE;
        }
    };

    private ContentService contentService;
    private NodeService nodeService;
    // Maps a tagScopeCache content URL to its parsed "name=count" summary list.
    private SimpleCache<String, List<String>> cache;

    public void setContentService(ContentService contentService)
    {
        this.contentService = contentService;
    }

    public void setNodeService(NodeService nodeService)
    {
        this.nodeService = nodeService;
    }

    public void setCache(SimpleCache<String, List<String>> cache)
    {
        this.cache = cache;
    }

    /**
     * Intercepts NodeService calls by method name:
     * <ul>
     * <li>getProperty(cm:tagScopeSummary) — answers the calculated summary;</li>
     * <li>getProperties — injects cm:tagScopeSummary into the returned map;</li>
     * <li>setProperty(cm:tagScopeSummary) — swallowed (the property is read-only);</li>
     * <li>setProperties — strips cm:tagScopeSummary before proceeding;</li>
     * <li>anything else — passes straight through.</li>
     * </ul>
     */
    @SuppressWarnings("unchecked")
    @Override
    public Object invoke(MethodInvocation invocation) throws Throwable
    {
        Object ret;
        //If we're not enabled then exit here
        if (Boolean.FALSE.equals(getEnabled()))
        {
            return invocation.proceed();
        }
        String methodName = invocation.getMethod().getName();
        if ("getProperty".equals(methodName))
        {
            Object[] args = invocation.getArguments();
            NodeRef nodeRef = (NodeRef) args[0];
            QName propertyQName = (QName) args[1];
            //Is this a request for the calculated cm:tagScopeSummary property?
            if (ContentModel.PROP_TAGSCOPE_SUMMARY.equals(propertyQName))
            {
                ret = getTagSummary(nodeRef, null);
            }
            else
            {
                ret = invocation.proceed();
            }
        }
        else if ("getProperties".equals(methodName))
        {
            ret = invocation.proceed();
            // NOTE(review): ret.getClass() would NPE if the intercepted
            // getProperties ever returned null — presumably it never does.
            if (Map.class.isAssignableFrom(ret.getClass()))
            {
                Map<QName, Serializable> retMap = (Map<QName, Serializable>)ret;
                NodeRef nodeRef = (NodeRef) invocation.getArguments()[0];
                // Passing the freshly fetched map avoids a second property read.
                List<String> tagSummary = getTagSummary(nodeRef, retMap);
                if (tagSummary != null)
                {
                    retMap.put(ContentModel.PROP_TAGSCOPE_SUMMARY, (Serializable)tagSummary);
                }
            }
        }
        else if ("setProperty".equals(methodName))
        {
            Object[] args = invocation.getArguments();
            QName propertyQName = (QName) args[1];
            if (!ContentModel.PROP_TAGSCOPE_SUMMARY.equals(propertyQName))
            {
                ret = invocation.proceed();
            }
            else
            {
                // Attempts to write the spoofed property are silently ignored.
                ret = null;
            }
        }
        else if ("setProperties".equals(methodName))
        {
            Object[] args = invocation.getArguments();
            Map<QName, Serializable> properties = (Map<QName, Serializable>) args[1];
            if (properties != null)
            {
                // Never persist the calculated property.
                properties.remove(ContentModel.PROP_TAGSCOPE_SUMMARY);
            }
            ret = invocation.proceed();
        }
        else
        {
            ret = invocation.proceed();
        }
        return ret;
    }

    /**
     * Given a NodeRef and, optionally, the property map of that node, this operation establishes whether
     * the node is a TagScope node, and returns the appropriate value of the cm:tagScopeSummary property.
     * @param nodeRef NodeRef
     * @param allNodeProperties Optional. If the caller has a current property map for the node being queried
     *            then supplying it here saves a little time. This argument is allowed to be null.
     * @return immutable list of "&lt;tag name&gt;=&lt;tag count&gt;" strings, or null when the node
     *         has no cm:tagScopeCache property (i.e. is not a tag scope) or its content is missing.
     */
    protected List<String> getTagSummary(NodeRef nodeRef, Map<QName, Serializable> allNodeProperties)
    {
        List<String> tagSummary = null;
        ContentData tagScopeCache = null;
        if (allNodeProperties != null)
        {
            tagScopeCache = (ContentData) allNodeProperties.get(ContentModel.PROP_TAGSCOPE_CACHE);
        }
        else
        {
            tagScopeCache = (ContentData) nodeService.getProperty(nodeRef, ContentModel.PROP_TAGSCOPE_CACHE);
        }
        if (tagScopeCache != null)
        {
            // The content URL keys the cache, so a rewritten tag scope
            // naturally gets a fresh entry.
            String contentUrl = tagScopeCache.getContentUrl();
            tagSummary = cache.get(contentUrl);
            if (tagSummary == null)
            {
                ContentReader contentReader = contentService.getRawReader(contentUrl);
                if (contentReader != null && contentReader.exists())
                {
                    List<TagDetails> tagDetails = TaggingServiceImpl.readTagDetails(contentReader.getContentInputStream());
                    tagSummary = new ArrayList<String>(tagDetails.size());
                    for (TagDetails tagDetail : tagDetails)
                    {
                        tagSummary.add(tagDetail.getName() + "=" + tagDetail.getCount());
                    }
                    //Push into the cache
                    tagSummary = Collections.unmodifiableList(tagSummary);
                    cache.put(contentUrl, tagSummary);
                }
            }
        }
        return tagSummary;
    }

    /** @return whether the interceptor is enabled on the current thread. */
    public static final Boolean getEnabled()
    {
        return enabled.get();
    }

    /**
     * Allows the functionality of this interceptor to be enabled and disabled on a thread-by-thread basis.
     * The caller should ensure that the value is reset to its prior setting once it has finished using the
     * thread of execution.
     * @param enable Boolean
     * @return The setting prior to invoking this operation.
     */
    public static final Boolean setEnabled(Boolean enable)
    {
        Boolean oldSetting = enabled.get();
        enabled.set(enable);
        return oldSetting;
    }
}
Alfresco/community-edition
projects/repository/source/java/org/alfresco/repo/tagging/TagScopePropertyMethodInterceptor.java
Java
lgpl-3.0
8,523
/** * This file is part of FNLP (formerly FudanNLP). * * FNLP is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * FNLP is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU General Public License * along with FudanNLP. If not, see <http://www.gnu.org/licenses/>. * * Copyright 2009-2014 www.fnlp.org. All rights reserved. */ package org.fnlp.app.num; import javax.swing.*; import java.awt.*; import java.awt.event.*; import java.text.SimpleDateFormat; class MyFrame extends JFrame{ protected JTextArea textIn; protected JScrollPane jspIn; protected JTextArea textOut; protected JScrollPane jspOut; private CNExpression expr=new CNExpression(); JButton button; MyFrame(){ setTitle("中文算式识别"); Dimension screenSize=Toolkit.getDefaultToolkit().getScreenSize(); setSize(400, 300); setLocation((screenSize.width-getWidth())/2,(screenSize.height-getHeight())/2); init(); setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE); setVisible(true); } void init(){ textIn=new JTextArea(); textIn.setLineWrap(false); textIn.setWrapStyleWord(true); jspIn=new JScrollPane(textIn); //jspIn.setVerticalScrollBarPolicy(JScrollPane.VERTICAL_SCROLLBAR_ALWAYS); textOut=new JTextArea(); textOut.setLineWrap(false); textOut.setWrapStyleWord(true); textOut.setEditable(false); textOut.setText(""); jspOut=new JScrollPane(textOut); jspOut.setVerticalScrollBarPolicy(JScrollPane.VERTICAL_SCROLLBAR_ALWAYS); //jspOut.setHorizontalScrollBarPolicy(JScrollPane.HORIZONTAL_SCROLLBAR_ALWAYS); Panel panel=new Panel(); panel.setLayout(null); int sp=3; int width=getWidth()-15; int height=getHeight()-50; 
jspOut.setBounds(sp, sp, width-sp*2, height*5/6); jspIn.setBounds(sp, height*5/6+sp*2, width*4/5-sp*2, height*1/6); panel.add(jspIn); panel.add(jspOut); button=new JButton("发送"); button.addActionListener(new ButtonListener()); button.setBounds(width*4/5+sp, height*5/6+sp*2, width*1/5-sp*2, height*1/6); panel.add(button); add(panel); } class ButtonListener implements ActionListener{ public void actionPerformed(ActionEvent e){ if(e.getActionCommand().equals("发送")){ String str; str=textIn.getText(); str.intern(); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); String ly_time = sdf.format(new java.util.Date()); textOut.append("User "+ly_time+"\n"); textOut.append(str); textOut.append("\n"); String str2; if(expr.setExpr(str)){ expr.calculate(); str2=expr.getAnswerInChn(); } else str2=new String("不能识别请重新输入"); sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); ly_time = sdf.format(new java.util.Date()); textOut.append("System "+ly_time+"\n"); textOut.append(str2); textOut.append("\n"); } } } } public class Test { public static void main(String args[]){ MyFrame frame=new MyFrame(); } }
xpqiu/fnlp
fnlp-app/src/main/java/org/fnlp/app/num/Test.java
Java
lgpl-3.0
3,439
/// @author Alexander Rykovanov 2013 /// @email rykovanov.as@gmail.com /// @brief Opc binary cnnection channel. /// @license GNU LGPL /// /// Distributed under the GNU LGPL License /// (See accompanying file LICENSE or copy at /// http://www.gnu.org/licenses/lgpl.html) /// #ifndef __OPC_UA_BINARY_MESSAGE_IdENTIFIERS #define __OPC_UA_BINARY_MESSAGE_IdENTIFIERS namespace OpcUa { enum MessageId { INVALID = 0, ACTIVATE_SESSION_REQUEST = 0x1d3, //467; ACTIVATE_SESSION_RESPONSE = 0x1d6, //470; BROWSE_REQUEST = 0x20f, // 527; BROWSE_RESPONSE = 0x212, //530; BROWSE_NEXT_REQUEST = 0x215, // 533 BROWSE_NEXT_RESPONSE = 0x218, // 536 CLOSE_SECURE_CHANNEL_REQUEST = 0x1c4, // 452 CLOSE_SESSION_REQUEST = 0x1d9, // 473; CLOSE_SESSION_RESPONSE = 0x1dc, // 476; // Session services CREATE_SESSION_REQUEST = 0x1cd, // 461; CREATE_SESSION_RESPONSE = 0x1d0, // 464; // Endpoints services FIND_ServerS_REQUEST = 0x1A6, // 422 FIND_ServerS_RESPONSE = 0x1A9, // 425 GET_ENDPOINTS_REQUEST = 0x1ac, // 428 GET_ENDPOINTS_RESPONSE = 0x1af, // 431 // Secure channel services OPEN_SECURE_CHANNEL_REQUEST = 0x1be, // 446 OPEN_SECURE_CHANNEL_RESPONSE = 0x1c1, // 449 TRANSLATE_BROWSE_PATHS_TO_NODE_IdS_REQUEST = 0x22A, // 554 TRANSLATE_BROWSE_PATHS_TO_NODE_IdS_RESPONSE = 0x22D, // 557 REGISTER_NODES_REQUEST = 0x230, // 560 REGISTER_NODES_RESPONSE = 0x233, // 563 UNREGISTER_NODES_REQUEST = 0x234, // 564 UNREGISTER_NODES_RESPONSE = 0x237, // 567 READ_REQUEST = 0x277, // 631 READ_RESPONSE = 0x27A, // 634 WRITE_REQUEST = 0x2A1, //673 WRITE_RESPONSE = 0x2A4, // 676 CALL_REQUEST = 712, CALL_RESPONSE = 715, // 754 CREATE_MONITORED_ITEMS_REQUEST = 0x2EF, // 751 CREATE_MONITORED_ITEMS_RESPONSE = 0x2F2, // 754 DELETE_MONITORED_ITEMS_REQUEST = 0x30d, // 781 DELETE_MONITORED_ITEMS_RESPONSE = 0x310, // 784 CREATE_SUBSCRIPTION_REQUEST = 0x313, //787 CREATE_SUBSCRIPTION_RESPONSE = 0x316, //790 DELETE_SUBSCRIPTION_REQUEST = 0x34f, //847 DELETE_SUBSCRIPTION_RESPONSE = 0x352, //850 MODIFY_SUBSCRIPTION_REQUEST = 0x319, //793 
MODIFY_SUBSCRIPTION_RESPONSE = 0x31c, //796 PUBLISH_REQUEST = 0x33A, // 826 PUBLISH_RESPONSE = 0x33D, // 829 REPUBLISH_REQUEST = 832, REPUBLISH_RESPONSE = 835, SET_PUBLISHING_MODE_REQUEST = 0x31F, // 799 SET_PUBLISHING_MODE_RESPONSE = 0x322, // 802 ADD_NODES_REQUEST = 0x1e8, //488; ADD_NODES_RESPONSE = 0x1eb, //491; DELETE_NODES_REQUEST = 0x1f4, //500; DELETE_NODES_RESPONSE = 0x1f7, //503; ADD_REFERENCES_REQUEST = 0x1ee, //494; ADD_REFERENCES_RESPONSE = 0x1f1, //497; DELETE_REFERENCES_REQUEST = 0x1fa, //506; DELETE_REFERENCES_RESPONSE = 0x1fd, //509; SERVICE_FAULT = 0x18d, //397; }; struct NodeId; MessageId GetMessageId(const NodeId & id); } #endif // __OPC_UA_BINARY_MESSAGE_IdENTIFIERS
FreeOpcUa/freeopcua
include/opc/ua/protocol/message_identifiers.h
C
lgpl-3.0
2,870
/** * Copyright (c) 2012 */ package lab.s2jh.core.entity; import java.io.Serializable; import java.util.Date; import javax.persistence.Column; import javax.persistence.EntityListeners; import javax.persistence.MappedSuperclass; import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.Transient; import javax.persistence.Version; import lab.s2jh.core.annotation.MetaData; import lab.s2jh.core.audit.SaveUpdateAuditListener; import lab.s2jh.core.entity.annotation.SkipParamBind; import lab.s2jh.core.entity.def.DefaultAuditable; import lab.s2jh.core.web.json.DateTimeJsonSerializer; import lab.s2jh.core.web.rest.Jackson2LibHandler; import org.hibernate.envers.AuditOverride; import org.hibernate.envers.AuditOverrides; import com.fasterxml.jackson.annotation.JsonFilter; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonSerialize; @JsonFilter(Jackson2LibHandler.DEFAULT_JSON_FILTER_NAME) @JsonInclude(Include.NON_EMPTY) @EntityListeners({ SaveUpdateAuditListener.class }) @MappedSuperclass @AuditOverrides({ @AuditOverride(forClass = BaseEntity.class) }) public abstract class BaseEntity<ID extends Serializable> extends PersistableEntity<ID> implements DefaultAuditable<String, ID> { /** 乐观锁版本,初始设置为0 */ private int version = 0; @MetaData(value = "数据访问控制代码", tooltips = "用于分机构的数据访问控制代码") protected String aclCode; /** 数据访问控制类型 */ protected String aclType; protected String createdBy; protected Date createdDate; protected String lastModifiedBy; protected Date lastModifiedDate; public abstract void setId(final ID id); /* * (non-Javadoc) * * @see org.springframework.data.domain.Auditable#getCreatedBy() */ @JsonProperty @Column(updatable = false, name = "created_by") public String getCreatedBy() { return createdBy; } /* * 
(non-Javadoc) * * @see * org.springframework.data.domain.Auditable#setCreatedBy(java.lang.Object) */ @SkipParamBind public void setCreatedBy(final String createdBy) { this.createdBy = createdBy; } @Column(updatable = false, name = "created_dt") @Temporal(TemporalType.TIMESTAMP) @JsonSerialize(using = DateTimeJsonSerializer.class) @JsonProperty public Date getCreatedDate() { return createdDate; } @SkipParamBind public void setCreatedDate(final Date createdDate) { this.createdDate = createdDate; } /* * (non-Javadoc) * * @see org.springframework.data.domain.Auditable#getLastModifiedBy() */ @JsonIgnore @Column(name = "updated_by") public String getLastModifiedBy() { return lastModifiedBy; } @SkipParamBind public void setLastModifiedBy(final String lastModifiedBy) { this.lastModifiedBy = lastModifiedBy; } @JsonIgnore @Temporal(TemporalType.TIMESTAMP) @Column(name = "updated_dt") public Date getLastModifiedDate() { return lastModifiedDate; } public void setLastModifiedDate(final Date lastModifiedDate) { this.lastModifiedDate = lastModifiedDate; } @Column(length = 20, nullable = true) public String getAclCode() { return aclCode; } @SkipParamBind public void setAclCode(String aclCode) { this.aclCode = aclCode; } public String getAclType() { return aclType; } @SkipParamBind public void setAclType(String aclType) { this.aclType = aclType; } @Version @Column(nullable = true) @JsonProperty public int getVersion() { return version; } public void setVersion(int version) { this.version = version; } public void resetCommonProperties() { setId(null); version = 0; lastModifiedBy = null; lastModifiedDate = null; createdBy = null; createdDate = null; aclCode = null; aclType = null; addExtraAttribute(PersistableEntity.EXTRA_ATTRIBUTE_DIRTY_ROW, true); } private static final String[] PROPERTY_LIST = new String[] { "id", "version", "lastModifiedBy", "lastModifiedDate", "createdBy", "createdDate", "aclCode", "aclType" }; public String[] retriveCommonProperties() { return PROPERTY_LIST; } 
@Override @Transient @JsonProperty public String getDisplay() { return "[" + getId() + "]" + this.getClass().getSimpleName(); } }
xautlx/s2jh
core-service/src/main/java/lab/s2jh/core/entity/BaseEntity.java
Java
lgpl-3.0
4,989
<div class="navbar-layout-phone" data-options="dxLayout : { name: 'pivot', platform: 'win', phone: true }"> <div class="layout-content" data-options="dxContentPlaceholder : { name: 'content' }"> </div> <div class="layout-footer" data-options="dxContentPlaceholder : { name: 'footer' }"> <div data-bind="dxToolbar: { items: [], renderAs: 'bottomToolbar' }" data-options="dxCommandContainer : { id: 'win8-phone-appbar' }" class="layout-toolbar-bottom"> </div> </div> </div>
has191210/bdademo
src/main/resources/public/bower_components/devextreme/layouts/Pivot/PivotLayout.html
HTML
unlicense
510
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.7.0_17) on Wed Mar 20 16:04:59 PDT 2013 --> <META http-equiv="Content-Type" content="text/html; charset=UTF-8"> <TITLE> Shape (rectangles) </TITLE> <META NAME="date" CONTENT="2013-03-20"> <LINK REL ="stylesheet" TYPE="text/css" HREF="stylesheet.css" TITLE="Style"> <SCRIPT type="text/javascript"> function windowTitle() { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Shape (rectangles)"; } } </SCRIPT> <NOSCRIPT> </NOSCRIPT> </HEAD> <BODY BGCOLOR="white" onload="windowTitle();"> <HR> <!-- ========= START OF TOP NAVBAR ======= --> <A NAME="navbar_top"><!-- --></A> <A HREF="#skip-navbar_top" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_top_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;<A HREF="RectangleDemo.html" title="class in &lt;Unnamed&gt;"><B>PREV CLASS</B></A>&nbsp; &nbsp;NEXT 
CLASS</FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="index.html?Shape.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="Shape.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> <TR> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> SUMMARY:&nbsp;NESTED&nbsp;|&nbsp;FIELD&nbsp;|&nbsp;CONSTR&nbsp;|&nbsp;<A HREF="#method_summary">METHOD</A></FONT></TD> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> DETAIL:&nbsp;FIELD&nbsp;|&nbsp;CONSTR&nbsp;|&nbsp;<A HREF="#method_detail">METHOD</A></FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_top"></A> <!-- ========= END OF TOP NAVBAR ========= --> <HR> <!-- ======== START OF CLASS DATA ======== --> <H2> Interface Shape</H2> <DL> <DT><B>All Known Implementing Classes:</B> <DD><A HREF="Rectangle.html" title="class in &lt;Unnamed&gt;">Rectangle</A></DD> </DL> <HR> <DL> <DT><PRE>public interface <B>Shape</B></DL> </PRE> <P> A shape that can be drawn on a canvas. 
<P> <P> <HR> <P> <!-- ========== METHOD SUMMARY =========== --> <A NAME="method_summary"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2"> <B>Method Summary</B></FONT></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;double</CODE></FONT></TD> <TD><CODE><B><A HREF="Shape.html#getHeight()">getHeight</A></B>()</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Gets the height of the shape.</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;double</CODE></FONT></TD> <TD><CODE><B><A HREF="Shape.html#getWidth()">getWidth</A></B>()</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Gets the width of the shape.</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;double</CODE></FONT></TD> <TD><CODE><B><A HREF="Shape.html#getX()">getX</A></B>()</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Gets the leftmost x-position of the shape.</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;double</CODE></FONT></TD> <TD><CODE><B><A HREF="Shape.html#getY()">getY</A></B>()</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Gets the topmost y-position of the shape.</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;void</CODE></FONT></TD> <TD><CODE><B><A HREF="Shape.html#paintShape(java.awt.Graphics2D)">paintShape</A></B>(<A HREF="http://download.oracle.com/javase/7/docs/api/java/awt/Graphics2D.html?is-external=true" title="class or interface in java.awt">Graphics2D</A>&nbsp;g2)</CODE> <BR> 
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Paints the shape</TD> </TR> </TABLE> &nbsp; <P> <!-- ============ METHOD DETAIL ========== --> <A NAME="method_detail"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2"> <B>Method Detail</B></FONT></TH> </TR> </TABLE> <A NAME="getHeight()"><!-- --></A><H3> getHeight</H3> <PRE> double <B>getHeight</B>()</PRE> <DL> <DD>Gets the height of the shape. <P> <DD><DL> <DT><B>Returns:</B><DD>the height</DL> </DD> </DL> <HR> <A NAME="getWidth()"><!-- --></A><H3> getWidth</H3> <PRE> double <B>getWidth</B>()</PRE> <DL> <DD>Gets the width of the shape. <P> <DD><DL> <DT><B>Returns:</B><DD>the width</DL> </DD> </DL> <HR> <A NAME="getX()"><!-- --></A><H3> getX</H3> <PRE> double <B>getX</B>()</PRE> <DL> <DD>Gets the leftmost x-position of the shape. <P> <DD><DL> <DT><B>Returns:</B><DD>the leftmost x-position</DL> </DD> </DL> <HR> <A NAME="getY()"><!-- --></A><H3> getY</H3> <PRE> double <B>getY</B>()</PRE> <DL> <DD>Gets the topmost y-position of the shape. 
<P> <DD><DL> <DT><B>Returns:</B><DD>the topmost y-position</DL> </DD> </DL> <HR> <A NAME="paintShape(java.awt.Graphics2D)"><!-- --></A><H3> paintShape</H3> <PRE> void <B>paintShape</B>(<A HREF="http://download.oracle.com/javase/7/docs/api/java/awt/Graphics2D.html?is-external=true" title="class or interface in java.awt">Graphics2D</A>&nbsp;g2)</PRE> <DL> <DD>Paints the shape <P> <DD><DL> <DT><B>Parameters:</B><DD><CODE>g2</CODE> - the graphics object</DL> </DD> </DL> <!-- ========= END OF CLASS DATA ========= --> <HR> <!-- ======= START OF BOTTOM NAVBAR ====== --> <A NAME="navbar_bottom"><!-- --></A> <A HREF="#skip-navbar_bottom" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_bottom_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;<A HREF="RectangleDemo.html" title="class in &lt;Unnamed&gt;"><B>PREV CLASS</B></A>&nbsp; &nbsp;NEXT CLASS</FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="index.html?Shape.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A 
HREF="Shape.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> <TR> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> SUMMARY:&nbsp;NESTED&nbsp;|&nbsp;FIELD&nbsp;|&nbsp;CONSTR&nbsp;|&nbsp;<A HREF="#method_summary">METHOD</A></FONT></TD> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> DETAIL:&nbsp;FIELD&nbsp;|&nbsp;CONSTR&nbsp;|&nbsp;<A HREF="#method_detail">METHOD</A></FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_bottom"></A> <!-- ======== END OF BOTTOM NAVBAR ======= --> <HR> </BODY> </HTML>
W0mpRat/IntroToProgramming
Code/lesson4/checkerboard/doc/Shape.html
HTML
unlicense
9,230
// Copyright 2015 The etcd Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Package version implements etcd version parsing and contains latest version // information. package version import ( "fmt" "strings" "github.com/coreos/go-semver/semver" ) var ( // MinClusterVersion is the min cluster version this etcd binary is compatible with. MinClusterVersion = "3.0.0" Version = "3.1.0-rc.0+git" APIVersion = "unknown" // Git SHA Value will be set during build GitSHA = "Not provided (use ./build instead of go build)" ) func init() { ver, err := semver.NewVersion(Version) if err == nil { APIVersion = fmt.Sprintf("%d.%d", ver.Major, ver.Minor) } } type Versions struct { Server string `json:"etcdserver"` Cluster string `json:"etcdcluster"` // TODO: raft state machine version } // Cluster only keeps the major.minor. func Cluster(v string) string { vs := strings.Split(v, ".") if len(vs) <= 2 { return v } return fmt.Sprintf("%s.%s", vs[0], vs[1]) }
OyTao/etcd_learning
version/version.go
GO
apache-2.0
1,520
/* * Copyright (c) 2007, 2016, Oracle and/or its affiliates. All rights reserved. * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms. */ /* * Copyright 2001-2004 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * $Id: Filter.java,v 1.2.4.1 2005/09/06 06:18:58 pvedula Exp $ */ package com.sun.org.apache.xalan.internal.xsltc.dom; /** * @author Jacek Ambroziak * @author Santiago Pericas-Geertsen */ public interface Filter { public boolean test(int node); }
shun634501730/java_source_cn
src_en/com/sun/org/apache/xalan/internal/xsltc/dom/Filter.java
Java
apache-2.0
1,037
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.hive; import com.facebook.presto.hive.metastore.HiveMetastore; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.SchemaNotFoundException; import com.facebook.presto.spi.SchemaTableName; import com.facebook.presto.spi.StandardErrorCode; import com.facebook.presto.spi.block.Block; import com.facebook.presto.spi.type.BigintType; import com.facebook.presto.spi.type.BooleanType; import com.facebook.presto.spi.type.DateType; import com.facebook.presto.spi.type.DoubleType; import com.facebook.presto.spi.type.TimestampType; import com.facebook.presto.spi.type.Type; import com.facebook.presto.spi.type.VarbinaryType; import com.facebook.presto.spi.type.VarcharType; import com.google.common.base.StandardSystemProperty; import com.google.common.collect.ImmutableList; import com.google.common.primitives.Ints; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hive.metastore.ProtectMode; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.SkewedInfo; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; import 
org.apache.hadoop.hive.ql.io.HiveOutputFormat; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.io.BooleanWritable; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.Reporter; import org.joda.time.DateTimeZone; import java.io.IOException; import java.sql.Date; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Properties; import java.util.concurrent.TimeUnit; import static com.facebook.presto.hive.HiveErrorCode.HIVE_DATABASE_LOCATION_ERROR; import static com.facebook.presto.hive.HiveErrorCode.HIVE_FILESYSTEM_ERROR; import static com.facebook.presto.hive.HiveErrorCode.HIVE_INVALID_METADATA; import static com.facebook.presto.hive.HiveErrorCode.HIVE_PATH_ALREADY_EXISTS; import static com.facebook.presto.hive.HiveErrorCode.HIVE_WRITER_DATA_ERROR; import static com.facebook.presto.hive.HiveSplitManager.PRESTO_OFFLINE; import static 
com.facebook.presto.hive.HiveUtil.checkCondition; import static com.facebook.presto.hive.HiveUtil.isArrayType; import static com.facebook.presto.hive.HiveUtil.isMapType; import static com.facebook.presto.hive.HiveUtil.isRowType; import static com.facebook.presto.hive.util.Types.checkType; import static com.facebook.presto.spi.StandardErrorCode.NOT_SUPPORTED; import static com.google.common.base.Strings.isNullOrEmpty; import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static java.util.UUID.randomUUID; import static java.util.stream.Collectors.toList; import static org.apache.hadoop.hive.metastore.MetaStoreUtils.getProtectMode; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaBooleanObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDateObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDoubleObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaLongObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaTimestampObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableBinaryObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableBooleanObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDateObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDoubleObjectInspector; import static 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableLongObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableStringObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableTimestampObjectInspector; import static org.joda.time.DateTimeZone.UTC; public final class HiveWriteUtils { @SuppressWarnings("OctalInteger") private static final FsPermission ALL_PERMISSIONS = new FsPermission((short) 0777); private HiveWriteUtils() { } public static RecordWriter createRecordWriter(Path target, JobConf conf, boolean compress, Properties properties, String outputFormatName) { try { Object writer = Class.forName(outputFormatName).getConstructor().newInstance(); return ((HiveOutputFormat<?, ?>) writer).getHiveRecordWriter(conf, target, Text.class, compress, properties, Reporter.NULL); } catch (IOException | ReflectiveOperationException e) { throw new PrestoException(HIVE_WRITER_DATA_ERROR, e); } } public static ObjectInspector getJavaObjectInspector(Type type) { if (type.equals(BooleanType.BOOLEAN)) { return javaBooleanObjectInspector; } else if (type.equals(BigintType.BIGINT)) { return javaLongObjectInspector; } else if (type.equals(DoubleType.DOUBLE)) { return javaDoubleObjectInspector; } else if (type instanceof VarcharType) { return writableStringObjectInspector; } else if (type.equals(VarbinaryType.VARBINARY)) { return javaByteArrayObjectInspector; } else if (type.equals(DateType.DATE)) { return javaDateObjectInspector; } else if (type.equals(TimestampType.TIMESTAMP)) { return javaTimestampObjectInspector; } else if (isArrayType(type)) { return ObjectInspectorFactory.getStandardListObjectInspector(getJavaObjectInspector(type.getTypeParameters().get(0))); } else if (isMapType(type)) { ObjectInspector keyObjectInspector = getJavaObjectInspector(type.getTypeParameters().get(0)); ObjectInspector 
valueObjectInspector = getJavaObjectInspector(type.getTypeParameters().get(1)); return ObjectInspectorFactory.getStandardMapObjectInspector(keyObjectInspector, valueObjectInspector); } else if (isRowType(type)) { return ObjectInspectorFactory.getStandardStructObjectInspector( type.getTypeSignature().getParameters().stream() .map(parameter -> parameter.getNamedTypeSignature().getName()) .collect(toList()), type.getTypeParameters().stream() .map(HiveWriteUtils::getJavaObjectInspector) .collect(toList())); } throw new IllegalArgumentException("unsupported type: " + type); } public static Object getField(Type type, Block block, int position) { if (block.isNull(position)) { return null; } if (BooleanType.BOOLEAN.equals(type)) { return type.getBoolean(block, position); } if (BigintType.BIGINT.equals(type)) { return type.getLong(block, position); } if (DoubleType.DOUBLE.equals(type)) { return type.getDouble(block, position); } if (type instanceof VarcharType) { return new Text(type.getSlice(block, position).getBytes()); } if (VarbinaryType.VARBINARY.equals(type)) { return type.getSlice(block, position).getBytes(); } if (DateType.DATE.equals(type)) { long days = type.getLong(block, position); return new Date(UTC.getMillisKeepLocal(DateTimeZone.getDefault(), TimeUnit.DAYS.toMillis(days))); } if (TimestampType.TIMESTAMP.equals(type)) { long millisUtc = type.getLong(block, position); return new Timestamp(millisUtc); } if (isArrayType(type)) { Type elementType = type.getTypeParameters().get(0); Block arrayBlock = block.getObject(position, Block.class); List<Object> list = new ArrayList<>(arrayBlock.getPositionCount()); for (int i = 0; i < arrayBlock.getPositionCount(); i++) { Object element = getField(elementType, arrayBlock, i); list.add(element); } return Collections.unmodifiableList(list); } if (isMapType(type)) { Type keyType = type.getTypeParameters().get(0); Type valueType = type.getTypeParameters().get(1); Block mapBlock = block.getObject(position, Block.class); 
Map<Object, Object> map = new HashMap<>(); for (int i = 0; i < mapBlock.getPositionCount(); i += 2) { Object key = getField(keyType, mapBlock, i); Object value = getField(valueType, mapBlock, i + 1); map.put(key, value); } return Collections.unmodifiableMap(map); } if (isRowType(type)) { Block rowBlock = block.getObject(position, Block.class); List<Type> fieldTypes = type.getTypeParameters(); checkCondition(fieldTypes.size() == rowBlock.getPositionCount(), StandardErrorCode.INTERNAL_ERROR, "Expected row value field count does not match type field count"); List<Object> row = new ArrayList<>(rowBlock.getPositionCount()); for (int i = 0; i < rowBlock.getPositionCount(); i++) { Object element = getField(fieldTypes.get(i), rowBlock, i); row.add(element); } return Collections.unmodifiableList(row); } throw new PrestoException(NOT_SUPPORTED, "unsupported type: " + type); } public static void checkTableIsWritable(Table table) { checkWritable( new SchemaTableName(table.getDbName(), table.getTableName()), Optional.empty(), getProtectMode(table), table.getParameters(), table.getSd()); } public static void checkPartitionIsWritable(String partitionName, Partition partition) { checkWritable( new SchemaTableName(partition.getDbName(), partition.getTableName()), Optional.of(partitionName), getProtectMode(partition), partition.getParameters(), partition.getSd()); } private static void checkWritable( SchemaTableName tableName, Optional<String> partitionName, ProtectMode protectMode, Map<String, String> parameters, StorageDescriptor storageDescriptor) { String tablePartitionDescription = "Table '" + tableName + "'"; if (partitionName.isPresent()) { tablePartitionDescription += " partition '" + partitionName.get() + "'"; } // verify online if (protectMode.offline) { throw new TableOfflineException(tableName, format("%s is offline", tablePartitionDescription)); } String prestoOffline = parameters.get(PRESTO_OFFLINE); if (!isNullOrEmpty(prestoOffline)) { throw new 
TableOfflineException(tableName, format("%s is offline for Presto: %s", tablePartitionDescription, prestoOffline)); } // verify not read only if (protectMode.readOnly) { throw new HiveReadOnlyException(tableName, partitionName); } // verify storage descriptor is valid if (storageDescriptor == null) { throw new PrestoException(HIVE_INVALID_METADATA, format("%s does not contain a valid storage descriptor", tablePartitionDescription)); } // verify bucketing List<String> bucketColumns = storageDescriptor.getBucketCols(); if (bucketColumns != null && !bucketColumns.isEmpty()) { throw new PrestoException(NOT_SUPPORTED, format("Inserting into bucketed tables is not supported. %s", tablePartitionDescription)); } // verify sorting List<Order> sortColumns = storageDescriptor.getSortCols(); if (sortColumns != null && !sortColumns.isEmpty()) { throw new PrestoException(NOT_SUPPORTED, format("Inserting into bucketed sorted tables is not supported. %s", tablePartitionDescription)); } // verify skew info SkewedInfo skewedInfo = storageDescriptor.getSkewedInfo(); if (skewedInfo != null && skewedInfo.getSkewedColNames() != null && !skewedInfo.getSkewedColNames().isEmpty()) { throw new PrestoException(NOT_SUPPORTED, format("Inserting into bucketed tables with skew is not supported. 
%s", tablePartitionDescription)); } } public static Path getTableDefaultLocation(HiveMetastore metastore, HdfsEnvironment hdfsEnvironment, String schemaName, String tableName) { String location = getDatabase(metastore, schemaName).getLocationUri(); if (isNullOrEmpty(location)) { throw new PrestoException(HIVE_DATABASE_LOCATION_ERROR, format("Database '%s' location is not set", schemaName)); } Path databasePath = new Path(location); if (!pathExists(hdfsEnvironment, databasePath)) { throw new PrestoException(HIVE_DATABASE_LOCATION_ERROR, format("Database '%s' location does not exist: %s", schemaName, databasePath)); } if (!isDirectory(hdfsEnvironment, databasePath)) { throw new PrestoException(HIVE_DATABASE_LOCATION_ERROR, format("Database '%s' location is not a directory: %s", schemaName, databasePath)); } return new Path(databasePath, tableName); } private static Database getDatabase(HiveMetastore metastore, String database) { return metastore.getDatabase(database).orElseThrow(() -> new SchemaNotFoundException(database)); } public static boolean pathExists(HdfsEnvironment hdfsEnvironment, Path path) { try { return hdfsEnvironment.getFileSystem(path).exists(path); } catch (IOException e) { throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed checking path: " + path, e); } } private static boolean isDirectory(HdfsEnvironment hdfsEnvironment, Path path) { try { return hdfsEnvironment.getFileSystem(path).isDirectory(path); } catch (IOException e) { throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed checking path: " + path, e); } } public static void renameDirectory(HdfsEnvironment hdfsEnvironment, String schemaName, String tableName, Path source, Path target) { if (pathExists(hdfsEnvironment, target)) { throw new PrestoException(HIVE_PATH_ALREADY_EXISTS, format("Unable to commit creation of table '%s.%s': target directory already exists: %s", schemaName, tableName, target)); } if (!pathExists(hdfsEnvironment, target.getParent())) { 
createDirectory(hdfsEnvironment, target.getParent()); } try { if (!hdfsEnvironment.getFileSystem(source).rename(source, target)) { throw new PrestoException(HIVE_FILESYSTEM_ERROR, format("Failed to rename %s to %s: rename returned false", source, target)); } } catch (IOException e) { throw new PrestoException(HIVE_FILESYSTEM_ERROR, format("Failed to rename %s to %s", source, target), e); } } public static Path createTemporaryPath(HdfsEnvironment hdfsEnvironment, Path targetPath) { // use a per-user temporary directory to avoid permission problems // TODO: this should use Hadoop UserGroupInformation String temporaryPrefix = "/tmp/presto-" + StandardSystemProperty.USER_NAME.value(); // create a temporary directory on the same filesystem Path temporaryRoot = new Path(targetPath, temporaryPrefix); Path temporaryPath = new Path(temporaryRoot, randomUUID().toString()); createDirectory(hdfsEnvironment, temporaryPath); return temporaryPath; } public static void createDirectory(HdfsEnvironment hdfsEnvironment, Path path) { try { if (!hdfsEnvironment.getFileSystem(path).mkdirs(path, ALL_PERMISSIONS)) { throw new IOException("mkdirs returned false"); } } catch (IOException e) { throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed to create directory: " + path, e); } // explicitly set permission since the default umask overrides it on creation try { hdfsEnvironment.getFileSystem(path).setPermission(path, ALL_PERMISSIONS); } catch (IOException e) { throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed to set permission on directory: " + path, e); } } public static boolean isWritableType(HiveType hiveType) { return isWritableType(hiveType.getTypeInfo()); } private static boolean isWritableType(TypeInfo typeInfo) { switch (typeInfo.getCategory()) { case PRIMITIVE: PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory(); return isWritablePrimitiveType(primitiveCategory); case MAP: MapTypeInfo mapTypeInfo = checkType(typeInfo, 
MapTypeInfo.class, "typeInfo"); return isWritableType(mapTypeInfo.getMapKeyTypeInfo()) && isWritableType(mapTypeInfo.getMapValueTypeInfo()); case LIST: ListTypeInfo listTypeInfo = checkType(typeInfo, ListTypeInfo.class, "typeInfo"); return isWritableType(listTypeInfo.getListElementTypeInfo()); case STRUCT: StructTypeInfo structTypeInfo = checkType(typeInfo, StructTypeInfo.class, "typeInfo"); return structTypeInfo.getAllStructFieldTypeInfos().stream().allMatch(HiveType::isSupportedType); } return false; } private static boolean isWritablePrimitiveType(PrimitiveCategory primitiveCategory) { switch (primitiveCategory) { case BOOLEAN: case LONG: case DOUBLE: case STRING: case DATE: case TIMESTAMP: case BINARY: return true; } return false; } public static List<ObjectInspector> getRowColumnInspectors(List<Type> types) { return types.stream() .map(HiveWriteUtils::getRowColumnInspector) .collect(toList()); } public static ObjectInspector getRowColumnInspector(Type type) { if (type.equals(BooleanType.BOOLEAN)) { return writableBooleanObjectInspector; } if (type.equals(BigintType.BIGINT)) { return writableLongObjectInspector; } if (type.equals(DoubleType.DOUBLE)) { return writableDoubleObjectInspector; } if (type.equals(VarcharType.VARCHAR)) { return writableStringObjectInspector; } if (type.equals(VarbinaryType.VARBINARY)) { return writableBinaryObjectInspector; } if (type.equals(DateType.DATE)) { return writableDateObjectInspector; } if (type.equals(TimestampType.TIMESTAMP)) { return writableTimestampObjectInspector; } if (isArrayType(type) || isMapType(type) || isRowType(type)) { return getJavaObjectInspector(type); } throw new IllegalArgumentException("unsupported type: " + type); } public static FieldSetter createFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type type) { if (type.equals(BooleanType.BOOLEAN)) { return new BooleanFieldSetter(rowInspector, row, field); } if (type.equals(BigintType.BIGINT)) { return new 
BigintFieldBuilder(rowInspector, row, field); } if (type.equals(DoubleType.DOUBLE)) { return new DoubleFieldSetter(rowInspector, row, field); } if (type instanceof VarcharType) { return new VarcharFieldSetter(rowInspector, row, field); } if (type.equals(VarbinaryType.VARBINARY)) { return new BinaryFieldSetter(rowInspector, row, field); } if (type.equals(DateType.DATE)) { return new DateFieldSetter(rowInspector, row, field); } if (type.equals(TimestampType.TIMESTAMP)) { return new TimestampFieldSetter(rowInspector, row, field); } if (isArrayType(type)) { return new ArrayFieldSetter(rowInspector, row, field, type.getTypeParameters().get(0)); } if (isMapType(type)) { return new MapFieldSetter(rowInspector, row, field, type.getTypeParameters().get(0), type.getTypeParameters().get(1)); } if (isRowType(type)) { return new RowFieldSetter(rowInspector, row, field, type.getTypeParameters()); } throw new IllegalArgumentException("unsupported type: " + type); } public abstract static class FieldSetter { protected final SettableStructObjectInspector rowInspector; protected final Object row; protected final StructField field; protected FieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field) { this.rowInspector = requireNonNull(rowInspector, "rowInspector is null"); this.row = requireNonNull(row, "row is null"); this.field = requireNonNull(field, "field is null"); } public abstract void setField(Block block, int position); } private static class BooleanFieldSetter extends FieldSetter { private final BooleanWritable value = new BooleanWritable(); public BooleanFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field) { super(rowInspector, row, field); } @Override public void setField(Block block, int position) { value.set(BooleanType.BOOLEAN.getBoolean(block, position)); rowInspector.setStructFieldData(row, field, value); } } private static class BigintFieldBuilder extends FieldSetter { private final LongWritable value = 
new LongWritable(); public BigintFieldBuilder(SettableStructObjectInspector rowInspector, Object row, StructField field) { super(rowInspector, row, field); } @Override public void setField(Block block, int position) { value.set(BigintType.BIGINT.getLong(block, position)); rowInspector.setStructFieldData(row, field, value); } } private static class DoubleFieldSetter extends FieldSetter { private final DoubleWritable value = new DoubleWritable(); public DoubleFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field) { super(rowInspector, row, field); } @Override public void setField(Block block, int position) { value.set(DoubleType.DOUBLE.getDouble(block, position)); rowInspector.setStructFieldData(row, field, value); } } private static class VarcharFieldSetter extends FieldSetter { private final Text value = new Text(); public VarcharFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field) { super(rowInspector, row, field); } @Override public void setField(Block block, int position) { value.set(VarcharType.VARCHAR.getSlice(block, position).getBytes()); rowInspector.setStructFieldData(row, field, value); } } private static class BinaryFieldSetter extends FieldSetter { private final BytesWritable value = new BytesWritable(); public BinaryFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field) { super(rowInspector, row, field); } @Override public void setField(Block block, int position) { byte[] bytes = VarbinaryType.VARBINARY.getSlice(block, position).getBytes(); value.set(bytes, 0, bytes.length); rowInspector.setStructFieldData(row, field, value); } } private static class DateFieldSetter extends FieldSetter { private final DateWritable value = new DateWritable(); public DateFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field) { super(rowInspector, row, field); } @Override public void setField(Block block, int position) { 
value.set(Ints.checkedCast(DateType.DATE.getLong(block, position))); rowInspector.setStructFieldData(row, field, value); } } private static class TimestampFieldSetter extends FieldSetter { private final TimestampWritable value = new TimestampWritable(); public TimestampFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field) { super(rowInspector, row, field); } @Override public void setField(Block block, int position) { long millisUtc = TimestampType.TIMESTAMP.getLong(block, position); value.setTime(millisUtc); rowInspector.setStructFieldData(row, field, value); } } private static class ArrayFieldSetter extends FieldSetter { private final Type elementType; public ArrayFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type elementType) { super(rowInspector, row, field); this.elementType = requireNonNull(elementType, "elementType is null"); } @Override public void setField(Block block, int position) { Block arrayBlock = block.getObject(position, Block.class); List<Object> list = new ArrayList<>(arrayBlock.getPositionCount()); for (int i = 0; i < arrayBlock.getPositionCount(); i++) { Object element = getField(elementType, arrayBlock, i); list.add(element); } rowInspector.setStructFieldData(row, field, list); } } private static class MapFieldSetter extends FieldSetter { private final Type keyType; private final Type valueType; public MapFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type keyType, Type valueType) { super(rowInspector, row, field); this.keyType = requireNonNull(keyType, "keyType is null"); this.valueType = requireNonNull(valueType, "valueType is null"); } @Override public void setField(Block block, int position) { Block mapBlock = block.getObject(position, Block.class); Map<Object, Object> map = new HashMap<>(mapBlock.getPositionCount() * 2); for (int i = 0; i < mapBlock.getPositionCount(); i += 2) { Object key = getField(keyType, mapBlock, i); 
Object value = getField(valueType, mapBlock, i + 1); map.put(key, value); } rowInspector.setStructFieldData(row, field, map); } } private static class RowFieldSetter extends FieldSetter { private final List<Type> fieldTypes; public RowFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, List<Type> fieldTypes) { super(rowInspector, row, field); this.fieldTypes = ImmutableList.copyOf(fieldTypes); } @Override public void setField(Block block, int position) { Block rowBlock = block.getObject(position, Block.class); // TODO reuse row object and use FieldSetters, like we do at the top level // Ideally, we'd use the same recursive structure starting from the top, but // this requires modeling row types in the same way we model table rows // (multiple blocks vs all fields packed in a single block) List<Object> value = new ArrayList<>(fieldTypes.size()); for (int i = 0; i < fieldTypes.size(); i++) { Object element = getField(fieldTypes.get(i), rowBlock, i); value.add(element); } rowInspector.setStructFieldData(row, field, value); } } }
suyucs/presto
presto-hive/src/main/java/com/facebook/presto/hive/HiveWriteUtils.java
Java
apache-2.0
31,459
<?php
namespace Codeception\Module;

/**
 * Codeception helper module for the TestGuy actor.
 *
 * Intentionally empty: custom assertion and utility methods defined here are
 * picked up by Codeception and become available on the TestGuy actor in tests.
 */
class TestHelper extends \Codeception\Module
{

}
tambora-org/dockerCRE
volumes/www/survey/tmp/assets/d8a56be1/tests/_helpers/TestHelper.php
PHP
apache-2.0
148
package org.ovirt.engine.api.restapi.resource; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.ws.rs.core.Response; import org.ovirt.engine.api.common.security.auth.Principal; import org.ovirt.engine.api.model.BaseResource; import org.ovirt.engine.api.model.Group; import org.ovirt.engine.api.model.Permission; import org.ovirt.engine.api.model.Permissions; import org.ovirt.engine.api.model.User; import org.ovirt.engine.api.resource.PermissionResource; import org.ovirt.engine.api.resource.AssignedPermissionsResource; import org.ovirt.engine.core.common.VdcObjectType; import org.ovirt.engine.core.common.action.PermissionsOperationsParametes; import org.ovirt.engine.core.common.action.VdcActionType; import org.ovirt.engine.core.common.businessentities.DbUser; import org.ovirt.engine.core.common.businessentities.ad_groups; import org.ovirt.engine.core.common.businessentities.permissions; import org.ovirt.engine.core.common.interfaces.SearchType; import org.ovirt.engine.core.common.queries.MultilevelAdministrationByPermissionIdParameters; import org.ovirt.engine.core.common.queries.VdcQueryParametersBase; import org.ovirt.engine.core.common.queries.VdcQueryType; import org.ovirt.engine.core.common.users.VdcUser; import org.ovirt.engine.core.compat.Guid; import org.ovirt.engine.core.compat.StringHelper; public class BackendAssignedPermissionsResource extends AbstractBackendCollectionResource<Permission, permissions> implements AssignedPermissionsResource { private Guid targetId; private VdcQueryType queryType; private VdcQueryParametersBase queryParams; private Class<? extends BaseResource> suggestedParentType; private VdcObjectType objectType; public BackendAssignedPermissionsResource(Guid targetId, VdcQueryType queryType, VdcQueryParametersBase queryParams, Class<? 
extends BaseResource> suggestedParentType) { this(targetId, queryType, queryParams, suggestedParentType, null); } public BackendAssignedPermissionsResource(Guid targetId, VdcQueryType queryType, VdcQueryParametersBase queryParams, Class<? extends BaseResource> suggestedParentType, VdcObjectType objectType) { super(Permission.class, permissions.class); this.targetId = targetId; this.queryType = queryType; this.queryParams = queryParams; this.suggestedParentType = suggestedParentType; this.objectType = objectType; } @Override public Permissions list() { return mapCollection(getBackendCollection(queryType, queryParams)); } @Override public Response add(Permission permission) { validateParameters(permission, isPrincipalSubCollection() ? new String[] {"role.id", "dataCenter|cluster|host|storageDomain|vm|vmpool|template.id"} : new String[] {"role.id", "user|group.id"}); permissions entity = map(permission, getPermissionsTemplate(permission)); return performCreation(VdcActionType.AddPermission, getPrincipal(entity, permission), new QueryIdResolver(VdcQueryType.GetPermissionById, MultilevelAdministrationByPermissionIdParameters.class)); } @Override public Response performRemove(String id) { return performAction(VdcActionType.RemovePermission, new PermissionsOperationsParametes(getPermissions(id))); } @Override @SingleEntityResource public PermissionResource getPermissionSubResource(String id) { return inject(new BackendPermissionResource(id, this, suggestedParentType)); } protected Permissions mapCollection(List<permissions> entities) { Permissions collection = new Permissions(); Map<Guid, DbUser> users = getUsers(); for (permissions entity : entities) { if (entity.getObjectType() != VdcObjectType.System) { Permission permission = map(entity, users.containsKey(entity.getad_element_id()) ? users.get(entity.getad_element_id()) : null); collection.getPermissions().add(addLinks(permission, permission.getUser() != null ? 
suggestedParentType : Group.class)); } } return collection; } public Map<Guid, DbUser> getUsers() { HashMap<Guid, DbUser> users = new HashMap<Guid, DbUser>(); for (DbUser user : asCollection(DbUser.class, getEntity(List.class, SearchType.DBUser, "users:"))) { users.put(user.getuser_id(), user); } return users; } /** * injects user/group base on permission owner type * @param entity the permission to map * @param user the permission owner * @return permission */ public Permission map(permissions entity, DbUser user) { Permission template = new Permission(); if (entity.getad_element_id() != null && user != null) { if (isUser(user)) { template.setUser(new User()); template.getUser().setId(entity.getad_element_id().toString()); } else if (entity.getad_element_id() != null) { template.setGroup(new Group()); template.getGroup().setId(entity.getad_element_id().toString()); } } return map(entity, template); } //REVISIT: fix once BE can distinguish between the user and group private boolean isUser(DbUser user) { return StringHelper.isNullOrEmpty(user.getusername()) ? false : true; } /** * @pre completeness of "user|group.id" already validated if not * user sub-collection */ protected PermissionsOperationsParametes getPrincipal(permissions entity, Permission permission) { PermissionsOperationsParametes ret = null; if (isUserSubCollection() || permission.isSetUser()) { VdcUser user = new VdcUser(); user.setUserId(isUserSubCollection() ? targetId : asGuid(permission.getUser().getId())); user.setDomainControler(getCurrent().get(Principal.class).getDomain()); ret = new PermissionsOperationsParametes(entity, user); } else if (isGroupSubCollection() || permission.isSetGroup()) { ad_groups group = new ad_groups(); group.setid(isGroupSubCollection() ? 
targetId : asGuid(permission.getGroup().getId())); group.setdomain(getCurrent().get(Principal.class).getDomain()); ret = new PermissionsOperationsParametes(entity, group); } return ret; } @Override public Permission addParents(Permission permission) { // REVISIT for entity-level permissions we need an isUser // flag on the permissions entity in order to distinguish // between the user and group cases if (isGroupSubCollection() && permission.isSetUser() && permission.getUser().isSetId()) { permission.setGroup(new Group()); permission.getGroup().setId(permission.getUser().getId()); permission.setUser(null); } return permission; } protected permissions getPermissionsTemplate(Permission perm) { permissions permission = new permissions(); // allow the target Id to be implicit in the client-provided // representation if (isPrincipalSubCollection()) { permission.setad_element_id(targetId); permission.setObjectId(getMapper(Permission.class, Guid.class).map(perm, null)); } else { if (perm.getUser()!=null) { permission.setad_element_id(asGuid(perm.getUser().getId())); } else { //if user is null, group is not null; this was validated before permission.setad_element_id(asGuid(perm.getGroup().getId())); } permission.setObjectId(targetId); permission.setObjectType(objectType); } return permission; } protected boolean isPrincipalSubCollection() { return isUserSubCollection() || isGroupSubCollection(); } protected boolean isUserSubCollection() { return User.class.equals(suggestedParentType); } protected boolean isGroupSubCollection() { return Group.class.equals(suggestedParentType); } protected permissions getPermissions(String id) { return getEntity(permissions.class, VdcQueryType.GetPermissionById, new MultilevelAdministrationByPermissionIdParameters(new Guid(id)), id); } }
anjalshireesh/gluster-ovirt-poc
backend/manager/modules/restapi/jaxrs/src/main/java/org/ovirt/engine/api/restapi/resource/BackendAssignedPermissionsResource.java
Java
apache-2.0
9,096
# # Copyright (c) 2015, Chef Software, Inc. <legal@chef.io> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # This recipe is included to make our tests happy. # By intention, it does not contain anything.
chef/license_scout
spec/fixtures/berkshelf_cache_dir/cookbooks/chef-ingredient-0.19.0/recipes/default.rb
Ruby
apache-2.0
705
package io.bxbxbai.zhuanlan.utils;

import android.os.SystemClock;
import android.util.Log;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * A stopwatch utility for timing sections of code in an Android app.
 *
 * <p>Call {@link #begin(String)} to record the current time under a tag. A later call to
 * {@link #end(String)} logs the elapsed time for that tag and removes it. {@link #lap(String)}
 * behaves like a lap counter: it logs the elapsed time but keeps the tag, so it may be called
 * repeatedly against the same {@code begin}.
 *
 * <p>Timestamps come from {@link SystemClock#uptimeMillis()} and are stored in a
 * {@link ConcurrentHashMap}, so begin/lap/end pairs may run on different threads.
 *
 * @author xuebin.bai
 * @version 1.0.0
 * @since 2014.08.01
 */
public final class StopWatch {

    private static final String TAG = StopWatch.class.getSimpleName();

    /** Start timestamps keyed by tag. */
    private static final Map<String, Long> TIME = new ConcurrentHashMap<String, Long>();

    /** Threshold (ms) above which elapsed time is printed in seconds. */
    private static final int INT_THOUSAND = 1000;

    private StopWatch() {
        // utility class; not instantiable
    }

    /**
     * Records the current time under the given tag, without logging.
     *
     * @param tag tag identifying this measurement
     */
    public static void begin(String tag) {
        begin(tag, false);
    }

    /**
     * Records the current time under the given tag.
     *
     * @param tag       tag identifying this measurement
     * @param needPrint whether to log that the measurement started
     */
    public static void begin(String tag, boolean needPrint) {
        TIME.put(tag, SystemClock.uptimeMillis());
        if (needPrint) {
            Log.i(TAG, "begin: " + tag);
        }
    }

    /**
     * Logs the time elapsed since {@code begin(tag)} and removes the tag.
     *
     * @param tag tag identifying this measurement
     * @return elapsed time in ms, or -1 if {@code begin(tag)} was never called
     */
    public static long end(String tag) {
        return end(tag, "");
    }

    /**
     * Computes the time elapsed since {@code begin(tag)} and removes the tag.
     *
     * @param tag       tag identifying this measurement
     * @param needPrint whether to log the elapsed time
     * @return elapsed time in ms, or -1 if {@code begin(tag)} was never called
     */
    public static long end(String tag, boolean needPrint) {
        long t = lap(tag, "", needPrint);
        TIME.remove(tag);
        return t;
    }

    /**
     * Logs the time elapsed since {@code begin(tag)} with extra context, and removes the tag.
     *
     * @param tag   tag identifying this measurement
     * @param extra extra information appended to the log line
     * @return elapsed time in ms, or -1 if {@code begin(tag)} was never called
     */
    public static long end(String tag, String extra) {
        long t = lap(tag, extra, true);
        TIME.remove(tag);
        return t;
    }

    /**
     * Logs the time elapsed since {@code begin(tag)} without removing the tag.
     *
     * @param tag tag identifying this measurement
     */
    public static void lap(String tag) {
        lap(tag, "", true);
    }

    /**
     * Logs the time elapsed since {@code begin(tag)} with extra context, without removing the tag.
     *
     * @param tag   tag identifying this measurement
     * @param extra extra information appended to the log line
     */
    public static void lap(String tag, String extra) {
        lap(tag, extra, true);
    }

    /**
     * Computes the time elapsed since {@code begin(tag)} without removing the tag.
     *
     * @param tag       tag identifying this measurement
     * @param extra     extra information appended to the log line (may be null or empty)
     * @param needPrint whether to log the elapsed time
     * @return elapsed time in ms, or -1 if {@code begin(tag)} was never called
     */
    public static long lap(String tag, String extra, boolean needPrint) {
        String suffix = (extra != null && extra.length() > 0) ? ", " + extra : "";
        // Single get() instead of containsKey()+get(): on a concurrent map a
        // racing end() could remove the entry between the two calls and the
        // unboxing of the second get() would throw a NullPointerException.
        Long start = TIME.get(tag);
        if (start == null) {
            if (needPrint) {
                Log.e(TAG, "You did NOT CALL StopWatch.begin(" + tag + ")");
            }
            return -1;
        }
        long t = SystemClock.uptimeMillis() - start;
        String time = t > INT_THOUSAND ? (double) t / INT_THOUSAND + "s" : t + "ms";
        if (needPrint) {
            // suffix already carries its ", " separator; appending another one
            // here produced lines like "tag: 5ms, , extra" in the original.
            Log.i(TAG, tag + ": " + time + suffix);
        }
        return t;
    }

    /**
     * Logs a message under the default tag.
     *
     * @param msg message to log
     */
    public static void log(String msg) {
        log(TAG, msg);
    }

    /**
     * Logs a message under the given tag.
     *
     * @param tag log tag
     * @param msg message to log
     */
    public static void log(String tag, String msg) {
        Log.i(tag, msg);
    }
}
Zhenghaotao/ZhuanLan
app/src/main/java/io/bxbxbai/zhuanlan/utils/StopWatch.java
Java
apache-2.0
3,738
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ /*! * \file elemwise_binary_broadcast_op_logic.cc * \brief CPU Implementation of elementwise binary broadcast logical operators. */ #include "./elemwise_unary_op.h" #include "./elemwise_binary_op.h" #include "./elemwise_binary_broadcast_op.h" namespace mxnet { namespace op { MXNET_OPERATOR_REGISTER_BINARY_BROADCAST(broadcast_equal) .describe( R"code(Returns the result of element-wise **equal to** (==) comparison operation with broadcasting. Example:: x = [[ 1., 1., 1.], [ 1., 1., 1.]] y = [[ 0.], [ 1.]] broadcast_equal(x, y) = [[ 0., 0., 0.], [ 1., 1., 1.]] )code" ADD_FILELINE) .set_attr<FCompute>("FCompute<cpu>", BinaryBroadcastCompute<cpu, mshadow_op::eq>) .set_attr<nnvm::FGradient>("FGradient", MakeZeroGradNodes); MXNET_OPERATOR_REGISTER_BINARY_BROADCAST(broadcast_not_equal) .describe( R"code(Returns the result of element-wise **not equal to** (!=) comparison operation with broadcasting. 
Example:: x = [[ 1., 1., 1.], [ 1., 1., 1.]] y = [[ 0.], [ 1.]] broadcast_not_equal(x, y) = [[ 1., 1., 1.], [ 0., 0., 0.]] )code" ADD_FILELINE) .set_attr<FCompute>("FCompute<cpu>", BinaryBroadcastCompute<cpu, mshadow_op::ne>) .set_attr<nnvm::FGradient>("FGradient", MakeZeroGradNodes); MXNET_OPERATOR_REGISTER_BINARY_BROADCAST(broadcast_greater) .describe( R"code(Returns the result of element-wise **greater than** (>) comparison operation with broadcasting. Example:: x = [[ 1., 1., 1.], [ 1., 1., 1.]] y = [[ 0.], [ 1.]] broadcast_greater(x, y) = [[ 1., 1., 1.], [ 0., 0., 0.]] )code" ADD_FILELINE) .set_attr<FCompute>("FCompute<cpu>", BinaryBroadcastCompute<cpu, mshadow_op::gt>) .set_attr<nnvm::FGradient>("FGradient", MakeZeroGradNodes); NNVM_REGISTER_OP(broadcast_greater).add_alias("_npx_broadcast_greater"); MXNET_OPERATOR_REGISTER_BINARY_BROADCAST(broadcast_greater_equal) .describe( R"code(Returns the result of element-wise **greater than or equal to** (>=) comparison operation with broadcasting. Example:: x = [[ 1., 1., 1.], [ 1., 1., 1.]] y = [[ 0.], [ 1.]] broadcast_greater_equal(x, y) = [[ 1., 1., 1.], [ 1., 1., 1.]] )code" ADD_FILELINE) .set_attr<FCompute>("FCompute<cpu>", BinaryBroadcastCompute<cpu, mshadow_op::ge>) .set_attr<nnvm::FGradient>("FGradient", MakeZeroGradNodes); MXNET_OPERATOR_REGISTER_BINARY_BROADCAST(broadcast_lesser) .describe( R"code(Returns the result of element-wise **lesser than** (<) comparison operation with broadcasting. Example:: x = [[ 1., 1., 1.], [ 1., 1., 1.]] y = [[ 0.], [ 1.]] broadcast_lesser(x, y) = [[ 0., 0., 0.], [ 0., 0., 0.]] )code" ADD_FILELINE) .set_attr<FCompute>("FCompute<cpu>", BinaryBroadcastCompute<cpu, mshadow_op::lt>) .set_attr<nnvm::FGradient>("FGradient", MakeZeroGradNodes); MXNET_OPERATOR_REGISTER_BINARY_BROADCAST(broadcast_lesser_equal) .describe( R"code(Returns the result of element-wise **lesser than or equal to** (<=) comparison operation with broadcasting. 
Example:: x = [[ 1., 1., 1.], [ 1., 1., 1.]] y = [[ 0.], [ 1.]] broadcast_lesser_equal(x, y) = [[ 0., 0., 0.], [ 1., 1., 1.]] )code" ADD_FILELINE) .set_attr<FCompute>("FCompute<cpu>", BinaryBroadcastCompute<cpu, mshadow_op::le>) .set_attr<nnvm::FGradient>("FGradient", MakeZeroGradNodes); MXNET_OPERATOR_REGISTER_BINARY_BROADCAST(broadcast_logical_and) .describe(R"code(Returns the result of element-wise **logical and** with broadcasting. Example:: x = [[ 1., 1., 1.], [ 1., 1., 1.]] y = [[ 0.], [ 1.]] broadcast_logical_and(x, y) = [[ 0., 0., 0.], [ 1., 1., 1.]] )code" ADD_FILELINE) .set_attr<FCompute>("FCompute<cpu>", BinaryBroadcastCompute<cpu, mshadow_op::logical_and>) .set_attr<nnvm::FGradient>("FGradient", MakeZeroGradNodes); MXNET_OPERATOR_REGISTER_BINARY_BROADCAST(broadcast_logical_or) .describe(R"code(Returns the result of element-wise **logical or** with broadcasting. Example:: x = [[ 1., 1., 0.], [ 1., 1., 0.]] y = [[ 1.], [ 0.]] broadcast_logical_or(x, y) = [[ 1., 1., 1.], [ 1., 1., 0.]] )code" ADD_FILELINE) .set_attr<FCompute>("FCompute<cpu>", BinaryBroadcastCompute<cpu, mshadow_op::logical_or>) .set_attr<nnvm::FGradient>("FGradient", MakeZeroGradNodes); MXNET_OPERATOR_REGISTER_BINARY_BROADCAST(broadcast_logical_xor) .describe(R"code(Returns the result of element-wise **logical xor** with broadcasting. Example:: x = [[ 1., 1., 0.], [ 1., 1., 0.]] y = [[ 1.], [ 0.]] broadcast_logical_xor(x, y) = [[ 0., 0., 1.], [ 1., 1., 0.]] )code" ADD_FILELINE) .set_attr<FCompute>("FCompute<cpu>", BinaryBroadcastCompute<cpu, mshadow_op::logical_xor>) .set_attr<nnvm::FGradient>("FGradient", MakeZeroGradNodes); } // namespace op } // namespace mxnet
szha/mxnet
src/operator/tensor/elemwise_binary_broadcast_op_logic.cc
C++
apache-2.0
6,133
package org.zstack.test.core.job; import org.springframework.beans.factory.annotation.Autowire; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Configurable; import org.zstack.core.job.Job; import org.zstack.core.job.JobContext; import org.zstack.header.core.ReturnValueCompletion; import org.zstack.utils.Utils; import org.zstack.utils.logging.CLogger; @Configurable(preConstruction = true, autowire = Autowire.BY_TYPE) public class FakeJobReturnValue implements Job { CLogger logger = Utils.getLogger(FakeJobReturnValue.class); @JobContext private long index; @Autowired private FakeJobConfig fl; private FakeJobReturnValue() { } public FakeJobReturnValue(long index) { this.index = index; } @Override public void run(ReturnValueCompletion<Object> complete) { try { logger.debug(String.format("job %s is executing", index)); } finally { complete.success(index); } } }
MaJin1996/zstack
test/src/test/java/org/zstack/test/core/job/FakeJobReturnValue.java
Java
apache-2.0
1,047
package org.batfish.specifier.parboiled;

import com.google.common.base.MoreObjects;
import java.util.Objects;

/** An AST node that refers to a filter by its (possibly null) name. */
final class NameFilterAstNode implements FilterAstNode {
  private final String _name;

  NameFilterAstNode(AstNode nameAst) {
    this(((StringAstNode) nameAst).getStr());
  }

  NameFilterAstNode(String name) {
    _name = name;
  }

  @Override
  public <T> T accept(AstNodeVisitor<T> visitor) {
    return visitor.visitNameFilterAstNode(this);
  }

  @Override
  public <T> T accept(FilterAstNodeVisitor<T> visitor) {
    return visitor.visitNameFilterAstNode(this);
  }

  public String getName() {
    return _name;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (!(o instanceof NameFilterAstNode)) {
      return false;
    }
    // Objects.equals tolerates a null _name on either side.
    return Objects.equals(_name, ((NameFilterAstNode) o)._name);
  }

  @Override
  public int hashCode() {
    return Objects.hashCode(_name);
  }

  @Override
  public String toString() {
    return MoreObjects.toStringHelper(getClass()).add("name", _name).toString();
  }
}
intentionet/batfish
projects/batfish-common-protocol/src/main/java/org/batfish/specifier/parboiled/NameFilterAstNode.java
Java
apache-2.0
1,114
package org.zstack.sdk;

import java.util.HashMap;
import java.util.Map;
import org.zstack.sdk.*;

/**
 * Generated SDK action issuing {@code DELETE /hybrid/aliyun/vpc/{uuid}} to
 * remove the local record of an Aliyun ECS VPC.
 */
public class DeleteEcsVpcInLocalAction extends AbstractAction {

    // Parameter metadata tables; populated by the SDK framework via reflection.
    private static final HashMap<String, Parameter> parameterMap = new HashMap<>();
    private static final HashMap<String, Parameter> nonAPIParameterMap = new HashMap<>();

    /** Outcome of the call: exactly one of {@link #error} or {@link #value} is meaningful. */
    public static class Result {
        public ErrorCode error;
        public org.zstack.sdk.DeleteEcsVpcInLocalResult value;

        /** Returns this result unchanged, or throws {@link ApiException} if the call failed. */
        public Result throwExceptionIfError() {
            if (error != null) {
                throw new ApiException(
                    String.format("error[code: %s, description: %s, details: %s]", error.code, error.description, error.details)
                );
            }

            return this;
        }
    }

    /** UUID of the ECS VPC record to delete (substituted into the REST path). */
    @Param(required = true, nonempty = false, nullElements = false, emptyString = true, noTrim = false)
    public java.lang.String uuid;

    @Param(required = false)
    public java.lang.String deleteMode = "Permissive";

    @Param(required = false)
    public java.util.List systemTags;

    @Param(required = false)
    public java.util.List userTags;

    @Param(required = false)
    public String sessionId;

    @Param(required = false)
    public String accessKeyId;

    @Param(required = false)
    public String accessKeySecret;

    @Param(required = false)
    public String requestIp;

    // Client-side knobs only (never sent to the API); -1 means "use the SDK default".
    @NonAPIParam
    public long timeout = -1;

    @NonAPIParam
    public long pollingInterval = -1;

    /**
     * Converts the raw {@code ApiResult} into a typed {@link Result}, substituting
     * an empty result object when the call succeeded without a payload.
     */
    private Result makeResult(ApiResult res) {
        Result ret = new Result();
        if (res.error != null) {
            ret.error = res.error;
            return ret;
        }

        org.zstack.sdk.DeleteEcsVpcInLocalResult value = res.getResult(org.zstack.sdk.DeleteEcsVpcInLocalResult.class);
        ret.value = value == null ? new org.zstack.sdk.DeleteEcsVpcInLocalResult() : value;

        return ret;
    }

    /** Executes the action synchronously and returns its typed result. */
    public Result call() {
        ApiResult res = ZSClient.call(this);
        return makeResult(res);
    }

    /** Executes the action asynchronously, delivering the typed result to {@code completion}. */
    public void call(final Completion<Result> completion) {
        ZSClient.call(this, new InternalCompletion() {
            @Override
            public void complete(ApiResult res) {
                completion.complete(makeResult(res));
            }
        });
    }

    protected Map<String, Parameter> getParameterMap() {
        return parameterMap;
    }

    protected Map<String, Parameter> getNonAPIParameterMap() {
        return nonAPIParameterMap;
    }

    /** REST routing: DELETE /hybrid/aliyun/vpc/{uuid}; requires a session and is polled. */
    protected RestInfo getRestInfo() {
        RestInfo info = new RestInfo();
        info.httpMethod = "DELETE";
        info.path = "/hybrid/aliyun/vpc/{uuid}";
        info.needSession = true;
        info.needPoll = true;
        info.parameterName = "";
        return info;
    }
}
AlanJager/zstack
sdk/src/main/java/org/zstack/sdk/DeleteEcsVpcInLocalAction.java
Java
apache-2.0
2,821
<html> <head> <meta http-equiv="Content-Type" content="text/html; charset=US-ASCII"> <title>Struct template noinvoke</title> <link rel="stylesheet" href="../../../../doc/src/boostbook.css" type="text/css"> <meta name="generator" content="DocBook XSL Stylesheets V1.78.1"> <link rel="home" href="../../index.html" title="The Boost C++ Libraries BoostBook Documentation Subset"> <link rel="up" href="../../proto/reference.html#header.boost.proto.transform.make_hpp" title="Header &lt;boost/proto/transform/make.hpp&gt;"> <link rel="prev" href="lazy/impl.html" title="Struct template impl"> <link rel="next" href="protect.html" title="Struct template protect"> </head> <body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF"> <table cellpadding="2" width="100%"><tr> <td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../boost.png"></td> <td align="center"><a href="../../../../index.html">Home</a></td> <td align="center"><a href="../../../../libs/libraries.htm">Libraries</a></td> <td align="center"><a href="http://www.boost.org/users/people.html">People</a></td> <td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td> <td align="center"><a href="../../../../more/index.htm">More</a></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="lazy/impl.html"><img src="../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../../proto/reference.html#header.boost.proto.transform.make_hpp"><img src="../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../index.html"><img src="../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="protect.html"><img src="../../../../doc/src/images/next.png" alt="Next"></a> </div> <div class="refentry"> <a name="boost.proto.noinvoke"></a><div class="titlepage"></div> <div class="refnamediv"> <h2><span class="refentrytitle">Struct template noinvoke</span></h2> <p>boost::proto::noinvoke &#8212; A type 
annotation in an <a class="link" href="../../ObjectTransform.html" title="Concept ObjectTransform">ObjectTransform</a> which instructs Proto not to look for a nested <code class="computeroutput">::type</code> within <code class="computeroutput">T</code> after type substitution.</p> </div> <h2 xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" class="refsynopsisdiv-title">Synopsis</h2> <div xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" class="refsynopsisdiv"><pre class="synopsis"><span class="comment">// In header: &lt;<a class="link" href="../../proto/reference.html#header.boost.proto.transform.make_hpp" title="Header &lt;boost/proto/transform/make.hpp&gt;">boost/proto/transform/make.hpp</a>&gt; </span><span class="keyword">template</span><span class="special">&lt;</span><span class="keyword">typename</span> T<span class="special">&gt;</span> <span class="keyword">struct</span> <a class="link" href="noinvoke.html" title="Struct template noinvoke">noinvoke</a> <span class="special">{</span> <span class="special">}</span><span class="special">;</span></pre></div> <div class="refsect1"> <a name="idp390978944"></a><h2>Description</h2> <p> <a class="link" href="../../ObjectTransform.html" title="Concept ObjectTransform">ObjectTransform</a>s are evaluated by <code class="computeroutput"><a class="link" href="make.html" title="Struct template make">proto::make&lt;&gt;</a></code>, which finds all nested transforms and replaces them with the result of their applications. If any substitutions are performed, the result is first assumed to be a metafunction to be applied; that is, Proto checks to see if the result has a nested <code class="computeroutput">::type</code> typedef. If it does, that becomes the result. The purpose of <code class="computeroutput">proto::noinvoke&lt;&gt;</code> is to prevent Proto from looking for a nested <code class="computeroutput">::type</code> typedef in these situations. 
</p> <p> Example: </p> <pre class="programlisting"><span class="keyword">struct</span> <span class="identifier">Test</span> <span class="special">:</span> <a class="link" href="when.html" title="Struct template when">proto::when</a><span class="special">&lt;</span> <a class="link" href="_.html" title="Struct _">_</a> <span class="special">,</span> <span class="identifier">proto</span><span class="special">::</span><span class="identifier">noinvoke</span><span class="special">&lt;</span> <span class="comment">// This remove_pointer invocation is blocked by noinvoke</span> <span class="identifier">boost</span><span class="special">::</span><span class="identifier">remove_pointer</span><span class="special">&lt;</span> <span class="comment">// This add_pointer invocation is *not* blocked by noinvoke</span> <span class="identifier">boost</span><span class="special">::</span><span class="identifier">add_pointer</span><span class="special">&lt;</span><a class="link" href="_.html" title="Struct _">_</a><span class="special">&gt;</span> <span class="special">&gt;</span> <span class="special">&gt;</span><span class="special">(</span><span class="special">)</span> <span class="special">&gt;</span> <span class="special">{</span><span class="special">}</span><span class="special">;</span> <span class="keyword">void</span> <span class="identifier">test_noinvoke</span><span class="special">(</span><span class="special">)</span> <span class="special">{</span> <span class="keyword">typedef</span> <a class="link" href="terminal.html" title="Struct template terminal">proto::terminal</a><span class="special">&lt;</span><span class="keyword">int</span><span class="special">&gt;</span><span class="special">::</span><span class="identifier">type</span> <span class="identifier">Int</span><span class="special">;</span> <span class="identifier">BOOST_MPL_ASSERT</span><span class="special">(</span><span class="special">(</span> <span class="identifier">boost</span><span
class="special">::</span><span class="identifier">is_same</span><span class="special">&lt;</span> <span class="identifier">boost</span><span class="special">::</span><span class="identifier">result_of</span><span class="special">&lt;</span><span class="identifier">Test</span><span class="special">(</span><span class="identifier">Int</span><span class="special">)</span><span class="special">&gt;</span><span class="special">::</span><span class="identifier">type</span> <span class="special">,</span> <span class="identifier">boost</span><span class="special">::</span><span class="identifier">remove_pointer</span><span class="special">&lt;</span><span class="identifier">Int</span> <span class="special">*</span><span class="special">&gt;</span> <span class="special">&gt;</span> <span class="special">)</span><span class="special">)</span><span class="special">;</span> <span class="identifier">Int</span> <span class="identifier">i</span> <span class="special">=</span> <span class="special">{</span><span class="number">42</span><span class="special">}</span><span class="special">;</span> <span class="identifier">boost</span><span class="special">::</span><span class="identifier">remove_pointer</span><span class="special">&lt;</span><span class="identifier">Int</span> <span class="special">*</span><span class="special">&gt;</span> <span class="identifier">t</span> <span class="special">=</span> <span class="identifier">Test</span><span class="special">(</span><span class="special">)</span><span class="special">(</span><span class="identifier">i</span><span class="special">)</span><span class="special">;</span> <span class="special">}</span></pre> <p> </p> </div> </div> <table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr> <td align="left"></td> <td align="right"><div class="copyright-footer">Copyright &#169; 2008 Eric Niebler<p> Distributed under the Boost Software License, Version 1.0. 
(See accompanying file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>) </p> </div></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="lazy/impl.html"><img src="../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../../proto/reference.html#header.boost.proto.transform.make_hpp"><img src="../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../index.html"><img src="../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="protect.html"><img src="../../../../doc/src/images/next.png" alt="Next"></a> </div> </body> </html>
biospi/seamass-windeps
src/boost_1_57_0/doc/html/boost/proto/noinvoke.html
HTML
apache-2.0
8,924
# encoding: utf-8
"""
Test lldb data formatter subsystem.
"""

from __future__ import print_function

import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
from ObjCDataFormatterTestCase import ObjCDataFormatterTestCase


class ObjCDataFormatterNSError(ObjCDataFormatterTestCase):

    @skipUnlessDarwin
    def test_nserror_with_run_command(self):
        """Test formatters for NSError."""
        self.appkit_tester_impl(self.nserror_data_formatter_commands)

    def nserror_data_formatter_commands(self):
        # Each entry pairs a frame-variable command with the substrings the
        # NSError formatter must produce; checked in order.
        expectations = [
            ('frame variable nserror',
             ['domain: @"Foobar" - code: 12']),
            ('frame variable nserrorptr',
             ['domain: @"Foobar" - code: 12']),
            ('frame variable nserror->_userInfo',
             ['2 key/value pairs']),
            ('frame variable nserror->_userInfo --ptr-depth 1 -d run-target',
             ['@"a"', '@"b"', "1", "2"]),
        ]
        for command, expected_substrs in expectations:
            self.expect(command, substrs=expected_substrs)
llvm-mirror/lldb
packages/Python/lldbsuite/test/functionalities/data-formatter/data-formatter-objc/TestDataFormatterObjCNSError.py
Python
apache-2.0
1,050
#lightbox{ position: absolute; left: 0; width: 100%; z-index: 10000; text-align: center; line-height: 0;} #lightbox img{ width: auto; height: auto;} #lightbox a img{ border: none; } #outerImageContainer{ position: relative; background-color: #fff; width: 250px; height: 250px; margin: 0 auto; } #imageContainer{ padding: 10px; } #loading{ position: absolute; top: 40%; left: 0%; height: 25%; width: 100%; text-align: center; line-height: 0; } #hoverNav{ position: absolute; top: 0; left: 0; height: 100%; width: 100%; z-index: 10; } #imageContainer>#hoverNav{ left: 0;} #hoverNav a{ outline: none;} #prevLink, #nextLink{ width: 49%; height: 100%; background-image: url(data:image/gif;base64,AAAA); /* Trick IE into showing hover */ display: block; } #prevLink { left: 0; float: left;} #nextLink { right: 0; float: right;} #prevLink:hover, #prevLink:visited:hover { background: url(http://orgmode.org/images/prevlabel.gif) left 15% no-repeat; } #nextLink:hover, #nextLink:visited:hover { background: url(http://orgmode.org/images/nextlabel.gif) right 15% no-repeat; } #imageDataContainer{ font: 10px Verdana, Helvetica, sans-serif; background-color: #fff; margin: 0 auto; line-height: 1.4em; overflow: auto; width: 100% ; } #imageData{ padding:0 10px; color: #666; } #imageData #imageDetails{ width: 70%; float: left; text-align: left; } #imageData #caption{ font-weight: bold; } #imageData #numberDisplay{ display: block; clear: left; padding-bottom: 1.0em; } #imageData #bottomNavClose{ width: 66px; float: right; padding-bottom: 0.7em; outline: none;} #overlay{ position: absolute; top: 0; left: 0; z-index: 9000; width: 100%; height: 500px; background-color: #000; } @media all { body { margin: 10px 8% 10px 8%; font-family: Verdana; text-align: justify; font-size: 10pt; padding: 10px; line-height: 1.2em; } #table-of-contents { color: black; background: #FFF; font-size: 80%; padding: .5em; margin: 0em 0em 1em 1em; display: block; } #table-of-contents a { color: #003333; } #table-of-contents 
a:hover { color: #003333; text-decoration: underline; } #table-of-contents li { margin: .2em; } #table-of-contents h2 { margin-top: .2em; border: none; } #license { padding: .3em; border: 1px solid grey; background-color: #eeeeee; font-size: 80%; } h1 { font-size: 12pt; } .title { color: #990000; padding-bottom: 7px; margin-bottom: 20px; border-bottom: 1px solid #222; } h2 { font-size: 12pt; padding-bottom: 4px; margin-bottom: 5px; border-bottom: 3px solid #DDD; } h3 { font-size: 11pt; color: #333333; } h4 { font-size: 9pt; } a { text-decoration: none; color: #006666 } a:visited { text-decoration: none; color: #336666 } a:hover { text-decoration: underline; color: #003333 } .todo { color: #990000; } .done { color: #006666; } .timestamp-kwd { color: #444; } .tag { color: #DDD; font-size: 70%; font-weight: 500; } li { margin: .4em; } table { border: 1px solid #ccc; } td { border: 1px solid #ccc; padding: .1em .7em .1em .3em; } th { border: 1px solid #ccc; } code { font-size: 100%; } img { border: none; } .share img { opacity: .4; -moz-opacity: .4; filter: alpha(opacity=40); } .share img:hover { opacity: 1; -moz-opacity: 1; filter: alpha(opacity=100); } /* pre {border: 1px solid #555; */ /* background: #EEE; */ /* font-size: 9pt; */ /* padding: 1em; */ /* } */ /* pre { */ /* color: #e5e5e5; */ /* background-color: #000000; */ /* padding: 1.4em; */ /* border: 2px solid grey; */ /* } */ pre { background-color: black; border: 4px solid grey; color: #EEE; overflow: auto; padding: 1em; } .org-info-box { clear:both; margin-left:auto; margin-right:auto; padding:0.7em; /* border:1px solid #CCC; */ /* border-radius:10px; */ /* -moz-border-radius:10px; */ } .org-info-box img { float:left; margin:0em 0.5em 0em 0em; } .org-info-box p { margin:0em; padding:0em; } .builtin { /* font-lock-builtin-face */ color: #f4a460; } .comment { /* font-lock-comment-face */ color: #737373; } .comment-delimiter { /* font-lock-comment-delimiter-face */ color: #666666; } .constant { /* 
font-lock-constant-face */ color: #db7093; } .doc { /* font-lock-doc-face */ color: #b3b3b3; } .function-name { /* font-lock-function-name-face */ color: #5f9ea0; } .headline { /* headline-face */ color: #ffffff; background-color: #000000; font-weight: bold; } .keyword { /* font-lock-keyword-face */ color: #4682b4; } .negation-char { } .regexp-grouping-backslash { } .regexp-grouping-construct { } .string { /* font-lock-string-face */ color: #ccc79a; } .todo-comment { /* todo-comment-face */ color: #ffffff; background-color: #000000; font-weight: bold; } .variable-name { /* font-lock-variable-name-face */ color: #ff6a6a; } .warning { /* font-lock-warning-face */ color: #ffffff; background-color: #cd5c5c; font-weight: bold; } pre.a { color: inherit; background-color: inherit; font: inherit; text-decoration: inherit; } pre.a:hover { text-decoration: underline; } pre.src { background-color: #303030; color: #e5e5e5; overflow: auto; } /* Styles for org-info.js */ .org-info-js_info-navigation { border-style:none; } #org-info-js_console-label { font-size:10px; font-weight:bold; white-space:nowrap; } .org-info-js_search-highlight { background-color:#ffff00; color:#000000; font-weight:bold; } #org-info-js-window { border-bottom:1px solid black; padding-bottom:10px; margin-bottom:10px; } .org-info-search-highlight { background-color:#adefef; /* same color as emacs default */ color:#000000; font-weight:bold; } .org-bbdb-company { /* bbdb-company */ font-style: italic; } .org-bbdb-field-name { } .org-bbdb-field-value { } .org-bbdb-name { /* bbdb-name */ text-decoration: underline; } .org-bold { /* bold */ font-weight: bold; } .org-bold-italic { /* bold-italic */ font-weight: bold; font-style: italic; } .org-border { /* border */ background-color: #000000; } .org-buffer-menu-buffer { /* buffer-menu-buffer */ font-weight: bold; } .org-builtin { /* font-lock-builtin-face */ color: #da70d6; } .org-button { /* button */ text-decoration: underline; } .org-c-nonbreakable-space { /* 
c-nonbreakable-space-face */ background-color: #ff0000; font-weight: bold; } .org-calendar-today { /* calendar-today */ text-decoration: underline; } .org-comment { /* font-lock-comment-face */ color: #b22222; } .org-comment-delimiter { /* font-lock-comment-delimiter-face */ color: #b22222; } .org-constant { /* font-lock-constant-face */ color: #5f9ea0; } .org-cursor { /* cursor */ background-color: #000000; } .org-default { /* default */ color: #000000; background-color: #ffffff; } .org-diary { /* diary */ color: #ff0000; } .org-doc { /* font-lock-doc-face */ color: #bc8f8f; } .org-escape-glyph { /* escape-glyph */ color: #a52a2a; } .org-file-name-shadow { /* file-name-shadow */ color: #7f7f7f; } .org-fixed-pitch { } .org-fringe { /* fringe */ background-color: #f2f2f2; } .org-function-name { /* font-lock-function-name-face */ color: #0000ff; } .org-header-line { /* header-line */ color: #333333; background-color: #e5e5e5; } .org-help-argument-name { /* help-argument-name */ font-style: italic; } .org-highlight { /* highlight */ background-color: #b4eeb4; } .org-holiday { /* holiday */ background-color: #ffc0cb; } .org-info-header-node { /* info-header-node */ color: #a52a2a; font-weight: bold; font-style: italic; } .org-info-header-xref { /* info-header-xref */ color: #0000ff; text-decoration: underline; } .org-info-menu-header { /* info-menu-header */ font-weight: bold; } .org-info-menu-star { /* info-menu-star */ color: #ff0000; } .org-info-node { /* info-node */ color: #a52a2a; font-weight: bold; font-style: italic; } .org-info-title-1 { /* info-title-1 */ font-size: 172%; font-weight: bold; } .org-info-title-2 { /* info-title-2 */ font-size: 144%; font-weight: bold; } .org-info-title-3 { /* info-title-3 */ font-size: 120%; font-weight: bold; } .org-info-title-4 { /* info-title-4 */ font-weight: bold; } .org-info-xref { /* info-xref */ color: #0000ff; text-decoration: underline; } .org-isearch { /* isearch */ color: #b0e2ff; background-color: #cd00cd; } 
.org-italic { /* italic */ font-style: italic; } .org-keyword { /* font-lock-keyword-face */ color: #a020f0; } .org-lazy-highlight { /* lazy-highlight */ background-color: #afeeee; } .org-link { /* link */ color: #0000ff; text-decoration: underline; } .org-link-visited { /* link-visited */ color: #8b008b; text-decoration: underline; } .org-match { /* match */ background-color: #ffff00; } .org-menu { } .org-message-cited-text { /* message-cited-text */ color: #ff0000; } .org-message-header-cc { /* message-header-cc */ color: #191970; } .org-message-header-name { /* message-header-name */ color: #6495ed; } .org-message-header-newsgroups { /* message-header-newsgroups */ color: #00008b; font-weight: bold; font-style: italic; } .org-message-header-other { /* message-header-other */ color: #4682b4; } .org-message-header-subject { /* message-header-subject */ color: #000080; font-weight: bold; } .org-message-header-to { /* message-header-to */ color: #191970; font-weight: bold; } .org-message-header-xheader { /* message-header-xheader */ color: #0000ff; } .org-message-mml { /* message-mml */ color: #228b22; } .org-message-separator { /* message-separator */ color: #a52a2a; } .org-minibuffer-prompt { /* minibuffer-prompt */ color: #0000cd; } .org-mm-uu-extract { /* mm-uu-extract */ color: #006400; background-color: #ffffe0; } .org-mode-line { /* mode-line */ color: #000000; background-color: #bfbfbf; } .org-mode-line-buffer-id { /* mode-line-buffer-id */ font-weight: bold; } .org-mode-line-highlight { } .org-mode-line-inactive { /* mode-line-inactive */ color: #333333; background-color: #e5e5e5; } .org-mouse { /* mouse */ background-color: #000000; } .org-negation-char { } .org-next-error { /* next-error */ background-color: #eedc82; } .org-nobreak-space { /* nobreak-space */ color: #a52a2a; text-decoration: underline; } .org-org-agenda-date { /* org-agenda-date */ color: #0000ff; } .org-org-agenda-date-weekend { /* org-agenda-date-weekend */ color: #0000ff; font-weight: 
bold; } .org-org-agenda-restriction-lock { /* org-agenda-restriction-lock */ background-color: #ffff00; } .org-org-agenda-structure { /* org-agenda-structure */ color: #0000ff; } .org-org-archived { /* org-archived */ color: #7f7f7f; } .org-org-code { /* org-code */ color: #7f7f7f; } .org-org-column { /* org-column */ background-color: #e5e5e5; } .org-org-column-title { /* org-column-title */ background-color: #e5e5e5; font-weight: bold; text-decoration: underline; } .org-org-date { /* org-date */ color: #a020f0; text-decoration: underline; } .org-org-done { /* org-done */ color: #228b22; font-weight: bold; } .org-org-drawer { /* org-drawer */ color: #0000ff; } .org-org-ellipsis { /* org-ellipsis */ color: #b8860b; text-decoration: underline; } .org-org-formula { /* org-formula */ color: #b22222; } .org-org-headline-done { /* org-headline-done */ color: #bc8f8f; } .org-org-hide { /* org-hide */ color: #e5e5e5; } .org-org-latex-and-export-specials { /* org-latex-and-export-specials */ color: #8b4513; } .org-org-level-1 { /* org-level-1 */ color: #0000ff; } .org-org-level-2 { /* org-level-2 */ color: #b8860b; } .org-org-level-3 { /* org-level-3 */ color: #a020f0; } .org-org-level-4 { /* org-level-4 */ color: #b22222; } .org-org-level-5 { /* org-level-5 */ color: #228b22; } .org-org-level-6 { /* org-level-6 */ color: #5f9ea0; } .org-org-level-7 { /* org-level-7 */ color: #da70d6; } .org-org-level-8 { /* org-level-8 */ color: #bc8f8f; } .org-org-link { /* org-link */ color: #a020f0; text-decoration: underline; } .org-org-property-value { } .org-org-scheduled-previously { /* org-scheduled-previously */ color: #b22222; } .org-org-scheduled-today { /* org-scheduled-today */ color: #006400; } .org-org-sexp-date { /* org-sexp-date */ color: #a020f0; } .org-org-special-keyword { /* org-special-keyword */ color: #bc8f8f; } .org-org-table { /* org-table */ color: #0000ff; } .org-org-tag { /* org-tag */ font-weight: bold; } .org-org-target { /* org-target */ text-decoration: 
underline; } .org-org-time-grid { /* org-time-grid */ color: #b8860b; } .org-org-todo { /* org-todo */ color: #ff0000; } .org-org-upcoming-deadline { /* org-upcoming-deadline */ color: #b22222; } .org-org-verbatim { /* org-verbatim */ color: #7f7f7f; text-decoration: underline; } .org-org-warning { /* org-warning */ color: #ff0000; font-weight: bold; } .org-outline-1 { /* outline-1 */ color: #0000ff; } .org-outline-2 { /* outline-2 */ color: #b8860b; } .org-outline-3 { /* outline-3 */ color: #a020f0; } .org-outline-4 { /* outline-4 */ color: #b22222; } .org-outline-5 { /* outline-5 */ color: #228b22; } .org-outline-6 { /* outline-6 */ color: #5f9ea0; } .org-outline-7 { /* outline-7 */ color: #da70d6; } .org-outline-8 { /* outline-8 */ color: #bc8f8f; } .outline-text-1, .outline-text-2, .outline-text-3, .outline-text-4, .outline-text-5, .outline-text-6 { /* Add more spacing between section. Padding, so that folding with org-info.js works as expected. */ padding-bottom:2em; } .org-preprocessor { /* font-lock-preprocessor-face */ color: #da70d6; } .org-query-replace { /* query-replace */ color: #b0e2ff; background-color: #cd00cd; } .org-regexp-grouping-backslash { /* font-lock-regexp-grouping-backslash */ font-weight: bold; } .org-regexp-grouping-construct { /* font-lock-regexp-grouping-construct */ font-weight: bold; } .org-region { /* region */ background-color: #eedc82; } .org-rmail-highlight { } .org-scroll-bar { /* scroll-bar */ background-color: #bfbfbf; } .org-secondary-selection { /* secondary-selection */ background-color: #ffff00; } .org-shadow { /* shadow */ color: #7f7f7f; } .org-show-paren-match { /* show-paren-match */ background-color: #40e0d0; } .org-show-paren-mismatch { /* show-paren-mismatch */ color: #ffffff; background-color: #a020f0; } .org-string { /* font-lock-string-face */ color: #bc8f8f; } .org-texinfo-heading { /* texinfo-heading */ color: #0000ff; } .org-tool-bar { /* tool-bar */ color: #000000; background-color: #bfbfbf; } .org-tooltip { 
/* tooltip */ color: #000000; background-color: #ffffe0; } .org-trailing-whitespace { /* trailing-whitespace */ background-color: #ff0000; } .org-type { /* font-lock-type-face */ color: #228b22; } .org-underline { /* underline */ text-decoration: underline; } .org-variable-name { /* font-lock-variable-name-face */ color: #b8860b; } .org-variable-pitch { } .org-vertical-border { } .org-warning { /* font-lock-warning-face */ color: #ff0000; font-weight: bold; } .rss_box {} .rss_title, rss_title a {} .rss_items {} .rss_item a:link, .rss_item a:visited, .rss_item a:active {} .rss_item a:hover {} .rss_date {} } /* END OF @media all */ @media screen { #table-of-contents { float: right; border: 1px solid #CCC; max-width: 50%; overflow: auto; } } /* END OF @media screen */ @media all { @import url(/worg/style/zenburn-emacs.css); body { color: #dcdccc; background-color: white; font:0.9em serif; max-width: 95%; margin: auto; background-image: url(http://orgmode.org/tmp/org-mode-unicorn.png); background-repeat: no-repeat; } body #content { padding-top: 45px; } body pre { border: none; } body a { color: #8cd0d3; } body #content { padding-top: 100px; } body .title { margin-left: 120px; } /* TOC inspired by http://jashkenas.github.com/coffee-script */ #table-of-contents { font-size: 10pt; position: fixed; right: 0em; top: 0em; background: #2b2b2b; color: #dcdccc; -webkit-box-shadow: 0 0 1em #777777; -moz-box-shadow: 0 0 1em #777777; -webkit-border-bottom-left-radius: 5px; -moz-border-radius-bottomleft: 5px; text-align: right; /* ensure doesn't flow off the screen when expanded */ max-height: 80%; overflow: auto; } #table-of-contents h2 { font-size: 10pt; max-width: 8em; font-weight: normal; padding-left: 0.5em; padding-left: 0.5em; padding-top: 0.05em; padding-bottom: 0.05em; } #table-of-contents #text-table-of-contents { display: none; text-align: left; } #table-of-contents:hover #text-table-of-contents { display: block; padding: 0.5em; margin-top: -1.5em; } .rss_box {} 
.rss_title, rss_title a {} .rss_items {} .rss_item a:link, .rss_item a:visited, .rss_item a:active {} .rss_item a:hover {} .rss_date {} } /* END OF @media all */ @media screen { #table-of-contents { float: right; border: 1px solid #CCC; max-width: 50%; overflow: auto; } } /* END OF @media screen */ @media all { body { font-family: "Helvetica Neue", "Lucida Grande", "Lucida Sans Unicode", Helvetica, Arial, sans-serif !important; font-size: 14px; line-height: 21px; color: #333; max-width: 95%; margin: auto; background-image: url(http://orgmode.org/worg-unicorn.png); background-position: 25px 5px; background-repeat: no-repeat; } body #content { padding-top: 70px; } body .title { margin-left: 120px; } #org-div-home-and-up{ position: fixed; right: 0; top: 4em; } /* TOC inspired by http://jashkenas.github.com/coffee-script */ #table-of-contents { font-size: 10pt; position: fixed; right: 0em; top: 0em; background: white; -webkit-box-shadow: 0 0 1em #777777; -moz-box-shadow: 0 0 1em #777777; -webkit-border-bottom-left-radius: 5px; -moz-border-radius-bottomleft: 5px; text-align: right; /* ensure doesn't flow off the screen when expanded */ max-height: 80%; overflow: auto; } #table-of-contents h2 { font-size: 10pt; max-width: 9em; font-weight: normal; padding-left: 0.5em; padding-left: 0.5em; padding-top: 0.05em; padding-bottom: 0.05em; } #table-of-contents #text-table-of-contents { display: none; text-align: left; } #table-of-contents:hover #text-table-of-contents { display: block; padding: 0.5em; margin-top: -1.5em; } /* #license { */ /* padding: .3em; */ /* border: 1px solid gray; */ /* background-color: #eeeeee; */ /* } */ h1 { /* font-family:Sans; font-weight:bold; */ font-size:2.1em; padding:0 0 30px 0; margin-top: 10px; margin-bottom: 10px; margin-right: 7%; color: #6C5D4F; } /* h2:before { content: "* " } h3:before { content: "** " } h4:before { content: "*** " } */ h2 { font-family:Arial,sans-serif; font-size:1.45em; line-height:16px; padding:7px 0 0 0; color: 
#6E2432; } .outline-text-2 { margin-left: 0.1em } .title { } h3 { font-family:Arial,sans-serif; font-size:1.3em; color: #A34D32; margin-left: 0.6em; } .outline-text-3 { margin-left: 0.9em; } h4 { font-family:Arial,sans-serif; font-size:1.2em; margin-left: 1.2em; color: #A5573E; } .outline-text-4 { margin-left: 1.45em; } a {text-decoration: none; color: #537d7b} /* a:visited {text-decoration: none; color: #224444} */ /* Taken out because color too similar to text. */ a:visited {text-decoration: none; color: #98855b} /* this is now the color of the Unicorns horn */ a:hover {text-decoration: underline; color: #a34d32} .todo { color: #CA0000; } .done { color: #006666; } .timestamp-kwd { color: #444; } .tag { } li { margin: .4em; } table { border: 1px solid #ccc; } td { border: 1px solid #ccc; } th { border: 1px solid #ccc; } code { font-size: 100%; color: black; border: 1px solid #DEDEDE; padding: 0px 0.2em; } img { border: none; } .share img { opacity: .4; -moz-opacity: .4; filter: alpha(opacity=40); } .share img:hover { opacity: 1; -moz-opacity: 1; filter: alpha(opacity=100); } /* pre {border: 1px solid #555; */ /* background: #EEE; */ /* font-size: 9pt; */ /* padding: 1em; */ /* } */ /* pre { */ /* color: #e5e5e5; */ /* background-color: #000000; */ /* padding: 1.4em; */ /* border: 2px solid gray; */ /* } */ /* pre { */ /* background-color: #2b2b2b; */ /* border: 4px solid gray; */ /* color: #EEE; */ /* overflow: auto; */ /* padding: 1em; */ /* } */ pre { font-family: Monaco, Consolas, "Lucida Console", monospace; color: black; background-color: #efefef; padding: 1.2em; border: 1px solid #dddddd; overflow: auto; -webkit-box-shadow: 0px 0px 4px rgba(0,0,0,0.23); -moz-box-shadow: 0px 0px 4px rgba(0,0,0,0.23); box-shadow: 0px 0px 4px rgba(0,0,0,0.23); box-shadow: #333 3px 3px 4px; -o-box-shadow: #333 3px 3px 4px; -webkit-box-shadow: #333 3px 3px 4px; -moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; } .org-info-box { clear:both; margin-left:auto; 
margin-right:auto; padding:0.7em; /* border:1px solid #CCC; */ /* border-radius:10px; */ /* -moz-border-radius:10px; */ } .org-info-box img { float:left; margin:0em 0.5em 0em 0em; } .org-info-box p { margin:0em; padding:0em; } .builtin { /* font-lock-builtin-face */ color: #f4a460; } .comment { /* font-lock-comment-face */ color: #737373; } .comment-delimiter { /* font-lock-comment-delimiter-face */ color: #666666; } .constant { /* font-lock-constant-face */ color: #db7093; } .doc { /* font-lock-doc-face */ color: #b3b3b3; } .function-name { /* font-lock-function-name-face */ color: #5f9ea0; } .headline { /* headline-face */ color: #ffffff; background-color: #000000; font-weight: bold; } .keyword { /* font-lock-keyword-face */ color: #4682b4; } .negation-char { } .regexp-grouping-backslash { } .regexp-grouping-construct { } .string { /* font-lock-string-face */ color: #ccc79a; } .todo-comment { /* todo-comment-face */ color: #ffffff; background-color: #000000; font-weight: bold; } .variable-name { /* font-lock-variable-name-face */ color: #ff6a6a; } .warning { /* font-lock-warning-face */ color: #ffffff; background-color: #cd5c5c; font-weight: bold; } pre.a { color: inherit; background-color: inherit; font: inherit; text-decoration: inherit; } pre.a:hover { text-decoration: underline; } /* Styles for org-info.js */ .org-info-js_info-navigation { border-style:none; } #org-info-js_console-label { font-size:10px; font-weight:bold; white-space:nowrap; } .org-info-js_search-highlight { background-color:#ffff00; color:#000000; font-weight:bold; } #org-info-js-window { border-bottom:1px solid black; padding-bottom:10px; margin-bottom:10px; } .org-info-search-highlight { background-color:#adefef; /* same color as emacs default */ color:#000000; font-weight:bold; } .org-bbdb-company { /* bbdb-company */ font-style: italic; } .org-bbdb-field-name { } .org-bbdb-field-value { } .org-bbdb-name { /* bbdb-name */ text-decoration: underline; } .org-bold { /* bold */ font-weight: 
bold; } .org-bold-italic { /* bold-italic */ font-weight: bold; font-style: italic; } .org-border { /* border */ background-color: #000000; } .org-buffer-menu-buffer { /* buffer-menu-buffer */ font-weight: bold; } .org-builtin { /* font-lock-builtin-face */ color: #da70d6; } .org-button { /* button */ text-decoration: underline; } .org-c-nonbreakable-space { /* c-nonbreakable-space-face */ background-color: #ff0000; font-weight: bold; } .org-calendar-today { /* calendar-today */ text-decoration: underline; } .org-comment { /* font-lock-comment-face */ color: #b22222; } .org-comment-delimiter { /* font-lock-comment-delimiter-face */ color: #b22222; } .org-constant { /* font-lock-constant-face */ color: #5f9ea0; } .org-cursor { /* cursor */ background-color: #000000; } .org-default { /* default */ color: #000000; background-color: #ffffff; } .org-diary { /* diary */ color: #ff0000; } .org-doc { /* font-lock-doc-face */ color: #bc8f8f; } .org-escape-glyph { /* escape-glyph */ color: #a52a2a; } .org-file-name-shadow { /* file-name-shadow */ color: #7f7f7f; } .org-fixed-pitch { } .org-fringe { /* fringe */ background-color: #f2f2f2; } .org-function-name { /* font-lock-function-name-face */ color: #0000ff; } .org-header-line { /* header-line */ color: #333333; background-color: #e5e5e5; } .org-help-argument-name { /* help-argument-name */ font-style: italic; } .org-highlight { /* highlight */ background-color: #b4eeb4; } .org-holiday { /* holiday */ background-color: #ffc0cb; } .org-info-header-node { /* info-header-node */ color: #a52a2a; font-weight: bold; font-style: italic; } .org-info-header-xref { /* info-header-xref */ color: #0000ff; text-decoration: underline; } .org-info-menu-header { /* info-menu-header */ font-weight: bold; } .org-info-menu-star { /* info-menu-star */ color: #ff0000; } .org-info-node { /* info-node */ color: #a52a2a; font-weight: bold; font-style: italic; } .org-info-title-1 { /* info-title-1 */ font-size: 172%; font-weight: bold; } 
.org-info-title-2 { /* info-title-2 */ font-size: 144%; font-weight: bold; } .org-info-title-3 { /* info-title-3 */ font-size: 120%; font-weight: bold; } .org-info-title-4 { /* info-title-4 */ font-weight: bold; } .org-info-xref { /* info-xref */ color: #0000ff; text-decoration: underline; } .org-isearch { /* isearch */ color: #b0e2ff; background-color: #cd00cd; } .org-italic { /* italic */ font-style: italic; } .org-keyword { /* font-lock-keyword-face */ color: #a020f0; } .org-lazy-highlight { /* lazy-highlight */ background-color: #afeeee; } .org-link { /* link */ color: #0000ff; text-decoration: underline; } .org-link-visited { /* link-visited */ color: #8b008b; text-decoration: underline; } .org-match { /* match */ background-color: #ffff00; } .org-menu { } .org-message-cited-text { /* message-cited-text */ color: #ff0000; } .org-message-header-cc { /* message-header-cc */ color: #191970; } .org-message-header-name { /* message-header-name */ color: #6495ed; } .org-message-header-newsgroups { /* message-header-newsgroups */ color: #00008b; font-weight: bold; font-style: italic; } .org-message-header-other { /* message-header-other */ color: #4682b4; } .org-message-header-subject { /* message-header-subject */ color: #000080; font-weight: bold; } .org-message-header-to { /* message-header-to */ color: #191970; font-weight: bold; } .org-message-header-xheader { /* message-header-xheader */ color: #0000ff; } .org-message-mml { /* message-mml */ color: #228b22; } .org-message-separator { /* message-separator */ color: #a52a2a; } .org-minibuffer-prompt { /* minibuffer-prompt */ color: #0000cd; } .org-mm-uu-extract { /* mm-uu-extract */ color: #006400; background-color: #ffffe0; } .org-mode-line { /* mode-line */ color: #000000; background-color: #bfbfbf; } .org-mode-line-buffer-id { /* mode-line-buffer-id */ font-weight: bold; } .org-mode-line-highlight { } .org-mode-line-inactive { /* mode-line-inactive */ color: #333333; background-color: #e5e5e5; } .org-mouse { 
/* mouse */ background-color: #000000; } .org-negation-char { } .org-next-error { /* next-error */ background-color: #eedc82; } .org-nobreak-space { /* nobreak-space */ color: #a52a2a; text-decoration: underline; } .org-org-agenda-date { /* org-agenda-date */ color: #0000ff; } .org-org-agenda-date-weekend { /* org-agenda-date-weekend */ color: #0000ff; font-weight: bold; } .org-org-agenda-restriction-lock { /* org-agenda-restriction-lock */ background-color: #ffff00; } .org-org-agenda-structure { /* org-agenda-structure */ color: #0000ff; } .org-org-archived { /* org-archived */ color: #7f7f7f; } .org-org-code { /* org-code */ color: #7f7f7f; } .org-org-column { /* org-column */ background-color: #e5e5e5; } .org-org-column-title { /* org-column-title */ background-color: #e5e5e5; font-weight: bold; text-decoration: underline; } .org-org-date { /* org-date */ color: #a020f0; text-decoration: underline; } .org-org-done { /* org-done */ color: #228b22; font-weight: bold; } .org-org-drawer { /* org-drawer */ color: #0000ff; } .org-org-ellipsis { /* org-ellipsis */ color: #b8860b; text-decoration: underline; } .org-org-formula { /* org-formula */ color: #b22222; } .org-org-headline-done { /* org-headline-done */ color: #bc8f8f; } .org-org-hide { /* org-hide */ color: #e5e5e5; } .org-org-latex-and-export-specials { /* org-latex-and-export-specials */ color: #8b4513; } .org-org-level-1 { /* org-level-1 */ color: #0000ff; } .org-org-level-2 { /* org-level-2 */ color: #b8860b; } .org-org-level-3 { /* org-level-3 */ color: #a020f0; } .org-org-level-4 { /* org-level-4 */ color: #b22222; } .org-org-level-5 { /* org-level-5 */ color: #228b22; } .org-org-level-6 { /* org-level-6 */ color: #5f9ea0; } .org-org-level-7 { /* org-level-7 */ color: #da70d6; } .org-org-level-8 { /* org-level-8 */ color: #bc8f8f; } .org-org-link { /* org-link */ color: #a020f0; text-decoration: underline; } .org-org-property-value { } .org-org-scheduled-previously { /* org-scheduled-previously */ color: 
#b22222; } .org-org-scheduled-today { /* org-scheduled-today */ color: #006400; } .org-org-sexp-date { /* org-sexp-date */ color: #a020f0; } .org-org-special-keyword { /* org-special-keyword */ color: #bc8f8f; } .org-org-table { /* org-table */ color: #0000ff; } .org-org-tag { /* org-tag */ font-weight: bold; } .org-org-target { /* org-target */ text-decoration: underline; } .org-org-time-grid { /* org-time-grid */ color: #b8860b; } .org-org-todo { /* org-todo */ color: #ff0000; } .org-org-upcoming-deadline { /* org-upcoming-deadline */ color: #b22222; } .org-org-verbatim { /* org-verbatim */ color: #7f7f7f; text-decoration: underline; } .org-org-warning { /* org-warning */ color: #ff0000; font-weight: bold; } .org-outline-1 { /* outline-1 */ color: #0000ff; } .org-outline-2 { /* outline-2 */ color: #b8860b; } .org-outline-3 { /* outline-3 */ color: #a020f0; } .org-outline-4 { /* outline-4 */ color: #b22222; } .org-outline-5 { /* outline-5 */ color: #228b22; } .org-outline-6 { /* outline-6 */ color: #5f9ea0; } .org-outline-7 { /* outline-7 */ color: #da70d6; } .org-outline-8 { /* outline-8 */ color: #bc8f8f; } .outline-text-1, .outline-text-2, .outline-text-3, .outline-text-4, .outline-text-5, .outline-text-6 { /* Add more spacing between section. Padding, so that folding with org-info.js works as expected. 
*/ } .org-preprocessor { /* font-lock-preprocessor-face */ color: #da70d6; } .org-query-replace { /* query-replace */ color: #b0e2ff; background-color: #cd00cd; } .org-regexp-grouping-backslash { /* font-lock-regexp-grouping-backslash */ font-weight: bold; } .org-regexp-grouping-construct { /* font-lock-regexp-grouping-construct */ font-weight: bold; } .org-region { /* region */ background-color: #eedc82; } .org-rmail-highlight { } .org-scroll-bar { /* scroll-bar */ background-color: #bfbfbf; } .org-secondary-selection { /* secondary-selection */ background-color: #ffff00; } .org-shadow { /* shadow */ color: #7f7f7f; } .org-show-paren-match { /* show-paren-match */ background-color: #40e0d0; } .org-show-paren-mismatch { /* show-paren-mismatch */ color: #ffffff; background-color: #a020f0; } .org-string { /* font-lock-string-face */ color: #bc8f8f; } .org-texinfo-heading { /* texinfo-heading */ color: #0000ff; } .org-tool-bar { /* tool-bar */ color: #000000; background-color: #bfbfbf; } .org-tooltip { /* tooltip */ color: #000000; background-color: #ffffe0; } .org-trailing-whitespace { /* trailing-whitespace */ background-color: #ff0000; } .org-type { /* font-lock-type-face */ color: #228b22; } .org-underline { /* underline */ text-decoration: underline; } .org-variable-name { /* font-lock-variable-name-face */ color: #b8860b; } .org-variable-pitch { } .org-vertical-border { } .org-warning { /* font-lock-warning-face */ color: #ff0000; font-weight: bold; } .rss_box {} .rss_title, rss_title a {} .rss_items {} .rss_item a:link, .rss_item a:visited, .rss_item a:active {} .rss_item a:hover {} .rss_date {} #postamble { padding: .3em; margin-bottom: 1em; border: 1px solid gray; background-color: #eeeeee; } #show_source {float: right; margin: .7em;} } /* END OF @media all */ @media screen { #table-of-contents { float: right; border: 1px solid #CCC; max-width: 50%; overflow: auto; } } /* END OF @media screen */ /* Date Author ÏÔʾ×ÖÌå¼°±³¾° */ div#postamble { margin: 20px 0px 
20px 0px; font-style: italic; font-size: 11px; font-family:"Courier New", Courier, monospace; background: white; }
klose911/klose911.github.io
html/scheme/tutorial/css/org.css
CSS
apache-2.0
46,276
/*
 * Copyright 2012-2015 org.opencloudb.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 * (created at 2011-1-23)
 */
package org.opencloudb.paser.ast.expression.function.cast;

import java.util.List;

import org.opencloudb.paser.ast.expression.Expression;
import org.opencloudb.paser.ast.expression.function.FunctionExpression;
import org.opencloudb.paser.visitor.SQLASTVisitor;

/**
 * AST node for the SQL {@code CAST(expr AS type[(typeInfo1[, typeInfo2])])}
 * function expression. The single function argument is the expression being
 * cast; the target type is carried separately in {@link #typeName} plus up to
 * two optional type arguments (e.g. length/precision and scale).
 *
 * @author mycat
 */
public class Cast extends FunctionExpression {

    // Target type name of the cast (e.g. SIGNED, CHAR, DECIMAL); never null.
    private final String typeName;
    // First optional type argument (e.g. length or precision); may be null.
    private final Expression typeInfo1;
    // Second optional type argument (e.g. scale); may be null.
    private final Expression typeInfo2;

    /**
     * @param expr expression being cast; never null
     * @param typeName target type name; must not be null
     * @param typeInfo1 first optional type argument; may be null
     * @param typeInfo2 second optional type argument; may be null
     * @throws IllegalArgumentException if {@code typeName} is null
     */
    public Cast(Expression expr, String typeName, Expression typeInfo1, Expression typeInfo2) {
        super("CAST", wrapList(expr));
        if (null == typeName) {
            throw new IllegalArgumentException("typeName is null");
        }
        this.typeName = typeName;
        this.typeInfo1 = typeInfo1;
        this.typeInfo2 = typeInfo2;
    }

    /**
     * @return the expression being cast; never null
     */
    public Expression getExpr() {
        return getArguments().get(0);
    }

    /**
     * @return the target type name; never null
     */
    public String getTypeName() {
        return typeName;
    }

    /** @return first optional type argument; may be null */
    public Expression getTypeInfo1() {
        return typeInfo1;
    }

    /** @return second optional type argument; may be null */
    public Expression getTypeInfo2() {
        return typeInfo2;
    }

    /**
     * Not supported: CAST carries type information (typeName, typeInfo1/2)
     * beyond the plain argument list, so it cannot be reconstructed from
     * arguments alone.
     *
     * @throws UnsupportedOperationException always
     */
    @Override
    public FunctionExpression constructFunction(List<Expression> arguments) {
        // Fixed message: previously said "char" (copy-paste from the Char
        // function class); this class is CAST.
        throw new UnsupportedOperationException("function of cast has special arguments");
    }

    @Override
    public void accept(SQLASTVisitor visitor) {
        visitor.visit(this);
    }
}
mingfly/opencloudb
src/main/java/org/opencloudb/paser/ast/expression/function/cast/Cast.java
Java
apache-2.0
2,239
// Copyright 2012 ESRI
//
// All rights reserved under the copyright laws of the United States
// and applicable international laws, treaties, and conventions.
//
// You may freely redistribute and use this sample code, with or
// without modification, provided you include the original copyright
// notice and use restrictions.
//
// See the use restrictions at http://help.arcgis.com/en/sdk/10.0/usageRestrictions.htm
//

#import <UIKit/UIKit.h>

// Application delegate for the WeatherInfo sample app. Adopts
// UIApplicationDelegate to receive application lifecycle callbacks and owns
// the app's main window.
@interface WeatherInfoSampleAppDelegate : NSObject <UIApplicationDelegate>

// Main application window; connected as an outlet (IBOutlet) from the nib.
@property (nonatomic, strong) IBOutlet UIWindow *window;

@end
sugar2010/arcgis-runtime-samples-ios
WeatherInfoSample/objective-c/Classes/WeatherInfoSampleAppDelegate.h
C
apache-2.0
609
// // Copyright 2019 The AMP HTML Authors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS-IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the license. // // Runs webkit html5 test datasets and validates parser. #include <array> #include <fstream> #include <iostream> #include <sstream> #include <string> #include <utility> #include <vector> #include "gtest/gtest.h" #include "atomutil.h" #include "defer.h" #include "fileutil.h" #include "node.h" #include "parser.h" #include "renderer.h" #include "strings.h" #include "testconstants.h" #include "tokenizer.h" using namespace htmlparser; // Represents a single test case. struct TestCaseData { std::string text; std::string want; std::string context; bool error; // For debugging and test failure logs. std::string ToString() { std::stringstream ss; ss << "Original: \n"; ss << "-----\n"; ss << text; ss << "Parsed: \n"; ss << "-----\n"; ss << want; ss << "Context: \n"; ss << "-----\n"; ss << context; return ss.str(); } }; // Reads the file stream until any of the char in stop_chars is encountered. // Returns string of all the characters being read (including stop_char). // Returns false if error encountered during read operation. // If EOF is encountered without seeing any stop_chars, all the characters // are copied to buffer as if EOF is one of the stop_chars. 
std::string ReadUntil(std::ifstream* fd, const std::string& stop_chars) {
  if (!fd->good()) return "";
  std::stringbuf buffer;
  while (!fd->eof()) {
    char c = fd->get();
    // We also want stop char, so that data can be accumulated as is including
    // line breaks etc.
    buffer.sputc(c);
    if (stop_chars.find(c) != std::string::npos) break;
  }
  return buffer.str();
}

// Reads one test case from the dataset stream. Expects the section order
// #data, #errors, optional #document-fragment, #document. Returns a
// TestCaseData with error=true if a required section marker is missing.
TestCaseData ReadParseTest(std::ifstream* fd) {
  TestCaseData test_case;
  std::string line = ReadUntil(fd, "\n");
  // Read raw html text that this test is going to parse.
  if (line != "#data\n") {
    return {"", "", "", true};
  }
  // Accumulate all data until the beginning of next marker.
  std::stringbuf text_buffer;
  while (fd->peek() != '#') {
    line = ReadUntil(fd, "\n");
    text_buffer.sputn(line.c_str(), line.size());
  }
  line = ReadUntil(fd, "\n");
  if (line != "#errors\n") {
    return {"", "", "", true};
  }
  // Accumulate all data until the beginning of next marker.
  // The expected-errors section itself is not validated by this test.
  while (fd->peek() != '#') {
    line = ReadUntil(fd, "\n");
    // Ignore these lines.
  }
  line = ReadUntil(fd, "\n");
  std::stringbuf context_buffer;
  // This is optional, do not error if empty.
  if (line == "#document-fragment\n") {
    // Following line represents document fragment.
    line = ReadUntil(fd, "\n");
    context_buffer.sputn(line.c_str(), line.size());
    line = ReadUntil(fd, "\n");
  }
  if (line != "#document\n") {
    return {"", "", "", true};
  }
  // Accumulate the expected tree dump. in_quote tracks multi-line quoted
  // text nodes so that blank lines inside quotes do not terminate the case.
  bool in_quote = false;
  std::stringbuf want_buffer;
  while (fd->peek() != '#') {
    line = ReadUntil(fd, "\n");
    std::string trimmed(line);
    Strings::Trim(&trimmed, "| \n");
    if (trimmed.size() > 0) {
      if (line.front() == '|' && trimmed.front() == '"') {
        in_quote = true;
      }
      if (trimmed.back() == '"' &&
          !(line.front() == '|' && trimmed.size() == 1)) {
        in_quote = false;
      }
    }
    // A blank line outside a quoted text node ends the test case.
    if (line.empty() ||
        (line.size() == 1 && line.front() == '\n' && !in_quote)) {
      break;
    }
    want_buffer.sputn(line.c_str(), line.size());
  }
  // Strip the single trailing newline from the accumulated #data text.
  std::string text = text_buffer.str();
  if (!text.empty() && text.back() == '\n') {
    text.erase(text.end() - 1);
  }
  // Strip the trailing newline from the fragment context, if any.
  std::string context = context_buffer.str();
  if (!context.empty() && context.back() == '\n') {
    context.erase(context.end() - 1);
  }
  return {text, want_buffer.str(), context, false};
}

// Writes the "| " prefix plus two spaces per nesting level, matching the
// webkit tree-dump format.
void DumpIndent(std::stringbuf* buffer, int level) {
  buffer->sputn("| ", 2 /* size */);
  for (int i = 0; i < level; ++i) {
    buffer->sputn("  ", 2);
  }
}

// Recursively serializes one node (and its subtree) into the webkit
// tree-dump format used by the expected #document sections. Returns an
// Error for node types that must not appear in a parsed document.
std::optional<Error> DumpLevel(Node* node, std::stringbuf* buffer, int level) {
  DumpIndent(buffer, level);
  level++;
  switch (node->Type()) {
    case NodeType::ERROR_NODE:
      return error("unexpected ErrorNode");
      break;
    case NodeType::DOCUMENT_NODE:
      return error("unexpected DocumentNode");
      break;
    case NodeType::ELEMENT_NODE: {
      // Unknown atoms keep their raw tag text; known atoms are rendered via
      // the atom table.
      std::string tag_name = node->DataAtom() == Atom::UNKNOWN ?
          node->Data().data() : AtomUtil::ToString(node->DataAtom());
      if (!node->NameSpace().empty()) {
        buffer->sputc('<');
        buffer->sputn(node->NameSpace().data(), node->NameSpace().size());
        buffer->sputc(' ');
        buffer->sputn(tag_name.c_str(), tag_name.size());
        buffer->sputc('>');
      } else {
        buffer->sputc('<');
        buffer->sputn(tag_name.c_str(), tag_name.size());
        buffer->sputc('>');
      }
      // The dump format lists attributes sorted by (namespace, key).
      std::vector<Attribute> attributes;
      attributes.assign(node->Attributes().begin(), node->Attributes().end());
      std::sort(attributes.begin(), attributes.end(),
                [&](Attribute& a1, Attribute& a2) {
                  if (a1.name_space != a2.name_space) {
                    return a1.name_space < a2.name_space;
                  }
                  return a1.key < a2.key;
                });
      for (const auto& attr : attributes) {
        std::string ns = attr.name_space;
        std::string k = attr.key;
        std::string v = attr.value;
        buffer->sputc('\n');
        DumpIndent(buffer, level);
        if (ns != "") {
          buffer->sputn(ns.c_str(), ns.size());
          buffer->sputc(' ');
          buffer->sputn(k.c_str(), k.size());
          buffer->sputc('=');
          buffer->sputc('"');
          buffer->sputn(v.c_str(), v.size());
          buffer->sputc('"');
        } else {
          buffer->sputn(k.c_str(), k.size());
          buffer->sputc('=');
          buffer->sputc('"');
          buffer->sputn(v.c_str(), v.size());
          buffer->sputc('"');
        }
      }
      // <template> elements (HTML namespace) get an extra "content" line and
      // one more indent level for their contents.
      if (node->NameSpace().empty() && node->DataAtom() == Atom::TEMPLATE) {
        buffer->sputc('\n');
        DumpIndent(buffer, level);
        level++;
        buffer->sputn("content", 7);
      }
      break;
    }
    case NodeType::TEXT_NODE:
      buffer->sputc('"');
      buffer->sputn(node->Data().data(), node->Data().size());
      buffer->sputc('"');
      break;
    case NodeType::COMMENT_NODE: {
      buffer->sputn("<!-- ", 5);
      buffer->sputn(node->Data().data(), node->Data().size());
      buffer->sputn(" -->", 4);
      break;
    }
    case NodeType::DOCTYPE_NODE: {
      buffer->sputn("<!DOCTYPE ", 10);
      buffer->sputn(node->Data().data(), node->Data().size());
      // Public/system identifiers, when present, are rendered as two quoted
      // strings after the doctype name.
      if (!node->Attributes().empty()) {
        std::string p;
        std::string s;
        for (const auto& attr : node->Attributes()) {
          if (attr.key == "public") p = attr.value;
          else if (attr.key == "system") s = attr.value;
        }
        if (!p.empty() || !s.empty()) {
          buffer->sputn(" \"", 2);
          buffer->sputn(p.c_str(), p.size());
          buffer->sputn("\" \"", 3);
          buffer->sputn(s.c_str(), s.size());
          buffer->sputc('"');
        }
      }
      buffer->sputc('>');
      break;
    }
    case NodeType::SCOPE_MARKER_NODE:
      return error("unexpected ScopeMarkerNode");
    default:
      return error("unknown node type");
  }
  buffer->sputc('\n');
  // Recurse into children at the incremented indent level.
  for (Node* c = node->FirstChild(); c; c = c->NextSibling()) {
    auto err = DumpLevel(c, buffer, level);
    if (err) {
      return err;
    }
  }
  return std::nullopt;
}

// Serializes the children of a (document) node; a null or childless node
// produces no output and no error.
std::optional<Error> Dump(Node* node, std::stringbuf* buffer) {
  if (!node || !(node->FirstChild())) {
    return std::nullopt;
  }
  int level = 0;
  for (Node* c = node->FirstChild(); c; c = c->NextSibling()) {
    auto err = DumpLevel(c, buffer, level);
    if (err) {
      return err;
    }
  }
  return std::nullopt;
}

// Runs every test case in every webkit tree-construction dataset file:
// parses the #data HTML (as a fragment when a context is given, otherwise as
// a full document), checks tree consistency, and compares the dumped tree
// against the expected #document section.
TEST(HTMLDatasetTest, WebkitData) {
  // Files excluded from testing due to remaining TODOs in the parser.
  std::vector<std::string> files_excluded_from_test = {
      "testdata/tree-construction/" "adoption01.dat",
      "testdata/tree-construction/" "foreign-fragment.dat",
  };

  int num_test_cases = 0;
  for (auto pattern : htmlparser::testing::kTestDataDirs) {
    std::string full_path = pattern.data();
    std::vector<std::string> filenames;
    EXPECT_TRUE(FileUtil::Glob(full_path, &filenames))
        << "Error opening files: " << pattern;
    for (auto& path : filenames) {
      // Skip test files that should be excluded from testing.
      if (std::find(files_excluded_from_test.begin(),
                    files_excluded_from_test.end(),
                    path) != files_excluded_from_test.end()) continue;

      std::cerr << "Processing testdata: " << path << std::endl;

      std::ifstream fd(path);
      defer(fd.close());
      EXPECT_TRUE(fd.good()) << "Error opening file path: " << path;

      ParseOptions options = {
          .scripting = true,
          .frameset_ok = true,
          .allow_deprecated_tags = true
      };

      while (!fd.eof()) {
        TestCaseData test_case{ReadParseTest(&fd)};
        if (test_case.error) break;

        std::string html = test_case.text;
        if (!test_case.context.empty()) {
          // Fragment parsing: build the context element, parse the fragment,
          // and re-root the resulting nodes under a fresh document node.
          Atom context_atom = AtomUtil::ToAtom(test_case.context);
          auto context_node = std::unique_ptr<Node>(
              Node::make_node(NodeType::ELEMENT_NODE, context_atom));
          if (context_atom == Atom::UNKNOWN) {
            context_node->SetData(test_case.context);
          }
          std::vector<Node*> nodes =
              ParseFragmentWithOptions(html, options, context_node.get());
          auto doc = std::unique_ptr<Node>(
              Node::make_node(NodeType::DOCUMENT_NODE));
          for (Node* node : nodes) {
            doc->AppendChild(node);
          }
          auto err = CheckTreeConsistency(doc.get());
          EXPECT_FALSE(err) << err.value().error_msg;
          std::stringbuf output_buffer;
          Dump(doc.get(), &output_buffer);
          std::string output = output_buffer.str();
          EXPECT_EQ(output, test_case.want) << test_case.ToString();
          num_test_cases++;
        } else {
          // Whole-document parsing.
          auto doc = ParseWithOptions(html, options);
          auto err = CheckTreeConsistency(doc.get());
          EXPECT_FALSE(err) << err.value().error_msg;
          std::stringbuf output_buffer;
          Dump(doc.get(), &output_buffer);
          std::string output = output_buffer.str();
          EXPECT_EQ(output, test_case.want) << test_case.ToString();
          num_test_cases++;
        }
      }
    }
  }

  // Hardcoded, whenever dataset changes. Ensures no new tests are added, or
  // old tests removed, without maintainers knowledge.
  EXPECT_EQ(794, num_test_cases);
};
adup-tech/amphtml
validator/htmlparser/htmldataset_test.cc
C++
apache-2.0
11,379
/**
 * Copyright 2013-2020 the original author or authors from the JHipster project.
 *
 * This file is part of the JHipster project, see http://www.jhipster.tech/
 * for more information.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/** Enumeration of the JHipster application types, keyed by upper-case name. */
const ApplicationTypes = {
  MONOLITH: 'monolith',
  MICROSERVICE: 'microservice',
  UAA: 'uaa',
  GATEWAY: 'gateway',
};

/**
 * Tells whether the given name denotes a known application type.
 * The lookup is case-insensitive.
 *
 * @param {string} applicationType - candidate type name; may be falsy.
 * @returns {boolean} true when the name maps to a declared application type.
 */
ApplicationTypes.exists = applicationType => {
  if (!applicationType) {
    return false;
  }
  return Boolean(ApplicationTypes[applicationType.toUpperCase()]);
};

module.exports = ApplicationTypes;
cbornet/generator-jhipster
jdl/jhipster/application-types.js
JavaScript
apache-2.0
1,037
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark

import java.io.{File, FileInputStream}
import java.security.{KeyStore, NoSuchAlgorithmException}
import javax.net.ssl.{KeyManager, KeyManagerFactory, SSLContext, TrustManager, TrustManagerFactory}

import com.typesafe.config.{Config, ConfigFactory, ConfigValueFactory}
import org.eclipse.jetty.util.ssl.SslContextFactory

/**
 * SSLOptions class is a common container for SSL configuration options. It offers methods to
 * generate specific objects to configure SSL for different communication protocols.
 *
 * SSLOptions is intended to provide the maximum common set of SSL settings, which are supported
 * by the protocol, which it can generate the configuration for. Since Akka doesn't support client
 * authentication with SSL, SSLOptions cannot support it either.
 *
 * @param enabled             enables or disables SSL; if it is set to false, the rest of the
 *                            settings are disregarded
 * @param keyStore            a path to the key-store file
 * @param keyStorePassword    a password to access the key-store file
 * @param keyPassword         a password to access the private key in the key-store
 * @param trustStore          a path to the trust-store file
 * @param trustStorePassword  a password to access the trust-store file
 * @param protocol            SSL protocol (remember that SSLv3 was compromised) supported by Java
 * @param enabledAlgorithms   a set of encryption algorithms that may be used
 */
private[spark] case class SSLOptions(
    enabled: Boolean = false,
    keyStore: Option[File] = None,
    keyStorePassword: Option[String] = None,
    keyPassword: Option[String] = None,
    trustStore: Option[File] = None,
    trustStorePassword: Option[String] = None,
    protocol: Option[String] = None,
    enabledAlgorithms: Set[String] = Set.empty)
    extends Logging {

  /**
   * Creates a Jetty SSL context factory according to the SSL settings represented by this object.
   * Returns None when SSL is disabled.
   */
  def createJettySslContextFactory(): Option[SslContextFactory] = {
    if (enabled) {
      val sslContextFactory = new SslContextFactory()

      // Only the options that are actually set are applied to the factory.
      keyStore.foreach(file => sslContextFactory.setKeyStorePath(file.getAbsolutePath))
      trustStore.foreach(file => sslContextFactory.setTrustStore(file.getAbsolutePath))
      keyStorePassword.foreach(sslContextFactory.setKeyStorePassword)
      trustStorePassword.foreach(sslContextFactory.setTrustStorePassword)
      keyPassword.foreach(sslContextFactory.setKeyManagerPassword)
      protocol.foreach(sslContextFactory.setProtocol)
      // Restrict ciphers to those the current JVM security provider supports.
      sslContextFactory.setIncludeCipherSuites(supportedAlgorithms.toSeq: _*)

      Some(sslContextFactory)
    } else {
      None
    }
  }

  /**
   * Creates an Akka configuration object which contains all the SSL settings represented by this
   * object. It can be used then to compose the ultimate Akka configuration.
   * Returns None when SSL is disabled. Unset options are written as empty strings.
   */
  def createAkkaConfig: Option[Config] = {
    import scala.collection.JavaConversions._
    if (enabled) {
      Some(ConfigFactory.empty()
        .withValue("akka.remote.netty.tcp.security.key-store",
          ConfigValueFactory.fromAnyRef(keyStore.map(_.getAbsolutePath).getOrElse("")))
        .withValue("akka.remote.netty.tcp.security.key-store-password",
          ConfigValueFactory.fromAnyRef(keyStorePassword.getOrElse("")))
        .withValue("akka.remote.netty.tcp.security.trust-store",
          ConfigValueFactory.fromAnyRef(trustStore.map(_.getAbsolutePath).getOrElse("")))
        .withValue("akka.remote.netty.tcp.security.trust-store-password",
          ConfigValueFactory.fromAnyRef(trustStorePassword.getOrElse("")))
        .withValue("akka.remote.netty.tcp.security.key-password",
          ConfigValueFactory.fromAnyRef(keyPassword.getOrElse("")))
        .withValue("akka.remote.netty.tcp.security.random-number-generator",
          ConfigValueFactory.fromAnyRef(""))
        .withValue("akka.remote.netty.tcp.security.protocol",
          ConfigValueFactory.fromAnyRef(protocol.getOrElse("")))
        .withValue("akka.remote.netty.tcp.security.enabled-algorithms",
          ConfigValueFactory.fromIterable(supportedAlgorithms.toSeq))
        .withValue("akka.remote.netty.tcp.enable-ssl",
          ConfigValueFactory.fromAnyRef(true)))
    } else {
      None
    }
  }

  /*
   * The supportedAlgorithms set is a subset of the enabledAlgorithms that
   * are supported by the current Java security provider for this protocol.
   */
  private val supportedAlgorithms: Set[String] = {
    var context: SSLContext = null
    try {
      // protocol.orNull is null when no protocol is configured, which makes
      // getInstance throw NPE — caught below to fall back to the default.
      context = SSLContext.getInstance(protocol.orNull)
      /* The set of supported algorithms does not depend upon the keys, trust, or rng,
       * although they will influence which algorithms are eventually used.
       */
      context.init(null, null, null)
    } catch {
      case npe: NullPointerException =>
        logDebug("No SSL protocol specified")
        context = SSLContext.getDefault
      case nsa: NoSuchAlgorithmException =>
        logDebug(s"No support for requested SSL protocol ${protocol.get}")
        context = SSLContext.getDefault
    }

    val providerAlgorithms = context.getServerSocketFactory.getSupportedCipherSuites.toSet

    // Log which algorithms we are discarding
    (enabledAlgorithms &~ providerAlgorithms).foreach { cipher =>
      logDebug(s"Discarding unsupported cipher $cipher")
    }

    enabledAlgorithms & providerAlgorithms
  }

  /** Returns a string representation of this SSLOptions with all the passwords masked. */
  override def toString: String = s"SSLOptions{enabled=$enabled, " +
      s"keyStore=$keyStore, keyStorePassword=${keyStorePassword.map(_ => "xxx")}, " +
      s"trustStore=$trustStore, trustStorePassword=${trustStorePassword.map(_ => "xxx")}, " +
      s"protocol=$protocol, enabledAlgorithms=$enabledAlgorithms}"
}

private[spark] object SSLOptions extends Logging {

  /** Resolves SSLOptions settings from a given Spark configuration object at a given namespace.
   *
   * The following settings are allowed:
   * $ - `[ns].enabled` - `true` or `false`, to enable or disable SSL respectively
   * $ - `[ns].keyStore` - a path to the key-store file; can be relative to the current directory
   * $ - `[ns].keyStorePassword` - a password to the key-store file
   * $ - `[ns].keyPassword` - a password to the private key
   * $ - `[ns].trustStore` - a path to the trust-store file; can be relative to the current
   *                         directory
   * $ - `[ns].trustStorePassword` - a password to the trust-store file
   * $ - `[ns].protocol` - a protocol name supported by a particular Java version
   * $ - `[ns].enabledAlgorithms` - a comma separated list of ciphers
   *
   * For a list of protocols and ciphers supported by particular Java versions, you may go to
   * [[https://blogs.oracle.com/java-platform-group/entry/diagnosing_tls_ssl_and_https Oracle
   * blog page]].
   *
   * You can optionally specify the default configuration. If you do, for each setting which is
   * missing in SparkConf, the corresponding setting is used from the default configuration.
   *
   * @param conf Spark configuration object where the settings are collected from
   * @param ns the namespace name
   * @param defaults the default configuration
   * @return [[org.apache.spark.SSLOptions]] object
   */
  def parse(conf: SparkConf, ns: String, defaults: Option[SSLOptions] = None): SSLOptions = {
    // Each setting falls back to the corresponding field of `defaults` when
    // it is missing from the SparkConf namespace.
    val enabled = conf.getBoolean(s"$ns.enabled", defaultValue = defaults.exists(_.enabled))
    val keyStore = conf.getOption(s"$ns.keyStore").map(new File(_))
        .orElse(defaults.flatMap(_.keyStore))
    val keyStorePassword = conf.getOption(s"$ns.keyStorePassword")
        .orElse(defaults.flatMap(_.keyStorePassword))
    val keyPassword = conf.getOption(s"$ns.keyPassword")
        .orElse(defaults.flatMap(_.keyPassword))
    val trustStore = conf.getOption(s"$ns.trustStore").map(new File(_))
        .orElse(defaults.flatMap(_.trustStore))
    val trustStorePassword = conf.getOption(s"$ns.trustStorePassword")
        .orElse(defaults.flatMap(_.trustStorePassword))
    val protocol = conf.getOption(s"$ns.protocol")
        .orElse(defaults.flatMap(_.protocol))
    // Comma-separated cipher list; blank entries are dropped.
    val enabledAlgorithms = conf.getOption(s"$ns.enabledAlgorithms")
        .map(_.split(",").map(_.trim).filter(_.nonEmpty).toSet)
        .orElse(defaults.map(_.enabledAlgorithms))
        .getOrElse(Set.empty)

    new SSLOptions(
      enabled,
      keyStore,
      keyStorePassword,
      keyPassword,
      trustStore,
      trustStorePassword,
      protocol,
      enabledAlgorithms)
  }
}
ArvinDevel/onlineAggregationOnSparkV2
core/src/main/scala/org/apache/spark/SSLOptions.scala
Scala
apache-2.0
9,430
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/*!
 * \file legalize.cc
 * \brief Converts an expr to another expr. This pass can be used to transform an op based on its
 * shape, dtype or layout to another op or a sequence of ops.
 */

#include <tvm/relay/expr_functor.h>
#include <tvm/relay/op_attr_types.h>
#include <tvm/relay/transform.h>
#include <tvm/te/operation.h>

namespace tvm {
namespace relay {

namespace legalize {

// Call registered FTVMLegalize of an op
// Returns the legalized expression
//
// The rewriter is parameterized by the attribute-map name (e.g. a per-target
// legalize registry), so the same machinery can serve multiple legalization passes.
class Legalizer : public ExprRewriter {
 public:
  explicit Legalizer(const std::string& legalize_map_attr_name)
      : legalize_map_attr_name_{legalize_map_attr_name} {}

  // Rewrites a single call node. `post` is the node with already-rewritten
  // arguments; it is returned unchanged unless a registered legalize function
  // produces a replacement.
  Expr Rewrite_(const CallNode* call_node, const Expr& post) override {
    // Get the new_call node without any changes to current call node.
    Call new_call = Downcast<Call>(post);

    // Check if the string is registered.
    if (!Op::HasAttrMap(legalize_map_attr_name_)) {
      return post;
    }

    // Collect the registered legalize function.
    auto fop_legalize = Op::GetAttrMap<FTVMLegalize>(legalize_map_attr_name_);
    auto call_op = call_node->op;
    if (call_op.as<OpNode>()) {
      Op op = Downcast<Op>(call_node->op);

      if (fop_legalize.count(op)) {
        // Collect the new_args.
        tvm::Array<Expr> call_args = new_call->args;

        // Collect input and output dtypes to pass on to Legalize API.
        tvm::Array<tvm::relay::Type> types;
        for (auto arg : call_node->args) {
          types.push_back(arg->checked_type());
        }
        types.push_back(call_node->checked_type());

        // Transform the op by calling the registered legalize function.
        Expr legalized_value = fop_legalize[op](call_node->attrs, call_args, types);

        // Return the new expr if the transformation succeeded.
        if (legalized_value.defined()) {
          // Check that the returned Expr from legalize is CallNode.
          const CallNode* legalized_call_node = legalized_value.as<CallNode>();
          ICHECK(legalized_call_node)
              << "Can only replace the original operator with another call node";
          return legalized_value;
        }
      }
    }

    // Fall through: op not registered, or the legalize function declined (returned
    // an undefined Expr) — keep the post-order-rewritten call as-is.
    return post;
  }

 private:
  std::string legalize_map_attr_name_;
};

// Applies the Legalizer over `expr` in a post-order traversal.
Expr Legalize(const Expr& expr, const std::string& legalize_map_attr_name) {
  auto rewriter = Legalizer(legalize_map_attr_name);
  return PostOrderRewrite(expr, &rewriter);
}

}  // namespace legalize

namespace transform {

// Creates the Legalize function pass (opt level 1, requires InferType because the
// rewriter reads checked_type() of the call and its arguments).
Pass Legalize(const String& legalize_map_attr_name) {
  runtime::TypedPackedFunc<Function(Function, IRModule, PassContext)> pass_func =
      [=](Function f, IRModule m, PassContext pc) {
        return Downcast<Function>(relay::legalize::Legalize(f, legalize_map_attr_name));
      };
  return CreateFunctionPass(pass_func, 1, "Legalize", {"InferType"});
}

TVM_REGISTER_GLOBAL("relay._transform.Legalize").set_body_typed(Legalize);

}  // namespace transform

}  // namespace relay
}  // namespace tvm
tqchen/tvm
src/relay/transforms/legalize.cc
C++
apache-2.0
3,776
/* * Copyright 2018-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.openstacktroubleshoot.util; import org.onlab.packet.Ip4Address; import org.onosproject.net.Device; import org.onosproject.net.DeviceId; import org.onosproject.net.behaviour.ExtensionTreatmentResolver; import org.onosproject.net.device.DeviceService; import org.onosproject.net.flow.instructions.ExtensionPropertyException; import org.onosproject.net.flow.instructions.ExtensionTreatment; import org.onosproject.openstacknetworking.api.InstancePort; import org.onosproject.openstacknetworking.api.OpenstackNetworkService; import org.slf4j.Logger; import static org.onosproject.net.flow.instructions.ExtensionTreatmentType.ExtensionTreatmentTypes.NICIRA_SET_TUNNEL_DST; import static org.slf4j.LoggerFactory.getLogger; /** * Provides common methods to help populating flow rules for troubleshoot app. */ public final class OpenstackTroubleshootUtil { private static final Logger log = getLogger(OpenstackTroubleshootUtil.class); private static final String TUNNEL_DST = "tunnelDst"; private OpenstackTroubleshootUtil() { } /** * Returns tunnel destination extension treatment object. 
* * @param deviceService driver service * @param deviceId device id to apply this treatment * @param remoteIp tunnel destination ip address * @return extension treatment */ public static ExtensionTreatment buildExtension(DeviceService deviceService, DeviceId deviceId, Ip4Address remoteIp) { Device device = deviceService.getDevice(deviceId); if (device != null && !device.is(ExtensionTreatmentResolver.class)) { log.error("The extension treatment is not supported"); return null; } if (device == null) { return null; } ExtensionTreatmentResolver resolver = device.as(ExtensionTreatmentResolver.class); ExtensionTreatment treatment = resolver.getExtensionInstruction(NICIRA_SET_TUNNEL_DST.type()); try { treatment.setPropertyValue(TUNNEL_DST, remoteIp); return treatment; } catch (ExtensionPropertyException e) { log.warn("Failed to get tunnelDst extension treatment for {}", deviceId); return null; } } /** * Returns segment ID of the given instance port where a VM is attached. * * @param service openstack network service * @param port instance port * @return segment ID */ public static long getSegId(OpenstackNetworkService service, InstancePort port) { return Long.parseLong(service.segmentId(port.networkId())); } }
gkatsikas/onos
apps/openstacktroubleshoot/app/src/main/java/org/onosproject/openstacktroubleshoot/util/OpenstackTroubleshootUtil.java
Java
apache-2.0
3,347
/*
 * Copyright (c) 2015, 2019, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package java.lang.module;

/**
 * Thrown when reading a module descriptor and the module descriptor is found
 * to be malformed or otherwise cannot be interpreted as a module descriptor.
 *
 * @see ModuleDescriptor#read
 * @since 9
 */
public class InvalidModuleDescriptorException extends RuntimeException {
    // Fixed serialized-form identifier; must stay stable across releases.
    @java.io.Serial
    private static final long serialVersionUID = 4863390386809347380L;

    /**
     * Constructs an {@code InvalidModuleDescriptorException} with no detail
     * message.
     */
    public InvalidModuleDescriptorException() {
    }

    /**
     * Constructs an {@code InvalidModuleDescriptorException} with the
     * specified detail message.
     *
     * @param msg
     *        The detail message; can be {@code null}
     */
    public InvalidModuleDescriptorException(String msg) {
        super(msg);
    }
}
mirkosertic/Bytecoder
classlib/java.base/src/main/resources/META-INF/modules/java.base/classes/java/lang/module/InvalidModuleDescriptorException.java
Java
apache-2.0
2,064
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.fineract.portfolio.interestratechart.incentive;

import java.math.BigDecimal;

import org.apache.fineract.portfolio.common.domain.ConditionType;

/**
 * Base class for attribute-based incentive calculations. Subclasses compute
 * the actual incentive amount; this class supplies the shared comparison
 * logic that decides whether an incentive applies at all.
 */
public abstract class AttributeIncentiveCalculation {

    /**
     * Computes the incentive for the given DTO.
     *
     * @param incentiveDTO the incentive input data
     * @return the calculated incentive amount
     */
    public abstract BigDecimal calculateIncentive(final IncentiveDTO incentiveDTO);

    /**
     * Decides whether an incentive applies by comparing the actual value
     * against the configured attribute value under the given condition.
     *
     * @param conditionType the comparison to perform
     * @param attributeValue the configured threshold value
     * @param actualValue the observed value
     * @return {@code true} when the comparison holds, {@code false} otherwise
     *         (including for unrecognized condition types)
     */
    public boolean applyIncentive(ConditionType conditionType, Long attributeValue, Long actualValue) {
        final int comparison = actualValue.compareTo(attributeValue);
        switch (conditionType) {
            case LESSTHAN:
                return comparison < 0;
            case EQUAL:
                return comparison == 0;
            case NOT_EQUAL:
                return comparison != 0;
            case GRETERTHAN:
                return comparison > 0;
            default:
                return false;
        }
    }
}
RanjithKumar5550/RanMifos
fineract-provider/src/main/java/org/apache/fineract/portfolio/interestratechart/incentive/AttributeIncentiveCalculation.java
Java
apache-2.0
1,860
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.metron.rest.controller;

import org.adrianwalker.multilinestring.Multiline;
import org.apache.metron.rest.service.SensorIndexingConfigService;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;

import static org.apache.metron.rest.MetronRestConstants.TEST_PROFILE;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.httpBasic;
import static org.springframework.security.test.web.servlet.setup.SecurityMockMvcConfigurers.springSecurity;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

/**
 * Integration test for the sensor indexing config REST endpoints
 * (/api/v1/sensor/indexing/config): verifies that all endpoints require
 * authentication and exercises the full create/read/list/delete lifecycle.
 */
@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment= SpringBootTest.WebEnvironment.RANDOM_PORT)
@ActiveProfiles(TEST_PROFILE)
public class SensorIndexingConfigControllerIntegrationTest {

  /**
   {
  "index": "broTest",
  "batchSize": 1
   }
   */
  @Multiline
  public static String broJson;

  @Autowired
  private SensorIndexingConfigService sensorIndexingConfigService;

  @Autowired
  private WebApplicationContext wac;

  private MockMvc mockMvc;

  private String sensorIndexingConfigUrl = "/api/v1/sensor/indexing/config";
  // Credentials accepted by the TEST_PROFILE security setup.
  private String user = "user";
  private String password = "password";

  @Before
  public void setup() throws Exception {
    // Build a MockMvc instance with Spring Security filters applied.
    this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).apply(springSecurity()).build();
  }

  /**
   * Every endpoint must reject unauthenticated requests with 401.
   */
  @Test
  public void testSecurity() throws Exception {
    this.mockMvc.perform(post(sensorIndexingConfigUrl).with(csrf()).contentType(MediaType.parseMediaType("application/json;charset=UTF-8")).content(broJson))
            .andExpect(status().isUnauthorized());

    this.mockMvc.perform(get(sensorIndexingConfigUrl + "/broTest"))
            .andExpect(status().isUnauthorized());

    this.mockMvc.perform(get(sensorIndexingConfigUrl))
            .andExpect(status().isUnauthorized());

    this.mockMvc.perform(delete(sensorIndexingConfigUrl + "/broTest").with(csrf()))
            .andExpect(status().isUnauthorized());
  }

  /**
   * Full lifecycle: starting from an empty config set, create (201), update
   * (200), read single and list, then delete (200) and verify 404 afterwards.
   */
  @Test
  public void test() throws Exception {
    // Start from a clean slate in case a previous run left state behind.
    sensorIndexingConfigService.delete("broTest");

    // Listing with no configs returns an empty JSON object.
    this.mockMvc.perform(get(sensorIndexingConfigUrl).with(httpBasic(user,password)))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
            .andExpect(content().bytes("{}".getBytes()));

    // First POST creates the config -> 201 Created.
    this.mockMvc.perform(post(sensorIndexingConfigUrl + "/broTest").with(httpBasic(user, password)).with(csrf()).contentType(MediaType.parseMediaType("application/json;charset=UTF-8")).content(broJson))
            .andExpect(status().isCreated())
            .andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
            .andExpect(jsonPath("$.index").value("broTest"))
            .andExpect(jsonPath("$.batchSize").value(1));

    // Second POST with the same name updates it -> 200 OK.
    this.mockMvc.perform(post(sensorIndexingConfigUrl + "/broTest").with(httpBasic(user, password)).with(csrf()).contentType(MediaType.parseMediaType("application/json;charset=UTF-8")).content(broJson))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
            .andExpect(jsonPath("$.index").value("broTest"))
            .andExpect(jsonPath("$.batchSize").value(1));

    // GET of a single config echoes the stored values.
    this.mockMvc.perform(get(sensorIndexingConfigUrl + "/broTest").with(httpBasic(user,password)))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
            .andExpect(jsonPath("$.index").value("broTest"))
            .andExpect(jsonPath("$.batchSize").value(1));

    // List view contains the config keyed by sensor name.
    this.mockMvc.perform(get(sensorIndexingConfigUrl).with(httpBasic(user,password)))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
            .andExpect(jsonPath("$[?(@.broTest.index == 'broTest' &&" +
                    "@.broTest.batchSize == 1" +
                    ")]").exists());

    // First DELETE succeeds; the config is then gone (404 on GET and DELETE).
    this.mockMvc.perform(delete(sensorIndexingConfigUrl + "/broTest").with(httpBasic(user,password)).with(csrf()))
            .andExpect(status().isOk());

    this.mockMvc.perform(get(sensorIndexingConfigUrl + "/broTest").with(httpBasic(user,password)))
            .andExpect(status().isNotFound());

    this.mockMvc.perform(delete(sensorIndexingConfigUrl + "/broTest").with(httpBasic(user,password)).with(csrf()))
            .andExpect(status().isNotFound());

    // List no longer contains the deleted sensor.
    this.mockMvc.perform(get(sensorIndexingConfigUrl).with(httpBasic(user,password)))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
            .andExpect(jsonPath("$[?(@.sensorTopic == 'broTest')]").doesNotExist());

    // Cleanup for subsequent tests.
    sensorIndexingConfigService.delete("broTest");
  }
}
dlyle65535/metron
metron-interface/metron-rest/src/test/java/org/apache/metron/rest/controller/SensorIndexingConfigControllerIntegrationTest.java
Java
apache-2.0
6,791
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.facebook.buck.core.test.rule;

import com.facebook.buck.core.util.immutables.BuckStyleValue;
import com.facebook.buck.rules.macros.StringWithMacros;
import com.google.common.base.Preconditions;
import java.util.Map;
import org.immutables.value.Value;

/**
 * Freeform JSON to be used for the test protocol. The JSON is composed of {@link
 * com.facebook.buck.rules.macros.StringWithMacros}.
 *
 * <p>The JSON map keys must be {@link StringWithMacros}, and not other complicated collection
 * structures.
 */
@BuckStyleValue
public abstract class TestRunnerSpec {

  public abstract Object getData();

  @Value.Check
  protected void check() {
    // Only JSON-representable shapes are allowed: a map, an iterable,
    // a single StringWithMacros, a Number, or a Boolean.
    Object data = getData();
    boolean isSupportedShape =
        data instanceof Map
            || data instanceof Iterable
            || data instanceof StringWithMacros
            || data instanceof Number
            || data instanceof Boolean;
    Preconditions.checkState(isSupportedShape);
  }

  /** Creates a spec wrapping the given freeform JSON payload. */
  public static TestRunnerSpec of(Object data) {
    return ImmutableTestRunnerSpec.of(data);
  }
}
facebook/buck
src/com/facebook/buck/core/test/rule/TestRunnerSpec.java
Java
apache-2.0
1,737
/*
 * JBoss, Home of Professional Open Source
 * Copyright 2011 Red Hat Inc. and/or its affiliates and other contributors
 * as indicated by the @authors tag. All rights reserved.
 * See the copyright.txt in the distribution for a
 * full listing of individual contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jboss.arquillian.container.spi.event;

import org.jboss.arquillian.container.spi.Container;
import org.jboss.arquillian.container.spi.client.container.DeployableContainer;
import org.jboss.arquillian.container.spi.client.deployment.Deployment;

/**
 * DeploymentEvent
 *
 * Base class for container events that concern a specific {@link Deployment}.
 *
 * @author <a href="mailto:aslak@redhat.com">Aslak Knutsen</a>
 * @version $Revision: $
 */
public abstract class DeploymentEvent extends ContainerControlEvent {
    // Set once in the constructor and never reassigned, so keep it final.
    private final Deployment deployment;

    public DeploymentEvent(Container container, Deployment deployment) {
        super(container);
        this.deployment = deployment;
    }

    /**
     * @return the {@link DeployableContainer} of the container this event targets
     */
    public DeployableContainer<?> getDeployableContainer() {
        return getContainer().getDeployableContainer();
    }

    /**
     * @return the deployment this event refers to
     */
    public Deployment getDeployment() {
        return deployment;
    }
}
rhusar/arquillian-core
container/spi/src/main/java/org/jboss/arquillian/container/spi/event/DeploymentEvent.java
Java
apache-2.0
1,714
/** * SoundJS * Visit http://createjs.com/ for documentation, updates and examples. * * Copyright (c) 2011 gskinner.com, inc. * * Distributed under the terms of the MIT license. * http://www.opensource.org/licenses/mit-license.html * * This notice shall be included in all copies or substantial portions of the Software. **/ this.createjs=this.createjs||{}; (function(){function b(){throw"SoundJS cannot be instantiated";}function f(a,b){this.init(a,b)}function d(){}b.DELIMITER="|";b.AUDIO_TIMEOUT=8E3;b.INTERRUPT_ANY="any";b.INTERRUPT_EARLY="early";b.INTERRUPT_LATE="late";b.INTERRUPT_NONE="none";b.PLAY_INITED="playInited";b.PLAY_SUCCEEDED="playSucceeded";b.PLAY_INTERRUPTED="playInterrupted";b.PLAY_FINISHED="playFinished";b.PLAY_FAILED="playFailed";b.activePlugin=null;b.muted=false;b.pluginsRegistered=false;b.masterVolume=1;b.instances=[];b.instanceHash= {};b.idHash=null;b.defaultSoundInstance=null;b.getPreloadHandlers=function(){return{callback:b.proxy(b.initLoad,b),types:["sound"],extensions:["mp3","ogg","wav"]}};b.registerPlugins=function(a){b.pluginsRegistered=true;for(var g=0,c=a.length;g<c;g++){var e=a[g];if(e!=null&&e.isSupported())return b.activePlugin=new e,true}return false};b.registerPlugin=function(a){b.pluginsRegistered=true;if(a==null)return false;return a.isSupported()?(b.activePlugin=new a,true):false};b.isReady=function(){return b.activePlugin!= null};b.getCapabilities=function(){return b.activePlugin==null?null:b.activePlugin.capabilities};b.getCapability=function(a){return b.activePlugin==null?null:b.activePlugin.capabilities[a]};b.initLoad=function(a,g,c,e){if(!b.checkPlugin(true))return false;a=b.parsePath(a,g,c,e);if(a==null)return false;if(c!=null){if(b.idHash==null)b.idHash={};b.idHash[c]=a.src}f.create(a.src,e);c=b.activePlugin.register(a.src,e);if(c!=null){if(c.tag!=null)a.tag=c.tag;else if(c.src)a.src=c.src;if(c.completeHandler!= null)a.handler=c.completeHandler}return a};b.parsePath=function(a,g,c,e){for(var 
a=a.split(b.DELIMITER),g={type:g||"sound",id:c,data:e,handler:b.handleSoundReady},c=false,e=b.getCapabilities(),h=0,d=a.length;h<d;h++){var f=a[h],i=f.lastIndexOf("."),k=f.substr(i+1).toLowerCase(),i=f.substr(0,i).split("/").pop();switch(k){case "mp3":e.mp3&&(c=true);break;case "ogg":e.ogg&&(c=true);break;case "wav":e.wav&&(c=true)}if(c)return g.name=i,g.src=f,g.extension=k,g}return null};b.play=function(a,g,c,e,h,f, d){if(!b.checkPlugin(true))return b.defaultSoundInstance;a=b.getSrcFromId(a);a=b.activePlugin.create(a);try{a.mute(b.muted)}catch(i){}b.playInstance(a,g,c,e,h,f,d)||a.playFailed();return a};b.playInstance=function(a,g,c,e,h,f,d){g=g||b.INTERRUPT_NONE;c==null&&(c=0);e==null&&(e=0);h==null&&(h=0);f==null&&(f=1);d==null&&(d=0);if(c==0){if(!b.beginPlaying(a,g,e,h,f,d))return false}else setTimeout(function(){b.beginPlaying(a,g,e,h,f,d)},c);this.instances.push(a);this.instanceHash[a.uniqueId]=a;return true}; b.beginPlaying=function(a,b,c,e,d,j){if(!f.add(a,b))return false;return!a.beginPlaying(c,e,d,j)?(b=this.instances.indexOf(a),b>-1&&this.instances.splice(b,1),delete this.instanceHash[a.uniqueId],false):true};b.checkPlugin=function(a){return b.activePlugin==null&&(a&&!b.pluginsRegistered&&b.registerPlugin(createjs.HTMLAudioPlugin),b.activePlugin==null)?false:true};b.getSrcFromId=function(a){return b.idHash==null||b.idHash[a]==null?a:b.idHash[a]};b.setVolume=function(a,g){if(Number(a)==null)return false; a=Math.max(0,Math.min(1,a));return b.tellAllInstances("setVolume",g,a)};b.getMasterVolume=function(){return b.masterVolume};b.setMasterVolume=function(a){b.masterVolume=a;return b.tellAllInstances("setMasterVolume",null,a)};b.setMute=function(a){this.muted=a;return b.tellAllInstances("mute",null,a)};b.pause=function(a){return b.tellAllInstances("pause",a)};b.resume=function(a){return b.tellAllInstances("resume",a)};b.stop=function(a){return b.tellAllInstances("stop",a)};b.getInstanceById=function(a){return this.instanceHash[a]}; 
b.playFinished=function(a){f.remove(a);a=this.instances.indexOf(a);a>-1&&this.instances.splice(a,1)};b.tellAllInstances=function(a,b,c){if(this.activePlugin==null)return false;for(var b=this.getSrcFromId(b),e=this.instances.length-1;e>=0;e--){var d=this.instances[e];if(!(b!=null&&d.src!=b))switch(a){case "pause":d.pause();break;case "resume":d.resume();break;case "setVolume":d.setVolume(c);break;case "setMasterVolume":d.setMasterVolume(c);break;case "mute":d.mute(c);break;case "stop":d.stop();break; case "setPan":d.setPan(c)}}return true};b.proxy=function(a,b){return function(){return a.apply(b,arguments)}};createjs.SoundJS=b;f.channels={};f.create=function(a,b){var c=f.get(a);c==null?f.channels[a]=new f(a,b):c.max+=b};f.add=function(a,b){var c=f.get(a.src);return c==null?false:c.add(a,b)};f.remove=function(a){var b=f.get(a.src);if(b==null)return false;b.remove(a);return true};f.get=function(a){return f.channels[a]};f.prototype={src:null,max:null,length:0,init:function(a,b){this.src=a;this.max= b||1;this.instances=[]},get:function(a){return this.instances[a]},add:function(a,b){if(!this.getSlot(b,a))return false;this.instances.push(a);this.length++;return true},remove:function(a){a=this.instances.indexOf(a);if(a==-1)return false;this.instances.splice(a,1);this.length--;return true},getSlot:function(a){for(var g,c,e=0,d=this.max||100;e<d;e++){g=this.get(e);if(g==null)return true;else if(a==b.INTERRUPT_NONE)continue;if(e==0)c=g;else if(g.playState==b.PLAY_FINISHED||g==b.PLAY_INTERRUPTED||g== b.PLAY_FAILED)c=g;else if(a==b.INTERRUPT_EARLY&&g.getPosition()<c.getPosition()||a==b.INTERRUPT_LATE&&g.getPosition()>c.getPosition())c=g}return c!=null?(c.interrupt(),this.remove(c),true):false},toString:function(){return"[SoundJS SoundChannel]"}};b.defaultSoundInstance=new 
function(){this.isDefault=true;this.pause=this.resume=this.play=this.beginPlaying=this.cleanUp=this.interrupt=this.stop=this.setMasterVolume=this.setVolume=this.mute=this.setPan=this.getPosition=this.setPosition=this.playFailed= function(){return false};this.getVolume=this.getPan=this.getDuration=function(){return 0};this.playState=b.PLAY_FAILED;this.toString=function(){return"[SoundJS Default Sound Instance]"}};d.init=function(){var a=navigator.userAgent;d.isFirefox=a.indexOf("Firefox")>-1;d.isOpera=window.opera!=null;d.isIOS=a.indexOf("iPod")>-1||a.indexOf("iPhone")>-1||a.indexOf("iPad")>-1};d.init();createjs.SoundJS.BrowserDetect=d})();this.createjs=this.createjs||{}; (function(){function b(){this.init()}function f(a){this.init(a)}function d(a){this.init(a)}b.MAX_INSTANCES=30;b.capabilities=null;b.lastId=0;b.AUDIO_READY="canplaythrough";b.AUDIO_ENDED="ended";b.AUDIO_ERROR="error";b.AUDIO_STALLED="stalled";b.fillChannels=false;b.isSupported=function(){if(createjs.SoundJS.BrowserDetect.isIOS)return false;b.generateCapabilities();return b.tag==null?false:true};b.generateCapabilities=function(){if(b.capabilities==null){var a=b.tag=document.createElement("audio");if(a.canPlayType== null)return null;b.capabilities={panning:false,volume:true,mp3:a.canPlayType("audio/mp3")!="no"&&a.canPlayType("audio/mp3")!="",ogg:a.canPlayType("audio/ogg")!="no"&&a.canPlayType("audio/ogg")!="",mpeg:a.canPlayType("audio/mpeg")!="no"&&a.canPlayType("audio/mpeg")!="",wav:a.canPlayType("audio/wav")!="no"&&a.canPlayType("audio/wav")!="",channels:b.MAX_INSTANCES}}};b.prototype={capabilities:null,FT:0.0010,channels:null,init:function(){this.capabilities=b.capabilities;this.channels={}},register:function(a, b){for(var c=d.get(a),e,f=0,j=b||1;f<j;f++)e=this.createTag(a),c.add(e);return{tag:e}},createTag:function(a){var b=document.createElement("audio");b.preload=false;b.src=a;return b},create:function(a){a=new f(a);a.owner=this;return 
a},toString:function(){return"[HTMLAudioPlugin]"}};createjs.HTMLAudioPlugin=b;f.prototype={src:null,uniqueId:-1,playState:null,owner:null,loaded:false,lastInterrupt:createjs.SoundJS.INTERRUPT_NONE,offset:0,delay:0,volume:1,pan:0,remainingLoops:0,delayTimeout:-1,tag:null, muted:false,paused:false,onComplete:null,onLoop:null,onReady:null,onPlayFailed:null,onPlayInterrupted:null,endedHandler:null,readyHandler:null,stalledHandler:null,init:function(a){this.uniqueId=createjs.HTMLAudioPlugin.lastId++;this.src=a;this.endedHandler=createjs.SoundJS.proxy(this.handleSoundComplete,this);this.readyHandler=createjs.SoundJS.proxy(this.handleSoundReady,this);this.stalledHandler=createjs.SoundJS.proxy(this.handleSoundStalled,this)},cleanUp:function(){var a=this.tag;if(a!=null){a.pause(); try{a.currentTime=0}catch(b){}a.removeEventListener(createjs.HTMLAudioPlugin.AUDIO_ENDED,this.endedHandler,false);a.removeEventListener(createjs.HTMLAudioPlugin.AUDIO_READY,this.readyHandler,false);d.setInstance(this.src,a);this.tag=null}window.createjs!=null&&createjs.SoundJS.playFinished(this)},interrupt:function(){if(this.tag!=null){this.playState=createjs.SoundJS.PLAY_INTERRUPTED;if(this.onPlayInterrupted)this.onPlayInterrupted(this);this.cleanUp();this.paused=false}},play:function(a,b,c,e,d, f){this.cleanUp();createjs.SoundJS.playInstance(this,a,b,c,e,d,f)},beginPlaying:function(a,b,c){if(window.createjs!=null){var e=this.tag=d.getInstance(this.src);if(e==null)return this.playFailed(),-1;e.addEventListener(createjs.HTMLAudioPlugin.AUDIO_ENDED,this.endedHandler,false);this.offset=a;this.volume=c;this.updateVolume();this.remainingLoops=b;e.readyState!==4?(e.addEventListener(createjs.HTMLAudioPlugin.AUDIO_READY,this.readyHandler,false),e.addEventListener(createjs.HTMLAudioPlugin.AUDIO_STALLED, this.stalledHandler,false),e.load()):this.handleSoundReady(null);return 
1}},handleSoundStalled:function(){if(this.onPlayFailed!=null)this.onPlayFailed(this);this.cleanUp()},handleSoundReady:function(){if(window.createjs!=null)if(this.playState=createjs.SoundJS.PLAY_SUCCEEDED,this.paused=false,this.tag.removeEventListener(createjs.HTMLAudioPlugin.AUDIO_READY,this.readyHandler,false),this.offset>=this.getDuration())this.playFailed();else{if(this.offset>0)this.tag.currentTime=this.offset*0.0010;if(this.remainingLoops== -1)this.tag.loop=true;this.tag.play()}},pause:function(){this.paused=true;return this.tag!=null?(this.tag.pause(),false):true},resume:function(){this.paused=false;return this.tag!=null?(this.tag.play(),false):true},stop:function(){this.pause();this.playState=createjs.SoundJS.PLAY_FINISHED;this.cleanUp();return true},setMasterVolume:function(){this.updateVolume();return true},setVolume:function(a){this.volume=a;this.updateVolume();return true},updateVolume:function(){return this.tag!=null?(this.tag.volume= this.muted?0:this.volume*createjs.SoundJS.masterVolume,true):false},getVolume:function(){return this.volume},mute:function(a){this.muted=a;this.updateVolume();return true},setPan:function(){return false},getPan:function(){return 0},getPosition:function(){return this.tag==null?0:this.tag.currentTime*1E3},setPosition:function(a){if(this.tag==null)return false;try{this.tag.currentTime=a*0.0010}catch(b){return false}return true},getDuration:function(){return this.tag==null?0:this.tag.duration*1E3},handleSoundComplete:function(){if(this.remainingLoops!= 0){if(this.remainingLoops--,this.tag.play(),this.onLoop!=null)this.onLoop(this)}else if(window.createjs!=null){this.playState=createjs.SoundJS.PLAY_FINISHED;if(this.onComplete!=null)this.onComplete(this);this.cleanUp()}},playFailed:function(){if(window.createjs!=null){this.playState=createjs.SoundJS.PLAY_FAILED;if(this.onPlayFailed!=null)this.onPlayFailed(this);this.cleanUp()}},toString:function(){return"[HTMLAudioPlugin SoundInstance]"}};d.channels={};d.get=function(a){var 
b=d.channels[a];b==null&& (b=d.channels[a]=new d(a));return b};d.getInstance=function(a){a=d.channels[a];return a==null?null:a.get()};d.setInstance=function(a,b){var c=d.channels[a];return c==null?null:c.set(b)};d.prototype={src:null,length:0,available:0,tags:null,init:function(a){this.src=a;this.tags=[]},add:function(a){this.tags.push(a);this.length++;this.available=this.tags.length},get:function(){if(this.tags.length==0)return null;this.available=this.tags.length;var a=this.tags.pop();document.body.appendChild(a);return a}, set:function(a){this.tags.indexOf(a)==-1&&this.tags.push(a);document.body.removeChild(a);this.available=this.tags.length},toString:function(){return"[HTMLAudioPlugin TagChannel]"}}})();
Koava/Shtacker
jQueryShtacker/jQueryShtacker/TypeScriptShtacker/public/javascripts/soundjs-NEXT.min.js
JavaScript
apache-2.0
12,246
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN"> <html> <head> <meta http-equiv="Content-Type" content="text/html; charset=utf-8"> <title>Параметры экрана AJAX Spider </title> </head> <body> <h1>Параметры экрана AJAX Spider </h1> Этот экран позволяет настроить опции <a href="concepts.html">AJAX-паука</a>. AJAX Spider — это надстройка для краулера под названием Crawljax. Надстройка устанавливает локальный прокси в ZAP для связи с Crawljax. AJAX Spider позволяет сканировать веб-приложения, написанные на AJAX, гораздо глубже, чем родной Паук. Используйте AJAX Spider, если у вас есть веб-приложения, написанные на AJAX. Вам также следует использовать и родной Spider для полного покрытия веб-приложения (например, для покрытия комментариев HTML). <br/> <br/> <h2>Параметры конфигурации</h2> <br/> <table border ="2"> <tr> <th>Поле</th> <th>Детали</th> <th>По умолчанию</th> </tr> <tr> <td>браузер</td> <td>AJAX Spider использует внешний браузер для сканирования целевого сайта. Вы можете указать, какой из них вы хотите использовать. Дополнительные сведения о поддерживаемых браузерах см. страницы справки надстройки "Selenium". </td> <td align="center">Firefox Headless</td> </tr> <tr> <td>Количество открытых окон браузера </td> <td>Вы можете настроить количество окон, которые будет использовать AJAX Spider. Чем больше окон, тем быстрее будет процесс. </td> <td align = "center">1</td> </tr> <tr> <td>Максимальная глубина обхода </td> <td>Максимальная глубина, которую может достичь краулер. Ноль означает неограниченную глубину. </td> <td align = "center">10</td> </tr> <tr> <td>Максимальное количество состояний сканирования </td> <td>Максимальное количество состояний, которое должен просканировать сканер. Ноль означает неограниченное количество состояний сканирования. </td> <td align = "center"> 0 (неограниченно) </td> </tr> <tr> <td>Максимальная продолжительность </td> <td>Максимальное время работы сканера. Ноль означает неограниченное время работы. 
</td> <td align = "center">60 минут</td> </tr> <tr> <td>Время ожидания события </td> <td>Время ожидания после запуска события на стороне клиента. </td> <td align = "center">1000 мс </td> </tr> <tr> <td>Время ожидания перезагрузки </td> <td>Время ожидания после загрузки URL. </td> <td align = "center">1000 мс </td> </tr> <tr> <td>Нажмите Элементы один раз </td> <td>Если этот параметр включен, сканер пытается взаимодействовать с каждым элементом (например, щелкнуть мышью) только один раз. Если это не установлено, сканер попытается щелкнуть несколько раз. Отключение этого параметра является более строгим, но может занять значительно больше времени. </td> <td align = "center">Истинный </td> </tr> <tr> <td>Используйте случайные значения в полях формы </td> <td>Когда включено, вставляет случайные значения в поля формы. В противном случае используются пустые значения. </td> <td align = "center">Истинный </td> </tr> <tr> <td>Нажмите «Только элементы по умолчанию». </td> <td>Если этот параметр включен, во время сканирования будут нажиматься только элементы «a», «button» и «input». В противном случае он использует приведенную ниже таблицу, чтобы определить, какие элементы будут нажаты. Для более глубокого анализа отключите это и настройте интерактивные элементы в таблице. </td> <td align = "center">Истинный </td> </tr> <tr> <td>Выберите элементы для нажатия во время сканирования (таблица) </td> <td>Список элементов для обхода. Эта таблица применяется только в том случае, если не включен параметр «щелкать только элементы по умолчанию». Используйте «включить все» для более глубокого анализа, хотя это может занять несколько больше времени. </td> <td align = "center">Все включено </td> </tr> <tr> <td>Разрешенные ресурсы (таблица) </td> <td>Список разрешенных ресурсов. Разрешенные ресурсы всегда извлекаются, даже если они выходят за рамки, что позволяет включать необходимые ресурсы (например, сценарии) от третьих сторон. 
</td> <td align = "center"></td> </tr> </table> <h2>Смотрите также </h2> <table> <tr> <td>&nbsp;&nbsp;&nbsp;&nbsp;</td> <td><a href="concepts.html">AJAX-паук</a></td> <td>для обзора вкладки AJAX Spider </td> </tr> <tr> <td>&nbsp;&nbsp;&nbsp;&nbsp;</td> <td><a href="tab.html">Вкладка «Паук AJAX» </a></td> <td>для обзора вкладки AJAX Spider </td> </tr> <tr> <td>&nbsp;&nbsp;&nbsp;&nbsp;</td> <td><a href="scandialog.html">Диалоговое окно AJAX Паук</a></td> <td>для обзора диалогового окна AJAX Spider </td> </tr> </table> </body> </html>
kingthorin/zap-extensions
addOns/spiderAjax/src/main/javahelp/org/zaproxy/zap/extension/spiderAjax/resources/help_ru_RU/contents/options.html
HTML
apache-2.0
7,170
/* * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.client.impl.protocol.task.multimap; import com.hazelcast.client.impl.protocol.ClientMessage; import com.hazelcast.client.impl.protocol.codec.MultiMapValueCountCodec; import com.hazelcast.client.impl.protocol.task.AbstractPartitionMessageTask; import com.hazelcast.instance.impl.Node; import com.hazelcast.internal.nio.Connection; import com.hazelcast.multimap.impl.MultiMapService; import com.hazelcast.multimap.impl.operations.CountOperation; import com.hazelcast.security.permission.ActionConstants; import com.hazelcast.security.permission.MultiMapPermission; import com.hazelcast.spi.impl.operationservice.Operation; import java.security.Permission; /** * Client Protocol Task for handling messages with type ID: * {@link com.hazelcast.client.impl.protocol.codec.MultiMapMessageType#MULTIMAP_VALUECOUNT} */ public class MultiMapValueCountMessageTask extends AbstractPartitionMessageTask<MultiMapValueCountCodec.RequestParameters> { public MultiMapValueCountMessageTask(ClientMessage clientMessage, Node node, Connection connection) { super(clientMessage, node, connection); } @Override protected Operation prepareOperation() { CountOperation operation = new CountOperation(parameters.name, parameters.key); operation.setThreadId(parameters.threadId); return operation; } @Override protected MultiMapValueCountCodec.RequestParameters 
decodeClientMessage(ClientMessage clientMessage) { return MultiMapValueCountCodec.decodeRequest(clientMessage); } @Override protected ClientMessage encodeResponse(Object response) { return MultiMapValueCountCodec.encodeResponse((Integer) response); } @Override public String getServiceName() { return MultiMapService.SERVICE_NAME; } @Override public Permission getRequiredPermission() { return new MultiMapPermission(parameters.name, ActionConstants.ACTION_READ); } @Override public String getDistributedObjectName() { return parameters.name; } @Override public String getMethodName() { return "valueCount"; } @Override public Object[] getParameters() { return new Object[]{parameters.key}; } }
jerrinot/hazelcast
hazelcast/src/main/java/com/hazelcast/client/impl/protocol/task/multimap/MultiMapValueCountMessageTask.java
Java
apache-2.0
2,896
# SkyWalking Cross Process Propagation Headers Protocol * Version 3.0 SkyWalking is more akin to an APM system, rather than a common distributed tracing system. SkyWalking's headers are much more complex than those found in a common distributed tracing system. The reason behind their complexity is for better analysis performance of the OAP. You can find many similar mechanisms in other commercial APM systems (some of which are even more complex than ours!). ## Abstract The SkyWalking Cross Process Propagation Headers Protocol v3, also known as the sw8 protocol, is designed for context propagation. ### Standard Header Item The standard header is the minimal requirement for context propagation. * Header Name: `sw8`. * Header Value: 8 fields split by `-`. The length of header value must be less than 2k (default). Example of the value format: `XXXXX-XXXXX-XXXX-XXXX` #### Values Values must include the following segments, and all string type values are in BASE64 encoding. - Required: 1. Sample. 0 or 1. 0 means that the context exists, but it could (and most likely will) be ignored. 1 means this trace needs to be sampled and sent to the backend. 1. Trace ID. **String(BASE64 encoded)**. A literal string that is globally unique. 1. Parent trace segment ID. **String(BASE64 encoded)**. A literal string that is globally unique. 1. Parent span ID. Must be an integer. It begins with 0. This span ID points to the parent span in parent trace segment. 1. Parent service. **String(BASE64 encoded)**. Its length should be no more than 50 UTF-8 characters. 1. Parent service instance. **String(BASE64 encoded)**. Its length should be no more than 50 UTF-8 characters. 1. Parent endpoint. **String(BASE64 encoded)**. The operation name of the first entry span in the parent segment. Its length should be less than 150 UTF-8 characters. 1. Target address of this request used on the client end. **String(BASE64 encoded)**. 
The network address (not necessarily IP + port) used on the client end to access this target service. - Sample values: `1-TRACEID-SEGMENTID-3-PARENT_SERVICE-PARENT_INSTANCE-PARENT_ENDPOINT-IPPORT` ### Extension Header Item The extension header item is designed for advanced features. It provides interaction capabilities between the agents deployed in upstream and downstream services. * Header Name: `sw8-x` * Header Value: Split by `-`. The fields are extendable. #### Values The current value includes the following fields. 1. Tracing Mode. Empty, 0, or 1. Empty or 0 is the default. 1 indicates that all spans generated in this context will skip analysis, `spanObject#skipAnalysis=true`. This context is propagated to upstream by default, unless it is changed in the tracing process. 2. The timestamp of sending on the client end. This is used in async RPC, such as MQ. Once it is set, the consumer end will calculate the latency between sending and receiving, and tag the latency in the span by using key `transmission.latency` automatically.
ascrutae/sky-walking
docs/en/protocols/Skywalking-Cross-Process-Propagation-Headers-Protocol-v3.md
Markdown
apache-2.0
2,978
# Copyright (c) 2015 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo_config import cfg
from oslo_utils import importutils

from cinder import test
import cinder.volume.drivers.fujitsu.eternus_dx_common as eternus_dx_common

CONF = cfg.CONF

FUJITSU_FC_MODULE = ('cinder.volume.drivers.fujitsu.'
                     'eternus_dx_fc.FJDXFCDriver')
FUJITSU_ISCSI_MODULE = ('cinder.volume.drivers.fujitsu.'
                        'eternus_dx_iscsi.FJDXISCSIDriver')


class FJDriverCompatibility(test.TestCase):
    """Checks that legacy Fujitsu driver paths still load the new classes."""

    def setUp(self):
        super(FJDriverCompatibility, self).setUp()
        self.manager = importutils.import_object(CONF.volume_manager)
        # Replace the common driver __init__ so that no real array setup
        # happens; the stub only validates the selected protocol.
        self.stubs.Set(
            eternus_dx_common.FJDXCommon, '__init__', self.fake_init)

    def _load_driver(self, driver):
        # Re-initialize the volume manager so it instantiates the
        # requested driver class.
        self.manager.__init__(volume_driver=driver)

    def _driver_module_name(self):
        # Fully-qualified "module.ClassName" of the driver now in use.
        driver_cls = self.manager.driver.__class__
        return "%s.%s" % (driver_cls.__module__, driver_cls.__name__)

    def fake_init(self, prtcl, configuration=None):
        # Stand-in for FJDXCommon.__init__: only the protocol is checked.
        self.assertTrue(prtcl in ('FC', 'iSCSI'),
                        msg="selected protocol is %s" % prtcl)

    def test_fujitsu_driver_fc_old(self):
        self._load_driver(
            'cinder.volume.drivers.fujitsu_eternus_dx_fc.FJDXFCDriver')
        self.assertEqual(FUJITSU_FC_MODULE, self._driver_module_name())

    def test_fujitsu_driver_fc_new(self):
        self._load_driver(FUJITSU_FC_MODULE)
        self.assertEqual(FUJITSU_FC_MODULE, self._driver_module_name())

    def test_fujitsu_driver_iscsi_old(self):
        self._load_driver(
            'cinder.volume.drivers.fujitsu_eternus_dx_iscsi.FJDXISCSIDriver')
        self.assertEqual(FUJITSU_ISCSI_MODULE, self._driver_module_name())

    def test_fujitsu_driver_iscsi_new(self):
        self._load_driver(FUJITSU_ISCSI_MODULE)
        self.assertEqual(FUJITSU_ISCSI_MODULE, self._driver_module_name())
Akrog/cinder
cinder/tests/test_fujitsu_compatibility.py
Python
apache-2.0
2,530
/* * Copyright (c) 1997, 2006, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package javax.swing; import java.awt.*; import java.beans.PropertyVetoException; import java.beans.PropertyChangeEvent; import javax.swing.border.Border; import java.awt.event.ComponentListener; import java.awt.event.ComponentAdapter; import java.awt.event.ComponentEvent; /** This is an implementation of the <code>DesktopManager</code>. * It currently implements the basic behaviors for managing * <code>JInternalFrame</code>s in an arbitrary parent. * <code>JInternalFrame</code>s that are not children of a * <code>JDesktop</code> will use this component * to handle their desktop-like actions. 
* <p>This class provides a policy for the various JInternalFrame methods, * it is not meant to be called directly rather the various JInternalFrame * methods will call into the DesktopManager.</p> * @see JDesktopPane * @see JInternalFrame * @author David Kloba * @author Steve Wilson */ public class DefaultDesktopManager implements DesktopManager, java.io.Serializable { final static String HAS_BEEN_ICONIFIED_PROPERTY = "wasIconOnce"; final static int DEFAULT_DRAG_MODE = 0; final static int OUTLINE_DRAG_MODE = 1; final static int FASTER_DRAG_MODE = 2; int dragMode = DEFAULT_DRAG_MODE; private transient Rectangle currentBounds = null; private transient Graphics desktopGraphics = null; private transient Rectangle desktopBounds = null; private transient Rectangle[] floatingItems = {}; /** * Set to true when the user actually drags a frame vs clicks on it * to start the drag operation. This is only used when dragging with * FASTER_DRAG_MODE. */ private transient boolean didDrag; /** Normally this method will not be called. If it is, it * try to determine the appropriate parent from the desktopIcon of the frame. * Will remove the desktopIcon from its parent if it successfully adds the frame. */ public void openFrame(JInternalFrame f) { if(f.getDesktopIcon().getParent() != null) { f.getDesktopIcon().getParent().add(f); removeIconFor(f); } } /** * Removes the frame, and, if necessary, the * <code>desktopIcon</code>, from its parent. * @param f the <code>JInternalFrame</code> to be removed */ public void closeFrame(JInternalFrame f) { JDesktopPane d = f.getDesktopPane(); if (d == null) { return; } boolean findNext = f.isSelected(); Container c = f.getParent(); JInternalFrame nextFrame = null; if (findNext) { nextFrame = d.getNextFrame(f); try { f.setSelected(false); } catch (PropertyVetoException e2) { } } if(c != null) { c.remove(f); // Removes the focus. 
c.repaint(f.getX(), f.getY(), f.getWidth(), f.getHeight()); } removeIconFor(f); if(f.getNormalBounds() != null) f.setNormalBounds(null); if(wasIcon(f)) setWasIcon(f, null); if (nextFrame != null) { try { nextFrame.setSelected(true); } catch (PropertyVetoException e2) { } } else if (findNext && d.getComponentCount() == 0) { // It was selected and was the last component on the desktop. d.requestFocus(); } } /** * Resizes the frame to fill its parents bounds. * @param f the frame to be resized */ public void maximizeFrame(JInternalFrame f) { if (f.isIcon()) { try { // In turn calls deiconifyFrame in the desktop manager. // That method will handle the maximization of the frame. f.setIcon(false); } catch (PropertyVetoException e2) { } } else { f.setNormalBounds(f.getBounds()); Rectangle desktopBounds = f.getParent().getBounds(); setBoundsForFrame(f, 0, 0, desktopBounds.width, desktopBounds.height); } // Set the maximized frame as selected. try { f.setSelected(true); } catch (PropertyVetoException e2) { } } /** * Restores the frame back to its size and position prior * to a <code>maximizeFrame</code> call. * @param f the <code>JInternalFrame</code> to be restored */ public void minimizeFrame(JInternalFrame f) { // If the frame was an icon restore it back to an icon. if (f.isIcon()) { iconifyFrame(f); return; } if ((f.getNormalBounds()) != null) { Rectangle r = f.getNormalBounds(); f.setNormalBounds(null); try { f.setSelected(true); } catch (PropertyVetoException e2) { } setBoundsForFrame(f, r.x, r.y, r.width, r.height); } } /** * Removes the frame from its parent and adds its * <code>desktopIcon</code> to the parent. 
* @param f the <code>JInternalFrame</code> to be iconified */ public void iconifyFrame(JInternalFrame f) { JInternalFrame.JDesktopIcon desktopIcon; Container c = f.getParent(); JDesktopPane d = f.getDesktopPane(); boolean findNext = f.isSelected(); desktopIcon = f.getDesktopIcon(); if(!wasIcon(f)) { Rectangle r = getBoundsForIconOf(f); desktopIcon.setBounds(r.x, r.y, r.width, r.height); setWasIcon(f, Boolean.TRUE); } if (c == null || d == null) { return; } if (c instanceof JLayeredPane) { JLayeredPane lp = (JLayeredPane)c; int layer = lp.getLayer(f); lp.putLayer(desktopIcon, layer); } // If we are maximized we already have the normal bounds recorded // don't try to re-record them, otherwise we incorrectly set the // normal bounds to maximized state. if (!f.isMaximum()) { f.setNormalBounds(f.getBounds()); } d.setComponentOrderCheckingEnabled(false); c.remove(f); c.add(desktopIcon); d.setComponentOrderCheckingEnabled(true); c.repaint(f.getX(), f.getY(), f.getWidth(), f.getHeight()); if (findNext) { if (d.selectFrame(true) == null) { // The icon is the last frame. f.restoreSubcomponentFocus(); } } } /** * Removes the desktopIcon from its parent and adds its frame * to the parent. * @param f the <code>JInternalFrame</code> to be de-iconified */ public void deiconifyFrame(JInternalFrame f) { JInternalFrame.JDesktopIcon desktopIcon = f.getDesktopIcon(); Container c = desktopIcon.getParent(); JDesktopPane d = f.getDesktopPane(); if (c != null && d != null) { c.add(f); // If the frame is to be restored to a maximized state make // sure it still fills the whole desktop. 
if (f.isMaximum()) { Rectangle desktopBounds = c.getBounds(); if (f.getWidth() != desktopBounds.width || f.getHeight() != desktopBounds.height) { setBoundsForFrame(f, 0, 0, desktopBounds.width, desktopBounds.height); } } removeIconFor(f); if (f.isSelected()) { f.moveToFront(); f.restoreSubcomponentFocus(); } else { try { f.setSelected(true); } catch (PropertyVetoException e2) {} } } } /** This will activate <b>f</b> moving it to the front. It will * set the current active frame's (if any) * <code>IS_SELECTED_PROPERTY</code> to <code>false</code>. * There can be only one active frame across all Layers. * @param f the <code>JInternalFrame</code> to be activated */ public void activateFrame(JInternalFrame f) { Container p = f.getParent(); Component[] c; JDesktopPane d = f.getDesktopPane(); JInternalFrame currentlyActiveFrame = (d == null) ? null : d.getSelectedFrame(); // fix for bug: 4162443 if(p == null) { // If the frame is not in parent, its icon maybe, check it p = f.getDesktopIcon().getParent(); if(p == null) return; } // we only need to keep track of the currentActive InternalFrame, if any if (currentlyActiveFrame == null){ if (d != null) { d.setSelectedFrame(f);} } else if (currentlyActiveFrame != f) { // if not the same frame as the current active // we deactivate the current if (currentlyActiveFrame.isSelected()) { try { currentlyActiveFrame.setSelected(false); } catch(PropertyVetoException e2) {} } if (d != null) { d.setSelectedFrame(f);} } f.moveToFront(); } // implements javax.swing.DesktopManager public void deactivateFrame(JInternalFrame f) { JDesktopPane d = f.getDesktopPane(); JInternalFrame currentlyActiveFrame = (d == null) ? 
null : d.getSelectedFrame(); if (currentlyActiveFrame == f) d.setSelectedFrame(null); } // implements javax.swing.DesktopManager public void beginDraggingFrame(JComponent f) { setupDragMode(f); if (dragMode == FASTER_DRAG_MODE) { Component desktop = f.getParent(); floatingItems = findFloatingItems(f); currentBounds = f.getBounds(); if (desktop instanceof JComponent) { desktopBounds = ((JComponent)desktop).getVisibleRect(); } else { desktopBounds = desktop.getBounds(); desktopBounds.x = desktopBounds.y = 0; } desktopGraphics = JComponent.safelyGetGraphics(desktop); ((JInternalFrame)f).isDragging = true; didDrag = false; } } private void setupDragMode(JComponent f) { JDesktopPane p = getDesktopPane(f); Container parent = f.getParent(); dragMode = DEFAULT_DRAG_MODE; if (p != null) { String mode = (String)p.getClientProperty("JDesktopPane.dragMode"); if (mode != null && mode.equals("outline")) { dragMode = OUTLINE_DRAG_MODE; } else if (mode != null && mode.equals("faster") && f instanceof JInternalFrame && ((JInternalFrame)f).isOpaque() && (parent == null || parent.isOpaque())) { dragMode = FASTER_DRAG_MODE; } else { if (p.getDragMode() == JDesktopPane.OUTLINE_DRAG_MODE ) { dragMode = OUTLINE_DRAG_MODE; } else if ( p.getDragMode() == JDesktopPane.LIVE_DRAG_MODE && f instanceof JInternalFrame && ((JInternalFrame)f).isOpaque()) { dragMode = FASTER_DRAG_MODE; } else { dragMode = DEFAULT_DRAG_MODE; } } } } private transient Point currentLoc = null; /** * Moves the visible location of the frame being dragged * to the location specified. The means by which this occurs can vary depending * on the dragging algorithm being used. The actual logical location of the frame * might not change until <code>endDraggingFrame</code> is called. 
*/ public void dragFrame(JComponent f, int newX, int newY) { if (dragMode == OUTLINE_DRAG_MODE) { JDesktopPane desktopPane = getDesktopPane(f); if (desktopPane != null){ Graphics g = JComponent.safelyGetGraphics(desktopPane); g.setXORMode(Color.white); if (currentLoc != null) { g.drawRect(currentLoc.x, currentLoc.y, f.getWidth()-1, f.getHeight()-1); } g.drawRect( newX, newY, f.getWidth()-1, f.getHeight()-1); currentLoc = new Point (newX, newY); g.dispose(); } } else if (dragMode == FASTER_DRAG_MODE) { dragFrameFaster(f, newX, newY); } else { setBoundsForFrame(f, newX, newY, f.getWidth(), f.getHeight()); } } // implements javax.swing.DesktopManager public void endDraggingFrame(JComponent f) { if ( dragMode == OUTLINE_DRAG_MODE && currentLoc != null) { setBoundsForFrame(f, currentLoc.x, currentLoc.y, f.getWidth(), f.getHeight() ); currentLoc = null; } else if (dragMode == FASTER_DRAG_MODE) { currentBounds = null; if (desktopGraphics != null) { desktopGraphics.dispose(); desktopGraphics = null; } desktopBounds = null; ((JInternalFrame)f).isDragging = false; } } // implements javax.swing.DesktopManager public void beginResizingFrame(JComponent f, int direction) { setupDragMode(f); } /** * Calls <code>setBoundsForFrame</code> with the new values. 
* @param f the component to be resized * @param newX the new x-coordinate * @param newY the new y-coordinate * @param newWidth the new width * @param newHeight the new height */ public void resizeFrame(JComponent f, int newX, int newY, int newWidth, int newHeight) { if ( dragMode == DEFAULT_DRAG_MODE || dragMode == FASTER_DRAG_MODE ) { setBoundsForFrame(f, newX, newY, newWidth, newHeight); } else { JDesktopPane desktopPane = getDesktopPane(f); if (desktopPane != null){ Graphics g = JComponent.safelyGetGraphics(desktopPane); g.setXORMode(Color.white); if (currentBounds != null) { g.drawRect( currentBounds.x, currentBounds.y, currentBounds.width-1, currentBounds.height-1); } g.drawRect( newX, newY, newWidth-1, newHeight-1); currentBounds = new Rectangle (newX, newY, newWidth, newHeight); g.setPaintMode(); g.dispose(); } } } // implements javax.swing.DesktopManager public void endResizingFrame(JComponent f) { if ( dragMode == OUTLINE_DRAG_MODE && currentBounds != null) { setBoundsForFrame(f, currentBounds.x, currentBounds.y, currentBounds.width, currentBounds.height ); currentBounds = null; } } /** This moves the <code>JComponent</code> and repaints the damaged areas. */ public void setBoundsForFrame(JComponent f, int newX, int newY, int newWidth, int newHeight) { boolean didResize = (f.getWidth() != newWidth || f.getHeight() != newHeight); f.setBounds(newX, newY, newWidth, newHeight); if(didResize) { f.validate(); } } /** Convenience method to remove the desktopIcon of <b>f</b> is necessary. */ protected void removeIconFor(JInternalFrame f) { JInternalFrame.JDesktopIcon di = f.getDesktopIcon(); Container c = di.getParent(); if(c != null) { c.remove(di); c.repaint(di.getX(), di.getY(), di.getWidth(), di.getHeight()); } } /** The iconifyFrame() code calls this to determine the proper bounds * for the desktopIcon. 
*/ protected Rectangle getBoundsForIconOf(JInternalFrame f) { // // Get the icon for this internal frame and its preferred size // JInternalFrame.JDesktopIcon icon = f.getDesktopIcon(); Dimension prefSize = icon.getPreferredSize(); // // Get the parent bounds and child components. // Container c = f.getParent(); if (c == null) { c = f.getDesktopIcon().getParent(); } if (c == null) { /* the frame has not yet been added to the parent; how about (0,0) ?*/ return new Rectangle(0, 0, prefSize.width, prefSize.height); } Rectangle parentBounds = c.getBounds(); Component [] components = c.getComponents(); // // Iterate through valid default icon locations and return the // first one that does not intersect any other icons. // Rectangle availableRectangle = null; JInternalFrame.JDesktopIcon currentIcon = null; int x = 0; int y = parentBounds.height - prefSize.height; int w = prefSize.width; int h = prefSize.height; boolean found = false; while (!found) { availableRectangle = new Rectangle(x,y,w,h); found = true; for ( int i=0; i<components.length; i++ ) { // // Get the icon for this component // if ( components[i] instanceof JInternalFrame ) { currentIcon = ((JInternalFrame)components[i]).getDesktopIcon(); } else if ( components[i] instanceof JInternalFrame.JDesktopIcon ){ currentIcon = (JInternalFrame.JDesktopIcon)components[i]; } else /* found a child that's neither an internal frame nor an icon. I don't believe this should happen, but at present it does and causes a null pointer exception. Even when that gets fixed, this code protects against the npe. hania */ continue; // // If this icon intersects the current location, get next location. // if ( !currentIcon.equals(icon) ) { if ( availableRectangle.intersects(currentIcon.getBounds()) ) { found = false; break; } } } if (currentIcon == null) /* didn't find any useful children above. 
This probably shouldn't happen, but this check protects against an npe if it ever does (and it's happening now) */ return availableRectangle; x += currentIcon.getBounds().width; if ( x + w > parentBounds.width ) { x = 0; y -= h; } } return(availableRectangle); } /** * Stores the bounds of the component just before a maximize call. * @param f the component about to be resized * @param r the normal bounds to be saved away */ protected void setPreviousBounds(JInternalFrame f, Rectangle r) { f.setNormalBounds(r); } /** * Gets the normal bounds of the component prior to the component * being maximized. * @param f the <code>JInternalFrame</code> of interest * @return the normal bounds of the component */ protected Rectangle getPreviousBounds(JInternalFrame f) { return f.getNormalBounds(); } /** * Sets that the component has been iconized and the bounds of the * <code>desktopIcon</code> are valid. */ protected void setWasIcon(JInternalFrame f, Boolean value) { if (value != null) { f.putClientProperty(HAS_BEEN_ICONIFIED_PROPERTY, value); } } /** * Returns <code>true</code> if the component has been iconized * and the bounds of the <code>desktopIcon</code> are valid, * otherwise returns <code>false</code>. 
* * @param f the <code>JInternalFrame</code> of interest * @return <code>true</code> if the component has been iconized; * otherwise returns <code>false</code> */ protected boolean wasIcon(JInternalFrame f) { return (f.getClientProperty(HAS_BEEN_ICONIFIED_PROPERTY) == Boolean.TRUE); } JDesktopPane getDesktopPane( JComponent frame ) { JDesktopPane pane = null; Component c = frame.getParent(); // Find the JDesktopPane while ( pane == null ) { if ( c instanceof JDesktopPane ) { pane = (JDesktopPane)c; } else if ( c == null ) { break; } else { c = c.getParent(); } } return pane; } // =========== stuff for faster frame dragging =================== private void dragFrameFaster(JComponent f, int newX, int newY) { Rectangle previousBounds = new Rectangle(currentBounds.x, currentBounds.y, currentBounds.width, currentBounds.height); // move the frame currentBounds.x = newX; currentBounds.y = newY; if (didDrag) { // Only initiate cleanup if we have actually done a drag. emergencyCleanup(f); } else { didDrag = true; // We reset the danger field as until now we haven't actually // moved the internal frame so we don't need to initiate repaint. 
((JInternalFrame)f).danger = false; } boolean floaterCollision = isFloaterCollision(previousBounds, currentBounds); // System.out.println(previousBounds); JComponent parent = (JComponent)f.getParent(); Rectangle visBounds = previousBounds.intersection(desktopBounds); // System.out.println(previousBounds); // System.out.println(visBounds); RepaintManager currentManager = RepaintManager.currentManager(f); currentManager.beginPaint(); try { if(!floaterCollision) { currentManager.copyArea(parent, desktopGraphics, visBounds.x, visBounds.y, visBounds.width, visBounds.height, newX - previousBounds.x, newY - previousBounds.y, true); } f.setBounds(currentBounds); if(floaterCollision) { // since we couldn't blit we just redraw as fast as possible // the isDragging mucking is to avoid activating emergency // cleanup ((JInternalFrame)f).isDragging = false; parent.paintImmediately(currentBounds); ((JInternalFrame)f).isDragging = true; } // fake out the repaint manager. We'll take care of everything currentManager.markCompletelyClean(parent); currentManager.markCompletelyClean(f); // compute the minimal newly exposed area // if the rects intersect then we use computeDifference. 
Otherwise // we'll repaint the entire previous bounds Rectangle[] dirtyRects = null; if ( previousBounds.intersects(currentBounds) ) { dirtyRects = SwingUtilities.computeDifference(previousBounds, currentBounds); } else { dirtyRects = new Rectangle[1]; dirtyRects[0] = previousBounds; // System.out.println("no intersection"); }; // Fix the damage for (int i = 0; i < dirtyRects.length; i++) { parent.paintImmediately(dirtyRects[i]); } // new areas of blit were exposed if ( !(visBounds.equals(previousBounds)) ) { dirtyRects = SwingUtilities.computeDifference(previousBounds, desktopBounds); for (int i = 0; i < dirtyRects.length; i++) { dirtyRects[i].x += newX - previousBounds.x; dirtyRects[i].y += newY - previousBounds.y; ((JInternalFrame)f).isDragging = false; parent.paintImmediately(dirtyRects[i]); ((JInternalFrame)f).isDragging = true; // System.out.println(dirtyRects[i]); } } } finally { currentManager.endPaint(); } } private boolean isFloaterCollision(Rectangle moveFrom, Rectangle moveTo) { if (floatingItems.length == 0) { // System.out.println("no floaters"); return false; } for (int i = 0; i < floatingItems.length; i++) { boolean intersectsFrom = moveFrom.intersects(floatingItems[i]); if (intersectsFrom) { return true; } boolean intersectsTo = moveTo.intersects(floatingItems[i]); if (intersectsTo) { return true; } } return false; } private Rectangle[] findFloatingItems(JComponent f) { Container desktop = f.getParent(); Component[] children = desktop.getComponents(); int i = 0; for (i = 0; i < children.length; i++) { if (children[i] == f) { break; } } // System.out.println(i); Rectangle[] floaters = new Rectangle[i]; for (i = 0; i < floaters.length; i++) { floaters[i] = children[i].getBounds(); } return floaters; } /** * This method is here to clean up problems associated * with a race condition which can occur when the full contents * of a copyArea's source argument is not available onscreen. 
* This uses brute force to clean up in case of possible damage */ private void emergencyCleanup(final JComponent f) { if ( ((JInternalFrame)f).danger ) { SwingUtilities.invokeLater( new Runnable(){ public void run(){ ((JInternalFrame)f).isDragging = false; f.paintImmediately(0,0, f.getWidth(), f.getHeight()); //finalFrame.repaint(); ((JInternalFrame)f).isDragging = true; // System.out.println("repair complete"); }}); ((JInternalFrame)f).danger = false; } } }
andreagenso/java2scala
test/J2s/java/openjdk-6-src-b27/jdk/src/share/classes/javax/swing/DefaultDesktopManager.java
Java
apache-2.0
27,642
/* * Copyright 2016 Alexey Andreev. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.teavm.backend.wasm.model.expression; import java.util.Objects; public class WasmFloatUnary extends WasmExpression { private WasmFloatType type; private WasmFloatUnaryOperation operation; private WasmExpression operand; public WasmFloatUnary(WasmFloatType type, WasmFloatUnaryOperation operation, WasmExpression operand) { Objects.requireNonNull(type); Objects.requireNonNull(operation); Objects.requireNonNull(operand); this.type = type; this.operation = operation; this.operand = operand; } public WasmFloatType getType() { return type; } public void setType(WasmFloatType type) { Objects.requireNonNull(type); this.type = type; } public WasmFloatUnaryOperation getOperation() { return operation; } public void setOperation(WasmFloatUnaryOperation operation) { Objects.requireNonNull(operation); this.operation = operation; } public WasmExpression getOperand() { return operand; } public void setOperand(WasmExpression operand) { Objects.requireNonNull(operand); this.operand = operand; } @Override public void acceptVisitor(WasmExpressionVisitor visitor) { visitor.visit(this); } }
jtulach/teavm
core/src/main/java/org/teavm/backend/wasm/model/expression/WasmFloatUnary.java
Java
apache-2.0
1,924
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.datasources.v2.orc import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.Path import org.apache.spark.sql.SparkSession import org.apache.spark.sql.catalyst.expressions.Expression import org.apache.spark.sql.connector.read.PartitionReaderFactory import org.apache.spark.sql.execution.datasources.PartitioningAwareFileIndex import org.apache.spark.sql.execution.datasources.v2.FileScan import org.apache.spark.sql.sources.Filter import org.apache.spark.sql.types.StructType import org.apache.spark.sql.util.CaseInsensitiveStringMap import org.apache.spark.util.SerializableConfiguration case class OrcScan( sparkSession: SparkSession, hadoopConf: Configuration, fileIndex: PartitioningAwareFileIndex, dataSchema: StructType, readDataSchema: StructType, readPartitionSchema: StructType, options: CaseInsensitiveStringMap, pushedFilters: Array[Filter], partitionFilters: Seq[Expression] = Seq.empty, dataFilters: Seq[Expression] = Seq.empty) extends FileScan { override def isSplitable(path: Path): Boolean = true override def createReaderFactory(): PartitionReaderFactory = { val broadcastedConf = sparkSession.sparkContext.broadcast( new 
SerializableConfiguration(hadoopConf)) // The partition values are already truncated in `FileScan.partitions`. // We should use `readPartitionSchema` as the partition schema here. OrcPartitionReaderFactory(sparkSession.sessionState.conf, broadcastedConf, dataSchema, readDataSchema, readPartitionSchema, pushedFilters) } override def equals(obj: Any): Boolean = obj match { case o: OrcScan => super.equals(o) && dataSchema == o.dataSchema && options == o.options && equivalentFilters(pushedFilters, o.pushedFilters) case _ => false } override def hashCode(): Int = getClass.hashCode() override def description(): String = { super.description() + ", PushedFilters: " + seqToString(pushedFilters) } override def getMetaData(): Map[String, String] = { super.getMetaData() ++ Map("PushedFilers" -> seqToString(pushedFilters)) } override def withFilters( partitionFilters: Seq[Expression], dataFilters: Seq[Expression]): FileScan = this.copy(partitionFilters = partitionFilters, dataFilters = dataFilters) }
witgo/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/orc/OrcScan.scala
Scala
apache-2.0
3,137
package samples; import java.beans.*; import java.lang.reflect.*; import net.sf.cglib.proxy.*; /** * * @author baliuka */ public class Beans implements MethodInterceptor { private PropertyChangeSupport propertySupport; public void addPropertyChangeListener(PropertyChangeListener listener) { propertySupport.addPropertyChangeListener(listener); } public void removePropertyChangeListener(PropertyChangeListener listener) { propertySupport.removePropertyChangeListener(listener); } public static Object newInstance( Class clazz ){ try{ Beans interceptor = new Beans(); Enhancer e = new Enhancer(); e.setSuperclass(clazz); e.setCallback(interceptor); Object bean = e.create(); interceptor.propertySupport = new PropertyChangeSupport( bean ); return bean; }catch( Throwable e ){ e.printStackTrace(); throw new Error(e.getMessage()); } } static final Class C[] = new Class[0]; static final Object emptyArgs [] = new Object[0]; public Object intercept(Object obj, Method method, Object[] args, MethodProxy proxy) throws Throwable { Object retValFromSuper = null; try { if (!Modifier.isAbstract(method.getModifiers())) { retValFromSuper = proxy.invokeSuper(obj, args); } } finally { String name = method.getName(); if( name.equals("addPropertyChangeListener")) { addPropertyChangeListener((PropertyChangeListener)args[0]); }else if ( name.equals( "removePropertyChangeListener" ) ){ removePropertyChangeListener((PropertyChangeListener)args[0]); } if( name.startsWith("set") && args.length == 1 && method.getReturnType() == Void.TYPE ){ char propName[] = name.substring("set".length()).toCharArray(); propName[0] = Character.toLowerCase( propName[0] ); propertySupport.firePropertyChange( new String( propName ) , null , args[0]); } } return retValFromSuper; } public static void main( String args[] ){ Bean bean = (Bean)newInstance( Bean.class ); bean.addPropertyChangeListener( new PropertyChangeListener(){ public void propertyChange(PropertyChangeEvent evt){ System.out.println(evt); } } ); 
bean.setSampleProperty("TEST"); } }
bric3/nCGLIB
src/test/java/samples/Beans.java
Java
apache-2.0
2,740
/* Copyright JS Foundation and other contributors, http://js.foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "jerryscript-port.h" #include "jerryscript-port-default.h" /** * Pointer to the current context. * Note that it is a global variable, and is not a thread safe implementation. */ static jerry_context_t *current_context_p = NULL; /** * Set the current_context_p as the passed pointer. */ void jerry_port_default_set_current_context (jerry_context_t *context_p) /**< points to the created context */ { current_context_p = context_p; } /* jerry_port_default_set_current_context */ /** * Get the current context. * * @return the pointer to the current context */ jerry_context_t * jerry_port_get_current_context (void) { return current_context_p; } /* jerry_port_get_current_context */
zherczeg/jerryscript
jerry-port/default/default-external-context.c
C
apache-2.0
1,340
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.Collections.Immutable; using System.Diagnostics; using System.Linq; using System.Reflection; using System.Text; using Roslyn.Test.Utilities; namespace Microsoft.CodeAnalysis.Test.Utilities.CodeRuntime { internal sealed class RuntimeAssemblyManager : MarshalByRefObject, IDisposable { private enum Kind { ModuleData, Assembly } private struct AssemblyData { internal ModuleData ModuleData { get; } internal Assembly Assembly { get; } internal Kind Kind => Assembly != null ? Kind.Assembly : Kind.ModuleData; internal ModuleDataId Id => Assembly != null ? new ModuleDataId(Assembly, Assembly.ManifestModule.ModuleVersionId) : ModuleData.Id; internal AssemblyData(ModuleData moduleData) { ModuleData = moduleData; Assembly = null; } internal AssemblyData(Assembly assembly) { ModuleData = default(ModuleData); Assembly = assembly; } } private readonly AppDomainAssemblyCache _assemblyCache = AppDomainAssemblyCache.GetOrCreate(); private readonly Dictionary<string, AssemblyData> _fullNameToAssemblyDataMap; private readonly Dictionary<Guid, AssemblyData> _mvidToAssemblyDataMap; private readonly List<Guid> _mainMvids; // Assemblies loaded by this manager. private readonly HashSet<Assembly> _loadedAssemblies; /// <summary> /// The AppDomain we create to host the RuntimeAssemblyManager will always have the mscorlib /// it was compiled against. It's possible the data we are verifying or running used a slightly /// different mscorlib. Hence we can't do exact MVID matching on them. This tracks the set of /// modules loaded when we started the RuntimeAssemblyManager for which we can't do strict /// comparisons. 
/// </summary> private readonly HashSet<string> _preloadedSet; private bool _containsNetModules; internal IEnumerable<ModuleData> ModuleDatas => _fullNameToAssemblyDataMap.Values.Where(x => x.Kind == Kind.ModuleData).Select(x => x.ModuleData); public RuntimeAssemblyManager() { _fullNameToAssemblyDataMap = new Dictionary<string, AssemblyData>(StringComparer.OrdinalIgnoreCase); _mvidToAssemblyDataMap = new Dictionary<Guid, AssemblyData>(); _loadedAssemblies = new HashSet<Assembly>(); _mainMvids = new List<Guid>(); var currentDomain = AppDomain.CurrentDomain; currentDomain.AssemblyResolve += AssemblyResolve; currentDomain.AssemblyLoad += AssemblyLoad; CLRHelpers.ReflectionOnlyAssemblyResolve += ReflectionOnlyAssemblyResolve; _preloadedSet = new HashSet<string>(StringComparer.OrdinalIgnoreCase); foreach (var assembly in currentDomain.GetAssemblies()) { var assemblyData = new AssemblyData(assembly); _preloadedSet.Add(assemblyData.Id.SimpleName); AddAssemblyData(assemblyData); } } public string DumpAssemblyData(out string dumpDirectory) { return RuntimeUtilities.DumpAssemblyData(ModuleDatas, out dumpDirectory); } public void Dispose() { // clean up our handlers, so that they don't accumulate AppDomain.CurrentDomain.AssemblyResolve -= AssemblyResolve; AppDomain.CurrentDomain.AssemblyLoad -= AssemblyLoad; CLRHelpers.ReflectionOnlyAssemblyResolve -= ReflectionOnlyAssemblyResolve; foreach (var assembly in _loadedAssemblies) { if (!MonoHelpers.IsRunningOnMono()) { assembly.ModuleResolve -= ModuleResolve; } } //EDMAURER Some RuntimeAssemblyManagers are created via reflection in an AppDomain of our creation. //Sometimes those AppDomains are not released. I don't fully understand how that appdomain roots //a RuntimeAssemblyManager, but according to heap dumps, it does. Even though the appdomain is not //unloaded, its RuntimeAssemblyManager is explicitly disposed. So make sure that it cleans up this //memory hog - the modules dictionary. 
_fullNameToAssemblyDataMap.Clear(); _mvidToAssemblyDataMap.Clear(); } /// <summary> /// Adds given MVID into a list of module MVIDs that are considered owned by this manager. /// </summary> public void AddMainModuleMvid(Guid mvid) { if (!_mvidToAssemblyDataMap.ContainsKey(mvid)) { throw new Exception($"No module with {mvid} loaded"); } _mainMvids.Add(mvid); } /// <summary> /// True if given assembly is owned by this manager. /// </summary> private bool IsOwned(Assembly assembly) { if (assembly == null) { return false; } return _mainMvids.Count == 0 || (assembly.ManifestModule != null && _mainMvids.Contains(assembly.ManifestModule.ModuleVersionId)) || _loadedAssemblies.Contains(assembly); } public bool ContainsNetModules() { return _containsNetModules; } public override object InitializeLifetimeService() { return null; } /// <summary> /// Add this to the set of <see cref="ModuleData"/> that is managed by this instance. It is okay to /// return values that are already present. /// </summary> /// <param name="modules"></param> public void AddModuleData(List<RuntimeModuleData> modules) { foreach (var module in modules.Select(x => x.Data)) { // If the module is already added then nothing else to do AssemblyData assemblyData; bool fullMatch; if (TryGetMatchingByFullName(module.Id, out assemblyData, out fullMatch)) { if (!fullMatch) { throw new Exception($"Two modules of name {assemblyData.Id.FullName} have different MVID"); } } else { if (module.Kind == OutputKind.NetModule) { _containsNetModules = true; } AddAssemblyData(new AssemblyData(module)); } } } public bool HasConflicts(List<RuntimeModuleDataId> moduleDataIds) { foreach (var id in moduleDataIds.Select(x => x.Id)) { AssemblyData assemblyData; bool fullMatch; if (TryGetMatchingByFullName(id, out assemblyData, out fullMatch) && !fullMatch) { return true; } } return false; } private void AddAssemblyData(AssemblyData assemblyData) { _fullNameToAssemblyDataMap.Add(assemblyData.Id.FullName, assemblyData); 
_mvidToAssemblyDataMap.Add(assemblyData.Id.Mvid, assemblyData); } /// <summary> /// Return the subset of IDs passed in which are not currently tracked by this instance. /// </summary> public List<RuntimeModuleDataId> GetMissing(List<RuntimeModuleDataId> moduleIds) { var list = new List<RuntimeModuleDataId>(); foreach (var id in moduleIds.Select(x => x.Id)) { AssemblyData other; bool fullMatch; if (!TryGetMatchingByFullName(id, out other, out fullMatch) || !fullMatch) { list.Add(new RuntimeModuleDataId(id)); } } return list; } private bool TryGetMatchingByFullName(ModuleDataId id, out AssemblyData assemblyData, out bool fullMatch) { if (_fullNameToAssemblyDataMap.TryGetValue(id.FullName, out assemblyData)) { fullMatch = _preloadedSet.Contains(id.SimpleName) || id.Mvid == assemblyData.Id.Mvid; return true; } assemblyData = default(AssemblyData); fullMatch = false; return false; } private ImmutableArray<byte> GetModuleBytesByName(string moduleName) { AssemblyData data; if (!_fullNameToAssemblyDataMap.TryGetValue(moduleName, out data)) { throw new KeyNotFoundException(String.Format("Could not find image for module '{0}'.", moduleName)); } if (data.Kind != Kind.ModuleData) { throw new Exception($"Cannot get bytes for preloaded Assembly {data.Id.FullName}"); } return data.ModuleData.Image; } private void AssemblyLoad(object sender, AssemblyLoadEventArgs args) { var assembly = args.LoadedAssembly; // ModuleResolve needs to be hooked up for the main assembly once its loaded. // We won't get an AssemblyResolve event for the main assembly so we need to do it here. 
if (_mainMvids.Contains(assembly.ManifestModule.ModuleVersionId) && _loadedAssemblies.Add(assembly)) { if (!MonoHelpers.IsRunningOnMono()) { assembly.ModuleResolve += ModuleResolve; } } } private Assembly AssemblyResolve(object sender, ResolveEventArgs args) { return AssemblyResolve(args, reflectionOnly: false); } private Assembly ReflectionOnlyAssemblyResolve(object sender, ResolveEventArgs args) { return AssemblyResolve(args, reflectionOnly: true); } private Assembly AssemblyResolve(ResolveEventArgs args, bool reflectionOnly) { // only respond to requests for dependencies of assemblies owned by this manager: if (IsOwned(args.RequestingAssembly)) { return GetAssembly(args.Name, reflectionOnly); } return null; } private Assembly GetAssembly(string fullName, bool reflectionOnly) { AssemblyData data; if (!_fullNameToAssemblyDataMap.TryGetValue(fullName, out data)) { return null; } Assembly assembly; switch (data.Kind) { case Kind.Assembly: assembly = data.Assembly; if (reflectionOnly && !assembly.ReflectionOnly) { assembly = Assembly.ReflectionOnlyLoad(assembly.FullName); } break; case Kind.ModuleData: assembly = _assemblyCache.GetOrLoad(data.ModuleData, reflectionOnly); break; default: throw new InvalidOperationException(); } if (!MonoHelpers.IsRunningOnMono()) { assembly.ModuleResolve += ModuleResolve; } _loadedAssemblies.Add(assembly); return assembly; } private Module ModuleResolve(object sender, ResolveEventArgs args) { var assembly = args.RequestingAssembly; var rawModule = GetModuleBytesByName(args.Name); Debug.Assert(assembly != null); Debug.Assert(!rawModule.IsDefault); return assembly.LoadModule(args.Name, rawModule.ToArray()); } public SortedSet<string> GetMemberSignaturesFromMetadata(string fullyQualifiedTypeName, string memberName, List<RuntimeModuleDataId> searchModules) { try { var signatures = new SortedSet<string>(); foreach (var id in searchModules.Select(x => x.Id)) // Check inside each assembly in the compilation { var assembly = 
GetAssembly(id.FullName, reflectionOnly: true); foreach (var signature in MetadataSignatureHelper.GetMemberSignatures(assembly, fullyQualifiedTypeName, memberName)) { signatures.Add(signature); } } return signatures; } catch (Exception ex) { var builder = new StringBuilder(); builder.AppendLine($"Error getting signatures {fullyQualifiedTypeName}.{memberName}"); builder.AppendLine($"Assemblies"); foreach (var module in _fullNameToAssemblyDataMap.Values) { builder.AppendLine($"\t{module.Id.SimpleName} {module.Id.Mvid} - {module.Kind} {_assemblyCache.GetOrDefault(module.Id, reflectionOnly: false) != null} {_assemblyCache.GetOrDefault(module.Id, reflectionOnly: true) != null}"); } throw new Exception(builder.ToString(), ex); } } private SortedSet<string> GetFullyQualifiedTypeNames(string assemblyName) { var typeNames = new SortedSet<string>(); Assembly assembly = GetAssembly(assemblyName, true); foreach (var typ in assembly.GetTypes()) typeNames.Add(typ.FullName); return typeNames; } public int Execute(string moduleName, string[] mainArgs, int? expectedOutputLength, out string output) { ImmutableArray<byte> bytes = GetModuleBytesByName(moduleName); Assembly assembly = DesktopRuntimeUtil.LoadAsAssembly(moduleName, bytes); MethodInfo entryPoint = assembly.EntryPoint; Debug.Assert(entryPoint != null, "Attempting to execute an assembly that has no entrypoint; is your test trying to execute a DLL?"); object result = null; string stdOut, stdErr; DesktopRuntimeEnvironment.Capture(() => { var count = entryPoint.GetParameters().Length; object[] args; if (count == 0) { args = new object[0]; } else if (count == 1) { args = new object[] { mainArgs ?? new string[0] }; } else { throw new Exception("Unrecognized entry point"); } result = entryPoint.Invoke(null, args); }, expectedOutputLength ?? 0, out stdOut, out stdErr); output = stdOut + stdErr; return result is int ? 
(int)result : 0; } public string[] PeVerifyModules(string[] modulesToVerify, bool throwOnError = true) { // For Windows RT (ARM) THE CLRHelper.Peverify appears to not work and will exclude this // for ARM testing at present. StringBuilder errors = new StringBuilder(); List<string> allOutput = new List<string>(); foreach (var name in modulesToVerify) { var assemblyData = _fullNameToAssemblyDataMap[name]; if (assemblyData.Kind != Kind.ModuleData) { continue; } var module = assemblyData.ModuleData; string[] output = CLRHelpers.PeVerify(module.Image); if (output.Length > 0) { if (modulesToVerify.Length > 1) { errors.AppendLine(); errors.AppendLine("<<" + name + ">>"); errors.AppendLine(); } foreach (var error in output) { errors.AppendLine(error); } } if (!throwOnError) { allOutput.AddRange(output); } } if (throwOnError && errors.Length > 0) { string dumpDir; RuntimeUtilities.DumpAssemblyData(ModuleDatas, out dumpDir); throw new RuntimePeVerifyException(errors.ToString(), dumpDir); } return allOutput.ToArray(); } } }
amcasey/roslyn
src/Test/Utilities/Desktop/CodeRuntime/RuntimeAssemblyManager.cs
C#
apache-2.0
17,128
/////////////////////////////////////////////////////////////////////////////////////////////////////// // Tencent is pleased to support the open source community by making Appecker available. // // Copyright (C) 2015 THL A29 Limited, a Tencent company. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in // compliance with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software distributed under the License is // distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, // either express or implied. See the License for the specific language governing permissions and // limitations under the License. /////////////////////////////////////////////////////////////////////////////////////////////////////// #import <Foundation/Foundation.h> @interface AppeckerHookManager : NSObject + (void)hijackClassSelector:(SEL)originalSelector inClass:(Class) srcCls withSelector:(SEL)newSelector inClass:(Class) dstCls; + (void)hijackInstanceSelector:(SEL)originalSelector inClass:(Class) srcCls withSelector:(SEL)newSelector inClass:(Class) dstCls; @end
TencentOpen/Appecker
hook/privateHeader/AppeckerHookManager.h
C
apache-2.0
1,268
# DesignOverlay UI Test ## Set up Install sauce labs client library: ```shell pip install sauceclient ``` Install appium client library: ```shell pip install Appium-Python-Client pip install pytest ``` ## how to run (SauceLabs) For configuration, look at the config_sauce_labs.json. ```shell ./gradlew sauceLabsDebug ```
poovarasanvasudevan/DesignOverlay-Android
appium/README.md
Markdown
apache-2.0
327
<!doctype html> <html lang="en"> <head> <meta charset="UTF-8"> <title>Example - example-forms-custom-triggers-debug</title> <script src="../../../angular.js"></script> <script src="script.js"></script> </head> <body ng-app="customTriggerExample"> <div ng-controller="ExampleController"> <form> Name: <input type="text" ng-model="user.name" ng-model-options="{ updateOn: 'blur' }" /><br /> Other data: <input type="text" ng-model="user.data" /><br /> </form> <pre>username = "{{user.name}}"</pre> <pre>userdata = "{{user.data}}"</pre> </div> </body> </html>
LADOSSIFPB/nutrif
nutrif-web-refactor/lib/angular/docs/examples/example-forms-custom-triggers/index-debug.html
HTML
apache-2.0
601
package com.github.dockerjava.api.model; import com.google.common.base.Objects; import com.google.common.base.Optional; import java.io.Serializable; /** * @author magnayn */ public class Identifier implements Serializable { private static final long serialVersionUID = 1L; public final Repository repository; public final Optional<String> tag; public Identifier(Repository repository, String tag) { this.repository = repository; if (tag == null) { this.tag = Optional.absent(); } else { this.tag = Optional.of(tag); } } /** * Return an identifier that correctly splits up the repository and tag. There can be &gt; 1 ":" fred/jim --&gt; fred/jim, [] * fred/jim:123 --&gt; fred/jim, 123 fred:123/jim:123 --&gt; fred:123/jim, 123 * * * @param identifier * as a string * @return parsed identifier. */ public static Identifier fromCompoundString(String identifier) { String[] parts = identifier.split("/"); if (parts.length != 2) { String[] rhs = identifier.split(":"); if (rhs.length != 2) { return new Identifier(new Repository(identifier), null); } else { return new Identifier(new Repository(rhs[0]), rhs[1]); } } String[] rhs = parts[1].split(":"); if (rhs.length != 2) { return new Identifier(new Repository(identifier), null); } return new Identifier(new Repository(parts[0] + "/" + rhs[0]), rhs[1]); } @Override public String toString() { return Objects.toStringHelper(this).add("repository", repository).add("tag", tag).toString(); } }
ollie314/docker-java
src/main/java/com/github/dockerjava/api/model/Identifier.java
Java
apache-2.0
1,760
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.catalyst.expressions.aggregate

import java.util

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{TypeCheckFailure, TypeCheckSuccess}
import org.apache.spark.sql.catalyst.expressions.{AttributeReference, ExpectsInputTypes, Expression}
import org.apache.spark.sql.catalyst.util.{ArrayData, GenericArrayData, HyperLogLogPlusPlusHelper}
import org.apache.spark.sql.types._

/**
 * This function counts the approximate number of distinct values (ndv) in
 * intervals constructed from endpoints specified in `endpointsExpression`. The endpoints should be
 * sorted into ascending order. E.g., given an array of endpoints
 * (endpoint_1, endpoint_2, ... endpoint_N), returns the approximate ndv's for intervals
 * [endpoint_1, endpoint_2], (endpoint_2, endpoint_3], ... (endpoint_N-1, endpoint_N].
 * To count ndv's in these intervals, apply the HyperLogLogPlusPlus algorithm in each of them.
 * @param child to estimate the ndv's of.
 * @param endpointsExpression An array expression to construct the intervals. It must be foldable,
 *                            and its elements should be sorted into ascending order.
 *                            Duplicate endpoints are allowed, e.g. (1, 5, 5, 10), and ndv for
 *                            interval (5, 5] would be 1.
 * @param relativeSD The maximum estimation error allowed in the HyperLogLogPlusPlus algorithm.
 */
case class ApproxCountDistinctForIntervals(
    child: Expression,
    endpointsExpression: Expression,
    relativeSD: Double = 0.05,
    mutableAggBufferOffset: Int = 0,
    inputAggBufferOffset: Int = 0)
  extends ImperativeAggregate with ExpectsInputTypes {

  def this(child: Expression, endpointsExpression: Expression) = {
    this(
      child = child,
      endpointsExpression = endpointsExpression,
      relativeSD = 0.05,
      mutableAggBufferOffset = 0,
      inputAggBufferOffset = 0)
  }

  def this(child: Expression, endpointsExpression: Expression, relativeSD: Expression) = {
    this(
      child = child,
      endpointsExpression = endpointsExpression,
      relativeSD = HyperLogLogPlusPlus.validateDoubleLiteral(relativeSD),
      mutableAggBufferOffset = 0,
      inputAggBufferOffset = 0)
  }

  override def inputTypes: Seq[AbstractDataType] = {
    Seq(TypeCollection(NumericType, TimestampType, DateType), ArrayType)
  }

  // Mark as lazy so that endpointsExpression is not evaluated during tree transformation.
  lazy val endpoints: Array[Double] =
    (endpointsExpression.dataType, endpointsExpression.eval()) match {
      case (ArrayType(elementType, _), arrayData: ArrayData) =>
        arrayData.toObjectArray(elementType).map(_.toString.toDouble)
    }

  override def checkInputDataTypes(): TypeCheckResult = {
    val defaultCheck = super.checkInputDataTypes()
    if (defaultCheck.isFailure) {
      defaultCheck
    } else if (!endpointsExpression.foldable) {
      TypeCheckFailure("The endpoints provided must be constant literals")
    } else {
      endpointsExpression.dataType match {
        case ArrayType(_: NumericType | DateType | TimestampType, _) =>
          if (endpoints.length < 2) {
            TypeCheckFailure("The number of endpoints must be >= 2 to construct intervals")
          } else {
            TypeCheckSuccess
          }
        case _ =>
          TypeCheckFailure("Endpoints require (numeric or timestamp or date) type")
      }
    }
  }

  // N endpoints construct N-1 intervals, creating a HLLPP for each interval
  private lazy val hllppArray = {
    val array = new Array[HyperLogLogPlusPlusHelper](endpoints.length - 1)
    for (i <- array.indices) {
      array(i) = new HyperLogLogPlusPlusHelper(relativeSD)
    }
    // `numWords` in each HLLPPHelper should be the same because it is determined by `relativeSD`
    // which is shared among all HLLPPHelpers.
    assert(array.map(_.numWords).distinct.length == 1)
    array
  }

  // Number of buffer words used by a single HLLPP helper; all helpers use the same count.
  private lazy val numWordsPerHllpp = hllppArray.head.numWords

  private lazy val totalNumWords = numWordsPerHllpp * hllppArray.length

  /** Allocate enough words to store all registers. */
  override lazy val aggBufferAttributes: Seq[AttributeReference] = {
    Seq.tabulate(totalNumWords) { i =>
      AttributeReference(s"MS[$i]", LongType)()
    }
  }

  override def aggBufferSchema: StructType = StructType.fromAttributes(aggBufferAttributes)

  // Note: although this simply copies aggBufferAttributes, this common code can not be placed
  // in the superclass because that will lead to initialization ordering issues.
  override lazy val inputAggBufferAttributes: Seq[AttributeReference] =
    aggBufferAttributes.map(_.newInstance())

  /** Fill all words with zeros. */
  override def initialize(buffer: InternalRow): Unit = {
    var word = 0
    while (word < totalNumWords) {
      buffer.setLong(mutableAggBufferOffset + word, 0)
      word += 1
    }
  }

  override def update(buffer: InternalRow, input: InternalRow): Unit = {
    val value = child.eval(input)
    // Ignore empty rows
    if (value != null) {
      // convert the value into a double value for searching in the double array
      val doubleValue = child.dataType match {
        case n: NumericType =>
          n.numeric.toDouble(value.asInstanceOf[n.InternalType])
        case _: DateType =>
          value.asInstanceOf[Int].toDouble
        case _: TimestampType =>
          value.asInstanceOf[Long].toDouble
      }

      // endpoints are sorted into ascending order already
      if (endpoints.head > doubleValue || endpoints.last < doubleValue) {
        // ignore if the value is out of the whole range
        return
      }

      val hllppIndex = findHllppIndex(doubleValue)
      val offset = mutableAggBufferOffset + hllppIndex * numWordsPerHllpp
      hllppArray(hllppIndex).update(buffer, offset, value, child.dataType)
    }
  }

  // Find which interval (HyperLogLogPlusPlusHelper) should receive the given value.
  def findHllppIndex(value: Double): Int = {
    var index = util.Arrays.binarySearch(endpoints, value)
    if (index >= 0) {
      // The value is found.
      if (index == 0) {
        0
      } else {
        // If the endpoints contains multiple elements with the specified value, there is no
        // guarantee which one binarySearch will return. We remove this uncertainty by moving the
        // index to the first position of these elements.
        var first = index - 1
        while (first >= 0 && endpoints(first) == value) {
          first -= 1
        }
        index = first + 1

        if (index == 0) {
          // reach the first endpoint
          0
        } else {
          // send values in (endpoints(index-1), endpoints(index)] to hllpps(index-1)
          index - 1
        }
      }
    } else {
      // The value is not found, binarySearch returns (-(<i>insertion point</i>) - 1).
      // The <i>insertion point</i> is defined as the point at which the key would be inserted
      // into the array: the index of the first element greater than the key.
      val insertionPoint = - (index + 1)
      if (insertionPoint == 0) 0 else insertionPoint - 1
    }
  }

  override def merge(buffer1: InternalRow, buffer2: InternalRow): Unit = {
    for (i <- hllppArray.indices) {
      hllppArray(i).merge(
        buffer1 = buffer1,
        buffer2 = buffer2,
        offset1 = mutableAggBufferOffset + i * numWordsPerHllpp,
        offset2 = inputAggBufferOffset + i * numWordsPerHllpp)
    }
  }

  override def eval(buffer: InternalRow): Any = {
    val ndvArray = hllppResults(buffer)
    // If the endpoints contains multiple elements with the same value,
    // we set ndv=1 for intervals between these elements.
    // E.g. given four endpoints (1, 2, 2, 4) and input sequence (0.5, 2),
    // the ndv's for the three intervals should be (2, 1, 0)
    for (i <- ndvArray.indices) {
      if (endpoints(i) == endpoints(i + 1)) ndvArray(i) = 1
    }
    new GenericArrayData(ndvArray)
  }

  // Query each HLLPP helper at its offset within the shared buffer.
  def hllppResults(buffer: InternalRow): Array[Long] = {
    val ndvArray = new Array[Long](hllppArray.length)
    for (i <- ndvArray.indices) {
      ndvArray(i) = hllppArray(i).query(buffer, mutableAggBufferOffset + i * numWordsPerHllpp)
    }
    ndvArray
  }

  override def withNewMutableAggBufferOffset(newMutableAggBufferOffset: Int): ImperativeAggregate =
    copy(mutableAggBufferOffset = newMutableAggBufferOffset)

  override def withNewInputAggBufferOffset(newInputAggBufferOffset: Int): ImperativeAggregate =
    copy(inputAggBufferOffset = newInputAggBufferOffset)

  override def children: Seq[Expression] = Seq(child, endpointsExpression)

  override def nullable: Boolean = false

  override def dataType: DataType = ArrayType(LongType)

  override def prettyName: String = "approx_count_distinct_for_intervals"
}
aray/spark
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/ApproxCountDistinctForIntervals.scala
Scala
apache-2.0
9,716
# Encoding: utf-8 # Cloud Foundry Java Buildpack # Copyright 2013 the original author or authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. require 'java_buildpack/logging/logger_factory' require 'java_buildpack/repository' require 'java_buildpack/repository/version_resolver' require 'java_buildpack/util/cache' require 'java_buildpack/util/cache/download_cache' require 'java_buildpack/util/configuration_utils' require 'rbconfig' require 'yaml' module JavaBuildpack module Repository # A repository index represents the index of repository containing various versions of a file. class RepositoryIndex # Creates a new repository index, populating it with values from an index file. # # @param [String] repository_root the root of the repository to create the index for def initialize(repository_root) @logger = JavaBuildpack::Logging::LoggerFactory.instance.get_logger RepositoryIndex @default_repository_root = JavaBuildpack::Util::ConfigurationUtils.load('repository')['default_repository_root'] .chomp('/') cache.get("#{canonical repository_root}#{INDEX_PATH}") do |file| @index = YAML.load_file(file) @logger.debug { @index } end end # Finds a version of the file matching the given, possibly wildcarded, version. 
# # @param [String] version the possibly wildcarded version to find # @return [TokenizedVersion] the version of the file found # @return [String] the URI of the file found def find_item(version) found_version = VersionResolver.resolve(version, @index.keys) fail "No version resolvable for '#{version}' in #{@index.keys.join(', ')}" if found_version.nil? uri = @index[found_version.to_s] [found_version, uri] end private INDEX_PATH = '/index.yml'.freeze private_constant :INDEX_PATH def architecture `uname -m`.strip end def cache JavaBuildpack::Util::Cache::DownloadCache.new(Pathname.new(Dir.tmpdir), JavaBuildpack::Util::Cache::CACHED_RESOURCES_DIRECTORY) end def canonical(raw) cooked = raw .gsub(/\{default.repository.root\}/, @default_repository_root) .gsub(/\{platform\}/, platform) .gsub(/\{architecture\}/, architecture) .chomp('/') @logger.debug { "#{raw} expanded to #{cooked}" } cooked end def platform redhat_release = Pathname.new('/etc/redhat-release') if redhat_release.exist? "centos#{redhat_release.read.match(/CentOS release (\d)/)[1]}" elsif `uname -s` =~ /Darwin/ 'mountainlion' elsif !`which lsb_release 2> /dev/null`.empty? `lsb_release -cs`.strip else fail 'Unable to determine platform' end end end end end
brijeshgp/java-buildpack
lib/java_buildpack/repository/repository_index.rb
Ruby
apache-2.0
3,513
/* * Copyright 2015 Naver Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <errno.h> #include <stdlib.h> #include <string.h> #include <sys/time.h> #include <sys/types.h> #include <unistd.h> #ifdef SFI_ENABLED #include <pthread.h> #endif #include "smr.h" #include "log_internal.h" #define ERRNO_FILE_ID UTIL_FILE_ID long long currtime_usec (void) { struct timeval tv; long long usec; gettimeofday (&tv, NULL); usec = tv.tv_sec * 1000000; usec += tv.tv_usec; return usec; } int ll_cmpr (const void *v1, const void *v2) { long long ll1 = *(long long *) v1; long long ll2 = *(long long *) v2; return (ll1 > ll2) ? 1 : ((ll1 == ll2) ? 
0 : -1); } int init_log_file (int fd) { int ret; ret = ftruncate (fd, SMR_LOG_FILE_ACTUAL_SIZE); if (ret < 0) { ERRNO_POINT (); return -1; } // extended parts reads null bytes return 0; } #ifdef SFI_ENABLED static pthread_once_t sfi_once = PTHREAD_ONCE_INIT; static pthread_key_t sfi_key; // ISO C forbids conversion of function pointer to object pointer type struct funcWrap { void (*callback) (char *, int); }; static void initialize_key (void) { pthread_key_create (&sfi_key, free); } void sfi_mshmcs_probe (char *file, int line) { struct funcWrap *wrap; (void) pthread_once (&sfi_once, initialize_key); wrap = pthread_getspecific (sfi_key); if (wrap) { wrap->callback (file, line); } } void sfi_mshmcs_register (void (*callback) (char *, int)) { struct funcWrap *wrap = NULL; (void) pthread_once (&sfi_once, initialize_key); wrap = malloc (sizeof (struct funcWrap)); if (wrap) { wrap->callback = callback; } pthread_setspecific (sfi_key, wrap); } #endif
cl9200/nbase-arc
smr/smr/log_util.c
C
apache-2.0
2,232
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.http;

import org.apache.http.message.BasicHeader;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.ActionFilter;
import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Response;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.GeoShapeQueryBuilder;
import org.elasticsearch.index.query.MoreLikeThisQueryBuilder;
import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.TermsQueryBuilder;
import org.elasticsearch.indices.TermsLookup;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.junit.After;
import org.junit.Before;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;

import static java.util.Collections.singletonList;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;

/**
 * Integration test verifying that a custom header attached to an outer request (via a
 * header-filtered transport client or an HTTP request) is propagated to the internal
 * sub-requests that queries trigger (terms lookup GETs, indexed-shape GETs,
 * more-like-this multi-term-vector requests, etc.). Internal requests are captured by
 * {@link ActionLoggingPlugin}, which records each request together with the thread
 * context headers present when it was filtered.
 */
@ClusterScope(scope = SUITE)
public class ContextAndHeaderTransportIT extends HttpSmokeTestCase {

    // Shared across test-node plugin instances and the test itself; CopyOnWriteArrayList
    // because requests are appended from action-filter threads while tests iterate.
    private static final List<RequestAndHeaders> requests = new CopyOnWriteArrayList<>();

    private static final String CUSTOM_HEADER = "SomeCustomHeader";
    private String randomHeaderValue = randomAlphaOfLength(20);
    private String queryIndex = "query-" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
    private String lookupIndex = "lookup-" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT);

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        return Settings.builder()
            .put(super.nodeSettings(nodeOrdinal))
            .put(NetworkModule.HTTP_ENABLED.getKey(), true)
            .build();
    }

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        ArrayList<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins());
        plugins.add(ActionLoggingPlugin.class);
        plugins.add(CustomHeadersPlugin.class);
        return plugins;
    }

    /**
     * Creates the query and lookup indices with a shared mapping (geo_shape location,
     * text name) and clears the captured request log.
     */
    @Before
    public void createIndices() throws Exception {
        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type")
            .startObject("properties")
            .startObject("location").field("type", "geo_shape").endObject()
            .startObject("name").field("type", "text").endObject()
            .endObject()
            .endObject().endObject());

        Settings settings = Settings.builder()
            .put(indexSettings())
            .put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable.
            .build();
        assertAcked(transportClient().admin().indices().prepareCreate(lookupIndex)
            .setSettings(settings).addMapping("type", mapping, XContentType.JSON));
        assertAcked(transportClient().admin().indices().prepareCreate(queryIndex)
            .setSettings(settings).addMapping("type", mapping, XContentType.JSON));
        ensureGreen(queryIndex, lookupIndex);
        requests.clear();
    }

    /**
     * After each test, verifies that even the index/refresh requests issued during
     * setup carried the custom header.
     */
    @After
    public void checkAllRequestsContainHeaders() {
        assertRequestsContainHeader(IndexRequest.class);
        assertRequestsContainHeader(RefreshRequest.class);
    }

    public void testThatTermsLookupGetRequestContainsContextAndHeaders() throws Exception {
        transportClient().prepareIndex(lookupIndex, "type", "1")
            .setSource(jsonBuilder().startObject().array("followers", "foo", "bar", "baz").endObject()).get();
        transportClient().prepareIndex(queryIndex, "type", "1")
            .setSource(jsonBuilder().startObject().field("username", "foo").endObject()).get();
        transportClient().admin().indices().prepareRefresh(queryIndex, lookupIndex).get();

        TermsLookup termsLookup = new TermsLookup(lookupIndex, "type", "1", "followers");
        TermsQueryBuilder termsLookupFilterBuilder = QueryBuilders.termsLookupQuery("username", termsLookup);
        BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery()).must(termsLookupFilterBuilder);

        SearchResponse searchResponse = transportClient()
            .prepareSearch(queryIndex)
            .setQuery(queryBuilder)
            .get();
        assertNoFailures(searchResponse);
        assertHitCount(searchResponse, 1);

        assertGetRequestsContainHeaders();
    }

    public void testThatGeoShapeQueryGetRequestContainsContextAndHeaders() throws Exception {
        transportClient().prepareIndex(lookupIndex, "type", "1").setSource(jsonBuilder().startObject()
            .field("name", "Munich Suburban Area")
            .startObject("location")
            .field("type", "polygon")
            .startArray("coordinates").startArray()
            .startArray().value(11.34).value(48.25).endArray()
            .startArray().value(11.68).value(48.25).endArray()
            .startArray().value(11.65).value(48.06).endArray()
            .startArray().value(11.37).value(48.13).endArray()
            .startArray().value(11.34).value(48.25).endArray() // close the polygon
            .endArray().endArray()
            .endObject()
            .endObject())
            .get();
        // second document
        transportClient().prepareIndex(queryIndex, "type", "1").setSource(jsonBuilder().startObject()
            .field("name", "Munich Center")
            .startObject("location")
            .field("type", "point")
            .startArray("coordinates").value(11.57).value(48.13).endArray()
            .endObject()
            .endObject())
            .get();
        transportClient().admin().indices().prepareRefresh(lookupIndex, queryIndex).get();

        GeoShapeQueryBuilder queryBuilder = QueryBuilders.geoShapeQuery("location", "1", "type")
            .indexedShapeIndex(lookupIndex)
            .indexedShapePath("location");

        SearchResponse searchResponse = transportClient()
            .prepareSearch(queryIndex)
            .setQuery(queryBuilder)
            .get();
        assertNoFailures(searchResponse);
        assertHitCount(searchResponse, 1);
        assertThat(requests, hasSize(greaterThan(0)));

        assertGetRequestsContainHeaders();
    }

    public void testThatMoreLikeThisQueryMultiTermVectorRequestContainsContextAndHeaders() throws Exception {
        transportClient().prepareIndex(lookupIndex, "type", "1")
            .setSource(jsonBuilder().startObject().field("name", "Star Wars - The new republic").endObject())
            .get();
        transportClient().prepareIndex(queryIndex, "type", "1")
            .setSource(jsonBuilder().startObject().field("name", "Jar Jar Binks - A horrible mistake").endObject())
            .get();
        transportClient().prepareIndex(queryIndex, "type", "2")
            .setSource(jsonBuilder().startObject().field("name", "Star Wars - Return of the jedi").endObject())
            .get();
        transportClient().admin().indices().prepareRefresh(lookupIndex, queryIndex).get();

        MoreLikeThisQueryBuilder moreLikeThisQueryBuilder = QueryBuilders.moreLikeThisQuery(new String[]{"name"}, null,
            new Item[]{new Item(lookupIndex, "type", "1")})
            .minTermFreq(1)
            .minDocFreq(1);

        SearchResponse searchResponse = transportClient()
            .prepareSearch(queryIndex)
            .setQuery(moreLikeThisQueryBuilder)
            .get();
        assertNoFailures(searchResponse);
        assertHitCount(searchResponse, 1);

        assertRequestsContainHeader(MultiTermVectorsRequest.class);
    }

    /**
     * Headers registered by {@link CustomHeadersPlugin} are copied from the HTTP
     * request into the internal requests; unregistered headers are dropped.
     */
    public void testThatRelevantHttpHeadersBecomeRequestHeaders() throws IOException {
        final String IRRELEVANT_HEADER = "SomeIrrelevantHeader";
        Response response = getRestClient().performRequest("GET", "/" + queryIndex + "/_search",
            new BasicHeader(CUSTOM_HEADER, randomHeaderValue), new BasicHeader(IRRELEVANT_HEADER, randomHeaderValue));
        assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
        List<RequestAndHeaders> searchRequests = getRequests(SearchRequest.class);
        assertThat(searchRequests, hasSize(greaterThan(0)));
        for (RequestAndHeaders requestAndHeaders : searchRequests) {
            assertThat(requestAndHeaders.headers.containsKey(CUSTOM_HEADER), is(true));
            // was not specified, thus is not included
            assertThat(requestAndHeaders.headers.containsKey(IRRELEVANT_HEADER), is(false));
        }
    }

    // Returns the captured requests whose concrete class is exactly clazz.
    private List<RequestAndHeaders> getRequests(Class<?> clazz) {
        List<RequestAndHeaders> results = new ArrayList<>();
        for (RequestAndHeaders request : requests) {
            if (request.request.getClass().equals(clazz)) {
                results.add(request);
            }
        }

        return results;
    }

    private void assertRequestsContainHeader(Class<? extends ActionRequest> clazz) {
        List<RequestAndHeaders> classRequests = getRequests(clazz);
        for (RequestAndHeaders request : classRequests) {
            assertRequestContainsHeader(request.request, request.headers);
        }
    }

    private void assertGetRequestsContainHeaders() {
        assertGetRequestsContainHeaders(this.lookupIndex);
    }

    // Checks only the GET requests that targeted the given index.
    private void assertGetRequestsContainHeaders(String index) {
        List<RequestAndHeaders> getRequests = getRequests(GetRequest.class);
        assertThat(getRequests, hasSize(greaterThan(0)));

        for (RequestAndHeaders request : getRequests) {
            if (!((GetRequest)request.request).index().equals(index)) {
                continue;
            }
            assertRequestContainsHeader(request.request, request.headers);
        }
    }

    private void assertRequestContainsHeader(ActionRequest request, Map<String, String> context) {
        String msg = String.format(Locale.ROOT, "Expected header %s to be in request %s", CUSTOM_HEADER, request.getClass().getName());
        if (request instanceof IndexRequest) {
            IndexRequest indexRequest = (IndexRequest) request;
            msg = String.format(Locale.ROOT, "Expected header %s to be in index request %s/%s/%s", CUSTOM_HEADER,
                indexRequest.index(), indexRequest.type(), indexRequest.id());
        }
        assertThat(msg, context.containsKey(CUSTOM_HEADER), is(true));
        assertThat(context.get(CUSTOM_HEADER).toString(), is(randomHeaderValue));
    }

    /**
     * a transport client that adds our random header
     */
    private Client transportClient() {
        return internalCluster().transportClient().filterWithHeader(Collections.singletonMap(CUSTOM_HEADER, randomHeaderValue));
    }

    /**
     * Node plugin installing {@link LoggingFilter}, which records every action
     * request plus the thread-context headers active at filter time.
     */
    public static class ActionLoggingPlugin extends Plugin implements ActionPlugin {

        private final SetOnce<LoggingFilter> loggingFilter = new SetOnce<>();

        @Override
        public Collection<Object> createComponents(Client client, ClusterService clusterService, ThreadPool threadPool,
                                                   ResourceWatcherService resourceWatcherService, ScriptService scriptService,
                                                   NamedXContentRegistry xContentRegistry, Environment environment,
                                                   NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) {
            loggingFilter.set(new LoggingFilter(clusterService.getSettings(), threadPool));
            return Collections.emptyList();
        }

        @Override
        public List<ActionFilter> getActionFilters() {
            return singletonList(loggingFilter.get());
        }

    }

    public static class LoggingFilter extends ActionFilter.Simple {

        private final ThreadPool threadPool;

        public LoggingFilter(Settings settings, ThreadPool pool) {
            super(settings);
            this.threadPool = pool;
        }

        @Override
        public int order() {
            // high order so the filter runs late in the chain, after headers are in place
            return 999;
        }

        @Override
        protected boolean apply(String action, ActionRequest request, ActionListener<?> listener) {
            requests.add(new RequestAndHeaders(threadPool.getThreadContext().getHeaders(), request));
            return true;
        }
    }

    // Pairs a captured request with the thread-context headers at capture time.
    private static class RequestAndHeaders {
        final Map<String, String> headers;
        final ActionRequest request;

        private RequestAndHeaders(Map<String, String> headers, ActionRequest request) {
            this.headers = headers;
            this.request = request;
        }
    }

    /**
     * Declares {@code CUSTOM_HEADER} as a REST header so HTTP requests carrying it
     * have it copied into the thread context.
     */
    public static class CustomHeadersPlugin extends Plugin implements ActionPlugin {
        public Collection<String> getRestHeaders() {
            return Collections.singleton(CUSTOM_HEADER);
        }
    }
}
s1monw/elasticsearch
qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java
Java
apache-2.0
15,442
---
layout: post100
title: Port Usage Control
categories: XAP100ADM
parent: network.html
weight: 700
---

XAP space and client components open different ports in various situations. The following list describes the different ports used by XAP and how these can be modified:

{% refer %}Learn how to **[set XAP over a firewall](./network-over-firewall.html)**.{% endrefer %}

{: .table .table-bordered .table-condensed}
| Property name | Description | Default value |
|:--------|:------------|:----------------------|
|com.sun.jini.reggie.initialUnicastDiscoveryPort|Lookup Service listening port<br>Used as part of the unicast lookup discovery protocol.|XAP 6:4162{% wbr %}XAP 7:4164{% wbr %}XAP 8:4166{% wbr %}XAP 9:4170{% wbr %}<nobr>XAP 9.5:4174</nobr>|
|com.gs.transport_protocol.lrmi.bind-port|LRMI listening port<br>Used with client-space and space-space communication. |variable, random|
|com.gigaspaces.system.registryPort|RMI registry listening port <br>Used as an alternative directory service.|10098 and above.|
|com.gigaspaces.system.registryRetries|RMI registry Retries <br>Used as an alternative directory service.|Default is 20.|
|com.gigaspaces.start.httpPort|Webster listening port<br>Internal web service used as part of the application deployment process. |9813|

- When starting a space and providing the port as part of the URL - i.e. `java://localhost:PORT/container/space` - the specified port will be used both for the RMI registry listener and also for the container to register into the RMI registry.
- The Jini Lookup Service uses unicast and multicast announcements and requests.
- The **multicast** discovery protocol uses port 4170.
- You can **completely disable multicast announcement traffic**. Refer to the [Lookup Service Configuration](./network-lookup-service-configuration.html) or [Setting XAP Over Firewall](./network-over-firewall.html) sections for more details.
- When running a clustered space using replication via multicast, additional ports are used.
{% comment %} | Webster | linux: `NO_HTTP` Windows: `noHTTP` | `0` | `\-Dcom.gigaspaces.start.httpPort=0 \-Dcom.gigaspaces.start.httpServerRetries=20` | [How to Control the Used Ports](webster.xml) | Additional properties can also be overridden (for example: `httpServerRetries`, `hostAddress`) `httpServerRetries` retries N-1 consecutive ports if the initial port is used (relevant if the initial port is different than zero). Default is 20. | | JMX | linux: `NO_JMX` Windows: `noJMX` | `10098` | `\-Dcom.gigaspaces.system.registryPort=10098` | XML override using `com.gigaspaces.start.jmx.svcDesc` | `registryRetries` retries N-1 consecutive ports if the initial port is used (for example: `10098`, `10099`, `..\[10098+(N-1)\]`) | {% endcomment %}
barakb/gigaspaces-wiki-jekyll
xap100adm/network-ports.markdown
Markdown
apache-2.0
2,810
// Copyright 2005-2010 Gallio Project - http://www.gallio.org/
// Portions Copyright 2000-2004 Jonathan de Halleux
// 
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// 
//     http://www.apache.org/licenses/LICENSE-2.0
// 
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

using System;
using System.Threading;
using Gallio.Framework.Pattern;
using Gallio.Common.Reflection;

namespace MbUnit.Framework
{
    /// <summary>
    /// Sets the apartment state to be used to run the decorated test and its children
    /// unless subsequently overridden.
    /// </summary>
    /// <remarks>
    /// <para>
    /// When no apartment state is supplied, or when it is
    /// <see cref="System.Threading.ApartmentState.Unknown" />, the test inherits the apartment
    /// state of its parent test. In particular, applying this attribute to a fixture makes all
    /// of its tests run with the same apartment state unless they override it themselves.
    /// </para>
    /// <para>
    /// A test assembly defaults to <see cref="System.Threading.ApartmentState.STA"/>; place an
    /// <see cref="ApartmentStateAttribute" /> on the assembly to change that default.
    /// </para>
    /// </remarks>
    [AttributeUsage(PatternAttributeTargets.Test, AllowMultiple = false, Inherited = true)]
    public class ApartmentStateAttribute : TestDecoratorPatternAttribute
    {
        // Apartment state requested for the decorated test (and, transitively, its children).
        private readonly ApartmentState requestedApartmentState;

        /// <summary>
        /// Sets the apartment state to be used to run the decorated test and its children
        /// unless subsequently overridden.
        /// </summary>
        /// <remarks>
        /// <para>
        /// When no apartment state is supplied, or when it is
        /// <see cref="System.Threading.ApartmentState.Unknown" />, the test inherits the apartment
        /// state of its parent test. In particular, applying this attribute to a fixture makes all
        /// of its tests run with the same apartment state unless they override it themselves.
        /// </para>
        /// <para>
        /// A test assembly defaults to <see cref="System.Threading.ApartmentState.STA"/>; place an
        /// <see cref="ApartmentStateAttribute" /> on the assembly to change that default.
        /// </para>
        /// </remarks>
        /// <param name="apartmentState">The apartment state to use.</param>
        public ApartmentStateAttribute(ApartmentState apartmentState)
        {
            this.requestedApartmentState = apartmentState;
        }

        /// <summary>
        /// Gets the apartment state to be used to run the decorated test.
        /// </summary>
        public ApartmentState ApartmentState
        {
            get
            {
                return requestedApartmentState;
            }
        }

        /// <inheritdoc />
        protected override void DecorateTest(IPatternScope scope, ICodeElementInfo codeElement)
        {
            scope.TestBuilder.ApartmentState = requestedApartmentState;
        }
    }
}
mterwoord/mbunit-v3
src/MbUnit/MbUnit/Framework/ApartmentStateAttribute.cs
C#
apache-2.0
3,425
/*
 * Copyright 2011 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.optaplanner.examples.machinereassignment.swingui;

import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.swing.AbstractAction;
import javax.swing.GroupLayout;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;

import org.optaplanner.examples.common.swingui.SolutionPanel;
import org.optaplanner.examples.common.swingui.components.LabeledComboBoxRenderer;
import org.optaplanner.examples.machinereassignment.domain.MachineReassignment;
import org.optaplanner.examples.machinereassignment.domain.MrMachine;
import org.optaplanner.examples.machinereassignment.domain.MrProcessAssignment;
import org.optaplanner.examples.machinereassignment.domain.MrResource;

/**
 * Swing panel that visualizes a {@link MachineReassignment} solution as a vertical list
 * of machine rows, one {@code MrMachinePanel} per machine, plus a dedicated row for
 * unassigned process assignments (keyed by a {@code null} machine).
 */
public class MachineReassignmentPanel extends SolutionPanel<MachineReassignment> {

    public static final String LOGO_PATH = "/org/optaplanner/examples/machinereassignment/swingui/machineReassignmentLogo.png";

    // Container holding one MrMachinePanel row per machine (plus the unassigned row).
    private JPanel machineListPanel;
    // Row for process assignments whose machine is null (i.e. not yet assigned).
    private MrMachinePanel unassignedPanel;
    // Shown instead of the machine rows when the dataset is too large to render; null when hidden.
    private JLabel tooBigLabel = null;
    // Maps each machine to its row panel; the null key maps to unassignedPanel.
    private Map<MrMachine, MrMachinePanel> machineToPanelMap;

    public MachineReassignmentPanel() {
        GroupLayout layout = new GroupLayout(this);
        setLayout(layout);
        createMachineListPanel();
        JPanel headerPanel = new JPanel();
        // Header stacked above the machine list; both kept at their preferred size vertically.
        layout.setHorizontalGroup(layout.createParallelGroup()
                .addComponent(headerPanel).addComponent(machineListPanel));
        layout.setVerticalGroup(layout.createSequentialGroup()
                .addComponent(headerPanel, GroupLayout.PREFERRED_SIZE,
                        GroupLayout.PREFERRED_SIZE, GroupLayout.PREFERRED_SIZE)
                .addComponent(machineListPanel, GroupLayout.PREFERRED_SIZE,
                        GroupLayout.PREFERRED_SIZE, GroupLayout.PREFERRED_SIZE));
    }

    /**
     * Builds the machine list container and registers the "unassigned" row under the
     * {@code null} key so lookups by a process assignment's (possibly null) machine work uniformly.
     */
    private void createMachineListPanel() {
        machineListPanel = new JPanel(new GridLayout(0, 1));
        unassignedPanel = new MrMachinePanel(this, Collections.<MrResource>emptyList(), null);
        machineListPanel.add(unassignedPanel);
        machineToPanelMap = new LinkedHashMap<>();
        machineToPanelMap.put(null, unassignedPanel);
    }

    /**
     * Discards every machine-specific row (keeping only the unassigned row) and rebuilds
     * the panel from scratch for the given solution.
     */
    @Override
    public void resetPanel(MachineReassignment machineReassignment) {
        for (MrMachinePanel machinePanel : machineToPanelMap.values()) {
            // The null-keyed unassignedPanel is reused, so only remove real machine rows.
            if (machinePanel.getMachine() != null) {
                machineListPanel.remove(machinePanel);
            }
        }
        machineToPanelMap.clear();
        machineToPanelMap.put(null, unassignedPanel);
        unassignedPanel.clearProcessAssignments();
        updatePanel(machineReassignment);
    }

    /**
     * Incrementally synchronizes the panel with the given solution: adds rows for new
     * machines, removes rows for machines that disappeared, and redistributes the
     * process assignments over the rows. Datasets with more than 1000 machines are not
     * rendered; a placeholder label is shown instead.
     */
    @Override
    public void updatePanel(MachineReassignment machineReassignment) {
        List<MrResource> resourceList = machineReassignment.getResourceList();
        unassignedPanel.setResourceList(resourceList);
        if (machineReassignment.getMachineList().size() > 1000) {
            if (tooBigLabel == null) {
                tooBigLabel = new JLabel("The dataset is too big to show.");
                machineListPanel.add(tooBigLabel);
            }
        } else {
            if (tooBigLabel != null) {
                machineListPanel.remove(tooBigLabel);
                tooBigLabel = null;
            }
            // Start by presuming every known machine is dead; machines still present in the
            // solution are removed from this set below. The null (unassigned) key always survives.
            Set<MrMachine> deadMachineSet = new LinkedHashSet<>(machineToPanelMap.keySet());
            deadMachineSet.remove(null);
            for (MrMachine machine : machineReassignment.getMachineList()) {
                deadMachineSet.remove(machine);
                MrMachinePanel machinePanel = machineToPanelMap.get(machine);
                if (machinePanel == null) {
                    // New machine: create and register its row.
                    machinePanel = new MrMachinePanel(this, resourceList, machine);
                    machineListPanel.add(machinePanel);
                    machineToPanelMap.put(machine, machinePanel);
                }
                machinePanel.clearProcessAssignments();
            }
            unassignedPanel.clearProcessAssignments();
            // Redistribute every process assignment to its machine's row
            // (a null machine resolves to unassignedPanel via the null map key).
            for (MrProcessAssignment processAssignment : machineReassignment.getProcessAssignmentList()) {
                MrMachine machine = processAssignment.getMachine();
                MrMachinePanel machinePanel = machineToPanelMap.get(machine);
                machinePanel.addProcessAssignment(processAssignment);
            }
            for (MrMachine deadMachine : deadMachineSet) {
                MrMachinePanel deadMachinePanel = machineToPanelMap.remove(deadMachine);
                machineListPanel.remove(deadMachinePanel);
            }
            for (MrMachinePanel machinePanel : machineToPanelMap.values()) {
                machinePanel.update();
            }
        }
    }

    /**
     * Schedules a problem fact change that removes the given machine from the working
     * solution, first unwiring it from every process assignment that references it.
     */
    public void deleteMachine(final MrMachine machine) {
        logger.info("Scheduling delete of machine ({}).", machine);
        doProblemFactChange(scoreDirector -> {
            MachineReassignment machineReassignment = scoreDirector.getWorkingSolution();
            MrMachine workingMachine = scoreDirector.lookUpWorkingObject(machine);
            if (workingMachine == null) {
                // The machine has already been deleted (the UI asked to change the same machine twice), so do nothing
                return;
            }
            // First remove the problem fact from all planning entities that use it
            for (MrProcessAssignment processAssignment : machineReassignment.getProcessAssignmentList()) {
                if (processAssignment.getOriginalMachine() == workingMachine) {
                    scoreDirector.beforeProblemPropertyChanged(processAssignment);
                    processAssignment.setOriginalMachine(null);
                    scoreDirector.afterProblemPropertyChanged(processAssignment);
                }
                if (processAssignment.getMachine() == workingMachine) {
                    scoreDirector.beforeVariableChanged(processAssignment, "machine");
                    processAssignment.setMachine(null);
                    scoreDirector.afterVariableChanged(processAssignment, "machine");
                }
            }
            // A SolutionCloner does not clone problem fact lists (such as machineList)
            // Shallow clone the machineList so only workingSolution is affected, not bestSolution or guiSolution
            ArrayList<MrMachine> machineList = new ArrayList<>(machineReassignment.getMachineList());
            machineReassignment.setMachineList(machineList);
            // Remove the problem fact itself
            scoreDirector.beforeProblemFactRemoved(workingMachine);
            machineList.remove(workingMachine);
            scoreDirector.afterProblemFactRemoved(workingMachine);
            scoreDirector.triggerVariableListeners();
        });
    }

    /**
     * Action that pops up a machine chooser for one process assignment and, on OK,
     * applies the chosen machine as a change move (null = unassign).
     */
    private class MrProcessAssignmentAction extends AbstractAction {

        private MrProcessAssignment processAssignment;

        public MrProcessAssignmentAction(MrProcessAssignment processAssignment) {
            super(processAssignment.getLabel());
            this.processAssignment = processAssignment;
        }

        @Override
        public void actionPerformed(ActionEvent e) {
            List<MrMachine> machineList = getSolution().getMachineList();
            // Add 1 to array size to add null, which makes the entity unassigned
            JComboBox machineListField = new JComboBox(
                    machineList.toArray(new Object[machineList.size() + 1]));
            LabeledComboBoxRenderer.applyToComboBox(machineListField);
            machineListField.setSelectedItem(processAssignment.getMachine());
            int result = JOptionPane.showConfirmDialog(MachineReassignmentPanel.this.getRootPane(),
                    machineListField, "Select machine", JOptionPane.OK_CANCEL_OPTION);
            if (result == JOptionPane.OK_OPTION) {
                MrMachine toMrMachine = (MrMachine) machineListField.getSelectedItem();
                solutionBusiness.doChangeMove(processAssignment, "machine", toMrMachine);
                solverAndPersistenceFrame.resetScreen();
            }
        }
    }
}
gsheldon/optaplanner
optaplanner-examples/src/main/java/org/optaplanner/examples/machinereassignment/swingui/MachineReassignmentPanel.java
Java
apache-2.0
8,996
/*
 * Copyright 2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.security.rsocket.core;

import java.util.List;

import io.rsocket.SocketAcceptor;
import io.rsocket.metadata.WellKnownMimeType;
import io.rsocket.plugins.SocketAcceptorInterceptor;

import org.springframework.lang.Nullable;
import org.springframework.security.rsocket.api.PayloadInterceptor;
import org.springframework.util.Assert;
import org.springframework.util.MimeType;
import org.springframework.util.MimeTypeUtils;

/**
 * A {@link SocketAcceptorInterceptor} that decorates a {@link SocketAcceptor} with a
 * {@code PayloadSocketAcceptor}, which applies the configured
 * {@link PayloadInterceptor}s and carries the default data/metadata mime types.
 *
 * @author Rob Winch
 * @since 5.2
 */
public class PayloadSocketAcceptorInterceptor implements SocketAcceptorInterceptor {

	/** Interceptors applied to every payload, in list order. */
	private final List<PayloadInterceptor> interceptors;

	/** Data mime type to fall back on when the connection does not declare one; may be null. */
	@Nullable
	private MimeType defaultDataMimeType;

	/** Metadata mime type fallback; defaults to RSocket composite metadata. */
	private MimeType defaultMetadataMimeType = MimeTypeUtils
			.parseMimeType(WellKnownMimeType.MESSAGE_RSOCKET_COMPOSITE_METADATA.getString());

	public PayloadSocketAcceptorInterceptor(List<PayloadInterceptor> interceptors) {
		this.interceptors = interceptors;
	}

	@Override
	public SocketAcceptor apply(SocketAcceptor socketAcceptor) {
		// Wrap the delegate acceptor and hand it the interceptor chain plus mime type defaults.
		PayloadSocketAcceptor payloadSocketAcceptor = new PayloadSocketAcceptor(socketAcceptor,
				this.interceptors);
		payloadSocketAcceptor.setDefaultDataMimeType(this.defaultDataMimeType);
		payloadSocketAcceptor.setDefaultMetadataMimeType(this.defaultMetadataMimeType);
		return payloadSocketAcceptor;
	}

	public void setDefaultDataMimeType(@Nullable MimeType defaultDataMimeType) {
		this.defaultDataMimeType = defaultDataMimeType;
	}

	public void setDefaultMetadataMimeType(MimeType defaultMetadataMimeType) {
		Assert.notNull(defaultMetadataMimeType, "defaultMetadataMimeType cannot be null");
		this.defaultMetadataMimeType = defaultMetadataMimeType;
	}

}
djechelon/spring-security
rsocket/src/main/java/org/springframework/security/rsocket/core/PayloadSocketAcceptorInterceptor.java
Java
apache-2.0
2,336
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Utilities for image bundling tool."""

import logging
import os
import subprocess
import time
import urllib2  # NOTE: Python 2 only; this module targets the py2 runtime.

METADATA_URL_PREFIX = 'http://169.254.169.254/computeMetadata/'
METADATA_V1_URL_PREFIX = METADATA_URL_PREFIX + 'v1/'


class MakeFileSystemException(Exception):
  """Error occurred in file system creation."""


class TarAndGzipFileException(Exception):
  """Error occurred in creating the tarball."""


class LoadDiskImage(object):
  """Context manager that maps a raw disk image's partitions via kpartx."""

  def __init__(self, file_path):
    """Initializes LoadDiskImage object.

    Args:
      file_path: a path to a file containing raw disk image.
    """
    self._file_path = file_path

  def __enter__(self):
    """Map disk image as a device.

    Returns:
      A list of devices for every partition found in an image.
    """
    SyncFileSystem()
    kpartx_cmd = ['kpartx', '-a', '-v', '-s', self._file_path]
    output = RunCommand(kpartx_cmd)
    devs = []
    # kpartx prints lines like "add map loop0p1 ..."; collect the mapper names.
    for line in output.splitlines():
      split_line = line.split()
      if (len(split_line) > 2 and split_line[0] == 'add'
          and split_line[1] == 'map'):
        devs.append('/dev/mapper/' + split_line[2])
    # Give udev a moment to create the device nodes.
    time.sleep(2)
    return devs

  def __exit__(self, unused_exc_type, unused_exc_value, unused_exc_tb):
    """Unmap disk image as a device.

    Args:
      unused_exc_type: unused.
      unused_exc_value: unused.
      unused_exc_tb: unused.
    """
    SyncFileSystem()
    time.sleep(2)
    kpartx_cmd = ['kpartx', '-d', '-v', '-s', self._file_path]
    RunCommand(kpartx_cmd)


class MountFileSystem(object):
  """Context manager that mounts a device on enter and unmounts it on exit."""

  def __init__(self, dev_path, dir_path, fs_type):
    """Initializes MountFileSystem object.

    Args:
      dev_path: A path to a device to mount.
      dir_path: A path to a directory where a device is to be mounted.
      fs_type: The file system type on the device (e.g. 'ext4', 'xfs').
    """
    self._dev_path = dev_path
    self._dir_path = dir_path
    self._fs_type = fs_type

  def __enter__(self):
    """Mounts a device."""
    # Since the bundled image can have the same uuid as the root disk,
    # we should prevent uuid conflicts for xfs mounts.
    # BUGFIX: compare strings with ==, not 'is' (identity only worked by
    # accident through CPython string interning).
    if self._fs_type == 'xfs':
      mount_cmd = ['mount', '-o', 'nouuid', self._dev_path, self._dir_path]
    else:
      mount_cmd = ['mount', self._dev_path, self._dir_path]
    RunCommand(mount_cmd)

  def __exit__(self, unused_exc_type, unused_exc_value, unused_exc_tb):
    """Unmounts a file system.

    Args:
      unused_exc_type: unused.
      unused_exc_value: unused.
      unused_exc_tb: unused.
    """
    umount_cmd = ['umount', self._dir_path]
    RunCommand(umount_cmd)
    SyncFileSystem()


def SyncFileSystem():
  """Flushes file system buffers to disk."""
  RunCommand(['sync'])


def GetMounts(root='/'):
  """Find all mount points under the specified root.

  Args:
    root: a path to look for a mount points.

  Returns:
    A list of mount points.
  """
  output = RunCommand(['/bin/mount', '-l'])
  mounts = []
  for line in output.splitlines():
    split_line = line.split()
    mount_point = split_line[2]
    if mount_point == root:
      continue
    # We are simply ignoring the fs_type of fs for now. But we can use that
    # later Just verify that these are actually mount points.
    if os.path.ismount(mount_point) and mount_point.startswith(root):
      mounts.append(mount_point)
  return mounts


def MakePartitionTable(file_path):
  """Create a partition table in a file.

  Args:
    file_path: A path to a file where a partition table will be created.
  """
  RunCommand(['parted', file_path, 'mklabel', 'msdos'])


def MakePartition(file_path, partition_type, fs_type, start, end):
  """Create a partition in a file.

  Args:
    file_path: A path to a file where a partition will be created.
    partition_type: A type of a partition to be created. Tested option is msdos.
    fs_type: A type of a file system to be created. For example, ext2, ext3,
      etc.
    start: Start offset of a partition in bytes.
    end: End offset of a partition in bytes.
  """
  parted_cmd = ['parted', file_path, 'unit B', 'mkpart', partition_type,
                fs_type, str(start), str(end)]
  RunCommand(parted_cmd)


def MakeFileSystem(dev_path, fs_type, uuid=None):
  """Create a file system in a device.

  Args:
    dev_path: A path to a device.
    fs_type: A type of a file system to be created. For example ext2, ext3,
      etc.
    uuid: The value to use as the UUID for the filesystem. If none, a random
      UUID will be generated and used.

  Returns:
    The uuid of the filesystem. This will be the same as the passed value if
    a value was specified. If no uuid was passed in, this will be the randomly
    generated uuid.

  Raises:
    MakeFileSystemException: If mkfs encounters an error.
  """
  if uuid is None:
    uuid = RunCommand(['uuidgen']).strip()
  # BUGFIX: RunCommand always returns a str, so the old 'uuid is None' check
  # could never fire; guard against an empty uuidgen result instead.
  if not uuid:
    raise MakeFileSystemException(dev_path)
  mkfs_cmd = ['mkfs', '-t', fs_type, dev_path]
  RunCommand(mkfs_cmd)
  # BUGFIX: compare strings with ==, not 'is'.
  if fs_type == 'xfs':
    set_uuid_cmd = ['xfs_admin', '-U', uuid, dev_path]
  else:
    set_uuid_cmd = ['tune2fs', '-U', uuid, dev_path]
  RunCommand(set_uuid_cmd)
  return uuid


def Rsync(src, dest, exclude_file, ignore_hard_links, recursive, xattrs):
  """Copy files from specified directory using rsync.

  Args:
    src: Source location to copy.
    dest: Destination to copy files to.
    exclude_file: A path to a file which contains a list of exclude from copy
      filters.
    ignore_hard_links: If True a hard links are copied as a separate files. If
      False, hard link are recreated in dest.
    recursive: Specifies if directories are copied recursively or not.
    xattrs: Specifies if extended attributes are preserved or not.
  """
  rsync_cmd = ['rsync', '--times', '--perms', '--owner', '--group', '--links',
               '--devices', '--acls', '--sparse']
  if not ignore_hard_links:
    rsync_cmd.append('--hard-links')
  if recursive:
    rsync_cmd.append('--recursive')
  else:
    rsync_cmd.append('--dirs')
  if xattrs:
    rsync_cmd.append('--xattrs')
  if exclude_file:
    rsync_cmd.append('--exclude-from=' + exclude_file)
  rsync_cmd.extend([src, dest])
  logging.debug('Calling: %s', repr(rsync_cmd))
  if exclude_file:
    logging.debug('Contents of exclude file %s:', exclude_file)
    with open(exclude_file, 'rb') as excludes:
      for line in excludes:
        logging.debug(' %s', line.rstrip())
  RunCommand(rsync_cmd)


def GetUUID(partition_path):
  """Fetches the UUID of the filesystem on the specified partition.

  Args:
    partition_path: The path to the partition.

  Returns:
    The uuid of the filesystem, or None if blkid prints no UUID= token.
  """
  output = RunCommand(['blkid', partition_path])
  # blkid output contains a token of the form UUID="..."; strip the prefix
  # and the trailing quote.
  for token in output.split():
    if token.startswith('UUID='):
      uuid = token.strip()[len('UUID="'):-1]
      logging.debug('found uuid = %s', uuid)
      return uuid


def CopyBytes(src, dest, count):
  """Copies count bytes from the src to dest file.

  Args:
    src: The source to read bytes from.
    dest: The destination to copy bytes to.
    count: Number of bytes to copy.
  """
  block_size = 4096
  block_count = count / block_size
  dd_command = ['dd',
                'if=%s' % src,
                'of=%s' % dest,
                'conv=notrunc',
                'bs=%s' % block_size,
                'count=%s' % block_count]
  RunCommand(dd_command)
  # Copy any tail smaller than one block byte-by-byte.
  remaining_bytes = count - block_count * block_size
  if remaining_bytes:
    logging.debug('remaining bytes to copy = %s', remaining_bytes)
    dd_command = ['dd',
                  'if=%s' % src,
                  'of=%s' % dest,
                  'seek=%s' % block_count,
                  'skip=%s' % block_count,
                  'conv=notrunc',
                  'bs=1',
                  'count=%s' % remaining_bytes]
    RunCommand(dd_command)


def GetPartitionStart(disk_path, partition_number):
  """Returns the starting position in bytes of the partition.

  Args:
    disk_path: The path to disk device.
    partition_number: The partition number to lookup. 1 based.

  Returns:
    The starting position of the first partition in bytes.

  Raises:
    subprocess.CalledProcessError: If running parted fails.
    IndexError: If there is no partition at the given number.
  """
  parted_cmd = ['parted',
                disk_path,
                'unit B',
                'print']
  # In case the device is not valid and parted throws the retry/cancel prompt
  # write c to stdin.
  output = RunCommand(parted_cmd, input_str='c')
  for line in output.splitlines():
    split_line = line.split()
    # Partition rows start with the partition number; byte offsets carry a
    # trailing 'B' which is sliced off before conversion.
    if len(split_line) > 4 and split_line[0] == str(partition_number):
      return int(split_line[1][:-1])
  raise IndexError()


def RemovePartition(disk_path, partition_number):
  """Removes the partition number from the disk.

  Args:
    disk_path: The disk to remove the partition from.
    partition_number: The partition number to remove.
  """
  parted_cmd = ['parted',
                disk_path,
                'rm',
                str(partition_number)]
  # In case the device is not valid and parted throws the retry/cancel prompt
  # write c to stdin.
  RunCommand(parted_cmd, input_str='c')


def GetDiskSize(disk_file):
  """Returns the size of the disk device in bytes.

  Args:
    disk_file: The full path to the disk device.

  Returns:
    The size of the disk device in bytes.

  Raises:
    subprocess.CalledProcessError: If fdisk command fails for the disk file.
  """
  # fdisk -s reports the size in 1K blocks.
  output = RunCommand(['fdisk', '-s', disk_file])
  return int(output) * 1024


def RunCommand(command, input_str=None):
  """Runs the command and returns the output printed on stdout.

  Args:
    command: The command to run.
    input_str: The input to pass to subprocess via stdin.

  Returns:
    The stdout from running the command.

  Raises:
    subprocess.CalledProcessError: if the command fails.
  """
  logging.debug('running %s with input=%s', command, input_str)
  p = subprocess.Popen(command, stdin=subprocess.PIPE,
                       stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  cmd_output = p.communicate(input_str)
  logging.debug('stdout %s', cmd_output[0])
  logging.debug('stderr %s', cmd_output[1])
  logging.debug('returncode %s', p.returncode)
  if p.returncode:
    logging.warning('Error while running %s return_code = %s\n'
                    'stdout=%s\nstderr=%s',
                    command, p.returncode, cmd_output[0],
                    cmd_output[1])
    raise subprocess.CalledProcessError(p.returncode, cmd=command)
  return cmd_output[0]


def TarAndGzipFile(src_paths, dest):
  """Pack file in tar archive and optionally gzip it.

  Args:
    src_paths: A list of files that will be archived.
      (Must be in the same directory.)
    dest: An archive name. If a file ends with .gz or .tgz an archive is
      gzipped as well.

  Raises:
    TarAndGzipFileException: If tar encounters an error.
  """
  if dest.endswith('.gz') or dest.endswith('.tgz'):
    mode = 'czSf'
  else:
    mode = 'cSf'
  src_names = [os.path.basename(src_path) for src_path in src_paths]
  # Take the directory of the first file in the list, all files are expected
  # to be in the same directory.
  src_dir = os.path.dirname(src_paths[0])
  tar_cmd = ['tar', mode, dest, '-C', src_dir] + src_names
  retcode = subprocess.call(tar_cmd)
  if retcode:
    raise TarAndGzipFileException(','.join(src_paths))


class Http(object):
  """Thin wrapper around urllib2 for talking to the GCE metadata server."""

  def Get(self, request, timeout=None):
    """Performs the request and returns the response body."""
    return urllib2.urlopen(request, timeout=timeout).read()

  def GetMetadata(self, url_path, recursive=False, timeout=None):
    """Retrieves instance metadata.

    Args:
      url_path: The path of the metadata url after the api version.
        http://169.254.169.254/computeMetadata/v1/url_path
      recursive: If set, returns the tree of metadata starting at url_path as
        a json string.
      timeout: How long to wait for blocking operations (in seconds).
        A value of None uses urllib2's default timeout.

    Returns:
      The metadata returned based on the url path.
    """
    # Use the latest version of the metadata.
    suffix = ''
    if recursive:
      suffix = '?recursive=true'
    url = '{0}{1}{2}'.format(METADATA_V1_URL_PREFIX, url_path, suffix)
    request = urllib2.Request(url)
    # Required header; the metadata server rejects requests without it.
    request.add_unredirected_header('Metadata-Flavor', 'Google')
    return self.Get(request, timeout=timeout)


def IsRunningOnGCE():
  """Detect if we are running on GCE.

  Returns:
    True if we are running on GCE, False otherwise.
  """
  # Try accessing DMI/SMBIOS informations through dmidecode first
  try:
    dmidecode_cmd = ['dmidecode', '-s', 'bios-vendor']
    output = RunCommand(dmidecode_cmd)
    return 'Google' in output
  except subprocess.CalledProcessError:
    # We fail if dmidecode doesn't exist or we have insufficient privileges
    pass

  # If dmidecode is not working, fallback to contacting the metadata server
  try:
    Http().GetMetadata('instance/id', timeout=1)
    return True
  except urllib2.HTTPError as e:
    logging.warning('HTTP error: %s (http status code=%s)' % (e.reason, e.code))
  except urllib2.URLError as e:
    logging.warning('Cannot reach metadata server: %s' % e.reason)
  return False
tweksteen/compute-image-packages
gcimagebundle/gcimagebundlelib/utils.py
Python
apache-2.0
13,831
using System.Collections.Generic; using System.Collections.Specialized; using System.Linq; using DataTests.AdventureWorks.LTS; using Microsoft.VisualStudio.TestTools.UnitTesting; using OpenRiaServices.Silverlight.Testing; using TestDomainServices; namespace OpenRiaServices.DomainServices.Client.Test { public class CatalogEntityContainer : EntityContainer { public CatalogEntityContainer() { CreateEntitySet<Product>(EntitySetOperations.Add|EntitySetOperations.Edit|EntitySetOperations.Remove); CreateEntitySet<PurchaseOrder>(EntitySetOperations.Add | EntitySetOperations.Edit | EntitySetOperations.Remove); CreateEntitySet<PurchaseOrderDetail>(EntitySetOperations.Add | EntitySetOperations.Edit | EntitySetOperations.Remove); CreateEntitySet<Employee>(EntitySetOperations.Add | EntitySetOperations.Edit | EntitySetOperations.Remove); } } [TestClass] public class AssociationTests : UnitTestBase { private PurchaseOrder TestOrder; private List<PurchaseOrderDetail> TestDetails; private int NumNotifications = 0; private int purchaseOrderDetailIDSequence = 1; [TestInitialize] public void TestSetup() { TestOrder = new PurchaseOrder { PurchaseOrderID = 1 }; TestDetails = new List<PurchaseOrderDetail> { new PurchaseOrderDetail { PurchaseOrderID = 1, PurchaseOrderDetailID = GetUniquePurchaseOrderID() }, new PurchaseOrderDetail { PurchaseOrderID = 1, PurchaseOrderDetailID = GetUniquePurchaseOrderID() }, new PurchaseOrderDetail { PurchaseOrderID = 1, PurchaseOrderDetailID = GetUniquePurchaseOrderID() }, new PurchaseOrderDetail { PurchaseOrderID = 1, PurchaseOrderDetailID = GetUniquePurchaseOrderID() }, new PurchaseOrderDetail { PurchaseOrderID = 1, PurchaseOrderDetailID = GetUniquePurchaseOrderID() }, }; } private int GetUniquePurchaseOrderID() { return purchaseOrderDetailIDSequence++; } internal sealed class ScenariosEntityTestContainer : EntityContainer { public ScenariosEntityTestContainer() { this.CreateEntitySet<A>(EntitySetOperations.Add | EntitySetOperations.Edit | 
EntitySetOperations.Remove); this.CreateEntitySet<C>(EntitySetOperations.Add | EntitySetOperations.Edit | EntitySetOperations.Remove); this.CreateEntitySet<D>(EntitySetOperations.Add | EntitySetOperations.Edit | EntitySetOperations.Remove); } } /// <summary> /// Repro test for Bug #588085, which was a stack overflow caused by infinite /// recursion in the generated EntityRef association code. /// </summary> [TestMethod] public void TestAssociation_OneToOne() { ScenariosEntityTestContainer ec = new ScenariosEntityTestContainer(); // set up association, C pointing to D D d1 = new D { ID = 1 }; C c1 = new C { ID = 1, DID_Ref1 = 1 }; ec.GetEntitySet<D>().Attach(d1); ec.GetEntitySet<C>().Attach(c1); // now point another C at the above D that is already // part of an association C c2 = new C { ID = 2 }; // this line was causing the recursion c2.D_Ref1 = d1; Assert.AreSame(d1, c2.D_Ref1); Assert.AreEqual(d1.ID, c2.DID_Ref1); } [TestMethod] [WorkItem(591588)] public void Bug591588_TestAssociation_OneToOne() { ScenariosEntityTestContainer ec = new ScenariosEntityTestContainer(); // set up 2 associations D d1 = new D { ID = 1 }; C c1 = new C { ID = 1, DID_Ref1 = 1 }; D d2 = new D { ID = 2 }; C c2= new C { ID = 2, DID_Ref1 = 2 }; ec.GetEntitySet<D>().Attach(d1); ec.GetEntitySet<D>().Attach(d2); ec.GetEntitySet<C>().Attach(c1); ec.GetEntitySet<C>().Attach(c2); Assert.AreSame(d1.C, c1); Assert.AreSame(d2.C, c2); ec.GetEntitySet<C>().Remove(c1); d1.C = d2.C; Assert.AreSame(d1.C, c2); Assert.IsNull(d2.C); } [TestMethod] public void TestEntityCollectionCaching() { CatalogEntityContainer container = new CatalogEntityContainer(); PurchaseOrder order = new PurchaseOrder { PurchaseOrderID = 1 }; PurchaseOrderDetail detail1 = new PurchaseOrderDetail { PurchaseOrderID = 1, PurchaseOrderDetailID = 1 }; PurchaseOrderDetail detail2 = new PurchaseOrderDetail { PurchaseOrderID = 1, PurchaseOrderDetailID = 2 }; container.LoadEntities(new Entity[] { order, detail1, detail2 }); 
Assert.AreEqual(2, order.PurchaseOrderDetails.Count); // now add a couple new details PurchaseOrderDetail detail3 = new PurchaseOrderDetail(); PurchaseOrderDetail detail4 = new PurchaseOrderDetail(); order.PurchaseOrderDetails.Add(detail3); order.PurchaseOrderDetails.Add(detail4); Assert.AreEqual(4, order.PurchaseOrderDetails.Count); // now modify the parent FK, which will cause the cached // results to be reset, but we expect the explicitly added // entities to be retained // Here we're using ApplyState to allow us to set a PK member w/o validation failure // since PK members cannot be changed. This test should really be based on an association // not involving PK, but the test is still valid this way. order.ApplyState(new Dictionary<string, object> { { "PurchaseOrderID", 2 } }); Assert.AreEqual(2, order.PurchaseOrderDetails.Count); Assert.IsTrue(order.PurchaseOrderDetails.Contains(detail3)); Assert.IsTrue(order.PurchaseOrderDetails.Contains(detail4)); } [TestMethod] public void TestEntityRefCaching() { CatalogEntityContainer container = new CatalogEntityContainer(); PurchaseOrderDetail detail = new PurchaseOrderDetail { PurchaseOrderDetailID = 1, PurchaseOrderID = 1 }; PurchaseOrder order = new PurchaseOrder { PurchaseOrderID = 1 }; PurchaseOrder order2 = new PurchaseOrder { PurchaseOrderID = 2 }; container.LoadEntities(new Entity[] { order, order2}); container.LoadEntities(new Entity[] { detail }); // force the EntityRef to cache Assert.AreSame(order, detail.PurchaseOrder); // clear the entity set to verify that the cached // entity is cleared EntitySet purchaseOrderSet = container.GetEntitySet<PurchaseOrder>(); purchaseOrderSet.Clear(); Assert.AreEqual(0, purchaseOrderSet.Count); // after the set has been cleared, we expect null Assert.IsNull(detail.PurchaseOrder); // change the FK and verify that we requery again, getting no match // since all orders have been cleared from the set detail.PurchaseOrderID = 2; Assert.AreSame(null, detail.PurchaseOrder); // 
Reload the order entities and verify we get the // correct order container.LoadEntities(new Entity[] { order, order2 }); Assert.AreSame(order2, detail.PurchaseOrder); // reset the FK and verify that we requery to get the // right entity detail.PurchaseOrderID = 1; Assert.AreSame(order, detail.PurchaseOrder); } [TestMethod] public void TestEntityCaching_NewEntities() { CatalogEntityContainer container = new CatalogEntityContainer(); // add two orders and a detail PurchaseOrder order1 = new PurchaseOrder(); PurchaseOrder order2 = new PurchaseOrder(); PurchaseOrderDetail detail = new PurchaseOrderDetail(); container.GetEntitySet<PurchaseOrder>().Add(order1); container.GetEntitySet<PurchaseOrder>().Add(order2); container.GetEntitySet<PurchaseOrderDetail>().Add(detail); // examine the order ref of the detail - ensure that // no result is returned, since a FK query would match // BOTH orders Assert.IsNull(detail.PurchaseOrder); // now that we've cached a null, make sure that if more // new entities are added, the ref doesn't change container.GetEntitySet<PurchaseOrder>().Add(new PurchaseOrder()); Assert.IsNull(detail.PurchaseOrder); // now assign order1, and remove order2 - make sure that our // ref to order1 remains detail.PurchaseOrder = order1; Assert.AreSame(order1, detail.PurchaseOrder); container.GetEntitySet<PurchaseOrder>().Remove(order2); Assert.AreSame(order1, detail.PurchaseOrder); container.GetEntitySet<PurchaseOrder>().Remove(order1); Assert.IsNull(detail.PurchaseOrder); } [TestMethod] public void TestEntityRefCaching_Detach() { CatalogEntityContainer container = new CatalogEntityContainer(); PurchaseOrderDetail detail = new PurchaseOrderDetail { PurchaseOrderDetailID = 1, PurchaseOrderID = 1 }; PurchaseOrder order = new PurchaseOrder { PurchaseOrderID = 1 }; container.LoadEntities(new Entity[] { order, detail }); Assert.AreSame(order, detail.PurchaseOrder); // now detach the detail and verify that the // cached entity is still returned 
container.GetEntitySet<PurchaseOrderDetail>().Detach(detail); Assert.AreSame(order, detail.PurchaseOrder); } [TestMethod] public void EntityRefCaching_MultipartKeys() { TestProvider_Scenarios ctxt = new TestProvider_Scenarios(TestURIs.TestProvider_Scenarios); B b = new B { ID1 = 1, ID2 = 2 }; A a = new A { ID = 1 }; ctxt.EntityContainer.LoadEntities(new Entity[] { a, b }); int propChangeCount = 0; a.PropertyChanged += (s, e) => { if (e.PropertyName == "B") { propChangeCount++; } }; a.B = b; Assert.AreSame(b, a.B); Assert.AreEqual(2, propChangeCount); // if we set the FK member directly, we expect the // cached reference to be reset a.BID2 = 0; Assert.IsNull(a.B); Assert.AreEqual(3, propChangeCount); // if we set all values of the multipart // key back to valid values we expect to // get the valid entity a.BID1 = 1; a.BID2 = 2; Assert.AreSame(b, a.B); Assert.AreEqual(4, propChangeCount); } [TestMethod] public void TestCollectionQuery_DetachedEntity() { CatalogEntityContainer ec = new CatalogEntityContainer(); // with the order not part of any EntityContainer/Set, // its collection returns empty Assert.IsNull(TestOrder.EntitySet); Assert.AreEqual(0, TestOrder.PurchaseOrderDetails.Count()); ((INotifyCollectionChanged)TestOrder.PurchaseOrderDetails).CollectionChanged -= EntityCollectionChanged; } [TestMethod] public void TestCollectionQuery_SubscribeBeforeAttach() { CatalogEntityContainer ec = new CatalogEntityContainer(); NumNotifications = 0; // here we subscribe to the event BEFORE the entity is added // to the container ((INotifyCollectionChanged)TestOrder.PurchaseOrderDetails).CollectionChanged += delegate(object sender, NotifyCollectionChangedEventArgs e) { NumNotifications++; }; ec.LoadEntities(new PurchaseOrder[] { TestOrder }); TestNotifications(ec); } [TestMethod] public void TestCollectionQuery_SubscribeAfterAttach() { CatalogEntityContainer ec = new CatalogEntityContainer(); NumNotifications = 0; ec.LoadEntities(new PurchaseOrder[] { TestOrder }); // here 
we subscribe to the event AFTER the entity is added // to the container ((INotifyCollectionChanged)TestOrder.PurchaseOrderDetails).CollectionChanged += new NotifyCollectionChangedEventHandler(EntityCollectionChanged); TestNotifications(ec); } private void EntityCollectionChanged(object sender, NotifyCollectionChangedEventArgs e) { NumNotifications++; } [TestMethod] public void TestCollectionQuery_DetachParent() { CatalogEntityContainer ec = new CatalogEntityContainer(); NumNotifications = 0; ec.LoadEntities(new PurchaseOrder[] { TestOrder }); ((INotifyCollectionChanged)TestOrder.PurchaseOrderDetails).CollectionChanged += new NotifyCollectionChangedEventHandler(EntityCollectionChanged); Assert.AreEqual(0, TestOrder.PurchaseOrderDetails.Count); // load a detail and verify we are notified ec.LoadEntities(new PurchaseOrderDetail[] { new PurchaseOrderDetail { PurchaseOrderID = 1, PurchaseOrderDetailID = GetUniquePurchaseOrderID() } }); Assert.AreEqual(1, NumNotifications); // detach the parent entity and verify we no longer receive notifications NumNotifications = 0; TestOrder.EntitySet = null; ec.LoadEntities(new PurchaseOrderDetail[] { new PurchaseOrderDetail { PurchaseOrderID = 1, PurchaseOrderDetailID = GetUniquePurchaseOrderID() } }); Assert.AreEqual(0, NumNotifications); } private void TestNotifications(CatalogEntityContainer ec) { // with only the order in the container // its collection returns empty Assert.IsNotNull(TestOrder.EntitySet); Assert.AreEqual(0, TestOrder.PurchaseOrderDetails.Count()); Assert.AreEqual(0, NumNotifications); // after we load some entities, we expect a change notification ec.LoadEntities(TestDetails); Assert.AreEqual(TestDetails.Count, NumNotifications); Assert.IsTrue(TestDetails.SequenceEqual(TestOrder.PurchaseOrderDetails)); // now add an entity that doesn't match the predicate and // verify we don't get notified NumNotifications = 0; ec.LoadEntities(new PurchaseOrderDetail[] { new PurchaseOrderDetail { PurchaseOrderID = 9, 
PurchaseOrderDetailID = GetUniquePurchaseOrderID() } }); Assert.AreEqual(0, NumNotifications); // now load one matching and verify we get notified ec.LoadEntities(new PurchaseOrderDetail[] { new PurchaseOrderDetail { PurchaseOrderID = 1, PurchaseOrderDetailID = GetUniquePurchaseOrderID() } }); Assert.AreEqual(1, NumNotifications); // verify we get notified if the set is cleared NumNotifications = 0; EntitySet<PurchaseOrderDetail> entitySet = ec.GetEntitySet<PurchaseOrderDetail>(); entitySet.Clear(); Assert.AreEqual(1, NumNotifications); // verify that we can reuse the set and continue getting notifications NumNotifications = 0; ec.LoadEntities(new PurchaseOrderDetail[] { new PurchaseOrderDetail { PurchaseOrderID = 1, PurchaseOrderDetailID = GetUniquePurchaseOrderID() } }); Assert.AreEqual(1, NumNotifications); } } }
Daniel-Svensson/OpenRiaServices
OpenRiaServices.DomainServices.Client/Test/Silverlight/Data/AssociationTests.cs
C#
apache-2.0
16,594
// <auto-generated> // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. // </auto-generated> namespace Microsoft.Azure.Management.Internal.Network.Version2017_10_01.Models { /// <summary> /// Defines values for ApplicationGatewaySslPolicyType. /// </summary> public static class ApplicationGatewaySslPolicyType { public const string Predefined = "Predefined"; public const string Custom = "Custom"; } }
devigned/azure-powershell
src/Common/Commands.Common.Network/Version2017_10_01/Models/ApplicationGatewaySslPolicyType.cs
C#
apache-2.0
704
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.5.0_11) on Sat Aug 18 11:01:03 CEST 2007 --> <META http-equiv="Content-Type" content="text/html; charset=ISO-8859-1"> <TITLE> Uses of Class org.apache.commons.httpclient.auth.AuthPolicy (HttpClient 3.1 API) </TITLE> <LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../stylesheet.css" TITLE="Style"> <SCRIPT type="text/javascript"> function windowTitle() { parent.document.title="Uses of Class org.apache.commons.httpclient.auth.AuthPolicy (HttpClient 3.1 API)"; } </SCRIPT> <NOSCRIPT> </NOSCRIPT> </HEAD> <BODY BGCOLOR="white" onload="windowTitle();"> <!-- ========= START OF TOP NAVBAR ======= --> <A NAME="navbar_top"><!-- --></A> <A HREF="#skip-navbar_top" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_top_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../org/apache/commons/httpclient/auth/AuthPolicy.html" title="class in org.apache.commons.httpclient.auth"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A 
HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;PREV&nbsp; &nbsp;NEXT</FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../../index.html?org/apache/commons/httpclient/auth//class-useAuthPolicy.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="AuthPolicy.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_top"></A> <!-- ========= END OF TOP NAVBAR ========= --> <HR> <CENTER> <H2> <B>Uses of Class<br>org.apache.commons.httpclient.auth.AuthPolicy</B></H2> </CENTER> No usage of org.apache.commons.httpclient.auth.AuthPolicy <P> <HR> <!-- ======= START OF BOTTOM NAVBAR ====== --> <A NAME="navbar_bottom"><!-- --></A> <A HREF="#skip-navbar_bottom" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_bottom_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD 
BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../org/apache/commons/httpclient/auth/AuthPolicy.html" title="class in org.apache.commons.httpclient.auth"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;PREV&nbsp; &nbsp;NEXT</FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../../index.html?org/apache/commons/httpclient/auth//class-useAuthPolicy.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="AuthPolicy.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_bottom"></A> <!-- ======== END OF BOTTOM NAVBAR ======= --> <HR> Copyright &copy; 2001-2007 Apache Software Foundation. All Rights Reserved. </BODY> </HTML>
fmassart/commons-httpclient
docs/apidocs/org/apache/commons/httpclient/auth/class-use/AuthPolicy.html
HTML
apache-2.0
6,102
/* * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH * under one or more contributor license agreements. See the NOTICE file * distributed with this work for additional information regarding copyright * ownership. Camunda licenses this file to you under the Apache License, * Version 2.0; you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.application.impl.metadata.spi; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import org.camunda.bpm.application.impl.metadata.ProcessArchiveXmlImpl; import org.camunda.bpm.container.impl.metadata.spi.ProcessEngineXml; import org.camunda.bpm.engine.repository.ResumePreviousBy; /** * <p>Java API representation of the {@link ProcessesXml} Metadata.</p> * * @author Daniel Meyer * */ public interface ProcessesXml { /** * @return A {@link List} of {@link ProcessEngineXml} Metadata Items representing process engine configurations. */ public List<ProcessEngineXml> getProcessEngines(); /** * @return A {@link List} of {@link ProcessArchiveXml} Metadata Items representing process archive deployments. 
*/ public List<ProcessArchiveXml> getProcessArchives(); /** * <p>Constant representing the empty processes.xml</p> */ public final static ProcessesXml EMPTY_PROCESSES_XML = new ProcessesXml() { public List<ProcessEngineXml> getProcessEngines() { return Collections.emptyList(); } public List<ProcessArchiveXml> getProcessArchives() { List<ProcessArchiveXml> processArchives = new ArrayList<ProcessArchiveXml>(); // add single PA ProcessArchiveXmlImpl pa = new ProcessArchiveXmlImpl(); processArchives.add(pa); pa.setProcessResourceNames(Collections.<String>emptyList()); // with default properties HashMap<String, String> properties = new HashMap<String, String>(); pa.setProperties(properties); properties.put(ProcessArchiveXml.PROP_IS_DELETE_UPON_UNDEPLOY, Boolean.FALSE.toString()); properties.put(ProcessArchiveXml.PROP_IS_SCAN_FOR_PROCESS_DEFINITIONS, Boolean.TRUE.toString()); properties.put(ProcessArchiveXml.PROP_IS_DEPLOY_CHANGED_ONLY, Boolean.FALSE.toString()); properties.put(ProcessArchiveXml.PROP_RESUME_PREVIOUS_BY, ResumePreviousBy.RESUME_BY_PROCESS_DEFINITION_KEY); return processArchives; } }; }
camunda/camunda-bpm-platform
engine/src/main/java/org/camunda/bpm/application/impl/metadata/spi/ProcessesXml.java
Java
apache-2.0
2,856
<html> <head> <meta http-equiv="Content-Type" content="text/html; charset=US-ASCII"> <title>Struct template multiplies_assign</title> <link rel="stylesheet" href="../../../../doc/src/boostbook.css" type="text/css"> <meta name="generator" content="DocBook XSL Stylesheets V1.78.1"> <link rel="home" href="../../index.html" title="The Boost C++ Libraries BoostBook Documentation Subset"> <link rel="up" href="../../proto/reference.html#header.boost.proto.traits_hpp" title="Header &lt;boost/proto/traits.hpp&gt;"> <link rel="prev" href="shift_right_assign/impl.html" title="Struct template impl"> <link rel="next" href="multiplies_assign/impl.html" title="Struct template impl"> </head> <body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF"> <table cellpadding="2" width="100%"><tr> <td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../boost.png"></td> <td align="center"><a href="../../../../index.html">Home</a></td> <td align="center"><a href="../../../../libs/libraries.htm">Libraries</a></td> <td align="center"><a href="http://www.boost.org/users/people.html">People</a></td> <td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td> <td align="center"><a href="../../../../more/index.htm">More</a></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="shift_right_assign/impl.html"><img src="../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../../proto/reference.html#header.boost.proto.traits_hpp"><img src="../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../index.html"><img src="../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="multiplies_assign/impl.html"><img src="../../../../doc/src/images/next.png" alt="Next"></a> </div> <div class="refentry"> <a name="boost.proto.multiplies_assign"></a><div class="titlepage"></div> <div class="refnamediv"> <h2><span class="refentrytitle">Struct template 
multiplies_assign</span></h2> <p>boost::proto::multiplies_assign &#8212; A metafunction for generating multiplies-assign expression types, a grammar element for matching multiplies-assign expressions, and a <a class="link" href="../../PrimitiveTransform.html" title="Concept PrimitiveTransform">PrimitiveTransform</a> that dispatches to the <code class="computeroutput"><a class="link" href="pass_through.html" title="Struct template pass_through">proto::pass_through&lt;&gt;</a></code> transform.</p> </div> <h2 xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" class="refsynopsisdiv-title">Synopsis</h2> <div xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" class="refsynopsisdiv"><pre class="synopsis"><span class="comment">// In header: &lt;<a class="link" href="../../proto/reference.html#header.boost.proto.traits_hpp" title="Header &lt;boost/proto/traits.hpp&gt;">boost/proto/traits.hpp</a>&gt; </span><span class="keyword">template</span><span class="special">&lt;</span><span class="keyword">typename</span> T<span class="special">,</span> <span class="keyword">typename</span> U<span class="special">&gt;</span> <span class="keyword">struct</span> <a class="link" href="multiplies_assign.html" title="Struct template multiplies_assign">multiplies_assign</a> <span class="special">:</span> <span class="keyword"></span> <a class="link" href="transform.html" title="Struct template transform">proto::transform</a><span class="special">&lt;</span> <span class="identifier">multiplies_assign</span><span class="special">&lt;</span><span class="identifier">T</span><span class="special">,</span> <span class="identifier">U</span><span class="special">&gt;</span> <span class="special">&gt;</span> <span class="special">{</span> <span class="comment">// types</span> <span class="keyword">typedef</span> <a class="link" href="expr.html" title="Struct template expr">proto::expr</a><span class="special">&lt;</span> <a class="link" 
href="tag/multiplies_assign.html" title="Struct multiplies_assign">proto::tag::multiplies_assign</a><span class="special">,</span> <a class="link" href="listN.html" title="Struct template listN">proto::list2</a><span class="special">&lt;</span> <span class="identifier">T</span><span class="special">,</span> <span class="identifier">U</span> <span class="special">&gt;</span> <span class="special">&gt;</span> <a name="boost.proto.multiplies_assign.type"></a><span class="identifier">type</span><span class="special">;</span> <span class="keyword">typedef</span> <a class="link" href="basic_expr.html" title="Struct template basic_expr">proto::basic_expr</a><span class="special">&lt;</span> <a class="link" href="tag/multiplies_assign.html" title="Struct multiplies_assign">proto::tag::multiplies_assign</a><span class="special">,</span> <a class="link" href="listN.html" title="Struct template listN">proto::list2</a><span class="special">&lt;</span> <span class="identifier">T</span><span class="special">,</span> <span class="identifier">U</span> <span class="special">&gt;</span> <span class="special">&gt;</span> <a name="boost.proto.multiplies_assign.proto_grammar"></a><span class="identifier">proto_grammar</span><span class="special">;</span> <span class="comment">// member classes/structs/unions</span> <span class="keyword">template</span><span class="special">&lt;</span><span class="keyword">typename</span> <a class="link" href="../../Expr.html" title="Concept Expr">Expr</a><span class="special">,</span> <span class="keyword">typename</span> State<span class="special">,</span> <span class="keyword">typename</span> Data<span class="special">&gt;</span> <span class="keyword">struct</span> <a class="link" href="multiplies_assign/impl.html" title="Struct template impl">impl</a> <span class="special">:</span> <span class="keyword"></span> <a class="link" href="pass_through.html" title="Struct template pass_through">proto::pass_through</a>&lt;multiplies_assign&gt;::template 
impl&lt;Expr, State, Data&gt; <span class="special">{</span> <span class="special">}</span><span class="special">;</span> <span class="special">}</span><span class="special">;</span></pre></div> <div class="refsect1"> <a name="idp384703280"></a><h2>Description</h2> </div> </div> <table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr> <td align="left"></td> <td align="right"><div class="copyright-footer">Copyright &#169; 2008 Eric Niebler<p> Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>) </p> </div></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="shift_right_assign/impl.html"><img src="../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../../proto/reference.html#header.boost.proto.traits_hpp"><img src="../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../index.html"><img src="../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="multiplies_assign/impl.html"><img src="../../../../doc/src/images/next.png" alt="Next"></a> </div> </body> </html>
biospi/seamass-windeps
src/boost_1_57_0/doc/html/boost/proto/multiplies_assign.html
HTML
apache-2.0
7,301
/*
 * Copyright 2013-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.ocaml;

import com.facebook.buck.cxx.CxxPlatforms;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.parser.NoSuchBuildTargetException;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.CellPathResolver;
import com.facebook.buck.rules.CommonDescriptionArg;
import com.facebook.buck.rules.Description;
import com.facebook.buck.rules.HasDeclaredDeps;
import com.facebook.buck.rules.ImplicitDepsInferringDescription;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.args.StringArg;
import com.facebook.buck.rules.coercer.OcamlSource;
import com.facebook.buck.rules.macros.StringWithMacros;
import com.facebook.buck.util.immutables.BuckStyleImmutable;
import com.facebook.buck.versions.VersionPropagator;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import java.util.Optional;
import org.immutables.value.Value;

/**
 * {@link Description} for {@code ocaml_library} build rules.
 *
 * <p>Collects the rule's sources and flags, merges in warnings flags configured at the buck-config
 * level, and delegates rule construction to {@link OcamlRuleBuilder#createBuildRule} with
 * {@code isLibrary} set to {@code true}.
 */
public class OcamlLibraryDescription
    implements Description<OcamlLibraryDescription.OcamlLibraryDescriptionArg>,
        ImplicitDepsInferringDescription<
            OcamlLibraryDescription.AbstractOcamlLibraryDescriptionArg>,
        VersionPropagator<OcamlLibraryDescription.OcamlLibraryDescriptionArg> {

  private final OcamlBuckConfig ocamlBuckConfig;

  public OcamlLibraryDescription(OcamlBuckConfig ocamlBuckConfig) {
    this.ocamlBuckConfig = ocamlBuckConfig;
  }

  public OcamlBuckConfig getOcamlBuckConfig() {
    return ocamlBuckConfig;
  }

  @Override
  public Class<OcamlLibraryDescriptionArg> getConstructorArgType() {
    return OcamlLibraryDescriptionArg.class;
  }

  @Override
  public BuildRule createBuildRule(
      TargetGraph targetGraph,
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleParams params,
      BuildRuleResolver resolver,
      CellPathResolver cellRoots,
      OcamlLibraryDescriptionArg args)
      throws NoSuchBuildTargetException {

    ImmutableList<OcamlSource> sources = args.getSrcs();

    // Start from the rule's own compiler flags (with macros expanded to Args).
    ImmutableList.Builder<com.facebook.buck.rules.args.Arg> flagBuilder = ImmutableList.builder();
    flagBuilder.addAll(
        OcamlDescriptionEnhancer.toStringWithMacrosArgs(
            buildTarget, cellRoots, resolver, args.getCompilerFlags()));

    // If either the .buckconfig or the rule supplies warnings flags, emit a single
    // "-w" option whose value is the config-level flags followed by the rule-level
    // flags. Note this fires even when a present value is the empty string.
    Optional<String> configWarnings = ocamlBuckConfig.getWarningsFlags();
    Optional<String> ruleWarnings = args.getWarningsFlags();
    if (configWarnings.isPresent() || ruleWarnings.isPresent()) {
      flagBuilder.addAll(
          StringArg.from("-w", configWarnings.orElse("") + ruleWarnings.orElse("")));
    }

    ImmutableList<String> linkerFlags = args.getLinkerFlags();
    boolean bytecodeOnly = args.getBytecodeOnly();
    // Native plugins only make sense when native compilation is enabled.
    boolean nativePlugin = !bytecodeOnly && args.getNativePlugin();

    return OcamlRuleBuilder.createBuildRule(
        ocamlBuckConfig,
        buildTarget,
        projectFilesystem,
        params,
        resolver,
        sources,
        /*isLibrary*/ true,
        bytecodeOnly,
        flagBuilder.build(),
        linkerFlags,
        nativePlugin);
  }

  @Override
  public void findDepsForTargetFromConstructorArgs(
      BuildTarget buildTarget,
      CellPathResolver cellRoots,
      AbstractOcamlLibraryDescriptionArg constructorArg,
      ImmutableCollection.Builder<BuildTarget> extraDepsBuilder,
      ImmutableCollection.Builder<BuildTarget> targetGraphOnlyDepsBuilder) {
    // The toolchain's C/C++ platform contributes parse-time deps (e.g. compiler targets).
    extraDepsBuilder.addAll(CxxPlatforms.getParseTimeDeps(ocamlBuckConfig.getCxxPlatform()));
  }

  /** Constructor arg for {@code ocaml_library}; concrete class generated by Immutables. */
  @BuckStyleImmutable
  @Value.Immutable
  interface AbstractOcamlLibraryDescriptionArg extends CommonDescriptionArg, HasDeclaredDeps {
    /** OCaml source files to compile into the library. */
    ImmutableList<OcamlSource> getSrcs();

    /** Extra compiler flags; may contain macros. */
    ImmutableList<StringWithMacros> getCompilerFlags();

    /** Extra flags passed to the linker. */
    ImmutableList<String> getLinkerFlags();

    /** Value appended to the buck-config warnings flags and passed via {@code -w}. */
    Optional<String> getWarningsFlags();

    /** When true, only bytecode output is produced (no native compilation). */
    @Value.Default
    default boolean getBytecodeOnly() {
      return false;
    }

    /** Build a native plugin; ignored when {@link #getBytecodeOnly()} is true. */
    @Value.Default
    default boolean getNativePlugin() {
      return false;
    }
  }
}
marcinkwiatkowski/buck
src/com/facebook/buck/ocaml/OcamlLibraryDescription.java
Java
apache-2.0
4,652
/*
 * Copyright 2016-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.util.network;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.LinkedListMultimap;
import com.google.common.collect.Multimap;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import java.util.Optional;
import javax.annotation.concurrent.GuardedBy;

/** Fake implementation of {@link ScribeLogger} which records logged lines to memory. */
public final class FakeScribeLogger extends ScribeLogger {

  // LinkedListMultimap preserves the order in which lines were logged per category.
  @GuardedBy("this")
  private final Multimap<String, String> loggedCategoryLines = LinkedListMultimap.create();

  /** Records {@code lines} under {@code category} and completes immediately. */
  @Override
  public synchronized ListenableFuture<Void> log(
      String category, Iterable<String> lines, Optional<Integer> bucket) {
    loggedCategoryLines.putAll(category, lines);
    return Futures.immediateFuture(null);
  }

  /**
   * Returns everything logged so far for {@code category}, in logging order.
   *
   * <p>{@code Multimap.get} never returns null, so this is safe even for categories that were
   * never logged to; the defensive copy ensures later logging cannot mutate the returned list.
   */
  public synchronized ImmutableList<String> getLinesForCategory(String category) {
    return ImmutableList.copyOf(loggedCategoryLines.get(category));
  }

  /** No resources to release for the in-memory fake. */
  @Override
  public void close() {}
}
brettwooldridge/buck
test/com/facebook/buck/util/network/FakeScribeLogger.java
Java
apache-2.0
2,023
/* Copyright 2018 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package spyglass import ( "context" "encoding/json" "fmt" "os" "reflect" "sort" "strings" "testing" "k8s.io/apimachinery/pkg/util/sets" coreapi "k8s.io/api/core/v1" "k8s.io/test-infra/prow/gcsupload" "k8s.io/test-infra/prow/pod-utils/downwardapi" "github.com/fsouza/fake-gcs-server/fakestorage" "github.com/sirupsen/logrus" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" tgconf "github.com/GoogleCloudPlatform/testgrid/pb/config" prowapi "k8s.io/test-infra/prow/apis/prowjobs/v1" "k8s.io/test-infra/prow/config" "k8s.io/test-infra/prow/deck/jobs" "k8s.io/test-infra/prow/io" "k8s.io/test-infra/prow/kube" "k8s.io/test-infra/prow/spyglass/api" "k8s.io/test-infra/prow/spyglass/lenses" "k8s.io/test-infra/prow/spyglass/lenses/common" ctrlruntimeclient "sigs.k8s.io/controller-runtime/pkg/client" ) var ( fakeJa *jobs.JobAgent fakeGCSServer *fakestorage.Server ) type fkc []prowapi.ProwJob func (f fkc) List(ctx context.Context, pjs *prowapi.ProwJobList, _ ...ctrlruntimeclient.ListOption) error { pjs.Items = f return nil } type fpkc string func (f fpkc) GetLogs(name, container string) ([]byte, error) { if name == "wowowow" || name == "powowow" { return []byte(fmt.Sprintf("%s.%s", f, container)), nil } return nil, fmt.Errorf("pod not found: %s", name) } type fca struct { c config.Config } func (ca fca) Config() *config.Config { return &ca.c } func TestMain(m *testing.M) { var longLog string for i := 0; i < 300; i++ { longLog += 
"here a log\nthere a log\neverywhere a log log\n" } fakeGCSServer = fakestorage.NewServer([]fakestorage.Object{ { BucketName: "test-bucket", Name: "logs/example-ci-run/403/build-log.txt", Content: []byte("Oh wow\nlogs\nthis is\ncrazy"), }, { BucketName: "test-bucket", Name: "logs/example-ci-run/403/long-log.txt", Content: []byte(longLog), }, { BucketName: "test-bucket", Name: "logs/example-ci-run/403/junit_01.xml", Content: []byte(`<testsuite tests="1017" failures="1017" time="0.016981535"> <testcase name="BeforeSuite" classname="Kubernetes e2e suite" time="0.006343795"> <failure type="Failure"> test/e2e/e2e.go:137 BeforeSuite on Node 1 failed test/e2e/e2e.go:137 </failure> </testcase> </testsuite>`), }, { BucketName: "test-bucket", Name: "logs/example-ci-run/403/started.json", Content: []byte(`{ "node": "gke-prow-default-pool-3c8994a8-qfhg", "repo-version": "v1.12.0-alpha.0.985+e6f64d0a79243c", "timestamp": 1528742858, "repos": { "k8s.io/kubernetes": "master", "k8s.io/release": "master" }, "version": "v1.12.0-alpha.0.985+e6f64d0a79243c", "metadata": { "pod": "cbc53d8e-6da7-11e8-a4ff-0a580a6c0269" } }`), }, { BucketName: "test-bucket", Name: "logs/example-ci-run/403/finished.json", Content: []byte(`{ "timestamp": 1528742943, "version": "v1.12.0-alpha.0.985+e6f64d0a79243c", "result": "SUCCESS", "passed": true, "job-version": "v1.12.0-alpha.0.985+e6f64d0a79243c", "metadata": { "repo": "k8s.io/kubernetes", "repos": { "k8s.io/kubernetes": "master", "k8s.io/release": "master" }, "infra-commit": "260081852", "pod": "cbc53d8e-6da7-11e8-a4ff-0a580a6c0269", "repo-commit": "e6f64d0a79243c834babda494151fc5d66582240" }, },`), }, { BucketName: "test-bucket", Name: "logs/symlink-party/123.txt", Content: []byte(`gs://test-bucket/logs/the-actual-place/123`), }, { BucketName: "multi-container-one-log", Name: "logs/job/123/test-1-build-log.txt", Content: []byte("this log exists in gcs!"), }, }) defer fakeGCSServer.Stop() kc := fkc{ prowapi.ProwJob{ Spec: prowapi.ProwJobSpec{ Agent: 
prowapi.KubernetesAgent, Job: "job", }, Status: prowapi.ProwJobStatus{ PodName: "wowowow", BuildID: "123", }, }, prowapi.ProwJob{ Spec: prowapi.ProwJobSpec{ Agent: prowapi.KubernetesAgent, Job: "jib", Cluster: "trusted", }, Status: prowapi.ProwJobStatus{ PodName: "powowow", BuildID: "123", }, }, prowapi.ProwJob{ Spec: prowapi.ProwJobSpec{ Agent: prowapi.KubernetesAgent, Job: "example-ci-run", PodSpec: &coreapi.PodSpec{ Containers: []coreapi.Container{ { Image: "tester", }, }, }, }, Status: prowapi.ProwJobStatus{ PodName: "wowowow", BuildID: "404", }, }, prowapi.ProwJob{ Spec: prowapi.ProwJobSpec{ Agent: prowapi.KubernetesAgent, Job: "multiple-container-job", PodSpec: &coreapi.PodSpec{ Containers: []coreapi.Container{ { Name: "test-1", }, { Name: "test-2", }, }, }, }, Status: prowapi.ProwJobStatus{ PodName: "wowowow", BuildID: "123", }, }, } fakeJa = jobs.NewJobAgent(context.Background(), kc, false, true, []string{}, map[string]jobs.PodLogClient{kube.DefaultClusterAlias: fpkc("clusterA"), "trusted": fpkc("clusterB")}, fca{}.Config) fakeJa.Start() os.Exit(m.Run()) } type dumpLens struct{} func (dumpLens) Config() lenses.LensConfig { return lenses.LensConfig{ Name: "dump", Title: "Dump View", } } func (dumpLens) Header(artifacts []api.Artifact, resourceDir string, config json.RawMessage, spyglassConfig config.Spyglass) string { return "" } func (dumpLens) Body(artifacts []api.Artifact, resourceDir string, data string, config json.RawMessage, spyglassConfig config.Spyglass) string { var view []byte for _, a := range artifacts { data, err := a.ReadAll() if err != nil { logrus.WithError(err).Error("Error reading artifact") continue } view = append(view, data...) 
} return string(view) } func (dumpLens) Callback(artifacts []api.Artifact, resourceDir string, data string, config json.RawMessage, spyglassConfig config.Spyglass) string { return "" } func TestViews(t *testing.T) { fakeGCSClient := fakeGCSServer.Client() testCases := []struct { name string registeredViewers []lenses.Lens lenses []int expectedLensTitles []string }{ { name: "Spyglass basic test", registeredViewers: []lenses.Lens{dumpLens{}}, lenses: []int{0}, expectedLensTitles: []string{"Dump View"}, }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { for _, l := range tc.registeredViewers { lenses.RegisterLens(l) } c := fca{ c: config.Config{ ProwConfig: config.ProwConfig{ Deck: config.Deck{ Spyglass: config.Spyglass{ Lenses: []config.LensFileConfig{ { Lens: config.LensConfig{ Name: "dump", }, }, }, }, }, }, }, } sg := New(context.Background(), fakeJa, c.Config, io.NewGCSOpener(fakeGCSClient), false) _, ls := sg.Lenses(tc.lenses) for _, l := range ls { var found bool for _, title := range tc.expectedLensTitles { if title == l.Config().Title { found = true } } if !found { t.Errorf("lens title %s not found in expected titles.", l.Config().Title) } } for _, title := range tc.expectedLensTitles { var found bool for _, l := range ls { if title == l.Config().Title { found = true } } if !found { t.Errorf("expected title %s not found in produced lenses.", title) } } }) } } func TestSplitSrc(t *testing.T) { testCases := []struct { name string src string expKeyType string expKey string expError bool }{ { name: "empty string", src: "", expError: true, }, { name: "missing key", src: "gcs", expError: true, }, { name: "prow key", src: "prowjob/example-job-name/123456", expKeyType: "prowjob", expKey: "example-job-name/123456", }, { name: "gcs key", src: "gcs/kubernetes-jenkins/pr-logs/pull/test-infra/0000/example-job-name/314159/", expKeyType: "gcs", expKey: "kubernetes-jenkins/pr-logs/pull/test-infra/0000/example-job-name/314159/", }, } for _, tc := range 
testCases { keyType, key, err := splitSrc(tc.src) if tc.expError && err == nil { t.Errorf("test %q expected error", tc.name) } if !tc.expError && err != nil { t.Errorf("test %q encountered unexpected error: %v", tc.name, err) } if keyType != tc.expKeyType || key != tc.expKey { t.Errorf("test %q: splitting src %q: Expected <%q, %q>, got <%q, %q>", tc.name, tc.src, tc.expKeyType, tc.expKey, keyType, key) } } } func TestJobPath(t *testing.T) { kc := fkc{ prowapi.ProwJob{ Spec: prowapi.ProwJobSpec{ Type: prowapi.PeriodicJob, Job: "example-periodic-job", DecorationConfig: &prowapi.DecorationConfig{ GCSConfiguration: &prowapi.GCSConfiguration{ Bucket: "chum-bucket", }, }, }, Status: prowapi.ProwJobStatus{ PodName: "flying-whales", BuildID: "1111", }, }, prowapi.ProwJob{ Spec: prowapi.ProwJobSpec{ Type: prowapi.PresubmitJob, Job: "example-presubmit-job", DecorationConfig: &prowapi.DecorationConfig{ GCSConfiguration: &prowapi.GCSConfiguration{ Bucket: "chum-bucket", }, }, }, Status: prowapi.ProwJobStatus{ PodName: "flying-whales", BuildID: "2222", }, }, prowapi.ProwJob{ Spec: prowapi.ProwJobSpec{ Type: prowapi.PresubmitJob, Job: "undecorated-job", }, Status: prowapi.ProwJobStatus{ PodName: "flying-whales", BuildID: "1", }, }, prowapi.ProwJob{ Spec: prowapi.ProwJobSpec{ Type: prowapi.PresubmitJob, Job: "missing-gcs-job", DecorationConfig: &prowapi.DecorationConfig{}, }, Status: prowapi.ProwJobStatus{ PodName: "flying-whales", BuildID: "1", }, }, } fakeJa = jobs.NewJobAgent(context.Background(), kc, false, true, []string{}, map[string]jobs.PodLogClient{kube.DefaultClusterAlias: fpkc("clusterA"), "trusted": fpkc("clusterB")}, fca{}.Config) fakeJa.Start() testCases := []struct { name string src string expJobPath string expError bool }{ { name: "non-presubmit job in GCS with trailing /", src: "gcs/kubernetes-jenkins/logs/example-job-name/123/", expJobPath: "gs/kubernetes-jenkins/logs/example-job-name", }, { name: "non-presubmit job in GCS without trailing /", src: 
"gcs/kubernetes-jenkins/logs/example-job-name/123", expJobPath: "gs/kubernetes-jenkins/logs/example-job-name", }, { name: "presubmit job in GCS with trailing /", src: "gcs/kubernetes-jenkins/pr-logs/pull/test-infra/0000/example-job-name/314159/", expJobPath: "gs/kubernetes-jenkins/pr-logs/directory/example-job-name", }, { name: "presubmit job in GCS without trailing /", src: "gcs/kubernetes-jenkins/pr-logs/pull/test-infra/0000/example-job-name/314159", expJobPath: "gs/kubernetes-jenkins/pr-logs/directory/example-job-name", }, { name: "non-presubmit Prow job", src: "prowjob/example-periodic-job/1111", expJobPath: "gs/chum-bucket/logs/example-periodic-job", }, { name: "Prow presubmit job", src: "prowjob/example-presubmit-job/2222", expJobPath: "gs/chum-bucket/pr-logs/directory/example-presubmit-job", }, { name: "nonexistent job", src: "prowjob/example-periodic-job/0000", expError: true, }, { name: "invalid key type", src: "oh/my/glob/drama/bomb", expError: true, }, { name: "invalid GCS path", src: "gcs/kubernetes-jenkins/bad-path", expError: true, }, { name: "job missing decoration", src: "prowjob/undecorated-job/1", expError: true, }, { name: "job missing GCS config", src: "prowjob/missing-gcs-job/1", expError: true, }, } for _, tc := range testCases { fakeGCSClient := fakeGCSServer.Client() fakeOpener := io.NewGCSOpener(fakeGCSClient) fca := config.Agent{} sg := New(context.Background(), fakeJa, fca.Config, fakeOpener, false) jobPath, err := sg.JobPath(tc.src) if tc.expError && err == nil { t.Errorf("test %q: JobPath(%q) expected error", tc.name, tc.src) continue } if !tc.expError && err != nil { t.Errorf("test %q: JobPath(%q) returned unexpected error %v", tc.name, tc.src, err) continue } if jobPath != tc.expJobPath { t.Errorf("test %q: JobPath(%q) expected %q, got %q", tc.name, tc.src, tc.expJobPath, jobPath) } } } func TestProwJobName(t *testing.T) { kc := fkc{ prowapi.ProwJob{ ObjectMeta: metav1.ObjectMeta{Name: "flying-whales-1"}, Spec: prowapi.ProwJobSpec{ 
Type: prowapi.PeriodicJob, Job: "example-periodic-job", DecorationConfig: &prowapi.DecorationConfig{ GCSConfiguration: &prowapi.GCSConfiguration{ Bucket: "chum-bucket", }, }, }, Status: prowapi.ProwJobStatus{ PodName: "flying-whales", BuildID: "1111", }, }, prowapi.ProwJob{ ObjectMeta: metav1.ObjectMeta{Name: "flying-whales-2"}, Spec: prowapi.ProwJobSpec{ Type: prowapi.PresubmitJob, Job: "example-presubmit-job", DecorationConfig: &prowapi.DecorationConfig{ GCSConfiguration: &prowapi.GCSConfiguration{ Bucket: "chum-bucket", }, }, }, Status: prowapi.ProwJobStatus{ PodName: "flying-whales", BuildID: "2222", }, }, prowapi.ProwJob{ ObjectMeta: metav1.ObjectMeta{Name: "flying-whales-3"}, Spec: prowapi.ProwJobSpec{ Type: prowapi.PresubmitJob, Job: "undecorated-job", }, Status: prowapi.ProwJobStatus{ PodName: "flying-whales", BuildID: "1", }, }, prowapi.ProwJob{ Spec: prowapi.ProwJobSpec{ Type: prowapi.PresubmitJob, Job: "missing-name-job", DecorationConfig: &prowapi.DecorationConfig{}, }, Status: prowapi.ProwJobStatus{ PodName: "flying-whales", BuildID: "1", }, }, } fakeJa = jobs.NewJobAgent(context.Background(), kc, false, true, []string{}, map[string]jobs.PodLogClient{kube.DefaultClusterAlias: fpkc("clusterA"), "trusted": fpkc("clusterB")}, fca{}.Config) fakeJa.Start() testCases := []struct { name string src string expJobPath string expError bool }{ { name: "non-presubmit job in GCS without trailing /", src: "gcs/kubernetes-jenkins/logs/example-periodic-job/1111/", expJobPath: "flying-whales-1", }, { name: "presubmit job in GCS with trailing /", src: "gcs/kubernetes-jenkins/pr-logs/pull/test-infra/0000/example-presubmit-job/2222/", expJobPath: "flying-whales-2", }, { name: "non-presubmit Prow job", src: "prowjob/example-periodic-job/1111", expJobPath: "flying-whales-1", }, { name: "Prow presubmit job", src: "prowjob/example-presubmit-job/2222", expJobPath: "flying-whales-2", }, { name: "nonexistent job", src: "prowjob/example-periodic-job/0000", expJobPath: "", }, { 
name: "job missing name", src: "prowjob/missing-name-job/1", expJobPath: "", }, { name: "previously invalid key type is now valid but nonexistent", src: "oh/my/glob/drama/bomb", expJobPath: "", }, { name: "invalid GCS path", src: "gcs/kubernetes-jenkins/bad-path", expError: true, }, } for _, tc := range testCases { fakeGCSClient := fakeGCSServer.Client() fakeOpener := io.NewGCSOpener(fakeGCSClient) fca := config.Agent{} sg := New(context.Background(), fakeJa, fca.Config, fakeOpener, false) jobPath, err := sg.ProwJobName(tc.src) if tc.expError && err == nil { t.Errorf("test %q: JobPath(%q) expected error", tc.name, tc.src) continue } if !tc.expError && err != nil { t.Errorf("test %q: JobPath(%q) returned unexpected error %v", tc.name, tc.src, err) continue } if jobPath != tc.expJobPath { t.Errorf("test %q: JobPath(%q) expected %q, got %q", tc.name, tc.src, tc.expJobPath, jobPath) } } } func TestRunPath(t *testing.T) { kc := fkc{ prowapi.ProwJob{ Spec: prowapi.ProwJobSpec{ Type: prowapi.PeriodicJob, Job: "example-periodic-job", DecorationConfig: &prowapi.DecorationConfig{ GCSConfiguration: &prowapi.GCSConfiguration{ Bucket: "chum-bucket", }, }, }, Status: prowapi.ProwJobStatus{ PodName: "flying-whales", BuildID: "1111", URL: "http://magic/view/gcs/chum-bucket/logs/example-periodic-job/1111", }, }, prowapi.ProwJob{ Spec: prowapi.ProwJobSpec{ Type: prowapi.PresubmitJob, Job: "example-presubmit-job", DecorationConfig: &prowapi.DecorationConfig{ GCSConfiguration: &prowapi.GCSConfiguration{ Bucket: "chum-bucket", }, }, Refs: &prowapi.Refs{ Org: "some-org", Repo: "some-repo", Pulls: []prowapi.Pull{ { Number: 42, }, }, }, }, Status: prowapi.ProwJobStatus{ PodName: "flying-whales", BuildID: "2222", URL: "http://magic/view/gcs/chum-bucket/pr-logs/pull/some-org_some-repo/42/example-presubmit-job/2222", }, }, } fakeJa = jobs.NewJobAgent(context.Background(), kc, false, true, []string{}, map[string]jobs.PodLogClient{kube.DefaultClusterAlias: fpkc("clusterA"), "trusted": 
fpkc("clusterB")}, fca{}.Config) fakeJa.Start() testCases := []struct { name string src string expRunPath string expError bool }{ { name: "non-presubmit job in GCS with trailing /", src: "gcs/kubernetes-jenkins/logs/example-job-name/123/", expRunPath: "kubernetes-jenkins/logs/example-job-name/123", }, { name: "non-presubmit job in GCS without trailing /", src: "gcs/kubernetes-jenkins/logs/example-job-name/123", expRunPath: "kubernetes-jenkins/logs/example-job-name/123", }, { name: "presubmit job in GCS with trailing /", src: "gcs/kubernetes-jenkins/pr-logs/pull/test-infra/0000/example-job-name/314159/", expRunPath: "kubernetes-jenkins/pr-logs/pull/test-infra/0000/example-job-name/314159", }, { name: "presubmit job in GCS without trailing /", src: "gcs/kubernetes-jenkins/pr-logs/pull/test-infra/0000/example-job-name/314159", expRunPath: "kubernetes-jenkins/pr-logs/pull/test-infra/0000/example-job-name/314159", }, { name: "non-presubmit Prow job", src: "prowjob/example-periodic-job/1111", expRunPath: "chum-bucket/logs/example-periodic-job/1111", }, { name: "Prow presubmit job with full path", src: "prowjob/example-presubmit-job/2222", expRunPath: "chum-bucket/pr-logs/pull/some-org_some-repo/42/example-presubmit-job/2222", }, { name: "nonexistent job", src: "prowjob/example-periodic-job/0000", expError: true, }, { name: "previously invalid key type is now valid", src: "oh/my/glob/drama/bomb", expRunPath: "my/glob/drama/bomb", }, { name: "nonsense string errors", src: "this is not useful", expError: true, }, } for _, tc := range testCases { fakeGCSClient := fakeGCSServer.Client() fakeOpener := io.NewGCSOpener(fakeGCSClient) fca := config.Agent{} fca.Set(&config.Config{ ProwConfig: config.ProwConfig{ Plank: config.Plank{ JobURLPrefixConfig: map[string]string{"*": "http://magic/view/gcs/"}, }, }, }) sg := New(context.Background(), fakeJa, fca.Config, fakeOpener, false) jobPath, err := sg.RunPath(tc.src) if tc.expError && err == nil { t.Errorf("test %q: RunPath(%q) 
expected error, got %q", tc.name, tc.src, jobPath) continue } if !tc.expError && err != nil { t.Errorf("test %q: RunPath(%q) returned unexpected error %v", tc.name, tc.src, err) continue } if jobPath != tc.expRunPath { t.Errorf("test %q: RunPath(%q) expected %q, got %q", tc.name, tc.src, tc.expRunPath, jobPath) } } } func TestRunToPR(t *testing.T) { kc := fkc{ prowapi.ProwJob{ Spec: prowapi.ProwJobSpec{ Type: prowapi.PeriodicJob, Job: "example-periodic-job", DecorationConfig: &prowapi.DecorationConfig{ GCSConfiguration: &prowapi.GCSConfiguration{ Bucket: "chum-bucket", }, }, }, Status: prowapi.ProwJobStatus{ PodName: "flying-whales", BuildID: "1111", URL: "http://magic/view/gcs/chum-bucket/logs/example-periodic-job/1111", }, }, prowapi.ProwJob{ Spec: prowapi.ProwJobSpec{ Type: prowapi.PresubmitJob, Job: "example-presubmit-job", DecorationConfig: &prowapi.DecorationConfig{ GCSConfiguration: &prowapi.GCSConfiguration{ Bucket: "chum-bucket", }, }, Refs: &prowapi.Refs{ Org: "some-org", Repo: "some-repo", Pulls: []prowapi.Pull{ { Number: 42, }, }, }, }, Status: prowapi.ProwJobStatus{ PodName: "flying-whales", BuildID: "2222", }, }, } fakeJa = jobs.NewJobAgent(context.Background(), kc, false, true, []string{}, map[string]jobs.PodLogClient{kube.DefaultClusterAlias: fpkc("clusterA"), "trusted": fpkc("clusterB")}, fca{}.Config) fakeJa.Start() testCases := []struct { name string src string expOrg string expRepo string expNumber int expError bool }{ { name: "presubmit job in GCS with trailing /", src: "gcs/kubernetes-jenkins/pr-logs/pull/Katharine_test-infra/1234/example-job-name/314159/", expOrg: "Katharine", expRepo: "test-infra", expNumber: 1234, }, { name: "presubmit job in GCS without trailing /", src: "gcs/kubernetes-jenkins/pr-logs/pull/Katharine_test-infra/1234/example-job-name/314159", expOrg: "Katharine", expRepo: "test-infra", expNumber: 1234, }, { name: "presubmit job in GCS without org name", src: 
"gcs/kubernetes-jenkins/pr-logs/pull/test-infra/2345/example-job-name/314159", expOrg: "kubernetes", expRepo: "test-infra", expNumber: 2345, }, { name: "presubmit job in GCS without org or repo name", src: "gcs/kubernetes-jenkins/pr-logs/pull/3456/example-job-name/314159", expOrg: "kubernetes", expRepo: "kubernetes", expNumber: 3456, }, { name: "Prow presubmit job", src: "prowjob/example-presubmit-job/2222", expOrg: "some-org", expRepo: "some-repo", expNumber: 42, }, { name: "Prow periodic job errors", src: "prowjob/example-periodic-job/1111", expError: true, }, { name: "GCS periodic job errors", src: "gcs/kuberneretes-jenkins/logs/example-periodic-job/1111", expError: true, }, { name: "GCS job with non-numeric PR number errors", src: "gcs/kubernetes-jenkins/pr-logs/pull/asdf/example-job-name/314159", expError: true, }, { name: "GCS PR job in directory errors", src: "gcs/kubernetes-jenkins/pr-logs/directory/example-job-name/314159", expError: true, }, { name: "Bad GCS key errors", src: "gcs/this is just nonsense", expError: true, }, { name: "Longer bad GCS key errors", src: "gcs/kubernetes-jenkins/pr-logs", expError: true, }, { name: "Nonsense string errors", src: "friendship is magic", expError: true, }, } for _, tc := range testCases { fakeGCSClient := fakeGCSServer.Client() fca := config.Agent{} fca.Set(&config.Config{ ProwConfig: config.ProwConfig{ Plank: config.Plank{ DefaultDecorationConfigs: config.DefaultDecorationMapToSliceTesting( map[string]*prowapi.DecorationConfig{ "*": { GCSConfiguration: &prowapi.GCSConfiguration{ Bucket: "kubernetes-jenkins", DefaultOrg: "kubernetes", DefaultRepo: "kubernetes", PathStrategy: "legacy", }, }, }), }, }, }) sg := New(context.Background(), fakeJa, fca.Config, io.NewGCSOpener(fakeGCSClient), false) org, repo, num, err := sg.RunToPR(tc.src) if tc.expError && err == nil { t.Errorf("test %q: RunToPR(%q) expected error", tc.name, tc.src) continue } if !tc.expError && err != nil { t.Errorf("test %q: RunToPR(%q) returned 
unexpected error %v", tc.name, tc.src, err) continue } if org != tc.expOrg || repo != tc.expRepo || num != tc.expNumber { t.Errorf("test %q: RunToPR(%q) expected %s/%s#%d, got %s/%s#%d", tc.name, tc.src, tc.expOrg, tc.expRepo, tc.expNumber, org, repo, num) } } } func TestProwToGCS(t *testing.T) { testCases := []struct { name string key string configPrefix string expectedPath string expectError bool }{ { name: "extraction from gubernator-like URL", key: "gubernator-job/1111", configPrefix: "https://gubernator.example.com/build/", expectedPath: "some-bucket/gubernator-job/1111/", expectError: false, }, { name: "extraction from spyglass-like URL", key: "spyglass-job/2222", configPrefix: "https://prow.example.com/view/gcs/", expectedPath: "some-bucket/spyglass-job/2222/", expectError: false, }, { name: "failed extraction from wrong URL", key: "spyglass-job/1111", configPrefix: "https://gubernator.example.com/build/", expectedPath: "", expectError: true, }, { name: "prefix longer than URL", key: "spyglass-job/2222", configPrefix: strings.Repeat("!", 100), expectError: true, }, } for _, tc := range testCases { kc := fkc{ prowapi.ProwJob{ Spec: prowapi.ProwJobSpec{ Job: "gubernator-job", }, Status: prowapi.ProwJobStatus{ URL: "https://gubernator.example.com/build/some-bucket/gubernator-job/1111/", BuildID: "1111", }, }, prowapi.ProwJob{ Spec: prowapi.ProwJobSpec{ Job: "spyglass-job", }, Status: prowapi.ProwJobStatus{ URL: "https://prow.example.com/view/gcs/some-bucket/spyglass-job/2222/", BuildID: "2222", }, }, } fakeGCSClient := fakeGCSServer.Client() fakeConfigAgent := fca{ c: config.Config{ ProwConfig: config.ProwConfig{ Plank: config.Plank{ JobURLPrefixConfig: map[string]string{"*": tc.configPrefix}, }, }, }, } fakeJa = jobs.NewJobAgent(context.Background(), kc, false, true, []string{}, map[string]jobs.PodLogClient{kube.DefaultClusterAlias: fpkc("clusterA"), "trusted": fpkc("clusterB")}, fakeConfigAgent.Config) fakeJa.Start() sg := New(context.Background(), fakeJa, 
fakeConfigAgent.Config, io.NewGCSOpener(fakeGCSClient), false) _, p, err := sg.prowToGCS(tc.key) if err != nil && !tc.expectError { t.Errorf("test %q: unexpected error: %v", tc.key, err) continue } if err == nil && tc.expectError { t.Errorf("test %q: expected an error but instead got success and path '%s'", tc.key, p) continue } if p != tc.expectedPath { t.Errorf("test %q: expected '%s' but got '%s'", tc.key, tc.expectedPath, p) } } } func TestGCSPathRoundTrip(t *testing.T) { testCases := []struct { name string pathStrategy string defaultOrg string defaultRepo string org string repo string }{ { name: "simple explicit path", pathStrategy: "explicit", org: "test-org", repo: "test-repo", }, { name: "explicit path with underscores", pathStrategy: "explicit", org: "test-org", repo: "underscore_repo", }, { name: "'single' path with default repo", pathStrategy: "single", defaultOrg: "default-org", defaultRepo: "default-repo", org: "default-org", repo: "default-repo", }, { name: "'single' path with non-default repo", pathStrategy: "single", defaultOrg: "default-org", defaultRepo: "default-repo", org: "default-org", repo: "random-repo", }, { name: "'single' path with non-default org but default repo", pathStrategy: "single", defaultOrg: "default-org", defaultRepo: "default-repo", org: "random-org", repo: "default-repo", }, { name: "'single' path with non-default org and repo", pathStrategy: "single", defaultOrg: "default-org", defaultRepo: "default-repo", org: "random-org", repo: "random-repo", }, { name: "legacy path with default repo", pathStrategy: "legacy", defaultOrg: "default-org", defaultRepo: "default-repo", org: "default-org", repo: "default-repo", }, { name: "legacy path with non-default repo", pathStrategy: "legacy", defaultOrg: "default-org", defaultRepo: "default-repo", org: "default-org", repo: "random-repo", }, { name: "legacy path with non-default org but default repo", pathStrategy: "legacy", defaultOrg: "default-org", defaultRepo: "default-repo", org: 
"random-org", repo: "default-repo", }, { name: "legacy path with non-default org and repo", pathStrategy: "legacy", defaultOrg: "default-org", defaultRepo: "default-repo", org: "random-org", repo: "random-repo", }, { name: "legacy path with non-default org and repo with underscores", pathStrategy: "legacy", defaultOrg: "default-org", defaultRepo: "default-repo", org: "random-org", repo: "underscore_repo", }, } for _, tc := range testCases { kc := fkc{} fakeConfigAgent := fca{ c: config.Config{ ProwConfig: config.ProwConfig{ Plank: config.Plank{ DefaultDecorationConfigs: config.DefaultDecorationMapToSliceTesting( map[string]*prowapi.DecorationConfig{ "*": { GCSConfiguration: &prowapi.GCSConfiguration{ DefaultOrg: tc.defaultOrg, DefaultRepo: tc.defaultRepo, }, }, }), }, }, }, } fakeJa = jobs.NewJobAgent(context.Background(), kc, false, true, []string{}, map[string]jobs.PodLogClient{kube.DefaultClusterAlias: fpkc("clusterA"), "trusted": fpkc("clusterB")}, fakeConfigAgent.Config) fakeJa.Start() fakeGCSClient := fakeGCSServer.Client() sg := New(context.Background(), fakeJa, fakeConfigAgent.Config, io.NewGCSOpener(fakeGCSClient), false) gcspath, _, _ := gcsupload.PathsForJob( &prowapi.GCSConfiguration{Bucket: "test-bucket", PathStrategy: tc.pathStrategy}, &downwardapi.JobSpec{ Job: "test-job", BuildID: "1234", Type: prowapi.PresubmitJob, Refs: &prowapi.Refs{ Org: tc.org, Repo: tc.repo, Pulls: []prowapi.Pull{{Number: 42}}, }, }, "") fmt.Println(gcspath) org, repo, prnum, err := sg.RunToPR("gcs/test-bucket/" + gcspath) if err != nil { t.Errorf("unexpected error: %v", err) continue } if org != tc.org || repo != tc.repo || prnum != 42 { t.Errorf("expected %s/%s#42, got %s/%s#%d", tc.org, tc.repo, org, repo, prnum) } } } func TestTestGridLink(t *testing.T) { testCases := []struct { name string src string expQuery string expError bool }{ { name: "non-presubmit job in GCS with trailing /", src: "gcs/kubernetes-jenkins/logs/periodic-job/123/", expQuery: 
"some-dashboard#periodic", }, { name: "non-presubmit job in GCS without trailing /", src: "gcs/kubernetes-jenkins/logs/periodic-job/123", expQuery: "some-dashboard#periodic", }, { name: "presubmit job in GCS", src: "gcs/kubernetes-jenkins/pr-logs/pull/test-infra/0000/presubmit-job/314159/", expQuery: "some-dashboard#presubmit", }, { name: "non-presubmit Prow job", src: "prowjob/periodic-job/1111", expQuery: "some-dashboard#periodic", }, { name: "presubmit Prow job", src: "prowjob/presubmit-job/2222", expQuery: "some-dashboard#presubmit", }, { name: "nonexistent job", src: "prowjob/nonexistent-job/0000", expError: true, }, { name: "invalid key type", src: "oh/my/glob/drama/bomb", expError: true, }, { name: "nonsense string errors", src: "this is not useful", expError: true, }, } kc := fkc{} fakeJa = jobs.NewJobAgent(context.Background(), kc, false, true, []string{}, map[string]jobs.PodLogClient{kube.DefaultClusterAlias: fpkc("clusterA"), "trusted": fpkc("clusterB")}, fca{}.Config) fakeJa.Start() tg := TestGrid{c: &tgconf.Configuration{ Dashboards: []*tgconf.Dashboard{ { Name: "some-dashboard", DashboardTab: []*tgconf.DashboardTab{ { Name: "periodic", TestGroupName: "periodic-job", }, { Name: "presubmit", TestGroupName: "presubmit-job", }, { Name: "some-other-job", TestGroupName: "some-other-job", }, }, }, }, }} for _, tc := range testCases { fakeGCSClient := fakeGCSServer.Client() fca := config.Agent{} fca.Set(&config.Config{ ProwConfig: config.ProwConfig{ Deck: config.Deck{ Spyglass: config.Spyglass{ TestGridRoot: "https://testgrid.com/", }, }, }, }) sg := New(context.Background(), fakeJa, fca.Config, io.NewGCSOpener(fakeGCSClient), false) sg.testgrid = &tg link, err := sg.TestGridLink(tc.src) if tc.expError { if err == nil { t.Errorf("test %q: TestGridLink(%q) expected error, got %q", tc.name, tc.src, link) } continue } if err != nil { t.Errorf("test %q: TestGridLink(%q) returned unexpected error %v", tc.name, tc.src, err) continue } if link != 
"https://testgrid.com/"+tc.expQuery { t.Errorf("test %q: TestGridLink(%q) expected %q, got %q", tc.name, tc.src, "https://testgrid.com/"+tc.expQuery, link) } } } func TestFetchArtifactsPodLog(t *testing.T) { kc := fkc{ prowapi.ProwJob{ Spec: prowapi.ProwJobSpec{ Agent: prowapi.KubernetesAgent, Job: "job", }, Status: prowapi.ProwJobStatus{ PodName: "wowowow", BuildID: "123", URL: "https://gubernator.example.com/build/job/123", }, }, prowapi.ProwJob{ Spec: prowapi.ProwJobSpec{ Agent: prowapi.KubernetesAgent, Job: "multi-container-one-log", }, Status: prowapi.ProwJobStatus{ PodName: "wowowow", BuildID: "123", URL: "https://gubernator.example.com/build/multi-container/123", }, }, } fakeConfigAgent := fca{ c: config.Config{ ProwConfig: config.ProwConfig{ Deck: config.Deck{ AllKnownStorageBuckets: sets.NewString("job", "kubernetes-jenkins", "multi-container-one-log"), }, Plank: config.Plank{ JobURLPrefixConfig: map[string]string{"*": "https://gubernator.example.com/build/"}, }, }, }, } fakeJa = jobs.NewJobAgent(context.Background(), kc, false, true, []string{}, map[string]jobs.PodLogClient{kube.DefaultClusterAlias: fpkc("clusterA"), "trusted": fpkc("clusterB")}, fakeConfigAgent.Config) fakeJa.Start() fakeGCSClient := fakeGCSServer.Client() sg := New(context.Background(), fakeJa, fakeConfigAgent.Config, io.NewGCSOpener(fakeGCSClient), false) testKeys := []string{ "prowjob/job/123", "gcs/kubernetes-jenkins/logs/job/123/", "gcs/kubernetes-jenkins/logs/job/123", } for _, key := range testKeys { result, err := sg.FetchArtifacts(context.Background(), key, "", 500e6, []string{"build-log.txt"}) if err != nil { t.Errorf("Unexpected error grabbing pod log for %s: %v", key, err) continue } if len(result) != 1 { t.Errorf("Expected 1 artifact for %s, got %d", key, len(result)) continue } content, err := result[0].ReadAll() if err != nil { t.Errorf("Unexpected error reading pod log for %s: %v", key, err) continue } if string(content) != fmt.Sprintf("clusterA.%s", 
kube.TestContainerName) { t.Errorf("Bad pod log content for %s: %q (expected 'clusterA')", key, content) } } multiContainerOneLogKey := "gcs/multi-container-one-log/logs/job/123" testKeys = append(testKeys, multiContainerOneLogKey) for _, key := range testKeys { containers := []string{"test-1", "test-2"} result, err := sg.FetchArtifacts(context.Background(), key, "", 500e6, []string{fmt.Sprintf("%s-%s", containers[0], singleLogName), fmt.Sprintf("%s-%s", containers[1], singleLogName)}) if err != nil { t.Errorf("Unexpected error grabbing pod log for %s: %v", key, err) continue } for i, art := range result { content, err := art.ReadAll() if err != nil { t.Errorf("Unexpected error reading pod log for %s: %v", key, err) continue } expected := fmt.Sprintf("clusterA.%s", containers[i]) if key == multiContainerOneLogKey && containers[i] == "test-1" { expected = "this log exists in gcs!" } if string(content) != expected { t.Errorf("Bad pod log content for %s: %q (expected '%s')", key, content, expected) } } } } func TestKeyToJob(t *testing.T) { testCases := []struct { name string path string jobName string buildID string expectErr bool }{ { name: "GCS periodic path with trailing slash", path: "gcs/kubernetes-jenkins/logs/periodic-kubernetes-bazel-test-1-14/40/", jobName: "periodic-kubernetes-bazel-test-1-14", buildID: "40", }, { name: "GCS periodic path without trailing slash", path: "gcs/kubernetes-jenkins/logs/periodic-kubernetes-bazel-test-1-14/40", jobName: "periodic-kubernetes-bazel-test-1-14", buildID: "40", }, { name: "GCS PR path with trailing slash", path: "gcs/kubernetes-jenkins/pr-logs/pull/test-infra/11573/pull-test-infra-bazel/25366/", jobName: "pull-test-infra-bazel", buildID: "25366", }, { name: "GCS PR path without trailing slash", path: "gcs/kubernetes-jenkins/pr-logs/pull/test-infra/11573/pull-test-infra-bazel/25366", jobName: "pull-test-infra-bazel", buildID: "25366", }, { name: "Prowjob path with trailing slash", path: 
"prowjob/pull-test-infra-bazel/25366/", jobName: "pull-test-infra-bazel", buildID: "25366", }, { name: "Prowjob path without trailing slash", path: "prowjob/pull-test-infra-bazel/25366", jobName: "pull-test-infra-bazel", buildID: "25366", }, { name: "Path with only one component", path: "nope", expectErr: true, }, } for _, tc := range testCases { jobName, buildID, err := common.KeyToJob(tc.path) if err != nil { if !tc.expectErr { t.Errorf("%s: unexpected error %v", tc.name, err) } continue } if tc.expectErr { t.Errorf("%s: expected an error, but got result %s #%s", tc.name, jobName, buildID) continue } if jobName != tc.jobName { t.Errorf("%s: expected job name %q, but got %q", tc.name, tc.jobName, jobName) continue } if buildID != tc.buildID { t.Errorf("%s: expected build ID %q, but got %q", tc.name, tc.buildID, buildID) } } } func TestResolveSymlink(t *testing.T) { testCases := []struct { name string path string result string expectErr bool }{ { name: "symlink without trailing slash is resolved", path: "gcs/test-bucket/logs/symlink-party/123", result: "gs/test-bucket/logs/the-actual-place/123", }, { name: "symlink with trailing slash is resolved", path: "gcs/test-bucket/logs/symlink-party/123/", result: "gs/test-bucket/logs/the-actual-place/123", }, { name: "non-symlink without trailing slash is unchanged", path: "gcs/test-bucket/better-logs/42", result: "gs/test-bucket/better-logs/42", }, { name: "non-symlink with trailing slash drops the slash", path: "gcs/test-bucket/better-logs/42/", result: "gs/test-bucket/better-logs/42", }, { name: "prowjob without trailing slash is unchanged", path: "prowjob/better-logs/42", result: "prowjob/better-logs/42", }, { name: "prowjob with trailing slash drops the slash", path: "prowjob/better-logs/42/", result: "prowjob/better-logs/42", }, { name: "unknown key type is an error", path: "wtf/what-is-this/send-help", expectErr: true, }, { name: "insufficient path components are an error", path: "gcs/hi", expectErr: true, }, } for 
_, tc := range testCases { fakeConfigAgent := fca{} fakeJa = jobs.NewJobAgent(context.Background(), fkc{}, false, true, []string{}, map[string]jobs.PodLogClient{kube.DefaultClusterAlias: fpkc("clusterA"), "trusted": fpkc("clusterB")}, fakeConfigAgent.Config) fakeJa.Start() fakeGCSClient := fakeGCSServer.Client() sg := New(context.Background(), fakeJa, fakeConfigAgent.Config, io.NewGCSOpener(fakeGCSClient), false) result, err := sg.ResolveSymlink(tc.path) if err != nil { if !tc.expectErr { t.Errorf("test %q: unexpected error: %v", tc.name, err) } continue } if tc.expectErr { t.Errorf("test %q: expected an error, but got result %q", tc.name, result) continue } if result != tc.result { t.Errorf("test %q: expected %q, but got %q", tc.name, tc.result, result) continue } } } func TestExtraLinks(t *testing.T) { testCases := []struct { name string content string links []ExtraLink expectErr bool }{ { name: "does nothing without error given no started.json", links: nil, }, { name: "errors given a malformed started.json", content: "this isn't json", expectErr: true, }, { name: "does nothing given metadata with no links", content: `{"metadata": {"somethingThatIsntLinks": 23}}`, links: nil, }, { name: "returns well-formed links", content: `{"metadata": {"links": {"ResultStore": {"url": "http://resultstore", "description": "The thing that isn't spyglass"}}}}`, links: []ExtraLink{{Name: "ResultStore", URL: "http://resultstore", Description: "The thing that isn't spyglass"}}, }, { name: "returns links without a description", content: `{"metadata": {"links": {"ResultStore": {"url": "http://resultstore"}}}}`, links: []ExtraLink{{Name: "ResultStore", URL: "http://resultstore"}}, }, { name: "skips links without a URL", content: `{"metadata": {"links": {"No Link": {"description": "bad link"}, "ResultStore": {"url": "http://resultstore"}}}}`, links: []ExtraLink{{Name: "ResultStore", URL: "http://resultstore"}}, }, { name: "skips links without a name", content: `{"metadata": {"links": 
{"": {"url": "http://resultstore"}}}}`, links: []ExtraLink{}, }, { name: "returns no links when links is empty", content: `{"metadata": {"links": {}}}`, links: []ExtraLink{}, }, { name: "returns multiple links", content: `{"metadata": {"links": {"A": {"url": "http://a", "description": "A!"}, "B": {"url": "http://b"}}}}`, links: []ExtraLink{{Name: "A", URL: "http://a", Description: "A!"}, {Name: "B", URL: "http://b"}}, }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { var objects []fakestorage.Object if tc.content != "" { objects = []fakestorage.Object{ { BucketName: "test-bucket", Name: "logs/some-job/42/started.json", Content: []byte(tc.content), }, } } gcsServer := fakestorage.NewServer(objects) defer gcsServer.Stop() gcsClient := gcsServer.Client() fakeConfigAgent := fca{ c: config.Config{ ProwConfig: config.ProwConfig{ Deck: config.Deck{ AllKnownStorageBuckets: sets.NewString("test-bucket"), }, }, }, } fakeJa = jobs.NewJobAgent(context.Background(), fkc{}, false, true, []string{}, map[string]jobs.PodLogClient{kube.DefaultClusterAlias: fpkc("clusterA"), "trusted": fpkc("clusterB")}, fakeConfigAgent.Config) fakeJa.Start() sg := New(context.Background(), fakeJa, fakeConfigAgent.Config, io.NewGCSOpener(gcsClient), false) result, err := sg.ExtraLinks(context.Background(), "gcs/test-bucket/logs/some-job/42") if err != nil { if !tc.expectErr { t.Fatalf("unexpected error: %v", err) } return } sort.Slice(result, func(i, j int) bool { return result[i].Name < result[j].Name }) sort.Slice(tc.links, func(i, j int) bool { return tc.links[i].Name < tc.links[j].Name }) if !reflect.DeepEqual(result, tc.links) { t.Fatalf("Expected links %#v, got %#v", tc.links, result) } }) } }
michelle192837/test-infra
prow/spyglass/spyglass_test.go
GO
apache-2.0
45,162
//! Synchronous test helpers over the asynchronous `indy::blob_storage`
//! API: each call blocks on the returned future via `.wait()`.

extern crate futures;

use indy::IndyError;
use indy::blob_storage;

use self::futures::Future;

/// Opens a blob-storage reader of the given `type_` configured by
/// `config_json`, blocking until the call completes.
/// Returns the reader handle on success.
pub fn open_reader(type_: &str, config_json: &str) -> Result<i32, IndyError> {
    blob_storage::open_reader(type_, config_json).wait()
}

/// Opens a blob-storage writer of the given `type_` configured by
/// `config_json`, blocking until the call completes.
/// Returns the writer handle on success.
pub fn open_writer(type_: &str, config_json: &str) -> Result<i32, IndyError> {
    blob_storage::open_writer(type_, config_json).wait()
}
Artemkaaas/indy-sdk
libindy/tests/utils/blob_storage.rs
Rust
apache-2.0
373
# Copyright (c) 2001-2004 Twisted Matrix Laboratories. # See LICENSE for details. # ''' Usage: advogato.py <name> <diary entry file> ''' from twisted.web.xmlrpc import Proxy from twisted.internet import reactor from getpass import getpass import sys class AddDiary: def __init__(self, name, password): self.name = name self.password = password self.proxy = Proxy('http://advogato.org/XMLRPC') def __call__(self, filename): self.data = open(filename).read() d = self.proxy.callRemote('authenticate', self.name, self.password) d.addCallbacks(self.login, self.noLogin) def noLogin(self, reason): print "could not login" reactor.stop() def login(self, cookie): d = self.proxy.callRemote('diary.set', cookie, -1, self.data) d.addCallbacks(self.setDiary, self.errorSetDiary) def setDiary(self, response): reactor.stop() def errorSetDiary(self, error): print "could not set diary", error reactor.stop() diary = AddDiary(sys.argv[1], getpass()) diary(sys.argv[2]) reactor.run()
jxta/cc
vendor/Twisted-10.0.0/doc/web/examples/advogato.py
Python
apache-2.0
1,110
<html> <title>org.continuent.appia.core.events.channel</title> <body> <i>Appia</i> events processed by any channel. <br> This package contains all the event types that a channel will accept and/or deliver. </body> </html>
archie/appia-byzantine
src/core/net/sf/appia/core/events/channel/package.html
HTML
apache-2.0
249
from openflow.optin_manager.sfa.rspecs.elements.element import Element

class PLTag(Element):
    """RSpec element representing one PlanetLab tag as a (tagname, value) pair."""

    # Attribute names this element carries in an RSpec document.
    fields = [
        'tagname',
        'value',
    ]
dana-i2cat/felix
optin_manager/src/python/openflow/optin_manager/sfa/rspecs/elements/pltag.py
Python
apache-2.0
161
package registry

import (
	"bytes"
	"crypto/sha256"
	// this is required for some certificates
	_ "crypto/sha512"
	"encoding/hex"
	"encoding/json"
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
	"net/http/cookiejar"
	"net/url"
	"strconv"
	"strings"
	"time"

	"github.com/Sirupsen/logrus"
	"github.com/docker/docker/pkg/httputils"
	"github.com/docker/docker/pkg/requestdecorator"
	"github.com/docker/docker/pkg/tarsum"
)

// Session bundles the state shared by a sequence of v1 registry calls:
// the credentials, the factory used to build decorated HTTP requests,
// the index endpoint, a cookie jar reused across requests, and the
// receive-timeout policy.
type Session struct {
	authConfig    *AuthConfig
	reqFactory    *requestdecorator.RequestFactory
	indexEndpoint *Endpoint
	jar           *cookiejar.Jar
	timeout       TimeoutType
}

// NewSession creates a Session for the given endpoint. When timeout is
// true the session uses ReceiveTimeout for its requests. For a
// standalone private registry reached over HTTPS (i.e. not the official
// index), it pings the endpoint and, if the registry reports itself as
// standalone, attaches a Basic-Auth decorator to the request factory so
// credentials accompany every request.
func NewSession(authConfig *AuthConfig, factory *requestdecorator.RequestFactory, endpoint *Endpoint, timeout bool) (r *Session, err error) {
	r = &Session{
		authConfig:    authConfig,
		indexEndpoint: endpoint,
	}
	if timeout {
		r.timeout = ReceiveTimeout
	}
	r.jar, err = cookiejar.New(nil)
	if err != nil {
		return nil, err
	}
	// If we're working with a standalone private registry over HTTPS, send Basic Auth headers
	// alongside our requests.
	if r.indexEndpoint.VersionString(1) != IndexServerAddress() && r.indexEndpoint.URL.Scheme == "https" {
		info, err := r.indexEndpoint.Ping()
		if err != nil {
			return nil, err
		}
		if info.Standalone {
			logrus.Debugf("Endpoint %s is eligible for private registry. Enabling decorator.", r.indexEndpoint.String())
			dec := requestdecorator.NewAuthDecorator(authConfig.Username, authConfig.Password)
			factory.AddDecorator(dec)
		}
	}
	r.reqFactory = factory
	return r, nil
}

// doRequest executes req with this session's cookie jar, timeout and
// TLS policy, delegating to the package-level doRequest helper.
func (r *Session) doRequest(req *http.Request) (*http.Response, *http.Client, error) {
	return doRequest(req, r.jar, r.timeout, r.indexEndpoint.IsSecure)
}

// Retrieve the history of a given image from the Registry.
// Return a list of the parent's json (requested image included)
// GetRemoteHistory fetches GET <registry>images/<imgID>/ancestry and
// decodes the JSON array of ancestor image IDs. A 401 maps to
// errLoginRequired; any other non-200 status becomes an HTTP request
// error.
func (r *Session) GetRemoteHistory(imgID, registry string, token []string) ([]string, error) {
	req, err := r.reqFactory.NewRequest("GET", registry+"images/"+imgID+"/ancestry", nil)
	if err != nil {
		return nil, err
	}
	setTokenAuth(req, token)
	res, _, err := r.doRequest(req)
	if err != nil {
		return nil, err
	}
	defer res.Body.Close()
	if res.StatusCode != 200 {
		if res.StatusCode == 401 {
			return nil, errLoginRequired
		}
		return nil, httputils.NewHTTPRequestError(fmt.Sprintf("Server error: %d trying to fetch remote history for %s", res.StatusCode, imgID), res)
	}
	jsonString, err := ioutil.ReadAll(res.Body)
	if err != nil {
		return nil, fmt.Errorf("Error while reading the http response: %s", err)
	}
	logrus.Debugf("Ancestry: %s", jsonString)
	history := new([]string)
	if err := json.Unmarshal(jsonString, history); err != nil {
		return nil, err
	}
	return *history, nil
}

// Check if an image exists in the Registry
// LookupRemoteImage issues GET <registry>images/<imgID>/json and treats
// any status other than 200 as "image not found / error"; the body is
// discarded.
func (r *Session) LookupRemoteImage(imgID, registry string, token []string) error {
	req, err := r.reqFactory.NewRequest("GET", registry+"images/"+imgID+"/json", nil)
	if err != nil {
		return err
	}
	setTokenAuth(req, token)
	res, _, err := r.doRequest(req)
	if err != nil {
		return err
	}
	res.Body.Close()
	if res.StatusCode != 200 {
		return httputils.NewHTTPRequestError(fmt.Sprintf("HTTP code %d", res.StatusCode), res)
	}
	return nil
}

// Retrieve an image from the Registry.
// GetRemoteImageJSON downloads an image's JSON metadata and returns the
// raw bytes plus the image size taken from the X-Docker-Size header
// (-1 when the header is absent).
func (r *Session) GetRemoteImageJSON(imgID, registry string, token []string) ([]byte, int, error) {
	// Get the JSON
	req, err := r.reqFactory.NewRequest("GET", registry+"images/"+imgID+"/json", nil)
	if err != nil {
		return nil, -1, fmt.Errorf("Failed to download json: %s", err)
	}
	setTokenAuth(req, token)
	res, _, err := r.doRequest(req)
	if err != nil {
		return nil, -1, fmt.Errorf("Failed to download json: %s", err)
	}
	defer res.Body.Close()
	if res.StatusCode != 200 {
		return nil, -1, httputils.NewHTTPRequestError(fmt.Sprintf("HTTP code %d", res.StatusCode), res)
	}
	// if the size header is not present, then set it to '-1'
	imageSize := -1
	if hdr := res.Header.Get("X-Docker-Size"); hdr != "" {
		imageSize, err = strconv.Atoi(hdr)
		if err != nil {
			return nil, -1, err
		}
	}
	jsonString, err := ioutil.ReadAll(res.Body)
	if err != nil {
		return nil, -1, fmt.Errorf("Failed to parse downloaded json: %s (%s)", err, jsonString)
	}
	return jsonString, imageSize, nil
}

// GetRemoteImageLayer opens a streaming download of an image layer,
// retrying up to 5 times with a linear backoff (i*5s). When the server
// advertises byte-range support and the expected size is known, the
// returned reader transparently resumes interrupted downloads; the
// caller must close the returned ReadCloser.
func (r *Session) GetRemoteImageLayer(imgID, registry string, token []string, imgSize int64) (io.ReadCloser, error) {
	var (
		retries    = 5
		statusCode = 0
		client     *http.Client
		res        *http.Response
		imageURL   = fmt.Sprintf("%simages/%s/layer", registry, imgID)
	)

	req, err := r.reqFactory.NewRequest("GET", imageURL, nil)
	if err != nil {
		return nil, fmt.Errorf("Error while getting from the server: %s\n", err)
	}
	setTokenAuth(req, token)
	for i := 1; i <= retries; i++ {
		statusCode = 0
		res, client, err = r.doRequest(req)
		if err != nil {
			logrus.Debugf("Error contacting registry: %s", err)
			if res != nil {
				if res.Body != nil {
					res.Body.Close()
				}
				statusCode = res.StatusCode
			}
			if i == retries {
				return nil, fmt.Errorf("Server error: Status %d while fetching image layer (%s)", statusCode, imgID)
			}
			// Linear backoff before the next attempt.
			time.Sleep(time.Duration(i) * 5 * time.Second)
			continue
		}
		break
	}

	if res.StatusCode != 200 {
		res.Body.Close()
		return nil, fmt.Errorf("Server error: Status %d while fetching image layer (%s)", res.StatusCode, imgID)
	}

	if res.Header.Get("Accept-Ranges") == "bytes" && imgSize > 0 {
		logrus.Debugf("server supports resume")
		return httputils.ResumableRequestReaderWithInitialResponse(client, req, 5, imgSize, res), nil
	}
	logrus.Debugf("server doesn't support resume")
	return res.Body, nil
}

// GetRemoteTags asks each registry host in turn for the repository's
// tag map and returns the first successful answer. A 404 aborts
// immediately ("Repository not found"); other non-200 statuses fall
// through to the next host.
func (r *Session) GetRemoteTags(registries []string, repository string, token []string) (map[string]string, error) {
	if strings.Count(repository, "/") == 0 {
		// This will be removed once the Registry supports auto-resolution on
		// the "library" namespace
		repository = "library/" + repository
	}
	for _, host := range registries {
		endpoint := fmt.Sprintf("%srepositories/%s/tags", host, repository)
		req, err := r.reqFactory.NewRequest("GET", endpoint, nil)
		if err != nil {
			return nil, err
		}
		setTokenAuth(req, token)
		res, _, err := r.doRequest(req)
		if err != nil {
			return nil, err
		}

		logrus.Debugf("Got status code %d from %s", res.StatusCode, endpoint)
		// NOTE(review): defer inside a loop keeps every response body
		// open until the function returns — harmless for the common
		// one-host case, but worth tightening if many hosts are passed.
		defer res.Body.Close()

		if res.StatusCode == 404 {
			return nil, fmt.Errorf("Repository not found")
		}
		if res.StatusCode != 200 {
			continue
		}

		result := make(map[string]string)
		if err := json.NewDecoder(res.Body).Decode(&result); err != nil {
			return nil, err
		}
		return result, nil
	}
	return nil, fmt.Errorf("Could not reach any registry endpoint")
}

// buildEndpointsList turns the comma-separated host lists from
// X-Docker-Endpoints headers into full "scheme://host/v1/" URLs, using
// the index URL's scheme for every endpoint.
func buildEndpointsList(headers []string, indexEp string) ([]string, error) {
	var endpoints []string
	parsedURL, err := url.Parse(indexEp)
	if err != nil {
		return nil, err
	}
	var urlScheme = parsedURL.Scheme
	// The Registry's URL scheme has to match the Index'
	for _, ep := range headers {
		epList := strings.Split(ep, ",")
		for _, epListElement := range epList {
			endpoints = append(
				endpoints,
				fmt.Sprintf("%s://%s/v1/", urlScheme, strings.TrimSpace(epListElement)))
		}
	}
	return endpoints, nil
}

// GetRepositoryData fetches the image list for a repository from the
// index, along with the auth tokens (X-Docker-Token) and the registry
// endpoints (X-Docker-Endpoints) announced in the response headers.
// When no endpoints header is present the index host itself is assumed
// to serve the repository.
func (r *Session) GetRepositoryData(remote string) (*RepositoryData, error) {
	repositoryTarget := fmt.Sprintf("%srepositories/%s/images", r.indexEndpoint.VersionString(1), remote)

	logrus.Debugf("[registry] Calling GET %s", repositoryTarget)

	req, err := r.reqFactory.NewRequest("GET", repositoryTarget, nil)
	if err != nil {
		return nil, err
	}
	if r.authConfig != nil && len(r.authConfig.Username) > 0 {
		req.SetBasicAuth(r.authConfig.Username, r.authConfig.Password)
	}
	req.Header.Set("X-Docker-Token", "true")

	res, _, err := r.doRequest(req)
	if err != nil {
		return nil, err
	}
	defer res.Body.Close()
	if res.StatusCode == 401 {
		return nil, errLoginRequired
	}
	// TODO: Right now we're ignoring checksums in the response body.
	// In the future, we need to use them to check image validity.
	if res.StatusCode == 404 {
		return nil, httputils.NewHTTPRequestError(fmt.Sprintf("HTTP code: %d", res.StatusCode), res)
	} else if res.StatusCode != 200 {
		errBody, err := ioutil.ReadAll(res.Body)
		if err != nil {
			logrus.Debugf("Error reading response body: %s", err)
		}
		return nil, httputils.NewHTTPRequestError(fmt.Sprintf("Error: Status %d trying to pull repository %s: %q", res.StatusCode, remote, errBody), res)
	}

	var tokens []string
	if res.Header.Get("X-Docker-Token") != "" {
		tokens = res.Header["X-Docker-Token"]
	}

	var endpoints []string
	if res.Header.Get("X-Docker-Endpoints") != "" {
		endpoints, err = buildEndpointsList(res.Header["X-Docker-Endpoints"], r.indexEndpoint.VersionString(1))
		if err != nil {
			return nil, err
		}
	} else {
		// Assume the endpoint is on the same host
		endpoints = append(endpoints, fmt.Sprintf("%s://%s/v1/", r.indexEndpoint.URL.Scheme, req.URL.Host))
	}

	remoteChecksums := []*ImgData{}
	if err := json.NewDecoder(res.Body).Decode(&remoteChecksums); err != nil {
		return nil, err
	}

	// Forge a better object from the retrieved data
	imgsData := make(map[string]*ImgData)
	for _, elem := range remoteChecksums {
		imgsData[elem.ID] = elem
	}

	return &RepositoryData{
		ImgList:   imgsData,
		Endpoints: endpoints,
		Tokens:    tokens,
	}, nil
}

// PushImageChecksumRegistry uploads an image's checksum via the
// X-Docker-Checksum(-Payload) headers, stores any cookies the registry
// sets, and maps the registry's "Image already exists" error body to
// ErrAlreadyExists.
func (r *Session) PushImageChecksumRegistry(imgData *ImgData, registry string, token []string) error {
	logrus.Debugf("[registry] Calling PUT %s", registry+"images/"+imgData.ID+"/checksum")

	req, err := r.reqFactory.NewRequest("PUT", registry+"images/"+imgData.ID+"/checksum", nil)
	if err != nil {
		return err
	}
	setTokenAuth(req, token)
	req.Header.Set("X-Docker-Checksum", imgData.Checksum)
	req.Header.Set("X-Docker-Checksum-Payload", imgData.ChecksumPayload)

	res, _, err := r.doRequest(req)
	if err != nil {
		return fmt.Errorf("Failed to upload metadata: %s", err)
	}
	defer res.Body.Close()
	if len(res.Cookies()) > 0 {
		r.jar.SetCookies(req.URL, res.Cookies())
	}
	if res.StatusCode != 200 {
		errBody, err := ioutil.ReadAll(res.Body)
		if err != nil {
			return fmt.Errorf("HTTP code %d while uploading metadata and error when trying to parse response body: %s", res.StatusCode, err)
		}
		var jsonBody map[string]string
		if err := json.Unmarshal(errBody, &jsonBody); err != nil {
			errBody = []byte(err.Error())
		} else if jsonBody["error"] == "Image already exists" {
			return ErrAlreadyExists
		}
		return fmt.Errorf("HTTP code %d while uploading metadata: %q", res.StatusCode, errBody)
	}
	return nil
}

// Push a local image to the registry
// PushImageJSONRegistry uploads the image's raw JSON metadata. A 401
// over plain HTTP is rejected explicitly (credentials are never sent
// over HTTP); the "Image already exists" error body maps to
// ErrAlreadyExists.
func (r *Session) PushImageJSONRegistry(imgData *ImgData, jsonRaw []byte, registry string, token []string) error {
	logrus.Debugf("[registry] Calling PUT %s", registry+"images/"+imgData.ID+"/json")

	req, err := r.reqFactory.NewRequest("PUT", registry+"images/"+imgData.ID+"/json", bytes.NewReader(jsonRaw))
	if err != nil {
		return err
	}
	req.Header.Add("Content-type", "application/json")
	setTokenAuth(req, token)

	res, _, err := r.doRequest(req)
	if err != nil {
		return fmt.Errorf("Failed to upload metadata: %s", err)
	}
	defer res.Body.Close()
	if res.StatusCode == 401 && strings.HasPrefix(registry, "http://") {
		return httputils.NewHTTPRequestError("HTTP code 401, Docker will not send auth headers over HTTP.", res)
	}
	if res.StatusCode != 200 {
		errBody, err := ioutil.ReadAll(res.Body)
		if err != nil {
			return httputils.NewHTTPRequestError(fmt.Sprintf("HTTP code %d while uploading metadata and error when trying to parse response body: %s", res.StatusCode, err), res)
		}
		var jsonBody map[string]string
		if err := json.Unmarshal(errBody, &jsonBody); err != nil {
			errBody = []byte(err.Error())
		} else if jsonBody["error"] == "Image already exists" {
			return ErrAlreadyExists
		}
		return httputils.NewHTTPRequestError(fmt.Sprintf("HTTP code %d while uploading metadata: %q", res.StatusCode, errBody), res)
	}
	return nil
}

// PushImageLayerRegistry streams a layer to the registry as a chunked
// upload while computing two digests on the fly: the tarsum of the
// layer (returned as checksum) and a sha256 over jsonRaw + '\n' + layer
// bytes (returned as checksumPayload, "sha256:<hex>").
func (r *Session) PushImageLayerRegistry(imgID string, layer io.Reader, registry string, token []string, jsonRaw []byte) (checksum string, checksumPayload string, err error) {
	logrus.Debugf("[registry] Calling PUT %s", registry+"images/"+imgID+"/layer")

	tarsumLayer, err := tarsum.NewTarSum(layer, false, tarsum.Version0)
	if err != nil {
		return "", "", err
	}
	h := sha256.New()
	h.Write(jsonRaw)
	h.Write([]byte{'\n'})
	// TeeReader feeds every uploaded byte into the sha256 hash as well.
	checksumLayer := io.TeeReader(tarsumLayer, h)

	req, err := r.reqFactory.NewRequest("PUT", registry+"images/"+imgID+"/layer", checksumLayer)
	if err != nil {
		return "", "", err
	}
	req.Header.Add("Content-Type", "application/octet-stream")
	req.ContentLength = -1
	req.TransferEncoding = []string{"chunked"}
	setTokenAuth(req, token)
	res, _, err := r.doRequest(req)
	if err != nil {
		return "", "", fmt.Errorf("Failed to upload layer: %s", err)
	}
	if rc, ok := layer.(io.Closer); ok {
		if err := rc.Close(); err != nil {
			return "", "", err
		}
	}
	defer res.Body.Close()

	if res.StatusCode != 200 {
		errBody, err := ioutil.ReadAll(res.Body)
		if err != nil {
			return "", "", httputils.NewHTTPRequestError(fmt.Sprintf("HTTP code %d while uploading metadata and error when trying to parse response body: %s", res.StatusCode, err), res)
		}
		return "", "", httputils.NewHTTPRequestError(fmt.Sprintf("Received HTTP code %d while uploading layer: %q", res.StatusCode, errBody), res)
	}

	checksumPayload = "sha256:" + hex.EncodeToString(h.Sum(nil))
	return tarsumLayer.Sum(jsonRaw), checksumPayload, nil
}

// push a tag on the registry.
// Remote has the format '<user>/<repo>
// PushRegistryTag associates a tag with an image revision by PUTting
// the JSON-quoted revision string to repositories/<remote>/tags/<tag>.
// Both 200 and 201 count as success.
func (r *Session) PushRegistryTag(remote, revision, tag, registry string, token []string) error {
	// "jsonify" the string
	revision = "\"" + revision + "\""
	path := fmt.Sprintf("repositories/%s/tags/%s", remote, tag)

	req, err := r.reqFactory.NewRequest("PUT", registry+path, strings.NewReader(revision))
	if err != nil {
		return err
	}
	req.Header.Add("Content-type", "application/json")
	setTokenAuth(req, token)
	req.ContentLength = int64(len(revision))
	res, _, err := r.doRequest(req)
	if err != nil {
		return err
	}
	res.Body.Close()
	if res.StatusCode != 200 && res.StatusCode != 201 {
		return httputils.NewHTTPRequestError(fmt.Sprintf("Internal server error: %d trying to push tag %s on %s", res.StatusCode, tag, remote), res)
	}
	return nil
}

// PushImageJSONIndex uploads the image list for a repository to the
// index, following redirects manually. With validate=false it expects
// 200/201 plus X-Docker-Token and X-Docker-Endpoints headers in the
// response; with validate=true it only pushes images that carry a
// checksum and expects a 204.
func (r *Session) PushImageJSONIndex(remote string, imgList []*ImgData, validate bool, regs []string) (*RepositoryData, error) {
	cleanImgList := []*ImgData{}
	if validate {
		// Only images with a computed checksum are submitted for validation.
		for _, elem := range imgList {
			if elem.Checksum != "" {
				cleanImgList = append(cleanImgList, elem)
			}
		}
	} else {
		cleanImgList = imgList
	}

	imgListJSON, err := json.Marshal(cleanImgList)
	if err != nil {
		return nil, err
	}
	var suffix string
	if validate {
		suffix = "images"
	}
	u := fmt.Sprintf("%srepositories/%s/%s", r.indexEndpoint.VersionString(1), remote, suffix)
	logrus.Debugf("[registry] PUT %s", u)
	logrus.Debugf("Image list pushed to index:\n%s", imgListJSON)
	headers := map[string][]string{
		"Content-type":   {"application/json"},
		"X-Docker-Token": {"true"},
	}
	if validate {
		headers["X-Docker-Endpoints"] = regs
	}

	// Redirect if necessary
	var res *http.Response
	for {
		if res, err = r.putImageRequest(u, headers, imgListJSON); err != nil {
			return nil, err
		}
		if !shouldRedirect(res) {
			break
		}
		res.Body.Close()
		u = res.Header.Get("Location")
		logrus.Debugf("Redirected to %s", u)
	}
	defer res.Body.Close()

	if res.StatusCode == 401 {
		return nil, errLoginRequired
	}

	var tokens, endpoints []string
	if !validate {
		if res.StatusCode != 200 && res.StatusCode != 201 {
			errBody, err := ioutil.ReadAll(res.Body)
			if err != nil {
				logrus.Debugf("Error reading response body: %s", err)
			}
			return nil, httputils.NewHTTPRequestError(fmt.Sprintf("Error: Status %d trying to push repository %s: %q", res.StatusCode, remote, errBody), res)
		}
		if res.Header.Get("X-Docker-Token") == "" {
			return nil, fmt.Errorf("Index response didn't contain an access token")
		}
		tokens = res.Header["X-Docker-Token"]
		logrus.Debugf("Auth token: %v", tokens)

		if res.Header.Get("X-Docker-Endpoints") == "" {
			return nil, fmt.Errorf("Index response didn't contain any endpoints")
		}
		endpoints, err = buildEndpointsList(res.Header["X-Docker-Endpoints"], r.indexEndpoint.VersionString(1))
		if err != nil {
			return nil, err
		}
	}
	if validate {
		if res.StatusCode != 204 {
			errBody, err := ioutil.ReadAll(res.Body)
			if err != nil {
				logrus.Debugf("Error reading response body: %s", err)
			}
			return nil, httputils.NewHTTPRequestError(fmt.Sprintf("Error: Status %d trying to push checksums %s: %q", res.StatusCode, remote, errBody), res)
		}
	}

	return &RepositoryData{
		Tokens:    tokens,
		Endpoints: endpoints,
	}, nil
}

// putImageRequest issues a single PUT of body to u with Basic Auth and
// the given headers, returning the raw response for the caller to
// inspect (and close).
func (r *Session) putImageRequest(u string, headers map[string][]string, body []byte) (*http.Response, error) {
	req, err := r.reqFactory.NewRequest("PUT", u, bytes.NewReader(body))
	if err != nil {
		return nil, err
	}
	req.SetBasicAuth(r.authConfig.Username, r.authConfig.Password)
	req.ContentLength = int64(len(body))
	for k, v := range headers {
		req.Header[k] = v
	}
	response, _, err := r.doRequest(req)
	if err != nil {
		return nil, err
	}
	return response, nil
}

// shouldRedirect reports whether the response carries a 3xx status.
func shouldRedirect(response *http.Response) bool {
	return response.StatusCode >= 300 && response.StatusCode < 400
}

// SearchRepositories queries the index's search endpoint for term and
// decodes the result set, sending Basic Auth when credentials are
// configured.
func (r *Session) SearchRepositories(term string) (*SearchResults, error) {
	logrus.Debugf("Index server: %s", r.indexEndpoint)
	u := r.indexEndpoint.VersionString(1) + "search?q=" + url.QueryEscape(term)
	req, err := r.reqFactory.NewRequest("GET", u, nil)
	if err != nil {
		return nil, err
	}
	if r.authConfig != nil && len(r.authConfig.Username) > 0 {
		req.SetBasicAuth(r.authConfig.Username, r.authConfig.Password)
	}
	req.Header.Set("X-Docker-Token", "true")
	res, _, err := r.doRequest(req)
	if err != nil {
		return nil, err
	}
	defer res.Body.Close()
	if res.StatusCode != 200 {
		return nil, httputils.NewHTTPRequestError(fmt.Sprintf("Unexpected status code %d", res.StatusCode), res)
	}
	result := new(SearchResults)
	err = json.NewDecoder(res.Body).Decode(result)
	return result, err
}

// GetAuthConfig returns a copy of the session's credentials; the
// password is included only when withPasswd is true.
func (r *Session) GetAuthConfig(withPasswd bool) *AuthConfig {
	password := ""
	if withPasswd {
		password = r.authConfig.Password
	}
	return &AuthConfig{
		Username: r.authConfig.Username,
		Password: password,
		Email:    r.authConfig.Email,
	}
}

// setTokenAuth attaches a "Token ..." Authorization header unless the
// request already has one (e.g. set by a decorator).
func setTokenAuth(req *http.Request, token []string) {
	if req.Header.Get("Authorization") == "" { // Don't override
		req.Header.Set("Authorization", "Token "+strings.Join(token, ","))
	}
}
camallen/docker
registry/session.go
GO
apache-2.0
18,640
#!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""This example creates a campaign in a given advertiser.

To create an advertiser, run create_advertiser.py.

The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""

# Import appropriate modules from the client library.
from googleads import dfa

ADVERTISER_ID = 'INSERT_ADVERTISER_ID_HERE'
CAMPAIGN_NAME = 'INSERT_CAMPAIGN_NAME_HERE'
URL = 'INSERT_LANDING_PAGE_URL_HERE'
LANDING_PAGE_NAME = 'INSERT_LANDING_PAGE_NAME_HERE'
# Campaign start/end timestamps. The INSERT_* placeholders must be replaced
# before running: int() on the month/day placeholders raises ValueError
# otherwise.
START_DATE = '%(year)s-%(month)02d-%(day)02dT12:00:00' % {
    'year': 'INSERT_START_YEAR_HERE',
    'month': int('INSERT_START_MONTH_HERE'),
    'day': int('INSERT_START_DAY_HERE')}
END_DATE = '%(year)s-%(month)02d-%(day)02dT12:00:00' % {
    'year': 'INSERT_END_YEAR_HERE',
    'month': int('INSERT_END_MONTH_HERE'),
    'day': int('INSERT_END_DAY_HERE')}


def main(client, advertiser_id, campaign_name, url, landing_page_name,
         start_date, end_date):
  """Creates a campaign (with a default landing page) under advertiser_id
  and prints the new campaign's ID."""
  # Initialize appropriate service.
  campaign_service = client.GetService(
      'campaign', 'v1.20', 'https://advertisersapitest.doubleclick.net')

  # Create a default landing page for the campaign and save it.
  default_landing_page = {
      'url': url,
      'name': landing_page_name
  }
  default_landing_page_id = campaign_service.saveLandingPage(
      default_landing_page)['id']

  # Construct and save the campaign.
  campaign = {
      'name': campaign_name,
      'advertiserId': advertiser_id,
      'defaultLandingPageId': default_landing_page_id,
      'archived': 'false',
      'startDate': start_date,
      'endDate': end_date
  }

  result = campaign_service.saveCampaign(campaign)

  # Display results.
  print 'Campaign with ID \'%s\' was created.' % result['id']

if __name__ == '__main__':
  # Initialize client object.
  dfa_client = dfa.DfaClient.LoadFromStorage()
  main(dfa_client, ADVERTISER_ID, CAMPAIGN_NAME, URL, LANDING_PAGE_NAME,
       START_DATE, END_DATE)
wubr2000/googleads-python-lib
examples/dfa/v1_20/create_campaign.py
Python
apache-2.0
2,699
package org.collectionspace.services.test;

/**
 * Placeholder test class for the Dimension service.
 *
 * <p>No tests are implemented yet; this class only reserves the test
 * package structure for the Dimension service module. (The previous
 * Javadoc and commented-out imports referred to the Intake service,
 * a leftover from copy-paste; they have been corrected/removed.)
 *
 * @version $Revision:$
 */
public class DimensionServiceTest {
    // No tests implemented yet.
}
cherryhill/collectionspace-services
services/dimension/service/src/test/java/org/collectionspace/services/test/DimensionServiceTest.java
Java
apache-2.0
262
Neuro Evolution of Augmenting Topologies ======================================== A slightly tidier version of Kenneth Stanley's NEAT original source code. *** Copyright 2010 The University of Texas at Austin ------------------------------------------------ Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License in this [site][2]. Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. NEAT source code distribution version 1.2, 7/23/10 ---------------------------------------------------- The NEAT software is based on the real-time NeuroEvolution of Augmenting Topologies method of evolving artificial neural networks, by Stanley and Miikkulainen (2002). Source code included in this release is originally written by Kenneth Stanley (kstanley@cs.ucf.edu). This is the official distribution [site][1]. The core NEAT code is updated significantly from earlier releases. Traits can now be made functional, and adaptive networks can be evolved (though the default is non-adaptive). Included Files ------------------------------------- This is a list of files included in the distribution, and also ones that are created when it is run or made using the Makefile. 
```
Makefile         : Makes neat on linux
CMakeLists.txt   : cmake file for cross-platform make (use http://www.cmake.org/)
README.md        : This file
LICENSE          : The Apache License Version 2.0 which describes the terms for the release of this package
experiments.cpp  : Sample experiments code
experiments.h
gen_*            : A printout of a generation, produced by generational NEAT
gene.cpp         : Gene class definitions
gene.h
genome.cpp       : Genome class definitions
genome.h
innovation.cpp   : Innovation class definitions
innovation.h
link.cpp         : Link class definitions
link.h
neat.cpp         : Main NEAT class
neat.h
neatmain.cpp     : Location of main, entry to the executable
neatmain.h
network.cpp      : Network class definitions
network.h
nnode.cpp        : NNode class definitions
nnode.h
organism.cpp     : Organism class definitions
organism.h
p2mpar3bare.ne   : Sample parameter file
p2nv.ne          : Parameter file for non-markov double pole balancing
p2test.ne        : Sample parameter file
params256.ne     : Sample parameter file that was used in some major experiments with pop size 256
pole1startgenes  : Starter genes for single pole balancing
pole2_markov.ne  : Parameter file for markovian double pole balancing
pole2startgenes  : Start genes for double pole balancing
pole2startgenes1 : Start genes for markovian double pole balancing
pole2startgenes2 : Start genes for non-markovian double pole balancing
population.cpp   : Population class definitions
population.h
neat             : Main executable (must execute "make" to produce this file)
species.cpp      : Species class definitions
species.h
statout          : Stat file output after some experiments
test.ne          : Sample parameter file
trait.cpp        : Trait class definitions
trait.h
xorstartgenes    : Start genes for XOR experiment
```

Installation
------------

To compile the NEAT code just run `$ make`, and this will create the `./build` folder containing all the compiled objects and the `neat` executable.
There are also two other commands:

* `$ make clean`: which deletes the "./build" folder
* `$ make purge`: which deletes both the "./build" folder and the "neat" executable

Included Experiments
--------------------

After running "make" to create the "neat" executable, NEAT can be run from the command line as follows:

`$ ./neat paramfilename.ne`

"paramfilename.ne" must be included so that NEAT knows what evolution parameters you want to use. You can use one of the supplied parameter files (they all end in the .ne extension), or create your own.

The pole2_markov.ne parameter file was designed for markovian pole balancing of any type (single pole, double pole, real-time). The p2nv.ne parameter file was designed for double pole balancing without velocity information. However, pole2_markov.ne also works with this experiment, and with XOR.

When you run NEAT from the command line, you are given the option of 4 experiments:

    Please choose an experiment:
    1. - 1-pole balancing
    2. - 2-pole balancing, velocity info provided
    3. - 2-pole balancing, no velocity info provided (non-markov)
    4. - XOR
    Number:

At the "Number:" prompt, you can enter your choice. The correct starter genome will be loaded in automatically and evolution will commence. Most experiments output generational population files ("gen_#") at regular intervals based on the "print_every" parameter in the supplied .ne file. For example, if print_every is 5, then gen_5, gen_10, gen_15, etc., will be written to the directory containing NEAT.

Conclusion
----------

We hope that this software will be a useful starting point for your own explorations in NEAT. The software is provided as is; however, we will do our best to maintain it and accommodate suggestions. If you want to be notified of future releases of the software or have questions, comments, bug reports or suggestions, send email to kstanley@cs.ucf.edu.

[1]:http://nn.cs.utexas.edu/keyword?neat-c
[2]:http://www.apache.org/licenses/LICENSE-2.0
slowriot/NEAT
README.md
Markdown
apache-2.0
5,582
// Copyright 2016 syzkaller project authors. All rights reserved.
// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.

// Package symbolizer extracts symbol information from binaries by
// shelling out to the binutils "nm" tool.
package symbolizer

import (
	"bufio"
	"bytes"
	"os/exec"
	"strconv"
)

// Symbol describes one text symbol: its start address and its size in
// bytes (rounded up to a multiple of 16 by ReadSymbols, see below).
type Symbol struct {
	Addr uint64
	Size int
}

// ReadSymbols returns list of text symbols in the binary bin.
// It runs "nm -nS bin" and parses lines of the form
// "<hex addr> <hex size> t|T <name>". A symbol name may appear more
// than once, hence the map value is a slice.
func ReadSymbols(bin string) (map[string][]Symbol, error) {
	cmd := exec.Command("nm", "-nS", bin)
	stdout, err := cmd.StdoutPipe()
	if err != nil {
		return nil, err
	}
	defer stdout.Close()
	if err := cmd.Start(); err != nil {
		return nil, err
	}
	// Best-effort reap of the child; the scanner error below is the
	// signal we act on, Wait's error is deliberately ignored.
	defer cmd.Wait()
	symbols := make(map[string][]Symbol)
	s := bufio.NewScanner(stdout)
	// Only text symbols (" t " local, " T " global) are of interest.
	text := [][]byte{[]byte(" t "), []byte(" T ")}
	for s.Scan() {
		// A line looks as: "ffffffff8104db90 0000000000000059 t snb_uncore_msr_enable_box"
		ln := s.Bytes()
		// Cheap substring prefilter before the exact field parsing below.
		if bytes.Index(ln, text[0]) == -1 && bytes.Index(ln, text[1]) == -1 {
			continue
		}
		// sp1/sp2 are the two spaces delimiting the address and size fields.
		sp1 := bytes.IndexByte(ln, ' ')
		if sp1 == -1 {
			continue
		}
		sp2 := bytes.IndexByte(ln[sp1+1:], ' ')
		if sp2 == -1 {
			continue
		}
		sp2 += sp1 + 1
		// The type marker must sit exactly at sp2; this rejects lines
		// that only happened to contain " t "/" T " elsewhere (e.g. in
		// the symbol name) or that lack the size field.
		if !bytes.HasPrefix(ln[sp2:], text[0]) && !bytes.HasPrefix(ln[sp2:], text[1]) {
			continue
		}
		addr, err := strconv.ParseUint(string(ln[:sp1]), 16, 64)
		if err != nil {
			continue
		}
		size, err := strconv.ParseUint(string(ln[sp1+1:sp2]), 16, 64)
		if err != nil {
			continue
		}
		name := string(ln[sp2+len(text[0]):])
		// Note: sizes reported by kernel do not match nm.
		// Kernel probably subtracts address of this symbol from address of the next symbol.
		// We could do the same, but for now we just round up size to 16.
		symbols[name] = append(symbols[name], Symbol{addr, int(size+15) / 16 * 16})
	}
	if err := s.Err(); err != nil {
		return nil, err
	}
	return symbols, nil
}
sploving/syzkaller
symbolizer/nm.go
GO
apache-2.0
1,794
/*
 * Copyright 2013, 2014 Deutsche Nationalbibliothek
 *
 * Licensed under the Apache License, Version 2.0 the "License";
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.culturegraph.mf.stream.source;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;

import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
import org.culturegraph.mf.exceptions.MetafactureException;
import org.culturegraph.mf.framework.DefaultObjectPipe;
import org.culturegraph.mf.framework.ObjectReceiver;
import org.culturegraph.mf.framework.annotations.Description;
import org.culturegraph.mf.framework.annotations.In;
import org.culturegraph.mf.framework.annotations.Out;

/**
 * Opens a bzip2 file and passes a reader for it to the receiver.
 *
 * @deprecated Use FileOpener instead and set compression to AUTO or bzip2
 *
 * @author Christoph Böhme
 *
 */
@Description("Opens a bzip2 file.")
@In(String.class)
@Out(java.io.Reader.class)
@Deprecated
public final class Bzip2Opener extends DefaultObjectPipe<String, ObjectReceiver<Reader>> implements Opener {

	// Default read-buffer size: 16 MB.
	private static final int DEFAULT_BUFFER_SIZE = 16 * 1024 * 1024;

	// Buffer size in bytes (set via setBufferSize, which takes MB).
	private int bufferSize = DEFAULT_BUFFER_SIZE;
	private String encoding = "UTF-8";

	/**
	 * Returns the encoding used to open the resource.
	 *
	 * @return current default setting
	 */
	public String getEncoding() {
		return encoding;
	}

	/**
	 * Sets the encoding used to open the resource.
	 *
	 * @param encoding
	 *            new encoding
	 */
	public void setEncoding(final String encoding) {
		this.encoding = encoding;
	}

	/**
	 * Sets the read-buffer size. Note that the argument is given in
	 * megabytes and converted to bytes internally.
	 *
	 * @param bufferSize
	 *            in MB
	 */
	public void setBufferSize(final int bufferSize) {
		this.bufferSize = bufferSize * 1024 * 1024;
	}

	/**
	 * Opens {@code file} as a bzip2-compressed stream, decorates it with a
	 * decompressing, character-decoding, buffered {@link Reader} and hands
	 * that reader to the downstream receiver. Any {@link IOException} or
	 * {@link FileNotFoundException} is rethrown wrapped in a
	 * {@link MetafactureException}. The receiver is responsible for closing
	 * the reader.
	 *
	 * @param file path of the bzip2 file to open
	 */
	@Override
	public void process(final String file) {
		try {
			getReceiver().process(
					new BufferedReader(new InputStreamReader(new BZip2CompressorInputStream(new FileInputStream(file)), encoding), bufferSize));
		} catch (FileNotFoundException e) {
			throw new MetafactureException(e);
		} catch (IOException e) {
			throw new MetafactureException(e);
		}
	}

}
philboeselager/metafacture-core
src/main/java/org/culturegraph/mf/stream/source/Bzip2Opener.java
Java
apache-2.0
2,699
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Threading;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Threading;

namespace Microsoft.VisualStudioTools.Project {
    /// <summary>
    /// Interaction logic for WaitForCompleteAnalysisDialog.xaml
    ///
    /// Modal progress dialog shown while a background <see cref="Task"/>
    /// runs. The dialog closes itself (DialogResult = true) when the task
    /// completes, and cancels the supplied CancellationTokenSource
    /// (DialogResult = false) when the user presses Cancel.
    /// The _waitLabel, _progress and _cancelButton members referenced below
    /// are defined in the associated XAML half of this partial class.
    /// </summary>
    internal partial class TaskProgressBar : DialogWindowVersioningWorkaround {
        private readonly Task _task;
        private readonly DispatcherTimer _timer;
        private readonly CancellationTokenSource _cancelSource;

        /// <summary>
        /// Creates the dialog.
        /// </summary>
        /// <param name="task">The task whose completion closes the dialog.</param>
        /// <param name="cancelSource">Token source cancelled by the Cancel button.</param>
        /// <param name="message">Text shown in the wait label.</param>
        public TaskProgressBar(Task task, CancellationTokenSource cancelSource, string message) {
            this._task = task;
            InitializeComponent();
            this._waitLabel.Text = message;
            // 1-second timer drives the (indeterminate-style) progress bar.
            this._timer = new DispatcherTimer();
            this._timer.Interval = new TimeSpan(0, 0, 1);
            this._timer.Start();
            this._timer.Tick += this.TimerTick;
            this._cancelSource = cancelSource;
        }

        // Advances the bar one step each second, wrapping at 100; the value
        // does not reflect actual task progress.
        private void TimerTick(object sender, EventArgs e) {
            this._progress.Value = (this._progress.Value + 1) % 100;
        }

        protected override void OnInitialized(System.EventArgs e) {
            // when the task completes we post back onto our UI thread to close the dialog box.
            // Capture the UI scheduler, and setup a continuation to do the close.
            var curScheduler = TaskScheduler.FromCurrentSynchronizationContext();
            this._task.ContinueWith(new CloseDialog(curScheduler, this).Close);

            base.OnInitialized(e);
        }

        /// <summary>
        /// Continuation helper: its Close method runs on an arbitrary thread
        /// when the task finishes, then marshals the actual dialog close onto
        /// the captured UI scheduler and blocks until it has run.
        /// </summary>
        private class CloseDialog {
            private readonly TaskScheduler _ui;
            private readonly TaskProgressBar _progressBar;

            public CloseDialog(TaskScheduler uiScheduler, TaskProgressBar progressBar) {
                this._ui = uiScheduler;
                this._progressBar = progressBar;
            }

            public void Close(Task task) {
                var newTask = new Task(this.CloseWorker);
                newTask.Start(this._ui);
                newTask.Wait();
            }

            // Runs on the UI thread: reports success and closes the window.
            private void CloseWorker() {
                this._progressBar.DialogResult = true;
                this._progressBar.Close();
            }
        }

        // Cancel button: signal cancellation to the task owner and close with
        // DialogResult = false.
        private void _cancelButton_Click(object sender, RoutedEventArgs e) {
            this._cancelSource.Cancel();
            this.DialogResult = false;
            this.Close();
        }
    }
}
lukedgr/nodejstools
Nodejs/Product/Nodejs/SharedProject/TaskProgressBar.xaml.cs
C#
apache-2.0
2,688
/**
 * plugin.js
 *
 * Copyright, Moxiecode Systems AB
 * Released under LGPL License.
 *
 * License: http://www.tinymce.com/license
 * Contributing: http://www.tinymce.com/contributing
 */

/*global tinymce:true */

// TinyMCE "image" plugin: toolbar button, menu item and mceImage command
// that open an insert/edit image dialog. This copy carries a custom panel
// in the dialog recommending the Details->Media form instead.
tinymce.PluginManager.add('image', function(editor) {
	// Measures the natural size of the image at `url` by loading it into a
	// hidden <img> appended to the document; calls callback({width, height}),
	// or callback(undefined-dimensions object) on error.
	function getImageSize(url, callback) {
		var img = document.createElement('img');

		function done(width, height) {
			if (img.parentNode) {
				img.parentNode.removeChild(img);
			}

			callback({width: width, height: height});
		}

		img.onload = function() {
			done(img.clientWidth, img.clientHeight);
		};

		img.onerror = function() {
			done();
		};

		var style = img.style;
		style.visibility = 'hidden';
		style.position = 'fixed';
		style.bottom = style.left = 0;
		style.width = style.height = 'auto';

		document.body.appendChild(img);
		img.src = url;
	}

	// Converts a (possibly nested, via item.menu) image/class list into
	// listbox menu items; itemCallback is invoked for each leaf so the
	// caller can normalize its value. startItems seeds the output array.
	function buildListItems(inputList, itemCallback, startItems) {
		function appendItems(values, output) {
			output = output || [];

			tinymce.each(values, function(item) {
				var menuItem = {text: item.text || item.title};

				if (item.menu) {
					menuItem.menu = appendItems(item.menu);
				} else {
					menuItem.value = item.value;
					itemCallback(menuItem);
				}

				output.push(menuItem);
			});

			return output;
		}

		return appendItems(inputList, startItems || []);
	}

	// Wraps `callback` so that the editor's image_list setting — a URL
	// (fetched via XHR), a function, or a plain array — is resolved and
	// passed to it when the returned function is invoked.
	function createImageList(callback) {
		return function() {
			var imageList = editor.settings.image_list;

			if (typeof(imageList) == "string") {
				tinymce.util.XHR.send({
					url: imageList,
					success: function(text) {
						callback(tinymce.util.JSON.parse(text));
					}
				});
			} else if (typeof(imageList) == "function") {
				imageList(callback);
			} else {
				callback(imageList);
			}
		};
	}

	// Builds and opens the insert/edit dialog. `imageList` is the resolved
	// image_list (or falsy). Closure state: win (dialog), data (form model),
	// width/height (last known dimensions used for constrained resizing).
	function showDialog(imageList) {
		var win, data = {}, dom = editor.dom, imgElm = editor.selection.getNode();
		var width, height, imageListCtrl, classListCtrl, imageDimensions = editor.settings.image_dimensions !== false;

		// Keeps width/height fields proportional when "Constrain" is checked;
		// the field edited by the user wins and the other one is recomputed.
		function recalcSize() {
			var widthCtrl, heightCtrl, newWidth, newHeight;

			widthCtrl = win.find('#width')[0];
			heightCtrl = win.find('#height')[0];

			if (!widthCtrl || !heightCtrl) {
				return;
			}

			newWidth = widthCtrl.value();
			newHeight = heightCtrl.value();

			if (win.find('#constrain')[0].checked() && width && height && newWidth && newHeight) {
				if (width != newWidth) {
					newHeight = Math.round((newWidth / width) * newHeight);
					heightCtrl.value(newHeight);
				} else {
					newWidth = Math.round((newHeight / height) * newWidth);
					widthCtrl.value(newWidth);
				}
			}

			width = newWidth;
			height = newHeight;
		}

		// Dialog submit handler: normalizes the form model and inserts a new
		// <img> or updates the selected one inside one undo transaction.
		function onSubmitForm() {
			// After (re)setting src, waits for the image to load so missing
			// dimensions can be filled in, then reselects the element.
			function waitLoad(imgElm) {
				function selectImage() {
					imgElm.onload = imgElm.onerror = null;

					if (editor.selection) {
						editor.selection.select(imgElm);
						editor.nodeChanged();
					}
				}

				imgElm.onload = function() {
					if (!data.width && !data.height && imageDimensions) {
						dom.setAttribs(imgElm, {
							width: imgElm.clientWidth,
							height: imgElm.clientHeight
						});
					}

					selectImage();
				};

				imgElm.onerror = selectImage;
			}

			updateStyle();
			recalcSize();

			data = tinymce.extend(data, win.toJSON());

			if (!data.alt) {
				data.alt = '';
			}

			if (data.width === '') {
				data.width = null;
			}

			if (data.height === '') {
				data.height = null;
			}

			if (!data.style) {
				data.style = null;
			}

			// Setup new data excluding style properties
			data = {
				src: data.src,
				alt: data.alt,
				width: data.width,
				height: data.height,
				style: data.style,
				"class": data["class"]
			};

			editor.undoManager.transact(function() {
				// Empty src means: remove the existing image (if any).
				if (!data.src) {
					if (imgElm) {
						dom.remove(imgElm);
						editor.focus();
						editor.nodeChanged();
					}

					return;
				}

				if (!imgElm) {
					// Insert via a temporary id so the new element can be
					// found and selected after setContent.
					data.id = '__mcenew';
					editor.focus();
					editor.selection.setContent(dom.createHTML('img', data));
					imgElm = dom.get('__mcenew');
					dom.setAttrib(imgElm, 'id', null);
				} else {
					dom.setAttribs(imgElm, data);
				}

				waitLoad(imgElm);
			});
		}

		// Strips a trailing "px" so style values round-trip through the
		// plain-number vspace/hspace/border fields.
		function removePixelSuffix(value) {
			if (value) {
				value = value.replace(/px$/, '');
			}

			return value;
		}

		// Source-URL change handler: syncs the image-list selection, applies
		// any metadata the file picker supplied, and otherwise probes the
		// image for its natural dimensions.
		function srcChange(e) {
			var meta = e.meta || {};

			if (imageListCtrl) {
				imageListCtrl.value(editor.convertURL(this.value(), 'src'));
			}

			tinymce.each(meta, function(value, key) {
				win.find('#' + key).value(value);
			});

			if (!meta.width && !meta.height) {
				getImageSize(this.value(), function(data) {
					if (data.width && data.height && imageDimensions) {
						width = data.width;
						height = data.height;

						win.find('#width').value(width);
						win.find('#height').value(height);
					}
				});
			}
		}

		width = dom.getAttrib(imgElm, 'width');
		height = dom.getAttrib(imgElm, 'height');

		// Edit mode only for a real content <img>; placeholders and object
		// proxies fall through to insert mode (imgElm = null).
		if (imgElm.nodeName == 'IMG' && !imgElm.getAttribute('data-mce-object') && !imgElm.getAttribute('data-mce-placeholder')) {
			data = {
				src: dom.getAttrib(imgElm, 'src'),
				alt: dom.getAttrib(imgElm, 'alt'),
				"class": dom.getAttrib(imgElm, 'class'),
				width: width,
				height: height
			};
		} else {
			imgElm = null;
		}

		if (imageList) {
			imageListCtrl = {
				type: 'listbox',
				label: 'Image list',
				values: buildListItems(
					imageList,
					function(item) {
						item.value = editor.convertURL(item.value || item.url, 'src');
					},
					[{text: 'None', value: ''}]
				),
				value: data.src && editor.convertURL(data.src, 'src'),
				onselect: function(e) {
					var altCtrl = win.find('#alt');

					// Only overwrite the alt text if it is empty or still the
					// previous list item's text (i.e. not user-edited).
					if (!altCtrl.value() || (e.lastControl && altCtrl.value() == e.lastControl.text())) {
						altCtrl.value(e.control.text());
					}

					win.find('#src').value(e.control.value()).fire('change');
				},
				onPostRender: function() {
					imageListCtrl = this;
				}
			};
		}

		if (editor.settings.image_class_list) {
			classListCtrl = {
				name: 'class',
				type: 'listbox',
				label: 'Class',
				values: buildListItems(
					editor.settings.image_class_list,
					function(item) {
						if (item.value) {
							item.textStyle = function() {
								return editor.formatter.getCssText({inline: 'img', classes: [item.value]});
							};
						}
					}
				)
			};
		}

		// General settings shared between simple and advanced dialogs
		var generalFormItems = [
			{
				type: 'panel',
				html: '<div style="background: white;">It is preferable to add <span style="font-weight: bold;">Images</span> to an entry using <span style="font-weight: bold;">Details->Media</span> form.<br>This will embed the image in the description.</div>'
			},
			{
				name: 'src',
				type: 'filepicker',
				filetype: 'image',
				label: 'Source URL',
				placeholder: 'http://media.com/image.png',
				autofocus: true,
				onchange: srcChange
			},
			imageListCtrl
		];

		if (editor.settings.image_description !== false) {
			generalFormItems.push({name: 'alt', type: 'textbox', label: 'Image description'});
		}

		if (imageDimensions) {
			generalFormItems.push({
				type: 'container',
				label: 'Dimensions',
				layout: 'flex',
				direction: 'row',
				align: 'center',
				spacing: 5,
				items: [
					{name: 'width', type: 'textbox', maxLength: 5, size: 3, onchange: recalcSize, ariaLabel: 'Width'},
					{type: 'label', text: 'x'},
					{name: 'height', type: 'textbox', maxLength: 5, size: 3, onchange: recalcSize, ariaLabel: 'Height'},
					{name: 'constrain', type: 'checkbox', checked: true, text: 'Constrain proportions'}
				]
			});
		}

		generalFormItems.push(classListCtrl);

		// Folds the advanced-tab vspace/hspace/border fields into the style
		// field as margins/border-width; no-op unless image_advtab is on.
		function updateStyle() {
			function addPixelSuffix(value) {
				if (value.length > 0 && /^[0-9]+$/.test(value)) {
					value += 'px';
				}

				return value;
			}

			if (!editor.settings.image_advtab) {
				return;
			}

			var data = win.toJSON();
			var css = dom.parseStyle(data.style);

			delete css.margin;
			css['margin-top'] = css['margin-bottom'] = addPixelSuffix(data.vspace);
			css['margin-left'] = css['margin-right'] = addPixelSuffix(data.hspace);
			css['border-width'] = addPixelSuffix(data.border);

			// Double parse/serialize round-trip normalizes the style string.
			win.find('#style').value(dom.serializeStyle(dom.parseStyle(dom.serializeStyle(css))));
		}

		if (editor.settings.image_advtab) {
			// Parse styles from img
			if (imgElm) {
				data.hspace = removePixelSuffix(imgElm.style.marginLeft || imgElm.style.marginRight);
				data.vspace = removePixelSuffix(imgElm.style.marginTop || imgElm.style.marginBottom);
				data.border = removePixelSuffix(imgElm.style.borderWidth);
				data.style = editor.dom.serializeStyle(editor.dom.parseStyle(editor.dom.getAttrib(imgElm, 'style')));
			}

			// Advanced dialog shows general+advanced tabs
			win = editor.windowManager.open({
				title: 'Insert/edit image',
				data: data,
				bodyType: 'tabpanel',
				body: [
					{
						title: 'General',
						type: 'form',
						items: generalFormItems
					},

					{
						title: 'Advanced',
						type: 'form',
						pack: 'start',
						items: [
							{
								label: 'Style',
								name: 'style',
								type: 'textbox'
							},
							{
								type: 'form',
								layout: 'grid',
								packV: 'start',
								columns: 2,
								padding: 0,
								alignH: ['left', 'right'],
								defaults: {
									type: 'textbox',
									maxWidth: 50,
									onchange: updateStyle
								},
								items: [
									{label: 'Vertical space', name: 'vspace'},
									{label: 'Horizontal space', name: 'hspace'},
									{label: 'Border', name: 'border'}
								]
							}
						]
					}
				],
				onSubmit: onSubmitForm
			});
		} else {
			// Simple default dialog
			win = editor.windowManager.open({
				title: 'Insert/edit image',
				data: data,
				body: generalFormItems,
				onSubmit: onSubmitForm
			});
		}
	}

	editor.addButton('image', {
		icon: 'image',
		tooltip: 'Insert/edit image',
		onclick: createImageList(showDialog),
		stateSelector: 'img:not([data-mce-object],[data-mce-placeholder])'
	});

	editor.addMenuItem('image', {
		icon: 'image',
		text: 'Insert image',
		onclick: createImageList(showDialog),
		context: 'insert',
		prependToContext: true
	});

	editor.addCommand('mceImage', createImageList(showDialog));
});
jbottel/openstorefront
server/openstorefront/tinymcetextarea/src/main/resources/META-INF/resources/webjars/tinymcetextarea/5.1/tinymce/plugins/image/plugin.js
JavaScript
apache-2.0
10,236
<?php

/**
 * Storage object holding the parsed metadata (author, message, details
 * map) for a single repository commit, keyed 1:1 to a commit row via
 * commitID. Most accessors delegate to a lazily-built DiffusionCommitRef
 * stored inside the JSON-serialized commitDetails map.
 */
final class PhabricatorRepositoryCommitData extends PhabricatorRepositoryDAO {

  protected $commitID;
  protected $authorName = '';
  protected $commitMessage = '';
  // Free-form key/value map, persisted as JSON (see getConfiguration()).
  protected $commitDetails = array();

  // Memoized DiffusionCommitRef built from commitDetails; see getCommitRef().
  private $commitRef;

  protected function getConfiguration() {
    return array(
      self::CONFIG_TIMESTAMPS => false,
      self::CONFIG_SERIALIZATION => array(
        'commitDetails' => self::SERIALIZATION_JSON,
      ),
      self::CONFIG_COLUMN_SCHEMA => array(
        'authorName' => 'text',
        'commitMessage' => 'text',
      ),
      self::CONFIG_KEY_SCHEMA => array(
        'commitID' => array(
          'columns' => array('commitID'),
          'unique' => true,
        ),
      ),
    ) + parent::getConfiguration();
  }

  /**
   * One-line summary of this commit's message.
   */
  public function getSummary() {
    $message = $this->getCommitMessage();
    return self::summarizeCommitMessage($message);
  }

  /**
   * Reduce a full commit message to its first line, truncated to fit the
   * commit table's "summary" column (and at most 80 glyphs).
   */
  public static function summarizeCommitMessage($message) {
    $max_bytes = id(new PhabricatorRepositoryCommit())
      ->getColumnMaximumByteLength('summary');

    $summary = phutil_split_lines($message, $retain_endings = false);
    $summary = head($summary);
    $summary = id(new PhutilUTF8StringTruncator())
      ->setMaximumBytes($max_bytes)
      ->setMaximumGlyphs(80)
      ->truncateString($summary);

    return $summary;
  }

  public function getCommitDetail($key, $default = null) {
    return idx($this->commitDetails, $key, $default);
  }

  public function setCommitDetail($key, $value) {
    $this->commitDetails[$key] = $value;
    return $this;
  }

  public function toDictionary() {
    return array(
      'commitID' => $this->commitID,
      'authorName' => $this->authorName,
      'commitMessage' => $this->commitMessage,
      'commitDetails' => json_encode($this->commitDetails),
    );
  }

  public static function newFromDictionary(array $dict) {
    return id(new PhabricatorRepositoryCommitData())
      ->loadFromArray($dict);
  }

  /**
   * Reasons this commit is being held back from publishing, as
   * PhabricatorRepositoryPublisherHoldReason objects.
   */
  public function newPublisherHoldReasons() {
    $holds = $this->getCommitDetail('holdReasons');

    // Look for the legacy "autocloseReason" if we don't have a modern list
    // of hold reasons.
    if (!$holds) {
      $old_hold = $this->getCommitDetail('autocloseReason');
      if ($old_hold) {
        $holds = array($old_hold);
      }
    }

    if (!$holds) {
      $holds = array();
    }

    foreach ($holds as $key => $reason) {
      $holds[$key] = PhabricatorRepositoryPublisherHoldReason::newForHoldKey(
        $reason);
    }

    return array_values($holds);
  }

  // Raw author string; prefers the commit ref, falls back to the
  // denormalized authorName column, then null.
  public function getAuthorString() {
    $ref = $this->getCommitRef();

    $author = $ref->getAuthor();
    if (strlen($author)) {
      return $author;
    }

    $author = phutil_string_cast($this->authorName);
    if (strlen($author)) {
      return $author;
    }

    return null;
  }

  public function getAuthorDisplayName() {
    return $this->getCommitRef()->getAuthorName();
  }

  public function getAuthorEmail() {
    return $this->getCommitRef()->getAuthorEmail();
  }

  public function getAuthorEpoch() {
    $epoch = $this->getCommitRef()->getAuthorEpoch();

    if ($epoch) {
      return (int)$epoch;
    }

    return null;
  }

  // Raw committer string; prefers the commit ref, falls back to the
  // legacy 'committer' detail, then null.
  public function getCommitterString() {
    $ref = $this->getCommitRef();

    $committer = $ref->getCommitter();
    if (strlen($committer)) {
      return $committer;
    }

    return $this->getCommitDetailString('committer');
  }

  public function getCommitterDisplayName() {
    return $this->getCommitRef()->getCommitterName();
  }

  public function getCommitterEmail() {
    return $this->getCommitRef()->getCommitterEmail();
  }

  // Like getCommitDetail(), but normalizes empty values to null.
  private function getCommitDetailString($key) {
    $string = $this->getCommitDetail($key);
    $string = phutil_string_cast($string);

    if (strlen($string)) {
      return $string;
    }

    return null;
  }

  public function setCommitRef(DiffusionCommitRef $ref) {
    $this->setCommitDetail('ref', $ref->newDictionary());
    // Invalidate the memoized ref so the next read rebuilds it.
    $this->commitRef = null;

    return $this;
  }

  /**
   * Build (and memoize) a DiffusionCommitRef from the stored 'ref'
   * detail, backfilling missing fields from legacy per-field details and
   * the message column.
   */
  public function getCommitRef() {
    if ($this->commitRef === null) {
      $map = $this->getCommitDetail('ref', array());
      if (!is_array($map)) {
        $map = array();
      }

      $map = $map + array(
        'authorName' => $this->getCommitDetailString('authorName'),
        'authorEmail' => $this->getCommitDetailString('authorEmail'),
        'authorEpoch' => $this->getCommitDetailString('authorEpoch'),
        'committerName' => $this->getCommitDetailString('committerName'),
        'committerEmail' => $this->getCommitDetailString('committerEmail'),
        'message' => $this->getCommitMessage(),
      );

      $ref = DiffusionCommitRef::newFromDictionary($map);
      $this->commitRef = $ref;
    }

    return $this->commitRef;
  }

}
freebsd/phabricator
src/applications/repository/storage/PhabricatorRepositoryCommitData.php
PHP
apache-2.0
4,785
# Copyright (c) 2014 VMware, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # import hashlib import mock from neutron.common import constants from neutron.common import exceptions from neutron.plugins.vmware.common import utils from neutron.plugins.vmware.nsxlib import switch as switchlib from neutron.tests.unit import test_api_v2 from neutron.tests.unit.vmware.nsxlib import base _uuid = test_api_v2._uuid class LogicalSwitchesTestCase(base.NsxlibTestCase): def test_create_and_get_lswitches_single(self): tenant_id = 'pippo' transport_zones_config = [{'zone_uuid': _uuid(), 'transport_type': 'stt'}] lswitch = switchlib.create_lswitch(self.fake_cluster, _uuid(), tenant_id, 'fake-switch', transport_zones_config) res_lswitch = switchlib.get_lswitches(self.fake_cluster, lswitch['uuid']) self.assertEqual(len(res_lswitch), 1) self.assertEqual(res_lswitch[0]['uuid'], lswitch['uuid']) def test_create_and_get_lswitches_single_name_exceeds_40_chars(self): tenant_id = 'pippo' transport_zones_config = [{'zone_uuid': _uuid(), 'transport_type': 'stt'}] lswitch = switchlib.create_lswitch(self.fake_cluster, tenant_id, _uuid(), '*' * 50, transport_zones_config) res_lswitch = switchlib.get_lswitches(self.fake_cluster, lswitch['uuid']) self.assertEqual(len(res_lswitch), 1) self.assertEqual(res_lswitch[0]['uuid'], lswitch['uuid']) self.assertEqual(res_lswitch[0]['display_name'], '*' * 40) def test_create_and_get_lswitches_multiple(self): tenant_id = 'pippo' transport_zones_config = 
[{'zone_uuid': _uuid(), 'transport_type': 'stt'}] network_id = _uuid() main_lswitch = switchlib.create_lswitch( self.fake_cluster, network_id, tenant_id, 'fake-switch', transport_zones_config, tags=[{'scope': 'multi_lswitch', 'tag': 'True'}]) # Create secondary lswitch second_lswitch = switchlib.create_lswitch( self.fake_cluster, network_id, tenant_id, 'fake-switch-2', transport_zones_config) res_lswitch = switchlib.get_lswitches(self.fake_cluster, network_id) self.assertEqual(len(res_lswitch), 2) switch_uuids = [ls['uuid'] for ls in res_lswitch] self.assertIn(main_lswitch['uuid'], switch_uuids) self.assertIn(second_lswitch['uuid'], switch_uuids) for ls in res_lswitch: if ls['uuid'] == main_lswitch['uuid']: main_ls = ls else: second_ls = ls main_ls_tags = self._build_tag_dict(main_ls['tags']) second_ls_tags = self._build_tag_dict(second_ls['tags']) self.assertIn('multi_lswitch', main_ls_tags) self.assertNotIn('multi_lswitch', second_ls_tags) self.assertIn('quantum_net_id', main_ls_tags) self.assertIn('quantum_net_id', second_ls_tags) self.assertEqual(main_ls_tags['quantum_net_id'], network_id) self.assertEqual(second_ls_tags['quantum_net_id'], network_id) def test_update_lswitch(self): new_name = 'new-name' new_tags = [{'scope': 'new_tag', 'tag': 'xxx'}] transport_zones_config = [{'zone_uuid': _uuid(), 'transport_type': 'stt'}] lswitch = switchlib.create_lswitch(self.fake_cluster, _uuid(), 'pippo', 'fake-switch', transport_zones_config) switchlib.update_lswitch(self.fake_cluster, lswitch['uuid'], new_name, tags=new_tags) res_lswitch = switchlib.get_lswitches(self.fake_cluster, lswitch['uuid']) self.assertEqual(len(res_lswitch), 1) self.assertEqual(res_lswitch[0]['display_name'], new_name) switch_tags = self._build_tag_dict(res_lswitch[0]['tags']) self.assertIn('new_tag', switch_tags) self.assertEqual(switch_tags['new_tag'], 'xxx') def test_update_non_existing_lswitch_raises(self): self.assertRaises(exceptions.NetworkNotFound, switchlib.update_lswitch, 
self.fake_cluster, 'whatever', 'foo', 'bar') def test_delete_networks(self): transport_zones_config = [{'zone_uuid': _uuid(), 'transport_type': 'stt'}] lswitch = switchlib.create_lswitch(self.fake_cluster, _uuid(), 'pippo', 'fake-switch', transport_zones_config) switchlib.delete_networks(self.fake_cluster, lswitch['uuid'], [lswitch['uuid']]) self.assertRaises(exceptions.NotFound, switchlib.get_lswitches, self.fake_cluster, lswitch['uuid']) def test_delete_non_existing_lswitch_raises(self): self.assertRaises(exceptions.NetworkNotFound, switchlib.delete_networks, self.fake_cluster, 'whatever', ['whatever']) class LogicalPortsTestCase(base.NsxlibTestCase): def _create_switch_and_port(self, tenant_id='pippo', neutron_port_id='whatever', name='name', device_id='device_id'): transport_zones_config = [{'zone_uuid': _uuid(), 'transport_type': 'stt'}] lswitch = switchlib.create_lswitch(self.fake_cluster, _uuid(), tenant_id, 'fake-switch', transport_zones_config) lport = switchlib.create_lport(self.fake_cluster, lswitch['uuid'], tenant_id, neutron_port_id, name, device_id, True) return lswitch, lport def test_create_and_get_port(self): lswitch, lport = self._create_switch_and_port() lport_res = switchlib.get_port(self.fake_cluster, lswitch['uuid'], lport['uuid']) self.assertEqual(lport['uuid'], lport_res['uuid']) # Try again with relation lport_res = switchlib.get_port(self.fake_cluster, lswitch['uuid'], lport['uuid'], relations='LogicalPortStatus') self.assertEqual(lport['uuid'], lport_res['uuid']) def test_plug_interface(self): lswitch, lport = self._create_switch_and_port() switchlib.plug_vif_interface(self.fake_cluster, lswitch['uuid'], lport['uuid'], 'VifAttachment', 'fake') lport_res = switchlib.get_port(self.fake_cluster, lswitch['uuid'], lport['uuid']) self.assertEqual(lport['uuid'], lport_res['uuid']) def test_get_port_by_tag(self): lswitch, lport = self._create_switch_and_port() lport2 = switchlib.get_port_by_neutron_tag(self.fake_cluster, lswitch['uuid'], 
                          'whatever')
        self.assertIsNotNone(lport2)
        self.assertEqual(lport['uuid'], lport2['uuid'])

    def test_get_port_by_tag_not_found_with_switch_id_raises_not_found(self):
        # A valid lswitch uuid combined with a neutron port tag that was
        # never attached to any port must raise NotFound.
        tenant_id = 'pippo'
        neutron_port_id = 'whatever'
        transport_zones_config = [{'zone_uuid': _uuid(),
                                   'transport_type': 'stt'}]
        lswitch = switchlib.create_lswitch(
            self.fake_cluster, tenant_id, _uuid(),
            'fake-switch', transport_zones_config)
        self.assertRaises(exceptions.NotFound,
                          switchlib.get_port_by_neutron_tag,
                          self.fake_cluster, lswitch['uuid'],
                          neutron_port_id)

    def test_get_port_by_tag_not_find_wildcard_lswitch_returns_none(self):
        # With the wildcard switch id ('*') a missing tag is not an error:
        # the lookup returns None instead of raising.
        tenant_id = 'pippo'
        neutron_port_id = 'whatever'
        transport_zones_config = [{'zone_uuid': _uuid(),
                                   'transport_type': 'stt'}]
        switchlib.create_lswitch(
            self.fake_cluster, tenant_id, _uuid(),
            'fake-switch', transport_zones_config)
        lport = switchlib.get_port_by_neutron_tag(
            self.fake_cluster, '*', neutron_port_id)
        self.assertIsNone(lport)

    def test_get_port_status(self):
        # A freshly created port on the fake cluster reports ACTIVE status.
        lswitch, lport = self._create_switch_and_port()
        status = switchlib.get_port_status(
            self.fake_cluster, lswitch['uuid'], lport['uuid'])
        self.assertEqual(constants.PORT_STATUS_ACTIVE, status)

    def test_get_port_status_non_existent_raises(self):
        # Unknown switch/port uuids must map to PortNotFoundOnNetwork.
        self.assertRaises(exceptions.PortNotFoundOnNetwork,
                          switchlib.get_port_status,
                          self.fake_cluster, 'boo', 'boo')

    def test_update_port(self):
        # update_port must change display name and admin status in place
        # (same uuid), and (re)apply the standard os_tid/q_port_id/vm_id tags.
        lswitch, lport = self._create_switch_and_port()
        switchlib.update_port(
            self.fake_cluster, lswitch['uuid'], lport['uuid'],
            'neutron_port_id', 'pippo2', 'new_name', 'device_id', False)
        lport_res = switchlib.get_port(self.fake_cluster,
                                       lswitch['uuid'], lport['uuid'])
        self.assertEqual(lport['uuid'], lport_res['uuid'])
        self.assertEqual('new_name', lport_res['display_name'])
        # NOTE(review): admin_status_enabled comes back as the string 'False',
        # not a boolean — the NSX API apparently serializes it as text.
        self.assertEqual('False', lport_res['admin_status_enabled'])
        port_tags = self._build_tag_dict(lport_res['tags'])
        self.assertIn('os_tid', port_tags)
        self.assertIn('q_port_id', port_tags)
        self.assertIn('vm_id', port_tags)

    def test_create_port_device_id_less_than_40_chars(self):
        # A short device id fits in the vm_id tag unchanged.
        lswitch, lport = self._create_switch_and_port()
        lport_res = switchlib.get_port(self.fake_cluster,
                                       lswitch['uuid'], lport['uuid'])
        port_tags = self._build_tag_dict(lport_res['tags'])
        self.assertEqual('device_id', port_tags['vm_id'])

    def test_create_port_device_id_more_than_40_chars(self):
        # A device id longer than the tag limit must be transformed (hashed)
        # before being stored in the vm_id tag, so lengths differ.
        dev_id = "this_is_a_very_long_device_id_with_lots_of_characters"
        lswitch, lport = self._create_switch_and_port(device_id=dev_id)
        lport_res = switchlib.get_port(self.fake_cluster,
                                       lswitch['uuid'], lport['uuid'])
        port_tags = self._build_tag_dict(lport_res['tags'])
        self.assertNotEqual(len(dev_id), len(port_tags['vm_id']))

    def test_get_ports_with_obsolete_and_new_vm_id_tag(self):
        # Ports created while the obsolete always-hash device_id_to_vm_id
        # behavior is patched in carry a hashed vm_id tag; ports created with
        # the current implementation keep the short device id verbatim.
        # get_ports must find both kinds when filtering by device id.
        def obsolete(device_id, obfuscate=False):
            # NOTE(review): hashlib.sha1 over a str only works on Python 2;
            # Python 3 requires bytes — confirm target interpreter.
            return hashlib.sha1(device_id).hexdigest()
        with mock.patch.object(utils, 'device_id_to_vm_id', new=obsolete):
            dev_id1 = "short-dev-id-1"
            _, lport1 = self._create_switch_and_port(device_id=dev_id1)
        dev_id2 = "short-dev-id-2"
        _, lport2 = self._create_switch_and_port(device_id=dev_id2)
        # 'whatever' is presumably the default neutron port id key used by
        # the fake cluster fixture — confirm against _create_switch_and_port.
        lports = switchlib.get_ports(self.fake_cluster, None, [dev_id1])
        port_tags = self._build_tag_dict(lports['whatever']['tags'])
        self.assertNotEqual(dev_id1, port_tags['vm_id'])
        lports = switchlib.get_ports(self.fake_cluster, None, [dev_id2])
        port_tags = self._build_tag_dict(lports['whatever']['tags'])
        self.assertEqual(dev_id2, port_tags['vm_id'])

    def test_update_non_existent_port_raises(self):
        self.assertRaises(exceptions.PortNotFoundOnNetwork,
                          switchlib.update_port, self.fake_cluster,
                          'boo', 'boo', 'boo', 'boo', 'boo', 'boo', False)

    def test_delete_port(self):
        # After deletion, fetching the same port must raise.
        lswitch, lport = self._create_switch_and_port()
        switchlib.delete_port(self.fake_cluster,
                              lswitch['uuid'], lport['uuid'])
        self.assertRaises(exceptions.PortNotFoundOnNetwork,
                          switchlib.get_port,
                          self.fake_cluster,
                          lswitch['uuid'], lport['uuid'])

    def test_delete_non_existent_port_raises(self):
        # Existing switch, bogus port uuid: deletion must raise rather than
        # silently succeed.
        lswitch = self._create_switch_and_port()[0]
        self.assertRaises(exceptions.PortNotFoundOnNetwork,
                          switchlib.delete_port, self.fake_cluster,
                          lswitch['uuid'], 'bad_port_uuid')

    def test_query_lswitch_ports(self):
        # One port from the fixture plus two created here: the query must
        # return exactly those three, identified by uuid.
        lswitch, lport = self._create_switch_and_port()
        switch_port_uuids = [
            switchlib.create_lport(
                self.fake_cluster, lswitch['uuid'], 'pippo',
                'qportid-%s' % k, 'port-%s' % k,
                'deviceid-%s' % k, True)['uuid']
            for k in range(2)]
        switch_port_uuids.append(lport['uuid'])
        ports = switchlib.query_lswitch_lports(
            self.fake_cluster, lswitch['uuid'])
        self.assertEqual(len(ports), 3)
        for res_port in ports:
            self.assertIn(res_port['uuid'], switch_port_uuids)
sajuptpm/neutron-ipam
neutron/tests/unit/vmware/nsxlib/test_switch.py
Python
apache-2.0
14,400
/* Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef APR_POLL_H #define APR_POLL_H /** * @file apr_poll.h * @brief APR Poll interface */ #include "apr.h" #include "apr_pools.h" #include "apr_errno.h" #include "apr_inherit.h" #include "apr_file_io.h" #include "apr_network_io.h" #if APR_HAVE_NETINET_IN_H #include <netinet/in.h> #endif #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ /** * @defgroup apr_poll Poll Routines * @ingroup APR * @{ */ /** * @defgroup pollopts Poll options * @ingroup apr_poll * @{ */ #define APR_POLLIN 0x001 /**< Can read without blocking */ #define APR_POLLPRI 0x002 /**< Priority data available */ #define APR_POLLOUT 0x004 /**< Can write without blocking */ #define APR_POLLERR 0x010 /**< Pending error */ #define APR_POLLHUP 0x020 /**< Hangup occurred */ #define APR_POLLNVAL 0x040 /**< Descriptor invalid */ /** @} */ /** * @defgroup pollflags Pollset Flags * @ingroup apr_poll * @{ */ #define APR_POLLSET_THREADSAFE 0x001 /**< Adding or removing a descriptor is * thread-safe */ #define APR_POLLSET_NOCOPY 0x002 /**< Descriptors passed to apr_pollset_add() * are not copied */ #define APR_POLLSET_WAKEABLE 0x004 /**< Poll operations are interruptable by * apr_pollset_wakeup() or apr_pollcb_wakeup() */ #define 
APR_POLLSET_NODEFAULT 0x010 /**< Do not try to use the default method if * the specified non-default method cannot be * used */ /** @} */ /** * Pollset Methods */ typedef enum { APR_POLLSET_DEFAULT, /**< Platform default poll method */ APR_POLLSET_SELECT, /**< Poll uses select method */ APR_POLLSET_KQUEUE, /**< Poll uses kqueue method */ APR_POLLSET_PORT, /**< Poll uses Solaris event port method */ APR_POLLSET_EPOLL, /**< Poll uses epoll method */ APR_POLLSET_POLL, /**< Poll uses poll method */ APR_POLLSET_AIO_MSGQ /**< Poll uses z/OS asio method */ } apr_pollset_method_e; /** Used in apr_pollfd_t to determine what the apr_descriptor is */ typedef enum { APR_NO_DESC, /**< nothing here */ APR_POLL_SOCKET, /**< descriptor refers to a socket */ APR_POLL_FILE, /**< descriptor refers to a file */ APR_POLL_LASTDESC /**< @deprecated descriptor is the last one in the list */ } apr_datatype_e ; /** Union of either an APR file or socket. */ typedef union { apr_file_t *f; /**< file */ apr_socket_t *s; /**< socket */ } apr_descriptor; /** @see apr_pollfd_t */ typedef struct apr_pollfd_t apr_pollfd_t; /** Poll descriptor set. */ struct apr_pollfd_t { apr_pool_t *p; /**< associated pool */ apr_datatype_e desc_type; /**< descriptor type */ apr_int16_t reqevents; /**< requested events */ apr_int16_t rtnevents; /**< returned events */ apr_descriptor desc; /**< @see apr_descriptor */ void *client_data; /**< allows app to associate context */ }; /* General-purpose poll API for arbitrarily large numbers of * file descriptors */ /** Opaque structure used for pollset API */ typedef struct apr_pollset_t apr_pollset_t; /** * Set up a pollset object * @param pollset The pointer in which to return the newly created object * @param size The maximum number of descriptors that this pollset can hold * @param p The pool from which to allocate the pollset * @param flags Optional flags to modify the operation of the pollset. 
* * @remark If flags contains APR_POLLSET_THREADSAFE, then a pollset is * created on which it is safe to make concurrent calls to * apr_pollset_add(), apr_pollset_remove() and apr_pollset_poll() * from separate threads. This feature is only supported on some * platforms; the apr_pollset_create() call will fail with * APR_ENOTIMPL on platforms where it is not supported. * @remark If flags contains APR_POLLSET_WAKEABLE, then a pollset is * created with an additional internal pipe object used for the * apr_pollset_wakeup() call. The actual size of pollset is * in that case @a size + 1. This feature is only supported on some * platforms; the apr_pollset_create() call will fail with * APR_ENOTIMPL on platforms where it is not supported. * @remark If flags contains APR_POLLSET_NOCOPY, then the apr_pollfd_t * structures passed to apr_pollset_add() are not copied and * must have a lifetime at least as long as the pollset. * @remark Some poll methods (including APR_POLLSET_KQUEUE, * APR_POLLSET_PORT, and APR_POLLSET_EPOLL) do not have a * fixed limit on the size of the pollset. For these methods, * the size parameter controls the maximum number of * descriptors that will be returned by a single call to * apr_pollset_poll(). */ APR_DECLARE(apr_status_t) apr_pollset_create(apr_pollset_t **pollset, apr_uint32_t size, apr_pool_t *p, apr_uint32_t flags); /** * Set up a pollset object * @param pollset The pointer in which to return the newly created object * @param size The maximum number of descriptors that this pollset can hold * @param p The pool from which to allocate the pollset * @param flags Optional flags to modify the operation of the pollset. * @param method Poll method to use. See #apr_pollset_method_e. If this * method cannot be used, the default method will be used unless the * APR_POLLSET_NODEFAULT flag has been specified. 
* * @remark If flags contains APR_POLLSET_THREADSAFE, then a pollset is * created on which it is safe to make concurrent calls to * apr_pollset_add(), apr_pollset_remove() and apr_pollset_poll() * from separate threads. This feature is only supported on some * platforms; the apr_pollset_create_ex() call will fail with * APR_ENOTIMPL on platforms where it is not supported. * @remark If flags contains APR_POLLSET_WAKEABLE, then a pollset is * created with additional internal pipe object used for the * apr_pollset_wakeup() call. The actual size of pollset is * in that case size + 1. This feature is only supported on some * platforms; the apr_pollset_create_ex() call will fail with * APR_ENOTIMPL on platforms where it is not supported. * @remark If flags contains APR_POLLSET_NOCOPY, then the apr_pollfd_t * structures passed to apr_pollset_add() are not copied and * must have a lifetime at least as long as the pollset. * @remark Some poll methods (including APR_POLLSET_KQUEUE, * APR_POLLSET_PORT, and APR_POLLSET_EPOLL) do not have a * fixed limit on the size of the pollset. For these methods, * the size parameter controls the maximum number of * descriptors that will be returned by a single call to * apr_pollset_poll(). */ APR_DECLARE(apr_status_t) apr_pollset_create_ex(apr_pollset_t **pollset, apr_uint32_t size, apr_pool_t *p, apr_uint32_t flags, apr_pollset_method_e method); /** * Destroy a pollset object * @param pollset The pollset to destroy */ APR_DECLARE(apr_status_t) apr_pollset_destroy(apr_pollset_t *pollset); /** * Add a socket or file descriptor to a pollset * @param pollset The pollset to which to add the descriptor * @param descriptor The descriptor to add * @remark If you set client_data in the descriptor, that value * will be returned in the client_data field whenever this * descriptor is signalled in apr_pollset_poll(). 
* @remark If the pollset has been created with APR_POLLSET_THREADSAFE * and thread T1 is blocked in a call to apr_pollset_poll() for * this same pollset that is being modified via apr_pollset_add() * in thread T2, the currently executing apr_pollset_poll() call in * T1 will either: (1) automatically include the newly added descriptor * in the set of descriptors it is watching or (2) return immediately * with APR_EINTR. Option (1) is recommended, but option (2) is * allowed for implementations where option (1) is impossible * or impractical. * @remark If the pollset has been created with APR_POLLSET_NOCOPY, the * apr_pollfd_t structure referenced by descriptor will not be copied * and must have a lifetime at least as long as the pollset. * @remark Do not add the same socket or file descriptor to the same pollset * multiple times, even if the requested events differ for the * different calls to apr_pollset_add(). If the events of interest * for a descriptor change, you must first remove the descriptor * from the pollset with apr_pollset_remove(), then add it again * specifying all requested events. */ APR_DECLARE(apr_status_t) apr_pollset_add(apr_pollset_t *pollset, const apr_pollfd_t *descriptor); /** * Remove a descriptor from a pollset * @param pollset The pollset from which to remove the descriptor * @param descriptor The descriptor to remove * @remark If the descriptor is not found, APR_NOTFOUND is returned. * @remark If the pollset has been created with APR_POLLSET_THREADSAFE * and thread T1 is blocked in a call to apr_pollset_poll() for * this same pollset that is being modified via apr_pollset_remove() * in thread T2, the currently executing apr_pollset_poll() call in * T1 will either: (1) automatically exclude the newly added descriptor * in the set of descriptors it is watching or (2) return immediately * with APR_EINTR. Option (1) is recommended, but option (2) is * allowed for implementations where option (1) is impossible * or impractical. 
* @remark apr_pollset_remove() cannot be used to remove a subset of requested * events for a descriptor. The reqevents field in the apr_pollfd_t * parameter must contain the same value when removing as when adding. */ APR_DECLARE(apr_status_t) apr_pollset_remove(apr_pollset_t *pollset, const apr_pollfd_t *descriptor); /** * Block for activity on the descriptor(s) in a pollset * @param pollset The pollset to use * @param timeout The amount of time in microseconds to wait. This is a * maximum, not a minimum. If a descriptor is signalled, the * function will return before this time. If timeout is * negative, the function will block until a descriptor is * signalled or until apr_pollset_wakeup() has been called. * @param num Number of signalled descriptors (output parameter) * @param descriptors Array of signalled descriptors (output parameter) * @remark APR_EINTR will be returned if the pollset has been created with * APR_POLLSET_WAKEABLE, apr_pollset_wakeup() has been called while * waiting for activity, and there were no signalled descriptors at the * time of the wakeup call. * @remark Multiple signalled conditions for the same descriptor may be reported * in one or more returned apr_pollfd_t structures, depending on the * implementation. */ APR_DECLARE(apr_status_t) apr_pollset_poll(apr_pollset_t *pollset, apr_interval_time_t timeout, apr_int32_t *num, const apr_pollfd_t **descriptors); /** * Interrupt the blocked apr_pollset_poll() call. * @param pollset The pollset to use * @remark If the pollset was not created with APR_POLLSET_WAKEABLE the * return value is APR_EINIT. */ APR_DECLARE(apr_status_t) apr_pollset_wakeup(apr_pollset_t *pollset); /** * Poll the descriptors in the poll structure * @param aprset The poll structure we will be using. * @param numsock The number of descriptors we are polling * @param nsds The number of descriptors signalled (output parameter) * @param timeout The amount of time in microseconds to wait. This is a * maximum, not a minimum. 
If a descriptor is signalled, the * function will return before this time. If timeout is * negative, the function will block until a descriptor is * signalled or until apr_pollset_wakeup() has been called. * @remark The number of descriptors signalled is returned in the third argument. * This is a blocking call, and it will not return until either a * descriptor has been signalled or the timeout has expired. * @remark The rtnevents field in the apr_pollfd_t array will only be filled- * in if the return value is APR_SUCCESS. */ APR_DECLARE(apr_status_t) apr_poll(apr_pollfd_t *aprset, apr_int32_t numsock, apr_int32_t *nsds, apr_interval_time_t timeout); /** * Return a printable representation of the pollset method. * @param pollset The pollset to use */ APR_DECLARE(const char *) apr_pollset_method_name(apr_pollset_t *pollset); /** * Return a printable representation of the default pollset method * (APR_POLLSET_DEFAULT). */ APR_DECLARE(const char *) apr_poll_method_defname(void); /** Opaque structure used for pollcb API */ typedef struct apr_pollcb_t apr_pollcb_t; /** * Set up a pollcb object * @param pollcb The pointer in which to return the newly created object * @param size The maximum number of descriptors that a single _poll can return. * @param p The pool from which to allocate the pollcb * @param flags Optional flags to modify the operation of the pollcb. * * @remark If flags contains APR_POLLSET_WAKEABLE, then a pollcb is * created with an additional internal pipe object used for the * apr_pollcb_wakeup() call. The actual size of pollcb is * in that case @a size + 1. * @remark Pollcb is only supported on some platforms; the apr_pollcb_create() * call will fail with APR_ENOTIMPL on platforms where it is not supported. 
*/ APR_DECLARE(apr_status_t) apr_pollcb_create(apr_pollcb_t **pollcb, apr_uint32_t size, apr_pool_t *p, apr_uint32_t flags); /** * Set up a pollcb object * @param pollcb The pointer in which to return the newly created object * @param size The maximum number of descriptors that a single _poll can return. * @param p The pool from which to allocate the pollcb * @param flags Optional flags to modify the operation of the pollcb. * @param method Poll method to use. See #apr_pollset_method_e. If this * method cannot be used, the default method will be used unless the * APR_POLLSET_NODEFAULT flag has been specified. * * @remark If flags contains APR_POLLSET_WAKEABLE, then a pollcb is * created with an additional internal pipe object used for the * apr_pollcb_wakeup() call. The actual size of pollcb is * in that case @a size + 1. * @remark Pollcb is only supported on some platforms; the apr_pollcb_create_ex() * call will fail with APR_ENOTIMPL on platforms where it is not supported. */ APR_DECLARE(apr_status_t) apr_pollcb_create_ex(apr_pollcb_t **pollcb, apr_uint32_t size, apr_pool_t *p, apr_uint32_t flags, apr_pollset_method_e method); /** * Add a socket or file descriptor to a pollcb * @param pollcb The pollcb to which to add the descriptor * @param descriptor The descriptor to add * @remark If you set client_data in the descriptor, that value will be * returned in the client_data field whenever this descriptor is * signalled in apr_pollcb_poll(). * @remark Unlike the apr_pollset API, the descriptor is not copied, and users * must retain the memory used by descriptor, as the same pointer will * be returned to them from apr_pollcb_poll. * @remark Do not add the same socket or file descriptor to the same pollcb * multiple times, even if the requested events differ for the * different calls to apr_pollcb_add(). 
If the events of interest * for a descriptor change, you must first remove the descriptor * from the pollcb with apr_pollcb_remove(), then add it again * specifying all requested events. */ APR_DECLARE(apr_status_t) apr_pollcb_add(apr_pollcb_t *pollcb, apr_pollfd_t *descriptor); /** * Remove a descriptor from a pollcb * @param pollcb The pollcb from which to remove the descriptor * @param descriptor The descriptor to remove * @remark If the descriptor is not found, APR_NOTFOUND is returned. * @remark apr_pollcb_remove() cannot be used to remove a subset of requested * events for a descriptor. The reqevents field in the apr_pollfd_t * parameter must contain the same value when removing as when adding. */ APR_DECLARE(apr_status_t) apr_pollcb_remove(apr_pollcb_t *pollcb, apr_pollfd_t *descriptor); /** * Function prototype for pollcb handlers * @param baton Opaque baton passed into apr_pollcb_poll() * @param descriptor Contains the notification for an active descriptor. * The @a rtnevents member describes which events were triggered * for this descriptor. * @remark If the pollcb handler does not return APR_SUCCESS, the apr_pollcb_poll() * call returns with the handler's return value. */ typedef apr_status_t (*apr_pollcb_cb_t)(void *baton, apr_pollfd_t *descriptor); /** * Block for activity on the descriptor(s) in a pollcb * @param pollcb The pollcb to use * @param timeout The amount of time in microseconds to wait. This is a * maximum, not a minimum. If a descriptor is signalled, the * function will return before this time. If timeout is * negative, the function will block until a descriptor is * signalled or until apr_pollcb_wakeup() has been called. * @param func Callback function to call for each active descriptor. * @param baton Opaque baton passed to the callback function. * @remark Multiple signalled conditions for the same descriptor may be reported * in one or more calls to the callback function, depending on the * implementation. 
* @remark APR_EINTR will be returned if the pollset has been created with * APR_POLLSET_WAKEABLE and apr_pollcb_wakeup() has been called while * waiting for activity. */ APR_DECLARE(apr_status_t) apr_pollcb_poll(apr_pollcb_t *pollcb, apr_interval_time_t timeout, apr_pollcb_cb_t func, void *baton); /** * Interrupt the blocked apr_pollcb_poll() call. * @param pollcb The pollcb to use * @remark If the pollcb was not created with APR_POLLSET_WAKEABLE the * return value is APR_EINIT. */ APR_DECLARE(apr_status_t) apr_pollcb_wakeup(apr_pollcb_t *pollcb); /** * Return a printable representation of the pollcb method. * @param pollcb The pollcb to use */ APR_DECLARE(const char *) apr_pollcb_method_name(apr_pollcb_t *pollcb); /** @} */ #ifdef __cplusplus } #endif #endif /* ! APR_POLL_H */
lightenna/structuredfun-pc
structured/vendor/Apache24/include/apr_poll.h
C
apache-2.0
21,099
CCM (Cassandra Cluster Manager) ==================================================== A script/library to create, launch and remove an Apache Cassandra cluster on localhost. The goal of ccm and ccmlib is to make it easy to create, manage and destroy a small Cassandra cluster on a local box. It is meant for testing a Cassandra cluster. Requirements ------------ - A working python installation (tested to work with python 2.7). - pyYAML (http://pyyaml.org/ -- `sudo easy_install pyYaml`) - six (https://pypi.python.org/pypi/six -- `sudo easy_install six`) - ant (http://ant.apache.org/, on Mac OS X, `brew install ant`) - psutil (https://pypi.python.org/pypi/psutil) - Java (which version depends on the version of Cassandra you plan to use. If unsure, use Java 7 as it is known to work with current versions of Cassandra). - ccm only works on localhost for now. If you want to create multiple node clusters, the simplest way is to use multiple loopback aliases. On modern linux distributions you probably don't need to do anything, but on Mac OS X, you will need to create the aliases with sudo ifconfig lo0 alias 127.0.0.2 up sudo ifconfig lo0 alias 127.0.0.3 up ... Note that the usage section assumes that at least 127.0.0.1, 127.0.0.2 and 127.0.0.3 are available. Known issues ------------ Windows only: - `node start` pops up a window, stealing focus. - cli and cqlsh started from ccm show incorrect prompts on command-prompt - non nodetool-based command-line options fail (sstablesplit, scrub, etc) - cli_session does not accept commands. - To install psutil, you must use the .msi from pypi. 
pip install psutil will not work - You will need ant.bat in your PATH in order to build C* from source - You must run with an Unrestricted Powershell Execution-Policy if using Cassandra 2.1.0+ - Ant installed via [chocolatey](https://chocolatey.org/) will not be found by ccm, so you must create a symbolic link in order to fix the issue (as administrator): - cmd /c mklink C:\ProgramData\chocolatey\bin\ant.bat C:\ProgramData\chocolatey\bin\ant.exe Installation ------------ ccm uses python distutils so from the source directory run: sudo ./setup.py install ccm is available on the [Python Package Index][pip]: pip install ccm There is also a [Homebrew package][brew] available: brew install ccm [pip]: https://pypi.python.org/pypi/ccm [brew]: https://github.com/Homebrew/homebrew/blob/master/Library/Formula/ccm.rb Usage ----- Let's say you wanted to fire up a 3 node Cassandra cluster. ### Short version ccm create test -v 2.0.5 -n 3 -s You will of course want to replace `2.0.5` by whichever version of Cassandra you want to test. ### Longer version ccm works from a Cassandra source tree (not the jars). There are two ways to tell ccm how to find the sources: 1. If you have downloaded *and* compiled Cassandra sources, you can ask ccm to use those by initiating a new cluster with: ccm create test --install-dir=<path/to/cassandra-sources> or, from that source tree directory, simply ccm create test 2. You can ask ccm to use a released version of Cassandra. For instance to use Cassandra 2.0.5, run ccm create test -v 2.0.5 ccm will download the binary (from http://archive.apache.org/dist/cassandra), and set the new cluster to use it. This means that this command can take a few minutes the first time you create a cluster for a given version. 
ccm saves the compiled source in `~/.ccm/repository/`, so creating a cluster for that version will be much faster the second time you run it (note however that if you create a lot of clusters with different versions, this will take up disk space). Once the cluster is created, you can populate it with 3 nodes with: ccm populate -n 3 Note: If you’re running on Mac OSX, create a new interface for every node besides the first, for example if you populated your cluster with 3 nodes, create interfaces for 127.0.0.2 and 127.0.0.3 like so: sudo ifconfig lo0 alias 127.0.0.2 sudo ifconfig lo0 alias 127.0.0.3 Otherwise you will get the following error message: (...) Inet address 127.0.0.1:9042 is not available: [Errno 48] Address already in use After that execute: ccm start That will start 3 nodes on IP 127.0.0.[1, 2, 3] on port 9160 for thrift, port 7000 for the internal cluster communication and ports 7100, 7200 and 7300 for JMX. You can check that the cluster is correctly set up with ccm node1 ring You can then bootstrap a 4th node with ccm add node4 -i 127.0.0.4 -j 7400 -b (populate is just a shortcut for adding multiple nodes initially) ccm provides a number of conveniences, like flushing all of the nodes of the cluster: ccm flush or only one node: ccm node2 flush You can also easily look at the log file of a given node with: ccm node1 showlog Finally, you can get rid of the whole cluster (which will stop the node and remove all the data) with ccm remove The list of other provided commands is available through ccm Each command is then documented through the `-h` (or `--help`) flag. For instance `ccm add -h` describes the options for `ccm add`. 
### Source Distribution If you'd like to use a source distribution instead of the default binary each time (for example, for Continuous Integration), you can prefix cassandra version with `source:`, for example: ``` ccm create test -v source:2.0.5 -n 3 -s ``` ### Automatic Version Fallback If 'binary:' or 'source:' are not explicitly specified in your version string, then ccm will fallback to building the requested version from git if it cannot access the apache mirrors. ### Git and GitHub To use the latest version from the [canonical Apache Git repository](https://git-wip-us.apache.org/repos/asf?p=cassandra.git), use the version name `git:branch-name`, e.g.: ``` ccm create trunk -v git:trunk -n 5 ``` and to download a branch from a GitHub fork of Cassandra, you can prefix the repository and branch with `github:`, e.g.: ``` ccm create patched -v github:jbellis/trunk -n 1 ``` Remote debugging ----------------------- If you would like to connect to your Cassandra nodes with a remote debugger you have to pass the `-d` (or `--debug`) flag to the populate command: ccm populate -d -n 3 That will populate 3 nodes on IP 127.0.0.[1, 2, 3] setting up the remote debugging on ports 2100, 2200 and 2300. The main thread will not be suspended so you don't have to connect with a remote debugger to start a node. Alternatively you can also specify a remote port with the `-r` (or `--remote-debug-port`) flag while adding a node ccm add node4 -r 5005 -i 127.0.0.4 -j 7400 -b Where things are stored ----------------------- By default, ccm stores all the node data and configuration files under `~/.ccm/cluster_name/`. This can be overridden using the `--config-dir` option with each command. DataStax Enterprise ------------------- CCM 2.0 supports creating and interacting with DSE clusters. The --dse option must be used with the `ccm create` command. See the `ccm create -h` help for assistance. CCM Lib ------- The ccm facilities are available programmatically through ccmlib. 
This could be used to implement automated tests against Cassandra. A simple
example of how to use ccmlib follows:

    import ccmlib

    CLUSTER_PATH="."
    cluster = ccmlib.Cluster(CLUSTER_PATH, 'test', cassandra_version='2.0.5')
    cluster.populate(3).start()
    [node1, node2, node3] = cluster.nodelist()

    # do some tests on the cluster/nodes. To connect to a node through thrift,
    # the host and port to a node is available through
    # node.network_interfaces['thrift']

    cluster.flush()
    node2.compact()

    # do some other tests

    # after the test, you can leave the cluster running, you can stop all nodes
    # using cluster.stop() but keep the data around (in CLUSTER_PATH/test), or
    # you can remove everything with cluster.remove()

--
Sylvain Lebresne <sylvain@datastax.com>
kishkaru/ccm
README.md
Markdown
apache-2.0
8,112
/* Copyright 2014 The Kubernetes Authors All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package app // This file exists to force the desired plugin implementations to be linked. // This should probably be part of some configuration fed into the build for a // given binary target. import ( // Cloud providers _ "k8s.io/kubernetes/pkg/cloudprovider/providers" // Admission policies _ "k8s.io/kubernetes/plugin/pkg/admission/admit" _ "k8s.io/kubernetes/plugin/pkg/admission/alwayspullimages" _ "k8s.io/kubernetes/plugin/pkg/admission/antiaffinity" _ "k8s.io/kubernetes/plugin/pkg/admission/deny" _ "k8s.io/kubernetes/plugin/pkg/admission/exec" _ "k8s.io/kubernetes/plugin/pkg/admission/initialresources" _ "k8s.io/kubernetes/plugin/pkg/admission/limitranger" _ "k8s.io/kubernetes/plugin/pkg/admission/namespace/autoprovision" _ "k8s.io/kubernetes/plugin/pkg/admission/namespace/exists" _ "k8s.io/kubernetes/plugin/pkg/admission/namespace/lifecycle" _ "k8s.io/kubernetes/plugin/pkg/admission/persistentvolume/label" _ "k8s.io/kubernetes/plugin/pkg/admission/resourcequota" _ "k8s.io/kubernetes/plugin/pkg/admission/securitycontext/scdeny" _ "k8s.io/kubernetes/plugin/pkg/admission/serviceaccount" )
gouyang/kubernetes
cmd/kube-apiserver/app/plugins.go
GO
apache-2.0
1,718
package com.mapswithme.maps.widget.placepage; import android.location.Location; import android.os.Bundle; import android.support.annotation.Nullable; import android.view.LayoutInflater; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.view.ViewTreeObserver; import android.widget.TextView; import com.mapswithme.maps.Framework; import com.mapswithme.maps.R; import com.mapswithme.maps.base.BaseMwmDialogFragment; import com.mapswithme.maps.bookmarks.data.DistanceAndAzimut; import com.mapswithme.maps.bookmarks.data.MapObject; import com.mapswithme.maps.location.LocationHelper; import com.mapswithme.maps.widget.ArrowView; import com.mapswithme.util.LocationUtils; import com.mapswithme.util.UiUtils; import com.mapswithme.util.statistics.AlohaHelper; import com.mapswithme.util.statistics.Statistics; public class DirectionFragment extends BaseMwmDialogFragment implements LocationHelper.LocationListener { private static final String EXTRA_MAP_OBJECT = "MapObject"; private ArrowView mAvDirection; private TextView mTvTitle; private TextView mTvSubtitle; private TextView mTvDistance; private MapObject mMapObject; @Override protected int getCustomTheme() { return R.style.MwmTheme_DialogFragment_Fullscreen_Translucent; } @Override public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { final View root = inflater.inflate(R.layout.fragment_direction, container, false); root.setOnTouchListener(new View.OnTouchListener() { @Override public boolean onTouch(View v, MotionEvent event) { dismiss(); Statistics.INSTANCE.trackEvent(Statistics.EventName.PP_DIRECTION_ARROW_CLOSE); AlohaHelper.logClick(AlohaHelper.PP_DIRECTION_ARROW_CLOSE); return false; } }); initViews(root); if (savedInstanceState != null) setMapObject(savedInstanceState.<MapObject>getParcelable(EXTRA_MAP_OBJECT)); return root; } @Override public void onSaveInstanceState(Bundle outState) { 
outState.putParcelable(EXTRA_MAP_OBJECT, mMapObject); super.onSaveInstanceState(outState); } private void initViews(View root) { mAvDirection = (ArrowView) root.findViewById(R.id.av__direction); mTvTitle = (TextView) root.findViewById(R.id.tv__title); mTvSubtitle = (TextView) root.findViewById(R.id.tv__subtitle); mTvDistance = (TextView) root.findViewById(R.id.tv__straight_distance); UiUtils.waitLayout(mTvTitle, new ViewTreeObserver.OnGlobalLayoutListener() { @Override public void onGlobalLayout() { final int height = mTvTitle.getHeight(); final int lineHeight = mTvTitle.getLineHeight(); mTvTitle.setMaxLines(height / lineHeight); } }); } public void setMapObject(MapObject object) { mMapObject = object; refreshViews(); } private void refreshViews() { if (mMapObject != null && isResumed()) { mTvTitle.setText(mMapObject.getTitle()); mTvSubtitle.setText(mMapObject.getSubtitle()); } } @Override public void onResume() { super.onResume(); LocationHelper.INSTANCE.addLocationListener(this, true); refreshViews(); } @Override public void onPause() { super.onPause(); LocationHelper.INSTANCE.removeLocationListener(this); } @Override public void onLocationUpdated(Location location) { if (mMapObject != null) { final DistanceAndAzimut distanceAndAzimuth = Framework.nativeGetDistanceAndAzimuthFromLatLon(mMapObject.getLat(), mMapObject.getLon(), location.getLatitude(), location.getLongitude(), 0.0); mTvDistance.setText(distanceAndAzimuth.getDistance()); } } @Override public void onCompassUpdated(long time, double magneticNorth, double trueNorth, double accuracy) { final Location last = LocationHelper.INSTANCE.getSavedLocation(); if (last == null || mMapObject == null) return; final int rotation = getActivity().getWindowManager().getDefaultDisplay().getRotation(); magneticNorth = LocationUtils.correctCompassAngle(rotation, magneticNorth); trueNorth = LocationUtils.correctCompassAngle(rotation, trueNorth); final double north = (trueNorth >= 0.0) ? 
trueNorth : magneticNorth; final DistanceAndAzimut da = Framework.nativeGetDistanceAndAzimuthFromLatLon( mMapObject.getLat(), mMapObject.getLon(), last.getLatitude(), last.getLongitude(), north); if (da.getAzimuth() >= 0) mAvDirection.setAzimuth(da.getAzimuth()); } @Override public void onLocationError(int errorCode) {} }
yunikkk/omim
android/src/com/mapswithme/maps/widget/placepage/DirectionFragment.java
Java
apache-2.0
4,758
# # %CopyrightBegin% # # Copyright Ericsson AB 1997-2016. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # %CopyrightEnd% # include $(ERL_TOP)/make/target.mk include $(ERL_TOP)/make/$(TARGET)/otp.mk # ---------------------------------------------------- # Target Specs # ---------------------------------------------------- MODULES= \ rpc_SUITE \ pdict_SUITE \ bif_SUITE \ kernel_SUITE \ application_SUITE \ myApp \ topApp \ topApp2 \ topApp3 \ ch \ ch_sup \ appinc1 \ appinc1x \ appinc2 \ appinc2top \ appinc2A \ appinc2B \ code_SUITE \ code_b_test \ disk_log_SUITE \ erl_boot_server_SUITE \ erl_distribution_SUITE \ erl_distribution_wb_SUITE \ erl_prim_loader_SUITE \ error_handler_SUITE \ error_logger_SUITE \ error_logger_warn_SUITE \ file_SUITE \ file_name_SUITE \ prim_file_SUITE \ ram_file_SUITE \ gen_tcp_api_SUITE \ gen_tcp_echo_SUITE \ gen_tcp_misc_SUITE \ gen_udp_SUITE \ gen_sctp_SUITE \ global_SUITE \ global_group_SUITE \ heart_SUITE \ inet_SUITE \ inet_sockopt_SUITE \ inet_res_SUITE \ interactive_shell_SUITE \ init_SUITE \ kernel_config_SUITE \ os_SUITE \ pg2_SUITE \ seq_trace_SUITE \ wrap_log_reader_SUITE \ cleanup \ ignore_cores \ zlib_SUITE \ loose_node \ sendfile_SUITE \ standard_error_SUITE \ multi_load_SUITE APP_FILES = \ appinc.app \ appinc1.app \ appinc1x.app \ appinc2.app \ appinc2top.app \ appinc2A.app \ appinc2B.app \ myApp.app \ topApp.app \ topApp2.app \ topApp3.app ERL_FILES= $(MODULES:%=%.erl) code_a_test.erl TARGET_FILES= 
$(MODULES:%=$(EBIN)/%.$(EMULATOR)) INSTALL_PROGS= $(TARGET_FILES) EMAKEFILE=Emakefile COVERFILE=kernel.cover # ---------------------------------------------------- # Release directory specification # ---------------------------------------------------- RELSYSDIR = $(RELEASE_PATH)/kernel_test # ---------------------------------------------------- # FLAGS # ---------------------------------------------------- ERL_MAKE_FLAGS += ERL_COMPILE_FLAGS += EBIN = . # ---------------------------------------------------- # Targets # ---------------------------------------------------- make_emakefile: $(ERL_TOP)/make/make_emakefile $(ERL_COMPILE_FLAGS) -o$(EBIN) '*_SUITE_make' \ > $(EMAKEFILE) $(ERL_TOP)/make/make_emakefile $(ERL_COMPILE_FLAGS) -o$(EBIN) $(MODULES) \ >> $(EMAKEFILE) tests debug opt: make_emakefile erl $(ERL_MAKE_FLAGS) -make clean: rm -f $(EMAKEFILE) rm -f $(TARGET_FILES) $(GEN_FILES) rm -f core docs: # ---------------------------------------------------- # Release Target # ---------------------------------------------------- include $(ERL_TOP)/make/otp_release_targets.mk release_spec: opt release_tests_spec: make_emakefile $(INSTALL_DIR) "$(RELSYSDIR)" $(INSTALL_DATA) $(ERL_FILES) "$(RELSYSDIR)" $(INSTALL_DATA) $(APP_FILES) "$(RELSYSDIR)" $(INSTALL_DATA) kernel.spec kernel_smoke.spec kernel_bench.spec \ $(EMAKEFILE) $(COVERFILE) "$(RELSYSDIR)" chmod -R u+w "$(RELSYSDIR)" @tar cf - *_SUITE_data | (cd "$(RELSYSDIR)"; tar xf -) release_docs_spec:
falkevik/otp
lib/kernel/test/Makefile
Makefile
apache-2.0
3,560
/* * Copyright 2014 MongoDB, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #if !defined (MONGOC_COMPILATION) #error "Only <mongoc.h> can be included directly." #endif #ifndef MONGOC_SCRAM_PRIVATE_H #define MONGOC_SCRAM_PRIVATE_H #include <bson.h> #include "mongoc-crypto-private.h" BSON_BEGIN_DECLS #define MONGOC_SCRAM_HASH_SIZE 20 typedef struct _mongoc_scram_t { bool done; int step; char *user; char *pass; uint8_t salted_password[MONGOC_SCRAM_HASH_SIZE]; char encoded_nonce[48]; int32_t encoded_nonce_len; uint8_t *auth_message; uint32_t auth_messagemax; uint32_t auth_messagelen; #ifdef MONGOC_ENABLE_CRYPTO mongoc_crypto_t crypto; #endif } mongoc_scram_t; void _mongoc_scram_startup(); void _mongoc_scram_init (mongoc_scram_t *scram); void _mongoc_scram_set_pass (mongoc_scram_t *scram, const char *pass); void _mongoc_scram_set_user (mongoc_scram_t *scram, const char *user); void _mongoc_scram_destroy (mongoc_scram_t *scram); bool _mongoc_scram_step (mongoc_scram_t *scram, const uint8_t *inbuf, uint32_t inbuflen, uint8_t *outbuf, uint32_t outbufmax, uint32_t *outbuflen, bson_error_t *error); BSON_END_DECLS #endif /* MONGOC_SCRAM_PRIVATE_H */
christopherjwang/mongo-c-driver
src/mongoc/mongoc-scram-private.h
C
apache-2.0
2,068
# ___ ___ _ _ ___ ___ _ _____ ___ ___ # / __| __| \| | __| _ \ /_\_ _| __| \ # | (_ | _|| .` | _|| / / _ \| | | _|| |) | # \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____ # | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _| # | |) | (_) | | .` | (_) || | | _|| |) | | | | # |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_| # # Example docker run command # docker run -ti --net=host --name web --rm=true oso-rhel7-saml-sso # /root/start.sh will then start the httpd. FROM oso-rhel7-ops-base:latest # Pause indefinitely if asked to do so. RUN test "$OO_PAUSE_ON_BUILD" = "true" && while sleep 10; do true; done || : EXPOSE 8443 ADD prep_simplesaml.sh start.sh /usr/local/bin/ # Install SimpleSAML and modules from RPMs, then run our setup/hardening script for SimpleSAML RUN yum install -y \ httpd \ mod_ssl \ openshift-tools-web-simplesamlphp-modules \ openssh-server \ oso-simplesamlphp \ php php-cli \ php-google-apiclient \ php-pecl-memcache \ php-pecl-yaml \ && \ yum -y update && yum clean all && \ prep_simplesaml.sh && \ ln -sf /usr/share/simplesamlphp/modules/authorizeyaml/bin/get_saml_token.php /usr/local/bin/get_saml_token # Copy config files ADD sshd_config /etc/ssh/sshd_config # Copy index redirect page and readyness/liveness probe handler ADD index.php status.php /var/www/html/ # Start apache & sshd CMD /usr/local/bin/start.sh # Add config file templates and startup playbook ADD root/ /root/ # Fix v3 specific environment # Make the container work more consistently in and out of openshift # BE CAREFUL!!! If you change these, you may bloat the image! Use 'docker history' to see the size! RUN mkdir -p /run/httpd && \ chmod -R g+rwX /etc/httpd /etc/passwd /etc/group /run /var/log /usr/share/simplesamlphp/config && \ chgrp -R root /run/ /var/log /var/lib/php/session && \ ansible-playbook /root/build.yaml && \ rm -rf /root/.ansible
andrewklau/openshift-tools
docker/oso-saml-sso/rhel7/Dockerfile
Dockerfile
apache-2.0
2,037
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. #include <cassert> #include <chrono> #include <cstdint> #include <cstring> #include <ctime> #include <iomanip> #include <iostream> #include <utility> #include "arrow/io/file.h" #include "parquet/exception.h" #include "parquet/stream_reader.h" #include "parquet/stream_writer.h" // This file gives an example of how to use the parquet::StreamWriter // and parquet::StreamReader classes. // It shows writing/reading of the supported types as well as how a // user-defined type can be handled. template <typename T> using optional = parquet::StreamReader::optional<T>; // Example of a user-defined type to be written to/read from Parquet // using C++ input/output operators. 
class UserTimestamp { public: UserTimestamp() = default; UserTimestamp(const std::chrono::microseconds v) : ts_{v} {} bool operator==(const UserTimestamp& x) const { return ts_ == x.ts_; } void dump(std::ostream& os) const { const auto t = static_cast<std::time_t>( std::chrono::duration_cast<std::chrono::seconds>(ts_).count()); os << std::put_time(std::gmtime(&t), "%Y%m%d-%H%M%S"); } void dump(parquet::StreamWriter& os) const { os << ts_; } private: std::chrono::microseconds ts_; }; std::ostream& operator<<(std::ostream& os, const UserTimestamp& v) { v.dump(os); return os; } parquet::StreamWriter& operator<<(parquet::StreamWriter& os, const UserTimestamp& v) { v.dump(os); return os; } parquet::StreamReader& operator>>(parquet::StreamReader& os, UserTimestamp& v) { std::chrono::microseconds ts; os >> ts; v = UserTimestamp{ts}; return os; } std::shared_ptr<parquet::schema::GroupNode> GetSchema() { parquet::schema::NodeVector fields; fields.push_back(parquet::schema::PrimitiveNode::Make( "string_field", parquet::Repetition::OPTIONAL, parquet::Type::BYTE_ARRAY, parquet::ConvertedType::UTF8)); fields.push_back(parquet::schema::PrimitiveNode::Make( "char_field", parquet::Repetition::REQUIRED, parquet::Type::FIXED_LEN_BYTE_ARRAY, parquet::ConvertedType::NONE, 1)); fields.push_back(parquet::schema::PrimitiveNode::Make( "char[4]_field", parquet::Repetition::REQUIRED, parquet::Type::FIXED_LEN_BYTE_ARRAY, parquet::ConvertedType::NONE, 4)); fields.push_back(parquet::schema::PrimitiveNode::Make( "int8_field", parquet::Repetition::REQUIRED, parquet::Type::INT32, parquet::ConvertedType::INT_8)); fields.push_back(parquet::schema::PrimitiveNode::Make( "uint16_field", parquet::Repetition::REQUIRED, parquet::Type::INT32, parquet::ConvertedType::UINT_16)); fields.push_back(parquet::schema::PrimitiveNode::Make( "int32_field", parquet::Repetition::REQUIRED, parquet::Type::INT32, parquet::ConvertedType::INT_32)); fields.push_back(parquet::schema::PrimitiveNode::Make( "uint64_field", 
parquet::Repetition::OPTIONAL, parquet::Type::INT64, parquet::ConvertedType::UINT_64)); fields.push_back(parquet::schema::PrimitiveNode::Make( "double_field", parquet::Repetition::REQUIRED, parquet::Type::DOUBLE, parquet::ConvertedType::NONE)); // User defined timestamp type. fields.push_back(parquet::schema::PrimitiveNode::Make( "timestamp_field", parquet::Repetition::REQUIRED, parquet::Type::INT64, parquet::ConvertedType::TIMESTAMP_MICROS)); fields.push_back(parquet::schema::PrimitiveNode::Make( "chrono_milliseconds_field", parquet::Repetition::REQUIRED, parquet::Type::INT64, parquet::ConvertedType::TIMESTAMP_MILLIS)); return std::static_pointer_cast<parquet::schema::GroupNode>( parquet::schema::GroupNode::Make("schema", parquet::Repetition::REQUIRED, fields)); } struct TestData { static const int num_rows = 2000; static void init() { std::time(&ts_offset_); } static optional<std::string> GetOptString(const int i) { if (i % 2 == 0) return {}; return "Str #" + std::to_string(i); } static arrow::util::string_view GetStringView(const int i) { string_ = "StringView #" + std::to_string(i); return arrow::util::string_view(string_); } static const char* GetCharPtr(const int i) { string_ = "CharPtr #" + std::to_string(i); return string_.c_str(); } static char GetChar(const int i) { return i & 1 ? 
'M' : 'F'; } static int8_t GetInt8(const int i) { return static_cast<int8_t>((i % 256) - 128); } static uint16_t GetUInt16(const int i) { return static_cast<uint16_t>(i); } static int32_t GetInt32(const int i) { return 3 * i - 17; } static optional<uint64_t> GetOptUInt64(const int i) { if (i % 11 == 0) return {}; return (1ull << 40) + i * i + 101; } static double GetDouble(const int i) { return 6.62607004e-34 * 3e8 * i; } static UserTimestamp GetUserTimestamp(const int i) { return UserTimestamp{std::chrono::microseconds{(ts_offset_ + 3 * i) * 1000000 + i}}; } static std::chrono::milliseconds GetChronoMilliseconds(const int i) { return std::chrono::milliseconds{(ts_offset_ + 3 * i) * 1000ull + i}; } static char char4_array[4]; private: static std::time_t ts_offset_; static std::string string_; }; char TestData::char4_array[] = "XYZ"; std::time_t TestData::ts_offset_; std::string TestData::string_; void WriteParquetFile() { std::shared_ptr<arrow::io::FileOutputStream> outfile; PARQUET_ASSIGN_OR_THROW( outfile, arrow::io::FileOutputStream::Open("parquet-stream-api-example.parquet")); parquet::WriterProperties::Builder builder; #if defined ARROW_WITH_BROTLI builder.compression(parquet::Compression::BROTLI); #elif defined ARROW_WITH_ZSTD builder.compression(parquet::Compression::ZSTD); #endif parquet::StreamWriter os{ parquet::ParquetFileWriter::Open(outfile, GetSchema(), builder.build())}; os.SetMaxRowGroupSize(1000); for (auto i = 0; i < TestData::num_rows; ++i) { // Output string using 3 different types: std::string, arrow::util::string_view and // const char *. 
switch (i % 3) { case 0: os << TestData::GetOptString(i); break; case 1: os << TestData::GetStringView(i); break; case 2: os << TestData::GetCharPtr(i); break; } os << TestData::GetChar(i); switch (i % 2) { case 0: os << TestData::char4_array; break; case 1: os << parquet::StreamWriter::FixedStringView{TestData::GetCharPtr(i), 4}; break; } os << TestData::GetInt8(i); os << TestData::GetUInt16(i); os << TestData::GetInt32(i); os << TestData::GetOptUInt64(i); os << TestData::GetDouble(i); os << TestData::GetUserTimestamp(i); os << TestData::GetChronoMilliseconds(i); os << parquet::EndRow; if (i == TestData::num_rows / 2) { os << parquet::EndRowGroup; } } std::cout << "Parquet Stream Writing complete." << std::endl; } void ReadParquetFile() { std::shared_ptr<arrow::io::ReadableFile> infile; PARQUET_ASSIGN_OR_THROW( infile, arrow::io::ReadableFile::Open("parquet-stream-api-example.parquet")); parquet::StreamReader os{parquet::ParquetFileReader::Open(infile)}; optional<std::string> opt_string; char ch; char char_array[4]; int8_t int8; uint16_t uint16; int32_t int32; optional<uint64_t> opt_uint64; double d; UserTimestamp ts_user; std::chrono::milliseconds ts_ms; int i; for (i = 0; !os.eof(); ++i) { os >> opt_string; os >> ch; os >> char_array; os >> int8; os >> uint16; os >> int32; os >> opt_uint64; os >> d; os >> ts_user; os >> ts_ms; os >> parquet::EndRow; if (0) { // For debugging. std::cout << "Row #" << i << std::endl; std::cout << "string["; if (opt_string) { std::cout << *opt_string; } else { std::cout << "N/A"; } std::cout << "] char[" << ch << "] charArray[" << char_array << "] int8[" << int(int8) << "] uint16[" << uint16 << "] int32[" << int32; std::cout << "] uint64["; if (opt_uint64) { std::cout << *opt_uint64; } else { std::cout << "N/A"; } std::cout << "] double[" << d << "] tsUser[" << ts_user << "] tsMs[" << ts_ms.count() << "]" << std::endl; } // Check data. 
switch (i % 3) { case 0: assert(opt_string == TestData::GetOptString(i)); break; case 1: assert(*opt_string == TestData::GetStringView(i)); break; case 2: assert(*opt_string == TestData::GetCharPtr(i)); break; } assert(ch == TestData::GetChar(i)); switch (i % 2) { case 0: assert(0 == std::memcmp(char_array, TestData::char4_array, sizeof(char_array))); break; case 1: assert(0 == std::memcmp(char_array, TestData::GetCharPtr(i), sizeof(char_array))); break; } assert(int8 == TestData::GetInt8(i)); assert(uint16 == TestData::GetUInt16(i)); assert(int32 == TestData::GetInt32(i)); assert(opt_uint64 == TestData::GetOptUInt64(i)); assert(std::abs(d - TestData::GetDouble(i)) < 1e-6); assert(ts_user == TestData::GetUserTimestamp(i)); assert(ts_ms == TestData::GetChronoMilliseconds(i)); } assert(TestData::num_rows == i); std::cout << "Parquet Stream Reading complete." << std::endl; } int main() { WriteParquetFile(); ReadParquetFile(); return 0; }
cpcloud/arrow
cpp/examples/parquet/parquet_stream_api/stream_reader_writer.cc
C++
apache-2.0
10,174
=head1 NAME Apache2::FilterRec - Perl API for manipulating the Apache filter record =head1 Synopsis use Apache2::Filter (); use Apache2::FilterRec (); my $frec = $filter->frec; print "filter name is:", $frec->name; =head1 Description C<Apache2::FilterRec> provides an access to the filter record structure. The C<Apache2::FilterRec> object is retrieved by calling C<L<frec()|docs::2.0::api::Apache2::Filter/C_frec_>>: $frec = $filter->frec; =head1 API C<Apache2::FilterRec> provides the following functions and/or methods: =head2 C<name> The registered name for this filter $name = $frec->name(); =over 4 =item obj: C<$frec> ( C<L<Apache2::FilterRec object|docs::2.0::api::Apache2::FilterRec>> ) =item ret: C<$name> (string) =item since: 2.0.00 =back mod_perl filters have four names: modperl_request_output modperl_request_input modperl_connection_output modperl_connection_input You can see the names of the non-mod_perl filters as well. By calling C<L<$filter-E<gt>next-E<gt>frec-E<gt>name|docs::2.0::api::Apache2::Filter/C_next_>> you can get the name of the next filter in the chain. Example: Let's print the name of the current and the filter that follows it: use Apache2::Filter (); use Apache2::FilterRec (); for my $frec ($filter->frec, $filter->next->frec) { print "Name: ", $frec->name; } =head1 See Also L<mod_perl 2.0 documentation|docs::2.0::index>. =head1 Copyright mod_perl 2.0 and its core modules are copyrighted under The Apache Software License, Version 2.0. =head1 Authors L<The mod_perl development team and numerous contributors|about::contributors::people>. =cut
Distrotech/mod_perl
docs/src/docs/2.0/api/Apache2/FilterRec.pod
Perl
apache-2.0
1,679
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.myfaces.spi; /** * Provide an interface to inject JSF artifacts using JavaEE annotations * as described in JSF 2.2 spec section 5.4 * * @author Leonardo Uribe */ public abstract class InjectionProvider { public abstract Object inject(Object instance) throws InjectionProviderException; public abstract void postConstruct(Object instance, Object creationMetaData) throws InjectionProviderException; public abstract void preDestroy(Object instance, Object creationMetaData) throws InjectionProviderException; public boolean isAvailable() { return true; } }
kulinski/myfaces
impl/src/main/java/org/apache/myfaces/spi/InjectionProvider.java
Java
apache-2.0
1,428
const int classes-5_abc_length = 2609; const int classes-5_abc_method_count = 20; const int classes-5_abc_class_count = 5; const int classes-5_abc_script_count = 1; const unsigned char classes-5_abc_data[2609] = { 0x10, 0x00, 0x2e, 0x00, 0x00, 0x00, 0x03, 0x48, 0xaf, 0xbc, 0x9a, 0xf2, 0xd7, 0x7a, 0x3e, 0x8d, 0xed, 0xb5, 0xa0, 0xf7, 0xc6, 0xb0, 0x3e, 0x61, 0x2e, 0x2e, 0x2e, 0x2f, 0x74, 0x65, 0x73, 0x74, 0x73, 0x2f, 0x72, 0x65, 0x67, 0x72, 0x65, 0x73, 0x73, 0x2f, 0x63, 0x6f, 0x72, 0x72, 0x65, 0x63, 0x74, 0x6e, 0x65, 0x73, 0x73, 0x2f, 0x70, 0x61, 0x73, 0x73, 0x2f, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x65, 0x73, 0x2d, 0x35, 0x2e, 0x61, 0x73, 0x01, 0x41, 0x0e, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x65, 0x73, 0x2d, 0x35, 0x2e, 0x61, 0x73, 0x24, 0x31, 0x00, 0x03, 0x58, 0x59, 0x5a, 0x01, 0x61, 0x01, 0x42, 0x01, 0x43, 0x01, 0x44, 0x01, 0x45, 0x01, 0x62, 0x01, 0x63, 0x01, 0x64, 0x01, 0x65, 0x05, 0x74, 0x72, 0x61, 0x63, 0x65, 0x03, 0x2d, 0x2d, 0x2d, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x06, 0x65, 0x78, 0x70, 0x65, 0x63, 0x74, 0x06, 0x61, 0x63, 0x74, 0x75, 0x61, 0x6c, 0x1b, 0x2e, 0x2e, 0x2f, 0x74, 0x65, 0x73, 0x74, 0x73, 0x2f, 0x72, 0x65, 0x67, 0x72, 0x65, 0x73, 0x73, 0x2f, 0x68, 0x61, 0x72, 0x6e, 0x65, 0x73, 0x73, 0x2e, 0x61, 0x73, 0x0c, 0x2c, 0x20, 0x65, 0x78, 0x70, 0x65, 0x63, 0x74, 0x65, 0x64, 0x3a, 0x20, 0x0a, 0x2c, 0x20, 0x61, 0x63, 0x74, 0x75, 0x61, 0x6c, 0x3a, 0x20, 0x02, 0x2c, 0x20, 0x0a, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x45, 0x71, 0x75, 0x61, 0x6c, 0x13, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x65, 0x73, 0x2d, 0x35, 0x2e, 0x61, 0x73, 0x24, 0x31, 0x3a, 0x74, 0x65, 0x73, 0x74, 0x06, 0x70, 0x61, 0x73, 0x73, 0x65, 0x64, 0x06, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x0a, 0x4e, 0x61, 0x4e, 0x20, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x0a, 0x4e, 0x61, 0x4e, 0x20, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x08, 0x61, 0x63, 0x74, 0x75, 0x61, 0x6c, 0x3a, 0x20, 0x08, 0x65, 0x78, 0x70, 0x65, 0x63, 0x74, 0x3a, 0x20, 0x05, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x06, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x0a, 0x74, 0x79, 0x70, 
0x65, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x04, 0x74, 0x72, 0x75, 0x65, 0x05, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x04, 0x4d, 0x61, 0x74, 0x68, 0x03, 0x61, 0x62, 0x73, 0x19, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x65, 0x73, 0x2d, 0x35, 0x2e, 0x61, 0x73, 0x24, 0x31, 0x3a, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x45, 0x71, 0x75, 0x61, 0x6c, 0x05, 0x58, 0x59, 0x5a, 0x3a, 0x41, 0x0b, 0x58, 0x59, 0x5a, 0x3a, 0x41, 0x2f, 0x58, 0x59, 0x5a, 0x3a, 0x41, 0x04, 0x41, 0x3a, 0x3a, 0x61, 0x0b, 0x58, 0x59, 0x5a, 0x3a, 0x41, 0x2f, 0x58, 0x59, 0x5a, 0x3a, 0x61, 0x17, 0x5f, 0x5f, 0x67, 0x6f, 0x5f, 0x74, 0x6f, 0x5f, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x68, 0x65, 0x6c, 0x70, 0x04, 0x66, 0x69, 0x6c, 0x65, 0x03, 0x70, 0x6f, 0x73, 0x02, 0x39, 0x30, 0x06, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x05, 0x58, 0x59, 0x5a, 0x3a, 0x42, 0x0b, 0x58, 0x59, 0x5a, 0x3a, 0x42, 0x2f, 0x58, 0x59, 0x5a, 0x3a, 0x42, 0x04, 0x42, 0x3a, 0x3a, 0x62, 0x0b, 0x58, 0x59, 0x5a, 0x3a, 0x42, 0x2f, 0x58, 0x59, 0x5a, 0x3a, 0x62, 0x03, 0x32, 0x31, 0x34, 0x05, 0x58, 0x59, 0x5a, 0x3a, 0x43, 0x0b, 0x58, 0x59, 0x5a, 0x3a, 0x43, 0x2f, 0x58, 0x59, 0x5a, 0x3a, 0x43, 0x04, 0x43, 0x3a, 0x3a, 0x63, 0x0b, 0x58, 0x59, 0x5a, 0x3a, 0x43, 0x2f, 0x58, 0x59, 0x5a, 0x3a, 0x63, 0x03, 0x33, 0x33, 0x38, 0x05, 0x58, 0x59, 0x5a, 0x3a, 0x44, 0x0b, 0x58, 0x59, 0x5a, 0x3a, 0x44, 0x2f, 0x58, 0x59, 0x5a, 0x3a, 0x64, 0x0b, 0x58, 0x59, 0x5a, 0x3a, 0x44, 0x2f, 0x58, 0x59, 0x5a, 0x3a, 0x44, 0x03, 0x35, 0x32, 0x31, 0x03, 0x34, 0x38, 0x39, 0x05, 0x58, 0x59, 0x5a, 0x3a, 0x45, 0x0b, 0x58, 0x59, 0x5a, 0x3a, 0x45, 0x2f, 0x58, 0x59, 0x5a, 0x3a, 0x65, 0x0b, 0x58, 0x59, 0x5a, 0x3a, 0x45, 0x2f, 0x58, 0x59, 0x5a, 0x3a, 0x45, 0x03, 0x36, 0x37, 0x36, 0x03, 0x36, 0x34, 0x34, 0x06, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x05, 0x41, 0x72, 0x72, 0x61, 0x79, 0x07, 0x42, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x03, 0x69, 0x6e, 0x74, 0x04, 0x75, 0x69, 0x6e, 0x74, 0x06, 0x4e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x06, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x04, 0x74, 0x65, 0x73, 
0x74, 0x1c, 0x5f, 0x5f, 0x67, 0x6f, 0x5f, 0x74, 0x6f, 0x5f, 0x63, 0x74, 0x6f, 0x72, 0x5f, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x68, 0x65, 0x6c, 0x70, 0x02, 0x34, 0x36, 0x02, 0x32, 0x39, 0x03, 0x31, 0x37, 0x30, 0x03, 0x31, 0x34, 0x33, 0x03, 0x32, 0x39, 0x34, 0x03, 0x32, 0x36, 0x37, 0x06, 0x6e, 0x61, 0x74, 0x69, 0x76, 0x65, 0x03, 0x63, 0x6c, 0x73, 0x03, 0x34, 0x33, 0x38, 0x03, 0x34, 0x31, 0x31, 0x03, 0x35, 0x39, 0x33, 0x03, 0x35, 0x36, 0x36, 0x01, 0x6f, 0x04, 0x31, 0x30, 0x34, 0x38, 0x08, 0x62, 0x75, 0x69, 0x6c, 0x74, 0x69, 0x6e, 0x73, 0x04, 0x31, 0x31, 0x30, 0x35, 0x01, 0x69, 0x04, 0x31, 0x31, 0x38, 0x30, 0x01, 0x78, 0x14, 0x05, 0x03, 0x16, 0x04, 0x16, 0x05, 0x17, 0x05, 0x05, 0x28, 0x18, 0x28, 0x1a, 0x28, 0x05, 0x31, 0x18, 0x31, 0x1a, 0x31, 0x05, 0x36, 0x18, 0x36, 0x1a, 0x36, 0x05, 0x3b, 0x18, 0x3b, 0x1a, 0x3b, 0x05, 0x40, 0x18, 0x40, 0x1a, 0x40, 0x08, 0x04, 0x01, 0x02, 0x03, 0x04, 0x02, 0x01, 0x02, 0x07, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x01, 0x02, 0x03, 0x04, 0x07, 0x08, 0x09, 0x0a, 0x09, 0x01, 0x02, 0x03, 0x04, 0x07, 0x0a, 0x0b, 0x0c, 0x0d, 0x0a, 0x01, 0x02, 0x03, 0x04, 0x07, 0x0a, 0x0d, 0x0e, 0x0f, 0x10, 0x0b, 0x01, 0x02, 0x03, 0x04, 0x07, 0x0a, 0x0d, 0x10, 0x11, 0x12, 0x13, 0x30, 0x09, 0x02, 0x01, 0x09, 0x06, 0x01, 0x09, 0x07, 0x01, 0x09, 0x08, 0x01, 0x09, 0x09, 0x01, 0x09, 0x0a, 0x01, 0x09, 0x0b, 0x01, 0x09, 0x0c, 0x01, 0x09, 0x0d, 0x01, 0x09, 0x0e, 0x01, 0x09, 0x0f, 0x01, 0x09, 0x0f, 0x02, 0x09, 0x18, 0x02, 0x09, 0x25, 0x02, 0x09, 0x26, 0x02, 0x09, 0x20, 0x02, 0x09, 0x0f, 0x03, 0x07, 0x04, 0x06, 0x07, 0x03, 0x02, 0x07, 0x02, 0x30, 0x09, 0x0f, 0x04, 0x07, 0x04, 0x0b, 0x07, 0x03, 0x07, 0x09, 0x0f, 0x05, 0x07, 0x04, 0x0c, 0x07, 0x03, 0x08, 0x09, 0x0f, 0x06, 0x07, 0x04, 0x0d, 0x07, 0x03, 0x09, 0x09, 0x0f, 0x07, 0x07, 0x04, 0x0e, 0x07, 0x03, 0x0a, 0x09, 0x30, 0x01, 0x09, 0x45, 0x01, 0x09, 0x46, 0x01, 0x09, 0x47, 0x01, 0x09, 0x48, 0x01, 0x09, 0x49, 0x01, 0x09, 0x4a, 0x01, 0x1b, 0x02, 0x09, 0x4b, 0x01, 0x07, 0x01, 0x18, 
0x07, 0x01, 0x4c, 0x07, 0x04, 0x5a, 0x07, 0x04, 0x5c, 0x07, 0x04, 0x5e, 0x07, 0x04, 0x60, 0x15, 0x00, 0x00, 0x04, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x19, 0x80, 0x11, 0x12, 0x13, 0x02, 0x00, 0x00, 0x00, 0x27, 0x80, 0x12, 0x13, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x29, 0x00, 0x00, 0x00, 0x2b, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x32, 0x00, 0x00, 0x00, 0x34, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x37, 0x00, 0x00, 0x00, 0x39, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x3c, 0x20, 0x00, 0x00, 0x3d, 0x00, 0x00, 0x00, 0x3c, 0x20, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x41, 0x20, 0x00, 0x00, 0x42, 0x00, 0x00, 0x00, 0x41, 0x20, 0x00, 0x00, 0x04, 0x00, 0x16, 0x2c, 0x02, 0x2d, 0x2e, 0x01, 0x2f, 0x2c, 0x02, 0x2d, 0x2e, 0x01, 0x35, 0x2c, 0x02, 0x2d, 0x2e, 0x01, 0x3a, 0x2c, 0x02, 0x2d, 0x2e, 0x01, 0x3e, 0x2c, 0x02, 0x2d, 0x2e, 0x01, 0x3f, 0x2c, 0x02, 0x2d, 0x2e, 0x01, 0x43, 0x2c, 0x02, 0x2d, 0x2e, 0x01, 0x44, 0x4d, 0x02, 0x2d, 0x2e, 0x01, 0x4e, 0x2c, 0x02, 0x2d, 0x2e, 0x01, 0x4f, 0x4d, 0x02, 0x2d, 0x2e, 0x01, 0x50, 0x2c, 0x02, 0x2d, 0x2e, 0x01, 0x51, 0x4d, 0x02, 0x2d, 0x2e, 0x01, 0x52, 0x2c, 0x02, 0x2d, 0x2e, 0x01, 0x53, 0x54, 0x01, 0x55, 0x09, 0x4d, 0x02, 0x2d, 0x2e, 0x01, 0x56, 0x2c, 0x02, 0x2d, 0x2e, 0x01, 0x57, 0x54, 0x01, 0x55, 0x0a, 0x4d, 0x02, 0x2d, 0x2e, 0x01, 0x58, 0x2c, 0x02, 0x2d, 0x2e, 0x01, 0x59, 0x2c, 0x02, 0x2d, 0x2e, 0x01, 0x5b, 0x2c, 0x02, 0x2d, 0x2e, 0x01, 0x5d, 0x2c, 0x02, 0x2d, 0x2e, 0x01, 0x5f, 0x05, 0x13, 0x14, 0x09, 0x06, 0x00, 0x04, 0x01, 0x12, 0x41, 0x00, 0x05, 0x01, 0x00, 0x17, 0x13, 0x09, 0x09, 0x00, 0x07, 0x01, 0x16, 0x41, 0x00, 0x08, 0x01, 0x01, 0x1a, 0x17, 0x09, 0x0c, 0x00, 0x0a, 0x01, 0x19, 0x41, 0x00, 0x0b, 0x01, 0x02, 0x1d, 0x1a, 0x09, 0x0f, 0x00, 0x0e, 0x01, 0x1c, 0x41, 0x00, 0x0f, 0x01, 0x04, 0x20, 0x1d, 0x09, 0x12, 0x00, 0x12, 0x01, 0x1f, 0x41, 0x00, 0x13, 0x01, 0x06, 0x03, 0x00, 0x06, 0x00, 0x09, 0x00, 0x0c, 0x01, 0x1c, 0x51, 0x03, 0x0d, 0x01, 0x03, 0x10, 0x01, 0x1f, 0x51, 0x03, 0x11, 0x01, 0x05, 0x01, 0x14, 0x0b, 0x2a, 0x00, 0x01, 
0x00, 0x00, 0x2b, 0x00, 0x02, 0x00, 0x00, 0x13, 0x44, 0x03, 0x00, 0x02, 0x07, 0x08, 0x17, 0x44, 0x04, 0x01, 0x02, 0x09, 0x0a, 0x1a, 0x44, 0x05, 0x02, 0x02, 0x0b, 0x0c, 0x1d, 0x44, 0x06, 0x03, 0x03, 0x0d, 0x0e, 0x0f, 0x20, 0x44, 0x07, 0x04, 0x03, 0x10, 0x11, 0x12, 0x2c, 0x40, 0x08, 0x00, 0x00, 0x01, 0x13, 0x2d, 0x40, 0x09, 0x00, 0x00, 0x01, 0x14, 0x2e, 0x40, 0x0a, 0x00, 0x00, 0x01, 0x15, 0x2f, 0x00, 0x0b, 0x00, 0x00, 0x11, 0x00, 0x02, 0x01, 0x01, 0x01, 0xc6, 0x01, 0xf1, 0x01, 0xf0, 0x35, 0x5d, 0x01, 0x4a, 0x01, 0x00, 0x46, 0x02, 0x00, 0x29, 0xf0, 0x36, 0x5d, 0x03, 0x4a, 0x03, 0x00, 0x46, 0x02, 0x00, 0x29, 0xf0, 0x37, 0x5d, 0x04, 0x4a, 0x04, 0x00, 0x46, 0x02, 0x00, 0x29, 0xf0, 0x38, 0x5d, 0x05, 0x4a, 0x05, 0x00, 0x46, 0x02, 0x00, 0x29, 0xf0, 0x39, 0x5d, 0x06, 0x4a, 0x06, 0x00, 0x46, 0x02, 0x00, 0x29, 0xf0, 0x3b, 0x5d, 0x03, 0x4a, 0x03, 0x00, 0x46, 0x07, 0x00, 0x29, 0xf0, 0x3c, 0x5d, 0x04, 0x4a, 0x04, 0x00, 0x46, 0x07, 0x00, 0x29, 0xf0, 0x3d, 0x5d, 0x05, 0x4a, 0x05, 0x00, 0x46, 0x07, 0x00, 0x29, 0xf0, 0x3e, 0x5d, 0x06, 0x4a, 0x06, 0x00, 0x46, 0x07, 0x00, 0x29, 0xf0, 0x40, 0x5d, 0x04, 0x4a, 0x04, 0x00, 0x46, 0x08, 0x00, 0x29, 0xf0, 0x41, 0x5d, 0x05, 0x4a, 0x05, 0x00, 0x46, 0x08, 0x00, 0x29, 0xf0, 0x42, 0x5d, 0x06, 0x4a, 0x06, 0x00, 0x46, 0x08, 0x00, 0x29, 0xf0, 0x44, 0x5d, 0x05, 0x4a, 0x05, 0x00, 0x46, 0x09, 0x00, 0x29, 0xf0, 0x45, 0x5d, 0x06, 0x4a, 0x06, 0x00, 0x46, 0x09, 0x00, 0x29, 0xf0, 0x47, 0x5d, 0x06, 0x4a, 0x06, 0x00, 0x46, 0x0a, 0x00, 0x29, 0xf0, 0x49, 0x5d, 0x0b, 0x2c, 0x10, 0x46, 0x0b, 0x01, 0x29, 0xf0, 0x4b, 0x64, 0x6c, 0x06, 0x46, 0x09, 0x00, 0x29, 0xf0, 0x4c, 0x64, 0x6c, 0x07, 0x46, 0x0a, 0x00, 0x29, 0xf0, 0x4d, 0x47, 0x00, 0x00, 0x01, 0x05, 0x04, 0x01, 0x01, 0x32, 0xef, 0x01, 0x11, 0x00, 0x35, 0xef, 0x01, 0x12, 0x01, 0x35, 0xef, 0x01, 0x13, 0x02, 0x35, 0xf1, 0x14, 0xf0, 0x36, 0x5d, 0x0c, 0xd1, 0x2c, 0x15, 0xa0, 0xd2, 0xa0, 0x2c, 0x16, 0xa0, 0xd3, 0xa0, 0x2c, 0x17, 0xa0, 0x5d, 0x0d, 0xd2, 0xd3, 0x46, 0x0d, 0x02, 0xa0, 0x46, 0x0c, 0x01, 0x29, 0xf0, 0x37, 
0x47, 0x00, 0x00, 0x02, 0x03, 0x04, 0x01, 0x01, 0xc7, 0x02, 0xef, 0x01, 0x12, 0x00, 0x01, 0xef, 0x01, 0x13, 0x01, 0x01, 0xef, 0x01, 0x1a, 0x02, 0x15, 0xf1, 0x14, 0xf0, 0x04, 0xd2, 0xd2, 0x13, 0x1a, 0x00, 0x00, 0xf0, 0x05, 0xd2, 0x95, 0x2c, 0x1b, 0x14, 0x0a, 0x00, 0x00, 0x2c, 0x1c, 0xf0, 0x06, 0x82, 0xd6, 0x10, 0x06, 0x00, 0x00, 0x2c, 0x1d, 0xf0, 0x08, 0x82, 0xd6, 0xf0, 0x0b, 0xd1, 0xd1, 0x13, 0x1a, 0x00, 0x00, 0xf0, 0x0c, 0xd1, 0x95, 0x2c, 0x1b, 0x14, 0x0a, 0x00, 0x00, 0x2c, 0x1c, 0xf0, 0x0d, 0x82, 0xd5, 0x10, 0x06, 0x00, 0x00, 0x2c, 0x1d, 0xf0, 0x0f, 0x82, 0xd5, 0xf0, 0x12, 0x5d, 0x0c, 0x2c, 0x1e, 0xd2, 0xa0, 0x46, 0x0c, 0x01, 0x29, 0xf0, 0x13, 0x5d, 0x0c, 0x2c, 0x1f, 0xd1, 0xa0, 0x46, 0x0c, 0x01, 0x29, 0x2c, 0x04, 0xf0, 0x15, 0x82, 0xd7, 0xf0, 0x16, 0xd1, 0xd2, 0x14, 0x5b, 0x00, 0x00, 0xf0, 0x17, 0xd1, 0x95, 0xd2, 0x95, 0xab, 0x96, 0x76, 0x2a, 0x76, 0x12, 0x34, 0x00, 0x00, 0x29, 0xf0, 0x18, 0xd1, 0x95, 0x2c, 0x20, 0xab, 0x76, 0x2a, 0x76, 0x12, 0x07, 0x00, 0x00, 0x29, 0xd2, 0x95, 0x2c, 0x21, 0xab, 0x76, 0x2a, 0x76, 0x11, 0x16, 0x00, 0x00, 0x29, 0xf0, 0x19, 0xd2, 0x95, 0x2c, 0x20, 0xab, 0x76, 0x2a, 0x76, 0x12, 0x07, 0x00, 0x00, 0x29, 0xd1, 0x95, 0x2c, 0x21, 0xab, 0x76, 0x96, 0x76, 0x12, 0x0a, 0x00, 0x00, 0x2c, 0x22, 0xf0, 0x1c, 0x82, 0xd7, 0x10, 0x06, 0x00, 0x00, 0x2c, 0x23, 0xf0, 0x1e, 0x82, 0xd7, 0x10, 0x6d, 0x00, 0x00, 0x2c, 0x24, 0xf0, 0x21, 0x82, 0xd7, 0xf0, 0x24, 0xd2, 0x95, 0x2c, 0x21, 0xab, 0x76, 0x2a, 0x76, 0x12, 0x07, 0x00, 0x00, 0x29, 0xd1, 0x95, 0x2c, 0x21, 0xab, 0x76, 0x12, 0x18, 0x00, 0x00, 0xf0, 0x25, 0x5d, 0x0e, 0x66, 0x0e, 0xd2, 0xd1, 0xa1, 0x46, 0x0f, 0x01, 0x2f, 0x01, 0x0c, 0x06, 0x00, 0x00, 0x2c, 0x23, 0xf0, 0x26, 0x82, 0xd7, 0xf0, 0x2c, 0xd2, 0x95, 0x2c, 0x20, 0xab, 0x76, 0x2a, 0x76, 0x12, 0x07, 0x00, 0x00, 0x29, 0xd1, 0x95, 0x2c, 0x20, 0xab, 0x76, 0x12, 0x1d, 0x00, 0x00, 0xf0, 0x2d, 0x5d, 0x10, 0x66, 0x10, 0xd2, 0xd1, 0xa1, 0x46, 0x0f, 0x01, 0x5d, 0x10, 0x2f, 0x02, 0x46, 0x10, 0x01, 0x0c, 0x06, 0x00, 0x00, 0x2c, 0x23, 0xf0, 0x2e, 0x82, 0xd7, 
0xf0, 0x32, 0xd3, 0x48, 0x00, 0x00, 0x03, 0x01, 0x01, 0x03, 0x04, 0x03, 0xd0, 0x30, 0x47, 0x00, 0x00, 0x04, 0x02, 0x01, 0x04, 0x05, 0x18, 0xf1, 0x01, 0xf0, 0x03, 0xd0, 0x30, 0xf0, 0x03, 0xd0, 0x49, 0x00, 0xf0, 0x04, 0x5d, 0x11, 0x2c, 0x02, 0x46, 0x11, 0x01, 0x29, 0xf0, 0x05, 0x47, 0x00, 0x00, 0x05, 0x02, 0x01, 0x04, 0x05, 0x13, 0xf1, 0x01, 0xf0, 0x07, 0xd0, 0x30, 0xf0, 0x08, 0x5d, 0x11, 0x2c, 0x2a, 0x46, 0x11, 0x01, 0x29, 0xf0, 0x09, 0x47, 0x00, 0x00, 0x06, 0x01, 0x01, 0x04, 0x05, 0x03, 0xd0, 0x30, 0x47, 0x00, 0x00, 0x07, 0x02, 0x01, 0x05, 0x06, 0x18, 0xf1, 0x01, 0xf0, 0x0d, 0xd0, 0x30, 0xf0, 0x0d, 0xd0, 0x49, 0x00, 0xf0, 0x0e, 0x5d, 0x15, 0x2c, 0x07, 0x46, 0x15, 0x01, 0x29, 0xf0, 0x0f, 0x47, 0x00, 0x00, 0x08, 0x02, 0x01, 0x05, 0x06, 0x13, 0xf1, 0x01, 0xf0, 0x11, 0xd0, 0x30, 0xf0, 0x12, 0x5d, 0x15, 0x2c, 0x33, 0x46, 0x15, 0x01, 0x29, 0xf0, 0x13, 0x47, 0x00, 0x00, 0x09, 0x01, 0x01, 0x05, 0x06, 0x03, 0xd0, 0x30, 0x47, 0x00, 0x00, 0x0a, 0x02, 0x01, 0x06, 0x07, 0x18, 0xf1, 0x01, 0xf0, 0x17, 0xd0, 0x30, 0xf0, 0x17, 0xd0, 0x49, 0x00, 0xf0, 0x18, 0x5d, 0x18, 0x2c, 0x08, 0x46, 0x18, 0x01, 0x29, 0xf0, 0x19, 0x47, 0x00, 0x00, 0x0b, 0x02, 0x01, 0x06, 0x07, 0x13, 0xf1, 0x01, 0xf0, 0x1b, 0xd0, 0x30, 0xf0, 0x1c, 0x5d, 0x18, 0x2c, 0x38, 0x46, 0x18, 0x01, 0x29, 0xf0, 0x1d, 0x47, 0x00, 0x00, 0x0c, 0x01, 0x01, 0x06, 0x07, 0x03, 0xd0, 0x30, 0x47, 0x00, 0x00, 0x0e, 0x02, 0x01, 0x07, 0x08, 0x18, 0xf1, 0x01, 0xf0, 0x22, 0xd0, 0x30, 0xf0, 0x22, 0xd0, 0x49, 0x00, 0xf0, 0x23, 0x5d, 0x1b, 0x2c, 0x09, 0x46, 0x1b, 0x01, 0x29, 0xf0, 0x24, 0x47, 0x00, 0x00, 0x10, 0x01, 0x01, 0x07, 0x08, 0x03, 0xd0, 0x30, 0x47, 0x00, 0x00, 0x12, 0x02, 0x01, 0x08, 0x09, 0x18, 0xf1, 0x01, 0xf0, 0x2c, 0xd0, 0x30, 0xf0, 0x2c, 0xd0, 0x49, 0x00, 0xf0, 0x2d, 0x5d, 0x1e, 0x2c, 0x0a, 0x46, 0x1e, 0x01, 0x29, 0xf0, 0x2e, 0x47, 0x00, 0x00, 0x14, 0x07, 0x03, 0x01, 0x07, 0xd0, 0x02, 0xd0, 0x30, 0xf1, 0x14, 0xf0, 0x01, 0x40, 0x02, 0x64, 0x2b, 0x6d, 0x01, 0xf1, 0x14, 0xf0, 0x35, 0x40, 0x01, 0x64, 0x2b, 0x6d, 0x02, 0xf1, 0x01, 
0xf0, 0x02, 0x65, 0x00, 0x5d, 0x14, 0x66, 0x14, 0x30, 0x5d, 0x21, 0x66, 0x21, 0x58, 0x00, 0x1d, 0x68, 0x13, 0xf1, 0x01, 0xf0, 0x0c, 0x65, 0x00, 0x5d, 0x14, 0x66, 0x14, 0x30, 0x5d, 0x13, 0x66, 0x13, 0x30, 0x64, 0x6c, 0x03, 0x58, 0x01, 0x1d, 0x1d, 0x68, 0x17, 0xf1, 0x01, 0xf0, 0x16, 0x65, 0x00, 0x5d, 0x14, 0x66, 0x14, 0x30, 0x5d, 0x13, 0x66, 0x13, 0x30, 0x5d, 0x17, 0x66, 0x17, 0x30, 0x64, 0x6c, 0x04, 0x58, 0x02, 0x1d, 0x1d, 0x1d, 0x68, 0x1a, 0xf1, 0x01, 0xf0, 0x21, 0x65, 0x00, 0x5d, 0x14, 0x66, 0x14, 0x30, 0x5d, 0x13, 0x66, 0x13, 0x30, 0x5d, 0x17, 0x66, 0x17, 0x30, 0x5d, 0x1a, 0x66, 0x1a, 0x30, 0x64, 0x6c, 0x05, 0x58, 0x03, 0x1d, 0x1d, 0x1d, 0x1d, 0x68, 0x1d, 0xf1, 0x01, 0xf0, 0x2b, 0x65, 0x00, 0x5d, 0x14, 0x66, 0x14, 0x30, 0x5d, 0x13, 0x66, 0x13, 0x30, 0x5d, 0x17, 0x66, 0x17, 0x30, 0x5d, 0x1a, 0x66, 0x1a, 0x30, 0x5d, 0x1d, 0x66, 0x1d, 0x30, 0x64, 0x6c, 0x06, 0x58, 0x04, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x68, 0x20, 0xf1, 0x01, 0xf0, 0x34, 0x40, 0x00, 0x64, 0x41, 0x00, 0x29, 0xf0, 0x4f, 0x5d, 0x21, 0x4a, 0x21, 0x00, 0x64, 0x2b, 0x6d, 0x08, 0xf0, 0x50, 0x5d, 0x0b, 0x64, 0x6c, 0x08, 0x46, 0x0b, 0x01, 0x29, 0xf0, 0x51, 0x5d, 0x0b, 0x5d, 0x21, 0x64, 0x6c, 0x08, 0x46, 0x21, 0x01, 0x46, 0x0b, 0x01, 0x82, 0xd5, 0xf0, 0x53, 0x5d, 0x21, 0x66, 0x21, 0x5d, 0x22, 0x66, 0x22, 0x5d, 0x23, 0x66, 0x23, 0x5d, 0x24, 0x66, 0x24, 0x5d, 0x25, 0x66, 0x25, 0x5d, 0x26, 0x66, 0x26, 0x5d, 0x27, 0x66, 0x27, 0x56, 0x07, 0x64, 0x2b, 0x6d, 0x09, 0xf0, 0x54, 0x24, 0x00, 0x64, 0x2b, 0x6d, 0x0a, 0x10, 0x2b, 0x00, 0x00, 0x09, 0xf0, 0x55, 0x64, 0x6c, 0x09, 0x64, 0x6c, 0x0a, 0x66, 0x28, 0x42, 0x00, 0x64, 0x2b, 0x6d, 0x0b, 0xf0, 0x56, 0x5d, 0x0b, 0x64, 0x6c, 0x0b, 0x46, 0x0b, 0x01, 0x82, 0xd5, 0xf0, 0x54, 0x64, 0x6c, 0x0a, 0x91, 0xd6, 0xd2, 0x64, 0x2b, 0x6d, 0x0a, 0x08, 0x02, 0x64, 0x6c, 0x0a, 0x64, 0x6c, 0x09, 0x66, 0x29, 0x15, 0xc9, 0xff, 0xff, 0xf0, 0x01, 0xd1, 0x48, 0x08, 0x01, 0x00, 0x00 };
tschneidereit/shumway
test/avm2/shumway/classes-5.cpp
C++
apache-2.0
15,870
// Copyright 2004-present Facebook. All Rights Reserved. // // You are hereby granted a non-exclusive, worldwide, royalty-free license to use, // copy, modify, and distribute this software in source code or binary form for use // in connection with the web services and APIs provided by Facebook. // // As with any software that integrates with the Facebook platform, your use of // this software is subject to the Facebook Developer Principles and Policies // [http://developers.facebook.com/policy/]. This copyright notice shall be // included in all copies or substantial portions of the software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS // FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR // COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER // IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN // CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
/* Preprocessor helper macros shared by the FBAudienceNetwork public headers. */
#ifndef FBAudienceNetwork_FBAdDefines_h
#define FBAudienceNetwork_FBAdDefines_h

/* C-linkage brackets: expand to extern "C" { ... } only when compiled as C++. */
#ifdef __cplusplus
#define FB_EXTERN_C_BEGIN extern "C" {
#define FB_EXTERN_C_END }
#else
#define FB_EXTERN_C_BEGIN
#define FB_EXTERN_C_END
#endif

/* Exported-symbol marker: default visibility, with C linkage under C++. */
#ifdef __cplusplus
#define FB_EXPORT extern "C" __attribute__((visibility("default")))
#else
#define FB_EXPORT extern __attribute__((visibility("default")))
#endif

/* Class export and deprecation helpers (GCC/Clang attributes). */
#define FB_CLASS_EXPORT __attribute__((visibility("default")))
#define FB_DEPRECATED __attribute__((deprecated))
#define FB_DEPRECATED_WITH_MESSAGE(M) __attribute__((deprecated(M)))

/*
 * Lightweight-generics shims: on compilers supporting Objective-C generics
 * the FB_NS*Of macros expand to the parameterized collection type; otherwise
 * they fall back to the raw collection class and `__covariant` (a keyword on
 * generics-capable compilers) is defined away to nothing.
 */
#if __has_feature(objc_generics)
#define FB_NSArrayOf(x) NSArray<x>
#define FB_NSMutableArrayOf(x) NSMutableArray<x>
#define FB_NSDictionaryOf(x, y) NSDictionary<x, y>
#define FB_NSMutableDictionaryOf(x, y) NSMutableDictionary<x, y>
#define FB_NSSetOf(x) NSSet<x>
#define FB_NSMutableSetOf(x) NSMutableSet<x>
#else
#define FB_NSArrayOf(x) NSArray
#define FB_NSMutableArrayOf(x) NSMutableArray
#define FB_NSDictionaryOf(x, y) NSDictionary
#define FB_NSMutableDictionaryOf(x, y) NSMutableDictionary
#define FB_NSSetOf(x) NSSet
#define FB_NSMutableSetOf(x) NSMutableSet
#define __covariant
#endif

/* Nullability shims: strip the annotations on compilers without the feature. */
#if !__has_feature(nullability)
#define NS_ASSUME_NONNULL_BEGIN
#define NS_ASSUME_NONNULL_END
#define nullable
#define __nullable
#endif

/* Subclassing restriction: expands to nothing when the attribute is absent. */
#ifndef FB_SUBCLASSING_RESTRICTED
#if defined(__has_attribute) && __has_attribute(objc_subclassing_restricted)
#define FB_SUBCLASSING_RESTRICTED __attribute__((objc_subclassing_restricted))
#else
#define FB_SUBCLASSING_RESTRICTED
#endif
#endif

#endif
darina/omim
iphone/Maps/Pods/FBAudienceNetwork/Static/FBAudienceNetwork.framework/Headers/FBAdDefines.h
C
apache-2.0
2,646