// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Conversion from AST representation of types to the ty.rs
//! representation. The main routine here is `ast_ty_to_ty()`: each use
//! is parameterized by an instance of `AstConv` and a `RegionScope`.
//!
//! The parameterization of `ast_ty_to_ty()` is because it behaves
//! somewhat differently during the collect and check phases,
//! particularly with respect to looking up the types of top-level
//! items. In the collect phase, the crate context is used as the
//! `AstConv` instance; in this phase, the `get_item_type_scheme()`
//! function triggers a recursive call to `type_scheme_of_item()`
//! (note that `ast_ty_to_ty()` will detect recursive types and report
//! an error). In the check phase, when the FnCtxt is used as the
//! `AstConv`, `get_item_type_scheme()` just looks up the item type in
//! `tcx.tcache` (using `ty::lookup_item_type`).
//!
//! The `RegionScope` trait controls what happens when the user does
//! not specify a region in some location where a region is required
//! (e.g., if the user writes `&Foo` as a type rather than `&'a Foo`).
//! See the `rscope` module for more details.
//!
//! Unlike the `AstConv` trait, the region scope can change as we descend
//! the type. This is to accommodate the fact that (a) fn types are binding
//! scopes and (b) the default region may change. To understand case (a),
//! consider something like:
//!
//!     type foo = { x: &a.int, y: |&a.int| }
//!
//! The type of `x` is an error because there is no region `a` in scope.
//! In the type of `y`, however, region `a` is considered a bound region
//! as it does not already appear in scope.
//!
//! Case (b) says that if you have a type:
//!     type foo<'a> = ...;
//!     type bar = fn(&foo, &a.foo)
//! The fully expanded version of type bar is:
//!     type bar = fn(&'foo &, &a.foo<'a>)
//! Note that the self region for the `foo` defaulted to `&` in the first
//! case but `&a` in the second. Basically, defaults that appear inside
//! an rptr (`&r.T`) use the region `r` that appears in the rptr.

use middle::astconv_util::{prim_ty_to_ty, check_path_args, NO_TPS, NO_REGIONS};
use middle::const_eval;
use middle::def;
use middle::resolve_lifetime as rl;
use middle::privacy::{AllPublic, LastMod};
use middle::subst::{FnSpace, TypeSpace, SelfSpace, Subst, Substs};
use middle::traits;
use middle::ty::{self, RegionEscape, Ty};
use rscope::{self, UnelidableRscope, RegionScope, ElidableRscope, ExplicitRscope,
             ObjectLifetimeDefaultRscope, ShiftedRscope, BindingRscope};
use util::common::{ErrorReported, FN_OUTPUT_NAME};
use util::nodemap::FnvHashSet;
use util::ppaux::{self, Repr, UserString};

use std::iter::repeat;
use std::rc::Rc;
use std::slice;

use syntax::{abi, ast, ast_util};
use syntax::codemap::Span;
use syntax::parse::token;
use syntax::print::pprust;

pub trait AstConv<'tcx> {
    fn tcx<'a>(&'a self) -> &'a ty::ctxt<'tcx>;

    /// Identify the type scheme for an item with a type, like a type
    /// alias, fn, or struct. This allows you to figure out the set of
    /// type parameters defined on the item.
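    /// A hedged illustration, not taken from this file: for an item such as
    ///
    /// ```ignore
    /// struct Foo<'a, T> { field: &'a T }
    /// ```
    ///
    /// the returned `ty::TypeScheme` pairs the item's generics (here the
    /// lifetime `'a` and the type parameter `T`) with its type `Foo<'a, T>`,
    /// which is how callers discover which parameters the item defines.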
    fn get_item_type_scheme(&self, span: Span, id: ast::DefId)
                            -> Result<ty::TypeScheme<'tcx>, ErrorReported>;

    /// Returns the `TraitDef` for a given trait. This allows you to
    /// figure out the set of type parameters defined on the trait.
    fn get_trait_def(&self, span: Span, id: ast::DefId)
                     -> Result<Rc<ty::TraitDef<'tcx>>, ErrorReported>;

    /// Ensure that the super-predicates for the trait with the given
    /// id are available and also for the transitive set of
    /// super-predicates.
    fn ensure_super_predicates(&self, span: Span, id: ast::DefId)
                               -> Result<(), ErrorReported>;

    /// Returns the set of bounds in scope for the type parameter with
    /// the given id.
    fn get_type_parameter_bounds(&self, span: Span, def_id: ast::NodeId)
                                 -> Result<Vec<ty::PolyTraitRef<'tcx>>, ErrorReported>;

    /// Returns true if the trait with id `trait_def_id` defines an
    /// associated type with the name `name`.
    fn trait_defines_associated_type_named(&self,
                                           trait_def_id: ast::DefId,
                                           name: ast::Name)
                                           -> bool;

    /// Return an (optional) substitution to convert bound type parameters that
    /// are in scope into free ones. This function should only return Some
    /// within a fn body.
    /// See ParameterEnvironment::free_substs for more information.
    fn get_free_substs(&self) -> Option<&Substs<'tcx>> {
        None
    }

    /// What type should we use when a type is omitted?
    fn ty_infer(&self, span: Span) -> Ty<'tcx>;

    /// Projecting an associated type from a (potentially)
    /// higher-ranked trait reference is more complicated, because of
    /// the possibility of late-bound regions appearing in the
    /// associated type binding. This is not legal in function
    /// signatures for that reason. In a function body, we can always
    /// handle it because we can use inference variables to remove the
    /// late-bound regions.
    fn projected_ty_from_poly_trait_ref(&self,
                                        span: Span,
                                        poly_trait_ref: ty::PolyTraitRef<'tcx>,
                                        item_name: ast::Name)
                                        -> Ty<'tcx>
    {
        if ty::binds_late_bound_regions(self.tcx(), &poly_trait_ref) {
            span_err!(self.tcx().sess, span, E0212,
                "cannot extract an associated type from a higher-ranked trait bound \
                 in this context");
            self.tcx().types.err
        } else {
            // no late-bound regions, we can just ignore the binder
            self.projected_ty(span, poly_trait_ref.0.clone(), item_name)
        }
    }

    /// Project an associated type from a non-higher-ranked trait reference.
    /// This is fairly straightforward and can be accommodated in any context.
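    /// A hedged example, not from the original source: given a trait
    /// reference corresponding to `T: Iterator` and the item name `Item`,
    /// the resulting type is the projection written in surface syntax as
    ///
    /// ```ignore
    /// <T as Iterator>::Item
    /// ```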
fn projected_ty(&self, span: Span, _trait_ref: Rc<ty::TraitRef<'tcx>>, _item_name: ast::Name) -> Ty<'tcx>; } pub fn ast_region_to_region(tcx: &ty::ctxt, lifetime: &ast::Lifetime) -> ty::Region { let r = match tcx.named_region_map.get(&lifetime.id) { None => { // should have been recorded by the `resolve_lifetime` pass tcx.sess.span_bug(lifetime.span, "unresolved lifetime"); } Some(&rl::DefStaticRegion) => { ty::ReStatic } Some(&rl::DefLateBoundRegion(debruijn, id)) => { ty::ReLateBound(debruijn, ty::BrNamed(ast_util::local_def(id), lifetime.name)) } Some(&rl::DefEarlyBoundRegion(space, index, id)) => { ty::ReEarlyBound(id, space, index, lifetime.name) } Some(&rl::DefFreeRegion(scope, id)) => { ty::ReFree(ty::FreeRegion { scope: scope, bound_region: ty::BrNamed(ast_util::local_def(id), lifetime.name) }) } }; debug!("ast_region_to_region(lifetime={} id={}) yields {}", lifetime.repr(tcx), lifetime.id, r.repr(tcx)); r } pub fn opt_ast_region_to_region<'tcx>( this: &AstConv<'tcx>, rscope: &RegionScope, default_span: Span, opt_lifetime: &Option<ast::Lifetime>) -> ty::Region { let r = match *opt_lifetime { Some(ref lifetime) => { ast_region_to_region(this.tcx(), lifetime) } None => { match rscope.anon_regions(default_span, 1) { Err(v) => { debug!("optional region in illegal location"); span_err!(this.tcx().sess, default_span, E0106, "missing lifetime specifier"); match v { Some(v) => { let mut m = String::new(); let len = v.len(); for (i, (name, n)) in v.into_iter().enumerate() { let help_name = if name.is_empty() { format!("argument {}", i + 1) } else { format!("`{}`", name) }; m.push_str(&(if n == 1 { help_name } else { format!("one of {}'s {} elided lifetimes", help_name, n) })[..]); if len == 2 && i == 0 { m.push_str(" or "); } else if i + 2 == len { m.push_str(", or "); } else if i + 1 != len { m.push_str(", "); } } if len == 1 { fileline_help!(this.tcx().sess, default_span, "this function's return type contains a borrowed value, but \ the signature does not say which {} it is borrowed from", m); } else if len == 0 { fileline_help!(this.tcx().sess, default_span, "this function's return type contains a borrowed value, but \ there is no value for it to be borrowed from"); fileline_help!(this.tcx().sess, default_span, "consider giving it a 'static lifetime"); } else { fileline_help!(this.tcx().sess, default_span, "this function's return type contains a borrowed value, but \ the signature does not say whether it is borrowed from {}", m); } } None => {}, } ty::ReStatic } Ok(rs) => rs[0], } } }; debug!("opt_ast_region_to_region(opt_lifetime={}) yields {}", opt_lifetime.repr(this.tcx()), r.repr(this.tcx())); r } /// Given a path `path` that refers to an item `I` with the declared generics `decl_generics`, /// returns an appropriate set of substitutions for this particular reference to `I`. pub fn ast_path_substs_for_ty<'tcx>( this: &AstConv<'tcx>, rscope: &RegionScope, span: Span, param_mode: PathParamMode, decl_generics: &ty::Generics<'tcx>, item_segment: &ast::PathSegment) -> Substs<'tcx> { let tcx = this.tcx(); // ast_path_substs() is only called to convert paths that are // known to refer to traits, types, or structs. In these cases, // all type parameters defined for the item being referenced will // be in the TypeSpace or SelfSpace. // // Note: in the case of traits, the self parameter is also // defined, but we don't currently create a `type_param_def` for // `Self` because it is implicit. 
assert!(decl_generics.regions.all(|d| d.space == TypeSpace)); assert!(decl_generics.types.all(|d| d.space != FnSpace)); let (regions, types, assoc_bindings) = match item_segment.parameters { ast::AngleBracketedParameters(ref data) => { convert_angle_bracketed_parameters(this, rscope, span, decl_generics, data) } ast::ParenthesizedParameters(ref data) => { span_err!(tcx.sess, span, E0214, "parenthesized parameters may only be used with a trait"); convert_parenthesized_parameters(this, rscope, span, decl_generics, data) } }; prohibit_projections(this.tcx(), &assoc_bindings); create_substs_for_ast_path(this, span, param_mode, decl_generics, None, types, regions) } #[derive(PartialEq, Eq)] pub enum PathParamMode { // Any path in a type context. Explicit, // The `module::Type` in `module::Type::method` in an expression. Optional } fn create_region_substs<'tcx>( this: &AstConv<'tcx>, rscope: &RegionScope, span: Span, decl_generics: &ty::Generics<'tcx>, regions_provided: Vec<ty::Region>) -> Substs<'tcx> { let tcx = this.tcx(); // If the type is parameterized by the this region, then replace this // region with the current anon region binding (in other words, // whatever & would get replaced with). let expected_num_region_params = decl_generics.regions.len(TypeSpace); let supplied_num_region_params = regions_provided.len(); let regions = if expected_num_region_params == supplied_num_region_params { regions_provided } else { let anon_regions = rscope.anon_regions(span, expected_num_region_params); if supplied_num_region_params != 0 || anon_regions.is_err() { report_lifetime_number_error(tcx, span, supplied_num_region_params, expected_num_region_params); } match anon_regions { Ok(anon_regions) => anon_regions, Err(_) => (0..expected_num_region_params).map(|_| ty::ReStatic).collect() } }; Substs::new_type(vec![], regions) } /// Given the type/region arguments provided to some path (along with /// an implicit Self, if this is a trait reference) returns the complete /// set of substitutions. This may involve applying defaulted type parameters. /// /// Note that the type listing given here is *exactly* what the user provided. /// /// The `region_substs` should be the result of `create_region_substs` /// -- that is, a substitution with no types but the correct number of /// regions. fn create_substs_for_ast_path<'tcx>( this: &AstConv<'tcx>, span: Span, param_mode: PathParamMode, decl_generics: &ty::Generics<'tcx>, self_ty: Option<Ty<'tcx>>, types_provided: Vec<Ty<'tcx>>, region_substs: Substs<'tcx>) -> Substs<'tcx> { let tcx = this.tcx(); debug!("create_substs_for_ast_path(decl_generics={}, self_ty={}, \ types_provided={}, region_substs={}", decl_generics.repr(tcx), self_ty.repr(tcx), types_provided.repr(tcx), region_substs.repr(tcx)); assert_eq!(region_substs.regions().len(TypeSpace), decl_generics.regions.len(TypeSpace)); assert!(region_substs.types.is_empty()); // Convert the type parameters supplied by the user. let ty_param_defs = decl_generics.types.get_slice(TypeSpace); let formal_ty_param_count = ty_param_defs.len(); let required_ty_param_count = ty_param_defs.iter() .take_while(|x| x.default.is_none()) .count(); // Fill with `ty_infer` if no params were specified, as long as // they were optional (e.g. paths inside expressions). 
let mut type_substs = if param_mode == PathParamMode::Optional && types_provided.is_empty() { (0..formal_ty_param_count).map(|_| this.ty_infer(span)).collect() } else { types_provided }; let supplied_ty_param_count = type_substs.len(); check_type_argument_count(this.tcx(), span, supplied_ty_param_count, required_ty_param_count, formal_ty_param_count); if supplied_ty_param_count < required_ty_param_count { while type_substs.len() < required_ty_param_count { type_substs.push(tcx.types.err); } } else if supplied_ty_param_count > formal_ty_param_count { type_substs.truncate(formal_ty_param_count); } assert!(type_substs.len() >= required_ty_param_count && type_substs.len() <= formal_ty_param_count); let mut substs = region_substs; substs.types.extend(TypeSpace, type_substs.into_iter()); match self_ty { None => { // If no self-type is provided, it's still possible that // one was declared, because this could be an object type. } Some(ty) => { // If a self-type is provided, one should have been // "declared" (in other words, this should be a // trait-ref). assert!(decl_generics.types.get_self().is_some()); substs.types.push(SelfSpace, ty); } } let actual_supplied_ty_param_count = substs.types.len(TypeSpace); for param in &ty_param_defs[actual_supplied_ty_param_count..] { if let Some(default) = param.default { // If we are converting an object type, then the // `Self` parameter is unknown. However, some of the // other type parameters may reference `Self` in their // defaults. This will lead to an ICE if we are not // careful! if self_ty.is_none() && ty::type_has_self(default) { tcx.sess.span_err( span, &format!("the type parameter `{}` must be explicitly specified \ in an object type because its default value `{}` references \ the type `Self`", param.name.user_string(tcx), default.user_string(tcx))); substs.types.push(TypeSpace, tcx.types.err); } else { // This is a default type parameter. let default = default.subst_spanned(tcx, &substs, Some(span)); substs.types.push(TypeSpace, default); } } else { tcx.sess.span_bug(span, "extra parameter without default"); } } substs } struct ConvertedBinding<'tcx> { item_name: ast::Name, ty: Ty<'tcx>, span: Span, } fn convert_angle_bracketed_parameters<'tcx>(this: &AstConv<'tcx>, rscope: &RegionScope, span: Span, decl_generics: &ty::Generics<'tcx>, data: &ast::AngleBracketedParameterData) -> (Substs<'tcx>, Vec<Ty<'tcx>>, Vec<ConvertedBinding<'tcx>>) { let regions: Vec<_> = data.lifetimes.iter() .map(|l| ast_region_to_region(this.tcx(), l)) .collect(); let region_substs = create_region_substs(this, rscope, span, decl_generics, regions); let types: Vec<_> = data.types.iter() .enumerate() .map(|(i,t)| ast_ty_arg_to_ty(this, rscope, decl_generics, i, &region_substs, t)) .collect(); let assoc_bindings: Vec<_> = data.bindings.iter() .map(|b| ConvertedBinding { item_name: b.ident.name, ty: ast_ty_to_ty(this, rscope, &*b.ty), span: b.span }) .collect(); (region_substs, types, assoc_bindings) } /// Returns the appropriate lifetime to use for any output lifetimes /// (if one exists) and a vector of the (pattern, number of lifetimes) /// corresponding to each input type/pattern. 
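/// A hedged illustration of the elision behavior (the functions `f` and `g`
/// below are examples only, not part of this module): for inputs shaped like
///
/// ```ignore
/// fn f(x: &'a str, y: usize) -> &str
/// ```
///
/// exactly one lifetime appears among the input types, so `'a` becomes the
/// implied output region; for `fn g(x: &'a str, y: &'b str) -> &str` no
/// single region is implied and `None` is returned.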
fn find_implied_output_region(input_tys: &[Ty], input_pats: Vec<String>) -> (Option<ty::Region>, Vec<(String, usize)>) { let mut lifetimes_for_params: Vec<(String, usize)> = Vec::new(); let mut possible_implied_output_region = None; for (input_type, input_pat) in input_tys.iter().zip(input_pats.into_iter()) { let mut accumulator = Vec::new(); ty::accumulate_lifetimes_in_type(&mut accumulator, *input_type); if accumulator.len() == 1 { // there's a chance that the unique lifetime of this // iteration will be the appropriate lifetime for output // parameters, so lets store it. possible_implied_output_region = Some(accumulator[0]) } lifetimes_for_params.push((input_pat, accumulator.len())); } let implied_output_region = if lifetimes_for_params.iter().map(|&(_, n)| n).sum::<usize>() == 1 { assert!(possible_implied_output_region.is_some()); possible_implied_output_region } else { None }; (implied_output_region, lifetimes_for_params) } fn convert_ty_with_lifetime_elision<'tcx>(this: &AstConv<'tcx>, implied_output_region: Option<ty::Region>, param_lifetimes: Vec<(String, usize)>, ty: &ast::Ty) -> Ty<'tcx> { match implied_output_region { Some(implied_output_region) => { let rb = ElidableRscope::new(implied_output_region); ast_ty_to_ty(this, &rb, ty) } None => { // All regions must be explicitly specified in the output // if the lifetime elision rules do not apply. This saves // the user from potentially-confusing errors. let rb = UnelidableRscope::new(param_lifetimes); ast_ty_to_ty(this, &rb, ty) } } } fn convert_parenthesized_parameters<'tcx>(this: &AstConv<'tcx>, rscope: &RegionScope, span: Span, decl_generics: &ty::Generics<'tcx>, data: &ast::ParenthesizedParameterData) -> (Substs<'tcx>, Vec<Ty<'tcx>>, Vec<ConvertedBinding<'tcx>>) { let region_substs = create_region_substs(this, rscope, span, decl_generics, Vec::new()); let binding_rscope = BindingRscope::new(); let inputs = data.inputs.iter() .map(|a_t| ast_ty_arg_to_ty(this, &binding_rscope, decl_generics, 0, &region_substs, a_t)) .collect::<Vec<Ty<'tcx>>>(); let input_params: Vec<_> = repeat(String::new()).take(inputs.len()).collect(); let (implied_output_region, params_lifetimes) = find_implied_output_region(&*inputs, input_params); let input_ty = ty::mk_tup(this.tcx(), inputs); let (output, output_span) = match data.output { Some(ref output_ty) => { (convert_ty_with_lifetime_elision(this, implied_output_region, params_lifetimes, &**output_ty), output_ty.span) } None => { (ty::mk_nil(this.tcx()), data.span) } }; let output_binding = ConvertedBinding { item_name: token::intern(FN_OUTPUT_NAME), ty: output, span: output_span }; (region_substs, vec![input_ty], vec![output_binding]) } pub fn instantiate_poly_trait_ref<'tcx>( this: &AstConv<'tcx>, rscope: &RegionScope, ast_trait_ref: &ast::PolyTraitRef, self_ty: Option<Ty<'tcx>>, poly_projections: &mut Vec<ty::PolyProjectionPredicate<'tcx>>) -> ty::PolyTraitRef<'tcx> { let trait_ref = &ast_trait_ref.trait_ref; let trait_def_id = trait_def_id(this, trait_ref); ast_path_to_poly_trait_ref(this, rscope, trait_ref.path.span, PathParamMode::Explicit, trait_def_id, self_ty, trait_ref.path.segments.last().unwrap(), poly_projections) } /// Instantiates the path for the given trait reference, assuming that it's /// bound to a valid trait type. Returns the def_id for the defining trait. /// Fails if the type is a type other than a trait type. /// /// If the `projections` argument is `None`, then assoc type bindings like `Foo<T=X>` /// are disallowed. Otherwise, they are pushed onto the vector given. 
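/// A hedged usage sketch, not from the original source: for an AST trait
/// reference written as `PartialEq<u32>` with `self_ty` supplied as some
/// type `T`, the returned `ty::TraitRef` corresponds to the fully
/// instantiated reference
///
/// ```ignore
/// <T as PartialEq<u32>>
/// ```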
pub fn instantiate_mono_trait_ref<'tcx>( this: &AstConv<'tcx>, rscope: &RegionScope, trait_ref: &ast::TraitRef, self_ty: Option<Ty<'tcx>>) -> Rc<ty::TraitRef<'tcx>> { let trait_def_id = trait_def_id(this, trait_ref); ast_path_to_mono_trait_ref(this, rscope, trait_ref.path.span, PathParamMode::Explicit, trait_def_id, self_ty, trait_ref.path.segments.last().unwrap()) } fn trait_def_id<'tcx>(this: &AstConv<'tcx>, trait_ref: &ast::TraitRef) -> ast::DefId { let path = &trait_ref.path; match ::lookup_full_def(this.tcx(), path.span, trait_ref.ref_id) { def::DefTrait(trait_def_id) => trait_def_id, _ => { span_fatal!(this.tcx().sess, path.span, E0245, "`{}` is not a trait", path.user_string(this.tcx())); } } } fn object_path_to_poly_trait_ref<'a,'tcx>( this: &AstConv<'tcx>, rscope: &RegionScope, span: Span, param_mode: PathParamMode, trait_def_id: ast::DefId, trait_segment: &ast::PathSegment, mut projections: &mut Vec<ty::PolyProjectionPredicate<'tcx>>) -> ty::PolyTraitRef<'tcx> { ast_path_to_poly_trait_ref(this, rscope, span, param_mode, trait_def_id, None, trait_segment, projections) } fn ast_path_to_poly_trait_ref<'a,'tcx>( this: &AstConv<'tcx>, rscope: &RegionScope, span: Span, param_mode: PathParamMode, trait_def_id: ast::DefId, self_ty: Option<Ty<'tcx>>, trait_segment: &ast::PathSegment, poly_projections: &mut Vec<ty::PolyProjectionPredicate<'tcx>>) -> ty::PolyTraitRef<'tcx> { // The trait reference introduces a binding level here, so // we need to shift the `rscope`. It'd be nice if we could // do away with this rscope stuff and work this knowledge // into resolve_lifetimes, as we do with non-omitted // lifetimes. Oh well, not there yet. let shifted_rscope = &ShiftedRscope::new(rscope); let (substs, assoc_bindings) = create_substs_for_ast_trait_ref(this, shifted_rscope, span, param_mode, trait_def_id, self_ty, trait_segment); let poly_trait_ref = ty::Binder(Rc::new(ty::TraitRef::new(trait_def_id, substs))); { let converted_bindings = assoc_bindings .iter() .filter_map(|binding| { // specify type to assert that error was already reported in Err case: let predicate: Result<_, ErrorReported> = ast_type_binding_to_poly_projection_predicate(this, poly_trait_ref.clone(), self_ty, binding); predicate.ok() // ok to ignore Err() because ErrorReported (see above) }); poly_projections.extend(converted_bindings); } poly_trait_ref } fn ast_path_to_mono_trait_ref<'a,'tcx>(this: &AstConv<'tcx>, rscope: &RegionScope, span: Span, param_mode: PathParamMode, trait_def_id: ast::DefId, self_ty: Option<Ty<'tcx>>, trait_segment: &ast::PathSegment) -> Rc<ty::TraitRef<'tcx>> { let (substs, assoc_bindings) = create_substs_for_ast_trait_ref(this, rscope, span, param_mode, trait_def_id, self_ty, trait_segment); prohibit_projections(this.tcx(), &assoc_bindings); Rc::new(ty::TraitRef::new(trait_def_id, substs)) } fn create_substs_for_ast_trait_ref<'a,'tcx>(this: &AstConv<'tcx>, rscope: &RegionScope, span: Span, param_mode: PathParamMode, trait_def_id: ast::DefId, self_ty: Option<Ty<'tcx>>, trait_segment: &ast::PathSegment) -> (&'tcx Substs<'tcx>, Vec<ConvertedBinding<'tcx>>) { debug!("create_substs_for_ast_trait_ref(trait_segment={:?})", trait_segment); let trait_def = match this.get_trait_def(span, trait_def_id) { Ok(trait_def) => trait_def, Err(ErrorReported) => { // No convenient way to recover from a cycle here. Just bail. Sorry! 
this.tcx().sess.abort_if_errors(); this.tcx().sess.bug("ErrorReported returned, but no errors reports?") } }; let (regions, types, assoc_bindings) = match trait_segment.parameters { ast::AngleBracketedParameters(ref data) => { // For now, require that parenthetical notation be used // only with `Fn()` etc. if !this.tcx().sess.features.borrow().unboxed_closures && trait_def.paren_sugar { span_err!(this.tcx().sess, span, E0215, "angle-bracket notation is not stable when \ used with the `Fn` family of traits, use parentheses"); fileline_help!(this.tcx().sess, span, "add `#![feature(unboxed_closures)]` to \ the crate attributes to enable"); } convert_angle_bracketed_parameters(this, rscope, span, &trait_def.generics, data) } ast::ParenthesizedParameters(ref data) => { // For now, require that parenthetical notation be used // only with `Fn()` etc. if !this.tcx().sess.features.borrow().unboxed_closures && !trait_def.paren_sugar { span_err!(this.tcx().sess, span, E0216, "parenthetical notation is only stable when \ used with the `Fn` family of traits"); fileline_help!(this.tcx().sess, span, "add `#![feature(unboxed_closures)]` to \ the crate attributes to enable"); } convert_parenthesized_parameters(this, rscope, span, &trait_def.generics, data) } }; let substs = create_substs_for_ast_path(this, span, param_mode, &trait_def.generics, self_ty, types, regions); (this.tcx().mk_substs(substs), assoc_bindings) } fn ast_type_binding_to_poly_projection_predicate<'tcx>( this: &AstConv<'tcx>, mut trait_ref: ty::PolyTraitRef<'tcx>, self_ty: Option<Ty<'tcx>>, binding: &ConvertedBinding<'tcx>) -> Result<ty::PolyProjectionPredicate<'tcx>, ErrorReported> { let tcx = this.tcx(); // Given something like `U : SomeTrait<T=X>`, we want to produce a // predicate like `<U as SomeTrait>::T = X`. This is somewhat // subtle in the event that `T` is defined in a supertrait of // `SomeTrait`, because in that case we need to upcast. // // That is, consider this case: // // ``` // trait SubTrait : SuperTrait<int> { } // trait SuperTrait<A> { type T; } // // ... B : SubTrait<T=foo> ... // ``` // // We want to produce `<B as SuperTrait<int>>::T == foo`. // Simple case: X is defined in the current trait. if this.trait_defines_associated_type_named(trait_ref.def_id(), binding.item_name) { return Ok(ty::Binder(ty::ProjectionPredicate { // <-------------------+ projection_ty: ty::ProjectionTy { // | trait_ref: trait_ref.skip_binder().clone(), // Binder moved here --+ item_name: binding.item_name, }, ty: binding.ty, })); } // Otherwise, we have to walk through the supertraits to find // those that do. This is complicated by the fact that, for an // object type, the `Self` type is not present in the // substitutions (after all, it's being constructed right now), // but the `supertraits` iterator really wants one. To handle // this, we currently insert a dummy type and then remove it // later. Yuck. 
let dummy_self_ty = ty::mk_infer(tcx, ty::FreshTy(0)); if self_ty.is_none() { // if converting for an object type let mut dummy_substs = trait_ref.skip_binder().substs.clone(); // binder moved here -+ assert!(dummy_substs.self_ty().is_none()); // | dummy_substs.types.push(SelfSpace, dummy_self_ty); // | trait_ref = ty::Binder(Rc::new(ty::TraitRef::new(trait_ref.def_id(), // <------------+ tcx.mk_substs(dummy_substs)))); } try!(this.ensure_super_predicates(binding.span, trait_ref.def_id())); let mut candidates: Vec<ty::PolyTraitRef> = traits::supertraits(tcx, trait_ref.clone()) .filter(|r| this.trait_defines_associated_type_named(r.def_id(), binding.item_name)) .collect(); // If converting for an object type, then remove the dummy-ty from `Self` now. // Yuckety yuck. if self_ty.is_none() { for candidate in &mut candidates { let mut dummy_substs = candidate.0.substs.clone(); assert!(dummy_substs.self_ty() == Some(dummy_self_ty)); dummy_substs.types.pop(SelfSpace); *candidate = ty::Binder(Rc::new(ty::TraitRef::new(candidate.def_id(), tcx.mk_substs(dummy_substs)))); } } let candidate = try!(one_bound_for_assoc_type(tcx, candidates, &trait_ref.user_string(tcx), &token::get_name(binding.item_name), binding.span)); Ok(ty::Binder(ty::ProjectionPredicate { // <-------------------------+ projection_ty: ty::ProjectionTy { // | trait_ref: candidate.skip_binder().clone(), // binder is moved up here --+ item_name: binding.item_name, }, ty: binding.ty, })) } fn ast_path_to_ty<'tcx>( this: &AstConv<'tcx>, rscope: &RegionScope, span: Span, param_mode: PathParamMode, did: ast::DefId, item_segment: &ast::PathSegment) -> Ty<'tcx> { let tcx = this.tcx(); let (generics, decl_ty) = match this.get_item_type_scheme(span, did) { Ok(ty::TypeScheme { generics, ty: decl_ty }) => { (generics, decl_ty) } Err(ErrorReported) => { return tcx.types.err; } }; let substs = ast_path_substs_for_ty(this, rscope, span, param_mode, &generics, item_segment); // FIXME(#12938): This is a hack until we have full support for DST. if Some(did) == this.tcx().lang_items.owned_box() { assert_eq!(substs.types.len(TypeSpace), 1); return ty::mk_uniq(this.tcx(), *substs.types.get(TypeSpace, 0)); } decl_ty.subst(this.tcx(), &substs) } type TraitAndProjections<'tcx> = (ty::PolyTraitRef<'tcx>, Vec<ty::PolyProjectionPredicate<'tcx>>); fn ast_ty_to_trait_ref<'tcx>(this: &AstConv<'tcx>, rscope: &RegionScope, ty: &ast::Ty, bounds: &[ast::TyParamBound]) -> Result<TraitAndProjections<'tcx>, ErrorReported> { /*! * In a type like `Foo + Send`, we want to wait to collect the * full set of bounds before we make the object type, because we * need them to infer a region bound. (For example, if we tried * made a type from just `Foo`, then it wouldn't be enough to * infer a 'static bound, and hence the user would get an error.) * So this function is used when we're dealing with a sum type to * convert the LHS. It only accepts a type that refers to a trait * name, and reports an error otherwise. */ match ty.node { ast::TyPath(None, ref path) => { let def = match this.tcx().def_map.borrow().get(&ty.id) { Some(&def::PathResolution { base_def, depth: 0, .. 
}) => Some(base_def), _ => None }; match def { Some(def::DefTrait(trait_def_id)) => { let mut projection_bounds = Vec::new(); let trait_ref = object_path_to_poly_trait_ref(this, rscope, path.span, PathParamMode::Explicit, trait_def_id, path.segments.last().unwrap(), &mut projection_bounds); Ok((trait_ref, projection_bounds)) } _ => { span_err!(this.tcx().sess, ty.span, E0172, "expected a reference to a trait"); Err(ErrorReported) } } } _ => { span_err!(this.tcx().sess, ty.span, E0178, "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(ty)); match ty.node { ast::TyRptr(None, ref mut_ty) => { fileline_help!(this.tcx().sess, ty.span, "perhaps you meant `&{}({} +{})`? (per RFC 438)", ppaux::mutability_to_string(mut_ty.mutbl), pprust::ty_to_string(&*mut_ty.ty), pprust::bounds_to_string(bounds)); } ast::TyRptr(Some(ref lt), ref mut_ty) => { fileline_help!(this.tcx().sess, ty.span, "perhaps you meant `&{} {}({} +{})`? (per RFC 438)", pprust::lifetime_to_string(lt), ppaux::mutability_to_string(mut_ty.mutbl), pprust::ty_to_string(&*mut_ty.ty), pprust::bounds_to_string(bounds)); } _ => { fileline_help!(this.tcx().sess, ty.span, "perhaps you forgot parentheses? (per RFC 438)"); } } Err(ErrorReported) } } } fn trait_ref_to_object_type<'tcx>(this: &AstConv<'tcx>, rscope: &RegionScope, span: Span, trait_ref: ty::PolyTraitRef<'tcx>, projection_bounds: Vec<ty::PolyProjectionPredicate<'tcx>>, bounds: &[ast::TyParamBound]) -> Ty<'tcx> { let existential_bounds = conv_existential_bounds(this, rscope, span, trait_ref.clone(), projection_bounds, bounds); let result = make_object_type(this, span, trait_ref, existential_bounds); debug!("trait_ref_to_object_type: result={}", result.repr(this.tcx())); result } fn make_object_type<'tcx>(this: &AstConv<'tcx>, span: Span, principal: ty::PolyTraitRef<'tcx>, bounds: ty::ExistentialBounds<'tcx>) -> Ty<'tcx> { let tcx = this.tcx(); let object = ty::TyTrait { principal: principal, bounds: bounds }; let object_trait_ref = object.principal_trait_ref_with_self_ty(tcx, tcx.types.err); // ensure the super predicates and stop if we encountered an error if this.ensure_super_predicates(span, object.principal_def_id()).is_err() { return tcx.types.err; } let mut associated_types: FnvHashSet<(ast::DefId, ast::Name)> = traits::supertraits(tcx, object_trait_ref) .flat_map(|tr| { let trait_def = ty::lookup_trait_def(tcx, tr.def_id()); trait_def.associated_type_names .clone() .into_iter() .map(move |associated_type_name| (tr.def_id(), associated_type_name)) }) .collect(); for projection_bound in &object.bounds.projection_bounds { let pair = (projection_bound.0.projection_ty.trait_ref.def_id, projection_bound.0.projection_ty.item_name); associated_types.remove(&pair); } for (trait_def_id, name) in associated_types { span_err!(tcx.sess, span, E0191, "the value of the associated type `{}` (from the trait `{}`) must be specified", name.user_string(tcx), ty::item_path_str(tcx, trait_def_id)); } ty::mk_trait(tcx, object.principal, object.bounds) } fn report_ambiguous_associated_type(tcx: &ty::ctxt, span: Span, type_str: &str, trait_str: &str, name: &str) { span_err!(tcx.sess, span, E0223, "ambiguous associated type; specify the type using the syntax \ `<{} as {}>::{}`", type_str, trait_str, name); } // Search for a bound on a type parameter which includes the associated item // given by assoc_name. ty_param_node_id is the node id for the type parameter // (which might be `Self`, but only if it is the `Self` of a trait, not an // impl). 
This function will fail if there are no suitable bounds or there is // any ambiguity. fn find_bound_for_assoc_item<'tcx>(this: &AstConv<'tcx>, ty_param_node_id: ast::NodeId, assoc_name: ast::Name, span: Span) -> Result<ty::PolyTraitRef<'tcx>, ErrorReported> { let tcx = this.tcx(); let bounds = match this.get_type_parameter_bounds(span, ty_param_node_id) { Ok(v) => v, Err(ErrorReported) => { return Err(ErrorReported); } }; // Ensure the super predicates and stop if we encountered an error. if bounds.iter().any(|b| this.ensure_super_predicates(span, b.def_id()).is_err()) { return Err(ErrorReported); } // Check that there is exactly one way to find an associated type with the // correct name. let suitable_bounds: Vec<_> = traits::transitive_bounds(tcx, &bounds) .filter(|b| this.trait_defines_associated_type_named(b.def_id(), assoc_name)) .collect(); let ty_param_name = tcx.type_parameter_def(ty_param_node_id).name; one_bound_for_assoc_type(tcx, suitable_bounds, &token::get_name(ty_param_name), &token::get_name(assoc_name), span) } // Checks that bounds contains exactly one element and reports appropriate // errors otherwise. fn one_bound_for_assoc_type<'tcx>(tcx: &ty::ctxt<'tcx>, bounds: Vec<ty::PolyTraitRef<'tcx>>, ty_param_name: &str, assoc_name: &str, span: Span) -> Result<ty::PolyTraitRef<'tcx>, ErrorReported> { if bounds.len() == 0 { span_err!(tcx.sess, span, E0220, "associated type `{}` not found for `{}`", assoc_name, ty_param_name); return Err(ErrorReported); } if bounds.len() > 1 { span_err!(tcx.sess, span, E0221, "ambiguous associated type `{}` in bounds of `{}`", assoc_name, ty_param_name); for bound in &bounds { span_note!(tcx.sess, span, "associated type `{}` could derive from `{}`", ty_param_name, bound.user_string(tcx)); } } Ok(bounds[0].clone()) } // Create a type from a a path to an associated type. // For a path A::B::C::D, ty and ty_path_def are the type and def for A::B::C // and item_segment is the path segment for D. We return a type and a def for // the whole path. // Will fail except for T::A and Self::A; i.e., if ty/ty_path_def are not a type // parameter or Self. fn associated_path_def_to_ty<'tcx>(this: &AstConv<'tcx>, span: Span, ty: Ty<'tcx>, ty_path_def: def::Def, item_segment: &ast::PathSegment) -> (Ty<'tcx>, def::Def) { let tcx = this.tcx(); let assoc_name = item_segment.identifier.name; debug!("associated_path_def_to_ty: {}::{}", ty.repr(tcx), token::get_name(assoc_name)); check_path_args(tcx, slice::ref_slice(item_segment), NO_TPS | NO_REGIONS); // Find the type of the associated item, and the trait where the associated // item is declared. let bound = match (&ty.sty, ty_path_def) { (_, def::DefSelfTy(Some(trait_did), Some((impl_id, _)))) => { // `Self` in an impl of a trait - we have a concrete self type and a // trait reference. 
match tcx.map.expect_item(impl_id).node { ast::ItemImpl(_, _, _, Some(ref trait_ref), _, _) => { if this.ensure_super_predicates(span, trait_did).is_err() { return (tcx.types.err, ty_path_def); } let trait_segment = &trait_ref.path.segments.last().unwrap(); let trait_ref = ast_path_to_mono_trait_ref(this, &ExplicitRscope, span, PathParamMode::Explicit, trait_did, Some(ty), trait_segment); let candidates: Vec<ty::PolyTraitRef> = traits::supertraits(tcx, ty::Binder(trait_ref.clone())) .filter(|r| this.trait_defines_associated_type_named(r.def_id(), assoc_name)) .collect(); match one_bound_for_assoc_type(tcx, candidates, "Self", &token::get_name(assoc_name), span) { Ok(bound) => bound, Err(ErrorReported) => return (tcx.types.err, ty_path_def), } } _ => unreachable!() } } (&ty::ty_param(_), def::DefTyParam(..)) | (&ty::ty_param(_), def::DefSelfTy(Some(_), None)) => { // A type parameter or Self, we need to find the associated item from // a bound. let ty_param_node_id = ty_path_def.local_node_id(); match find_bound_for_assoc_item(this, ty_param_node_id, assoc_name, span) { Ok(bound) => bound, Err(ErrorReported) => return (tcx.types.err, ty_path_def), } } _ => { report_ambiguous_associated_type(tcx, span, &ty.user_string(tcx), "Trait", &token::get_name(assoc_name)); return (tcx.types.err, ty_path_def); } }; let trait_did = bound.0.def_id; let ty = this.projected_ty_from_poly_trait_ref(span, bound, assoc_name); let item_did = if trait_did.krate == ast::LOCAL_CRATE { // `ty::trait_items` used below requires information generated // by type collection, which may be in progress at this point. match tcx.map.expect_item(trait_did.node).node { ast::ItemTrait(_, _, _, ref trait_items) => { let item = trait_items.iter() .find(|i| i.ident.name == assoc_name) .expect("missing associated type"); ast_util::local_def(item.id) } _ => unreachable!() } } else { let trait_items = ty::trait_items(tcx, trait_did); let item = trait_items.iter().find(|i| i.name() == assoc_name); item.expect("missing associated type").def_id() }; (ty, def::DefAssociatedTy(trait_did, item_did)) } fn qpath_to_ty<'tcx>(this: &AstConv<'tcx>, rscope: &RegionScope, span: Span, param_mode: PathParamMode, opt_self_ty: Option<Ty<'tcx>>, trait_def_id: ast::DefId, trait_segment: &ast::PathSegment, item_segment: &ast::PathSegment) -> Ty<'tcx> { let tcx = this.tcx(); check_path_args(tcx, slice::ref_slice(item_segment), NO_TPS | NO_REGIONS); let self_ty = if let Some(ty) = opt_self_ty { ty } else { let path_str = ty::item_path_str(tcx, trait_def_id); report_ambiguous_associated_type(tcx, span, "Type", &path_str, &token::get_ident(item_segment.identifier)); return tcx.types.err; }; debug!("qpath_to_ty: self_type={}", self_ty.repr(tcx)); let trait_ref = ast_path_to_mono_trait_ref(this, rscope, span, param_mode, trait_def_id, Some(self_ty), trait_segment); debug!("qpath_to_ty: trait_ref={}", trait_ref.repr(tcx)); this.projected_ty(span, trait_ref, item_segment.identifier.name) } /// Convert a type supplied as value for a type argument from AST into our /// our internal representation. This is the same as `ast_ty_to_ty` but that /// it applies the object lifetime default. 
/// /// # Parameters /// /// * `this`, `rscope`: the surrounding context /// * `decl_generics`: the generics of the struct/enum/trait declaration being /// referenced /// * `index`: the index of the type parameter being instantiated from the list /// (we assume it is in the `TypeSpace`) /// * `region_substs`: a partial substitution consisting of /// only the region type parameters being supplied to this type. /// * `ast_ty`: the ast representation of the type being supplied pub fn ast_ty_arg_to_ty<'tcx>(this: &AstConv<'tcx>, rscope: &RegionScope, decl_generics: &ty::Generics<'tcx>, index: usize, region_substs: &Substs<'tcx>, ast_ty: &ast::Ty) -> Ty<'tcx> { let tcx = this.tcx(); if let Some(def) = decl_generics.types.opt_get(TypeSpace, index) { let object_lifetime_default = def.object_lifetime_default.subst(tcx, region_substs); let rscope1 = &ObjectLifetimeDefaultRscope::new(rscope, object_lifetime_default); ast_ty_to_ty(this, rscope1, ast_ty) } else { ast_ty_to_ty(this, rscope, ast_ty) } } // Check the base def in a PathResolution and convert it to a Ty. If there are // associated types in the PathResolution, these will need to be seperately // resolved. fn base_def_to_ty<'tcx>(this: &AstConv<'tcx>, rscope: &RegionScope, span: Span, param_mode: PathParamMode, def: &def::Def, opt_self_ty: Option<Ty<'tcx>>, base_segments: &[ast::PathSegment]) -> Ty<'tcx> { let tcx = this.tcx(); match *def { def::DefTrait(trait_def_id) => { // N.B. this case overlaps somewhat with // TyObjectSum, see that fn for details let mut projection_bounds = Vec::new(); let trait_ref = object_path_to_poly_trait_ref(this, rscope, span, param_mode, trait_def_id, base_segments.last().unwrap(), &mut projection_bounds); check_path_args(tcx, base_segments.init(), NO_TPS | NO_REGIONS); trait_ref_to_object_type(this, rscope, span, trait_ref, projection_bounds, &[]) } def::DefTy(did, _) | def::DefStruct(did) => { check_path_args(tcx, base_segments.init(), NO_TPS | NO_REGIONS); ast_path_to_ty(this, rscope, span, param_mode, did, base_segments.last().unwrap()) } def::DefTyParam(space, index, _, name) => { check_path_args(tcx, base_segments, NO_TPS | NO_REGIONS); ty::mk_param(tcx, space, index, name) } def::DefSelfTy(_, Some((_, self_ty_id))) => { // Self in impl (we know the concrete type). check_path_args(tcx, base_segments, NO_TPS | NO_REGIONS); if let Some(&ty) = tcx.ast_ty_to_ty_cache.borrow().get(&self_ty_id) { ty } else { tcx.sess.span_bug(span, "self type has not been fully resolved") } } def::DefSelfTy(Some(_), None) => { // Self in trait. check_path_args(tcx, base_segments, NO_TPS | NO_REGIONS); ty::mk_self_type(tcx) } def::DefAssociatedTy(trait_did, _) => { check_path_args(tcx, &base_segments[..base_segments.len()-2], NO_TPS | NO_REGIONS); qpath_to_ty(this, rscope, span, param_mode, opt_self_ty, trait_did, &base_segments[base_segments.len()-2], base_segments.last().unwrap()) } def::DefMod(id) => { // Used as sentinel by callers to indicate the `<T>::A::B::C` form. // FIXME(#22519) This part of the resolution logic should be // avoided entirely for that form, once we stop needed a Def // for `associated_path_def_to_ty`. // Fixing this will also let use resolve <Self>::Foo the same way we // resolve Self::Foo, at the moment we can't resolve the former because // we don't have the trait information around, which is just sad. 
if !base_segments.is_empty() { span_err!(tcx.sess, span, E0247, "found module name used as a type: {}", tcx.map.node_to_string(id.node)); return this.tcx().types.err; } opt_self_ty.expect("missing T in <T>::a::b::c") } def::DefPrimTy(prim_ty) => { prim_ty_to_ty(tcx, base_segments, prim_ty) } _ => { span_err!(tcx.sess, span, E0248, "found value name used as a type: {:?}", *def); return this.tcx().types.err; } } } // Note that both base_segments and assoc_segments may be empty, although not at // the same time. pub fn finish_resolving_def_to_ty<'tcx>(this: &AstConv<'tcx>, rscope: &RegionScope, span: Span, param_mode: PathParamMode, def: &def::Def, opt_self_ty: Option<Ty<'tcx>>, base_segments: &[ast::PathSegment], assoc_segments: &[ast::PathSegment]) -> Ty<'tcx> { let mut ty = base_def_to_ty(this, rscope, span, param_mode, def, opt_self_ty, base_segments); let mut def = *def; // If any associated type segments remain, attempt to resolve them. for segment in assoc_segments { if ty.sty == ty::ty_err { break; } // This is pretty bad (it will fail except for T::A and Self::A). let (a_ty, a_def) = associated_path_def_to_ty(this, span, ty, def, segment); ty = a_ty; def = a_def; } ty } /// Parses the programmer's textual representation of a type into our /// internal notion of a type. pub fn ast_ty_to_ty<'tcx>(this: &AstConv<'tcx>, rscope: &RegionScope, ast_ty: &ast::Ty) -> Ty<'tcx> { debug!("ast_ty_to_ty(ast_ty={})", ast_ty.repr(this.tcx())); let tcx = this.tcx(); if let Some(&ty) = tcx.ast_ty_to_ty_cache.borrow().get(&ast_ty.id) { return ty; } let typ = match ast_ty.node { ast::TyVec(ref ty) => { ty::mk_vec(tcx, ast_ty_to_ty(this, rscope, &**ty), None) } ast::TyObjectSum(ref ty, ref bounds) => { match ast_ty_to_trait_ref(this, rscope, &**ty, bounds) { Ok((trait_ref, projection_bounds)) => { trait_ref_to_object_type(this, rscope, ast_ty.span, trait_ref, projection_bounds, bounds) } Err(ErrorReported) => { this.tcx().types.err } } } ast::TyPtr(ref mt) => { ty::mk_ptr(tcx, ty::mt { ty: ast_ty_to_ty(this, rscope, &*mt.ty), mutbl: mt.mutbl }) } ast::TyRptr(ref region, ref mt) => { let r = opt_ast_region_to_region(this, rscope, ast_ty.span, region); debug!("ty_rptr r={}", r.repr(this.tcx())); let rscope1 = &ObjectLifetimeDefaultRscope::new( rscope, Some(ty::ObjectLifetimeDefault::Specific(r))); let t = ast_ty_to_ty(this, rscope1, &*mt.ty); ty::mk_rptr(tcx, tcx.mk_region(r), ty::mt {ty: t, mutbl: mt.mutbl}) } ast::TyTup(ref fields) => { let flds = fields.iter() .map(|t| ast_ty_to_ty(this, rscope, &**t)) .collect(); ty::mk_tup(tcx, flds) } ast::TyParen(ref typ) => ast_ty_to_ty(this, rscope, &**typ), ast::TyBareFn(ref bf) => { if bf.decl.variadic && bf.abi != abi::C { span_err!(tcx.sess, ast_ty.span, E0222, "variadic function must have C calling convention"); } let bare_fn = ty_of_bare_fn(this, bf.unsafety, bf.abi, &*bf.decl); ty::mk_bare_fn(tcx, None, tcx.mk_bare_fn(bare_fn)) } ast::TyPolyTraitRef(ref bounds) => { conv_ty_poly_trait_ref(this, rscope, ast_ty.span, bounds) } ast::TyPath(ref maybe_qself, ref path) => { let path_res = if let Some(&d) = tcx.def_map.borrow().get(&ast_ty.id) { d } else if let Some(ast::QSelf { position: 0, .. }) = *maybe_qself { // Create some fake resolution that can't possibly be a type. 
def::PathResolution { base_def: def::DefMod(ast_util::local_def(ast::CRATE_NODE_ID)), last_private: LastMod(AllPublic), depth: path.segments.len() } } else { tcx.sess.span_bug(ast_ty.span, &format!("unbound path {}", ast_ty.repr(tcx))) }; let def = path_res.base_def; let base_ty_end = path.segments.len() - path_res.depth; let opt_self_ty = maybe_qself.as_ref().map(|qself| { ast_ty_to_ty(this, rscope, &qself.ty) }); let ty = finish_resolving_def_to_ty(this, rscope, ast_ty.span, PathParamMode::Explicit, &def, opt_self_ty, &path.segments[..base_ty_end], &path.segments[base_ty_end..]); if path_res.depth != 0 && ty.sty != ty::ty_err { // Write back the new resolution. tcx.def_map.borrow_mut().insert(ast_ty.id, def::PathResolution { base_def: def, last_private: path_res.last_private, depth: 0 }); } ty } ast::TyFixedLengthVec(ref ty, ref e) => { match const_eval::eval_const_expr_partial(tcx, &**e, Some(tcx.types.usize)) { Ok(r) => { match r { const_eval::const_int(i) => ty::mk_vec(tcx, ast_ty_to_ty(this, rscope, &**ty), Some(i as usize)), const_eval::const_uint(i) => ty::mk_vec(tcx, ast_ty_to_ty(this, rscope, &**ty), Some(i as usize)), _ => { span_err!(tcx.sess, ast_ty.span, E0249, "expected constant expr for array length"); this.tcx().types.err } } } Err(ref r) => { let subspan = ast_ty.span.lo <= r.span.lo && r.span.hi <= ast_ty.span.hi; span_err!(tcx.sess, r.span, E0250, "array length constant evaluation error: {}", r.description()); if !subspan { span_note!(tcx.sess, ast_ty.span, "for array length here") } this.tcx().types.err } } } ast::TyTypeof(ref _e) => { tcx.sess.span_bug(ast_ty.span, "typeof is reserved but unimplemented"); } ast::TyInfer => { // TyInfer also appears as the type of arguments or return // values in a ExprClosure, or as // the type of local variables. Both of these cases are // handled specially and will not descend into this routine. this.ty_infer(ast_ty.span) } }; tcx.ast_ty_to_ty_cache.borrow_mut().insert(ast_ty.id, typ); return typ; } pub fn ty_of_arg<'tcx>(this: &AstConv<'tcx>, rscope: &RegionScope, a: &ast::Arg, expected_ty: Option<Ty<'tcx>>) -> Ty<'tcx> { match a.ty.node { ast::TyInfer if expected_ty.is_some() => expected_ty.unwrap(), ast::TyInfer => this.ty_infer(a.ty.span), _ => ast_ty_to_ty(this, rscope, &*a.ty), } } struct SelfInfo<'a, 'tcx> { untransformed_self_ty: Ty<'tcx>, explicit_self: &'a ast::ExplicitSelf, } pub fn ty_of_method<'tcx>(this: &AstConv<'tcx>, sig: &ast::MethodSig, untransformed_self_ty: Ty<'tcx>) -> (ty::BareFnTy<'tcx>, ty::ExplicitSelfCategory) { let self_info = Some(SelfInfo { untransformed_self_ty: untransformed_self_ty, explicit_self: &sig.explicit_self, }); let (bare_fn_ty, optional_explicit_self_category) = ty_of_method_or_bare_fn(this, sig.unsafety, sig.abi, self_info, &sig.decl); (bare_fn_ty, optional_explicit_self_category.unwrap()) } pub fn ty_of_bare_fn<'tcx>(this: &AstConv<'tcx>, unsafety: ast::Unsafety, abi: abi::Abi, decl: &ast::FnDecl) -> ty::BareFnTy<'tcx> { let (bare_fn_ty, _) = ty_of_method_or_bare_fn(this, unsafety, abi, None, decl); bare_fn_ty } fn ty_of_method_or_bare_fn<'a, 'tcx>(this: &AstConv<'tcx>, unsafety: ast::Unsafety, abi: abi::Abi, opt_self_info: Option<SelfInfo<'a, 'tcx>>, decl: &ast::FnDecl) -> (ty::BareFnTy<'tcx>, Option<ty::ExplicitSelfCategory>) { debug!("ty_of_method_or_bare_fn"); // New region names that appear inside of the arguments of the function // declaration are bound to that function type. 
let rb = rscope::BindingRscope::new(); // `implied_output_region` is the region that will be assumed for any // region parameters in the return type. In accordance with the rules for // lifetime elision, we can determine it in two ways. First (determined // here), if self is by-reference, then the implied output region is the // region of the self parameter. let mut explicit_self_category_result = None; let (self_ty, mut implied_output_region) = match opt_self_info { None => (None, None), Some(self_info) => { // This type comes from an impl or trait; no late-bound // regions should be present. assert!(!self_info.untransformed_self_ty.has_escaping_regions()); // Figure out and record the explicit self category. let explicit_self_category = determine_explicit_self_category(this, &rb, &self_info); explicit_self_category_result = Some(explicit_self_category); match explicit_self_category { ty::StaticExplicitSelfCategory => { (None, None) } ty::ByValueExplicitSelfCategory => { (Some(self_info.untransformed_self_ty), None) } ty::ByReferenceExplicitSelfCategory(region, mutability) => { (Some(ty::mk_rptr(this.tcx(), this.tcx().mk_region(region), ty::mt { ty: self_info.untransformed_self_ty, mutbl: mutability })), Some(region)) } ty::ByBoxExplicitSelfCategory => { (Some(ty::mk_uniq(this.tcx(), self_info.untransformed_self_ty)), None) } } } }; // HACK(eddyb) replace the fake self type in the AST with the actual type. let input_params = if self_ty.is_some() { &decl.inputs[1..] } else { &decl.inputs[..] }; let input_tys = input_params.iter().map(|a| ty_of_arg(this, &rb, a, None)); let input_pats: Vec<String> = input_params.iter() .map(|a| pprust::pat_to_string(&*a.pat)) .collect(); let self_and_input_tys: Vec<Ty> = self_ty.into_iter().chain(input_tys).collect(); // Second, if there was exactly one lifetime (either a substitution or a // reference) in the arguments, then any anonymous regions in the output // have that lifetime. let lifetimes_for_params = if implied_output_region.is_none() { let input_tys = if self_ty.is_some() { // Skip the first argument if `self` is present. &self_and_input_tys[1..] } else { &self_and_input_tys[..] }; let (ior, lfp) = find_implied_output_region(input_tys, input_pats); implied_output_region = ior; lfp } else { vec![] }; let output_ty = match decl.output { ast::Return(ref output) if output.node == ast::TyInfer => ty::FnConverging(this.ty_infer(output.span)), ast::Return(ref output) => ty::FnConverging(convert_ty_with_lifetime_elision(this, implied_output_region, lifetimes_for_params, &**output)), ast::DefaultReturn(..) => ty::FnConverging(ty::mk_nil(this.tcx())), ast::NoReturn(..) 
=> ty::FnDiverging }; (ty::BareFnTy { unsafety: unsafety, abi: abi, sig: ty::Binder(ty::FnSig { inputs: self_and_input_tys, output: output_ty, variadic: decl.variadic }), }, explicit_self_category_result) } fn determine_explicit_self_category<'a, 'tcx>(this: &AstConv<'tcx>, rscope: &RegionScope, self_info: &SelfInfo<'a, 'tcx>) -> ty::ExplicitSelfCategory { return match self_info.explicit_self.node { ast::SelfStatic => ty::StaticExplicitSelfCategory, ast::SelfValue(_) => ty::ByValueExplicitSelfCategory, ast::SelfRegion(ref lifetime, mutability, _) => { let region = opt_ast_region_to_region(this, rscope, self_info.explicit_self.span, lifetime); ty::ByReferenceExplicitSelfCategory(region, mutability) } ast::SelfExplicit(ref ast_type, _) => { let explicit_type = ast_ty_to_ty(this, rscope, &**ast_type); // We wish to (for now) categorize an explicit self // declaration like `self: SomeType` into either `self`, // `&self`, `&mut self`, or `Box<self>`. We do this here // by some simple pattern matching. A more precise check // is done later in `check_method_self_type()`. // // Examples: // // ``` // impl Foo for &T { // // Legal declarations: // fn method1(self: &&T); // ByReferenceExplicitSelfCategory // fn method2(self: &T); // ByValueExplicitSelfCategory // fn method3(self: Box<&T>); // ByBoxExplicitSelfCategory // // // Invalid cases will be caught later by `check_method_self_type`: // fn method_err1(self: &mut T); // ByReferenceExplicitSelfCategory // } // ``` // // To do the check we just count the number of "modifiers" // on each type and compare them. If they are the same or // the impl has more, we call it "by value". Otherwise, we // look at the outermost modifier on the method decl and // call it by-ref, by-box as appropriate. For method1, for // example, the impl type has one modifier, but the method // type has two, so we end up with // ByReferenceExplicitSelfCategory. 
let impl_modifiers = count_modifiers(self_info.untransformed_self_ty); let method_modifiers = count_modifiers(explicit_type); debug!("determine_explicit_self_category(self_info.untransformed_self_ty={} \ explicit_type={} \ modifiers=({},{})", self_info.untransformed_self_ty.repr(this.tcx()), explicit_type.repr(this.tcx()), impl_modifiers, method_modifiers); if impl_modifiers >= method_modifiers { ty::ByValueExplicitSelfCategory } else { match explicit_type.sty { ty::ty_rptr(r, mt) => ty::ByReferenceExplicitSelfCategory(*r, mt.mutbl), ty::ty_uniq(_) => ty::ByBoxExplicitSelfCategory, _ => ty::ByValueExplicitSelfCategory, } } } }; fn count_modifiers(ty: Ty) -> usize { match ty.sty { ty::ty_rptr(_, mt) => count_modifiers(mt.ty) + 1, ty::ty_uniq(t) => count_modifiers(t) + 1, _ => 0, } } } pub fn ty_of_closure<'tcx>( this: &AstConv<'tcx>, unsafety: ast::Unsafety, decl: &ast::FnDecl, abi: abi::Abi, expected_sig: Option<ty::FnSig<'tcx>>) -> ty::ClosureTy<'tcx> { debug!("ty_of_closure(expected_sig={})", expected_sig.repr(this.tcx())); // new region names that appear inside of the fn decl are bound to // that function type let rb = rscope::BindingRscope::new(); let input_tys: Vec<_> = decl.inputs.iter().enumerate().map(|(i, a)| { let expected_arg_ty = expected_sig.as_ref().and_then(|e| { // no guarantee that the correct number of expected args // were supplied if i < e.inputs.len() { Some(e.inputs[i]) } else { None } }); ty_of_arg(this, &rb, a, expected_arg_ty) }).collect(); let expected_ret_ty = expected_sig.map(|e| e.output); let is_infer = match decl.output { ast::Return(ref output) if output.node == ast::TyInfer => true, ast::DefaultReturn(..) => true, _ => false }; let output_ty = match decl.output { _ if is_infer && expected_ret_ty.is_some() => expected_ret_ty.unwrap(), _ if is_infer => ty::FnConverging(this.ty_infer(decl.output.span())), ast::Return(ref output) => ty::FnConverging(ast_ty_to_ty(this, &rb, &**output)), ast::DefaultReturn(..) => unreachable!(), ast::NoReturn(..) => ty::FnDiverging }; debug!("ty_of_closure: input_tys={}", input_tys.repr(this.tcx())); debug!("ty_of_closure: output_ty={}", output_ty.repr(this.tcx())); ty::ClosureTy { unsafety: unsafety, abi: abi, sig: ty::Binder(ty::FnSig {inputs: input_tys, output: output_ty, variadic: decl.variadic}), } } /// Given an existential type like `Foo+'a+Bar`, this routine converts the `'a` and `Bar` intos an /// `ExistentialBounds` struct. The `main_trait_refs` argument specifies the `Foo` -- it is absent /// for closures. Eventually this should all be normalized, I think, so that there is no "main /// trait ref" and instead we just have a flat list of bounds as the existential type. 
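/// A hedged example, not from the original source: for an object type
/// written as
///
/// ```ignore
/// Box<Foo + 'a + Send>
/// ```
///
/// `Foo` is the principal trait reference passed in separately, while `'a`
/// lands in the region bound and `Send` in the builtin bounds of the
/// resulting `ExistentialBounds`.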
fn conv_existential_bounds<'tcx>( this: &AstConv<'tcx>, rscope: &RegionScope, span: Span, principal_trait_ref: ty::PolyTraitRef<'tcx>, projection_bounds: Vec<ty::PolyProjectionPredicate<'tcx>>, ast_bounds: &[ast::TyParamBound]) -> ty::ExistentialBounds<'tcx> { let partitioned_bounds = partition_bounds(this.tcx(), span, ast_bounds); conv_existential_bounds_from_partitioned_bounds( this, rscope, span, principal_trait_ref, projection_bounds, partitioned_bounds) } fn conv_ty_poly_trait_ref<'tcx>( this: &AstConv<'tcx>, rscope: &RegionScope, span: Span, ast_bounds: &[ast::TyParamBound]) -> Ty<'tcx> { let mut partitioned_bounds = partition_bounds(this.tcx(), span, &ast_bounds[..]); let mut projection_bounds = Vec::new(); let main_trait_bound = if !partitioned_bounds.trait_bounds.is_empty() { let trait_bound = partitioned_bounds.trait_bounds.remove(0); instantiate_poly_trait_ref(this, rscope, trait_bound, None, &mut projection_bounds) } else { span_err!(this.tcx().sess, span, E0224, "at least one non-builtin trait is required for an object type"); return this.tcx().types.err; }; let bounds = conv_existential_bounds_from_partitioned_bounds(this, rscope, span, main_trait_bound.clone(), projection_bounds, partitioned_bounds); make_object_type(this, span, main_trait_bound, bounds) } pub fn conv_existential_bounds_from_partitioned_bounds<'tcx>( this: &AstConv<'tcx>, rscope: &RegionScope, span: Span, principal_trait_ref: ty::PolyTraitRef<'tcx>, mut projection_bounds: Vec<ty::PolyProjectionPredicate<'tcx>>, // Empty for boxed closures partitioned_bounds: PartitionedBounds) -> ty::ExistentialBounds<'tcx> { let PartitionedBounds { builtin_bounds, trait_bounds, region_bounds } = partitioned_bounds; if !trait_bounds.is_empty() { let b = &trait_bounds[0]; span_err!(this.tcx().sess, b.trait_ref.path.span, E0225, "only the builtin traits can be used as closure or object bounds"); } let region_bound = compute_object_lifetime_bound(this, rscope, span, &region_bounds, principal_trait_ref, builtin_bounds); ty::sort_bounds_list(&mut projection_bounds); ty::ExistentialBounds { region_bound: region_bound, builtin_bounds: builtin_bounds, projection_bounds: projection_bounds, } } /// Given the bounds on an object, determines what single region bound /// (if any) we can use to summarize this type. The basic idea is that we will use the bound the /// user provided, if they provided one, and otherwise search the supertypes of trait bounds for /// region bounds. It may be that we can derive no bound at all, in which case we return `None`. fn compute_object_lifetime_bound<'tcx>( this: &AstConv<'tcx>, rscope: &RegionScope, span: Span, explicit_region_bounds: &[&ast::Lifetime], principal_trait_ref: ty::PolyTraitRef<'tcx>, builtin_bounds: ty::BuiltinBounds) -> ty::Region { let tcx = this.tcx(); debug!("compute_opt_region_bound(explicit_region_bounds={:?}, \ principal_trait_ref={}, builtin_bounds={})", explicit_region_bounds, principal_trait_ref.repr(tcx), builtin_bounds.repr(tcx)); if explicit_region_bounds.len() > 1 { span_err!(tcx.sess, explicit_region_bounds[1].span, E0226, "only a single explicit lifetime bound is permitted"); } if explicit_region_bounds.len() != 0 { // Explicitly specified region bound. Use that. let r = explicit_region_bounds[0]; return ast_region_to_region(tcx, r); } if let Err(ErrorReported) = this.ensure_super_predicates(span,principal_trait_ref.def_id()) { return ty::ReStatic; } // No explicit region bound specified. Therefore, examine trait // bounds and see if we can derive region bounds from those. 
let derived_region_bounds = object_region_bounds(tcx, &principal_trait_ref, builtin_bounds); // If there are no derived region bounds, then report back that we // can find no region bound. if derived_region_bounds.len() == 0 { match rscope.object_lifetime_default(span) { Some(r) => { return r; } None => { span_err!(this.tcx().sess, span, E0228, "the lifetime bound for this object type cannot be deduced \ from context; please supply an explicit bound"); return ty::ReStatic; } } } // If any of the derived region bounds are 'static, that is always // the best choice. if derived_region_bounds.iter().any(|r| ty::ReStatic == *r) { return ty::ReStatic; } // Determine whether there is exactly one unique region in the set // of derived region bounds. If so, use that. Otherwise, report an // error. let r = derived_region_bounds[0]; if derived_region_bounds[1..].iter().any(|r1| r != *r1) { span_err!(tcx.sess, span, E0227, "ambiguous lifetime bound, explicit lifetime bound required"); } return r; } /// Given an object type like `SomeTrait+Send`, computes the lifetime /// bounds that must hold on the elided self type. These are derived /// from the declarations of `SomeTrait`, `Send`, and friends -- if /// they declare `trait SomeTrait : 'static`, for example, then /// `'static` would appear in the list. The hard work is done by /// `ty::required_region_bounds`, see that for more information. pub fn object_region_bounds<'tcx>( tcx: &ty::ctxt<'tcx>, principal: &ty::PolyTraitRef<'tcx>, others: ty::BuiltinBounds) -> Vec<ty::Region> { // Since we don't actually *know* the self type for an object, // this "open(err)" serves as a kind of dummy standin -- basically // a skolemized type. let open_ty = ty::mk_infer(tcx, ty::FreshTy(0)); // Note that we preserve the overall binding levels here. assert!(!open_ty.has_escaping_regions()); let substs = tcx.mk_substs(principal.0.substs.with_self_ty(open_ty)); let trait_refs = vec!(ty::Binder(Rc::new(ty::TraitRef::new(principal.0.def_id, substs)))); let param_bounds = ty::ParamBounds { region_bounds: Vec::new(), builtin_bounds: others, trait_bounds: trait_refs, projection_bounds: Vec::new(), // not relevant to computing region bounds }; let predicates = ty::predicates(tcx, open_ty, &param_bounds); ty::required_region_bounds(tcx, open_ty, predicates) } pub struct PartitionedBounds<'a> { pub builtin_bounds: ty::BuiltinBounds, pub trait_bounds: Vec<&'a ast::PolyTraitRef>, pub region_bounds: Vec<&'a ast::Lifetime>, } /// Divides a list of bounds from the AST into three groups: builtin bounds (Copy, Sized etc), /// general trait bounds, and region bounds. pub fn partition_bounds<'a>(tcx: &ty::ctxt, _span: Span, ast_bounds: &'a [ast::TyParamBound]) -> PartitionedBounds<'a> { let mut builtin_bounds = ty::empty_builtin_bounds(); let mut region_bounds = Vec::new(); let mut trait_bounds = Vec::new(); for ast_bound in ast_bounds { match *ast_bound { ast::TraitTyParamBound(ref b, ast::TraitBoundModifier::None) => { match ::lookup_full_def(tcx, b.trait_ref.path.span, b.trait_ref.ref_id) { def::DefTrait(trait_did) => { if ty::try_add_builtin_trait(tcx, trait_did, &mut builtin_bounds) { let segments = &b.trait_ref.path.segments; let parameters = &segments[segments.len() - 1].parameters; if parameters.types().len() > 0 { check_type_argument_count(tcx, b.trait_ref.path.span, parameters.types().len(), 0, 0); } if parameters.lifetimes().len() > 0 { report_lifetime_number_error(tcx, b.trait_ref.path.span, parameters.lifetimes().len(), 0); } continue; // success } } _ => { // Not a trait? 
that's an error, but it'll get // reported later. } } trait_bounds.push(b); } ast::TraitTyParamBound(_, ast::TraitBoundModifier::Maybe) => {} ast::RegionTyParamBound(ref l) => { region_bounds.push(l); } } } PartitionedBounds { builtin_bounds: builtin_bounds, trait_bounds: trait_bounds, region_bounds: region_bounds, } } fn prohibit_projections<'tcx>(tcx: &ty::ctxt<'tcx>, bindings: &[ConvertedBinding<'tcx>]) { for binding in bindings.iter().take(1) { span_err!(tcx.sess, binding.span, E0229, "associated type bindings are not allowed here"); } } fn check_type_argument_count(tcx: &ty::ctxt, span: Span, supplied: usize, required: usize, accepted: usize) { if supplied < required { let expected = if required < accepted { "expected at least" } else { "expected" }; span_err!(tcx.sess, span, E0243, "wrong number of type arguments: {} {}, found {}", expected, required, supplied); } else if supplied > accepted { let expected = if required < accepted { "expected at most" } else { "expected" }; span_err!(tcx.sess, span, E0244, "wrong number of type arguments: {} {}, found {}", expected, accepted, supplied); } } fn report_lifetime_number_error(tcx: &ty::ctxt, span: Span, number: usize, expected: usize) { span_err!(tcx.sess, span, E0107, "wrong number of lifetime parameters: expected {}, found {}", expected, number); }
package de.uniulm.omi.cloudiator.sword.multicloud.service;

import com.google.common.collect.ImmutableSet;
import com.google.inject.Inject;
import de.uniulm.omi.cloudiator.sword.domain.Pricing;
import de.uniulm.omi.cloudiator.sword.multicloud.pricing.PricingSupplierFactory;
import de.uniulm.omi.cloudiator.sword.service.PricingService;

import static com.google.common.base.Preconditions.checkNotNull;

public class MultiCloudPricingService implements PricingService {

    private final CloudRegistry cloudRegistry;

    @Inject
    private PricingSupplierFactory pricingSupplierFactory;

    @Inject
    public MultiCloudPricingService(CloudRegistry cloudRegistry) {
        this.cloudRegistry = checkNotNull(cloudRegistry, "cloudRegistry is null");
    }

    @Override
    public Iterable<Pricing> listPricing() {
        // Pricing is currently only supported for AWS: look up the first registered
        // aws-ec2 cloud and, if one exists, collect the prices reported by its supplier.
        final ImmutableSet.Builder<Pricing> builder = ImmutableSet.builder();
        cloudRegistry
                .list()
                .stream()
                .filter(cloud -> cloud.api().providerName().equals("aws-ec2"))
                .findFirst()
                .ifPresent(cloud -> builder.addAll(
                        pricingSupplierFactory.createAWSPricingSupplier(cloud.credential()).get()));
        return builder.build();
    }
}
<!--
  Generated template for the PicturePage page.

  See http://ionicframework.com/docs/components/#navigation for more info on
  Ionic pages and navigation.
-->
<ion-header>
  <ion-navbar>
    <div class="adjust-horizontal inline">
      <button class="inline-right" (click)="closeModal()" ion-button clear>Close</button>
    </div>
  </ion-navbar>
</ion-header>

<ion-content padding>
  <ion-slides *ngIf="fileList && fileList.length>0" pager="true" zoom="true">
    <ion-slide *ngFor="let file of fileList">
      <img [src]="ext+'/containers/'+file.container+'/download/'+file.name" alt="">
    </ion-slide>
  </ion-slides>
</ion-content>
<!-- This file is machine generated: DO NOT EDIT! --> # Sparse Tensors Note: Functions taking `Tensor` arguments can also take anything accepted by [`tf.convert_to_tensor`](framework.md#convert_to_tensor). [TOC] ## Sparse Tensor Representation TensorFlow supports a `SparseTensor` representation for data that is sparse in multiple dimensions. Contrast this representation with `IndexedSlices`, which is efficient for representing tensors that are sparse in their first dimension, and dense along all other dimensions. - - - ### `class tf.SparseTensor` {#SparseTensor} Represents a sparse tensor. TensorFlow represents a sparse tensor as three separate dense tensors: `indices`, `values`, and `shape`. In Python, the three tensors are collected into a `SparseTensor` class for ease of use. If you have separate `indices`, `values`, and `shape` tensors, wrap them in a `SparseTensor` object before passing to the ops below. Concretely, the sparse tensor `SparseTensor(indices, values, shape)` is * `indices`: A 2-D int64 tensor of shape `[N, ndims]`. * `values`: A 1-D tensor of any type and shape `[N]`. * `shape`: A 1-D int64 tensor of shape `[ndims]`. where `N` and `ndims` are the number of values, and number of dimensions in the `SparseTensor` respectively. The corresponding dense tensor satisfies ```python dense.shape = shape dense[tuple(indices[i])] = values[i] ``` By convention, `indices` should be sorted in row-major order (or equivalently lexicographic order on the tuples `indices[i]`). This is not enforced when `SparseTensor` objects are constructed, but most ops assume correct ordering. If the ordering of sparse tensor `st` is wrong, a fixed version can be obtained by calling `tf.sparse_reorder(st)`. Example: The sparse tensor ```python SparseTensor(indices=[[0, 0], [1, 2]], values=[1, 2], shape=[3, 4]) ``` represents the dense tensor ```python [[1, 0, 0, 0] [0, 0, 2, 0] [0, 0, 0, 0]] ``` - - - #### `tf.SparseTensor.__init__(indices, values, shape)` {#SparseTensor.__init__} Creates a `SparseTensor`. ##### Args: * <b>`indices`</b>: A 2-D int64 tensor of shape `[N, ndims]`. * <b>`values`</b>: A 1-D tensor of any type and shape `[N]`. * <b>`shape`</b>: A 1-D int64 tensor of shape `[ndims]`. ##### Returns: A `SparseTensor` - - - #### `tf.SparseTensor.indices` {#SparseTensor.indices} The indices of non-zero values in the represented dense tensor. ##### Returns: A 2-D Tensor of int64 with shape `[N, ndims]`, where `N` is the number of non-zero values in the tensor, and `ndims` is the rank. - - - #### `tf.SparseTensor.values` {#SparseTensor.values} The non-zero values in the represented dense tensor. ##### Returns: A 1-D Tensor of any data type. - - - #### `tf.SparseTensor.shape` {#SparseTensor.shape} A 1-D Tensor of int64 representing the shape of the dense tensor. - - - #### `tf.SparseTensor.dtype` {#SparseTensor.dtype} The `DType` of elements in this tensor. - - - #### `tf.SparseTensor.op` {#SparseTensor.op} The `Operation` that produces `values` as an output. - - - #### `tf.SparseTensor.graph` {#SparseTensor.graph} The `Graph` that contains the index, value, and shape tensors. #### Other Methods - - - #### `tf.SparseTensor.eval(feed_dict=None, session=None)` {#SparseTensor.eval} Evaluates this sparse tensor in a `Session`. Calling this method will execute all preceding operations that produce the inputs needed for the operation that produces this tensor. 
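A brief sketch of evaluating a `SparseTensor` with an explicitly created session may help here; the indices and values below are arbitrary placeholders, not taken from the reference:

```python
import tensorflow as tf

st = tf.SparseTensor(indices=[[0, 0], [1, 2]], values=[1, 2], shape=[3, 4])

with tf.Session() as sess:
    result = st.eval(session=sess)   # returns a SparseTensorValue
    print(result.indices, result.values, result.shape)
```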
*N.B.* Before invoking `SparseTensor.eval()`, its graph must have been launched in a session, and either a default session must be available, or `session` must be specified explicitly. ##### Args: * <b>`feed_dict`</b>: A dictionary that maps `Tensor` objects to feed values. See [`Session.run()`](../../api_docs/python/client.md#Session.run) for a description of the valid feed values. * <b>`session`</b>: (Optional.) The `Session` to be used to evaluate this sparse tensor. If none, the default session will be used. ##### Returns: A `SparseTensorValue` object. - - - #### `tf.SparseTensor.from_value(cls, sparse_tensor_value)` {#SparseTensor.from_value} - - - ### `class tf.SparseTensorValue` {#SparseTensorValue} SparseTensorValue(indices, values, shape) - - - #### `tf.SparseTensorValue.indices` {#SparseTensorValue.indices} Alias for field number 0 - - - #### `tf.SparseTensorValue.shape` {#SparseTensorValue.shape} Alias for field number 2 - - - #### `tf.SparseTensorValue.values` {#SparseTensorValue.values} Alias for field number 1 ## Conversion - - - ### `tf.sparse_to_dense(sparse_indices, output_shape, sparse_values, default_value=0, validate_indices=True, name=None)` {#sparse_to_dense} Converts a sparse representation into a dense tensor. Builds an array `dense` with shape `output_shape` such that ```python # If sparse_indices is scalar dense[i] = (i == sparse_indices ? sparse_values : default_value) # If sparse_indices is a vector, then for each i dense[sparse_indices[i]] = sparse_values[i] # If sparse_indices is an n by d matrix, then for each i in [0, n) dense[sparse_indices[i][0], ..., sparse_indices[i][d-1]] = sparse_values[i] ``` All other values in `dense` are set to `default_value`. If `sparse_values` is a scalar, all sparse indices are set to this single value. Indices should be sorted in lexicographic order, and indices must not contain any repeats. If `validate_indices` is True, these properties are checked during execution. ##### Args: * <b>`sparse_indices`</b>: A 0-D, 1-D, or 2-D `Tensor` of type `int32` or `int64`. `sparse_indices[i]` contains the complete index where `sparse_values[i]` will be placed. * <b>`output_shape`</b>: A 1-D `Tensor` of the same type as `sparse_indices`. Shape of the dense output tensor. * <b>`sparse_values`</b>: A 0-D or 1-D `Tensor`. Values corresponding to each row of `sparse_indices`, or a scalar value to be used for all sparse indices. * <b>`default_value`</b>: A 0-D `Tensor` of the same type as `sparse_values`. Value to set for indices not specified in `sparse_indices`. Defaults to zero. * <b>`validate_indices`</b>: A boolean value. If True, indices are checked to make sure they are sorted in lexicographic order and that there are no repeats. * <b>`name`</b>: A name for the operation (optional). ##### Returns: Dense `Tensor` of shape `output_shape`. Has the same type as `sparse_values`. - - - ### `tf.sparse_tensor_to_dense(sp_input, default_value=0, validate_indices=True, name=None)` {#sparse_tensor_to_dense} Converts a `SparseTensor` into a dense tensor. This op is a convenience wrapper around `sparse_to_dense` for `SparseTensor`s. For example, if `sp_input` has shape `[3, 5]` and non-empty string values: [0, 1]: a [0, 3]: b [2, 0]: c and `default_value` is `x`, then the output will be a dense `[3, 5]` string tensor with values: [[x a x b x] [x x x x x] [c x x x x]] Indices must be without repeats. This is only tested if validate_indices is True. ##### Args: * <b>`sp_input`</b>: The input `SparseTensor`. 
* <b>`default_value`</b>: Scalar value to set for indices not specified in `sp_input`. Defaults to zero. * <b>`validate_indices`</b>: A boolean value. If `True`, indices are checked to make sure they are sorted in lexicographic order and that there are no repeats. * <b>`name`</b>: A name prefix for the returned tensors (optional). ##### Returns: A dense tensor with shape `sp_input.shape` and values specified by the non-empty values in `sp_input`. Indices not in `sp_input` are assigned `default_value`. ##### Raises: * <b>`TypeError`</b>: If `sp_input` is not a `SparseTensor`. - - - ### `tf.sparse_to_indicator(sp_input, vocab_size, name=None)` {#sparse_to_indicator} Converts a `SparseTensor` of ids into a dense bool indicator tensor. The last dimension of `sp_input.indices` is discarded and replaced with the values of `sp_input`. If `sp_input.shape = [D0, D1, ..., Dn, K]`, then `output.shape = [D0, D1, ..., Dn, vocab_size]`, where output[d_0, d_1, ..., d_n, sp_input[d_0, d_1, ..., d_n, k]] = True and False elsewhere in `output`. For example, if `sp_input.shape = [2, 3, 4]` with non-empty values: [0, 0, 0]: 0 [0, 1, 0]: 10 [1, 0, 3]: 103 [1, 1, 2]: 150 [1, 1, 3]: 149 [1, 1, 4]: 150 [1, 2, 1]: 121 and `vocab_size = 200`, then the output will be a `[2, 3, 200]` dense bool tensor with False everywhere except at positions (0, 0, 0), (0, 1, 10), (1, 0, 103), (1, 1, 149), (1, 1, 150), (1, 2, 121). Note that repeats are allowed in the input SparseTensor. This op is useful for converting `SparseTensor`s into dense formats for compatibility with ops that expect dense tensors. The input `SparseTensor` must be in row-major order. ##### Args: * <b>`sp_input`</b>: A `SparseTensor` with `values` property of type `int32` or `int64`. * <b>`vocab_size`</b>: A scalar int64 Tensor (or Python int) containing the new size of the last dimension, `all(0 <= sp_input.values < vocab_size)`. * <b>`name`</b>: A name prefix for the returned tensors (optional) ##### Returns: A dense bool indicator tensor representing the indices with specified value. ##### Raises: * <b>`TypeError`</b>: If `sp_input` is not a `SparseTensor`. - - - ### `tf.sparse_merge(sp_ids, sp_values, vocab_size, name=None)` {#sparse_merge} Combines a batch of feature ids and values into a single `SparseTensor`. The most common use case for this function occurs when feature ids and their corresponding values are stored in `Example` protos on disk. `parse_example` will return a batch of ids and a batch of values, and this function joins them into a single logical `SparseTensor` for use in functions such as `sparse_tensor_dense_matmul`, `sparse_to_dense`, etc. The `SparseTensor` returned by this function has the following properties: - `indices` is equivalent to `sp_ids.indices` with the last dimension discarded and replaced with `sp_ids.values`. - `values` is simply `sp_values.values`. - If `sp_ids.shape = [D0, D1, ..., Dn, K]`, then `output.shape = [D0, D1, ..., Dn, vocab_size]`. 
For example, consider the following feature vectors: vector1 = [-3, 0, 0, 0, 0, 0] vector2 = [ 0, 1, 0, 4, 1, 0] vector3 = [ 5, 0, 0, 9, 0, 0] These might be stored sparsely in the following Example protos by storing only the feature ids (column number if the vectors are treated as a matrix) of the non-zero elements and the corresponding values: examples = [Example(features={ "ids": Feature(int64_list=Int64List(value=[0])), "values": Feature(float_list=FloatList(value=[-3]))}), Example(features={ "ids": Feature(int64_list=Int64List(value=[1, 4, 3])), "values": Feature(float_list=FloatList(value=[1, 1, 4]))}), Example(features={ "ids": Feature(int64_list=Int64List(value=[0, 3])), "values": Feature(float_list=FloatList(value=[5, 9]))})] The result of calling parse_example on these examples will produce a dictionary with entries for "ids" and "values". Passing those two objects to this function along with vocab_size=6, will produce a `SparseTensor` that sparsely represents all three instances. Namely, the `indices` property will contain the coordinates of the non-zero entries in the feature matrix (the first dimension is the row number in the matrix, i.e., the index within the batch, and the second dimension is the column number, i.e., the feature id); `values` will contain the actual values. `shape` will be the shape of the original matrix, i.e., (3, 6). For our example above, the output will be equal to: SparseTensor(indices=[[0, 0], [1, 1], [1, 3], [1, 4], [2, 0], [2, 3]], values=[-3, 1, 4, 1, 5, 9], shape=[3, 6]) ##### Args: * <b>`sp_ids`</b>: A `SparseTensor` with `values` property of type `int32` or `int64`. * <b>`sp_values`</b>: A`SparseTensor` of any type. * <b>`vocab_size`</b>: A scalar `int64` Tensor (or Python int) containing the new size of the last dimension, `all(0 <= sp_ids.values < vocab_size)`. * <b>`name`</b>: A name prefix for the returned tensors (optional) ##### Returns: A `SparseTensor` compactly representing a batch of feature ids and values, useful for passing to functions that expect such a `SparseTensor`. ##### Raises: * <b>`TypeError`</b>: If `sp_ids` or `sp_values` are not a `SparseTensor`. ## Manipulation - - - ### `tf.sparse_concat(concat_dim, sp_inputs, name=None, expand_nonconcat_dim=False)` {#sparse_concat} Concatenates a list of `SparseTensor` along the specified dimension. Concatenation is with respect to the dense versions of each sparse input. It is assumed that each inputs is a `SparseTensor` whose elements are ordered along increasing dimension number. If expand_nonconcat_dim is False, all inputs' shapes must match, except for the concat dimension. If expand_nonconcat_dim is True, then inputs' shapes are allowd to vary among all inputs. The `indices`, `values`, and `shapes` lists must have the same length. If expand_nonconcat_dim is False, then the output shape is identical to the inputs', except along the concat dimension, where it is the sum of the inputs' sizes along that dimension. If expand_nonconcat_dim is True, then the output shape along the non-concat dimensions will be expand to be the largest among all inputs, and it is the sum of the inputs sizes along the concat dimension. The output elements will be resorted to preserve the sort order along increasing dimension number. This op runs in `O(M log M)` time, where `M` is the total number of non-empty values across all inputs. This is due to the need for an internal sort in order to concatenate efficiently across an arbitrary dimension. 
For example, if `concat_dim = 1` and the inputs are sp_inputs[0]: shape = [2, 3] [0, 2]: "a" [1, 0]: "b" [1, 1]: "c" sp_inputs[1]: shape = [2, 4] [0, 1]: "d" [0, 2]: "e" then the output will be shape = [2, 7] [0, 2]: "a" [0, 4]: "d" [0, 5]: "e" [1, 0]: "b" [1, 1]: "c" Graphically this is equivalent to doing [ a] concat [ d e ] = [ a d e ] [b c ] [ ] [b c ] Another example, if 'concat_dim = 1' and the inputs are sp_inputs[0]: shape = [3, 3] [0, 2]: "a" [1, 0]: "b" [2, 1]: "c" sp_inputs[1]: shape = [2, 4] [0, 1]: "d" [0, 2]: "e" if expand_nonconcat_dim = False, this will result in an error. But if expand_nonconcat_dim = True, this will result in: shape = [3, 7] [0, 2]: "a" [0, 4]: "d" [0, 5]: "e" [1, 0]: "b" [2, 1]: "c" Graphically this is equivalent to doing [ a] concat [ d e ] = [ a d e ] [b ] [ ] [b ] [ c ] [ c ] ##### Args: * <b>`concat_dim`</b>: Dimension to concatenate along. * <b>`sp_inputs`</b>: List of `SparseTensor` to concatenate. * <b>`name`</b>: A name prefix for the returned tensors (optional). * <b>`expand_nonconcat_dim`</b>: Whether to allow the expansion in the non-concat dimensions. Defaulted to False. ##### Returns: A `SparseTensor` with the concatenated output. ##### Raises: * <b>`TypeError`</b>: If `sp_inputs` is not a list of `SparseTensor`. - - - ### `tf.sparse_reorder(sp_input, name=None)` {#sparse_reorder} Reorders a `SparseTensor` into the canonical, row-major ordering. Note that by convention, all sparse ops preserve the canonical ordering along increasing dimension number. The only time ordering can be violated is during manual manipulation of the indices and values to add entries. Reordering does not affect the shape of the `SparseTensor`. For example, if `sp_input` has shape `[4, 5]` and `indices` / `values`: [0, 3]: b [0, 1]: a [3, 1]: d [2, 0]: c then the output will be a `SparseTensor` of shape `[4, 5]` and `indices` / `values`: [0, 1]: a [0, 3]: b [2, 0]: c [3, 1]: d ##### Args: * <b>`sp_input`</b>: The input `SparseTensor`. * <b>`name`</b>: A name prefix for the returned tensors (optional) ##### Returns: A `SparseTensor` with the same shape and non-empty values, but in canonical ordering. ##### Raises: * <b>`TypeError`</b>: If `sp_input` is not a `SparseTensor`. - - - ### `tf.sparse_reshape(sp_input, shape, name=None)` {#sparse_reshape} Reshapes a `SparseTensor` to represent values in a new dense shape. This operation has the same semantics as `reshape` on the represented dense tensor. The indices of non-empty values in `sp_input` are recomputed based on the new dense shape, and a new `SparseTensor` is returned containing the new indices and new shape. The order of non-empty values in `sp_input` is unchanged. If one component of `shape` is the special value -1, the size of that dimension is computed so that the total dense size remains constant. At most one component of `shape` can be -1. The number of dense elements implied by `shape` must be the same as the number of dense elements originally represented by `sp_input`. For example, if `sp_input` has shape `[2, 3, 6]` and `indices` / `values`: [0, 0, 0]: a [0, 0, 1]: b [0, 1, 0]: c [1, 0, 0]: d [1, 2, 3]: e and `shape` is `[9, -1]`, then the output will be a `SparseTensor` of shape `[9, 4]` and `indices` / `values`: [0, 0]: a [0, 1]: b [1, 2]: c [4, 2]: d [8, 1]: e ##### Args: * <b>`sp_input`</b>: The input `SparseTensor`. * <b>`shape`</b>: A 1-D (vector) int64 `Tensor` specifying the new dense shape of the represented `SparseTensor`. 
* <b>`name`</b>: A name prefix for the returned tensors (optional) ##### Returns: A `SparseTensor` with the same non-empty values but with indices calculated by the new dense shape. ##### Raises: * <b>`TypeError`</b>: If `sp_input` is not a `SparseTensor`. - - - ### `tf.sparse_split(split_dim, num_split, sp_input, name=None)` {#sparse_split} Split a `SparseTensor` into `num_split` tensors along `split_dim`. If the `sp_input.shape[split_dim]` is not an integer multiple of `num_split` each slice starting from 0:`shape[split_dim] % num_split` gets extra one dimension. For example, if `split_dim = 1` and `num_split = 2` and the input is: input_tensor = shape = [2, 7] [ a d e ] [b c ] Graphically the output tensors are: output_tensor[0] = [ a ] [b c ] output_tensor[1] = [ d e ] [ ] ##### Args: * <b>`split_dim`</b>: A 0-D `int32` `Tensor`. The dimension along which to split. * <b>`num_split`</b>: A Python integer. The number of ways to split. * <b>`sp_input`</b>: The `SparseTensor` to split. * <b>`name`</b>: A name for the operation (optional). ##### Returns: `num_split` `SparseTensor` objects resulting from splitting `value`. ##### Raises: * <b>`TypeError`</b>: If `sp_input` is not a `SparseTensor`. - - - ### `tf.sparse_retain(sp_input, to_retain)` {#sparse_retain} Retains specified non-empty values within a `SparseTensor`. For example, if `sp_input` has shape `[4, 5]` and 4 non-empty string values: [0, 1]: a [0, 3]: b [2, 0]: c [3, 1]: d and `to_retain = [True, False, False, True]`, then the output will be a `SparseTensor` of shape `[4, 5]` with 2 non-empty values: [0, 1]: a [3, 1]: d ##### Args: * <b>`sp_input`</b>: The input `SparseTensor` with `N` non-empty elements. * <b>`to_retain`</b>: A bool vector of length `N` with `M` true values. ##### Returns: A `SparseTensor` with the same shape as the input and `M` non-empty elements corresponding to the true positions in `to_retain`. ##### Raises: * <b>`TypeError`</b>: If `sp_input` is not a `SparseTensor`. - - - ### `tf.sparse_reset_shape(sp_input, new_shape=None)` {#sparse_reset_shape} Resets the shape of a `SparseTensor` with indices and values unchanged. If `new_shape` is None, returns a copy of `sp_input` with its shape reset to the tight bounding box of `sp_input`. If `new_shape` is provided, then it must be larger or equal in all dimensions compared to the shape of `sp_input`. When this condition is met, the returned SparseTensor will have its shape reset to `new_shape` and its indices and values unchanged from that of `sp_input.` For example: Consider a `sp_input` with shape [2, 3, 5]: [0, 0, 1]: a [0, 1, 0]: b [0, 2, 2]: c [1, 0, 3]: d - It is an error to set `new_shape` as [3, 7] since this represents a rank-2 tensor while `sp_input` is rank-3. This is either a ValueError during graph construction (if both shapes are known) or an OpError during run time. - Setting `new_shape` as [2, 3, 6] will be fine as this shape is larger or eqaul in every dimension compared to the original shape [2, 3, 5]. - On the other hand, setting new_shape as [2, 3, 4] is also an error: The third dimension is smaller than the original shape [2, 3, 5] (and an `InvalidArgumentError` will be raised). - If `new_shape` is None, the returned SparseTensor will have a shape [2, 3, 4], which is the tight bounding box of `sp_input`. ##### Args: * <b>`sp_input`</b>: The input `SparseTensor`. * <b>`new_shape`</b>: None or a vector representing the new shape for the returned `SpraseTensor`. ##### Returns: A `SparseTensor` indices and values unchanged from `input_sp`. 
Its shape is `new_shape` if that is set. Otherwise it is the tight bounding box of `input_sp` ##### Raises: * <b>`TypeError`</b>: If `sp_input` is not a `SparseTensor`. * <b>`ValueError`</b>: If `new_shape` represents a tensor with a different rank from that of `sp_input` (if shapes are known when graph is constructed). * <b>`OpError`</b>: - If `new_shape` has dimension sizes that are too small. - If shapes are not known during graph construction time, and during run time it is found out that the ranks do not match. - - - ### `tf.sparse_fill_empty_rows(sp_input, default_value, name=None)` {#sparse_fill_empty_rows} Fills empty rows in the input 2-D `SparseTensor` with a default value. This op adds entries with the specified `default_value` at index `[row, 0]` for any row in the input that does not already have a value. For example, suppose `sp_input` has shape `[5, 6]` and non-empty values: [0, 1]: a [0, 3]: b [2, 0]: c [3, 1]: d Rows 1 and 4 are empty, so the output will be of shape `[5, 6]` with values: [0, 1]: a [0, 3]: b [1, 0]: default_value [2, 0]: c [3, 1]: d [4, 0]: default_value Note that the input may have empty columns at the end, with no effect on this op. The output `SparseTensor` will be in row-major order and will have the same shape as the input. This op also returns an indicator vector such that empty_row_indicator[i] = True iff row i was an empty row. ##### Args: * <b>`sp_input`</b>: A `SparseTensor` with shape `[N, M]`. * <b>`default_value`</b>: The value to fill for empty rows, with the same type as `sp_input.` * <b>`name`</b>: A name prefix for the returned tensors (optional) ##### Returns: * <b>`sp_ordered_output`</b>: A `SparseTensor` with shape `[N, M]`, and with all empty rows filled in with `default_value`. * <b>`empty_row_indicator`</b>: A bool vector of length `N` indicating whether each input row was empty. ##### Raises: * <b>`TypeError`</b>: If `sp_input` is not a `SparseTensor`. ## Reduction - - - ### `tf.sparse_reduce_sum(sp_input, reduction_axes=None, keep_dims=False)` {#sparse_reduce_sum} Computes the sum of elements across dimensions of a SparseTensor. This Op takes a SparseTensor and is the sparse counterpart to `tf.reduce_sum()`. In particular, this Op also returns a dense `Tensor` instead of a sparse one. Reduces `sp_input` along the dimensions given in `reduction_axes`. Unless `keep_dims` is true, the rank of the tensor is reduced by 1 for each entry in `reduction_axes`. If `keep_dims` is true, the reduced dimensions are retained with length 1. If `reduction_axes` has no entries, all dimensions are reduced, and a tensor with a single element is returned. Additionally, the axes can be negative, similar to the indexing rules in Python. For example: ```python # 'x' represents [[1, ?, 1] # [?, 1, ?]] # where ? is implictly-zero. tf.sparse_reduce_sum(x) ==> 3 tf.sparse_reduce_sum(x, 0) ==> [1, 1, 1] tf.sparse_reduce_sum(x, 1) ==> [2, 1] # Can also use -1 as the axis. tf.sparse_reduce_sum(x, 1, keep_dims=True) ==> [[2], [1]] tf.sparse_reduce_sum(x, [0, 1]) ==> 3 ``` ##### Args: * <b>`sp_input`</b>: The SparseTensor to reduce. Should have numeric type. * <b>`reduction_axes`</b>: The dimensions to reduce; list or scalar. If `None` (the default), reduces all dimensions. * <b>`keep_dims`</b>: If true, retain reduced dimensions with length 1. ##### Returns: The reduced Tensor. ## Math Operations - - - ### `tf.sparse_add(a, b, thresh=0)` {#sparse_add} Adds two tensors, at least one of each is a `SparseTensor`. 
If one `SparseTensor` and one `Tensor` are passed in, returns a `Tensor`. If both arguments are `SparseTensor`s, this returns a `SparseTensor`. The order of arguments does not matter. Use vanilla `tf.add()` for adding two dense `Tensor`s. The indices of any input `SparseTensor` are assumed ordered in standard lexicographic order. If this is not the case, before this step run `SparseReorder` to restore index ordering. If both arguments are sparse, we perform "clipping" as follows. By default, if two values sum to zero at some index, the output `SparseTensor` would still include that particular location in its index, storing a zero in the corresponding value slot. To override this, callers can specify `thresh`, indicating that if the sum has a magnitude strictly smaller than `thresh`, its corresponding value and index would then not be included. In particular, `thresh == 0.0` (default) means everything is kept and actual thresholding happens only for a positive value. For example, suppose the logical sum of two sparse operands is (densified): [ 2] [.1 0] [ 6 -.2] Then, - thresh == 0 (the default): all 5 index/value pairs will be returned. - thresh == 0.11: only .1 and 0 will vanish, and the remaining three index/value pairs will be returned. - thresh == 0.21: .1, 0, and -.2 will vanish. ##### Args: * <b>`a`</b>: The first operand; `SparseTensor` or `Tensor`. * <b>`b`</b>: The second operand; `SparseTensor` or `Tensor`. At least one operand must be sparse. * <b>`thresh`</b>: A 0-D `Tensor`. The magnitude threshold that determines if an output value/index pair takes space. Its dtype should match that of the values if they are real; if the latter are complex64/complex128, then the dtype should be float32/float64, correspondingly. ##### Returns: A `SparseTensor` or a `Tensor`, representing the sum. ##### Raises: * <b>`TypeError`</b>: If both `a` and `b` are `Tensor`s. Use `tf.add()` instead. - - - ### `tf.sparse_softmax(sp_input, name=None)` {#sparse_softmax} Applies softmax to a batched N-D `SparseTensor`. The inputs represent an N-D SparseTensor with logical shape `[..., B, C]` (where `N >= 2`), and with indices sorted in the canonical lexicographic order. This op is equivalent to applying the normal `tf.nn.softmax()` to each innermost logical submatrix with shape `[B, C]`, but with the catch that *the implicitly zero elements do not participate*. Specifically, the algorithm is equivalent to: (1) Applies `tf.nn.softmax()` to a densified view of each innermost submatrix with shape `[B, C]`, along the size-C dimension; (2) Masks out the original implicitly-zero locations; (3) Renormalizes the remaining elements. Hence, the `SparseTensor` result has exactly the same non-zero indices and shape. Example: ```python # First batch: # [? e.] # [1. ? ] # Second batch: # [e ? ] # [e e ] shape = [2, 2, 2] # 3-D SparseTensor values = np.asarray([[[0., np.e], [1., 0.]], [[np.e, 0.], [np.e, np.e]]]) indices = np.vstack(np.where(values)).astype(np.int64).T result = tf.sparse_softmax(tf.SparseTensor(indices, values, shape)) # ...returning a 3-D SparseTensor, equivalent to: # [? 1.] [1 ?] # [1. ? ] and [.5 .5] # where ? means implicitly zero. ``` ##### Args: * <b>`sp_input`</b>: N-D `SparseTensor`, where `N >= 2`. * <b>`name`</b>: optional name of the operation. ##### Returns: * <b>`output`</b>: N-D `SparseTensor` representing the results. 
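Returning to `tf.sparse_add` above, the interaction between operand types and `thresh` can be sketched as follows; the operand values are invented for illustration so that one sum cancels exactly to zero and another falls below the threshold:

```python
import tensorflow as tf

sp_a = tf.SparseTensor(indices=[[0, 0], [1, 1]], values=[0.1, 2.0], shape=[2, 2])
sp_b = tf.SparseTensor(indices=[[1, 0], [1, 1]], values=[3.0, -2.0], shape=[2, 2])

# Both operands sparse: the result is a SparseTensor. With thresh=0.2, the 0.1
# entry and the exactly-cancelled 0.0 entry at [1, 1] are dropped, leaving only
# the 3.0 at [1, 0].
sparse_sum = tf.sparse_add(sp_a, sp_b, thresh=0.2)

# One sparse and one dense operand: the result is a dense Tensor.
dense_sum = tf.sparse_add(sp_a, tf.ones([2, 2]))
```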
- - - ### `tf.sparse_tensor_dense_matmul(sp_a, b, adjoint_a=False, adjoint_b=False, name=None)` {#sparse_tensor_dense_matmul} Multiply SparseTensor (of rank 2) "A" by dense matrix "B". No validity checking is performed on the indices of A. However, the following input format is recommended for optimal behavior: if adjoint_a == false: A should be sorted in lexicographically increasing order. Use sparse_reorder if you're not sure. if adjoint_a == true: A should be sorted in order of increasing dimension 1 (i.e., "column major" order instead of "row major" order). Deciding when to use sparse_tensor_dense_matmul vs. matmul(sp_a=True): There are a number of questions to ask in the decision process, including: * Will the SparseTensor A fit in memory if densified? * Is the column count of the product large (>> 1)? * Is the density of A larger than approximately 15%? If the answer to several of these questions is yes, consider converting the SparseTensor to a dense one and using tf.matmul with sp_a=True. This operation tends to perform well when A is more sparse, if the column size of the product is small (e.g. matrix-vector multiplication), if sp_a.shape takes on large values. Below is a rough speed comparison between sparse_tensor_dense_matmul, labelled 'sparse', and matmul(sp_a=True), labelled 'dense'. For purposes of the comparison, the time spent converting from a SparseTensor to a dense Tensor is not included, so it is overly conservative with respect to the time ratio. Benchmark system: CPU: Intel Ivybridge with HyperThreading (6 cores) dL1:32KB dL2:256KB dL3:12MB GPU: NVidia Tesla k40c Compiled with: -c opt --config=cuda --copt=-mavx ```tensorflow/python/sparse_tensor_dense_matmul_op_test --benchmarks A sparse [m, k] with % nonzero values between 1% and 80% B dense [k, n] % nnz n gpu m k dt(dense) dt(sparse) dt(sparse)/dt(dense) 0.01 1 True 100 100 0.000221166 0.00010154 0.459112 0.01 1 True 100 1000 0.00033858 0.000109275 0.322745 0.01 1 True 1000 100 0.000310557 9.85661e-05 0.317385 0.01 1 True 1000 1000 0.0008721 0.000100875 0.115669 0.01 1 False 100 100 0.000208085 0.000107603 0.51711 0.01 1 False 100 1000 0.000327112 9.51118e-05 0.290762 0.01 1 False 1000 100 0.000308222 0.00010345 0.335635 0.01 1 False 1000 1000 0.000865721 0.000101397 0.117124 0.01 10 True 100 100 0.000218522 0.000105537 0.482958 0.01 10 True 100 1000 0.000340882 0.000111641 0.327506 0.01 10 True 1000 100 0.000315472 0.000117376 0.372064 0.01 10 True 1000 1000 0.000905493 0.000123263 0.136128 0.01 10 False 100 100 0.000221529 9.82571e-05 0.44354 0.01 10 False 100 1000 0.000330552 0.000112615 0.340687 0.01 10 False 1000 100 0.000341277 0.000114097 0.334324 0.01 10 False 1000 1000 0.000819944 0.000120982 0.147549 0.01 25 True 100 100 0.000207806 0.000105977 0.509981 0.01 25 True 100 1000 0.000322879 0.00012921 0.400181 0.01 25 True 1000 100 0.00038262 0.000141583 0.370035 0.01 25 True 1000 1000 0.000865438 0.000202083 0.233504 0.01 25 False 100 100 0.000209401 0.000104696 0.499979 0.01 25 False 100 1000 0.000321161 0.000130737 0.407076 0.01 25 False 1000 100 0.000377012 0.000136801 0.362856 0.01 25 False 1000 1000 0.000861125 0.00020272 0.235413 0.2 1 True 100 100 0.000206952 9.69219e-05 0.46833 0.2 1 True 100 1000 0.000348674 0.000147475 0.422959 0.2 1 True 1000 100 0.000336908 0.00010122 0.300439 0.2 1 True 1000 1000 0.001022 0.000203274 0.198898 0.2 1 False 100 100 0.000207532 9.5412e-05 0.459746 0.2 1 False 100 1000 0.000356127 0.000146824 0.41228 0.2 1 False 1000 100 0.000322664 0.000100918 0.312764 0.2 1 False 
1000 1000 0.000998987 0.000203442 0.203648 0.2 10 True 100 100 0.000211692 0.000109903 0.519165 0.2 10 True 100 1000 0.000372819 0.000164321 0.440753 0.2 10 True 1000 100 0.000338651 0.000144806 0.427596 0.2 10 True 1000 1000 0.00108312 0.000758876 0.70064 0.2 10 False 100 100 0.000215727 0.000110502 0.512231 0.2 10 False 100 1000 0.000375419 0.0001613 0.429653 0.2 10 False 1000 100 0.000336999 0.000145628 0.432132 0.2 10 False 1000 1000 0.00110502 0.000762043 0.689618 0.2 25 True 100 100 0.000218705 0.000129913 0.594009 0.2 25 True 100 1000 0.000394794 0.00029428 0.745402 0.2 25 True 1000 100 0.000404483 0.0002693 0.665788 0.2 25 True 1000 1000 0.0012002 0.00194494 1.62052 0.2 25 False 100 100 0.000221494 0.0001306 0.589632 0.2 25 False 100 1000 0.000396436 0.000297204 0.74969 0.2 25 False 1000 100 0.000409346 0.000270068 0.659754 0.2 25 False 1000 1000 0.00121051 0.00193737 1.60046 0.5 1 True 100 100 0.000214981 9.82111e-05 0.456836 0.5 1 True 100 1000 0.000415328 0.000223073 0.537101 0.5 1 True 1000 100 0.000358324 0.00011269 0.314492 0.5 1 True 1000 1000 0.00137612 0.000437401 0.317851 0.5 1 False 100 100 0.000224196 0.000101423 0.452386 0.5 1 False 100 1000 0.000400987 0.000223286 0.556841 0.5 1 False 1000 100 0.000368825 0.00011224 0.304318 0.5 1 False 1000 1000 0.00136036 0.000429369 0.31563 0.5 10 True 100 100 0.000222125 0.000112308 0.505608 0.5 10 True 100 1000 0.000461088 0.00032357 0.701753 0.5 10 True 1000 100 0.000394624 0.000225497 0.571422 0.5 10 True 1000 1000 0.00158027 0.00190898 1.20801 0.5 10 False 100 100 0.000232083 0.000114978 0.495418 0.5 10 False 100 1000 0.000454574 0.000324632 0.714146 0.5 10 False 1000 100 0.000379097 0.000227768 0.600817 0.5 10 False 1000 1000 0.00160292 0.00190168 1.18638 0.5 25 True 100 100 0.00023429 0.000151703 0.647501 0.5 25 True 100 1000 0.000497462 0.000598873 1.20386 0.5 25 True 1000 100 0.000460778 0.000557038 1.20891 0.5 25 True 1000 1000 0.00170036 0.00467336 2.74845 0.5 25 False 100 100 0.000228981 0.000155334 0.678371 0.5 25 False 100 1000 0.000496139 0.000620789 1.25124 0.5 25 False 1000 100 0.00045473 0.000551528 1.21287 0.5 25 False 1000 1000 0.00171793 0.00467152 2.71927 0.8 1 True 100 100 0.000222037 0.000105301 0.47425 0.8 1 True 100 1000 0.000410804 0.000329327 0.801664 0.8 1 True 1000 100 0.000349735 0.000131225 0.375212 0.8 1 True 1000 1000 0.00139219 0.000677065 0.48633 0.8 1 False 100 100 0.000214079 0.000107486 0.502085 0.8 1 False 100 1000 0.000413746 0.000323244 0.781261 0.8 1 False 1000 100 0.000348983 0.000131983 0.378193 0.8 1 False 1000 1000 0.00136296 0.000685325 0.50282 0.8 10 True 100 100 0.000229159 0.00011825 0.516017 0.8 10 True 100 1000 0.000498845 0.000532618 1.0677 0.8 10 True 1000 100 0.000383126 0.00029935 0.781336 0.8 10 True 1000 1000 0.00162866 0.00307312 1.88689 0.8 10 False 100 100 0.000230783 0.000124958 0.541452 0.8 10 False 100 1000 0.000493393 0.000550654 1.11606 0.8 10 False 1000 100 0.000377167 0.000298581 0.791642 0.8 10 False 1000 1000 0.00165795 0.00305103 1.84024 0.8 25 True 100 100 0.000233496 0.000175241 0.75051 0.8 25 True 100 1000 0.00055654 0.00102658 1.84458 0.8 25 True 1000 100 0.000463814 0.000783267 1.68875 0.8 25 True 1000 1000 0.00186905 0.00755344 4.04132 0.8 25 False 100 100 0.000240243 0.000175047 0.728625 0.8 25 False 100 1000 0.000578102 0.00104499 1.80763 0.8 25 False 1000 100 0.000485113 0.000776849 1.60138 0.8 25 False 1000 1000 0.00211448 0.00752736 3.55992 ``` ##### Args: * <b>`sp_a`</b>: SparseTensor A, of rank 2. 
* <b>`b`</b>: A dense Matrix with the same dtype as sp_a. * <b>`adjoint_a`</b>: Use the adjoint of A in the matrix multiply. If A is complex, this is transpose(conj(A)). Otherwise it's transpose(A). * <b>`adjoint_b`</b>: Use the adjoint of B in the matrix multiply. If B is complex, this is transpose(conj(B)). Otherwise it's transpose(B). * <b>`name`</b>: A name prefix for the returned tensors (optional) ##### Returns: A dense matrix (pseudo-code in dense np.matrix notation): A = A.H if adjoint_a else A B = B.H if adjoint_b else B return A*B - - - ### `tf.sparse_maximum(sp_a, sp_b, name=None)` {#sparse_maximum} Returns the element-wise max of two SparseTensors. Assumes the two SparseTensors have the same shape, i.e., no broadcasting. Example: ```python sp_zero = ops.SparseTensor([[0]], [0], [7]) sp_one = ops.SparseTensor([[1]], [1], [7]) res = tf.sparse_maximum(sp_zero, sp_one).eval() # "res" should be equal to SparseTensor([[0], [1]], [0, 1], [7]). ``` ##### Args: * <b>`sp_a`</b>: a `SparseTensor` operand whose dtype is real, and indices lexicographically ordered. * <b>`sp_b`</b>: the other `SparseTensor` operand with the same requirements (and the same shape). * <b>`name`</b>: optional name of the operation. ##### Returns: * <b>`output`</b>: the output SparseTensor. - - - ### `tf.sparse_minimum(sp_a, sp_b, name=None)` {#sparse_minimum} Returns the element-wise min of two SparseTensors. Assumes the two SparseTensors have the same shape, i.e., no broadcasting. Example: ```python sp_zero = ops.SparseTensor([[0]], [0], [7]) sp_one = ops.SparseTensor([[1]], [1], [7]) res = tf.sparse_minimum(sp_zero, sp_one).eval() # "res" should be equal to SparseTensor([[0], [1]], [0, 0], [7]). ``` ##### Args: * <b>`sp_a`</b>: a `SparseTensor` operand whose dtype is real, and indices lexicographically ordered. * <b>`sp_b`</b>: the other `SparseTensor` operand with the same requirements (and the same shape). * <b>`name`</b>: optional name of the operation. ##### Returns: * <b>`output`</b>: the output SparseTensor.
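To make the `sparse_tensor_dense_matmul` section above concrete, here is a small sketch with made-up values (a 2x3 sparse matrix times a 3x2 dense matrix):

```python
import tensorflow as tf

# Dense view of sp_a is [[1, 0, 0], [0, 0, 2]].
sp_a = tf.SparseTensor(indices=[[0, 0], [1, 2]], values=[1.0, 2.0], shape=[2, 3])
b = tf.constant([[1.0, 2.0],
                 [3.0, 4.0],
                 [5.0, 6.0]])

product = tf.sparse_tensor_dense_matmul(sp_a, b)

with tf.Session() as sess:
    print(sess.run(product))   # -> [[1., 2.], [10., 12.]]
```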
/* * Copyright 2011-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.transform; import java.lang.reflect.Constructor; import com.amazonaws.AmazonServiceException; public abstract class AbstractErrorUnmarshaller<T> implements Unmarshaller<AmazonServiceException, T> { /** * The type of AmazonServiceException that will be instantiated. Subclasses * specialized for a specific type of exception can control this through the * protected constructor. */ protected final Class<? extends AmazonServiceException> exceptionClass; /** * Constructs a new error unmarshaller that will unmarshall error responses * into AmazonServiceException objects. */ public AbstractErrorUnmarshaller() { this(AmazonServiceException.class); } /** * Constructs a new error unmarshaller that will unmarshall error responses * into objects of the specified class, extending AmazonServiceException. * * @param exceptionClass * The subclass of AmazonServiceException which will be * instantiated and populated by this class. */ public AbstractErrorUnmarshaller(Class<? extends AmazonServiceException> exceptionClass) { this.exceptionClass = exceptionClass; } /** * Constructs a new exception object of the type specified in this class's * constructor and sets the specified error message. * * @param message * The error message to set in the new exception object. * * @return A new exception object of the type specified in this class's * constructor and sets the specified error message. * * @throws Exception * If there are any problems using reflection to invoke the * exception class's constructor. */ protected AmazonServiceException newException(String message) throws Exception { Constructor<? extends AmazonServiceException> constructor = exceptionClass.getConstructor(String.class); return constructor.newInstance(message); } }
package com.icfcc.cache.support; import com.icfcc.cache.Cache; import java.util.Collection; /** * Simple cache manager working against a given collection of caches. * Useful for testing or simple caching declarations. * * @author Costin Leau * @since 3.1 */ public class SimpleCacheManager extends AbstractCacheManager { private Collection<? extends Cache> caches; /** * Specify the collection of Cache instances to use for this CacheManager. */ public void setCaches(Collection<? extends Cache> caches) { this.caches = caches; } @Override protected Collection<? extends Cache> loadCaches() { return this.caches; } }
--- title: Floating IPs redirect_from: latest/usage/openstack/floating-ips canonical_url: 'https://docs.projectcalico.org/v2.6/usage/openstack/floating-ips' --- networking-calico includes beta support for floating IPs. Currently this requires running {{site.prodname}} as a Neutron core plugin (i.e. `core_plugin = calico`) instead of as an ML2 mechanism driver. > **Note**: We would like it to work as an ML2 mechanism driver too—patches > and/or advice welcome! {: .alert .alert-info} To set up a floating IP, you need the same pattern of Neutron data model objects as you do for Neutron in general, which means: - a tenant network, with an instance attached to it, that will be the target of the floating IP - a Neutron router, with the tenant network connected to it - a provider network with `router:external True` that is set as the router's gateway (e.g. with `neutron router-gateway-set`), and with a subnet with a CIDR that floating IPs will be allocated from - a floating IP, allocated from the provider network subnet, that maps onto the instance attached to the tenant network. For example: # Create tenant network and subnet neutron net-create --shared calico neutron subnet-create --gateway 10.65.0.1 --enable-dhcp --ip-version 4 --name calico-v4 calico 10.65.0.0/24 # Boot a VM on that network, and find its Neutron port ID. nova boot [...] neutron port-list # Create external network and subnet - this is where floating # IPs will be allocated from. neutron net-create public --router:external True neutron subnet-create public 172.16.1.0/24 # Create a router connecting the tenant and external networks. neutron router-create router1 neutron router-interface-add router1 <tenant-subnet-id> neutron router-gateway-set router1 public # Create a floating IP and associate it with the target VM. neutron floatingip-create public neutron floatingip-associate <floatingip-id> <target-VM-port-id> Then the {{site.prodname}} agents will arrange that the floating IP is routed to the instance's compute host, and then DNAT'd to the instance's fixed IP address: core@compute-node01:~$ ip r default via 10.240.0.1 dev eth0 10.65.0.13 dev tap9a7e0868-da scope link 10.65.0.14 via 192.168.8.4 dev l2tpeth8-3 proto bird 10.65.0.23 via 192.168.8.4 dev l2tpeth8-3 proto bird 10.240.0.1 dev eth0 scope link 172.16.1.3 dev tap9a7e0868-da scope link 192.168.8.0/24 dev l2tpeth8-3 proto kernel scope link src 192.168.8.3 192.168.122.0/24 dev virbr0 proto kernel scope link src 192.168.122.1 core@compute-node01:~$ sudo iptables -L -n -v -t nat [...] Chain felix-FIP-DNAT (2 references) pkts bytes target prot opt in out source destination 0 0 DNAT all -- * * 0.0.0.0/0 172.16.1.3 to:10.65.0.13 Chain felix-FIP-SNAT (1 references) pkts bytes target prot opt in out source destination 0 0 SNAT all -- * * 10.65.0.13 10.65.0.13 to:172.16.1.3 Chain felix-OUTPUT (1 references) pkts bytes target prot opt in out source destination 1 60 felix-FIP-DNAT all -- * * 0.0.0.0/0 0.0.0.0/0 Chain felix-POSTROUTING (1 references) pkts bytes target prot opt in out source destination 1 60 felix-FIP-SNAT all -- * * 0.0.0.0/0 0.0.0.0/0 Chain felix-PREROUTING (1 references) pkts bytes target prot opt in out source destination 0 0 felix-FIP-DNAT all -- * * 0.0.0.0/0 0.0.0.0/0 0 0 DNAT tcp -- * * 0.0.0.0/0 169.254.169.254 tcp dpt:80 to:127.0.0.1:8775 [...]
# -*- coding: utf-8 -*- # # Copyright (C) 2013-2016 DNAnexus, Inc. # # This file is part of dx-toolkit (DNAnexus platform client libraries). # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy # of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. ''' This submodule contains helper functions for parsing and printing the contents of describe hashes for various DNAnexus entities (projects, containers, dataobjects, apps, and jobs). ''' from __future__ import print_function, unicode_literals, division, absolute_import import datetime, time, json, math, sys, copy import locale import subprocess from collections import defaultdict import dxpy from .printing import (RED, GREEN, BLUE, YELLOW, WHITE, BOLD, UNDERLINE, ENDC, DELIMITER, get_delimiter, fill) from ..compat import basestring, USING_PYTHON2 def JOB_STATES(state): if state == 'failed': return BOLD() + RED() + state + ENDC() elif state == 'done': return BOLD() + GREEN() + state + ENDC() elif state in ['running', 'in_progress']: return GREEN() + state + ENDC() elif state == 'partially_failed': return RED() + state + ENDC() else: return YELLOW() + state + ENDC() def DATA_STATES(state): if state == 'open': return YELLOW() + state + ENDC() elif state == 'closing': return YELLOW() + state + ENDC() elif state == 'closed': return GREEN() + state + ENDC() else: return state SIZE_LEVEL = ['bytes', 'KB', 'MB', 'GB', 'TB'] def get_size_str(size): """ Formats a byte size as a string. The returned string is no more than 9 characters long. """ if size is None: return "0 " + SIZE_LEVEL[0] if size == 0: magnitude = 0 level = 0 else: magnitude = math.floor(math.log(size, 10)) level = int(min(math.floor(magnitude // 3), 4)) return ('%d' if level == 0 else '%.2f') % (float(size) / 2**(level*10)) + ' ' + SIZE_LEVEL[level] def parse_typespec(thing): if isinstance(thing, basestring): return thing elif '$and' in thing: return '(' + ' AND '.join(map(parse_typespec, thing['$and'])) + ')' elif '$or' in thing: return '(' + ' OR '.join(map(parse_typespec, thing['$or'])) + ')' else: return 'Type spec could not be parsed' def get_io_desc(parameter, include_class=True, show_opt=True, app_help_version=False): # For interactive help, format array:CLASS inputs as: # -iNAME=CLASS [-iNAME=... [...]] # If input is required (needs >=1 inputs) # [-iNAME=CLASS [...]] # If input is optional (needs >=0 inputs if app_help_version and parameter["class"].startswith("array"): scalar_parameter = parameter.copy() # Munge the parameter dict (strip off "array:" to turn it into a # scalar) and recurse scalar_parameter["class"] = scalar_parameter["class"][6:] if "default" in parameter or parameter.get("optional"): return "[" + get_io_desc(scalar_parameter, include_class=include_class, show_opt=False, app_help_version=app_help_version) + " [-i%s=... [...]]]" % (parameter["name"],) else: return get_io_desc(scalar_parameter, include_class=include_class, show_opt=False, app_help_version=app_help_version) + " [-i%s=... 
[...]]" % (parameter["name"],) desc = "" is_optional = False if show_opt: if "default" in parameter or parameter.get("optional"): is_optional = True desc += "[" desc += ('-i' if app_help_version else '') + parameter["name"] include_parens = include_class or 'type' in parameter or 'default' in parameter if include_parens: desc += ("=" if app_help_version else " ") + "(" is_first = True if include_class: desc += parameter["class"] is_first = False if "type" in parameter: if not is_first: desc += ", " else: is_first = False desc += "type " + parse_typespec(parameter["type"]) if "default" in parameter: if not is_first: desc += ', ' desc += 'default=' + json.dumps(parameter['default']) if include_parens: desc += ")" if show_opt and is_optional: desc += "]" return desc def get_io_spec(spec, skip_fields=None): if spec is None: return 'null' if skip_fields is None: skip_fields = [] filtered_spec = [param for param in spec if param["name"] not in skip_fields] groups = defaultdict(list) for param in filtered_spec: groups[param.get('group')].append(param) list_of_params = [] for param in groups.get(None, []): list_of_params.append(get_io_desc(param)) for group in groups: if group is None: continue list_of_params.append("{g}:".format(g=group)) for param in groups[group]: list_of_params.append(" "+get_io_desc(param)) if len(skip_fields) > 0: list_of_params.append("<advanced inputs hidden; use --verbose to see more>") if len(list_of_params) == 0: return '-' if get_delimiter() is not None: return ('\n' + get_delimiter()).join(list_of_params) else: return ('\n' + ' '*16).join([fill(param, subsequent_indent=' '*18, width_adjustment=-18) for param in list_of_params]) def is_job_ref(thing, reftype=dict): ''' :param thing: something that might be a job-based object reference hash :param reftype: type that a job-based object reference would be (default is dict) ''' return isinstance(thing, reftype) and \ ((len(thing) == 2 and \ isinstance(thing.get('field'), basestring) and \ isinstance(thing.get('job'), basestring)) or \ (len(thing) == 1 and \ isinstance(thing.get('$dnanexus_link'), reftype) and \ isinstance(thing['$dnanexus_link'].get('field'), basestring) and \ isinstance(thing['$dnanexus_link'].get('job'), basestring))) def get_job_from_jbor(thing): ''' :returns: Job ID from a JBOR Assumes :func:`is_job_ref` evaluates to True ''' if '$dnanexus_link' in thing: return thing['$dnanexus_link']['job'] else: return thing['job'] def get_field_from_jbor(thing): ''' :returns: Output field name from a JBOR Assumes :func:`is_job_ref` evaluates to True ''' if '$dnanexus_link' in thing: return thing['$dnanexus_link']['field'] else: return thing['field'] def get_index_from_jbor(thing): ''' :returns: Array index of the JBOR if applicable; None otherwise Assumes :func:`is_job_ref` evaluates to True ''' if '$dnanexus_link' in thing: return thing['$dnanexus_link'].get('index') else: return None def is_metadata_ref(thing, reftype=dict): return isinstance(thing, reftype) and \ len(thing) == 1 and \ isinstance(thing.get('$dnanexus_link'), reftype) and \ isinstance(thing['$dnanexus_link'].get('metadata'), basestring) def jbor_to_str(val): ans = get_job_from_jbor(val) + ':' + get_field_from_jbor(val) index = get_index_from_jbor(val) if index is not None: ans += "." 
+ str(index) return ans def io_val_to_str(val): if is_job_ref(val): # Job-based object references return jbor_to_str(val) elif isinstance(val, dict) and '$dnanexus_link' in val: # DNAnexus link if isinstance(val['$dnanexus_link'], basestring): # simple link return val['$dnanexus_link'] elif 'project' in val['$dnanexus_link'] and 'id' in val['$dnanexus_link']: return val['$dnanexus_link']['project'] + ':' + val['$dnanexus_link']['id'] else: return json.dumps(val) elif isinstance(val, list): if len(val) == 0: return '[]' else: return '[ ' + ', '.join([io_val_to_str(item) for item in val]) + ' ]' elif isinstance(val, dict): return '{ ' + ', '.join([key + ': ' + io_val_to_str(value) for key, value in val.items()]) + ' }' else: return json.dumps(val) def job_output_to_str(job_output, prefix='\n', title="Output: ", title_len=None): if len(job_output) == 0: return prefix + title + "-" else: if title_len is None: title_len = len(title) return prefix + title + (prefix+' '*title_len).join([fill(key + ' = ' + io_val_to_str(value), subsequent_indent=' '*9, break_long_words=False) for key, value in job_output.items()]) def get_io_field(io_hash, defaults=None, delim='=', highlight_fields=()): def highlight_value(key, value): if key in highlight_fields: return YELLOW() + value + ENDC() else: return value if defaults is None: defaults = {} if io_hash is None: return '-' if len(io_hash) == 0 and len(defaults) == 0: return '-' if get_delimiter() is not None: return ('\n' + get_delimiter()).join([(key + delim + highlight_value(key, io_val_to_str(value))) for key, value in io_hash.items()] + [('[' + key + delim + io_val_to_str(value) + ']') for key, value in defaults.items()]) else: lines = [fill(key + ' ' + delim + ' ' + highlight_value(key, io_val_to_str(value)), initial_indent=' ' * FIELD_NAME_WIDTH, subsequent_indent=' ' * (FIELD_NAME_WIDTH + 1), break_long_words=False) for key, value in io_hash.items()] lines.extend([fill('[' + key + ' ' + delim + ' ' + io_val_to_str(value) + ']', initial_indent=' ' * FIELD_NAME_WIDTH, subsequent_indent=' ' * (FIELD_NAME_WIDTH + 1), break_long_words=False) for key, value in defaults.items()]) return '\n'.join(lines)[FIELD_NAME_WIDTH:] def get_resolved_jbors(resolved_thing, orig_thing, resolved_jbors): if resolved_thing == orig_thing: return if is_job_ref(orig_thing): jbor_str = jbor_to_str(orig_thing) if jbor_str not in resolved_jbors: try: from dxpy.api import job_describe job_output = job_describe(get_job_from_jbor(orig_thing)).get('output') if job_output is not None: field_value = job_output.get(get_field_from_jbor(orig_thing)) jbor_index = get_index_from_jbor(orig_thing) if jbor_index is not None: if isinstance(field_value, list): resolved_jbors[jbor_str] = field_value[jbor_index] else: resolved_jbors[jbor_str] = field_value except: # Just don't report any resolved JBORs if there are # any problems pass elif isinstance(orig_thing, list): for i in range(len(orig_thing)): get_resolved_jbors(resolved_thing[i], orig_thing[i], resolved_jbors) elif isinstance(orig_thing, dict) and '$dnanexus_link' not in orig_thing: for key in orig_thing: get_resolved_jbors(resolved_thing[key], orig_thing[key], resolved_jbors) def render_bundleddepends(thing): from ..bindings.search import find_one_data_object from ..exceptions import DXError bundles = [] for item in thing: bundle_asset_record = dxpy.DXFile(item["id"]["$dnanexus_link"]).get_properties().get("AssetBundle") asset = None if bundle_asset_record: asset = dxpy.DXRecord(bundle_asset_record) if asset: try: 
bundles.append(asset.describe().get("name") + " (" + asset.get_id() + ")") except DXError: asset = None if not asset: bundles.append(item["name"] + " (" + item["id"]["$dnanexus_link"] + ")") return bundles def render_execdepends(thing): rendered = [] for item in thing: dep = copy.copy(item) dep.setdefault('package_manager', 'apt') dep['version'] = ' = '+dep['version'] if 'version' in dep else '' rendered.append("{package_manager}: {name}{version}".format(**dep)) return rendered def render_stage(title, stage, as_stage_of=None): lines_to_print = [] if stage['name'] is not None: lines_to_print.append((title, "{name} ({id})".format(name=stage['name'], id=stage['id']))) else: lines_to_print.append((title, stage['id'])) lines_to_print.append((' Executable', stage['executable'] + \ (" (" + RED() + "inaccessible" + ENDC() + ")" \ if stage.get('accessible') is False else ""))) if 'execution' in stage: is_cached_result = as_stage_of is not None and 'parentAnalysis' in stage['execution'] and \ stage['execution']['parentAnalysis'] != as_stage_of execution_id_str = stage['execution']['id'] if is_cached_result: execution_id_str = "[" + execution_id_str + "]" if 'state' in stage['execution']: lines_to_print.append((' Execution', execution_id_str + ' (' + JOB_STATES(stage['execution']['state']) + ')')) else: lines_to_print.append((' Execution', execution_id_str)) if is_cached_result: lines_to_print.append((' Cached from', stage['execution']['parentAnalysis'])) for line in lines_to_print: print_field(line[0], line[1]) def render_short_timestamp(timestamp): return str(datetime.datetime.fromtimestamp(timestamp//1000)) def render_timestamp(timestamp): return datetime.datetime.fromtimestamp(timestamp//1000).ctime() FIELD_NAME_WIDTH = 22 def print_field(label, value): if get_delimiter() is not None: sys.stdout.write(label + get_delimiter() + value + '\n') else: sys.stdout.write( label + " " * (FIELD_NAME_WIDTH-len(label)) + fill(value, subsequent_indent=' '*FIELD_NAME_WIDTH, width_adjustment=-FIELD_NAME_WIDTH) + '\n') def print_nofill_field(label, value): sys.stdout.write(label + DELIMITER(" " * (FIELD_NAME_WIDTH - len(label))) + value + '\n') def print_list_field(label, values): print_field(label, ('-' if len(values) == 0 else DELIMITER(', ').join(values))) def print_json_field(label, json_value): print_field(label, json.dumps(json_value, ensure_ascii=False)) def print_project_desc(desc, verbose=False): recognized_fields = [ 'id', 'class', 'name', 'summary', 'description', 'protected', 'restricted', 'created', 'modified', 'dataUsage', 'sponsoredDataUsage', 'tags', 'level', 'folders', 'objects', 'permissions', 'properties', 'appCaches', 'billTo', 'version', 'createdBy', 'totalSponsoredEgressBytes', 'consumedSponsoredEgressBytes', 'containsPHI', 'databaseUIViewOnly', 'region', 'storageCost', 'pendingTransfer','atSpendingLimit', # Following are app container-specific 'destroyAt', 'project', 'type', 'app', 'appName' ] # Basic metadata print_field("ID", desc["id"]) print_field("Class", desc["class"]) if "name" in desc: print_field("Name", desc["name"]) if 'summary' in desc: print_field("Summary", desc["summary"]) if 'description' in desc and (verbose or 'summary' not in desc): print_field("Description", desc['description']) if 'version' in desc and verbose: print_field("Version", str(desc['version'])) # Ownership and permissions if 'billTo' in desc: print_field("Billed to", desc['billTo'][5 if desc['billTo'].startswith('user-') else 0:]) if 'pendingTransfer' in desc and (verbose or desc['pendingTransfer'] is not 
None): print_json_field('Pending transfer to', desc['pendingTransfer']) if "level" in desc: print_field("Access level", desc["level"]) if 'region' in desc: print_field('Region', desc['region']) # Project settings if 'protected' in desc: print_json_field("Protected", desc["protected"]) if 'restricted' in desc: print_json_field("Restricted", desc["restricted"]) if 'containsPHI' in desc: print_json_field('Contains PHI', desc['containsPHI']) if 'databaseUIViewOnly' in desc and desc['databaseUIViewOnly']: print_json_field('Database UI View Only', desc['databaseUIViewOnly']) # Usage print_field("Created", render_timestamp(desc['created'])) if 'createdBy' in desc: print_field("Created by", desc['createdBy']['user'][desc['createdBy']['user'].find('-') + 1:]) print_field("Last modified", render_timestamp(desc['modified'])) print_field("Data usage", ('%.2f' % desc["dataUsage"]) + ' GB') if 'sponsoredDataUsage' in desc: print_field("Sponsored data", ('%.2f' % desc["sponsoredDataUsage"]) + ' GB') if 'storageCost' in desc: print_field("Storage cost", "$%.3f/month" % desc["storageCost"]) if 'totalSponsoredEgressBytes' in desc or 'consumedSponsoredEgressBytes' in desc: total_egress_str = '%.2f GB' % (desc['totalSponsoredEgressBytes'] / 1073741824.,) \ if 'totalSponsoredEgressBytes' in desc else '??' consumed_egress_str = '%.2f GB' % (desc['consumedSponsoredEgressBytes'] / 1073741824.,) \ if 'consumedSponsoredEgressBytes' in desc else '??' print_field('Sponsored egress', ('%s used of %s total' % (consumed_egress_str, total_egress_str))) if 'atSpendingLimit' in desc: print_json_field("At spending limit?", desc['atSpendingLimit']) # Misc metadata if "objects" in desc: print_field("# Files", str(desc["objects"])) if "folders" in desc: print_list_field("Folders", desc["folders"]) if "permissions" in desc: print_list_field( "Permissions", [key[5 if key.startswith('user-') else 0:] + ':' + value for key, value in desc["permissions"].items()] ) if 'tags' in desc: print_list_field("Tags", desc["tags"]) if "properties" in desc: print_list_field("Properties", [key + '=' + value for key, value in desc["properties"].items()]) if "appCaches" in desc: print_json_field("App caches", desc["appCaches"]) # Container-specific if 'type' in desc: print_field("Container type", desc["type"]) if 'project' in desc: print_field("Associated project", desc["project"]) if 'destroyAt' in desc: print_field("To be destroyed", render_timestamp(desc['modified'])) if 'app' in desc: print_field("Associated App ID", desc["app"]) if 'appName' in desc: print_field("Associated App", desc["appName"]) for field in desc: if field not in recognized_fields: print_json_field(field, desc[field]) def get_advanced_inputs(desc, verbose): details = desc.get("details") if not verbose and isinstance(details, dict): return details.get("advancedInputs", []) return [] def print_app_desc(desc, verbose=False): recognized_fields = ['id', 'class', 'name', 'version', 'aliases', 'createdBy', 'created', 'modified', 'deleted', 'published', 'title', 'subtitle', 'description', 'categories', 'access', 'dxapi', 'inputSpec', 'outputSpec', 'runSpec', 'resources', 'billTo', 'installed', 'openSource', 'summary', 'applet', 'installs', 'billing', 'details', 'developerNotes', 'authorizedUsers'] print_field("ID", desc["id"]) print_field("Class", desc["class"]) if 'billTo' in desc: print_field("Billed to", desc['billTo'][5 if desc['billTo'].startswith('user-') else 0:]) print_field("Name", desc["name"]) print_field("Version", desc["version"]) print_list_field("Aliases", 
desc["aliases"]) print_field("Created by", desc["createdBy"][5 if desc['createdBy'].startswith('user-') else 0:]) print_field("Created", render_timestamp(desc['created'])) print_field("Last modified", render_timestamp(desc['modified'])) print_field("Created from", desc["applet"]) print_json_field('Installed', desc['installed']) print_json_field('Open source', desc['openSource']) print_json_field('Deleted', desc['deleted']) if not desc['deleted']: advanced_inputs = [] details = desc["details"] if isinstance(details, dict) and "advancedInputs" in details: if not verbose: advanced_inputs = details["advancedInputs"] del details["advancedInputs"] if 'published' not in desc or desc["published"] < 0: print_field("Published", "-") else: print_field("Published", render_timestamp(desc['published'])) if "title" in desc and desc['title'] is not None: print_field("Title", desc["title"]) if "subtitle" in desc and desc['subtitle'] is not None: print_field("Subtitle", desc["subtitle"]) if 'summary' in desc and desc['summary'] is not None: print_field("Summary", desc['summary']) print_list_field("Categories", desc["categories"]) if 'details' in desc: print_json_field("Details", desc["details"]) print_json_field("Access", desc["access"]) print_field("API version", desc["dxapi"]) if 'inputSpec' in desc: print_nofill_field("Input Spec", get_io_spec(desc["inputSpec"], skip_fields=advanced_inputs)) print_nofill_field("Output Spec", get_io_spec(desc["outputSpec"])) print_field("Interpreter", desc["runSpec"]["interpreter"]) if "resources" in desc["runSpec"]: print_json_field("Resources", desc["runSpec"]["resources"]) if "bundledDepends" in desc["runSpec"]: print_list_field("bundledDepends", render_bundleddepends(desc["runSpec"]["bundledDepends"])) if "execDepends" in desc["runSpec"]: print_list_field("execDepends", render_execdepends(desc["runSpec"]["execDepends"])) if "systemRequirements" in desc['runSpec']: print_json_field('Sys Requirements', desc['runSpec']['systemRequirements']) if 'resources' in desc: print_field("Resources", desc['resources']) if 'installs' in desc: print_field('# Installs', str(desc['installs'])) if 'authorizedUsers' in desc: print_list_field('AuthorizedUsers', desc["authorizedUsers"]) for field in desc: if field not in recognized_fields: print_json_field(field, desc[field]) def print_globalworkflow_desc(desc, verbose=False): recognized_fields = ['id', 'class', 'name', 'version', 'aliases', 'createdBy', 'created', 'modified', 'deleted', 'published', 'title', 'description', 'categories', 'dxapi', 'billTo', 'summary', 'billing', 'developerNotes', 'authorizedUsers', 'regionalOptions'] is_locked_workflow = False print_field("ID", desc["id"]) print_field("Class", desc["class"]) if 'billTo' in desc: print_field("Billed to", desc['billTo'][5 if desc['billTo'].startswith('user-') else 0:]) print_field("Name", desc["name"]) print_field("Version", desc["version"]) print_list_field("Aliases", desc["aliases"]) print_field("Created by", desc["createdBy"][5 if desc['createdBy'].startswith('user-') else 0:]) print_field("Created", render_timestamp(desc['created'])) print_field("Last modified", render_timestamp(desc['modified'])) # print_json_field('Open source', desc['openSource']) print_json_field('Deleted', desc.get('deleted', False)) if not desc.get('deleted', False): if 'published' not in desc or desc["published"] < 0: print_field("Published", "-") else: print_field("Published", render_timestamp(desc['published'])) if "title" in desc and desc['title'] is not None: print_field("Title", desc["title"]) 
if "subtitle" in desc and desc['subtitle'] is not None: print_field("Subtitle", desc["subtitle"]) if 'summary' in desc and desc['summary'] is not None: print_field("Summary", desc['summary']) print_list_field("Categories", desc["categories"]) if 'details' in desc: print_json_field("Details", desc["details"]) print_field("API version", desc["dxapi"]) # Additionally, print inputs, outputs, stages of the underlying workflow # from the region of the current workspace current_project = dxpy.WORKSPACE_ID if current_project: region = dxpy.api.project_describe(current_project, input_params={"fields": {"region": True}})["region"] if region and region in desc['regionalOptions']: workflow_desc = desc['regionalOptions'][region]['workflowDescribe'] print_field("Workflow region", region) if 'id' in workflow_desc: print_field("Workflow ID", workflow_desc['id']) if workflow_desc.get('inputSpec') is not None and workflow_desc.get('inputs') is None: print_nofill_field("Input Spec", get_io_spec(workflow_desc['inputSpec'], skip_fields=get_advanced_inputs(workflow_desc, verbose))) if workflow_desc.get('outputSpec') is not None and workflow_desc.get('outputs') is None: print_nofill_field("Output Spec", get_io_spec(workflow_desc['outputSpec'])) if workflow_desc.get('inputs') is not None: is_locked_workflow = True print_nofill_field("Workflow Inputs", get_io_spec(workflow_desc['inputs'])) if workflow_desc.get('outputs') is not None: print_nofill_field("Workflow Outputs", get_io_spec(workflow_desc['outputs'])) if 'stages' in workflow_desc: for i, stage in enumerate(workflow_desc["stages"]): render_stage("Stage " + str(i), stage) if 'authorizedUsers' in desc: print_list_field('AuthorizedUsers', desc["authorizedUsers"]) if is_locked_workflow: print_locked_workflow_note() for field in desc: if field not in recognized_fields: print_json_field(field, desc[field]) def get_col_str(col_desc): return col_desc['name'] + DELIMITER(" (") + col_desc['type'] + DELIMITER(")") def print_data_obj_desc(desc, verbose=False): recognized_fields = ['id', 'class', 'project', 'folder', 'name', 'properties', 'tags', 'types', 'hidden', 'details', 'links', 'created', 'modified', 'state', 'title', 'subtitle', 'description', 'inputSpec', 'outputSpec', 'runSpec', 'summary', 'dxapi', 'access', 'createdBy', 'summary', 'sponsored', 'developerNotes', 'stages', 'inputs', 'outputs', 'latestAnalysis', 'editVersion', 'outputFolder', 'initializedFrom', 'temporary'] is_locked_workflow = False print_field("ID", desc["id"]) print_field("Class", desc["class"]) if 'project' in desc: print_field("Project", desc['project']) if 'folder' in desc: print_field("Folder", desc["folder"]) print_field("Name", desc["name"]) if 'state' in desc: print_field("State", DATA_STATES(desc['state'])) if 'hidden' in desc: print_field("Visibility", ("hidden" if desc["hidden"] else "visible")) if 'types' in desc: print_list_field("Types", desc['types']) if 'properties' in desc: print_list_field("Properties", ['='.join([k, v]) for k, v in desc['properties'].items()]) if 'tags' in desc: print_list_field("Tags", desc['tags']) if verbose and 'details' in desc: print_json_field("Details", desc["details"]) if 'links' in desc: print_list_field("Outgoing links", desc['links']) print_field("Created", render_timestamp(desc['created'])) if 'createdBy' in desc: print_field("Created by", desc['createdBy']['user'][5:]) if 'job' in desc["createdBy"]: print_field(" via the job", desc['createdBy']['job']) if verbose and 'executable' in desc['createdBy']: print_field(" running", 
desc['createdBy']['executable']) print_field("Last modified", render_timestamp(desc['modified'])) if "editVersion" in desc: print_field("Edit Version", str(desc['editVersion'])) if "title" in desc: print_field("Title", desc["title"]) if "subtitle" in desc: print_field("Subtitle", desc["subtitle"]) if 'summary' in desc: print_field("Summary", desc['summary']) if 'description' in desc and verbose: print_field("Description", desc["description"]) if 'outputFolder' in desc: print_field("Output Folder", desc["outputFolder"] if desc["outputFolder"] is not None else "-") if 'access' in desc: print_json_field("Access", desc["access"]) if 'dxapi' in desc: print_field("API version", desc["dxapi"]) # In case of a workflow: do not display "Input/Output Specs" that show stages IO # when the workflow has workflow-level input/output fields defined. if desc.get('inputSpec') is not None and desc.get('inputs') is None: print_nofill_field("Input Spec", get_io_spec(desc['inputSpec'], skip_fields=get_advanced_inputs(desc, verbose))) if desc.get('outputSpec') is not None and desc.get('outputs') is None: print_nofill_field("Output Spec", get_io_spec(desc['outputSpec'])) if desc.get('inputs') is not None: is_locked_workflow = True print_nofill_field("Workflow Inputs", get_io_spec(desc['inputs'])) if desc.get('outputs') is not None: print_nofill_field("Workflow Outputs", get_io_spec(desc['outputs'])) if 'runSpec' in desc: print_field("Interpreter", desc["runSpec"]["interpreter"]) if "resources" in desc['runSpec']: print_json_field("Resources", desc["runSpec"]["resources"]) if "bundledDepends" in desc["runSpec"]: print_list_field("bundledDepends", render_bundleddepends(desc["runSpec"]["bundledDepends"])) if "execDepends" in desc["runSpec"]: print_list_field("execDepends", render_execdepends(desc["runSpec"]["execDepends"])) if "systemRequirements" in desc['runSpec']: print_json_field('Sys Requirements', desc['runSpec']['systemRequirements']) if 'stages' in desc: for i, stage in enumerate(desc["stages"]): render_stage("Stage " + str(i), stage) if 'initializedFrom' in desc: print_field("initializedFrom", desc["initializedFrom"]["id"]) if 'latestAnalysis' in desc and desc['latestAnalysis'] is not None: print_field("Last execution", desc["latestAnalysis"]["id"]) print_field(" run at", render_timestamp(desc["latestAnalysis"]["created"])) print_field(" state", JOB_STATES(desc["latestAnalysis"]["state"])) for field in desc: if field in recognized_fields: continue else: if field == "media": print_field("Media type", desc['media']) elif field == "size": if desc["class"] == "file": sponsored_str = "" if 'sponsored' in desc and desc['sponsored']: sponsored_str = DELIMITER(", ") + "sponsored by DNAnexus" print_field("Size", get_size_str(desc['size']) + sponsored_str) else: print_field("Size", str(desc['size'])) elif field == "length": print_field("Length", str(desc['length'])) elif field == "columns": if len(desc['columns']) > 0: coldescs = "Columns" + DELIMITER(" " *(16-len("Columns"))) + get_col_str(desc["columns"][0]) for column in desc["columns"][1:]: coldescs += '\n' + DELIMITER(" "*16) + get_col_str(column) print(coldescs) else: print_list_field("Columns", desc['columns']) else: # Unhandled prettifying print_json_field(field, desc[field]) if is_locked_workflow: print_locked_workflow_note() def printable_ssh_host_key(ssh_host_key): try: keygen = subprocess.Popen(["ssh-keygen", "-lf", "/dev/stdin"], stdin=subprocess.PIPE, stdout=subprocess.PIPE) if USING_PYTHON2: (stdout, stderr) = keygen.communicate(ssh_host_key) else: 
(stdout, stderr) = keygen.communicate(ssh_host_key.encode()) except: return ssh_host_key.strip() else: if not USING_PYTHON2: stdout = stdout.decode() return stdout.replace(" no comment", "").strip() def print_execution_desc(desc): recognized_fields = ['id', 'class', 'project', 'workspace', 'region', 'app', 'applet', 'executable', 'workflow', 'state', 'rootExecution', 'parentAnalysis', 'parentJob', 'originJob', 'analysis', 'stage', 'function', 'runInput', 'originalInput', 'input', 'output', 'folder', 'launchedBy', 'created', 'modified', 'failureReason', 'failureMessage', 'stdout', 'stderr', 'waitingOnChildren', 'dependsOn', 'resources', 'projectCache', 'details', 'tags', 'properties', 'name', 'instanceType', 'systemRequirements', 'executableName', 'failureFrom', 'billTo', 'startedRunning', 'stoppedRunning', 'stateTransitions', 'delayWorkspaceDestruction', 'stages', 'totalPrice', 'isFree', 'invoiceMetadata', 'priority', 'sshHostKey'] print_field("ID", desc["id"]) print_field("Class", desc["class"]) if "name" in desc and desc['name'] is not None: print_field("Job name", desc['name']) if "executableName" in desc and desc['executableName'] is not None: print_field("Executable name", desc['executableName']) print_field("Project context", desc["project"]) if 'region' in desc: print_field("Region", desc["region"]) if 'billTo' in desc: print_field("Billed to", desc['billTo'][5 if desc['billTo'].startswith('user-') else 0:]) if 'workspace' in desc: print_field("Workspace", desc["workspace"]) if 'projectCache' in desc: print_field('Cache workspace', desc['projectCache']) print_field('Resources', desc['resources']) if "app" in desc: print_field("App", desc["app"]) elif desc.get("executable", "").startswith("globalworkflow"): print_field("Workflow", desc["executable"]) elif "applet" in desc: print_field("Applet", desc["applet"]) elif "workflow" in desc: print_field("Workflow", desc["workflow"]["id"]) if "instanceType" in desc and desc['instanceType'] is not None: print_field("Instance Type", desc["instanceType"]) if "priority" in desc: print_field("Priority", desc["priority"]) print_field("State", JOB_STATES(desc["state"])) if "rootExecution" in desc: print_field("Root execution", desc["rootExecution"]) if "originJob" in desc: if desc["originJob"] is None: print_field("Origin job", "-") else: print_field("Origin job", desc["originJob"]) if desc["parentJob"] is None: print_field("Parent job", "-") else: print_field("Parent job", desc["parentJob"]) if "parentAnalysis" in desc: if desc["parentAnalysis"] is not None: print_field("Parent analysis", desc["parentAnalysis"]) if "analysis" in desc and desc["analysis"] is not None: print_field("Analysis", desc["analysis"]) print_field("Stage", desc["stage"]) if "stages" in desc: for i, (stage, analysis_stage) in enumerate(zip(desc["workflow"]["stages"], desc["stages"])): stage['execution'] = analysis_stage['execution'] render_stage("Stage " + str(i), stage, as_stage_of=desc["id"]) if "function" in desc: print_field("Function", desc["function"]) if 'runInput' in desc: default_fields = {k: v for k, v in desc["originalInput"].items() if k not in desc["runInput"]} print_nofill_field("Input", get_io_field(desc["runInput"], defaults=default_fields)) else: print_nofill_field("Input", get_io_field(desc["originalInput"])) resolved_jbors = {} input_with_jbors = desc.get('runInput', desc['originalInput']) for k in desc["input"]: if k in input_with_jbors and desc["input"][k] != input_with_jbors[k]: get_resolved_jbors(desc["input"][k], input_with_jbors[k], resolved_jbors) if 
len(resolved_jbors) != 0: print_nofill_field("Resolved JBORs", get_io_field(resolved_jbors, delim=(GREEN() + '=>' + ENDC()))) print_nofill_field("Output", get_io_field(desc["output"])) if 'folder' in desc: print_field('Output folder', desc['folder']) print_field("Launched by", desc["launchedBy"][5:]) print_field("Created", render_timestamp(desc['created'])) if 'startedRunning' in desc: if 'stoppedRunning' in desc: print_field("Started running", render_timestamp(desc['startedRunning'])) else: print_field("Started running", "{t} (running for {rt})".format(t=render_timestamp(desc['startedRunning']), rt=datetime.timedelta(seconds=int(time.time())-desc['startedRunning']//1000))) if 'stoppedRunning' in desc: print_field("Stopped running", "{t} (Runtime: {rt})".format( t=render_timestamp(desc['stoppedRunning']), rt=datetime.timedelta(seconds=(desc['stoppedRunning']-desc['startedRunning'])//1000))) if desc.get('class') == 'analysis' and 'stateTransitions' in desc and desc['stateTransitions']: # Display finishing time of the analysis if available if desc['stateTransitions'][-1]['newState'] in ['done', 'failed', 'terminated']: print_field("Finished", "{t} (Wall-clock time: {wt})".format( t=render_timestamp(desc['stateTransitions'][-1]['setAt']), wt=datetime.timedelta(seconds=(desc['stateTransitions'][-1]['setAt']-desc['created'])//1000))) print_field("Last modified", render_timestamp(desc['modified'])) if 'waitingOnChildren' in desc: print_list_field('Pending subjobs', desc['waitingOnChildren']) if 'dependsOn' in desc: print_list_field('Depends on', desc['dependsOn']) if "failureReason" in desc: print_field("Failure reason", desc["failureReason"]) if "failureMessage" in desc: print_field("Failure message", desc["failureMessage"]) if "failureFrom" in desc and desc['failureFrom'] is not None and desc['failureFrom']['id'] != desc['id']: print_field("Failure is from", desc['failureFrom']['id']) if 'systemRequirements' in desc: print_json_field("Sys Requirements", desc['systemRequirements']) if "tags" in desc: print_list_field("Tags", desc["tags"]) if "properties" in desc: print_list_field("Properties", [key + '=' + value for key, value in desc["properties"].items()]) if "details" in desc and "clonedFrom" in desc["details"]: cloned_hash = desc["details"]["clonedFrom"] if "id" in cloned_hash: print_field("Re-run of", cloned_hash["id"]) print_field(" named", cloned_hash["name"]) same_executable = cloned_hash["executable"] == desc.get("applet", desc.get("app", "")) print_field(" using", ("" if same_executable else YELLOW()) + \ cloned_hash["executable"] + \ (" (same)" if same_executable else ENDC())) same_project = cloned_hash["project"] == desc["project"] same_folder = cloned_hash["folder"] == desc["folder"] or not same_project print_field(" output folder", ("" if same_project else YELLOW()) + \ cloned_hash["project"] + \ ("" if same_project else ENDC()) + ":" + \ ("" if same_folder else YELLOW()) + \ cloned_hash["folder"] + \ (" (same)" if (same_project and same_folder) else "" if same_folder else ENDC())) different_inputs = [] for item in cloned_hash["runInput"]: if cloned_hash["runInput"][item] != desc["runInput"][item]: different_inputs.append(item) print_nofill_field(" input", get_io_field(cloned_hash["runInput"], highlight_fields=different_inputs)) cloned_sys_reqs = cloned_hash.get("systemRequirements") if isinstance(cloned_sys_reqs, dict): if cloned_sys_reqs == desc.get('systemRequirements'): print_nofill_field(" sys reqs", json.dumps(cloned_sys_reqs) + ' (same)') else: print_nofill_field(" sys 
reqs", YELLOW() + json.dumps(cloned_sys_reqs) + ENDC()) if not desc.get('isFree') and desc.get('totalPrice') is not None: print_field('Total Price', format_currency(desc['totalPrice'], meta=desc['currency'])) if desc.get('invoiceMetadata'): print_json_field("Invoice Metadata", desc['invoiceMetadata']) if desc.get('sshHostKey'): print_nofill_field("SSH Host Key", printable_ssh_host_key(desc['sshHostKey'])) for field in desc: if field not in recognized_fields: print_json_field(field, desc[field]) def locale_from_currency_code(dx_code): """ This is a (temporary) hardcoded mapping between currency_list.json in nucleus and standard locale string useful for further formatting :param dx_code: An id of nucleus/commons/pricing_models/currency_list.json collection :return: standardised locale, eg 'en_US'; None when no mapping found """ currency_locale_map = {0: 'en_US', 1: 'en_GB'} return currency_locale_map[dx_code] if dx_code in currency_locale_map else None def format_currency_from_meta(value, meta): """ Formats currency value into properly decorated currency string based on provided currency metadata. Please note that this is very basic solution missing some of the localisation features (such as negative symbol position and type. Better option is to use 'locale' module to reflect currency string decorations more accurately. See 'format_currency' :param value: :param meta: :return: """ prefix = '-' if value < 0 else '' # .. TODO: some locales position neg symbol elsewhere, missing meta prefix += meta['symbol'] if meta['symbolPosition'] == 'left' else '' suffix = ' %s' % meta["symbol"] if meta['symbolPosition'] == 'right' else '' # .. TODO: take the group and decimal separators from meta into account (US & UK are the same, so far we're safe) formatted_value = '{:,.2f}'.format(abs(value)) return prefix + formatted_value + suffix def format_currency(value, meta, currency_locale=None): """ Formats currency value into properly decorated currency string based on either locale (preferred) or if that is not available then currency metadata. Until locale is provided from the server a crude mapping between `currency.dxCode` and a locale string is used instead (eg 0: 'en_US') :param value: amount :param meta: server metadata (`currency`) :return: formatted currency string """ try: if currency_locale is None: currency_locale = locale_from_currency_code(meta['dxCode']) if currency_locale is None: return format_currency_from_meta(value, meta) else: locale.setlocale(locale.LC_ALL, currency_locale) return locale.currency(value, grouping=True) except locale.Error: # .. locale is probably not available -> fallback to format manually return format_currency_from_meta(value, meta) def print_user_desc(desc): print_field("ID", desc["id"]) print_field("Name", desc["first"] + " " + ((desc["middle"] + " ") if desc["middle"] != '' else '') + desc["last"]) if "email" in desc: print_field("Email", desc["email"]) bill_to_label = "Default bill to" if "billTo" in desc: print_field(bill_to_label, desc["billTo"]) if "appsInstalled" in desc: print_list_field("Apps installed", desc["appsInstalled"]) def print_generic_desc(desc): for field in desc: print_json_field(field, desc[field]) def print_desc(desc, verbose=False): ''' :param desc: The describe hash of a DNAnexus entity :type desc: dict Depending on the class of the entity, this method will print a formatted and human-readable string containing the data in *desc*. 
''' if desc['class'] in ['project', 'workspace', 'container']: print_project_desc(desc, verbose=verbose) elif desc['class'] == 'app': print_app_desc(desc, verbose=verbose) elif desc['class'] == 'globalworkflow': print_globalworkflow_desc(desc, verbose=verbose) elif desc['class'] in ['job', 'analysis']: print_execution_desc(desc) elif desc['class'] == 'user': print_user_desc(desc) elif desc['class'] in ['org', 'team']: print_generic_desc(desc) else: print_data_obj_desc(desc, verbose=verbose) def get_ls_desc(desc, print_id=False): addendum = ' : ' + desc['id'] if print_id is True else '' if desc['class'] in ['applet', 'workflow']: return BOLD() + GREEN() + desc['name'] + ENDC() + addendum else: return desc['name'] + addendum def print_ls_desc(desc, **kwargs): print(get_ls_desc(desc, **kwargs)) def get_ls_l_header(): return (BOLD() + 'State' + DELIMITER(' ') + 'Last modified' + DELIMITER(' ') + 'Size' + DELIMITER(' ') + 'Name' + DELIMITER(' (') + 'ID' + DELIMITER(')') + ENDC()) def print_ls_l_header(): print(get_ls_l_header()) def get_ls_l_desc_fields(): return { 'id': True, 'class': True, 'folder': True, 'length': True, 'modified': True, 'name': True, 'project': True, 'size': True, 'state': True } def get_ls_l_desc(desc, include_folder=False, include_project=False): """ desc must have at least all the fields given by get_ls_l_desc_fields. """ # If you make this method consume an additional field, you must add it to # get_ls_l_desc_fields above. if 'state' in desc: state_len = len(desc['state']) if desc['state'] != 'closed': state_str = YELLOW() + desc['state'] + ENDC() else: state_str = GREEN() + desc['state'] + ENDC() else: state_str = '' state_len = 0 name_str = '' if include_folder: name_str += desc['folder'] + ('/' if desc['folder'] != '/' else '') name_str += desc['name'] if desc['class'] in ['applet', 'workflow']: name_str = BOLD() + GREEN() + name_str + ENDC() size_str = '' if 'size' in desc and desc['class'] == 'file': size_str = get_size_str(desc['size']) elif 'length' in desc: size_str = str(desc['length']) + ' rows' size_padding = ' ' * max(0, 9 - len(size_str)) return (state_str + DELIMITER(' '*(8 - state_len)) + render_short_timestamp(desc['modified']) + DELIMITER(' ') + size_str + DELIMITER(size_padding + ' ') + name_str + DELIMITER(' (') + ((desc['project'] + DELIMITER(':')) if include_project else '') + desc['id'] + DELIMITER(')')) def print_ls_l_desc(desc, **kwargs): print(get_ls_l_desc(desc, **kwargs)) def get_find_executions_string(desc, has_children, single_result=False, show_outputs=True, is_cached_result=False): ''' :param desc: hash of execution's describe output :param has_children: whether the execution has children to be printed :param single_result: whether the execution is displayed as a single result or as part of an execution tree :param is_cached_result: whether the execution should be formatted as a cached result ''' is_not_subjob = desc['parentJob'] is None or desc['class'] == 'analysis' or single_result result = ("* " if is_not_subjob and get_delimiter() is None else "") canonical_execution_name = desc['executableName'] if desc['class'] == 'job': canonical_execution_name += ":" + desc['function'] execution_name = desc.get('name', '<no name>') # Format the name of the execution if is_cached_result: result += BOLD() + "[" + ENDC() result += BOLD() + BLUE() if desc['class'] == 'analysis': result += UNDERLINE() result += execution_name + ENDC() if execution_name != canonical_execution_name and execution_name+":main" != canonical_execution_name: result += ' (' + 
canonical_execution_name + ')' if is_cached_result: result += BOLD() + "]" + ENDC() # Format state result += DELIMITER(' (') + JOB_STATES(desc['state']) + DELIMITER(') ') + desc['id'] # Add unicode pipe to child if necessary result += DELIMITER('\n' + (u'│ ' if is_not_subjob and has_children else (" " if is_not_subjob else ""))) result += desc['launchedBy'][5:] + DELIMITER(' ') result += render_short_timestamp(desc['created']) cached_and_runtime_strs = [] if is_cached_result: cached_and_runtime_strs.append(YELLOW() + "cached" + ENDC()) if desc['class'] == 'job': # Only print runtime if it ever started running if desc.get('startedRunning'): if desc['state'] in ['done', 'failed', 'terminated', 'waiting_on_output']: runtime = datetime.timedelta(seconds=int(desc['stoppedRunning']-desc['startedRunning'])//1000) cached_and_runtime_strs.append("runtime " + str(runtime)) elif desc['state'] == 'running': seconds_running = max(int(time.time()-desc['startedRunning']//1000), 0) msg = "running for {rt}".format(rt=datetime.timedelta(seconds=seconds_running)) cached_and_runtime_strs.append(msg) if cached_and_runtime_strs: result += " (" + ", ".join(cached_and_runtime_strs) + ")" if show_outputs: prefix = DELIMITER('\n' + (u'│ ' if is_not_subjob and has_children else (" " if is_not_subjob else ""))) if desc.get("output") != None: result += job_output_to_str(desc['output'], prefix=prefix) elif desc['state'] == 'failed' and 'failureReason' in desc: result += prefix + BOLD() + desc['failureReason'] + ENDC() + ": " + fill(desc.get('failureMessage', ''), subsequent_indent=prefix.lstrip('\n')) return result def print_locked_workflow_note(): print_field('Note', 'This workflow has an explicit input specification (i.e. it is locked), and as such stage inputs cannot be modified at run-time.')
Java
package fr.fablabmars.model;

import java.util.ArrayList;

import fr.fablabmars.observer.Observable;
import fr.fablabmars.observer.Observer;

/**
 * Observable holding the current menu.
 *
 * @author Guillaume Perouffe
 * @see Observable
 */
public class CardMenu implements Observable {

    /**
     * List of the observers of this observable.
     */
    private ArrayList<Observer> listObserver = new ArrayList<Observer>();

    /**
     * Index of the current menu.
     */
    private int panel;

    /**
     * Constructor of the observable.
     * <p>
     * The menu is initialized to the default 'panel',
     * with index 0.
     * </p>
     *
     * @see CardMenu#panel
     */
    public CardMenu(){
        panel = 0;
    }

    /**
     * Changes the current panel and notifies the observers.
     *
     * @param panel
     *            Index of the new menu.
     *
     * @see CardMenu#panel
     * @see Observable#notifyObservers()
     */
    public void setPanel(int panel){
        this.panel = panel;
        notifyObservers();
    }

    @Override
    public void addObserver(Observer obs) {
        listObserver.add(obs);
    }

    @Override
    public void removeObserver(Observer obs) {
        listObserver.remove(obs);
    }

    @Override
    public void notifyObservers() {
        for(Observer obs:listObserver){
            obs.update(this);
        }
    }

    /**
     * Returns the current menu.
     *
     * @return Current menu
     *
     * @see CardMenu#panel
     */
    @Override
    public int getState(){
        return panel;
    }
}
Java
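A minimal usage sketch for the CardMenu observable above. The demo class name and the panel index are illustrative only, and it assumes the fr.fablabmars.observer interfaces from the snippet are on the classpath; it uses only the methods shown in the record.

import fr.fablabmars.model.CardMenu;

public class CardMenuDemo {  // hypothetical demo class, not part of the original source
    public static void main(String[] args) {
        CardMenu menu = new CardMenu();       // starts on the default panel, index 0
        // Any observers registered via addObserver(...) would have update(...) called on setPanel.
        menu.setPanel(2);                     // switch to panel 2 and notify observers
        System.out.println(menu.getState());  // prints 2
    }
}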
package ru.job4j.collections.tree;

/**
 * Binary tree.
 *
 * @author Hincu Andrei (andreih1981@gmail.com) by 20.10.17;
 * @version $Id$
 * @since 0.1
 * @param <E> data type.
 */
public class BinaryTree<E extends Comparable<E>> extends Tree<E> {
    /**
     * Root of the tree.
     */
    private Node<E> node;
    /**
     * Size of the tree.
     */
    private int size;

    /**
     * Tree node.
     * @param <E> value.
     */
    private class Node<E> {
        /**
         * Value.
         */
        private E value;
        /**
         * Left child.
         */
        private Node<E> left;
        /**
         * Right child.
         */
        private Node<E> right;

        /**
         * Constructor.
         * @param value node value.
         */
        private Node(E value) {
            this.value = value;
        }
    }

    /**
     * Adds a new element, or the root of the tree if it is empty.
     * @param e value.
     */
    public void add(E e) {
        if (node == null) {
            node = new Node<>(e);
            size++;
        } else {
            addNewElement(e, node);
        }
    }

    /**
     * Finds the place where the new element should be inserted.
     * @param e value.
     * @param n current tree node.
     */
    private void addNewElement(E e, Node<E> n) {
        if (e.compareTo(n.value) < 0) {
            if (n.left == null) {
                n.left = new Node<>(e);
                size++;
            } else {
                addNewElement(e, n.left);
            }
        } else if (e.compareTo(n.value) > 0) {
            if (n.right == null) {
                n.right = new Node<>(e);
                size++;
            } else {
                addNewElement(e, n.right);
            }
        }
    }

    /**
     * Getter.
     * @return size of the tree.
     */
    public int getSize() {
        return size;
    }
}
Java
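A minimal usage sketch for the BinaryTree above. The demo class name and the Integer values are illustrative, and it assumes the Tree base class from the same package adds no further abstract methods; the behavior noted in the comments follows directly from the add/addNewElement code shown.

public class BinaryTreeDemo {  // hypothetical demo class, not part of the original source
    public static void main(String[] args) {
        BinaryTree<Integer> tree = new BinaryTree<>();
        tree.add(5);   // becomes the root
        tree.add(3);   // smaller values go to the left subtree
        tree.add(8);   // larger values go to the right subtree
        tree.add(5);   // equal values hit neither branch, so duplicates are ignored
        System.out.println(tree.getSize()); // prints 3
    }
}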
<?php include_once('procedures.php'); ?> <?php include("top.php"); ?> <script src="./js/visualRound.js"></script> <style> #bottomBar { position: fixed; left: 0px; bottom: 0px; width: 100%; height: 40px; background-color: #EEE; border-top-width: 1px; border-top-color: #999; border-top-style: solid; overflow: hidden; z-index: 100; } #bottomBarText { position: relative; left: 20px; top: 5px; font-size: 20px; color: black; opacity: 1.0; } .bottom_gray_text { position: relative; top: 9px; font-size: 15px; color: gray; opacity: 1.0; } .footer { margin-bottom: 50px; } .button { background-color: #9999FF; cursor: pointer; } .left { float: left; } .right { float:right; } .pointer { cursor: pointer; } .timer { top: 12px; } #bottomBarTextOld { left: 40px; } #bottomCounter { right: 10px; } #timersEditor { right: 5px; } </style> <div class = "container content"> </div> <div id = "dataContainer" class = "container content"> <?php $roundId = intval($_GET['round']); $roundData = getRoundData($roundId); ?> <div class = "centeredText"> <h2>Раунд "<?php echo $roundData['name']; ?>" игры "<?php echo $roundData['gameName']; ?>" от <?php echo $roundData['date']; ?></h2> </div> <h3>Результаты раунда</h3> <table class = "table table-bordered"> <tr align = center> <td>Пользователь</td> <td>Счет</td> </tr> <?php $result = getUsersRoundScoresNoSort($roundId); $i = -1; foreach ($result as $row) { $i++; ?> <tr class="tableRow" title="<?php echo $i; ?>" id="r<?php echo $row['id']; ?>" align = "center"> <td> <?php echo $row['name']; ?></td> <td id="c<?php echo $row['id']; ?>"><?php echo 0; ?></td> </tr> <?php } ?> </table> <br> </div> <script> round = <?php echo $roundId; ?>; startVisualization(); </script> <div id="bottomBar"> <button id="pause" class="btn btn-info left" onClick="pause();">Начать</button> <button class="btn btn-info left" onClick="step();">Далее</button> <p id="bottomBarText" class="left"></p> <p id="bottomBarTextOld" class="bottom_gray_text left"></p> <p id="timersEditor" class="glyphicon glyphicon-time pointer timer right" onClick="showTimersDialog();"></p> <p id="bottomCounter" class="bottom_gray_text right"></p> </div> <?php include("bottom.php"); ?>
Java
# coding=utf-8
# Copyright 2018 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""ByteNet tests."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np

from tensor2tensor.data_generators import problem_hparams
from tensor2tensor.models import bytenet

import tensorflow as tf


class ByteNetTest(tf.test.TestCase):

  def testByteNet(self):
    vocab_size = 9
    x = np.random.random_integers(1, high=vocab_size - 1, size=(3, 5, 1, 1))
    y = np.random.random_integers(1, high=vocab_size - 1, size=(3, 6, 1, 1))
    hparams = bytenet.bytenet_base()
    p_hparams = problem_hparams.test_problem_hparams(vocab_size, vocab_size)
    with self.test_session() as session:
      features = {
          "inputs": tf.constant(x, dtype=tf.int32),
          "targets": tf.constant(y, dtype=tf.int32),
      }
      model = bytenet.ByteNet(
          hparams, tf.estimator.ModeKeys.TRAIN, p_hparams)
      logits, _ = model(features)
      session.run(tf.global_variables_initializer())
      res = session.run(logits)
    self.assertEqual(res.shape, (3, 50, 1, 1, vocab_size))


if __name__ == "__main__":
  tf.test.main()
Java
// Copyright 2015-2016 Espressif Systems (Shanghai) PTE LTD // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #include "freertos/FreeRTOS.h" #include "freertos/task.h" #include "freertos/event_groups.h" #include "esp_system.h" #include "esp_log.h" #include "nvs_flash.h" #include "bt.h" #include "bta_api.h" #include "esp_gap_ble_api.h" #include "esp_gatts_api.h" #include "esp_bt_defs.h" #include "esp_bt_main.h" #include "esp_bt_main.h" #include "gatts_table_creat_demo.h" #define GATTS_TABLE_TAG "GATTS_TABLE_DEMO" #define HEART_PROFILE_NUM 1 #define HEART_PROFILE_APP_IDX 0 #define ESP_HEART_RATE_APP_ID 0x55 #define SAMPLE_DEVICE_NAME "ESP_HEART_RATE" #define SAMPLE_MANUFACTURER_DATA_LEN 17 #define HEART_RATE_SVC_INST_ID 0 #define GATTS_DEMO_CHAR_VAL_LEN_MAX 0x40 uint8_t char1_str[] ={0x11,0x22,0x33}; uint16_t heart_rate_handle_table[HRS_IDX_NB]; esp_attr_value_t gatts_demo_char1_val = { .attr_max_len = GATTS_DEMO_CHAR_VAL_LEN_MAX, .attr_len = sizeof(char1_str), .attr_value = char1_str, }; static uint8_t heart_rate_service_uuid[16] = { /* LSB <--------------------------------------------------------------------------------> MSB */ //first uuid, 16bit, [12],[13] is the value 0xfb, 0x34, 0x9b, 0x5f, 0x80, 0x00, 0x00, 0x80, 0x00, 0x10, 0x00, 0x00, 0x18, 0x0D, 0x00, 0x00, }; static esp_ble_adv_data_t heart_rate_adv_config = { .set_scan_rsp = false, .include_name = true, .include_txpower = true, .min_interval = 0x20, .max_interval = 0x40, .appearance = 0x00, .manufacturer_len = 0, //TEST_MANUFACTURER_DATA_LEN, .p_manufacturer_data = NULL, //&test_manufacturer[0], .service_data_len = 0, .p_service_data = NULL, .service_uuid_len = 32, .p_service_uuid = heart_rate_service_uuid, .flag = (ESP_BLE_ADV_FLAG_GEN_DISC | ESP_BLE_ADV_FLAG_BREDR_NOT_SPT), }; static esp_ble_adv_params_t heart_rate_adv_params = { .adv_int_min = 0x20, .adv_int_max = 0x40, .adv_type = ADV_TYPE_IND, .own_addr_type = BLE_ADDR_TYPE_PUBLIC, //.peer_addr = //.peer_addr_type = .channel_map = ADV_CHNL_ALL, .adv_filter_policy = ADV_FILTER_ALLOW_SCAN_ANY_CON_ANY, }; struct gatts_profile_inst { esp_gatts_cb_t gatts_cb; uint16_t gatts_if; uint16_t app_id; uint16_t conn_id; uint16_t service_handle; esp_gatt_srvc_id_t service_id; uint16_t char_handle; esp_bt_uuid_t char_uuid; esp_gatt_perm_t perm; esp_gatt_char_prop_t property; uint16_t descr_handle; esp_bt_uuid_t descr_uuid; }; static void gatts_profile_event_handler(esp_gatts_cb_event_t event, esp_gatt_if_t gatts_if, esp_ble_gatts_cb_param_t *param); /* One gatt-based profile one app_id and one gatts_if, this array will store the gatts_if returned by ESP_GATTS_REG_EVT */ static struct gatts_profile_inst heart_rate_profile_tab[HEART_PROFILE_NUM] = { [HEART_PROFILE_APP_IDX] = { .gatts_cb = gatts_profile_event_handler, .gatts_if = ESP_GATT_IF_NONE, /* Not get the gatt_if, so initial is ESP_GATT_IF_NONE */ }, }; /* * HTPT PROFILE ATTRIBUTES **************************************************************************************** */ /* * Heart Rate PROFILE ATTRIBUTES 
**************************************************************************************** */ /// Heart Rate Sensor Service static const uint16_t heart_rate_svc = ESP_GATT_UUID_HEART_RATE_SVC; #define CHAR_DECLARATION_SIZE (sizeof(uint8_t)) static const uint16_t primary_service_uuid = ESP_GATT_UUID_PRI_SERVICE; static const uint16_t character_declaration_uuid = ESP_GATT_UUID_CHAR_DECLARE; static const uint16_t character_client_config_uuid = ESP_GATT_UUID_CHAR_CLIENT_CONFIG; static const uint8_t char_prop_notify = ESP_GATT_CHAR_PROP_BIT_NOTIFY; static const uint8_t char_prop_read = ESP_GATT_CHAR_PROP_BIT_READ; static const uint8_t char_prop_read_write = ESP_GATT_CHAR_PROP_BIT_WRITE|ESP_GATT_CHAR_PROP_BIT_READ; /// Heart Rate Sensor Service - Heart Rate Measurement Characteristic, notify static const uint16_t heart_rate_meas_uuid = ESP_GATT_HEART_RATE_MEAS; static const uint8_t heart_measurement_ccc[2] ={ 0x00, 0x00}; /// Heart Rate Sensor Service -Body Sensor Location characteristic, read static const uint16_t body_sensor_location_uuid = ESP_GATT_BODY_SENSOR_LOCATION; static const uint8_t body_sensor_loc_val[1] = {0x00}; /// Heart Rate Sensor Service - Heart Rate Control Point characteristic, write&read static const uint16_t heart_rate_ctrl_point = ESP_GATT_HEART_RATE_CNTL_POINT; static const uint8_t heart_ctrl_point[1] = {0x00}; /// Full HRS Database Description - Used to add attributes into the database static const esp_gatts_attr_db_t heart_rate_gatt_db[HRS_IDX_NB] = { // Heart Rate Service Declaration [HRS_IDX_SVC] = {{ESP_GATT_AUTO_RSP}, {ESP_UUID_LEN_16, (uint8_t *)&primary_service_uuid, ESP_GATT_PERM_READ, sizeof(uint16_t), sizeof(heart_rate_svc), (uint8_t *)&heart_rate_svc}}, // Heart Rate Measurement Characteristic Declaration [HRS_IDX_HR_MEAS_CHAR] = {{ESP_GATT_AUTO_RSP}, {ESP_UUID_LEN_16, (uint8_t *)&character_declaration_uuid, ESP_GATT_PERM_READ, CHAR_DECLARATION_SIZE,CHAR_DECLARATION_SIZE, (uint8_t *)&char_prop_notify}}, // Heart Rate Measurement Characteristic Value [HRS_IDX_HR_MEAS_VAL] = {{ESP_GATT_AUTO_RSP}, {ESP_UUID_LEN_16, (uint8_t *)&heart_rate_meas_uuid, ESP_GATT_PERM_READ, HRPS_HT_MEAS_MAX_LEN,0, NULL}}, // Heart Rate Measurement Characteristic - Client Characteristic Configuration Descriptor [HRS_IDX_HR_MEAS_NTF_CFG] = {{ESP_GATT_AUTO_RSP}, {ESP_UUID_LEN_16, (uint8_t *)&character_client_config_uuid, ESP_GATT_PERM_READ|ESP_GATT_PERM_WRITE, sizeof(uint16_t),sizeof(heart_measurement_ccc), (uint8_t *)heart_measurement_ccc}}, // Body Sensor Location Characteristic Declaration [HRS_IDX_BOBY_SENSOR_LOC_CHAR] = {{ESP_GATT_AUTO_RSP}, {ESP_UUID_LEN_16, (uint8_t *)&character_declaration_uuid, ESP_GATT_PERM_READ, CHAR_DECLARATION_SIZE,CHAR_DECLARATION_SIZE, (uint8_t *)&char_prop_read}}, // Body Sensor Location Characteristic Value [HRS_IDX_BOBY_SENSOR_LOC_VAL] = {{ESP_GATT_AUTO_RSP}, {ESP_UUID_LEN_16, (uint8_t *)&body_sensor_location_uuid, ESP_GATT_PERM_READ, sizeof(uint8_t), sizeof(body_sensor_loc_val), (uint8_t *)body_sensor_loc_val}}, // Heart Rate Control Point Characteristic Declaration [HRS_IDX_HR_CTNL_PT_CHAR] = {{ESP_GATT_AUTO_RSP}, {ESP_UUID_LEN_16, (uint8_t *)&character_declaration_uuid, ESP_GATT_PERM_READ, CHAR_DECLARATION_SIZE,CHAR_DECLARATION_SIZE, (uint8_t *)&char_prop_read_write}}, // Heart Rate Control Point Characteristic Value [HRS_IDX_HR_CTNL_PT_VAL] = {{ESP_GATT_AUTO_RSP}, {ESP_UUID_LEN_16, (uint8_t *)&heart_rate_ctrl_point, ESP_GATT_PERM_WRITE|ESP_GATT_PERM_READ, sizeof(uint8_t), sizeof(heart_ctrl_point), (uint8_t *)heart_ctrl_point}}, }; static void 
gap_event_handler(esp_gap_ble_cb_event_t event, esp_ble_gap_cb_param_t *param) { ESP_LOGE(GATTS_TABLE_TAG, "GAP_EVT, event %d\n", event); switch (event) { case ESP_GAP_BLE_ADV_DATA_SET_COMPLETE_EVT: esp_ble_gap_start_advertising(&heart_rate_adv_params); break; case ESP_GAP_BLE_ADV_START_COMPLETE_EVT: //advertising start complete event to indicate advertising start successfully or failed if (param->adv_start_cmpl.status != ESP_BT_STATUS_SUCCESS) { ESP_LOGE(GATTS_TABLE_TAG, "Advertising start failed\n"); } break; default: break; } } static void gatts_profile_event_handler(esp_gatts_cb_event_t event, esp_gatt_if_t gatts_if, esp_ble_gatts_cb_param_t *param) { ESP_LOGE(GATTS_TABLE_TAG, "event = %x\n",event); switch (event) { case ESP_GATTS_REG_EVT: ESP_LOGI(GATTS_TABLE_TAG, "%s %d\n", __func__, __LINE__); esp_ble_gap_set_device_name(SAMPLE_DEVICE_NAME); ESP_LOGI(GATTS_TABLE_TAG, "%s %d\n", __func__, __LINE__); esp_ble_gap_config_adv_data(&heart_rate_adv_config); ESP_LOGI(GATTS_TABLE_TAG, "%s %d\n", __func__, __LINE__); esp_ble_gatts_create_attr_tab(heart_rate_gatt_db, gatts_if, HRS_IDX_NB, HEART_RATE_SVC_INST_ID); break; case ESP_GATTS_READ_EVT: break; case ESP_GATTS_WRITE_EVT: break; case ESP_GATTS_EXEC_WRITE_EVT: break; case ESP_GATTS_MTU_EVT: break; case ESP_GATTS_CONF_EVT: break; case ESP_GATTS_UNREG_EVT: break; case ESP_GATTS_DELETE_EVT: break; case ESP_GATTS_START_EVT: break; case ESP_GATTS_STOP_EVT: break; case ESP_GATTS_CONNECT_EVT: break; case ESP_GATTS_DISCONNECT_EVT: break; case ESP_GATTS_OPEN_EVT: break; case ESP_GATTS_CANCEL_OPEN_EVT: break; case ESP_GATTS_CLOSE_EVT: break; case ESP_GATTS_LISTEN_EVT: break; case ESP_GATTS_CONGEST_EVT: break; case ESP_GATTS_CREAT_ATTR_TAB_EVT:{ ESP_LOGE(GATTS_TABLE_TAG, "The number handle =%x\n",param->add_attr_tab.num_handle); if(param->add_attr_tab.num_handle == HRS_IDX_NB){ memcpy(heart_rate_handle_table, param->add_attr_tab.handles, sizeof(heart_rate_handle_table)); esp_ble_gatts_start_service(heart_rate_handle_table[HRS_IDX_SVC]); } break; } default: break; } } static void gatts_event_handler(esp_gatts_cb_event_t event, esp_gatt_if_t gatts_if, esp_ble_gatts_cb_param_t *param) { ESP_LOGI(GATTS_TABLE_TAG, "EVT %d, gatts if %d\n", event, gatts_if); /* If event is register event, store the gatts_if for each profile */ if (event == ESP_GATTS_REG_EVT) { if (param->reg.status == ESP_GATT_OK) { heart_rate_profile_tab[HEART_PROFILE_APP_IDX].gatts_if = gatts_if; } else { ESP_LOGI(GATTS_TABLE_TAG, "Reg app failed, app_id %04x, status %d\n", param->reg.app_id, param->reg.status); return; } } do { int idx; for (idx = 0; idx < HEART_PROFILE_NUM; idx++) { if (gatts_if == ESP_GATT_IF_NONE || /* ESP_GATT_IF_NONE, not specify a certain gatt_if, need to call every profile cb function */ gatts_if == heart_rate_profile_tab[idx].gatts_if) { if (heart_rate_profile_tab[idx].gatts_cb) { heart_rate_profile_tab[idx].gatts_cb(event, gatts_if, param); } } } } while (0); } void app_main() { esp_err_t ret; esp_bt_controller_init(); ret = esp_bt_controller_enable(ESP_BT_MODE_BTDM); if (ret) { ESP_LOGE(GATTS_TABLE_TAG, "%s enable controller failed\n", __func__); return; } ESP_LOGI(GATTS_TABLE_TAG, "%s init bluetooth\n", __func__); ret = esp_bluedroid_init(); if (ret) { ESP_LOGE(GATTS_TABLE_TAG, "%s init bluetooth failed\n", __func__); return; } ret = esp_bluedroid_enable(); if (ret) { ESP_LOGE(GATTS_TABLE_TAG, "%s enable bluetooth failed\n", __func__); return; } esp_ble_gatts_register_callback(gatts_event_handler); esp_ble_gap_register_callback(gap_event_handler); 
esp_ble_gatts_app_register(ESP_HEART_RATE_APP_ID); return; }
Java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.pinot.core.startree.v2.store;

import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.nio.ByteOrder;
import java.util.List;
import java.util.Map;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.pinot.common.segment.ReadMode;
import org.apache.pinot.core.segment.index.column.ColumnIndexContainer;
import org.apache.pinot.core.segment.index.metadata.SegmentMetadataImpl;
import org.apache.pinot.core.segment.memory.PinotDataBuffer;
import org.apache.pinot.core.startree.v2.StarTreeV2;
import org.apache.pinot.core.startree.v2.StarTreeV2Constants;

import static org.apache.pinot.core.startree.v2.store.StarTreeIndexMapUtils.IndexKey;
import static org.apache.pinot.core.startree.v2.store.StarTreeIndexMapUtils.IndexValue;


/**
 * The {@code StarTreeIndexContainer} class contains the indexes for multiple star-trees.
 */
public class StarTreeIndexContainer implements Closeable {
  private final PinotDataBuffer _dataBuffer;
  private final List<StarTreeV2> _starTrees;

  public StarTreeIndexContainer(File segmentDirectory, SegmentMetadataImpl segmentMetadata,
      Map<String, ColumnIndexContainer> indexContainerMap, ReadMode readMode)
      throws ConfigurationException, IOException {
    File indexFile = new File(segmentDirectory, StarTreeV2Constants.INDEX_FILE_NAME);
    if (readMode == ReadMode.heap) {
      _dataBuffer = PinotDataBuffer
          .loadFile(indexFile, 0, indexFile.length(), ByteOrder.LITTLE_ENDIAN, "Star-tree V2 data buffer");
    } else {
      _dataBuffer = PinotDataBuffer
          .mapFile(indexFile, true, 0, indexFile.length(), ByteOrder.LITTLE_ENDIAN, "Star-tree V2 data buffer");
    }
    File indexMapFile = new File(segmentDirectory, StarTreeV2Constants.INDEX_MAP_FILE_NAME);
    List<Map<IndexKey, IndexValue>> indexMapList =
        StarTreeIndexMapUtils.loadFromFile(indexMapFile, segmentMetadata.getStarTreeV2MetadataList().size());
    _starTrees = StarTreeLoaderUtils.loadStarTreeV2(_dataBuffer, indexMapList, segmentMetadata, indexContainerMap);
  }

  public List<StarTreeV2> getStarTrees() {
    return _starTrees;
  }

  @Override
  public void close()
      throws IOException {
    _dataBuffer.close();
  }
}
Java
/*
Copyright 2010-2011 Zhengmao HU (James)

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package net.sf.jabb.util.text;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Given a text string to be tested, and a list of matching strings, find out which matching string the
 * text string starts with.
 * <p>
 * The matching is case sensitive. If one matching string starts with another,
 * and the text string starts with both of them, then the longer one is considered to be matched.
 * <p>
 * If the matching needs to be checked against number segments (start number ~ end number) represented
 * as strings, the {@link #expandNumberMatchingRange(Map, String, String, Object)} method can be used to
 * expand number segments into heading number strings.
 *
 * @author Zhengmao HU (James)
 */
public class StringStartWithMatcher extends StartWithMatcher {

    private static final long serialVersionUID = -2501231925022032723L;

    /**
     * Create a new instance according to heading strings and their corresponding attachment objects.
     * <p>
     * When initializing the internal data structure, more memory is consumed in exchange
     * for better matching speed.
     *
     * @param headingDefinitions  Key is the heading string, Value is its associated attachment object.
     *                            When the heading string is matched, the attachment object will be returned
     *                            as identifier.
     */
    public StringStartWithMatcher(Map<String, ? extends Object> headingDefinitions) {
        super(normalizeMatchingDefinitions(headingDefinitions));
    }

    /**
     * Create a new instance according to heading strings and their corresponding attachment objects.
     *
     * @param headingDefinitions  Key is the heading string, Value is its associated attachment object.
     *                            When the heading string is matched, the attachment object will be returned
     *                            as identifier.
     * @param moreSpaceForSpeed   Whether or not to consume more memory for better matching speed.
     */
    public StringStartWithMatcher(Map<String, ? extends Object> headingDefinitions, boolean moreSpaceForSpeed) {
        super(normalizeMatchingDefinitions(headingDefinitions), moreSpaceForSpeed);
    }

    /**
     * Create a copy; the copy will have exactly the same matching definitions as the original.
     *
     * @param toBeCopied  The original copy.
     */
    public StringStartWithMatcher(StringStartWithMatcher toBeCopied) {
        super(toBeCopied);
    }

    /**
     * Normalize matching definitions according to the requirements of {@link StartWithMatcher}.
     *
     * @param headingDefinitions  Key is the heading string, Value is its associated attachment object.
     *                            When the heading string is matched, the attachment object will be returned
     *                            as identifier.
     * @return  Matching definitions for usage of {@link StartWithMatcher}.
     */
    static protected List<MatchingDefinition> normalizeMatchingDefinitions(Map<String, ? extends Object> headingDefinitions){
        // exactMatchExample is automatically set to the same value as regularExpression
        List<MatchingDefinition> l = new ArrayList<MatchingDefinition>(headingDefinitions.size());
        for (Map.Entry<String, ? extends Object> e: headingDefinitions.entrySet()){
            MatchingDefinition c = new MatchingDefinition();
            c.setRegularExpression(escapeForRegExp(e.getKey()));
            c.setAttachment(e.getValue());
            c.setExactMatchExample(e.getKey());
            l.add(c);
        }
        return l;
    }

    /**
     * Expand number segments (such as 138000~138999 or 138000~138029) into number headings
     * (such as 138 or {13800, 13801, 13802}).
     *
     * @param headingDefinitions  Equivalent heading definitions that could be used to create an instance
     *                            of {@link StringStartWithMatcher} will be put into this Map.
     * @param start       first/starting number
     * @param end         last/ending number
     * @param attachment  attachment to identify that the segment matches a string
     */
    public static <T> void expandNumberMatchingRange(Map<String, T> headingDefinitions, String start, String end, T attachment){
        int firstDiff;  // position of the first differing character
        int lastDiff;   // position where the trailing 0-to-9 segment starts

        // First, force the starting and ending numbers to have the same length
        if (start.length() > end.length()){
            StringBuilder sb = new StringBuilder(end);
            while (start.length() > sb.length()){
                sb.append("9");
            }
            end = sb.toString();
        } else if (end.length() > start.length()){
            StringBuilder sb = new StringBuilder(start);
            while (end.length() > sb.length()){
                sb.append("0");
            }
            start = sb.toString();
        }

        // Then, find the position of the first differing character
        for (firstDiff = 0; firstDiff < start.length(); firstDiff++){
            if (start.charAt(firstDiff) != end.charAt(firstDiff)){
                break;
            }
        }

        // Next, find the position where the trailing 0-to-9 segment starts
        for (lastDiff = start.length() - 1; lastDiff >= 0; lastDiff--){
            if (start.charAt(lastDiff) != '0' || end.charAt(lastDiff) != '9'){
                break;
            }
        }
        lastDiff++;

        if (firstDiff == lastDiff){
            // The whole range can be merged into a single heading
            headingDefinitions.put(start.substring(0, firstDiff), attachment);
        } else {
            // The range has to be expanded into multiple headings
            int j = Integer.parseInt(start.substring(firstDiff, lastDiff));
            int k = Integer.parseInt(end.substring(firstDiff, lastDiff));
            String head = start.substring(0, firstDiff);
            // Zero-pad the formatted counter so expanded headings keep their original width
            String f = "%0" + (lastDiff-firstDiff) + "d";
            StringBuilder sb = new StringBuilder();
            for (int i = j; i <= k; i++){
                sb.setLength(0);
                sb.append(head);
                sb.append(String.format(f, i));
                headingDefinitions.put(sb.toString(), attachment);
            }
        }
    }
}
Java
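An illustrative usage sketch for the matcher above. The map setup and the expandNumberMatchingRange call follow the javadoc; the match(...) lookup is assumed to be inherited from the StartWithMatcher base class, which is not included here, so its exact name and return type are an assumption.

import java.util.HashMap;
import java.util.Map;

import net.sf.jabb.util.text.StringStartWithMatcher;

public class StringStartWithMatcherExample {
    public static void main(String[] args) {
        Map<String, Object> headings = new HashMap<String, Object>();
        headings.put("138", "carrier-A");            // plain prefix
        headings.put("13800", "carrier-A-special");  // the longer prefix wins when both match

        // Expand a number segment (1390000~1390299) into prefix entries.
        StringStartWithMatcher.expandNumberMatchingRange(headings, "1390000", "1390299", "carrier-B");

        StringStartWithMatcher matcher = new StringStartWithMatcher(headings);

        // Assumed lookup API inherited from StartWithMatcher: returns the attachment
        // associated with the longest matching prefix, or null if nothing matches.
        Object hit = matcher.match("13800123456");
        System.out.println(hit); // expected: carrier-A-special
    }
}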
# -*- coding: utf-8 -*- """ Linguistic and other taggers. Tagging each token in a sentence with supplementary information, such as its part-of-speech (POS) tag, and named entity (NE) tag. """ __all__ = [ "PerceptronTagger", "pos_tag", "pos_tag_sents", "tag_provinces", "chunk_parse", "NER", ] from pythainlp.tag.locations import tag_provinces from pythainlp.tag.pos_tag import pos_tag, pos_tag_sents from pythainlp.tag._tag_perceptron import PerceptronTagger from pythainlp.tag.chunk import chunk_parse from pythainlp.tag.named_entity import NER
Python
#[doc = r" Value read from the register"] pub struct R { bits: u32, } #[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::CMAR3 { #[doc = r" Modifies the contents of the register"] #[inline(always)] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline(always)] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline(always)] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline(always)] pub fn reset(&self) { self.write(|w| w) } } #[doc = r" Value of the field"] pub struct MAR { bits: u32, } impl MAR { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bits(&self) -> u32 { self.bits } } #[doc = r" Proxy"] pub struct _MAW<'a> { w: &'a mut W, } impl<'a> _MAW<'a> { #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bits(self, value: u32) -> &'a mut W { const MASK: u32 = 4294967295; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline(always)] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bits 0:31 - Memory address"] #[inline(always)] pub fn ma(&self) -> MAR { let bits = { const MASK: u32 = 4294967295; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u32) as u32 }; MAR { bits } } } impl W { #[doc = r" Reset value of the register"] #[inline(always)] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bits 0:31 - Memory address"] #[inline(always)] pub fn ma(&mut self) -> _MAW { _MAW { w: self } } }
Rust
<?php

namespace Topxia\WebBundle\Listener;

use Symfony\Component\HttpKernel\KernelEvents;
use Symfony\Component\HttpKernel\Event\GetResponseEvent;
use Symfony\Component\EventDispatcher\EventSubscriberInterface;

class LocaleListener implements EventSubscriberInterface
{
    private $defaultLocale;

    public function __construct($defaultLocale)
    {
        if ($defaultLocale == 'en') {
            $defaultLocale = 'en_US'; // for backward compatibility with the old 'en' setting
        }

        $this->defaultLocale = $defaultLocale;
    }

    public function onKernelRequest(GetResponseEvent $event)
    {
        $request = $event->getRequest();
        if (!$request->hasPreviousSession()) {
            return;
        }

        $locale = $request->getSession()->get('_locale', $request->cookies->get('_last_logout_locale') ?: $this->defaultLocale);
        $request->setLocale($locale);
    }

    public static function getSubscribedEvents()
    {
        return array(
            // must be registered after the default Locale listener
            KernelEvents::REQUEST => array(array('onKernelRequest', 15))
        );
    }
}
PHP
<?php namespace BankId\Merchant\Library\Schemas\saml\metadata; /** * Class representing Extensions */ class Extensions extends ExtensionsType { }
PHP
package com.dexvis.simple.transform;

import javafx.scene.web.HTMLEditor;

import org.simpleframework.xml.transform.Transform;

/**
 * Simple XML {@link Transform} that (de)serializes a JavaFX {@link HTMLEditor}
 * as its HTML text content.
 */
public class HTMLEditorTransform implements Transform<HTMLEditor>
{
  @Override
  public HTMLEditor read(String value) throws Exception
  {
    HTMLEditor editor = new HTMLEditor();
    editor.setHtmlText(value);
    return editor;
  }

  @Override
  public String write(HTMLEditor value) throws Exception
  {
    return value.getHtmlText();
  }
}
Java
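An illustrative sketch of how a Transform such as the one above is typically registered with the Simple XML framework, assuming the library's standard RegistryMatcher and Persister types; note that javafx.scene.web.HTMLEditor can only be constructed after the JavaFX toolkit has been initialized.

import javafx.scene.web.HTMLEditor;

import org.simpleframework.xml.Serializer;
import org.simpleframework.xml.core.Persister;
import org.simpleframework.xml.transform.RegistryMatcher;

import com.dexvis.simple.transform.HTMLEditorTransform;

public class HTMLEditorTransformExample {
    public static Serializer createSerializer() {
        // Register the custom transform so HTMLEditor fields are (de)serialized
        // as their HTML text rather than as an object graph.
        RegistryMatcher matcher = new RegistryMatcher();
        matcher.bind(HTMLEditor.class, new HTMLEditorTransform());
        return new Persister(matcher);
    }
}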
package ecologylab.bigsemantics.service.crawler; import java.io.IOException; /** * A general framework for crawling resources. * * @author quyin */ public interface ResourceCrawler<T> { /** * Queue a resource with the given URI. * * @param uri */ void queue(String uri); /** * If the crawler has more resources to crawl. * * @return true if there are still resources to crawl. */ boolean hasNext(); /** * Retrieve the next resource. * * @return The next crawled resource. * @throws IOException * If the resource cannot be accessed. */ T next() throws IOException; /** * Expand a given resource. * * @param resource */ void expand(T resource); /** * @return The number of resources queued. */ int countQueued(); /** * @return The number of resources that are to be crawled. */ int countWaiting(); /** * @return The number of resources that have been accessed. */ int countAccessed(); /** * @return The number of resources that have been accessed successfully. */ int countSuccess(); /** * @return The number of resources that have been accessed unsuccessfully. */ int countFailure(); }
Java
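An illustrative driver loop showing how a caller might exercise the ResourceCrawler contract above: seed the queue, drain it while hasNext() is true, expand each fetched resource, and report the counters at the end. The concrete crawler implementation is assumed to exist elsewhere.

import java.io.IOException;

import ecologylab.bigsemantics.service.crawler.ResourceCrawler;

public class CrawlDriver
{
  /** Drains a crawler after seeding it with a single starting URI. */
  public static <T> void crawl(ResourceCrawler<T> crawler, String seedUri)
  {
    crawler.queue(seedUri);
    while (crawler.hasNext())
    {
      try
      {
        T resource = crawler.next(); // may hit the network
        crawler.expand(resource);    // queue resources referenced by this one
      }
      catch (IOException e)
      {
        // next() failed for this resource; the crawler's failure counter reflects it
      }
    }
    System.out.printf("queued=%d accessed=%d ok=%d failed=%d%n",
                      crawler.countQueued(), crawler.countAccessed(),
                      crawler.countSuccess(), crawler.countFailure());
  }
}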
package net.sf.anpr.rcp.widget;

import java.awt.Color;
import java.awt.Graphics;
import java.awt.Rectangle;
import java.awt.image.BufferedImage;

import javax.swing.ImageIcon;
import javax.swing.JLabel;
import javax.swing.SwingConstants;

public class JCanvasPanel extends JLabel {
	private static final long serialVersionUID = 1L;

	private Rectangle focusArea = new Rectangle();
	private BufferedImage image;

	public JCanvasPanel() {
		super();
		this.setVerticalAlignment(SwingConstants.TOP);
		this.setHorizontalAlignment(SwingConstants.LEFT);
	}

	protected void paintComponent(Graphics g) {
		super.paintComponent(g);
		if (focusArea == null) return;
		if (focusArea.width >= 0 && focusArea.height >= 0) {
			Color c = g.getColor();
			g.setColor(Color.RED);
			g.drawRect(focusArea.x, focusArea.y, focusArea.width, focusArea.height);
			g.setColor(c);
		}
		// The Graphics passed to paintComponent() is owned by Swing and must not be disposed here.
	}

	protected void setImage(BufferedImage image) {
		this.image = image;
		this.setIcon(new ImageIcon(image));
	}

	public void setFocusArea(Rectangle focusArea) {
		this.focusArea = focusArea;
	}

	protected BufferedImage getImage() {
		return image;
	}
}
Java
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright (c) 2012 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Common Policy Engine Implementation Policies can be expressed in one of two forms: A list of lists, or a string written in the new policy language. In the list-of-lists representation, each check inside the innermost list is combined as with an "and" conjunction--for that check to pass, all the specified checks must pass. These innermost lists are then combined as with an "or" conjunction. This is the original way of expressing policies, but there now exists a new way: the policy language. In the policy language, each check is specified the same way as in the list-of-lists representation: a simple "a:b" pair that is matched to the correct code to perform that check. However, conjunction operators are available, allowing for more expressiveness in crafting policies. As an example, take the following rule, expressed in the list-of-lists representation:: [["role:admin"], ["project_id:%(project_id)s", "role:projectadmin"]] In the policy language, this becomes:: role:admin or (project_id:%(project_id)s and role:projectadmin) The policy language also has the "not" operator, allowing a richer policy rule:: project_id:%(project_id)s and not role:dunce Finally, two special policy checks should be mentioned; the policy check "@" will always accept an access, and the policy check "!" will always reject an access. (Note that if a rule is either the empty list ("[]") or the empty string, this is equivalent to the "@" policy check.) Of these, the "!" policy check is probably the most useful, as it allows particular rules to be explicitly disabled. """ import abc import re import urllib import urllib2 from oslo.config import cfg import six from kwstandby.openstack.common import fileutils from kwstandby.openstack.common.gettextutils import _ from kwstandby.openstack.common import jsonutils from kwstandby.openstack.common import log as logging policy_opts = [ cfg.StrOpt('policy_file', default='policy.json', help=_('JSON file containing policy')), cfg.StrOpt('policy_default_rule', default='default', help=_('Rule enforced when requested rule is not found')), ] CONF = cfg.CONF CONF.register_opts(policy_opts) LOG = logging.getLogger(__name__) _checks = {} class PolicyNotAuthorized(Exception): def __init__(self, rule): msg = _("Policy doesn't allow %s to be performed.") % rule super(PolicyNotAuthorized, self).__init__(msg) class Rules(dict): """A store for rules. 
Handles the default_rule setting directly.""" @classmethod def load_json(cls, data, default_rule=None): """Allow loading of JSON rule data.""" # Suck in the JSON data and parse the rules rules = dict((k, parse_rule(v)) for k, v in jsonutils.loads(data).items()) return cls(rules, default_rule) def __init__(self, rules=None, default_rule=None): """Initialize the Rules store.""" super(Rules, self).__init__(rules or {}) self.default_rule = default_rule def __missing__(self, key): """Implements the default rule handling.""" # If the default rule isn't actually defined, do something # reasonably intelligent if not self.default_rule or self.default_rule not in self: raise KeyError(key) return self[self.default_rule] def __str__(self): """Dumps a string representation of the rules.""" # Start by building the canonical strings for the rules out_rules = {} for key, value in self.items(): # Use empty string for singleton TrueCheck instances if isinstance(value, TrueCheck): out_rules[key] = '' else: out_rules[key] = str(value) # Dump a pretty-printed JSON representation return jsonutils.dumps(out_rules, indent=4) class Enforcer(object): """Responsible for loading and enforcing rules. :param policy_file: Custom policy file to use, if none is specified, `CONF.policy_file` will be used. :param rules: Default dictionary / Rules to use. It will be considered just in the first instantiation. If `load_rules(True)`, `clear()` or `set_rules(True)` is called this will be overwritten. :param default_rule: Default rule to use, CONF.default_rule will be used if none is specified. """ def __init__(self, policy_file=None, rules=None, default_rule=None): self.rules = Rules(rules) self.default_rule = default_rule or CONF.policy_default_rule self.policy_path = None self.policy_file = policy_file or CONF.policy_file def set_rules(self, rules, overwrite=True): """Create a new Rules object based on the provided dict of rules. :param rules: New rules to use. It should be an instance of dict. :param overwrite: Whether to overwrite current rules or update them with the new rules. """ if not isinstance(rules, dict): raise TypeError(_("Rules must be an instance of dict or Rules, " "got %s instead") % type(rules)) if overwrite: self.rules = Rules(rules) else: self.update(rules) def clear(self): """Clears Enforcer rules, policy's cache and policy's path.""" self.set_rules({}) self.policy_path = None def load_rules(self, force_reload=False): """Loads policy_path's rules. Policy file is cached and will be reloaded if modified. :param force_reload: Whether to overwrite current rules. """ if not self.policy_path: self.policy_path = self._get_policy_path() reloaded, data = fileutils.read_cached_file(self.policy_path, force_reload=force_reload) if reloaded: rules = Rules.load_json(data, self.default_rule) self.set_rules(rules) LOG.debug(_("Rules successfully reloaded")) def _get_policy_path(self): """Locate the policy json data file. :param policy_file: Custom policy file to locate. :returns: The policy path :raises: ConfigFilesNotFoundError if the file couldn't be located. """ policy_file = CONF.find_file(self.policy_file) if policy_file: return policy_file raise cfg.ConfigFilesNotFoundError(path=CONF.policy_file) def enforce(self, rule, target, creds, do_raise=False, exc=None, *args, **kwargs): """Checks authorization of a rule against the target and credentials. :param rule: A string or BaseCheck instance specifying the rule to evaluate. 
:param target: As much information about the object being operated on as possible, as a dictionary. :param creds: As much information about the user performing the action as possible, as a dictionary. :param do_raise: Whether to raise an exception or not if check fails. :param exc: Class of the exception to raise if the check fails. Any remaining arguments passed to check() (both positional and keyword arguments) will be passed to the exception class. If not specified, PolicyNotAuthorized will be used. :return: Returns False if the policy does not allow the action and exc is not provided; otherwise, returns a value that evaluates to True. Note: for rules using the "case" expression, this True value will be the specified string from the expression. """ # NOTE(flaper87): Not logging target or creds to avoid # potential security issues. LOG.debug(_("Rule %s will be now enforced") % rule) self.load_rules() # Allow the rule to be a Check tree if isinstance(rule, BaseCheck): result = rule(target, creds, self) elif not self.rules: # No rules to reference means we're going to fail closed result = False else: try: # Evaluate the rule result = self.rules[rule](target, creds, self) except KeyError: LOG.debug(_("Rule [%s] doesn't exist") % rule) # If the rule doesn't exist, fail closed result = False # If it is False, raise the exception if requested if do_raise and not result: if exc: raise exc(*args, **kwargs) raise PolicyNotAuthorized(rule) return result class BaseCheck(object): """Abstract base class for Check classes.""" __metaclass__ = abc.ABCMeta @abc.abstractmethod def __str__(self): """String representation of the Check tree rooted at this node.""" pass @abc.abstractmethod def __call__(self, target, cred): """Triggers if instance of the class is called. Performs the check. Returns False to reject the access or a true value (not necessary True) to accept the access. """ pass class FalseCheck(BaseCheck): """A policy check that always returns False (disallow).""" def __str__(self): """Return a string representation of this check.""" return "!" def __call__(self, target, cred): """Check the policy.""" return False class TrueCheck(BaseCheck): """A policy check that always returns True (allow).""" def __str__(self): """Return a string representation of this check.""" return "@" def __call__(self, target, cred): """Check the policy.""" return True class Check(BaseCheck): """A base class to allow for user-defined policy checks.""" def __init__(self, kind, match): """Initiates Check instance. :param kind: The kind of the check, i.e., the field before the ':'. :param match: The match of the check, i.e., the field after the ':'. """ self.kind = kind self.match = match def __str__(self): """Return a string representation of this check.""" return "%s:%s" % (self.kind, self.match) class NotCheck(BaseCheck): """Implements the "not" logical operator. A policy check that inverts the result of another policy check. """ def __init__(self, rule): """Initialize the 'not' check. :param rule: The rule to negate. Must be a Check. """ self.rule = rule def __str__(self): """Return a string representation of this check.""" return "not %s" % self.rule def __call__(self, target, cred): """Check the policy. Returns the logical inverse of the wrapped check. """ return not self.rule(target, cred) class AndCheck(BaseCheck): """Implements the "and" logical operator. A policy check that requires that a list of other checks all return True. """ def __init__(self, rules): """Initialize the 'and' check. 
:param rules: A list of rules that will be tested. """ self.rules = rules def __str__(self): """Return a string representation of this check.""" return "(%s)" % ' and '.join(str(r) for r in self.rules) def __call__(self, target, cred): """Check the policy. Requires that all rules accept in order to return True. """ for rule in self.rules: if not rule(target, cred): return False return True def add_check(self, rule): """Adds rule to be tested. Allows addition of another rule to the list of rules that will be tested. Returns the AndCheck object for convenience. """ self.rules.append(rule) return self class OrCheck(BaseCheck): """Implements the "or" operator. A policy check that requires that at least one of a list of other checks returns True. """ def __init__(self, rules): """Initialize the 'or' check. :param rules: A list of rules that will be tested. """ self.rules = rules def __str__(self): """Return a string representation of this check.""" return "(%s)" % ' or '.join(str(r) for r in self.rules) def __call__(self, target, cred): """Check the policy. Requires that at least one rule accept in order to return True. """ for rule in self.rules: if rule(target, cred): return True return False def add_check(self, rule): """Adds rule to be tested. Allows addition of another rule to the list of rules that will be tested. Returns the OrCheck object for convenience. """ self.rules.append(rule) return self def _parse_check(rule): """Parse a single base check rule into an appropriate Check object.""" # Handle the special checks if rule == '!': return FalseCheck() elif rule == '@': return TrueCheck() try: kind, match = rule.split(':', 1) except Exception: LOG.exception(_("Failed to understand rule %s") % rule) # If the rule is invalid, we'll fail closed return FalseCheck() # Find what implements the check if kind in _checks: return _checks[kind](kind, match) elif None in _checks: return _checks[None](kind, match) else: LOG.error(_("No handler for matches of kind %s") % kind) return FalseCheck() def _parse_list_rule(rule): """Translates the old list-of-lists syntax into a tree of Check objects. Provided for backwards compatibility. """ # Empty rule defaults to True if not rule: return TrueCheck() # Outer list is joined by "or"; inner list by "and" or_list = [] for inner_rule in rule: # Elide empty inner lists if not inner_rule: continue # Handle bare strings if isinstance(inner_rule, basestring): inner_rule = [inner_rule] # Parse the inner rules into Check objects and_list = [_parse_check(r) for r in inner_rule] # Append the appropriate check to the or_list if len(and_list) == 1: or_list.append(and_list[0]) else: or_list.append(AndCheck(and_list)) # If we have only one check, omit the "or" if not or_list: return FalseCheck() elif len(or_list) == 1: return or_list[0] return OrCheck(or_list) # Used for tokenizing the policy language _tokenize_re = re.compile(r'\s+') def _parse_tokenize(rule): """Tokenizer for the policy language. Most of the single-character tokens are specified in the _tokenize_re; however, parentheses need to be handled specially, because they can appear inside a check string. Thankfully, those parentheses that appear inside a check string can never occur at the very beginning or end ("%(variable)s" is the correct syntax). 
""" for tok in _tokenize_re.split(rule): # Skip empty tokens if not tok or tok.isspace(): continue # Handle leading parens on the token clean = tok.lstrip('(') for i in range(len(tok) - len(clean)): yield '(', '(' # If it was only parentheses, continue if not clean: continue else: tok = clean # Handle trailing parens on the token clean = tok.rstrip(')') trail = len(tok) - len(clean) # Yield the cleaned token lowered = clean.lower() if lowered in ('and', 'or', 'not'): # Special tokens yield lowered, clean elif clean: # Not a special token, but not composed solely of ')' if len(tok) >= 2 and ((tok[0], tok[-1]) in [('"', '"'), ("'", "'")]): # It's a quoted string yield 'string', tok[1:-1] else: yield 'check', _parse_check(clean) # Yield the trailing parens for i in range(trail): yield ')', ')' class ParseStateMeta(type): """Metaclass for the ParseState class. Facilitates identifying reduction methods. """ def __new__(mcs, name, bases, cls_dict): """Create the class. Injects the 'reducers' list, a list of tuples matching token sequences to the names of the corresponding reduction methods. """ reducers = [] for key, value in cls_dict.items(): if not hasattr(value, 'reducers'): continue for reduction in value.reducers: reducers.append((reduction, key)) cls_dict['reducers'] = reducers return super(ParseStateMeta, mcs).__new__(mcs, name, bases, cls_dict) def reducer(*tokens): """Decorator for reduction methods. Arguments are a sequence of tokens, in order, which should trigger running this reduction method. """ def decorator(func): # Make sure we have a list of reducer sequences if not hasattr(func, 'reducers'): func.reducers = [] # Add the tokens to the list of reducer sequences func.reducers.append(list(tokens)) return func return decorator class ParseState(object): """Implement the core of parsing the policy language. Uses a greedy reduction algorithm to reduce a sequence of tokens into a single terminal, the value of which will be the root of the Check tree. Note: error reporting is rather lacking. The best we can get with this parser formulation is an overall "parse failed" error. Fortunately, the policy language is simple enough that this shouldn't be that big a problem. """ __metaclass__ = ParseStateMeta def __init__(self): """Initialize the ParseState.""" self.tokens = [] self.values = [] def reduce(self): """Perform a greedy reduction of the token stream. If a reducer method matches, it will be executed, then the reduce() method will be called recursively to search for any more possible reductions. """ for reduction, methname in self.reducers: if (len(self.tokens) >= len(reduction) and self.tokens[-len(reduction):] == reduction): # Get the reduction method meth = getattr(self, methname) # Reduce the token stream results = meth(*self.values[-len(reduction):]) # Update the tokens and values self.tokens[-len(reduction):] = [r[0] for r in results] self.values[-len(reduction):] = [r[1] for r in results] # Check for any more reductions return self.reduce() def shift(self, tok, value): """Adds one more token to the state. Calls reduce().""" self.tokens.append(tok) self.values.append(value) # Do a greedy reduce... self.reduce() @property def result(self): """Obtain the final result of the parse. Raises ValueError if the parse failed to reduce to a single result. 
""" if len(self.values) != 1: raise ValueError("Could not parse rule") return self.values[0] @reducer('(', 'check', ')') @reducer('(', 'and_expr', ')') @reducer('(', 'or_expr', ')') def _wrap_check(self, _p1, check, _p2): """Turn parenthesized expressions into a 'check' token.""" return [('check', check)] @reducer('check', 'and', 'check') def _make_and_expr(self, check1, _and, check2): """Create an 'and_expr'. Join two checks by the 'and' operator. """ return [('and_expr', AndCheck([check1, check2]))] @reducer('and_expr', 'and', 'check') def _extend_and_expr(self, and_expr, _and, check): """Extend an 'and_expr' by adding one more check.""" return [('and_expr', and_expr.add_check(check))] @reducer('check', 'or', 'check') def _make_or_expr(self, check1, _or, check2): """Create an 'or_expr'. Join two checks by the 'or' operator. """ return [('or_expr', OrCheck([check1, check2]))] @reducer('or_expr', 'or', 'check') def _extend_or_expr(self, or_expr, _or, check): """Extend an 'or_expr' by adding one more check.""" return [('or_expr', or_expr.add_check(check))] @reducer('not', 'check') def _make_not_expr(self, _not, check): """Invert the result of another check.""" return [('check', NotCheck(check))] def _parse_text_rule(rule): """Parses policy to the tree. Translates a policy written in the policy language into a tree of Check objects. """ # Empty rule means always accept if not rule: return TrueCheck() # Parse the token stream state = ParseState() for tok, value in _parse_tokenize(rule): state.shift(tok, value) try: return state.result except ValueError: # Couldn't parse the rule LOG.exception(_("Failed to understand rule %(rule)r") % locals()) # Fail closed return FalseCheck() def parse_rule(rule): """Parses a policy rule into a tree of Check objects.""" # If the rule is a string, it's in the policy language if isinstance(rule, basestring): return _parse_text_rule(rule) return _parse_list_rule(rule) def register(name, func=None): """Register a function or Check class as a policy check. :param name: Gives the name of the check type, e.g., 'rule', 'role', etc. If name is None, a default check type will be registered. :param func: If given, provides the function or class to register. If not given, returns a function taking one argument to specify the function or class to register, allowing use as a decorator. """ # Perform the actual decoration by registering the function or # class. Returns the function or class for compliance with the # decorator interface. def decorator(func): _checks[name] = func return func # If the function or class is given, do the registration if func: return decorator(func) return decorator @register("rule") class RuleCheck(Check): def __call__(self, target, creds, enforcer): """Recursively checks credentials based on the defined rules.""" try: return enforcer.rules[self.match](target, creds, enforcer) except KeyError: # We don't have any matching rule; fail closed return False @register("role") class RoleCheck(Check): def __call__(self, target, creds, enforcer): """Check that there is a matching role in the cred dict.""" return self.match.lower() in [x.lower() for x in creds['roles']] @register('http') class HttpCheck(Check): def __call__(self, target, creds, enforcer): """Check http: rules by calling to a remote server. This example implementation simply verifies that the response is exactly 'True'. 
""" url = ('http:' + self.match) % target data = {'target': jsonutils.dumps(target), 'credentials': jsonutils.dumps(creds)} post_data = urllib.urlencode(data) f = urllib2.urlopen(url, post_data) return f.read() == "True" @register(None) class GenericCheck(Check): def __call__(self, target, creds, enforcer): """Check an individual match. Matches look like: tenant:%(tenant_id)s role:compute:admin """ # TODO(termie): do dict inspection via dot syntax match = self.match % target if self.kind in creds: return match == six.text_type(creds[self.kind]) return False
Python
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2022 DBeaver Corp and others * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.ui.app.standalone.about; import org.eclipse.jface.action.IAction; import org.eclipse.ui.IWorkbenchWindow; import org.eclipse.ui.actions.ActionDelegate; public class AboutBoxAction extends ActionDelegate { private IWorkbenchWindow window; public AboutBoxAction(IWorkbenchWindow window) { this.window = window; } @Override public void run(IAction action) { // new AboutDialog(window.getShell()).open(); AboutBoxDialog dialog = new AboutBoxDialog(window.getShell()); dialog.open(); } }
Java
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="es"> <head> <!-- Generated by javadoc (version 1.7.0_51) on Thu Apr 10 11:52:58 CEST 2014 --> <title>State</title> <meta name="date" content="2014-04-10"> <link rel="stylesheet" type="text/css" href="../stylesheet.css" title="Style"> </head> <body> <script type="text/javascript"><!-- if (location.href.indexOf('is-external=true') == -1) { parent.document.title="State"; } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar_top"> <!-- --> </a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../JSHOP2/package-summary.html">Package</a></li> <li class="navBarCell1Rev">Class</li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../deprecated-list.html">Deprecated</a></li> <li><a href="../index-all.html">Index</a></li> <li><a href="../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../JSHOP2/SolverThread.html" title="class in JSHOP2"><span class="strong">Prev Class</span></a></li> <li><a href="../JSHOP2/StdLib.html" title="class in JSHOP2"><span class="strong">Next Class</span></a></li> </ul> <ul class="navList"> <li><a href="../index.html?JSHOP2/State.html" target="_top">Frames</a></li> <li><a href="State.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <div> <ul class="subNavList"> <li>Summary:&nbsp;</li> <li>Nested&nbsp;|&nbsp;</li> <li><a href="#field_summary">Field</a>&nbsp;|&nbsp;</li> <li><a href="#constructor_summary">Constr</a>&nbsp;|&nbsp;</li> <li><a href="#method_summary">Method</a></li> </ul> <ul class="subNavList"> <li>Detail:&nbsp;</li> <li><a href="#field_detail">Field</a>&nbsp;|&nbsp;</li> <li><a href="#constructor_detail">Constr</a>&nbsp;|&nbsp;</li> <li><a href="#method_detail">Method</a></li> </ul> </div> <a name="skip-navbar_top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <!-- ======== START OF CLASS DATA ======== --> <div class="header"> <div class="subTitle">JSHOP2</div> <h2 title="Class State" class="title">Class State</h2> </div> <div class="contentContainer"> <ul class="inheritance"> <li>java.lang.Object</li> <li> <ul class="inheritance"> <li>JSHOP2.State</li> </ul> </li> </ul> <div class="description"> <ul class="blockList"> <li class="blockList"> <hr> <br> <pre>public class <span class="strong">State</span> extends java.lang.Object</pre> <div class="block">This class is used to represent the current state of the world.</div> <dl><dt><span class="strong">Version:</span></dt> <dd>1.0.3</dd> <dt><span class="strong">Author:</span></dt> <dd>Okhtay Ilghami, <a href="http://www.cs.umd.edu/~okhtay">http://www.cs.umd.edu/~okhtay</a></dd></dl> </li> </ul> </div> <div class="summary"> <ul class="blockList"> <li class="blockList"> <!-- =========== FIELD SUMMARY =========== --> <ul class="blockList"> <li class="blockList"><a name="field_summary"> <!-- --> </a> <h3>Field Summary</h3> <table 
class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Field Summary table, listing fields, and an explanation"> <caption><span>Fields</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Field and Description</th> </tr> <tr class="altColor"> <td class="colFirst"><code>private java.util.Vector&lt;<a href="../JSHOP2/Term.html" title="class in JSHOP2">Term</a>&gt;[]</code></td> <td class="colLast"><code><strong><a href="../JSHOP2/State.html#atoms">atoms</a></strong></code> <div class="block">The atoms in the current state of the world as an array of <code>Vector</code>s.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><code>private <a href="../JSHOP2/Axiom.html" title="class in JSHOP2">Axiom</a>[][]</code></td> <td class="colLast"><code><strong><a href="../JSHOP2/State.html#axioms">axioms</a></strong></code> <div class="block">The axioms in the domain description as a two-dimensional array.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><code>private java.util.Vector&lt;<a href="../JSHOP2/NumberedPredicate.html" title="class in JSHOP2">NumberedPredicate</a>&gt;[]</code></td> <td class="colLast"><code><strong><a href="../JSHOP2/State.html#protections">protections</a></strong></code> <div class="block">The protections in the current state of the world as an array of <code>Vector</code>s.</div> </td> </tr> </table> </li> </ul> <!-- ======== CONSTRUCTOR SUMMARY ======== --> <ul class="blockList"> <li class="blockList"><a name="constructor_summary"> <!-- --> </a> <h3>Constructor Summary</h3> <table class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation"> <caption><span>Constructors</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colOne" scope="col">Constructor and Description</th> </tr> <tr class="altColor"> <td class="colOne"><code><strong><a href="../JSHOP2/State.html#State(int, JSHOP2.Axiom[][])">State</a></strong>(int&nbsp;size, <a href="../JSHOP2/Axiom.html" title="class in JSHOP2">Axiom</a>[][]&nbsp;axiomsIn)</code> <div class="block">To initialize the state of the world.</div> </td> </tr> </table> </li> </ul> <!-- ========== METHOD SUMMARY =========== --> <ul class="blockList"> <li class="blockList"><a name="method_summary"> <!-- --> </a> <h3>Method Summary</h3> <table class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation"> <caption><span>Methods</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Method and Description</th> </tr> <tr class="altColor"> <td class="colFirst"><code>boolean</code></td> <td class="colLast"><code><strong><a href="../JSHOP2/State.html#add(JSHOP2.Predicate)">add</a></strong>(<a href="../JSHOP2/Predicate.html" title="class in JSHOP2">Predicate</a>&nbsp;p)</code> <div class="block">To add a predicate to the current state of the world.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><code>boolean</code></td> <td class="colLast"><code><strong><a href="../JSHOP2/State.html#addProtection(JSHOP2.Predicate)">addProtection</a></strong>(<a href="../JSHOP2/Predicate.html" title="class in JSHOP2">Predicate</a>&nbsp;p)</code> <div class="block">To protect a given predicate in the current state of the world.</div> </td> </tr> <tr class="altColor"> <td 
class="colFirst"><code>void</code></td> <td class="colLast"><code><strong><a href="../JSHOP2/State.html#clear()">clear</a></strong>()</code> <div class="block">To empty the world state.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><code>int</code></td> <td class="colLast"><code><strong><a href="../JSHOP2/State.html#del(JSHOP2.Predicate)">del</a></strong>(<a href="../JSHOP2/Predicate.html" title="class in JSHOP2">Predicate</a>&nbsp;p)</code> <div class="block">To delete a predicate from the current state of the world.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><code>boolean</code></td> <td class="colLast"><code><strong><a href="../JSHOP2/State.html#delProtection(JSHOP2.Predicate)">delProtection</a></strong>(<a href="../JSHOP2/Predicate.html" title="class in JSHOP2">Predicate</a>&nbsp;p)</code> <div class="block">To unprotect a given predicate.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><code>java.util.ArrayList&lt;java.lang.String&gt;</code></td> <td class="colLast"><code><strong><a href="../JSHOP2/State.html#getState()">getState</a></strong>()</code> <div class="block">Returns an ArrayList of strings that represents the state.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><code>boolean</code></td> <td class="colLast"><code><strong><a href="../JSHOP2/State.html#isProtected(JSHOP2.Predicate)">isProtected</a></strong>(<a href="../JSHOP2/Predicate.html" title="class in JSHOP2">Predicate</a>&nbsp;p)</code> <div class="block">To check if a predicate is protected.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><code><a href="../JSHOP2/MyIterator.html" title="class in JSHOP2">MyIterator</a></code></td> <td class="colLast"><code><strong><a href="../JSHOP2/State.html#iterator(int)">iterator</a></strong>(int&nbsp;head)</code> <div class="block">To initialize and return the appropriate iterator when looking for ways to satisfy a given predicate.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><code><a href="../JSHOP2/Term.html" title="class in JSHOP2">Term</a>[]</code></td> <td class="colLast"><code><strong><a href="../JSHOP2/State.html#nextBinding(JSHOP2.Predicate, JSHOP2.MyIterator)">nextBinding</a></strong>(<a href="../JSHOP2/Predicate.html" title="class in JSHOP2">Predicate</a>&nbsp;p, <a href="../JSHOP2/MyIterator.html" title="class in JSHOP2">MyIterator</a>&nbsp;me)</code> <div class="block">This function returns the bindings that can satisfy a given precondition one-by-one.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><code><strong><a href="../JSHOP2/State.html#print()">print</a></strong>()</code> <div class="block">This function is used to print the current state of the world.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><code><strong><a href="../JSHOP2/State.html#undo(java.util.Vector[])">undo</a></strong>(java.util.Vector[]&nbsp;delAdd)</code> <div class="block">This function is used, in case of a backtrack, to undo the changes that were made to the current state of the world because of the backtracked decision.</div> </td> </tr> </table> <ul class="blockList"> <li class="blockList"><a name="methods_inherited_from_class_java.lang.Object"> <!-- --> </a> <h3>Methods inherited from class&nbsp;java.lang.Object</h3> <code>clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait</code></li> </ul> </li> </ul> </li> </ul> </div> <div class="details"> <ul 
class="blockList"> <li class="blockList"> <!-- ============ FIELD DETAIL =========== --> <ul class="blockList"> <li class="blockList"><a name="field_detail"> <!-- --> </a> <h3>Field Detail</h3> <a name="atoms"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>atoms</h4> <pre>private&nbsp;java.util.Vector&lt;<a href="../JSHOP2/Term.html" title="class in JSHOP2">Term</a>&gt;[] atoms</pre> <div class="block">The atoms in the current state of the world as an array of <code>Vector</code>s. The array is indexed by the possible heads (i.e., the constant symbol that comes first) of the possible predicates.</div> </li> </ul> <a name="axioms"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>axioms</h4> <pre>private&nbsp;<a href="../JSHOP2/Axiom.html" title="class in JSHOP2">Axiom</a>[][] axioms</pre> <div class="block">The axioms in the domain description as a two-dimensional array. The array is indexed first by the head of the predicates each axiom can prove and second by the axioms themselves.</div> </li> </ul> <a name="protections"> <!-- --> </a> <ul class="blockListLast"> <li class="blockList"> <h4>protections</h4> <pre>private&nbsp;java.util.Vector&lt;<a href="../JSHOP2/NumberedPredicate.html" title="class in JSHOP2">NumberedPredicate</a>&gt;[] protections</pre> <div class="block">The protections in the current state of the world as an array of <code>Vector</code>s. The array is indexed by the heads of protected predicates.</div> </li> </ul> </li> </ul> <!-- ========= CONSTRUCTOR DETAIL ======== --> <ul class="blockList"> <li class="blockList"><a name="constructor_detail"> <!-- --> </a> <h3>Constructor Detail</h3> <a name="State(int, JSHOP2.Axiom[][])"> <!-- --> </a> <ul class="blockListLast"> <li class="blockList"> <h4>State</h4> <pre>public&nbsp;State(int&nbsp;size, <a href="../JSHOP2/Axiom.html" title="class in JSHOP2">Axiom</a>[][]&nbsp;axiomsIn)</pre> <div class="block">To initialize the state of the world.</div> <dl><dt><span class="strong">Parameters:</span></dt><dd><code>size</code> - the number of possible heads of predicates (i.e., the number of constant symbols that can come first in a predicate).</dd><dd><code>axiomsIn</code> - the axioms in the domain description as a two-dimensional array. 
The array is indexed first by the head of the predicates each axiom can prove and second by the axioms themselves.</dd></dl> </li> </ul> </li> </ul> <!-- ============ METHOD DETAIL ========== --> <ul class="blockList"> <li class="blockList"><a name="method_detail"> <!-- --> </a> <h3>Method Detail</h3> <a name="add(JSHOP2.Predicate)"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>add</h4> <pre>public&nbsp;boolean&nbsp;add(<a href="../JSHOP2/Predicate.html" title="class in JSHOP2">Predicate</a>&nbsp;p)</pre> <div class="block">To add a predicate to the current state of the world.</div> <dl><dt><span class="strong">Parameters:</span></dt><dd><code>p</code> - the predicate to be added.</dd> <dt><span class="strong">Returns:</span></dt><dd><code>true</code> if the predicate was added (i.e., it was not already in the current state of the world), <code>false</code> otherwise.</dd></dl> </li> </ul> <a name="addProtection(JSHOP2.Predicate)"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>addProtection</h4> <pre>public&nbsp;boolean&nbsp;addProtection(<a href="../JSHOP2/Predicate.html" title="class in JSHOP2">Predicate</a>&nbsp;p)</pre> <div class="block">To protect a given predicate in the current state of the world.</div> <dl><dt><span class="strong">Parameters:</span></dt><dd><code>p</code> - the predicate to be protected.</dd> <dt><span class="strong">Returns:</span></dt><dd>this function always returns <code>true</code>.</dd></dl> </li> </ul> <a name="clear()"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>clear</h4> <pre>public&nbsp;void&nbsp;clear()</pre> <div class="block">To empty the world state.</div> </li> </ul> <a name="del(JSHOP2.Predicate)"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>del</h4> <pre>public&nbsp;int&nbsp;del(<a href="../JSHOP2/Predicate.html" title="class in JSHOP2">Predicate</a>&nbsp;p)</pre> <div class="block">To delete a predicate from the current state of the world.</div> <dl><dt><span class="strong">Parameters:</span></dt><dd><code>p</code> - the predicate to be deleted.</dd> <dt><span class="strong">Returns:</span></dt><dd>the index of the predicate that was deleted in the <code>Vector</code> if the predicate was deleted (i.e., it existed in the current state of the world), -1 otherwise. 
This index is used in case of a backtrack to undo this deletion by inserting the deleted predicate right back where it used to be.</dd></dl> </li> </ul> <a name="delProtection(JSHOP2.Predicate)"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>delProtection</h4> <pre>public&nbsp;boolean&nbsp;delProtection(<a href="../JSHOP2/Predicate.html" title="class in JSHOP2">Predicate</a>&nbsp;p)</pre> <div class="block">To unprotect a given predicate.</div> <dl><dt><span class="strong">Parameters:</span></dt><dd><code>p</code> - the predicate to be unprotected.</dd> <dt><span class="strong">Returns:</span></dt><dd><code>true</code> if the protected is unprotected successfully, <code>false</code> otherwise (i.e., when the predicate was not protected before).</dd></dl> </li> </ul> <a name="isProtected(JSHOP2.Predicate)"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>isProtected</h4> <pre>public&nbsp;boolean&nbsp;isProtected(<a href="../JSHOP2/Predicate.html" title="class in JSHOP2">Predicate</a>&nbsp;p)</pre> <div class="block">To check if a predicate is protected.</div> <dl><dt><span class="strong">Parameters:</span></dt><dd><code>p</code> - the predicate to be checked.</dd> <dt><span class="strong">Returns:</span></dt><dd><code>true</code> if the predicate is protected, <code>false</code> otherwise.</dd></dl> </li> </ul> <a name="iterator(int)"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>iterator</h4> <pre>public&nbsp;<a href="../JSHOP2/MyIterator.html" title="class in JSHOP2">MyIterator</a>&nbsp;iterator(int&nbsp;head)</pre> <div class="block">To initialize and return the appropriate iterator when looking for ways to satisfy a given predicate.</div> <dl><dt><span class="strong">Parameters:</span></dt><dd><code>head</code> - the index of the constant symbol that is the head of the predicate (i.e., that comes first in the predicate).</dd> <dt><span class="strong">Returns:</span></dt><dd>the iterator to be used to find the satisfiers for this predicate.</dd></dl> </li> </ul> <a name="nextBinding(JSHOP2.Predicate, JSHOP2.MyIterator)"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>nextBinding</h4> <pre>public&nbsp;<a href="../JSHOP2/Term.html" title="class in JSHOP2">Term</a>[]&nbsp;nextBinding(<a href="../JSHOP2/Predicate.html" title="class in JSHOP2">Predicate</a>&nbsp;p, <a href="../JSHOP2/MyIterator.html" title="class in JSHOP2">MyIterator</a>&nbsp;me)</pre> <div class="block">This function returns the bindings that can satisfy a given precondition one-by-one.</div> <dl><dt><span class="strong">Parameters:</span></dt><dd><code>p</code> - the predicate to be satisfied.</dd><dd><code>me</code> - the iterator that keeps track of where we are with the satisfiers so that the next time this function is called, we can take off where we stopped last time.</dd> <dt><span class="strong">Returns:</span></dt><dd>the next binding as an array of terms indexed by the indeices of the variable symbols in the given predicate.</dd></dl> </li> </ul> <a name="print()"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>print</h4> <pre>public&nbsp;void&nbsp;print()</pre> <div class="block">This function is used to print the current state of the world.</div> </li> </ul> <a name="getState()"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>getState</h4> <pre>public&nbsp;java.util.ArrayList&lt;java.lang.String&gt;&nbsp;getState()</pre> <div class="block">Returns an ArrayList of strings that represents the state. 
Used in conjunction with JSHOP2GUI (Added 5/28/06)</div> <dl><dt><span class="strong">Returns:</span></dt><dd>- An ArrayList<String> representing the state</dd></dl> </li> </ul> <a name="undo(java.util.Vector[])"> <!-- --> </a> <ul class="blockListLast"> <li class="blockList"> <h4>undo</h4> <pre>public&nbsp;void&nbsp;undo(java.util.Vector[]&nbsp;delAdd)</pre> <div class="block">This function is used, in case of a backtrack, to undo the changes that were made to the current state of the world because of the backtracked decision.</div> <dl><dt><span class="strong">Parameters:</span></dt><dd><code>delAdd</code> - a 4-member array of type <code>Vector</code>. These four members are the deleted atoms, the added atoms, the deleted protections and the added protections respectively.</dd></dl> </li> </ul> </li> </ul> </li> </ul> </div> </div> <!-- ========= END OF CLASS DATA ========= --> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar_bottom"> <!-- --> </a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../JSHOP2/package-summary.html">Package</a></li> <li class="navBarCell1Rev">Class</li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../deprecated-list.html">Deprecated</a></li> <li><a href="../index-all.html">Index</a></li> <li><a href="../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../JSHOP2/SolverThread.html" title="class in JSHOP2"><span class="strong">Prev Class</span></a></li> <li><a href="../JSHOP2/StdLib.html" title="class in JSHOP2"><span class="strong">Next Class</span></a></li> </ul> <ul class="navList"> <li><a href="../index.html?JSHOP2/State.html" target="_top">Frames</a></li> <li><a href="State.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <div> <ul class="subNavList"> <li>Summary:&nbsp;</li> <li>Nested&nbsp;|&nbsp;</li> <li><a href="#field_summary">Field</a>&nbsp;|&nbsp;</li> <li><a href="#constructor_summary">Constr</a>&nbsp;|&nbsp;</li> <li><a href="#method_summary">Method</a></li> </ul> <ul class="subNavList"> <li>Detail:&nbsp;</li> <li><a href="#field_detail">Field</a>&nbsp;|&nbsp;</li> <li><a href="#constructor_detail">Constr</a>&nbsp;|&nbsp;</li> <li><a href="#method_detail">Method</a></li> </ul> </div> <a name="skip-navbar_bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> </body> </html>
HTML
using SolrNetLight; using System; using System.Collections.Generic; using System.Linq; using System.Runtime.Serialization; using System.Text; using SolrNetLight.Facet; namespace SolrNetLight { [DataContract] public class SolrResponse<T> { [DataMember(Name="response")] public SolrResponseBase<T> Response { get; set; } [DataMember(Name = "facet_counts")] public FacetCounts Facets { get; set; } } }
C#
package com.action.design.pattern.chain;

/**
 * Creates loggers of different types, assigns each a different error level, and sets the
 * next logger inside every logger. The next logger held by each logger represents one
 * link of the chain.
 * Created by wuyunfeng on 2017/6/15.
 */
public class ChainPatternDemo {

    private static AbstractLogger getChainOfLoggers() {

        AbstractLogger errorLogger = new ErrorLogger(AbstractLogger.ERROR);
        AbstractLogger fileLogger = new FileLogger(AbstractLogger.DEBUG);
        AbstractLogger consoleLogger = new ConsoleLogger(AbstractLogger.INFO);

        errorLogger.setNextLogger(fileLogger);
        fileLogger.setNextLogger(consoleLogger);

        return errorLogger;
    }

    public static void main(String[] args) {
        AbstractLogger loggerChain = getChainOfLoggers();

        loggerChain.logMessage(AbstractLogger.INFO, "This is an information.");
        loggerChain.logMessage(AbstractLogger.DEBUG, "This is a debug level information.");
        loggerChain.logMessage(AbstractLogger.ERROR, "This is an error information.");
    }
}
Java
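The demo above depends on AbstractLogger, ErrorLogger, FileLogger and ConsoleLogger, none of which appear in this snippet. Below is a minimal sketch of what the AbstractLogger base class is assumed to look like, so the chain wiring in getChainOfLoggers() can be read in context; the actual implementations may differ.

package com.action.design.pattern.chain;

public abstract class AbstractLogger {
    public static final int INFO = 1;
    public static final int DEBUG = 2;
    public static final int ERROR = 3;

    protected int level;
    protected AbstractLogger nextLogger; // next handler in the chain of responsibility

    public void setNextLogger(AbstractLogger nextLogger) {
        this.nextLogger = nextLogger;
    }

    public void logMessage(int level, String message) {
        if (this.level <= level) {
            write(message); // this handler is responsible for the message
        }
        if (nextLogger != null) {
            nextLogger.logMessage(level, message); // always pass the message along the chain
        }
    }

    protected abstract void write(String message);
}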
/* * Copyright (c) 2021, Peter Abeles. All Rights Reserved. * * This file is part of BoofCV (http://boofcv.org). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package boofcv.alg.descriptor; import boofcv.struct.feature.TupleDesc_B; import org.openjdk.jmh.annotations.*; import org.openjdk.jmh.runner.Runner; import org.openjdk.jmh.runner.RunnerException; import org.openjdk.jmh.runner.options.Options; import org.openjdk.jmh.runner.options.OptionsBuilder; import org.openjdk.jmh.runner.options.TimeValue; import java.util.ArrayList; import java.util.List; import java.util.Random; import java.util.concurrent.TimeUnit; @SuppressWarnings("ResultOfMethodCallIgnored") @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MILLISECONDS) @Warmup(iterations = 2) @Measurement(iterations = 5) @State(Scope.Benchmark) @Fork(value = 1) public class BenchmarkDescriptorDistance { static int NUM_FEATURES = 10000; List<TupleDesc_B> binaryA = new ArrayList<>(); List<TupleDesc_B> binaryB = new ArrayList<>(); HammingTable16 table = new HammingTable16(); @Setup public void setup() { Random rand = new Random(234234); binaryA = new ArrayList<>(); binaryB = new ArrayList<>(); for (int i = 0; i < NUM_FEATURES; i++) { binaryA.add(randomFeature(rand)); binaryB.add(randomFeature(rand)); } } @Benchmark public void hammingTable() { for (int i = 0; i < binaryA.size(); i++) { tableScore(binaryA.get(i), binaryB.get(i)); } } private int tableScore( TupleDesc_B a, TupleDesc_B b ) { int score = 0; for (int i = 0; i < a.data.length; i++) { int dataA = a.data[i]; int dataB = b.data[i]; score += table.lookup((short)dataA, (short)dataB); score += table.lookup((short)(dataA >> 16), (short)(dataB >> 16)); } return score; } @Benchmark public void equationOld() { for (int i = 0; i < binaryA.size(); i++) { ExperimentalDescriptorDistance.hamming(binaryA.get(i), binaryB.get(i)); } } @Benchmark public void equation() { for (int i = 0; i < binaryA.size(); i++) { DescriptorDistance.hamming(binaryA.get(i), binaryB.get(i)); } } private TupleDesc_B randomFeature( Random rand ) { TupleDesc_B feat = new TupleDesc_B(512); for (int j = 0; j < feat.data.length; j++) { feat.data[j] = rand.nextInt(); } return feat; } public static void main( String[] args ) throws RunnerException { Options opt = new OptionsBuilder() .include(BenchmarkDescriptorDistance.class.getSimpleName()) .warmupTime(TimeValue.seconds(1)) .measurementTime(TimeValue.seconds(1)) .build(); new Runner(opt).run(); } }
Java
package adamin90.com.wpp.model.mostsearch; import java.util.ArrayList; import java.util.List; import javax.annotation.Generated; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; @Generated("org.jsonschema2pojo") public class MostSearchData { @SerializedName("data") @Expose private List<Datum> data = new ArrayList<Datum>(); @SerializedName("code") @Expose private Integer code; /** * * @return * The data */ public List<Datum> getData() { return data; } /** * * @param data * The data */ public void setData(List<Datum> data) { this.data = data; } /** * * @return * The code */ public Integer getCode() { return code; } /** * * @param code * The code */ public void setCode(Integer code) { this.code = code; } }
Java
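A short illustrative sketch showing how a Gson-annotated DTO like MostSearchData above is typically populated from a JSON payload; the JSON literal below is made up for the example.

import com.google.gson.Gson;

import adamin90.com.wpp.model.mostsearch.MostSearchData;

public class MostSearchDataExample {
    public static void main(String[] args) {
        // Hypothetical payload shaped like the DTO above.
        String json = "{\"code\":200,\"data\":[]}";
        MostSearchData response = new Gson().fromJson(json, MostSearchData.class);
        System.out.println("code=" + response.getCode()
                + ", items=" + response.getData().size());
    }
}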
<html dir="LTR"> <head> <meta http-equiv="Content-Type" content="text/html; charset=Windows-1252" /> <meta name="vs_targetSchema" content="http://schemas.microsoft.com/intellisense/ie5" /> <title>ConfigFileExtension Property</title> <xml> </xml> <link rel="stylesheet" type="text/css" href="MSDN.css" /> </head> <body id="bodyID" class="dtBODY"> <div id="nsbanner"> <div id="bannerrow1"> <table class="bannerparthead" cellspacing="0"> <tr id="hdr"> <td class="runninghead">Apache log4net™ SDK Documentation - Microsoft .NET Framework 4.0</td> <td class="product"> </td> </tr> </table> </div> <div id="TitleRow"> <h1 class="dtH1">XmlConfiguratorAttribute.ConfigFileExtension Property</h1> </div> </div> <div id="nstext"> <p> Gets or sets the extension of the configuration file. </p> <div class="syntax"> <span class="lang">[Visual Basic]</span> <br />Public Property ConfigFileExtension As <a href="ms-help://MS.NETFrameworkSDKv1.1/cpref/html/frlrfSystemStringClassTopic.htm">String</a></div> <div class="syntax"> <span class="lang">[C#]</span> <br />public <a href="ms-help://MS.NETFrameworkSDKv1.1/cpref/html/frlrfSystemStringClassTopic.htm">string</a> ConfigFileExtension {get; set;}</div> <p> </p> <h4 class="dtH4">Property Value</h4> <p> The extension of the configuration file. </p> <h4 class="dtH4">Remarks</h4> <p> If specified this is the extension for the configuration file. The path to the config file is built by using the <b>application base</b> directory (<a href="ms-help://MS.NETFrameworkSDKv1.1/cpref/html/frlrfSystemAppDomainClassBaseDirectoryTopic.htm">BaseDirectory</a>), the <b>assembly file name</b> and the config file extension. </p> <p> If the <b>ConfigFileExtension</b> is set to <code>MyExt</code> then possible config file names would be: <code>MyConsoleApp.exe.MyExt</code> or <code>MyClassLibrary.dll.MyExt</code>. </p> <p> The <a href="log4net.Config.XmlConfiguratorAttribute.ConfigFile.html">ConfigFile</a> takes priority over the <b>ConfigFileExtension</b>. </p> <h4 class="dtH4">See Also</h4><p><a href="log4net.Config.XmlConfiguratorAttribute.html">XmlConfiguratorAttribute Class</a> | <a href="log4net.Config.html">log4net.Config Namespace</a></p><object type="application/x-oleobject" classid="clsid:1e2a7bd0-dab9-11d0-b93a-00c04fc99f9e" viewastext="true" style="display: none;"><param name="Keyword" value="ConfigFileExtension property"></param><param name="Keyword" value="ConfigFileExtension property, XmlConfiguratorAttribute class"></param><param name="Keyword" value="XmlConfiguratorAttribute.ConfigFileExtension property"></param></object><hr /><div id="footer"><a href='http://logging.apache.org/log4net/'>Copyright 2004-2013 The Apache Software Foundation.</a><br></br>Apache log4net, Apache and log4net are trademarks of The Apache Software Foundation.</div></div> </body> </html>
Java
var activeElements = []; var sleepElements = []; var promises = []; $.ajax("https://osiproghackuc2015.osisoft.com/piwebapi/assetdatabases/D0EgxEhIf8KUieOFdFcX1IWQZ8qIGYDdE0m5aJCwNb4x_gSlVQSVRFUjAwMVxQSUZJVE5FU1M/elements", { type : 'GET', headers: { "Authorization" : "Basic " + btoa("osiproghack\\hackuser051:bO2rA53P2")}, success: function(results){ for (var i = 0; i < results.Items.length; i++) { var item = results.Items[i]; getSubElements(item); } } }).done(function(){ $.when.apply($,promises).done(function(){ spinner.stop(target); var blackout = document.getElementById('blackout'); $('#blackout').css('opacity', '0'); $('#blackout').css('width', '0%'); $('#blackout').css('height', '0%'); }); }); var getSubElements = function(personElement){ promises.push($.ajax("https://osiproghackuc2015.osisoft.com/piwebapi/elements/" + personElement.WebId + "/elements", { type : 'GET', headers: { "Authorization" : "Basic " + btoa("osiproghack\\hackuser051:bO2rA53P2")}, success: function(results){ for (var i = 0; i < results.Items.length; i++) { var innerItem = results.Items[i]; if (innerItem.TemplateName == "Fitbit Activity Template") { getFitbitActivityAttributes({ Person : personElement.Name, Child : "Fitbit Activity", ChildWebId : innerItem.WebId }); } else if (innerItem.TemplateName == "Fitbit Sleep Template") { getFitbitSleepAttributes({ Person : personElement.Name, Child : "Fitbit Sleep", ChildWebId : innerItem.WebId }); } }}})); } var getFitbitActivityAttributes = function(object) { promises.push($.ajax("https://osiproghackuc2015.osisoft.com/piwebapi/elements/" + object.ChildWebId + "/attributes",{ type : 'GET', headers: { "Authorization" : "Basic " + btoa("osiproghack\\hackuser051:bO2rA53P2")}, success: function(results){ object.Attributes = []; activeElements.push(object); for (var i = 0; i < results.Items.length; i++) { var attribute = results.Items[i]; object.Attributes.push({ Attribute : attribute.Name, AttributeWebId : attribute.WebId }); }; } })); } var getFitbitSleepAttributes = function(object) { promises.push($.ajax("https://osiproghackuc2015.osisoft.com/piwebapi/elements/" + object.ChildWebId + "/attributes",{ type : 'GET', headers: { "Authorization" : "Basic " + btoa("osiproghack\\hackuser051:bO2rA53P2")}, success: function(results){ object.Attributes = []; sleepElements.push(object); for (var i = 0; i < results.Items.length; i++) { var attribute = results.Items[i]; object.Attributes.push({ Attribute : attribute.Name, AttributeWebId : attribute.WebId }); }; } })); }
Java
import { Seq, Set as ISet } from 'immutable'; import { atom, unwrap } from '../derivable'; import { equals } from './equals'; describe('util/equals', () => { it('should check equality of primitives', () => { expect(equals(NaN, NaN)).toBe(true); expect(equals(4, 2 + 2)).toBe(true); expect(equals(0, 0)).toBe(true); expect(equals('abcd', 'ab' + 'cd')).toBe(true); }); it('should check identity on ordinary object', () => { expect(equals({}, {})).toBe(false); expect(equals([], [])).toBe(false); const arr: never[] = []; const obj = {}; expect(equals(arr, arr)).toBe(true); expect(equals(obj, obj)).toBe(true); }); it('should check equality on immutable objects', () => { const seq = Seq.Indexed.of(1, 2, 3); const set = ISet.of(1, 2, 3); expect(equals(seq, set)).toBe(false); expect(equals(seq.toSetSeq(), set)).toBe(true); expect(equals(seq, [1, 2, 3])).toBe(false); }); it('should check the equality of derivables', () => { const a = atom('foo'); const b = atom('foo'); const notA = atom('bar'); const aDerivable = a.derive(v => v.toUpperCase()); const bDerivable = b.derive(v => v.toUpperCase()); const withObj1 = atom({ hello: 'world' }); const withObj2 = atom({ hello: 'world' }); expect(equals(a, a)).toBe(true); expect(equals(b, b)).toBe(true); expect(equals(a, notA)).toBe(false); expect(equals(a, b)).toBe(false); expect(equals(aDerivable, bDerivable)).toBe(false); expect(equals(withObj1, withObj1)).toBe(true); expect(equals(withObj1, withObj2)).toBe(false); }); it('should test for reference equality, not derivable value equality', () => { const personA = { name$: atom('Sherlock') }; const personB = { name$: atom('Sherlock') }; const person$ = atom(personA); const nameOfPerson$ = person$.derive(p => p.name$).derive(unwrap).autoCache(); expect(nameOfPerson$.get()).toBe('Sherlock'); person$.set(personB); expect(nameOfPerson$.get()).toBe('Sherlock'); personB.name$.set('Moriarty'); expect(nameOfPerson$.get()).toBe('Moriarty'); }); });
Java
<!DOCTYPE html> <html> <head> <title id="titre"></title> <meta charset="UTF-8"/> <script src="https://code.jquery.com/jquery-latest.min.js"></script> <script src="../../dist/jsRealB.js"></script> <!-- to ease debugging we load each file separately -/-> <script src="../../data/lexicon-fr.js"></script> <script src="../../data/rule-fr.js"></script> <script src="../../data/lexicon-en.js"></script> <script src="../../data/rule-en.js"></script> <script src="../../build/Utils.js"></script> <script src="../../build/Constituent.js"></script> <script src="../../build/Phrase.js"></script> <script src="../../build/Terminal.js"></script> <script src="../../build/Date.js"></script> <script src="../../build/Number.js"></script> <script src="../../build/Warnings.js"></script> <!-/- end of separate loading --> <script> var max=4; function kmsAPied(n){ return NP(NO(n).dOpt({nat:true}),N('kilomètre'), PP(P("à"),NP(N("pied")))); } function ligne(l){ return $("<div/>").css("text-align","center").text(l.toString()); } function refrain(){ var s1=S(NP(D("le"),N("peinture"), PP(P("à"),D("le"),N("huile"))).a(","), S(Pro("ce"), VP(V("être").t("p"),Adv("bien"),A("difficile")))); var s2=S('mais', Pro("ce"), VP(V("être").t("p"), AP("bien plus",A("beau"), SP(Pro("que"), NP(D("le"),N("peinture"), PP(P("à"),D("le"),N("eau")) ) ) ) ) ); return $("<p/>").append(ligne(s1)).append(ligne(s2)); } function generer() { loadFr(); var $body=$("body"); var m1=S(kmsAPied(1)); $("#titre").text(m1); var h1=$("<h1/>").css("text-align","center").text(m1) $body.append(h1); var use=V("user").t("p"); var s1=S(Pro("ça"),use); var s2=S(s1,NP(D("le"),N("soulier").n("p"))); for(var i=1;i<=max;i++){ var kmap=kmsAPied(i).a(","); var $lignes=$("<b/>").append("<p/>"); $lignes.append(ligne(S(kmap,s1,s1))).append(ligne(S(kmap,s2))); $body.append($lignes); $body.append(refrain()); }; $body.append(ligne("...")); }; $(document).ready(function() { generer(); }); </script> </head> <body> </body> </html>
Java
# Config

For each project, a list of branches can be configured to which submits are not allowed until a merge commit has been merged.
Java
// Generated from /POI/java/org/apache/poi/hpsf/VariantBool.java #pragma once #include <fwd-POI.hpp> #include <org/apache/poi/hpsf/fwd-POI.hpp> #include <org/apache/poi/util/fwd-POI.hpp> #include <java/lang/Object.hpp> struct default_init_tag; class poi::hpsf::VariantBool : public virtual ::java::lang::Object { public: typedef ::java::lang::Object super; private: static ::poi::util::POILogger* LOG_; public: /* package */ static constexpr int32_t SIZE { int32_t(2) }; private: bool _value { }; protected: void ctor(); public: /* package */ virtual void read(::poi::util::LittleEndianByteArrayInputStream* lei); virtual bool getValue(); virtual void setValue(bool value); // Generated VariantBool(); protected: VariantBool(const ::default_init_tag&); public: static ::java::lang::Class *class_(); static void clinit(); private: static ::poi::util::POILogger*& LOG(); virtual ::java::lang::Class* getClass0(); };
Java
package com.hangon.saying.viewPager;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AbsListView;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;

import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.ImageLoader;
import com.android.volley.toolbox.ImageRequest;
import com.android.volley.toolbox.NetworkImageView;
import com.example.fd.ourapplication.R;
import com.hangon.common.Constants;
import com.hangon.common.MyApplication;
import com.hangon.common.ViewHolder;
import com.hangon.common.VolleyBitmapCache;

import java.util.ArrayList;
import java.util.List;

/**
 * Created by Administrator on 2016/5/31.
 */
public class GradAdapter extends BaseAdapter implements AbsListView.OnScrollListener {
    Context context;
    List list = new ArrayList();
    private static ImageLoader mImageLoader; // ImageLoader instance, used to initialise the NetworkImageView
    /**
     * Records the height of each child item.
     */
    private int mItemHeight = 0;

    GradAdapter(Context context, List list) {
        this.context = context;
        this.list = list;
        mImageLoader = new ImageLoader(MyApplication.queues, new VolleyBitmapCache()); // create a loader instance; its cache can be customised
    }

    @Override
    public int getCount() {
        return list.size();
    }

    @Override
    public Object getItem(int position) {
        return list.get(position);
    }

    @Override
    public long getItemId(int position) {
        return position;
    }

    ViewGradHolder gradHolder;

    public View getView(int position, View convertView, ViewGroup parent) {
        if (convertView == null) {
            gradHolder = new ViewGradHolder();
            convertView = LayoutInflater.from(context).inflate(R.layout.carlife_grade_content, null);
            gradHolder.img = (ImageView) convertView.findViewById(R.id.item_grida_image);
            convertView.setTag(gradHolder);
        } else {
            gradHolder = (ViewGradHolder) convertView.getTag();
        }
        NetworkImageView networkImageView = (NetworkImageView) gradHolder.img;
        // set the placeholder image
        networkImageView.setDefaultImageResId(R.drawable.default_photo);
        // set the image shown when loading fails
        networkImageView.setErrorImageResId(R.drawable.error_photo);
        if (list.get(position) != null && !list.get(position).equals("")) {
            //getImag(list.get(position).toString());
            // start loading the network image
            networkImageView.setImageUrl(Constants.LOAD_SAYING_IMG_URL + list.get(position), mImageLoader);
        }
        return convertView;
    }

    class ViewGradHolder {
        ImageView img;
    }

    private void getImag(String path) {
        String url = Constants.LOAD_SAYING_IMG_URL + path;
        ImageRequest request = new ImageRequest(url, new Response.Listener<Bitmap>() {
            @Override
            public void onResponse(Bitmap bitmap) {
                gradHolder.img.setImageBitmap(bitmap);
            }
        }, 0, 0, Bitmap.Config.ARGB_8888, new Response.ErrorListener() {
            @Override
            public void onErrorResponse(VolleyError volleyError) {
                Toast.makeText(context, "Failed to load the saying image", Toast.LENGTH_SHORT).show();
            }
        });
        MyApplication.getHttpQueues().add(request);
    }

    /**
     * Sets the height of each item.
     */
    public void setItemHeight(int height) {
        if (height == mItemHeight) {
            return;
        }
        mItemHeight = height;
        notifyDataSetChanged();
    }

    @Override
    public void onScrollStateChanged(AbsListView view, int scrollState) {
        // Only download images while the GridView is idle; cancel all running download tasks while it is scrolling
        if (scrollState == SCROLL_STATE_IDLE) {
            // loadBitmaps(mFirstVisibleItem, mVisibleItemCount);
        } else {
            // cancelAllTasks();
        }
    }

    @Override
    public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) {
    }
}
Java
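A rough usage sketch for the adapter above, assuming an Activity in the same package with a GridView; the R.id.photo_grid id and the imagePaths contents are hypothetical, and MyApplication.queues is assumed to be initialised before the adapter is created.

// Inside an Activity's onCreate(), after setContentView(); ids and data are placeholders.
GridView grid = (GridView) findViewById(R.id.photo_grid);
List imagePaths = new ArrayList();
imagePaths.add("example.jpg"); // the adapter prefixes this with Constants.LOAD_SAYING_IMG_URL

GradAdapter adapter = new GradAdapter(this, imagePaths);
grid.setAdapter(adapter);
grid.setOnScrollListener(adapter); // the adapter also implements AbsListView.OnScrollListener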
/* * MaiKe Labs (2016 - 2026) * * Written by Jack Tan <jiankemeng@gmail.com> * * Connect VCC of the SSD1306 OLED to 3.3V * Connect GND to Ground * Connect SCL to i2c clock - GPIO21 * Connect SDA to i2c data - GPIO22 * Connect DC to GND (The scanned i2c address is 0x3C) * */ #include <stdio.h> #include "freertos/FreeRTOS.h" #include "freertos/task.h" #include "esp_system.h" #include "nvs_flash.h" #include "U8glib.h" U8GLIB_SSD1306_128X64 u8g(U8G_I2C_OPT_NONE); // I2C / TWI void draw(void) { u8g.setFont(u8g_font_unifont); u8g.drawStr(0, 22, "Hello World!"); } void ssd1306_task(void *pvParameter) { // assign default color value if (u8g.getMode() == U8G_MODE_R3G3B2) { u8g.setColorIndex(255); // white } else if (u8g.getMode() == U8G_MODE_GRAY2BIT) { u8g.setColorIndex(3); // max intensity } else if (u8g.getMode() == U8G_MODE_BW) { u8g.setColorIndex(1); // pixel on } else if (u8g.getMode() == U8G_MODE_HICOLOR) { u8g.setHiColorByRGB(255, 255, 255); } while(1) { // picture loop u8g.firstPage(); do { draw(); } while (u8g.nextPage()); draw(); vTaskDelay(1000 / portTICK_RATE_MS); } } extern "C" void app_main() { nvs_flash_init(); printf("Welcome to Noduino!\r\n"); printf("Start to test SSD1306 OLED!\r\n"); xTaskCreate(&ssd1306_task, "ssd1306_task", 2048, NULL, 5, NULL); }
Java
<table border="1" id="table1" style="border-collapse: collapse"> <tr> <td height="25" align="center"><span style="font-size: 16px">三国</span></td> <td height="25" align="center"><span style="font-size: 16px">公元245年</span></td> <td height="25" align="center"><span style="font-size: 16px">乙丑</span></td> <td height="25px" align="center"><span style="font-size: 16px">正始六年</span></td> </tr> <tr> <td colspan="4"> <table border="0" width="100%"> <tr> <td valign="top"> <b>历史纪事</b> </td> <td> <div>吴太子和与鲁王霸不睦   初,吴帝孙权使吴太子和与弟鲁王霸同宫居隹,礼秩相同,群臣多以为不妥。权乃令二人分宫,二子因而有隙。鲁王霸曲意结交当时名士,杨竺、全寄、吴安、孙奇等均为其党。于是由二宫僚属、侍御、宾客起,分为两党,延至大臣。权闻之,以需专心精学为由,禁断二子宾客往来。全公主(孙鲁班、全琮妻)与太子和母王夫人有隙,亦数次谮毁太子,太子宠益衰。陆逊上疏,认为太子为正统,鲁王为藩臣,当使有别。权不悦。太常顾谭亦上疏陈嫡庶之别,于是鲁王与谭有隙。芍陂(今安徽泰县南)战后,全琮(全寄父)子端、绪与顾弟承及子休争功,谮毁二人于孙权,权徙谭、承、休于交州,又追赐休死,吴赤乌八年(245)初,太子太傅吾粲请使鲁王出镇夏口(今湖北武汉),令杨竺等不得在京师,并数次与陆逊通消息;鲁王与杨竺共谮之,权怒,收粲下狱,诛。孙权又数次遣中使责问陆逊,吴赤乌八年二月,陆逊愤恨而卒。 马茂谋杀孙权不遂   吴赤乌八年(245)七月,吴将马茂与兼符节令朱贞、无难督虞钦、牙门将朱志等合谋,欲乘孙权与公卿诸将入苑射猎,权在苑中,而众臣在门外未入时,朱贞持节宣诏,尽收众臣,而由马茂入苑击权,分据宫中及石头坞,遣人报魏。事泄,均被族诛。马茂原为魏钟离(今安徽凤阳东北)长,叛降吴,为吴征西将军,领九江太守、外部督,封侯,领兵千人。 吴凿破岗渎   吴赤乌八年(245)八月,遣校尉陈勋率屯田兵及作士三万人,凿破岗渎,开通从句容(今江苏),以南向东至云阳(今江苏丹阳)西城的河道,使航路可从建业直通吴会。并开市以会商旅。 魏诏学者课试王郎《易传》   魏正始六年(245)十二月初五(辛亥),诏以故司徒王郎所作《易传》课试学者。</div></td> </tr> </table> </td> </tr> <tr> <td colspan="4"> <table border="0" width="100%"> <tr> <td valign="top"> <b>文化纪事</b> </td> <td> <div>缪袭卒   缪袭(186——245)字熙伯,东海兰陵(今山东苍山兰陵镇)人。曾任职御史大夫府,官至尚书,光禄勋。历仕魏四世,有才学,多所著述。魏改汉乐府十二曲为魏鼓吹曲,由袭作词,为操、丕、睿颂功德;诗作以《挽歌》较著名,另存《喜霁赋》、《青龙赋》等文数篇。原有集五卷,均佚。</div></td> </tr> </table> </td> </tr> <tr> <td colspan="4"> <table border="0" width="100%"> <tr> <td valign="top"> <b>杂谭逸事</b> </td> <td> <div>陆逊卒   陆逊(183——245),本名议,字伯言,吴郡吴(今江苏苏州)人。世为江东大族,孙策婿。初为孙权幕府,后累迁为右部督,攻丹杨山越,得精兵数万人,汉建安二十四年(219),与吕蒙定袭取荆州之计,擒杀关羽。吴黄武元年(222),陆逊为大都督,率兵五万西拒刘备,坚守七八个月不战,等蜀军疲,乃顺风放火,取得夷陵之战的胜利。领荆州牧,封江陵侯。吴黄武七年,又大破魏大司马曹休于石亭(今安徽怀宁、桐城间)。吴蜀连和,孙权每与蜀书,常先交陆逊,有所不安,便令改定。吴黄龙元年(229)拜上大将军、右都护。同年,孙权迁都建业,使逊留武昌,辅太子登。吴赤乌七年(244)代顾雍为丞相。仍留驻武昌。时吴太子和与鲁王霸争位,逊数上疏陈嫡庶之分,权不听。逊外甥顾谭、顾承、姚信亦以亲附太子遭流放。逊卒后,孙权以杨竺所白陆逊二十事一一问陆逊子抗,抗事事条答,权意乃稍解。 赵俨卒   赵俨(171——245),字伯然,颍川阳翟(今河南禹县)人。东汉末避乱荆州。建安二年(197),投曹操,为司空掾属主簿。从曹操征荆州。建安二十四年,以议郎与徐晃至樊城助曹仁拒关羽。曹丕即位,俨为侍中,领河东(今山西夏县东北)太守。曹休拒孙权,以俨为军师,曹睿即位,进封都乡侯,齐王曹芳即位,以俨都督雍、凉诸军事。魏正始四年(243)老病求还,六年,迁司空。卒谥穆侯。赵俨与同郡辛毗、陈群、杜袭齐名,号为辛、陈、杜、赵。 蒋琬卒   蜀延熙八年(一说为延熙九年,245——246)十一月,大司马蒋碗卒。蜀帝刘禅自摄国事。蒋琬(?——245),字公琰,零陵湘乡人。以荆州书佐随刘备入蜀,除广都(今四川成都东南,一说今四川双流)长。刘备偶至广都,见蒋琬不理公事,时又酒醉,大怒,欲罪之。诸葛亮认为蒋琬为社稷之器、非百里之才,为琬求请。刘备堍亮,但免蒋琬官而已。不久,又除什邡(今四川)令。刘备称汉中王,琬入为尚书郎。蜀建兴元年(223),琬为丞相东曹掾,迁为参军。八年,为长史,加抚军将军。诸葛亮数次北代,琬常足兵足食以相供给。亮卒,以琬为尚书令,加行都护,假节,领益州牧,迁大将军,录尚书事,封字阳亭侯。蜀延熙二年(239),加为大司马。卒谥恭侯。 董允卒   蜀延熙八年(一说为延熙九年,245——246),蜀守尚书令董允卒。董允(?——245),字休昭,南郡枝江(今湖北)人。刘备立太子,允为太子舍人,徙太子洗马。后主刘祥即位,迁黄门侍郎。诸葛亮将北伐,驻汉中,虑后主年轻,是非不别,上疏请以允任宫省事。迁侍中,领虎贲中郎将,统令宿亲兵。董允事事防制,匡正后主。后主常欲采择宫女,允以为古时天子后妃之数不过十二,今后宫嫔、嫱已具,不宜增加,终不听。后主畏怕之。及后主渐长,宠宦人黄皓,允常正色语后主,并多次责问黄皓,皓畏允,不敢为非。终允之世,皓位不过黄门丞。蜀延熙六年(243),加辅国将军,七年,以侍中守尚书令,为大将军费祎副贰。卒后,黄皓渐操弄权柄,终至灭国。蜀人无不追思允。</div></td> </tr> </table> </td> </tr> <tr> <td colspan="4"> <table border="0" width="100%"> <tr> <td valign="top"> <b>注释</b></td> <td> <div>延熙八年 赤乌八年</div></td> </tr> </table> </td> </tr> <tr> </tr></table>
Java
# encoding: UTF-8 # Copyright 2012 Twitter, Inc # http://www.apache.org/licenses/LICENSE-2.0 # Documentation: https://github.com/hunspell/hyphen/blob/21127cc8493a68d4fe9adbb71377b469b4f2b550/doc/tb87nemeth.pdf module TwitterCldr module Shared class Hyphenator class UnsupportedLocaleError < StandardError; end BASE_RESOURCE_PATH = %w(shared hyphenation).freeze DEFAULT_LEFT_HYPHEN_MIN = 2 DEFAULT_RIGHT_HYPHEN_MIN = 2 DEFAULT_NO_HYPHEN = "-'’".freeze class << self def get(locale) locale = find_supported_locale(locale) unless locale raise UnsupportedLocaleError, "'#{locale}' is not a supported hyphenation locale" end cache[locale] ||= begin resource = resource_for(locale) new(resource[:rules], locale, resource[:options]) end end def supported_locale?(locale) !!find_supported_locale(locale) end def supported_locales @supported_locales ||= begin absolute_resource_path = TwitterCldr.absolute_resource_path( File.join(BASE_RESOURCE_PATH) ) files = Dir.glob(File.join(absolute_resource_path, '*.yml')) files.map { |f| File.basename(f).chomp('.yml') } end end private def find_supported_locale(locale) maximized_locale = Locale.parse(locale.to_s).maximize maximized_locale.permutations('-').find do |locale_candidate| TwitterCldr.resource_exists?( *BASE_RESOURCE_PATH, locale_candidate ) end end def cache @cache ||= {} end def resource_for(locale) TwitterCldr.get_resource(*BASE_RESOURCE_PATH, locale) end end attr_reader :rules, :locale, :options, :trie def initialize(rules, locale, options) @rules = rules @locale = locale @options = options @trie = build_trie_from(rules) end # 0x00AD is a soft hyphen def hyphenate(text, hyphen = "\u00AD") each_chunk(text).to_a.join(hyphen) end def each_chunk(text) if block_given? last_pos = 0 each_position(text) do |pos| yield text[last_pos...pos].tap { last_pos = pos } end if last_pos < text.size yield text[last_pos..text.size] end else to_enum(__method__, text) end end def each_position(text) if block_given? text = ".#{text}." break_weights = break_weights_for(text) left = left_hyphen_min right = text.size - right_hyphen_min - 2 (left...right).each do |idx| yield idx if break_weights[idx].odd? end else to_enum(__method__, text) end end private def break_weights_for(text) break_weights = Array.new(text.size - 1, 0) text.each_char.with_index do |char, idx| subtrie = trie.root counter = idx while subtrie subtrie = subtrie.child(text[counter]) counter += 1 if subtrie && subtrie.has_value? update_break_weights(subtrie.value, break_weights, idx) end end end remove_illegal_hyphens(break_weights, text) end def update_break_weights(pattern, break_weights, start_idx) pattern_idx = 0 pattern.each_char do |segment| if segment =~ /\d/ int_seg = segment.to_i idx = (start_idx + pattern_idx) - 1 break if idx >= break_weights.size break_weights[idx] = if break_weights[idx] > int_seg break_weights[idx] else int_seg end else pattern_idx += 1 end end end def remove_illegal_hyphens(break_weights, text) break_weights.map.with_index do |break_weight, idx| next break_weight if idx.zero? next 0 if no_hyphen.include?(text[idx - 1]) break_weight end end def left_hyphen_min @left_hyphen_min ||= options.fetch(:lefthyphenmin, DEFAULT_LEFT_HYPHEN_MIN).to_i end def right_hyphen_min @right_hyphen_min ||= options.fetch(:righthyphenmin, DEFAULT_RIGHT_HYPHEN_MIN).to_i end def no_hyphen @no_hyphen ||= options.fetch(:nohyphen, DEFAULT_NO_HYPHEN) end def build_trie_from(rules) TwitterCldr::Utils::Trie.new.tap do |trie| rules.each do |rule| trie.add(rule.gsub(/\d/, '').each_char, rule) end end end end end end
Java
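The Ruby Hyphenator above implements Liang-style pattern hyphenation: digits embedded in the patterns are merged into per-gap break weights, the maximum digit wins at each gap, and odd weights mark legal hyphen points subject to the left/right minima. Below is a compact Java sketch of that core idea with a made-up toy pattern set; it is an illustration of the algorithm, not a port of the class or its CLDR resources.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class HyphenSketch {
    // "hy3ph" means: weight 3 in the gap after "hy" whenever the letters "hyph" match.
    static Map<String, int[]> compile(List<String> patterns) {
        Map<String, int[]> out = new HashMap<>();
        for (String p : patterns) {
            String letters = p.replaceAll("\\d", "");
            int[] weights = new int[letters.length() + 1];
            int li = 0;
            for (char c : p.toCharArray()) {
                if (Character.isDigit(c)) { weights[li] = c - '0'; } else { li++; }
            }
            out.put(letters, weights);
        }
        return out;
    }

    // Returns the character offsets after which a hyphen may be inserted.
    static List<Integer> breakPositions(String word, Map<String, int[]> pats, int leftMin, int rightMin) {
        String w = "." + word + ".";              // dots mark the word boundaries
        int[] weights = new int[w.length() + 1];  // one weight per gap in the dotted word
        for (int i = 0; i < w.length(); i++) {
            for (int j = i + 1; j <= w.length(); j++) {
                int[] pw = pats.get(w.substring(i, j));
                if (pw == null) { continue; }
                for (int k = 0; k < pw.length; k++) {
                    weights[i + k] = Math.max(weights[i + k], pw[k]); // keep the maximum digit per gap
                }
            }
        }
        List<Integer> positions = new ArrayList<>();
        for (int i = leftMin; i <= word.length() - rightMin; i++) {
            if (weights[i + 1] % 2 == 1) { positions.add(i); }        // odd weight = legal break
        }
        return positions;
    }

    public static void main(String[] args) {
        Map<String, int[]> pats = compile(Arrays.asList("hy3ph", "he2n"));
        System.out.println(breakPositions("hyphen", pats, 2, 2));    // [2] -> "hy-phen"
    }
}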
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ #include <aws/mediaconvert/model/DashIsoGroupSettings.h> #include <aws/core/utils/json/JsonSerializer.h> #include <utility> using namespace Aws::Utils::Json; using namespace Aws::Utils; namespace Aws { namespace MediaConvert { namespace Model { DashIsoGroupSettings::DashIsoGroupSettings() : m_additionalManifestsHasBeenSet(false), m_audioChannelConfigSchemeIdUri(DashIsoGroupAudioChannelConfigSchemeIdUri::NOT_SET), m_audioChannelConfigSchemeIdUriHasBeenSet(false), m_baseUrlHasBeenSet(false), m_destinationHasBeenSet(false), m_destinationSettingsHasBeenSet(false), m_encryptionHasBeenSet(false), m_fragmentLength(0), m_fragmentLengthHasBeenSet(false), m_hbbtvCompliance(DashIsoHbbtvCompliance::NOT_SET), m_hbbtvComplianceHasBeenSet(false), m_imageBasedTrickPlay(DashIsoImageBasedTrickPlay::NOT_SET), m_imageBasedTrickPlayHasBeenSet(false), m_imageBasedTrickPlaySettingsHasBeenSet(false), m_minBufferTime(0), m_minBufferTimeHasBeenSet(false), m_minFinalSegmentLength(0.0), m_minFinalSegmentLengthHasBeenSet(false), m_mpdProfile(DashIsoMpdProfile::NOT_SET), m_mpdProfileHasBeenSet(false), m_ptsOffsetHandlingForBFrames(DashIsoPtsOffsetHandlingForBFrames::NOT_SET), m_ptsOffsetHandlingForBFramesHasBeenSet(false), m_segmentControl(DashIsoSegmentControl::NOT_SET), m_segmentControlHasBeenSet(false), m_segmentLength(0), m_segmentLengthHasBeenSet(false), m_segmentLengthControl(DashIsoSegmentLengthControl::NOT_SET), m_segmentLengthControlHasBeenSet(false), m_writeSegmentTimelineInRepresentation(DashIsoWriteSegmentTimelineInRepresentation::NOT_SET), m_writeSegmentTimelineInRepresentationHasBeenSet(false) { } DashIsoGroupSettings::DashIsoGroupSettings(JsonView jsonValue) : m_additionalManifestsHasBeenSet(false), m_audioChannelConfigSchemeIdUri(DashIsoGroupAudioChannelConfigSchemeIdUri::NOT_SET), m_audioChannelConfigSchemeIdUriHasBeenSet(false), m_baseUrlHasBeenSet(false), m_destinationHasBeenSet(false), m_destinationSettingsHasBeenSet(false), m_encryptionHasBeenSet(false), m_fragmentLength(0), m_fragmentLengthHasBeenSet(false), m_hbbtvCompliance(DashIsoHbbtvCompliance::NOT_SET), m_hbbtvComplianceHasBeenSet(false), m_imageBasedTrickPlay(DashIsoImageBasedTrickPlay::NOT_SET), m_imageBasedTrickPlayHasBeenSet(false), m_imageBasedTrickPlaySettingsHasBeenSet(false), m_minBufferTime(0), m_minBufferTimeHasBeenSet(false), m_minFinalSegmentLength(0.0), m_minFinalSegmentLengthHasBeenSet(false), m_mpdProfile(DashIsoMpdProfile::NOT_SET), m_mpdProfileHasBeenSet(false), m_ptsOffsetHandlingForBFrames(DashIsoPtsOffsetHandlingForBFrames::NOT_SET), m_ptsOffsetHandlingForBFramesHasBeenSet(false), m_segmentControl(DashIsoSegmentControl::NOT_SET), m_segmentControlHasBeenSet(false), m_segmentLength(0), m_segmentLengthHasBeenSet(false), m_segmentLengthControl(DashIsoSegmentLengthControl::NOT_SET), m_segmentLengthControlHasBeenSet(false), m_writeSegmentTimelineInRepresentation(DashIsoWriteSegmentTimelineInRepresentation::NOT_SET), m_writeSegmentTimelineInRepresentationHasBeenSet(false) { *this = jsonValue; } DashIsoGroupSettings& DashIsoGroupSettings::operator =(JsonView jsonValue) { if(jsonValue.ValueExists("additionalManifests")) { Array<JsonView> additionalManifestsJsonList = jsonValue.GetArray("additionalManifests"); for(unsigned additionalManifestsIndex = 0; additionalManifestsIndex < additionalManifestsJsonList.GetLength(); ++additionalManifestsIndex) { 
m_additionalManifests.push_back(additionalManifestsJsonList[additionalManifestsIndex].AsObject()); } m_additionalManifestsHasBeenSet = true; } if(jsonValue.ValueExists("audioChannelConfigSchemeIdUri")) { m_audioChannelConfigSchemeIdUri = DashIsoGroupAudioChannelConfigSchemeIdUriMapper::GetDashIsoGroupAudioChannelConfigSchemeIdUriForName(jsonValue.GetString("audioChannelConfigSchemeIdUri")); m_audioChannelConfigSchemeIdUriHasBeenSet = true; } if(jsonValue.ValueExists("baseUrl")) { m_baseUrl = jsonValue.GetString("baseUrl"); m_baseUrlHasBeenSet = true; } if(jsonValue.ValueExists("destination")) { m_destination = jsonValue.GetString("destination"); m_destinationHasBeenSet = true; } if(jsonValue.ValueExists("destinationSettings")) { m_destinationSettings = jsonValue.GetObject("destinationSettings"); m_destinationSettingsHasBeenSet = true; } if(jsonValue.ValueExists("encryption")) { m_encryption = jsonValue.GetObject("encryption"); m_encryptionHasBeenSet = true; } if(jsonValue.ValueExists("fragmentLength")) { m_fragmentLength = jsonValue.GetInteger("fragmentLength"); m_fragmentLengthHasBeenSet = true; } if(jsonValue.ValueExists("hbbtvCompliance")) { m_hbbtvCompliance = DashIsoHbbtvComplianceMapper::GetDashIsoHbbtvComplianceForName(jsonValue.GetString("hbbtvCompliance")); m_hbbtvComplianceHasBeenSet = true; } if(jsonValue.ValueExists("imageBasedTrickPlay")) { m_imageBasedTrickPlay = DashIsoImageBasedTrickPlayMapper::GetDashIsoImageBasedTrickPlayForName(jsonValue.GetString("imageBasedTrickPlay")); m_imageBasedTrickPlayHasBeenSet = true; } if(jsonValue.ValueExists("imageBasedTrickPlaySettings")) { m_imageBasedTrickPlaySettings = jsonValue.GetObject("imageBasedTrickPlaySettings"); m_imageBasedTrickPlaySettingsHasBeenSet = true; } if(jsonValue.ValueExists("minBufferTime")) { m_minBufferTime = jsonValue.GetInteger("minBufferTime"); m_minBufferTimeHasBeenSet = true; } if(jsonValue.ValueExists("minFinalSegmentLength")) { m_minFinalSegmentLength = jsonValue.GetDouble("minFinalSegmentLength"); m_minFinalSegmentLengthHasBeenSet = true; } if(jsonValue.ValueExists("mpdProfile")) { m_mpdProfile = DashIsoMpdProfileMapper::GetDashIsoMpdProfileForName(jsonValue.GetString("mpdProfile")); m_mpdProfileHasBeenSet = true; } if(jsonValue.ValueExists("ptsOffsetHandlingForBFrames")) { m_ptsOffsetHandlingForBFrames = DashIsoPtsOffsetHandlingForBFramesMapper::GetDashIsoPtsOffsetHandlingForBFramesForName(jsonValue.GetString("ptsOffsetHandlingForBFrames")); m_ptsOffsetHandlingForBFramesHasBeenSet = true; } if(jsonValue.ValueExists("segmentControl")) { m_segmentControl = DashIsoSegmentControlMapper::GetDashIsoSegmentControlForName(jsonValue.GetString("segmentControl")); m_segmentControlHasBeenSet = true; } if(jsonValue.ValueExists("segmentLength")) { m_segmentLength = jsonValue.GetInteger("segmentLength"); m_segmentLengthHasBeenSet = true; } if(jsonValue.ValueExists("segmentLengthControl")) { m_segmentLengthControl = DashIsoSegmentLengthControlMapper::GetDashIsoSegmentLengthControlForName(jsonValue.GetString("segmentLengthControl")); m_segmentLengthControlHasBeenSet = true; } if(jsonValue.ValueExists("writeSegmentTimelineInRepresentation")) { m_writeSegmentTimelineInRepresentation = DashIsoWriteSegmentTimelineInRepresentationMapper::GetDashIsoWriteSegmentTimelineInRepresentationForName(jsonValue.GetString("writeSegmentTimelineInRepresentation")); m_writeSegmentTimelineInRepresentationHasBeenSet = true; } return *this; } JsonValue DashIsoGroupSettings::Jsonize() const { JsonValue payload; if(m_additionalManifestsHasBeenSet) 
{ Array<JsonValue> additionalManifestsJsonList(m_additionalManifests.size()); for(unsigned additionalManifestsIndex = 0; additionalManifestsIndex < additionalManifestsJsonList.GetLength(); ++additionalManifestsIndex) { additionalManifestsJsonList[additionalManifestsIndex].AsObject(m_additionalManifests[additionalManifestsIndex].Jsonize()); } payload.WithArray("additionalManifests", std::move(additionalManifestsJsonList)); } if(m_audioChannelConfigSchemeIdUriHasBeenSet) { payload.WithString("audioChannelConfigSchemeIdUri", DashIsoGroupAudioChannelConfigSchemeIdUriMapper::GetNameForDashIsoGroupAudioChannelConfigSchemeIdUri(m_audioChannelConfigSchemeIdUri)); } if(m_baseUrlHasBeenSet) { payload.WithString("baseUrl", m_baseUrl); } if(m_destinationHasBeenSet) { payload.WithString("destination", m_destination); } if(m_destinationSettingsHasBeenSet) { payload.WithObject("destinationSettings", m_destinationSettings.Jsonize()); } if(m_encryptionHasBeenSet) { payload.WithObject("encryption", m_encryption.Jsonize()); } if(m_fragmentLengthHasBeenSet) { payload.WithInteger("fragmentLength", m_fragmentLength); } if(m_hbbtvComplianceHasBeenSet) { payload.WithString("hbbtvCompliance", DashIsoHbbtvComplianceMapper::GetNameForDashIsoHbbtvCompliance(m_hbbtvCompliance)); } if(m_imageBasedTrickPlayHasBeenSet) { payload.WithString("imageBasedTrickPlay", DashIsoImageBasedTrickPlayMapper::GetNameForDashIsoImageBasedTrickPlay(m_imageBasedTrickPlay)); } if(m_imageBasedTrickPlaySettingsHasBeenSet) { payload.WithObject("imageBasedTrickPlaySettings", m_imageBasedTrickPlaySettings.Jsonize()); } if(m_minBufferTimeHasBeenSet) { payload.WithInteger("minBufferTime", m_minBufferTime); } if(m_minFinalSegmentLengthHasBeenSet) { payload.WithDouble("minFinalSegmentLength", m_minFinalSegmentLength); } if(m_mpdProfileHasBeenSet) { payload.WithString("mpdProfile", DashIsoMpdProfileMapper::GetNameForDashIsoMpdProfile(m_mpdProfile)); } if(m_ptsOffsetHandlingForBFramesHasBeenSet) { payload.WithString("ptsOffsetHandlingForBFrames", DashIsoPtsOffsetHandlingForBFramesMapper::GetNameForDashIsoPtsOffsetHandlingForBFrames(m_ptsOffsetHandlingForBFrames)); } if(m_segmentControlHasBeenSet) { payload.WithString("segmentControl", DashIsoSegmentControlMapper::GetNameForDashIsoSegmentControl(m_segmentControl)); } if(m_segmentLengthHasBeenSet) { payload.WithInteger("segmentLength", m_segmentLength); } if(m_segmentLengthControlHasBeenSet) { payload.WithString("segmentLengthControl", DashIsoSegmentLengthControlMapper::GetNameForDashIsoSegmentLengthControl(m_segmentLengthControl)); } if(m_writeSegmentTimelineInRepresentationHasBeenSet) { payload.WithString("writeSegmentTimelineInRepresentation", DashIsoWriteSegmentTimelineInRepresentationMapper::GetNameForDashIsoWriteSegmentTimelineInRepresentation(m_writeSegmentTimelineInRepresentation)); } return payload; } } // namespace Model } // namespace MediaConvert } // namespace Aws
Java
/** * Copyright 2013 Agustín Miura <"agustin.miura@gmail.com"> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package ar.com.imperium.common.security; import org.springframework.stereotype.Component; @Component("dummyHashService") public class DummyHashServiceImpl implements IHashService { @Override public String hashString(String input) throws Exception { return input; } }
Java
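Since the class above is deliberately a pass-through (useful for tests and wiring), here is a hedged sketch of what a non-dummy IHashService could look like using the standard JDK MessageDigest API. The bean name, class name, and choice of SHA-256 are assumptions for illustration, not part of the original project.

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;

import org.springframework.stereotype.Component;

@Component("sha256HashService") // hypothetical bean name
public class Sha256HashServiceImpl implements IHashService {
    @Override
    public String hashString(String input) throws Exception {
        MessageDigest digest = MessageDigest.getInstance("SHA-256");
        byte[] bytes = digest.digest(input.getBytes(StandardCharsets.UTF_8));
        StringBuilder hex = new StringBuilder(bytes.length * 2);
        for (byte b : bytes) {
            hex.append(String.format("%02x", b)); // lower-case hex encoding
        }
        return hex.toString();
    }
}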
(function(jQuery) { "use strict"; var control = Echo.Control.manifest("Echo.Tests.Controls.TestControl"); if (Echo.Control.isDefined(control)) return; control.init = function() { if (!Echo.Variables) { Echo.Variables = {}; } Echo.Variables.TestControl = "production"; this.ready(); }; control.config = {}; control.templates.main = ""; Echo.Control.create(control); })(Echo.jQuery);
Java
<?php namespace DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRBackboneElement\FHIREncounter; /*! * This class was generated with the PHPFHIR library (https://github.com/dcarbone/php-fhir) using * class definitions from HL7 FHIR (https://www.hl7.org/fhir/) * * Class creation date: December 26th, 2019 15:44+0000 * * PHPFHIR Copyright: * * Copyright 2016-2019 Daniel Carbone (daniel.p.carbone@gmail.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * * FHIR Copyright Notice: * * Copyright (c) 2011+, HL7, Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name of HL7 nor the names of its contributors may be used to * endorse or promote products derived from this software without specific * prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. * * * Generated on Fri, Nov 1, 2019 09:29+1100 for FHIR v4.0.1 * * Note: the schemas & schematrons do not contain all of the rules about what makes resources * valid. Implementers will still need to be familiar with the content of the specification and with * any profiles that apply to the resources in order to make a conformant implementation. * */ use DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRBackboneElement; use DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRCodeableConcept; use DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRPositiveInt; use DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRReference; use DCarbone\PHPFHIRGenerated\R4\PHPFHIRConstants; use DCarbone\PHPFHIRGenerated\R4\PHPFHIRTypeInterface; /** * An interaction between a patient and healthcare provider(s) for the purpose of * providing healthcare service(s) or assessing the health status of a patient. 
* * Class FHIREncounterDiagnosis * @package \DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRBackboneElement\FHIREncounter */ class FHIREncounterDiagnosis extends FHIRBackboneElement { // name of FHIR type this class describes const FHIR_TYPE_NAME = PHPFHIRConstants::TYPE_NAME_ENCOUNTER_DOT_DIAGNOSIS; const FIELD_CONDITION = 'condition'; const FIELD_RANK = 'rank'; const FIELD_RANK_EXT = '_rank'; const FIELD_USE = 'use'; /** @var string */ private $_xmlns = 'http://hl7.org/fhir'; /** * A reference from one resource to another. * If the element is present, it must have a value for at least one of the defined * elements, an \@id referenced from the Narrative, or extensions * * Reason the encounter takes place, as specified using information from another * resource. For admissions, this is the admission diagnosis. The indication will * typically be a Condition (with other resources referenced in the * evidence.detail), or a Procedure. * * @var null|\DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRReference */ protected $condition = null; /** * An integer with a value that is positive (e.g. >0) * If the element is present, it must have either a \@value, an \@id referenced from * the Narrative, or extensions * * Ranking of the diagnosis (for each role type). * * @var null|\DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRPositiveInt */ protected $rank = null; /** * A concept that may be defined by a formal reference to a terminology or ontology * or may be provided by text. * If the element is present, it must have a value for at least one of the defined * elements, an \@id referenced from the Narrative, or extensions * * Role that this diagnosis has within the encounter (e.g. admission, billing, * discharge …). * * @var null|\DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRCodeableConcept */ protected $use = null; /** * Validation map for fields in type Encounter.Diagnosis * @var array */ private static $_validationRules = [ ]; /** * FHIREncounterDiagnosis Constructor * @param null|array $data */ public function __construct($data = null) { if (null === $data || [] === $data) { return; } if (!is_array($data)) { throw new \InvalidArgumentException(sprintf( 'FHIREncounterDiagnosis::_construct - $data expected to be null or array, %s seen', gettype($data) )); } parent::__construct($data); if (isset($data[self::FIELD_CONDITION])) { if ($data[self::FIELD_CONDITION] instanceof FHIRReference) { $this->setCondition($data[self::FIELD_CONDITION]); } else { $this->setCondition(new FHIRReference($data[self::FIELD_CONDITION])); } } if (isset($data[self::FIELD_RANK]) || isset($data[self::FIELD_RANK_EXT])) { if (isset($data[self::FIELD_RANK])) { $value = $data[self::FIELD_RANK]; } else { $value = null; } if (isset($data[self::FIELD_RANK_EXT]) && is_array($data[self::FIELD_RANK_EXT])) { $ext = $data[self::FIELD_RANK_EXT]; } else { $ext = []; } if (null !== $value) { if ($value instanceof FHIRPositiveInt) { $this->setRank($value); } else if (is_array($value)) { $this->setRank(new FHIRPositiveInt(array_merge($ext, $value))); } else { $this->setRank(new FHIRPositiveInt([FHIRPositiveInt::FIELD_VALUE => $value] + $ext)); } } else if ([] !== $ext) { $this->setRank(new FHIRPositiveInt($ext)); } } if (isset($data[self::FIELD_USE])) { if ($data[self::FIELD_USE] instanceof FHIRCodeableConcept) { $this->setUse($data[self::FIELD_USE]); } else { $this->setUse(new FHIRCodeableConcept($data[self::FIELD_USE])); } } } /** * @return string */ public function _getFHIRTypeName() { return self::FHIR_TYPE_NAME; } /** * @return string */ public 
function _getFHIRXMLElementDefinition() { $xmlns = $this->_getFHIRXMLNamespace(); if (null !== $xmlns) { $xmlns = " xmlns=\"{$xmlns}\""; } return "<EncounterDiagnosis{$xmlns}></EncounterDiagnosis>"; } /** * A reference from one resource to another. * If the element is present, it must have a value for at least one of the defined * elements, an \@id referenced from the Narrative, or extensions * * Reason the encounter takes place, as specified using information from another * resource. For admissions, this is the admission diagnosis. The indication will * typically be a Condition (with other resources referenced in the * evidence.detail), or a Procedure. * * @return null|\DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRReference */ public function getCondition() { return $this->condition; } /** * A reference from one resource to another. * If the element is present, it must have a value for at least one of the defined * elements, an \@id referenced from the Narrative, or extensions * * Reason the encounter takes place, as specified using information from another * resource. For admissions, this is the admission diagnosis. The indication will * typically be a Condition (with other resources referenced in the * evidence.detail), or a Procedure. * * @param null|\DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRReference $condition * @return static */ public function setCondition(FHIRReference $condition = null) { $this->condition = $condition; return $this; } /** * An integer with a value that is positive (e.g. >0) * If the element is present, it must have either a \@value, an \@id referenced from * the Narrative, or extensions * * Ranking of the diagnosis (for each role type). * * @return null|\DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRPositiveInt */ public function getRank() { return $this->rank; } /** * An integer with a value that is positive (e.g. >0) * If the element is present, it must have either a \@value, an \@id referenced from * the Narrative, or extensions * * Ranking of the diagnosis (for each role type). * * @param null|\DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRPositiveInt $rank * @return static */ public function setRank($rank = null) { if (null === $rank) { $this->rank = null; return $this; } if ($rank instanceof FHIRPositiveInt) { $this->rank = $rank; return $this; } $this->rank = new FHIRPositiveInt($rank); return $this; } /** * A concept that may be defined by a formal reference to a terminology or ontology * or may be provided by text. * If the element is present, it must have a value for at least one of the defined * elements, an \@id referenced from the Narrative, or extensions * * Role that this diagnosis has within the encounter (e.g. admission, billing, * discharge …). * * @return null|\DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRCodeableConcept */ public function getUse() { return $this->use; } /** * A concept that may be defined by a formal reference to a terminology or ontology * or may be provided by text. * If the element is present, it must have a value for at least one of the defined * elements, an \@id referenced from the Narrative, or extensions * * Role that this diagnosis has within the encounter (e.g. admission, billing, * discharge …). 
* * @param null|\DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRCodeableConcept $use * @return static */ public function setUse(FHIRCodeableConcept $use = null) { $this->use = $use; return $this; } /** * Returns the validation rules that this type's fields must comply with to be considered "valid" * The returned array is in ["fieldname[.offset]" => ["rule" => {constraint}]] * * @return array */ public function _getValidationRules() { return self::$_validationRules; } /** * Validates that this type conforms to the specifications set forth for it by FHIR. An empty array must be seen as * passing. * * @return array */ public function _getValidationErrors() { $errs = parent::_getValidationErrors(); $validationRules = $this->_getValidationRules(); if (null !== ($v = $this->getCondition())) { if ([] !== ($fieldErrs = $v->_getValidationErrors())) { $errs[self::FIELD_CONDITION] = $fieldErrs; } } if (null !== ($v = $this->getRank())) { if ([] !== ($fieldErrs = $v->_getValidationErrors())) { $errs[self::FIELD_RANK] = $fieldErrs; } } if (null !== ($v = $this->getUse())) { if ([] !== ($fieldErrs = $v->_getValidationErrors())) { $errs[self::FIELD_USE] = $fieldErrs; } } if (isset($validationRules[self::FIELD_CONDITION])) { $v = $this->getCondition(); foreach($validationRules[self::FIELD_CONDITION] as $rule => $constraint) { $err = $this->_performValidation(PHPFHIRConstants::TYPE_NAME_ENCOUNTER_DOT_DIAGNOSIS, self::FIELD_CONDITION, $rule, $constraint, $v); if (null !== $err) { if (!isset($errs[self::FIELD_CONDITION])) { $errs[self::FIELD_CONDITION] = []; } $errs[self::FIELD_CONDITION][$rule] = $err; } } } if (isset($validationRules[self::FIELD_RANK])) { $v = $this->getRank(); foreach($validationRules[self::FIELD_RANK] as $rule => $constraint) { $err = $this->_performValidation(PHPFHIRConstants::TYPE_NAME_ENCOUNTER_DOT_DIAGNOSIS, self::FIELD_RANK, $rule, $constraint, $v); if (null !== $err) { if (!isset($errs[self::FIELD_RANK])) { $errs[self::FIELD_RANK] = []; } $errs[self::FIELD_RANK][$rule] = $err; } } } if (isset($validationRules[self::FIELD_USE])) { $v = $this->getUse(); foreach($validationRules[self::FIELD_USE] as $rule => $constraint) { $err = $this->_performValidation(PHPFHIRConstants::TYPE_NAME_ENCOUNTER_DOT_DIAGNOSIS, self::FIELD_USE, $rule, $constraint, $v); if (null !== $err) { if (!isset($errs[self::FIELD_USE])) { $errs[self::FIELD_USE] = []; } $errs[self::FIELD_USE][$rule] = $err; } } } if (isset($validationRules[self::FIELD_MODIFIER_EXTENSION])) { $v = $this->getModifierExtension(); foreach($validationRules[self::FIELD_MODIFIER_EXTENSION] as $rule => $constraint) { $err = $this->_performValidation(PHPFHIRConstants::TYPE_NAME_BACKBONE_ELEMENT, self::FIELD_MODIFIER_EXTENSION, $rule, $constraint, $v); if (null !== $err) { if (!isset($errs[self::FIELD_MODIFIER_EXTENSION])) { $errs[self::FIELD_MODIFIER_EXTENSION] = []; } $errs[self::FIELD_MODIFIER_EXTENSION][$rule] = $err; } } } if (isset($validationRules[self::FIELD_EXTENSION])) { $v = $this->getExtension(); foreach($validationRules[self::FIELD_EXTENSION] as $rule => $constraint) { $err = $this->_performValidation(PHPFHIRConstants::TYPE_NAME_ELEMENT, self::FIELD_EXTENSION, $rule, $constraint, $v); if (null !== $err) { if (!isset($errs[self::FIELD_EXTENSION])) { $errs[self::FIELD_EXTENSION] = []; } $errs[self::FIELD_EXTENSION][$rule] = $err; } } } if (isset($validationRules[self::FIELD_ID])) { $v = $this->getId(); foreach($validationRules[self::FIELD_ID] as $rule => $constraint) { $err = 
$this->_performValidation(PHPFHIRConstants::TYPE_NAME_ELEMENT, self::FIELD_ID, $rule, $constraint, $v); if (null !== $err) { if (!isset($errs[self::FIELD_ID])) { $errs[self::FIELD_ID] = []; } $errs[self::FIELD_ID][$rule] = $err; } } } return $errs; } /** * @param \SimpleXMLElement|string|null $sxe * @param null|\DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRBackboneElement\FHIREncounter\FHIREncounterDiagnosis $type * @param null|int $libxmlOpts * @return null|\DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRBackboneElement\FHIREncounter\FHIREncounterDiagnosis */ public static function xmlUnserialize($sxe = null, PHPFHIRTypeInterface $type = null, $libxmlOpts = 591872) { if (null === $sxe) { return null; } if (is_string($sxe)) { libxml_use_internal_errors(true); $sxe = new \SimpleXMLElement($sxe, $libxmlOpts, false); if ($sxe === false) { throw new \DomainException(sprintf('FHIREncounterDiagnosis::xmlUnserialize - String provided is not parseable as XML: %s', implode(', ', array_map(function(\libXMLError $err) { return $err->message; }, libxml_get_errors())))); } libxml_use_internal_errors(false); } if (!($sxe instanceof \SimpleXMLElement)) { throw new \InvalidArgumentException(sprintf('FHIREncounterDiagnosis::xmlUnserialize - $sxe value must be null, \\SimpleXMLElement, or valid XML string, %s seen', gettype($sxe))); } if (null === $type) { $type = new FHIREncounterDiagnosis; } elseif (!is_object($type) || !($type instanceof FHIREncounterDiagnosis)) { throw new \RuntimeException(sprintf( 'FHIREncounterDiagnosis::xmlUnserialize - $type must be instance of \DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRBackboneElement\FHIREncounter\FHIREncounterDiagnosis or null, %s seen.', is_object($type) ? get_class($type) : gettype($type) )); } FHIRBackboneElement::xmlUnserialize($sxe, $type); $xmlNamespaces = $sxe->getDocNamespaces(false, false); if ([] !== $xmlNamespaces) { $ns = reset($xmlNamespaces); if (false !== $ns && '' !== $ns) { $type->_xmlns = $ns; } } $attributes = $sxe->attributes(); $children = $sxe->children(); if (isset($children->condition)) { $type->setCondition(FHIRReference::xmlUnserialize($children->condition)); } if (isset($children->rank)) { $type->setRank(FHIRPositiveInt::xmlUnserialize($children->rank)); } if (isset($attributes->rank)) { $pt = $type->getRank(); if (null !== $pt) { $pt->setValue((string)$attributes->rank); } else { $type->setRank((string)$attributes->rank); } } if (isset($children->use)) { $type->setUse(FHIRCodeableConcept::xmlUnserialize($children->use)); } return $type; } /** * @param null|\SimpleXMLElement $sxe * @param null|int $libxmlOpts * @return \SimpleXMLElement */ public function xmlSerialize(\SimpleXMLElement $sxe = null, $libxmlOpts = 591872) { if (null === $sxe) { $sxe = new \SimpleXMLElement($this->_getFHIRXMLElementDefinition(), $libxmlOpts, false); } parent::xmlSerialize($sxe); if (null !== ($v = $this->getCondition())) { $v->xmlSerialize($sxe->addChild(self::FIELD_CONDITION, null, $v->_getFHIRXMLNamespace())); } if (null !== ($v = $this->getRank())) { $v->xmlSerialize($sxe->addChild(self::FIELD_RANK, null, $v->_getFHIRXMLNamespace())); } if (null !== ($v = $this->getUse())) { $v->xmlSerialize($sxe->addChild(self::FIELD_USE, null, $v->_getFHIRXMLNamespace())); } return $sxe; } /** * @return array */ public function jsonSerialize() { $a = parent::jsonSerialize(); if (null !== ($v = $this->getCondition())) { $a[self::FIELD_CONDITION] = $v; } if (null !== ($v = $this->getRank())) { $a[self::FIELD_RANK] = $v->getValue(); $enc = $v->jsonSerialize(); $cnt = 
count($enc); if (0 < $cnt && (1 !== $cnt || (1 === $cnt && !array_key_exists(FHIRPositiveInt::FIELD_VALUE, $enc)))) { unset($enc[FHIRPositiveInt::FIELD_VALUE]); $a[self::FIELD_RANK_EXT] = $enc; } } if (null !== ($v = $this->getUse())) { $a[self::FIELD_USE] = $v; } if ([] !== ($vs = $this->_getFHIRComments())) { $a[PHPFHIRConstants::JSON_FIELD_FHIR_COMMENTS] = $vs; } return $a; } /** * @return string */ public function __toString() { return self::FHIR_TYPE_NAME; } }
Java
/* Copyright 2015 The Kubernetes Authors All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // If you make changes to this file, you should also make the corresponding change in ReplicaSet. package replication import ( "github.com/golang/glog" "k8s.io/kubernetes/pkg/api" client "k8s.io/kubernetes/pkg/client/unversioned" ) // updateReplicaCount attempts to update the Status.Replicas of the given controller, with a single GET/PUT retry. func updateReplicaCount(rcClient client.ReplicationControllerInterface, controller api.ReplicationController, numReplicas int) (updateErr error) { // This is the steady state. It happens when the rc doesn't have any expectations, since // we do a periodic relist every 30s. If the generations differ but the replicas are // the same, a caller might've resized to the same replica count. if controller.Status.Replicas == numReplicas && controller.Generation == controller.Status.ObservedGeneration { return nil } // Save the generation number we acted on, otherwise we might wrongfully indicate // that we've seen a spec update when we retry. // TODO: This can clobber an update if we allow multiple agents to write to the // same status. generation := controller.Generation var getErr error for i, rc := 0, &controller; ; i++ { glog.V(4).Infof("Updating replica count for rc: %v, %d->%d (need %d), sequence No: %v->%v", controller.Name, controller.Status.Replicas, numReplicas, controller.Spec.Replicas, controller.Status.ObservedGeneration, generation) rc.Status = api.ReplicationControllerStatus{Replicas: numReplicas, ObservedGeneration: generation} _, updateErr = rcClient.UpdateStatus(rc) if updateErr == nil || i >= statusUpdateRetries { return updateErr } // Update the controller with the latest resource version for the next poll if rc, getErr = rcClient.Get(controller.Name); getErr != nil { // If the GET fails we can't trust status.Replicas anymore. This error // is bound to be more interesting than the update failure. return getErr } } } // OverlappingControllers sorts a list of controllers by creation timestamp, using their names as a tie breaker. type OverlappingControllers []api.ReplicationController func (o OverlappingControllers) Len() int { return len(o) } func (o OverlappingControllers) Swap(i, j int) { o[i], o[j] = o[j], o[i] } func (o OverlappingControllers) Less(i, j int) bool { if o[i].CreationTimestamp.Equal(o[j].CreationTimestamp) { return o[i].Name < o[j].Name } return o[i].CreationTimestamp.Before(o[j].CreationTimestamp) }
Java
package com.sadc.game.gameobject.trackobject; import com.badlogic.gdx.graphics.Texture; import com.badlogic.gdx.graphics.g2d.SpriteBatch; import com.sadc.game.GameConstants; import com.sadc.game.gameobject.GameUtils; import com.sadc.game.gameobject.Player; /** * @author f536985 (Tom Farello) */ public class Wall extends TrackObject { public Wall(float distance, float angle) { setActive(true); setDistance(distance); setAngle(angle); setWidth(22); setTexture(new Texture("brickWall.png")); } @Override public void update(float delta, Player player) { if (collide(player)) { player.crash(); setActive(false); } } @Override public void draw(float delta, float playerDistance, SpriteBatch spriteBatch) { float drawDistance = (float)Math.pow(2 , playerDistance - (getDistance())); GameUtils.setColorByDrawDistance(drawDistance, spriteBatch); spriteBatch.draw(getTexture(), GameConstants.SCREEN_WIDTH / 2 - 50, 15, 50, GameConstants.SCREEN_HEIGHT / 2 - 15, 100, 70, drawDistance, drawDistance, getAngle(), 0, 0, 100, 70, false, false); } }
Java
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_111) on Wed Feb 22 09:55:43 CET 2017 --> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>org.togglz.slack (Togglz 2.4.0.Final API)</title> <meta name="date" content="2017-02-22"> <link rel="stylesheet" type="text/css" href="../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../script.js"></script> </head> <body> <h1 class="bar"><a href="../../../org/togglz/slack/package-summary.html" target="classFrame">org.togglz.slack</a></h1> <div class="indexContainer"> <h2 title="Classes">Classes</h2> <ul title="Classes"> <li><a href="SlackStateRepository.html" title="class in org.togglz.slack" target="classFrame">SlackStateRepository</a></li> </ul> </div> </body> </html>
Java
class JobApplicationsController < ApplicationController after_action :verify_authorized after_action :verify_policy_scoped, only: [:index] before_action :require_login before_action :set_job_application, only: [:show, :edit, :update, :destroy, :followup] # GET /posting/1/job_application # GET /posting/1/job_application.json # def index # @posting = Posting.unscoped.find(params[:posting_id]) # authorize @posting, :show? # @job_applications = JobApplication.all # end # GET /posting/1/job_application # GET /posting/1/job_application.json def show end # GET /posting/1/job_application/new def new @job_application = JobApplication.new authorize @job_application end # GET /posting/1/job_application/edit def edit end # POST /posting/1/job_application # POST /posting/1/job_application.json def create @job_application = JobApplication.new(job_application_params) @job_application.posting = Posting.unscoped.find(params[:posting_id]) authorize @job_application authorize @job_application.posting, :update? respond_to do |format| if @job_application.save # TODO: Is this line still needed? @job_application_is_new = true format.html { redirect_to @job_application.posting, notice: 'Job application was successfully created.' } format.json { render action: 'show', status: :created } else format.html { render action: 'new' } format.json { render json: @job_application.errors, status: :unprocessable_entity } end end end # PATCH/PUT /posting/1/job_application/followup.json def followup respond_to do |format| if @job_application.update(followup: Time.now) format.json { render action: 'show' } else format.json { render json: @job_application.errors, status: :unprocessable_entity } end end end # PATCH/PUT /posting/1/job_application # PATCH/PUT /posting/1/job_application.json def update respond_to do |format| if @job_application.update(job_application_params) format.html { redirect_to @job_application.posting, notice: 'Changes saved!' } format.json { render action: 'show', notice: 'Changes saved!' } else format.html { render action: 'edit' } format.json { render json: @job_application.errors, status: :unprocessable_entity } end end end # DELETE /posting/1/job_application # DELETE /posting/1/job_application.json def destroy @job_application.destroy respond_to do |format| format.html { redirect_to @job_application.posting } format.json { head :no_content } end end private # Use callbacks to share common setup or constraints between actions. def set_job_application @job_application = Posting.unscoped.find(params[:posting_id]).job_application authorize @job_application end # Never trust parameters from the scary internet, only allow the white list through. def job_application_params params.require(:job_application).permit(:date_sent, :cover_letter, :posting_id) end end
Java
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.5.0_08) on Wed Jan 10 16:02:59 PST 2007 --> <TITLE> Uses of Interface org.apache.hadoop.mapred.JobSubmissionProtocol (Hadoop 0.10.1 API) </TITLE> <LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../stylesheet.css" TITLE="Style"> <SCRIPT type="text/javascript"> function windowTitle() { parent.document.title="Uses of Interface org.apache.hadoop.mapred.JobSubmissionProtocol (Hadoop 0.10.1 API)"; } </SCRIPT> <NOSCRIPT> </NOSCRIPT> </HEAD> <BODY BGCOLOR="white" onload="windowTitle();"> <!-- ========= START OF TOP NAVBAR ======= --> <A NAME="navbar_top"><!-- --></A> <A HREF="#skip-navbar_top" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_top_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../org/apache/hadoop/mapred/JobSubmissionProtocol.html" title="interface in org.apache.hadoop.mapred"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;PREV&nbsp; &nbsp;NEXT</FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../index.html?org/apache/hadoop/mapred//class-useJobSubmissionProtocol.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="JobSubmissionProtocol.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_top"></A> <!-- ========= END OF TOP NAVBAR ========= --> <HR> <CENTER> <H2> <B>Uses of Interface<br>org.apache.hadoop.mapred.JobSubmissionProtocol</B></H2> </CENTER> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2"> Packages that use <A HREF="../../../../../org/apache/hadoop/mapred/JobSubmissionProtocol.html" title="interface in 
org.apache.hadoop.mapred">JobSubmissionProtocol</A></FONT></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD><A HREF="#org.apache.hadoop.mapred"><B>org.apache.hadoop.mapred</B></A></TD> <TD>A system for scalable, fault-tolerant, distributed computation over large data collections.&nbsp;</TD> </TR> </TABLE> &nbsp; <P> <A NAME="org.apache.hadoop.mapred"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2"> Uses of <A HREF="../../../../../org/apache/hadoop/mapred/JobSubmissionProtocol.html" title="interface in org.apache.hadoop.mapred">JobSubmissionProtocol</A> in <A HREF="../../../../../org/apache/hadoop/mapred/package-summary.html">org.apache.hadoop.mapred</A></FONT></TH> </TR> </TABLE> &nbsp; <P> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor"> <TH ALIGN="left" COLSPAN="2">Classes in <A HREF="../../../../../org/apache/hadoop/mapred/package-summary.html">org.apache.hadoop.mapred</A> that implement <A HREF="../../../../../org/apache/hadoop/mapred/JobSubmissionProtocol.html" title="interface in org.apache.hadoop.mapred">JobSubmissionProtocol</A></FONT></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;class</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../org/apache/hadoop/mapred/JobTracker.html" title="class in org.apache.hadoop.mapred">JobTracker</A></B></CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;JobTracker is the central location for submitting and tracking MR jobs in a network environment.</TD> </TR> </TABLE> &nbsp; <P> <HR> <!-- ======= START OF BOTTOM NAVBAR ====== --> <A NAME="navbar_bottom"><!-- --></A> <A HREF="#skip-navbar_bottom" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_bottom_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../org/apache/hadoop/mapred/JobSubmissionProtocol.html" title="interface in org.apache.hadoop.mapred"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> 
&nbsp;PREV&nbsp; &nbsp;NEXT</FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../index.html?org/apache/hadoop/mapred//class-useJobSubmissionProtocol.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="JobSubmissionProtocol.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_bottom"></A> <!-- ======== END OF BOTTOM NAVBAR ======= --> <HR> Copyright &copy; 2006 The Apache Software Foundation </BODY> </HTML>
Java
package dx.exec import dx.api.{DxApi, DxFile} import dx.core.io.ExecLinkInfo import dx.core.languages.wdl.{TypeSerialization, WdlVarLinksConverter} import spray.json._ import wdlTools.eval.WdlValues import wdlTools.types.WdlTypes case class WfFragInput(blockPath: Vector[Int], env: Map[String, (WdlTypes.T, WdlValues.V)], execLinkInfo: Map[String, ExecLinkInfo]) case class WfFragInputOutput(typeAliases: Map[String, WdlTypes.T], wdlVarLinksConverter: WdlVarLinksConverter, dxApi: DxApi) { private def revTransformVarName(varName: String): String = { varName.replaceAll("___", "\\.") } private def loadWorkflowMetaInfo( metaInfo: Map[String, JsValue] ): (Map[String, ExecLinkInfo], Vector[Int], Map[String, WdlTypes.T]) = { // meta information used for running workflow fragments val execLinkInfo: Map[String, ExecLinkInfo] = metaInfo.get("execLinkInfo") match { case None => Map.empty case Some(JsObject(fields)) => fields.map { case (key, ali) => key -> ExecLinkInfo.readJson(dxApi, ali, typeAliases) } case other => throw new Exception(s"Bad value ${other}") } val blockPath: Vector[Int] = metaInfo.get("blockPath") match { case None => Vector.empty case Some(JsArray(arr)) => arr.map { case JsNumber(n) => n.toInt case _ => throw new Exception("Bad value ${arr}") } case other => throw new Exception(s"Bad value ${other}") } val fqnDictTypes: Map[String, WdlTypes.T] = metaInfo.get("fqnDictTypes") match { case Some(JsObject(fields)) => fields.map { case (key, JsString(value)) => // Transform back to a fully qualified name with dots val orgKeyName = revTransformVarName(key) val wdlType = TypeSerialization(typeAliases).fromString(value) orgKeyName -> wdlType case other => throw new Exception(s"Bad value ${other}") } case other => throw new Exception(s"Bad value ${other}") } (execLinkInfo, blockPath, fqnDictTypes) } // 1. Convert the inputs to WDL values // 2. Setup an environment to evaluate the sub-block. This should // look to the WDL code as if all previous code had been evaluated. def loadInputs(inputs: JsValue, metaInfo: JsValue): WfFragInput = { val regularFields: Map[String, JsValue] = inputs.asJsObject.fields .filter { case (fieldName, _) => !fieldName.endsWith(WdlVarLinksConverter.FLAT_FILES_SUFFIX) } // Extract the meta information needed to setup the closure for the subblock val (execLinkInfo, blockPath, fqnDictTypes) = loadWorkflowMetaInfo(metaInfo.asJsObject.fields) // What remains are inputs from other stages. 
Convert from JSON to WDL values val env: Map[String, (WdlTypes.T, WdlValues.V)] = regularFields.map { case (name, jsValue) => val fqn = revTransformVarName(name) val wdlType = fqnDictTypes.get(fqn) match { case None => throw new Exception(s"Did not find variable ${fqn} (${name}) in the block environment") case Some(x) => x } val value = wdlVarLinksConverter.unpackJobInput(fqn, wdlType, jsValue) fqn -> (wdlType, value) } WfFragInput(blockPath, env, execLinkInfo) } // find all the dx:files that are referenced from the inputs def findRefDxFiles(inputs: JsValue, metaInfo: JsValue): Vector[DxFile] = { val regularFields: Map[String, JsValue] = inputs.asJsObject.fields .filter { case (fieldName, _) => !fieldName.endsWith(WdlVarLinksConverter.FLAT_FILES_SUFFIX) } val (_, _, fqnDictTypes) = loadWorkflowMetaInfo(metaInfo.asJsObject.fields) // Convert from JSON to WDL values regularFields .map { case (name, jsValue) => val fqn = revTransformVarName(name) if (!fqnDictTypes.contains(fqn)) { throw new Exception( s"Did not find variable ${fqn} (${name}) in the block environment" ) } dxApi.findFiles(jsValue) } .toVector .flatten } }
Java
/* * * * Copyright 2010-2016 OrientDB LTD (http://orientdb.com) * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * * * For more information: http://orientdb.com * */ package com.orientechnologies.orient.core.sql.functions.coll; import com.orientechnologies.orient.core.command.OCommandContext; import com.orientechnologies.orient.core.db.record.OIdentifiable; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; /** * This operator can work as aggregate or inline. If only one argument is passed than aggregates, * otherwise executes, and returns, the SYMMETRIC DIFFERENCE between the collections received as * parameters. Works also with no collection values. * * @author Luca Garulli (l.garulli--(at)--orientdb.com) */ public class OSQLFunctionSymmetricDifference extends OSQLFunctionMultiValueAbstract<Set<Object>> { public static final String NAME = "symmetricDifference"; private Set<Object> rejected; public OSQLFunctionSymmetricDifference() { super(NAME, 1, -1); } private static void addItemToResult(Object o, Set<Object> accepted, Set<Object> rejected) { if (!accepted.contains(o) && !rejected.contains(o)) { accepted.add(o); } else { accepted.remove(o); rejected.add(o); } } private static void addItemsToResult( Collection<Object> co, Set<Object> accepted, Set<Object> rejected) { for (Object o : co) { addItemToResult(o, accepted, rejected); } } @SuppressWarnings("unchecked") public Object execute( Object iThis, OIdentifiable iCurrentRecord, Object iCurrentResult, final Object[] iParams, OCommandContext iContext) { if (iParams[0] == null) return null; Object value = iParams[0]; if (iParams.length == 1) { // AGGREGATION MODE (STATEFUL) if (context == null) { context = new HashSet<Object>(); rejected = new HashSet<Object>(); } if (value instanceof Collection<?>) { addItemsToResult((Collection<Object>) value, context, rejected); } else { addItemToResult(value, context, rejected); } return null; } else { // IN-LINE MODE (STATELESS) final Set<Object> result = new HashSet<Object>(); final Set<Object> rejected = new HashSet<Object>(); for (Object iParameter : iParams) { if (iParameter instanceof Collection<?>) { addItemsToResult((Collection<Object>) iParameter, result, rejected); } else { addItemToResult(iParameter, result, rejected); } } return result; } } @Override public Set<Object> getResult() { if (returnDistributedResult()) { final Map<String, Object> doc = new HashMap<String, Object>(); doc.put("result", context); doc.put("rejected", rejected); return Collections.<Object>singleton(doc); } else { return super.getResult(); } } public String getSyntax() { return "difference(<field>*)"; } @Override public Object mergeDistributedResult(List<Object> resultsToMerge) { if (returnDistributedResult()) { final Set<Object> result = new HashSet<Object>(); final Set<Object> rejected = new HashSet<Object>(); for (Object item : resultsToMerge) { rejected.addAll(unwrap(item, 
"rejected")); } for (Object item : resultsToMerge) { addItemsToResult(unwrap(item, "result"), result, rejected); } return result; } if (!resultsToMerge.isEmpty()) return resultsToMerge.get(0); return null; } @SuppressWarnings("unchecked") private Set<Object> unwrap(Object obj, String field) { final Set<Object> objAsSet = (Set<Object>) obj; final Map<String, Object> objAsMap = (Map<String, Object>) objAsSet.iterator().next(); final Set<Object> objAsField = (Set<Object>) objAsMap.get(field); return objAsField; } }
Java
/* * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.simpleemail.model; import java.io.Serializable; import javax.annotation.Generated; /** * <p> * An empty element returned on a successful request. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/email-2010-12-01/SetReceiptRulePosition" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class SetReceiptRulePositionResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable { /** * Returns a string representation of this object; useful for testing and debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof SetReceiptRulePositionResult == false) return false; SetReceiptRulePositionResult other = (SetReceiptRulePositionResult) obj; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; return hashCode; } @Override public SetReceiptRulePositionResult clone() { try { return (SetReceiptRulePositionResult) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
Java
import java.io.IOException; import java.io.OutputStream; import java.net.InetSocketAddress; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Properties; import java.util.stream.Stream; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; import com.google.common.base.Optional; import com.google.common.collect.Iterables; import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; import com.sun.net.httpserver.HttpServer; import org.daisy.pipeline.client.PipelineClient; import org.daisy.pipeline.webservice.jaxb.job.Job; import org.daisy.pipeline.webservice.jaxb.job.JobStatus; import org.daisy.pipeline.webservice.jaxb.job.Messages; import org.daisy.pipeline.webservice.jaxb.request.Callback; import org.daisy.pipeline.webservice.jaxb.request.CallbackType; import org.daisy.pipeline.webservice.jaxb.request.Input; import org.daisy.pipeline.webservice.jaxb.request.Item; import org.daisy.pipeline.webservice.jaxb.request.JobRequest; import org.daisy.pipeline.webservice.jaxb.request.ObjectFactory; import org.daisy.pipeline.webservice.jaxb.request.Script; import org.daisy.pipeline.webservice.jaxb.request.Priority; import org.junit.Assert; import org.junit.Test; public class TestPushNotifications extends Base { private static final PipelineClient client = newClient(TestClientJobs.CREDS_DEF.clientId, TestClientJobs.CREDS_DEF.secret); @Override protected PipelineClient client() { return client; } @Override protected Properties systemProperties() { Properties p = super.systemProperties(); // client authentication is required for push notifications p.setProperty("org.daisy.pipeline.ws.authentication", "true"); p.setProperty("org.daisy.pipeline.ws.authentication.key", TestClientJobs.CREDS_DEF.clientId); p.setProperty("org.daisy.pipeline.ws.authentication.secret", TestClientJobs.CREDS_DEF.secret); return p; } @Test public void testPushNotifications() throws Exception { AbstractCallback testStatusAndMessages = new AbstractCallback() { JobStatus lastStatus = null; BigDecimal lastProgress = BigDecimal.ZERO; Iterator<BigDecimal> mustSee = stream(".25", ".375", ".5", ".55", ".675", ".8", ".9").map(d -> new BigDecimal(d)).iterator(); BigDecimal mustSeeNext = mustSee.next(); List<BigDecimal> seen = new ArrayList<BigDecimal>(); @Override void handleStatus(JobStatus status) { lastStatus = status; } @Override void handleMessages(Messages messages) { BigDecimal progress = messages.getProgress(); if (progress.compareTo(lastProgress) != 0) { Assert.assertTrue("Progress must be monotonic non-decreasing", progress.compareTo(lastProgress) >= 0); if (mustSeeNext != null) { if (progress.compareTo(mustSeeNext) == 0) { seen.clear(); mustSeeNext = mustSee.hasNext() ? 
mustSee.next() : null; } else { seen.add(progress); Assert.assertTrue("Expected " + mustSeeNext + " but got " + seen, progress.compareTo(mustSeeNext) < 0); } } lastProgress = progress; } } @Override void finalTest() { Assert.assertEquals(JobStatus.SUCCESS, lastStatus); Assert.assertTrue("Expected " + mustSeeNext + " but got " + seen, mustSeeNext == null); } }; HttpServer server; { server = HttpServer.create(new InetSocketAddress(8080), 0); server.createContext("/notify", testStatusAndMessages); server.setExecutor(null); server.start(); } try { JobRequest req; { ObjectFactory oFactory = new ObjectFactory(); req = oFactory.createJobRequest(); Script script = oFactory.createScript(); { Optional<String> href = getScriptHref("mock-messages-script"); Assert.assertTrue(href.isPresent()); script.setHref(href.get()); } req.getScriptOrNicenameOrPriority().add(script); Input input = oFactory.createInput(); { Item source = oFactory.createItem(); source.setValue(getResource("hello.xml").toURI().toString()); input.getItem().add(source); input.setName("source"); } req.getScriptOrNicenameOrPriority().add(input); req.getScriptOrNicenameOrPriority().add(oFactory.createNicename("NICE_NAME")); req.getScriptOrNicenameOrPriority().add(oFactory.createPriority(Priority.LOW)); Callback callback = oFactory.createCallback(); { callback.setType(CallbackType.MESSAGES); callback.setHref("http://localhost:8080/notify"); callback.setFrequency("1"); } req.getScriptOrNicenameOrPriority().add(callback); callback = oFactory.createCallback(); { callback.setType(CallbackType.STATUS); callback.setHref("http://localhost:8080/notify"); callback.setFrequency("1"); } req.getScriptOrNicenameOrPriority().add(callback); } Job job = client().sendJob(req); deleteAfterTest(job); waitForStatus(JobStatus.SUCCESS, job, 10000); // wait until all updates have been pushed Thread.sleep(1000); testStatusAndMessages.finalTest(); } finally { server.stop(1); } } public static abstract class AbstractCallback implements HttpHandler { abstract void handleStatus(JobStatus status); abstract void handleMessages(Messages messages); abstract void finalTest(); @Override public void handle(HttpExchange t) throws IOException { Job job; { try { job = (Job)JAXBContext.newInstance(Job.class).createUnmarshaller().unmarshal(t.getRequestBody()); } catch (JAXBException e) { throw new RuntimeException(e); } } handleStatus(job.getStatus()); Optional<Messages> messages = getMessages(job); if (messages.isPresent()) handleMessages(messages.get()); String response = "got it"; t.sendResponseHeaders(200, response.length()); OutputStream os = t.getResponseBody(); os.write(response.getBytes()); os.close(); } } static Optional<Messages> getMessages(Job job) { return Optional.fromNullable( Iterables.getOnlyElement( Iterables.filter( job.getNicenameOrBatchIdOrScript(), Messages.class), null)); } static <T> Stream<T> stream(T... array) { return Arrays.<T>stream(array); } }
Java
// <copyright file="KeyEvent.cs" company="WebDriver Committers"> // Copyright 2015 Software Freedom Conservancy // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // </copyright> using System; using System.Collections.Generic; using System.Text; using OpenQA.Selenium; namespace Selenium.Internal.SeleniumEmulation { /// <summary> /// Defines the command for the keyEvent keyword. /// </summary> internal class KeyEvent : SeleneseCommand { private ElementFinder finder; private KeyState keyState; private string eventName; /// <summary> /// Initializes a new instance of the <see cref="KeyEvent"/> class. /// </summary> /// <param name="elementFinder">An <see cref="ElementFinder"/> used to find the element on which to execute the command.</param> /// <param name="state">A <see cref="KeyState"/> object defining the state of modifier keys.</param> /// <param name="eventName">The name of the event to send.</param> public KeyEvent(ElementFinder elementFinder, KeyState state, string eventName) { this.finder = elementFinder; this.keyState = state; this.eventName = eventName; } /// <summary> /// Handles the command. /// </summary> /// <param name="driver">The driver used to execute the command.</param> /// <param name="locator">The first parameter to the command.</param> /// <param name="value">The second parameter to the command.</param> /// <returns>The result of the command.</returns> protected override object HandleSeleneseCommand(IWebDriver driver, string locator, string value) { object[] parameters = new object[] { value, this.keyState.ControlKeyDown, this.keyState.AltKeyDown, this.keyState.ShiftKeyDown, this.keyState.MetaKeyDown }; JavaScriptLibrary.CallEmbeddedSelenium(driver, this.eventName, this.finder.FindElement(driver, locator), parameters); return null; } } }
Java
/*
 * Copyright 2012-2016 JetBrains s.r.o
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package jetbrains.jetpad.base.edt;

public class BufferingEdtManager extends RunningEdtManager {

  public BufferingEdtManager() {
    super();
  }

  public BufferingEdtManager(String name) {
    super(name);
  }

  @Override
  protected void doSchedule(Runnable r) {
    addTaskToQueue(r);
  }

  @Override
  public String toString() {
    return "BufferingEdtManager@" + Integer.toHexString(hashCode())
        + ("".equals(getName()) ? "" : " (" + getName() + ")");
  }
}
Java
# Chaetoplea crossata (Ellis & Everh.) M.E. Barr SPECIES

#### Status
ACCEPTED

#### According to
Index Fungorum

#### Published in
Mem. N. Y. bot. Gdn 62: 50 (1990)

#### Original name
Teichospora crossata Ellis & Everh.

### Remarks
null
Java
<?php /* * Apolbox - Framework Productiont * * Apolbox adalah kode sumber yang terbuka sebagai aplikasi kerangka kerja untuk membangun website dengan metode penyihir, * yang dapat membuat pembangunan website lebih cepat dan lebih praktis. * * (c) Ayus irfang filaras * */ require_once __DIR__.'/../bootstrap/pustaka.php'; /** * Apolbox - Framework Productiont * * Apolbox adalah kode sumber yang terbuka sebagai aplikasi kerangka kerja untuk membangun website dengan metode penyihir, * yang dapat membuat pembangunan website lebih cepat dan lebih praktis. * * @package apolbox * @subpackage administrator * * @copyright (c) [29 Juni 2016] * @since version 1.0.0 * * @author Ayus Irfang Filaras <ayus.sahabat@gmail.com> * @link https://github.com/apolbox/apolbox.git * @lisence https://github.com/apolbox/apolbox/blob/master/LICENSE */ $pustaka = new Pustaka\Applications(); return $pustaka->run();
Java
/* * Copyright 2016 * Ubiquitous Knowledge Processing (UKP) Lab * Technische Universität Darmstadt * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.tudarmstadt.ukp.experiments.argumentation.sequence.io.filters; import de.tudarmstadt.ukp.experiments.argumentation.sequence.DocumentRegister; import org.apache.uima.jcas.JCas; import java.util.HashSet; import java.util.Set; /** * Filter wrt document register * * @author Ivan Habernal */ public class DocumentRegisterFilter implements DocumentCollectionFilter { private final Set<DocumentRegister> documentRegisters = new HashSet<>(); public DocumentRegisterFilter(String documentRegistersString) { // parse document registers if (!documentRegistersString.isEmpty()) { for (String documentDomainSplit : documentRegistersString.split(" ")) { String domain = documentDomainSplit.trim(); if (!domain.isEmpty()) { documentRegisters.add(DocumentRegister.fromString(domain)); } } } } @Override public boolean removeFromCollection(JCas jCas) { DocumentRegister register = DocumentRegister.fromJCas(jCas); return !documentRegisters.isEmpty() && !documentRegisters.contains(register); } @Override public boolean applyFilter() { return !documentRegisters.isEmpty(); } }
Java
# Helicoön fairmani Sacc. SPECIES

#### Status
SYNONYM

#### According to
Index Fungorum

#### Published in
null

#### Original name
Helicoön fairmani Sacc.

### Remarks
null
Java
# Graphorkis venulosa (Rchb.f.) Kuntze SPECIES

#### Status
SYNONYM

#### According to
The Catalogue of Life, 3rd January 2011

#### Published in
null

#### Original name
null

### Remarks
null
Java
# Udeniomyces Nakase & Takem. GENUS

#### Status
ACCEPTED

#### According to
The Catalogue of Life, 3rd January 2011

#### Published in
FEMS Microbiol. Lett. 100(1-3): 498 (1992)

#### Original name
Udeniomyces Nakase & Takem.

### Remarks
null
Java
# Helotium kunicense Velen. SPECIES

#### Status
ACCEPTED

#### According to
Index Fungorum

#### Published in
Monogr. Discom. Bohem. (Prague) 184 (1934)

#### Original name
Helotium kunicense Velen.

### Remarks
null
Java
# Varronia martintiscensis Crantz SPECIES

#### Status
ACCEPTED

#### According to
International Plant Names Index

#### Published in
null

#### Original name
null

### Remarks
null
Java
# Trichia globosa Vill. SPECIES

#### Status
ACCEPTED

#### According to
Index Fungorum

#### Published in
null

#### Original name
Trichia globosa Vill.

### Remarks
null
Java
# Schenodorus lanatus (Kunth) Roem. & Schult. SPECIES

#### Status
SYNONYM

#### According to
Integrated Taxonomic Information System

#### Published in
null

#### Original name
null

### Remarks
null
Java
# Pleospora scrophulariae var. compositarum (Earle) Wehm. VARIETY

#### Status
ACCEPTED

#### According to
Index Fungorum

#### Published in
null

#### Original name
Pleospora compositarum Tracy & Earle

### Remarks
null
Java
# Geranium subacaulescens var. subacutum Boiss. VARIETY

#### Status
SYNONYM

#### According to
GRIN Taxonomy for Plants

#### Published in
Fl. orient. 1:872. 1867

#### Original name
null

### Remarks
null
Java
package es.npatarino.android.gotchallenge.chat.message.viewmodel;

import android.net.Uri;

import es.npatarino.android.gotchallenge.chat.message.model.Payload;

public class StickerPayLoad implements Payload {

    private String stickerFilePath;

    public StickerPayLoad(String stickerFilePath) {
        this.stickerFilePath = stickerFilePath;
    }

    public String getStickerFilePath() {
        return stickerFilePath;
    }

    public Uri getSticker() {
        return Uri.parse(stickerFilePath);
    }
}
Java
/* * File: IAS-LangMod-SysMon/src/lang/mod/monitor/sys/mem/ModuleProxy.h * * Copyright (C) 2015, Albert Krzymowski * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef _IAS_Lang_MOD_Monitor_SYS_MEM_ModuleProxy_H_ #define _IAS_Lang_MOD_Monitor_SYS_MEM_ModuleProxy_H_ #include <lang/interpreter/extern/ModuleProxy.h> namespace IAS { namespace Lang { namespace MOD { namespace Monitor{ namespace SYS { namespace MEM { /*************************************************************************/ /** The ModuleProxy class. * */ class ModuleProxy : public ::IAS::Lang::Interpreter::Extern::ModuleProxy { public: virtual ~ModuleProxy() throw(); static ModuleProxy* Create(); protected: ModuleProxy(); virtual void setupImpl(); virtual void cleanUpImpl(); friend class ::IAS::Factory<ModuleProxy>; }; /*************************************************************************/ } } } } } } /*************************************************************************/ extern "C"{ void* ias_lang_mod_monitor_sys_mem_proxy(); } /*************************************************************************/ #endif /* _IAS_Lang_MOD_Monitor_SYS_MEM_ModuleProxy_H_ */
Java
exports.view = function() {
  this.render();
};

exports.async = function() {
  this.render();
};
Java
module.exports = {
  readFiles: readFiles
};

var filewalker = require('filewalker');
var _ = require('kling/kling.js');
var fs = require('fs');

// Walk `directory` and collect every file whose name ends with `fileSuffix`,
// resolving to a list of lazy readers for those files.
function readFiles(directory, fileSuffix) {
  return new Promise(function(resolve, reject) {
    var files = [];
    filewalker(directory)
      .on('file', function(file) {
        if (file.endsWith(fileSuffix)) {
          files.push(file);
        }
      })
      .on('error', reject) // propagate walker failures instead of leaving the promise pending
      .on('done', function() {
        var addLazyReaderCurried = _.curry(addLazyReader);
        resolve(_.fmap(addLazyReaderCurried(directory), files));
      })
      .walk();
  });
}

// Wrap a file path in an object that reads the file contents only when asked.
function addLazyReader(directory, file) {
  return {
    name: directory + file,
    getContents: function() {
      return fs.readFileSync(directory + file, 'utf8');
    }
  };
}
Java
# Search dictionary symbol style

Find symbols within the mil2525d specification that match a keyword.

![](screenshot.png)

## Use case

You can use support for military symbology to allow users to report changes in the field using the correct military symbols.

## How to use the sample

By default, leaving the fields blank and hitting search will find all symbols. To search for certain symbols, enter text into one or more search boxes and click 'Search for symbols'. Results are shown in a list. Pressing 'Clear' will reset the search.

## How it works

1. Create a symbol dictionary with the mil2525d specification by passing the string "mil2525d" and the path to a .stylx file to the `SymbolDictionary` constructor.
2. Create `SymbolStyleSearchParameters`.
3. Add members to the `names`, `tags`, `symbolClasses`, `categories`, and `keys` list fields of the search parameters.
4. Search for symbols using the parameters with `DictionarySymbolStyle.searchSymbols(SymbolStyleSearchParameters)`.
5. Get the `Symbol` from the list of returned `SymbolStyleSearchResultListModel`s.

These steps are sketched in code at the end of this description.

## Relevant API

* DictionarySymbolStyle
* Symbol
* SymbolStyleSearchParameters
* SymbolStyleSearchResultListModel

## Offline Data

Read more about how to set up the sample's offline data [here](http://links.esri.com/ArcGISRuntimeQtSamples#use-offline-data-in-the-samples).

Link | Local Location
---------|-------|
|[Mil2525d Stylx File](https://www.arcgis.com/home/item.html?id=c78b149a1d52414682c86a5feeb13d30)| `<userhome>`/ArcGIS/Runtime/Data/styles/arcade_style/mil2525d.stylx |

## Additional information

This sample features the mil2525D specification. ArcGIS Runtime supports other military symbology standards, including mil2525C and mil2525B (change 2). See the [Military Symbology Styles](https://solutions.arcgis.com/defense/help/military-symbology-styles/) overview on *ArcGIS Solutions for Defense* for more information about support for military symbology.

While developing, you can omit the path to the **.stylx** style file; Runtime will refer to a copy installed with the SDK. For production, you should take care to deploy the proper style files and explicitly specify the path to that file when creating the symbol dictionary.

## Tags

CIM, defense, look up, MIL-STD-2525B, MIL-STD-2525C, MIL-STD-2525D, mil2525b, mil2525c, mil2525d, military, military symbology, search, symbology
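
## Code sketch

The numbered steps under *How it works* map onto a short snippet. This sample description targets the Qt API; the sketch below assumes the Java edition of ArcGIS Runtime, where the roughly equivalent calls are `DictionarySymbolStyle.createFromFile`, `SymbolStyleSearchParameters`, and `searchSymbolsAsync`. Treat the exact class and method names, the file path, and the search keywords as assumptions for illustration rather than the sample's own code.

```java
import com.esri.arcgisruntime.concurrent.ListenableFuture;
import com.esri.arcgisruntime.symbology.DictionarySymbolStyle;
import com.esri.arcgisruntime.symbology.Symbol;
import com.esri.arcgisruntime.symbology.SymbolStyleSearchParameters;
import com.esri.arcgisruntime.symbology.SymbolStyleSearchResult;

import java.util.List;

public class DictionarySymbolSearchSketch {

  public static void main(String[] args) {
    // 1. Create the dictionary style from the mil2525d .stylx file (path is an assumption).
    DictionarySymbolStyle dictionaryStyle = DictionarySymbolStyle.createFromFile(
        System.getProperty("user.home") + "/ArcGIS/Runtime/Data/styles/arcade_style/mil2525d.stylx");
    dictionaryStyle.loadAsync();

    dictionaryStyle.addDoneLoadingListener(() -> {
      // 2 + 3. Build the search parameters; leaving every list empty matches all symbols.
      SymbolStyleSearchParameters searchParameters = new SymbolStyleSearchParameters();
      searchParameters.getNames().add("infantry"); // example keyword, not from the sample
      searchParameters.getTags().add("land");      // example keyword, not from the sample

      // 4. Search asynchronously with the parameters.
      ListenableFuture<List<SymbolStyleSearchResult>> searchFuture =
          dictionaryStyle.searchSymbolsAsync(searchParameters);
      searchFuture.addDoneListener(() -> {
        try {
          // 5. Pull the Symbol out of each returned result.
          for (SymbolStyleSearchResult result : searchFuture.get()) {
            Symbol symbol = result.getSymbol();
            System.out.println(result.getName() + " -> " + symbol);
          }
        } catch (Exception e) {
          e.printStackTrace();
        }
      });
    });
  }
}
```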
Java
/* */ package com.hundsun.network.gates.wulin.biz.service.pojo.auction; /* */ /* */ import com.hundsun.network.gates.luosi.biz.security.ServiceException; /* */ import com.hundsun.network.gates.luosi.common.enums.EnumActiveStatus; /* */ import com.hundsun.network.gates.luosi.common.enums.EnumBidCheckStatus; /* */ import com.hundsun.network.gates.luosi.common.enums.EnumBidPriceStatus; /* */ import com.hundsun.network.gates.luosi.common.enums.EnumOperatorType; /* */ import com.hundsun.network.gates.luosi.common.remote.ServiceResult; /* */ import com.hundsun.network.gates.luosi.wulin.reomte.enums.EnumAuctionErrors; /* */ import com.hundsun.network.gates.luosi.wulin.reomte.request.AuctionMulitBidRequest; /* */ import com.hundsun.network.gates.luosi.wulin.reomte.request.SystemMessageRequest; /* */ import com.hundsun.network.gates.wulin.biz.dao.auction.AuctionBidderDAO; /* */ import com.hundsun.network.gates.wulin.biz.dao.auction.AuctionFreeBidDAO; /* */ import com.hundsun.network.gates.wulin.biz.dao.auction.AuctionHallDAO; /* */ import com.hundsun.network.gates.wulin.biz.dao.auction.AuctionLogDAO; /* */ import com.hundsun.network.gates.wulin.biz.domain.auction.AuctionBidder; /* */ import com.hundsun.network.gates.wulin.biz.domain.auction.AuctionFreeBid; /* */ import com.hundsun.network.gates.wulin.biz.domain.auction.AuctionLog; /* */ import com.hundsun.network.gates.wulin.biz.domain.auction.AuctionMulitBidProject; /* */ import com.hundsun.network.gates.wulin.biz.domain.query.AuctionMulitBidProjectQuery; /* */ import com.hundsun.network.gates.wulin.biz.domain.query.MulitAuctionReviewQuery; /* */ import com.hundsun.network.gates.wulin.biz.service.BaseService; /* */ import com.hundsun.network.gates.wulin.biz.service.auction.MulitAuctionService; /* */ import com.hundsun.network.gates.wulin.biz.service.message.SystemMessageService; /* */ import com.hundsun.network.gates.wulin.biz.service.project.ProjectListingService; /* */ import com.hundsun.network.melody.common.util.StringUtil; /* */ import java.io.IOException; /* */ import java.util.ArrayList; /* */ import java.util.HashMap; /* */ import java.util.List; /* */ import java.util.Locale; /* */ import org.apache.commons.logging.Log; /* */ import org.codehaus.jackson.map.ObjectMapper; /* */ import org.springframework.beans.factory.annotation.Autowired; /* */ import org.springframework.context.MessageSource; /* */ import org.springframework.stereotype.Service; /* */ import org.springframework.transaction.TransactionStatus; /* */ import org.springframework.transaction.support.TransactionCallback; /* */ import org.springframework.transaction.support.TransactionTemplate; /* */ /* */ @Service("mulitAuctionService") /* */ public class MulitAuctionServiceImpl extends BaseService /* */ implements MulitAuctionService /* */ { /* */ /* */ @Autowired /* */ private ProjectListingService projectListingService; /* */ /* */ @Autowired /* */ private AuctionFreeBidDAO auctionFreeBidDAO; /* */ /* */ @Autowired /* */ private AuctionBidderDAO auctionBidderDAO; /* */ /* */ @Autowired /* */ private AuctionLogDAO auctionLogDAO; /* */ /* */ @Autowired /* */ private MessageSource messageSource; /* */ /* */ @Autowired /* */ private AuctionHallDAO auctionHallDAO; /* */ /* */ @Autowired /* */ private SystemMessageService systemMessageService; /* */ /* */ public ServiceResult review(final AuctionMulitBidRequest request) /* */ { /* 70 */ ServiceResult serviceResult = new ServiceResult(); /* */ /* 72 */ if ((null == request) || (StringUtil.isEmpty(request.getBidderAccount())) || 
(StringUtil.isEmpty(request.getReviewer())) || (StringUtil.isEmpty(request.getProjectCode())) || (StringUtil.isEmpty(request.getRemark()))) /* */ { /* 76 */ serviceResult.setErrorNOInfo(Integer.valueOf(EnumAuctionErrors.PARAMETER_ERROR.getValue()), EnumAuctionErrors.PARAMETER_ERROR.getInfo()); /* */ /* 78 */ return serviceResult; /* */ } /* 80 */ AuctionMulitBidProjectQuery query = new AuctionMulitBidProjectQuery(); /* 81 */ query.setReviewer(request.getReviewer()); /* 82 */ query.setProjectCode(request.getProjectCode()); /* 83 */ List projectList = this.projectListingService.queryAuctionMulitBidProjectUncheckedByProjectCode(query); /* */ /* 86 */ if ((null == projectList) || (projectList.size() <= 0)) { /* 87 */ serviceResult.setErrorNOInfo(Integer.valueOf(EnumAuctionErrors.CHECK_PROJECT_LISTING_NULL.getValue()), EnumAuctionErrors.CHECK_PROJECT_LISTING_NULL.getInfo()); /* */ /* 89 */ return serviceResult; /* */ } /* */ /* 92 */ AuctionFreeBid auctionFreeBid = queryTopUncheckFreeBid(request.getProjectCode(), request.getBidderAccount()); /* */ /* 94 */ if (null == auctionFreeBid) { /* 95 */ serviceResult.setErrorNOInfo(Integer.valueOf(EnumAuctionErrors.PARAMETER_ERROR.getValue()), EnumAuctionErrors.PARAMETER_ERROR.getInfo()); /* */ /* 97 */ return serviceResult; /* */ } /* */ /* 100 */ AuctionBidder auctionBidder = this.auctionBidderDAO.selectNormalByBidderAccount(request.getProjectCode(), request.getBidderAccount()); /* */ /* 102 */ if (null == auctionBidder) { /* 103 */ serviceResult.setErrorNOInfo(Integer.valueOf(EnumAuctionErrors.CHECK_BIDDER_NULL.getValue()), EnumAuctionErrors.CHECK_BIDDER_NULL.getInfo()); /* */ /* 105 */ return serviceResult; /* */ } /* 107 */ ObjectMapper mapper = new ObjectMapper(); /* 108 */ String auctionBidderJson = ""; /* */ try { /* 110 */ auctionBidderJson = mapper.writeValueAsString(auctionBidder); /* */ } catch (IOException e) { /* 112 */ if (this.log.isErrorEnabled()) { /* 113 */ this.log.error("convert auctionBidder to json format fail,", e); /* */ } /* */ } /* 116 */ final String fAuctionBidderJson = auctionBidderJson; /* 117 */ final AuctionFreeBid fAuctionFreeBid = auctionFreeBid; /* 118 */ final String logRemark = getMessage("project.auction.mulitbid.review.log.remark", new String[] { request.getReviewer(), auctionBidder.getBidderAccount() }); /* */ /* 120 */ final AuctionBidder fAuctionBidder = auctionBidder; /* 121 */ final AuctionMulitBidProject fAuctionMulitBidProject = (AuctionMulitBidProject)projectList.get(0); /* */ /* 123 */ serviceResult = (ServiceResult)this.transactionTemplate.execute(new TransactionCallback() { /* */ public ServiceResult doInTransaction(TransactionStatus status) { /* 125 */ ServiceResult result = new ServiceResult(); /* 126 */ Object savePoint = status.createSavepoint(); /* */ try /* */ { /* 129 */ AuctionFreeBid auctionFreeBid = new AuctionFreeBid(); /* 130 */ auctionFreeBid.setBidderAccount(fAuctionFreeBid.getBidderAccount()); /* 131 */ auctionFreeBid.setBidderTrademark(fAuctionFreeBid.getBidderTrademark()); /* 132 */ auctionFreeBid.setBidOperatorAccount(fAuctionFreeBid.getBidOperatorAccount()); /* 133 */ auctionFreeBid.setCheckRemark(request.getRemark()); /* 134 */ auctionFreeBid.setCheckStatus(EnumBidCheckStatus.Fail.getValue()); /* 135 */ auctionFreeBid.setIp(fAuctionFreeBid.getIp()); /* 136 */ auctionFreeBid.setOperator(request.getOperator()); /* 137 */ auctionFreeBid.setPrice(fAuctionFreeBid.getPrice()); /* 138 */ auctionFreeBid.setProjectCode(request.getProjectCode()); /* 139 */ 
auctionFreeBid.setStatus(fAuctionFreeBid.getStatus()); /* 140 */ MulitAuctionServiceImpl.this.auctionFreeBidDAO.insert(auctionFreeBid); /* */ /* 143 */ if (MulitAuctionServiceImpl.this.auctionBidderDAO.deleteByBidderAccount(request.getProjectCode(), request.getBidderAccount()) <= 0) /* */ { /* 145 */ throw new ServiceException(EnumAuctionErrors.REVIEW_DELETE_BIDDER_FAIL.getInfo(), Integer.valueOf(EnumAuctionErrors.REVIEW_DELETE_BIDDER_FAIL.getValue())); /* */ } /* */ /* 150 */ if (EnumActiveStatus.Yes.getValue().equals(fAuctionBidder.getIsPriority())) { /* 151 */ HashMap actionHallMap = new HashMap(); /* 152 */ actionHallMap.put("priorityNumSub", Integer.valueOf(1)); /* 153 */ actionHallMap.put("whereProjectCode", request.getProjectCode()); /* 154 */ if (MulitAuctionServiceImpl.this.auctionHallDAO.updateByMap(actionHallMap) <= 0) { /* 155 */ throw new ServiceException(EnumAuctionErrors.REVIEW_UPDATE_HALL_FALL.getInfo(), Integer.valueOf(EnumAuctionErrors.REVIEW_UPDATE_HALL_FALL.getValue())); /* */ } /* */ /* */ } /* */ /* 172 */ SystemMessageRequest systemMessageRequest = new SystemMessageRequest(); /* 173 */ systemMessageRequest.setSendAccount(EnumOperatorType.SYSTEM.getValue()); /* 174 */ systemMessageRequest.setContent(MulitAuctionServiceImpl.this.getMessage("project.auction.mulitbid.review.message.content", new String[] { fAuctionMulitBidProject.getProjectTitle(), request.getRemark() })); /* */ /* 177 */ systemMessageRequest.setTitle(MulitAuctionServiceImpl.this.getMessage("project.auction.mulitbid.review.message.title", new String[0])); /* */ /* 179 */ List userAccountList = new ArrayList(); /* 180 */ userAccountList.add(fAuctionBidder.getBidderAccount()); /* 181 */ systemMessageRequest.setUserAccountList(userAccountList); /* 182 */ MulitAuctionServiceImpl.this.systemMessageService.sendSystemMessage(systemMessageRequest); /* */ /* 185 */ AuctionLog auctionLog = new AuctionLog(); /* 186 */ auctionLog.setDataJson(fAuctionBidderJson); /* 187 */ auctionLog.setProjectCode(request.getProjectCode()); /* 188 */ auctionLog.setRemark(logRemark); /* 189 */ auctionLog.setOperatorType(EnumOperatorType.REVIEWER.getValue()); /* 190 */ auctionLog.setOperator(request.getReviewer()); /* 191 */ MulitAuctionServiceImpl.this.auctionLogDAO.insert(auctionLog); /* */ } /* */ catch (ServiceException e) { /* 194 */ status.rollbackToSavepoint(savePoint); /* 195 */ MulitAuctionServiceImpl.this.log.error("MulitAuctionServiceImpl review fail", e); /* 196 */ result.setErrorNO(e.getErrorNO()); /* 197 */ result.setErrorInfo(e.getErrorInfo()); /* */ } catch (Exception e) { /* 199 */ status.rollbackToSavepoint(savePoint); /* 200 */ MulitAuctionServiceImpl.this.log.error("MulitAuctionServiceImpl review error", e); /* 201 */ result.setErrorNO(Integer.valueOf(EnumAuctionErrors.INTERNAL_ERROR.getValue())); /* 202 */ result.setErrorInfo(EnumAuctionErrors.INTERNAL_ERROR.getInfo()); /* */ } /* 204 */ return result; /* */ } /* */ }); /* 208 */ return serviceResult; /* */ } /* */ /* */ public AuctionFreeBid queryTopUncheckFreeBid(String projectCode, String bidderAccount) /* */ { /* 213 */ MulitAuctionReviewQuery query = new MulitAuctionReviewQuery(); /* 214 */ query.setBidderAccount(bidderAccount); /* 215 */ query.setCheckStatus(EnumBidCheckStatus.Pass); /* 216 */ query.setProjectCode(projectCode); /* 217 */ query.setStatus(EnumBidPriceStatus.EFFECTIVE); /* 218 */ return this.auctionFreeBidDAO.selectTopByMulitAuctionReviewQuery(query); /* */ } /* */ /* */ protected String getMessage(String code, String[] args) { /* 222 */ return 
this.messageSource.getMessage(code, args, Locale.CHINA); /* */ } /* */ } /* Location: E:\__安装归档\linquan-20161112\deploy16\wulin\webroot\WEB-INF\classes\ * Qualified Name: com.hundsun.network.gates.wulin.biz.service.pojo.auction.MulitAuctionServiceImpl * JD-Core Version: 0.6.0 */
Java
/* * Copyright 2002-2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.utn.dacs2017.compraventa.vendedor; import java.util.Map; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.ResponseBody; /** * @author Grego Dadone */ @Controller class VendedorController { private final VendedorRepository vendedores; @Autowired public VendedorController(VendedorRepository vendedorService) { this.vendedores = vendedorService; } @RequestMapping(value = { "/vendedores.html" }) public String showVendedorList(Map<String, Object> model) { Vendedores vendedores = new Vendedores(); vendedores.getVendedorList().addAll(this.vendedores.findAll()); model.put("vendedores", vendedores); return "vendedores/vendedorList"; } @RequestMapping(value = { "/vendedores.json", "/vendedores.xml" }) public @ResponseBody Vendedores showResourcesVendedorList() { Vendedores vendedores = new Vendedores(); vendedores.getVendedorList().addAll(this.vendedores.findAll()); return vendedores; } }
Java
---
layout: post
title: "LeetCode EFS"
subtitle: ""
date: 2019-12-20
author: "Jiayi.Liu"
header-img: "img/post-bg-2015.jpg"
catalog: true
tags:
    - LeetCode
    - Interview
---

##### 935. Knight Dialer

&nbsp;&nbsp;&nbsp;&nbsp;This question is a great example of seeing a problem from **forward or backward**!

&nbsp;&nbsp;&nbsp;&nbsp;Also, it looks like we should always think about separating the program into **stages** and a **transition function**.

##### 785. Is Graph Bipartite?

&nbsp;&nbsp;&nbsp;&nbsp;There are a few steps for solving a programming problem:

1. Analyze the problem and find a way of solving it **in natural language**. In this step we need to understand the problem and think of an analytical solution or a programmatic solution (search). If no idea comes, a brute-force method should be considered at this point.
2. Find a programming **pattern** (search? traversal? backtracking? DP? etc.) that fits the natural-language solution. Steps 1 and 2 are somewhat related, because programming patterns can also help us find the natural-language solution.
3. See if we can optimize further with tricks such as the one shown in the following problem.

##### 837. New 21 Game

> The purpose of DP is to **reduce duplicated** calculations! So, once you notice duplicated calculation in your algorithm, try DP.

&nbsp;&nbsp;&nbsp;&nbsp;Thinking through the brute-force method can really help us understand the problem. For this problem the brute-force method takes exponential time, and DP is an easily found better solution.

&nbsp;&nbsp;&nbsp;&nbsp;We can learn a trick from this problem: if we want to sum over a range of an array, it can be simplified as `sum(m::n) = sum(1::n) - sum(1::m)`. This is especially useful with DP (see the sketch at the end of this post).

##### 742. Closest Leaf in a Binary Tree

&nbsp;&nbsp;&nbsp;&nbsp;When we need to *go backward* in a tree, *changing it to a graph* can be a good idea. Going backward basically breaks the meaning of the tree structure, so we can expand the tree into a more abstract structure that still preserves the relations between nodes.

&nbsp;&nbsp;&nbsp;&nbsp;Changing a tree representation to a graph breaks the *parent-children* relationship of the tree. The *parent-children* relation can be a constraint but also a helper. For example, when we search for the longest path from leaf to leaf, we can use the parent-children relation as a hint and use DP!

##### 373. Find K Pairs with Smallest Sums

&nbsp;&nbsp;&nbsp;&nbsp;For this question it can be super helpful to think about representing the result as a matrix. But even if you don't, you can still do something to **minimize the cost**, such as only considering the *first k elements of both arrays*.

##### 1182. Shortest Distance to Target Color

&nbsp;&nbsp;&nbsp;&nbsp;This is a typical searching problem, and even though brute-force searching looks OK, we may be able to improve on it. The key to searching is to see **what criteria we are searching for** and **how we can get rid of candidates ASAP**. Using the criteria as an index is a good way to get rid of candidates.

##### 1261. Find Elements in a Contaminated Binary Tree

&nbsp;&nbsp;&nbsp;&nbsp;In this problem we can see that there are always different ways of optimizing. Here we can compute the path from the root to the node, which costs `O(logN)` time and `O(1)` extra memory. However, if we want to optimize for find latency, we can always use a set to store all node values.

&nbsp;&nbsp;&nbsp;&nbsp;Another point to learn from this problem is to **make sure you understand which operation is needed**, for example 'find existence', 'search with criteria', etc.

##### 1130. Minimum Cost Tree From Leaf Values

&nbsp;&nbsp;&nbsp;&nbsp;[This solution](https://leetcode.com/problems/minimum-cost-tree-from-leaf-values/discuss/340004/Python-Easy-DP) is a typical **divide & conquer** method! If the result of the mother problem can be divided into results of smaller problems, we should consider both **DP** and **divide & conquer**.
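
&nbsp;&nbsp;&nbsp;&nbsp;To make the running-sum trick from the 837 section concrete, here is a minimal Java sketch of the sliding-window DP for New 21 Game. It follows the standard formulation of the problem rather than any solution linked above, and the class name and comments are only illustrative: `dp[i]` is the probability of ever holding exactly `i` points, and `windowSum` stands in for the explicit `sum(m::n)`.

```java
class New21GameSketch {
    // N: score upper bound, K: stop drawing once points >= K, W: draws are uniform over 1..W.
    public double new21Game(int N, int K, int W) {
        // If we never draw, or even the largest possible final score (K - 1 + W) fits under N,
        // the probability is 1.
        if (K == 0 || N >= K + W - 1) return 1.0;

        double[] dp = new double[N + 1]; // dp[i] = probability of ever holding exactly i points
        dp[0] = 1.0;
        double windowSum = 1.0;          // sum of dp[j] for j in [i - W, i - 1] with j < K
        double result = 0.0;

        for (int i = 1; i <= N; i++) {
            dp[i] = windowSum / W;       // each of the W possible draws contributes dp[i - draw] / W
            if (i < K) {
                windowSum += dp[i];      // from i we may draw again, so dp[i] feeds later states
            } else {
                result += dp[i];         // i is a terminal score that stays <= N
            }
            if (i - W >= 0 && i - W < K) {
                windowSum -= dp[i - W];  // slide the window: dp[i - W] is now out of range
            }
        }
        return result;
    }
}
```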
Java
package com.afollestad.breadcrumb; import android.annotation.TargetApi; import android.app.Activity; import android.content.Context; import android.graphics.RectF; import android.graphics.drawable.Drawable; import android.os.Build; import android.support.annotation.NonNull; import android.support.v4.view.ViewCompat; import android.text.TextUtils; import android.util.AttributeSet; import android.util.Log; import android.view.LayoutInflater; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.widget.HorizontalScrollView; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.TextView; import java.io.Serializable; import java.util.ArrayList; import java.util.List; /** * @author Aidan Follestad (afollestad) */ public class LinearBreadcrumb extends HorizontalScrollView implements View.OnClickListener { public static class Crumb implements Serializable { public Crumb(String path,String attachMsg) { mPath = path; mAttachMsg = attachMsg; } private final String mPath; private final String mAttachMsg; private int mScrollY; private int mScrollOffset; public int getScrollY() { return mScrollY; } public int getScrollOffset() { return mScrollOffset; } public void setScrollY(int scrollY) { this.mScrollY = scrollY; } public void setScrollOffset(int scrollOffset) { this.mScrollOffset = scrollOffset; } public String getPath() { return mPath; } public String getTitle() { return (!TextUtils.isEmpty(mAttachMsg)) ? mAttachMsg : mPath; } public String getmAttachMsg() { return mAttachMsg; } @Override public boolean equals(Object o) { return (o instanceof Crumb) && ((Crumb) o).getPath().equals(getPath()); } @Override public String toString() { return "Crumb{" + "mAttachMsg='" + mAttachMsg + '\'' + ", mPath='" + mPath + '\'' + ", mScrollY=" + mScrollY + ", mScrollOffset=" + mScrollOffset + '}'; } } public interface SelectionCallback { void onCrumbSelection(Crumb crumb, String absolutePath, int count, int index); } public LinearBreadcrumb(Context context) { super(context); init(); } public LinearBreadcrumb(Context context, AttributeSet attrs) { super(context, attrs); init(); } public LinearBreadcrumb(Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); init(); } private List<Crumb> mCrumbs; private List<Crumb> mOldCrumbs; private LinearLayout mChildFrame; private int mActive; private SelectionCallback mCallback; private void init() { setMinimumHeight((int) getResources().getDimension(R.dimen.breadcrumb_height)); setClipToPadding(false); mCrumbs = new ArrayList<>(); mChildFrame = new LinearLayout(getContext()); addView(mChildFrame, new ViewGroup.LayoutParams( ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.MATCH_PARENT)); } @TargetApi(Build.VERSION_CODES.JELLY_BEAN) private void setAlpha(View view, int alpha) { if (view instanceof ImageView && Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) { ((ImageView) view).setImageAlpha(alpha); } else { ViewCompat.setAlpha(view, alpha); } } public void addCrumb(@NonNull Crumb crumb, boolean refreshLayout) { LinearLayout view = (LinearLayout) LayoutInflater.from(getContext()).inflate(R.layout.bread_crumb, this, false); view.setTag(mCrumbs.size()); view.setClickable(true); view.setFocusable(true); view.setOnClickListener(this); mChildFrame.addView(view, new ViewGroup.LayoutParams( ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT)); mCrumbs.add(crumb); if (refreshLayout) { mActive = mCrumbs.size() - 1; 
requestLayout(); } invalidateActivatedAll(); } @Override protected void onLayout(boolean changed, int l, int t, int r, int b) { super.onLayout(changed, l, t, r, b); //RTL works fine like this View child = mChildFrame.getChildAt(mActive); if (child != null) smoothScrollTo(child.getLeft(), 0); } public Crumb findCrumb(@NonNull String forDir) { for (int i = 0; i < mCrumbs.size(); i++) { if (mCrumbs.get(i).getPath().equals(forDir)) return mCrumbs.get(i); } return null; } public void clearCrumbs() { try { mOldCrumbs = new ArrayList<>(mCrumbs); mCrumbs.clear(); mChildFrame.removeAllViews(); } catch (IllegalStateException e) { e.printStackTrace(); } } public Crumb getCrumb(int index) { return mCrumbs.get(index); } public void setCallback(SelectionCallback callback) { mCallback = callback; } public boolean setActive(Crumb newActive) { mActive = mCrumbs.indexOf(newActive); for(int i = size()-1;size()>mActive+1;i--){ removeCrumbAt(i); } ((LinearLayout)mChildFrame.getChildAt(mActive)).getChildAt(1).setVisibility(View.GONE); boolean success = mActive > -1; if (success) requestLayout(); return success; } private void invalidateActivatedAll() { for (int i = 0; i < mCrumbs.size(); i++) { Crumb crumb = mCrumbs.get(i); invalidateActivated(mChildFrame.getChildAt(i), mActive == mCrumbs.indexOf(crumb), i < mCrumbs.size() - 1).setText(crumb.getTitle()); } } public void removeCrumbAt(int index) { mCrumbs.remove(index); mChildFrame.removeViewAt(index); } private void updateIndices() { for (int i = 0; i < mChildFrame.getChildCount(); i++) mChildFrame.getChildAt(i).setTag(i); } private boolean isValidPath(String path) { return path == null; } public int size() { return mCrumbs.size(); } private TextView invalidateActivated(View view, boolean isActive, boolean isShowSeparator) { LinearLayout child = (LinearLayout) view; if (isShowSeparator) child.getChildAt(1).setVisibility(View.VISIBLE); return (TextView) child.getChildAt(0); } public int getActiveIndex() { return mActive; } @Override public void onClick(View v) { if (mCallback != null) { int index = (Integer) v.getTag(); if (index >= 0 && index < (size()-1)) { setActive(mCrumbs.get(index)); mCallback.onCrumbSelection(mCrumbs.get(index), getAbsolutePath(mCrumbs.get(index), "/"), mCrumbs.size(), index); } } } public static class SavedStateWrapper implements Serializable { public final int mActive; public final List<Crumb> mCrumbs; public final int mVisibility; public SavedStateWrapper(LinearBreadcrumb view) { mActive = view.mActive; mCrumbs = view.mCrumbs; mVisibility = view.getVisibility(); } } public SavedStateWrapper getStateWrapper() { return new SavedStateWrapper(this); } public void restoreFromStateWrapper(SavedStateWrapper mSavedState, Activity context) { if (mSavedState != null) { mActive = mSavedState.mActive; for (Crumb c : mSavedState.mCrumbs) { addCrumb(c, false); } requestLayout(); setVisibility(mSavedState.mVisibility); } } public String getAbsolutePath(Crumb crumb, @NonNull String separator) { StringBuilder builder = new StringBuilder(); if (size() > 1 && !crumb.equals(mCrumbs.get(0))) { List<Crumb> crumbs = mCrumbs.subList(1, size()); for (Crumb mCrumb : crumbs) { builder.append(mCrumb.getPath()); builder.append(separator); if (mCrumb.equals(crumb)) { break; } } String path = builder.toString(); return path.substring(0, path.length() -1); } else { return null; } } public String getCurAbsolutePath(@NonNull String separator){ return getAbsolutePath(getCrumb(mActive),separator); } public void addRootCrumb() { clearCrumbs(); addCrumb(new 
Crumb("/","root"), true); } public void addPath(@NonNull String path,@NonNull String sha, @NonNull String separator) { clearCrumbs(); addCrumb(new Crumb("",""), false); String[] paths = path.split(separator); Crumb lastCrumb = null; for (String splitPath : paths) { lastCrumb = new Crumb(splitPath,sha); addCrumb(lastCrumb, false); } if (lastCrumb != null) { setActive(lastCrumb); } } }
Java
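A minimal usage sketch for the `LinearBreadcrumb` view in the sample above, assuming the library's own resources are on the classpath; the activity class, the path string, and the sha value are placeholders, not part of the original code.

```java
import android.app.Activity;
import android.os.Bundle;

import com.afollestad.breadcrumb.LinearBreadcrumb;

public class BreadcrumbDemoActivity extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Build the breadcrumb bar programmatically and use it as the content view.
        final LinearBreadcrumb breadcrumbs = new LinearBreadcrumb(this);
        setContentView(breadcrumbs);

        // addPath() clears the bar, splits the path on the separator,
        // adds one crumb per segment and marks the last one active.
        breadcrumbs.addPath("src/main/java", "example-sha", "/");

        // The callback fires when a crumb is tapped.
        breadcrumbs.setCallback(new LinearBreadcrumb.SelectionCallback() {
            @Override
            public void onCrumbSelection(LinearBreadcrumb.Crumb crumb,
                                         String absolutePath, int count, int index) {
                // Navigate to absolutePath here; it can be null for the first crumb.
            }
        });
    }
}
```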
/** * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ 'use strict'; const invariant = require('invariant'); /** * @typedef {object} request * @property {string} method * @property {string} route - string like /abc/:abc * @property {string} params * @property {object} [body] * @property {object} headers */ /** * @typedef {object} reponse * @property {Error} error - an error which occured during req or res * @property {object} body - content received from server (parsed) * @property {object} headers - set additional request headers * @property {number} status - http status code; 0 on failure */ /** * XHR wrapper for same-domain requests with Content-Type: application/json * * @param {request} request * @return {Promise} */ export default function implore(request) { return new Promise(resolve => { const response = { error: null }; invariant( request, 'implore requires a `request` argument' ); invariant( typeof request.route === 'string', 'implore requires parameter `route` to be a string' ); invariant( typeof request.method === 'string', 'implore requires parameter `method` to be a string' ); const xhr = new XMLHttpRequest(); xhr.open(request.method, getURLFromRequest(request)); switch (request.method) { case 'POST': case 'PUT': case 'PATCH': xhr.setRequestHeader('Content-Type', 'application/json'); break; } if (request.headers) { invariant( typeof request.headers === 'object', 'implore requires parameter `headers` to be an object' ); Object.keys(request.headers).forEach((header) => { xhr.setRequestHeader(header, request.headers[header]); }); } xhr.onreadystatechange = function onreadystatechange() { let responseText; if (xhr.readyState === 4) { responseText = xhr.responseText; response.status = xhr.status; response.type = xhr.getResponseHeader('Content-Type'); if (response.type === 'application/json') { try { response.body = JSON.parse(responseText); } catch (err) { err.message = err.message + ' while parsing `' + responseText + '`'; response.body = {}; response.status = xhr.status || 0; response.error = err; } } else { response.body = responseText; } return resolve({ request, response }); } }; try { if (request.body) { xhr.send(JSON.stringify(request.body)); } else { xhr.send(); } } catch (err) { response.body = {}; response.status = 0; response.error = err; return resolve({ request, response }); } }); } implore.get = function get(options) { options.method = 'GET'; return implore(options); }; implore.post = function post(options) { options.method = 'POST'; return implore(options); }; implore.put = function put(options) { options.method = 'PUT'; return implore(options); }; implore.delete = function httpDelete(options) { options.method = 'DELETE'; return implore(options); }; /** * Combine the route/params/query of a request into a complete URL * * @param {request} request * @param {object|array} request.query * @return {string} url */ function getURLFromRequest(request) { const queryString = makeQueryString(request.query || {}); let formatted = request.route; let name; let value; let regexp; for (name in 
request.params) { if (request.params.hasOwnProperty(name)) { value = request.params[name]; regexp = new RegExp(':' + name + '(?=(\\\/|$))'); formatted = formatted.replace(regexp, value); } } return formatted + (queryString ? '?' + queryString : ''); } /** * Take a simple object and turn it into a queryString, recursively. * * @param {object} obj - query object * @param {string} prefix - used in recursive calls to keep track of the parent * @return {string} queryString without the '?'' */ function makeQueryString(obj, prefix='') { const str = []; let prop; let key; let value; for (prop in obj) { if (obj.hasOwnProperty(prop)) { key = prefix ? prefix + '[' + prop + ']' : prop; value = obj[prop]; str.push(typeof value === 'object' ? makeQueryString(value, key) : encodeURIComponent(key) + '=' + encodeURIComponent(value)); } } return str.join('&'); }
Java
/* * Copyright 2015 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.orc.impl; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assume.assumeTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.anyBoolean; import static org.mockito.Mockito.anyInt; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import java.io.File; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.Random; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.orc.CompressionKind; import org.apache.orc.OrcConf; import org.apache.orc.OrcFile; import org.apache.orc.Reader; import org.apache.orc.RecordReader; import org.apache.orc.TypeDescription; import org.apache.orc.Writer; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestName; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.runners.MockitoJUnitRunner; @RunWith(MockitoJUnitRunner.class) public class TestOrcLargeStripe { private Path workDir = new Path(System.getProperty("test.tmp.dir", "target" + File.separator + "test" + File.separator + "tmp")); Configuration conf; FileSystem fs; private Path testFilePath; @Rule public TestName testCaseName = new TestName(); @Before public void openFileSystem() throws Exception { conf = new Configuration(); fs = FileSystem.getLocal(conf); testFilePath = new Path(workDir, "TestOrcFile." 
+ testCaseName.getMethodName() + ".orc"); fs.delete(testFilePath, false); } @Mock private FSDataInputStream mockDataInput; static class RangeBuilder { BufferChunkList result = new BufferChunkList(); RangeBuilder range(long offset, int length) { result.add(new BufferChunk(offset, length)); return this; } BufferChunkList build() { return result; } } @Test public void testZeroCopy() throws Exception { BufferChunkList ranges = new RangeBuilder().range(1000, 3000).build(); HadoopShims.ZeroCopyReaderShim mockZcr = mock(HadoopShims.ZeroCopyReaderShim.class); when(mockZcr.readBuffer(anyInt(), anyBoolean())) .thenAnswer(invocation -> ByteBuffer.allocate(1000)); RecordReaderUtils.readDiskRanges(mockDataInput, mockZcr, ranges, true); verify(mockDataInput).seek(1000); verify(mockZcr).readBuffer(3000, false); verify(mockZcr).readBuffer(2000, false); verify(mockZcr).readBuffer(1000, false); } @Test public void testRangeMerge() throws Exception { BufferChunkList rangeList = new RangeBuilder() .range(100, 1000) .range(1000, 10000) .range(3000, 30000).build(); RecordReaderUtils.readDiskRanges(mockDataInput, null, rangeList, false); verify(mockDataInput).readFully(eq(100L), any(), eq(0), eq(32900)); } @Test public void testRangeSkip() throws Exception { BufferChunkList rangeList = new RangeBuilder() .range(1000, 1000) .range(2000, 1000) .range(4000, 1000) .range(4100, 100) .range(8000, 1000).build(); RecordReaderUtils.readDiskRanges(mockDataInput, null, rangeList, false); verify(mockDataInput).readFully(eq(1000L), any(), eq(0), eq(2000)); verify(mockDataInput).readFully(eq(4000L), any(), eq(0), eq(1000)); verify(mockDataInput).readFully(eq(8000L), any(), eq(0), eq(1000)); } @Test public void testEmpty() throws Exception { BufferChunkList rangeList = new RangeBuilder().build(); RecordReaderUtils.readDiskRanges(mockDataInput, null, rangeList, false); verify(mockDataInput, never()).readFully(anyLong(), any(), anyInt(), anyInt()); } @Test public void testConfigMaxChunkLimit() throws IOException { Configuration conf = new Configuration(); FileSystem fs = FileSystem.getLocal(conf); TypeDescription schema = TypeDescription.createTimestamp(); testFilePath = new Path(workDir, "TestOrcLargeStripe." 
+ testCaseName.getMethodName() + ".orc"); fs.delete(testFilePath, false); Writer writer = OrcFile.createWriter(testFilePath, OrcFile.writerOptions(conf).setSchema(schema).stripeSize(100000).bufferSize(10000) .version(OrcFile.Version.V_0_11).fileSystem(fs)); writer.close(); OrcFile.ReaderOptions opts = OrcFile.readerOptions(conf); Reader reader = OrcFile.createReader(testFilePath, opts); RecordReader recordReader = reader.rows(new Reader.Options().range(0L, Long.MAX_VALUE)); assertTrue(recordReader instanceof RecordReaderImpl); assertEquals(Integer.MAX_VALUE - 1024, ((RecordReaderImpl) recordReader).getMaxDiskRangeChunkLimit()); conf = new Configuration(); conf.setInt(OrcConf.ORC_MAX_DISK_RANGE_CHUNK_LIMIT.getHiveConfName(), 1000); opts = OrcFile.readerOptions(conf); reader = OrcFile.createReader(testFilePath, opts); recordReader = reader.rows(new Reader.Options().range(0L, Long.MAX_VALUE)); assertTrue(recordReader instanceof RecordReaderImpl); assertEquals(1000, ((RecordReaderImpl) recordReader).getMaxDiskRangeChunkLimit()); } @Test public void testStringDirectGreaterThan2GB() throws IOException { final Runtime rt = Runtime.getRuntime(); assumeTrue(rt.maxMemory() > 4_000_000_000L); TypeDescription schema = TypeDescription.createString(); conf.setDouble("hive.exec.orc.dictionary.key.size.threshold", 0.0); Writer writer = OrcFile.createWriter( testFilePath, OrcFile.writerOptions(conf).setSchema(schema) .compress(CompressionKind.NONE)); // 5000 is the lower bound for a stripe int size = 5000; int width = 500_000; // generate a random string that is width characters long Random random = new Random(123); char[] randomChars= new char[width]; int posn = 0; for(int length = 0; length < width && posn < randomChars.length; ++posn) { char cp = (char) random.nextInt(Character.MIN_SUPPLEMENTARY_CODE_POINT); // make sure we get a valid, non-surrogate while (Character.isSurrogate(cp)) { cp = (char) random.nextInt(Character.MIN_SUPPLEMENTARY_CODE_POINT); } // compute the length of the utf8 length += cp < 0x80 ? 1 : (cp < 0x800 ? 2 : 3); randomChars[posn] = cp; } // put the random characters in as a repeating value. VectorizedRowBatch batch = schema.createRowBatch(); BytesColumnVector string = (BytesColumnVector) batch.cols[0]; string.setVal(0, new String(randomChars, 0, posn).getBytes(StandardCharsets.UTF_8)); string.isRepeating = true; for(int rows=size; rows > 0; rows -= batch.size) { batch.size = Math.min(rows, batch.getMaxSize()); writer.addRowBatch(batch); } writer.close(); try { Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs)); RecordReader rows = reader.rows(); batch = reader.getSchema().createRowBatch(); int rowsRead = 0; while (rows.nextBatch(batch)) { rowsRead += batch.size; } assertEquals(size, rowsRead); } finally { fs.delete(testFilePath, false); } } }
Java
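The test class above exercises the standard ORC read path; the sketch below shows that same path in isolation (`OrcFile.createReader`, `Reader.rows()`, `VectorizedRowBatch`), assuming orc-core and the Hadoop client are on the classpath. The file path is a placeholder.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.orc.OrcFile;
import org.apache.orc.Reader;
import org.apache.orc.RecordReader;

public class OrcReadSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Placeholder path; the tests above write their files under target/test/tmp.
        Path file = new Path("target/test/tmp/example.orc");

        Reader reader = OrcFile.createReader(file, OrcFile.readerOptions(conf));
        RecordReader rows = reader.rows();
        VectorizedRowBatch batch = reader.getSchema().createRowBatch();

        // nextBatch() fills the batch and returns false once the file is exhausted.
        long rowCount = 0;
        while (rows.nextBatch(batch)) {
            rowCount += batch.size;
        }
        rows.close();
        System.out.println("rows read: " + rowCount);
    }
}
```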
/* * Solo - A small and beautiful blogging system written in Java. * Copyright (c) 2010-present, b3log.org * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <https://www.gnu.org/licenses/>. */ package org.b3log.solo.processor; import org.apache.commons.lang.StringUtils; import org.b3log.solo.AbstractTestCase; import org.b3log.solo.MockHttpServletRequest; import org.b3log.solo.MockHttpServletResponse; import org.testng.Assert; import org.testng.annotations.Test; /** * {@link ErrorProcessor} test case. * * @author <a href="http://88250.b3log.org">Liang Ding</a> * @version 1.0.1.3, Feb 22, 2019 * @since 1.7.0 */ @Test(suiteName = "processor") public class ErrorProcessorTestCase extends AbstractTestCase { /** * Init. * * @throws Exception exception */ @Test public void init() throws Exception { super.init(); } /** * showErrorPage. */ @Test(dependsOnMethods = "init") public void showErrorPage() { final MockHttpServletRequest request = mockRequest(); request.setRequestURI("/error/403"); final MockHttpServletResponse response = mockResponse(); mockDispatcherServletService(request, response); final String content = response.body(); Assert.assertTrue(StringUtils.contains(content, "<title>403 Forbidden! - Solo 的个人博客</title>")); } }
Java
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_171) on Fri Apr 16 05:17:59 UTC 2021 --> <title>ListInitiatorsRequest.Builder</title> <meta name="date" content="2021-04-16"> <link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="ListInitiatorsRequest.Builder"; } } catch(err) { } //--> var methods = {"i0":10,"i1":10,"i2":10,"i3":10}; var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]}; var altColor = "altColor"; var rowColor = "rowColor"; var tableTab = "tableTab"; var activeTableTab = "activeTableTab"; </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../overview-summary.html">Overview</a></li> <li><a href="package-summary.html">Package</a></li> <li class="navBarCell1Rev">Class</li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../index-all.html">Index</a></li> <li><a href="../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../com/solidfire/element/api/ListInitiatorsRequest.html" title="class in com.solidfire.element.api"><span class="typeNameLink">Prev&nbsp;Class</span></a></li> <li><a href="../../../../com/solidfire/element/api/ListInitiatorsResult.html" title="class in com.solidfire.element.api"><span class="typeNameLink">Next&nbsp;Class</span></a></li> </ul> <ul class="navList"> <li><a href="../../../../index.html?com/solidfire/element/api/ListInitiatorsRequest.Builder.html" target="_top">Frames</a></li> <li><a href="ListInitiatorsRequest.Builder.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <div> <ul class="subNavList"> <li>Summary:&nbsp;</li> <li>Nested&nbsp;|&nbsp;</li> <li>Field&nbsp;|&nbsp;</li> <li>Constr&nbsp;|&nbsp;</li> <li><a href="#method.summary">Method</a></li> </ul> <ul class="subNavList"> <li>Detail:&nbsp;</li> <li>Field&nbsp;|&nbsp;</li> <li>Constr&nbsp;|&nbsp;</li> <li><a href="#method.detail">Method</a></li> </ul> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <!-- ======== START OF CLASS DATA ======== --> <div class="header"> <div class="subTitle">com.solidfire.element.api</div> <h2 title="Class ListInitiatorsRequest.Builder" class="title">Class ListInitiatorsRequest.Builder</h2> </div> <div class="contentContainer"> <ul class="inheritance"> <li>java.lang.Object</li> <li> <ul class="inheritance"> 
<li>com.solidfire.element.api.ListInitiatorsRequest.Builder</li> </ul> </li> </ul> <div class="description"> <ul class="blockList"> <li class="blockList"> <dl> <dt>Enclosing class:</dt> <dd><a href="../../../../com/solidfire/element/api/ListInitiatorsRequest.html" title="class in com.solidfire.element.api">ListInitiatorsRequest</a></dd> </dl> <hr> <br> <pre>public static class <span class="typeNameLabel">ListInitiatorsRequest.Builder</span> extends java.lang.Object</pre> </li> </ul> </div> <div class="summary"> <ul class="blockList"> <li class="blockList"> <!-- ========== METHOD SUMMARY =========== --> <ul class="blockList"> <li class="blockList"><a name="method.summary"> <!-- --> </a> <h3>Method Summary</h3> <table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation"> <caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd">&nbsp;</span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd">&nbsp;</span></span><span id="t4" class="tableTab"><span><a href="javascript:show(8);">Concrete Methods</a></span><span class="tabEnd">&nbsp;</span></span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Method and Description</th> </tr> <tr id="i0" class="altColor"> <td class="colFirst"><code><a href="../../../../com/solidfire/element/api/ListInitiatorsRequest.html" title="class in com.solidfire.element.api">ListInitiatorsRequest</a></code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../com/solidfire/element/api/ListInitiatorsRequest.Builder.html#build--">build</a></span>()</code>&nbsp;</td> </tr> <tr id="i1" class="rowColor"> <td class="colFirst"><code><a href="../../../../com/solidfire/element/api/ListInitiatorsRequest.Builder.html" title="class in com.solidfire.element.api">ListInitiatorsRequest.Builder</a></code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../com/solidfire/element/api/ListInitiatorsRequest.Builder.html#optionalInitiators-java.lang.Long:A-">optionalInitiators</a></span>(java.lang.Long[]&nbsp;initiators)</code>&nbsp;</td> </tr> <tr id="i2" class="altColor"> <td class="colFirst"><code><a href="../../../../com/solidfire/element/api/ListInitiatorsRequest.Builder.html" title="class in com.solidfire.element.api">ListInitiatorsRequest.Builder</a></code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../com/solidfire/element/api/ListInitiatorsRequest.Builder.html#optionalLimit-java.lang.Long-">optionalLimit</a></span>(java.lang.Long&nbsp;limit)</code>&nbsp;</td> </tr> <tr id="i3" class="rowColor"> <td class="colFirst"><code><a href="../../../../com/solidfire/element/api/ListInitiatorsRequest.Builder.html" title="class in com.solidfire.element.api">ListInitiatorsRequest.Builder</a></code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../com/solidfire/element/api/ListInitiatorsRequest.Builder.html#optionalStartInitiatorID-java.lang.Long-">optionalStartInitiatorID</a></span>(java.lang.Long&nbsp;startInitiatorID)</code>&nbsp;</td> </tr> </table> <ul class="blockList"> <li class="blockList"><a name="methods.inherited.from.class.java.lang.Object"> <!-- --> </a> <h3>Methods inherited from class&nbsp;java.lang.Object</h3> <code>clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait</code></li> </ul> 
</li> </ul> </li> </ul> </div> <div class="details"> <ul class="blockList"> <li class="blockList"> <!-- ============ METHOD DETAIL ========== --> <ul class="blockList"> <li class="blockList"><a name="method.detail"> <!-- --> </a> <h3>Method Detail</h3> <a name="build--"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>build</h4> <pre>public&nbsp;<a href="../../../../com/solidfire/element/api/ListInitiatorsRequest.html" title="class in com.solidfire.element.api">ListInitiatorsRequest</a>&nbsp;build()</pre> </li> </ul> <a name="optionalStartInitiatorID-java.lang.Long-"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>optionalStartInitiatorID</h4> <pre>public&nbsp;<a href="../../../../com/solidfire/element/api/ListInitiatorsRequest.Builder.html" title="class in com.solidfire.element.api">ListInitiatorsRequest.Builder</a>&nbsp;optionalStartInitiatorID(java.lang.Long&nbsp;startInitiatorID)</pre> </li> </ul> <a name="optionalLimit-java.lang.Long-"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>optionalLimit</h4> <pre>public&nbsp;<a href="../../../../com/solidfire/element/api/ListInitiatorsRequest.Builder.html" title="class in com.solidfire.element.api">ListInitiatorsRequest.Builder</a>&nbsp;optionalLimit(java.lang.Long&nbsp;limit)</pre> </li> </ul> <a name="optionalInitiators-java.lang.Long:A-"> <!-- --> </a> <ul class="blockListLast"> <li class="blockList"> <h4>optionalInitiators</h4> <pre>public&nbsp;<a href="../../../../com/solidfire/element/api/ListInitiatorsRequest.Builder.html" title="class in com.solidfire.element.api">ListInitiatorsRequest.Builder</a>&nbsp;optionalInitiators(java.lang.Long[]&nbsp;initiators)</pre> </li> </ul> </li> </ul> </li> </ul> </div> </div> <!-- ========= END OF CLASS DATA ========= --> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../overview-summary.html">Overview</a></li> <li><a href="package-summary.html">Package</a></li> <li class="navBarCell1Rev">Class</li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../index-all.html">Index</a></li> <li><a href="../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../com/solidfire/element/api/ListInitiatorsRequest.html" title="class in com.solidfire.element.api"><span class="typeNameLink">Prev&nbsp;Class</span></a></li> <li><a href="../../../../com/solidfire/element/api/ListInitiatorsResult.html" title="class in com.solidfire.element.api"><span class="typeNameLink">Next&nbsp;Class</span></a></li> </ul> <ul class="navList"> <li><a href="../../../../index.html?com/solidfire/element/api/ListInitiatorsRequest.Builder.html" target="_top">Frames</a></li> <li><a href="ListInitiatorsRequest.Builder.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <div> <ul class="subNavList"> 
<li>Summary:&nbsp;</li> <li>Nested&nbsp;|&nbsp;</li> <li>Field&nbsp;|&nbsp;</li> <li>Constr&nbsp;|&nbsp;</li> <li><a href="#method.summary">Method</a></li> </ul> <ul class="subNavList"> <li>Detail:&nbsp;</li> <li>Field&nbsp;|&nbsp;</li> <li>Constr&nbsp;|&nbsp;</li> <li><a href="#method.detail">Method</a></li> </ul> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> </body> </html>
Java
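The generated Javadoc above only lists the builder's setters and `build()`; a plausible usage sketch follows. The static `ListInitiatorsRequest.builder()` factory is an assumption based on the usual convention in this SDK and is not shown on the page above; the argument values are placeholders.

```java
import com.solidfire.element.api.ListInitiatorsRequest;

public class ListInitiatorsRequestSketch {
    public static void main(String[] args) {
        // optionalStartInitiatorID, optionalLimit, optionalInitiators and build()
        // are the methods documented in the Javadoc above; builder() is assumed.
        ListInitiatorsRequest request = ListInitiatorsRequest.builder()
                .optionalStartInitiatorID(0L)
                .optionalLimit(50L)
                .optionalInitiators(new Long[]{1L, 2L, 3L})
                .build();
        System.out.println(request);
    }
}
```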
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flowable.engine.impl.cmd; import java.io.Serializable; import org.flowable.engine.common.api.FlowableException; import org.flowable.engine.common.api.FlowableIllegalArgumentException; import org.flowable.engine.common.api.FlowableObjectNotFoundException; import org.flowable.engine.common.impl.interceptor.Command; import org.flowable.engine.common.impl.interceptor.CommandContext; import org.flowable.engine.form.TaskFormData; import org.flowable.engine.impl.form.FormEngine; import org.flowable.engine.impl.form.TaskFormHandler; import org.flowable.engine.impl.persistence.entity.TaskEntity; import org.flowable.engine.impl.util.CommandContextUtil; import org.flowable.engine.impl.util.FormHandlerUtil; import org.flowable.engine.task.Task; /** * @author Tom Baeyens * @author Joram Barrez */ public class GetRenderedTaskFormCmd implements Command<Object>, Serializable { private static final long serialVersionUID = 1L; protected String taskId; protected String formEngineName; public GetRenderedTaskFormCmd(String taskId, String formEngineName) { this.taskId = taskId; this.formEngineName = formEngineName; } public Object execute(CommandContext commandContext) { if (taskId == null) { throw new FlowableIllegalArgumentException("Task id should not be null"); } TaskEntity task = CommandContextUtil.getTaskEntityManager(commandContext).findById(taskId); if (task == null) { throw new FlowableObjectNotFoundException("Task '" + taskId + "' not found", Task.class); } TaskFormHandler taskFormHandler = FormHandlerUtil.getTaskFormHandlder(task); if (taskFormHandler != null) { FormEngine formEngine = CommandContextUtil.getProcessEngineConfiguration(commandContext).getFormEngines().get(formEngineName); if (formEngine == null) { throw new FlowableException("No formEngine '" + formEngineName + "' defined process engine configuration"); } TaskFormData taskForm = taskFormHandler.createTaskForm(task); return formEngine.renderTaskForm(taskForm); } return null; } }
Java
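`GetRenderedTaskFormCmd` is normally reached through the public `FormService` API rather than instantiated directly. A sketch of that call path follows, assuming a process engine configured via `flowable.cfg.xml`; the task id and the "juel" form engine name are placeholders (juel being the usual default engine name).

```java
import org.flowable.engine.FormService;
import org.flowable.engine.ProcessEngine;
import org.flowable.engine.ProcessEngines;

public class RenderedTaskFormSketch {
    public static void main(String[] args) {
        // Builds (or reuses) the engine defined by flowable.cfg.xml on the classpath.
        ProcessEngine processEngine = ProcessEngines.getDefaultProcessEngine();
        FormService formService = processEngine.getFormService();

        String taskId = "12345"; // placeholder task id

        // Renders the task form with the named form engine; internally this executes
        // a command equivalent to GetRenderedTaskFormCmd above.
        Object renderedForm = formService.getRenderedTaskForm(taskId, "juel");
        System.out.println(renderedForm);
    }
}
```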
/* * $Id: config.h,v 1.8 2007/01/04 21:29:21 jms Exp $ * * Revision History * =================== * $Log: config.h,v $ * Revision 1.8 2007/01/04 21:29:21 jms * Porting changes uncovered as part of move to VS2005. No impact on data set * * Revision 1.7 2006/06/29 20:46:17 jms * 2.4.0 changes from Meikel * * Revision 1.6 2006/05/31 22:25:21 jms * Rework UnifInt calls in varsub to handle lack of PROTO defn in windows * * Revision 1.5 2006/05/25 22:35:36 jms * qgen porting changes for 32b/64b * * Revision 1.4 2006/03/09 18:54:55 jms * porting bugs * * Revision 1.3 2005/03/04 19:48:39 jms * Changes from Doug Johnson to address very large scale factors * * Revision 1.2 2005/01/03 20:08:58 jms * change line terminations * * Revision 1.1.1.1 2004/11/24 23:31:46 jms * re-establish external server * * Revision 1.7 2004/04/08 17:36:47 jms * clarify config.h/makefile linkage * * Revision 1.6 2004/04/08 17:35:00 jms * SUN/SOLARIS ifdef merge between machines * * Revision 1.5 2004/04/08 17:27:53 jms * solaris porting fixes * * Revision 1.4 2003/08/12 16:45:26 jms * linux porting changes * * Revision 1.3 2003/08/08 21:35:26 jms * first integration of rng64 for o_custkey and l_partkey * * Revision 1.2 2003/08/07 17:58:34 jms * Convery RNG to 64bit space as preparation for new large scale RNG * * Revision 1.1.1.1 2003/04/03 18:54:21 jms * initial checkin * * */ /* * this file allows the compilation of DBGEN to be tailored to specific * architectures and operating systems. Some options are grouped * together to allow easier compilation on a given vendor's hardware. * * The following #defines will effect the code: * KILL(pid) -- how to terminate a process in a parallel load * SPAWN -- name of system call to clone an existing process * SET_HANDLER(proc) -- name of routine to handle signals in parallel load * WAIT(res, pid) -- how to await the termination of a child * SEPARATOR -- character used to separate fields in flat files * STDLIB_HAS_GETOPT -- to prevent confilcts with gloabal getopt() * MDY_DATE -- generate dates as MM-DD-YY * WIN32 -- support for WindowsNT * SUPPORT_64BITS -- compiler defines a 64 bit datatype * DSS_HUGE -- 64 bit data type * HUGE_FORMAT -- printf string for 64 bit data type * EOL_HANDLING -- flat files don't need final column separator * * Certain defines must be provided in the makefile: * MACHINE defines * ========== * ATT -- getopt() handling * DOS -- disable all multi-user functionality/dependency * HP -- posix source inclusion differences * IBM -- posix source inclusion differences * SGI -- getopt() handling * SUN -- getopt() handling * LINUX * WIN32 -- for WINDOWS * * DATABASE defines * ================ * DB2 -- use DB2 dialect in QGEN * INFORMIX -- use Informix dialect in QGEN * SQLSERVER -- use SQLSERVER dialect in QGEN * SYBASE -- use Sybase dialect in QGEN * TDAT -- use Teradata dialect in QGEN * * WORKLOAD defines * ================ * TPCH -- make will create TPCH (set in makefile) */ // JMP -- flat files don't need a SEP at the end of the last column #define EOL_HANDLING #ifdef DOS #define PATH_SEP '\\' #else #ifdef ATT #define STDLIB_HAS_GETOPT #ifdef SQLSERVER #define WIN32 #else /* the 64 bit defines are for the Metaware compiler */ #define SUPPORT_64BITS #define DSS_HUGE long long #define RNG_A 6364136223846793005ull #define RNG_C 1ull #define HUGE_FORMAT "%LLd" #define HUGE_DATE_FORMAT "%02LLd" #endif /* SQLSERVER or MP/RAS */ #endif /* ATT */ #ifdef HP #define _INCLUDE_POSIX_SOURCE #define STDLIB_HAS_GETOPT #define SUPPORT_64BITS #define DSS_HUGE long #define 
HUGE_COUNT 2 #define HUGE_FORMAT "%ld" #define HUGE_DATE_FORMAT "%02lld" #define RNG_C 1ull #define RNG_A 6364136223846793005ull #endif /* HP */ #ifdef IBM #define STDLIB_HAS_GETOPT #define SUPPORT_64BITS #define DSS_HUGE long long #define HUGE_FORMAT "%lld" #define HUGE_DATE_FORMAT "%02lld" #define RNG_A 6364136223846793005ull #define RNG_C 1ull #endif /* IBM */ #ifdef LINUX #define STDLIB_HAS_GETOPT #define SUPPORT_64BITS #define DSS_HUGE long long int #define HUGE_FORMAT "%lld" #define HUGE_DATE_FORMAT "%02lld" #define RNG_A 6364136223846793005ull #define RNG_C 1ull #endif /* LINUX */ #ifdef SUN #define STDLIB_HAS_GETOPT #define RNG_A 6364136223846793005ull #define RNG_C 1ull #define SUPPORT_64BITS #define DSS_HUGE long long #define HUGE_FORMAT "%lld" #define HUGE_DATE_FORMAT "%02lld" #endif /* SUN */ #ifdef SGI #define STDLIB_HAS_GETOPT #define SUPPORT_64BITS #define DSS_HUGE __int64_t #endif /* SGI */ #if (defined(WIN32)&&!defined(_POSIX_)) #define pid_t int #define SET_HANDLER(proc) signal(SIGINT, proc) #define KILL(pid) \ TerminateProcess(OpenProcess(PROCESS_TERMINATE,FALSE,pid),3) #if (defined (__WATCOMC__)) #define SPAWN() spawnv(P_NOWAIT, spawn_args[0], spawn_args) #define WAIT(res, pid) cwait(res, pid, WAIT_CHILD) #else #define SPAWN() _spawnv(_P_NOWAIT, spawn_args[0], spawn_args) #define WAIT(res, pid) _cwait(res, pid, _WAIT_CHILD) #define getpid _getpid #endif /* WATCOMC */ #define SIGS_DEFINED #define PATH_SEP '\\' #define SUPPORT_64BITS #define DSS_HUGE __int64 #define RNG_A 6364136223846793005uI64 #define RNG_C 1uI64 #define HUGE_FORMAT "%I64d" #define HUGE_DATE_FORMAT "%02I64d" /* need to define process termination codes to match UNIX */ /* these are copied from Linux/GNU and need to be verified as part of a rework of */ /* process handling under NT (29 Apr 98) */ #define WIFEXITED(s) ((s & 0xFF) == 0) #define WIFSIGNALED(s) (((unsigned int)((status)-1) & 0xFFFF) < 0xFF) #define WIFSTOPPED(s) (((s) & 0xff) == 0x7f) #define WTERMSIG(s) ((s) & 0x7f) #define WSTOPSIG(s) (((s) & 0xff00) >> 8) /* requried by move to Visual Studio 2005 */ #define strdup(x) _strdup(x) #endif /* WIN32 */ #ifndef SIGS_DEFINED #define KILL(pid) kill(SIGUSR1, pid) #define SET_HANDLER(proc) signal(SIGUSR1, proc) #define SPAWN fork #define WAIT(res, pid) wait(res) #endif /* DEFAULT */ #endif /* DOS */ #ifndef PATH_SEP #define PATH_SEP '/' #endif /* PATH_SEP */ #ifndef DSS_HUGE #error Support for a 64-bit datatype is required in this release #endif #ifndef DOUBLE_CAST #define DOUBLE_CAST (double) #endif /* DOUBLE_CAST */
Java
# Ophrys lievreae nothosubsp. sanctileonis (O.Danesch & E.Danesch) Del Prete SUBSPECIES #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
Java
package com.stdnull.v2api.model; import android.os.Bundle; import android.os.Parcelable; import java.util.ArrayList; import java.util.List; /** * Created by chen on 2017/8/20. */ public class V2MainFragModel { private static final String KEY_V2EXBEAN = "KEY_V2EXBEAN"; private List<V2ExBean> mContentListModel = new ArrayList<>(); public List<V2ExBean> getContentListModel() { return mContentListModel; } public void addContentListModel(List<V2ExBean> contentListModel) { if(contentListModel != null) { this.mContentListModel.addAll(contentListModel); } } public boolean isModelEmpty(){ return mContentListModel.isEmpty() ; } public void clearModel(){ mContentListModel.clear(); } public void save(Bundle bundle){ bundle.putParcelableArrayList(KEY_V2EXBEAN, (ArrayList<? extends Parcelable>) mContentListModel); } public boolean restore(Bundle bundle){ if(bundle == null){ return false; } mContentListModel = bundle.getParcelableArrayList(KEY_V2EXBEAN); return mContentListModel != null && !mContentListModel.isEmpty(); } }
Java
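A sketch of how the model's `save`/`restore` pair is meant to plug into a fragment's lifecycle, assuming `V2ExBean` is `Parcelable` (implied by `putParcelableArrayList` above); the fragment class and the `loadFromNetwork()` helper are hypothetical.

```java
import android.os.Bundle;
import android.support.v4.app.Fragment;

import com.stdnull.v2api.model.V2MainFragModel;

public class V2MainFragment extends Fragment {
    private final V2MainFragModel mModel = new V2MainFragModel();

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // restore() returns false when there is no saved (non-empty) list,
        // in which case the data has to be fetched again.
        if (!mModel.restore(savedInstanceState)) {
            loadFromNetwork();
        }
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        // Persist the current list so restore() can pick it up after recreation.
        mModel.save(outState);
    }

    private void loadFromNetwork() {
        // Hypothetical helper: fetch List<V2ExBean>, hand it to
        // mModel.addContentListModel(...), then refresh the UI.
    }
}
```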
DROP TABLE IF EXISTS tags; DROP TABLE IF EXISTS invitations; DROP TABLE IF EXISTS tasks; DROP TABLE IF EXISTS projects; DROP TABLE IF EXISTS teams_users; DROP TABLE IF EXISTS teams; DROP TABLE IF EXISTS orgs_users; DROP TABLE IF EXISTS organizations; DROP TABLE IF EXISTS users; CREATE TABLE users ( id int(11) NOT NULL AUTO_INCREMENT, name varchar(50) NOT NULL, email varchar(100) NOT NULL, password varchar(100) NOT NULL, enabled tinyint(1) DEFAULT NULL, locked tinyint(1) DEFAULT NULL, activation_token varchar(100) DEFAULT NULL, forgot_password_token varchar(100) DEFAULT NULL, created_by int(11) DEFAULT NULL, created_date date DEFAULT NULL, updated_by int(11) DEFAULT NULL, updated_date date DEFAULT NULL, PRIMARY KEY (id), UNIQUE KEY EMAIL_UNIQUE (email) ); CREATE TABLE organizations ( id int(11) NOT NULL AUTO_INCREMENT, name varchar(100) NOT NULL, owner_user_id int(11) NOT NULL, created_by int(11) DEFAULT NULL, created_date date DEFAULT NULL, updated_by int(11) DEFAULT NULL, updated_date date DEFAULT NULL, PRIMARY KEY (id), UNIQUE KEY ORG_NAME_UNIQUE (owner_user_id,name), CONSTRAINT fk_owner_user_id FOREIGN KEY (owner_user_id) REFERENCES users (id) ); CREATE TABLE orgs_users ( org_id int(11) NOT NULL, user_id int(11) NOT NULL, disabled tinyint(1) DEFAULT NULL, PRIMARY KEY (org_id,user_id) ); CREATE TABLE teams ( id int(11) NOT NULL AUTO_INCREMENT, name varchar(100) NOT NULL, org_id int(11) NOT NULL, created_by int(11) DEFAULT NULL, created_date date DEFAULT NULL, updated_by int(11) DEFAULT NULL, updated_date date DEFAULT NULL, PRIMARY KEY (id), UNIQUE KEY TEAM_NAME_UNIQUE (org_id,name), CONSTRAINT fk_team_org_id FOREIGN KEY (org_id) REFERENCES organizations (id) ); CREATE TABLE teams_users ( team_id int(11) NOT NULL, user_id int(11) NOT NULL, disabled tinyint(1) DEFAULT NULL, PRIMARY KEY (team_id,user_id) ); CREATE TABLE projects ( id int(11) NOT NULL AUTO_INCREMENT, name varchar(100) NOT NULL, team_id int(11) NOT NULL, owner_user_id int(11) NOT NULL, created_by int(11) DEFAULT NULL, created_date date DEFAULT NULL, updated_by int(11) DEFAULT NULL, updated_date date DEFAULT NULL, PRIMARY KEY (id), UNIQUE KEY TEAM_PROJ_NAME_UNIQUE (team_id,name), CONSTRAINT fk_proj_team_id FOREIGN KEY (team_id) REFERENCES teams (id), CONSTRAINT fk_proj_owner_user_id FOREIGN KEY (owner_user_id) REFERENCES users (id) ); CREATE TABLE tasks ( id int(11) NOT NULL AUTO_INCREMENT, title varchar(200) NOT NULL, description longtext, proj_id int(11) NOT NULL, assigned_user_id int(11) DEFAULT NULL, due_date date DEFAULT NULL, created_by int(11) DEFAULT NULL, created_date date DEFAULT NULL, updated_by int(11) DEFAULT NULL, updated_date date DEFAULT NULL, PRIMARY KEY (id), CONSTRAINT fk_proj_task_id FOREIGN KEY (proj_id) REFERENCES projects (id), CONSTRAINT fk_task_user_id FOREIGN KEY (assigned_user_id) REFERENCES users (id) ); CREATE TABLE tags ( id int(11) NOT NULL AUTO_INCREMENT, name varchar(100) NOT NULL, org_id int(11) NOT NULL, created_by int(11) DEFAULT NULL, created_date date DEFAULT NULL, updated_by int(11) DEFAULT NULL, updated_date date DEFAULT NULL, PRIMARY KEY (id), UNIQUE KEY TAG_NAME_UNIQUE (org_id,name), CONSTRAINT fk_tag_org_id FOREIGN KEY (org_id) REFERENCES organizations (id) ); CREATE TABLE invitations ( id int(11) NOT NULL AUTO_INCREMENT, from_email varchar(100) NOT NULL, to_email varchar(100) NOT NULL, org_id int(11) NOT NULL, invitation_code varchar(100) NOT NULL, accepted tinyint(1) DEFAULT NULL, created_by int(11) DEFAULT NULL, created_date date DEFAULT NULL, updated_by int(11) DEFAULT NULL, 
updated_date date DEFAULT NULL, PRIMARY KEY (id), CONSTRAINT fk_invite_org_id FOREIGN KEY (org_id) REFERENCES organizations (id), CONSTRAINT fk_invite_from_email FOREIGN KEY (from_email) REFERENCES users (email) );
Java
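A small JDBC sketch against the schema above, illustrating that rows must be inserted in foreign-key order (users before organizations, mirroring the reversed DROP order at the top). The connection URL and credentials are placeholders.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;

public class SchemaSmokeTest {
    public static void main(String[] args) throws Exception {
        // Placeholder URL and credentials for a local MySQL database holding the schema above.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:mysql://localhost:3306/taskdb", "root", "secret")) {

            long userId;
            // users must be inserted before organizations because of fk_owner_user_id.
            try (PreparedStatement user = conn.prepareStatement(
                    "INSERT INTO users (name, email, password) VALUES (?, ?, ?)",
                    Statement.RETURN_GENERATED_KEYS)) {
                user.setString(1, "Alice");
                user.setString(2, "alice@example.com");
                user.setString(3, "hashed-password");
                user.executeUpdate();
                try (ResultSet keys = user.getGeneratedKeys()) {
                    keys.next();
                    userId = keys.getLong(1);
                }
            }

            try (PreparedStatement org = conn.prepareStatement(
                    "INSERT INTO organizations (name, owner_user_id) VALUES (?, ?)")) {
                org.setString(1, "Acme");
                org.setLong(2, userId);
                org.executeUpdate();
            }
        }
    }
}
```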
/* * Copyright 2016 Amadeus s.a.s. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ "use strict"; const co = require("co"); const path = require("path"); const assertFilesEqual = require("../helpers/assertFilesEqual"); const exec = require("../helpers/exec"); module.exports = function (results) { const outDir = results.outDir; const atDiffExecutable = require.resolve("../../bin/at-diff"); const filesToCompare = [ // The .json extension is automatically added "version1.parse", "version2.parse", "user.parse", "at.parse", "version1to2.diff", "filteredVersion1to2.diff", "impactsOnUser.diff", "filteredImpactsOnUser.diff" ]; filesToCompare.forEach((fileName) => { const nonDeterministicFileName = `${fileName}.json`; it(nonDeterministicFileName, co.wrap(function *() { this.timeout(10000); const transformCommand = /\.parse$/.test(fileName) ? "reformat" : "reserialize"; const deterministicFileName = `${fileName}.deterministic.json`; yield exec(atDiffExecutable, [transformCommand, nonDeterministicFileName, "--json-output", deterministicFileName, "--deterministic-output", "--json-beautify"], { cwd: outDir }); yield assertFilesEqual(path.join(outDir, deterministicFileName), path.join(__dirname, "..", "expected-output", deterministicFileName)); })); }); };
Java
<!DOCTYPE html> <html> <head> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1"> <title>canvas-nest.js</title> </head> <body> <!-- Web page particle background plugin Canvas-nest.js color="255,0,0" color of the background particle lines opacity="0.5" opacity of the background particle lines, usually set between 0.5 and 1 count="99" density of the background particle lines --> <script type="text/javascript" color="255,0,0" opacity='0.7' zIndex="-2" count="200" src="../../../s/canvas-nest/canvas-nest.min.js"></script> </body> </html>

Java
<html xmlns:wicket="http://wicket.apache.org"> <wicket:head> <meta NAME="ROBOTS" CONTENT="noindex" /> </wicket:head> <wicket:extend> </wicket:extend> </html>
Java
# Cyrtopodium woodfordii Sims SPECIES #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
Java
#ifndef CELLO_SIMULATION_H #define CELLO_SIMULATION_H #include <algorithm> #include <stdexcept> #include <map> #include <memory> #include <queue> #include <string> #include <vector> #include <gameplay.h> #include <core/Delegate.h> #include <core/SimulationTypes.h> #include <json/json.h> #include <ui/Atlas.h> #include <util/Util.h> using std::map; using std::queue; using std::runtime_error; using std::string; using std::vector; using gameplay::PhysicsController; namespace cello { class Component; class ComponentFactory; class Simulation : NonCopyable { public: /** * The reserved ID for the System entity */ static const EntityID SYSTEM_ENTITY_ID = 0; /** * The reserved ID to represent an invalid entity */ static const EntityID INVALID_ENTITY_ID = -1; explicit Simulation(Atlas* atlas); virtual ~Simulation(); Atlas* getAtlas() const { return _atlas; } /** * Get a reference to a component of the specified entity and component type. * Returns nullptr if the component or the entity is not found */ template<class ComponentClass> ComponentClass* getComponent(int entityID) const { ComponentType componentType = ComponentClass::getType(); auto entityMapIt = _entityMap.find(entityID); if (_entityMap.end() == entityMapIt) { return nullptr; } ComponentTypeMap& componentTypeMap = *(entityMapIt->second); auto componentTypeMapIt = componentTypeMap.find(componentType); if (componentTypeMap.end() == componentTypeMapIt) { return nullptr; } return static_cast<ComponentClass*>(componentTypeMapIt->second); } /* * Returns a list of all components of the specified type */ template<class ComponentClass> ComponentList* getComponentsOfType() { ComponentType componentType = ComponentClass::getType(); // If no components of the required type currently exist, a new list // will be created and returned return &_componentMap[componentType]; } bool isLevelLoaded() const { return _levelLoaded; } void loadLevel(string levelFile, Json::Value levelData); Json::Value saveLevel(); string getLevelFile() const { return _levelFile; } string getLevelName() const { return _levelName; } string getScriptName() const { return _scriptName; } void init(); ComponentTypeMap* getComponentTypeMap(EntityID entity); EntityID spawnEntity(string templateName); /** * Queue an entity for destruction. The entity and it's components are removed from * the simulation but the components still exist in memory to allow other * components/systems time to remove references to them */ void killEntity(EntityID entity); /** * Kill entity and immediately destroy it. 
*/ void destroyEntity(EntityID entity); Delegate<EntityID>& onEntityCreated() { return _entityCreatedDelegate; } Delegate<EntityID>& onEntityKilled() { return _entityKilledDelegate; } private: bool _levelLoaded; string _levelFile; string _levelName; string _scriptName; /** * The value at which automatic ID generation starts */ static const EntityID START_ENTITY_ID = 100; /** * Get the next available unique ID to assign to the next created entity */ inline EntityID getNextEntityID() { return _nextEntityID++; } int _nextEntityID; static const string INHERIT_KEY; static const string LOAD_EXTERNAL_KEY; static const string ENTITY_ID_KEY; static const string ENTITIES_LIST_KEY; static const string GAME_STATE_KEY; static const string TEMPLATE_FILE_EXTENSION; void deserializeComponent(Component* component, Json::Value& componentData); Json::Value serializeComponent(Component* component, Json::Value& parentComponentData, bool copyAll); /** * Construct an empty entity from data and returns the Entity ID */ EntityID constructEntity(Json::Value entityData); /** * Construct a component from data, adds it to the specified entity and returns a reference to it */ void constructComponent(EntityID entityID, string componentTypeStr, Json::Value componentData); Json::Value preprocessEntityData(Json::Value entityData); Json::Value preprocessComponentData(Json::Value entityData); Json::Value getExternalData(string filename); /** * Add a component to an entity */ void addComponent(EntityID entityID, ComponentType componentType, Component* component); /** * Remove a component from an entity */ void removeComponent(EntityID entityID, ComponentType); /** * For every entity there is a map of ComponentType=>Component */ map<EntityID, ComponentTypeMap*> _entityMap; /** * Stores the list of entities to be destroyed at the end of a turn. * Entities cannot be destroyed mid-turn */ map<EntityID, ComponentTypeMap*> _killedEntityMap; /** * Stores a list of components for every component type */ map<ComponentType, ComponentList> _componentMap; /** * Delete the components of the entities in the map */ void destroyEntities(map<EntityID, ComponentTypeMap*>& entityMap); void destroyComponentTypeMap(ComponentTypeMap* componentTypeMap); /** * Stores the original template the entity was loaded from. This will * enable us to generate smaller save files */ map<EntityID, string> _entityTemplateMap; ComponentFactory* _componentFactory; Atlas* _atlas; Delegate<EntityID> _entityCreatedDelegate; Delegate<EntityID> _entityKilledDelegate; }; } #endif
Java
# CustomSelectPicker A very small jQuery plugin for customizing your select pickers Requirements ------------ jQuery 2.0 or higher Usage ------------ Include the minified or unminified plugin assets in the head tag **Example** ```HTML <html> <head> //Jquery require <script src="https://ajax.googleapis.com/ajax/libs/jquery/2.2.0/jquery.min.js"></script> //Plugin assets <link rel="stylesheet" href="cselectpicker.min.css" type='text/css'/> <script src="cselectpicker.min.js" type="text/javascript"></script> </head> <body> <select name='example'> <option value="">No selected</option> <option value="first">first</option> <option value="second">second</option> </select> //Init plugin <script type="text/javascript"> $('select').CSelectPicker(); </script> </body> </html> ``` Options and Events example ------------ ```JavaScript $('select').CSelectPicker({ //Optionals placehoder: 'Select your value', multiple: true, itemsClass: 'my-custom-class', maxDropdownItemsShow: 7, //Events onChange: function(element){ // console.log('Value selectbox is - ('+element.val()+')'); }, onReady: function(){ // console.log('Plugin is Ready'); }, onScrollDropdown: function(status){ // console.log('Dropdown scrolling is - ('+status+')') }, dropdownStatus: function(status){ // console.log('Dropdown status is - ('+status+')'); }, }); ``` Events ------------ | Event name | description | | ------------- | ------------- | | onChange | Called when the select changes state. Receives the jQuery select element | | onReady | Called when the plugin is ready to work. Takes nothing | | onScrollDropdown | Called when a scroll event happens in the dropdown. Receives the scroll direction (up or down) | | dropdownStatus | Called when the dropdown changes state. Receives the dropdown state (show or hide) | Options ------------ | Option name | description | | ------------- | ------------- | | placehoder | (string) Placeholder text in the select picker. Not required | | multiple | (boolean) Turns the select picker into a multi-select. Not required | | itemsClass | (string) Adds a custom class to the option items in the dropdown window. Not required | | maxDropdownItemsShow | (number) Sets the maximum number of option items shown in the dropdown window; a scrollbar is created beyond that. Not required |
Java
/* * Solo - A small and beautiful blogging system written in Java. * Copyright (c) 2010-present, b3log.org * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <https://www.gnu.org/licenses/>. */ package org.b3log.solo.model; /** * This class defines all link model relevant keys. * * @author <a href="http://88250.b3log.org">Liang Ding</a> * @version 1.0.0.2, Oct 31, 2011 * @since 0.3.1 */ public final class Link { /** * Link. */ public static final String LINK = "link"; /** * Links. */ public static final String LINKS = "links"; /** * Key of title. */ public static final String LINK_TITLE = "linkTitle"; /** * Key of address. */ public static final String LINK_ADDRESS = "linkAddress"; /** * Key of description. */ public static final String LINK_DESCRIPTION = "linkDescription"; /** * Key of order. */ public static final String LINK_ORDER = "linkOrder"; /** * Private constructor. */ private Link() { } }
Java
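A sketch of how these key constants are typically consumed, building a link record as JSON rather than hard-coding string keys. The use of org.json's `JSONObject` here is an assumption about the surrounding project; the values are placeholders.

```java
import org.b3log.solo.model.Link;
import org.json.JSONObject;

public class LinkJsonSketch {
    public static void main(String[] args) throws Exception {
        // Key the record with the Link constants instead of raw string literals.
        JSONObject link = new JSONObject();
        link.put(Link.LINK_TITLE, "b3log");
        link.put(Link.LINK_ADDRESS, "https://b3log.org");
        link.put(Link.LINK_DESCRIPTION, "Community homepage");
        link.put(Link.LINK_ORDER, 0);

        JSONObject payload = new JSONObject();
        payload.put(Link.LINK, link);
        System.out.println(payload.toString());
    }
}
```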
<html lang="ru-RU" xml:lang="ru-RU"> <head> <title>Москва</title> <meta content="ru-RU" http-equiv="Content-Language"> <meta content="text/html; charset=utf-8" http-equiv="Content-Type"> </head> <body style="width:100% !important; -webkit-text-size-adjust:100%; -ms-text-size-adjust:100%; margin:0; padding:0; background: #fafafa;"> <table cellspacing="0" cellpadding="0" border="0" style="margin:0; padding:0; width:100% !important; line-height: 100% !important; background: #fafafa;" id="backgroundTable"> <tbody><tr> <td valign="top"> <table width="600" cellspacing="0" cellpadding="0" border="0" align="center" class="coreTable" style="background: #e9eaec; border: 1px solid #cccccc"> <tbody><tr height="116" bgcolor="#ffffff"> <td colspan="4" valign="top" width="600"> <a href="http://www.mvideo.ru/?reff=email_transaction_a_coming_soon_start_preOrdering_a_head_logo&utm_source=email&utm_medium=transaction&utm_campaign=a_coming_soon_start_preOrdering&utm_content=a_head_logo" target="_blank" style="text-decoration:none; outline: none;"><img src="http://static.mvideo.ru/assets/img/emailImages/email_head_shadow.png" style="border: none; vertical-align: top;" alt="shadow" height="19" width="600"><img src="http://static.mvideo.ru/assets/img/emailImages/email_head_logo.png" style="border: none" alt="МВидео" height="81" width="600"></a> </td> </tr> <tr> <td width="600" valign="top" colspan="4"> <table cellspacing="0" cellpadding="0" border="0" bgcolor="#ffffff" align="center"> <tbody><tr bgcolor="#ffffff"> <td width="30"></td> <td width="540"> <table width="540" cellspacing="0" cellpadding="0" border="0" align="center"> <tbody> <tr> <td> <h1 style="margin: 0; color: #333333; font-size: 32px; line-height: 40px; font-family: Arial,Helvetica,sans-serif;"> Оформите предзаказ на </h1> </td> </tr> <tr> <td height="16"></td> </tr> <tr> <td> <p style="color: #666666; margin: 0; font-size: 14px; line-height: 19px; font-family: Arial,Helvetica,sans-serif;"> Благодарим за проявленный интерес к ! Теперь вы можете оформить на него предзаказ и получить его одним из первых! 
</p> </td> </tr> <tr> <td height="25"></td> </tr> <tr> <td> <table width="540" cellspacing="0" cellpadding="0" border="0"> <tbody><tr> <td width="220" valign="middle" height="44" bgcolor="#ed1c24" align="center" style="border-radius: 5px;"> <a href="http://www.mvideo.ru/products/televizor-lg-32lf562u-10007884?reff=email_transaction_a_coming_soon_start_preOrdering_a_btn_close_preOrdering&utm_source=email&utm_medium=transaction&utm_campaign=a_coming_soon_start_preOrdering&utm_content=a_btn_close_preOrdering" style="display: block; height: 44px; line-height: 44px; color: #ffffff; text-decoration: none; font-size: 14px; font-family: Arial,Helvetica,sans-serif;"> Оформить предзаказ </a> </td> <td width="320" height="44"></td> </tr> </tbody></table> </td> </tr> <tr> <td height="30"></td> </tr> </tbody></table> </td> <td width="30"></td> </tr> <tr> <td width="600" colspan="3"> <table width="600" cellspacing="0" cellpadding="0" border="0" bgcolor="#e9eaec" align="center"> <tbody> <tr height="26"> <td colspan="3"><img width="600" height="25" alt="shadow" style="border: none" src="http://static.mvideo.ru/assets/img/emailImages/email_core_shadow.png"></td> </tr> <tr> <td> <table width="580" cellspacing="0" cellpadding="0" border="0" bgcolor="#ffffff" align="center" style="border: 1px solid #cacaca"> <tbody> <tr> <td height="27" colspan="3"></td> </tr> <tr> </tr><tr> <td width="14"></td> <td width="552"> <table width="552" cellspacing="0" cellpadding="0" border="0" align="left"> <tbody> <tr> <td width="10"> <a style="display: block; border: none; text-decoration: none; color: #333333;" href=""><img style="border: none; display: block;" src=""></a> </td> <td width="20"></td> <td valign="top"> <p style="color: #333333; margin: 6px 0 0; font-size: 16px; line-height: 20px; font-family: Arial,Helvetica,sans-serif; font-weight: bold;"> <a style="display: block; border: none; text-decoration: none; color: #333333;" title="" href=""></a> </p> <p style="color: #000000; margin: 0; font-size: 32px; line-height: 50px; font-family: Arial,Helvetica,sans-serif; font-weight: bold;"> <a style="display: block; border: none; text-decoration: none; color: #333333;" title="" href=""></a> </p> </td> </tr> </tbody> </table> </td> <td width="14"></td> </tr> <tr> <td height="27" colspan="3"></td> </tr> </tbody> </table> </td> </tr> <tr> <td width="600" height="20" colspan="3"></td> </tr> </tbody> </table> </td> </tr> </tbody></table> </td> </tr> <tr> <td colspan="4" valign="top" width="600"> <table align="center" bgcolor="#DFDFDF" border="0" cellpadding="0" cellspacing="0"> <tbody><tr> <td colspan="4" height="37"></td> </tr> <tr> <td width="30"></td> <td colspan="2" valign="top" width="540"> <p style="color: #333333; margin: 0 0 24px; font-size: 14px; line-height: 20px; font-family: Arial,Helvetica,sans-serif;"> Это письмо было отправлено автоматически. </p> <p style="color: #333333; margin: 0 0 24px; font-size: 14px; line-height: 20px; font-family: Arial,Helvetica,sans-serif;"> Если Вы считаете, что получили его по ошибке, просто проигнорируйте его. </p> <p style="color: #333333; margin: 0 0 8px; font-size: 14px; line-height: 20px; font-family: Arial,Helvetica,sans-serif;"> Мы уважаем права покупателя и удалим все ваши данные из своей базы <a href="mailto:24@mvideo.ru" style="color:#7c7c7c">по запросу</a> в любое время. 
</p> </td> <td width="30"></td> </tr> <tr> <td colspan="4" height="21"></td> </tr> <tr> <td colspan="4"> <hr style="border: none; background-color: #cccccc; height: 1px;"> </td> </tr> <tr> <td colspan="4" height="21"></td> </tr> <tr> <td width="30"></td> <td width="236"> <p style="color: #333333; margin: 0 0 0 0; font-size: 14px; line-height: 0; font-family: Arial,Helvetica,sans-serif; font-weight: bold;"> <span style="font-size: 14px; line-height: 20px;">М.видео в социальных сетях</span> </p> </td> <td width="304"> <table align="center" border="0" cellpadding="0" cellspacing="0"> <tbody><tr height="40"> <td align="right" width="40"> <a href="http://facebook.com/mvideo.ru?reff=email_transaction_a_coming_soon_start_preOrdering_a_social_network_menu&utm_source=email&utm_medium=transaction&utm_campaign=a_coming_soon_start_preOrdering&utm_content=a_social_network_menu"><img src="http://static.mvideo.ru/assets/img/emailImages/email_social_icon_fb.png" style="border: none" alt="Facebook" height="40" width="40"></a> </td> <td align="right" width="52"> <a href="http://twitter.com/mvideo?reff=email_transaction_a_coming_soon_start_preOrdering_a_social_network_menu&utm_source=email&utm_medium=transaction&utm_campaign=a_coming_soon_start_preOrdering&utm_content=a_social_network_menu"><img src="http://static.mvideo.ru/assets/img/emailImages/email_social_icon_tw.png" style="border: none" alt="Twitter" height="40" width="40"></a> </td> <td align="right" width="52"> <a href="http://vk.com/mvideo?reff=email_transaction_a_coming_soon_start_preOrdering_a_social_network_menu&utm_source=email&utm_medium=transaction&utm_campaign=a_coming_soon_start_preOrdering&utm_content=a_social_network_menu"><img src="http://static.mvideo.ru/assets/img/emailImages/email_social_icon_vk.png" style="border: none" alt="ВКонтакте" height="40" width="40"></a> </td> <td align="right" width="52"> <a href="http://odnoklassniki.ru/mvideo?reff=email_transaction_a_coming_soon_start_preOrdering_a_social_network_menu&utm_source=email&utm_medium=transaction&utm_campaign=a_coming_soon_start_preOrdering&utm_content=a_social_network_menu"><img src="http://static.mvideo.ru/assets/img/emailImages/email_social_icon_od.png" style="border: none" alt="Одноклассники" height="40" width="40"></a> </td> <td align="right" width="52"> <a href="http://youtube.com/mvideoru?reff=email_transaction_a_coming_soon_start_preOrdering_a_social_network_menu&utm_source=email&utm_medium=transaction&utm_campaign=a_coming_soon_start_preOrdering&utm_content=a_social_network_menu"><img src="http://static.mvideo.ru/assets/img/emailImages/email_social_icon_yt.png" style="border: none" alt="Youtube" height="40" width="40"></a> </td> </tr> </tbody></table> </td> <td width="30"></td> </tr> <tr> <td colspan="4" height="21"></td> </tr> <tr> <td colspan="4"> <hr style="border: none; background-color: #cccccc; height: 1px;"> </td> </tr> <tr> <td width="30"></td> <td colspan="2" width="540"> <p style="color: #333333; margin: 8px 0; font-size: 14px; line-height: 20px; font-family: Arial,Helvetica,sans-serif;">ООО «М.видео Менеджмент», ОГРН 1057746840095.</p> <p style="color: #333333; margin: 8px 0; font-size: 14px; line-height: 20px; font-family: Arial,Helvetica,sans-serif;">Юридический адрес: 105066, Россия, Москва, ул. Нижняя Красносельская, дом 40/12, корп. 
20.</p> <table align="center" border="0" cellpadding="0" cellspacing="0"> <tbody><tr height="80"> <td width="70"><img src="http://static.mvideo.ru/assets/img/emailImages/email_m_icon.png" style="border: none" alt="МВидео" height="48" width="48"></td> <td width="202"> <p style="color: #333333; margin: 8px 0; font-size: 12px;line-height: 20px; font-family: Arial,Helvetica,sans-serif;">Copyright &copy; М.Видео, 2016</p> </td> <td align="right" width="202"> <p style="color: #fff; margin: 8px 0; font-size: 12px; line-height: 20px; font-family: Arial,Helvetica,sans-serif;"><a href="http://www.mvideo.ru/legalcontent?reff=email_transaction_a_coming_soon_start_preOrdering_a_foot_links&utm_source=email&utm_medium=transaction&utm_campaign=a_coming_soon_start_preOrdering&utm_content=a_foot_links" style="color:#a3a0a0;text-decoration:underline; white-space: nowrap;">Политика конфиденциальности </a></p> </td> <td align="right" width="66"><img src="http://static.mvideo.ru/assets/img/emailImages/email_akit_icon.png" style="border: none" alt="АКИТ" height="47" width="49"></td> </tr> </tbody></table> </td> <td width="30"></td> </tr> <tr> <td colspan="4" height="20"></td> </tr> </tbody></table> </td> </tr> </tbody></table> </td> </tr> </tbody></table> </body> </html>
Java
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.management; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import javax.management.MBeanServer; import javax.management.ObjectName; import javax.management.openmbean.TabularData; import org.apache.camel.CamelContext; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.mock.MockEndpoint; import org.apache.camel.util.StringHelper; import org.junit.Ignore; /** * @version */ public class ManagedCamelContextTest extends ManagementTestSupport { @Override protected CamelContext createCamelContext() throws Exception { CamelContext context = super.createCamelContext(); // to force a different management name than the camel id context.getManagementNameStrategy().setNamePattern("19-#name#"); return context; } public void testManagedCamelContext() throws Exception { // JMX tests dont work well on AIX CI servers (hangs them) if (isPlatform("aix")) { return; } MBeanServer mbeanServer = getMBeanServer(); ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\""); assertTrue("Should be registered", mbeanServer.isRegistered(on)); String name = (String) mbeanServer.getAttribute(on, "CamelId"); assertEquals("camel-1", name); String managementName = (String) mbeanServer.getAttribute(on, "ManagementName"); assertEquals("19-camel-1", managementName); String uptime = (String) mbeanServer.getAttribute(on, "Uptime"); assertNotNull(uptime); String status = (String) mbeanServer.getAttribute(on, "State"); assertEquals("Started", status); Boolean messageHistory = (Boolean) mbeanServer.getAttribute(on, "MessageHistory"); assertEquals(Boolean.TRUE, messageHistory); Integer total = (Integer) mbeanServer.getAttribute(on, "TotalRoutes"); assertEquals(2, total.intValue()); Integer started = (Integer) mbeanServer.getAttribute(on, "StartedRoutes"); assertEquals(2, started.intValue()); // invoke operations MockEndpoint mock = getMockEndpoint("mock:result"); mock.expectedBodiesReceived("Hello World"); mbeanServer.invoke(on, "sendBody", new Object[]{"direct:start", "Hello World"}, new String[]{"java.lang.String", "java.lang.Object"}); assertMockEndpointsSatisfied(); resetMocks(); mock.expectedBodiesReceived("Hello World"); mbeanServer.invoke(on, "sendStringBody", new Object[]{"direct:start", "Hello World"}, new String[]{"java.lang.String", "java.lang.String"}); assertMockEndpointsSatisfied(); Object reply = mbeanServer.invoke(on, "requestBody", new Object[]{"direct:foo", "Hello World"}, new String[]{"java.lang.String", "java.lang.Object"}); assertEquals("Bye World", reply); reply = mbeanServer.invoke(on, "requestStringBody", new Object[]{"direct:foo", "Hello World"}, new String[]{"java.lang.String", 
"java.lang.String"}); assertEquals("Bye World", reply); resetMocks(); mock = getMockEndpoint("mock:result"); mock.expectedBodiesReceived("Hello World"); mock.expectedHeaderReceived("foo", 123); Map<String, Object> headers = new HashMap<String, Object>(); headers.put("foo", 123); mbeanServer.invoke(on, "sendBodyAndHeaders", new Object[]{"direct:start", "Hello World", headers}, new String[]{"java.lang.String", "java.lang.Object", "java.util.Map"}); assertMockEndpointsSatisfied(); resetMocks(); mock = getMockEndpoint("mock:result"); mock.expectedBodiesReceived("Hello World"); mock.expectedHeaderReceived("foo", 123); reply = mbeanServer.invoke(on, "requestBodyAndHeaders", new Object[]{"direct:start", "Hello World", headers}, new String[]{"java.lang.String", "java.lang.Object", "java.util.Map"}); assertEquals("Hello World", reply); assertMockEndpointsSatisfied(); // test can send Boolean can = (Boolean) mbeanServer.invoke(on, "canSendToEndpoint", new Object[]{"direct:start"}, new String[]{"java.lang.String"}); assertEquals(true, can.booleanValue()); can = (Boolean) mbeanServer.invoke(on, "canSendToEndpoint", new Object[]{"timer:foo"}, new String[]{"java.lang.String"}); assertEquals(false, can.booleanValue()); // stop Camel mbeanServer.invoke(on, "stop", null, null); } public void testManagedCamelContextCreateEndpoint() throws Exception { // JMX tests dont work well on AIX CI servers (hangs them) if (isPlatform("aix")) { return; } MBeanServer mbeanServer = getMBeanServer(); ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\""); assertNull(context.hasEndpoint("seda:bar")); // create a new endpoint Object reply = mbeanServer.invoke(on, "createEndpoint", new Object[]{"seda:bar"}, new String[]{"java.lang.String"}); assertEquals(Boolean.TRUE, reply); assertNotNull(context.hasEndpoint("seda:bar")); ObjectName seda = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=endpoints,name=\"seda://bar\""); boolean registered = mbeanServer.isRegistered(seda); assertTrue("Should be registered " + seda, registered); // create it again reply = mbeanServer.invoke(on, "createEndpoint", new Object[]{"seda:bar"}, new String[]{"java.lang.String"}); assertEquals(Boolean.FALSE, reply); registered = mbeanServer.isRegistered(seda); assertTrue("Should be registered " + seda, registered); } public void testManagedCamelContextRemoveEndpoint() throws Exception { // JMX tests dont work well on AIX CI servers (hangs them) if (isPlatform("aix")) { return; } MBeanServer mbeanServer = getMBeanServer(); ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\""); assertNull(context.hasEndpoint("seda:bar")); // create a new endpoint Object reply = mbeanServer.invoke(on, "createEndpoint", new Object[]{"seda:bar"}, new String[]{"java.lang.String"}); assertEquals(Boolean.TRUE, reply); assertNotNull(context.hasEndpoint("seda:bar")); ObjectName seda = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=endpoints,name=\"seda://bar\""); boolean registered = mbeanServer.isRegistered(seda); assertTrue("Should be registered " + seda, registered); // remove it Object num = mbeanServer.invoke(on, "removeEndpoints", new Object[]{"seda:*"}, new String[]{"java.lang.String"}); assertEquals(1, num); assertNull(context.hasEndpoint("seda:bar")); registered = mbeanServer.isRegistered(seda); assertFalse("Should not be registered " + seda, registered); // remove it again num = mbeanServer.invoke(on, "removeEndpoints", new 
Object[]{"seda:*"}, new String[]{"java.lang.String"}); assertEquals(0, num); assertNull(context.hasEndpoint("seda:bar")); registered = mbeanServer.isRegistered(seda); assertFalse("Should not be registered " + seda, registered); } public void testFindComponentsInClasspath() throws Exception { // JMX tests dont work well on AIX CI servers (hangs them) if (isPlatform("aix")) { return; } MBeanServer mbeanServer = getMBeanServer(); ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\""); assertTrue("Should be registered", mbeanServer.isRegistered(on)); @SuppressWarnings("unchecked") Map<String, Properties> info = (Map<String, Properties>) mbeanServer.invoke(on, "findComponents", null, null); assertNotNull(info); assertTrue(info.size() > 20); Properties prop = info.get("seda"); assertNotNull(prop); assertEquals("seda", prop.get("name")); assertEquals("org.apache.camel", prop.get("groupId")); assertEquals("camel-core", prop.get("artifactId")); } public void testManagedCamelContextCreateRouteStaticEndpointJson() throws Exception { // JMX tests dont work well on AIX CI servers (hangs them) if (isPlatform("aix")) { return; } MBeanServer mbeanServer = getMBeanServer(); ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\""); // get the json String json = (String) mbeanServer.invoke(on, "createRouteStaticEndpointJson", null, null); assertNotNull(json); assertEquals(7, StringHelper.countChar(json, '{')); assertEquals(7, StringHelper.countChar(json, '}')); assertTrue(json.contains("{ \"uri\": \"direct://start\" }")); assertTrue(json.contains("{ \"uri\": \"direct://foo\" }")); } public void testManagedCamelContextExplainEndpointUri() throws Exception { // JMX tests dont work well on AIX CI servers (hangs them) if (isPlatform("aix")) { return; } MBeanServer mbeanServer = getMBeanServer(); ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\""); // get the json String json = (String) mbeanServer.invoke(on, "explainEndpointJson", new Object[]{"log:foo?groupDelay=2000&groupSize=5", false}, new String[]{"java.lang.String", "boolean"}); assertNotNull(json); assertEquals(5, StringHelper.countChar(json, '{')); assertEquals(5, StringHelper.countChar(json, '}')); assertTrue(json.contains("\"groupDelay\": { \"kind\": \"parameter\", \"type\": \"integer\", \"javaType\": \"java.lang.Long\", \"deprecated\": \"false\", \"value\": \"2000\"," + " \"description\": \"Set the initial delay for stats (in millis)\" },")); assertTrue(json.contains("\"groupSize\": { \"kind\": \"parameter\", \"type\": \"integer\", \"javaType\": \"java.lang.Integer\", \"deprecated\": \"false\", \"value\": \"5\"," + " \"description\": \"An integer that specifies a group size for throughput logging.\" }")); assertTrue(json.contains("\"loggerName\": { \"kind\": \"path\", \"type\": \"string\", \"javaType\": \"java.lang.String\", \"deprecated\": \"false\"," + " \"value\": \"foo\", \"description\": \"The logger name to use\" }")); } public void testManagedCamelContextExplainEip() throws Exception { // JMX tests dont work well on AIX CI servers (hangs them) if (isPlatform("aix")) { return; } MBeanServer mbeanServer = getMBeanServer(); ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\""); // get the json String json = (String) mbeanServer.invoke(on, "explainEipJson", new Object[]{"transform", false}, new String[]{"java.lang.String", "boolean"}); 
assertNotNull(json); assertTrue(json.contains("\"label\": \"transformation\"")); assertTrue(json.contains("\"expression\": { \"kind\": \"element\", \"required\": \"true\", \"type\": \"object\"")); } @Override protected RouteBuilder createRouteBuilder() throws Exception { return new RouteBuilder() { @Override public void configure() throws Exception { from("direct:start").to("mock:result"); from("direct:foo").transform(constant("Bye World")); } }; } }
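For orientation only: the test above reads the context MBean through the test harness's getMBeanServer(). A minimal standalone sketch (an assumption, not part of the Camel test suite) showing how the same attributes could be read from the platform MBean server, given a running Camel context that uses the "19-#name#" management name pattern in the same JVM with JMX management enabled:

import java.lang.management.ManagementFactory;
import javax.management.MBeanServer;
import javax.management.ObjectName;

public class CamelContextMBeanPeek {
    public static void main(String[] args) throws Exception {
        // Camel registers its MBeans on the platform MBean server by default
        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
        // ObjectName copied from the test above; "19-camel-1" comes from the "19-#name#" pattern
        ObjectName on = ObjectName.getInstance(
                "org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\"");
        // Read two of the attributes asserted in the test
        System.out.println("State:  " + mbs.getAttribute(on, "State"));
        System.out.println("Uptime: " + mbs.getAttribute(on, "Uptime"));
    }
}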
Java
/*
  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.

  Initial implementation:
   * http://www.mudynamics.com
   * http://labs.mudynamics.com
   * http://www.pcapr.net
*/
(function($) {
    var now = new Date().getTime();
    var millisInHHour = 1000*60*30; // half an hour in milliseconds

    $.jscouch = $.jscouch || {};
    $.jscouch.documents = $.jscouch.documents || {};

    $.extend($.jscouch.documents, {
        load: function() {
            // populate the DB with initial entries
            $.jscouch.couchdb.put({ name: 'fish.jpg', created_at: new Date(now + millisInHHour*Math.random()).toUTCString(),
                user: 'bob', type: 'jpeg', camera: 'nikon', info: { width: 100, height: 200, size: 12345 }, tags: [ 'tuna', 'shark' ] });
            $.jscouch.couchdb.put({ name: 'trees.jpg', created_at: new Date(now + millisInHHour*Math.random()).toUTCString(),
                user: 'john', type: 'jpeg', camera: 'canon', info: { width: 30, height: 250, size: 32091 }, tags: [ 'oak' ] });
            $.jscouch.couchdb.put({ name: 'snow.png', created_at: new Date(now + millisInHHour*Math.random()).toUTCString(),
                user: 'john', type: 'png', camera: 'canon', info: { width: 64, height: 64, size: 1253 }, tags: [ 'tahoe', 'powder' ] });
            $.jscouch.couchdb.put({ name: 'hawaii.png', created_at: new Date(now + millisInHHour*Math.random()).toUTCString(),
                user: 'john', type: 'png', camera: 'nikon', info: { width: 128, height: 64, size: 92834 }, tags: [ 'maui', 'tuna' ] });
            $.jscouch.couchdb.put({ name: 'hawaii.gif', created_at: new Date(now + millisInHHour*Math.random()).toUTCString(),
                user: 'bob', type: 'gif', camera: 'canon', info: { width: 320, height: 128, size: 49287 }, tags: [ 'maui' ] });
            $.jscouch.couchdb.put({ name: 'island.gif', created_at: new Date(now + millisInHHour*Math.random()).toUTCString(),
                user: 'zztop', type: 'gif', camera: 'nikon', info: { width: 640, height: 480, size: 50398 }, tags: [ 'maui' ] });
        }
    });
})(jQuery);
Java
<!-- author: TestAuthor -->
<!-- title: TestTitle -->
<!-- tags: tag1, tag2, tag3 -->
<!-- testtag: test -->

# Test_Headline

Test-Text
Java
/**
--| ADAPTIVE RUNTIME PLATFORM |----------------------------------------------------------------------------------------

(C) Copyright 2013-2015 Carlos Lozano Diez t/a Adaptive.me <http://adaptive.me>.

Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 . Unless required by appli-
-cable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.

Original author:

    * Carlos Lozano Diez
      <http://github.com/carloslozano>
      <http://twitter.com/adaptivecoder>
      <mailto:carlos@adaptive.me>

Contributors:

    * Ferran Vila Conesa
      <http://github.com/fnva>
      <http://twitter.com/ferran_vila>
      <mailto:ferran.vila.conesa@gmail.com>

    * See source code files for contributors.

Release:

    * @version v2.2.0

-------------------------------------------| aut inveniam viam aut faciam |--------------------------------------------
*/

using System;

namespace Adaptive.Arp.Api
{
    /**
       Enumeration INetworkStatusListenerError
    */
    public enum INetworkStatusListenerError
    {
        NoPermission,
        Unreachable,
        Unknown
    }
}
Java
# Psychotria fockeana Miq. SPECIES

#### Status
SYNONYM

#### According to
The Catalogue of Life, 3rd January 2011

#### Published in
null

#### Original name
null

### Remarks
null
Java
<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en"> <head> <meta content="text/html; charset=UTF-8" http-equiv="Content-Type" /> <meta content="2013-12-25 10:18:47 -0700" http-equiv="change-date" /> <title>PV 16</title> <script src='../js/jquery-3.1.1.min.js' type='text/javascript' charset='utf-8'></script> <script src='../js/bpi.js' type="text/javascript" charset="utf-8"></script> <link rel="stylesheet" href='../css/bpi.css' > </head> <body> <div class="header"><h1 id="titulo">Provérbios 16<span id="trecho"></span></h1></div> <div id="passagem"> <div class="bible1 verses"> <p class="verse" verse="1"><sup>1</sup>Ao homem pertencem os planos do coração; mas a resposta da língua é do Senhor.</p> <p class="verse" verse="2"><sup>2</sup>Todos os caminhos do homem são limpos aos seus olhos; mas o Senhor pesa os espíritos.</p> <p class="verse" verse="3"><sup>3</sup>Entrega ao Senhor as tuas obras, e teus desígnios serão estabelecidos.</p> <p class="verse" verse="4"><sup>4</sup>O Senhor fez tudo para um fim; sim, até o ímpio para o dia do mal.</p> <p class="verse" verse="5"><sup>5</sup>Todo homem arrogante é abominação ao Senhor; certamente não ficará impune.</p> <p class="verse" verse="6"><sup>6</sup>Pela misericórdia e pela verdade expia-se a iniqüidade; e pelo temor do Senhor os homens se desviam do mal.</p> <p class="verse" verse="7"><sup>7</sup>Quando os caminhos do homem agradam ao Senhor, faz que até os seus inimigos tenham paz com ele.</p> <p class="verse" verse="8"><sup>8</sup>Melhor é o pouco com justiça, do que grandes rendas com injustiça.</p> <p class="verse" verse="9"><sup>9</sup>O coração do homem propõe o seu caminho; mas o Senhor lhe dirige os passos.</p> <p class="verse" verse="10"><sup>10</sup>Nos lábios do rei acham-se oráculos; em juízo a sua boca não prevarica.</p> <p class="verse" verse="11"><sup>11</sup>O peso e a balança justos são do Senhor; obra sua são todos os pesos da bolsa.</p> <p class="verse" verse="12"><sup>12</sup>Abominação é para os reis o praticarem a impiedade; porque com justiça se estabelece o trono.</p> <p class="verse" verse="13"><sup>13</sup>Lábios justos são o prazer dos reis; e eles amam aquele que fala coisas retas.</p> <p class="verse" verse="14"><sup>14</sup>O furor do rei é mensageiro da morte; mas o homem sábio o aplacará.</p> <p class="verse" verse="15"><sup>15</sup>Na luz do semblante do rei está a vida; e o seu favor é como a nuvem de chuva serôdia.</p> <p class="verse" verse="16"><sup>16</sup>Quanto melhor é adquirir a sabedoria do que o ouro! 
e quanto mais excelente é escolher o entendimento do que a prata!</p> <p class="verse" verse="17"><sup>17</sup>A estrada dos retos desvia-se do mal; o que guarda o seu caminho preserva a sua vida.</p> <p class="verse" verse="18"><sup>18</sup>A soberba precede a destruição, e a altivez do espírito precede a queda.</p> <p class="verse" verse="19"><sup>19</sup>Melhor é ser humilde de espírito com os mansos, do que repartir o despojo com os soberbos.</p> <p class="verse" verse="20"><sup>20</sup>O que atenta prudentemente para a palavra prosperará; e feliz é aquele que confia no Senhor.</p> <p class="verse" verse="21"><sup>21</sup>O sábio de coração será chamado prudente; e a doçura dos lábios aumenta o saber.</p> <p class="verse" verse="22"><sup>22</sup>O entendimento, para aquele que o possui, é uma fonte de vida, porém a estultícia é o castigo dos insensatos.</p> <p class="verse" verse="23"><sup>23</sup>O coração do sábio instrui a sua boca, e aumenta o saber nos seus lábios.</p> <p class="verse" verse="24"><sup>24</sup>Palavras suaves são como favos de mel, doçura para a alma e saúde para o corpo.</p> <p class="verse" verse="25"><sup>25</sup>Há um caminho que ao homem parece direito, mas o fim dele conduz à morte.</p> <p class="verse" verse="26"><sup>26</sup>O apetite do trabalhador trabalha por ele, porque a sua fome o incita a isso.</p> <p class="verse" verse="27"><sup>27</sup>O homem vil suscita o mal; e nos seus lábios há como que um fogo ardente.</p> <p class="verse" verse="28"><sup>28</sup>O homem perverso espalha contendas; e o difamador separa amigos íntimos.</p> <p class="verse" verse="29"><sup>29</sup>O homem violento alicia o seu vizinho, e guia-o por um caminho que não é bom.</p> <p class="verse" verse="30"><sup>30</sup>Quando fecha os olhos fá-lo para maquinar perversidades; quando morde os lábios, efetua o mal.</p> <p class="verse" verse="31"><sup>31</sup>Coroa de honra são as cãs, a qual se obtém no caminho da justiça.</p> <p class="verse" verse="32"><sup>32</sup>Melhor é o longânimo do que o valente; e o que domina o seu espírito do que o que toma uma cidade.</p> <p class="verse" verse="33"><sup>33</sup>A sorte se lança no regaço; mas do Senhor procede toda a disposição dela.</p> </div> </div> <br/> <br/> <br/> <br/> <br/> <br/> <br/> <br/> <br/> <p class="copyright">Almeida Revista e Atualizada© Copyright © 1993 Sociedade Bíblica do Brasil. Todos os direitos reservados. Texto bíblico utilizado com autorização. Saiba mais sobre a Sociedade Bíblica do Brasil. A Sociedade Bíblica do Brasil trabalha para que a Bíblia esteja, efetivamente, ao alcance de todos e seja lida por todos. A SBB é uma entidade sem fins lucrativos, dedicada a promover o desenvolvimento integral do ser humano.</p> <br/> <br/> <br/> <br/></body> </html>
Java
import {ShopOrderDetail} from './ShopOrderDetail';

export class ShopOrder {
    order_id: string;
    user_id: number;
    username: string;
    is_vip: number;
    payment: number;
    order_no: number;
    shopway: number;
    status: number;
    employee_id: string;
    buytime: Date;
    orderdealtime: Date;
    phone: string;
    address: string;
    soft_delete: string;
    subPrice: number;
    shopOrderDetails: ShopOrderDetail[];
}
Java
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.aggregations.metrics.stats; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.search.SearchContext; import java.io.IOException; import java.util.Map; public class StatsAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, StatsAggregationBuilder> { public static final String NAME = "stats"; private static final ObjectParser<StatsAggregationBuilder, Void> PARSER; static { PARSER = new ObjectParser<>(StatsAggregationBuilder.NAME); ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false); } public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException { return PARSER.parse(parser, new StatsAggregationBuilder(aggregationName), null); } public StatsAggregationBuilder(String name) { super(name, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } protected StatsAggregationBuilder(StatsAggregationBuilder clone, Builder factoriesBuilder, Map<String, Object> metaData) { super(clone, factoriesBuilder, metaData); } @Override public AggregationBuilder shallowCopy(Builder factoriesBuilder, Map<String, Object> metaData) { return new StatsAggregationBuilder(this, factoriesBuilder, metaData); } /** * Read from a stream. 
*/ public StatsAggregationBuilder(StreamInput in) throws IOException { super(in, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } @Override protected void innerWriteTo(StreamOutput out) { // Do nothing, no extra state to write to stream } @Override protected StatsAggregatorFactory innerBuild(SearchContext context, ValuesSourceConfig<Numeric> config, AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException { return new StatsAggregatorFactory(name, config, context, parent, subFactoriesBuilder, metaData); } @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { return builder; } @Override protected int innerHashCode() { return 0; } @Override protected boolean innerEquals(Object obj) { return true; } @Override public String getType() { return NAME; } }
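For context, the class above is the builder the Elasticsearch Java API uses for the "stats" aggregation. A minimal usage sketch follows; the aggregation name "price_stats" and field "price" are illustrative assumptions, not values taken from this file:

import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;

public class StatsAggregationExample {
    public static void main(String[] args) {
        // AggregationBuilders.stats(...) constructs the StatsAggregationBuilder defined above
        StatsAggregationBuilder stats = AggregationBuilders.stats("price_stats").field("price");
        // Attach it to a search source; the request computes count/min/max/avg/sum over "price"
        SearchSourceBuilder source = new SearchSourceBuilder().aggregation(stats);
        System.out.println(source);
    }
}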
Java