text
stringlengths
2
1.04M
meta
dict
package fr.afcepf.al29.groupem.dao.impl; import java.util.List; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import javax.transaction.Transactional; import org.springframework.stereotype.Component; import fr.afcepf.al29.groupem.dao.api.CartLineDaoApi; import fr.afcepf.al29.groupem.entities.CartLine; @Transactional @Component public class CartLineDaoImpl implements CartLineDaoApi { @PersistenceContext(unitName="Projet1") private EntityManager entityManager; @Override public CartLine createCartLine(CartLine cartLine) { entityManager.persist(cartLine); return cartLine; } @Override public List<CartLine> getCartLinesByCartId(int cartId) { return entityManager.createQuery("SELECT cl FROM CartLine cl INNER JOIN cl.cart c WHERE c.id = :cartId", CartLine.class).setParameter("cartId", cartId).getResultList(); } @Override public CartLine getCartLineById(int cartLineId) { return entityManager.find(CartLine.class, cartLineId); } @Override public CartLine updateCartLine(CartLine cartLine) { entityManager.merge(cartLine); return cartLine; } @Override public boolean deleteCartLineById(int cartLineId) { entityManager.remove(getCartLineById(cartLineId)); return (getCartLineById(cartLineId) == null); } }
{ "content_hash": "716ea26890074ba3ba003ae3879630fd", "timestamp": "", "source": "github", "line_count": 49, "max_line_length": 170, "avg_line_length": 26.346938775510203, "alnum_prop": 0.796281951975213, "repo_name": "Afcepf-GroupeM/ProjetCesium", "id": "b268e0834fd17e0b33cee0453ea0a8281f09567d", "size": "1291", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Projet1/src/main/java/fr/afcepf/al29/groupem/dao/impl/CartLineDaoImpl.java", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "722" }, { "name": "CSS", "bytes": "251867" }, { "name": "HTML", "bytes": "497148" }, { "name": "Java", "bytes": "427759" }, { "name": "JavaScript", "bytes": "39796" } ], "symlink_target": "" }
import { Command } from "./Command";
import { LineResults } from "./LineResults";
import { Parameter } from "./Parameters/Parameter";
import { RepeatingParameters } from "./Parameters/RepeatingParameters";
import { SingleParameter } from "./Parameters/SingleParameter";

/**
 * A command for declaring methods within an interface.
 */
export class InterfaceMethodCommand extends Command {
    /**
     * Information on parameters this command takes in.
     *
     * NOTE(review): the first parameter is labeled "InterfaceName" but is used
     * by render() as the method name — confirm the intended label.
     */
    private static parameters: Parameter[] = [
        new SingleParameter("InterfaceName", "The Interface name.", true),
        new RepeatingParameters(
            "Method arguments.",
            [
                new SingleParameter(
                    "argumentName",
                    "Name of argument.",
                    true),
                new SingleParameter(
                    "argumentType",
                    "Type of argument.",
                    true)
            ])
    ];

    /**
     * @returns Information on parameters this command takes in.
     */
    public getParameters(): Parameter[] {
        return InterfaceMethodCommand.parameters;
    }

    /**
     * Renders the command for a language with the given parameters.
     *
     * Layout of `parameters` (as consumed below): [0] command name,
     * [1] method name, [2] return type, [3..] alternating
     * argumentName / argumentType pairs.
     *
     * @param parameters   The command's name, followed by any parameters.
     * @returns Line(s) of code in the language.
     */
    public render(parameters: string[]): LineResults {
        let line: string = "";

        // Languages without interfaces produce an empty, non-terminated line.
        if (this.language.properties.interfaces.supported === false) {
            return LineResults.newSingleLine(line, false);
        }

        if (this.language.properties.interfaces.methodTypeAfter) {
            // Return-type-after style: name(arg: Type, ...)<right>: ReturnType
            line += parameters[1];
            line += this.language.properties.interfaces.declareMethodMiddle;

            // Odd indices (3, 5, ...) are argument names, even indices their types.
            for (let i: number = 3; i < parameters.length; i++) {
                if (i % 2 !== 0) {
                    line += parameters[i] + ": ";
                } else if (i !== parameters.length - 1) {
                    line += parameters[i] + ", ";
                } else {
                    line += parameters[i]; // last type: no trailing separator
                }
            }

            line += this.language.properties.interfaces.declareMethodRight + ": " + parameters[2];
        } else {
            // Return-type-before style: <left>ReturnType name(Type arg, ...)<right>
            line += this.language.properties.interfaces.declareMethodLeft;
            line += parameters[2] + " " + parameters[1] + this.language.properties.interfaces.declareMethodMiddle;

            // Step over name/type pairs; emit them "Type name" order.
            for (let i: number = 3; i < parameters.length - 1; i += 2) {
                line += parameters[i + 1] + " " + parameters[i];

                if (i !== parameters.length - 2) {
                    line += ", ";
                }
            }

            line += this.language.properties.interfaces.declareMethodRight;
        }

        return LineResults.newSingleLine(line, true);
    }
}
{ "content_hash": "3d5926caef09deaa3a1a293b2f4e92e9", "timestamp": "", "source": "github", "line_count": 85, "max_line_length": 114, "avg_line_length": 33.858823529411765, "alnum_prop": 0.5361362056984017, "repo_name": "chris-j-tang/GLS", "id": "69185b631756be5d356697e4d88349b30b825e39", "size": "2878", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Commands/InterfaceMethodCommand.ts", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "7821" }, { "name": "Java", "bytes": "8752" }, { "name": "JavaScript", "bytes": "11928" }, { "name": "Python", "bytes": "6233" }, { "name": "Ruby", "bytes": "6104" }, { "name": "Smalltalk", "bytes": "1122" }, { "name": "TypeScript", "bytes": "388906" } ], "symlink_target": "" }
from rdopkg.action import Action, Arg


# Actions exposed by the `review` module; each Action maps to a CLI
# subcommand. `review_spec` additionally declares its execution `steps`
# (run `get_package_env` from the distgit module before `review_spec`).
ACTIONS = [
    Action('review_patch', help="send patch(es) for review",
           optional_args=[
               Arg('local_patches_branch', metavar='PATCHES_BRANCH',
                   positional=True, nargs='?',
                   help="local patches branch with changes to review"),
           ]),
    Action('review_spec', help="send distgit (.spec file) change for review",
           optional_args=[
               Arg('branch', metavar='DISTGIT_BRANCH', positional=True,
                   nargs='?',
                   help="local distgit branch with changes to review"),
           ],
           steps=[
               Action('get_package_env', module='distgit'),
               Action('review_spec'),
           ]),
]
{ "content_hash": "03b22137bfa7d160eba5009f672f7fee", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 71, "avg_line_length": 34.43478260869565, "alnum_prop": 0.5050505050505051, "repo_name": "redhat-openstack/rdopkg", "id": "ec8e6f5947855cffa570c9939e876e1a24f0d033", "size": "792", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "rdopkg/actions/review/__init__.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Makefile", "bytes": "505" }, { "name": "Python", "bytes": "207508" }, { "name": "Shell", "bytes": "1759" } ], "symlink_target": "" }
<?php

namespace backend\modules\user\models\forms;

use Yii;
use yii\base\Model;

/**
 * Resend-confirmation form.
 *
 * NOTE(review): the original header said "Forgot password form", but the
 * validation and sendEmail() logic re-send the account-activation or
 * email-change confirmation mail for a not-yet-active email address.
 */
class ResendForm extends Model
{
    /**
     * @var string Username and/or email
     */
    public $email;

    /**
     * @var \backend\modules\user\models\User
     */
    protected $_user = false;

    /**
     * @return array the validation rules.
     */
    public function rules()
    {
        return [
            ["email", "required"],
            ["email", "email"],
            ["email", "validateEmailInactive"],
            ["email", "filter", "filter" => "trim"],
        ];
    }

    /**
     * Validate email exists and set user property.
     *
     * Adds an error when no user matches the email or when the email is
     * already active; otherwise caches the user in $_user.
     */
    public function validateEmailInactive()
    {
        // check for valid user
        $user = $this->getUser();
        if (!$user) {
            $this->addError("email", Yii::t("user", "Email not found"));
        } elseif ($user->status == $user::STATUS_ACTIVE) {
            $this->addError("email", Yii::t("user", "Email is already active"));
        } else {
            $this->_user = $user;
        }
    }

    /**
     * Get user based on email (memoized in $_user after the first lookup).
     *
     * @return \backend\modules\user\models\User|null
     */
    public function getUser()
    {
        // get and store user
        if ($this->_user === false) {
            $user = Yii::$app->getModule("user")->model("User");

            // check email first, then new_email (former is indexed, latter is not)
            $this->_user = $user::findOne(["email" => $this->email]);
            if (!$this->_user) {
                $this->_user = $user::findOne(["new_email" => $this->email]);
            }
        }
        return $this->_user;
    }

    /**
     * @inheritdoc
     */
    public function attributeLabels()
    {
        return [
            "email" => Yii::t("user", "Email"),
        ];
    }

    /**
     * Generate a fresh confirmation key and send the confirmation email.
     *
     * Runs validation first; validateEmailInactive() guarantees the user is
     * not STATUS_ACTIVE here, so the else branch covers the
     * unconfirmed-email case.
     *
     * @return bool true when the mail was handed off; false on validation failure
     */
    public function sendEmail()
    {
        // validate
        if ($this->validate()) {

            // get user
            /** @var \backend\modules\user\models\UserKey $userKey */
            $user = $this->getUser();
            $userKey = Yii::$app->getModule("user")->model("UserKey");

            // calculate type
            if ($user->status == $user::STATUS_INACTIVE) {
                $type = $userKey::TYPE_EMAIL_ACTIVATE;
            }
            //elseif ($user->status == $user::STATUS_UNCONFIRMED_EMAIL) {
            else {
                $type = $userKey::TYPE_EMAIL_CHANGE;
            }

            // generate userKey
            $userKey = $userKey::generate($user->id, $type);

            // send email confirmation
            return $user->sendEmailConfirmation($userKey);
        }

        return false;
    }
}
{ "content_hash": "c5b253571d8b9e8be2941f9f0a8cbf5f", "timestamp": "", "source": "github", "line_count": 114, "max_line_length": 83, "avg_line_length": 24.30701754385965, "alnum_prop": 0.4832190544929628, "repo_name": "AleksandrChernyavenko/hintbox", "id": "b63e9b7c19fe4d532d19cc0769e909b7fff0572c", "size": "2771", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "backend/modules/user/models/forms/ResendForm.php", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "487589" }, { "name": "JavaScript", "bytes": "216614" }, { "name": "PHP", "bytes": "497665" }, { "name": "Shell", "bytes": "2097" } ], "symlink_target": "" }
<?xml version="1.0" encoding="utf-8"?> <!-- ~ Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. ~ ~ WSO2 Inc. licenses this file to you under the Apache License, ~ Version 2.0 (the "License"); you may not use this file except ~ in compliance with the License. ~ You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0 ~ ~ Unless required by applicable law or agreed to in writing, ~ software distributed under the License is distributed on an ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY ~ KIND, either express or implied. See the License for the ~ specific language governing permissions and limitations ~ under the License. --> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> <parent> <groupId>org.wso2.carbon.identity</groupId> <artifactId>sts-feature</artifactId> <version>4.5.5-SNAPSHOT</version> <relativePath>../pom.xml</relativePath> </parent> <modelVersion>4.0.0</modelVersion> <artifactId>org.wso2.carbon.sts.ui.feature</artifactId> <packaging>pom</packaging> <name>STS UI Feature</name> <url>http://wso2.org</url> <description>This feature contains the bundles required for Front-end STS functionality</description> <dependencies> <dependency> <groupId>org.wso2.carbon.identity</groupId> <artifactId>org.wso2.carbon.sts.ui</artifactId> </dependency> <dependency> <groupId>org.wso2.carbon.identity</groupId> <artifactId>org.wso2.carbon.sts.stub</artifactId> </dependency> <dependency> <groupId>org.wso2.carbon.identity</groupId> <artifactId>org.wso2.carbon.identity.sts.mgt.ui</artifactId> </dependency> <dependency> <groupId>org.wso2.carbon.identity</groupId> <artifactId>org.wso2.carbon.identity.sts.mgt.stub</artifactId> </dependency> </dependencies> <build> <plugins> <plugin> <groupId>org.wso2.maven</groupId> <artifactId>carbon-p2-plugin</artifactId> 
<version>${carbon.p2.plugin.version}</version> <executions> <execution> <id>4-p2-feature-generation</id> <phase>package</phase> <goals> <goal>p2-feature-gen</goal> </goals> <configuration> <id>org.wso2.carbon.sts.ui</id> <propertiesFile>../../etc/feature.properties</propertiesFile> <adviceFile> <properties> <propertyDef>org.wso2.carbon.p2.category.type:console</propertyDef> <propertyDef>org.eclipse.equinox.p2.type.group:false</propertyDef> </properties> </adviceFile> <bundles> <bundleDef>org.wso2.carbon.identity:org.wso2.carbon.sts.stub</bundleDef> <bundleDef>org.wso2.carbon.identity:org.wso2.carbon.sts.ui</bundleDef> <bundleDef>org.wso2.carbon.identity:org.wso2.carbon.identity.sts.mgt.ui</bundleDef> <bundleDef>org.wso2.carbon.identity:org.wso2.carbon.identity.sts.mgt.stub</bundleDef> </bundles> <importFeatures> <importFeatureDef>org.wso2.carbon.core:${carbon.kernel.version}</importFeatureDef> </importFeatures> </configuration> </execution> </executions> </plugin> </plugins> </build> </project>
{ "content_hash": "12c3e9720d194a01416998bdb4bdde62", "timestamp": "", "source": "github", "line_count": 92, "max_line_length": 201, "avg_line_length": 45.01086956521739, "alnum_prop": 0.5525235450374306, "repo_name": "isharak/carbon-identity", "id": "85b79331eaa7f8030fee36d4856dd6e801fb7a8c", "size": "4141", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "features/sts/org.wso2.carbon.sts.ui.feature/pom.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "100683" }, { "name": "HTML", "bytes": "115426" }, { "name": "Java", "bytes": "11389445" }, { "name": "JavaScript", "bytes": "394967" }, { "name": "Objective-C", "bytes": "13608" }, { "name": "PLSQL", "bytes": "54176" }, { "name": "Thrift", "bytes": "338" }, { "name": "XSLT", "bytes": "1030" } ], "symlink_target": "" }
"""Utilities to build a pycolab game from ASCII art diagrams."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import itertools

import numpy as np
from pycolab import engine
from pycolab import things
import six


def ascii_art_to_game(art,
                      what_lies_beneath,
                      sprites=None, drapes=None,
                      backdrop=things.Backdrop,
                      update_schedule=None,
                      z_order=None,
                      occlusion_in_layers=True):
  """Construct a pycolab game from an ASCII art diagram.

  This function helps to turn ASCII art diagrams like the following (which
  is a Sokoban-like puzzle):

      [' @@@@@@ ',
       ' @  . @ ',    # '@' means "wall"
       '@@ab @@ ',    # 'P' means "player"
       '@  .c @ ',    # '.' means "box storage location"
       '@. dP@',      # 'a'-'g' are all for separate boxes
       '@.@@@@@@',    # ' ' means "open, traversable space"
       '@ @ @@ @',
       '@ e  . @',
       '@@@@@@@@',]

  into pycolab games. The basic idea is that you supply the diagram, along
  with hints about which characters correspond to `Sprite`s and `Drape`s and
  the classes that implement those `Sprite`s and `Drape`s. This function then
  returns an initialised `Engine` object, all ready for you to call the
  `its_showtime` method and start the game.

  Several of this function's arguments require you to supply subclasses of
  the classes found in `things.py`. If your subclass constructors take the
  same number of arguments as their `things.py` superclasses, then they can
  be listed directly. Otherwise, you will need to pack the subclasses and
  their additional `args` and `kwargs` into a `Partial` object. So, for
  example, if you have a `Sprite` subclass with a constructor like this:

      class MySprite(Sprite):
        def __init__(self, corner, position, character, mood, drink_quantity):
          ...

  you could package `MySprite` and the "extra" arguments in any of the
  following ways (among others):

      Partial(MySprite, 'drowsy', 'two pints')
      Partial(MySprite, 'yawning', drink_quantity='three pints')
      Partial(MySprite, mood='asleep', drink_quantity='four pints')

  Args:
    art: An ASCII art diagram depicting a game board. This should be a list
        or tuple whose values are all strings containing the same number of
        ASCII characters.
    what_lies_beneath: a single-character ASCII string that will be
        substituted into the `art` diagram at all places where a character
        that keys `sprites` or `drapes` is found; *or*, this can also be an
        entire second ASCII art diagram whose values will be substituted into
        `art` at (only) those locations. In either case, the resulting
        diagram will be used to initialise the game's `Backdrop`.
    sprites: a dict mapping single-character ASCII strings to `Sprite`
        classes (not objects); or to `Partial` objects that hold the classes
        and "extra" `args`es and `kwargs`es to use during their construction.
        It's fine if a character used as a key doesn't appear in the `art`
        diagram: in this case, we assume that the corresponding `Sprite` will
        be located at `0, 0`. (If you intend your `Sprite` to be invisible,
        the `Sprite` will have to take care of that on its own after it is
        built.) (Optional; omit if your game has no sprites.)
    drapes: a dict mapping single-character ASCII strings to `Drape` classes
        (not objects); or to `Partial` objects that hold the classes and
        "extra" `args`es and `kwargs`es to use during their construction.
        It's fine if a character used as a key doesn't appear in the `art`
        diagram: in this case, we assume that the `Drape`'s curtain (i.e. its
        mask) is completely empty (i.e. False). (Optional; omit if your game
        has no drapes.)
    backdrop: a `Backdrop` class (not an object); or a `Partial` object that
        holds the class and "extra" `args` and `kwargs` to use during its
        construction. (Optional; if unset, `Backdrop` is used directly, which
        is fine for a game where the background scenery never changes and
        contains no game logic.)
    update_schedule: A list of single-character ASCII strings indicating the
        order in which the `Sprite`s and `Drape`s should be consulted by the
        `Engine` for updates; or, a list of lists that imposes an ordering as
        well, but that groups the entities in each list into separate update
        groups (refer to `Engine` documentation). (Optional; if unspecified,
        the ordering will be arbitrary---be mindful of this if your game uses
        advanced features like scrolling, where update order is pretty
        important.)
    z_order: A list of single-character ASCII strings indicating the depth
        ordering of the `Sprite`s and `Drape`s (from back to front).
        (Optional; if unspecified, the ordering will be the same as what's
        used for `update_schedule`).
    occlusion_in_layers: If `True` (the default), game entities or `Backdrop`
        characters that occupy the same position on the game board will be
        rendered into the `layers` member of `rendering.Observation`s with
        "occlusion": only the entity that appears latest in the game's
        Z-order will have its `layers` entry at that position set to `True`.
        If `False`, all entities and `Backdrop` characters at that position
        will have `True` in their `layers` entries there. This flag does not
        change the rendering of the "flat" `board` member of `Observation`,
        which always paints game entities on top of each other as dictated by
        the Z-order. **NOTE: This flag also determines the occlusion behavior
        in `layers` arguments to all game entities' `update` methods; see
        docstrings in [things.py] for details.**

  Returns:
    An initialised `Engine` object as described.

  Raises:
    TypeError: when `update_schedule` is neither a "flat" list of characters
        nor a list of lists of characters.
    ValueError: numerous causes, nearly always instances of the user not
        heeding the requirements stipulated in Args:. The exception messages
        should make most errors fairly easy to debug.
  """
  ### 1. Set default arguments, normalise arguments, derive various things ###

  # Convert sprites and drapes to be dicts of Partials only. "Bare" Sprite
  # and Drape classes become Partials with no args or kwargs.
  if sprites is None: sprites = {}
  if drapes is None: drapes = {}
  sprites = {char: sprite if isinstance(sprite, Partial) else Partial(sprite)
             for char, sprite in six.iteritems(sprites)}
  drapes = {char: drape if isinstance(drape, Partial) else Partial(drape)
            for char, drape in six.iteritems(drapes)}

  # Likewise, turn a bare Backdrop class into an argument-free Partial.
  if not isinstance(backdrop, Partial): backdrop = Partial(backdrop)

  # Compile characters corresponding to all Sprites and Drapes.
  non_backdrop_characters = set()
  non_backdrop_characters.update(sprites.keys())
  non_backdrop_characters.update(drapes.keys())

  if update_schedule is None: update_schedule = list(non_backdrop_characters)
  # If update_schedule is a string (someone wasn't reading the docs!),
  # gracefully convert it to a list of single-character strings.
  if isinstance(update_schedule, str):
    update_schedule = list(update_schedule)
  # If update_schedule is not a list-of-lists already, convert it to be one.
  if all(isinstance(item, str) for item in update_schedule):
    update_schedule = [update_schedule]

  ### 2. Argument checking and derivation of more... things ###

  # The update schedule (flattened) is the basis for the default z-order.
  try:
    flat_update_schedule = list(itertools.chain.from_iterable(update_schedule))
  except TypeError:
    raise TypeError('if any element in update_schedule is an iterable (like a '
                    'list), all elements in update_schedule must be')
  if set(flat_update_schedule) != non_backdrop_characters:
    raise ValueError('if specified, update_schedule must list each sprite and '
                     'drape exactly once.')
  # The default z-order is derived from there.
  if z_order is None:
    z_order = flat_update_schedule
  if set(z_order) != non_backdrop_characters:
    raise ValueError('if specified, z_order must list each sprite and drape '
                     'exactly once.')

  # All this checking is rather strict, but as this function is likely to be
  # popular with new users, it will help to fail with a helpful error message
  # now rather than an incomprehensible stack trace later.
  if isinstance(what_lies_beneath, str) and len(what_lies_beneath) != 1:
    raise ValueError(
        'what_lies_beneath may either be a single-character ASCII string or '
        'a list of ASCII-character strings')
  # Note that the what_lies_beneath check works for characters and lists both.
  # ord() raises TypeError on non-string/non-single-character values, which is
  # converted into the ValueError below.
  try:
    _ = [ord(character) for character in ''.join(what_lies_beneath)]
    _ = [ord(character) for character in non_backdrop_characters]
    _ = [ord(character) for character in z_order]
    _ = [ord(character) for character in flat_update_schedule]
  except TypeError:
    raise ValueError(
        'keys of sprites, keys of drapes, what_lies_beneath (or its entries), '
        'values in z_order, and (possibly nested) values in update_schedule '
        'must all be single-character ASCII strings.')

  if non_backdrop_characters.intersection(''.join(what_lies_beneath)):
    raise ValueError(
        'any character specified in what_lies_beneath must not be one of the '
        'characters used as keys in the sprites or drapes arguments.')

  ### 3. Convert all ASCII art to numpy arrays ###

  # Now convert the ASCII art array to a numpy array of uint8s.
  art = ascii_art_to_uint8_nparray(art)

  # In preparation for masking out sprites and drapes from the ASCII art array
  # (to make the background), do similar for what_lies_beneath.
  if isinstance(what_lies_beneath, str):
    what_lies_beneath = np.full_like(art, ord(what_lies_beneath))
  else:
    what_lies_beneath = ascii_art_to_uint8_nparray(what_lies_beneath)
  if art.shape != what_lies_beneath.shape:
    raise ValueError(
        'if not a single ASCII character, what_lies_beneath must be ASCII '
        'art whose shape is the same as that of the ASCII art in art.')

  ### 4. Other miscellaneous preparation ###

  # This dict maps the characters associated with Sprites and Drapes to an
  # identifier for the update group to which they belong. The sorted order of
  # the identifiers matches the group ordering in update_schedule, but is
  # otherwise generic.
  update_group_for = {}
  for i, update_group in enumerate(update_schedule):
    group_id = '{:05d}'.format(i)
    update_group_for.update({character: group_id
                             for character in update_group})

  ### 5. Construct engine; populate with Sprites and Drapes ###

  game = engine.Engine(*art.shape, occlusion_in_layers=occlusion_in_layers)

  # Sprites and Drapes are added according to the depth-first traversal of the
  # update schedule.
  for character in flat_update_schedule:
    # Switch to this character's update group.
    game.update_group(update_group_for[character])
    # Find locations where this character appears in the ASCII art.
    mask = art == ord(character)

    if character in drapes:
      # Add the drape to the Engine.
      partial = drapes[character]
      game.add_prefilled_drape(character, mask, partial.pycolab_thing,
                               *partial.args, **partial.kwargs)

    if character in sprites:
      # Get the location of the sprite in the ASCII art, if there was one.
      row, col = np.where(mask)
      if len(row) > 1:
        raise ValueError('sprite character {} can appear in at most one place '
                         'in art.'.format(character))
      # If there was a location, convert it to integer values; otherwise, 0,0.
      # gpylint doesn't know how implicit bools work with numpy arrays...
      row, col = (int(row), int(col)) if len(row) > 0 else (0, 0)  # pylint: disable=g-explicit-length-test

      # Add the sprite to the Engine.
      partial = sprites[character]
      game.add_sprite(character, (row, col), partial.pycolab_thing,
                      *partial.args, **partial.kwargs)

    # Clear out the newly-added Sprite or Drape from the ASCII art, replacing
    # it with the corresponding what_lies_beneath character.
    art[mask] = what_lies_beneath[mask]

  ### 6. Impose specified Z-order ###

  game.set_z_order(z_order)

  ### 7. Add the Backdrop to the engine ###

  game.set_prefilled_backdrop(
      characters=''.join(chr(c) for c in np.unique(art)),
      prefill=art.view(np.uint8),
      backdrop_class=backdrop.pycolab_thing,
      *backdrop.args, **backdrop.kwargs)

  # That's all, folks!
  return game


def ascii_art_to_uint8_nparray(art):
  """Construct a numpy array of dtype `uint8` from an ASCII art diagram.

  This function takes ASCII art diagrams (expressed as lists or tuples of
  equal-length strings) and derives 2-D numpy arrays with dtype `uint8`.

  Args:
    art: An ASCII art diagram; this should be a list or tuple whose values are
        all strings containing the same number of ASCII characters.

  Returns:
    A 2-D numpy array as described.

  Raises:
    ValueError: `art` wasn't an ASCII art diagram, as described; this could be
      because the strings it is made of contain non-ASCII characters, or do
      not have constant length.
    TypeError: `art` was not a list of strings.
  """
  error_text = (
      'the argument to ascii_art_to_uint8_nparray must be a list (or tuple) '
      'of strings containing the same number of strictly-ASCII characters.')
  try:
    # Each row string becomes one row of bytes; vstack requires equal lengths.
    art = np.vstack([np.frombuffer(line.encode('ascii'), dtype=np.uint8)
                     for line in art])
  except AttributeError as e:
    # A list-of-lists of single characters has no .encode; give a hint.
    if isinstance(art, (list, tuple)) and all(
        isinstance(row, (list, tuple)) for row in art):
      error_text += ' Did you pass a list of list of single characters?'
    raise TypeError('{} (original error: {})'.format(error_text, e))
  except ValueError as e:
    raise ValueError('{} (original error from numpy: {})'.format(error_text, e))
  if np.any(art > 127):
    raise ValueError(error_text)
  return art


class Partial(object):
  """Holds a pycolab "thing" and its extra constructor arguments.

  In a spirit similar to `functools.partial`, a `Partial` object holds a
  subclass of one of the pycolab game entities described in `things.py`,
  along with any "extra" arguments required for its constructor (i.e. those
  besides the constructor arguments specified by the `things.py` base class
  constructors).

  `Partial` instances can be used to pass `Sprite`, `Drape` and `Backdrop`
  subclasses *and* their necessary "extra" constructor arguments to
  `ascii_art_to_game`.
  """

  def __init__(self, pycolab_thing, *args, **kwargs):
    """Construct a new Partial object.

    Args:
      pycolab_thing: a `Backdrop`, `Sprite`, or `Drape` subclass (note: not
          an object, the class itself).
      *args: "Extra" positional arguments for the `pycolab_thing` constructor.
      **kwargs: "Extra" keyword arguments for the `pycolab_thing` constructor.

    Raises:
      TypeError: `pycolab_thing` was not a `Backdrop`, a `Sprite`, or a
          `Drape`.
    """
    if not issubclass(pycolab_thing, (things.Backdrop,
                                      things.Sprite,
                                      things.Drape)):
      raise TypeError('the pycolab_thing argument to ascii_art.Partial must '
                      'be a Backdrop, Sprite, or Drape subclass.')
    self.pycolab_thing = pycolab_thing
    self.args = args
    self.kwargs = kwargs
{ "content_hash": "1cfcea8cba239b3368d061a105be2995", "timestamp": "", "source": "github", "line_count": 350, "max_line_length": 107, "avg_line_length": 45.47428571428571, "alnum_prop": 0.67636340789143, "repo_name": "deepmind/pycolab", "id": "8cd32933b752086bb0928be5b0871e1941ccf74f", "size": "16501", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pycolab/ascii_art.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "621822" } ], "symlink_target": "" }
<?php

use Carbon\Carbon;
use AcceptanceTester as A;

// Acceptance tests for the site's cron-task subsystem: activation toggles,
// calendar rebuilds, outbound mail jobs, password rotation, and DB backups.
// codecept run acceptance CronCest --steps -f
class CronCest
{
    // Log in as admin via cookies and clear the mail catcher before each test.
    public function _before(A $I)
    {
        // if run manually, don't forget to first run
        // codecept run acceptance HelperTestsCest:resetDatabase
        $I->amOnPage('/');
        $I->setCookie('AuthKey', $I->get('natu', 'AuthKey'));
        $I->setCookie('XDEBUG_SESSION', 'PHPSTORM');
        $I->amOnPage('/admin');
        $I->deleteAllEmails();
    }

    public function _after(A $I)
    {
    }

    // Admin page must show every configured cron item (label + element id).
    // codecept run acceptance CronCest:canSeeAllFields --steps -f
    public function canSeeAllFields(A $I)
    {
        $fields = $I->get('cron_items');
        foreach ($fields as $index => $label) {
            $I->canSee($label);
            $I->seeInSource($index);
        }
    }

    // Checking/unchecking the UI checkbox must flip the task's activation
    // flag stored as JSON in the systemstatus table.
    // codecept run acceptance CronCest:canToggleActivation --steps -f
    public function canToggleActivation(A $I)
    {
        $status_path = ['systemstatus', 'Value', ['id' => 'cron_tasks.activation']];
        $status = json_decode($I->grabFromDatabase(...$status_path), true);
        $I->assertTrue(empty($status['rebuild_calendar']));

        $cb_path = $I->get('paths', 'rebuild_calendar_cb');
        $I->checkOption($cb_path);
        $I->delay(1.5); // give the async save time to hit the DB
        $new_status = json_decode($I->grabFromDatabase(...$status_path), true);
        $I->assertEquals(1, $new_status['rebuild_calendar']);

        $I->uncheckOption($cb_path);
        $I->delay(1.5);
        $newest_status = json_decode($I->grabFromDatabase(...$status_path), true);
        $I->assertEquals(0, $newest_status['rebuild_calendar']);
    }

    // Running the task must (re)create kalender.ics and record a last-run time.
    // codecept run acceptance CronCest:calendarGetsRebuild --steps -f
    public function calendarGetsRebuild(A $I)
    {
        $cal_path = codecept_root_dir() . '/kalender.ics';
        if (file_exists($cal_path)) {
            $I->delay();
            $I->deleteFile($cal_path);
        }
        $I->runCronTask('rebuild_calendar');
        $I->seeFileFound('kalender.ics', codecept_root_dir());
        $I->seeInDatabase('systemstatus', ['id' => 'last_run.rebuild_calendar']);
    }

    // The task must NOT run again until its interval (here: >10, <=20 minutes
    // since last run) has elapsed.
    // codecept run acceptance CronCest:calendarDoesntGetRebuild --steps -f
    public function calendarDoesntGetRebuild(A $I)
    {
        $cal_path = codecept_root_dir() . '/kalender.ics';
        $path_args = ['kalender.ics', codecept_root_dir()];
        $last_run = ['systemstatus', ['Value' => null], ['id' => 'last_run.rebuild_calendar']];

        $I->runCronTask('rebuild_calendar');
        if (file_exists($cal_path)) {
            $I->delay();
            $I->deleteFile($cal_path);
        }

        // Just ran: the scheduler should skip it.
        $I->runActivatedCronTasks();
        $I->dontSeeFileFound(...$path_args);

        // Backdate last run by 10 minutes: still inside the interval.
        $last_rebuild = $I->grabFromDatabase('systemstatus', 'Value', ['id' => 'last_run.rebuild_calendar']);
        $last_run[1]['Value'] = Carbon::parse($last_rebuild)->subMinutes(10)->toIso8601String();
        $I->updateInDatabase(...$last_run);
        $I->runActivatedCronTasks();
        $I->dontSeeFileFound(...$path_args);

        // Backdate by 20 minutes: now the task is due and the file reappears.
        $last_run[1]['Value'] = Carbon::parse($last_rebuild)->subMinutes(20)->toIso8601String();
        $I->updateInDatabase(...$last_run);
        $I->runActivatedCronTasks();
        $I->seeFileFound(...$path_args);
    }

    // Visit-confirmation mails (skipped: mail path deprecated).
    // codecept run acceptance CronCest:visitConfirmationMessage --steps -f
    public function visitConfirmationMessage(A $I, $scenario)
    {
        $scenario->skip('Deprecated method of sending mail');
        $I->runCronTask('send_visit_confirmation_message');
        $I->fetchEmails();
        $I->haveNumberOfUnreadEmails(2);
        $mails = [
            [
                'sub' => 'Bekräfta ditt besök',
                'from' => 'info@sigtunanaturskola.se',
                'to' => 'kindulaer@edu.sigtuna.se',
                'body' => ['Liv', 'Alfred', '2C', '7 juni'],
            ],
            [
                'sub' => 'Bekräfta ditt besök',
                'from' => 'info@sigtunanaturskola.se',
                'to' => 'krumpf@edu.sigtuna.se',
                'body' => ['Universum', 'Björn', '2A', '4 juni'],
            ],
        ];
        $I->checkMultipleEmails($mails);
    }

    // Daily admin summary mail must flag bad phone numbers, oversized groups,
    // and unconfirmed visits.
    // codecept run acceptance CronCest:adminSummaryMail --steps -f
    public function adminSummaryMail(A $I)
    {
        $I->runCronTask('send_admin_summary_mail');
        $I->fetchEmails();
        $I->haveNumberOfUnreadEmails(1);
        $mail = [
            'sub' => 'Sammanfattning av databasen',
            'from' => 'info@sigtunanaturskola.se',
            'to' => 'info@sigtunanaturskola.se',
            'body' => [
                'Status av databasen',
                'Felaktiga mobilnummer',
                'Peter Samuelsson',
                '071-9638300',
                'Per Hedin',
                '085474218',
                'För många elever',
                'Obekräftade besök',
                '2018-06-04: Universum med 2A från S:t Pers skola',
            ],
        ];
        $I->checkEmail($mail);
        // TODO: Add more test cases
    }

    // Group-leader change notifications (skipped: mail path deprecated).
    // codecept run acceptance CronCest:sendChangedGroupleaderMail --steps -f
    public function sendChangedGroupleaderMail(A $I, $scenario)
    {
        $scenario->skip('Deprecated method of sending mail');
        $I->runCronTask('send_changed_groupleader_mail');
        $I->fetchEmails();
        $I->haveNumberOfUnreadEmails(2);

        //checking that no new mails are sent
        $I->runActivatedCronTasks();
        $I->delay(0.7);
        $I->fetchEmails();
        $I->haveNumberOfUnreadEmails(2);

        // checking that this is due to the mails already being sent and not just because of the systemstatus
        $I->changeTestDate('+3 days');
        $I->runActivatedCronTasks();
        $I->delay(0.7);
        $I->fetchEmails();
        $I->haveNumberOfUnreadEmails(2);

        // checking the content
        $mail = [
            'sub' => 'Antal grupper du förvaltar har ökat',
            'from' => 'info@sigtunanaturskola.se',
            'to' => 'ipsum.leo@edu.sigtuna.se',
            'body' => [
                'Ny grupp som du ansvarar för',
                '5b, åk 5',
                'Hej Anna',
                'gjort dig ansvarig för en eller flera grupper',
                'ändrat ansvaret för nån'
            ],
        ];
        $I->checkEmail($mail);

        $mail = [
            'sub' => 'Antal grupper du förvaltar har minskat',
            'from' => 'info@sigtunanaturskola.se',
            'to' => 'Nulla@edu.sigtuna.se',
            'body' => [
                'Borttagen grupp som du ej längre ansvarar för',
                '5b, åk 5',
                'Gruppen som du fortsätter att ansvara för',
                '5a, åk 5',
                'Hej Tomas'
            ],
        ];
        $I->checkEmail($mail);
    }

    // Welcome mail for new users, including the retry-later behavior for
    // users created after the first run (skipped: mail path deprecated).
    // codecept run acceptance CronCest:sendNewUserMail --steps -f
    public function sendNewUserMail(A $I, $scenario)
    {
        $scenario->skip('Deprecated method of sending mail');
        $expected_mail_nr = 1;
        $heinz_welcome_mail = ['User_id' => 102, 'Subject' => 2, 'Carrier' => 0, 'Status' => 1];
        $I->dontSeeInDatabase('messages', $heinz_welcome_mail);
        $I->runCronTask('send_new_user_mail');
        $I->seeInDatabase('messages', $heinz_welcome_mail);
        $I->fetchEmails();
        $I->haveNumberOfUnreadEmails($expected_mail_nr);
        $mail = [
            'sub' => 'Välkommen i Naturskolans besöksportal',
            'from' => 'info@sigtunanaturskola.se',
            'to' => 'heinz.krumbichel@edu.sigtuna.se',
            'body' => [
                'Hej Heinz'
            ],
        ];
        $I->checkEmail($mail);

        $user_data = [
            'id' => 103,
            'FirstName' => 'Ban Ki',
            'LastName' => 'Moon',
            'Mail' => 'slindholm0@jiathis.com',
            'Status' => 0,
            'School_id' => 'jose'
        ];
        $I->seeInDatabase('users', $user_data);
        $I->updateInDatabase('users', ['Status' => 1], ['id' => 103]);

        // run task again, but only a few hours after
        $I->changeTestDate('+2 hours');
        $I->runActivatedCronTasks();
        $I->fetchEmails();
        // expect no new mail
        $I->haveNumberOfUnreadEmails($expected_mail_nr);

        // run task again much later
        $I->changeTestDate('+3 days');
        $I->runActivatedCronTasks();
        $I->fetchEmails();
        // expect one new mail
        $I->haveNumberOfUnreadEmails($expected_mail_nr + 1);
    }

    // Profile-completion reminder with annoyance interval (skipped:
    // mail path deprecated).
    // codecept run acceptance CronCest:sendUpdateProfileReminder --steps -f
    public function sendUpdateProfileReminder(A $I, $scenario)
    {
        $scenario->skip('Deprecated method of sending mail');
        $expected_mail_nr = 1;
        $I->runCronTask('send_update_profile_reminder');
        $I->fetchEmails();
        $I->haveNumberOfUnreadEmails($expected_mail_nr);
        $mail = [
            'sub' => 'Vi behöver mer information från dig',
            'from' => 'info@sigtunanaturskola.se',
            'to' => 'nbrealey0@sphinn.com',
            'body' => [
                'Hej Maja',
                'behöver vi ett mobilnummer till dig',
                'skola/norr'
            ],
        ];
        $I->checkEmail($mail);

        $I->runActivatedCronTasks();
        // no new mail as there is no change
        $I->fetchEmails();
        $I->haveNumberOfUnreadEmails($expected_mail_nr);

        $I->changeTestDate('+5 days');
        // more than the annoyance interval, so the user will be contacted again
        $I->runActivatedCronTasks();
        $I->fetchEmails();
        $I->haveNumberOfUnreadEmails($expected_mail_nr + 1);
    }

    // Password rotation — entire body commented out pending reimplementation.
    // codecept run acceptance CronCest:writeCurrentPasswords --steps -f
    public function writeCurrentPasswords(A $I)
    {
        /*
         * TODO: recreate this function
        $I->emptyFilesInFolder('temp');
        $initial_pw_count = 23;
        $I->seeNumRecords($initial_pw_count, 'hashes', ['Category' => 3]);
        $I->runCronTask('create_new_passwords');
        // the there are no passwords that expire before "today + 1/2 year"
        $I->seeNumRecords($initial_pw_count, 'hashes', ['Category' => 3]);
        $I->changeTestDate('+6 weeks');
        // now the task is due, but still no old passwords
        $I->runActivatedCronTasks();
        $I->seeNumRecords($initial_pw_count, 'hashes', ['Category' => 3]);
        $I->changeTestDate('+8 months'); // = 2019-02-01
        // now all passwords should be renewed
        $I->runActivatedCronTasks();
        $I->seeNumRecords($initial_pw_count * 2, 'hashes', ['Category' => 3]);
        $I->assertNotEmpty($I->getFileNamesFromFolder('temp'));
        */
    }

    public function cleanSqlDatabase(A $I)
    {
        // TODO: implement this function
    }

    // Backup rotation: expected file counts at given day offsets from 06-01.
    // codecept run acceptance CronCest:backupDatabase --steps -f
    public function backupDatabase(A $I, $scenario)
    {
        $scenario->skip('Doesn\'t currently work.');
        // TODO: Fix this!
        $I->emptyFilesInFolder('backup');
        $I->assertEmpty($I->getFileNamesFromFolder('backup'));
        $test_fixture = [
            0 => 1,     //06-01, day_nr: 151
            4 => 2,     //06-05, 154
            10 => 2,    //06-11, 161
            29 => 2,    //06-30, 180 (should stay forever)
            30 => 3,    //07-01, 181
            119 => 2,   //09-28, 245
        ];
        foreach ($test_fixture as $days_to_add => $files_to_expect) {
            $I->changeTestDate('+' . $days_to_add . ' days');
            if ($days_to_add === 0) {
                $I->runCronTask('backup_database');
            } else {
                $I->runActivatedCronTasks();
            }
            $I->assertCount($files_to_expect, $I->getFileNamesFromFolder('backup'));
        }
    }

    // Exploratory helper: logs the backup-folder contents for 500 simulated
    // days to a file for manual inspection (marked @skip).
    // codecept run acceptance CronCest:backupDatabaseChecker --steps -f
    /**
     * @skip
     * @param AcceptanceTester $I
     * @throws Exception
     */
    public function backupDatabaseChecker(A $I)
    {
        $I->emptyFilesInFolder('backup');
        $I->runCronTask('backup_database');
        foreach (range(1, 500) as $days_to_add) {
            $I->changeTestDate('+' . $days_to_add . ' days');
            $I->runActivatedCronTasks();
            $files = $I->getFileNamesFromFolder('backup');
            $path = codecept_output_dir() . '/database_check_log.txt';
            $data = "-----\n" . $days_to_add . "\n-----\n";
            $data .= implode("\n", $files) . "\n\n";
            file_put_contents($path, $data, FILE_APPEND);
        }
    }
}
{ "content_hash": "8ec7758923c242d1a8d82d73e4eb84a9", "timestamp": "", "source": "github", "line_count": 370, "max_line_length": 111, "avg_line_length": 34.064864864864866, "alnum_prop": 0.5364963503649635, "repo_name": "fridde/naturskolan_database", "id": "e4ad42a33d12c4053eb984d009498728e40b840a", "size": "12634", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/acceptance/CronCest.php", "mode": "33261", "license": "mit", "language": [ { "name": "CSS", "bytes": "129133" }, { "name": "HTML", "bytes": "156438" }, { "name": "JavaScript", "bytes": "48167" }, { "name": "PHP", "bytes": "347890" }, { "name": "Shell", "bytes": "5626" } ], "symlink_target": "" }
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
// NOTE(review): this file is AutoRest-generated; prefer regenerating from the service
// specification over hand-editing so that changes are not lost on the next generation.

package com.azure.resourcemanager.mariadb.fluent;

import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.Response;
import com.azure.core.util.Context;
import com.azure.resourcemanager.mariadb.fluent.models.PrivateLinkResourceInner;

/**
 * An instance of this class provides access to all the operations defined in PrivateLinkResourcesClient.
 *
 * <p>All methods are synchronous; list operations return lazily-paged {@link PagedIterable} collections.
 */
public interface PrivateLinkResourcesClient {
    /**
     * Gets the private link resources for MariaDB server.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param serverName The name of the server.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the private link resources for MariaDB server, as a lazily-paged iterable.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedIterable<PrivateLinkResourceInner> listByServer(String resourceGroupName, String serverName);

    /**
     * Gets the private link resources for MariaDB server.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param serverName The name of the server.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the private link resources for MariaDB server, as a lazily-paged iterable.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedIterable<PrivateLinkResourceInner> listByServer(String resourceGroupName, String serverName, Context context);

    /**
     * Gets a private link resource for MariaDB server.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param serverName The name of the server.
     * @param groupName The name of the private link resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a private link resource for MariaDB server.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    PrivateLinkResourceInner get(String resourceGroupName, String serverName, String groupName);

    /**
     * Gets a private link resource for MariaDB server.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param serverName The name of the server.
     * @param groupName The name of the private link resource.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a private link resource for MariaDB server, together with the raw HTTP {@link Response}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Response<PrivateLinkResourceInner> getWithResponse(
        String resourceGroupName, String serverName, String groupName, Context context);
}
{ "content_hash": "fb447c8f87daa1bfcad5009a388f412c", "timestamp": "", "source": "github", "line_count": 72, "max_line_length": 119, "avg_line_length": 54.611111111111114, "alnum_prop": 0.7558494404883012, "repo_name": "Azure/azure-sdk-for-java", "id": "bd47e7fd131d8d410e599b110427f69124f81ae3", "size": "3932", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "sdk/mariadb/azure-resourcemanager-mariadb/src/main/java/com/azure/resourcemanager/mariadb/fluent/PrivateLinkResourcesClient.java", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "8762" }, { "name": "Bicep", "bytes": "15055" }, { "name": "CSS", "bytes": "7676" }, { "name": "Dockerfile", "bytes": "2028" }, { "name": "Groovy", "bytes": "3237482" }, { "name": "HTML", "bytes": "42090" }, { "name": "Java", "bytes": "432409546" }, { "name": "JavaScript", "bytes": "36557" }, { "name": "Jupyter Notebook", "bytes": "95868" }, { "name": "PowerShell", "bytes": "737517" }, { "name": "Python", "bytes": "240542" }, { "name": "Scala", "bytes": "1143898" }, { "name": "Shell", "bytes": "18488" }, { "name": "XSLT", "bytes": "755" } ], "symlink_target": "" }
package com.google.android.exoplayer2.upstream;

import static com.google.android.exoplayer2.util.Util.castNonNull;

import android.content.ContentResolver;
import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.net.Uri;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import java.io.EOFException;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.channels.FileChannel;

/**
 * A {@link DataSource} for reading from a content URI.
 *
 * <p>Lifecycle: {@link #open(DataSpec)} resolves the URI through the {@link ContentResolver},
 * positions the stream at {@code dataSpec.position} (relative to the asset's start offset) and
 * computes the number of readable bytes; {@link #read} then serves data until that budget is
 * exhausted; {@link #close()} releases the stream and the file descriptor. The {@code transfer*}
 * and {@code bytesTransferred} calls are listener-notification hooks inherited from
 * {@code BaseDataSource}.
 */
public final class ContentDataSource extends BaseDataSource {

  /**
   * Thrown when an {@link IOException} is encountered reading from a content URI.
   */
  public static class ContentDataSourceException extends IOException {

    public ContentDataSourceException(IOException cause) {
      super(cause);
    }

  }

  private final ContentResolver resolver;

  @Nullable private Uri uri;
  @Nullable private AssetFileDescriptor assetFileDescriptor;
  @Nullable private FileInputStream inputStream;
  // Bytes still to be served by read(), or C.LENGTH_UNSET if the length could not be determined.
  private long bytesRemaining;
  // True between a successful open() and the matching close(); guards transferEnded() reporting.
  private boolean opened;

  /**
   * @param context A context.
   */
  public ContentDataSource(Context context) {
    super(/* isNetwork= */ false);
    this.resolver = context.getContentResolver();
  }

  /**
   * Opens the source at {@code dataSpec.uri} / {@code dataSpec.position}.
   *
   * @return The number of bytes that can be read, or {@link C#LENGTH_UNSET} if unknown.
   * @throws ContentDataSourceException Wrapping any {@link IOException} raised while resolving,
   *     opening, or positioning the stream (including {@link EOFException} when the requested
   *     position lies beyond the end of the data).
   */
  @Override
  public long open(DataSpec dataSpec) throws ContentDataSourceException {
    try {
      Uri uri = dataSpec.uri;
      this.uri = uri;
      transferInitializing(dataSpec);
      AssetFileDescriptor assetFileDescriptor = resolver.openAssetFileDescriptor(uri, "r");
      this.assetFileDescriptor = assetFileDescriptor;
      if (assetFileDescriptor == null) {
        throw new FileNotFoundException("Could not open file descriptor for: " + uri);
      }
      FileInputStream inputStream = new FileInputStream(assetFileDescriptor.getFileDescriptor());
      this.inputStream = inputStream;
      // The asset may start at a non-zero offset within the underlying file, so the requested
      // position is relative to getStartOffset().
      long assetStartOffset = assetFileDescriptor.getStartOffset();
      long skipped = inputStream.skip(assetStartOffset + dataSpec.position) - assetStartOffset;
      if (skipped != dataSpec.position) {
        // We expect the skip to be satisfied in full. If it isn't then we're probably trying to
        // skip beyond the end of the data.
        throw new EOFException();
      }
      if (dataSpec.length != C.LENGTH_UNSET) {
        // The caller requested a bounded read.
        bytesRemaining = dataSpec.length;
      } else {
        long assetFileDescriptorLength = assetFileDescriptor.getLength();
        if (assetFileDescriptorLength == AssetFileDescriptor.UNKNOWN_LENGTH) {
          // The asset must extend to the end of the file. If FileInputStream.getChannel().size()
          // returns 0 then the remaining length cannot be determined.
          FileChannel channel = inputStream.getChannel();
          long channelSize = channel.size();
          if (channelSize == 0) {
            bytesRemaining = C.LENGTH_UNSET;
          } else {
            bytesRemaining = channelSize - channel.position();
            if (bytesRemaining < 0) {
              throw new EOFException();
            }
          }
        } else {
          // Declared asset length minus what was skipped past within the asset.
          bytesRemaining = assetFileDescriptorLength - skipped;
          if (bytesRemaining < 0) {
            throw new EOFException();
          }
        }
      }
    } catch (IOException e) {
      throw new ContentDataSourceException(e);
    }

    opened = true;
    transferStarted(dataSpec);

    return bytesRemaining;
  }

  /**
   * Reads up to {@code readLength} bytes into {@code buffer} at {@code offset}.
   *
   * @return The number of bytes read, or {@link C#RESULT_END_OF_INPUT} when exhausted.
   * @throws ContentDataSourceException On I/O failure, or if the stream ends before the expected
   *     number of bytes has been served.
   */
  @Override
  public int read(byte[] buffer, int offset, int readLength) throws ContentDataSourceException {
    if (readLength == 0) {
      return 0;
    } else if (bytesRemaining == 0) {
      return C.RESULT_END_OF_INPUT;
    }

    int bytesRead;
    try {
      // Clamp the read to the remaining budget when the total length is known.
      int bytesToRead = bytesRemaining == C.LENGTH_UNSET ? readLength
          : (int) Math.min(bytesRemaining, readLength);
      bytesRead = castNonNull(inputStream).read(buffer, offset, bytesToRead);
    } catch (IOException e) {
      throw new ContentDataSourceException(e);
    }

    if (bytesRead == -1) {
      if (bytesRemaining != C.LENGTH_UNSET) {
        // End of stream reached having not read sufficient data.
        throw new ContentDataSourceException(new EOFException());
      }
      return C.RESULT_END_OF_INPUT;
    }
    if (bytesRemaining != C.LENGTH_UNSET) {
      bytesRemaining -= bytesRead;
    }
    bytesTransferred(bytesRead);
    return bytesRead;
  }

  @Override
  @Nullable
  public Uri getUri() {
    return uri;
  }

  // "Finally" is suppressed because the nested finally blocks deliberately re-throw: the
  // descriptor must be closed (and transferEnded reported) even if closing the stream failed.
  @SuppressWarnings("Finally")
  @Override
  public void close() throws ContentDataSourceException {
    uri = null;
    try {
      if (inputStream != null) {
        inputStream.close();
      }
    } catch (IOException e) {
      throw new ContentDataSourceException(e);
    } finally {
      inputStream = null;
      try {
        if (assetFileDescriptor != null) {
          assetFileDescriptor.close();
        }
      } catch (IOException e) {
        throw new ContentDataSourceException(e);
      } finally {
        assetFileDescriptor = null;
        // Report end-of-transfer exactly once per successful open().
        if (opened) {
          opened = false;
          transferEnded();
        }
      }
    }
  }

}
{ "content_hash": "14847d725ff5c89aef348c8410771db1", "timestamp": "", "source": "github", "line_count": 169, "max_line_length": 97, "avg_line_length": 30.331360946745562, "alnum_prop": 0.6595786188060866, "repo_name": "stari4ek/ExoPlayer", "id": "40fba3767105b3bc890994940f56d31fd9aee751", "size": "5745", "binary": false, "copies": "1", "ref": "refs/heads/iptv-for-androidtv/dev-v2", "path": "library/core/src/main/java/com/google/android/exoplayer2/upstream/ContentDataSource.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C++", "bytes": "103958" }, { "name": "CMake", "bytes": "1845" }, { "name": "Java", "bytes": "9086256" }, { "name": "Makefile", "bytes": "13840" }, { "name": "Shell", "bytes": "7871" } ], "symlink_target": "" }
//
// Copyright (C) INRIA 1999-2008
//
// This program is free software; you can redistribute it and/or modify it
// under the terms of the GNU General Public License version 2 as published
// by the Free Software Foundation.
//
// This program is distributed in the hope that it will be useful, but
// WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
// Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, write to the Free Software Foundation, Inc.,
// 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
//%
// @file kernel/Utils.hpp
// @author Rémy MOZUL
//
// Affiliation(s): INRIA, team BIPOP
//
// Email(s): mozul@inria.fr
//
// @brief reducing and expanding functions for vectors and matrices
//
#ifndef __Kernel_Utils_hpp
#define __Kernel_Utils_hpp

#ifdef WINDOWS
// NOTE(review): this redefines the C++ keyword 'extern' so that every 'extern'
// in code including this header becomes a dllexport attribute on Windows.
// Presumably intentional for exporting the declarations below, but it also
// rewrites unrelated uses of 'extern' in downstream translation units — confirm.
#define extern __declspec (dllexport)
#endif

#include <boost/numeric/bindings/traits/ublas_vector.hpp>
#include <boost/numeric/bindings/traits/ublas_matrix.hpp>
#include <boost/numeric/ublas/vector_proxy.hpp>
#include <boost/numeric/ublas/matrix_proxy.hpp>

using namespace boost::numeric::ublas;

/**
 * Reduce the input matrix to the rows/columns selected by the given indexes.
 * The matrix is modified through the non-const reference (void return).
 *
 * @param[in,out] mat    matrix to reduce
 * @param[in]     index1 row indexes to keep
 * @param[in]     index2 column indexes to keep
 */
void reduceMatrix(matrix<double, column_major> & mat, vector<int> & index1, vector<int> & index2);

/**
 * Reduce the input matrix to the rows/columns selected by the given indexes.
 * The matrix is modified through the non-const reference (void return).
 *
 * @param[in,out] mat    matrix to reduce
 * @param[in]     index1 boolean row mask <em> dim number of rows of mat </em>
 * @param[in]     index2 column indexes to keep
 */
void reduceMatrix(matrix<double, column_major> & mat, vector<bool> & index1, vector<int> & index2);

/**
 * Reduce the input vector to the entries selected by the given index.
 *
 * @param[in] vec   vector to reduce
 * @param[in] index indexes of the entries to keep
 *
 * @return reduced vector <em> dim size of index </em>
 */
vector<double> reduceVector(vector<double> & vec, vector<int> & index);

/**
 * Reduce the input (array-adapted) vector to the entries selected by the
 * given index.
 *
 * @param[in] vec   vector (adapting an external array) to reduce
 * @param[in] index indexes of the entries to keep
 *
 * @return reduced vector <em> dim size of index </em>
 */
vector<double> reduceVector(vector<double, array_adaptor<double> > & vec, vector<int> & index);

/**
 * Reduce the input vector in place to the entries selected by the boolean
 * mask (non-const reference, void return).
 *
 * @param[in,out] vec   vector to reduce
 * @param[in]     index boolean mask <em> dim size of vec </em>
 */
void reduceVector(vector<double> & vec, vector<bool> & index);

/**
 * Reduce the input boolean vector in place to the entries selected by the
 * boolean mask (non-const reference, void return).
 *
 * @param[in,out] vec   vector to reduce
 * @param[in]     index boolean mask <em> dim size of vec </em>
 */
void reduceVector(vector<bool> & vec, vector<bool> & index);

/**
 * Expand the entries of vector2 into the given indexes of the raw output
 * array tab.
 *
 * @param[in,out] tab     raw double array receiving the expanded values
 * @param[in]     vector2 source vector of values
 * @param[in]     index   target indexes in tab, one per entry of vector2
 */
void expandVector(double * tab, vector<double> & vector2, vector<int> & index);

/**
 * Expand the entries of the source vector into the positions of tab selected
 * by the boolean mask.
 * NOTE(review): the parameter named 'vector' shadows the ublas type name
 * brought in by the using-directive above; consider renaming when this
 * header is next touched.
 *
 * @param[in,out] tab    raw double array receiving the expanded values
 * @param[in]     vector source vector of values
 * @param[in]     index  boolean mask <em> dim size of tab </em>
 */
void expandVector(double * tab, vector<double> & vector, vector<bool> & index);

#endif /* __Kernel_Utils_hpp */
{ "content_hash": "c6f3d6d950008dcc8f683eaab8d0a226", "timestamp": "", "source": "github", "line_count": 115, "max_line_length": 99, "avg_line_length": 28.026086956521738, "alnum_prop": 0.6850760161340366, "repo_name": "siconos/siconos-deb", "id": "a7b085ad1722d487a77636639336e48d8703e735", "size": "3224", "binary": false, "copies": "2", "ref": "refs/heads/ubuntu/xenial", "path": "examples/Robotics/RX90/RX90Plugin/Utils.hpp", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "2725" }, { "name": "C", "bytes": "4317052" }, { "name": "C++", "bytes": "8854932" }, { "name": "CMake", "bytes": "381170" }, { "name": "CSS", "bytes": "29334" }, { "name": "Fortran", "bytes": "2539066" }, { "name": "GAMS", "bytes": "5614" }, { "name": "HTML", "bytes": "4771178" }, { "name": "JavaScript", "bytes": "422105" }, { "name": "Makefile", "bytes": "11474" }, { "name": "PostScript", "bytes": "1435858" }, { "name": "Python", "bytes": "1207294" }, { "name": "Shell", "bytes": "44867" }, { "name": "TeX", "bytes": "82998" } ], "symlink_target": "" }
package com.personlife.personinfo.carema; import com.example.personlifep.R; import android.view.KeyEvent; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.BaseAdapter; import android.widget.GridView; public class GridHolder implements HolderAdapter, AdapterView.OnItemClickListener { private final int columnNumber; private int backgroundColor; private GridView gridView; private ViewGroup headerContainer; private ViewGroup footerContainer; private OnHolderListener listener; private View.OnKeyListener keyListener; private View headerView; private View footerView; public GridHolder(int columnNumber) { this.columnNumber = columnNumber; } @Override public void addHeader(View view) { if (view == null) { return; } headerContainer.addView(view); headerView = view; } @Override public void addFooter(View view) { if (view == null) { return; } footerContainer.addView(view); footerView = view; } @Override public void setAdapter(BaseAdapter adapter) { gridView.setAdapter(adapter); } @Override public void setBackgroundColor(int colorResource) { this.backgroundColor = colorResource; } @Override public View getView(LayoutInflater inflater, ViewGroup parent) { View view = inflater.inflate(R.layout.dialog_grid, parent, false); gridView = (GridView) view.findViewById(R.id.list); gridView.setBackgroundColor(parent.getResources().getColor( getBackgroundColor())); gridView.setNumColumns(columnNumber); gridView.setOnItemClickListener(this); gridView.setOnKeyListener(new View.OnKeyListener() { @Override public boolean onKey(View v, int keyCode, KeyEvent event) { if (keyListener == null) { throw new NullPointerException( "keyListener should not be null"); } return keyListener.onKey(v, keyCode, event); } }); headerContainer = (ViewGroup) view.findViewById(R.id.header_container); footerContainer = (ViewGroup) view.findViewById(R.id.footer_container); return view; } @Override public void 
setOnItemClickListener(OnHolderListener listener) { this.listener = listener; } @Override public void setOnKeyListener(View.OnKeyListener keyListener) { this.keyListener = keyListener; } @Override public View getInflatedView() { return gridView; } @Override public View getHeader() { return headerView; } @Override public View getFooter() { return footerView; } @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { if (listener == null) { return; } listener.onItemClick(parent.getItemAtPosition(position), view, position); } private int getBackgroundColor() { if (backgroundColor == 0) { backgroundColor = android.R.color.white; } return backgroundColor; } }
{ "content_hash": "552ed226b74322f1a688746e50b60891", "timestamp": "", "source": "github", "line_count": 123, "max_line_length": 75, "avg_line_length": 23.365853658536587, "alnum_prop": 0.7466945024356297, "repo_name": "feishuai/PersonLife", "id": "503383af51c0be5ca9d7457df9c9cca292ddfc65", "size": "2874", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/com/personlife/personinfo/carema/GridHolder.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "988517" } ], "symlink_target": "" }
<?xml version="1.0" encoding="utf-8"?>
<!-- Structure list screen: reusable top bar, a red hint label pinned to the
     top of the body, and a pull-to-refresh swipe-menu list filling the space
     below it. -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    style="@style/layout_bg" >

    <include layout="@layout/top_bar" />

    <RelativeLayout style="@style/layout_body" >

        <!-- NOTE(review): id "lable" is a misspelling of "label"; keep as-is
             because layout_below below references it (and Java code may too). -->
        <TextView
            android:id="@+id/lable"
            android:layout_width="match_parent"
            android:layout_height="wrap_content"
            android:background="@color/darkgray"
            android:padding="5dp"
            android:text="@string/project_update_parent_hint"
            android:textColor="@color/red"
            android:textSize="14sp" />

        <com.zzn.aeassistant.view.pulltorefresh.PullToRefreshSwipeMenuListView
            android:id="@+id/base_list"
            style="@style/base_list"
            android:layout_below="@+id/lable" />
    </RelativeLayout>

</LinearLayout>
{ "content_hash": "56c2c2c14505c67522a5f4a3ecc15bdf", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 78, "avg_line_length": 34.32, "alnum_prop": 0.6107226107226107, "repo_name": "ShawnDongAi/AEASSISTANT", "id": "5d9c56b2fb8a22e749f92d31c9978f9ba5906520", "size": "858", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "AEAssistant/res/layout/activity_struct_list.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "1737122" } ], "symlink_target": "" }
<!DOCTYPE html>
<html>
<!--
File: build-cordova-install-osx.html
Description: How to install Cordova on OSX
Author: Mikael Kindborg
Copyright (c) 2013-2016 Evothings AB
-->
<head>
<meta charset="utf-8">
<script src="../js/include-head.js"></script>
<title>Install Cordova on OS X</title>
</head>

<body>
<div class="evo-page">

<script src="../js/include-page-header.js"></script>

<div class="evo-content">

<div class="evo-box evo-content-index">
<div class="evo-box">
<a href="#Introduction">Installing Cordova and SDKs on OS X</a><br/>
<a href="#InstallCordova">Install Cordova</a><br/>
<a href="#InstallXcode">Install Xcode</a><br/>
<a href="#InstallAndroidSDK">Install Android SDK</a><br/>
<a href="#NextStep">The fun begins</a><br/>
<a href="#EvothingsCordova">Use Evothings Studio</a><br/>
</div>
</div>

<div id="Introduction" class="evo-box">
<h1>Installing Cordova and SDKs on OS X</h1>
<p>This document describes how to install Apache Cordova
and SDKs for iOS and Android on an OS X machine.</p>
</div>

<div id="InstallCordova" class="evo-box">
<h2>Install Cordova</h2>
<p>Follow these steps to install Cordova:</p>
<ol>
<li><p><b>Install Node.js.</b> Cordova runs on the Node.js platform,
which needs to be installed as the first step. Download installer from:
<a href="http://nodejs.org/" target="_blank">http://nodejs.org</a></p></li>
<li><p>Go ahead and run the downloaded installation file.</p></li>
<li><p>To test the installation, open a terminal window (make sure
you open a new terminal window to get the settings made by the
Node.js installation), and type:
<pre>node --version</pre>
If the version number is displayed, Node.js is installed and working!</p></li>
<li><p><b>Install Git.</b> Git is a version control system, which is
used by Cordova behind-the-scenes. Download the installer from:
<a href="http://git-scm.com/" target="_blank">http://git-scm.com</a>.</p></li>
<li><p><b>Install Cordova.</b> Cordova is installed using the
Node Package Manager (npm).
Type the following in the terminal window to install:
<pre>sudo npm install -g cordova</pre></p></li>
<li><p>Test the Cordova install by typing:
<pre>cordova --version</pre>
If you see the version number, you have successfully installed
Apache Cordova!</p></li>
</ol>
</div>

<div id="InstallXcode" class="evo-box">
<h2>Install Xcode for iOS development</h2>
<p>If you wish to develop iOS apps, go ahead and install Xcode,
which is Apple's development tool for iOS app development:</p>
<ol>
<li><p>To access iOS development resources, you need to sign up
for an Apple account if you have not already done that.</p></li>
<li><p>To deploy apps on devices (iPhone and iPad) and to publish apps
on the AppStore, you should join Apple's
<a href="https://developer.apple.com/programs/ios/" target="_blank">iOS
Developer Program</a>. This costs 99 USD per year.</p></li>
<li><p>Install Xcode from:
<a href="https://developer.apple.com/xcode/downloads/" target="_blank">https://developer.apple.com/xcode/downloads</a></p></li>
<li><p>When the install is complete, launch Xcode and enable the
command line tools for Xcode (these are used by Cordova).
<ul>
<li><p>From the <b>Xcode</b> menu, select <b>Preferences...</b></p></li>
<li><p>Select the <b>Downloads</b> tab.</p></li>
<li><p>Under <b>Components</b>, locate <b>Command Line Tools</b>
and click the download icon.</p></li>
</ul>
</p></li>
<li><p>If you get stuck, try out the
<a href="https://cordova.apache.org/docs/en/latest/guide/platforms/ios/index.html" target="_blank">Cordova
documentation for iOS</a>.</p></li>
</ol>
<p>Xcode is now ready to be used with Cordova. The way this works is that you
create and build a Cordova project, the build process generates an Xcode
project, that you just double-click to open. When your project is open in
Xcode, you can run it in the iOS simulator or deploy the app to a device.
Follow the instructions in the <a href="cordova-guide.html">Cordova Guide</a> to try this!</p> </div> <div id="InstallAndroidSDK" class="evo-box"> <h2>Install the Android SDK</h2> <p>To develop Android apps, you need to install Java, the Ant build tool, and the Android SDK.</p> <h3>Install Java</h3> <p>To use the Android SDK the Java SDK needs to be installed (minimum version 1.6). How to install:</p> <ol> <li><p>To check if Java is already installed, type this in a terminal window:<br/> <pre>javac -version</pre> If you see a version number at or above 1.6 you should be good to go. </p></li> <li><p>If Java is not installed, get the <a href="http://support.apple.com/downloads/#java" target="_blank">Java for OS X installer</a> from Apple's download page. Direct link: <a href="http://support.apple.com/kb/DL1572" target="_blank">support.apple.com/kb/DL1572</a>. </p></li> </ol> <h3>Install Ant</h3> <p><a href="http://ant.apache.org/" target="_blank">Apache Ant</a> is a build system for Java, which is used by Cordova and the Android SDK. 
Use <a href="http://brew.sh/" target="_blank">Homebrew</a> to install Ant,
using the following steps:</p>
<ol>
<li><p>Start by checking if you have Ant installed by opening
a terminal window and run:
<pre>ant -version</pre>
If you see a version number, Ant is already installed.</p></li>
<li><p>Next, check if you have Homebrew installed by typing:
<pre>brew --version</pre>
If you see a version number, Homebrew is installed.</p></li>
<li><p>If you have Homebrew installed, but not Ant, install Ant
from a terminal window using these commands:
<pre>brew update
brew install ant</pre>
<li><p>If neither Homebrew nor Ant is installed, install both of them
with the following commands:
<pre>ruby -e "$(curl -fsSL https://raw.github.com/Homebrew/homebrew/go/install)"
brew install ant</pre>
<li><p>Now test the install with:
<pre>ant -version</pre></p></li>
</ol>

<h3>Install Android Developer Tools (ADT)</h3>
<ol>
<li><p>Download the Android Developer Tools (Eclipse ADT) from
<a href="http://developer.android.com/sdk" target="_blank">developer.android.com/sdk</a>.
The install is a zip-file, which you can unpack to a location of your
choice. Make a note of where you install the SDK files, as you will have
to add these to the system path.</p></li>
<li><p>Add the path to the folder where you unzipped the Android SDK tools
to the files <b>.bash_profile</b> and <b>.bashrc</b> (these files are
located in your home directory). This is needed for Cordova to find
the Android build tools.</p></li>
<li><p>Edit <b>.bashrc</b> in nano (or any editor you prefer) by typing
the following in a terminal window (the <b>cd</b> command takes
you to your home directory).
<pre>cd nano .bashrc</pre> </p></li> <li><p>In the nano editor, add the following line, with "/Users/username/android" substituted by the actual location of the Android SDK (note that there are two paths in one line, separated by a colon): <p><code>export PATH=/Users/username/android/sdk/platform-tools:/Users/username/android/sdk/tools:$PATH</code></p> Save and exit by typing CTRL+O (the letter key O) and CTRL+X.</p></li> <li><p>Next, check that .bashrc is loaded by .bash_profile (.bash_profile is loaded when you open a terminal window, and we want the path set for terminal windows). Open the file <b>.bash_profile</b> in nano: <pre>nano .bash_profile</pre> </p></li> <li><p>If not present already, add the following lines to .bash_profile: <pre>if [ -f ~/.bashrc ]; then source ~/.bashrc fi</pre></p></li> <li><p>Now test the install. Close any open terminal windows, open a new terminal window and type: <pre>adb version</pre> This should display the version of the Android Debug Bridge.</p></li> <li><p>As the final step, you need to get the specific Android SDK version used by Cordova. This is done by running the <b>Android SDK Manager</b> by typing the command: <pre>android</pre> This launches a window where you can select to install specific Android SDKs.</p></li> <li><p>First time you launch the Android SDK Manager there will be preset selections. It is recommended to leave these untouched. Also select the entry "Android 4.4.2 (API 19)". This is the version used by the current Cordova 3.5 version. Note that the Android SDK required by Cordova will change in the future, as new versions of Cordova and Android are released. When this happens, open the Android SDK Manager again, and install the required API version(s).</p></li> </ol> <p>If you get stuck, consult the <a href="https://cordova.apache.org/docs/en/latest/guide/platforms/android/index.html" target="_blank">Cordova documentation for Android</a>. 
You are also welcome to ask for help at the <a href="http://forum.evothings.com/" target="_blank">Evothings Forum</a>.</p> </div> <div id="NextStep" class="evo-box"> <h2>The fun begins</h2> <p>Next step is to create and build a Cordova project. Go to the <a href="cordova-guide.html">Cordova Guide</a> to learn more.</p> </div> <div id="EvothingsCordova" class="evo-box"> <h2>Use Evothings Studio for Cordova app development</h2> <p>Evothings Studio makes it easy to develop Cordova apps. Check out how to use your Cordova project with the <a href="cordova-guide.html#SetupCordovaForEvothings">fast Evothings workflow</a>.</p> <p>It is easy and fun to get started with Evothings Studio. <a href="http://evothings.com/download/" target="_blank">Download now</a> and be up and running in 5 minutes!</p> </div> <script src="../js/include-page-footer.js"></script> </div><!-- evo-page-content --> </div><!-- evo-page --> </body> </html>
{ "content_hash": "927ce0f74f7e39a2f8993cc9fd51602a", "timestamp": "", "source": "github", "line_count": 152, "max_line_length": 471, "avg_line_length": 63.23026315789474, "alnum_prop": 0.7099157215690355, "repo_name": "evothings/evothings-doc", "id": "308eb2c8433012293d962a05300a8c5afadc2807", "size": "9611", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "build/cordova-install-osx.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "112751" }, { "name": "HTML", "bytes": "1416650" }, { "name": "JavaScript", "bytes": "7887" } ], "symlink_target": "" }
########################################################################## #Aqueduct - Compliance Remediation Content #Copyright (C) 2011,2012 # Vincent C. Passaro (vincent.passaro@gmail.com) # Shannon Mitchell (shannon.mitchell@fusiontechnology-llc.com) # #This program is free software; you can redistribute it and/or #modify it under the terms of the GNU General Public License #as published by the Free Software Foundation; either version 2 #of the License, or (at your option) any later version. # #This program is distributed in the hope that it will be useful, #but WITHOUT ANY WARRANTY; without even the implied warranty of #MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #GNU General Public License for more details. # #You should have received a copy of the GNU General Public License #along with this program; if not, write to the Free Software #Foundation, Inc., 51 Franklin Street, Fifth Floor, #Boston, MA 02110-1301, USA. ########################################################################## ###################### Fotis Networks LLC ############################### # By Vincent C. Passaro # # Fotis Networks LLC # # Vincent[.]Passaro[@]fotisnetworks[.]com # # www.fotisnetworks.com # ###################### Fotis Networks LLC ############################### # # _____________________________________________________________________ # | Version | Change Information | Author | Date | # |__________|_______________________|____________________|____________| # | 1.0 | Initial Script | Vincent C. Passaro | 1-Aug-2012 | # | | Creation | | | # |__________|_______________________|____________________|____________| # #######################DISA INFORMATION################################## # Group ID (Vulid): V-22506 # Group Title: GEN006565 # Rule ID: SV-37751r1_rule # Severity: medium # Rule Version (STIG-ID): GEN006565 # Rule Title: The system package management tool must be used to verify # system software periodically. 
# # Vulnerability Discussion: Verification using the system package # management tool can be used to determine that system software has not # been tampered with. # This requirement is not applicable to systems not using package # management tools. # # Responsibility: System Administrator # IAControls: ECAT-1 # # Check Content: # # Check the root crontab (crontab -l) and the global crontabs in # "/etc/crontab", "/etc/cron.*" for the presence of an rpm verification # command such as: # rpm -qVa | awk '$2!="c" {print $0}' # If no such cron job is found, this is a finding. # If the result of the cron job indicates packages which do not pass # verification exist, this is a finding. # # Fix Text: # # Add a cron job to run an rpm verification command such as: # rpm -qVa | awk '$2!="c" {print $0}' # For packages which failed verification: # If the package is not necessary for operations, remove it from the system. # If the package is necessary for operations, re-install the package. #######################DISA INFORMATION################################## # Global Variables PDI=GEN006565 # Start-Lockdown
{ "content_hash": "9714b14562cde2dc307a02077f6fc223", "timestamp": "", "source": "github", "line_count": 86, "max_line_length": 76, "avg_line_length": 41, "alnum_prop": 0.5266591038003403, "repo_name": "tomhurd/clip", "id": "533b5c85cf6130a8ad1841843898e03b54197467", "size": "3539", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "packages/aqueduct/aqueduct/compliance/Bash/STIG/rhel-5/dev/GEN006565.sh", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Augeas", "bytes": "1413" }, { "name": "Awk", "bytes": "209" }, { "name": "C", "bytes": "13623" }, { "name": "CSS", "bytes": "11752" }, { "name": "JavaScript", "bytes": "4586" }, { "name": "Makefile", "bytes": "27100" }, { "name": "Pascal", "bytes": "91645" }, { "name": "Perl", "bytes": "8268" }, { "name": "Puppet", "bytes": "18738" }, { "name": "Python", "bytes": "263618" }, { "name": "Ruby", "bytes": "4278" }, { "name": "Shell", "bytes": "5489710" }, { "name": "XSLT", "bytes": "171062" } ], "symlink_target": "" }
package org.jte.constant;

/**
 * Shared request/response key constants for the JTE web layer.
 *
 * <p>Non-instantiable holder class: every member is a {@code public static final}
 * string naming either an HTTP request parameter or a response attribute.
 */
public final class Constants {

    /** Request parameter: start index of the first record to return (paging offset). */
    public static final String REQUEST_PARAM_START = "start";

    /** Request parameter: number of records per page. */
    public static final String REQUEST_PARAM_PAGE_SIZE = "pageSize";

    /** Request parameter: entity identifier. */
    public static final String REQUEST_PARAM_ID = "id";

    /** Response attribute: the payload data. */
    public static final String RESPONSE_ATTRIBUTE_DATA = "data";

    /** Response attribute: total number of matching records. */
    public static final String RESPONSE_ATTRIBUTE_TOTAL = "total";

    /** Utility class: prevent instantiation. */
    private Constants() {
        throw new AssertionError("Constants is a non-instantiable holder class");
    }
}
{ "content_hash": "4e8fbc23f78f00587059bcbc6d69268e", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 65, "avg_line_length": 16.28125, "alnum_prop": 0.6065259117082533, "repo_name": "chenzuopeng/jte", "id": "9b59b26bf523cbf2399d65bdfa7293add727a753", "size": "808", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/org/jte/constant/Constants.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "227621" }, { "name": "HTML", "bytes": "2586" }, { "name": "Java", "bytes": "84811" }, { "name": "JavaScript", "bytes": "1099821" } ], "symlink_target": "" }
import {Component, ElementRef, ViewChild} from '@angular/core'; import {LoadingService, PopupInfo, JigsawBubbleLoading} from "jigsaw/public_api"; @Component({ templateUrl: './demo.component.html', styleUrls: ['./demo.component.css'] }) export class BubbleLoadingDemoComponent { @ViewChild('block') block: ElementRef; constructor(public loadingService: LoadingService) { } blockLoading: PopupInfo; globalLoading: PopupInfo; popupBlockLoading() { if (!this.blockLoading) { this.blockLoading = this.loadingService.show(this.block, JigsawBubbleLoading); } } closeBlockLoading() { if (this.blockLoading) { this.blockLoading.dispose(); this.blockLoading = null; } } popupGlobalLoading() { if (!this.globalLoading) { this.globalLoading = this.loadingService.show(JigsawBubbleLoading); setTimeout(() => { this.closeGlobalLoading(); }, 3000) } } closeGlobalLoading() { if (this.globalLoading) { this.globalLoading.dispose(); this.globalLoading = null; } } public isLoading: boolean = false; public label: string = 'submit'; startToLoad() { this.isLoading = true; this.label = 'loading...'; setTimeout(() => { this.isLoading = false; this.label = 'submit'; }, 3000) } // ==================================================================== // ignore the following lines, they are not important to this demo // ==================================================================== summary: string = ''; description: string = ''; }
{ "content_hash": "1155ffaf7b6171430137494cd82d623f", "timestamp": "", "source": "github", "line_count": 63, "max_line_length": 90, "avg_line_length": 27.88888888888889, "alnum_prop": 0.5389869095048377, "repo_name": "rdkmaster/jigsaw", "id": "19872125551d21cd477dd91b6051f9225f76cff2", "size": "1757", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/app/for-internal/demo/pc/loading/bubble/demo.component.ts", "mode": "33188", "license": "mit", "language": [ { "name": "AutoIt", "bytes": "7636" }, { "name": "CSS", "bytes": "3231221" }, { "name": "HTML", "bytes": "10423519" }, { "name": "JavaScript", "bytes": "1132519" }, { "name": "SCSS", "bytes": "535631" }, { "name": "Shell", "bytes": "12910" }, { "name": "TypeScript", "bytes": "4455145" } ], "symlink_target": "" }
(function(root, factory) { if (typeof define === 'function' && define.amd) { define(['exports'], function(exports) {root.WebK = factory(root, exports);}); } else if (typeof exports !== 'undefined') { factory(root, exports); } else { root.WebK = factory(root, root.WebK || {}); } }(this, function(root, WebK) { WebK.getURIParam = WebK.getURIParam || function(param) { var search = param+'='; var i1 = window.location.search.indexOf(search); if (i1==-1) return null; var i21 = window.location.search.indexOf('&',i1+1); var i22 = window.location.search.indexOf('#',i21+1); if (i21==-1 && i22==-1) { return window.location.search.substring(i1+search.length); } else if (i21>-1 && (i21<i22 || i22==-1)) { return window.location.search.substring(i1+search.length, i21); } else if (i22>-1 && (i21>i22 || i21==-1)) { return window.location.search.substring(i1+search.length, i22); } else { return null; } } // https://support.google.com/analytics/answer/1033867 // https://developers.google.com/analytics/devguides/collection/analyticsjs/field-reference WebK.addCampaign = function(orig) { var result = orig || {}, value; value = WebK.getURIParam('utm_campaign'); if (value) result['campaignName']=value; value = WebK.getURIParam('utm_source'); if (value) result['campaignSource']=value; value = WebK.getURIParam('utm_medium'); if (value) result['campaignMedium']=value; value = WebK.getURIParam('utm_term'); if (value) result['campaignKeyword']=value; value = WebK.getURIParam('utm_content'); if (value) result['campaignContent']=value; return result; } return WebK; }));
{ "content_hash": "42a6bb2aa36baf876f8e51d8545893eb", "timestamp": "", "source": "github", "line_count": 63, "max_line_length": 91, "avg_line_length": 26.650793650793652, "alnum_prop": 0.6515783204288267, "repo_name": "coronadofactory/webkitecture", "id": "0a38e08a02ce53aea88bbf2f1a1556cc870a173c", "size": "1920", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "webkitecture-google.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "7996" }, { "name": "PHP", "bytes": "3797" } ], "symlink_target": "" }
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;
using System.Threading.Tasks;

using PhotoArchive.Activities;

namespace PhotoArchive.FileSystem.Analyzers
{
    /// <summary>
    /// Checks the directory structure according to the expected layout
    /// - container->album->pictures
    /// - container->album->group->pictures
    /// and produces the repair activities (move/rename/merge) for any deviation.
    /// </summary>
    public class DirectoryStructureAnalyzer
    {
        /// <summary>
        /// Expected album-name shape, e.g. "2014.09.Holiday" or "2014.09.30.Holiday".
        /// NOTE(review): the pattern is unanchored, so a match anywhere inside the
        /// name is accepted - confirm this is intended.
        /// </summary>
        private const string _pattern = @"\d{4}\.\d{2}(\.\d{2})?\.\w*";

        /// <summary>
        /// Folder names created by other programs; these are excluded from the
        /// analysis. Hoisted to a static set so it is not rebuilt on every call.
        /// </summary>
        private static readonly HashSet<string> ExcludedFolderNames =
            new HashSet<string>(StringComparer.Ordinal)
            {
                // windows
                ".Thumbnails",
                // Picasa
                ".picasaoriginals",
                "Originals"
            };

        private readonly Regex _albumNameMatcher;
        private readonly IFolderStorage _folderStorage;

        public DirectoryStructureAnalyzer(IFolderStorage folderStorage)
        {
            if (folderStorage == null)
            {
                throw new ArgumentNullException("folderStorage");
            }

            this._folderStorage = folderStorage;
            // Compiled: the same matcher is reused for every album folder.
            this._albumNameMatcher = new Regex(DirectoryStructureAnalyzer._pattern, RegexOptions.Compiled | RegexOptions.Singleline);
        }

        /// <summary>
        /// Walks the whole folder tree and returns the list of repair activities.
        /// </summary>
        public async Task<List<ActivityBase>> Analyze()
        {
            var rootFolder = await this._folderStorage.GetAllFolders();

            var activities = new List<ActivityBase>();
            CheckRootWithContainers(rootFolder, activities);
            return activities;
        }

        /// <summary>
        /// Root level: loose items must be moved away; sub folders are containers.
        /// </summary>
        private void CheckRootWithContainers(Folder rootFolder, List<ActivityBase> activities)
        {
            activities.AddRange(this._folderStorage.GetContentOfFolder(rootFolder)
                .Where(this.FilterOtherProgramFolderItems)
                .Select(x => new MoveFolderItemActivity(x, null)));

            foreach (var containerFolder in rootFolder.SubFolders.Where(FilterOtherProgramFolders))
            {
                CheckContainerWithAlbums(containerFolder, activities);
            }
        }

        /// <summary>
        /// Container level: loose items must be moved away; sub folders are albums.
        /// </summary>
        private void CheckContainerWithAlbums(Folder containerFolder, List<ActivityBase> activities)
        {
            activities.AddRange(this._folderStorage.GetContentOfFolder(containerFolder)
                .Where(this.FilterOtherProgramFolderItems)
                .Select(x => new MoveFolderItemActivity(x, null)));

            foreach (var albumFolder in containerFolder.SubFolders.Where(FilterOtherProgramFolders))
            {
                CheckAlbumWithGroups(albumFolder, activities);
            }
        }

        /// <summary>
        /// Album level: badly named albums get a rename activity; when groups
        /// exist, items directly inside the album must be moved away.
        /// </summary>
        private void CheckAlbumWithGroups(Folder albumFolder, List<ActivityBase> activities)
        {
            if (!this._albumNameMatcher.IsMatch(albumFolder.DisplayName))
            {
                activities.Add(new RenameFolderActivity(albumFolder));
            }

            // Materialize once: the sequence is consumed twice below (Any + foreach).
            var groups = albumFolder.SubFolders.Where(FilterOtherProgramFolders).ToList();

            // if there are sub groups, then there should not be any items on the same level.
            if (groups.Any())
            {
                activities.AddRange(this._folderStorage.GetContentOfFolder(albumFolder)
                    .Where(this.FilterOtherProgramFolderItems)
                    .Select(x => new MoveFolderItemActivity(x, null)));
            }

            foreach (var groupFolder in groups)
            {
                CheckGroup(groupFolder, activities);
            }
        }

        /// <summary>
        /// Group level: any deeper nesting is too much and gets merged back up.
        /// </summary>
        private void CheckGroup(Folder groupFolder, List<ActivityBase> activities)
        {
            // Create for each remaining sub folder a merge activity (too much nesting).
            activities.AddRange(groupFolder.SubFolders
                .Where(FilterOtherProgramFolders)
                .Select(f => new MergeFolderActivity(groupFolder, f)));
        }

        /// <summary>
        /// Returns true for folders that belong to the archive itself, false for
        /// folders created by other programs (Windows thumbnails, Picasa, ...).
        /// </summary>
        public static bool FilterOtherProgramFolders(Folder folder)
        {
            return !ExcludedFolderNames.Contains(folder.DisplayName);
        }

        /// <summary>
        /// Item filter counterpart; currently a placeholder that accepts every
        /// item (same behavior as before).
        /// </summary>
        private bool FilterOtherProgramFolderItems(FolderItem arg)
        {
            return true;
        }
    }
}
{ "content_hash": "771fbc409f046c7eb103beb89a1ca817", "timestamp": "", "source": "github", "line_count": 110, "max_line_length": 133, "avg_line_length": 35.872727272727275, "alnum_prop": 0.6150532184490624, "repo_name": "PhotoArchive/core", "id": "acac9d270bd1e42b16203fcae8cf48d1186b27ba", "size": "3948", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/PhotoArchive.FileSystem/Analyzers/DirectoryStructureAnalyzer.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "70407" } ], "symlink_target": "" }
package org.apache.olingo.client.core.edm.xml; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonToken; import com.fasterxml.jackson.databind.DeserializationContext; import org.apache.commons.lang3.BooleanUtils; import org.apache.olingo.commons.api.edm.provider.CsdlComplexType; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import java.io.IOException; @JsonDeserialize(using = ClientCsdlComplexType.ComplexTypeDeserializer.class) class ClientCsdlComplexType extends CsdlComplexType { private static final long serialVersionUID = 4076944306925840115L; static class ComplexTypeDeserializer extends AbstractClientCsdlEdmDeserializer<CsdlComplexType> { @Override protected CsdlComplexType doDeserialize(final JsonParser jp, final DeserializationContext ctxt) throws IOException { final ClientCsdlComplexType complexType = new ClientCsdlComplexType(); for (; jp.getCurrentToken() != JsonToken.END_OBJECT; jp.nextToken()) { final JsonToken token = jp.getCurrentToken(); if (token == JsonToken.FIELD_NAME) { if ("Name".equals(jp.getCurrentName())) { complexType.setName(jp.nextTextValue()); } else if ("Abstract".equals(jp.getCurrentName())) { complexType.setAbstract(BooleanUtils.toBoolean(jp.nextTextValue())); } else if ("BaseType".equals(jp.getCurrentName())) { complexType.setBaseType(jp.nextTextValue()); } else if ("OpenType".equals(jp.getCurrentName())) { complexType.setOpenType(BooleanUtils.toBoolean(jp.nextTextValue())); } else if ("Property".equals(jp.getCurrentName())) { jp.nextToken(); complexType.getProperties().add(jp.readValueAs(ClientCsdlProperty.class)); } else if ("NavigationProperty".equals(jp.getCurrentName())) { jp.nextToken(); complexType.getNavigationProperties().add(jp.readValueAs(ClientCsdlNavigationProperty.class)); } else if ("Annotation".equals(jp.getCurrentName())) { jp.nextToken(); complexType.getAnnotations().add(jp.readValueAs(ClientCsdlAnnotation.class)); } } } return complexType; } } }
{ "content_hash": "d04ef6a5cf2d9eba3df000df545e2343", "timestamp": "", "source": "github", "line_count": 55, "max_line_length": 106, "avg_line_length": 41.27272727272727, "alnum_prop": 0.7030837004405286, "repo_name": "mtaal/olingo-odata4-jpa", "id": "0b59e999b24b5223f333dc84a9b551ef34848a56", "size": "3074", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "lib/client-core/src/main/java/org/apache/olingo/client/core/edm/xml/ClientCsdlComplexType.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "36113" }, { "name": "CSS", "bytes": "1731" }, { "name": "Groovy", "bytes": "5831" }, { "name": "HTML", "bytes": "1289" }, { "name": "Java", "bytes": "7717404" }, { "name": "XSLT", "bytes": "3629" } ], "symlink_target": "" }
title: Certificate Authority
aliases:
- /certauth/
---

# Certificate Authority

Services are generally configured to trust a specific certificate authority. These CAs may be backed by different providers and take responsibility for signing standard X.509 and OpenSSH certificates.

## Supported Providers

* [In-Memory]({{< relref "memory" >}})
* [Local Filesystem]({{< relref "fs" >}})
{ "content_hash": "0c00ad99c38f85c8252cf17ee65e23f6", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 200, "avg_line_length": 27.928571428571427, "alnum_prop": 0.7442455242966752, "repo_name": "dpb587/ssoca", "id": "f52c3609cc55b719fa1cecc417be7a1540eb6335", "size": "395", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "docs/reference/certauth/_index.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1942" }, { "name": "Dockerfile", "bytes": "420" }, { "name": "Go", "bytes": "518728" }, { "name": "HTML", "bytes": "13676" }, { "name": "Shell", "bytes": "8467" } ], "symlink_target": "" }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.trino.plugin.mongodb; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.mongodb.DBRef; import com.mongodb.client.MongoClient; import com.mongodb.client.MongoCollection; import io.trino.sql.planner.plan.LimitNode; import io.trino.testing.BaseConnectorTest; import io.trino.testing.MaterializedResult; import io.trino.testing.MaterializedRow; import io.trino.testing.TestingConnectorBehavior; import io.trino.testing.sql.TestTable; import org.bson.Document; import org.bson.types.ObjectId; import org.testng.SkipException; import org.testng.annotations.AfterClass; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import java.math.BigDecimal; import java.time.LocalDate; import java.time.LocalDateTime; import java.util.Arrays; import java.util.Date; import java.util.Optional; import java.util.OptionalInt; import static io.trino.testing.sql.TestTable.randomTableSuffix; import static java.lang.String.format; import static java.nio.charset.StandardCharsets.UTF_8; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertNotNull; public abstract class BaseMongoConnectorTest extends BaseConnectorTest { protected MongoServer server; protected MongoClient 
client; @AfterClass(alwaysRun = true) public final void destroy() { server.close(); client.close(); } @Override protected boolean hasBehavior(TestingConnectorBehavior connectorBehavior) { switch (connectorBehavior) { case SUPPORTS_RENAME_SCHEMA: case SUPPORTS_NOT_NULL_CONSTRAINT: case SUPPORTS_RENAME_COLUMN: return false; default: return super.hasBehavior(connectorBehavior); } } @Override protected TestTable createTableWithDefaultColumns() { throw new SkipException("MongoDB connector does not support column default values"); } @Test(dataProvider = "testColumnNameDataProvider") @Override public void testColumnName(String columnName) { if (columnName.equals("a.dot")) { assertThatThrownBy(() -> super.testColumnName(columnName)) .isInstanceOf(RuntimeException.class) .hasMessage("Column name must not contain '$' or '.' for INSERT: " + columnName); throw new SkipException("Insert would fail"); } super.testColumnName(columnName); } @Test @Override public void testSortItemsReflectedInExplain() { // The format of the string representation of what gets shown in the table scan is connector-specific // and there's no requirement that the conform to a specific shape or contain certain keywords. 
assertExplain( "EXPLAIN SELECT name FROM nation ORDER BY nationkey DESC NULLS LAST LIMIT 5", "TopNPartial\\[count = 5, orderBy = \\[nationkey DESC"); } @Override protected Optional<DataMappingTestSetup> filterDataMappingSmokeTestData(DataMappingTestSetup dataMappingTestSetup) { String typeName = dataMappingTestSetup.getTrinoTypeName(); if (typeName.equals("time(6)") || typeName.equals("timestamp(6)") || typeName.equals("timestamp(6) with time zone")) { return Optional.of(dataMappingTestSetup.asUnsupported()); } return Optional.of(dataMappingTestSetup); } @Test(dataProvider = "guessFieldTypesProvider") public void testGuessFieldTypes(String mongoValue, String trinoValue) { Document document = Document.parse(format("{\"test\":%s}", mongoValue)); assertUpdate("DROP TABLE IF EXISTS test.test_guess_field_type"); client.getDatabase("test").getCollection("test_guess_field_type").insertOne(document); assertThat(query("SELECT test FROM test.test_guess_field_type")) .matches("SELECT " + trinoValue); assertUpdate("DROP TABLE test.test_guess_field_type"); } @DataProvider public Object[][] guessFieldTypesProvider() { return new Object[][] { {"true", "true"}, // boolean -> boolean {"2147483647", "bigint '2147483647'"}, // int32 -> bigint {"{\"$numberLong\": \"9223372036854775807\"}", "9223372036854775807"}, // int64 -> bigint {"1.23", "double '1.23'"}, // double -> double {"{\"$date\": \"1970-01-01T00:00:00.000Z\"}", "timestamp '1970-01-01 00:00:00.000'"}, // date -> timestamp(3) {"'String type'", "varchar 'String type'"}, // string -> varchar {"{$binary: \"\",\"$type\": \"0\"}", "to_utf8('')"}, // binary -> varbinary {"{\"$oid\": \"6216f0c6c432d45190f25e7c\"}", "ObjectId('6216f0c6c432d45190f25e7c')"}, // objectid -> objectid {"[1]", "array[bigint '1']"}, // array with single type -> array {"{\"field\": \"object\"}", "CAST(row('object') AS row(field varchar))"}, // object -> row {"[9, \"test\"]", "CAST(row(9, 'test') AS row(_pos1 bigint, _pos2 varchar))"}, // array with multiple 
types -> row {"{\"$ref\":\"test_ref\",\"$id\":ObjectId(\"4e3f33de6266b5845052c02c\"),\"$db\":\"test_db\"}", "CAST(row('test_db', 'test_ref', ObjectId('4e3f33de6266b5845052c02c')) AS row(databasename varchar, collectionname varchar, id ObjectId))"}, // dbref -> row }; } @Test public void createTableWithEveryType() { String query = "" + "CREATE TABLE test_types_table AS " + "SELECT" + " 'foo' _varchar" + ", cast('bar' as varbinary) _varbinary" + ", cast(1 as bigint) _bigint" + ", 3.14E0 _double" + ", true _boolean" + ", DATE '1980-05-07' _date" + ", TIMESTAMP '1980-05-07 11:22:33.456' _timestamp" + ", ObjectId('ffffffffffffffffffffffff') _objectid" + ", JSON '{\"name\":\"alice\"}' _json" + ", cast(12.3 as decimal(30, 5)) _long_decimal"; assertUpdate(query, 1); MaterializedResult results = getQueryRunner().execute(getSession(), "SELECT * FROM test_types_table").toTestTypes(); assertEquals(results.getRowCount(), 1); MaterializedRow row = results.getMaterializedRows().get(0); assertEquals(row.getField(0), "foo"); assertEquals(row.getField(1), "bar".getBytes(UTF_8)); assertEquals(row.getField(2), 1L); assertEquals(row.getField(3), 3.14); assertEquals(row.getField(4), true); assertEquals(row.getField(5), LocalDate.of(1980, 5, 7)); assertEquals(row.getField(6), LocalDateTime.of(1980, 5, 7, 11, 22, 33, 456_000_000)); assertEquals(row.getField(8), "{\"name\":\"alice\"}"); assertEquals(row.getField(9), new BigDecimal("12.30000")); assertUpdate("DROP TABLE test_types_table"); assertFalse(getQueryRunner().tableExists(getSession(), "test_types_table")); } @Test public void testInsertWithEveryType() { String createSql = "" + "CREATE TABLE test_insert_types_table " + "(" + " vc varchar" + ", vb varbinary" + ", bi bigint" + ", d double" + ", b boolean" + ", dt date" + ", ts timestamp" + ", objid objectid" + ", _json json" + ")"; getQueryRunner().execute(getSession(), createSql); String insertSql = "" + "INSERT INTO test_insert_types_table " + "SELECT" + " 'foo' _varchar" + ", 
cast('bar' as varbinary) _varbinary" + ", cast(1 as bigint) _bigint" + ", 3.14E0 _double" + ", true _boolean" + ", DATE '1980-05-07' _date" + ", TIMESTAMP '1980-05-07 11:22:33.456' _timestamp" + ", ObjectId('ffffffffffffffffffffffff') _objectid" + ", JSON '{\"name\":\"alice\"}' _json"; getQueryRunner().execute(getSession(), insertSql); MaterializedResult results = getQueryRunner().execute(getSession(), "SELECT * FROM test_insert_types_table").toTestTypes(); assertEquals(results.getRowCount(), 1); MaterializedRow row = results.getMaterializedRows().get(0); assertEquals(row.getField(0), "foo"); assertEquals(row.getField(1), "bar".getBytes(UTF_8)); assertEquals(row.getField(2), 1L); assertEquals(row.getField(3), 3.14); assertEquals(row.getField(4), true); assertEquals(row.getField(5), LocalDate.of(1980, 5, 7)); assertEquals(row.getField(6), LocalDateTime.of(1980, 5, 7, 11, 22, 33, 456_000_000)); assertEquals(row.getField(8), "{\"name\":\"alice\"}"); assertUpdate("DROP TABLE test_insert_types_table"); assertFalse(getQueryRunner().tableExists(getSession(), "test_insert_types_table")); } @Test public void testJson() { assertUpdate("CREATE TABLE test_json (id INT, col JSON)"); assertUpdate("INSERT INTO test_json VALUES (1, JSON '{\"name\":\"alice\"}')", 1); assertQuery("SELECT json_extract_scalar(col, '$.name') FROM test_json WHERE id = 1", "SELECT 'alice'"); assertUpdate("INSERT INTO test_json VALUES (2, JSON '{\"numbers\":[1, 2, 3]}')", 1); assertQuery("SELECT json_extract(col, '$.numbers[0]') FROM test_json WHERE id = 2", "SELECT 1"); assertUpdate("INSERT INTO test_json VALUES (3, NULL)", 1); assertQuery("SELECT col FROM test_json WHERE id = 3", "SELECT NULL"); assertQueryFails( "CREATE TABLE test_json_scalar AS SELECT JSON '1' AS col", "Can't convert json to MongoDB Document.*"); assertQueryFails( "CREATE TABLE test_json_array AS SELECT JSON '[\"a\", \"b\", \"c\"]' AS col", "Can't convert json to MongoDB Document.*"); assertUpdate("DROP TABLE test_json"); } @Test 
// Verifies reading array columns written through CTAS: element access, NULL elements,
// nested arrays, and arrays of quoted strings.
public void testArrays() {
    assertUpdate("CREATE TABLE tmp_array1 AS SELECT ARRAY[1, 2, NULL] AS col", 1);
    assertQuery("SELECT col[2] FROM tmp_array1", "SELECT 2");
    assertQuery("SELECT col[3] FROM tmp_array1", "SELECT NULL");

    assertUpdate("CREATE TABLE tmp_array2 AS SELECT ARRAY[1.0E0, 2.5E0, 3.5E0] AS col", 1);
    assertQuery("SELECT col[2] FROM tmp_array2", "SELECT 2.5");

    assertUpdate("CREATE TABLE tmp_array3 AS SELECT ARRAY['puppies', 'kittens', NULL] AS col", 1);
    assertQuery("SELECT col[2] FROM tmp_array3", "SELECT 'kittens'");
    assertQuery("SELECT col[3] FROM tmp_array3", "SELECT NULL");

    assertUpdate("CREATE TABLE tmp_array4 AS SELECT ARRAY[TRUE, NULL] AS col", 1);
    assertQuery("SELECT col[1] FROM tmp_array4", "SELECT TRUE");
    assertQuery("SELECT col[2] FROM tmp_array4", "SELECT NULL");

    assertUpdate("CREATE TABLE tmp_array5 AS SELECT ARRAY[ARRAY[1, 2], NULL, ARRAY[3, 4]] AS col", 1);
    assertQuery("SELECT col[1][2] FROM tmp_array5", "SELECT 2");

    assertUpdate("CREATE TABLE tmp_array6 AS SELECT ARRAY[ARRAY['\"hi\"'], NULL, ARRAY['puppies']] AS col", 1);
    assertQuery("SELECT col[1][1] FROM tmp_array6", "SELECT '\"hi\"'");
    assertQuery("SELECT col[3][1] FROM tmp_array6", "SELECT 'puppies'");
}

// Arrays of DATE/TIMESTAMP round-trip; only non-nullness is asserted, not exact values.
@Test
public void testTemporalArrays() {
    assertUpdate("CREATE TABLE tmp_array7 AS SELECT ARRAY[DATE '2014-09-30'] AS col", 1);
    assertOneNotNullResult("SELECT col[1] FROM tmp_array7");
    assertUpdate("CREATE TABLE tmp_array8 AS SELECT ARRAY[TIMESTAMP '2001-08-22 03:04:05.321'] AS col", 1);
    assertOneNotNullResult("SELECT col[1] FROM tmp_array8");
}

// Schema guessing: a field whose only observed value is null is dropped from the inferred
// row type; a document with no inferable fields yields no columns at all.
@Test
public void testSkipUnknownTypes() {
    Document document1 = new Document("col", Document.parse("{\"key1\": \"value1\", \"key2\": null}"));
    client.getDatabase("test").getCollection("tmp_guess_schema1").insertOne(document1);
    assertQuery("SHOW COLUMNS FROM test.tmp_guess_schema1", "SELECT 'col', 'row(key1 varchar)', '', ''");
    assertQuery("SELECT col.key1 FROM test.tmp_guess_schema1", "SELECT 'value1'");

    Document document2 = new Document("col", new Document("key1", null));
    client.getDatabase("test").getCollection("tmp_guess_schema2").insertOne(document2);
    assertQueryReturnsEmptyResult("SHOW COLUMNS FROM test.tmp_guess_schema2");
}

// DBRef values surface as row(databaseName, collectionName, id); the id's Trino type
// depends on the stored BSON id type (see dbRefProvider).
@Test(dataProvider = "dbRefProvider")
public void testDBRef(Object objectId, String expectedValue, String expectedType) {
    Document document = Document.parse("{\"_id\":ObjectId(\"5126bbf64aed4daf9e2ab771\"),\"col1\":\"foo\"}");
    DBRef dbRef = new DBRef("test", "creators", objectId);
    document.append("creator", dbRef);

    assertUpdate("DROP TABLE IF EXISTS test.test_dbref");
    client.getDatabase("test").getCollection("test_dbref").insertOne(document);

    assertThat(query("SELECT creator.databaseName, creator.collectionName, creator.id FROM test.test_dbref"))
        .matches("SELECT varchar 'test', varchar 'creators', " + expectedValue);
    assertQuery(
        "SELECT typeof(creator) FROM test.test_dbref",
        "SELECT 'row(databaseName varchar, collectionName varchar, id " + expectedType + ")'");

    assertUpdate("DROP TABLE test.test_dbref");
}

// Each row: {BSON id value to store, expected SQL literal for that id, expected Trino type name}.
@DataProvider
public Object[][] dbRefProvider() {
    return new Object[][] {
        {"String type", "varchar 'String type'", "varchar"},
        {"BinData".getBytes(UTF_8), "to_utf8('BinData')", "varbinary"},
        {1234567890, "bigint '1234567890'", "bigint"},
        {true, "true", "boolean"},
        {12.3f, "double '12.3'", "double"},
        {new Date(0), "timestamp '1970-01-01 00:00:00.000'", "timestamp(3)"},
        {ImmutableList.of(1), "array[bigint '1']", "array(bigint)"},
        {new ObjectId("5126bc054aed4daf9e2ab772"), "ObjectId('5126bc054aed4daf9e2ab772')", "ObjectId"},
    };
}

// Map columns of various key/value types, plus MongoDB documents read back through
// VARCHAR columns (rendered as JSON-ish text).
@Test
public void testMaps() {
    assertUpdate("CREATE TABLE tmp_map1 AS SELECT MAP(ARRAY[0,1], ARRAY[2,NULL]) AS col", 1);
    assertQuery("SELECT col[0] FROM tmp_map1", "SELECT 2");
    assertQuery("SELECT col[1] FROM tmp_map1", "SELECT NULL");

    assertUpdate("CREATE TABLE tmp_map2 AS SELECT MAP(ARRAY[1.0E0], ARRAY[2.5E0]) AS col", 1);
    assertQuery("SELECT col[1.0] FROM tmp_map2", "SELECT 2.5");

    assertUpdate("CREATE TABLE tmp_map3 AS SELECT MAP(ARRAY['puppies'], ARRAY['kittens']) AS col", 1);
    assertQuery("SELECT col['puppies'] FROM tmp_map3", "SELECT 'kittens'");

    assertUpdate("CREATE TABLE tmp_map4 AS SELECT MAP(ARRAY[TRUE], ARRAY[FALSE]) AS col", "SELECT 1");
    assertQuery("SELECT col[TRUE] FROM tmp_map4", "SELECT FALSE");

    assertUpdate("CREATE TABLE tmp_map5 AS SELECT MAP(ARRAY[1.0E0], ARRAY[ARRAY[1, 2]]) AS col", 1);
    assertQuery("SELECT col[1.0][2] FROM tmp_map5", "SELECT 2");

    assertUpdate("CREATE TABLE tmp_map6 AS SELECT MAP(ARRAY[DATE '2014-09-30'], ARRAY[DATE '2014-09-29']) AS col", 1);
    assertOneNotNullResult("SELECT col[DATE '2014-09-30'] FROM tmp_map6");

    assertUpdate("CREATE TABLE tmp_map7 AS SELECT MAP(ARRAY[TIMESTAMP '2001-08-22 03:04:05.321'], ARRAY[TIMESTAMP '2001-08-22 03:04:05.321']) AS col", 1);
    assertOneNotNullResult("SELECT col[TIMESTAMP '2001-08-22 03:04:05.321'] FROM tmp_map7");

    // A nested document maps onto a declared MAP<VARCHAR, VARCHAR> column.
    assertUpdate("CREATE TABLE test.tmp_map8 (col MAP<VARCHAR, VARCHAR>)");
    client.getDatabase("test").getCollection("tmp_map8").insertOne(new Document(
        ImmutableMap.of("col", new Document(ImmutableMap.of("key1", "value1", "key2", "value2")))));
    assertQuery("SELECT col['key1'] FROM test.tmp_map8", "SELECT 'value1'");

    // The same nested document read through a VARCHAR column comes back as JSON text.
    assertUpdate("CREATE TABLE test.tmp_map9 (col VARCHAR)");
    client.getDatabase("test").getCollection("tmp_map9").insertOne(new Document(
        ImmutableMap.of("col", new Document(ImmutableMap.of("key1", "value1", "key2", "value2")))));
    assertQuery("SELECT col FROM test.tmp_map9", "SELECT '{\"key1\": \"value1\", \"key2\": \"value2\"}'");

    assertUpdate("CREATE TABLE test.tmp_map10 (col VARCHAR)");
    client.getDatabase("test").getCollection("tmp_map10").insertOne(new Document(
        ImmutableMap.of("col", ImmutableList.of(new Document(ImmutableMap.of("key1", "value1", "key2", "value2")),
            new Document(ImmutableMap.of("key3", "value3", "key4", "value4"))))));
    assertQuery("SELECT col FROM test.tmp_map10", "SELECT '[{\"key1\": \"value1\", \"key2\": \"value2\"}, {\"key3\": \"value3\", \"key4\": \"value4\"}]'");

    // Scalars and arrays stored in a VARCHAR column are also rendered as text.
    assertUpdate("CREATE TABLE test.tmp_map11 (col VARCHAR)");
    client.getDatabase("test").getCollection("tmp_map11").insertOne(new Document(
        ImmutableMap.of("col", 10)));
    assertQuery("SELECT col FROM test.tmp_map11", "SELECT '10'");

    assertUpdate("CREATE TABLE test.tmp_map12 (col VARCHAR)");
    client.getDatabase("test").getCollection("tmp_map12").insertOne(new Document(
        ImmutableMap.of("col", Arrays.asList(10, null, 11))));
    assertQuery("SELECT col FROM test.tmp_map12", "SELECT '[10, null, 11]'");
}

// Collection names containing '.' must be quotable and fully usable.
@Test
public void testCollectionNameContainsDots() {
    assertUpdate("CREATE TABLE \"tmp.dot1\" AS SELECT 'foo' _varchar", 1);
    assertQuery("SELECT _varchar FROM \"tmp.dot1\"", "SELECT 'foo'");
    assertUpdate("DROP TABLE \"tmp.dot1\"");
}

// ObjectId semantics: NULL handling, casting, equality, IS [NOT] DISTINCT FROM,
// joins, and grouping. Queries run both against the inline VALUES table and against
// a materialized copy, because the sink rewrites NULL ids (see comment below).
@Test
public void testObjectIds() {
    String values = "VALUES " +
        " (10, NULL, NULL)," +
        " (11, ObjectId('ffffffffffffffffffffffff'), ObjectId('ffffffffffffffffffffffff'))," +
        " (12, ObjectId('ffffffffffffffffffffffff'), ObjectId('aaaaaaaaaaaaaaaaaaaaaaaa'))," +
        " (13, ObjectId('000000000000000000000000'), ObjectId('000000000000000000000000'))," +
        " (14, ObjectId('ffffffffffffffffffffffff'), NULL)," +
        " (15, NULL, ObjectId('ffffffffffffffffffffffff'))";
    String inlineTable = format("(%s) AS t(i, one, two)", values);
    assertUpdate("DROP TABLE IF EXISTS tmp_objectid");
    assertUpdate("CREATE TABLE tmp_objectid AS SELECT * FROM " + inlineTable, 6);

    // IS NULL
    assertQuery("SELECT i FROM " + inlineTable + " WHERE one IS NULL", "VALUES 10, 15");
    assertQuery("SELECT i FROM tmp_objectid WHERE one IS NULL", "SELECT 0 WHERE false"); // NULL gets replaced with new unique ObjectId in MongoPageSink, this affects other test cases

    // CAST AS varchar
    assertQuery(
        "SELECT i, CAST(one AS varchar) FROM " + inlineTable + " WHERE i <= 13",
        "VALUES (10, NULL), (11, 'ffffffffffffffffffffffff'), (12, 'ffffffffffffffffffffffff'), (13, '000000000000000000000000')");

    // EQUAL
    assertQuery("SELECT i FROM tmp_objectid WHERE one = two", "VALUES 11, 13");
    assertQuery("SELECT i FROM tmp_objectid WHERE one = ObjectId('ffffffffffffffffffffffff')", "VALUES 11, 12, 14");

    // IS DISTINCT FROM
    assertQuery("SELECT i FROM " + inlineTable + " WHERE one IS DISTINCT FROM two", "VALUES 12, 14, 15");
    assertQuery("SELECT i FROM " + inlineTable + " WHERE one IS NOT DISTINCT FROM two", "VALUES 10, 11, 13");
    assertQuery("SELECT i FROM tmp_objectid WHERE one IS DISTINCT FROM two", "VALUES 10, 12, 14, 15");
    assertQuery("SELECT i FROM tmp_objectid WHERE one IS NOT DISTINCT FROM two", "VALUES 11, 13");

    // Join on ObjectId
    assertQuery(
        format("SELECT l.i, r.i FROM (%1$s) AS l(i, one, two) JOIN (%1$s) AS r(i, one, two) ON l.one = r.two", values),
        "VALUES (11, 11), (14, 11), (11, 15), (12, 15), (12, 11), (14, 15), (13, 13)");

    // Group by ObjectId (IS DISTINCT FROM)
    assertQuery("SELECT array_agg(i ORDER BY i) FROM " + inlineTable + " GROUP BY one", "VALUES ((10, 15)), ((11, 12, 14)), ((13))");
    assertQuery("SELECT i FROM " + inlineTable + " GROUP BY one, i", "VALUES 10, 11, 12, 13, 14, 15");

    // Group by Row(ObjectId) (ID DISTINCT FROM in @OperatorDependency)
    assertQuery(
        "SELECT r.i, count(*) FROM (SELECT CAST(row(one, i) AS row(one ObjectId, i bigint)) r FROM " + inlineTable + ") GROUP BY r",
        "VALUES (10, 1), (11, 1), (12, 1), (13, 1), (14, 1), (15, 1)");
    assertQuery(
        "SELECT r.x, CAST(r.one AS varchar), count(*) FROM (SELECT CAST(row(one, i / 3 * 3) AS row(one ObjectId, x bigint)) r FROM " + inlineTable + ") GROUP BY r",
        "VALUES (9, NULL, 1), (9, 'ffffffffffffffffffffffff', 1), (12, 'ffffffffffffffffffffffff', 2), (12, '000000000000000000000000', 1), (15, NULL, 1)");

    assertUpdate("DROP TABLE tmp_objectid");
}

// Mixed-case Mongo database/collection/field names are exposed lowercased and
// remain readable/writable/droppable through the lowercased identifiers.
@Test
public void testCaseInsensitive() throws Exception {
    MongoCollection<Document> collection = client.getDatabase("testCase").getCollection("testInsensitive");
    collection.insertOne(new Document(ImmutableMap.of("Name", "abc", "Value", 1)));

    assertQuery("SHOW SCHEMAS IN mongodb LIKE 'testcase'", "SELECT 'testcase'");
    assertQuery("SHOW TABLES IN testcase", "SELECT 'testinsensitive'");
    assertQuery(
        "SHOW COLUMNS FROM testcase.testInsensitive",
        "VALUES ('name', 'varchar', '', ''), ('value', 'bigint', '', '')");

    assertQuery("SELECT name, value FROM testcase.testinsensitive", "SELECT 'abc', 1");
    assertUpdate("INSERT INTO testcase.testinsensitive VALUES('def', 2)", 1);

    assertQuery("SELECT value FROM testcase.testinsensitive WHERE name = 'def'", "SELECT 2");
    assertUpdate("DROP TABLE testcase.testinsensitive");
}

// RENAME works on a table whose underlying schema/collection names are mixed-case.
@Test
public void testCaseInsensitiveRenameTable() {
    MongoCollection<Document> collection = client.getDatabase("testCase_RenameTable").getCollection("testInsensitive_RenameTable");
    collection.insertOne(new Document(ImmutableMap.of("value", 1)));
    assertQuery("SHOW TABLES IN testcase_renametable", "SELECT 'testinsensitive_renametable'");
    assertQuery("SELECT value FROM testcase_renametable.testinsensitive_renametable", "SELECT 1");

    assertUpdate("ALTER TABLE testcase_renametable.testinsensitive_renametable RENAME TO testcase_renametable.testinsensitive_renamed_table");
    assertQuery("SHOW TABLES IN testcase_renametable", "SELECT 'testinsensitive_renamed_table'");
    assertQuery("SELECT value FROM testcase_renametable.testinsensitive_renamed_table", "SELECT 1");

    assertUpdate("DROP TABLE testcase_renametable.testinsensitive_renamed_table");
}

// Mongo views with mixed-case schema and/or view names are queryable via
// lowercased identifiers, mirroring the table behavior above.
@Test
public void testNonLowercaseViewName() {
    // Case insensitive schema name
    MongoCollection<Document> collection = client.getDatabase("NonLowercaseSchema").getCollection("test_collection");
    collection.insertOne(new Document(ImmutableMap.of("Name", "abc", "Value", 1)));
    client.getDatabase("NonLowercaseSchema").createView("lowercase_view", "test_collection", ImmutableList.of());
    assertQuery("SELECT value FROM nonlowercaseschema.lowercase_view WHERE name = 'abc'", "SELECT 1");

    // Case insensitive view name
    collection = client.getDatabase("test_database").getCollection("test_collection");
    collection.insertOne(new Document(ImmutableMap.of("Name", "abc", "Value", 1)));
    client.getDatabase("test_database").createView("NonLowercaseView", "test_collection", ImmutableList.of());
    assertQuery("SELECT value FROM test_database.nonlowercaseview WHERE name = 'abc'", "SELECT 1");

    // Case insensitive schema and view name
    client.getDatabase("NonLowercaseSchema").createView("NonLowercaseView", "test_collection", ImmutableList.of());
    assertQuery("SELECT value FROM nonlowercaseschema.nonlowercaseview WHERE name = 'abc'", "SELECT 1");

    assertUpdate("DROP TABLE nonlowercaseschema.lowercase_view");
    assertUpdate("DROP TABLE test_database.nonlowercaseview");
    assertUpdate("DROP TABLE nonlowercaseschema.test_collection");
    assertUpdate("DROP TABLE test_database.test_collection");
    assertUpdate("DROP TABLE nonlowercaseschema.nonlowercaseview");
}

// Mongo views are listed/dropped like tables.
// NOTE(review): the SELECT here targets view_base, not test_view — it never reads
// through the view it creates; confirm against upstream whether this is intentional.
@Test
public void testSelectView() {
    assertUpdate("CREATE TABLE test.view_base AS SELECT 'foo' _varchar", 1);
    client.getDatabase("test").createView("test_view", "view_base", ImmutableList.of());
    assertQuery("SELECT * FROM test.view_base", "SELECT 'foo'");
    assertUpdate("DROP TABLE test.test_view");
    assertUpdate("DROP TABLE test.test_view");
}

// Boolean equality predicates filter correctly.
@Test
public void testBooleanPredicates() {
    assertUpdate("CREATE TABLE boolean_predicates(id integer, value boolean)");
    assertUpdate("INSERT INTO boolean_predicates VALUES(1, true)", 1);
    assertUpdate("INSERT INTO boolean_predicates VALUES(2, false)", 1);
    assertQuery("SELECT id FROM boolean_predicates WHERE value = true", "VALUES 1");
    assertQuery("SELECT id FROM boolean_predicates WHERE value = false", "VALUES 2");
    assertUpdate("DROP TABLE boolean_predicates");
}

// IS NULL / IS NOT NULL treat both a missing field and an explicit BSON null as NULL.
@Test
public void testNullPredicates() {
    assertUpdate("CREATE TABLE test.null_predicates(name varchar, value integer)");
    MongoCollection<Document> collection = client.getDatabase("test").getCollection("null_predicates");
    collection.insertOne(new Document(ImmutableMap.of("name", "abc", "value", 1)));
    collection.insertOne(new Document(ImmutableMap.of("name", "abcd"))); // field absent
    collection.insertOne(new Document(Document.parse("{\"name\": \"abcde\", \"value\": null}"))); // explicit null
    // rand() = 42 prevents predicate pushdown, forcing engine-side NULL evaluation too.
    assertQuery("SELECT count(*) FROM test.null_predicates WHERE value IS NULL OR rand() = 42", "SELECT 2");
    assertQuery("SELECT count(*) FROM test.null_predicates WHERE value IS NULL", "SELECT 2");
    assertQuery("SELECT count(*) FROM test.null_predicates WHERE value IS NOT NULL", "SELECT 1");
    assertUpdate("DROP TABLE test.null_predicates");
}

// LIMIT pushdown boundaries: pushed for values up to Integer.MAX_VALUE, not beyond;
// LIMIT 0 must not be translated to Mongo's cursor.limit(0) (which means "no limit").
@Test
public void testLimitPushdown() {
    assertThat(query("SELECT name FROM nation LIMIT 30")).isFullyPushedDown(); // Use high limit for result determinism

    // Make sure LIMIT 0 returns empty result because cursor.limit(0) means no limit in MongoDB
    assertThat(query("SELECT name FROM nation LIMIT 0")).returnsEmptyResult();

    // MongoDB doesn't support limit number greater than integer max
    assertThat(query("SELECT name FROM nation LIMIT 2147483647")).isFullyPushedDown();
    assertThat(query("SELECT name FROM nation LIMIT 2147483648")).isNotFullyPushedDown(LimitNode.class);
}

// Base-class test disabled for this connector (requires multi-document transactions).
@Override
public void testAddColumnConcurrently() {
    // TODO: Enable after supporting multi-document transaction https://www.mongodb.com/docs/manual/core/transactions/
    throw new SkipException("TODO");
}

// Mongo enforces a 120-byte limit on the fully qualified namespace; renaming to a name
// exactly at the limit succeeds, one byte over fails with the expected message.
@Test
public void testRenameTableTo120bytesTableName() {
    String sourceTableName = "test_rename_source_" + randomTableSuffix();
    assertUpdate("CREATE TABLE " + sourceTableName + " AS SELECT 123 x", 1);

    // The new table has 120 bytes as fully qualified identifier (あ is 3 bytes char)
    String targetTableName = "a".repeat(120 - "tpch.".length() - 3) + "あ";
    assertThat(targetTableName.length()).isLessThan(120);
    assertUpdate("ALTER TABLE " + sourceTableName + " RENAME TO \"" + targetTableName + "\"");
    assertQuery("SELECT x FROM \"" + targetTableName + "\"", "VALUES 123");
    assertUpdate("DROP TABLE \"" + targetTableName + "\"");

    targetTableName = targetTableName + "z";
    assertUpdate("CREATE TABLE " + sourceTableName + " AS SELECT 123 x", 1);
    assertQueryFails(
        "ALTER TABLE " + sourceTableName + " RENAME TO \"" + targetTableName + "\"",
        "Qualified identifier name must be shorter than or equal to '120' bytes: .*");
    assertUpdate("DROP TABLE \"" + sourceTableName + "\"");
}

// Connector-specific limits/messages consumed by the base test class.
@Override
protected OptionalInt maxSchemaNameLength() {
    return OptionalInt.of(63);
}

@Override
protected void verifySchemaNameLengthFailurePermissible(Throwable e) {
    assertThat(e).hasMessageContaining("Invalid database name");
}

@Override
protected OptionalInt maxTableNameLength() {
    return OptionalInt.of(120 - "tpch.".length());
}

@Override
protected void verifyTableNameLengthFailurePermissible(Throwable e) {
    assertThat(e).hasMessageMatching(".*fully qualified namespace .* is too long.*|Qualified identifier name must be shorter than or equal to '120'.*");
}

// Asserts the query yields exactly one row with exactly one non-null field.
private void assertOneNotNullResult(String query) {
    MaterializedResult results = getQueryRunner().execute(getSession(), query).toTestTypes();
    assertEquals(results.getRowCount(), 1);
    assertEquals(results.getMaterializedRows().get(0).getFieldCount(), 1);
    assertNotNull(results.getMaterializedRows().get(0).getField(0));
}
}
{ "content_hash": "d5e9b742f16f02f6cafb81211b92f7a4", "timestamp": "", "source": "github", "line_count": 633, "max_line_length": 267, "avg_line_length": 47.985781990521325, "alnum_prop": 0.6301234567901235, "repo_name": "smartnews/presto", "id": "eed80afd240ce7c78199103d924471d01ec719b1", "size": "30379", "binary": false, "copies": "1", "ref": "refs/heads/smartnews", "path": "plugin/trino-mongodb/src/test/java/io/trino/plugin/mongodb/BaseMongoConnectorTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "50268" }, { "name": "CSS", "bytes": "13515" }, { "name": "Dockerfile", "bytes": "1967" }, { "name": "Groovy", "bytes": "1702" }, { "name": "HTML", "bytes": "30842" }, { "name": "Java", "bytes": "61596519" }, { "name": "JavaScript", "bytes": "232261" }, { "name": "PLSQL", "bytes": "85" }, { "name": "Python", "bytes": "5266" }, { "name": "Scala", "bytes": "10145" }, { "name": "Shell", "bytes": "51516" }, { "name": "Smarty", "bytes": "1938" } ], "symlink_target": "" }
// Type declarations for the class names exported by the accompanying stylesheet
// (presumably Button.scss compiled as a CSS module — each export is the generated
// class-name string; verify against the build's css-modules configuration).
export const button: string;   // base button class
export const disabled: string; // disabled-state class
export const active: string;   // active-state class
export const alert: string;    // alert-state class
{ "content_hash": "007b8198fc3a8c555cd3a89c8b380090", "timestamp": "", "source": "github", "line_count": 4, "max_line_length": 30, "avg_line_length": 29.25, "alnum_prop": 0.7948717948717948, "repo_name": "uppsaladatavetare/foobar-kiosk", "id": "5bab570c067656fb649a9864f622553617685d72", "size": "117", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/styles/primary/components/Button.scss.d.ts", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "11244" }, { "name": "HTML", "bytes": "521" }, { "name": "JavaScript", "bytes": "48" }, { "name": "Python", "bytes": "7235" }, { "name": "Shell", "bytes": "2721" }, { "name": "TypeScript", "bytes": "52149" } ], "symlink_target": "" }
import os
from util import *
import argparse


def _build_arg_parser():
    """Construct the CLI parser for the custom entity-extraction options."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--lang", type=str, help="language/dir containing files", default="EN")
    parser.add_argument("--min_sup", type=int, help="minimum support for considering entity", default=100)
    parser.add_argument("--min_perc", type=int, help="minimum percentage for considering entity", default=100)
    parser.add_argument("--skip_head_token_stopword", action="store_true", default=False,
                        help="whether entities starting w/ stopwords should be ignored")
    parser.add_argument("--skip_end_token_stopword", action="store_true", default=False,
                        help="whether entities ending w/ stopwords should be ignored")
    parser.add_argument("--skip_high_prop_stopword", action="store_true", default=False,
                        help="whether entities w/ high proportion of stopwords should be ignored")
    parser.add_argument("--no_sep", action="store_true", default=False,
                        help="whether entities w/ sep chars should be ignored")
    parser.add_argument("--only_alpha", action="store_true", default=False,
                        help="whether only alphanumeric (skipping only numeric) entities should be extracted")
    return parser


def main():
    """Assemble the requested entity filters and run extraction for one language dir."""
    opts = _build_arg_parser().parse_args()

    entities_path = os.path.join(opts.lang, 'entities')
    result_path = os.path.join(opts.lang, 'wiki_custom.txt')

    filters = []

    # Stopword-based filters: a single joint predicate covers all three flags.
    stopwords = load_stopwords(os.path.join(opts.lang, 'stopwords.txt'))
    stopword_flags = [
        opts.skip_head_token_stopword,
        opts.skip_end_token_stopword,
        opts.skip_high_prop_stopword,
    ]
    if any(stopword_flags):
        filters.append(get_stopwords_joint_pred(stopwords, *stopword_flags))

    if opts.only_alpha:
        filters.append(only_alpha_pred)
    if opts.no_sep:
        filters.append(no_separator_pred)

    extract_entities(entities_path, filters, result_path, opts.min_perc, opts.min_sup, opts.lang)


if __name__ == '__main__':
    main()
{ "content_hash": "2df1000f0aa6413da4f9aa9e29f47431", "timestamp": "", "source": "github", "line_count": 41, "max_line_length": 163, "avg_line_length": 48.53658536585366, "alnum_prop": 0.7035175879396985, "repo_name": "shangjingbo1226/AutoPhrase", "id": "6705a7dc7dbaa59e9528e0a90c5b0d6b00281ce8", "size": "1990", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tools/wiki_entities/custom_extract_entities.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "5706" }, { "name": "C++", "bytes": "114834" }, { "name": "Dockerfile", "bytes": "1977" }, { "name": "Java", "bytes": "41322" }, { "name": "Makefile", "bytes": "828" }, { "name": "Perl", "bytes": "52441" }, { "name": "Python", "bytes": "6460" }, { "name": "Shell", "bytes": "27998" } ], "symlink_target": "" }
namespace Santase.AI.ConsoleWebPlayer.Common
{
    /// <summary>
    /// Constant tuning values used by the console web player.
    /// </summary>
    public class WebPlayerConstants
    {
        /// <summary>Display name reported for this bot.</summary>
        public const string BotName = "Console Web Player";

        /// <summary>Minimum points before the player considers closing the game.</summary>
        public const int MinimumPointsForClosingGame = 61;

        // Trump-card count threshold — presumably the hand size at which the player
        // considers itself trump-rich; confirm against the call sites.
        public const int HasEnoughTrumpCards = 5;

        /// <summary>Point total treated as "close to victory".</summary>
        public const int PointsCloseToVictory = 55;
    }
}
{ "content_hash": "12f74b618fb74897ab9c7d736be94a40", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 59, "avg_line_length": 23.071428571428573, "alnum_prop": 0.6873065015479877, "repo_name": "tovaneedsa/Dr.-Radeva-mrazi-voda", "id": "ca921408591f4185500d2d28f92f74c41c5c20fe", "size": "325", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "SantaseGameEngine/Source/AI/Santase.AI.ConsoleWebPlayer/Common/ConsoleWebPlayer.Constants.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "287974" } ], "symlink_target": "" }
-- Down-migration: drops the per-recipe_id lookup indexes on the recipe child
-- tables (presumably added by the matching up-migration for FK lookups —
-- confirm against 0005_fk_indices.up.sql). Runs in a single transaction.
BEGIN;

DROP INDEX recipe_note_recipe_id_idx;
DROP INDEX recipe_tag_recipe_id_idx;
DROP INDEX recipe_rating_recipe_id_idx;
DROP INDEX recipe_image_recipe_id_idx;

COMMIT;
{ "content_hash": "f977ade3c47e8f87729370a8c815b24f", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 39, "avg_line_length": 21.25, "alnum_prop": 0.7823529411764706, "repo_name": "chadweimer/gomp", "id": "7adb20be39b43068c9fa0a25cd30cd724a2cc25d", "size": "170", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "db/migrations/postgres/0005_fk_indices.down.sql", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "5824" }, { "name": "Dockerfile", "bytes": "2272" }, { "name": "Go", "bytes": "94221" }, { "name": "HTML", "bytes": "1139" }, { "name": "JavaScript", "bytes": "860" }, { "name": "Makefile", "bytes": "5428" }, { "name": "PLpgSQL", "bytes": "22178" }, { "name": "Shell", "bytes": "90" }, { "name": "TypeScript", "bytes": "140605" } ], "symlink_target": "" }
import time
from datetime import datetime, timedelta, timezone

import jwt
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization


def _load_signing_key():
    """Read the PEM private key and re-serialize it as unencrypted PKCS#8 PEM bytes."""
    with open('/home/run/priv.pem', 'rb') as key_file:
        key = serialization.load_pem_private_key(key_file.read(), password=None, backend=default_backend())
    return key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.NoEncryption(),
    )


def main():
    """Sign a one-hour RS256 JWT claim, write it to disk, then idle forever."""
    signing_key = _load_signing_key()

    issued = datetime.now(timezone.utc)
    claims = {'exp': issued + timedelta(hours=1), 'nbf': issued, 'aud': 'test', 'name': 'datadog'}
    token = jwt.encode(claims, signing_key, algorithm='RS256')

    with open('/home/jwt/claim', 'wb') as claim_file:
        claim_file.write(token)

    # Block forever so the fixture process stays alive.
    while True:
        time.sleep(10)


if __name__ == '__main__':
    main()
{ "content_hash": "160316af9d37430e562c61bc32495746", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 108, "avg_line_length": 27.58823529411765, "alnum_prop": 0.6567164179104478, "repo_name": "DataDog/integrations-core", "id": "187952ff7ebb93f2cc850b449f50b153c7038124", "size": "938", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "vault/tests/docker/provider/run/main.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Batchfile", "bytes": "578" }, { "name": "COBOL", "bytes": "12312" }, { "name": "Dockerfile", "bytes": "22998" }, { "name": "Erlang", "bytes": "15518" }, { "name": "Go", "bytes": "6988" }, { "name": "HCL", "bytes": "4080" }, { "name": "HTML", "bytes": "1318" }, { "name": "JavaScript", "bytes": "1817" }, { "name": "Kotlin", "bytes": "430" }, { "name": "Lua", "bytes": "3489" }, { "name": "PHP", "bytes": "20" }, { "name": "PowerShell", "bytes": "2398" }, { "name": "Python", "bytes": "13020828" }, { "name": "Roff", "bytes": "359" }, { "name": "Ruby", "bytes": "241" }, { "name": "Scala", "bytes": "7000" }, { "name": "Shell", "bytes": "83227" }, { "name": "Swift", "bytes": "203" }, { "name": "TSQL", "bytes": "29972" }, { "name": "TypeScript", "bytes": "1019" } ], "symlink_target": "" }
import { or } from "@entity-space/criteria";
import { EntitySelection } from "./entity-selection";
import { EntityQuery } from "./entity-query";
import { QueryPaging } from "./query-paging";

// [todo] clean up this method, it is really hard to read and hacked together.
/**
 * Tries to merge two queries into one equivalent query; returns false when the
 * pair cannot be represented as a single query.
 */
export function mergeQuery(a: EntityQuery, b: EntityQuery): false | EntityQuery {
    // Queries over different schemas can never merge.
    if (a.getEntitySchema().getId() !== b.getEntitySchema().getId()) {
        return false;
    }

    // Differing options make the queries incompatible.
    if (!a.getOptions().equivalent(b.getOptions())) {
        return false;
    }

    const pagingA = a.getPaging();
    const pagingB = b.getPaging();
    const equivalentCriteria = a.getCriteria().equivalent(b.getCriteria());
    const equivalentSelection = a.getSelection().equivalent(b.getSelection());

    // Paged queries: every path in this branch returns; the code after it only
    // runs when neither query has paging.
    if (pagingA || pagingB) {
        if (pagingA && !pagingB) {
            // Only A is paged — B (unpaged) subsumes A only if everything else matches.
            if (equivalentCriteria && equivalentSelection) {
                return b;
            } else {
                return false;
            }
        } else if (!pagingA && pagingB) {
            // Mirror case: only B is paged.
            if (equivalentCriteria && equivalentSelection) {
                return a;
            } else {
                return false;
            }
        } else if (pagingA && pagingB) {
            // Both paged — only mergeable when the criteria already match.
            if (equivalentCriteria) {
                if (pagingA.equivalent(pagingB)) {
                    if (equivalentSelection) {
                        return a; // could also return b, as everything is equivalent
                    } else {
                        // Identical paging & criteria: merge just the selections.
                        return new EntityQuery({
                            entitySchema: a.getEntitySchema(),
                            options: a.getOptions(),
                            criteria: a.getCriteria(),
                            selection: a.getSelection().merge(b.getSelection()),
                            paging: a.getPaging(),
                        });
                    }
                } else {
                    // Paging differs; mergeable only with same sort, same selection,
                    // and page ranges that join into one contiguous range.
                    if (pagingA.equivalentSort(pagingB)) {
                        if (equivalentSelection) {
                            const mergedRange = pagingA.mergeRange(pagingB);
                            if (mergedRange) {
                                return new EntityQuery({
                                    entitySchema: a.getEntitySchema(),
                                    options: a.getOptions(),
                                    criteria: a.getCriteria(),
                                    selection: a.getSelection(),
                                    paging: new QueryPaging({
                                        sort: pagingA.getSort(),
                                        from: mergedRange.getFrom()?.value,
                                        to: mergedRange.getTo()?.value,
                                    }),
                                });
                            } else {
                                return false;
                            }
                        } else {
                            return false;
                        }
                    } else {
                        return false;
                    }
                }
            } else {
                return false;
            }
        }
    }

    // From here on neither query is paged, so paging is undefined.
    const paging = pagingA;
    const options = a.getOptions();
    const entitySchema = a.getEntitySchema();

    if (equivalentCriteria) {
        // same identity, just merge expansions
        return new EntityQuery({
            entitySchema,
            criteria: a.getCriteria(),
            selection: EntitySelection.mergeValues(a.getEntitySchema(), a.getSelectionValue(), b.getSelectionValue()),
            options,
            paging,
        });
    }

    const mergedCriteria = a.getCriteria().merge(b.getCriteria());

    if (equivalentSelection) {
        if (mergedCriteria !== false) {
            // Criteria merged into a single criterion.
            return new EntityQuery({
                entitySchema,
                options,
                criteria: mergedCriteria,
                selection: a.getSelectionValue(),
            });
        } else {
            // Criteria don't merge directly — fall back to an OR of both.
            return new EntityQuery({
                entitySchema,
                criteria: or(a.getCriteria(), b.getCriteria()),
                selection: a.getSelectionValue(),
                options,
            });
        }
    }

    // Different criteria AND different selections: not mergeable.
    return false;
}
{ "content_hash": "4224c5d417dd7dca9ab6e0a9333c4efc", "timestamp": "", "source": "github", "line_count": 117, "max_line_length": 118, "avg_line_length": 37.572649572649574, "alnum_prop": 0.4335759781619654, "repo_name": "kukeiko/entity-space", "id": "bcf990dbea4f240e484a6feef6c76a2a24cc5f18", "size": "4396", "binary": false, "copies": "1", "ref": "refs/heads/dev", "path": "packages/core/src/lib/query/merge-query.fn.ts", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "15008" }, { "name": "JavaScript", "bytes": "6925" }, { "name": "SCSS", "bytes": "2230" }, { "name": "TypeScript", "bytes": "578087" } ], "symlink_target": "" }
// Copyright (C) 2013 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.client.change; import com.google.gerrit.client.ConfirmationCallback; import com.google.gerrit.client.ConfirmationDialog; import com.google.gerrit.client.Gerrit; import com.google.gerrit.client.account.AccountInfo; import com.google.gerrit.client.changes.ChangeApi; import com.google.gerrit.client.changes.ChangeInfo; import com.google.gerrit.client.changes.ChangeInfo.ApprovalInfo; import com.google.gerrit.client.changes.ChangeInfo.LabelInfo; import com.google.gerrit.client.changes.Util; import com.google.gerrit.client.rpc.GerritCallback; import com.google.gerrit.client.rpc.NativeMap; import com.google.gerrit.client.rpc.NativeString; import com.google.gerrit.client.rpc.Natives; import com.google.gerrit.client.ui.HintTextBox; import com.google.gerrit.reviewdb.client.Change; import com.google.gwt.core.client.GWT; import com.google.gwt.core.client.JavaScriptObject; import com.google.gwt.core.client.JsArray; import com.google.gwt.dom.client.Element; import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.event.dom.client.KeyCodes; import com.google.gwt.event.dom.client.KeyDownEvent; import com.google.gwt.event.dom.client.KeyDownHandler; import com.google.gwt.event.logical.shared.SelectionEvent; import com.google.gwt.event.logical.shared.SelectionHandler; import com.google.gwt.uibinder.client.UiBinder; import 
com.google.gwt.uibinder.client.UiField; import com.google.gwt.uibinder.client.UiHandler; import com.google.gwt.user.client.rpc.StatusCodeException; import com.google.gwt.user.client.ui.Button; import com.google.gwt.user.client.ui.Composite; import com.google.gwt.user.client.ui.HTMLPanel; import com.google.gwt.user.client.ui.SuggestBox; import com.google.gwt.user.client.ui.SuggestBox.DefaultSuggestionDisplay; import com.google.gwt.user.client.ui.SuggestOracle.Suggestion; import com.google.gwt.user.client.ui.UIObject; import com.google.gwtexpui.safehtml.client.SafeHtml; import com.google.gwtexpui.safehtml.client.SafeHtmlBuilder; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; /** Add reviewers. */ public class Reviewers extends Composite { interface Binder extends UiBinder<HTMLPanel, Reviewers> {} private static final Binder uiBinder = GWT.create(Binder.class); @UiField Element reviewersText; @UiField Button openForm; @UiField Element form; @UiField Element error; @UiField(provided = true) SuggestBox suggestBox; private ChangeScreen2.Style style; private Element ccText; private RestReviewerSuggestOracle reviewerSuggestOracle; private HintTextBox nameTxtBox; private Change.Id changeId; private boolean submitOnSelection; Reviewers() { reviewerSuggestOracle = new RestReviewerSuggestOracle(); nameTxtBox = new HintTextBox(); suggestBox = new SuggestBox(reviewerSuggestOracle, nameTxtBox); initWidget(uiBinder.createAndBindUi(this)); nameTxtBox.setVisibleLength(55); nameTxtBox.setHintText(Util.C.approvalTableAddReviewerHint()); nameTxtBox.addKeyDownHandler(new KeyDownHandler() { @Override public void onKeyDown(KeyDownEvent e) { submitOnSelection = false; if (e.getNativeEvent().getKeyCode() == KeyCodes.KEY_ESCAPE) { onCancel(null); } else if (e.getNativeEvent().getKeyCode() == KeyCodes.KEY_ENTER) { if (((DefaultSuggestionDisplay) suggestBox.getSuggestionDisplay()) .isSuggestionListShowing()) { submitOnSelection = true; } else { 
onAdd(null); } } } }); suggestBox.addSelectionHandler(new SelectionHandler<Suggestion>() { @Override public void onSelection(SelectionEvent<Suggestion> event) { nameTxtBox.setFocus(true); if (submitOnSelection) { onAdd(null); } } }); } void init(ChangeScreen2.Style style, Element ccText) { this.style = style; this.ccText = ccText; } void set(ChangeInfo info) { this.changeId = info.legacy_id(); display(info); reviewerSuggestOracle.setChange(changeId); openForm.setVisible(Gerrit.isSignedIn()); } @UiHandler("openForm") void onOpenForm(ClickEvent e) { onOpenForm(); } void onOpenForm() { UIObject.setVisible(form, true); UIObject.setVisible(error, false); openForm.setVisible(false); suggestBox.setFocus(true); } @UiHandler("add") void onAdd(ClickEvent e) { String reviewer = suggestBox.getText(); if (!reviewer.isEmpty()) { addReviewer(reviewer, false); } } @UiHandler("addme") void onAddMe(ClickEvent e) { String accountId = String.valueOf(Gerrit.getUserAccountInfo()._account_id()); addReviewer(accountId, false); } @UiHandler("cancel") void onCancel(ClickEvent e) { openForm.setVisible(true); UIObject.setVisible(form, false); suggestBox.setFocus(false); } private void addReviewer(final String reviewer, boolean confirmed) { ChangeApi.reviewers(changeId.get()).post( PostInput.create(reviewer, confirmed), new GerritCallback<PostResult>() { public void onSuccess(PostResult result) { nameTxtBox.setEnabled(true); if (result.confirm()) { askForConfirmation(result.error()); } else if (result.error() != null) { UIObject.setVisible(error, true); error.setInnerText(result.error()); } else { UIObject.setVisible(error, false); error.setInnerText(""); nameTxtBox.setText(""); if (result.reviewers() != null && result.reviewers().length() > 0) { updateReviewerList(); } } } private void askForConfirmation(String text) { new ConfirmationDialog( Util.C.approvalTableAddManyReviewersConfirmationDialogTitle(), new SafeHtmlBuilder().append(text), new ConfirmationCallback() { @Override public void 
onOk() { addReviewer(reviewer, true); } }).center(); } @Override public void onFailure(Throwable err) { UIObject.setVisible(error, true); error.setInnerText(err instanceof StatusCodeException ? ((StatusCodeException) err).getEncodedResponse() : err.getMessage()); nameTxtBox.setEnabled(true); } }); } private void updateReviewerList() { ChangeApi.detail(changeId.get(), new GerritCallback<ChangeInfo>() { @Override public void onSuccess(ChangeInfo result) { display(result); } }); } private void display(ChangeInfo info) { Map<Integer, AccountInfo> r = new HashMap<>(); Map<Integer, AccountInfo> cc = new HashMap<>(); for (LabelInfo label : Natives.asList(info.all_labels().values())) { if (label.all() != null) { for (ApprovalInfo ai : Natives.asList(label.all())) { (ai.value() != 0 ? r : cc).put(ai._account_id(), ai); } } } for (Integer i : r.keySet()) { cc.remove(i); } r.remove(info.owner()._account_id()); cc.remove(info.owner()._account_id()); Set<Integer> removable = new HashSet<>(); if (info.removable_reviewers() != null) { for (AccountInfo a : Natives.asList(info.removable_reviewers())) { removable.add(a._account_id()); } } Map<Integer, VotableInfo> votable = votable(info); SafeHtml rHtml = Labels.formatUserList(style, r.values(), removable, votable); SafeHtml ccHtml = Labels.formatUserList(style, cc.values(), removable, votable); reviewersText.setInnerSafeHtml(rHtml); ccText.setInnerSafeHtml(ccHtml); } private static Map<Integer, VotableInfo> votable(ChangeInfo change) { Map<Integer, VotableInfo> d = new HashMap<>(); for (String name : change.labels()) { LabelInfo label = change.label(name); if (label.all() != null) { for (ApprovalInfo ai : Natives.asList(label.all())) { int id = ai._account_id(); VotableInfo ad = d.get(id); if (ad == null) { ad = new VotableInfo(); d.put(id, ad); } if (ai.has_value()) { ad.votable(name); } } } } return d; } public static class PostInput extends JavaScriptObject { public static PostInput create(String reviewer, boolean confirmed) { 
PostInput input = createObject().cast(); input.init(reviewer, confirmed); return input; } private native void init(String reviewer, boolean confirmed) /*-{ this.reviewer = reviewer; if (confirmed) { this.confirmed = true; } }-*/; protected PostInput() { } } public static class ReviewerInfo extends AccountInfo { final Set<String> approvals() { return Natives.keys(_approvals()); } final native String approval(String l) /*-{ return this.approvals[l]; }-*/; private final native NativeMap<NativeString> _approvals() /*-{ return this.approvals; }-*/; protected ReviewerInfo() { } } public static class PostResult extends JavaScriptObject { public final native JsArray<ReviewerInfo> reviewers() /*-{ return this.reviewers; }-*/; public final native boolean confirm() /*-{ return this.confirm || false; }-*/; public final native String error() /*-{ return this.error; }-*/; protected PostResult() { } } }
{ "content_hash": "89f76fa4b45a4bf2b4e2f529613f042f", "timestamp": "", "source": "github", "line_count": 312, "max_line_length": 95, "avg_line_length": 33.00320512820513, "alnum_prop": 0.662425949305623, "repo_name": "midnightradio/gerrit", "id": "10a140dc173986c7e635fefab664e508fe089af1", "size": "10297", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "gerrit-gwtui/src/main/java/com/google/gerrit/client/change/Reviewers.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "53403" }, { "name": "GAP", "bytes": "4285" }, { "name": "Go", "bytes": "1865" }, { "name": "Java", "bytes": "8144236" }, { "name": "JavaScript", "bytes": "2306" }, { "name": "Perl", "bytes": "9943" }, { "name": "Prolog", "bytes": "17712" }, { "name": "Python", "bytes": "12549" }, { "name": "Shell", "bytes": "38586" } ], "symlink_target": "" }
import { Template } from 'meteor/templating'; import { Beaches } from '/imports/api/items/beach/beach-item.js'; import { Comments, CommentsSchema } from '/imports/api/comments/CommentsCollection.js'; import { Meteor } from 'meteor/meteor'; import { FlowRouter } from 'meteor/kadira:flow-router'; import { _ } from 'meteor/underscore'; import { Profiles } from '/imports/api/profiles/ProfileCollection.js'; Template.Beach_Page.onCreated(function onCreated() { this.subscribe('Beaches'); this.context = CommentsSchema.namedContext('Beach_Page'); this.subscribe('Comments'); this.subscribe('Profiles'); }); Template.Beach_Page.helpers({ bea: () => Beaches.findOne({ _id: FlowRouter.getParam('_id') }), Comments() { return Comments.find({ itemid: FlowRouter.getParam('_id') }); }, profpath() { return Meteor.user().profile.name; }, displayDate() { return moment(this.createdAt).format('MM/DD/YYYY, HH:MM'); }, inBucketList() { const usernameCurrent = Meteor.user().profile.name; const bucketlist = Profiles.findOne({ username: usernameCurrent }).bucketlist; return _.contains(bucketlist, FlowRouter.getParam('_id')); }, profimage() { const user = Meteor.user().profile.name; const profile = Profiles.findOne({ username: user }); if (profile.image) { return true; } return false; }, image() { const user = Meteor.user().profile.name; const profile = Profiles.findOne({ username: user }); console.log(profile.image); return profile.image; }, }); Template.Beach_Page.events({ 'submit .beach-comment-form'(event, instance) { event.preventDefault(); // Get name (text field) const username = Meteor.user().profile.name; const about = event.target.about.value; const itemid = FlowRouter.getParam('_id'); const newItemData = { username, about, itemid }; instance.context.validate(newItemData); Comments.insert(newItemData); event.target.reset(); }, 'click .beach-bucket'(event) { event.preventDefault(); const usernameCurrent = Meteor.user().profile.name; const profileName = Profiles.findOne({ username: 
usernameCurrent }); const profileId = profileName._id; const itemid = FlowRouter.getParam('_id'); Profiles.update(profileId, { $push: { bucketlist: itemid } }); }, 'click .user-profile'(event) { const clickedUser = event.target.closest('a'); const clickedUserName = $(clickedUser).attr('data-id'); FlowRouter.go('Public_Profile_Page', { username: clickedUserName }); }, });
{ "content_hash": "cbfafa94790c154a6770820e25d792ed", "timestamp": "", "source": "github", "line_count": 74, "max_line_length": 87, "avg_line_length": 34.54054054054054, "alnum_prop": 0.6764475743348983, "repo_name": "chasehawaii/chasehawaii", "id": "a1c6ed00193ab35f03bfa56258459e1c004dc452", "size": "2556", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/imports/ui/pages/items/beach-page.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "8428" }, { "name": "HTML", "bytes": "71465" }, { "name": "JavaScript", "bytes": "852002" } ], "symlink_target": "" }
Blackfriday =========== Blackfriday is a [Markdown][1] processor implemented in [Go][2]. It is paranoid about its input (so you can safely feed it user-supplied data), it is fast, it supports common extensions (tables, smart punctuation substitutions, etc.), and it is safe for all utf-8 (unicode) input. HTML output is currently supported, along with Smartypants extensions. An experimental LaTeX output engine is also included. It started as a translation from C of [upskirt][3]. Installation ------------ Blackfriday is compatible with Go 1. If you are using an older release of Go, consider using v1.1 of blackfriday, which was based on the last stable release of Go prior to Go 1. You can find it as a tagged commit on github. With Go 1 and git installed: go get github.com/russross/blackfriday will download, compile, and install the package into your `$GOROOT` directory hierarchy. Alternatively, you can import it into a project: import "github.com/russross/blackfriday" and when you build that project with `go build`, blackfriday will be downloaded and installed automatically. For basic usage, it is as simple as getting your input into a byte slice and calling: output := blackfriday.MarkdownBasic(input) This renders it with no extensions enabled. To get a more useful feature set, use this instead: output := blackfriday.MarkdownCommon(input) If you want to customize the set of options, first get a renderer (currently either the HTML or LaTeX output engines), then use it to call the more general `Markdown` function. For examples, see the implementations of `MarkdownBasic` and `MarkdownCommon` in `markdown.go`. You can also check out `blackfriday-tool` for a more complete example of how to use it. Download and install it using: go get github.com/russross/blackfriday-tool This is a simple command-line tool that allows you to process a markdown file using a standalone program. 
You can also browse the source directly on github if you are just looking for some example code: * <http://github.com/russross/blackfriday-tool> Note that if you have not already done so, installing `blackfriday-tool` will be sufficient to download and install blackfriday in addition to the tool itself. The tool binary will be installed in `$GOROOT/bin`. This is a statically-linked binary that can be copied to wherever you need it without worrying about dependencies and library versions. Features -------- All features of upskirt are supported, including: * **Compatibility**. The Markdown v1.0.3 test suite passes with the `--tidy` option. Without `--tidy`, the differences are mostly in whitespace and entity escaping, where blackfriday is more consistent and cleaner. * **Common extensions**, including table support, fenced code blocks, autolinks, strikethroughs, non-strict emphasis, etc. * **Safety**. Blackfriday is paranoid when parsing, making it safe to feed untrusted user input without fear of bad things happening. The test suite stress tests this and there are no known inputs that make it crash. If you find one, please let me know and send me the input that does it. * **Fast processing**. It is fast enough to render on-demand in most web applications without having to cache the output. * **Thread safety**. You can run multiple parsers in different goroutines without ill effect. There is no dependence on global shared state. * **Minimal dependencies**. Blackfriday only depends on standard library packages in Go. The source code is pretty self-contained, so it is easy to add to any project, including Google App Engine projects. * **Standards compliant**. Output successfully validates using the W3C validation tool for HTML 4.01 and XHTML 1.0 Transitional. Extensions ---------- In addition to the standard markdown syntax, this package implements the following extensions: * **Intra-word emphasis supression**. 
The `_` character is commonly used inside words when discussing code, so having markdown interpret it as an emphasis command is usually the wrong thing. Blackfriday lets you treat all emphasis markers as normal characters when they occur inside a word. * **Tables**. Tables can be created by drawing them in the input using a simple syntax: ``` Name | Age --------|------ Bob | 27 Alice | 23 ``` * **Fenced code blocks**. In addition to the normal 4-space indentation to mark code blocks, you can explicitly mark them and supply a language (to make syntax highlighting simple). Just mark it like this: ``` go func getTrue() bool { return true } ``` You can use 3 or more backticks to mark the beginning of the block, and the same number to mark the end of the block. * **Autolinking**. Blackfriday can find URLs that have not been explicitly marked as links and turn them into links. * **Strikethrough**. Use two tildes (`~~`) to mark text that should be crossed out. * **Hard line breaks**. With this extension enabled (it is off by default in the `MarkdownBasic` and `MarkdownCommon` convenience functions), newlines in the input translate into line breaks in the output. * **Smart quotes**. Smartypants-style punctuation substitution is supported, turning normal double- and single-quote marks into curly quotes, etc. * **LaTeX-style dash parsing** is an additional option, where `--` is translated into `&ndash;`, and `---` is translated into `&mdash;`. This differs from most smartypants processors, which turn a single hyphen into an ndash and a double hyphen into an mdash. * **Smart fractions**, where anything that looks like a fraction is translated into suitable HTML (instead of just a few special cases like most smartypant processors). For example, `4/5` becomes `<sup>4</sup>&frasl;<sub>5</sub>`, which renders as <sup>4</sup>&frasl;<sub>5</sub>. LaTeX Output ------------ A rudimentary LaTeX rendering backend is also included. 
To see an example of its usage, see `main.go`: It renders some basic documents, but is only experimental at this point. In particular, it does not do any inline escaping, so input that happens to look like LaTeX code will be passed through without modification. Todo ---- * More unit testing * Markdown pretty-printer output engine * Improve unicode support. It does not understand all unicode rules (about what constitutes a letter, a punctuation symbol, etc.), so it may fail to detect word boundaries correctly in some instances. It is safe on all utf-8 input. License ------- Blackfriday is distributed under the Simplified BSD License: > Copyright © 2011 Russ Ross > All rights reserved. > > Redistribution and use in source and binary forms, with or without > modification, are permitted provided that the following conditions > are met: > > 1. Redistributions of source code must retain the above copyright > notice, this list of conditions and the following disclaimer. > > 2. Redistributions in binary form must reproduce the above > copyright notice, this list of conditions and the following > disclaimer in the documentation and/or other materials provided with > the distribution. > > THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS > "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT > LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS > FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE > COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, > INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, > BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; > LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER > CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT > LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN > ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE > POSSIBILITY OF SUCH DAMAGE. 
[1]: http://daringfireball.net/projects/markdown/ "Markdown" [2]: http://golang.org/ "Go Language" [3]: http://github.com/tanoku/upskirt "Upskirt"
{ "content_hash": "4f81ea2040f3f38bc3a56dc5f0577314", "timestamp": "", "source": "github", "line_count": 231, "max_line_length": 74, "avg_line_length": 35.96536796536797, "alnum_prop": 0.7350746268656716, "repo_name": "UserStack/ustackweb", "id": "914d930d0b3ba6e3eb953a47a4927d403c8fd3f5", "size": "8309", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Godeps/_workspace/src/github.com/slene/blackfriday/README.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "944" }, { "name": "Go", "bytes": "56263" }, { "name": "JavaScript", "bytes": "1715" }, { "name": "Ruby", "bytes": "53" }, { "name": "Shell", "bytes": "215" } ], "symlink_target": "" }
package org.apache.activemq.artemis.core.protocol.mqtt; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.activemq.artemis.api.core.ActiveMQBuffer; import org.apache.activemq.artemis.api.core.ActiveMQException; import org.apache.activemq.artemis.core.remoting.CloseListener; import org.apache.activemq.artemis.core.remoting.FailureListener; import org.apache.activemq.artemis.spi.core.protocol.RemotingConnection; import org.apache.activemq.artemis.spi.core.remoting.Connection; public class MQTTConnection implements RemotingConnection { private final Connection transportConnection; private final long creationTime; private AtomicBoolean dataReceived; private boolean destroyed; private boolean connected; private final List<FailureListener> failureListeners = Collections.synchronizedList(new ArrayList<FailureListener>()); private final List<CloseListener> closeListeners = Collections.synchronizedList(new ArrayList<CloseListener>()); public MQTTConnection(Connection transportConnection) throws Exception { this.transportConnection = transportConnection; this.creationTime = System.currentTimeMillis(); this.dataReceived = new AtomicBoolean(); this.destroyed = false; } public Object getID() { return transportConnection.getID(); } @Override public long getCreationTime() { return creationTime; } @Override public String getRemoteAddress() { return transportConnection.getRemoteAddress(); } @Override public void addFailureListener(FailureListener listener) { failureListeners.add(listener); } @Override public boolean removeFailureListener(FailureListener listener) { return failureListeners.remove(listener); } @Override public void addCloseListener(CloseListener listener) { closeListeners.add(listener); } @Override public boolean removeCloseListener(CloseListener listener) { return closeListeners.remove(listener); } @Override public List<CloseListener> removeCloseListeners() { 
synchronized (closeListeners) { List<CloseListener> deletedCloseListeners = new ArrayList<CloseListener>(closeListeners); closeListeners.clear(); return deletedCloseListeners; } } @Override public void setCloseListeners(List<CloseListener> listeners) { closeListeners.addAll(listeners); } @Override public List<FailureListener> getFailureListeners() { return failureListeners; } @Override public List<FailureListener> removeFailureListeners() { synchronized (failureListeners) { List<FailureListener> deletedFailureListeners = new ArrayList<FailureListener>(failureListeners); failureListeners.clear(); return deletedFailureListeners; } } @Override public void setFailureListeners(List<FailureListener> listeners) { synchronized (failureListeners) { failureListeners.clear(); failureListeners.addAll(listeners); } } @Override public ActiveMQBuffer createTransportBuffer(int size) { return transportConnection.createTransportBuffer(size); } @Override public void fail(ActiveMQException me) { synchronized (failureListeners) { for (FailureListener listener : failureListeners) { listener.connectionFailed(me, false); } } } @Override public void fail(ActiveMQException me, String scaleDownTargetNodeID) { synchronized (failureListeners) { for (FailureListener listener : failureListeners) { //FIXME(mtaylor) How do we check if the node has failed over? listener.connectionFailed(me, false); } } } @Override public void destroy() { //TODO(mtaylor) ensure this properly destroys this connection. 
destroyed = true; disconnect(false); } @Override public Connection getTransportConnection() { return transportConnection; } @Override public boolean isClient() { return false; } @Override public boolean isDestroyed() { return destroyed; } @Override public void disconnect(boolean criticalError) { transportConnection.forceClose(); } @Override public void disconnect(String scaleDownNodeID, boolean criticalError) { transportConnection.forceClose(); } protected void dataReceived() { dataReceived.set(true); } @Override public boolean checkDataReceived() { return dataReceived.compareAndSet(true, false); } @Override public void flush() { transportConnection.checkFlushBatchBuffer(); } @Override public void bufferReceived(Object connectionID, ActiveMQBuffer buffer) { } public void setConnected(boolean connected) { this.connected = connected; } public boolean getConnected() { return connected; } }
{ "content_hash": "6cf2134a726b3a013773424bed9dccad", "timestamp": "", "source": "github", "line_count": 191, "max_line_length": 121, "avg_line_length": 26.413612565445025, "alnum_prop": 0.7137760158572845, "repo_name": "waysact/activemq-artemis", "id": "7bb12c6e6556aa958df1b228fa8a08648b8dab84", "size": "5845", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "artemis-protocols/artemis-mqtt-protocol/src/main/java/org/apache/activemq/artemis/core/protocol/mqtt/MQTTConnection.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "5879" }, { "name": "C", "bytes": "23262" }, { "name": "C++", "bytes": "1032" }, { "name": "CMake", "bytes": "4260" }, { "name": "CSS", "bytes": "11732" }, { "name": "HTML", "bytes": "19113" }, { "name": "Java", "bytes": "22819589" }, { "name": "Shell", "bytes": "11911" } ], "symlink_target": "" }
#define PWIZ_SOURCE #include "Reader.hpp" #include "pwiz/utility/misc/Filesystem.hpp" #include "pwiz/utility/misc/Std.hpp" namespace pwiz { namespace proteome { using namespace pwiz::util; using boost::shared_ptr; PWIZ_API_DECL void Reader::read(const string& uri, ProteomeData& result) const { shared_ptr<istream> uriStreamPtr(new ifstream(uri.c_str())); read(uri, uriStreamPtr, result); } PWIZ_API_DECL std::string ReaderList::identify(const string& uri) const { shared_ptr<istream> uriStreamPtr(new ifstream(uri.c_str())); return identify(uri, uriStreamPtr); } PWIZ_API_DECL std::string ReaderList::identify(const string& uri, shared_ptr<istream> uriStreamPtr) const { std::string result; for (const_iterator it=begin(); it!=end(); ++it) { result = (*it)->identify(uri, uriStreamPtr); if (result.length()) { break; } } return result; } PWIZ_API_DECL void ReaderList::read(const string& uri, ProteomeData& result) const { shared_ptr<istream> uriStreamPtr(new ifstream(uri.c_str())); read(uri, uriStreamPtr, result); } PWIZ_API_DECL void ReaderList::read(const string& uri, shared_ptr<istream> uriStreamPtr, ProteomeData& result) const { for (const_iterator it=begin(); it!=end(); ++it) if ((*it)->accept(uri, uriStreamPtr)) { (*it)->read(uri, uriStreamPtr, result); return; } throw ReaderFail(" don't know how to read " + uri); } PWIZ_API_DECL ReaderList& ReaderList::operator +=(const ReaderList& rhs) { insert(end(), rhs.begin(), rhs.end()); return *this; } PWIZ_API_DECL ReaderList& ReaderList::operator +=(const ReaderPtr& rhs) { push_back(rhs); return *this; } PWIZ_API_DECL ReaderList ReaderList::operator +(const ReaderList& rhs) const { ReaderList readerList(*this); readerList += rhs; return readerList; } PWIZ_API_DECL ReaderList ReaderList::operator +(const ReaderPtr& rhs) const { ReaderList readerList(*this); readerList += rhs; return readerList; } PWIZ_API_DECL ReaderList operator +(const ReaderPtr& lhs, const ReaderPtr& rhs) { ReaderList readerList; readerList.push_back(lhs); 
readerList.push_back(rhs); return readerList; } } // namespace proteome } // namespace pwiz
{ "content_hash": "0172026840de67c1da0b18e0ce7eee22", "timestamp": "", "source": "github", "line_count": 106, "max_line_length": 116, "avg_line_length": 22.528301886792452, "alnum_prop": 0.6465661641541038, "repo_name": "romanzenka/myrimatch", "id": "45f3983fa593ce407a72d95d4452c0383275a6b1", "size": "3170", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "freicore/pwiz_src/pwiz/data/proteome/Reader.cpp", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "1537746" }, { "name": "C++", "bytes": "9829157" }, { "name": "Java", "bytes": "29714" }, { "name": "Objective-C", "bytes": "13606" }, { "name": "PHP", "bytes": "45872" }, { "name": "Perl", "bytes": "12744" }, { "name": "Prolog", "bytes": "15" }, { "name": "Python", "bytes": "806795" }, { "name": "Shell", "bytes": "245533" }, { "name": "XSLT", "bytes": "782" } ], "symlink_target": "" }
<?php /** * Get key type * * @author Ivan Shumkov * @package Rediska * @subpackage Commands * @version @package_version@ * @link http://rediska.geometria-lab.net * @license http://www.opensource.org/licenses/bsd-license.php */ class Rediska_Command_GetType extends Rediska_Command_Abstract { /** * Create command * * @param string $key Key name * @return Rediska_Connection_Exec */ public function create($key) { $connection = $this->_rediska->getConnectionByKeyName($key); $command = array('TYPE', $this->_rediska->getOption('namespace') . $key); return new Rediska_Connection_Exec($connection, $command); } }
{ "content_hash": "491c88449ba4c86b31e5e2febe2426e9", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 73, "avg_line_length": 23.766666666666666, "alnum_prop": 0.6185133239831697, "repo_name": "Shumkov/Rediska", "id": "fe90cd241b84e253c63101e9b4c904fa2811ca32", "size": "713", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "library/Rediska/Command/GetType.php", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "ApacheConf", "bytes": "193" }, { "name": "HTML", "bytes": "3195" }, { "name": "PHP", "bytes": "711092" } ], "symlink_target": "" }
ACCEPTED #### According to Index Fungorum #### Published in Karstenia 26(2): 47 (1986) #### Original name Peziza perparva Harmaja ### Remarks null
{ "content_hash": "a3d5737bc323b9736087c91d6aba60fe", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 26, "avg_line_length": 11.538461538461538, "alnum_prop": 0.7, "repo_name": "mdoering/backbone", "id": "4d746cad1646517056977c61c0ac925254f6c225", "size": "197", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Fungi/Ascomycota/Pezizomycetes/Pezizales/Pezizaceae/Peziza/Peziza perparva/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
/* global jQuery */

var timelineEditor = (function($) {
    'use strict';

    /**
     * Creates a zero-filled array.
     * @param {Number} length Number of elements to initialize.
     * @return {Array}
     */
    function makeArray(length) {
        var a = new Array(length);
        for (var i = 0; i < length; i++) {
            a[i] = 0;
        }
        return a;
    }

    /**
     * Takes a number and wraps it within the range of [0:numEl)
     * @param {Number} index
     * @param {Number} numEl Number of elements; must be positive.
     * @return {Number} New Index
     */
    function wrapIndex(index, numEl) {
        index = index % numEl;
        return (index < 0) ? index + numEl : index;
    }

    /**
     * Editor for a table of timeline steps (name, start, stop, amount),
     * with a localStorage-backed local history and an AJAX-backed
     * server-side history.
     * @class TimelineEditor
     */
    var TimelineEditor = function() {};

    TimelineEditor.prototype.name = ''; // History prefix for localStorage.
    TimelineEditor.prototype.ajaxUrl = ''; // For server-side history; defaults to same page.
    TimelineEditor.prototype.timelineDefault = ''; // Default JSON to load.

    /**
     * JSON of column names for the Additional Process Steps.
     * Defaults to wildcard (represented as an empty object);
     * A wildcard is a class given to the "name" field to
     * differentiate from the grey readonly fields.
     * @type {String}
     */
    TimelineEditor.prototype.processSteps = '[{}]';

    /**
     * Columns that will always appear in the timeline Object and results.
     * Enforces an order; additional columns will be put after these.
     * @type {Array}
     */
    TimelineEditor.prototype.requiredColumns = [];

    TimelineEditor.prototype.timelineSelector = '#timeline';
    TimelineEditor.prototype.rowSelector = '#timeline .timeline-item';
    TimelineEditor.prototype.processStepsSelector = '#timeline-wordbank';
    TimelineEditor.prototype.localHistorySelector = '#timeline-history';
    TimelineEditor.prototype.remoteHistorySelector = '#timeline-history-server';

    /**
     * timeScale: For changing the time interval.
     * For example, using 0.5 => {0, 2, 4, 6, ...}
     * using 10.0 => {0, 0.1, 0.2, 0.3, ...}
     * (Note that time interval == 1/timeScale).
     */
    TimelineEditor.prototype.timeScale = 1;

    TimelineEditor.prototype.historyCount = 5; // Max number of history items to save.

    /**
     * Converts the timeline intervals to a full start-to-stop timeline.
     * @param {Object} timeline
     * @param {Object} timeline.raw AA of timeline steps.
     * @return {Array} timeline[columns] An array for each column.
     */
    TimelineEditor.prototype.buildTimeline = function(timeline) {
        if (timeline.error) {
            return timeline;
        }
        var timeScale = this.timeScale;
        // Math.floor instead of parseInt: parseInt coerces its argument to a
        // string first and mis-parses exponential notation
        // (e.g. parseInt(5e-7) === 5).
        var endIndex = Math.floor(timeline.endTime * timeScale);
        $(this.requiredColumns).each(function(i, name) {
            timeline[name] = makeArray(endIndex + 1);
        });
        $(timeline.raw).each(function(i, lineAA) {
            var name = lineAA.name;
            if (!name) {
                return;
            }
            if (!timeline[name]) {
                timeline[name] = makeArray(endIndex + 1);
            }
            // If iter or stop is NaN the while-condition is false and the row
            // is skipped (e.g. the EndTime row, which has no stop/amount).
            var iter = Math.floor(parseFloat(lineAA.start) * timeScale);
            var stop = Math.min(Math.floor(parseFloat(lineAA.stop) * timeScale), endIndex);
            while (iter <= stop) {
                timeline[name][iter++] = parseFloat(lineAA.amount);
            }
        });
        return timeline;
    };

    /**
     * Parses the Timeline into tab-separated values.
     * @param {Object} timeline
     * @param {Number} timeline.endTime
     * @return {String}
     */
    TimelineEditor.prototype.saveToString = function(timeline) {
        // Error checking
        if (timeline.error) {
            return timeline.errorMessage;
        }
        var header = this.requiredColumns.slice(); // copy-by-value
        // Bookkeeping keys that must not become data columns.
        var ignore = ['endTime', 'EndTime', 'raw', ''];
        $(Object.keys(timeline)).each(function(i, key) {
            if (timeline.hasOwnProperty(key)) {
                if ($.inArray(key, header) === -1 && $.inArray(key, ignore) === -1) {
                    header.push(key);
                }
            }
        });
        // Write Header (distinct loop variables; the original redeclared
        // 'var i' twice in this function).
        var text = 'Time';
        for (var col = 0; col < header.length; ++col) {
            text += '\t' + header[col];
        }
        text += '\n';
        // The rest of the timeline: one row per time step.
        for (var row = 0; row <= timeline.endTime * this.timeScale; ++row) {
            text += row / this.timeScale; // Time
            for (var c = 0; c < header.length; ++c) {
                text += '\t' + timeline[header[c]][row];
            }
            text += '\n';
        }
        return text;
    };

    /**
     * Parses the 'raw' line objects of the timeline and converts that to JSON.
     * Used for recreating the timeline.
     * @param {Object} timeline
     * @return {String} JSON.
     */
    TimelineEditor.prototype.saveToJSON = function(timeline) {
        return (timeline.error) ? timeline.errorMessage : JSON.stringify(timeline.raw);
    };

    /**
     * Serializes the Timeline fields to a JSON-parsable associative array.
     * @return {Object} timeline
     * @return {Number} timeline.endTime
     * @return {Array} timeline.raw Each line of the timeline in its own Object.
     * @return {Boolean} timeline.error
     * @return {String} timeline.errorMessage
     */
    TimelineEditor.prototype.parseFields = function() {
        var timeline = {};
        timeline.raw = [];
        $(this.rowSelector).each(function() {
            var line = $(this).children().serializeArray();
            // The EndTime row stores the total duration in its second input.
            if (line[0].value === 'EndTime') {
                timeline.endTime = parseFloat(line[1].value);
            }
            var lineAA = {};
            for (var j = 0; j < line.length; j++) {
                lineAA[line[j].name] = line[j].value;
            }
            timeline.raw.push(lineAA);
        });
        // Error checking (also triggers when no EndTime row was found,
        // since parseFloat(undefined) is NaN).
        if (isNaN(timeline.endTime) || timeline.endTime < 0) {
            return {
                error: true,
                errorMessage: 'EndTime must be a positive number.'
            };
        }
        return timeline;
    };

    /**
     * Load the input fields from JSON.
     * Falls back to timelineDefault when the argument is unparsable.
     * @param {String} json
     * @return {Object}
     */
    TimelineEditor.prototype.loadJSON = function(json) {
        var jsonObj;
        try {
            jsonObj = JSON.parse(json);
        } catch (e) {
            jsonObj = JSON.parse(this.timelineDefault);
        }
        // Rebuild the timeline table, starting with the readonly header row.
        var timeline = $(this.timelineSelector).empty()
            .append('<div class="list-group-item"><input type="text" value="Name" class="readonly" readonly><input type="text" value="Start Time" class="readonly" readonly><input type="text" value="Stop Time" class="readonly" readonly><input type="text" value="Amount" class="readonly" readonly></div>');
        $(jsonObj).each(function(i, line) {
            var row = $('<div>', {'class': 'timeline-item list-group-item'});
            timeline.append(row);
            $(['name', 'start', 'stop', 'amount']).each(function(fieldIdx, name) {
                // Append input boxes. The EndTime row only gets the first two.
                if (line.name === 'EndTime' && fieldIdx > 1) {
                    return;
                }
                row.append($('<input>', {
                    'type': 'text',
                    'class': ((name == 'name') ? 'readonly' + ((line.wildcard) ? ' wildcard' : '') : 'numeric'),
                    'readonly': (name == 'name' && !line.wildcard) ? true : false,
                    'name': name,
                    'value': line[name]
                }));
            });
            if (line.name !== 'EndTime') {
                // Append move button.
                row.append('<span aria-hidden="true" class="glyphicon glyphicon-move" title="Drag to move">');
            }
        });
        return jsonObj;
    };

    /**
     * Updates the view with the Additional Process Steps.
     */
    TimelineEditor.prototype.loadProcessSteps = function() {
        var jsonObj;
        try {
            jsonObj = JSON.parse(this.processSteps);
        } catch (e) {
            return; // Malformed configuration: leave the view untouched.
        }
        var editor = this;
        $(editor.processStepsSelector).empty();
        $(jsonObj).each(function(i, line) {
            var row = $('<div>', {'class': 'timeline-item list-group-item'});
            // A string entry is a fixed (readonly) step; any other entry is
            // an editable wildcard step.
            row.append($('<input>', {
                'type': 'text',
                'class': 'readonly' + ((typeof(line) === 'string') ? '' : ' wildcard'),
                'readonly': (typeof(line) === 'string') ? true : false,
                'name': 'name',
                'value': (typeof(line) === 'string') ? line : ''
            }));
            row.append('<span aria-hidden="true" class="glyphicon glyphicon-move" title="Drag to move">');
            $(editor.processStepsSelector).append(row);
        });
    };

    /**
     * Save JSON to a $(historyCount)-slot ring buffer in localStorage.
     * Three pieces of information are stored:
     * two per history-item (json & timestamp), one global (newest_pointer).
     */
    TimelineEditor.prototype.saveHistory = function() {
        var json = this.saveToJSON(this.parseFields());
        var pointer = localStorage.getItem(this.name + 'History_newest') || '0';
        pointer = wrapIndex(parseInt(pointer) + 1, this.historyCount);
        localStorage.setItem(this.name + 'History_newest', pointer);
        localStorage.setItem(this.name + 'History_json_' + pointer, json);
        localStorage.setItem(this.name + 'History_timestamp_' + pointer, new Date().toLocaleString());
    };

    /**
     * Loads a local history item by pointer value.
     * @param {Number} pointer Slot index; wrapped into the valid range.
     */
    TimelineEditor.prototype.loadHistory = function(pointer) {
        var json = localStorage.getItem(this.name + 'History_json_' + wrapIndex(pointer, this.historyCount));
        if (json !== null) {
            this.loadJSON(json);
        }
    };

    /**
     * Update the view with the local history items, newest first.
     */
    TimelineEditor.prototype.updateHistory = function() {
        $(this.localHistorySelector).empty();
        this.updateHistoryFromServer();
        var pointer = localStorage.getItem(this.name + 'History_newest');
        for (var i = 0; i < this.historyCount; i++) {
            var timestamp = localStorage.getItem(this.name + 'History_timestamp_' + pointer);
            if (!timestamp) {
                break; // Empty slot: no older entries exist.
            }
            // NOTE(review): the <a> and the <span> share the same id value
            // ("history_" + pointer) — invalid HTML, but click handlers
            // elsewhere appear to key off these ids, so they are kept as-is.
            $(this.localHistorySelector).append('<a id="history_'+ pointer +'" class="timeline-history-item">'+timestamp+'</a>&nbsp;<span aria-hidden="true" id="history_'+ pointer +'" class="glyphicon glyphicon-save timeline-history-item-save" title="Save to Server"></span><br>');
            pointer = wrapIndex(parseInt(pointer) - 1, this.historyCount);
        }
    };

    /**
     * Update the view with the server-side history items.
     */
    TimelineEditor.prototype.updateHistoryFromServer = function() {
        var editor = this;
        $.ajax({
            url: editor.ajaxUrl,
            type: 'POST',
            data: { historyAction: 'list' },
            success: function(response) {
                // The server responds with a JSON array of history names
                $(editor.remoteHistorySelector).empty().append('<option value=""></option>');
                try {
                    jQuery.each(JSON.parse(response), function(i, item) {
                        // An <option> may only contain text; the save-icon
                        // <span> the old markup embedded here was discarded
                        // by the HTML parser anyway and has been removed.
                        // NOTE(review): 'item' is inserted unescaped — the
                        // server-supplied names must be trusted or escaped.
                        $(editor.remoteHistorySelector).append('<option value="'+item+'">' + item + '</option>');
                    });
                } catch (e) {
                    // JSON parse errors: keep only the blank entry.
                }
            }
        });
    };

    /**
     * Uploads the local history slot at 'pointer' to the server under a
     * user-supplied name, then refreshes the server-side history list.
     * @param {Number} pointer Local history slot index.
     */
    TimelineEditor.prototype.saveHistoryToServer = function(pointer) {
        var historyName = prompt("Enter a name to save the timeline as.");
        if (!historyName) {
            return; // Cancelled, or empty name entered.
        }
        var editor = this;
        $.ajax({
            url: editor.ajaxUrl,
            type: 'POST',
            data: {
                historyAction: 'save',
                historyData: localStorage.getItem(editor.name + 'History_json_' + pointer),
                historyName: historyName
            },
            success: function() {
                editor.updateHistoryFromServer();
            }
        });
    };

    /**
     * Loads a named timeline from the server into the editor fields.
     * @param {String} historyName
     */
    TimelineEditor.prototype.loadHistoryFromServer = function(historyName) {
        var editor = this;
        $.ajax({
            url: editor.ajaxUrl,
            type: 'POST',
            data: {
                historyAction: 'load',
                historyName: historyName
            },
            success: function(response) {
                editor.loadJSON(response);
            }
        });
    };

    /**
     * Deletes a named timeline from the server after user confirmation,
     * then refreshes the server-side history list.
     * @param {String} historyName
     */
    TimelineEditor.prototype.deleteHistoryFromServer = function(historyName) {
        if (historyName === '') {
            alert("To delete a timeline from the server, first select one then press this trashcan icon again.");
            return;
        }
        if (confirm('Delete ' + historyName + ' from the server?')) {
            var editor = this;
            $.ajax({
                url: editor.ajaxUrl,
                type: 'POST',
                data: {
                    historyAction: 'delete',
                    historyName: historyName
                },
                success: function() {
                    editor.updateHistoryFromServer();
                }
            });
        }
    };

    return {
        TimelineEditor: TimelineEditor,
        makeArray: makeArray,
        wrapIndex: wrapIndex
    };
})(jQuery);
{ "content_hash": "bbe2ed7b24aa1d3f995c607a3c4d5ef9", "timestamp": "", "source": "github", "line_count": 402, "max_line_length": 295, "avg_line_length": 29.48507462686567, "alnum_prop": 0.6428752214629208, "repo_name": "phamd/Timeline-Editor", "id": "5c303acf64159464617a56211dd6f830dc46167f", "size": "11853", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "timelineEditor.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1054" }, { "name": "JavaScript", "bytes": "15832" }, { "name": "PHP", "bytes": "4745" } ], "symlink_target": "" }
package com.scaffold.commons.async.config;

import java.util.concurrent.Callable;
import java.util.concurrent.Future;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.core.task.AsyncTaskExecutor;

/**
 * {@link AsyncTaskExecutor} decorator that logs any exception escaping an
 * asynchronous task instead of letting it disappear silently.
 * <p>
 * Spring lifecycle callbacks ({@code afterPropertiesSet}/{@code destroy}) are
 * forwarded to the wrapped executor when it implements the matching interface.
 */
public class ExceptionHandlingAsyncTaskExecutor
        implements AsyncTaskExecutor, InitializingBean, DisposableBean {

    private final Logger log = LoggerFactory.getLogger(ExceptionHandlingAsyncTaskExecutor.class);

    /** Executor that actually runs the submitted tasks. */
    private final AsyncTaskExecutor executor;

    public ExceptionHandlingAsyncTaskExecutor(AsyncTaskExecutor executor) {
        this.executor = executor;
    }

    @Override
    public void execute(Runnable task) {
        // Note: this overload intentionally delegates without wrapping.
        executor.execute(task);
    }

    @Override
    public void execute(Runnable task, long startTimeout) {
        executor.execute(wrap(task), startTimeout);
    }

    @Override
    public Future<?> submit(Runnable task) {
        return executor.submit(wrap(task));
    }

    @Override
    public <T> Future<T> submit(Callable<T> task) {
        return executor.submit(wrap(task));
    }

    /** Wraps a runnable so any thrown exception is routed to {@link #handle}. */
    private Runnable wrap(final Runnable task) {
        return () -> {
            try {
                task.run();
            } catch (Exception ex) {
                handle(ex);
            }
        };
    }

    /**
     * Wraps a callable so any thrown exception is routed to {@link #handle}
     * before being rethrown (and thus still surfaces via the returned Future).
     */
    private <T> Callable<T> wrap(final Callable<T> task) {
        return () -> {
            try {
                return task.call();
            } catch (Exception ex) {
                handle(ex);
                throw ex;
            }
        };
    }

    /** Logs the exception; subclasses may override to report differently. */
    protected void handle(Exception e) {
        log.error("Caught async exception", e);
    }

    @Override
    public void afterPropertiesSet() throws Exception {
        if (executor instanceof InitializingBean) {
            ((InitializingBean) executor).afterPropertiesSet();
        }
    }

    @Override
    public void destroy() throws Exception {
        if (executor instanceof DisposableBean) {
            ((DisposableBean) executor).destroy();
        }
    }
}
{ "content_hash": "b073882990f2f0dda6a4265e803a50ef", "timestamp": "", "source": "github", "line_count": 83, "max_line_length": 97, "avg_line_length": 27.50602409638554, "alnum_prop": 0.6360052562417872, "repo_name": "mjedrasz/micro-scaffolding", "id": "530874c7a9b1bcc5fc755338ec036a3ee32c83ab", "size": "2283", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "commons/async/src/main/java/com/scaffold/commons/async/config/ExceptionHandlingAsyncTaskExecutor.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ApacheConf", "bytes": "24139" }, { "name": "CSS", "bytes": "3093" }, { "name": "Groovy", "bytes": "7260" }, { "name": "HTML", "bytes": "94139" }, { "name": "Java", "bytes": "81471" }, { "name": "JavaScript", "bytes": "126857" }, { "name": "Scala", "bytes": "1083" }, { "name": "Shell", "bytes": "1525" } ], "symlink_target": "" }
import { Injectable } from '@angular/core'; import { Http, Response, Headers } from "@angular/http"; import { User } from "./user.interface"; // rxjs import {Observable} from "rxjs/Observable"; import "rxjs/Rx"; // used for .map @Injectable() export class AppointmentService { private strPostUrl: string = "https://us-central1-leemtek-secure-forms.cloudfunctions.net/tmjsleeptherapycentre/appointment"; constructor(private http: Http) {} // Send the email to REST API. mdSendData(objFinalSenderInfo: any) { const strBody = JSON.stringify(objFinalSenderInfo); const headers = new Headers({ 'Content-Type': 'application/json' }); return this.http.post(this.strPostUrl, strBody, { headers: headers }) .map((data: Response) => data.json()) ; // this.http.post() } }
{ "content_hash": "59b132515bbdf9e1e492f9869fc9bb6b", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 129, "avg_line_length": 34.916666666666664, "alnum_prop": 0.6670644391408115, "repo_name": "leemtek/TMJSleepTherapyCentre.com-Angular-2", "id": "0552ca1dab648a690b374ec112977945ea083b0c", "size": "838", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/app/forms/appointment/appointment.service.ts", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "8560" }, { "name": "HTML", "bytes": "4305" }, { "name": "JavaScript", "bytes": "9085" }, { "name": "TypeScript", "bytes": "154638" } ], "symlink_target": "" }
package org.apache.hadoop.hbase.io.encoding;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;

/**
 * Compress key by storing size of common prefix with previous KeyValue
 * and storing raw size of rest.
 *
 * Format:
 * 1-5 bytes: compressed key length minus prefix (7-bit encoding)
 * 1-5 bytes: compressed value length (7-bit encoding)
 * 1-3 bytes: compressed length of common key prefix
 * ... bytes: rest of key (including timestamp)
 * ... bytes: value
 *
 * In a worst case compressed KeyValue will be three bytes longer than original.
 *
 */
@InterfaceAudience.Private
public class PrefixKeyDeltaEncoder extends BufferedDataBlockEncoder {

  /**
   * Encodes one cell in prefix-delta form: compressed (key length - common),
   * compressed value length, compressed common-prefix length, then the
   * non-common tail of the flat key followed by the value bytes.
   *
   * @param cell the cell to encode
   * @param encodingContext carries the previous cell used for the prefix
   * @param out destination stream
   * @return the size this key/value contributes (key + value + KeyValue
   *         infrastructure, plus whatever the superclass hook appends)
   * @throws IOException on write failure
   */
  @Override
  public int internalEncode(Cell cell, HFileBlockDefaultEncodingContext encodingContext,
      DataOutputStream out) throws IOException {
    int klength = KeyValueUtil.keyLength(cell);
    int vlength = cell.getValueLength();
    EncodingState state = encodingContext.getEncodingState();
    if (state.prevCell == null) {
      // copy the key, there is no common prefix with none
      ByteBufferUtils.putCompressedInt(out, klength);
      ByteBufferUtils.putCompressedInt(out, vlength);
      ByteBufferUtils.putCompressedInt(out, 0);
      CellUtil.writeFlatKey(cell, out);
    } else {
      // find a common prefix and skip it
      int common = CellUtil.findCommonPrefixInFlatKey(cell, state.prevCell, true, true);
      ByteBufferUtils.putCompressedInt(out, klength - common);
      ByteBufferUtils.putCompressedInt(out, vlength);
      ByteBufferUtils.putCompressedInt(out, common);
      writeKeyExcludingCommon(cell, common, out);
    }
    // Write the value part
    CellUtil.writeValue(out, cell, vlength);
    int size = klength + vlength + KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE;
    // Bytes written after the key/value (by the superclass hook) are
    // included in the reported size.
    size += afterEncodingKeyValue(cell, out, encodingContext);
    // Remember this cell so the next call can compute its common prefix.
    state.prevCell = cell;
    return size;
  }

  /**
   * Writes the part of the flat key that differs from the previous cell.
   * The flat-key layout written here is: row-length(2) + row +
   * family-length(1) + family + qualifier + timestamp(8) + type(1);
   * {@code commonPrefix} counts bytes from the start of that layout.
   */
  private void writeKeyExcludingCommon(Cell cell, int commonPrefix, DataOutputStream out)
      throws IOException {
    short rLen = cell.getRowLength();
    if (commonPrefix < rLen + KeyValue.ROW_LENGTH_SIZE) {
      // Previous and current rows are different. Need to write the differing part followed by
      // cf,q,ts and type
      CellUtil.writeRowKeyExcludingCommon(cell, rLen, commonPrefix, out);
      byte fLen = cell.getFamilyLength();
      out.writeByte(fLen);
      CellUtil.writeFamily(out, cell, fLen);
      CellUtil.writeQualifier(out, cell, cell.getQualifierLength());
      out.writeLong(cell.getTimestamp());
      out.writeByte(cell.getTypeByte());
    } else {
      // The full row key part is common. CF part will be common for sure as we deal with Cells in
      // same family. Just need write the differing part in q, ts and type
      // Re-base commonPrefix so it counts bytes from the start of the qualifier.
      commonPrefix = commonPrefix - (rLen + KeyValue.ROW_LENGTH_SIZE)
          - (cell.getFamilyLength() + KeyValue.FAMILY_LENGTH_SIZE);
      int qLen = cell.getQualifierLength();
      int commonQualPrefix = Math.min(commonPrefix, qLen);
      int qualPartLenToWrite = qLen - commonQualPrefix;
      if (qualPartLenToWrite > 0) {
        CellUtil.writeQualifierSkippingBytes(out, cell, qLen, commonQualPrefix);
      }
      commonPrefix -= commonQualPrefix;
      // Common part in TS also?
      if (commonPrefix > 0) {
        int commonTimestampPrefix = Math.min(commonPrefix, KeyValue.TIMESTAMP_SIZE);
        if (commonTimestampPrefix < KeyValue.TIMESTAMP_SIZE) {
          // Write only the trailing, differing bytes of the 8-byte timestamp.
          byte[] curTsBuf = Bytes.toBytes(cell.getTimestamp());
          out.write(curTsBuf, commonTimestampPrefix, KeyValue.TIMESTAMP_SIZE
              - commonTimestampPrefix);
        }
        commonPrefix -= commonTimestampPrefix;
        if (commonPrefix == 0) {
          // Timestamp fully common but type byte is not: write the type.
          out.writeByte(cell.getTypeByte());
        }
      } else {
        // Nothing of the timestamp is common: write it and the type in full.
        out.writeLong(cell.getTimestamp());
        out.writeByte(cell.getTypeByte());
      }
    }
  }

  /**
   * Rebuilds full KeyValues from a prefix-delta encoded stream.
   *
   * @param source stream whose first int is the total decompressed size
   * @param allocateHeaderLength bytes to reserve at the start of the result
   * @param skipLastBytes trailing bytes of the stream that are not key/values
   * @param decodingCtx decoding settings passed to the superclass hook
   * @return buffer containing the decoded data, limit set to its end
   */
  @Override
  protected ByteBuffer internalDecodeKeyValues(DataInputStream source, int allocateHeaderLength,
      int skipLastBytes, HFileBlockDefaultDecodingContext decodingCtx) throws IOException {
    int decompressedSize = source.readInt();
    ByteBuffer buffer = ByteBuffer.allocate(decompressedSize + allocateHeaderLength);
    buffer.position(allocateHeaderLength);
    int prevKeyOffset = 0;
    while (source.available() > skipLastBytes) {
      // Each iteration appends one KeyValue; the returned offset lets the
      // next iteration copy the shared key prefix out of the buffer itself.
      prevKeyOffset = decodeKeyValue(source, buffer, prevKeyOffset);
      afterDecodingKeyValue(source, buffer, decodingCtx);
    }
    // The loop should stop exactly at the trailing bytes; anything else
    // means an entry ran past the expected end of the data.
    if (source.available() != skipLastBytes) {
      throw new IllegalStateException("Read too many bytes.");
    }
    buffer.limit(buffer.position());
    return buffer;
  }

  /**
   * Decodes a single key/value, materializing the full key by copying the
   * common prefix from the previously decoded key already in {@code buffer}.
   *
   * @return offset of the start of this key within {@code buffer}
   */
  private int decodeKeyValue(DataInputStream source, ByteBuffer buffer, int prevKeyOffset)
      throws IOException, EncoderBufferTooSmallException {
    int keyLength = ByteBufferUtils.readCompressedInt(source);
    int valueLength = ByteBufferUtils.readCompressedInt(source);
    int commonLength = ByteBufferUtils.readCompressedInt(source);
    int keyOffset;
    // Stored key length excludes the common prefix; restore the full length.
    keyLength += commonLength;
    ensureSpace(buffer, keyLength + valueLength + KeyValue.ROW_OFFSET);
    buffer.putInt(keyLength);
    buffer.putInt(valueLength);
    // copy the prefix
    if (commonLength > 0) {
      keyOffset = buffer.position();
      ByteBufferUtils.copyFromBufferToBuffer(buffer, buffer, prevKeyOffset,
          commonLength);
    } else {
      keyOffset = buffer.position();
    }
    // copy rest of the key and value
    int len = keyLength - commonLength + valueLength;
    ByteBufferUtils.copyFromStreamToBuffer(buffer, source, len);
    return keyOffset;
  }

  /**
   * Extracts the first key of an encoded block without a full decode.
   * The first key can have no predecessor, so a non-zero common-prefix
   * length marks a corrupt block.
   */
  @Override
  public Cell getFirstKeyCellInBlock(ByteBuff block) {
    block.mark();
    block.position(Bytes.SIZEOF_INT);
    int keyLength = ByteBuff.readCompressedInt(block);
    // TODO : See if we can avoid these reads as the read values are not getting used
    ByteBuff.readCompressedInt(block);
    int commonLength = ByteBuff.readCompressedInt(block);
    if (commonLength != 0) {
      throw new AssertionError("Nonzero common length in the first key in "
          + "block: " + commonLength);
    }
    ByteBuffer key = block.asSubByteBuffer(keyLength).duplicate();
    block.reset();
    return createFirstKeyCell(key, keyLength);
  }

  @Override
  public String toString() {
    return PrefixKeyDeltaEncoder.class.getSimpleName();
  }

  /**
   * Creates a seeker that walks the delta-encoded entries, reusing the
   * previous key's bytes for the common prefix of each next key.
   */
  @Override
  public EncodedSeeker createSeeker(CellComparator comparator,
      final HFileBlockDecodingContext decodingCtx) {
    return new BufferedEncodedSeeker<SeekerState>(comparator, decodingCtx) {
      /** Reads the next delta-encoded entry into {@code current}. */
      @Override
      protected void decodeNext() {
        current.keyLength = ByteBuff.readCompressedInt(currentBuffer);
        current.valueLength = ByteBuff.readCompressedInt(currentBuffer);
        current.lastCommonPrefix = ByteBuff.readCompressedInt(currentBuffer);
        current.keyLength += current.lastCommonPrefix;
        current.ensureSpaceForKey();
        // The first lastCommonPrefix bytes of keyBuffer still hold the
        // previous key; only the differing tail is read from the block.
        currentBuffer.get(current.keyBuffer, current.lastCommonPrefix,
            current.keyLength - current.lastCommonPrefix);
        current.valueOffset = currentBuffer.position();
        currentBuffer.skip(current.valueLength);
        if (includesTags()) {
          decodeTags();
        }
        if (includesMvcc()) {
          current.memstoreTS = ByteBuff.readVLong(currentBuffer);
        } else {
          current.memstoreTS = 0;
        }
        current.nextKvOffset = currentBuffer.position();
      }

      /** Skips the block's leading size int, then decodes the first entry. */
      @Override
      protected void decodeFirst() {
        currentBuffer.skip(Bytes.SIZEOF_INT);
        decodeNext();
      }
    };
  }
}
{ "content_hash": "c1039dc30d1a08936802775c4da06351", "timestamp": "", "source": "github", "line_count": 214, "max_line_length": 98, "avg_line_length": 37.71028037383178, "alnum_prop": 0.7038413878562577, "repo_name": "gustavoanatoly/hbase", "id": "842894f73529e7fc9942761694688f2c06023b7c", "size": "8865", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ApacheConf", "bytes": "351" }, { "name": "Batchfile", "bytes": "23942" }, { "name": "C", "bytes": "28534" }, { "name": "C++", "bytes": "56085" }, { "name": "CMake", "bytes": "13186" }, { "name": "CSS", "bytes": "35785" }, { "name": "HTML", "bytes": "15644" }, { "name": "Java", "bytes": "31160752" }, { "name": "JavaScript", "bytes": "2694" }, { "name": "Makefile", "bytes": "1359" }, { "name": "PHP", "bytes": "8385" }, { "name": "Perl", "bytes": "383739" }, { "name": "Protocol Buffer", "bytes": "262179" }, { "name": "Python", "bytes": "87467" }, { "name": "Ruby", "bytes": "481475" }, { "name": "Scala", "bytes": "439837" }, { "name": "Shell", "bytes": "175165" }, { "name": "Thrift", "bytes": "41474" }, { "name": "XSLT", "bytes": "6764" } ], "symlink_target": "" }
'''
Created on May 22, 2015

@author: MP

Ideal (non-interacting) Fermi gas in a 3D harmonic trap: Fermi energy,
chemical potential, peak density and thermal velocity for lithium-6.
(Python 2 module: uses print statements.)
'''
import amo.core.polylog as polylog
import numpy as np
import amo.core.physicalconstants
from amo.core.physicalconstants import LithiumSixSI as li
import scipy.optimize
import unittest
import mpmath
from liexperiment.traps.calibrations import TrapCalibrationsDipole as cal

# Shorthand for the SI physical-constants container.
pc = amo.core.physicalconstants.PhysicalConstantsSI

class HarmonicFermi(object):
    """Thermodynamic quantities of a harmonically trapped Fermi gas."""

    def __init__(self, frequencies, atomnumber, temperature):
        """
        @param frequencies: list of trap frequencies in Hz
        @param atomnumber: number of trapped atoms
        @param temperature: temperature in kelvin
        """
        self.frequencies = np.array(frequencies)
        self.atomnumber = atomnumber
        self.temperature = temperature
        self.m = li.mass  # lithium-6 atomic mass

    @property
    def omegabar(self):
        """Geometric mean of the angular trap frequencies, (prod 2*pi*f_i)^(1/N)."""
        return np.power(np.product(2 * np.pi * self.frequencies), 1.0 / self.frequencies.shape[0])

    @property
    def beta(self):
        """Inverse thermal energy 1/(kB*T), in 1/J."""
        return 1.0 / (pc.kb * self.temperature)

    @property
    def fermi_energy(self):
        """Fermi energy hbar*(6*N)^(1/3)*omegabar of the trapped gas, in J."""
        return pc.hbar * (6 * self.atomnumber)**(1.0/3.0) * self.omegabar

    @property
    def debroglie(self):
        """Thermal de Broglie wavelength sqrt(2*pi*hbar^2/(m*kB*T)), in m."""
        return np.sqrt((2 * np.pi * pc.hbar**2)/(self.m * pc.kb * self.temperature))

    @property
    def chemical_potential(self):
        """Chemical potential (J), from numerically solving the atom-number
        equation with the Fermi energy as the starting guess.
        Note: performs a scipy root solve on every access."""
        sol = scipy.optimize.root(self._chemical_potential_eqn, np.array([self.fermi_energy]))
        return sol.x[0]

    def _chemical_potential_eqn(self, mu):
        # Atom-number equation, rearranged to vanish at the correct mu:
        # N = fermi_poly3(beta*mu) * (kB*T / (hbar*omegabar))^3
        # (fermi_poly3 is supplied by amo.core.polylog).
        return -self.atomnumber + polylog.fermi_poly3(self.beta * mu)*(pc.kb * self.temperature / (pc.hbar * self.omegabar))**3

    def central_density(self):
        """Peak (trap-center) density -Li_{3/2}(-e^(beta*mu)) / lambda_dB^3,
        in 1/m^3 (callers below convert to 1/cm^3 by dividing by 1e6)."""
        return mpmath.fp.re(-1.0 / self.debroglie**3 * mpmath.polylog(3.0/2.0, -np.exp(self.beta * (self.chemical_potential))))

    def average_velocity(self):
        """Thermal velocity sqrt(3*kB*T/m), in m/s."""
        return np.sqrt(3 * pc.kb * self.temperature/self.m)


def compute_crossed_dipole_parameters():
    """Print trap frequencies and degeneracy parameters for the hard-coded
    crossed-dipole-trap beam powers below."""
    redsheetP = 0.1;  # beam powers; presumably watts -- TODO confirm units
    dimpleP = 0;
    odtP = redsheetP * 11.0;
    atomnumber = 5.0e4;
    temperature = 1.0e-6;
    # s-wave scattering length in m (~300 units of 56 pm, roughly 300 Bohr
    # radii) -- TODO confirm the intended unit.
    a = 300 * 56.0e-12;
    fx, fy, fz = cal.crossed_dipole_trap_frequency(redsheetP, dimpleP, odtP)
    frequencies = np.array([fx.standard_value, fy.standard_value, fz.standard_value])
    trap = HarmonicFermi(frequencies, atomnumber, temperature)
    # Fermi radii R_i = sqrt(2*E_F / (m*(2*pi*f_i)^2)), converted to microns.
    fermir = np.sqrt(2 * trap.fermi_energy/(li.mass * (2 * np.pi * frequencies)**2))*10**6
    print "fx: {:.2E}".format(frequencies[0])
    print "fy: {:.2E}".format(frequencies[1])
    print "fz: {:.2E}".format(frequencies[2])
    print "temperature {:.2E} kelvin".format(temperature)
    print "fermi temperature: {:.2E} kelvin".format(trap.fermi_energy / pc.kb)
    print "fermi radii ({}, {}, {}) micron".format(fermir[0], fermir[1], fermir[2])
    print "Central Density: {:.2E} 1/cm^3".format(trap.central_density()/1.0e6)
    # Collision rate = v_thermal * sigma * n(0) with sigma = 4*pi*a^2.
    print "Central Collision Rate: {:.2E} 1/s".format(trap.average_velocity() * 4 * np.pi * a**2 * trap.central_density())


class TestHarmonicFermi(unittest.TestCase):
    # Smoke test only: prints derived quantities, contains no assertions.
    def test_mu(self):
        frequencies = np.array([2.75e3, 2.75e3, 12.2])
        atomnumber = 50e4
        temperature = 5e-6
        # NOTE(review): unlike compute_crossed_dipole_parameters, this 'a'
        # is 300.0 with no pm factor — verify the intended unit.
        a = 300.0
        trap = HarmonicFermi(frequencies, atomnumber, temperature)
        fermir = np.sqrt(2 * trap.fermi_energy/(li.mass * (2 * np.pi * frequencies)**2))*10**6
        print "fermi temperature: {:.2E} kelvin".format(trap.fermi_energy / pc.kb)
        print "fermi radii ({}, {}, {}) micron".format(fermir[0], fermir[1], fermir[2])
        print "Central Density: {:.2E} 1/cm^3".format(trap.central_density()/1.0e6)
        print "Central Collision Rate: {:.2E} 1/s".format(trap.average_velocity() * 4 * np.pi * a**2 * trap.central_density())

if __name__ == "__main__":
    unittest.main()
    #compute_crossed_dipole_parameters()
{ "content_hash": "a6242ca948c66488f6a1a729864d746f", "timestamp": "", "source": "github", "line_count": 95, "max_line_length": 127, "avg_line_length": 39.50526315789474, "alnum_prop": 0.6373567812416733, "repo_name": "MaxParsons/amo-physics", "id": "dcd84d1fca1265b8c0b353381d766b5f51c17231", "size": "3753", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "liexperiment/traps/harmonicfermi.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "121239" } ], "symlink_target": "" }
#include "DaemonServerParameters.h"
#include "IO/Core/Log.h"
#include "Core/StringModifier.h"
#include "Plugins/cpplibext/range_iterator.hpp"

#include <functional>
#include <map>


namespace Fork
{

namespace Daemon
{


/*
 * Internal members
 */

// Program configuration; filled in by ParseAppArguments, exposed
// read-only through GetAppConfig.
static Configuration appConfig;

// Prints the hint shown when the daemon is started without "server:run".
static void ShowInfo()
{
    IO::Log::Message("ForkDaemonServer: No input");
    IO::Log::Message("Enter 'ForkDaemon help' for information");
}

// Prints usage and the list of supported command line options.
static void ShowHelp()
{
    IO::Log::Message("Usage:");
    {
        IO::Log::ScopedIndent indent;
        IO::Log::Message("ForkDaemonServer [Options]");
    }
    IO::Log::Message("Options:");
    {
        IO::Log::ScopedIndent indent;
        IO::Log::ProgramOptions(
            {
                { "server:run", "Runs the daemon server." },
                { "verbose", "Prints all client/server communications to standard output." },
                { "verbose:logfile FILE", "Prints all client/server communications to standard output and the log file." },
                { "verbose:logfile FILE.html", "Prints all client/server communications to standard output and the HTML log file." },
                { "singleton", "This daemon instance only communicates with a single client." },
                { "print-colors", "Prints the output with more color highlighting." },
                { "port N", "Overwrite default port by number 'N'." },
                { "update-rate N", "Overwrite default network packet update rate by number 'N'." }
            }
        );
    }
}


/*
 * Global functions
 */

// Parses the command line arguments into 'appConfig'; afterwards shows the
// help text (on "help") or the no-input hint (when "server:run" is absent).
void ParseAppArguments(const IO::CommandLine::ArgumentListType& arguments)
{
    typedef ext::const_range_forward_iterator<IO::CommandLine::ArgumentListType> ArgIterator;

    /* Setup program parameters */
    // Maps each option name to its handler. Handlers that consume a value
    // (e.g. "port N") advance the shared iterator themselves and throw
    // std::invalid_argument when the value is missing or malformed.
    std::map<std::string, std::function<void (ArgIterator& argIt)>> parameters
    {
        { "help", [](ArgIterator& argIt)
            {
                appConfig.showHelp = true;
            }
        },
        { "server:run", [](ArgIterator& argIt)
            {
                appConfig.runServer = true;
            }
        },
        { "verbose", [](ArgIterator& argIt)
            {
                appConfig.isVerbose = true;
            }
        },
        { "singleton", [](ArgIterator& argIt)
            {
                appConfig.isSingleton = true;
            }
        },
        { "print-colors", [](ArgIterator& argIt)
            {
                appConfig.printColors = true;
            }
        },
        { "verbose:logfile", [](ArgIterator& argIt)
            {
                appConfig.isVerbose = true;
                if (argIt.has_next())
                {
                    // Consume the following argument as the log filename.
                    ++argIt;
                    appConfig.logFilename = *argIt;
                }
                else
                    throw std::invalid_argument("Expected filename after \"verbose:logfile\" parameter");
            }
        },
        { "port", [](ArgIterator& argIt)
            {
                if (argIt.has_next())
                {
                    ++argIt;
                    appConfig.port = StrToNum<unsigned short>(*argIt);
                }
                else
                    throw std::invalid_argument("Expected number after \"port\" parameter");
            }
        },
        { "update-rate", [](ArgIterator& argIt)
            {
                if (argIt.has_next())
                {
                    ++argIt;
                    appConfig.updateRate = StrToNum<unsigned short>(*argIt);
                }
                else
                    throw std::invalid_argument("Expected number after \"update-rate\" parameter");
            }
        }
    };

    /* Parse program arguments */
    try
    {
        for (ArgIterator it { arguments }; !it.reached_end(); ++it)
        {
            /* Map argument to parameter */
            auto itParam = parameters.find(*it);
            if (itParam != parameters.end())
                itParam->second(it);
            else
                IO::Log::Error("Unknown program parameter \"" + *it + "\"");
        }
    }
    catch (const std::exception& err)
    {
        // Raised by a handler for a missing/invalid option value;
        // parsing stops at the offending argument.
        IO::Log::Error(err.what());
    }

    /* Evaluate program arguments */
    if (appConfig.showHelp)
        ShowHelp();
    else if (!appConfig.runServer)
        ShowInfo();
}

// Read-only access to the configuration parsed from the command line.
const Configuration& GetAppConfig()
{
    return appConfig;
}


} // /namespace Daemon

} // /namespace Fork



// ========================
{ "content_hash": "b1457bee7d59f040e20b3276225ebe1b", "timestamp": "", "source": "github", "line_count": 152, "max_line_length": 134, "avg_line_length": 30.585526315789473, "alnum_prop": 0.4805334480533448, "repo_name": "LukasBanana/ForkENGINE", "id": "3c099502541964295a8034ae5aa811780a86a4b5", "size": "4811", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "sources/Daemon/DaemonServerParameters.cpp", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "1439895" }, { "name": "C++", "bytes": "3993678" }, { "name": "CMake", "bytes": "51757" }, { "name": "GLSL", "bytes": "71104" }, { "name": "HLSL", "bytes": "40489" }, { "name": "HTML", "bytes": "481" }, { "name": "PowerShell", "bytes": "79" }, { "name": "Python", "bytes": "11661" }, { "name": "TeX", "bytes": "23660" } ], "symlink_target": "" }
package com.alexbbb.uploadservice.demo; import java.io.File; import java.net.URL; import java.util.UUID; import android.os.Bundle; import android.support.v7.app.ActionBarActivity; import android.util.Log; import android.view.View; import android.widget.Button; import android.widget.EditText; import android.widget.ProgressBar; import android.widget.Toast; import com.alexbbb.uploadservice.AbstractUploadServiceReceiver; import com.alexbbb.uploadservice.ContentType; import com.alexbbb.uploadservice.UploadRequest; import com.alexbbb.uploadservice.UploadService; /** * Activity that demonstrates how to use Android Upload Service. * * @author Alex Gotev * */ public class MainActivity extends ActionBarActivity { private static final String TAG = "AndroidUploadServiceDemo"; private ProgressBar progressBar; private Button uploadButton; private EditText serverUrl; private EditText fileToUpload; private EditText parameterName; private final AbstractUploadServiceReceiver uploadReceiver = new AbstractUploadServiceReceiver() { @Override public void onProgress(String uploadId, int progress) { progressBar.setProgress(progress); Log.i(TAG, "The progress of the upload with ID " + uploadId + " is: " + progress); } @Override public void onError(String uploadId, Exception exception) { progressBar.setProgress(0); String message = "Error in upload with ID: " + uploadId + ". 
" + exception.getLocalizedMessage(); Log.e(TAG, message, exception); } @Override public void onCompleted(String uploadId, int serverResponseCode, String serverResponseMessage) { progressBar.setProgress(0); String message = "Upload with ID " + uploadId + " is completed: " + serverResponseCode + ", " + serverResponseMessage; Log.i(TAG, message); } }; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); // Set your application namespace to avoid conflicts with other apps // using this library UploadService.NAMESPACE = "com.alexbbb"; progressBar = (ProgressBar) findViewById(R.id.uploadProgress); serverUrl = (EditText) findViewById(R.id.serverURL); fileToUpload = (EditText) findViewById(R.id.fileToUpload); parameterName = (EditText) findViewById(R.id.parameterName); uploadButton = (Button) findViewById(R.id.uploadButton); uploadButton.setOnClickListener(new Button.OnClickListener() { @Override public void onClick(View arg0) { onUploadButtonClick(); } }); progressBar.setMax(100); progressBar.setProgress(0); // De-comment this line to enable self-signed SSL certificates in HTTPS connections // WARNING: Do not use in production environment. 
Recommended for development only // AllCertificatesAndHostsTruster.apply(); } @Override protected void onResume() { super.onResume(); uploadReceiver.register(this); } @Override protected void onPause() { super.onPause(); uploadReceiver.unregister(this); } private boolean userInputIsValid(final String serverUrlString, final String fileToUploadPath, final String paramNameString) { if (serverUrlString.length() == 0) { Toast.makeText(this, getString(R.string.provide_valid_server_url), Toast.LENGTH_LONG).show(); return false; } try { new URL(serverUrlString.toString()); } catch (Exception exc) { Toast.makeText(this, getString(R.string.provide_valid_server_url), Toast.LENGTH_LONG).show(); return false; } if (fileToUploadPath.length() == 0) { Toast.makeText(this, getString(R.string.provide_file_to_upload), Toast.LENGTH_LONG).show(); return false; } if (!new File(fileToUploadPath).exists()) { Toast.makeText(this, getString(R.string.file_does_not_exist), Toast.LENGTH_LONG).show(); return false; } if (paramNameString.length() == 0) { Toast.makeText(this, getString(R.string.provide_param_name), Toast.LENGTH_LONG).show(); return false; } return true; } private void onUploadButtonClick() { final String serverUrlString = serverUrl.getText().toString(); final String fileToUploadPath = fileToUpload.getText().toString(); final String paramNameString = parameterName.getText().toString(); if (!userInputIsValid(serverUrlString, fileToUploadPath, paramNameString)) return; final UploadRequest request = new UploadRequest(this, UUID.randomUUID().toString(), serverUrlString); request.addFileToUpload(fileToUploadPath, paramNameString, "test", ContentType.APPLICATION_OCTET_STREAM); request.setNotificationConfig(R.drawable.ic_launcher, getString(R.string.app_name), getString(R.string.uploading), getString(R.string.upload_success), getString(R.string.upload_error), false); try { UploadService.startUpload(request); } catch (Exception exc) { Toast.makeText(this, "Malformed upload request. 
" + exc.getLocalizedMessage(), Toast.LENGTH_SHORT).show(); } } }
{ "content_hash": "9c0b4fde10dc5b9eabec7d19c75ec08f", "timestamp": "", "source": "github", "line_count": 161, "max_line_length": 118, "avg_line_length": 35.13664596273292, "alnum_prop": 0.6515821106593601, "repo_name": "MarsVard/android-upload-service", "id": "617db61963f44078fb9486f88b9dba54479f661a", "size": "5657", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "examples/app/src/com/alexbbb/uploadservice/demo/MainActivity.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "48341" } ], "symlink_target": "" }
namespace Yahoo.Yui.Compressor.Web.Optimization { public class CssCompressorConfig : CompressorConfig { public CssCompressorConfig() { RemoveComments = true; } public bool RemoveComments { get; set; } } }
{ "content_hash": "8d2284beaea50edf356259bb4fd59354", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 55, "avg_line_length": 22.75, "alnum_prop": 0.5897435897435898, "repo_name": "VAllens/Yahoo.YUI.Compressor.NET.Modified", "id": "accf1d8dc19a9364fde512403ef0285e8a8dd942", "size": "275", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Code/Yahoo.Yui.Compressor.Web.Optimization/CssCompressorConfig.cs", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "ASP", "bytes": "14451" }, { "name": "C#", "bytes": "299123" }, { "name": "CSS", "bytes": "141945" }, { "name": "JavaScript", "bytes": "1216486" }, { "name": "Shell", "bytes": "516" } ], "symlink_target": "" }
<ns0:eml xmlns:ns0="eml://ecoinformatics.org/eml-2.1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" packageId="judithk.341.11" scope="system" system="knb" xsi:schemaLocation="eml://ecoinformatics.org/eml-2.1.0 eml.xsd"> <access authSystem="knb" order="denyFirst" scope="document"> <allow> <principal>public</principal> <permission>read</permission> </allow> </access> <dataset scope="document"> <title>Kruger National Park river water quality data</title> <creator id="1086295214645" scope="document"> <organizationName>DWAF</organizationName> <address scope="document"> <country>South Africa</country> </address> </creator> <associatedParty id="1091524025738" scope="document"> <individualName> <givenName>Marica</givenName> <surName>Erasmus</surName> </individualName> <organizationName>DWAF</organizationName> <positionName>Quality</positionName> <electronicMailAddress>maricae@dwaf.gov.za</electronicMailAddress> <onlineUrl>http://www.dwaf.gov.za</onlineUrl> <role>Originator</role> </associatedParty> <abstract> <para>All the rivers that flow through the KNP originate outside and to the west of the knp where they are highlly utilised. They are crucially important for the conservation of the unique natural environments and biodiversity of the KNP.</para> </abstract> <keywordSet> <keyword>water quality</keyword> <keyword>KNP river</keyword> </keywordSet> <keywordSet> <keyword>SANParks, South Africa</keyword> <keyword>Kruger National Park, South Africa</keyword> </keywordSet> <intellectualRights> <para>This data is collected and analysed by the KNp and the passed on to DWAF. 
There are no restrictions on the usage of this data</para> </intellectualRights> <coverage scope="document"> <geographicCoverage scope="document"> <geographicDescription>B9H002 Silwervisdam</geographicDescription> <boundingCoordinates> <westBoundingCoordinate>31.25</westBoundingCoordinate> <eastBoundingCoordinate>31.25</eastBoundingCoordinate> <northBoundingCoordinate>-22.662711</northBoundingCoordinate> <southBoundingCoordinate>-22.662711</southBoundingCoordinate> </boundingCoordinates> </geographicCoverage> <geographicCoverage scope="document"> <geographicDescription>B9H003 Kanniedooddam</geographicDescription> <boundingCoordinates> <westBoundingCoordinate>31.25</westBoundingCoordinate> <eastBoundingCoordinate>31.25</eastBoundingCoordinate> <northBoundingCoordinate>-22.662711</northBoundingCoordinate> <southBoundingCoordinate>-22.662711</southBoundingCoordinate> </boundingCoordinates> </geographicCoverage> <geographicCoverage scope="document"> <geographicDescription>A9H011 Pafuri</geographicDescription> <boundingCoordinates> <westBoundingCoordinate>31.25</westBoundingCoordinate> <eastBoundingCoordinate>31.25</eastBoundingCoordinate> <northBoundingCoordinate>-22.662711</northBoundingCoordinate> <southBoundingCoordinate>-22.662711</southBoundingCoordinate> </boundingCoordinates> </geographicCoverage> <geographicCoverage scope="document"> <geographicDescription>A9H008 Shidzivane</geographicDescription> <boundingCoordinates> <westBoundingCoordinate>31.25</westBoundingCoordinate> <eastBoundingCoordinate>31.25</eastBoundingCoordinate> <northBoundingCoordinate>-22.662711</northBoundingCoordinate> <southBoundingCoordinate>-22.662711</southBoundingCoordinate> </boundingCoordinates> </geographicCoverage> <geographicCoverage scope="document"> <geographicDescription>B8H018 Engelhard dam</geographicDescription> <boundingCoordinates> <westBoundingCoordinate>31.25</westBoundingCoordinate> <eastBoundingCoordinate>31.25</eastBoundingCoordinate> 
<northBoundingCoordinate>-22.662711</northBoundingCoordinate> <southBoundingCoordinate>-22.662711</southBoundingCoordinate> </boundingCoordinates> </geographicCoverage> <geographicCoverage scope="document"> <geographicDescription>B8H028 Mahlangene</geographicDescription> <boundingCoordinates> <westBoundingCoordinate>31.25</westBoundingCoordinate> <eastBoundingCoordinate>31.25</eastBoundingCoordinate> <northBoundingCoordinate>-22.662711</northBoundingCoordinate> <southBoundingCoordinate>-22.662711</southBoundingCoordinate> </boundingCoordinates> </geographicCoverage> <geographicCoverage scope="document"> <geographicDescription>B7H017 Balule</geographicDescription> <boundingCoordinates> <westBoundingCoordinate>31.25</westBoundingCoordinate> <eastBoundingCoordinate>31.25</eastBoundingCoordinate> <northBoundingCoordinate>-22.662711</northBoundingCoordinate> <southBoundingCoordinate>-22.662711</southBoundingCoordinate> </boundingCoordinates> </geographicCoverage> <geographicCoverage scope="document"> <geographicDescription>B7H017 Mamba</geographicDescription> <boundingCoordinates> <westBoundingCoordinate>31.375</westBoundingCoordinate> <eastBoundingCoordinate>31.375</eastBoundingCoordinate> <northBoundingCoordinate>-23.625</northBoundingCoordinate> <southBoundingCoordinate>-23.625</southBoundingCoordinate> </boundingCoordinates> </geographicCoverage> <geographicCoverage scope="document"> <geographicDescription>X3h015 Lower Sabie</geographicDescription> <boundingCoordinates> <westBoundingCoordinate>31.5</westBoundingCoordinate> <eastBoundingCoordinate>31.5</eastBoundingCoordinate> <northBoundingCoordinate>-24.0</northBoundingCoordinate> <southBoundingCoordinate>-24.0</southBoundingCoordinate> </boundingCoordinates> </geographicCoverage> <geographicCoverage scope="document"> <geographicDescription>X3H006 Perry;s Farm</geographicDescription> <boundingCoordinates> <westBoundingCoordinate>31.625</westBoundingCoordinate> <eastBoundingCoordinate>31.625</eastBoundingCoordinate> 
<northBoundingCoordinate>-24.0</northBoundingCoordinate> <southBoundingCoordinate>-24.0</southBoundingCoordinate> </boundingCoordinates> </geographicCoverage> <geographicCoverage scope="document"> <geographicDescription>X2H016 Ten Bosch</geographicDescription> <boundingCoordinates> <westBoundingCoordinate>31.5</westBoundingCoordinate> <eastBoundingCoordinate>31.5</eastBoundingCoordinate> <northBoundingCoordinate>-24.875</northBoundingCoordinate> <southBoundingCoordinate>-24.875</southBoundingCoordinate> </boundingCoordinates> </geographicCoverage> <geographicCoverage scope="document"> <geographicDescription>X2H046 Riverside</geographicDescription> <boundingCoordinates> <westBoundingCoordinate>31.5</westBoundingCoordinate> <eastBoundingCoordinate>31.5</eastBoundingCoordinate> <northBoundingCoordinate>-25.25</northBoundingCoordinate> <southBoundingCoordinate>-25.25</southBoundingCoordinate> </boundingCoordinates> </geographicCoverage> <geographicCoverage scope="document"> <geographicDescription>X2H006 karino</geographicDescription> <boundingCoordinates> <westBoundingCoordinate>31.75</westBoundingCoordinate> <eastBoundingCoordinate>31.75</eastBoundingCoordinate> <northBoundingCoordinate>-25.25</northBoundingCoordinate> <southBoundingCoordinate>-25.25</southBoundingCoordinate> </boundingCoordinates> </geographicCoverage> <temporalCoverage scope="document"> <rangeOfDates> <beginDate> <calendarDate>1983-01-01</calendarDate> </beginDate> <endDate> <calendarDate>2003-01-01</calendarDate> </endDate> </rangeOfDates> </temporalCoverage> </coverage> <contact id="1091523847895" scope="document"> <individualName> <salutation>Mr</salutation> <givenName>Jacque</givenName> <surName>Venter</surName> </individualName> <organizationName>SANParks</organizationName> <positionName>Biotechnician</positionName> <address scope="document"> <country>South Africa</country> </address> <phone phonetype="voice">013 735 6519</phone> 
<electronicMailAddress>jacquev@sanparks.org</electronicMailAddress> </contact> <contact id="1086304783995" scope="document"> <individualName> <givenName>Judith</givenName> <surName>Kruger</surName> </individualName> <organizationName>SANParks</organizationName> <address scope="document"> <deliveryPoint>Private Bag x402</deliveryPoint> <city>Skukuza</city> <postalCode>1350</postalCode> <country>SA</country> </address> <phone phonetype="voice">+0927 13 7354309</phone> <phone phonetype="fax">+0927 13 7354055</phone> <electronicMailAddress>judithk@sanparks.org</electronicMailAddress> </contact> <methods> <methodStep> <description> <section> <title>Sampling and analytical methods</title> <para>Rivers are sampled on a two-weekly basis at a number of stations. the samples are taken and analysed according to the methods applied by the Department of Water Affairs and Forestry(DWAF) fro their national Water Quality Monitoring Programme. the samples are sent to the Institute for Resource Quality Studies in pretoria for analyses.</para> </section> </description> </methodStep> </methods> <dataTable id="1211440609109" scope="document"> <entityName>Waterqualitystations.txt</entityName> <physical id="1211440608357" scope="document"> <objectName>Waterqualitystations.txt</objectName> <size unit="byte">465</size> <dataFormat> <textFormat> <numHeaderLines>1</numHeaderLines> <recordDelimiter>#x0A</recordDelimiter> <attributeOrientation>column</attributeOrientation> <simpleDelimited> <fieldDelimiter>#x09</fieldDelimiter> </simpleDelimited> </textFormat> </dataFormat> <distribution scope="document"> <online> <url function="download">ecogrid://knb/judithk.906.1</url> </online> </distribution> </physical> <attributeList> <attribute id="1211440609124" scope="document"> <attributeName>Stationcode</attributeName> <attributeDefinition>Station code</attributeDefinition> <measurementScale> <nominal> <nonNumericDomain> <textDomain> <definition>Codes used for the quality stations</definition> 
</textDomain> </nonNumericDomain> </nominal> </measurementScale> </attribute> <attribute id="1211440609140" scope="document"> <attributeName>name</attributeName> <attributeDefinition>Name of the guaging weir</attributeDefinition> <measurementScale> <nominal> <nonNumericDomain> <textDomain> <definition>Names</definition> </textDomain> </nonNumericDomain> </nominal> </measurementScale> </attribute> <attribute id="1211440609156" scope="document"> <attributeName>river</attributeName> <attributeDefinition>River on which the gauging weir occurs</attributeDefinition> <measurementScale> <nominal> <nonNumericDomain> <textDomain> <definition>River names of the KNP</definition> </textDomain> </nonNumericDomain> </nominal> </measurementScale> </attribute> </attributeList> <numberOfRecords>19</numberOfRecords> </dataTable> <dataTable id="1211441011767" scope="document"> <entityName>WaterqualityTPCvalues.txt</entityName> <physical id="1211441011235" scope="document"> <objectName>WaterqualityTPCvalues.txt</objectName> <size unit="byte">627</size> <dataFormat> <textFormat> <numHeaderLines>1</numHeaderLines> <recordDelimiter>#x0A</recordDelimiter> <attributeOrientation>column</attributeOrientation> <simpleDelimited> <fieldDelimiter>#x09</fieldDelimiter> </simpleDelimited> </textFormat> </dataFormat> <distribution scope="document"> <online> <url function="download">ecogrid://knb/judithk.907.1</url> </online> </distribution> </physical> <attributeList> <attribute id="1211441011783" scope="document"> <attributeName>Variable</attributeName> <attributeDefinition>Variable for which the TPC set</attributeDefinition> <measurementScale> <nominal> <nonNumericDomain> <textDomain> <definition>Variables for which TPC limits have been set</definition> </textDomain> </nonNumericDomain> </nominal> </measurementScale> </attribute> <attribute id="1211441011798" scope="document"> <attributeName>Luvuvhu</attributeName> <attributeDefinition>Limits for each variable for the Luvuvhu 
River</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>dimensionless</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211441011814" scope="document"> <attributeName>Shingwedzi</attributeName> <attributeDefinition>Maximum limits for each variable for station along the Shingwedzi river</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>dimensionless</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211441011830" scope="document"> <attributeName>Letaba</attributeName> <attributeDefinition>Maximum limits for each variable for stations along the Letabab River</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>dimensionless</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211441011845" scope="document"> <attributeName>Olifants</attributeName> <attributeDefinition>Maximum limits for each variable for stations along the Olifants River</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>dimensionless</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211441011861" scope="document"> <attributeName>Sabie</attributeName> <attributeDefinition>Maximum limits for each variable for stations along the Sabie River</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>dimensionless</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211441011877" scope="document"> <attributeName>Crocodile</attributeName> 
<attributeDefinition>Maximum limits for variables for stations along the Crocodile River</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>dimensionless</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> </attributeList> <numberOfRecords>22</numberOfRecords> </dataTable> <dataTable id="1211441579326" scope="document"> <entityName>WaterQaulitypre2000.txt</entityName> <physical id="1211441578765" scope="document"> <objectName>waterQaulitypre2000.txt</objectName> <size unit="byte">485150</size> <dataFormat> <textFormat> <numHeaderLines>1</numHeaderLines> <recordDelimiter>#x0A</recordDelimiter> <attributeOrientation>column</attributeOrientation> <simpleDelimited> <fieldDelimiter>,</fieldDelimiter> </simpleDelimited> </textFormat> </dataFormat> <distribution scope="document"> <online> <url function="download">ecogrid://knb/judithk.908.1</url> </online> </distribution> </physical> <attributeList> <attribute id="1211441579342" scope="document"> <attributeName>DATUM</attributeName> <attributeDefinition>Date on which the variable was recorded</attributeDefinition> <measurementScale> <dateTime> <formatString>yy.mm.dd</formatString> <dateTimePrecision>1</dateTimePrecision> </dateTime> </measurementScale> </attribute> <attribute id="1211441579358" scope="document"> <attributeName>STATION</attributeName> <attributeDefinition>Station at which the variable was recorded</attributeDefinition> <measurementScale> <nominal> <nonNumericDomain> <enumeratedDomain enforced="yes"> <entityCodeList> <entityReference>1211440609109</entityReference> <valueAttributeReference>1211440609124</valueAttributeReference> <definitionAttributeReference>1211440609140</definitionAttributeReference> </entityCodeList> </enumeratedDomain> </nonNumericDomain> </nominal> </measurementScale> </attribute> <attribute id="1211441579373" scope="document"> <attributeName>EC</attributeName> 
<attributeDefinition>Electrical conductivity</attributeDefinition> <measurementScale> <ratio> <unit> <customUnit>ms/m</customUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211441579389" scope="document"> <attributeName>PH</attributeName> <attributeDefinition>Ph</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>dimensionless</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211441579404" scope="document"> <attributeName>NA</attributeName> <attributeDefinition>Sodium</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>.1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211441579420" scope="document"> <attributeName>MG</attributeName> <attributeDefinition>Magnesium</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>.1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211441579435" scope="document"> <attributeName>CA</attributeName> <attributeDefinition>Calcium</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>.1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211441579451" scope="document"> <attributeName>F</attributeName> <attributeDefinition>Flouride</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>.1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> 
</measurementScale> </attribute> <attribute id="1211441579467" scope="document"> <attributeName>CL</attributeName> <attributeDefinition>Chloride</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>.1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211441579482" scope="document"> <attributeName>NO3</attributeName> <attributeDefinition>Nitrate</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>.1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211441579498" scope="document"> <attributeName>SO4</attributeName> <attributeDefinition>Sulphate</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>.1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211441579513" scope="document"> <attributeName>PO4P</attributeName> <attributeDefinition>Phosphate</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>.1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211441579529" scope="document"> <attributeName>SI</attributeName> <attributeDefinition>Silicon</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>.1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211441579545" scope="document"> <attributeName>K</attributeName> <attributeDefinition>Potassium</attributeDefinition> <measurementScale> <ratio> <unit> 
<standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>.1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211441579560" scope="document"> <attributeName>NH4</attributeName> <attributeDefinition>Ammonium</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>.1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211441579576" scope="document"> <attributeName>TDS</attributeName> <attributeDefinition>Total dissolved solids</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>.1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211441579591" scope="document"> <attributeName>NH3(ug/l)</attributeName> <attributeDefinition>Ammonium</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>microgramsPerLiter</standardUnit> </unit> <precision>.1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211441579607" scope="document"> <attributeName>NO3NO2N</attributeName> <attributeDefinition>Nitrate</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>.1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> </attributeList> <numberOfRecords>4724</numberOfRecords> </dataTable> <dataTable id="1211442099996" scope="document"> <entityName>Waterquality2000_2004.txt</entityName> <physical id="1211442099480" scope="document"> <objectName>waterquality2000_2004.txt</objectName> <size unit="byte">89081</size> <dataFormat> <textFormat> <numHeaderLines>1</numHeaderLines> 
<recordDelimiter>#x0A</recordDelimiter> <attributeOrientation>column</attributeOrientation> <simpleDelimited> <fieldDelimiter>,</fieldDelimiter> </simpleDelimited> </textFormat> </dataFormat> <distribution scope="document"> <online> <url function="download">ecogrid://knb/judithk.909.1</url> </online> </distribution> </physical> <attributeList> <attribute id="1211442100011" scope="document"> <attributeName>DATUM</attributeName> <attributeDefinition>Date</attributeDefinition> <measurementScale> <dateTime> <formatString>yy.mm.dd</formatString> <dateTimePrecision>1</dateTimePrecision> </dateTime> </measurementScale> </attribute> <attribute id="1211442100027" scope="document"> <attributeName>STATION</attributeName> <attributeDefinition>Station at which variable was recorded</attributeDefinition> <measurementScale> <nominal> <nonNumericDomain> <enumeratedDomain enforced="yes"> <entityCodeList> <entityReference>1211440609109</entityReference> <valueAttributeReference>1211440609124</valueAttributeReference> <definitionAttributeReference>1211440609140</definitionAttributeReference> </entityCodeList> </enumeratedDomain> </nonNumericDomain> </nominal> </measurementScale> </attribute> <attribute id="1211442100043" scope="document"> <attributeName>EC</attributeName> <attributeDefinition>Electrical conductivity</attributeDefinition> <measurementScale> <ratio> <unit> <customUnit>ms/m</customUnit> </unit> <precision>.1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211442100058" scope="document"> <attributeName>PH</attributeName> <attributeDefinition>PH</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>dimensionless</standardUnit> </unit> <precision>.1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211442100074" scope="document"> <attributeName>NA</attributeName> 
<attributeDefinition>Sodium</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211442100090" scope="document"> <attributeName>MG</attributeName> <attributeDefinition>Magnesium</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211442100105" scope="document"> <attributeName>CA</attributeName> <attributeDefinition>Calcium</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211442100121" scope="document"> <attributeName>F</attributeName> <attributeDefinition>Flouride</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>.1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211442100136" scope="document"> <attributeName>CL</attributeName> <attributeDefinition>Chloride</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211442100152" scope="document"> <attributeName>NO3</attributeName> <attributeDefinition>Nitrate</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>.1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> 
</ratio> </measurementScale> </attribute> <attribute id="1211442100168" scope="document"> <attributeName>SO4</attributeName> <attributeDefinition>Sulphate</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211442100183" scope="document"> <attributeName>PO4P</attributeName> <attributeDefinition>Phosphate</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>.01</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211442100199" scope="document"> <attributeName>SI</attributeName> <attributeDefinition>Silicon</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211442100215" scope="document"> <attributeName>K</attributeName> <attributeDefinition>Potassium</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211442100230" scope="document"> <attributeName>NH4</attributeName> <attributeDefinition>Ammonium</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>.01</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211442100246" scope="document"> <attributeName>TDS</attributeName> <attributeDefinition>Total dissolved solids</attributeDefinition> <measurementScale> <ratio> 
<unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211442100261" scope="document"> <attributeName>NH3(ug/l)</attributeName> <attributeDefinition>Ammonium</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>microgramsPerLiter</standardUnit> </unit> <precision>.01</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211442100277" scope="document"> <attributeName>NO3NO2N</attributeName> <attributeDefinition>Ammonium nitrate</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> </attributeList> <numberOfRecords>887</numberOfRecords> </dataTable> <dataTable id="1211449072123" scope="document"> <entityName>Waterquality2005-2007.txt</entityName> <physical id="1211449071654" scope="document"> <objectName>waterquality2005-2007.txt</objectName> <size unit="byte">35264</size> <dataFormat> <textFormat> <numHeaderLines>1</numHeaderLines> <recordDelimiter>#x0A</recordDelimiter> <attributeOrientation>column</attributeOrientation> <simpleDelimited> <fieldDelimiter>#x09</fieldDelimiter> </simpleDelimited> </textFormat> </dataFormat> <distribution scope="document"> <online> <url function="download">ecogrid://knb/judithk.910.1</url> </online> </distribution> </physical> <attributeList> <attribute id="1211449072138" scope="document"> <attributeName>datum</attributeName> <attributeDefinition>Date</attributeDefinition> <measurementScale> <dateTime> <formatString>yy.mm.dd</formatString> <dateTimePrecision>1</dateTimePrecision> </dateTime> </measurementScale> </attribute> <attribute id="1211449072154" scope="document"> 
<attributeName>Station</attributeName> <attributeDefinition>Station identifier</attributeDefinition> <measurementScale> <nominal> <nonNumericDomain> <enumeratedDomain enforced="yes"> <entityCodeList> <entityReference>1211440609109</entityReference> <valueAttributeReference>1211440609124</valueAttributeReference> <definitionAttributeReference>1211440609140</definitionAttributeReference> </entityCodeList> </enumeratedDomain> </nonNumericDomain> </nominal> </measurementScale> </attribute> <attribute id="1211449072169" scope="document"> <attributeName>PH</attributeName> <attributeDefinition>PH</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>dimensionless</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211449072185" scope="document"> <attributeName>EC</attributeName> <attributeDefinition>Electric conductivity</attributeDefinition> <measurementScale> <ratio> <unit> <customUnit>ms/m</customUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211449072201" scope="document"> <attributeName>NA</attributeName> <attributeDefinition>Sodium</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerCubicMeter</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211449072216" scope="document"> <attributeName>MG</attributeName> <attributeDefinition>magnesium</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211449072232" scope="document"> <attributeName>CA</attributeName> 
<attributeDefinition>Calcium</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211449072248" scope="document"> <attributeName>F</attributeName> <attributeDefinition>Flouride</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211449072263" scope="document"> <attributeName>CL</attributeName> <attributeDefinition>Chloride</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211449072279" scope="document"> <attributeName>NO3</attributeName> <attributeDefinition>Nitrate</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211449072294" scope="document"> <attributeName>SO4</attributeName> <attributeDefinition>Sulphate</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211449072310" scope="document"> <attributeName>PO4P</attributeName> <attributeDefinition>Phosphate</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>.001</precision> <numericDomain> <numberType>real</numberType> 
</numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211449072326" scope="document"> <attributeName>CACO3</attributeName> <attributeDefinition>Calcium carbonate</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>natural</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211449072341" scope="document"> <attributeName>Si</attributeName> <attributeDefinition>Silicon</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211449072357" scope="document"> <attributeName>NH4</attributeName> <attributeDefinition>Ammonium</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>.01</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211449072373" scope="document"> <attributeName>TDS</attributeName> <attributeDefinition>Total dissolved solids</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>milligramsPerLiter</standardUnit> </unit> <precision>.1</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> <attribute id="1211449072388" scope="document"> <attributeName>NH3(ug/l)</attributeName> <attributeDefinition>Ammoniak</attributeDefinition> <measurementScale> <ratio> <unit> <standardUnit>microgramsPerLiter</standardUnit> </unit> <precision>.01</precision> <numericDomain> <numberType>real</numberType> </numericDomain> </ratio> </measurementScale> </attribute> </attributeList> <numberOfRecords>482</numberOfRecords> </dataTable> </dataset> <additionalMetadata> <metadata> <unitList> <unit 
id="Conductivity" name="Conductivity" parentSI="metersPerSecond" unitType="ms/m"> <description>Conductivity</description> </unit> </unitList> </metadata> </additionalMetadata> <additionalMetadata> <metadata> <unitList> <unit id="mg/l" name="mg/l" parentSI="ampere" unitType="mg/l"> <description>mg/l</description> </unit> </unitList> </metadata> </additionalMetadata> <additionalMetadata> <metadata> <unitList> <unit id="Ug/l" name="Ug/l" parentSI="ampere" unitType="micogramsperliter"> <description>Ug/l</description> </unit> </unitList> </metadata> </additionalMetadata> <additionalMetadata> <metadata> <unitList> <unit id="ms/m" multiplerToSI="/1000" name="ms/m" parentSI="siemen" unitType="conductance"> <description>ms/m</description> </unit> </unitList> </metadata> </additionalMetadata> </ns0:eml>
{ "content_hash": "6fc8570c44821ef9f965a4d80c9e2ce4", "timestamp": "", "source": "github", "line_count": 879, "max_line_length": 349, "avg_line_length": 42.10125142207053, "alnum_prop": 0.7842570324533197, "repo_name": "NCEAS/metadig", "id": "79ac0916834d2de4730c397593d469d49bffc8e7", "size": "37007", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "results/SANPARKS/Ecological_Metadata_Language_version_2.1.0/xml/00259-metadata.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "2361" }, { "name": "Python", "bytes": "42696" }, { "name": "Shell", "bytes": "5798" }, { "name": "XSLT", "bytes": "207495" } ], "symlink_target": "" }
<?php namespace Contagric\BackendBundle\Admin; use Sonata\AdminBundle\Admin\Admin; use Sonata\AdminBundle\Admin\AdminInterface; use Sonata\AdminBundle\Form\FormMapper; use Sonata\AdminBundle\Datagrid\DatagridMapper; use Sonata\AdminBundle\Datagrid\ListMapper; use Sonata\AdminBundle\Show\ShowMapper; use Sonata\AdminBundle\Route\RouteCollection; use Knp\Menu\ItemInterface as MenuItemInterface; use Contagric\BackendBundle\Entity\RelTrabajoTrabajador; class RelTrabajoTrabajadorAdmin extends Admin { protected $parentAssociationMapping = 'campanya'; protected $formOptions = array( 'cascade_validation' => true ); /** * @param \Sonata\AdminBundle\Show\ShowMapper $showMapper * * @return void */ protected function configureShowField(ShowMapper $showMapper) { $showMapper ->with('Coste Trabajadores') ->add('trabajador') ->add('trabajo') ->add('campanya') ->add('horas') ->add('coste') ->add('comentario') ->add('fecha') ->end(); } /** * @param \Sonata\AdminBundle\Form\FormMapper $formMapper * * @return void */ protected function configureFormFields(FormMapper $formMapper) { if (!$this->isChild() && $this->configurationPool->getContainer()->get('request')->get('_route') != "sonata_admin_append_form_element") { throw new \RuntimeException('El gasto de trabajo necesita estar associado a una Campaña'); } else { $formMapper ->with('Coste Trabajadores') ->add('trabajador', 'sonata_type_model_list') ->add('trabajo', 'sonata_type_model_list') ->add('horas','number', array('required' => true, 'precision' => '2')) ->add('coste', 'money', array('required' => true, 'precision' => '2')) ->add('comentario', 'textarea', array('required' => false)) ->add('fecha', 'datePicker') ->end() ; } } /** * @param \Sonata\AdminBundle\Datagrid\ListMapper $listMapper * * @return void */ protected function configureListFields(ListMapper $listMapper) { $listMapper ->addIdentifier('trabajador','trabajo') ->add('trabajo') ->add('horas') ->add('coste') ->add('comentario') ->add('fecha', 'datetime', 
array('format' => 'Y-m-d')) ->add('_action', 'actions', array( 'actions' => array( 'edit' => array(), 'delete' => array(), ) )) ; } /** * @param \Sonata\AdminBundle\Datagrid\DatagridMapper $datagridMapper * * @return void */ protected function configureDatagridFilters(DatagridMapper $datagridMapper) { $datagridMapper ->add('trabajador.nombre', 'doctrine_orm_string', array()) ->add('trabajo.nombre', 'doctrine_orm_string', array()) ->add('horas') ->add('coste') ->add('comentario') ->add('fecha', 'stnw_date_filter'); } } ?>
{ "content_hash": "00fbb820815f40bd0cccee882ade16e9", "timestamp": "", "source": "github", "line_count": 109, "max_line_length": 143, "avg_line_length": 30.541284403669724, "alnum_prop": 0.542805647341544, "repo_name": "AlexAznar/ContabilidadAgricola", "id": "0d778abef7b208c609a041617a41ba092884b1d5", "size": "3330", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Contagric/BackendBundle/Admin/RelTrabajoTrabajadorAdmin.php", "mode": "33261", "license": "mit", "language": [ { "name": "CSS", "bytes": "2204" }, { "name": "HTML", "bytes": "20291" }, { "name": "PHP", "bytes": "165614" }, { "name": "Ruby", "bytes": "4116" } ], "symlink_target": "" }
"use strict" // A custom Tool for shifting the end point of a Link to be anywhere along the edges of the port. /** * This constructor produces a tool for shifting the end of a link; * use it in a diagram.toolManager.mouseDownTools list: * <pre>myDiagram.toolManager.mouseDownTools.add(new LinkShiftingTool());</pre> * @constructor * @extends Tool * @class */ function LinkShiftingTool() { go.Tool.call(this); this.name = "LinkShifting"; // these are archetypes for the two shift handles, one at each end of the Link: var h = new go.Shape(); h.geometryString = "F1 M0 0 L8 0 M8 4 L0 4"; h.fill = null; h.stroke = "dodgerblue"; h.background = "lightblue"; h.cursor = "pointer"; h.segmentIndex = 0; h.segmentFraction = 1; h.segmentOrientation = go.Link.OrientAlong; /** @type {GraphObject} */ this._fromHandleArchetype = h; h = new go.Shape(); h.geometryString = "F1 M0 0 L8 0 M8 4 L0 4"; h.fill = null; h.stroke = "dodgerblue"; h.background = "lightblue"; h.cursor = "pointer"; h.segmentIndex = -1; h.segmentFraction = 1; h.segmentOrientation = go.Link.OrientAlong; /** @type {GraphObject} */ this._toHandleArchetype = h; // transient state /** @type {GraphObject} */ this._handle = null; /** @type {List} */ this._originalPoints = null; } go.Diagram.inherit(LinkShiftingTool, go.Tool); /** * @this {LinkShiftingTool} * @param {Part} part */ LinkShiftingTool.prototype.updateAdornments = function(part) { if (part === null || !(part instanceof go.Link)) return; // this tool only applies to Links var link = part; // show handles if link is selected, remove them if no longer selected var category = "LinkShiftingFrom"; var adornment = null; if (link.isSelected && !this.diagram.isReadOnly) { var selelt = link.selectionObject; if (selelt !== null && link.actualBounds.isReal() && link.isVisible() && selelt.actualBounds.isReal() && selelt.isVisibleObject()) { var spot = link.computeSpot(true); if (spot.isSide() || spot.isSpot()) { adornment = link.findAdornment(category); if (adornment === null) { 
adornment = this.makeAdornment(selelt, false); adornment.category = category; link.addAdornment(category, adornment); } } } } if (adornment === null) link.removeAdornment(category); category = "LinkShiftingTo"; adornment = null; if (link.isSelected && !this.diagram.isReadOnly) { var selelt = link.selectionObject; if (selelt !== null && link.actualBounds.isReal() && link.isVisible() && selelt.actualBounds.isReal() && selelt.isVisibleObject()) { var spot = link.computeSpot(false); if (spot.isSide() || spot.isSpot()) { adornment = link.findAdornment(category); if (adornment === null) { adornment = this.makeAdornment(selelt, true); adornment.category = category; link.addAdornment(category, adornment); } } } } if (adornment === null) link.removeAdornment(category); }; /** * @this {LinkShiftingTool} * @param {GraphObject} selelt the {@link GraphObject} of the {@link Link} being shifted. * @param {boolean} toend * @return {Adornment} */ LinkShiftingTool.prototype.makeAdornment = function(selelt, toend) { var adornment = new go.Adornment(); adornment.type = go.Panel.Link; var h = (toend ? 
this._toHandleArchetype : this._fromHandleArchetype); if (h !== null) { // add a single handle for shifting at one end adornment.add(h.copy()); } adornment.adornedObject = selelt; return adornment; }; /** * @this {LinkShiftingTool} * @return {boolean} */ LinkShiftingTool.prototype.canStart = function() { if (!this.isEnabled) return false; var diagram = this.diagram; if (diagram === null || diagram.isReadOnly || diagram.isModelReadOnly) return false; if (!diagram.lastInput.left) return false; var h = this.findToolHandleAt(diagram.firstInput.documentPoint, "LinkShiftingFrom"); if (h === null) h = this.findToolHandleAt(diagram.firstInput.documentPoint, "LinkShiftingTo"); return (h !== null); } /** * @this {LinkShiftingTool} */ LinkShiftingTool.prototype.doActivate = function() { var diagram = this.diagram; if (diagram === null) return; var h = this.findToolHandleAt(diagram.firstInput.documentPoint, "LinkShiftingFrom"); if (h === null) h = this.findToolHandleAt(diagram.firstInput.documentPoint, "LinkShiftingTo"); if (h === null) return; var ad = h.part; var link = ad.adornedObject.part; if (!(link instanceof go.Link)) return; this._handle = h; this._originalPoints = link.points.copy(); this.startTransaction(this.name); diagram.isMouseCaptured = true; diagram.currentCursor = 'pointer'; this.isActive = true; }; /** * @this {LinkShiftingTool} */ LinkShiftingTool.prototype.doDeactivate = function() { this.isActive = false; var diagram = this.diagram; if (diagram === null) return; diagram.isMouseCaptured = false; diagram.currentCursor = ''; this.stopTransaction(); }; /** * Clean up tool state. * @this {LinkShiftingTool} */ LinkShiftingTool.prototype.doStop = function() { this._handle = null; this._originalPoints = null; }; /** * Clean up tool state. 
* @this {LinkShiftingTool} */ LinkShiftingTool.prototype.doCancel = function() { var ad = this._handle.part; var link = ad.adornedObject.part; link.points = this._originalPoints; this.stopTool(); }; /** * @this {LinkShiftingTool} */ LinkShiftingTool.prototype.doMouseMove = function() { if (this.isActive) { this.doReshape(this.diagram.lastInput.documentPoint); } }; /** * @this {LinkShiftingTool} */ LinkShiftingTool.prototype.doMouseUp = function() { if (this.isActive) { this.doReshape(this.diagram.lastInput.documentPoint); this.transactionResult = this.name; } this.stopTool(); }; /** * @this {LinkShiftingTool} * @param {Point} pt */ LinkShiftingTool.prototype.doReshape = function(pt) { var ad = this._handle.part; var link = ad.adornedObject.part; var fromend = ad.category === "LinkShiftingFrom"; var port = null; if (fromend) { port = link.fromPort; } else { port = link.toPort; } var portb = new go.Rect(port.getDocumentPoint(go.Spot.TopLeft), port.getDocumentPoint(go.Spot.BottomRight)); // determine new connection point based on closest point to bounds of port property var x = portb.width > 0 ? (pt.x - portb.x) / portb.width : 0; var y = portb.height > 0 ? (pt.y - portb.y) / portb.height : 0; var sx = undefined; var sy = undefined; if (x <= 0) { sx = 0; if (y <= 0) { sy = 0; } else if (y >= 1) { sy = 1; } else { sy = y; } } else if (x >= 1) { sx = 1; if (y <= 0) { sy = 0; } else if (y >= 1) { sy = 1; } else { sy = y; } } else { if (y <= 0) { sx = x; sy = 0; } else if (y >= 1) { sx = x; sy = 1; } else { // in the middle if (x > y) { if (x > 1 - y) { sx = 1; // right side sy = y; } else { sx = x; sy = 0; // top side } } else { // y <= x if (x > 1 - y) { sx = x; sy = 1; // bottom side } else { sx = 0; // left side sy = y; } } } } if (sx !== undefined && sy !== undefined) { if (fromend) { link.fromSpot = new go.Spot(sx, sy); } else { link.toSpot = new go.Spot(sx, sy); } } };
{ "content_hash": "a2438f3c8db5ab944fdc711b58b07631", "timestamp": "", "source": "github", "line_count": 281, "max_line_length": 97, "avg_line_length": 26.82918149466192, "alnum_prop": 0.6222310651280011, "repo_name": "dads-software-brotherhood/sekc", "id": "14701c8a61a8d8549bf6b57e5f0b1a0916e61ca6", "size": "7628", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/webapp/bower_components/goJs/extensions/LinkShiftingTool.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "5006" }, { "name": "CSS", "bytes": "695562" }, { "name": "Gherkin", "bytes": "813" }, { "name": "HTML", "bytes": "566875" }, { "name": "Java", "bytes": "675924" }, { "name": "JavaScript", "bytes": "297824" }, { "name": "Shell", "bytes": "10167" } ], "symlink_target": "" }
@interface APHDashboardGraphTableViewCell : APCDashboardGraphTableViewCell @property (strong, nonatomic) IBOutletCollection(UIView) NSArray *tintViews; @property (nonatomic) BOOL showMedicationLegend; + (CGFloat)medicationLegendContainerHeight; @end
{ "content_hash": "6e2945a1ca21eca87b23feb32ba7b312", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 76, "avg_line_length": 28.11111111111111, "alnum_prop": 0.8458498023715415, "repo_name": "Erin-Mounts/mPowerSDK", "id": "e74d5b26a4c7d47d79a4f13c98862817b6fdafdd", "size": "2033", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "mPowerSDK/Dashboard/APHDashboardGraphTableViewCell.h", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "HTML", "bytes": "16512" }, { "name": "Objective-C", "bytes": "670670" }, { "name": "Swift", "bytes": "2379" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>com.maxdemarzi</groupId> <artifactId>roots</artifactId> <version>1.0</version> <properties> <neo4j.version>2.3.2</neo4j.version> <guava.version>19.0</guava.version> <caffeine.version>2.1.0</caffeine.version> </properties> <dependencies> <dependency> <groupId>com.google.guava</groupId> <artifactId>guava</artifactId> <version>${guava.version}</version> </dependency> <dependency> <groupId>com.github.ben-manes.caffeine</groupId> <artifactId>caffeine</artifactId> <version>${caffeine.version}</version> </dependency> <dependency> <groupId>org.neo4j</groupId> <artifactId>neo4j</artifactId> <version>${neo4j.version}</version> </dependency> <dependency> <groupId>org.neo4j</groupId> <artifactId>neo4j-kernel</artifactId> <type>test-jar</type> <version>${neo4j.version}</version> </dependency> <dependency> <groupId>org.neo4j</groupId> <artifactId>server-api</artifactId> <version>${neo4j.version}</version> </dependency> <dependency> <groupId>org.neo4j.app</groupId> <artifactId>neo4j-server</artifactId> <version>${neo4j.version}</version> </dependency> <dependency> <groupId>org.neo4j</groupId> <artifactId>neo4j-kernel</artifactId> <version>${neo4j.version}</version> <type>test-jar</type> <scope>test</scope> </dependency> <dependency> <groupId>org.neo4j</groupId> <artifactId>neo4j-io</artifactId> <version>${neo4j.version}</version> <type>test-jar</type> <scope>test</scope> </dependency> <dependency> <groupId>org.neo4j.app</groupId> <artifactId>neo4j-server</artifactId> <version>${neo4j.version}</version> <type>test-jar</type> </dependency> <dependency> <groupId>org.neo4j.test</groupId> <artifactId>neo4j-harness</artifactId> <version>${neo4j.version}</version> <scope>test</scope> </dependency> 
<dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <version>4.9</version> </dependency> </dependencies> <build> <plugins> <plugin> <artifactId>maven-compiler-plugin</artifactId> <configuration> <source>1.8</source> <target>1.8</target> </configuration> </plugin> </plugins> </build> </project>
{ "content_hash": "a11406ea6e76246bf2b1103d03b08c59", "timestamp": "", "source": "github", "line_count": 93, "max_line_length": 108, "avg_line_length": 34.12903225806452, "alnum_prop": 0.5431632010081916, "repo_name": "maxdemarzi/neo_roots", "id": "7c8a1df9a4b3827cac5722ceddeec1c771e8c64a", "size": "3174", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pom.xml", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "20917" } ], "symlink_target": "" }
package pgx_test import ( "bytes" "github.com/flynn/flynn-discovery/Godeps/_workspace/src/github.com/jackc/pgx" "strings" "testing" "time" ) func TestConnQueryScan(t *testing.T) { t.Parallel() conn := mustConnect(t, *defaultConnConfig) defer closeConn(t, conn) var sum, rowCount int32 rows, err := conn.Query("select generate_series(1,$1)", 10) if err != nil { t.Fatalf("conn.Query failed: ", err) } defer rows.Close() for rows.Next() { var n int32 rows.Scan(&n) sum += n rowCount++ } if rows.Err() != nil { t.Fatalf("conn.Query failed: ", err) } if rowCount != 10 { t.Error("Select called onDataRow wrong number of times") } if sum != 55 { t.Error("Wrong values returned") } } func TestConnQueryValues(t *testing.T) { t.Parallel() conn := mustConnect(t, *defaultConnConfig) defer closeConn(t, conn) var rowCount int32 rows, err := conn.Query("select 'foo', n, null from generate_series(1,$1) n", 10) if err != nil { t.Fatalf("conn.Query failed: ", err) } defer rows.Close() for rows.Next() { rowCount++ values, err := rows.Values() if err != nil { t.Fatalf("rows.Values failed: %v", err) } if len(values) != 3 { t.Errorf("Expected rows.Values to return 3 values, but it returned %d", len(values)) } if values[0] != "foo" { t.Errorf(`Expected values[0] to be "foo", but it was %v`, values[0]) } if values[0] != "foo" { t.Errorf(`Expected values[0] to be "foo", but it was %v`, values[0]) } if values[1] != rowCount { t.Errorf(`Expected values[1] to be %d, but it was %d`, rowCount, values[1]) } if values[2] != nil { t.Errorf(`Expected values[2] to be %d, but it was %d`, nil, values[2]) } } if rows.Err() != nil { t.Fatalf("conn.Query failed: ", err) } if rowCount != 10 { t.Error("Select called onDataRow wrong number of times") } } // Test that a connection stays valid when query results are closed early func TestConnQueryCloseEarly(t *testing.T) { t.Parallel() conn := mustConnect(t, *defaultConnConfig) defer closeConn(t, conn) // Immediately close query without reading any rows rows, err := 
conn.Query("select generate_series(1,$1)", 10) if err != nil { t.Fatalf("conn.Query failed: ", err) } rows.Close() ensureConnValid(t, conn) // Read partial response then close rows, err = conn.Query("select generate_series(1,$1)", 10) if err != nil { t.Fatalf("conn.Query failed: ", err) } ok := rows.Next() if !ok { t.Fatal("rows.Next terminated early") } var n int32 rows.Scan(&n) if n != 1 { t.Fatalf("Expected 1 from first row, but got %v", n) } rows.Close() ensureConnValid(t, conn) } // Test that a connection stays valid when query results read incorrectly func TestConnQueryReadWrongTypeError(t *testing.T) { t.Parallel() conn := mustConnect(t, *defaultConnConfig) defer closeConn(t, conn) // Read a single value incorrectly rows, err := conn.Query("select generate_series(1,$1)", 10) if err != nil { t.Fatalf("conn.Query failed: ", err) } rowsRead := 0 for rows.Next() { var t time.Time rows.Scan(&t) rowsRead++ } if rowsRead != 1 { t.Fatalf("Expected error to cause only 1 row to be read, but %d were read", rowsRead) } if rows.Err() == nil { t.Fatal("Expected Rows to have an error after an improper read but it didn't") } ensureConnValid(t, conn) } // Test that a connection stays valid when query results read incorrectly func TestConnQueryReadTooManyValues(t *testing.T) { t.Parallel() conn := mustConnect(t, *defaultConnConfig) defer closeConn(t, conn) // Read too many values rows, err := conn.Query("select generate_series(1,$1)", 10) if err != nil { t.Fatalf("conn.Query failed: ", err) } rowsRead := 0 for rows.Next() { var n, m int32 rows.Scan(&n, &m) rowsRead++ } if rowsRead != 1 { t.Fatalf("Expected error to cause only 1 row to be read, but %d were read", rowsRead) } if rows.Err() == nil { t.Fatal("Expected Rows to have an error after an improper read but it didn't") } ensureConnValid(t, conn) } func TestConnQueryScanner(t *testing.T) { t.Parallel() conn := mustConnect(t, *defaultConnConfig) defer closeConn(t, conn) rows, err := conn.Query("select null::int8, 1::int8") 
if err != nil { t.Fatalf("conn.Query failed: ", err) } ok := rows.Next() if !ok { t.Fatal("rows.Next terminated early") } var n, m pgx.NullInt64 err = rows.Scan(&n, &m) if err != nil { t.Fatalf("rows.Scan failed: ", err) } rows.Close() if n.Valid { t.Error("Null should not be valid, but it was") } if !m.Valid { t.Error("1 should be valid, but it wasn't") } if m.Int64 != 1 { t.Errorf("m.Int64 should have been 1, but it was %v", m.Int64) } ensureConnValid(t, conn) } func TestConnQueryEncoder(t *testing.T) { t.Parallel() conn := mustConnect(t, *defaultConnConfig) defer closeConn(t, conn) n := pgx.NullInt64{Int64: 1, Valid: true} rows, err := conn.Query("select $1::int8", &n) if err != nil { t.Fatalf("conn.Query failed: ", err) } ok := rows.Next() if !ok { t.Fatal("rows.Next terminated early") } var m pgx.NullInt64 err = rows.Scan(&m) if err != nil { t.Fatalf("rows.Scan failed: ", err) } rows.Close() if !m.Valid { t.Error("m should be valid, but it wasn't") } if m.Int64 != 1 { t.Errorf("m.Int64 should have been 1, but it was %v", m.Int64) } ensureConnValid(t, conn) } func TestQueryEncodeError(t *testing.T) { t.Parallel() conn := mustConnect(t, *defaultConnConfig) defer closeConn(t, conn) rows, err := conn.Query("select $1::integer", "wrong") if err != nil { t.Errorf("conn.Query failure: %v", err) } defer rows.Close() rows.Next() if rows.Err() == nil { t.Error("Expected rows.Err() to return error, but it didn't") } if rows.Err().Error() != `ERROR: invalid input syntax for integer: "wrong" (SQLSTATE 22P02)` { t.Error("Expected rows.Err() to return different error:", rows.Err()) } } // Ensure that an argument that implements Encoder works when the parameter type // is a core type. 
type coreEncoder struct{} func (n coreEncoder) FormatCode() int16 { return pgx.TextFormatCode } func (n *coreEncoder) Encode(w *pgx.WriteBuf, oid pgx.Oid) error { w.WriteInt32(int32(2)) w.WriteBytes([]byte("42")) return nil } func TestQueryEncodeCoreTextFormatError(t *testing.T) { t.Parallel() conn := mustConnect(t, *defaultConnConfig) defer closeConn(t, conn) var n int32 err := conn.QueryRow("select $1::integer", &coreEncoder{}).Scan(&n) if err != nil { t.Fatalf("Unexpected conn.QueryRow error: %v", err) } if n != 42 { t.Errorf("Expected 42, got %v", n) } } func TestQueryRowCoreTypes(t *testing.T) { t.Parallel() conn := mustConnect(t, *defaultConnConfig) defer closeConn(t, conn) type allTypes struct { s string i16 int16 i32 int32 i64 int64 f32 float32 f64 float64 b bool t time.Time oid pgx.Oid } var actual, zero allTypes tests := []struct { sql string queryArgs []interface{} scanArgs []interface{} expected allTypes }{ {"select $1::text", []interface{}{"Jack"}, []interface{}{&actual.s}, allTypes{s: "Jack"}}, {"select $1::int2", []interface{}{int16(42)}, []interface{}{&actual.i16}, allTypes{i16: 42}}, {"select $1::int4", []interface{}{int32(42)}, []interface{}{&actual.i32}, allTypes{i32: 42}}, {"select $1::int8", []interface{}{int64(42)}, []interface{}{&actual.i64}, allTypes{i64: 42}}, {"select $1::float4", []interface{}{float32(1.23)}, []interface{}{&actual.f32}, allTypes{f32: 1.23}}, {"select $1::float8", []interface{}{float64(1.23)}, []interface{}{&actual.f64}, allTypes{f64: 1.23}}, {"select $1::bool", []interface{}{true}, []interface{}{&actual.b}, allTypes{b: true}}, {"select $1::timestamptz", []interface{}{time.Unix(123, 5000)}, []interface{}{&actual.t}, allTypes{t: time.Unix(123, 5000)}}, {"select $1::timestamp", []interface{}{time.Date(2010, 1, 2, 3, 4, 5, 0, time.Local)}, []interface{}{&actual.t}, allTypes{t: time.Date(2010, 1, 2, 3, 4, 5, 0, time.Local)}}, {"select $1::date", []interface{}{time.Date(1987, 1, 2, 0, 0, 0, 0, time.Local)}, 
[]interface{}{&actual.t}, allTypes{t: time.Date(1987, 1, 2, 0, 0, 0, 0, time.Local)}}, {"select $1::oid", []interface{}{pgx.Oid(42)}, []interface{}{&actual.oid}, allTypes{oid: 42}}, } for i, tt := range tests { actual = zero err := conn.QueryRow(tt.sql, tt.queryArgs...).Scan(tt.scanArgs...) if err != nil { t.Errorf("%d. Unexpected failure: %v (sql -> %v, queryArgs -> %v)", i, err, tt.sql, tt.queryArgs) } if actual != tt.expected { t.Errorf("%d. Expected %v, got %v (sql -> %v, queryArgs -> %v)", i, tt.expected, actual, tt.sql, tt.queryArgs) } ensureConnValid(t, conn) // Check that Scan errors when a core type is null err = conn.QueryRow(tt.sql, nil).Scan(tt.scanArgs...) if err == nil { t.Errorf("%d. Expected null to cause error, but it didn't (sql -> %v)", i, tt.sql) } if err != nil && !strings.Contains(err.Error(), "Cannot decode null") { t.Errorf(`%d. Expected null to cause error "Cannot decode null..." but it was %v (sql -> %v)`, i, err, tt.sql) } ensureConnValid(t, conn) } } func TestQueryRowCoreByteSlice(t *testing.T) { t.Parallel() conn := mustConnect(t, *defaultConnConfig) defer closeConn(t, conn) tests := []struct { sql string queryArg interface{} expected []byte }{ {"select $1::text", "Jack", []byte("Jack")}, {"select $1::text", []byte("Jack"), []byte("Jack")}, {"select $1::varchar", []byte("Jack"), []byte("Jack")}, {"select $1::bytea", []byte{0, 15, 255, 17}, []byte{0, 15, 255, 17}}, } for i, tt := range tests { var actual []byte err := conn.QueryRow(tt.sql, tt.queryArg).Scan(&actual) if err != nil { t.Errorf("%d. Unexpected failure: %v (sql -> %v)", i, err, tt.sql) } if bytes.Compare(actual, tt.expected) != 0 { t.Errorf("%d. 
Expected %v, got %v (sql -> %v)", i, tt.expected, actual, tt.sql) } ensureConnValid(t, conn) } } func TestQueryRowUnknownType(t *testing.T) { t.Parallel() conn := mustConnect(t, *defaultConnConfig) defer closeConn(t, conn) sql := "select $1::inet" expected := "127.0.0.1" var actual string err := conn.QueryRow(sql, expected).Scan(&actual) if err != nil { t.Errorf("Unexpected failure: %v (sql -> %v)", err, sql) } if actual != expected { t.Errorf(`Expected "%v", got "%v" (sql -> %v)`, expected, actual, sql) } ensureConnValid(t, conn) } func TestQueryRowErrors(t *testing.T) { t.Parallel() conn := mustConnect(t, *defaultConnConfig) defer closeConn(t, conn) type allTypes struct { i16 int16 i int s string } var actual, zero allTypes tests := []struct { sql string queryArgs []interface{} scanArgs []interface{} err string }{ {"select $1", []interface{}{"Jack"}, []interface{}{&actual.i16}, "could not determine data type of parameter $1 (SQLSTATE 42P18)"}, {"select $1::badtype", []interface{}{"Jack"}, []interface{}{&actual.i16}, `type "badtype" does not exist`}, {"SYNTAX ERROR", []interface{}{}, []interface{}{&actual.i16}, "SQLSTATE 42601"}, {"select $1::text", []interface{}{"Jack"}, []interface{}{&actual.i16}, "Cannot decode oid 25 into int16"}, {"select $1::point", []interface{}{int(705)}, []interface{}{&actual.s}, "Cannot encode int into oid 600 - int must implement Encoder or be converted to a string"}, {"select 42::int4", []interface{}{}, []interface{}{&actual.i}, "Scan cannot decode into *int"}, } for i, tt := range tests { actual = zero err := conn.QueryRow(tt.sql, tt.queryArgs...).Scan(tt.scanArgs...) if err == nil { t.Errorf("%d. Unexpected success (sql -> %v, queryArgs -> %v)", i, tt.sql, tt.queryArgs) } if err != nil && !strings.Contains(err.Error(), tt.err) { t.Errorf("%d. 
Expected error to contain %s, but got %v (sql -> %v, queryArgs -> %v)", i, tt.err, err, tt.sql, tt.queryArgs) } ensureConnValid(t, conn) } } func TestQueryRowNoResults(t *testing.T) { t.Parallel() conn := mustConnect(t, *defaultConnConfig) defer closeConn(t, conn) sql := "select 1 where 1=0" psName := "selectNothing" mustPrepare(t, conn, psName, sql) for _, sql := range []string{sql, psName} { var n int32 err := conn.QueryRow(sql).Scan(&n) if err != pgx.ErrNoRows { t.Errorf("Expected pgx.ErrNoRows, got %v", err) } ensureConnValid(t, conn) } } func TestQueryRowCoreInt16Slice(t *testing.T) { t.Parallel() conn := mustConnect(t, *defaultConnConfig) defer closeConn(t, conn) var actual []int16 tests := []struct { sql string expected []int16 }{ {"select $1::int2[]", []int16{1, 2, 3, 4, 5}}, {"select $1::int2[]", []int16{}}, } for i, tt := range tests { err := conn.QueryRow(tt.sql, tt.expected).Scan(&actual) if err != nil { t.Errorf("%d. Unexpected failure: %v", i, err) } if len(actual) != len(tt.expected) { t.Errorf("%d. Expected %v, got %v", i, tt.expected, actual) } for j := 0; j < len(actual); j++ { if actual[j] != tt.expected[j] { t.Errorf("%d. Expected actual[%d] to be %v, got %v", i, j, tt.expected[j], actual[j]) } } ensureConnValid(t, conn) } // Check that Scan errors when an array with a null is scanned into a core slice type err := conn.QueryRow("select '{1, 2, 3, 4, 5, null}'::int2[];").Scan(&actual) if err == nil { t.Error("Expected null to cause error when scanned into slice, but it didn't") } if err != nil && !strings.Contains(err.Error(), "Cannot decode null") { t.Errorf(`Expected null to cause error "Cannot decode null..." 
but it was %v`, err) } ensureConnValid(t, conn) } func TestQueryRowCoreInt32Slice(t *testing.T) { t.Parallel() conn := mustConnect(t, *defaultConnConfig) defer closeConn(t, conn) var actual []int32 tests := []struct { sql string expected []int32 }{ {"select $1::int4[]", []int32{1, 2, 3, 4, 5}}, {"select $1::int4[]", []int32{}}, } for i, tt := range tests { err := conn.QueryRow(tt.sql, tt.expected).Scan(&actual) if err != nil { t.Errorf("%d. Unexpected failure: %v", i, err) } if len(actual) != len(tt.expected) { t.Errorf("%d. Expected %v, got %v", i, tt.expected, actual) } for j := 0; j < len(actual); j++ { if actual[j] != tt.expected[j] { t.Errorf("%d. Expected actual[%d] to be %v, got %v", i, j, tt.expected[j], actual[j]) } } ensureConnValid(t, conn) } // Check that Scan errors when an array with a null is scanned into a core slice type err := conn.QueryRow("select '{1, 2, 3, 4, 5, null}'::int4[];").Scan(&actual) if err == nil { t.Error("Expected null to cause error when scanned into slice, but it didn't") } if err != nil && !strings.Contains(err.Error(), "Cannot decode null") { t.Errorf(`Expected null to cause error "Cannot decode null..." but it was %v`, err) } ensureConnValid(t, conn) } func TestQueryRowCoreInt64Slice(t *testing.T) { t.Parallel() conn := mustConnect(t, *defaultConnConfig) defer closeConn(t, conn) var actual []int64 tests := []struct { sql string expected []int64 }{ {"select $1::int8[]", []int64{1, 2, 3, 4, 5}}, {"select $1::int8[]", []int64{}}, } for i, tt := range tests { err := conn.QueryRow(tt.sql, tt.expected).Scan(&actual) if err != nil { t.Errorf("%d. Unexpected failure: %v", i, err) } if len(actual) != len(tt.expected) { t.Errorf("%d. Expected %v, got %v", i, tt.expected, actual) } for j := 0; j < len(actual); j++ { if actual[j] != tt.expected[j] { t.Errorf("%d. 
Expected actual[%d] to be %v, got %v", i, j, tt.expected[j], actual[j]) } } ensureConnValid(t, conn) } // Check that Scan errors when an array with a null is scanned into a core slice type err := conn.QueryRow("select '{1, 2, 3, 4, 5, null}'::int8[];").Scan(&actual) if err == nil { t.Error("Expected null to cause error when scanned into slice, but it didn't") } if err != nil && !strings.Contains(err.Error(), "Cannot decode null") { t.Errorf(`Expected null to cause error "Cannot decode null..." but it was %v`, err) } ensureConnValid(t, conn) } func TestQueryRowCoreFloat32Slice(t *testing.T) { t.Parallel() conn := mustConnect(t, *defaultConnConfig) defer closeConn(t, conn) var actual []float32 tests := []struct { sql string expected []float32 }{ {"select $1::float4[]", []float32{1.5, 2.0, 3.5}}, {"select $1::float4[]", []float32{}}, } for i, tt := range tests { err := conn.QueryRow(tt.sql, tt.expected).Scan(&actual) if err != nil { t.Errorf("%d. Unexpected failure: %v", i, err) } if len(actual) != len(tt.expected) { t.Errorf("%d. Expected %v, got %v", i, tt.expected, actual) } for j := 0; j < len(actual); j++ { if actual[j] != tt.expected[j] { t.Errorf("%d. Expected actual[%d] to be %v, got %v", i, j, tt.expected[j], actual[j]) } } ensureConnValid(t, conn) } // Check that Scan errors when an array with a null is scanned into a core slice type err := conn.QueryRow("select '{1.5, 2.0, 3.5, null}'::float4[];").Scan(&actual) if err == nil { t.Error("Expected null to cause error when scanned into slice, but it didn't") } if err != nil && !strings.Contains(err.Error(), "Cannot decode null") { t.Errorf(`Expected null to cause error "Cannot decode null..." 
but it was %v`, err) } ensureConnValid(t, conn) } func TestQueryRowCoreFloat64Slice(t *testing.T) { t.Parallel() conn := mustConnect(t, *defaultConnConfig) defer closeConn(t, conn) var actual []float64 tests := []struct { sql string expected []float64 }{ {"select $1::float8[]", []float64{1.5, 2.0, 3.5}}, {"select $1::float8[]", []float64{}}, } for i, tt := range tests { err := conn.QueryRow(tt.sql, tt.expected).Scan(&actual) if err != nil { t.Errorf("%d. Unexpected failure: %v", i, err) } if len(actual) != len(tt.expected) { t.Errorf("%d. Expected %v, got %v", i, tt.expected, actual) } for j := 0; j < len(actual); j++ { if actual[j] != tt.expected[j] { t.Errorf("%d. Expected actual[%d] to be %v, got %v", i, j, tt.expected[j], actual[j]) } } ensureConnValid(t, conn) } // Check that Scan errors when an array with a null is scanned into a core slice type err := conn.QueryRow("select '{1.5, 2.0, 3.5, null}'::float8[];").Scan(&actual) if err == nil { t.Error("Expected null to cause error when scanned into slice, but it didn't") } if err != nil && !strings.Contains(err.Error(), "Cannot decode null") { t.Errorf(`Expected null to cause error "Cannot decode null..." but it was %v`, err) } ensureConnValid(t, conn) } func TestQueryRowCoreStringSlice(t *testing.T) { t.Parallel() conn := mustConnect(t, *defaultConnConfig) defer closeConn(t, conn) var actual []string tests := []struct { sql string expected []string }{ {"select $1::text[]", []string{"Adam", "Eve", "UTF-8 Characters Å Æ Ë Ͽ"}}, {"select $1::text[]", []string{}}, {"select $1::varchar[]", []string{"Adam", "Eve", "UTF-8 Characters Å Æ Ë Ͽ"}}, {"select $1::varchar[]", []string{}}, } for i, tt := range tests { err := conn.QueryRow(tt.sql, tt.expected).Scan(&actual) if err != nil { t.Errorf("%d. Unexpected failure: %v", i, err) } if len(actual) != len(tt.expected) { t.Errorf("%d. Expected %v, got %v", i, tt.expected, actual) } for j := 0; j < len(actual); j++ { if actual[j] != tt.expected[j] { t.Errorf("%d. 
Expected actual[%d] to be %v, got %v", i, j, tt.expected[j], actual[j]) } } ensureConnValid(t, conn) } // Check that Scan errors when an array with a null is scanned into a core slice type err := conn.QueryRow("select '{Adam,Eve,NULL}'::text[];").Scan(&actual) if err == nil { t.Error("Expected null to cause error when scanned into slice, but it didn't") } if err != nil && !strings.Contains(err.Error(), "Cannot decode null") { t.Errorf(`Expected null to cause error "Cannot decode null..." but it was %v`, err) } ensureConnValid(t, conn) } func TestReadingValueAfterEmptyArray(t *testing.T) { conn := mustConnect(t, *defaultConnConfig) defer closeConn(t, conn) var a []string var b int32 err := conn.QueryRow("select '{}'::text[], 42::integer").Scan(&a, &b) if err != nil { t.Fatalf("conn.QueryRow failed: ", err) } if len(a) != 0 { t.Errorf("Expected 'a' to have length 0, but it was: ", len(a)) } if b != 42 { t.Errorf("Expected 'b' to 42, but it was: ", b) } } func TestReadingNullByteArray(t *testing.T) { conn := mustConnect(t, *defaultConnConfig) defer closeConn(t, conn) var a []byte err := conn.QueryRow("select null::text").Scan(&a) if err != nil { t.Fatalf("conn.QueryRow failed: ", err) } if a != nil { t.Errorf("Expected 'a' to be nil, but it was: %v", a) } } func TestReadingNullByteArrays(t *testing.T) { conn := mustConnect(t, *defaultConnConfig) defer closeConn(t, conn) rows, err := conn.Query("select null::text union all select null::text") if err != nil { t.Fatalf("conn.Query failed: ", err) } count := 0 for rows.Next() { count++ var a []byte if err := rows.Scan(&a); err != nil { t.Fatalf("failed to scan row", err) } if a != nil { t.Errorf("Expected 'a' to be nil, but it was: %v", a) } } if count != 2 { t.Errorf("Expected to read 2 rows, read: ", count) } }
{ "content_hash": "140175f974403b914ad30b120986ab2a", "timestamp": "", "source": "github", "line_count": 848, "max_line_length": 175, "avg_line_length": 25.08254716981132, "alnum_prop": 0.6250587682181477, "repo_name": "imjorge/flynn-discovery", "id": "4c4df8a718ac8132d789de0ef92c52e9dd3ca779", "size": "21278", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Godeps/_workspace/src/github.com/jackc/pgx/query_test.go", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Go", "bytes": "6605" } ], "symlink_target": "" }
// Forward declarations for the double-tap indoor-interaction SDK model.
// Including this header instead of the full controller headers keeps
// compile-time dependencies light for code that only needs the type names.
namespace ExampleApp
{
    namespace DoubleTapIndoorInteraction
    {
        namespace SdkModel
        {
            // Abstract interface for the double-tap indoor-interaction controller.
            class IDoubleTapIndoorInteractionController;

            // Concrete controller implementation (defined elsewhere).
            class DoubleTapIndoorInteractionController;
        }
    }
}
{ "content_hash": "8708573932f7f767778b8a70e547528d", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 56, "avg_line_length": 21.545454545454547, "alnum_prop": 0.6497890295358649, "repo_name": "wrld3d/wrld-example-app", "id": "933e4498f37933e5911103d4e9562b2fd045c9b3", "size": "309", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "src/DoubleTapIndoorInteraction/DoubleTapIndoorInteraction.h", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Batchfile", "bytes": "1135" }, { "name": "C", "bytes": "377538" }, { "name": "C#", "bytes": "376920" }, { "name": "C++", "bytes": "4283467" }, { "name": "CMake", "bytes": "273099" }, { "name": "HTML", "bytes": "1013" }, { "name": "Java", "bytes": "434936" }, { "name": "Makefile", "bytes": "179" }, { "name": "Objective-C", "bytes": "602771" }, { "name": "Objective-C++", "bytes": "720039" }, { "name": "Python", "bytes": "3259" }, { "name": "Shell", "bytes": "17461" }, { "name": "Swift", "bytes": "178618" } ], "symlink_target": "" }
import sys, getopt import serial import time try: from progressbar import * usepbar = 1 except: usepbar = 0 # Verbose level QUIET = 20 def mdebug(level, message): if(QUIET >= level): print >> sys.stderr , message class CmdException(Exception): pass class CommandInterface: def open(self, aport='/dev/tty.usbserial-ftCYPMYJ', abaudrate=115200) : self.sp = serial.Serial( port=aport, baudrate=abaudrate, # baudrate bytesize=8, # number of databits parity=serial.PARITY_EVEN, stopbits=1, xonxoff=0, # enable software flow control rtscts=0, # disable RTS/CTS flow control timeout=5 # set a timeout value, None for waiting forever ) def _wait_for_ask(self, info = ""): # wait for ask try: ask = ord(self.sp.read()) except: raise CmdException("Can't read port or timeout") else: if ask == 0x79: # ACK return 1 else: if ask == 0x1F: # NACK raise CmdException("NACK "+info) else: # Unknow responce raise CmdException("Unknow response. "+info+": "+hex(ask)) def reset(self): self.sp.setDTR(0) time.sleep(0.1) self.sp.setDTR(1) time.sleep(0.5) def initChip(self): # Set boot self.sp.setRTS(0) self.reset() self.sp.write("\x7F") # Syncro return self._wait_for_ask("Syncro") def releaseChip(self): self.sp.setRTS(1) self.reset() def cmdGeneric(self, cmd): self.sp.write(chr(cmd)) self.sp.write(chr(cmd ^ 0xFF)) # Control byte return self._wait_for_ask(hex(cmd)) def cmdGet(self): if self.cmdGeneric(0x00): mdebug(10, "*** Get command"); len = ord(self.sp.read()) version = ord(self.sp.read()) mdebug(10, " Bootloader version: "+hex(version)) dat = map(lambda c: hex(ord(c)), self.sp.read(len)) mdebug(10, " Available commands: "+str(dat)) self._wait_for_ask("0x00 end") return version else: raise CmdException("Get (0x00) failed") def cmdGetVersion(self): if self.cmdGeneric(0x01): mdebug(10, "*** GetVersion command") version = ord(self.sp.read()) self.sp.read(2) self._wait_for_ask("0x01 end") mdebug(10, " Bootloader version: "+hex(version)) return version else: raise CmdException("GetVersion (0x01) 
failed") def cmdGetID(self): if self.cmdGeneric(0x02): mdebug(10, "*** GetID command") len = ord(self.sp.read()) id = self.sp.read(len+1) self._wait_for_ask("0x02 end") return id else: raise CmdException("GetID (0x02) failed") def _encode_addr(self, addr): byte3 = (addr >> 0) & 0xFF byte2 = (addr >> 8) & 0xFF byte1 = (addr >> 16) & 0xFF byte0 = (addr >> 24) & 0xFF crc = byte0 ^ byte1 ^ byte2 ^ byte3 return (chr(byte0) + chr(byte1) + chr(byte2) + chr(byte3) + chr(crc)) def cmdReadMemory(self, addr, lng): assert(lng <= 256) if self.cmdGeneric(0x11): mdebug(10, "*** ReadMemory command") self.sp.write(self._encode_addr(addr)) self._wait_for_ask("0x11 address failed") N = (lng - 1) & 0xFF crc = N ^ 0xFF self.sp.write(chr(N) + chr(crc)) self._wait_for_ask("0x11 length failed") return map(lambda c: ord(c), self.sp.read(lng)) else: raise CmdException("ReadMemory (0x11) failed") def cmdGo(self, addr): if self.cmdGeneric(0x21): mdebug(10, "*** Go command") self.sp.write(self._encode_addr(addr)) self._wait_for_ask("0x21 go failed") else: raise CmdException("Go (0x21) failed") def cmdWriteMemory(self, addr, data): assert(len(data) <= 256) if self.cmdGeneric(0x31): mdebug(10, "*** Write memory command") self.sp.write(self._encode_addr(addr)) self._wait_for_ask("0x31 address failed") #map(lambda c: hex(ord(c)), data) lng = (len(data)-1) & 0xFF mdebug(10, " %s bytes to write" % [lng+1]); self.sp.write(chr(lng)) # len really crc = 0xFF for c in data: crc = crc ^ c self.sp.write(chr(c)) self.sp.write(chr(crc)) self._wait_for_ask("0x31 programming failed") mdebug(10, " Write memory done") else: raise CmdException("Write memory (0x31) failed") def cmdEraseMemory(self, sectors = None): if self.cmdGeneric(0x43): mdebug(10, "*** Erase memory command") if sectors is None: # Global erase self.sp.write(chr(0xFF)) self.sp.write(chr(0x00)) else: # Sectors erase self.sp.write(chr((len(sectors)-1) & 0xFF)) crc = 0xFF for c in sectors: crc = crc ^ c self.sp.write(chr(c)) self.sp.write(chr(crc)) 
self._wait_for_ask("0x43 erasing failed") mdebug(10, " Erase memory done") else: raise CmdException("Erase memory (0x43) failed") def cmdWriteProtect(self, sectors): if self.cmdGeneric(0x63): mdebug(10, "*** Write protect command") self.sp.write(chr((len(sectors)-1) & 0xFF)) crc = 0xFF for c in sectors: crc = crc ^ c self.sp.write(chr(c)) self.sp.write(chr(crc)) self._wait_for_ask("0x63 write protect failed") mdebug(10, " Write protect done") else: raise CmdException("Write Protect memory (0x63) failed") def cmdWriteUnprotect(self): if self.cmdGeneric(0x73): mdebug(10, "*** Write Unprotect command") self._wait_for_ask("0x73 write unprotect failed") self._wait_for_ask("0x73 write unprotect 2 failed") mdebug(10, " Write Unprotect done") else: raise CmdException("Write Unprotect (0x73) failed") def cmdReadoutProtect(self): if self.cmdGeneric(0x82): mdebug(10, "*** Readout protect command") self._wait_for_ask("0x82 readout protect failed") self._wait_for_ask("0x82 readout protect 2 failed") mdebug(10, " Read protect done") else: raise CmdException("Readout protect (0x82) failed") def cmdReadoutUnprotect(self): if self.cmdGeneric(0x92): mdebug(10, "*** Readout Unprotect command") self._wait_for_ask("0x92 readout unprotect failed") self._wait_for_ask("0x92 readout unprotect 2 failed") mdebug(10, " Read Unprotect done") else: raise CmdException("Readout unprotect (0x92) failed") # Complex commands section def readMemory(self, addr, lng): data = [] if usepbar: widgets = ['Reading: ', Percentage(),', ', ETA(), ' ', Bar()] pbar = ProgressBar(widgets=widgets,maxval=lng, term_width=79).start() while lng > 256: if usepbar: pbar.update(pbar.maxval-lng) else: mdebug(5, "Read %(len)d bytes at 0x%(addr)X" % {'addr': addr, 'len': 256}) data = data + self.cmdReadMemory(addr, 256) addr = addr + 256 lng = lng - 256 if usepbar: pbar.update(pbar.maxval-lng) pbar.finish() else: mdebug(5, "Read %(len)d bytes at 0x%(addr)X" % {'addr': addr, 'len': 256}) data = data + 
self.cmdReadMemory(addr, lng) return data def writeMemory(self, addr, data): lng = len(data) if usepbar: widgets = ['Writing: ', Percentage(),' ', ETA(), ' ', Bar()] pbar = ProgressBar(widgets=widgets, maxval=lng, term_width=79).start() offs = 0 while lng > 256: if usepbar: pbar.update(pbar.maxval-lng) else: mdebug(5, "Write %(len)d bytes at 0x%(addr)X" % {'addr': addr, 'len': 256}) self.cmdWriteMemory(addr, data[offs:offs+256]) offs = offs + 256 addr = addr + 256 lng = lng - 256 if usepbar: pbar.update(pbar.maxval-lng) pbar.finish() else: mdebug(5, "Write %(len)d bytes at 0x%(addr)X" % {'addr': addr, 'len': 256}) self.cmdWriteMemory(addr, data[offs:offs+lng] + ([0xFF] * (256-lng)) ) def __init__(self) : pass def usage(): print """Usage: %s [-hqVewvr] [-l length] [-p port] [-b baud] [-a addr] [file.bin] -h This help -q Quiet -V Verbose -e Erase -w Write -v Verify -r Read -l length Length of read -p port Serial port (default: /dev/tty.usbserial-ftCYPMYJ) -b baud Baud speed (default: 115200) -a addr Target address ./stm32loader.py -e -w -v example/main.bin """ % sys.argv[0] if __name__ == "__main__": # Import Psyco if available try: import psyco psyco.full() print "Using Psyco..." 
except ImportError: pass conf = { 'port': '/dev/tty.usbserial-ftCYPMYJ', 'baud': 115200, 'address': 0x08000000, 'erase': 0, 'write': 0, 'verify': 0, 'read': 0, } # http://www.python.org/doc/2.5.2/lib/module-getopt.html try: opts, args = getopt.getopt(sys.argv[1:], "hqVewvrp:b:a:l:") except getopt.GetoptError, err: # print help information and exit: print str(err) # will print something like "option -a not recognized" usage() sys.exit(2) QUIET = 5 for o, a in opts: if o == '-V': QUIET = 10 elif o == '-q': QUIET = 0 elif o == '-h': usage() sys.exit(0) elif o == '-e': conf['erase'] = 1 elif o == '-w': conf['write'] = 1 elif o == '-v': conf['verify'] = 1 elif o == '-r': conf['read'] = 1 elif o == '-p': conf['port'] = a elif o == '-b': conf['baud'] = eval(a) elif o == '-a': conf['address'] = eval(a) elif o == '-l': conf['len'] = eval(a) else: assert False, "unhandled option" cmd = CommandInterface() cmd.open(conf['port'], conf['baud']) mdebug(10, "Open port %(port)s, baud %(baud)d" % {'port':conf['port'], 'baud':conf['baud']}) try: try: cmd.initChip() except: print "Can't init. 
Ensure that BOOT0 is enabled and reset device" bootversion = cmd.cmdGet() mdebug(0, "Bootloader version %X" % bootversion) mdebug(0, "Chip id `%s'" % str(map(lambda c: hex(ord(c)), cmd.cmdGetID()))) # cmd.cmdGetVersion() # cmd.cmdGetID() # cmd.cmdReadoutUnprotect() # cmd.cmdWriteUnprotect() # cmd.cmdWriteProtect([0, 1]) if (conf['write'] or conf['verify']): data = map(lambda c: ord(c), file(args[0], 'rb').read()) if conf['erase']: cmd.cmdEraseMemory() if conf['write']: cmd.writeMemory(conf['address'], data) if conf['verify']: verify = cmd.readMemory(conf['address'], len(data)) if(data == verify): print "Verification OK" else: print "Verification FAILED" print str(len(data)) + ' vs ' + str(len(verify)) for i in xrange(0, len(data)): if data[i] != verify[i]: print hex(i) + ': ' + hex(data[i]) + ' vs ' + hex(verify[i]) if not conf['write'] and conf['read']: rdata = cmd.readMemory(conf['address'], conf['len']) file(args[0], 'wb').write(''.join(map(chr,rdata))) # cmd.cmdGo(addr + 0x04) finally: cmd.releaseChip()
{ "content_hash": "9126c3f7497a30fe0c0fac5a590389c1", "timestamp": "", "source": "github", "line_count": 407, "max_line_length": 96, "avg_line_length": 31.398034398034397, "alnum_prop": 0.4981610454652164, "repo_name": "uvc-ingenieure/cordi", "id": "6cd5d1241c3daee382d5095247509980df8740eb", "size": "13646", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "software/cordi/stm32loader.py", "mode": "33261", "license": "mit", "language": [ { "name": "C", "bytes": "23607" }, { "name": "Python", "bytes": "13646" } ], "symlink_target": "" }
-- Audit table recording manual overrides of an employer account's
-- levy-payer status; one row per change, newest row wins.
CREATE TABLE [employer_financial].[LevyOverride]
(
    [Id] BIGINT NOT NULL PRIMARY KEY IDENTITY,   -- surrogate key
    [AccountId] BIGINT NOT NULL,                 -- employer account the override applies to
    [IsLevyPayer] TINYINT NOT NULL DEFAULT 0,    -- flag value applied by the override (defaults to non-levy-payer)
    [DateAdded] DATETIME NOT NULL,               -- when the override was recorded
    [ChangedBy] VARCHAR(500) NOT NULL            -- identity of whoever made the change
)
GO
-- Supports "latest override for an account" lookups: seek on AccountId,
-- ordered newest-first, with IsLevyPayer included to make the read covering.
CREATE INDEX [IX_LevyOverride_AccountId_IsLevyPayer] ON [employer_financial].[LevyOverride] (AccountId, DateAdded DESC) INCLUDE (IsLevyPayer)
GO
{ "content_hash": "9684cb5b75bf32ce3349b55af4e642b9", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 141, "avg_line_length": 35.18181818181818, "alnum_prop": 0.7700258397932817, "repo_name": "SkillsFundingAgency/das-employerapprenticeshipsservice", "id": "297f8096bbe506981e2eecb2159d8ea28b0c7e7e", "size": "389", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/SFA.DAS.EAS.Employer_Financial.Database/Tables/LevyOverride.sql", "mode": "33188", "license": "mit", "language": [ { "name": "ASP.NET", "bytes": "808" }, { "name": "Batchfile", "bytes": "92" }, { "name": "C#", "bytes": "5033158" }, { "name": "CSS", "bytes": "282148" }, { "name": "Dockerfile", "bytes": "1137" }, { "name": "Gherkin", "bytes": "54605" }, { "name": "HTML", "bytes": "1299641" }, { "name": "JavaScript", "bytes": "219328" }, { "name": "PLpgSQL", "bytes": "4731" }, { "name": "PowerShell", "bytes": "2478" }, { "name": "SCSS", "bytes": "476673" }, { "name": "TSQL", "bytes": "138159" } ], "symlink_target": "" }
<?php
/**
 * Builder widget that renders a list of posts from a user-selected category
 * ("Posts List Varian 3" layout: one featured article followed by a compact
 * list of the remaining posts).
 */
class SuperNews_Posts_Varian3_Builder extends WP_Widget {

	/**
	 * Sets up the widget.
	 *
	 * @since 1.0.0
	 */
	function __construct() {

		// Set up the widget options.
		$widget_options = array(
			'classname'     => 'builder-supernews-posts-varian-3',
			'description'   => __( 'Display posts list based on user selected category.', 'supernews' ),
			'panels_groups' => array( 'panels' ),
		);

		// FIX: the original called $this->WP_Widget(...), the PHP4-style
		// parent constructor that has been deprecated since WordPress 4.3.
		// parent::__construct() takes the same arguments.
		parent::__construct(
			'supernews-builder-posts-varian-3',                 // $this->id_base
			__( 'Builder - Posts List Varian 3', 'supernews' ), // $this->name
			$widget_options                                     // $this->widget_options
		);
	}

	/**
	 * Outputs the widget based on the arguments input through the widget controls.
	 *
	 * @since 1.0.0
	 *
	 * @param array $args     Display arguments supplied by the theme/builder
	 *                        (provides $before_widget / $after_widget).
	 * @param array $instance Saved settings for this widget instance.
	 */
	function widget( $args, $instance ) {
		// NOTE(review): extract() is discouraged, but kept for consistency
		// with the theme's other widgets, which rely on $before_widget etc.
		extract( $args );

		// Output the theme's $before_widget wrapper.
		echo $before_widget;

		// Pull the selected category, its object and its archive link.
		$cat_id   = (int) $instance['cat'];
		$category = get_category( $cat_id );
		$cat_link = get_category_link( $cat_id );

		// Posts query arguments.
		$args = array(
			'posts_per_page' => (int) $instance['num'],
			'post_type'      => 'post',
		);

		// Limit to category based on user selected tag.
		if ( ! empty( $instance['cat'] ) ) {
			$args['cat'] = $instance['cat'];
		}

		// Allow dev to filter the post arguments.
		// NOTE(review): the filter name says "varian2" although this is the
		// varian-3 widget (copy-paste); kept as-is because third-party code
		// may already hook this name.
		$query = apply_filters( 'supernews_posts_varian2_builder', $args );

		// The post query.
		$posts = new WP_Query( $query );

		if ( $posts->have_posts() ) : ?>

			<section class="content-block-3 category-box clearfix">
				<div class="block">
					<h3 class="section-title">
						<?php // FIX: escape the category name on output (WP escaping convention). ?>
						<a href="<?php echo esc_url( $cat_link ); ?>"><?php echo esc_html( $category->name ); ?></a><span class="see-all"><a href="<?php echo esc_url( $cat_link ); ?>"><?php _e( 'More', 'supernews' ); ?></a></span>
					</h3>
					<ul>
						<?php $i = 0; ?>
						<?php while ( $posts->have_posts() ) : $posts->the_post(); ?>

							<?php if ( ++$i == 1 ) : ?>
								<?php // First post gets the featured layout (large thumb, excerpt, read-more). ?>
								<li class="article-first">
									<?php if ( has_post_thumbnail() ) : ?>
										<a href="<?php the_permalink(); ?>"><?php the_post_thumbnail( 'supernews-grid-1', array( 'class' => 'entry-thumbnail', 'alt' => esc_attr( get_the_title() ) ) ); ?></a>
									<?php endif; ?>

									<?php the_title( sprintf( '<h2 class="entry-title"><a href="%s" rel="bookmark">', esc_url( get_permalink() ) ), '</a></h2>' ); ?>

									<div class="entry-meta">
										<time class="entry-date updated" datetime="<?php echo esc_html( get_the_date( 'c' ) ); ?>"><span><?php echo esc_html( get_the_date() ); ?></span></time>
										<?php if ( ! post_password_required() && ( comments_open() || '0' != get_comments_number() ) ) : ?>
											<span class="entry-comment"><?php comments_popup_link( __( '0 Comment', 'supernews' ), __( '1 Comment', 'supernews' ), __( '% Comments', 'supernews' ) ); ?></span>
										<?php endif; ?>
									</div>

									<div class="entry-summary">
										<?php // NOTE(review): filter name references "truenews" — looks inherited from another theme; kept for BC. ?>
										<?php echo apply_filters( 'truenews_posts_1col', wp_trim_words( get_the_excerpt(), 15 ) ); ?>
									</div><!-- .entry-summary -->

									<div class="more-link">
										<a href="<?php the_permalink(); ?>"><?php _e( 'Read the rest of this entry', 'supernews' ); ?></a>
									</div><!-- .more-link -->
								</li>
							<?php else : ?>
								<?php // Remaining posts use the compact list layout. ?>
								<li class="article-list clearfix">
									<?php if ( has_post_thumbnail() ) : ?>
										<a href="<?php the_permalink(); ?>"><?php the_post_thumbnail( 'supernews-widget-thumb', array( 'class' => 'entry-thumbnail', 'alt' => esc_attr( get_the_title() ) ) ); ?></a>
									<?php endif; ?>

									<?php the_title( sprintf( '<h2 class="entry-title"><a href="%s" rel="bookmark">', esc_url( get_permalink() ) ), '</a></h2>' ); ?>

									<div class="entry-meta">
										<time class="entry-date updated" datetime="<?php echo esc_html( get_the_date( 'c' ) ); ?>"><span><?php echo esc_html( get_the_date() ); ?></span></time>
										<?php if ( ! post_password_required() && ( comments_open() || '0' != get_comments_number() ) ) : ?>
											<span class="entry-comment"><?php comments_popup_link( __( '0 Comment', 'supernews' ), __( '1 Comment', 'supernews' ), __( '% Comments', 'supernews' ) ); ?></span>
										<?php endif; ?>
									</div>
								</li>
							<?php endif; ?>

						<?php endwhile; ?>
					</ul>
				</div>
			</section>

		<?php endif;

		// Restore original Post Data.
		wp_reset_postdata();

		// Close the theme's widget wrapper.
		echo $after_widget;
	}

	/**
	 * Updates the widget control options for the particular instance of the widget.
	 *
	 * @since 1.0.0
	 *
	 * @param array $new_instance New settings submitted from the admin form.
	 * @param array $old_instance Previously saved settings (unused).
	 * @return array Sanitized settings to persist.
	 */
	function update( $new_instance, $old_instance ) {
		$instance = $new_instance;

		$instance['num'] = (int) $new_instance['num'];
		$instance['cat'] = $new_instance['cat'];

		return $instance;
	}

	/**
	 * Displays the widget control options in the Widgets admin screen.
	 *
	 * @since 1.0.0
	 *
	 * @param array $instance Current settings for this widget instance.
	 */
	function form( $instance ) {

		// Default value.
		$defaults = array(
			'num' => 5,
			'cat' => '',
		);

		$instance = wp_parse_args( (array) $instance, $defaults ); ?>

		<p>
			<label for="<?php echo $this->get_field_id( 'num' ); ?>">
				<?php _e( 'Number of posts to show', 'supernews' ); ?>
			</label>
			<input class="widefat" id="<?php echo $this->get_field_id( 'num' ); ?>" name="<?php echo $this->get_field_name( 'num' ); ?>" type="number" step="1" min="-1" value="<?php echo (int) $instance['num']; ?>" />
		</p>

		<p>
			<label for="<?php echo $this->get_field_id( 'cat' ); ?>"><?php _e( 'Choose Category:', 'supernews' ); ?></label>
			<select class="widefat" id="<?php echo $this->get_field_id( 'cat' ); ?>" name="<?php echo $this->get_field_name( 'cat' ); ?>" style="width:100%;">
				<?php $categories = get_terms( 'category' ); ?>
				<?php foreach ( $categories as $category ) { ?>
					<option value="<?php echo esc_attr( $category->term_id ); ?>" <?php selected( $instance['cat'], $category->term_id ); ?>><?php echo esc_html( $category->name ); ?></option>
				<?php } ?>
			</select>
		</p>

	<?php }
}
{ "content_hash": "43cbd226d47eba89db7f4620089da287", "timestamp": "", "source": "github", "line_count": 184, "max_line_length": 211, "avg_line_length": 35.02173913043478, "alnum_prop": 0.521260086902545, "repo_name": "cornelous/theprayingcoupleau", "id": "a6144e096e9b309f11706bc837a21b7b4efc4485", "size": "6672", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "content/themes/supernews/inc/builder/posts-varian-3.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "259174" }, { "name": "HTML", "bytes": "9904" }, { "name": "JavaScript", "bytes": "124888" }, { "name": "PHP", "bytes": "547713" }, { "name": "Ruby", "bytes": "5918" }, { "name": "Shell", "bytes": "185" } ], "symlink_target": "" }
import shutil import pytest from mock import call from scanpointgenerator import CompoundGenerator, LineGenerator, StaticPointGenerator from malcolm.core import Context, Process from malcolm.modules.builtin.defines import tmp_dir from malcolm.modules.pmac.parts import BeamSelectorPart from malcolm.modules.pmac.util import MIN_TIME from malcolm.modules.scanning.util import DetectorTable from malcolm.testutil import ChildTestCase from malcolm.yamlutil import make_block_creator class TestBeamSelectorPart(ChildTestCase): def setUp(self): self.process = Process("Process") self.context = Context(self.process) self.config_dir = tmp_dir("config_dir") pmac_block = make_block_creator(__file__, "test_pmac_manager_block.yaml") self.child = self.create_child_block( pmac_block, self.process, mri_prefix="PMAC", config_dir=self.config_dir.value, ) # These are the child blocks we are interested in self.child_x = self.process.get_controller("BL45P-ML-STAGE-01:X") # self.child_y = self.process.get_controller( # "BL45P-ML-STAGE-01:Y") self.child_cs1 = self.process.get_controller("PMAC:CS1") self.child_traj = self.process.get_controller("PMAC:TRAJ") self.child_status = self.process.get_controller("PMAC:STATUS") # CS1 needs to have the right port otherwise we will error self.set_attributes(self.child_cs1, port="CS1") self.move_time = 0.5 self.o = BeamSelectorPart( name="beamSelector", mri="PMAC", selector_axis="x", imaging_angle=0, diffraction_angle=0.5, imaging_detector="imagingDetector", diffraction_detector="diffDetector", move_time=self.move_time, ) self.context.set_notify_dispatch_request(self.o.notify_dispatch_request) self.process.start() pass def tearDown(self): del self.context self.process.stop(timeout=1) shutil.rmtree(self.config_dir.value) def set_motor_attributes( self, x_pos=0.5, units="deg", x_acceleration=4.0, x_velocity=10.0 ): # create some parts to mock # the motion controller and an axis in a CS self.set_attributes( self.child_x, cs="CS1,A", 
accelerationTime=x_velocity / x_acceleration, resolution=0.001, offset=0.0, maxVelocity=x_velocity, readback=x_pos, velocitySettle=0.0, units=units, ) def _get_detector_table(self, imaging_exposure_time, diffraction_exposure_time): return DetectorTable( [True, True, True], ["imagingDetector", "diffDetector", "PandA"], ["ML-IMAGING-01", "ML-DIFF-01", "ML-PANDA-01"], [imaging_exposure_time, diffraction_exposure_time, 0.0], [1, 1, 2], ) def test_validate_returns_tweaked_generator_duration(self): nCycles = 1 generator = CompoundGenerator( [StaticPointGenerator(nCycles)], [], [], duration=0.0 ) imaging_exposure_time = 0.1 diffraction_exposure_time = 0.3 detectors = self._get_detector_table( imaging_exposure_time, diffraction_exposure_time ) # First pass we should tweak infos = self.o.on_validate(generator, {}, detectors) self.assertEqual(infos.parameter, "generator") assert infos.value.duration == pytest.approx( self.move_time * 2 + imaging_exposure_time + diffraction_exposure_time ) # Now re-run with our tweaked generator infos = self.o.on_validate(infos.value, {}, detectors) assert infos is None, "We shouldn't need to tweak again" def test_validate_raises_AssertionError_for_bad_generator_type(self): line_generator = LineGenerator("x", "mm", 0.0, 5.0, 10) generator = CompoundGenerator([line_generator], [], [], duration=0.0) imaging_exposure_time = 0.1 diffraction_exposure_time = 0.3 detectors = self._get_detector_table( imaging_exposure_time, diffraction_exposure_time ) self.assertRaises(AssertionError, self.o.on_validate, generator, {}, detectors) def test_validate_raises_ValueError_for_detector_with_invalid_frames_per_step(self): nCycles = 1 generator = CompoundGenerator( [StaticPointGenerator(nCycles)], [], [], duration=0.0 ) imaging_exposure_time = 0.1 diffraction_exposure_time = 0.3 bad_imaging_frames_per_step = DetectorTable( [True, True, True], ["imagingDetector", "diffDetector", "PandA"], ["ML-IMAGING-01", "ML-DIFF-01", "ML-PANDA-01"], [imaging_exposure_time, 
diffraction_exposure_time, 0.0], [3, 1, 2], ) bad_diffraction_frames_per_step = DetectorTable( [True, True, True], ["imagingDetector", "diffDetector", "PandA"], ["ML-IMAGING-01", "ML-DIFF-01", "ML-PANDA-01"], [imaging_exposure_time, diffraction_exposure_time, 0.0], [1, 10, 2], ) self.assertRaises( ValueError, self.o.on_validate, generator, {}, bad_imaging_frames_per_step ) self.assertRaises( ValueError, self.o.on_validate, generator, {}, bad_diffraction_frames_per_step, ) def test_validate_raises_ValueError_when_detector_not_enabled(self): nCycles = 1 generator = CompoundGenerator( [StaticPointGenerator(nCycles)], [], [], duration=0.0 ) imaging_exposure_time = 0.1 diffraction_exposure_time = 0.3 detectors_with_imaging_disabled = DetectorTable( [False, True, True], ["imagingDetector", "diffDetector", "PandA"], ["ML-IMAGING-01", "ML-DIFF-01", "ML-PANDA-01"], [imaging_exposure_time, diffraction_exposure_time, 0.0], [1, 1, 2], ) detectors_with_diffraction_disabled = DetectorTable( [True, False, True], ["imagingDetector", "diffDetector", "PandA"], ["ML-IMAGING-01", "ML-DIFF-01", "ML-PANDA-01"], [imaging_exposure_time, diffraction_exposure_time, 0.0], [1, 1, 2], ) self.assertRaises( ValueError, self.o.on_validate, generator, {}, detectors_with_imaging_disabled, ) self.assertRaises( ValueError, self.o.on_validate, generator, {}, detectors_with_diffraction_disabled, ) def test_validate_raises_ValueError_for_detector_with_zero_exposure(self): nCycles = 1 generator = CompoundGenerator( [StaticPointGenerator(nCycles)], [], [], duration=0.0 ) imaging_exposure_time = 0.1 diffraction_exposure_time = 0.3 detectors_with_zero_exposure_for_imaging = self._get_detector_table( 0.0, diffraction_exposure_time ) detectors_with_zero_exposure_for_diffraction = self._get_detector_table( imaging_exposure_time, 0.0 ) self.assertRaises( ValueError, self.o.on_validate, generator, {}, detectors_with_zero_exposure_for_imaging, ) self.assertRaises( ValueError, self.o.on_validate, generator, {}, 
detectors_with_zero_exposure_for_diffraction, ) def test_validate_raises_ValueError_for_missing_detector(self): nCycles = 1 generator = CompoundGenerator( [StaticPointGenerator(nCycles)], [], [], duration=0.0 ) imaging_exposure_time = 0.1 diffraction_exposure_time = 0.3 table_without_imaging_detector = DetectorTable( [True, True], ["diffDetector", "PandA"], ["ML-DIFF-01", "ML-PANDA-01"], [diffraction_exposure_time, 0.0], [1, 2], ) table_without_diffraction_detector = DetectorTable( [True, True], ["imagingDetector", "PandA"], ["ML-IMAGING-01", "ML-PANDA-01"], [imaging_exposure_time, 0.0], [1, 2], ) self.assertRaises( ValueError, self.o.on_validate, generator, {}, table_without_imaging_detector, ) self.assertRaises( ValueError, self.o.on_validate, generator, {}, table_without_diffraction_detector, ) def test_configure_with_one_cycle(self): self.o.imaging_angle = 50.0 self.o.diffraction_angle = 90.0 self.set_motor_attributes(x_pos=50.0, x_velocity=800.0, x_acceleration=100000.0) nCycles = 1 generator = CompoundGenerator( [StaticPointGenerator(nCycles)], [], [], duration=0.0 ) imaging_exposure_time = 0.1 diffraction_exposure_time = 0.3 detectors = self._get_detector_table( imaging_exposure_time, diffraction_exposure_time ) # Update generator duration based on validate method infos = self.o.on_validate(generator, {}, detectors) generator.duration = infos.value.duration generator.prepare() # Run configure self.o.on_configure(self.context, 0, nCycles, {}, generator, detectors, []) # Expected generator duration is sum of exposure times + 2*move_time assert generator.duration == pytest.approx( self.move_time * 2 + imaging_exposure_time + diffraction_exposure_time ) # Build up our expected values diffraction_detector_time_row = [2000, 250000, 250000, 2000, 300000] imaging_detector_time_row = [2000, 250000, 250000, 2000, 100000] times = nCycles * ( diffraction_detector_time_row + imaging_detector_time_row ) + [2000] diffraction_velocity_row = [1, 0, 1, 1, 1] 
imaging_velocity_row = [1, 0, 1, 1, 1] velocity_modes = nCycles * (diffraction_velocity_row + imaging_velocity_row) + [ 3 ] diffraction_detector_program_row = [1, 4, 2, 8, 8] imaging_detector_program_row = [1, 4, 2, 8, 8] user_programs = nCycles * ( diffraction_detector_program_row + imaging_detector_program_row ) + [1] diffraction_detector_pos_row = [50.0, 70.0, 90.0, 90.08, 90.08] imaging_detector_pos_row = [90.0, 70.0, 50.0, 49.92, 49.92] positions = nCycles * ( diffraction_detector_pos_row + imaging_detector_pos_row ) + [50.0] completed_steps = [0, 0, 1, 1, 1, 1, 1, 2, 3, 3, 3] assert self.child.handled_requests.mock_calls == [ call.post( "writeProfile", csPort="CS1", timeArray=[0.002], userPrograms=[8] ), call.post("executeProfile"), call.post("moveCS1", moveTime=0.0017888544, a=49.92), # pytest.approx to allow sensible compare with numpy arrays call.post( "writeProfile", csPort="CS1", timeArray=pytest.approx(times), velocityMode=pytest.approx(velocity_modes), userPrograms=pytest.approx(user_programs), a=pytest.approx(positions), ), ] assert self.o.completed_steps_lookup == completed_steps def test_configure_with_three_cycles(self): self.o.imaging_angle = 50.0 self.o.diffraction_angle = 90.0 self.set_motor_attributes(x_pos=50.0, x_velocity=800.0, x_acceleration=100000.0) nCycles = 3 generator = CompoundGenerator( [StaticPointGenerator(nCycles)], [], [], duration=0.0 ) imaging_exposure_time = 0.1 diffraction_exposure_time = 0.3 detectors = self._get_detector_table( imaging_exposure_time, diffraction_exposure_time ) # Update generator duration based on validate method infos = self.o.on_validate(generator, {}, detectors) generator.duration = infos.value.duration generator.prepare() # Run configure self.o.on_configure(self.context, 0, nCycles, {}, generator, detectors, []) # Expected generator duration is sum of exposure times + 2*move_time assert generator.duration == pytest.approx( self.move_time * 2 + imaging_exposure_time + diffraction_exposure_time ) # Build 
up our expected values diffraction_detector_time_row = [2000, 250000, 250000, 2000, 300000] imaging_detector_time_row = [2000, 250000, 250000, 2000, 100000] times = nCycles * ( diffraction_detector_time_row + imaging_detector_time_row ) + [2000] diffraction_velocity_row = [1, 0, 1, 1, 1] imaging_velocity_row = [1, 0, 1, 1, 1] velocity_modes = nCycles * (diffraction_velocity_row + imaging_velocity_row) + [ 3 ] diffraction_detector_program_row = [1, 4, 2, 8, 8] imaging_detector_program_row = [1, 4, 2, 8, 8] user_programs = nCycles * ( diffraction_detector_program_row + imaging_detector_program_row ) + [1] diffraction_detector_pos_row = [50.0, 70.0, 90.0, 90.08, 90.08] imaging_detector_pos_row = [90.0, 70.0, 50.0, 49.92, 49.92] positions = nCycles * ( diffraction_detector_pos_row + imaging_detector_pos_row ) + [50.0] completed_steps = [ 0, 0, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 6, 7, 7, 7, ] assert self.child.handled_requests.mock_calls == [ call.post( "writeProfile", csPort="CS1", timeArray=[0.002], userPrograms=[8] ), call.post("executeProfile"), call.post("moveCS1", moveTime=0.0017888544, a=49.92), # pytest.approx to allow sensible compare with numpy arrays call.post( "writeProfile", csPort="CS1", timeArray=pytest.approx(times), velocityMode=pytest.approx(velocity_modes), userPrograms=pytest.approx(user_programs), a=pytest.approx(positions), ), ] assert self.o.completed_steps_lookup == completed_steps def test_configure_with_one_cycle_with_long_exposure(self): self.o.imaging_angle = 35.0 self.o.diffraction_angle = 125.0 self.set_motor_attributes(x_pos=35.0, x_velocity=800.0, x_acceleration=100000.0) nCycles = 1 generator = CompoundGenerator( [StaticPointGenerator(nCycles)], [], [], duration=0.0 ) imaging_exposure_time = 4.0 diffraction_exposure_time = 10.0 detectors = self._get_detector_table( imaging_exposure_time, diffraction_exposure_time ) # Update generator duration based on validate method infos = 
self.o.on_validate(generator, {}, detectors) generator.duration = infos.value.duration generator.prepare() # Run configure self.o.on_configure(self.context, 0, nCycles, {}, generator, detectors, []) # Expected generator duration is sum of exposure times + 2*move_time assert ( generator.duration == self.move_time * 2 + imaging_exposure_time + diffraction_exposure_time ) # Build up our expected values diffraction_detector_time_row = [ 2000, 250000, 250000, 2000, 3333333, 3333334, 3333333, ] imaging_detector_time_row = [2000, 250000, 250000, 2000, 4000000] times = nCycles * ( diffraction_detector_time_row + imaging_detector_time_row ) + [2000] diffraction_velocity_row = [1, 0, 1, 1, 0, 0, 1] imaging_velocity_row = [1, 0, 1, 1, 1] velocity_modes = nCycles * (diffraction_velocity_row + imaging_velocity_row) + [ 3 ] diffraction_detector_program_row = [1, 4, 2, 8, 0, 0, 8] imaging_detector_program_row = [1, 4, 2, 8, 8] user_programs = nCycles * ( diffraction_detector_program_row + imaging_detector_program_row ) + [1] diffraction_detector_pos_row = [ 35.0, 80.0, 125.0, 125.18, 125.18, 125.18, 125.18, ] imaging_detector_pos_row = [125.0, 80.0, 35.0, 34.82, 34.82] positions = nCycles * ( diffraction_detector_pos_row + imaging_detector_pos_row ) + [35.0] completed_steps = [0, 0, 1, 1, 1, 1, 1, 1, 1, 2, 3, 3, 3] assert self.child.handled_requests.mock_calls == [ call.post( "writeProfile", csPort="CS1", timeArray=[0.002], userPrograms=[8] ), call.post("executeProfile"), call.post("moveCS1", moveTime=0.0026832816, a=34.82), # pytest.approx to allow sensible compare with numpy arrays call.post( "writeProfile", csPort="CS1", timeArray=pytest.approx(times), velocityMode=pytest.approx(velocity_modes), userPrograms=pytest.approx(user_programs), a=pytest.approx(positions), ), ] assert self.o.completed_steps_lookup == completed_steps def test_configure_with_three_cycles_with_long_exposure(self): self.o.imaging_angle = 35.0 self.o.diffraction_angle = 125.0 
self.set_motor_attributes(x_pos=35.0, x_velocity=800.0, x_acceleration=100000.0) nCycles = 3 generator = CompoundGenerator( [StaticPointGenerator(nCycles)], [], [], duration=0.0 ) imaging_exposure_time = 4.0 diffraction_exposure_time = 10.0 detectors = self._get_detector_table( imaging_exposure_time, diffraction_exposure_time ) # Update generator duration based on validate method infos = self.o.on_validate(generator, {}, detectors) generator.duration = infos.value.duration generator.prepare() # Run configure self.o.on_configure(self.context, 0, nCycles, {}, generator, detectors, []) # Expected generator duration is sum of exposure times + 2*move_time assert ( generator.duration == self.move_time * 2 + imaging_exposure_time + diffraction_exposure_time ) # Build up our expected values diffraction_detector_time_row = [ 2000, 250000, 250000, 2000, 3333333, 3333334, 3333333, ] imaging_detector_time_row = [2000, 250000, 250000, 2000, 4000000] times = nCycles * ( diffraction_detector_time_row + imaging_detector_time_row ) + [2000] diffraction_velocity_row = [1, 0, 1, 1, 0, 0, 1] imaging_velocity_row = [1, 0, 1, 1, 1] velocity_modes = nCycles * (diffraction_velocity_row + imaging_velocity_row) + [ 3 ] diffraction_detector_program_row = [1, 4, 2, 8, 0, 0, 8] imaging_detector_program_row = [1, 4, 2, 8, 8] user_programs = nCycles * ( diffraction_detector_program_row + imaging_detector_program_row ) + [1] diffraction_detector_pos_row = [ 35.0, 80.0, 125.0, 125.18, 125.18, 125.18, 125.18, ] imaging_detector_pos_row = [125.0, 80.0, 35.0, 34.82, 34.82] positions = nCycles * ( diffraction_detector_pos_row + imaging_detector_pos_row ) + [35.0] completed_steps = [ 0, 0, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 6, 7, 7, 7, ] assert self.child.handled_requests.mock_calls == [ call.post( "writeProfile", csPort="CS1", timeArray=[0.002], userPrograms=[8] ), call.post("executeProfile"), call.post("moveCS1", moveTime=0.0026832816, 
a=34.82), # pytest.approx to allow sensible compare with numpy arrays call.post( "writeProfile", csPort="CS1", timeArray=pytest.approx(times), velocityMode=pytest.approx(velocity_modes), userPrograms=pytest.approx(user_programs), a=pytest.approx(positions), ), ] assert self.o.completed_steps_lookup == completed_steps def test_configure_with_exposure_time_less_than_min_turnaround(self): self.o.imaging_angle = 50.0 self.o.diffraction_angle = 90.0 self.set_motor_attributes(x_pos=50.0, x_velocity=800.0, x_acceleration=100000.0) nCycles = 1 generator = CompoundGenerator( [StaticPointGenerator(nCycles)], [], [], duration=0.0 ) imaging_exposure_time = 0.0001 diffraction_exposure_time = 0.3 detectors = self._get_detector_table( imaging_exposure_time, diffraction_exposure_time ) # Update generator duration based on validate method infos = self.o.on_validate(generator, {}, detectors) generator.duration = infos.value.duration generator.prepare() # Run configure self.o.on_configure(self.context, 0, nCycles, {}, generator, detectors, []) # Expected generator duration is affected by min turnaround time assert generator.duration == pytest.approx( self.move_time * 2 + MIN_TIME + diffraction_exposure_time ) # Build up our expected values diffraction_detector_time_row = [2000, 250000, 250000, 2000, 300000] imaging_detector_time_row = [2000, 250000, 250000] times = nCycles * ( diffraction_detector_time_row + imaging_detector_time_row ) + [2000] diffraction_velocity_row = [1, 0, 1, 1, 1] imaging_velocity_row = [1, 0, 1] velocity_modes = nCycles * (diffraction_velocity_row + imaging_velocity_row) + [ 3 ] diffraction_detector_program_row = [1, 4, 2, 8, 8] imaging_detector_program_row = [1, 4, 2] user_programs = nCycles * ( diffraction_detector_program_row + imaging_detector_program_row ) + [1] diffraction_detector_pos_row = [50.0, 70.0, 90.0, 90.08, 90.08] imaging_detector_pos_row = [90.0, 70.0, 50.0] positions = nCycles * ( diffraction_detector_pos_row + imaging_detector_pos_row ) + 
[50.0] completed_steps = [0, 0, 1, 1, 1, 1, 1, 2, 3] assert self.child.handled_requests.mock_calls == [ call.post( "writeProfile", csPort="CS1", timeArray=[0.002], userPrograms=[8] ), call.post("executeProfile"), call.post("moveCS1", moveTime=0.0017888544, a=49.92), # pytest.approx to allow sensible compare with numpy arrays call.post( "writeProfile", csPort="CS1", timeArray=pytest.approx(times), velocityMode=pytest.approx(velocity_modes), userPrograms=pytest.approx(user_programs), a=pytest.approx(positions), ), ] assert self.o.completed_steps_lookup == completed_steps def test_configure_raises_ValueError_with_invalid_frames_per_step(self): self.set_motor_attributes() nCycles = 1 generator = CompoundGenerator( [StaticPointGenerator(nCycles)], [], [], duration=0.0 ) generator.prepare() imaging_exposure_time = 0.01 diffraction_exposure_time = 1.0 detectors_with_bad_imaging_frames_per_step = DetectorTable( [True, True, True], ["imagingDetector", "diffDetector", "PandA"], ["ML-IMAGING-01", "ML-DIFF-01", "ML-PANDA-01"], [imaging_exposure_time, diffraction_exposure_time, 0.0], [3, 1, 2], ) detectors_with_bad_diffraction_frames_per_step = DetectorTable( [True, True, True], ["imagingDetector", "diffDetector", "PandA"], ["ML-IMAGING-01", "ML-DIFF-01", "ML-PANDA-01"], [imaging_exposure_time, diffraction_exposure_time, 0.0], [1, 10, 2], ) self.assertRaises( ValueError, self.o.on_configure, self.context, 0, nCycles, {}, generator, detectors_with_bad_imaging_frames_per_step, [], ) self.assertRaises( ValueError, self.o.on_configure, self.context, 0, nCycles, {}, generator, detectors_with_bad_diffraction_frames_per_step, [], ) def test_configure_raises_ValueError_when_detector_not_enabled(self): nCycles = 1 generator = CompoundGenerator( [StaticPointGenerator(nCycles)], [], [], duration=0.0 ) imaging_exposure_time = 0.1 diffraction_exposure_time = 0.3 detectors_with_imaging_disabled = DetectorTable( [False, True, True], ["imagingDetector", "diffDetector", "PandA"], 
["ML-IMAGING-01", "ML-DIFF-01", "ML-PANDA-01"], [imaging_exposure_time, diffraction_exposure_time, 0.0], [1, 1, 2], ) detectors_with_diffraction_disabled = DetectorTable( [True, False, True], ["imagingDetector", "diffDetector", "PandA"], ["ML-IMAGING-01", "ML-DIFF-01", "ML-PANDA-01"], [imaging_exposure_time, diffraction_exposure_time, 0.0], [1, 1, 2], ) self.assertRaises( ValueError, self.o.on_configure, self.context, 0, nCycles, {}, generator, detectors_with_imaging_disabled, [], ) self.assertRaises( ValueError, self.o.on_configure, self.context, 0, nCycles, {}, generator, detectors_with_diffraction_disabled, [], ) def test_configure_raises_ValueError_when_exposure_is_zero(self): nCycles = 1 generator = CompoundGenerator( [StaticPointGenerator(nCycles)], [], [], duration=0.0 ) imaging_exposure_time = 0.1 diffraction_exposure_time = 0.3 detectors_with_imaging_zero_exposure = DetectorTable( [False, True, True], ["imagingDetector", "diffDetector", "PandA"], ["ML-IMAGING-01", "ML-DIFF-01", "ML-PANDA-01"], [imaging_exposure_time, 0.0, 0.0], [1, 1, 2], ) detectors_with_diffraction_zero_exposure = DetectorTable( [True, False, True], ["imagingDetector", "diffDetector", "PandA"], ["ML-IMAGING-01", "ML-DIFF-01", "ML-PANDA-01"], [0.0, diffraction_exposure_time, 0.0], [1, 1, 2], ) self.assertRaises( ValueError, self.o.on_configure, self.context, 0, nCycles, {}, generator, detectors_with_imaging_zero_exposure, [], ) self.assertRaises( ValueError, self.o.on_configure, self.context, 0, nCycles, {}, generator, detectors_with_diffraction_zero_exposure, [], ) def test_configure_raises_ValueError_with_missing_detector(self): self.set_motor_attributes() nCycles = 1 generator = CompoundGenerator( [StaticPointGenerator(nCycles)], [], [], duration=0.0 ) generator.prepare() exposure_time = 0.01 detectors_without_diffraction = DetectorTable( [True, True], ["imagingDetector", "PandA"], ["ML-IMAGING-01", "ML-PANDA-01"], [exposure_time, 0.0], [1, 2], ) detectors_without_imaging = 
DetectorTable( [True, True], ["diffDetector", "PandA"], ["ML-DIFF-01", "ML-PANDA-01"], [exposure_time, 0.0], [1, 2], ) self.assertRaises( ValueError, self.o.on_configure, self.context, 0, nCycles, {}, generator, detectors_without_diffraction, [], ) self.assertRaises( ValueError, self.o.on_configure, self.context, 0, nCycles, {}, generator, detectors_without_imaging, [], ) def test_invalid_parameters_raise_ValueError(self): # Some valid parameters name = "beamSelectorPart" mri = "PMAC" selector_axis = "x" imaging_angle = 30.0 diffraction_angle = 65.0 imaging_detector = "imagingDetector" diffraction_detector = "diffDetector" move_time = 0.25 # Check the valid parameters BeamSelectorPart( name, mri, selector_axis, imaging_angle, diffraction_angle, imaging_detector, diffraction_detector, move_time, ) # Mix with one of these invalid parameters invalid_selector_axes = [0.0, 1] invalid_angles = ["not_an_angle"] invalid_detector_names = [10, 53.3] invalid_move_times = ["this is not a number", -1.0, 0.0, "-0.45"] # Now we check they raise errors for invalid_axis in invalid_selector_axes: self.assertRaises( ValueError, BeamSelectorPart, name, mri, invalid_axis, imaging_angle, diffraction_angle, imaging_detector, diffraction_detector, move_time, ) for invalid_angle in invalid_angles: self.assertRaises( ValueError, BeamSelectorPart, name, mri, selector_axis, invalid_angle, diffraction_angle, imaging_detector, diffraction_detector, move_time, ) self.assertRaises( ValueError, BeamSelectorPart, name, mri, selector_axis, imaging_angle, invalid_angle, imaging_detector, diffraction_detector, move_time, ) for invalid_detector_name in invalid_detector_names: self.assertRaises( ValueError, BeamSelectorPart, name, mri, selector_axis, imaging_angle, diffraction_angle, invalid_detector_name, diffraction_detector, move_time, ) self.assertRaises( ValueError, BeamSelectorPart, name, mri, selector_axis, imaging_angle, diffraction_angle, imaging_detector, invalid_detector_name, move_time, ) 
for invalid_move_time in invalid_move_times: self.assertRaises( ValueError, BeamSelectorPart, name, mri, selector_axis, imaging_angle, diffraction_angle, imaging_detector, diffraction_detector, invalid_move_time, )
{ "content_hash": "e84c1dcfbf6899de1014530187a50e08", "timestamp": "", "source": "github", "line_count": 990, "max_line_length": 88, "avg_line_length": 34.06060606060606, "alnum_prop": 0.5266014234875445, "repo_name": "dls-controls/pymalcolm", "id": "e11a41ba924c8b8ab8b15c33ee88ee95423e64b9", "size": "33720", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/test_modules/test_pmac/test_beamselectorpart.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "549" }, { "name": "Python", "bytes": "1583458" }, { "name": "Shell", "bytes": "580" } ], "symlink_target": "" }
package com.utils.andres; import java.util.ArrayList; import java.util.Map; import android.content.SharedPreferences; public class ConflictChecker { ArrayList<Dependency> list = new ArrayList<Dependency>(); SharedPreferences mPreferences; /** * Detector de conflicto en base a las dependencias * @param mSharedPreferences preferencia a usar */ public ConflictChecker(SharedPreferences mSharedPreferences) { mPreferences = mSharedPreferences; } public void addDependency (Dependency mDependency) { list.add(mDependency); } /** * Detecta conflictos y los repara en las preferencias, para que una dependencia se cumpla se debe cumplir la dependencia maestra * y las sub-dependencias, de otro modo se las anula con el valor especificado en cada dependencia * @return */ public boolean detectConflicts () { boolean conflictsCorrected = false; for(Dependency mDependency : list){ // Si el master key cumple con la dependencia String masterValue = mPreferences.getString(mDependency.getMasterKey(), null); if (masterValue != null && Integer.valueOf(masterValue) == mDependency.getMasterValue()) { for(Map.Entry<String, Pair<String, Integer>> entry : mDependency.getDependencyMap().entrySet()){ String key = entry.getKey(); String refKey = entry.getValue().getFirst(); int value = entry.getValue().getSecond(); if (refKey != null) { if(mPreferences.getString(refKey, null) == null){ //throw new NullPointerException("El key de referencia no existe, asegúrese de que exista"); invalidateDependency(mDependency, refKey, mDependency.getInvalidationValue()); conflictsCorrected = true; }else{ int refValue = Integer.valueOf(mPreferences.getString(refKey, null)); String targetKey = key.replace("*", ""+refValue); try { if(Integer.valueOf(mPreferences.getString(targetKey, null)) != value){ invalidateDependency(mDependency, refKey, mDependency.getInvalidationValue()); conflictsCorrected = true; } } catch (NumberFormatException e) { invalidateDependency(mDependency, refKey, 
mDependency.getInvalidationValue()); conflictsCorrected = true; } } } else{ String keyString = mPreferences.getString(key, null); if(keyString != null && Integer.valueOf(keyString) != value){ invalidateDependency(mDependency, null, mDependency.getInvalidationValue()); conflictsCorrected = true; } } } } } return conflictsCorrected; } private void invalidateDependency (Dependency mDependency, String refKey, int invalidateValue){ mPreferences.edit().putString(mDependency.getMasterKey(), ""+invalidateValue).apply(); // Si no es null cambio directamente el valor del key de referencia if(refKey != null){ mPreferences.edit().putString(refKey, ""+invalidateValue).apply(); } for(String key : mDependency.getDependencyMap().keySet()){ if(!key.contains("*")) mPreferences.edit().putString(key, ""+invalidateValue).apply(); } } }
{ "content_hash": "8606fa23a008f6cb56b8eae7bc362e11", "timestamp": "", "source": "github", "line_count": 89, "max_line_length": 130, "avg_line_length": 35.92134831460674, "alnum_prop": 0.6787613387550829, "repo_name": "dragondgold/MultiWork", "id": "e24ac58b12ef9ed2434f219a595c4cf7abf287af", "size": "3198", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "MultiWork/src/main/java/com/utils/andres/ConflictChecker.java", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Groovy", "bytes": "716" }, { "name": "Java", "bytes": "214703" } ], "symlink_target": "" }
import chai = require('chai'); import fs = require('fs'); var expect = chai.expect; var tsc = require('..'); describe('CompositeCompilerHost', () => { it('Should accept custom lib.d.ts locations', () => { var cch = new tsc.CompositeCompilerHost({ defaultLibFilename: 'some/random/lib.d.ts' }); expect(cch.getDefaultLibFilename()).to.equal('some/random/lib.d.ts'); }); });
{ "content_hash": "cbd615ab463bc93183e7d7b776da1782", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 92, "avg_line_length": 25.866666666666667, "alnum_prop": 0.654639175257732, "repo_name": "theblacksmith/typescript-compiler", "id": "d82420616a545b9e4987dccac7ee04df01f98a9b", "size": "544", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/compositeCompilerHost.ts", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "1142366" }, { "name": "Makefile", "bytes": "179" }, { "name": "TypeScript", "bytes": "22605" } ], "symlink_target": "" }
.leaflet-toolbar .leaflet-control-draw-polygon { background-position: -31px -2px; } .leaflet-control-draw-polygon { font-size: 16px; font-weight: bold; line-height: 21px; } .leaflet-control-draw-polygon a { display: block; text-align: center; text-decoration: none; width: 22px; height: 22px; display: block; text-align: center; color: #ccc; text-decoration: none; font: bold 16px/22px Arial,Helvetica,sans-serif; -webkit-border-radius: 4px; border-radius: 4px; }
{ "content_hash": "1d390a93f215f0e1f14105061c27bd1e", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 50, "avg_line_length": 20.75, "alnum_prop": 0.6967871485943775, "repo_name": "julianray/leaflet_gem", "id": "3075793de4c3611c14def3fb3c22c21079efc05d", "size": "498", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/assets/stylesheets/leaflet_gem/draw_polygon_tool.css", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "3845" }, { "name": "HTML", "bytes": "4596" }, { "name": "JavaScript", "bytes": "43453" }, { "name": "Ruby", "bytes": "20110" } ], "symlink_target": "" }
package gov.va.escreening.dto.ae; import java.io.Serializable; import java.util.List; import com.fasterxml.jackson.annotation.JsonRootName; @JsonRootName("page") public class Page implements Serializable { private static final long serialVersionUID = 1L; private String pageTitle; private List<Measure> measures; private int pageNumber; private String description; private boolean hasVisibilityRules; public boolean getHasVisibilityRules() { return hasVisibilityRules; } public void setHasVisibilityRules(boolean hasVisibilityRules) { this.hasVisibilityRules = hasVisibilityRules; } public String getPageTitle() { return pageTitle; } public void setPageTitle(String pageTitle) { this.pageTitle = pageTitle; } public List<Measure> getMeasures() { return measures; } public void setMeasures(List<Measure> measures) { this.measures = measures; } public int getPageNumber() { return pageNumber; } public void setPageNumber(int pageNumber) { this.pageNumber = pageNumber; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public Page() { } @Override public String toString() { return "Page [pageTitle=" + pageTitle + ", measures=" + measures + "]"; } }
{ "content_hash": "b66f8e5d664398178eec74748bced7a7", "timestamp": "", "source": "github", "line_count": 68, "max_line_length": 79, "avg_line_length": 21.58823529411765, "alnum_prop": 0.6655313351498637, "repo_name": "efloto/Mental-Health-eScreening", "id": "b2ac7dd375b0115cbd0d630e9d5bbd17b382fc98", "size": "1468", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "escreening/src/main/java/gov/va/escreening/dto/ae/Page.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "8420" }, { "name": "CSS", "bytes": "5863912" }, { "name": "FreeMarker", "bytes": "95953" }, { "name": "HTML", "bytes": "6925871" }, { "name": "Java", "bytes": "3630764" }, { "name": "JavaScript", "bytes": "4323611" }, { "name": "PLSQL", "bytes": "1476" }, { "name": "SQLPL", "bytes": "93612" }, { "name": "Scala", "bytes": "3339" }, { "name": "Shell", "bytes": "6295" } ], "symlink_target": "" }
module BackboneEditable module Rails require 'backbone_editable/rails/engine' if defined?(Rails) end end
{ "content_hash": "5f7273fa5326589c636cec68fdddde53", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 63, "avg_line_length": 22.4, "alnum_prop": 0.7767857142857143, "repo_name": "geoffharcourt/backbone_editable-rails", "id": "4600aef3f3be774c191cb1e107284dbcd447ed22", "size": "112", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/backbone_editable-rails.rb", "mode": "33261", "license": "mit", "language": [ { "name": "CoffeeScript", "bytes": "3920" }, { "name": "JavaScript", "bytes": "154" }, { "name": "Perl", "bytes": "1070" }, { "name": "Ruby", "bytes": "1124" }, { "name": "Shell", "bytes": "38" } ], "symlink_target": "" }
package com.example.administrator.criminalintent_github; import android.content.Context; import android.support.test.InstrumentationRegistry; import android.support.test.runner.AndroidJUnit4; import org.junit.Test; import org.junit.runner.RunWith; import static org.junit.Assert.*; /** * Instrumentation test, which will execute on an Android device. * * @see <a href="http://d.android.com/tools/testing">Testing documentation</a> */ @RunWith(AndroidJUnit4.class) public class ExampleInstrumentedTest { @Test public void useAppContext() throws Exception { // Context of the app under test. Context appContext = InstrumentationRegistry.getTargetContext(); assertEquals("com.example.administrator.criminalintent_github", appContext.getPackageName()); } }
{ "content_hash": "ce95a678be73c99cefa23c22f7c97360", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 101, "avg_line_length": 30.692307692307693, "alnum_prop": 0.7593984962406015, "repo_name": "SoSoMaMaHuHu/CriminalIntent", "id": "049785827246396f4d2693c9a076c439ba28b8bb", "size": "798", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/androidTest/java/com/example/administrator/criminalintent_github/ExampleInstrumentedTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "40642" } ], "symlink_target": "" }
<div class="bg">  <div class="container contact"> <h1 class="text-center">Contact us</h1> <br /> <?php if (validation_errors() != '') { ?> <div class="row"> <div class="alert alert-danger alert-dismissible" role="alert"> <strong>Error !</strong><br/><br/><?php echo validation_errors();?> </div> </div> <?php } ?> <?php if ($success) { ?> <div class="row"> <div class="alert alert-success alert-dismissible" role="alert"> <strong>Success !</strong><br/><br/>Your message has been sent </div> </div> <?php } ?> <div class="row"> <?php echo form_open('verifycontact', array('class' => 'form-contact')); ?> <div class="form-group"> <label class="sr-only" for="email">Email</label> <input name="email" id="email" type="text" class="form-control" placeholder="Enter your email" autofocus value="<?php echo $email ?>"> </div> <div class="form-group"> <label class="sr-only" for="message">Message</label> <textarea name="message" id="message" class="form-control" placeholder="Enter your message" rows="10"><?php echo $message ?></textarea> </div> <button class="btn btn-block bt-login" type="submit">Send</button> </form> </div> </div> </div>
{ "content_hash": "784155e73ddfa908ed7673fbf743c6ca", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 151, "avg_line_length": 38.486486486486484, "alnum_prop": 0.5294943820224719, "repo_name": "ahmed-faresse/blitz", "id": "cd4d50f6c54a1057e07d5966588740f6fcee830c", "size": "1425", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "application/views/contact.php", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "468" }, { "name": "CSS", "bytes": "54170" }, { "name": "HTML", "bytes": "5390716" }, { "name": "JavaScript", "bytes": "62183" }, { "name": "PHP", "bytes": "1919895" } ], "symlink_target": "" }
rootpath=$SURVEY_DATA codepath=$SURVEY_CODE if [ "$#" -ne 3 ]; then echo "Usage: $0 trainCollection testCollection feature" exit fi trainCollection=$1 testCollection=$2 feature=$3 k=1000 tagger=tagvote if [ "$feature" = "color64+dsift" ]; then distance=l1 elif [ "$feature" = "vgg-verydeep-16-fc7relu" ]; then distance=cosine else echo "unknown feature $feature" exit fi if [ "$testCollection" == "flickr81" ]; then testAnnotationName=concepts81.txt elif [ "$testCollection" == "flickr51" ]; then testAnnotationName=concepts51ms.txt elif [ "$testCollection" == "mirflickr08" ]; then testAnnotationName=conceptsmir14.txt else echo "unknown testCollection $testCollection" exit fi annotationName=concepts130.txt python $codepath/instance_based/apply_tagger.py $testCollection $trainCollection $annotationName $feature --tagger $tagger --distance $distance --k $k tagvotesfile=$rootpath/$testCollection/autotagging/$testCollection/$trainCollection/$annotationName/$tagger/$feature,"$distance"knn,$k/id.tagvotes.txt if [ ! -f "$tagvotesfile" ]; then echo "tagvotes file $tagvotesfile does not exist!" exit fi conceptfile=$rootpath/$testCollection/Annotations/$testAnnotationName resultfile=$SURVEY_DB/"$trainCollection"_"$testCollection"_$feature,tagvote.pkl python $codepath/postprocess/pickle_tagvotes.py $conceptfile $tagvotesfile $resultfile
{ "content_hash": "e7493a69ef262e140ee7efc5b365b246", "timestamp": "", "source": "github", "line_count": 49, "max_line_length": 150, "avg_line_length": 28.693877551020407, "alnum_prop": 0.7467994310099573, "repo_name": "li-xirong/jingwei", "id": "ff71a017dd2138206be8346cdeb3c526e2d6ed22", "size": "1406", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "doit/do_tagvote.sh", "mode": "33261", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "4962" }, { "name": "C", "bytes": "1553" }, { "name": "C++", "bytes": "8649" }, { "name": "CSS", "bytes": "376" }, { "name": "HTML", "bytes": "2354" }, { "name": "Makefile", "bytes": "415" }, { "name": "Matlab", "bytes": "21689" }, { "name": "Python", "bytes": "400259" }, { "name": "Shell", "bytes": "42803" } ], "symlink_target": "" }
<!DOCTYPE html> <!-- webpage built using Twitter Bootstrap, an open source web framework --> <html lang="en"><head><meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <meta charset="utf-8"> <title>SharedHere</title> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta name="description" content=""> <meta name="author" content=""> <link href="http://twitter.github.com/bootstrap/assets/css/bootstrap.css" rel="stylesheet"> <style type="text/css"> body { padding-top: 60px; padding-bottom: 40px; } .sidebar-nav { padding: 9px 0; } </style> <link href="http://twitter.github.com/bootstrap/assets/css/bootstrap-responsive.css" rel="stylesheet"> <!-- HTML5 shim, for IE6-8 support of HTML5 elements --> <!--[if lt IE 9]> <script src="http://html5shim.googlecode.com/svn/trunk/html5.js"></script> <![endif]--> <!-- Fav and touch icons --> <link rel="shortcut icon" href="http://twitter.github.com/bootstrap/assets/ico/favicon.ico"> <link rel="apple-touch-icon-precomposed" sizes="144x144" href="http://twitter.github.com/bootstrap/assets/ico/apple-touch-icon-144-precomposed.png"> <link rel="apple-touch-icon-precomposed" sizes="114x114" href="http://twitter.github.com/bootstrap/assets/ico/apple-touch-icon-114-precomposed.png"> <link rel="apple-touch-icon-precomposed" sizes="72x72" href="http://twitter.github.com/bootstrap/assets/ico/apple-touch-icon-72-precomposed.png"> <link rel="apple-touch-icon-precomposed" href="http://twitter.github.com/bootstrap/assets/ico/apple-touch-icon-57-precomposed.png"> </head> <body> <div class="navbar navbar-fixed-top navbar-inverse"> <div class="navbar-inner"> <div class="container"> <a class="btn btn-navbar" data-toggle="collapse" data-target=".nav-collapse"> <span class="icon-bar"></span> <span class="icon-bar"></span> <span class="icon-bar"></span> </a> <a class="brand" href="#">SharedHere</a> <div class="nav-collapse"> <ul class="nav"> <li class="active"><a href="#">Home</a></li> <li><a 
href="#about">About</a></li> <li><a href="#">Related Projects</a></li> <li><a hred="#">FAQ</a></li> <li class="dropdown"> <a href="#" class="dropdown-toggle" data-toggle="dropdown"> Contact <b class="caret"></b> </a> <ul class="dropdown-menu"> <li><a href="http://code.google.com/p/sharedhere/people/list">Founding Members</a></li> <li><a href="http://groups.google.com/group/sharedhere">Discussion Group</a></li> </ul> </li><!-- dropdown --> </ul><!-- /.nav --> </div><!--/.nav-collapse --> </div><!-- /.container --> </div><!-- /.navbar-inner --> </div><!-- /.navbar --> </div> <!-- start of body container --> <div class="container-fluid"> <div class="row-fluid"> <div class="span12"> <div class="hero-unit"> <h1>What is SharedHere all about?</h1><hr> <p>SharedHere is an Android App for GeoTagged content sharing. It stores and facilitates the upload and download activity of content tagged with specific information shared with respect to GPS Coordinates. The SharedHere app has a wide array of uses, from casual users wanting to embark on a virtual scavenger hunt, to a commercial business distributing coupons to patrons in close proximity to its doors.</p> </div> </div> </div> <div class="row-fluid"> <div class="span4"> <h2>Built for Android Devices</h2> <img src="img/android.png" width="100" height="100"> </div><!--/span--> <div class="span4"> <h2>Built on Eclipse</h2> <img src="img/eclipse.png" width="150" height="150"> </div><!--/span--> <div class="span4"> <h2>Written in Java</h2> <img src="img/java.png" width="100" height="100"> </div><!--/span--> </div><!--/row--> <div class="row-fluid"> <div class="span4"> <h2>Server-scripting with PHP</h2> <img src="img/php.png" width="100" height="100"> </div><!--/span--> <div class="span4"> <h2>Utilizes Google Maps API</h2> <img src="img/gmaps.jpg" width="100" height="100"> </div><!--/span--> <div class="span4"> <h2>Manages data with mySQL</h2> <img src="img/mysql.png" width="100" height="100"> </div> </div> </div> </div> </div> 
</div> </div> </body> <hr><br> <footer> <div class="navbar navbar-fixed-bottom navbar-inverse"> <div class="navbar-inner"> <div class="container"> <p style="color: #fff">© SharedHere 2012 <img src="img/shlogo.jpg" height="25" width="25"></p> <p><i class="icon-envelope icon-white"></i><a href="http://sharedhere.com/contact-us.html">Contact</a></p> <p style="color: #2ECCFA"><a href="http://opensource.org/licenses/MIT">SharedHere is released under the MIT License</p> </div> </div> </footer> <!-- javaScript dump --> <script src="./Bootstrap, from Twitter_files/jquery.js"></script> <script src="./Bootstrap, from Twitter_files/bootstrap-transition.js"></script> <script src="./Bootstrap, from Twitter_files/bootstrap-alert.js"></script> <script src="./Bootstrap, from Twitter_files/bootstrap-modal.js"></script> <script src="./Bootstrap, from Twitter_files/bootstrap-dropdown.js"></script> <script src="./Bootstrap, from Twitter_files/bootstrap-scrollspy.js"></script> <script src="./Bootstrap, from Twitter_files/bootstrap-tab.js"></script> <script src="./Bootstrap, from Twitter_files/bootstrap-tooltip.js"></script> <script src="./Bootstrap, from Twitter_files/bootstrap-popover.js"></script> <script src="./Bootstrap, from Twitter_files/bootstrap-button.js"></script> <script src="./Bootstrap, from Twitter_files/bootstrap-collapse.js"></script> <script src="./Bootstrap, from Twitter_files/bootstrap-carousel.js"></script> <script src="./Bootstrap, from Twitter_files/bootstrap-typeahead.js"></script> </html>
{ "content_hash": "1fb7688647bfc00cc53865c082296572", "timestamp": "", "source": "github", "line_count": 141, "max_line_length": 423, "avg_line_length": 46.9645390070922, "alnum_prop": 0.5795832074901842, "repo_name": "Mohammad-Khan/sharedhere", "id": "1426ecc12d9059f37486181e0b2380c14fb8de17", "size": "6623", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "SharedHere Website Redesign/about.html", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "44042" }, { "name": "Java", "bytes": "25878" }, { "name": "PHP", "bytes": "8735" } ], "symlink_target": "" }
const pathModule = require('path'); const expect = require('../unexpected-with-plugins'); const AssetGraph = require('../../lib/AssetGraph'); // These tests require a bunch of different plugins, find the // right place for them when the core transforms have been split out describe.skip('transforms/inlineHtmlTemplates', function () { it('should handle a test case with a single Knockout.js template with a nested template loaded using the systemjs-tpl plugin', async function () { const assetGraph = new AssetGraph({ root: pathModule.resolve( __dirname, '../../testdata/transforms/inlineHtmlTemplates/withNested/' ), }); await assetGraph .loadAssets('index.html') .populate({ followRelations: { type: { $not: 'JavaScriptSourceMappingUrl' } }, }) .bundleSystemJs() .populate({ followRelations: { type: { $not: 'JavaScriptSourceMappingUrl' } }, }) .inlineHtmlTemplates(); expect(assetGraph, 'to contain relations', 'HtmlInlineScriptTemplate', 2); expect( assetGraph, 'to contain relations', { type: 'HtmlInlineScriptTemplate', from: { fileName: 'index.html' } }, 2 ); expect( assetGraph.findAssets({ fileName: 'index.html' })[0].text, 'to contain', '<script type="text/html" id="theEmbeddedTemplate" foo="bar">\n <h1>This is an embedded template, which should also end up in the main document</h1>\n</script>' + '<script type="text/html" id="foo"><div></div>\n\n</script>' ); }); it('should handle a test case with several Knockout.js templates loaded using the systemjs-tpl plugin', async function () { const assetGraph = new AssetGraph({ root: pathModule.resolve( __dirname, '../../testdata/transforms/inlineHtmlTemplates/multiple/' ), }); await assetGraph .loadAssets('index.html') .populate({ followRelations: { type: { $not: 'JavaScriptSourceMappingUrl' } }, }) .bundleSystemJs() .populate({ followRelations: { type: { $not: 'JavaScriptSourceMappingUrl' } }, }) .inlineHtmlTemplates(); expect(assetGraph, 'to contain relations', 'HtmlInlineScriptTemplate', 6); expect( assetGraph, 'to contain 
relations', { type: 'HtmlInlineScriptTemplate', from: { fileName: 'index.html' } }, 6 ); expect( assetGraph.findAssets({ fileName: 'index.html' })[0].text, 'to contain', '<script type="text/html" id="theEmbeddedTemplate" foo="bar">\n <h1>This is the embedded template, which should also end up in the main document</h1>\n</script>' + '<script type="text/html" foo="bar1">\n <h1>This embedded template has no id. This too should end up in the main document, along with it\'s attributes</h1>\n</script>' + '<script type="text/html" foo="bar2">\n <h1>This embedded template has no id. This too should end up in the main document, along with it\'s attributes</h1>\n</script>' + '<script type="text/html" id="foo"><img data-bind="attr: {src: \'/foo.png\'.toString(\'url\')}">\n</script><script type="text/html" id="bar"><div>\n <h1>bar.ko</h1>\n</div>\n</script><script type="text/html" id="templateWithEmbeddedTemplate"><div data-bind="template: \'theEmbeddedTemplate\'"></div>\n\n\n\n</script></head>' ); let relation = assetGraph.findRelations({ type: 'HtmlInlineScriptTemplate', node(node) { return node.getAttribute('id') === 'foo'; }, })[0]; expect(relation, 'to be ok'); expect( relation.to.text, 'to equal', "<img data-bind=\"attr: {src: '/foo.png'.toString('url')}\">\n" ); relation = assetGraph.findRelations({ type: 'HtmlInlineScriptTemplate', node(node) { return node.getAttribute('id') === 'bar'; }, })[0]; expect(relation, 'to be ok'); expect( relation.to.text, 'to equal', '<div>\n <h1>bar.ko</h1>\n</div>\n' ); }); it('should handle a test case with the same Knockout.js being loaded using the systemjs-tpl plugin in multiple .html pages', async function () { const assetGraph = new AssetGraph({ root: pathModule.resolve( __dirname, '../../testdata/transforms/inlineHtmlTemplates/multipleInMultipleHtmlPages/' ), }); await assetGraph.loadAssets(['index1.html', 'index2.html']); await assetGraph.populate({ followRelations: { type: { $not: 'JavaScriptSourceMappingUrl' } }, }); await 
assetGraph.bundleSystemJs(); await assetGraph.populate({ followRelations: { type: { $not: 'JavaScriptSourceMappingUrl' } }, }); await assetGraph.inlineHtmlTemplates(); expect(assetGraph, 'to contain relations', 'HtmlInlineScriptTemplate', 12); expect( assetGraph, 'to contain relations', { type: 'HtmlInlineScriptTemplate', from: { fileName: 'index1.html' } }, 6 ); expect( assetGraph.findAssets({ fileName: 'index1.html' })[0].text, 'to contain', '<script type="text/html" id="theEmbeddedTemplate" foo="bar">\n <h1>This is the embedded template, which should also end up in the main document</h1>\n</script>' + '<script type="text/html" foo="bar1">\n <h1>This embedded template has no id. This too should end up in the main document, along with it\'s attributes</h1>\n</script>' + '<script type="text/html" foo="bar2">\n <h1>This embedded template has no id. This too should end up in the main document, along with it\'s attributes</h1>\n</script>' + '<script type="text/html" id="foo"><img data-bind="attr: {src: \'/foo.png\'.toString(\'url\')}">\n</script><script type="text/html" id="bar"><div>\n <h1>bar.ko</h1>\n</div>\n</script><script type="text/html" id="templateWithEmbeddedTemplate"><div data-bind="template: \'theEmbeddedTemplate\'"></div>\n\n\n\n</script></head>' ); let relation = assetGraph.findRelations({ type: 'HtmlInlineScriptTemplate', node(node) { return node.getAttribute('id') === 'foo'; }, })[0]; expect(relation, 'to be ok'); expect( relation.to.text, 'to equal', "<img data-bind=\"attr: {src: '/foo.png'.toString('url')}\">\n" ); relation = assetGraph.findRelations({ type: 'HtmlInlineScriptTemplate', node(node) { return node.getAttribute('id') === 'bar'; }, })[0]; expect(relation, 'to be ok'); expect( relation.to.text, 'to equal', '<div>\n <h1>bar.ko</h1>\n</div>\n' ); expect( assetGraph, 'to contain relations', { type: 'HtmlInlineScriptTemplate', from: { fileName: 'index2.html' } }, 6 ); expect( assetGraph.findAssets({ fileName: 'index2.html' })[0].text, 'to 
contain', '<script type="text/html" id="theEmbeddedTemplate" foo="bar">\n <h1>This is the embedded template, which should also end up in the main document</h1>\n</script>' + '<script type="text/html" foo="bar1">\n <h1>This embedded template has no id. This too should end up in the main document, along with it\'s attributes</h1>\n</script>' + '<script type="text/html" foo="bar2">\n <h1>This embedded template has no id. This too should end up in the main document, along with it\'s attributes</h1>\n</script>' + '<script type="text/html" id="foo"><img data-bind="attr: {src: \'/foo.png\'.toString(\'url\')}">\n</script><script type="text/html" id="bar"><div>\n <h1>bar.ko</h1>\n</div>\n</script><script type="text/html" id="templateWithEmbeddedTemplate"><div data-bind="template: \'theEmbeddedTemplate\'"></div>\n\n\n\n</script></head>' ); relation = assetGraph.findRelations({ type: 'HtmlInlineScriptTemplate', node(node) { return node.getAttribute('id') === 'foo'; }, })[0]; expect(relation, 'to be ok'); expect( relation.to.text, 'to equal', "<img data-bind=\"attr: {src: '/foo.png'.toString('url')}\">\n" ); relation = assetGraph.findRelations({ type: 'HtmlInlineScriptTemplate', node(node) { return node.getAttribute('id') === 'bar'; }, })[0]; expect(relation, 'to be ok'); expect( relation.to.text, 'to equal', '<div>\n <h1>bar.ko</h1>\n</div>\n' ); }); });
{ "content_hash": "7f2fc28b8ece69ce19e5a115ae526781", "timestamp": "", "source": "github", "line_count": 202, "max_line_length": 335, "avg_line_length": 41.603960396039604, "alnum_prop": 0.6135173726796763, "repo_name": "assetgraph/assetgraph", "id": "220f50c8cb15caa0a94257e7afeca60bbf8ce166", "size": "8404", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/transforms/inlineHtmlTemplates.js", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "JavaScript", "bytes": "1140587" } ], "symlink_target": "" }
<?php # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/pubsub/v1/pubsub.proto namespace Google\Pubsub\V1; use Google\Protobuf\Internal\GPBType; use Google\Protobuf\Internal\RepeatedField; use Google\Protobuf\Internal\GPBUtil; /** * Request for the UpdateTopic method. * * Generated from protobuf message <code>google.pubsub.v1.UpdateTopicRequest</code> */ class UpdateTopicRequest extends \Google\Protobuf\Internal\Message { /** * The topic to update. * * Generated from protobuf field <code>.google.pubsub.v1.Topic topic = 1;</code> */ private $topic = null; /** * Indicates which fields in the provided topic to update. * Must be specified and non-empty. * * Generated from protobuf field <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private $update_mask = null; public function __construct() { \GPBMetadata\Google\Pubsub\V1\Pubsub::initOnce(); parent::__construct(); } /** * The topic to update. * * Generated from protobuf field <code>.google.pubsub.v1.Topic topic = 1;</code> * @return \Google\Pubsub\V1\Topic */ public function getTopic() { return $this->topic; } /** * The topic to update. * * Generated from protobuf field <code>.google.pubsub.v1.Topic topic = 1;</code> * @param \Google\Pubsub\V1\Topic $var * @return $this */ public function setTopic($var) { GPBUtil::checkMessage($var, \Google\Pubsub\V1\Topic::class); $this->topic = $var; return $this; } /** * Indicates which fields in the provided topic to update. * Must be specified and non-empty. * * Generated from protobuf field <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return \Google\Protobuf\FieldMask */ public function getUpdateMask() { return $this->update_mask; } /** * Indicates which fields in the provided topic to update. * Must be specified and non-empty. 
* * Generated from protobuf field <code>.google.protobuf.FieldMask update_mask = 2;</code> * @param \Google\Protobuf\FieldMask $var * @return $this */ public function setUpdateMask($var) { GPBUtil::checkMessage($var, \Google\Protobuf\FieldMask::class); $this->update_mask = $var; return $this; } }
{ "content_hash": "16de63e208c4dcb9c35cf546c49bf3cb", "timestamp": "", "source": "github", "line_count": 92, "max_line_length": 93, "avg_line_length": 26.565217391304348, "alnum_prop": 0.6284779050736498, "repo_name": "shinfan/api-client-staging", "id": "aec28a6754e4abe5deb4204b51fd970447215501", "size": "2444", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "generated/php/google-cloud-pubsub-v1/proto/src/Google/Pubsub/V1/UpdateTopicRequest.php", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Java", "bytes": "2842394" }, { "name": "JavaScript", "bytes": "890945" }, { "name": "PHP", "bytes": "3763710" }, { "name": "Protocol Buffer", "bytes": "605865" }, { "name": "Python", "bytes": "1395644" }, { "name": "Ruby", "bytes": "2468895" }, { "name": "Shell", "bytes": "592" } ], "symlink_target": "" }
'use strict' var cantonService = require('services/provincia.canton.service'); function queryCanton(req, res) { var q = req.query.q; var fields = req.query.fields; var sort = req.query.sort; var page = req.query.page; var perPage = req.query.per_page; cantonService.query(req.params.id_provincia, q,fields, sort, page, perPage) .then(function (response) { if (response.cantons) { res.header('X-Total-Count',response.count); res.send(response.cantons); } else { res.sendStatus(404); } }) .catch(function (err) { res.status(400).send(err); }); }; function getCantonById(req, res) { cantonService.getById(req.params.id_provincia, req.params._id) .then(function (obj) { if (obj) { res.send(obj); } else { res.sendStatus(404); } }) .catch(function (err) { res.status(400).send(err); }); }; function createCanton(req, res) { cantonService.create(req.params.id_provincia,req.body) .then(function () { res.sendStatus(200); }) .catch(function (err) { res.status(400).send(err); }); }; function updateCanton(req, res) { cantonService.update(req.params.id_provincia,req.params._id, req.body) .then(function () { res.sendStatus(200); }) .catch(function (err) { res.status(400).send(err); }); }; function deleteCanton(req, res) { cantonService.delete(req.params.id_provincia, req.params._id) .then(function () { res.sendStatus(200); }) .catch(function (err) { res.status(400).send(err); }); }; module.exports.queryCanton = queryCanton; module.exports.getCantonById = getCantonById; module.exports.createCanton = createCanton; module.exports.updateCanton = updateCanton; module.exports.deleteCanton = deleteCanton;
{ "content_hash": "0d0acccea187554750d1af9b81ff43b7", "timestamp": "", "source": "github", "line_count": 77, "max_line_length": 79, "avg_line_length": 27.22077922077922, "alnum_prop": 0.5539122137404581, "repo_name": "ntrujillo/elecciones", "id": "556c5ecb07d6a3eefd02d35e64ab9c341ceab094", "size": "2096", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "controllers/api/provincia/provincia.canton.controller.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "777598" }, { "name": "HTML", "bytes": "387644" }, { "name": "JavaScript", "bytes": "1144134" }, { "name": "Makefile", "bytes": "2764" }, { "name": "Perl", "bytes": "1637" }, { "name": "Shell", "bytes": "680" } ], "symlink_target": "" }
package org.apache.skywalking.oap.server.core.management.ui.template; import java.util.HashMap; import java.util.Map; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.Setter; import org.apache.skywalking.oap.server.core.analysis.Stream; import org.apache.skywalking.oap.server.core.analysis.management.ManagementData; import org.apache.skywalking.oap.server.core.analysis.worker.ManagementStreamProcessor; import org.apache.skywalking.oap.server.core.source.ScopeDeclaration; import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder; import org.apache.skywalking.oap.server.core.storage.annotation.Column; import static org.apache.skywalking.oap.server.core.source.DefaultScopeDefine.UI_TEMPLATE; @Setter @Getter @ScopeDeclaration(id = UI_TEMPLATE, name = "UITemplate") @Stream(name = UITemplate.INDEX_NAME, scopeId = UI_TEMPLATE, builder = UITemplate.Builder.class, processor = ManagementStreamProcessor.class) @EqualsAndHashCode(of = { "name" }, callSuper = false) public class UITemplate extends ManagementData { public static final String INDEX_NAME = "ui_template"; public static final String NAME = "name"; public static final String TYPE = "type"; public static final String CONFIGURATION = "configuration"; public static final String ACTIVATED = "activated"; public static final String DISABLED = "disabled"; @Column(columnName = NAME) private String name; @Column(columnName = TYPE, storageOnly = true) private String type; /** * Configuration in JSON format. 
*/ @Column(columnName = CONFIGURATION, storageOnly = true, length = 1_000_000) private String configuration; @Column(columnName = ACTIVATED, storageOnly = true) private int activated; @Column(columnName = DISABLED) private int disabled; @Override public String id() { return name; } public static class Builder implements StorageHashMapBuilder<UITemplate> { @Override public UITemplate storage2Entity(final Map<String, Object> dbMap) { UITemplate uiTemplate = new UITemplate(); uiTemplate.setName((String) dbMap.get(NAME)); uiTemplate.setType((String) dbMap.get(TYPE)); uiTemplate.setConfiguration((String) dbMap.get(CONFIGURATION)); uiTemplate.setActivated(((Number) dbMap.get(ACTIVATED)).intValue()); uiTemplate.setDisabled(((Number) dbMap.get(DISABLED)).intValue()); return uiTemplate; } @Override public Map<String, Object> entity2Storage(final UITemplate storageData) { final HashMap<String, Object> map = new HashMap<>(); map.put(NAME, storageData.getName()); map.put(TYPE, storageData.getType()); map.put(CONFIGURATION, storageData.getConfiguration()); map.put(ACTIVATED, storageData.getActivated()); map.put(DISABLED, storageData.getDisabled()); return map; } } }
{ "content_hash": "e0da6d27a2642de6db68add6be2f42a0", "timestamp": "", "source": "github", "line_count": 76, "max_line_length": 141, "avg_line_length": 39.71052631578947, "alnum_prop": 0.7060967528164347, "repo_name": "ascrutae/sky-walking", "id": "f4544fd746d60f67019c2165cb46f675c705c764", "size": "3822", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/management/ui/template/UITemplate.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "7666" }, { "name": "Batchfile", "bytes": "6450" }, { "name": "Dockerfile", "bytes": "2490" }, { "name": "FreeMarker", "bytes": "13355" }, { "name": "Java", "bytes": "8090931" }, { "name": "Kotlin", "bytes": "7757" }, { "name": "Makefile", "bytes": "3785" }, { "name": "Python", "bytes": "2443" }, { "name": "Scala", "bytes": "6067" }, { "name": "Shell", "bytes": "137411" }, { "name": "Smarty", "bytes": "5594" }, { "name": "TSQL", "bytes": "101499" }, { "name": "Thrift", "bytes": "2814" } ], "symlink_target": "" }
<?php namespace Sylius\Bundle\CurrencyBundle; use Doctrine\Bundle\DoctrineBundle\DependencyInjection\Compiler\DoctrineOrmMappingsPass; use Sylius\Bundle\ResourceBundle\DependencyInjection\Compiler\ResolveDoctrineTargetEntitiesPass; use Sylius\Bundle\ResourceBundle\SyliusResourceBundle; use Symfony\Component\DependencyInjection\ContainerBuilder; use Symfony\Component\HttpKernel\Bundle\Bundle; /** * Currency bundle. * * @author Paweł Jędrzejewski <pjedrzejewski@sylius.pl> */ class SyliusCurrencyBundle extends Bundle { /** * Return array of currently supported drivers. * * @return array */ public static function getSupportedDrivers() { return array( SyliusResourceBundle::DRIVER_DOCTRINE_ORM ); } /** * {@inheritdoc} */ public function build(ContainerBuilder $container) { $interfaces = array( 'Sylius\Component\Currency\Model\CurrencyInterface' => 'sylius.model.currency.class', ); $container->addCompilerPass(new ResolveDoctrineTargetEntitiesPass('sylius_currency', $interfaces)); $mappings = array( realpath(__DIR__ . '/Resources/config/doctrine/model') => 'Sylius\Component\Currency\Model', ); $container->addCompilerPass(DoctrineOrmMappingsPass::createXmlMappingDriver($mappings, array('doctrine.orm.entity_manager'), 'sylius_currency.driver.doctrine/orm')); } }
{ "content_hash": "193b5f34b4599ab74699a6d56647556a", "timestamp": "", "source": "github", "line_count": 49, "max_line_length": 173, "avg_line_length": 29.551020408163264, "alnum_prop": 0.7064917127071824, "repo_name": "fatihi-achraf/weshop", "id": "fa9d6b58811a526b409caffe6ac1579b36fcb17b", "size": "1661", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Sylius/Bundle/CurrencyBundle/SyliusCurrencyBundle.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "968562" }, { "name": "CoffeeScript", "bytes": "4704" }, { "name": "Go", "bytes": "6808" }, { "name": "JavaScript", "bytes": "4200957" }, { "name": "PHP", "bytes": "2508829" }, { "name": "Puppet", "bytes": "2891" }, { "name": "Python", "bytes": "5596" }, { "name": "Ruby", "bytes": "907" }, { "name": "Shell", "bytes": "2733" } ], "symlink_target": "" }
static const char* ppszTypeName[] = { "ERROR", "tx", "block", "filtered block", "tx lock request", "tx lock vote", "spork", "sn winner", "sn scan error", "sn budget vote", "sn budget proposal", "sn budget finalized", "sn budget finalized vote", "sn quorum", "sn announce", "sn ping", "sstx" }; CMessageHeader::CMessageHeader() { memcpy(pchMessageStart, Params().MessageStart(), MESSAGE_START_SIZE); memset(pchCommand, 0, sizeof(pchCommand)); pchCommand[1] = 1; nMessageSize = -1; nChecksum = 0; } CMessageHeader::CMessageHeader(const char* pszCommand, unsigned int nMessageSizeIn) { memcpy(pchMessageStart, Params().MessageStart(), MESSAGE_START_SIZE); strncpy(pchCommand, pszCommand, COMMAND_SIZE); nMessageSize = nMessageSizeIn; nChecksum = 0; } std::string CMessageHeader::GetCommand() const { if (pchCommand[COMMAND_SIZE-1] == 0) return std::string(pchCommand, pchCommand + strlen(pchCommand)); else return std::string(pchCommand, pchCommand + COMMAND_SIZE); } bool CMessageHeader::IsValid() const { // Check start string if (memcmp(pchMessageStart, Params().MessageStart(), MESSAGE_START_SIZE) != 0) return false; // Check the command string for errors for (const char* p1 = pchCommand; p1 < pchCommand + COMMAND_SIZE; p1++) { if (*p1 == 0) { // Must be all zeros after the first zero for (; p1 < pchCommand + COMMAND_SIZE; p1++) if (*p1 != 0) return false; } else if (*p1 < ' ' || *p1 > 0x7E) return false; } // Message size if (nMessageSize > MAX_SIZE) { LogPrintf("CMessageHeader::IsValid() : (%s, %u bytes) nMessageSize > MAX_SIZE\n", GetCommand(), nMessageSize); return false; } return true; } CAddress::CAddress() : CService() { Init(); } CAddress::CAddress(CService ipIn, uint64_t nServicesIn) : CService(ipIn) { Init(); nServices = nServicesIn; } void CAddress::Init() { nServices = NODE_NETWORK; nTime = 100000000; nLastTry = 0; } CInv::CInv() { type = 0; hash = 0; } CInv::CInv(int typeIn, const uint256& hashIn) { type = typeIn; hash = hashIn; } CInv::CInv(const std::string& 
strType, const uint256& hashIn) { unsigned int i; for (i = 1; i < ARRAYLEN(ppszTypeName); i++) { if (strType == ppszTypeName[i]) { type = i; break; } } if (i == ARRAYLEN(ppszTypeName)) LogPrint("net", "CInv::CInv(string, uint256) : unknown type '%s'", strType); hash = hashIn; } bool operator<(const CInv& a, const CInv& b) { return (a.type < b.type || (a.type == b.type && a.hash < b.hash)); } bool CInv::IsKnownType() const { return (type >= 1 && type < (int)ARRAYLEN(ppszTypeName)); } const char* CInv::GetCommand() const { if (!IsKnownType()) LogPrint("net", "CInv::GetCommand() : type=%d unknown type", type); return ppszTypeName[type]; } std::string CInv::ToString() const { return strprintf("%s %s", GetCommand(), hash.ToString()); } void CInv::print() const { LogPrintf("CInv(%s)\n", ToString()); }
{ "content_hash": "0c679b4616276b59718c4394cde15c20", "timestamp": "", "source": "github", "line_count": 150, "max_line_length": 118, "avg_line_length": 21.80666666666667, "alnum_prop": 0.5894221950473861, "repo_name": "GregoryBetz/DarkSilk", "id": "ab8fce6fc6c31c488d7e6de3a934c8cfd01dc90d", "size": "3666", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "src/protocol.cpp", "mode": "33261", "license": "mit", "language": [ { "name": "Assembly", "bytes": "160918" }, { "name": "Batchfile", "bytes": "3764" }, { "name": "C", "bytes": "891224" }, { "name": "C++", "bytes": "6912116" }, { "name": "CSS", "bytes": "1127" }, { "name": "HTML", "bytes": "50621" }, { "name": "Java", "bytes": "2100" }, { "name": "M4", "bytes": "19679" }, { "name": "Makefile", "bytes": "39551" }, { "name": "NSIS", "bytes": "6088" }, { "name": "Objective-C", "bytes": "3020" }, { "name": "Objective-C++", "bytes": "5844" }, { "name": "Protocol Buffer", "bytes": "484" }, { "name": "Python", "bytes": "197152" }, { "name": "QMake", "bytes": "26637" }, { "name": "Shell", "bytes": "380765" } ], "symlink_target": "" }
<?php use Illuminate\Foundation\Testing\WithoutMiddleware; use Illuminate\Foundation\Testing\DatabaseMigrations; use Illuminate\Foundation\Testing\DatabaseTransactions; class ExampleTest extends TestCase { /** * A basic functional test example. * * @return void */ public function testBasicExample() { $this->visit('/') ->see('Laravel 5 - Semantic UI & Angular'); } }
{ "content_hash": "24b20a63cfcc990cbb96ad84141f4bab", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 56, "avg_line_length": 22.473684210526315, "alnum_prop": 0.6627634660421545, "repo_name": "fdiep/laravel5_semantic", "id": "0f10a0a66a39bbb10c16a283b1720c02e66b1797", "size": "427", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "project/app/tests/ExampleTest.php", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "553" }, { "name": "CSS", "bytes": "269" }, { "name": "JavaScript", "bytes": "9108" }, { "name": "PHP", "bytes": "70155" }, { "name": "Ruby", "bytes": "1341" } ], "symlink_target": "" }
======= Credits ======= Development Lead ---------------- * Adam Doyle <adamldoyle@gmail.com> Contributors ------------ None yet. Why not be the first?
{ "content_hash": "7ae6087db9b2911d921417e19fa21dde", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 35, "avg_line_length": 11.923076923076923, "alnum_prop": 0.5419354838709678, "repo_name": "adamldoyle/python-keybase-client", "id": "c5dcb6670f561739e39a0d35322ae9a52a00b180", "size": "155", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "AUTHORS.rst", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "20359" }, { "name": "Shell", "bytes": "6466" } ], "symlink_target": "" }
package org.assertj.core.internal.doubles; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.error.ShouldBeLess.shouldBeLess; import static org.assertj.core.test.TestData.someInfo; import static org.assertj.core.test.TestFailures.failBecauseExpectedAssertionErrorWasNotThrown; import static org.assertj.core.util.FailureMessages.actualIsNull; import static org.mockito.Mockito.verify; import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.Doubles; import org.assertj.core.internal.DoublesBaseTest; import org.junit.jupiter.api.Test; /** * Tests for <code>{@link Doubles#assertLessThan(AssertionInfo, Double, double)}</code>. * * @author Alex Ruiz * @author Joel Costigliola */ public class Doubles_assertLessThan_Test extends DoublesBaseTest { @Test public void should_fail_if_actual_is_null() { assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> doubles.assertLessThan(someInfo(), null, 8d)) .withMessage(actualIsNull()); } @Test public void should_pass_if_actual_is_less_than_other() { doubles.assertLessThan(someInfo(), 6d, 8d); } @Test public void should_fail_if_actual_is_equal_to_other() { AssertionInfo info = someInfo(); try { doubles.assertLessThan(info, 6d, 6d); } catch (AssertionError e) { verify(failures).failure(info, shouldBeLess(6d, 6d)); return; } failBecauseExpectedAssertionErrorWasNotThrown(); } @Test public void should_fail_if_actual_is_greater_than_other() { AssertionInfo info = someInfo(); try { doubles.assertLessThan(info, 8d, 6d); } catch (AssertionError e) { verify(failures).failure(info, shouldBeLess(8d, 6d)); return; } failBecauseExpectedAssertionErrorWasNotThrown(); } @Test public void should_fail_if_actual_is_null_whatever_custom_comparison_strategy_is() { assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> doublesWithAbsValueComparisonStrategy.assertLessThan(someInfo(), null, 8d)) .withMessage(actualIsNull()); } @Test public void 
should_pass_if_actual_is_less_than_other_according_to_custom_comparison_strategy() { doublesWithAbsValueComparisonStrategy.assertLessThan(someInfo(), 6d, -8d); } @Test public void should_fail_if_actual_is_equal_to_other_according_to_custom_comparison_strategy() { AssertionInfo info = someInfo(); try { doublesWithAbsValueComparisonStrategy.assertLessThan(info, 6d, -6d); } catch (AssertionError e) { verify(failures).failure(info, shouldBeLess(6d, -6d, absValueComparisonStrategy)); return; } failBecauseExpectedAssertionErrorWasNotThrown(); } @Test public void should_fail_if_actual_is_greater_than_other_according_to_custom_comparison_strategy() { AssertionInfo info = someInfo(); try { doublesWithAbsValueComparisonStrategy.assertLessThan(info, -8d, 6d); } catch (AssertionError e) { verify(failures).failure(info, shouldBeLess(-8d, 6d, absValueComparisonStrategy)); return; } failBecauseExpectedAssertionErrorWasNotThrown(); } }
{ "content_hash": "5baa61e0fc5e1d6e4ddfa1956537dee5", "timestamp": "", "source": "github", "line_count": 94, "max_line_length": 144, "avg_line_length": 34.54255319148936, "alnum_prop": 0.7145056975669849, "repo_name": "xasx/assertj-core", "id": "a973cce8739d9aae1115197b06f42763f73dd882", "size": "3853", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/test/java/org/assertj/core/internal/doubles/Doubles_assertLessThan_Test.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "13307657" }, { "name": "Shell", "bytes": "37294" } ], "symlink_target": "" }
namespace Bio.Web.Blast { /// <summary> /// Container for the Statistics segment of the XML BLAST format. /// </summary> public class BlastStatistics { /// <summary> /// The number of sequences in the iteration /// </summary> public int SequenceCount { get; set; } /// <summary> /// Database size, for correction /// </summary> public long DatabaseLength { get; set; } /// <summary> /// Effective HSP length /// </summary> public long HspLength { get; set; } /// <summary> /// Effective search space /// </summary> public double EffectiveSearchSpace { get; set; } /// <summary> /// Karlin-Altschul parameter K /// </summary> public double Kappa { get; set; } /// <summary> /// Karlin-Altschul parameter Lambda /// </summary> public double Lambda { get; set; } /// <summary> /// Karlin-Altschul parameter H /// </summary> public double Entropy { get; set; } } }
{ "content_hash": "60ebd493598c1d226f170fe92895a860", "timestamp": "", "source": "github", "line_count": 43, "max_line_length": 69, "avg_line_length": 25.953488372093023, "alnum_prop": 0.5161290322580645, "repo_name": "dotnetbio/bio", "id": "d23538829b97418ed3d73bcc86d39c4a416953a8", "size": "1118", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Source/Bio.Core/Web/Blast/BlastStatistics.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C#", "bytes": "9905234" }, { "name": "Visual Basic", "bytes": "1424" } ], "symlink_target": "" }
:mod:`apscheduler.schedulers.background` ======================================== .. automodule:: apscheduler.schedulers.background API --- .. autoclass:: BackgroundScheduler :show-inheritance: Introduction ------------ BackgroundScheduler runs in a thread **inside** your existing application. Calling :meth:`~apscheduler.schedulers.blocking.BackgroundScheduler.start` will start the scheduler and it will continue running after the call returns. .. list-table:: :widths: 1 4 * - Default executor - :class:`~apscheduler.executors.pool.PoolExecutor` * - External dependencies - none * - Example - ``examples/schedulers/background.py`` (`view online <https://github.com/agronholm/apscheduler/tree/master/examples/schedulers/background.py>`_).
{ "content_hash": "3a96778c93f8d33f9c58952d3ebd0b7c", "timestamp": "", "source": "github", "line_count": 29, "max_line_length": 120, "avg_line_length": 27.17241379310345, "alnum_prop": 0.6852791878172588, "repo_name": "srault95/apscheduler", "id": "b12cd205724376af98618a4c5ce4e6b578aafeff", "size": "788", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "docs/modules/schedulers/background.rst", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "268777" } ], "symlink_target": "" }
<div class="row"> <?php $modules = Modules::get(); foreach( $modules as $_module ) { if( isset( $_module[ 'application' ][ 'details' ][ 'namespace' ] ) ) { $module_namespace = $_module[ 'application' ][ 'details' ][ 'namespace' ]; ?> <div class="col-lg-3"> <div class="box" id="#module-"> <div class="box-header"> <h3 class="box-title"><?php echo isset( $_module[ 'application' ][ 'details' ][ 'name' ] ) ? $_module[ 'application' ][ 'details' ][ 'name' ] : __( 'Tendoo Extension' );?></h3> <div class="box-tools pull-right"> <?php if( ! Modules::is_active( $module_namespace ) ) { ?> <a href="<?php echo site_url( array( 'dashboard' , 'modules' , 'enable' , $module_namespace ) );?>" class="btn btn-default btn-box-tool" data-action="enable"><i style="font-size:20px;" class="fa fa-toggle-on"></i> Enable</a> <?php } else { ?> <a href="<?php echo site_url( array( 'dashboard' , 'modules' , 'disable' , $module_namespace ) );?>" class="btn btn-default btn-box-tool" data-action="disable"><i style="font-size:20px;" class="fa fa-toggle-off"></i> Disable</a> <?php } ?> <a href="<?php echo site_url( array( 'dashboard' , 'modules' , 'remove' , $module_namespace ) );?>" class="btn btn-default btn-box-tool" data-action="uninstall"><i style="font-size:20px;" class="fa fa-trash"></i> <?php _e( 'Remove' );?></a> <button class="btn btn-default btn-box-tool" data-action="update"><i style="font-size:20px;" class="fa fa-refresh"></i></button> </div> </div> <div class="box-body"><?php echo isset( $_module[ 'application' ][ 'details' ][ 'description' ] ) ? $_module[ 'application' ][ 'details' ][ 'description' ] : '';?> </div> <div class="box-footer"> <?php echo 'v.' . ( isset( $_module[ 'application' ][ 'details' ][ 'version' ] ) ? $_module[ 'application' ][ 'details' ][ 'version' ] : 0.1 );?> </div> </div> </div> <?php } } ?> </div>
{ "content_hash": "155ff08a5f11a41821bbf38bf1e5271a", "timestamp": "", "source": "github", "line_count": 43, "max_line_length": 254, "avg_line_length": 48.116279069767444, "alnum_prop": 0.5408409859835669, "repo_name": "gopalindians/tendoo-cms", "id": "ec1415431ad85670edc57ff9e31f96103a44717b", "size": "2069", "binary": false, "copies": "1", "ref": "refs/heads/1.5", "path": "application/views/dashboard/modules/list-dom.php", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "707" }, { "name": "CSS", "bytes": "480636" }, { "name": "HTML", "bytes": "1704055" }, { "name": "JavaScript", "bytes": "2016047" }, { "name": "PHP", "bytes": "2338429" } ], "symlink_target": "" }
<?php /* * GENERATED CODE WARNING * Generated by gapic-generator-php from the file * https://github.com/googleapis/googleapis/blob/master/google/ads/googleads/v11/services/ad_group_service.proto * Updates to the above are reflected here through a refresh process. */ namespace Google\Ads\GoogleAds\V11\Services; use Google\Ads\GoogleAds\Lib\V11\GoogleAdsGapicClientTrait; use Google\Ads\GoogleAds\V11\Services\Gapic\AdGroupServiceGapicClient; /** {@inheritdoc} */ class AdGroupServiceClient extends AdGroupServiceGapicClient { use GoogleAdsGapicClientTrait; }
{ "content_hash": "04696f470464eadf22606469adeb3801", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 112, "avg_line_length": 28.7, "alnum_prop": 0.794425087108014, "repo_name": "googleads/google-ads-php", "id": "be095b04b22551caace79c344c49341f13c0a842", "size": "1168", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "src/Google/Ads/GoogleAds/V11/Services/AdGroupServiceClient.php", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "899" }, { "name": "PHP", "bytes": "9952711" }, { "name": "Shell", "bytes": "338" } ], "symlink_target": "" }
""" This demonstrates how to use model fom pytest side """ import pytest import pyosmo from examples.pytest.calculator_test_model import CalculatorTestModel @pytest.fixture(scope='function') def osmo() -> pyosmo.Osmo: """ You can use common parts in fixtures as normally with pytest """ return pyosmo.Osmo(CalculatorTestModel()) @pytest.mark.smoke_test def test_smoke(osmo): """ Small test to run quickly and same way """ osmo.seed = 1234 # Set seed to ensure that it runs same way every time osmo.test_end_condition = pyosmo.end_conditions.Length(10) osmo.test_suite_end_condition = pyosmo.end_conditions.Length(1) osmo.algorithm = pyosmo.algorithm.RandomAlgorithm() osmo.run() @pytest.mark.regression_test def test_regression(osmo): """ Longer test to run in regression sets """ osmo.test_end_condition = pyosmo.end_conditions.Length(100) osmo.test_suite_end_condition = pyosmo.end_conditions.Length(10) osmo.algorithm = pyosmo.algorithm.WeightedAlgorithm() osmo.run() @pytest.mark.long_test def test_random_timing(osmo): """ Longer test to test timings """ osmo.add_model(pyosmo.models.RandomDelayModel(1, 2)) osmo.test_end_condition = pyosmo.end_conditions.Length(10) osmo.test_suite_end_condition = pyosmo.end_conditions.Length(1) osmo.algorithm = pyosmo.algorithm.WeightedAlgorithm() osmo.run()
{ "content_hash": "e89273e879da370be002b492b2dcd01e", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 75, "avg_line_length": 34.675, "alnum_prop": 0.726027397260274, "repo_name": "OPpuolitaival/pyosmo", "id": "54f54692da991dc3d6a3f65f674b956cf3c2ba2c", "size": "1427", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "examples/pytest/test_calculator.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "45300" }, { "name": "Shell", "bytes": "93" } ], "symlink_target": "" }
<?php declare(strict_types=1); namespace Documents; use Doctrine\Common\Collections\ArrayCollection; use Doctrine\Common\NotifyPropertyChanged; use Doctrine\Common\PropertyChangedListener; use Doctrine\ODM\MongoDB\Mapping\Annotations as ODM; /** @ODM\Document @ODM\ChangeTrackingPolicy("NOTIFY") */ class ProfileNotify implements NotifyPropertyChanged { /** @ODM\Id */ private $profileId; /** @ODM\Field */ private $firstName; /** @ODM\Field */ private $lastName; /** @ODM\ReferenceOne(targetDocument=File::class, cascade={"all"}) */ private $image; /** @ODM\ReferenceMany(targetDocument=File::class, cascade={"all"}, collectionClass=ProfileNotifyImagesCollection::class) */ private $images; /** @var PropertyChangedListener[] */ private $listeners = []; public function __construct() { $this->images = new ProfileNotifyImagesCollection(); } public function addPropertyChangedListener(PropertyChangedListener $listener) { $this->listeners[] = $listener; } private function propertyChanged($propName, $oldValue, $newValue) { foreach ($this->listeners as $listener) { $listener->propertyChanged($this, $propName, $oldValue, $newValue); } } public function getProfileId() { return $this->profileId; } public function setFirstName($firstName) { $this->propertyChanged('firstName', $this->firstName, $firstName); $this->firstName = $firstName; } public function getFirstName() { return $this->firstName; } public function setLastName($lastName) { $this->propertyChanged('lastName', $this->lastName, $lastName); $this->lastName = $lastName; } public function getLastName() { return $this->lastName; } public function setImage(File $image) { $this->propertyChanged('image', $this->image, $image); $this->image = $image; } public function getImage() { return $this->image; } public function getImages() { return $this->images; } } class ProfileNotifyImagesCollection extends ArrayCollection { public function move($i, $j) { $tmp = $this->get($i); $this->set($i, $this->get($j)); $this->set($j, $tmp); } }
{ "content_hash": "e8494ced29fd750261b84b65953ebf6c", "timestamp": "", "source": "github", "line_count": 102, "max_line_length": 128, "avg_line_length": 23.19607843137255, "alnum_prop": 0.6255283178360102, "repo_name": "alcaeus/mongodb-odm", "id": "022fae73dc15fe7b86d399c9a75bb01176e3eb3f", "size": "2366", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/Documents/ProfileNotify.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "2223824" }, { "name": "Shell", "bytes": "5014" } ], "symlink_target": "" }
<?php interface iFigura { public function getSuperficie(); public function getDiametro(); public function getBase(); public function getAltura(); public function getTipo(); }
{ "content_hash": "c59ee9fee8bf0c6f028eeb638ff71c04", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 36, "avg_line_length": 21.2, "alnum_prop": 0.6415094339622641, "repo_name": "NightZpy/test-geopagos", "id": "c6d5310e959ee6c08464840c5d6aa9f77dcc5700", "size": "212", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Factory/iFigura.php", "mode": "33261", "license": "mit", "language": [ { "name": "PHP", "bytes": "22785" } ], "symlink_target": "" }
/** * @fileoverview Elements cannot use an invalid ARIA attribute. * @author passle */ //------------------------------------------------------------------------------ // Requirements //------------------------------------------------------------------------------ const { RuleTester } = require('eslint'); const rule = require('../../../lib/rules/aria-attrs'); //------------------------------------------------------------------------------ // Tests //------------------------------------------------------------------------------ const ruleTester = new RuleTester({ settings: { litHtmlSources: false }, parserOptions: { sourceType: 'module', ecmaVersion: 2015, }, }); ruleTester.run('aria-attrs', rule, { valid: [ { code: "html`<div aria-labelledby='foo'></div>`", }, // give me some code that won't trigger a warning ], invalid: [ { code: "html`<div aria-foo=''></div>`", errors: [ { message: 'Invalid ARIA attribute "aria-foo".', }, ], }, ], });
{ "content_hash": "c33cc3f905e952b84eff47d3e9de6103", "timestamp": "", "source": "github", "line_count": 43, "max_line_length": 80, "avg_line_length": 24.46511627906977, "alnum_prop": 0.38688212927756654, "repo_name": "ChromeDevTools/devtools-frontend", "id": "6e4bcf70ed5fc68ad302ecafee078fd1c06afaa8", "size": "1052", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "node_modules/eslint-plugin-lit-a11y/tests/lib/rules/aria-attrs.js", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "615241" }, { "name": "Dart", "bytes": "205" }, { "name": "HTML", "bytes": "317251" }, { "name": "JavaScript", "bytes": "1401177" }, { "name": "LLVM", "bytes": "1918" }, { "name": "Makefile", "bytes": "687" }, { "name": "Python", "bytes": "133111" }, { "name": "Shell", "bytes": "1122" }, { "name": "TypeScript", "bytes": "15230731" }, { "name": "WebAssembly", "bytes": "921" } ], "symlink_target": "" }
MINI_YOU.Compiler = function () { }; MINI_YOU.Compiler.prototype.constructor = MINI_YOU.Compiler; /** * @param {string} code */ MINI_YOU.Compiler.prototype.compileCode = function (code) { let json = JSON.parse(code); return json; };
{ "content_hash": "e125c378ac201b21ee6876f424cecd86", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 60, "avg_line_length": 18.692307692307693, "alnum_prop": 0.6790123456790124, "repo_name": "mini-you/avatar-creator", "id": "df97b6a74bbc3baf76cfe3eb00814e55b8707748", "size": "243", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Compiler.js", "mode": "33261", "license": "mit", "language": [ { "name": "CSS", "bytes": "840" }, { "name": "HTML", "bytes": "1741" }, { "name": "JavaScript", "bytes": "17118" } ], "symlink_target": "" }
package com.sequenceiq.environment.experience.common; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import java.util.Map; import javax.ws.rs.client.Client; import javax.ws.rs.client.WebTarget; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import com.sequenceiq.environment.experience.config.ExperiencePathConfig; class CommonExperienceWebTargetProviderTest { private static final String MISSING_COMPONENT_TO_REPLACE_EXCEPTION_MSG = "Component what should be replaced in experience path must not be empty or null."; private static final String INVALID_XP_BASE_PATH_GIVEN_MSG = "Experience base path should not be null!"; private static final String TEST_COMPONENT_TO_REPLACE_IN_PATH = "crn"; private static final String TEST_ENV_CRN = "someEnvCrn"; private static final int ONCE = 1; @Mock private Client mockClient; private CommonExperienceWebTargetProvider underTest; @BeforeEach void setUp() { MockitoAnnotations.openMocks(this); underTest = new CommonExperienceWebTargetProvider(new ExperiencePathConfig(Map.of("envCrn", TEST_COMPONENT_TO_REPLACE_IN_PATH)), mockClient); } @Test void testCreateWebTargetForPolicyFetchWhenBasePathIsNullThenIllegalArgumentExceptionShouldCome() { IllegalArgumentException expectedException = assertThrows( IllegalArgumentException.class, () -> underTest.createWebTargetForPolicyFetch(null, "someCloudProvider")); assertNotNull(expectedException); assertEquals(INVALID_XP_BASE_PATH_GIVEN_MSG, expectedException.getMessage()); } @Test void 
testCreateWebTargetForClusterFetchWhenBasePathIsNullThenIllegalArgumentExceptionShouldCome() { IllegalArgumentException expectedException = assertThrows( IllegalArgumentException.class, () -> underTest.createWebTargetForClusterFetch(null, "someCloudProvider")); assertNotNull(expectedException); assertEquals(INVALID_XP_BASE_PATH_GIVEN_MSG, expectedException.getMessage()); } @Test void testCreateWebTargetBasedOnInputsWhenExperienceBasePathIsNullThenIllegalArgumentExceptionShouldCome() { IllegalArgumentException expectedException = assertThrows( IllegalArgumentException.class, () -> underTest.createWebTargetForClusterFetch(null, TEST_ENV_CRN)); assertNotNull(expectedException); assertEquals(INVALID_XP_BASE_PATH_GIVEN_MSG, expectedException.getMessage()); } @Test void testCreateWebTargetBasedOnInputsWhenAllDataHasBeenGivenThenClientTargetCallShouldHappenWithTheExpectedlyReplacedContent() { String xpBasePathBase = "someBasePath/"; String xpBasePathExtended = xpBasePathBase + TEST_COMPONENT_TO_REPLACE_IN_PATH; String expectedTargetCreationContent = xpBasePathBase + TEST_ENV_CRN; WebTarget expectedWebTarget = mock(WebTarget.class); when(mockClient.target(expectedTargetCreationContent)).thenReturn(expectedWebTarget); WebTarget resultWebTarget = underTest.createWebTargetForClusterFetch(xpBasePathExtended, TEST_ENV_CRN); assertEquals(expectedWebTarget, resultWebTarget); verify(mockClient, times(ONCE)).target(anyString()); verify(mockClient, times(ONCE)).target(expectedTargetCreationContent); } }
{ "content_hash": "bb37c85287edb86a47a211280ccc77b7", "timestamp": "", "source": "github", "line_count": 92, "max_line_length": 159, "avg_line_length": 40.84782608695652, "alnum_prop": 0.7716870675891432, "repo_name": "hortonworks/cloudbreak", "id": "6a8e6d1e54774172c70ba2cd5484b55d6f6e027b", "size": "3758", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "environment/src/test/java/com/sequenceiq/environment/experience/common/CommonExperienceWebTargetProviderTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "7535" }, { "name": "Dockerfile", "bytes": "9586" }, { "name": "Fluent", "bytes": "10" }, { "name": "FreeMarker", "bytes": "395982" }, { "name": "Groovy", "bytes": "523" }, { "name": "HTML", "bytes": "9917" }, { "name": "Java", "bytes": "55250904" }, { "name": "JavaScript", "bytes": "47923" }, { "name": "Jinja", "bytes": "190660" }, { "name": "Makefile", "bytes": "8537" }, { "name": "PLpgSQL", "bytes": "1830" }, { "name": "Perl", "bytes": "17726" }, { "name": "Python", "bytes": "29898" }, { "name": "SaltStack", "bytes": "222692" }, { "name": "Scala", "bytes": "11168" }, { "name": "Shell", "bytes": "416225" } ], "symlink_target": "" }
The List View ============= .. note:: This document is a stub representing a new work in progress. If you're reading this you can help contribute, **no matter what your experience level with Sonata is**. Check out the `issues on GitHub`_ for more information about how to get involved. This document will cover the List view which you use to browse the objects in your system. It will cover configuration of the list itself and the filters you can use to control what's visible. Basic configuration ------------------- SonataAdmin Options that may affect the list view: .. code-block:: yaml sonata_admin: templates: list: SonataAdminBundle:CRUD:list.html.twig action: SonataAdminBundle:CRUD:action.html.twig select: SonataAdminBundle:CRUD:list__select.html.twig list_block: SonataAdminBundle:Block:block_admin_list.html.twig short_object_description: SonataAdminBundle:Helper:short-object-description.html.twig batch: SonataAdminBundle:CRUD:list__batch.html.twig inner_list_row: SonataAdminBundle:CRUD:list_inner_row.html.twig base_list_field: SonataAdminBundle:CRUD:base_list_field.html.twig pager_links: SonataAdminBundle:Pager:links.html.twig pager_results: SonataAdminBundle:Pager:results.html.twig To do: - a note about Routes and how disabling them disables the related action - adding custom columns Customizing the fields displayed on the list page ------------------------------------------------- You can customize the columns displayed on the list through the ``configureListFields`` method: .. 
code-block:: php <?php // Example taken from Sonata E-Commerce Product Admin public function configureListFields(ListMapper $list) { $list // addIdentifier allows to specify that this column will provide a link to the entity's edition ->addIdentifier('name') // You may specify the field type directly as the second argument instead of in the options ->add('isVariation', 'boolean') // The type can be guessed as well ->add('enabled', null, array('editable' => true)) // We can add options to the field depending on the type ->add('price', 'currency', array('currency' => $this->currencyDetector->getCurrency()->getLabel())) // Here we specify which method is used to render the label ->add('productCategories', null, array('associated_tostring' => 'getCategory')) ->add('productCollections', null, array('associated_tostring' => 'getCollection')) // You may also use dotted-notation to access specific properties of a relation to the entity ->add('image.name') // You may also specify the actions you want to be displayed in the list ->add('_action', 'actions', array( 'actions' => array( 'show' => array(), 'edit' => array(), 'delete' => array(), ) )) ; } Options ^^^^^^^ .. note:: * ``(m)`` stands for mandatory * ``(o)`` stands for optional - ``type`` (m): defines the field type - mandatory for the field description itself but will try to detect the type automatically if not specified - ``template`` (o): the template used to render the field - ``name`` (o): the name used for the column's title - ``link_parameters`` (o): add link parameter to the related Admin class when the ``Admin::generateUrl`` is called - ``code`` (o): the method name to retrieve the related value - ``associated_tostring`` (o): (deprecated, use associated_property option) the method to retrieve the "string" representation of the collection element. - ``associated_property`` (o): property path to retrieve the "string" representation of the collection element. 
- ``identifier`` (o): if set to true a link appears on the value to edit the element Available types and associated options ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. note:: ``(m)`` means that option is mandatory +-----------+----------------+-----------------------------------------------------------------------+ | Type | Options | Description | +===========+================+=======================================================================+ | actions | actions | List of available actions | +-----------+----------------+-----------------------------------------------------------------------+ | batch | | Renders a checkbox | +-----------+----------------+-----------------------------------------------------------------------+ | select | | Renders a select box | +-----------+----------------+-----------------------------------------------------------------------+ | array | | Displays an array | +-----------+----------------+-----------------------------------------------------------------------+ | boolean | ajax_hidden | Yes/No; ajax_hidden allows to hide list field during an AJAX context. | +-----------+----------------+-----------------------------------------------------------------------+ | boolean | editable | Yes/No; editable allows to edit directly from the list if authorized. | +-----------+----------------+-----------------------------------------------------------------------+ | choice | choices | Possible choices | + +----------------+-----------------------------------------------------------------------+ | | multiple | Is it a multiple choice option? Defaults to false. | + +----------------+-----------------------------------------------------------------------+ | | delimiter | Separator of values if multiple. | + +----------------+-----------------------------------------------------------------------+ | | catalogue | Translation catalogue. 
| +-----------+----------------+-----------------------------------------------------------------------+ | currency | currency (m) | A currency string (EUR or USD for instance). | +-----------+----------------+-----------------------------------------------------------------------+ | date | format | A format understandable by Twig's ``date`` function. | +-----------+----------------+-----------------------------------------------------------------------+ | datetime | format | A format understandable by Twig's ``date`` function. | +-----------+----------------+-----------------------------------------------------------------------+ | percent | | Renders value as a percentage. | +-----------+----------------+-----------------------------------------------------------------------+ | string | | Renders a simple string. | +-----------+----------------+-----------------------------------------------------------------------+ | time | | Renders a datetime's time with format ``H:i:s``. | +-----------+----------------+-----------------------------------------------------------------------+ | trans | catalogue | Translates the value with catalogue ``catalogue`` if defined. 
| +-----------+----------------+-----------------------------------------------------------------------+ | url | url | Adds a link with url ``url`` to the displayed value | + +----------------+-----------------------------------------------------------------------+ | | route | Give a route to generate the url | + + + + | | name | Route name | + + + + | | parameters | Route parameters | + +----------------+-----------------------------------------------------------------------+ | | hide_protocol | Hide http:// or https:// (default false) | +-----------+----------------+-----------------------------------------------------------------------+ If you have the SonataDoctrineORMAdminBundle installed, you have access to more field types, see `SonataDoctrineORMAdminBundle Documentation <http://sonata-project.org/bundles/doctrine-orm-admin/master/doc/reference/list_field_definition.html>`_. Customizing the query used to generate the list ----------------------------------------------- You can customize the list query thanks to the ``createQuery`` method. .. code-block:: php <?php public function createQuery($context = 'list') { $query = parent::createQuery($context); $query->andWhere( $query->expr()->eq($query->getRootAlias() . '.my_field', ':my_param') ); $query->setParameter('my_param', 'my_value'); return $query; } Customizing the sort order -------------------------- Configure the default ordering in the list view ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Configuring the default ordering column can simply be achieved by overriding the ``datagridValues`` array property. All three keys ``_page``, ``_sort_order`` and ``_sort_by`` can be omitted. .. code-block:: php <?php use Sonata\AdminBundle\Admin\Admin; class PageAdmin extends Admin { // ... 
/** * Default Datagrid values * * @var array */ protected $datagridValues = array( '_page' => 1, // display the first page (default = 1) '_sort_order' => 'DESC', // reverse order (default = 'ASC') '_sort_by' => 'updated' // name of the ordered field // (default = the model's id field, if any) // the '_sort_by' key can be of the form 'mySubModel.mySubSubModel.myField'. ); // ... } To do: - how to sort by multiple fields (this might be a separate recipe?) Filters ------- To do: - basic filter configuration and options - how to set default filter values - targeting submodel fields using dot-separated notation - advanced filter options (global_search) .. _`issues on GitHub`: https://github.com/sonata-project/SonataAdminBundle/issues/1519
{ "content_hash": "f8eb399c1c2d27dfe72b25a5b647ccf3", "timestamp": "", "source": "github", "line_count": 231, "max_line_length": 246, "avg_line_length": 48.995670995671, "alnum_prop": 0.42640042410319845, "repo_name": "mysterio85/portfolio", "id": "424362c8dbe4bae28cbdfe9a58b3fb955bc2a5db", "size": "11320", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "vendor/sonata-project/admin-bundle/Resources/doc/reference/action_list.rst", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "66837" }, { "name": "JavaScript", "bytes": "240087" }, { "name": "PHP", "bytes": "84543" }, { "name": "Shell", "bytes": "1345" } ], "symlink_target": "" }
# Creates a release branch of ApacheDS and every Directory project it depends
# on, then builds an aggregate "<branch>-with-dependencies" directory whose
# svn:externals pull all the branches together.
#
# Usage: create_branch.sh <branchName> "<commit comment>"

BRANCH_NAME=$1
COMMENT=$2

# Validate arguments before touching the remote repository.
if [ $# -ne 2 ]; then
    echo "$0 : Provide two arguments: branchName and a quoted comment."
    exit 1
fi

APACHEDS_SVN=https://svn.apache.org/repos/asf/directory/apacheds
SHARED_SVN=https://svn.apache.org/repos/asf/directory/shared
PROJECT_SVN=https://svn.apache.org/repos/asf/directory/project
KERBEROS_SVN=https://svn.apache.org/repos/asf/directory/clients/kerberos
MANUALS_SVN=https://svn.apache.org/repos/asf/directory/apacheds-manuals
CHECKSTYLE_SVN=https://svn.apache.org/repos/asf/directory/buildtools/trunk/checkstyle-configuration
JUNIT_SVN=https://svn.apache.org/repos/asf/directory/buildtools/trunk/junit-addons

# Branch each trunk. "$COMMENT" must stay double-quoted: the commit message is
# expected to contain spaces (see the usage message above).
svn cp -m "$COMMENT" $APACHEDS_SVN/trunk $APACHEDS_SVN/branches/"$BRANCH_NAME"
svn cp -m "$COMMENT" $SHARED_SVN/trunk $SHARED_SVN/branches/"$BRANCH_NAME"
svn cp -m "$COMMENT" $PROJECT_SVN/trunk $PROJECT_SVN/branches/"$BRANCH_NAME"
svn cp -m "$COMMENT" $KERBEROS_SVN/trunk $KERBEROS_SVN/branches/"$BRANCH_NAME"
# Bug fix: this previously referenced $MANUELS_SVN, an undefined variable
# (the variable is declared as MANUALS_SVN above), so the manuals branch was
# never created.
svn cp -m "$COMMENT" $MANUALS_SVN/trunk $MANUALS_SVN/branches/"$BRANCH_NAME"

svn mkdir -m "$COMMENT" $APACHEDS_SVN/branches/"$BRANCH_NAME"-with-dependencies

# Now we create the svn:externals property value file
echo apacheds $APACHEDS_SVN/branches/"$BRANCH_NAME" > VALFILE
echo shared $SHARED_SVN/branches/"$BRANCH_NAME" >> VALFILE
echo project $PROJECT_SVN/branches/"$BRANCH_NAME" >> VALFILE
echo kerberos-client $KERBEROS_SVN/branches/"$BRANCH_NAME" >> VALFILE
# Bug fix: same $MANUELS_SVN typo here — the manuals external was written with
# an empty URL before.
echo apacheds-manuals $MANUALS_SVN/branches/"$BRANCH_NAME" >> VALFILE
echo checkstyle-configuration $CHECKSTYLE_SVN >> VALFILE
echo junit-addons $JUNIT_SVN >> VALFILE

svn propset svn:externals -F ./VALFILE $APACHEDS_SVN/branches/"$BRANCH_NAME"-with-dependencies
{ "content_hash": "be687592a775b6549cd1bac955d36621", "timestamp": "", "source": "github", "line_count": 35, "max_line_length": 99, "avg_line_length": 50.25714285714286, "alnum_prop": 0.7362137578169414, "repo_name": "apache/directory-project", "id": "8f11c38601231b7ee937b026786042abfe23f53a", "size": "2710", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "resources/create_branch.sh", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "Shell", "bytes": "20479" } ], "symlink_target": "" }
package org.ibase4j.web;

import java.util.Map;

import javax.servlet.http.HttpServletRequest;

import org.apache.shiro.authz.annotation.RequiresPermissions;
import org.ibase4j.model.SysEmailTemplate;
import org.ibase4j.service.SysEmailTemplateService;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;

import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import top.ibase4j.core.base.BaseController;
import top.ibase4j.core.util.WebUtil;

/**
 * REST controller for managing email templates, mounted under
 * {@code /emailTemplate}. All CRUD work is delegated to the generic
 * {@link BaseController} typed on {@link SysEmailTemplate} /
 * {@link SysEmailTemplateService}; this class only adds the route mappings
 * and the Shiro permission checks.
 *
 * @author ShenHuaJie
 * @version May 20, 2016 15:13:31
 */
@RestController
@Api(value = "邮件模版管理", description = "邮件模版管理")
@RequestMapping(value = "emailTemplate")
public class SysEmailTemplateController extends BaseController<SysEmailTemplate, SysEmailTemplateService> {
    /**
     * Queries a page of email templates. The request's query parameters are
     * collected into a map by {@link WebUtil#getParameter} and passed through
     * to the base paging query. Requires the {@code sys.email.template.read}
     * permission.
     */
    @ApiOperation(value = "查询邮件模版")
    @RequiresPermissions("sys.email.template.read")
    @GetMapping("/read/page")
    public Object query(HttpServletRequest request) {
        Map<String, Object> param = WebUtil.getParameter(request);
        return super.query(param);
    }

    /**
     * Returns the detail of a single email template. Spring binds the request
     * parameters onto the {@code param} model object. Requires the
     * {@code sys.email.template.read} permission.
     */
    @ApiOperation(value = "邮件模版详情")
    @RequiresPermissions("sys.email.template.read")
    @GetMapping("/read/detail")
    public Object get(SysEmailTemplate param) {
        return super.get(param);
    }

    /**
     * Creates or updates an email template (POST). Requires the
     * {@code sys.email.template.update} permission.
     */
    @Override
    @ApiOperation(value = "修改邮件模版")
    @RequiresPermissions("sys.email.template.update")
    @RequestMapping(method = RequestMethod.POST)
    public Object update(SysEmailTemplate param) {
        return super.update(param);
    }

    /**
     * Deletes an email template (DELETE). Requires the
     * {@code sys.email.template.delete} permission.
     */
    @Override
    @ApiOperation(value = "删除邮件模版")
    @RequiresPermissions("sys.email.template.delete")
    @RequestMapping(method = RequestMethod.DELETE)
    public Object delete(SysEmailTemplate param) {
        return super.delete(param);
    }
}
{ "content_hash": "970c9d7ed25be49d46c277bd42168b63", "timestamp": "", "source": "github", "line_count": 60, "max_line_length": 107, "avg_line_length": 33.35, "alnum_prop": 0.7276361819090454, "repo_name": "iBase4J/iBase4J", "id": "ff2dcd52bec546193a56be896711ec6e7850f2a7", "size": "2101", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "iBase4J-SYS-Web/src/main/java/org/ibase4j/web/SysEmailTemplateController.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "2040473" }, { "name": "HTML", "bytes": "702651" }, { "name": "Java", "bytes": "285812" }, { "name": "JavaScript", "bytes": "6362897" }, { "name": "PHP", "bytes": "11363" }, { "name": "TSQL", "bytes": "1053172" } ], "symlink_target": "" }
#include <iostream> #include <tins/tins.h> #include "netwhere.hpp" using namespace std; using namespace Tins; #include <unistd.h> int main(int argc, char **argv) { if (argc != 4) { cerr << "usage: " << argv[0] << " interface ip netmask" << endl; return 1; } NetWhere netwhere(argv[1], IPv4Range::from_mask(argv[2], argv[3])); netwhere.start(); }
{ "content_hash": "3b7e6995295029bcbfc33a146db6a1ea", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 69, "avg_line_length": 17.571428571428573, "alnum_prop": 0.6260162601626016, "repo_name": "benhsmith/netwhere", "id": "273b1b2d11967478682e8517b4ece447c241f9cc", "size": "435", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "main.cpp", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "33991" }, { "name": "CMake", "bytes": "1897" }, { "name": "HTML", "bytes": "2235" }, { "name": "JavaScript", "bytes": "4546" } ], "symlink_target": "" }
const electron = require('electron')
const path = require('path')
const url = require('url')
const autoUpdater = require('./auto-updater')

const app = electron.app
const BrowserWindow = electron.BrowserWindow

// Squirrel.Windows relaunches the app briefly during install/update events;
// quit immediately in that case so we don't open a window.
if (require('electron-squirrel-startup')) app.quit()

// Kept as a module-level reference so the window isn't garbage-collected.
// Deliberately left uninitialized (undefined) to match the `=== null`
// check below exactly.
let win

const createWindow = () => {
  win = new BrowserWindow({
    width: 800,
    height: 600,
    webPreferences: {
      nodeIntegration: true
    }
  })

  // Load the local renderer entry page via a file:// URL.
  win.loadURL(url.format({
    pathname: path.join(__dirname, 'index.html'),
    protocol: 'file:',
    slashes: true
  }))

  if (process.env.NODE_ENV === 'development') {
    win.webContents.openDevTools()
  }

  win.on('closed', () => {
    win = null
  })

  // Start the auto-updater only once the renderer has finished loading.
  win.webContents.on('did-finish-load', () => {
    autoUpdater.init(win)
  })
}

app.on('ready', createWindow)

app.on('window-all-closed', () => {
  // On macOS apps conventionally stay alive until the user quits explicitly.
  if (process.platform !== 'darwin') {
    app.quit()
  }
})

app.on('activate', () => {
  // macOS dock re-activation: recreate the window if it was closed.
  if (win === null) {
    createWindow()
  }
})
{ "content_hash": "97809a43d41bb5d049ae4c6716ae5d3a", "timestamp": "", "source": "github", "line_count": 52, "max_line_length": 61, "avg_line_length": 20.03846153846154, "alnum_prop": 0.6372360844529751, "repo_name": "matiastucci/electron-auto-updater-example", "id": "dd58d24fdf1b77302266639efa029e54ce74ed7f", "size": "1042", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "636" }, { "name": "HTML", "bytes": "568" }, { "name": "JavaScript", "bytes": "3783" } ], "symlink_target": "" }
using System.Resources;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

// Assembly-level metadata for the project. The "$safeprojectname$" tokens are
// Visual Studio project-template placeholders, replaced with the real project
// name when a project is generated from this template — presumably this file
// ships inside a VSIX template (TODO confirm against the template manifest).

// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("$safeprojectname$")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("$safeprojectname$")]
[assembly: AssemblyCopyright("Copyright © 2017")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Fallback resource language used when a satellite assembly for the current
// culture is not found.
[assembly: NeutralResourcesLanguage("en")]

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
{ "content_hash": "60a787eb9ca55e97c138ccbe7e6bb433", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 84, "avg_line_length": 36.3, "alnum_prop": 0.7428833792470156, "repo_name": "bryanbcook/xf.cm.starterkit", "id": "0d46680633d7d54535013ff58d29756b274bac00", "size": "1092", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "VSIX/MultiProjectTemplate/XF/Properties/AssemblyInfo.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C#", "bytes": "53913" } ], "symlink_target": "" }
layout: news_item
title: Example of calling another program from Expect
author: XiaohuiJiang
version: 0.1
categories: [expect]
---
{% highlight tcl %}
{% include code/expect/call_another.exp %}
{% endhighlight %}
{ "content_hash": "bf6bea0e064f63122a3a906dcef16001", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 48, "avg_line_length": 20.9, "alnum_prop": 0.7272727272727273, "repo_name": "XiaohuiJiang/XiaohuiJiang.github.io", "id": "9ba002b1f02b39a35593186cc7157e0eeab88f26", "size": "213", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "_posts/2014-12-14-call-another-program-by-expect.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "47338" }, { "name": "HTML", "bytes": "13212" }, { "name": "JavaScript", "bytes": "503" }, { "name": "Shell", "bytes": "101" } ], "symlink_target": "" }
package me.realmoriss.prog3.nagyhf.entities;

import me.realmoriss.prog3.nagyhf.entities.primitives.Vec2D;

/**
 * A cyan-colored {@link Brick} entity.
 *
 * Created on 11/30/16.
 */
public class CyanBrick extends Brick {
	// Class identifier for cyan bricks — presumably used to tag/serialize this
	// entity type (TODO confirm how `classname` is consumed in Brick's hierarchy).
	private static final String DEF_CLASSNAME = "prop_brick_cyan";

	/**
	 * Creates a cyan brick at the given position.
	 *
	 * @param pos  position of the brick
	 * @param name instance name passed through to {@link Brick}
	 */
	public CyanBrick(Vec2D pos, String name) {
		// "cyan" selects the brick's color variant in the parent class.
		super(pos, name, "cyan");
		// Override the classname inherited from Brick with this subtype's id.
		classname = DEF_CLASSNAME;
	}
}
{ "content_hash": "02cedc4da008191022c5e4aaf69f8d6b", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 63, "avg_line_length": 24.928571428571427, "alnum_prop": 0.7335243553008596, "repo_name": "realmoriss/prog3", "id": "3b788531801e43527f3bd955844107a67fce599a", "size": "349", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "nagyhf/src/me/realmoriss/prog3/nagyhf/entities/CyanBrick.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "32644" } ], "symlink_target": "" }
Entries ============ ### Version 0.9.0 - 20121210 #### By Justin Kimbrell / Objective HTML ### *Public Beta* This software is in public beta. It reuses the native exp:channel:entries loop, so it's already really stable. Still, don't use it on any projects that have looming deadlines and expect me to immediately fix things for you if they break. Otherwise, I need all hands on deck for testing and giving me feedback. ### Overview Entries brings a collection of tags that make working with the Channel Entries much more advanced. Entries is designed to work in conjunction with the regular channl entries tag, replace it entirely. ### The Design Entries was designed to have a modular and expressive syntax. The idea is to add new methods over time, and have a robust library that is easy to remember which completely reuses the exp:channel:entries tag. ##### So why is the name *Entries*, which is not to be confused with *Channel Entries*? It just seemed to fit. The idea was to create a tag with really short and memorable syntax. A tag without underscores in the second segment, and *Entries* is short for *Channel Entries*, which is the engine that powers everything. ## assigned_to_member() This method allows you store a delimited string of member_id's in a channel field that can be used to grab "assigned" entries. An assigned entry conists of any entry authored by the given user, or any entry in which the defined field contains a member_id. *The string delimeter must be a '|' character.* ### PARAMETERS from_field : This is the channel field that stores the delimeted member_id's. If the defined member_id is stored in this field, the entry is returned in the results. from_channel : This is the channel that can be defined that contains the field that stores the delimeter member_id's. If no channel is defined, all channels are included in the search. from_author_id : The default value is the member that is logged in, but use this parameter if you wish to override. 
*All parameters from the exp:channel:entries are accepted.*

### Related Tags

- assigned_to_me();
- ids_assigned_to_member();
- ids_assigned_to_me();

## assigned_to_me()

This method returns the same data as `assigned_to_member()`, but rather than specifying an author_id, it only grabs the entries associated with the member that is currently logged in.

*The string delimiter must be a '|' character.*

### PARAMETERS

from_field
:    This is the channel field that stores the delimited member_id's. If the defined member_id is stored in this field, the entry is returned in the results.

from_channel
:    This is the channel that can be defined that contains the field that stores the delimited member_id's. If no channel is defined, all channels are included in the search.

*All parameters from the exp:channel:entries are accepted.*

### Related Tags

- assigned_to_member();
- ids_assigned_to_member();
- ids_assigned_to_me();

## ids_assigned_to_member()

This method runs the same query as `assigned_to_member()` but rather than parsing tagdata, it returns a delimited string of entry_id's. The string will be delimited with a '|' character.

### PARAMETERS

from_field
:    This is the channel field that stores the delimited member_id's. If the defined member_id is stored in this field, the entry is returned in the results.

from_channel
:    This is the channel that can be defined that contains the field that stores the delimited member_id's. If no channel is defined, all channels are included in the search.

from_author_id
:    The default value is the member that is logged in, but use this parameter if you wish to override.

### Related Tags

- ids_assigned_to_me();
- assigned_to_member();
- assigned_to_me();

## ids_assigned_to_me()

This method runs the same query as `assigned_to_me()` but rather than parsing tagdata, it returns a delimited string of entry_id's. The string will be delimited with a '|' character. 
### PARAMETERS from_field : This is the channel field that stores the delimeted member_id's. If the defined member_id is stored in this field, the entry is returned in the results. from_channel : This is the channel that can be defined that contains the field that stores the delimeter member_id's. If no channel is defined, all channels are included in the search. *All parameters from the exp:channel:entries are accepted.* ### Related Tags - ids_assigned_to_member(); - assigned_to_member - assigned_to_me(); ## get() This method is simply an alias to exp:channel:entries. This big difference here is you can actually nest your tags with unique prefixes so everything parses correctly. ### PARAMETERS *All parameters from the exp:channel:entries are accepted.* ### EXAMPLE &#123;exp:entries:get channel="channel_1" } {entry_id} {title} {some_custom_field} &#123;exp:entries:get channel="channel_2" prefix="2:" entry_id="{some_custom_field}" } {2:entry_id} {2:title} {2:some_custom_field} &#123;exp:entries:get channel="channel_3" prefix="3:" entry_id="{2:some_custom_field}" } {3:entry_id} {3:title} {3:some_custom_field} {/exp:entries:get} {/exp:entries:get} {/exp:entries:get} *Numbered prefixes are used to represent nested depth, you can use whatever you prefer. It's also importan to be mindful of performance. Test your code thoroughly when using nested tags.* ## by_category() This method makes working with categories easier. You can grab entries associated to category id, category name, category url title, and parent category id. ### PARAMETERS category_id : Specify one or category id's using a '|' delimiter. category_name : Specify one or category name's using a '|' delimiter. category_url_title : Specify one or category url title's using a '|' delimiter. category_parent_id : Specify one or category parent id's using a '|' delimiter. 
*All parameters from the exp:channel:entries are accepted.* ## profile() This method assumes a member profile is stored as a channel entry, and by default it assumes that channel name is "members". This tag really isn't much different than the exp:entries:get tag other than it makes pulling profile information more convenient. This method defaults to the member that is currently logged in. ### PARAMETERS *All parameters from the exp:channel:entries are accepted.* ## my_category_entries() This method uses the exp:entries:profile tag as a base, but rather than return the profile data, it returned all the entries with the same categories assigned to the profile entry. This is a way to easily use categories to related things across multiple channels. ### PARAMETERS *All parameters from the exp:channel:entries are accepted.* ## my_category_ids() This method uses the exp:entries:profile tag as a base, but rather than parse tagdata, it returns associated category id's to the returned profile entry. ### PARAMETERS *All parameters from the exp:channel:entries are accepted.* ## related() This returned the related entries to any specified entry_id. ### PARAMETERS rel_entry_id : This is the entry from which you wish to use to grab other related entries. So if you specific rel_entry_id 1, it will grab all the entries related to entry_id 1. *All parameters from the exp:channel:entries are accepted.* ## reverse_related() This method is the opposite of exp:entries:related. This returned all the reverse_relationships from any given entry_id. ### PARAMETERS *All parameters from the exp:channel:entries are accepted.*
{ "content_hash": "783f3ef81fdcb0fb0f3c1e1f9f9e22ad", "timestamp": "", "source": "github", "line_count": 235, "max_line_length": 319, "avg_line_length": 32.361702127659576, "alnum_prop": 0.7430637738330046, "repo_name": "objectivehtml/Entries", "id": "a15a487f7fb89989380d740a0a09d4261e028919", "size": "7605", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "PHP", "bytes": "108479" } ], "symlink_target": "" }
'use strict'; var fs = require('fs'); var path = require('path'); var https = require('https'); var mime = require('mime'); module.exports = function (port, callback) { var server = https.createServer({ key: fs.readFileSync(path.join(__dirname, '..', 'fixture', 'key.pem')), cert: fs.readFileSync(path.join(__dirname, '..', 'fixture', 'cert.pem')), }, function (req, res) { if (req.url === '/') { res.writeHead(200); res.end('<!DOCTYPE html>\n<html><body></body></html>'); } else { var file = req.url.substring(1); fs.stat(file, function (err) { if (err) { res.writeHead(404); res.end(); return; } res.writeHead(200, { 'Content-Type': mime.getType(file) }); fs.createReadStream(file).pipe(res); }); } }); function onError(errListener) { server.on('error', errListener); } function close() { server.close(); } server.on('error', function (err) { if (callback.called) { return; } callback(err, null, onError, close); callback.called = true; }); server.listen(port, function (err) { if (callback.called) { return; } callback(err, server.address().port, onError, close); callback.called = true; }); };
{ "content_hash": "cea93bce13c27f53bbbe62f967191002", "timestamp": "", "source": "github", "line_count": 54, "max_line_length": 77, "avg_line_length": 24.203703703703702, "alnum_prop": 0.5531752104055088, "repo_name": "mantoni/mochify.js", "id": "d9851eb55e76c90de527a35ff6fdda0b8ce998a3", "size": "1307", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/server.js", "mode": "33188", "license": "mit", "language": [ { "name": "CoffeeScript", "bytes": "88" }, { "name": "HTML", "bytes": "102" }, { "name": "JavaScript", "bytes": "78608" }, { "name": "TypeScript", "bytes": "42" } ], "symlink_target": "" }
package com.amazonaws.services.kinesisanalyticsv2.model.transform; import javax.annotation.Generated; import com.amazonaws.SdkClientException; import com.amazonaws.services.kinesisanalyticsv2.model.*; import com.amazonaws.protocol.*; import com.amazonaws.annotation.SdkInternalApi; /** * InputProcessingConfigurationDescriptionMarshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") @SdkInternalApi public class InputProcessingConfigurationDescriptionMarshaller { private static final MarshallingInfo<StructuredPojo> INPUTLAMBDAPROCESSORDESCRIPTION_BINDING = MarshallingInfo.builder(MarshallingType.STRUCTURED) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("InputLambdaProcessorDescription").build(); private static final InputProcessingConfigurationDescriptionMarshaller instance = new InputProcessingConfigurationDescriptionMarshaller(); public static InputProcessingConfigurationDescriptionMarshaller getInstance() { return instance; } /** * Marshall the given parameter object. */ public void marshall(InputProcessingConfigurationDescription inputProcessingConfigurationDescription, ProtocolMarshaller protocolMarshaller) { if (inputProcessingConfigurationDescription == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(inputProcessingConfigurationDescription.getInputLambdaProcessorDescription(), INPUTLAMBDAPROCESSORDESCRIPTION_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
{ "content_hash": "42355bebdd214396dbed2441fffba539", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 159, "avg_line_length": 38.95454545454545, "alnum_prop": 0.779463243873979, "repo_name": "aws/aws-sdk-java", "id": "d8bfb356ca979619ad11dba06d7b9f813bd26f98", "size": "2294", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "aws-java-sdk-kinesisanalyticsv2/src/main/java/com/amazonaws/services/kinesisanalyticsv2/model/transform/InputProcessingConfigurationDescriptionMarshaller.java", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
using extensions::AppWindow; using extensions::AppWindowRegistry; namespace { std::string GetAppShelfId(AppWindow* app_window) { if (app_window->window_type_is_panel()) return base::StringPrintf("panel:%d", app_window->session_id().id()); return app_window->extension_id(); } } // namespace AppWindowLauncherController::AppWindowLauncherController( ChromeLauncherController* owner) : owner_(owner), activation_client_(NULL) { AppWindowRegistry* registry = AppWindowRegistry::Get(owner->profile()); registry_.insert(registry); registry->AddObserver(this); if (ash::Shell::HasInstance()) { if (ash::Shell::GetInstance()->GetPrimaryRootWindow()) { activation_client_ = aura::client::GetActivationClient( ash::Shell::GetInstance()->GetPrimaryRootWindow()); if (activation_client_) activation_client_->AddObserver(this); } } } AppWindowLauncherController::~AppWindowLauncherController() { for (std::set<AppWindowRegistry*>::iterator it = registry_.begin(); it != registry_.end(); ++it) (*it)->RemoveObserver(this); if (activation_client_) activation_client_->RemoveObserver(this); for (WindowToAppShelfIdMap::iterator iter = window_to_app_shelf_id_map_.begin(); iter != window_to_app_shelf_id_map_.end(); ++iter) { iter->first->RemoveObserver(this); } } void AppWindowLauncherController::AdditionalUserAddedToSession( Profile* profile) { // TODO(skuhne): This was added for the legacy side by side mode in M32. If // this mode gets no longer pursued this special case can be removed. 
if (chrome::MultiUserWindowManager::GetMultiProfileMode() != chrome::MultiUserWindowManager::MULTI_PROFILE_MODE_MIXED) return; AppWindowRegistry* registry = AppWindowRegistry::Get(profile); if (registry_.find(registry) != registry_.end()) return; registry->AddObserver(this); registry_.insert(registry); } void AppWindowLauncherController::OnAppWindowIconChanged( AppWindow* app_window) { const std::string app_shelf_id = GetAppShelfId(app_window); AppControllerMap::iterator iter = app_controller_map_.find(app_shelf_id); if (iter == app_controller_map_.end()) return; AppWindowLauncherItemController* controller = iter->second; controller->set_image_set_by_controller(true); owner_->SetLauncherItemImage(controller->shelf_id(), app_window->app_icon().AsImageSkia()); } void AppWindowLauncherController::OnAppWindowShown(AppWindow* app_window, bool was_hidden) { aura::Window* window = app_window->GetNativeWindow(); if (!IsRegisteredApp(window)) RegisterApp(app_window); } void AppWindowLauncherController::OnAppWindowHidden(AppWindow* app_window) { aura::Window* window = app_window->GetNativeWindow(); if (IsRegisteredApp(window)) UnregisterApp(window); } // Called from aura::Window::~Window(), before delegate_->OnWindowDestroyed() // which destroys AppWindow, so both |window| and the associated AppWindow // are valid here. void AppWindowLauncherController::OnWindowDestroying(aura::Window* window) { UnregisterApp(window); } void AppWindowLauncherController::OnWindowActivated( aura::client::ActivationChangeObserver::ActivationReason reason, aura::Window* new_active, aura::Window* old_active) { // Make the newly active window the active (first) entry in the controller. AppWindowLauncherItemController* new_controller = ControllerForWindow(new_active); if (new_controller) { new_controller->SetActiveWindow(new_active); owner_->SetItemStatus(new_controller->shelf_id(), ash::STATUS_ACTIVE); } // Mark the old active window's launcher item as running (if different). 
AppWindowLauncherItemController* old_controller = ControllerForWindow(old_active); if (old_controller && old_controller != new_controller) owner_->SetItemStatus(old_controller->shelf_id(), ash::STATUS_RUNNING); } void AppWindowLauncherController::RegisterApp(AppWindow* app_window) { // Windows created by IME extension should be treated the same way as the // virtual keyboard window, which does not register itself in launcher. if (app_window->is_ime_window()) return; aura::Window* window = app_window->GetNativeWindow(); // Get the app's shelf identifier and add an entry to the map. DCHECK(window_to_app_shelf_id_map_.find(window) == window_to_app_shelf_id_map_.end()); const std::string app_shelf_id = GetAppShelfId(app_window); window_to_app_shelf_id_map_[window] = app_shelf_id; window->AddObserver(this); // Find or create an item controller and launcher item. std::string app_id = app_window->extension_id(); ash::ShelfItemStatus status = ash::wm::IsActiveWindow(window) ? ash::STATUS_ACTIVE : ash::STATUS_RUNNING; AppControllerMap::iterator iter = app_controller_map_.find(app_shelf_id); ash::ShelfID shelf_id = 0; if (iter != app_controller_map_.end()) { AppWindowLauncherItemController* controller = iter->second; DCHECK(controller->app_id() == app_id); shelf_id = controller->shelf_id(); controller->AddAppWindow(app_window, status); } else { LauncherItemController::Type type = app_window->window_type_is_panel() ? LauncherItemController::TYPE_APP_PANEL : LauncherItemController::TYPE_APP; AppWindowLauncherItemController* controller = new AppWindowLauncherItemController(type, app_shelf_id, app_id, owner_); controller->AddAppWindow(app_window, status); // If the app shelf id is not unique, and there is already a shelf // item for this app id (e.g. pinned), use that shelf item. 
if (app_shelf_id == app_id) shelf_id = owner_->GetShelfIDForAppID(app_id); if (shelf_id == 0) { shelf_id = owner_->CreateAppLauncherItem(controller, app_id, status); // Restore any existing app icon and flag as set. const gfx::Image& app_icon = app_window->app_icon(); if (!app_icon.IsEmpty()) { owner_->SetLauncherItemImage(shelf_id, app_icon.AsImageSkia()); controller->set_image_set_by_controller(true); } } else { owner_->SetItemController(shelf_id, controller); } const std::string app_shelf_id = GetAppShelfId(app_window); app_controller_map_[app_shelf_id] = controller; } owner_->SetItemStatus(shelf_id, status); ash::SetShelfIDForWindow(shelf_id, window); } void AppWindowLauncherController::UnregisterApp(aura::Window* window) { WindowToAppShelfIdMap::iterator iter1 = window_to_app_shelf_id_map_.find(window); DCHECK(iter1 != window_to_app_shelf_id_map_.end()); std::string app_shelf_id = iter1->second; window_to_app_shelf_id_map_.erase(iter1); window->RemoveObserver(this); AppControllerMap::iterator iter2 = app_controller_map_.find(app_shelf_id); DCHECK(iter2 != app_controller_map_.end()); AppWindowLauncherItemController* controller = iter2->second; controller->RemoveAppWindowForWindow(window); if (controller->app_window_count() == 0) { // If this is the last window associated with the app shelf id, close the // shelf item. 
ash::ShelfID shelf_id = controller->shelf_id(); owner_->CloseLauncherItem(shelf_id); app_controller_map_.erase(iter2); } } bool AppWindowLauncherController::IsRegisteredApp(aura::Window* window) { return window_to_app_shelf_id_map_.find(window) != window_to_app_shelf_id_map_.end(); } // Private Methods AppWindowLauncherItemController* AppWindowLauncherController::ControllerForWindow(aura::Window* window) { WindowToAppShelfIdMap::iterator iter1 = window_to_app_shelf_id_map_.find(window); if (iter1 == window_to_app_shelf_id_map_.end()) return NULL; std::string app_shelf_id = iter1->second; AppControllerMap::iterator iter2 = app_controller_map_.find(app_shelf_id); if (iter2 == app_controller_map_.end()) return NULL; return iter2->second; }
{ "content_hash": "8799d31c4a2cf6c2509474e988483cfa", "timestamp": "", "source": "github", "line_count": 208, "max_line_length": 80, "avg_line_length": 38.74038461538461, "alnum_prop": 0.6959543310995284, "repo_name": "highweb-project/highweb-webcl-html5spec", "id": "4e46569bc33dfccd2ac56a7ba41eeb480e7c2a19", "size": "8925", "binary": false, "copies": "1", "ref": "refs/heads/highweb-20160310", "path": "chrome/browser/ui/ash/launcher/app_window_launcher_controller.cc", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. // This program uses code hyperlinks available as part of the HyperAddin Visual Studio plug-in. // It is available from http://www.codeplex.com/hyperAddin #if !PLATFORM_UNIX #define FEATURE_MANAGED_ETW #if !ES_BUILD_STANDALONE && !CORECLR && !PROJECTN #define FEATURE_ACTIVITYSAMPLING #endif // !ES_BUILD_STANDALONE #endif // !PLATFORM_UNIX #if ES_BUILD_STANDALONE #define FEATURE_MANAGED_ETW_CHANNELS // #define FEATURE_ADVANCED_MANAGED_ETW_CHANNELS #endif /* DESIGN NOTES DESIGN NOTES DESIGN NOTES DESIGN NOTES */ // DESIGN NOTES // Over the years EventSource has become more complex and so it is important to understand // the basic structure of the code to insure that it does not grow more complex. // // Basic Model // // PRINCIPLE: EventSource - ETW decoupling // // Conceptually and EventSouce is something takes event logging data from the source methods // To the EventListener that can subscribe them. Note that CONCEPTUALLY EVENTSOURCES DON'T // KNOW ABOUT ETW!. The MODEL of the system is that there is a special EventListern Which // we will call the EtwEventListener, that forwards commands from ETW to EventSources and // listeners to the EventSources and forwards on those events to ETW. THus the model should // be that you DON'T NEED ETW. // // Now in actual practice, EventSouce have rather intimate knowledge of ETW and send events // to it directly, but this can be VIEWED AS AN OPTIMIATION. // // Basic Event Data Flow: // // There are two ways for event Data to enter the system // 1) WriteEvent* and friends. This is called the 'contract' based approach because // you write a method per event which forms a contract that is know at compile time. // In this scheme each event is given an EVENTID (small integer). which is its identity // 2) Write<T> methods. 
This is called the 'dynamic' approach because new events // can be created on the fly. Event identity is determined by the event NAME, and these // are not quite as efficient at runtime since you have at least a hash table lookup // on every event write. // // EventSource-EventListener transfer fully support both ways of writing events (either contract // based (WriteEvent*) or dynamic (Write<T>). Both way fully support the same set of data // types. It is suggested, however, that you use the contract based approach when the event scheme // is known at compile time (that is whenever possible). It is more efficient, but more importantly // it makes the contract very explicit, and centralizes all policy about logging. These are good // things. The Write<T> API is really meant for more ad-hoc // // Allowed Data. // // Note that EventSource-EventListeners have a conceptual serialization-deserialization that happens // during the transfer. In particular object identity is not preserved, some objects are morphed, // and not all data types are supported. In particular you can pass // // A Valid type to log to an EventSource include // * Primitive data types // * IEnumerable<T> of valid types T (this include arrays) (* New for V4.6) // * Explicitly Opted in class or struct with public property Getters over Valid types. (* New for V4.6) // // This set of types is roughly a generalization of JSON support (Basically primitives, bags, and arrays). // // Explicitly allowed structs include (* New for V4.6) // * Marked with the EventData attribute // * implicitly defined (e.g the C# new {x = 3, y = 5} syntax) // * KeyValuePair<K,V> (thus dictionaries can be passed since they are an IEnumerable of KeyValuePair) // // When classes are returned in an EventListener, what is returned is something that implements // IDictionary<string, T>. 
Thus when objects are passed to an EventSource they are transformed // into a key-value bag (the IDictionary<string, T>) for consumption in the listener. These // are obvious NOT the original objects. // // ETWserialization formats: // // As mentioned conceptually EventSource's send data to EventListeners and there is a conceptual // copy/morph of that data as described above. In addition the .NET framework supports a conceptual // ETWListener that will send the data to then ETW stream. If you use this feature, the data needs // to be serialized in a way that ETW supports. ETW supports the following serialization formats // // 1) Manifest Based serialization. // 2) SelfDescribing serialization (TraceLogging style in the TraceLogging directory) // // A key factor is that the Write<T> method, which support on the fly definition of events, can't // support the manifest based serialization because the manifest needs the schema of all events // to be known before any events are emitted. This implies the following // // If you use Write<T> and the output goes to ETW it will use the SelfDescribing format. // If you use the EventSource(string) constructor for an eventSource (in which you don't // create a subclass), the default is also to use Self-Describing serialization. In addition // you can use the EventSoruce(EventSourceSettings) constructor to also explicitly specify // Self-Describing serialization format. These effect the WriteEvent* APIs going to ETW. // // Note that none of this ETW serialization logic affects EventListeners. Only the ETW listener. // // ************************************************************************************* // *** INTERNALS: Event Propagation // // Data enters the system either though // // 1) A user defined method in the user defined subclass of EventSource which calls // A) A typesafe type specific overload of WriteEvent(ID, ...) e.g. 
WriteEvent(ID, string, string) // * which calls into the unsafe WriteEventCore(ID COUNT EventData*) WriteEventWithRelatedActivityIdCore() // B) The typesafe overload WriteEvent(ID, object[]) which calls the private helper WriteEventVarargs(ID, Guid* object[]) // C) Directly into the unsafe WriteEventCore(ID, COUNT EventData*) or WriteEventWithRelatedActivityIdCore() // // All event data eventually flows to one of // * WriteEventWithRelatedActivityIdCore(ID, Guid*, COUNT, EventData*) // * WriteEventVarargs(ID, Guid*, object[]) // // 2) A call to one of the overloads of Write<T>. All these overloads end up in // * WriteImpl<T>(EventName, Options, Data, Guid*, Guid*) // // On output there are the following routines // Writing to all listeners that are NOT ETW, we have the following routines // * WriteToAllListeners(ID, Guid*, COUNT, EventData*) // * WriteToAllListeners(ID, Guid*, object[]) // * WriteToAllListeners(NAME, Guid*, EventPayload) // // EventPayload is the internal type that implements the IDictionary<string, object> interface // The EventListeners will pass back for serialized classes for nested object, but // WriteToAllListeners(NAME, Guid*, EventPayload) unpacks this uses the fields as if they // were parameters to a method. // // The first two are used for the WriteEvent* case, and the later is used for the Write<T> case. 
// // Writing to ETW, Manifest Based // EventProvider.WriteEvent(EventDescriptor, Guid*, COUNT, EventData*) // EventProvider.WriteEvent(EventDescriptor, Guid*, object[]) // Writing to ETW, Self-Describing format // WriteMultiMerge(NAME, Options, Types, EventData*) // WriteMultiMerge(NAME, Options, Types, object[]) // WriteImpl<T> has logic that knows how to serialize (like WriteMultiMerge) but also knows // will write it to // // All ETW writes eventually call // EventWriteTransfer (native PINVOKE wrapper) // EventWriteTransferWrapper (fixes compat problem if you pass null as the related activityID) // EventProvider.WriteEventRaw - sets last error // EventSource.WriteEventRaw - Does EventSource exception handling logic // WriteMultiMerge // WriteImpl<T> // EventProvider.WriteEvent(EventDescriptor, Guid*, COUNT, EventData*) // EventProvider.WriteEvent(EventDescriptor, Guid*, object[]) // // Serialization: We have a bit of a hodge-podge of serializers right now. Only the one for ETW knows // how to deal with nested classes or arrays. I will call this serializer the 'TypeInfo' serializer // since it is the TraceLoggingTypeInfo structure that knows how to do this. Effectively for a type you // can call one of these // WriteMetadata - transforms the type T into serialization meta data blob for that type // WriteObjectData - transforms an object of T into serialization meta data blob for that type // GetData - transforms an object of T into its deserialized form suitable for passing to EventListener. // The first two are used to serialize something for ETW. The second one is used to transform the object // for use by the EventListener. We also have a 'DecodeObject' method that will take a EventData* and // deserialize to pass to an EventListener, but it only works on primitive types (types supported in version V4.5). 
// // It is an important observation that while EventSource does support users directly calling with EventData* // blobs, we ONLY support that for the primitive types (V4.5 level support). Thus while there is a EventData* // path through the system it is only for some types. The object[] path is the more general (but less efficient) path. // // TODO There is cleanup needed There should be no divergence until WriteEventRaw. // // TODO: We should have a single choke point (right now we always have this parallel EventData* and object[] path. This // was historical (at one point we tried to pass object directly from EventSoruce to EventListener. That was always // fragile and a compatibility headache, but we have finally been forced into the idea that there is always a transformation. // This allows us to use the EventData* form to be the canonical data format in the low level APIs. This also gives us the // opportunity to expose this format to EventListeners in the future. // using System; using System.Runtime.CompilerServices; #if FEATURE_ACTIVITYSAMPLING using System.Collections.Concurrent; #endif using System.Collections.Generic; using System.Collections.ObjectModel; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.Reflection; using System.Resources; using System.Security; using System.Security.Permissions; using System.Text; using System.Threading; using Microsoft.Win32; #if ES_BUILD_STANDALONE using Environment = Microsoft.Diagnostics.Tracing.Internal.Environment; using EventDescriptor = Microsoft.Diagnostics.Tracing.EventDescriptor; #else using System.Threading.Tasks; using EventDescriptor = System.Diagnostics.Tracing.EventDescriptor; #endif using Microsoft.Reflection; #if !ES_BUILD_AGAINST_DOTNET_V35 using Contract = System.Diagnostics.Contracts.Contract; #else using Contract = Microsoft.Diagnostics.Contracts.Internal.Contract; #endif #if ES_BUILD_STANDALONE namespace Microsoft.Diagnostics.Tracing #else 
namespace System.Diagnostics.Tracing #endif { /// <summary> /// This class is meant to be inherited by a user-defined event source in order to define a managed /// ETW provider. Please See DESIGN NOTES above for the internal architecture. /// The minimal definition of an EventSource simply specifies a number of ETW event methods that /// call one of the EventSource.WriteEvent overloads, <see cref="EventSource.WriteEventCore"/>, /// or <see cref="EventSource.WriteEventWithRelatedActivityIdCore"/> to log them. This functionality /// is sufficient for many users. /// <para> /// To achieve more control over the ETW provider manifest exposed by the event source type, the /// [<see cref="EventAttribute"/>] attributes can be specified for the ETW event methods. /// </para><para> /// For very advanced EventSources, it is possible to intercept the commands being given to the /// eventSource and change what filtering is done (see EventListener.EnableEvents and /// <see cref="EventListener.DisableEvents"/>) or cause actions to be performed by the eventSource, /// e.g. dumping a data structure (see EventSource.SendCommand and /// <see cref="EventSource.OnEventCommand"/>). /// </para><para> /// The eventSources can be turned on with Windows ETW controllers (e.g. logman), immediately. /// It is also possible to control and intercept the data dispatcher programmatically. See /// <see cref="EventListener"/> for more. 
/// </para> /// </summary> /// <remarks> /// This is a minimal definition for a custom event source: /// <code> /// [EventSource(Name="Samples-Demos-Minimal")] /// sealed class MinimalEventSource : EventSource /// { /// public static MinimalEventSource Log = new MinimalEventSource(); /// public void Load(long ImageBase, string Name) { WriteEvent(1, ImageBase, Name); } /// public void Unload(long ImageBase) { WriteEvent(2, ImageBase); } /// private MinimalEventSource() {} /// } /// </code> /// </remarks> public partial class EventSource : IDisposable { #if FEATURE_EVENTSOURCE_XPLAT private static readonly EventListener persistent_Xplat_Listener = XplatEventLogger.InitializePersistentListener(); #endif //FEATURE_EVENTSOURCE_XPLAT /// <summary> /// The human-friendly name of the eventSource. It defaults to the simple name of the class /// </summary> public string Name { get { return m_name; } } /// <summary> /// Every eventSource is assigned a GUID to uniquely identify it to the system. /// </summary> public Guid Guid { get { return m_guid; } } /// <summary> /// Returns true if the eventSource has been enabled at all. This is the prefered test /// to be performed before a relatively expensive EventSource operation. /// </summary> [SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "This does not need to be correct when racing with other threads")] public bool IsEnabled() { return m_eventSourceEnabled; } /// <summary> /// Returns true if events with greater than or equal 'level' and have one of 'keywords' set are enabled. /// /// Note that the result of this function is only an approximation on whether a particular /// event is active or not. It is only meant to be used as way of avoiding expensive /// computation for logging when logging is not on, therefore it sometimes returns false /// positives (but is always accurate when returning false). EventSources are free to /// have additional filtering. 
/// </summary> [SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "This does not need to be correct when racing with other threads")] public bool IsEnabled(EventLevel level, EventKeywords keywords) { return IsEnabled(level, keywords, EventChannel.None); } /// <summary> /// Returns true if events with greater than or equal 'level' and have one of 'keywords' set are enabled, or /// if 'keywords' specifies a channel bit for a channel that is enabled. /// /// Note that the result of this function only an approximation on whether a particular /// event is active or not. It is only meant to be used as way of avoiding expensive /// computation for logging when logging is not on, therefore it sometimes returns false /// positives (but is always accurate when returning false). EventSources are free to /// have additional filtering. /// </summary> [SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "This does not need to be correct when racing with other threads")] public bool IsEnabled(EventLevel level, EventKeywords keywords, EventChannel channel) { if (!m_eventSourceEnabled) return false; if (!IsEnabledCommon(m_eventSourceEnabled, m_level, m_matchAnyKeyword, level, keywords, channel)) return false; #if !FEATURE_ACTIVITYSAMPLING return true; #else // FEATURE_ACTIVITYSAMPLING return true; #if OPTIMIZE_IS_ENABLED //================================================================================ // 2013/03/06 - The code below is a possible optimization for IsEnabled(level, kwd) // in case activity tracing/sampling is enabled. The added complexity of this // code however weighs against having it "on" until we know it's really needed. // For now we'll have this #ifdef-ed out in case we see evidence this is needed. 
//================================================================================ // At this point we believe the event is enabled, however we now need to check // if we filter because of activity // Optimization, all activity filters also register a delegate here, so if there // is no delegate, we know there are no activity filters, which means that there // is no additional filtering, which means that we can return true immediately. if (s_activityDying == null) return true; // if there's at least one legacy ETW listener we can't filter this if (m_legacySessions != null && m_legacySessions.Count > 0) return true; // if any event ID that triggers a new activity, or "transfers" activities // is covered by 'keywords' we can't filter this if (unchecked(((long)keywords & m_keywordTriggers)) != 0) return true; // See if all listeners have activity filters that would block the event. for (int perEventSourceSessionId = 0; perEventSourceSessionId < SessionMask.MAX; ++perEventSourceSessionId) { EtwSession etwSession = m_etwSessionIdMap[perEventSourceSessionId]; if (etwSession == null) continue; ActivityFilter activityFilter = etwSession.m_activityFilter; if (activityFilter == null || ActivityFilter.GetFilter(activityFilter, this) == null) { // No activity filter for ETW, if event is active for ETW, we can't filter. for (int i = 0; i < m_eventData.Length; i++) if (m_eventData[i].EnabledForETW) return true; } else if (ActivityFilter.IsCurrentActivityActive(activityFilter)) return true; } // for regular event listeners var curDispatcher = m_Dispatchers; while (curDispatcher != null) { ActivityFilter activityFilter = curDispatcher.m_Listener.m_activityFilter; if (activityFilter == null) { // See if any event is enabled. 
for (int i = 0; i < curDispatcher.m_EventEnabled.Length; i++) if (curDispatcher.m_EventEnabled[i]) return true; } else if (ActivityFilter.IsCurrentActivityActive(activityFilter)) return true; curDispatcher = curDispatcher.m_Next; } // Every listener has an activity filter that is blocking writing the event, // thus the event is not enabled. return false; #endif // OPTIMIZE_IS_ENABLED #endif // FEATURE_ACTIVITYSAMPLING } /// <summary> /// Returns the settings for the event source instance /// </summary> public EventSourceSettings Settings { get { return m_config; } } // Manifest support /// <summary> /// Returns the GUID that uniquely identifies the eventSource defined by 'eventSourceType'. /// This API allows you to compute this without actually creating an instance of the EventSource. /// It only needs to reflect over the type. /// </summary> public static Guid GetGuid(Type eventSourceType) { if (eventSourceType == null) throw new ArgumentNullException("eventSourceType"); Contract.EndContractBlock(); EventSourceAttribute attrib = (EventSourceAttribute)GetCustomAttributeHelper(eventSourceType, typeof(EventSourceAttribute)); string name = eventSourceType.Name; if (attrib != null) { if (attrib.Guid != null) { Guid g = Guid.Empty; #if !ES_BUILD_AGAINST_DOTNET_V35 if (Guid.TryParse(attrib.Guid, out g)) return g; #else try { return new Guid(attrib.Guid); } catch (Exception) { } #endif } if (attrib.Name != null) name = attrib.Name; } if (name == null) throw new ArgumentException(Environment.GetResourceString("Argument_InvalidTypeName"), "eventSourceType"); return GenerateGuidFromName(name.ToUpperInvariant()); // Make it case insensitive. } /// <summary> /// Returns the official ETW Provider name for the eventSource defined by 'eventSourceType'. /// This API allows you to compute this without actually creating an instance of the EventSource. /// It only needs to reflect over the type. 
/// </summary> public static string GetName(Type eventSourceType) { return GetName(eventSourceType, EventManifestOptions.None); } /// <summary> /// Returns a string of the XML manifest associated with the eventSourceType. The scheme for this XML is /// documented at in EventManifest Schema http://msdn.microsoft.com/en-us/library/aa384043(VS.85).aspx. /// This is the preferred way of generating a manifest to be embedded in the ETW stream as it is fast and /// the fact that it only includes localized entries for the current UI culture is an acceptable tradeoff. /// </summary> /// <param name="eventSourceType">The type of the event source class for which the manifest is generated</param> /// <param name="assemblyPathToIncludeInManifest">The manifest XML fragment contains the string name of the DLL name in /// which it is embedded. This parameter specifies what name will be used</param> /// <returns>The XML data string</returns> public static string GenerateManifest(Type eventSourceType, string assemblyPathToIncludeInManifest) { return GenerateManifest(eventSourceType, assemblyPathToIncludeInManifest, EventManifestOptions.None); } /// <summary> /// Returns a string of the XML manifest associated with the eventSourceType. The scheme for this XML is /// documented at in EventManifest Schema http://msdn.microsoft.com/en-us/library/aa384043(VS.85).aspx. /// Pass EventManifestOptions.AllCultures when generating a manifest to be registered on the machine. This /// ensures that the entries in the event log will be "optimally" localized. /// </summary> /// <param name="eventSourceType">The type of the event source class for which the manifest is generated</param> /// <param name="assemblyPathToIncludeInManifest">The manifest XML fragment contains the string name of the DLL name in /// which it is embedded. This parameter specifies what name will be used</param> /// <param name="flags">The flags to customize manifest generation. 
If flags has bit OnlyIfNeededForRegistration specified /// this returns null when the eventSourceType does not require explicit registration</param> /// <returns>The XML data string or null</returns> public static string GenerateManifest(Type eventSourceType, string assemblyPathToIncludeInManifest, EventManifestOptions flags) { if (eventSourceType == null) throw new ArgumentNullException("eventSourceType"); Contract.EndContractBlock(); byte[] manifestBytes = EventSource.CreateManifestAndDescriptors(eventSourceType, assemblyPathToIncludeInManifest, null, flags); return (manifestBytes == null) ? null : Encoding.UTF8.GetString(manifestBytes, 0, manifestBytes.Length); } // EventListener support /// <summary> /// returns a list (IEnumerable) of all sources in the appdomain). EventListeners typically need this. /// </summary> /// <returns></returns> public static IEnumerable<EventSource> GetSources() { var ret = new List<EventSource>(); lock (EventListener.EventListenersLock) { foreach (WeakReference eventSourceRef in EventListener.s_EventSources) { EventSource eventSource = eventSourceRef.Target as EventSource; if (eventSource != null && !eventSource.IsDisposed) ret.Add(eventSource); } } return ret; } /// <summary> /// Send a command to a particular EventSource identified by 'eventSource'. /// Calling this routine simply forwards the command to the EventSource.OnEventCommand /// callback. What the EventSource does with the command and its arguments are from /// that point EventSource-specific. 
/// </summary> /// <param name="eventSource">The instance of EventSource to send the command to</param> /// <param name="command">A positive user-defined EventCommand, or EventCommand.SendManifest</param> /// <param name="commandArguments">A set of (name-argument, value-argument) pairs associated with the command</param> public static void SendCommand(EventSource eventSource, EventCommand command, IDictionary<string, string> commandArguments) { if (eventSource == null) throw new ArgumentNullException("eventSource"); // User-defined EventCommands should not conflict with the reserved commands. if ((int)command <= (int)EventCommand.Update && (int)command != (int)EventCommand.SendManifest) throw new ArgumentException(Environment.GetResourceString("EventSource_InvalidCommand"), "command"); eventSource.SendCommand(null, 0, 0, command, true, EventLevel.LogAlways, EventKeywords.None, commandArguments); } // ActivityID support (see also WriteEventWithRelatedActivityIdCore) /// <summary> /// When a thread starts work that is on behalf of 'something else' (typically another /// thread or network request) it should mark the thread as working on that other work. /// This API marks the current thread as working on activity 'activityID'. This API /// should be used when the caller knows the thread's current activity (the one being /// overwritten) has completed. Otherwise, callers should prefer the overload that /// return the oldActivityThatWillContinue (below). /// /// All events created with the EventSource on this thread are also tagged with the /// activity ID of the thread. /// /// It is common, and good practice after setting the thread to an activity to log an event /// with a 'start' opcode to indicate that precise time/thread where the new activity /// started. 
/// </summary>
/// <param name="activityId">A Guid that represents the new activity with which to mark
/// the current thread</param>
[System.Security.SecuritySafeCritical]
public static void SetCurrentThreadActivityId(Guid activityId)
{
#if FEATURE_MANAGED_ETW
#if FEATURE_ACTIVITYSAMPLING
    Guid newId = activityId;
#endif // FEATURE_ACTIVITYSAMPLING
    // We ignore errors to keep with the convention that EventSources do not throw errors.
    // Note we can't access m_throwOnWrites because this is a static method.
    // EventActivityIdControl(GET_SET_ID) swaps the thread's activity ID: on success
    // 'activityId' holds the OLD id on return.
    if (UnsafeNativeMethods.ManifestEtw.EventActivityIdControl(
        UnsafeNativeMethods.ManifestEtw.ActivityControl.EVENT_ACTIVITY_CTRL_GET_SET_ID,
        ref activityId) == 0)
    {
#if FEATURE_ACTIVITYSAMPLING
        var activityDying = s_activityDying;
        if (activityDying != null && newId != activityId)
        {
            if (activityId == Guid.Empty)
            {
                activityId = FallbackActivityId;
            }
            // OutputDebugString(string.Format("Activity dying: {0} -> {1}", activityId, newId));
            activityDying(activityId);          // This is actually the OLD activity ID.
        }
#endif // FEATURE_ACTIVITYSAMPLING
    }
#endif // FEATURE_MANAGED_ETW
    if (TplEtwProvider.Log != null)
        TplEtwProvider.Log.SetActivityId(activityId);
}

/// <summary>
/// When a thread starts work that is on behalf of 'something else' (typically another
/// thread or network request) it should mark the thread as working on that other work.
/// This API marks the current thread as working on activity 'activityID'. It returns
/// whatever activity the thread was previously marked with. There is a convention that
/// callers can assume that callees restore this activity mark before the callee returns.
/// To encourage this, this API returns the old activity, so that it can be restored later.
///
/// All events created with the EventSource on this thread are also tagged with the
/// activity ID of the thread.
///
/// It is common, and good practice after setting the thread to an activity to log an event
/// with a 'start' opcode to indicate that precise time/thread where the new activity
/// started.
/// </summary>
/// <param name="activityId">A Guid that represents the new activity with which to mark
/// the current thread</param>
/// <param name="oldActivityThatWillContinue">The Guid that represents the current activity
/// which will continue at some point in the future, on the current thread</param>
[System.Security.SecuritySafeCritical]
public static void SetCurrentThreadActivityId(Guid activityId, out Guid oldActivityThatWillContinue)
{
    oldActivityThatWillContinue = activityId;
#if FEATURE_MANAGED_ETW
    // We ignore errors to keep with the convention that EventSources do not throw errors.
    // Note we can't access m_throwOnWrites because this is a static method.
    // GET_SET_ID swaps: on return 'oldActivityThatWillContinue' holds the previous activity ID.
    UnsafeNativeMethods.ManifestEtw.EventActivityIdControl(
        UnsafeNativeMethods.ManifestEtw.ActivityControl.EVENT_ACTIVITY_CTRL_GET_SET_ID,
        ref oldActivityThatWillContinue);
#endif // FEATURE_MANAGED_ETW

    // We don't call the activityDying callback here because the caller has declared that
    // it is not dying.
    if (TplEtwProvider.Log != null)
        TplEtwProvider.Log.SetActivityId(activityId);
}

/// <summary>
/// Retrieves the ETW activity ID associated with the current thread.
/// </summary>
public static Guid CurrentThreadActivityId
{
    [System.Security.SecuritySafeCritical]
    get
    {
        // We ignore errors to keep with the convention that EventSources do not throw
        // errors. Note we can't access m_throwOnWrites because this is a static method.
        Guid retVal = new Guid();
#if FEATURE_MANAGED_ETW
        UnsafeNativeMethods.ManifestEtw.EventActivityIdControl(
            UnsafeNativeMethods.ManifestEtw.ActivityControl.EVENT_ACTIVITY_CTRL_GET_ID,
            ref retVal);
#endif // FEATURE_MANAGED_ETW
        return retVal;
    }
}

#if !ES_BUILD_STANDALONE
/// <summary>
/// This property allows EventSource code to appropriately handle as "different"
/// activities started on different threads that have not had an activity created on them.
/// Returns CurrentThreadActivityId, or FallbackActivityId when no activity is set.
/// </summary>
internal static Guid InternalCurrentThreadActivityId
{
    [System.Security.SecurityCritical]
    get
    {
        Guid retval = CurrentThreadActivityId;
        if (retval == Guid.Empty)
        {
            retval = FallbackActivityId;
        }
        return retval;
    }
}

// Synthesizes a per-thread/per-process pseudo activity ID from the native thread ID and PID.
internal static Guid FallbackActivityId
{
    [System.Security.SecurityCritical]
    get
    {
#pragma warning disable 612, 618
        // Managed thread IDs are more aggressively re-used than native thread IDs,
        // so we'll use the latter...
        return new Guid(unchecked((uint)AppDomain.GetCurrentThreadId()),
                        unchecked((ushort)s_currentPid), unchecked((ushort)(s_currentPid >> 16)),
                        0x94, 0x1b, 0x87, 0xd5, 0xa6, 0x5c, 0x36, 0x64);
#pragma warning restore 612, 618
    }
}
#endif // !ES_BUILD_STANDALONE

// Error APIs. (We don't throw by default, but you can probe for status)
/// <summary>
/// Because
///
///     1) Logging is often optional and thus should not generate fatal errors (exceptions)
///     2) EventSources are often initialized in class constructors (which propagate exceptions poorly)
///
/// The event source constructor does not throw exceptions. Instead we remember any exception that
/// was generated (it is also logged to Trace.WriteLine).
/// </summary>
public Exception ConstructionException { get { return m_constructionException; } }

/// <summary>
/// EventSources can have arbitrary string key-value pairs associated with them called Traits.
/// These traits are not interpreted by the EventSource but may be interpreted by EventListeners
/// (e.g. like the built in ETW listener).
These traits are specified at EventSource
/// construction time and can be retrieved by using this GetTrait API.
/// </summary>
/// <param name="key">The key to look up in the set of key-value pairs passed to the EventSource constructor</param>
/// <returns>The value string associated with key. Will return null if there is no such key.</returns>
public string GetTrait(string key)
{
    if (m_traits != null)
    {
        // m_traits is a flat array of alternating key/value entries: [k0, v0, k1, v1, ...].
        for (int i = 0; i < m_traits.Length - 1; i += 2)
        {
            if (m_traits[i] == key)
                return m_traits[i + 1];
        }
    }
    return null;
}

/// <summary>
/// Displays the name and GUID for the eventSource for debugging purposes.
/// </summary>
public override string ToString()
{
    return Environment.GetResourceString("EventSource_ToString", Name, Guid);
}

/// <summary>
/// Fires when a Command (e.g. Enable) comes from an EventListener.
/// </summary>
public event EventHandler<EventCommandEventArgs> EventCommandExecuted
{
    add
    {
        lock (this)
        {
            m_eventCommandExecuted += value;
        }

        // If we have an EventHandler<EventCommandEventArgs> attached to the EventSource before the first command arrives
        // It should get a chance to handle the deferred commands.
        EventCommandEventArgs deferredCommands = m_deferredCommands;
        while (deferredCommands != null)
        {
            value(this, deferredCommands);
            deferredCommands = deferredCommands.nextCommand;
        }
    }
    remove
    {
        lock (this)
        {
            m_eventCommandExecuted -= value;
        }
    }
}

#region protected
/// <summary>
/// This is the constructor that most users will use to create their eventSource. It takes
/// no parameters. The ETW provider name and GUID of the EventSource are determined by the EventSource
/// custom attribute (so you can determine these things declaratively). If the GUID for the eventSource
/// is not specified in the EventSourceAttribute (recommended), it is Generated by hashing the name.
/// If the ETW provider name of the EventSource is not given, the name of the EventSource class is used as
/// the ETW provider name.
/// </summary>
protected EventSource()
    : this(EventSourceSettings.EtwManifestEventFormat)
{ }

/// <summary>
/// By default calling the 'WriteEvent' methods do NOT throw on errors (they silently discard the event).
/// This is because in most cases users assume logging is not 'precious' and do NOT wish to have logging failures
/// crash the program. However for those applications where logging is 'precious' and if it fails the caller
/// wishes to react, setting 'throwOnEventWriteErrors' will cause an exception to be thrown if WriteEvent
/// fails. Note the fact that EventWrite succeeds does not necessarily mean that the event reached its destination
/// only that operation of writing it did not fail. These EventSources will not generate self-describing ETW events.
///
/// For compatibility only use the EventSourceSettings.ThrowOnEventWriteErrors flag instead.
/// </summary>
// [Obsolete("Use the EventSource(EventSourceSettings) overload")]
protected EventSource(bool throwOnEventWriteErrors)
    : this(EventSourceSettings.EtwManifestEventFormat | (throwOnEventWriteErrors ? EventSourceSettings.ThrowOnEventWriteErrors : 0))
{ }

/// <summary>
/// Construct an EventSource with additional non-default settings (see EventSourceSettings for more)
/// </summary>
protected EventSource(EventSourceSettings settings) : this(settings, null) { }

/// <summary>
/// Construct an EventSource with additional non-default settings.
///
/// Also specify a list of key-value pairs called traits (you must pass an even number of strings).
/// The first string is the key and the second is the value. These are not interpreted by EventSource
/// itself but may be interpreted by the listeners. Can be fetched with GetTrait(string).
/// </summary>
/// <param name="settings">See EventSourceSettings for more.</param>
/// <param name="traits">A collection of key-value strings (must be an even number).</param>
protected EventSource(EventSourceSettings settings, params string[] traits)
{
    m_config = ValidateSettings(settings);
    var myType = this.GetType();
    Initialize(GetGuid(myType), GetName(myType), traits);
}

/// <summary>
/// This method is called when the eventSource is updated by the controller.
/// </summary>
protected virtual void OnEventCommand(EventCommandEventArgs command) { }

#pragma warning disable 1591
// optimized for common signatures (no args)
[SecuritySafeCritical]
[SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "This does not need to be correct when racing with other threads")]
protected unsafe void WriteEvent(int eventId)
{
    WriteEventCore(eventId, 0, null);
}

// optimized for common signatures (ints)
[SecuritySafeCritical]
[SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "This does not need to be correct when racing with other threads")]
protected unsafe void WriteEvent(int eventId, int arg1)
{
    if (m_eventSourceEnabled)
    {
        // Each EventData describes one payload item: address + size (4 bytes for an int).
        EventSource.EventData* descrs = stackalloc EventSource.EventData[1];
        descrs[0].DataPointer = (IntPtr)(&arg1);
        descrs[0].Size = 4;
        WriteEventCore(eventId, 1, descrs);
    }
}

[SecuritySafeCritical]
[SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "This does not need to be correct when racing with other threads")]
protected unsafe void WriteEvent(int eventId, int arg1, int arg2)
{
    if (m_eventSourceEnabled)
    {
        EventSource.EventData* descrs = stackalloc EventSource.EventData[2];
        descrs[0].DataPointer = (IntPtr)(&arg1);
        descrs[0].Size = 4;
        descrs[1].DataPointer = (IntPtr)(&arg2);
        descrs[1].Size = 4;
        WriteEventCore(eventId, 2, descrs);
    }
}

[SecuritySafeCritical]
[SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "This does not need to be correct when racing with other threads")]
protected unsafe void
WriteEvent(int eventId, int arg1, int arg2, int arg3)
{
    if (m_eventSourceEnabled)
    {
        EventSource.EventData* descrs = stackalloc EventSource.EventData[3];
        descrs[0].DataPointer = (IntPtr)(&arg1);
        descrs[0].Size = 4;
        descrs[1].DataPointer = (IntPtr)(&arg2);
        descrs[1].Size = 4;
        descrs[2].DataPointer = (IntPtr)(&arg3);
        descrs[2].Size = 4;
        WriteEventCore(eventId, 3, descrs);
    }
}

// optimized for common signatures (longs)
[SecuritySafeCritical]
[SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "This does not need to be correct when racing with other threads")]
protected unsafe void WriteEvent(int eventId, long arg1)
{
    if (m_eventSourceEnabled)
    {
        // 8-byte payload per long argument.
        EventSource.EventData* descrs = stackalloc EventSource.EventData[1];
        descrs[0].DataPointer = (IntPtr)(&arg1);
        descrs[0].Size = 8;
        WriteEventCore(eventId, 1, descrs);
    }
}

[SecuritySafeCritical]
[SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "This does not need to be correct when racing with other threads")]
protected unsafe void WriteEvent(int eventId, long arg1, long arg2)
{
    if (m_eventSourceEnabled)
    {
        EventSource.EventData* descrs = stackalloc EventSource.EventData[2];
        descrs[0].DataPointer = (IntPtr)(&arg1);
        descrs[0].Size = 8;
        descrs[1].DataPointer = (IntPtr)(&arg2);
        descrs[1].Size = 8;
        WriteEventCore(eventId, 2, descrs);
    }
}

[SecuritySafeCritical]
[SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "This does not need to be correct when racing with other threads")]
protected unsafe void WriteEvent(int eventId, long arg1, long arg2, long arg3)
{
    if (m_eventSourceEnabled)
    {
        EventSource.EventData* descrs = stackalloc EventSource.EventData[3];
        descrs[0].DataPointer = (IntPtr)(&arg1);
        descrs[0].Size = 8;
        descrs[1].DataPointer = (IntPtr)(&arg2);
        descrs[1].Size = 8;
        descrs[2].DataPointer = (IntPtr)(&arg3);
        descrs[2].Size = 8;
        WriteEventCore(eventId, 3, descrs);
    }
}

// optimized for common signatures (strings)
[SecuritySafeCritical]
[SuppressMessage("Microsoft.Concurrency",
"CA8001", Justification = "This does not need to be correct when racing with other threads")]
protected unsafe void WriteEvent(int eventId, string arg1)
{
    if (m_eventSourceEnabled)
    {
        // Null strings are normalized to "" so we always have a pinnable payload.
        if (arg1 == null) arg1 = "";
        fixed (char* string1Bytes = arg1)
        {
            EventSource.EventData* descrs = stackalloc EventSource.EventData[1];
            descrs[0].DataPointer = (IntPtr)string1Bytes;
            // Size in bytes including the UTF-16 null terminator: (length + 1) * 2.
            descrs[0].Size = ((arg1.Length + 1) * 2);
            WriteEventCore(eventId, 1, descrs);
        }
    }
}

[SecuritySafeCritical]
[SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "This does not need to be correct when racing with other threads")]
protected unsafe void WriteEvent(int eventId, string arg1, string arg2)
{
    if (m_eventSourceEnabled)
    {
        if (arg1 == null) arg1 = "";
        if (arg2 == null) arg2 = "";
        fixed (char* string1Bytes = arg1)
        fixed (char* string2Bytes = arg2)
        {
            EventSource.EventData* descrs = stackalloc EventSource.EventData[2];
            descrs[0].DataPointer = (IntPtr)string1Bytes;
            descrs[0].Size = ((arg1.Length + 1) * 2);
            descrs[1].DataPointer = (IntPtr)string2Bytes;
            descrs[1].Size = ((arg2.Length + 1) * 2);
            WriteEventCore(eventId, 2, descrs);
        }
    }
}

[SecuritySafeCritical]
[SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "This does not need to be correct when racing with other threads")]
protected unsafe void WriteEvent(int eventId, string arg1, string arg2, string arg3)
{
    if (m_eventSourceEnabled)
    {
        if (arg1 == null) arg1 = "";
        if (arg2 == null) arg2 = "";
        if (arg3 == null) arg3 = "";
        fixed (char* string1Bytes = arg1)
        fixed (char* string2Bytes = arg2)
        fixed (char* string3Bytes = arg3)
        {
            EventSource.EventData* descrs = stackalloc EventSource.EventData[3];
            descrs[0].DataPointer = (IntPtr)string1Bytes;
            descrs[0].Size = ((arg1.Length + 1) * 2);
            descrs[1].DataPointer = (IntPtr)string2Bytes;
            descrs[1].Size = ((arg2.Length + 1) * 2);
            descrs[2].DataPointer = (IntPtr)string3Bytes;
            descrs[2].Size = ((arg3.Length + 1) * 2);
            WriteEventCore(eventId, 3, descrs);
        }
    }
}

// optimized for common signatures
(string and ints)
[SecuritySafeCritical]
[SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "This does not need to be correct when racing with other threads")]
protected unsafe void WriteEvent(int eventId, string arg1, int arg2)
{
    if (m_eventSourceEnabled)
    {
        if (arg1 == null) arg1 = "";
        fixed (char* string1Bytes = arg1)
        {
            EventSource.EventData* descrs = stackalloc EventSource.EventData[2];
            descrs[0].DataPointer = (IntPtr)string1Bytes;
            // String payload size includes the UTF-16 null terminator.
            descrs[0].Size = ((arg1.Length + 1) * 2);
            descrs[1].DataPointer = (IntPtr)(&arg2);
            descrs[1].Size = 4;
            WriteEventCore(eventId, 2, descrs);
        }
    }
}

[SecuritySafeCritical]
[SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "This does not need to be correct when racing with other threads")]
protected unsafe void WriteEvent(int eventId, string arg1, int arg2, int arg3)
{
    if (m_eventSourceEnabled)
    {
        if (arg1 == null) arg1 = "";
        fixed (char* string1Bytes = arg1)
        {
            EventSource.EventData* descrs = stackalloc EventSource.EventData[3];
            descrs[0].DataPointer = (IntPtr)string1Bytes;
            descrs[0].Size = ((arg1.Length + 1) * 2);
            descrs[1].DataPointer = (IntPtr)(&arg2);
            descrs[1].Size = 4;
            descrs[2].DataPointer = (IntPtr)(&arg3);
            descrs[2].Size = 4;
            WriteEventCore(eventId, 3, descrs);
        }
    }
}

// optimized for common signatures (string and longs)
[SecuritySafeCritical]
[SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "This does not need to be correct when racing with other threads")]
protected unsafe void WriteEvent(int eventId, string arg1, long arg2)
{
    if (m_eventSourceEnabled)
    {
        if (arg1 == null) arg1 = "";
        fixed (char* string1Bytes = arg1)
        {
            EventSource.EventData* descrs = stackalloc EventSource.EventData[2];
            descrs[0].DataPointer = (IntPtr)string1Bytes;
            descrs[0].Size = ((arg1.Length + 1) * 2);
            descrs[1].DataPointer = (IntPtr)(&arg2);
            descrs[1].Size = 8;
            WriteEventCore(eventId, 2, descrs);
        }
    }
}

// optimized for common signatures (long and string)
[SecuritySafeCritical]
[SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "This does not need to be correct when racing with other threads")]
protected unsafe void WriteEvent(int eventId, long arg1, string arg2)
{
    if (m_eventSourceEnabled)
    {
        if (arg2 == null) arg2 = "";
        fixed (char* string2Bytes = arg2)
        {
            EventSource.EventData* descrs = stackalloc EventSource.EventData[2];
            descrs[0].DataPointer = (IntPtr)(&arg1);
            descrs[0].Size = 8;
            descrs[1].DataPointer = (IntPtr)string2Bytes;
            // String payload size includes the UTF-16 null terminator.
            descrs[1].Size = ((arg2.Length + 1) * 2);
            WriteEventCore(eventId, 2, descrs);
        }
    }
}

// optimized for common signatures (int and string)
[SecuritySafeCritical]
[SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "This does not need to be correct when racing with other threads")]
protected unsafe void WriteEvent(int eventId, int arg1, string arg2)
{
    if (m_eventSourceEnabled)
    {
        if (arg2 == null) arg2 = "";
        fixed (char* string2Bytes = arg2)
        {
            EventSource.EventData* descrs = stackalloc EventSource.EventData[2];
            descrs[0].DataPointer = (IntPtr)(&arg1);
            descrs[0].Size = 4;
            descrs[1].DataPointer = (IntPtr)string2Bytes;
            descrs[1].Size = ((arg2.Length + 1) * 2);
            WriteEventCore(eventId, 2, descrs);
        }
    }
}

[SecuritySafeCritical]
[SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "This does not need to be correct when racing with other threads")]
protected unsafe void WriteEvent(int eventId, byte[] arg1)
{
    if (m_eventSourceEnabled)
    {
        // A byte[] is marshaled as two descriptors: [length (4 bytes), raw bytes].
        EventSource.EventData* descrs = stackalloc EventSource.EventData[2];
        if (arg1 == null || arg1.Length == 0)
        {
            int blobSize = 0;
            descrs[0].DataPointer = (IntPtr)(&blobSize);
            descrs[0].Size = 4;
            descrs[1].DataPointer = (IntPtr)(&blobSize); // valid address instead of empty content
            descrs[1].Size = 0;
            WriteEventCore(eventId, 2, descrs);
        }
        else
        {
            int blobSize = arg1.Length;
            fixed (byte* blob = &arg1[0])
            {
                descrs[0].DataPointer = (IntPtr)(&blobSize);
                descrs[0].Size = 4;
                descrs[1].DataPointer = (IntPtr)blob;
                descrs[1].Size = blobSize;
                WriteEventCore(eventId, 2, descrs);
            }
        }
    }
}

[SecuritySafeCritical]
[SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "This does not need to be correct when racing with other threads")]
protected unsafe void WriteEvent(int eventId, long arg1, byte[] arg2)
{
    if (m_eventSourceEnabled)
    {
        // Payload layout: [long (8 bytes), blob length (4 bytes), blob bytes].
        EventSource.EventData* descrs = stackalloc EventSource.EventData[3];
        descrs[0].DataPointer = (IntPtr)(&arg1);
        descrs[0].Size = 8;
        if (arg2 == null || arg2.Length == 0)
        {
            int blobSize = 0;
            descrs[1].DataPointer = (IntPtr)(&blobSize);
            descrs[1].Size = 4;
            descrs[2].DataPointer = (IntPtr)(&blobSize); // valid address instead of empty contents
            descrs[2].Size = 0;
            WriteEventCore(eventId, 3, descrs);
        }
        else
        {
            int blobSize = arg2.Length;
            fixed (byte* blob = &arg2[0])
            {
                descrs[1].DataPointer = (IntPtr)(&blobSize);
                descrs[1].Size = 4;
                descrs[2].DataPointer = (IntPtr)blob;
                descrs[2].Size = blobSize;
                WriteEventCore(eventId, 3, descrs);
            }
        }
    }
}
#pragma warning restore 1591

/// <summary>
/// Used to construct the data structure to be passed to the native ETW APIs - EventWrite and EventWriteTransfer.
/// </summary>
protected internal struct EventData
{
    /// <summary>
    /// Address where the one argument lives (if this points to managed memory you must ensure the
    /// managed object is pinned.
    /// </summary>
    public IntPtr DataPointer { get { return (IntPtr)m_Ptr; } set { m_Ptr = unchecked((long)value); } }
    /// <summary>
    /// Size of the argument referenced by DataPointer
    /// </summary>
    public int Size { get { return m_Size; } set { m_Size = value; } }

    #region private
    /// <summary>
    /// Initializes the members of this EventData object to point at a previously-pinned
    /// tracelogging-compatible metadata blob.
    /// </summary>
    /// <param name="pointer">Pinned tracelogging-compatible metadata blob.</param>
    /// <param name="size">The size of the metadata blob.</param>
    /// <param name="reserved">Value for reserved: 2 for per-provider metadata, 1 for per-event metadata</param>
    [SecurityCritical]
    internal unsafe void SetMetadata(byte* pointer, int size, int reserved)
    {
        this.m_Ptr = (long)(ulong)(UIntPtr)pointer;
        this.m_Size = size;
        this.m_Reserved = reserved; // Mark this descriptor as containing tracelogging-compatible metadata.
    }

    //Important, we pass this structure directly to the Win32 EventWrite API, so this structure must be laid out exactly
    // the way EventWrite wants it.
    internal long m_Ptr;
    internal int m_Size;
#pragma warning disable 0649
    internal int m_Reserved;       // Used to pad the size to match the Win32 API
#pragma warning restore 0649
    #endregion
}

/// <summary>
/// This routine allows you to create efficient WriteEvent helpers, however the code that you use to
/// do this, while straightforward, is unsafe.
/// </summary>
/// <remarks>
/// <code>
///    protected unsafe void WriteEvent(int eventId, string arg1, long arg2)
///    {
///        if (IsEnabled())
///        {
///            if (arg2 == null) arg2 = "";
///            fixed (char* string2Bytes = arg2)
///            {
///                EventSource.EventData* descrs = stackalloc EventSource.EventData[2];
///                descrs[0].DataPointer = (IntPtr)(&amp;arg1);
///                descrs[0].Size = 8;
///                descrs[1].DataPointer = (IntPtr)string2Bytes;
///                descrs[1].Size = ((arg2.Length + 1) * 2);
///                WriteEventCore(eventId, 2, descrs);
///            }
///        }
///    }
/// </code>
/// </remarks>
[SecurityCritical]
[CLSCompliant(false)]
protected unsafe void WriteEventCore(int eventId, int eventDataCount, EventSource.EventData* data)
{
    // Shared implementation with the related-activity-id variant (null related activity).
    WriteEventWithRelatedActivityIdCore(eventId, null, eventDataCount, data);
}

/// <summary>
/// This routine allows you to create efficient WriteEventWithRelatedActivityId helpers, however the code
/// that you use to do this, while straightforward, is unsafe.
The only difference from
/// <see cref="WriteEventCore"/> is that you pass the relatedActivityId from caller through to this API
/// </summary>
/// <remarks>
/// <code>
///    protected unsafe void WriteEventWithRelatedActivityId(int eventId, Guid relatedActivityId, string arg1, long arg2)
///    {
///        if (IsEnabled())
///        {
///            if (arg2 == null) arg2 = "";
///            fixed (char* string2Bytes = arg2)
///            {
///                EventSource.EventData* descrs = stackalloc EventSource.EventData[2];
///                descrs[0].DataPointer = (IntPtr)(&amp;arg1);
///                descrs[0].Size = 8;
///                descrs[1].DataPointer = (IntPtr)string2Bytes;
///                descrs[1].Size = ((arg2.Length + 1) * 2);
///                WriteEventWithRelatedActivityIdCore(eventId, relatedActivityId, 2, descrs);
///            }
///        }
///    }
/// </code>
/// </remarks>
[SecurityCritical]
[CLSCompliant(false)]
protected unsafe void WriteEventWithRelatedActivityIdCore(int eventId, Guid* relatedActivityId, int eventDataCount, EventSource.EventData* data)
{
    if (m_eventSourceEnabled)
    {
        try
        {
            Contract.Assert(m_eventData != null);  // You must have initialized this if you enabled the source.
            if (relatedActivityId != null)
                ValidateEventOpcodeForTransfer(ref m_eventData[eventId]);
#if FEATURE_MANAGED_ETW
            if (m_eventData[eventId].EnabledForETW)
            {
                EventOpcode opcode = (EventOpcode)m_eventData[eventId].Descriptor.Opcode;
                EventActivityOptions activityOptions = m_eventData[eventId].ActivityOptions;
                Guid* pActivityId = null;
                Guid activityId = Guid.Empty;
                Guid relActivityId = Guid.Empty;

                // For Start/Stop events (with no explicit related activity and tracking not disabled),
                // let the activity tracker synthesize/maintain the activity and related-activity IDs.
                if (opcode != EventOpcode.Info && relatedActivityId == null &&
                   ((activityOptions & EventActivityOptions.Disable) == 0))
                {
                    if (opcode == EventOpcode.Start)
                    {
                        m_activityTracker.OnStart(m_name, m_eventData[eventId].Name, m_eventData[eventId].Descriptor.Task, ref activityId, ref relActivityId, m_eventData[eventId].ActivityOptions);
                    }
                    else if (opcode == EventOpcode.Stop)
                    {
                        m_activityTracker.OnStop(m_name, m_eventData[eventId].Name, m_eventData[eventId].Descriptor.Task, ref activityId);
                    }

                    if (activityId != Guid.Empty)
                        pActivityId = &activityId;
                    if (relActivityId != Guid.Empty)
                        relatedActivityId = &relActivityId;
                }

#if FEATURE_ACTIVITYSAMPLING
                // this code should be kept in sync with WriteEventVarargs().
                SessionMask etwSessions = SessionMask.All;
                // only compute etwSessions if there are *any* ETW filters enabled...
                if ((ulong)m_curLiveSessions != 0)
                    etwSessions = GetEtwSessionMask(eventId, relatedActivityId);
                // OutputDebugString(string.Format("{0}.WriteEvent(id {1}) -> to sessions {2:x}",
                //                                 m_name, m_eventData[eventId].Name, (ulong) etwSessions));

                if ((ulong)etwSessions != 0 || m_legacySessions != null && m_legacySessions.Count > 0)
                {
                    if (!SelfDescribingEvents)
                    {
                        if (etwSessions.IsEqualOrSupersetOf(m_curLiveSessions))
                        {
                            // OutputDebugString(string.Format("  (1) id {0}, kwd {1:x}",
                            //                   m_eventData[eventId].Name, m_eventData[eventId].Descriptor.Keywords));
                            // by default the Descriptor.Keyword will have the perEventSourceSessionId bit
                            // mask set to 0x0f so, when all ETW sessions want the event we don't need to
                            // synthesize a new one
                            if (!m_provider.WriteEvent(ref m_eventData[eventId].Descriptor, pActivityId, relatedActivityId, eventDataCount, (IntPtr)data))
                                ThrowEventSourceException();
                        }
                        else
                        {
                            long origKwd = unchecked((long)((ulong)m_eventData[eventId].Descriptor.Keywords & ~(SessionMask.All.ToEventKeywords())));
                            // OutputDebugString(string.Format("  (2) id {0}, kwd {1:x}",
                            //                   m_eventData[eventId].Name, etwSessions.ToEventKeywords() | (ulong) origKwd));
                            // only some of the ETW sessions will receive this event. Synthesize a new
                            // Descriptor whose Keywords field will have the appropriate bits set.
                            // etwSessions might be 0, if there are legacy ETW listeners that want this event
                            var desc = new EventDescriptor(
                                m_eventData[eventId].Descriptor.EventId,
                                m_eventData[eventId].Descriptor.Version,
                                m_eventData[eventId].Descriptor.Channel,
                                m_eventData[eventId].Descriptor.Level,
                                m_eventData[eventId].Descriptor.Opcode,
                                m_eventData[eventId].Descriptor.Task,
                                unchecked((long)etwSessions.ToEventKeywords() | origKwd));

                            if (!m_provider.WriteEvent(ref desc, pActivityId, relatedActivityId, eventDataCount, (IntPtr)data))
                                ThrowEventSourceException();
                        }
                    }
                    else
                    {
                        // Self-describing (TraceLogging) path: lazily build and cache the event's type info.
                        TraceLoggingEventTypes tlet = m_eventData[eventId].TraceLoggingEventTypes;
                        if (tlet == null)
                        {
                            tlet = new TraceLoggingEventTypes(m_eventData[eventId].Name,
                                                              EventTags.None,
                                                              m_eventData[eventId].Parameters);
                            Interlocked.CompareExchange(ref m_eventData[eventId].TraceLoggingEventTypes, tlet, null);
                        }
                        long origKwd = unchecked((long)((ulong)m_eventData[eventId].Descriptor.Keywords & ~(SessionMask.All.ToEventKeywords())));
                        // TODO: activity ID support
                        EventSourceOptions opt = new EventSourceOptions
                        {
                            Keywords = (EventKeywords)unchecked((long)etwSessions.ToEventKeywords() | origKwd),
                            Level = (EventLevel)m_eventData[eventId].Descriptor.Level,
                            Opcode = (EventOpcode)m_eventData[eventId].Descriptor.Opcode
                        };

                        WriteMultiMerge(m_eventData[eventId].Name, ref opt, tlet, pActivityId, relatedActivityId, data);
                    }
                }
#else
                if (!SelfDescribingEvents)
                {
                    if (!m_provider.WriteEvent(ref m_eventData[eventId].Descriptor, pActivityId, relatedActivityId, eventDataCount, (IntPtr)data))
                        ThrowEventSourceException();
                }
                else
                {
                    // Self-describing (TraceLogging) path: lazily build and cache the event's type info.
                    TraceLoggingEventTypes tlet = m_eventData[eventId].TraceLoggingEventTypes;
                    if (tlet == null)
                    {
                        tlet = new TraceLoggingEventTypes(m_eventData[eventId].Name,
                                                          m_eventData[eventId].Tags,
                                                          m_eventData[eventId].Parameters);
                        Interlocked.CompareExchange(ref m_eventData[eventId].TraceLoggingEventTypes, tlet, null);
                    }
                    EventSourceOptions opt = new EventSourceOptions
                    {
                        Keywords = (EventKeywords)m_eventData[eventId].Descriptor.Keywords,
                        Level = (EventLevel)m_eventData[eventId].Descriptor.Level,
                        Opcode = (EventOpcode)m_eventData[eventId].Descriptor.Opcode
                    };

                    WriteMultiMerge(m_eventData[eventId].Name, ref opt, tlet, pActivityId, relatedActivityId, data);
                }
#endif // FEATURE_ACTIVITYSAMPLING
            }
#endif // FEATURE_MANAGED_ETW

            if (m_Dispatchers != null && m_eventData[eventId].EnabledForAnyListener)
                WriteToAllListeners(eventId, relatedActivityId, eventDataCount, data);
        }
        catch (Exception ex)
        {
            // Wrap unexpected failures in EventSourceException (or rethrow if already wrapped).
            if (ex is EventSourceException)
                throw;
            else
                ThrowEventSourceException(ex);
        }
    }
}

// fallback varargs helpers.
/// <summary>
/// This is the varargs helper for writing an event. It does create an array and box all the arguments so it is
/// relatively inefficient and should only be used for relatively rare events (e.g. less than 100 / sec). If your
/// rates are faster than that you should use <see cref="WriteEventCore"/> to create fast helpers for your particular
/// method signature. Even if you use this for rare events, this call should be guarded by an <see cref="IsEnabled()"/>
/// check so that the varargs call is not made when the EventSource is not active.
/// </summary>
[SecuritySafeCritical]
[SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "This does not need to be correct when racing with other threads")]
protected unsafe void WriteEvent(int eventId, params object[] args)
{
    WriteEventVarargs(eventId, null, args);
}

/// <summary>
/// This is the varargs helper for writing an event which also specifies a related activity. It is completely analogous
/// to corresponding WriteEvent (they share implementation). It does create an array and box all the arguments so it is
/// relatively inefficient and should only be used for relatively rare events (e.g. less than 100 / sec). If your
/// rates are faster than that you should use <see cref="WriteEventWithRelatedActivityIdCore"/> to create fast helpers for your
/// particular method signature.
Even if you use this for rare events, this call should be guarded by an <see cref="IsEnabled()"/>
/// check so that the varargs call is not made when the EventSource is not active.
/// </summary>
[SecuritySafeCritical]
protected unsafe void WriteEventWithRelatedActivityId(int eventId, Guid relatedActivityId, params object[] args)
{
    WriteEventVarargs(eventId, &relatedActivityId, args);
}

#endregion

#region IDisposable Members
/// <summary>
/// Disposes of an EventSource.
/// </summary>
public void Dispose()
{
    this.Dispose(true);
    GC.SuppressFinalize(this);
}
/// <summary>
/// Disposes of an EventSource.
/// </summary>
/// <remarks>
/// Called from Dispose() with disposing=true, and from the finalizer (~EventSource) with disposing=false.
/// Guidelines:
/// 1. We may be called more than once: do nothing after the first call.
/// 2. Avoid throwing exceptions if disposing is false, i.e. if we're being finalized.
/// </remarks>
/// <param name="disposing">True if called from Dispose(), false if called from the finalizer.</param>
protected virtual void Dispose(bool disposing)
{
    if (disposing)
    {
#if FEATURE_MANAGED_ETW
        // Send the manifest one more time to ensure circular buffers have a chance to get to this information
        // even in scenarios with a high volume of ETW events.
        if (m_eventSourceEnabled)
        {
            try
            {
                SendManifest(m_rawManifest);
            }
            catch (Exception)
            { }           // If it fails, simply give up.
            m_eventSourceEnabled = false;
        }
        if (m_provider != null)
        {
            m_provider.Dispose();
            m_provider = null;
        }
#endif
    }
    m_eventSourceEnabled = false;
}
/// <summary>
/// Finalizer for EventSource
/// </summary>
~EventSource()
{
    this.Dispose(false);
}
#endregion

#region private
#if FEATURE_ACTIVITYSAMPLING
// Delivers a diagnostic string either to a specific listener or (listener == null) to ETW
// under the session keyword mask 'm'.
internal void WriteStringToListener(EventListener listener, string msg, SessionMask m)
{
    Contract.Assert(listener == null || (uint)m == (uint)SessionMask.FromId(0));

    if (m_eventSourceEnabled)
    {
        if (listener == null)
        {
            WriteEventString(0, unchecked((long)m.ToEventKeywords()), msg);
        }
        else
        {
            // Synthesize an event (id 0) whose single payload item is the message string.
            List<object> arg = new List<object>();
            arg.Add(msg);
            EventWrittenEventArgs eventCallbackArgs = new EventWrittenEventArgs(this);
            eventCallbackArgs.EventId = 0;
            eventCallbackArgs.Payload = new ReadOnlyCollection<object>(arg);
            listener.OnEventWritten(eventCallbackArgs);
        }
    }
}
#endif

[SecurityCritical]
private unsafe void WriteEventRaw(
    ref EventDescriptor eventDescriptor,
    Guid* activityID,
    Guid* relatedActivityID,
    int dataCount,
    IntPtr data)
{
#if FEATURE_MANAGED_ETW
    if (m_provider == null)
    {
        ThrowEventSourceException();
    }
    else
    {
        if (!m_provider.WriteEventRaw(ref eventDescriptor, activityID, relatedActivityID, dataCount, data))
            ThrowEventSourceException();
    }
#endif // FEATURE_MANAGED_ETW
}

// FrameworkEventSource is on the startup path for the framework, so we have this internal overload that it can use
// to prevent the working set hit from looking at the custom attributes on the type to get the Guid.
internal EventSource(Guid eventSourceGuid, string eventSourceName)
    : this(eventSourceGuid, eventSourceName, EventSourceSettings.EtwManifestEventFormat)
{ }

// Used by the internal FrameworkEventSource constructor and the TraceLogging-style event source constructor
internal EventSource(Guid eventSourceGuid, string eventSourceName, EventSourceSettings settings, string[] traits = null)
{
    m_config = ValidateSettings(settings);
    Initialize(eventSourceGuid, eventSourceName, traits);
}

/// <summary>
/// This method is responsible for the common initialization path from our constructors. It must
/// not leak any exceptions (otherwise, since most EventSource classes define a static member,
/// "Log", such an exception would become a cached exception for the initialization of the static
/// member, and any future access to the "Log" would throw the cached exception).
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1720:IdentifiersShouldNotContainTypeNames", MessageId = "guid")]
[SecuritySafeCritical]
private unsafe void Initialize(Guid eventSourceGuid, string eventSourceName, string[] traits)
{
    try
    {
        m_traits = traits;
        // Traits come in key/value pairs, so an odd count is malformed.
        if (m_traits != null && m_traits.Length % 2 != 0)
            throw new ArgumentException(Environment.GetResourceString("TraitEven"), "traits");

        if (eventSourceGuid == Guid.Empty)
            throw new ArgumentException(Environment.GetResourceString("EventSource_NeedGuid"));

        if (eventSourceName == null)
            throw new ArgumentException(Environment.GetResourceString("EventSource_NeedName"));

        m_name = eventSourceName;
        m_guid = eventSourceGuid;
#if FEATURE_ACTIVITYSAMPLING
        m_curLiveSessions = new SessionMask(0);
        m_etwSessionIdMap = new EtwSession[SessionMask.MAX];
#endif // FEATURE_ACTIVITYSAMPLING

        //Enable Implicit Activity tracker
        m_activityTracker = ActivityTracker.Instance;

#if FEATURE_MANAGED_ETW
        // Create and register our provider traits.  We do this early because it is needed to log errors
        // In the self-describing event case.
        this.InitializeProviderMetadata();

        // Register the provider with ETW
        var provider = new OverideEventProvider(this);
        provider.Register(eventSourceGuid);
#endif
        // Add the eventSource to the global (weak) list.
        // This also sets m_id, which is the index in the list.
        EventListener.AddEventSource(this);

#if FEATURE_MANAGED_ETW
        // OK if we get this far without an exception, then we can at least write out error messages.
        // Set m_provider, which allows this.
        m_provider = provider;

#if !ES_BUILD_STANDALONE
        // API available on OS >= Win 8 and patched Win 7.
        // Disable only for FrameworkEventSource to avoid recursion inside exception handling.
        var osVer = Environment.OSVersion.Version.Major * 10 + Environment.OSVersion.Version.Minor;
        if (this.Name != "System.Diagnostics.Eventing.FrameworkEventSource" || osVer >= 62)
#endif
        {
            // Attach the provider traits (built by InitializeProviderMetadata) to the ETW registration.
            // NOTE(review): setInformationResult is captured but never inspected — presumably
            // failure here is intentionally best-effort; confirm before changing.
            int setInformationResult;
            fixed (void* providerMetadata = this.providerMetadata)
            {
                setInformationResult = m_provider.SetInformation(
                    UnsafeNativeMethods.ManifestEtw.EVENT_INFO_CLASS.SetTraits,
                    providerMetadata,
                    this.providerMetadata.Length);
            }
        }
#endif // FEATURE_MANAGED_ETW
        Contract.Assert(!m_eventSourceEnabled);     // We can't be enabled until we are completely initted.
        // We are logically completely initialized at this point.
        m_completelyInited = true;
    }
    catch (Exception e)
    {
        // Deliberately swallow: record the first construction failure and report it out-of-band
        // instead of letting it poison the static 'Log' field initializer (see method summary).
        if (m_constructionException == null)
            m_constructionException = e;
        ReportOutOfBandMessage("ERROR: Exception during construction of EventSource " + Name + ": " + e.Message, true);
    }

    // Once m_completelyInited is set, you can have concurrency, so all work is under the lock.
    lock (EventListener.EventListenersLock)
    {
        // If there are any deferred commands, we can do them now.
        // This is the most likely place for exceptions to happen.
        // Note that we are NOT resetting m_deferredCommands to NULL here,
        // We are giving for EventHandler<EventCommandEventArgs> that will be attached later
        EventCommandEventArgs deferredCommands = m_deferredCommands;
        while (deferredCommands != null)
        {
            DoCommand(deferredCommands);      // This can never throw, it catches them and reports the errors.
            deferredCommands = deferredCommands.nextCommand;
        }
    }
}

// Resolves the public name of an EventSource type: the [EventSource(Name=...)] attribute
// value if present, otherwise the CLR type name.
private static string GetName(Type eventSourceType, EventManifestOptions flags)
{
    if (eventSourceType == null)
        throw new ArgumentNullException("eventSourceType");
    Contract.EndContractBlock();

    EventSourceAttribute attrib = (EventSourceAttribute)GetCustomAttributeHelper(eventSourceType, typeof(EventSourceAttribute), flags);
    if (attrib != null && attrib.Name != null)
        return attrib.Name;

    return eventSourceType.Name;
}

/// <summary>
/// Implements the SHA1 hashing algorithm. Note that this
/// implementation is for hashing public information. Do not
/// use this code to hash private data, as this implementation does
/// not take any steps to avoid information disclosure.
/// </summary>
private struct Sha1ForNonSecretPurposes
{
    private long length; // Total message length in bits
    private uint[] w; // Workspace
    private int pos; // Length of current chunk in bytes

    /// <summary>
    /// Call Start() to initialize the hash object.
    /// </summary>
    public void Start()
    {
        if (this.w == null)
        {
            // Slots 0..79 hold the message schedule; slots 80..84 hold the running
            // hash state (initialized below to the standard SHA-1 constants H0..H4).
            this.w = new uint[85];
        }

        this.length = 0;
        this.pos = 0;
        this.w[80] = 0x67452301;
        this.w[81] = 0xEFCDAB89;
        this.w[82] = 0x98BADCFE;
        this.w[83] = 0x10325476;
        this.w[84] = 0xC3D2E1F0;
    }

    /// <summary>
    /// Adds an input byte to the hash.
    /// </summary>
    /// <param name="input">Data to include in the hash.</param>
    public void Append(byte input)
    {
        // Pack bytes big-endian into 32-bit words; process the chunk when 64 bytes accumulate.
        this.w[this.pos / 4] = (this.w[this.pos / 4] << 8) | input;
        if (64 == ++this.pos)
        {
            this.Drain();
        }
    }

    /// <summary>
    /// Adds input bytes to the hash.
    /// </summary>
    /// <param name="input">
    /// Data to include in the hash. Must not be null.
/// </param> public void Append(byte[] input) { foreach (var b in input) { this.Append(b); } } /// <summary> /// Retrieves the hash value. /// Note that after calling this function, the hash object should /// be considered uninitialized. Subsequent calls to Append or /// Finish will produce useless results. Call Start() to /// reinitialize. /// </summary> /// <param name="output"> /// Buffer to receive the hash value. Must not be null. /// Up to 20 bytes of hash will be written to the output buffer. /// If the buffer is smaller than 20 bytes, the remaining hash /// bytes will be lost. If the buffer is larger than 20 bytes, the /// rest of the buffer is left unmodified. /// </param> public void Finish(byte[] output) { long l = this.length + 8 * this.pos; this.Append(0x80); while (this.pos != 56) { this.Append(0x00); } unchecked { this.Append((byte)(l >> 56)); this.Append((byte)(l >> 48)); this.Append((byte)(l >> 40)); this.Append((byte)(l >> 32)); this.Append((byte)(l >> 24)); this.Append((byte)(l >> 16)); this.Append((byte)(l >> 8)); this.Append((byte)l); int end = output.Length < 20 ? output.Length : 20; for (int i = 0; i != end; i++) { uint temp = this.w[80 + i / 4]; output[i] = (byte)(temp >> 24); this.w[80 + i / 4] = temp << 8; } } } /// <summary> /// Called when this.pos reaches 64. 
    /// </summary>
    private void Drain()
    {
        // Expand the 16 message words into the 80-word schedule.
        for (int i = 16; i != 80; i++)
        {
            this.w[i] = Rol1((this.w[i - 3] ^ this.w[i - 8] ^ this.w[i - 14] ^ this.w[i - 16]));
        }

        unchecked
        {
            // Load the running state from slots 80..84.
            uint a = this.w[80];
            uint b = this.w[81];
            uint c = this.w[82];
            uint d = this.w[83];
            uint e = this.w[84];

            // The four 20-round stages, each with its own round function f and constant k.
            for (int i = 0; i != 20; i++)
            {
                const uint k = 0x5A827999;
                uint f = (b & c) | ((~b) & d);
                uint temp = Rol5(a) + f + e + k + this.w[i]; e = d; d = c; c = Rol30(b); b = a; a = temp;
            }

            for (int i = 20; i != 40; i++)
            {
                uint f = b ^ c ^ d;
                const uint k = 0x6ED9EBA1;
                uint temp = Rol5(a) + f + e + k + this.w[i]; e = d; d = c; c = Rol30(b); b = a; a = temp;
            }

            for (int i = 40; i != 60; i++)
            {
                uint f = (b & c) | (b & d) | (c & d);
                const uint k = 0x8F1BBCDC;
                uint temp = Rol5(a) + f + e + k + this.w[i]; e = d; d = c; c = Rol30(b); b = a; a = temp;
            }

            for (int i = 60; i != 80; i++)
            {
                uint f = b ^ c ^ d;
                const uint k = 0xCA62C1D6;
                uint temp = Rol5(a) + f + e + k + this.w[i]; e = d; d = c; c = Rol30(b); b = a; a = temp;
            }

            // Fold this chunk's result back into the running state.
            this.w[80] += a;
            this.w[81] += b;
            this.w[82] += c;
            this.w[83] += d;
            this.w[84] += e;
        }

        this.length += 512; // 64 bytes == 512 bits
        this.pos = 0;
    }

    // 32-bit left-rotations by 1, 5 and 30 bits, as used by the SHA-1 rounds above.
    private static uint Rol1(uint input)
    {
        return (input << 1) | (input >> 31);
    }

    private static uint Rol5(uint input)
    {
        return (input << 5) | (input >> 27);
    }

    private static uint Rol30(uint input)
    {
        return (input << 30) | (input >> 2);
    }
}

// Derives a deterministic provider Guid from an EventSource name: SHA-1 over
// namespaceBytes + the big-endian UTF-16 name, truncated to 16 bytes, with the
// version nibble forced to 5 (name-based GUID).
private static Guid GenerateGuidFromName(string name)
{
    byte[] bytes = Encoding.BigEndianUnicode.GetBytes(name);
    var hash = new Sha1ForNonSecretPurposes();
    hash.Start();
    hash.Append(namespaceBytes);
    hash.Append(bytes);
    // Reuse 'bytes' as the 16-byte output buffer; Finish discards hash bytes beyond 16.
    Array.Resize(ref bytes, 16);
    hash.Finish(bytes);

    bytes[7] = unchecked((byte)((bytes[7] & 0x0F) | 0x50));    // Set high 4 bits of octet 7 to 5, as per RFC 4122
    return new Guid(bytes);
}

// Reads one event payload item out of the native EventData block and converts it to the
// managed type declared by the event method's parameter. Advances 'data' to the next block.
[SecurityCritical]
private unsafe object DecodeObject(int eventId, int parameterId, ref EventSource.EventData* data)
{
    // TODO FIX : We use reflection which in turn uses EventSource, right now we carefully avoid
    // the recursion, but can we do this in a robust way?

    IntPtr dataPointer = data->DataPointer;
    // advance to next EventData in array
    ++data;

    Type dataType = m_eventData[eventId].Parameters[parameterId].ParameterType;

Again:
    if (dataType == typeof(IntPtr))
    {
        return *((IntPtr*)dataPointer);
    }
    else if (dataType == typeof(int))
    {
        return *((int*)dataPointer);
    }
    else if (dataType == typeof(uint))
    {
        return *((uint*)dataPointer);
    }
    else if (dataType == typeof(long))
    {
        return *((long*)dataPointer);
    }
    else if (dataType == typeof(ulong))
    {
        return *((ulong*)dataPointer);
    }
    else if (dataType == typeof(byte))
    {
        return *((byte*)dataPointer);
    }
    else if (dataType == typeof(sbyte))
    {
        return *((sbyte*)dataPointer);
    }
    else if (dataType == typeof(short))
    {
        return *((short*)dataPointer);
    }
    else if (dataType == typeof(ushort))
    {
        return *((ushort*)dataPointer);
    }
    else if (dataType == typeof(float))
    {
        return *((float*)dataPointer);
    }
    else if (dataType == typeof(double))
    {
        return *((double*)dataPointer);
    }
    else if (dataType == typeof(decimal))
    {
        return *((decimal*)dataPointer);
    }
    else if (dataType == typeof(bool))
    {
        // The manifest defines a bool as a 32bit type (WIN32 BOOL), not 1 bit as CLR Does.
        if (*((int*)dataPointer) == 1)
        {
            return true;
        }
        else
        {
            return false;
        }
    }
    else if (dataType == typeof(Guid))
    {
        return *((Guid*)dataPointer);
    }
    else if (dataType == typeof(char))
    {
        return *((char*)dataPointer);
    }
    else if (dataType == typeof(DateTime))
    {
        long dateTimeTicks = *((long*)dataPointer);
        return DateTime.FromFileTimeUtc(dateTimeTicks);
    }
    else if (dataType == typeof(byte[]))
    {
        // byte[] are written to EventData* as an int followed by a blob
        int cbSize = *((int*)dataPointer);
        byte[] blob = new byte[cbSize];
        dataPointer = data->DataPointer;
        data++;
        // NOTE(review): this loop dereferences the SAME address cbSize times, filling the
        // array with copies of the first blob byte. It likely should read
        // *((byte*)(dataPointer + i)) (or advance the pointer) — confirm against the
        // EVENT_DATA_DESCRIPTOR blob layout and the writer side before fixing.
        for (int i = 0; i < cbSize; ++i)
            blob[i] = *((byte*)dataPointer);
        return blob;
    }
    else if (dataType == typeof(byte*))
    {
        // TODO: how do we want to handle this? For now we ignore it...
        return null;
    }
    else
    {
        if (dataType.IsEnum())
        {
            // Re-enter the dispatch above using the enum's underlying integral type.
            dataType = Enum.GetUnderlyingType(dataType);
            goto Again;
        }

        // TODO FIX NOW Assuming that it is a string at this point is really likely to be fragile
        // We should do something better.

        // Everything else is marshaled as a string.
        // ETW strings are NULL-terminated, so marshal everything up to the first
        // null in the string.
        return System.Runtime.InteropServices.Marshal.PtrToStringUni(dataPointer);
    }
}

// Finds the Dispatcher (which holds the filtering state), for a given dispatcher for the current
// eventSource).
private EventDispatcher GetDispatcher(EventListener listener)
{
    EventDispatcher dispatcher = m_Dispatchers;
    while (dispatcher != null)
    {
        if (dispatcher.m_Listener == listener)
            return dispatcher;
        dispatcher = dispatcher.m_Next;
    }
    // Falls through with dispatcher == null when the listener is not attached.
    return dispatcher;
}

// Object-array write path used by WriteEvent(params object[]) and
// WriteEventWithRelatedActivityId: routes the event to ETW and/or attached EventListeners,
// applying activity tracking and (when enabled) activity-sampling session filtering.
[SecurityCritical]
private unsafe void WriteEventVarargs(int eventId, Guid* childActivityID, object[] args)
{
    if (m_eventSourceEnabled)
    {
        try
        {
            Contract.Assert(m_eventData != null);  // You must have initialized this if you enabled the source.
            if (childActivityID != null)
            {
                ValidateEventOpcodeForTransfer(ref m_eventData[eventId]);

                // If you use WriteEventWithRelatedActivityID you MUST declare the first argument to be a GUID
                // with the name 'relatedActivityID, and NOT pass this argument to the WriteEvent method.
                // During manifest creation we modify the ParameterInfo[] that we store to strip out any
                // first parameter that is of type Guid and named "relatedActivityId." Thus, if you call
                // WriteEventWithRelatedActivityID from a method that doesn't name its first parameter correctly
                // we can end up in a state where the ParameterInfo[] doesn't have its first parameter stripped,
                // and this leads to a mismatch between the number of arguments and the number of ParameterInfos,
                // which would cause a cryptic IndexOutOfRangeException later if we don't catch it here.
                if (!m_eventData[eventId].HasRelatedActivityID)
                {
                    throw new ArgumentException(Environment.GetResourceString("EventSource_NoRelatedActivityId"));
                }
            }

            LogEventArgsMismatches(m_eventData[eventId].Parameters, args);
#if FEATURE_MANAGED_ETW
            if (m_eventData[eventId].EnabledForETW)
            {
                Guid* pActivityId = null;
                Guid activityId = Guid.Empty;
                Guid relatedActivityId = Guid.Empty;
                EventOpcode opcode = (EventOpcode)m_eventData[eventId].Descriptor.Opcode;
                EventActivityOptions activityOptions = m_eventData[eventId].ActivityOptions;

                // Implicit activity tracking only applies when the caller did not supply a
                // related activity id and the event has not opted out via EventActivityOptions.Disable.
                if (childActivityID == null &&
                   ((activityOptions & EventActivityOptions.Disable) == 0))
                {
                    if (opcode == EventOpcode.Start)
                    {
                        m_activityTracker.OnStart(m_name, m_eventData[eventId].Name, m_eventData[eventId].Descriptor.Task, ref activityId, ref relatedActivityId, m_eventData[eventId].ActivityOptions);
                    }
                    else if (opcode == EventOpcode.Stop)
                    {
                        m_activityTracker.OnStop(m_name, m_eventData[eventId].Name, m_eventData[eventId].Descriptor.Task, ref activityId);
                    }

                    if (activityId != Guid.Empty)
                        pActivityId = &activityId;
                    if (relatedActivityId != Guid.Empty)
                        childActivityID = &relatedActivityId;
                }

#if FEATURE_ACTIVITYSAMPLING
                // this code should be kept in sync with WriteEventWithRelatedActivityIdCore().
                SessionMask etwSessions = SessionMask.All;
                // only compute etwSessions if there are *any* ETW filters enabled...
                if ((ulong)m_curLiveSessions != 0)
                    etwSessions = GetEtwSessionMask(eventId, childActivityID);

                if ((ulong)etwSessions != 0 || m_legacySessions != null && m_legacySessions.Count > 0)
                {
                    if (!SelfDescribingEvents)
                    {
                        if (etwSessions.IsEqualOrSupersetOf(m_curLiveSessions))
                        {
                            // by default the Descriptor.Keyword will have the perEventSourceSessionId bit
                            // mask set to 0x0f so, when all ETW sessions want the event we don't need to
                            // synthesize a new one
                            if (!m_provider.WriteEvent(ref m_eventData[eventId].Descriptor, pActivityId, childActivityID, args))
                                ThrowEventSourceException();
                        }
                        else
                        {
                            long origKwd = unchecked((long)((ulong)m_eventData[eventId].Descriptor.Keywords & ~(SessionMask.All.ToEventKeywords())));
                            // only some of the ETW sessions will receive this event. Synthesize a new
                            // Descriptor whose Keywords field will have the appropriate bits set.
                            var desc = new EventDescriptor(
                                m_eventData[eventId].Descriptor.EventId,
                                m_eventData[eventId].Descriptor.Version,
                                m_eventData[eventId].Descriptor.Channel,
                                m_eventData[eventId].Descriptor.Level,
                                m_eventData[eventId].Descriptor.Opcode,
                                m_eventData[eventId].Descriptor.Task,
                                unchecked((long)(ulong)etwSessions | origKwd));

                            if (!m_provider.WriteEvent(ref desc, pActivityId, childActivityID, args))
                                ThrowEventSourceException();
                        }
                    }
                    else
                    {
                        // Self-describing (TraceLogging) format: lazily build and cache the
                        // event's type metadata, racing benignly via CompareExchange.
                        TraceLoggingEventTypes tlet = m_eventData[eventId].TraceLoggingEventTypes;
                        if (tlet == null)
                        {
                            tlet = new TraceLoggingEventTypes(m_eventData[eventId].Name,
                                                              EventTags.None,
                                                              m_eventData[eventId].Parameters);
                            Interlocked.CompareExchange(ref m_eventData[eventId].TraceLoggingEventTypes, tlet, null);
                        }
                        long origKwd = unchecked((long)((ulong)m_eventData[eventId].Descriptor.Keywords & ~(SessionMask.All.ToEventKeywords())));
                        // TODO: activity ID support
                        EventSourceOptions opt = new EventSourceOptions
                        {
                            Keywords = (EventKeywords)unchecked((long)(ulong)etwSessions | origKwd),
                            Level = (EventLevel)m_eventData[eventId].Descriptor.Level,
                            Opcode = (EventOpcode)m_eventData[eventId].Descriptor.Opcode
                        };

                        WriteMultiMerge(m_eventData[eventId].Name, ref opt, tlet, pActivityId, childActivityID, args);
                    }
                }
#else
                if (!SelfDescribingEvents)
                {
                    if (!m_provider.WriteEvent(ref m_eventData[eventId].Descriptor, pActivityId, childActivityID, args))
                        ThrowEventSourceException();
                }
                else
                {
                    TraceLoggingEventTypes tlet = m_eventData[eventId].TraceLoggingEventTypes;
                    if (tlet == null)
                    {
                        tlet = new TraceLoggingEventTypes(m_eventData[eventId].Name,
                                                          EventTags.None,
                                                          m_eventData[eventId].Parameters);
                        Interlocked.CompareExchange(ref m_eventData[eventId].TraceLoggingEventTypes, tlet, null);
                    }
                    // TODO: activity ID support
                    EventSourceOptions opt = new EventSourceOptions
                    {
                        Keywords = (EventKeywords)m_eventData[eventId].Descriptor.Keywords,
                        Level = (EventLevel)m_eventData[eventId].Descriptor.Level,
                        Opcode = (EventOpcode)m_eventData[eventId].Descriptor.Opcode
                    };

                    WriteMultiMerge(m_eventData[eventId].Name, ref opt, tlet, pActivityId, childActivityID, args);
                }
#endif // FEATURE_ACTIVITYSAMPLING
            }
#endif // FEATURE_MANAGED_ETW
            if (m_Dispatchers != null && m_eventData[eventId].EnabledForAnyListener)
            {
#if !ES_BUILD_STANDALONE
                // Maintain old behavior - object identity is preserved
                if (AppContextSwitches.PreserveEventListnerObjectIdentity)
                {
                    WriteToAllListeners(eventId, childActivityID, args);
                }
                else
#endif // !ES_BUILD_STANDALONE
                {
                    object[] serializedArgs = SerializeEventArgs(eventId, args);
                    WriteToAllListeners(eventId, childActivityID, serializedArgs);
                }
            }
        }
        catch (Exception ex)
        {
            // Preserve EventSourceExceptions thrown above; wrap anything else.
            if (ex is EventSourceException)
                throw;
            else
                ThrowEventSourceException(ex);
        }
    }
}

// Converts raw caller arguments into listener-safe payload objects using the event's
// TraceLogging type metadata (built lazily and cached, racing benignly via CompareExchange).
[SecurityCritical]
unsafe private object[] SerializeEventArgs(int eventId, object[] args)
{
    TraceLoggingEventTypes eventTypes = m_eventData[eventId].TraceLoggingEventTypes;
    if (eventTypes == null)
    {
        eventTypes = new TraceLoggingEventTypes(m_eventData[eventId].Name,
                                                EventTags.None,
                                                m_eventData[eventId].Parameters);
        Interlocked.CompareExchange(ref m_eventData[eventId].TraceLoggingEventTypes, eventTypes, null);
    }
    // NOTE(review): assumes args.Length >= eventTypes.typeInfos.Length; a shorter args
    // array would throw IndexOutOfRangeException here — presumably guaranteed by callers
    // (LogEventArgsMismatches only warns). Confirm.
    var eventData = new object[eventTypes.typeInfos.Length];
    for (int i = 0; i < eventTypes.typeInfos.Length; i++)
    {
        eventData[i] = eventTypes.typeInfos[i].GetData(args[i]);
    }
    return eventData;
}

/// <summary>
/// We expect that the arguments to the Event method and the arguments to WriteEvent match. This function
/// checks that they in fact match and logs a warning to the debugger if they don't.
/// </summary>
/// <param name="infos">Declared parameters of the [Event] method.</param>
/// <param name="args">Actual arguments passed to WriteEvent.</param>
private void LogEventArgsMismatches(ParameterInfo[] infos, object[] args)
{
#if !ES_BUILD_PCL
    // It would be nice to have this on PCL builds, but it would be pointless since there isn't support for
    // writing to the debugger log on PCL.
    bool typesMatch = args.Length == infos.Length;

    int i = 0;
    while (typesMatch && i < args.Length)
    {
        Type pType = infos[i].ParameterType;

        // Checking to see if the Parameter types (from the Event method) match the supplied argument types.
        // Fail if one of two things hold : either the argument type is not equal to the parameter type, or the
        // argument is null and the parameter type is non-nullable.
        // NOTE(review): the null branch only accepts Nullable<T>; a null argument for a
        // reference-type parameter (e.g. string) is flagged as a mismatch too — verify
        // whether that warning is intended.
        if ((args[i] != null && (args[i].GetType() != pType))
            || (args[i] == null && (!(pType.IsGenericType && pType.GetGenericTypeDefinition() == typeof(Nullable<>))))
            )
        {
            typesMatch = false;
            break;
        }

        ++i;
    }

    if (!typesMatch)
    {
        // Diagnostic only: warn via the debugger log; the event is still written.
        System.Diagnostics.Debugger.Log(0, null, Environment.GetResourceString("EventSource_VarArgsParameterMismatch") + "\r\n");
    }
#endif //!ES_BUILD_PCL
}

// Decodes the native EventData blocks into managed objects and forwards to the
// object[]-based listener dispatch. Tolerates a count mismatch by decoding the
// smaller of the two counts (after reporting the problem out-of-band).
[SecurityCritical]
unsafe private void WriteToAllListeners(int eventId, Guid* childActivityID, int eventDataCount, EventSource.EventData* data)
{
    int paramCount = m_eventData[eventId].Parameters.Length;
    if (eventDataCount != paramCount)
    {
        ReportOutOfBandMessage(Environment.GetResourceString("EventSource_EventParametersMismatch", eventId, eventDataCount, paramCount), true);
        paramCount = Math.Min(paramCount, eventDataCount);
    }

    object[] args = new object[paramCount];

    EventSource.EventData* dataPtr = data;
    for (int i = 0; i < paramCount; i++)
        args[i] = DecodeObject(eventId, i, ref dataPtr);
    WriteToAllListeners(eventId, childActivityID, args);
}

// helper for writing to all EventListeners attached the current eventSource.
[SecurityCritical]
unsafe private void WriteToAllListeners(int eventId, Guid* childActivityID, params object[] args)
{
    // Build the callback args once and share the instance across all dispatchers.
    EventWrittenEventArgs eventCallbackArgs = new EventWrittenEventArgs(this);
    eventCallbackArgs.EventId = eventId;
    if (childActivityID != null)
        eventCallbackArgs.RelatedActivityId = *childActivityID;
    eventCallbackArgs.EventName = m_eventData[eventId].Name;
    eventCallbackArgs.Message = m_eventData[eventId].Message;
    eventCallbackArgs.Payload = new ReadOnlyCollection<object>(args);

    DispatchToAllListeners(eventId, childActivityID, eventCallbackArgs);
}

// Invokes OnEventWritten on every attached listener whose dispatcher has this event
// enabled (eventId == -1 bypasses the per-event check). Listener exceptions are reported
// out-of-band; the LAST one is rethrown wrapped in EventSourceException after the loop,
// so one faulty listener does not starve the others.
[SecurityCritical]
private unsafe void DispatchToAllListeners(int eventId, Guid* childActivityID, EventWrittenEventArgs eventCallbackArgs)
{
    Exception lastThrownException = null;
    for (EventDispatcher dispatcher = m_Dispatchers; dispatcher != null; dispatcher = dispatcher.m_Next)
    {
        Contract.Assert(dispatcher.m_EventEnabled != null);
        if (eventId == -1 || dispatcher.m_EventEnabled[eventId])
        {
#if FEATURE_ACTIVITYSAMPLING
            var activityFilter = dispatcher.m_Listener.m_activityFilter;
            // order below is important as PassesActivityFilter will "flow" active activities
            // even when the current EventSource doesn't have filtering enabled. This allows
            // interesting activities to be updated so that sources that do sample can get
            // accurate data
            if (activityFilter == null ||
                ActivityFilter.PassesActivityFilter(activityFilter, childActivityID,
                                                    m_eventData[eventId].TriggersActivityTracking > 0,
                                                    this, eventId) ||
                !dispatcher.m_activityFilteringEnabled)
#endif // FEATURE_ACTIVITYSAMPLING
            {
                try
                {
                    dispatcher.m_Listener.OnEventWritten(eventCallbackArgs);
                }
                catch (Exception e)
                {
                    ReportOutOfBandMessage("ERROR: Exception during EventSource.OnEventWritten: "
                         + e.Message, false);
                    lastThrownException = e;
                }
            }
        }
    }

    if (lastThrownException != null)
    {
        throw new EventSourceException(lastThrownException);
    }
}

// Writes a free-form string to ETW as the synthetic "EventSourceMessage" event.
// Used for out-of-band error reporting, so it must work even when the source is
// only partially initialized.
[SecuritySafeCritical]
[SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "This does not need to be correct when racing with other threads")]
private unsafe void WriteEventString(EventLevel level, long keywords, string msgString)
{
#if FEATURE_MANAGED_ETW
    if (m_provider != null)
    {
        string eventName = "EventSourceMessage";
        if (SelfDescribingEvents)
        {
            EventSourceOptions opt = new EventSourceOptions
            {
                Keywords = (EventKeywords)unchecked(keywords),
                Level = level
            };
            // Anonymous type gives the payload a single "message" field in the
            // self-describing metadata.
            var msg = new { message = msgString };
            var tlet = new TraceLoggingEventTypes(eventName, EventTags.None, new Type[] { msg.GetType() });
            WriteMultiMergeInner(eventName, ref opt, tlet, null, null, msg);
        }
        else
        {
            // We want the name of the provider to show up so if we don't have a manifest we create
            // on that at least has the provider name (I don't define any events).
            if (m_rawManifest == null && m_outOfBandMessageCount == 1)
            {
                ManifestBuilder manifestBuilder = new ManifestBuilder(Name, Guid, Name, null, EventManifestOptions.None);
                manifestBuilder.StartEvent(eventName, new EventAttribute(0) { Level = EventLevel.LogAlways, Task = (EventTask)0xFFFE });
                manifestBuilder.AddEventParameter(typeof(string), "message");
                manifestBuilder.EndEvent();
                SendManifest(manifestBuilder.CreateManifest());
            }

            // We use this low level routine to to bypass the enabled checking, since the eventSource itself is only partially inited.
            fixed (char* msgStringPtr = msgString)
            {
                EventDescriptor descr = new EventDescriptor(0, 0, 0, (byte)level, 0, 0, keywords);
                EventProvider.EventData data = new EventProvider.EventData();
                data.Ptr = (ulong)msgStringPtr;
                // Size in bytes: UTF-16 chars plus the terminating null.
                data.Size = (uint)(2 * (msgString.Length + 1));
                data.Reserved = 0;
                m_provider.WriteEvent(ref descr, null, null, 1, (IntPtr)((void*)&data));
            }
        }
    }
#endif // FEATURE_MANAGED_ETW
}

/// <summary>
/// Since this is a means of reporting errors (see ReportoutOfBandMessage) any failure encountered
/// while writing the message to any one of the listeners will be silently ignored.
/// </summary>
private void WriteStringToAllListeners(string eventName, string msg)
{
    EventWrittenEventArgs eventCallbackArgs = new EventWrittenEventArgs(this);
    eventCallbackArgs.EventId = 0;
    eventCallbackArgs.Message = msg;
    eventCallbackArgs.Payload = new ReadOnlyCollection<object>(new List<object>() { msg });
    eventCallbackArgs.PayloadNames = new ReadOnlyCollection<string>(new List<string> { "message" });
    eventCallbackArgs.EventName = eventName;

    for (EventDispatcher dispatcher = m_Dispatchers; dispatcher != null; dispatcher = dispatcher.m_Next)
    {
        bool dispatcherEnabled = false;
        if (dispatcher.m_EventEnabled == null)
        {
            // if the listeners that weren't correctly initialized, we will send to it
            // since this is an error message and we want to see it go out.
            dispatcherEnabled = true;
        }
        else
        {
            // if there's *any* enabled event on the dispatcher we'll write out the string
            // otherwise we'll treat the listener as disabled and skip it
            for (int evtId = 0; evtId < dispatcher.m_EventEnabled.Length; ++evtId)
            {
                if (dispatcher.m_EventEnabled[evtId])
                {
                    dispatcherEnabled = true;
                    break;
                }
            }
        }
        try
        {
            if (dispatcherEnabled)
                dispatcher.m_Listener.OnEventWritten(eventCallbackArgs);
        }
        catch
        {
            // ignore any exceptions thrown by listeners' OnEventWritten
        }
    }
}

#if FEATURE_ACTIVITYSAMPLING
// Computes which activity-sampling-aware ETW sessions should receive this event,
// and flows "interesting" activity ids to legacy sessions as a side effect.
[SecurityCritical]
unsafe private SessionMask GetEtwSessionMask(int eventId, Guid* childActivityID)
{
    SessionMask etwSessions = new SessionMask();

    for (int i = 0; i < SessionMask.MAX; ++i)
    {
        EtwSession etwSession = m_etwSessionIdMap[i];
        if (etwSession != null)
        {
            ActivityFilter activityFilter = etwSession.m_activityFilter;
            // PassesActivityFilter() will flow "interesting" activities, so make sure
            // to perform this test first, before ORing with ~m_activityFilteringForETWEnabled
            // (note: the first test for !m_activityFilteringForETWEnabled[i] ensures we
            // do not fire events indiscriminately, when no filters are specified, but only
            // if, in addition, the session did not also enable ActivitySampling)
            if (activityFilter == null && !m_activityFilteringForETWEnabled[i] ||
                activityFilter != null &&
                    ActivityFilter.PassesActivityFilter(activityFilter, childActivityID,
                        m_eventData[eventId].TriggersActivityTracking > 0, this, eventId) ||
                !m_activityFilteringForETWEnabled[i])
            {
                etwSessions[i] = true;
            }
        }
    }
    // flow "interesting" activities for all legacy sessions in which there's some
    // level of activity tracing enabled (even other EventSources)
    if (m_legacySessions != null && m_legacySessions.Count > 0 &&
        (EventOpcode)m_eventData[eventId].Descriptor.Opcode == EventOpcode.Send)
    {
        // only calculate InternalCurrentThreadActivityId once
        Guid* pCurrentActivityId = null;
        Guid currentActivityId;
        foreach (var legacyEtwSession in m_legacySessions)
        {
            if (legacyEtwSession == null)
                continue;

            ActivityFilter activityFilter = legacyEtwSession.m_activityFilter;
            if (activityFilter != null)
            {
                if (pCurrentActivityId == null)
                {
                    currentActivityId = InternalCurrentThreadActivityId;
                    pCurrentActivityId = &currentActivityId;
                }
                ActivityFilter.FlowActivityIfNeeded(activityFilter, pCurrentActivityId, childActivityID);
            }
        }
    }

    return etwSessions;
}
#endif // FEATURE_ACTIVITYSAMPLING

/// <summary>
/// Returns true if 'eventNum' is enabled if you only consider the level and matchAnyKeyword filters.
/// It is possible that eventSources turn off the event based on additional filtering criteria.
/// </summary>
private bool IsEnabledByDefault(int eventNum, bool enable, EventLevel currentLevel, EventKeywords currentMatchAnyKeyword)
{
    if (!enable)
        return false;

    EventLevel eventLevel = (EventLevel)m_eventData[eventNum].Descriptor.Level;
    // Strip the reserved activity-sampling session bits out of the event's keywords
    // before comparing against the session's matchAnyKeyword.
    EventKeywords eventKeywords = unchecked((EventKeywords)((ulong)m_eventData[eventNum].Descriptor.Keywords & (~(SessionMask.All.ToEventKeywords()))));

#if FEATURE_MANAGED_ETW_CHANNELS
    EventChannel channel = unchecked((EventChannel)m_eventData[eventNum].Descriptor.Channel);
#else
    EventChannel channel = EventChannel.None;
#endif

    return IsEnabledCommon(enable, currentLevel, currentMatchAnyKeyword, eventLevel, eventKeywords, channel);
}

// Shared level/keyword/channel filtering used by both the per-event check above and
// the coarse IsEnabled overloads. currentLevel == 0 (LogAlways) means "no level filter".
private bool IsEnabledCommon(bool enabled, EventLevel currentLevel, EventKeywords currentMatchAnyKeyword,
                                          EventLevel eventLevel, EventKeywords eventKeywords, EventChannel eventChannel)
{
    if (!enabled)
        return false;

    // does is pass the level test?
    if ((currentLevel != 0) && (currentLevel < eventLevel))
        return false;

    // if yes, does it pass the keywords test?
    if (currentMatchAnyKeyword != 0 && eventKeywords != 0)
    {
#if FEATURE_MANAGED_ETW_CHANNELS
        // is there a channel with keywords that match currentMatchAnyKeyword?
        if (eventChannel != EventChannel.None && this.m_channelData != null && this.m_channelData.Length > (int)eventChannel)
        {
            EventKeywords channel_keywords = unchecked((EventKeywords)(m_channelData[(int)eventChannel] | (ulong)eventKeywords));
            if (channel_keywords != 0 && (channel_keywords & currentMatchAnyKeyword) == 0)
                return false;
        }
        else
#endif
        {
            if ((unchecked((ulong)eventKeywords & (ulong)currentMatchAnyKeyword)) == 0)
                return false;
        }
    }
    return true;
}

// Reports the last ETW write error out-of-band and (when ThrowOnEventWriteErrors is set)
// throws a matching EventSourceException. Guarded against re-entrancy because the
// out-of-band reporting path can itself fail and call back in here.
[System.Runtime.CompilerServices.MethodImpl(System.Runtime.CompilerServices.MethodImplOptions.NoInlining)]
private void ThrowEventSourceException(Exception innerEx = null)
{
    // If we fail during out of band logging we may end up trying
    // to throw another EventSourceException, thus hitting a StackOverflowException.
    // Avoid StackOverflow by making sure we do not recursively call this method.
    if (m_EventSourceExceptionRecurenceCount > 0)
        return;
    try
    {
        m_EventSourceExceptionRecurenceCount++;

        // TODO Create variations of EventSourceException that indicate more information using the error code.
        switch (EventProvider.GetLastWriteEventError())
        {
            case EventProvider.WriteEventErrorCode.EventTooBig:
                ReportOutOfBandMessage("EventSourceException: " + Environment.GetResourceString("EventSource_EventTooBig"), true);
                if (ThrowOnEventWriteErrors) throw new EventSourceException(Environment.GetResourceString("EventSource_EventTooBig"), innerEx);
                break;
            case EventProvider.WriteEventErrorCode.NoFreeBuffers:
                ReportOutOfBandMessage("EventSourceException: " + Environment.GetResourceString("EventSource_NoFreeBuffers"), true);
                if (ThrowOnEventWriteErrors) throw new EventSourceException(Environment.GetResourceString("EventSource_NoFreeBuffers"), innerEx);
                break;
            case EventProvider.WriteEventErrorCode.NullInput:
                ReportOutOfBandMessage("EventSourceException: " + Environment.GetResourceString("EventSource_NullInput"), true);
                if (ThrowOnEventWriteErrors) throw new EventSourceException(Environment.GetResourceString("EventSource_NullInput"), innerEx);
                break;
            case EventProvider.WriteEventErrorCode.TooManyArgs:
                ReportOutOfBandMessage("EventSourceException: " + Environment.GetResourceString("EventSource_TooManyArgs"), true);
                if (ThrowOnEventWriteErrors) throw new EventSourceException(Environment.GetResourceString("EventSource_TooManyArgs"), innerEx);
                break;
            default:
                if (innerEx != null)
                    ReportOutOfBandMessage("EventSourceException: " + innerEx.GetType() + ":" + innerEx.Message, true);
                else
                    ReportOutOfBandMessage("EventSourceException", true);
                if (ThrowOnEventWriteErrors) throw new EventSourceException(innerEx);
                break;
        }
    }
    finally
    {
        m_EventSourceExceptionRecurenceCount--;
    }
}

// Events carrying a related activity id must use a transfer-style opcode
// (Send, Receive, or Start); anything else is reported as a write error.
private void ValidateEventOpcodeForTransfer(ref EventMetadata eventData)
{
    if ((EventOpcode)eventData.Descriptor.Opcode != EventOpcode.Send &&
        (EventOpcode)eventData.Descriptor.Opcode != EventOpcode.Receive &&
        (EventOpcode)eventData.Descriptor.Opcode != EventOpcode.Start)
    {
        ThrowEventSourceException();
    }
}

// Infers Start/Stop opcodes from the event-name suffix convention when the event
// was declared with the default (Info) opcode.
internal static EventOpcode GetOpcodeWithDefault(EventOpcode opcode, string eventName)
{
    if (opcode == EventOpcode.Info && eventName != null)
    {
        if (eventName.EndsWith(s_ActivityStartSuffix))
        {
            return EventOpcode.Start;
        }
        else if (eventName.EndsWith(s_ActivityStopSuffix))
        {
            return EventOpcode.Stop;
        }
    }

    return opcode;
}

#if FEATURE_MANAGED_ETW
/// <summary>
/// This class lets us hook the 'OnEventCommand' from the eventSource.
/// </summary>
private class OverideEventProvider : EventProvider
{
    public OverideEventProvider(EventSource eventSource)
    {
        this.m_eventSource = eventSource;
    }
    protected override void OnControllerCommand(ControllerCommand command, IDictionary<string, string> arguments,
                                                int perEventSourceSessionId, int etwSessionId)
    {
        // We use null to represent the ETW EventListener.
        EventListener listener = null;
        m_eventSource.SendCommand(listener, perEventSourceSessionId, etwSessionId,
                                  (EventCommand)command, IsEnabled(), Level, MatchAnyKeyword, arguments);
    }
    private EventSource m_eventSource;
}
#endif

/// <summary>
/// Used to hold all the static information about an event. This includes everything in the event
/// descriptor as well as some stuff we added specifically for EventSource. see the
/// code:m_eventData for where we use this.
/// </summary>
internal struct EventMetadata
{
    public EventDescriptor Descriptor;      // the ETW-level identity (id, version, level, opcode, task, keywords)
    public EventTags Tags;
    public bool EnabledForAnyListener;      // true if any dispatcher has this event turned on
    public bool EnabledForETW;              // is this event on for the OS ETW data dispatcher?

    public bool HasRelatedActivityID;       // Set if the event method's first parameter is a Guid named 'relatedActivityId'

#if !FEATURE_ACTIVITYSAMPLING
#pragma warning disable 0649
#endif
    public byte TriggersActivityTracking;   // count of listeners that marked this event as trigger for start of activity logging.
#if !FEATURE_ACTIVITYSAMPLING
#pragma warning restore 0649
#endif
    public string Name;                     // the name of the event
    public string Message;                  // If the event has a message associated with it, this is it.
    public ParameterInfo[] Parameters;      // TODO can we remove?

    public TraceLoggingEventTypes TraceLoggingEventTypes;   // lazily-built metadata for self-describing writes
    public EventActivityOptions ActivityOptions;            // per-event activity tracking opt-out/options
};

// This is the internal entry point that code:EventListeners call when wanting to send a command to a
// eventSource. The logic is as follows
//
// * if Command == Update
//     * perEventSourceSessionId specifies the per-provider ETW session ID that the command applies
//         to (if listener != null)
//         perEventSourceSessionId = 0 - reserved for EventListeners
//         perEventSourceSessionId = 1..SessionMask.MAX - reserved for activity tracing aware ETW sessions
//                   perEventSourceSessionId-1 represents the bit in the reserved field (bits 44..47) in
//                   Keywords that identifies the session
//         perEventSourceSessionId = SessionMask.MAX+1 - reserved for legacy ETW sessions; these are
//                   discriminated by etwSessionId
//     * etwSessionId specifies a machine-wide ETW session ID; this allows correlation of
//         activity tracing across different providers (which might have different sessionIds
//         for the same ETW session)
//     * enable, level, matchAnyKeywords are used to set a default for all events for the
//         eventSource. In particular, if 'enabled' is false, 'level' and
//         'matchAnyKeywords' are not used.
//     * OnEventCommand is invoked, which may cause calls to
//         code:EventSource.EnableEventForDispatcher which may cause changes in the filtering
//         depending on the logic in that routine.
// * else (command != Update)
//     * Simply call OnEventCommand. The expectation is that filtering is NOT changed.
//     * The 'enabled' 'level', matchAnyKeyword' arguments are ignored (must be true, 0, 0).
//
// dispatcher == null has special meaning. It is the 'ETW' dispatcher.
internal void SendCommand(EventListener listener, int perEventSourceSessionId, int etwSessionId, EventCommand command, bool enable, EventLevel level, EventKeywords matchAnyKeyword, IDictionary<string, string> commandArguments) { var commandArgs = new EventCommandEventArgs(command, commandArguments, this, listener, perEventSourceSessionId, etwSessionId, enable, level, matchAnyKeyword); lock (EventListener.EventListenersLock) { if (m_completelyInited) { // After the first command arrive after construction, we are ready to get rid of the deferred commands this.m_deferredCommands = null; // We are fully initialized, do the command DoCommand(commandArgs); } else { // We can't do the command, simply remember it and we do it when we are fully constructed. commandArgs.nextCommand = m_deferredCommands; m_deferredCommands = commandArgs; } } } /// <summary> /// We want the eventSource to be fully initialized when we do commands because that way we can send /// error messages and other logging directly to the event stream. Unfortunately we can get callbacks /// when we are not fully initialized. In that case we store them in 'commandArgs' and do them later. /// This helper actually does all actual command logic. /// </summary> internal void DoCommand(EventCommandEventArgs commandArgs) { // PRECONDITION: We should be holding the EventListener.EventListenersLock // We defer commands until we are completely inited. This allows error messages to be sent. 
    // (continuation of DoCommand — the signature and opening comments precede this range)
    Contract.Assert(m_completelyInited);

#if FEATURE_MANAGED_ETW
    if (m_provider == null)     // If we failed to construct
        return;
#endif // FEATURE_MANAGED_ETW

    m_outOfBandMessageCount = 0;
    // Only report activity-sampling info for real activity-tracing-aware session ids.
    bool shouldReport = (commandArgs.perEventSourceSessionId > 0) && (commandArgs.perEventSourceSessionId <= SessionMask.MAX);
    try
    {
        EnsureDescriptorsInitialized();
        Contract.Assert(m_eventData != null);

        // Find the per-EventSource dispatcher corresponding to registered dispatcher
        commandArgs.dispatcher = GetDispatcher(commandArgs.listener);
        if (commandArgs.dispatcher == null && commandArgs.listener != null)     // dispatcher == null means ETW dispatcher
            throw new ArgumentException(Environment.GetResourceString("EventSource_ListenerNotFound"));

        if (commandArgs.Arguments == null)
            commandArgs.Arguments = new Dictionary<string, string>();

        if (commandArgs.Command == EventCommand.Update)
        {
            // Set it up using the 'standard' filtering bitfields (use the "global" enable, not session specific one)
            for (int i = 0; i < m_eventData.Length; i++)
                EnableEventForDispatcher(commandArgs.dispatcher, i, IsEnabledByDefault(i, commandArgs.enable, commandArgs.level, commandArgs.matchAnyKeyword));

            if (commandArgs.enable)
            {
                if (!m_eventSourceEnabled)
                {
                    // EventSource turned on for the first time, simply copy the bits.
                    m_level = commandArgs.level;
                    m_matchAnyKeyword = commandArgs.matchAnyKeyword;
                }
                else
                {
                    // Already enabled, make it the most verbose of the existing and new filter
                    if (commandArgs.level > m_level)
                        m_level = commandArgs.level;
                    if (commandArgs.matchAnyKeyword == 0)
                        m_matchAnyKeyword = 0;
                    else if (m_matchAnyKeyword != 0)
                        m_matchAnyKeyword = unchecked(m_matchAnyKeyword | commandArgs.matchAnyKeyword);
                }
            }

            // interpret perEventSourceSessionId's sign, and adjust perEventSourceSessionId to
            // represent 0-based positive values
            bool bSessionEnable = (commandArgs.perEventSourceSessionId >= 0);
            if (commandArgs.perEventSourceSessionId == 0 && commandArgs.enable == false)
                bSessionEnable = false;

            if (commandArgs.listener == null)
            {
                if (!bSessionEnable)
                    commandArgs.perEventSourceSessionId = -commandArgs.perEventSourceSessionId;
                // for "global" enable/disable (passed in with listener == null and
                // perEventSourceSessionId == 0) perEventSourceSessionId becomes -1
                --commandArgs.perEventSourceSessionId;
            }

            commandArgs.Command = bSessionEnable ? EventCommand.Enable : EventCommand.Disable;

            // perEventSourceSessionId = -1 when ETW sent a notification, but the set of active sessions
            // hasn't changed.
            // sessionId = SessionMask.MAX when one of the legacy ETW sessions changed
            // 0 <= perEventSourceSessionId < SessionMask.MAX for activity-tracing aware sessions
            Contract.Assert(commandArgs.perEventSourceSessionId >= -1 && commandArgs.perEventSourceSessionId <= SessionMask.MAX);

            // Send the manifest if we are enabling an ETW session
            if (bSessionEnable && commandArgs.dispatcher == null)
            {
                // eventSourceDispatcher == null means this is the ETW manifest
                // Note that we unconditionally send the manifest whenever we are enabled, even if
                // we were already enabled.   This is because there may be multiple sessions active
                // and we can't know that all the sessions have seen the manifest.
                if (!SelfDescribingEvents)
                    SendManifest(m_rawManifest);
            }

#if FEATURE_ACTIVITYSAMPLING
            if (bSessionEnable && commandArgs.perEventSourceSessionId != -1)
            {
                bool participateInSampling = false;
                string activityFilters;
                int sessionIdBit;
                ParseCommandArgs(commandArgs.Arguments, out participateInSampling,
                                 out activityFilters, out sessionIdBit);
                if (commandArgs.listener == null && commandArgs.Arguments.Count > 0 && commandArgs.perEventSourceSessionId != sessionIdBit)
                {
                    throw new ArgumentException(Environment.GetResourceString("EventSource_SessionIdError",
                                                commandArgs.perEventSourceSessionId + SessionMask.SHIFT_SESSION_TO_KEYWORD,
                                                sessionIdBit + SessionMask.SHIFT_SESSION_TO_KEYWORD));
                }
                if (commandArgs.listener == null)
                {
                    UpdateEtwSession(commandArgs.perEventSourceSessionId, commandArgs.etwSessionId, true, activityFilters, participateInSampling);
                }
                else
                {
                    ActivityFilter.UpdateFilter(ref commandArgs.listener.m_activityFilter, this, 0, activityFilters);
                    commandArgs.dispatcher.m_activityFilteringEnabled = participateInSampling;
                }
            }
            else if (!bSessionEnable && commandArgs.listener == null)
            {
                // if we disable an ETW session, indicate that in a synthesized command argument
                if (commandArgs.perEventSourceSessionId >= 0 && commandArgs.perEventSourceSessionId < SessionMask.MAX)
                {
                    commandArgs.Arguments["EtwSessionKeyword"] = (commandArgs.perEventSourceSessionId + SessionMask.SHIFT_SESSION_TO_KEYWORD).ToString(CultureInfo.InvariantCulture);
                }
            }
#endif // FEATURE_ACTIVITYSAMPLING

            // Turn on the enable bit before making the OnEventCommand callback.  This allows you to do useful
            // things like log messages, or test if keywords are enabled in the callback.
            if (commandArgs.enable)
            {
                Contract.Assert(m_eventData != null);
                m_eventSourceEnabled = true;
            }

            this.OnEventCommand(commandArgs);
            var eventCommandCallback = this.m_eventCommandExecuted;
            if (eventCommandCallback != null)
                eventCommandCallback(this, commandArgs);

#if FEATURE_ACTIVITYSAMPLING
            if (commandArgs.listener == null && !bSessionEnable && commandArgs.perEventSourceSessionId != -1)
            {
                // if we disable an ETW session, complete disabling it
                UpdateEtwSession(commandArgs.perEventSourceSessionId, commandArgs.etwSessionId, false, null, false);
            }
#endif // FEATURE_ACTIVITYSAMPLING

            if (!commandArgs.enable)
            {
                // If we are disabling, maybe we can turn on 'quick checks' to filter
                // quickly.  These are all just optimizations (since later checks will still filter)

#if FEATURE_ACTIVITYSAMPLING
                // Turn off (and forget) any information about Activity Tracing.
                if (commandArgs.listener == null)
                {
                    // reset all filtering information for activity-tracing-aware sessions
                    for (int i = 0; i < SessionMask.MAX; ++i)
                    {
                        EtwSession etwSession = m_etwSessionIdMap[i];
                        if (etwSession != null)
                            ActivityFilter.DisableFilter(ref etwSession.m_activityFilter, this);
                    }
                    m_activityFilteringForETWEnabled = new SessionMask(0);
                    m_curLiveSessions = new SessionMask(0);
                    // reset activity-tracing-aware sessions
                    if (m_etwSessionIdMap != null)
                        for (int i = 0; i < SessionMask.MAX; ++i)
                            m_etwSessionIdMap[i] = null;
                    // reset legacy sessions
                    if (m_legacySessions != null)
                        m_legacySessions.Clear();
                }
                else
                {
                    ActivityFilter.DisableFilter(ref commandArgs.listener.m_activityFilter, this);
                    commandArgs.dispatcher.m_activityFilteringEnabled = false;
                }
#endif // FEATURE_ACTIVITYSAMPLING

                // There is a good chance EnabledForAnyListener are not as accurate as
                // they could be, go ahead and get a better estimate.
                for (int i = 0; i < m_eventData.Length; i++)
                {
                    bool isEnabledForAnyListener = false;
                    for (EventDispatcher dispatcher = m_Dispatchers; dispatcher != null; dispatcher = dispatcher.m_Next)
                    {
                        if (dispatcher.m_EventEnabled[i])
                        {
                            isEnabledForAnyListener = true;
                            break;
                        }
                    }
                    m_eventData[i].EnabledForAnyListener = isEnabledForAnyListener;
                }

                // If no events are enabled, disable the global enabled bit.
                if (!AnyEventEnabled())
                {
                    m_level = 0;
                    m_matchAnyKeyword = 0;
                    m_eventSourceEnabled = false;
                }
            }
#if FEATURE_ACTIVITYSAMPLING
            UpdateKwdTriggers(commandArgs.enable);
#endif // FEATURE_ACTIVITYSAMPLING
        }
        else
        {
            if (commandArgs.Command == EventCommand.SendManifest)
            {
                // TODO: should we generate the manifest here if we hadn't already?
                if (m_rawManifest != null)
                    SendManifest(m_rawManifest);
            }

            // These are not used for non-update commands and thus should always be 'default' values
            // Contract.Assert(enable == true);
            // Contract.Assert(level == EventLevel.LogAlways);
            // Contract.Assert(matchAnyKeyword == EventKeywords.None);

            this.OnEventCommand(commandArgs);
            var eventCommandCallback = m_eventCommandExecuted;
            if (eventCommandCallback != null)
                eventCommandCallback(this, commandArgs);
        }

#if FEATURE_ACTIVITYSAMPLING
        if (m_completelyInited && (commandArgs.listener != null || shouldReport))
        {
            SessionMask m = SessionMask.FromId(commandArgs.perEventSourceSessionId);
            ReportActivitySamplingInfo(commandArgs.listener, m);
        }
#endif // FEATURE_ACTIVITYSAMPLING
    }
    catch (Exception e)
    {
        // When the ETW session is created after the EventSource has registered with the ETW system
        // we can send any error messages here.
        ReportOutOfBandMessage("ERROR: Exception in Command Processing for EventSource " + Name + ": " + e.Message, true);
        // We never throw when doing a command.
    }
}

#if FEATURE_ACTIVITYSAMPLING

/// <summary>
/// Updates the per-session activity-tracing bookkeeping when an ETW session is
/// enabled or disabled.  Session ids below SessionMask.MAX are activity-tracing
/// aware (tracked in m_etwSessionIdMap); all others are legacy sessions tracked
/// in m_legacySessions.
/// </summary>
internal void UpdateEtwSession(
    int sessionIdBit,
    int etwSessionId,
    bool bEnable,
    string activityFilters,
    bool participateInSampling)
{
    if (sessionIdBit < SessionMask.MAX)
    {
        // activity-tracing-aware etw session
        if (bEnable)
        {
            var etwSession = EtwSession.GetEtwSession(etwSessionId, true);
            ActivityFilter.UpdateFilter(ref etwSession.m_activityFilter, this, sessionIdBit, activityFilters);
            m_etwSessionIdMap[sessionIdBit] = etwSession;
            m_activityFilteringForETWEnabled[sessionIdBit] = participateInSampling;
        }
        else
        {
            var etwSession = EtwSession.GetEtwSession(etwSessionId);
            m_etwSessionIdMap[sessionIdBit] = null;
            m_activityFilteringForETWEnabled[sessionIdBit] = false;
            if (etwSession != null)
            {
                ActivityFilter.DisableFilter(ref etwSession.m_activityFilter, this);
                // the ETW session is going away; remove it from the global list
                EtwSession.RemoveEtwSession(etwSession);
            }
        }
        m_curLiveSessions[sessionIdBit] = bEnable;
    }
    else
    {
        // legacy etw session
        if (bEnable)
        {
            if (m_legacySessions == null)
                m_legacySessions = new List<EtwSession>(8);
            var etwSession = EtwSession.GetEtwSession(etwSessionId, true);
            if (!m_legacySessions.Contains(etwSession))
                m_legacySessions.Add(etwSession);
        }
        else
        {
            var etwSession = EtwSession.GetEtwSession(etwSessionId);
            if (etwSession != null)
            {
                if (m_legacySessions != null)
                    m_legacySessions.Remove(etwSession);
                // the ETW session is going away; remove it from the global list
                EtwSession.RemoveEtwSession(etwSession);
            }
        }
    }
}

/// <summary>
/// Extracts the activity-sampling settings from the ETW command arguments.
/// Returns false (and sessionIdBit == -1) when no valid "EtwSessionKeyword"
/// argument identifying an activity-tracing-aware session is present.
/// </summary>
internal static bool ParseCommandArgs(
    IDictionary<string, string> commandArguments,
    out bool participateInSampling,
    out string activityFilters,
    out int sessionIdBit)
{
    bool res = true;
    participateInSampling = false;
    string activityFilterString;
    if (commandArguments.TryGetValue("ActivitySamplingStartEvent", out activityFilters))
    {
        // if a start event is specified default the event source to participate in sampling
        participateInSampling = true;
    }

    if (commandArguments.TryGetValue("ActivitySampling", out activityFilterString))
    {
        if (string.Compare(activityFilterString, "false", StringComparison.OrdinalIgnoreCase) == 0 ||
            activityFilterString == "0")
            participateInSampling = false;
        else
            participateInSampling = true;
    }

    string sSessionKwd;
    int sessionKwd = -1;
    if (!commandArguments.TryGetValue("EtwSessionKeyword", out sSessionKwd) ||
        !int.TryParse(sSessionKwd, out sessionKwd) ||
        sessionKwd < SessionMask.SHIFT_SESSION_TO_KEYWORD ||
        sessionKwd >= SessionMask.SHIFT_SESSION_TO_KEYWORD + SessionMask.MAX)
    {
        sessionIdBit = -1;
        res = false;
    }
    else
    {
        sessionIdBit = sessionKwd - SessionMask.SHIFT_SESSION_TO_KEYWORD;
    }
    return res;
}

/// <summary>
/// Recomputes m_keywordTriggers from the active activity-tracing-aware sessions
/// (or clears it when disabling).
/// </summary>
internal void UpdateKwdTriggers(bool enable)
{
    if (enable)
    {
        // recompute m_keywordTriggers
        ulong gKeywords = unchecked((ulong)m_matchAnyKeyword);
        if (gKeywords == 0)
            gKeywords = 0xFFFFffffFFFFffff;       // zero means "no keyword filter" => treat as all keywords on

        m_keywordTriggers = 0;
        for (int sessId = 0; sessId < SessionMask.MAX; ++sessId)
        {
            EtwSession etwSession = m_etwSessionIdMap[sessId];
            if (etwSession == null)
                continue;

            ActivityFilter activityFilter = etwSession.m_activityFilter;
            ActivityFilter.UpdateKwdTriggers(activityFilter, m_guid, this, unchecked((EventKeywords)gKeywords));
        }
    }
    else
    {
        m_keywordTriggers = 0;
    }
}
#endif // FEATURE_ACTIVITYSAMPLING

/// <summary>
/// If 'value' is 'true' then set the eventSource so that 'dispatcher' will receive events with the eventId
/// of 'eventId'.  If value is 'false' disable the event for that dispatcher.   If 'eventId' is out of
/// range return false, otherwise true.
/// </summary>
internal bool EnableEventForDispatcher(EventDispatcher dispatcher, int eventId, bool value)
{
    if (dispatcher == null)
    {
        // dispatcher == null means the ETW dispatcher.
        if (eventId >= m_eventData.Length)
            return false;
#if FEATURE_MANAGED_ETW
        if (m_provider != null)
            m_eventData[eventId].EnabledForETW = value;
#endif
    }
    else
    {
        if (eventId >= dispatcher.m_EventEnabled.Length)
            return false;
        dispatcher.m_EventEnabled[eventId] = value;
        if (value)
            m_eventData[eventId].EnabledForAnyListener = true;
    }
    return true;
}

/// <summary>
/// Returns true if any event at all is on.
/// </summary>
private bool AnyEventEnabled()
{
    for (int i = 0; i < m_eventData.Length; i++)
        if (m_eventData[i].EnabledForETW || m_eventData[i].EnabledForAnyListener)
            return true;
    return false;
}

// True when the underlying ETW provider was never created or has been disposed.
private bool IsDisposed
{
#if FEATURE_MANAGED_ETW
    get { return m_provider == null || m_provider.m_disposed; }
#else
    get { return false; } // ETW is not present (true means that the EventSource is "off" / broken)
#endif // FEATURE_MANAGED_ETW
}

/// <summary>
/// Lazily creates the manifest and per-event descriptors (m_eventData) for this
/// EventSource, enforces that no other live EventSource is using the same GUID,
/// and makes sure all dispatchers have their enabled-event arrays allocated.
/// Must be called while holding EventListener.EventListenersLock.
/// </summary>
[SecuritySafeCritical]
private void EnsureDescriptorsInitialized()
{
#if !ES_BUILD_STANDALONE
    Contract.Assert(Monitor.IsEntered(EventListener.EventListenersLock));
#endif
    if (m_eventData == null)
    {
        Contract.Assert(m_rawManifest == null);
        m_rawManifest = CreateManifestAndDescriptors(this.GetType(), Name, this);
        Contract.Assert(m_eventData != null);

        // TODO Enforce singleton pattern
        foreach (WeakReference eventSourceRef in EventListener.s_EventSources)
        {
            EventSource eventSource = eventSourceRef.Target as EventSource;
            if (eventSource != null && eventSource.Guid == m_guid && !eventSource.IsDisposed)
            {
                if (eventSource != this)
                    throw new ArgumentException(Environment.GetResourceString("EventSource_EventSourceGuidInUse", m_guid));
            }
        }

        // Make certain all dispatchers also have their arrays initialized
        EventDispatcher dispatcher = m_Dispatchers;
        while (dispatcher != null)
        {
            if (dispatcher.m_EventEnabled == null)
                dispatcher.m_EventEnabled = new bool[m_eventData.Length];
            dispatcher = dispatcher.m_Next;
        }
    }
    // Cache the current process id once.
    if (s_currentPid == 0)
    {
#if ES_BUILD_STANDALONE && !ES_BUILD_PCL
        // for non-BCL EventSource we must assert SecurityPermission
        new SecurityPermission(PermissionState.Unrestricted).Assert();
#endif
        s_currentPid = Win32Native.GetCurrentProcessId();
    }
}

// Send out the ETW manifest XML out to ETW
// Today, we only send the manifest to ETW, custom listeners don't get it.
[SecuritySafeCritical]
private unsafe bool SendManifest(byte[] rawManifest)
{
    bool success = true;

    if (rawManifest == null)
        return false;

    Contract.Assert(!SelfDescribingEvents);

#if FEATURE_MANAGED_ETW
    fixed (byte* dataPtr = rawManifest)
    {
        // we don't want the manifest to show up in the event log channels so we specify as keywords
        // everything but the first 8 bits (reserved for the 8 channels)
        var manifestDescr = new EventDescriptor(0xFFFE, 1, 0, 0, 0xFE, 0xFFFE, 0x00ffFFFFffffFFFF);
        ManifestEnvelope envelope = new ManifestEnvelope();

        envelope.Format = ManifestEnvelope.ManifestFormats.SimpleXmlFormat;
        envelope.MajorVersion = 1;
        envelope.MinorVersion = 0;
        envelope.Magic = 0x5B;              // An unusual number that can be checked for consistency.
        int dataLeft = rawManifest.Length;
        envelope.ChunkNumber = 0;

        // Two data descriptors per write: [0] the envelope header, [1] the manifest chunk.
        EventProvider.EventData* dataDescrs = stackalloc EventProvider.EventData[2];

        dataDescrs[0].Ptr = (ulong)&envelope;
        dataDescrs[0].Size = (uint)sizeof(ManifestEnvelope);
        dataDescrs[0].Reserved = 0;

        dataDescrs[1].Ptr = (ulong)dataPtr;
        dataDescrs[1].Reserved = 0;

        int chunkSize = ManifestEnvelope.MaxChunkSize;
        TRY_AGAIN_WITH_SMALLER_CHUNK_SIZE:
        envelope.TotalChunks = (ushort)((dataLeft + (chunkSize - 1)) / chunkSize);
        while (dataLeft > 0)
        {
            dataDescrs[1].Size = (uint)Math.Min(dataLeft, chunkSize);
            if (m_provider != null)
            {
                if (!m_provider.WriteEvent(ref manifestDescr, null, null, 2, (IntPtr)dataDescrs))
                {
                    // Turns out that if users set the BufferSize to something less than 64K then WriteEvent
                    // can fail.
                    // If we get this failure on the first chunk try again with something smaller.
                    // The smallest BufferSize is 1K so if we get to 256 (to account for envelope overhead), we can give up making it smaller.
                    if (EventProvider.GetLastWriteEventError() == EventProvider.WriteEventErrorCode.EventTooBig)
                    {
                        if (envelope.ChunkNumber == 0 && chunkSize > 256)
                        {
                            chunkSize = chunkSize / 2;
                            goto TRY_AGAIN_WITH_SMALLER_CHUNK_SIZE;
                        }
                    }
                    success = false;
                    if (ThrowOnEventWriteErrors)
                        ThrowEventSourceException();
                    break;
                }
            }
            dataLeft -= chunkSize;
            dataDescrs[1].Ptr += (uint)chunkSize;
            envelope.ChunkNumber++;
        }
    }
#endif // FEATURE_MANAGED_ETW
    return success;
}

#if ES_BUILD_PCL
// Convenience overload that forwards a Type to the MemberInfo-based helper.
internal static Attribute GetCustomAttributeHelper(Type type, Type attributeType, EventManifestOptions flags = EventManifestOptions.None)
{
    return GetCustomAttributeHelper(type.GetTypeInfo(), attributeType, flags);
}
#endif

// Helper to deal with the fact that the type we are reflecting over might be loaded in the ReflectionOnly context.
// When that is the case, we have to build the custom attributes on a member by hand.
internal static Attribute GetCustomAttributeHelper(MemberInfo member, Type attributeType, EventManifestOptions flags = EventManifestOptions.None)
{
    if (!member.Module.Assembly.ReflectionOnly() && (flags & EventManifestOptions.AllowEventSourceOverride) == 0)
    {
        // Let the runtime do the work for us, since we can execute code in this context.
        Attribute firstAttribute = null;
        foreach (var attribute in member.GetCustomAttributes(attributeType, false))
        {
            firstAttribute = (Attribute)attribute;
            break;
        }
        return firstAttribute;
    }

#if !ES_BUILD_PCL
    // In the reflection only context, we have to do things by hand.
    string fullTypeNameToFind = attributeType.FullName;

#if EVENT_SOURCE_LEGACY_NAMESPACE_SUPPORT
    fullTypeNameToFind = fullTypeNameToFind.Replace("System.Diagnostics.Eventing", "System.Diagnostics.Tracing");
#endif

    foreach (CustomAttributeData data in CustomAttributeData.GetCustomAttributes(member))
    {
        if (AttributeTypeNamesMatch(attributeType, data.Constructor.ReflectedType))
        {
            Attribute attr = null;

            Contract.Assert(data.ConstructorArguments.Count <= 1);

            if (data.ConstructorArguments.Count == 1)
            {
                attr = (Attribute)Activator.CreateInstance(attributeType, new object[] { data.ConstructorArguments[0].Value });
            }
            else if (data.ConstructorArguments.Count == 0)
            {
                attr = (Attribute)Activator.CreateInstance(attributeType);
            }

            if (attr != null)
            {
                // Copy the named-argument values onto the freshly constructed attribute.
                Type t = attr.GetType();

                foreach (CustomAttributeNamedArgument namedArgument in data.NamedArguments)
                {
                    PropertyInfo p = t.GetProperty(namedArgument.MemberInfo.Name, BindingFlags.Public | BindingFlags.Instance);
                    object value = namedArgument.TypedValue.Value;

                    if (p.PropertyType.IsEnum)
                    {
                        value = Enum.Parse(p.PropertyType, value.ToString());
                    }

                    p.SetValue(attr, value, null);
                }

                return attr;
            }
        }
    }

    return null;
#else // ES_BUILD_PCL
    throw new ArgumentException(Environment.GetResourceString("EventSource", "EventSource_PCLPlatformNotSupportedReflection"));
#endif
}

/// <summary>
/// Evaluates if two related "EventSource"-domain types should be considered the same
/// </summary>
/// <param name="attributeType">The attribute type in the load context - it's associated with the running
/// EventSource type.
This type may be different fromt he base type of the user-defined EventSource.</param> /// <param name="reflectedAttributeType">The attribute type in the reflection context - it's associated with /// the user-defined EventSource, and is in the same assembly as the eventSourceType passed to /// </param> /// <returns>True - if the types should be considered equivalent, False - otherwise</returns> private static bool AttributeTypeNamesMatch(Type attributeType, Type reflectedAttributeType) { return // are these the same type? attributeType == reflectedAttributeType || // are the full typenames equal? string.Equals(attributeType.FullName, reflectedAttributeType.FullName, StringComparison.Ordinal) || // are the typenames equal and the namespaces under "Diagnostics.Tracing" (typically // either Microsoft.Diagnostics.Tracing or System.Diagnostics.Tracing)? string.Equals(attributeType.Name, reflectedAttributeType.Name, StringComparison.Ordinal) && attributeType.Namespace.EndsWith("Diagnostics.Tracing") && (reflectedAttributeType.Namespace.EndsWith("Diagnostics.Tracing") #if EVENT_SOURCE_LEGACY_NAMESPACE_SUPPORT || reflectedAttributeType.Namespace.EndsWith("Diagnostics.Eventing") #endif ); } private static Type GetEventSourceBaseType(Type eventSourceType, bool allowEventSourceOverride, bool reflectionOnly) { // return false for "object" and interfaces if (eventSourceType.BaseType() == null) return null; // now go up the inheritance chain until hitting a concrete type ("object" at worse) do { eventSourceType = eventSourceType.BaseType(); } while (eventSourceType != null && eventSourceType.IsAbstract()); if (eventSourceType != null) { if (!allowEventSourceOverride) { if (reflectionOnly && eventSourceType.FullName != typeof(EventSource).FullName || !reflectionOnly && eventSourceType != typeof(EventSource)) return null; } else { if (eventSourceType.Name != "EventSource") return null; } } return eventSourceType; } // Use reflection to look at the attributes of a class, and 
// generate a manifest for it (as UTF8) and
// return the UTF8 bytes.  It also sets up the code:EventData structures needed to dispatch events
// at run time.  'source' is the event source to place the descriptors.  If it is null,
// then the descriptors are not created, and just the manifest is generated.
private static byte[] CreateManifestAndDescriptors(Type eventSourceType, string eventSourceDllName, EventSource source,
    EventManifestOptions flags = EventManifestOptions.None)
{
    ManifestBuilder manifest = null;
    bool bNeedsManifest = source != null ? !source.SelfDescribingEvents : true;
    Exception exception = null; // exception that might get raised during validation b/c we couldn't/didn't recover from a previous error
    byte[] res = null;

    if (eventSourceType.IsAbstract() && (flags & EventManifestOptions.Strict) == 0)
        return null;

#if DEBUG && ES_BUILD_STANDALONE
    TestSupport.TestHooks.MaybeThrow(eventSourceType,
                                TestSupport.Category.ManifestError,
                                "EventSource_CreateManifestAndDescriptors",
                                new ArgumentException("EventSource_CreateManifestAndDescriptors"));
#endif

    try
    {
        MethodInfo[] methods = eventSourceType.GetMethods(BindingFlags.DeclaredOnly | BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Instance);
        EventAttribute defaultEventAttribute;
        int eventId = 1;        // The number given to an event that does not have an explicitly given ID.
        EventMetadata[] eventData = null;
        Dictionary<string, string> eventsByName = null;
        if (source != null || (flags & EventManifestOptions.Strict) != 0)
        {
            eventData = new EventMetadata[methods.Length + 1];
            eventData[0].Name = "";         // Event 0 is the 'write messages string' event, and has an empty name.
        }

        // See if we have localization information.
        ResourceManager resources = null;
        EventSourceAttribute eventSourceAttrib = (EventSourceAttribute)GetCustomAttributeHelper(eventSourceType, typeof(EventSourceAttribute), flags);
        if (eventSourceAttrib != null && eventSourceAttrib.LocalizationResources != null)
            resources = new ResourceManager(eventSourceAttrib.LocalizationResources, eventSourceType.Assembly());

        manifest = new ManifestBuilder(GetName(eventSourceType, flags), GetGuid(eventSourceType), eventSourceDllName,
                                       resources, flags);

        // Add an entry unconditionally for event ID 0 which will be for a string message.
        manifest.StartEvent("EventSourceMessage", new EventAttribute(0) { Level = EventLevel.LogAlways, Task = (EventTask)0xFFFE });
        manifest.AddEventParameter(typeof(string), "message");
        manifest.EndEvent();

        // eventSourceType must be sealed and must derive from this EventSource
        if ((flags & EventManifestOptions.Strict) != 0)
        {
            bool typeMatch = GetEventSourceBaseType(eventSourceType, (flags & EventManifestOptions.AllowEventSourceOverride) != 0, eventSourceType.Assembly().ReflectionOnly()) != null;

            if (!typeMatch)
                manifest.ManifestError(Environment.GetResourceString("EventSource_TypeMustDeriveFromEventSource"));
            if (!eventSourceType.IsAbstract() && !eventSourceType.IsSealed())
                manifest.ManifestError(Environment.GetResourceString("EventSource_TypeMustBeSealedOrAbstract"));
        }

        // Collect task, opcode, keyword and channel information
#if FEATURE_MANAGED_ETW_CHANNELS && FEATURE_ADVANCED_MANAGED_ETW_CHANNELS
        foreach (var providerEnumKind in new string[] { "Keywords", "Tasks", "Opcodes", "Channels" })
#else
        foreach (var providerEnumKind in new string[] { "Keywords", "Tasks", "Opcodes" })
#endif
        {
            Type nestedType = eventSourceType.GetNestedType(providerEnumKind);
            if (nestedType != null)
            {
                if (eventSourceType.IsAbstract())
                {
                    manifest.ManifestError(Environment.GetResourceString("EventSource_AbstractMustNotDeclareKTOC", nestedType.Name));
                }
                else
                {
                    foreach (FieldInfo staticField in nestedType.GetFields(BindingFlags.DeclaredOnly | BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Static))
                    {
                        AddProviderEnumKind(manifest, staticField, providerEnumKind);
                    }
                }
            }
        }
        // ensure we have keywords for the session-filtering reserved bits
        {
            manifest.AddKeyword("Session3", (long)0x1000 << 32);
            manifest.AddKeyword("Session2", (long)0x2000 << 32);
            manifest.AddKeyword("Session1", (long)0x4000 << 32);
            manifest.AddKeyword("Session0", (long)0x8000 << 32);
        }

        if (eventSourceType != typeof(EventSource))
        {
            for (int i = 0; i < methods.Length; i++)
            {
                MethodInfo method = methods[i];
                ParameterInfo[] args = method.GetParameters();

                // Get the EventDescriptor (from the Custom attributes)
                EventAttribute eventAttribute = (EventAttribute)GetCustomAttributeHelper(method, typeof(EventAttribute), flags);

                // Compat: until v4.5.1 we ignored any non-void returning methods as well as virtual methods for
                // the only reason of limiting the number of methods considered to be events. This broke a common
                // design of having event sources implement specific interfaces. To fix this in a compatible way
                // we will now allow both non-void returning and virtual methods to be Event methods, as long
                // as they are marked with the [Event] attribute
                if (/* method.IsVirtual || */ method.IsStatic)
                {
                    continue;
                }

                if (eventSourceType.IsAbstract())
                {
                    if (eventAttribute != null)
                        manifest.ManifestError(Environment.GetResourceString("EventSource_AbstractMustNotDeclareEventMethods", method.Name, eventAttribute.EventId));
                    continue;
                }
                else if (eventAttribute == null)
                {
                    // Methods that don't return void can't be events, if they're NOT marked with [Event].
                    // (see Compat comment above)
                    if (method.ReturnType != typeof(void))
                    {
                        continue;
                    }

                    // Continue to ignore virtual methods if they do NOT have the [Event] attribute
                    // (see Compat comment above)
                    if (method.IsVirtual)
                    {
                        continue;
                    }

                    // If we explicitly mark the method as not being an event, then honor that.
                    if (GetCustomAttributeHelper(method, typeof(NonEventAttribute), flags) != null)
                        continue;

                    defaultEventAttribute = new EventAttribute(eventId);
                    eventAttribute = defaultEventAttribute;
                }
                else if (eventAttribute.EventId <= 0)
                {
                    manifest.ManifestError(Environment.GetResourceString("EventSource_NeedPositiveId", method.Name), true);
                    continue;   // don't validate anything else for this event
                }
                if (method.Name.LastIndexOf('.') >= 0)
                    manifest.ManifestError(Environment.GetResourceString("EventSource_EventMustNotBeExplicitImplementation", method.Name, eventAttribute.EventId));

                eventId++;
                string eventName = method.Name;

                if (eventAttribute.Opcode == EventOpcode.Info)      // We are still using the default opcode.
                {
                    // By default pick a task ID derived from the EventID, starting with the highest task number and working back
                    bool noTask = (eventAttribute.Task == EventTask.None);
                    if (noTask)
                        eventAttribute.Task = (EventTask)(0xFFFE - eventAttribute.EventId);

                    // Unless we explicitly set the opcode to Info (to override the auto-generation of Start or Stop opcodes),
                    // pick a default opcode based on the event name (either Info or start or stop if the name ends with that suffix).
                    if (!eventAttribute.IsOpcodeSet)
                        eventAttribute.Opcode = GetOpcodeWithDefault(EventOpcode.Info, eventName);

                    // Make the stop opcode have the same task as the start opcode.
                    if (noTask)
                    {
                        if (eventAttribute.Opcode == EventOpcode.Start)
                        {
                            string taskName = eventName.Substring(0, eventName.Length - s_ActivityStartSuffix.Length); // Remove the Start suffix to get the task name
                            if (string.Compare(eventName, 0, taskName, 0, taskName.Length) == 0 &&
                                string.Compare(eventName, taskName.Length, s_ActivityStartSuffix, 0, Math.Max(eventName.Length - taskName.Length, s_ActivityStartSuffix.Length)) == 0)
                            {
                                // Add a task that is just the task name for the start event.  This suppresses the auto-task generation
                                // that would otherwise happen (and create 'TaskName'Start as task name rather than just 'TaskName').
                                manifest.AddTask(taskName, (int)eventAttribute.Task);
                            }
                        }
                        else if (eventAttribute.Opcode == EventOpcode.Stop)
                        {
                            // Find the start associated with this stop event.  We require start to be immediately before the stop
                            int startEventId = eventAttribute.EventId - 1;
                            if (eventData != null && startEventId < eventData.Length)
                            {
                                Contract.Assert(0 <= startEventId);     // Since we reserve id 0, we know that id-1 is >= 0
                                EventMetadata startEventMetadata = eventData[startEventId];

                                // If you remove the Stop and add a Start does that name match the Start Event's Name?
                                // Ideally we would throw an error
                                string taskName = eventName.Substring(0, eventName.Length - s_ActivityStopSuffix.Length); // Remove the Stop suffix to get the task name
                                if (startEventMetadata.Descriptor.Opcode == (byte)EventOpcode.Start &&
                                    string.Compare(startEventMetadata.Name, 0, taskName, 0, taskName.Length) == 0 &&
                                    string.Compare(startEventMetadata.Name, taskName.Length, s_ActivityStartSuffix, 0, Math.Max(startEventMetadata.Name.Length - taskName.Length, s_ActivityStartSuffix.Length)) == 0)
                                {
                                    // Make the stop event match the start event
                                    eventAttribute.Task = (EventTask)startEventMetadata.Descriptor.Task;
                                    noTask = false;
                                }
                            }
                            if (noTask && (flags & EventManifestOptions.Strict) != 0)        // Throw an error if we can compatibly.
                                throw new ArgumentException(Environment.GetResourceString("EventSource_StopsFollowStarts"));
                        }
                    }
                }

                bool hasRelatedActivityID = RemoveFirstArgIfRelatedActivityId(ref args);
                if (!(source != null && source.SelfDescribingEvents))
                {
                    manifest.StartEvent(eventName, eventAttribute);
                    for (int fieldIdx = 0; fieldIdx < args.Length; fieldIdx++)
                    {
                        manifest.AddEventParameter(args[fieldIdx].ParameterType, args[fieldIdx].Name);
                    }
                    manifest.EndEvent();
                }

                if (source != null || (flags & EventManifestOptions.Strict) != 0)
                {
                    // Do checking for user errors (optional, but not a big deal so we do it).
                    DebugCheckEvent(ref eventsByName, eventData, method, eventAttribute, manifest, flags);

#if FEATURE_MANAGED_ETW_CHANNELS
                    // add the channel keyword for Event Viewer channel based filters. This is added for creating the EventDescriptors only
                    // and is not required for the manifest
                    if (eventAttribute.Channel != EventChannel.None)
                    {
                        unchecked
                        {
                            eventAttribute.Keywords |= (EventKeywords)manifest.GetChannelKeyword(eventAttribute.Channel);
                        }
                    }
#endif
                    string eventKey = "event_" + eventName;
                    string msg = manifest.GetLocalizedMessage(eventKey, CultureInfo.CurrentUICulture, etwFormat: false);
                    // overwrite inline message with the localized message
                    if (msg != null) eventAttribute.Message = msg;

                    AddEventDescriptor(ref eventData, eventName, eventAttribute, args, hasRelatedActivityID);
                }
            }
        }

        // Tell the TraceLogging stuff where to start allocating its own IDs.
        NameInfo.ReserveEventIDsBelow(eventId);

        if (source != null)
        {
            TrimEventDescriptors(ref eventData);
            source.m_eventData = eventData;     // officially initialize it. We do this at most once (it is racy otherwise).
#if FEATURE_MANAGED_ETW_CHANNELS
                    source.m_channelData = manifest.GetChannelData();
#endif
                }

                // if this is an abstract event source we've already performed all the validation we can
                if (!eventSourceType.IsAbstract() && (source == null || !source.SelfDescribingEvents))
                {
                    // A manifest is needed unless the caller asked for one only when registration requires it
                    // (channels force a manifest even then, since channel data lives in the manifest).
                    bNeedsManifest = (flags & EventManifestOptions.OnlyIfNeededForRegistration) == 0
#if FEATURE_MANAGED_ETW_CHANNELS
                        || manifest.GetChannelData().Length > 0
#endif
;
                    // if the manifest is not needed and we're not requested to validate the event source return early
                    if (!bNeedsManifest && (flags & EventManifestOptions.Strict) == 0)
                        return null;

                    res = manifest.CreateManifest();
                }
            }
            catch (Exception e)
            {
                // if this is a runtime manifest generation let the exception propagate
                if ((flags & EventManifestOptions.Strict) == 0)
                    throw;
                // else store it to include it in the Argument exception we raise below
                exception = e;
            }

            // In strict mode, surface every accumulated manifest error (or the stored exception)
            // as a single ArgumentException so authoring mistakes fail loudly at validation time.
            if ((flags & EventManifestOptions.Strict) != 0 && (manifest.Errors.Count > 0 || exception != null))
            {
                string msg = String.Empty;
                if (manifest.Errors.Count > 0)
                {
                    // Join all manifest errors, one per line.
                    bool firstError = true;
                    foreach (string error in manifest.Errors)
                    {
                        if (!firstError)
                            msg += Environment.NewLine;
                        firstError = false;
                        msg += error;
                    }
                }
                else
                    msg = "Unexpected error: " + exception.Message;

                throw new ArgumentException(msg, exception);
            }

            return bNeedsManifest ? res : null;
        }

        // If the first parameter of 'args' is a Guid named 'relatedActivityId' (case-insensitive),
        // remove it from the array (it is delivered out-of-band, not as event payload) and return
        // true; otherwise leave 'args' untouched and return false.
        private static bool RemoveFirstArgIfRelatedActivityId(ref ParameterInfo[] args)
        {
            // If the first parameter is (case insensitive) 'relatedActivityId' then skip it.
            if (args.Length > 0 && args[0].ParameterType == typeof(Guid) && string.Compare(args[0].Name, "relatedActivityId", StringComparison.OrdinalIgnoreCase) == 0)
            {
                var newargs = new ParameterInfo[args.Length - 1];
                Array.Copy(args, 1, newargs, 0, args.Length - 1);
                args = newargs;
                return true;
            }

            return false;
        }

        // adds an enumeration (keyword, opcode, task or channel) represented by 'staticField'
        // to the manifest.
        // Adds the constant represented by 'staticField' to 'manifest' as a keyword, opcode, task
        // or channel.  'providerEnumKind' names the nested type the field was found on ("Opcodes",
        // "Tasks", "Keywords" or "Channels"); a field whose type does not match that nested type is
        // reported as a manifest error rather than silently ignored.
        private static void AddProviderEnumKind(ManifestBuilder manifest, FieldInfo staticField, string providerEnumKind)
        {
            bool reflectionOnly = staticField.Module.Assembly.ReflectionOnly();
            Type staticFieldType = staticField.FieldType;
            // NOTE(review): the operator precedence here is (!reflectionOnly && typeEquals) || nameMatch —
            // presumably intentional, since typeof() identity cannot hold for reflection-only loaded
            // assemblies and the name-based match covers that case; confirm before "fixing".
            if (!reflectionOnly && (staticFieldType == typeof(EventOpcode)) || AttributeTypeNamesMatch(staticFieldType, typeof(EventOpcode)))
            {
                if (providerEnumKind != "Opcodes") goto Error;
                int value = (int)staticField.GetRawConstantValue();
                manifest.AddOpcode(staticField.Name, value);
            }
            else if (!reflectionOnly && (staticFieldType == typeof(EventTask)) || AttributeTypeNamesMatch(staticFieldType, typeof(EventTask)))
            {
                if (providerEnumKind != "Tasks") goto Error;
                int value = (int)staticField.GetRawConstantValue();
                manifest.AddTask(staticField.Name, value);
            }
            else if (!reflectionOnly && (staticFieldType == typeof(EventKeywords)) || AttributeTypeNamesMatch(staticFieldType, typeof(EventKeywords)))
            {
                if (providerEnumKind != "Keywords") goto Error;
                // Keywords are a 64-bit mask; go through long since GetRawConstantValue boxes the
                // underlying signed value.
                ulong value = unchecked((ulong)(long)staticField.GetRawConstantValue());
                manifest.AddKeyword(staticField.Name, value);
            }
#if FEATURE_MANAGED_ETW_CHANNELS && FEATURE_ADVANCED_MANAGED_ETW_CHANNELS
            else if (!reflectionOnly && (staticFieldType == typeof(EventChannel)) || AttributeTypeNamesMatch(staticFieldType, typeof(EventChannel)))
            {
                if (providerEnumKind != "Channels") goto Error;
                var channelAttribute = (EventChannelAttribute)GetCustomAttributeHelper(staticField, typeof(EventChannelAttribute));
                manifest.AddChannel(staticField.Name, (byte)staticField.GetRawConstantValue(), channelAttribute);
            }
#endif
            return;
            Error:
            manifest.ManifestError(Environment.GetResourceString("EventSource_EnumKindMismatch", staticField.Name, staticField.FieldType.Name, providerEnumKind));
        }

        // Helper used by code:CreateManifestAndDescriptors to add a code:EventData descriptor for a method
        // with the code:EventAttribute 'eventAttribute'.
resourceManger may be null in which case we populate it // it is populated if we need to look up message resources private static void AddEventDescriptor(ref EventMetadata[] eventData, string eventName, EventAttribute eventAttribute, ParameterInfo[] eventParameters, bool hasRelatedActivityID) { if (eventData == null || eventData.Length <= eventAttribute.EventId) { EventMetadata[] newValues = new EventMetadata[Math.Max(eventData.Length + 16, eventAttribute.EventId + 1)]; Array.Copy(eventData, newValues, eventData.Length); eventData = newValues; } eventData[eventAttribute.EventId].Descriptor = new EventDescriptor( eventAttribute.EventId, eventAttribute.Version, #if FEATURE_MANAGED_ETW_CHANNELS (byte)eventAttribute.Channel, #else (byte)0, #endif (byte)eventAttribute.Level, (byte)eventAttribute.Opcode, (int)eventAttribute.Task, unchecked((long)((ulong)eventAttribute.Keywords | SessionMask.All.ToEventKeywords()))); eventData[eventAttribute.EventId].Tags = eventAttribute.Tags; eventData[eventAttribute.EventId].Name = eventName; eventData[eventAttribute.EventId].Parameters = eventParameters; eventData[eventAttribute.EventId].Message = eventAttribute.Message; eventData[eventAttribute.EventId].ActivityOptions = eventAttribute.ActivityOptions; eventData[eventAttribute.EventId].HasRelatedActivityID = hasRelatedActivityID; } // Helper used by code:CreateManifestAndDescriptors that trims the m_eventData array to the correct // size after all event descriptors have been added. private static void TrimEventDescriptors(ref EventMetadata[] eventData) { int idx = eventData.Length; while (0 < idx) { --idx; if (eventData[idx].Descriptor.EventId != 0) break; } if (eventData.Length - idx > 2) // allow one wasted slot. 
            {
                // Shrink to the last used slot (+1) to avoid carrying empty tail entries.
                EventMetadata[] newValues = new EventMetadata[idx + 1];
                Array.Copy(eventData, newValues, newValues.Length);
                eventData = newValues;
            }
        }

        // Helper used by code:EventListener.AddEventSource and code:EventListener.EventListener
        // when a listener gets attached to a eventSource.  Takes the global EventListenersLock
        // itself, prepends a new dispatcher for 'listener' to this source's dispatcher list, and
        // fires the OnEventSourceCreated callback.
        internal void AddListener(EventListener listener)
        {
            lock (EventListener.EventListenersLock)
            {
                bool[] enabledArray = null;
                // m_eventData may not be initialized yet; the per-event enabled bits are only
                // allocated once the event table exists.
                if (m_eventData != null)
                    enabledArray = new bool[m_eventData.Length];
                m_Dispatchers = new EventDispatcher(m_Dispatchers, enabledArray, listener);
                listener.OnEventSourceCreated(this);
            }
        }

        // Helper used by code:CreateManifestAndDescriptors to find user mistakes like reusing an event
        // index for two distinct events etc.  Throws exceptions when it finds something wrong.
        private static void DebugCheckEvent(ref Dictionary<string, string> eventsByName, EventMetadata[] eventData,
            MethodInfo method, EventAttribute eventAttribute, ManifestBuilder manifest, EventManifestOptions options)
        {
            int evtId = eventAttribute.EventId;
            string evtName = method.Name;
            // GetHelperCallFirstArg returns the literal first argument of the WriteEvent call in the
            // method body, or -1 if the IL doesn't match the expected pattern.
            int eventArg = GetHelperCallFirstArg(method);
            if (eventArg >= 0 && evtId != eventArg)
            {
                manifest.ManifestError(Environment.GetResourceString("EventSource_MismatchIdToWriteEvent", evtName, evtId, eventArg), true);
            }

            // A non-zero descriptor EventId in this slot means another event already claimed this ID.
            if (evtId < eventData.Length && eventData[evtId].Descriptor.EventId != 0)
            {
                manifest.ManifestError(Environment.GetResourceString("EventSource_EventIdReused", evtName, evtId, eventData[evtId].Name), true);
            }

            // We give a task to things if they don't have one.
            // TODO this is moderately expensive (N*N).   We probably should not even bother....
            Contract.Assert(eventAttribute.Task != EventTask.None || eventAttribute.Opcode != EventOpcode.Info);
            for (int idx = 0; idx < eventData.Length; ++idx)
            {
                // skip unused Event IDs.
if (eventData[idx].Name == null) continue; if (eventData[idx].Descriptor.Task == (int)eventAttribute.Task && eventData[idx].Descriptor.Opcode == (int)eventAttribute.Opcode) { manifest.ManifestError(Environment.GetResourceString("EventSource_TaskOpcodePairReused", evtName, evtId, eventData[idx].Name, idx)); // If we are not strict stop on first error. We have had problems with really large providers taking forever. because of many errors. if ((options & EventManifestOptions.Strict) == 0) break; } } // for non-default event opcodes the user must define a task! if (eventAttribute.Opcode != EventOpcode.Info) { bool failure = false; if (eventAttribute.Task == EventTask.None) failure = true; else { // If you have the auto-assigned Task, then you did not explicitly set one. // This is OK for Start events because we have special logic to assign the task to a prefix derived from the event name // But all other cases we want to catch the omission. var autoAssignedTask = (EventTask)(0xFFFE - evtId); if ((eventAttribute.Opcode != EventOpcode.Start && eventAttribute.Opcode != EventOpcode.Stop) && eventAttribute.Task == autoAssignedTask) failure = true; } if (failure) manifest.ManifestError(Environment.GetResourceString("EventSource_EventMustHaveTaskIfNonDefaultOpcode", evtName, evtId)); } // If we ever want to enforce the rule: MethodName = TaskName + OpcodeName here's how: // (the reason we don't is backwards compat and the need for handling this as a non-fatal error // by eventRegister.exe) // taskName & opcodeName could be passed in by the caller which has opTab & taskTab handy // if (!(((int)eventAttribute.Opcode == 0 && evtName == taskName) || (evtName == taskName+opcodeName))) // { // throw new WarningException(Environment.GetResourceString("EventSource_EventNameDoesNotEqualTaskPlusOpcode")); // } if (eventsByName == null) eventsByName = new Dictionary<string, string>(); if (eventsByName.ContainsKey(evtName)) 
manifest.ManifestError(Environment.GetResourceString("EventSource_EventNameReused", evtName), true); eventsByName[evtName] = evtName; } /// <summary> /// This method looks at the IL and tries to pattern match against the standard /// 'boilerplate' event body /// <code> /// { if (Enabled()) WriteEvent(#, ...) } /// </code> /// If the pattern matches, it returns the literal number passed as the first parameter to /// the WriteEvent. This is used to find common user errors (mismatching this /// number with the EventAttribute ID). It is only used for validation. /// </summary> /// <param name="method">The method to probe.</param> /// <returns>The literal value or -1 if the value could not be determined. </returns> [SecuritySafeCritical] [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "Switch statement is clearer than alternatives")] static private int GetHelperCallFirstArg(MethodInfo method) { #if !ES_BUILD_PCL // Currently searches for the following pattern // // ... // CAN ONLY BE THE INSTRUCTIONS BELOW // LDARG0 // LDC.I4 XXX // ... // CAN ONLY BE THE INSTRUCTIONS BELOW CAN'T BE A BRANCH OR A CALL // CALL // NOP // 0 or more times // RET // // If we find this pattern we return the XXX. Otherwise we return -1. 
(new ReflectionPermission(ReflectionPermissionFlag.MemberAccess)).Assert(); byte[] instrs = method.GetMethodBody().GetILAsByteArray(); int retVal = -1; for (int idx = 0; idx < instrs.Length; ) { switch (instrs[idx]) { case 0: // NOP case 1: // BREAK case 2: // LDARG_0 case 3: // LDARG_1 case 4: // LDARG_2 case 5: // LDARG_3 case 6: // LDLOC_0 case 7: // LDLOC_1 case 8: // LDLOC_2 case 9: // LDLOC_3 case 10: // STLOC_0 case 11: // STLOC_1 case 12: // STLOC_2 case 13: // STLOC_3 break; case 14: // LDARG_S case 16: // STARG_S idx++; break; case 20: // LDNULL break; case 21: // LDC_I4_M1 case 22: // LDC_I4_0 case 23: // LDC_I4_1 case 24: // LDC_I4_2 case 25: // LDC_I4_3 case 26: // LDC_I4_4 case 27: // LDC_I4_5 case 28: // LDC_I4_6 case 29: // LDC_I4_7 case 30: // LDC_I4_8 if (idx > 0 && instrs[idx - 1] == 2) // preceeded by LDARG0 retVal = instrs[idx] - 22; break; case 31: // LDC_I4_S if (idx > 0 && instrs[idx - 1] == 2) // preceeded by LDARG0 retVal = instrs[idx + 1]; idx++; break; case 32: // LDC_I4 idx += 4; break; case 37: // DUP break; case 40: // CALL idx += 4; if (retVal >= 0) { // Is this call just before return? for (int search = idx + 1; search < instrs.Length; search++) { if (instrs[search] == 42) // RET return retVal; if (instrs[search] != 0) // NOP break; } } retVal = -1; break; case 44: // BRFALSE_S case 45: // BRTRUE_S retVal = -1; idx++; break; case 57: // BRFALSE case 58: // BRTRUE retVal = -1; idx += 4; break; case 103: // CONV_I1 case 104: // CONV_I2 case 105: // CONV_I4 case 106: // CONV_I8 case 109: // CONV_U4 case 110: // CONV_U8 break; case 140: // BOX case 141: // NEWARR idx += 4; break; case 162: // STELEM_REF break; case 254: // PREFIX idx++; // Covers the CEQ instructions used in debug code for some reason. 
if (idx >= instrs.Length || instrs[idx] >= 6) goto default; break; default: /* Contract.Assert(false, "Warning: User validation code sub-optimial: Unsuported opcode " + instrs[idx] + " at " + idx + " in method " + method.Name); */ return -1; } idx++; } #endif return -1; } #if false // This routine is not needed at all, it was used for unit test debugging. [Conditional("DEBUG")] private static void OutputDebugString(string msg) { #if !ES_BUILD_PCL msg = msg.TrimEnd('\r', '\n') + string.Format(CultureInfo.InvariantCulture, ", Thrd({0})" + Environment.NewLine, Thread.CurrentThread.ManagedThreadId); System.Diagnostics.Debugger.Log(0, null, msg); #endif } #endif /// <summary> /// Sends an error message to the debugger (outputDebugString), as well as the EventListeners /// It will do this even if the EventSource is not enabled. /// TODO remove flush parameter it is not used. /// </summary> [SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "This does not need to be correct when racing with other threads")] internal void ReportOutOfBandMessage(string msg, bool flush) { try { #if !ES_BUILD_PCL // send message to debugger without delay System.Diagnostics.Debugger.Log(0, null, msg + "\r\n"); #endif // Send it to all listeners. if (m_outOfBandMessageCount < 16 - 1) // Note this is only if size byte m_outOfBandMessageCount++; else { if (m_outOfBandMessageCount == 16) return; m_outOfBandMessageCount = 16; // Mark that we hit the limit. Notify them that this is the case. msg = "Reached message limit. End of EventSource error messages."; } WriteEventString(EventLevel.LogAlways, -1, msg); WriteStringToAllListeners("EventSourceMessage", msg); } catch (Exception) { } // If we fail during last chance logging, well, we have to give up.... 
        }

        // Validates the EventSourceSettings passed to the constructor: the two ETW event-format
        // flags are mutually exclusive, and if neither was specified the self-describing format is
        // chosen as the default.  Returns the (possibly augmented) settings.
        private EventSourceSettings ValidateSettings(EventSourceSettings settings)
        {
            var evtFormatMask = EventSourceSettings.EtwManifestEventFormat |
                                EventSourceSettings.EtwSelfDescribingEventFormat;
            if ((settings & evtFormatMask) == evtFormatMask)
                throw new ArgumentException(Environment.GetResourceString("EventSource_InvalidEventFormat"), "settings");

            // If you did not explicitly ask for manifest, you get self-describing.
            if ((settings & evtFormatMask) == 0)
                settings |= EventSourceSettings.EtwSelfDescribingEventFormat;
            return settings;
        }

        // True if ThrowOnEventWriteErrors was requested in the configuration flags; the setter
        // updates the corresponding bit in m_config.
        private bool ThrowOnEventWriteErrors
        {
            get { return (m_config & EventSourceSettings.ThrowOnEventWriteErrors) != 0; }
            set
            {
                if (value) m_config |= EventSourceSettings.ThrowOnEventWriteErrors;
                else m_config &= ~EventSourceSettings.ThrowOnEventWriteErrors;
            }
        }

        // True if the source uses the self-describing (TraceLogging) ETW format rather than the
        // manifest format.  Exactly one of the two format bits must be set (ValidateSettings
        // guarantees this), which the assert below re-checks.
        private bool SelfDescribingEvents
        {
            get
            {
                Contract.Assert(((m_config & EventSourceSettings.EtwManifestEventFormat) != 0) !=
                                ((m_config & EventSourceSettings.EtwSelfDescribingEventFormat) != 0));
                return (m_config & EventSourceSettings.EtwSelfDescribingEventFormat) != 0;
            }
            set
            {
                // Setting one format bit always clears the other, preserving the exclusivity invariant.
                if (!value)
                {
                    m_config |= EventSourceSettings.EtwManifestEventFormat;
                    m_config &= ~EventSourceSettings.EtwSelfDescribingEventFormat;
                }
                else
                {
                    m_config |= EventSourceSettings.EtwSelfDescribingEventFormat;
                    m_config &= ~EventSourceSettings.EtwManifestEventFormat;
                }
            }
        }

#if FEATURE_ACTIVITYSAMPLING
        // Writes a description of the activity-sampling filters in effect (per ETW session, or for
        // the given listener) back to the listener as diagnostic strings.
        private void ReportActivitySamplingInfo(EventListener listener, SessionMask sessions)
        {
            Contract.Assert(listener == null || (uint)sessions == (uint)SessionMask.FromId(0));
            for (int perEventSourceSessionId = 0; perEventSourceSessionId < SessionMask.MAX; ++perEventSourceSessionId)
            {
                if (!sessions[perEventSourceSessionId])
                    continue;

                ActivityFilter af;
                if (listener == null)
                {
                    // ETW case: the filter lives on the session object.
                    EtwSession etwSession = m_etwSessionIdMap[perEventSourceSessionId];
                    Contract.Assert(etwSession != null);
                    af = etwSession.m_activityFilter;
                }
                else
                {
                    af = listener.m_activityFilter;
                }

                if (af == null)
                    continue;

                SessionMask m = new SessionMask();
m[perEventSourceSessionId] = true; foreach (var t in af.GetFilterAsTuple(m_guid)) { WriteStringToListener(listener, string.Format(CultureInfo.InvariantCulture, "Session {0}: {1} = {2}", perEventSourceSessionId, t.Item1, t.Item2), m); } bool participateInSampling = (listener == null) ? m_activityFilteringForETWEnabled[perEventSourceSessionId] : GetDispatcher(listener).m_activityFilteringEnabled; WriteStringToListener(listener, string.Format(CultureInfo.InvariantCulture, "Session {0}: Activity Sampling support: {1}", perEventSourceSessionId, participateInSampling ? "enabled" : "disabled"), m); } } #endif // FEATURE_ACTIVITYSAMPLING // private instance state private string m_name; // My friendly name (privided in ctor) internal int m_id; // A small integer that is unique to this instance. private Guid m_guid; // GUID representing the ETW eventSource to the OS. internal volatile EventMetadata[] m_eventData; // None per-event data private volatile byte[] m_rawManifest; // Bytes to send out representing the event schema private EventHandler<EventCommandEventArgs> m_eventCommandExecuted; private EventSourceSettings m_config; // configuration information // Enabling bits private bool m_eventSourceEnabled; // am I enabled (any of my events are enabled for any dispatcher) internal EventLevel m_level; // highest level enabled by any output dispatcher internal EventKeywords m_matchAnyKeyword; // the logical OR of all levels enabled by any output dispatcher (zero is a special case) meaning 'all keywords' // Dispatching state internal volatile EventDispatcher m_Dispatchers; // Linked list of code:EventDispatchers we write the data to (we also do ETW specially) #if FEATURE_MANAGED_ETW private volatile OverideEventProvider m_provider; // This hooks up ETW commands to our 'OnEventCommand' callback #endif private bool m_completelyInited; // The EventSource constructor has returned without exception. 
private Exception m_constructionException; // If there was an exception construction, this is it private byte m_outOfBandMessageCount; // The number of out of band messages sent (we throttle them private EventCommandEventArgs m_deferredCommands;// If we get commands before we are fully we store them here and run the when we are fully inited. private string[] m_traits; // Used to implement GetTraits internal static uint s_currentPid; // current process id, used in synthesizing quasi-GUIDs [ThreadStatic] private static byte m_EventSourceExceptionRecurenceCount = 0; // current recursion count inside ThrowEventSourceException #if FEATURE_MANAGED_ETW_CHANNELS internal volatile ulong[] m_channelData; #endif #if FEATURE_ACTIVITYSAMPLING private SessionMask m_curLiveSessions; // the activity-tracing aware sessions' bits private EtwSession[] m_etwSessionIdMap; // the activity-tracing aware sessions private List<EtwSession> m_legacySessions; // the legacy ETW sessions listening to this source internal long m_keywordTriggers; // a bit is set if it corresponds to a keyword that's part of an enabled triggering event internal SessionMask m_activityFilteringForETWEnabled; // does THIS EventSource have activity filtering turned on for each ETW session static internal Action<Guid> s_activityDying; // Fires when something calls SetCurrentThreadToActivity() // Also used to mark that activity tracing is on for some case #endif // FEATURE_ACTIVITYSAMPLING // We use a single instance of ActivityTracker for all EventSources instances to allow correlation between multiple event providers. // We have m_activityTracker field simply because instance field is more efficient than static field fetch. 
ActivityTracker m_activityTracker; internal const string s_ActivityStartSuffix = "Start"; internal const string s_ActivityStopSuffix = "Stop"; // used for generating GUID from eventsource name private static readonly byte[] namespaceBytes = new byte[] { 0x48, 0x2C, 0x2D, 0xB2, 0xC3, 0x90, 0x47, 0xC8, 0x87, 0xF8, 0x1A, 0x15, 0xBF, 0xC1, 0x30, 0xFB, }; #endregion } /// <summary> /// Enables specifying event source configuration options to be used in the EventSource constructor. /// </summary> [Flags] public enum EventSourceSettings { /// <summary> /// This specifies none of the special configuration options should be enabled. /// </summary> Default = 0, /// <summary> /// Normally an EventSource NEVER throws; setting this option will tell it to throw when it encounters errors. /// </summary> ThrowOnEventWriteErrors = 1, /// <summary> /// Setting this option is a directive to the ETW listener should use manifest-based format when /// firing events. This is the default option when defining a type derived from EventSource /// (using the protected EventSource constructors). /// Only one of EtwManifestEventFormat or EtwSelfDescribingEventFormat should be specified /// </summary> EtwManifestEventFormat = 4, /// <summary> /// Setting this option is a directive to the ETW listener should use self-describing event format /// when firing events. This is the default option when creating a new instance of the EventSource /// type (using the public EventSource constructors). /// Only one of EtwManifestEventFormat or EtwSelfDescribingEventFormat should be specified /// </summary> EtwSelfDescribingEventFormat = 8, } /// <summary> /// An EventListener represents a target for the events generated by EventSources (that is subclasses /// of <see cref="EventSource"/>), in the current appdomain. When a new EventListener is created /// it is logically attached to all eventSources in that appdomain. When the EventListener is Disposed, then /// it is disconnected from the event eventSources. 
Note that there is a internal list of STRONG references /// to EventListeners, which means that relying on the lack of references to EventListeners to clean up /// EventListeners will NOT work. You must call EventListener.Dispose explicitly when a dispatcher is no /// longer needed. /// <para> /// Once created, EventListeners can enable or disable on a per-eventSource basis using verbosity levels /// (<see cref="EventLevel"/>) and bitfields (<see cref="EventKeywords"/>) to further restrict the set of /// events to be sent to the dispatcher. The dispatcher can also send arbitrary commands to a particular /// eventSource using the 'SendCommand' method. The meaning of the commands are eventSource specific. /// </para><para> /// The Null Guid (that is (new Guid()) has special meaning as a wildcard for 'all current eventSources in /// the appdomain'. Thus it is relatively easy to turn on all events in the appdomain if desired. /// </para><para> /// It is possible for there to be many EventListener's defined in a single appdomain. Each dispatcher is /// logically independent of the other listeners. Thus when one dispatcher enables or disables events, it /// affects only that dispatcher (other listeners get the events they asked for). It is possible that /// commands sent with 'SendCommand' would do a semantic operation that would affect the other listeners /// (like doing a GC, or flushing data ...), but this is the exception rather than the rule. /// </para><para> /// Thus the model is that each EventSource keeps a list of EventListeners that it is sending events /// to. Associated with each EventSource-dispatcher pair is a set of filtering criteria that determine for /// that eventSource what events that dispatcher will receive. /// </para><para> /// Listeners receive the events on their 'OnEventWritten' method. Thus subclasses of EventListener must /// override this method to do something useful with the data. 
/// </para><para> /// In addition, when new eventSources are created, the 'OnEventSourceCreate' method is called. The /// invariant associated with this callback is that every eventSource gets exactly one /// 'OnEventSourceCreate' call for ever eventSource that can potentially send it log messages. In /// particular when a EventListener is created, typically a series of OnEventSourceCreate' calls are /// made to notify the new dispatcher of all the eventSources that existed before the EventListener was /// created. /// </para> /// </summary> public abstract class EventListener : IDisposable { private event EventHandler<EventSourceCreatedEventArgs> _EventSourceCreated; /// <summary> /// This event is raised whenever a new eventSource is 'attached' to the dispatcher. /// This can happen for all existing EventSources when the EventListener is created /// as well as for any EventSources that come into existence after the EventListener /// has been created. /// /// These 'catch up' events are called during the construction of the EventListener. /// Subclasses need to be prepared for that. /// /// In a multi-threaded environment, it is possible that 'EventSourceEventWrittenCallback' /// events for a particular eventSource to occur BEFORE the EventSourceCreatedCallback is issued. /// </summary> public event EventHandler<EventSourceCreatedEventArgs> EventSourceCreated { add { CallBackForExistingEventSources(false, value); this._EventSourceCreated = (EventHandler<EventSourceCreatedEventArgs>)Delegate.Combine(_EventSourceCreated, value); } remove { this._EventSourceCreated = (EventHandler<EventSourceCreatedEventArgs>)Delegate.Remove(_EventSourceCreated, value); } } /// <summary> /// This event is raised whenever an event has been written by a EventSource for which /// the EventListener has enabled events. 
/// </summary> public event EventHandler<EventWrittenEventArgs> EventWritten; /// <summary> /// Create a new EventListener in which all events start off turned off (use EnableEvents to turn /// them on). /// </summary> protected EventListener() { // This will cause the OnEventSourceCreated callback to fire. CallBackForExistingEventSources(true, (obj, args) => args.EventSource.AddListener(this) ); } /// <summary> /// Dispose should be called when the EventListener no longer desires 'OnEvent*' callbacks. Because /// there is an internal list of strong references to all EventListeners, calling 'Dispose' directly /// is the only way to actually make the listen die. Thus it is important that users of EventListener /// call Dispose when they are done with their logging. /// </summary> #if ES_BUILD_STANDALONE [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1063:ImplementIDisposableCorrectly")] #endif public virtual void Dispose() { lock (EventListenersLock) { Contract.Assert(s_Listeners != null); if (s_Listeners != null) { if (this == s_Listeners) { EventListener cur = s_Listeners; s_Listeners = this.m_Next; RemoveReferencesToListenerInEventSources(cur); } else { // Find 'this' from the s_Listeners linked list. EventListener prev = s_Listeners; for (; ; ) { EventListener cur = prev.m_Next; if (cur == null) break; if (cur == this) { // Found our Listener, remove references to to it in the eventSources prev.m_Next = cur.m_Next; // Remove entry. RemoveReferencesToListenerInEventSources(cur); break; } prev = cur; } } } Validate(); } } // We don't expose a Dispose(bool), because the contract is that you don't have any non-syncronous // 'cleanup' associated with this object /// <summary> /// Enable all events from the eventSource identified by 'eventSource' to the current /// dispatcher that have a verbosity level of 'level' or lower. 
        ///
        /// This call can have the effect of REDUCING the number of events sent to the
        /// dispatcher if 'level' indicates a less verbose level than was previously enabled.
        ///
        /// This call never has an effect on other EventListeners.
        ///
        /// </summary>
        public void EnableEvents(EventSource eventSource, EventLevel level)
        {
            // Delegates to the keyword-aware overload with no keyword restriction.
            EnableEvents(eventSource, level, EventKeywords.None);
        }
        /// <summary>
        /// Enable all events from the eventSource identified by 'eventSource' to the current
        /// dispatcher that have a verbosity level of 'level' or lower and have an event keyword
        /// matching any of the bits in 'matchAnyKeyword'.
        ///
        /// This call can have the effect of REDUCING the number of events sent to the
        /// dispatcher if 'level' indicates a less verbose level than was previously enabled or
        /// if 'matchAnyKeyword' has fewer keywords set than were previously set.
        ///
        /// This call never has an effect on other EventListeners.
        /// </summary>
        public void EnableEvents(EventSource eventSource, EventLevel level, EventKeywords matchAnyKeyword)
        {
            // Delegates to the full overload with no extra key-value arguments.
            EnableEvents(eventSource, level, matchAnyKeyword, null);
        }
        /// <summary>
        /// Enable all events from the eventSource identified by 'eventSource' to the current
        /// dispatcher that have a verbosity level of 'level' or lower and have an event keyword
        /// matching any of the bits in 'matchAnyKeyword' as well as any (eventSource specific)
        /// effect passing additional 'key-value' arguments 'arguments' might have.
        ///
        /// This call can have the effect of REDUCING the number of events sent to the
        /// dispatcher if 'level' indicates a less verbose level than was previously enabled or
        /// if 'matchAnyKeyword' has fewer keywords set than were previously set.
        ///
        /// This call never has an effect on other EventListeners.
        /// </summary>
        public void EnableEvents(EventSource eventSource, EventLevel level, EventKeywords matchAnyKeyword, IDictionary<string, string> arguments)
        {
            if (eventSource == null)
            {
                throw new ArgumentNullException("eventSource");
            }
            Contract.EndContractBlock();

            // Route through the source's command machinery so filtering state stays consistent.
            eventSource.SendCommand(this, 0, 0, EventCommand.Update, true, level, matchAnyKeyword, arguments);
        }
        /// <summary>
        /// Disables all events coming from eventSource identified by 'eventSource'.
        ///
        /// This call never has an effect on other EventListeners.
        /// </summary>
        public void DisableEvents(EventSource eventSource)
        {
            if (eventSource == null)
            {
                throw new ArgumentNullException("eventSource");
            }
            Contract.EndContractBlock();

            // An Update command with enable=false and the widest level/keywords turns everything off.
            eventSource.SendCommand(this, 0, 0, EventCommand.Update, false, EventLevel.LogAlways, EventKeywords.None, null);
        }

        /// <summary>
        /// EventSourceIndex is a small non-negative integer (suitable for indexing in an array)
        /// identifying EventSource. It is unique per-appdomain. Some EventListeners might find
        /// it useful to store additional information about each eventSource connected to it,
        /// and EventSourceIndex allows this extra information to be efficiently stored in a
        /// (growable) array (e.g. List(T)).
        /// </summary>
        public static int EventSourceIndex(EventSource eventSource) { return eventSource.m_id; }

        /// <summary>
        /// This method is called whenever a new eventSource is 'attached' to the dispatcher.
        /// This can happen for all existing EventSources when the EventListener is created
        /// as well as for any EventSources that come into existence after the EventListener
        /// has been created.
        ///
        /// These 'catch up' events are called during the construction of the EventListener.
        /// Subclasses need to be prepared for that.
        ///
        /// In a multi-threaded environment, it is possible for 'OnEventWritten' callbacks
        /// for a particular eventSource to occur BEFORE the OnEventSourceCreated is issued.
/// </summary> /// <param name="eventSource"></param> internal protected virtual void OnEventSourceCreated(EventSource eventSource) { EventHandler<EventSourceCreatedEventArgs> callBack = this._EventSourceCreated; if(callBack != null) { EventSourceCreatedEventArgs args = new EventSourceCreatedEventArgs(); args.EventSource = eventSource; callBack(this, args); } } /// <summary> /// This method is called whenever an event has been written by a EventSource for which /// the EventListener has enabled events. /// </summary> /// <param name="eventData"></param> internal protected virtual void OnEventWritten(EventWrittenEventArgs eventData) { EventHandler<EventWrittenEventArgs> callBack = this.EventWritten; if (callBack != null) { callBack(this, eventData); } } #region private /// <summary> /// This routine adds newEventSource to the global list of eventSources, it also assigns the /// ID to the eventSource (which is simply the ordinal in the global list). /// /// EventSources currently do not pro-actively remove themselves from this list. Instead /// when eventSources's are GCed, the weak handle in this list naturally gets nulled, and /// we will reuse the slot. Today this list never shrinks (but we do reuse entries /// that are in the list). This seems OK since the expectation is that EventSources /// tend to live for the lifetime of the appdomain anyway (they tend to be used in /// global variables). 
/// </summary>
/// <param name="newEventSource">The source being registered; its appdomain-wide index is stored into m_id.</param>
internal static void AddEventSource(EventSource newEventSource)
{
    lock (EventListenersLock)
    {
        if (s_EventSources == null)
            s_EventSources = new List<WeakReference>(2);

        // Register shutdown disposal exactly once for the whole appdomain.
        if (!s_EventSourceShutdownRegistered)
        {
            s_EventSourceShutdownRegistered = true;
#if !ES_BUILD_PCL && !FEATURE_CORECLR
            AppDomain.CurrentDomain.ProcessExit += DisposeOnShutdown;
            AppDomain.CurrentDomain.DomainUnload += DisposeOnShutdown;
#endif
        }

        // Periodically search the list for existing entries to reuse, this avoids
        // unbounded memory use if we keep recycling eventSources (an unlikely thing).
        int newIndex = -1;
        if (s_EventSources.Count % 64 == 63)    // on every block of 64, fill up the block before continuing
        {
            int i = s_EventSources.Count;       // Work from the top down.
            while (0 < i)
            {
                --i;
                WeakReference weakRef = s_EventSources[i];
                // A dead weak reference marks a slot whose EventSource was collected;
                // claim the slot rather than growing the list.
                if (!weakRef.IsAlive)
                {
                    newIndex = i;
                    weakRef.Target = newEventSource;
                    break;
                }
            }
        }
        if (newIndex < 0)
        {
            // No reusable slot found (or not a scan iteration): append.
            newIndex = s_EventSources.Count;
            s_EventSources.Add(new WeakReference(newEventSource));
        }
        newEventSource.m_id = newIndex;

        // Add every existing dispatcher to the new EventSource
        for (EventListener listener = s_Listeners; listener != null; listener = listener.m_Next)
            newEventSource.AddListener(listener);

        Validate();
    }
}

// Whenever we have async callbacks from native code, there is an ugly issue where
// during .NET shutdown native code could be calling the callback, but the CLR
// has already prohibited callbacks to managed code in the appdomain, causing the CLR
// to throw a COMPLUS_BOOT_EXCEPTION. The guideline we give is that you must unregister
// such callbacks on process shutdown or appdomain so that unmanaged code will never
// do this. This is what this callback is for.
// See bug 724140 for more
private static void DisposeOnShutdown(object sender, EventArgs e)
{
    lock (EventListenersLock)
    {
        // Dispose every EventSource that is still alive; dead weak references
        // simply yield null from Target and are skipped.
        foreach (var esRef in s_EventSources)
        {
            EventSource es = esRef.Target as EventSource;
            if (es != null)
                es.Dispose();
        }
    }
}

/// <summary>
/// Helper used in code:Dispose that removes any references to 'listenerToRemove' in any of the
/// eventSources in the appdomain.
///
/// The EventListenersLock must be held before calling this routine.
/// </summary>
private static void RemoveReferencesToListenerInEventSources(EventListener listenerToRemove)
{
    // Foreach existing EventSource in the appdomain
    foreach (WeakReference eventSourceRef in s_EventSources)
    {
        EventSource eventSource = eventSourceRef.Target as EventSource;
        if (eventSource != null)
        {
            // Is the first output dispatcher the dispatcher we are removing?
            if (eventSource.m_Dispatchers.m_Listener == listenerToRemove)
                eventSource.m_Dispatchers = eventSource.m_Dispatchers.m_Next;
            else
            {
                // Remove 'listenerToRemove' from the eventSource.m_Dispatchers linked list.
                EventDispatcher prev = eventSource.m_Dispatchers;
                for (; ; )
                {
                    EventDispatcher cur = prev.m_Next;
                    if (cur == null)
                    {
                        // Every listener is expected to appear on every source's
                        // dispatcher list; reaching the end means the lists are out of sync.
                        Contract.Assert(false, "EventSource did not have a registered EventListener!");
                        break;
                    }
                    if (cur.m_Listener == listenerToRemove)
                    {
                        prev.m_Next = cur.m_Next;       // Remove entry.
                        break;
                    }
                    prev = cur;
                }
            }
        }
    }
}

/// <summary>
/// Checks internal consistency of EventSources/Listeners.
/// </summary>
[Conditional("DEBUG")]
internal static void Validate()
{
    lock (EventListenersLock)
    {
        // Get all listeners
        Dictionary<EventListener, bool> allListeners = new Dictionary<EventListener, bool>();
        EventListener cur = s_Listeners;
        while (cur != null)
        {
            allListeners.Add(cur, true);
            cur = cur.m_Next;
        }

        // For all eventSources
        int id = -1;
        foreach (WeakReference eventSourceRef in s_EventSources)
        {
            id++;
            EventSource eventSource = eventSourceRef.Target as EventSource;
            if (eventSource == null)
                continue;
            // Each live source's m_id must equal its slot ordinal (see AddEventSource).
            Contract.Assert(eventSource.m_id == id, "Unexpected event source ID.");

            // None listeners on eventSources exist in the dispatcher list.
            EventDispatcher dispatcher = eventSource.m_Dispatchers;
            while (dispatcher != null)
            {
                Contract.Assert(allListeners.ContainsKey(dispatcher.m_Listener), "EventSource has a listener not on the global list.");
                dispatcher = dispatcher.m_Next;
            }

            // Every dispatcher is on Dispatcher List of every eventSource.
            foreach (EventListener listener in allListeners.Keys)
            {
                dispatcher = eventSource.m_Dispatchers;
                for (; ; )
                {
                    Contract.Assert(dispatcher != null, "Listener is not on all eventSources.");
                    if (dispatcher.m_Listener == listener)
                        break;
                    dispatcher = dispatcher.m_Next;
                }
            }
        }
    }
}

/// <summary>
/// Gets a global lock that is intended to protect the code:s_Listeners linked list and the
/// code:s_EventSources WeakReference list. (We happen to use the s_EventSources list as
/// the lock object)
/// </summary>
internal static object EventListenersLock
{
    get
    {
        // Lazily create the list that doubles as the lock object; CompareExchange
        // guarantees that exactly one instance survives if several threads race here.
        if (s_EventSources == null)
            Interlocked.CompareExchange(ref s_EventSources, new List<WeakReference>(2), null);
        return s_EventSources;
    }
}

// Fires the 'callback' for every EventSource already registered in the appdomain,
// optionally linking this listener into s_Listeners first. Guarded against
// reentrant EventListener construction via s_CreatingListener.
private void CallBackForExistingEventSources(bool addToListenersList, EventHandler<EventSourceCreatedEventArgs> callback)
{
    lock (EventListenersLock)
    {
        // Disallow creating EventListener reentrancy.
        if (s_CreatingListener)
            throw new InvalidOperationException(Environment.GetResourceString("EventSource_ListenerCreatedInsideCallback"));

        try
        {
            s_CreatingListener = true;

            if (addToListenersList)
            {
                // Add to list of listeners in the system, do this BEFORE firing the 'OnEventSourceCreated' so that
                // Those added sources see this listener.
                this.m_Next = s_Listeners;
                s_Listeners = this;
            }

            // Find all existing eventSources call OnEventSourceCreated to 'catchup'
            // Note that we DO have reentrancy here because 'AddListener' calls out to user code (via OnEventSourceCreated callback)
            // We tolerate this by iterating over a copy of the list here. New event sources will take care of adding listeners themselves
            // EventSources are not guaranteed to be added at the end of the s_EventSource list -- We re-use slots when a new source
            // is created.
            WeakReference[] eventSourcesSnapshot = s_EventSources.ToArray();

            for (int i = 0; i < eventSourcesSnapshot.Length; i++)
            {
                WeakReference eventSourceRef = eventSourcesSnapshot[i];
                EventSource eventSource = eventSourceRef.Target as EventSource;
                if (eventSource != null)
                {
                    EventSourceCreatedEventArgs args = new EventSourceCreatedEventArgs();
                    args.EventSource = eventSource;
                    callback(this, args);
                }
            }

            Validate();
        }
        finally
        {
            // Always clear the reentrancy guard, even if a user callback threw.
            s_CreatingListener = false;
        }
    }
}

// Instance fields
internal volatile EventListener m_Next;                         // These form a linked list in s_Listeners
#if FEATURE_ACTIVITYSAMPLING
internal ActivityFilter m_activityFilter;                       // If we are filtering by activity on this Listener, this keeps track of it.
#endif // FEATURE_ACTIVITYSAMPLING

// static fields

/// <summary>
/// The list of all listeners in the appdomain. Listeners must be explicitly disposed to remove themselves
/// from this list. Note that EventSources point to their listener but NOT the reverse.
/// </summary>
internal static EventListener s_Listeners;

/// <summary>
/// The list of all active eventSources in the appdomain.
/// Note that eventSources do NOT
/// remove themselves from this list this is a weak list and the GC that removes them may
/// not have happened yet. Thus it can contain event sources that are dead (thus you have
/// to filter those out.
/// </summary>
internal static List<WeakReference> s_EventSources;

/// <summary>
/// Used to disallow reentrancy.
/// </summary>
private static bool s_CreatingListener = false;

/// <summary>
/// Used to register AD/Process shutdown callbacks.
/// </summary>
private static bool s_EventSourceShutdownRegistered = false;
#endregion
}

/// <summary>
/// Passed to the code:EventSource.OnEventCommand callback
/// </summary>
public class EventCommandEventArgs : EventArgs
{
    /// <summary>
    /// Gets the command for the callback.
    /// </summary>
    public EventCommand Command { get; internal set; }

    /// <summary>
    /// Gets the arguments for the callback.
    /// </summary>
    public IDictionary<String, String> Arguments { get; internal set; }

    /// <summary>
    /// Enables the event that has the specified identifier.
    /// </summary>
    /// <param name="eventId">Event ID of event to be enabled</param>
    /// <returns>true if eventId is in range</returns>
    public bool EnableEvent(int eventId)
    {
        // Per-event enable/disable is only meaningful while processing an
        // Enable or Disable command.
        if (Command != EventCommand.Enable && Command != EventCommand.Disable)
            throw new InvalidOperationException();
        return eventSource.EnableEventForDispatcher(dispatcher, eventId, true);
    }

    /// <summary>
    /// Disables the event that have the specified identifier.
/// </summary>
/// <param name="eventId">Event ID of event to be disabled</param>
/// <returns>true if eventId is in range</returns>
public bool DisableEvent(int eventId)
{
    // Only valid while processing an Enable or Disable command (mirrors EnableEvent).
    if (Command != EventCommand.Enable && Command != EventCommand.Disable)
        throw new InvalidOperationException();
    return eventSource.EnableEventForDispatcher(dispatcher, eventId, false);
}

#region private
// Captures all arguments of EventSource.SendCommand so the command can be
// deferred and replayed once the source is fully initialized.
internal EventCommandEventArgs(EventCommand command, IDictionary<string, string> arguments, EventSource eventSource, EventListener listener, int perEventSourceSessionId, int etwSessionId, bool enable, EventLevel level, EventKeywords matchAnyKeyword)
{
    this.Command = command;
    this.Arguments = arguments;
    this.eventSource = eventSource;
    this.listener = listener;
    this.perEventSourceSessionId = perEventSourceSessionId;
    this.etwSessionId = etwSessionId;
    this.enable = enable;
    this.level = level;
    this.matchAnyKeyword = matchAnyKeyword;
}

internal EventSource eventSource;
internal EventDispatcher dispatcher;

// These are the arguments of sendCommand and are only used for deferring commands until after we are fully initialized.
internal EventListener listener;
internal int perEventSourceSessionId;
internal int etwSessionId;
internal bool enable;
internal EventLevel level;
internal EventKeywords matchAnyKeyword;
internal EventCommandEventArgs nextCommand;     // We form a linked list of these deferred commands.

#endregion
}

/// <summary>
/// EventSourceCreatedEventArgs is passed to <see cref="EventListener.EventSourceCreated"/>
/// </summary>
public class EventSourceCreatedEventArgs : EventArgs
{
    /// <summary>
    /// The EventSource that is attaching to the listener.
    /// </summary>
    public EventSource EventSource { get; internal set; }
}

/// <summary>
/// EventWrittenEventArgs is passed to the user-provided override for
/// <see cref="EventListener.OnEventWritten"/> when an event is fired.
/// </summary>
public class EventWrittenEventArgs : EventArgs
{
    /// <summary>
    /// The name of the event.
/// </summary>
public string EventName
{
    get
    {
        // TraceLogging (self-describing) events use EventId == -1 and carry their
        // name directly; manifest-based events look the name up in the source's metadata.
        if (m_eventName != null || EventId < 0)      // TraceLogging convention EventID == -1
        {
            return m_eventName;
        }
        else
            return m_eventSource.m_eventData[EventId].Name;
    }
    internal set
    {
        m_eventName = value;
    }
}

/// <summary>
/// Gets the event ID for the event that was written.
/// </summary>
public int EventId { get; internal set; }

/// <summary>
/// Gets the activity ID for the thread on which the event was written.
/// </summary>
public Guid ActivityId
{
    [System.Security.SecurityCritical]
    get { return EventSource.CurrentThreadActivityId; }
}

/// <summary>
/// Gets the related activity ID if one was specified when the event was written.
/// </summary>
public Guid RelatedActivityId
{
    [System.Security.SecurityCritical]
    get;
    internal set;
}

/// <summary>
/// Gets the payload for the event.
/// </summary>
public ReadOnlyCollection<Object> Payload { get; internal set; }

/// <summary>
/// Gets the payload argument names.
/// </summary>
public ReadOnlyCollection<string> PayloadNames
{
    get
    {
        // For contract based events we create the list lazily.
        if (m_payloadNames == null)
        {
            // Self described events are identified by id -1.
            Contract.Assert(EventId != -1);

            var names = new List<string>();
            foreach (var parameter in m_eventSource.m_eventData[EventId].Parameters)
            {
                names.Add(parameter.Name);
            }
            m_payloadNames = new ReadOnlyCollection<string>(names);
        }
        return m_payloadNames;
    }
    internal set
    {
        m_payloadNames = value;
    }
}

/// <summary>
/// Gets the event source object.
/// </summary>
public EventSource EventSource { get { return m_eventSource; } }

/// <summary>
/// Gets the keywords for the event.
/// </summary>
public EventKeywords Keywords
{
    get
    {
        if (EventId < 0)      // TraceLogging convention EventID == -1
            return m_keywords;
        return (EventKeywords)m_eventSource.m_eventData[EventId].Descriptor.Keywords;
    }
}

/// <summary>
/// Gets the operation code for the event.
/// </summary>
public EventOpcode Opcode
{
    get
    {
        // All metadata accessors below follow the same split: self-describing
        // (TraceLogging, EventId == -1) events read the field captured on this
        // instance; manifest events read the source's per-event descriptor.
        if (EventId < 0)      // TraceLogging convention EventID == -1
            return m_opcode;
        return (EventOpcode)m_eventSource.m_eventData[EventId].Descriptor.Opcode;
    }
}

/// <summary>
/// Gets the task for the event.
/// </summary>
public EventTask Task
{
    get
    {
        if (EventId < 0)      // TraceLogging convention EventID == -1
            return EventTask.None;
        return (EventTask)m_eventSource.m_eventData[EventId].Descriptor.Task;
    }
}

/// <summary>
/// Any provider/user defined options associated with the event.
/// </summary>
public EventTags Tags
{
    get
    {
        if (EventId < 0)      // TraceLogging convention EventID == -1
            return m_tags;
        return m_eventSource.m_eventData[EventId].Tags;
    }
}

/// <summary>
/// Gets the message for the event.
/// </summary>
public string Message
{
    get
    {
        if (EventId < 0)      // TraceLogging convention EventID == -1
            return m_message;
        else
            return m_eventSource.m_eventData[EventId].Message;
    }
    internal set
    {
        m_message = value;
    }
}

#if FEATURE_MANAGED_ETW_CHANNELS
/// <summary>
/// Gets the channel for the event.
/// </summary>
public EventChannel Channel
{
    get
    {
        if (EventId < 0)      // TraceLogging convention EventID == -1
            return EventChannel.None;
        return (EventChannel)m_eventSource.m_eventData[EventId].Descriptor.Channel;
    }
}
#endif

/// <summary>
/// Gets the version of the event.
/// </summary>
public byte Version
{
    get
    {
        if (EventId < 0)      // TraceLogging convention EventID == -1
            return 0;
        return m_eventSource.m_eventData[EventId].Descriptor.Version;
    }
}

/// <summary>
/// Gets the level for the event.
/// </summary>
public EventLevel Level
{
    get
    {
        if (EventId < 0)      // TraceLogging convention EventID == -1
            return m_level;
        return (EventLevel)m_eventSource.m_eventData[EventId].Descriptor.Level;
    }
}

#region private
// Created by the EventSource that dispatched the event; all remaining state is
// populated via the internal setters before the args reach user code.
internal EventWrittenEventArgs(EventSource eventSource)
{
    m_eventSource = eventSource;
}
private string m_message;
private string m_eventName;
private EventSource m_eventSource;
private ReadOnlyCollection<string> m_payloadNames;
internal EventTags m_tags;
internal EventOpcode m_opcode;
internal EventLevel m_level;
internal EventKeywords m_keywords;
#endregion
}

/// <summary>
/// Allows customizing defaults and specifying localization support for the event source class to which it is applied.
/// </summary>
[AttributeUsage(AttributeTargets.Class)]
public sealed class EventSourceAttribute : Attribute
{
    /// <summary>
    /// Overrides the ETW name of the event source (which defaults to the class name)
    /// </summary>
    public string Name { get; set; }

    /// <summary>
    /// Overrides the default (calculated) Guid of an EventSource type. Explicitly defining a GUID is discouraged,
    /// except when upgrading existing ETW providers to using event sources.
    /// </summary>
    public string Guid { get; set; }

    /// <summary>
    /// <para>
    /// EventSources support localization of events. The names used for events, opcodes, tasks, keywords and maps
    /// can be localized to several languages if desired. This works by creating a ResX style string table
    /// (by simply adding a 'Resource File' to your project). This resource file is given a name e.g.
    /// 'DefaultNameSpace.ResourceFileName' which can be passed to the ResourceManager constructor to read the
    /// resources. This name is the value of the LocalizationResources property.
/// </para><para>
/// If LocalizationResources property is non-null, then EventSource will look up the localized strings for events by
/// using the following resource naming scheme
/// </para>
/// <para>* event_EVENTNAME</para>
/// <para>* task_TASKNAME</para>
/// <para>* keyword_KEYWORDNAME</para>
/// <para>* map_MAPNAME</para>
/// <para>
/// where the capitalized name is the name of the event, task, keyword, or map value that should be localized.
/// Note that the localized string for an event corresponds to the Message string, and can have {0} values
/// which represent the payload values.
/// </para>
/// </summary>
public string LocalizationResources { get; set; }
}

/// <summary>
/// Any instance methods in a class that subclasses <see cref="EventSource"/> and that return void are
/// assumed by default to be methods that generate an ETW event. Enough information can be deduced from the
/// name of the method and its signature to generate basic schema information for the event. The
/// <see cref="EventAttribute"/> class allows you to specify additional event schema information for an event if
/// desired.
/// </summary>
[AttributeUsage(AttributeTargets.Method)]
public sealed class EventAttribute : Attribute
{
    /// <summary>Construct an EventAttribute with specified eventId</summary>
    /// <param name="eventId">ID of the ETW event (an integer between 1 and 65535)</param>
    public EventAttribute(int eventId)
    {
        this.EventId = eventId;
        Level = EventLevel.Informational;   // default severity when the user does not specify one
        this.m_opcodeSet = false;
    }

    /// <summary>Event's ID</summary>
    public int EventId { get; private set; }
    /// <summary>Event's severity level: indicates the severity or verbosity of the event</summary>
    public EventLevel Level { get; set; }
    /// <summary>Event's keywords: allows classification of events by "categories"</summary>
    public EventKeywords Keywords { get; set; }
    /// <summary>Event's operation code: allows defining operations, generally used with Tasks</summary>
    public EventOpcode Opcode
    {
        get
        {
            return m_opcode;
        }
        set
        {
            this.m_opcode = value;
            // Record that the opcode was explicitly assigned (see IsOpcodeSet) so
            // callers can distinguish a defaulted opcode from a user-chosen one.
            this.m_opcodeSet = true;
        }
    }

    internal bool IsOpcodeSet
    {
        get
        {
            return m_opcodeSet;
        }
    }

    /// <summary>Event's task: allows logical grouping of events</summary>
    public EventTask Task { get; set; }
#if FEATURE_MANAGED_ETW_CHANNELS
    /// <summary>Event's channel: defines an event log as an additional destination for the event</summary>
    public EventChannel Channel { get; set; }
#endif
    /// <summary>Event's version</summary>
    public byte Version { get; set; }

    /// <summary>
    /// This can be specified to enable formatting and localization of the event's payload. You can
    /// use standard .NET substitution operators (eg {1}) in the string and they will be replaced
    /// with the 'ToString()' of the corresponding part of the event payload.
    /// </summary>
    public string Message { get; set; }

    /// <summary>
    /// User defined options associated with the event. These do not have meaning to the EventSource but
    /// are passed through to listeners which given them semantics.
/// </summary>
public EventTags Tags { get; set; }

/// <summary>
/// Allows fine control over the Activity IDs generated by start and stop events
/// </summary>
public EventActivityOptions ActivityOptions { get; set; }

#region private
EventOpcode m_opcode;           // backing field for Opcode
private bool m_opcodeSet;       // true only after Opcode has been explicitly assigned
#endregion
}

/// <summary>
/// By default all instance methods in a class that subclasses code:EventSource and that return
/// void are assumed to be methods that generate an event. This default can be overridden by specifying
/// the code:NonEventAttribute
/// </summary>
[AttributeUsage(AttributeTargets.Method)]
public sealed class NonEventAttribute : Attribute
{
    /// <summary>
    /// Constructs a default NonEventAttribute
    /// </summary>
    public NonEventAttribute() { }
}

// FUTURE we may want to expose this at some point once we have a partner that can help us validate the design.
#if FEATURE_MANAGED_ETW_CHANNELS
/// <summary>
/// EventChannelAttribute allows customizing channels supported by an EventSource. This attribute must be
/// applied to an member of type EventChannel defined in a Channels class nested in the EventSource class:
/// <code>
/// public static class Channels
/// {
///     [Channel(Enabled = true, EventChannelType = EventChannelType.Admin)]
///     public const EventChannel Admin = (EventChannel)16;
///
///     [Channel(Enabled = false, EventChannelType = EventChannelType.Operational)]
///     public const EventChannel Operational = (EventChannel)17;
/// }
/// </code>
/// </summary>
[AttributeUsage(AttributeTargets.Field)]
#if FEATURE_ADVANCED_MANAGED_ETW_CHANNELS
public
#endif
class EventChannelAttribute : Attribute
{
    /// <summary>
    /// Specified whether the channel is enabled by default
    /// </summary>
    public bool Enabled { get; set; }

    /// <summary>
    /// Legal values are in EventChannelType
    /// </summary>
    public EventChannelType EventChannelType { get; set; }

#if FEATURE_ADVANCED_MANAGED_ETW_CHANNELS
    /// <summary>
    /// Specifies the isolation for the channel
    /// </summary>
    public
EventChannelIsolation Isolation { get; set; }

/// <summary>
/// Specifies an SDDL access descriptor that controls access to the log file that backs the channel.
/// See MSDN ((http://msdn.microsoft.com/en-us/library/windows/desktop/aa382741.aspx) for details.
/// </summary>
public string Access { get; set; }

/// <summary>
/// Allows importing channels defined in external manifests
/// </summary>
public string ImportChannel { get; set; }
#endif

// TODO: there is a convention that the name is the Provider/Type Should we provide an override?
// public string Name { get; set; }
}

/// <summary>
/// Allowed channel types
/// </summary>
#if FEATURE_ADVANCED_MANAGED_ETW_CHANNELS
public
#endif
enum EventChannelType
{
    // Values deliberately start at 1; 0 is reserved (no channel).
    /// <summary>The admin channel</summary>
    Admin = 1,
    /// <summary>The operational channel</summary>
    Operational,
    /// <summary>The Analytic channel</summary>
    Analytic,
    /// <summary>The debug channel</summary>
    Debug,
}

#if FEATURE_ADVANCED_MANAGED_ETW_CHANNELS
/// <summary>
/// Allowed isolation levels. See MSDN (http://msdn.microsoft.com/en-us/library/windows/desktop/aa382741.aspx)
/// for the default permissions associated with each level. EventChannelIsolation and Access allows control over the
/// access permissions for the channel and backing file.
/// </summary>
public enum EventChannelIsolation
{
    /// <summary>
    /// This is the default isolation level. All channels that specify Application isolation use the same ETW session
    /// </summary>
    Application = 1,
    /// <summary>
    /// All channels that specify System isolation use the same ETW session
    /// </summary>
    System,
    /// <summary>
    /// Use sparingly! When specifying Custom isolation, a separate ETW session is created for the channel.
    /// Using Custom isolation lets you control the access permissions for the channel and backing file.
    /// Because there are only 64 ETW sessions available, you should limit your use of Custom isolation.
/// </summary>
Custom,
}
#endif
#endif

/// <summary>
/// Describes the pre-defined command (EventCommandEventArgs.Command property) that is passed to the OnEventCommand callback.
/// </summary>
public enum EventCommand
{
    // Negative values are internal commands; Update (0) is the only value
    // produced by the public enable/disable path.
    /// <summary>
    /// Update EventSource state
    /// </summary>
    Update = 0,
    /// <summary>
    /// Request EventSource to generate and send its manifest
    /// </summary>
    SendManifest = -1,
    /// <summary>
    /// Enable event
    /// </summary>
    Enable = -2,
    /// <summary>
    /// Disable event
    /// </summary>
    Disable = -3
};

#region private classes

#if FEATURE_ACTIVITYSAMPLING

/// <summary>
/// ActivityFilter is a helper structure that is used to keep track of run-time state
/// associated with activity filtering. It is 1-1 with EventListeners (logically
/// every listener has one of these, however we actually allocate them lazily), as well
/// as 1-to-1 with tracing-aware EtwSessions.
///
/// This structure also keeps track of the sampling counts associated with 'trigger'
/// events. Because these trigger events are rare, and you typically only have one of
/// them, we store them here as a linked list.
/// </summary>
internal sealed class ActivityFilter : IDisposable
{
    /// <summary>
    /// Disable all activity filtering for the listener associated with 'filterList',
    /// (in the session associated with it) that is triggered by any event in 'source'.
/// </summary>
public static void DisableFilter(ref ActivityFilter filterList, EventSource source)
{
#if !ES_BUILD_STANDALONE
    Contract.Assert(Monitor.IsEntered(EventListener.EventListenersLock));
#endif
    if (filterList == null)
        return;

    ActivityFilter cur;
    // Remove it from anywhere in the list (except the first element, which has to
    // be treated specially)
    ActivityFilter prev = filterList;
    cur = prev.m_next;
    while (cur != null)
    {
        if (cur.m_providerGuid == source.Guid)
        {
            // update TriggersActivityTracking bit
            if (cur.m_eventId >= 0 && cur.m_eventId < source.m_eventData.Length)
                --source.m_eventData[cur.m_eventId].TriggersActivityTracking;

            // Remove it from the linked list.
            prev.m_next = cur.m_next;
            // dispose of the removed node
            cur.Dispose();
            // update cursor
            cur = prev.m_next;
        }
        else
        {
            // update cursors
            prev = cur;
            cur = prev.m_next;
        }
    }

    // Sadly we have to treat the first element specially in linked list removal in C#
    if (filterList.m_providerGuid == source.Guid)
    {
        // update TriggersActivityTracking bit
        if (filterList.m_eventId >= 0 && filterList.m_eventId < source.m_eventData.Length)
            --source.m_eventData[filterList.m_eventId].TriggersActivityTracking;

        // We are the first element in the list.
        var first = filterList;
        filterList = first.m_next;
        // dispose of the removed node
        first.Dispose();
    }

    // the above might have removed the one ActivityFilter in the session that contains the
    // cleanup delegate; re-create the delegate if needed
    if (filterList != null)
    {
        EnsureActivityCleanupDelegate(filterList);
    }
}

/// <summary>
/// Currently this has "override" semantics. We first disable all filters
/// associated with 'source', and next we add new filters for each entry in the
/// string 'startEvents'. participateInSampling specifies whether non-startEvents
/// always trigger or only trigger when current activity is 'active'.
/// </summary>
public static void UpdateFilter(
                            ref ActivityFilter filterList,
                            EventSource source,
                            int perEventSourceSessionId,
                            string startEvents)
{
#if !ES_BUILD_STANDALONE
    Contract.Assert(Monitor.IsEntered(EventListener.EventListenersLock));
#endif
    // first remove all filters associated with 'source'
    DisableFilter(ref filterList, source);
    if (!string.IsNullOrEmpty(startEvents))
    {
        // ActivitySamplingStartEvents is a space-separated list of Event:Frequency pairs.
        // The Event may be specified by name or by ID. Errors in parsing such a pair
        // result in the error being reported to the listeners, and the pair being ignored.
        // E.g. "CustomActivityStart:1000 12:10" specifies that for event CustomActivityStart
        // we should initiate activity tracing once every 1000 events, *and* for event ID 12
        // we should initiate activity tracing once every 10 events.
        string[] activityFilterStrings = startEvents.Split(' ');

        for (int i = 0; i < activityFilterStrings.Length; ++i)
        {
            string activityFilterString = activityFilterStrings[i];
            int sampleFreq = 1;
            int eventId = -1;
            int colonIdx = activityFilterString.IndexOf(':');
            if (colonIdx < 0)
            {
                // Malformed pair: report and skip it; the remaining pairs still apply.
                source.ReportOutOfBandMessage("ERROR: Invalid ActivitySamplingStartEvent specification: " + activityFilterString, false);
                // ignore failure...
                continue;
            }
            string sFreq = activityFilterString.Substring(colonIdx + 1);
            if (!int.TryParse(sFreq, out sampleFreq))
            {
                source.ReportOutOfBandMessage("ERROR: Invalid sampling frequency specification: " + sFreq, false);
                continue;
            }
            activityFilterString = activityFilterString.Substring(0, colonIdx);
            if (!int.TryParse(activityFilterString, out eventId))
            {
                // reset eventId
                eventId = -1;
                // see if it's an event name (case-insensitive match against the manifest)
                for (int j = 0; j < source.m_eventData.Length; j++)
                {
                    EventSource.EventMetadata[] ed = source.m_eventData;
                    if (ed[j].Name != null && ed[j].Name.Length == activityFilterString.Length &&
                        string.Compare(ed[j].Name, activityFilterString, StringComparison.OrdinalIgnoreCase) == 0)
                    {
                        eventId = ed[j].Descriptor.EventId;
                        break;
                    }
                }
            }
            if (eventId < 0 || eventId >= source.m_eventData.Length)
            {
                source.ReportOutOfBandMessage("ERROR: Invalid eventId specification: " + activityFilterString, false);
                continue;
            }
            EnableFilter(ref filterList, source, perEventSourceSessionId, eventId, sampleFreq);
        }
    }
}

/// <summary>
/// Returns the first ActivityFilter from 'filterList' corresponding to 'source'.
/// </summary>
public static ActivityFilter GetFilter(ActivityFilter filterList, EventSource source)
{
    // m_samplingFreq == -1 marks a placeholder node; only real filters are returned.
    // NOTE(review): the -1 sentinel meaning is inferred from this guard -- confirm
    // against EnableFilter.
    for (var af = filterList; af != null; af = af.m_next)
    {
        if (af.m_providerGuid == source.Guid && af.m_samplingFreq != -1)
            return af;
    }
    return null;
}

/// <summary>
/// Returns a session mask representing all sessions in which the activity
/// associated with the current thread is allowed through the activity filter.
/// If 'triggeringEvent' is true the event MAY be a triggering event. Ideally
/// most of the time this is false as you can guarantee this event is NOT a
/// triggering event. If 'triggeringEvent' is true, then it checks the
/// 'EventSource' and 'eventID' of the event being logged to see if it is actually
/// a trigger. If so it activates the current activity.
///
/// If 'childActivityID' is present, it will be added to the active set if the
/// current activity is active.
/// </summary>
[SecurityCritical]
unsafe public static bool PassesActivityFilter(
                                ActivityFilter filterList,
                                Guid* childActivityID,
                                bool triggeringEvent,
                                EventSource source,
                                int eventId)
{
    Contract.Assert(filterList != null && filterList.m_activeActivities != null);
    bool shouldBeLogged = false;
    if (triggeringEvent)
    {
        for (ActivityFilter af = filterList; af != null; af = af.m_next)
        {
            if (eventId == af.m_eventId && source.Guid == af.m_providerGuid)
            {
                // Update the sampling count with wrap-around
                // (lock-free: retry the decrement until the CAS succeeds unchanged).
                int curSampleCount, newSampleCount;
                do
                {
                    curSampleCount = af.m_curSampleCount;
                    if (curSampleCount <= 1)
                        newSampleCount = af.m_samplingFreq;        // Wrap around, counting down to 1
                    else
                        newSampleCount = curSampleCount - 1;
                }
                while (Interlocked.CompareExchange(ref af.m_curSampleCount, newSampleCount, curSampleCount) != curSampleCount);
                // If we hit zero, then start tracking the activity.
                if (curSampleCount <= 1)
                {
                    Guid currentActivityId = EventSource.InternalCurrentThreadActivityId;
                    Tuple<Guid, int> startId;
                    // only add current activity if it's not already a root activity
                    if (!af.m_rootActiveActivities.TryGetValue(currentActivityId, out startId))
                    {
                        // EventSource.OutputDebugString(string.Format("  PassesAF - Triggering(session {0}, evt {1})", af.m_perEventSourceSessionId, eventId));
                        shouldBeLogged = true;
                        af.m_activeActivities[currentActivityId] = Environment.TickCount;
                        af.m_rootActiveActivities[currentActivityId] = Tuple.Create(source.Guid, eventId);
                    }
                }
                else
                {
                    // a start event following a triggering start event
                    Guid currentActivityId = EventSource.InternalCurrentThreadActivityId;
                    Tuple<Guid, int> startId;
                    // only remove current activity if we added it
                    if (af.m_rootActiveActivities.TryGetValue(currentActivityId, out startId) &&
                        startId.Item1 == source.Guid && startId.Item2 == eventId)
                    {
                        // EventSource.OutputDebugString(string.Format("Activity dying: {0} -> StartEvent({1})", currentActivityId, eventId));
                        // remove activity only from current logging scope (af)
                        int dummy;
                        af.m_activeActivities.TryRemove(currentActivityId, out dummy);
                    }
                }
                break;
            }
        }
    }

    var activeActivities = GetActiveActivities(filterList);
    if (activeActivities != null)
    {
        // if we hadn't already determined this should be logged, test further
        if (!shouldBeLogged)
        {
            shouldBeLogged = !activeActivities.IsEmpty &&
                             activeActivities.ContainsKey(EventSource.InternalCurrentThreadActivityId);
        }
        // Send opcodes transfer the activity to the child; propagate the active
        // mark so the receiving side's events pass the filter too.
        if (shouldBeLogged && childActivityID != null &&
            ((EventOpcode)source.m_eventData[eventId].Descriptor.Opcode == EventOpcode.Send))
        {
            FlowActivityIfNeeded(filterList, null, childActivityID);
            // EventSource.OutputDebugString(string.Format("  PassesAF - activity {0}", *childActivityID));
        }
    }
    // EventSource.OutputDebugString(string.Format("  PassesAF - shouldBeLogged(evt {0}) = {1:x}", eventId, shouldBeLogged));
    return shouldBeLogged;
}

[System.Security.SecuritySafeCritical]
public static bool IsCurrentActivityActive(ActivityFilter filterList)
{
    var activeActivities = GetActiveActivities(filterList);
    if (activeActivities != null &&
        activeActivities.ContainsKey(EventSource.InternalCurrentThreadActivityId))
        return true;

    return false;
}

/// <summary>
/// For the EventListener/EtwSession associated with 'filterList', add 'childActivityid'
/// to list of active activities IF 'currentActivityId' is also active. Passing in a null
/// value for 'currentActivityid' is an indication that the caller has already verified
/// that the current activity is active.
/// </summary>
/// <param name="filterList">Filter list whose shared active-activities set is updated.</param>
/// <param name="currentActivityId">Pointer to the current activity ID, or null if the caller already knows it is active.</param>
/// <param name="childActivityID">Pointer to the child activity ID to mark active; must be non-null.</param>
[SecurityCritical]
unsafe public static void FlowActivityIfNeeded(ActivityFilter filterList, Guid* currentActivityId, Guid* childActivityID)
{
    Contract.Assert(childActivityID != null);

    var activeActivities = GetActiveActivities(filterList);
    Contract.Assert(activeActivities != null);

    // take currentActivityId == null to mean we *know* the current activity is "active"
    if (currentActivityId != null && !activeActivities.ContainsKey(*currentActivityId))
        return;

    if (activeActivities.Count > MaxActivityTrackCount)
    {
        // Keep the table bounded; trimming may evict the current activity, so re-add it.
        TrimActiveActivityStore(activeActivities);
        // make sure current activity is still in the set:
        activeActivities[EventSource.InternalCurrentThreadActivityId] = Environment.TickCount;
    }

    // add child activity to list of actives
    activeActivities[*childActivityID] = Environment.TickCount;
}

/// <summary>
/// For every filter in 'activityFilter' that matches 'sourceGuid', OR the keywords of the
/// filter's trigger event into source.m_keywordTriggers, provided that event either has
/// activity tracking enabled or is a Send-opcode (transfer) event. The keywords are first
/// masked with the session's enabled keywords ('sessKeywords').
/// </summary>
public static void UpdateKwdTriggers(ActivityFilter activityFilter, Guid sourceGuid, EventSource source, EventKeywords sessKeywords)
{
    for (var af = activityFilter; af != null; af = af.m_next)
    {
        if ((sourceGuid == af.m_providerGuid) &&
            (source.m_eventData[af.m_eventId].TriggersActivityTracking > 0 ||
             ((EventOpcode)source.m_eventData[af.m_eventId].Descriptor.Opcode == EventOpcode.Send)))
        {
            // we could be more precise here, if we tracked 'anykeywords' per session
            unchecked
            {
                source.m_keywordTriggers |= (source.m_eventData[af.m_eventId].Descriptor.Keywords & (long)sessKeywords);
            }
        }
    }
}

/// <summary>
/// For the EventSource specified by 'sourceGuid' and the EventListener/EtwSession
/// associated with 'this' ActivityFilter list, return configured sequence of
/// [eventId, sampleFreq] pairs that defines the sampling policy.
/// </summary>
public IEnumerable<Tuple<int, int>> GetFilterAsTuple(Guid sourceGuid)
{
    for (ActivityFilter af = this; af != null; af = af.m_next)
    {
        if (af.m_providerGuid == sourceGuid)
            yield return Tuple.Create(af.m_eventId, af.m_samplingFreq);
    }
}

/// <summary>
/// The cleanup being performed consists of removing the m_myActivityDelegate from
/// the static s_activityDying, therefore allowing the ActivityFilter to be reclaimed.
/// </summary>
public void Dispose()
{
#if !ES_BUILD_STANDALONE
    Contract.Assert(Monitor.IsEntered(EventListener.EventListenersLock));
#endif
    // m_myActivityDelegate is still alive (held by the static EventSource.s_activityDying).
    // Therefore we are ok to take a dependency on m_myActivityDelegate being valid even
    // during the finalization of the ActivityFilter
    if (m_myActivityDelegate != null)
    {
        EventSource.s_activityDying = (Action<Guid>)Delegate.Remove(EventSource.s_activityDying, m_myActivityDelegate);
        m_myActivityDelegate = null;
    }
}

#region private
/// <summary>
/// Creates a new ActivityFilter that is triggered by 'eventId' from 'source' every
/// 'samplingFreq' times the event fires. You can have several of these forming a
/// linked list.
/// </summary>
private ActivityFilter(EventSource source, int perEventSourceSessionId, int eventId, int samplingFreq, ActivityFilter existingFilter = null)
{
    m_providerGuid = source.Guid;
    m_perEventSourceSessionId = perEventSourceSessionId;
    m_eventId = eventId;
    m_samplingFreq = samplingFreq;
    m_next = existingFilter;

    // Invariant: a filter either owns both dictionaries or neither.
    Contract.Assert(existingFilter == null ||
                    (existingFilter.m_activeActivities == null) == (existingFilter.m_rootActiveActivities == null));

    // if this is the first filter we add for this session, we need to create a new
    // table of activities. m_activeActivities is common across EventSources in the same
    // session
    ConcurrentDictionary<Guid, int> activeActivities = null;
    if (existingFilter == null ||
        (activeActivities = GetActiveActivities(existingFilter)) == null)
    {
        m_activeActivities = new ConcurrentDictionary<Guid, int>();
        m_rootActiveActivities = new ConcurrentDictionary<Guid, Tuple<Guid, int>>();

        // Add a delegate to the 'SetCurrentThreadToActivity callback so that I remove 'dead' activities
        m_myActivityDelegate = GetActivityDyingDelegate(this);
        EventSource.s_activityDying = (Action<Guid>)Delegate.Combine(EventSource.s_activityDying, m_myActivityDelegate);
    }
    else
    {
        // Share the existing session-wide tables rather than creating new ones.
        m_activeActivities = activeActivities;
        m_rootActiveActivities = existingFilter.m_rootActiveActivities;
    }
}

/// <summary>
/// Ensure there's at least one ActivityFilter in the 'filterList' that contains an
/// activity-removing delegate for the listener/session associated with 'filterList'.
/// </summary>
private static void EnsureActivityCleanupDelegate(ActivityFilter filterList)
{
    if (filterList == null)
        return;

    for (ActivityFilter af = filterList; af != null; af = af.m_next)
    {
        if (af.m_myActivityDelegate != null)
            return;
    }

    // we didn't find a delegate
    filterList.m_myActivityDelegate = GetActivityDyingDelegate(filterList);
    EventSource.s_activityDying = (Action<Guid>)Delegate.Combine(EventSource.s_activityDying, filterList.m_myActivityDelegate);
}

/// <summary>
/// Builds the delegate to be called when an activity is dying. This is responsible
/// for performing whatever cleanup is needed for the ActivityFilter list passed in.
/// This gets "added" to EventSource.s_activityDying and ends up being called from
/// EventSource.SetCurrentThreadActivityId and ActivityFilter.PassesActivityFilter.
/// </summary>
/// <returns>The delegate to be called when an activity is dying</returns>
private static Action<Guid> GetActivityDyingDelegate(ActivityFilter filterList)
{
    // Remove the dying activity from both the active set and the root-activity set
    // shared by 'filterList'.
    return (Guid oldActivity) =>
    {
        int dummy;
        filterList.m_activeActivities.TryRemove(oldActivity, out dummy);
        Tuple<Guid, int> dummyTuple;
        filterList.m_rootActiveActivities.TryRemove(oldActivity, out dummyTuple);
    };
}

/// <summary>
/// Enables activity filtering for the listener associated with 'filterList', triggering on
/// the event 'eventID' from 'source' with a sampling frequency of 'samplingFreq'
///
/// if 'eventID' is out of range (e.g. negative), it means we are not triggering (but we are
/// activitySampling if something else triggered).
/// </summary>
/// <returns>true if activity sampling is enabled and the samplingFreq is non-zero</returns>
private static bool EnableFilter(ref ActivityFilter filterList, EventSource source, int perEventSourceSessionId, int eventId, int samplingFreq)
{
#if !ES_BUILD_STANDALONE
    Contract.Assert(Monitor.IsEntered(EventListener.EventListenersLock));
#endif
    Contract.Assert(samplingFreq > 0);
    Contract.Assert(eventId >= 0);

    // Prepend the new filter to the list (the new node links to the old head).
    filterList = new ActivityFilter(source, perEventSourceSessionId, eventId, samplingFreq, filterList);

    // Mark the 'quick Check' that indicates this is a trigger event.
    // If eventId is out of range then this mark is not done which has the effect of ignoring
    // the trigger.
    if (0 <= eventId && eventId < source.m_eventData.Length)
        ++source.m_eventData[eventId].TriggersActivityTracking;

    return true;
}

/// <summary>
/// Normally this code never runs, it is here just to prevent run-away resource usage.
/// </summary>
private static void TrimActiveActivityStore(ConcurrentDictionary<Guid, int> activities)
{
    if (activities.Count > MaxActivityTrackCount)
    {
        // Remove half of the oldest activity ids.
        var keyValues = activities.ToArray();
        var tickNow = Environment.TickCount;

        // Sort by age, taking into account wrap-around. As long as x and y are within
        // 23 days of now then (0x7FFFFFFF & (tickNow - x.Value)) is the delta (even if
        // TickCount wraps). I then sort by DESCENDING age. (that is oldest value first)
        Array.Sort(keyValues, (x, y) => (0x7FFFFFFF & (tickNow - y.Value)) - (0x7FFFFFFF & (tickNow - x.Value)));
        for (int i = 0; i < keyValues.Length / 2; i++)
        {
            int dummy;
            activities.TryRemove(keyValues[i].Key, out dummy);
        }
    }
}

/// <summary>
/// Returns the shared active-activities table for the session, i.e. the first non-null
/// m_activeActivities found on the filter list (all filters in a session share one table).
/// Returns null if no filter in the list owns a table.
/// </summary>
private static ConcurrentDictionary<Guid, int> GetActiveActivities(
                                    ActivityFilter filterList)
{
    for (ActivityFilter af = filterList; af != null; af = af.m_next)
    {
        if (af.m_activeActivities != null)
            return af.m_activeActivities;
    }
    return null;
}

// m_activeActivities always points to the sample dictionary for EVERY ActivityFilter
// in the m_next list. The 'int' value in the m_activities set is a timestamp
// (Environment.TickCount) of when the entry was put in the system and is used to
// remove 'old' entries that if the set gets too big.
ConcurrentDictionary<Guid, int> m_activeActivities;

// m_rootActiveActivities holds the "root" active activities, i.e. the activities
// that were marked as active because a Start event fired on them. We need to keep
// track of these to enable sampling in the scenario of an app's main thread that
// never explicitly sets distinct activity IDs as it executes. To handle these
// situations we manufacture a Guid from the thread's ID, and:
//   (a) we consider the firing of a start event when the sampling counter reaches
//       zero to mark the beginning of an interesting activity, and
//   (b) we consider the very next firing of the same start event to mark the
//       ending of that activity.
// We use a ConcurrentDictionary to avoid taking explicit locks.
// The key (a guid) represents the activity ID of the root active activity
// The value is made up of the Guid of the event provider and the eventId of
// the start event.
ConcurrentDictionary<Guid, Tuple<Guid, int>> m_rootActiveActivities;

Guid m_providerGuid;            // We use the GUID rather than object identity because we don't want to keep the eventSource alive
int m_eventId;                  // triggering event
int m_samplingFreq;             // Counter reset to this when it hits 0
int m_curSampleCount;           // We count down to 0 and then activate the activity.
int m_perEventSourceSessionId;  // session ID bit for ETW, 0 for EventListeners

const int MaxActivityTrackCount = 100000;   // maximum number of tracked activities

ActivityFilter m_next;          // We create a linked list of these
Action<Guid> m_myActivityDelegate;  // cleanup delegate registered on EventSource.s_activityDying (null if this node shares another's tables)
#endregion
};

/// <summary>
/// An EtwSession instance represents an activity-tracing-aware ETW session. Since these
/// are limited to 8 concurrent sessions per machine (currently) we're going to store
/// the active ones in a singly linked list.
/// </summary>
internal class EtwSession
{
    /// <summary>
    /// Looks up the EtwSession with the given ETW session ID. If not found and
    /// 'bCreateIfNeeded' is true, creates one and registers it in the global list
    /// (held by weak reference); otherwise returns null.
    /// </summary>
    public static EtwSession GetEtwSession(int etwSessionId, bool bCreateIfNeeded = false)
    {
        if (etwSessionId < 0)
            return null;

        EtwSession etwSession;
        foreach (var wrEtwSession in s_etwSessions)
        {
#if ES_BUILD_STANDALONE
            if ((etwSession = (EtwSession) wrEtwSession.Target) != null &&
                etwSession.m_etwSessionId == etwSessionId)
                return etwSession;
#else
            if (wrEtwSession.TryGetTarget(out etwSession) &&
                etwSession.m_etwSessionId == etwSessionId)
                return etwSession;
#endif
        }

        if (!bCreateIfNeeded)
            return null;

#if ES_BUILD_STANDALONE
        if (s_etwSessions == null)
            s_etwSessions = new List<WeakReference>();

        etwSession = new EtwSession(etwSessionId);
        s_etwSessions.Add(new WeakReference(etwSession));
#else
        if (s_etwSessions == null)
            s_etwSessions = new List<WeakReference<EtwSession>>();

        etwSession = new EtwSession(etwSessionId);
        s_etwSessions.Add(new WeakReference<EtwSession>(etwSession));
#endif

        if (s_etwSessions.Count > s_thrSessionCount)
            TrimGlobalList();

        return etwSession;
    }

    /// <summary>
    /// Removes every registered session with the same ETW session ID as 'etwSession'
    /// from the global list, then trims dead weak references if the list is large.
    /// </summary>
    public static void RemoveEtwSession(EtwSession etwSession)
    {
        Contract.Assert(etwSession != null);
        if (s_etwSessions == null || etwSession == null)
            return;

        s_etwSessions.RemoveAll((wrEtwSession) =>
        {
            EtwSession session;
#if ES_BUILD_STANDALONE
            return (session = (EtwSession) wrEtwSession.Target) != null &&
                   (session.m_etwSessionId == etwSession.m_etwSessionId);
#else
            return wrEtwSession.TryGetTarget(out session) &&
                   (session.m_etwSessionId == etwSession.m_etwSessionId);
#endif
        });

        if (s_etwSessions.Count > s_thrSessionCount)
            TrimGlobalList();
    }

    /// <summary>
    /// Drops weak references whose targets have been collected from the global list.
    /// </summary>
    private static void TrimGlobalList()
    {
        if (s_etwSessions == null)
            return;

        s_etwSessions.RemoveAll((wrEtwSession) =>
        {
#if ES_BUILD_STANDALONE
            return wrEtwSession.Target == null;
#else
            EtwSession session;
            return !wrEtwSession.TryGetTarget(out session);
#endif
        });
    }

    private EtwSession(int etwSessionId)
    {
        m_etwSessionId = etwSessionId;
    }

    public readonly int m_etwSessionId;        // ETW session ID (as retrieved by EventProvider)
    public ActivityFilter m_activityFilter;    // all filters enabled for this session

#if ES_BUILD_STANDALONE
    private static List<WeakReference> s_etwSessions = new List<WeakReference>();
#else
    private static List<WeakReference<EtwSession>> s_etwSessions = new List<WeakReference<EtwSession>>();
#endif
    private const int s_thrSessionCount = 16;  // list size above which dead weak refs get trimmed
}

#endif // FEATURE_ACTIVITYSAMPLING

// holds a bitfield representing a session mask
/// <summary>
/// A SessionMask represents a set of (at most MAX) sessions as a bit mask. The perEventSourceSessionId
/// is the index in the SessionMask of the bit that will be set. These can translate to
/// EventSource's reserved keywords bits using the provided ToEventKeywords() and
/// FromEventKeywords() methods.
/// </summary>
internal struct SessionMask
{
    public SessionMask(SessionMask m)
    { m_mask = m.m_mask; }

    public SessionMask(uint mask = 0)
    { m_mask = mask & MASK; }

    /// <summary>True if every session bit set in 'm' is also set in this mask.</summary>
    public bool IsEqualOrSupersetOf(SessionMask m)
    {
        return (this.m_mask | m.m_mask) == this.m_mask;
    }

    /// <summary>A mask with all MAX session bits set.</summary>
    public static SessionMask All
    {
        get { return new SessionMask(MASK); }
    }

    /// <summary>A mask with only the bit for 'perEventSourceSessionId' set.</summary>
    public static SessionMask FromId(int perEventSourceSessionId)
    {
        Contract.Assert(perEventSourceSessionId < MAX);
        return new SessionMask((uint)1 << perEventSourceSessionId);
    }

    // Translate the session bits into EventSource's reserved keyword bits
    // (shifted into bits SHIFT_SESSION_TO_KEYWORD..SHIFT_SESSION_TO_KEYWORD+3).
    public ulong ToEventKeywords()
    {
        return (ulong)m_mask << SHIFT_SESSION_TO_KEYWORD;
    }

    // Inverse of ToEventKeywords(): recover the session bits from a keyword value.
    public static SessionMask FromEventKeywords(ulong m)
    {
        return new SessionMask((uint)(m >> SHIFT_SESSION_TO_KEYWORD));
    }

    /// <summary>Gets/sets the session bit for 'perEventSourceSessionId'.</summary>
    public bool this[int perEventSourceSessionId]
    {
        get
        {
            Contract.Assert(perEventSourceSessionId < MAX);
            return (m_mask & (1 << perEventSourceSessionId)) != 0;
        }
        set
        {
            Contract.Assert(perEventSourceSessionId < MAX);
            if (value) m_mask |= ((uint)1 << perEventSourceSessionId);
            else m_mask &= ~((uint)1 << perEventSourceSessionId);
        }
    }

    public static SessionMask operator |(SessionMask m1, SessionMask m2)
    {
        return new SessionMask(m1.m_mask | m2.m_mask);
    }

    public static SessionMask operator &(SessionMask m1, SessionMask m2)
    {
        return new SessionMask(m1.m_mask & m2.m_mask);
    }

    public static SessionMask operator ^(SessionMask m1, SessionMask m2)
    {
        return new SessionMask(m1.m_mask ^ m2.m_mask);
    }

    // Complement, restricted to the MAX reserved session bits.
    public static SessionMask operator ~(SessionMask m)
    {
        return new SessionMask(MASK & ~(m.m_mask));
    }

    public static explicit operator ulong(SessionMask m)
    { return m.m_mask; }

    public static explicit operator uint(SessionMask m)
    { return m.m_mask; }

    private uint m_mask;

    internal const int SHIFT_SESSION_TO_KEYWORD = 44;   // bits 44-47 inclusive are reserved
    internal const uint MASK = 0x0fU;                   // the mask of 4 reserved bits
    internal const uint MAX = 4;                        // maximum number of simultaneous ETW sessions supported
}

/// <summary>
/// code:EventDispatchers are a simple 'helper' structure that holds the filtering state
/// (m_EventEnabled) for a particular EventSource X EventListener tuple
///
/// Thus a single EventListener may have many EventDispatchers (one for every EventSource
/// that that EventListener has activated) and a Single EventSource may also have many
/// event Dispatchers (one for every EventListener that has activated it).
///
/// Logically a particular EventDispatcher belongs to exactly one EventSource and exactly
/// one EventListener (although EventDispatcher does not 'remember' the EventSource it is
/// associated with).
/// </summary>
internal class EventDispatcher
{
    internal EventDispatcher(EventDispatcher next, bool[] eventEnabled, EventListener listener)
    {
        m_Next = next;
        m_EventEnabled = eventEnabled;
        m_Listener = listener;
    }

    // Instance fields
    readonly internal EventListener m_Listener;   // The dispatcher this entry is for
    internal bool[] m_EventEnabled;               // For every event in the eventSource, is it enabled?
#if FEATURE_ACTIVITYSAMPLING
    internal bool m_activityFilteringEnabled;     // does THIS EventSource have activity filtering turned on for this listener?
#endif // FEATURE_ACTIVITYSAMPLING

    // Only guaranteed to exist after a InsureInit()
    internal EventDispatcher m_Next;              // These form a linked list in code:EventSource.m_Dispatchers
    // Of all listeners for that eventSource.
}

/// <summary>
/// Flags that can be used with EventSource.GenerateManifest to control how the ETW manifest for the EventSource is
/// generated.
/// </summary>
[Flags]
public enum EventManifestOptions
{
    /// <summary>
    /// Only the resources associated with current UI culture are included in the manifest
    /// </summary>
    None = 0x0,
    /// <summary>
    /// Throw exceptions for any inconsistency encountered
    /// </summary>
    Strict = 0x1,
    /// <summary>
    /// Generate a "resources" node under "localization" for every satellite assembly provided
    /// </summary>
    AllCultures = 0x2,
    /// <summary>
    /// Generate the manifest only if the event source needs to be registered on the machine,
    /// otherwise return null (but still perform validation if Strict is specified)
    /// </summary>
    OnlyIfNeededForRegistration = 0x4,
    /// <summary>
    /// When generating the manifest do *not* enforce the rule that the current EventSource class
    /// must be the base class for the user-defined type passed in. This allows validation of .net
    /// event sources using the new validation code
    /// </summary>
    AllowEventSourceOverride = 0x8,
}

/// <summary>
/// ManifestBuilder is designed to isolate the details of the message of the event from the
/// rest of EventSource. This one happens to create XML.
/// </summary>
internal class ManifestBuilder
{
    /// <summary>
    /// Build a manifest for 'providerName' with the given GUID, which will be packaged into 'dllName'.
    /// 'resources' is a resource manager. If specified all messages are localized using that manager.
    /// </summary>
    public ManifestBuilder(string providerName, Guid providerGuid, string dllName, ResourceManager resources,
                           EventManifestOptions flags)
    {
#if FEATURE_MANAGED_ETW_CHANNELS
        this.providerName = providerName;
#endif
        this.flags = flags;

        this.resources = resources;
        sb = new StringBuilder();
        events = new StringBuilder();
        templates = new StringBuilder();
        opcodeTab = new Dictionary<int, string>();
        stringTab = new Dictionary<string, string>();
        errors = new List<string>();
        perEventByteArrayArgIndices = new Dictionary<string, List<int>>();

        // Emit the fixed manifest preamble; the <provider> element stays open so the
        // Add*/StartEvent methods can append to it until CreateManifestString() closes it.
        sb.AppendLine("<instrumentationManifest xmlns=\"http://schemas.microsoft.com/win/2004/08/events\">");
        sb.AppendLine(" <instrumentation xmlns:xs=\"http://www.w3.org/2001/XMLSchema\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns:win=\"http://manifests.microsoft.com/win/2004/08/windows/events\">");
        sb.AppendLine(" <events xmlns=\"http://schemas.microsoft.com/win/2004/08/events\">");
        sb.Append("<provider name=\"").Append(providerName).
           Append("\" guid=\"{").Append(providerGuid.ToString()).Append("}");
        if (dllName != null)
            sb.Append("\" resourceFileName=\"").Append(dllName).Append("\" messageFileName=\"").Append(dllName);

        var symbolsName = providerName.Replace("-", "").Replace(".", "_");  // Period and - are illegal, replace them.
        sb.Append("\" symbol=\"").Append(symbolsName);
        sb.Append("\">").AppendLine();
    }

    /// <summary>
    /// Registers an opcode name/value pair. In Strict mode, validates the value range
    /// (user opcodes must be 11..238) and rejects two different names for the same value.
    /// </summary>
    public void AddOpcode(string name, int value)
    {
        if ((flags & EventManifestOptions.Strict) != 0)
        {
            if (value <= 10 || value >= 239)
                ManifestError(Environment.GetResourceString("EventSource_IllegalOpcodeValue", name, value));
            string prevName;
            if (opcodeTab.TryGetValue(value, out prevName) && !name.Equals(prevName, StringComparison.Ordinal))
                ManifestError(Environment.GetResourceString("EventSource_OpcodeCollision", name, prevName, value));
        }
        opcodeTab[value] = name;
    }

    /// <summary>
    /// Registers a task name/value pair. In Strict mode, validates the value range
    /// (1..65534) and rejects two different names for the same value.
    /// </summary>
    public void AddTask(string name, int value)
    {
        if ((flags & EventManifestOptions.Strict) != 0)
        {
            if (value <= 0 || value >= 65535)
                ManifestError(Environment.GetResourceString("EventSource_IllegalTaskValue", name, value));
            string prevName;
            if (taskTab != null && taskTab.TryGetValue(value, out prevName) && !name.Equals(prevName, StringComparison.Ordinal))
                ManifestError(Environment.GetResourceString("EventSource_TaskCollision", name, prevName, value));
        }
        if (taskTab == null)
            taskTab = new Dictionary<int, string>();
        taskTab[value] = name;
    }

    /// <summary>
    /// Registers a keyword name/value pair. Keywords must be a single bit; in Strict mode
    /// the high (session-reserved) bits are only allowed for names starting with "Session",
    /// and two different names for the same value are rejected.
    /// </summary>
    public void AddKeyword(string name, ulong value)
    {
        if ((value & (value - 1)) != 0)   // Is it a power of 2?
            ManifestError(Environment.GetResourceString("EventSource_KeywordNeedPowerOfTwo", "0x" + value.ToString("x", CultureInfo.CurrentCulture), name), true);
        if ((flags & EventManifestOptions.Strict) != 0)
        {
            if (value >= 0x0000100000000000UL && !name.StartsWith("Session", StringComparison.Ordinal))
                ManifestError(Environment.GetResourceString("EventSource_IllegalKeywordsValue", name, "0x" + value.ToString("x", CultureInfo.CurrentCulture)));
            string prevName;
            if (keywordTab != null && keywordTab.TryGetValue(value, out prevName) && !name.Equals(prevName, StringComparison.Ordinal))
                ManifestError(Environment.GetResourceString("EventSource_KeywordCollision", name, prevName, "0x" + value.ToString("x", CultureInfo.CurrentCulture)));
        }
        if (keywordTab == null)
            keywordTab = new Dictionary<ulong, string>();
        keywordTab[value] = name;
    }

#if FEATURE_MANAGED_ETW_CHANNELS
    /// <summary>
    /// Add a channel.  channelAttribute can be null
    /// </summary>
    public void AddChannel(string name, int value, EventChannelAttribute channelAttribute)
    {
        EventChannel chValue = (EventChannel)value;
        if (value < (int)EventChannel.Admin || value > 255)
            ManifestError(Environment.GetResourceString("EventSource_EventChannelOutOfRange", name, value));
        else if (chValue >= EventChannel.Admin && chValue <= EventChannel.Debug &&
                 channelAttribute != null && EventChannelToChannelType(chValue) != channelAttribute.EventChannelType)
        {
            // we want to ensure developers do not define EventChannels that conflict with the builtin ones,
            // but we want to allow them to override the default ones...
            ManifestError(Environment.GetResourceString("EventSource_ChannelTypeDoesNotMatchEventChannelValue",
                                                        name, ((EventChannel)value).ToString()));
        }

        // TODO: validate there are no conflicting manifest exposed names (generally following the format "provider/type")

        ulong kwd = GetChannelKeyword(chValue);

        if (channelTab == null)
            channelTab = new Dictionary<int, ChannelInfo>(4);
        channelTab[value] = new ChannelInfo { Name = name, Keywords = kwd, Attribs = channelAttribute };
    }

    // Maps a built-in EventChannel value (Admin..Debug) to the corresponding EventChannelType.
    private EventChannelType EventChannelToChannelType(EventChannel channel)
    {
#if !ES_BUILD_STANDALONE
        Contract.Assert(channel >= EventChannel.Admin && channel <= EventChannel.Debug);
#endif
        return (EventChannelType)((int)channel - (int)EventChannel.Admin + (int)EventChannelType.Admin);
    }

    // Builds the default attribute for a built-in channel: its matching channel type,
    // enabled when the type is Admin or Operational.
    private EventChannelAttribute GetDefaultChannelAttribute(EventChannel channel)
    {
        EventChannelAttribute attrib = new EventChannelAttribute();
        attrib.EventChannelType = EventChannelToChannelType(channel);
        if (attrib.EventChannelType <= EventChannelType.Operational)
            attrib.Enabled = true;
        return attrib;
    }

    /// <summary>
    /// Returns an array indexed by channel id whose entries are that channel's keyword
    /// bits; empty array if no channels were registered.
    /// </summary>
    public ulong[] GetChannelData()
    {
        if (this.channelTab == null)
        {
            return new ulong[0];
        }

        // We create an array indexed by the channel id for fast look up.
        // E.g. channelMask[Admin] will give you the bit mask for Admin channel.
        int maxkey = -1;
        foreach (var item in this.channelTab.Keys)
        {
            if (item > maxkey)
            {
                maxkey = item;
            }
        }

        ulong[] channelMask = new ulong[maxkey + 1];
        foreach (var item in this.channelTab)
        {
            channelMask[item.Key] = item.Value.Keywords;
        }

        return channelMask;
    }

#endif
    /// <summary>
    /// Begins the manifest entry for one event: opens its &lt;event&gt; element with id,
    /// version, level, symbol, and (if present) message, keywords, opcode, task and channel
    /// attributes. Must be paired with EndEvent(); AddEventParameter() may be called in between.
    /// </summary>
    public void StartEvent(string eventName, EventAttribute eventAttribute)
    {
        Contract.Assert(numParams == 0);
        Contract.Assert(this.eventName == null);
        this.eventName = eventName;
        numParams = 0;
        byteArrArgIndices = null;

        events.Append(" <event").
             Append(" value=\"").Append(eventAttribute.EventId).Append("\"").
             Append(" version=\"").Append(eventAttribute.Version).Append("\"").
             Append(" level=\"").Append(GetLevelName(eventAttribute.Level)).Append("\"").
             Append(" symbol=\"").Append(eventName).Append("\"");

        // at this point we add to the manifest's stringTab a message that is as-of-yet
        // "untranslated to manifest convention", b/c we don't have the number or position
        // of any byte[] args (which require string format index updates)
        WriteMessageAttrib(events, "event", eventName, eventAttribute.Message);

        if (eventAttribute.Keywords != 0)
            events.Append(" keywords=\"").Append(GetKeywords((ulong)eventAttribute.Keywords, eventName)).Append("\"");
        if (eventAttribute.Opcode != 0)
            events.Append(" opcode=\"").Append(GetOpcodeName(eventAttribute.Opcode, eventName)).Append("\"");
        if (eventAttribute.Task != 0)
            events.Append(" task=\"").Append(GetTaskName(eventAttribute.Task, eventName)).Append("\"");
#if FEATURE_MANAGED_ETW_CHANNELS
        if (eventAttribute.Channel != 0)
        {
            events.Append(" channel=\"").Append(GetChannelName(eventAttribute.Channel, eventName, eventAttribute.Message)).Append("\"");
        }
#endif
    }

    /// <summary>
    /// Adds one parameter to the current event's template. byte[] parameters get an extra
    /// implicit "&lt;name&gt;Size" UInt32 field and a length attribute; enum parameters
    /// (except 64-bit ones, unsupported by ETW) are registered as value maps.
    /// </summary>
    public void AddEventParameter(Type type, string name)
    {
        if (numParams == 0)
            templates.Append(" <template tid=\"").Append(eventName).Append("Args\">").AppendLine();
        if (type == typeof(byte[]))
        {
            // mark this index as "extraneous" (it has no parallel in the managed signature)
            // we use these values in TranslateToManifestConvention()
            if (byteArrArgIndices == null)
                byteArrArgIndices = new List<int>(4);
            byteArrArgIndices.Add(numParams);

            // add an extra field to the template representing the length of the binary blob
            numParams++;
            templates.Append(" <data name=\"").Append(name).Append("Size\" inType=\"win:UInt32\"/>").AppendLine();
        }
        numParams++;
        templates.Append(" <data name=\"").Append(name).Append("\" inType=\"").Append(GetTypeName(type)).Append("\"");
        // TODO: for 'byte*' types it assumes the user provided length is named using the same naming convention
        //       as for 'byte[]' args (blob_arg_name + "Size")
        if ((type.IsArray || type.IsPointer) && type.GetElementType() == typeof(byte))
        {
            // add "length" attribute to the "blob" field in the template (referencing the field added above)
            templates.Append(" length=\"").Append(name).Append("Size\"");
        }
        // ETW does not support 64-bit value maps, so we don't specify these as ETW maps
        if (type.IsEnum() && Enum.GetUnderlyingType(type) != typeof(UInt64) && Enum.GetUnderlyingType(type) != typeof(Int64))
        {
            templates.Append(" map=\"").Append(type.Name).Append("\"");
            if (mapsTab == null)
                mapsTab = new Dictionary<string, Type>();
            if (!mapsTab.ContainsKey(type.Name))
                mapsTab.Add(type.Name, type);   // Remember that we need to dump the type enumeration
        }
        templates.Append("/>").AppendLine();
    }

    /// <summary>
    /// Closes the current event: finishes the template (if parameters were added), closes
    /// the &lt;event&gt; element, records any byte[] argument indices, and rewrites the
    /// event's message into manifest format-string convention.
    /// </summary>
    public void EndEvent()
    {
        if (numParams > 0)
        {
            templates.Append(" </template>").AppendLine();
            events.Append(" template=\"").Append(eventName).Append("Args\"");
        }
        events.Append("/>").AppendLine();

        if (byteArrArgIndices != null)
            perEventByteArrayArgIndices[eventName] = byteArrArgIndices;

        // at this point we have all the information we need to translate the C# Message
        // to the manifest string we'll put in the stringTab
        string msg;
        if (stringTab.TryGetValue("event_" + eventName, out msg))
        {
            msg = TranslateToManifestConvention(msg, eventName);
            stringTab["event_" + eventName] = msg;
        }

        eventName = null;
        numParams = 0;
        byteArrArgIndices = null;
    }

#if FEATURE_MANAGED_ETW_CHANNELS
    // Channel keywords are generated one per channel to allow channel based filtering in event viewer. These keywords are autogenerated
    // by mc.exe for compiling a manifest and are based on the order of the channels (fields) in the Channels inner class (when advanced
    // channel support is enabled), or based on the order the predefined channels appear in the EventAttribute properties (for simple
    // support). The manifest generated *MUST* have the channels specified in the same order (that's how our computed keywords are mapped
    // to channels by the OS infrastructure).
public ulong GetChannelKeyword(EventChannel channel) { if (channelTab == null) { channelTab = new Dictionary<int, ChannelInfo>(4); } if (channelTab.Count == MaxCountChannels) ManifestError(Environment.GetResourceString("EventSource_MaxChannelExceeded")); ulong channelKeyword; ChannelInfo info; if (!channelTab.TryGetValue((int)channel, out info)) { channelKeyword = nextChannelKeywordBit; nextChannelKeywordBit >>= 1; } else { channelKeyword = info.Keywords; } return channelKeyword; } #endif public byte[] CreateManifest() { string str = CreateManifestString(); return Encoding.UTF8.GetBytes(str); } public IList<string> Errors { get { return errors; } } /// <summary> /// When validating an event source it adds the error to the error collection. /// When not validating it throws an exception if runtimeCritical is "true". /// Otherwise the error is ignored. /// </summary> /// <param name="msg"></param> /// <param name="runtimeCritical"></param> public void ManifestError(string msg, bool runtimeCritical = false) { if ((flags & EventManifestOptions.Strict) != 0) errors.Add(msg); else if (runtimeCritical) throw new ArgumentException(msg); } private string CreateManifestString() { #if FEATURE_MANAGED_ETW_CHANNELS // Write out the channels if (channelTab != null) { sb.Append(" <channels>").AppendLine(); var sortedChannels = new List<KeyValuePair<int, ChannelInfo>>(); foreach (KeyValuePair<int, ChannelInfo> p in channelTab) { sortedChannels.Add(p); } sortedChannels.Sort((p1, p2) => -Comparer<ulong>.Default.Compare(p1.Value.Keywords, p2.Value.Keywords)); foreach (var kvpair in sortedChannels) { int channel = kvpair.Key; ChannelInfo channelInfo = kvpair.Value; string channelType = null; string elementName = "channel"; bool enabled = false; string fullName = null; #if FEATURE_ADVANCED_MANAGED_ETW_CHANNELS string isolation = null; string access = null; #endif if (channelInfo.Attribs != null) { var attribs = channelInfo.Attribs; if (Enum.IsDefined(typeof(EventChannelType), 
attribs.EventChannelType)) channelType = attribs.EventChannelType.ToString(); enabled = attribs.Enabled; #if FEATURE_ADVANCED_MANAGED_ETW_CHANNELS if (attribs.ImportChannel != null) { fullName = attribs.ImportChannel; elementName = "importChannel"; } if (Enum.IsDefined(typeof(EventChannelIsolation), attribs.Isolation)) isolation = attribs.Isolation.ToString(); access = attribs.Access; #endif } if (fullName == null) fullName = providerName + "/" + channelInfo.Name; sb.Append(" <").Append(elementName); sb.Append(" chid=\"").Append(channelInfo.Name).Append("\""); sb.Append(" name=\"").Append(fullName).Append("\""); if (elementName == "channel") // not applicable to importChannels. { WriteMessageAttrib(sb, "channel", channelInfo.Name, null); sb.Append(" value=\"").Append(channel).Append("\""); if (channelType != null) sb.Append(" type=\"").Append(channelType).Append("\""); sb.Append(" enabled=\"").Append(enabled.ToString().ToLower()).Append("\""); #if FEATURE_ADVANCED_MANAGED_ETW_CHANNELS if (access != null) sb.Append(" access=\"").Append(access).Append("\""); if (isolation != null) sb.Append(" isolation=\"").Append(isolation).Append("\""); #endif } sb.Append("/>").AppendLine(); } sb.Append(" </channels>").AppendLine(); } #endif // Write out the tasks if (taskTab != null) { sb.Append(" <tasks>").AppendLine(); var sortedTasks = new List<int>(taskTab.Keys); sortedTasks.Sort(); foreach (int task in sortedTasks) { sb.Append(" <task"); WriteNameAndMessageAttribs(sb, "task", taskTab[task]); sb.Append(" value=\"").Append(task).Append("\"/>").AppendLine(); } sb.Append(" </tasks>").AppendLine(); } // Write out the maps if (mapsTab != null) { sb.Append(" <maps>").AppendLine(); foreach (Type enumType in mapsTab.Values) { bool isbitmap = EventSource.GetCustomAttributeHelper(enumType, typeof(FlagsAttribute), flags) != null; string mapKind = isbitmap ? 
"bitMap" : "valueMap"; sb.Append(" <").Append(mapKind).Append(" name=\"").Append(enumType.Name).Append("\">").AppendLine(); // write out each enum value FieldInfo[] staticFields = enumType.GetFields(BindingFlags.DeclaredOnly | BindingFlags.Public | BindingFlags.Static); foreach (FieldInfo staticField in staticFields) { object constantValObj = staticField.GetRawConstantValue(); if (constantValObj != null) { long hexValue; if (constantValObj is int) hexValue = ((int)constantValObj); else if (constantValObj is long) hexValue = ((long)constantValObj); else continue; // ETW requires all bitmap values to be powers of 2. Skip the ones that are not. // TODO: Warn people about the dropping of values. if (isbitmap && ((hexValue & (hexValue - 1)) != 0 || hexValue == 0)) continue; sb.Append(" <map value=\"0x").Append(hexValue.ToString("x", CultureInfo.InvariantCulture)).Append("\""); WriteMessageAttrib(sb, "map", enumType.Name + "." + staticField.Name, staticField.Name); sb.Append("/>").AppendLine(); } } sb.Append(" </").Append(mapKind).Append(">").AppendLine(); } sb.Append(" </maps>").AppendLine(); } // Write out the opcodes sb.Append(" <opcodes>").AppendLine(); var sortedOpcodes = new List<int>(opcodeTab.Keys); sortedOpcodes.Sort(); foreach (int opcode in sortedOpcodes) { sb.Append(" <opcode"); WriteNameAndMessageAttribs(sb, "opcode", opcodeTab[opcode]); sb.Append(" value=\"").Append(opcode).Append("\"/>").AppendLine(); } sb.Append(" </opcodes>").AppendLine(); // Write out the keywords if (keywordTab != null) { sb.Append(" <keywords>").AppendLine(); var sortedKeywords = new List<ulong>(keywordTab.Keys); sortedKeywords.Sort(); foreach (ulong keyword in sortedKeywords) { sb.Append(" <keyword"); WriteNameAndMessageAttribs(sb, "keyword", keywordTab[keyword]); sb.Append(" mask=\"0x").Append(keyword.ToString("x", CultureInfo.InvariantCulture)).Append("\"/>").AppendLine(); } sb.Append(" </keywords>").AppendLine(); } sb.Append(" <events>").AppendLine(); sb.Append(events); 
sb.Append(" </events>").AppendLine(); sb.Append(" <templates>").AppendLine(); if (templates.Length > 0) { sb.Append(templates); } else { // Work around a cornercase ETW issue where a manifest with no templates causes // ETW events to not get sent to their associated channel. sb.Append(" <template tid=\"_empty\"></template>").AppendLine(); } sb.Append(" </templates>").AppendLine(); sb.Append("</provider>").AppendLine(); sb.Append("</events>").AppendLine(); sb.Append("</instrumentation>").AppendLine(); // Output the localization information. sb.Append("<localization>").AppendLine(); List<CultureInfo> cultures = null; if (resources != null && (flags & EventManifestOptions.AllCultures) != 0) { cultures = GetSupportedCultures(resources); } else { cultures = new List<CultureInfo>(); cultures.Add(CultureInfo.CurrentUICulture); } #if ES_BUILD_STANDALONE var sortedStrings = new List<string>(stringTab.Keys); sortedStrings.Sort(); #else // DD 947936 var sortedStrings = new string[stringTab.Keys.Count]; stringTab.Keys.CopyTo(sortedStrings, 0); // Avoid using public Array.Sort as that attempts to access BinaryCompatibility. Unfortunately FrameworkEventSource gets called // very early in the app domain creation, when _FusionStore is not set up yet, resulting in a failure to run the static constructory // for BinaryCompatibility. This failure is then cached and a TypeInitializationException is thrown every time some code attampts to // access BinaryCompatibility. 
ArraySortHelper<string>.IntrospectiveSort(sortedStrings, 0, sortedStrings.Length, Comparer<string>.Default); #endif foreach (var ci in cultures) { sb.Append(" <resources culture=\"").Append(ci.Name).Append("\">").AppendLine(); sb.Append(" <stringTable>").AppendLine(); foreach (var stringKey in sortedStrings) { string val = GetLocalizedMessage(stringKey, ci, etwFormat: true); sb.Append(" <string id=\"").Append(stringKey).Append("\" value=\"").Append(val).Append("\"/>").AppendLine(); } sb.Append(" </stringTable>").AppendLine(); sb.Append(" </resources>").AppendLine(); } sb.Append("</localization>").AppendLine(); sb.AppendLine("</instrumentationManifest>"); return sb.ToString(); } #region private private void WriteNameAndMessageAttribs(StringBuilder stringBuilder, string elementName, string name) { stringBuilder.Append(" name=\"").Append(name).Append("\""); WriteMessageAttrib(sb, elementName, name, name); } private void WriteMessageAttrib(StringBuilder stringBuilder, string elementName, string name, string value) { string key = elementName + "_" + name; // See if the user wants things localized. 
if (resources != null) { // resource fallback: strings in the neutral culture will take precedence over inline strings string localizedString = resources.GetString(key, CultureInfo.InvariantCulture); if (localizedString != null) value = localizedString; } if (value == null) return; stringBuilder.Append(" message=\"$(string.").Append(key).Append(")\""); string prevValue; if (stringTab.TryGetValue(key, out prevValue) && !prevValue.Equals(value)) { ManifestError(Environment.GetResourceString("EventSource_DuplicateStringKey", key), true); return; } stringTab[key] = value; } internal string GetLocalizedMessage(string key, CultureInfo ci, bool etwFormat) { string value = null; if (resources != null) { string localizedString = resources.GetString(key, ci); if (localizedString != null) { value = localizedString; if (etwFormat && key.StartsWith("event_")) { var evtName = key.Substring("event_".Length); value = TranslateToManifestConvention(value, evtName); } } } if (etwFormat && value == null) stringTab.TryGetValue(key, out value); return value; } /// <summary> /// There's no API to enumerate all languages an assembly is localized into, so instead /// we enumerate through all the "known" cultures and attempt to load a corresponding satellite /// assembly /// </summary> /// <param name="resources"></param> /// <returns></returns> private static List<CultureInfo> GetSupportedCultures(ResourceManager resources) { var cultures = new List<CultureInfo>(); #if !ES_BUILD_PCL && !FEATURE_CORECLR foreach (CultureInfo ci in CultureInfo.GetCultures(CultureTypes.SpecificCultures /*| CultureTypes.NeutralCultures*/)) { if (resources.GetResourceSet(ci, true, false) != null) cultures.Add(ci); } #endif // !ES_BUILD_PCL && !FEATURE_CORECLR if (!cultures.Contains(CultureInfo.CurrentUICulture)) cultures.Insert(0, CultureInfo.CurrentUICulture); return cultures; } private static string GetLevelName(EventLevel level) { return (((int)level >= 16) ? 
"" : "win:") + level.ToString(); } #if FEATURE_MANAGED_ETW_CHANNELS private string GetChannelName(EventChannel channel, string eventName, string eventMessage) { ChannelInfo info = null; if (channelTab == null || !channelTab.TryGetValue((int)channel, out info)) { if (channel < EventChannel.Admin) // || channel > EventChannel.Debug) ManifestError(Environment.GetResourceString("EventSource_UndefinedChannel", channel, eventName)); // allow channels to be auto-defined. The well known ones get their well known names, and the // rest get names Channel<N>. This allows users to modify the Manifest if they want more advanced features. if (channelTab == null) channelTab = new Dictionary<int, ChannelInfo>(4); string channelName = channel.ToString(); // For well know channels this is a nice name, otherwise a number if (EventChannel.Debug < channel) channelName = "Channel" + channelName; // Add a 'Channel' prefix for numbers. AddChannel(channelName, (int)channel, GetDefaultChannelAttribute(channel)); if (!channelTab.TryGetValue((int)channel, out info)) ManifestError(Environment.GetResourceString("EventSource_UndefinedChannel", channel, eventName)); } // events that specify admin channels *must* have non-null "Message" attributes if (resources != null && eventMessage == null) eventMessage = resources.GetString("event_" + eventName, CultureInfo.InvariantCulture); if (info.Attribs.EventChannelType == EventChannelType.Admin && eventMessage == null) ManifestError(Environment.GetResourceString("EventSource_EventWithAdminChannelMustHaveMessage", eventName, info.Name)); return info.Name; } #endif private string GetTaskName(EventTask task, string eventName) { if (task == EventTask.None) return ""; string ret; if (taskTab == null) taskTab = new Dictionary<int, string>(); if (!taskTab.TryGetValue((int)task, out ret)) ret = taskTab[(int)task] = eventName; return ret; } private string GetOpcodeName(EventOpcode opcode, string eventName) { switch (opcode) { case EventOpcode.Info: return 
"win:Info"; case EventOpcode.Start: return "win:Start"; case EventOpcode.Stop: return "win:Stop"; case EventOpcode.DataCollectionStart: return "win:DC_Start"; case EventOpcode.DataCollectionStop: return "win:DC_Stop"; case EventOpcode.Extension: return "win:Extension"; case EventOpcode.Reply: return "win:Reply"; case EventOpcode.Resume: return "win:Resume"; case EventOpcode.Suspend: return "win:Suspend"; case EventOpcode.Send: return "win:Send"; case EventOpcode.Receive: return "win:Receive"; } string ret; if (opcodeTab == null || !opcodeTab.TryGetValue((int)opcode, out ret)) { ManifestError(Environment.GetResourceString("EventSource_UndefinedOpcode", opcode, eventName), true); ret = null; } return ret; } private string GetKeywords(ulong keywords, string eventName) { string ret = ""; for (ulong bit = 1; bit != 0; bit <<= 1) { if ((keywords & bit) != 0) { string keyword = null; if ((keywordTab == null || !keywordTab.TryGetValue(bit, out keyword)) && (bit >= (ulong)0x1000000000000)) { // do not report Windows reserved keywords in the manifest (this allows the code // to be resilient to potential renaming of these keywords) keyword = string.Empty; } if (keyword == null) { ManifestError(Environment.GetResourceString("EventSource_UndefinedKeyword", "0x" + bit.ToString("x", CultureInfo.CurrentCulture), eventName), true); keyword = string.Empty; } if (ret.Length != 0 && keyword.Length != 0) ret = ret + " "; ret = ret + keyword; } } return ret; } private string GetTypeName(Type type) { if (type.IsEnum()) { FieldInfo[] fields = type.GetFields(BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Instance); var typeName = GetTypeName(fields[0].FieldType); return typeName.Replace("win:Int", "win:UInt"); // ETW requires enums to be unsigned. 
} switch (type.GetTypeCode()) { case TypeCode.Boolean: return "win:Boolean"; case TypeCode.Byte: return "win:UInt8"; case TypeCode.Char: case TypeCode.UInt16: return "win:UInt16"; case TypeCode.UInt32: return "win:UInt32"; case TypeCode.UInt64: return "win:UInt64"; case TypeCode.SByte: return "win:Int8"; case TypeCode.Int16: return "win:Int16"; case TypeCode.Int32: return "win:Int32"; case TypeCode.Int64: return "win:Int64"; case TypeCode.String: return "win:UnicodeString"; case TypeCode.Single: return "win:Float"; case TypeCode.Double: return "win:Double"; case TypeCode.DateTime: return "win:FILETIME"; default: if (type == typeof(Guid)) return "win:GUID"; else if (type == typeof(IntPtr)) return "win:Pointer"; else if ((type.IsArray || type.IsPointer) && type.GetElementType() == typeof(byte)) return "win:Binary"; ManifestError(Environment.GetResourceString("EventSource_UnsupportedEventTypeInManifest", type.Name), true); return string.Empty; } } private static void UpdateStringBuilder(ref StringBuilder stringBuilder, string eventMessage, int startIndex, int count) { if (stringBuilder == null) stringBuilder = new StringBuilder(); stringBuilder.Append(eventMessage, startIndex, count); } // Manifest messages use %N conventions for their message substitutions. Translate from // .NET conventions. 
We can't use RegEx for this (we are in mscorlib), so we do it 'by hand' private string TranslateToManifestConvention(string eventMessage, string evtName) { StringBuilder stringBuilder = null; // We lazily create this int writtenSoFar = 0; int chIdx = -1; for (int i = 0; ; ) { if (i >= eventMessage.Length) { if (stringBuilder == null) return eventMessage; UpdateStringBuilder(ref stringBuilder, eventMessage, writtenSoFar, i - writtenSoFar); return stringBuilder.ToString(); } if (eventMessage[i] == '%') { // handle format message escaping character '%' by escaping it UpdateStringBuilder(ref stringBuilder, eventMessage, writtenSoFar, i - writtenSoFar); stringBuilder.Append("%%"); i++; writtenSoFar = i; } else if (i < eventMessage.Length - 1 && (eventMessage[i] == '{' && eventMessage[i + 1] == '{' || eventMessage[i] == '}' && eventMessage[i + 1] == '}')) { // handle C# escaped '{" and '}' UpdateStringBuilder(ref stringBuilder, eventMessage, writtenSoFar, i - writtenSoFar); stringBuilder.Append(eventMessage[i]); i++; i++; writtenSoFar = i; } else if (eventMessage[i] == '{') { int leftBracket = i; i++; int argNum = 0; while (i < eventMessage.Length && Char.IsDigit(eventMessage[i])) { argNum = argNum * 10 + eventMessage[i] - '0'; i++; } if (i < eventMessage.Length && eventMessage[i] == '}') { i++; UpdateStringBuilder(ref stringBuilder, eventMessage, writtenSoFar, leftBracket - writtenSoFar); int manIndex = TranslateIndexToManifestConvention(argNum, evtName); stringBuilder.Append('%').Append(manIndex); // An '!' after the insert specifier {n} will be interpreted as a literal. // We'll escape it so that mc.exe does not attempt to consider it the // beginning of a format string. 
if (i < eventMessage.Length && eventMessage[i] == '!') { i++; stringBuilder.Append("%!"); } writtenSoFar = i; } else { ManifestError(Environment.GetResourceString("EventSource_UnsupportedMessageProperty", evtName, eventMessage)); } } else if ((chIdx = "&<>'\"\r\n\t".IndexOf(eventMessage[i])) >= 0) { string[] escapes = { "&amp;", "&lt;", "&gt;", "&apos;", "&quot;", "%r", "%n", "%t" }; var update = new Action<char, string>( (ch, escape) => { UpdateStringBuilder(ref stringBuilder, eventMessage, writtenSoFar, i - writtenSoFar); i++; stringBuilder.Append(escape); writtenSoFar = i; }); update(eventMessage[i], escapes[chIdx]); } else i++; } } private int TranslateIndexToManifestConvention(int idx, string evtName) { List<int> byteArrArgIndices; if (perEventByteArrayArgIndices.TryGetValue(evtName, out byteArrArgIndices)) { foreach (var byArrIdx in byteArrArgIndices) { if (idx >= byArrIdx) ++idx; else break; } } return idx + 1; } #if FEATURE_MANAGED_ETW_CHANNELS class ChannelInfo { public string Name; public ulong Keywords; public EventChannelAttribute Attribs; } #endif Dictionary<int, string> opcodeTab; Dictionary<int, string> taskTab; #if FEATURE_MANAGED_ETW_CHANNELS Dictionary<int, ChannelInfo> channelTab; #endif Dictionary<ulong, string> keywordTab; Dictionary<string, Type> mapsTab; Dictionary<string, string> stringTab; // Maps unlocalized strings to localized ones #if FEATURE_MANAGED_ETW_CHANNELS ulong nextChannelKeywordBit = 0x8000000000000000; // available Keyword bit to be used for next channel definition const int MaxCountChannels = 8; // a manifest can defined at most 8 ETW channels #endif StringBuilder sb; // Holds the provider information. StringBuilder events; // Holds the events. StringBuilder templates; #if FEATURE_MANAGED_ETW_CHANNELS string providerName; #endif ResourceManager resources; // Look up localized strings here. 
EventManifestOptions flags; IList<string> errors; // list of currently encountered errors Dictionary<string, List<int>> perEventByteArrayArgIndices; // "event_name" -> List_of_Indices_of_Byte[]_Arg // State we track between StartEvent and EndEvent. string eventName; // Name of the event currently being processed. int numParams; // keeps track of the number of args the event has. List<int> byteArrArgIndices; // keeps track of the index of each byte[] argument #endregion } /// <summary> /// Used to send the m_rawManifest into the event dispatcher as a series of events. /// </summary> internal struct ManifestEnvelope { public const int MaxChunkSize = 0xFF00; public enum ManifestFormats : byte { SimpleXmlFormat = 1, // simply dump the XML manifest as UTF8 } public ManifestFormats Format; public byte MajorVersion; public byte MinorVersion; public byte Magic; public ushort TotalChunks; public ushort ChunkNumber; }; #endregion }
{ "content_hash": "4e37316dc3f4b75761f52c419997cb77", "timestamp": "", "source": "github", "line_count": 6924, "max_line_length": 222, "avg_line_length": 47.61712882726748, "alnum_prop": 0.5499861996172289, "repo_name": "geertdoornbos/coreclr", "id": "2eb7c38e3c8b864001e3e02fff9833ebcc3e71df", "size": "329703", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/mscorlib/src/System/Diagnostics/Eventing/EventSource.cs", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "940308" }, { "name": "Awk", "bytes": "5652" }, { "name": "Batchfile", "bytes": "34605" }, { "name": "C", "bytes": "6442193" }, { "name": "C#", "bytes": "118913923" }, { "name": "C++", "bytes": "67206063" }, { "name": "CMake", "bytes": "528754" }, { "name": "Groff", "bytes": "529523" }, { "name": "Groovy", "bytes": "19067" }, { "name": "HTML", "bytes": "16196" }, { "name": "Makefile", "bytes": "2314" }, { "name": "Objective-C", "bytes": "224503" }, { "name": "Perl", "bytes": "63850" }, { "name": "PowerShell", "bytes": "4332" }, { "name": "Python", "bytes": "8165" }, { "name": "Shell", "bytes": "59294" }, { "name": "Smalltalk", "bytes": "1359502" } ], "symlink_target": "" }
using System.Resources; using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. [assembly: AssemblyTitle("ThirdDeviceXaml")] [assembly: AssemblyDescription("")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("ThirdDeviceXaml")] [assembly: AssemblyCopyright("Copyright © 2014")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] [assembly: NeutralResourcesLanguage("en")] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Build Number // Revision // // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")] [assembly: AssemblyVersion("1.0.*")] [assembly: AssemblyFileVersion("1.0.0.0")]
{ "content_hash": "a91fa43431b93eabd59755a3d7b7e39c", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 84, "avg_line_length": 36.1, "alnum_prop": 0.7451523545706371, "repo_name": "marwac-9/HomeAutomation", "id": "54deeab4c29e90b9f35a3cae56fa15e67c06e15d", "size": "1086", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "HomeAutomation/ThirdDeviceXaml/Properties/AssemblyInfo.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C#", "bytes": "107266" } ], "symlink_target": "" }
require 'spec_helper' klass = OneviewSDK::API500::C7000::PowerDevice RSpec.describe klass, integration: true, type: UPDATE do let(:current_client) { $client_500 } let(:current_secrets) { $secrets } include_examples 'PowerDeviceUpdateExample', 'integration api500 context' end
{ "content_hash": "3dec0dfaa8ca33320c679b3cc3eb0035", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 75, "avg_line_length": 35.375, "alnum_prop": 0.7561837455830389, "repo_name": "HewlettPackard/oneview-sdk-ruby", "id": "145d9688d5329815bd70df17653c15b93e050d58", "size": "894", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "spec/integration/resource/api600/c7000/power_device/update_spec.rb", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Ruby", "bytes": "2891159" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd"> <mapper namespace="com.mybatis.dao.ViewServerMapper"> <resultMap id="BaseResultMap" type="com.mybatis.model.ViewServer"> <result column="id" jdbcType="INTEGER" property="id" /> <result column="name" jdbcType="VARCHAR" property="name" /> <result column="org_id" jdbcType="INTEGER" property="orgId" /> <result column="organization_name" jdbcType="VARCHAR" property="organizationName" /> <result column="business_criticity" jdbcType="CHAR" property="businessCriticity" /> <result column="move2production" jdbcType="DATE" property="move2production" /> <result column="serialnumber" jdbcType="VARCHAR" property="serialnumber" /> <result column="location_id" jdbcType="INTEGER" property="locationId" /> <result column="location_name" jdbcType="VARCHAR" property="locationName" /> <result column="status" jdbcType="CHAR" property="status" /> <result column="brand_id" jdbcType="INTEGER" property="brandId" /> <result column="brand_name" jdbcType="VARCHAR" property="brandName" /> <result column="model_id" jdbcType="INTEGER" property="modelId" /> <result column="model_name" jdbcType="VARCHAR" property="modelName" /> <result column="asset_number" jdbcType="VARCHAR" property="assetNumber" /> <result column="purchase_date" jdbcType="DATE" property="purchaseDate" /> <result column="end_of_warranty" jdbcType="DATE" property="endOfWarranty" /> <result column="rack_id" jdbcType="INTEGER" property="rackId" /> <result column="rack_name" jdbcType="VARCHAR" property="rackName" /> <result column="enclosure_id" jdbcType="INTEGER" property="enclosureId" /> <result column="enclosure_name" jdbcType="VARCHAR" property="enclosureName" /> <result column="nb_u" jdbcType="INTEGER" property="nbU" /> <result column="managementip" jdbcType="VARCHAR" property="managementip" /> <result column="powerA_id" jdbcType="INTEGER" property="poweraId" /> <result 
column="powerA_name" jdbcType="VARCHAR" property="poweraName" /> <result column="powerB_id" jdbcType="INTEGER" property="powerbId" /> <result column="powerB_name" jdbcType="VARCHAR" property="powerbName" /> <result column="redundancy" jdbcType="VARCHAR" property="redundancy" /> <result column="osfamily_id" jdbcType="INTEGER" property="osfamilyId" /> <result column="osfamily_name" jdbcType="VARCHAR" property="osfamilyName" /> <result column="osversion_id" jdbcType="INTEGER" property="osversionId" /> <result column="osversion_name" jdbcType="VARCHAR" property="osversionName" /> <result column="oslicence_id" jdbcType="INTEGER" property="oslicenceId" /> <result column="oslicence_name" jdbcType="VARCHAR" property="oslicenceName" /> <result column="cpu" jdbcType="VARCHAR" property="cpu" /> <result column="ram" jdbcType="VARCHAR" property="ram" /> <result column="finalclass" jdbcType="VARCHAR" property="finalclass" /> <result column="friendlyname" jdbcType="VARCHAR" property="friendlyname" /> <result column="org_id_friendlyname" jdbcType="VARCHAR" property="orgIdFriendlyname" /> <result column="location_id_friendlyname" jdbcType="VARCHAR" property="locationIdFriendlyname" /> <result column="brand_id_friendlyname" jdbcType="VARCHAR" property="brandIdFriendlyname" /> <result column="model_id_friendlyname" jdbcType="VARCHAR" property="modelIdFriendlyname" /> <result column="rack_id_friendlyname" jdbcType="VARCHAR" property="rackIdFriendlyname" /> <result column="enclosure_id_friendlyname" jdbcType="VARCHAR" property="enclosureIdFriendlyname" /> <result column="powerA_id_friendlyname" jdbcType="VARCHAR" property="poweraIdFriendlyname" /> <result column="powerA_id_finalclass_recall" jdbcType="VARCHAR" property="poweraIdFinalclassRecall" /> <result column="powerB_id_friendlyname" jdbcType="VARCHAR" property="powerbIdFriendlyname" /> <result column="powerB_id_finalclass_recall" jdbcType="VARCHAR" property="powerbIdFinalclassRecall" /> <result column="osfamily_id_friendlyname" 
jdbcType="VARCHAR" property="osfamilyIdFriendlyname" /> <result column="osversion_id_friendlyname" jdbcType="VARCHAR" property="osversionIdFriendlyname" /> <result column="oslicence_id_friendlyname" jdbcType="VARCHAR" property="oslicenceIdFriendlyname" /> </resultMap> <resultMap extends="BaseResultMap" id="ResultMapWithBLOBs" type="com.mybatis.model.ViewServer"> <result column="description" jdbcType="LONGVARCHAR" property="description" /> </resultMap> <sql id="Example_Where_Clause"> <where> <foreach collection="oredCriteria" item="criteria" separator="or"> <if test="criteria.valid"> <trim prefix="(" prefixOverrides="and" suffix=")"> <foreach collection="criteria.criteria" item="criterion"> <choose> <when test="criterion.noValue"> and ${criterion.condition} </when> <when test="criterion.singleValue"> and ${criterion.condition} #{criterion.value} </when> <when test="criterion.betweenValue"> and ${criterion.condition} #{criterion.value} and #{criterion.secondValue} </when> <when test="criterion.listValue"> and ${criterion.condition} <foreach close=")" collection="criterion.value" item="listItem" open="(" separator=","> #{listItem} </foreach> </when> </choose> </foreach> </trim> </if> </foreach> </where> </sql> <sql id="Base_Column_List"> id, name, org_id, organization_name, business_criticity, move2production, serialnumber, location_id, location_name, status, brand_id, brand_name, model_id, model_name, asset_number, purchase_date, end_of_warranty, rack_id, rack_name, enclosure_id, enclosure_name, nb_u, managementip, powerA_id, powerA_name, powerB_id, powerB_name, redundancy, osfamily_id, osfamily_name, osversion_id, osversion_name, oslicence_id, oslicence_name, cpu, ram, finalclass, friendlyname, org_id_friendlyname, location_id_friendlyname, brand_id_friendlyname, model_id_friendlyname, rack_id_friendlyname, enclosure_id_friendlyname, powerA_id_friendlyname, powerA_id_finalclass_recall, powerB_id_friendlyname, powerB_id_finalclass_recall, osfamily_id_friendlyname, 
osversion_id_friendlyname, oslicence_id_friendlyname </sql> <sql id="Blob_Column_List"> description </sql> <select id="selectByExampleWithBLOBs" parameterType="com.mybatis.model.ViewServerExample" resultMap="ResultMapWithBLOBs"> select <if test="distinct"> distinct </if> 'id' as QUERYID, <include refid="Base_Column_List" /> , <include refid="Blob_Column_List" /> from view_server <if test="_parameter != null"> <include refid="Example_Where_Clause" /> </if> <if test="orderByClause != null"> order by ${orderByClause} </if> </select> <select id="selectByExample" parameterType="com.mybatis.model.ViewServerExample" resultMap="BaseResultMap"> select <if test="distinct"> distinct </if> 'id' as QUERYID, <include refid="Base_Column_List" /> from view_server <if test="_parameter != null"> <include refid="Example_Where_Clause" /> </if> <if test="orderByClause != null"> order by ${orderByClause} </if> </select> <select id="countByExample" parameterType="com.mybatis.model.ViewServerExample" resultType="java.lang.Long"> select count(*) from view_server <if test="_parameter != null"> <include refid="Example_Where_Clause" /> </if> </select> </mapper>
{ "content_hash": "d0f60036c089b10c8bd3918cc2a56a81", "timestamp": "", "source": "github", "line_count": 141, "max_line_length": 123, "avg_line_length": 56.191489361702125, "alnum_prop": 0.6826959485043544, "repo_name": "chenyiming/learn", "id": "b8b69bc08b28903289f2018b04a069eba9560a79", "size": "7923", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "springdemo/springmvc/mybatis/src/main/resources/mapping/ViewServerMapper.xml", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "600676" }, { "name": "HTML", "bytes": "330" }, { "name": "Java", "bytes": "865690" }, { "name": "JavaScript", "bytes": "662816" }, { "name": "Shell", "bytes": "85" } ], "symlink_target": "" }
import os
import json
import sqlite3

from ..common import consts, error_message, file_read, file_write
from ..server_classes import AdminTask, Group
from .builder import on_created, init_task_attr
from jam.db.db_modules import SQLITE, get_db_module
import jam.langs as langs


def create_items(task):
    """Populate the admin task's item tree from the bundled structure file."""
    info = file_read(os.path.join(task.app.jam_dir, 'admin', 'builder_structure.info'))
    info = json.loads(info)
    task.set_info(info)


def read_secret_key(task):
    """Read F_SECRET_KEY from SYS_PARAMS.

    Returns the stored key, or '' when it is missing or the query fails.
    """
    result = None
    con = task.connect()
    try:
        cursor = con.cursor()
        cursor.execute('SELECT F_SECRET_KEY FROM SYS_PARAMS')
        rec = cursor.fetchall()
        result = rec[0][0]
    except:
        # Deliberate best-effort: the table/column may not exist yet on a
        # fresh database — fall through and return ''.
        pass
    finally:
        con.close()
    if result is None:
        result = ''
    return result


def check_version(task):
    """Version-migration hook; currently a no-op."""
    pass


def init_admin(task):
    """Run the admin task's start-up sequence: version check, language update,
    item-tree creation, schema patching, settings reload, then builder hook."""
    check_version(task)
    langs.update_langs(task)
    create_items(task)
    update_admin_fields(task)
    consts.read_settings()
    consts.MAINTENANCE = False
    consts.write_settings(['MAINTENANCE'])
    consts.read_language()
    on_created(task)


def create_admin(app):
    """Create, initialise and return the AdminTask backed by admin.sqlite.

    A legacy '_admin.sqlite' file is renamed to 'admin.sqlite' first.
    """
    if os.path.exists(os.path.join(app.work_dir, '_admin.sqlite')):
        os.rename(os.path.join(app.work_dir, '_admin.sqlite'), \
            os.path.join(app.work_dir, 'admin.sqlite'))
    task = AdminTask(app, 'admin', 'Administrator', '', SQLITE,
        db_database=os.path.join(app.work_dir, 'admin.sqlite'))
    app.admin = task
    task.secret_key = read_secret_key(task)
    init_admin(task)
    return task


def update_admin_fields(task):
    """Patch the admin SQLite schema so it matches the task's item definitions:
    create missing tables and ALTER in missing columns.

    The nested helpers close over ``con``/``cursor`` which are assigned at the
    bottom of this function, before any helper is called.
    """

    def do_updates(con, field, item_name):
        # One-off data migration: when the OWNER_ITEM column is first added to
        # sys_privileges, seed full privileges (role 2 owner) for every detail item.
        if item_name == 'sys_privileges' and field.field_name.lower() == 'owner_item':
            cursor = con.cursor()
            cursor.execute("SELECT ID FROM SYS_ITEMS WHERE TABLE_ID > 0 AND DELETED = 0")
            details = cursor.fetchall()
            cursor.execute("SELECT ID FROM SYS_ROLES WHERE DELETED = 0")
            roles = cursor.fetchall()
            for d in details:
                for r in roles:
                    cursor.execute("""
                        INSERT INTO SYS_PRIVILEGES
                        (DELETED, OWNER_ID, OWNER_REC_ID, ITEM_ID,
                        F_CAN_VIEW, F_CAN_CREATE, F_CAN_EDIT, F_CAN_DELETE)
                        values (?, ?, ?, ?, ?, ?, ?, ?)""",
                        (0, 2, r[0], d[0], True, True, True, True))
            con.commit()

    def get_item_fields(item, table_name):
        # Column names currently present in the table, upper-cased for comparison.
        cursor.execute('PRAGMA table_info(%s)' % table_name)
        rows = cursor.fetchall()
        result = [str(row[1]).upper() for row in rows]
        return result

    def check_item_fields(item, table_name=None):
        # ALTER TABLE ... ADD COLUMN for each item field missing from the table.
        if not table_name:
            table_name = item.table_name.upper()
        fields = get_item_fields(item, table_name)
        for field in item._fields:
            if not field.field_name.upper() in fields:
                sql = 'ALTER TABLE %s ADD COLUMN %s %s' % \
                    (table_name, field.field_name.upper(), \
                    task.db_module.FIELD_TYPES[field.data_type])
                cursor.execute(sql)
                con.commit()
                do_updates(con, field, item.item_name)

    def check_table_exists(item, table_name=None):
        # Create a bare table (ID primary key) if it does not exist yet.
        # Always returns True. Table names come from the admin item tree,
        # not from end users.
        if not table_name:
            table_name = item.table_name.upper()
        sql = 'SELECT name FROM sqlite_master WHERE type="table" AND UPPER(name)="%s"' % table_name
        cursor.execute(sql)
        rows = cursor.fetchall()
        if not rows:
            sql = 'CREATE TABLE %s (ID INTEGER PRIMARY KEY)' % table_name
            cursor.execute(sql)
        return True

    con = task.connect()
    try:
        cursor = con.cursor()
        for group in task.items:
            for item in group.items:
                if item.table_name and not item.master:
                    if check_table_exists(item):
                        check_item_fields(item)
    finally:
        con.close()


def get_privileges(task, role_id):
    """Return {item_id: {can_view/can_create/can_edit/can_delete}} for a role."""
    result = {}
    # NOTE: 'privliges' is a historical misspelling kept as-is (local name only).
    privliges = task.sys_privileges.copy()
    privliges.set_where(owner_rec_id=role_id)
    privliges.open()
    for p in privliges:
        result[p.item_id.value] = \
            {
                'can_view': p.f_can_view.value,
                'can_create': p.f_can_create.value,
                'can_edit': p.f_can_edit.value,
                'can_delete': p.f_can_delete.value
            }
    return result


def get_roles(task):
    """Return ([ [role_id, role_name], ... ], {role_id: privileges}) for all roles."""
    privileges = {}
    roles = []
    r = task.sys_roles.copy()
    r.open()
    # NOTE(review): the loop variable shadows the dataset variable 'r'; it works
    # because iteration positions the dataset's current record, but is fragile.
    for r in r:
        privileges[r.id.value] = get_privileges(task, r.id.value)
        roles.append([r.id.value, r.f_role.value]) if False else roles.append([r.id.value, r.f_name.value])
    return roles, privileges


def login_user(task, log, password, admin, ip=None, session_uuid=None):
    """Authenticate a user when SAFE_MODE is on.

    Returns a dict with user_id/role_id/role_name/user_name/admin on success,
    or an empty dict on failure (or when SAFE_MODE is off).
    """
    user_id = None
    user_info = {}
    if consts.SAFE_MODE:
        users = task.sys_users.copy()
        users.set_where(f_password=password)
        users.open()
        for u in users:
            if u.f_login.value.strip() == log.strip() and u.f_password.value == password:
                if not admin or u.f_admin.value == admin:
                    user_id = u.id.value
                    user_info = {
                        'user_id': u.id.value,
                        'role_id': u.f_role.value,
                        'role_name': u.f_role.display_text,
                        'user_name': u.f_name.value,
                        'admin': u.f_admin.value
                    }
                    if ip or session_uuid:
                        # NOTE(review): ip and session_uuid are interpolated into
                        # SQL; parameterize if they can ever carry untrusted input.
                        task.execute("UPDATE SYS_USERS SET F_IP='%s', F_UUID='%s' WHERE ID=%s" % (ip, session_uuid, u.id.value))
                    break
    return user_info


def user_valid_ip(task, user_id, ip):
    """True when the stored F_IP for user_id equals ip."""
    res = task.select("SELECT F_IP FROM SYS_USERS WHERE ID=%s" % user_id)
    if res and res[0][0] == ip:
        return True
    return False


def user_valid_uuid(task, user_id, session_uuid):
    """True when the stored F_UUID for user_id equals session_uuid."""
    res = task.select("SELECT F_UUID FROM SYS_USERS WHERE ID=%s" % user_id)
    if res and res[0][0] == session_uuid:
        return True
    return False


def indexes_get_table_names(indexes):
    """Map owner item id -> table name for every index record."""
    ids = []
    for i in indexes:
        ids.append(i.owner_rec_id.value)
    items = indexes.task.sys_items.copy(handlers=False)
    items.set_where(id__in=ids)
    items.open(fields=['id', 'f_table_name'])
    table_names = {}
    for i in items:
        table_names[i.id.value] = i.f_table_name.value
    return table_names


def drop_indexes_sql(task):
    """Return DROP INDEX statements for all indexes (skipping foreign-key
    indexes on SQLite, where they are implicit)."""
    db_module = task.task_db_module
    db_type = task.task_db_type
    indexes = task.sys_indices.copy(handlers=False)
    indexes.open()
    table_names = indexes_get_table_names(indexes)
    sqls = []
    for i in indexes:
        if not (i.f_foreign_index.value and db_module.DATABASE == 'SQLITE'):
            table_name = table_names.get(i.owner_rec_id.value)
            if table_name:
                sqls.append(i.delete_index_sql(db_type, table_name))
    return sqls


def restore_indexes_sql(task):
    """Return CREATE INDEX statements mirroring drop_indexes_sql()."""
    db_module = task.task_db_module
    db_type = task.task_db_type
    indexes = task.sys_indices.copy(handlers=False)
    indexes.open()
    table_names = indexes_get_table_names(indexes)
    sqls = []
    for i in indexes:
        if not (i.f_foreign_index.value and db_module.DATABASE == 'SQLITE'):
            table_name = table_names.get(i.owner_rec_id.value)
            if table_name:
                sqls.append(i.create_index_sql(db_type, table_name))
    return sqls
{ "content_hash": "f193c1c52820973c9978c2a50b92a10c", "timestamp": "", "source": "github", "line_count": 214, "max_line_length": 128, "avg_line_length": 34.71495327102804, "alnum_prop": 0.5703324808184144, "repo_name": "jam-py/jam-py", "id": "7ee784880d8d1bc57eca7d95d35dbcd0c7d17e46", "size": "7429", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "jam/admin/admin.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "37683" }, { "name": "HTML", "bytes": "67579" }, { "name": "JavaScript", "bytes": "2789171" }, { "name": "Python", "bytes": "432048" } ], "symlink_target": "" }
"""
Description
===========

Index calculation operations

Functions
=========
"""

import xarray as xr
import pandas as pd

from cate.core.op import op, op_input
from cate.ops.select import select_var
from cate.ops.subset import subset_spatial
from cate.ops.anomaly import anomaly_external
from cate.core.types import PolygonLike, VarName, ValidationError
from cate.util.monitor import Monitor

_ALL_FILE_FILTER = dict(name='All Files', extensions=['*'])


@op(tags=['index'])
@op_input('file', file_open_mode='r', file_filters=[dict(name='NetCDF', extensions=['nc']), _ALL_FILE_FILTER])
@op_input('var', value_set_source='ds', data_type=VarName)
def enso_nino34(ds: xr.Dataset,
                var: VarName.TYPE,
                file: str,
                threshold: float = None,
                monitor: Monitor = Monitor.NONE) -> pd.DataFrame:
    """
    Calculate nino34 index, which is defined as a five month running mean of
    anomalies of monthly means of SST data in the Nino3.4 region::

        lon_min=-170 lat_min=-5 lon_max=-120 lat_max=5.

    :param ds: A monthly SST dataset
    :param file: Path to the reference data file e.g. a climatology. A suitable
        reference dataset can be generated using the long_term_average operation
    :param var: Dataset variable (geophysical quantity) to use for index calculation.
    :param threshold: If given, a boolean El Nino/La Nina timeseries will be
        calculated and added to the output dataset according to the given
        threshold. An anomaly larger than the positive value of the threshold
        indicates El Nino; an anomaly smaller than the negative of the
        threshold indicates La Nina.
    :param monitor: a progress monitor.
    :return: A dataset that contains the index timeseries.
    """
    n34 = '-170, -5, -120, 5'
    name = 'ENSO N3.4 Index'
    return _generic_index_calculation(ds, var, n34, 5, file, name, threshold, monitor)


@op(tags=['index'])
@op_input('var', value_set_source='ds', data_type=VarName)
@op_input('file', file_open_mode='r', file_filters=[dict(name='NetCDF', extensions=['nc']), _ALL_FILE_FILTER])
@op_input('region', value_set=['N1+2', 'N3', 'N34', 'N4', 'custom'])
@op_input('custom_region', data_type=PolygonLike)
def enso(ds: xr.Dataset,
         var: VarName.TYPE,
         file: str,
         region: str = 'n34',
         custom_region: PolygonLike.TYPE = None,
         threshold: float = None,
         monitor: Monitor = Monitor.NONE) -> pd.DataFrame:
    """
    Calculate ENSO index, which is defined as a five month running mean of
    anomalies of monthly means of SST data in the given region.

    :param ds: A monthly SST dataset
    :param file: Path to the reference data file e.g. a climatology. A suitable
        reference dataset can be generated using the long_term_average operation
    :param var: Dataset variable to use for index calculation
    :param region: Region for index calculation, the default is Nino3.4.
        The region name is matched case-insensitively against the declared
        value set ('N1+2', 'N3', 'N34', 'N4', 'custom').
    :param custom_region: If 'custom' is chosen as the 'region', this parameter
        has to be provided to set the desired region.
    :param threshold: If given, a boolean El Nino/La Nina timeseries will be
        calculated and added to the output dataset, according to the given
        threshold.
    :param monitor: a progress monitor.
    :return: A dataset that contains the index timeseries.
    """
    regions = {'N1+2': '-90, -10, -80, 0',
               'N3': '-150, -5, -90, 5',
               # Bug fix: key was 'N3.4', which made the advertised 'N34'
               # option (and the default 'n34') raise a KeyError.
               'N34': '-170, -5, -120, 5',
               'N4': '160, -5, -150, 5',
               'custom': custom_region}
    # Canonicalise the region name case-insensitively so the signature default
    # 'n34' and GUI-supplied 'N34' both resolve to the same entry.
    canonical = {k.lower(): k for k in regions}
    key = canonical.get(str(region).lower())
    if key is None:
        raise ValidationError('Unknown region for ENSO index calculation: %s' % region)
    converted_region = PolygonLike.convert(regions[key])
    if not converted_region:
        raise ValidationError('No region has been provided to ENSO index calculation')

    name = 'ENSO ' + key + ' Index'
    if 'custom' == key:
        name = 'ENSO Index over ' + PolygonLike.format(converted_region)

    return _generic_index_calculation(ds, var, converted_region, 5, file, name, threshold, monitor)


@op(tags=['index'])
@op_input('var', value_set_source='ds', data_type=VarName)
@op_input('file', file_open_mode='r', file_filters=[dict(name='NetCDF', extensions=['nc']), _ALL_FILE_FILTER])
def oni(ds: xr.Dataset,
        var: VarName.TYPE,
        file: str,
        threshold: float = None,
        monitor: Monitor = Monitor.NONE) -> pd.DataFrame:
    """
    Calculate ONI index, which is defined as a three month running mean of
    anomalies of monthly means of SST data in the Nino3.4 region.

    :param ds: A monthly SST dataset
    :param file: Path to the reference data file e.g. a climatology. A suitable
        reference dataset can be generated using the long_term_average operation
    :param var: Dataset variable to use for index calculation
    :param threshold: If given, a boolean El Nino/La Nina timeseries will be
        calculated and added to the output dataset, according to the given
        threshold.
    :param monitor: a progress monitor.
    :return: A dataset that contains the index timeseries
    """
    n34 = '-170, -5, -120, 5'
    name = 'ONI Index'
    return _generic_index_calculation(ds, var, n34, 3, file, name, threshold, monitor)


def _generic_index_calculation(ds: xr.Dataset,
                               var: VarName.TYPE,
                               region: PolygonLike.TYPE,
                               window: int,
                               file: str,
                               name: str,
                               threshold: float = None,
                               monitor: Monitor = Monitor.NONE) -> pd.DataFrame:
    """
    A generic index calculation: an anomaly, against the given reference, of a
    moving average (given window size) over the spatial mean of the given
    region of the given variable of the given dataset.

    :param ds: Dataset from which to calculate the index
    :param var: Variable from which to calculate index
    :param region: Spatial subset from which to calculate the index
    :param window: Window size for the moving average
    :param file: Path to the reference file
    :param threshold: Absolute threshold that indicates an ENSO event
    :param name: Name of the index
    :param monitor: a progress monitor.
    :return: A dataset that contains the index timeseries
    """
    var = VarName.convert(var)
    region = PolygonLike.convert(region)

    with monitor.starting("Calculate the index", total_work=2):
        ds = select_var(ds, var)
        ds_subset = subset_spatial(ds, region)
        anom = anomaly_external(ds_subset, file, monitor=monitor.child(1))
        with monitor.child(1).observing("Calculate mean"):
            ts = anom.mean(dim=['lat', 'lon'])

    df = pd.DataFrame(data=ts[var].values, columns=[name], index=ts.time.values)
    # The centered rolling mean leaves NaNs at both ends; drop them.
    retval = df.rolling(window=window, center=True).mean().dropna()

    if threshold is None:
        return retval

    retval['El Nino'] = pd.Series((retval[name] > threshold), index=retval.index)
    retval['La Nina'] = pd.Series((retval[name] < -threshold), index=retval.index)
    return retval
{ "content_hash": "a85974f265e4c5edf6537043fe03086b", "timestamp": "", "source": "github", "line_count": 173, "max_line_length": 110, "avg_line_length": 43.202312138728324, "alnum_prop": 0.6549371153331549, "repo_name": "CCI-Tools/cate-core", "id": "9ae190242b85d436f1e471979b8b1048445ecdfe", "size": "8641", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "cate/ops/index.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "10820" }, { "name": "Jupyter Notebook", "bytes": "32802" }, { "name": "Python", "bytes": "1429412" }, { "name": "Shell", "bytes": "6597" } ], "symlink_target": "" }
# NOTE(review): this looks like a Rails schema.rb, normally auto-generated from
# migrations (rake db:migrate) — regenerate rather than hand-edit; confirm.
ActiveRecord::Schema.define(version: 20170929051444) do

  # Snapshot of a single adapter's contribution to an assignment snapshot.
  create_table "adapter_snapshots", force: :cascade do |t|
    t.integer  "assignment_snapshot_id"
    t.integer  "subtask_id"
    t.text     "description"
    t.text     "description_url"
    t.text     "details_json"
    t.boolean  "fulfilled", default: false
    t.text     "summary"
    t.text     "value"
    t.datetime "created_at"
    t.datetime "updated_at"
    t.string   "progress"
    t.string   "status"
    t.boolean  "requested", default: false
  end

  create_table "api_results", force: :cascade do |t|
    t.text     "parsed_value"
    t.integer  "custom_expectation_id"
    t.boolean  "success", default: false
    t.datetime "created_at"
    t.datetime "updated_at"
  end

  create_table "assignment_requests", force: :cascade do |t|
    t.integer  "assignment_id"
    t.string   "body_hash"
    t.text     "body_json"
    t.string   "signature"
    t.datetime "created_at"
    t.datetime "updated_at"
  end

  create_table "assignment_scheduled_updates", force: :cascade do |t|
    t.integer  "assignment_id"
    t.datetime "run_at"
    t.boolean  "scheduled", default: false
    t.datetime "created_at"
    t.datetime "updated_at"
  end

  # Cron-style schedule parts (minute/hour/day/month/weekday) per assignment.
  create_table "assignment_schedules", force: :cascade do |t|
    t.integer  "assignment_id"
    t.string   "minute"
    t.string   "hour"
    t.string   "day_of_month"
    t.string   "month_of_year"
    t.string   "day_of_week"
    t.datetime "created_at"
    t.datetime "updated_at"
    t.datetime "start_at"
    t.datetime "end_at"
  end

  create_table "assignment_snapshots", force: :cascade do |t|
    t.string   "xid"
    t.text     "value"
    t.text     "status"
    t.text     "details_json"
    t.boolean  "fulfilled", default: false
    t.integer  "assignment_id"
    t.datetime "created_at"
    t.datetime "updated_at"
    t.text     "summary"
    t.text     "description"
    t.text     "description_url"
    t.string   "progress"
    t.integer  "adapter_index"
    t.integer  "requester_id"
    t.string   "request_type"
    t.integer  "request_id"
  end

  create_table "assignment_types", force: :cascade do |t|
    t.string   "name"
    t.string   "description"
    t.text     "json_schema"
    t.datetime "created_at"
    t.datetime "updated_at"
    t.boolean  "unscheduled", default: false
  end

  create_table "assignments", force: :cascade do |t|
    t.datetime "created_at"
    t.datetime "updated_at"
    t.string   "xid"
    t.datetime "start_at"
    t.datetime "end_at"
    t.string   "status"
    t.integer  "coordinator_id"
    t.boolean  "skip_initial_snapshot", default: false
  end

  create_table "contracts", force: :cascade do |t|
    t.string   "xid"
    t.text     "json_body"
    t.string   "status"
    t.datetime "created_at"
    t.datetime "updated_at"
    t.integer  "coordinator_id"
  end

  create_table "coordinators", force: :cascade do |t|
    t.string   "key"
    t.string   "secret"
    t.datetime "created_at"
    t.datetime "updated_at"
    t.string   "url"
  end

  create_table "custom_expectations", force: :cascade do |t|
    t.string   "comparison"
    t.string   "endpoint"
    t.string   "field_list"
    t.string   "final_value"
    t.datetime "created_at"
    t.datetime "updated_at"
  end

  # Standard delayed_job backend table.
  create_table "delayed_jobs", force: :cascade do |t|
    t.integer  "priority", default: 0, null: false
    t.integer  "attempts", default: 0, null: false
    t.text     "handler", null: false
    t.text     "last_error"
    t.datetime "run_at"
    t.datetime "locked_at"
    t.datetime "failed_at"
    t.string   "locked_by"
    t.string   "queue"
    t.datetime "created_at"
    t.datetime "updated_at"
  end

  add_index "delayed_jobs", ["priority", "run_at"], name: "delayed_jobs_priority", using: :btree

  create_table "escrow_outcomes", force: :cascade do |t|
    t.integer  "term_id"
    t.string   "result"
    t.text     "transaction_hex"
    t.datetime "created_at"
    t.datetime "updated_at"
  end

  # --- Ethereum-related tables -------------------------------------------

  create_table "ethereum_accounts", force: :cascade do |t|
    t.string   "address"
    t.datetime "created_at"
    t.datetime "updated_at"
    t.integer  "nonce", default: 0
    t.boolean  "current", default: true
  end

  create_table "ethereum_bytes32_oracles", force: :cascade do |t|
    t.string   "address"
    t.string   "update_address"
    t.datetime "created_at"
    t.datetime "updated_at"
    t.integer  "ethereum_account_id"
  end

  create_table "ethereum_contract_templates", force: :cascade do |t|
    t.text     "code"
    t.text     "evm_hex"
    t.text     "json_abi"
    t.text     "solidity_abi"
    t.datetime "created_at"
    t.datetime "updated_at"
    t.integer  "construction_gas"
    t.string   "read_address"
    t.string   "write_address"
    t.string   "adapter_name"
    t.boolean  "use_logs", default: false
  end

  create_table "ethereum_contracts", force: :cascade do |t|
    t.string   "address"
    t.integer  "template_id"
    t.integer  "account_id"
    t.datetime "created_at"
    t.datetime "updated_at"
    t.integer  "genesis_transaction_id"
    t.integer  "owner_id"
    t.string   "owner_type"
  end

  # No timestamps here — mirrors on-chain event log entries.
  create_table "ethereum_events", force: :cascade do |t|
    t.string  "address"
    t.string  "block_hash"
    t.integer "block_number"
    t.text    "data"
    t.integer "log_index"
    t.integer "log_subscription_id"
    t.string  "transaction_hash"
    t.integer "transaction_index"
  end

  create_table "ethereum_formatted_oracles", force: :cascade do |t|
    t.string   "address"
    t.string   "update_address"
    t.integer  "ethereum_account_id"
    t.datetime "created_at"
    t.datetime "updated_at"
    t.text     "config_value"
    t.decimal  "payment_amount", precision: 36, default: 0
  end

  create_table "ethereum_int256_oracles", force: :cascade do |t|
    t.string   "address"
    t.string   "update_address"
    t.integer  "ethereum_account_id"
    t.integer  "result_multiplier"
    t.datetime "created_at"
    t.datetime "updated_at"
  end

  create_table "ethereum_log_subscriptions", force: :cascade do |t|
    t.integer  "owner_id"
    t.string   "owner_type"
    t.string   "account"
    t.string   "xid"
    t.datetime "end_at"
    t.datetime "created_at"
    t.datetime "updated_at"
  end

  create_table "ethereum_log_watchers", force: :cascade do |t|
    t.string   "address"
    t.datetime "created_at"
    t.datetime "updated_at"
  end

  create_table "ethereum_oracle_writes", force: :cascade do |t|
    t.integer  "oracle_id"
    t.string   "txid"
    t.text     "value"
    t.datetime "created_at"
    t.datetime "updated_at"
    t.string   "oracle_type"
    t.decimal  "amount_paid", precision: 36
  end

  create_table "ethereum_oracles", force: :cascade do |t|
    t.text     "endpoint"
    t.text     "field_list"
    t.datetime "created_at"
    t.datetime "updated_at"
  end

  create_table "ethereum_transactions", force: :cascade do |t|
    t.string   "txid"
    t.integer  "account_id"
    t.integer  "confirmations", default: 0
    t.datetime "created_at"
    t.datetime "updated_at"
    t.text     "raw_hex"
    t.integer  "nonce"
    t.string   "to"
    t.text     "data"
    t.integer  "gas_price", limit: 8
    t.integer  "gas_limit"
    t.decimal  "value", precision: 36
  end

  create_table "ethereum_uint256_oracles", force: :cascade do |t|
    t.string   "address"
    t.string   "update_address"
    t.integer  "ethereum_account_id"
    t.datetime "created_at"
    t.datetime "updated_at"
    t.integer  "result_multiplier"
  end

  # --- Adapter / receiver tables -----------------------------------------

  create_table "external_adapters", force: :cascade do |t|
    t.string   "url"
    t.datetime "created_at"
    t.datetime "updated_at"
    t.integer  "assignment_type_id"
    t.string   "username"
    t.string   "password"
  end

  create_table "json_adapters", force: :cascade do |t|
    t.text     "url"
    t.text     "field_list"
    t.string   "request_type"
    t.datetime "created_at"
    t.datetime "updated_at"
    t.text     "headers_json"
    t.string   "basic_auth_password"
    t.string   "basic_auth_username"
  end

  create_table "json_receiver_requests", force: :cascade do |t|
    t.integer  "json_receiver_id"
    t.text     "data_json"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end

  create_table "json_receivers", force: :cascade do |t|
    t.string   "xid"
    t.string   "path_json"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end

  create_table "key_pairs", force: :cascade do |t|
    t.string   "owner_type"
    t.integer  "owner_id"
    t.string   "public_key"
    t.datetime "created_at"
    t.datetime "updated_at"
    t.text     "encrypted_private_key"
  end

  create_table "subtask_snapshot_requests", force: :cascade do |t|
    t.integer  "subtask_id"
    t.text     "data_json"
    t.datetime "created_at"
    t.datetime "updated_at"
  end

  create_table "subtasks", force: :cascade do |t|
    t.string   "adapter_type"
    t.integer  "adapter_id"
    t.integer  "assignment_id"
    t.integer  "index"
    t.text     "adapter_params_json"
    t.datetime "created_at"
    t.datetime "updated_at"
    t.boolean  "ready"
    t.string   "xid"
    t.string   "task_type"
  end

  create_table "terms", force: :cascade do |t|
    t.integer  "contract_id"
    t.string   "name"
    t.string   "tracking"
    t.datetime "created_at"
    t.datetime "updated_at"
    t.datetime "start_at"
    t.datetime "end_at"
    t.integer  "expectation_id"
    t.string   "expectation_type"
    t.string   "status"
  end

end
{ "content_hash": "8ca59c573f07de122d0be1632ff190f0", "timestamp": "", "source": "github", "line_count": 350, "max_line_length": 96, "avg_line_length": 27.67142857142857, "alnum_prop": 0.6151781104801239, "repo_name": "oraclekit/smart_oracle", "id": "91ec1af949710078b405cfba962282cde7ed0a41", "size": "10426", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "db/schema.rb", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "826" }, { "name": "HTML", "bytes": "6007" }, { "name": "JavaScript", "bytes": "638" }, { "name": "Ruby", "bytes": "442357" } ], "symlink_target": "" }