text
stringlengths 2
1.04M
| meta
dict |
|---|---|
import tempfile
import zipfile
from os.path import join, dirname
from cloudify import ctx
# Fetch the blueprint's shared helper module next to this script so it can be
# imported below (the blueprint ships utils.py under components/).
ctx.download_resource(
    join('components', 'utils.py'),
    join(dirname(__file__), 'utils.py'))
import utils  # NOQA

SERVICE_NAME = 'consul'

# Some runtime properties to be used in teardown
runtime_props = ctx.instance.runtime_properties
runtime_props['service_name'] = SERVICE_NAME

# Consul binary is installed under /opt/consul; agent config under /etc/consul.d.
HOME_DIR = join('/opt', SERVICE_NAME)
CONFIG_DIR = '/etc/consul.d'

# Paths the teardown/uninstall workflow should delete.
runtime_props['files_to_remove'] = [HOME_DIR, CONFIG_DIR]

# Node-type properties for this service, resolved via the blueprint factory.
ctx_properties = utils.ctx_factory.create(SERVICE_NAME)
def install_consul():
    """Download the consul package, unpack the binary into HOME_DIR and make it
    executable. The zip is extracted in a temp dir that is always cleaned up."""
    target_binary = join(HOME_DIR, 'consul')
    utils.mkdir(dirname(target_binary))
    utils.mkdir(CONFIG_DIR)

    package_path = utils.download_cloudify_resource(
        ctx_properties['consul_package_url'], SERVICE_NAME)

    staging_dir = tempfile.mkdtemp()
    try:
        with zipfile.ZipFile(package_path) as archive:
            archive.extractall(staging_dir)
        utils.move(join(staging_dir, 'consul'), target_binary)
        utils.chmod('+x', target_binary)
    finally:
        # Remove the staging dir even if extraction or the move failed.
        utils.remove(staging_dir)


install_consul()
|
{
"content_hash": "bb571fe1285812defe1d00dbf33616f5",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 78,
"avg_line_length": 25.67391304347826,
"alnum_prop": 0.6663844199830652,
"repo_name": "isaac-s/cloudify-manager-blueprints",
"id": "5535b3d0a6557ef4e0af57f2a428e7264514fd12",
"size": "1204",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "components/consul/scripts/create.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Clojure",
"bytes": "274"
},
{
"name": "Nginx",
"bytes": "712"
},
{
"name": "Python",
"bytes": "236873"
},
{
"name": "Shell",
"bytes": "7106"
}
],
"symlink_target": ""
}
|
# Build Zen's C extensions in place so epydoc can import the package from ../src.
build_zen_inplace:
	cd ../src; python setup.py build_ext --inplace

# Generate the HTML API documentation with epydoc (needs the in-place build).
build_pydoc: build_zen_inplace
	export PYTHONPATH="../src:$PYTHONPATH"; epydoc --html -o ../pydoc/autogen --name Zen --url http://www.networkdynamics.org/static/zen zen

# Upload the generated documentation to the public web host.
publish_pydoc:
	scp -r autogen druths.webfactional.com:~/webapps/rr_static/zen
|
{
"content_hash": "f09d18a37e7dadcd231726a202714ed8",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 137,
"avg_line_length": 39.5,
"alnum_prop": 0.740506329113924,
"repo_name": "networkdynamics/zenlib",
"id": "f8c8232ee467003bc0d96cecdb7269d5cd0939ff",
"size": "317",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pydoc/Makefile",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "17243"
},
{
"name": "Makefile",
"bytes": "1849"
},
{
"name": "Python",
"bytes": "1064934"
}
],
"symlink_target": ""
}
|
from st2client.models import Resource, Trace, TriggerInstance, Rule, LiveAction
from st2client.exceptions.operations import OperationFailureException
from st2client.formatters import table
from st2client.formatters import execution as execution_formatter
from st2client.commands import resource
from st2client.utils.date import format_isodate
# Full attribute ordering used when rendering a trace as JSON.
TRACE_ATTRIBUTE_DISPLAY_ORDER = ['id', 'trace_tag', 'action_executions', 'rules',
                                 'trigger_instances', 'start_timestamp']
# Compact header shown for a single trace in non-JSON output.
TRACE_HEADER_DISPLAY_ORDER = ['id', 'trace_tag', 'start_timestamp']
# Columns of the component table printed beneath a trace.
TRACE_COMPONENT_DISPLAY_LABELS = ['id', 'type', 'ref', 'updated_at']
TRACE_DISPLAY_ATTRIBUTES = ['all']

# Accepted spellings of the --attr value that selects trigger instances.
TRIGGER_INSTANCE_DISPLAY_OPTIONS = [
    'all',
    'trigger-instances',
    'trigger_instances',
    'triggerinstances',
    'triggers'
]

# Accepted spellings of the --attr value that selects action executions.
ACTION_EXECUTION_DISPLAY_OPTIONS = [
    'all',
    'executions',
    'action-executions',
    'action_executions',
    'actionexecutions',
    'actions'
]
class TraceBranch(resource.ResourceBranch):
    """CLI branch exposing the read-only trace commands (list and get)."""

    def __init__(self, description, app, subparsers, parent_parser=None):
        subcommands = {
            'list': TraceListCommand,
            'get': TraceGetCommand
        }
        super(TraceBranch, self).__init__(
            Trace, description, app, subparsers,
            parent_parser=parent_parser,
            read_only=True,
            commands=subcommands)
class SingleTraceDisplayMixin(object):
    """Mixin with the shared rendering logic for a single trace."""

    def print_trace_details(self, trace, args, **kwargs):
        """Print the trace header, then (unless JSON) a table of its components.

        Which component types appear in the table is driven by args.attr.
        """
        header_attrs = (TRACE_ATTRIBUTE_DISPLAY_ORDER if args.json
                        else TRACE_HEADER_DISPLAY_ORDER)
        options = {
            'attributes': header_attrs,
            'json': args.json,
            'attribute_transform_functions': self.attribute_transform_functions,
        }
        self.print_output(trace, execution_formatter.ExecutionResult, **options)

        # Everything should be printed if we are printing json.
        if args.json:
            return

        components = []
        if any(attr in args.attr for attr in TRIGGER_INSTANCE_DISPLAY_OPTIONS):
            for trigger_instance in trace.trigger_instances:
                components.append(Resource(
                    id=trigger_instance['object_id'],
                    type=TriggerInstance._alias.lower(),
                    ref=trigger_instance['ref'],
                    updated_at=trigger_instance['updated_at']))
        if any(attr in args.attr for attr in ['all', 'rules']):
            for rule in trace.rules:
                components.append(Resource(
                    id=rule['object_id'],
                    type=Rule._alias.lower(),
                    ref=rule['ref'],
                    updated_at=rule['updated_at']))
        if any(attr in args.attr for attr in ACTION_EXECUTION_DISPLAY_OPTIONS):
            for execution in trace.action_executions:
                components.append(Resource(
                    id=execution['object_id'],
                    type=LiveAction._alias.lower(),
                    ref=execution['ref'],
                    updated_at=execution['updated_at']))

        if components:
            # Sort chronologically so the causal chain reads top to bottom.
            components.sort(key=lambda item: item.updated_at)
            self.print_output(components, table.MultiColumnTable,
                              attributes=TRACE_COMPONENT_DISPLAY_LABELS,
                              json=args.json)
class TraceListCommand(resource.ResourceCommand, SingleTraceDisplayMixin):
    """``trace list``: list recent traces, optionally filtered by one of
    trace-tag / execution / rule / trigger-instance.
    """

    # Default columns for the list table.
    display_attributes = ['id', 'trace_tag', 'start_timestamp']
    attribute_transform_functions = {
        'start_timestamp': format_isodate
    }
    attribute_display_order = TRACE_ATTRIBUTE_DISPLAY_ORDER

    def __init__(self, resource, *args, **kwargs):
        super(TraceListCommand, self).__init__(
            resource, 'list', 'Get the list of the 50 most recent %s.' %
            resource.get_plural_display_name().lower(),
            *args, **kwargs)
        # The filter flags below are mutually exclusive: at most one of
        # --trace-tag/--execution/--rule/--trigger-instance may be supplied.
        self.group = self.parser.add_mutually_exclusive_group()
        self.parser.add_argument('-n', '--last', type=int, dest='last',
                                 default=50,
                                 help=('List N most recent %s; '
                                       'list all if 0.' %
                                       resource.get_plural_display_name().lower()))
        # Filter options
        self.group.add_argument('-c', '--trace-tag', help='Trace-tag to filter the list.')
        self.group.add_argument('-e', '--execution', help='Execution to filter the list.')
        self.group.add_argument('-r', '--rule', help='Rule to filter the list.')
        self.group.add_argument('-g', '--trigger-instance',
                                help='TriggerInstance to filter the list.')
        # Display options
        self.parser.add_argument('-a', '--attr', nargs='+',
                                 default=self.display_attributes,
                                 help=('List of attributes to include in the '
                                       'output. "all" will return all '
                                       'attributes.'))
        self.parser.add_argument('-w', '--width', nargs='+', type=int,
                                 default=None,
                                 help=('Set the width of columns in output.'))

    @resource.add_auth_token_to_kwargs_from_cli
    def run(self, args, **kwargs):
        """Query the API for traces, forwarding whichever filter was given."""
        # Filtering options
        if args.trace_tag:
            kwargs['trace_tag'] = args.trace_tag
        if args.trigger_instance:
            kwargs['trigger_instance'] = args.trigger_instance
        if args.execution:
            kwargs['execution'] = args.execution
        if args.rule:
            kwargs['rule'] = args.rule
        return self.manager.query(limit=args.last, **kwargs)

    def run_and_print(self, args, **kwargs):
        """Render a single matching trace in detail, or a table for many."""
        instances = self.run(args, **kwargs)
        if instances and len(instances) == 1:
            # For a single Trace we must include the components unless the
            # user has overridden the attributes to display.
            if args.attr == self.display_attributes:
                args.attr = ['all']
            self.print_trace_details(trace=instances[0], args=args)
        else:
            self.print_output(reversed(instances), table.MultiColumnTable,
                              attributes=args.attr, widths=args.width,
                              json=args.json,
                              attribute_transform_functions=self.attribute_transform_functions)
class TraceGetCommand(resource.ResourceGetCommand, SingleTraceDisplayMixin):
    """``trace get``: fetch a single trace by id, optionally reducing it to a
    causation chain rooted at a given component and/or filtering which
    component types are displayed.
    """

    display_attributes = ['all']
    attribute_display_order = TRACE_ATTRIBUTE_DISPLAY_ORDER
    attribute_transform_functions = {
        'start_timestamp': format_isodate
    }
    pk_argument_name = 'id'

    def __init__(self, resource, *args, **kwargs):
        super(TraceGetCommand, self).__init__(resource, *args, **kwargs)
        # Causation chains: at most one root component may be chosen.
        self.causation_group = self.parser.add_mutually_exclusive_group()
        self.causation_group.add_argument('-e', '--execution',
                                          help='Execution to show causation chain.')
        self.causation_group.add_argument('-r', '--rule', help='Rule to show causation chain.')
        self.causation_group.add_argument('-g', '--trigger-instance',
                                          help='TriggerInstance to show causation chain.')
        # display filter group
        self.display_filter_group = self.parser.add_argument_group()
        self.display_filter_group.add_argument('--show-executions', action='store_true',
                                               help='Only show executions.')
        self.display_filter_group.add_argument('--show-rules', action='store_true',
                                               help='Only show rules.')
        self.display_filter_group.add_argument('--show-trigger-instances', action='store_true',
                                               help='Only show trigger instances.')
        self.display_filter_group.add_argument('-n', '--hide-noop-triggers', action='store_true',
                                               help='Hide noop trigger instances.')

    @resource.add_auth_token_to_kwargs_from_cli
    def run(self, args, **kwargs):
        """Fetch the trace identified by the positional id argument."""
        resource_id = getattr(args, self.pk_argument_name, None)
        return self.get_resource_by_id(resource_id, **kwargs)

    @resource.add_auth_token_to_kwargs_from_cli
    def run_and_print(self, args, **kwargs):
        """Fetch, filter and print a trace; raise if the id does not exist."""
        trace = None
        try:
            trace = self.run(args, **kwargs)
        except resource.ResourceNotFoundError:
            self.print_not_found(args.id)
            raise OperationFailureException('Trace %s not found.' % (args.id))
        # First filter for causation chains
        trace = self._filter_trace_components(trace=trace, args=args)
        # next filter for display purposes
        trace = self._apply_display_filters(trace=trace, args=args)
        return self.print_trace_details(trace=trace, args=args)

    @staticmethod
    def _filter_trace_components(trace, args):
        """
        This function walks up the component causal chain. It only returns
        properties in the causal chain and nothing else.
        """
        # check if any filtering is desired
        if not (args.execution or args.rule or args.trigger_instance):
            return trace

        component_id = None
        component_type = None

        # pick the right component type
        if args.execution:
            component_id = args.execution
            component_type = 'action_execution'
        elif args.rule:
            component_id = args.rule
            component_type = 'rule'
        elif args.trigger_instance:
            component_id = args.trigger_instance
            component_type = 'trigger_instance'

        # Initialize collection to use
        action_executions = []
        rules = []
        trigger_instances = []

        # setup flag to properly manage termination conditions
        search_target_found = component_id and component_type

        while search_target_found:
            components_list = []
            if component_type == 'action_execution':
                components_list = trace.action_executions
                to_update_list = action_executions
            elif component_type == 'rule':
                components_list = trace.rules
                to_update_list = rules
            elif component_type == 'trigger_instance':
                components_list = trace.trigger_instances
                to_update_list = trigger_instances
            # Look for search_target in the right collection and
            # once found look up the caused_by to keep movig up
            # the chain.
            search_target_found = False
            # init to default value
            # NOTE(review): this reset runs on every pass of the while loop, so
            # a component_caused_by_id produced by the ':'-split below is wiped
            # before the next pass can use it in the mismatch check — confirm
            # whether the initialization was meant to live before the loop.
            component_caused_by_id = None
            for component in components_list:
                test_id = component['object_id']
                if test_id == component_id:
                    caused_by = component.get('caused_by', {})
                    component_id = caused_by.get('id', None)
                    component_type = caused_by.get('type', None)

                    # If provided the component_caused_by_id must match as well. This is mostly
                    # applicable for rules since the same rule may appear multiple times and can
                    # only be distinguished by causing TriggerInstance.
                    if component_caused_by_id and component_caused_by_id != component_id:
                        continue

                    component_caused_by_id = None
                    to_update_list.append(component)

                    # In some cases the component_id and the causing component are combined to
                    # provide the complete causation chain. Think rule + triggerinstance
                    if component_id and ':' in component_id:
                        component_id_split = component_id.split(':')
                        component_id = component_id_split[0]
                        component_caused_by_id = component_id_split[1]

                    search_target_found = True
                    break

        # Replace the trace's collections with only the chain members found.
        trace.action_executions = action_executions
        trace.rules = rules
        trace.trigger_instances = trigger_instances

        return trace

    @staticmethod
    def _apply_display_filters(trace, args):
        """
        This function looks at the display filters to determine which components
        should be displayed.
        """
        # If all the filters are false nothing is to be filtered.
        all_component_types = not(args.show_executions or
                                  args.show_rules or
                                  args.show_trigger_instances)

        # check if noop_triggers are to be hidden. This check applies whenever TriggerInstances
        # are to be shown.
        if (all_component_types or args.show_trigger_instances) and args.hide_noop_triggers:
            filtered_trigger_instances = []
            for trigger_instance in trace.trigger_instances:
                # A trigger instance is "noop" if no rule's caused_by points at it.
                is_noop_trigger_instance = True
                for rule in trace.rules:
                    caused_by_id = rule.get('caused_by', {}).get('id', None)
                    if caused_by_id == trigger_instance['object_id']:
                        is_noop_trigger_instance = False
                if not is_noop_trigger_instance:
                    filtered_trigger_instances.append(trigger_instance)
            trace.trigger_instances = filtered_trigger_instances

        if all_component_types:
            return trace

        # Drop every component type that was not explicitly requested.
        if not args.show_executions:
            trace.action_executions = []
        if not args.show_rules:
            trace.rules = []
        if not args.show_trigger_instances:
            trace.trigger_instances = []
        return trace
|
{
"content_hash": "74382fdbe95bd1bc73ec8044cd596127",
"timestamp": "",
"source": "github",
"line_count": 322,
"max_line_length": 97,
"avg_line_length": 43.940993788819874,
"alnum_prop": 0.563078662803025,
"repo_name": "armab/st2",
"id": "fee81e7c3cb7e1c0cd074082f7402c5c185aa596",
"size": "14929",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "st2client/st2client/commands/trace.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "198"
},
{
"name": "Makefile",
"bytes": "36807"
},
{
"name": "PowerShell",
"bytes": "299"
},
{
"name": "Python",
"bytes": "3259877"
},
{
"name": "Shell",
"bytes": "27345"
},
{
"name": "Slash",
"bytes": "677"
}
],
"symlink_target": ""
}
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
<html>
<head>
<title>Index of /construct/images/obo</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" /></head>
<body>
<h1>Index of /construct/images/obo</h1>
<ul><li><a href="/construct/images/"> Parent Directory</a></li>
<li><a href="circlebutton.png"> circlebutton.png</a></li>
<li><a href="laptop.png"> laptop.png</a></li>
<li><a href="next.png"> next.png</a></li>
<li><a href="prev.png"> prev.png</a></li>
</ul>
</body></html>
|
{
"content_hash": "37b6012a395807c971313aac7e22549a",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 77,
"avg_line_length": 37,
"alnum_prop": 0.6447876447876448,
"repo_name": "gmyboy/Blog_server",
"id": "904fe167520d15fbdf3fa084d7cb857663a40811",
"size": "518",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "WebRoot/images/obo/index.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "284868"
},
{
"name": "HTML",
"bytes": "885931"
},
{
"name": "Java",
"bytes": "227379"
},
{
"name": "JavaScript",
"bytes": "87227"
}
],
"symlink_target": ""
}
|
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null
|
{
"content_hash": "69b5261f582d7e08d77f5affd78a34f4",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.23076923076923,
"alnum_prop": 0.6917293233082706,
"repo_name": "mdoering/backbone",
"id": "04e9f5a52fa42ef6b0e6b365aa028a7d44dc5f13",
"size": "186",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Rosales/Rosaceae/Rosa/Rosa canina/ Syn. Rosa lutetiana biserrata/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
package org.lnu.is.annotations;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Annotation for Limit values.
 * Binds the "limit" request parameter that caps the number of results.
 * @author ivanursul
 *
 */
@Target({ ElementType.PARAMETER, ElementType.FIELD })
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface Limit {

    /**
     * The name of the request attribute to bind to.
     */
    String value() default "limit";

    /**
     * Whether the parameter is required.
     * Default is false, so a missing parameter falls back to the
     * {@link #defaultValue() defaultValue} instead of raising an exception.
     * Switch this to true if the parameter must be present in the request.
     */
    boolean required() default false;

    /**
     * The default value to use as a fallback. Supplying a default value
     * implicitly sets {@link #required()} to false.
     */
    String defaultValue() default "20";
}
|
{
"content_hash": "a5d002dfb128c3abf5c4289766a21dfa",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 72,
"avg_line_length": 29.025641025641026,
"alnum_prop": 0.696113074204947,
"repo_name": "ifnul/ums-backend",
"id": "f15ebff4ac796403c548a0d8185440cfa5f82a38",
"size": "1132",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "is-lnu-common/src/main/java/org/lnu/is/annotations/Limit.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "7045"
},
{
"name": "Java",
"bytes": "4184188"
},
{
"name": "Scala",
"bytes": "217850"
},
{
"name": "Shell",
"bytes": "1100"
}
],
"symlink_target": ""
}
|
layout: docs
slug: guides/resize
title: Adding Disk Space
category: cluster_management
sub_category: scaling
weight: 5
---
# Adding Disk Space to Your CoreOS Machine
On a CoreOS machine, the operating system itself is mounted as a read-only partition at `/usr`. The root partition provides read-write storage by default and on a fresh install is mostly blank. The default size of this partition depends on the platform but it is usually between 3GB and 16GB. If more space is required simply extend the virtual machine's disk image and CoreOS will fix the partition table and resize the root partition to fill the disk on the next boot.
## Amazon EC2
Amazon doesn't support directly resizing volumes, you must take a
snapshot and create a new volume based on that snapshot. Refer to
the AWS EC2 documentation on [expanding EBS volumes][ebs-expand-volume]
for detailed instructions.
[ebs-expand-volume]: http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-expand-volume.html
## QEMU (qemu-img)
Even if you are not using Qemu itself the qemu-img tool is the easiest
to use. It will work on raw, qcow2, vmdk, and most other formats. The
command accepts either an absolute size or a relative size specified
by adding a `+` prefix. Unit suffixes such as `G` or `M` are also supported.
```sh
# Increase the disk size by 5GB
qemu-img resize coreos_production_qemu_image.img +5G
```
## VMware
The interface available for resizing disks in VMware varies depending on
the product. See this [Knowledge Base article][vmkb1004047] for details.
Most products include a tool called `vmware-vdiskmanager`. The size must
be the absolute disk size, relative sizes are not supported so be
careful to only increase the size, not shrink it. The unit
suffixes `Gb` and `Mb` are supported.
```sh
# Set the disk size to 20GB
vmware-vdiskmanager -x 20Gb coreos_developer_vmware_insecure.vmx
```
[vmkb1004047]: http://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=1004047
## VirtualBox
Use qemu-img or vmware-vdiskmanager as described above. VirtualBox does
not support resizing VMDK disk images, only VDI and VHD disks. Meanwhile
VirtualBox only supports using VMDK disk images with the OVF config file
format used for importing/exporting virtual machines.
If you have no other options you can try converting the VMDK disk
image to a VDI image and configuring a new virtual machine with it:
```sh
VBoxManage clonehd old.vmdk new.vdi --format VDI
VBoxManage modifyhd new.vdi --resize 20480
```
|
{
"content_hash": "367c959e0cdfa76dc499a9ad5cb5df08",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 470,
"avg_line_length": 40.06349206349206,
"alnum_prop": 0.7832805071315373,
"repo_name": "cloudcube/coreos-manual-chinese",
"id": "390a044b2b97687164b3fcad19d407ea5fa1f073",
"size": "2528",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "cluster-management/scaling/adding-disk-space/index.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
.. _customize.calculators:
=========================
Using a Custom Calculator
=========================
By default, ramsey/uuid uses `brick/math`_ as its internal calculator. However,
you may change the calculator, if your needs require something else.
To swap the default calculator with your custom one, first make an adapter that
wraps your custom calculator and implements
:php:interface:`Ramsey\\Uuid\\Math\\CalculatorInterface`. This might look
something like this:
.. code-block:: php
:caption: Create a custom calculator wrapper that implements CalculatorInterface
:name: customize.calculators.wrapper-example
namespace MyProject;
use Other\OtherCalculator;
use Ramsey\Uuid\Math\CalculatorInterface;
use Ramsey\Uuid\Type\Integer as IntegerObject;
use Ramsey\Uuid\Type\NumberInterface;
class MyUuidCalculator implements CalculatorInterface
{
private $internalCalculator;
public function __construct(OtherCalculator $customCalculator)
{
$this->internalCalculator = $customCalculator;
}
public function add(NumberInterface $augend, NumberInterface ...$addends): NumberInterface
{
$value = $augend->toString();
foreach ($addends as $addend) {
$value = $this->internalCalculator->plus($value, $addend->toString());
}
return new IntegerObject($value);
}
/* ... Class truncated for brevity ... */
}
The easiest way to use your custom calculator wrapper is to instantiate a new
FeatureSet, set the calculator on it, and pass the FeatureSet into a new
UuidFactory. Using the factory, you may then generate and work with UUIDs, using
your custom calculator.
.. code-block:: php
:caption: Use your custom calculator wrapper when working with UUIDs
:name: customize.calculators.use-wrapper-example
use MyProject\MyUuidCalculator;
use Other\OtherCalculator;
use Ramsey\Uuid\FeatureSet;
use Ramsey\Uuid\UuidFactory;
$otherCalculator = new OtherCalculator();
$myUuidCalculator = new MyUuidCalculator($otherCalculator);
$featureSet = new FeatureSet();
$featureSet->setCalculator($myUuidCalculator);
$factory = new UuidFactory($featureSet);
$uuid = $factory->uuid1();
.. _brick/math: https://github.com/brick/math
|
{
"content_hash": "46fa67dca54aa1581f53fe9ba98fe68b",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 98,
"avg_line_length": 31.426666666666666,
"alnum_prop": 0.6911327959270259,
"repo_name": "michabbb-backup/uuid",
"id": "a2bcea0e804b0ef5b7c28e624e4e40f603b52784",
"size": "2357",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "docs/customize/calculators.rst",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "857"
},
{
"name": "PHP",
"bytes": "656439"
},
{
"name": "Shell",
"bytes": "2865"
}
],
"symlink_target": ""
}
|
using Microsoft.AspNetCore.Http;
namespace ByteNuts.NetCoreControls.Core.Extensions
{
/// <summary>
/// Extension helpers for reading request information from an HttpContext.
/// </summary>
public static class HttpRequestExtensions
{
    /// <summary>
    /// Returns the base URL of the current request: scheme, host and path base.
    /// </summary>
    public static string NccGetBaseUrl(this HttpContext context)
    {
        var currentRequest = context.Request;
        var hostPart = currentRequest.Host.ToUriComponent();
        var pathBasePart = currentRequest.PathBase.ToUriComponent();
        return string.Format("{0}://{1}{2}", currentRequest.Scheme, hostPart, pathBasePart);
    }
}
}
|
{
"content_hash": "cd236b528ec427a31974f6a8cc3adbbc",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 88,
"avg_line_length": 25.85,
"alnum_prop": 0.6266924564796905,
"repo_name": "ByteNuts/NetCoreControls",
"id": "bb0850c4a9b3e7d6701d1d2b769a91e07cac1b16",
"size": "519",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "NetCoreControls.Core/Extensions/HttpRequestExtensions.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "167134"
},
{
"name": "CSS",
"bytes": "1202"
},
{
"name": "JavaScript",
"bytes": "6391"
},
{
"name": "PowerShell",
"bytes": "2443"
}
],
"symlink_target": ""
}
|
<chapter>
<title>Java Source File Examples</title>
<para>The following example shows how to format a Java source file
containing a single public class. Interfaces are formatted similarly.</para>
<para>
<!-- do not break the following two lines -->
<programlisting
linenumbering="numbered"><?dbhtml linenumbering.everyNth="1"?><?dbfo linenumbering.everyNth="1"?>&JCoderZJavaExample;</programlisting>
</para>
</chapter>
|
{
"content_hash": "269ba0ec766e2378796716e1d16ba469",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 143,
"avg_line_length": 32.785714285714285,
"alnum_prop": 0.7015250544662309,
"repo_name": "jCoderZ/fawkez-old",
"id": "85ae2ca391ee9ec7ba1d2705ed3cdb2d83ceb18d",
"size": "459",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/doc/guidelines/java_example.xml",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "2954209"
},
{
"name": "JavaScript",
"bytes": "15190"
},
{
"name": "Shell",
"bytes": "261"
}
],
"symlink_target": ""
}
|
<?xml version="1.0"?>
<!DOCTYPE hibernate-mapping PUBLIC
"-//Hibernate/Hibernate Mapping DTD 3.0//EN"
"http://www.hibernate.org/dtd/hibernate-mapping-3.0.dtd">
<!--
Demonstrates the mapping of two subtyping one-to-one relationships
to association tables using <many-to-one> nested inside <join>.
Note that the <join> elements specify optional="true", and that
there is an inverse="true" side of both joins.
-->
<hibernate-mapping package="org.hibernate.test.onetoone.link">
<class name="Person">
<id name="name"/>
<property name="dob" type="date"/>
<join table="employeePerson"
inverse="true"
optional="true">
<key column="personId" unique="true"/>
<many-to-one name="employee"
column="employeeId"
cascade="all"
not-null="true"/>
</join>
<join table="customerPerson"
inverse="true"
optional="true">
<key column="personId" unique="true"/>
<many-to-one name="customer"
column="customerId"
cascade="all"
not-null="true"/>
</join>
</class>
<class name="Employee">
<id name="id" column="employeeId">
<generator class="native"/>
</id>
<join table="employeePerson"
optional="true">
<key column="employeeId"/>
<many-to-one name="person"
column="personId"
cascade="all"
not-null="true"
unique="true"/>
</join>
</class>
<class name="Customer">
<id name="id" column="customerId">
<generator class="native"/>
</id>
<join table="customerPerson" optional="true">
<key column="customerId"/>
<many-to-one name="person"
column="personId"
cascade="all"
not-null="true"
unique="true"/>
</join>
</class>
</hibernate-mapping>
|
{
"content_hash": "2ad67f60933ac9b1ffc69546837a3622",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 69,
"avg_line_length": 24.956521739130434,
"alnum_prop": 0.6248548199767712,
"repo_name": "HerrB92/obp",
"id": "956766532b53f505a0f79f6f759038c6859191ce",
"size": "1722",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "OpenBeaconPackage/libraries/hibernate-release-4.2.7.SP1/project/hibernate-core/src/test/java/org/hibernate/test/onetoone/link/Person.hbm.xml",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "181658"
},
{
"name": "Groovy",
"bytes": "98685"
},
{
"name": "Java",
"bytes": "34621856"
},
{
"name": "JavaScript",
"bytes": "356255"
},
{
"name": "Shell",
"bytes": "194"
},
{
"name": "XSLT",
"bytes": "21372"
}
],
"symlink_target": ""
}
|
namespace FluentValidation {
using Internal;
/// <summary>
/// Strongly-typed validation context carrying the instance being validated.
/// </summary>
public class ValidationContext<T> : ValidationContext {
    /// <summary>
    /// Creates a context with an empty property chain and the default validator selector.
    /// </summary>
    public ValidationContext(T instanceToValidate) : this(instanceToValidate, new PropertyChain(), ValidatorOptions.ValidatorSelectors.DefaultValidatorSelectorFactory()) {
    }

    /// <summary>
    /// Creates a context with an explicit property chain and validator selector.
    /// </summary>
    public ValidationContext(T instanceToValidate, PropertyChain propertyChain, IValidatorSelector validatorSelector)
        : base(instanceToValidate, propertyChain, validatorSelector) {
        InstanceToValidate = instanceToValidate;
    }

    /// <summary>
    /// The object being validated, typed as T (shadows the untyped base property).
    /// </summary>
    public new T InstanceToValidate { get; private set; }
}
/// <summary>
/// Untyped validation context: the instance under validation, the property
/// chain locating it within a parent object, and the selector deciding which
/// rules run.
/// </summary>
public class ValidationContext {
    /// <summary>
    /// Creates a context with an empty property chain and the default validator selector.
    /// </summary>
    public ValidationContext(object instanceToValidate)
        : this (instanceToValidate, new PropertyChain(), ValidatorOptions.ValidatorSelectors.DefaultValidatorSelectorFactory()){
    }

    /// <summary>
    /// Creates a context with an explicit property chain and validator selector.
    /// Note the chain is defensively copied.
    /// </summary>
    public ValidationContext(object instanceToValidate, PropertyChain propertyChain, IValidatorSelector validatorSelector) {
        PropertyChain = new PropertyChain(propertyChain);
        InstanceToValidate = instanceToValidate;
        Selector = validatorSelector;
    }

    /// <summary>Property path from the root object to the instance being validated.</summary>
    public PropertyChain PropertyChain { get; private set; }
    /// <summary>The object being validated.</summary>
    public object InstanceToValidate { get; private set; }
    /// <summary>Selector determining which rules execute.</summary>
    public IValidatorSelector Selector { get; private set; }
    /// <summary>True when this context was created for a child validator.</summary>
    public virtual bool IsChildContext { get; internal set; }

    /// <summary>
    /// Copies the context, overriding any of chain/instance/selector that are supplied.
    /// </summary>
    public ValidationContext Clone(PropertyChain chain = null, object instanceToValidate = null, IValidatorSelector selector = null) {
        return new ValidationContext(instanceToValidate ?? this.InstanceToValidate, chain ?? this.PropertyChain, selector ?? this.Selector);
    }

    /// <summary>
    /// Creates a context for a child validator, keeping chain and selector and
    /// marking the result as a child context.
    /// </summary>
    public ValidationContext CloneForChildValidator(object instanceToValidate) {
        return new ValidationContext(instanceToValidate, PropertyChain, Selector) {
            IsChildContext = true
        };
    }
}
}
|
{
"content_hash": "2e4f2e4a94fc33c07969e32bd4de28a4",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 169,
"avg_line_length": 38.51063829787234,
"alnum_prop": 0.7712707182320442,
"repo_name": "GDoronin/FluentValidation",
"id": "609307009e1323c7fc1947cb6280c5897bd32767",
"size": "2589",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/FluentValidation/ValidationContext.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "561"
},
{
"name": "C#",
"bytes": "882204"
},
{
"name": "PowerShell",
"bytes": "1462"
},
{
"name": "Smalltalk",
"bytes": "2894"
}
],
"symlink_target": ""
}
|
<?php if (!defined('TL_ROOT')) die('You can not access this file directly!');

// French (fr) labels for Contao front-end module types.
// Each entry maps an internal module key to array(label, description).
// These are runtime translation strings and must not be altered here.

$GLOBALS['TL_LANG']['FMD']['navigationMenu'] = 'Navigation';
$GLOBALS['TL_LANG']['FMD']['navigation'] = array('Menu de navigation', 'Génère un menu de navigation à partir de la structure du site.');
$GLOBALS['TL_LANG']['FMD']['customnav'] = array('Navigation personnalisée', 'Génère un menu personnalisé.');
$GLOBALS['TL_LANG']['FMD']['breadcrumb'] = array('Navigation "fil d\'Ariane"', 'Génère un menu de type "fil d\'Ariane".');
$GLOBALS['TL_LANG']['FMD']['quicknav'] = array('Navigation rapide', 'Génère un menu de type liste déroulante à partir de la structure du site.');
$GLOBALS['TL_LANG']['FMD']['quicklink'] = array('Lien rapide', 'Génère un menu de type liste déroulante.');
$GLOBALS['TL_LANG']['FMD']['booknav'] = array('Navigation "Livre"', 'Génère un menu de type "Livre".');
$GLOBALS['TL_LANG']['FMD']['articlenav'] = array('Pagination d\'articles', 'Génère une pagination pour naviguer dans les articles.');
$GLOBALS['TL_LANG']['FMD']['sitemap'] = array('Plan du site', 'Génère une liste de toutes les pages de la structure du site.');
$GLOBALS['TL_LANG']['FMD']['user'] = 'Utilisateur';
$GLOBALS['TL_LANG']['FMD']['login'] = array('Formulaire de connexion', 'Génère un formulaire de connexion.');
$GLOBALS['TL_LANG']['FMD']['logout'] = array('Déconnexion automatique', 'Déconnecte automatiquement un membre.');
$GLOBALS['TL_LANG']['FMD']['personalData'] = array('Données personnelles', 'Génère un formulaire permettant de modifier les données personnelles d\'un membre.');
$GLOBALS['TL_LANG']['FMD']['application'] = 'Applications';
$GLOBALS['TL_LANG']['FMD']['form'] = array('Formulaire', 'Ajoute un formulaire dans la page.');
$GLOBALS['TL_LANG']['FMD']['search'] = array('Moteur de recherche', 'Ajoute un formulaire de recherche dans la page.');
$GLOBALS['TL_LANG']['FMD']['articleList'] = array('Liste d\'articles', 'Génère une liste d\'articles contenu dans une zone particulière.');
$GLOBALS['TL_LANG']['FMD']['miscellaneous'] = 'Divers';
$GLOBALS['TL_LANG']['FMD']['html'] = array('Code HTML personnalisé', 'Permet d\'inclure du code HTML personnalisé.');
$GLOBALS['TL_LANG']['FMD']['flash'] = array('Animation Flash', 'Permet d\'inclure une animation Flash dans une page.');
$GLOBALS['TL_LANG']['FMD']['randomImage'] = array('Image aléatoire', 'Ajoute une image aléatoire dans une page.');
?>
|
{
"content_hash": "ab5dfe9d986ef5b13e8f619c69fde855",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 161,
"avg_line_length": 88.81481481481481,
"alnum_prop": 0.6851542952460383,
"repo_name": "TechnoGate/contao_template",
"id": "7f1fed3b0204225ae1edd5b031003cc838bae2f9",
"size": "3987",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "contao/french_translation/system/modules/frontend/languages/fr/modules.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CoffeeScript",
"bytes": "14294"
},
{
"name": "JavaScript",
"bytes": "719"
},
{
"name": "PHP",
"bytes": "1535158"
},
{
"name": "Ruby",
"bytes": "18092"
}
],
"symlink_target": ""
}
|
using System;
using System.Collections.Generic;
using UIKit;
using CocosSharp;
using PilotAndGunPortable;
namespace PilotAndGunPortable.iOS
{
    /// <summary>
    /// Hosts the CocosSharp game view, wiring the game load callback once the
    /// view is created and pausing/resuming the game as the controller's
    /// visibility changes.
    /// </summary>
    public partial class ViewController : UIViewController
    {
        public ViewController(IntPtr handle)
            : base(handle)
        {
        }

        public override void ViewDidLoad()
        {
            base.ViewDidLoad();

            if (GameView == null)
                return;

            // Set loading event to be called once game view is fully initialised
            GameView.ViewCreated += GameDelegate.LoadGame;
        }

        public override void ViewWillDisappear(bool animated)
        {
            base.ViewWillDisappear(animated);
            SetGamePaused(true);
        }

        public override void ViewDidAppear(bool animated)
        {
            base.ViewDidAppear(animated);
            SetGamePaused(false);
        }

        public override void DidReceiveMemoryWarning()
        {
            base.DidReceiveMemoryWarning();
            // Release any cached data, images, etc that aren't in use.
        }

        // Pauses or resumes the game, guarding against a missing game view.
        private void SetGamePaused(bool paused)
        {
            if (GameView != null)
                GameView.Paused = paused;
        }
    }
}
|
{
"content_hash": "e7869b33e7c372cbe076244a08f7f940",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 85,
"avg_line_length": 22.574074074074073,
"alnum_prop": 0.5594749794913864,
"repo_name": "wiphoenix/PilotAndGun",
"id": "c498ff862844126a839b5e1f6453a1b2938cc069",
"size": "1221",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "SourceCode/PilotAndGunPortable/PilotAndGunPortable/PilotAndGunPortable.iOS/ViewController.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "42305"
}
],
"symlink_target": ""
}
|
namespace Fixtures.AcceptanceTestsAzureCompositeModelClient.Models
{
    using Fixtures.AcceptanceTestsAzureCompositeModelClient;
    using Newtonsoft.Json;
    using System.Linq;

    /// <summary>
    /// Model wrapping two nullable booleans, serialized to JSON as
    /// "field_true" and "field_false" (null values are omitted).
    /// </summary>
    public partial class BooleanWrapperInner
    {
        /// <summary>
        /// Initializes a new instance of the BooleanWrapperInner class.
        /// </summary>
        public BooleanWrapperInner() { }

        /// <summary>
        /// Initializes a new instance of the BooleanWrapperInner class.
        /// </summary>
        /// <param name="fieldTrue">Value serialized as "field_true".</param>
        /// <param name="fieldFalse">Value serialized as "field_false".</param>
        public BooleanWrapperInner(bool? fieldTrue = default(bool?), bool? fieldFalse = default(bool?))
        {
            FieldTrue = fieldTrue;
            FieldFalse = fieldFalse;
        }

        /// <summary>
        /// Gets or sets the value serialized as "field_true".
        /// </summary>
        [JsonProperty(PropertyName = "field_true")]
        public bool? FieldTrue { get; set; }

        /// <summary>
        /// Gets or sets the value serialized as "field_false".
        /// </summary>
        [JsonProperty(PropertyName = "field_false")]
        public bool? FieldFalse { get; set; }
    }
}
|
{
"content_hash": "885e91924f53f3a2d07e695e23dfe8df",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 103,
"avg_line_length": 29.5,
"alnum_prop": 0.6001994017946162,
"repo_name": "anudeepsharma/autorest",
"id": "c645f707d4b230c58383d2fbb8b0847d25693a93",
"size": "1315",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/generator/AutoRest.CSharp.Azure.Fluent.Tests/Expected/AcceptanceTests/AzureCompositeModelClient/Models/BooleanWrapperInner.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "14803140"
},
{
"name": "CSS",
"bytes": "110"
},
{
"name": "CoffeeScript",
"bytes": "51933"
},
{
"name": "Go",
"bytes": "147575"
},
{
"name": "HTML",
"bytes": "274"
},
{
"name": "Java",
"bytes": "6750471"
},
{
"name": "JavaScript",
"bytes": "4464125"
},
{
"name": "PowerShell",
"bytes": "37743"
},
{
"name": "Python",
"bytes": "2067697"
},
{
"name": "Ruby",
"bytes": "182108"
},
{
"name": "Shell",
"bytes": "142"
},
{
"name": "TypeScript",
"bytes": "241255"
}
],
"symlink_target": ""
}
|
namespace grpc_cb_core {

// Thin wrapper that forwards every call to a shared ClientAsyncWriterImpl.
// Construction requires a live channel and completion queue; timeout_ms
// bounds the call (units presumably milliseconds per the name — confirm
// against ClientAsyncWriterImpl).
ClientAsyncWriterImplWrapper::ClientAsyncWriterImplWrapper(const ChannelSptr& channel,
                                           const std::string& method,
                                           const CompletionQueueSptr& cq_sptr,
                                           int64_t timeout_ms)
    : impl_sptr_(
        new ClientAsyncWriterImpl(channel, method, cq_sptr, timeout_ms)) {
  assert(cq_sptr);
  assert(channel);
}

// Destructor closes the underlying writer without a completion handler.
ClientAsyncWriterImplWrapper::~ClientAsyncWriterImplWrapper() {
  impl_sptr_->Close();  // without handler
}

// Forwards one request message to the underlying writer; returns its result.
bool ClientAsyncWriterImplWrapper::Write(const std::string& request) {
  return impl_sptr_->Write(request);
}

// Closes the writer, reporting completion through close_cb.
void ClientAsyncWriterImplWrapper::Close(const CloseCb& close_cb) {
  impl_sptr_->Close(close_cb);
}

}  // namespace grpc_cb_core
|
{
"content_hash": "c6edd517c9fd28fd9c6ddba3f1d5c54f",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 86,
"avg_line_length": 32.76,
"alnum_prop": 0.6227106227106227,
"repo_name": "jinq0123/grpc_cb_core",
"id": "8c9f2d32c2f6228bbb6e43dc782002fbaa15906e",
"size": "1074",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/client/impl/client_async_writer_impl_wrapper.cc",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1096"
},
{
"name": "C",
"bytes": "552"
},
{
"name": "C++",
"bytes": "173931"
},
{
"name": "CMake",
"bytes": "1619"
},
{
"name": "Lua",
"bytes": "1470"
},
{
"name": "Python",
"bytes": "1756"
}
],
"symlink_target": ""
}
|
package org.jboss.weld.module.web.context.http;
import java.lang.annotation.Annotation;
import jakarta.enterprise.context.Conversation;
import jakarta.enterprise.context.SessionScoped;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpSession;
import org.jboss.weld.Container;
import org.jboss.weld.config.ConfigurationKey;
import org.jboss.weld.config.WeldConfiguration;
import org.jboss.weld.contexts.AbstractBoundContext;
import org.jboss.weld.contexts.beanstore.AttributeBeanStore;
import org.jboss.weld.contexts.beanstore.BoundBeanStore;
import org.jboss.weld.contexts.beanstore.NamingScheme;
import org.jboss.weld.contexts.beanstore.SimpleBeanIdentifierIndexNamingScheme;
import org.jboss.weld.module.web.context.beanstore.http.EagerSessionBeanStore;
import org.jboss.weld.module.web.context.beanstore.http.LazySessionBeanStore;
import org.jboss.weld.context.http.HttpConversationContext;
import org.jboss.weld.context.http.HttpSessionContext;
import org.jboss.weld.logging.ContextLogger;
import org.jboss.weld.serialization.BeanIdentifierIndex;
/**
 * Session-scoped context bound to the {@link HttpSession} of the current
 * {@link HttpServletRequest}. Bean instances are stored as session attributes
 * whose keys are produced by a compact naming scheme based on the
 * {@link BeanIdentifierIndex}.
 */
public class HttpSessionContextImpl extends AbstractBoundContext<HttpServletRequest> implements HttpSessionContext {

    // There is no need to store FQCN in a session key
    static final String NAMING_SCHEME_PREFIX = "WELD_S";

    // Session attribute holding the hash of the bean identifier index, used to
    // detect sessions created against a different set of beans (see
    // checkBeanIdentifierIndexConsistency).
    static final String KEY_BEAN_ID_INDEX_HASH = NAMING_SCHEME_PREFIX + "_HASH";

    private final NamingScheme namingScheme;
    private final String contextId;

    /**
     * @param contextId the container context identifier
     * @param index index used to abbreviate bean identifiers in session keys
     */
    public HttpSessionContextImpl(String contextId, BeanIdentifierIndex index) {
        super(contextId, true);
        this.namingScheme = new SimpleBeanIdentifierIndexNamingScheme(NAMING_SCHEME_PREFIX, index);
        this.contextId = contextId;
    }

    /**
     * Associates this context with the given request by installing a fresh
     * lazy session-backed bean store. Always returns {@code true}.
     */
    public boolean associate(HttpServletRequest request) {
        // At this point the bean store should never be set - see also HttpContextLifecycle.nestedInvocationGuard
        if (getBeanStore() != null) {
            ContextLogger.LOG.beanStoreLeakDuringAssociation(this.getClass().getName(), request);
        }
        // We always associate a new bean store to avoid possible leaks (security threats)
        setBeanStore(new LazySessionBeanStore(request, namingScheme, getServiceRegistry().getRequired(WeldConfiguration.class).getBooleanProperty(
                ConfigurationKey.CONTEXT_ATTRIBUTES_LAZY_FETCH), getServiceRegistry()));
        checkBeanIdentifierIndexConsistency(request);
        return true;
    }

    /**
     * Destroys this context (and the conversation context) for the given
     * session.
     *
     * @return {@code true} if the destruction was performed immediately (no
     *         bean store associated, i.e. outside a request); {@code false} if
     *         it was performed within the currently associated request
     */
    public boolean destroy(HttpSession session) {
        final BoundBeanStore beanStore = getBeanStore();
        if (beanStore == null) {
            // Outside a request: temporarily attach an eager session-backed
            // bean store so the context can be activated and torn down.
            try {
                HttpConversationContext conversationContext = getConversationContext();
                setBeanStore(new EagerSessionBeanStore(namingScheme, session, getServiceRegistry()));
                activate();
                invalidate();
                conversationContext.destroy(session);
                deactivate();
                setBeanStore(null);
                return true;
            } finally {
                cleanup();
            }
        } else {
            // We are in a request, invalidate it
            invalidate();
            if (beanStore instanceof AttributeBeanStore) {
                AttributeBeanStore attributeBeanStore = ((AttributeBeanStore) beanStore);
                if (attributeBeanStore.isAttributeLazyFetchingEnabled()) {
                    // At this moment we have to sync the local bean store and the backing store
                    attributeBeanStore.fetchUninitializedAttributes();
                }
            }
            getConversationContext().destroy(session);
            return false;
        }
    }

    public Class<? extends Annotation> getScope() {
        return SessionScoped.class;
    }

    // Resolves the conversation context from the deployment manager of the
    // container identified by contextId.
    protected HttpConversationContext getConversationContext() {
        return Container.instance(contextId).deploymentManager().instance().select(HttpConversationContext.class).get();
    }

    protected Conversation getConversation() {
        return Container.instance(contextId).deploymentManager().instance().select(Conversation.class).get();
    }

    /**
     * Checks that the bean identifier index hash stored in the session (if
     * any) matches the current index; throws on mismatch. On first use the
     * current hash is stored, unless the index is empty.
     */
    private void checkBeanIdentifierIndexConsistency(HttpServletRequest request) {
        HttpSession session = request.getSession(false);
        if (session != null) {
            BeanIdentifierIndex index = getServiceRegistry().get(BeanIdentifierIndex.class);
            if (index != null && index.isBuilt()) {
                Object hash = session.getAttribute(KEY_BEAN_ID_INDEX_HASH);
                if (hash != null) {
                    if (!index.getIndexHash().equals(hash)) {
                        throw ContextLogger.LOG.beanIdentifierIndexInconsistencyDetected(hash.toString(), index.getDebugInfo());
                    }
                } else {
                    // Skip if bean index is empty
                    if (!index.isEmpty()) {
                        session.setAttribute(KEY_BEAN_ID_INDEX_HASH, index.getIndexHash());
                    }
                }
            }
        }
    }
}
|
{
"content_hash": "fbd9e1be7bc0aea4ec5f92b0d7677895",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 146,
"avg_line_length": 44.15652173913043,
"alnum_prop": 0.67684127609295,
"repo_name": "weld/core",
"id": "7dc227ffb74e70a85fb65b0647184db79f9f653b",
"size": "5078",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/web/src/main/java/org/jboss/weld/module/web/context/http/HttpSessionContextImpl.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Groovy",
"bytes": "7849"
},
{
"name": "HTML",
"bytes": "15226"
},
{
"name": "Java",
"bytes": "10585311"
}
],
"symlink_target": ""
}
|
#ifndef doxygen_ignore
#include <sbml/packages/qual/validator/QualConsistencyValidator.h>
/*
* Compile ConsistencyConstraints
*/
#include "constraints/QualConsistencyConstraints.cpp"
LIBSBML_CPP_NAMESPACE_BEGIN
/*
 * Initializes this Validator with a set of Constraints.
 *
 * Note the double-include idiom: QualConsistencyConstraints.cpp is included
 * once above (constraint definitions) and once more here with
 * AddingConstraintsToValidator defined, so the same file expands into
 * registration code inside this function body (presumably addConstraint()
 * calls — confirm against the constraint file's macros).
 */
void
QualConsistencyValidator::init ()
{
#define AddingConstraintsToValidator 1
#include "constraints/QualConsistencyConstraints.cpp"
}
LIBSBML_CPP_NAMESPACE_END
#endif
/** @endcond */
|
{
"content_hash": "8fbe05627736ca73d6b5cb00d9b608bd",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 66,
"avg_line_length": 17.310344827586206,
"alnum_prop": 0.7350597609561753,
"repo_name": "TheCoSMoCompany/biopredyn",
"id": "5e6b58fb962f052374876154b53a78667b2fc157",
"size": "2147",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Prototype/src/libsbml-5.10.0/src/sbml/packages/qual/validator/QualConsistencyValidator.cpp",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "3535918"
},
{
"name": "C++",
"bytes": "26120778"
},
{
"name": "CMake",
"bytes": "455400"
},
{
"name": "CSS",
"bytes": "49020"
},
{
"name": "Gnuplot",
"bytes": "206"
},
{
"name": "HTML",
"bytes": "193068"
},
{
"name": "Java",
"bytes": "66517"
},
{
"name": "JavaScript",
"bytes": "3847"
},
{
"name": "Makefile",
"bytes": "30905"
},
{
"name": "Perl",
"bytes": "3018"
},
{
"name": "Python",
"bytes": "7891301"
},
{
"name": "Shell",
"bytes": "247654"
},
{
"name": "TeX",
"bytes": "22566"
},
{
"name": "XSLT",
"bytes": "55564"
}
],
"symlink_target": ""
}
|
using System.Collections.Generic;
using Newtonsoft.Json;
namespace IBM.WatsonDeveloperCloud.NaturalLanguageUnderstanding.v1.Model
{
    /// <summary>
    /// A single mention of an entity within the analyzed text, consisting of
    /// the mention text and its character offsets.
    /// </summary>
    public class EntityMention
    {
        /// <summary>
        /// Entity mention text.
        /// </summary>
        /// <value>Entity mention text.</value>
        [JsonProperty("text", NullValueHandling = NullValueHandling.Ignore)]
        public string Text { get; set; }

        /// <summary>
        /// Character offsets indicating the beginning and end of the mention in the analyzed text.
        /// </summary>
        /// <value>Character offsets indicating the beginning and end of the mention in the analyzed text.</value>
        [JsonProperty("location", NullValueHandling = NullValueHandling.Ignore)]
        public List<long?> Location { get; set; }
    }
}
|
{
"content_hash": "4a9bba61cd5f5ed7727a3c658a5eb76a",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 114,
"avg_line_length": 32.44444444444444,
"alnum_prop": 0.639269406392694,
"repo_name": "mediumTaj/dotnet-standard-sdk-generated",
"id": "8068b1e014bba4811f0cc9c646c976188f126cad",
"size": "1479",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/IBM.WatsonDeveloperCloud.NaturalLanguageUnderstanding.v1/Model/EntityMention.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "1800833"
},
{
"name": "HTML",
"bytes": "71030"
},
{
"name": "PowerShell",
"bytes": "3709"
}
],
"symlink_target": ""
}
|
<div class="commune_descr limited">
<p>
Saint-Avit-de-Soulège est
un village
situé dans le département de Gironde en Aquitaine. Elle comptait 76 habitants en 2008.</p>
<p>À côté de Saint-Avit-de-Soulège sont positionnées géographiquement les communes de
<a href="{{VLROOT}}/immobilier/caplong_33094/">Caplong</a> à 4 km, 213 habitants,
<a href="{{VLROOT}}/immobilier/saint-quentin-de-caplong_33467/">Saint-Quentin-de-Caplong</a> localisée à 3 km, 252 habitants,
<a href="{{VLROOT}}/immobilier/saint-antoine-de-breuilh_24370/">Saint-Antoine-de-Breuilh</a> située à 4 km, 2 022 habitants,
<a href="{{VLROOT}}/immobilier/leves-et-thoumeyragues_33242/">Les Lèves-et-Thoumeyragues</a> située à 4 km, 530 habitants,
<a href="{{VLROOT}}/immobilier/eynesse_33160/">Eynesse</a> à 2 km, 549 habitants,
<a href="{{VLROOT}}/immobilier/saint-seurin-de-prats_24501/">Saint-Seurin-de-Prats</a> localisée à 3 km, 494 habitants,
entre autres. De plus, Saint-Avit-de-Soulège est située à seulement 28 km de <a href="{{VLROOT}}/immobilier/bergerac_24037/">Bergerac</a>.</p>
<p>Si vous envisagez de déménager à Saint-Avit-de-Soulège, vous pourrez aisément trouver une maison à acheter.</p>
<p>Le nombre de logements, à Saint-Avit-de-Soulège, était réparti en 2011 en deux appartements et 43 maisons soit
un marché relativement équilibré.</p>
</div>
|
{
"content_hash": "6ba57274e32c27205959f5fd37548f19",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 148,
"avg_line_length": 81.94117647058823,
"alnum_prop": 0.7422828427853554,
"repo_name": "donaldinou/frontend",
"id": "51fc9c69f1952b69e72e0b319482e8f30f26f7d5",
"size": "1426",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Viteloge/CoreBundle/Resources/descriptions/33377.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "3073"
},
{
"name": "CSS",
"bytes": "111338"
},
{
"name": "HTML",
"bytes": "58634405"
},
{
"name": "JavaScript",
"bytes": "88564"
},
{
"name": "PHP",
"bytes": "841919"
}
],
"symlink_target": ""
}
|
@class MCLLoginManager;

// Client-defined error codes (negative, not HTTP status codes).
static const NSInteger MCLHTTPErrorCodeNoInternetConnection = -2;
static const NSInteger MCLHTTPErrorCodeMServiceConnection = -1;
// Matches the HTTP 401 Unauthorized status.
static const NSInteger MCLHTTPErrorCodeInvalidLogin = 401;

// Abstraction over the app's HTTP transport. All request methods are
// asynchronous and report the outcome through a completion block carrying
// either an error or the parsed JSON response.
@protocol MCLHTTPClient <NSObject>

// Designated initializer; the login manager presumably supplies credentials
// for requests issued with needsLogin:YES — confirm against the concrete
// implementation.
- (instancetype)initWithLoginManager:(MCLLoginManager *)loginManager;

// GET request to the given URL string.
- (void)getRequestToUrlString:(NSString *)urlString
                   needsLogin:(BOOL)needsLogin
            completionHandler:(void (^)(NSError *error, NSDictionary *json))completion;

// POST request with form-style variables.
- (void)postRequestToUrlString:(NSString *)urlString
                      withVars:(NSDictionary *)vars
                    needsLogin:(BOOL)needsLogin
             completionHandler:(void (^)(NSError *error, NSDictionary *json))completion;

// POST request with a JSON body.
- (void)postRequestToUrlString:(NSString *)urlString
                      withJSON:(NSDictionary *)json
                    needsLogin:(BOOL)needsLogin
             completionHandler:(void (^)(NSError *error, NSDictionary *json))completion;

// PUT request with form-style variables.
- (void)putRequestToUrlString:(NSString *)urlString
                     withVars:(NSDictionary *)vars
                   needsLogin:(BOOL)needsLogin
            completionHandler:(void (^)(NSError *error, NSDictionary *json))completion;

// PUT request with a JSON body.
- (void)putRequestToUrlString:(NSString *)urlString
                     withJSON:(NSDictionary *)json
                   needsLogin:(BOOL)needsLogin
            completionHandler:(void (^)(NSError *error, NSDictionary *json))completion;

// DELETE request with form-style variables.
- (void)deleteRequestToUrlString:(NSString *)urlString
                        withVars:(NSDictionary *)vars
                      needsLogin:(BOOL)needsLogin
               completionHandler:(void (^)(NSError *error, NSDictionary *json))completion;

@end
|
{
"content_hash": "0220c12515774a94c93c0824868ce1c5",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 90,
"avg_line_length": 43.25,
"alnum_prop": 0.6641618497109827,
"repo_name": "Stitch7/mclient",
"id": "25dd58a2650ed54477f7139a2cab4d40565b5a66",
"size": "1923",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mclient/Network/HTTPClient/MCLHTTPClient.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "542"
},
{
"name": "HTML",
"bytes": "4452"
},
{
"name": "Objective-C",
"bytes": "752494"
},
{
"name": "Ruby",
"bytes": "8296"
},
{
"name": "Swift",
"bytes": "331731"
}
],
"symlink_target": ""
}
|
<?php defined('APPPATH') or die('No script access'); // block direct URL access to this partial ?>
<?php $PAGE_TITLE = page_title('Forbidden'); // sets the page title shown by the layout ?>
<!-- 403 Forbidden error page body -->
<div class="small-page err">
    <h2>403</h2>
    <p>You shall not pass.</p>
</div>
|
{
"content_hash": "9906455dc6d7ff6d9ea9ee33bc9e96f9",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 55,
"avg_line_length": 26.857142857142858,
"alnum_prop": 0.601063829787234,
"repo_name": "aravindanve/tiny-fw",
"id": "660423f695593d2df18444b62081715a7d2d118c",
"size": "188",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "partials/forbidden.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "229"
},
{
"name": "CSS",
"bytes": "2861"
},
{
"name": "PHP",
"bytes": "19051"
}
],
"symlink_target": ""
}
|
"""
Regression tests for Ecobee 3.
https://github.com/home-assistant/home-assistant/issues/15336
"""
from unittest import mock
from homekit import AccessoryDisconnectedError
from homeassistant.config_entries import ENTRY_STATE_SETUP_RETRY
from homeassistant.components.climate.const import (
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_TARGET_HUMIDITY,
)
from tests.components.homekit_controller.common import (
FakePairing,
device_config_changed,
setup_accessories_from_file,
setup_test_accessories,
Helper,
time_changed,
)
async def test_ecobee3_setup(hass):
    """Test that an Ecobee 3 can be correctly setup in HA."""
    accessories = await setup_accessories_from_file(hass, "ecobee3.json")
    config_entry, pairing = await setup_test_accessories(hass, accessories)

    entity_registry = await hass.helpers.entity_registry.async_get_registry()

    # The thermostat itself is exposed as a climate entity.
    climate = entity_registry.async_get("climate.homew")
    assert climate.unique_id == "homekit-123456789012-16"

    climate_helper = Helper(
        hass, "climate.homew", pairing, accessories[0], config_entry
    )
    climate_state = await climate_helper.poll_and_get_state()
    assert climate_state.attributes["friendly_name"] == "HomeW"
    assert climate_state.attributes["supported_features"] == (
        SUPPORT_TARGET_TEMPERATURE | SUPPORT_TARGET_HUMIDITY
    )

    assert climate_state.attributes["hvac_modes"] == [
        "off",
        "heat",
        "cool",
        "heat_cool",
    ]

    # Temperature/humidity limits as reported by the accessory fixture.
    assert climate_state.attributes["min_temp"] == 7.2
    assert climate_state.attributes["max_temp"] == 33.3
    assert climate_state.attributes["min_humidity"] == 20
    assert climate_state.attributes["max_humidity"] == 50

    # Each remote occupancy sensor becomes its own binary_sensor entity.
    occ1 = entity_registry.async_get("binary_sensor.kitchen")
    assert occ1.unique_id == "homekit-AB1C-56"

    occ1_helper = Helper(
        hass, "binary_sensor.kitchen", pairing, accessories[0], config_entry
    )
    occ1_state = await occ1_helper.poll_and_get_state()
    assert occ1_state.attributes["friendly_name"] == "Kitchen"

    occ2 = entity_registry.async_get("binary_sensor.porch")
    assert occ2.unique_id == "homekit-AB2C-56"

    occ3 = entity_registry.async_get("binary_sensor.basement")
    assert occ3.unique_id == "homekit-AB3C-56"

    device_registry = await hass.helpers.device_registry.async_get_registry()

    # The thermostat has its own device-registry entry.
    climate_device = device_registry.async_get(climate.device_id)
    assert climate_device.manufacturer == "ecobee Inc."
    assert climate_device.name == "HomeW"
    assert climate_device.model == "ecobee3"
    assert climate_device.sw_version == "4.2.394"
    assert climate_device.via_device_id is None

    # Check that an attached sensor has its own device entity that
    # is linked to the bridge
    sensor_device = device_registry.async_get(occ1.device_id)
    assert sensor_device.manufacturer == "ecobee Inc."
    assert sensor_device.name == "Kitchen"
    assert sensor_device.model == "REMOTE SENSOR"
    assert sensor_device.sw_version == "1.0.0"
    assert sensor_device.via_device_id == climate_device.id
async def test_ecobee3_setup_from_cache(hass, hass_storage):
    """Test that an Ecobee can be correctly set up from its cached entity map."""
    accessories = await setup_accessories_from_file(hass, "ecobee3.json")

    # Pre-populate storage with a serialized entity map so setup can proceed
    # without querying the accessory for its services.
    serialized_accessories = [
        accessory.to_accessory_and_service_list() for accessory in accessories
    ]
    hass_storage["homekit_controller-entity-map"] = {
        "version": 1,
        "data": {
            "pairings": {
                "00:00:00:00:00:00": {
                    "config_num": 1,
                    "accessories": serialized_accessories,
                }
            }
        },
    }

    await setup_test_accessories(hass, accessories)

    entity_registry = await hass.helpers.entity_registry.async_get_registry()

    # All entities from the cached map should be registered.
    expected_unique_ids = {
        "climate.homew": "homekit-123456789012-16",
        "binary_sensor.kitchen": "homekit-AB1C-56",
        "binary_sensor.porch": "homekit-AB2C-56",
        "binary_sensor.basement": "homekit-AB3C-56",
    }
    for entity_id, unique_id in expected_unique_ids.items():
        assert entity_registry.async_get(entity_id).unique_id == unique_id
async def test_ecobee3_setup_connection_failure(hass):
    """Test that Ecobee setup retries and succeeds after an initial connection failure."""
    accessories = await setup_accessories_from_file(hass, "ecobee3.json")

    entity_registry = await hass.helpers.entity_registry.async_get_registry()

    # Test that the connection fails during initial setup.
    # No entities should be created.
    list_accessories = "list_accessories_and_characteristics"
    with mock.patch.object(FakePairing, list_accessories) as laac:
        laac.side_effect = AccessoryDisconnectedError("Connection failed")

        # If there is no cached entity map and the accessory connection is
        # failing then we have to fail the config entry setup.
        config_entry, pairing = await setup_test_accessories(hass, accessories)
        assert config_entry.state == ENTRY_STATE_SETUP_RETRY

    climate = entity_registry.async_get("climate.homew")
    assert climate is None

    # When accessory raises ConfigEntryNoteReady HA will retry - lets make
    # sure there is no cruft causing conflicts left behind by now doing
    # a successful setup.

    # We just advance time by 5 minutes so that the retry happens, rather
    # than manually invoking async_setup_entry - this means we need to
    # make sure the IpPairing mock is in place or we'll try to connect to
    # a real device. Normally this mocking is done by the helper in
    # setup_test_accessories.
    pairing_cls_loc = "homekit.controller.ip_implementation.IpPairing"
    with mock.patch(pairing_cls_loc) as pairing_cls:
        pairing_cls.return_value = pairing
        await time_changed(hass, 5 * 60)

    # After the retry all entities should exist.
    climate = entity_registry.async_get("climate.homew")
    assert climate.unique_id == "homekit-123456789012-16"

    occ1 = entity_registry.async_get("binary_sensor.kitchen")
    assert occ1.unique_id == "homekit-AB1C-56"

    occ2 = entity_registry.async_get("binary_sensor.porch")
    assert occ2.unique_id == "homekit-AB2C-56"

    occ3 = entity_registry.async_get("binary_sensor.basement")
    assert occ3.unique_id == "homekit-AB3C-56"
async def test_ecobee3_add_sensors_at_runtime(hass):
    """Test that new sensors are automatically added."""
    entity_registry = await hass.helpers.entity_registry.async_get_registry()

    sensor_unique_ids = {
        "binary_sensor.kitchen": "homekit-AB1C-56",
        "binary_sensor.porch": "homekit-AB2C-56",
        "binary_sensor.basement": "homekit-AB3C-56",
    }

    # Set up a base Ecobee 3 with no additional sensors: only the climate
    # entity should be visible.
    accessories = await setup_accessories_from_file(hass, "ecobee3_no_sensors.json")
    await setup_test_accessories(hass, accessories)

    climate = entity_registry.async_get("climate.homew")
    assert climate.unique_id == "homekit-123456789012-16"

    for entity_id in sensor_unique_ids:
        assert entity_registry.async_get(entity_id) is None

    # Now reconfigure with 3 sensors attached at runtime - the sensors should
    # appear and climate shouldn't be duplicated.
    accessories = await setup_accessories_from_file(hass, "ecobee3.json")
    await device_config_changed(hass, accessories)

    for entity_id, unique_id in sensor_unique_ids.items():
        assert entity_registry.async_get(entity_id).unique_id == unique_id
|
{
"content_hash": "482fdbe3a8e7bcd840c6ce208477ee64",
"timestamp": "",
"source": "github",
"line_count": 208,
"max_line_length": 84,
"avg_line_length": 37.21153846153846,
"alnum_prop": 0.6918604651162791,
"repo_name": "qedi-r/home-assistant",
"id": "8473d2352782655321430ea71ac23c15e2d86573",
"size": "7740",
"binary": false,
"copies": "4",
"ref": "refs/heads/dev",
"path": "tests/components/homekit_controller/specific_devices/test_ecobee3.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "18564720"
},
{
"name": "Shell",
"bytes": "6846"
}
],
"symlink_target": ""
}
|
<!DOCTYPE html>
<!--[if IE]><![endif]-->
<html>
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
<title>Class InputFilesProvider
</title>
<meta name="viewport" content="width=device-width">
<meta name="title" content="Class InputFilesProvider
">
<meta name="generator" content="docfx 2.56.6.0">
<link rel="shortcut icon" href="../favicon.ico">
<link rel="stylesheet" href="../styles/docfx.vendor.css">
<link rel="stylesheet" href="../styles/docfx.css">
<link rel="stylesheet" href="../styles/main.css">
<meta property="docfx:navrel" content="../toc.html">
<meta property="docfx:tocrel" content="toc.html">
<!-- Global site tag (gtag.js) - Google Analytics -->
<script async="" src="https://www.googletagmanager.com/gtag/js?id=G-4PT0B2VQYN"></script>
<script>
window.dataLayer = window.dataLayer || [];
function gtag(){dataLayer.push(arguments);}
gtag('js', new Date());
gtag('config', 'G-4PT0B2VQYN');
</script>
</head>
<body data-spy="scroll" data-target="#affix" data-offset="120">
<div id="wrapper">
<header>
<nav id="autocollapse" class="navbar navbar-inverse ng-scope" role="navigation">
<div class="container">
<div class="navbar-header">
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target="#navbar">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="../index.html">
<img id="logo" class="svg" src="../logo.svg" alt="">
</a>
</div>
<div class="collapse navbar-collapse" id="navbar">
<form class="navbar-form navbar-right" role="search" id="search">
<div class="form-group">
<input type="text" class="form-control" id="search-query" placeholder="Search" autocomplete="off">
</div>
</form>
</div>
</div>
</nav>
<div class="subnav navbar navbar-default">
<div class="container hide-when-search" id="breadcrumb">
<ul class="breadcrumb">
<li></li>
</ul>
</div>
</div>
</header>
<div role="main" class="container body-content hide-when-search">
<div class="sidenav hide-when-search">
<a class="btn toc-toggle collapse" data-toggle="collapse" href="#sidetoggle" aria-expanded="false" aria-controls="sidetoggle">Show / Hide Table of Contents</a>
<div class="sidetoggle collapse" id="sidetoggle">
<div id="sidetoc"></div>
</div>
</div>
<div class="article row grid-right">
<div class="col-md-10">
<article class="content wrap" id="_content" data-uid="ZptSharp.BulkRendering.InputFilesProvider">
<h1 id="ZptSharp_BulkRendering_InputFilesProvider" data-uid="ZptSharp.BulkRendering.InputFilesProvider" class="text-break">Class InputFilesProvider
</h1>
<div class="markdown level0 summary"><p>Implementation of <a class="xref" href="ZptSharp.BulkRendering.IGetsInputFiles.html">IGetsInputFiles</a> which gets input files from a request.</p>
</div>
<div class="markdown level0 conceptual"></div>
<div class="inheritance">
<h5>Inheritance</h5>
<div class="level0"><span class="xref">System.Object</span></div>
<div class="level1"><span class="xref">InputFilesProvider</span></div>
</div>
<div classs="implements">
<h5>Implements</h5>
<div><a class="xref" href="ZptSharp.BulkRendering.IGetsInputFiles.html">IGetsInputFiles</a></div>
</div>
<div class="inheritedMembers">
<h5>Inherited Members</h5>
<div>
<span class="xref">System.Object.Equals(System.Object)</span>
</div>
<div>
<span class="xref">System.Object.Equals(System.Object, System.Object)</span>
</div>
<div>
<span class="xref">System.Object.GetHashCode()</span>
</div>
<div>
<span class="xref">System.Object.GetType()</span>
</div>
<div>
<span class="xref">System.Object.MemberwiseClone()</span>
</div>
<div>
<span class="xref">System.Object.ReferenceEquals(System.Object, System.Object)</span>
</div>
<div>
<span class="xref">System.Object.ToString()</span>
</div>
</div>
<h6><strong>Namespace</strong>: <a class="xref" href="ZptSharp.BulkRendering.html">ZptSharp.BulkRendering</a></h6>
<h6><strong>Assembly</strong>: ZptSharp.dll</h6>
<h5 id="ZptSharp_BulkRendering_InputFilesProvider_syntax">Syntax</h5>
<div class="codewrapper">
<pre><code class="lang-csharp hljs">public class InputFilesProvider : IGetsInputFiles</code></pre>
</div>
<h3 id="methods">Methods
</h3>
<span class="small pull-right mobile-hide">
<span class="divider">|</span>
<a href="https://github.com/csf-dev/ZPT-Sharp/new/master/apiSpec/new?filename=ZptSharp_BulkRendering_InputFilesProvider_GetInputFilesAsync_ZptSharp_BulkRendering_BulkRenderingRequest_System_Threading_CancellationToken_.md&value=---%0Auid%3A%20ZptSharp.BulkRendering.InputFilesProvider.GetInputFilesAsync(ZptSharp.BulkRendering.BulkRenderingRequest%2CSystem.Threading.CancellationToken)%0Asummary%3A%20'*You%20can%20override%20summary%20for%20the%20API%20here%20using%20*MARKDOWN*%20syntax'%0A---%0A%0A*Please%20type%20below%20more%20information%20about%20this%20API%3A*%0A%0A">Improve this Doc</a>
</span>
<span class="small pull-right mobile-hide">
<a href="https://github.com/csf-dev/ZPT-Sharp/blob/master/ZptSharp/BulkRendering/InputFilesProvider.cs/#L22">View Source</a>
</span>
<a id="ZptSharp_BulkRendering_InputFilesProvider_GetInputFilesAsync_" data-uid="ZptSharp.BulkRendering.InputFilesProvider.GetInputFilesAsync*"></a>
<h4 id="ZptSharp_BulkRendering_InputFilesProvider_GetInputFilesAsync_ZptSharp_BulkRendering_BulkRenderingRequest_System_Threading_CancellationToken_" data-uid="ZptSharp.BulkRendering.InputFilesProvider.GetInputFilesAsync(ZptSharp.BulkRendering.BulkRenderingRequest,System.Threading.CancellationToken)">GetInputFilesAsync(BulkRenderingRequest, CancellationToken)</h4>
<div class="markdown level1 summary"><p>Gets a collection of the input files to be processed in the bulk-rendering operation.</p>
</div>
<div class="markdown level1 conceptual"></div>
<h5 class="decalaration">Declaration</h5>
<div class="codewrapper">
<pre><code class="lang-csharp hljs">public Task<IEnumerable<InputFile>> GetInputFilesAsync(BulkRenderingRequest request, CancellationToken token = default(CancellationToken))</code></pre>
</div>
<h5 class="parameters">Parameters</h5>
<table class="table table-bordered table-striped table-condensed">
<thead>
<tr>
<th>Type</th>
<th>Name</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td><a class="xref" href="ZptSharp.BulkRendering.BulkRenderingRequest.html">BulkRenderingRequest</a></td>
<td><span class="parametername">request</span></td>
<td><p>The bulk-rendering request.</p>
</td>
</tr>
<tr>
<td><span class="xref">System.Threading.CancellationToken</span></td>
<td><span class="parametername">token</span></td>
<td><p>An optional cancellation token.</p>
</td>
</tr>
</tbody>
</table>
<h5 class="returns">Returns</h5>
<table class="table table-bordered table-striped table-condensed">
<thead>
<tr>
<th>Type</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td><span class="xref">System.Threading.Tasks.Task</span><<span class="xref">System.Collections.Generic.IEnumerable</span><<a class="xref" href="ZptSharp.BulkRendering.InputFile.html">InputFile</a>>></td>
<td><p>A collection of input files.</p>
</td>
</tr>
</tbody>
</table>
<h3 id="implements">Implements</h3>
<div>
<a class="xref" href="ZptSharp.BulkRendering.IGetsInputFiles.html">IGetsInputFiles</a>
</div>
</article>
</div>
<div class="hidden-sm col-md-2" role="complementary">
<div class="sideaffix">
<div class="contribution">
<ul class="nav">
<li>
<a href="https://github.com/csf-dev/ZPT-Sharp/new/master/apiSpec/new?filename=ZptSharp_BulkRendering_InputFilesProvider.md&value=---%0Auid%3A%20ZptSharp.BulkRendering.InputFilesProvider%0Asummary%3A%20'*You%20can%20override%20summary%20for%20the%20API%20here%20using%20*MARKDOWN*%20syntax'%0A---%0A%0A*Please%20type%20below%20more%20information%20about%20this%20API%3A*%0A%0A" class="contribution-link">Improve this Doc</a>
</li>
<li>
<a href="https://github.com/csf-dev/ZPT-Sharp/blob/master/ZptSharp/BulkRendering/InputFilesProvider.cs/#L14" class="contribution-link">View Source</a>
</li>
</ul>
</div>
<nav class="bs-docs-sidebar hidden-print hidden-xs hidden-sm affix" id="affix">
<h5>In This Article</h5>
<div></div>
</nav>
</div>
</div>
</div>
</div>
<footer>
<div class="grad-bottom"></div>
<div class="footer">
<div class="container">
<span class="pull-right">
<a href="#top">Back to top</a>
</span>
<span>Generated by <strong>DocFX</strong></span>
</div>
</div>
</footer>
</div>
<script type="text/javascript" src="../styles/docfx.vendor.js"></script>
<script type="text/javascript" src="../styles/docfx.js"></script>
<script type="text/javascript" src="../styles/main.js"></script>
</body>
</html>
|
{
"content_hash": "3e4f83243f75bf6727b8ec66dac2bd4f",
"timestamp": "",
"source": "github",
"line_count": 230,
"max_line_length": 605,
"avg_line_length": 44.130434782608695,
"alnum_prop": 0.6262068965517241,
"repo_name": "csf-dev/ZPT-Sharp",
"id": "ed8bd6d05d6674adfcd38afd6b9e50167526008b",
"size": "10152",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/api/ZptSharp.BulkRendering.InputFilesProvider.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "AMPL",
"bytes": "4949"
},
{
"name": "Batchfile",
"bytes": "3142"
},
{
"name": "C#",
"bytes": "1670750"
},
{
"name": "HTML",
"bytes": "146448"
},
{
"name": "PowerShell",
"bytes": "2416"
}
],
"symlink_target": ""
}
|
layout: post
title: "Upsampling and Image Segmentation with Tensorflow and TF-Slim"
description:
comments: true
categories:
- Tensorflow
- TF-Slim
---
A post showing how to perform Upsampling and Image Segmentation
with a recently released TF-Slim library and pretrained models.
___
### Introduction
In the previous post, we saw how to do _Image Classification_
by performing crop of the central part of an image and
making an inference using one of the standard classification models.
After that, we saw how to perform the network inference on the whole
image by changing the network to _fully convolutional_ one. This approach
gave us a downsampled prediction map for the image -- that happened due
to the fact that _max-pooling_ layers are used in the network architecture.
This prediction map can be treated as an efficient way to make an inference of
the network for the whole image. You can also think about it as a way to make
_Image Segmentation_, but it is not an actual _Segmentation_, because
the standard network models were trained to perform _Classification_.
To make it perform an actual _Segmentation_, we will have to train it
on _Segmentation_ dataset in a special way like in the paper _Fully convolutional
networks for semantic segmentation_ by Long et al. Two of the most popular general
_Segmentation_ datasets are: [Microsoft COCO](http://mscoco.org/) and [PASCAL VOC](http://host.robots.ox.ac.uk/pascal/VOC/).
In this post, we will perform image upsampling to get the prediction map
that is of the same size as an input image. We will do this using _transposed convolution_
(also known as _deconvolution_). It is recommended not to use the _deconvolution_ name for this
operation as it can be confused with another operation and it does not represent accurately
the actual process that is being performed. The most accurate name for the kind of operation
that we will perform in this post is _fractionally strided convolution_.
We will cover a small part of theory necessary for understanding and some resources will be cited.
One question might be raised up now: Why do we need to perform upsampling using _fractionally
strided convolution_? Why can't we just use some library to do this for us? The answer is: we
need to do this because we need to define the upsampling operation as a layer in the network.
And why do we need it as a layer? Because we will have to perform training where the image and
respective _Segmentation_ groundtruth will be given to us -- and we will have to perform
training using backpropagation.
As it is [known](http://www.robots.ox.ac.uk/~vgg/practicals/cnn/) , each layer in the network has to be able to perform three operations:
_forward propagation_, _backward propagation_ and _update_ which performs updates
to the weights of the layer during training. By doing the upsampling with _transposed convolution_
we will have all of these operations defined and we will be able to perform training.
By the end of the post, we will implement the upsampling and will make sure it is correct
by comparing it to the implementation of the [scikit-image library](http://scikit-image.org/).
To be more specific we will have _FCN-32_ _Segmentation_ network implemented which is
described in the paper _Fully convolutional networks for semantic segmentation_.
To perform the training, the loss function has to be defined and training dataset provided.
The blog post is created using jupyter notebook. After each chunk of a code
you can see the result of its evaluation. You can also get the notebook
file from [here](https://github.com/warmspringwinds/tensorflow_notes/blob/master/upsampling_segmentation.ipynb).
### Setup
To be able to run the provided code, follow the previous post and run the
following command to run the code on the first _GPU_ and specify the folder
with downloaded classification models:
```python
from __future__ import division
import sys
import os
import numpy as np
from matplotlib import pyplot as plt
os.environ["CUDA_VISIBLE_DEVICES"] = '0'
sys.path.append("/home/dpakhom1/workspace/models/slim")
# A place where you have downloaded a network checkpoint -- look at the previous post
checkpoints_dir = '/home/dpakhom1/checkpoints'
sys.path.append("/home/dpakhom1/workspace/models/slim")
```
### Image Upsampling
_Image Upsampling_ is a specific case of _Resampling_.
According to a definition, provided in [this article about _Resampling_](http://avisynth.nl/index.php/Resampling):
```
The idea behind resampling is to reconstruct the continuous signal from the original sampled signal and resample it again using more samples (which is called interpolation or upsampling) or fewer samples (which is called decimation or downsampling)
```
In other words, we can approximate the continuous signal from the points that
we have and sample new ones from the reconstructed signal. So, to be more specific,
in our case, we have downsampled prediction map -- these are points from which we
want to reconstruct original signal. And if we are able to approximate original
signal, we can sample more points and, therefore, perform upsampling.
We say "to approximate" here because the continious signal will most probably
not be reconstructed perfectly. Under a certain ideal conditions, the signal
can be perfectly reconstructed though. There is a _Nyquist–Shannon sampling theorem_
that states that the signal can be ideally reconstructed if _x(t) contains no
sinusoidal component at exactly frequency B, or that B must be strictly less than one half
the sample rate_. Basically, the sampling frequency should be bigger by the factor of two
than the biggest frequency that input signal contains.
But what is the exact equation to get this reconstruction? Taking the equation from this [source](http://avisynth.nl/index.php/Resampling):
s(x) = sum_n s(n*T) * sinc((x-n*T)/T), with sinc(x) = sin(pi*x)/(pi*x) for x!=0, and = 1 for x=0
with fs = 1/T the sampling rate, s(n*T) the samples of s(x) and sinc(x) the resampling kernel.
[Wikipedia article](https://en.wikipedia.org/wiki/Nyquist%E2%80%93Shannon_sampling_theorem) has a great explanation of the equation:
```
A mathematically ideal way to interpolate the sequence involves the use of sinc functions. Each sample in the sequence is replaced by a sinc function, centered on the time axis at the original location of the sample, nT, with the amplitude of the sinc function scaled to the sample value, x[n]. Subsequently, the sinc functions are summed into a continuous function. A mathematically equivalent method is to convolve one sinc function with a series of Dirac delta pulses, weighted by the sample values. Neither method is numerically practical. Instead, some type of approximation of the sinc functions, finite in length, is used. The imperfections attributable to the approximation are known as interpolation error.
```
So, we can see that the continuous signal is reconstructed by placing the resampling kernel
function at each point that we have and summing up everything (this explanation omits some
details for simplicity). It should be stated here that the resampling kernel needn't necessarily
be a sinc function. For example, the bilinear resampling kernel can be used. You can find more
examples [here](http://avisynth.nl/index.php/Resampling). Also, one important point
from the explanation above is that mathematically equivalent way is to convolve the
kernel function with series of Dirac delta pulses, weighted by the sample values. These
two equivalent ways to perform reconstruction are important, as they will help in
understanding how transposed convolution works and why each transposed convolution
has an equivalent convolution.
Let's perform image upsampling using built-in function from [scikit-image library](http://scikit-image.org/).
We will need this to validate if our implementation of bilinear upsampling is correct later in the post.
This is exactly how the implementation of bilinear upsampling [was validated](http://nbviewer.jupyter.org/gist/tnarihi/54744612d35776f53278) before [being merged](https://github.com/BVLC/caffe/pull/2213). Part of the code in the post was taken from
there. Below we will perform the upsampling with _factor_ 3 meaning that an output size
will be three times bigger than an input.
```python
%matplotlib inline
from numpy import ogrid, repeat, newaxis
from skimage import io
# Generate image that will be used for test upsampling
# Number of channels is 3 -- we also treat the number of
# samples like the number of classes, because later on
# that will be used to upsample predictions from the network
imsize = 3
x, y = ogrid[:imsize, :imsize]
img = repeat((x + y)[..., newaxis], 3, 2) / float(imsize + imsize)
io.imshow(img, interpolation='none')
```
<matplotlib.image.AxesImage at 0x7fe530704050>

```python
import skimage.transform
def upsample_skimage(factor, input_img):
    """Upsample ``input_img`` by ``factor`` using scikit-image.

    ``order=1`` selects bilinear interpolation; ``mode='constant'`` with
    ``cval=0`` zero-pads at the border, mirroring TensorFlow's behaviour.
    """
    rescale_options = {'mode': 'constant', 'cval': 0, 'order': 1}
    return skimage.transform.rescale(input_img, factor, **rescale_options)
upsampled_img_skimage = upsample_skimage(factor=3, input_img=img)
io.imshow(upsampled_img_skimage, interpolation='none')
```
<matplotlib.image.AxesImage at 0x7fe528c4f9d0>

### Transposed convolution
In the paper by Long et al. it was stated that upsampling can be performed using
fractionally strided convolution (transposed convolution). But first it is
necessary to understand how transposed convolution works. To understand that,
we should look at a usual convolution and see that it convolves
the image and depending on the parameters (stride, kernel size, padding) reduces
the input image. What if we would be able to perform an operation that goes
in the opposite direction -- from small input to the bigger one while preserving the
connectivity pattern. Here is an illustration:

Convolution is a linear operation and, therefore, it can be represented as a matrix
multiplication. To achieve the result described above, we only need to traspose the
matrix that defines a particular convolution. The resulted operation is no longer
a convolution, but it can still be represented as a convolution, which won't be
as efficient as transposing a convolution. To get more information about the equivalence
and transposed convolution in general we refer reader to [this paper](https://arxiv.org/pdf/1609.07009.pdf)
and [this guide](https://arxiv.org/pdf/1603.07285.pdf).
So, if we define a bilinear upsampling kernel and perform fractionally strided
convolution on the image, we will get an upsampled output, which will be defined
as a layer in the network and will make it possible for us to perform backpropagation.
For the FCN-32 we will use bilinear upsampling kernel as an initialization, meaning that
the network can learn a more suitable kernel during backpropagation.
To make the code below more easy to read, we will provide some statements that can be
derived from the [following article](http://avisynth.nl/index.php/Resampling). The _factor_
of upsampling is equal to the stride of transposed convolution. The kernel of the upsampling
operation is determined by the identity: __2 * factor - factor % 2__.
Below, we will define the bilinear interpolation using transposed convolution operation
in Tensorflow. We will perform this operation on cpu, because later in the post we will
need the same piece of code to perform a memory-consuming operation that won't fit into GPU memory.
After performing the interpolation, we compare our results to the results that were
obtained by the function from scikit-image.
```python
from __future__ import division
import numpy as np
import tensorflow as tf
def get_kernel_size(factor):
    """Return the transposed-convolution kernel size for an upsampling factor.

    Equivalent to the identity ``2 * factor - factor % 2``: even factors get
    an even-sized kernel, odd factors get an odd-sized one.
    """
    kernel_size = factor * 2
    if factor % 2 != 0:
        kernel_size -= 1
    return kernel_size
def upsample_filt(size):
    """Build a ``size`` x ``size`` 2-D bilinear interpolation kernel.

    The kernel peaks (value 1.0) at its centre and decays linearly towards
    the edges; it is the standard initialisation for learnable upsampling
    filters.
    """
    half = (size + 1) // 2
    center = half - 1 if size % 2 == 1 else half - 0.5
    rows, cols = np.ogrid[:size, :size]
    row_weights = 1 - abs(rows - center) / half
    col_weights = 1 - abs(cols - center) / half
    return row_weights * col_weights
def bilinear_upsample_weights(factor, number_of_classes):
    """Create a transposed-convolution weight tensor initialised to bilinear
    upsampling.

    Parameters
    ----------
    factor : int
        Upsampling factor (equal to the stride of the transposed convolution).
    number_of_classes : int
        Number of channels/classes; each class is upsampled independently.

    Returns
    -------
    numpy.ndarray
        float32 array of shape ``(k, k, number_of_classes, number_of_classes)``
        with ``k = 2 * factor - factor % 2``.  Only the "diagonal" slices
        ``weights[:, :, i, i]`` are non-zero, so channels do not mix.
    """
    filter_size = get_kernel_size(factor)
    weights = np.zeros((filter_size,
                        filter_size,
                        number_of_classes,
                        number_of_classes), dtype=np.float32)
    upsample_kernel = upsample_filt(filter_size)
    # range() instead of Python-2-only xrange(): xrange was removed in
    # Python 3, while range() behaves identically here on both versions.
    for i in range(number_of_classes):
        weights[:, :, i, i] = upsample_kernel
    return weights
def upsample_tf(factor, input_img):
    """Upsample ``input_img`` by ``factor`` with a bilinear transposed
    convolution in TensorFlow (TF1 graph/session API).

    # assumes input_img is a (height, width, channels) numpy array -- the
    # shape[0]/shape[1]/shape[2] reads below establish rank 3.
    """
    # Channels double as "classes": each gets its own bilinear kernel slice.
    number_of_classes = input_img.shape[2]
    new_height = input_img.shape[0] * factor
    new_width = input_img.shape[1] * factor
    # conv2d_transpose expects NHWC, so add a leading batch dimension of 1.
    expanded_img = np.expand_dims(input_img, axis=0)
    with tf.Graph().as_default():
        with tf.Session() as sess:
            # Pinned to CPU: the post notes the filter tensor can be too
            # large for GPU memory when the class count is big.
            with tf.device("/cpu:0"):
                # Placeholders without static shapes; fed via feed_dict below.
                upsample_filt_pl = tf.placeholder(tf.float32)
                logits_pl = tf.placeholder(tf.float32)
                upsample_filter_np = bilinear_upsample_weights(factor,
                                                               number_of_classes)
                # Fractionally-strided convolution: stride == upsampling factor.
                res = tf.nn.conv2d_transpose(logits_pl, upsample_filt_pl,
                                             output_shape=[1, new_height, new_width, number_of_classes],
                                             strides=[1, factor, factor, 1])
                final_result = sess.run(res,
                                        feed_dict={upsample_filt_pl: upsample_filter_np,
                                                   logits_pl: expanded_img})
    # Drop the batch dimension (and any other size-1 axes) before returning.
    return final_result.squeeze()
upsampled_img_tf = upsample_tf(factor=3, input_img=img)
io.imshow(upsampled_img_tf)
```
<matplotlib.image.AxesImage at 0x7fe4f81cae10>

```python
# Test if the results of upsampling are the same
np.allclose(upsampled_img_skimage, upsampled_img_tf)
```
True
```python
# range() instead of Python-2-only xrange(): xrange was removed in Python 3.
# Compare the TensorFlow and scikit-image upsampling results for several factors.
for factor in range(2, 10):
    upsampled_img_skimage = upsample_skimage(factor=factor, input_img=img)
    upsampled_img_tf = upsample_tf(factor=factor, input_img=img)
    are_equal = np.allclose(upsampled_img_skimage, upsampled_img_tf)
    print("Check for factor {}: {}".format(factor, are_equal))
```
Check for factor 2: True
Check for factor 3: True
Check for factor 4: True
Check for factor 5: True
Check for factor 6: True
Check for factor 7: True
Check for factor 8: True
Check for factor 9: True
### Upsampled predictions
So let's apply our upsampling to the actual predictions. We will take the _VGG-16_
model that we used in the previous post for classification and apply our upsampling
to the downsampled predictions that we get from the network.
Before applying the code below I had to change a [certain line](https://github.com/tensorflow/tensorflow/blob/r0.11/tensorflow/contrib/slim/python/slim/nets/vgg.py#L165)
in the definition of _VGG-16_ model to prevent it from reducing the size even more. To be more specific,
I had to change the _7x7_ convolutional layer padding to _SAME_ option. This was done by the authors
because they wanted to get single prediction for the input image of standart size. But in case of segmentation
we don't need this, because otherwise by upsampling by factor 32 we won't get
the image of the same size as the input. After making the aforementioned change, the issue was eliminated.
Be careful because the code below and specifically the upsampling variable consumes a huge
amount of space (~15 Gb). This is due to the fact that we have huge filters _64 by 64_
and _1000_ classes. Moreover, we actually don't use a lot of space of the upsampling variable,
because we define only the diagonal submatrices, therefore, a lot of space is wasted.
This is only done for demonstration purposes for _1000_ classes and standart _Segmentation_
datasets usually contain less classes (20 classes on PASCAL VOC).
```python
%matplotlib inline
from matplotlib import pyplot as plt
import numpy as np
import os
import tensorflow as tf
import urllib2
from datasets import imagenet
from nets import vgg
from preprocessing import vgg_preprocessing
checkpoints_dir = '/home/dpakhom1/checkpoints'
slim = tf.contrib.slim
# Load the mean pixel values and the function
# that performs the subtraction
from preprocessing.vgg_preprocessing import (_mean_image_subtraction,
_R_MEAN, _G_MEAN, _B_MEAN)
slim = tf.contrib.slim
# Function to nicely print segmentation results with
# colorbar showing class names
def discrete_matshow(data, labels_names=None, title=""):
    """Plot ``data`` with a discrete colormap and an integer-ticked colorbar.

    Parameters
    ----------
    data : array-like of int
        2-D array of (relabelled) class indices to display.
    labels_names : list of str, optional
        Human-readable names shown next to the colorbar ticks.
    title : str, optional
        Figure title, rendered bold when non-empty.
    """
    # None sentinel avoids the mutable-default-argument pitfall of the
    # original ``labels_names=[]`` signature; behaviour is unchanged.
    if labels_names is None:
        labels_names = []
    fig_size = [7, 6]
    plt.rcParams["figure.figsize"] = fig_size
    # Discrete colormap with exactly one colour per distinct class value.
    cmap = plt.get_cmap('Paired', np.max(data) - np.min(data) + 1)
    # Shift colour limits by 0.5 so each integer sits in the middle of a bin.
    mat = plt.matshow(data,
                      cmap=cmap,
                      vmin=np.min(data) - .5,
                      vmax=np.max(data) + .5)
    # Tick the colorbar at every integer class value.
    cax = plt.colorbar(mat,
                       ticks=np.arange(np.min(data), np.max(data) + 1))
    # Optionally print the class names beside the colorbar ticks.
    if labels_names:
        cax.ax.set_yticklabels(labels_names)
    if title:
        plt.suptitle(title, fontsize=15, fontweight='bold')
with tf.Graph().as_default():
url = ("https://upload.wikimedia.org/wikipedia/commons/d/d9/"
"First_Student_IC_school_bus_202076.jpg")
image_string = urllib2.urlopen(url).read()
image = tf.image.decode_jpeg(image_string, channels=3)
# Convert image to float32 before subtracting the
# mean pixel value
image_float = tf.to_float(image, name='ToFloat')
# Subtract the mean pixel value from each pixel
processed_image = _mean_image_subtraction(image_float,
[_R_MEAN, _G_MEAN, _B_MEAN])
input_image = tf.expand_dims(processed_image, 0)
with slim.arg_scope(vgg.vgg_arg_scope()):
# spatial_squeeze option enables to use network in a fully
# convolutional manner
logits, _ = vgg.vgg_16(input_image,
num_classes=1000,
is_training=False,
spatial_squeeze=False)
# For each pixel we get predictions for each class
# out of 1000. We need to pick the one with the highest
# probability. To be more precise, these are not probabilities,
# because we didn't apply softmax. But if we pick a class
# with the highest value it will be equivalent to picking
# the highest value after applying softmax
pred = tf.argmax(logits, dimension=3)
init_fn = slim.assign_from_checkpoint_fn(
os.path.join(checkpoints_dir, 'vgg_16.ckpt'),
slim.get_model_variables('vgg_16'))
with tf.Session() as sess:
init_fn(sess)
segmentation, np_image, np_logits = sess.run([pred, image, logits])
# Remove the first empty dimension
segmentation = np.squeeze(segmentation)
names = imagenet.create_readable_names_for_imagenet_labels()
# Let's get unique predicted classes (from 0 to 1000) and
# relable the original predictions so that classes are
# numerated starting from zero
unique_classes, relabeled_image = np.unique(segmentation,
return_inverse=True)
segmentation_size = segmentation.shape
relabeled_image = relabeled_image.reshape(segmentation_size)
labels_names = []
for index, current_class_number in enumerate(unique_classes):
labels_names.append(str(index) + ' ' + names[current_class_number+1])
# Show the downloaded image
plt.figure()
plt.imshow(np_image.astype(np.uint8))
plt.suptitle("Input Image", fontsize=14, fontweight='bold')
plt.axis('off')
plt.show()
discrete_matshow(data=relabeled_image, labels_names=labels_names, title="Segmentation")
```


And now let's upsample the predictions that we got for the image using the bilinear
upsampling kernel.
```python
upsampled_logits = upsample_tf(factor=32, input_img=np_logits.squeeze())
upsampled_predictions = upsampled_logits.squeeze().argmax(axis=2)
unique_classes, relabeled_image = np.unique(upsampled_predictions,
return_inverse=True)
relabeled_image = relabeled_image.reshape(upsampled_predictions.shape)
labels_names = []
for index, current_class_number in enumerate(unique_classes):
labels_names.append(str(index) + ' ' + names[current_class_number+1])
# Show the downloaded image
plt.figure()
plt.imshow(np_image.astype(np.uint8))
plt.suptitle("Input Image", fontsize=14, fontweight='bold')
plt.axis('off')
plt.show()
discrete_matshow(data=relabeled_image, labels_names=labels_names, title="Segmentation")
```


The ouput results that we got are quite noisy, but we got an approximate segmentation for the bus.
To be more precise, it is not a segmentation but regions where the network was evaluated
and gave the following predictions.
The next stage is to perform training of the whole system on a specific Segmentation dataset.
### Conclusion and Discussion
In this post we covered the transposed convolution and specifically the
implementation of bilinear interpolation using transposed convolution. We applied
it to downsampled predictions to upsample them and get predictions for the whole
input image.
We also saw that you might experience problems with space if you do segmentation
for a huge number of classes. Because of that, we had to perform that operation
on CPU.
|
{
"content_hash": "13bb1bde62690347957c14219b5a882d",
"timestamp": "",
"source": "github",
"line_count": 580,
"max_line_length": 715,
"avg_line_length": 38.734482758620686,
"alnum_prop": 0.724160954330989,
"repo_name": "warmspringwinds/warmspringwinds.github.io",
"id": "ba26c87f0e703ccdf948b4cbe84db326e07da50f",
"size": "22473",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_posts/2016-11-22-upsampling-and-image-segmentation-with-tensorflow-and-tf-slim.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "23734"
},
{
"name": "HTML",
"bytes": "10327"
},
{
"name": "JavaScript",
"bytes": "125"
},
{
"name": "Ruby",
"bytes": "6378"
}
],
"symlink_target": ""
}
|
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null
|
{
"content_hash": "ff7887918a681810ccab866c0980899d",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "2b29955ae6d384902804f1fa07ff7f1b1a6cdc95",
"size": "176",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Proteales/Proteaceae/Synaphea/Synaphea nexosa/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
<?php
/*
* This file is part of PHP CS Fixer.
*
* (c) Fabien Potencier <fabien@symfony.com>
* Dariusz Rumiński <dariusz.ruminski@gmail.com>
*
* This source file is subject to the MIT license that is bundled
* with this source code in the file LICENSE.
*/
namespace PhpCsFixer\Fixer\CastNotation;
use PhpCsFixer\AbstractFixer;
use PhpCsFixer\FixerDefinition\CodeSample;
use PhpCsFixer\FixerDefinition\FixerDefinition;
use PhpCsFixer\Tokenizer\Token;
use PhpCsFixer\Tokenizer\Tokens;
/**
* @author SpacePossum
*/
final class LowercaseCastFixer extends AbstractFixer
{
    /**
     * Rewrites every cast token ("(INT)", "(Bool)", ...) to its lower-case
     * form; non-cast tokens are left untouched.
     *
     * {@inheritdoc}
     */
    public function fix(\SplFileInfo $file, Tokens $tokens)
    {
        foreach ($tokens as $token) {
            if (!$token->isCast()) {
                continue;
            }

            $token->setContent(strtolower($token->getContent()));
        }
    }

    /**
     * {@inheritdoc}
     */
    public function getDefinition()
    {
        $codeSample = new CodeSample(
            '<?php
$a = (BOOLEAN) $b;
$a = (BOOL) $b;
$a = (INTEGER) $b;
$a = (INT) $b;
$a = (DOUBLE) $b;
$a = (FLoaT) $b;
$a = (reaL) $b;
$a = (flOAT) $b;
$a = (sTRING) $b;
$a = (ARRAy) $b;
$a = (OBJect) $b;
$a = (UNset) $b;
$a = (Binary) $b;
'
        );

        return new FixerDefinition(
            'Cast should be written in lower case.',
            array($codeSample)
        );
    }

    /**
     * The fixer is only relevant for files containing at least one cast token.
     *
     * {@inheritdoc}
     */
    public function isCandidate(Tokens $tokens)
    {
        return $tokens->isAnyTokenKindsFound(Token::getCastTokenKinds());
    }
}
|
{
"content_hash": "30e5f46a393637e7813dd5d4084652fe",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 83,
"avg_line_length": 22.17105263157895,
"alnum_prop": 0.5507418397626113,
"repo_name": "felixgomez/PHP-CS-Fixer",
"id": "b57fe664a13d7efcdb81b24c753676211146299e",
"size": "1686",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Fixer/CastNotation/LowercaseCastFixer.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "2065233"
},
{
"name": "Shell",
"bytes": "808"
}
],
"symlink_target": ""
}
|
layout: post
permalink: testability
title: Testability
tags: [programming, swift, testing]
---
I found an inconsistency in an app, which had to do with the color of a certain action's title.
Easy fix, I thought. I can fix this without too much work.
<!--more-->
So that's what I did. And I was right — it was an easy fix. However I wanted to go a step further, to prevent this from happening in the future, and to validate the current behavior.
## The problem
The problem was a `UIAlertAction` having a style of `.default` (making the button blue), where it was expected to be `.destructive` (which would make it red). The action was deleting a post, after all. The code looked something like this, and was inside a `UIViewController` subclass:
```swift
func presentActionsAlert(for input: [String]) {
let alertController = UIAlertController(
title: nil,
message: nil,
preferredStyle: .actionSheet
)
for value in input {
switch value {
case "share":
let action = UIAlertAction(title: "share", style: .default)
alertController.addAction(action)
case "delete":
let action = UIAlertAction(title: "delete", style: .default)
alertController.addAction(action)
default:
break
}
}
let cancelAction = UIAlertAction(title: "cancel", style: .cancel)
alertController.addAction(cancelAction)
present(alertController, animated: true)
}
```
The fix was easy:
```diff
- let action = UIAlertAction(title: "delete", style: .default)
+ let action = UIAlertAction(title: "delete", style: .destructive)
```
So I committed the fix, but before opening a pull request I wanted to check if all tests still succeeded (as to not waste resources on our continuous integration infrastructure).
Guess what. All tests passed. 😱
So I took another look at the function, and thought... this makes sense. How would I ever test this?
## Improving testability
So I refactored the function to only create the `UIAlertController`, without the side effect of presenting it.
```diff
- func presentActionsAlert(for input: [String]) {
+ func actionsAlert(for input: [String]) -> UIAlertController {
- present(alertController, animated: true)
+ return alertController
```
What is nice about this, is that we reduced our function to a function that does not do more than getting our `input` and providing us with an `output`[^1].
At this point, I was ready to write a unit test, that would validate some assumptions — in this case checking that if we pass in a `"delete"` as input, expecting a `.destructive` style as the output. If this would be changed in the future, the corresponding unit test would fail.
Let's take a look at this unit test.
```swift
class MyTest: XCTestCase {
let myViewController = MyViewController()
func test_that_aDeleteInput_has_aDestructiveStyle() {
let input = "delete"
let output = myViewController.actionsAlert(for: [input])
XCTAssertEqual(
output.actions.first { $0.title == input }?.style,
.destructive,
"A `delete` input should have a `.destructive` style."
)
}
}
```
And there we go: our assumption is being validated in a test[^2]. 🎉
## Conclusion
It is good to take a look and see if you can improve a piece of code, especially when it involved a contained fix like this one. Check if there is something to be done to leave the code a bit cleaner than you found it[^3].
[^1]: If that makes you think about functional programming: you're right. In fact, in its current form, this is a [pure function](https://en.wikipedia.org/wiki/Functional_programming#Pure_functions). And if pure functions are awesome for something, it is testability!
[^2]: In the real test case I wrote, I validated some other things, like there being a `.cancel` action present, as well as the `actions`'s count when we pass in some input that would end up in the `default` case.
[^3]: Now that the function does not rely on anything within the `UIViewController`, we could even take it out of there. Tearing down Massive ViewControllers one function at a time.
|
{
"content_hash": "607d0e0ff6ade3928c245fd574671e12",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 284,
"avg_line_length": 40.650485436893206,
"alnum_prop": 0.7117267733460712,
"repo_name": "BasThomas/basthomas.github.io",
"id": "5ee8f30e20cb3a2c9643d115e77c644ae989916f",
"size": "4201",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "_posts/2018-04-29-testability.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "200"
},
{
"name": "SCSS",
"bytes": "4352"
}
],
"symlink_target": ""
}
|
using System;
using RESTyard.AspNetCore.Hypermedia;
namespace RESTyard.AspNetCore.WebApi.Formatter
{
    /// <summary>
    /// Converts a <see cref="HypermediaObject"/> into its serialized string
    /// representation (used by the Web API formatter layer).
    /// </summary>
    public interface IHypermediaConverter
    {
        /// <summary>Serializes the given hypermedia object to a string.</summary>
        string ConvertToString(HypermediaObject hypermediaObject);
    }
}
|
{
"content_hash": "be43f23ce8828b2b0d5a9cf21896b7be",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 66,
"avg_line_length": 22.4,
"alnum_prop": 0.7767857142857143,
"repo_name": "bluehands/WebApiHypermediaExtensions",
"id": "430fe21eb75929c2b2f2b6d319e62df47c758b62",
"size": "224",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Source/RESTyard.AspNetCore/WebApi/Formatter/IHypermediaConverter.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "605950"
},
{
"name": "HTML",
"bytes": "2474"
},
{
"name": "PowerShell",
"bytes": "32"
}
],
"symlink_target": ""
}
|
/**
 * This file is part of Easylogging++ samples
 * Demonstration of simple VC++ project.
 *
 * PLEASE NOTE: We have ELPP_FEATURE_PERFORMANCE_TRACKING preprocessor defined in project settings
 * Otherwise we will get linker error
 *
 * Revision: 1.1
 * @author mkhan3189
 */
#include "easylogging++.h"
// Defines the single storage instance Easylogging++ requires per application.
INITIALIZE_EASYLOGGINGPP
// File-scope performance tracker covering the whole application run;
// requires ELPP_FEATURE_PERFORMANCE_TRACKING (see header note above).
TIMED_SCOPE(appTimer, "myapplication");
int main() {
    LOG(INFO) << "Starting...";
    // Removing this flag means verbose logging needs an explicit module spec
    // instead of being allowed globally.
    el::Loggers::removeFlag(el::LoggingFlag::AllowVerboseIfModuleNotSpecified);
    {
        // Times only this block; duration is reported when the scope ends.
        TIMED_SCOPE(tmr, "write-simple");
        LOG(INFO) << "Test " << __FILE__;
    }
    VLOG(3) << "Test verbose";  // emitted only at verbosity level >= 3
    system("pause");  // Windows-only: keep the console window open
}
|
{
"content_hash": "ce1540db103b8487d58ff1498e2c9c3b",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 97,
"avg_line_length": 22.06896551724138,
"alnum_prop": 0.6640625,
"repo_name": "rflament/loggedfs",
"id": "d226631793d123e59ba4f757c3e1ae2d3399c71f",
"size": "640",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "vendor/github.com/muflihun/easyloggingpp/samples/VC++/VCPP2015_Win32/VCPP2015_Win32/main.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "36685"
},
{
"name": "Makefile",
"bytes": "1712"
},
{
"name": "Roff",
"bytes": "1213"
},
{
"name": "Shell",
"bytes": "1338"
}
],
"symlink_target": ""
}
|
<?php
namespace Application\Model;
// Add these import statements
use Zend\InputFilter\Factory as InputFactory;
use Zend\InputFilter\InputFilter;
use Zend\InputFilter\InputFilterAwareInterface;
use Zend\InputFilter\InputFilterInterface;
class Jsprofile implements InputFilterAwareInterface
{
    protected $inputFilter;

    public $profileId;
    public $jsId;
    public $headline;
    public $curDesignation;
    public $curCompanyId;
    public $curLocation;
    public $prefLocation;
    public $functionalArea;
    public $curRole;
    public $industryId;
    public $totExp;
    public $salary;
    public $highestEducation;
    public $keySkils;
    public $dateAdded;
    public $dateModified;

    /**
     * Hydrates the model from an associative array.
     *
     * Missing keys default to null, except the two timestamp fields which
     * default to the current time.
     *
     * @param array $data row data keyed by property name
     */
    public function exchangeArray($data)
    {
        $plainFields = array(
            'profileId', 'jsId', 'headline', 'curDesignation', 'curCompanyId',
            'curLocation', 'prefLocation', 'functionalArea', 'curRole',
            'industryId', 'totExp', 'salary', 'highestEducation', 'keySkils',
        );
        foreach ($plainFields as $field) {
            $this->$field = isset($data[$field]) ? $data[$field] : null;
        }
        // NOTE(review): 'Y-M-d' uses the *textual* month format character 'M';
        // kept byte-identical to the original format string — confirm intent.
        $this->dateAdded = isset($data['dateAdded']) ? $data['dateAdded'] : date('Y-M-d H:i:s');
        $this->dateModified = isset($data['dateModified']) ? $data['dateModified'] : date('Y-M-d H:i:s');
    }

    /**
     * Returns all object properties (including the protected input filter)
     * as an associative array.
     *
     * @return array
     */
    public function getArrayCopy()
    {
        return get_object_vars($this);
    }

    public function setInputFilter(InputFilterInterface $inputFilter)
    {
        throw new \Exception("Not used");
    }

    /**
     * Lazily builds and returns the input filter.
     *
     * NOTE(review): the original registered the identical spec (named 'active')
     * sixteen times. The repetition is preserved here (as a loop) for exact
     * behavioural parity, but it looks like copy-paste placeholders for the
     * real field names — repeated adds with one name appear to overwrite each
     * other; verify the intended per-field filters.
     *
     * @return InputFilter
     */
    public function getInputFilter()
    {
        if (!$this->inputFilter) {
            $inputFilter = new InputFilter();
            $factory = new InputFactory();

            $spec = array(
                'name' => 'active',
                //'required' => true,
                'filters' => array(
                    array('name' => 'StripTags'),
                    array('name' => 'StringTrim'),
                ),
                'validators' => array(),
            );
            for ($i = 0; $i < 16; $i++) {
                $inputFilter->add($spec);
            }

            $this->inputFilter = $inputFilter;
        }
        return $this->inputFilter;
    }
}
|
{
"content_hash": "a544d2bfef3a24f03372ea5caf2b6cbe",
"timestamp": "",
"source": "github",
"line_count": 215,
"max_line_length": 101,
"avg_line_length": 35.7953488372093,
"alnum_prop": 0.43243243243243246,
"repo_name": "naveennimbal/jobstonaukri",
"id": "5e7b3509d305544998e06d8833321cb4ac776ffb",
"size": "7696",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "module/Application/src/Application/Model/Jsprofile.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "711"
},
{
"name": "Batchfile",
"bytes": "61"
},
{
"name": "CSS",
"bytes": "328956"
},
{
"name": "HTML",
"bytes": "974903"
},
{
"name": "JavaScript",
"bytes": "781384"
},
{
"name": "PHP",
"bytes": "359393"
},
{
"name": "Shell",
"bytes": "59"
}
],
"symlink_target": ""
}
|
package ferjorosa.graph.search;
import ferjorosa.graph.AbstractNode;
import ferjorosa.graph.Edge;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* A visitor to record the discovering and finishing times
* of the nodes during a depth first search.
* @author leonard
*
*/
public class TimeVisitor extends AbstractVisitor {

    // Set when the designated root node is reached again after its first
    // discovery, i.e. the traversal looped back to the root.
    private boolean reVisitTwice = false;

    // Optional node whose re-discovery is tracked; null disables tracking.
    private AbstractNode root = null;

    public TimeVisitor() {
        time = 0;
    }

    /**
     * Constructor
     * @param initialTime initial time
     */
    public TimeVisitor(int initialTime) {
        time = initialTime;
    }

    /** Sets the node whose repeated discovery flips the revisit flag. */
    public void setRoot(AbstractNode node)
    {
        root = node;
    }

    /** Returns whether the root node was encountered more than once. */
    public boolean reVisit()
    {
        return reVisitTwice;
    }

    /**
     * Records the discovering time of the node unless it was already
     * discovered or the edge is ignored.
     *
     * @return true when the node is newly discovered (search continues
     *         through it), false otherwise
     */
    public boolean discover(AbstractNode node, Edge edge) {
        if (discoveringTimes.containsKey(node) ||
            ignoredEdges.contains(edge))
        {
            // Re-discovering the tracked root means the search looped back to it.
            if(discoveringTimes.containsKey(node) && node.equals(root))
            {
                reVisitTwice = true;
            }
            return false;
        }
        discoveringTimes.put(node, time++);
        return true;
    }

    /** Records the finishing time of the node. */
    public void finish(AbstractNode node) {
        finishingTimes.put(node, time++);
    }

    /** Returns whether the node has already been discovered. */
    public boolean discovered(AbstractNode node) {
        return discoveringTimes.containsKey(node);
    }

    /**
     * Indicates an edge that should be ignored during the search.
     * @param edge an edge that should be ignored
     */
    public void addIgnoredEdge(Edge edge) {
        ignoredEdges.add(edge);
    }

    // Node -> time at which it was first discovered.
    public final Map<AbstractNode, Integer> discoveringTimes =
        new HashMap<AbstractNode, Integer>();

    // Node -> time at which its processing finished.
    public final Map<AbstractNode, Integer> finishingTimes =
        new HashMap<AbstractNode, Integer>();

    // Monotonic counter shared by discover/finish timestamps.
    private int time = 0;

    private Set<Edge> ignoredEdges = new HashSet<Edge>();
}
|
{
"content_hash": "a9a4a123ae5c7cd66f2b635e1a84a2c0",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 66,
"avg_line_length": 23.048192771084338,
"alnum_prop": 0.6419236800836383,
"repo_name": "fernandoj92/mvca-parkinson",
"id": "a0af883860ecf404ec4609b143f106ae6869a75d",
"size": "1913",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ltm-analysis/src/main/java/ferjorosa/graph/search/TimeVisitor.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "553"
},
{
"name": "Java",
"bytes": "3340311"
},
{
"name": "R",
"bytes": "829"
},
{
"name": "Scala",
"bytes": "6170"
}
],
"symlink_target": ""
}
|
using System;
using System.Net;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Documents;
using System.Windows.Ink;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Animation;
using System.Windows.Shapes;
using System.Xml.Linq;
using System.Linq;
using System.Windows.Resources;
using System.Collections.Generic;
using Tff.Panzer.Models.Geography;
using Tff.Panzer.Models;
using Tff.Panzer.Models.Campaign;
namespace Tff.Panzer.Factories.Campaign
{
    /// <summary>
    /// Loads all campaign steps from the Campaign_Step XML resource at
    /// construction time and offers lookup by step id.
    /// </summary>
    public class CampaignStepFactory
    {
        /// <summary>All steps parsed from the XML resource.</summary>
        public List<CampaignStep> CampaignSteps { get; private set; }

        /// <summary>Factory used to resolve each step's briefing by id.</summary>
        public CampaignBriefingFactory CampaignBriefingFactory { get; private set; }

        public CampaignStepFactory()
        {
            CampaignSteps = new List<CampaignStep>();
            CampaignBriefingFactory = new CampaignBriefingFactory();

            // Load the embedded XML resource containing the step definitions.
            Uri uri = new Uri(Constants.Campaign_StepDataPath, UriKind.Relative);
            XElement applicationXml;
            StreamResourceInfo xmlStream = Application.GetResourceStream(uri);
            applicationXml = XElement.Load(xmlStream.Stream);
            var data = from t in applicationXml.Descendants("Campaign_Step")
                       select t;

            // Materialize one CampaignStep per <Campaign_Step> element.
            CampaignStep campaignStep = null;
            foreach (var d in data)
            {
                campaignStep = new CampaignStep();
                campaignStep.CampaignStepId = (Int32)d.Element("CampaignStepId");
                campaignStep.CampaignStepDescription = (String)d.Element("CampaignStepDesc");
                campaignStep.CampaignBriefing = CampaignBriefingFactory.GetCampaignBriefing((Int32)d.Element("BriefingId"));
                campaignStep.ScenarioId = ((Int32)d.Element("ScenarioId"));
                CampaignSteps.Add(campaignStep);
            }
        }

        /// <summary>
        /// Returns the step with the given id, or null when no step matches.
        /// </summary>
        public CampaignStep GetCampaignStep(int campaignStepId)
        {
            CampaignStep step = (from b in this.CampaignSteps
                                 where b.CampaignStepId == campaignStepId
                                 select b).FirstOrDefault();
            return step;
        }
    }
}
|
{
"content_hash": "c22d43a2179233ed604886e09c6d1d02",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 124,
"avg_line_length": 36.13333333333333,
"alnum_prop": 0.6526752767527675,
"repo_name": "jamessdixon/PanzerGeneral",
"id": "3b0c2eeb205013d03d0f20195d2d232ae20774b2",
"size": "2170",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Tff.PanzerGeneral_Solution/Tff.PanzerGeneral.UI.WindowsPhone7/Factories/Campaign/CampaignStepFactory.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "524594"
}
],
"symlink_target": ""
}
|
/**
 * Leaflet control that displays the geographic coordinates currently under
 * the mouse pointer.
 */
L.Control.MousePosition = L.Control.extend({
  options: {
    position: 'bottomleft',
    separator: ' : ',           // placed between the two coordinates
    emptyString: 'Unavailable', // shown before the first mousemove
    lngFirst: false,            // when true, output "lng : lat"
    numDigits: 5,               // rounding used when no custom formatter is set
    lngFormatter: undefined,    // optional function(lng) -> string
    latFormatter: undefined,    // optional function(lat) -> string
    prefix: ""
  },

  onAdd: function (map) {
    this._container = L.DomUtil.create('div', 'leaflet-control-mouseposition');
    L.DomEvent.disableClickPropagation(this._container);
    map.on('mousemove', this._onMouseMove, this);
    this._container.innerHTML = this.options.emptyString;
    return this._container;
  },

  onRemove: function (map) {
    // BUG FIX: the handler was registered with a context (`this`), so the
    // same context must be passed to `off`; otherwise Leaflet cannot match
    // the listener and it keeps firing after the control is removed.
    map.off('mousemove', this._onMouseMove, this);
  },

  _onMouseMove: function (e) {
    // NOTE: only the longitude is wrapped (via .wrap()), mirroring the
    // original behaviour.
    var lng = this.options.lngFormatter ? this.options.lngFormatter(e.latlng.wrap().lng) : L.Util.formatNum(e.latlng.wrap().lng, this.options.numDigits);
    var lat = this.options.latFormatter ? this.options.latFormatter(e.latlng.lat) : L.Util.formatNum(e.latlng.lat, this.options.numDigits);
    var value = this.options.lngFirst ? lng + this.options.separator + lat : lat + this.options.separator + lng;
    var prefixAndValue = this.options.prefix + ' ' + value;
    this._container.innerHTML = prefixAndValue;
  }
});

// Allow `L.map(..., { positionControl: true })` to add the control automatically.
L.Map.mergeOptions({
  positionControl: false
});

L.Map.addInitHook(function () {
  if (this.options.positionControl) {
    this.positionControl = new L.Control.MousePosition();
    this.addControl(this.positionControl);
  }
});

// Factory following the Leaflet lowercase convention.
L.control.mousePosition = function (options) {
  return new L.Control.MousePosition(options);
};
|
{
"content_hash": "2b1c7270a181fd1568079fab2fb5f507",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 151,
"avg_line_length": 30.666666666666668,
"alnum_prop": 0.7167119565217391,
"repo_name": "eirikaa/explorer",
"id": "cb9552841c0fe9828a1e446623cf84b27ccd53be",
"size": "1472",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "phonegap/explorer/www/leaflet-mouse-position/src/L.Control.MousePosition.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "8627"
},
{
"name": "C",
"bytes": "1025"
},
{
"name": "C#",
"bytes": "1116"
},
{
"name": "C++",
"bytes": "5727"
},
{
"name": "CSS",
"bytes": "207274"
},
{
"name": "HTML",
"bytes": "63110"
},
{
"name": "Java",
"bytes": "13644"
},
{
"name": "JavaScript",
"bytes": "624907"
},
{
"name": "Objective-C",
"bytes": "241486"
},
{
"name": "Shell",
"bytes": "2433"
}
],
"symlink_target": ""
}
|
<?
/*
Post "card" template used on the homepage, on archive pages, and for the
"related posts" section on single pages.
- single pages: items in a four-across "Related Post" grid.
- home/archive: regular-width items.
- unpaged home page: posts at index 0, 4 and 8 ('current_post%4 == 0' with
  'current_post/3 < 3') are rendered as a wider 'citem' featured box.
NOTE(review): the original comment here cited 'col-sm-3'/'col-sm-4' classes,
but the code below uses 'item col-xs-12 col-sm-6 col-md-4' (and
'citem col-xs-12 col-sm-8' for the featured box) — the comment had drifted
from the markup.
*/
?>
<?php
// Featured-image URL for this post ('large' size); a default image is used
// below when the post has no thumbnail.
$thumb = wp_get_attachment_image_src( get_post_thumbnail_id($post->ID), 'large' );
$url = $thumb['0'];
// Wide featured treatment for select posts on the unpaged home page.
$wrapperclass = ( $wp_query->current_post%4 == 0 && (int)( $wp_query->current_post / 3 ) < 3 && !is_paged() && is_home() ) ? 'citem col-xs-12 col-sm-8' : 'item col-xs-12 col-sm-6 col-md-4';
?>
<div class="<?=$wrapperclass?> <?php foreach(get_the_category() as $category) { echo $category->slug . ' ';} ?>" id="post-<?php the_ID(); ?>">
	<div class="news-item ">
	<? /* the actual post images are backgrounds. transparent placeholder GIFs fill the actual space, so all blocks are the same size.
	If no post image is present, a default image loads instead */
	/* if it is home or news category archive, the first post gets a wider placeholder image. if single, just the normal placeholder for all */
	?>
	<?php if ( has_post_thumbnail()) { ?>
		<div class="post-image" style="background-image:url(<?=$url?>);">
	<?php } else { ?>
		<div class="post-image" style="background-image:url(<?= get_template_directory_uri(); ?>/dist/images/default-tall.png)">
	<?php } ?>
		<a href="<?php the_permalink() ?>" rel="bookmark" title="<?php the_title_attribute(); ?>">
		<?php if (is_single()) { ?>
			<img src="<?= get_template_directory_uri(); ?>/dist/images/blank-image.gif" alt="<?php the_title_attribute(); ?> - <?= get_bloginfo("name"); ?>" class="placeholder" />
		<?php } else { ?>
			<?php if( $wp_query->current_post%4 == 0 && (int)( $wp_query->current_post / 3 ) < 3 && !is_paged() && is_home() ) : ?>
				<img src="<?= get_template_directory_uri(); ?>/dist/images/blank-image-wide.gif" alt="<?php the_title_attribute(); ?> - <?= get_bloginfo("name"); ?>" class="placeholder" />
			<?php else : ?>
				<img src="<?= get_template_directory_uri(); ?>/dist/images/blank-image.gif" alt="<?php the_title_attribute(); ?> - <?= get_bloginfo("name"); ?>" class="placeholder" />
			<?php endif; ?>
		<?php } ?>
		</a>
		</div>
	<?php /* REUSED snippet to display title, category, subtitle */ ?>
	<?php get_template_part('templates/snippet', 'feed-header'); ?>
	</div>
</div>
|
{
"content_hash": "aaf9f95f36cdcf9e2450e5ca02137536",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 189,
"avg_line_length": 52.88,
"alnum_prop": 0.6002269288956127,
"repo_name": "damnmagazine/damn-sage",
"id": "b59d2ce8eca74ceb06c961797cc211653d73e468",
"size": "2644",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "templates/content.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "151591"
},
{
"name": "HTML",
"bytes": "63742"
},
{
"name": "JavaScript",
"bytes": "15227"
},
{
"name": "PHP",
"bytes": "248875"
}
],
"symlink_target": ""
}
|
Example usage of the context package, for a lightning talk at GoAKL
See article here: https://medium.com/@ambot/exploring-the-context-package-db30a818d563#.2q34adjd7
|
{
"content_hash": "5856bd40c15685cdc5e74f488e302353",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 97,
"avg_line_length": 55.666666666666664,
"alnum_prop": 0.8083832335329342,
"repo_name": "laher/context-example",
"id": "f9863b71c35f87b9b708b0995224b50dd5a909a6",
"size": "186",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "4562"
}
],
"symlink_target": ""
}
|
// Version slug consumed by the generated documentation's version selector.
var DOCUMENTER_CURRENT_VERSION = "previews/PR489";
|
{
"content_hash": "be9e2ba9e626b7e63e6c2aa3626cada9",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 50,
"avg_line_length": 51,
"alnum_prop": 0.7843137254901961,
"repo_name": "dmbates/MixedModels.jl",
"id": "a6b08a140dab5e20c8a616900ea8e5e88158b9e8",
"size": "51",
"binary": false,
"copies": "1",
"ref": "refs/heads/gh-pages",
"path": "previews/PR489/siteinfo.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Julia",
"bytes": "138755"
}
],
"symlink_target": ""
}
|
<!-- Options menu for CheatActivity: a single "Settings" item that only
     appears in the overflow menu (showAsAction="never"). -->
<menu xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto"
    xmlns:tools="http://schemas.android.com/tools"
    tools:context="com.bignerdranch.android.geoquiz.CheatActivity">
    <item android:id="@+id/action_settings"
        android:title="@string/action_settings"
        android:orderInCategory="100"
        app:showAsAction="never"/>
</menu>
|
{
"content_hash": "951486d434906d50f93fb1e52792c96e",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 69,
"avg_line_length": 47.22222222222222,
"alnum_prop": 0.68,
"repo_name": "walbed/BNRGeoQuiz",
"id": "08cefbf85963ef6193156c1338d0dfaa5f8b2ca5",
"size": "425",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "app/src/main/res/menu/menu_cheat.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "13739"
}
],
"symlink_target": ""
}
|
<wpml-config>
    <!-- Custom post types marked as translatable (translate="1"). -->
    <custom-types>
        <custom-type translate="1">project</custom-type>
        <custom-type translate="1">testimonial</custom-type>
    </custom-types>
    <!-- Their taxonomies are translatable as well. -->
    <taxonomies>
        <taxonomy translate="1">project-category</taxonomy>
        <taxonomy translate="1">testimonial-category</taxonomy>
    </taxonomies>
</wpml-config>
|
{
"content_hash": "4878c2c88688e4996ca48b0d1959bb01",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 63,
"avg_line_length": 28.916666666666668,
"alnum_prop": 0.6512968299711815,
"repo_name": "sipa-mict/localgov59",
"id": "41852c97145766fc890b078eb7e6f585a0fcacfd",
"size": "347",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "public_html/wp-content/themes/LocalLite/wpml-config.xml",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "841"
},
{
"name": "CSS",
"bytes": "5483279"
},
{
"name": "HTML",
"bytes": "3304925"
},
{
"name": "JavaScript",
"bytes": "5534618"
},
{
"name": "PHP",
"bytes": "18173361"
},
{
"name": "PLSQL",
"bytes": "1644182"
}
],
"symlink_target": ""
}
|
module ErrorMerger
  # Library version string.
  VERSION = '1.1.0'
end
|
{
"content_hash": "264037168decbe5afade23e7f51df658",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 19,
"avg_line_length": 14.333333333333334,
"alnum_prop": 0.6976744186046512,
"repo_name": "zarqman/error_merger",
"id": "06c92e68a2f1d274715c03d63df6ba716749e1a0",
"size": "43",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/error_merger/version.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "6741"
}
],
"symlink_target": ""
}
|
/**
 * Renders a static June-2014 calendar into the container matched by
 * `selector` and fills in the supplied events.
 *
 * Each event object is expected to carry `date` (day of month as a string,
 * matching the day-cell ids '1'..'30'), plus `hour`, `duration` and `title`.
 *
 * @param {string|Element} selector CSS selector for (or reference to) the container.
 * @param {Array<Object>} events events to render into their day cells.
 */
function createCalendar(selector, events) {
    var i, j, k;
    // BUG FIX: `root` was only declared/assigned inside the string check, so
    // passing a DOM element left it undefined and crashed below. Accept
    // either a selector string or an element.
    var root = typeof selector === 'string' ? document.querySelector(selector) : selector;
    root.style.height = '1000px';
    var fragment = document.createDocumentFragment();
    var calendar = document.createElement('table');
    calendar.style.border = '1px solid black';
    calendar.style.borderCollapse = 'collapse';
    calendar.style.height = '80%';
    calendar.style.width = '70%';
    calendar.style.textAlign = 'center';
    // Prototype nodes, cloned for every row / header cell / day cell.
    var week = document.createElement('tr');
    week.style.border = '1px solid black';
    var date = document.createElement('th');
    date.style.height = '20px';
    date.style.border = '1px solid black';
    date.style.backgroundColor = 'lightgrey';
    var day = document.createElement('td');
    day.style.border = '1px solid black';
    day.style.textAlign = 'left';
    day.style.height = '150px';
    var daysCount = 0;
    var dateCount = 0;
    var weekDays = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'];
    // Alternate header rows (date cells, id '0<n>') with content rows
    // (day cells, id '<n>').
    for (i = 0; i < 10; i += 1) {
        var currentWeek = week.cloneNode(true);
        if (i % 2 === 0) {
            for (j = 0; j < 7; j += 1) {
                if (dateCount < 30) {
                    dateCount += 1;
                    var currentDate = date.cloneNode(true);
                    currentDate.innerText = weekDays[j] + ' ' + dateCount + ' ' + 'June 2014';
                    currentDate.setAttribute('id', '0' + dateCount);
                    currentWeek.appendChild(currentDate);
                }
            }
        } else {
            for (k = 0; k < 7; k += 1) {
                if (daysCount < 30) {
                    daysCount += 1;
                    var currentDay = day.cloneNode(true);
                    currentDay.setAttribute('id', '' + daysCount);
                    currentWeek.appendChild(currentDay);
                }
            }
        }
        fragment.appendChild(currentWeek);
    }
    calendar.appendChild(fragment);
    root.appendChild(calendar);
    // Hovering a day cell highlights its date header.
    calendar.addEventListener('mouseover', function (ev) {
        var target = ev.target;
        if (target.tagName === 'TD') {
            var currentDate = document.getElementById('0' + target.id);
            currentDate.style.background = 'lightgreen';
        }
    }, false);
    calendar.addEventListener('mouseout', function (ev) {
        var target = ev.target;
        if (target.tagName === 'TD') {
            var currentDate = document.getElementById('0' + target.id);
            currentDate.style.background = 'lightgrey';
        }
    }, false);
    // Clicking a day selects it: the day turns yellow, its header red, and
    // every other day's background is cleared.
    calendar.addEventListener('click', function (ev) {
        var target = ev.target;
        if (target.tagName !== 'TD') {
            return;
        }
        var dayCells = document.getElementsByTagName('td');
        // BUG FIX: the original `for (var date in dayCells)` iterated the
        // collection's property *names* (strings), so `date.style` was
        // undefined and the handler threw on every click. Iterate by index.
        for (var n = 0; n < dayCells.length; n += 1) {
            if (dayCells[n].id !== target.id) {
                dayCells[n].style.background = 'none';
            }
        }
        document.getElementById('0' + target.id).style.background = 'red';
        target.style.background = 'yellow';
    }, false);
    // Write each event into its matching day cell.
    var allDays = calendar.getElementsByTagName('td');
    var eventsLength = events.length;
    var daysLength = allDays.length;
    for (i = 0; i < eventsLength; i += 1) {
        for (j = 0; j < daysLength; j += 1) {
            if (events[i].date === allDays[j].id) {
                allDays[j].innerText = events[i].hour + ' ' + events[i].duration + ' ' + events[i].title;
            }
        }
    }
}
|
{
"content_hash": "983ae1fc19b40c5204cc986d6684a3d0",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 105,
"avg_line_length": 36.40816326530612,
"alnum_prop": 0.5322309417040358,
"repo_name": "Vyara/JavaScript-UI-and-DOM-Homeworks",
"id": "e3bbd8f65893254b4f5d87aa2827b6029cd2ad09",
"size": "3568",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Sample Exams/Sample-exam-1/task-1/task-files/scripts.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5850"
},
{
"name": "HTML",
"bytes": "16074"
},
{
"name": "JavaScript",
"bytes": "210948"
}
],
"symlink_target": ""
}
|
import { KeyDictionary } from 'core/generics';

/** Origin a client response was ultimately served from. */
export enum ClientResponseSource {
  remote = 'remote',
  cache = 'cache',
  local = 'local'
}

/**
 * Generic envelope for a response returned by a client.
 * TStatus is the status type (numeric for HTTP); TData is the payload type.
 */
export type ClientResponse<TStatus, TData> = {
  source: ClientResponseSource;
  status: TStatus;
  data: TData;
  // Presumably set when the response represents a rejection — confirm exact
  // semantics at the call sites that populate it.
  rejected?: boolean;
}

/** Numeric-status response with a plain-text body. */
export type HttpClientResponse = ClientResponse<number, string>;

/** Numeric-status response with the body as a keyed dictionary (parsed JSON). */
export type JsonClientResponse = ClientResponse<number, KeyDictionary<any>>;

/** String-status, string-data variant (named for process invocations). */
export type ProcessClientResponse = ClientResponse<string, string>;
|
{
"content_hash": "7845da8934b2dad6c13fc5d1f6862126",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 76,
"avg_line_length": 24.65,
"alnum_prop": 0.7525354969574036,
"repo_name": "pflannery/vscode-versionlens",
"id": "7ced150be19a92a72ccd367b48c2c59f89d94a81",
"size": "493",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/core/clients/definitions/clientResponses.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "101777"
},
{
"name": "TypeScript",
"bytes": "3852"
}
],
"symlink_target": ""
}
|
//
//  CJImagePickerViewController.h
//  CJPickerDemo
//
//  Created by ciyouzen on 2015/8/31.
//  Copyright © 2015 dvlproad. All rights reserved.
//
#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
#import <Masonry/Masonry.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import "CJAlumbImageModel.h"
#import "CJAlumbSectionDataModel.h"

// Block type handed the image models the user has currently selected.
typedef void(^CJDealSelectedImageModelsBlock)(NSMutableArray<CJAlumbImageModel *> *bSelectedImageModels);
/**
 *  Custom image picker: CJImagePickerViewController.
 */
@interface CJImagePickerViewController : UIViewController {
}
@property (nonatomic, assign) NSInteger canMaxChooseImageCount; /**< Maximum number of images that can be chosen in one pass */
@property (nonatomic, copy) void (^pickCompleteBlock)(NSArray<CJAlumbImageModel *> *imageModels); /**< Invoked after photo picking completes */
@property (nonatomic, strong) ALAssetsGroup *assetsGroup;
@property (nonatomic, strong) NSMutableArray *assets;
/*
 *  Initializer.
 *
 *  @param overLimitBlock   callback invoked when the selection exceeds the maximum allowed image count
 *  @param clickImageBlock  action executed when an image is tapped
 *  @param previewAction    action executed when the bottom-left "Preview" button is tapped
 *  @param pickFinishBlock  action executed when the bottom-right "Done" button is tapped
 *
 *  @return the image picker
 */
- (instancetype)initWithOverLimitBlock:(void(^)(void))overLimitBlock
                       clickImageBlock:(void(^)(CJAlumbImageModel *imageModel))clickImageBlock
                         previewAction:(void(^)(NSArray *bTotoalImageModels, NSMutableArray<CJAlumbImageModel *> *bSelectedImageModels))previewAction
                       pickFinishBlock:(void(^)(UIViewController *bVC, NSArray<CJAlumbImageModel *> *bSelectedImageModels))pickFinishBlock NS_DESIGNATED_INITIALIZER;
+ (instancetype)new NS_UNAVAILABLE;
- (instancetype)init NS_UNAVAILABLE;
- (instancetype)initWithFrame:(CGRect)frame NS_UNAVAILABLE;
- (instancetype)initWithCoder:(NSCoder *)aDecoder NS_UNAVAILABLE;
- (void)hcRefreshViewDidDataUpdated;
@end
|
{
"content_hash": "4ac5680853bcbd2082ab2f5da8544efa",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 165,
"avg_line_length": 30.766666666666666,
"alnum_prop": 0.7329360780065005,
"repo_name": "dvlproad/CJUIKit",
"id": "129cb87d2053e405ae26985248703aee28e64173",
"size": "2023",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CJComplexUIKitDemo/Pods/CJImagePickerKit/CJImagePickerKit/CustomImagePickerController/CJImagePickerViewController.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "196"
},
{
"name": "HTML",
"bytes": "9762"
},
{
"name": "Objective-C",
"bytes": "5000370"
},
{
"name": "Ruby",
"bytes": "48699"
},
{
"name": "Swift",
"bytes": "127064"
}
],
"symlink_target": ""
}
|
package io.gomint.proxy.util;
import io.gomint.jraknet.PacketBuffer;
import lombok.Getter;
import java.nio.ByteBuffer;
import java.util.BitSet;
/**
* @author geNAZt
* @version 1.0
*/
@Getter
public class Palette {
@Getter
private enum PaletteVersion {
P1( 1, 32 ),
P2( 2, 16 ),
P3( 3, 10, 2 ),
P4( 4, 8 ),
P5( 5, 6, 2 ),
P6( 6, 5, 2 ),
P8( 8, 4 ),
P16( 16, 2 );
private final byte versionId;
private final byte amountOfWords;
private final byte amountOfPadding;
PaletteVersion( int versionId, int amountOfWords ) {
this( versionId, amountOfWords, 0 );
}
PaletteVersion( int versionId, int amountOfWords, int amountOfPadding ) {
this.versionId = (byte) versionId;
this.amountOfWords = (byte) amountOfWords;
this.amountOfPadding = (byte) amountOfPadding;
}
}
private ByteBuffer dataOutput;
private PacketBuffer buffer;
private PaletteVersion paletteVersion = null;
// Output indexes
private short[] output = null;
// Input bitset
private BitSet input = null;
private int inputIndex = 0;
private int wordsWritten = 0;
/**
* Construct a new reader for the given palette version
*
* @param in of the data
* @param version of the palette
*/
public Palette( PacketBuffer in, byte version ) {
this.buffer = in;
for ( PaletteVersion paletteVersionCanidate : PaletteVersion.values() ) {
if ( paletteVersionCanidate.getVersionId() == version ) {
this.paletteVersion = paletteVersionCanidate;
break;
}
}
if ( this.paletteVersion == null ) {
throw new IllegalArgumentException( "Palette version " + version + " is unknown" );
}
}
public Palette( ByteBuffer data, int version ) {
this.dataOutput = data;
for ( PaletteVersion paletteVersionCanidate : PaletteVersion.values() ) {
if ( paletteVersionCanidate.getVersionId() == version ) {
this.paletteVersion = paletteVersionCanidate;
break;
}
}
if ( this.paletteVersion == null ) {
throw new IllegalArgumentException( "Palette version " + version + " is unknown" );
}
}
public void addIndex( Integer id ) {
// Do we need new input?
if ( this.input == null ) {
this.input = new BitSet( 32 );
this.inputIndex = 0;
}
// Check if old input is full and we need a new one
if ( this.wordsWritten == this.paletteVersion.getAmountOfWords() ) {
// Write to output
this.dataOutput.putInt( this.convert( this.input ) );
// New input
this.input = new BitSet( 32 );
this.inputIndex = 0;
this.wordsWritten = 0;
}
// Write id
while ( id != 0L ) {
if ( id % 2L != 0 ) {
this.input.set( this.inputIndex );
}
++this.inputIndex;
id = id >>> 1;
}
// Increment written words
this.wordsWritten++;
}
public void finish() {
this.dataOutput.putInt( this.convert( this.input ) );
this.input = null;
}
public short[] getIndexes() {
// Do we need to read first?
if ( this.output == null ) {
this.output = new short[4096];
// We need the amount of iterations
int iterations = (int) Math.floor( 4096 / (float) this.paletteVersion.getAmountOfWords() );
for ( int i = 0; i < iterations; i++ ) {
BitSet bitSet = convert( this.buffer.readLInt() );
int index = 0;
for ( byte b = 0; b < this.paletteVersion.getAmountOfWords(); b++ ) {
short val = 0;
int innerShiftIndex = 0;
for ( byte i1 = 0; i1 < this.paletteVersion.getVersionId(); i1++ ) {
if ( bitSet.get( index++ ) ) {
val ^= 1 << innerShiftIndex;
}
innerShiftIndex++;
}
int setIndex = ( i * this.paletteVersion.getAmountOfWords() ) + b;
if ( setIndex < 4096 ) {
this.output[setIndex] = val;
}
}
}
}
return this.output;
}
public boolean isPadded() {
return this.paletteVersion.getAmountOfPadding() > 0;
}
private int convert( BitSet bits ) {
int value = 0;
for ( int i = 0; i < bits.length(); ++i ) {
value += bits.get( i ) ? ( 1L << i ) : 0L;
}
return value;
}
private BitSet convert( int value ) {
BitSet bits = new BitSet( 32 );
int index = 0;
while ( value != 0L ) {
if ( value % 2L != 0 ) {
bits.set( index );
}
++index;
value = value >>> 1;
}
return bits;
}
}
|
{
"content_hash": "2fba7da087594073cedbb0b99365a051",
"timestamp": "",
"source": "github",
"line_count": 193,
"max_line_length": 103,
"avg_line_length": 27.1139896373057,
"alnum_prop": 0.5052551117905599,
"repo_name": "GoMint/Proxy",
"id": "7e688221cd1d6c7fd478d2940f3ee7c2a178c85e",
"size": "5413",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/io/gomint/proxy/util/Palette.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "195485"
}
],
"symlink_target": ""
}
|
package hashtree
import (
"crypto/sha256"
"hash"
"testing"
)
var fileBench = NewFile()
var treeBench = NewTree()
var refBench = sha256.New()
var buf = make([]byte, 20480)
func benchmarkSize(b *testing.B, hash hash.Hash, size int) {
b.SetBytes(int64(size))
for i := 0; i < b.N; i++ {
hash.Reset()
hash.Write(buf[:size])
hash.Sum(nil)
}
}
func BenchmarkFile8Bytes(b *testing.B) {
benchmarkSize(b, fileBench, 8)
}
func BenchmarkFile1K(b *testing.B) {
benchmarkSize(b, fileBench, 1024)
}
func BenchmarkFile20K(b *testing.B) {
benchmarkSize(b, fileBench, 20480)
}
func BenchmarkTree8Bytes(b *testing.B) {
benchmarkSize(b, treeBench, 8)
}
func BenchmarkTree1K(b *testing.B) {
benchmarkSize(b, treeBench, 1024)
}
func BenchmarkTree20K(b *testing.B) {
benchmarkSize(b, treeBench, 20480)
}
func BenchmarkRef8Bytes(b *testing.B) {
benchmarkSize(b, refBench, 8)
}
func BenchmarkRef1K(b *testing.B) {
benchmarkSize(b, refBench, 1024)
}
func BenchmarkRef20K(b *testing.B) {
benchmarkSize(b, refBench, 20480)
}
|
{
"content_hash": "a716d484cf2c1848aa18485df11579d0",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 60,
"avg_line_length": 18.07017543859649,
"alnum_prop": 0.7048543689320388,
"repo_name": "xiegeo/fensan",
"id": "5f8bd7ad7ab9d4d9d931995051ed4a165b61cbc3",
"size": "1030",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "hashtree/benchmark_test.go",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Go",
"bytes": "124241"
}
],
"symlink_target": ""
}
|
<!--
Copyright 2014-2016 CyberVision, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.kaaproject.kaa</groupId>
<version>0.8.0</version>
<artifactId>server</artifactId>
</parent>
<groupId>org.kaaproject.kaa.server</groupId>
<artifactId>common</artifactId>
<packaging>pom</packaging>
<name>Kaa Common Server Components</name>
<url>http://kaaproject.org</url>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<main.dir>${basedir}/../..</main.dir>
</properties>
<modules>
<module>zk</module>
<module>thrift</module>
<module>thrift-cli-server</module>
<module>thrift-cli-client</module>
<module>dao</module>
<module>nosql</module>
<module>dto</module>
<module>utils</module>
<module>netty-server</module>
<module>log-shared</module>
<module>admin-rest-client</module>
<module>server-shared</module>
<module>transport-shared</module>
<module>verifier-shared</module>
</modules>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongo-java-driver</artifactId>
<version>${mongo-driver.version}</version>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<version>${commons-lang.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
</project>
|
{
"content_hash": "eb2f0a884595a848b689ca43b540e3ef",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 108,
"avg_line_length": 36.208955223880594,
"alnum_prop": 0.6376751854905194,
"repo_name": "liuhu/Kaa",
"id": "d6c063e0ac70311f5b33bfb7a7bda33e9e187676",
"size": "2426",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server/common/pom.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4762"
},
{
"name": "C",
"bytes": "1397470"
},
{
"name": "C++",
"bytes": "1227671"
},
{
"name": "CMake",
"bytes": "71884"
},
{
"name": "CSS",
"bytes": "10373"
},
{
"name": "HTML",
"bytes": "6884"
},
{
"name": "Java",
"bytes": "9328357"
},
{
"name": "Makefile",
"bytes": "5541"
},
{
"name": "Objective-C",
"bytes": "1172379"
},
{
"name": "Python",
"bytes": "128276"
},
{
"name": "Ruby",
"bytes": "247"
},
{
"name": "Shell",
"bytes": "90772"
},
{
"name": "Thrift",
"bytes": "10264"
},
{
"name": "XSLT",
"bytes": "4062"
}
],
"symlink_target": ""
}
|
UPDATE meta SET meta_value='65' WHERE meta_key='schema_version';
# Patch identifier
INSERT INTO meta (species_id, meta_key, meta_value)
VALUES (NULL, 'patch', 'patch_64_65_a.sql|schema_version');
|
{
"content_hash": "2d304728f9a3976477bc040f7fe01e51",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 64,
"avg_line_length": 39.8,
"alnum_prop": 0.7236180904522613,
"repo_name": "at7/ensembl",
"id": "1f4cf46f2786137eaafe8c953645ff347eda2710",
"size": "985",
"binary": false,
"copies": "2",
"ref": "refs/heads/release/79",
"path": "sql/patch_64_65_a.sql",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "7944"
},
{
"name": "HTML",
"bytes": "1434"
},
{
"name": "PLpgSQL",
"bytes": "219434"
},
{
"name": "Perl",
"bytes": "5798176"
},
{
"name": "Perl 6",
"bytes": "437567"
},
{
"name": "Prolog",
"bytes": "2102"
},
{
"name": "Shell",
"bytes": "8641"
}
],
"symlink_target": ""
}
|
'''
@author: MengLai
'''
import os
import tempfile
import uuid
import time
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
import zstacklib.utils.ssh as ssh
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()
tmp_file = '/tmp/%s' % uuid.uuid1().get_hex()
def test():
test_util.test_dsc('Create test vm to test zstack upgrade by -u.')
if os.path.exists('/home/installation-package/zstack'):
image_name = os.environ.get('imageName_i_c7_z_1.2')
elif os.path.exists('/home/installation-package/mevoco'):
image_name = os.environ.get('imageName_i_c7_m_1.2')
vm = test_stub.create_vlan_vm(image_name)
test_obj_dict.add_vm(vm)
if os.environ.get('zstackManagementIp') == None:
vm.check()
else:
time.sleep(60)
#vm_inv = vm.get_vm()
#vm_ip = vm_inv.vmNics[0].ip
#ssh_cmd = 'ssh -oStrictHostKeyChecking=no -oCheckHostIP=no -oUserKnownHostsFile=/dev/null %s' % vm_ip
#ssh.make_ssh_no_password(vm_ip, test_lib.lib_get_vm_username(vm_inv), \
# test_lib.lib_get_vm_password(vm_inv))
#test_stub.copy_id_dsa(vm_inv, ssh_cmd, tmp_file)
#test_stub.copy_id_dsa_pub(vm_inv)
vm_inv = test_stub.create_vm_scenario(image_name, vm_name)
vm_ip = vm_inv.vmNics[0].ip
test_lib.lib_wait_target_up(vm_ip, 22)
test_stub.make_ssh_no_password(vm_ip, tmp_file)
test_util.test_dsc('Update MN IP')
test_stub.update_mn_hostname(vm_ip, tmp_file)
test_stub.update_mn_ip(vm_ip, tmp_file)
#test_stub.reset_rabbitmq(vm_ip, tmp_file)
test_stub.start_mn(vm_ip, tmp_file)
test_stub.check_installation(vm_ip, tmp_file)
#test_util.test_dsc('Update MN IP')
#cmd = '%s "zstack-ctl change_ip --ip="%s ' % (ssh_cmd, vm_ip)
#process_result = test_stub.execute_shell_in_process(cmd, tmp_file)
#cmd = '%s "zstack-ctl start"' % ssh_cmd
#process_result = test_stub.execute_shell_in_process(cmd, tmp_file)
#test_stub.check_installation(ssh_cmd, tmp_file, vm_inv)
pkg_num = 1.3
curren_num = float(os.environ.get('releasePkgNum'))
while pkg_num <= curren_num:
test_util.test_dsc('Upgrade zstack to %s' % pkg_num)
upgrade_target_file = '/root/zstack-upgrade-all-in-one.tgz'
upgrade_pkg = os.environ.get('zstackPkg_%s' % pkg_num)
test_stub.prepare_upgrade_test_env(vm_inv, upgrade_target_file, upgrade_pkg)
test_stub.upgrade_zstack(ssh_cmd, upgrade_target_file, tmp_file)
test_stub.check_zstack_version(ssh_cmd, tmp_file, vm_inv, str(pkg_num))
test_stub.check_installation(ssh_cmd, tmp_file, vm_inv)
pkg_num = pkg_num + 0.1
test_util.test_dsc('Upgrade zstack to latest')
upgrade_target_file = '/root/zstack-upgrade-all-in-one.tgz'
test_stub.prepare_test_env(vm_inv, upgrade_target_file)
test_stub.upgrade_zstack(ssh_cmd, upgrade_target_file, tmp_file)
zstack_latest_version = os.environ.get('zstackLatestVersion')
test_stub.check_zstack_version(ssh_cmd, tmp_file, vm_inv, zstack_latest_version)
test_stub.check_installation(ssh_cmd, tmp_file, vm_inv)
os.system('rm -f %s' % tmp_file)
vm.destroy()
test_util.test_pass('ZStack upgrade Test Success')
#Will be called only if exception happens in test().
def error_cleanup():
os.system('rm -f %s' % tmp_file)
test_lib.lib_error_cleanup(test_obj_dict)
|
{
"content_hash": "00faf01da66a2270b9961c19e0d4a41d",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 107,
"avg_line_length": 38.66304347826087,
"alnum_prop": 0.6513916221535001,
"repo_name": "zstackorg/zstack-woodpecker",
"id": "f3d15b8c75fd3da06525bf4f4f1d8354bffca5b8",
"size": "3557",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "integrationtest/vm/installation/upgrade/test_zs_upgd_cnt_1.2_latest_on_cos7.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "46522"
},
{
"name": "Makefile",
"bytes": "692"
},
{
"name": "Puppet",
"bytes": "875"
},
{
"name": "Python",
"bytes": "2891030"
},
{
"name": "Shell",
"bytes": "54266"
}
],
"symlink_target": ""
}
|
set(CMAKE_DEPENDS_LANGUAGES
"CXX"
)
# The set of files for implicit dependencies of each language:
set(CMAKE_DEPENDS_CHECK_CXX
"/home/noah/starid/libstarid/cereal-1.2.2/unittests/unordered_multiset.cpp" "/home/noah/starid/libstarid/cereal-1.2.2/unittests/CMakeFiles/test_unordered_multiset.dir/unordered_multiset.cpp.o"
)
set(CMAKE_CXX_COMPILER_ID "GNU")
# The include file search paths:
set(CMAKE_CXX_TARGET_INCLUDE_PATH
"libstarid/cereal-1.2.2/./include"
)
# Targets to which this target links.
set(CMAKE_TARGET_LINKED_INFO_FILES
)
# Fortran module output directory.
set(CMAKE_Fortran_TARGET_MODULE_DIR "")
|
{
"content_hash": "a9f9790f7093d3c77e4658850b6c5f51",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 194,
"avg_line_length": 31.3,
"alnum_prop": 0.7587859424920128,
"repo_name": "noahhsmith/starid",
"id": "9ff703a63078b6166c0117245699d772c29e7628",
"size": "693",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "libstarid/cereal-1.2.2/unittests/CMakeFiles/test_unordered_multiset.dir/DependInfo.cmake",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "2491"
},
{
"name": "C",
"bytes": "234071"
},
{
"name": "C++",
"bytes": "10949006"
},
{
"name": "CMake",
"bytes": "517023"
},
{
"name": "CSS",
"bytes": "5464"
},
{
"name": "Cuda",
"bytes": "130730"
},
{
"name": "Fortran",
"bytes": "1326303"
},
{
"name": "HTML",
"bytes": "1139"
},
{
"name": "JavaScript",
"bytes": "7967"
},
{
"name": "Makefile",
"bytes": "931324"
},
{
"name": "Matlab",
"bytes": "5878"
},
{
"name": "Objective-C",
"bytes": "120"
},
{
"name": "Python",
"bytes": "581779"
},
{
"name": "Shell",
"bytes": "31831"
}
],
"symlink_target": ""
}
|
/**
* This file is automatically generated by wheat-build.
* Do not modify this file -- YOUR CHANGES WILL BE ERASED!
*/
package x7c1.linen.res.layout;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.ViewGroup;
import android.view.View;
import android.support.v7.widget.RecyclerView;
import x7c1.wheat.ancient.resource.ViewHolderProvider;
import x7c1.wheat.ancient.resource.ViewHolderProviderFactory;
import x7c1.linen.R;
import x7c1.linen.glue.res.layout.SettingSourceAttach;
public class SettingSourceAttachProvider implements ViewHolderProvider<SettingSourceAttach> {
private final LayoutInflater inflater;
public SettingSourceAttachProvider(Context context){
this.inflater = LayoutInflater.from(context);
}
public SettingSourceAttachProvider(LayoutInflater inflater){
this.inflater = inflater;
}
@Override
public int layoutId(){
return R.layout.setting_source_attach;
}
@Override
public SettingSourceAttach inflateOn(ViewGroup parent){
return inflate(parent, false);
}
@Override
public SettingSourceAttach inflate(ViewGroup parent, boolean attachToRoot){
View view = inflater.inflate(R.layout.setting_source_attach, parent, attachToRoot);
return factory().createViewHolder(view);
}
@Override
public SettingSourceAttach inflate(){
return inflate(null, false);
}
public static ViewHolderProviderFactory<SettingSourceAttach> factory(){
return new ViewHolderProviderFactory<SettingSourceAttach>() {
@Override
public ViewHolderProvider<SettingSourceAttach> create(LayoutInflater inflater){
return new SettingSourceAttachProvider(inflater);
}
@Override
public ViewHolderProvider<SettingSourceAttach> create(Context context){
return new SettingSourceAttachProvider(context);
}
@Override
public SettingSourceAttach createViewHolder(View view){
return new SettingSourceAttach(
view,
(android.support.v7.widget.RecyclerView) view.findViewById(R.id.setting_source_attach__channels)
);
}
};
}
}
|
{
"content_hash": "18d118523c79b3341f5c49c94545a7bb",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 116,
"avg_line_length": 32.40845070422535,
"alnum_prop": 0.6936114732724902,
"repo_name": "x7c1/Linen",
"id": "63865711082268b0fb2f66d40c6ee081de757e3d",
"size": "2301",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "linen-starter/src/main/java/x7c1/linen/res/layout/SettingSourceAttachProvider.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "175600"
},
{
"name": "Ruby",
"bytes": "1183"
},
{
"name": "Scala",
"bytes": "658350"
},
{
"name": "Shell",
"bytes": "807"
}
],
"symlink_target": ""
}
|
package ru.stqa.test_courses.addressbook.tests;
import org.testng.annotations.Test;
import ru.stqa.test_courses.addressbook.model.ContactData;
import java.util.Arrays;
import java.util.stream.Collectors;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
/**
* Created by i-ru on 09.05.2017.
*/
public class ContactEmailTests extends TestBase {
@Test
public void testContactEmail() {
app.goTo().homePage();
ContactData contact = app.contact().all().iterator().next();
ContactData contactInfoFromEditForm = app.contact().infoFromEditForm(contact);
assertThat(contact.getAllEmails(), equalTo(mergeEmails(contactInfoFromEditForm)));
}
private String mergeEmails(ContactData contact) {
return Arrays.asList(contact.getEmail(), contact.getEmail_2(), contact.getEmail_3())
.stream().filter((s) -> !s.equals(""))
.collect(Collectors.joining("\n"));
}
}
|
{
"content_hash": "414f9d99f02df2ec55ebb50740d90ba2",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 92,
"avg_line_length": 32.193548387096776,
"alnum_prop": 0.7014028056112225,
"repo_name": "DEX-6/test_courses",
"id": "c26eb8391fed69a024697efca717a77f6a06ba04",
"size": "998",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "addressbook-web-tests/src/test/java/ru/stqa/test_courses/addressbook/tests/ContactEmailTests.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "69431"
}
],
"symlink_target": ""
}
|
const path = require('path');
const assert = require('yeoman-assert');
const helpers = require('yeoman-test');
const fse = require('fs-extra');
const expectedFiles = require('./utils/expected-files');
describe('JHipster generator common', () => {
before(done => {
helpers
.run(require.resolve('../generators/common'))
.inTmpDir(dir => {
fse.copySync(path.join(__dirname, '../test/templates/default'), dir);
})
.withOptions({
fromCli: true,
skipInstall: true,
skipChecks: true,
})
.on('end', done);
});
it('creates common files', () => {
assert.file(expectedFiles.common);
});
});
|
{
"content_hash": "6d1c6114f6b2fc3f1e693c32099a3293",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 85,
"avg_line_length": 30,
"alnum_prop": 0.5253333333333333,
"repo_name": "PierreBesson/generator-jhipster",
"id": "de0a39e49fd300e9bf70709b806de598e9223c3d",
"size": "750",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "test/common.spec.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "23758"
},
{
"name": "Dockerfile",
"bytes": "2040"
},
{
"name": "HTML",
"bytes": "475598"
},
{
"name": "Java",
"bytes": "1329725"
},
{
"name": "JavaScript",
"bytes": "3814904"
},
{
"name": "SCSS",
"bytes": "45521"
},
{
"name": "Shell",
"bytes": "55469"
},
{
"name": "TypeScript",
"bytes": "1248017"
},
{
"name": "Vue",
"bytes": "156185"
}
],
"symlink_target": ""
}
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_92) on Tue Nov 02 10:36:42 GMT 2021 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Uses of Class com.github.rvesse.airline.annotations.help.HideSection (Airline - Library 2.8.3 API)</title>
<meta name="date" content="2021-11-02">
<link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class com.github.rvesse.airline.annotations.help.HideSection (Airline - Library 2.8.3 API)";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../../com/github/rvesse/airline/annotations/help/HideSection.html" title="annotation in com.github.rvesse.airline.annotations.help">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../index.html?com/github/rvesse/airline/annotations/help/class-use/HideSection.html" target="_top">Frames</a></li>
<li><a href="HideSection.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class com.github.rvesse.airline.annotations.help.HideSection" class="title">Uses of Class<br>com.github.rvesse.airline.annotations.help.HideSection</h2>
</div>
<div class="classUseContainer">No usage of com.github.rvesse.airline.annotations.help.HideSection</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../../com/github/rvesse/airline/annotations/help/HideSection.html" title="annotation in com.github.rvesse.airline.annotations.help">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../index.html?com/github/rvesse/airline/annotations/help/class-use/HideSection.html" target="_top">Frames</a></li>
<li><a href="HideSection.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2012–2021. All rights reserved.</small></p>
</body>
</html>
|
{
"content_hash": "ccde29026747de924275c6e6879ddbb7",
"timestamp": "",
"source": "github",
"line_count": 126,
"max_line_length": 174,
"avg_line_length": 38.76190476190476,
"alnum_prop": 0.6134316134316135,
"repo_name": "rvesse/airline",
"id": "775ee2bf04e631fc5be104d3be03edd1a43d07c2",
"size": "4884",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "docs/javadoc/2.8.3/airline/com/github/rvesse/airline/annotations/help/class-use/HideSection.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "91"
},
{
"name": "Java",
"bytes": "2176058"
},
{
"name": "Shell",
"bytes": "4550"
}
],
"symlink_target": ""
}
|
package com.docuware.dev.schema._public.services.platform;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import java.net.URI;
import com.docuware.dev.Extensions.*;
import java.util.concurrent.CompletableFuture;
import java.util.*;
import com.docuware.dev.schema._public.services.Link;
import com.docuware.dev.schema._public.services.platform.SelectListResult;
import com.docuware.dev.schema._public.services.platform.SelectListExpression;
import javax.xml.bind.JAXBElement;
import javax.xml.namespace.QName;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
import com.docuware.dev.schema._public.services.Links;
import com.docuware.dev.settings.interop.DWFieldType;
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "StampFormField", propOrder = {
"proxy",
"links"
})
public class StampFormField implements IRelationsWithProxy {
private HttpClientProxy proxy;//test
@XmlElement(name = "Links", namespace = "http://dev.docuware.com/schema/public/services")
protected Links links;
@XmlAttribute(name = "Name", required = true)
protected String name;
@XmlAttribute(name = "Label", required = true)
protected String label;
@XmlAttribute(name = "Length")
protected Integer length;
@XmlAttribute(name = "Precision")
protected Integer precision;
@XmlAttribute(name = "DWFieldType")
protected DWFieldType dwFieldType;
@XmlAttribute(name = "Mask")
protected String mask;
@XmlAttribute(name = "MaskErrorText")
protected String maskErrorText;
@XmlAttribute(name = "SampleEditText")
protected String sampleEditText;
public Links getLinks() {
return links;
}
public void setLinks(Links value) {
this.links = value;
}
/**Name of the form field*/
public String getName() {
return name;
}
/**Name of the form field*/
public void setName(String value) {
this.name = value;
}
/**The label used to represent the form field when the stamp is set on the client.*/
public String getLabel() {
return label;
}
public void setLabel(String value) {
this.label = value;
}
/**Lenght of the form field*/
public int getLength() {
if (length == null) {
return 0;
} else {
return length;
}
}
/**Lenght of the form field*/
public void setLength(Integer value) {
this.length = value;
}
/**Gets the number of digits after the decimal point in case of a numeric field.*/
public int getPrecision() {
if (precision == null) {
return 0;
} else {
return precision;
}
}
/**Gets the number of digits after the decimal point in case of a numeric field.*/
public void setPrecision(Integer value) {
this.precision = value;
}
/**The DocuWare type of the field.*/
public DWFieldType getDWFieldType() {
if (dwFieldType == null) {
return DWFieldType.TEXT;
} else {
return dwFieldType;
}
}
/**The DocuWare type of the field.*/
public void setDWFieldType(DWFieldType value) {
this.dwFieldType = value;
}
/**Mask(regular expression) for limiting the input options for the form field.*/
public String getMask() {
return mask;
}
/**Mask(regular expression) for limiting the input options for the form field.*/
public void setMask(String value) {
this.mask = value;
}
/**Error message to display if the input does not match the mask definition.*/
public String getMaskErrorText() {
return maskErrorText;
}
/**
 * Error message to display if the input does not match the mask definition.
 *
 * @param value the new error text
 */
public void setMaskErrorText(String value) {
    this.maskErrorText = value;
}
/**
 * Sample entry that matches the mask definition.
 *
 * @return the sample text, may be null
 */
@Dolphin
public String getSampleEditText() {
    return sampleEditText;
}
/**
 * Sample entry that matches the mask definition.
 *
 * @param value the new sample text
 */
@Dolphin
public void setSampleEditText(String value) {
    this.sampleEditText = value;
}
/**
 * Gets the HTTP communication proxy used by this instance.
 *
 * @return The proxy
 */
@Extension
public HttpClientProxy getProxy() {
    return this.proxy;
}
/**
 * Sets the HTTP Communication Proxy which is used in further HTTP communication.
 *
 * @param proxy The new proxy
 */
@Extension
public void setProxy(HttpClientProxy proxy) {
    this.proxy = proxy;
}
/**
 * Gets the base URI of the specified relations instance.
 *
 * @return The base URI of the specified relations instance.
 */
@Extension
public URI getBaseUri() {
    return RelationsWithProxyExtensions.getBaseUri(this);
}
/**
 * Gets the link by its name.
 *
 * @param relationName Name of the relation
 * @return The link, if it exists; null otherwise.
 */
@Extension
public Link getLink(String relationName) {
    return RelationExtension.getLink(this, relationName);
}
/**
 * Gets the URI of the relation specified by the name.
 *
 * @param relationName Name of the relation
 * @return The relation URI, if the link exists; null otherwise.
 */
@Extension
public String getRelationUri(String relationName) {
    return RelationExtension.getRelationUri(this, relationName);
}
/**
 * Gets the URI of the relation specified by the name.
 *
 * @param relationName Name of the relation
 * @return The relation URI, if the link exists.
 * @throws RuntimeException: The specified Link is not found
 */
@Extension
public String getRelationUriOrThrow(String relationName) {
    return RelationExtension.getRelationUriOrThrow(this, relationName);
}
/**
 * Determines whether the specified link exists.
 *
 * @param relationName Name of the relation
 * @return True, if the specified link exists; otherwise, False.
 */
@Extension
public boolean hasRelationUri(String relationName) {
    return RelationExtension.hasRelationUri(this, relationName);
}
/**
 * Gets the Uri of the Link for the relation "SelectListValues".
 * Returns the Uri of the Link for the relation "SelectListValues" if the link exists,
 * or null if it does not. The returned link can be relative or absolute. If it is a
 * relative link you must set it in the right context yourself.
 *
 * @return the requested URI, or null if the link does not exist
 */
public URI getSelectListValuesRelationLink() {
    return MethodInvocation.getLink(this, links, "selectListValues");
}
/**
 * Calls the HTTP Get Method on the link for the relation "SelectListValues".
 *
 * @return the deserialized select list result
 */
public SelectListResult getSelectListResultFromSelectListValuesRelation() {
    return MethodInvocation.<SelectListResult>get(this, links, "selectListValues", SelectListResult.class);
}
/**
 * Calls the HTTP Get Method on the link for the relation "SelectListValues" asynchronously.
 *
 * @return a future completing with the deserialized response
 */
public CompletableFuture<DeserializedHttpResponseGen<SelectListResult>> getSelectListResultFromSelectListValuesRelationAsync() {
    return MethodInvocation.<SelectListResult>getAsync(this, links, "selectListValues", SelectListResult.class);
}
/**
 * Calls the HTTP Get Method on the link for the relation "SelectListValues" asynchronously.
 *
 * @param ct token that allows the caller to cancel the request
 * @return a future completing with the deserialized response
 */
public CompletableFuture<DeserializedHttpResponseGen<SelectListResult>> getSelectListResultFromSelectListValuesRelationAsync(CancellationToken ct) {
    return MethodInvocation.<SelectListResult>getAsync(this, links, "selectListValues", SelectListResult.class, ct);
}
/**
 * Calls the HTTP post Method on the link for the relation "SelectListValues".
 *
 * @param data the select-list expression sent as the request body
 * @return the deserialized select list result
 */
public SelectListResult postToSelectListValuesRelationForSelectListResult(SelectListExpression data) {
    // Parameterized JAXBElement instead of the raw type: removes the
    // unchecked-conversion warning; runtime behavior is unchanged.
    return MethodInvocation.<SelectListResult, SelectListExpression> post(this, links, "selectListValues", SelectListResult.class, new JAXBElement<SelectListExpression>(new QName("http://dev.docuware.com/schema/public/services/platform", "SelectListExpression"), SelectListExpression.class, null, data), "application/vnd.docuware.platform.selectlistexpression+xml", "application/vnd.docuware.platform.selectlistresult+xml");
}
/**
 * Calls the HTTP post Method on the link for the relation "SelectListValues" asynchronously.
 *
 * @param data the select-list expression sent as the request body
 * @return a future completing with the deserialized response
 */
public CompletableFuture<DeserializedHttpResponseGen<SelectListResult>> postToSelectListValuesRelationForSelectListResultAsync(SelectListExpression data) {
    // Parameterized JAXBElement instead of the raw type (see sync variant).
    return MethodInvocation.<SelectListResult, SelectListExpression >postAsync(this, links, "selectListValues", SelectListResult.class, new JAXBElement<SelectListExpression>(new QName("http://dev.docuware.com/schema/public/services/platform", "SelectListExpression"), SelectListExpression.class, null, data), "application/vnd.docuware.platform.selectlistexpression+xml", "application/vnd.docuware.platform.selectlistresult+xml");
}
/**
 * Calls the HTTP post Method on the link for the relation "SelectListValues" asynchronously.
 *
 * @param ct token that allows the caller to cancel the request
 * @param data the select-list expression sent as the request body
 * @return a future completing with the deserialized response
 */
public CompletableFuture<DeserializedHttpResponseGen<SelectListResult>> postToSelectListValuesRelationForSelectListResultAsync(CancellationToken ct, SelectListExpression data) {
    // Parameterized JAXBElement instead of the raw type (see sync variant).
    return MethodInvocation.<SelectListResult, SelectListExpression >postAsync(this, links, "selectListValues", SelectListResult.class, new JAXBElement<SelectListExpression>(new QName("http://dev.docuware.com/schema/public/services/platform", "SelectListExpression"), SelectListExpression.class, null, data), "application/vnd.docuware.platform.selectlistexpression+xml", "application/vnd.docuware.platform.selectlistresult+xml", ct);
}
/**
 * Gets the Uri of the Link for the relation "SimpleSelectList".
 * Returns the Uri of the Link for the relation "SimpleSelectList" if the link exists,
 * or null if it does not. The returned link can be relative or absolute. If it is a
 * relative link you must set it in the right context yourself.
 *
 * @return the requested URI, or null if the link does not exist
 */
public URI getSimpleSelectListRelationLink() {
    return MethodInvocation.getLink(this, links, "simpleSelectList");
}
/**
 * Calls the HTTP Get Method on the link for the relation "SimpleSelectList".
 *
 * @return the deserialized select list result
 */
public SelectListResult getSelectListResultFromSimpleSelectListRelation() {
    return MethodInvocation.<SelectListResult>get(this, links, "simpleSelectList", SelectListResult.class);
}
/**
 * Calls the HTTP Get Method on the link for the relation "SimpleSelectList" asynchronously.
 *
 * @return a future completing with the deserialized response
 */
public CompletableFuture<DeserializedHttpResponseGen<SelectListResult>> getSelectListResultFromSimpleSelectListRelationAsync() {
    return MethodInvocation.<SelectListResult>getAsync(this, links, "simpleSelectList", SelectListResult.class);
}
/**
 * Calls the HTTP Get Method on the link for the relation "SimpleSelectList" asynchronously.
 *
 * @param ct token that allows the caller to cancel the request
 * @return a future completing with the deserialized response
 */
public CompletableFuture<DeserializedHttpResponseGen<SelectListResult>> getSelectListResultFromSimpleSelectListRelationAsync(CancellationToken ct) {
    return MethodInvocation.<SelectListResult>getAsync(this, links, "simpleSelectList", SelectListResult.class, ct);
}
/**
 * Calls the HTTP post Method on the link for the relation "SimpleSelectList".
 *
 * @param data the select-list expression sent as the request body
 * @return the deserialized select list result
 */
public SelectListResult postToSimpleSelectListRelationForSelectListResult(SelectListExpression data) {
    // Parameterized JAXBElement instead of the raw type: removes the
    // unchecked-conversion warning; runtime behavior is unchanged.
    return MethodInvocation.<SelectListResult, SelectListExpression> post(this, links, "simpleSelectList", SelectListResult.class, new JAXBElement<SelectListExpression>(new QName("http://dev.docuware.com/schema/public/services/platform", "SelectListExpression"), SelectListExpression.class, null, data), "application/vnd.docuware.platform.selectlistexpression+xml", "application/vnd.docuware.platform.selectlistresult+xml");
}
/**
 * Calls the HTTP post Method on the link for the relation "SimpleSelectList" asynchronously.
 *
 * @param data the select-list expression sent as the request body
 * @return a future completing with the deserialized response
 */
public CompletableFuture<DeserializedHttpResponseGen<SelectListResult>> postToSimpleSelectListRelationForSelectListResultAsync(SelectListExpression data) {
    // Parameterized JAXBElement instead of the raw type (see sync variant).
    return MethodInvocation.<SelectListResult, SelectListExpression >postAsync(this, links, "simpleSelectList", SelectListResult.class, new JAXBElement<SelectListExpression>(new QName("http://dev.docuware.com/schema/public/services/platform", "SelectListExpression"), SelectListExpression.class, null, data), "application/vnd.docuware.platform.selectlistexpression+xml", "application/vnd.docuware.platform.selectlistresult+xml");
}
/**
 * Calls the HTTP post Method on the link for the relation "SimpleSelectList" asynchronously.
 *
 * @param ct token that allows the caller to cancel the request
 * @param data the select-list expression sent as the request body
 * @return a future completing with the deserialized response
 */
public CompletableFuture<DeserializedHttpResponseGen<SelectListResult>> postToSimpleSelectListRelationForSelectListResultAsync(CancellationToken ct, SelectListExpression data) {
    // Parameterized JAXBElement instead of the raw type (see sync variant).
    return MethodInvocation.<SelectListResult, SelectListExpression >postAsync(this, links, "simpleSelectList", SelectListResult.class, new JAXBElement<SelectListExpression>(new QName("http://dev.docuware.com/schema/public/services/platform", "SelectListExpression"), SelectListExpression.class, null, data), "application/vnd.docuware.platform.selectlistexpression+xml", "application/vnd.docuware.platform.selectlistresult+xml", ct);
}
}
|
{
"content_hash": "23a944ed1a675251dd042b11b257b70e",
"timestamp": "",
"source": "github",
"line_count": 340,
"max_line_length": 415,
"avg_line_length": 40.129411764705885,
"alnum_prop": 0.7138669012019936,
"repo_name": "DocuWare/PlatformJavaClient",
"id": "557a67b4e29d6d15cf6bd41e61e7d659279a844f",
"size": "13644",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/com/docuware/dev/schema/_public/services/platform/StampFormField.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "1758216"
}
],
"symlink_target": ""
}
|
from jpype import JPackage, java
import sys
from . import common
class MroTestCase(common.JPypeTestCase):
    """Regression tests for Java method-resolution-order handling in jpype."""
    def testMro(self):
        # Merely resolving the class is the regression check; no assertion
        # is needed — a broken MRO raises during lookup.
        C = JPackage('jpype.mro').C
    def testMultipleInterfaces(self):
        # Instantiating a class that implements multiple interfaces must
        # succeed; failure raises during construction.
        j = JPackage("jpype").mro.MultipleInterfaces
        myinstance = j()
|
{
"content_hash": "6a11ae603ff6d3d559540b6d3f6d6724",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 52,
"avg_line_length": 26.727272727272727,
"alnum_prop": 0.6598639455782312,
"repo_name": "icedwater/jpype",
"id": "37bbd385e09cc5d08f98be33540fc2836beaa000",
"size": "1071",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/jpypetest/mro.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1842"
},
{
"name": "C",
"bytes": "37981"
},
{
"name": "C++",
"bytes": "495450"
},
{
"name": "Java",
"bytes": "32214"
},
{
"name": "Objective-C",
"bytes": "387"
},
{
"name": "PowerShell",
"bytes": "9130"
},
{
"name": "Python",
"bytes": "132551"
},
{
"name": "XSLT",
"bytes": "3460"
}
],
"symlink_target": ""
}
|
import inspect
import os.path
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import units
import six
from cinder import exception
from cinder.i18n import _, _LE, _LI, _LW
from cinder.volume.drivers.emc import emc_vmax_fast
from cinder.volume.drivers.emc import emc_vmax_https
from cinder.volume.drivers.emc import emc_vmax_masking
from cinder.volume.drivers.emc import emc_vmax_provision
from cinder.volume.drivers.emc import emc_vmax_provision_v3
from cinder.volume.drivers.emc import emc_vmax_utils
from cinder.volume import utils as volume_utils
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
# pywbem is optional at import time; its absence is reported later in
# EMCVMAXCommon.__init__ so the module can still be imported.
try:
    import pywbem
    pywbemAvailable = True
except ImportError:
    pywbemAvailable = False
# Defect fixed: BACKENDNAME was defined twice with the same value; the
# redundant second definition has been removed.
CINDER_EMC_CONFIG_FILE = '/etc/cinder/cinder_emc_config.xml'
CINDER_EMC_CONFIG_FILE_PREFIX = '/etc/cinder/cinder_emc_config_'
CINDER_EMC_CONFIG_FILE_POSTFIX = '.xml'
# Keys used to look up backend/pool information in volume types and
# capabilities dictionaries.
BACKENDNAME = 'volume_backend_name'
PREFIXBACKENDNAME = 'capabilities:volume_backend_name'
PORTGROUPNAME = 'portgroupname'
EMC_ROOT = 'root/emc'
POOL = 'storagetype:pool'
ARRAY = 'storagetype:array'
FASTPOLICY = 'storagetype:fastpolicy'
COMPOSITETYPE = 'storagetype:compositetype'
STRIPECOUNT = 'storagetype:stripecount'
MEMBERCOUNT = 'storagetype:membercount'
STRIPED = 'striped'
CONCATENATED = 'concatenated'
SMI_VERSION_8 = 800
# V3 array extra-spec keys.
SLO = 'storagetype:slo'
WORKLOAD = 'storagetype:workload'
INTERVAL = 'storagetype:interval'
RETRIES = 'storagetype:retries'
ISV3 = 'isV3'
TRUNCATE_5 = 5
TRUNCATE_8 = 8
# Driver configuration options registered with oslo.config.
emc_opts = [
    cfg.StrOpt('cinder_emc_config_file',
               default=CINDER_EMC_CONFIG_FILE,
               help='use this file for cinder emc plugin '
                    'config data'), ]
CONF.register_opts(emc_opts)
class EMCVMAXCommon(object):
    """Common class for SMI-S based EMC volume drivers.

    This common class is for EMC volume drivers based on SMI-S.
    It supports VNX and VMAX arrays.
    """
    VERSION = "2.0.0"
    # Default capability stats reported before the first array poll.
    stats = {'driver_version': '1.0',
             'free_capacity_gb': 0,
             'reserved_percentage': 0,
             'storage_protocol': None,
             'total_capacity_gb': 0,
             'vendor_name': 'EMC',
             'volume_backend_name': None}
    # Backend/pool bookkeeping, populated by _gather_info().
    pool_info = {'backend_name': None,
                 'config_file': None,
                 'arrays_info': {}}
    def __init__(self, prtcl, version, configuration=None):
        """Initialize driver state and the SMI-S helper objects.

        :param prtcl: the storage protocol (used later to pick the
                      iSCSI/FC attach path)
        :param version: driver version string, embedded in created volumes
        :param configuration: cinder configuration object
        """
        if not pywbemAvailable:
            LOG.info(_LI(
                "Module PyWBEM not installed. "
                "Install PyWBEM using the python-pywbem package."))
        self.protocol = prtcl
        self.configuration = configuration
        # NOTE(review): a None configuration fails here at
        # append_config_values — callers appear to always pass one.
        self.configuration.append_config_values(emc_opts)
        # ECOM connection details, set lazily by _set_ecom_credentials.
        self.conn = None
        self.url = None
        self.user = None
        self.passwd = None
        self.masking = emc_vmax_masking.EMCVMAXMasking(prtcl)
        self.utils = emc_vmax_utils.EMCVMAXUtils(prtcl)
        self.fast = emc_vmax_fast.EMCVMAXFast(prtcl)
        self.provision = emc_vmax_provision.EMCVMAXProvision(prtcl)
        self.provisionv3 = emc_vmax_provision_v3.EMCVMAXProvisionV3(prtcl)
        self.version = version
        self._gather_info()
    def _gather_info(self):
        """Gather the relevant information for update_volume_stats."""
        # Prefer the attribute when present; otherwise use safe_get so a
        # missing option does not raise.
        if hasattr(self.configuration, 'cinder_emc_config_file'):
            self.pool_info['config_file'] = (
                self.configuration.cinder_emc_config_file)
        else:
            self.pool_info['config_file'] = (
                self.configuration.safe_get('cinder_emc_config_file'))
        self.pool_info['backend_name'] = (
            self.configuration.safe_get('volume_backend_name'))
        LOG.debug(
            "Updating volume stats on file %(emcConfigFileName)s on "
            "backend %(backendName)s.",
            {'emcConfigFileName': self.pool_info['config_file'],
             'backendName': self.pool_info['backend_name']})
        # Parse the EMC XML config into the array map used by stats/retype.
        self.pool_info['arrays_info'] = (
            self.utils.parse_file_to_get_array_map(
                self.pool_info['config_file']))
    def create_volume(self, volume):
        """Creates a EMC(VMAX) volume from a pre-existing storage pool.

        For a concatenated compositeType:
        If the volume size is over 240GB then a composite is created
        EMCNumberOfMembers > 1, otherwise it defaults to a non composite
        For a striped compositeType:
        The user must supply an extra spec to determine how many metas
        will make up the striped volume. If the meta size is greater
        than 240GB an error is returned to the user. Otherwise the
        EMCNumberOfMembers is what the user specifies.

        :param volume: volume Object
        :returns: dict -- volumeDict - the volume dictionary
        """
        volumeSize = int(self.utils.convert_gb_to_bits(volume['size']))
        volumeName = volume['id']
        extraSpecs = self._initial_setup(volume)
        self.conn = self._get_ecom_connection()
        # V3 arrays provision against SLO/workload; V2 arrays build a
        # (possibly composite) volume in a storage pool.
        if extraSpecs[ISV3]:
            rc, volumeDict, storageSystemName = (
                self._create_v3_volume(volume, volumeName, volumeSize,
                                       extraSpecs))
        else:
            rc, volumeDict, storageSystemName = (
                self._create_composite_volume(volume, volumeName, volumeSize,
                                              extraSpecs))
        # If volume is created as part of a consistency group.
        if 'consistencygroup_id' in volume and volume['consistencygroup_id']:
            # CG names on the array are truncated to 8 characters.
            cgName = self.utils.truncate_string(
                volume['consistencygroup_id'], 8)
            volumeInstance = self.utils.find_volume_instance(
                self.conn, volumeDict, volumeName)
            replicationService = (
                self.utils.find_replication_service(self.conn,
                                                    storageSystemName))
            cgInstanceName = (
                self._find_consistency_group(replicationService, cgName))
            self.provision.add_volume_to_cg(self.conn,
                                            replicationService,
                                            cgInstanceName,
                                            volumeInstance.path,
                                            cgName,
                                            volumeName,
                                            extraSpecs)
        LOG.info(_LI("Leaving create_volume: %(volumeName)s "
                     "Return code: %(rc)lu "
                     "volume dict: %(name)s."),
                 {'volumeName': volumeName,
                  'rc': rc,
                  'name': volumeDict})
        # Adding version information
        volumeDict['version'] = self.version
        return volumeDict
    def create_volume_from_snapshot(self, volume, snapshot):
        """Creates a volume from a snapshot.

        For VMAX, replace snapshot with clone.

        :param volume: volume Object
        :param snapshot: snapshot object
        :returns: dict -- the cloned volume dictionary
        :raises: VolumeBackendAPIException
        """
        LOG.debug("Entering create_volume_from_snapshot.")
        # Target the new volume's backend/pool when looking up the snapshot.
        snapshot['host'] = volume['host']
        extraSpecs = self._initial_setup(snapshot)
        self.conn = self._get_ecom_connection()
        snapshotInstance = self._find_lun(snapshot)
        storageSystem = snapshotInstance['SystemName']
        # If the snapshot is still in a sync relationship with its source,
        # break that relationship before cloning from it.
        syncName = self.utils.find_sync_sv_by_target(
            self.conn, storageSystem, snapshotInstance, extraSpecs, True)
        if syncName is not None:
            repservice = self.utils.find_replication_service(self.conn,
                                                             storageSystem)
            if repservice is None:
                exception_message = (_("Cannot find Replication Service to "
                                       "create volume for snapshot %s.")
                                     % snapshotInstance)
                raise exception.VolumeBackendAPIException(
                    data=exception_message)
            self.provision.delete_clone_relationship(
                self.conn, repservice, syncName, extraSpecs)
        # NOTE(review): snapshot['host'] was already set above; this second
        # assignment looks redundant — confirm before removing.
        snapshot['host'] = volume['host']
        return self._create_cloned_volume(volume, snapshot, extraSpecs, False)
def create_cloned_volume(self, cloneVolume, sourceVolume):
"""Creates a clone of the specified volume.
:param cloneVolume: clone volume Object
:param sourceVolume: volume object
:returns: cloneVolumeDict -- the cloned volume dictionary
"""
extraSpecs = self._initial_setup(sourceVolume)
return self._create_cloned_volume(cloneVolume, sourceVolume,
extraSpecs, False)
def delete_volume(self, volume):
"""Deletes a EMC(VMAX) volume.
:param volume: volume Object
"""
LOG.info(_LI("Deleting Volume: %(volume)s"),
{'volume': volume['name']})
rc, volumeName = self._delete_volume(volume)
LOG.info(_LI("Leaving delete_volume: %(volumename)s Return code: "
"%(rc)lu."),
{'volumename': volumeName,
'rc': rc})
def create_snapshot(self, snapshot, volume):
"""Creates a snapshot.
For VMAX, replace snapshot with clone.
:param snapshot: snapshot object
:param volume: volume Object to create snapshot from
:returns: dict -- the cloned volume dictionary
"""
extraSpecs = self._initial_setup(volume)
return self._create_cloned_volume(snapshot, volume, extraSpecs, True)
def delete_snapshot(self, snapshot, volume):
"""Deletes a snapshot.
:param snapshot: snapshot object
:param volume: volume Object to create snapshot from
"""
LOG.info(_LI("Delete Snapshot: %(snapshotName)s."),
{'snapshotName': snapshot['name']})
snapshot['host'] = volume['host']
self._delete_snapshot(snapshot)
def _remove_members(self, controllerConfigService,
volumeInstance, connector, extraSpecs):
"""This method unmaps a volume from a host.
Removes volume from the Device Masking Group that belongs to
a Masking View.
Check if fast policy is in the extra specs. If it isn't we do
not need to do any thing for FAST.
Assume that isTieringPolicySupported is False unless the FAST
policy is in the extra specs and tiering is enabled on the array.
:param controllerConfigService: instance name of
ControllerConfigurationService
:param volumeInstance: volume Object
:param connector: the connector object
:param extraSpecs: extra specifications
:returns: storageGroupInstanceName
"""
volumeName = volumeInstance['ElementName']
LOG.debug("Detaching volume %s.", volumeName)
return self.masking.remove_and_reset_members(
self.conn, controllerConfigService, volumeInstance,
volumeName, extraSpecs, connector)
def _unmap_lun(self, volume, connector):
"""Unmaps a volume from the host.
:param volume: the volume Object
:param connector: the connector Object
:raises: VolumeBackendAPIException
"""
extraSpecs = self._initial_setup(volume)
volumename = volume['name']
LOG.info(_LI("Unmap volume: %(volume)s."),
{'volume': volumename})
device_info = self.find_device_number(volume, connector['host'])
device_number = device_info['hostlunid']
if device_number is None:
LOG.info(_LI("Volume %s is not mapped. No volume to unmap."),
volumename)
return
vol_instance = self._find_lun(volume)
storage_system = vol_instance['SystemName']
configservice = self.utils.find_controller_configuration_service(
self.conn, storage_system)
if configservice is None:
exception_message = (_("Cannot find Controller Configuration "
"Service for storage system "
"%(storage_system)s.")
% {'storage_system': storage_system})
raise exception.VolumeBackendAPIException(data=exception_message)
self._remove_members(configservice, vol_instance, connector,
extraSpecs)
    def initialize_connection(self, volume, connector):
        """Initializes the connection and returns device and connection info.

        The volume may be already mapped, if this is so the deviceInfo tuple
        is returned.  If the volume is not already mapped then we need to
        gather information to either 1. Create an new masking view or 2. Add
        the volume to an existing storage group within an already existing
        maskingview.

        The naming convention is the following:
        initiatorGroupName = OS-<shortHostName>-<shortProtocol>-IG
                             e.g OS-myShortHost-I-IG
        storageGroupName = OS-<shortHostName>-<poolName>-<shortProtocol>-SG
                           e.g OS-myShortHost-SATA_BRONZ1-I-SG
        portGroupName = OS-<target>-PG  The portGroupName will come from
                        the EMC configuration xml file.
                        These are precreated. If the portGroup does not
                        exist then an error will be returned to the user
        maskingView  = OS-<shortHostName>-<poolName>-<shortProtocol>-MV
                       e.g OS-myShortHost-SATA_BRONZ1-I-MV

        :param volume: volume Object
        :param connector: the connector Object
        :returns: dict -- deviceInfoDict - device information dict
        :raises: VolumeBackendAPIException
        """
        extraSpecs = self._initial_setup(volume)
        volumeName = volume['name']
        LOG.info(_LI("Initialize connection: %(volume)s."),
                 {'volume': volumeName})
        self.conn = self._get_ecom_connection()
        deviceInfoDict = self.find_device_number(volume, connector['host'])
        maskingViewDict = self._populate_masking_dict(
            volume, connector, extraSpecs)
        # A 'hostlunid' means the volume is already exported somewhere.
        if ('hostlunid' in deviceInfoDict and
                deviceInfoDict['hostlunid'] is not None):
            isSameHost = self._is_same_host(connector, deviceInfoDict)
            if isSameHost:
                # Device is already mapped to same host so we will leave
                # the state as is.
                deviceNumber = deviceInfoDict['hostlunid']
                LOG.info(_LI("Volume %(volume)s is already mapped. "
                             "The device number is  %(deviceNumber)s."),
                         {'volume': volumeName,
                          'deviceNumber': deviceNumber})
            else:
                # Mapped to a different host: treat as live migration so the
                # source mapping is preserved.
                deviceInfoDict = self._attach_volume(
                    volume, connector, extraSpecs, maskingViewDict, True)
        else:
            deviceInfoDict = self._attach_volume(
                volume, connector, extraSpecs, maskingViewDict)
        # iSCSI additionally needs the array's IP protocol endpoints.
        if self.protocol.lower() == 'iscsi':
            return self._find_ip_protocol_endpoints(
                self.conn, deviceInfoDict['storagesystem'],
                maskingViewDict['pgGroupName'])
        else:
            return deviceInfoDict
    def _attach_volume(self, volume, connector, extraSpecs,
                       maskingViewDict, isLiveMigration=None):
        """Attach a volume to a host.

        If live migration is being undertaken then the volume
        remains attached to the source host.

        :params volume: the volume object
        :params connector: the connector object
        :param extraSpecs: extra specifications
        :param maskingViewDict: masking view information
        :param isLiveMigration: boolean, can be None
        :returns: dict -- deviceInfoDict
        :raises: VolumeBackendAPIException
        """
        volumeName = volume['name']
        # NOTE(review): the maskingViewDict parameter is immediately
        # recomputed here, so the passed-in value is effectively ignored —
        # confirm before simplifying the signature.
        maskingViewDict = self._populate_masking_dict(
            volume, connector, extraSpecs)
        if isLiveMigration:
            maskingViewDict['isLiveMigration'] = True
        else:
            maskingViewDict['isLiveMigration'] = False
        rollbackDict = self.masking.setup_masking_view(
            self.conn, maskingViewDict, extraSpecs)
        # Find host lun id again after the volume is exported to the host.
        deviceInfoDict = self.find_device_number(volume, connector['host'])
        if 'hostlunid' not in deviceInfoDict:
            # Did not successfully attach to host,
            # so a rollback for FAST is required.
            LOG.error(_LE("Error Attaching volume %(vol)s."),
                      {'vol': volumeName})
            if ((rollbackDict['fastPolicyName'] is not None) or
                    (rollbackDict['isV3'] is not None)):
                (self.masking
                    ._check_if_rollback_action_for_masking_required(
                        self.conn, rollbackDict))
            exception_message = (_("Error Attaching volume %(vol)s.")
                                 % {'vol': volumeName})
            raise exception.VolumeBackendAPIException(
                data=exception_message)
        return deviceInfoDict
def _is_same_host(self, connector, deviceInfoDict):
"""Check if the host is the same.
Check if the host to attach to is the same host
that is already attached. This is necessary for
live migration.
:params connector: the connector object
:params deviceInfoDict: the device information dictionary
:returns: boolean -- True if the host is the same, False otherwise.
"""
if 'host' in connector:
currentHost = connector['host']
if ('maskingview' in deviceInfoDict and
deviceInfoDict['maskingview'] is not None):
if currentHost in deviceInfoDict['maskingview']:
return True
return False
def terminate_connection(self, volume, connector):
"""Disallow connection from connector.
:params volume: the volume Object
:params connector: the connector Object
"""
volumename = volume['name']
LOG.info(_LI("Terminate connection: %(volume)s."),
{'volume': volumename})
self._unmap_lun(volume, connector)
    def extend_volume(self, volume, newSize):
        """Extends an existing volume.

        Prerequisites:
        1. The volume must be composite e.g StorageVolume.EMCIsComposite=True
        2. The volume can only be concatenated
           e.g StorageExtent.IsConcatenated=True

        :params volume: the volume Object
        :params newSize: the new size to increase the volume to
        :returns: dict -- modifiedVolumeDict - the extended volume Object
        :raises: VolumeBackendAPIException
        """
        originalVolumeSize = volume['size']
        volumeName = volume['name']
        extraSpecs = self._initial_setup(volume)
        self.conn = self._get_ecom_connection()
        volumeInstance = self._find_lun(volume)
        if volumeInstance is None:
            exceptionMessage = (_("Cannot find Volume: %(volumename)s. "
                                  "Extend operation.  Exiting....")
                                % {'volumename': volumeName})
            LOG.error(exceptionMessage)
            raise exception.VolumeBackendAPIException(data=exceptionMessage)
        # Shrinking is not supported; reject a smaller target size.
        if int(originalVolumeSize) > int(newSize):
            exceptionMessage = (_(
                "Your original size: %(originalVolumeSize)s GB is greater "
                "than: %(newSize)s GB. Only Extend is supported. Exiting...")
                % {'originalVolumeSize': originalVolumeSize,
                   'newSize': newSize})
            LOG.error(exceptionMessage)
            raise exception.VolumeBackendAPIException(data=exceptionMessage)
        additionalVolumeSize = six.text_type(
            int(newSize) - int(originalVolumeSize))
        additionalVolumeSize = self.utils.convert_gb_to_bits(
            additionalVolumeSize)
        # This is V2
        rc, modifiedVolumeDict = self._extend_composite_volume(
            volumeInstance, volumeName, newSize, additionalVolumeSize,
            extraSpecs)
        # Check the occupied space of the new extended volume.
        extendedVolumeInstance = self.utils.find_volume_instance(
            self.conn, modifiedVolumeDict, volumeName)
        extendedVolumeSize = self.utils.get_volume_size(
            self.conn, extendedVolumeInstance)
        LOG.debug(
            "The actual volume size of the extended volume: %(volumeName)s "
            "is %(volumeSize)s.",
            {'volumeName': volumeName,
             'volumeSize': extendedVolumeSize})
        # If the requested size and the actual size don't
        # tally throw an exception.
        newSizeBits = self.utils.convert_gb_to_bits(newSize)
        diffVolumeSize = self.utils.compare_size(
            newSizeBits, extendedVolumeSize)
        if diffVolumeSize != 0:
            exceptionMessage = (_(
                "The requested size : %(requestedSize)s is not the same as "
                "resulting size: %(resultSize)s.")
                % {'requestedSize': newSizeBits,
                   'resultSize': extendedVolumeSize})
            LOG.error(exceptionMessage)
            raise exception.VolumeBackendAPIException(data=exceptionMessage)
        LOG.debug(
            "Leaving extend_volume: %(volumeName)s. "
            "Return code: %(rc)lu, "
            "volume dict: %(name)s.",
            {'volumeName': volumeName,
             'rc': rc,
             'name': modifiedVolumeDict})
        return modifiedVolumeDict
    def update_volume_stats(self):
        """Retrieve stats info.

        :returns: dict -- scheduler stats with one pool entry per
                  configured array/pool (V3 pools also carry the SLO).
        """
        pools = []
        backendName = self.pool_info['backend_name']
        for arrayInfo in self.pool_info['arrays_info']:
            self._set_ecom_credentials(arrayInfo)
            # Check what type of array it is
            isV3 = self.utils.isArrayV3(self.conn, arrayInfo['SerialNumber'])
            if isV3:
                location_info, total_capacity_gb, free_capacity_gb = (
                    self._update_srp_stats(arrayInfo))
                poolName = ("%(slo)s+%(poolName)s+%(array)s"
                            % {'slo': arrayInfo['SLO'],
                               'poolName': arrayInfo['PoolName'],
                               'array': arrayInfo['SerialNumber']})
            else:
                # This is V2
                location_info, total_capacity_gb, free_capacity_gb = (
                    self._update_pool_stats(backendName, arrayInfo))
                poolName = ("%(poolName)s+%(array)s"
                            % {'poolName': arrayInfo['PoolName'],
                               'array': arrayInfo['SerialNumber']})
            pool = {'pool_name': poolName,
                    'total_capacity_gb': total_capacity_gb,
                    'free_capacity_gb': free_capacity_gb,
                    'reserved_percentage': 0,
                    'QoS_support': False,
                    'location_info': location_info,
                    'consistencygroup_support': True}
            pools.append(pool)
        data = {'vendor_name': "EMC",
                'driver_version': self.version,
                'storage_protocol': 'unknown',
                'volume_backend_name': self.pool_info['backend_name'] or
                self.__class__.__name__,
                # Use zero capacities here so we always use a pool.
                'total_capacity_gb': 0,
                'free_capacity_gb': 0,
                'reserved_percentage': 0,
                'pools': pools}
        return data
    def _update_srp_stats(self, arrayInfo):
        """Update SRP stats for a V3 array.

        :param arrayInfo: array information
        :returns: tuple -- (location_info, totalManagedSpaceGbs,
                  remainingManagedSpaceGbs)
        """
        totalManagedSpaceGbs, remainingManagedSpaceGbs = (
            self.provisionv3.get_srp_pool_stats(self.conn,
                                                arrayInfo))
        LOG.info(_LI(
            "Capacity stats for SRP pool %(poolName)s on array "
            "%(arrayName)s total_capacity_gb=%(total_capacity_gb)lu, "
            "free_capacity_gb=%(free_capacity_gb)lu"),
            {'poolName': arrayInfo['PoolName'],
             'arrayName': arrayInfo['SerialNumber'],
             'total_capacity_gb': totalManagedSpaceGbs,
             'free_capacity_gb': remainingManagedSpaceGbs})
        # location_info encodes array/pool/SLO/workload for the scheduler.
        location_info = ("%(arrayName)s#%(poolName)s#%(slo)s#%(workload)s"
                         % {'arrayName': arrayInfo['SerialNumber'],
                            'poolName': arrayInfo['PoolName'],
                            'slo': arrayInfo['SLO'],
                            'workload': arrayInfo['Workload']})
        return location_info, totalManagedSpaceGbs, remainingManagedSpaceGbs
    def retype(self, ctxt, volume, new_type, diff, host):
        """Migrate volume to another host using retype.

        :param ctxt: context
        :param volume: the volume object including the volume_type_id
        :param new_type: the new volume type.
        :param diff: Unused parameter.
        :param host: The host dict holding the relevant target(destination)
            information
        :returns: boolean -- True if retype succeeded, False if error
        """
        volumeName = volume['name']
        volumeStatus = volume['status']
        LOG.info(_LI("Migrating using retype Volume: %(volume)s."),
                 {'volume': volumeName})
        extraSpecs = self._initial_setup(volume)
        self.conn = self._get_ecom_connection()
        volumeInstance = self._find_lun(volume)
        if volumeInstance is None:
            LOG.error(_LE("Volume %(name)s not found on the array. "
                          "No volume to migrate using retype."),
                      {'name': volumeName})
            return False
        # V3 retypes change SLO/workload; V2 retypes move between pools
        # and/or FAST policies.
        if extraSpecs[ISV3]:
            return self._slo_workload_migration(volumeInstance, volume, host,
                                                volumeName, volumeStatus,
                                                new_type, extraSpecs)
        else:
            return self._pool_migration(volumeInstance, volume, host,
                                        volumeName, volumeStatus,
                                        extraSpecs[FASTPOLICY],
                                        new_type, extraSpecs)
    def migrate_volume(self, ctxt, volume, host, new_type=None):
        """Migrate volume to another host.

        Intentionally a no-op: this driver only supports migration through
        retype (see the warning text below for the CLI invocation).

        :param ctxt: context
        :param volume: the volume object including the volume_type_id
        :param host: the host dict holding the relevant target(destination)
            information
        :param new_type: unused, defaults to None
        :returns: boolean -- Always returns True
        :returns: dict -- Empty dict {}
        """
        LOG.warning(_LW("The VMAX plugin only supports Retype. "
                        "If a pool based migration is necessary "
                        "this will happen on a Retype "
                        "From the command line: "
                        "cinder --os-volume-api-version 2 retype <volumeId> "
                        "<volumeType> --migration-policy on-demand"))
        return True, {}
    def _migrate_volume(
            self, volume, volumeInstance, targetPoolName,
            targetFastPolicyName, sourceFastPolicyName, extraSpecs,
            new_type=None):
        """Migrate volume to another host.

        Runs the pool-to-pool migration, and when FAST policies are
        involved, repairs storage group membership (cleanup) or returns
        the volume to its source pool (rollback) if a step fails.

        :param volume: the volume object including the volume_type_id
        :param volumeInstance: the volume instance
        :param targetPoolName: the target poolName
        :param targetFastPolicyName: the target FAST policy name, can be None
        :param sourceFastPolicyName: the source FAST policy name, can be None
        :param extraSpecs: extra specifications
        :param new_type: None
        :returns: boolean -- True/False
        :returns: list -- empty list
        """
        volumeName = volume['name']
        storageSystemName = volumeInstance['SystemName']
        # Capture the source pool up front so we can roll back to it later.
        sourcePoolInstanceName = self.utils.get_assoc_pool_from_volume(
            self.conn, volumeInstance.path)
        moved, rc = self._migrate_volume_from(
            volume, volumeInstance, targetPoolName, sourceFastPolicyName,
            extraSpecs)
        if moved is False and sourceFastPolicyName is not None:
            # Return the volume to the default source fast policy storage
            # group because the migrate was unsuccessful.
            LOG.warning(_LW(
                "Failed to migrate: %(volumeName)s from "
                "default source storage group "
                "for FAST policy: %(sourceFastPolicyName)s. "
                "Attempting cleanup... "),
                {'volumeName': volumeName,
                 'sourceFastPolicyName': sourceFastPolicyName})
            # If the volume is still in the original pool only the storage
            # group membership needs repairing; otherwise fully roll back.
            if sourcePoolInstanceName == self.utils.get_assoc_pool_from_volume(
                self.conn, volumeInstance.path):
                self._migrate_cleanup(self.conn, volumeInstance,
                                      storageSystemName, sourceFastPolicyName,
                                      volumeName, extraSpecs)
            else:
                # Migrate was successful but still issues.
                self._migrate_rollback(
                    self.conn, volumeInstance, storageSystemName,
                    sourceFastPolicyName, volumeName, sourcePoolInstanceName,
                    extraSpecs)

            return moved
        # Normalize the literal string 'None' (from config) to a real None.
        if targetFastPolicyName == 'None':
            targetFastPolicyName = None
        if moved is True and targetFastPolicyName is not None:
            if not self._migrate_volume_fast_target(
                    volumeInstance, storageSystemName,
                    targetFastPolicyName, volumeName, extraSpecs):
                LOG.warning(_LW(
                    "Attempting a rollback of: %(volumeName)s to "
                    "original pool %(sourcePoolInstanceName)s."),
                    {'volumeName': volumeName,
                     'sourcePoolInstanceName': sourcePoolInstanceName})
                self._migrate_rollback(
                    self.conn, volumeInstance, storageSystemName,
                    sourceFastPolicyName, volumeName, sourcePoolInstanceName,
                    extraSpecs)
        # NOTE(review): rc == 0 forces moved True even after a failed
        # fast-target add and the rollback above -- confirm intended.
        if rc == 0:
            moved = True
        return moved
def _migrate_rollback(self, conn, volumeInstance,
storageSystemName, sourceFastPolicyName,
volumeName, sourcePoolInstanceName, extraSpecs):
"""Full rollback.
Failed on final step on adding migrated volume to new target
default storage group for the target FAST policy.
:param conn: connection info to ECOM
:param volumeInstance: the volume instance
:param storageSystemName: the storage system name
:param sourceFastPolicyName: the source FAST policy name
:param volumeName: the volume Name
:param sourcePoolInstanceName: the instance name of the source pool
:param extraSpecs: extra specifications
"""
LOG.warning(_LW("_migrate_rollback on : %(volumeName)s."),
{'volumeName': volumeName})
storageRelocationService = self.utils.find_storage_relocation_service(
conn, storageSystemName)
try:
self.provision.migrate_volume_to_storage_pool(
conn, storageRelocationService, volumeInstance.path,
sourcePoolInstanceName, extraSpecs)
except Exception:
LOG.error(_LE(
"Failed to return volume %(volumeName)s to "
"original storage pool. Please contact your system "
"administrator to return it to the correct location."),
{'volumeName': volumeName})
if sourceFastPolicyName is not None:
self.add_to_default_SG(
conn, volumeInstance, storageSystemName, sourceFastPolicyName,
volumeName, extraSpecs)
def _migrate_cleanup(self, conn, volumeInstance,
storageSystemName, sourceFastPolicyName,
volumeName, extraSpecs):
"""If the migrate fails, put volume back to source FAST SG.
:param conn: connection info to ECOM
:param volumeInstance: the volume instance
:param storageSystemName: the storage system name
:param sourceFastPolicyName: the source FAST policy name
:param volumeName: the volume Name
:param extraSpecs: extra specifications
:returns: boolean -- True/False
"""
LOG.warning(_LW("_migrate_cleanup on : %(volumeName)s."),
{'volumeName': volumeName})
return_to_default = True
controllerConfigurationService = (
self.utils.find_controller_configuration_service(
conn, storageSystemName))
# Check to see what SG it is in.
assocStorageGroupInstanceNames = (
self.utils.get_storage_groups_from_volume(conn,
volumeInstance.path))
# This is the SG it should be in.
defaultStorageGroupInstanceName = (
self.fast.get_policy_default_storage_group(
conn, controllerConfigurationService, sourceFastPolicyName))
for assocStorageGroupInstanceName in assocStorageGroupInstanceNames:
# It is in the incorrect storage group.
if (assocStorageGroupInstanceName !=
defaultStorageGroupInstanceName):
self.provision.remove_device_from_storage_group(
conn, controllerConfigurationService,
assocStorageGroupInstanceName,
volumeInstance.path, volumeName, extraSpecs)
else:
# The volume is already in the default.
return_to_default = False
if return_to_default:
self.add_to_default_SG(
conn, volumeInstance, storageSystemName, sourceFastPolicyName,
volumeName, extraSpecs)
return return_to_default
def _migrate_volume_fast_target(
self, volumeInstance, storageSystemName,
targetFastPolicyName, volumeName, extraSpecs):
"""If the target host is FAST enabled.
If the target host is FAST enabled then we need to add it to the
default storage group for that policy.
:param volumeInstance: the volume instance
:param storageSystemName: the storage system name
:param targetFastPolicyName: the target fast policy name
:param volumeName: the volume name
:param extraSpecs: extra specifications
:returns: boolean -- True/False
"""
falseRet = False
LOG.info(_LI(
"Adding volume: %(volumeName)s to default storage group "
"for FAST policy: %(fastPolicyName)s."),
{'volumeName': volumeName,
'fastPolicyName': targetFastPolicyName})
controllerConfigurationService = (
self.utils.find_controller_configuration_service(
self.conn, storageSystemName))
defaultStorageGroupInstanceName = (
self.fast.get_or_create_default_storage_group(
self.conn, controllerConfigurationService,
targetFastPolicyName, volumeInstance, extraSpecs))
if defaultStorageGroupInstanceName is None:
LOG.error(_LE(
"Unable to create or get default storage group for FAST policy"
": %(fastPolicyName)s."),
{'fastPolicyName': targetFastPolicyName})
return falseRet
defaultStorageGroupInstanceName = (
self.fast.add_volume_to_default_storage_group_for_fast_policy(
self.conn, controllerConfigurationService, volumeInstance,
volumeName, targetFastPolicyName, extraSpecs))
if defaultStorageGroupInstanceName is None:
LOG.error(_LE(
"Failed to verify that volume was added to storage group for "
"FAST policy: %(fastPolicyName)s."),
{'fastPolicyName': targetFastPolicyName})
return falseRet
return True
def _migrate_volume_from(self, volume, volumeInstance,
targetPoolName, sourceFastPolicyName,
extraSpecs):
"""Check FAST policies and migrate from source pool.
:param volume: the volume object including the volume_type_id
:param volumeInstance: the volume instance
:param targetPoolName: the target poolName
:param sourceFastPolicyName: the source FAST policy name, can be None
:param extraSpecs: extra specifications
:returns: boolean -- True/False
:returns: int -- the return code from migrate operation
"""
falseRet = (False, -1)
volumeName = volume['name']
storageSystemName = volumeInstance['SystemName']
LOG.debug("sourceFastPolicyName is : %(sourceFastPolicyName)s.",
{'sourceFastPolicyName': sourceFastPolicyName})
# If the source volume is FAST enabled it must first be removed
# from the default storage group for that policy.
if sourceFastPolicyName is not None:
self.remove_from_default_SG(
self.conn, volumeInstance, storageSystemName,
sourceFastPolicyName, volumeName, extraSpecs)
# Migrate from one pool to another.
storageRelocationService = self.utils.find_storage_relocation_service(
self.conn, storageSystemName)
targetPoolInstanceName = self.utils.get_pool_by_name(
self.conn, targetPoolName, storageSystemName)
if targetPoolInstanceName is None:
LOG.error(_LE(
"Error finding target pool instance name for pool: "
"%(targetPoolName)s."),
{'targetPoolName': targetPoolName})
return falseRet
try:
rc = self.provision.migrate_volume_to_storage_pool(
self.conn, storageRelocationService, volumeInstance.path,
targetPoolInstanceName, extraSpecs)
except Exception:
# Rollback by deleting the volume if adding the volume to the
# default storage group were to fail.
LOG.exception(_LE(
"Error migrating volume: %(volumename)s. "
"to target pool %(targetPoolName)s."),
{'volumename': volumeName,
'targetPoolName': targetPoolName})
return falseRet
# Check that the volume is now migrated to the correct storage pool,
# if it is terminate the migrate session.
foundPoolInstanceName = self.utils.get_assoc_pool_from_volume(
self.conn, volumeInstance.path)
if (foundPoolInstanceName is None or
(foundPoolInstanceName['InstanceID'] !=
targetPoolInstanceName['InstanceID'])):
LOG.error(_LE(
"Volume : %(volumeName)s. was not successfully migrated to "
"target pool %(targetPoolName)s."),
{'volumeName': volumeName,
'targetPoolName': targetPoolName})
return falseRet
else:
LOG.debug("Terminating migration session on: %(volumeName)s.",
{'volumeName': volumeName})
self.provision._terminate_migrate_session(
self.conn, volumeInstance.path, extraSpecs)
if rc == 0:
moved = True
return moved, rc
def remove_from_default_SG(
self, conn, volumeInstance, storageSystemName,
sourceFastPolicyName, volumeName, extraSpecs):
"""For FAST, remove volume from default storage group.
:param conn: connection info to ECOM
:param volumeInstance: the volume instance
:param storageSystemName: the storage system name
:param sourceFastPolicyName: the source FAST policy name
:param volumeName: the volume Name
:param extraSpecs: extra specifications
:raises: VolumeBackendAPIException
"""
controllerConfigurationService = (
self.utils.find_controller_configuration_service(
conn, storageSystemName))
try:
defaultStorageGroupInstanceName = (
self.masking.remove_device_from_default_storage_group(
conn, controllerConfigurationService,
volumeInstance.path, volumeName, sourceFastPolicyName,
extraSpecs))
except Exception:
exceptionMessage = (_(
"Failed to remove: %(volumename)s. "
"from the default storage group for "
"FAST policy %(fastPolicyName)s.")
% {'volumename': volumeName,
'fastPolicyName': sourceFastPolicyName})
LOG.exception(exceptionMessage)
raise exception.VolumeBackendAPIException(data=exceptionMessage)
if defaultStorageGroupInstanceName is None:
LOG.warning(_LW(
"The volume: %(volumename)s "
"was not first part of the default storage "
"group for FAST policy %(fastPolicyName)s."),
{'volumename': volumeName,
'fastPolicyName': sourceFastPolicyName})
def add_to_default_SG(
self, conn, volumeInstance, storageSystemName,
targetFastPolicyName, volumeName, extraSpecs):
"""For FAST, add volume to default storage group.
:param conn: connection info to ECOM
:param volumeInstance: the volume instance
:param storageSystemName: the storage system name
:param targetFastPolicyName: the target FAST policy name
:param volumeName: the volume Name
:param extraSpecs: extra specifications
"""
controllerConfigurationService = (
self.utils.find_controller_configuration_service(
conn, storageSystemName))
assocDefaultStorageGroupName = (
self.fast
.add_volume_to_default_storage_group_for_fast_policy(
conn, controllerConfigurationService, volumeInstance,
volumeName, targetFastPolicyName, extraSpecs))
if assocDefaultStorageGroupName is None:
LOG.error(_LE(
"Failed to add %(volumeName)s "
"to default storage group for fast policy "
"%(fastPolicyName)s."),
{'volumeName': volumeName,
'fastPolicyName': targetFastPolicyName})
def _is_valid_for_storage_assisted_migration_v3(
self, volumeInstanceName, host, sourceArraySerialNumber,
sourcePoolName, volumeName, volumeStatus, sgName):
"""Check if volume is suitable for storage assisted (pool) migration.
:param volumeInstanceName: the volume instance id
:param host: the host object
:param sourceArraySerialNumber: the array serial number of
the original volume
:param sourcePoolName: the pool name of the original volume
:param volumeName: the name of the volume to be migrated
:param volumeStatus: the status of the volume
:param sgName: storage group name
:returns: boolean -- True/False
:returns: string -- targetSlo
:returns: string -- targetWorkload
"""
falseRet = (False, None, None)
if 'location_info' not in host['capabilities']:
LOG.error(_LE('Error getting array, pool, SLO and workload.'))
return falseRet
info = host['capabilities']['location_info']
LOG.debug("Location info is : %(info)s.",
{'info': info})
try:
infoDetail = info.split('#')
targetArraySerialNumber = infoDetail[0]
targetPoolName = infoDetail[1]
targetSlo = infoDetail[2]
targetWorkload = infoDetail[3]
except KeyError:
LOG.error(_LE("Error parsing array, pool, SLO and workload."))
if targetArraySerialNumber not in sourceArraySerialNumber:
LOG.error(_LE(
"The source array : %(sourceArraySerialNumber)s does not "
"match the target array: %(targetArraySerialNumber)s "
"skipping storage-assisted migration."),
{'sourceArraySerialNumber': sourceArraySerialNumber,
'targetArraySerialNumber': targetArraySerialNumber})
return falseRet
if targetPoolName not in sourcePoolName:
LOG.error(_LE(
"Only SLO/workload migration within the same SRP Pool "
"is supported in this version "
"The source pool : %(sourcePoolName)s does not "
"match the target array: %(targetPoolName)s. "
"Skipping storage-assisted migration."),
{'sourcePoolName': sourcePoolName,
'targetPoolName': targetPoolName})
return falseRet
foundStorageGroupInstanceName = (
self.utils.get_storage_group_from_volume(
self.conn, volumeInstanceName, sgName))
if foundStorageGroupInstanceName is None:
LOG.warning(_LW(
"Volume: %(volumeName)s is not currently "
"belonging to any storage group."),
{'volumeName': volumeName})
else:
storageGroupInstance = self.conn.GetInstance(
foundStorageGroupInstanceName)
emcFastSetting = self.utils._get_fast_settings_from_storage_group(
storageGroupInstance)
targetCombination = ("%(targetSlo)s+%(targetWorkload)s"
% {'targetSlo': targetSlo,
'targetWorkload': targetWorkload})
if targetCombination in emcFastSetting:
LOG.error(_LE(
"No action required. Volume: %(volumeName)s is "
"already part of slo/workload combination: "
"%(targetCombination)s."),
{'volumeName': volumeName,
'targetCombination': targetCombination})
return falseRet
return (True, targetSlo, targetWorkload)
def _is_valid_for_storage_assisted_migration(
self, volumeInstanceName, host, sourceArraySerialNumber,
volumeName, volumeStatus):
"""Check if volume is suitable for storage assisted (pool) migration.
:param volumeInstanceName: the volume instance id
:param host: the host object
:param sourceArraySerialNumber: the array serial number of
the original volume
:param volumeName: the name of the volume to be migrated
:param volumeStatus: the status of the volume e.g
:returns: boolean -- True/False
:returns: string -- targetPool
:returns: string -- targetFastPolicy
"""
falseRet = (False, None, None)
if 'location_info' not in host['capabilities']:
LOG.error(_LE("Error getting target pool name and array."))
return falseRet
info = host['capabilities']['location_info']
LOG.debug("Location info is : %(info)s.",
{'info': info})
try:
infoDetail = info.split('#')
targetArraySerialNumber = infoDetail[0]
targetPoolName = infoDetail[1]
targetFastPolicy = infoDetail[2]
except KeyError:
LOG.error(_LE(
"Error parsing target pool name, array, and fast policy."))
if targetArraySerialNumber not in sourceArraySerialNumber:
LOG.error(_LE(
"The source array : %(sourceArraySerialNumber)s does not "
"match the target array: %(targetArraySerialNumber)s, "
"skipping storage-assisted migration."),
{'sourceArraySerialNumber': sourceArraySerialNumber,
'targetArraySerialNumber': targetArraySerialNumber})
return falseRet
# Get the pool from the source array and check that is different
# to the pool in the target array.
assocPoolInstanceName = self.utils.get_assoc_pool_from_volume(
self.conn, volumeInstanceName)
assocPoolInstance = self.conn.GetInstance(
assocPoolInstanceName)
if assocPoolInstance['ElementName'] == targetPoolName:
LOG.error(_LE(
"No action required. Volume: %(volumeName)s is "
"already part of pool: %(pool)s."),
{'volumeName': volumeName,
'pool': targetPoolName})
return falseRet
LOG.info(_LI("Volume status is: %s."), volumeStatus)
if (host['capabilities']['storage_protocol'] != self.protocol and
(volumeStatus != 'available' and volumeStatus != 'retyping')):
LOG.error(_LE(
"Only available volumes can be migrated between "
"different protocols."))
return falseRet
return (True, targetPoolName, targetFastPolicy)
def _set_config_file_and_get_extra_specs(self, volume, volumeTypeId=None):
"""Given the volume object get the associated volumetype.
Given the volume object get the associated volumetype and the
extra specs associated with it.
Based on the name of the config group, register the config file
:param volume: the volume object including the volume_type_id
:param volumeTypeId: Optional override of volume['volume_type_id']
:returns: dict -- the extra specs dict
:returns: string -- configuration file
"""
extraSpecs = self.utils.get_volumetype_extraspecs(volume, volumeTypeId)
configGroup = None
# If there are no extra specs then the default case is assumed.
if extraSpecs:
configGroup = self.configuration.config_group
configurationFile = self._register_config_file_from_config_group(
configGroup)
return extraSpecs, configurationFile
    def _get_ecom_connection(self):
        """Get the ecom connection.

        Builds a pywbem WBEMConnection to the ECOM server, with SSL
        options when ecomUseSSL is set.

        :returns: pywbem.WBEMConnection -- conn, the ecom connection
        :raises: VolumeBackendAPIException
        """
        if self.ecomUseSSL:
            # Probe the installed pywbem: newer releases accept the
            # ca_certs/no_verification keyword arguments on the
            # WBEMConnection constructor; older ones do not.
            argspec = inspect.getargspec(pywbem.WBEMConnection.__init__)
            if any("ca_certs" in s for s in argspec.args):
                updatedPywbem = True
            else:
                updatedPywbem = False
            # Replace pywbem's request function with the driver's own
            # HTTPS implementation for SSL connections.
            pywbem.cim_http.wbem_request = emc_vmax_https.wbem_request
            if updatedPywbem:
                conn = pywbem.WBEMConnection(
                    self.url,
                    (self.user, self.passwd),
                    default_namespace='root/emc',
                    x509={"key_file":
                          self.configuration.safe_get(
                              'driver_client_cert_key'),
                          "cert_file":
                          self.configuration.safe_get('driver_client_cert')},
                    ca_certs=self.ecomCACert,
                    no_verification=self.ecomNoVerification)
            else:
                # Older pywbem: the SSL verification settings cannot be
                # passed here; only the client certificate pair is used.
                conn = pywbem.WBEMConnection(
                    self.url,
                    (self.user, self.passwd),
                    default_namespace='root/emc',
                    x509={"key_file":
                          self.configuration.safe_get(
                              'driver_client_cert_key'),
                          "cert_file":
                          self.configuration.safe_get('driver_client_cert')})
        else:
            # Plain HTTP connection.
            conn = pywbem.WBEMConnection(
                self.url,
                (self.user, self.passwd),
                default_namespace='root/emc')
        conn.debug = True
        # Defensive check; the constructor is not expected to return None.
        if conn is None:
            exception_message = (_("Cannot connect to ECOM server."))
            raise exception.VolumeBackendAPIException(data=exception_message)
        return conn
def _find_pool_in_array(self, arrayStr, poolNameInStr, isV3):
"""Find a pool based on the pool name on a given array.
:param arrayStr: the array Serial number (String)
:param poolNameInStr: the name of the poolname (String)
:param isv3: True/False
:returns: foundPoolInstanceName - the CIM Instance Name of the Pool
:returns: string -- systemNameStr
:raises: VolumeBackendAPIException
"""
foundPoolInstanceName = None
systemNameStr = None
storageSystemInstanceName = self.utils.find_storageSystem(
self.conn, arrayStr)
if isV3:
foundPoolInstanceName, systemNameStr = (
self.utils.get_pool_and_system_name_v3(
self.conn, storageSystemInstanceName, poolNameInStr))
else:
foundPoolInstanceName, systemNameStr = (
self.utils.get_pool_and_system_name_v2(
self.conn, storageSystemInstanceName, poolNameInStr))
if foundPoolInstanceName is None:
exceptionMessage = (_("Pool %(poolNameInStr)s is not found.")
% {'poolNameInStr': poolNameInStr})
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(data=exceptionMessage)
if systemNameStr is None:
exception_message = (_("Storage system not found for pool "
"%(poolNameInStr)s.")
% {'poolNameInStr': poolNameInStr})
LOG.error(exception_message)
raise exception.VolumeBackendAPIException(data=exception_message)
LOG.debug("Pool: %(pool)s SystemName: %(systemname)s.",
{'pool': foundPoolInstanceName,
'systemname': systemNameStr})
return foundPoolInstanceName, systemNameStr
def _find_lun(self, volume):
"""Given the volume get the instance from it.
:param volume: volume object
:returns: foundVolumeinstance
"""
foundVolumeinstance = None
volumename = volume['name']
loc = volume['provider_location']
if self.conn is None:
self.conn = self._get_ecom_connection()
if isinstance(loc, six.string_types):
name = eval(loc)
keys = name['keybindings']
systemName = keys['SystemName']
prefix1 = 'SYMMETRIX+'
prefix2 = 'SYMMETRIX-+-'
smiversion = self.utils.get_smi_version(self.conn)
if smiversion > SMI_VERSION_8 and prefix1 in systemName:
keys['SystemName'] = systemName.replace(prefix1, prefix2)
name['keybindings'] = keys
instancename = self.utils.get_instance_name(
name['classname'], name['keybindings'])
# Allow for an external app to delete the volume.
LOG.debug("Volume instance name: %(in)s",
{'in': instancename})
try:
foundVolumeinstance = self.conn.GetInstance(instancename)
except Exception:
foundVolumeinstance = None
if foundVolumeinstance is None:
LOG.debug("Volume %(volumename)s not found on the array.",
{'volumename': volumename})
else:
LOG.debug("Volume name: %(volumename)s Volume instance: "
"%(foundVolumeinstance)s.",
{'volumename': volumename,
'foundVolumeinstance': foundVolumeinstance})
return foundVolumeinstance
def _find_storage_sync_sv_sv(self, snapshot, volume, extraSpecs,
waitforsync=True):
"""Find the storage synchronized name.
:param snapshot: snapshot object
:param volume: volume object
:param extraSpecs: extra specifications
:param waitforsync: boolean -- Wait for Solutions Enabler sync.
:returns: string -- foundsyncname
:returns: string -- storage_system
"""
snapshotname = snapshot['name']
volumename = volume['name']
LOG.debug("Source: %(volumename)s Target: %(snapshotname)s.",
{'volumename': volumename, 'snapshotname': snapshotname})
snapshot_instance = self._find_lun(snapshot)
volume_instance = self._find_lun(volume)
storage_system = volume_instance['SystemName']
classname = 'SE_StorageSynchronized_SV_SV'
bindings = {'SyncedElement': snapshot_instance.path,
'SystemElement': volume_instance.path}
foundsyncname = self.utils.get_instance_name(classname, bindings)
if foundsyncname is None:
LOG.debug(
"Source: %(volumename)s Target: %(snapshotname)s. "
"Storage Synchronized not found.",
{'volumename': volumename,
'snapshotname': snapshotname})
else:
LOG.debug("Storage system: %(storage_system)s. "
"Storage Synchronized instance: %(sync)s.",
{'storage_system': storage_system,
'sync': foundsyncname})
# Wait for SE_StorageSynchronized_SV_SV to be fully synced.
if waitforsync:
self.utils.wait_for_sync(self.conn, foundsyncname,
extraSpecs)
return foundsyncname, storage_system
def _find_initiator_names(self, connector):
foundinitiatornames = []
iscsi = 'iscsi'
fc = 'fc'
name = 'initiator name'
if self.protocol.lower() == iscsi and connector['initiator']:
foundinitiatornames.append(connector['initiator'])
elif self.protocol.lower() == fc and connector['wwpns']:
for wwn in connector['wwpns']:
foundinitiatornames.append(wwn)
name = 'world wide port names'
if foundinitiatornames is None or len(foundinitiatornames) == 0:
msg = (_("Error finding %s.") % name)
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
LOG.debug("Found %(name)s: %(initiator)s.",
{'name': name,
'initiator': foundinitiatornames})
return foundinitiatornames
    def find_device_number(self, volume, host):
        """Given the volume dict find a device number.

        Find a device number that a host can see
        for a volume.

        :param volume: the volume dict
        :param host: host from connector
        :returns: dict -- the data dict
        """
        maskedvols = []
        data = {}
        foundNumDeviceNumber = None
        foundMaskingViewName = None
        volumeName = volume['name']
        volumeInstance = self._find_lun(volume)
        storageSystemName = volumeInstance['SystemName']
        # Walk every ProtocolControllerForUnit association of the volume;
        # each links the volume to a lun-masking controller.
        unitnames = self.conn.ReferenceNames(
            volumeInstance.path,
            ResultClass='CIM_ProtocolControllerForUnit')
        for unitname in unitnames:
            controller = unitname['Antecedent']
            classname = controller['CreationClassName']
            index = classname.find('Symm_LunMaskingView')
            if index > -1:
                # DeviceNumber is reported as a hexadecimal string.
                unitinstance = self.conn.GetInstance(unitname,
                                                     LocalOnly=False)
                numDeviceNumber = int(unitinstance['DeviceNumber'],
                                      16)
                foundNumDeviceNumber = numDeviceNumber
                controllerInstance = self.conn.GetInstance(controller,
                                                           LocalOnly=False)
                propertiesList = controllerInstance.properties.items()
                # The controller's ElementName is the masking view name.
                for properties in propertiesList:
                    if properties[0] == 'ElementName':
                        cimProperties = properties[1]
                        foundMaskingViewName = cimProperties.value
                devicedict = {'hostlunid': foundNumDeviceNumber,
                              'storagesystem': storageSystemName,
                              'maskingview': foundMaskingViewName}
                maskedvols.append(devicedict)
        if not maskedvols:
            LOG.debug(
                "Device number not found for volume "
                "%(volumeName)s %(volumeInstance)s.",
                {'volumeName': volumeName,
                 'volumeInstance': volumeInstance.path})
        else:
            # The masking view name embeds the host as '-<host>-'; select
            # the entry that belongs to the connecting host.
            hoststr = ("-%(host)s-"
                       % {'host': host})
            for maskedvol in maskedvols:
                if hoststr.lower() in maskedvol['maskingview'].lower():
                    data = maskedvol
                    break
            if not data:
                LOG.warning(_LW(
                    "Volume is masked but not to host %(host)s as "
                    "expected. Returning empty dictionary."),
                    {'host': hoststr})
        LOG.debug("Device info: %(data)s.", {'data': data})
        return data
def get_target_wwns(self, storageSystem, connector):
"""Find target WWNs.
:param storageSystem: the storage system name
:param connector: the connector dict
:returns: list -- targetWwns, the target WWN list
:raises: VolumeBackendAPIException
"""
targetWwns = []
storageHardwareService = self.utils.find_storage_hardwareid_service(
self.conn, storageSystem)
hardwareIdInstances = self._find_storage_hardwareids(
connector, storageHardwareService)
LOG.debug(
"EMCGetTargetEndpoints: Service: %(service)s, "
"Storage HardwareIDs: %(hardwareIds)s.",
{'service': storageHardwareService,
'hardwareIds': hardwareIdInstances})
for hardwareIdInstance in hardwareIdInstances:
LOG.debug("HardwareID instance is: %(hardwareIdInstance)s.",
{'hardwareIdInstance': hardwareIdInstance})
try:
_rc, targetEndpoints = (
self.provision.get_target_endpoints(
self.conn, storageHardwareService, hardwareIdInstance))
except Exception:
errorMessage = (_(
"Unable to get target endpoints for hardwareId "
"%(hardwareIdInstance)s.")
% {'hardwareIdInstance': hardwareIdInstance})
LOG.exception(errorMessage)
raise exception.VolumeBackendAPIException(data=errorMessage)
if targetEndpoints:
endpoints = targetEndpoints['TargetEndpoints']
LOG.debug("There are %(len)lu endpoints.",
{'len': len(endpoints)})
for targetendpoint in endpoints:
wwn = targetendpoint['Name']
# Add target wwn to the list if it is not already there.
if not any(d == wwn for d in targetWwns):
targetWwns.append(wwn)
else:
LOG.error(_LE(
"Target end points do not exist for hardware Id: "
"%(hardwareIdInstance)s."),
{'hardwareIdInstance': hardwareIdInstance})
LOG.debug("Target WWNs: %(targetWwns)s.",
{'targetWwns': targetWwns})
return targetWwns
def _find_storage_hardwareids(
self, connector, hardwareIdManagementService):
"""Find the storage hardware ID instances.
:param connector: the connector dict
:param hardwareIdManagementService: the storage Hardware
management service
:returns: list -- the list of storage hardware ID instances
"""
foundHardwareIdList = []
wwpns = self._find_initiator_names(connector)
hardwareIdInstances = (
self.utils.get_hardware_id_instances_from_array(
self.conn, hardwareIdManagementService))
for hardwareIdInstance in hardwareIdInstances:
storageId = hardwareIdInstance['StorageID']
for wwpn in wwpns:
if wwpn.lower() == storageId.lower():
# Check that the found hardwareId has not been
# deleted. If it has, we don't want to add it to the list.
instance = self.utils.get_existing_instance(
self.conn, hardwareIdInstance.path)
if instance is None:
# HardwareId doesn't exist any more. Skip it.
break
foundHardwareIdList.append(hardwareIdInstance.path)
break
LOG.debug("Storage Hardware IDs for %(wwpns)s is "
"%(foundInstances)s.",
{'wwpns': wwpns,
'foundInstances': foundHardwareIdList})
return foundHardwareIdList
def _register_config_file_from_config_group(self, configGroupName):
"""Given the config group name register the file.
:param configGroupName: the config group name
:returns: string -- configurationFile - name of the configuration file
"""
if configGroupName is None:
return CINDER_EMC_CONFIG_FILE
if hasattr(self.configuration, 'cinder_emc_config_file'):
configurationFile = self.configuration.cinder_emc_config_file
else:
configurationFile = (
("%(prefix)s%(configGroupName)s%(postfix)s"
% {'prefix': CINDER_EMC_CONFIG_FILE_PREFIX,
'configGroupName': configGroupName,
'postfix': CINDER_EMC_CONFIG_FILE_POSTFIX}))
# The file saved in self.configuration may not be the correct one,
# double check.
if configGroupName not in configurationFile:
configurationFile = (
("%(prefix)s%(configGroupName)s%(postfix)s"
% {'prefix': CINDER_EMC_CONFIG_FILE_PREFIX,
'configGroupName': configGroupName,
'postfix': CINDER_EMC_CONFIG_FILE_POSTFIX}))
if os.path.isfile(configurationFile):
LOG.debug("Configuration file : %(configurationFile)s exists.",
{'configurationFile': configurationFile})
else:
exceptionMessage = (_(
"Configuration file %(configurationFile)s does not exist.")
% {'configurationFile': configurationFile})
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(data=exceptionMessage)
return configurationFile
def _set_ecom_credentials(self, arrayInfo):
"""Given the array record set the ecom credentials.
:param arrayInfo: record
:raises: VolumeBackendAPIException
"""
ip = arrayInfo['EcomServerIp']
port = arrayInfo['EcomServerPort']
self.user = arrayInfo['EcomUserName']
self.passwd = arrayInfo['EcomPassword']
self.ecomUseSSL = arrayInfo['EcomUseSSL']
self.ecomCACert = arrayInfo['EcomCACert']
self.ecomNoVerification = arrayInfo['EcomNoVerification']
ip_port = ("%(ip)s:%(port)s"
% {'ip': ip,
'port': port})
if self.ecomUseSSL:
self.url = ("https://%(ip_port)s"
% {'ip_port': ip_port})
else:
self.url = ("http://%(ip_port)s"
% {'ip_port': ip_port})
self.conn = self._get_ecom_connection()
def _initial_setup(self, volume, volumeTypeId=None):
"""Necessary setup to accumulate the relevant information.
The volume object has a host in which we can parse the
config group name. The config group name is the key to our EMC
configuration file. The emc configuration file contains pool name
and array name which are mandatory fields.
FastPolicy is optional.
StripedMetaCount is an extra spec that determines whether
the composite volume should be concatenated or striped.
:param volume: the volume Object
:param volumeTypeId: Optional override of volume['volume_type_id']
:returns: dict -- extra spec dict
:raises: VolumeBackendAPIException
"""
try:
extraSpecs, configurationFile = (
self._set_config_file_and_get_extra_specs(
volume, volumeTypeId))
pool = self._validate_pool(volume)
LOG.debug("Pool returned is %(pool)s.",
{'pool': pool})
arrayInfo = self.utils.parse_file_to_get_array_map(
configurationFile)
poolRecord = self.utils.extract_record(arrayInfo, pool)
if not poolRecord:
exceptionMessage = (_(
"Unable to get corresponding record for pool."))
raise exception.VolumeBackendAPIException(
data=exceptionMessage)
self._set_ecom_credentials(poolRecord)
isV3 = self.utils.isArrayV3(
self.conn, poolRecord['SerialNumber'])
if isV3:
extraSpecs = self._set_v3_extra_specs(extraSpecs, poolRecord)
else:
# V2 extra specs
extraSpecs = self._set_v2_extra_specs(extraSpecs, poolRecord)
except Exception:
import sys
exceptionMessage = (_(
"Unable to get configuration information necessary to "
"create a volume: %(errorMessage)s.")
% {'errorMessage': sys.exc_info()[1]})
raise exception.VolumeBackendAPIException(data=exceptionMessage)
return extraSpecs
def _get_pool_and_storage_system(self, extraSpecs):
"""Given the extra specs get the pool and storage system name.
:param extraSpecs: extra specifications
:returns: poolInstanceName The pool instance name
:returns: string -- the storage system name
:raises: VolumeBackendAPIException
"""
try:
array = extraSpecs[ARRAY]
poolInstanceName, storageSystemStr = self._find_pool_in_array(
array, extraSpecs[POOL], extraSpecs[ISV3])
except Exception:
exceptionMessage = (_(
"You must supply an array in your EMC configuration file."))
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(data=exceptionMessage)
if poolInstanceName is None or storageSystemStr is None:
exceptionMessage = (_(
"Cannot get necessary pool or storage system information."))
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(data=exceptionMessage)
return poolInstanceName, storageSystemStr
    def _populate_masking_dict(self, volume, connector, extraSpecs):
        """Get all the names of the maskingView and subComponents.

        Builds the "OS-<host>-..." naming prefix for the storage group,
        masking view and initiator group from the host, pool/policy and
        protocol, then gathers the volume instance and services needed by
        the masking flow.

        :param volume: the volume object
        :param connector: the connector object
        :param extraSpecs: extra specifications
        :returns: dict -- a dictionary with masking view information
        """
        maskingViewDict = {}
        hostName = connector['host']
        uniqueName = self.utils.generate_unique_trunc_pool(extraSpecs[POOL])
        isV3 = extraSpecs[ISV3]
        maskingViewDict['isV3'] = isV3
        protocol = self.utils.get_short_protocol_type(self.protocol)
        shortHostName = self.utils.get_host_short_name(hostName)
        if isV3:
            # V3 prefixes encode SLO and workload instead of protocol.
            slo = extraSpecs[SLO]
            workload = extraSpecs[WORKLOAD]
            maskingViewDict['slo'] = slo
            maskingViewDict['workload'] = workload
            maskingViewDict['pool'] = uniqueName
            prefix = (
                ("OS-%(shortHostName)s-%(poolName)s-%(slo)s-%(workload)s"
                 % {'shortHostName': shortHostName,
                    'poolName': uniqueName,
                    'slo': slo,
                    'workload': workload}))
        else:
            maskingViewDict['fastPolicy'] = extraSpecs[FASTPOLICY]
            if maskingViewDict['fastPolicy']:
                # With FAST, the truncated policy name (suffixed '-FP')
                # replaces the pool name in the prefix.
                uniqueName = self.utils.generate_unique_trunc_fastpolicy(
                    maskingViewDict['fastPolicy']) + '-FP'
            prefix = (
                ("OS-%(shortHostName)s-%(poolName)s-%(protocol)s"
                 % {'shortHostName': shortHostName,
                    'poolName': uniqueName,
                    'protocol': protocol}))
        maskingViewDict['sgGroupName'] = ("%(prefix)s-SG"
                                          % {'prefix': prefix})
        maskingViewDict['maskingViewName'] = ("%(prefix)s-MV"
                                              % {'prefix': prefix})
        volumeName = volume['name']
        volumeInstance = self._find_lun(volume)
        storageSystemName = volumeInstance['SystemName']
        maskingViewDict['controllerConfigService'] = (
            self.utils.find_controller_configuration_service(
                self.conn, storageSystemName))
        # The portGroup is gotten from emc xml config file.
        maskingViewDict['pgGroupName'] = extraSpecs[PORTGROUPNAME]
        maskingViewDict['igGroupName'] = (
            ("OS-%(shortHostName)s-%(protocol)s-IG"
             % {'shortHostName': shortHostName,
                'protocol': protocol}))
        maskingViewDict['connector'] = connector
        maskingViewDict['volumeInstance'] = volumeInstance
        maskingViewDict['volumeName'] = volumeName
        maskingViewDict['storageSystemName'] = storageSystemName
        return maskingViewDict
    def _add_volume_to_default_storage_group_on_create(
            self, volumeDict, volumeName, storageConfigService,
            storageSystemName, fastPolicyName, extraSpecs):
        """Add the volume to the default storage group for that policy.

        On a create when fast policy is enable add the volume to the default
        storage group for that policy. If it fails do the necessary rollback.

        :param volumeDict: the volume dictionary
        :param volumeName: the volume name (String)
        :param storageConfigService: the storage configuration service
        :param storageSystemName: the storage system name (String)
        :param fastPolicyName: the fast policy name (String)
        :param extraSpecs: extra specifications
        :returns: dict -- maskingViewDict with masking view information
        :raises: VolumeBackendAPIException
        """
        try:
            volumeInstance = self.utils.find_volume_instance(
                self.conn, volumeDict, volumeName)
            controllerConfigurationService = (
                self.utils.find_controller_configuration_service(
                    self.conn, storageSystemName))
            defaultSgName = self.fast.format_default_sg_string(fastPolicyName)
            self.fast.add_volume_to_default_storage_group_for_fast_policy(
                self.conn, controllerConfigurationService, volumeInstance,
                volumeName, fastPolicyName, extraSpecs)
            # Verify the volume actually landed in the default SG; the
            # add call above does not guarantee membership.
            foundStorageGroupInstanceName = (
                self.utils.get_storage_group_from_volume(
                    self.conn, volumeInstance.path, defaultSgName))
            if foundStorageGroupInstanceName is None:
                exceptionMessage = (_(
                    "Error adding Volume: %(volumeName)s "
                    "with instance path: %(volumeInstancePath)s.")
                    % {'volumeName': volumeName,
                       'volumeInstancePath': volumeInstance.path})
                LOG.error(exceptionMessage)
                raise exception.VolumeBackendAPIException(
                    data=exceptionMessage)
        except Exception:
            # Rollback by deleting the volume if adding the volume to the
            # default storage group were to fail.
            errorMessage = (_(
                "Rolling back %(volumeName)s by deleting it.")
                % {'volumeName': volumeName})
            LOG.exception(errorMessage)
            self.provision.delete_volume_from_pool(
                self.conn, storageConfigService, volumeInstance.path,
                volumeName, extraSpecs)
            raise exception.VolumeBackendAPIException(data=errorMessage)
def _create_and_get_unbound_volume(
self, conn, storageConfigService, compositeVolumeInstanceName,
additionalSize, extraSpecs):
"""Create an unbound volume.
Create an unbound volume so it is in the correct state to add to a
composite volume.
:param conn: the connection information to the ecom server
:param storageConfigService: the storage config service instance name
:param compositeVolumeInstanceName: the composite volume instance name
:param additionalSize: the size you want to increase the volume by
:param extraSpecs: extra specifications
:returns: volume instance modifiedCompositeVolumeInstance
"""
assocPoolInstanceName = self.utils.get_assoc_pool_from_volume(
conn, compositeVolumeInstanceName)
appendVolumeInstance = self._create_and_get_volume_instance(
conn, storageConfigService, assocPoolInstanceName, 'appendVolume',
additionalSize, extraSpecs)
isVolumeBound = self.utils.is_volume_bound_to_pool(
conn, appendVolumeInstance)
if 'True' in isVolumeBound:
appendVolumeInstance = (
self._unbind_and_get_volume_from_storage_pool(
conn, storageConfigService,
appendVolumeInstance.path, 'appendVolume', extraSpecs))
return appendVolumeInstance
def _create_and_get_volume_instance(
self, conn, storageConfigService, poolInstanceName,
volumeName, volumeSize, extraSpecs):
"""Create and get a new volume.
:param conn: the connection information to the ecom server
:param storageConfigService: the storage config service instance name
:param poolInstanceName: the pool instance name
:param volumeName: the volume name
:param volumeSize: the size to create the volume
:param extraSpecs: extra specifications
:returns: volumeInstance -- the volume instance
"""
volumeDict, _rc = (
self.provision.create_volume_from_pool(
self.conn, storageConfigService, volumeName, poolInstanceName,
volumeSize, extraSpecs))
volumeInstance = self.utils.find_volume_instance(
self.conn, volumeDict, volumeName)
return volumeInstance
def _unbind_and_get_volume_from_storage_pool(
self, conn, storageConfigService,
volumeInstanceName, volumeName, extraSpecs):
"""Unbind a volume from a pool and return the unbound volume.
:param conn: the connection information to the ecom server
:param storageConfigService: the storage config service instance name
:param volumeInstanceName: the volume instance name
:param volumeName: string the volumeName
:param extraSpecs: extra specifications
:returns: unboundVolumeInstance -- the unbound volume instance
"""
_rc, _job = (
self.provision.unbind_volume_from_storage_pool(
conn, storageConfigService, volumeInstanceName,
volumeName, extraSpecs))
# Check that the volume in unbound
volumeInstance = conn.GetInstance(volumeInstanceName)
isVolumeBound = self.utils.is_volume_bound_to_pool(
conn, volumeInstance)
if 'False' not in isVolumeBound:
exceptionMessage = (_(
"Failed to unbind volume %(volume)s")
% {'volume': volumeInstanceName})
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(data=exceptionMessage)
return volumeInstance
def _modify_and_get_composite_volume_instance(
self, conn, elementCompositionServiceInstanceName, volumeInstance,
appendVolumeInstanceName, volumeName, compositeType, extraSpecs):
"""Given an existing composite volume add a new composite volume to it.
:param conn: the connection information to the ecom server
:param elementCompositionServiceInstanceName: the storage element
composition service instance name
:param volumeInstance: the volume instance
:param appendVolumeInstanceName: the appended volume instance name
:param volumeName: the volume name
:param compositeType: concatenated
:param extraSpecs: extra specifications
:returns: int -- the return code
:returns: dict -- modifiedVolumeDict - the modified volume dict
"""
isComposite = self.utils.check_if_volume_is_composite(
self.conn, volumeInstance)
if 'True' in isComposite:
rc, job = self.provision.modify_composite_volume(
conn, elementCompositionServiceInstanceName,
volumeInstance.path, appendVolumeInstanceName, extraSpecs)
elif 'False' in isComposite:
rc, job = self.provision.create_new_composite_volume(
conn, elementCompositionServiceInstanceName,
volumeInstance.path, appendVolumeInstanceName, compositeType,
extraSpecs)
else:
LOG.error(_LE(
"Unable to determine whether %(volumeName)s is "
"composite or not."),
{'volumeName': volumeName})
raise
modifiedVolumeDict = self.provision.get_volume_dict_from_job(
conn, job['Job'])
return rc, modifiedVolumeDict
def _get_or_create_default_storage_group(
self, conn, storageSystemName, volumeDict, volumeName,
fastPolicyName, extraSpecs):
"""Get or create a default storage group for a fast policy.
:param conn: the connection information to the ecom server
:param storageSystemName: the storage system name
:param volumeDict: the volume dictionary
:param volumeName: the volume name
:param fastPolicyName: the fast policy name
:param extraSpecs: extra specifications
:returns: defaultStorageGroupInstanceName
"""
controllerConfigService = (
self.utils.find_controller_configuration_service(
self.conn, storageSystemName))
volumeInstance = self.utils.find_volume_instance(
self.conn, volumeDict, volumeName)
defaultStorageGroupInstanceName = (
self.fast.get_or_create_default_storage_group(
self.conn, controllerConfigService, fastPolicyName,
volumeInstance, extraSpecs))
return defaultStorageGroupInstanceName
    def _create_cloned_volume(
            self, cloneVolume, sourceVolume, extraSpecs, isSnapshot=False):
        """Create a clone volume from the source volume.

        :param cloneVolume: clone volume
        :param sourceVolume: source of the clone volume
        :param extraSpecs: extra specs
        :param isSnapshot: boolean -- Defaults to False
        :returns: dict -- cloneDict the cloned volume dictionary
        :raises: VolumeBackendAPIException
        """
        sourceName = sourceVolume['name']
        cloneName = cloneVolume['name']
        LOG.info(_LI(
            "Create a replica from Volume: Clone Volume: %(cloneName)s "
            "Source Volume: %(sourceName)s."),
            {'cloneName': cloneName,
             'sourceName': sourceName})
        self.conn = self._get_ecom_connection()
        sourceInstance = self._find_lun(sourceVolume)
        storageSystem = sourceInstance['SystemName']
        # Fail fast if the clone/replication feature is not licensed on
        # the backend array.
        repServCapabilityInstanceName = (
            self.utils.find_replication_service_capabilities(self.conn,
                                                             storageSystem))
        is_clone_license = self.utils.is_clone_licensed(
            self.conn, repServCapabilityInstanceName)
        if is_clone_license is False:
            exceptionMessage = (_(
                "Clone feature is not licensed on %(storageSystem)s.")
                % {'storageSystem': storageSystem})
            LOG.error(exceptionMessage)
            raise exception.VolumeBackendAPIException(data=exceptionMessage)
        repServiceInstanceName = self.utils.find_replication_service(
            self.conn, storageSystem)
        LOG.debug("Create volume replica: Volume: %(cloneName)s "
                  "Source Volume: %(sourceName)s "
                  "Method: CreateElementReplica "
                  "ReplicationService: %(service)s ElementName: "
                  "%(elementname)s SyncType: 8 SourceElement: "
                  "%(sourceelement)s.",
                  {'cloneName': cloneName,
                   'sourceName': sourceName,
                   'service': repServiceInstanceName,
                   'elementname': cloneName,
                   'sourceelement': sourceInstance.path})
        # V3 and V2 arrays take different replica-creation paths.
        if extraSpecs[ISV3]:
            rc, cloneDict = self._create_replica_v3(repServiceInstanceName,
                                                    cloneVolume,
                                                    sourceVolume,
                                                    sourceInstance,
                                                    isSnapshot,
                                                    extraSpecs)
        else:
            rc, cloneDict = self._create_clone_v2(repServiceInstanceName,
                                                  cloneVolume,
                                                  sourceVolume,
                                                  sourceInstance,
                                                  isSnapshot,
                                                  extraSpecs)
        LOG.debug("Leaving _create_cloned_volume: Volume: "
                  "%(cloneName)s Source Volume: %(sourceName)s "
                  "Return code: %(rc)lu.",
                  {'cloneName': cloneName,
                   'sourceName': sourceName,
                   'rc': rc})
        # Adding version information
        cloneDict['version'] = self.version
        return cloneDict
    def _add_clone_to_default_storage_group(
            self, fastPolicyName, storageSystemName, cloneDict, cloneName,
            extraSpecs):
        """Helper function to add clone to the default storage group.

        :param fastPolicyName: the fast policy name
        :param storageSystemName: the storage system name
        :param cloneDict: clone dictionary
        :param cloneName: clone name
        :param extraSpecs: extra specifications
        :raises: VolumeBackendAPIException
        """
        # Check if the clone/snapshot volume already part of the default sg.
        cloneInstance = self.utils.find_volume_instance(
            self.conn, cloneDict, cloneName)
        if self.fast.is_volume_in_default_SG(self.conn, cloneInstance.path):
            # Already a member; nothing to do.
            return
        # If FAST enabled place clone volume or volume from snapshot to
        # default storage group.
        LOG.debug("Adding volume: %(cloneName)s to default storage group "
                  "for FAST policy: %(fastPolicyName)s.",
                  {'cloneName': cloneName,
                   'fastPolicyName': fastPolicyName})
        storageConfigService = (
            self.utils.find_storage_configuration_service(
                self.conn, storageSystemName))
        defaultStorageGroupInstanceName = (
            self._get_or_create_default_storage_group(
                self.conn, storageSystemName, cloneDict, cloneName,
                fastPolicyName, extraSpecs))
        if defaultStorageGroupInstanceName is None:
            exceptionMessage = (_(
                "Unable to create or get default storage group for FAST "
                "policy: %(fastPolicyName)s.")
                % {'fastPolicyName': fastPolicyName})
            LOG.error(exceptionMessage)
            raise exception.VolumeBackendAPIException(
                data=exceptionMessage)
        # NOTE: on failure this call rolls back by deleting the volume.
        self._add_volume_to_default_storage_group_on_create(
            cloneDict, cloneName, storageConfigService, storageSystemName,
            fastPolicyName, extraSpecs)
def _delete_volume(self, volume):
"""Helper function to delete the specified volume.
:param volume: volume object to be deleted
:returns: tuple -- rc (int return code), volumeName (string vol name)
"""
volumeName = volume['name']
rc = -1
errorRet = (rc, volumeName)
extraSpecs = self._initial_setup(volume)
self.conn = self._get_ecom_connection()
volumeInstance = self._find_lun(volume)
if volumeInstance is None:
LOG.error(_LE(
"Volume %(name)s not found on the array. "
"No volume to delete."),
{'name': volumeName})
return errorRet
storageConfigService = self.utils.find_storage_configuration_service(
self.conn, volumeInstance['SystemName'])
deviceId = volumeInstance['DeviceID']
if extraSpecs[ISV3]:
storageGroupName = self.utils.get_v3_storage_group_name(
extraSpecs[POOL], extraSpecs[SLO],
extraSpecs[WORKLOAD])
rc = self._delete_from_pool_v3(
storageConfigService, volumeInstance, volumeName,
deviceId, storageGroupName, extraSpecs)
else:
rc = self._delete_from_pool(storageConfigService, volumeInstance,
volumeName, deviceId,
extraSpecs[FASTPOLICY],
extraSpecs)
return (rc, volumeName)
def _remove_device_from_storage_group(
self, controllerConfigurationService, volumeInstanceName,
volumeName, extraSpecs):
"""Check if volume is part of a storage group prior to delete.
Log a warning if volume is part of storage group.
:param controllerConfigurationService: controller configuration service
:param volumeInstanceName: volume instance name
:param volumeName: volume name (string)
:param extraSpecs: extra specifications
"""
storageGroupInstanceNames = (
self.masking.get_associated_masking_groups_from_device(
self.conn, volumeInstanceName))
if storageGroupInstanceNames:
LOG.warning(_LW(
"Pre check for deletion. "
"Volume: %(volumeName)s is part of a storage group. "
"Attempting removal from %(storageGroupInstanceNames)s."),
{'volumeName': volumeName,
'storageGroupInstanceNames': storageGroupInstanceNames})
for storageGroupInstanceName in storageGroupInstanceNames:
self.provision.remove_device_from_storage_group(
self.conn, controllerConfigurationService,
storageGroupInstanceName,
volumeInstanceName, volumeName, extraSpecs)
    def _find_lunmasking_scsi_protocol_controller(self, storageSystemName,
                                                  connector):
        """Find LunMaskingSCSIProtocolController for the local host.

        Find out how many volumes are mapped to a host
        associated to the LunMaskingSCSIProtocolController.

        Walks every controller on the storage system and compares its
        hardware IDs against the connector's initiators; the first live
        controller with a matching initiator wins.

        :param storageSystemName: the storage system name
        :param connector: volume object to be deleted
        :returns: foundControllerInstanceName
        """
        foundControllerInstanceName = None
        initiators = self._find_initiator_names(connector)
        storageSystemInstanceName = self.utils.find_storageSystem(
            self.conn, storageSystemName)
        controllerInstanceNames = self.conn.AssociatorNames(
            storageSystemInstanceName,
            ResultClass='EMC_LunMaskingSCSIProtocolController')
        for controllerInstanceName in controllerInstanceNames:
            try:
                # This is a check to see if the controller has
                # been deleted.
                self.conn.GetInstance(controllerInstanceName)
                storageHardwareIdInstances = self.conn.Associators(
                    controllerInstanceName,
                    ResultClass='EMC_StorageHardwareID')
                for storageHardwareIdInstance in storageHardwareIdInstances:
                    # If EMC_StorageHardwareID matches the initiator, we
                    # found the existing EMC_LunMaskingSCSIProtocolController.
                    hardwareid = storageHardwareIdInstance['StorageID']
                    for initiator in initiators:
                        if hardwareid.lower() == initiator.lower():
                            # This is a check to see if the controller
                            # has been deleted.
                            instance = self.utils.get_existing_instance(
                                self.conn, controllerInstanceName)
                            if instance is None:
                                # Skip this controller as it doesn't exist
                                # any more.
                                pass
                            else:
                                foundControllerInstanceName = (
                                    controllerInstanceName)
                            break
                    # Propagate the match out of the hardware-ID loop.
                    if foundControllerInstanceName is not None:
                        break
            except pywbem.cim_operations.CIMError as arg:
                instance = self.utils.process_exception_args(
                    arg, controllerInstanceName)
                if instance is None:
                    # Skip this controller as it doesn't exist any more.
                    pass
            # Stop scanning controllers once a match has been found.
            if foundControllerInstanceName is not None:
                break
        LOG.debug("LunMaskingSCSIProtocolController for storage system "
                  "%(storage_system)s and initiator %(initiator)s is "
                  "%(ctrl)s.",
                  {'storage_system': storageSystemName,
                   'initiator': initiators,
                   'ctrl': foundControllerInstanceName})
        return foundControllerInstanceName
def get_num_volumes_mapped(self, volume, connector):
"""Returns how many volumes are in the same zone as the connector.
Find out how many volumes are mapped to a host
associated to the LunMaskingSCSIProtocolController.
:param volume: volume object to be deleted
:param connector: volume object to be deleted
:returns: int -- numVolumesMapped
:raises: VolumeBackendAPIException
"""
volumename = volume['name']
vol_instance = self._find_lun(volume)
if vol_instance is None:
msg = (_("Volume %(name)s not found on the array. "
"Cannot determine if there are volumes mapped.")
% {'name': volumename})
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
storage_system = vol_instance['SystemName']
ctrl = self._find_lunmasking_scsi_protocol_controller(
storage_system,
connector)
LOG.debug("LunMaskingSCSIProtocolController for storage system "
"%(storage)s and %(connector)s is %(ctrl)s.",
{'storage': storage_system,
'connector': connector,
'ctrl': ctrl})
# Return 0 if masking view does not exist.
if ctrl is None:
return 0
associators = self.conn.Associators(
ctrl,
ResultClass='EMC_StorageVolume')
numVolumesMapped = len(associators)
LOG.debug("Found %(numVolumesMapped)d volumes on storage system "
"%(storage)s mapped to %(connector)s.",
{'numVolumesMapped': numVolumesMapped,
'storage': storage_system,
'connector': connector})
return numVolumesMapped
    def _delete_snapshot(self, snapshot):
        """Helper function to delete the specified snapshot.

        :param snapshot: snapshot object to be deleted
        :raises: VolumeBackendAPIException
        """
        LOG.debug("Entering delete_snapshot.")
        snapshotname = snapshot['name']
        LOG.info(_LI("Delete Snapshot: %(snapshot)s."),
                 {'snapshot': snapshotname})
        extraSpecs = self._initial_setup(snapshot)
        self.conn = self._get_ecom_connection()
        if not extraSpecs[ISV3]:
            # V2 only: locate and break any replication relationship to
            # the snapshot before deleting the target device.
            snapshotInstance = self._find_lun(snapshot)
            storageSystem = snapshotInstance['SystemName']
            # Wait for it to fully sync in case there is an ongoing
            # create volume from snapshot request.
            syncName = self.utils.find_sync_sv_by_target(
                self.conn, storageSystem, snapshotInstance, extraSpecs,
                True)
            if syncName is None:
                LOG.info(_LI(
                    "Snapshot: %(snapshot)s: not found on the array."),
                    {'snapshot': snapshotname})
            else:
                repservice = self.utils.find_replication_service(self.conn,
                                                                 storageSystem)
                if repservice is None:
                    exception_message = _(
                        "Cannot find Replication Service to"
                        " delete snapshot %s.") % snapshotname
                    raise exception.VolumeBackendAPIException(
                        data=exception_message)
                # Break the replication relationship
                LOG.debug("Deleting snap relationship: Target: %(snapshot)s "
                          "Method: ModifyReplicaSynchronization "
                          "Replication Service: %(service)s Operation: 8 "
                          "Synchronization: %(syncName)s.",
                          {'snapshot': snapshotname,
                           'service': repservice,
                           'syncName': syncName})
                self.provision.delete_clone_relationship(
                    self.conn, repservice, syncName, extraSpecs, True)
        # Delete the target device.
        self._delete_volume(snapshot)
def create_consistencygroup(self, context, group):
"""Creates a consistency group.
:param context: the context
:param group: the group object to be created
:returns: dict -- modelUpdate = {'status': 'available'}
:raises: VolumeBackendAPIException
"""
LOG.info(_LI("Create Consistency Group: %(group)s."),
{'group': group['id']})
modelUpdate = {'status': 'available'}
volumeTypeId = group['volume_type_id'].replace(",", "")
cgName = self.utils.truncate_string(group['id'], 8)
extraSpecs = self._initial_setup(None, volumeTypeId)
_poolInstanceName, storageSystem = (
self._get_pool_and_storage_system(extraSpecs))
self.conn = self._get_ecom_connection()
# Find storage system.
try:
replicationService = self.utils.find_replication_service(
self.conn, storageSystem)
self.provision.create_consistency_group(
self.conn, replicationService, cgName, extraSpecs)
except Exception:
exceptionMessage = (_("Failed to create consistency group:"
" %(cgName)s.")
% {'cgName': cgName})
LOG.exception(exceptionMessage)
raise exception.VolumeBackendAPIException(data=exceptionMessage)
return modelUpdate
    def delete_consistencygroup(self, context, group, volumes):
        """Deletes a consistency group.

        :param context: the context
        :param group: the group object to be deleted
        :param volumes: the list of volumes in the consisgroup to be deleted
        :returns: dict -- modelUpdate
        :returns: list -- list of volume objects
        :raises: VolumeBackendAPIException
        """
        LOG.info(_LI("Delete Consistency Group: %(group)s."),
                 {'group': group['id']})
        cgName = self.utils.truncate_string(group['id'], 8)
        modelUpdate = {}
        modelUpdate['status'] = group['status']
        volumeTypeId = group['volume_type_id'].replace(",", "")
        extraSpecs = self._initial_setup(None, volumeTypeId)
        _poolInstanceName, storageSystem = (
            self._get_pool_and_storage_system(extraSpecs))
        try:
            replicationService = self.utils.find_replication_service(
                self.conn, storageSystem)
            storageConfigservice = (
                self.utils.find_storage_configuration_service(
                    self.conn, storageSystem))
            cgInstanceName = self._find_consistency_group(
                replicationService, cgName)
            if cgInstanceName is None:
                exception_message = (_("Cannot find CG group %s.") %
                                     cgName)
                raise exception.VolumeBackendAPIException(
                    data=exception_message)
            # Capture the member volumes before the group is deleted so
            # they can be removed afterwards.
            memberInstanceNames = self._get_members_of_replication_group(
                cgInstanceName)
            self.provision.delete_consistency_group(self.conn,
                                                    replicationService,
                                                    cgInstanceName, cgName,
                                                    extraSpecs)
            # Do a bulk delete, a lot faster than single deletes.
            if memberInstanceNames:
                volumes, modelUpdate = self._do_bulk_delete(
                    storageSystem, memberInstanceNames, storageConfigservice,
                    volumes, modelUpdate, extraSpecs[ISV3], extraSpecs)
        except Exception:
            exceptionMessage = (_(
                "Failed to delete consistency group: %(cgName)s.")
                % {'cgName': cgName})
            LOG.exception(exceptionMessage)
            raise exception.VolumeBackendAPIException(data=exceptionMessage)
        return modelUpdate, volumes
def _do_bulk_delete(self, storageSystem, memberInstanceNames,
storageConfigservice, volumes, modelUpdate, isV3,
extraSpecs):
"""Do a bulk delete.
:param storageSystem: storage system name
:param memberInstanceNames: volume Instance names
:param storageConfigservice: storage config service
:param volumes: volume objects
:param modelUpdate: dict
:param isV3: boolean
:param extraSpecs: extra specifications
:returns: list -- list of volume objects
:returns: dict -- modelUpdate
"""
try:
controllerConfigurationService = (
self.utils.find_controller_configuration_service(
self.conn, storageSystem))
for memberInstanceName in memberInstanceNames:
self._remove_device_from_storage_group(
controllerConfigurationService, memberInstanceName,
'Member Volume', extraSpecs)
if isV3:
self.provisionv3.delete_volume_from_pool(
self.conn, storageConfigservice,
memberInstanceNames, None, extraSpecs)
else:
self.provision.delete_volume_from_pool(
self.conn, storageConfigservice,
memberInstanceNames, None, extraSpecs)
for volumeRef in volumes:
volumeRef['status'] = 'deleted'
except Exception:
for volumeRef in volumes:
volumeRef['status'] = 'error_deleting'
modelUpdate['status'] = 'error_deleting'
return volumes, modelUpdate
    def create_cgsnapshot(self, context, cgsnapshot, db):
        """Creates a cgsnapshot.

        Creates a target consistency group named after the cgsnapshot id,
        creates one target volume per source member, replicates the source
        group onto the target group, then breaks the group relationship.

        :param context: the context
        :param cgsnapshot: the consistency group snapshot to be created
        :param db: cinder database
        :returns: dict -- modelUpdate
        :returns: list -- list of snapshots
        :raises: VolumeBackendAPIException
        """
        consistencyGroup = db.consistencygroup_get(
            context, cgsnapshot['consistencygroup_id'])
        LOG.info(_LI(
            "Create snapshot for Consistency Group %(cgId)s "
            "cgsnapshotID: %(cgsnapshot)s."),
            {'cgsnapshot': cgsnapshot['id'],
             'cgId': cgsnapshot['consistencygroup_id']})
        cgName = self.utils.truncate_string(
            cgsnapshot['consistencygroup_id'], 8)
        modelUpdate = {'status': 'available'}
        volumeTypeId = consistencyGroup['volume_type_id'].replace(",", "")
        extraSpecs = self._initial_setup(None, volumeTypeId)
        self.conn = self._get_ecom_connection()
        _poolInstanceName, storageSystem = (
            self._get_pool_and_storage_system(extraSpecs))
        try:
            replicationService = self.utils.find_replication_service(
                self.conn, storageSystem)
            cgInstanceName = (
                self._find_consistency_group(replicationService, cgName))
            if cgInstanceName is None:
                exception_message = (_("Cannot find CG group %s.") % cgName)
                raise exception.VolumeBackendAPIException(
                    data=exception_message)
            memberInstanceNames = self._get_members_of_replication_group(
                cgInstanceName)
            # Create the target consistency group.
            targetCgName = self.utils.truncate_string(cgsnapshot['id'], 8)
            self.provision.create_consistency_group(
                self.conn, replicationService, targetCgName, extraSpecs)
            targetCgInstanceName = self._find_consistency_group(
                replicationService, targetCgName)
            LOG.info(_LI("Create target consistency group %(targetCg)s."),
                     {'targetCg': targetCgInstanceName})
            # Create a same-sized target volume for every member of the
            # source group and add it to the target group.
            for memberInstanceName in memberInstanceNames:
                volInstance = self.conn.GetInstance(
                    memberInstanceName, LocalOnly=False)
                numOfBlocks = volInstance['NumberOfBlocks']
                blockSize = volInstance['BlockSize']
                volumeSizeInbits = numOfBlocks * blockSize
                targetVolumeName = 'targetVol'
                volume = {'size': int(self.utils.convert_bits_to_gbs(
                    volumeSizeInbits))}
                if extraSpecs[ISV3]:
                    _rc, volumeDict, _storageSystemName = (
                        self._create_v3_volume(
                            volume, targetVolumeName, volumeSizeInbits,
                            extraSpecs))
                else:
                    _rc, volumeDict, _storageSystemName = (
                        self._create_composite_volume(
                            volume, targetVolumeName, volumeSizeInbits,
                            extraSpecs))
                targetVolumeInstance = self.utils.find_volume_instance(
                    self.conn, volumeDict, targetVolumeName)
                LOG.debug("Create target volume for member volume "
                          "Source volume: %(memberVol)s "
                          "Target volume %(targetVol)s.",
                          {'memberVol': memberInstanceName,
                           'targetVol': targetVolumeInstance.path})
                self.provision.add_volume_to_cg(self.conn,
                                                replicationService,
                                                targetCgInstanceName,
                                                targetVolumeInstance.path,
                                                targetCgName,
                                                targetVolumeName,
                                                extraSpecs)
            # Less than 5 characters relationship name.
            relationName = self.utils.truncate_string(cgsnapshot['id'], 5)
            if extraSpecs[ISV3]:
                self.provisionv3.create_group_replica(
                    self.conn, replicationService, cgInstanceName,
                    targetCgInstanceName, relationName, extraSpecs)
            else:
                self.provision.create_group_replica(
                    self.conn, replicationService, cgInstanceName,
                    targetCgInstanceName, relationName, extraSpecs)
            # Break the replica group relationship.
            rgSyncInstanceName = self.utils.find_group_sync_rg_by_target(
                self.conn, storageSystem, targetCgInstanceName, extraSpecs,
                True)
            if rgSyncInstanceName is not None:
                repservice = self.utils.find_replication_service(
                    self.conn, storageSystem)
                if repservice is None:
                    exception_message = (_(
                        "Cannot find Replication service on system %s.") %
                        storageSystem)
                    raise exception.VolumeBackendAPIException(
                        data=exception_message)
                if extraSpecs[ISV3]:
                    # Operation 9: dissolve for snapVx.
                    operation = self.utils.get_num(9, '16')
                    self.provisionv3.break_replication_relationship(
                        self.conn, repservice, rgSyncInstanceName, operation,
                        extraSpecs)
                else:
                    self.provision.delete_clone_relationship(self.conn,
                                                             repservice,
                                                             rgSyncInstanceName,
                                                             extraSpecs)
        except Exception:
            modelUpdate['status'] = 'error'
            # Persist the error state before raising.
            self.utils.populate_cgsnapshot_status(
                context, db, cgsnapshot['id'], modelUpdate['status'])
            exceptionMessage = (_("Failed to create snapshot for cg:"
                                  " %(cgName)s.")
                                % {'cgName': cgName})
            LOG.exception(exceptionMessage)
            raise exception.VolumeBackendAPIException(data=exceptionMessage)
        snapshots = self.utils.populate_cgsnapshot_status(
            context, db, cgsnapshot['id'], modelUpdate['status'])
        return modelUpdate, snapshots
    def delete_cgsnapshot(self, context, cgsnapshot, db):
        """Delete a cgsnapshot.

        :param context: the context
        :param cgsnapshot: the consistency group snapshot to be created
        :param db: cinder database
        :returns: dict -- modelUpdate
        :returns: list -- list of snapshots
        :raises: VolumeBackendAPIException
        """
        consistencyGroup = db.consistencygroup_get(
            context, cgsnapshot['consistencygroup_id'])
        snapshots = db.snapshot_get_all_for_cgsnapshot(
            context, cgsnapshot['id'])
        LOG.info(_LI(
            "Delete snapshot for source CG %(cgId)s "
            "cgsnapshotID: %(cgsnapshot)s."),
            {'cgsnapshot': cgsnapshot['id'],
             'cgId': cgsnapshot['consistencygroup_id']})
        modelUpdate = {'status': 'deleted'}
        volumeTypeId = consistencyGroup['volume_type_id'].replace(",", "")
        extraSpecs = self._initial_setup(None, volumeTypeId)
        self.conn = self._get_ecom_connection()
        _poolInstanceName, storageSystem = (
            self._get_pool_and_storage_system(extraSpecs))
        try:
            # The target CG is named with the truncated cgsnapshot id
            # (see the matching truncate_string in cgsnapshot creation).
            targetCgName = self.utils.truncate_string(cgsnapshot['id'], 8)
            modelUpdate, snapshots = self._delete_cg_and_members(
                storageSystem, targetCgName, modelUpdate,
                snapshots, extraSpecs)
        except Exception:
            modelUpdate['status'] = 'error_deleting'
            # Persist the error state before raising.
            self.utils.populate_cgsnapshot_status(
                context, db, cgsnapshot['id'], modelUpdate['status'])
            exceptionMessage = (_("Failed to delete snapshot for cg: "
                                  "%(cgId)s.")
                                % {'cgId': cgsnapshot['consistencygroup_id']})
            LOG.exception(exceptionMessage)
            raise exception.VolumeBackendAPIException(data=exceptionMessage)
        snapshots = self.utils.populate_cgsnapshot_status(
            context, db, cgsnapshot['id'], modelUpdate['status'])
        return modelUpdate, snapshots
def _find_consistency_group(self, replicationService, cgName):
"""Finds a CG given its name.
:param replicationService: the replication service
:param cgName: the consistency group name
:returns: foundCgInstanceName
"""
foundCgInstanceName = None
cgInstanceNames = (
self.conn.AssociatorNames(replicationService,
ResultClass='CIM_ReplicationGroup'))
for cgInstanceName in cgInstanceNames:
instance = self.conn.GetInstance(cgInstanceName, LocalOnly=False)
if cgName == instance['ElementName']:
foundCgInstanceName = cgInstanceName
break
return foundCgInstanceName
def _get_members_of_replication_group(self, cgInstanceName):
"""Get the members of consistency group.
:param cgInstanceName: the CG instance name
:returns: list -- memberInstanceNames
"""
memberInstanceNames = self.conn.AssociatorNames(
cgInstanceName,
AssocClass='CIM_OrderedMemberOfCollection')
return memberInstanceNames
    def _create_composite_volume(
            self, volume, volumeName, volumeSize, extraSpecs,
            memberCount=None):
        """Create a composite volume (V2).

        A composite volume is built from meta members.  The member count is
        either supplied by the caller or derived from the requested size and
        the striped/concatenated composite type in the extra specs.  When a
        FAST policy is configured the new volume is also added to the
        policy's default storage group.

        :param volume: the volume object
        :param volumeName: the name of the volume
        :param volumeSize: the size of the volume
        :param extraSpecs: extra specifications
        :param memberCount: the number of meta members in a composite volume
        :returns: int -- return code
        :returns: dict -- volumeDict
        :returns: string -- storageSystemName
        :raises: VolumeBackendAPIException
        """
        if not memberCount:
            # Derive the member count; errorDesc is non-None when the
            # requested striped meta count cannot accommodate the size.
            memberCount, errorDesc = self.utils.determine_member_count(
                volume['size'], extraSpecs[MEMBERCOUNT],
                extraSpecs[COMPOSITETYPE])
            if errorDesc is not None:
                exceptionMessage = (_("The striped meta count of "
                                      "%(memberCount)s is too small for "
                                      "volume: %(volumeName)s, "
                                      "with size %(volumeSize)s.")
                                    % {'memberCount': memberCount,
                                       'volumeName': volumeName,
                                       'volumeSize': volume['size']})
                LOG.error(exceptionMessage)
                raise exception.VolumeBackendAPIException(
                    data=exceptionMessage)
        poolInstanceName, storageSystemName = (
            self._get_pool_and_storage_system(extraSpecs))
        LOG.debug("Create Volume: %(volume)s Pool: %(pool)s "
                  "Storage System: %(storageSystem)s "
                  "Size: %(size)lu MemberCount: %(memberCount)s.",
                  {'volume': volumeName,
                   'pool': poolInstanceName,
                   'storageSystem': storageSystemName,
                   'size': volumeSize,
                   'memberCount': memberCount})
        elementCompositionService = (
            self.utils.find_element_composition_service(self.conn,
                                                        storageSystemName))
        storageConfigService = self.utils.find_storage_configuration_service(
            self.conn, storageSystemName)
        # If FAST is intended to be used we must first check that the pool
        # is associated with the correct storage tier.
        if extraSpecs[FASTPOLICY] is not None:
            foundPoolInstanceName = self.fast.get_pool_associated_to_policy(
                self.conn, extraSpecs[FASTPOLICY], extraSpecs[ARRAY],
                storageConfigService, poolInstanceName)
            if foundPoolInstanceName is None:
                exceptionMessage = (_("Pool: %(poolName)s. "
                                      "is not associated to storage tier for "
                                      "fast policy %(fastPolicy)s.")
                                    % {'poolName': extraSpecs[POOL],
                                       'fastPolicy':
                                       extraSpecs[FASTPOLICY]})
                LOG.error(exceptionMessage)
                raise exception.VolumeBackendAPIException(
                    data=exceptionMessage)
        compositeType = self.utils.get_composite_type(
            extraSpecs[COMPOSITETYPE])
        volumeDict, rc = self.provision.create_composite_volume(
            self.conn, elementCompositionService, volumeSize, volumeName,
            poolInstanceName, compositeType, memberCount, extraSpecs)
        # Now that we have already checked that the pool is associated with
        # the correct storage tier and the volume was successfully created
        # add the volume to the default storage group created for
        # volumes in pools associated with this fast policy.
        if extraSpecs[FASTPOLICY]:
            LOG.info(_LI(
                "Adding volume: %(volumeName)s to default storage group"
                " for FAST policy: %(fastPolicyName)s."),
                {'volumeName': volumeName,
                 'fastPolicyName': extraSpecs[FASTPOLICY]})
            defaultStorageGroupInstanceName = (
                self._get_or_create_default_storage_group(
                    self.conn, storageSystemName, volumeDict,
                    volumeName, extraSpecs[FASTPOLICY], extraSpecs))
            if not defaultStorageGroupInstanceName:
                exceptionMessage = (_(
                    "Unable to create or get default storage group for "
                    "FAST policy: %(fastPolicyName)s.")
                    % {'fastPolicyName': extraSpecs[FASTPOLICY]})
                LOG.error(exceptionMessage)
                raise exception.VolumeBackendAPIException(
                    data=exceptionMessage)
            self._add_volume_to_default_storage_group_on_create(
                volumeDict, volumeName, storageConfigService,
                storageSystemName, extraSpecs[FASTPOLICY], extraSpecs)
        return rc, volumeDict, storageSystemName
    def _create_v3_volume(
            self, volume, volumeName, volumeSize, extraSpecs):
        """Create a volume (V3).

        Validates the SLO/workload combination, then creates the volume
        from the matching (possibly newly created) storage group.

        :param volume: the volume object
        :param volumeName: the volume name
        :param volumeSize: the volume size
        :param extraSpecs: extra specifications
        :returns: int -- return code
        :returns: dict -- volumeDict
        :returns: string -- storageSystemName
        :raises: VolumeBackendAPIException
        """
        # Fail fast on an invalid SLO/workload pairing; the detailed
        # reason is logged by verify_slo_workload itself.
        isValidSLO, isValidWorkload = self.utils.verify_slo_workload(
            extraSpecs[SLO], extraSpecs[WORKLOAD])
        if not isValidSLO or not isValidWorkload:
            exceptionMessage = (_(
                "Either SLO: %(slo)s or workload %(workload)s is invalid. "
                "Examine previous error statement for valid values.")
                % {'slo': extraSpecs[SLO],
                   'workload': extraSpecs[WORKLOAD]})
            LOG.error(exceptionMessage)
            raise exception.VolumeBackendAPIException(data=exceptionMessage)
        poolInstanceName, storageSystemName = (
            self._get_pool_and_storage_system(extraSpecs))
        LOG.debug("Create Volume: %(volume)s Pool: %(pool)s "
                  "Storage System: %(storageSystem)s "
                  "Size: %(size)lu.",
                  {'volume': volumeName,
                   'pool': poolInstanceName,
                   'storageSystem': storageSystemName,
                   'size': volumeSize})
        storageConfigService = self.utils.find_storage_configuration_service(
            self.conn, storageSystemName)
        # A volume created without specifying a storage group during
        # creation time is allocated from the default SRP pool and
        # assigned the optimized SLO.
        sgInstanceName = self._get_or_create_storage_group_v3(
            extraSpecs[POOL], extraSpecs[SLO],
            extraSpecs[WORKLOAD], storageSystemName, extraSpecs)
        volumeDict, rc = self.provisionv3.create_volume_from_sg(
            self.conn, storageConfigService, volumeName,
            sgInstanceName, volumeSize, extraSpecs)
        return rc, volumeDict, storageSystemName
def _get_or_create_storage_group_v3(
self, poolName, slo, workload, storageSystemName, extraSpecs):
"""Get or create storage group_v3 (V3).
:param poolName: the SRP pool nsmr
:param slo: the SLO
:param workload: the workload
:param storageSystemName: storage system name
:param extraSpecs: extra specifications
:returns: sgInstanceName
"""
storageGroupName = self.utils.get_v3_storage_group_name(
poolName, slo, workload)
controllerConfigService = (
self.utils.find_controller_configuration_service(
self.conn, storageSystemName))
sgInstanceName = self.utils.find_storage_masking_group(
self.conn, controllerConfigService, storageGroupName)
if sgInstanceName is None:
sgInstanceName = self.provisionv3.create_storage_group_v3(
self.conn, controllerConfigService, storageGroupName,
poolName, slo, workload, extraSpecs)
return sgInstanceName
    def _extend_composite_volume(self, volumeInstance, volumeName,
                                 newSize, additionalVolumeSize, extraSpecs):
        """Extend a composite volume (V2).

        Creates an unbound volume of the additional size and appends it to
        the existing concatenated composite volume.

        :param volumeInstance: the volume instance
        :param volumeName: the name of the volume
        :param newSize: in GBs
        :param additionalVolumeSize: additional volume size
        :param extraSpecs: extra specifications
        :returns: int -- return code
        :returns: dict -- modifiedVolumeDict
        :raises: VolumeBackendAPIException
        """
        # Is the volume extendable.
        # NOTE(review): check_if_volume_is_extendable appears to return a
        # string flag, hence the substring test -- confirm before changing.
        isConcatenated = self.utils.check_if_volume_is_extendable(
            self.conn, volumeInstance)
        if 'True' not in isConcatenated:
            exceptionMessage = (_(
                "Volume: %(volumeName)s is not a concatenated volume. "
                "You can only perform extend on concatenated volume. "
                "Exiting...")
                % {'volumeName': volumeName})
            LOG.error(exceptionMessage)
            raise exception.VolumeBackendAPIException(data=exceptionMessage)
        else:
            compositeType = self.utils.get_composite_type(CONCATENATED)
        LOG.debug("Extend Volume: %(volume)s New size: %(newSize)s GBs.",
                  {'volume': volumeName,
                   'newSize': newSize})
        deviceId = volumeInstance['DeviceID']
        storageSystemName = volumeInstance['SystemName']
        LOG.debug(
            "Device ID: %(deviceid)s: Storage System: "
            "%(storagesystem)s.",
            {'deviceid': deviceId,
             'storagesystem': storageSystemName})
        storageConfigService = self.utils.find_storage_configuration_service(
            self.conn, storageSystemName)
        elementCompositionService = (
            self.utils.find_element_composition_service(
                self.conn, storageSystemName))
        # Create a volume to the size of the
        # newSize - oldSize = additionalVolumeSize.
        unboundVolumeInstance = self._create_and_get_unbound_volume(
            self.conn, storageConfigService, volumeInstance.path,
            additionalVolumeSize, extraSpecs)
        if unboundVolumeInstance is None:
            exceptionMessage = (_(
                "Error Creating unbound volume on an Extend operation."))
            LOG.error(exceptionMessage)
            raise exception.VolumeBackendAPIException(data=exceptionMessage)
        # Add the new unbound volume to the original composite volume.
        rc, modifiedVolumeDict = (
            self._modify_and_get_composite_volume_instance(
                self.conn, elementCompositionService, volumeInstance,
                unboundVolumeInstance.path, volumeName, compositeType,
                extraSpecs))
        if modifiedVolumeDict is None:
            exceptionMessage = (_(
                "On an Extend Operation, error adding volume to composite "
                "volume: %(volumename)s.")
                % {'volumename': volumeName})
            LOG.error(exceptionMessage)
            raise exception.VolumeBackendAPIException(data=exceptionMessage)
        return rc, modifiedVolumeDict
    def _slo_workload_migration(self, volumeInstance, volume, host,
                                volumeName, volumeStatus, newType,
                                extraSpecs):
        """Migrate from SLO/Workload combination to another (V3).

        Validates the migration first; the actual move is delegated to
        _migrate_volume_v3 and only happens when the source and target
        hosts differ.

        :param volumeInstance: the volume instance
        :param volume: the volume object
        :param host: the host object
        :param volumeName: the name of the volume
        :param volumeStatus: the volume status
        :param newType: the type to migrate to
        :param extraSpecs: extra specifications
        :returns: boolean -- True if migration succeeded, False if error.
        """
        storageGroupName = self.utils.get_v3_storage_group_name(
            extraSpecs[POOL], extraSpecs[SLO], extraSpecs[WORKLOAD])
        volumeInstanceName = volumeInstance.path
        # Validation also resolves the target SLO/workload from the host.
        isValid, targetSlo, targetWorkload = (
            self._is_valid_for_storage_assisted_migration_v3(
                volumeInstanceName, host, extraSpecs[ARRAY],
                extraSpecs[POOL], volumeName, volumeStatus,
                storageGroupName))
        storageSystemName = volumeInstance['SystemName']
        if not isValid:
            LOG.error(_LE(
                "Volume %(name)s is not suitable for storage "
                "assisted migration using retype."),
                {'name': volumeName})
            return False
        if volume['host'] != host['host']:
            LOG.debug(
                "Retype Volume %(name)s from source host %(sourceHost)s "
                "to target host %(targetHost)s.",
                {'name': volumeName,
                 'sourceHost': volume['host'],
                 'targetHost': host['host']})
            return self._migrate_volume_v3(
                volume, volumeInstance, extraSpecs[POOL], targetSlo,
                targetWorkload, storageSystemName, newType, extraSpecs)
        return False
    def _migrate_volume_v3(
            self, volume, volumeInstance, poolName, targetSlo,
            targetWorkload, storageSystemName, newType, extraSpecs):
        """Migrate from one slo/workload combination to another (V3).

        This requires moving the volume from its current SG to a
        new or existing SG that has the target attributes.  Both the
        removal from the source SG and the addition to the target SG are
        verified before declaring success.

        :param volume: the volume object
        :param volumeInstance: the volume instance
        :param poolName: the SRP Pool Name
        :param targetSlo: the target SLO
        :param targetWorkload: the target workload
        :param storageSystemName: the storage system name
        :param newType: the type to migrate to
        :param extraSpecs: extra specifications
        :returns: boolean -- True if migration succeeded, False if error.
        """
        volumeName = volume['name']
        controllerConfigService = (
            self.utils.find_controller_configuration_service(
                self.conn, storageSystemName))
        defaultSgName = self.utils.get_v3_storage_group_name(
            extraSpecs[POOL], extraSpecs[SLO], extraSpecs[WORKLOAD])
        foundStorageGroupInstanceName = (
            self.utils.get_storage_group_from_volume(
                self.conn, volumeInstance.path, defaultSgName))
        if foundStorageGroupInstanceName is None:
            # Nothing to remove; the volume is unmanaged by any SG.
            LOG.warning(_LW(
                "Volume : %(volumeName)s is not currently "
                "belonging to any storage group."),
                {'volumeName': volumeName})
        else:
            self.provision.remove_device_from_storage_group(
                self.conn,
                controllerConfigService,
                foundStorageGroupInstanceName,
                volumeInstance.path,
                volumeName, extraSpecs)
            # Check that it has been removed.
            sgFromVolRemovedInstanceName = (
                self.utils.wrap_get_storage_group_from_volume(
                    self.conn, volumeInstance.path, defaultSgName))
            if sgFromVolRemovedInstanceName is not None:
                LOG.error(_LE(
                    "Volume : %(volumeName)s has not been "
                    "removed from source storage group %(storageGroup)s."),
                    {'volumeName': volumeName,
                     'storageGroup': sgFromVolRemovedInstanceName})
                return False
        storageGroupName = self.utils.get_v3_storage_group_name(
            poolName, targetSlo, targetWorkload)
        targetSgInstanceName = self._get_or_create_storage_group_v3(
            poolName, targetSlo, targetWorkload, storageSystemName,
            extraSpecs)
        if targetSgInstanceName is None:
            LOG.error(_LE(
                "Failed to get or create storage group %(storageGroupName)s."),
                {'storageGroupName': storageGroupName})
            return False
        self.masking.add_volume_to_storage_group(
            self.conn, controllerConfigService, targetSgInstanceName,
            volumeInstance, volumeName, storageGroupName, extraSpecs)
        # Check that it has been added.
        sgFromVolAddedInstanceName = (
            self.utils.get_storage_group_from_volume(
                self.conn, volumeInstance.path, storageGroupName))
        if sgFromVolAddedInstanceName is None:
            LOG.error(_LE(
                "Volume : %(volumeName)s has not been "
                "added to target storage group %(storageGroup)s."),
                {'volumeName': volumeName,
                 'storageGroup': targetSgInstanceName})
            return False
        return True
    def _pool_migration(self, volumeInstance, volume, host,
                        volumeName, volumeStatus,
                        fastPolicyName, newType, extraSpecs):
        """Migrate from one pool to another (V2).

        Validates the migration first; the actual move is delegated to
        _migrate_volume and only happens when the source and target hosts
        differ.

        :param volumeInstance: the volume instance
        :param volume: the volume object
        :param host: the host object
        :param volumeName: the name of the volume
        :param volumeStatus: the volume status
        :param fastPolicyName: the FAST policy Name
        :param newType: the type to migrate to
        :param extraSpecs: extra specifications
        :returns: boolean -- True if migration succeeded, False if error.
        """
        storageSystemName = volumeInstance['SystemName']
        # Validation also resolves the target pool and FAST policy.
        isValid, targetPoolName, targetFastPolicyName = (
            self._is_valid_for_storage_assisted_migration(
                volumeInstance.path, host, storageSystemName,
                volumeName, volumeStatus))
        if not isValid:
            LOG.error(_LE(
                "Volume %(name)s is not suitable for storage "
                "assisted migration using retype."),
                {'name': volumeName})
            return False
        if volume['host'] != host['host']:
            LOG.debug(
                "Retype Volume %(name)s from source host %(sourceHost)s "
                "to target host %(targetHost)s.",
                {'name': volumeName,
                 'sourceHost': volume['host'],
                 'targetHost': host['host']})
            return self._migrate_volume(
                volume, volumeInstance, targetPoolName, targetFastPolicyName,
                fastPolicyName, extraSpecs, newType)
        return False
def _update_pool_stats(
self, backendName, arrayInfo):
"""Update pool statistics (V2).
:param backendName: the backend name
:param arrayInfo: the arrayInfo
:returns: location_info, total_capacity_gb, free_capacity_gb
"""
if arrayInfo['FastPolicy']:
LOG.debug(
"Fast policy %(fastPolicyName)s is enabled on %(arrayName)s.",
{'fastPolicyName': arrayInfo['FastPolicy'],
'arrayName': arrayInfo['SerialNumber']})
else:
LOG.debug(
"No Fast policy for Array:%(arrayName)s "
"backend:%(backendName)s.",
{'arrayName': arrayInfo['SerialNumber'],
'backendName': backendName})
storageSystemInstanceName = self.utils.find_storageSystem(
self.conn, arrayInfo['SerialNumber'])
isTieringPolicySupported = (
self.fast.is_tiering_policy_enabled_on_storage_system(
self.conn, storageSystemInstanceName))
if (arrayInfo['FastPolicy'] is not None and
isTieringPolicySupported is True): # FAST enabled
total_capacity_gb, free_capacity_gb = (
self.fast.get_capacities_associated_to_policy(
self.conn, arrayInfo['SerialNumber'],
arrayInfo['FastPolicy']))
LOG.info(_LI(
"FAST: capacity stats for policy %(fastPolicyName)s on array "
"%(arrayName)s. total_capacity_gb=%(total_capacity_gb)lu, "
"free_capacity_gb=%(free_capacity_gb)lu."),
{'fastPolicyName': arrayInfo['FastPolicy'],
'arrayName': arrayInfo['SerialNumber'],
'total_capacity_gb': total_capacity_gb,
'free_capacity_gb': free_capacity_gb})
else: # NON-FAST
total_capacity_gb, free_capacity_gb = (
self.utils.get_pool_capacities(self.conn,
arrayInfo['PoolName'],
arrayInfo['SerialNumber']))
LOG.info(_LI(
"NON-FAST: capacity stats for pool %(poolName)s on array "
"%(arrayName)s total_capacity_gb=%(total_capacity_gb)lu, "
"free_capacity_gb=%(free_capacity_gb)lu."),
{'poolName': arrayInfo['PoolName'],
'arrayName': arrayInfo['SerialNumber'],
'total_capacity_gb': total_capacity_gb,
'free_capacity_gb': free_capacity_gb})
location_info = ("%(arrayName)s#%(poolName)s#%(policyName)s"
% {'arrayName': arrayInfo['SerialNumber'],
'poolName': arrayInfo['PoolName'],
'policyName': arrayInfo['FastPolicy']})
return location_info, total_capacity_gb, free_capacity_gb
    def _set_v2_extra_specs(self, extraSpecs, poolRecord):
        """Set the VMAX V2 extra specs.

        Determines the composite type (striped when a stripe count is
        supplied, concatenated otherwise), picks up the optional FAST
        policy and then merges in the common (v2/v3) extra specs.

        :param extraSpecs: extra specifications
        :param poolRecord: pool record
        :returns: dict -- the extraSpecs
        :raises: VolumeBackendAPIException
        """
        try:
            # EAFP: a missing stripe count key means a concatenated volume.
            stripedMetaCount = extraSpecs[STRIPECOUNT]
            extraSpecs[MEMBERCOUNT] = stripedMetaCount
            extraSpecs[COMPOSITETYPE] = STRIPED
            LOG.debug(
                "There are: %(stripedMetaCount)s striped metas in "
                "the extra specs.",
                {'stripedMetaCount': stripedMetaCount})
        except KeyError:
            memberCount = '1'
            extraSpecs[MEMBERCOUNT] = memberCount
            extraSpecs[COMPOSITETYPE] = CONCATENATED
            LOG.debug("StripedMetaCount is not in the extra specs.")
        # Get the FAST policy from the file. This value can be None if the
        # user doesn't want to associate with any FAST policy.
        if poolRecord['FastPolicy']:
            LOG.debug("The fast policy name is: %(fastPolicyName)s.",
                      {'fastPolicyName': poolRecord['FastPolicy']})
        # NOTE(review): when FastPolicy is falsy, extraSpecs[FASTPOLICY] is
        # never assigned here, yet it is read unconditionally below --
        # presumably the key always arrives via the volume-type extra specs;
        # confirm, otherwise the debug below raises KeyError.
        extraSpecs[FASTPOLICY] = poolRecord['FastPolicy']
        extraSpecs[ISV3] = False
        extraSpecs = self._set_common_extraSpecs(extraSpecs, poolRecord)
        LOG.debug("Pool is: %(pool)s "
                  "Array is: %(array)s "
                  "FastPolicy is: %(fastPolicy)s "
                  "CompositeType is: %(compositeType)s "
                  "MemberCount is: %(memberCount)s.",
                  {'pool': extraSpecs[POOL],
                   'array': extraSpecs[ARRAY],
                   'fastPolicy': extraSpecs[FASTPOLICY],
                   'compositeType': extraSpecs[COMPOSITETYPE],
                   'memberCount': extraSpecs[MEMBERCOUNT]})
        return extraSpecs
def _set_v3_extra_specs(self, extraSpecs, poolRecord):
"""Set the VMAX V3 extra specs.
If SLO or workload are not specified then the default
values are NONE and the Optimized SLO will be assigned to the
volume.
:param extraSpecs: extra specifications
:param poolRecord: pool record
:returns: dict -- the extra specifications dictionary
"""
extraSpecs[SLO] = poolRecord['SLO']
extraSpecs[WORKLOAD] = poolRecord['Workload']
extraSpecs[ISV3] = True
extraSpecs = self._set_common_extraSpecs(extraSpecs, poolRecord)
LOG.debug("Pool is: %(pool)s "
"Array is: %(array)s "
"SLO is: %(slo)s "
"Workload is: %(workload)s.",
{'pool': extraSpecs[POOL],
'array': extraSpecs[ARRAY],
'slo': extraSpecs[SLO],
'workload': extraSpecs[WORKLOAD]})
return extraSpecs
def _set_common_extraSpecs(self, extraSpecs, poolRecord):
"""Set common extra specs.
The extraSpecs are common to v2 and v3
:param extraSpecs: extra specifications
:param poolRecord: pool record
:returns: dict -- the extra specifications dictionary
"""
extraSpecs[POOL] = poolRecord['PoolName']
extraSpecs[ARRAY] = poolRecord['SerialNumber']
extraSpecs[PORTGROUPNAME] = poolRecord['PortGroup']
if 'Interval' in poolRecord and poolRecord['Interval']:
extraSpecs[INTERVAL] = poolRecord['Interval']
LOG.debug("The user defined interval is : %(intervalInSecs)s.",
{'intervalInSecs': poolRecord['Interval']})
else:
LOG.debug("Interval not overridden, default of 10 assumed.")
if 'Retries' in poolRecord and poolRecord['Retries']:
extraSpecs[RETRIES] = poolRecord['Retries']
LOG.debug("The user defined retries is : %(retries)s.",
{'retries': poolRecord['Retries']})
else:
LOG.debug("Retries not overridden, default of 60 assumed.")
return extraSpecs
    def _delete_from_pool(self, storageConfigService, volumeInstance,
                          volumeName, deviceId, fastPolicyName, extraSpecs):
        """Delete from pool (v2).

        Removes the volume from its (default or other) storage group and
        returns it to the pool.  On failure the volume is re-added to the
        FAST default storage group so it is not left orphaned.

        :param storageConfigService: the storage config service
        :param volumeInstance: the volume instance
        :param volumeName: the volume Name
        :param deviceId: the device ID of the volume
        :param fastPolicyName: the FAST policy name(if it exists)
        :param extraSpecs: extra specifications
        :returns: int -- return code
        :raises: VolumeBackendAPIException
        """
        storageSystemName = volumeInstance['SystemName']
        controllerConfigurationService = (
            self.utils.find_controller_configuration_service(
                self.conn, storageSystemName))
        if fastPolicyName is not None:
            defaultStorageGroupInstanceName = (
                self.masking.remove_device_from_default_storage_group(
                    self.conn, controllerConfigurationService,
                    volumeInstance.path, volumeName, fastPolicyName,
                    extraSpecs))
            if defaultStorageGroupInstanceName is None:
                LOG.warning(_LW(
                    "The volume: %(volumename)s. was not first part of the "
                    "default storage group for FAST policy %(fastPolicyName)s"
                    "."),
                    {'volumename': volumeName,
                     'fastPolicyName': fastPolicyName})
                # Check if it is part of another storage group.
                self._remove_device_from_storage_group(
                    controllerConfigurationService,
                    volumeInstance.path, volumeName, extraSpecs)
        else:
            # Check if volume is part of a storage group.
            self._remove_device_from_storage_group(
                controllerConfigurationService,
                volumeInstance.path, volumeName, extraSpecs)
        LOG.debug("Delete Volume: %(name)s Method: EMCReturnToStoragePool "
                  "ConfigService: %(service)s TheElement: %(vol_instance)s "
                  "DeviceId: %(deviceId)s.",
                  {'service': storageConfigService,
                   'name': volumeName,
                   'vol_instance': volumeInstance.path,
                   'deviceId': deviceId})
        try:
            rc = self.provision.delete_volume_from_pool(
                self.conn, storageConfigService, volumeInstance.path,
                volumeName, extraSpecs)
        except Exception:
            # If we cannot successfully delete the volume then we want to
            # return the volume to the default storage group.
            # (defaultStorageGroupInstanceName is only bound when
            # fastPolicyName is not None; the short-circuit below keeps the
            # reference safe.)
            if (fastPolicyName is not None and
                    defaultStorageGroupInstanceName is not None and
                    storageSystemName is not None):
                assocDefaultStorageGroupName = (
                    self.fast
                    .add_volume_to_default_storage_group_for_fast_policy(
                        self.conn, controllerConfigurationService,
                        volumeInstance, volumeName, fastPolicyName,
                        extraSpecs))
                if assocDefaultStorageGroupName is None:
                    LOG.error(_LE(
                        "Failed to Roll back to re-add volume %(volumeName)s "
                        "to default storage group for fast policy "
                        "%(fastPolicyName)s. Please contact your sysadmin to "
                        "get the volume returned to the default "
                        "storage group."),
                        {'volumeName': volumeName,
                         'fastPolicyName': fastPolicyName})
            errorMessage = (_("Failed to delete volume %(volumeName)s.") %
                            {'volumeName': volumeName})
            LOG.exception(errorMessage)
            raise exception.VolumeBackendAPIException(data=errorMessage)
        return rc
def _delete_from_pool_v3(self, storageConfigService, volumeInstance,
volumeName, deviceId, storageGroupName,
extraSpecs):
"""Delete from pool (v3).
:param storageConfigService: the storage config service
:param volumeInstance: the volume instance
:param volumeName: the volume Name
:param deviceId: the device ID of the volume
:param storageGroupName: the name of the default SG
:param extraSpecs: extra specifications
:returns: int -- return code
:raises: VolumeBackendAPIException
"""
storageSystemName = volumeInstance['SystemName']
controllerConfigurationService = (
self.utils.find_controller_configuration_service(
self.conn, storageSystemName))
# Check if it is part of a storage group and delete it
# extra logic for case when volume is the last member.
sgFromVolInstanceName = self.masking.remove_and_reset_members(
self.conn, controllerConfigurationService, volumeInstance,
volumeName, extraSpecs, None, 'noReset')
LOG.debug("Delete Volume: %(name)s Method: EMCReturnToStoragePool "
"ConfigServic: %(service)s TheElement: %(vol_instance)s "
"DeviceId: %(deviceId)s.",
{'service': storageConfigService,
'name': volumeName,
'vol_instance': volumeInstance.path,
'deviceId': deviceId})
try:
rc = self.provisionv3.delete_volume_from_pool(
self.conn, storageConfigService, volumeInstance.path,
volumeName, extraSpecs)
except Exception:
# If we cannot successfully delete the volume, then we want to
# return the volume to the default storage group,
# which should be the SG it previously belonged to.
storageGroupInstanceName = self.utils.find_storage_masking_group(
self.conn, controllerConfigurationService, storageGroupName)
if sgFromVolInstanceName is not storageGroupInstanceName:
LOG.debug(
"Volume: %(volumeName)s was not previously part of "
" %(storageGroupInstanceName)s. "
"Returning to %(storageGroupName)s.",
{'volumeName': volumeName,
'storageGroupInstanceName': storageGroupInstanceName,
'storageGroupName': storageGroupName})
if storageGroupInstanceName is not None:
self.masking.add_volume_to_storage_group(
self.conn, controllerConfigurationService,
storageGroupInstanceName, volumeInstance, volumeName,
storageGroupName, extraSpecs)
errorMessage = (_("Failed to delete volume %(volumeName)s.") %
{'volumeName': volumeName})
LOG.exception(errorMessage)
raise exception.VolumeBackendAPIException(data=errorMessage)
return rc
    def _create_clone_v2(self, repServiceInstanceName, cloneVolume,
                         sourceVolume, sourceInstance, isSnapshot,
                         extraSpecs):
        """Create a clone (v2).

        A simple (non-meta) source, or a composite source whose meta
        members are all the same size, is cloned directly.  A composite
        source with mixed member sizes first needs a matching target built
        member by member before the replica is created.

        :param repServiceInstanceName: the replication service
        :param cloneVolume: the clone volume object
        :param sourceVolume: the source volume object
        :param sourceInstance: the device ID of the volume
        :param isSnapshot: check to see if it is a snapshot
        :param extraSpecs: extra specifications
        :returns: int -- return code
        :raises: VolumeBackendAPIException
        """
        # Check if the source volume contains any meta devices.
        metaHeadInstanceName = self.utils.get_volume_meta_head(
            self.conn, sourceInstance.path)
        if metaHeadInstanceName is None:  # Simple volume.
            return self._create_v2_replica_and_delete_clone_relationship(
                repServiceInstanceName, cloneVolume, sourceVolume,
                sourceInstance, None, extraSpecs, isSnapshot)
        else:  # Composite volume with meta device members.
            # Check if the meta members capacity.
            metaMemberInstanceNames = (
                self.utils.get_composite_elements(
                    self.conn, sourceInstance))
            volumeCapacities = self.utils.get_meta_members_capacity_in_byte(
                self.conn, metaMemberInstanceNames)
            LOG.debug("Volume capacities: %(metasizes)s.",
                      {'metasizes': volumeCapacities})
            if len(set(volumeCapacities)) == 1:
                LOG.debug("Meta volume all of the same size.")
                return self._create_v2_replica_and_delete_clone_relationship(
                    repServiceInstanceName, cloneVolume, sourceVolume,
                    sourceInstance, None, extraSpecs, isSnapshot)
            LOG.debug("Meta volumes are of different sizes, "
                      "%d different sizes.", len(set(volumeCapacities)))
            # Build a target with the same member layout: the first member
            # becomes the base composite volume, every subsequent member is
            # created unbound and appended to it.
            baseTargetVolumeInstance = None
            for volumeSizeInbits in volumeCapacities:
                if baseTargetVolumeInstance is None:  # Create base volume.
                    baseVolumeName = "TargetBaseVol"
                    volume = {'size': int(self.utils.convert_bits_to_gbs(
                        volumeSizeInbits))}
                    _rc, baseVolumeDict, storageSystemName = (
                        self._create_composite_volume(
                            volume, baseVolumeName, volumeSizeInbits,
                            extraSpecs, 1))
                    baseTargetVolumeInstance = self.utils.find_volume_instance(
                        self.conn, baseVolumeDict, baseVolumeName)
                    LOG.debug("Base target volume %(targetVol)s created. "
                              "capacity in bits: %(capInBits)lu.",
                              {'capInBits': volumeSizeInbits,
                               'targetVol': baseTargetVolumeInstance.path})
                else:  # Create append volume
                    targetVolumeName = "MetaVol"
                    volume = {'size': int(self.utils.convert_bits_to_gbs(
                        volumeSizeInbits))}
                    storageConfigService = (
                        self.utils.find_storage_configuration_service(
                            self.conn, storageSystemName))
                    unboundVolumeInstance = (
                        self._create_and_get_unbound_volume(
                            self.conn, storageConfigService,
                            baseTargetVolumeInstance.path, volumeSizeInbits,
                            extraSpecs))
                    if unboundVolumeInstance is None:
                        exceptionMessage = (_(
                            "Error Creating unbound volume."))
                        LOG.error(exceptionMessage)
                        # Remove target volume
                        self._delete_target_volume_v2(storageConfigService,
                                                      baseTargetVolumeInstance,
                                                      extraSpecs)
                        raise exception.VolumeBackendAPIException(
                            data=exceptionMessage)
                    # Append the new unbound volume to the
                    # base target composite volume.
                    baseTargetVolumeInstance = self.utils.find_volume_instance(
                        self.conn, baseVolumeDict, baseVolumeName)
                    try:
                        elementCompositionService = (
                            self.utils.find_element_composition_service(
                                self.conn, storageSystemName))
                        compositeType = self.utils.get_composite_type(
                            extraSpecs[COMPOSITETYPE])
                        _rc, modifiedVolumeDict = (
                            self._modify_and_get_composite_volume_instance(
                                self.conn,
                                elementCompositionService,
                                baseTargetVolumeInstance,
                                unboundVolumeInstance.path,
                                targetVolumeName,
                                compositeType,
                                extraSpecs))
                        if modifiedVolumeDict is None:
                            exceptionMessage = (_(
                                "Error appending volume %(volumename)s to "
                                "target base volume.")
                                % {'volumename': targetVolumeName})
                            LOG.error(exceptionMessage)
                            raise exception.VolumeBackendAPIException(
                                data=exceptionMessage)
                    except Exception:
                        exceptionMessage = (_(
                            "Exception appending meta volume to target volume "
                            "%(volumename)s.")
                            % {'volumename': baseVolumeName})
                        LOG.error(exceptionMessage)
                        # Remove append volume and target base volume
                        self._delete_target_volume_v2(
                            storageConfigService, unboundVolumeInstance,
                            extraSpecs)
                        self._delete_target_volume_v2(
                            storageConfigService, baseTargetVolumeInstance,
                            extraSpecs)
                        raise exception.VolumeBackendAPIException(
                            data=exceptionMessage)
            LOG.debug("Create V2 replica for meta members of different sizes.")
            return self._create_v2_replica_and_delete_clone_relationship(
                repServiceInstanceName, cloneVolume, sourceVolume,
                sourceInstance, baseTargetVolumeInstance, extraSpecs,
                isSnapshot)
    def _create_v2_replica_and_delete_clone_relationship(
            self, repServiceInstanceName, cloneVolume, sourceVolume,
            sourceInstance, targetInstance, extraSpecs, isSnapshot=False):
        """Create a replica and delete the clone relationship.

        For a snapshot the clone relationship is kept; for a clone it is
        detached so the target can be used as a regular lun.  On replica
        failure any caller-supplied target volume is cleaned up.

        :param repServiceInstanceName: the replication service
        :param cloneVolume: the clone volume object
        :param sourceVolume: the source volume object
        :param sourceInstance: the source volume instance
        :param targetInstance: the target volume instance
        :param extraSpecs: extra specifications
        :param isSnapshot: check to see if it is a snapshot
        :returns: int -- return code
        :returns: dict -- cloneDict
        """
        sourceName = sourceVolume['name']
        cloneName = cloneVolume['name']
        try:
            rc, job = self.provision.create_element_replica(
                self.conn, repServiceInstanceName, cloneName, sourceName,
                sourceInstance, targetInstance, extraSpecs)
        except Exception:
            exceptionMessage = (_(
                "Exception during create element replica. "
                "Clone name: %(cloneName)s "
                "Source name: %(sourceName)s "
                "Extra specs: %(extraSpecs)s ")
                % {'cloneName': cloneName,
                   'sourceName': sourceName,
                   'extraSpecs': extraSpecs})
            LOG.error(exceptionMessage)
            if targetInstance is not None:
                # Check if the copy session exists.
                storageSystem = targetInstance['SystemName']
                syncInstanceName = self.utils.find_sync_sv_by_target(
                    self.conn, storageSystem, targetInstance, False)
                if syncInstanceName is not None:
                    # Remove the Clone relationship.
                    rc, job = self.provision.delete_clone_relationship(
                        self.conn, repServiceInstanceName, syncInstanceName,
                        extraSpecs, True)
                storageConfigService = (
                    self.utils.find_storage_configuration_service(
                        self.conn, storageSystem))
                self._delete_target_volume_v2(
                    storageConfigService, targetInstance, extraSpecs)
            raise exception.VolumeBackendAPIException(
                data=exceptionMessage)
        cloneDict = self.provision.get_volume_dict_from_job(
            self.conn, job['Job'])
        fastPolicyName = extraSpecs[FASTPOLICY]
        if isSnapshot:
            # Snapshot: keep the clone relationship in place.
            if fastPolicyName is not None:
                storageSystemName = sourceInstance['SystemName']
                self._add_clone_to_default_storage_group(
                    fastPolicyName, storageSystemName, cloneDict, cloneName,
                    extraSpecs)
            LOG.info(_LI("Snapshot creation %(cloneName)s completed. "
                         "Source Volume: %(sourceName)s."),
                     {'cloneName': cloneName,
                      'sourceName': sourceName})
            return rc, cloneDict
        cloneVolume['provider_location'] = six.text_type(cloneDict)
        syncInstanceName, storageSystemName = (
            self._find_storage_sync_sv_sv(cloneVolume, sourceVolume,
                                          extraSpecs))
        # Remove the Clone relationship so it can be used as a regular lun.
        # 8 - Detach operation.
        rc, job = self.provision.delete_clone_relationship(
            self.conn, repServiceInstanceName, syncInstanceName,
            extraSpecs)
        if fastPolicyName is not None:
            self._add_clone_to_default_storage_group(
                fastPolicyName, storageSystemName, cloneDict, cloneName,
                extraSpecs)
        return rc, cloneDict
def get_target_wwns_from_masking_view(
self, storageSystem, volume, connector):
"""Find target WWNs via the masking view.
:param storageSystem: the storage system name
:param volume: volume to be attached
:param connector: the connector dict
:returns: list -- the target WWN list
"""
targetWwns = []
mvInstanceName = self.get_masking_view_by_volume(volume, connector)
if mvInstanceName is not None:
targetWwns = self.masking.get_target_wwns(
self.conn, mvInstanceName)
LOG.info(_LI("Target wwns in masking view %(maskingView)s: "
"%(targetWwns)s."),
{'maskingView': mvInstanceName,
'targetWwns': six.text_type(targetWwns)})
return targetWwns
def get_port_group_from_masking_view(self, maskingViewInstanceName):
"""Get the port groups in a masking view.
:param maskingViewInstanceName: masking view instance name
:returns: portGroupInstanceName
"""
return self.masking.get_port_group_from_masking_view(
self.conn, maskingViewInstanceName)
def get_masking_view_by_volume(self, volume, connector):
"""Given volume, retrieve the masking view instance name.
:param volume: the volume
:param connector: the connector object
:returns: maskingviewInstanceName
"""
LOG.debug("Finding Masking View for volume %(volume)s.",
{'volume': volume})
volumeInstance = self._find_lun(volume)
return self.masking.get_masking_view_by_volume(
self.conn, volumeInstance, connector)
def get_masking_views_by_port_group(self, portGroupInstanceName):
"""Given port group, retrieve the masking view instance name.
:param portGroupInstanceName: port group instance name
:returns: list -- maskingViewInstanceNames
"""
LOG.debug("Finding Masking Views for port group %(pg)s.",
{'pg': portGroupInstanceName})
return self.masking.get_masking_views_by_port_group(
self.conn, portGroupInstanceName)
    def _create_replica_v3(
            self, repServiceInstanceName, cloneVolume,
            sourceVolume, sourceInstance, isSnapshot, extraSpecs):
        """Create a replica.

        V3 specific function, create replica for source volume,
        including clone and snapshot.

        The target volume is created first; if the replication call then
        fails, the target is cleaned up (any copy session broken, the
        volume deleted from the pool) and the original exception is
        re-raised.

        :param repServiceInstanceName: the replication service
        :param cloneVolume: the clone volume object
        :param sourceVolume: the source volume object
        :param sourceInstance: the device ID of the volume
        :param isSnapshot: boolean -- check to see if it is a snapshot
        :param extraSpecs: extra specifications
        :returns: int -- return code
        :returns: dict -- cloneDict
        """
        cloneName = cloneVolume['name']
        # Default syncType 8: clone.
        syncType = self.utils.get_num(8, '16')
        # Default operation 8: Detach for clone.
        operation = self.utils.get_num(8, '16')
        # Size the target volume exactly like the source (blocks * block
        # size, converted to GB for the volume dict).
        numOfBlocks = sourceInstance['NumberOfBlocks']
        blockSize = sourceInstance['BlockSize']
        volumeSizeInbits = numOfBlocks * blockSize
        volume = {'size':
                  int(self.utils.convert_bits_to_gbs(volumeSizeInbits))}
        _rc, volumeDict, _storageSystemName = (
            self._create_v3_volume(
                volume, cloneName, volumeSizeInbits, extraSpecs))
        targetInstance = self.utils.find_volume_instance(
            self.conn, volumeDict, cloneName)
        LOG.debug("Create replica target volume "
                  "source volume: %(sourceVol)s, "
                  "target volume: %(targetVol)s.",
                  {'sourceVol': sourceInstance.path,
                   'targetVol': targetInstance.path})
        if isSnapshot:
            # SyncType 7: snap, VG3R default snapshot is snapVx.
            syncType = self.utils.get_num(7, '16')
            # Operation 9: Dissolve for snapVx.
            operation = self.utils.get_num(9, '16')
        try:
            _rc, job = (
                self.provisionv3.create_element_replica(
                    self.conn, repServiceInstanceName, cloneName, syncType,
                    sourceInstance, extraSpecs, targetInstance))
        except Exception:
            LOG.warning(_LW(
                "Clone failed on V3. Cleaning up the target volume. "
                "Clone name: %(cloneName)s "),
                {'cloneName': cloneName})
            # Check if the copy session exists.
            storageSystem = targetInstance['SystemName']
            syncInstanceName = self.utils.find_sync_sv_by_target(
                self.conn, storageSystem, targetInstance, False)
            if syncInstanceName is not None:
                # Break the clone relationship.
                rc, job = self.provisionv3.break_replication_relationship(
                    self.conn, repServiceInstanceName, syncInstanceName,
                    operation, extraSpecs, True)
            # Delete the half-created target so no orphan volume is left.
            storageConfigService = (
                self.utils.find_storage_configuration_service(
                    self.conn, storageSystem))
            deviceId = targetInstance['DeviceID']
            volumeName = targetInstance['Name']
            storageGroupName = self.utils.get_v3_storage_group_name(
                extraSpecs[POOL], extraSpecs[SLO],
                extraSpecs[WORKLOAD])
            rc = self._delete_from_pool_v3(
                storageConfigService, targetInstance, volumeName,
                deviceId, storageGroupName, extraSpecs)
            # Re-throw the exception.
            raise
        cloneDict = self.provisionv3.get_volume_dict_from_job(
            self.conn, job['Job'])
        cloneVolume['provider_location'] = six.text_type(cloneDict)
        syncInstanceName, _storageSystem = (
            self._find_storage_sync_sv_sv(cloneVolume, sourceVolume,
                                          extraSpecs, True))
        # Detach/dissolve the clone/snap relationship so the target can be
        # used as a standalone lun.
        # 8 - Detach operation.
        # 9 - Dissolve operation.
        if isSnapshot:
            # Operation 9: dissolve for snapVx.
            operation = self.utils.get_num(9, '16')
        else:
            # Operation 8: detach for clone.
            operation = self.utils.get_num(8, '16')
        rc, job = self.provisionv3.break_replication_relationship(
            self.conn, repServiceInstanceName, syncInstanceName,
            operation, extraSpecs)
        return rc, cloneDict
    def _delete_cg_and_members(
            self, storageSystem, cgName, modelUpdate, volumes, extraSpecs):
        """Helper function to delete a consistencygroup and its member volumes.

        :param storageSystem: storage system
        :param cgName: consistency group name
        :param modelUpdate: dict -- the model update dict
        :param volumes: the list of member volumes
        :param extraSpecs: extra specifications
        :returns: dict -- modelUpdate
        :returns: list -- the updated list of member volumes
        :raises: VolumeBackendAPIException
        """
        replicationService = self.utils.find_replication_service(
            self.conn, storageSystem)
        storageConfigservice = (
            self.utils.find_storage_configuration_service(
                self.conn, storageSystem))
        cgInstanceName = self._find_consistency_group(
            replicationService, cgName)
        if cgInstanceName is None:
            exception_message = (_("Cannot find CG group %s.") % cgName)
            raise exception.VolumeBackendAPIException(
                data=exception_message)
        # Capture the member list before the group itself is deleted.
        memberInstanceNames = self._get_members_of_replication_group(
            cgInstanceName)
        # Delete the CG object first; the member volumes survive it.
        self.provision.delete_consistency_group(
            self.conn, replicationService, cgInstanceName, cgName,
            extraSpecs)
        if memberInstanceNames:
            try:
                controllerConfigurationService = (
                    self.utils.find_controller_configuration_service(
                        self.conn, storageSystem))
                # Each member must leave its storage group before it can
                # be deleted from the pool.
                for memberInstanceName in memberInstanceNames:
                    self._remove_device_from_storage_group(
                        controllerConfigurationService,
                        memberInstanceName, 'Member Volume', extraSpecs)
                LOG.debug("Deleting CG members. CG: %(cg)s "
                          "%(numVols)lu member volumes: %(memVols)s.",
                          {'cg': cgInstanceName,
                           'numVols': len(memberInstanceNames),
                           'memVols': memberInstanceNames})
                if extraSpecs[ISV3]:
                    self.provisionv3.delete_volume_from_pool(
                        self.conn, storageConfigservice,
                        memberInstanceNames, None, extraSpecs)
                else:
                    self.provision.delete_volume_from_pool(
                        self.conn, storageConfigservice,
                        memberInstanceNames, None, extraSpecs)
                for volumeRef in volumes:
                    volumeRef['status'] = 'deleted'
            except Exception:
                # Deliberate best-effort: mark members and the CG model as
                # failed instead of raising, so the caller can report state.
                for volumeRef in volumes:
                    volumeRef['status'] = 'error_deleting'
                modelUpdate['status'] = 'error_deleting'
        return modelUpdate, volumes
def _delete_target_volume_v2(
self, storageConfigService, targetVolumeInstance, extraSpecs):
"""Helper function to delete the clone target volume instance.
:param storageConfigService: storage configuration service instance
:param targetVolumeInstance: clone target volume instance
:param extraSpecs: extra specifications
"""
deviceId = targetVolumeInstance['DeviceID']
volumeName = targetVolumeInstance['Name']
rc = self._delete_from_pool(storageConfigService,
targetVolumeInstance,
volumeName, deviceId,
extraSpecs[FASTPOLICY],
extraSpecs)
return rc
def _validate_pool(self, volume):
"""Get the pool from volume['host'].
There may be backward compatibiliy concerns, so putting in a
check to see if a version has been added to provider_location.
If it has, we know we are at the current version, if not, we
assume it was created pre 'Pool Aware Scheduler' feature.
:param volume: the volume Object
:returns: string -- pool
:raises: VolumeBackendAPIException
"""
pool = None
# Volume is None in CG ops.
if volume is None:
return pool
# This check is for all operations except a create.
# On a create provider_location is None
try:
if volume['provider_location']:
version = self._get_version_from_provider_location(
volume['provider_location'])
if not version:
return pool
except KeyError:
return pool
try:
pool = volume_utils.extract_host(volume['host'], 'pool')
if pool:
LOG.debug("Pool from volume['host'] is %(pool)s.",
{'pool': pool})
else:
exceptionMessage = (_(
"Pool from volume['host'] %(host)s not found.")
% {'host': volume['host']})
raise exception.VolumeBackendAPIException(
data=exceptionMessage)
except Exception as ex:
exceptionMessage = (_(
"Pool from volume['host'] failed with: %(ex)s.")
% {'ex': ex})
raise exception.VolumeBackendAPIException(
data=exceptionMessage)
return pool
def _get_version_from_provider_location(self, loc):
"""Get the version from the provider location.
:param loc: the provider_location dict
:returns: version or None
"""
version = None
try:
if isinstance(loc, six.string_types):
name = eval(loc)
version = name['version']
except KeyError:
pass
return version
    def manage_existing(self, volume, external_ref):
        """Manages an existing VMAX Volume (import to Cinder).

        Renames the existing volume to match the expected name for the volume.
        Also need to consider things like QoS, Emulation, account/tenant.

        Validation steps, each raising VolumeBackendAPIException on
        failure: FAST must be disabled, the volume must not be in any
        masking view, must not be a replication source, and must live in
        the pool this cinder host manages.

        :param volume: the volume object including the volume_type_id
        :param external_ref: reference to the existing volume
        :returns: dict -- model_update
        :raises: VolumeBackendAPIException
        """
        extraSpecs = self._initial_setup(volume)
        self.conn = self._get_ecom_connection()
        arrayName, deviceId = self.utils.get_array_and_device_id(volume,
                                                                 external_ref)
        # Manage existing volume is not supported if fast enabled.
        if extraSpecs[FASTPOLICY]:
            LOG.warning(_LW(
                "FAST is enabled. Policy: %(fastPolicyName)s."),
                {'fastPolicyName': extraSpecs[FASTPOLICY]})
            exceptionMessage = (_(
                "Manage volume is not supported if FAST is enable. "
                "FAST policy: %(fastPolicyName)s.")
                % {'fastPolicyName': extraSpecs[FASTPOLICY]})
            LOG.error(exceptionMessage)
            raise exception.VolumeBackendAPIException(
                data=exceptionMessage)
        # Check if the volume is attached by checking if in any masking view.
        volumeInstanceName = (
            self.utils.find_volume_by_device_id_on_array(self.conn,
                                                         arrayName, deviceId))
        sgInstanceNames = (
            self.utils.get_storage_groups_from_volume(
                self.conn, volumeInstanceName))
        for sgInstanceName in sgInstanceNames:
            mvInstanceName = self.masking.get_masking_view_from_storage_group(
                self.conn, sgInstanceName)
            if mvInstanceName:
                exceptionMessage = (_(
                    "Unable to import volume %(deviceId)s to cinder. "
                    "Volume is in masking view %(mv)s.")
                    % {'deviceId': deviceId,
                       'mv': mvInstanceName})
                LOG.error(exceptionMessage)
                raise exception.VolumeBackendAPIException(
                    data=exceptionMessage)
        # Check if there is any associated snapshots with the volume.
        cinderPoolInstanceName, storageSystemName = (
            self._get_pool_and_storage_system(extraSpecs))
        repSessionInstanceName = (
            self.utils.get_associated_replication_from_source_volume(
                self.conn, storageSystemName, deviceId))
        if repSessionInstanceName:
            exceptionMessage = (_(
                "Unable to import volume %(deviceId)s to cinder. "
                "It is the source volume of replication session %(sync)s.")
                % {'deviceId': deviceId,
                   'sync': repSessionInstanceName})
            LOG.error(exceptionMessage)
            raise exception.VolumeBackendAPIException(
                data=exceptionMessage)
        # Make sure the existing external volume is in the same storage pool.
        volumePoolInstanceName = (
            self.utils.get_assoc_pool_from_volume(self.conn,
                                                  volumeInstanceName))
        volumePoolName = volumePoolInstanceName['InstanceID']
        cinderPoolName = cinderPoolInstanceName['InstanceID']
        LOG.debug("Storage pool of existing volume: %(volPool)s, "
                  "Storage pool currently managed by cinder: %(cinderPool)s.",
                  {'volPool': volumePoolName,
                   'cinderPool': cinderPoolName})
        if volumePoolName != cinderPoolName:
            exceptionMessage = (_(
                "Unable to import volume %(deviceId)s to cinder. The external "
                "volume is not in the pool managed by current cinder host.")
                % {'deviceId': deviceId})
            LOG.error(exceptionMessage)
            raise exception.VolumeBackendAPIException(
                data=exceptionMessage)
        # Rename the volume
        volumeId = volume['name']
        volumeElementName = self.utils.get_volume_element_name(volumeId)
        LOG.debug("Rename volume %(vol)s to %(elementName)s.",
                  {'vol': volumeInstanceName,
                   'elementName': volumeElementName})
        volumeInstance = self.utils.rename_volume(self.conn,
                                                  volumeInstanceName,
                                                  volumeElementName)
        # Build the provider_location that later driver operations parse.
        keys = {}
        volpath = volumeInstance.path
        keys['CreationClassName'] = volpath['CreationClassName']
        keys['SystemName'] = volpath['SystemName']
        keys['DeviceID'] = volpath['DeviceID']
        keys['SystemCreationClassName'] = volpath['SystemCreationClassName']
        model_update = {}
        provider_location = {}
        provider_location['classname'] = volpath['CreationClassName']
        provider_location['keybindings'] = keys
        model_update.update({'display_name': volumeElementName})
        volume['provider_location'] = six.text_type(provider_location)
        model_update.update({'provider_location': volume['provider_location']})
        return model_update
def manage_existing_get_size(self, volume, external_ref):
"""Return size of an existing VMAX volume to manage_existing.
:param self: reference to class
:param volume: the volume object including the volume_type_id
:param external_ref: reference to the existing volume
:returns: size of the volume in GB
"""
LOG.debug("Volume in manage_existing_get_size: %(volume)s.",
{'volume': volume})
arrayName, deviceId = self.utils.get_array_and_device_id(volume,
external_ref)
volumeInstanceName = (
self.utils.find_volume_by_device_id_on_array(self.conn,
arrayName, deviceId))
volumeInstance = self.conn.GetInstance(volumeInstanceName)
byteSize = self.utils.get_volume_size(self.conn, volumeInstance)
gbSize = int(byteSize) / units.Gi
LOG.debug(
"Size of volume %(deviceID)s is %(volumeSize)s GB.",
{'deviceID': deviceId,
'volumeSize': gbSize})
return gbSize
def unmanage(self, volume):
"""Export VMAX volume from Cinder.
Leave the volume intact on the backend array.
:param volume: the volume object
:raises: VolumeBackendAPIException
"""
volumeName = volume['name']
volumeId = volume['id']
LOG.debug("Unmanage volume %(name)s, id=%(id)s",
{'name': volumeName,
'id': volumeId})
self._initial_setup(volume)
self.conn = self._get_ecom_connection()
volumeInstance = self._find_lun(volume)
if volumeInstance is None:
exceptionMessage = (_("Cannot find Volume: %(id)s. "
"unmanage operation. Exiting...")
% {'id': volumeId})
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(data=exceptionMessage)
# Rename the volume to volumeId, thus remove the 'OS-' prefix.
volumeInstance = self.utils.rename_volume(self.conn,
volumeInstance,
volumeId)
    def update_consistencygroup(self, group, add_volumes,
                                remove_volumes):
        """Updates LUNs in consistency group.

        :param group: storage configuration service instance
        :param add_volumes: the volumes uuids you want to add to the CG
        :param remove_volumes: the volumes uuids you want to remove from
                               the CG
        :returns: tuple -- (modelUpdate, None, None); the Nones stand in
                  for add_volumes_update and remove_volumes_update
        :raises: ConsistencyGroupNotFound, VolumeBackendAPIException
        """
        LOG.info(_LI("Update Consistency Group: %(group)s. "
                     "This adds and/or removes volumes from a CG."),
                 {'group': group['id']})
        modelUpdate = {'status': 'available'}
        volumeTypeId = group['volume_type_id'].replace(",", "")
        # CG names on the array are truncated to 8 characters.
        cg_name = self.utils.truncate_string(group['id'], 8)
        extraSpecs = self._initial_setup(None, volumeTypeId)
        _poolInstanceName, storageSystem = (
            self._get_pool_and_storage_system(extraSpecs))
        # Resolve member volume instance paths up front; volumes that are
        # not found on the array are logged and skipped by
        # _get_volume_instance_names.
        add_vols = [vol for vol in add_volumes] if add_volumes else []
        add_instance_names = self._get_volume_instance_names(add_vols)
        remove_vols = [vol for vol in remove_volumes] if remove_volumes else []
        remove_instance_names = self._get_volume_instance_names(remove_vols)
        self.conn = self._get_ecom_connection()
        try:
            replicationService = self.utils.find_replication_service(
                self.conn, storageSystem)
            cgInstanceName = (
                self._find_consistency_group(replicationService, cg_name))
            if cgInstanceName is None:
                raise exception.ConsistencyGroupNotFound(
                    consistencygroup_id=cg_name)
            # Add volume(s) to a consistency group
            if add_instance_names:
                self.provision.add_volume_to_cg(
                    self.conn, replicationService, cgInstanceName,
                    add_instance_names, cg_name, None,
                    extraSpecs)
            # Remove volume(s) from a consistency group
            if remove_instance_names:
                self.provision.remove_volume_from_cg(
                    self.conn, replicationService, cgInstanceName,
                    remove_instance_names, cg_name, None,
                    extraSpecs)
        except exception.ConsistencyGroupNotFound:
            # Propagate unchanged so the caller can distinguish it.
            raise
        except Exception as ex:
            LOG.error(_LE("Exception: %(ex)s"), {'ex': ex})
            exceptionMessage = (_("Failed to update consistency group:"
                                  " %(cgName)s.")
                                % {'cgName': cg_name})
            LOG.error(exceptionMessage)
            raise exception.VolumeBackendAPIException(data=exceptionMessage)
        return modelUpdate, None, None
def _get_volume_instance_names(self, volumes):
"""Get volume instance names from volume.
:param volumes: volume objects
:returns: volume instance names
"""
volumeInstanceNames = []
for volume in volumes:
volumeInstance = self._find_lun(volume)
if volumeInstance is None:
LOG.error(_LE("Volume %(name)s not found on the array."),
{'name': volume['name']})
else:
volumeInstanceNames.append(volumeInstance.path)
return volumeInstanceNames
def create_consistencygroup_from_src(self, context, group, volumes,
cgsnapshot, snapshots, db):
"""Creates the consistency group from source.
Currently the source can only be a cgsnapshot.
:param context: the context
:param group: the consistency group object to be created
:param volumes: volumes in the consistency group
:param cgsnapshot: the source consistency group snapshot
:param snapshots: snapshots of the source volumes
:param db: database
:returns: model_update, volumes_model_update
model_update is a dictionary of cg status
volumes_model_update is a list of dictionaries of volume
update
"""
LOG.debug("Enter EMCVMAXCommon::create_consistencygroup_from_src. "
"Group to be created: %(cgId)s, "
"Source snapshot: %(cgSnapshot)s.",
{'cgId': group['id'],
'cgSnapshot': cgsnapshot['consistencygroup_id']})
volumeTypeId = group['volume_type_id'].replace(",", "")
extraSpecs = self._initial_setup(None, volumeTypeId)
self.create_consistencygroup(context, group)
targetCgName = self.utils.truncate_string(group['id'], TRUNCATE_8)
if not snapshots:
exceptionMessage = (_("No source snapshots provided to create "
"consistency group %s.") % targetCgName)
raise exception.VolumeBackendAPIException(
data=exceptionMessage)
modelUpdate = {'status': 'available'}
_poolInstanceName, storageSystem = (
self._get_pool_and_storage_system(extraSpecs))
try:
replicationService = self.utils.find_replication_service(
self.conn, storageSystem)
if replicationService is None:
exceptionMessage = (_(
"Cannot find replication service on system %s.") %
storageSystem)
raise exception.VolumeBackendAPIException(
data=exceptionMessage)
targetCgInstanceName = self._find_consistency_group(
replicationService, targetCgName)
LOG.debug("Create CG %(targetCg)s from snapshot.",
{'targetCg': targetCgInstanceName})
for volume, snapshot in zip(volumes, snapshots):
volumeSizeInbits = int(self.utils.convert_gb_to_bits(
snapshot['volume_size']))
targetVolumeName = 'targetVol'
volume = {'size': int(self.utils.convert_bits_to_gbs(
volumeSizeInbits))}
if extraSpecs[ISV3]:
_rc, volumeDict, _storageSystemName = (
self._create_v3_volume(
volume, targetVolumeName, volumeSizeInbits,
extraSpecs))
else:
_rc, volumeDict, _storageSystemName = (
self._create_composite_volume(
volume, targetVolumeName, volumeSizeInbits,
extraSpecs))
targetVolumeInstance = self.utils.find_volume_instance(
self.conn, volumeDict, targetVolumeName)
LOG.debug("Create target volume for member snapshot. "
"Source snapshot: %(snapshot)s, "
"Target volume: %(targetVol)s.",
{'snapshot': snapshot['id'],
'targetVol': targetVolumeInstance.path})
self.provision.add_volume_to_cg(self.conn,
replicationService,
targetCgInstanceName,
targetVolumeInstance.path,
targetCgName,
targetVolumeName,
extraSpecs)
sourceCgName = self.utils.truncate_string(cgsnapshot['id'],
TRUNCATE_8)
sourceCgInstanceName = self._find_consistency_group(
replicationService, sourceCgName)
if sourceCgInstanceName is None:
exceptionMessage = (_("Cannot find source CG instance. "
"consistencygroup_id: %s.") %
cgsnapshot['consistencygroup_id'])
raise exception.VolumeBackendAPIException(
data=exceptionMessage)
relationName = self.utils.truncate_string(group['id'], TRUNCATE_5)
if extraSpecs[ISV3]:
self.provisionv3.create_group_replica(
self.conn, replicationService, sourceCgInstanceName,
targetCgInstanceName, relationName, extraSpecs)
else:
self.provision.create_group_replica(
self.conn, replicationService, sourceCgInstanceName,
targetCgInstanceName, relationName, extraSpecs)
# Break the replica group relationship.
rgSyncInstanceName = self.utils.find_group_sync_rg_by_target(
self.conn, storageSystem, targetCgInstanceName, extraSpecs,
True)
if rgSyncInstanceName is not None:
if extraSpecs[ISV3]:
# Operation 9: dissolve for snapVx
operation = self.utils.get_num(9, '16')
self.provisionv3.break_replication_relationship(
self.conn, replicationService, rgSyncInstanceName,
operation, extraSpecs)
else:
self.provision.delete_clone_relationship(
self.conn, replicationService,
rgSyncInstanceName, extraSpecs)
except Exception as ex:
modelUpdate['status'] = 'error'
cgSnapshotId = cgsnapshot['consistencygroup_id']
volumes_model_update = self.utils.get_volume_model_updates(
context, db, group['id'], modelUpdate['status'])
LOG.error(_LE("Exception: %(ex)s."), {'ex': ex})
exceptionMessage = (_("Failed to create CG %(cgName)s "
"from snapshot %(cgSnapshot)s.")
% {'cgName': targetCgName,
'cgSnapshot': cgSnapshotId})
LOG.error(exceptionMessage)
return modelUpdate, volumes_model_update
volumes_model_update = self.utils.get_volume_model_updates(
context, db, group['id'], modelUpdate['status'])
return modelUpdate, volumes_model_update
def _find_ip_protocol_endpoints(self, conn, storageSystemName,
portgroupname):
"""Find the IP protocol endpoint for ISCSI
:param storageSystemName: the system name
:param portgroupname: the portgroup name
:returns: foundIpAddresses
"""
foundipaddresses = []
configservice = (
self.utils.find_controller_configuration_service(
conn, storageSystemName))
portgroupinstancename = (
self.masking.find_port_group(conn, configservice, portgroupname))
iscsiendpointinstancenames = (
self.utils.get_iscsi_protocol_endpoints(
conn, portgroupinstancename))
for iscsiendpointinstancename in iscsiendpointinstancenames:
tcpendpointinstancenames = (
self.utils.get_tcp_protocol_endpoints(
conn, iscsiendpointinstancename))
for tcpendpointinstancename in tcpendpointinstancenames:
ipendpointinstancenames = (
self.utils.get_ip_protocol_endpoints(
conn, tcpendpointinstancename))
for ipendpointinstancename in ipendpointinstancenames:
ipaddress = (
self.utils.get_iscsi_ip_address(
conn, ipendpointinstancename))
foundipaddresses.append(ipaddress)
return foundipaddresses
|
{
"content_hash": "f1d8008de456ac62c27c6c3aab9ebc59",
"timestamp": "",
"source": "github",
"line_count": 4334,
"max_line_length": 79,
"avg_line_length": 44.109137055837564,
"alnum_prop": 0.5753129429980802,
"repo_name": "apporc/cinder",
"id": "f1935fe54a9dc928e00ababc87fff50f1b2d1c4f",
"size": "191812",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cinder/volume/drivers/emc/emc_vmax_common.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "13595277"
},
{
"name": "Shell",
"bytes": "8222"
}
],
"symlink_target": ""
}
|
<?php
namespace Sabre\VObject\Property;
use Sabre\VObject\Component\VCard;
class TextTest extends \PHPUnit_Framework_TestCase {

    /**
     * Serializes a vCard 2.1 document whose PROP property has the given
     * value, and asserts that the PROP line serializes to $expected.
     *
     * Consistency fix: the method was declared as
     * "assertVCard21serialization" but invoked everywhere as
     * "assertVCard21Serialization" (it only worked because PHP method
     * names are case-insensitive); the declaration now matches the call
     * sites.
     *
     * @param string|array $propValue value(s) for the PROP property
     * @param string $expected expected serialized PROP line(s)
     */
    function assertVCard21Serialization($propValue, $expected) {

        $doc = new VCard(array(
            'VERSION'=>'2.1',
            'PROP' => $propValue
        ), false);

        // Adding quoted-printable, because we're testing if it gets removed
        // automatically.
        $doc->PROP['ENCODING'] = 'QUOTED-PRINTABLE';
        $doc->PROP['P1'] = 'V1';

        $output = $doc->serialize();

        $this->assertEquals("BEGIN:VCARD\r\nVERSION:2.1\r\n$expected\r\nEND:VCARD\r\n", $output);

    }

    // A plain value keeps its semicolon unescaped in vCard 2.1.
    function testSerializeVCard21() {
        $this->assertVCard21Serialization(
            'f;oo',
            'PROP;P1=V1:f;oo'
        );
    }

    // Array components are joined with ';', so a literal ';' is escaped.
    function testSerializeVCard21Array() {
        $this->assertVCard21Serialization(
            array('f;oo','bar'),
            'PROP;P1=V1:f\;oo;bar'
        );
    }

    // Lines longer than 75 characters are folded with CRLF + space.
    function testSerializeVCard21Fold() {
        $this->assertVCard21Serialization(
            str_repeat('x',80),
            'PROP;P1=V1:' . str_repeat('x',64) . "\r\n " . str_repeat('x',16)
        );
    }

    // CRLF in the value forces quoted-printable encoding to be kept.
    function testSerializeQuotedPrintable() {
        $this->assertVCard21Serialization(
            "foo\r\nbar",
            'PROP;P1=V1;ENCODING=QUOTED-PRINTABLE:foo=0D=0Abar'
        );
    }

    // Quoted-printable output folds with a trailing '=' soft line break.
    function testSerializeQuotedPrintableFold() {
        $this->assertVCard21Serialization(
            "foo\r\nbarxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
            "PROP;P1=V1;ENCODING=QUOTED-PRINTABLE:foo=0D=0Abarxxxxxxxxxxxxxxxxxxxxxxxxxx=\r\n xxx"
        );
    }

    /**
     * Asserts that an N property with fewer than 5 components is reported
     * by validate() and expanded to 5 components by the REPAIR option.
     */
    function testValidateMinimumPropValue() {

        $vcard = <<<IN
BEGIN:VCARD
VERSION:4.0
UID:foo
FN:Hi!
N:A
END:VCARD
IN;

        $vcard = \Sabre\VObject\Reader::read($vcard);
        $this->assertEquals(1, count($vcard->validate()));

        $this->assertEquals(1, count($vcard->N->getParts()));

        $vcard->validate(\Sabre\VObject\Node::REPAIR);
        $this->assertEquals(5, count($vcard->N->getParts()));

    }

}
|
{
"content_hash": "36a95e7f40addd651acbd5435fa80d98",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 98,
"avg_line_length": 21.75,
"alnum_prop": 0.5795019157088123,
"repo_name": "jxwang0/SoCal",
"id": "8f698f7d851681d9099af12f430127b2b8a54850",
"size": "2088",
"binary": false,
"copies": "17",
"ref": "refs/heads/master",
"path": "sabredav/vendor/sabre/vobject/tests/VObject/Property/TextTest.php",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "2326"
},
{
"name": "C++",
"bytes": "511"
},
{
"name": "CSS",
"bytes": "92762"
},
{
"name": "HTML",
"bytes": "58196"
},
{
"name": "JavaScript",
"bytes": "3155330"
},
{
"name": "PHP",
"bytes": "29105"
},
{
"name": "PLpgSQL",
"bytes": "184"
},
{
"name": "Python",
"bytes": "4009"
},
{
"name": "Shell",
"bytes": "348"
}
],
"symlink_target": ""
}
|
"""Strategy combinations for combinations.combine()."""
import tensorflow.compat.v2 as tf
# Strategies spanning more than one device.
multidevice_strategies = [
    tf.__internal__.distribute.combinations.mirrored_strategy_with_gpu_and_cpu,
    tf.__internal__.distribute.combinations.mirrored_strategy_with_two_gpus,
    tf.__internal__.distribute.combinations.tpu_strategy,
]

# Multi-worker MirroredStrategy variants (CPU and GPU topologies).
multiworker_strategies = [
    tf.__internal__.distribute.combinations.multi_worker_mirrored_2x1_cpu,
    tf.__internal__.distribute.combinations.multi_worker_mirrored_2x1_gpu,
    tf.__internal__.distribute.combinations.multi_worker_mirrored_2x2_gpu,
]

strategies_minus_default_minus_tpu = [
    tf.__internal__.distribute.combinations.one_device_strategy,
    tf.__internal__.distribute.combinations.one_device_strategy_gpu,
    tf.__internal__.distribute.combinations.mirrored_strategy_with_gpu_and_cpu,
    tf.__internal__.distribute.combinations.mirrored_strategy_with_two_gpus,
    tf.__internal__.distribute.combinations.central_storage_strategy_with_gpu_and_cpu,  # noqa: E501
]

# Same contents as strategies_minus_default_minus_tpu, with the default
# strategy prepended. Derived (as a fresh list) so the two stay in sync.
strategies_minus_tpu = [
    tf.__internal__.distribute.combinations.default_strategy,
] + strategies_minus_default_minus_tpu

# Historical alias with identical contents to multiworker_strategies;
# copied so mutating one list cannot silently affect the other.
multi_worker_mirrored_strategies = list(multiworker_strategies)

tpu_strategies = [
    tf.__internal__.distribute.combinations.tpu_strategy,
]

parameter_server_strategies_single_worker = [
    tf.__internal__.distribute.combinations.parameter_server_strategy_1worker_2ps_cpu,  # noqa: E501
    tf.__internal__.distribute.combinations.parameter_server_strategy_1worker_2ps_1gpu,  # noqa: E501
]

parameter_server_strategies_multi_worker = [
    tf.__internal__.distribute.combinations.parameter_server_strategy_3worker_2ps_cpu,  # noqa: E501
    tf.__internal__.distribute.combinations.parameter_server_strategy_3worker_2ps_1gpu,  # noqa: E501
]

all_strategies = strategies_minus_tpu + tpu_strategies
|
{
"content_hash": "2ccbeed692e1b1210a177e84295e2c8d",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 101,
"avg_line_length": 44.648148148148145,
"alnum_prop": 0.7515553712152634,
"repo_name": "keras-team/keras",
"id": "8261e2386ce7abe13469839a7d9aed400bdd5765",
"size": "3100",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "keras/distribute/strategy_combinations.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "900"
},
{
"name": "Python",
"bytes": "11342063"
},
{
"name": "Shell",
"bytes": "11489"
},
{
"name": "Starlark",
"bytes": "273139"
}
],
"symlink_target": ""
}
|
import { _ } from 'meteor/underscore';
import { dbArena } from '/db/dbArena';
import { dbArenaFighters } from '/db/dbArenaFighters';
// Compute the attack order for all registered fighters (計算出所有報名者的攻擊次序)
// Compute and persist a randomized attack order for every fighter that
// signed up for the latest arena.
export function computeArenaAttackSequences() {
  const lastArenaData = dbArena.findOne({}, { sort: { beginDate: -1 }, fields: { _id: 1 } });
  if (! lastArenaData) {
    return;
  }

  const { _id: arenaId } = lastArenaData;

  const fighterCompanyIdList = _.pluck(dbArenaFighters.find({ arenaId }, { fields: { companyId: 1 } }).fetch(), 'companyId');
  const shuffledFighterCompanyIdList = _.shuffle(fighterCompanyIdList);
  const attackSequence = _.range(shuffledFighterCompanyIdList.length);

  dbArena.update(arenaId, { $set: { shuffledFighterCompanyIdList } });

  // Fix: restrict the update to the current arena's fighters. The
  // company-id list above is already filtered by arenaId, so iterating
  // all fighters would compute a bogus sequence (indexOf === -1, so
  // nothing removed from attackSequence) for fighters of past arenas.
  dbArenaFighters
    .find({ arenaId }, { fields: { _id: 1, companyId: 1 } })
    .forEach((fighter) => {
      const thisFighterIndex = _.indexOf(shuffledFighterCompanyIdList, fighter.companyId);
      const thisAttackSequence = _.without(attackSequence, thisFighterIndex);
      const shuffledAttackSequence = _.shuffle(thisAttackSequence);
      dbArenaFighters.update(fighter._id, { $set: { attackSequence: shuffledAttackSequence } });
    });
}
|
{
"content_hash": "9300dece86c3d057c0fa230a895d393c",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 125,
"avg_line_length": 40.10344827586207,
"alnum_prop": 0.6973344797936372,
"repo_name": "silentvow/acgn-stock",
"id": "802348b8df4e029f325b0cf8390b90f8341e026d",
"size": "1189",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "server/functions/arena/computeArenaAttackSequences.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "14096"
},
{
"name": "HTML",
"bytes": "245877"
},
{
"name": "JavaScript",
"bytes": "1143029"
}
],
"symlink_target": ""
}
|
// This is an open source non-commercial project. Dear PVS-Studio, please check it.
// PVS-Studio Static Code Analyzer for C, C++ and C#: http://www.viva64.com
/* Unifor.cs --
* Ars Magna project, http://arsmagna.ru
* -------------------------------------------------------
* Status: poor
*/
#region Using directives
using System;
using AM;
using AM.Collections;
using AM.Logging;
using CodeJam;
using JetBrains.Annotations;
using ManagedIrbis.Pft.Infrastructure.Unifors;
using MoonSharp.Interpreter;
#endregion
namespace ManagedIrbis.Pft.Infrastructure
{
/// <summary>
/// Unifor.
/// </summary>
[PublicAPI]
[MoonSharpUserData]
public sealed class Unifor
: IFormatExit
{
#region Properties
        /// <summary>
        /// Registry of unifor handlers, keyed (case-insensitively) by the
        /// unifor code, e.g. "0", "S", "+1A". Filled by the static
        /// constructor via RegisterActions.
        /// </summary>
        [NotNull]
        public static CaseInsensitiveDictionary<Action<PftContext, PftNode, string>> Registry
        {
            get; private set;
        }
        /// <summary>
        /// Throw an exception on empty UNIFOR?
        /// Initialized to false by the static constructor.
        /// </summary>
        public static bool ThrowOnEmpty { get; set; }
        /// <summary>
        /// Throw an exception on unknown key?
        /// Initialized to false by the static constructor.
        /// </summary>
        public static bool ThrowOnUnknown { get; set; }
#endregion
#region Construction
static Unifor()
{
ThrowOnEmpty = false;
ThrowOnUnknown = false;
Registry = new CaseInsensitiveDictionary<Action<PftContext, PftNode, string>>();
RegisterActions();
}
#endregion
#region Private members
private static void RegisterActions()
{
Registry.Add("0", Unifor0.FormatAll);
Registry.Add("1", Unifor1.GetElement);
Registry.Add("2", Unifor2.GetMaxMfn);
Registry.Add("3", Unifor3.PrintDate);
Registry.Add("4", Unifor4.FormatPreviousVersion);
// "5" unknown
Registry.Add("6", Unifor6.ExecuteNestedFormat);
Registry.Add("7", Unifor7.FormatDocuments);
Registry.Add("8", Unifor8.FormatWithFst);
Registry.Add("9", Unifor9.RemoveDoubleQuotes);
Registry.Add("A", UniforA.GetFieldRepeat);
Registry.Add("B", UniforB.Convolution);
Registry.Add("C", UniforC.CheckIsbn);
Registry.Add("D", UniforD.FormatDocumentDB);
Registry.Add("E", UniforE.GetFirstWords);
Registry.Add("F", UniforF.GetLastWords);
Registry.Add("G", UniforG.GetPart);
Registry.Add("H", UniforH.ExtractAngleBrackets);
Registry.Add("I", UniforI.GetIniFileEntry);
Registry.Add("J", UniforJ.GetTermRecordCountDB);
Registry.Add("K", UniforK.GetMenuEntry);
Registry.Add("L", UniforL.ContinueTerm);
Registry.Add("M", UniforM.Sort);
Registry.Add("O", UniforO.AllExemplars);
Registry.Add("P", UniforP.UniqueField);
Registry.Add("Q", UniforQ.ToLower);
Registry.Add("R", UniforR.RandomNumber);
Registry.Add("S", UniforS.Add);
Registry.Add("S0", UniforS.Clear);
Registry.Add("SA", UniforS.Arabic);
Registry.Add("SX", UniforS.Roman);
Registry.Add("T", UniforT.Transliterate);
Registry.Add("U", UniforU.Cumulate);
Registry.Add("V", UniforU.Decumulate);
Registry.Add("W", UniforU.Check);
Registry.Add("X", UniforX.RemoveAngleBrackets);
Registry.Add("Y", UniforY.FreeExemplars);
Registry.Add("Z", UniforZ.GenerateExemplars);
Registry.Add("+0", UniforPlus0.FormatAll);
Registry.Add("+1", UniforPlus1.ClearGlobals);
Registry.Add("+1A", UniforPlus1.AddGlobals);
Registry.Add("+1G", UniforPlus1.DistinctGlobals);
Registry.Add("+1I", UniforPlus1.DistinctList);
Registry.Add("+1K", UniforPlus1.DecodeGlobals);
Registry.Add("+1M", UniforPlus1.MultiplyGlobals);
Registry.Add("+1O", UniforPlus1.DecodeList);
Registry.Add("+1R", UniforPlus1.ReadGlobal);
Registry.Add("+1S", UniforPlus1.SubstractGlobals);
Registry.Add("+1T", UniforPlus1.SortGlobals);
Registry.Add("+1V", UniforPlus1.SortList);
Registry.Add("+1W", UniforPlus1.WriteGlobal);
Registry.Add("+2", UniforPlus2.System);
Registry.Add("+3A", UniforPlus3.FieldsToText);
Registry.Add("+3C", UniforPlus3.TruncateFullTextFileName);
Registry.Add("+3D", UniforPlus3.UrlDecode);
Registry.Add("+3E", UniforPlus3.UrlEncode);
Registry.Add("+3G", UniforPlus3.GetExternalDbTermRecordCount);
Registry.Add("+3H", UniforPlus3.HtmlSpecialChars);
Registry.Add("+3J", UniforPlus3.GetTermCount);
Registry.Add("+3S", UniforPlus3.SearchFormat);
Registry.Add("+3T", UniforPlus3.Divide);
Registry.Add("+3U", UniforPlus3.ConvertToUtf);
Registry.Add("+3W", UniforPlus3.ConvertToAnsi);
Registry.Add("+3+", UniforPlus3.ReplacePlus);
Registry.Add("+4", UniforPlus4.GetField);
Registry.Add("+5", UniforPlus5.GetMenuEntry);
Registry.Add("+6", UniforPlus6.GetRecordStatus);
Registry.Add("+61", UniforPlus6.GetRecordStatus);
Registry.Add("+62", UniforPlus6.GetRecordStatus);
Registry.Add("+63", UniforPlus6.GetRecordStatus);
Registry.Add("+7", UniforPlus7.ClearGlobals);
Registry.Add("+7A", UniforPlus7.UnionGlobals);
Registry.Add("+7G", UniforPlus7.DistinctGlobal);
Registry.Add("+7M", UniforPlus7.MultiplyGlobals);
Registry.Add("+7R", UniforPlus7.ReadGlobal);
Registry.Add("+7S", UniforPlus7.SubstractGlobals);
Registry.Add("+7T", UniforPlus7.SortGlobal);
Registry.Add("+7U", UniforPlus7.AppendGlobal);
Registry.Add("+7W", UniforPlus7.WriteGlobal);
Registry.Add("+8", UniforPlus8.ExecuteNativeMethod);
Registry.Add("+90", UniforPlus9.GetIndex);
Registry.Add("+91", UniforPlus9.GetFileName);
Registry.Add("+92", UniforPlus9.GetDirectoryName);
Registry.Add("+93", UniforPlus9.GetExtension);
Registry.Add("+94", UniforPlus9.GetDrive);
Registry.Add("+95", UniforPlus9.StringLength);
Registry.Add("+96", UniforPlus9.Substring);
Registry.Add("+97", UniforPlus9.ToUpper);
Registry.Add("+98", UniforPlus9.ReplaceCharacter);
Registry.Add("+99", UniforPlus9.AssignGlobals);
Registry.Add("+9A", UniforPlus9.GetFileSize);
// ibatrak (неописанный клон функции +98)
Registry.Add("+9B", UniforPlus9.ReplaceCharacter);
Registry.Add("+9C", UniforPlus9.GetFileContent);
Registry.Add("+9D", UniforPlus9.SaveBinaryResource);
Registry.Add("+9E", UniforPlus9.FormatFileSize);
Registry.Add("+9F", UniforPlus9.GetCharacter);
Registry.Add("+9G", UniforPlus9.SplitWords);
Registry.Add("+9H", UniforPlus9.ConcatenateStrings);
Registry.Add("+9I", UniforPlus9.ReplaceString);
Registry.Add("+9J", UniforPlus9.ReadFileAsBinaryResource);
Registry.Add("+9K", UniforPlus9.DeleteFiles);
Registry.Add("+9L", UniforPlus9.FileExist);
Registry.Add("+9N", UniforPlus9.NextTerm);
Registry.Add("+9P", UniforPlus9.PreviousTerm);
Registry.Add("+9R", UniforPlus9.RomanToArabic);
Registry.Add("+9S", UniforPlus9.FindSubstring);
Registry.Add("+9T", UniforPlus9.PrintNumbers);
Registry.Add("+9V", UniforPlus9.GetGeneration);
Registry.Add("+9X", UniforPlus9.ArabicToRoman);
Registry.Add("+B", UniforPlusB.ByteSum);
Registry.Add("+C", UniforPlusC.Increment);
Registry.Add("+D", UniforPlusD.GetDatabaseName);
Registry.Add("+E", UniforPlusE.GetFieldIndex);
Registry.Add("+F", UniforPlusF.CleanRtf);
Registry.Add("+H", UniforPlusH.Take3Of4);
Registry.Add("+I", UniforPlusI.BuildLink);
Registry.Add("+K", UniforPlusK.GetAuthorSign);
Registry.Add("+N", UniforPlusN.GetFieldCount);
Registry.Add("+P", UniforPlusP.GetPosting);
Registry.Add("+R", UniforPlusR.TrimAtLastDot);
Registry.Add("+S", UniforPlusS.DecodeTitle);
Registry.Add("+U", UniforPlusU.RepeatString);
Registry.Add("+V", UniforPlusV.Substring);
Registry.Add("+W", UniforPlusW.Increment);
Registry.Add("+X", UniforPlusX.SearchIncrement);
Registry.Add("+Z", UniforPlusZ.AnsiToOem);
Registry.Add("+@", UniforPlusAt.FormatJson);
Registry.Add("++A", UniforPlusPlusA.GetPhrase);
Registry.Add("++B", UniforPlusPlusB.Unknown);
Registry.Add("++C", UniforPlusPlusC.WorkWithGlobalCounter);
Registry.Add("++0", UniforPlusPlus0.FormatAll);
Registry.Add("++1", UniforPlusPlus1.Unknown);
Registry.Add("+\\", UniforPlusBackslash.ConvertBackslashes);
Registry.Add("+*", UniforPlusStar.GetGuid);
Registry.Add("!", UniforBang. CleanDoubleText);
Registry.Add("=", UniforEqual.CompareWithMask);
Registry.Add("[", UniforSquareBracket.CleanContextMarkup);
Registry.Add("&", UniforAmpersand.Unknown);
Registry.Add(":", UniforColon.CheckRights);
}
#endregion
#region Public methods
/// <summary>
/// Find action for specified expression.
/// </summary>
public static Action<PftContext, PftNode, string> FindAction
(
[NotNull] ref string expression
)
{
var keys = Registry.Keys;
int bestMatch = 0;
Action<PftContext, PftNode, string> result = null;
StringComparison comparison = StringUtility.GetCaseInsensitiveComparison();
foreach (string key in keys)
{
if (key.Length > bestMatch
&& expression.StartsWith(key, comparison))
{
bestMatch = key.Length;
result = Registry[key];
}
}
if (bestMatch != 0)
{
expression = expression.Substring(bestMatch);
}
return result;
}
// ================================================================
#endregion
#region IFormatExit members
/// <inheritdoc cref="IFormatExit.Name" />
public string Name { get { return "unifor"; } }
/// <inheritdoc cref="IFormatExit.Execute" />
public void Execute
(
PftContext context,
PftNode node,
string expression
)
{
Code.NotNull(context, "context");
if (string.IsNullOrEmpty(expression))
{
Log.Error
(
"Unifor::Execute: "
+ "empty expression: "
+ this
);
if (ThrowOnEmpty)
{
throw new PftSemanticException
(
"Unifor::Execute: "
+ "empty expression: "
+ this
);
}
return;
}
Action<PftContext, PftNode, string> action
= FindAction(ref expression);
if (ReferenceEquals(action, null))
{
Log.Error
(
"Unifor::Execute: "
+ "unknown action="
+ expression.ToVisibleString()
);
if (ThrowOnUnknown)
{
throw new PftException
(
"Unknown unifor: "
+ expression.ToVisibleString()
);
}
}
else
{
action
(
context,
node,
expression
);
}
}
#endregion
}
}
|
{
"content_hash": "3eca057096351a22c485b403627b4f66",
"timestamp": "",
"source": "github",
"line_count": 331,
"max_line_length": 93,
"avg_line_length": 38.468277945619334,
"alnum_prop": 0.5405638891070447,
"repo_name": "amironov73/ManagedIrbis",
"id": "286cfded9c240a4ccde2d7df5f67ee1f8cfbcbf7",
"size": "12757",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Source/Classic/Libs/ManagedIrbis/Source/Pft/Infrastructure/Unifor.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ANTLR",
"bytes": "92910"
},
{
"name": "ASP.NET",
"bytes": "413"
},
{
"name": "Batchfile",
"bytes": "33021"
},
{
"name": "C",
"bytes": "24669"
},
{
"name": "C#",
"bytes": "19567730"
},
{
"name": "CSS",
"bytes": "170"
},
{
"name": "F*",
"bytes": "362819"
},
{
"name": "HTML",
"bytes": "5592"
},
{
"name": "JavaScript",
"bytes": "5342"
},
{
"name": "Pascal",
"bytes": "152697"
},
{
"name": "Shell",
"bytes": "524"
},
{
"name": "Smalltalk",
"bytes": "29356"
},
{
"name": "TeX",
"bytes": "44337"
},
{
"name": "VBA",
"bytes": "46543"
},
{
"name": "Witcher Script",
"bytes": "40165"
}
],
"symlink_target": ""
}
|
namespace impala {
struct ArrayValueBuilder;
struct HdfsFileDesc;
/// This scanner parses Parquet files located in HDFS, and writes the content as tuples in
/// the Impala in-memory representation of data, e.g. (tuples, rows, row batches).
/// For the file format spec, see: github.com/apache/parquet-format
///
/// ---- Schema resolution ----
/// Additional columns are allowed at the end in either the table or file schema (i.e.,
/// extra columns at the end of the schema or extra fields at the end of a struct). If
/// there are extra columns in the file schema, they are simply ignored. If there are
/// extra in the table schema, we return NULLs for those columns (if they're
/// materialized).
///
/// ---- Disk IO ----
/// Parquet (and other columnar formats) use scan ranges differently than other formats.
/// Each materialized column maps to a single ScanRange per row group. For streaming
/// reads, all the columns need to be read in parallel. This is done by issuing one
/// ScanRange (in IssueInitialRanges()) for the file footer per split.
/// ProcessSplit() is called once for each original split and determines the row groups
/// whose midpoints fall within that split. We use the mid-point to determine whether a
/// row group should be processed because if the row group size is less than or equal to
/// the split size, the mid point guarantees that we have at least 50% of the row group in
/// the current split. ProcessSplit() then computes the column ranges for these row groups
/// and submits them to the IoMgr for immediate scheduling (so they don't surface in
/// DiskIoMgr::GetNextRange()). Scheduling them immediately also guarantees they are all
/// read at once.
///
/// Like the other scanners, each parquet scanner object is one to one with a
/// ScannerContext. Unlike the other scanners though, the context will have multiple
/// streams, one for each column. Row groups are processed one at a time this way.
///
/// ---- Nested types ----
/// This scanner supports reading and materializing nested data. For a good overview of
/// how nested data is encoded, see blog.twitter.com/2013/dremel-made-simple-with-parquet.
/// For how SQL nested schemas are translated to parquet schemas, see
/// github.com/apache/parquet-format/blob/master/LogicalTypes.md#nested-types.
///
/// Examples:
/// For these examples, we will use the following table definition:
/// tbl:
/// id bigint
/// array_col array<array<int>>
///
/// The table definition could correspond to the following parquet schema (note the
/// required 'id' field. If written by Impala, all non-repeated fields would be optional,
/// but we can read repeated fields as well):
///
/// required group record d=0 r=0
/// req int64 id d=0 r=0
/// opt group array_col (LIST) d=1 r=0
/// repeated group list d=2 r=1
/// opt group item (LIST) d=3 r=1
/// repeated group list d=4 r=2
/// opt int32 item d=5 r=2
///
/// Each element in the schema has been annotated with the maximum def level and maximum
/// rep level corresponding to that element. Note that repeated elements add a def
/// level. This distinguishes between 0 items (empty list) and more than 0 items
/// (non-empty list). The containing optional LIST element for each array determines
/// whether the whole list is null or non-null.
///
/// Only scalar schema elements are materialized in parquet files; internal nested
/// elements can be reconstructed using the def and rep levels. To illustrate this, here
/// is data containing every valid definition and repetition for the materialized int
/// 'item' element. The data records appear on the left, the encoded definition levels,
/// repetition levels, and values for the 'item' field appear on the right (the encoded
/// 'id' field is not shown).
///
/// record d r v
/// ------------------------------------
/// {id: 0, array_col: NULL} 0 0 -
/// {id: 1, array_col: []} 1 0 -
/// {id: 2, array_col: [NULL]} 2 0 -
/// {id: 3, array_col: [[]]} 3 0 -
/// {id: 4, array_col: [[NULL]]} 4 0 -
/// {id: 5, array_col: [[1, 5 0 1
/// NULL], 4 2 -
/// [2]]} 5 1 2
/// {id: 6, array_col: [[3]]} 5 0 3
///
/// * Example query 1:
/// select id, inner.item from tbl t, t.array_col outer, outer.item inner
/// Results from above sample data:
/// 4,NULL
/// 5,1
/// 5,NULL
/// 5,2
/// 6,3
///
/// Descriptors:
/// Tuple(id=0 tuple_path=[] slots=[
/// Slot(id=0 type=ARRAY col_path=[1] collection_item_tuple_id=1),
/// Slot(id=2 type=BIGINT col_path=[0])])
/// Tuple(id=1 tuple_path=[1] slots=[
/// Slot(id=1 type=ARRAY col_path=[1,0] collection_item_tuple_id=2)])
/// Tuple(id=2 tuple_path=[1, 0] slots=[
/// Slot(id=3 type=INT col_path=[1,0,0])])
///
/// The parquet scanner will materialize the following in-memory row batch:
/// RowBatch
/// +==========+
/// | 0 | NULL |
/// |----------|
/// | 1 | NULL | outer
/// |----------| +======+
/// | 2 | --------->| NULL |
/// | | | +======+
/// |----------|
/// | | | +======+
/// | 3 | --------->| NULL |
/// | | | +======+
/// | | | inner
/// |----------| +======+ +======+
/// | 4 | --------->| -------->| NULL |
/// | | | +======+ +======+
/// | | |
/// |----------| +======+ +======+
/// | 5 | --------->| -------->| 1 |
/// | | | | | +------+
/// | | | | | | NULL |
/// | | | +------+ +======+
/// | | | | |
/// | | | | | +======+
/// | | | | -------->| 2 |
/// | | | +======+ +======+
/// | | |
/// |----------| +======+ +======+
/// | 6 | --------->| -------->| 3 |
/// +==========+ +======+ +======+
///
/// The top-level row batch contains two slots, one containing the int64_t 'id' slot and
/// the other containing the ArrayValue 'array_col' slot. The ArrayValues in turn
/// contain pointers to their item tuple data. Each item tuple contains a single
/// ArrayColumn slot ('array_col.item'). The inner ArrayValues' item tuples contain a
/// single int 'item' slot.
///
/// Note that the scanner materializes a NULL ArrayValue for empty arrays. This is
/// technically a bug (it should materialize an ArrayValue with num_tuples = 0), but we
/// don't distinguish between these two cases yet.
/// TODO: fix this (IMPALA-2272)
///
/// The column readers that materialize this structure form a tree analogous to the
/// materialized output:
/// CollectionColumnReader slot_id=0 node="repeated group list (d=2 r=1)"
/// CollectionColumnReader slot_id=1 node="repeated group list (d=4 r=2)"
/// ScalarColumnReader<int32_t> slot_id=3 node="opt int32 item (d=5 r=2)"
/// ScalarColumnReader<int64_t> slot_id=2 node="req int64 id (d=0 r=0)"
///
/// Note that the collection column readers reference the "repeated group item" schema
/// element of the serialized array, not the outer "opt group" element. This is what
/// causes the bug described above, it should consider both elements.
///
/// * Example query 2:
/// select inner.item from tbl.array_col.item inner;
/// Results from the above sample data:
/// NULL
/// 1
/// NULL
/// 2
/// 3
///
/// Descriptors:
/// Tuple(id=0 tuple_path=[1, 0] slots=[
/// Slot(id=0 type=INT col_path=[1,0,0])])
///
/// In-memory row batch:
/// +======+
/// | NULL |
/// |------|
/// | 1 |
/// |------|
/// | NULL |
/// |------|
/// | 2 |
/// |------|
/// | 3 |
/// +======+
///
/// Column readers:
/// ScalarColumnReader<int32_t> slot_id=0 node="opt int32 item (d=5 r=2)"
///
/// In this example, the scanner doesn't materialize a nested in-memory result, since
/// only the single int 'item' slot is materialized. However, it still needs to read the
/// nested data as shown above. An important point to notice is that a tuple is not
/// materialized for every rep and def level pair read -- there are 9 of these pairs
/// total in the sample data above, but only 5 tuples are materialized. This is because
/// in this case, nothing should be materialized for NULL or empty arrays, since we're
/// only materializing the innermost item. If a def level is read that doesn't
/// correspond to any item value (NULL or otherwise), the scanner advances to the next
/// rep and def levels without materializing a tuple.
///
/// * Example query 3:
/// select id, inner.item from tbl t, t.array_col.item inner
/// Results from the above sample data (same as example 1):
/// 4,NULL
/// 5,1
/// 5,NULL
/// 5,2
/// 6,3
///
/// Descriptors:
/// Tuple(id=0 tuple_path=[] slots=[
/// Slot(id=0 type=ARRAY col_path=[2]),
/// Slot(id=1 type=BIGINT col_path=[0])])
/// Tuple(id=1 tuple_path=[2, 0] slots=[
/// Slot(id=2 type=INT col_path=[2,0,0])])
///
/// In-memory row batch:
/// RowBatch
/// +==========+
/// | 0 | NULL |
/// |----------|
/// | 1 | NULL |
/// |----------| inner
/// | 2 | --------->+======+
/// | | | +======+
/// |----------|
/// | | |
/// | 3 | --------->+======+
/// | | | +======+
/// | | |
/// |----------| +======+
/// | 4 | --------->| NULL |
/// | | | +======+
/// | | |
/// |----------| +======+
/// | 5 | --------->| 1 |
/// | | | +------+
/// | | | | NULL |
/// | | | +------+
/// | | | | 2 |
/// | | | +======+
/// | | |
/// |----------| +======+
/// | 6 | --------->| 3 |
/// +==========+ +======+
///
/// Column readers:
/// CollectionColumnReader slot_id=0 node="repeated group list (d=2 r=1)"
/// ScalarColumnReader<int32_t> slot_id=2 node="opt int32 item (d=5 r=2)"
/// ScalarColumnReader<int32_t> id=1 node="req int64 id (d=0 r=0)"
///
/// In this example, the scanner materializes a "flattened" version of inner, rather
/// than the full 3-level structure. Note that the collection reader references the
/// outer array, which determines how long each materialized array is, and the items in
/// the array are from the inner array.
class HdfsParquetScanner : public HdfsScanner {
 public:
  HdfsParquetScanner(HdfsScanNode* scan_node, RuntimeState* state);

  virtual ~HdfsParquetScanner();

  /// HdfsScanner lifecycle overrides (see base class for the contract).
  virtual Status Prepare(ScannerContext* context);
  virtual void Close();
  virtual Status ProcessSplit();

  /// Issue just the footer range for each file.  We'll then parse the footer and pick
  /// out the columns we want.
  static Status IssueInitialRanges(HdfsScanNode* scan_node,
                                   const std::vector<HdfsFileDesc*>& files);

  /// Identifies the application that wrote a Parquet file, parsed from the file
  /// metadata's created_by string.  Used for version-specific workarounds.
  struct FileVersion {
    /// Application that wrote the file. e.g. "IMPALA"
    std::string application;

    /// Version of the application that wrote the file, expressed in three parts
    /// (<major>.<minor>.<patch>). Unspecified parts default to 0, and extra parts are
    /// ignored. e.g.:
    /// "1.2.3"    => {1, 2, 3}
    /// "1.2"      => {1, 2, 0}
    /// "1.2-cdh5" => {1, 2, 0}
    struct {
      int major;
      int minor;
      int patch;
    } version;

    /// If true, this file was generated by an Impala internal release
    bool is_impala_internal;

    FileVersion() : is_impala_internal(false) { }

    /// Parses the version from the created_by string
    FileVersion(const std::string& created_by);

    /// Returns true if version is strictly less than <major>.<minor>.<patch>
    bool VersionLt(int major, int minor = 0, int patch = 0) const;

    /// Returns true if version is equal to <major>.<minor>.<patch>
    bool VersionEq(int major, int minor, int patch) const;
  };

 private:
  /// Internal representation of a column schema (including nested-type columns).
  struct SchemaNode {
    /// The corresponding schema element defined in the file metadata
    const parquet::SchemaElement* element;

    /// The index into the RowGroup::columns list if this column is materialized in the
    /// file (i.e. it's a scalar type). -1 for nested types.
    int col_idx;

    /// The maximum definition level of this column, i.e., the definition level that
    /// corresponds to a non-NULL value. Valid values are >= 0.
    int max_def_level;

    /// The maximum repetition level of this column. Valid values are >= 0.
    int max_rep_level;

    /// The definition level of the most immediate ancestor of this node with repeated
    /// field repetition type. 0 if there are no repeated ancestors.
    int def_level_of_immediate_repeated_ancestor;

    /// Any nested schema nodes. Empty for non-nested types.
    std::vector<SchemaNode> children;

    SchemaNode() : element(NULL), col_idx(-1), max_def_level(-1), max_rep_level(-1),
        def_level_of_immediate_repeated_ancestor(-1) { }

    std::string DebugString(int indent = 0) const;

    /// True if the underlying schema element has REPEATED repetition type,
    /// i.e. this node represents an array/collection level.
    bool is_repeated() const {
      return element->repetition_type == parquet::FieldRepetitionType::REPEATED;
    }
  };

  /// Size of the file footer.  This is a guess.  If this value is too little, we will
  /// need to issue another read.
  static const int64_t FOOTER_SIZE;

  /// Class that implements Parquet definition and repetition level decoding.
  class LevelDecoder;

  /// Per column reader.  The readers are declared as nested classes and befriended so
  /// they can access the scanner's private state.
  class ColumnReader;
  friend class ColumnReader;

  class CollectionColumnReader;
  friend class CollectionColumnReader;

  class BaseScalarColumnReader;
  friend class BaseScalarColumnReader;

  template<typename T, bool MATERIALIZED> class ScalarColumnReader;
  template<typename T, bool MATERIALIZED> friend class ScalarColumnReader;
  class BoolColumnReader;
  friend class BoolColumnReader;

  /// Column reader for each materialized columns for this file.
  std::vector<ColumnReader*> column_readers_;

  /// File metadata thrift object
  parquet::FileMetaData file_metadata_;

  /// Version of the application that wrote this file.
  FileVersion file_version_;

  /// The root schema node for this file
  SchemaNode schema_;

  /// Scan range for the metadata.
  const DiskIoMgr::ScanRange* metadata_range_;

  /// Pool to copy dictionary page buffer into. This pool is shared across all the
  /// pages in a column chunk.
  boost::scoped_ptr<MemPool> dictionary_pool_;

  /// Timer for materializing rows.  This ignores time getting the next buffer.
  ScopedTimer<MonotonicStopWatch> assemble_rows_timer_;

  /// Number of cols that need to be read.
  RuntimeProfile::Counter* num_cols_counter_;

  /// Number of row groups that need to be read.
  RuntimeProfile::Counter* num_row_groups_counter_;

  /// Reads data using 'column_readers' to materialize instances of 'tuple_desc'
  /// (including recursively reading collections).
  ///
  /// If reading into a collection, 'array_value_builder' should be non-NULL and
  /// 'new_collection_rep_level' set appropriately. Otherwise, 'array_value_builder'
  /// should be NULL and 'new_collection_rep_level' should be -1.
  ///
  /// Returns when the row group is complete, the end of the current collection is reached
  /// as indicated by 'new_collection_rep_level' (if materializing a collection), or
  /// some other condition causes execution to halt (e.g. parse_error_ set, cancellation).
  ///
  /// Returns false if execution should be aborted for some reason, e.g. parse_error_ is
  /// set, the query is cancelled, or the scan node limit was reached. Otherwise returns
  /// true.
  ///
  /// 'row_group_idx' is used for error checking when this is called on the table-level
  /// tuple. If reading into a collection, 'row_group_idx' doesn't matter.
  ///
  /// IN_COLLECTION is true if the columns we are materializing are part of a Parquet
  /// collection. MATERIALIZING_COLLECTION is true if we are materializing tuples inside
  /// a nested collection.
  template <bool IN_COLLECTION, bool MATERIALIZING_COLLECTION>
  bool AssembleRows(const TupleDescriptor* tuple_desc,
      const std::vector<ColumnReader*>& column_readers, int new_collection_rep_level,
      int row_group_idx, ArrayValueBuilder* array_value_builder);

  /// Function used by AssembleRows() to read a single row into 'tuple'. Returns false if
  /// execution should be aborted for some reason, otherwise returns true.
  /// materialize_tuple is an in/out parameter. It is set to true by the caller to
  /// materialize the tuple.  If any conjuncts fail, materialize_tuple is set to false
  /// by ReadRow().
  /// 'tuple_materialized' is an output parameter set by this function. If false is
  /// returned, there are no guarantees about 'materialize_tuple' or the state of
  /// column_readers, so execution should be halted immediately.
  /// The template argument IN_COLLECTION allows an optimized version of this code to
  /// be produced in the case when we are materializing the top-level tuple.
  template <bool IN_COLLECTION>
  inline bool ReadRow(const std::vector<ColumnReader*>& column_readers, Tuple* tuple,
      MemPool* pool, bool* materialize_tuple);

  /// Find and return the last split in the file if it is assigned to this scan node.
  /// Returns NULL otherwise.
  static DiskIoMgr::ScanRange* FindFooterSplit(HdfsFileDesc* file);

  /// Validate column offsets by checking if the dictionary page comes before the data
  /// pages and checking if the column offsets lie within the file.
  Status ValidateColumnOffsets(const parquet::RowGroup& row_group);

  /// Process the file footer and parse file_metadata_.  This should be called with the
  /// last FOOTER_SIZE bytes in context_.
  /// *eosr is a return value.  If true, the scan range is complete (e.g. select count(*))
  Status ProcessFooter(bool* eosr);

  /// Populates 'column_readers' for the slots in 'tuple_desc', including creating child
  /// readers for any collections. Schema resolution is handled in this function as
  /// well. Fills in the appropriate template tuple slot with NULL for any materialized
  /// fields missing in the file.
  Status CreateColumnReaders(const TupleDescriptor& tuple_desc,
      std::vector<ColumnReader*>* column_readers);

  /// Creates a column reader for 'node'. slot_desc may be NULL, in which case the
  /// returned column reader can only be used to read def/rep levels.
  /// 'is_collection_field' should be set to true if the returned reader is reading a
  /// collection. This cannot be determined purely by 'node' because a repeated scalar
  /// node represents both an array and the array's items (in this case
  /// 'is_collection_field' should be true if the reader reads one value per array, and
  /// false if it reads one value per item). The reader is added to the runtime state's
  /// object pool. Does not create child readers for collection readers; these must be
  /// added by the caller.
  ColumnReader* CreateReader(const SchemaNode& node, bool is_collection_field,
      const SlotDescriptor* slot_desc);

  /// Creates a column reader that reads one value for each item in the table or
  /// collection element corresponding to 'parent_path'. 'parent_path' should point to
  /// either a collection element or the root schema (i.e. empty path). The returned
  /// reader has no slot desc associated with it, meaning only NextLevels() and not
  /// ReadValue() can be called on it.
  ///
  /// This is used for counting item values, rather than materializing any values. For
  /// example, in a count(*) over a collection, there are no values to materialize, but we
  /// still need to iterate over every item in the collection to count them.
  Status CreateCountingReader(
      const SchemaPath& parent_path, ColumnReader** reader);

  /// Walks file_metadata_ and initiates reading the materialized columns.  This
  /// initializes 'column_readers' and issues the reads for the columns. 'column_readers'
  /// should be the readers used to materialize a single tuple (i.e., column_readers_ or
  /// the children of a collection node).
  Status InitColumns(
      int row_group_idx, const std::vector<ColumnReader*>& column_readers);

  /// Validates the file metadata
  Status ValidateFileMetadata();

  /// Validates the column metadata to make sure this column is supported (e.g. encoding,
  /// type, etc) and matches the type of col_reader's slot desc.
  Status ValidateColumn(const BaseScalarColumnReader& col_reader, int row_group_idx);

  /// Performs some validation once we've reached the end of a row group to help detect
  /// bugs or bad input files.
  Status ValidateEndOfRowGroup(const std::vector<ColumnReader*>& column_readers,
      int row_group_idx, int64_t rows_read);

  /// Part of the HdfsScanner interface, not used in Parquet.
  Status InitNewRange() { return Status::OK(); };

  /// Unflattens the schema metadata from a Parquet file metadata and converts it to our
  /// SchemaNode representation. Returns the result in 'n' unless an error status is
  /// returned. Does not set the slot_desc field of any SchemaNode.
  Status CreateSchemaTree(const std::vector<parquet::SchemaElement>& schema,
      SchemaNode* node) const;

  /// Recursive implementation used internally by the above CreateSchemaTree() function.
  Status CreateSchemaTree(const std::vector<parquet::SchemaElement>& schema,
      int max_def_level, int max_rep_level, int ira_def_level, int* idx, int* col_idx,
      SchemaNode* node) const;

  /// Traverses 'schema_' according to 'path', returning the result in 'node'. If 'path'
  /// does not exist in this file's schema, 'missing_field' is set to true and
  /// Status::OK() is returned, otherwise 'missing_field' is set to false. If 'path'
  /// resolves to a collection position field, *pos_field is set to true. Otherwise
  /// 'pos_field' is set to false. Returns a non-OK status if 'path' cannot be resolved
  /// against the file's schema (e.g., unrecognized collection schema).
  ///
  /// Tries to resolve assuming either two- or three-level array encoding in
  /// 'schema_'. Returns a bad status if resolution fails in both cases.
  Status ResolvePath(const SchemaPath& path, SchemaNode** node, bool* pos_field,
      bool* missing_field);

  /// The 'array_encoding' parameter determines whether to assume one-, two-, or
  /// three-level array encoding. The returned status is not logged (i.e. it's an expected
  /// error).
  enum ArrayEncoding {
    ONE_LEVEL,
    TWO_LEVEL,
    THREE_LEVEL
  };

  Status ResolvePathHelper(ArrayEncoding array_encoding, const std::vector<int>& path,
      SchemaNode** node, bool* pos_field, bool* missing_field);

  /// Helper functions for ResolvePathHelper().

  /// Advances 'node' to one of its children based on path[next_idx]. Returns the child
  /// node or sets 'missing_field' to true.
  SchemaNode* NextSchemaNode(const SchemaPath& path, int next_idx, SchemaNode* node,
      bool* missing_field);

  /// The ResolvePathHelper() logic for arrays.
  Status ResolveArray(ArrayEncoding array_encoding, const SchemaPath& path, int idx,
      SchemaNode** node, bool* pos_field, bool* missing_field);

  /// The ResolvePathHelper() logic for maps.
  Status ResolveMap(const SchemaPath& path, int idx, SchemaNode** node,
      bool* missing_field);

  /// The ResolvePathHelper() logic for scalars (just does validation since there's no
  /// more actual work to be done).
  Status ValidateScalarNode(const SchemaNode& node, const ColumnType& col_type,
      const SchemaPath& path, int idx);
};
} // namespace impala
#endif
|
{
"content_hash": "ce882f8fe2fe6cff44d22a2b5b766f3f",
"timestamp": "",
"source": "github",
"line_count": 543,
"max_line_length": 90,
"avg_line_length": 44.173112338858196,
"alnum_prop": 0.6339948303176853,
"repo_name": "cchanning/Impala",
"id": "954ad4ddc7d9adb2a1198c8d21f70b1f123fab5a",
"size": "24726",
"binary": false,
"copies": "1",
"ref": "refs/heads/cdh5-trunk",
"path": "be/src/exec/hdfs-parquet-scanner.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "203216"
},
{
"name": "C++",
"bytes": "7656531"
},
{
"name": "CMake",
"bytes": "105273"
},
{
"name": "CSS",
"bytes": "89516"
},
{
"name": "Groff",
"bytes": "1633"
},
{
"name": "HTML",
"bytes": "56"
},
{
"name": "Java",
"bytes": "3650396"
},
{
"name": "JavaScript",
"bytes": "484"
},
{
"name": "Lex",
"bytes": "21812"
},
{
"name": "PLSQL",
"bytes": "3066"
},
{
"name": "PLpgSQL",
"bytes": "393"
},
{
"name": "Protocol Buffer",
"bytes": "630"
},
{
"name": "Python",
"bytes": "1812408"
},
{
"name": "SQLPL",
"bytes": "187"
},
{
"name": "Shell",
"bytes": "161872"
},
{
"name": "Thrift",
"bytes": "243952"
},
{
"name": "Yacc",
"bytes": "80218"
}
],
"symlink_target": ""
}
|
// -----------------------------------------------------------------------
// <copyright file="CatalogTest.cs" company="PlayFab Inc">
// Copyright 2015 PlayFab Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
// -----------------------------------------------------------------------
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace Consul.Test
{
    /// <summary>
    /// Integration tests for the catalog endpoints of the Consul client.
    /// Each test talks to a locally running Consul agent.
    /// </summary>
    [TestClass]
    public class CatalogTest
    {
        [TestMethod]
        public void Catalog_Datacenters()
        {
            // The agent always knows about at least its own datacenter.
            var consul = new Client();
            var dcs = consul.Catalog.Datacenters();
            Assert.AreNotEqual(0, dcs.Response.Length);
        }

        [TestMethod]
        public void Catalog_Nodes()
        {
            var consul = new Client();
            var nodes = consul.Catalog.Nodes();
            Assert.AreNotEqual(0, nodes.LastIndex);
            Assert.AreNotEqual(0, nodes.Response.Length);
            // Spot-check deserialization of the first returned entry.
            Assert.IsNotNull(nodes.Response[0].Address);
            Assert.IsNotNull(nodes.Response[0].Name);
        }

        [TestMethod]
        public void Catalog_Services()
        {
            var consul = new Client();
            var services = consul.Catalog.Services();
            Assert.AreNotEqual(0, services.LastIndex);
            Assert.AreNotEqual(0, services.Response.Count);
        }

        [TestMethod]
        public void Catalog_Service()
        {
            // The "consul" service itself must always be registered.
            var consul = new Client();
            var instances = consul.Catalog.Service("consul");
            Assert.AreNotEqual(0, instances.LastIndex);
            Assert.AreNotEqual(0, instances.Response.Length);
        }

        [TestMethod]
        public void Catalog_Node()
        {
            var consul = new Client();
            var self = consul.Catalog.Node(consul.Agent.NodeName);
            Assert.AreNotEqual(0, self.LastIndex);
            Assert.IsNotNull(self.Response.Services);
        }

        [TestMethod]
        public void Catalog_RegistrationDeregistration()
        {
            var consul = new Client();

            // Register a service plus an associated health check on a synthetic node.
            var redisService = new AgentService()
            {
                ID = "redis1",
                Service = "redis",
                Tags = new[] {"master", "v1"},
                Port = 8000
            };
            var redisCheck = new AgentCheck()
            {
                Node = "foobar",
                CheckID = "service:redis1",
                Name = "Redis health check",
                Notes = "Script based health check",
                Status = CheckStatus.Passing,
                ServiceID = "redis1"
            };
            var registration = new CatalogRegistration()
            {
                Datacenter = "dc1",
                Node = "foobar",
                Address = "192.168.10.10",
                Service = redisService,
                Check = redisCheck
            };
            consul.Catalog.Register(registration);

            var node = consul.Catalog.Node("foobar");
            Assert.IsTrue(node.Response.Services.ContainsKey("redis1"));
            var health = consul.Health.Node("foobar");
            Assert.AreEqual("service:redis1", health.Response[0].CheckID);

            // Deregistering only the check clears the node's health entries.
            var removal = new CatalogDeregistration()
            {
                Datacenter = "dc1",
                Node = "foobar",
                Address = "192.168.10.10",
                CheckID = "service:redis1"
            };
            consul.Catalog.Deregister(removal);
            health = consul.Health.Node("foobar");
            Assert.AreEqual(0, health.Response.Length);

            // Deregistering without a CheckID removes the whole node.
            removal = new CatalogDeregistration()
            {
                Datacenter = "dc1",
                Node = "foobar",
                Address = "192.168.10.10"
            };
            consul.Catalog.Deregister(removal);
            node = consul.Catalog.Node("foobar");
            Assert.IsNull(node.Response);
        }
    }
}
|
{
"content_hash": "ba9ddc8ea66d8ed3a04652a5fefcb8fb",
"timestamp": "",
"source": "github",
"line_count": 146,
"max_line_length": 78,
"avg_line_length": 31.410958904109588,
"alnum_prop": 0.522895769733973,
"repo_name": "highlyunavailable/consuldotnet",
"id": "4975e17e9374bc628c0d00fa78a9ac1e201ba97b",
"size": "4588",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Consul.Test/CatalogTest.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "298905"
}
],
"symlink_target": ""
}
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>interval: 4 m 18 s 🏆</title>
<link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" />
<link href="../../../../../bootstrap.min.css" rel="stylesheet">
<link href="../../../../../bootstrap-custom.css" rel="stylesheet">
<link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet">
<script src="../../../../../moment.min.js"></script>
<!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<div class="container">
<div class="navbar navbar-default" role="navigation">
<div class="container-fluid">
<div class="navbar-header">
<a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a>
</div>
<div id="navbar" class="collapse navbar-collapse">
<ul class="nav navbar-nav">
<li><a href="../..">clean / released</a></li>
<li class="active"><a href="">8.8.0 / interval - 4.2.0</a></li>
</ul>
</div>
</div>
</div>
<div class="article">
<div class="row">
<div class="col-md-12">
<a href="../..">« Up</a>
<h1>
interval
<small>
4.2.0
<span class="label label-success">4 m 18 s 🏆</span>
</small>
</h1>
<p>📅 <em><script>document.write(moment("2022-07-10 09:04:22 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-07-10 09:04:22 UTC)</em><p>
<h2>Context</h2>
<pre># Packages matching: installed
# Name # Installed # Synopsis
base-bigarray base
base-threads base
base-unix base
camlp5 7.14 Preprocessor-pretty-printer of OCaml
conf-findutils 1 Virtual package relying on findutils
conf-perl 2 Virtual package relying on perl
coq 8.8.0 Formal proof management system
num 1.4 The legacy Num library for arbitrary-precision integer and rational arithmetic
ocaml 4.07.1 The OCaml compiler (virtual package)
ocaml-base-compiler 4.07.1 Official release 4.07.1
ocaml-config 1 OCaml Switch Configuration
ocamlfind 1.9.5 A library manager for OCaml
# opam file:
opam-version: "2.0"
maintainer: "guillaume.melquiond@inria.fr"
homepage: "https://coqinterval.gitlabpages.inria.fr/"
dev-repo: "git+https://gitlab.inria.fr/coqinterval/interval.git"
bug-reports: "https://gitlab.inria.fr/coqinterval/interval/issues"
license: "CeCILL-C"
build: [
["autoconf"] {dev}
["./configure"]
["./remake" "-j%{jobs}%"]
]
install: ["./remake" "install"]
depends: [
"coq" {>= "8.8" & < "8.14~"}
"coq-bignums"
"coq-flocq" {>= "3.1" & < "4~"}
"coq-mathcomp-ssreflect" {>= "1.6"}
"coq-coquelicot" {>= "3.0"}
"conf-autoconf" {build & dev}
("conf-g++" {build} | "conf-clang" {build})
]
tags: [
"keyword:interval arithmetic"
"keyword:decision procedure"
"keyword:floating-point arithmetic"
"keyword:reflexive tactic"
"keyword:Taylor models"
"category:Mathematics/Real Calculus and Topology"
"category:Computer Science/Decision Procedures and Certified Algorithms/Decision procedures"
"logpath:Interval"
"date:2021-03-27"
]
authors: [
"Guillaume Melquiond <guillaume.melquiond@inria.fr>"
"Érik Martin-Dorel <erik.martin-dorel@irit.fr>"
"Pierre Roux <pierre.roux@onera.fr>"
"Thomas Sibut-Pinote <thomas.sibut-pinote@inria.fr>"
]
synopsis: "A Coq tactic for proving bounds on real-valued expressions automatically"
url {
src: "https://coqinterval.gitlabpages.inria.fr/releases/interval-4.2.0.tar.gz"
checksum: "sha512=f6e87d52e83e48de5d67fb157dd623eaa6eaa338b5d452cf8c13b9abef3583787198e09c49ea2556ff81b91f907741a19ca66c1e0bc78d2807a7936a2883a349"
}
</pre>
<h2>Lint</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Dry install 🏜️</h2>
<p>Dry install with the current Coq version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam install -y --show-action coq-interval.4.2.0 coq.8.8.0</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Install dependencies</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam list; echo; ulimit -Sv 4000000; timeout 4h opam install -y --deps-only coq-interval.4.2.0 coq.8.8.0</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>15 m 48 s</dd>
</dl>
<h2>Install 🚀</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam list; echo; ulimit -Sv 16000000; timeout 4h opam install -y -v coq-interval.4.2.0 coq.8.8.0</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>4 m 18 s</dd>
</dl>
<h2>Installation size</h2>
<p>Total: 25 M</p>
<ul>
<li>13 M <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Poly/Basic_rec.vo</code></li>
<li>2 M <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Tactic.vo</code></li>
<li>1 M <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Interval/Float.vo</code></li>
<li>746 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Eval/Prog.vo</code></li>
<li>636 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Poly/Taylor_model_sharp.vo</code></li>
<li>623 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Tactics/Integral_helper.vo</code></li>
<li>410 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Tactics/Plot_helper.vo</code></li>
<li>406 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Interval/Float_full.vo</code></li>
<li>395 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Eval/Eval.vo</code></li>
<li>383 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Poly/Taylor_model.vo</code></li>
<li>361 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Poly/Datatypes.vo</code></li>
<li>332 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Interval/Transcend.vo</code></li>
<li>322 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Integral/Integral.vo</code></li>
<li>316 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Tactics/Interval_helper.vo</code></li>
<li>247 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Float/Specific_ops.vo</code></li>
<li>233 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Float/Generic_proof.vo</code></li>
<li>209 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Integral/Bertrand.vo</code></li>
<li>199 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Poly/Bound.vo</code></li>
<li>158 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Real/Xreal_derive.vo</code></li>
<li>155 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Interval/Interval.vo</code></li>
<li>150 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Float/Specific_bigint.vo</code></li>
<li>140 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Integral/Refine.vo</code></li>
<li>138 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Missing/Stdlib.vo</code></li>
<li>137 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Poly/Bound_quad.vo</code></li>
<li>135 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Missing/Coquelicot.vo</code></li>
<li>129 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Eval/Reify.vo</code></li>
<li>127 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Float/Specific_stdz.vo</code></li>
<li>118 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Float/Sig.vo</code></li>
<li>116 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Eval/Tree.vo</code></li>
<li>115 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Integral/Priority.vo</code></li>
<li>110 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Interval/Interval_compl.vo</code></li>
<li>110 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Float/Generic.vo</code></li>
<li>101 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Missing/MathComp.vo</code></li>
<li>96 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Float/Generic_ops.vo</code></li>
<li>91 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Tactic_float.vo</code></li>
<li>87 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Real/Taylor.vo</code></li>
<li>75 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Real/Xreal.vo</code></li>
<li>72 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Interval/Univariate_sig.vo</code></li>
<li>72 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Poly/Taylor_poly.vo</code></li>
<li>71 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Float/Basic.vo</code></li>
<li>61 K <code>../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Interval/Float/Specific_sig.vo</code></li>
</ul>
<h2>Uninstall 🧹</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam remove -y coq-interval.4.2.0</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Missing removes</dt>
<dd>
none
</dd>
<dt>Wrong removes</dt>
<dd>
none
</dd>
</dl>
</div>
</div>
</div>
<hr/>
<div class="footer">
<p class="text-center">
Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣
</p>
</div>
</div>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<script src="../../../../../bootstrap.min.js"></script>
</body>
</html>
|
{
"content_hash": "e7882da3d59e5d69ccb2e9ac0d0d22c2",
"timestamp": "",
"source": "github",
"line_count": 219,
"max_line_length": 159,
"avg_line_length": 56.84931506849315,
"alnum_prop": 0.5904417670682731,
"repo_name": "coq-bench/coq-bench.github.io",
"id": "67415689425702c8162f88c952ef9f7b7d21ba31",
"size": "12476",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "clean/Linux-x86_64-4.07.1-2.0.6/released/8.8.0/interval/4.2.0.html",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
}
|
<!--start-code-->
```js
const instance = (
<Panel shaded bordered bodyFill style={{ display: 'inline-block', width: 240 }}>
<img src="https://via.placeholder.com/240x240" height="240" />
<Panel header="RSUITE">
<p>
<small>A suite of React components, sensible UI design, and a friendly development experience.</small>
</p>
</Panel>
</Panel>
);
ReactDOM.render(instance);
```
<!--end-code-->
|
{
"content_hash": "8283423464cd449b87b617b5ad024616",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 110,
"avg_line_length": 23.88888888888889,
"alnum_prop": 0.6186046511627907,
"repo_name": "suitejs/suite",
"id": "46ef912b1e12a8296a30b92c6774a903367e9267",
"size": "440",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/pages/components/panel/en-US/card.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "178942"
}
],
"symlink_target": ""
}
|
package org.codehaus.groovy.transform;
import groovy.transform.Sortable;
import org.codehaus.groovy.ast.ASTNode;
import org.codehaus.groovy.ast.AnnotatedNode;
import org.codehaus.groovy.ast.AnnotationNode;
import org.codehaus.groovy.ast.ClassHelper;
import org.codehaus.groovy.ast.ClassNode;
import org.codehaus.groovy.ast.FieldNode;
import org.codehaus.groovy.ast.InnerClassNode;
import org.codehaus.groovy.ast.MethodNode;
import org.codehaus.groovy.ast.Parameter;
import org.codehaus.groovy.ast.PropertyNode;
import org.codehaus.groovy.classgen.VariableScopeVisitor;
import org.codehaus.groovy.runtime.AbstractComparator;
import org.codehaus.groovy.ast.stmt.BlockStatement;
import org.codehaus.groovy.ast.stmt.Statement;
import org.codehaus.groovy.control.CompilePhase;
import org.codehaus.groovy.control.SourceUnit;
import org.codehaus.groovy.runtime.StringGroovyMethods;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import static org.codehaus.groovy.ast.ClassHelper.isPrimitiveType;
import static org.codehaus.groovy.ast.ClassHelper.make;
import static org.codehaus.groovy.ast.tools.GeneralUtils.*;
import static org.codehaus.groovy.ast.tools.GenericsUtils.makeClassSafe;
import static org.codehaus.groovy.ast.tools.GenericsUtils.makeClassSafeWithGenerics;
import static org.codehaus.groovy.ast.tools.GenericsUtils.newClass;
/**
* Injects a set of Comparators and sort methods.
*
* @author Andres Almiray
* @author Paul King
*/
@GroovyASTTransformation(phase = CompilePhase.CANONICALIZATION)
public class SortableASTTransformation extends AbstractASTTransformation {
private static final ClassNode MY_TYPE = make(Sortable.class);
private static final String MY_TYPE_NAME = "@" + MY_TYPE.getNameWithoutPackage();
private static final ClassNode COMPARABLE_TYPE = makeClassSafe(Comparable.class);
private static final ClassNode COMPARATOR_TYPE = makeClassSafe(Comparator.class);
private static final String VALUE = "value";
private static final String OTHER = "other";
private static final String THIS_HASH = "thisHash";
private static final String OTHER_HASH = "otherHash";
private static final String ARG0 = "arg0";
private static final String ARG1 = "arg1";
public void visit(ASTNode[] nodes, SourceUnit source) {
init(nodes, source);
AnnotationNode annotation = (AnnotationNode) nodes[0];
AnnotatedNode parent = (AnnotatedNode) nodes[1];
if (parent instanceof ClassNode) {
createSortable(annotation, (ClassNode) parent);
}
}
private void createSortable(AnnotationNode annotation, ClassNode classNode) {
List<String> includes = getMemberList(annotation, "includes");
List<String> excludes = getMemberList(annotation, "excludes");
if (!checkIncludeExclude(annotation, excludes, includes, MY_TYPE_NAME)) return;
if (classNode.isInterface()) {
addError(MY_TYPE_NAME + " cannot be applied to interface " + classNode.getName(), annotation);
}
List<PropertyNode> properties = findProperties(annotation, classNode, includes, excludes);
implementComparable(classNode);
classNode.addMethod(new MethodNode(
"compareTo",
ACC_PUBLIC,
ClassHelper.int_TYPE,
params(param(newClass(classNode), OTHER)),
ClassNode.EMPTY_ARRAY,
createCompareToMethodBody(properties)
));
for (PropertyNode property : properties) {
createComparatorFor(classNode, property);
}
new VariableScopeVisitor(sourceUnit, true).visitClass(classNode);
}
private void implementComparable(ClassNode classNode) {
if (!classNode.implementsInterface(COMPARABLE_TYPE)) {
classNode.addInterface(makeClassSafeWithGenerics(Comparable.class, classNode));
}
}
private static Statement createCompareToMethodBody(List<PropertyNode> properties) {
List<Statement> statements = new ArrayList<Statement>();
// if (this.is(other)) return 0;
statements.add(ifS(callThisX("is", args(OTHER)), returnS(constX(0))));
if (properties.isEmpty()) {
// perhaps overkill but let compareTo be based on hashes for commutativity
// return this.hashCode() <=> other.hashCode()
statements.add(declS(varX(THIS_HASH, ClassHelper.Integer_TYPE), callX(varX("this"), "hashCode")));
statements.add(declS(varX(OTHER_HASH, ClassHelper.Integer_TYPE), callX(varX(OTHER), "hashCode")));
statements.add(returnS(cmpX(varX(THIS_HASH), varX(OTHER_HASH))));
} else {
// int value = 0;
statements.add(declS(varX(VALUE, ClassHelper.int_TYPE), constX(0)));
for (PropertyNode property : properties) {
String propName = property.getName();
// value = this.prop <=> other.prop;
statements.add(assignS(varX(VALUE), cmpX(propX(varX("this"), propName), propX(varX(OTHER), propName))));
// if (value != 0) return value;
statements.add(ifS(neX(varX(VALUE), constX(0)), returnS(varX(VALUE))));
}
// objects are equal
statements.add(returnS(constX(0)));
}
final BlockStatement body = new BlockStatement();
body.addStatements(statements);
return body;
}
private static Statement createCompareMethodBody(PropertyNode property) {
String propName = property.getName();
return block(
// if (arg0 == arg1) return 0;
ifS(eqX(varX(ARG0), varX(ARG1)), returnS(constX(0))),
// if (arg0 != null && arg1 == null) return -1;
ifS(andX(notNullX(varX(ARG0)), equalsNullX(varX(ARG1))), returnS(constX(-1))),
// if (arg0 == null && arg1 != null) return 1;
ifS(andX(equalsNullX(varX(ARG0)), notNullX(varX(ARG1))), returnS(constX(1))),
// return arg0.prop <=> arg1.prop;
returnS(cmpX(propX(varX(ARG0), propName), propX(varX(ARG1), propName)))
);
}
private static void createComparatorFor(ClassNode classNode, PropertyNode property) {
String propName = property.getName();
String className = classNode.getName() + "$" + StringGroovyMethods.capitalize(propName) + "Comparator";
ClassNode superClass = makeClassSafeWithGenerics(AbstractComparator.class, classNode);
InnerClassNode cmpClass = new InnerClassNode(classNode, className, ACC_PRIVATE | ACC_STATIC, superClass);
classNode.getModule().addClass(cmpClass);
cmpClass.addMethod(new MethodNode(
"compare",
ACC_PUBLIC,
ClassHelper.int_TYPE,
params(param(newClass(classNode), ARG0), param(newClass(classNode), ARG1)),
ClassNode.EMPTY_ARRAY,
createCompareMethodBody(property)
));
String fieldName = "this$" + StringGroovyMethods.capitalize(propName) + "Comparator";
// private final Comparator this$<property>Comparator = new <type>$<property>Comparator();
FieldNode cmpField = classNode.addField(
fieldName,
ACC_STATIC | ACC_FINAL | ACC_PRIVATE | ACC_SYNTHETIC,
COMPARATOR_TYPE,
ctorX(cmpClass));
classNode.addMethod(new MethodNode(
"comparatorBy" + StringGroovyMethods.capitalize(propName),
ACC_PUBLIC | ACC_STATIC,
COMPARATOR_TYPE,
Parameter.EMPTY_ARRAY,
ClassNode.EMPTY_ARRAY,
returnS(fieldX(cmpField))
));
}
private List<PropertyNode> findProperties(AnnotationNode annotation, ClassNode classNode, final List<String> includes, final List<String> excludes) {
List<PropertyNode> properties = new ArrayList<PropertyNode>();
for (PropertyNode property : classNode.getProperties()) {
String propertyName = property.getName();
if (property.isStatic() ||
excludes.contains(propertyName) ||
!includes.isEmpty() && !includes.contains(propertyName)) continue;
properties.add(property);
}
for (String name : includes) {
checkKnownProperty(annotation, name, properties);
}
for (PropertyNode pNode : properties) {
checkComparable(pNode);
}
if (!includes.isEmpty()) {
Comparator<PropertyNode> includeComparator = new Comparator<PropertyNode>() {
public int compare(PropertyNode o1, PropertyNode o2) {
return new Integer(includes.indexOf(o1.getName())).compareTo(includes.indexOf(o2.getName()));
}
};
Collections.sort(properties, includeComparator);
}
return properties;
}
private void checkComparable(PropertyNode pNode) {
if (pNode.getType().implementsInterface(COMPARABLE_TYPE) || isPrimitiveType(pNode.getType()) || hasAnnotation(pNode.getType(), MY_TYPE)) {
return;
}
addError("Error during " + MY_TYPE_NAME + " processing: property '" +
pNode.getName() + "' must be Comparable", pNode);
}
private void checkKnownProperty(AnnotationNode annotation, String name, List<PropertyNode> properties) {
for (PropertyNode pNode: properties) {
if (name.equals(pNode.getName())) {
return;
}
}
addError("Error during " + MY_TYPE_NAME + " processing: tried to include unknown property '" +
name + "'", annotation);
}
}
|
{
"content_hash": "3869cd7d97205c81b7d67e3fc9bb3794",
"timestamp": "",
"source": "github",
"line_count": 218,
"max_line_length": 153,
"avg_line_length": 45.01376146788991,
"alnum_prop": 0.6505655762763681,
"repo_name": "komalsukhani/alioth-groovy2",
"id": "055694fb1b0ec047ad127e058df377bf2bc9ceb3",
"size": "10433",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/main/org/codehaus/groovy/transform/SortableASTTransformation.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "10776"
},
{
"name": "C",
"bytes": "1903"
},
{
"name": "CSS",
"bytes": "133881"
},
{
"name": "GAP",
"bytes": "222575"
},
{
"name": "Groovy",
"bytes": "6797795"
},
{
"name": "HTML",
"bytes": "145020"
},
{
"name": "Java",
"bytes": "10551850"
},
{
"name": "JavaScript",
"bytes": "1191"
},
{
"name": "Shell",
"bytes": "14622"
},
{
"name": "Smarty",
"bytes": "374"
},
{
"name": "TeX",
"bytes": "35901"
},
{
"name": "XSLT",
"bytes": "27856"
}
],
"symlink_target": ""
}
|
package com.google.code.validationframework.swing.property;
import com.google.code.validationframework.api.common.Disposable;
import com.google.code.validationframework.base.property.AbstractReadableWritableProperty;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.event.ComponentEvent;
import java.awt.event.ComponentListener;
/**
 * Readable/writable property representing the size of a {@link Component} (possibly a {@link java.awt.Window}).
 * <p>
 * It is possible to control the size of the component by setting the value of this property or by calling the {@link
 * Component#setSize(Dimension)} method of that component.
 *
 * <p>
 * However, note that the layout manager of the parent container may also modify the size of the component.
 * <p>
 * Note that changing the width or height attribute of the {@link Dimension} object directly will have no effect on this
 * property. It is therefore not advised to do so.
 * <p>
 * Finally note that null values are not supported by this property.
 *
 * @see Component#getSize()
 * @see Component#setSize(Dimension)
 */
public class ComponentSizeProperty extends AbstractReadableWritableProperty<Dimension,
        Dimension> implements Disposable {

    /**
     * Size tracker.
     * <p>
     * Forwards component resize events into the property value, raising the {@code updatingFromComponent} flag so
     * that {@link ComponentSizeProperty#setValue(Dimension)} records the change instead of echoing it back to the
     * component.
     */
    private class EventAdapter implements ComponentListener {

        /**
         * @see ComponentListener#componentShown(ComponentEvent)
         */
        @Override
        public void componentShown(ComponentEvent e) {
            // Nothing to be done
        }

        /**
         * @see ComponentListener#componentHidden(ComponentEvent)
         */
        @Override
        public void componentHidden(ComponentEvent e) {
            // Nothing to be done
        }

        /**
         * @see ComponentListener#componentResized(ComponentEvent)
         */
        @Override
        public void componentResized(ComponentEvent e) {
            updateFromComponent();
        }

        /**
         * @see ComponentListener#componentMoved(ComponentEvent)
         */
        @Override
        public void componentMoved(ComponentEvent e) {
            // Nothing to be done
        }

        /**
         * Sets the value of the property based on the size of the component.
         */
        private void updateFromComponent() {
            // Flag the write so setValue() stores it instead of calling component.setSize() again (would loop).
            updatingFromComponent = true;
            setValue(component.getSize());
            updatingFromComponent = false;
        }
    }

    /**
     * Component to track the size of.
     */
    private final Component component;

    /**
     * Size tracker.
     */
    private final EventAdapter eventAdapter = new EventAdapter();

    /**
     * Current property value.
     */
    private Dimension value = null;

    /**
     * Flag indicating whether the {@link #setValue(Dimension)} call is due to a property change event.
     */
    private boolean updatingFromComponent = false;

    /**
     * Constructor specifying the component for which the property applies.
     *
     * @param component Component whose size property is to be tracked.
     */
    public ComponentSizeProperty(Component component) {
        super();

        // Hook to component
        this.component = component;
        this.component.addComponentListener(eventAdapter);

        // Set initial value
        value = component.getSize();
    }

    /**
     * @see Disposable#dispose()
     */
    @Override
    public void dispose() {
        // Unhook from component
        component.removeComponentListener(eventAdapter);
    }

    /**
     * @see AbstractReadableWritableProperty#getValue()
     */
    @Override
    public Dimension getValue() {
        return value;
    }

    /**
     * @see AbstractReadableWritableProperty#setValue(Object)
     */
    @Override
    public void setValue(Dimension value) {
        // Re-entrancy guard: writes made while listeners are being notified of a previous change are silently ignored.
        if (!isNotifyingListeners()) {
            if (updatingFromComponent) {
                // Change originated from the component itself: record it and notify the property listeners.
                Dimension oldValue = this.value;
                this.value = value;
                maybeNotifyListeners(oldValue, this.value);
            } else {
                // Change requested through the property: push it to the component; the resulting resize event loops
                // back through EventAdapter.updateFromComponent() to update 'value' above.
                component.setSize(value);
            }
        }
    }
}
|
{
"content_hash": "8f2052cf6a49c5d6b55f2f773fb98d7c",
"timestamp": "",
"source": "github",
"line_count": 148,
"max_line_length": 120,
"avg_line_length": 28.04054054054054,
"alnum_prop": 0.6293975903614458,
"repo_name": "padrig64/ValidationFramework",
"id": "ddd12589b7d1d3367d476016ee6a89a06f45efc9",
"size": "5532",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "validationframework-swing/src/main/java/com/google/code/validationframework/swing/property/ComponentSizeProperty.java",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Java",
"bytes": "1890396"
}
],
"symlink_target": ""
}
|
# Devise controller invoked by OmniAuth provider callbacks.
#
# All provider handlers are currently disabled; the commented-out Twitter
# handler below is kept as a reference for re-enabling social login.
# (The flash message inside it is Japanese for "Logged in!".)
class OmniauthCallbacksController < Devise::OmniauthCallbacksController
  # def twitter
  #   @user = User.from_omniauth(request.env["omniauth.auth"]).except("extra")
  #
  #   if @user.persisted?
  #     flash.notice = "ログインしました!"
  #     sign_in_and_redirect workdays_path
  #   else
  #     session["devise.user_attributes"] = @user.attributes
  #     redirect_to new_user_registration_url
  #   end
  # end
end
|
{
"content_hash": "2e64372e2d0e543b86b5732ca5153a30",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 78,
"avg_line_length": 31.846153846153847,
"alnum_prop": 0.6690821256038647,
"repo_name": "takudo/simple-kintai",
"id": "0d304acd7fc540aabee60fe59466088ed5901572",
"size": "432",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/controllers/omniauth_callbacks_controller.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4674"
},
{
"name": "CoffeeScript",
"bytes": "3900"
},
{
"name": "HTML",
"bytes": "16976"
},
{
"name": "JavaScript",
"bytes": "761"
},
{
"name": "Ruby",
"bytes": "54405"
}
],
"symlink_target": ""
}
|
package com.claytontii.minecraft.DogFight;
import org.bukkit.Material;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.meta.ItemMeta;
public class ItemUtil {

	/**
	 * Creates an {@link ItemStack} of the given material and amount with a
	 * custom display name.
	 *
	 * @param material item type
	 * @param number   stack size
	 * @param name     display name to apply to the item
	 * @return the configured item stack
	 */
	public static ItemStack createItem(Material material, int number, String name) {
		ItemStack item = new ItemStack(material, number);
		// Bug fix: getItemMeta() returns a detached copy of the meta, so mutating
		// it has no effect unless it is written back with setItemMeta(). The
		// previous code discarded the copy, so the display name was never applied.
		ItemMeta meta = item.getItemMeta();
		meta.setDisplayName(name);
		item.setItemMeta(meta);
		return item;
	}
}
|
{
"content_hash": "34bc4246d05c522393ed5c020cb61658",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 84,
"avg_line_length": 25.642857142857142,
"alnum_prop": 0.724233983286908,
"repo_name": "ctII/DogFight",
"id": "cec7b563f40c191f8d9056cd2ccf47574c9cc5b3",
"size": "359",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/com/claytontii/minecraft/DogFight/ItemUtil.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "19273"
}
],
"symlink_target": ""
}
|
package huahua.viewpager;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.apkplugin.android.fragment.BasePluginFragment;
public class fragment2 extends BasePluginFragment {
	// View inflated once in onCreate() and reused across generateView() calls.
	private View mMainView;
	/** Lifecycle logging hook: prints when the fragment is attached to its activity. */
	@Override
	public void onAttach(Activity activity) {
		System.out.println("fragment2.onAttach()");
		super.onAttach(activity);
	}
@Override
public void onHiddenChanged(boolean hidden) {
super.onHiddenChanged(hidden);
System.out.println("fragment2.onHiddenChanged() " + hidden);
}
@Override
public void onCreate(Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onCreate(savedInstanceState);
Log.v("huahua", "fragment2-->onCreate()");
LayoutInflater inflater = getActivity().getLayoutInflater();
mMainView = inflater.inflate(R.layout.fragment2,
(ViewGroup) getActivity().findViewById(R.id.viewpager), false);
}
@Override
public View generateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// TODO Auto-generated method stub
Log.v("huahua", "fragment2-->onCreateView()");
ViewGroup p = (ViewGroup) mMainView.getParent();
if (p != null) {
p.removeAllViewsInLayout();
Log.v("huahua", "fragment2-->移除已存在的View");
}
return mMainView;
}
@Override
public void onDestroy() {
// TODO Auto-generated method stub
super.onDestroy();
Log.v("huahua", "fragment2-->onDestroy()");
}
@Override
public void onPause() {
// TODO Auto-generated method stub
super.onPause();
Log.v("huahua", "fragment2-->onPause()");
}
@Override
public void onResume() {
// TODO Auto-generated method stub
super.onResume();
Log.v("huahua", "fragment2-->onResume()");
}
@Override
public void onDetach() {
// TODO Auto-generated method stub
super.onDetach();
}
@Override
public void onStart() {
// TODO Auto-generated method stub
super.onStart();
Log.v("huahua", "fragment2-->onStart()");
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onActivityCreated(savedInstanceState);
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
// TODO Auto-generated method stub
super.onActivityResult(requestCode, resultCode, data);
}
@Override
public void onDestroyView() {
// TODO Auto-generated method stub
super.onDestroyView();
}
@Override
public void onViewCreated(View view, Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onViewCreated(view, savedInstanceState);
}
@Override
public void onStop() {
// TODO Auto-generated method stub
super.onStop();
Log.v("huahua", "fragment2-->onStop()");
}
}
|
{
"content_hash": "d896101f44c48846b601fc68df160fa5",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 77,
"avg_line_length": 24.81512605042017,
"alnum_prop": 0.696241110734846,
"repo_name": "XinLan087/android_plugin",
"id": "209208d4ea0555bb04e89710e527590d2df84b69",
"size": "2965",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "AndroidPlugin_MyViewPagerActivity/src/huahua/viewpager/fragment2.java",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "1239"
},
{
"name": "IDL",
"bytes": "2477"
},
{
"name": "Java",
"bytes": "728295"
},
{
"name": "Makefile",
"bytes": "317"
},
{
"name": "Shell",
"bytes": "550"
}
],
"symlink_target": ""
}
|
using NUnit.Framework;
using Tautalos.Unity.Mobius.Signals;
using Tautalos.Unity.Mobius.Channels;

namespace Tautalos.Unity.Mobius.Tests
{
	/// <summary>
	/// Unit tests for the EmptySignaller null-object singleton.
	/// </summary>
	[TestFixture()]
	internal class EmptySignallerTest
	{
		[Test]
		[Category("Given the EmptySignaller")]
		[Description("When asked if the EmptySignaller, Then it should answer positive")]
		public void ShouldAnswerPositiveThatIsTheEmptySignaller ()
		{
			var signaller = EmptySignaller.Instance;
			Assert.IsTrue (signaller.IsEmpty);
		}

		[Test]
		[Category("Given the EmptySignaller")]
		[Description("When asked for the Owner, Then it should return itself")]
		public void ShouldReturnItselfAsTheOwner ()
		{
			var signaller = EmptySignaller.Instance;
			Assert.AreSame (signaller, signaller.Owner);
		}

		[Test]
		[Category("Given the EmptySignaller")]
		[Description("When asked for the Channel, Then it should return the EmptyChannel")]
		public void ShouldReturnTheEmptyChannelAsTheChannel ()
		{
			Assert.AreSame (EmptySignaller.Instance.Channel, EmptyChannel.Instance);
		}
	}
}
|
{
"content_hash": "b7a8c92e5a2196f9cb82fc36649aa6c4",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 85,
"avg_line_length": 26.394736842105264,
"alnum_prop": 0.7477567298105683,
"repo_name": "tautalos/Unity.Mobius",
"id": "8ad25f74e8e8ca21a69cab3e4e918d2628f4ca9d",
"size": "1003",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Assets/editor/tests/Signal/EmptySignallerTest.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "1107884"
},
{
"name": "GLSL",
"bytes": "24326"
},
{
"name": "JavaScript",
"bytes": "69639"
}
],
"symlink_target": ""
}
|
<?php

namespace CheckDigit\Format;

class CreditCardTest extends \PHPUnit_Framework_TestCase
{
    /**
     * Data provider for testValidate.
     *
     * Deliberately NOT prefixed with "test": PHPUnit treats any public
     * "test*" method as a test case, so the old name testValidateProvider
     * was also executed as an assertion-less (risky) test.
     *
     * @return array each entry is array(cardType, cardNumber, expectedValid)
     */
    public function validateProvider() {
        return array(
            array(CreditCard::AMERICAN_EXPRESS, "340000000000009", true),
            array(CreditCard::AMERICAN_EXPRESS, "378282246310005", true),
            array(CreditCard::AMERICAN_EXPRESS, "371449635398431", true),
            array(CreditCard::AMERICAN_EXPRESS, "378734493671000", true),

            //Dankort
            array(CreditCard::DANKORT, "5019717010103742", true),

            //Diners Club
            array(CreditCard::DINERS_CLUB_CARTEBLANCHE, "30569309025904", true),
            array(CreditCard::DINERS_CLUB_CARTEBLANCHE, "30000000000004", true),
            array(CreditCard::DINERS_CLUB_CARTEBLANCHE, "30000000000004", true),
            array(CreditCard::DINERS_CLUB_INTERNATIONAL, "3600000000000008", true),
            array(CreditCard::DINERS_CLUB_US, "5400000000000005", true),
            array(CreditCard::DINERS_CLUB_US, "5600000000000003", true),

            //Discover
            array(CreditCard::DISCOVER, "6011111111111117", true),
            array(CreditCard::DISCOVER, "6011000990139424", true),
            array(CreditCard::DISCOVER, "6011000000000004", true),

            //JCB
            array(CreditCard::JCB, "2131000000000008", true),
            array(CreditCard::JCB, "3530111333300000", true),
            array(CreditCard::JCB, "3566002020360505", true),

            //MasterCard
            array(CreditCard::MASTERCARD, "5555555555554444", true),
            array(CreditCard::MASTERCARD, "5105105105105100", true),
            array(CreditCard::MASTERCARD, "5500000000000004", true),

            //Visa
            array(CreditCard::VISA, "4111111111111111", true),
            array(CreditCard::VISA, "4012888888881881", true),
            array(CreditCard::VISA, "4222222222222", true),
        );
    }

    /**
     * Test the validate function
     *
     * @param int $cardType
     * @param string $cardNumber
     * @param bool $isValid
     *
     * @dataProvider validateProvider
     */
    public function testValidate($cardType, $cardNumber, $isValid) {
        $validator = new CreditCard();
        $validator->setAllowedCardTypes($cardType);

        $validateResult = $validator->validate($cardNumber);
        if ($isValid === true) {
            $this->assertEquals(true, $validateResult, "Card Number should have passed validation: " . json_encode($validator->getErrors()));
            $this->assertEquals(array(), $validator->getErrors());
        }
        else {
            $this->assertEquals(false, $validateResult, "Card Number should not have passed validation");
            $this->assertEquals(1, count($validator->getErrors()));
        }
    }

    /**
     * Test that the allowed card types option is working properly.
     *
     * Exercises single types, bitwise-OR combinations, array input and
     * the ALL constant. (The old docblock documented @param entries that
     * this method does not have.)
     */
    public function testAllowedCardTypes() {
        $validVisa = "4111111111111111";
        $validMastercard = "5555555555554444";
        $validAmex = "340000000000009";

        $validator = new CreditCard();

        // Test Allowed VISA only
        $validator->setAllowedCardTypes(CreditCard::VISA);
        $this->assertEquals(true, $validator->validate($validVisa), "Visa card should validate okay");
        $this->assertEquals(false, $validator->validate($validMastercard), "Mastercard is not in allowed card types");
        $this->assertEquals(false, $validator->validate($validAmex), "Amex is not in allowed card types");

        // Test Allowed Mastercard Only
        $validator->setAllowedCardTypes(CreditCard::MASTERCARD);
        $this->assertEquals(false, $validator->validate($validVisa), "Visa card is not in allowed card types");
        $this->assertEquals(true, $validator->validate($validMastercard), "Mastercard should validate okay");
        $this->assertEquals(false, $validator->validate($validAmex), "Amex is not in allowed card types");

        // Test Allowed Amex Only
        $validator->setAllowedCardTypes(CreditCard::AMERICAN_EXPRESS);
        $this->assertEquals(false, $validator->validate($validVisa), "Visa card is not in allowed card types");
        $this->assertEquals(false, $validator->validate($validMastercard), "Mastercard is not in allowed card types");
        $this->assertEquals(true, $validator->validate($validAmex), "Amex should validate okay");

        // Test Visa and Mastercard using bit operations
        $validator->setAllowedCardTypes(CreditCard::VISA | CreditCard::MASTERCARD);
        $this->assertEquals(true, $validator->validate($validVisa), "Visa should validate okay");
        $this->assertEquals(true, $validator->validate($validMastercard), "Mastercard should validate okay");
        $this->assertEquals(false, $validator->validate($validAmex), "Amex is not in allowed card types");

        // Test Mastercard and Amex using an array
        $validator->setAllowedCardTypes(array(CreditCard::MASTERCARD, CreditCard::AMERICAN_EXPRESS));
        $this->assertEquals(false, $validator->validate($validVisa), "Visa is not in allowed card type");
        $this->assertEquals(true, $validator->validate($validMastercard), "Mastercard should validate okay");
        $this->assertEquals(true, $validator->validate($validAmex), "Amex should validate okay");

        // Check ALL
        $validator->setAllowedCardTypes(CreditCard::ALL);
        $this->assertEquals(true, $validator->validate($validVisa), "Visa should validate okay");
        $this->assertEquals(true, $validator->validate($validMastercard), "Mastercard should validate okay");
        $this->assertEquals(true, $validator->validate($validAmex), "Amex should validate okay");
    }
}
|
{
"content_hash": "b087e9e5e0724a598cba2e629c48a426",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 141,
"avg_line_length": 48.48360655737705,
"alnum_prop": 0.6503803888419273,
"repo_name": "afoozle/checkdigit",
"id": "078b00e06939e3bb1eb1e46118abfaffbb5bc6ab",
"size": "6084",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/CheckDigit/Format/CreditCardTest.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "32556"
}
],
"symlink_target": ""
}
|
import StyleSheet from 'react-style';

import { IDCell } from './components/IDCell';
import { TextCell } from './components/TextCell';
import { CheckBoxCell } from './components/CheckBoxCell';

// Column layout styles: narrow id columns, wider text/checkbox columns.
const styles = StyleSheet.create({
  id: { flex: 0.2, marginRight: 5 },
  text: { flex: 1.5, marginRight: 10 },
  checkbox: { flex: 1.5 }
});

/**
 * Table column schema for the todo list: maps each row field to its
 * header label, cell renderer component and layout style. Key order
 * defines column order.
 */
export const schema = {
  userId: { style: styles.id, header: 'User ID', component: IDCell },
  id: { style: styles.id, header: 'Todo ID', component: IDCell },
  title: { style: styles.text, header: 'Note', component: TextCell },
  completed: { style: styles.checkbox, header: 'Completed', component: CheckBoxCell }
};
|
{
"content_hash": "7b76993960766e3d9c7ea8922b67e2f5",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 57,
"avg_line_length": 16.444444444444443,
"alnum_prop": 0.5932432432432433,
"repo_name": "mrblueblue/redux-playground",
"id": "956adaf5d00d643197eb6426098716d559271d4c",
"size": "750",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/app/table-schema.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "241"
},
{
"name": "JavaScript",
"bytes": "9436"
}
],
"symlink_target": ""
}
|
Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**invoice** | [**\XeroAPI\XeroPHP\Models\Accounting\Invoice**](Invoice.md) | |
**overpayment** | [**\XeroAPI\XeroPHP\Models\Accounting\Overpayment**](Overpayment.md) | | [optional]
**prepayment** | [**\XeroAPI\XeroPHP\Models\Accounting\Prepayment**](Prepayment.md) | | [optional]
**credit_note** | [**\XeroAPI\XeroPHP\Models\Accounting\CreditNote**](CreditNote.md) | | [optional]
**amount** | **double** | the amount being applied to the invoice |
**date** | **string** | the date the allocation is applied YYYY-MM-DD. |
**status_attribute_string** | **string** | A string to indicate if an invoice status | [optional]
**validation_errors** | [**\XeroAPI\XeroPHP\Models\Accounting\ValidationError[]**](ValidationError.md) | Displays array of validation error messages from the API | [optional]
[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
|
{
"content_hash": "5b42014459b1b7d15e9af5cfeffdb8cd",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 175,
"avg_line_length": 76.07142857142857,
"alnum_prop": 0.6553990610328638,
"repo_name": "unaio/una",
"id": "17e8334b7527beb15a0e5def58b7ad5d3c3afd0c",
"size": "1093",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/boonex/xero/plugins/xeroapi/xero-php-oauth2/doc/accounting/Model/Allocation.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "9522763"
},
{
"name": "Dockerfile",
"bytes": "2367"
},
{
"name": "HTML",
"bytes": "6194660"
},
{
"name": "JavaScript",
"bytes": "24733694"
},
{
"name": "Less",
"bytes": "3020615"
},
{
"name": "Makefile",
"bytes": "1196"
},
{
"name": "PHP",
"bytes": "158741504"
},
{
"name": "Ruby",
"bytes": "210"
},
{
"name": "Shell",
"bytes": "3327"
},
{
"name": "Smarty",
"bytes": "3461"
}
],
"symlink_target": ""
}
|
import {expect} from 'chai'
import Server from '../../src'
import {getRandomString, createTestInstance, deleteTestInstance} from '../utils'

describe('Class', function() {
  const testClassName = getRandomString()
  const instanceName = getRandomString()
  let classApi = null

  // Provision a test instance and grab the class endpoint bound to it.
  before(function(done) {
    const ctx = {
      meta: {
        socket: 'test-socket',
        token: process.env.E2E_ACCOUNT_KEY
      }
    }

    createTestInstance(instanceName)
      .then(instanceObj => {
        ctx.meta.instance = instanceObj.name
        classApi = new Server(ctx)._class
        done()
      })
      .catch(err => {
        console.log(err)
        done(err)
      })
  })

  // Best-effort teardown: ignore cleanup failures.
  after(function(done) {
    deleteTestInstance(instanceName)
      .then(() => done())
      .catch(() => done())
  })

  it('can create a class', function(done) {
    const params = {
      name: testClassName,
      schema: [{type: 'string', name: 'parameter_name'}]
    }

    classApi
      .create(params)
      .then(res => {
        expect(res.name).to.be.equal(testClassName)
        done()
      })
      .catch(err => {
        console.log(err)
        done(err)
      })
  })

  it('can delete a class', function(done) {
    classApi
      .delete(testClassName)
      .then(classObj => {
        expect(classObj).to.be.an('undefined')
        done()
      })
      .catch(err => {
        console.log(err)
        done(err)
      })
  })
})
|
{
"content_hash": "5aeac28038d0e81dc62994a4f0d22974",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 80,
"avg_line_length": 21.52238805970149,
"alnum_prop": 0.5270457697642164,
"repo_name": "Syncano/syncano-server-js",
"id": "d5bb7d367b7e483198d6f8bd9d5b47cb7fb57796",
"size": "1442",
"binary": false,
"copies": "1",
"ref": "refs/heads/devel",
"path": "test/e2e/class.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "101818"
}
],
"symlink_target": ""
}
|
// Copyright 2018-2022, University of Colorado Boulder

/**
 * 'Directrix' checkbox.
 *
 * @author Chris Malley (PixelZoom, Inc.)
 */

import merge from '../../../../phet-core/js/merge.js';
import { Line } from '../../../../scenery/js/imports.js';
import GQColors from '../../common/GQColors.js';
import GQConstants from '../../common/GQConstants.js';
import GQCheckbox from '../../common/view/GQCheckbox.js';
import graphingQuadratics from '../../graphingQuadratics.js';
import GraphingQuadraticsStrings from '../../GraphingQuadraticsStrings.js';

class DirectrixCheckbox extends GQCheckbox {

  /**
   * @param {BooleanProperty} directrixVisibleProperty
   * @param {Object} [options]
   */
  constructor( directrixVisibleProperty, options ) {

    options = merge( {

      // phet-io
      phetioDocumentation: 'checkbox that shows the directrix on the graph'
    }, options );

    // This checkbox supplies its own icon: a horizontal dashed line, drawn
    // with the same stroke/dash settings used for the directrix itself.
    assert && assert( !options.icon, 'DirectrixCheckbox sets icon' );
    const dashLength = GQConstants.DIRECTRIX_LINE_DASH[ 0 ];
    options.icon = new Line( 0, 0, 5 * dashLength, 0, {
      stroke: GQColors.DIRECTRIX,
      lineWidth: GQConstants.DIRECTRIX_LINE_WIDTH,
      lineDash: GQConstants.DIRECTRIX_LINE_DASH
    } );

    super( directrixVisibleProperty, GraphingQuadraticsStrings.directrix, options );
  }
}

graphingQuadratics.register( 'DirectrixCheckbox', DirectrixCheckbox );
export default DirectrixCheckbox;
|
{
"content_hash": "56ac6b0fc83e4ca75b341fb56cbda893",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 84,
"avg_line_length": 31.444444444444443,
"alnum_prop": 0.6989399293286219,
"repo_name": "phetsims/graphing-quadratics",
"id": "2c6d7e7d3c56a582a3a5d55bc8032759768be991",
"size": "1415",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "js/focusanddirectrix/view/DirectrixCheckbox.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "4579"
},
{
"name": "JavaScript",
"bytes": "273909"
},
{
"name": "TypeScript",
"bytes": "2442"
}
],
"symlink_target": ""
}
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package game.tests;
import game.logic.Dice;
import game.objects.AttackOutcome;
import game.objects.exceptions.DiceException;
import java.util.ArrayList;
import org.junit.After;
import org.junit.AfterClass;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
*
* @author Simeon
*/
public class DiceTest {
ArrayList<Integer> attackerDice;
ArrayList<Integer> defenderDice;
@BeforeClass
public static void setUpClass() throws Exception {
}
@AfterClass
public static void tearDownClass() throws Exception {
}
@Before
public void setUp() throws Exception {
}
@After
public void tearDown() throws Exception {
attackerDice = null;
defenderDice = null;
}
@Test(expected = DiceException.class)
public void testNumberOfDice_0_1() throws DiceException {
Dice.Roll(0, 1);
}
@Test(expected = DiceException.class)
public void testNumberOfDice_1_0() throws DiceException {
Dice.Roll(1, 0);
}
@Test(expected = DiceException.class)
public void testNumberOfDice_0_0() throws DiceException {
Dice.Roll(0, 0);
}
@Test(expected = DiceException.class)
public void testNumberOfDice_9_1() throws DiceException {
Dice.Roll(9, 1);
}
@Test(expected = DiceException.class)
public void testNumberOfDice_1_9() throws DiceException {
Dice.Roll(1, 9);
}
@Test(expected = DiceException.class)
public void testNumberOfDice_9_9() throws DiceException {
Dice.Roll(9, 9);
}
@Test
public void testDiceRolled() throws DiceException {
int nAttackerDice = 3;
int nDefenderDice = 2;
AttackOutcome ao = Dice.Roll(nAttackerDice, nDefenderDice);
assumeTrue(ao.getAttackerDice().size() == nAttackerDice);
assertTrue(ao.getDefenderDice().size() == nDefenderDice);
}
@Test
public void testTroopLosses() throws DiceException {
// for loop because there would be a chance of accidentally passing this test even if it were misbehaving
for (int run = 0; run < 10000; run++) {
AttackOutcome ao = Dice.Roll(3, 2);
attackerDice = ao.getAttackerDice();
defenderDice = ao.getDefenderDice();
int attackerLosses = 0;
int defenderLosses = 0;
for (int i = 0; i < 2; i++) {
if (attackerDice.get(i) > defenderDice.get(i)) {
defenderLosses++;
} else {
attackerLosses++;
}
}
assumeTrue(ao.getTroopsLostByAttacker() == attackerLosses);
assumeTrue(ao.getTroopsLostByDefender() == defenderLosses);
}
}
}
|
{
"content_hash": "e49be3b8c708723a9ae2655cde2d7c8d",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 113,
"avg_line_length": 28.700934579439252,
"alnum_prop": 0.6284597850862911,
"repo_name": "HDXconnor/Risque",
"id": "ba692ac2f1f994f82610b91ad5a4b18d431da112",
"size": "3071",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/game/tests/DiceTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "8382"
},
{
"name": "Java",
"bytes": "103764"
},
{
"name": "JavaScript",
"bytes": "23226"
}
],
"symlink_target": ""
}
|
/** @file STEPFileReader.cpp
* @brief Implementation of the STEP file parser, which fills a
* STEP::DB with data read from a file.
*/
#include "STEPFileReader.h"
#include "STEPFileEncoding.h"
#include "TinyFormatter.h"
#include "fast_atof.h"
#include <boost/make_shared.hpp>
using namespace Assimp;
namespace EXPRESS = STEP::EXPRESS;
#include <functional>
// ------------------------------------------------------------------------------------------------
// From http://stackoverflow.com/questions/216823/whats-the-best-way-to-trim-stdstring
// trim from start
static inline std::string <rim(std::string &s) {
s.erase(s.begin(), std::find_if(s.begin(), s.end(), std::not1( std::ptr_fun(Assimp::IsSpace<char>))));
return s;
}
// trim from end
// Strip trailing whitespace from s in place; returns s for chaining.
static inline std::string &rtrim(std::string &s) {
	// reverse-find the last non-space character and erase everything after it
	s.erase(std::find_if(s.rbegin(), s.rend(), std::not1( std::ptr_fun(Assimp::IsSpace<char>))).base(),s.end());
	return s;
}
// trim from both ends
// Strip whitespace from both ends of s in place; returns s for chaining.
static inline std::string &trim(std::string &s) {
	return ltrim(rtrim(s));
}
// ------------------------------------------------------------------------------------------------
// Prepend "(line N) " (after an optional prefix) to a diagnostic message,
// unless the line number is LINE_NOT_SPECIFIED.
std::string AddLineNumber(const std::string& s,uint64_t line /*= LINE_NOT_SPECIFIED*/, const std::string& prefix = "")
{
	return line == STEP::SyntaxError::LINE_NOT_SPECIFIED ? prefix+s : static_cast<std::string>( (Formatter::format(),prefix,"(line ",line,") ",s) );
}
// ------------------------------------------------------------------------------------------------
// Prepend "(entity #N) " (after an optional prefix) to a diagnostic message,
// unless the entity id is ENTITY_NOT_SPECIFIED.
std::string AddEntityID(const std::string& s,uint64_t entity /*= ENTITY_NOT_SPECIFIED*/, const std::string& prefix = "")
{
	return entity == STEP::TypeError::ENTITY_NOT_SPECIFIED ? prefix+s : static_cast<std::string>( (Formatter::format(),prefix,"(entity #",entity,") ",s));
}
// ------------------------------------------------------------------------------------------------
// Syntax error in the STEP input; the message is annotated with the
// one-based source line when one is supplied.
STEP::SyntaxError::SyntaxError (const std::string& s,uint64_t line /* = LINE_NOT_SPECIFIED */)
: DeadlyImportError(AddLineNumber(s,line))
{

}
// ------------------------------------------------------------------------------------------------
// Type error for a specific entity; the message is annotated with both the
// entity id and the source line when supplied.
STEP::TypeError::TypeError (const std::string& s,uint64_t entity /* = ENTITY_NOT_SPECIFIED */,uint64_t line /*= LINE_NOT_SPECIFIED*/)
: DeadlyImportError(AddLineNumber(AddEntityID(s,entity),line))
{

}
// ------------------------------------------------------------------------------------------------
// Parse the ISO-10303-21 header section of a STEP file.
// Verifies the "ISO-10303-21;" magic token, extracts FILE_SCHEMA into the
// header info, and returns a new DB whose line splitter is positioned just
// past "DATA;" (start of the data section). Throws STEP::SyntaxError on a
// malformed header. Caller takes ownership of the returned DB.
STEP::DB* STEP::ReadFileHeader(boost::shared_ptr<IOStream> stream)
{
	boost::shared_ptr<StreamReaderLE> reader = boost::shared_ptr<StreamReaderLE>(new StreamReaderLE(stream));
	std::auto_ptr<STEP::DB> db = std::auto_ptr<STEP::DB>(new STEP::DB(reader));

	LineSplitter& splitter = db->GetSplitter();
	if (!splitter || *splitter != "ISO-10303-21;") {
		throw STEP::SyntaxError("expected magic token: ISO-10303-21",1);
	}

	HeaderInfo& head = db->GetHeader();
	for(++splitter; splitter; ++splitter) {
		const std::string& s = *splitter;
		if (s == "DATA;") {
			// here we go, header done, start of data section
			++splitter;
			break;
		}

		// want one-based line numbers for human readers, so +1
		const uint64_t line = splitter.get_index()+1;

		if (s.substr(0,11) == "FILE_SCHEMA") {
			const char* sz = s.c_str()+11;
			SkipSpaces(sz,&sz);
			boost::shared_ptr< const EXPRESS::DataType > schema = EXPRESS::DataType::Parse(sz);

			// the file schema should be a regular list entity, although it usually contains exactly one entry
			// since the list itself is contained in a regular parameter list, we actually have
			// two nested lists.
			const EXPRESS::LIST* list = dynamic_cast<const EXPRESS::LIST*>(schema.get());
			if (list && list->GetSize()) {
				list = dynamic_cast<const EXPRESS::LIST*>( (*list)[0].get() );
				if (!list) {
					throw STEP::SyntaxError("expected FILE_SCHEMA to be a list",line);
				}

				// XXX need support for multiple schemas?
				if (list->GetSize() > 1) {
					DefaultLogger::get()->warn(AddLineNumber("multiple schemas currently not supported",line));
				}
				const EXPRESS::STRING* string;
				if (!list->GetSize() || !(string=dynamic_cast<const EXPRESS::STRING*>( (*list)[0].get() ))) {
					throw STEP::SyntaxError("expected FILE_SCHEMA to contain a single string literal",line);
				}
				head.fileSchema =  *string;
			}
		}

		// XXX handle more header fields
	}

	return db.release();
}
namespace {

// ------------------------------------------------------------------------------------------------
// check whether the given line contains an entity definition (i.e. starts with "#<number>=")
bool IsEntityDef(const std::string& snext)
{
	if (snext[0] != '#') {
		return false;
	}
	// after '#', only digits and spaces may precede the '=' of a definition
	for (std::string::size_type i = 1; i < snext.size(); ++i) {
		const char c = snext[i];
		if (c == '=') {
			return true;
		}
		if ((c < '0' || c > '9') && c != ' ') {
			return false;
		}
	}
	return false;
}

}
// ------------------------------------------------------------------------------------------------
// Parse the DATA section of a STEP file into the DB as LazyObject records.
// Each "#<id>=<TYPE>(<args>);" record is located (joining continuation
// lines as needed), its type name is matched against the conversion
// schema, and the raw argument text is stored for lazy evaluation.
// Malformed records are skipped with a warning rather than aborting.
void STEP::ReadFile(DB& db,const EXPRESS::ConversionSchema& scheme,
	const char* const* types_to_track, size_t len,
	const char* const* inverse_indices_to_track, size_t len2)
{
	db.SetSchema(scheme);
	db.SetTypesToTrack(types_to_track,len);
	db.SetInverseIndicesToTrack(inverse_indices_to_track,len2);

	const DB::ObjectMap& map = db.GetObjects();
	LineSplitter& splitter = db.GetSplitter();

	while (splitter) {
		bool has_next = false;
		std::string s = *splitter;
		if (s == "ENDSEC;") {
			break;
		}
		// NOTE: strips *all* spaces from the record; string literals inside
		// the argument tuple lose their spaces too.
		s.erase(std::remove(s.begin(), s.end(), ' '), s.end());

		// want one-based line numbers for human readers, so +1
		const uint64_t line = splitter.get_index()+1;

		// LineSplitter already ignores empty lines
		ai_assert(s.length());
		if (s[0] != '#') {
			DefaultLogger::get()->warn(AddLineNumber("expected token \'#\'",line));
			++splitter;
			continue;
		}
		// ---
		// extract id, entity class name and argument string,
		// but don't create the actual object yet.
		// ---
		const std::string::size_type n0 = s.find_first_of('=');
		if (n0 == std::string::npos) {
			DefaultLogger::get()->warn(AddLineNumber("expected token \'=\'",line));
			++splitter;
			continue;
		}

		const uint64_t id = strtoul10_64(s.substr(1,n0-1).c_str());
		if (!id) {
			DefaultLogger::get()->warn(AddLineNumber("expected positive, numeric entity id",line));
			++splitter;
			continue;
		}

		// Find the opening '(' of the argument tuple; if the record is split
		// across physical lines, append lines until found (or until a new
		// entity definition starts).
		std::string::size_type n1 = s.find_first_of('(',n0);
		if (n1 == std::string::npos) {
			has_next = true;
			bool ok = false;
			for( ++splitter; splitter; ++splitter) {
				const std::string& snext = *splitter;
				if (snext.empty()) {
					continue;
				}

				// the next line doesn't start an entity, so maybe it is
				// just a continuation for this line, keep going
				if (!IsEntityDef(snext)) {
					s.append(snext);
					n1 = s.find_first_of('(',n0);
					ok = (n1 != std::string::npos);
				}
				else {
					break;
				}
			}

			if(!ok) {
				DefaultLogger::get()->warn(AddLineNumber("expected token \'(\'",line));
				continue;
			}
		}

		// Find the closing ')' followed by ';'; again join continuation
		// lines until the record is complete.
		std::string::size_type n2 = s.find_last_of(')');
		if (n2 == std::string::npos || n2 < n1 || n2 == s.length() - 1 || s[n2 + 1] != ';') {

			has_next = true;
			bool ok = false;
			for( ++splitter; splitter; ++splitter) {
				const std::string& snext = *splitter;
				if (snext.empty()) {
					continue;
				}
				// the next line doesn't start an entity, so maybe it is
				// just a continuation for this line, keep going
				if (!IsEntityDef(snext)) {
					s.append(snext);
					n2 = s.find_last_of(')');
					ok = !(n2 == std::string::npos || n2 < n1 || n2 == s.length() - 1 || s[n2 + 1] != ';');
				}
				else {
					break;
				}
			}
			if(!ok) {
				DefaultLogger::get()->warn(AddLineNumber("expected token \')\'",line));
				continue;
			}
		}

		if (map.find(id) != map.end()) {
			DefaultLogger::get()->warn(AddLineNumber((Formatter::format(),"an object with the id #",id," already exists"),line));
		}

		// Isolate the (lower-cased) type name between '=' and '('.
		std::string::size_type ns = n0;
		do ++ns; while( IsSpace(s.at(ns)));
		std::string::size_type ne = n1;
		do --ne; while( IsSpace(s.at(ne)));
		std::string type = s.substr(ns,ne-ns+1);
		std::transform( type.begin(), type.end(), type.begin(), &Assimp::ToLower<char> );
		const char* sz = scheme.GetStaticStringForToken(type);
		if(sz) {
			// Copy the raw "(...)" argument text; LazyObject takes ownership
			// (released in its destructor via delete[] if never evaluated).
			const std::string::size_type len = n2-n1+1;
			char* const copysz = new char[len+1];
			std::copy(s.c_str()+n1,s.c_str()+n2+1,copysz);
			copysz[len] = '\0';
			db.InternInsert(new LazyObject(db,id,line,sz,copysz));
		}
		if(!has_next) {
			++splitter;
		}
	}

	if (!splitter) {
		DefaultLogger::get()->warn("STEP: ignoring unexpected EOF");
	}

	if ( !DefaultLogger::isNullLogger()){
		DefaultLogger::get()->debug((Formatter::format(),"STEP: got ",map.size()," object records with ",
			db.GetRefs().size()," inverse index entries"));
	}
}
// ------------------------------------------------------------------------------------------------
// Parse a single EXPRESS parameter value starting at *inout.
// Recognizes: '*' (ISDERIVED), '$' (UNSET), '(' (aggregate/LIST),
// '.X.' (ENUMERATION, incl. booleans), '#n' (ENTITY reference),
// '...' (STRING literal), and bare numbers (REAL if a '.' occurs,
// otherwise INTEGER). With a schema, known typed literals such as
// IFCSOMEMEASURE(value) are unwrapped to their inner value.
// On return, inout points just past the consumed token.
// Throws STEP::SyntaxError on malformed input.
boost::shared_ptr<const EXPRESS::DataType> EXPRESS::DataType::Parse(const char*& inout,uint64_t line, const EXPRESS::ConversionSchema* schema /*= NULL*/)
{
	const char* cur = inout;
	SkipSpaces(&cur);
	if (*cur == ',' || IsSpaceOrNewLine(*cur)) {
		throw STEP::SyntaxError("unexpected token, expected parameter",line);
	}

	// just skip over constructions such as IFCPLANEANGLEMEASURE(0.01) and read only the value
	if (schema) {
		bool ok = false;
		for(const char* t = cur; *t && *t != ')' && *t != ','; ++t) {
			if (*t=='(') {
				if (!ok) {
					break;
				}
				// back up over trailing spaces to isolate the type name
				for(--t;IsSpace(*t);--t);
				std::string s(cur,static_cast<size_t>(t-cur+1));
				std::transform(s.begin(),s.end(),s.begin(),&ToLower<char> );
				if (schema->IsKnownToken(s)) {
					// recurse to parse the wrapped value, then skip the ')'
					for(cur = t+1;*cur++ != '(';);
					const boost::shared_ptr<const EXPRESS::DataType> dt = Parse(cur);
					inout = *cur ? cur+1 : cur;
					return dt;
				}
				break;
			}
			else if (!IsSpace(*t)) {
				ok = true;
			}
		}
	}

	if (*cur == '*' ) {
		inout = cur+1;
		return boost::make_shared<EXPRESS::ISDERIVED>();
	}
	else if (*cur == '$' ) {
		inout = cur+1;
		return boost::make_shared<EXPRESS::UNSET>();
	}
	else if (*cur == '(' ) {
		// start of an aggregate, further parsing is done by the LIST factory constructor
		inout = cur;
		return EXPRESS::LIST::Parse(inout,line,schema);
	}
	else if (*cur == '.' ) {
		// enum (includes boolean)
		const char* start = ++cur;
		for(;*cur != '.';++cur) {
			if (*cur == '\0') {
				throw STEP::SyntaxError("enum not closed",line);
			}
		}
		inout = cur+1;
		return boost::make_shared<EXPRESS::ENUMERATION>(std::string(start, static_cast<size_t>(cur-start) ));
	}
	else if (*cur == '#' ) {
		// object reference
		return boost::make_shared<EXPRESS::ENTITY>(strtoul10_64(++cur,&inout));
	}
	else if (*cur == '\'' ) {
		// string literal
		const char* start = ++cur;
		for(;*cur != '\'';++cur) {
			if (*cur == '\0') {
				throw STEP::SyntaxError("string literal not closed",line);
			}
		}
		// a doubled quote ('') is an escaped quote inside the literal
		if (cur[1] == '\'') {
			// Vesanen: more than 2 escaped ' in one literal!
			do {
				for(cur += 2;*cur != '\'';++cur) {
					if (*cur == '\0') {
						throw STEP::SyntaxError("string literal not closed",line);
					}
				}
			}
			while(cur[1] == '\'');
		}
		inout = cur + 1;

		// assimp is supposed to output UTF8 strings, so we have to deal
		// with foreign encodings.
		std::string stemp = std::string(start, static_cast<size_t>(cur - start));
		if(!StringToUTF8(stemp)) {
			// TODO: route this to a correct logger with line numbers etc., better error messages
			DefaultLogger::get()->error("an error occurred reading escape sequences in ASCII text");
		}

		return boost::make_shared<EXPRESS::STRING>(stemp);
	}
	else if (*cur == '\"' ) {
		throw STEP::SyntaxError("binary data not supported yet",line);
	}

	// else -- must be a number. if there is a decimal dot in it,
	// parse it as real value, otherwise as integer.
	const char* start = cur;
	for(;*cur && *cur != ',' && *cur != ')' && !IsSpace(*cur);++cur) {
		if (*cur == '.') {
			double f;
			inout = fast_atoreal_move<double>(start,f);
			return boost::make_shared<EXPRESS::REAL>(f);
		}
	}

	// integer: handle an optional explicit sign
	bool neg = false;
	if (*start == '-') {
		neg = true;
		++start;
	}
	else if (*start == '+') {
		++start;
	}
	int64_t num = static_cast<int64_t>( strtoul10_64(start,&inout) );
	return boost::make_shared<EXPRESS::INTEGER>(neg?-num:num);
}
// ------------------------------------------------------------------------------------------------
// Parse a parenthesized aggregate "(a,b,...)" into an EXPRESS::LIST.
// *inout must point at the opening '('; on return it points just past
// the closing ')'. Elements are parsed via DataType::Parse.
// Throws STEP::SyntaxError on malformed input.
boost::shared_ptr<const EXPRESS::LIST> EXPRESS::LIST::Parse(const char*& inout,uint64_t line, const EXPRESS::ConversionSchema* schema /*= NULL*/)
{
	const boost::shared_ptr<EXPRESS::LIST> list = boost::make_shared<EXPRESS::LIST>();
	EXPRESS::LIST::MemberList& members = list->members;

	const char* cur = inout;
	if (*cur++ != '(') {
		throw STEP::SyntaxError("unexpected token, expected \'(\' token at beginning of list",line);
	}

	// estimate the number of items upfront - lists can grow large
	size_t count = 1;
	for(const char* c=cur; *c && *c != ')'; ++c) {
		count += (*c == ',' ? 1 : 0);
	}

	members.reserve(count);

	for(;;++cur) {
		if (!*cur) {
			throw STEP::SyntaxError("unexpected end of line while reading list");
		}
		SkipSpaces(cur,&cur);
		if (*cur == ')') {
			break;
		}

		members.push_back( EXPRESS::DataType::Parse(cur,line,schema));
		SkipSpaces(cur,&cur);

		if (*cur != ',') {
			if (*cur == ')') {
				break;
			}
			throw STEP::SyntaxError("unexpected token, expected \',\' or \')\' token after list element",line);
		}
	}

	inout = cur+1;
	return list;
}
// ------------------------------------------------------------------------------------------------
// Construct a lazily-evaluated STEP object record.
//
// The raw, unparsed argument tuple (`args`) is stored and only converted
// into a typed object on first access (see LazyInit()); the destructor
// releases whichever of the two resources is still owned.
//
// @param db    Owning database; also receives inverse-reference entries.
// @param id    Numeric STEP entity id (the number following '#').
// @param type  Entity type name.
// @param args  Raw argument tuple text; ownership passes to this object.
STEP::LazyObject::LazyObject(DB& db, uint64_t id,uint64_t /*line*/, const char* const type,const char* args)
: id(id)
, type(type)
, db(db)
, args(args)
, obj()
{
    // find any external references and store them in the database.
    // this helps us emulate STEPs INVERSE fields.
    if (db.KeepInverseIndicesForType(type)) {
        const char* a = args;

        // do a quick scan through the argument tuple and watch out for entity references.
        // skip_depth tracks parenthesis nesting; references are only recorded
        // while inside the argument tuple (depth >= 1).
        int64_t skip_depth = 0;
        while(*a) {
            if (*a == '(') {
                ++skip_depth;
            }
            else if (*a == ')') {
                --skip_depth;
            }

            // NOTE(review): string literals are not skipped here, so a '#'
            // occurring inside a quoted string would also be recorded as a
            // reference — confirm this is acceptable for the inverse index.
            if (skip_depth >= 1 && *a=='#') {
                const char* tmp;
                const int64_t num = static_cast<int64_t>( strtoul10_64(a+1,&tmp) );
                db.MarkRef(num,id);
            }
            ++a;
        }
    }
}
// ------------------------------------------------------------------------------------------------
// Release whichever resource is still owned. Once the object has been
// lazily constructed (see LazyInit, which frees `args`), only `obj`
// remains; before that, only the raw argument buffer does. Using the
// matching delete / delete[] forms ensures the right dtor and
// operator delete are invoked.
STEP::LazyObject::~LazyObject()
{
    if (!obj) {
        delete[] args;
        return;
    }
    delete obj;
}
// ------------------------------------------------------------------------------------------------
// Convert the raw argument string into a typed object instance.
// Invoked on first access; afterwards `args` has been released and `obj`
// holds the converted object. Throws STEP::TypeError if the entity type
// is unknown or the converter rejects the arguments.
void STEP::LazyObject::LazyInit() const
{
    const EXPRESS::ConversionSchema& schema = db.GetSchema();
    STEP::ConvertObjectProc proc = schema.GetConverterProc(type);

    if (!proc) {
        throw STEP::TypeError("unknown object type: " + std::string(type),id);
    }

    // Parse via a pointer copy: LIST::Parse advances the pointer it is given.
    const char* acopy = args;
    boost::shared_ptr<const EXPRESS::LIST> conv_args = EXPRESS::LIST::Parse(acopy,STEP::SyntaxError::LINE_NOT_SPECIFIED,&db.GetSchema());

    // The raw argument text is no longer needed once parsed.
    delete[] args;
    args = NULL;

    // if the converter fails, it should throw an exception, but it should never return NULL
    try {
        obj = proc(db,*conv_args);
    }
    catch(const TypeError& t) {
        // augment line and entity information
        throw TypeError(t.what(),id);
    }
    ++db.evaluated_count;
    ai_assert(obj);

    // store the original id in the object instance
    obj->SetID(id);
}
|
{
"content_hash": "89bb291f387f055c9d4f3897e9057dcb",
"timestamp": "",
"source": "github",
"line_count": 527,
"max_line_length": 153,
"avg_line_length": 30.554079696394687,
"alnum_prop": 0.5476338343062973,
"repo_name": "spetz911/almaty3d",
"id": "113ab50a1d8820c25e122530ab6055589d262ef6",
"size": "17864",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ThirdParty/assimp/src/STEPFileReader.cpp",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1464184"
},
{
"name": "C++",
"bytes": "10479967"
},
{
"name": "CMake",
"bytes": "79756"
},
{
"name": "GLSL",
"bytes": "292672"
},
{
"name": "Logos",
"bytes": "577807"
},
{
"name": "Objective-C",
"bytes": "4462"
},
{
"name": "Perl",
"bytes": "145"
}
],
"symlink_target": ""
}
|
________________________________________________________________________
This file is part of Logtalk <https://logtalk.org/>
Copyright 1998-2022 Paulo Moura <pmoura@logtalk.org>
SPDX-License-Identifier: Apache-2.0
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
________________________________________________________________________
To load this example and for sample queries, please see the `SCRIPT.txt`
file.
This folder contains some examples of parametric objects and categories.
Object and category parameters are logical variables that are shared with
all the entity predicates. This example illustrates accessing parameters
using the `parameter/1` and `this/1` built-in methods. Parameters can also
be accessed using _parameter variables_ as illustrated by the `parvars`
example.
|
{
"content_hash": "ae24e40e6d2da1144683c81142964b44",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 74,
"avg_line_length": 43.96551724137931,
"alnum_prop": 0.7035294117647058,
"repo_name": "LogtalkDotOrg/logtalk3",
"id": "96f2707a3ed46690e5594c058e0aad0037867b09",
"size": "1275",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/parametric/NOTES.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1466"
},
{
"name": "CSS",
"bytes": "20149"
},
{
"name": "CodeQL",
"bytes": "1295"
},
{
"name": "Common Lisp",
"bytes": "258800"
},
{
"name": "Dockerfile",
"bytes": "1948"
},
{
"name": "Emacs Lisp",
"bytes": "11861"
},
{
"name": "HTML",
"bytes": "1958537"
},
{
"name": "Inno Setup",
"bytes": "50865"
},
{
"name": "JavaScript",
"bytes": "153065"
},
{
"name": "Logtalk",
"bytes": "4960912"
},
{
"name": "Lua",
"bytes": "23902"
},
{
"name": "Makefile",
"bytes": "604"
},
{
"name": "PDDL",
"bytes": "6555764"
},
{
"name": "PHP",
"bytes": "26584"
},
{
"name": "PLSQL",
"bytes": "362"
},
{
"name": "PowerShell",
"bytes": "296191"
},
{
"name": "Prolog",
"bytes": "10156468"
},
{
"name": "Python",
"bytes": "16672"
},
{
"name": "Ruby",
"bytes": "10762"
},
{
"name": "Shell",
"bytes": "360508"
},
{
"name": "Starlark",
"bytes": "913"
},
{
"name": "Tcl",
"bytes": "5409"
},
{
"name": "TeX",
"bytes": "12936"
},
{
"name": "Vim Script",
"bytes": "19406"
},
{
"name": "Vim Snippet",
"bytes": "1921"
},
{
"name": "XSLT",
"bytes": "148349"
},
{
"name": "YASnippet",
"bytes": "8069"
}
],
"symlink_target": ""
}
|
<!DOCTYPE html>
<!-- DO NOT EDIT! This test has been generated by /html/canvas/tools/gentest.py. -->
<title>Canvas test: 2d.fillStyle.parse.invalid.css-color-4-hsl-1</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/html/canvas/resources/canvas-tests.js"></script>
<link rel="stylesheet" href="/html/canvas/resources/canvas-tests.css">
<body class="show_output">
<h1>2d.fillStyle.parse.invalid.css-color-4-hsl-1</h1>
<p class="desc"></p>
<p class="output">Actual output:</p>
<canvas id="c" class="output" width="100" height="50"><p class="fallback">FAIL (fallback content)</p></canvas>
<p class="output expectedtext">Expected output:<p><img src="/images/green-100x50.png" class="output expected" id="expected" alt="">
<ul id="d"></ul>
<script>
var t = async_test("");
_addTest(function(canvas, ctx) {
ctx.fillStyle = '#0f0';
try { ctx.fillStyle = 'hsl(0, 100%, 50% / 1)'; } catch (e) { } // this shouldn't throw, but it shouldn't matter here if it does
ctx.fillRect(0, 0, 100, 50);
_assertPixel(canvas, 50,25, 0,255,0,255);
});
</script>
|
{
"content_hash": "a28f2c805734559d8e1fa976609264dd",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 131,
"avg_line_length": 36.25806451612903,
"alnum_prop": 0.6868327402135231,
"repo_name": "chromium/chromium",
"id": "1850125baa451ac098c4979d3cbb07966679267e",
"size": "1124",
"binary": false,
"copies": "12",
"ref": "refs/heads/main",
"path": "third_party/blink/web_tests/external/wpt/html/canvas/element/fill-and-stroke-styles/2d.fillStyle.parse.invalid.css-color-4-hsl-1.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
import DS from 'ember-data';
// Model fragment describing a single CORS (cross-origin resource sharing)
// rule. Three string-value lists plus a max-age, matching the rule shape
// used by blob-storage CORS configuration.
export default DS.ModelFragment.extend({
  // Header names the rule permits on cross-origin requests.
  AllowedHeaders: DS.hasManyFragments('stringValue'),
  // HTTP methods (GET, PUT, ...) the rule permits.
  AllowedMethods: DS.hasManyFragments('stringValue'),
  // Origins allowed to issue matching requests.
  AllowedOrigins: DS.hasManyFragments('stringValue'),
  // How long (seconds) a browser may cache the preflight response.
  MaxAgeInSeconds: DS.attr('number')
});
|
{
"content_hash": "1dd418c5af38a17f9024951d3a77b7c3",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 55,
"avg_line_length": 34.44444444444444,
"alnum_prop": 0.7354838709677419,
"repo_name": "Pandacorn/deco",
"id": "697e8ff1d1d7ce90c7d6259e752977a21dac1bdd",
"size": "310",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "app/models/cors-rule.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "10296"
},
{
"name": "HTML",
"bytes": "35682"
},
{
"name": "JavaScript",
"bytes": "230166"
},
{
"name": "Processing",
"bytes": "2827"
},
{
"name": "Shell",
"bytes": "37"
}
],
"symlink_target": ""
}
|
<?php
namespace Harp\Range\Test;
use Harp\Range\Test\Model\TestModel;
use Harp\Range\Range;
/**
 * End-to-end test: a Range attribute is hydrated from the database and
 * persisted back in its serialized string form.
 */
class IntegrationTest extends AbstractTestCase
{
    /**
     * Load a model, check the unserialized Range value, save a new Range
     * and verify the exact SQL issued for both operations.
     */
    public function testRange()
    {
        $model = TestModel::find(1);

        $this->assertEquals(new Range(10, 32), $model->getDays());

        $model->setDays(new Range(4, 12));

        TestModel::save($model);

        // The update must serialize Range(4, 12) as the string "4|12".
        $this->assertQueries([
            'SELECT `TestModel`.* FROM `TestModel` WHERE (`id` = 1) LIMIT 1',
            'UPDATE `TestModel` SET `days` = "4|12" WHERE (`id` = 1)',
        ]);
    }
}
|
{
"content_hash": "bcdf69e6c5c562f1c53b1e9c0be7ef64",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 77,
"avg_line_length": 21.807692307692307,
"alnum_prop": 0.582010582010582,
"repo_name": "harp-orm/range",
"id": "36823f81114a0a4080a5b77983133c7c227f2830",
"size": "726",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/src/IntergrationTest.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "PHP",
"bytes": "17523"
}
],
"symlink_target": ""
}
|
package com.mobileer.oboetester;
import java.io.IOException;
/**
 * Base class for any audio input or output.
 *
 * Subclasses implement the actual stream access; this class provides the
 * shared configuration bookkeeping, a latency-statistics accumulator and a
 * snapshot type ({@link StreamStatus}) for UI display.
 */
public abstract class AudioStreamBase {
    private StreamConfiguration mRequestedStreamConfiguration;
    private StreamConfiguration mActualStreamConfiguration;
    // Running min/average/max of the latency values observed by getStreamStatus().
    AudioStreamBase.DoubleStatistics mLatencyStatistics;
    private int mBufferSizeInFrames;

    /**
     * Snapshot the stream's dynamic state.
     * Side effect: feeds the current latency into {@link #getLatencyStatistics()}.
     */
    public StreamStatus getStreamStatus() {
        StreamStatus status = new StreamStatus();
        status.bufferSize = getBufferSizeInFrames();
        status.xRunCount = getXRunCount();
        status.framesRead = getFramesRead();
        status.framesWritten = getFramesWritten();
        status.callbackCount = getCallbackCount();
        status.latency = getLatency();
        mLatencyStatistics.add(status.latency);
        status.callbackTimeStr = getCallbackTimeStr();
        status.cpuLoad = getCpuLoad();
        status.state = getState();
        return status;
    }

    public DoubleStatistics getLatencyStatistics() {
        return mLatencyStatistics;
    }

    /**
     * Accumulates min/average/max over a series of positive samples.
     */
    public static class DoubleStatistics {
        private double sum;
        private int count;
        private double minimum = Double.MAX_VALUE;
        // NOTE(review): Double.MIN_VALUE is the smallest *positive* double,
        // not the most negative. Safe here only because add() rejects
        // values <= 0.0, so every accepted sample exceeds it.
        private double maximum = Double.MIN_VALUE;

        // Fold one sample into the statistics; non-positive samples
        // (e.g. "latency unknown") are ignored.
        void add(double statistic) {
            if (statistic <= 0.0) return;
            sum += statistic;
            count++;
            minimum = Math.min(statistic, minimum);
            maximum = Math.max(statistic, maximum);
        }

        // Average of the accepted samples. Division by zero yields NaN
        // when no sample was accepted; dump() guards against that case.
        double getAverage() {
            return sum / count;
        }

        /** Format as "min/avg/max ms", or "?" before any sample arrived. */
        public String dump() {
            if (count == 0) return "?";
            return String.format("%3.1f/%3.1f/%3.1f ms", minimum, getAverage(), maximum);
        }
    }

    /**
     * Changes dynamic at run-time.
     */
    public static class StreamStatus {
        public int bufferSize;
        public int xRunCount;
        public long framesWritten;
        public long framesRead;
        public double latency; // msec
        public int state;
        public long callbackCount;
        public int framesPerCallback;
        public double cpuLoad;
        public String callbackTimeStr;

        // These are constantly changing.
        // Render a multi-line human-readable summary for display.
        String dump(int framesPerBurst) {
            if (bufferSize < 0 || framesWritten < 0) {
                return "idle";
            }
            StringBuffer buffer = new StringBuffer();

            buffer.append("time between callbacks = " + callbackTimeStr + "\n");

            buffer.append("written "
                    + String.format("0x%08X", framesWritten)
                    + " - read " + String.format("0x%08X", framesRead)
                    + " = " + (framesWritten - framesRead) + " frames\n");

            String cpuLoadText = String.format("%2d%c", (int)(cpuLoad * 100), '%');
            buffer.append(
                    convertStateToString(state)
                    + ", #cb=" + callbackCount
                    + ", f/cb=" + String.format("%3d", framesPerCallback)
                    + ", " + cpuLoadText + " cpu"
                    + "\n");

            buffer.append("buffer size = ");
            // NOTE(review): this negative branch is unreachable — the early
            // "idle" return above already filters bufferSize < 0.
            if (bufferSize < 0) {
                buffer.append("?");
            } else {
                int numBuffers = bufferSize / framesPerBurst;
                int remainder = bufferSize - (numBuffers * framesPerBurst);
                buffer.append(bufferSize + " = (" + numBuffers + " * " + framesPerBurst + ") + " + remainder);
            }
            buffer.append(", xRun# = " + ((xRunCount < 0) ? "?" : xRunCount));

            return buffer.toString();
        }

        /**
         * Converts ints from Oboe index to human-readable stream state
         */
        private String convertStateToString(int stateId) {
            final String[] STATE_ARRAY = {"Uninit.", "Unknown", "Open", "Starting", "Started",
                    "Pausing", "Paused", "Flushing", "Flushed",
                    "Stopping", "Stopped", "Closing", "Closed", "Disconn."};
            if (stateId < 0 || stateId >= STATE_ARRAY.length) {
                return "Invalid - " + stateId;
            }
            return STATE_ARRAY[stateId];
        }
    }

    /**
     * Record the stream configuration and reset the latency statistics.
     *
     * @param requestedConfiguration configuration asked for by the caller
     * @param actualConfiguration    configuration granted by the audio system
     * @param bufferSizeInFrames     current buffer size of the opened stream
     * @throws IOException declared for subclasses that perform real I/O
     */
    public void open(StreamConfiguration requestedConfiguration,
                     StreamConfiguration actualConfiguration,
                     int bufferSizeInFrames) throws IOException {
        mRequestedStreamConfiguration = requestedConfiguration;
        mActualStreamConfiguration = actualConfiguration;
        mBufferSizeInFrames = bufferSizeInFrames;
        mLatencyStatistics = new AudioStreamBase.DoubleStatistics();
    }

    public abstract boolean isInput();

    // Default implementations are no-ops; output streams override these.
    public void startPlayback() throws IOException {}

    public void stopPlayback() throws IOException {}

    public abstract int write(float[] buffer, int offset, int length);

    public abstract void close();

    public int getChannelCount() {
        return mActualStreamConfiguration.getChannelCount();
    }

    public int getSampleRate() {
        return mActualStreamConfiguration.getSampleRate();
    }

    public int getFramesPerBurst() {
        return mActualStreamConfiguration.getFramesPerBurst();
    }

    // NOTE(review): returns the same field as getBufferSizeInFrames();
    // capacity and current size are conflated here — confirm intended.
    public int getBufferCapacityInFrames() {
        return mBufferSizeInFrames;
    }

    public int getBufferSizeInFrames() {
        return mBufferSizeInFrames;
    }

    // Subclasses that support dynamic buffer resizing override this.
    public int setBufferSizeInFrames(int bufferSize) {
        throw new UnsupportedOperationException("bufferSize cannot be changed");
    }

    // The following accessors return sentinel values ("unknown") by default;
    // subclasses backed by a real stream override them.
    public long getCallbackCount() { return -1; }

    public int getLastErrorCallbackResult() { return 0; }

    public long getFramesWritten() { return -1; }

    public long getFramesRead() { return -1; }

    public double getLatency() { return -1.0; }

    public double getCpuLoad() { return 0.0; }

    public String getCallbackTimeStr() { return "?"; };

    public int getState() { return -1; }

    public boolean isThresholdSupported() {
        return false;
    }

    public void setWorkload(double workload) {}

    public abstract int getXRunCount();
}
|
{
"content_hash": "a9261438485b7abc1a0621073ce00023",
"timestamp": "",
"source": "github",
"line_count": 198,
"max_line_length": 110,
"avg_line_length": 31.59090909090909,
"alnum_prop": 0.594404476418865,
"repo_name": "google/oboe",
"id": "b6eb6ff1ece0f071a100dd21ec0d4b404cc5b6dd",
"size": "6870",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "apps/OboeTester/app/src/main/java/com/mobileer/oboetester/AudioStreamBase.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "8153"
},
{
"name": "C++",
"bytes": "909040"
},
{
"name": "CMake",
"bytes": "9059"
},
{
"name": "Java",
"bytes": "508578"
},
{
"name": "Kotlin",
"bytes": "22851"
},
{
"name": "Python",
"bytes": "7896"
},
{
"name": "Shell",
"bytes": "10076"
}
],
"symlink_target": ""
}
|
/*
* Copyright (C) 1990,91 Silicon Graphics, Inc.
*
_______________________________________________________________________
______________ S I L I C O N G R A P H I C S I N C . ____________
|
| $Revision: 1.1.1.1 $
|
| Classes:
| SoRotateDiscDragger
|
| Author(s): Paul Isaacs, Howard Look
|
______________ S I L I C O N G R A P H I C S I N C . ____________
_______________________________________________________________________
*/
#include <stdio.h>
#include <Inventor/SbLinear.h>
#include <Inventor/SoDB.h>
#include <Inventor/sensors/SoFieldSensor.h>
#include <Inventor/SoInput.h>
#include <Inventor/SoPath.h>
#include <Inventor/projectors/SbPlaneProjector.h>
#include <Inventor/draggers/SoRotateDiscDragger.h>
#include <Inventor/nodes/SoSwitch.h>
#include <Inventor/nodes/SoSeparator.h>
#include "geom/SoRotateDiscDraggerGeom.h"
SO_KIT_SOURCE(SoRotateDiscDragger);
////////////////////////////////////////////////////////////////////////
//
// Description:
// Constructor
//
// Constructor.
//
// Builds the nodekit catalog (rotator and feedback parts, each with an
// inactive/active pair under a switch), loads the shared default geometry,
// creates the plane projector used for dragging, and wires up all dragger
// callbacks plus the field sensor that keeps the `rotation` field and the
// motion matrix in sync. Macro/call order follows the standard Inventor
// dragger construction sequence and must not be rearranged.
SoRotateDiscDragger::SoRotateDiscDragger()
{
    SO_KIT_CONSTRUCTOR(SoRotateDiscDragger);

    isBuiltIn = TRUE;

    // Put this stuff under the geomSeparator so it will draw more
    // efficiently. Each switch selects child 0 (inactive) or 1 (active).
    SO_KIT_ADD_CATALOG_ENTRY(rotatorSwitch, SoSwitch, TRUE,
                                geomSeparator,\x0,FALSE);
    SO_KIT_ADD_CATALOG_ENTRY(rotator, SoSeparator, TRUE,
                                rotatorSwitch,\x0,TRUE);
    SO_KIT_ADD_CATALOG_ENTRY(rotatorActive, SoSeparator, TRUE,
                                rotatorSwitch,\x0,TRUE);
    SO_KIT_ADD_CATALOG_ENTRY(feedbackSwitch, SoSwitch, TRUE,
                                geomSeparator,\x0,FALSE);
    SO_KIT_ADD_CATALOG_ENTRY(feedback, SoSeparator, TRUE,
                                feedbackSwitch,\x0,TRUE);
    SO_KIT_ADD_CATALOG_ENTRY(feedbackActive, SoSeparator, TRUE,
                                feedbackSwitch,\x0,TRUE);

    // read geometry for shared parts (only once per class)
    if (SO_KIT_IS_FIRST_INSTANCE())
        readDefaultParts("rotateDiscDragger.iv", geomBuffer, sizeof(geomBuffer) );

    SO_KIT_ADD_FIELD(rotation, (0.0, 0.0, 0.0, 1.0));

    SO_KIT_INIT_INSTANCE();

    // create the parts from the shared default geometry...
    setPartAsDefault("rotator", "rotateDiscRotator");
    setPartAsDefault("rotatorActive",
                      "rotateDiscRotatorActive");
    setPartAsDefault("feedback", "rotateDiscFeedback");
    setPartAsDefault("feedbackActive",
                      "rotateDiscFeedbackActive");

    // Set the switches to 0 (inactive geometry)...
    setSwitchValue( rotatorSwitch.getValue(), 0 );
    setSwitchValue( feedbackSwitch.getValue(), 0 );

    // our humble projector; owned by this dragger, freed in the destructor
    planeProj = new SbPlaneProjector();

    // add the callbacks to perform the dragging
    addStartCallback(  &SoRotateDiscDragger::startCB );
    addMotionCallback( &SoRotateDiscDragger::motionCB );
    addFinishCallback( &SoRotateDiscDragger::doneCB );

    // Updates the scaleFactor field when the motionMatrix is set.
    addValueChangedCallback( &SoRotateDiscDragger::valueChangedCB );

    // Updates the motionMatrix when the scaleFactor field is set.
    // Priority 0 makes the sensor fire immediately on field writes.
    fieldSensor
        = new SoFieldSensor(&SoRotateDiscDragger::fieldSensorCB, this);
    fieldSensor->setPriority( 0 );

    setUpConnections( TRUE, TRUE );
}
////////////////////////////////////////////////////////////////////////
//
// Description:
// Destructor
//
// Destructor: release the projector and the field sensor created in the
// constructor. `delete` on a null pointer is a no-op, so no guard is
// required for either member.
SoRotateDiscDragger::~SoRotateDiscDragger()
{
    delete planeProj;
    delete fieldSensor;
}
// detach/attach any sensors, callbacks, and/or field connections.
// Called by: start/end of SoBaseKit::readInstance
// and on new copy by: start/end of SoBaseKit::copy.
// Classes that redefine must call setUpConnections(TRUE,TRUE)
// at end of constructor.
// Returns the state of the node when this was called.
// Attach (onOff == TRUE) or detach (FALSE) the field sensor, delegating the
// rest to SoDragger. Ordering is deliberate: connections are made AFTER the
// base class when enabling, and removed BEFORE it when disabling, so the
// base class never observes a half-wired dragger.
// When enabling, fieldSensorCB is invoked once up front so the motion
// matrix reflects the current `rotation` field value before the sensor
// starts listening.
SbBool
SoRotateDiscDragger::setUpConnections( SbBool onOff, SbBool doItAlways )
{
    // Already in the requested state and not forced? Nothing to do.
    if ( !doItAlways && connectionsSetUp == onOff)
        return onOff;

    if ( onOff ) {
        // We connect AFTER base class.
        SoDragger::setUpConnections( onOff, FALSE );

        // Call the sensor CBs to make things are up-to-date.
        fieldSensorCB( this, NULL );

        // Connect the field sensors
        if (fieldSensor->getAttachedField() != &rotation)
            fieldSensor->attach( &rotation );
    }
    else {
        // We disconnect BEFORE base class.

        // Disconnect the field sensors.
        if (fieldSensor->getAttachedField())
            fieldSensor->detach();

        SoDragger::setUpConnections( onOff, FALSE );
    }

    // Store the new state; returning its negation yields the prior state
    // for a genuine on/off transition (standard Inventor nodekit idiom).
    return !(connectionsSetUp = onOff);
}
////////////////////////////////////////////////////////////////////////
//
// Description:
// Rotate the rotateDiscDragger based on mouse motion.
//
// Use: private
//
void
SoRotateDiscDragger::dragStart()
//
////////////////////////////////////////////////////////////////////////
{
// Set the switches to 1...
setSwitchValue( rotatorSwitch.getValue(), 1 );
setSwitchValue( feedbackSwitch.getValue(), 1 );
// Establish the projector plane in working space.
// Working space is space at end of motion matrix.
// The plane used should be the x-y plane.
SbVec3f startLocalHitPt = getLocalStartingPoint();
SbLine workSpaceAxis( SbVec3f(0,0,0), SbVec3f(0,0,1) );
SbVec3f planeOrigin = workSpaceAxis.getClosestPoint(startLocalHitPt);
planeProj->setPlane(SbPlane(SbVec3f(0,0,1), planeOrigin));
}
////////////////////////////////////////////////////////////////////////
//
// Description:
// Rotate the rotateDiscDragger based on mouse motion.
//
// Use: private
//
// Per-motion callback: rotate the dragger to follow the mouse.
//
// Projects the current cursor position onto the drag plane, computes the
// rotation (about the local z axis) between the starting hit point and the
// new hit point, and appends that rotation to the motion matrix saved at
// drag start.
void
SoRotateDiscDragger::drag()
{
    // Set up the projector space and view.
    // Working space is space at end of motion matrix.
    planeProj->setViewVolume( getViewVolume() );
    planeProj->setWorkingSpace( getLocalToWorldMatrix() );

    // Get newHitPt and startHitPt in workspace.
    SbVec3f newHitPt
        = planeProj->project( getNormalizedLocaterPosition());
    SbVec3f startHitPt = getLocalStartingPoint();

    // Find the amount of rotation
    SbVec3f oldVec = startHitPt;
    SbVec3f newVec = newHitPt;

    // Remove the part of these vectors that is parallel to the normal,
    // so both lie in the rotation plane before measuring the angle.
    oldVec -= SbVec3f( 0, 0, oldVec[2] );
    newVec -= SbVec3f( 0, 0, newVec[2] );

    // deltaRot is how much we rotated since the mouse button went down.
    SbRotation deltaRot = SbRotation( oldVec, newVec );

    // Append this to the startMotionMatrix, which we saved at the beginning
    // of the drag, to find the current motion matrix.
    setMotionMatrix(
        appendRotation( getStartMotionMatrix(), deltaRot, SbVec3f(0,0,0)));
}
////////////////////////////////////////////////////////////////////////
//
// Description:
// Rotate the rotateDiscDragger based on mouse motion.
//
// Use: private
//
// End of the drag gesture: restore the inactive (index 0) geometry on
// both part switches.
void
SoRotateDiscDragger::dragFinish()
{
    const int inactiveChild = 0;
    setSwitchValue( rotatorSwitch.getValue(),  inactiveChild );
    setSwitchValue( feedbackSwitch.getValue(), inactiveChild );
}
////////////////////////////////////////////////////////////////////
// Stubs for callbacks
////////////////////////////////////////////////////////////////////
void
SoRotateDiscDragger::startCB( void *, SoDragger *inDragger )
{
SoRotateDiscDragger *dl = (SoRotateDiscDragger *) inDragger;
dl->dragStart();
}
void
SoRotateDiscDragger::motionCB( void *, SoDragger *inDragger )
{
SoRotateDiscDragger *dl = (SoRotateDiscDragger *) inDragger;
dl->drag();
}
void
SoRotateDiscDragger::doneCB( void *, SoDragger *inDragger )
{
SoRotateDiscDragger *dl = (SoRotateDiscDragger *) inDragger;
dl->dragFinish();
}
// Called whenever the motion matrix changes: extract the rotation part and
// copy it into the `rotation` field. The field sensor is detached around
// the write so the update does not re-trigger fieldSensorCB.
void
SoRotateDiscDragger::valueChangedCB( void *, SoDragger *inDragger )
{
    SoRotateDiscDragger *m = (SoRotateDiscDragger *) inDragger;

    // Decompose the motion matrix into its transform components.
    SbMatrix motMat = m->getMotionMatrix();

    SbVec3f trans, scale;
    SbRotation rot, scaleOrient;
    getTransformFast( motMat, trans, rot, scale, scaleOrient );

    // Disconnect the field sensor
    m->fieldSensor->detach();

    // Only touch the field if the value actually changed, to avoid
    // needless notification.
    if ( m->rotation.getValue() != rot )
        m->rotation = rot;

    // Reconnect the field sensor
    m->fieldSensor->attach( &(m->rotation) );
}
void
SoRotateDiscDragger::fieldSensorCB( void *inDragger, SoSensor * )
{
SoRotateDiscDragger *dragger = (SoRotateDiscDragger *) inDragger;
// Incorporate the new field value into the matrix...
SbMatrix motMat = dragger->getMotionMatrix();
dragger->workFieldsIntoTransform( motMat );
dragger->setMotionMatrix( motMat );
}
|
{
"content_hash": "277468a3136fda7df6d5940922770490",
"timestamp": "",
"source": "github",
"line_count": 291,
"max_line_length": 76,
"avg_line_length": 29.367697594501717,
"alnum_prop": 0.6087058272876199,
"repo_name": "OpenXIP/xip-libraries",
"id": "9176060ffb7b82b5a64cc11b1c51fe253e2e6bd6",
"size": "10052",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/extern/inventor/lib/interaction/src/draggers/SoRotateDiscDragger.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "8314"
},
{
"name": "C",
"bytes": "21064260"
},
{
"name": "C#",
"bytes": "41726"
},
{
"name": "C++",
"bytes": "33308677"
},
{
"name": "D",
"bytes": "373"
},
{
"name": "Java",
"bytes": "59889"
},
{
"name": "JavaScript",
"bytes": "35954"
},
{
"name": "Objective-C",
"bytes": "272450"
},
{
"name": "Perl",
"bytes": "727865"
},
{
"name": "Prolog",
"bytes": "101780"
},
{
"name": "Puppet",
"bytes": "371631"
},
{
"name": "Python",
"bytes": "162364"
},
{
"name": "Shell",
"bytes": "906979"
},
{
"name": "Smalltalk",
"bytes": "10530"
},
{
"name": "SuperCollider",
"bytes": "2169433"
},
{
"name": "Tcl",
"bytes": "10289"
}
],
"symlink_target": ""
}
|
package numbers
import (
"testing"
"github.com/stretchr/testify/assert"
)
// workingTests maps a Roman-numeral string to its integer value. It is
// shared by the round-trip tests below: keys are the expected output of
// ToRoman and the input of FromRoman.
var workingTests = map[string]int{
	"I":          1,
	"XC":         90,
	"DCCCXCIX":   899,
	"IX":         9,
	"CXXV":       125,
	"MMMMCMXCIX": 4999,
}
// TestToRoman verifies that every known integer value converts to its
// expected Roman-numeral string without error.
// Fix: the failure message previously misspelled "unexpectedly".
func TestToRoman(t *testing.T) {
	// Map iteration yields (roman string, integer value) pairs.
	for expected, input := range workingTests {
		output, err := ToRoman(input)
		if err != nil {
			assert.Fail(t, "An error was given unexpectedly")
			continue
		}
		assert.Equal(t, expected, output)
	}
}
// TestToRomanError checks that non-positive integers are rejected with
// the documented error message.
func TestToRomanError(t *testing.T) {
	for _, n := range []int{-1, 0} {
		_, err := ToRoman(n)
		assert.EqualError(t, err, "Number not convertable")
	}
}
// TestFromRoman verifies that every known Roman-numeral string parses back
// to its integer value without error.
// Fix: the failure message previously misspelled "unexpectedly".
func TestFromRoman(t *testing.T) {
	// Map iteration yields (roman string, expected integer) pairs.
	for input, expected := range workingTests {
		output, err := FromRoman(input)
		if err != nil {
			assert.Fail(t, "An error was given unexpectedly")
			continue
		}
		assert.Equal(t, expected, output)
	}
}
// TestFromRomanError checks that strings which are not Roman numerals
// (including the empty string) are rejected with the documented message.
func TestFromRomanError(t *testing.T) {
	for _, s := range []string{"foobar", ""} {
		_, err := FromRoman(s)
		assert.EqualError(t, err, "String not convertable")
	}
}
|
{
"content_hash": "cb451cecc4d0f233fd222a307228c24b",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 53,
"avg_line_length": 17.28125,
"alnum_prop": 0.6075949367088608,
"repo_name": "sbani/go-humanizer",
"id": "6c8633858ce01d8b5a5a781279b1f18796d16ed2",
"size": "1106",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "numbers/roman_test.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "17035"
},
{
"name": "Shell",
"bytes": "328"
}
],
"symlink_target": ""
}
|
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using Android.App;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("Plugin.Android.AugustLock")]
[assembly: AssemblyDescription("August Smart Lock Plugin for Xamarin Android")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("Marcus Lum")]
[assembly: AssemblyProduct("Plugin.Android.AugustLock")]
[assembly: AssemblyCopyright("Copyright © 2017")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Setting ComVisible to false makes the types in this assembly not visible
// to COM components.
[assembly: ComVisible(false)]

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
// Keep AssemblyVersion and AssemblyFileVersion in sync when releasing.
[assembly: AssemblyVersion("1.1.0")]
[assembly: AssemblyFileVersion("1.1.0")]
|
{
"content_hash": "3512fa7ee4f76c0529ef7e4b3808c0ba",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 84,
"avg_line_length": 37.86666666666667,
"alnum_prop": 0.7517605633802817,
"repo_name": "Marcus-L/xamarin-august-ble",
"id": "a27c212a2b3840dbc9b1164247854f9293a9ff20",
"size": "1139",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Plugin.Android.AugustLock/Properties/AssemblyInfo.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "17884"
}
],
"symlink_target": ""
}
|
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Naming conventions</title>
<link rel="stylesheet" href="../../../../../doc/src/boostbook.css" type="text/css">
<meta name="generator" content="DocBook XSL Stylesheets V1.79.1">
<link rel="home" href="../index.html" title="Chapter 1. The Variadic Macro Data Library 1.10">
<link rel="up" href="../index.html" title="Chapter 1. The Variadic Macro Data Library 1.10">
<link rel="prev" href="../index.html" title="Chapter 1. The Variadic Macro Data Library 1.10">
<link rel="next" href="vmd_whyhow.html" title="Why and how to use">
</head>
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
<table cellpadding="2" width="100%"><tr>
<td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../../boost.png"></td>
<td align="center"><a href="../../../../../index.html">Home</a></td>
<td align="center"><a href="../../../../../libs/libraries.htm">Libraries</a></td>
<td align="center"><a href="http://www.boost.org/users/people.html">People</a></td>
<td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td>
<td align="center"><a href="../../../../../more/index.htm">More</a></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="../index.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../index.html"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../index.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="vmd_whyhow.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a>
</div>
<div class="section">
<div class="titlepage"><div><div><h2 class="title" style="clear: both">
<a name="variadic_macro_data.vmd_naming"></a><a class="link" href="vmd_naming.html" title="Naming conventions">Naming conventions</a>
</h2></div></div></div>
<p>
All of the macros in the library begin with the prefix BOOST_VMD_, where VMD
stands for 'Variadic Macro Data'.
</p>
<p>
Following the prefix, certain names in the macros refer to data types in this
library or Boost PP. These names and their data types are:
</p>
<div class="orderedlist"><ol class="orderedlist" type="1">
<li class="listitem">
TUPLE = Boost PP tuple data type.
</li>
<li class="listitem">
ARRAY = Boost PP array data type.
</li>
<li class="listitem">
LIST = Boost PP list data type.
</li>
<li class="listitem">
SEQ = Boost PP seq data type.
</li>
<li class="listitem">
IDENTIFIER = A VMD identifier
</li>
<li class="listitem">
NUMBER = A VMD number, which is a Boost PP number with some enhancements
</li>
<li class="listitem">
TYPE = A VMD type
</li>
</ol></div>
<p>
I have used most of these names in order to mimic the naming of Boost PP as
closely as possible. Subsequent use of the words 'array', 'list', 'seq', and
'tuple' refer to these Boost PP data types unless otherwise noted. See the
help for Boost PP for any explanation of these data types.
</p>
<p>
The term 'sequence' refers to a sequence of VMD data types and is not the same
as a Boost PP sequence which is always referred to in this documentation as
a 'seq'.
</p>
<p>
The term 'return' refers to the expansion of a macro. I use the terminology
of a macro "returning some data" rather than the terminology of a
macro "expanding to some data", even if the latter is more accurate,
because it more closely corresponds to the way I believe C++ programmers think
about macro programming.
</p>
<p>
The term 'emptiness' refers to no preprocessor data being passed to or returned
from a macro. I have avoided the word 'nothing' because it has too vague a
meaning.
</p>
<p>
The term 'data type' refers to the various preprocessor input types which VMD
can parse and which are listed above, also including emptiness.
</p>
<p>
The term 'v-type' refers to a VMD type, the term 'number' returns to a VMD
number and the term 'identifier' refers to a VMD identifier. All these will
be explained in their proper place.
</p>
<p>
The term "UB" stands for "undefined behavior" as it is
specified in the C++ standard.
</p>
</div>
<table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr>
<td align="left"></td>
<td align="right"><div class="copyright-footer">Copyright © 2010-2017 Tropic Software
East Inc</div></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="../index.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../index.html"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../index.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="vmd_whyhow.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a>
</div>
</body>
</html>
|
{
"content_hash": "27b9e3ef7e8c4026b5d29efcbd8f4efe",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 410,
"avg_line_length": 48.41121495327103,
"alnum_prop": 0.6318532818532818,
"repo_name": "arangodb/arangodb",
"id": "ef54075d9fade70daec192539b4681a26ae9bf5e",
"size": "5187",
"binary": false,
"copies": "4",
"ref": "refs/heads/devel",
"path": "3rdParty/boost/1.78.0/libs/vmd/doc/html/variadic_macro_data/vmd_naming.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "61827"
},
{
"name": "C",
"bytes": "311036"
},
{
"name": "C++",
"bytes": "35149373"
},
{
"name": "CMake",
"bytes": "387268"
},
{
"name": "CSS",
"bytes": "210549"
},
{
"name": "EJS",
"bytes": "232160"
},
{
"name": "HTML",
"bytes": "23114"
},
{
"name": "JavaScript",
"bytes": "33841256"
},
{
"name": "LLVM",
"bytes": "15003"
},
{
"name": "NASL",
"bytes": "381737"
},
{
"name": "NSIS",
"bytes": "47138"
},
{
"name": "Pascal",
"bytes": "75391"
},
{
"name": "Perl",
"bytes": "9811"
},
{
"name": "PowerShell",
"bytes": "6806"
},
{
"name": "Python",
"bytes": "190515"
},
{
"name": "SCSS",
"bytes": "255542"
},
{
"name": "Shell",
"bytes": "133576"
},
{
"name": "TypeScript",
"bytes": "179074"
},
{
"name": "Yacc",
"bytes": "79620"
}
],
"symlink_target": ""
}
|
<!DOCTYPE HTML>
<html>
<head>
<meta charset="utf-8">
<title>Strings</title>
<link rel="stylesheet" href="../../../../resources/sap/ui/core/themes/sap_horizon/library.css" id="sap-ui-theme-sap.ui.core" data-sap-ui-ready="true">
<link rel="stylesheet" href="../../../../resources/sap/ui/layout/themes/sap_horizon/library.css" id="sap-ui-theme-sap.ui.layout" data-sap-ui-ready="true">
<link rel="stylesheet" href="../../../../resources/sap/m/themes/sap_horizon/library.css" id="sap-ui-theme-sap.m" data-sap-ui-ready="true">
<link rel="stylesheet" href="../../../../resources/sap/ui/table/themes/sap_horizon/library.css" id="sap-ui-theme-sap.ui.table" data-sap-ui-ready="true">
<link rel="stylesheet" href="./Strings.css">
<script id="sap-ui-bootstrap"
src="../../../../resources/sap-ui-core.js"
data-sap-ui-async="true"
data-sap-ui-compatVersion="edge"
data-sap-ui-libs="sap.m,sap.ui.table,sap.ui.layout"
data-sap-ui-onInit="module:local/Strings"
data-sap-ui-resourceroots='{"local": "./"}'
data-sap-ui-theme="sap_horizon">
</script>
</head>
<body class="sapUiBody" id="content">
</body>
</html>
|
{
"content_hash": "a18330bc550edbba142a54db42e8f19e",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 156,
"avg_line_length": 45.52,
"alnum_prop": 0.6590509666080844,
"repo_name": "SAP/openui5",
"id": "4a41a10310000f1e9624eadd5189bafab2e3e708",
"size": "1138",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/sap.ui.core/test/sap/ui/core/Strings.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "294216"
},
{
"name": "Gherkin",
"bytes": "17201"
},
{
"name": "HTML",
"bytes": "6443688"
},
{
"name": "Java",
"bytes": "83398"
},
{
"name": "JavaScript",
"bytes": "109546491"
},
{
"name": "Less",
"bytes": "8741757"
},
{
"name": "TypeScript",
"bytes": "20918"
}
],
"symlink_target": ""
}
|
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Windows.Media;
using WpfMath.Atoms;
using WpfMath.Boxes;
namespace WpfMath
{
// Represents mathematical formula that can be rendered.
public sealed class TexFormula
{
public string? TextStyle
{
get;
set;
}
internal Atom? RootAtom
{
get;
set;
}
public SourceSpan? Source { get; set; }
public TexRenderer GetRenderer(TexStyle style,
double scale,
string? systemTextFontName,
Brush? background = null,
Brush? foreground = null)
{
var mathFont = new DefaultTexFont(scale);
var textFont = systemTextFontName == null ? (ITeXFont)mathFont : GetSystemFont(systemTextFontName, scale);
var environment = new TexEnvironment(style, mathFont, textFont, background, foreground);
return new TexRenderer(CreateBox(environment), scale);
}
public void Add(TexFormula formula, SourceSpan? source = null)
{
Debug.Assert(formula != null);
Debug.Assert(formula.RootAtom != null);
this.Add(
formula.RootAtom is RowAtom
? new RowAtom(source, formula.RootAtom)
: formula.RootAtom,
source);
}
/// <summary>
/// Adds an atom to the formula. If the <see cref="RootAtom"/> exists and is not a <see cref="RowAtom"/>, it
/// will become one.
/// </summary>
/// <param name="atom">The atom to add.</param>
/// <param name="rowSource">The source that will be set for the resulting row atom.</param>
internal void Add(Atom atom, SourceSpan? rowSource)
{
if (this.RootAtom == null)
{
this.RootAtom = atom;
}
else
{
var elements = (this.RootAtom is RowAtom r
? (IEnumerable<Atom>) r.Elements
: new[] { this.RootAtom }).ToList();
elements.Add(atom);
this.RootAtom = new RowAtom(rowSource, elements);
}
}
public void SetForeground(Brush brush)
{
if (this.RootAtom is StyledAtom sa)
{
this.RootAtom = sa with { Foreground = brush };
}
else
{
this.RootAtom = new StyledAtom(this.RootAtom?.Source, this.RootAtom, null, brush);
}
}
public void SetBackground(Brush brush)
{
if (this.RootAtom is StyledAtom sa)
{
this.RootAtom = sa with { Background = brush };
}
else
{
this.RootAtom = new StyledAtom(this.RootAtom?.Source, this.RootAtom, brush, null);
}
}
internal Box CreateBox(TexEnvironment environment)
{
if (this.RootAtom == null)
return StrutBox.Empty;
else
return this.RootAtom.CreateBox(environment);
}
internal static SystemFont GetSystemFont(string fontName, double size)
{
var fontFamily = Fonts.SystemFontFamilies.First(
ff => ff.ToString() == fontName || ff.FamilyNames.Values?.Contains(fontName) == true);
return new SystemFont(size, fontFamily);
}
}
}
|
{
"content_hash": "d3865ce6db68e9e0f3f7170b4bf363eb",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 118,
"avg_line_length": 32.669642857142854,
"alnum_prop": 0.5132549877015578,
"repo_name": "ForNeVeR/wpf-math",
"id": "1eb3ae74990681364ae75dd83c45afa780c519f1",
"size": "3659",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/WpfMath/TexFormula.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "301231"
},
{
"name": "F#",
"bytes": "34381"
},
{
"name": "PowerShell",
"bytes": "2514"
}
],
"symlink_target": ""
}
|
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="averiBlue">#0094ff</color>
<color name="averiBlueHighlight">#7fc9ff</color>
<color name="sunsetOrange">#ff9800</color>
<color name="sunsetOrangeHighlight">#ffc400</color>
<color name="ubePurple">#9124f0</color>
<color name="ubePurpleHighlight">#af81f9</color>
<color name="lovelyRed">#ef0a1a</color>
<color name="lovelyRedHighlight">#fc7668</color>
<color name="chromeGray">#666666</color>
<color name="chromeGrayHighlight">#999999</color>
<color name="duskGray">#222222</color>
</resources>
|
{
"content_hash": "55484dcf06761048bdbab604b4dc6e70",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 55,
"avg_line_length": 42.857142857142854,
"alnum_prop": 0.6883333333333334,
"repo_name": "MarquisLP/World-Scribe",
"id": "702a4aeb937f853792c4d87d16ae48d4e18ee52a",
"size": "600",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "app/src/main/res/values/colors.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "407101"
}
],
"symlink_target": ""
}
|
var express = require('express');
var path = require('path');
var favicon = require('serve-favicon');
var cookieParser = require('cookie-parser');
var bodyParser = require('body-parser');
var passport = require('passport');
//var routes = require('./routes/index');
var users = require('./routes/users');
var passport = require('passport');
var flash = require('connect-flash');
var morgan = require('morgan');
var session = require('express-session');
var app = express();
// view engine setup
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'jade');
// uncomment after placing your favicon in /public
//app.use(favicon(path.join(__dirname, 'public', 'favicon.ico')));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
app.use(cookieParser());
app.use(express.static(path.join(__dirname, 'public')));
app.use(morgan('dev')); // log every request to the console
require('./config/passport')(passport); // pass passport for configuration
// required for passport
app.use(session({ secret: 'ilovescotchscotchyscotchscotch' })); // session secret
app.use(passport.initialize());
app.use(passport.session()); // persistent login sessions
app.use(flash()); // use connect-flash for flash messages stored in session
require('./routes/index.js')(app, passport); // load our routes and pass in our app and fully configured passport
//app.use('/', routes);
app.use('/users', users);
// catch 404 and forward to error handler
app.use(function(req, res, next) {
var err = new Error('Not Found');
err.status = 404;
next(err);
});
// error handlers
// development error handler
// will print stacktrace
if (app.get('env') === 'development') {
app.use(function(err, req, res, next) {
res.status(err.status || 500);
res.render('error', {
message: err.message,
error: err
});
});
}
// production error handler
// no stacktraces leaked to user
app.use(function(err, req, res, next) {
res.status(err.status || 500);
res.render('error', {
message: err.message,
error: {}
});
});
module.exports = app;
|
{
"content_hash": "bf1d65688d8d48f77681c7035732da40",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 113,
"avg_line_length": 29.718309859154928,
"alnum_prop": 0.6758293838862559,
"repo_name": "banacer/lab221",
"id": "c2f541d85dc68fa42cd6fe3f2505363ffb279b30",
"size": "2110",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "person_identification_webapp/app.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Arduino",
"bytes": "29343"
},
{
"name": "C++",
"bytes": "27049"
},
{
"name": "CSS",
"bytes": "111"
},
{
"name": "HTML",
"bytes": "5640"
},
{
"name": "Java",
"bytes": "251873"
},
{
"name": "JavaScript",
"bytes": "11904"
},
{
"name": "Jupyter Notebook",
"bytes": "67214"
},
{
"name": "Makefile",
"bytes": "482"
},
{
"name": "Processing",
"bytes": "7977"
},
{
"name": "Python",
"bytes": "50439"
}
],
"symlink_target": ""
}
|
using System.Threading.Tasks;
using JaffaMania.Website.ApiFeatures.Contestants;
using JaffaMania.Website.Tests.Mocks;
using Microsoft.AspNetCore.Mvc;
using Xunit;
namespace JaffaMania.Website.Tests.ApiFeatures.Contestants
{
public class ContestantsController_GetContestantById_Should
{
[Fact]
[Trait("Unit Test", "WebApi")]
public async Task ReturnsHttp200OkResultWhenContestantsAreFound()
{
// Arrange
var mediator = MediatorMock.CreateWithResults();
var systemUnderTest = new ContestantsController(mediator.Object);
// Act
var result = await systemUnderTest.GetContestantById("ab38493a-7186-4ed0-8e65-7ef6e5c69744");
// Assert
Assert.IsType<OkObjectResult>(result);
}
[Fact]
[Trait("Unit Test", "WebApi")]
public async Task ReturnsHttp404NotFoundResultWhenNoContestantsAreFound()
{
// Arrange
var mediator = MediatorMock.CreateWithNoResults();
var systemUnderTest = new ContestantsController(mediator.Object);
// Act
var result = await systemUnderTest.GetContestantById("ab38493a-7186-4ed0-8e65-7ef6e5c69744");
// Assert
Assert.IsType<NotFoundResult>(result);
}
[Fact]
[Trait("Unit Test", "WebApi")]
public async Task ReturnsHttp400BadRequestResultWhenContestantIdIsNotValidGuid()
{
// Arrange
var mediator = MediatorMock.CreateWithNoResults();
var systemUnderTest = new ContestantsController(mediator.Object);
// Act
var result = await systemUnderTest.GetContestantById("");
// Assert
Assert.IsType<BadRequestObjectResult>(result);
}
}
}
|
{
"content_hash": "62a367bb916523f30b6efff82cc90830",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 105,
"avg_line_length": 31.271186440677965,
"alnum_prop": 0.6308943089430894,
"repo_name": "AndyHale/JaffaMania",
"id": "48d3595c795b23f99c72cb5bd5b61c7c9899bb74",
"size": "1845",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Tests/JaffaMania.Website.Tests/ApiFeatures/Contestants/ContestantsController_GetContestantById_Should.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "174"
},
{
"name": "C#",
"bytes": "38466"
},
{
"name": "CSS",
"bytes": "1571"
},
{
"name": "HTML",
"bytes": "2267"
}
],
"symlink_target": ""
}
|
package dhg.ccg.parse.dep
import dhg.ccg.cat.Cat
import dhg.util._
import dhg.ccg.tagdict.TagDictionary
import dhg.ccg.cat._
import dhg.util.viz._
import dhg.ccg.parse._
import dhg.ccg.parse.pcfg.CfgGuideChart
class DepParserEvaluator(outputFile: Option[String], printEverything: Boolean = false) extends ParserEvaluator {
type Word = String
def evaluate(model: GuideChartParser, testData: TraversableOnce[(Option[CfgGuideChart], CcgTree)], tagdict: TagDictionary[Cat],
printFailedParses: Boolean = false,
verbose: Boolean = false): Double = {
var dependencyCorrect = 0
var dependencyTotal = 0
var dependencyParsedTotal = 0
var constituentCorrect = 0
var constituentTotal = 0
var constituentParsedTotal = 0
var fullConstituentParseCorrect = 0
var supertagCorrect = 0
var supertagTotal = 0
var supertagParsedTotal = 0
var parseFound = 0
var fullDepParseCorrect = 0
var totalSentences = 0
val errors = collection.mutable.Map.empty[(Cat, Cat), Int] // (gold, model) -> count
val failures = collection.mutable.Set.empty[(CcgTree, CcgTree)] // (gold, model)
var totalTime = 0L
def printAll() = {
println(f"Parse Found: ${parseFound * 100.0 / totalSentences}%.2f ($parseFound/$totalSentences)")
println(f"Full Parse Correct: ${fullDepParseCorrect * 100.0 / totalSentences}%.2f (${fullDepParseCorrect}/${totalSentences})")
println(f"Parse Found Supertag Accuracy: ${supertagCorrect * 100.0 / supertagParsedTotal}%.2f ($supertagCorrect/$supertagParsedTotal)")
println(f"Parse Found Dependency Accuracy: ${dependencyCorrect * 100.0 / dependencyParsedTotal}%.2f ($dependencyCorrect/$dependencyParsedTotal)")
val dependencyAccuracy = dependencyCorrect * 100.0 / dependencyTotal
println(f"Supertag Accuracy: ${supertagCorrect * 100.0 / supertagTotal}%.2f ($supertagCorrect/$supertagTotal)")
println(f"Dependency Accuracy: ${dependencyAccuracy}%.2f ($dependencyCorrect/$dependencyTotal)")
//val errorsToShow = errors.desc.take(10).map { case (((gtc, gtp), (mtc, mtp)), count) => (count.toString, f"$gtc -> $gtp", f"$mtc -> $mtp") }
val errorsToShow = errors.desc.take(10).map { case ((g, m), count) => (count.toString, f"$g", f"$m") }
val (maxCountWidth, maxTagWidth) =
if (errorsToShow.isEmpty) (0, 0)
else {
val a = errorsToShow.map(_._1.length).max max 5
val b = errorsToShow.map { case (_, gt, mt) => gt.size }.max
(a, b)
}
println(f" ${"count".padLeft(maxCountWidth)} ${"gold".padRight(maxTagWidth)} ${"model"}")
for ((count, gt, mt) <- errorsToShow) {
println(f" ${count.padLeft(maxCountWidth)} ${gt.padRight(maxTagWidth)} ${mt}")
}
println(f"avg tagging: ${totalTime / 1000.0 / testData.size}%.4f sec")
dependencyAccuracy
}
writeUsing(File(outputFile.getOrElse("/dev/null"))) { output =>
time("testing", {
for (((guideChart, goldTree), i) <- testData.toIterator.zipWithIndex) {
if (verbose) print(f"${i + 1}%5s, length=${goldTree.length}%3d, avg-ambig=${goldTree.words.map(tagdict(_).size).avg}%5.2f, ") // num-parses=${parseCounter.countParses(goldTree.words, Vector.empty, tagdict)}%18s, ")
val startTime = System.currentTimeMillis()
val modelParsed = guideChart.flatMap(model.parseAndProbFromGuideChart).map(_._1)
val parseTime = System.currentTimeMillis() - startTime
totalTime += parseTime
if (verbose) println(f"time=${parseTime / 1000.0}%.3f sec, avg time: ${totalTime / 1000.0 / (i + 1)}%.3f sec")
if ((i + 1) % 100 == 0) println(f"${i + 1}%5s, total time: ${totalTime / 1000.0}%.3f sec, avg time: ${totalTime / 1000.0 / (i + 1)}%.4f sec")
output.wl(i)
output.wl(goldTree.words.mkString(" "))
output.wl(f"model: $modelParsed")
output.wl(f"gold: $goldTree")
output.wl(f"${parseTime / 1000.0}%.3f sec")
output.wl
val n = goldTree.length
val goldDepTree = DepTree.fromCcgTree(goldTree)
val goldDepMap = depSet(goldDepTree).toMap
// val goldNtTable = CcgTreeUtil.toNtTable(goldTree)
//
// def traverse(t: CcgTree, i: Int, j: Int): Boolean = {
// val gl = goldNtTable(i)(j)
// val ml = Some(t.cat)
// val correct = gl == ml
// if (correct) { constituentCorrect += 1 }
//
// constituentTotal += 1
// constituentParsedTotal += 1
//
// t match {
// case CcgLeaf(cat, word, _) =>
// correct
//
// case CcgBinode(cat, l, r) =>
// val li = i
// val lj = i + l.length
// val ri = i + l.length
// val rj = j
// val lmatch = traverse(l, li, lj)
// val rmatch = traverse(r, ri, rj)
// correct && lmatch && rmatch
//
// case CcgUnode(cat, s) =>
// correct && traverse(s, i, j)
// }
// }
modelParsed match {
case Some(modelParse) =>
assert(modelParse.length == n, f"\n${goldTree.words.mkString(" ")}\n${modelParse.words.mkString(" ")}")
val modelDepTree = DepTree.fromCcgTree(modelParse)
val modelDepMap = depSet(modelDepTree).toMap
// val fullTreeCorrect = traverse(modelParse, 0, n)
// if (fullTreeCorrect) { fullConstituentParseCorrect += 1 }
if (goldDepTree == modelDepTree) { fullDepParseCorrect += 1 }
else { failures.add(goldTree, modelParse) }
dependencyParsedTotal += (goldDepMap.size + 1)
supertagParsedTotal += goldTree.length
parseFound += 1
for (((gw, gt), (mw, mt)) <- goldTree.tagged zipSafe modelParse.tagged) {
if (gt != mt) {
errors.updateOrElseWith((gt, mt), 0)(_ + 1)
}
else {
supertagCorrect += 1
}
}
// TreeViz.drawTree(goldDepTree)
// TreeViz.drawTree(modelDepTree)
if ((modelDepTree.word, modelDepTree.index) == (goldDepTree.word, goldDepTree.index)) { // check root
dependencyCorrect += 1
}
else {
// errors.updateOrElseWith((
// (goldDepTree.word.toString, goldDepMap.get((modelDepTree.word, modelDepTree.index)).fold("<ROOT>")(_._1.toString)),
// (modelDepTree.word.toString, "<ROOT>") //
// ), 0)(_ + 1)
}
for ((mc, mp) <- modelDepMap) {
if (goldDepMap.get(mc).exists(_ == mp)) {
dependencyCorrect += 1
}
else {
// errors.updateOrElseWith((
// (mc._1.toString, goldDepMap.get(mc).fold("<ROOT>")(_._1.toString)),
// (mc._1.toString, mp._1.toString) //
// ), 0)(_ + 1)
}
}
case None =>
if (printFailedParses) {
println(f"\nFailed parse:")
for (w <- goldTree.words) {
println(f" $w : ${tagdict(w).mkString(", ")}")
}
println(goldTree.pretty)
println
}
def traverseTotal(t: CcgTree): Unit = {
constituentTotal += 1
t match {
case CcgBinode(_, l, r) =>
traverseTotal(l); traverseTotal(r)
case CcgUnode(_, s) =>
traverseTotal(s)
case CcgLeaf(_, _, _) =>
supertagTotal += 1
}
}
traverseTotal(goldTree)
}
dependencyTotal += (goldDepMap.size + 1)
supertagTotal += goldTree.length
totalSentences += 1
if(printEverything) printAll()
}
})
}
val dependencyAccuracy = printAll()
// failures.toVector.sortBy(_._1.length).foreach {
// case (g, m) =>
// TreeViz.drawTree(g)
// TreeViz.drawTree(DepTree.fromCcgTree(g))
// TreeViz.drawTree(m)
// TreeViz.drawTree(DepTree.fromCcgTree(m))
// }
dependencyAccuracy
}
private[this] def depSet(deptree: DepTree): Vector[((Word, Int), (Word, Int))] = {
deptree match {
case DepTree(word, index, _, children) =>
children.map { c =>
(c.word, c.index) -> (word, index)
} ++ children.flatMap(depSet)
}
}
override def toString = f"DepParserEvaluator(outputFile=$outputFile)"
}
|
{
"content_hash": "6bf384742ab8e8fcc972ab352dded524",
"timestamp": "",
"source": "github",
"line_count": 221,
"max_line_length": 227,
"avg_line_length": 41.92760180995475,
"alnum_prop": 0.5220159723721131,
"repo_name": "dhgarrette/2015-ccg-parsing",
"id": "c4e0b1f762553f11ab2c6e48753b3a6818b53858",
"size": "9266",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/scala/dhg/ccg/parse/dep/DepParserEvaluator.scala",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "73351"
},
{
"name": "Scala",
"bytes": "2508485"
}
],
"symlink_target": ""
}
|
namespace nc {
namespace htsim {
void ManualFlowDriver::AddData(const std::vector<AddDataEvent>& events) {
add_data_events_.insert(add_data_events_.end(), events.begin(), events.end());
std::stable_sort(add_data_events_.begin(), add_data_events_.end(),
[](const AddDataEvent& lhs, const AddDataEvent& rhs) {
return lhs.at > rhs.at;
});
}
AddDataEvent ManualFlowDriver::Next() {
AddDataEvent to_return = PeekNextAddData();
PopNextAddData();
return to_return;
}
const AddDataEvent& ManualFlowDriver::PeekNextAddData() {
if (add_data_events_.empty()) {
return kAddDataInfinity;
}
return add_data_events_.back();
}
void ManualFlowDriver::PopNextAddData() {
if (!add_data_events_.empty()) {
add_data_events_.pop_back();
}
}
RateKeyFrame ConstantRateFlowDriver::NextKeyFrame() {
if (next_key_frame_index_ == rate_change_key_frames_.size()) {
return RateKeyFrame(EventQueueTime::MaxTime(), curr_rate_);
}
return rate_change_key_frames_[next_key_frame_index_];
}
void ConstantRateFlowDriver::AdvanceToNextKeyFrame() {
RateKeyFrame next_key_frame = NextKeyFrame();
curr_time_ = next_key_frame.at;
curr_rate_ = next_key_frame.rate_bps;
inter_packet_gap_ =
EventQueueTime(second_.Raw() / ((curr_rate_ / 8.0) / packet_size_bytes_));
++next_key_frame_index_;
}
AddDataEvent ConstantRateFlowDriver::Next() {
if (curr_time_ == EventQueueTime::MaxTime()) {
return {EventQueueTime::MaxTime(), 0};
}
EventQueueTime next_packet_time = curr_time_ + inter_packet_gap_;
RateKeyFrame next_key_frame = NextKeyFrame();
if (inter_packet_gap_.isZero() || next_key_frame.at < next_packet_time) {
AdvanceToNextKeyFrame();
return Next();
}
curr_time_ = next_packet_time;
return {next_packet_time, packet_size_bytes_};
}
void ConstantRateFlowDriver::AddRateChangeKeyframes(
const std::vector<RateKeyFrame>& key_frames) {
rate_change_key_frames_.insert(rate_change_key_frames_.end(),
key_frames.begin(), key_frames.end());
std::stable_sort(rate_change_key_frames_.begin(),
rate_change_key_frames_.end(),
[](const RateKeyFrame& lhs, const RateKeyFrame& rhs) {
return lhs.at < rhs.at;
});
}
FeedbackLoopFlowDriver::FeedbackLoopFlowDriver(
const std::string& id,
std::unique_ptr<ObjectSizeAndWaitTimeGenerator> generator,
EventQueue* event_queue)
: EventConsumer(id, event_queue),
generator_(std::move(generator)),
data_to_add_(0),
connection_(nullptr) {
ScheduleNext();
}
void FeedbackLoopFlowDriver::HandleEvent() {
uint64_t prev_data_to_add = data_to_add_;
if (!data_to_add_) {
ScheduleNext();
}
if (prev_data_to_add) {
connection_->OnSendBufferDrained([this] { ScheduleNext(); });
connection_->AddData(data_to_add_);
}
}
void FeedbackLoopFlowDriver::ConnectionAttached(Connection* connection) {
connection_ = connection;
}
void FeedbackLoopFlowDriver::ScheduleNext() {
ObjectSizeAndWaitTime next = generator_->Next();
data_to_add_ = next.object_size;
EnqueueIn(next.wait_time);
}
OpenLoopFlowDriver::OpenLoopFlowDriver(
const std::string& id,
std::unique_ptr<ObjectSizeAndWaitTimeGenerator> generator,
EventQueue* event_queue)
: EventConsumer(id, event_queue),
generator_(std::move(generator)),
data_to_add_(0),
connection_(nullptr) {
ScheduleNext();
}
void OpenLoopFlowDriver::HandleEvent() {
connection_->OnSendBufferDrained([] {});
connection_->AddData(data_to_add_);
ScheduleNext();
}
void OpenLoopFlowDriver::ConnectionAttached(Connection* connection) {
connection_ = connection;
}
void OpenLoopFlowDriver::ScheduleNext() {
ObjectSizeAndWaitTime next = generator_->Next();
data_to_add_ = next.object_size;
EnqueueIn(next.wait_time);
}
void FlowPack::Init() {
AddFirstEvents();
num_events_cached_ = CacheEvents();
if (num_events_cached_) {
EnqueueAt(pending_events_[0].add_data_event.at);
}
}
void FlowPack::AddDriver(std::unique_ptr<FlowDriver> driver,
Connection* connection) {
if (driver->type() == FlowDriver::INDEPENDENT) {
std::unique_ptr<IndependentFlowDriver> independent_flow_driver(
static_cast<IndependentFlowDriver*>(driver.release()));
ConnectionAndIndependentDriver connection_and_driver;
connection_and_driver.driver = std::move(independent_flow_driver);
connection_and_driver.connection = connection;
independent_drivers_.emplace_back(std::move(connection_and_driver));
} else if (driver->type() == FlowDriver::DEPENDENT) {
std::unique_ptr<ConnectionDependentFlowDriver> dependent_flow_driver(
static_cast<ConnectionDependentFlowDriver*>(driver.release()));
dependent_flow_driver->ConnectionAttached(connection);
dependent_drivers_.emplace_back(std::move(dependent_flow_driver));
}
}
void FlowPack::HandleEvent() {
const Event& ev = pending_events_[next_event_index_++];
Connection* connection = ev.connection_and_driver->connection;
if (ev.add_data_event.close) {
connection->Close();
} else {
uint64_t data_to_add = ev.add_data_event.bytes;
if (data_to_add) {
connection->AddData(data_to_add);
}
}
if (next_event_index_ == num_events_cached_) {
num_events_cached_ = CacheEvents();
next_event_index_ = 0;
if (num_events_cached_ == 0) {
return;
}
}
EnqueueAt(pending_events_[next_event_index_].add_data_event.at);
}
void FlowPack::AddFirstEvents() {
Event ev;
for (auto& connection_and_driver : independent_drivers_) {
ev.connection_and_driver = &connection_and_driver;
ev.add_data_event = connection_and_driver.driver->Next();
if (ev.add_data_event.at != EventQueueTime::MaxTime()) {
queue_.emplace(std::move(ev));
}
}
}
size_t FlowPack::CacheEvents() {
size_t i;
Event ev;
for (i = 0; i < kEventCacheSize; ++i) {
if (!queue_.size()) {
break;
}
pending_events_[i] = std::move(const_cast<Event&>(queue_.top()));
queue_.pop();
Event& curr_event = pending_events_[i];
ev.connection_and_driver = curr_event.connection_and_driver;
ev.add_data_event = curr_event.connection_and_driver->driver->Next();
if (ev.add_data_event.at != EventQueueTime::MaxTime()) {
queue_.emplace(std::move(ev));
}
}
LOG(INFO) << "Cached " << i << " events";
return i;
}
DefaultObjectSizeAndWaitTimeGenerator::DefaultObjectSizeAndWaitTimeGenerator(
size_t mean_object_size_bytes, bool size_fixed,
std::chrono::milliseconds mean_wait_time_ms, bool wait_time_fixed,
double seed, EventQueue* event_queue)
: mean_object_size_(mean_object_size_bytes),
mean_wait_time_(mean_wait_time_ms.count()),
object_size_fixed_(size_fixed),
wait_time_fixed_(wait_time_fixed),
generator_(seed),
object_size_distribution_(1.0 / mean_object_size_bytes),
wait_time_distribution_(1.0 / mean_wait_time_ms.count()),
constant_delay_ms_(0),
event_queue_(event_queue) {}
ObjectSizeAndWaitTime DefaultObjectSizeAndWaitTimeGenerator::Next() {
bool max_object_size =
mean_object_size_ == std::numeric_limits<uint64_t>::max();
bool min_wait_time = mean_wait_time_ == 0;
size_t object_size_bytes;
if (max_object_size) {
object_size_bytes = std::numeric_limits<uint64_t>::max();
} else if (object_size_fixed_) {
object_size_bytes = mean_object_size_;
} else {
object_size_bytes = object_size_distribution_(generator_);
object_size_bytes = std::max(1ul, object_size_bytes);
}
size_t wait_time_ms;
if (min_wait_time) {
wait_time_ms = 0;
} else if (wait_time_fixed_) {
wait_time_ms = mean_wait_time_;
} else {
wait_time_ms = constant_delay_ms_ + wait_time_distribution_(generator_);
wait_time_ms = std::max(1ul, wait_time_ms);
}
return {object_size_bytes, event_queue_->RawMillisToTime(wait_time_ms)};
}
} // namespace htsim
} // namespace ncode
|
{
"content_hash": "e0086b463a6b316d062a175055981831",
"timestamp": "",
"source": "github",
"line_count": 262,
"max_line_length": 80,
"avg_line_length": 30.736641221374047,
"alnum_prop": 0.6643486899292189,
"repo_name": "ngvozdiev/ncode-common",
"id": "50e8cd8697d94a1ed19242facb5068e725c468c5",
"size": "8164",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/htsim/flow_driver.cc",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "618533"
},
{
"name": "C++",
"bytes": "2181830"
},
{
"name": "CMake",
"bytes": "17426"
},
{
"name": "HTML",
"bytes": "34663"
},
{
"name": "Python",
"bytes": "24653"
}
],
"symlink_target": ""
}
|
// Open an OpenGl window and display a rectangle using a OpenGl GraphicContext
package main
import (
"image/color"
"log"
"runtime"
"github.com/go-gl/gl/v2.1/gl"
"github.com/go-gl/glfw/v3.1/glfw"
"github.com/llgcode/draw2d"
"github.com/llgcode/draw2d/draw2dgl"
)
var (
// global rotation
rotate int
width, height int
redraw = true
font draw2d.FontData
)
func reshape(window *glfw.Window, w, h int) {
gl.ClearColor(1, 1, 1, 1)
/* Establish viewing area to cover entire window. */
gl.Viewport(0, 0, int32(w), int32(h))
/* PROJECTION Matrix mode. */
gl.MatrixMode(gl.PROJECTION)
/* Reset project matrix. */
gl.LoadIdentity()
/* Map abstract coords directly to window coords. */
gl.Ortho(0, float64(w), 0, float64(h), -1, 1)
/* Invert Y axis so increasing Y goes down. */
gl.Scalef(1, -1, 1)
/* Shift origin up to upper-left corner. */
gl.Translatef(0, float32(-h), 0)
gl.Enable(gl.BLEND)
gl.BlendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA)
gl.Disable(gl.DEPTH_TEST)
width, height = w, h
redraw = true
}
// Ask to refresh
func invalidate() {
redraw = true
}
func display() {
gl.Clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT)
gl.LineWidth(1)
gc := draw2dgl.NewGraphicContext(width, height)
gc.SetFontData(draw2d.FontData{
Name: "luxi",
Family: draw2d.FontFamilyMono,
Style: draw2d.FontStyleBold | draw2d.FontStyleItalic})
gc.BeginPath()
draw2d.RoundRect(gc, 200, 200, 600, 600, 100, 100)
gc.SetFillColor(color.RGBA{0, 0, 0, 0xff})
gc.Fill()
gl.Flush() /* Single buffered, so needs a flush. */
}
func init() {
runtime.LockOSThread()
}
func main() {
err := glfw.Init()
if err != nil {
panic(err)
}
defer glfw.Terminate()
width, height = 800, 800
window, err := glfw.CreateWindow(width, height, "Show RoundedRect", nil, nil)
if err != nil {
panic(err)
}
window.MakeContextCurrent()
window.SetSizeCallback(reshape)
window.SetKeyCallback(onKey)
window.SetCharCallback(onChar)
glfw.SwapInterval(1)
err = gl.Init()
if err != nil {
panic(err)
}
reshape(window, width, height)
for !window.ShouldClose() {
if redraw {
display()
window.SwapBuffers()
redraw = false
}
glfw.PollEvents()
// time.Sleep(2 * time.Second)
}
}
func onChar(w *glfw.Window, char rune) {
log.Println(char)
}
func onKey(w *glfw.Window, key glfw.Key, scancode int, action glfw.Action, mods glfw.ModifierKey) {
switch {
case key == glfw.KeyEscape && action == glfw.Press,
key == glfw.KeyQ && action == glfw.Press:
w.SetShouldClose(true)
}
}
|
{
"content_hash": "2c4f3591e1b9a2836e572a71b30b07b3",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 99,
"avg_line_length": 21.853448275862068,
"alnum_prop": 0.6745562130177515,
"repo_name": "stanim/draw2d",
"id": "15ba1e9470e2d309e4a5aaba35f39b17b9a034d3",
"size": "2535",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "samples/helloworldgl/helloworldgl.go",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Go",
"bytes": "194374"
}
],
"symlink_target": ""
}
|
using UnityEngine;
namespace UnitySampleAssets.ImageEffects
{
[ExecuteInEditMode]
[AddComponentMenu("Image Effects/Edge Detection (Color)")]
public class EdgeDetectEffect : ImageEffectBase
{
public float threshold = 0.2F;
// Called by camera to apply image effect
private void OnRenderImage(RenderTexture source, RenderTexture destination)
{
material.SetFloat("_Treshold", threshold*threshold);
Graphics.Blit(source, destination, material);
}
}
}
|
{
"content_hash": "11497d1db3803e1744d8b1188ce337e8",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 83,
"avg_line_length": 29.61111111111111,
"alnum_prop": 0.6754221388367729,
"repo_name": "noplisu/GGJ2015",
"id": "3b93236301a64a94950565b1a0bd692686429203",
"size": "593",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "Assets/Vendor/SampleAssets/Effects/ImageEffects (Pro Only)/Scripts/EdgeDetectEffect.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "6413"
}
],
"symlink_target": ""
}
|
// Test-harness wiring: pull the modules under test out of the global
// TestCafe bundles. hammerhead supplies browser detection, testCafeCore
// supplies key-sequence parsing plus real-event suppression, and
// testCafeAutomation supplies the Press automation used to simulate Enter.
var hammerhead = window.getTestCafeModule('hammerhead');
var browserUtils = hammerhead.utils.browser;
var testCafeCore = window.getTestCafeModule('testCafeCore');
var preventRealEvents = testCafeCore.get('./prevent-real-events');
var parseKeySequence = testCafeCore.get('./utils/parse-key-sequence');
var testCafeAutomation = window.getTestCafeModule('testCafeAutomation');
var PressAutomation = testCafeAutomation.Press;
// Stop genuine user input from interfering with the simulated events below.
preventRealEvents();
$(document).ready(function () {
var TEST_ELEMENT_CLASS = 'testElement';
function checkSubmitRaisedOnEnterPress ($form, $input, needSubmit) {
var submitHandlerExecuted = false;
$form[0].addEventListener('submit', function (e) {
submitHandlerExecuted = true;
e.preventDefault();
});
var callback = function () {
ok(needSubmit === submitHandlerExecuted, 'submit handler executed');
start();
};
$input[0].focus();
runPressAutomation('enter', callback);
}
function isInputValueValid ($el) {
var el = $el[0];
return browserUtils.isSafari || !el.validity || el.validity.valid;
}
function runPressAutomation (keys, callback) {
var pressAutomation = new PressAutomation(parseKeySequence(keys).combinations);
pressAutomation
.run()
.then(callback);
}
QUnit.testDone(function () {
$('.' + TEST_ELEMENT_CLASS).remove();
});
module('form with two text inputs');
asyncTest('submit button (input type="submit")', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="text">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
$('<input type="text">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
$('<input type="submit">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
checkSubmitRaisedOnEnterPress($form, $input, true);
});
asyncTest('submit button (button type="submit")', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="text">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
$('<input type="text">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
$('<button type="submit">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
checkSubmitRaisedOnEnterPress($form, $input, true);
});
asyncTest('submit button (button without declared type)', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="text">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
$('<input type="text">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
$('<button></button>').addClass(TEST_ELEMENT_CLASS).appendTo($form);
checkSubmitRaisedOnEnterPress($form, $input, true);
});
asyncTest('without submit button', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="text">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
$('<input type="text">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
checkSubmitRaisedOnEnterPress($form, $input, false);
});
asyncTest('not-submit button (button type="button")', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="text">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
$('<input type="text">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
$('<button type="button"></button>').addClass(TEST_ELEMENT_CLASS).appendTo($form);
checkSubmitRaisedOnEnterPress($form, $input, false);
});
asyncTest('disabled submit button', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="text">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
$('<input type="text">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
$('<button type="submit" disabled></button>').addClass(TEST_ELEMENT_CLASS).appendTo($form);
checkSubmitRaisedOnEnterPress($form, $input, false);
});
asyncTest('inputs types "text" and "search" and without submit button', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="text">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
$('<input type="search">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
checkSubmitRaisedOnEnterPress($form, $input, false);
});
asyncTest('valid and invalid text input ("text" and "url") and submit button', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="text">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
var $urlInput = $('<input type="url">').addClass(TEST_ELEMENT_CLASS).val('test').appendTo($form);
$('<button type="submit">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
checkSubmitRaisedOnEnterPress($form, $input, isInputValueValid($urlInput));
});
module('form with one text input');
asyncTest('input type = "text"', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="text">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
checkSubmitRaisedOnEnterPress($form, $input, true);
});
asyncTest('input type = "search"', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="search">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
checkSubmitRaisedOnEnterPress($form, $input, true);
});
asyncTest('input type = "url"', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="url">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
checkSubmitRaisedOnEnterPress($form, $input, true);
});
asyncTest('input type = "number"', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="number">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
checkSubmitRaisedOnEnterPress($form, $input, true);
});
asyncTest('input type = "tel"', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="tel">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
checkSubmitRaisedOnEnterPress($form, $input, true);
});
asyncTest('input type = "password"', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="password">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
checkSubmitRaisedOnEnterPress($form, $input, true);
});
asyncTest('input type = "email"', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="email">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
checkSubmitRaisedOnEnterPress($form, $input, true);
});
asyncTest('input type = "date"', function () {
var needChangeType = browserUtils.isIE && browserUtils.version > 11;
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="' + (needChangeType ? 'email' : 'date') +
'">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
//HACK: For tests in MSEdge. MSEdge fails when we try to create input with type = 'date'
if (needChangeType)
$input[0].type = 'date';
//webkit does not submit date input on enter
checkSubmitRaisedOnEnterPress($form, $input, !browserUtils.isWebKit || browserUtils.isSafari);
});
asyncTest('input type = "time"', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="time">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
//webkit does not submit time input on enter
checkSubmitRaisedOnEnterPress($form, $input, !browserUtils.isWebKit || browserUtils.isSafari);
});
asyncTest('input type = "radio"', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="radio">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
checkSubmitRaisedOnEnterPress($form, $input, false);
});
asyncTest('input type = "url" with invalid value', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="url">').addClass(TEST_ELEMENT_CLASS).val('test').appendTo($form);
checkSubmitRaisedOnEnterPress($form, $input, isInputValueValid($input));
});
module('form with two different type inputs');
asyncTest('one text input and one not-text input (type = "checkbox"), text input is focused', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="text">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
$('<input type="checkbox">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
checkSubmitRaisedOnEnterPress($form, $input, true);
});
asyncTest('one text input and one not-text input (type = "checkbox"), checkbox is focused', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="checkbox">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
$('<input type="text">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
checkSubmitRaisedOnEnterPress($form, $input, browserUtils.isFirefox);
});
module('event handlers');
asyncTest('check all handlers are executed - form with submit button', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="text">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
var $button = $('<input type="submit">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
var inlineSubmitHandlerExecuted = false;
var jQuerySubmitHandlerExecuted = false;
var submitHandlerExecuted = false;
var buttonClickHandlerExecuted = false;
var inputKeydownHandlerExecuted = false;
var inputKeyupHandlerExecuted = false;
var inputKeypressHandlerExecuted = false;
$input.bind('keydown', function () {
inputKeydownHandlerExecuted = true;
});
$input.bind('keypress', function () {
inputKeypressHandlerExecuted = true;
});
$input.bind('keyup', function () {
inputKeyupHandlerExecuted = true;
});
$button.bind('click', function () {
buttonClickHandlerExecuted = true;
});
$form.bind('submit', function () {
jQuerySubmitHandlerExecuted = true;
});
$form[0].onsubmit = function () {
inlineSubmitHandlerExecuted = true;
};
$form[0].addEventListener('submit', function (e) {
submitHandlerExecuted = true;
e.preventDefault();
});
var callback = function () {
ok(jQuerySubmitHandlerExecuted, 'jQuery submit handler executed');
ok(submitHandlerExecuted, 'submit handler executed');
ok(inlineSubmitHandlerExecuted, 'inline submit handler executed');
ok(buttonClickHandlerExecuted, 'button click handler executed');
ok(inputKeydownHandlerExecuted, 'input keydown handler executed');
ok(inputKeypressHandlerExecuted, 'input keydown handler executed');
ok(inputKeyupHandlerExecuted, 'input keydown handler executed');
start();
};
$input[0].focus();
runPressAutomation('enter', callback);
});
asyncTest('check all handlers are executed - form without submit button', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="text">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
var inlineSubmitHandlerExecuted = false;
var jQuerySubmitHandlerExecuted = false;
var submitHandlerExecuted = false;
var inputKeydownHandlerExecuted = false;
var inputKeyupHandlerExecuted = false;
var inputKeypressHandlerExecuted = false;
var submitFunctionCalled = false;
$input.bind('keydown', function () {
inputKeydownHandlerExecuted = true;
});
$input.bind('keypress', function () {
inputKeypressHandlerExecuted = true;
});
$input.bind('keyup', function () {
inputKeyupHandlerExecuted = true;
});
$form.bind('submit', function () {
jQuerySubmitHandlerExecuted = true;
});
$form[0].onsubmit = function () {
inlineSubmitHandlerExecuted = true;
};
$form[0].addEventListener('submit', function (e) {
submitHandlerExecuted = true;
if (browserUtils.isFirefox)
e.preventDefault();
});
//submit event dispatching leads to form submit in FireFox
//in other browsers we call submit function after submit event dispatched (if there are no submit buttons on form)
if (!browserUtils.isFirefox) {
$form[0].submit = function () {
submitFunctionCalled = true;
};
}
var callback = function () {
ok(jQuerySubmitHandlerExecuted, 'jQuery submit handler executed');
ok(submitHandlerExecuted, 'submit handler executed');
ok(inlineSubmitHandlerExecuted, 'inline submit handler executed');
ok(inputKeydownHandlerExecuted, 'input keydown handler executed');
ok(inputKeypressHandlerExecuted, 'input keydown handler executed');
ok(inputKeyupHandlerExecuted, 'input keydown handler executed');
if (!browserUtils.isFirefox)
ok(submitFunctionCalled, 'submit function called');
start();
};
$input[0].focus();
runPressAutomation('enter', callback);
});
//when enter pressed in a form input, browser sends click event to form submit button
asyncTest('form must not be submitted if submit button click event was prevented', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="text">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
var $button = $('<input type="submit">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
var submitHandlerExecuted = false;
var clickHandlerExecuted = false;
$button[0].addEventListener('click', function (e) {
clickHandlerExecuted = true;
e.preventDefault();
});
$form[0].addEventListener('submit', function () {
submitHandlerExecuted = true;
});
var callback = function () {
ok(clickHandlerExecuted, 'click executed');
ok(!submitHandlerExecuted, 'submit not executed');
start();
};
$input[0].focus();
runPressAutomation('enter', callback);
});
asyncTest('form must not be submitted if enter keydown event was prevented', function () {
var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
var $input = $('<input type="text">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
var keydownHandlerExecuted = false;
var submitHandlerExecuted = false;
var ENTER_KEY_CODE = 13;
$('<input type="submit">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
$input.bind('keydown', function (e) {
keydownHandlerExecuted = true;
if (e.keyCode === ENTER_KEY_CODE)
e.preventDefault();
});
$form[0].addEventListener('submit', function () {
submitHandlerExecuted = true;
});
var callback = function () {
ok(keydownHandlerExecuted, 'keydown handler was executed');
ok(!submitHandlerExecuted, 'submit handler was not executed');
start();
};
$input[0].focus();
runPressAutomation('enter', callback);
});
    // Constraint validation: the click still fires, but submit happens only
    // if the browser treats the invalid email value as valid (see
    // isInputValueValid — e.g. Safari).
    asyncTest('form must not be submitted if it has inputs with failed validation', function () {
        var $form = $('<form></form>').addClass(TEST_ELEMENT_CLASS).appendTo('body');
        var $input = $('<input type="text">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
        var $emailInput = $('<input type="email">').addClass(TEST_ELEMENT_CLASS).val('test').appendTo($form);
        var $button = $('<input type="submit">').addClass(TEST_ELEMENT_CLASS).appendTo($form);
        var submitHandlerExecuted = false;
        var clickHandlerExecuted = false;
        $button[0].addEventListener('click', function () {
            clickHandlerExecuted = true;
        });
        $form[0].addEventListener('submit', function (e) {
            submitHandlerExecuted = true;
            e.preventDefault();
        });
        var callback = function () {
            ok(clickHandlerExecuted, 'click executed');
            equal(submitHandlerExecuted, isInputValueValid($emailInput));
            start();
        };
        $input[0].focus();
        runPressAutomation('enter', callback);
    });
});
|
{
"content_hash": "c3076b33c9c28a2cab42f2a1f1ca32d3",
"timestamp": "",
"source": "github",
"line_count": 441,
"max_line_length": 122,
"avg_line_length": 41.54875283446712,
"alnum_prop": 0.6006112536156742,
"repo_name": "helen-dikareva/testcafe-phoenix",
"id": "ad0cad5efe49312c987d88f5f1bb9af570787f05",
"size": "18323",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/client/fixtures/automation/submit-on-enter-test.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "12879"
},
{
"name": "HTML",
"bytes": "113954"
},
{
"name": "JavaScript",
"bytes": "1908190"
}
],
"symlink_target": ""
}
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_45) on Thu Nov 13 21:21:54 UTC 2014 -->
<META http-equiv="Content-Type" content="text/html; charset=UTF-8">
<TITLE>
AddressTypes (Apache Hadoop Main 2.6.0 API)
</TITLE>
<META NAME="date" CONTENT="2014-11-13">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="AddressTypes (Apache Hadoop Main 2.6.0 API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="class-use/AddressTypes.html"><FONT CLASS="NavBarFont1"><B>Use</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV CLASS
<A HREF="../../../../../../org/apache/hadoop/registry/client/types/Endpoint.html" title="class in org.apache.hadoop.registry.client.types"><B>NEXT CLASS</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?org/apache/hadoop/registry/client/types/AddressTypes.html" target="_top"><B>FRAMES</B></A>
<A HREF="AddressTypes.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
<TR>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
SUMMARY: NESTED | <A HREF="#field_summary">FIELD</A> | CONSTR | METHOD</FONT></TD>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
DETAIL: <A HREF="#field_detail">FIELD</A> | CONSTR | METHOD</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<!-- ======== START OF CLASS DATA ======== -->
<H2>
<FONT SIZE="-1">
org.apache.hadoop.registry.client.types</FONT>
<BR>
Interface AddressTypes</H2>
<HR>
<DL>
<DT><PRE><FONT SIZE="-1">@InterfaceAudience.Public
@InterfaceStability.Evolving
</FONT>public interface <A HREF="../../../../../../src-html/org/apache/hadoop/registry/client/types/AddressTypes.html#line.30"><B>AddressTypes</B></A></DL>
</PRE>
<P>
Enum of address types -as integers.
Why integers and not enums? Cross platform serialization as JSON
<P>
<P>
<HR>
<P>
<!-- =========== FIELD SUMMARY =========== -->
<A NAME="field_summary"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<B>Field Summary</B></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>static <A HREF="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A></CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../../org/apache/hadoop/registry/client/types/AddressTypes.html#ADDRESS_HOSTNAME_AND_PORT">ADDRESS_HOSTNAME_AND_PORT</A></B></CODE>
<BR>
hostname/FQDN and port pair: "host/port".</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>static <A HREF="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A></CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../../org/apache/hadoop/registry/client/types/AddressTypes.html#ADDRESS_HOSTNAME_FIELD">ADDRESS_HOSTNAME_FIELD</A></B></CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>static <A HREF="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A></CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../../org/apache/hadoop/registry/client/types/AddressTypes.html#ADDRESS_OTHER">ADDRESS_OTHER</A></B></CODE>
<BR>
Any other address: "".</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>static <A HREF="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A></CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../../org/apache/hadoop/registry/client/types/AddressTypes.html#ADDRESS_PATH">ADDRESS_PATH</A></B></CODE>
<BR>
Path <code>/a/b/c</code> style: "path".</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>static <A HREF="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A></CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../../org/apache/hadoop/registry/client/types/AddressTypes.html#ADDRESS_PORT_FIELD">ADDRESS_PORT_FIELD</A></B></CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>static <A HREF="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A></CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../../org/apache/hadoop/registry/client/types/AddressTypes.html#ADDRESS_URI">ADDRESS_URI</A></B></CODE>
<BR>
URI entries: "uri".</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>static <A HREF="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A></CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../../org/apache/hadoop/registry/client/types/AddressTypes.html#ADDRESS_ZOOKEEPER">ADDRESS_ZOOKEEPER</A></B></CODE>
<BR>
Zookeeper addresses as a triple : "zktriple".</TD>
</TR>
</TABLE>
<P>
<!-- ============ FIELD DETAIL =========== -->
<A NAME="field_detail"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2">
<B>Field Detail</B></FONT></TH>
</TR>
</TABLE>
<A NAME="ADDRESS_HOSTNAME_AND_PORT"><!-- --></A><H3>
ADDRESS_HOSTNAME_AND_PORT</H3>
<PRE>
static final <A HREF="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A> <A HREF="../../../../../../src-html/org/apache/hadoop/registry/client/types/AddressTypes.html#line.40"><B>ADDRESS_HOSTNAME_AND_PORT</B></A></PRE>
<DL>
<DD>hostname/FQDN and port pair: "host/port".
The host/domain name and port are set as separate strings in the address
list, e.g.
<pre>
["namenode.example.org", "50070"]
</pre>
<P>
<DL>
<DT><B>See Also:</B><DD><A HREF="../../../../../../constant-values.html#org.apache.hadoop.registry.client.types.AddressTypes.ADDRESS_HOSTNAME_AND_PORT">Constant Field Values</A></DL>
</DL>
<HR>
<A NAME="ADDRESS_HOSTNAME_FIELD"><!-- --></A><H3>
ADDRESS_HOSTNAME_FIELD</H3>
<PRE>
static final <A HREF="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A> <A HREF="../../../../../../src-html/org/apache/hadoop/registry/client/types/AddressTypes.html#line.41"><B>ADDRESS_HOSTNAME_FIELD</B></A></PRE>
<DL>
<DL>
<DT><B>See Also:</B><DD><A HREF="../../../../../../constant-values.html#org.apache.hadoop.registry.client.types.AddressTypes.ADDRESS_HOSTNAME_FIELD">Constant Field Values</A></DL>
</DL>
<HR>
<A NAME="ADDRESS_PORT_FIELD"><!-- --></A><H3>
ADDRESS_PORT_FIELD</H3>
<PRE>
static final <A HREF="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A> <A HREF="../../../../../../src-html/org/apache/hadoop/registry/client/types/AddressTypes.html#line.42"><B>ADDRESS_PORT_FIELD</B></A></PRE>
<DL>
<DL>
<DT><B>See Also:</B><DD><A HREF="../../../../../../constant-values.html#org.apache.hadoop.registry.client.types.AddressTypes.ADDRESS_PORT_FIELD">Constant Field Values</A></DL>
</DL>
<HR>
<A NAME="ADDRESS_PATH"><!-- --></A><H3>
ADDRESS_PATH</H3>
<PRE>
static final <A HREF="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A> <A HREF="../../../../../../src-html/org/apache/hadoop/registry/client/types/AddressTypes.html#line.53"><B>ADDRESS_PATH</B></A></PRE>
<DL>
<DD>Path <code>/a/b/c</code> style: "path".
The entire path is encoded in a single entry
<pre>
["/users/example/dataset"]
</pre>
<P>
<DL>
<DT><B>See Also:</B><DD><A HREF="../../../../../../constant-values.html#org.apache.hadoop.registry.client.types.AddressTypes.ADDRESS_PATH">Constant Field Values</A></DL>
</DL>
<HR>
<A NAME="ADDRESS_URI"><!-- --></A><H3>
ADDRESS_URI</H3>
<PRE>
static final <A HREF="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A> <A HREF="../../../../../../src-html/org/apache/hadoop/registry/client/types/AddressTypes.html#line.63"><B>ADDRESS_URI</B></A></PRE>
<DL>
<DD>URI entries: "uri".
<pre>
["http://example.org"]
</pre>
<P>
<DL>
<DT><B>See Also:</B><DD><A HREF="../../../../../../constant-values.html#org.apache.hadoop.registry.client.types.AddressTypes.ADDRESS_URI">Constant Field Values</A></DL>
</DL>
<HR>
<A NAME="ADDRESS_ZOOKEEPER"><!-- --></A><H3>
ADDRESS_ZOOKEEPER</H3>
<PRE>
static final <A HREF="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A> <A HREF="../../../../../../src-html/org/apache/hadoop/registry/client/types/AddressTypes.html#line.88"><B>ADDRESS_ZOOKEEPER</B></A></PRE>
<DL>
<DD>Zookeeper addresses as a triple : "zktriple".
<p>
These are provide as a 3 element tuple of: hostname, port
and optionally path (depending on the application)
<p>
A single element would be
<pre>
["zk1","2181","/registry"]
</pre>
An endpoint with multiple elements would list them as
<pre>
[
["zk1","2181","/registry"]
["zk2","1600","/registry"]
]
</pre>
the third element in each entry , the path, MUST be the same in each entry.
A client reading the addresses of an endpoint is free to pick any
of the set, so they must be the same.
<P>
<DL>
<DT><B>See Also:</B><DD><A HREF="../../../../../../constant-values.html#org.apache.hadoop.registry.client.types.AddressTypes.ADDRESS_ZOOKEEPER">Constant Field Values</A></DL>
</DL>
<HR>
<A NAME="ADDRESS_OTHER"><!-- --></A><H3>
ADDRESS_OTHER</H3>
<PRE>
static final <A HREF="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A> <A HREF="../../../../../../src-html/org/apache/hadoop/registry/client/types/AddressTypes.html#line.93"><B>ADDRESS_OTHER</B></A></PRE>
<DL>
<DD>Any other address: "".
<P>
<DL>
<DT><B>See Also:</B><DD><A HREF="../../../../../../constant-values.html#org.apache.hadoop.registry.client.types.AddressTypes.ADDRESS_OTHER">Constant Field Values</A></DL>
</DL>
<!-- ========= END OF CLASS DATA ========= -->
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="class-use/AddressTypes.html"><FONT CLASS="NavBarFont1"><B>Use</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV CLASS
<A HREF="../../../../../../org/apache/hadoop/registry/client/types/Endpoint.html" title="class in org.apache.hadoop.registry.client.types"><B>NEXT CLASS</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?org/apache/hadoop/registry/client/types/AddressTypes.html" target="_top"><B>FRAMES</B></A>
<A HREF="AddressTypes.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
<TR>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
SUMMARY: NESTED | <A HREF="#field_summary">FIELD</A> | CONSTR | METHOD</FONT></TD>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
DETAIL: <A HREF="#field_detail">FIELD</A> | CONSTR | METHOD</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
Copyright © 2014 <a href="http://www.apache.org">Apache Software Foundation</a>. All Rights Reserved.
</BODY>
</HTML>
|
{
"content_hash": "ac85af177b77efa89c5ff10a86a55dde",
"timestamp": "",
"source": "github",
"line_count": 362,
"max_line_length": 303,
"avg_line_length": 46.17955801104972,
"alnum_prop": 0.6456900161512233,
"repo_name": "gsoundar/mambo-ec2-deploy",
"id": "985737234d510cb1d23493447990d230a675b0e1",
"size": "16717",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "packages/hadoop-2.6.0/share/doc/hadoop/api/org/apache/hadoop/registry/client/types/AddressTypes.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "23179"
},
{
"name": "CSS",
"bytes": "39965"
},
{
"name": "HTML",
"bytes": "263271260"
},
{
"name": "Java",
"bytes": "103085"
},
{
"name": "JavaScript",
"bytes": "1347"
},
{
"name": "Python",
"bytes": "4101"
},
{
"name": "Ruby",
"bytes": "262588"
},
{
"name": "Shell",
"bytes": "118548"
}
],
"symlink_target": ""
}
|
// Working with Liudas: BloomFilter fluent-API brainstorming notes
public void Setup() {
//Examples of fluent API
//API change per liudas i blame skrilex
using(BloomFilter filter = BloomFilter.BestBeforeYouReach(25).WithConllisionProbabilityOf(0.1)) // maybe LimitedTo(25)
{
}
using(BloomFilter filter = BloomFilter.ForExpected(25).WithConllisionProbability(0.1)
{
}
using(BloomFilter filter = BloomFilter.ForExpected(25).WithConllisionProbability(0.1).Unmanaged();
{
}
using(BloomFilter filter = BloomFilter.WithSize(25).WithConllisionProbability(0.1).Unmanaged();
{
}
using(BloomFilter filter = BloomFilter.Default())
{
}
using(BloomFilter filter = new BloomFilter()) //equivalent to above
{
}
using(BloomFilter filter = BloomFilter.ForExpected(25).WithConllisionProbability(0.1).PersitentTo(@".\urlFilter.bf")) // vs PersistedTo
{
}
using(BloomFilter filter = BloomFilter.ForExpected(25).WithConllisionProbability(0.1).OnTransientMemoryMap())
{
}
using(BloomFilter filter = BloomFilter.ForExpected(25).WithConllisionProbability(0.1).PersistedTo(@".\urlFilter.bf"))
{
}
//resize
using(BloomFilter filter = BloomFilter.ForExpected(25).WithCollisionProbability(0.1).PersitentTo(@".\urlFilter.bf").WithAutoResizing())
{
}
}
|
{
"content_hash": "d06fd47e9d9d34c56feb64a3152262fc",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 136,
"avg_line_length": 20.93220338983051,
"alnum_prop": 0.7441295546558705,
"repo_name": "gregoryyoung/bloomburger",
"id": "01cf5ce0a6deac0ba1d09b57d8485aec3f3f5e76",
"size": "1235",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "shitbird/liudas.cs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C#",
"bytes": "35813"
}
],
"symlink_target": ""
}
|
/*!
* Angular Material Design
* https://github.com/angular/material
* @license MIT
* v0.6.1-master-fcb9b2c
*/
md-backdrop.md-opaque.md-pink-theme {
background-color: rgba(0, 0, 0, 0.3);
position: absolute; }
md-bottom-sheet.md-pink-theme {
background-color: #fafafa;
border-top-color: #bdbdbd; }
md-bottom-sheet.md-pink-theme.md-list md-item {
color: rgba(0, 0, 0, 0.54); }
md-bottom-sheet.md-pink-theme .md-subheader {
background-color: #fafafa; }
md-bottom-sheet.md-pink-theme .md-subheader {
color: rgba(0, 0, 0, 0.54); }
.md-button.md-pink-theme {
border-radius: 3px; }
.md-button.md-pink-theme:not([disabled]):hover, .md-button.md-pink-theme:not([disabled]):focus {
background-color: rgba(158, 158, 158, 0.2); }
.md-button.md-pink-theme.md-fab {
border-radius: 50%; }
.md-button.md-pink-theme.md-primary {
color: #ec407a;
fill: #ec407a; }
.md-button.md-pink-theme.md-warn {
color: #ef5350;
fill: #ef5350; }
.md-button.md-pink-theme.md-raised, .md-button.md-pink-theme.md-fab {
background-color: rgba(158, 158, 158, 0.185); }
.md-button.md-pink-theme.md-raised:not([disabled]):hover, .md-button.md-pink-theme.md-raised:not([disabled]):focus, .md-button.md-pink-theme.md-fab:not([disabled]):hover, .md-button.md-pink-theme.md-fab:not([disabled]):focus {
background-color: rgba(158, 158, 158, 0.3); }
.md-button.md-pink-theme.md-raised.md-primary, .md-button.md-pink-theme.md-fab.md-primary {
color: white;
background-color: #e91e63; }
.md-button.md-pink-theme.md-raised.md-primary:not([disabled]):hover, .md-button.md-pink-theme.md-raised.md-primary:not([disabled]):focus, .md-button.md-pink-theme.md-fab.md-primary:not([disabled]):hover, .md-button.md-pink-theme.md-fab.md-primary:not([disabled]):focus {
background-color: #d81b60; }
.md-button.md-pink-theme.md-raised.md-warn, .md-button.md-pink-theme.md-fab.md-warn {
color: white;
background-color: #f44336; }
.md-button.md-pink-theme.md-raised.md-warn:not([disabled]):hover, .md-button.md-pink-theme.md-raised.md-warn:not([disabled]):focus, .md-button.md-pink-theme.md-fab.md-warn:not([disabled]):hover, .md-button.md-pink-theme.md-fab.md-warn:not([disabled]):focus {
background-color: #d32f2f; }
.md-button.md-pink-theme[disabled], .md-button.md-pink-theme.md-raised[disabled], .md-button.md-pink-theme.md-fab[disabled] {
color: rgba(0, 0, 0, 0.26);
fill: rgba(0, 0, 0, 0.26);
background-color: transparent;
cursor: not-allowed; }
md-card.md-pink-theme {
border-radius: 2px; }
md-card.md-pink-theme .md-card-image {
border-radius: 2px 2px 0 0; }
md-checkbox.md-pink-theme .md-ripple {
color: #43a047; }
md-checkbox.md-pink-theme.md-checked .md-ripple {
color: #757575; }
md-checkbox.md-pink-theme .md-icon {
border-color: rgba(0, 0, 0, 0.54); }
md-checkbox.md-pink-theme.md-checked .md-icon {
background-color: rgba(102, 187, 106, 0.87); }
md-checkbox.md-pink-theme.md-checked .md-icon:after {
border-color: #eeeeee; }
md-checkbox.md-pink-theme[disabled] .md-icon {
border-color: rgba(0, 0, 0, 0.26); }
md-checkbox.md-pink-theme[disabled].md-checked .md-icon {
background-color: rgba(0, 0, 0, 0.26); }
md-content.md-pink-theme {
background-color: #ffffff; }
md-dialog.md-pink-theme {
border-radius: 4px;
background-color: #ffffff; }
md-dialog.md-pink-theme.md-content-overflow .md-actions {
border-top-color: rgba(0, 0, 0, 0.12); }
md-divider.md-pink-theme {
border-top-color: rgba(0, 0, 0, 0.12); }
md-progress-circular.md-pink-theme {
background-color: transparent; }
md-progress-circular.md-pink-theme .md-inner .md-gap {
border-top-color: #e91e63;
border-bottom-color: #e91e63; }
md-progress-circular.md-pink-theme .md-inner .md-left .md-half-circle, md-progress-circular.md-pink-theme .md-inner .md-right .md-half-circle {
border-top-color: #e91e63; }
md-progress-circular.md-pink-theme .md-inner .md-right .md-half-circle {
border-right-color: #e91e63; }
md-progress-circular.md-pink-theme .md-inner .md-left .md-half-circle {
border-left-color: #e91e63; }
md-progress-linear.md-pink-theme .md-container {
background-color: #f8bbd0; }
md-progress-linear.md-pink-theme .md-bar {
background-color: #e91e63; }
md-progress-linear.md-pink-theme[md-mode=buffer] .md-dashed:before {
background: radial-gradient(#f8bbd0 0%, #f8bbd0 16%, transparent 42%); }
md-progress-linear.md-pink-theme[md-mode=buffer] .md-bar1 {
background-color: #f8bbd0; }
md-radio-button.md-pink-theme .md-off, md-switch.md-pink-theme .md-switch-thumb .md-off {
border-color: rgba(0, 0, 0, 0.54); }
md-radio-button.md-pink-theme .md-on, md-switch.md-pink-theme .md-switch-thumb .md-on {
background-color: rgba(102, 187, 106, 0.87); }
md-radio-button.md-pink-theme.md-checked .md-off, md-switch.md-pink-theme .md-switch-thumb.md-checked .md-off {
border-color: rgba(102, 187, 106, 0.87); }
md-radio-button.md-pink-theme.md-checked .md-ink-ripple, md-switch.md-pink-theme .md-switch-thumb.md-checked .md-ink-ripple {
color: rgba(102, 187, 106, 0.87); }
md-radio-button.md-pink-theme .md-container .md-ripple, md-switch.md-pink-theme .md-switch-thumb .md-container .md-ripple {
color: #43a047; }
md-radio-button.md-pink-theme[disabled] .md-container .md-off, md-switch.md-pink-theme .md-switch-thumb[disabled] .md-container .md-off {
border-color: rgba(0, 0, 0, 0.26); }
md-radio-button.md-pink-theme[disabled] .md-container .md-on, md-switch.md-pink-theme .md-switch-thumb[disabled] .md-container .md-on {
border-color: rgba(0, 0, 0, 0.26); }
md-radio-group.md-pink-theme:focus {
border-color: rgba(0, 0, 0, 0.73); }
md-slider.md-pink-theme .md-track {
background-color: rgba(0, 0, 0, 0.26); }
md-slider.md-pink-theme .md-track-fill {
background-color: #e91e63; }
md-slider.md-pink-theme .md-thumb:after {
border-color: #e91e63;
background-color: #e91e63; }
md-slider.md-pink-theme .md-sign {
background-color: #e91e63; }
md-slider.md-pink-theme .md-sign:after {
border-top-color: #e91e63; }
md-slider.md-pink-theme .md-thumb-text {
color: white; }
md-slider.md-pink-theme .md-focus-thumb {
background-color: rgba(0, 0, 0, 0.54); }
md-slider.md-pink-theme .md-focus-ring {
border-color: rgba(0, 0, 0, 0.12); }
md-slider.md-pink-theme .md-disabled-thumb {
border-color: #ffffff; }
md-slider.md-pink-theme.md-min .md-thumb:after {
background-color: #ffffff; }
md-slider.md-pink-theme[disabled] .md-thumb:after {
border-color: #bdbdbd; }
md-slider.md-pink-theme[disabled]:not(.md-min) .md-thumb:after {
background-color: #bdbdbd; }
.md-subheader.md-pink-theme {
color: rgba(0, 0, 0, 0.54);
background-color: #ffffff; }
.md-subheader.md-pink-theme.md-primary {
color: #e91e63; }
md-switch.md-pink-theme .md-thumb {
background-color: #fafafa; }
md-switch.md-pink-theme .md-bar {
background-color: #9e9e9e; }
md-switch.md-pink-theme.md-checked .md-thumb {
background-color: #e91e63; }
md-switch.md-pink-theme.md-checked .md-bar {
background-color: rgba(233, 30, 99, 0.5); }
md-switch.md-pink-theme[disabled] .md-thumb {
background-color: #bdbdbd; }
md-switch.md-pink-theme[disabled] .md-bar {
background-color: rgba(0, 0, 0, 0.12); }
md-switch.md-pink-theme:focus .md-text {
border-color: black;
border-style: dotted; }
md-tabs.md-pink-theme .md-header {
background-color: #e91e63; }
md-tabs.md-pink-theme md-tabs-ink-bar {
color: #ffff85;
background: #ffff85; }
md-tabs.md-pink-theme md-tab {
color: #f8bbd0; }
md-tabs.md-pink-theme md-tab.active {
color: white; }
md-tabs.md-pink-theme md-tab[disabled] {
color: rgba(0, 0, 0, 0.12); }
md-tabs.md-pink-theme md-tab:focus {
border-color: rgba(0, 0, 0, 0.73); }
md-tabs.md-pink-theme md-tab .md-ripple-container {
color: #ffff85; }
md-input-group.md-pink-theme input, md-input-group.md-pink-theme textarea {
text-shadow: none; }
md-input-group.md-pink-theme input:-ms-input-placeholder, md-input-group.md-pink-theme textarea:-ms-input-placeholder {
color: rgba(0, 0, 0, 0.26); }
md-input-group.md-pink-theme input::-webkit-input-placeholder, md-input-group.md-pink-theme textarea::-webkit-input-placeholder {
color: rgba(0, 0, 0, 0.26); }
md-input-group.md-pink-theme label {
text-shadow: none;
color: rgba(0, 0, 0, 0.26); }
md-input-group.md-pink-theme input, md-input-group.md-pink-theme textarea {
color: rgba(0, 0, 0, 0.73);
border-color: rgba(0, 0, 0, 0.12); }
md-input-group.md-pink-theme.md-input-focused input, md-input-group.md-pink-theme.md-input-focused textarea {
border-color: #e91e63; }
md-input-group.md-pink-theme.md-input-focused label {
color: #e91e63; }
md-input-group.md-pink-theme.md-input-has-value:not(.md-input-focused) label {
color: rgba(0, 0, 0, 0.372); }
md-input-group.md-pink-theme[disabled] input, md-input-group.md-pink-theme[disabled] textarea {
border-bottom-color: rgba(0, 0, 0, 0.12);
color: rgba(0, 0, 0, 0.26);
background-image: linear-gradient(to right, rgba(0, 0, 0, 0.19) 0%, rgba(0, 0, 0, 0.19) 50%, rgba(0, 0, 0, 0) 0%); }
md-toast.md-pink-theme {
background-color: #323232;
color: white; }
md-toast.md-pink-theme .md-button {
color: white; }
md-toast.md-pink-theme .md-action {
color: #ff4081; }
md-toolbar.md-pink-theme {
background-color: #e91e63;
color: white; }
md-toolbar.md-pink-theme .md-button {
color: white; }
md-tooltip.md-pink-theme {
color: #ffffff; }
md-tooltip.md-pink-theme .md-background {
background-color: rgba(0, 0, 0, 0.52); }
|
{
"content_hash": "d563c284cc4c45cfc34c8266c383decc",
"timestamp": "",
"source": "github",
"line_count": 228,
"max_line_length": 276,
"avg_line_length": 42.05263157894737,
"alnum_prop": 0.6793909052982895,
"repo_name": "DonPage/sync-player",
"id": "de1a11eb578d2cf2392d23bbab81d83c2df51743",
"size": "9588",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bower_components/angular-material/themes/pink-theme.css",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "663"
},
{
"name": "JavaScript",
"bytes": "15512"
}
],
"symlink_target": ""
}
|
<?php
namespace CascadeEnergy\ElasticSearch\Iterators;
use Elasticsearch\Client;
/**
* Based on the original iterator class from the Elasticsearch package.
*
* Updated to allow the scroll ID to be explicitly cleared, which makes it possible to better control and handle
* errors (particularly 404 exceptions) that might occur when the scroll ID is cleared.
*/
class SearchResponseIterator implements \Iterator
{
/**
* @var Client
*/
private $client;
/**
* @var array
*/
private $params;
/**
* @var int
*/
private $currentKey;
/**
* @var array
*/
private $currentScrolledResponse;
/**
* @var string
*/
private $scrollId;
/**
* @var string
*/
private $scrollTtl;
/**
* Constructor
*
* @param Client $client
* @param array $params Associative array of parameters
* @see Client::search()
*/
public function __construct(Client $client, array $params)
{
$this->client = $client;
$this->params = $params;
if (isset($params['scroll'])) {
$this->scrollTtl = $params['scroll'];
}
}
/**
* Sets the time to live duration of a scroll window
*
* @param string $timeToLive
* @return $this
*/
public function setScrollTimeout($timeToLive)
{
$this->scrollTtl = $timeToLive;
return $this;
}
/**
* Clears the current scroll window if there is a scroll_id stored
*
* @return void
*/
public function clearScroll()
{
if (!empty($this->scrollId)) {
$scrollId = $this->scrollId;
$this->scrollId = null;
$this->client->clearScroll(['scroll_id' => $scrollId]);
}
}
/**
* Rewinds the iterator by performing the initial search.
*
* The "search_type" parameter will determine if the first "page" contains
* hits or if the first page contains just a "scroll_id"
*
* @return void
* @see Iterator::rewind()
*/
public function rewind()
{
$this->clearScroll();
$this->currentKey = 0;
$this->currentScrolledResponse = $this->client->search($this->params);
$this->scrollId = $this->currentScrolledResponse['_scroll_id'];
}
/**
* Fetches every "page" after the first one using the latest "scroll_id"
*
* @return void
* @see Iterator::next()
*/
public function next()
{
$this->currentKey++;
$this->currentScrolledResponse = $this->client->scroll(
array(
'scroll_id' => $this->scrollId,
'scroll' => $this->scrollTtl
)
);
$this->scrollId = $this->currentScrolledResponse['_scroll_id'];
}
/**
* Returns a boolean value indicating if the current page is valid or not
* based on the number of hits in the page considering that the first page
* might not include any hits
*
* @return bool
* @see Iterator::valid()
*/
public function valid()
{
return ($this->currentKey === 0) || isset($this->currentScrolledResponse['hits']['hits'][0]);
}
/**
* Returns the current "page"
*
* @return array
* @see Iterator::current()
*/
public function current()
{
return $this->currentScrolledResponse;
}
/**
* Returns the current "page number" of the current "page"
*
* @return int
* @see Iterator::key()
*/
public function key()
{
return $this->currentKey;
}
}
|
{
"content_hash": "d8f04a0863f47642b35e89d6235ce720",
"timestamp": "",
"source": "github",
"line_count": 158,
"max_line_length": 112,
"avg_line_length": 23.20253164556962,
"alnum_prop": 0.5548281505728314,
"repo_name": "CascadeEnergy/php-elasticsearch",
"id": "2fc853125945bf7db98bf901598f23d8cb597495",
"size": "3666",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Iterators/SearchResponseIterator.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "34941"
}
],
"symlink_target": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.